Mirror of https://codeberg.org/forgejo/forgejo.git (synced 2024-12-26 13:43:55 +03:00)

Commit 094c84ed6d: Merge branch 'rebase-forgejo-dependency' into wip-forgejo

292 changed files with 8842 additions and 1269 deletions
@@ -100,6 +100,8 @@ package "code.gitea.io/gitea/models/unittest"
 func LoadFixtures
 func Copy
 func CopyDir
+func NewMockWebServer
+func NormalizedFullPath
 func FixturesDir
 func fatalTestError
 func InitSettings
@@ -322,6 +324,7 @@ package "code.gitea.io/gitea/services/pull"

 package "code.gitea.io/gitea/services/repository"
 func IsErrForkAlreadyExist
+func UpdateRepositoryUnits

 package "code.gitea.io/gitea/services/repository/archiver"
 func ArchiveRepository
@@ -90,6 +90,8 @@ linters-settings:
 desc: do not use the internal package, use AddXxx function instead
 - pkg: gopkg.in/ini.v1
 desc: do not use the ini package, use gitea's config system instead
+- pkg: github.com/minio/sha256-simd
+desc: use crypto/sha256 instead, see https://codeberg.org/forgejo/forgejo/pulls/1528

 issues:
 max-issues-per-linter: 0
assets/go-licenses.json (generated, 10 lines changed)
File diff suppressed because one or more lines are too long
@@ -70,7 +70,7 @@ func runGenerateInternalToken(c *cli.Context) error {
 }

 func runGenerateLfsJwtSecret(c *cli.Context) error {
-_, jwtSecretBase64, err := generate.NewJwtSecretBase64()
+_, jwtSecretBase64, err := generate.NewJwtSecret()
 if err != nil {
 return err
 }
@@ -1,4 +1,4 @@
 {
-"go.buildTags": "'sqlite sqlite_unlock_notify'",
+"go.buildTags": "sqlite,sqlite_unlock_notify",
 "go.testFlags": ["-v"]
 }
@@ -412,6 +412,10 @@ USER = root
 ;;
 ;; Whether execute database models migrations automatically
 ;AUTO_MIGRATION = true
+;;
+;; Threshold value (in seconds) beyond which query execution time is logged as a warning in the xorm logger
+;;
+;SLOW_QUERY_TRESHOLD = 5s

 ;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;
 ;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;
@@ -817,6 +821,11 @@ LEVEL = Info
 ;; Every new user will have restricted permissions depending on this setting
 ;DEFAULT_USER_IS_RESTRICTED = false
 ;;
+;; Users will be able to use dots when choosing their username. Disabling this is
+;; helpful if your users are having issues with e.g. RSS feeds or advanced third-party
+;; extensions that use strange regex patterns.
+; ALLOW_DOTS_IN_USERNAMES = true
+;;
 ;; Either "public", "limited" or "private", default is "public"
 ;; Limited is for users visible only to signed users
 ;; Private is for users visible only to members of their organizations
@@ -903,6 +912,14 @@ LEVEL = Info
 ;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;
 ;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;

+;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;
+;[badges]
+;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;
+;; Enable repository badges (via shields.io or a similar generator)
+;ENABLED = true
+;; Template for the badge generator.
+;GENERATOR_URL_TEMPLATE = https://img.shields.io/badge/{{.label}}-{{.text}}-{{.color}}
+
 ;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;
 ;[repository]
 ;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;
@@ -1470,6 +1487,8 @@ LEVEL = Info
 ;;
 ;; Default configuration for email notifications for users (user configurable). Options: enabled, onmention, disabled
 ;DEFAULT_EMAIL_NOTIFICATIONS = enabled
+;; Send an email to all admins when a new user signs up to inform the admins about this act. Options: true, false
+;SEND_NOTIFICATION_EMAIL_ON_NEW_USER = false

 ;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;
 ;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;
@@ -1783,9 +1802,6 @@ LEVEL = Info
 ;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;
 ;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;
 ;;
-;AVATAR_UPLOAD_PATH = data/avatars
-;REPOSITORY_AVATAR_UPLOAD_PATH = data/repo-avatars
-;;
 ;; How Gitea deals with missing repository avatars
 ;; none = no avatar will be displayed; random = random avatar will be displayed; image = default image will be used
 ;REPOSITORY_AVATAR_FALLBACK = none
@@ -458,6 +458,7 @@ The following configuration set `Content-Type: application/vnd.android.package-a
 - `MAX_IDLE_CONNS` **2**: Max idle database connections on connection pool, default is 2 - this will be capped to `MAX_OPEN_CONNS`.
 - `CONN_MAX_LIFETIME` **0 or 3s**: Sets the maximum amount of time a DB connection may be reused - default is 0, meaning there is no limit (except on MySQL where it is 3s - see #6804 & #7071).
 - `AUTO_MIGRATION` **true**: Whether execute database models migrations automatically.
+- `SLOW_QUERY_THRESHOLD` **5s**: Threshold value in seconds beyond which query execution time is logged as a warning in the xorm logger.

 [^1]: It may be necessary to specify a hostport even when listening on a unix socket, as the port is part of the socket name. see [#24552](https://github.com/go-gitea/gitea/issues/24552#issuecomment-1681649367) for additional details.

@@ -517,6 +518,7 @@ And the following unique queues:

 - `DEFAULT_EMAIL_NOTIFICATIONS`: **enabled**: Default configuration for email notifications for users (user configurable). Options: enabled, onmention, disabled
 - `DISABLE_REGULAR_ORG_CREATION`: **false**: Disallow regular (non-admin) users from creating organizations.
+- `SEND_NOTIFICATION_EMAIL_ON_NEW_USER`: **false**: Send an email to all admins when a new user signs up to inform the admins about this act.

 ## Security (`security`)

go.mod (8 lines changed)
@@ -15,7 +15,6 @@ require (
 gitea.com/lunny/levelqueue v0.4.2-0.20230414023320-3c0159fe0fe4
 github.com/42wim/sshsig v0.0.0-20211121163825-841cf5bbc121
 github.com/Azure/go-ntlmssp v0.0.0-20221128193559-754e69321358
-github.com/NYTimes/gziphandler v1.1.1
 github.com/PuerkitoBio/goquery v1.8.1
 github.com/alecthomas/chroma/v2 v2.12.0
 github.com/blakesmith/ar v0.0.0-20190502131153-809d4375e1fb
@@ -77,14 +76,12 @@ require (
 github.com/mholt/archiver/v3 v3.5.1
 github.com/microcosm-cc/bluemonday v1.0.26
 github.com/minio/minio-go/v7 v7.0.66
-github.com/minio/sha256-simd v1.0.1
 github.com/msteinert/pam v1.2.0
 github.com/nektos/act v0.2.52
 github.com/niklasfasching/go-org v1.7.0
 github.com/olivere/elastic/v7 v7.0.32
 github.com/opencontainers/go-digest v1.0.0
 github.com/opencontainers/image-spec v1.1.0-rc6
-github.com/pkg/errors v0.9.1
 github.com/pquerna/otp v1.4.0
 github.com/prometheus/client_golang v1.18.0
 github.com/quasoft/websspi v1.1.2
@@ -100,7 +97,6 @@ require (
 github.com/ulikunitz/xz v0.5.11
 github.com/urfave/cli/v2 v2.27.1
 github.com/xanzy/go-gitlab v0.96.0
-github.com/xeipuuv/gojsonschema v1.2.0
 github.com/yohcop/openid-go v1.0.1
 github.com/yuin/goldmark v1.6.0
 github.com/yuin/goldmark-highlighting/v2 v2.0.0-20230729083705-37449abec8cc
@@ -230,6 +226,7 @@ require (
 github.com/mholt/acmez v1.2.0 // indirect
 github.com/miekg/dns v1.1.58 // indirect
 github.com/minio/md5-simd v1.1.2 // indirect
+github.com/minio/sha256-simd v1.0.1 // indirect
 github.com/mitchellh/copystructure v1.2.0 // indirect
 github.com/mitchellh/mapstructure v1.5.0 // indirect
 github.com/mitchellh/reflectwalk v1.0.2 // indirect
@@ -245,6 +242,7 @@ require (
 github.com/pelletier/go-toml/v2 v2.1.1 // indirect
 github.com/pierrec/lz4/v4 v4.1.21 // indirect
 github.com/pjbgf/sha1cd v0.3.0 // indirect
+github.com/pkg/errors v0.9.1 // indirect
 github.com/pmezard/go-difflib v1.0.1-0.20181226105442-5d4384ee4fb2 // indirect
 github.com/prometheus/client_model v0.5.0 // indirect
 github.com/prometheus/common v0.46.0 // indirect
@@ -275,8 +273,6 @@ require (
 github.com/valyala/fastjson v1.6.4 // indirect
 github.com/x448/float16 v0.8.4 // indirect
 github.com/xanzy/ssh-agent v0.3.3 // indirect
-github.com/xeipuuv/gojsonpointer v0.0.0-20190905194746-02993c407bfb // indirect
-github.com/xeipuuv/gojsonreference v0.0.0-20180127040603-bd5ef7bd5415 // indirect
 github.com/xi2/xz v0.0.0-20171230120015-48954b6210f8 // indirect
 github.com/xrash/smetrics v0.0.0-20231213231151-1d8dd44e695e // indirect
 github.com/zeebo/blake3 v0.2.3 // indirect
go.sum (9 lines changed)
@@ -93,8 +93,6 @@ github.com/Masterminds/sprig/v3 v3.2.3/go.mod h1:rXcFaZ2zZbLRJv/xSysmlgIM1u11eBa
 github.com/Microsoft/go-winio v0.5.2/go.mod h1:WpS1mjBmmwHBEWmogvA2mj8546UReBk4v8QkMxJ6pZY=
 github.com/Microsoft/go-winio v0.6.1 h1:9/kr64B9VUZrLm5YYwbGtUJnMgqWVOdUAXu6Migciow=
 github.com/Microsoft/go-winio v0.6.1/go.mod h1:LRdKpFKfdobln8UmuiYcKPot9D2v6svN5+sAH+4kjUM=
-github.com/NYTimes/gziphandler v1.1.1 h1:ZUDjpQae29j0ryrS0u/B8HZfJBtBQHjqw2rQ2cqUQ3I=
-github.com/NYTimes/gziphandler v1.1.1/go.mod h1:n/CVRwUEOgIxrgPvAQhUUr9oeUtvrhMomdKFjzJNB0c=
 github.com/ProtonMail/go-crypto v1.0.0 h1:LRuvITjQWX+WIfr930YHG2HNfjR1uOfyf5vE0kC2U78=
 github.com/ProtonMail/go-crypto v1.0.0/go.mod h1:EjAoLdwvbIOoOQr3ihjnSoLZRtE8azugULFRteWMNc0=
 github.com/PuerkitoBio/goquery v1.8.1 h1:uQxhNlArOIdbrH1tr0UXwdVFgDcZDrZVdcpygAcwmWM=
@@ -832,13 +830,6 @@ github.com/xdg-go/scram v1.1.1/go.mod h1:RaEWvsqvNKKvBPvcKeFjrG2cJqOkHTiyTpzz23n
 github.com/xdg-go/scram v1.1.2/go.mod h1:RT/sEzTbU5y00aCK8UOx6R7YryM0iF1N2MOmC3kKLN4=
 github.com/xdg-go/stringprep v1.0.3/go.mod h1:W3f5j4i+9rC0kuIEJL0ky1VpHXQU3ocBgklLGvcBnW8=
 github.com/xdg-go/stringprep v1.0.4/go.mod h1:mPGuuIYwz7CmR2bT9j4GbQqutWS1zV24gijq1dTyGkM=
-github.com/xeipuuv/gojsonpointer v0.0.0-20180127040702-4e3ac2762d5f/go.mod h1:N2zxlSyiKSe5eX1tZViRH5QA0qijqEDrYZiPEAiq3wU=
-github.com/xeipuuv/gojsonpointer v0.0.0-20190905194746-02993c407bfb h1:zGWFAtiMcyryUHoUjUJX0/lt1H2+i2Ka2n+D3DImSNo=
-github.com/xeipuuv/gojsonpointer v0.0.0-20190905194746-02993c407bfb/go.mod h1:N2zxlSyiKSe5eX1tZViRH5QA0qijqEDrYZiPEAiq3wU=
-github.com/xeipuuv/gojsonreference v0.0.0-20180127040603-bd5ef7bd5415 h1:EzJWgHovont7NscjpAxXsDA8S8BMYve8Y5+7cuRE7R0=
-github.com/xeipuuv/gojsonreference v0.0.0-20180127040603-bd5ef7bd5415/go.mod h1:GwrjFmJcFw6At/Gs6z4yjiIwzuJ1/+UwLxMQDVQXShQ=
-github.com/xeipuuv/gojsonschema v1.2.0 h1:LhYJRs+L4fBtjZUfuSZIKGeVu0QRy8e5Xi7D17UxZ74=
-github.com/xeipuuv/gojsonschema v1.2.0/go.mod h1:anYRn/JVcOK2ZgGU+IjEV4nwlhoK5sQluxsYJ78Id3Y=
 github.com/xi2/xz v0.0.0-20171230120015-48954b6210f8 h1:nIPpBwaJSVYIxUFsDv3M8ofmx9yWTog9BfvIu0q41lo=
 github.com/xi2/xz v0.0.0-20171230120015-48954b6210f8/go.mod h1:HUYIGzjTL3rfEspMxjDjgmT5uz5wzYJKVo23qUhYTos=
 github.com/xrash/smetrics v0.0.0-20231213231151-1d8dd44e695e h1:+SOyEddqYF09QP7vr7CgJ1eti3pY9Fn3LHO1M1r/0sI=
@@ -171,14 +171,13 @@ func updateRepoRunsNumbers(ctx context.Context, repo *repo_model.Repository) err
 }

 // CancelRunningJobs cancels all running and waiting jobs associated with a specific workflow.
-func CancelRunningJobs(ctx context.Context, repoID int64, ref, workflowID string, event webhook_module.HookEventType) error {
+func CancelRunningJobs(ctx context.Context, repoID int64, ref, workflowID string) error {
 // Find all runs in the specified repository, reference, and workflow with statuses 'Running' or 'Waiting'.
 runs, total, err := db.FindAndCount[ActionRun](ctx, FindRunOptions{
 RepoID: repoID,
 Ref: ref,
 WorkflowID: workflowID,
-TriggerEvent: event,
 Status: []Status{StatusRunning, StatusWaiting},
 })
 if err != nil {
 return err
@@ -312,6 +311,32 @@ func InsertRun(ctx context.Context, run *ActionRun, jobs []*jobparser.SingleWork
 return commiter.Commit()
 }

+func GetLatestRun(ctx context.Context, repoID int64) (*ActionRun, error) {
+var run ActionRun
+has, err := db.GetEngine(ctx).Where("repo_id=?", repoID).OrderBy("id DESC").Limit(1).Get(&run)
+if err != nil {
+return nil, err
+} else if !has {
+return nil, fmt.Errorf("latest run: %w", util.ErrNotExist)
+}
+return &run, nil
+}
+
+func GetLatestRunForBranchAndWorkflow(ctx context.Context, repoID int64, branch, workflowFile, event string) (*ActionRun, error) {
+var run ActionRun
+q := db.GetEngine(ctx).Where("repo_id=?", repoID).And("ref=?", branch).And("workflow_id=?", workflowFile)
+if event != "" {
+q = q.And("event=?", event)
+}
+has, err := q.Desc("id").Get(&run)
+if err != nil {
+return nil, err
+} else if !has {
+return nil, util.NewNotExistErrorf("run with repo_id %d, ref %s, workflow_id %s", repoID, branch, workflowFile)
+}
+return &run, nil
+}
+
 func GetRunByID(ctx context.Context, id int64) (*ActionRun, error) {
 var run ActionRun
 has, err := db.GetEngine(ctx).Where("id=?", id).Get(&run)
@@ -10,7 +10,6 @@ import (
 repo_model "code.gitea.io/gitea/models/repo"
 user_model "code.gitea.io/gitea/models/user"
 "code.gitea.io/gitea/modules/container"
-webhook_module "code.gitea.io/gitea/modules/webhook"

 "xorm.io/builder"
 )
@@ -72,7 +71,6 @@ type FindRunOptions struct {
 WorkflowID string
 Ref string // the commit/tag/… that caused this workflow
 TriggerUserID int64
-TriggerEvent webhook_module.HookEventType
 Approved bool // not util.OptionalBool, it works only when it's true
 Status []Status
 }
@@ -100,9 +98,6 @@ func (opts FindRunOptions) ToConds() builder.Cond {
 if opts.Ref != "" {
 cond = cond.And(builder.Eq{"ref": opts.Ref})
 }
-if opts.TriggerEvent != "" {
-cond = cond.And(builder.Eq{"trigger_event": opts.TriggerEvent})
-}
 return cond
 }

@@ -5,7 +5,6 @@ package actions

 import (
 "context"
-"fmt"
 "time"

 "code.gitea.io/gitea/models/db"
@@ -119,22 +118,3 @@ func DeleteScheduleTaskByRepo(ctx context.Context, id int64) error {

 return committer.Commit()
 }
-
-func CleanRepoScheduleTasks(ctx context.Context, repo *repo_model.Repository) error {
-// If actions disabled when there is schedule task, this will remove the outdated schedule tasks
-// There is no other place we can do this because the app.ini will be changed manually
-if err := DeleteScheduleTaskByRepo(ctx, repo.ID); err != nil {
-return fmt.Errorf("DeleteCronTaskByRepo: %v", err)
-}
-// cancel running cron jobs of this repository and delete old schedules
-if err := CancelRunningJobs(
-ctx,
-repo.ID,
-repo.DefaultBranch,
-"",
-webhook_module.HookEventSchedule,
-); err != nil {
-return fmt.Errorf("CancelRunningJobs: %v", err)
-}
-return nil
-}
@@ -14,6 +14,7 @@ func TestMain(m *testing.M) {
 FixtureFiles: []string{
 "gpg_key.yml",
 "public_key.yml",
+"TestParseCommitWithSSHSignature/public_key.yml",
 "deploy_key.yml",
 "gpg_key_import.yml",
 "user.yml",
@@ -169,7 +169,12 @@ func RewriteAllPublicKeys(ctx context.Context) error {
 return err
 }

-t.Close()
+if err := t.Sync(); err != nil {
+return err
+}
+if err := t.Close(); err != nil {
+return err
+}
 return util.Rename(tmpPath, fPath)
 }

@@ -92,7 +92,12 @@ func RewriteAllPrincipalKeys(ctx context.Context) error {
 return err
 }

-t.Close()
+if err := t.Sync(); err != nil {
+return err
+}
+if err := t.Close(); err != nil {
+return err
+}
 return util.Rename(tmpPath, fPath)
 }

@@ -39,6 +39,12 @@ func ParseCommitWithSSHSignature(ctx context.Context, c *git.Commit, committer *
 log.Error("GetEmailAddresses: %v", err)
 }

+// Add the noreply email address as verified address.
+committerEmailAddresses = append(committerEmailAddresses, &user_model.EmailAddress{
+IsActivated: true,
+Email: committer.GetPlaceholderEmail(),
+})
+
 activated := false
 for _, e := range committerEmailAddresses {
 if e.IsActivated && strings.EqualFold(e.Email, c.Committer.Email) {
models/asymkey/ssh_key_commit_verification_test.go (new file, 146 lines)
@@ -0,0 +1,146 @@
+// Copyright 2023 The Forgejo Authors. All rights reserved.
+// SPDX-License-Identifier: MIT
+
+package asymkey
+
+import (
+"testing"
+
+"code.gitea.io/gitea/models/db"
+"code.gitea.io/gitea/models/unittest"
+user_model "code.gitea.io/gitea/models/user"
+"code.gitea.io/gitea/modules/git"
+"code.gitea.io/gitea/modules/setting"
+"code.gitea.io/gitea/modules/test"
+
+"github.com/stretchr/testify/assert"
+)
+
+func TestParseCommitWithSSHSignature(t *testing.T) {
+assert.NoError(t, unittest.PrepareTestDatabase())
+user2 := unittest.AssertExistsAndLoadBean(t, &user_model.User{ID: 2})
+sshKey := unittest.AssertExistsAndLoadBean(t, &PublicKey{ID: 1000, OwnerID: 2})
+
+t.Run("No commiter", func(t *testing.T) {
+commitVerification := ParseCommitWithSSHSignature(db.DefaultContext, &git.Commit{}, &user_model.User{})
+assert.False(t, commitVerification.Verified)
+assert.Equal(t, NoKeyFound, commitVerification.Reason)
+})
+
+t.Run("Commiter without keys", func(t *testing.T) {
+user := unittest.AssertExistsAndLoadBean(t, &user_model.User{ID: 1})
+
+commitVerification := ParseCommitWithSSHSignature(db.DefaultContext, &git.Commit{Committer: &git.Signature{Email: user.Email}}, user)
+assert.False(t, commitVerification.Verified)
+assert.Equal(t, NoKeyFound, commitVerification.Reason)
+})
+
+t.Run("Correct signature with wrong email", func(t *testing.T) {
+gitCommit := &git.Commit{
+Committer: &git.Signature{
+Email: "non-existent",
+},
+Signature: &git.CommitGPGSignature{
+Payload: `tree 2d491b2985a7ff848d5c02748e7ea9f9f7619f9f
+parent 45b03601635a1f463b81963a4022c7f87ce96ef9
+author user2 <non-existent> 1699710556 +0100
+committer user2 <non-existent> 1699710556 +0100
+
+Using email that isn't known to Forgejo
+`,
+Signature: `-----BEGIN SSH SIGNATURE-----
+U1NIU0lHAAAAAQAAADMAAAALc3NoLWVkMjU1MTkAAAAgoGSe9Zy7Ez9bSJcaTNjh/Y7p95
+f5DujjqkpzFRtw6CEAAAADZ2l0AAAAAAAAAAZzaGE1MTIAAABTAAAAC3NzaC1lZDI1NTE5
+AAAAQIMufOuSjZeDUujrkVK4sl7ICa0WwEftas8UAYxx0Thdkiw2qWjR1U1PKfTLm16/w8
+/bS1LX1lZNuzm2LR2qEgw=
+-----END SSH SIGNATURE-----
+`,
+},
+}
+commitVerification := ParseCommitWithSSHSignature(db.DefaultContext, gitCommit, user2)
+assert.False(t, commitVerification.Verified)
+assert.Equal(t, NoKeyFound, commitVerification.Reason)
+})
+
+t.Run("Incorrect signature with correct email", func(t *testing.T) {
+gitCommit := &git.Commit{
+Committer: &git.Signature{
+Email: "user2@example.com",
+},
+Signature: &git.CommitGPGSignature{
+Payload: `tree 853694aae8816094a0d875fee7ea26278dbf5d0f
+parent c2780d5c313da2a947eae22efd7dacf4213f4e7f
+author user2 <user2@example.com> 1699707877 +0100
+committer user2 <user2@example.com> 1699707877 +0100
+
+Add content
+`,
+Signature: `-----BEGIN SSH SIGNATURE-----`,
+},
+}
+
+commitVerification := ParseCommitWithSSHSignature(db.DefaultContext, gitCommit, user2)
+assert.False(t, commitVerification.Verified)
+assert.Equal(t, NoKeyFound, commitVerification.Reason)
+})
+
+t.Run("Valid signature with correct email", func(t *testing.T) {
+gitCommit := &git.Commit{
+Committer: &git.Signature{
+Email: "user2@example.com",
+},
+Signature: &git.CommitGPGSignature{
+Payload: `tree 853694aae8816094a0d875fee7ea26278dbf5d0f
+parent c2780d5c313da2a947eae22efd7dacf4213f4e7f
+author user2 <user2@example.com> 1699707877 +0100
+committer user2 <user2@example.com> 1699707877 +0100
+
+Add content
+`,
+Signature: `-----BEGIN SSH SIGNATURE-----
+U1NIU0lHAAAAAQAAADMAAAALc3NoLWVkMjU1MTkAAAAgoGSe9Zy7Ez9bSJcaTNjh/Y7p95
+f5DujjqkpzFRtw6CEAAAADZ2l0AAAAAAAAAAZzaGE1MTIAAABTAAAAC3NzaC1lZDI1NTE5
+AAAAQBe2Fwk/FKY3SBCnG6jSYcO6ucyahp2SpQ/0P+otslzIHpWNW8cQ0fGLdhhaFynJXQ
+fs9cMpZVM9BfIKNUSO8QY=
+-----END SSH SIGNATURE-----
+`,
+},
+}
+
+commitVerification := ParseCommitWithSSHSignature(db.DefaultContext, gitCommit, user2)
+assert.True(t, commitVerification.Verified)
+assert.Equal(t, "user2 / SHA256:TKfwbZMR7e9OnlV2l1prfah1TXH8CmqR0PvFEXVCXA4", commitVerification.Reason)
+assert.Equal(t, sshKey, commitVerification.SigningSSHKey)
+})
+
+t.Run("Valid signature with noreply email", func(t *testing.T) {
+defer test.MockVariableValue(&setting.Service.NoReplyAddress, "noreply.example.com")()
+
+gitCommit := &git.Commit{
+Committer: &git.Signature{
+Email: "user2@noreply.example.com",
+},
+Signature: &git.CommitGPGSignature{
+Payload: `tree 4836c7f639f37388bab4050ef5c97bbbd54272fc
+parent 795be1b0117ea5c65456050bb9fd84744d4fd9c6
+author user2 <user2@noreply.example.com> 1699709594 +0100
+committer user2 <user2@noreply.example.com> 1699709594 +0100
+
+Commit with noreply
+`,
+Signature: `-----BEGIN SSH SIGNATURE-----
+U1NIU0lHAAAAAQAAADMAAAALc3NoLWVkMjU1MTkAAAAgoGSe9Zy7Ez9bSJcaTNjh/Y7p95
+f5DujjqkpzFRtw6CEAAAADZ2l0AAAAAAAAAAZzaGE1MTIAAABTAAAAC3NzaC1lZDI1NTE5
+AAAAQJz83KKxD6Bz/ZvNpqkA3RPOSQ4LQ5FfEItbtoONkbwV9wAWMnmBqgggo/lnXCJ3oq
+muPLbvEduU+Ze/1Ol1pgk=
+-----END SSH SIGNATURE-----
+`,
+},
+}
+
+commitVerification := ParseCommitWithSSHSignature(db.DefaultContext, gitCommit, user2)
+assert.True(t, commitVerification.Verified)
+assert.Equal(t, "user2 / SHA256:TKfwbZMR7e9OnlV2l1prfah1TXH8CmqR0PvFEXVCXA4", commitVerification.Reason)
+assert.Equal(t, sshKey, commitVerification.SigningSSHKey)
+})
+}
@@ -250,7 +250,7 @@ func (s AccessTokenScope) parse() (accessTokenScopeBitmap, error) {
 remainingScopes = remainingScopes[i+1:]
 }
 singleScope := AccessTokenScope(v)
-if singleScope == "" {
+if singleScope == "" || singleScope == "sudo" {
 continue
 }
 if singleScope == AccessTokenScopeAll {
@@ -20,7 +20,7 @@ func TestAccessTokenScope_Normalize(t *testing.T) {
 tests := []scopeTestNormalize{
 {"", "", nil},
 {"write:misc,write:notification,read:package,write:notification,public-only", "public-only,write:misc,write:notification,read:package", nil},
-{"all", "all", nil},
+{"all,sudo", "all", nil},
 {"write:activitypub,write:admin,write:misc,write:notification,write:organization,write:package,write:issue,write:repository,write:user", "all", nil},
 {"write:activitypub,write:admin,write:misc,write:notification,write:organization,write:package,write:issue,write:repository,write:user,public-only", "public-only,all", nil},
 }
models/auth/session_test.go (new file, 142 lines)
@@ -0,0 +1,142 @@
+// Copyright 2023 The Forgejo Authors. All rights reserved.
+// SPDX-License-Identifier: MIT
+
+package auth_test
+
+import (
+"testing"
+"time"
+
+"code.gitea.io/gitea/models/auth"
+"code.gitea.io/gitea/models/db"
+"code.gitea.io/gitea/models/unittest"
+"code.gitea.io/gitea/modules/timeutil"
+
+"github.com/stretchr/testify/assert"
+)
+
+func TestAuthSession(t *testing.T) {
+assert.NoError(t, unittest.PrepareTestDatabase())
+defer timeutil.MockUnset()
+
+key := "I-Like-Free-Software"
+
+t.Run("Create Session", func(t *testing.T) {
+// Ensure it doesn't exist.
+ok, err := auth.ExistSession(db.DefaultContext, key)
+assert.NoError(t, err)
+assert.False(t, ok)
+
+preCount, err := auth.CountSessions(db.DefaultContext)
+assert.NoError(t, err)
+
+now := time.Date(2021, 1, 1, 0, 0, 0, 0, time.UTC)
+timeutil.MockSet(now)
+
+// New session is created.
+sess, err := auth.ReadSession(db.DefaultContext, key)
+assert.NoError(t, err)
+assert.EqualValues(t, key, sess.Key)
+assert.Empty(t, sess.Data)
+assert.EqualValues(t, now.Unix(), sess.Expiry)
+
+// Ensure it exists.
+ok, err = auth.ExistSession(db.DefaultContext, key)
+assert.NoError(t, err)
+assert.True(t, ok)
+
+// Ensure the session is taken into account for count..
+postCount, err := auth.CountSessions(db.DefaultContext)
+assert.NoError(t, err)
+assert.Greater(t, postCount, preCount)
+})
+
+t.Run("Update session", func(t *testing.T) {
+data := []byte{0xba, 0xdd, 0xc0, 0xde}
+now := time.Date(2022, 1, 1, 0, 0, 0, 0, time.UTC)
+timeutil.MockSet(now)
+
+// Update session.
+err := auth.UpdateSession(db.DefaultContext, key, data)
+assert.NoError(t, err)
+
+timeutil.MockSet(time.Date(2021, 1, 1, 0, 0, 0, 0, time.UTC))
+
+// Read updated session.
+// Ensure data is updated and expiry is set from the update session call.
+sess, err := auth.ReadSession(db.DefaultContext, key)
+assert.NoError(t, err)
+assert.EqualValues(t, key, sess.Key)
+assert.EqualValues(t, data, sess.Data)
+assert.EqualValues(t, now.Unix(), sess.Expiry)
+
+timeutil.MockSet(now)
+})
+
+t.Run("Delete session", func(t *testing.T) {
+// Ensure it't exist.
+ok, err := auth.ExistSession(db.DefaultContext, key)
+assert.NoError(t, err)
+assert.True(t, ok)
+
+preCount, err := auth.CountSessions(db.DefaultContext)
+assert.NoError(t, err)
+
+err = auth.DestroySession(db.DefaultContext, key)
+assert.NoError(t, err)
+
+// Ensure it doens't exists.
+ok, err = auth.ExistSession(db.DefaultContext, key)
+assert.NoError(t, err)
+assert.False(t, ok)
+
+// Ensure the session is taken into account for count..
+postCount, err := auth.CountSessions(db.DefaultContext)
+assert.NoError(t, err)
+assert.Less(t, postCount, preCount)
+})
+
+t.Run("Cleanup sessions", func(t *testing.T) {
+timeutil.MockSet(time.Date(2023, 1, 1, 0, 0, 0, 0, time.UTC))
+
+_, err := auth.ReadSession(db.DefaultContext, "sess-1")
+assert.NoError(t, err)
+
+// One minute later.
+timeutil.MockSet(time.Date(2023, 1, 1, 0, 1, 0, 0, time.UTC))
+_, err = auth.ReadSession(db.DefaultContext, "sess-2")
+assert.NoError(t, err)
+
+// 5 minutes, shouldn't clean up anything.
+err = auth.CleanupSessions(db.DefaultContext, 5*60)
+assert.NoError(t, err)
+
+ok, err := auth.ExistSession(db.DefaultContext, "sess-1")
+assert.NoError(t, err)
+assert.True(t, ok)
+
+ok, err = auth.ExistSession(db.DefaultContext, "sess-2")
+assert.NoError(t, err)
+assert.True(t, ok)
+
+// 1 minute, should clean up sess-1.
+err = auth.CleanupSessions(db.DefaultContext, 60)
+assert.NoError(t, err)
+
+ok, err = auth.ExistSession(db.DefaultContext, "sess-1")
+assert.NoError(t, err)
+assert.False(t, ok)
+
+ok, err = auth.ExistSession(db.DefaultContext, "sess-2")
+assert.NoError(t, err)
+assert.True(t, ok)
+
+// Now, should clean up sess-2.
+err = auth.CleanupSessions(db.DefaultContext, 0)
+assert.NoError(t, err)
+
+ok, err = auth.ExistSession(db.DefaultContext, "sess-2")
+assert.NoError(t, err)
+assert.False(t, ok)
+})
+}
@@ -6,6 +6,7 @@ package auth
 import (
 "context"
 "crypto/md5"
+"crypto/sha256"
 "crypto/subtle"
 "encoding/base32"
 "encoding/base64"
@@ -18,7 +19,6 @@ import (
 "code.gitea.io/gitea/modules/timeutil"
 "code.gitea.io/gitea/modules/util"

-"github.com/minio/sha256-simd"
 "github.com/pquerna/otp/totp"
 "golang.org/x/crypto/pbkdf2"
 )
@@ -11,10 +11,13 @@ import (
 "io"
 "reflect"
 "strings"
+"time"

+"code.gitea.io/gitea/modules/log"
 "code.gitea.io/gitea/modules/setting"

 "xorm.io/xorm"
+"xorm.io/xorm/contexts"
 "xorm.io/xorm/names"
 "xorm.io/xorm/schemas"

@@ -144,6 +147,16 @@ func InitEngine(ctx context.Context) error {
 xormEngine.SetConnMaxLifetime(setting.Database.ConnMaxLifetime)
 xormEngine.SetDefaultContext(ctx)

+if setting.Database.SlowQueryThreshold > 0 {
+xormEngine.AddHook(&SlowQueryHook{
+Treshold: setting.Database.SlowQueryThreshold,
+Logger: log.GetLogger("xorm"),
+})
+}
+xormEngine.AddHook(&ErrorQueryHook{
+Logger: log.GetLogger("xorm"),
+})
+
 SetDefaultEngine(ctx, xormEngine)
 return nil
 }
@@ -299,3 +312,38 @@ func SetLogSQL(ctx context.Context, on bool) {
 sess.Engine().ShowSQL(on)
 }
 }
+
+type SlowQueryHook struct {
+Treshold time.Duration
+Logger log.Logger
+}
+
+var _ contexts.Hook = &SlowQueryHook{}
+
+func (SlowQueryHook) BeforeProcess(c *contexts.ContextHook) (context.Context, error) {
+return c.Ctx, nil
+}
+
+func (h *SlowQueryHook) AfterProcess(c *contexts.ContextHook) error {
+if c.ExecuteTime >= h.Treshold {
+h.Logger.Log(8, log.WARN, "[Slow SQL Query] %s %v - %v", c.SQL, c.Args, c.ExecuteTime)
+}
+return nil
+}
+
+type ErrorQueryHook struct {
+Logger log.Logger
+}
+
+var _ contexts.Hook = &ErrorQueryHook{}
+
+func (ErrorQueryHook) BeforeProcess(c *contexts.ContextHook) (context.Context, error) {
+return c.Ctx, nil
+}
+
+func (h *ErrorQueryHook) AfterProcess(c *contexts.ContextHook) error {
+if c.Err != nil {
+h.Logger.Log(8, log.ERROR, "[Error SQL Query] %s %v - %v", c.SQL, c.Args, c.Err)
+}
+return nil
+}
@@ -6,15 +6,19 @@ package db_test
 import (
 "path/filepath"
 "testing"
+"time"

 "code.gitea.io/gitea/models/db"
 issues_model "code.gitea.io/gitea/models/issues"
 "code.gitea.io/gitea/models/unittest"
+"code.gitea.io/gitea/modules/log"
 "code.gitea.io/gitea/modules/setting"
+"code.gitea.io/gitea/modules/test"

 _ "code.gitea.io/gitea/cmd" // for TestPrimaryKeys

 "github.com/stretchr/testify/assert"
+"xorm.io/xorm"
 )

 func TestDumpDatabase(t *testing.T) {
@@ -85,3 +89,65 @@ func TestPrimaryKeys(t *testing.T) {
 }
 }
 }
+
+func TestSlowQuery(t *testing.T) {
+lc, cleanup := test.NewLogChecker("slow-query", log.INFO)
+lc.StopMark("[Slow SQL Query]")
+defer cleanup()
+
+e := db.GetEngine(db.DefaultContext)
+engine, ok := e.(*xorm.Engine)
+assert.True(t, ok)
+
+// It's not possible to clean this up with XORM, but it's luckily not harmful
+// to leave around.
+engine.AddHook(&db.SlowQueryHook{
+Treshold: time.Second * 10,
+Logger: log.GetLogger("slow-query"),
+})
+
+// NOOP query.
+e.Exec("SELECT 1 WHERE false;")
+
+_, stopped := lc.Check(100 * time.Millisecond)
+assert.False(t, stopped)
+
+engine.AddHook(&db.SlowQueryHook{
+Treshold: 0, // Every query should be logged.
+Logger: log.GetLogger("slow-query"),
+})
+
+// NOOP query.
+e.Exec("SELECT 1 WHERE false;")
+
+_, stopped = lc.Check(100 * time.Millisecond)
+assert.True(t, stopped)
+}
+
+func TestErrorQuery(t *testing.T) {
+lc, cleanup := test.NewLogChecker("error-query", log.INFO)
+lc.StopMark("[Error SQL Query]")
+defer cleanup()
+
+e := db.GetEngine(db.DefaultContext)
+engine, ok := e.(*xorm.Engine)
+assert.True(t, ok)
+
+// It's not possible to clean this up with XORM, but it's luckily not harmful
+// to leave around.
+engine.AddHook(&db.ErrorQueryHook{
+Logger: log.GetLogger("error-query"),
+})
+
+// Valid query.
+e.Exec("SELECT 1 WHERE false;")
+
+_, stopped := lc.Check(100 * time.Millisecond)
+assert.False(t, stopped)
+
+// Table doesn't exist.
+e.Exec("SELECT column FROM table;")
+
+_, stopped = lc.Check(100 * time.Millisecond)
+assert.True(t, stopped)
+}
@@ -0,0 +1,13 @@
+-
+id: 1000
+owner_id: 2
+name: user2@localhost
+fingerprint: "SHA256:TKfwbZMR7e9OnlV2l1prfah1TXH8CmqR0PvFEXVCXA4"
+content: "ssh-ed25519 AAAAC3NzaC1lZDI1NTE5AAAAIKBknvWcuxM/W0iXGkzY4f2O6feX+Q7o46pKcxUbcOgh user2@localhost"
+# private key (base64-ed) LS0tLS1CRUdJTiBPUEVOU1NIIFBSSVZBVEUgS0VZLS0tLS0KYjNCbGJuTnphQzFyWlhrdGRqRUFBQUFBQkc1dmJtVUFBQUFFYm05dVpRQUFBQUFBQUFBQkFBQUFNd0FBQUF0emMyZ3RaVwpReU5UVXhPUUFBQUNDZ1pKNzFuTHNUUDF0SWx4cE0yT0g5anVuM2wva082T09xU25NVkczRG9JUUFBQUpocG43YTZhWisyCnVnQUFBQXR6YzJndFpXUXlOVFV4T1FBQUFDQ2daSjcxbkxzVFAxdElseHBNMk9IOWp1bjNsL2tPNk9PcVNuTVZHM0RvSVEKQUFBRUFxVm12bmo1LzZ5TW12ck9Ub29xa3F5MmUrc21aK0tBcEtKR0crRnY5MlA2QmtudldjdXhNL1cwaVhHa3pZNGYyTwo2ZmVYK1E3bzQ2cEtjeFViY09naEFBQUFFMmQxYzNSbFpFQm5kWE4wWldRdFltVmhjM1FCQWc9PQotLS0tLUVORCBPUEVOU1NIIFBSSVZBVEUgS0VZLS0tLS0=
+mode: 2
+type: 1
+verified: true
+created_unix: 1559593109
+updated_unix: 1565224552
+login_source_id: 0
@@ -150,3 +150,17 @@
 is_prerelease: false
 is_tag: false
 created_unix: 946684803
+
+- id: 12
+repo_id: 1059
+publisher_id: 2
+tag_name: "v1.0"
+lower_tag_name: "v1.0"
+target: "main"
+title: "v1.0"
+sha1: "d8f53dfb33f6ccf4169c34970b5e747511c18beb"
+num_commits: 1
+is_draft: false
+is_prerelease: false
+is_tag: false
+created_unix: 946684803
@@ -608,6 +608,38 @@
 type: 1
 created_unix: 946684810

+# BEGIN Forgejo [GITEA] Improve HTML title on repositories
+-
+id: 1093
+repo_id: 1059
+type: 1
+created_unix: 946684810
+
+-
+id: 1094
+repo_id: 1059
+type: 2
+created_unix: 946684810
+
+-
+id: 1095
+repo_id: 1059
+type: 3
+created_unix: 946684810
+
+-
+id: 1096
+repo_id: 1059
+type: 4
+created_unix: 946684810
+
+-
+id: 1097
+repo_id: 1059
+type: 5
+created_unix: 946684810
+# END Forgejo [GITEA] Improve HTML title on repositories
+
 -
 id: 91
 repo_id: 58
@@ -1467,6 +1467,7 @@
 owner_name: user27
 lower_name: repo49
 name: repo49
+description: A wonderful repository with more than just a README.md
 default_branch: master
 num_watches: 0
 num_stars: 0
@@ -1694,6 +1695,19 @@
 is_fsck_enabled: true
 close_issues_via_commit_in_any_branch: false

+-
+id: 1059
+owner_id: 2
+owner_name: user2
+lower_name: repo59
+name: repo59
+default_branch: master
+is_empty: false
+is_archived: false
+is_private: false
+status: 0
+num_issues: 0
+
 -
 id: 59
 owner_id: 2
@@ -66,7 +66,7 @@
 num_followers: 2
 num_following: 1
 num_stars: 2
-num_repos: 15
+num_repos: 16
 num_teams: 0
 num_members: 0
 visibility: 0
@@ -10,6 +10,7 @@ import (

 "code.gitea.io/gitea/models/forgejo/semver"
 forgejo_v1_20 "code.gitea.io/gitea/models/forgejo_migrations/v1_20"
+forgejo_v1_22 "code.gitea.io/gitea/models/forgejo_migrations/v1_22"
 "code.gitea.io/gitea/modules/git"
 "code.gitea.io/gitea/modules/log"
 "code.gitea.io/gitea/modules/setting"
@@ -43,6 +44,12 @@ var migrations = []*Migration{
 NewMigration("create the forgejo_sem_ver table", forgejo_v1_20.CreateSemVerTable),
 // v2 -> v3
 NewMigration("create the forgejo_auth_token table", forgejo_v1_20.CreateAuthorizationTokenTable),
+// v3 -> v4
+NewMigration("Add default_permissions to repo_unit", forgejo_v1_22.AddDefaultPermissionsToRepoUnit),
+// v4 -> v5
+NewMigration("create the forgejo_repo_flag table", forgejo_v1_22.CreateRepoFlagTable),
+// v5 -> v6
+NewMigration("Add wiki_branch to repository", forgejo_v1_22.AddWikiBranchToRepository),
 }

 // GetCurrentDBVersion returns the current Forgejo database version.
models/forgejo_migrations/v1_22/v4.go (new file, 17 lines)
@@ -0,0 +1,17 @@
+// Copyright 2021 The Gitea Authors. All rights reserved.
+// SPDX-License-Identifier: MIT
+
+package v1_22 //nolint
+
+import (
+"xorm.io/xorm"
+)
+
+func AddDefaultPermissionsToRepoUnit(x *xorm.Engine) error {
+type RepoUnit struct {
+ID int64
+DefaultPermissions int `xorm:"NOT NULL DEFAULT 0"`
+}
+
+return x.Sync(&RepoUnit{})
+}
models/forgejo_migrations/v1_22/v5.go (new file, 22 lines)
@@ -0,0 +1,22 @@
+// Copyright 2024 The Forgejo Authors c/o Codeberg e.V.. All rights reserved.
+// SPDX-License-Identifier: MIT
+
+package v1_22 //nolint
+
+import (
+"xorm.io/xorm"
+)
+
+type RepoFlag struct {
+ID int64 `xorm:"pk autoincr"`
+RepoID int64 `xorm:"UNIQUE(s) INDEX"`
+Name string `xorm:"UNIQUE(s) INDEX"`
+}
+
+func (RepoFlag) TableName() string {
+return "forgejo_repo_flag"
+}
+
+func CreateRepoFlagTable(x *xorm.Engine) error {
+return x.Sync(new(RepoFlag))
+}
models/forgejo_migrations/v1_22/v6.go (new file, 24 lines)
@@ -0,0 +1,24 @@
+// Copyright 2024 The Forgejo Authors c/o Codeberg e.V.. All rights reserved.
+// SPDX-License-Identifier: MIT
+
+package v1_22 //nolint
+
+import (
+"xorm.io/xorm"
+)
+
+func AddWikiBranchToRepository(x *xorm.Engine) error {
+type Repository struct {
+ID int64
+WikiBranch string
+}
+
+if err := x.Sync(&Repository{}); err != nil {
+return err
+}
+
+// Update existing repositories to use `master` as the wiki branch, for
+// compatilibty's sake.
+_, err := x.Exec("UPDATE repository SET wiki_branch = 'master' WHERE wiki_branch = '' OR wiki_branch IS NULL")
+return err
+}
@@ -128,6 +128,10 @@ func (b *Branch) LoadDeletedBy(ctx context.Context) (err error) {
 return err
 }

+func (b *Branch) GetRepo(ctx context.Context) (*repo_model.Repository, error) {
+return repo_model.GetRepositoryByID(ctx, b.RepoID)
+}
+
 func (b *Branch) LoadPusher(ctx context.Context) (err error) {
 if b.Pusher == nil && b.PusherID > 0 {
 b.Pusher, err = user_model.GetUserByID(ctx, b.PusherID)
@@ -283,7 +287,7 @@ func FindRenamedBranch(ctx context.Context, repoID int64, from string) (branch *
 }

 // RenameBranch rename a branch
-func RenameBranch(ctx context.Context, repo *repo_model.Repository, from, to string, gitAction func(ctx context.Context, isDefault bool) error) (err error) {
+func RenameBranch(ctx context.Context, repo *repo_model.Repository, from, to string, gitAction func(isDefault bool) error) (err error) {
 ctx, committer, err := db.TxContext(ctx)
 if err != nil {
 return err
@@ -358,7 +362,7 @@ func RenameBranch(ctx context.Context, repo *repo_model.Repository, from, to str
 }

 // 5. do git action
-if err = gitAction(ctx, isDefault); err != nil {
+if err = gitAction(isDefault); err != nil {
 return err
 }

@@ -4,7 +4,6 @@
 package git_test

 import (
-"context"
 "testing"

 "code.gitea.io/gitea/models/db"
@@ -133,7 +132,7 @@ func TestRenameBranch(t *testing.T) {
 }, git_model.WhitelistOptions{}))
 assert.NoError(t, committer.Commit())

-assert.NoError(t, git_model.RenameBranch(db.DefaultContext, repo1, "master", "main", func(ctx context.Context, isDefault bool) error {
+assert.NoError(t, git_model.RenameBranch(db.DefaultContext, repo1, "master", "main", func(isDefault bool) error {
 _isDefault = isDefault
 return nil
 }))
@@ -12,6 +12,7 @@ import (
 	repo_model "code.gitea.io/gitea/models/repo"
 	"code.gitea.io/gitea/models/unittest"
 	user_model "code.gitea.io/gitea/models/user"
+	"code.gitea.io/gitea/modules/structs"
 
 	"github.com/stretchr/testify/assert"
 )
@@ -97,3 +98,29 @@ func TestMigrate_InsertIssueComments(t *testing.T) {
 
 	unittest.CheckConsistencyFor(t, &issues_model.Issue{})
 }
+
+func TestUpdateCommentsMigrationsByType(t *testing.T) {
+	assert.NoError(t, unittest.PrepareTestDatabase())
+
+	issue := unittest.AssertExistsAndLoadBean(t, &issues_model.Issue{ID: 1})
+	repo := unittest.AssertExistsAndLoadBean(t, &repo_model.Repository{ID: issue.RepoID})
+	comment := unittest.AssertExistsAndLoadBean(t, &issues_model.Comment{ID: 1, IssueID: issue.ID})
+
+	// Set repository to migrated from Gitea.
+	repo.OriginalServiceType = structs.GiteaService
+	repo_model.UpdateRepositoryCols(db.DefaultContext, repo, "original_service_type")
+
+	// Set comment to have an original author.
+	comment.OriginalAuthor = "Example User"
+	comment.OriginalAuthorID = 1
+	comment.PosterID = 0
+	_, err := db.GetEngine(db.DefaultContext).ID(comment.ID).Cols("original_author", "original_author_id", "poster_id").Update(comment)
+	assert.NoError(t, err)
+
+	assert.NoError(t, issues_model.UpdateCommentsMigrationsByType(db.DefaultContext, structs.GiteaService, "1", 513))
+
+	comment = unittest.AssertExistsAndLoadBean(t, &issues_model.Comment{ID: 1, IssueID: issue.ID})
+	assert.Empty(t, comment.OriginalAuthor)
+	assert.Empty(t, comment.OriginalAuthorID)
+	assert.EqualValues(t, 513, comment.PosterID)
+}
@@ -9,6 +9,14 @@ import (
 	"code.gitea.io/gitea/models/db"
 )
 
+func GetMaxIssueIndexForRepo(ctx context.Context, repoID int64) (int64, error) {
+	var max int64
+	if _, err := db.GetEngine(ctx).Select("MAX(`index`)").Table("issue").Where("repo_id=?", repoID).Get(&max); err != nil {
+		return 0, err
+	}
+	return max, nil
+}
+
 // RecalculateIssueIndexForRepo create issue_index for repo if not exist and
 // update it based on highest index of existing issues assigned to a repo
 func RecalculateIssueIndexForRepo(ctx context.Context, repoID int64) error {
@@ -18,8 +26,8 @@ func RecalculateIssueIndexForRepo(ctx context.Context, repoID int64) error {
 	}
 	defer committer.Close()
 
-	var max int64
-	if _, err = db.GetEngine(ctx).Select(" MAX(`index`)").Table("issue").Where("repo_id=?", repoID).Get(&max); err != nil {
+	max, err := GetMaxIssueIndexForRepo(ctx, repoID)
+	if err != nil {
 		return err
 	}
 
models/issues/issue_index_test.go | 38 lines (new file)
@@ -0,0 +1,38 @@
+// Copyright 2024 The Forgejo Authors
+// SPDX-License-Identifier: MIT
+
+package issues_test
+
+import (
+	"testing"
+
+	"code.gitea.io/gitea/models/db"
+	issues_model "code.gitea.io/gitea/models/issues"
+	repo_model "code.gitea.io/gitea/models/repo"
+	"code.gitea.io/gitea/models/unittest"
+
+	"github.com/stretchr/testify/assert"
+)
+
+func TestGetMaxIssueIndexForRepo(t *testing.T) {
+	assert.NoError(t, unittest.PrepareTestDatabase())
+
+	repo := unittest.AssertExistsAndLoadBean(t, &repo_model.Repository{ID: 1})
+
+	maxPR, err := issues_model.GetMaxIssueIndexForRepo(db.DefaultContext, repo.ID)
+	assert.NoError(t, err)
+
+	issue := testCreateIssue(t, repo.ID, repo.OwnerID, "title1", "content1", false)
+	assert.Greater(t, issue.Index, maxPR)
+
+	maxPR, err = issues_model.GetMaxIssueIndexForRepo(db.DefaultContext, repo.ID)
+	assert.NoError(t, err)
+
+	pull := testCreateIssue(t, repo.ID, repo.OwnerID, "title2", "content2", true)
+	assert.Greater(t, pull.Index, maxPR)
+
+	maxPR, err = issues_model.GetMaxIssueIndexForRepo(db.DefaultContext, repo.ID)
+	assert.NoError(t, err)
+
+	assert.Equal(t, maxPR, pull.Index)
+}
@@ -50,6 +50,14 @@ func listPullRequestStatement(ctx context.Context, baseRepoID int64, opts *PullR
 	return sess, nil
 }
 
+func GetUnmergedPullRequestsByHeadInfoMax(ctx context.Context, repoID, maxIndex int64, branch string) ([]*PullRequest, error) {
+	prs := make([]*PullRequest, 0, 2)
+	sess := db.GetEngine(ctx).
+		Join("INNER", "issue", "issue.id = `pull_request`.issue_id").
+		Where("`pull_request`.head_repo_id = ? AND `pull_request`.head_branch = ? AND `pull_request`.has_merged = ? AND `issue`.is_closed = ? AND `pull_request`.flow = ? AND `issue`.`index` <= ?", repoID, branch, false, false, PullRequestFlowGithub, maxIndex)
+	return prs, sess.Find(&prs)
+}
+
 // GetUnmergedPullRequestsByHeadInfo returns all pull requests that are open and has not been merged
 func GetUnmergedPullRequestsByHeadInfo(ctx context.Context, repoID int64, branch string) ([]*PullRequest, error) {
 	prs := make([]*PullRequest, 0, 2)
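The two helpers added above are meant to be used together. The sketch below is not part of the diff; the caller name is invented, and it only illustrates snapshotting the repository's highest issue index and then listing the open pull requests from a head branch up to that index.

    package example

    import (
        "context"

        issues_model "code.gitea.io/gitea/models/issues"
    )

    // openPRsUpToCurrentIndex is a hypothetical caller: it captures the
    // repository's current maximum issue index and lists the open, unmerged
    // GitHub-flow pull requests from the given head branch whose issue index
    // does not exceed that snapshot.
    func openPRsUpToCurrentIndex(ctx context.Context, repoID int64, branch string) ([]*issues_model.PullRequest, error) {
        maxIndex, err := issues_model.GetMaxIssueIndexForRepo(ctx, repoID)
        if err != nil {
            return nil, err
        }
        return issues_model.GetUnmergedPullRequestsByHeadInfoMax(ctx, repoID, maxIndex, branch)
    }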
@@ -4,6 +4,7 @@
 package issues_test
 
 import (
+	"fmt"
 	"testing"
 
 	"code.gitea.io/gitea/models/db"
@@ -158,6 +159,91 @@ func TestGetUnmergedPullRequestsByHeadInfo(t *testing.T) {
 	}
 }
 
+func TestGetUnmergedPullRequestsByHeadInfoMax(t *testing.T) {
+	assert.NoError(t, unittest.PrepareTestDatabase())
+
+	repoID := int64(1)
+	maxPR := int64(0)
+	prs, err := issues_model.GetUnmergedPullRequestsByHeadInfoMax(db.DefaultContext, repoID, maxPR, "branch2")
+	assert.NoError(t, err)
+	assert.Len(t, prs, 0)
+	maxPR, err = issues_model.GetMaxIssueIndexForRepo(db.DefaultContext, repoID)
+	assert.NoError(t, err)
+	prs, err = issues_model.GetUnmergedPullRequestsByHeadInfoMax(db.DefaultContext, repoID, maxPR, "branch2")
+	assert.NoError(t, err)
+	assert.Len(t, prs, 1)
+	for _, pr := range prs {
+		assert.Equal(t, int64(1), pr.HeadRepoID)
+		assert.Equal(t, "branch2", pr.HeadBranch)
+	}
+	pr := prs[0]
+
+	for _, testCase := range []struct {
+		table   string
+		field   string
+		id      int64
+		match   any
+		nomatch any
+	}{
+		{
+			table:   "issue",
+			field:   "is_closed",
+			id:      pr.IssueID,
+			match:   false,
+			nomatch: true,
+		},
+		{
+			table:   "pull_request",
+			field:   "flow",
+			id:      pr.ID,
+			match:   issues_model.PullRequestFlowGithub,
+			nomatch: issues_model.PullRequestFlowAGit,
+		},
+		{
+			table:   "pull_request",
+			field:   "head_repo_id",
+			id:      pr.ID,
+			match:   pr.HeadRepoID,
+			nomatch: 0,
+		},
+		{
+			table:   "pull_request",
+			field:   "head_branch",
+			id:      pr.ID,
+			match:   pr.HeadBranch,
+			nomatch: "something else",
+		},
+		{
+			table:   "pull_request",
+			field:   "has_merged",
+			id:      pr.ID,
+			match:   false,
+			nomatch: true,
+		},
+	} {
+		t.Run(testCase.field, func(t *testing.T) {
+			update := fmt.Sprintf("UPDATE `%s` SET `%s` = ? WHERE `id` = ?", testCase.table, testCase.field)
+
+			// expect no match
+			_, err = db.GetEngine(db.DefaultContext).Exec(update, testCase.nomatch, testCase.id)
+			assert.NoError(t, err)
+			prs, err = issues_model.GetUnmergedPullRequestsByHeadInfoMax(db.DefaultContext, repoID, maxPR, "branch2")
+			assert.NoError(t, err)
+			assert.Len(t, prs, 0)
+
+			// expect one match
+			_, err = db.GetEngine(db.DefaultContext).Exec(update, testCase.match, testCase.id)
+			assert.NoError(t, err)
+			prs, err = issues_model.GetUnmergedPullRequestsByHeadInfoMax(db.DefaultContext, repoID, maxPR, "branch2")
+			assert.NoError(t, err)
+			assert.Len(t, prs, 1)
+
+			// identical to the known PR
+			assert.Equal(t, pr.ID, prs[0].ID)
+		})
+	}
+}
+
 func TestGetUnmergedPullRequestsByBaseInfo(t *testing.T) {
 	assert.NoError(t, unittest.PrepareTestDatabase())
 	prs, err := issues_model.GetUnmergedPullRequestsByBaseInfo(db.DefaultContext, 1, "master")
@@ -4,9 +4,9 @@
 package base
 
 import (
+	"crypto/sha256"
 	"encoding/hex"
 
-	"github.com/minio/sha256-simd"
 	"golang.org/x/crypto/pbkdf2"
 )
 
@@ -4,9 +4,9 @@
 package v1_14 //nolint
 
 import (
+	"crypto/sha256"
 	"encoding/hex"
 
-	"github.com/minio/sha256-simd"
 	"golang.org/x/crypto/argon2"
 	"golang.org/x/crypto/bcrypt"
 	"golang.org/x/crypto/pbkdf2"
@@ -4,13 +4,7 @@
 package v1_21 //nolint
 
 import (
-	"context"
-	"fmt"
-	"path/filepath"
-	"strings"
-
-	"code.gitea.io/gitea/modules/git"
-	giturl "code.gitea.io/gitea/modules/git/url"
+	repo_model "code.gitea.io/gitea/models/repo"
 	"code.gitea.io/gitea/modules/setting"
 
 	"xorm.io/xorm"
@@ -73,7 +67,7 @@ func migratePullMirrors(x *xorm.Engine) error {
 		start += len(mirrors)
 
 		for _, m := range mirrors {
-			remoteAddress, err := getRemoteAddress(m.RepoOwner, m.RepoName, "origin")
+			remoteAddress, err := repo_model.GetPushMirrorRemoteAddress(m.RepoOwner, m.RepoName, "origin")
 			if err != nil {
 				return err
 			}
@@ -136,7 +130,7 @@ func migratePushMirrors(x *xorm.Engine) error {
 		start += len(mirrors)
 
 		for _, m := range mirrors {
-			remoteAddress, err := getRemoteAddress(m.RepoOwner, m.RepoName, m.RemoteName)
+			remoteAddress, err := repo_model.GetPushMirrorRemoteAddress(m.RepoOwner, m.RepoName, m.RemoteName)
 			if err != nil {
 				return err
 			}
@@ -160,20 +154,3 @@ func migratePushMirrors(x *xorm.Engine) error {
 
 	return sess.Commit()
 }
-
-func getRemoteAddress(ownerName, repoName, remoteName string) (string, error) {
-	repoPath := filepath.Join(setting.RepoRootPath, strings.ToLower(ownerName), strings.ToLower(repoName)+".git")
-
-	remoteURL, err := git.GetRemoteAddress(context.Background(), repoPath, remoteName)
-	if err != nil {
-		return "", fmt.Errorf("get remote %s's address of %s/%s failed: %v", remoteName, ownerName, repoName, err)
-	}
-
-	u, err := giturl.Parse(remoteURL)
-	if err != nil {
-		return "", err
-	}
-	u.User = nil
-
-	return u.String(), nil
-}
@@ -33,6 +33,16 @@ func (p *Permission) IsAdmin() bool {
 	return p.AccessMode >= perm_model.AccessModeAdmin
 }
 
+// IsGloballyWriteable returns true if the unit is writeable by all users of the instance.
+func (p *Permission) IsGloballyWriteable(unitType unit.Type) bool {
+	for _, u := range p.Units {
+		if u.Type == unitType {
+			return u.DefaultPermissions == repo_model.UnitAccessModeWrite
+		}
+	}
+	return false
+}
+
 // HasAccess returns true if the current user has at least read access to any unit of this repository
 func (p *Permission) HasAccess() bool {
 	if p.UnitsMode == nil {
@@ -198,7 +208,19 @@ func GetUserRepoPermission(ctx context.Context, repo *repo_model.Repository, use
 	if err := repo.LoadOwner(ctx); err != nil {
 		return perm, err
 	}
 
 	if !repo.Owner.IsOrganization() {
+		// for a public repo, different repo units may have different default
+		// permissions for non-restricted users.
+		if !repo.IsPrivate && !user.IsRestricted && len(repo.Units) > 0 {
+			perm.UnitsMode = make(map[unit.Type]perm_model.AccessMode)
+			for _, u := range repo.Units {
+				if _, ok := perm.UnitsMode[u.Type]; !ok {
+					perm.UnitsMode[u.Type] = u.DefaultPermissions.ToAccessMode(perm.AccessMode)
+				}
+			}
+		}
+
 		return perm, nil
 	}
 
@@ -239,10 +261,12 @@ func GetUserRepoPermission(ctx context.Context, repo *repo_model.Repository, use
 			}
 		}
 
-		// for a public repo on an organization, a non-restricted user has read permission on non-team defined units.
+		// for a public repo on an organization, a non-restricted user should
+		// have the same permission on non-team defined units as the default
+		// permissions for the repo unit.
 		if !found && !repo.IsPrivate && !user.IsRestricted {
 			if _, ok := perm.UnitsMode[u.Type]; !ok {
-				perm.UnitsMode[u.Type] = perm_model.AccessModeRead
+				perm.UnitsMode[u.Type] = u.DefaultPermissions.ToAccessMode(perm_model.AccessModeRead)
 			}
 		}
 	}
@@ -74,7 +74,7 @@ func GetScheduledMergeByPullID(ctx context.Context, pullID int64) (bool, *AutoMe
 		return false, nil, err
 	}
 
-	doer, err := user_model.GetUserByID(ctx, scheduledPRM.DoerID)
+	doer, err := user_model.GetPossibleUserByID(ctx, scheduledPRM.DoerID)
 	if err != nil {
 		return false, nil, err
 	}
@@ -5,10 +5,16 @@ package repo
 
 import (
 	"context"
+	"fmt"
+	"path/filepath"
+	"strings"
 	"time"
 
 	"code.gitea.io/gitea/models/db"
+	"code.gitea.io/gitea/modules/git"
+	giturl "code.gitea.io/gitea/modules/git/url"
 	"code.gitea.io/gitea/modules/log"
+	"code.gitea.io/gitea/modules/setting"
 	"code.gitea.io/gitea/modules/timeutil"
 	"code.gitea.io/gitea/modules/util"
 
@@ -132,3 +138,21 @@ func PushMirrorsIterate(ctx context.Context, limit int, f func(idx int, bean any
 	}
 	return sess.Iterate(new(PushMirror), f)
 }
+
+// GetPushMirrorRemoteAddress returns the address of associated with a repository's given remote.
+func GetPushMirrorRemoteAddress(ownerName, repoName, remoteName string) (string, error) {
+	repoPath := filepath.Join(setting.RepoRootPath, strings.ToLower(ownerName), strings.ToLower(repoName)+".git")
+
+	remoteURL, err := git.GetRemoteAddress(context.Background(), repoPath, remoteName)
+	if err != nil {
+		return "", fmt.Errorf("get remote %s's address of %s/%s failed: %v", remoteName, ownerName, repoName, err)
+	}
+
+	u, err := giturl.Parse(remoteURL)
+	if err != nil {
+		return "", err
+	}
+	u.User = nil
+
+	return u.String(), nil
+}
@@ -135,6 +135,7 @@ type Repository struct {
 	OriginalServiceType api.GitServiceType `xorm:"index"`
 	OriginalURL         string             `xorm:"VARCHAR(2048)"`
 	DefaultBranch       string
+	WikiBranch          string
 
 	NumWatches int
 	NumStars   int
@@ -204,6 +205,13 @@ func (repo *Repository) GetOwnerName() string {
 	return repo.OwnerName
 }
 
+func (repo *Repository) GetWikiBranchName() string {
+	if repo.WikiBranch == "" {
+		return setting.Repository.DefaultBranch
+	}
+	return repo.WikiBranch
+}
+
 // SanitizedOriginalURL returns a sanitized OriginalURL
 func (repo *Repository) SanitizedOriginalURL() string {
 	if repo.OriginalURL == "" {
models/repo/repo_flags.go | 102 lines (new file)
@@ -0,0 +1,102 @@
+// Copyright 2024 The Forgejo Authors c/o Codeberg e.V.. All rights reserved.
+// SPDX-License-Identifier: MIT
+
+package repo
+
+import (
+	"context"
+
+	"code.gitea.io/gitea/models/db"
+
+	"xorm.io/builder"
+)
+
+// RepoFlag represents a single flag against a repository
+type RepoFlag struct { //revive:disable-line:exported
+	ID     int64  `xorm:"pk autoincr"`
+	RepoID int64  `xorm:"UNIQUE(s) INDEX"`
+	Name   string `xorm:"UNIQUE(s) INDEX"`
+}
+
+func init() {
+	db.RegisterModel(new(RepoFlag))
+}
+
+// TableName provides the real table name
+func (RepoFlag) TableName() string {
+	return "forgejo_repo_flag"
+}
+
+// ListFlags returns the array of flags on the repo.
+func (repo *Repository) ListFlags(ctx context.Context) ([]RepoFlag, error) {
+	var flags []RepoFlag
+	err := db.GetEngine(ctx).Table(&RepoFlag{}).Where("repo_id = ?", repo.ID).Find(&flags)
+	if err != nil {
+		return nil, err
+	}
+	return flags, nil
+}
+
+// IsFlagged returns whether a repo has any flags or not
+func (repo *Repository) IsFlagged(ctx context.Context) bool {
+	has, _ := db.Exist[RepoFlag](ctx, builder.Eq{"repo_id": repo.ID})
+	return has
+}
+
+// GetFlag returns a single RepoFlag based on its name
+func (repo *Repository) GetFlag(ctx context.Context, flagName string) (bool, *RepoFlag, error) {
+	flag, has, err := db.Get[RepoFlag](ctx, builder.Eq{"repo_id": repo.ID, "name": flagName})
+	if err != nil {
+		return false, nil, err
+	}
+	return has, flag, nil
+}
+
+// HasFlag returns true if a repo has a given flag, false otherwise
+func (repo *Repository) HasFlag(ctx context.Context, flagName string) bool {
+	has, _ := db.Exist[RepoFlag](ctx, builder.Eq{"repo_id": repo.ID, "name": flagName})
+	return has
+}
+
+// AddFlag adds a new flag to the repo
+func (repo *Repository) AddFlag(ctx context.Context, flagName string) error {
+	return db.Insert(ctx, RepoFlag{
+		RepoID: repo.ID,
+		Name:   flagName,
+	})
+}
+
+// DeleteFlag removes a flag from the repo
+func (repo *Repository) DeleteFlag(ctx context.Context, flagName string) (int64, error) {
+	return db.DeleteByBean(ctx, &RepoFlag{RepoID: repo.ID, Name: flagName})
+}
+
+// ReplaceAllFlags replaces all flags of a repo with a new set
+func (repo *Repository) ReplaceAllFlags(ctx context.Context, flagNames []string) error {
+	ctx, committer, err := db.TxContext(ctx)
+	if err != nil {
+		return err
+	}
+	defer committer.Close()
+
+	if err := db.DeleteBeans(ctx, &RepoFlag{RepoID: repo.ID}); err != nil {
+		return err
+	}
+
+	if len(flagNames) == 0 {
+		return committer.Commit()
+	}
+
+	var flags []RepoFlag
+	for _, name := range flagNames {
+		flags = append(flags, RepoFlag{
+			RepoID: repo.ID,
+			Name:   name,
+		})
+	}
+	if err := db.Insert(ctx, &flags); err != nil {
+		return err
+	}
+
+	return committer.Commit()
+}
models/repo/repo_flags_test.go | 114 lines (new file)
@@ -0,0 +1,114 @@
+// Copyright 2024 The Forgejo Authors c/o Codeberg e.V.. All rights reserved.
+// SPDX-License-Identifier: MIT
+
+package repo_test
+
+import (
+	"testing"
+
+	"code.gitea.io/gitea/models/db"
+	repo_model "code.gitea.io/gitea/models/repo"
+	"code.gitea.io/gitea/models/unittest"
+
+	"github.com/stretchr/testify/assert"
+)
+
+func TestRepositoryFlags(t *testing.T) {
+	assert.NoError(t, unittest.PrepareTestDatabase())
+	repo := unittest.AssertExistsAndLoadBean(t, &repo_model.Repository{ID: 10})
+
+	// ********************
+	// ** NEGATIVE TESTS **
+	// ********************
+
+	// Unless we add flags, the repo has none
+	flags, err := repo.ListFlags(db.DefaultContext)
+	assert.NoError(t, err)
+	assert.Empty(t, flags)
+
+	// If the repo has no flags, it is not flagged
+	flagged := repo.IsFlagged(db.DefaultContext)
+	assert.False(t, flagged)
+
+	// Trying to find a flag when there is none
+	has := repo.HasFlag(db.DefaultContext, "foo")
+	assert.False(t, has)
+
+	// Trying to retrieve a non-existent flag indicates not found
+	has, _, err = repo.GetFlag(db.DefaultContext, "foo")
+	assert.NoError(t, err)
+	assert.False(t, has)
+
+	// Deleting a non-existent flag fails
+	deleted, err := repo.DeleteFlag(db.DefaultContext, "no-such-flag")
+	assert.NoError(t, err)
+	assert.Equal(t, int64(0), deleted)
+
+	// ********************
+	// ** POSITIVE TESTS **
+	// ********************
+
+	// Adding a flag works
+	err = repo.AddFlag(db.DefaultContext, "foo")
+	assert.NoError(t, err)
+
+	// Adding it again fails
+	err = repo.AddFlag(db.DefaultContext, "foo")
+	assert.Error(t, err)
+
+	// Listing flags includes the one we added
+	flags, err = repo.ListFlags(db.DefaultContext)
+	assert.NoError(t, err)
+	assert.Len(t, flags, 1)
+	assert.Equal(t, "foo", flags[0].Name)
+
+	// With a flag added, the repo is flagged
+	flagged = repo.IsFlagged(db.DefaultContext)
+	assert.True(t, flagged)
+
+	// The flag can be found
+	has = repo.HasFlag(db.DefaultContext, "foo")
+	assert.True(t, has)
+
+	// Added flag can be retrieved
+	_, flag, err := repo.GetFlag(db.DefaultContext, "foo")
+	assert.NoError(t, err)
+	assert.Equal(t, "foo", flag.Name)
+
+	// Deleting a flag works
+	deleted, err = repo.DeleteFlag(db.DefaultContext, "foo")
+	assert.NoError(t, err)
+	assert.Equal(t, int64(1), deleted)
+
+	// The list is now empty
+	flags, err = repo.ListFlags(db.DefaultContext)
+	assert.NoError(t, err)
+	assert.Empty(t, flags)
+
+	// Replacing an empty list works
+	err = repo.ReplaceAllFlags(db.DefaultContext, []string{"bar"})
+	assert.NoError(t, err)
+
+	// The repo is now flagged with "bar"
+	has = repo.HasFlag(db.DefaultContext, "bar")
+	assert.True(t, has)
+
+	// Replacing a tag set with another works
+	err = repo.ReplaceAllFlags(db.DefaultContext, []string{"baz", "quux"})
+	assert.NoError(t, err)
+
+	// The repo now has two tags
+	flags, err = repo.ListFlags(db.DefaultContext)
+	assert.NoError(t, err)
+	assert.Len(t, flags, 2)
+	assert.Equal(t, "baz", flags[0].Name)
+	assert.Equal(t, "quux", flags[1].Name)
+
+	// Replacing flags with an empty set deletes all flags
+	err = repo.ReplaceAllFlags(db.DefaultContext, []string{})
+	assert.NoError(t, err)
+
+	// The repo is now unflagged
+	flagged = repo.IsFlagged(db.DefaultContext)
+	assert.False(t, flagged)
+}
@@ -138,12 +138,12 @@ func getTestCases() []struct {
 		{
 			name:  "AllPublic/PublicRepositoriesOfUserIncludingCollaborative",
 			opts:  &repo_model.SearchRepoOptions{ListOptions: db.ListOptions{Page: 1, PageSize: 10}, OwnerID: 15, AllPublic: true, Template: util.OptionalBoolFalse},
-			count: 31,
+			count: 32,
 		},
 		{
 			name:  "AllPublic/PublicAndPrivateRepositoriesOfUserIncludingCollaborative",
 			opts:  &repo_model.SearchRepoOptions{ListOptions: db.ListOptions{Page: 1, PageSize: 10}, OwnerID: 15, Private: true, AllPublic: true, AllLimited: true, Template: util.OptionalBoolFalse},
-			count: 36,
+			count: 37,
 		},
 		{
 			name: "AllPublic/PublicAndPrivateRepositoriesOfUserIncludingCollaborativeByName",
@@ -158,7 +158,7 @@ func getTestCases() []struct {
 		{
 			name:  "AllPublic/PublicRepositoriesOfOrganization",
 			opts:  &repo_model.SearchRepoOptions{ListOptions: db.ListOptions{Page: 1, PageSize: 10}, OwnerID: 17, AllPublic: true, Collaborate: util.OptionalBoolFalse, Template: util.OptionalBoolFalse},
-			count: 31,
+			count: 32,
 		},
 		{
 			name: "AllTemplates",
@@ -10,6 +10,7 @@ import (
 	"strings"
 
 	"code.gitea.io/gitea/models/db"
+	"code.gitea.io/gitea/models/perm"
 	"code.gitea.io/gitea/models/unit"
 	"code.gitea.io/gitea/modules/json"
 	"code.gitea.io/gitea/modules/setting"
@@ -39,13 +40,43 @@ func (err ErrUnitTypeNotExist) Unwrap() error {
 	return util.ErrNotExist
 }
 
+// RepoUnitAccessMode specifies the users access mode to a repo unit
+type UnitAccessMode int
+
+const (
+	// UnitAccessModeUnset - no unit mode set
+	UnitAccessModeUnset UnitAccessMode = iota // 0
+	// UnitAccessModeNone no access
+	UnitAccessModeNone // 1
+	// UnitAccessModeRead read access
+	UnitAccessModeRead // 2
+	// UnitAccessModeWrite write access
+	UnitAccessModeWrite // 3
+)
+
+func (mode UnitAccessMode) ToAccessMode(modeIfUnset perm.AccessMode) perm.AccessMode {
+	switch mode {
+	case UnitAccessModeUnset:
+		return modeIfUnset
+	case UnitAccessModeNone:
+		return perm.AccessModeNone
+	case UnitAccessModeRead:
+		return perm.AccessModeRead
+	case UnitAccessModeWrite:
+		return perm.AccessModeWrite
+	default:
+		return perm.AccessModeNone
+	}
+}
+
 // RepoUnit describes all units of a repository
 type RepoUnit struct { //revive:disable-line:exported
 	ID                 int64
 	RepoID             int64              `xorm:"INDEX(s)"`
 	Type               unit.Type          `xorm:"INDEX(s)"`
 	Config             convert.Conversion `xorm:"TEXT"`
 	CreatedUnix        timeutil.TimeStamp `xorm:"INDEX CREATED"`
+	DefaultPermissions UnitAccessMode     `xorm:"NOT NULL DEFAULT 0"`
 }
 
 func init() {
@@ -283,3 +314,29 @@ func UpdateRepoUnit(ctx context.Context, unit *RepoUnit) error {
 	_, err := db.GetEngine(ctx).ID(unit.ID).Update(unit)
 	return err
 }
+
+// UpdateRepositoryUnits updates a repository's units
+func UpdateRepositoryUnits(ctx context.Context, repo *Repository, units []RepoUnit, deleteUnitTypes []unit.Type) (err error) {
+	ctx, committer, err := db.TxContext(ctx)
+	if err != nil {
+		return err
+	}
+	defer committer.Close()
+
+	// Delete existing settings of units before adding again
+	for _, u := range units {
+		deleteUnitTypes = append(deleteUnitTypes, u.Type)
+	}
+
+	if _, err = db.GetEngine(ctx).Where("repo_id = ?", repo.ID).In("type", deleteUnitTypes).Delete(new(RepoUnit)); err != nil {
+		return err
+	}
+
+	if len(units) > 0 {
+		if err = db.Insert(ctx, units); err != nil {
+			return err
+		}
+	}
+
+	return committer.Commit()
+}
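A rough sketch of how a caller might combine the new DefaultPermissions column with UpdateRepositoryUnits so that a unit resolves to write access for every non-restricted user of a public repository. This is not part of the diff; the helper name is hypothetical, and unit.TypeWiki is used only as an example unit type.

    package example

    import (
        "context"

        repo_model "code.gitea.io/gitea/models/repo"
        "code.gitea.io/gitea/models/unit"
    )

    // enableGloballyWritableWiki stores a wiki unit whose DefaultPermissions is
    // UnitAccessModeWrite; ToAccessMode then maps it to write access when the
    // permission for a non-restricted user is computed.
    func enableGloballyWritableWiki(ctx context.Context, repo *repo_model.Repository) error {
        units := []repo_model.RepoUnit{{
            RepoID:             repo.ID,
            Type:               unit.TypeWiki,
            DefaultPermissions: repo_model.UnitAccessModeWrite,
        }}
        return repo_model.UpdateRepositoryUnits(ctx, repo, units, nil)
    }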
@@ -6,6 +6,8 @@ package repo
 import (
 	"testing"
 
+	"code.gitea.io/gitea/models/perm"
+
 	"github.com/stretchr/testify/assert"
 )
 
@@ -28,3 +30,10 @@ func TestActionsConfig(t *testing.T) {
 	cfg.DisableWorkflow("test3.yaml")
 	assert.EqualValues(t, "test1.yaml,test2.yaml,test3.yaml", cfg.ToString())
 }
+
+func TestRepoUnitAccessMode(t *testing.T) {
+	assert.Equal(t, UnitAccessModeNone.ToAccessMode(perm.AccessModeAdmin), perm.AccessModeNone)
+	assert.Equal(t, UnitAccessModeRead.ToAccessMode(perm.AccessModeAdmin), perm.AccessModeRead)
+	assert.Equal(t, UnitAccessModeWrite.ToAccessMode(perm.AccessModeAdmin), perm.AccessModeWrite)
+	assert.Equal(t, UnitAccessModeUnset.ToAccessMode(perm.AccessModeRead), perm.AccessModeRead)
+}
@@ -199,7 +199,7 @@ func FindTopics(ctx context.Context, opts *FindTopicOptions) ([]*Topic, int64, e
 		sess.Join("INNER", "repo_topic", "repo_topic.topic_id = topic.id")
 		orderBy = "topic.name" // when render topics for a repo, it's better to sort them by name, to get consistent result
 	}
-	if opts.PageSize != 0 && opts.Page != 0 {
+	if opts.PageSize > 0 {
 		sess = db.SetSessionPagination(sess, opts)
 	}
 	topics := make([]*Topic, 0, 10)
models/unittest/mock_http.go | 113 lines (new file)
@@ -0,0 +1,113 @@
+// Copyright 2017 The Forgejo Authors. All rights reserved.
+// SPDX-License-Identifier: MIT
+
+package unittest
+
+import (
+	"bufio"
+	"fmt"
+	"io"
+	"net/http"
+	"net/http/httptest"
+	"net/url"
+	"os"
+	"slices"
+	"strings"
+	"testing"
+
+	"code.gitea.io/gitea/modules/log"
+
+	"github.com/stretchr/testify/assert"
+)
+
+// Mocks HTTP responses of a third-party service (such as GitHub, GitLab…)
+// This has two modes:
+// - live mode: the requests made to the mock HTTP server are transmitted to the live
+//   service, and responses are saved as test data files
+// - test mode: the responses to requests to the mock HTTP server are read from the
+//   test data files
+func NewMockWebServer(t *testing.T, liveServerBaseURL, testDataDir string, liveMode bool) *httptest.Server {
+	mockServerBaseURL := ""
+	ignoredHeaders := []string{"cf-ray", "server", "date", "report-to", "nel", "x-request-id"}
+
+	server := httptest.NewServer(http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) {
+		path := NormalizedFullPath(r.URL)
+		log.Info("Mock HTTP Server: got request for path %s", r.URL.Path)
+		// TODO check request method (support POST?)
+		fixturePath := fmt.Sprintf("%s/%s", testDataDir, strings.NewReplacer("/", "_", "?", "!").Replace(path))
+		if liveMode {
+			liveURL := fmt.Sprintf("%s%s", liveServerBaseURL, path)
+
+			request, err := http.NewRequest(r.Method, liveURL, nil)
+			assert.NoError(t, err, "constructing an HTTP request to %s failed", liveURL)
+			for headerName, headerValues := range r.Header {
+				// do not pass on the encoding: let the Transport of the HTTP client handle that for us
+				if strings.ToLower(headerName) != "accept-encoding" {
+					for _, headerValue := range headerValues {
+						request.Header.Add(headerName, headerValue)
+					}
+				}
+			}
+
+			response, err := http.DefaultClient.Do(request)
+			assert.NoError(t, err, "HTTP request to %s failed: %s", liveURL)
+
+			fixture, err := os.Create(fixturePath)
+			assert.NoError(t, err, "failed to open the fixture file %s for writing", fixturePath)
+			defer fixture.Close()
+			fixtureWriter := bufio.NewWriter(fixture)
+
+			for headerName, headerValues := range response.Header {
+				for _, headerValue := range headerValues {
+					if !slices.Contains(ignoredHeaders, strings.ToLower(headerName)) {
+						_, err := fixtureWriter.WriteString(fmt.Sprintf("%s: %s\n", headerName, headerValue))
+						assert.NoError(t, err, "writing the header of the HTTP response to the fixture file failed")
+					}
+				}
+			}
+			_, err = fixtureWriter.WriteString("\n")
+			assert.NoError(t, err, "writing the header of the HTTP response to the fixture file failed")
+			fixtureWriter.Flush()
+
+			log.Info("Mock HTTP Server: writing response to %s", fixturePath)
+			_, err = io.Copy(fixture, response.Body)
+			assert.NoError(t, err, "writing the body of the HTTP response to %s failed", liveURL)
+
+			err = fixture.Sync()
+			assert.NoError(t, err, "writing the body of the HTTP response to the fixture file failed")
+		}
+
+		fixture, err := os.ReadFile(fixturePath)
+		assert.NoError(t, err, "missing mock HTTP response: "+fixturePath)
+
+		w.WriteHeader(http.StatusOK)
+
+		// replace any mention of the live HTTP service by the mocked host
+		stringFixture := strings.ReplaceAll(string(fixture), liveServerBaseURL, mockServerBaseURL)
+		// parse back the fixture file into a series of HTTP headers followed by response body
+		lines := strings.Split(stringFixture, "\n")
+		for idx, line := range lines {
+			colonIndex := strings.Index(line, ": ")
+			if colonIndex != -1 {
+				w.Header().Set(line[0:colonIndex], line[colonIndex+2:])
+			} else {
+				// we reached the end of the headers (empty line), so what follows is the body
+				responseBody := strings.Join(lines[idx+1:], "\n")
+				_, err := w.Write([]byte(responseBody))
+				assert.NoError(t, err, "writing the body of the HTTP response failed")
+				break
+			}
+		}
+	}))
+	mockServerBaseURL = server.URL
+	return server
+}
+
+func NormalizedFullPath(url *url.URL) string {
+	// TODO normalize path (remove trailing slash?)
+	// TODO normalize RawQuery (order query parameters?)
+	if len(url.Query()) == 0 {
+		return url.EscapedPath()
+	}
+	return fmt.Sprintf("%s?%s", url.EscapedPath(), url.RawQuery)
+}
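A hypothetical test using NewMockWebServer, not part of the diff. The live base URL, the test data directory and the requested path are placeholders; in test mode the matching fixture file is assumed to exist under the test data directory.

    package example

    import (
        "net/http"
        "testing"

        "code.gitea.io/gitea/models/unittest"
    )

    // TestWithMockedGitHub points the code under test at the mock server's URL
    // instead of the live service; responses come from canned fixture files.
    func TestWithMockedGitHub(t *testing.T) {
        server := unittest.NewMockWebServer(t, "https://api.github.com", "testdata/github", false)
        defer server.Close()

        // The client under test would be configured with server.URL as its base URL.
        resp, err := http.Get(server.URL + "/repos/example/example")
        if err != nil {
            t.Fatal(err)
        }
        defer resp.Body.Close()
    }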
@@ -231,6 +231,25 @@ func GetEmailAddresses(ctx context.Context, uid int64) ([]*EmailAddress, error)
 	return emails, nil
 }
 
+type ActivatedEmailAddress struct {
+	ID    int64
+	Email string
+}
+
+func GetActivatedEmailAddresses(ctx context.Context, uid int64) ([]*ActivatedEmailAddress, error) {
+	emails := make([]*ActivatedEmailAddress, 0, 8)
+	if err := db.GetEngine(ctx).
+		Table("email_address").
+		Select("id, email").
+		Where("uid=?", uid).
+		And("is_activated=?", true).
+		Asc("id").
+		Find(&emails); err != nil {
+		return nil, err
+	}
+	return emails, nil
+}
+
 // GetEmailAddressByID gets a user's email address by ID
 func GetEmailAddressByID(ctx context.Context, uid, id int64) (*EmailAddress, error) {
 	// User ID is required for security reasons
@@ -313,31 +332,7 @@ func updateActivation(ctx context.Context, email *EmailAddress, activate bool) e
 	return UpdateUserCols(ctx, user, "rands")
 }
 
-// MakeEmailPrimary sets primary email address of given user.
-func MakeEmailPrimary(ctx context.Context, email *EmailAddress) error {
-	has, err := db.GetEngine(ctx).Get(email)
-	if err != nil {
-		return err
-	} else if !has {
-		return ErrEmailAddressNotExist{Email: email.Email}
-	}
-
-	if !email.IsActivated {
-		return ErrEmailNotActivated
-	}
-
-	user := &User{}
-	has, err = db.GetEngine(ctx).ID(email.UID).Get(user)
-	if err != nil {
-		return err
-	} else if !has {
-		return ErrUserNotExist{
-			UID:   email.UID,
-			Name:  "",
-			KeyID: 0,
-		}
-	}
-
+func MakeEmailPrimaryWithUser(ctx context.Context, user *User, email *EmailAddress) error {
 	ctx, committer, err := db.TxContext(ctx)
 	if err != nil {
 		return err
@@ -367,6 +362,30 @@ func MakeEmailPrimary(ctx context.Context, email *EmailAddress) error {
 	return committer.Commit()
 }
 
+// MakeEmailPrimary sets primary email address of given user.
+func MakeEmailPrimary(ctx context.Context, email *EmailAddress) error {
+	has, err := db.GetEngine(ctx).Get(email)
+	if err != nil {
+		return err
+	} else if !has {
+		return ErrEmailAddressNotExist{Email: email.Email}
+	}
+
+	if !email.IsActivated {
+		return ErrEmailNotActivated
+	}
+
+	user := &User{}
+	has, err = db.GetEngine(ctx).ID(email.UID).Get(user)
+	if err != nil {
+		return err
+	} else if !has {
+		return ErrUserNotExist{UID: email.UID}
+	}
+
+	return MakeEmailPrimaryWithUser(ctx, user, email)
+}
+
 // VerifyActiveEmailCode verifies active email code when active account
 func VerifyActiveEmailCode(ctx context.Context, code, email string) *EmailAddress {
 	minutes := setting.Service.ActiveCodeLives
@@ -4,6 +4,7 @@
 package user_test
 
 import (
+	"fmt"
 	"testing"
 
 	"code.gitea.io/gitea/models/db"
@@ -219,3 +220,37 @@ func TestEmailAddressValidate(t *testing.T) {
 		})
 	}
 }
+
+func TestGetActivatedEmailAddresses(t *testing.T) {
+	assert.NoError(t, unittest.PrepareTestDatabase())
+
+	testCases := []struct {
+		UID      int64
+		expected []*user_model.ActivatedEmailAddress
+	}{
+		{
+			UID:      1,
+			expected: []*user_model.ActivatedEmailAddress{{ID: 9, Email: "user1@example.com"}, {ID: 33, Email: "user1-2@example.com"}, {ID: 34, Email: "user1-3@example.com"}},
+		},
+		{
+			UID:      2,
+			expected: []*user_model.ActivatedEmailAddress{{ID: 3, Email: "user2@example.com"}},
+		},
+		{
+			UID:      4,
+			expected: []*user_model.ActivatedEmailAddress{{ID: 11, Email: "user4@example.com"}},
+		},
+		{
+			UID:      11,
+			expected: []*user_model.ActivatedEmailAddress{},
+		},
+	}
+
+	for _, testCase := range testCases {
+		t.Run(fmt.Sprintf("User %d", testCase.UID), func(t *testing.T) {
+			emails, err := user_model.GetActivatedEmailAddresses(db.DefaultContext, testCase.UID)
+			assert.NoError(t, err)
+			assert.Equal(t, testCase.expected, emails)
+		})
+	}
+}
@@ -216,6 +216,12 @@ func GetAllUsers(ctx context.Context) ([]*User, error) {
 	return users, db.GetEngine(ctx).OrderBy("id").Where("type = ?", UserTypeIndividual).Find(&users)
 }
 
+// GetAllAdmins returns a slice of all adminusers found in DB.
+func GetAllAdmins(ctx context.Context) ([]*User, error) {
+	users := make([]*User, 0)
+	return users, db.GetEngine(ctx).OrderBy("id").Where("type = ?", UserTypeIndividual).And("is_admin = ?", true).Find(&users)
+}
+
 // IsLocal returns true if user login type is LoginPlain.
 func (u *User) IsLocal() bool {
 	return u.LoginType <= auth.Plain
@@ -484,6 +484,16 @@ func TestIsUserVisibleToViewer(t *testing.T) {
 	test(user31, nil, false)
 }
 
+func TestGetAllAdmins(t *testing.T) {
+	assert.NoError(t, unittest.PrepareTestDatabase())
+
+	admins, err := user_model.GetAllAdmins(db.DefaultContext)
+	assert.NoError(t, err)
+
+	assert.Len(t, admins, 1)
+	assert.Equal(t, int64(1), admins[0].ID)
+}
+
 func Test_ValidateUser(t *testing.T) {
 	oldSetting := setting.Service.AllowedUserVisibilityModesSlice
 	defer func() {
@@ -501,6 +511,11 @@ func Test_ValidateUser(t *testing.T) {
 }
 
 func Test_NormalizeUserFromEmail(t *testing.T) {
+	oldSetting := setting.Service.AllowDotsInUsernames
+	defer func() {
+		setting.Service.AllowDotsInUsernames = oldSetting
+	}()
+	setting.Service.AllowDotsInUsernames = true
 	testCases := []struct {
 		Input    string
 		Expected string
@@ -35,6 +35,9 @@ func canGithubEventMatch(eventName string, triggedEvent webhook_module.HookEvent
 	case GithubEventGollum:
 		return triggedEvent == webhook_module.HookEventWiki
 
+	case GithubEventSchedule:
+		return triggedEvent == webhook_module.HookEventSchedule
+
 	case GithubEventIssues:
 		switch triggedEvent {
 		case webhook_module.HookEventIssues,
@@ -70,9 +73,6 @@ func canGithubEventMatch(eventName string, triggedEvent webhook_module.HookEvent
 			return false
 		}
 
-	case GithubEventSchedule:
-		return triggedEvent == webhook_module.HookEventSchedule
-
 	default:
 		return eventName == string(triggedEvent)
 	}
@@ -22,7 +22,7 @@ import (
 
 type DetectedWorkflow struct {
 	EntryName    string
-	TriggerEvent *jobparser.Event
+	TriggerEvent string
 	Content      []byte
 }
 
@@ -103,7 +103,6 @@ func DetectWorkflows(
 	commit *git.Commit,
 	triggedEvent webhook_module.HookEventType,
 	payload api.Payloader,
-	detectSchedule bool,
 ) ([]*DetectedWorkflow, []*DetectedWorkflow, error) {
 	entries, err := ListWorkflows(commit)
 	if err != nil {
@@ -118,7 +117,6 @@ func DetectWorkflows(
 			return nil, nil, err
 		}
 
-		// one workflow may have multiple events
 		events, err := GetEventsFromContent(content)
 		if err != nil {
 			log.Warn("ignore invalid workflow %q: %v", entry.Name(), err)
@@ -127,18 +125,17 @@ func DetectWorkflows(
 		for _, evt := range events {
 			log.Trace("detect workflow %q for event %#v matching %q", entry.Name(), evt, triggedEvent)
 			if evt.IsSchedule() {
-				if detectSchedule {
-					dwf := &DetectedWorkflow{
-						EntryName:    entry.Name(),
-						TriggerEvent: evt,
-						Content:      content,
-					}
-					schedules = append(schedules, dwf)
-				}
-			} else if detectMatched(gitRepo, commit, triggedEvent, payload, evt) {
 				dwf := &DetectedWorkflow{
 					EntryName:    entry.Name(),
-					TriggerEvent: evt,
+					TriggerEvent: evt.Name,
+					Content:      content,
+				}
+				schedules = append(schedules, dwf)
+			}
+			if detectMatched(gitRepo, commit, triggedEvent, payload, evt) {
+				dwf := &DetectedWorkflow{
+					EntryName:    entry.Name(),
+					TriggerEvent: evt.Name,
 					Content:      content,
 				}
 				workflows = append(workflows, dwf)
@@ -149,41 +146,6 @@ func DetectWorkflows(
 	return workflows, schedules, nil
 }
 
-func DetectScheduledWorkflows(gitRepo *git.Repository, commit *git.Commit) ([]*DetectedWorkflow, error) {
-	entries, err := ListWorkflows(commit)
-	if err != nil {
-		return nil, err
-	}
-
-	wfs := make([]*DetectedWorkflow, 0, len(entries))
-	for _, entry := range entries {
-		content, err := GetContentFromEntry(entry)
-		if err != nil {
-			return nil, err
-		}
-
-		// one workflow may have multiple events
-		events, err := GetEventsFromContent(content)
-		if err != nil {
-			log.Warn("ignore invalid workflow %q: %v", entry.Name(), err)
-			continue
-		}
-		for _, evt := range events {
-			if evt.IsSchedule() {
-				log.Trace("detect scheduled workflow: %q", entry.Name())
-				dwf := &DetectedWorkflow{
-					EntryName:    entry.Name(),
-					TriggerEvent: evt,
-					Content:      content,
-				}
-				wfs = append(wfs, dwf)
-			}
-		}
-	}
-
-	return wfs, nil
-}
-
 func detectMatched(gitRepo *git.Repository, commit *git.Commit, triggedEvent webhook_module.HookEventType, payload api.Payloader, evt *jobparser.Event) bool {
 	if !canGithubEventMatch(evt.Name, triggedEvent) {
 		return false
@@ -191,11 +153,11 @@ func detectMatched(gitRepo *git.Repository, commit *git.Commit, triggedEvent web
 
 	switch triggedEvent {
 	case // events with no activity types
+		webhook_module.HookEventSchedule,
 		webhook_module.HookEventCreate,
 		webhook_module.HookEventDelete,
 		webhook_module.HookEventFork,
-		webhook_module.HookEventWiki,
-		webhook_module.HookEventSchedule:
+		webhook_module.HookEventWiki:
 		if len(evt.Acts()) != 0 {
 			log.Warn("Ignore unsupported %s event arguments %v", triggedEvent, evt.Acts())
 		}
@@ -4,12 +4,12 @@
 package hash
 
 import (
+	"crypto/sha256"
 	"encoding/hex"
 	"strings"
 
 	"code.gitea.io/gitea/modules/log"
 
-	"github.com/minio/sha256-simd"
 	"golang.org/x/crypto/pbkdf2"
 )
 
@@ -4,10 +4,9 @@
 package avatar
 
 import (
+	"crypto/sha256"
 	"encoding/hex"
 	"strconv"
-
-	"github.com/minio/sha256-simd"
 )
 
 // HashAvatar will generate a unique string, which ensures that when there's a
@@ -7,11 +7,10 @@
 package identicon
 
 import (
+	"crypto/sha256"
 	"fmt"
 	"image"
 	"image/color"
-
-	"github.com/minio/sha256-simd"
 )
 
 const minImageSize = 16
@@ -5,6 +5,7 @@ package base
 
 import (
 	"crypto/sha1"
+	"crypto/sha256"
 	"encoding/base64"
 	"encoding/hex"
 	"errors"
@@ -22,7 +23,6 @@ import (
 	"code.gitea.io/gitea/modules/setting"
 
 	"github.com/dustin/go-humanize"
-	"github.com/minio/sha256-simd"
 )
 
 // EncodeSha1 string to sha1 hex value.
@@ -11,6 +11,7 @@ import (
 	"net/url"
 	"strings"
 
+	issues_model "code.gitea.io/gitea/models/issues"
 	"code.gitea.io/gitea/models/unit"
 	user_model "code.gitea.io/gitea/models/user"
 	mc "code.gitea.io/gitea/modules/cache"
@@ -38,6 +39,7 @@ type APIContext struct {
 	ContextUser *user_model.User // the user which is being visited, in most cases it differs from Doer
 
 	Repo    *Repository
+	Comment *issues_model.Comment
 	Org     *APIOrganization
 	Package *Package
 }
@ -400,6 +400,7 @@ func repoAssignment(ctx *Context, repo *repo_model.Repository) {
|
||||||
ctx.Data["PushMirrors"] = pushMirrors
|
ctx.Data["PushMirrors"] = pushMirrors
|
||||||
ctx.Data["RepoName"] = ctx.Repo.Repository.Name
|
ctx.Data["RepoName"] = ctx.Repo.Repository.Name
|
||||||
ctx.Data["IsEmptyRepo"] = ctx.Repo.Repository.IsEmpty
|
ctx.Data["IsEmptyRepo"] = ctx.Repo.Repository.IsEmpty
|
||||||
|
ctx.Data["DefaultWikiBranchName"] = setting.Repository.DefaultBranch
|
||||||
}
|
}
|
||||||
|
|
||||||
// RepoIDAssignment returns a handler which assigns the repo to the context.
|
// RepoIDAssignment returns a handler which assigns the repo to the context.
|
||||||
|
|
|
@ -38,22 +38,14 @@ func NewInternalToken() (string, error) {
|
||||||
return internalToken, nil
|
return internalToken, nil
|
||||||
}
|
}
|
||||||
|
|
||||||
// NewJwtSecret generates a new value intended to be used for JWT secrets.
|
// NewJwtSecret generates a new base64 encoded value intended to be used for JWT secrets.
|
||||||
func NewJwtSecret() ([]byte, error) {
|
func NewJwtSecret() ([]byte, string, error) {
|
||||||
bytes := make([]byte, 32)
|
bytes := make([]byte, 32)
|
||||||
_, err := io.ReadFull(rand.Reader, bytes)
|
_, err := rand.Read(bytes)
|
||||||
if err != nil {
|
|
||||||
return nil, err
|
|
||||||
}
|
|
||||||
return bytes, nil
|
|
||||||
}
|
|
||||||
|
|
||||||
// NewJwtSecretBase64 generates a new base64 encoded value intended to be used for JWT secrets.
|
|
||||||
func NewJwtSecretBase64() ([]byte, string, error) {
|
|
||||||
bytes, err := NewJwtSecret()
|
|
||||||
if err != nil {
|
if err != nil {
|
||||||
return nil, "", err
|
return nil, "", err
|
||||||
}
|
}
|
||||||
|
|
||||||
return bytes, base64.RawURLEncoding.EncodeToString(bytes), nil
|
return bytes, base64.RawURLEncoding.EncodeToString(bytes), nil
|
||||||
}
|
}
|
||||||
|
|
||||||
|
|
|
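The two JWT-secret helpers above are folded into a single NewJwtSecret that returns both the raw bytes and the base64 form. A minimal calling sketch, not part of the change, assuming the generate package path used throughout this tree:

package main

import (
	"fmt"
	"log"

	"code.gitea.io/gitea/modules/generate"
)

func main() {
	// NewJwtSecret now returns the raw 32-byte secret and its
	// base64.RawURLEncoding representation in one call.
	secretBytes, secretBase64, err := generate.NewJwtSecret()
	if err != nil {
		log.Fatalf("NewJwtSecret: %v", err)
	}
	fmt.Printf("%d raw bytes, base64 form: %s\n", len(secretBytes), secretBase64)
}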
@@ -515,6 +515,62 @@ func GetCommitFileStatus(ctx context.Context, repoPath, commitID string) (*Commi
 	return fileStatus, nil
 }

+func parseCommitRenames(renames *[][2]string, stdout io.Reader) {
+	rd := bufio.NewReader(stdout)
+	for {
+		// Skip (R || three digits || NULL byte)
+		_, err := rd.Discard(5)
+		if err != nil {
+			if err != io.EOF {
+				log.Error("Unexpected error whilst reading from git log --name-status. Error: %v", err)
+			}
+			return
+		}
+		oldFileName, err := rd.ReadString('\x00')
+		if err != nil {
+			if err != io.EOF {
+				log.Error("Unexpected error whilst reading from git log --name-status. Error: %v", err)
+			}
+			return
+		}
+		newFileName, err := rd.ReadString('\x00')
+		if err != nil {
+			if err != io.EOF {
+				log.Error("Unexpected error whilst reading from git log --name-status. Error: %v", err)
+			}
+			return
+		}
+		oldFileName = strings.TrimSuffix(oldFileName, "\x00")
+		newFileName = strings.TrimSuffix(newFileName, "\x00")
+		*renames = append(*renames, [2]string{oldFileName, newFileName})
+	}
+}
+
+// GetCommitFileRenames returns the renames that the commit contains.
+func GetCommitFileRenames(ctx context.Context, repoPath, commitID string) ([][2]string, error) {
+	renames := [][2]string{}
+	stdout, w := io.Pipe()
+	done := make(chan struct{})
+	go func() {
+		parseCommitRenames(&renames, stdout)
+		close(done)
+	}()
+
+	stderr := new(bytes.Buffer)
+	err := NewCommand(ctx, "show", "--name-status", "--pretty=format:", "-z", "--diff-filter=R").AddDynamicArguments(commitID).Run(&RunOpts{
+		Dir:    repoPath,
+		Stdout: w,
+		Stderr: stderr,
+	})
+	w.Close() // Close writer to exit parsing goroutine
+	if err != nil {
+		return nil, ConcatenateError(err, stderr.String())
+	}
+
+	<-done
+	return renames, nil
+}
+
 // GetFullCommitID returns full length (40) of commit ID by given short SHA in a repository.
 func GetFullCommitID(ctx context.Context, repoPath, shortID string) (string, error) {
 	commitID, _, err := NewCommand(ctx, "rev-parse").AddDynamicArguments(shortID).RunStdString(&RunOpts{Dir: repoPath})
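For orientation, a sketch of how the new rename helper might be called from outside the package; the repository path and revision are placeholders, not values taken from this change:

package main

import (
	"context"
	"fmt"
	"log"

	"code.gitea.io/gitea/modules/git"
)

func main() {
	// Each returned entry is a [2]string pair: old path, new path.
	renames, err := git.GetCommitFileRenames(context.Background(), "/path/to/repo.git", "HEAD")
	if err != nil {
		log.Fatalf("GetCommitFileRenames: %v", err)
	}
	for _, rename := range renames {
		fmt.Printf("%s -> %s\n", rename[0], rename[1])
	}
}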
@@ -278,3 +278,30 @@ func TestGetCommitFileStatusMerges(t *testing.T) {
 	assert.Equal(t, commitFileStatus.Removed, expected.Removed)
 	assert.Equal(t, commitFileStatus.Modified, expected.Modified)
 }
+
+func TestParseCommitRenames(t *testing.T) {
+	testcases := []struct {
+		output  string
+		renames [][2]string
+	}{
+		{
+			output:  "R090\x00renamed.txt\x00history.txt\x00",
+			renames: [][2]string{{"renamed.txt", "history.txt"}},
+		},
+		{
+			output:  "R090\x00renamed.txt\x00history.txt\x00R000\x00corruptedstdouthere",
+			renames: [][2]string{{"renamed.txt", "history.txt"}},
+		},
+		{
+			output:  "R100\x00renamed.txt\x00history.txt\x00R001\x00readme.md\x00README.md\x00",
+			renames: [][2]string{{"renamed.txt", "history.txt"}, {"readme.md", "README.md"}},
+		},
+	}
+
+	for _, testcase := range testcases {
+		renames := [][2]string{}
+		parseCommitRenames(&renames, strings.NewReader(testcase.output))
+
+		assert.Equal(t, testcase.renames, renames)
+	}
+}

@@ -4,12 +4,11 @@
 package git

 import (
+	"crypto/sha256"
 	"fmt"

 	"code.gitea.io/gitea/modules/log"
 	"code.gitea.io/gitea/modules/setting"
-
-	"github.com/minio/sha256-simd"
 )

 // Cache represents a caching interface

@@ -291,7 +291,7 @@ func (repo *Repository) CheckAttributeReader(commitID string) (*CheckAttributeRe
 	}

 	checker := &CheckAttributeReader{
-		Attributes: []string{"linguist-vendored", "linguist-generated", "linguist-language", "gitlab-language"},
+		Attributes: []string{"linguist-vendored", "linguist-generated", "linguist-language", "gitlab-language", "linguist-documentation", "linguist-detectable"},
 		Repo:       repo,
 		IndexFile:  indexFilename,
 		WorkTree:   worktree,

@@ -13,6 +13,18 @@ const (
 	bigFileSize int64 = 1024 * 1024 // 1 MiB
 )

+type LinguistBoolAttrib struct {
+	Value string
+}
+
+func (attrib *LinguistBoolAttrib) IsTrue() bool {
+	return attrib.Value == "set" || attrib.Value == "true"
+}
+
+func (attrib *LinguistBoolAttrib) IsFalse() bool {
+	return attrib.Value == "unset" || attrib.Value == "false"
+}
+
 // mergeLanguageStats mergers language names with different cases. The name with most upper case letters is used.
 func mergeLanguageStats(stats map[string]int64) map[string]int64 {
 	names := map[string]struct {
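Replacing the notVendored/notGenerated booleans with LinguistBoolAttrib gives each linguist attribute three effective states: explicitly true ("set"/"true"), explicitly false ("unset"/"false"), or unspecified, which is why IsTrue and IsFalse are not complements of each other. A self-contained sketch of that behaviour, with the type copied from the hunk above:

package main

import "fmt"

// LinguistBoolAttrib wraps a raw gitattributes value.
type LinguistBoolAttrib struct {
	Value string
}

// IsTrue reports an explicit positive value.
func (attrib *LinguistBoolAttrib) IsTrue() bool {
	return attrib.Value == "set" || attrib.Value == "true"
}

// IsFalse reports an explicit negative value.
func (attrib *LinguistBoolAttrib) IsFalse() bool {
	return attrib.Value == "unset" || attrib.Value == "false"
}

func main() {
	for _, v := range []string{"set", "true", "unset", "false", ""} {
		a := LinguistBoolAttrib{Value: v}
		fmt.Printf("%q: IsTrue=%v IsFalse=%v\n", v, a.IsTrue(), a.IsFalse())
	}
	// An empty (unspecified) value is neither true nor false, so callers such as
	// GetLanguageStats can fall back to enry's heuristics in that case.
}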
@@ -1,4 +1,5 @@
 // Copyright 2020 The Gitea Authors. All rights reserved.
+// Copyright 2024 The Forgejo Authors c/o Codeberg e.V.. All rights reserved.
 // SPDX-License-Identifier: MIT

 //go:build gogit

@@ -57,23 +58,25 @@ func (repo *Repository) GetLanguageStats(commitID string) (map[string]int64, err
 			return nil
 		}

-		notVendored := false
-		notGenerated := false
+		isVendored := LinguistBoolAttrib{}
+		isGenerated := LinguistBoolAttrib{}
+		isDocumentation := LinguistBoolAttrib{}
+		isDetectable := LinguistBoolAttrib{}

 		if checker != nil {
 			attrs, err := checker.CheckPath(f.Name)
 			if err == nil {
 				if vendored, has := attrs["linguist-vendored"]; has {
-					if vendored == "set" || vendored == "true" {
-						return nil
-					}
-					notVendored = vendored == "false"
+					isVendored = LinguistBoolAttrib{Value: vendored}
 				}
 				if generated, has := attrs["linguist-generated"]; has {
-					if generated == "set" || generated == "true" {
-						return nil
-					}
-					notGenerated = generated == "false"
+					isGenerated = LinguistBoolAttrib{Value: generated}
+				}
+				if documentation, has := attrs["linguist-documentation"]; has {
+					isDocumentation = LinguistBoolAttrib{Value: documentation}
+				}
+				if detectable, has := attrs["linguist-detectable"]; has {
+					isDetectable = LinguistBoolAttrib{Value: detectable}
 				}
 				if language, has := attrs["linguist-language"]; has && language != "unspecified" && language != "" {
 					// group languages, such as Pug -> HTML; SCSS -> CSS

@@ -105,8 +108,11 @@ func (repo *Repository) GetLanguageStats(commitID string) (map[string]int64, err
 			}
 		}

-		if (!notVendored && analyze.IsVendor(f.Name)) || enry.IsDotFile(f.Name) ||
-			enry.IsDocumentation(f.Name) || enry.IsConfiguration(f.Name) {
+		if isDetectable.IsFalse() || isVendored.IsTrue() || isDocumentation.IsTrue() ||
+			(!isVendored.IsFalse() && analyze.IsVendor(f.Name)) ||
+			enry.IsDotFile(f.Name) ||
+			enry.IsConfiguration(f.Name) ||
+			(!isDocumentation.IsFalse() && enry.IsDocumentation(f.Name)) {
 			return nil
 		}

@@ -115,12 +121,11 @@ func (repo *Repository) GetLanguageStats(commitID string) (map[string]int64, err
 		if f.Size <= bigFileSize {
 			content, _ = readFile(f, fileSizeLimit)
 		}
-		if !notGenerated && enry.IsGenerated(f.Name, content) {
+		if !isGenerated.IsTrue() && enry.IsGenerated(f.Name, content) {
 			return nil
 		}

 		// TODO: Use .gitattributes file for linguist overrides
-
 		language := analyze.GetCodeLanguage(f.Name, content)
 		if language == enry.OtherLanguage || language == "" {
 			return nil

@@ -136,6 +141,13 @@ func (repo *Repository) GetLanguageStats(commitID string) (map[string]int64, err
 		if !checked {
 			langtype := enry.GetLanguageType(language)
 			included = langtype == enry.Programming || langtype == enry.Markup
+			if !included {
+				if isDetectable.IsTrue() {
+					included = true
+				} else {
+					return nil
+				}
+			}
 			includedLanguage[language] = included
 		}
 		if included {

@@ -1,4 +1,5 @@
 // Copyright 2020 The Gitea Authors. All rights reserved.
+// Copyright 2024 The Forgejo Authors c/o Codeberg e.V.. All rights reserved.
 // SPDX-License-Identifier: MIT

 //go:build !gogit

@@ -90,23 +91,25 @@ func (repo *Repository) GetLanguageStats(commitID string) (map[string]int64, err
 			continue
 		}

-		notVendored := false
-		notGenerated := false
+		isVendored := LinguistBoolAttrib{}
+		isGenerated := LinguistBoolAttrib{}
+		isDocumentation := LinguistBoolAttrib{}
+		isDetectable := LinguistBoolAttrib{}

 		if checker != nil {
 			attrs, err := checker.CheckPath(f.Name())
 			if err == nil {
 				if vendored, has := attrs["linguist-vendored"]; has {
-					if vendored == "set" || vendored == "true" {
-						continue
-					}
-					notVendored = vendored == "false"
+					isVendored = LinguistBoolAttrib{Value: vendored}
 				}
 				if generated, has := attrs["linguist-generated"]; has {
-					if generated == "set" || generated == "true" {
-						continue
-					}
-					notGenerated = generated == "false"
+					isGenerated = LinguistBoolAttrib{Value: generated}
+				}
+				if documentation, has := attrs["linguist-documentation"]; has {
+					isDocumentation = LinguistBoolAttrib{Value: documentation}
+				}
+				if detectable, has := attrs["linguist-detectable"]; has {
+					isDetectable = LinguistBoolAttrib{Value: detectable}
 				}
 				if language, has := attrs["linguist-language"]; has && language != "unspecified" && language != "" {
 					// group languages, such as Pug -> HTML; SCSS -> CSS

@@ -139,8 +142,11 @@ func (repo *Repository) GetLanguageStats(commitID string) (map[string]int64, err
 			}
 		}

-		if (!notVendored && analyze.IsVendor(f.Name())) || enry.IsDotFile(f.Name()) ||
-			enry.IsDocumentation(f.Name()) || enry.IsConfiguration(f.Name()) {
+		if isDetectable.IsFalse() || isVendored.IsTrue() || isDocumentation.IsTrue() ||
+			(!isVendored.IsFalse() && analyze.IsVendor(f.Name())) ||
+			enry.IsDotFile(f.Name()) ||
+			enry.IsConfiguration(f.Name()) ||
+			(!isDocumentation.IsFalse() && enry.IsDocumentation(f.Name())) {
 			continue
 		}

@@ -173,7 +179,7 @@ func (repo *Repository) GetLanguageStats(commitID string) (map[string]int64, err
 				return nil, err
 			}
 		}
-		if !notGenerated && enry.IsGenerated(f.Name(), content) {
+		if !isGenerated.IsTrue() && enry.IsGenerated(f.Name(), content) {
 			continue
 		}

@@ -194,6 +200,13 @@ func (repo *Repository) GetLanguageStats(commitID string) (map[string]int64, err
 		if !checked {
 			langType := enry.GetLanguageType(language)
 			included = langType == enry.Programming || langType == enry.Markup
+			if !included {
+				if isDetectable.IsTrue() {
+					included = true
+				} else {
+					continue
+				}
+			}
 			includedLanguage[language] = included
 		}
 		if included {

@@ -1,9 +1,12 @@
 // Copyright 2015 The Gogs Authors. All rights reserved.
 // Copyright 2019 The Gitea Authors. All rights reserved.
+// Copyright 2024 The Forgejo Authors c/o Codeberg e.V.. All rights reserved.
 // SPDX-License-Identifier: MIT

 package git

+import "strings"
+
 // GetBlobByPath get the blob object according the path
 func (t *Tree) GetBlobByPath(relpath string) (*Blob, error) {
 	entry, err := t.GetTreeEntryByPath(relpath)

@@ -17,3 +20,21 @@ func (t *Tree) GetBlobByPath(relpath string) (*Blob, error) {

 	return nil, ErrNotExist{"", relpath}
 }
+
+// GetBlobByFoldedPath returns the blob object at relpath, regardless of the
+// case of relpath. If there are multiple files with the same case-insensitive
+// name, the first one found will be returned.
+func (t *Tree) GetBlobByFoldedPath(relpath string) (*Blob, error) {
+	entries, err := t.ListEntries()
+	if err != nil {
+		return nil, err
+	}
+
+	for _, entry := range entries {
+		if strings.EqualFold(entry.Name(), relpath) {
+			return t.GetBlobByPath(entry.Name())
+		}
+	}
+
+	return nil, ErrNotExist{"", relpath}
+}
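A sketch of how the case-folding lookup might be used; the repository path and revision are placeholders, and GetCommit/Name/Size are assumed to be the existing helpers of this module:

package main

import (
	"context"
	"fmt"
	"log"

	"code.gitea.io/gitea/modules/git"
)

func main() {
	repo, err := git.OpenRepository(context.Background(), "/path/to/repo.git")
	if err != nil {
		log.Fatal(err)
	}
	defer repo.Close()

	commit, err := repo.GetCommit("HEAD")
	if err != nil {
		log.Fatal(err)
	}

	// Finds README.md (or any other casing) when asked for "readme.md".
	blob, err := commit.Tree.GetBlobByFoldedPath("readme.md")
	if err != nil {
		log.Fatal(err)
	}
	fmt.Println(blob.Name(), blob.Size())
}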
@@ -4,6 +4,7 @@
 package lfs

 import (
+	"crypto/sha256"
 	"encoding/hex"
 	"errors"
 	"hash"

@@ -12,8 +13,6 @@ import (

 	"code.gitea.io/gitea/modules/log"
 	"code.gitea.io/gitea/modules/storage"
-
-	"github.com/minio/sha256-simd"
 )

 var (

@@ -4,6 +4,7 @@
 package lfs

 import (
+	"crypto/sha256"
 	"encoding/hex"
 	"errors"
 	"fmt"

@@ -12,8 +13,6 @@ import (
 	"regexp"
 	"strconv"
 	"strings"
-
-	"github.com/minio/sha256-simd"
 )

 const (

@@ -29,12 +29,17 @@ func CleanValue(value []byte) []byte {
 	value = bytes.TrimSpace(value)
 	rs := bytes.Runes(value)
 	result := make([]rune, 0, len(rs))
+	needsDash := false
 	for _, r := range rs {
-		if unicode.IsLetter(r) || unicode.IsNumber(r) || r == '_' || r == '-' {
+		switch {
+		case unicode.IsLetter(r) || unicode.IsNumber(r) || r == '_':
+			if needsDash && len(result) > 0 {
+				result = append(result, '-')
+			}
+			needsDash = false
 			result = append(result, unicode.ToLower(r))
-		}
-		if unicode.IsSpace(r) {
-			result = append(result, '-')
+		default:
+			needsDash = true
 		}
 	}
 	return []byte(string(result))

@@ -1,4 +1,5 @@
 // Copyright 2023 The Gitea Authors. All rights reserved.
+// Copyright 2023 The Forgejo Authors. All rights reserved.
 // SPDX-License-Identifier: MIT
 package common

@@ -15,44 +16,45 @@ func TestCleanValue(t *testing.T) {
 	}{
 		// Github behavior test cases
 		{"", ""},
-		{"test(0)", "test0"},
-		{"test!1", "test1"},
-		{"test:2", "test2"},
-		{"test*3", "test3"},
-		{"test!4", "test4"},
-		{"test:5", "test5"},
-		{"test*6", "test6"},
-		{"test:6 a", "test6-a"},
-		{"test:6 !b", "test6-b"},
-		{"test:ad # df", "testad--df"},
-		{"test:ad #23 df 2*/*", "testad-23-df-2"},
-		{"test:ad 23 df 2*/*", "testad-23-df-2"},
-		{"test:ad # 23 df 2*/*", "testad--23-df-2"},
+		{"test.0.1", "test-0-1"},
+		{"test(0)", "test-0"},
+		{"test!1", "test-1"},
+		{"test:2", "test-2"},
+		{"test*3", "test-3"},
+		{"test!4", "test-4"},
+		{"test:5", "test-5"},
+		{"test*6", "test-6"},
+		{"test:6 a", "test-6-a"},
+		{"test:6 !b", "test-6-b"},
+		{"test:ad # df", "test-ad-df"},
+		{"test:ad #23 df 2*/*", "test-ad-23-df-2"},
+		{"test:ad 23 df 2*/*", "test-ad-23-df-2"},
+		{"test:ad # 23 df 2*/*", "test-ad-23-df-2"},
 		{"Anchors in Markdown", "anchors-in-markdown"},
 		{"a_b_c", "a_b_c"},
 		{"a-b-c", "a-b-c"},
-		{"a-b-c----", "a-b-c----"},
-		{"test:6a", "test6a"},
-		{"test:a6", "testa6"},
-		{"tes a a   a  a", "tes-a-a---a--a"},
-		{"  tes a a   a  a  ", "tes-a-a---a--a"},
+		{"a-b-c----", "a-b-c"},
+		{"test:6a", "test-6a"},
+		{"test:a6", "test-a6"},
+		{"tes a a   a  a", "tes-a-a-a-a"},
+		{"  tes a a   a  a  ", "tes-a-a-a-a"},
 		{"Header with \"double quotes\"", "header-with-double-quotes"},
-		{"Placeholder to force scrolling on link's click", "placeholder-to-force-scrolling-on-links-click"},
+		{"Placeholder to force scrolling on link's click", "placeholder-to-force-scrolling-on-link-s-click"},
 		{"tes()", "tes"},
-		{"tes(0)", "tes0"},
-		{"tes{0}", "tes0"},
-		{"tes[0]", "tes0"},
-		{"test【0】", "test0"},
-		{"tes…@a", "tesa"},
+		{"tes(0)", "tes-0"},
+		{"tes{0}", "tes-0"},
+		{"tes[0]", "tes-0"},
+		{"test【0】", "test-0"},
+		{"tes…@a", "tes-a"},
 		{"tes¥& a", "tes-a"},
 		{"tes= a", "tes-a"},
-		{"tes|a", "tesa"},
-		{"tes\\a", "tesa"},
-		{"tes/a", "tesa"},
+		{"tes|a", "tes-a"},
+		{"tes\\a", "tes-a"},
+		{"tes/a", "tes-a"},
 		{"a啊啊b", "a啊啊b"},
-		{"c🤔️🤔️d", "cd"},
-		{"a⚡a", "aa"},
-		{"e.~f", "ef"},
+		{"c🤔️🤔️d", "c-d"},
+		{"a⚡a", "a-a"},
+		{"e.~f", "e-f"},
 	}
 	for _, test := range tests {
 		assert.Equal(t, []byte(test.expect), CleanValue([]byte(test.param)), test.param)
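The updated CleanValue collapses every run of unsupported characters into a single dash instead of silently dropping them, which is what the revised expectations above encode. An illustrative call, assuming the markup/common import path of this tree; the sample inputs come from the test table:

package main

import (
	"fmt"

	"code.gitea.io/gitea/modules/markup/common"
)

func main() {
	inputs := []string{
		"test:ad # df", // now "test-ad-df" rather than "testad--df"
		"Placeholder to force scrolling on link's click",
	}
	for _, s := range inputs {
		fmt.Printf("%q -> %q\n", s, common.CleanValue([]byte(s)))
	}
}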
@@ -524,6 +524,18 @@ func TestMathBlock(t *testing.T) {
 			"$$a$$",
 			`<pre class="code-block is-loading"><code class="chroma language-math display">a</code></pre>` + nl,
 		},
+		{
+			`\[a b\]`,
+			`<pre class="code-block is-loading"><code class="chroma language-math display">a b</code></pre>` + nl,
+		},
+		{
+			`\[a b]`,
+			`<p>[a b]</p>` + nl,
+		},
+		{
+			`$$a`,
+			`<p>$$a</p>` + nl,
+		},
 	}

 	for _, test := range testcases {

@@ -534,6 +546,204 @@ func TestMathBlock(t *testing.T) {
 	}
 }

+func TestFootnote(t *testing.T) {
+	testcases := []struct {
+		testcase string
+		expected string
+	}{
+		{
+			`Citation needed[^0].
+[^0]: Source`,
+			`<p>Citation needed<sup id="fnref:user-content-0"><a href="#fn:user-content-0" rel="nofollow">1</a></sup>.</p>
+<div>
+<hr/>
+<ol>
+<li id="fn:user-content-0">
+<p>Source <a href="#fnref:user-content-0" rel="nofollow">↩︎</a></p>
+</li>
+</ol>
+</div>
+`,
+		},
+		{
+			`Citation needed[^0]`,
+			`<p>Citation needed[^0]</p>
+`,
+		},
+		{
+			`Citation needed[^1], Citation needed twice[^3]
+[^3]: Source`,
+			`<p>Citation needed[^1], Citation needed twice<sup id="fnref:user-content-3"><a href="#fn:user-content-3" rel="nofollow">1</a></sup></p>
+<div>
+<hr/>
+<ol>
+<li id="fn:user-content-3">
+<p>Source <a href="#fnref:user-content-3" rel="nofollow">↩︎</a></p>
+</li>
+</ol>
+</div>
+`,
+		},
+		{
+			`Citation needed[^0]
+[^1]: Source`,
+			`<p>Citation needed[^0]</p>
+`,
+		},
+		{
+			`Citation needed[^0]
+[^0]: Source 1
+[^0]: Source 2`,
+			`<p>Citation needed<sup id="fnref:user-content-0"><a href="#fn:user-content-0" rel="nofollow">1</a></sup></p>
+<div>
+<hr/>
+<ol>
+<li id="fn:user-content-0">
+<p>Source 1 <a href="#fnref:user-content-0" rel="nofollow">↩︎</a></p>
+</li>
+</ol>
+</div>
+`,
+		},
+		{
+			`Citation needed![^0]
+[^0]: Source`,
+			`<p>Citation needed<sup id="fnref:user-content-0"><a href="#fn:user-content-0" rel="nofollow">1</a></sup></p>
+<div>
+<hr/>
+<ol>
+<li id="fn:user-content-0">
+<p>Source <a href="#fnref:user-content-0" rel="nofollow">↩︎</a></p>
+</li>
+</ol>
+</div>
+`,
+		},
+		{
+			`Trigger [^`,
+			`<p>Trigger [^</p>
+`,
+		},
+		{
+			`Trigger 2 [^0`,
+			`<p>Trigger 2 [^0</p>
+`,
+		},
+		{
+			`Citation needed[^0]
+[^0]: Source with citation needed[^1]
+[^1]: Source`,
+			`<p>Citation needed<sup id="fnref:user-content-0"><a href="#fn:user-content-0" rel="nofollow">1</a></sup></p>
+<div>
+<hr/>
+<ol>
+<li id="fn:user-content-0">
+<p>Source with citation needed<sup id="fnref:user-content-1"><a href="#fn:user-content-1" rel="nofollow">2</a></sup> <a href="#fnref:user-content-0" rel="nofollow">↩︎</a></p>
+</li>
+<li id="fn:user-content-1">
+<p>Source <a href="#fnref:user-content-1" rel="nofollow">↩︎</a></p>
+</li>
+</ol>
+</div>
+`,
+		},
+		{
+			`Citation needed[^#]
+[^#]: Source`,
+			`<p>Citation needed<sup id="fnref:user-content-1"><a href="#fn:user-content-1" rel="nofollow">1</a></sup></p>
+<div>
+<hr/>
+<ol>
+<li id="fn:user-content-1">
+<p>Source <a href="#fnref:user-content-1" rel="nofollow">↩︎</a></p>
+</li>
+</ol>
+</div>
+`,
+		},
+		{
+			`Citation needed[^0]
+    [^0]: Source`,
+			`<p>Citation needed[^0]<br/>
+[^0]: Source</p>
+`,
+		},
+		{
+			`[^0]: Source
+
+Citation needed[^0].`,
+			`<p>Citation needed<sup id="fnref:user-content-0"><a href="#fn:user-content-0" rel="nofollow">1</a></sup>.</p>
+<div>
+<hr/>
+<ol>
+<li id="fn:user-content-0">
+<p>Source <a href="#fnref:user-content-0" rel="nofollow">↩︎</a></p>
+</li>
+</ol>
+</div>
+`,
+		},
+		{
+			`Citation needed[^]
+[^]: Source`,
+			`<p>Citation needed[^]<br/>
+[^]: Source</p>
+`,
+		},
+		{
+			`Citation needed[^0]
+[^0] Source`,
+			`<p>Citation needed[^0]<br/>
+[^0] Source</p>
+`,
+		},
+		{
+			`Citation needed[^0]
+[^0 Source`,
+			`<p>Citation needed[^0]<br/>
+[^0 Source</p>
+`,
+		},
+		{
+			`Citation needed[^0] [^0]: Source`,
+			`<p>Citation needed[^0] [^0]: Source</p>
+`,
+		},
+		{
+			`Citation needed[^Source here 0 # 9-3]
+[^Source here 0 # 9-3]: Source`,
+			`<p>Citation needed<sup id="fnref:user-content-source-here-0-9-3"><a href="#fn:user-content-source-here-0-9-3" rel="nofollow">1</a></sup></p>
+<div>
+<hr/>
+<ol>
+<li id="fn:user-content-source-here-0-9-3">
+<p>Source <a href="#fnref:user-content-source-here-0-9-3" rel="nofollow">↩︎</a></p>
+</li>
+</ol>
+</div>
+`,
+		},
+		{
+			`Citation needed[^0]
+[^0]:`,
+			`<p>Citation needed<sup id="fnref:user-content-0"><a href="#fn:user-content-0" rel="nofollow">1</a></sup></p>
+<div>
+<hr/>
+<ol>
+<li id="fn:user-content-0">
+<a href="#fnref:user-content-0" rel="nofollow">↩︎</a></li>
+</ol>
+</div>
+`,
+		},
+	}
+	for _, test := range testcases {
+		res, err := markdown.RenderString(&markup.RenderContext{Ctx: git.DefaultContext}, test.testcase)
+		assert.NoError(t, err, "Unexpected error in testcase: %q", test.testcase)
+		assert.Equal(t, test.expected, res, "Unexpected result in testcase %q", test.testcase)
+	}
+}
+
 func TestTaskList(t *testing.T) {
 	testcases := []struct {
 		testcase string
@@ -55,10 +55,7 @@ func (b *blockParser) Open(parent ast.Node, reader text.Reader, pc parser.Contex
 		return node, parser.Close | parser.NoChildren
 	}

-	reader.Advance(segment.Len() - 1)
-	segment.Start += 2
-	node.Lines().Append(segment)
-	return node, parser.NoChildren
+	return nil, parser.NoChildren
 }

 // Continue parses the current line and returns a result of parsing.

@@ -135,7 +135,12 @@ type Writer struct {

 const mailto = "mailto:"

-func (r *Writer) resolveLink(l org.RegularLink) string {
+func (r *Writer) resolveLink(node org.Node) string {
+	l, ok := node.(org.RegularLink)
+	if !ok {
+		l = org.RegularLink{URL: strings.TrimPrefix(org.String(node), "file:")}
+	}
+
 	link := html.EscapeString(l.URL)
 	if l.Protocol == "file" {
 		link = link[len("file:"):]

@@ -162,14 +167,14 @@ func (r *Writer) WriteRegularLink(l org.RegularLink) {
 		if l.Description == nil {
 			fmt.Fprintf(r, `<img src="%s" alt="%s" />`, link, link)
 		} else {
-			imageSrc := r.resolveLink(l.Description[0].(org.RegularLink))
+			imageSrc := r.resolveLink(l.Description[0])
 			fmt.Fprintf(r, `<a href="%s"><img src="%s" alt="%s" /></a>`, link, imageSrc, imageSrc)
 		}
 	case "video":
 		if l.Description == nil {
 			fmt.Fprintf(r, `<video src="%s">%s</video>`, link, link)
 		} else {
-			videoSrc := r.resolveLink(l.Description[0].(org.RegularLink))
+			videoSrc := r.resolveLink(l.Description[0])
 			fmt.Fprintf(r, `<a href="%s"><video src="%s">%s</video></a>`, link, videoSrc, videoSrc)
 		}
 	default:

@@ -80,6 +80,12 @@ func TestRender_Media(t *testing.T) {
 		`<p><img src="https://example.com/example.svg" alt="https://example.com/example.svg" /></p>`)
 	test("[[https://example.com/example.mp4]]",
 		`<p><video src="https://example.com/example.mp4">https://example.com/example.mp4</video></p>`)
+
+	// Text description.
+	test("[[file:./lem-post.png][file:./lem-post.png]]",
+		`<p><a href="http://localhost:3000/gogits/gogs/lem-post.png"><img src="http://localhost:3000/gogits/gogs/lem-post.png" alt="http://localhost:3000/gogits/gogs/lem-post.png" /></a></p>`)
+	test("[[file:./lem-post.mp4][file:./lem-post.mp4]]",
+		`<p><a href="http://localhost:3000/gogits/gogs/lem-post.mp4"><video src="http://localhost:3000/gogits/gogs/lem-post.mp4">http://localhost:3000/gogits/gogs/lem-post.mp4</video></a></p>`)
 }

 func TestRender_Source(t *testing.T) {

@@ -1,4 +1,5 @@
 // Copyright 2019 The Gitea Authors. All rights reserved.
+// Copyright 2024 The Forgejo Authors c/o Codeberg e.V.. All rights reserved.
 // SPDX-License-Identifier: MIT

 package repository

@@ -99,7 +100,6 @@ func MigrateRepositoryGitData(ctx context.Context, u *user_model.User,
 			Mirror:        true,
 			Quiet:         true,
 			Timeout:       migrateTimeout,
-			Branch:        "master",
 			SkipTLSVerify: setting.Migrations.SkipTLSVerify,
 		}); err != nil {
 			log.Warn("Clone wiki: %v", err)

@@ -107,6 +107,30 @@ func MigrateRepositoryGitData(ctx context.Context, u *user_model.User,
 				return repo, fmt.Errorf("Failed to remove %s: %w", wikiPath, err)
 			}
 		} else {
+			// Figure out the branch of the wiki we just cloned. We assume
+			// that the default branch is to be used, and we'll use the same
+			// name as the source.
+			gitRepo, err := git.OpenRepository(ctx, wikiPath)
+			if err != nil {
+				log.Warn("Failed to open wiki repository during migration: %v", err)
+				if err := util.RemoveAll(wikiPath); err != nil {
+					return repo, fmt.Errorf("Failed to remove %s: %w", wikiPath, err)
+				}
+				return repo, err
+			}
+			defer gitRepo.Close()
+
+			branch, err := gitRepo.GetDefaultBranch()
+			if err != nil {
+				log.Warn("Failed to get the default branch of a migrated wiki repo: %v", err)
+				if err := util.RemoveAll(wikiPath); err != nil {
+					return repo, fmt.Errorf("Failed to remove %s: %w", wikiPath, err)
+				}
+
+				return repo, err
+			}
+			repo.WikiBranch = branch
+
 			if err := git.WriteCommitGraph(ctx, wikiPath); err != nil {
 				return repo, err
 			}

@@ -7,13 +7,12 @@ import (
 	"crypto/aes"
 	"crypto/cipher"
 	"crypto/rand"
+	"crypto/sha256"
 	"encoding/base64"
 	"encoding/hex"
 	"errors"
 	"fmt"
 	"io"
-
-	"github.com/minio/sha256-simd"
 )

 // AesEncrypt encrypts text and given key with AES.

@@ -5,8 +5,9 @@ package setting

 // Admin settings
 var Admin struct {
 	DisableRegularOrgCreation bool
 	DefaultEmailNotification  string
+	SendNotificationEmailOnNewUser bool
 }

 func loadAdminFrom(rootCfg ConfigProvider) {

modules/setting/badges.go (new file, 24 lines)
@@ -0,0 +1,24 @@
+// Copyright 2023 The Gitea Authors. All rights reserved.
+// SPDX-License-Identifier: MIT
+
+package setting
+
+import (
+	"text/template"
+)
+
+// Badges settings
+var Badges = struct {
+	Enabled                      bool               `ini:"ENABLED"`
+	GeneratorURLTemplate         string             `ini:"GENERATOR_URL_TEMPLATE"`
+	GeneratorURLTemplateTemplate *template.Template `ini:"-"`
+}{
+	Enabled:              true,
+	GeneratorURLTemplate: "https://img.shields.io/badge/{{.label}}-{{.text}}-{{.color}}",
+}
+
+func loadBadgesFrom(rootCfg ConfigProvider) {
+	mustMapSetting(rootCfg, "badges", &Badges)
+
+	Badges.GeneratorURLTemplateTemplate = template.Must(template.New("").Parse(Badges.GeneratorURLTemplate))
+}
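The default GENERATOR_URL_TEMPLATE above is an ordinary text/template. A self-contained sketch of how it expands; the label/text/color values are illustrative placeholders:

package main

import (
	"os"
	"text/template"
)

func main() {
	// Default template string from the new [badges] settings.
	tmpl := template.Must(template.New("").Parse(
		"https://img.shields.io/badge/{{.label}}-{{.text}}-{{.color}}"))
	// Prints: https://img.shields.io/badge/build-passing-brightgreen
	_ = tmpl.Execute(os.Stdout, map[string]string{
		"label": "build",
		"text":  "passing",
		"color": "brightgreen",
	})
}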
@@ -25,26 +25,27 @@ var (

 	// Database holds the database settings
 	Database = struct {
 		Type              DatabaseType
 		Host              string
 		Name              string
 		User              string
 		Passwd            string
 		Schema            string
 		SSLMode           string
 		Path              string
 		LogSQL            bool
 		MysqlCharset      string
 		CharsetCollation  string
 		Timeout           int // seconds
 		SQLiteJournalMode string
 		DBConnectRetries  int
 		DBConnectBackoff  time.Duration
 		MaxIdleConns      int
 		MaxOpenConns      int
 		ConnMaxLifetime   time.Duration
 		IterateBufferSize int
 		AutoMigration     bool
+		SlowQueryThreshold time.Duration
 	}{
 		Timeout:           500,
 		IterateBufferSize: 50,

@@ -87,6 +88,13 @@ func loadDBSetting(rootCfg ConfigProvider) {
 	Database.DBConnectRetries = sec.Key("DB_RETRIES").MustInt(10)
 	Database.DBConnectBackoff = sec.Key("DB_RETRY_BACKOFF").MustDuration(3 * time.Second)
 	Database.AutoMigration = sec.Key("AUTO_MIGRATION").MustBool(true)
+
+	deprecatedSetting(rootCfg, "database", "SLOW_QUERY_TRESHOLD", "database", "SLOW_QUERY_THRESHOLD", "1.23")
+	if sec.HasKey("SLOW_QUERY_TRESHOLD") && !sec.HasKey("SLOW_QUERY_THRESHOLD") {
+		Database.SlowQueryThreshold = sec.Key("SLOW_QUERY_TRESHOLD").MustDuration(5 * time.Second)
+	} else {
+		Database.SlowQueryThreshold = sec.Key("SLOW_QUERY_THRESHOLD").MustDuration(5 * time.Second)
+	}
 }

 // DBConnStr returns database connection string

@@ -64,7 +64,7 @@ func loadLFSFrom(rootCfg ConfigProvider) error {
 	LFS.JWTSecretBase64 = loadSecret(rootCfg.Section("server"), "LFS_JWT_SECRET_URI", "LFS_JWT_SECRET")
 	LFS.JWTSecretBytes, err = util.Base64FixedDecode(base64.RawURLEncoding, []byte(LFS.JWTSecretBase64), 32)
 	if err != nil {
-		LFS.JWTSecretBytes, LFS.JWTSecretBase64, err = generate.NewJwtSecretBase64()
+		LFS.JWTSecretBytes, LFS.JWTSecretBase64, err = generate.NewJwtSecret()
 		if err != nil {
 			return fmt.Errorf("error generating JWT Secret for custom config: %v", err)
 		}

@@ -138,12 +138,11 @@ func loadOAuth2From(rootCfg ConfigProvider) {

 	if InstallLock {
 		if _, err := util.Base64FixedDecode(base64.RawURLEncoding, []byte(OAuth2.JWTSecretBase64), 32); err != nil {
-			key, err := generate.NewJwtSecret()
+			_, OAuth2.JWTSecretBase64, err = generate.NewJwtSecret()
 			if err != nil {
 				log.Fatal("error generating JWT secret: %v", err)
 			}
-
-			OAuth2.JWTSecretBase64 = base64.RawURLEncoding.EncodeToString(key)
 			saveCfg, err := rootCfg.PrepareSaving()
 			if err != nil {
 				log.Fatal("save oauth2.JWT_SECRET failed: %v", err)

@@ -7,6 +7,7 @@ import (
 	"os/exec"
 	"path"
 	"path/filepath"
+	"slices"
 	"strings"

 	"code.gitea.io/gitea/modules/log"

@@ -19,6 +20,8 @@ const (
 	RepoCreatingPublic = "public"
 )

+var RecognisedRepositoryDownloadOrCloneMethods = []string{"download-zip", "download-targz", "download-bundle", "vscode-clone", "vscodium-clone", "cite"}
+
 // ItemsPerPage maximum items per page in forks, watchers and stars of a repo
 const ItemsPerPage = 40

@@ -43,6 +46,7 @@ var (
 		DisabledRepoUnits    []string
 		DefaultRepoUnits     []string
 		DefaultForkRepoUnits []string
+		DownloadOrCloneMethods []string
 		PrefixArchiveFiles   bool
 		DisableMigrations    bool
 		DisableStars         bool `ini:"DISABLE_STARS"`

@@ -109,6 +113,9 @@ var (
 			Wiki              []string
 			DefaultTrustModel string
 		} `ini:"repository.signing"`
+
+		SettableFlags []string
+		EnableFlags   bool
 	}{
 		DetectedCharsetsOrder: []string{
 			"UTF-8",

@@ -151,7 +158,7 @@ var (
 		DefaultPrivate:           RepoCreatingLastUserVisibility,
 		DefaultPushCreatePrivate: true,
 		MaxCreationLimit:         -1,
-		PreferredLicenses:        []string{"Apache License 2.0", "MIT License"},
+		PreferredLicenses:        []string{"Apache-2.0", "MIT"},
 		DisableHTTPGit:           false,
 		AccessControlAllowOrigin: "",
 		UseCompatSSHURI:          false,

@@ -161,6 +168,7 @@ var (
 		DisabledRepoUnits:    []string{},
 		DefaultRepoUnits:     []string{},
 		DefaultForkRepoUnits: []string{},
+		DownloadOrCloneMethods: []string{"download-zip", "download-targz", "download-bundle", "vscode-clone"},
 		PrefixArchiveFiles:   true,
 		DisableMigrations:    false,
 		DisableStars:         false,

@@ -265,6 +273,8 @@ var (
 			Wiki:              []string{"never"},
 			DefaultTrustModel: "collaborator",
 		},
+
+		EnableFlags: false,
 	}
 	RepoRootPath string
 	ScriptType   = "bash"

@@ -361,4 +371,12 @@ func loadRepositoryFrom(rootCfg ConfigProvider) {
 	if err := loadRepoArchiveFrom(rootCfg); err != nil {
 		log.Fatal("loadRepoArchiveFrom: %v", err)
 	}
+
+	for _, method := range Repository.DownloadOrCloneMethods {
+		if !slices.Contains(RecognisedRepositoryDownloadOrCloneMethods, method) {
+			log.Error("Unrecognised repository download or clone method: %s", method)
+		}
+	}
+
+	Repository.EnableFlags = sec.Key("ENABLE_FLAGS").MustBool()
 }

@@ -68,6 +68,7 @@ var Service = struct {
 	DefaultKeepEmailPrivate        bool
 	DefaultAllowCreateOrganization bool
 	DefaultUserIsRestricted        bool
+	AllowDotsInUsernames           bool
 	EnableTimetracking             bool
 	DefaultEnableTimetracking      bool
 	DefaultEnableDependencies      bool

@@ -180,6 +181,7 @@ func loadServiceFrom(rootCfg ConfigProvider) {
 	Service.DefaultKeepEmailPrivate = sec.Key("DEFAULT_KEEP_EMAIL_PRIVATE").MustBool()
 	Service.DefaultAllowCreateOrganization = sec.Key("DEFAULT_ALLOW_CREATE_ORGANIZATION").MustBool(true)
 	Service.DefaultUserIsRestricted = sec.Key("DEFAULT_USER_IS_RESTRICTED").MustBool(false)
+	Service.AllowDotsInUsernames = sec.Key("ALLOW_DOTS_IN_USERNAMES").MustBool(true)
 	Service.EnableTimetracking = sec.Key("ENABLE_TIMETRACKING").MustBool(true)
 	if Service.EnableTimetracking {
 		Service.DefaultEnableTimetracking = sec.Key("DEFAULT_ENABLE_TIMETRACKING").MustBool(true)

@@ -147,6 +147,7 @@ func loadCommonSettingsFrom(cfg ConfigProvider) error {
 	loadUIFrom(cfg)
 	loadAdminFrom(cfg)
 	loadAPIFrom(cfg)
+	loadBadgesFrom(cfg)
 	loadMetricsFrom(cfg)
 	loadCamoFrom(cfg)
 	loadI18nFrom(cfg)

@@ -402,6 +402,16 @@ func (p *PullRequestPayload) JSONPayload() ([]byte, error) {
 	return json.MarshalIndent(p, "", "  ")
 }

+type HookScheduleAction string
+
+const (
+	HookScheduleCreated HookScheduleAction = "schedule"
+)
+
+type SchedulePayload struct {
+	Action HookScheduleAction `json:"action"`
+}
+
 // ReviewPayload FIXME
 type ReviewPayload struct {
 	Type string `json:"type"`

@@ -89,6 +89,9 @@ type CreatePullReviewComment struct {
 	NewLineNum int64 `json:"new_position"`
 }

+// CreatePullReviewCommentOptions are options to create a pull review comment
+type CreatePullReviewCommentOptions CreatePullReviewComment
+
 // SubmitPullReviewOptions are options to submit a pending pull review
 type SubmitPullReviewOptions struct {
 	Event ReviewStateType `json:"event"`

@@ -88,6 +88,7 @@ type Repository struct {
 	ExternalTracker *ExternalTracker `json:"external_tracker,omitempty"`
 	HasWiki         bool             `json:"has_wiki"`
 	ExternalWiki    *ExternalWiki    `json:"external_wiki,omitempty"`
+	WikiBranch      string           `json:"wiki_branch,omitempty"`
 	HasPullRequests bool             `json:"has_pull_requests"`
 	HasProjects     bool             `json:"has_projects"`
 	HasReleases     bool             `json:"has_releases"`

@@ -175,6 +176,8 @@ type EditRepoOption struct {
 	ExternalWiki *ExternalWiki `json:"external_wiki,omitempty"`
 	// sets the default branch for this repository.
 	DefaultBranch *string `json:"default_branch,omitempty"`
+	// sets the branch used for this repository's wiki.
+	WikiBranch *string `json:"wiki_branch,omitempty"`
 	// either `true` to allow pull requests, or `false` to prevent pull request.
 	HasPullRequests *bool `json:"has_pull_requests,omitempty"`
 	// either `true` to enable project unit, or `false` to disable them.

modules/structs/repo_flags.go (new file, 9 lines)
@@ -0,0 +1,9 @@
+// Copyright 2024 The Forgejo Authors c/o Codeberg e.V.. All rights reserved.
+// SPDX-License-Identifier: MIT
+
+package structs
+
+// ReplaceFlagsOption options when replacing the flags of a repository
+type ReplaceFlagsOption struct {
+	Flags []string `json:"flags"`
+}

@@ -96,6 +96,9 @@ func NewFuncMap() template.FuncMap {
 		"AppDomain": func() string { // documented in mail-templates.md
 			return setting.Domain
 		},
+		"RepoFlagsEnabled": func() bool {
+			return setting.Repository.EnableFlags
+		},
 		"AssetVersion": func() string {
 			return setting.AssetVersion
 		},

@@ -55,13 +55,15 @@ func (lc *LogChecker) checkLogEvent(event *log.EventFormatted) {

 var checkerIndex int64

-func NewLogChecker(namePrefix string) (logChecker *LogChecker, cancel func()) {
+func NewLogChecker(namePrefix string, level log.Level) (logChecker *LogChecker, cancel func()) {
 	logger := log.GetManager().GetLogger(namePrefix)
 	newCheckerIndex := atomic.AddInt64(&checkerIndex, 1)
 	writerName := namePrefix + "-" + fmt.Sprint(newCheckerIndex)

 	lc := &LogChecker{}
-	lc.EventWriterBaseImpl = log.NewEventWriterBase(writerName, "test-log-checker", log.WriterMode{})
+	lc.EventWriterBaseImpl = log.NewEventWriterBase(writerName, "test-log-checker", log.WriterMode{
+		Level: level,
+	})
 	logger.AddWriters(lc)
 	return lc, func() { _ = logger.RemoveWriter(writerName) }
 }

@@ -12,8 +12,8 @@ import (
 	"github.com/stretchr/testify/assert"
 )

-func TestLogChecker(t *testing.T) {
-	lc, cleanup := NewLogChecker(log.DEFAULT)
+func TestLogCheckerInfo(t *testing.T) {
+	lc, cleanup := NewLogChecker(log.DEFAULT, log.INFO)
 	defer cleanup()

 	lc.Filter("First", "Third").StopMark("End")

@@ -24,11 +24,13 @@ func TestLogChecker(t *testing.T) {
 	assert.False(t, stopped)

 	log.Info("First")
+	log.Debug("Third")
 	filtered, stopped = lc.Check(100 * time.Millisecond)
 	assert.ElementsMatch(t, []bool{true, false}, filtered)
 	assert.False(t, stopped)

 	log.Info("Second")
+	log.Debug("Third")
 	filtered, stopped = lc.Check(100 * time.Millisecond)
 	assert.ElementsMatch(t, []bool{true, false}, filtered)
 	assert.False(t, stopped)

@@ -43,3 +45,14 @@ func TestLogChecker(t *testing.T) {
 	assert.ElementsMatch(t, []bool{true, true}, filtered)
 	assert.True(t, stopped)
 }
+
+func TestLogCheckerDebug(t *testing.T) {
+	lc, cleanup := NewLogChecker(log.DEFAULT, log.DEBUG)
+	defer cleanup()
+
+	lc.StopMark("End")
+
+	log.Debug("End")
+	_, stopped := lc.Check(100 * time.Millisecond)
+	assert.True(t, stopped)
+}

@@ -7,10 +7,9 @@ import (
 	"crypto"
 	"crypto/rand"
 	"crypto/rsa"
+	"crypto/sha256"
 	"crypto/x509"
 	"encoding/pem"
-
-	"github.com/minio/sha256-simd"
 )

 // GenerateKeyPair generates a public and private keypair
Some files were not shown because too many files have changed in this diff.