Merge remote-tracking branch 'origin/main' into forgejo-federation

Anthony Wang 2023-02-11 23:52:36 +00:00
commit e61e9fba59
No known key found for this signature in database
GPG Key ID: 42A5B952E6DD8D38
183 changed files with 4808 additions and 1281 deletions

View File

@ -763,10 +763,16 @@ steps:
image: woodpeckerci/plugin-s3:latest
pull: always
settings:
acl: public-read
bucket: gitea-artifacts
endpoint: https://ams3.digitaloceanspaces.com
path_style: true
acl:
from_secret: aws_s3_acl
region:
from_secret: aws_s3_region
bucket:
from_secret: aws_s3_bucket
endpoint:
from_secret: aws_s3_endpoint
path_style:
from_secret: aws_s3_path_style
source: "dist/release/*"
strip_prefix: dist/release/
target: "/gitea/${DRONE_BRANCH##release/v}"
@ -784,10 +790,16 @@ steps:
- name: release-main
image: woodpeckerci/plugin-s3:latest
settings:
acl: public-read
bucket: gitea-artifacts
endpoint: https://ams3.digitaloceanspaces.com
path_style: true
acl:
from_secret: aws_s3_acl
region:
from_secret: aws_s3_region
bucket:
from_secret: aws_s3_bucket
endpoint:
from_secret: aws_s3_endpoint
path_style:
from_secret: aws_s3_path_style
source: "dist/release/*"
strip_prefix: dist/release/
target: /gitea/main
@ -886,10 +898,16 @@ steps:
image: woodpeckerci/plugin-s3:latest
pull: always
settings:
acl: public-read
bucket: gitea-artifacts
endpoint: https://ams3.digitaloceanspaces.com
path_style: true
acl:
from_secret: aws_s3_acl
region:
from_secret: aws_s3_region
bucket:
from_secret: aws_s3_bucket
endpoint:
from_secret: aws_s3_endpoint
path_style:
from_secret: aws_s3_path_style
source: "dist/release/*"
strip_prefix: dist/release/
target: "/gitea/${DRONE_TAG##v}"

File diff suppressed because one or more lines are too long

View File

@ -308,6 +308,11 @@ var (
Value: "false",
Usage: "Use custom URLs for GitLab/GitHub OAuth endpoints",
},
cli.StringFlag{
Name: "custom-tenant-id",
Value: "",
Usage: "Use custom Tenant ID for OAuth endpoints",
},
cli.StringFlag{
Name: "custom-auth-url",
Value: "",
@ -367,6 +372,15 @@ var (
Value: "",
Usage: "Group Claim value for restricted users",
},
cli.StringFlag{
Name: "group-team-map",
Value: "",
Usage: "JSON mapping between groups and org teams",
},
cli.BoolFlag{
Name: "group-team-map-removal",
Usage: "Activate automatic team membership removal depending on groups",
},
}
microcmdAuthUpdateOauth = cli.Command{
@ -578,12 +592,16 @@ func runCreateUser(c *cli.Context) error {
restricted = util.OptionalBoolOf(c.Bool("restricted"))
}
// default user visibility in app.ini
visibility := setting.Service.DefaultUserVisibilityMode
u := &user_model.User{
Name: username,
Email: c.String("email"),
Passwd: password,
IsAdmin: c.Bool("admin"),
MustChangePassword: changePassword,
Visibility: visibility,
}
overwriteDefault := &user_model.CreateUserOverwriteOptions{
@ -825,6 +843,7 @@ func parseOAuth2Config(c *cli.Context) *oauth2.Source {
AuthURL: c.String("custom-auth-url"),
ProfileURL: c.String("custom-profile-url"),
EmailURL: c.String("custom-email-url"),
Tenant: c.String("custom-tenant-id"),
}
} else {
customURLMapping = nil
@ -843,6 +862,8 @@ func parseOAuth2Config(c *cli.Context) *oauth2.Source {
GroupClaimName: c.String("group-claim-name"),
AdminGroup: c.String("admin-group"),
RestrictedGroup: c.String("restricted-group"),
GroupTeamMap: c.String("group-team-map"),
GroupTeamMapRemoval: c.Bool("group-team-map-removal"),
}
}
@ -925,6 +946,12 @@ func runUpdateOauth(c *cli.Context) error {
if c.IsSet("restricted-group") {
oAuth2Config.RestrictedGroup = c.String("restricted-group")
}
if c.IsSet("group-team-map") {
oAuth2Config.GroupTeamMap = c.String("group-team-map")
}
if c.IsSet("group-team-map-removal") {
oAuth2Config.GroupTeamMapRemoval = c.Bool("group-team-map-removal")
}
// update custom URL mapping
customURLMapping := &oauth2.CustomURLMapping{}
@ -934,6 +961,7 @@ func runUpdateOauth(c *cli.Context) error {
customURLMapping.AuthURL = oAuth2Config.CustomURLMapping.AuthURL
customURLMapping.ProfileURL = oAuth2Config.CustomURLMapping.ProfileURL
customURLMapping.EmailURL = oAuth2Config.CustomURLMapping.EmailURL
customURLMapping.Tenant = oAuth2Config.CustomURLMapping.Tenant
}
if c.IsSet("use-custom-urls") && c.IsSet("custom-token-url") {
customURLMapping.TokenURL = c.String("custom-token-url")
@ -951,6 +979,10 @@ func runUpdateOauth(c *cli.Context) error {
customURLMapping.EmailURL = c.String("custom-email-url")
}
if c.IsSet("use-custom-urls") && c.IsSet("custom-tenant-id") {
customURLMapping.Tenant = c.String("custom-tenant-id")
}
oAuth2Config.CustomURLMapping = customURLMapping
source.Cfg = oAuth2Config

View File

@ -765,7 +765,7 @@ ROUTER = console
;; Enable this to require captcha validation for login
;REQUIRE_CAPTCHA_FOR_LOGIN = false
;;
;; Type of captcha you want to use. Options: image, recaptcha, hcaptcha, mcaptcha.
;; Type of captcha you want to use. Options: image, recaptcha, hcaptcha, mcaptcha, cfturnstile.
;CAPTCHA_TYPE = image
;;
;; Change this to use recaptcha.net or other recaptcha service
@ -787,6 +787,10 @@ ROUTER = console
;MCAPTCHA_SECRET =
;MCAPTCHA_SITEKEY =
;;
;; Go to https://dash.cloudflare.com/?to=/:account/turnstile to sign up for a key
;CF_TURNSTILE_SITEKEY =
;CF_TURNSTILE_SECRET =
;;
;; Default value for KeepEmailPrivate
;; Each new user will get the value of this setting copied into their profile
;DEFAULT_KEEP_EMAIL_PRIVATE = false
@ -2454,6 +2458,10 @@ ROUTER = console
;LIMIT_TOTAL_OWNER_COUNT = -1
;; Maximum size of packages a single owner can use (`-1` means no limits, format `1000`, `1 MB`, `1 GiB`)
;LIMIT_TOTAL_OWNER_SIZE = -1
;; Maximum size of a Cargo upload (`-1` means no limits, format `1000`, `1 MB`, `1 GiB`)
;LIMIT_SIZE_CARGO = -1
;; Maximum size of a Chef upload (`-1` means no limits, format `1000`, `1 MB`, `1 GiB`)
;LIMIT_SIZE_CHEF = -1
;; Maximum size of a Composer upload (`-1` means no limits, format `1000`, `1 MB`, `1 GiB`)
;LIMIT_SIZE_COMPOSER = -1
;; Maximum size of a Conan upload (`-1` means no limits, format `1000`, `1 MB`, `1 GiB`)

View File

@ -643,7 +643,7 @@ Certain queues have defaults that override the defaults set in `[queue]` (this o
- `REQUIRE_CAPTCHA_FOR_LOGIN`: **false**: Enable this to require captcha validation for login. You also must enable `ENABLE_CAPTCHA`.
- `REQUIRE_EXTERNAL_REGISTRATION_CAPTCHA`: **false**: Enable this to force captcha validation
even for External Accounts (i.e. GitHub, OpenID Connect, etc). You also must enable `ENABLE_CAPTCHA`.
- `CAPTCHA_TYPE`: **image**: \[image, recaptcha, hcaptcha, mcaptcha\]
- `CAPTCHA_TYPE`: **image**: \[image, recaptcha, hcaptcha, mcaptcha, cfturnstile\]
- `RECAPTCHA_SECRET`: **""**: Go to https://www.google.com/recaptcha/admin to get a secret for recaptcha.
- `RECAPTCHA_SITEKEY`: **""**: Go to https://www.google.com/recaptcha/admin to get a sitekey for recaptcha.
- `RECAPTCHA_URL`: **https://www.google.com/recaptcha/**: Set the recaptcha url - allows the use of recaptcha net.
@ -652,6 +652,8 @@ Certain queues have defaults that override the defaults set in `[queue]` (this o
- `MCAPTCHA_SECRET`: **""**: Go to your mCaptcha instance to get a secret for mCaptcha.
- `MCAPTCHA_SITEKEY`: **""**: Go to your mCaptcha instance to get a sitekey for mCaptcha.
- `MCAPTCHA_URL` **https://demo.mcaptcha.org/**: Set the mCaptcha URL.
- `CF_TURNSTILE_SECRET`: **""**: Go to https://dash.cloudflare.com/?to=/:account/turnstile to get a secret for Cloudflare Turnstile.
- `CF_TURNSTILE_SITEKEY`: **""**: Go to https://dash.cloudflare.com/?to=/:account/turnstile to get a sitekey for Cloudflare Turnstile.
- `DEFAULT_KEEP_EMAIL_PRIVATE`: **false**: By default set users to keep their email address private.
- `DEFAULT_ALLOW_CREATE_ORGANIZATION`: **true**: Allow new users to create organizations by default.
- `DEFAULT_USER_IS_RESTRICTED`: **false**: Give new users restricted permissions by default
@ -1211,6 +1213,8 @@ Task queue configuration has been moved to `queue.task`. However, the below conf
- `CHUNKED_UPLOAD_PATH`: **tmp/package-upload**: Path for chunked uploads. Defaults to `APP_DATA_PATH` + `tmp/package-upload`
- `LIMIT_TOTAL_OWNER_COUNT`: **-1**: Maximum count of package versions a single owner can have (`-1` means no limits)
- `LIMIT_TOTAL_OWNER_SIZE`: **-1**: Maximum size of packages a single owner can use (`-1` means no limits, format `1000`, `1 MB`, `1 GiB`)
- `LIMIT_SIZE_CARGO`: **-1**: Maximum size of a Cargo upload (`-1` means no limits, format `1000`, `1 MB`, `1 GiB`)
- `LIMIT_SIZE_CHEF`: **-1**: Maximum size of a Chef upload (`-1` means no limits, format `1000`, `1 MB`, `1 GiB`)
- `LIMIT_SIZE_COMPOSER`: **-1**: Maximum size of a Composer upload (`-1` means no limits, format `1000`, `1 MB`, `1 GiB`)
- `LIMIT_SIZE_CONAN`: **-1**: Maximum size of a Conan upload (`-1` means no limits, format `1000`, `1 MB`, `1 GiB`)
- `LIMIT_SIZE_CONDA`: **-1**: Maximum size of a Conda upload (`-1` means no limits, format `1000`, `1 MB`, `1 GiB`)

View File

@ -147,6 +147,17 @@ menu:
- `ENABLE_REVERSE_PROXY_AUTO_REGISTRATION`: Allow automatic registration via reverse proxy authentication.
- `ENABLE_CAPTCHA`: **false**: Use an image captcha for registration.
- `REQUIRE_CAPTCHA_FOR_LOGIN`: **false**: Require a captcha for login. `ENABLE_CAPTCHA` must also be enabled.
- `CAPTCHA_TYPE`: **image**: \[image, recaptcha, hcaptcha, mcaptcha, cfturnstile\], the captcha type: image captcha, reCAPTCHA, hCaptcha, mCaptcha, or Cloudflare Turnstile.
- `RECAPTCHA_SECRET`: **""**: Secret for the reCAPTCHA service, available at https://www.google.com/recaptcha/admin.
- `RECAPTCHA_SITEKEY`: **""**: Sitekey for the reCAPTCHA service, available at https://www.google.com/recaptcha/admin.
- `RECAPTCHA_URL`: **https://www.google.com/recaptcha/**: Set the reCAPTCHA URL.
- `HCAPTCHA_SECRET`: **""**: Secret for the hCaptcha service, available at https://www.hcaptcha.com/.
- `HCAPTCHA_SITEKEY`: **""**: Sitekey for the hCaptcha service, available at https://www.hcaptcha.com/.
- `MCAPTCHA_SECRET`: **""**: Secret for the mCaptcha service.
- `MCAPTCHA_SITEKEY`: **""**: Sitekey for the mCaptcha service.
- `MCAPTCHA_URL` **https://demo.mcaptcha.org/**: Set the mCaptcha URL.
- `CF_TURNSTILE_SECRET` **""**: Secret for the Cloudflare Turnstile service, available at https://dash.cloudflare.com/?to=/:account/turnstile.
- `CF_TURNSTILE_SITEKEY` **""**: Sitekey for the Cloudflare Turnstile service, available at https://dash.cloudflare.com/?to=/:account/turnstile.
### Service - Explore (`service.explore`)

View File

@ -449,3 +449,14 @@ It is highly recommended to back-up your database before running these commands.
If you are using Cloudflare, turn off the auto-minify option in the dashboard.
`Speed` -> `Optimization` -> Uncheck `HTML` within the `Auto-Minify` settings.
## How to adopt repositories from disk
- Add your (bare) repositories to the correct spot for your configuration (`repository.ROOT`), ensuring they are in the correct layout `<REPO_ROOT>/[user]/[repo].git`.
- **Note:** the directory names must be lowercase.
- You can also check `<ROOT_URL>/admin/config` for the repository root path.
- Ensure that the user or org you want to adopt repositories for exists.
- As an admin, go to `<ROOT_URL>/admin/repos/unadopted` and search.
- Users can also be given similar permissions via config [`ALLOW_ADOPTION_OF_UNADOPTED_REPOSITORIES`]({{< relref "doc/advanced/config-cheat-sheet.en-us.md#repository" >}}).
- If the above steps are done correctly, you should be able to select repositories to adopt.
- If no repositories are found, enable [debug logging]({{< relref "doc/advanced/config-cheat-sheet.en-us.md#repository" >}}) to check for any specific errors.

View File

@ -19,7 +19,7 @@ Gitea provides automatically updated Docker images within its Docker Hub organiz
possible to always use the latest stable tag or to use another service that handles updating
Docker images.
The rootless image use Gitea internal SSH to provide Git protocol and doesn't support OpenSSH.
The rootless image uses Gitea internal SSH to provide Git protocol and doesn't support OpenSSH.
This reference setup guides users through the setup based on `docker-compose`, but the installation
of `docker-compose` is out of scope of this documentation. To install `docker-compose` itself, follow

View File

@ -0,0 +1,109 @@
---
date: "2022-11-20T00:00:00+00:00"
title: "Cargo Packages Repository"
slug: "packages/cargo"
draft: false
toc: false
menu:
sidebar:
parent: "packages"
name: "Cargo"
weight: 5
identifier: "cargo"
---
# Cargo Packages Repository
Publish [Cargo](https://doc.rust-lang.org/stable/cargo/) packages for your user or organization.
**Table of Contents**
{{< toc >}}
## Requirements
To work with the Cargo package registry, you need [Rust and Cargo](https://www.rust-lang.org/tools/install).
Cargo stores information about the available packages in a package index stored in a git repository.
This repository is needed to work with the registry.
The following section describes how to create it.
## Index Repository
Cargo stores information about the available packages in a package index stored in a git repository.
In Gitea this repository has the special name `_cargo-index`.
After a package has been uploaded, its metadata is automatically written to the index.
The content of this repository should not be modified manually.
The user or organization package settings page lets you create the index repository along with the configuration file.
If needed, this action rewrites the configuration file; this can be useful if, for example, the Gitea instance domain has changed.
If the packages stored in Gitea and the information in the index repository ever get out of sync, the settings page also lets you rebuild the index repository.
This action iterates over all packages in the registry and writes their information to the index.
If there are a lot of packages, this process may take some time.
## Configuring the package registry
To register the package registry with Cargo, the Cargo configuration must be updated.
Add the following text to the configuration file located in the current user's home directory (for example `~/.cargo/config.toml`):
```
[registry]
default = "gitea"
[registries.gitea]
index = "https://gitea.example.com/{owner}/_cargo-index.git"
[net]
git-fetch-with-cli = true
```
| Parameter | Description |
| --------- | ----------- |
| `owner` | The owner of the package. |
If the registry is private or you want to publish new packages, you have to configure your credentials.
Add the credentials section to the credentials file located in the current user's home directory (for example `~/.cargo/credentials.toml`):
```
[registries.gitea]
token = "Bearer {token}"
```
| Parameter | Description |
| --------- | ----------- |
| `token` | Your [personal access token]({{< relref "doc/developers/api-usage.en-us.md#authentication" >}}) |
## Publish a package
Publish a package by running the following command in your project:
```shell
cargo publish
```
You cannot publish a package if a package of the same name and version already exists. You must delete the existing package first.
## Install a package
To install a package from the package registry, execute the following command:
```shell
cargo add {package_name}
```
| Parameter | Description |
| -------------- | ----------- |
| `package_name` | The package name. |
## Supported commands
```
cargo publish
cargo add
cargo install
cargo yank
cargo unyank
cargo search
```

View File

@ -0,0 +1,96 @@
---
date: "2023-01-20T00:00:00+00:00"
title: "Chef Packages Repository"
slug: "packages/chef"
draft: false
toc: false
menu:
sidebar:
parent: "packages"
name: "Chef"
weight: 5
identifier: "chef"
---
# Chef Packages Repository
Publish [Chef](https://chef.io/) cookbooks for your user or organization.
**Table of Contents**
{{< toc >}}
## Requirements
To work with the Chef package registry, you have to use [`knife`](https://docs.chef.io/workstation/knife/).
## Authentication
The Chef package registry does not use username:password authentication; instead, requests are signed with a private/public key pair.
Visit the package owner settings page to create the necessary key pair.
Only the public key is stored inside Gitea. If you lose access to the private key, you must re-generate the key pair.
[Configure `knife`](https://docs.chef.io/workstation/knife_setup/) to use the downloaded private key with your Gitea username as `client_name`.
## Configure the package registry
To [configure `knife`](https://docs.chef.io/workstation/knife_setup/) to use the Gitea package registry, add the URL to the `~/.chef/config.rb` file.
```
knife[:supermarket_site] = 'https://gitea.example.com/api/packages/{owner}/chef'
```
| Parameter | Description |
| --------- | ----------- |
| `owner` | The owner of the package. |
## Publish a package
To publish a Chef package, execute the following command:
```shell
knife supermarket share {package_name}
```
| Parameter | Description |
| -------------- | ----------- |
| `package_name` | The package name. |
You cannot publish a package if a package of the same name and version already exists. You must delete the existing package first.
## Install a package
To install a package from the package registry, execute the following command:
```shell
knife supermarket install {package_name}
```
Optionally, you can specify the package version:
```shell
knife supermarket install {package_name} {package_version}
```
| Parameter | Description |
| ----------------- | ----------- |
| `package_name` | The package name. |
| `package_version` | The package version. |
## Delete a package
If you want to remove a package from the registry, execute the following command:
```shell
knife supermarket unshare {package_name}
```
Optionally, you can specify the package version:
```shell
knife supermarket unshare {package_name}/versions/{package_version}
```
| Parameter | Description |
| ----------------- | ----------- |
| `package_name` | The package name. |
| `package_version` | The package version. |

View File

@ -26,6 +26,8 @@ The following package managers are currently supported:
| Name | Language | Package client |
| ---- | -------- | -------------- |
| [Cargo]({{< relref "doc/packages/cargo.en-us.md" >}}) | Rust | `cargo` |
| [Chef]({{< relref "doc/packages/chef.en-us.md" >}}) | - | `knife` |
| [Composer]({{< relref "doc/packages/composer.en-us.md" >}}) | PHP | `composer` |
| [Conan]({{< relref "doc/packages/conan.en-us.md" >}}) | C++ | `conan` |
| [Conda]({{< relref "doc/packages/conda.en-us.md" >}}) | - | `conda` |

View File

@ -85,8 +85,10 @@ Then repeat the procedure, but this time using the [latest release](https://dl.g
## Upgrading from a more recent version of Gogs
Upgrading from a more recent version of Gogs is also possible, but requires a bit more work.
See [#4286](https://github.com/go-gitea/gitea/issues/4286).
Upgrading from a more recent version of Gogs (up to `0.11.x`) may also be possible, but will require a bit more work.
See [#4286](https://github.com/go-gitea/gitea/issues/4286), which includes various Gogs `0.11.x` versions.
Upgrading from Gogs `0.12.x` and above will be increasingly difficult as the projects diverge further in configuration and schema.
## Troubleshooting

View File

@ -124,6 +124,7 @@ Admin operations:
- `--secret`: Client Secret.
- `--auto-discover-url`: OpenID Connect Auto Discovery URL (only required when using OpenID Connect as provider).
- `--use-custom-urls`: Use custom URLs for GitLab/GitHub OAuth endpoints.
- `--custom-tenant-id`: Use custom Tenant ID for OAuth endpoints.
- `--custom-auth-url`: Use a custom Authorization URL (option for GitLab/GitHub).
- `--custom-token-url`: Use a custom Token URL (option for GitLab/GitHub).
- `--custom-profile-url`: Use a custom Profile URL (option for GitLab/GitHub).
@ -136,6 +137,8 @@ Admin operations:
- `--group-claim-name`: Claim name providing group names for this source. (Optional)
- `--admin-group`: Group Claim value for administrator users. (Optional)
- `--restricted-group`: Group Claim value for restricted users. (Optional)
- `--group-team-map`: JSON mapping between groups and org teams. (Optional)
- `--group-team-map-removal`: Activate automatic team membership removal depending on groups. (Optional)
- Examples:
- `gitea admin auth add-oauth --name external-github --provider github --key OBTAIN_FROM_SOURCE --secret OBTAIN_FROM_SOURCE`
- `update-oauth`:
@ -147,6 +150,7 @@ Admin operations:
- `--secret`: Client Secret.
- `--auto-discover-url`: OpenID Connect Auto Discovery URL (only required when using OpenID Connect as provider).
- `--use-custom-urls`: Use custom URLs for GitLab/GitHub OAuth endpoints.
- `--custom-tenant-id`: Use custom Tenant ID for OAuth endpoints.
- `--custom-auth-url`: Use a custom Authorization URL (option for GitLab/GitHub).
- `--custom-token-url`: Use a custom Token URL (option for GitLab/GitHub).
- `--custom-profile-url`: Use a custom Profile URL (option for GitLab/GitHub).

go.mod
View File

@ -82,6 +82,8 @@ require (
github.com/niklasfasching/go-org v1.6.5
github.com/oliamb/cutter v0.2.2
github.com/olivere/elastic/v7 v7.0.32
github.com/opencontainers/go-digest v1.0.0
github.com/opencontainers/image-spec v1.1.0-rc2
github.com/pkg/errors v0.9.1
github.com/pquerna/otp v1.4.0
github.com/prometheus/client_golang v1.14.0

go.sum
View File

@ -1009,6 +1009,10 @@ github.com/onsi/gomega v1.10.1/go.mod h1:iN09h71vgCQne3DLsj+A5owkum+a2tYe+TOCB1y
github.com/onsi/gomega v1.10.3/go.mod h1:V9xEwhxec5O8UDM77eCW8vLymOMltsqPVYWrpDsH8xc=
github.com/onsi/gomega v1.18.1 h1:M1GfJqGRrBrrGGsbxzV5dqM2U2ApXefZCQpkukxYRLE=
github.com/op/go-logging v0.0.0-20160315200505-970db520ece7/go.mod h1:HzydrMdWErDVzsI23lYNej1Htcns9BCg93Dk0bBINWk=
github.com/opencontainers/go-digest v1.0.0 h1:apOUWs51W5PlhuyGyz9FCeeBIOUDA/6nW8Oi/yOhh5U=
github.com/opencontainers/go-digest v1.0.0/go.mod h1:0JzlMkj0TRzQZfJkVvzbP0HBR3IKzErnv2BNG4W4MAM=
github.com/opencontainers/image-spec v1.1.0-rc2 h1:2zx/Stx4Wc5pIPDvIxHXvXtQFW/7XWJGmnM7r3wg034=
github.com/opencontainers/image-spec v1.1.0-rc2/go.mod h1:3OVijpioIKYWTqjiG0zfF6wvoJ4fAXGbjdZuI2NgsRQ=
github.com/opentracing-contrib/go-observer v0.0.0-20170622124052-a52f23424492/go.mod h1:Ngi6UdF0k5OKD5t5wlmGhe/EDKPoUM3BXZSSfIuJbis=
github.com/opentracing/basictracer-go v1.0.0/go.mod h1:QfBfYuafItcjQuMwinw9GhYKwFXS9KnPs5lxoYwgW74=
github.com/opentracing/opentracing-go v1.0.2/go.mod h1:UkNAQd3GIcIGf0SeVgPpRdFStlNbqXla1AfSYxPUl2o=

View File

@ -223,18 +223,24 @@ func (a *Action) GetRepoAbsoluteLink() string {
return setting.AppURL + url.PathEscape(a.GetRepoUserName()) + "/" + url.PathEscape(a.GetRepoName())
}
// GetCommentLink returns link to action comment.
func (a *Action) GetCommentLink() string {
return a.getCommentLink(db.DefaultContext)
// GetCommentHTMLURL returns link to action comment.
func (a *Action) GetCommentHTMLURL() string {
return a.getCommentHTMLURL(db.DefaultContext)
}
func (a *Action) getCommentLink(ctx context.Context) string {
func (a *Action) loadComment(ctx context.Context) (err error) {
if a.CommentID == 0 || a.Comment != nil {
return nil
}
a.Comment, err = issues_model.GetCommentByID(ctx, a.CommentID)
return err
}
func (a *Action) getCommentHTMLURL(ctx context.Context) string {
if a == nil {
return "#"
}
if a.Comment == nil && a.CommentID != 0 {
a.Comment, _ = issues_model.GetCommentByID(ctx, a.CommentID)
}
_ = a.loadComment(ctx)
if a.Comment != nil {
return a.Comment.HTMLURL()
}
@ -260,6 +266,41 @@ func (a *Action) getCommentLink(ctx context.Context) string {
return issue.HTMLURL()
}
// GetCommentLink returns link to action comment.
func (a *Action) GetCommentLink() string {
return a.getCommentLink(db.DefaultContext)
}
func (a *Action) getCommentLink(ctx context.Context) string {
if a == nil {
return "#"
}
_ = a.loadComment(ctx)
if a.Comment != nil {
return a.Comment.Link()
}
if len(a.GetIssueInfos()) == 0 {
return "#"
}
// Return link to issue
issueIDString := a.GetIssueInfos()[0]
issueID, err := strconv.ParseInt(issueIDString, 10, 64)
if err != nil {
return "#"
}
issue, err := issues_model.GetIssueByID(ctx, issueID)
if err != nil {
return "#"
}
if err = issue.LoadRepo(ctx); err != nil {
return "#"
}
return issue.Link()
}
// GetBranch returns the action's repository branch.
func (a *Action) GetBranch() string {
return strings.TrimPrefix(a.RefName, git.BranchPrefix)

View File

@ -36,7 +36,7 @@ func TestAction_GetRepoLink(t *testing.T) {
expected := path.Join(setting.AppSubURL, owner.Name, repo.Name)
assert.Equal(t, expected, action.GetRepoLink())
assert.Equal(t, repo.HTMLURL(), action.GetRepoAbsoluteLink())
assert.Equal(t, comment.HTMLURL(), action.GetCommentLink())
assert.Equal(t, comment.HTMLURL(), action.GetCommentHTMLURL())
}
func TestGetFeeds(t *testing.T) {

View File

@ -459,6 +459,22 @@ func (n *Notification) HTMLURL() string {
return ""
}
// Link formats a relative URL-string to the notification
func (n *Notification) Link() string {
switch n.Source {
case NotificationSourceIssue, NotificationSourcePullRequest:
if n.Comment != nil {
return n.Comment.Link()
}
return n.Issue.Link()
case NotificationSourceCommit:
return n.Repository.Link() + "/commit/" + url.PathEscape(n.CommitID)
case NotificationSourceRepository:
return n.Repository.Link()
}
return ""
}
// APIURL formats a URL-string to the notification
func (n *Notification) APIURL() string {
return setting.AppURL + "api/v1/notifications/threads/" + strconv.FormatInt(n.ID, 10)

View File

@ -393,21 +393,40 @@ func (c *Comment) HTMLURL() string {
log.Error("loadRepo(%d): %v", c.Issue.RepoID, err)
return ""
}
return c.Issue.HTMLURL() + c.hashLink()
}
// Link formats a relative URL-string to the issue-comment
func (c *Comment) Link() string {
err := c.LoadIssue(db.DefaultContext)
if err != nil { // Silently dropping errors :unamused:
log.Error("LoadIssue(%d): %v", c.IssueID, err)
return ""
}
err = c.Issue.LoadRepo(db.DefaultContext)
if err != nil { // Silently dropping errors :unamused:
log.Error("loadRepo(%d): %v", c.Issue.RepoID, err)
return ""
}
return c.Issue.Link() + c.hashLink()
}
func (c *Comment) hashLink() string {
if c.Type == CommentTypeCode {
if c.ReviewID == 0 {
return fmt.Sprintf("%s/files#%s", c.Issue.HTMLURL(), c.HashTag())
return "/files#" + c.HashTag()
}
if c.Review == nil {
if err := c.LoadReview(); err != nil {
log.Warn("LoadReview(%d): %v", c.ReviewID, err)
return fmt.Sprintf("%s/files#%s", c.Issue.HTMLURL(), c.HashTag())
return "/files#" + c.HashTag()
}
}
if c.Review.Type <= ReviewTypePending {
return fmt.Sprintf("%s/files#%s", c.Issue.HTMLURL(), c.HashTag())
return "/files#" + c.HashTag()
}
}
return fmt.Sprintf("%s#%s", c.Issue.HTMLURL(), c.HashTag())
return "#" + c.HashTag()
}
// APIURL formats a API-string to the issue-comment
@ -710,8 +729,8 @@ func (c *Comment) UnsignedLine() uint64 {
return uint64(c.Line)
}
// CodeCommentURL returns the url to a comment in code
func (c *Comment) CodeCommentURL() string {
// CodeCommentLink returns the url to a comment in code
func (c *Comment) CodeCommentLink() string {
err := c.LoadIssue(db.DefaultContext)
if err != nil { // Silently dropping errors :unamused:
log.Error("LoadIssue(%d): %v", c.IssueID, err)
@ -722,7 +741,7 @@ func (c *Comment) CodeCommentURL() string {
log.Error("loadRepo(%d): %v", c.Issue.RepoID, err)
return ""
}
return fmt.Sprintf("%s/files#%s", c.Issue.HTMLURL(), c.HashTag())
return fmt.Sprintf("%s/files#%s", c.Issue.Link(), c.HashTag())
}
// LoadPushCommits Load push commits

View File

@ -421,7 +421,7 @@ func (issue *Issue) HTMLURL() string {
return fmt.Sprintf("%s/%s/%d", issue.Repo.HTMLURL(), path, issue.Index)
}
// Link returns the Link URL to this issue.
// Link returns the issue's relative URL.
func (issue *Issue) Link() string {
var path string
if issue.IsPull {

View File

@ -277,26 +277,26 @@ func CommentTypeIsRef(t CommentType) bool {
return t == CommentTypeCommentRef || t == CommentTypePullRef || t == CommentTypeIssueRef
}
// RefCommentHTMLURL returns the HTML URL for the comment that created this reference
func (c *Comment) RefCommentHTMLURL() string {
// RefCommentLink returns the relative URL for the comment that created this reference
func (c *Comment) RefCommentLink() string {
// Edge case for when the reference is inside the title or the description of the referring issue
if c.RefCommentID == 0 {
return c.RefIssueHTMLURL()
return c.RefIssueLink()
}
if err := c.LoadRefComment(); err != nil { // Silently dropping errors :unamused:
log.Error("LoadRefComment(%d): %v", c.RefCommentID, err)
return ""
}
return c.RefComment.HTMLURL()
return c.RefComment.Link()
}
// RefIssueHTMLURL returns the HTML URL of the issue where this reference was created
func (c *Comment) RefIssueHTMLURL() string {
// RefIssueLink returns the relative URL of the issue where this reference was created
func (c *Comment) RefIssueLink() string {
if err := c.LoadRefIssue(); err != nil { // Silently dropping errors :unamused:
log.Error("LoadRefIssue(%d): %v", c.RefCommentID, err)
return ""
}
return c.RefIssue.HTMLURL()
return c.RefIssue.Link()
}
// RefIssueTitle returns the title of the issue where this reference was created

View File

@ -759,8 +759,8 @@ func GetPullRequestsByHeadBranch(ctx context.Context, headBranch string, headRep
return prs, nil
}
// GetBaseBranchHTMLURL returns the HTML URL of the base branch
func (pr *PullRequest) GetBaseBranchHTMLURL() string {
// GetBaseBranchLink returns the relative URL of the base branch
func (pr *PullRequest) GetBaseBranchLink() string {
if err := pr.LoadBaseRepo(db.DefaultContext); err != nil {
log.Error("LoadBaseRepo: %v", err)
return ""
@ -768,11 +768,11 @@ func (pr *PullRequest) GetBaseBranchHTMLURL() string {
if pr.BaseRepo == nil {
return ""
}
return pr.BaseRepo.HTMLURL() + "/src/branch/" + util.PathEscapeSegments(pr.BaseBranch)
return pr.BaseRepo.Link() + "/src/branch/" + util.PathEscapeSegments(pr.BaseBranch)
}
// GetHeadBranchHTMLURL returns the HTML URL of the head branch
func (pr *PullRequest) GetHeadBranchHTMLURL() string {
// GetHeadBranchLink returns the relative URL of the head branch
func (pr *PullRequest) GetHeadBranchLink() string {
if pr.Flow == PullRequestFlowAGit {
return ""
}
@ -784,7 +784,7 @@ func (pr *PullRequest) GetHeadBranchHTMLURL() string {
if pr.HeadRepo == nil {
return ""
}
return pr.HeadRepo.HTMLURL() + "/src/branch/" + util.PathEscapeSegments(pr.HeadBranch)
return pr.HeadRepo.Link() + "/src/branch/" + util.PathEscapeSegments(pr.HeadBranch)
}
// UpdateAllowEdits update if PR can be edited from maintainers

View File

@ -13,6 +13,7 @@ import (
user_model "code.gitea.io/gitea/models/user"
"code.gitea.io/gitea/modules/base"
"code.gitea.io/gitea/modules/log"
"code.gitea.io/gitea/modules/util"
"xorm.io/xorm"
)
@ -175,7 +176,18 @@ func (prs PullRequestList) loadAttributes(ctx context.Context) error {
}
for _, pr := range prs {
pr.Issue = set[pr.IssueID]
pr.Issue.PullRequest = pr // panic here means issueIDs and prs are not in sync
/*
Old code:
pr.Issue.PullRequest = pr // panic here means issueIDs and prs are not in sync
A panic was acceptable there because the mismatch is almost impossible under normal use.
But in integration testing, an asynchronous task could read a database that has been reset.
So returning an error makes more sense, letting the caller choose whether to ignore it.
*/
if pr.Issue == nil {
return fmt.Errorf("issues and prs may be not in sync: cannot find issue %v for pr %v: %w", pr.IssueID, pr.ID, util.ErrNotExist)
}
pr.Issue.PullRequest = pr
}
return nil
}

View File

@ -110,22 +110,14 @@ func (org *Organization) CanCreateOrgRepo(uid int64) (bool, error) {
return CanCreateOrgRepo(db.DefaultContext, org.ID, uid)
}
func (org *Organization) getTeam(ctx context.Context, name string) (*Team, error) {
// GetTeam returns named team of organization.
func (org *Organization) GetTeam(ctx context.Context, name string) (*Team, error) {
return GetTeam(ctx, org.ID, name)
}
// GetTeam returns named team of organization.
func (org *Organization) GetTeam(name string) (*Team, error) {
return org.getTeam(db.DefaultContext, name)
}
func (org *Organization) getOwnerTeam(ctx context.Context) (*Team, error) {
return org.getTeam(ctx, OwnerTeamName)
}
// GetOwnerTeam returns owner team of organization.
func (org *Organization) GetOwnerTeam() (*Team, error) {
return org.getOwnerTeam(db.DefaultContext)
func (org *Organization) GetOwnerTeam(ctx context.Context) (*Team, error) {
return org.GetTeam(ctx, OwnerTeamName)
}
// FindOrgTeams returns all teams of a given organization
@ -342,7 +334,7 @@ func CreateOrganization(org *Organization, owner *user_model.User) (err error) {
}
// GetOrgByName returns organization by given name.
func GetOrgByName(name string) (*Organization, error) {
func GetOrgByName(ctx context.Context, name string) (*Organization, error) {
if len(name) == 0 {
return nil, ErrOrgNotExist{0, name}
}
@ -350,7 +342,7 @@ func GetOrgByName(name string) (*Organization, error) {
LowerName: strings.ToLower(name),
Type: user_model.UserTypeOrganization,
}
has, err := db.GetEngine(db.DefaultContext).Get(u)
has, err := db.GetEngine(ctx).Get(u)
if err != nil {
return nil, err
} else if !has {

View File

@ -61,28 +61,28 @@ func TestUser_IsOrgMember(t *testing.T) {
func TestUser_GetTeam(t *testing.T) {
assert.NoError(t, unittest.PrepareTestDatabase())
org := unittest.AssertExistsAndLoadBean(t, &organization.Organization{ID: 3})
team, err := org.GetTeam("team1")
team, err := org.GetTeam(db.DefaultContext, "team1")
assert.NoError(t, err)
assert.Equal(t, org.ID, team.OrgID)
assert.Equal(t, "team1", team.LowerName)
_, err = org.GetTeam("does not exist")
_, err = org.GetTeam(db.DefaultContext, "does not exist")
assert.True(t, organization.IsErrTeamNotExist(err))
nonOrg := unittest.AssertExistsAndLoadBean(t, &organization.Organization{ID: 2})
_, err = nonOrg.GetTeam("team")
_, err = nonOrg.GetTeam(db.DefaultContext, "team")
assert.True(t, organization.IsErrTeamNotExist(err))
}
func TestUser_GetOwnerTeam(t *testing.T) {
assert.NoError(t, unittest.PrepareTestDatabase())
org := unittest.AssertExistsAndLoadBean(t, &organization.Organization{ID: 3})
team, err := org.GetOwnerTeam()
team, err := org.GetOwnerTeam(db.DefaultContext)
assert.NoError(t, err)
assert.Equal(t, org.ID, team.OrgID)
nonOrg := unittest.AssertExistsAndLoadBean(t, &organization.Organization{ID: 2})
_, err = nonOrg.GetOwnerTeam()
_, err = nonOrg.GetOwnerTeam(db.DefaultContext)
assert.True(t, organization.IsErrTeamNotExist(err))
}
@ -115,15 +115,15 @@ func TestUser_GetMembers(t *testing.T) {
func TestGetOrgByName(t *testing.T) {
assert.NoError(t, unittest.PrepareTestDatabase())
org, err := organization.GetOrgByName("user3")
org, err := organization.GetOrgByName(db.DefaultContext, "user3")
assert.NoError(t, err)
assert.EqualValues(t, 3, org.ID)
assert.Equal(t, "user3", org.Name)
_, err = organization.GetOrgByName("user2") // user2 is an individual
_, err = organization.GetOrgByName(db.DefaultContext, "user2") // user2 is an individual
assert.True(t, organization.IsErrOrgNotExist(err))
_, err = organization.GetOrgByName("") // corner case
_, err = organization.GetOrgByName(db.DefaultContext, "") // corner case
assert.True(t, organization.IsErrOrgNotExist(err))
}

View File

@ -11,6 +11,8 @@ import (
repo_model "code.gitea.io/gitea/models/repo"
user_model "code.gitea.io/gitea/models/user"
"code.gitea.io/gitea/modules/json"
"code.gitea.io/gitea/modules/packages/cargo"
"code.gitea.io/gitea/modules/packages/chef"
"code.gitea.io/gitea/modules/packages/composer"
"code.gitea.io/gitea/modules/packages/conan"
"code.gitea.io/gitea/modules/packages/conda"
@ -63,7 +65,7 @@ type PackageFileDescriptor struct {
// PackageWebLink returns the package web link
func (pd *PackageDescriptor) PackageWebLink() string {
return fmt.Sprintf("%s/-/packages/%s/%s", pd.Owner.HTMLURL(), string(pd.Package.Type), url.PathEscape(pd.Package.LowerName))
return fmt.Sprintf("%s/-/packages/%s/%s", pd.Owner.HomeLink(), string(pd.Package.Type), url.PathEscape(pd.Package.LowerName))
}
// FullWebLink returns the package version web link
@ -129,6 +131,10 @@ func GetPackageDescriptor(ctx context.Context, pv *PackageVersion) (*PackageDesc
var metadata interface{}
switch p.Type {
case TypeCargo:
metadata = &cargo.Metadata{}
case TypeChef:
metadata = &chef.Metadata{}
case TypeComposer:
metadata = &composer.Metadata{}
case TypeConan:

View File

@ -30,6 +30,8 @@ type Type string
// List of supported packages
const (
TypeCargo Type = "cargo"
TypeChef Type = "chef"
TypeComposer Type = "composer"
TypeConan Type = "conan"
TypeConda Type = "conda"
@ -46,6 +48,8 @@ const (
)
var TypeList = []Type{
TypeCargo,
TypeChef,
TypeComposer,
TypeConan,
TypeConda,
@ -64,6 +68,10 @@ var TypeList = []Type{
// Name gets the name of the package type
func (pt Type) Name() string {
switch pt {
case TypeCargo:
return "Cargo"
case TypeChef:
return "Chef"
case TypeComposer:
return "Composer"
case TypeConan:
@ -97,6 +105,10 @@ func (pt Type) Name() string {
// SVGName gets the name of the package type svg image
func (pt Type) SVGName() string {
switch pt {
case TypeCargo:
return "gitea-cargo"
case TypeChef:
return "gitea-chef"
case TypeComposer:
return "gitea-composer"
case TypeConan:

View File

@ -58,6 +58,12 @@ func GetPropertiesByName(ctx context.Context, refType PropertyType, refID int64,
return pps, db.GetEngine(ctx).Where("ref_type = ? AND ref_id = ? AND name = ?", refType, refID, name).Find(&pps)
}
// UpdateProperty updates a property
func UpdateProperty(ctx context.Context, pp *PackageProperty) error {
_, err := db.GetEngine(ctx).ID(pp.ID).Update(pp)
return err
}
// DeleteAllProperties deletes all properties of a ref
func DeleteAllProperties(ctx context.Context, refType PropertyType, refID int64) error {
_, err := db.GetEngine(ctx).Where("ref_type = ? AND ref_id = ?", refType, refID).Delete(&PackageProperty{})

View File

@ -116,6 +116,7 @@ func (p *Project) LoadRepo(ctx context.Context) (err error) {
return err
}
// Link returns the project's relative URL.
func (p *Project) Link() string {
if p.OwnerID > 0 {
err := p.LoadOwner(db.DefaultContext)

View File

@ -130,6 +130,11 @@ func (r *Release) HTMLURL() string {
return r.Repo.HTMLURL() + "/releases/tag/" + util.PathEscapeSegments(r.TagName)
}
// Link returns the relative URL for a release on the web UI. The release must have its attributes loaded.
func (r *Release) Link() string {
return r.Repo.Link() + "/releases/tag/" + util.PathEscapeSegments(r.TagName)
}
// IsReleaseExist returns true if release with given tag name already exists.
func IsReleaseExist(ctx context.Context, repoID int64, tagName string) (bool, error) {
if len(tagName) == 0 {

View File

@ -481,7 +481,7 @@ func (repo *Repository) RepoPath() string {
return RepoPath(repo.OwnerName, repo.Name)
}
// Link returns the repository link
// Link returns the repository's relative URL
func (repo *Repository) Link() string {
return setting.AppSubURL + "/" + url.PathEscape(repo.OwnerName) + "/" + url.PathEscape(repo.Name)
}

modules/auth/common.go (new file)
View File

@ -0,0 +1,22 @@
// Copyright 2022 The Gitea Authors. All rights reserved.
// SPDX-License-Identifier: MIT
package auth
import (
"code.gitea.io/gitea/modules/json"
"code.gitea.io/gitea/modules/log"
)
func UnmarshalGroupTeamMapping(raw string) (map[string]map[string][]string, error) {
groupTeamMapping := make(map[string]map[string][]string)
if raw == "" {
return groupTeamMapping, nil
}
err := json.Unmarshal([]byte(raw), &groupTeamMapping)
if err != nil {
log.Error("Failed to unmarshal group team mapping: %v", err)
return nil, err
}
return groupTeamMapping, nil
}
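
For reference, a minimal usage sketch (not part of this commit) showing the JSON shape the helper expects, derived from its `map[string]map[string][]string` return type: the outer key is a group claim value, mapped to organization names and their team lists. The group, organization, and team names below are hypothetical.

```go
package main

import (
	"fmt"

	"code.gitea.io/gitea/modules/auth"
)

func main() {
	// Hypothetical mapping: members of the OIDC group "developers" are
	// synced into the "backend" and "frontend" teams of the "acme" org.
	raw := `{"developers": {"acme": ["backend", "frontend"]}}`

	mapping, err := auth.UnmarshalGroupTeamMapping(raw)
	if err != nil {
		panic(err)
	}
	fmt.Println(mapping["developers"]["acme"]) // [backend frontend]
}
```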

View File

@ -6,7 +6,6 @@ package charset
import (
"fmt"
"regexp"
"sort"
"strings"
"unicode"
"unicode/utf8"
@ -20,12 +19,16 @@ import (
var defaultWordRegexp = regexp.MustCompile(`(-?\d*\.\d\w*)|([^\` + "`" + `\~\!\@\#\$\%\^\&\*\(\)\-\=\+\[\{\]\}\\\|\;\:\'\"\,\.\<\>\/\?\s\x00-\x1f]+)`)
func NewEscapeStreamer(locale translation.Locale, next HTMLStreamer, allowed ...rune) HTMLStreamer {
allowedM := make(map[rune]bool, len(allowed))
for _, v := range allowed {
allowedM[v] = true
}
return &escapeStreamer{
escaped: &EscapeStatus{},
PassthroughHTMLStreamer: *NewPassthroughStreamer(next),
locale: locale,
ambiguousTables: AmbiguousTablesForLocale(locale),
allowed: allowed,
allowed: allowedM,
}
}
@ -34,7 +37,7 @@ type escapeStreamer struct {
escaped *EscapeStatus
locale translation.Locale
ambiguousTables []*AmbiguousTable
allowed []rune
allowed map[rune]bool
}
func (e *escapeStreamer) EscapeStatus() *EscapeStatus {
@ -256,7 +259,7 @@ func (e *escapeStreamer) runeTypes(runes ...rune) (types []runeType, confusables
runeCounts.numBrokenRunes++
case r == ' ' || r == '\t' || r == '\n':
runeCounts.numBasicRunes++
case e.isAllowed(r):
case e.allowed[r]:
if r > 0x7e || r < 0x20 {
types[i] = nonBasicASCIIRuneType
runeCounts.numNonConfusingNonBasicRunes++
@ -282,16 +285,3 @@ func (e *escapeStreamer) runeTypes(runes ...rune) (types []runeType, confusables
}
return types, confusables, runeCounts
}
func (e *escapeStreamer) isAllowed(r rune) bool {
if len(e.allowed) == 0 {
return false
}
if len(e.allowed) == 1 {
return e.allowed[0] == r
}
return sort.Search(len(e.allowed), func(i int) bool {
return e.allowed[i] >= r
}) >= 0
}
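
Side note, not from the commit: the removed `isAllowed` compared the result of `sort.Search` with `>= 0`, which is always true, so once more than one allowed rune was configured every rune passed the check; the map lookup introduced above avoids this. For comparison, a minimal sketch of a correct slice-based membership test alongside the map form (the rune values are arbitrary examples):

```go
package main

import (
	"fmt"
	"sort"
)

func main() {
	// sort.Search only works on a sorted slice, and the returned index must be
	// validated; checking `i >= 0` is always true and proves nothing.
	allowed := []rune{'-', '.', '_'}
	sort.Slice(allowed, func(i, j int) bool { return allowed[i] < allowed[j] })

	contains := func(r rune) bool {
		i := sort.Search(len(allowed), func(i int) bool { return allowed[i] >= r })
		return i < len(allowed) && allowed[i] == r
	}
	fmt.Println(contains('.'), contains('x')) // true false

	// The map form used by the new escapeStreamer needs no ordering and is O(1).
	allowedM := map[rune]bool{'-': true, '.': true, '_': true}
	fmt.Println(allowedM['.'], allowedM['x']) // true false
}
```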

View File

@ -19,7 +19,6 @@ import (
"code.gitea.io/gitea/modules/log"
"code.gitea.io/gitea/modules/setting"
"code.gitea.io/gitea/modules/web/middleware"
auth_service "code.gitea.io/gitea/services/auth"
)
// APIContext is a specific context for API service
@ -215,35 +214,6 @@ func (ctx *APIContext) CheckForOTP() {
}
}
// APIAuth converts auth_service.Auth as a middleware
func APIAuth(authMethod auth_service.Method) func(*APIContext) {
return func(ctx *APIContext) {
// Get user from session if logged in.
var err error
ctx.Doer, err = authMethod.Verify(ctx.Req, ctx.Resp, ctx, ctx.Session)
if err != nil {
ctx.Error(http.StatusUnauthorized, "APIAuth", err)
return
}
if ctx.Doer != nil {
if ctx.Locale.Language() != ctx.Doer.Language {
ctx.Locale = middleware.Locale(ctx.Resp, ctx.Req)
}
ctx.IsBasicAuth = ctx.Data["AuthedMethod"].(string) == auth_service.BasicMethodName
ctx.IsSigned = true
ctx.Data["IsSigned"] = ctx.IsSigned
ctx.Data["SignedUser"] = ctx.Doer
ctx.Data["SignedUserID"] = ctx.Doer.ID
ctx.Data["SignedUserName"] = ctx.Doer.Name
ctx.Data["IsAdmin"] = ctx.Doer.IsAdmin
} else {
ctx.Data["SignedUserID"] = int64(0)
ctx.Data["SignedUserName"] = ""
}
}
}
// APIContexter returns apicontext as middleware
func APIContexter() func(http.Handler) http.Handler {
return func(next http.Handler) http.Handler {

View File

@ -14,6 +14,7 @@ import (
"code.gitea.io/gitea/modules/mcaptcha"
"code.gitea.io/gitea/modules/recaptcha"
"code.gitea.io/gitea/modules/setting"
"code.gitea.io/gitea/modules/turnstile"
"gitea.com/go-chi/captcha"
)
@ -47,12 +48,14 @@ func SetCaptchaData(ctx *Context) {
ctx.Data["HcaptchaSitekey"] = setting.Service.HcaptchaSitekey
ctx.Data["McaptchaSitekey"] = setting.Service.McaptchaSitekey
ctx.Data["McaptchaURL"] = setting.Service.McaptchaURL
ctx.Data["CfTurnstileSitekey"] = setting.Service.CfTurnstileSitekey
}
const (
gRecaptchaResponseField = "g-recaptcha-response"
hCaptchaResponseField = "h-captcha-response"
mCaptchaResponseField = "m-captcha-response"
gRecaptchaResponseField = "g-recaptcha-response"
hCaptchaResponseField = "h-captcha-response"
mCaptchaResponseField = "m-captcha-response"
cfTurnstileResponseField = "cf-turnstile-response"
)
// VerifyCaptcha verifies Captcha data
@ -73,6 +76,8 @@ func VerifyCaptcha(ctx *Context, tpl base.TplName, form interface{}) {
valid, err = hcaptcha.Verify(ctx, ctx.Req.Form.Get(hCaptchaResponseField))
case setting.MCaptcha:
valid, err = mcaptcha.Verify(ctx, ctx.Req.Form.Get(mCaptchaResponseField))
case setting.CfTurnstile:
valid, err = turnstile.Verify(ctx, ctx.Req.Form.Get(cfTurnstileResponseField))
default:
ctx.ServerError("Unknown Captcha Type", fmt.Errorf("Unknown Captcha Type: %s", setting.Service.CaptchaType))
return

View File

@ -36,7 +36,6 @@ import (
"code.gitea.io/gitea/modules/typesniffer"
"code.gitea.io/gitea/modules/util"
"code.gitea.io/gitea/modules/web/middleware"
"code.gitea.io/gitea/services/auth"
"gitea.com/go-chi/cache"
"gitea.com/go-chi/session"
@ -659,37 +658,6 @@ func getCsrfOpts() CsrfOptions {
}
}
// Auth converts auth.Auth as a middleware
func Auth(authMethod auth.Method) func(*Context) {
return func(ctx *Context) {
var err error
ctx.Doer, err = authMethod.Verify(ctx.Req, ctx.Resp, ctx, ctx.Session)
if err != nil {
log.Error("Failed to verify user %v: %v", ctx.Req.RemoteAddr, err)
ctx.Error(http.StatusUnauthorized, "Verify")
return
}
if ctx.Doer != nil {
if ctx.Locale.Language() != ctx.Doer.Language {
ctx.Locale = middleware.Locale(ctx.Resp, ctx.Req)
}
ctx.IsBasicAuth = ctx.Data["AuthedMethod"].(string) == auth.BasicMethodName
ctx.IsSigned = true
ctx.Data["IsSigned"] = ctx.IsSigned
ctx.Data["SignedUser"] = ctx.Doer
ctx.Data["SignedUserID"] = ctx.Doer.ID
ctx.Data["SignedUserName"] = ctx.Doer.Name
ctx.Data["IsAdmin"] = ctx.Doer.IsAdmin
} else {
ctx.Data["SignedUserID"] = int64(0)
ctx.Data["SignedUserName"] = ""
// ensure the session uid is deleted
_ = ctx.Session.Delete("uid")
}
}
}
// Contexter initializes a classic context for a request.
func Contexter(ctx context.Context) func(next http.Handler) http.Handler {
_, rnd := templates.HTMLRenderer(ctx)

View File

@ -80,7 +80,7 @@ func HandleOrgAssignment(ctx *Context, args ...bool) {
orgName := ctx.Params(":org")
var err error
ctx.Org.Organization, err = organization.GetOrgByName(orgName)
ctx.Org.Organization, err = organization.GetOrgByName(ctx, orgName)
if err != nil {
if organization.IsErrOrgNotExist(err) {
redirectUserID, err := user_model.LookupUserRedirect(orgName)

View File

@ -20,11 +20,12 @@ type BlamePart struct {
// BlameReader returns part of file blame one by one
type BlameReader struct {
cmd *Command
output io.WriteCloser
reader io.ReadCloser
done chan error
lastSha *string
cmd *Command
output io.WriteCloser
reader io.ReadCloser
bufferedReader *bufio.Reader
done chan error
lastSha *string
}
var shaLineRegex = regexp.MustCompile("^([a-z0-9]{40})")
@ -33,8 +34,6 @@ var shaLineRegex = regexp.MustCompile("^([a-z0-9]{40})")
func (r *BlameReader) NextPart() (*BlamePart, error) {
var blamePart *BlamePart
reader := bufio.NewReader(r.reader)
if r.lastSha != nil {
blamePart = &BlamePart{*r.lastSha, make([]string, 0)}
}
@ -44,7 +43,7 @@ func (r *BlameReader) NextPart() (*BlamePart, error) {
var err error
for err != io.EOF {
line, isPrefix, err = reader.ReadLine()
line, isPrefix, err = r.bufferedReader.ReadLine()
if err != nil && err != io.EOF {
return blamePart, err
}
@ -66,7 +65,7 @@ func (r *BlameReader) NextPart() (*BlamePart, error) {
r.lastSha = &sha1
// need to munch to end of line...
for isPrefix {
_, isPrefix, err = reader.ReadLine()
_, isPrefix, err = r.bufferedReader.ReadLine()
if err != nil && err != io.EOF {
return blamePart, err
}
@ -81,7 +80,7 @@ func (r *BlameReader) NextPart() (*BlamePart, error) {
// need to munch to end of line...
for isPrefix {
_, isPrefix, err = reader.ReadLine()
_, isPrefix, err = r.bufferedReader.ReadLine()
if err != nil && err != io.EOF {
return blamePart, err
}
@ -96,6 +95,7 @@ func (r *BlameReader) NextPart() (*BlamePart, error) {
// Close BlameReader - don't run NextPart after invoking that
func (r *BlameReader) Close() error {
err := <-r.done
r.bufferedReader = nil
_ = r.reader.Close()
_ = r.output.Close()
return err
@ -126,10 +126,13 @@ func CreateBlameReader(ctx context.Context, repoPath, commitID, file string) (*B
done <- err
}(cmd, repoPath, stdout, done)
bufferedReader := bufio.NewReader(reader)
return &BlameReader{
cmd: cmd,
output: stdout,
reader: reader,
done: done,
cmd: cmd,
output: stdout,
reader: reader,
bufferedReader: bufferedReader,
done: done,
}, nil
}

View File

@ -28,7 +28,7 @@ func TestReadingBlameOutput(t *testing.T) {
},
{
"f32b0a9dfd09a60f616f29158f772cedd89942d2",
[]string{},
[]string{"", "Do not make any changes to this repo it is used for unit testing"},
},
}

View File

@ -163,10 +163,8 @@ func CloneWithArgs(ctx context.Context, args TrustedCmdArgs, from, to string, op
envs := os.Environ()
u, err := url.Parse(from)
if err == nil && (strings.EqualFold(u.Scheme, "http") || strings.EqualFold(u.Scheme, "https")) {
if proxy.Match(u.Host) {
envs = append(envs, fmt.Sprintf("https_proxy=%s", proxy.GetProxyURL()))
}
if err == nil {
envs = proxy.EnvWithProxy(u)
}
stderr := new(bytes.Buffer)

View File

@ -135,8 +135,7 @@ func (c *CheckAttributeReader) Init(ctx context.Context) error {
c.env = append(c.env, "GIT_FLUSH=1")
// The empty "--" comes from #16773 , and it seems unnecessary because nothing else would be added later.
c.cmd.AddDynamicArguments(c.Attributes...).AddArguments("--")
c.cmd.AddDynamicArguments(c.Attributes...)
var err error

View File

@ -4,7 +4,10 @@
package metrics
import (
"runtime"
activities_model "code.gitea.io/gitea/models/activities"
"code.gitea.io/gitea/modules/setting"
"github.com/prometheus/client_golang/prometheus"
)
@ -17,6 +20,7 @@ type Collector struct {
Accesses *prometheus.Desc
Actions *prometheus.Desc
Attachments *prometheus.Desc
BuildInfo *prometheus.Desc
Comments *prometheus.Desc
Follows *prometheus.Desc
HookTasks *prometheus.Desc
@ -62,6 +66,16 @@ func NewCollector() Collector {
"Number of Attachments",
nil, nil,
),
BuildInfo: prometheus.NewDesc(
namespace+"build_info",
"Build information",
[]string{
"goarch",
"goos",
"goversion",
"version",
}, nil,
),
Comments: prometheus.NewDesc(
namespace+"comments",
"Number of Comments",
@ -195,6 +209,7 @@ func (c Collector) Describe(ch chan<- *prometheus.Desc) {
ch <- c.Accesses
ch <- c.Actions
ch <- c.Attachments
ch <- c.BuildInfo
ch <- c.Comments
ch <- c.Follows
ch <- c.HookTasks
@ -241,6 +256,15 @@ func (c Collector) Collect(ch chan<- prometheus.Metric) {
prometheus.GaugeValue,
float64(stats.Counter.Attachment),
)
ch <- prometheus.MustNewConstMetric(
c.BuildInfo,
prometheus.GaugeValue,
1,
runtime.GOARCH,
runtime.GOOS,
runtime.Version(),
setting.AppVer,
)
ch <- prometheus.MustNewConstMetric(
c.Comments,
prometheus.GaugeValue,

View File

@ -8,8 +8,7 @@ import "time"
// Commentable can be commented upon
type Commentable interface {
GetLocalIndex() int64
GetForeignIndex() int64
Reviewable
GetContext() DownloaderContext
}

View File

@ -34,6 +34,15 @@ func (issue *Issue) GetExternalName() string { return issue.PosterName }
// GetExternalID ExternalUserMigrated interface
func (issue *Issue) GetExternalID() int64 { return issue.PosterID }
func (issue *Issue) GetLocalIndex() int64 { return issue.Number }
func (issue *Issue) GetForeignIndex() int64 { return issue.ForeignIndex }
func (issue *Issue) GetLocalIndex() int64 { return issue.Number }
func (issue *Issue) GetForeignIndex() int64 {
// see the comment of Reviewable.GetForeignIndex
// if there is no ForeignIndex, then use LocalIndex
if issue.ForeignIndex == 0 {
return issue.Number
}
return issue.ForeignIndex
}
func (issue *Issue) GetContext() DownloaderContext { return issue.Context }

View File

@ -8,6 +8,16 @@ import "time"
// Reviewable can be reviewed
type Reviewable interface {
GetLocalIndex() int64
// GetForeignIndex presents the foreign index, which could be misused:
// For example, if there are 2 Gitea sites: site-A exports a dataset, then site-B imports it:
// * if site-A exports files by using its LocalIndex
// * from site-A's view, LocalIndex is site-A's IssueIndex while ForeignIndex is site-B's IssueIndex
// * but from site-B's view, LocalIndex is site-B's IssueIndex while ForeignIndex is site-A's IssueIndex
//
// So the exporting/importing must be paired, but the meaning of them looks confusing then:
// * either site-A and site-B both use LocalIndex during dumping/restoring
// * or site-A and site-B both use ForeignIndex
GetForeignIndex() int64
}
@ -37,7 +47,7 @@ type Review struct {
// GetExternalName ExternalUserMigrated interface
func (r *Review) GetExternalName() string { return r.ReviewerName }
// ExternalID ExternalUserMigrated interface
// GetExternalID ExternalUserMigrated interface
func (r *Review) GetExternalID() int64 { return r.ReviewerID }
// ReviewComment represents a review comment

View File

@ -0,0 +1,169 @@
// Copyright 2022 The Gitea Authors. All rights reserved.
// SPDX-License-Identifier: MIT
package cargo
import (
"encoding/binary"
"errors"
"io"
"regexp"
"code.gitea.io/gitea/modules/json"
"code.gitea.io/gitea/modules/validation"
"github.com/hashicorp/go-version"
)
const PropertyYanked = "cargo.yanked"
var (
ErrInvalidName = errors.New("package name is invalid")
ErrInvalidVersion = errors.New("package version is invalid")
)
// Package represents a Cargo package
type Package struct {
Name string
Version string
Metadata *Metadata
Content io.Reader
ContentSize int64
}
// Metadata represents the metadata of a Cargo package
type Metadata struct {
Dependencies []*Dependency `json:"dependencies,omitempty"`
Features map[string][]string `json:"features,omitempty"`
Authors []string `json:"authors,omitempty"`
Description string `json:"description,omitempty"`
DocumentationURL string `json:"documentation_url,omitempty"`
ProjectURL string `json:"project_url,omitempty"`
Readme string `json:"readme,omitempty"`
Keywords []string `json:"keywords,omitempty"`
Categories []string `json:"categories,omitempty"`
License string `json:"license,omitempty"`
RepositoryURL string `json:"repository_url,omitempty"`
Links string `json:"links,omitempty"`
}
type Dependency struct {
Name string `json:"name"`
Req string `json:"req"`
Features []string `json:"features"`
Optional bool `json:"optional"`
DefaultFeatures bool `json:"default_features"`
Target *string `json:"target"`
Kind string `json:"kind"`
Registry *string `json:"registry"`
Package *string `json:"package"`
}
var nameMatch = regexp.MustCompile(`\A[a-zA-Z][a-zA-Z0-9-_]{0,63}\z`)
// ParsePackage reads the metadata and content of a package
func ParsePackage(r io.Reader) (*Package, error) {
var size uint32
if err := binary.Read(r, binary.LittleEndian, &size); err != nil {
return nil, err
}
p, err := parsePackage(io.LimitReader(r, int64(size)))
if err != nil {
return nil, err
}
if err := binary.Read(r, binary.LittleEndian, &size); err != nil {
return nil, err
}
p.Content = io.LimitReader(r, int64(size))
p.ContentSize = int64(size)
return p, nil
}
func parsePackage(r io.Reader) (*Package, error) {
var meta struct {
Name string `json:"name"`
Vers string `json:"vers"`
Deps []struct {
Name string `json:"name"`
VersionReq string `json:"version_req"`
Features []string `json:"features"`
Optional bool `json:"optional"`
DefaultFeatures bool `json:"default_features"`
Target *string `json:"target"`
Kind string `json:"kind"`
Registry *string `json:"registry"`
ExplicitNameInToml string `json:"explicit_name_in_toml"`
} `json:"deps"`
Features map[string][]string `json:"features"`
Authors []string `json:"authors"`
Description string `json:"description"`
Documentation string `json:"documentation"`
Homepage string `json:"homepage"`
Readme string `json:"readme"`
ReadmeFile string `json:"readme_file"`
Keywords []string `json:"keywords"`
Categories []string `json:"categories"`
License string `json:"license"`
LicenseFile string `json:"license_file"`
Repository string `json:"repository"`
Links string `json:"links"`
}
if err := json.NewDecoder(r).Decode(&meta); err != nil {
return nil, err
}
if !nameMatch.MatchString(meta.Name) {
return nil, ErrInvalidName
}
if _, err := version.NewSemver(meta.Vers); err != nil {
return nil, ErrInvalidVersion
}
if !validation.IsValidURL(meta.Homepage) {
meta.Homepage = ""
}
if !validation.IsValidURL(meta.Documentation) {
meta.Documentation = ""
}
if !validation.IsValidURL(meta.Repository) {
meta.Repository = ""
}
dependencies := make([]*Dependency, 0, len(meta.Deps))
for _, dep := range meta.Deps {
dependencies = append(dependencies, &Dependency{
Name: dep.Name,
Req: dep.VersionReq,
Features: dep.Features,
Optional: dep.Optional,
DefaultFeatures: dep.DefaultFeatures,
Target: dep.Target,
Kind: dep.Kind,
Registry: dep.Registry,
})
}
return &Package{
Name: meta.Name,
Version: meta.Vers,
Metadata: &Metadata{
Dependencies: dependencies,
Features: meta.Features,
Authors: meta.Authors,
Description: meta.Description,
DocumentationURL: meta.Documentation,
ProjectURL: meta.Homepage,
Readme: meta.Readme,
Keywords: meta.Keywords,
Categories: meta.Categories,
License: meta.License,
RepositoryURL: meta.Repository,
Links: meta.Links,
},
}, nil
}

View File

@ -0,0 +1,86 @@
// Copyright 2022 The Gitea Authors. All rights reserved.
// SPDX-License-Identifier: MIT
package cargo
import (
"bytes"
"encoding/binary"
"io"
"strings"
"testing"
"github.com/stretchr/testify/assert"
)
const (
description = "Package Description"
author = "KN4CK3R"
homepage = "https://gitea.io/"
license = "MIT"
)
func TestParsePackage(t *testing.T) {
createPackage := func(name, version string) io.Reader {
metadata := `{
"name":"` + name + `",
"vers":"` + version + `",
"description":"` + description + `",
"authors": ["` + author + `"],
"deps":[
{
"name":"dep",
"version_req":"1.0"
}
],
"homepage":"` + homepage + `",
"license":"` + license + `"
}`
var buf bytes.Buffer
binary.Write(&buf, binary.LittleEndian, uint32(len(metadata)))
buf.WriteString(metadata)
binary.Write(&buf, binary.LittleEndian, uint32(4))
buf.WriteString("test")
return &buf
}
t.Run("InvalidName", func(t *testing.T) {
for _, name := range []string{"", "0test", "-test", "_test", strings.Repeat("a", 65)} {
data := createPackage(name, "1.0.0")
cp, err := ParsePackage(data)
assert.Nil(t, cp)
assert.ErrorIs(t, err, ErrInvalidName)
}
})
t.Run("InvalidVersion", func(t *testing.T) {
for _, version := range []string{"", "1.", "-1.0", "1.0.0/1"} {
data := createPackage("test", version)
cp, err := ParsePackage(data)
assert.Nil(t, cp)
assert.ErrorIs(t, err, ErrInvalidVersion)
}
})
t.Run("Valid", func(t *testing.T) {
data := createPackage("test", "1.0.0")
cp, err := ParsePackage(data)
assert.NotNil(t, cp)
assert.NoError(t, err)
assert.Equal(t, "test", cp.Name)
assert.Equal(t, "1.0.0", cp.Version)
assert.Equal(t, description, cp.Metadata.Description)
assert.Equal(t, []string{author}, cp.Metadata.Authors)
assert.Len(t, cp.Metadata.Dependencies, 1)
assert.Equal(t, "dep", cp.Metadata.Dependencies[0].Name)
assert.Equal(t, homepage, cp.Metadata.ProjectURL)
assert.Equal(t, license, cp.Metadata.License)
content, _ := io.ReadAll(cp.Content)
assert.Equal(t, "test", string(content))
})
}

View File

@ -0,0 +1,134 @@
// Copyright 2023 The Gitea Authors. All rights reserved.
// SPDX-License-Identifier: MIT
package chef
import (
"archive/tar"
"compress/gzip"
"io"
"regexp"
"strings"
"code.gitea.io/gitea/modules/json"
"code.gitea.io/gitea/modules/util"
"code.gitea.io/gitea/modules/validation"
)
const (
KeyBits = 4096
SettingPublicPem = "chef.public_pem"
)
var (
ErrMissingMetadataFile = util.NewInvalidArgumentErrorf("metadata.json file is missing")
ErrInvalidName = util.NewInvalidArgumentErrorf("package name is invalid")
ErrInvalidVersion = util.NewInvalidArgumentErrorf("package version is invalid")
namePattern = regexp.MustCompile(`\A\S+\z`)
versionPattern = regexp.MustCompile(`\A\d+\.\d+(?:\.\d+)?\z`)
)
// Package represents a Chef package
type Package struct {
Name string
Version string
Metadata *Metadata
}
// Metadata represents the metadata of a Chef package
type Metadata struct {
Description string `json:"description,omitempty"`
LongDescription string `json:"long_description,omitempty"`
Author string `json:"author,omitempty"`
License string `json:"license,omitempty"`
RepositoryURL string `json:"repository_url,omitempty"`
Dependencies map[string]string `json:"dependencies,omitempty"`
}
type chefMetadata struct {
Name string `json:"name"`
Description string `json:"description"`
LongDescription string `json:"long_description"`
Maintainer string `json:"maintainer"`
MaintainerEmail string `json:"maintainer_email"`
License string `json:"license"`
Platforms map[string]string `json:"platforms"`
Dependencies map[string]string `json:"dependencies"`
Providing map[string]string `json:"providing"`
Recipes map[string]string `json:"recipes"`
Version string `json:"version"`
SourceURL string `json:"source_url"`
IssuesURL string `json:"issues_url"`
Privacy bool `json:"privacy"`
ChefVersions [][]string `json:"chef_versions"`
Gems [][]string `json:"gems"`
EagerLoadLibraries bool `json:"eager_load_libraries"`
}
// ParsePackage parses the Chef package file
func ParsePackage(r io.Reader) (*Package, error) {
gzr, err := gzip.NewReader(r)
if err != nil {
return nil, err
}
defer gzr.Close()
tr := tar.NewReader(gzr)
for {
hd, err := tr.Next()
if err == io.EOF {
break
}
if err != nil {
return nil, err
}
if hd.Typeflag != tar.TypeReg {
continue
}
if strings.Count(hd.Name, "/") != 1 {
continue
}
if hd.FileInfo().Name() == "metadata.json" {
return ParseChefMetadata(tr)
}
}
return nil, ErrMissingMetadataFile
}
// ParseChefMetadata parses a metadata.json file to retrieve the metadata of a Chef package
func ParseChefMetadata(r io.Reader) (*Package, error) {
var cm chefMetadata
if err := json.NewDecoder(r).Decode(&cm); err != nil {
return nil, err
}
if !namePattern.MatchString(cm.Name) {
return nil, ErrInvalidName
}
if !versionPattern.MatchString(cm.Version) {
return nil, ErrInvalidVersion
}
if !validation.IsValidURL(cm.SourceURL) {
cm.SourceURL = ""
}
return &Package{
Name: cm.Name,
Version: cm.Version,
Metadata: &Metadata{
Description: cm.Description,
LongDescription: cm.LongDescription,
Author: cm.Maintainer,
License: cm.License,
RepositoryURL: cm.SourceURL,
Dependencies: cm.Dependencies,
},
}, nil
}

View File

@ -0,0 +1,92 @@
// Copyright 2023 The Gitea Authors. All rights reserved.
// SPDX-License-Identifier: MIT
package chef
import (
"archive/tar"
"bytes"
"compress/gzip"
"strings"
"testing"
"github.com/stretchr/testify/assert"
)
const (
packageName = "gitea"
packageVersion = "1.0.1"
packageAuthor = "KN4CK3R"
packageDescription = "Package Description"
packageRepositoryURL = "https://gitea.io/gitea/gitea"
)
func TestParsePackage(t *testing.T) {
t.Run("MissingMetadataFile", func(t *testing.T) {
var buf bytes.Buffer
zw := gzip.NewWriter(&buf)
tw := tar.NewWriter(zw)
tw.Close()
zw.Close()
p, err := ParsePackage(&buf)
assert.Nil(t, p)
assert.ErrorIs(t, err, ErrMissingMetadataFile)
})
t.Run("Valid", func(t *testing.T) {
var buf bytes.Buffer
zw := gzip.NewWriter(&buf)
tw := tar.NewWriter(zw)
content := `{"name":"` + packageName + `","version":"` + packageVersion + `"}`
hdr := &tar.Header{
Name: packageName + "/metadata.json",
Mode: 0o600,
Size: int64(len(content)),
}
tw.WriteHeader(hdr)
tw.Write([]byte(content))
tw.Close()
zw.Close()
p, err := ParsePackage(&buf)
assert.NoError(t, err)
assert.NotNil(t, p)
assert.Equal(t, packageName, p.Name)
assert.Equal(t, packageVersion, p.Version)
assert.NotNil(t, p.Metadata)
})
}
func TestParseChefMetadata(t *testing.T) {
t.Run("InvalidName", func(t *testing.T) {
for _, name := range []string{" test", "test "} {
p, err := ParseChefMetadata(strings.NewReader(`{"name":"` + name + `","version":"1.0.0"}`))
assert.Nil(t, p)
assert.ErrorIs(t, err, ErrInvalidName)
}
})
t.Run("InvalidVersion", func(t *testing.T) {
for _, version := range []string{"1", "1.2.3.4", "1.0.0 "} {
p, err := ParseChefMetadata(strings.NewReader(`{"name":"test","version":"` + version + `"}`))
assert.Nil(t, p)
assert.ErrorIs(t, err, ErrInvalidVersion)
}
})
t.Run("Valid", func(t *testing.T) {
p, err := ParseChefMetadata(strings.NewReader(`{"name":"` + packageName + `","version":"` + packageVersion + `","description":"` + packageDescription + `","maintainer":"` + packageAuthor + `","source_url":"` + packageRepositoryURL + `"}`))
assert.NotNil(t, p)
assert.NoError(t, err)
assert.Equal(t, packageName, p.Name)
assert.Equal(t, packageVersion, p.Version)
assert.Equal(t, packageDescription, p.Metadata.Description)
assert.Equal(t, packageAuthor, p.Metadata.Author)
assert.Equal(t, packageRepositoryURL, p.Metadata.RepositoryURL)
})
}

View File

@ -10,8 +10,9 @@ import (
"code.gitea.io/gitea/modules/json"
"code.gitea.io/gitea/modules/packages/container/helm"
"code.gitea.io/gitea/modules/packages/container/oci"
"code.gitea.io/gitea/modules/validation"
oci "github.com/opencontainers/image-spec/specs-go/v1"
)
const (
@ -65,8 +66,8 @@ type Metadata struct {
}
// ParseImageConfig parses the metadata of an image config
func ParseImageConfig(mediaType oci.MediaType, r io.Reader) (*Metadata, error) {
if strings.EqualFold(string(mediaType), helm.ConfigMediaType) {
func ParseImageConfig(mt string, r io.Reader) (*Metadata, error) {
if strings.EqualFold(mt, helm.ConfigMediaType) {
return parseHelmConfig(r)
}

View File

@ -8,8 +8,8 @@ import (
"testing"
"code.gitea.io/gitea/modules/packages/container/helm"
"code.gitea.io/gitea/modules/packages/container/oci"
oci "github.com/opencontainers/image-spec/specs-go/v1"
"github.com/stretchr/testify/assert"
)
@ -23,7 +23,7 @@ func TestParseImageConfig(t *testing.T) {
configOCI := `{"config": {"labels": {"` + labelAuthors + `": "` + author + `", "` + labelLicenses + `": "` + license + `", "` + labelURL + `": "` + projectURL + `", "` + labelSource + `": "` + repositoryURL + `", "` + labelDocumentation + `": "` + documentationURL + `", "` + labelDescription + `": "` + description + `"}}, "history": [{"created_by": "do it 1"}, {"created_by": "dummy #(nop) do it 2"}]}`
metadata, err := ParseImageConfig(oci.MediaType(oci.MediaTypeImageManifest), strings.NewReader(configOCI))
metadata, err := ParseImageConfig(oci.MediaTypeImageManifest, strings.NewReader(configOCI))
assert.NoError(t, err)
assert.Equal(t, TypeOCI, metadata.Type)
@ -50,7 +50,7 @@ func TestParseImageConfig(t *testing.T) {
configHelm := `{"description":"` + description + `", "home": "` + projectURL + `", "sources": ["` + repositoryURL + `"], "maintainers":[{"name":"` + author + `"}]}`
metadata, err = ParseImageConfig(oci.MediaType(helm.ConfigMediaType), strings.NewReader(configHelm))
metadata, err = ParseImageConfig(helm.ConfigMediaType, strings.NewReader(configHelm))
assert.NoError(t, err)
assert.Equal(t, TypeHelm, metadata.Type)

View File

@ -1,26 +0,0 @@
// Copyright 2022 The Gitea Authors. All rights reserved.
// SPDX-License-Identifier: MIT
package oci
import (
"regexp"
"strings"
)
var digestPattern = regexp.MustCompile(`\Asha256:[a-f0-9]{64}\z`)
type Digest string
// Validate checks if the digest has a valid SHA256 signature
func (d Digest) Validate() bool {
return digestPattern.MatchString(string(d))
}
func (d Digest) Hash() string {
p := strings.SplitN(string(d), ":", 2)
if len(p) != 2 {
return ""
}
return p[1]
}

View File

@ -1,35 +0,0 @@
// Copyright 2022 The Gitea Authors. All rights reserved.
// SPDX-License-Identifier: MIT
package oci
import (
"strings"
)
const (
MediaTypeImageManifest = "application/vnd.oci.image.manifest.v1+json"
MediaTypeImageIndex = "application/vnd.oci.image.index.v1+json"
MediaTypeDockerManifest = "application/vnd.docker.distribution.manifest.v2+json"
MediaTypeDockerManifestList = "application/vnd.docker.distribution.manifest.list.v2+json"
)
type MediaType string
// IsValid tests if the media type is in the OCI or Docker namespace
func (m MediaType) IsValid() bool {
s := string(m)
return strings.HasPrefix(s, "application/vnd.docker.") || strings.HasPrefix(s, "application/vnd.oci.")
}
// IsImageManifest tests if the media type is an image manifest
func (m MediaType) IsImageManifest() bool {
s := string(m)
return strings.EqualFold(s, MediaTypeDockerManifest) || strings.EqualFold(s, MediaTypeImageManifest)
}
// IsImageIndex tests if the media type is an image index
func (m MediaType) IsImageIndex() bool {
s := string(m)
return strings.EqualFold(s, MediaTypeDockerManifestList) || strings.EqualFold(s, MediaTypeImageIndex)
}

View File

@ -1,190 +0,0 @@
// Copyright 2022 The Gitea Authors. All rights reserved.
// SPDX-License-Identifier: MIT
package oci
import (
"time"
)
// https://github.com/opencontainers/image-spec/tree/main/specs-go/v1
// ImageConfig defines the execution parameters which should be used as a base when running a container using an image.
type ImageConfig struct {
// User defines the username or UID which the process in the container should run as.
User string `json:"User,omitempty"`
// ExposedPorts a set of ports to expose from a container running this image.
ExposedPorts map[string]struct{} `json:"ExposedPorts,omitempty"`
// Env is a list of environment variables to be used in a container.
Env []string `json:"Env,omitempty"`
// Entrypoint defines a list of arguments to use as the command to execute when the container starts.
Entrypoint []string `json:"Entrypoint,omitempty"`
// Cmd defines the default arguments to the entrypoint of the container.
Cmd []string `json:"Cmd,omitempty"`
// Volumes is a set of directories describing where the process is likely to write data specific to a container instance.
Volumes map[string]struct{} `json:"Volumes,omitempty"`
// WorkingDir sets the current working directory of the entrypoint process in the container.
WorkingDir string `json:"WorkingDir,omitempty"`
// Labels contains arbitrary metadata for the container.
Labels map[string]string `json:"Labels,omitempty"`
// StopSignal contains the system call signal that will be sent to the container to exit.
StopSignal string `json:"StopSignal,omitempty"`
}
// RootFS describes the layer content addresses
type RootFS struct {
// Type is the type of the rootfs.
Type string `json:"type"`
// DiffIDs is an array of layer content hashes, in order from bottom-most to top-most.
DiffIDs []string `json:"diff_ids"`
}
// History describes the history of a layer.
type History struct {
// Created is the combined date and time at which the layer was created, formatted as defined by RFC 3339, section 5.6.
Created *time.Time `json:"created,omitempty"`
// CreatedBy is the command which created the layer.
CreatedBy string `json:"created_by,omitempty"`
// Author is the author of the build point.
Author string `json:"author,omitempty"`
// Comment is a custom message set when creating the layer.
Comment string `json:"comment,omitempty"`
// EmptyLayer is used to mark if the history item created a filesystem diff.
EmptyLayer bool `json:"empty_layer,omitempty"`
}
// Image is the JSON structure which describes some basic information about the image.
// This provides the `application/vnd.oci.image.config.v1+json` mediatype when marshalled to JSON.
type Image struct {
// Created is the combined date and time at which the image was created, formatted as defined by RFC 3339, section 5.6.
Created *time.Time `json:"created,omitempty"`
// Author defines the name and/or email address of the person or entity which created and is responsible for maintaining the image.
Author string `json:"author,omitempty"`
// Architecture is the CPU architecture which the binaries in this image are built to run on.
Architecture string `json:"architecture"`
// Variant is the variant of the specified CPU architecture which image binaries are intended to run on.
Variant string `json:"variant,omitempty"`
// OS is the name of the operating system which the image is built to run on.
OS string `json:"os"`
// OSVersion is an optional field specifying the operating system
// version, for example on Windows `10.0.14393.1066`.
OSVersion string `json:"os.version,omitempty"`
// OSFeatures is an optional field specifying an array of strings,
// each listing a required OS feature (for example on Windows `win32k`).
OSFeatures []string `json:"os.features,omitempty"`
// Config defines the execution parameters which should be used as a base when running a container using the image.
Config ImageConfig `json:"config,omitempty"`
// RootFS references the layer content addresses used by the image.
RootFS RootFS `json:"rootfs"`
// History describes the history of each layer.
History []History `json:"history,omitempty"`
}
// Descriptor describes the disposition of targeted content.
// This structure provides `application/vnd.oci.descriptor.v1+json` mediatype
// when marshalled to JSON.
type Descriptor struct {
// MediaType is the media type of the object this schema refers to.
MediaType MediaType `json:"mediaType,omitempty"`
// Digest is the digest of the targeted content.
Digest Digest `json:"digest"`
// Size specifies the size in bytes of the blob.
Size int64 `json:"size"`
// URLs specifies a list of URLs from which this object MAY be downloaded
URLs []string `json:"urls,omitempty"`
// Annotations contains arbitrary metadata relating to the targeted content.
Annotations map[string]string `json:"annotations,omitempty"`
// Data is an embedding of the targeted content. This is encoded as a base64
// string when marshalled to JSON (automatically, by encoding/json). If
// present, Data can be used directly to avoid fetching the targeted content.
Data []byte `json:"data,omitempty"`
// Platform describes the platform which the image in the manifest runs on.
//
// This should only be used when referring to a manifest.
Platform *Platform `json:"platform,omitempty"`
}
// Platform describes the platform which the image in the manifest runs on.
type Platform struct {
// Architecture field specifies the CPU architecture, for example
// `amd64` or `ppc64`.
Architecture string `json:"architecture"`
// OS specifies the operating system, for example `linux` or `windows`.
OS string `json:"os"`
// OSVersion is an optional field specifying the operating system
// version, for example on Windows `10.0.14393.1066`.
OSVersion string `json:"os.version,omitempty"`
// OSFeatures is an optional field specifying an array of strings,
// each listing a required OS feature (for example on Windows `win32k`).
OSFeatures []string `json:"os.features,omitempty"`
// Variant is an optional field specifying a variant of the CPU, for
// example `v7` to specify ARMv7 when architecture is `arm`.
Variant string `json:"variant,omitempty"`
}
type SchemaMediaBase struct {
// SchemaVersion is the image manifest schema that this image follows
SchemaVersion int `json:"schemaVersion"`
// MediaType specifies the type of this document data structure e.g. `application/vnd.oci.image.manifest.v1+json`
MediaType MediaType `json:"mediaType,omitempty"`
}
// Manifest provides `application/vnd.oci.image.manifest.v1+json` mediatype structure when marshalled to JSON.
type Manifest struct {
SchemaMediaBase
// Config references a configuration object for a container, by digest.
// The referenced configuration object is a JSON blob that the runtime uses to set up the container.
Config Descriptor `json:"config"`
// Layers is an indexed list of layers referenced by the manifest.
Layers []Descriptor `json:"layers"`
// Annotations contains arbitrary metadata for the image manifest.
Annotations map[string]string `json:"annotations,omitempty"`
}
// Index references manifests for various platforms.
// This structure provides `application/vnd.oci.image.index.v1+json` mediatype when marshalled to JSON.
type Index struct {
SchemaMediaBase
// Manifests references platform specific manifests.
Manifests []Descriptor `json:"manifests"`
// Annotations contains arbitrary metadata for the image index.
Annotations map[string]string `json:"annotations,omitempty"`
}

View File

@ -1,16 +0,0 @@
// Copyright 2022 The Gitea Authors. All rights reserved.
// SPDX-License-Identifier: MIT
package oci
import (
"regexp"
)
var referencePattern = regexp.MustCompile(`\A[a-zA-Z0-9_][a-zA-Z0-9._-]{0,127}\z`)
type Reference string
func (r Reference) Validate() bool {
return referencePattern.MatchString(string(r))
}

View File

@ -7,6 +7,7 @@ import (
"net/http"
"net/url"
"os"
"strings"
"sync"
"code.gitea.io/gitea/modules/log"
@ -82,3 +83,16 @@ func Proxy() func(req *http.Request) (*url.URL, error) {
return http.ProxyFromEnvironment(req)
}
}
// EnvWithProxy returns os.Environ(), with an https_proxy entry added if the
// given URL needs to be proxied.
func EnvWithProxy(u *url.URL) []string {
envs := os.Environ()
if strings.EqualFold(u.Scheme, "http") || strings.EqualFold(u.Scheme, "https") {
if Match(u.Host) {
envs = append(envs, "https_proxy="+GetProxyURL())
}
}
return envs
}
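
For illustration, a hedged sketch of how EnvWithProxy could be used to run an external command with a proxy-aware environment; the import path and the git invocation are assumptions, not part of this change:

package main

import (
	"net/url"
	"os/exec"

	"code.gitea.io/gitea/modules/proxy"
)

func fetch(remote string) error {
	u, err := url.Parse(remote)
	if err != nil {
		return err
	}
	cmd := exec.Command("git", "fetch", remote)
	// https_proxy is only appended when the scheme is http(s) and the host
	// matches the configured proxy rules (see EnvWithProxy above).
	cmd.Env = proxy.EnvWithProxy(u)
	return cmd.Run()
}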

View File

@ -211,6 +211,7 @@ func CreateRepository(doer, u *user_model.User, opts CreateRepoOptions) (*repo_m
IsEmpty: !opts.AutoInit,
TrustModel: opts.TrustModel,
IsMirror: opts.IsMirror,
DefaultBranch: opts.DefaultBranch,
}
var rollbackRepo *repo_model.Repository

View File

@ -49,7 +49,7 @@ func TestIncludesAllRepositoriesTeams(t *testing.T) {
assert.NoError(t, organization.CreateOrganization(org, user), "CreateOrganization")
// Check Owner team.
ownerTeam, err := org.GetOwnerTeam()
ownerTeam, err := org.GetOwnerTeam(db.DefaultContext)
assert.NoError(t, err, "GetOwnerTeam")
assert.True(t, ownerTeam.IncludesAllRepositories, "Owner team includes all repositories")
@ -63,7 +63,7 @@ func TestIncludesAllRepositoriesTeams(t *testing.T) {
}
}
// Get fresh copy of Owner team after creating repos.
ownerTeam, err = org.GetOwnerTeam()
ownerTeam, err = org.GetOwnerTeam(db.DefaultContext)
assert.NoError(t, err, "GetOwnerTeam")
// Create teams and check repositories.

View File

@ -57,7 +57,7 @@ func MigrateRepositoryGitData(ctx context.Context, u *user_model.User,
repoPath := repo_model.RepoPath(u.Name, opts.RepoName)
if u.IsOrganization() {
t, err := organization.OrgFromUser(u).GetOwnerTeam()
t, err := organization.OrgFromUser(u).GetOwnerTeam(ctx)
if err != nil {
return nil, err
}

View File

@ -25,6 +25,8 @@ var (
LimitTotalOwnerCount int64
LimitTotalOwnerSize int64
LimitSizeCargo int64
LimitSizeChef int64
LimitSizeComposer int64
LimitSizeConan int64
LimitSizeConda int64
@ -65,6 +67,8 @@ func newPackages() {
}
Packages.LimitTotalOwnerSize = mustBytes(sec, "LIMIT_TOTAL_OWNER_SIZE")
Packages.LimitSizeCargo = mustBytes(sec, "LIMIT_SIZE_CARGO")
Packages.LimitSizeChef = mustBytes(sec, "LIMIT_SIZE_CHEF")
Packages.LimitSizeComposer = mustBytes(sec, "LIMIT_SIZE_COMPOSER")
Packages.LimitSizeConan = mustBytes(sec, "LIMIT_SIZE_CONAN")
Packages.LimitSizeConda = mustBytes(sec, "LIMIT_SIZE_CONDA")

View File

@ -46,6 +46,8 @@ var Service = struct {
RecaptchaSecret string
RecaptchaSitekey string
RecaptchaURL string
CfTurnstileSecret string
CfTurnstileSitekey string
HcaptchaSecret string
HcaptchaSitekey string
McaptchaSecret string
@ -137,6 +139,8 @@ func newService() {
Service.RecaptchaSecret = sec.Key("RECAPTCHA_SECRET").MustString("")
Service.RecaptchaSitekey = sec.Key("RECAPTCHA_SITEKEY").MustString("")
Service.RecaptchaURL = sec.Key("RECAPTCHA_URL").MustString("https://www.google.com/recaptcha/")
Service.CfTurnstileSecret = sec.Key("CF_TURNSTILE_SECRET").MustString("")
Service.CfTurnstileSitekey = sec.Key("CF_TURNSTILE_SITEKEY").MustString("")
Service.HcaptchaSecret = sec.Key("HCAPTCHA_SECRET").MustString("")
Service.HcaptchaSitekey = sec.Key("HCAPTCHA_SITEKEY").MustString("")
Service.McaptchaURL = sec.Key("MCAPTCHA_URL").MustString("https://demo.mcaptcha.org/")

View File

@ -61,6 +61,7 @@ const (
ReCaptcha = "recaptcha"
HCaptcha = "hcaptcha"
MCaptcha = "mcaptcha"
CfTurnstile = "cfturnstile"
)
// settings

View File

@ -63,6 +63,7 @@ type Repository struct {
Language string `json:"language"`
LanguagesURL string `json:"languages_url"`
HTMLURL string `json:"html_url"`
Link string `json:"link"`
SSHURL string `json:"ssh_url"`
CloneURL string `json:"clone_url"`
OriginalURL string `json:"original_url"`

View File

@ -3,7 +3,7 @@
package structs
// VisibleType defines the visibility (Organization only)
// VisibleType defines the visibility of user and org
type VisibleType int
const (
@ -13,11 +13,11 @@ const (
// VisibleTypeLimited Visible for every connected user
VisibleTypeLimited
// VisibleTypePrivate Visible only for organization's members
// VisibleTypePrivate Visible only for self or admin user
VisibleTypePrivate
)
// VisibilityModes is a map of org Visibility types
// VisibilityModes is a map of Visibility types
var VisibilityModes = map[string]VisibleType{
"public": VisibleTypePublic,
"limited": VisibleTypeLimited,

View File

@ -72,6 +72,10 @@ func NewFuncMap() []template.FuncMap {
return setting.StaticURLPrefix + "/assets"
},
"AppUrl": func() string {
// The usage of AppUrl should be avoided as much as possible,
// because the AppURL (ROOT_URL) may not match the site the user is visiting and the ROOT_URL in app.ini may be incorrect.
// It is also difficult for Gitea to guess the absolute URL correctly with zero configuration,
// because Gitea cannot know whether the scheme is HTTP or HTTPS unless the reverse proxy tells it.
return setting.AppURL
},
"AppVer": func() string {

View File

@ -0,0 +1,92 @@
// Copyright 2023 The Gitea Authors. All rights reserved.
// SPDX-License-Identifier: MIT
package turnstile
import (
"context"
"fmt"
"io"
"net/http"
"net/url"
"strings"
"code.gitea.io/gitea/modules/json"
"code.gitea.io/gitea/modules/setting"
)
// Response is the structure of JSON returned from API
type Response struct {
Success bool `json:"success"`
ChallengeTS string `json:"challenge_ts"`
Hostname string `json:"hostname"`
ErrorCodes []ErrorCode `json:"error-codes"`
Action string `json:"action"`
Cdata string `json:"cdata"`
}
// Verify calls Cloudflare Turnstile API to verify token
func Verify(ctx context.Context, response string) (bool, error) {
// Cloudflare Turnstile server-side validation documentation: https://developers.cloudflare.com/turnstile/get-started/server-side-validation/
post := url.Values{
"secret": {setting.Service.CfTurnstileSecret},
"response": {response},
}
// Basically a copy of http.PostForm, but with a context
req, err := http.NewRequestWithContext(ctx, http.MethodPost,
"https://challenges.cloudflare.com/turnstile/v0/siteverify", strings.NewReader(post.Encode()))
if err != nil {
return false, fmt.Errorf("Failed to create CAPTCHA request: %w", err)
}
req.Header.Set("Content-Type", "application/x-www-form-urlencoded")
resp, err := http.DefaultClient.Do(req)
if err != nil {
return false, fmt.Errorf("Failed to send CAPTCHA response: %w", err)
}
defer resp.Body.Close()
body, err := io.ReadAll(resp.Body)
if err != nil {
return false, fmt.Errorf("Failed to read CAPTCHA response: %w", err)
}
var jsonResponse Response
if err := json.Unmarshal(body, &jsonResponse); err != nil {
return false, fmt.Errorf("Failed to parse CAPTCHA response: %w", err)
}
var respErr error
if len(jsonResponse.ErrorCodes) > 0 {
respErr = jsonResponse.ErrorCodes[0]
}
return jsonResponse.Success, respErr
}
// ErrorCode is a Turnstile error code
type ErrorCode string
// String fulfills the Stringer interface
func (e ErrorCode) String() string {
switch e {
case "missing-input-secret":
return "The secret parameter was not passed."
case "invalid-input-secret":
return "The secret parameter was invalid or did not exist."
case "missing-input-response":
return "The response parameter was not passed."
case "invalid-input-response":
return "The response parameter is invalid or has expired."
case "bad-request":
return "The request was rejected because it was malformed."
case "timeout-or-duplicate":
return "The response parameter has already been validated before."
case "internal-error":
return "An internal error happened while validating the response. The request can be retried."
}
return string(e)
}
// Error fulfills the error interface
func (e ErrorCode) Error() string {
return e.String()
}
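
A minimal sketch of wiring Verify into a handler. The form field name "cf-turnstile-response" follows the Cloudflare widget convention and the import path is assumed; neither is defined by this file:

package main

import (
	"net/http"

	"code.gitea.io/gitea/modules/turnstile"
)

func protected(w http.ResponseWriter, req *http.Request) {
	// The Turnstile widget posts its token as "cf-turnstile-response".
	ok, err := turnstile.Verify(req.Context(), req.FormValue("cf-turnstile-response"))
	if err != nil || !ok {
		http.Error(w, "CAPTCHA verification failed", http.StatusForbidden)
		return
	}
	w.WriteHeader(http.StatusOK) // proceed with the protected action
}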

View File

@ -1,7 +1,7 @@
// Copyright 2021 The Gitea Authors. All rights reserved.
// SPDX-License-Identifier: MIT
package activitypub
package util
import (
"crypto/rand"
@ -10,11 +10,9 @@ import (
"encoding/pem"
)
const rsaBits = 2048
// GenerateKeyPair generates a public and private keypair for signing actions by users for activitypub purposes
func GenerateKeyPair() (string, string, error) {
priv, _ := rsa.GenerateKey(rand.Reader, rsaBits)
// GenerateKeyPair generates a public and private keypair
func GenerateKeyPair(bits int) (string, string, error) {
priv, _ := rsa.GenerateKey(rand.Reader, bits)
privPem, err := pemBlockForPriv(priv)
if err != nil {
return "", "", err

View File

@ -1,7 +1,7 @@
// Copyright 2021 The Gitea Authors. All rights reserved.
// SPDX-License-Identifier: MIT
package activitypub
package util
import (
"crypto"
@ -17,7 +17,7 @@ import (
)
func TestKeygen(t *testing.T) {
priv, pub, err := GenerateKeyPair()
priv, pub, err := GenerateKeyPair(2048)
assert.NoError(t, err)
assert.NotEmpty(t, priv)
@ -28,7 +28,7 @@ func TestKeygen(t *testing.T) {
}
func TestSignUsingKeys(t *testing.T) {
priv, pub, err := GenerateKeyPair()
priv, pub, err := GenerateKeyPair(2048)
assert.NoError(t, err)
privPem, _ := pem.Decode([]byte(priv))

View File

@ -8,6 +8,7 @@ import (
"regexp"
"strings"
"code.gitea.io/gitea/modules/auth"
"code.gitea.io/gitea/modules/git"
"gitea.com/go-chi/binding"
@ -17,15 +18,14 @@ import (
const (
// ErrGitRefName is git reference name error
ErrGitRefName = "GitRefNameError"
// ErrGlobPattern is returned when glob pattern is invalid
ErrGlobPattern = "GlobPattern"
// ErrRegexPattern is returned when a regex pattern is invalid
ErrRegexPattern = "RegexPattern"
// ErrUsername is username error
ErrUsername = "UsernameError"
// ErrInvalidGroupTeamMap is returned when a group team mapping is invalid
ErrInvalidGroupTeamMap = "InvalidGroupTeamMap"
)
// AddBindingRules adds additional binding rules
@ -37,6 +37,7 @@ func AddBindingRules() {
addRegexPatternRule()
addGlobOrRegexPatternRule()
addUsernamePatternRule()
addValidGroupTeamMapRule()
}
func addGitRefNameBindingRule() {
@ -167,6 +168,23 @@ func addUsernamePatternRule() {
})
}
func addValidGroupTeamMapRule() {
binding.AddRule(&binding.Rule{
IsMatch: func(rule string) bool {
return strings.HasPrefix(rule, "ValidGroupTeamMap")
},
IsValid: func(errs binding.Errors, name string, val interface{}) (bool, binding.Errors) {
_, err := auth.UnmarshalGroupTeamMapping(fmt.Sprintf("%v", val))
if err != nil {
errs.Add([]string{name}, ErrInvalidGroupTeamMap, err.Error())
return false, errs
}
return true, errs
},
})
}
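
// For illustration only (not part of this change): the value accepted by the
// rule above is assumed to be JSON of the shape
//
//	{"<group claim value>": {"<Organization>": ["<Team>", ...]}}
//
// for example {"cn=developers,ou=groups,dc=example,dc=org": {"MyOrg": ["Developers"]}}.
// auth.UnmarshalGroupTeamMapping parses it, and anything it rejects is reported
// through ErrInvalidGroupTeamMap above.
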
func portOnly(hostport string) string {
colon := strings.IndexByte(hostport, ':')
if colon == -1 {

View File

@ -136,6 +136,8 @@ func Validate(errs binding.Errors, data map[string]interface{}, f Form, l transl
data["ErrorMsg"] = trName + l.Tr("form.regex_pattern_error", errs[0].Message)
case validation.ErrUsername:
data["ErrorMsg"] = trName + l.Tr("form.username_error")
case validation.ErrInvalidGroupTeamMap:
data["ErrorMsg"] = trName + l.Tr("form.invalid_group_team_map_error", errs[0].Message)
default:
msg := errs[0].Classification
if msg != "" && errs[0].Message != "" {

View File

@ -477,6 +477,7 @@ include_error = ` must contain substring '%s'.`
glob_pattern_error = ` glob pattern is invalid: %s.`
regex_pattern_error = ` regex pattern is invalid: %s.`
username_error = ` can only contain alphanumeric chars ('0-9','a-z','A-Z'), dash ('-'), underscore ('_') and dot ('.'). It cannot begin or end with non-alphanumeric chars, and consecutive non-alphanumeric chars are also forbidden.`
invalid_group_team_map_error = ` mapping is invalid: %s`
unknown_error = Unknown error:
captcha_incorrect = The CAPTCHA code is incorrect.
password_not_match = The passwords do not match.
@ -2758,6 +2759,8 @@ auths.oauth2_required_claim_value_helper = Set this value to restrict login from
auths.oauth2_group_claim_name = Claim name providing group names for this source. (Optional)
auths.oauth2_admin_group = Group Claim value for administrator users. (Optional - requires claim name above)
auths.oauth2_restricted_group = Group Claim value for restricted users. (Optional - requires claim name above)
auths.oauth2_map_group_to_team = Map claimed groups to Organization teams. (Optional - requires claim name above)
auths.oauth2_map_group_to_team_removal = Remove users from synchronized teams if the user does not belong to the corresponding group.
auths.enable_auto_register = Enable Auto Registration
auths.sspi_auto_create_users = Automatically create users
auths.sspi_auto_create_users_helper = Allow SSPI auth method to automatically create new accounts for users that login for the first time
@ -3145,6 +3148,8 @@ keywords = Keywords
details = Details
details.author = Author
details.project_site = Project Site
details.repository_site = Repository Site
details.documentation_site = Documentation Site
details.license = License
assets = Assets
versions = Versions
@ -3152,6 +3157,14 @@ versions.on = on
versions.view_all = View all
dependency.id = ID
dependency.version = Version
cargo.registry = Setup this registry in the Cargo configuration file (for example <code>~/.cargo/config.toml</code>):
cargo.install = To install the package using Cargo, run the following command:
cargo.documentation = For more information on the Cargo registry, see <a target="_blank" rel="noopener noreferrer" href="https://docs.gitea.io/en-us/packages/cargo/">the documentation</a>.
cargo.details.repository_site = Repository Site
cargo.details.documentation_site = Documentation Site
chef.registry = Setup this registry in your <code>~/.chef/config.rb</code> file:
chef.install = To install the package, run the following command:
chef.documentation = For more information on the Chef registry, see <a target="_blank" rel="noopener noreferrer" href="https://docs.gitea.io/en-us/packages/chef/">the documentation</a>.
composer.registry = Setup this registry in your <code>~/.composer/config.json</code> file:
composer.install = To install the package using Composer, run the following command:
composer.documentation = For more information on the Composer registry, see <a target="_blank" rel="noopener noreferrer" href="https://docs.gitea.io/en-us/packages/composer/">the documentation</a>.
@ -3168,8 +3181,6 @@ conda.details.repository_site = Repository Site
conda.details.documentation_site = Documentation Site
container.details.type = Image Type
container.details.platform = Platform
container.details.repository_site = Repository Site
container.details.documentation_site = Documentation Site
container.pull = Pull the image from the command line:
container.digest = Digest:
container.documentation = For more information on the Container registry, see <a target="_blank" rel="noopener noreferrer" href="https://docs.gitea.io/en-us/packages/container/">the documentation</a>.
@ -3203,8 +3214,6 @@ npm.dependencies.optional = Optional Dependencies
npm.details.tag = Tag
pub.install = To install the package using Dart, run the following command:
pub.documentation = For more information on the Pub registry, see <a target="_blank" rel="noopener noreferrer" href="https://docs.gitea.io/en-us/packages/pub/">the documentation</a>.
pub.details.repository_site = Repository Site
pub.details.documentation_site = Documentation Site
pypi.requires = Requires Python
pypi.install = To install the package using pip, run the following command:
pypi.documentation = For more information on the PyPI registry, see <a target="_blank" rel="noopener noreferrer" href="https://docs.gitea.io/en-us/packages/pypi/">the documentation</a>.
@ -3228,6 +3237,15 @@ settings.delete.description = Deleting a package is permanent and cannot be undo
settings.delete.notice = You are about to delete %s (%s). This operation is irreversible, are you sure?
settings.delete.success = The package has been deleted.
settings.delete.error = Failed to delete the package.
owner.settings.cargo.title = Cargo Registry Index
owner.settings.cargo.initialize = Initialize Index
owner.settings.cargo.initialize.description = To use the Cargo registry, a special index Git repository is needed. Here you can (re)create it with the required config.
owner.settings.cargo.initialize.error = Failed to initialize Cargo index: %v
owner.settings.cargo.initialize.success = The Cargo index was successfully created.
owner.settings.cargo.rebuild = Rebuild Index
owner.settings.cargo.rebuild.description = If the index is out of sync with the stored Cargo packages, you can rebuild it here.
owner.settings.cargo.rebuild.error = Failed to rebuild Cargo index: %v
owner.settings.cargo.rebuild.success = The Cargo index was successfully rebuilt.
owner.settings.cleanuprules.title = Manage Cleanup Rules
owner.settings.cleanuprules.add = Add Cleanup Rule
owner.settings.cleanuprules.edit = Edit Cleanup Rule
@ -3248,6 +3266,9 @@ owner.settings.cleanuprules.remove.days = Remove versions older than
owner.settings.cleanuprules.remove.pattern = Remove versions matching
owner.settings.cleanuprules.success.update = Cleanup rule has been updated.
owner.settings.cleanuprules.success.delete = Cleanup rule has been deleted.
owner.settings.chef.title = Chef Registry
owner.settings.chef.keypair = Generate key pair
owner.settings.chef.keypair.description = Generate a key pair used to authenticate against the Chef registry. The previous key cannot be used afterwards.
[secrets]
secrets = Secrets

View File

@ -0,0 +1 @@
<svg xml:space="preserve" fill-rule="evenodd" stroke-linecap="round" stroke-linejoin="round" clip-rule="evenodd" viewBox="0 0 32 32" class="svg gitea-cargo" width="16" height="16" aria-hidden="true"><path d="M15.993 1.54c-7.972 0-14.461 6.492-14.461 14.462 0 7.969 6.492 14.461 14.461 14.461 7.97 0 14.462-6.492 14.462-14.461 0-7.97-6.492-14.462-14.462-14.462zm-.021 1.285a.954.954 0 0 1 .924.951c0 .522-.43.952-.952.952s-.951-.43-.951-.952.429-.952.951-.952l.028.001zm2.178 1.566a11.717 11.717 0 0 1 8.016 5.709l-1.123 2.533a.874.874 0 0 0 .44 1.147l2.16.958c.067.675.076 1.355.025 2.031h-1.202c-.12 0-.169.08-.169.196v.551c0 1.297-.731 1.582-1.373 1.652-.612.07-1.288-.257-1.374-.63-.361-2.029-.961-2.46-1.909-3.21 1.178-.746 2.401-1.85 2.401-3.325 0-1.594-1.092-2.597-1.835-3.09-1.046-.688-2.203-.826-2.515-.826H7.271a11.712 11.712 0 0 1 6.55-3.696l1.466 1.536a.862.862 0 0 0 1.223.028l1.64-1.564zM4.628 11.434c.511.015.924.44.924.951 0 .522-.43.952-.952.952s-.951-.43-.951-.952.429-.951.951-.951h.028zm22.685.043c.511.015.924.44.924.951 0 .522-.43.952-.952.952s-.951-.43-.951-.952a.956.956 0 0 1 .979-.951zm-20.892.153h1.658v7.477H4.732a11.715 11.715 0 0 1-.38-4.47l2.05-.912a.865.865 0 0 0 .441-1.144l-.422-.951zm6.92.079h3.949c.205 0 1.441.236 1.441 1.163 0 .768-.948 1.043-1.728 1.043h-3.665l.003-2.206zm0 5.373h3.026c.275 0 1.477.079 1.86 1.615.119.471.385 2.007.566 2.499.18.551.911 1.652 1.691 1.652h4.938c-.331.444-.693.863-1.083 1.255l-2.01-.432a.87.87 0 0 0-1.031.667l-.477 2.228a11.714 11.714 0 0 1-9.762-.046l-.478-2.228a.867.867 0 0 0-1.028-.667l-1.967.423a11.866 11.866 0 0 1-1.016-1.2h9.567c.107 0 .181-.018.181-.119v-3.384c0-.097-.074-.119-.181-.119h-2.799l.003-2.144zm-4.415 7.749c.512.015.924.44.924.951 0 .522-.429.952-.951.952s-.952-.43-.952-.952.43-.952.952-.952l.027.001zm14.089.043a.954.954 0 0 1 .923.951c0 .522-.429.952-.951.952s-.951-.43-.951-.952a.956.956 0 0 1 .979-.951z"/><path d="M29.647 16.002c0 7.49-6.163 13.653-13.654 13.653-7.49 0-13.654-6.163-13.654-13.653 0-7.491 6.164-13.654 13.654-13.654 7.491 0 13.654 6.163 13.654 13.654zm-.257-1.319 2.13 1.319-2.13 1.318 1.83 1.71-2.344.878 1.463 2.035-2.475.404 1.04 2.282-2.506-.089.575 2.442-2.441-.576.089 2.506-2.283-1.04-.403 2.475-2.035-1.462-.878 2.343-1.71-1.829-1.319 2.129-1.318-2.129-1.71 1.829-.878-2.343-2.035 1.462-.404-2.475-2.282 1.04.089-2.506-2.442.576.575-2.442-2.505.089 1.04-2.282-2.475-.404 1.462-2.035-2.343-.878 1.829-1.71-2.129-1.318 2.129-1.319-1.829-1.71 2.343-.878-1.462-2.035 2.475-.404-1.04-2.282 2.505.089-.575-2.441 2.442.575-.089-2.506 2.282 1.04.404-2.475 2.035 1.463.878-2.344 1.71 1.83 1.318-2.13 1.319 2.13 1.71-1.83.878 2.344 2.035-1.463.403 2.475 2.283-1.04-.089 2.506 2.441-.575-.575 2.441 2.506-.089-1.04 2.282 2.475.404-1.463 2.035 2.344.878-1.83 1.71z"/></svg>

View File

@ -0,0 +1 @@
<svg viewBox="0 0 36 36" class="svg gitea-chef" width="16" height="16" aria-hidden="true"><g fill="none" fill-rule="evenodd"><path fill="#435363" d="M18 25.8c-4.3 0-7.7-3.6-7.7-8s3.4-7.9 7.7-7.9c3.5 0 6.4 2.4 7.3 5.7h3c-1-5-5.2-8.7-10.3-8.7-5.9 0-10.6 4.9-10.6 10.9 0 6.1 4.7 11 10.6 11 5.1 0 9.3-3.7 10.3-8.7h-3c-.9 3.3-3.8 5.7-7.3 5.7"/><path fill="#435363" d="M12.8 23.2c1.3 1.4 3.1 2.3 5.2 2.3v-3.2c-1.2 0-2.3-.5-3.1-1.3l-2.1 2.2"/><path fill="#F38B00" d="M10.6 17.8c0 1.1.3 2.2.6 3.1l2.9-1.3c-.3-.5-.4-1.1-.4-1.8 0-2.4 1.9-4.4 4.3-4.4v-3.2c-4.1 0-7.4 3.4-7.4 7.6"/><path fill="#435363" d="m20.6 10.7-1.1 3c.9.4 1.7 1.1 2.2 1.9H25c-.7-2.2-2.3-4-4.4-4.9"/><path fill="#F38B00" d="m19.5 22 1.1 2.9c2.1-.8 3.7-2.6 4.4-4.8h-3.3c-.5.8-1.3 1.5-2.2 1.9"/><path fill="#435363" d="M4.4 22.1c-.1-.2-.1-.3-.1-.5-.1-.2-.1-.3-.2-.5V21c0-.1 0-.3-.1-.4v-.5c-.1-.1-.1-.2-.1-.3-.1-.6-.1-1.3-.1-2H.9c0 .8 0 1.5.1 2.2 0 .2.1.4.1.6v.1c0 .2.1.4.1.5s0 .2.1.3v.3c.1.1.1.2.1.4 0 0 .1.1.1.2 0 .2 0 .3.1.4v.2c.2.7.5 1.3.7 2L5 23.8c-.2-.6-.4-1.1-.6-1.7"/><path fill="#F38B00" d="M18 32.6c-3.9 0-7.5-1.7-10.1-4.4l-2 2.2c3.1 3.2 7.3 5.2 12.1 5.2 8.7 0 15.8-6.8 16.9-15.5H32c-1.1 7-7 12.5-14 12.5M18 3.1c3.1 0 6.1 1.1 8.4 2.9l1.8-2.4C25.3 1.4 21.8.1 18 .1 10.7.1 4.5 4.8 2.1 11.4l2.7 1.1C6.8 7 12 3.1 18 3.1"/><path fill="#435363" d="M32 15.6h2.9c-.3-2.6-1.2-5-2.5-7.2L30 10c1 1.7 1.7 3.6 2 5.6"/><path fill="#F38B00" d="M28.7 15.6h2.9c-.8-5.1-4.1-9.3-8.6-11.1l-1.1 2.8c3.5 1.3 6 4.5 6.8 8.3"/><path fill="#435363" d="M18 6.5v-3c-5.9 0-10.9 3.8-12.9 9.1l2.7 1.1C9.4 9.5 13.3 6.5 18 6.5"/><path fill="#F38B00" d="M7 17.8H4.1c0 6.1 3.6 11.2 8.7 13.4l1.1-2.8C9.9 26.7 7 22.6 7 17.8"/><path fill="#435363" d="M18 29.2v3c6.9 0 12.6-5.3 13.6-12.1h-2.9c-1 5.2-5.4 9.1-10.7 9.1"/></g></svg>

View File

@ -14,6 +14,8 @@ import (
"code.gitea.io/gitea/modules/log"
"code.gitea.io/gitea/modules/setting"
"code.gitea.io/gitea/modules/web"
"code.gitea.io/gitea/routers/api/packages/cargo"
"code.gitea.io/gitea/routers/api/packages/chef"
"code.gitea.io/gitea/routers/api/packages/composer"
"code.gitea.io/gitea/routers/api/packages/conan"
"code.gitea.io/gitea/routers/api/packages/conda"
@ -53,6 +55,7 @@ func CommonRoutes(ctx gocontext.Context) *web.Route {
&auth.Basic{},
&nuget.Auth{},
&conan.Auth{},
&chef.Auth{},
}
if setting.Service.EnableReverseProxyAuth {
authMethods = append(authMethods, &auth.ReverseProxy{})
@ -71,6 +74,39 @@ func CommonRoutes(ctx gocontext.Context) *web.Route {
})
r.Group("/{username}", func() {
r.Group("/cargo", func() {
r.Group("/api/v1/crates", func() {
r.Get("", cargo.SearchPackages)
r.Put("/new", reqPackageAccess(perm.AccessModeWrite), cargo.UploadPackage)
r.Group("/{package}", func() {
r.Group("/{version}", func() {
r.Get("/download", cargo.DownloadPackageFile)
r.Delete("/yank", reqPackageAccess(perm.AccessModeWrite), cargo.YankPackage)
r.Put("/unyank", reqPackageAccess(perm.AccessModeWrite), cargo.UnyankPackage)
})
r.Get("/owners", cargo.ListOwners)
})
})
}, reqPackageAccess(perm.AccessModeRead))
r.Group("/chef", func() {
r.Group("/api/v1", func() {
r.Get("/universe", chef.PackagesUniverse)
r.Get("/search", chef.EnumeratePackages)
r.Group("/cookbooks", func() {
r.Get("", chef.EnumeratePackages)
r.Post("", reqPackageAccess(perm.AccessModeWrite), chef.UploadPackage)
r.Group("/{name}", func() {
r.Get("", chef.PackageMetadata)
r.Group("/versions/{version}", func() {
r.Get("", chef.PackageVersionMetadata)
r.Delete("", reqPackageAccess(perm.AccessModeWrite), chef.DeletePackageVersion)
r.Get("/download", chef.DownloadPackage)
})
r.Delete("", reqPackageAccess(perm.AccessModeWrite), chef.DeletePackage)
})
})
})
}, reqPackageAccess(perm.AccessModeRead))
r.Group("/composer", func() {
r.Get("/packages.json", composer.ServiceIndex)
r.Get("/search.json", composer.SearchPackages)

View File

@ -0,0 +1,281 @@
// Copyright 2022 The Gitea Authors. All rights reserved.
// SPDX-License-Identifier: MIT
package cargo
import (
"fmt"
"net/http"
"strconv"
"strings"
"code.gitea.io/gitea/models/db"
packages_model "code.gitea.io/gitea/models/packages"
"code.gitea.io/gitea/modules/context"
"code.gitea.io/gitea/modules/log"
packages_module "code.gitea.io/gitea/modules/packages"
cargo_module "code.gitea.io/gitea/modules/packages/cargo"
"code.gitea.io/gitea/modules/util"
"code.gitea.io/gitea/routers/api/packages/helper"
"code.gitea.io/gitea/services/convert"
packages_service "code.gitea.io/gitea/services/packages"
cargo_service "code.gitea.io/gitea/services/packages/cargo"
)
// https://doc.rust-lang.org/cargo/reference/registries.html#web-api
type StatusResponse struct {
OK bool `json:"ok"`
Errors []StatusMessage `json:"errors,omitempty"`
}
type StatusMessage struct {
Message string `json:"detail"`
}
func apiError(ctx *context.Context, status int, obj interface{}) {
helper.LogAndProcessError(ctx, status, obj, func(message string) {
ctx.JSON(status, StatusResponse{
OK: false,
Errors: []StatusMessage{
{
Message: message,
},
},
})
})
}
type SearchResult struct {
Crates []*SearchResultCrate `json:"crates"`
Meta SearchResultMeta `json:"meta"`
}
type SearchResultCrate struct {
Name string `json:"name"`
LatestVersion string `json:"max_version"`
Description string `json:"description"`
}
type SearchResultMeta struct {
Total int64 `json:"total"`
}
// https://doc.rust-lang.org/cargo/reference/registries.html#search
func SearchPackages(ctx *context.Context) {
page := ctx.FormInt("page")
if page < 1 {
page = 1
}
perPage := ctx.FormInt("per_page")
paginator := db.ListOptions{
Page: page,
PageSize: convert.ToCorrectPageSize(perPage),
}
pvs, total, err := packages_model.SearchLatestVersions(
ctx,
&packages_model.PackageSearchOptions{
OwnerID: ctx.Package.Owner.ID,
Type: packages_model.TypeCargo,
Name: packages_model.SearchValue{Value: ctx.FormTrim("q")},
IsInternal: util.OptionalBoolFalse,
Paginator: &paginator,
},
)
if err != nil {
apiError(ctx, http.StatusInternalServerError, err)
return
}
pds, err := packages_model.GetPackageDescriptors(ctx, pvs)
if err != nil {
apiError(ctx, http.StatusInternalServerError, err)
return
}
crates := make([]*SearchResultCrate, 0, len(pvs))
for _, pd := range pds {
crates = append(crates, &SearchResultCrate{
Name: pd.Package.Name,
LatestVersion: pd.Version.Version,
Description: pd.Metadata.(*cargo_module.Metadata).Description,
})
}
ctx.JSON(http.StatusOK, SearchResult{
Crates: crates,
Meta: SearchResultMeta{
Total: total,
},
})
}
type Owners struct {
Users []OwnerUser `json:"users"`
}
type OwnerUser struct {
ID int64 `json:"id"`
Login string `json:"login"`
Name string `json:"name"`
}
// https://doc.rust-lang.org/cargo/reference/registries.html#owners-list
func ListOwners(ctx *context.Context) {
ctx.JSON(http.StatusOK, Owners{
Users: []OwnerUser{
{
ID: ctx.Package.Owner.ID,
Login: ctx.Package.Owner.Name,
Name: ctx.Package.Owner.DisplayName(),
},
},
})
}
// DownloadPackageFile serves the content of a package
func DownloadPackageFile(ctx *context.Context) {
s, pf, err := packages_service.GetFileStreamByPackageNameAndVersion(
ctx,
&packages_service.PackageInfo{
Owner: ctx.Package.Owner,
PackageType: packages_model.TypeCargo,
Name: ctx.Params("package"),
Version: ctx.Params("version"),
},
&packages_service.PackageFileInfo{
Filename: strings.ToLower(fmt.Sprintf("%s-%s.crate", ctx.Params("package"), ctx.Params("version"))),
},
)
if err != nil {
if err == packages_model.ErrPackageNotExist || err == packages_model.ErrPackageFileNotExist {
apiError(ctx, http.StatusNotFound, err)
return
}
apiError(ctx, http.StatusInternalServerError, err)
return
}
defer s.Close()
ctx.ServeContent(s, &context.ServeHeaderOptions{
Filename: pf.Name,
LastModified: pf.CreatedUnix.AsLocalTime(),
})
}
// https://doc.rust-lang.org/cargo/reference/registries.html#publish
func UploadPackage(ctx *context.Context) {
defer ctx.Req.Body.Close()
cp, err := cargo_module.ParsePackage(ctx.Req.Body)
if err != nil {
apiError(ctx, http.StatusBadRequest, err)
return
}
buf, err := packages_module.CreateHashedBufferFromReader(cp.Content, 32*1024*1024)
if err != nil {
apiError(ctx, http.StatusInternalServerError, err)
return
}
defer buf.Close()
if buf.Size() != cp.ContentSize {
apiError(ctx, http.StatusBadRequest, "invalid content size")
return
}
pv, _, err := packages_service.CreatePackageAndAddFile(
&packages_service.PackageCreationInfo{
PackageInfo: packages_service.PackageInfo{
Owner: ctx.Package.Owner,
PackageType: packages_model.TypeCargo,
Name: cp.Name,
Version: cp.Version,
},
SemverCompatible: true,
Creator: ctx.Doer,
Metadata: cp.Metadata,
VersionProperties: map[string]string{
cargo_module.PropertyYanked: strconv.FormatBool(false),
},
},
&packages_service.PackageFileCreationInfo{
PackageFileInfo: packages_service.PackageFileInfo{
Filename: strings.ToLower(fmt.Sprintf("%s-%s.crate", cp.Name, cp.Version)),
},
Creator: ctx.Doer,
Data: buf,
IsLead: true,
},
)
if err != nil {
switch err {
case packages_model.ErrDuplicatePackageVersion:
apiError(ctx, http.StatusConflict, err)
case packages_service.ErrQuotaTotalCount, packages_service.ErrQuotaTypeSize, packages_service.ErrQuotaTotalSize:
apiError(ctx, http.StatusForbidden, err)
default:
apiError(ctx, http.StatusInternalServerError, err)
}
return
}
if err := cargo_service.AddOrUpdatePackageIndex(ctx, ctx.Doer, ctx.Package.Owner, pv.PackageID); err != nil {
if err := packages_service.DeletePackageVersionAndReferences(ctx, pv); err != nil {
log.Error("Rollback creation of package version: %v", err)
}
apiError(ctx, http.StatusInternalServerError, err)
return
}
ctx.JSON(http.StatusOK, StatusResponse{OK: true})
}
// https://doc.rust-lang.org/cargo/reference/registries.html#yank
func YankPackage(ctx *context.Context) {
yankPackage(ctx, true)
}
// https://doc.rust-lang.org/cargo/reference/registries.html#unyank
func UnyankPackage(ctx *context.Context) {
yankPackage(ctx, false)
}
func yankPackage(ctx *context.Context, yank bool) {
pv, err := packages_model.GetVersionByNameAndVersion(ctx, ctx.Package.Owner.ID, packages_model.TypeCargo, ctx.Params("package"), ctx.Params("version"))
if err != nil {
if err == packages_model.ErrPackageNotExist {
apiError(ctx, http.StatusNotFound, err)
return
}
apiError(ctx, http.StatusInternalServerError, err)
return
}
pps, err := packages_model.GetPropertiesByName(ctx, packages_model.PropertyTypeVersion, pv.ID, cargo_module.PropertyYanked)
if err != nil {
apiError(ctx, http.StatusInternalServerError, err)
return
}
if len(pps) == 0 {
apiError(ctx, http.StatusInternalServerError, "Property not found")
return
}
pp := pps[0]
pp.Value = strconv.FormatBool(yank)
if err := packages_model.UpdateProperty(ctx, pp); err != nil {
apiError(ctx, http.StatusInternalServerError, err)
return
}
if err := cargo_service.AddOrUpdatePackageIndex(ctx, ctx.Doer, ctx.Package.Owner, pv.PackageID); err != nil {
apiError(ctx, http.StatusInternalServerError, err)
return
}
ctx.JSON(http.StatusOK, StatusResponse{OK: true})
}

View File

@ -0,0 +1,270 @@
// Copyright 2023 The Gitea Authors. All rights reserved.
// SPDX-License-Identifier: MIT
package chef
import (
"crypto"
"crypto/rsa"
"crypto/sha1"
"crypto/sha256"
"crypto/x509"
"encoding/base64"
"encoding/pem"
"fmt"
"hash"
"math/big"
"net/http"
"path"
"regexp"
"strconv"
"strings"
"time"
user_model "code.gitea.io/gitea/models/user"
chef_module "code.gitea.io/gitea/modules/packages/chef"
"code.gitea.io/gitea/modules/util"
"code.gitea.io/gitea/services/auth"
)
const (
maxTimeDifference = 10 * time.Minute
)
var (
algorithmPattern = regexp.MustCompile(`algorithm=(\w+)`)
versionPattern = regexp.MustCompile(`version=(\d+\.\d+)`)
authorizationPattern = regexp.MustCompile(`\AX-Ops-Authorization-(\d+)`)
)
// Documentation:
// https://docs.chef.io/server/api_chef_server/#required-headers
// https://github.com/chef-boneyard/chef-rfc/blob/master/rfc065-sign-v1.3.md
// https://github.com/chef/mixlib-authentication/blob/bc8adbef833d4be23dc78cb23e6fe44b51ebc34f/lib/mixlib/authentication/signedheaderauth.rb
type Auth struct{}
func (a *Auth) Name() string {
return "chef"
}
// Verify extracts the user from the signed request.
// If the request is signed with the user's private key, the user is verified.
func (a *Auth) Verify(req *http.Request, w http.ResponseWriter, store auth.DataStore, sess auth.SessionStore) (*user_model.User, error) {
u, err := getUserFromRequest(req)
if err != nil {
return nil, err
}
if u == nil {
return nil, nil
}
pub, err := getUserPublicKey(u)
if err != nil {
return nil, err
}
if err := verifyTimestamp(req); err != nil {
return nil, err
}
version, err := getSignVersion(req)
if err != nil {
return nil, err
}
if err := verifySignedHeaders(req, version, pub.(*rsa.PublicKey)); err != nil {
return nil, err
}
return u, nil
}
func getUserFromRequest(req *http.Request) (*user_model.User, error) {
username := req.Header.Get("X-Ops-Userid")
if username == "" {
return nil, nil
}
return user_model.GetUserByName(req.Context(), username)
}
func getUserPublicKey(u *user_model.User) (crypto.PublicKey, error) {
pubKey, err := user_model.GetSetting(u.ID, chef_module.SettingPublicPem)
if err != nil {
return nil, err
}
pubPem, _ := pem.Decode([]byte(pubKey))
if pubPem == nil {
return nil, util.NewInvalidArgumentErrorf("no valid public key found")
}
return x509.ParsePKIXPublicKey(pubPem.Bytes)
}
func verifyTimestamp(req *http.Request) error {
hdr := req.Header.Get("X-Ops-Timestamp")
if hdr == "" {
return util.NewInvalidArgumentErrorf("X-Ops-Timestamp header missing")
}
ts, err := time.Parse(time.RFC3339, hdr)
if err != nil {
return err
}
diff := time.Now().UTC().Sub(ts)
if diff < 0 {
diff = -diff
}
if diff > maxTimeDifference {
return fmt.Errorf("time difference")
}
return nil
}
func getSignVersion(req *http.Request) (string, error) {
hdr := req.Header.Get("X-Ops-Sign")
if hdr == "" {
return "", util.NewInvalidArgumentErrorf("X-Ops-Sign header missing")
}
m := versionPattern.FindStringSubmatch(hdr)
if len(m) != 2 {
return "", util.NewInvalidArgumentErrorf("invalid X-Ops-Sign header")
}
switch m[1] {
case "1.0", "1.1", "1.2", "1.3":
default:
return "", util.NewInvalidArgumentErrorf("unsupported version")
}
version := m[1]
m = algorithmPattern.FindStringSubmatch(hdr)
if len(m) == 2 && m[1] != "sha1" && !(m[1] == "sha256" && version == "1.3") {
return "", util.NewInvalidArgumentErrorf("unsupported algorithm")
}
return version, nil
}
func verifySignedHeaders(req *http.Request, version string, pub *rsa.PublicKey) error {
authorizationData, err := getAuthorizationData(req)
if err != nil {
return err
}
checkData := buildCheckData(req, version)
switch version {
case "1.3":
return verifyDataNew(authorizationData, checkData, pub, crypto.SHA256)
case "1.2":
return verifyDataNew(authorizationData, checkData, pub, crypto.SHA1)
default:
return verifyDataOld(authorizationData, checkData, pub)
}
}
func getAuthorizationData(req *http.Request) ([]byte, error) {
valueList := make(map[int]string)
for k, vs := range req.Header {
if m := authorizationPattern.FindStringSubmatch(k); m != nil {
index, _ := strconv.Atoi(m[1])
var v string
if len(vs) == 0 {
v = ""
} else {
v = vs[0]
}
valueList[index] = v
}
}
tmp := make([]string, len(valueList))
for k, v := range valueList {
if k < 1 || k > len(tmp) {
return nil, fmt.Errorf("invalid X-Ops-Authorization headers")
}
tmp[k-1] = v
}
return base64.StdEncoding.DecodeString(strings.Join(tmp, ""))
}
func buildCheckData(req *http.Request, version string) []byte {
username := req.Header.Get("X-Ops-Userid")
if version != "1.0" && version != "1.3" {
sum := sha1.Sum([]byte(username))
username = base64.StdEncoding.EncodeToString(sum[:])
}
var data string
if version == "1.3" {
data = fmt.Sprintf(
"Method:%s\nPath:%s\nX-Ops-Content-Hash:%s\nX-Ops-Sign:version=%s\nX-Ops-Timestamp:%s\nX-Ops-UserId:%s\nX-Ops-Server-API-Version:%s",
req.Method,
path.Clean(req.URL.Path),
req.Header.Get("X-Ops-Content-Hash"),
version,
req.Header.Get("X-Ops-Timestamp"),
username,
req.Header.Get("X-Ops-Server-Api-Version"),
)
} else {
sum := sha1.Sum([]byte(path.Clean(req.URL.Path)))
data = fmt.Sprintf(
"Method:%s\nHashed Path:%s\nX-Ops-Content-Hash:%s\nX-Ops-Timestamp:%s\nX-Ops-UserId:%s",
req.Method,
base64.StdEncoding.EncodeToString(sum[:]),
req.Header.Get("X-Ops-Content-Hash"),
req.Header.Get("X-Ops-Timestamp"),
username,
)
}
return []byte(data)
}
func verifyDataNew(signature, data []byte, pub *rsa.PublicKey, algo crypto.Hash) error {
var h hash.Hash
if algo == crypto.SHA256 {
h = sha256.New()
} else {
h = sha1.New()
}
if _, err := h.Write(data); err != nil {
return err
}
return rsa.VerifyPKCS1v15(pub, algo, h.Sum(nil), signature)
}
func verifyDataOld(signature, data []byte, pub *rsa.PublicKey) error {
c := new(big.Int)
m := new(big.Int)
m.SetBytes(signature)
e := big.NewInt(int64(pub.E))
c.Exp(m, e, pub.N)
out := c.Bytes()
skip := 0
for i := 2; i < len(out); i++ {
if i+1 >= len(out) {
break
}
if out[i] == 0xFF && out[i+1] == 0 {
skip = i + 2
break
}
}
if !util.SliceEqual(out[skip:], data) {
return fmt.Errorf("could not verify signature")
}
return nil
}
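
For context, here is a client-side sketch of producing headers that verifySignedHeaders accepts for version 1.3. The helper name is hypothetical, and the 60-character chunking of the Base64 signature follows the usual mixlib-authentication convention; treat both as assumptions rather than part of this change:

package main

import (
	"crypto"
	"crypto/rand"
	"crypto/rsa"
	"crypto/sha256"
	"encoding/base64"
	"fmt"
	"net/http"
	"path"
)

// signRequestV13 signs the v1.3 canonical string with RSA-SHA256 and attaches
// the signature as chunked X-Ops-Authorization-N headers. The caller must have
// set X-Ops-Timestamp, X-Ops-Content-Hash and X-Ops-Server-Api-Version first.
func signRequestV13(req *http.Request, username string, key *rsa.PrivateKey) error {
	req.Header.Set("X-Ops-Userid", username)
	req.Header.Set("X-Ops-Sign", "version=1.3")
	canonical := fmt.Sprintf(
		"Method:%s\nPath:%s\nX-Ops-Content-Hash:%s\nX-Ops-Sign:version=1.3\nX-Ops-Timestamp:%s\nX-Ops-UserId:%s\nX-Ops-Server-API-Version:%s",
		req.Method,
		path.Clean(req.URL.Path),
		req.Header.Get("X-Ops-Content-Hash"),
		req.Header.Get("X-Ops-Timestamp"),
		username,
		req.Header.Get("X-Ops-Server-Api-Version"),
	)
	sum := sha256.Sum256([]byte(canonical))
	sig, err := rsa.SignPKCS1v15(rand.Reader, key, crypto.SHA256, sum[:])
	if err != nil {
		return err
	}
	encoded := base64.StdEncoding.EncodeToString(sig)
	for i := 1; len(encoded) > 0; i++ {
		n := 60
		if len(encoded) < n {
			n = len(encoded)
		}
		req.Header.Set(fmt.Sprintf("X-Ops-Authorization-%d", i), encoded[:n])
		encoded = encoded[n:]
	}
	return nil
}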

View File

@ -0,0 +1,404 @@
// Copyright 2023 The Gitea Authors. All rights reserved.
// SPDX-License-Identifier: MIT
package chef
import (
"errors"
"fmt"
"io"
"net/http"
"net/url"
"sort"
"strings"
"time"
"code.gitea.io/gitea/models/db"
packages_model "code.gitea.io/gitea/models/packages"
"code.gitea.io/gitea/modules/context"
packages_module "code.gitea.io/gitea/modules/packages"
chef_module "code.gitea.io/gitea/modules/packages/chef"
"code.gitea.io/gitea/modules/setting"
"code.gitea.io/gitea/modules/util"
"code.gitea.io/gitea/routers/api/packages/helper"
packages_service "code.gitea.io/gitea/services/packages"
)
func apiError(ctx *context.Context, status int, obj interface{}) {
type Error struct {
ErrorMessages []string `json:"error_messages"`
}
helper.LogAndProcessError(ctx, status, obj, func(message string) {
ctx.JSON(status, Error{
ErrorMessages: []string{message},
})
})
}
func PackagesUniverse(ctx *context.Context) {
pvs, _, err := packages_model.SearchVersions(ctx, &packages_model.PackageSearchOptions{
OwnerID: ctx.Package.Owner.ID,
Type: packages_model.TypeChef,
IsInternal: util.OptionalBoolFalse,
})
if err != nil {
apiError(ctx, http.StatusInternalServerError, err)
return
}
pds, err := packages_model.GetPackageDescriptors(ctx, pvs)
if err != nil {
apiError(ctx, http.StatusInternalServerError, err)
return
}
type VersionInfo struct {
LocationType string `json:"location_type"`
LocationPath string `json:"location_path"`
DownloadURL string `json:"download_url"`
Dependencies map[string]string `json:"dependencies"`
}
baseURL := setting.AppURL + "api/packages/" + ctx.Package.Owner.Name + "/chef/api/v1"
universe := make(map[string]map[string]*VersionInfo)
for _, pd := range pds {
if _, ok := universe[pd.Package.Name]; !ok {
universe[pd.Package.Name] = make(map[string]*VersionInfo)
}
universe[pd.Package.Name][pd.Version.Version] = &VersionInfo{
LocationType: "opscode",
LocationPath: baseURL,
DownloadURL: fmt.Sprintf("%s/cookbooks/%s/versions/%s/download", baseURL, url.PathEscape(pd.Package.Name), pd.Version.Version),
Dependencies: pd.Metadata.(*chef_module.Metadata).Dependencies,
}
}
ctx.JSON(http.StatusOK, universe)
}
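// Sketch of a standalone client reading the cookbook universe rendered above. The
// host, owner name ("someuser"), and the /universe path are assumptions (the route
// registration is not part of this excerpt); the struct mirrors the VersionInfo
// fields emitted by PackagesUniverse.
//
// package main
//
// import (
// 	"encoding/json"
// 	"fmt"
// 	"net/http"
// )
type universeVersion struct {
	LocationType string            `json:"location_type"`
	LocationPath string            `json:"location_path"`
	DownloadURL  string            `json:"download_url"`
	Dependencies map[string]string `json:"dependencies"`
}
func fetchUniverseSketch() error {
	resp, err := http.Get("https://gitea.example.com/api/packages/someuser/chef/universe")
	if err != nil {
		return err
	}
	defer resp.Body.Close()
	universe := make(map[string]map[string]universeVersion)
	if err := json.NewDecoder(resp.Body).Decode(&universe); err != nil {
		return err
	}
	for name, versions := range universe {
		for version, info := range versions {
			fmt.Println(name, version, info.DownloadURL)
		}
	}
	return nil
}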
// https://github.com/chef/chef/blob/main/knife/lib/chef/knife/supermarket_list.rb
// https://github.com/chef/chef/blob/main/knife/lib/chef/knife/supermarket_search.rb
func EnumeratePackages(ctx *context.Context) {
opts := &packages_model.PackageSearchOptions{
OwnerID: ctx.Package.Owner.ID,
Type: packages_model.TypeChef,
Name: packages_model.SearchValue{Value: ctx.FormTrim("q")},
IsInternal: util.OptionalBoolFalse,
Paginator: db.NewAbsoluteListOptions(
ctx.FormInt("start"),
ctx.FormInt("items"),
),
}
switch strings.ToLower(ctx.FormTrim("order")) {
case "recently_updated", "recently_added":
opts.Sort = packages_model.SortCreatedDesc
default:
opts.Sort = packages_model.SortNameAsc
}
pvs, total, err := packages_model.SearchLatestVersions(ctx, opts)
if err != nil {
apiError(ctx, http.StatusInternalServerError, err)
return
}
pds, err := packages_model.GetPackageDescriptors(ctx, pvs)
if err != nil {
apiError(ctx, http.StatusInternalServerError, err)
return
}
type Item struct {
CookbookName string `json:"cookbook_name"`
CookbookMaintainer string `json:"cookbook_maintainer"`
CookbookDescription string `json:"cookbook_description"`
Cookbook string `json:"cookbook"`
}
type Result struct {
Start int `json:"start"`
Total int `json:"total"`
Items []*Item `json:"items"`
}
baseURL := setting.AppURL + "api/packages/" + ctx.Package.Owner.Name + "/chef/api/v1/cookbooks/"
items := make([]*Item, 0, len(pds))
for _, pd := range pds {
metadata := pd.Metadata.(*chef_module.Metadata)
items = append(items, &Item{
CookbookName: pd.Package.Name,
CookbookMaintainer: metadata.Author,
CookbookDescription: metadata.Description,
Cookbook: baseURL + url.PathEscape(pd.Package.Name),
})
}
skip, _ := opts.Paginator.GetSkipTake()
ctx.JSON(http.StatusOK, &Result{
Start: skip,
Total: int(total),
Items: items,
})
}
// https://github.com/chef/chef/blob/main/knife/lib/chef/knife/supermarket_show.rb
func PackageMetadata(ctx *context.Context) {
packageName := ctx.Params("name")
pvs, err := packages_model.GetVersionsByPackageName(ctx, ctx.Package.Owner.ID, packages_model.TypeChef, packageName)
if err != nil {
apiError(ctx, http.StatusInternalServerError, err)
return
}
if len(pvs) == 0 {
apiError(ctx, http.StatusNotFound, nil)
return
}
pds, err := packages_model.GetPackageDescriptors(ctx, pvs)
if err != nil {
apiError(ctx, http.StatusInternalServerError, err)
return
}
sort.Slice(pds, func(i, j int) bool {
return pds[i].SemVer.LessThan(pds[j].SemVer)
})
type Result struct {
Name string `json:"name"`
Maintainer string `json:"maintainer"`
Description string `json:"description"`
Category string `json:"category"`
LatestVersion string `json:"latest_version"`
SourceURL string `json:"source_url"`
CreatedAt time.Time `json:"created_at"`
UpdatedAt time.Time `json:"updated_at"`
Deprecated bool `json:"deprecated"`
Versions []string `json:"versions"`
}
baseURL := fmt.Sprintf("%sapi/packages/%s/chef/api/v1/cookbooks/%s/versions/", setting.AppURL, ctx.Package.Owner.Name, url.PathEscape(packageName))
versions := make([]string, 0, len(pds))
for _, pd := range pds {
versions = append(versions, baseURL+pd.Version.Version)
}
latest := pds[len(pds)-1]
metadata := latest.Metadata.(*chef_module.Metadata)
ctx.JSON(http.StatusOK, &Result{
Name: latest.Package.Name,
Maintainer: metadata.Author,
Description: metadata.Description,
LatestVersion: baseURL + latest.Version.Version,
SourceURL: metadata.RepositoryURL,
CreatedAt: latest.Version.CreatedUnix.AsLocalTime(),
UpdatedAt: latest.Version.CreatedUnix.AsLocalTime(),
Deprecated: false,
Versions: versions,
})
}
// https://github.com/chef/chef/blob/main/knife/lib/chef/knife/supermarket_show.rb
func PackageVersionMetadata(ctx *context.Context) {
packageName := ctx.Params("name")
packageVersion := strings.ReplaceAll(ctx.Params("version"), "_", ".") // Chef calls this endpoint with "_" instead of "."?!
pv, err := packages_model.GetVersionByNameAndVersion(ctx, ctx.Package.Owner.ID, packages_model.TypeChef, packageName, packageVersion)
if err != nil {
if err == packages_model.ErrPackageNotExist {
apiError(ctx, http.StatusNotFound, err)
return
}
apiError(ctx, http.StatusInternalServerError, err)
return
}
pd, err := packages_model.GetPackageDescriptor(ctx, pv)
if err != nil {
apiError(ctx, http.StatusInternalServerError, err)
return
}
type Result struct {
Version string `json:"version"`
TarballFileSize int64 `json:"tarball_file_size"`
PublishedAt time.Time `json:"published_at"`
Cookbook string `json:"cookbook"`
File string `json:"file"`
License string `json:"license"`
Dependencies map[string]string `json:"dependencies"`
}
baseURL := fmt.Sprintf("%sapi/packages/%s/chef/api/v1/cookbooks/%s", setting.AppURL, ctx.Package.Owner.Name, url.PathEscape(pd.Package.Name))
metadata := pd.Metadata.(*chef_module.Metadata)
ctx.JSON(http.StatusOK, &Result{
Version: pd.Version.Version,
TarballFileSize: pd.Files[0].Blob.Size,
PublishedAt: pd.Version.CreatedUnix.AsLocalTime(),
Cookbook: baseURL,
File: fmt.Sprintf("%s/versions/%s/download", baseURL, pd.Version.Version),
License: metadata.License,
Dependencies: metadata.Dependencies,
})
}
// https://github.com/chef/chef/blob/main/knife/lib/chef/knife/supermarket_share.rb
func UploadPackage(ctx *context.Context) {
file, _, err := ctx.Req.FormFile("tarball")
if err != nil {
apiError(ctx, http.StatusBadRequest, err)
return
}
defer file.Close()
buf, err := packages_module.CreateHashedBufferFromReader(file, 32*1024*1024)
if err != nil {
apiError(ctx, http.StatusInternalServerError, err)
return
}
defer buf.Close()
pck, err := chef_module.ParsePackage(buf)
if err != nil {
if errors.Is(err, util.ErrInvalidArgument) {
apiError(ctx, http.StatusBadRequest, err)
} else {
apiError(ctx, http.StatusInternalServerError, err)
}
return
}
if _, err := buf.Seek(0, io.SeekStart); err != nil {
apiError(ctx, http.StatusInternalServerError, err)
return
}
_, _, err = packages_service.CreatePackageAndAddFile(
&packages_service.PackageCreationInfo{
PackageInfo: packages_service.PackageInfo{
Owner: ctx.Package.Owner,
PackageType: packages_model.TypeChef,
Name: pck.Name,
Version: pck.Version,
},
Creator: ctx.Doer,
SemverCompatible: true,
Metadata: pck.Metadata,
},
&packages_service.PackageFileCreationInfo{
PackageFileInfo: packages_service.PackageFileInfo{
Filename: strings.ToLower(pck.Version + ".tar.gz"),
},
Creator: ctx.Doer,
Data: buf,
IsLead: true,
},
)
if err != nil {
switch err {
case packages_model.ErrDuplicatePackageVersion:
apiError(ctx, http.StatusBadRequest, err)
case packages_service.ErrQuotaTotalCount, packages_service.ErrQuotaTypeSize, packages_service.ErrQuotaTotalSize:
apiError(ctx, http.StatusForbidden, err)
default:
apiError(ctx, http.StatusInternalServerError, err)
}
return
}
ctx.JSON(http.StatusCreated, make(map[any]any))
}
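// Sketch of uploading a cookbook tarball the way UploadPackage above expects it: a
// multipart form with a "tarball" file field. Only that field name comes from the
// handler; the target URL is an assumption, and the signed X-Ops-* headers required
// by the auth code earlier in this diff are omitted.
//
// Assumed imports: bytes, io, mime/multipart, net/http, os.
func uploadCookbookSketch(path string) error {
	f, err := os.Open(path)
	if err != nil {
		return err
	}
	defer f.Close()
	var body bytes.Buffer
	mw := multipart.NewWriter(&body)
	part, err := mw.CreateFormFile("tarball", filepath.Base(path))
	if err != nil {
		return err
	}
	if _, err := io.Copy(part, f); err != nil {
		return err
	}
	if err := mw.Close(); err != nil {
		return err
	}
	req, err := http.NewRequest(http.MethodPost, "https://gitea.example.com/api/packages/someuser/chef/api/v1/cookbooks", &body)
	if err != nil {
		return err
	}
	req.Header.Set("Content-Type", mw.FormDataContentType())
	resp, err := http.DefaultClient.Do(req)
	if err != nil {
		return err
	}
	return resp.Body.Close()
}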
// https://github.com/chef/chef/blob/main/knife/lib/chef/knife/supermarket_download.rb
func DownloadPackage(ctx *context.Context) {
pv, err := packages_model.GetVersionByNameAndVersion(ctx, ctx.Package.Owner.ID, packages_model.TypeChef, ctx.Params("name"), ctx.Params("version"))
if err != nil {
if err == packages_model.ErrPackageNotExist {
apiError(ctx, http.StatusNotFound, err)
return
}
apiError(ctx, http.StatusInternalServerError, err)
return
}
pd, err := packages_model.GetPackageDescriptor(ctx, pv)
if err != nil {
apiError(ctx, http.StatusInternalServerError, err)
return
}
pf := pd.Files[0].File
s, _, err := packages_service.GetPackageFileStream(ctx, pf)
if err != nil {
apiError(ctx, http.StatusInternalServerError, err)
return
}
defer s.Close()
ctx.ServeContent(s, &context.ServeHeaderOptions{
Filename: pf.Name,
LastModified: pf.CreatedUnix.AsLocalTime(),
})
}
// https://github.com/chef/chef/blob/main/knife/lib/chef/knife/supermarket_unshare.rb
func DeletePackageVersion(ctx *context.Context) {
packageName := ctx.Params("name")
packageVersion := ctx.Params("version")
err := packages_service.RemovePackageVersionByNameAndVersion(
ctx.Doer,
&packages_service.PackageInfo{
Owner: ctx.Package.Owner,
PackageType: packages_model.TypeChef,
Name: packageName,
Version: packageVersion,
},
)
if err != nil {
if err == packages_model.ErrPackageNotExist {
apiError(ctx, http.StatusNotFound, err)
} else {
apiError(ctx, http.StatusInternalServerError, err)
}
return
}
ctx.Status(http.StatusOK)
}
// https://github.com/chef/chef/blob/main/knife/lib/chef/knife/supermarket_unshare.rb
func DeletePackage(ctx *context.Context) {
pvs, err := packages_model.GetVersionsByPackageName(ctx, ctx.Package.Owner.ID, packages_model.TypeChef, ctx.Params("name"))
if err != nil {
apiError(ctx, http.StatusInternalServerError, err)
return
}
if len(pvs) == 0 {
apiError(ctx, http.StatusNotFound, err)
return
}
for _, pv := range pvs {
if err := packages_service.RemovePackageVersion(ctx.Doer, pv); err != nil {
apiError(ctx, http.StatusInternalServerError, err)
return
}
}
ctx.Status(http.StatusOK)
}

View File

@ -22,19 +22,23 @@ import (
"code.gitea.io/gitea/modules/log"
packages_module "code.gitea.io/gitea/modules/packages"
container_module "code.gitea.io/gitea/modules/packages/container"
"code.gitea.io/gitea/modules/packages/container/oci"
"code.gitea.io/gitea/modules/setting"
"code.gitea.io/gitea/modules/util"
"code.gitea.io/gitea/routers/api/packages/helper"
packages_service "code.gitea.io/gitea/services/packages"
container_service "code.gitea.io/gitea/services/packages/container"
digest "github.com/opencontainers/go-digest"
)
// maximum size of a container manifest
// https://github.com/opencontainers/distribution-spec/blob/main/spec.md#pushing-manifests
const maxManifestSize = 10 * 1024 * 1024
var imageNamePattern = regexp.MustCompile(`\A[a-z0-9]+([._-][a-z0-9]+)*(/[a-z0-9]+([._-][a-z0-9]+)*)*\z`)
var (
imageNamePattern = regexp.MustCompile(`\A[a-z0-9]+([._-][a-z0-9]+)*(/[a-z0-9]+([._-][a-z0-9]+)*)*\z`)
referencePattern = regexp.MustCompile(`\A[a-zA-Z0-9_][a-zA-Z0-9._-]{0,127}\z`)
)
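// Sketch (not in the diff): how the replacements above classify a reference, using
// go-digest validation for digests and the local regexp for tags, mirroring the
// logic in getBlobSearchOptionsFromContext below.
func classifyReference(ref string) string {
	if digest.Digest(ref).Validate() == nil {
		return "digest"
	}
	if referencePattern.MatchString(ref) {
		return "tag"
	}
	return "invalid"
}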
type containerHeaders struct {
Status int
@ -434,16 +438,16 @@ func CancelUploadBlob(ctx *context.Context) {
}
func getBlobFromContext(ctx *context.Context) (*packages_model.PackageFileDescriptor, error) {
digest := ctx.Params("digest")
d := ctx.Params("digest")
if !oci.Digest(digest).Validate() {
if digest.Digest(d).Validate() != nil {
return nil, container_model.ErrContainerBlobNotExist
}
return workaroundGetContainerBlob(ctx, &container_model.BlobSearchOptions{
OwnerID: ctx.Package.Owner.ID,
Image: ctx.Params("image"),
Digest: digest,
Digest: d,
})
}
@ -498,14 +502,14 @@ func GetBlob(ctx *context.Context) {
// https://github.com/opencontainers/distribution-spec/blob/main/spec.md#deleting-blobs
func DeleteBlob(ctx *context.Context) {
digest := ctx.Params("digest")
d := ctx.Params("digest")
if !oci.Digest(digest).Validate() {
if digest.Digest(d).Validate() != nil {
apiErrorDefined(ctx, errBlobUnknown)
return
}
if err := deleteBlob(ctx.Package.Owner.ID, ctx.Params("image"), digest); err != nil {
if err := deleteBlob(ctx.Package.Owner.ID, ctx.Params("image"), d); err != nil {
apiError(ctx, http.StatusInternalServerError, err)
return
}
@ -520,15 +524,15 @@ func UploadManifest(ctx *context.Context) {
reference := ctx.Params("reference")
mci := &manifestCreationInfo{
MediaType: oci.MediaType(ctx.Req.Header.Get("Content-Type")),
MediaType: ctx.Req.Header.Get("Content-Type"),
Owner: ctx.Package.Owner,
Creator: ctx.Doer,
Image: ctx.Params("image"),
Reference: reference,
IsTagged: !oci.Digest(reference).Validate(),
IsTagged: digest.Digest(reference).Validate() != nil,
}
if mci.IsTagged && !oci.Reference(reference).Validate() {
if mci.IsTagged && !referencePattern.MatchString(reference) {
apiErrorDefined(ctx, errManifestInvalid.WithMessage("Tag is invalid"))
return
}
@ -571,7 +575,7 @@ func UploadManifest(ctx *context.Context) {
})
}
func getManifestFromContext(ctx *context.Context) (*packages_model.PackageFileDescriptor, error) {
func getBlobSearchOptionsFromContext(ctx *context.Context) (*container_model.BlobSearchOptions, error) {
reference := ctx.Params("reference")
opts := &container_model.BlobSearchOptions{
@ -579,14 +583,24 @@ func getManifestFromContext(ctx *context.Context) (*packages_model.PackageFileDe
Image: ctx.Params("image"),
IsManifest: true,
}
if oci.Digest(reference).Validate() {
if digest.Digest(reference).Validate() == nil {
opts.Digest = reference
} else if oci.Reference(reference).Validate() {
} else if referencePattern.MatchString(reference) {
opts.Tag = reference
} else {
return nil, container_model.ErrContainerBlobNotExist
}
return opts, nil
}
func getManifestFromContext(ctx *context.Context) (*packages_model.PackageFileDescriptor, error) {
opts, err := getBlobSearchOptionsFromContext(ctx)
if err != nil {
return nil, err
}
return workaroundGetContainerBlob(ctx, opts)
}
@ -643,18 +657,8 @@ func GetManifest(ctx *context.Context) {
// https://github.com/opencontainers/distribution-spec/blob/main/spec.md#deleting-tags
// https://github.com/opencontainers/distribution-spec/blob/main/spec.md#deleting-manifests
func DeleteManifest(ctx *context.Context) {
reference := ctx.Params("reference")
opts := &container_model.BlobSearchOptions{
OwnerID: ctx.Package.Owner.ID,
Image: ctx.Params("image"),
IsManifest: true,
}
if oci.Digest(reference).Validate() {
opts.Digest = reference
} else if oci.Reference(reference).Validate() {
opts.Tag = reference
} else {
opts, err := getBlobSearchOptionsFromContext(ctx)
if err != nil {
apiErrorDefined(ctx, errManifestUnknown)
return
}

View File

@ -19,14 +19,28 @@ import (
"code.gitea.io/gitea/modules/log"
packages_module "code.gitea.io/gitea/modules/packages"
container_module "code.gitea.io/gitea/modules/packages/container"
"code.gitea.io/gitea/modules/packages/container/oci"
"code.gitea.io/gitea/modules/util"
packages_service "code.gitea.io/gitea/services/packages"
digest "github.com/opencontainers/go-digest"
oci "github.com/opencontainers/image-spec/specs-go/v1"
)
func isValidMediaType(mt string) bool {
return strings.HasPrefix(mt, "application/vnd.docker.") || strings.HasPrefix(mt, "application/vnd.oci.")
}
func isImageManifestMediaType(mt string) bool {
return strings.EqualFold(mt, oci.MediaTypeImageManifest) || strings.EqualFold(mt, "application/vnd.docker.distribution.manifest.v2+json")
}
func isImageIndexMediaType(mt string) bool {
return strings.EqualFold(mt, oci.MediaTypeImageIndex) || strings.EqualFold(mt, "application/vnd.docker.distribution.manifest.list.v2+json")
}
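// For reference, oci.MediaTypeImageManifest and oci.MediaTypeImageIndex from the
// image-spec module are "application/vnd.oci.image.manifest.v1+json" and
// "application/vnd.oci.image.index.v1+json", so the helpers above accept both the
// OCI and the legacy Docker distribution media types.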
// manifestCreationInfo describes a manifest to create
type manifestCreationInfo struct {
MediaType oci.MediaType
MediaType string
Owner *user_model.User
Creator *user_model.User
Image string
@ -36,12 +50,12 @@ type manifestCreationInfo struct {
}
func processManifest(mci *manifestCreationInfo, buf *packages_module.HashedBuffer) (string, error) {
var schema oci.SchemaMediaBase
if err := json.NewDecoder(buf).Decode(&schema); err != nil {
var index oci.Index
if err := json.NewDecoder(buf).Decode(&index); err != nil {
return "", err
}
if schema.SchemaVersion != 2 {
if index.SchemaVersion != 2 {
return "", errUnsupported.WithMessage("Schema version is not supported")
}
@ -49,17 +63,17 @@ func processManifest(mci *manifestCreationInfo, buf *packages_module.HashedBuffe
return "", err
}
if !mci.MediaType.IsValid() {
mci.MediaType = schema.MediaType
if !mci.MediaType.IsValid() {
if !isValidMediaType(mci.MediaType) {
mci.MediaType = index.MediaType
if !isValidMediaType(mci.MediaType) {
return "", errManifestInvalid.WithMessage("MediaType not recognized")
}
}
if mci.MediaType.IsImageManifest() {
if isImageManifestMediaType(mci.MediaType) {
d, err := processImageManifest(mci, buf)
return d, err
} else if mci.MediaType.IsImageIndex() {
} else if isImageIndexMediaType(mci.MediaType) {
d, err := processImageManifestIndex(mci, buf)
return d, err
}
@ -204,7 +218,7 @@ func processImageManifestIndex(mci *manifestCreationInfo, buf *packages_module.H
}
for _, manifest := range index.Manifests {
if !manifest.MediaType.IsImageManifest() {
if !isImageManifestMediaType(manifest.MediaType) {
return errManifestInvalid
}
@ -348,8 +362,8 @@ func createPackageAndVersion(ctx context.Context, mci *manifestCreationInfo, met
}
type blobReference struct {
Digest oci.Digest
MediaType oci.MediaType
Digest digest.Digest
MediaType string
Name string
File *packages_model.PackageFileDescriptor
ExpectedSize int64
@ -383,7 +397,7 @@ func createFileFromBlobReference(ctx context.Context, pv, uploadVersion *package
}
props := map[string]string{
container_module.PropertyMediaType: string(ref.MediaType),
container_module.PropertyMediaType: ref.MediaType,
container_module.PropertyDigest: string(ref.Digest),
}
for name, value := range props {
@ -428,7 +442,7 @@ func createManifestBlob(ctx context.Context, mci *manifestCreationInfo, pv *pack
manifestDigest := digestFromHashSummer(buf)
err = createFileFromBlobReference(ctx, pv, nil, &blobReference{
Digest: oci.Digest(manifestDigest),
Digest: digest.Digest(manifestDigest),
MediaType: mci.MediaType,
Name: container_model.ManifestFilename,
File: &packages_model.PackageFileDescriptor{Blob: pb},

View File

@ -507,7 +507,7 @@ func orgAssignment(args ...bool) func(ctx *context.APIContext) {
var err error
if assignOrg {
ctx.Org.Organization, err = organization.GetOrgByName(ctx.Params(":org"))
ctx.Org.Organization, err = organization.GetOrgByName(ctx, ctx.Params(":org"))
if err != nil {
if organization.IsErrOrgNotExist(err) {
redirectUserID, err := user_model.LookupUserRedirect(ctx.Params(":org"))
@ -687,7 +687,7 @@ func Routes(ctx gocontext.Context) *web.Route {
}
// Get user from session if logged in.
m.Use(context.APIAuth(group))
m.Use(auth.APIAuth(group))
m.Use(context.ToggleAPI(&context.ToggleOptions{
SignInRequired: setting.Service.RequireSignInView,

View File

@ -40,7 +40,7 @@ func ListPackages(ctx *context.APIContext) {
// in: query
// description: package type filter
// type: string
// enum: [composer, conan, conda, container, generic, helm, maven, npm, nuget, pub, pypi, rubygems, vagrant]
// enum: [cargo, chef, composer, conan, conda, container, generic, helm, maven, npm, nuget, pub, pypi, rubygems, vagrant]
// - name: q
// in: query
// description: name filter

View File

@ -108,7 +108,7 @@ func CreateFork(ctx *context.APIContext) {
if form.Organization == nil {
forker = ctx.Doer
} else {
org, err := organization.GetOrgByName(*form.Organization)
org, err := organization.GetOrgByName(ctx, *form.Organization)
if err != nil {
if organization.IsErrOrgNotExist(err) {
ctx.Error(http.StatusUnprocessableEntity, "", err)

View File

@ -468,7 +468,7 @@ func CreateOrgRepo(ctx *context.APIContext) {
// "403":
// "$ref": "#/responses/forbidden"
opt := web.GetForm(ctx).(*api.CreateRepoOption)
org, err := organization.GetOrgByName(ctx.Params(":org"))
org, err := organization.GetOrgByName(ctx, ctx.Params(":org"))
if err != nil {
if organization.IsErrOrgNotExist(err) {
ctx.Error(http.StatusUnprocessableEntity, "", err)

View File

@ -204,6 +204,8 @@ func parseOAuth2Config(form forms.AuthenticationForm) *oauth2.Source {
GroupClaimName: form.Oauth2GroupClaimName,
RestrictedGroup: form.Oauth2RestrictedGroup,
AdminGroup: form.Oauth2AdminGroup,
GroupTeamMap: form.Oauth2GroupTeamMap,
GroupTeamMapRemoval: form.Oauth2GroupTeamMapRemoval,
}
}

View File

@ -16,6 +16,7 @@ import (
"code.gitea.io/gitea/modules/setting"
"code.gitea.io/gitea/modules/web"
auth_service "code.gitea.io/gitea/services/auth"
"code.gitea.io/gitea/services/auth/source/oauth2"
"code.gitea.io/gitea/services/externalaccount"
"code.gitea.io/gitea/services/forms"
@ -267,5 +268,11 @@ func LinkAccountPostRegister(ctx *context.Context) {
return
}
source := authSource.Cfg.(*oauth2.Source)
if err := syncGroupsToTeams(ctx, source, &gothUser, u); err != nil {
ctx.ServerError("SyncGroupsToTeams", err)
return
}
handleSignIn(ctx, u, false)
}

View File

@ -17,7 +17,9 @@ import (
"code.gitea.io/gitea/models/auth"
org_model "code.gitea.io/gitea/models/organization"
user_model "code.gitea.io/gitea/models/user"
auth_module "code.gitea.io/gitea/modules/auth"
"code.gitea.io/gitea/modules/base"
"code.gitea.io/gitea/modules/container"
"code.gitea.io/gitea/modules/context"
"code.gitea.io/gitea/modules/json"
"code.gitea.io/gitea/modules/log"
@ -27,6 +29,7 @@ import (
"code.gitea.io/gitea/modules/web"
"code.gitea.io/gitea/modules/web/middleware"
auth_service "code.gitea.io/gitea/services/auth"
source_service "code.gitea.io/gitea/services/auth/source"
"code.gitea.io/gitea/services/auth/source/oauth2"
"code.gitea.io/gitea/services/externalaccount"
"code.gitea.io/gitea/services/forms"
@ -963,12 +966,19 @@ func SignInOAuthCallback(ctx *context.Context) {
IsActive: util.OptionalBoolOf(!setting.OAuth2Client.RegisterEmailConfirm),
}
setUserGroupClaims(authSource, u, &gothUser)
source := authSource.Cfg.(*oauth2.Source)
setUserAdminAndRestrictedFromGroupClaims(source, u, &gothUser)
if !createAndHandleCreatedUser(ctx, base.TplName(""), nil, u, overwriteDefault, &gothUser, setting.OAuth2Client.AccountLinking != setting.OAuth2AccountLinkingDisabled) {
// error already handled
return
}
if err := syncGroupsToTeams(ctx, source, &gothUser, u); err != nil {
ctx.ServerError("SyncGroupsToTeams", err)
return
}
} else {
// no existing user is found, request attach or new account
showLinkingLogin(ctx, gothUser)
@ -979,7 +989,7 @@ func SignInOAuthCallback(ctx *context.Context) {
handleOAuth2SignIn(ctx, authSource, u, gothUser)
}
func claimValueToStringSlice(claimValue interface{}) []string {
func claimValueToStringSet(claimValue interface{}) container.Set[string] {
var groups []string
switch rawGroup := claimValue.(type) {
@ -993,37 +1003,45 @@ func claimValueToStringSlice(claimValue interface{}) []string {
str := fmt.Sprintf("%s", rawGroup)
groups = strings.Split(str, ",")
}
return groups
return container.SetOf(groups...)
}
func setUserGroupClaims(loginSource *auth.Source, u *user_model.User, gothUser *goth.User) bool {
source := loginSource.Cfg.(*oauth2.Source)
if source.GroupClaimName == "" || (source.AdminGroup == "" && source.RestrictedGroup == "") {
return false
func syncGroupsToTeams(ctx *context.Context, source *oauth2.Source, gothUser *goth.User, u *user_model.User) error {
if source.GroupTeamMap != "" || source.GroupTeamMapRemoval {
groupTeamMapping, err := auth_module.UnmarshalGroupTeamMapping(source.GroupTeamMap)
if err != nil {
return err
}
groups := getClaimedGroups(source, gothUser)
if err := source_service.SyncGroupsToTeams(ctx, u, groups, groupTeamMapping, source.GroupTeamMapRemoval); err != nil {
return err
}
}
return nil
}
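// Illustrative only (not part of this commit): the kind of JSON a GroupTeamMap
// value is expected to carry, mapping a claimed group to organization teams;
// treat the exact shape as an assumption of this sketch.
const exampleGroupTeamMap = `{
	"cn=gitea-developers,ou=groups,dc=example,dc=org": {
		"MyOrg": ["Developers", "Reviewers"]
	}
}`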
func getClaimedGroups(source *oauth2.Source, gothUser *goth.User) container.Set[string] {
groupClaims, has := gothUser.RawData[source.GroupClaimName]
if !has {
return false
return nil
}
groups := claimValueToStringSlice(groupClaims)
return claimValueToStringSet(groupClaims)
}
func setUserAdminAndRestrictedFromGroupClaims(source *oauth2.Source, u *user_model.User, gothUser *goth.User) bool {
groups := getClaimedGroups(source, gothUser)
wasAdmin, wasRestricted := u.IsAdmin, u.IsRestricted
if source.AdminGroup != "" {
u.IsAdmin = false
u.IsAdmin = groups.Contains(source.AdminGroup)
}
if source.RestrictedGroup != "" {
u.IsRestricted = false
}
for _, g := range groups {
if source.AdminGroup != "" && g == source.AdminGroup {
u.IsAdmin = true
} else if source.RestrictedGroup != "" && g == source.RestrictedGroup {
u.IsRestricted = true
}
u.IsRestricted = groups.Contains(source.RestrictedGroup)
}
return wasAdmin != u.IsAdmin || wasRestricted != u.IsRestricted
@ -1070,6 +1088,15 @@ func handleOAuth2SignIn(ctx *context.Context, source *auth.Source, u *user_model
needs2FA = err == nil
}
oauth2Source := source.Cfg.(*oauth2.Source)
groupTeamMapping, err := auth_module.UnmarshalGroupTeamMapping(oauth2Source.GroupTeamMap)
if err != nil {
ctx.ServerError("UnmarshalGroupTeamMapping", err)
return
}
groups := getClaimedGroups(oauth2Source, &gothUser)
// If this user is enrolled in 2FA and this source doesn't override it,
// we can't sign the user in just yet. Instead, redirect them to the 2FA authentication page.
if !needs2FA {
@ -1088,7 +1115,7 @@ func handleOAuth2SignIn(ctx *context.Context, source *auth.Source, u *user_model
u.SetLastLogin()
// Update GroupClaims
changed := setUserGroupClaims(source, u, &gothUser)
changed := setUserAdminAndRestrictedFromGroupClaims(oauth2Source, u, &gothUser)
cols := []string{"last_login_unix"}
if changed {
cols = append(cols, "is_admin", "is_restricted")
@ -1099,6 +1126,13 @@ func handleOAuth2SignIn(ctx *context.Context, source *auth.Source, u *user_model
return
}
if oauth2Source.GroupTeamMap != "" || oauth2Source.GroupTeamMapRemoval {
if err := source_service.SyncGroupsToTeams(ctx, u, groups, groupTeamMapping, oauth2Source.GroupTeamMapRemoval); err != nil {
ctx.ServerError("SyncGroupsToTeams", err)
return
}
}
// update external user information
if err := externalaccount.UpdateExternalUser(u, gothUser); err != nil {
if !errors.Is(err, util.ErrNotExist) {
@ -1121,7 +1155,7 @@ func handleOAuth2SignIn(ctx *context.Context, source *auth.Source, u *user_model
return
}
changed := setUserGroupClaims(source, u, &gothUser)
changed := setUserAdminAndRestrictedFromGroupClaims(oauth2Source, u, &gothUser)
if changed {
if err := user_model.UpdateUserCols(ctx, u, "is_admin", "is_restricted"); err != nil {
ctx.ServerError("UpdateUserCols", err)
@ -1129,6 +1163,13 @@ func handleOAuth2SignIn(ctx *context.Context, source *auth.Source, u *user_model
}
}
if oauth2Source.GroupTeamMap != "" || oauth2Source.GroupTeamMapRemoval {
if err := source_service.SyncGroupsToTeams(ctx, u, groups, groupTeamMapping, oauth2Source.GroupTeamMapRemoval); err != nil {
ctx.ServerError("SyncGroupsToTeams", err)
return
}
}
if err := updateSession(ctx, nil, map[string]interface{}{
// User needs to use 2FA, save data and redirect to 2FA page.
"twofaUid": u.ID,
@ -1188,15 +1229,9 @@ func oAuth2UserLoginCallback(authSource *auth.Source, request *http.Request, res
}
if oauth2Source.RequiredClaimValue != "" {
groups := claimValueToStringSlice(claimInterface)
found := false
for _, group := range groups {
if group == oauth2Source.RequiredClaimValue {
found = true
break
}
}
if !found {
groups := claimValueToStringSet(claimInterface)
if !groups.Contains(oauth2Source.RequiredClaimValue) {
return nil, goth.User{}, user_model.ErrUserProhibitLogin{Name: gothUser.UserID}
}
}

View File

@ -73,7 +73,7 @@ func feedActionsToFeedItems(ctx *context.Context, actions activities_model.Actio
var content, desc, title string
link := &feeds.Link{Href: act.GetCommentLink()}
link := &feeds.Link{Href: act.GetCommentHTMLURL()}
// title
title = act.ActUser.DisplayName() + " "

View File

@ -84,3 +84,23 @@ func PackagesRulePreview(ctx *context.Context) {
ctx.HTML(http.StatusOK, tplSettingsPackagesRulePreview)
}
func InitializeCargoIndex(ctx *context.Context) {
ctx.Data["Title"] = ctx.Tr("packages.title")
ctx.Data["PageIsOrgSettings"] = true
ctx.Data["PageIsSettingsPackages"] = true
shared.InitializeCargoIndex(ctx, ctx.ContextUser)
ctx.Redirect(fmt.Sprintf("%s/org/%s/settings/packages", setting.AppSubURL, ctx.ContextUser.Name))
}
func RebuildCargoIndex(ctx *context.Context) {
ctx.Data["Title"] = ctx.Tr("packages.title")
ctx.Data["PageIsOrgSettings"] = true
ctx.Data["PageIsSettingsPackages"] = true
shared.RebuildCargoIndex(ctx, ctx.ContextUser)
ctx.Redirect(fmt.Sprintf("%s/org/%s/settings/packages", setting.AppSubURL, ctx.ContextUser.Name))
}

View File

@ -424,60 +424,40 @@ func (h *serviceHandler) sendFile(contentType, file string) {
// one or more key=value pairs separated by colons
var safeGitProtocolHeader = regexp.MustCompile(`^[0-9a-zA-Z]+=[0-9a-zA-Z]+(:[0-9a-zA-Z]+=[0-9a-zA-Z]+)*$`)
func getGitConfig(ctx gocontext.Context, option, dir string) string {
out, _, err := git.NewCommand(ctx, "config").AddDynamicArguments(option).RunStdString(&git.RunOpts{Dir: dir})
if err != nil {
log.Error("%v - %s", err, out)
func prepareGitCmdWithAllowedService(service string, h *serviceHandler) (*git.Command, error) {
if service == "receive-pack" && h.cfg.ReceivePack {
return git.NewCommand(h.r.Context(), "receive-pack"), nil
}
return out[0 : len(out)-1]
if service == "upload-pack" && h.cfg.UploadPack {
return git.NewCommand(h.r.Context(), "upload-pack"), nil
}
return nil, fmt.Errorf("service %q is not allowed", service)
}
func getConfigSetting(ctx gocontext.Context, service, dir string) bool {
service = strings.ReplaceAll(service, "-", "")
setting := getGitConfig(ctx, "http."+service, dir)
if service == "uploadpack" {
return setting != "false"
}
return setting == "true"
}
func hasAccess(ctx gocontext.Context, service string, h serviceHandler, checkContentType bool) bool {
if checkContentType {
if h.r.Header.Get("Content-Type") != fmt.Sprintf("application/x-git-%s-request", service) {
return false
}
}
if !(service == "upload-pack" || service == "receive-pack") {
return false
}
if service == "receive-pack" {
return h.cfg.ReceivePack
}
if service == "upload-pack" {
return h.cfg.UploadPack
}
return getConfigSetting(ctx, service, h.dir)
}
func serviceRPC(ctx gocontext.Context, h serviceHandler, service string) {
func serviceRPC(h *serviceHandler, service string) {
defer func() {
if err := h.r.Body.Close(); err != nil {
log.Error("serviceRPC: Close: %v", err)
}
}()
if !hasAccess(ctx, service, h, true) {
expectedContentType := fmt.Sprintf("application/x-git-%s-request", service)
if h.r.Header.Get("Content-Type") != expectedContentType {
log.Error("Content-Type (%q) doesn't match expected: %q", h.r.Header.Get("Content-Type"), expectedContentType)
h.w.WriteHeader(http.StatusUnauthorized)
return
}
cmd, err := prepareGitCmdWithAllowedService(service, h)
if err != nil {
log.Error("Failed to prepareGitCmdWithService: %v", err)
h.w.WriteHeader(http.StatusUnauthorized)
return
}
h.w.Header().Set("Content-Type", fmt.Sprintf("application/x-git-%s-result", service))
var err error
reqBody := h.r.Body
// Handle GZIP.
@ -498,8 +478,7 @@ func serviceRPC(ctx gocontext.Context, h serviceHandler, service string) {
}
var stderr bytes.Buffer
// the service is generated by ourselves, so it's safe to trust it
cmd := git.NewCommand(h.r.Context(), git.ToTrustedCmdArgs([]string{service})...).AddArguments("--stateless-rpc").AddDynamicArguments(h.dir)
cmd.AddArguments("--stateless-rpc").AddDynamicArguments(h.dir)
cmd.SetDescription(fmt.Sprintf("%s %s %s [repo_path: %s]", git.GitExecutable, service, "--stateless-rpc", h.dir))
if err := cmd.Run(&git.RunOpts{
Dir: h.dir,
@ -520,7 +499,7 @@ func serviceRPC(ctx gocontext.Context, h serviceHandler, service string) {
func ServiceUploadPack(ctx *context.Context) {
h := httpBase(ctx)
if h != nil {
serviceRPC(ctx, *h, "upload-pack")
serviceRPC(h, "upload-pack")
}
}
@ -528,7 +507,7 @@ func ServiceUploadPack(ctx *context.Context) {
func ServiceReceivePack(ctx *context.Context) {
h := httpBase(ctx)
if h != nil {
serviceRPC(ctx, *h, "receive-pack")
serviceRPC(h, "receive-pack")
}
}
@ -537,7 +516,7 @@ func getServiceType(r *http.Request) string {
if !strings.HasPrefix(serviceType, "git-") {
return ""
}
return strings.Replace(serviceType, "git-", "", 1)
return strings.TrimPrefix(serviceType, "git-")
}
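// For example, a request for /info/refs?service=git-upload-pack yields
// getServiceType(r) == "upload-pack" (assuming serviceType above is read from the
// ?service query parameter, as git's smart HTTP protocol specifies; that part of
// the function is outside this hunk).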
func updateServerInfo(ctx gocontext.Context, dir string) []byte {
@ -563,16 +542,15 @@ func GetInfoRefs(ctx *context.Context) {
return
}
h.setHeaderNoCache()
if hasAccess(ctx, getServiceType(h.r), *h, false) {
service := getServiceType(h.r)
service := getServiceType(h.r)
cmd, err := prepareGitCmdWithAllowedService(service, h)
if err == nil {
if protocol := h.r.Header.Get("Git-Protocol"); protocol != "" && safeGitProtocolHeader.MatchString(protocol) {
h.environ = append(h.environ, "GIT_PROTOCOL="+protocol)
}
h.environ = append(os.Environ(), h.environ...)
// the service is generated by ourselves, so we can trust it
refs, _, err := git.NewCommand(ctx, git.ToTrustedCmdArgs([]string{service})...).AddArguments("--stateless-rpc", "--advertise-refs", ".").RunStdBytes(&git.RunOpts{Env: h.environ, Dir: h.dir})
refs, _, err := cmd.AddArguments("--stateless-rpc", "--advertise-refs", ".").RunStdBytes(&git.RunOpts{Env: h.environ, Dir: h.dir})
if err != nil {
log.Error(fmt.Sprintf("%v - %s", err, string(refs)))
}

View File

@ -78,7 +78,7 @@ func RetrieveLabels(ctx *context.Context) {
}
ctx.Data["OrgLabels"] = orgLabels
org, err := organization.GetOrgByName(ctx.Repo.Owner.LowerName)
org, err := organization.GetOrgByName(ctx, ctx.Repo.Owner.LowerName)
if err != nil {
ctx.ServerError("GetOrgByName", err)
return

View File

@ -339,8 +339,8 @@ func setMergeTarget(ctx *context.Context, pull *issues_model.PullRequest) {
ctx.Data["HeadTarget"] = pull.MustHeadUserName(ctx) + "/" + pull.HeadRepo.Name + ":" + pull.HeadBranch
}
ctx.Data["BaseTarget"] = pull.BaseBranch
ctx.Data["HeadBranchHTMLURL"] = pull.GetHeadBranchHTMLURL()
ctx.Data["BaseBranchHTMLURL"] = pull.GetBaseBranchHTMLURL()
ctx.Data["HeadBranchLink"] = pull.GetHeadBranchLink()
ctx.Data["BaseBranchLink"] = pull.GetBaseBranchLink()
}
// PrepareMergedViewPullInfo show meta information for a merged pull request view page

View File

@ -569,6 +569,7 @@ func SearchRepo(ctx *context.Context) {
Mirror: repo.IsMirror,
Stars: repo.NumStars,
HTMLURL: repo.HTMLURL(),
Link: repo.Link(),
Internal: !repo.IsPrivate && repo.Owner.Visibility == api.VisibleTypePrivate,
}
}

View File

@ -1006,7 +1006,7 @@ func AddTeamPost(ctx *context.Context) {
return
}
team, err := organization.OrgFromUser(ctx.Repo.Owner).GetTeam(name)
team, err := organization.OrgFromUser(ctx.Repo.Owner).GetTeam(ctx, name)
if err != nil {
if organization.IsErrTeamNotExist(err) {
ctx.Flash.Error(ctx.Tr("form.team_not_exist"))

View File

@ -13,9 +13,11 @@ import (
user_model "code.gitea.io/gitea/models/user"
"code.gitea.io/gitea/modules/base"
"code.gitea.io/gitea/modules/context"
"code.gitea.io/gitea/modules/log"
"code.gitea.io/gitea/modules/util"
"code.gitea.io/gitea/modules/web"
"code.gitea.io/gitea/services/forms"
cargo_service "code.gitea.io/gitea/services/packages/cargo"
container_service "code.gitea.io/gitea/services/packages/container"
)
@ -223,3 +225,23 @@ func getCleanupRuleByContext(ctx *context.Context, owner *user_model.User) *pack
return nil
}
func InitializeCargoIndex(ctx *context.Context, owner *user_model.User) {
err := cargo_service.InitializeIndexRepository(ctx, owner, owner)
if err != nil {
log.Error("InitializeIndexRepository failed: %v", err)
ctx.Flash.Error(ctx.Tr("packages.owner.settings.cargo.initialize.error", err))
} else {
ctx.Flash.Success(ctx.Tr("packages.owner.settings.cargo.initialize.success"))
}
}
func RebuildCargoIndex(ctx *context.Context, owner *user_model.User) {
err := cargo_service.RebuildIndex(ctx, owner, owner)
if err != nil {
log.Error("RebuildIndex failed: %v", err)
ctx.Flash.Error(ctx.Tr("packages.owner.settings.cargo.rebuild.error", err))
} else {
ctx.Flash.Success(ctx.Tr("packages.owner.settings.cargo.rebuild.success"))
}
}

View File

@ -5,10 +5,14 @@ package setting
import (
"net/http"
"strings"
user_model "code.gitea.io/gitea/models/user"
"code.gitea.io/gitea/modules/base"
"code.gitea.io/gitea/modules/context"
chef_module "code.gitea.io/gitea/modules/packages/chef"
"code.gitea.io/gitea/modules/setting"
"code.gitea.io/gitea/modules/util"
shared "code.gitea.io/gitea/routers/web/shared/packages"
)
@ -77,3 +81,39 @@ func PackagesRulePreview(ctx *context.Context) {
ctx.HTML(http.StatusOK, tplSettingsPackagesRulePreview)
}
func InitializeCargoIndex(ctx *context.Context) {
ctx.Data["Title"] = ctx.Tr("packages.title")
ctx.Data["PageIsSettingsPackages"] = true
shared.InitializeCargoIndex(ctx, ctx.Doer)
ctx.Redirect(setting.AppSubURL + "/user/settings/packages")
}
func RebuildCargoIndex(ctx *context.Context) {
ctx.Data["Title"] = ctx.Tr("packages.title")
ctx.Data["PageIsSettingsPackages"] = true
shared.RebuildCargoIndex(ctx, ctx.Doer)
ctx.Redirect(setting.AppSubURL + "/user/settings/packages")
}
func RegenerateChefKeyPair(ctx *context.Context) {
priv, pub, err := util.GenerateKeyPair(chef_module.KeyBits)
if err != nil {
ctx.ServerError("GenerateKeyPair", err)
return
}
if err := user_model.SetUserSetting(ctx.Doer.ID, chef_module.SettingPublicPem, pub); err != nil {
ctx.ServerError("SetUserSetting", err)
return
}
ctx.ServeContent(strings.NewReader(priv), &context.ServeHeaderOptions{
ContentType: "application/x-pem-file",
Filename: ctx.Doer.Name + ".priv",
})
}

View File

@ -137,11 +137,8 @@ func ProfilePost(ctx *context.Context) {
return
}
// Update the language to the one we just set
middleware.SetLocaleCookie(ctx.Resp, ctx.Doer.Language, 0)
log.Trace("User settings updated: %s", ctx.Doer.Name)
ctx.Flash.Success(translation.NewLocale(ctx.Doer.Language).Tr("settings.update_profile_success"))
ctx.Flash.Success(ctx.Tr("settings.update_profile_success"))
ctx.Redirect(setting.AppSubURL + "/user/settings")
}

View File

@ -203,7 +203,7 @@ func Routes(ctx gocontext.Context) *web.Route {
}
// Get user from session if logged in.
common = append(common, context.Auth(group))
common = append(common, auth_service.Auth(group))
// GetHead allows a HEAD request redirect to GET if HEAD method is not defined for that route
common = append(common, middleware.GetHead)
@ -468,6 +468,11 @@ func RegisterRoutes(m *web.Route) {
m.Get("/preview", user_setting.PackagesRulePreview)
})
})
m.Group("/cargo", func() {
m.Post("/initialize", user_setting.InitializeCargoIndex)
m.Post("/rebuild", user_setting.RebuildCargoIndex)
})
m.Post("/chef/regenerate_keypair", user_setting.RegenerateChefKeyPair)
}, packagesEnabled)
m.Group("/secrets", func() {
m.Get("", user_setting.Secrets)
@ -818,6 +823,10 @@ func RegisterRoutes(m *web.Route) {
m.Get("/preview", org.PackagesRulePreview)
})
})
m.Group("/cargo", func() {
m.Post("/initialize", org.InitializeCargoIndex)
m.Post("/rebuild", org.RebuildCargoIndex)
})
}, packagesEnabled)
}, func(ctx *context.Context) {
ctx.Data["EnableOAuth2"] = setting.OAuth2.Enable

View File

@ -5,8 +5,11 @@ package activitypub
import (
user_model "code.gitea.io/gitea/models/user"
"code.gitea.io/gitea/modules/util"
)
const rsaBits = 2048
// GetKeyPair function returns a user's private and public keys
func GetKeyPair(user *user_model.User) (pub, priv string, err error) {
var settings map[string]*user_model.Setting
@ -14,7 +17,7 @@ func GetKeyPair(user *user_model.User) (pub, priv string, err error) {
if err != nil {
return
} else if len(settings) == 0 {
if priv, pub, err = GenerateKeyPair(); err != nil {
if priv, pub, err = util.GenerateKeyPair(rsaBits); err != nil {
return
}
if err = user_model.SetUserSetting(user.ID, user_model.UserActivityPubPrivPem, priv); err != nil {

View File

@ -0,0 +1,60 @@
// Copyright 2022 The Gitea Authors. All rights reserved.
// SPDX-License-Identifier: MIT
package auth
import (
"net/http"
"code.gitea.io/gitea/modules/context"
"code.gitea.io/gitea/modules/log"
"code.gitea.io/gitea/modules/web/middleware"
)
// Auth is a middleware to authenticate a web user
func Auth(authMethod Method) func(*context.Context) {
return func(ctx *context.Context) {
if err := authShared(ctx, authMethod); err != nil {
log.Error("Failed to verify user: %v", err)
ctx.Error(http.StatusUnauthorized, "Verify")
return
}
if ctx.Doer == nil {
// ensure the session uid is deleted
_ = ctx.Session.Delete("uid")
}
}
}
// APIAuth is a middleware to authenticate an api user
func APIAuth(authMethod Method) func(*context.APIContext) {
return func(ctx *context.APIContext) {
if err := authShared(ctx.Context, authMethod); err != nil {
ctx.Error(http.StatusUnauthorized, "APIAuth", err)
}
}
}
func authShared(ctx *context.Context, authMethod Method) error {
var err error
ctx.Doer, err = authMethod.Verify(ctx.Req, ctx.Resp, ctx, ctx.Session)
if err != nil {
return err
}
if ctx.Doer != nil {
if ctx.Locale.Language() != ctx.Doer.Language {
ctx.Locale = middleware.Locale(ctx.Resp, ctx.Req)
}
ctx.IsBasicAuth = ctx.Data["AuthedMethod"].(string) == BasicMethodName
ctx.IsSigned = true
ctx.Data["IsSigned"] = ctx.IsSigned
ctx.Data["SignedUser"] = ctx.Doer
ctx.Data["SignedUserID"] = ctx.Doer.ID
ctx.Data["SignedUserName"] = ctx.Doer.Name
ctx.Data["IsAdmin"] = ctx.Doer.IsAdmin
} else {
ctx.Data["SignedUserID"] = int64(0)
ctx.Data["SignedUserName"] = ""
}
return nil
}

View File

@ -10,9 +10,10 @@ import (
asymkey_model "code.gitea.io/gitea/models/asymkey"
"code.gitea.io/gitea/models/auth"
"code.gitea.io/gitea/models/db"
"code.gitea.io/gitea/models/organization"
user_model "code.gitea.io/gitea/models/user"
auth_module "code.gitea.io/gitea/modules/auth"
"code.gitea.io/gitea/modules/util"
source_service "code.gitea.io/gitea/services/auth/source"
"code.gitea.io/gitea/services/mailer"
user_service "code.gitea.io/gitea/services/user"
)
@ -64,61 +65,66 @@ func (source *Source) Authenticate(user *user_model.User, userName, password str
}
if user != nil {
if source.GroupsEnabled && (source.GroupTeamMap != "" || source.GroupTeamMapRemoval) {
orgCache := make(map[string]*organization.Organization)
teamCache := make(map[string]*organization.Team)
source.SyncLdapGroupsToTeams(user, sr.LdapTeamAdd, sr.LdapTeamRemove, orgCache, teamCache)
}
if isAttributeSSHPublicKeySet && asymkey_model.SynchronizePublicKeys(user, source.authSource, sr.SSHPublicKey) {
return user, asymkey_model.RewriteAllPublicKeys()
if err := asymkey_model.RewriteAllPublicKeys(); err != nil {
return user, err
}
}
} else {
// Fallback.
if len(sr.Username) == 0 {
sr.Username = userName
}
if len(sr.Mail) == 0 {
sr.Mail = fmt.Sprintf("%s@localhost", sr.Username)
}
user = &user_model.User{
LowerName: strings.ToLower(sr.Username),
Name: sr.Username,
FullName: composeFullName(sr.Name, sr.Surname, sr.Username),
Email: sr.Mail,
LoginType: source.authSource.Type,
LoginSource: source.authSource.ID,
LoginName: userName,
IsAdmin: sr.IsAdmin,
}
overwriteDefault := &user_model.CreateUserOverwriteOptions{
IsRestricted: util.OptionalBoolOf(sr.IsRestricted),
IsActive: util.OptionalBoolTrue,
}
err := user_model.CreateUser(user, overwriteDefault)
if err != nil {
return user, err
}
mailer.SendRegisterNotifyMail(user)
if isAttributeSSHPublicKeySet && asymkey_model.AddPublicKeysBySource(user, source.authSource, sr.SSHPublicKey) {
if err := asymkey_model.RewriteAllPublicKeys(); err != nil {
return user, err
}
}
if len(source.AttributeAvatar) > 0 {
if err := user_service.UploadAvatar(user, sr.Avatar); err != nil {
return user, err
}
}
return user, nil
}
// Fallback.
if len(sr.Username) == 0 {
sr.Username = userName
}
if len(sr.Mail) == 0 {
sr.Mail = fmt.Sprintf("%s@localhost", sr.Username)
}
user = &user_model.User{
LowerName: strings.ToLower(sr.Username),
Name: sr.Username,
FullName: composeFullName(sr.Name, sr.Surname, sr.Username),
Email: sr.Mail,
LoginType: source.authSource.Type,
LoginSource: source.authSource.ID,
LoginName: userName,
IsAdmin: sr.IsAdmin,
}
overwriteDefault := &user_model.CreateUserOverwriteOptions{
IsRestricted: util.OptionalBoolOf(sr.IsRestricted),
IsActive: util.OptionalBoolTrue,
}
err := user_model.CreateUser(user, overwriteDefault)
if err != nil {
return user, err
}
mailer.SendRegisterNotifyMail(user)
if isAttributeSSHPublicKeySet && asymkey_model.AddPublicKeysBySource(user, source.authSource, sr.SSHPublicKey) {
err = asymkey_model.RewriteAllPublicKeys()
}
if err == nil && len(source.AttributeAvatar) > 0 {
_ = user_service.UploadAvatar(user, sr.Avatar)
}
if source.GroupsEnabled && (source.GroupTeamMap != "" || source.GroupTeamMapRemoval) {
orgCache := make(map[string]*organization.Organization)
teamCache := make(map[string]*organization.Team)
source.SyncLdapGroupsToTeams(user, sr.LdapTeamAdd, sr.LdapTeamRemove, orgCache, teamCache)
groupTeamMapping, err := auth_module.UnmarshalGroupTeamMapping(source.GroupTeamMap)
if err != nil {
return user, err
}
if err := source_service.SyncGroupsToTeams(db.DefaultContext, user, sr.Groups, groupTeamMapping, source.GroupTeamMapRemoval); err != nil {
return user, err
}
}
return user, err
return user, nil
}
// IsSkipLocalTwoFA returns if this source should skip local 2fa for password authentication

Some files were not shown because too many files have changed in this diff