Mirror of https://github.com/go-gitea/gitea.git (synced 2025-07-14 00:01:44 -04:00)

Compare commits: e35f8e15a6 ... c13eb8e6b3 (9 commits)

- c13eb8e6b3
- 50111c71c3
- d987ac6bf1
- ff18d17442
- c07199f9ab
- d74d16a4b1
- c18a62279a
- df789d962b
- 7baeb9c52a

.drone.yml (42 changed lines)
@@ -763,10 +763,16 @@ steps:
  image: woodpeckerci/plugin-s3:latest
  pull: always
  settings:
    acl: public-read
    bucket: gitea-artifacts
    endpoint: https://ams3.digitaloceanspaces.com
    path_style: true
    acl:
      from_secret: aws_s3_acl
    region:
      from_secret: aws_s3_region
    bucket:
      from_secret: aws_s3_bucket
    endpoint:
      from_secret: aws_s3_endpoint
    path_style:
      from_secret: aws_s3_path_style
    source: "dist/release/*"
    strip_prefix: dist/release/
    target: "/gitea/${DRONE_BRANCH##release/v}"
@@ -784,10 +790,16 @@ steps:
- name: release-main
  image: woodpeckerci/plugin-s3:latest
  settings:
    acl: public-read
    bucket: gitea-artifacts
    endpoint: https://ams3.digitaloceanspaces.com
    path_style: true
    acl:
      from_secret: aws_s3_acl
    region:
      from_secret: aws_s3_region
    bucket:
      from_secret: aws_s3_bucket
    endpoint:
      from_secret: aws_s3_endpoint
    path_style:
      from_secret: aws_s3_path_style
    source: "dist/release/*"
    strip_prefix: dist/release/
    target: /gitea/main
@@ -886,10 +898,16 @@ steps:
  image: woodpeckerci/plugin-s3:latest
  pull: always
  settings:
    acl: public-read
    bucket: gitea-artifacts
    endpoint: https://ams3.digitaloceanspaces.com
    path_style: true
    acl:
      from_secret: aws_s3_acl
    region:
      from_secret: aws_s3_region
    bucket:
      from_secret: aws_s3_bucket
    endpoint:
      from_secret: aws_s3_endpoint
    path_style:
      from_secret: aws_s3_path_style
    source: "dist/release/*"
    strip_prefix: dist/release/
    target: "/gitea/${DRONE_TAG##v}"
cmd/admin.go (11 changed lines)
@@ -308,6 +308,11 @@ var (
		Value: "false",
		Usage: "Use custom URLs for GitLab/GitHub OAuth endpoints",
	},
	cli.StringFlag{
		Name: "custom-tenant-id",
		Value: "",
		Usage: "Use custom Tenant ID for OAuth endpoints",
	},
	cli.StringFlag{
		Name: "custom-auth-url",
		Value: "",
@@ -829,6 +834,7 @@ func parseOAuth2Config(c *cli.Context) *oauth2.Source {
			AuthURL: c.String("custom-auth-url"),
			ProfileURL: c.String("custom-profile-url"),
			EmailURL: c.String("custom-email-url"),
			Tenant: c.String("custom-tenant-id"),
		}
	} else {
		customURLMapping = nil
@@ -938,6 +944,7 @@ func runUpdateOauth(c *cli.Context) error {
		customURLMapping.AuthURL = oAuth2Config.CustomURLMapping.AuthURL
		customURLMapping.ProfileURL = oAuth2Config.CustomURLMapping.ProfileURL
		customURLMapping.EmailURL = oAuth2Config.CustomURLMapping.EmailURL
		customURLMapping.Tenant = oAuth2Config.CustomURLMapping.Tenant
	}
	if c.IsSet("use-custom-urls") && c.IsSet("custom-token-url") {
		customURLMapping.TokenURL = c.String("custom-token-url")
@@ -955,6 +962,10 @@ func runUpdateOauth(c *cli.Context) error {
		customURLMapping.EmailURL = c.String("custom-email-url")
	}

	if c.IsSet("use-custom-urls") && c.IsSet("custom-tenant-id") {
		customURLMapping.Tenant = c.String("custom-tenant-id")
	}

	oAuth2Config.CustomURLMapping = customURLMapping
	source.Cfg = oAuth2Config
@@ -765,7 +765,7 @@ ROUTER = console
;; Enable this to require captcha validation for login
;REQUIRE_CAPTCHA_FOR_LOGIN = false
;;
;; Type of captcha you want to use. Options: image, recaptcha, hcaptcha, mcaptcha.
;; Type of captcha you want to use. Options: image, recaptcha, hcaptcha, mcaptcha, cfturnstile.
;CAPTCHA_TYPE = image
;;
;; Change this to use recaptcha.net or other recaptcha service
@@ -787,6 +787,10 @@ ROUTER = console
;MCAPTCHA_SECRET =
;MCAPTCHA_SITEKEY =
;;
;; Go to https://dash.cloudflare.com/?to=/:account/turnstile to sign up for a key
;CF_TURNSTILE_SITEKEY =
;CF_TURNSTILE_SECRET =
;;
;; Default value for KeepEmailPrivate
;; Each new user will get the value of this setting copied into their profile
;DEFAULT_KEEP_EMAIL_PRIVATE = false
@@ -2454,6 +2458,10 @@ ROUTER = console
;LIMIT_TOTAL_OWNER_COUNT = -1
;; Maximum size of packages a single owner can use (`-1` means no limits, format `1000`, `1 MB`, `1 GiB`)
;LIMIT_TOTAL_OWNER_SIZE = -1
;; Maximum size of a Cargo upload (`-1` means no limits, format `1000`, `1 MB`, `1 GiB`)
;LIMIT_SIZE_CARGO = -1
;; Maximum size of a Chef upload (`-1` means no limits, format `1000`, `1 MB`, `1 GiB`)
;LIMIT_SIZE_CHEF = -1
;; Maximum size of a Composer upload (`-1` means no limits, format `1000`, `1 MB`, `1 GiB`)
;LIMIT_SIZE_COMPOSER = -1
;; Maximum size of a Conan upload (`-1` means no limits, format `1000`, `1 MB`, `1 GiB`)
@@ -643,7 +643,7 @@ Certain queues have defaults that override the defaults set in `[queue]` (this o
- `REQUIRE_CAPTCHA_FOR_LOGIN`: **false**: Enable this to require captcha validation for login. You also must enable `ENABLE_CAPTCHA`.
- `REQUIRE_EXTERNAL_REGISTRATION_CAPTCHA`: **false**: Enable this to force captcha validation
  even for External Accounts (i.e. GitHub, OpenID Connect, etc). You also must enable `ENABLE_CAPTCHA`.
- `CAPTCHA_TYPE`: **image**: \[image, recaptcha, hcaptcha, mcaptcha\]
- `CAPTCHA_TYPE`: **image**: \[image, recaptcha, hcaptcha, mcaptcha, cfturnstile\]
- `RECAPTCHA_SECRET`: **""**: Go to https://www.google.com/recaptcha/admin to get a secret for recaptcha.
- `RECAPTCHA_SITEKEY`: **""**: Go to https://www.google.com/recaptcha/admin to get a sitekey for recaptcha.
- `RECAPTCHA_URL`: **https://www.google.com/recaptcha/**: Set the recaptcha url - allows the use of recaptcha net.
@@ -652,6 +652,8 @@ Certain queues have defaults that override the defaults set in `[queue]` (this o
- `MCAPTCHA_SECRET`: **""**: Go to your mCaptcha instance to get a secret for mCaptcha.
- `MCAPTCHA_SITEKEY`: **""**: Go to your mCaptcha instance to get a sitekey for mCaptcha.
- `MCAPTCHA_URL` **https://demo.mcaptcha.org/**: Set the mCaptcha URL.
- `CF_TURNSTILE_SECRET` **""**: Go to https://dash.cloudflare.com/?to=/:account/turnstile to get a secret for cloudflare turnstile.
- `CF_TURNSTILE_SITEKEY` **""**: Go to https://dash.cloudflare.com/?to=/:account/turnstile to get a sitekey for cloudflare turnstile.
- `DEFAULT_KEEP_EMAIL_PRIVATE`: **false**: By default set users to keep their email address private.
- `DEFAULT_ALLOW_CREATE_ORGANIZATION`: **true**: Allow new users to create organizations by default.
- `DEFAULT_USER_IS_RESTRICTED`: **false**: Give new users restricted permissions by default
@@ -1211,6 +1213,8 @@ Task queue configuration has been moved to `queue.task`. However, the below conf
- `CHUNKED_UPLOAD_PATH`: **tmp/package-upload**: Path for chunked uploads. Defaults to `APP_DATA_PATH` + `tmp/package-upload`
- `LIMIT_TOTAL_OWNER_COUNT`: **-1**: Maximum count of package versions a single owner can have (`-1` means no limits)
- `LIMIT_TOTAL_OWNER_SIZE`: **-1**: Maximum size of packages a single owner can use (`-1` means no limits, format `1000`, `1 MB`, `1 GiB`)
- `LIMIT_SIZE_CARGO`: **-1**: Maximum size of a Cargo upload (`-1` means no limits, format `1000`, `1 MB`, `1 GiB`)
- `LIMIT_SIZE_CHEF`: **-1**: Maximum size of a Chef upload (`-1` means no limits, format `1000`, `1 MB`, `1 GiB`)
- `LIMIT_SIZE_COMPOSER`: **-1**: Maximum size of a Composer upload (`-1` means no limits, format `1000`, `1 MB`, `1 GiB`)
- `LIMIT_SIZE_CONAN`: **-1**: Maximum size of a Conan upload (`-1` means no limits, format `1000`, `1 MB`, `1 GiB`)
- `LIMIT_SIZE_CONDA`: **-1**: Maximum size of a Conda upload (`-1` means no limits, format `1000`, `1 MB`, `1 GiB`)
@@ -147,6 +147,17 @@ menu:
- `ENABLE_REVERSE_PROXY_AUTO_REGISTRATION`: Allow auto-registration through reverse-proxy authentication.
- `ENABLE_CAPTCHA`: **false**: Use an image captcha for registration.
- `REQUIRE_CAPTCHA_FOR_LOGIN`: **false**: Require a captcha for login. `ENABLE_CAPTCHA` must also be enabled.
- `CAPTCHA_TYPE`: **image**: \[image, recaptcha, hcaptcha, mcaptcha, cfturnstile\], the captcha type: image, reCAPTCHA, hCaptcha, mCaptcha, or Cloudflare Turnstile.
- `RECAPTCHA_SECRET`: **""**: Secret for the reCAPTCHA service, available at https://www.google.com/recaptcha/admin.
- `RECAPTCHA_SITEKEY`: **""**: Sitekey for the reCAPTCHA service, available at https://www.google.com/recaptcha/admin.
- `RECAPTCHA_URL`: **https://www.google.com/recaptcha/**: Set the reCAPTCHA URL.
- `HCAPTCHA_SECRET`: **""**: Secret for the hCaptcha service, available at https://www.hcaptcha.com/.
- `HCAPTCHA_SITEKEY`: **""**: Sitekey for the hCaptcha service, available at https://www.hcaptcha.com/.
- `MCAPTCHA_SECRET`: **""**: Secret for the mCaptcha service.
- `MCAPTCHA_SITEKEY`: **""**: Sitekey for the mCaptcha service.
- `MCAPTCHA_URL` **https://demo.mcaptcha.org/**: Set the mCaptcha URL.
- `CF_TURNSTILE_SECRET` **""**: Secret for the Cloudflare Turnstile service, available at https://dash.cloudflare.com/?to=/:account/turnstile.
- `CF_TURNSTILE_SITEKEY` **""**: Sitekey for the Cloudflare Turnstile service, available at https://dash.cloudflare.com/?to=/:account/turnstile.

### Service - Explore (`service.explore`)
docs/content/doc/packages/cargo.en-us.md (new file, 109 lines)
@@ -0,0 +1,109 @@
---
date: "2022-11-20T00:00:00+00:00"
title: "Cargo Packages Repository"
slug: "packages/cargo"
draft: false
toc: false
menu:
  sidebar:
    parent: "packages"
    name: "Cargo"
    weight: 5
    identifier: "cargo"
---

# Cargo Packages Repository

Publish [Cargo](https://doc.rust-lang.org/stable/cargo/) packages for your user or organization.

**Table of Contents**

{{< toc >}}

## Requirements

To work with the Cargo package registry, you need [Rust and Cargo](https://www.rust-lang.org/tools/install).

Cargo stores information about the available packages in a package index kept in a git repository.
This repository is needed to work with the registry.
The following section describes how to create it.

## Index Repository

Cargo stores information about the available packages in a package index kept in a git repository.
In Gitea this repository has the special name `_cargo-index`.
After a package is uploaded, its metadata is automatically written to the index.
The content of this repository should not be modified manually.

The user or organization package settings page allows you to create the index repository along with the configuration file.
If needed, this action rewrites the configuration file.
This can be useful if, for example, the Gitea instance domain has changed.

If the packages stored in Gitea and the information in the index repository get out of sync, the settings page allows you to rebuild the index repository.
This action iterates over all packages in the registry and writes their information to the index.
If there are a lot of packages this process may take some time.

## Configuring the package registry

To register the package registry, the Cargo configuration must be updated.
Add the following text to the configuration file located in the current user's home directory (for example `~/.cargo/config.toml`):

```
[registry]
default = "gitea"

[registries.gitea]
index = "https://gitea.example.com/{owner}/_cargo-index.git"

[net]
git-fetch-with-cli = true
```

| Parameter | Description |
| --------- | ----------- |
| `owner`   | The owner of the package. |

If the registry is private or you want to publish new packages, you have to configure your credentials.
Add the credentials section to the credentials file located in the current user's home directory (for example `~/.cargo/credentials.toml`):

```
[registries.gitea]
token = "Bearer {token}"
```

| Parameter | Description |
| --------- | ----------- |
| `token`   | Your [personal access token]({{< relref "doc/developers/api-usage.en-us.md#authentication" >}}) |

## Publish a package

Publish a package by running the following command in your project:

```shell
cargo publish
```

You cannot publish a package if a package of the same name and version already exists. You must delete the existing package first.

## Install a package

To install a package from the package registry, execute the following command:

```shell
cargo add {package_name}
```

| Parameter      | Description |
| -------------- | ----------- |
| `package_name` | The package name. |

## Supported commands

```
cargo publish
cargo add
cargo install
cargo yank
cargo unyank
cargo search
```
docs/content/doc/packages/chef.en-us.md (new file, 96 lines)
@@ -0,0 +1,96 @@
---
date: "2023-01-20T00:00:00+00:00"
title: "Chef Packages Repository"
slug: "packages/chef"
draft: false
toc: false
menu:
  sidebar:
    parent: "packages"
    name: "Chef"
    weight: 5
    identifier: "chef"
---

# Chef Packages Repository

Publish [Chef](https://chef.io/) cookbooks for your user or organization.

**Table of Contents**

{{< toc >}}

## Requirements

To work with the Chef package registry, you have to use [`knife`](https://docs.chef.io/workstation/knife/).

## Authentication

The Chef package registry does not use username:password authentication; instead, requests are signed with a private/public key pair.
Visit the package owner settings page to create the necessary key pair.
Only the public key is stored inside Gitea. If you lose access to the private key, you must re-generate the key pair.
[Configure `knife`](https://docs.chef.io/workstation/knife_setup/) to use the downloaded private key with your Gitea username as `client_name`.

## Configure the package registry

To [configure `knife`](https://docs.chef.io/workstation/knife_setup/) to use the Gitea package registry, add the URL to the `~/.chef/config.rb` file:

```
knife[:supermarket_site] = 'https://gitea.example.com/api/packages/{owner}/chef'
```

| Parameter | Description |
| --------- | ----------- |
| `owner`   | The owner of the package. |

## Publish a package

To publish a Chef package, execute the following command:

```shell
knife supermarket share {package_name}
```

| Parameter      | Description |
| -------------- | ----------- |
| `package_name` | The package name. |

You cannot publish a package if a package of the same name and version already exists. You must delete the existing package first.

## Install a package

To install a package from the package registry, execute the following command:

```shell
knife supermarket install {package_name}
```

Optionally, you can specify the package version:

```shell
knife supermarket install {package_name} {package_version}
```

| Parameter         | Description |
| ----------------- | ----------- |
| `package_name`    | The package name. |
| `package_version` | The package version. |

## Delete a package

If you want to remove a package from the registry, execute the following command:

```shell
knife supermarket unshare {package_name}
```

Optionally, you can specify the package version:

```shell
knife supermarket unshare {package_name}/versions/{package_version}
```

| Parameter         | Description |
| ----------------- | ----------- |
| `package_name`    | The package name. |
| `package_version` | The package version. |
@@ -26,6 +26,8 @@ The following package managers are currently supported:

| Name | Language | Package client |
| ---- | -------- | -------------- |
| [Cargo]({{< relref "doc/packages/cargo.en-us.md" >}}) | Rust | `cargo` |
| [Chef]({{< relref "doc/packages/chef.en-us.md" >}}) | - | `knife` |
| [Composer]({{< relref "doc/packages/composer.en-us.md" >}}) | PHP | `composer` |
| [Conan]({{< relref "doc/packages/conan.en-us.md" >}}) | C++ | `conan` |
| [Conda]({{< relref "doc/packages/conda.en-us.md" >}}) | - | `conda` |
@@ -124,6 +124,7 @@ Admin operations:
- `--secret`: Client Secret.
- `--auto-discover-url`: OpenID Connect Auto Discovery URL (only required when using OpenID Connect as provider).
- `--use-custom-urls`: Use custom URLs for GitLab/GitHub OAuth endpoints.
- `--custom-tenant-id`: Use custom Tenant ID for OAuth endpoints.
- `--custom-auth-url`: Use a custom Authorization URL (option for GitLab/GitHub).
- `--custom-token-url`: Use a custom Token URL (option for GitLab/GitHub).
- `--custom-profile-url`: Use a custom Profile URL (option for GitLab/GitHub).
@@ -147,6 +148,7 @@ Admin operations:
- `--secret`: Client Secret.
- `--auto-discover-url`: OpenID Connect Auto Discovery URL (only required when using OpenID Connect as provider).
- `--use-custom-urls`: Use custom URLs for GitLab/GitHub OAuth endpoints.
- `--custom-tenant-id`: Use custom Tenant ID for OAuth endpoints.
- `--custom-auth-url`: Use a custom Authorization URL (option for GitLab/GitHub).
- `--custom-token-url`: Use a custom Token URL (option for GitLab/GitHub).
- `--custom-profile-url`: Use a custom Profile URL (option for GitLab/GitHub).
@@ -13,6 +13,7 @@ import (
	user_model "code.gitea.io/gitea/models/user"
	"code.gitea.io/gitea/modules/base"
	"code.gitea.io/gitea/modules/log"
	"code.gitea.io/gitea/modules/util"

	"xorm.io/xorm"
)
@@ -175,7 +176,18 @@ func (prs PullRequestList) loadAttributes(ctx context.Context) error {
	}
	for _, pr := range prs {
		pr.Issue = set[pr.IssueID]
		pr.Issue.PullRequest = pr // panic here means issueIDs and prs are not in sync
		/*
			Old code:
			pr.Issue.PullRequest = pr // panic here means issueIDs and prs are not in sync

			It was worth a panic because this is almost impossible under normal use.
			But in integration testing, an asynchronous task could read a database that has been reset.
			So returning an error makes more sense, letting the caller choose to ignore it.
		*/
		if pr.Issue == nil {
			return fmt.Errorf("issues and prs may be not in sync: cannot find issue %v for pr %v: %w", pr.IssueID, pr.ID, util.ErrNotExist)
		}
		pr.Issue.PullRequest = pr
	}
	return nil
}
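Because the replacement error wraps `util.ErrNotExist` via `%w`, a caller that prefers the old lenient behaviour can detect the out-of-sync case instead of failing hard. A minimal sketch (the stub below only simulates the error and is not part of the change):

```go
package main

import (
	"errors"
	"fmt"

	"code.gitea.io/gitea/modules/util"
)

// loadAttributes stands in for PullRequestList.loadAttributes; it fails the
// same way the new code does when an issue row is missing.
func loadAttributes() error {
	return fmt.Errorf("issues and prs may be not in sync: cannot find issue 42 for pr 7: %w", util.ErrNotExist)
}

func main() {
	if err := loadAttributes(); err != nil {
		if errors.Is(err, util.ErrNotExist) {
			// e.g. an integration test reading a database that was just reset:
			// the caller can now choose to tolerate the inconsistency.
			fmt.Println("pull request list out of sync:", err)
			return
		}
		panic(err)
	}
}
```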
@@ -11,6 +11,8 @@ import (
	repo_model "code.gitea.io/gitea/models/repo"
	user_model "code.gitea.io/gitea/models/user"
	"code.gitea.io/gitea/modules/json"
	"code.gitea.io/gitea/modules/packages/cargo"
	"code.gitea.io/gitea/modules/packages/chef"
	"code.gitea.io/gitea/modules/packages/composer"
	"code.gitea.io/gitea/modules/packages/conan"
	"code.gitea.io/gitea/modules/packages/conda"
@@ -129,6 +131,10 @@ func GetPackageDescriptor(ctx context.Context, pv *PackageVersion) (*PackageDesc

	var metadata interface{}
	switch p.Type {
	case TypeCargo:
		metadata = &cargo.Metadata{}
	case TypeChef:
		metadata = &chef.Metadata{}
	case TypeComposer:
		metadata = &composer.Metadata{}
	case TypeConan:
@@ -30,6 +30,8 @@ type Type string

// List of supported packages
const (
	TypeCargo Type = "cargo"
	TypeChef Type = "chef"
	TypeComposer Type = "composer"
	TypeConan Type = "conan"
	TypeConda Type = "conda"
@@ -46,6 +48,8 @@ const (
)

var TypeList = []Type{
	TypeCargo,
	TypeChef,
	TypeComposer,
	TypeConan,
	TypeConda,
@@ -64,6 +68,10 @@ var TypeList = []Type{
// Name gets the name of the package type
func (pt Type) Name() string {
	switch pt {
	case TypeCargo:
		return "Cargo"
	case TypeChef:
		return "Chef"
	case TypeComposer:
		return "Composer"
	case TypeConan:
@@ -97,6 +105,10 @@ func (pt Type) Name() string {
// SVGName gets the name of the package type svg image
func (pt Type) SVGName() string {
	switch pt {
	case TypeCargo:
		return "gitea-cargo"
	case TypeChef:
		return "gitea-chef"
	case TypeComposer:
		return "gitea-composer"
	case TypeConan:
@@ -58,6 +58,12 @@ func GetPropertiesByName(ctx context.Context, refType PropertyType, refID int64,
	return pps, db.GetEngine(ctx).Where("ref_type = ? AND ref_id = ? AND name = ?", refType, refID, name).Find(&pps)
}

// UpdateProperty updates a property
func UpdateProperty(ctx context.Context, pp *PackageProperty) error {
	_, err := db.GetEngine(ctx).ID(pp.ID).Update(pp)
	return err
}

// DeleteAllProperties deletes all properties of a ref
func DeleteAllProperties(ctx context.Context, refType PropertyType, refID int64) error {
	_, err := db.GetEngine(ctx).Where("ref_type = ? AND ref_id = ?", refType, refID).Delete(&PackageProperty{})
@@ -5,8 +5,11 @@ package activitypub

import (
	user_model "code.gitea.io/gitea/models/user"
	"code.gitea.io/gitea/modules/util"
)

const rsaBits = 2048

// GetKeyPair function returns a user's private and public keys
func GetKeyPair(user *user_model.User) (pub, priv string, err error) {
	var settings map[string]*user_model.Setting
@@ -14,7 +17,7 @@ func GetKeyPair(user *user_model.User) (pub, priv string, err error) {
	if err != nil {
		return
	} else if len(settings) == 0 {
		if priv, pub, err = GenerateKeyPair(); err != nil {
		if priv, pub, err = util.GenerateKeyPair(rsaBits); err != nil {
			return
		}
		if err = user_model.SetUserSetting(user.ID, user_model.UserActivityPubPrivPem, priv); err != nil {
@@ -14,6 +14,7 @@ import (
	"code.gitea.io/gitea/modules/mcaptcha"
	"code.gitea.io/gitea/modules/recaptcha"
	"code.gitea.io/gitea/modules/setting"
	"code.gitea.io/gitea/modules/turnstile"

	"gitea.com/go-chi/captcha"
)
@@ -47,12 +48,14 @@ func SetCaptchaData(ctx *Context) {
	ctx.Data["HcaptchaSitekey"] = setting.Service.HcaptchaSitekey
	ctx.Data["McaptchaSitekey"] = setting.Service.McaptchaSitekey
	ctx.Data["McaptchaURL"] = setting.Service.McaptchaURL
	ctx.Data["CfTurnstileSitekey"] = setting.Service.CfTurnstileSitekey
}

const (
	gRecaptchaResponseField = "g-recaptcha-response"
	hCaptchaResponseField = "h-captcha-response"
	mCaptchaResponseField = "m-captcha-response"
	gRecaptchaResponseField = "g-recaptcha-response"
	hCaptchaResponseField = "h-captcha-response"
	mCaptchaResponseField = "m-captcha-response"
	cfTurnstileResponseField = "cf-turnstile-response"
)

// VerifyCaptcha verifies Captcha data
@@ -73,6 +76,8 @@ func VerifyCaptcha(ctx *Context, tpl base.TplName, form interface{}) {
		valid, err = hcaptcha.Verify(ctx, ctx.Req.Form.Get(hCaptchaResponseField))
	case setting.MCaptcha:
		valid, err = mcaptcha.Verify(ctx, ctx.Req.Form.Get(mCaptchaResponseField))
	case setting.CfTurnstile:
		valid, err = turnstile.Verify(ctx, ctx.Req.Form.Get(cfTurnstileResponseField))
	default:
		ctx.ServerError("Unknown Captcha Type", fmt.Errorf("Unknown Captcha Type: %s", setting.Service.CaptchaType))
		return
@@ -135,8 +135,7 @@ func (c *CheckAttributeReader) Init(ctx context.Context) error {

	c.env = append(c.env, "GIT_FLUSH=1")

	// The empty "--" comes from #16773, and it seems unnecessary because nothing else would be added later.
	c.cmd.AddDynamicArguments(c.Attributes...).AddArguments("--")
	c.cmd.AddDynamicArguments(c.Attributes...)

	var err error
modules/packages/cargo/parser.go (new file, 169 lines)
@@ -0,0 +1,169 @@
// Copyright 2022 The Gitea Authors. All rights reserved.
// SPDX-License-Identifier: MIT

package cargo

import (
	"encoding/binary"
	"errors"
	"io"
	"regexp"

	"code.gitea.io/gitea/modules/json"
	"code.gitea.io/gitea/modules/validation"

	"github.com/hashicorp/go-version"
)

const PropertyYanked = "cargo.yanked"

var (
	ErrInvalidName = errors.New("package name is invalid")
	ErrInvalidVersion = errors.New("package version is invalid")
)

// Package represents a Cargo package
type Package struct {
	Name string
	Version string
	Metadata *Metadata
	Content io.Reader
	ContentSize int64
}

// Metadata represents the metadata of a Cargo package
type Metadata struct {
	Dependencies []*Dependency `json:"dependencies,omitempty"`
	Features map[string][]string `json:"features,omitempty"`
	Authors []string `json:"authors,omitempty"`
	Description string `json:"description,omitempty"`
	DocumentationURL string `json:"documentation_url,omitempty"`
	ProjectURL string `json:"project_url,omitempty"`
	Readme string `json:"readme,omitempty"`
	Keywords []string `json:"keywords,omitempty"`
	Categories []string `json:"categories,omitempty"`
	License string `json:"license,omitempty"`
	RepositoryURL string `json:"repository_url,omitempty"`
	Links string `json:"links,omitempty"`
}

type Dependency struct {
	Name string `json:"name"`
	Req string `json:"req"`
	Features []string `json:"features"`
	Optional bool `json:"optional"`
	DefaultFeatures bool `json:"default_features"`
	Target *string `json:"target"`
	Kind string `json:"kind"`
	Registry *string `json:"registry"`
	Package *string `json:"package"`
}

var nameMatch = regexp.MustCompile(`\A[a-zA-Z][a-zA-Z0-9-_]{0,63}\z`)

// ParsePackage reads the metadata and content of a package
func ParsePackage(r io.Reader) (*Package, error) {
	var size uint32
	if err := binary.Read(r, binary.LittleEndian, &size); err != nil {
		return nil, err
	}

	p, err := parsePackage(io.LimitReader(r, int64(size)))
	if err != nil {
		return nil, err
	}

	if err := binary.Read(r, binary.LittleEndian, &size); err != nil {
		return nil, err
	}

	p.Content = io.LimitReader(r, int64(size))
	p.ContentSize = int64(size)

	return p, nil
}

func parsePackage(r io.Reader) (*Package, error) {
	var meta struct {
		Name string `json:"name"`
		Vers string `json:"vers"`
		Deps []struct {
			Name string `json:"name"`
			VersionReq string `json:"version_req"`
			Features []string `json:"features"`
			Optional bool `json:"optional"`
			DefaultFeatures bool `json:"default_features"`
			Target *string `json:"target"`
			Kind string `json:"kind"`
			Registry *string `json:"registry"`
			ExplicitNameInToml string `json:"explicit_name_in_toml"`
		} `json:"deps"`
		Features map[string][]string `json:"features"`
		Authors []string `json:"authors"`
		Description string `json:"description"`
		Documentation string `json:"documentation"`
		Homepage string `json:"homepage"`
		Readme string `json:"readme"`
		ReadmeFile string `json:"readme_file"`
		Keywords []string `json:"keywords"`
		Categories []string `json:"categories"`
		License string `json:"license"`
		LicenseFile string `json:"license_file"`
		Repository string `json:"repository"`
		Links string `json:"links"`
	}
	if err := json.NewDecoder(r).Decode(&meta); err != nil {
		return nil, err
	}

	if !nameMatch.MatchString(meta.Name) {
		return nil, ErrInvalidName
	}

	if _, err := version.NewSemver(meta.Vers); err != nil {
		return nil, ErrInvalidVersion
	}

	if !validation.IsValidURL(meta.Homepage) {
		meta.Homepage = ""
	}
	if !validation.IsValidURL(meta.Documentation) {
		meta.Documentation = ""
	}
	if !validation.IsValidURL(meta.Repository) {
		meta.Repository = ""
	}

	dependencies := make([]*Dependency, 0, len(meta.Deps))
	for _, dep := range meta.Deps {
		dependencies = append(dependencies, &Dependency{
			Name: dep.Name,
			Req: dep.VersionReq,
			Features: dep.Features,
			Optional: dep.Optional,
			DefaultFeatures: dep.DefaultFeatures,
			Target: dep.Target,
			Kind: dep.Kind,
			Registry: dep.Registry,
		})
	}

	return &Package{
		Name: meta.Name,
		Version: meta.Vers,
		Metadata: &Metadata{
			Dependencies: dependencies,
			Features: meta.Features,
			Authors: meta.Authors,
			Description: meta.Description,
			DocumentationURL: meta.Documentation,
			ProjectURL: meta.Homepage,
			Readme: meta.Readme,
			Keywords: meta.Keywords,
			Categories: meta.Categories,
			License: meta.License,
			RepositoryURL: meta.Repository,
			Links: meta.Links,
		},
	}, nil
}
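`ParsePackage` above reads the crates.io-style publish body: a little-endian uint32 length, the JSON metadata, another little-endian uint32 length, then the `.crate` file bytes. A small sketch that frames a body this way (the helper name and sample payloads are illustrative only):

```go
package main

import (
	"bytes"
	"encoding/binary"
)

// buildPublishBody frames data the way ParsePackage expects:
// [uint32 LE metadata length][metadata JSON][uint32 LE crate length][crate bytes].
func buildPublishBody(metadataJSON, crateFile []byte) []byte {
	var buf bytes.Buffer
	_ = binary.Write(&buf, binary.LittleEndian, uint32(len(metadataJSON)))
	buf.Write(metadataJSON)
	_ = binary.Write(&buf, binary.LittleEndian, uint32(len(crateFile)))
	buf.Write(crateFile)
	return buf.Bytes()
}

func main() {
	meta := []byte(`{"name":"demo","vers":"1.0.0","deps":[]}`)
	crate := []byte("crate tarball bytes")
	_ = buildPublishBody(meta, crate)
}
```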
modules/packages/cargo/parser_test.go (new file, 86 lines)
@@ -0,0 +1,86 @@
// Copyright 2022 The Gitea Authors. All rights reserved.
// SPDX-License-Identifier: MIT

package cargo

import (
	"bytes"
	"encoding/binary"
	"io"
	"strings"
	"testing"

	"github.com/stretchr/testify/assert"
)

const (
	description = "Package Description"
	author = "KN4CK3R"
	homepage = "https://gitea.io/"
	license = "MIT"
)

func TestParsePackage(t *testing.T) {
	createPackage := func(name, version string) io.Reader {
		metadata := `{
			"name":"` + name + `",
			"vers":"` + version + `",
			"description":"` + description + `",
			"authors": ["` + author + `"],
			"deps":[
				{
					"name":"dep",
					"version_req":"1.0"
				}
			],
			"homepage":"` + homepage + `",
			"license":"` + license + `"
		}`

		var buf bytes.Buffer
		binary.Write(&buf, binary.LittleEndian, uint32(len(metadata)))
		buf.WriteString(metadata)
		binary.Write(&buf, binary.LittleEndian, uint32(4))
		buf.WriteString("test")
		return &buf
	}

	t.Run("InvalidName", func(t *testing.T) {
		for _, name := range []string{"", "0test", "-test", "_test", strings.Repeat("a", 65)} {
			data := createPackage(name, "1.0.0")

			cp, err := ParsePackage(data)
			assert.Nil(t, cp)
			assert.ErrorIs(t, err, ErrInvalidName)
		}
	})

	t.Run("InvalidVersion", func(t *testing.T) {
		for _, version := range []string{"", "1.", "-1.0", "1.0.0/1"} {
			data := createPackage("test", version)

			cp, err := ParsePackage(data)
			assert.Nil(t, cp)
			assert.ErrorIs(t, err, ErrInvalidVersion)
		}
	})

	t.Run("Valid", func(t *testing.T) {
		data := createPackage("test", "1.0.0")

		cp, err := ParsePackage(data)
		assert.NotNil(t, cp)
		assert.NoError(t, err)

		assert.Equal(t, "test", cp.Name)
		assert.Equal(t, "1.0.0", cp.Version)
		assert.Equal(t, description, cp.Metadata.Description)
		assert.Equal(t, []string{author}, cp.Metadata.Authors)
		assert.Len(t, cp.Metadata.Dependencies, 1)
		assert.Equal(t, "dep", cp.Metadata.Dependencies[0].Name)
		assert.Equal(t, homepage, cp.Metadata.ProjectURL)
		assert.Equal(t, license, cp.Metadata.License)
		content, _ := io.ReadAll(cp.Content)
		assert.Equal(t, "test", string(content))
	})
}
modules/packages/chef/metadata.go (new file, 134 lines)
@@ -0,0 +1,134 @@
// Copyright 2023 The Gitea Authors. All rights reserved.
// SPDX-License-Identifier: MIT

package chef

import (
	"archive/tar"
	"compress/gzip"
	"io"
	"regexp"
	"strings"

	"code.gitea.io/gitea/modules/json"
	"code.gitea.io/gitea/modules/util"
	"code.gitea.io/gitea/modules/validation"
)

const (
	KeyBits = 4096
	SettingPublicPem = "chef.public_pem"
)

var (
	ErrMissingMetadataFile = util.NewInvalidArgumentErrorf("metadata.json file is missing")
	ErrInvalidName = util.NewInvalidArgumentErrorf("package name is invalid")
	ErrInvalidVersion = util.NewInvalidArgumentErrorf("package version is invalid")

	namePattern = regexp.MustCompile(`\A\S+\z`)
	versionPattern = regexp.MustCompile(`\A\d+\.\d+(?:\.\d+)?\z`)
)

// Package represents a Chef package
type Package struct {
	Name string
	Version string
	Metadata *Metadata
}

// Metadata represents the metadata of a Chef package
type Metadata struct {
	Description string `json:"description,omitempty"`
	LongDescription string `json:"long_description,omitempty"`
	Author string `json:"author,omitempty"`
	License string `json:"license,omitempty"`
	RepositoryURL string `json:"repository_url,omitempty"`
	Dependencies map[string]string `json:"dependencies,omitempty"`
}

type chefMetadata struct {
	Name string `json:"name"`
	Description string `json:"description"`
	LongDescription string `json:"long_description"`
	Maintainer string `json:"maintainer"`
	MaintainerEmail string `json:"maintainer_email"`
	License string `json:"license"`
	Platforms map[string]string `json:"platforms"`
	Dependencies map[string]string `json:"dependencies"`
	Providing map[string]string `json:"providing"`
	Recipes map[string]string `json:"recipes"`
	Version string `json:"version"`
	SourceURL string `json:"source_url"`
	IssuesURL string `json:"issues_url"`
	Privacy bool `json:"privacy"`
	ChefVersions [][]string `json:"chef_versions"`
	Gems [][]string `json:"gems"`
	EagerLoadLibraries bool `json:"eager_load_libraries"`
}

// ParsePackage parses the Chef package file
func ParsePackage(r io.Reader) (*Package, error) {
	gzr, err := gzip.NewReader(r)
	if err != nil {
		return nil, err
	}
	defer gzr.Close()

	tr := tar.NewReader(gzr)
	for {
		hd, err := tr.Next()
		if err == io.EOF {
			break
		}
		if err != nil {
			return nil, err
		}

		if hd.Typeflag != tar.TypeReg {
			continue
		}

		if strings.Count(hd.Name, "/") != 1 {
			continue
		}

		if hd.FileInfo().Name() == "metadata.json" {
			return ParseChefMetadata(tr)
		}
	}

	return nil, ErrMissingMetadataFile
}

// ParseChefMetadata parses a metadata.json file to retrieve the metadata of a Chef package
func ParseChefMetadata(r io.Reader) (*Package, error) {
	var cm chefMetadata
	if err := json.NewDecoder(r).Decode(&cm); err != nil {
		return nil, err
	}

	if !namePattern.MatchString(cm.Name) {
		return nil, ErrInvalidName
	}

	if !versionPattern.MatchString(cm.Version) {
		return nil, ErrInvalidVersion
	}

	if !validation.IsValidURL(cm.SourceURL) {
		cm.SourceURL = ""
	}

	return &Package{
		Name: cm.Name,
		Version: cm.Version,
		Metadata: &Metadata{
			Description: cm.Description,
			LongDescription: cm.LongDescription,
			Author: cm.Maintainer,
			License: cm.License,
			RepositoryURL: cm.SourceURL,
			Dependencies: cm.Dependencies,
		},
	}, nil
}
modules/packages/chef/metadata_test.go (new file, 92 lines)
@@ -0,0 +1,92 @@
// Copyright 2023 The Gitea Authors. All rights reserved.
// SPDX-License-Identifier: MIT

package chef

import (
	"archive/tar"
	"bytes"
	"compress/gzip"
	"strings"
	"testing"

	"github.com/stretchr/testify/assert"
)

const (
	packageName = "gitea"
	packageVersion = "1.0.1"
	packageAuthor = "KN4CK3R"
	packageDescription = "Package Description"
	packageRepositoryURL = "https://gitea.io/gitea/gitea"
)

func TestParsePackage(t *testing.T) {
	t.Run("MissingMetadataFile", func(t *testing.T) {
		var buf bytes.Buffer
		zw := gzip.NewWriter(&buf)
		tw := tar.NewWriter(zw)
		tw.Close()
		zw.Close()

		p, err := ParsePackage(&buf)
		assert.Nil(t, p)
		assert.ErrorIs(t, err, ErrMissingMetadataFile)
	})

	t.Run("Valid", func(t *testing.T) {
		var buf bytes.Buffer
		zw := gzip.NewWriter(&buf)
		tw := tar.NewWriter(zw)

		content := `{"name":"` + packageName + `","version":"` + packageVersion + `"}`

		hdr := &tar.Header{
			Name: packageName + "/metadata.json",
			Mode: 0o600,
			Size: int64(len(content)),
		}
		tw.WriteHeader(hdr)
		tw.Write([]byte(content))

		tw.Close()
		zw.Close()

		p, err := ParsePackage(&buf)
		assert.NoError(t, err)
		assert.NotNil(t, p)
		assert.Equal(t, packageName, p.Name)
		assert.Equal(t, packageVersion, p.Version)
		assert.NotNil(t, p.Metadata)
	})
}

func TestParseChefMetadata(t *testing.T) {
	t.Run("InvalidName", func(t *testing.T) {
		for _, name := range []string{" test", "test "} {
			p, err := ParseChefMetadata(strings.NewReader(`{"name":"` + name + `","version":"1.0.0"}`))
			assert.Nil(t, p)
			assert.ErrorIs(t, err, ErrInvalidName)
		}
	})

	t.Run("InvalidVersion", func(t *testing.T) {
		for _, version := range []string{"1", "1.2.3.4", "1.0.0 "} {
			p, err := ParseChefMetadata(strings.NewReader(`{"name":"test","version":"` + version + `"}`))
			assert.Nil(t, p)
			assert.ErrorIs(t, err, ErrInvalidVersion)
		}
	})

	t.Run("Valid", func(t *testing.T) {
		p, err := ParseChefMetadata(strings.NewReader(`{"name":"` + packageName + `","version":"` + packageVersion + `","description":"` + packageDescription + `","maintainer":"` + packageAuthor + `","source_url":"` + packageRepositoryURL + `"}`))
		assert.NotNil(t, p)
		assert.NoError(t, err)

		assert.Equal(t, packageName, p.Name)
		assert.Equal(t, packageVersion, p.Version)
		assert.Equal(t, packageDescription, p.Metadata.Description)
		assert.Equal(t, packageAuthor, p.Metadata.Author)
		assert.Equal(t, packageRepositoryURL, p.Metadata.RepositoryURL)
	})
}
@@ -211,6 +211,7 @@ func CreateRepository(doer, u *user_model.User, opts CreateRepoOptions) (*repo_m
		IsEmpty: !opts.AutoInit,
		TrustModel: opts.TrustModel,
		IsMirror: opts.IsMirror,
		DefaultBranch: opts.DefaultBranch,
	}

	var rollbackRepo *repo_model.Repository
@@ -25,6 +25,8 @@ var (

	LimitTotalOwnerCount int64
	LimitTotalOwnerSize int64
	LimitSizeCargo int64
	LimitSizeChef int64
	LimitSizeComposer int64
	LimitSizeConan int64
	LimitSizeConda int64
@@ -65,6 +67,8 @@ func newPackages() {
	}

	Packages.LimitTotalOwnerSize = mustBytes(sec, "LIMIT_TOTAL_OWNER_SIZE")
	Packages.LimitSizeCargo = mustBytes(sec, "LIMIT_SIZE_CARGO")
	Packages.LimitSizeChef = mustBytes(sec, "LIMIT_SIZE_CHEF")
	Packages.LimitSizeComposer = mustBytes(sec, "LIMIT_SIZE_COMPOSER")
	Packages.LimitSizeConan = mustBytes(sec, "LIMIT_SIZE_CONAN")
	Packages.LimitSizeConda = mustBytes(sec, "LIMIT_SIZE_CONDA")
@@ -46,6 +46,8 @@ var Service = struct {
	RecaptchaSecret string
	RecaptchaSitekey string
	RecaptchaURL string
	CfTurnstileSecret string
	CfTurnstileSitekey string
	HcaptchaSecret string
	HcaptchaSitekey string
	McaptchaSecret string
@@ -137,6 +139,8 @@ func newService() {
	Service.RecaptchaSecret = sec.Key("RECAPTCHA_SECRET").MustString("")
	Service.RecaptchaSitekey = sec.Key("RECAPTCHA_SITEKEY").MustString("")
	Service.RecaptchaURL = sec.Key("RECAPTCHA_URL").MustString("https://www.google.com/recaptcha/")
	Service.CfTurnstileSecret = sec.Key("CF_TURNSTILE_SECRET").MustString("")
	Service.CfTurnstileSitekey = sec.Key("CF_TURNSTILE_SITEKEY").MustString("")
	Service.HcaptchaSecret = sec.Key("HCAPTCHA_SECRET").MustString("")
	Service.HcaptchaSitekey = sec.Key("HCAPTCHA_SITEKEY").MustString("")
	Service.McaptchaURL = sec.Key("MCAPTCHA_URL").MustString("https://demo.mcaptcha.org/")
@@ -61,6 +61,7 @@ const (
	ReCaptcha = "recaptcha"
	HCaptcha = "hcaptcha"
	MCaptcha = "mcaptcha"
	CfTurnstile = "cfturnstile"
)

// settings
modules/turnstile/turnstile.go (new file, 92 lines)
@@ -0,0 +1,92 @@
// Copyright 2023 The Gitea Authors. All rights reserved.
// SPDX-License-Identifier: MIT

package turnstile

import (
	"context"
	"fmt"
	"io"
	"net/http"
	"net/url"
	"strings"

	"code.gitea.io/gitea/modules/json"
	"code.gitea.io/gitea/modules/setting"
)

// Response is the structure of JSON returned from API
type Response struct {
	Success bool `json:"success"`
	ChallengeTS string `json:"challenge_ts"`
	Hostname string `json:"hostname"`
	ErrorCodes []ErrorCode `json:"error-codes"`
	Action string `json:"login"`
	Cdata string `json:"cdata"`
}

// Verify calls the Cloudflare Turnstile API to verify a token
func Verify(ctx context.Context, response string) (bool, error) {
	// Cloudflare Turnstile server-side validation docs: https://developers.cloudflare.com/turnstile/get-started/server-side-validation/
	post := url.Values{
		"secret": {setting.Service.CfTurnstileSecret},
		"response": {response},
	}
	// Basically a copy of http.PostForm, but with a context
	req, err := http.NewRequestWithContext(ctx, http.MethodPost,
		"https://challenges.cloudflare.com/turnstile/v0/siteverify", strings.NewReader(post.Encode()))
	if err != nil {
		return false, fmt.Errorf("Failed to create CAPTCHA request: %w", err)
	}
	req.Header.Set("Content-Type", "application/x-www-form-urlencoded")

	resp, err := http.DefaultClient.Do(req)
	if err != nil {
		return false, fmt.Errorf("Failed to send CAPTCHA response: %w", err)
	}
	defer resp.Body.Close()
	body, err := io.ReadAll(resp.Body)
	if err != nil {
		return false, fmt.Errorf("Failed to read CAPTCHA response: %w", err)
	}

	var jsonResponse Response
	if err := json.Unmarshal(body, &jsonResponse); err != nil {
		return false, fmt.Errorf("Failed to parse CAPTCHA response: %w", err)
	}

	var respErr error
	if len(jsonResponse.ErrorCodes) > 0 {
		respErr = jsonResponse.ErrorCodes[0]
	}
	return jsonResponse.Success, respErr
}

// ErrorCode is a Turnstile error code
type ErrorCode string

// String fulfills the Stringer interface
func (e ErrorCode) String() string {
	switch e {
	case "missing-input-secret":
		return "The secret parameter was not passed."
	case "invalid-input-secret":
		return "The secret parameter was invalid or did not exist."
	case "missing-input-response":
		return "The response parameter was not passed."
	case "invalid-input-response":
		return "The response parameter is invalid or has expired."
	case "bad-request":
		return "The request was rejected because it was malformed."
	case "timeout-or-duplicate":
		return "The response parameter has already been validated before."
	case "internal-error":
		return "An internal error happened while validating the response. The request can be retried."
	}
	return string(e)
}

// Error fulfills the error interface
func (e ErrorCode) Error() string {
	return e.String()
}
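A rough usage sketch for the new module, assuming `CF_TURNSTILE_SECRET` is configured and using a placeholder token (in Gitea itself the call is made from `VerifyCaptcha` with the `cf-turnstile-response` form field, as shown earlier):

```go
package main

import (
	"context"
	"errors"
	"fmt"
	"time"

	"code.gitea.io/gitea/modules/turnstile"
)

func main() {
	ctx, cancel := context.WithTimeout(context.Background(), 10*time.Second)
	defer cancel()

	// "token" is a placeholder for the value of the cf-turnstile-response field.
	ok, err := turnstile.Verify(ctx, "token")
	if err != nil {
		var code turnstile.ErrorCode
		if errors.As(err, &code) {
			// The error carries the first Turnstile error code, e.g. "invalid-input-response".
			fmt.Println("turnstile rejected the token:", code)
			return
		}
		fmt.Println("verification request failed:", err)
		return
	}
	fmt.Println("captcha valid:", ok)
}
```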
@@ -1,7 +1,7 @@
// Copyright 2021 The Gitea Authors. All rights reserved.
// SPDX-License-Identifier: MIT

package activitypub
package util

import (
	"crypto/rand"
@@ -10,11 +10,9 @@ import (
	"encoding/pem"
)

const rsaBits = 2048

// GenerateKeyPair generates a public and private keypair for signing actions by users for activitypub purposes
func GenerateKeyPair() (string, string, error) {
	priv, _ := rsa.GenerateKey(rand.Reader, rsaBits)
// GenerateKeyPair generates a public and private keypair
func GenerateKeyPair(bits int) (string, string, error) {
	priv, _ := rsa.GenerateKey(rand.Reader, bits)
	privPem, err := pemBlockForPriv(priv)
	if err != nil {
		return "", "", err
@@ -1,7 +1,7 @@
// Copyright 2021 The Gitea Authors. All rights reserved.
// SPDX-License-Identifier: MIT

package activitypub
package util

import (
	"crypto"
@@ -17,7 +17,7 @@ import (
)

func TestKeygen(t *testing.T) {
	priv, pub, err := GenerateKeyPair()
	priv, pub, err := GenerateKeyPair(2048)
	assert.NoError(t, err)

	assert.NotEmpty(t, priv)
@@ -28,7 +28,7 @@ func TestKeygen(t *testing.T) {
}

func TestSignUsingKeys(t *testing.T) {
	priv, pub, err := GenerateKeyPair()
	priv, pub, err := GenerateKeyPair(2048)
	assert.NoError(t, err)

	privPem, _ := pem.Decode([]byte(priv))
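The refactor turns the ActivityPub-only helper into a shared `util.GenerateKeyPair(bits)`: ActivityPub keeps its local `rsaBits = 2048`, while the Chef registry declares `KeyBits = 4096` for its signing keys. A minimal sketch of calling the shared helper; wiring it to `chef.KeyBits` is an assumption, since the server-side Chef key generation is not part of this excerpt:

```go
package main

import (
	"fmt"

	"code.gitea.io/gitea/modules/packages/chef"
	"code.gitea.io/gitea/modules/util"
)

func main() {
	// Generate a 4096-bit RSA pair, matching chef.KeyBits; only the public half
	// would be stored (see chef.SettingPublicPem), the private key goes to the user.
	priv, pub, err := util.GenerateKeyPair(chef.KeyBits)
	if err != nil {
		panic(err)
	}
	fmt.Println(len(priv) > 0, len(pub) > 0)
}
```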
@@ -3145,6 +3145,8 @@ keywords = Keywords
details = Details
details.author = Author
details.project_site = Project Site
details.repository_site = Repository Site
details.documentation_site = Documentation Site
details.license = License
assets = Assets
versions = Versions
@@ -3152,6 +3154,14 @@ versions.on = on
versions.view_all = View all
dependency.id = ID
dependency.version = Version
cargo.registry = Setup this registry in the Cargo configuration file (for example <code>~/.cargo/config.toml</code>):
cargo.install = To install the package using Cargo, run the following command:
cargo.documentation = For more information on the Cargo registry, see <a target="_blank" rel="noopener noreferrer" href="https://docs.gitea.io/en-us/packages/cargo/">the documentation</a>.
cargo.details.repository_site = Repository Site
cargo.details.documentation_site = Documentation Site
chef.registry = Setup this registry in your <code>~/.chef/config.rb</code> file:
chef.install = To install the package, run the following command:
chef.documentation = For more information on the Chef registry, see <a target="_blank" rel="noopener noreferrer" href="https://docs.gitea.io/en-us/packages/chef/">the documentation</a>.
composer.registry = Setup this registry in your <code>~/.composer/config.json</code> file:
composer.install = To install the package using Composer, run the following command:
composer.documentation = For more information on the Composer registry, see <a target="_blank" rel="noopener noreferrer" href="https://docs.gitea.io/en-us/packages/composer/">the documentation</a>.
@@ -3168,8 +3178,6 @@ conda.details.repository_site = Repository Site
conda.details.documentation_site = Documentation Site
container.details.type = Image Type
container.details.platform = Platform
container.details.repository_site = Repository Site
container.details.documentation_site = Documentation Site
container.pull = Pull the image from the command line:
container.digest = Digest:
container.documentation = For more information on the Container registry, see <a target="_blank" rel="noopener noreferrer" href="https://docs.gitea.io/en-us/packages/container/">the documentation</a>.
@@ -3203,8 +3211,6 @@ npm.dependencies.optional = Optional Dependencies
npm.details.tag = Tag
pub.install = To install the package using Dart, run the following command:
pub.documentation = For more information on the Pub registry, see <a target="_blank" rel="noopener noreferrer" href="https://docs.gitea.io/en-us/packages/pub/">the documentation</a>.
pub.details.repository_site = Repository Site
pub.details.documentation_site = Documentation Site
pypi.requires = Requires Python
pypi.install = To install the package using pip, run the following command:
pypi.documentation = For more information on the PyPI registry, see <a target="_blank" rel="noopener noreferrer" href="https://docs.gitea.io/en-us/packages/pypi/">the documentation</a>.
@@ -3228,6 +3234,15 @@ settings.delete.description = Deleting a package is permanent and cannot be undo
settings.delete.notice = You are about to delete %s (%s). This operation is irreversible, are you sure?
settings.delete.success = The package has been deleted.
settings.delete.error = Failed to delete the package.
owner.settings.cargo.title = Cargo Registry Index
owner.settings.cargo.initialize = Initialize Index
owner.settings.cargo.initialize.description = To use the Cargo registry a special index git repository is needed. Here you can (re)create it with the required config.
owner.settings.cargo.initialize.error = Failed to initialize Cargo index: %v
owner.settings.cargo.initialize.success = The Cargo index was successfully created.
owner.settings.cargo.rebuild = Rebuild Index
owner.settings.cargo.rebuild.description = If the index is out of sync with the cargo packages stored you can rebuild it here.
owner.settings.cargo.rebuild.error = Failed to rebuild Cargo index: %v
owner.settings.cargo.rebuild.success = The Cargo index was successfully rebuilt.
owner.settings.cleanuprules.title = Manage Cleanup Rules
owner.settings.cleanuprules.add = Add Cleanup Rule
owner.settings.cleanuprules.edit = Edit Cleanup Rule
@@ -3248,6 +3263,9 @@ owner.settings.cleanuprules.remove.days = Remove versions older than
owner.settings.cleanuprules.remove.pattern = Remove versions matching
owner.settings.cleanuprules.success.update = Cleanup rule has been updated.
owner.settings.cleanuprules.success.delete = Cleanup rule has been deleted.
owner.settings.chef.title = Chef Registry
owner.settings.chef.keypair = Generate key pair
owner.settings.chef.keypair.description = Generate a key pair used to authenticate against the Chef registry. The previous key can not be used afterwards.

[secrets]
secrets = Secrets
public/img/svg/gitea-cargo.svg (new file, 1 line)
@@ -0,0 +1 @@
<svg xml:space="preserve" fill-rule="evenodd" stroke-linecap="round" stroke-linejoin="round" clip-rule="evenodd" viewBox="0 0 32 32" class="svg gitea-cargo" width="16" height="16" aria-hidden="true"><path d="M15.993 1.54c-7.972 0-14.461 6.492-14.461 14.462 0 7.969 6.492 14.461 14.461 14.461 7.97 0 14.462-6.492 14.462-14.461 0-7.97-6.492-14.462-14.462-14.462zm-.021 1.285a.954.954 0 0 1 .924.951c0 .522-.43.952-.952.952s-.951-.43-.951-.952.429-.952.951-.952l.028.001zm2.178 1.566a11.717 11.717 0 0 1 8.016 5.709l-1.123 2.533a.874.874 0 0 0 .44 1.147l2.16.958c.067.675.076 1.355.025 2.031h-1.202c-.12 0-.169.08-.169.196v.551c0 1.297-.731 1.582-1.373 1.652-.612.07-1.288-.257-1.374-.63-.361-2.029-.961-2.46-1.909-3.21 1.178-.746 2.401-1.85 2.401-3.325 0-1.594-1.092-2.597-1.835-3.09-1.046-.688-2.203-.826-2.515-.826H7.271a11.712 11.712 0 0 1 6.55-3.696l1.466 1.536a.862.862 0 0 0 1.223.028l1.64-1.564zM4.628 11.434c.511.015.924.44.924.951 0 .522-.43.952-.952.952s-.951-.43-.951-.952.429-.951.951-.951h.028zm22.685.043c.511.015.924.44.924.951 0 .522-.43.952-.952.952s-.951-.43-.951-.952a.956.956 0 0 1 .979-.951zm-20.892.153h1.658v7.477H4.732a11.715 11.715 0 0 1-.38-4.47l2.05-.912a.865.865 0 0 0 .441-1.144l-.422-.951zm6.92.079h3.949c.205 0 1.441.236 1.441 1.163 0 .768-.948 1.043-1.728 1.043h-3.665l.003-2.206zm0 5.373h3.026c.275 0 1.477.079 1.86 1.615.119.471.385 2.007.566 2.499.18.551.911 1.652 1.691 1.652h4.938c-.331.444-.693.863-1.083 1.255l-2.01-.432a.87.87 0 0 0-1.031.667l-.477 2.228a11.714 11.714 0 0 1-9.762-.046l-.478-2.228a.867.867 0 0 0-1.028-.667l-1.967.423a11.866 11.866 0 0 1-1.016-1.2h9.567c.107 0 .181-.018.181-.119v-3.384c0-.097-.074-.119-.181-.119h-2.799l.003-2.144zm-4.415 7.749c.512.015.924.44.924.951 0 .522-.429.952-.951.952s-.952-.43-.952-.952.43-.952.952-.952l.027.001zm14.089.043a.954.954 0 0 1 .923.951c0 .522-.429.952-.951.952s-.951-.43-.951-.952a.956.956 0 0 1 .979-.951z"/><path d="M29.647 16.002c0 7.49-6.163 13.653-13.654 13.653-7.49 0-13.654-6.163-13.654-13.653 0-7.491 6.164-13.654 13.654-13.654 7.491 0 13.654 6.163 13.654 13.654zm-.257-1.319 2.13 1.319-2.13 1.318 1.83 1.71-2.344.878 1.463 2.035-2.475.404 1.04 2.282-2.506-.089.575 2.442-2.441-.576.089 2.506-2.283-1.04-.403 2.475-2.035-1.462-.878 2.343-1.71-1.829-1.319 2.129-1.318-2.129-1.71 1.829-.878-2.343-2.035 1.462-.404-2.475-2.282 1.04.089-2.506-2.442.576.575-2.442-2.505.089 1.04-2.282-2.475-.404 1.462-2.035-2.343-.878 1.829-1.71-2.129-1.318 2.129-1.319-1.829-1.71 2.343-.878-1.462-2.035 2.475-.404-1.04-2.282 2.505.089-.575-2.441 2.442.575-.089-2.506 2.282 1.04.404-2.475 2.035 1.463.878-2.344 1.71 1.83 1.318-2.13 1.319 2.13 1.71-1.83.878 2.344 2.035-1.463.403 2.475 2.283-1.04-.089 2.506 2.441-.575-.575 2.441 2.506-.089-1.04 2.282 2.475.404-1.463 2.035 2.344.878-1.83 1.71z"/></svg>

public/img/svg/gitea-chef.svg (new file, 1 line)
@@ -0,0 +1 @@
<svg viewBox="0 0 36 36" class="svg gitea-chef" width="16" height="16" aria-hidden="true"><g fill="none" fill-rule="evenodd"><path fill="#435363" d="M18 25.8c-4.3 0-7.7-3.6-7.7-8s3.4-7.9 7.7-7.9c3.5 0 6.4 2.4 7.3 5.7h3c-1-5-5.2-8.7-10.3-8.7-5.9 0-10.6 4.9-10.6 10.9 0 6.1 4.7 11 10.6 11 5.1 0 9.3-3.7 10.3-8.7h-3c-.9 3.3-3.8 5.7-7.3 5.7"/><path fill="#435363" d="M12.8 23.2c1.3 1.4 3.1 2.3 5.2 2.3v-3.2c-1.2 0-2.3-.5-3.1-1.3l-2.1 2.2"/><path fill="#F38B00" d="M10.6 17.8c0 1.1.3 2.2.6 3.1l2.9-1.3c-.3-.5-.4-1.1-.4-1.8 0-2.4 1.9-4.4 4.3-4.4v-3.2c-4.1 0-7.4 3.4-7.4 7.6"/><path fill="#435363" d="m20.6 10.7-1.1 3c.9.4 1.7 1.1 2.2 1.9H25c-.7-2.2-2.3-4-4.4-4.9"/><path fill="#F38B00" d="m19.5 22 1.1 2.9c2.1-.8 3.7-2.6 4.4-4.8h-3.3c-.5.8-1.3 1.5-2.2 1.9"/><path fill="#435363" d="M4.4 22.1c-.1-.2-.1-.3-.1-.5-.1-.2-.1-.3-.2-.5V21c0-.1 0-.3-.1-.4v-.5c-.1-.1-.1-.2-.1-.3-.1-.6-.1-1.3-.1-2H.9c0 .8 0 1.5.1 2.2 0 .2.1.4.1.6v.1c0 .2.1.4.1.5s0 .2.1.3v.3c.1.1.1.2.1.4 0 0 .1.1.1.2 0 .2 0 .3.1.4v.2c.2.7.5 1.3.7 2L5 23.8c-.2-.6-.4-1.1-.6-1.7"/><path fill="#F38B00" d="M18 32.6c-3.9 0-7.5-1.7-10.1-4.4l-2 2.2c3.1 3.2 7.3 5.2 12.1 5.2 8.7 0 15.8-6.8 16.9-15.5H32c-1.1 7-7 12.5-14 12.5M18 3.1c3.1 0 6.1 1.1 8.4 2.9l1.8-2.4C25.3 1.4 21.8.1 18 .1 10.7.1 4.5 4.8 2.1 11.4l2.7 1.1C6.8 7 12 3.1 18 3.1"/><path fill="#435363" d="M32 15.6h2.9c-.3-2.6-1.2-5-2.5-7.2L30 10c1 1.7 1.7 3.6 2 5.6"/><path fill="#F38B00" d="M28.7 15.6h2.9c-.8-5.1-4.1-9.3-8.6-11.1l-1.1 2.8c3.5 1.3 6 4.5 6.8 8.3"/><path fill="#435363" d="M18 6.5v-3c-5.9 0-10.9 3.8-12.9 9.1l2.7 1.1C9.4 9.5 13.3 6.5 18 6.5"/><path fill="#F38B00" d="M7 17.8H4.1c0 6.1 3.6 11.2 8.7 13.4l1.1-2.8C9.9 26.7 7 22.6 7 17.8"/><path fill="#435363" d="M18 29.2v3c6.9 0 12.6-5.3 13.6-12.1h-2.9c-1 5.2-5.4 9.1-10.7 9.1"/></g></svg>
After Width: | Height: | Size: 1.7 KiB |
@ -14,6 +14,8 @@ import (
    "code.gitea.io/gitea/modules/log"
    "code.gitea.io/gitea/modules/setting"
    "code.gitea.io/gitea/modules/web"
    "code.gitea.io/gitea/routers/api/packages/cargo"
    "code.gitea.io/gitea/routers/api/packages/chef"
    "code.gitea.io/gitea/routers/api/packages/composer"
    "code.gitea.io/gitea/routers/api/packages/conan"
    "code.gitea.io/gitea/routers/api/packages/conda"
@ -53,6 +55,7 @@ func CommonRoutes(ctx gocontext.Context) *web.Route {
        &auth.Basic{},
        &nuget.Auth{},
        &conan.Auth{},
        &chef.Auth{},
    }
    if setting.Service.EnableReverseProxyAuth {
        authMethods = append(authMethods, &auth.ReverseProxy{})
@ -71,6 +74,39 @@ func CommonRoutes(ctx gocontext.Context) *web.Route {
    })

    r.Group("/{username}", func() {
        r.Group("/cargo", func() {
            r.Group("/api/v1/crates", func() {
                r.Get("", cargo.SearchPackages)
                r.Put("/new", reqPackageAccess(perm.AccessModeWrite), cargo.UploadPackage)
                r.Group("/{package}", func() {
                    r.Group("/{version}", func() {
                        r.Get("/download", cargo.DownloadPackageFile)
                        r.Delete("/yank", reqPackageAccess(perm.AccessModeWrite), cargo.YankPackage)
                        r.Put("/unyank", reqPackageAccess(perm.AccessModeWrite), cargo.UnyankPackage)
                    })
                    r.Get("/owners", cargo.ListOwners)
                })
            })
        }, reqPackageAccess(perm.AccessModeRead))
        r.Group("/chef", func() {
            r.Group("/api/v1", func() {
                r.Get("/universe", chef.PackagesUniverse)
                r.Get("/search", chef.EnumeratePackages)
                r.Group("/cookbooks", func() {
                    r.Get("", chef.EnumeratePackages)
                    r.Post("", reqPackageAccess(perm.AccessModeWrite), chef.UploadPackage)
                    r.Group("/{name}", func() {
                        r.Get("", chef.PackageMetadata)
                        r.Group("/versions/{version}", func() {
                            r.Get("", chef.PackageVersionMetadata)
                            r.Delete("", reqPackageAccess(perm.AccessModeWrite), chef.DeletePackageVersion)
                            r.Get("/download", chef.DownloadPackage)
                        })
                        r.Delete("", reqPackageAccess(perm.AccessModeWrite), chef.DeletePackage)
                    })
                })
            })
        }, reqPackageAccess(perm.AccessModeRead))
        r.Group("/composer", func() {
            r.Get("/packages.json", composer.ServiceIndex)
            r.Get("/search.json", composer.SearchPackages)
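For context, a minimal sketch of exercising the cargo search route registered above. The host https://gitea.example.com and the owner name "alice" are placeholders, and authentication is omitted, so this only works against packages readable anonymously:

package main

import (
    "fmt"
    "io"
    "net/http"
)

func main() {
    // Matches the route r.Get("", cargo.SearchPackages) mounted under /api/packages/{owner}/cargo/api/v1/crates.
    url := "https://gitea.example.com/api/packages/alice/cargo/api/v1/crates?q=serde&per_page=10"

    resp, err := http.Get(url)
    if err != nil {
        panic(err)
    }
    defer resp.Body.Close()

    body, err := io.ReadAll(resp.Body)
    if err != nil {
        panic(err)
    }

    fmt.Println(resp.Status)
    fmt.Println(string(body)) // JSON shaped like the SearchResult struct in cargo.go below
}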
281 routers/api/packages/cargo/cargo.go Normal file
@ -0,0 +1,281 @@
// Copyright 2022 The Gitea Authors. All rights reserved.
// SPDX-License-Identifier: MIT

package cargo

import (
    "fmt"
    "net/http"
    "strconv"
    "strings"

    "code.gitea.io/gitea/models/db"
    packages_model "code.gitea.io/gitea/models/packages"
    "code.gitea.io/gitea/modules/context"
    "code.gitea.io/gitea/modules/log"
    packages_module "code.gitea.io/gitea/modules/packages"
    cargo_module "code.gitea.io/gitea/modules/packages/cargo"
    "code.gitea.io/gitea/modules/util"
    "code.gitea.io/gitea/routers/api/packages/helper"
    "code.gitea.io/gitea/services/convert"
    packages_service "code.gitea.io/gitea/services/packages"
    cargo_service "code.gitea.io/gitea/services/packages/cargo"
)

// https://doc.rust-lang.org/cargo/reference/registries.html#web-api
type StatusResponse struct {
    OK bool `json:"ok"`
    Errors []StatusMessage `json:"errors,omitempty"`
}

type StatusMessage struct {
    Message string `json:"detail"`
}

func apiError(ctx *context.Context, status int, obj interface{}) {
    helper.LogAndProcessError(ctx, status, obj, func(message string) {
        ctx.JSON(status, StatusResponse{
            OK: false,
            Errors: []StatusMessage{
                {
                    Message: message,
                },
            },
        })
    })
}

type SearchResult struct {
    Crates []*SearchResultCrate `json:"crates"`
    Meta SearchResultMeta `json:"meta"`
}

type SearchResultCrate struct {
    Name string `json:"name"`
    LatestVersion string `json:"max_version"`
    Description string `json:"description"`
}

type SearchResultMeta struct {
    Total int64 `json:"total"`
}

// https://doc.rust-lang.org/cargo/reference/registries.html#search
func SearchPackages(ctx *context.Context) {
    page := ctx.FormInt("page")
    if page < 1 {
        page = 1
    }
    perPage := ctx.FormInt("per_page")
    paginator := db.ListOptions{
        Page: page,
        PageSize: convert.ToCorrectPageSize(perPage),
    }

    pvs, total, err := packages_model.SearchLatestVersions(
        ctx,
        &packages_model.PackageSearchOptions{
            OwnerID: ctx.Package.Owner.ID,
            Type: packages_model.TypeCargo,
            Name: packages_model.SearchValue{Value: ctx.FormTrim("q")},
            IsInternal: util.OptionalBoolFalse,
            Paginator: &paginator,
        },
    )
    if err != nil {
        apiError(ctx, http.StatusInternalServerError, err)
        return
    }

    pds, err := packages_model.GetPackageDescriptors(ctx, pvs)
    if err != nil {
        apiError(ctx, http.StatusInternalServerError, err)
        return
    }

    crates := make([]*SearchResultCrate, 0, len(pvs))
    for _, pd := range pds {
        crates = append(crates, &SearchResultCrate{
            Name: pd.Package.Name,
            LatestVersion: pd.Version.Version,
            Description: pd.Metadata.(*cargo_module.Metadata).Description,
        })
    }

    ctx.JSON(http.StatusOK, SearchResult{
        Crates: crates,
        Meta: SearchResultMeta{
            Total: total,
        },
    })
}

type Owners struct {
    Users []OwnerUser `json:"users"`
}

type OwnerUser struct {
    ID int64 `json:"id"`
    Login string `json:"login"`
    Name string `json:"name"`
}

// https://doc.rust-lang.org/cargo/reference/registries.html#owners-list
func ListOwners(ctx *context.Context) {
    ctx.JSON(http.StatusOK, Owners{
        Users: []OwnerUser{
            {
                ID: ctx.Package.Owner.ID,
                Login: ctx.Package.Owner.Name,
                Name: ctx.Package.Owner.DisplayName(),
            },
        },
    })
}

// DownloadPackageFile serves the content of a package
func DownloadPackageFile(ctx *context.Context) {
    s, pf, err := packages_service.GetFileStreamByPackageNameAndVersion(
        ctx,
        &packages_service.PackageInfo{
            Owner: ctx.Package.Owner,
            PackageType: packages_model.TypeCargo,
            Name: ctx.Params("package"),
            Version: ctx.Params("version"),
        },
        &packages_service.PackageFileInfo{
            Filename: strings.ToLower(fmt.Sprintf("%s-%s.crate", ctx.Params("package"), ctx.Params("version"))),
        },
    )
    if err != nil {
        if err == packages_model.ErrPackageNotExist || err == packages_model.ErrPackageFileNotExist {
            apiError(ctx, http.StatusNotFound, err)
            return
        }
        apiError(ctx, http.StatusInternalServerError, err)
        return
    }
    defer s.Close()

    ctx.ServeContent(s, &context.ServeHeaderOptions{
        Filename: pf.Name,
        LastModified: pf.CreatedUnix.AsLocalTime(),
    })
}

// https://doc.rust-lang.org/cargo/reference/registries.html#publish
func UploadPackage(ctx *context.Context) {
    defer ctx.Req.Body.Close()

    cp, err := cargo_module.ParsePackage(ctx.Req.Body)
    if err != nil {
        apiError(ctx, http.StatusBadRequest, err)
        return
    }

    buf, err := packages_module.CreateHashedBufferFromReader(cp.Content, 32*1024*1024)
    if err != nil {
        apiError(ctx, http.StatusInternalServerError, err)
        return
    }
    defer buf.Close()

    if buf.Size() != cp.ContentSize {
        apiError(ctx, http.StatusBadRequest, "invalid content size")
        return
    }

    pv, _, err := packages_service.CreatePackageAndAddFile(
        &packages_service.PackageCreationInfo{
            PackageInfo: packages_service.PackageInfo{
                Owner: ctx.Package.Owner,
                PackageType: packages_model.TypeCargo,
                Name: cp.Name,
                Version: cp.Version,
            },
            SemverCompatible: true,
            Creator: ctx.Doer,
            Metadata: cp.Metadata,
            VersionProperties: map[string]string{
                cargo_module.PropertyYanked: strconv.FormatBool(false),
            },
        },
        &packages_service.PackageFileCreationInfo{
            PackageFileInfo: packages_service.PackageFileInfo{
                Filename: strings.ToLower(fmt.Sprintf("%s-%s.crate", cp.Name, cp.Version)),
            },
            Creator: ctx.Doer,
            Data: buf,
            IsLead: true,
        },
    )
    if err != nil {
        switch err {
        case packages_model.ErrDuplicatePackageVersion:
            apiError(ctx, http.StatusConflict, err)
        case packages_service.ErrQuotaTotalCount, packages_service.ErrQuotaTypeSize, packages_service.ErrQuotaTotalSize:
            apiError(ctx, http.StatusForbidden, err)
        default:
            apiError(ctx, http.StatusInternalServerError, err)
        }
        return
    }

    if err := cargo_service.AddOrUpdatePackageIndex(ctx, ctx.Doer, ctx.Package.Owner, pv.PackageID); err != nil {
        if err := packages_service.DeletePackageVersionAndReferences(ctx, pv); err != nil {
            log.Error("Rollback creation of package version: %v", err)
        }

        apiError(ctx, http.StatusInternalServerError, err)
        return
    }

    ctx.JSON(http.StatusOK, StatusResponse{OK: true})
}

// https://doc.rust-lang.org/cargo/reference/registries.html#yank
func YankPackage(ctx *context.Context) {
    yankPackage(ctx, true)
}

// https://doc.rust-lang.org/cargo/reference/registries.html#unyank
func UnyankPackage(ctx *context.Context) {
    yankPackage(ctx, false)
}

func yankPackage(ctx *context.Context, yank bool) {
    pv, err := packages_model.GetVersionByNameAndVersion(ctx, ctx.Package.Owner.ID, packages_model.TypeCargo, ctx.Params("package"), ctx.Params("version"))
    if err != nil {
        if err == packages_model.ErrPackageNotExist {
            apiError(ctx, http.StatusNotFound, err)
            return
        }
        apiError(ctx, http.StatusInternalServerError, err)
        return
    }

    pps, err := packages_model.GetPropertiesByName(ctx, packages_model.PropertyTypeVersion, pv.ID, cargo_module.PropertyYanked)
    if err != nil {
        apiError(ctx, http.StatusInternalServerError, err)
        return
    }
    if len(pps) == 0 {
        apiError(ctx, http.StatusInternalServerError, "Property not found")
        return
    }

    pp := pps[0]
    pp.Value = strconv.FormatBool(yank)

    if err := packages_model.UpdateProperty(ctx, pp); err != nil {
        apiError(ctx, http.StatusInternalServerError, err)
        return
    }

    if err := cargo_service.AddOrUpdatePackageIndex(ctx, ctx.Doer, ctx.Package.Owner, pv.PackageID); err != nil {
        apiError(ctx, http.StatusInternalServerError, err)
        return
    }

    ctx.JSON(http.StatusOK, StatusResponse{OK: true})
}
270 routers/api/packages/chef/auth.go Normal file
@ -0,0 +1,270 @@
|
||||
// Copyright 2023 The Gitea Authors. All rights reserved.
|
||||
// SPDX-License-Identifier: MIT
|
||||
|
||||
package chef
|
||||
|
||||
import (
|
||||
"crypto"
|
||||
"crypto/rsa"
|
||||
"crypto/sha1"
|
||||
"crypto/sha256"
|
||||
"crypto/x509"
|
||||
"encoding/base64"
|
||||
"encoding/pem"
|
||||
"fmt"
|
||||
"hash"
|
||||
"math/big"
|
||||
"net/http"
|
||||
"path"
|
||||
"regexp"
|
||||
"strconv"
|
||||
"strings"
|
||||
"time"
|
||||
|
||||
user_model "code.gitea.io/gitea/models/user"
|
||||
chef_module "code.gitea.io/gitea/modules/packages/chef"
|
||||
"code.gitea.io/gitea/modules/util"
|
||||
"code.gitea.io/gitea/services/auth"
|
||||
)
|
||||
|
||||
const (
|
||||
maxTimeDifference = 10 * time.Minute
|
||||
)
|
||||
|
||||
var (
|
||||
algorithmPattern = regexp.MustCompile(`algorithm=(\w+)`)
|
||||
versionPattern = regexp.MustCompile(`version=(\d+\.\d+)`)
|
||||
authorizationPattern = regexp.MustCompile(`\AX-Ops-Authorization-(\d+)`)
|
||||
)
|
||||
|
||||
// Documentation:
|
||||
// https://docs.chef.io/server/api_chef_server/#required-headers
|
||||
// https://github.com/chef-boneyard/chef-rfc/blob/master/rfc065-sign-v1.3.md
|
||||
// https://github.com/chef/mixlib-authentication/blob/bc8adbef833d4be23dc78cb23e6fe44b51ebc34f/lib/mixlib/authentication/signedheaderauth.rb
|
||||
|
||||
type Auth struct{}
|
||||
|
||||
func (a *Auth) Name() string {
|
||||
return "chef"
|
||||
}
|
||||
|
||||
// Verify extracts the user from the signed request
|
||||
// If the request is signed with the user private key the user is verified.
|
||||
func (a *Auth) Verify(req *http.Request, w http.ResponseWriter, store auth.DataStore, sess auth.SessionStore) (*user_model.User, error) {
|
||||
u, err := getUserFromRequest(req)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
if u == nil {
|
||||
return nil, nil
|
||||
}
|
||||
|
||||
pub, err := getUserPublicKey(u)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
|
||||
if err := verifyTimestamp(req); err != nil {
|
||||
return nil, err
|
||||
}
|
||||
|
||||
version, err := getSignVersion(req)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
|
||||
if err := verifySignedHeaders(req, version, pub.(*rsa.PublicKey)); err != nil {
|
||||
return nil, err
|
||||
}
|
||||
|
||||
return u, nil
|
||||
}
|
||||
|
||||
func getUserFromRequest(req *http.Request) (*user_model.User, error) {
|
||||
username := req.Header.Get("X-Ops-Userid")
|
||||
if username == "" {
|
||||
return nil, nil
|
||||
}
|
||||
|
||||
return user_model.GetUserByName(req.Context(), username)
|
||||
}
|
||||
|
||||
func getUserPublicKey(u *user_model.User) (crypto.PublicKey, error) {
|
||||
pubKey, err := user_model.GetSetting(u.ID, chef_module.SettingPublicPem)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
|
||||
pubPem, _ := pem.Decode([]byte(pubKey))
|
||||
|
||||
return x509.ParsePKIXPublicKey(pubPem.Bytes)
|
||||
}
|
||||
|
||||
func verifyTimestamp(req *http.Request) error {
|
||||
hdr := req.Header.Get("X-Ops-Timestamp")
|
||||
if hdr == "" {
|
||||
return util.NewInvalidArgumentErrorf("X-Ops-Timestamp header missing")
|
||||
}
|
||||
|
||||
ts, err := time.Parse(time.RFC3339, hdr)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
|
||||
diff := time.Now().UTC().Sub(ts)
|
||||
if diff < 0 {
|
||||
diff = -diff
|
||||
}
|
||||
|
||||
if diff > maxTimeDifference {
|
||||
return fmt.Errorf("time difference")
|
||||
}
|
||||
|
||||
return nil
|
||||
}
|
||||
|
||||
func getSignVersion(req *http.Request) (string, error) {
|
||||
hdr := req.Header.Get("X-Ops-Sign")
|
||||
if hdr == "" {
|
||||
return "", util.NewInvalidArgumentErrorf("X-Ops-Sign header missing")
|
||||
}
|
||||
|
||||
m := versionPattern.FindStringSubmatch(hdr)
|
||||
if len(m) != 2 {
|
||||
return "", util.NewInvalidArgumentErrorf("invalid X-Ops-Sign header")
|
||||
}
|
||||
|
||||
switch m[1] {
|
||||
case "1.0", "1.1", "1.2", "1.3":
|
||||
default:
|
||||
return "", util.NewInvalidArgumentErrorf("unsupported version")
|
||||
}
|
||||
|
||||
version := m[1]
|
||||
|
||||
m = algorithmPattern.FindStringSubmatch(hdr)
|
||||
if len(m) == 2 && m[1] != "sha1" && !(m[1] == "sha256" && version == "1.3") {
|
||||
return "", util.NewInvalidArgumentErrorf("unsupported algorithm")
|
||||
}
|
||||
|
||||
return version, nil
|
||||
}
|
||||
|
||||
func verifySignedHeaders(req *http.Request, version string, pub *rsa.PublicKey) error {
|
||||
authorizationData, err := getAuthorizationData(req)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
|
||||
checkData := buildCheckData(req, version)
|
||||
|
||||
switch version {
|
||||
case "1.3":
|
||||
return verifyDataNew(authorizationData, checkData, pub, crypto.SHA256)
|
||||
case "1.2":
|
||||
return verifyDataNew(authorizationData, checkData, pub, crypto.SHA1)
|
||||
default:
|
||||
return verifyDataOld(authorizationData, checkData, pub)
|
||||
}
|
||||
}
|
||||
|
||||
func getAuthorizationData(req *http.Request) ([]byte, error) {
|
||||
valueList := make(map[int]string)
|
||||
for k, vs := range req.Header {
|
||||
if m := authorizationPattern.FindStringSubmatch(k); m != nil {
|
||||
index, _ := strconv.Atoi(m[1])
|
||||
var v string
|
||||
if len(vs) == 0 {
|
||||
v = ""
|
||||
} else {
|
||||
v = vs[0]
|
||||
}
|
||||
valueList[index] = v
|
||||
}
|
||||
}
|
||||
|
||||
tmp := make([]string, len(valueList))
|
||||
for k, v := range valueList {
|
||||
if k > len(tmp) {
|
||||
return nil, fmt.Errorf("invalid X-Ops-Authorization headers")
|
||||
}
|
||||
tmp[k-1] = v
|
||||
}
|
||||
|
||||
return base64.StdEncoding.DecodeString(strings.Join(tmp, ""))
|
||||
}
|
||||
|
||||
func buildCheckData(req *http.Request, version string) []byte {
|
||||
username := req.Header.Get("X-Ops-Userid")
|
||||
if version != "1.0" && version != "1.3" {
|
||||
sum := sha1.Sum([]byte(username))
|
||||
username = base64.StdEncoding.EncodeToString(sum[:])
|
||||
}
|
||||
|
||||
var data string
|
||||
if version == "1.3" {
|
||||
data = fmt.Sprintf(
|
||||
"Method:%s\nPath:%s\nX-Ops-Content-Hash:%s\nX-Ops-Sign:version=%s\nX-Ops-Timestamp:%s\nX-Ops-UserId:%s\nX-Ops-Server-API-Version:%s",
|
||||
req.Method,
|
||||
path.Clean(req.URL.Path),
|
||||
req.Header.Get("X-Ops-Content-Hash"),
|
||||
version,
|
||||
req.Header.Get("X-Ops-Timestamp"),
|
||||
username,
|
||||
req.Header.Get("X-Ops-Server-Api-Version"),
|
||||
)
|
||||
} else {
|
||||
sum := sha1.Sum([]byte(path.Clean(req.URL.Path)))
|
||||
data = fmt.Sprintf(
|
||||
"Method:%s\nHashed Path:%s\nX-Ops-Content-Hash:%s\nX-Ops-Timestamp:%s\nX-Ops-UserId:%s",
|
||||
req.Method,
|
||||
base64.StdEncoding.EncodeToString(sum[:]),
|
||||
req.Header.Get("X-Ops-Content-Hash"),
|
||||
req.Header.Get("X-Ops-Timestamp"),
|
||||
username,
|
||||
)
|
||||
}
|
||||
|
||||
return []byte(data)
|
||||
}
|
||||
|
||||
func verifyDataNew(signature, data []byte, pub *rsa.PublicKey, algo crypto.Hash) error {
|
||||
var h hash.Hash
|
||||
if algo == crypto.SHA256 {
|
||||
h = sha256.New()
|
||||
} else {
|
||||
h = sha1.New()
|
||||
}
|
||||
if _, err := h.Write(data); err != nil {
|
||||
return err
|
||||
}
|
||||
|
||||
return rsa.VerifyPKCS1v15(pub, algo, h.Sum(nil), signature)
|
||||
}
|
||||
|
||||
func verifyDataOld(signature, data []byte, pub *rsa.PublicKey) error {
|
||||
c := new(big.Int)
|
||||
m := new(big.Int)
|
||||
m.SetBytes(signature)
|
||||
e := big.NewInt(int64(pub.E))
|
||||
c.Exp(m, e, pub.N)
|
||||
|
||||
out := c.Bytes()
|
||||
|
||||
skip := 0
|
||||
for i := 2; i < len(out); i++ {
|
||||
if i+1 >= len(out) {
|
||||
break
|
||||
}
|
||||
if out[i] == 0xFF && out[i+1] == 0 {
|
||||
skip = i + 2
|
||||
break
|
||||
}
|
||||
}
|
||||
|
||||
if !util.SliceEqual(out[skip:], data) {
|
||||
return fmt.Errorf("could not verify signature")
|
||||
}
|
||||
|
||||
return nil
|
||||
}
|
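For orientation only, a hedged sketch of the client side of the protocol-1.3 scheme that the auth.go listing above verifies. The function name signRequest and the key handling are illustrative, not part of Gitea; only the header names and the canonical string mirror buildCheckData and verifySignedHeaders:

package chefclient

import (
    "crypto"
    "crypto/rand"
    "crypto/rsa"
    "crypto/sha256"
    "encoding/base64"
    "fmt"
    "net/http"
    "path"
    "time"
)

// signRequest adds the X-Ops-* headers that Auth.Verify above expects for version 1.3.
func signRequest(req *http.Request, username string, body []byte, key *rsa.PrivateKey) error {
    bodySum := sha256.Sum256(body)
    contentHash := base64.StdEncoding.EncodeToString(bodySum[:])
    ts := time.Now().UTC().Format(time.RFC3339)

    req.Header.Set("X-Ops-Userid", username)
    req.Header.Set("X-Ops-Sign", "algorithm=sha256;version=1.3")
    req.Header.Set("X-Ops-Timestamp", ts)
    req.Header.Set("X-Ops-Content-Hash", contentHash)
    req.Header.Set("X-Ops-Server-Api-Version", "1")

    // Canonical data in the same order buildCheckData uses for version 1.3.
    data := fmt.Sprintf(
        "Method:%s\nPath:%s\nX-Ops-Content-Hash:%s\nX-Ops-Sign:version=%s\nX-Ops-Timestamp:%s\nX-Ops-UserId:%s\nX-Ops-Server-API-Version:%s",
        req.Method, path.Clean(req.URL.Path), contentHash, "1.3", ts, username, "1",
    )

    digest := sha256.Sum256([]byte(data))
    sig, err := rsa.SignPKCS1v15(rand.Reader, key, crypto.SHA256, digest[:])
    if err != nil {
        return err
    }

    // The base64 signature is spread over X-Ops-Authorization-1..N headers,
    // which getAuthorizationData reassembles in index order.
    enc := base64.StdEncoding.EncodeToString(sig)
    for i := 1; len(enc) > 0; i++ {
        n := 60
        if len(enc) < n {
            n = len(enc)
        }
        req.Header.Set(fmt.Sprintf("X-Ops-Authorization-%d", i), enc[:n])
        enc = enc[n:]
    }
    return nil
}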
404 routers/api/packages/chef/chef.go Normal file
@ -0,0 +1,404 @@
|
||||
// Copyright 2023 The Gitea Authors. All rights reserved.
|
||||
// SPDX-License-Identifier: MIT
|
||||
|
||||
package chef
|
||||
|
||||
import (
|
||||
"errors"
|
||||
"fmt"
|
||||
"io"
|
||||
"net/http"
|
||||
"net/url"
|
||||
"sort"
|
||||
"strings"
|
||||
"time"
|
||||
|
||||
"code.gitea.io/gitea/models/db"
|
||||
packages_model "code.gitea.io/gitea/models/packages"
|
||||
"code.gitea.io/gitea/modules/context"
|
||||
packages_module "code.gitea.io/gitea/modules/packages"
|
||||
chef_module "code.gitea.io/gitea/modules/packages/chef"
|
||||
"code.gitea.io/gitea/modules/setting"
|
||||
"code.gitea.io/gitea/modules/util"
|
||||
"code.gitea.io/gitea/routers/api/packages/helper"
|
||||
packages_service "code.gitea.io/gitea/services/packages"
|
||||
)
|
||||
|
||||
func apiError(ctx *context.Context, status int, obj interface{}) {
|
||||
type Error struct {
|
||||
ErrorMessages []string `json:"error_messages"`
|
||||
}
|
||||
|
||||
helper.LogAndProcessError(ctx, status, obj, func(message string) {
|
||||
ctx.JSON(status, Error{
|
||||
ErrorMessages: []string{message},
|
||||
})
|
||||
})
|
||||
}
|
||||
|
||||
func PackagesUniverse(ctx *context.Context) {
|
||||
pvs, _, err := packages_model.SearchVersions(ctx, &packages_model.PackageSearchOptions{
|
||||
OwnerID: ctx.Package.Owner.ID,
|
||||
Type: packages_model.TypeChef,
|
||||
IsInternal: util.OptionalBoolFalse,
|
||||
})
|
||||
if err != nil {
|
||||
apiError(ctx, http.StatusInternalServerError, err)
|
||||
return
|
||||
}
|
||||
|
||||
pds, err := packages_model.GetPackageDescriptors(ctx, pvs)
|
||||
if err != nil {
|
||||
apiError(ctx, http.StatusInternalServerError, err)
|
||||
return
|
||||
}
|
||||
|
||||
type VersionInfo struct {
|
||||
LocationType string `json:"location_type"`
|
||||
LocationPath string `json:"location_path"`
|
||||
DownloadURL string `json:"download_url"`
|
||||
Dependencies map[string]string `json:"dependencies"`
|
||||
}
|
||||
|
||||
baseURL := setting.AppURL + "api/packages/" + ctx.Package.Owner.Name + "/chef/api/v1"
|
||||
|
||||
universe := make(map[string]map[string]*VersionInfo)
|
||||
for _, pd := range pds {
|
||||
if _, ok := universe[pd.Package.Name]; !ok {
|
||||
universe[pd.Package.Name] = make(map[string]*VersionInfo)
|
||||
}
|
||||
universe[pd.Package.Name][pd.Version.Version] = &VersionInfo{
|
||||
LocationType: "opscode",
|
||||
LocationPath: baseURL,
|
||||
DownloadURL: fmt.Sprintf("%s/cookbooks/%s/versions/%s/download", baseURL, url.PathEscape(pd.Package.Name), pd.Version.Version),
|
||||
Dependencies: pd.Metadata.(*chef_module.Metadata).Dependencies,
|
||||
}
|
||||
}
|
||||
|
||||
ctx.JSON(http.StatusOK, universe)
|
||||
}
|
||||
|
||||
// https://github.com/chef/chef/blob/main/knife/lib/chef/knife/supermarket_list.rb
|
||||
// https://github.com/chef/chef/blob/main/knife/lib/chef/knife/supermarket_search.rb
|
||||
func EnumeratePackages(ctx *context.Context) {
|
||||
opts := &packages_model.PackageSearchOptions{
|
||||
OwnerID: ctx.Package.Owner.ID,
|
||||
Type: packages_model.TypeChef,
|
||||
Name: packages_model.SearchValue{Value: ctx.FormTrim("q")},
|
||||
IsInternal: util.OptionalBoolFalse,
|
||||
Paginator: db.NewAbsoluteListOptions(
|
||||
ctx.FormInt("start"),
|
||||
ctx.FormInt("items"),
|
||||
),
|
||||
}
|
||||
|
||||
switch strings.ToLower(ctx.FormTrim("order")) {
|
||||
case "recently_updated", "recently_added":
|
||||
opts.Sort = packages_model.SortCreatedDesc
|
||||
default:
|
||||
opts.Sort = packages_model.SortNameAsc
|
||||
}
|
||||
|
||||
pvs, total, err := packages_model.SearchLatestVersions(ctx, opts)
|
||||
if err != nil {
|
||||
apiError(ctx, http.StatusInternalServerError, err)
|
||||
return
|
||||
}
|
||||
|
||||
pds, err := packages_model.GetPackageDescriptors(ctx, pvs)
|
||||
if err != nil {
|
||||
apiError(ctx, http.StatusInternalServerError, err)
|
||||
return
|
||||
}
|
||||
|
||||
type Item struct {
|
||||
CookbookName string `json:"cookbook_name"`
|
||||
CookbookMaintainer string `json:"cookbook_maintainer"`
|
||||
CookbookDescription string `json:"cookbook_description"`
|
||||
Cookbook string `json:"cookbook"`
|
||||
}
|
||||
|
||||
type Result struct {
|
||||
Start int `json:"start"`
|
||||
Total int `json:"total"`
|
||||
Items []*Item `json:"items"`
|
||||
}
|
||||
|
||||
baseURL := setting.AppURL + "api/packages/" + ctx.Package.Owner.Name + "/chef/api/v1/cookbooks/"
|
||||
|
||||
items := make([]*Item, 0, len(pds))
|
||||
for _, pd := range pds {
|
||||
metadata := pd.Metadata.(*chef_module.Metadata)
|
||||
|
||||
items = append(items, &Item{
|
||||
CookbookName: pd.Package.Name,
|
||||
CookbookMaintainer: metadata.Author,
|
||||
CookbookDescription: metadata.Description,
|
||||
Cookbook: baseURL + url.PathEscape(pd.Package.Name),
|
||||
})
|
||||
}
|
||||
|
||||
skip, _ := opts.Paginator.GetSkipTake()
|
||||
|
||||
ctx.JSON(http.StatusOK, &Result{
|
||||
Start: skip,
|
||||
Total: int(total),
|
||||
Items: items,
|
||||
})
|
||||
}
|
||||
|
||||
// https://github.com/chef/chef/blob/main/knife/lib/chef/knife/supermarket_show.rb
|
||||
func PackageMetadata(ctx *context.Context) {
|
||||
packageName := ctx.Params("name")
|
||||
|
||||
pvs, err := packages_model.GetVersionsByPackageName(ctx, ctx.Package.Owner.ID, packages_model.TypeChef, packageName)
|
||||
if err != nil {
|
||||
apiError(ctx, http.StatusInternalServerError, err)
|
||||
return
|
||||
}
|
||||
if len(pvs) == 0 {
|
||||
apiError(ctx, http.StatusNotFound, nil)
|
||||
return
|
||||
}
|
||||
|
||||
pds, err := packages_model.GetPackageDescriptors(ctx, pvs)
|
||||
if err != nil {
|
||||
apiError(ctx, http.StatusInternalServerError, err)
|
||||
return
|
||||
}
|
||||
|
||||
sort.Slice(pds, func(i, j int) bool {
|
||||
return pds[i].SemVer.LessThan(pds[j].SemVer)
|
||||
})
|
||||
|
||||
type Result struct {
|
||||
Name string `json:"name"`
|
||||
Maintainer string `json:"maintainer"`
|
||||
Description string `json:"description"`
|
||||
Category string `json:"category"`
|
||||
LatestVersion string `json:"latest_version"`
|
||||
SourceURL string `json:"source_url"`
|
||||
CreatedAt time.Time `json:"created_at"`
|
||||
UpdatedAt time.Time `json:"updated_at"`
|
||||
Deprecated bool `json:"deprecated"`
|
||||
Versions []string `json:"versions"`
|
||||
}
|
||||
|
||||
baseURL := fmt.Sprintf("%sapi/packages/%s/chef/api/v1/cookbooks/%s/versions/", setting.AppURL, ctx.Package.Owner.Name, url.PathEscape(packageName))
|
||||
|
||||
versions := make([]string, 0, len(pds))
|
||||
for _, pd := range pds {
|
||||
versions = append(versions, baseURL+pd.Version.Version)
|
||||
}
|
||||
|
||||
latest := pds[len(pds)-1]
|
||||
|
||||
metadata := latest.Metadata.(*chef_module.Metadata)
|
||||
|
||||
ctx.JSON(http.StatusOK, &Result{
|
||||
Name: latest.Package.Name,
|
||||
Maintainer: metadata.Author,
|
||||
Description: metadata.Description,
|
||||
LatestVersion: baseURL + latest.Version.Version,
|
||||
SourceURL: metadata.RepositoryURL,
|
||||
CreatedAt: latest.Version.CreatedUnix.AsLocalTime(),
|
||||
UpdatedAt: latest.Version.CreatedUnix.AsLocalTime(),
|
||||
Deprecated: false,
|
||||
Versions: versions,
|
||||
})
|
||||
}
|
||||
|
||||
// https://github.com/chef/chef/blob/main/knife/lib/chef/knife/supermarket_show.rb
|
||||
func PackageVersionMetadata(ctx *context.Context) {
|
||||
packageName := ctx.Params("name")
|
||||
packageVersion := strings.ReplaceAll(ctx.Params("version"), "_", ".") // Chef calls this endpoint with "_" instead of "."?!
|
||||
|
||||
pv, err := packages_model.GetVersionByNameAndVersion(ctx, ctx.Package.Owner.ID, packages_model.TypeChef, packageName, packageVersion)
|
||||
if err != nil {
|
||||
if err == packages_model.ErrPackageNotExist {
|
||||
apiError(ctx, http.StatusNotFound, err)
|
||||
return
|
||||
}
|
||||
apiError(ctx, http.StatusInternalServerError, err)
|
||||
return
|
||||
}
|
||||
|
||||
pd, err := packages_model.GetPackageDescriptor(ctx, pv)
|
||||
if err != nil {
|
||||
apiError(ctx, http.StatusInternalServerError, err)
|
||||
return
|
||||
}
|
||||
|
||||
type Result struct {
|
||||
Version string `json:"version"`
|
||||
TarballFileSize int64 `json:"tarball_file_size"`
|
||||
PublishedAt time.Time `json:"published_at"`
|
||||
Cookbook string `json:"cookbook"`
|
||||
File string `json:"file"`
|
||||
License string `json:"license"`
|
||||
Dependencies map[string]string `json:"dependencies"`
|
||||
}
|
||||
|
||||
baseURL := fmt.Sprintf("%sapi/packages/%s/chef/api/v1/cookbooks/%s", setting.AppURL, ctx.Package.Owner.Name, url.PathEscape(pd.Package.Name))
|
||||
|
||||
metadata := pd.Metadata.(*chef_module.Metadata)
|
||||
|
||||
ctx.JSON(http.StatusOK, &Result{
|
||||
Version: pd.Version.Version,
|
||||
TarballFileSize: pd.Files[0].Blob.Size,
|
||||
PublishedAt: pd.Version.CreatedUnix.AsLocalTime(),
|
||||
Cookbook: baseURL,
|
||||
File: fmt.Sprintf("%s/versions/%s/download", baseURL, pd.Version.Version),
|
||||
License: metadata.License,
|
||||
Dependencies: metadata.Dependencies,
|
||||
})
|
||||
}
|
||||
|
||||
// https://github.com/chef/chef/blob/main/knife/lib/chef/knife/supermarket_share.rb
|
||||
func UploadPackage(ctx *context.Context) {
|
||||
file, _, err := ctx.Req.FormFile("tarball")
|
||||
if err != nil {
|
||||
apiError(ctx, http.StatusBadRequest, err)
|
||||
return
|
||||
}
|
||||
defer file.Close()
|
||||
|
||||
buf, err := packages_module.CreateHashedBufferFromReader(file, 32*1024*1024)
|
||||
if err != nil {
|
||||
apiError(ctx, http.StatusInternalServerError, err)
|
||||
return
|
||||
}
|
||||
defer buf.Close()
|
||||
|
||||
pck, err := chef_module.ParsePackage(buf)
|
||||
if err != nil {
|
||||
if errors.Is(err, util.ErrInvalidArgument) {
|
||||
apiError(ctx, http.StatusBadRequest, err)
|
||||
} else {
|
||||
apiError(ctx, http.StatusInternalServerError, err)
|
||||
}
|
||||
return
|
||||
}
|
||||
|
||||
if _, err := buf.Seek(0, io.SeekStart); err != nil {
|
||||
apiError(ctx, http.StatusInternalServerError, err)
|
||||
return
|
||||
}
|
||||
|
||||
_, _, err = packages_service.CreatePackageAndAddFile(
|
||||
&packages_service.PackageCreationInfo{
|
||||
PackageInfo: packages_service.PackageInfo{
|
||||
Owner: ctx.Package.Owner,
|
||||
PackageType: packages_model.TypeChef,
|
||||
Name: pck.Name,
|
||||
Version: pck.Version,
|
||||
},
|
||||
Creator: ctx.Doer,
|
||||
SemverCompatible: true,
|
||||
Metadata: pck.Metadata,
|
||||
},
|
||||
&packages_service.PackageFileCreationInfo{
|
||||
PackageFileInfo: packages_service.PackageFileInfo{
|
||||
Filename: strings.ToLower(pck.Version + ".tar.gz"),
|
||||
},
|
||||
Creator: ctx.Doer,
|
||||
Data: buf,
|
||||
IsLead: true,
|
||||
},
|
||||
)
|
||||
if err != nil {
|
||||
switch err {
|
||||
case packages_model.ErrDuplicatePackageVersion:
|
||||
apiError(ctx, http.StatusBadRequest, err)
|
||||
case packages_service.ErrQuotaTotalCount, packages_service.ErrQuotaTypeSize, packages_service.ErrQuotaTotalSize:
|
||||
apiError(ctx, http.StatusForbidden, err)
|
||||
default:
|
||||
apiError(ctx, http.StatusInternalServerError, err)
|
||||
}
|
||||
return
|
||||
}
|
||||
|
||||
ctx.JSON(http.StatusCreated, make(map[any]any))
|
||||
}
|
||||
|
||||
// https://github.com/chef/chef/blob/main/knife/lib/chef/knife/supermarket_download.rb
|
||||
func DownloadPackage(ctx *context.Context) {
|
||||
pv, err := packages_model.GetVersionByNameAndVersion(ctx, ctx.Package.Owner.ID, packages_model.TypeChef, ctx.Params("name"), ctx.Params("version"))
|
||||
if err != nil {
|
||||
if err == packages_model.ErrPackageNotExist {
|
||||
apiError(ctx, http.StatusNotFound, err)
|
||||
return
|
||||
}
|
||||
apiError(ctx, http.StatusInternalServerError, err)
|
||||
return
|
||||
}
|
||||
|
||||
pd, err := packages_model.GetPackageDescriptor(ctx, pv)
|
||||
if err != nil {
|
||||
apiError(ctx, http.StatusInternalServerError, err)
|
||||
return
|
||||
}
|
||||
|
||||
pf := pd.Files[0].File
|
||||
|
||||
s, _, err := packages_service.GetPackageFileStream(ctx, pf)
|
||||
if err != nil {
|
||||
apiError(ctx, http.StatusInternalServerError, err)
|
||||
return
|
||||
}
|
||||
defer s.Close()
|
||||
|
||||
ctx.ServeContent(s, &context.ServeHeaderOptions{
|
||||
Filename: pf.Name,
|
||||
LastModified: pf.CreatedUnix.AsLocalTime(),
|
||||
})
|
||||
}
|
||||
|
||||
// https://github.com/chef/chef/blob/main/knife/lib/chef/knife/supermarket_unshare.rb
|
||||
func DeletePackageVersion(ctx *context.Context) {
|
||||
packageName := ctx.Params("name")
|
||||
packageVersion := ctx.Params("version")
|
||||
|
||||
err := packages_service.RemovePackageVersionByNameAndVersion(
|
||||
ctx.Doer,
|
||||
&packages_service.PackageInfo{
|
||||
Owner: ctx.Package.Owner,
|
||||
PackageType: packages_model.TypeChef,
|
||||
Name: packageName,
|
||||
Version: packageVersion,
|
||||
},
|
||||
)
|
||||
if err != nil {
|
||||
if err == packages_model.ErrPackageNotExist {
|
||||
apiError(ctx, http.StatusNotFound, err)
|
||||
} else {
|
||||
apiError(ctx, http.StatusInternalServerError, err)
|
||||
}
|
||||
return
|
||||
}
|
||||
|
||||
ctx.Status(http.StatusOK)
|
||||
}
|
||||
|
||||
// https://github.com/chef/chef/blob/main/knife/lib/chef/knife/supermarket_unshare.rb
|
||||
func DeletePackage(ctx *context.Context) {
|
||||
pvs, err := packages_model.GetVersionsByPackageName(ctx, ctx.Package.Owner.ID, packages_model.TypeChef, ctx.Params("name"))
|
||||
if err != nil {
|
||||
apiError(ctx, http.StatusInternalServerError, err)
|
||||
return
|
||||
}
|
||||
|
||||
if len(pvs) == 0 {
|
||||
apiError(ctx, http.StatusNotFound, err)
|
||||
return
|
||||
}
|
||||
|
||||
for _, pv := range pvs {
|
||||
if err := packages_service.RemovePackageVersion(ctx.Doer, pv); err != nil {
|
||||
apiError(ctx, http.StatusInternalServerError, err)
|
||||
return
|
||||
}
|
||||
}
|
||||
|
||||
ctx.Status(http.StatusOK)
|
||||
}
|
@ -40,7 +40,7 @@ func ListPackages(ctx *context.APIContext) {
    // in: query
    // description: package type filter
    // type: string
    // enum: [composer, conan, conda, container, generic, helm, maven, npm, nuget, pub, pypi, rubygems, vagrant]
    // enum: [cargo, chef, composer, conan, conda, container, generic, helm, maven, npm, nuget, pub, pypi, rubygems, vagrant]
    // - name: q
    // in: query
    // description: name filter
@ -84,3 +84,23 @@ func PackagesRulePreview(ctx *context.Context) {

    ctx.HTML(http.StatusOK, tplSettingsPackagesRulePreview)
}

func InitializeCargoIndex(ctx *context.Context) {
    ctx.Data["Title"] = ctx.Tr("packages.title")
    ctx.Data["PageIsOrgSettings"] = true
    ctx.Data["PageIsSettingsPackages"] = true

    shared.InitializeCargoIndex(ctx, ctx.ContextUser)

    ctx.Redirect(fmt.Sprintf("%s/org/%s/settings/packages", setting.AppSubURL, ctx.ContextUser.Name))
}

func RebuildCargoIndex(ctx *context.Context) {
    ctx.Data["Title"] = ctx.Tr("packages.title")
    ctx.Data["PageIsOrgSettings"] = true
    ctx.Data["PageIsSettingsPackages"] = true

    shared.RebuildCargoIndex(ctx, ctx.ContextUser)

    ctx.Redirect(fmt.Sprintf("%s/org/%s/settings/packages", setting.AppSubURL, ctx.ContextUser.Name))
}
@ -424,60 +424,40 @@ func (h *serviceHandler) sendFile(contentType, file string) {
// one or more key=value pairs separated by colons
var safeGitProtocolHeader = regexp.MustCompile(`^[0-9a-zA-Z]+=[0-9a-zA-Z]+(:[0-9a-zA-Z]+=[0-9a-zA-Z]+)*$`)

func getGitConfig(ctx gocontext.Context, option, dir string) string {
    out, _, err := git.NewCommand(ctx, "config").AddDynamicArguments(option).RunStdString(&git.RunOpts{Dir: dir})
    if err != nil {
        log.Error("%v - %s", err, out)
func prepareGitCmdWithAllowedService(service string, h *serviceHandler) (*git.Command, error) {
    if service == "receive-pack" && h.cfg.ReceivePack {
        return git.NewCommand(h.r.Context(), "receive-pack"), nil
    }
    return out[0 : len(out)-1]
    if service == "upload-pack" && h.cfg.UploadPack {
        return git.NewCommand(h.r.Context(), "upload-pack"), nil
    }

    return nil, fmt.Errorf("service %q is not allowed", service)
}

func getConfigSetting(ctx gocontext.Context, service, dir string) bool {
    service = strings.ReplaceAll(service, "-", "")
    setting := getGitConfig(ctx, "http."+service, dir)

    if service == "uploadpack" {
        return setting != "false"
    }

    return setting == "true"
}

func hasAccess(ctx gocontext.Context, service string, h serviceHandler, checkContentType bool) bool {
    if checkContentType {
        if h.r.Header.Get("Content-Type") != fmt.Sprintf("application/x-git-%s-request", service) {
            return false
        }
    }

    if !(service == "upload-pack" || service == "receive-pack") {
        return false
    }
    if service == "receive-pack" {
        return h.cfg.ReceivePack
    }
    if service == "upload-pack" {
        return h.cfg.UploadPack
    }

    return getConfigSetting(ctx, service, h.dir)
}

func serviceRPC(ctx gocontext.Context, h serviceHandler, service string) {
func serviceRPC(h *serviceHandler, service string) {
    defer func() {
        if err := h.r.Body.Close(); err != nil {
            log.Error("serviceRPC: Close: %v", err)
        }
    }()

    if !hasAccess(ctx, service, h, true) {
    expectedContentType := fmt.Sprintf("application/x-git-%s-request", service)
    if h.r.Header.Get("Content-Type") != expectedContentType {
        log.Error("Content-Type (%q) doesn't match expected: %q", h.r.Header.Get("Content-Type"), expectedContentType)
        h.w.WriteHeader(http.StatusUnauthorized)
        return
    }

    cmd, err := prepareGitCmdWithAllowedService(service, h)
    if err != nil {
        log.Error("Failed to prepareGitCmdWithService: %v", err)
        h.w.WriteHeader(http.StatusUnauthorized)
        return
    }

    h.w.Header().Set("Content-Type", fmt.Sprintf("application/x-git-%s-result", service))

    var err error
    reqBody := h.r.Body

    // Handle GZIP.
@ -498,8 +478,7 @@ func serviceRPC(ctx gocontext.Context, h serviceHandler, service string) {
    }

    var stderr bytes.Buffer
    // the service is generated by ourselves, so it's safe to trust it
    cmd := git.NewCommand(h.r.Context(), git.ToTrustedCmdArgs([]string{service})...).AddArguments("--stateless-rpc").AddDynamicArguments(h.dir)
    cmd.AddArguments("--stateless-rpc").AddDynamicArguments(h.dir)
    cmd.SetDescription(fmt.Sprintf("%s %s %s [repo_path: %s]", git.GitExecutable, service, "--stateless-rpc", h.dir))
    if err := cmd.Run(&git.RunOpts{
        Dir: h.dir,
@ -520,7 +499,7 @@ func serviceRPC(ctx gocontext.Context, h serviceHandler, service string) {
func ServiceUploadPack(ctx *context.Context) {
    h := httpBase(ctx)
    if h != nil {
        serviceRPC(ctx, *h, "upload-pack")
        serviceRPC(h, "upload-pack")
    }
}

@ -528,7 +507,7 @@ func ServiceUploadPack(ctx *context.Context) {
func ServiceReceivePack(ctx *context.Context) {
    h := httpBase(ctx)
    if h != nil {
        serviceRPC(ctx, *h, "receive-pack")
        serviceRPC(h, "receive-pack")
    }
}

@ -537,7 +516,7 @@ func getServiceType(r *http.Request) string {
    if !strings.HasPrefix(serviceType, "git-") {
        return ""
    }
    return strings.Replace(serviceType, "git-", "", 1)
    return strings.TrimPrefix(serviceType, "git-")
}

func updateServerInfo(ctx gocontext.Context, dir string) []byte {
@ -563,16 +542,15 @@ func GetInfoRefs(ctx *context.Context) {
        return
    }
    h.setHeaderNoCache()
    if hasAccess(ctx, getServiceType(h.r), *h, false) {
        service := getServiceType(h.r)

    service := getServiceType(h.r)
    cmd, err := prepareGitCmdWithAllowedService(service, h)
    if err == nil {
        if protocol := h.r.Header.Get("Git-Protocol"); protocol != "" && safeGitProtocolHeader.MatchString(protocol) {
            h.environ = append(h.environ, "GIT_PROTOCOL="+protocol)
        }
        h.environ = append(os.Environ(), h.environ...)

        // the service is generated by ourselves, so we can trust it
        refs, _, err := git.NewCommand(ctx, git.ToTrustedCmdArgs([]string{service})...).AddArguments("--stateless-rpc", "--advertise-refs", ".").RunStdBytes(&git.RunOpts{Env: h.environ, Dir: h.dir})
        refs, _, err := cmd.AddArguments("--stateless-rpc", "--advertise-refs", ".").RunStdBytes(&git.RunOpts{Env: h.environ, Dir: h.dir})
        if err != nil {
            log.Error(fmt.Sprintf("%v - %s", err, string(refs)))
        }
@ -13,9 +13,11 @@ import (
    user_model "code.gitea.io/gitea/models/user"
    "code.gitea.io/gitea/modules/base"
    "code.gitea.io/gitea/modules/context"
    "code.gitea.io/gitea/modules/log"
    "code.gitea.io/gitea/modules/util"
    "code.gitea.io/gitea/modules/web"
    "code.gitea.io/gitea/services/forms"
    cargo_service "code.gitea.io/gitea/services/packages/cargo"
    container_service "code.gitea.io/gitea/services/packages/container"
)

@ -223,3 +225,23 @@ func getCleanupRuleByContext(ctx *context.Context, owner *user_model.User) *pack

    return nil
}

func InitializeCargoIndex(ctx *context.Context, owner *user_model.User) {
    err := cargo_service.InitializeIndexRepository(ctx, owner, owner)
    if err != nil {
        log.Error("InitializeIndexRepository failed: %v", err)
        ctx.Flash.Error(ctx.Tr("packages.owner.settings.cargo.initialize.error", err))
    } else {
        ctx.Flash.Success(ctx.Tr("packages.owner.settings.cargo.initialize.success"))
    }
}

func RebuildCargoIndex(ctx *context.Context, owner *user_model.User) {
    err := cargo_service.RebuildIndex(ctx, owner, owner)
    if err != nil {
        log.Error("RebuildIndex failed: %v", err)
        ctx.Flash.Error(ctx.Tr("packages.owner.settings.cargo.rebuild.error", err))
    } else {
        ctx.Flash.Success(ctx.Tr("packages.owner.settings.cargo.rebuild.success"))
    }
}
@ -5,10 +5,14 @@ package setting

import (
    "net/http"
    "strings"

    user_model "code.gitea.io/gitea/models/user"
    "code.gitea.io/gitea/modules/base"
    "code.gitea.io/gitea/modules/context"
    chef_module "code.gitea.io/gitea/modules/packages/chef"
    "code.gitea.io/gitea/modules/setting"
    "code.gitea.io/gitea/modules/util"
    shared "code.gitea.io/gitea/routers/web/shared/packages"
)

@ -77,3 +81,39 @@ func PackagesRulePreview(ctx *context.Context) {

    ctx.HTML(http.StatusOK, tplSettingsPackagesRulePreview)
}

func InitializeCargoIndex(ctx *context.Context) {
    ctx.Data["Title"] = ctx.Tr("packages.title")
    ctx.Data["PageIsSettingsPackages"] = true

    shared.InitializeCargoIndex(ctx, ctx.Doer)

    ctx.Redirect(setting.AppSubURL + "/user/settings/packages")
}

func RebuildCargoIndex(ctx *context.Context) {
    ctx.Data["Title"] = ctx.Tr("packages.title")
    ctx.Data["PageIsSettingsPackages"] = true

    shared.RebuildCargoIndex(ctx, ctx.Doer)

    ctx.Redirect(setting.AppSubURL + "/user/settings/packages")
}

func RegenerateChefKeyPair(ctx *context.Context) {
    priv, pub, err := util.GenerateKeyPair(chef_module.KeyBits)
    if err != nil {
        ctx.ServerError("GenerateKeyPair", err)
        return
    }

    if err := user_model.SetUserSetting(ctx.Doer.ID, chef_module.SettingPublicPem, pub); err != nil {
        ctx.ServerError("SetUserSetting", err)
        return
    }

    ctx.ServeContent(strings.NewReader(priv), &context.ServeHeaderOptions{
        ContentType: "application/x-pem-file",
        Filename: ctx.Doer.Name + ".priv",
    })
}
@ -137,11 +137,8 @@ func ProfilePost(ctx *context.Context) {
        return
    }

    // Update the language to the one we just set
    middleware.SetLocaleCookie(ctx.Resp, ctx.Doer.Language, 0)

    log.Trace("User settings updated: %s", ctx.Doer.Name)
    ctx.Flash.Success(translation.NewLocale(ctx.Doer.Language).Tr("settings.update_profile_success"))
    ctx.Flash.Success(ctx.Tr("settings.update_profile_success"))
    ctx.Redirect(setting.AppSubURL + "/user/settings")
}
@ -468,6 +468,11 @@ func RegisterRoutes(m *web.Route) {
                m.Get("/preview", user_setting.PackagesRulePreview)
            })
        })
        m.Group("/cargo", func() {
            m.Post("/initialize", user_setting.InitializeCargoIndex)
            m.Post("/rebuild", user_setting.RebuildCargoIndex)
        })
        m.Post("/chef/regenerate_keypair", user_setting.RegenerateChefKeyPair)
    }, packagesEnabled)
    m.Group("/secrets", func() {
        m.Get("", user_setting.Secrets)
@ -818,6 +823,10 @@ func RegisterRoutes(m *web.Route) {
                m.Get("/preview", org.PackagesRulePreview)
            })
        })
        m.Group("/cargo", func() {
            m.Post("/initialize", org.InitializeCargoIndex)
            m.Post("/rebuild", org.RebuildCargoIndex)
        })
    }, packagesEnabled)
}, func(ctx *context.Context) {
    ctx.Data["EnableOAuth2"] = setting.OAuth2.Enable
@ -16,7 +16,7 @@ import (
    "code.gitea.io/gitea/services/auth"
    "code.gitea.io/gitea/services/migrations"
    mirror_service "code.gitea.io/gitea/services/mirror"
    packages_service "code.gitea.io/gitea/services/packages"
    packages_cleanup_service "code.gitea.io/gitea/services/packages/cleanup"
    repo_service "code.gitea.io/gitea/services/repository"
    archiver_service "code.gitea.io/gitea/services/repository/archiver"
)
@ -152,7 +152,7 @@ func registerCleanupPackages() {
        OlderThan: 24 * time.Hour,
    }, func(ctx context.Context, _ *user_model.User, config Config) error {
        realConfig := config.(*OlderThanConfig)
        return packages_service.Cleanup(ctx, realConfig.OlderThan)
        return packages_cleanup_service.Cleanup(ctx, realConfig.OlderThan)
    })
}
@ -15,7 +15,7 @@ import (
type PackageCleanupRuleForm struct {
    ID int64
    Enabled bool
    Type string `binding:"Required;In(composer,conan,conda,container,generic,helm,maven,npm,nuget,pub,pypi,rubygems,vagrant)"`
    Type string `binding:"Required;In(cargo,chef,composer,conan,conda,container,generic,helm,maven,npm,nuget,pub,pypi,rubygems,vagrant)"`
    KeepCount int `binding:"In(0,1,5,10,25,50,100)"`
    KeepPattern string `binding:"RegexPattern"`
    RemoveDays int `binding:"In(0,7,14,30,60,90,180)"`
290 services/packages/cargo/index.go Normal file
@ -0,0 +1,290 @@
|
||||
// Copyright 2022 The Gitea Authors. All rights reserved.
|
||||
// SPDX-License-Identifier: MIT
|
||||
|
||||
package cargo
|
||||
|
||||
import (
|
||||
"bytes"
|
||||
"context"
|
||||
"errors"
|
||||
"fmt"
|
||||
"io"
|
||||
"path"
|
||||
"strconv"
|
||||
"time"
|
||||
|
||||
packages_model "code.gitea.io/gitea/models/packages"
|
||||
repo_model "code.gitea.io/gitea/models/repo"
|
||||
user_model "code.gitea.io/gitea/models/user"
|
||||
"code.gitea.io/gitea/modules/git"
|
||||
"code.gitea.io/gitea/modules/json"
|
||||
cargo_module "code.gitea.io/gitea/modules/packages/cargo"
|
||||
repo_module "code.gitea.io/gitea/modules/repository"
|
||||
"code.gitea.io/gitea/modules/setting"
|
||||
"code.gitea.io/gitea/modules/util"
|
||||
files_service "code.gitea.io/gitea/services/repository/files"
|
||||
)
|
||||
|
||||
const (
|
||||
IndexRepositoryName = "_cargo-index"
|
||||
ConfigFileName = "config.json"
|
||||
)
|
||||
|
||||
// https://doc.rust-lang.org/cargo/reference/registries.html#index-format
|
||||
|
||||
func BuildPackagePath(name string) string {
|
||||
switch len(name) {
|
||||
case 0:
|
||||
panic("Cargo package name can not be empty")
|
||||
case 1:
|
||||
return path.Join("1", name)
|
||||
case 2:
|
||||
return path.Join("2", name)
|
||||
case 3:
|
||||
return path.Join("3", string(name[0]), name)
|
||||
default:
|
||||
return path.Join(name[0:2], name[2:4], name)
|
||||
}
|
||||
}
|
||||
|
||||
func InitializeIndexRepository(ctx context.Context, doer, owner *user_model.User) error {
|
||||
repo, err := getOrCreateIndexRepository(ctx, doer, owner)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
|
||||
if err := createOrUpdateConfigFile(ctx, repo, doer, owner); err != nil {
|
||||
return fmt.Errorf("createOrUpdateConfigFile: %w", err)
|
||||
}
|
||||
|
||||
return nil
|
||||
}
|
||||
|
||||
func RebuildIndex(ctx context.Context, doer, owner *user_model.User) error {
|
||||
repo, err := getOrCreateIndexRepository(ctx, doer, owner)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
|
||||
ps, err := packages_model.GetPackagesByType(ctx, owner.ID, packages_model.TypeCargo)
|
||||
if err != nil {
|
||||
return fmt.Errorf("GetPackagesByType: %w", err)
|
||||
}
|
||||
|
||||
return alterRepositoryContent(
|
||||
ctx,
|
||||
doer,
|
||||
repo,
|
||||
"Rebuild Cargo Index",
|
||||
func(t *files_service.TemporaryUploadRepository) error {
|
||||
// Remove all existing content but the Cargo config
|
||||
files, err := t.LsFiles()
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
for i, file := range files {
|
||||
if file == ConfigFileName {
|
||||
files[i] = files[len(files)-1]
|
||||
files = files[:len(files)-1]
|
||||
break
|
||||
}
|
||||
}
|
||||
if err := t.RemoveFilesFromIndex(files...); err != nil {
|
||||
return err
|
||||
}
|
||||
|
||||
// Add all packages
|
||||
for _, p := range ps {
|
||||
if err := addOrUpdatePackageIndex(ctx, t, p); err != nil {
|
||||
return err
|
||||
}
|
||||
}
|
||||
|
||||
return nil
|
||||
},
|
||||
)
|
||||
}
|
||||
|
||||
func AddOrUpdatePackageIndex(ctx context.Context, doer, owner *user_model.User, packageID int64) error {
|
||||
repo, err := getOrCreateIndexRepository(ctx, doer, owner)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
|
||||
p, err := packages_model.GetPackageByID(ctx, packageID)
|
||||
if err != nil {
|
||||
return fmt.Errorf("GetPackageByID[%d]: %w", packageID, err)
|
||||
}
|
||||
|
||||
return alterRepositoryContent(
|
||||
ctx,
|
||||
doer,
|
||||
repo,
|
||||
"Update "+p.Name,
|
||||
func(t *files_service.TemporaryUploadRepository) error {
|
||||
return addOrUpdatePackageIndex(ctx, t, p)
|
||||
},
|
||||
)
|
||||
}
|
||||
|
||||
type IndexVersionEntry struct {
|
||||
Name string `json:"name"`
|
||||
Version string `json:"vers"`
|
||||
Dependencies []*cargo_module.Dependency `json:"deps"`
|
||||
FileChecksum string `json:"cksum"`
|
||||
Features map[string][]string `json:"features"`
|
||||
Yanked bool `json:"yanked"`
|
||||
Links string `json:"links,omitempty"`
|
||||
}
|
||||
|
||||
func addOrUpdatePackageIndex(ctx context.Context, t *files_service.TemporaryUploadRepository, p *packages_model.Package) error {
|
||||
pvs, _, err := packages_model.SearchVersions(ctx, &packages_model.PackageSearchOptions{
|
||||
PackageID: p.ID,
|
||||
Sort: packages_model.SortVersionAsc,
|
||||
})
|
||||
if err != nil {
|
||||
return fmt.Errorf("SearchVersions[%s]: %w", p.Name, err)
|
||||
}
|
||||
if len(pvs) == 0 {
|
||||
return nil
|
||||
}
|
||||
|
||||
pds, err := packages_model.GetPackageDescriptors(ctx, pvs)
|
||||
if err != nil {
|
||||
return fmt.Errorf("GetPackageDescriptors[%s]: %w", p.Name, err)
|
||||
}
|
||||
|
||||
var b bytes.Buffer
|
||||
for _, pd := range pds {
|
||||
metadata := pd.Metadata.(*cargo_module.Metadata)
|
||||
|
||||
dependencies := metadata.Dependencies
|
||||
if dependencies == nil {
|
||||
dependencies = make([]*cargo_module.Dependency, 0)
|
||||
}
|
||||
|
||||
features := metadata.Features
|
||||
if features == nil {
|
||||
features = make(map[string][]string)
|
||||
}
|
||||
|
||||
yanked, _ := strconv.ParseBool(pd.VersionProperties.GetByName(cargo_module.PropertyYanked))
|
||||
entry, err := json.Marshal(&IndexVersionEntry{
|
||||
Name: pd.Package.Name,
|
||||
Version: pd.Version.Version,
|
||||
Dependencies: dependencies,
|
||||
FileChecksum: pd.Files[0].Blob.HashSHA256,
|
||||
Features: features,
|
||||
Yanked: yanked,
|
||||
Links: metadata.Links,
|
||||
})
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
|
||||
b.Write(entry)
|
||||
b.WriteString("\n")
|
||||
}
|
||||
|
||||
return writeObjectToIndex(t, BuildPackagePath(pds[0].Package.LowerName), &b)
|
||||
}
|
||||
|
||||
func getOrCreateIndexRepository(ctx context.Context, doer, owner *user_model.User) (*repo_model.Repository, error) {
|
||||
repo, err := repo_model.GetRepositoryByOwnerAndName(ctx, owner.Name, IndexRepositoryName)
|
||||
if err != nil {
|
||||
if errors.Is(err, util.ErrNotExist) {
|
||||
repo, err = repo_module.CreateRepository(doer, owner, repo_module.CreateRepoOptions{
|
||||
Name: IndexRepositoryName,
|
||||
})
|
||||
if err != nil {
|
||||
return nil, fmt.Errorf("CreateRepository: %w", err)
|
||||
}
|
||||
} else {
|
||||
return nil, fmt.Errorf("GetRepositoryByOwnerAndName: %w", err)
|
||||
}
|
||||
}
|
||||
|
||||
return repo, nil
|
||||
}
|
||||
|
||||
type Config struct {
|
||||
DownloadURL string `json:"dl"`
|
||||
APIURL string `json:"api"`
|
||||
}
|
||||
|
||||
func createOrUpdateConfigFile(ctx context.Context, repo *repo_model.Repository, doer, owner *user_model.User) error {
|
||||
return alterRepositoryContent(
|
||||
ctx,
|
||||
doer,
|
||||
repo,
|
||||
"Initialize Cargo Config",
|
||||
func(t *files_service.TemporaryUploadRepository) error {
|
||||
var b bytes.Buffer
|
||||
err := json.NewEncoder(&b).Encode(Config{
|
||||
DownloadURL: setting.AppURL + "api/packages/" + owner.Name + "/cargo/api/v1/crates",
|
||||
APIURL: setting.AppURL + "api/packages/" + owner.Name + "/cargo",
|
||||
})
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
|
||||
return writeObjectToIndex(t, ConfigFileName, &b)
|
||||
},
|
||||
)
|
||||
}
|
||||
|
||||
// This is a shorter version of CreateOrUpdateRepoFile which allows to perform multiple actions on a git repository
|
||||
func alterRepositoryContent(ctx context.Context, doer *user_model.User, repo *repo_model.Repository, commitMessage string, fn func(*files_service.TemporaryUploadRepository) error) error {
|
||||
t, err := files_service.NewTemporaryUploadRepository(ctx, repo)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
defer t.Close()
|
||||
|
||||
var lastCommitID string
|
||||
if err := t.Clone(repo.DefaultBranch); err != nil {
|
||||
if !git.IsErrBranchNotExist(err) || !repo.IsEmpty {
|
||||
return err
|
||||
}
|
||||
if err := t.Init(); err != nil {
|
||||
return err
|
||||
}
|
||||
} else {
|
||||
if err := t.SetDefaultIndex(); err != nil {
|
||||
return err
|
||||
}
|
||||
|
||||
commit, err := t.GetBranchCommit(repo.DefaultBranch)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
|
||||
lastCommitID = commit.ID.String()
|
||||
}
|
||||
|
||||
if err := fn(t); err != nil {
|
||||
return err
|
||||
}
|
||||
|
||||
treeHash, err := t.WriteTree()
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
|
||||
now := time.Now()
|
||||
commitHash, err := t.CommitTreeWithDate(lastCommitID, doer, doer, treeHash, commitMessage, false, now, now)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
|
||||
return t.Push(doer, commitHash, repo.DefaultBranch)
|
||||
}
|
||||
|
||||
func writeObjectToIndex(t *files_service.TemporaryUploadRepository, path string, r io.Reader) error {
|
||||
hash, err := t.HashObject(r)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
|
||||
return t.AddObjectToIndex("100644", hash, path)
|
||||
}
|
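As a quick illustration of the index layout produced by BuildPackagePath above (following the upstream Cargo index format), a minimal sketch; it assumes it is run inside the Gitea module so the import path resolves:

package main

import (
    "fmt"

    cargo_service "code.gitea.io/gitea/services/packages/cargo"
)

func main() {
    for _, name := range []string{"a", "ab", "abc", "serde"} {
        fmt.Println(cargo_service.BuildPackagePath(name))
    }
    // Prints:
    //   1/a
    //   2/ab
    //   3/a/abc
    //   se/rd/serde
}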
154 services/packages/cleanup/cleanup.go Normal file
@ -0,0 +1,154 @@
// Copyright 2022 The Gitea Authors. All rights reserved.
// SPDX-License-Identifier: MIT

package container

import (
	"context"
	"fmt"
	"time"

	"code.gitea.io/gitea/models/db"
	packages_model "code.gitea.io/gitea/models/packages"
	user_model "code.gitea.io/gitea/models/user"
	"code.gitea.io/gitea/modules/log"
	packages_module "code.gitea.io/gitea/modules/packages"
	"code.gitea.io/gitea/modules/util"
	packages_service "code.gitea.io/gitea/services/packages"
	cargo_service "code.gitea.io/gitea/services/packages/cargo"
	container_service "code.gitea.io/gitea/services/packages/container"
)

// Cleanup removes expired package data
func Cleanup(taskCtx context.Context, olderThan time.Duration) error {
	ctx, committer, err := db.TxContext(taskCtx)
	if err != nil {
		return err
	}
	defer committer.Close()

	err = packages_model.IterateEnabledCleanupRules(ctx, func(ctx context.Context, pcr *packages_model.PackageCleanupRule) error {
		select {
		case <-taskCtx.Done():
			return db.ErrCancelledf("While processing package cleanup rules")
		default:
		}

		if err := pcr.CompiledPattern(); err != nil {
			return fmt.Errorf("CleanupRule [%d]: CompilePattern failed: %w", pcr.ID, err)
		}

		olderThan := time.Now().AddDate(0, 0, -pcr.RemoveDays)

		packages, err := packages_model.GetPackagesByType(ctx, pcr.OwnerID, pcr.Type)
		if err != nil {
			return fmt.Errorf("CleanupRule [%d]: GetPackagesByType failed: %w", pcr.ID, err)
		}

		for _, p := range packages {
			pvs, _, err := packages_model.SearchVersions(ctx, &packages_model.PackageSearchOptions{
				PackageID:  p.ID,
				IsInternal: util.OptionalBoolFalse,
				Sort:       packages_model.SortCreatedDesc,
				Paginator:  db.NewAbsoluteListOptions(pcr.KeepCount, 200),
			})
			if err != nil {
				return fmt.Errorf("CleanupRule [%d]: SearchVersions failed: %w", pcr.ID, err)
			}
			versionDeleted := false
			for _, pv := range pvs {
				if pcr.Type == packages_model.TypeContainer {
					if skip, err := container_service.ShouldBeSkipped(ctx, pcr, p, pv); err != nil {
						return fmt.Errorf("CleanupRule [%d]: container.ShouldBeSkipped failed: %w", pcr.ID, err)
					} else if skip {
						log.Debug("Rule[%d]: keep '%s/%s' (container)", pcr.ID, p.Name, pv.Version)
						continue
					}
				}

				toMatch := pv.LowerVersion
				if pcr.MatchFullName {
					toMatch = p.LowerName + "/" + pv.LowerVersion
				}

				if pcr.KeepPatternMatcher != nil && pcr.KeepPatternMatcher.MatchString(toMatch) {
					log.Debug("Rule[%d]: keep '%s/%s' (keep pattern)", pcr.ID, p.Name, pv.Version)
					continue
				}
				if pv.CreatedUnix.AsLocalTime().After(olderThan) {
					log.Debug("Rule[%d]: keep '%s/%s' (remove days)", pcr.ID, p.Name, pv.Version)
					continue
				}
				if pcr.RemovePatternMatcher != nil && !pcr.RemovePatternMatcher.MatchString(toMatch) {
					log.Debug("Rule[%d]: keep '%s/%s' (remove pattern)", pcr.ID, p.Name, pv.Version)
					continue
				}

				log.Debug("Rule[%d]: remove '%s/%s'", pcr.ID, p.Name, pv.Version)

				if err := packages_service.DeletePackageVersionAndReferences(ctx, pv); err != nil {
					return fmt.Errorf("CleanupRule [%d]: DeletePackageVersionAndReferences failed: %w", pcr.ID, err)
				}

				versionDeleted = true
			}

			if versionDeleted {
				if pcr.Type == packages_model.TypeCargo {
					owner, err := user_model.GetUserByID(ctx, pcr.OwnerID)
					if err != nil {
						return fmt.Errorf("GetUserByID failed: %w", err)
					}
					if err := cargo_service.AddOrUpdatePackageIndex(ctx, owner, owner, p.ID); err != nil {
						return fmt.Errorf("CleanupRule [%d]: cargo.AddOrUpdatePackageIndex failed: %w", pcr.ID, err)
					}
				}
			}
		}
		return nil
	})
	if err != nil {
		return err
	}

	if err := container_service.Cleanup(ctx, olderThan); err != nil {
		return err
	}

	ps, err := packages_model.FindUnreferencedPackages(ctx)
	if err != nil {
		return err
	}
	for _, p := range ps {
		if err := packages_model.DeleteAllProperties(ctx, packages_model.PropertyTypePackage, p.ID); err != nil {
			return err
		}
		if err := packages_model.DeletePackageByID(ctx, p.ID); err != nil {
			return err
		}
	}

	pbs, err := packages_model.FindExpiredUnreferencedBlobs(ctx, olderThan)
	if err != nil {
		return err
	}

	for _, pb := range pbs {
		if err := packages_model.DeleteBlobByID(ctx, pb.ID); err != nil {
			return err
		}
	}

	if err := committer.Commit(); err != nil {
		return err
	}

	contentStore := packages_module.NewContentStore()
	for _, pb := range pbs {
		if err := contentStore.Delete(packages_module.BlobHash256Key(pb.HashSHA256)); err != nil {
			log.Error("Error deleting package blob [%v]: %v", pb.ID, err)
		}
	}

	return nil
}
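To make the rule evaluation above easier to follow, here is a small standalone sketch of the per-version decision order: a matching keep pattern always wins, then the remove-days cutoff is checked, and finally a configured remove pattern must match before a version is deleted. The KeepCount limit is handled separately above, by skipping the newest versions via the paginator offset. The code below is illustrative only and not part of the diff.

package main

import (
	"fmt"
	"regexp"
	"time"
)

// shouldRemove mirrors the keep/remove checks used by the cleanup rule loop above.
func shouldRemove(toMatch string, created time.Time, keep, remove *regexp.Regexp, cutoff time.Time) bool {
	if keep != nil && keep.MatchString(toMatch) {
		return false // keep pattern wins first
	}
	if created.After(cutoff) {
		return false // still inside the remove-days window
	}
	if remove != nil && !remove.MatchString(toMatch) {
		return false // a configured remove pattern must match
	}
	return true
}

func main() {
	keep := regexp.MustCompile(`^1\.`)
	cutoff := time.Now().AddDate(0, 0, -30) // e.g. RemoveDays = 30
	old := time.Now().AddDate(0, 0, -60)

	fmt.Println(shouldRemove("1.0.0", old, keep, nil, cutoff)) // false: kept by pattern
	fmt.Println(shouldRemove("0.9.0", old, keep, nil, cutoff)) // true: old and not kept
}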
@@ -10,7 +10,6 @@ import (
	"fmt"
	"io"
	"strings"
	"time"

	"code.gitea.io/gitea/models/db"
	packages_model "code.gitea.io/gitea/models/packages"
@@ -22,7 +21,6 @@ import (
	packages_module "code.gitea.io/gitea/modules/packages"
	"code.gitea.io/gitea/modules/setting"
	"code.gitea.io/gitea/modules/util"
	container_service "code.gitea.io/gitea/services/packages/container"
)

var (
@@ -335,6 +333,10 @@ func CheckSizeQuotaExceeded(ctx context.Context, doer, owner *user_model.User, p

	var typeSpecificSize int64
	switch packageType {
	case packages_model.TypeCargo:
		typeSpecificSize = setting.Packages.LimitSizeCargo
	case packages_model.TypeChef:
		typeSpecificSize = setting.Packages.LimitSizeChef
	case packages_model.TypeComposer:
		typeSpecificSize = setting.Packages.LimitSizeComposer
	case packages_model.TypeConan:
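The hunk above only shows the new LimitSizeCargo and LimitSizeChef cases being added to the type switch; how the resulting typeSpecificSize interacts with the global size limit is outside the shown context. As a rough, hedged sketch of the usual pattern (an assumption, not necessarily the exact Gitea logic), a positive per-type limit would override the global default:

// Illustrative only: names and behaviour are assumptions about code not shown in this hunk.
func effectiveSizeLimit(typeSpecificSize, globalLimit int64) int64 {
	if typeSpecificSize > 0 {
		return typeSpecificSize // per-type limit takes precedence when configured
	}
	return globalLimit
}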
@ -448,123 +450,6 @@ func DeletePackageFile(ctx context.Context, pf *packages_model.PackageFile) erro
|
||||
return packages_model.DeleteFileByID(ctx, pf.ID)
|
||||
}
|
||||
|
||||
// Cleanup removes expired package data
|
||||
func Cleanup(taskCtx context.Context, olderThan time.Duration) error {
|
||||
ctx, committer, err := db.TxContext(taskCtx)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
defer committer.Close()
|
||||
|
||||
err = packages_model.IterateEnabledCleanupRules(ctx, func(ctx context.Context, pcr *packages_model.PackageCleanupRule) error {
|
||||
select {
|
||||
case <-taskCtx.Done():
|
||||
return db.ErrCancelledf("While processing package cleanup rules")
|
||||
default:
|
||||
}
|
||||
|
||||
if err := pcr.CompiledPattern(); err != nil {
|
||||
return fmt.Errorf("CleanupRule [%d]: CompilePattern failed: %w", pcr.ID, err)
|
||||
}
|
||||
|
||||
olderThan := time.Now().AddDate(0, 0, -pcr.RemoveDays)
|
||||
|
||||
packages, err := packages_model.GetPackagesByType(ctx, pcr.OwnerID, pcr.Type)
|
||||
if err != nil {
|
||||
return fmt.Errorf("CleanupRule [%d]: GetPackagesByType failed: %w", pcr.ID, err)
|
||||
}
|
||||
|
||||
for _, p := range packages {
|
||||
pvs, _, err := packages_model.SearchVersions(ctx, &packages_model.PackageSearchOptions{
|
||||
PackageID: p.ID,
|
||||
IsInternal: util.OptionalBoolFalse,
|
||||
Sort: packages_model.SortCreatedDesc,
|
||||
Paginator: db.NewAbsoluteListOptions(pcr.KeepCount, 200),
|
||||
})
|
||||
if err != nil {
|
||||
return fmt.Errorf("CleanupRule [%d]: SearchVersions failed: %w", pcr.ID, err)
|
||||
}
|
||||
for _, pv := range pvs {
|
||||
if skip, err := container_service.ShouldBeSkipped(ctx, pcr, p, pv); err != nil {
|
||||
return fmt.Errorf("CleanupRule [%d]: container.ShouldBeSkipped failed: %w", pcr.ID, err)
|
||||
} else if skip {
|
||||
log.Debug("Rule[%d]: keep '%s/%s' (container)", pcr.ID, p.Name, pv.Version)
|
||||
continue
|
||||
}
|
||||
|
||||
toMatch := pv.LowerVersion
|
||||
if pcr.MatchFullName {
|
||||
toMatch = p.LowerName + "/" + pv.LowerVersion
|
||||
}
|
||||
|
||||
if pcr.KeepPatternMatcher != nil && pcr.KeepPatternMatcher.MatchString(toMatch) {
|
||||
log.Debug("Rule[%d]: keep '%s/%s' (keep pattern)", pcr.ID, p.Name, pv.Version)
|
||||
continue
|
||||
}
|
||||
if pv.CreatedUnix.AsLocalTime().After(olderThan) {
|
||||
log.Debug("Rule[%d]: keep '%s/%s' (remove days)", pcr.ID, p.Name, pv.Version)
|
||||
continue
|
||||
}
|
||||
if pcr.RemovePatternMatcher != nil && !pcr.RemovePatternMatcher.MatchString(toMatch) {
|
||||
log.Debug("Rule[%d]: keep '%s/%s' (remove pattern)", pcr.ID, p.Name, pv.Version)
|
||||
continue
|
||||
}
|
||||
|
||||
log.Debug("Rule[%d]: remove '%s/%s'", pcr.ID, p.Name, pv.Version)
|
||||
|
||||
if err := DeletePackageVersionAndReferences(ctx, pv); err != nil {
|
||||
return fmt.Errorf("CleanupRule [%d]: DeletePackageVersionAndReferences failed: %w", pcr.ID, err)
|
||||
}
|
||||
}
|
||||
}
|
||||
return nil
|
||||
})
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
|
||||
if err := container_service.Cleanup(ctx, olderThan); err != nil {
|
||||
return err
|
||||
}
|
||||
|
||||
ps, err := packages_model.FindUnreferencedPackages(ctx)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
for _, p := range ps {
|
||||
if err := packages_model.DeleteAllProperties(ctx, packages_model.PropertyTypePackage, p.ID); err != nil {
|
||||
return err
|
||||
}
|
||||
if err := packages_model.DeletePackageByID(ctx, p.ID); err != nil {
|
||||
return err
|
||||
}
|
||||
}
|
||||
|
||||
pbs, err := packages_model.FindExpiredUnreferencedBlobs(ctx, olderThan)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
|
||||
for _, pb := range pbs {
|
||||
if err := packages_model.DeleteBlobByID(ctx, pb.ID); err != nil {
|
||||
return err
|
||||
}
|
||||
}
|
||||
|
||||
if err := committer.Commit(); err != nil {
|
||||
return err
|
||||
}
|
||||
|
||||
contentStore := packages_module.NewContentStore()
|
||||
for _, pb := range pbs {
|
||||
if err := contentStore.Delete(packages_module.BlobHash256Key(pb.HashSHA256)); err != nil {
|
||||
log.Error("Error deleting package blob [%v]: %v", pb.ID, err)
|
||||
}
|
||||
}
|
||||
|
||||
return nil
|
||||
}
|
||||
|
||||
// GetFileStreamByPackageNameAndVersion returns the content of the specific package file
|
||||
func GetFileStreamByPackageNameAndVersion(ctx context.Context, pvi *PackageInfo, pfi *PackageFileInfo) (io.ReadSeekCloser, *packages_model.PackageFile, error) {
|
||||
log.Trace("Getting package file stream: %v, %v, %s, %s, %s, %s", pvi.Owner.ID, pvi.PackageType, pvi.Name, pvi.Version, pfi.Filename, pfi.CompositeKey)
|
||||
|
@@ -263,6 +263,24 @@ func AddTestPullRequestTask(doer *user_model.User, repoID int64, branch string,
		return
	}

	for _, pr := range prs {
		log.Trace("Updating PR[%d]: composing new test task", pr.ID)
		if pr.Flow == issues_model.PullRequestFlowGithub {
			if err := PushToBaseRepo(ctx, pr); err != nil {
				log.Error("PushToBaseRepo: %v", err)
				continue
			}
		} else {
			continue
		}

		AddToTaskQueue(pr)
		comment, err := CreatePushPullComment(ctx, doer, pr, oldCommitID, newCommitID)
		if err == nil && comment != nil {
			notification.NotifyPullRequestPushCommits(ctx, doer, pr, comment)
		}
	}

	if isSync {
		requests := issues_model.PullRequestList(prs)
		if err = requests.LoadAttributes(); err != nil {
@@ -303,24 +321,6 @@ func AddTestPullRequestTask(doer *user_model.User, repoID int64, branch string,
		}
	}

	for _, pr := range prs {
		log.Trace("Updating PR[%d]: composing new test task", pr.ID)
		if pr.Flow == issues_model.PullRequestFlowGithub {
			if err := PushToBaseRepo(ctx, pr); err != nil {
				log.Error("PushToBaseRepo: %v", err)
				continue
			}
		} else {
			continue
		}

		AddToTaskQueue(pr)
		comment, err := CreatePushPullComment(ctx, doer, pr, oldCommitID, newCommitID)
		if err == nil && comment != nil {
			notification.NotifyPullRequestPushCommits(ctx, doer, pr, comment)
		}
	}

	log.Trace("AddTestPullRequestTask [base_repo_id: %d, base_branch: %s]: finding pull requests", repoID, branch)
	prs, err = issues_model.GetUnmergedPullRequestsByBaseInfo(repoID, branch)
	if err != nil {
@@ -16,10 +16,13 @@
	<!-- Third-party libraries -->
	{{if .EnableCaptcha}}
		{{if eq .CaptchaType "recaptcha"}}
			<script src='{{URLJoin .RecaptchaURL "api.js"}}' async></script>
			<script src='{{URLJoin .RecaptchaURL "api.js"}}'></script>
		{{end}}
		{{if eq .CaptchaType "hcaptcha"}}
			<script src='https://hcaptcha.com/1/api.js' async></script>
			<script src='https://hcaptcha.com/1/api.js'></script>
		{{end}}
		{{if eq .CaptchaType "cfturnstile"}}
			<script src='https://challenges.cloudflare.com/turnstile/v0/api.js'></script>
		{{end}}
	{{end}}
	<script src="{{AssetUrlPrefix}}/js/index.js?v={{AssetVersion}}" onerror="alert('Failed to load asset files from ' + this.src + '. Please make sure the asset files can be accessed.')"></script>
@@ -7,6 +7,7 @@
		<div class="twelve wide column content">
			{{template "base/alert" .}}
			{{template "package/shared/cleanup_rules/list" .}}
			{{template "package/shared/cargo" .}}
		</div>
	</div>
</div>
62 templates/package/content/cargo.tmpl Normal file
@@ -0,0 +1,62 @@
{{if eq .PackageDescriptor.Package.Type "cargo"}}
	<h4 class="ui top attached header">{{.locale.Tr "packages.installation"}}</h4>
	<div class="ui attached segment">
		<div class="ui form">
			<div class="field">
				<label>{{svg "octicon-code"}} {{.locale.Tr "packages.cargo.registry" | Safe}}</label>
				<div class="markup"><pre class="code-block"><code>[registry]
default = "gitea"

[registries.gitea]
index = "{{AppUrl}}{{.PackageDescriptor.Owner.Name}}/_cargo-index.git"

[net]
git-fetch-with-cli = true</code></pre></div>
			</div>
			<div class="field">
				<label>{{svg "octicon-terminal"}} {{.locale.Tr "packages.cargo.install"}}</label>
				<div class="markup"><pre class="code-block"><code>cargo add {{.PackageDescriptor.Package.Name}}@{{.PackageDescriptor.Version.Version}}</code></pre></div>
			</div>
			<div class="field">
				<label>{{.locale.Tr "packages.cargo.documentation" | Safe}}</label>
			</div>
		</div>
	</div>

	{{if or .PackageDescriptor.Metadata.Description .PackageDescriptor.Metadata.Readme}}
		<h4 class="ui top attached header">{{.locale.Tr "packages.about"}}</h4>
		{{if .PackageDescriptor.Metadata.Description}}<div class="ui attached segment">{{.PackageDescriptor.Metadata.Description}}</div>{{end}}
		{{if .PackageDescriptor.Metadata.Readme}}<div class="ui attached segment">{{RenderMarkdownToHtml .PackageDescriptor.Metadata.Readme}}</div>{{end}}
	{{end}}

	{{if .PackageDescriptor.Metadata.Dependencies}}
		<h4 class="ui top attached header">{{.locale.Tr "packages.dependencies"}}</h4>
		<div class="ui attached segment">
			<table class="ui single line very basic table">
				<thead>
					<tr>
						<th class="ten wide">{{.locale.Tr "packages.dependency.id"}}</th>
						<th class="six wide">{{.locale.Tr "packages.dependency.version"}}</th>
					</tr>
				</thead>
				<tbody>
					{{range .PackageDescriptor.Metadata.Dependencies}}
					<tr>
						<td>{{.Name}}</td>
						<td>{{.Req}}</td>
					</tr>
					{{end}}
				</tbody>
			</table>
		</div>
	{{end}}

	{{if .PackageDescriptor.Metadata.Keywords}}
		<h4 class="ui top attached header">{{.locale.Tr "packages.keywords"}}</h4>
		<div class="ui attached segment">
			{{range .PackageDescriptor.Metadata.Keywords}}
				{{.}}
			{{end}}
		</div>
	{{end}}
{{end}}
48 templates/package/content/chef.tmpl Normal file
@@ -0,0 +1,48 @@
|
||||
{{if eq .PackageDescriptor.Package.Type "chef"}}
|
||||
<h4 class="ui top attached header">{{.locale.Tr "packages.installation"}}</h4>
|
||||
<div class="ui attached segment">
|
||||
<div class="ui form">
|
||||
<div class="field">
|
||||
<label>{{svg "octicon-code"}} {{.locale.Tr "packages.chef.registry" | Safe}}</label>
|
||||
<div class="markup"><pre class="code-block"><code>knife[:supermarket_site] = '{{AppUrl}}api/packages/{{.PackageDescriptor.Owner.Name}}/chef'</code></pre></div>
|
||||
</div>
|
||||
<div class="field">
|
||||
<label>{{svg "octicon-terminal"}} {{.locale.Tr "packages.chef.install"}}</label>
|
||||
<div class="markup"><pre class="code-block"><code>knife supermarket install {{.PackageDescriptor.Package.Name}} {{.PackageDescriptor.Version.Version}}</code></pre></div>
|
||||
</div>
|
||||
<div class="field">
|
||||
<label>{{.locale.Tr "packages.chef.documentation" | Safe}}</label>
|
||||
</div>
|
||||
</div>
|
||||
</div>
|
||||
|
||||
{{if or .PackageDescriptor.Metadata.Description .PackageDescriptor.Metadata.LongDescription}}
|
||||
<h4 class="ui top attached header">{{.locale.Tr "packages.about"}}</h4>
|
||||
<div class="ui attached segment">
|
||||
{{if .PackageDescriptor.Metadata.Description}}<p>{{.PackageDescriptor.Metadata.Description}}</p>{{end}}
|
||||
{{if .PackageDescriptor.Metadata.LongDescription}}{{RenderMarkdownToHtml .PackageDescriptor.Metadata.LongDescription}}{{end}}
|
||||
</div>
|
||||
{{end}}
|
||||
|
||||
{{if .PackageDescriptor.Metadata.Dependencies}}
|
||||
<h4 class="ui top attached header">{{.locale.Tr "packages.dependencies"}}</h4>
|
||||
<div class="ui attached segment">
|
||||
<table class="ui single line very basic table">
|
||||
<thead>
|
||||
<tr>
|
||||
<th class="eleven wide">{{.locale.Tr "packages.dependency.id"}}</th>
|
||||
<th class="five wide">{{.locale.Tr "packages.dependency.version"}}</th>
|
||||
</tr>
|
||||
</thead>
|
||||
<tbody>
|
||||
{{range $dependency, $version := .PackageDescriptor.Metadata.Dependencies}}
|
||||
<tr>
|
||||
<td>{{$dependency}}</td>
|
||||
<td>{{$version}}</td>
|
||||
</tr>
|
||||
{{end}}
|
||||
</tbody>
|
||||
</table>
|
||||
</div>
|
||||
{{end}}
|
||||
{{end}}
|
7 templates/package/metadata/cargo.tmpl Normal file
@@ -0,0 +1,7 @@
|
||||
{{if eq .PackageDescriptor.Package.Type "cargo"}}
|
||||
{{range .PackageDescriptor.Metadata.Authors}}<div class="item" title="{{$.locale.Tr "packages.details.author"}}">{{svg "octicon-person" 16 "mr-3"}} {{.}}</div>{{end}}
|
||||
{{if .PackageDescriptor.Metadata.ProjectURL}}<div class="item">{{svg "octicon-link-external" 16 "mr-3"}} <a href="{{.PackageDescriptor.Metadata.ProjectURL}}" target="_blank" rel="noopener noreferrer me">{{.locale.Tr "packages.details.project_site"}}</a></div>{{end}}
|
||||
{{if .PackageDescriptor.Metadata.RepositoryURL}}<div class="item">{{svg "octicon-link-external" 16 "mr-3"}} <a href="{{.PackageDescriptor.Metadata.RepositoryURL}}" target="_blank" rel="noopener noreferrer me">{{.locale.Tr "packages.cargo.details.repository_site"}}</a></div>{{end}}
|
||||
{{if .PackageDescriptor.Metadata.DocumentationURL}}<div class="item">{{svg "octicon-link-external" 16 "mr-3"}} <a href="{{.PackageDescriptor.Metadata.DocumentationURL}}" target="_blank" rel="noopener noreferrer me">{{.locale.Tr "packages.cargo.details.documentation_site"}}</a></div>{{end}}
|
||||
{{if .PackageDescriptor.Metadata.License}}<div class="item" title="{{$.locale.Tr "packages.details.license"}}">{{svg "octicon-law" 16 "mr-3"}} {{.PackageDescriptor.Metadata.License}}</div>{{end}}
|
||||
{{end}}
|
5 templates/package/metadata/chef.tmpl Normal file
@@ -0,0 +1,5 @@
|
||||
{{if eq .PackageDescriptor.Package.Type "chef"}}
|
||||
{{if .PackageDescriptor.Metadata.Author}}<div class="item" title="{{$.locale.Tr "packages.details.author"}}">{{svg "octicon-person" 16 "mr-3"}} {{.PackageDescriptor.Metadata.Author}}</div>{{end}}
|
||||
{{if .PackageDescriptor.Metadata.RepositoryURL}}<div class="item">{{svg "octicon-link-external" 16 "mr-3"}} <a href="{{.PackageDescriptor.Metadata.RepositoryURL}}" target="_blank" rel="noopener noreferrer me">{{.locale.Tr "packages.details.repository_site"}}</a></div>{{end}}
|
||||
{{if .PackageDescriptor.Metadata.License}}<div class="item" title="{{$.locale.Tr "packages.details.license"}}">{{svg "octicon-law" 16 "mr-3"}} {{.PackageDescriptor.Metadata.License}}</div>{{end}}
|
||||
{{end}}
|
@ -4,6 +4,6 @@
|
||||
{{range .PackageDescriptor.Metadata.Authors}}<div class="item" title="{{$.locale.Tr "packages.details.author"}}">{{svg "octicon-person" 16 "mr-3"}} {{.}}</div>{{end}}
|
||||
{{if .PackageDescriptor.Metadata.Licenses}}<div class="item">{{svg "octicon-law" 16 "mr-3"}} {{.PackageDescriptor.Metadata.Licenses}}</div>{{end}}
|
||||
{{if .PackageDescriptor.Metadata.ProjectURL}}<div class="item">{{svg "octicon-link-external" 16 "mr-3"}} <a href="{{.PackageDescriptor.Metadata.ProjectURL}}" target="_blank" rel="noopener noreferrer me">{{.locale.Tr "packages.details.project_site"}}</a></div>{{end}}
|
||||
{{if .PackageDescriptor.Metadata.RepositoryURL}}<div class="item">{{svg "octicon-link-external" 16 "mr-3"}} <a href="{{.PackageDescriptor.Metadata.RepositoryURL}}" target="_blank" rel="noopener noreferrer me">{{.locale.Tr "packages.container.details.repository_site"}}</a></div>{{end}}
|
||||
{{if .PackageDescriptor.Metadata.DocumentationURL}}<div class="item">{{svg "octicon-link-external" 16 "mr-3"}} <a href="{{.PackageDescriptor.Metadata.DocumentationURL}}" target="_blank" rel="noopener noreferrer me">{{.locale.Tr "packages.container.details.documentation_site"}}</a></div>{{end}}
|
||||
{{if .PackageDescriptor.Metadata.RepositoryURL}}<div class="item">{{svg "octicon-link-external" 16 "mr-3"}} <a href="{{.PackageDescriptor.Metadata.RepositoryURL}}" target="_blank" rel="noopener noreferrer me">{{.locale.Tr "packages.details.repository_site"}}</a></div>{{end}}
|
||||
{{if .PackageDescriptor.Metadata.DocumentationURL}}<div class="item">{{svg "octicon-link-external" 16 "mr-3"}} <a href="{{.PackageDescriptor.Metadata.DocumentationURL}}" target="_blank" rel="noopener noreferrer me">{{.locale.Tr "packages.details.documentation_site"}}</a></div>{{end}}
|
||||
{{end}}
|
||||
|
@ -1,5 +1,5 @@
|
||||
{{if eq .PackageDescriptor.Package.Type "pub"}}
|
||||
{{if .PackageDescriptor.Metadata.ProjectURL}}<div class="item">{{svg "octicon-link-external" 16 "mr-3"}} <a href="{{.PackageDescriptor.Metadata.ProjectURL}}" target="_blank" rel="noopener noreferrer me">{{.locale.Tr "packages.details.project_site"}}</a></div>{{end}}
|
||||
{{if .PackageDescriptor.Metadata.RepositoryURL}}<div class="item">{{svg "octicon-link-external" 16 "mr-3"}} <a href="{{.PackageDescriptor.Metadata.RepositoryURL}}" target="_blank" rel="noopener noreferrer me">{{.locale.Tr "packages.pub.details.repository_site"}}</a></div>{{end}}
|
||||
{{if .PackageDescriptor.Metadata.DocumentationURL}}<div class="item">{{svg "octicon-link-external" 16 "mr-3"}} <a href="{{.PackageDescriptor.Metadata.DocumentationURL}}" target="_blank" rel="noopener noreferrer me">{{.locale.Tr "packages.pub.details.documentation_site"}}</a></div>{{end}}
|
||||
{{if .PackageDescriptor.Metadata.RepositoryURL}}<div class="item">{{svg "octicon-link-external" 16 "mr-3"}} <a href="{{.PackageDescriptor.Metadata.RepositoryURL}}" target="_blank" rel="noopener noreferrer me">{{.locale.Tr "packages.details.repository_site"}}</a></div>{{end}}
|
||||
{{if .PackageDescriptor.Metadata.DocumentationURL}}<div class="item">{{svg "octicon-link-external" 16 "mr-3"}} <a href="{{.PackageDescriptor.Metadata.DocumentationURL}}" target="_blank" rel="noopener noreferrer me">{{.locale.Tr "packages.details.documentation_site"}}</a></div>{{end}}
|
||||
{{end}}
|
||||
|
24 templates/package/shared/cargo.tmpl Normal file
@@ -0,0 +1,24 @@
<h4 class="ui top attached header">
	{{.locale.Tr "packages.owner.settings.cargo.title"}}
</h4>
<div class="ui attached segment">
	<div class="ui form">
		<div class="field">
			<label>{{$.locale.Tr "packages.owner.settings.cargo.initialize.description"}}</label>
		</div>
		<form class="field" action="{{.Link}}/cargo/initialize" method="post">
			{{.CsrfTokenHtml}}
			<button class="ui green button">{{$.locale.Tr "packages.owner.settings.cargo.initialize"}}</button>
		</form>
		<div class="field">
			<label>{{$.locale.Tr "packages.owner.settings.cargo.rebuild.description"}}</label>
		</div>
		<form class="field" action="{{.Link}}/cargo/rebuild" method="post">
			{{.CsrfTokenHtml}}
			<button class="ui green button">{{$.locale.Tr "packages.owner.settings.cargo.rebuild"}}</button>
		</form>
		<div class="field">
			<label>{{.locale.Tr "packages.cargo.documentation" | Safe}}</label>
		</div>
	</div>
</div>
@@ -19,6 +19,8 @@
		<div class="ui divider"></div>
	</div>
	<div class="twelve wide column">
		{{template "package/content/cargo" .}}
		{{template "package/content/chef" .}}
		{{template "package/content/composer" .}}
		{{template "package/content/conan" .}}
		{{template "package/content/conda" .}}
@@ -43,6 +45,8 @@
			{{end}}
			<div class="item">{{svg "octicon-calendar" 16 "mr-3"}} {{TimeSinceUnix .PackageDescriptor.Version.CreatedUnix $.locale}}</div>
			<div class="item">{{svg "octicon-download" 16 "mr-3"}} {{.PackageDescriptor.Version.DownloadCount}}</div>
			{{template "package/metadata/cargo" .}}
			{{template "package/metadata/chef" .}}
			{{template "package/metadata/composer" .}}
			{{template "package/metadata/conan" .}}
			{{template "package/metadata/conda" .}}
@@ -2100,6 +2100,8 @@
        },
        {
          "enum": [
            "cargo",
            "chef",
            "composer",
            "conan",
            "conda",
@@ -9,16 +9,20 @@
	</div>
{{else if eq .CaptchaType "recaptcha"}}
	<div class="inline field required">
		<div class="g-recaptcha" data-sitekey="{{.RecaptchaSitekey}}"></div>
		<div id="captcha" data-captcha-type="g-recaptcha" class="g-recaptcha-style" data-sitekey="{{.RecaptchaSitekey}}"></div>
	</div>
{{else if eq .CaptchaType "hcaptcha"}}
	<div class="inline field required">
		<div class="h-captcha" data-sitekey="{{.HcaptchaSitekey}}"></div>
		<div id="captcha" data-captcha-type="h-captcha" class="h-captcha-style" data-sitekey="{{.HcaptchaSitekey}}"></div>
	</div>
{{else if eq .CaptchaType "mcaptcha"}}
	<div class="inline field df ac db-small captcha-field">
		<span>{{.locale.Tr "captcha"}}</span>
		<div class="border-secondary w-100-small" id="mcaptcha__widget-container" style="width: 50%; height: 5em"></div>
		<div class="m-captcha" data-sitekey="{{.McaptchaSitekey}}" data-instance-url="{{.McaptchaURL}}"></div>
		<div id="captcha" data-captcha-type="m-captcha" class="m-captcha" data-sitekey="{{.McaptchaSitekey}}" data-instance-url="{{.McaptchaURL}}"></div>
	</div>
{{else if eq .CaptchaType "cfturnstile"}}
	<div class="inline field captcha-field tc">
		<div id="captcha" data-captcha-type="cf-turnstile" data-sitekey="{{.CfTurnstileSitekey}}"></div>
	</div>
{{end}}{{end}}
@@ -4,6 +4,25 @@
	<div class="ui container">
		{{template "base/alert" .}}
		{{template "package/shared/cleanup_rules/list" .}}
		{{template "package/shared/cargo" .}}

		<h4 class="ui top attached header">
			{{.locale.Tr "packages.owner.settings.chef.title"}}
		</h4>
		<div class="ui attached segment">
			<div class="ui form">
				<div class="field">
					<label>{{$.locale.Tr "packages.owner.settings.chef.keypair.description"}}</label>
				</div>
				<form class="field" action="{{.Link}}/chef/regenerate_keypair" method="post">
					{{.CsrfTokenHtml}}
					<button class="ui green button">{{$.locale.Tr "packages.owner.settings.chef.keypair"}}</button>
				</form>
				<div class="field">
					<label>{{.locale.Tr "packages.chef.documentation" | Safe}}</label>
				</div>
			</div>
		</div>
	</div>
</div>
{{template "base/footer" .}}
341 tests/integration/api_packages_cargo_test.go Normal file
@@ -0,0 +1,341 @@
|
||||
// Copyright 2021 The Gitea Authors. All rights reserved.
|
||||
// Use of this source code is governed by a MIT-style
|
||||
// license that can be found in the LICENSE file.
|
||||
|
||||
package integration
|
||||
|
||||
import (
|
||||
"bytes"
|
||||
"encoding/binary"
|
||||
"fmt"
|
||||
"io"
|
||||
"net/http"
|
||||
neturl "net/url"
|
||||
"testing"
|
||||
|
||||
"code.gitea.io/gitea/models/db"
|
||||
"code.gitea.io/gitea/models/packages"
|
||||
repo_model "code.gitea.io/gitea/models/repo"
|
||||
"code.gitea.io/gitea/models/unittest"
|
||||
user_model "code.gitea.io/gitea/models/user"
|
||||
"code.gitea.io/gitea/modules/git"
|
||||
"code.gitea.io/gitea/modules/json"
|
||||
cargo_module "code.gitea.io/gitea/modules/packages/cargo"
|
||||
"code.gitea.io/gitea/modules/setting"
|
||||
cargo_router "code.gitea.io/gitea/routers/api/packages/cargo"
|
||||
cargo_service "code.gitea.io/gitea/services/packages/cargo"
|
||||
"code.gitea.io/gitea/tests"
|
||||
|
||||
"github.com/stretchr/testify/assert"
|
||||
)
|
||||
|
||||
func TestPackageCargo(t *testing.T) {
|
||||
onGiteaRun(t, testPackageCargo)
|
||||
}
|
||||
|
||||
func testPackageCargo(t *testing.T, _ *neturl.URL) {
|
||||
user := unittest.AssertExistsAndLoadBean(t, &user_model.User{ID: 2})
|
||||
|
||||
packageName := "cargo-package"
|
||||
packageVersion := "1.0.3"
|
||||
packageDescription := "Package Description"
|
||||
packageAuthor := "KN4CK3R"
|
||||
packageHomepage := "https://gitea.io/"
|
||||
packageLicense := "MIT"
|
||||
|
||||
createPackage := func(name, version string) io.Reader {
|
||||
metadata := `{
|
||||
"name":"` + name + `",
|
||||
"vers":"` + version + `",
|
||||
"description":"` + packageDescription + `",
|
||||
"authors": ["` + packageAuthor + `"],
|
||||
"deps":[
|
||||
{
|
||||
"name":"dep",
|
||||
"version_req":"1.0",
|
||||
"registry": "https://gitea.io/user/_cargo-index",
|
||||
"kind": "normal",
|
||||
"default_features": true
|
||||
}
|
||||
],
|
||||
"homepage":"` + packageHomepage + `",
|
||||
"license":"` + packageLicense + `"
|
||||
}`
|
||||
|
||||
var buf bytes.Buffer
|
||||
binary.Write(&buf, binary.LittleEndian, uint32(len(metadata)))
|
||||
buf.WriteString(metadata)
|
||||
binary.Write(&buf, binary.LittleEndian, uint32(4))
|
||||
buf.WriteString("test")
|
||||
return &buf
|
||||
}
|
||||
|
||||
err := cargo_service.InitializeIndexRepository(db.DefaultContext, user, user)
|
||||
assert.NoError(t, err)
|
||||
|
||||
repo, err := repo_model.GetRepositoryByOwnerAndName(db.DefaultContext, user.Name, cargo_service.IndexRepositoryName)
|
||||
assert.NotNil(t, repo)
|
||||
assert.NoError(t, err)
|
||||
|
||||
readGitContent := func(t *testing.T, path string) string {
|
||||
gitRepo, err := git.OpenRepository(db.DefaultContext, repo.RepoPath())
|
||||
assert.NoError(t, err)
|
||||
defer gitRepo.Close()
|
||||
|
||||
commit, err := gitRepo.GetBranchCommit(repo.DefaultBranch)
|
||||
assert.NoError(t, err)
|
||||
|
||||
blob, err := commit.GetBlobByPath(path)
|
||||
assert.NoError(t, err)
|
||||
|
||||
content, err := blob.GetBlobContent()
|
||||
assert.NoError(t, err)
|
||||
|
||||
return content
|
||||
}
|
||||
|
||||
root := fmt.Sprintf("%sapi/packages/%s/cargo", setting.AppURL, user.Name)
|
||||
url := fmt.Sprintf("%s/api/v1/crates", root)
|
||||
|
||||
t.Run("Index", func(t *testing.T) {
|
||||
t.Run("Config", func(t *testing.T) {
|
||||
defer tests.PrintCurrentTest(t)()
|
||||
|
||||
content := readGitContent(t, cargo_service.ConfigFileName)
|
||||
|
||||
var config cargo_service.Config
|
||||
err := json.Unmarshal([]byte(content), &config)
|
||||
assert.NoError(t, err)
|
||||
|
||||
assert.Equal(t, url, config.DownloadURL)
|
||||
assert.Equal(t, root, config.APIURL)
|
||||
})
|
||||
})
|
||||
|
||||
t.Run("Upload", func(t *testing.T) {
|
||||
t.Run("InvalidNameOrVersion", func(t *testing.T) {
|
||||
defer tests.PrintCurrentTest(t)()
|
||||
|
||||
content := createPackage("0test", "1.0.0")
|
||||
|
||||
req := NewRequestWithBody(t, "PUT", url+"/new", content)
|
||||
req = AddBasicAuthHeader(req, user.Name)
|
||||
resp := MakeRequest(t, req, http.StatusBadRequest)
|
||||
|
||||
var status cargo_router.StatusResponse
|
||||
DecodeJSON(t, resp, &status)
|
||||
assert.False(t, status.OK)
|
||||
|
||||
content = createPackage("test", "-1.0.0")
|
||||
|
||||
req = NewRequestWithBody(t, "PUT", url+"/new", content)
|
||||
req = AddBasicAuthHeader(req, user.Name)
|
||||
resp = MakeRequest(t, req, http.StatusBadRequest)
|
||||
|
||||
DecodeJSON(t, resp, &status)
|
||||
assert.False(t, status.OK)
|
||||
})
|
||||
|
||||
t.Run("InvalidContent", func(t *testing.T) {
|
||||
defer tests.PrintCurrentTest(t)()
|
||||
|
||||
metadata := `{"name":"test","vers":"1.0.0"}`
|
||||
|
||||
var buf bytes.Buffer
|
||||
binary.Write(&buf, binary.LittleEndian, uint32(len(metadata)))
|
||||
buf.WriteString(metadata)
|
||||
binary.Write(&buf, binary.LittleEndian, uint32(4))
|
||||
buf.WriteString("te")
|
||||
|
||||
req := NewRequestWithBody(t, "PUT", url+"/new", &buf)
|
||||
req = AddBasicAuthHeader(req, user.Name)
|
||||
MakeRequest(t, req, http.StatusBadRequest)
|
||||
})
|
||||
|
||||
t.Run("Valid", func(t *testing.T) {
|
||||
defer tests.PrintCurrentTest(t)()
|
||||
|
||||
req := NewRequestWithBody(t, "PUT", url+"/new", createPackage(packageName, packageVersion))
|
||||
MakeRequest(t, req, http.StatusUnauthorized)
|
||||
|
||||
req = NewRequestWithBody(t, "PUT", url+"/new", createPackage(packageName, packageVersion))
|
||||
req = AddBasicAuthHeader(req, user.Name)
|
||||
resp := MakeRequest(t, req, http.StatusOK)
|
||||
|
||||
var status cargo_router.StatusResponse
|
||||
DecodeJSON(t, resp, &status)
|
||||
assert.True(t, status.OK)
|
||||
|
||||
pvs, err := packages.GetVersionsByPackageType(db.DefaultContext, user.ID, packages.TypeCargo)
|
||||
assert.NoError(t, err)
|
||||
assert.Len(t, pvs, 1)
|
||||
|
||||
pd, err := packages.GetPackageDescriptor(db.DefaultContext, pvs[0])
|
||||
assert.NoError(t, err)
|
||||
assert.NotNil(t, pd.SemVer)
|
||||
assert.IsType(t, &cargo_module.Metadata{}, pd.Metadata)
|
||||
assert.Equal(t, packageName, pd.Package.Name)
|
||||
assert.Equal(t, packageVersion, pd.Version.Version)
|
||||
|
||||
pfs, err := packages.GetFilesByVersionID(db.DefaultContext, pvs[0].ID)
|
||||
assert.NoError(t, err)
|
||||
assert.Len(t, pfs, 1)
|
||||
assert.Equal(t, fmt.Sprintf("%s-%s.crate", packageName, packageVersion), pfs[0].Name)
|
||||
assert.True(t, pfs[0].IsLead)
|
||||
|
||||
pb, err := packages.GetBlobByID(db.DefaultContext, pfs[0].BlobID)
|
||||
assert.NoError(t, err)
|
||||
assert.EqualValues(t, 4, pb.Size)
|
||||
|
||||
req = NewRequestWithBody(t, "PUT", url+"/new", createPackage(packageName, packageVersion))
|
||||
req = AddBasicAuthHeader(req, user.Name)
|
||||
MakeRequest(t, req, http.StatusConflict)
|
||||
|
||||
t.Run("Index", func(t *testing.T) {
|
||||
t.Run("Entry", func(t *testing.T) {
|
||||
defer tests.PrintCurrentTest(t)()
|
||||
|
||||
content := readGitContent(t, cargo_service.BuildPackagePath(packageName))
|
||||
|
||||
var entry cargo_service.IndexVersionEntry
|
||||
err := json.Unmarshal([]byte(content), &entry)
|
||||
assert.NoError(t, err)
|
||||
|
||||
assert.Equal(t, packageName, entry.Name)
|
||||
assert.Equal(t, packageVersion, entry.Version)
|
||||
assert.Equal(t, pb.HashSHA256, entry.FileChecksum)
|
||||
assert.False(t, entry.Yanked)
|
||||
assert.Len(t, entry.Dependencies, 1)
|
||||
dep := entry.Dependencies[0]
|
||||
assert.Equal(t, "dep", dep.Name)
|
||||
assert.Equal(t, "1.0", dep.Req)
|
||||
assert.Equal(t, "normal", dep.Kind)
|
||||
assert.True(t, dep.DefaultFeatures)
|
||||
assert.Empty(t, dep.Features)
|
||||
assert.False(t, dep.Optional)
|
||||
assert.Nil(t, dep.Target)
|
||||
assert.NotNil(t, dep.Registry)
|
||||
assert.Equal(t, "https://gitea.io/user/_cargo-index", *dep.Registry)
|
||||
assert.Nil(t, dep.Package)
|
||||
})
|
||||
|
||||
t.Run("Rebuild", func(t *testing.T) {
|
||||
defer tests.PrintCurrentTest(t)()
|
||||
|
||||
err := cargo_service.RebuildIndex(db.DefaultContext, user, user)
|
||||
assert.NoError(t, err)
|
||||
|
||||
_ = readGitContent(t, cargo_service.BuildPackagePath(packageName))
|
||||
})
|
||||
})
|
||||
})
|
||||
})
|
||||
|
||||
t.Run("Download", func(t *testing.T) {
|
||||
defer tests.PrintCurrentTest(t)()
|
||||
|
||||
pv, err := packages.GetVersionByNameAndVersion(db.DefaultContext, user.ID, packages.TypeCargo, packageName, packageVersion)
|
||||
assert.NoError(t, err)
|
||||
assert.EqualValues(t, 0, pv.DownloadCount)
|
||||
|
||||
pfs, err := packages.GetFilesByVersionID(db.DefaultContext, pv.ID)
|
||||
assert.NoError(t, err)
|
||||
assert.Len(t, pfs, 1)
|
||||
|
||||
req := NewRequest(t, "GET", fmt.Sprintf("%s/%s/%s/download", url, neturl.PathEscape(packageName), neturl.PathEscape(pv.Version)))
|
||||
req = AddBasicAuthHeader(req, user.Name)
|
||||
resp := MakeRequest(t, req, http.StatusOK)
|
||||
|
||||
assert.Equal(t, "test", resp.Body.String())
|
||||
|
||||
pv, err = packages.GetVersionByNameAndVersion(db.DefaultContext, user.ID, packages.TypeCargo, packageName, packageVersion)
|
||||
assert.NoError(t, err)
|
||||
assert.EqualValues(t, 1, pv.DownloadCount)
|
||||
})
|
||||
|
||||
t.Run("Search", func(t *testing.T) {
|
||||
defer tests.PrintCurrentTest(t)()
|
||||
|
||||
cases := []struct {
|
||||
Query string
|
||||
Page int
|
||||
PerPage int
|
||||
ExpectedTotal int64
|
||||
ExpectedResults int
|
||||
}{
|
||||
{"", 0, 0, 1, 1},
|
||||
{"", 1, 10, 1, 1},
|
||||
{"cargo", 1, 0, 1, 1},
|
||||
{"cargo", 1, 10, 1, 1},
|
||||
{"cargo", 2, 10, 1, 0},
|
||||
{"test", 0, 10, 0, 0},
|
||||
}
|
||||
|
||||
for i, c := range cases {
|
||||
req := NewRequest(t, "GET", fmt.Sprintf("%s?q=%s&page=%d&per_page=%d", url, c.Query, c.Page, c.PerPage))
|
||||
req = AddBasicAuthHeader(req, user.Name)
|
||||
resp := MakeRequest(t, req, http.StatusOK)
|
||||
|
||||
var result cargo_router.SearchResult
|
||||
DecodeJSON(t, resp, &result)
|
||||
|
||||
assert.Equal(t, c.ExpectedTotal, result.Meta.Total, "case %d: unexpected total hits", i)
|
||||
assert.Len(t, result.Crates, c.ExpectedResults, "case %d: unexpected result count", i)
|
||||
}
|
||||
})
|
||||
|
||||
t.Run("Yank", func(t *testing.T) {
|
||||
defer tests.PrintCurrentTest(t)()
|
||||
|
||||
req := NewRequest(t, "DELETE", fmt.Sprintf("%s/%s/%s/yank", url, neturl.PathEscape(packageName), neturl.PathEscape(packageVersion)))
|
||||
req = AddBasicAuthHeader(req, user.Name)
|
||||
resp := MakeRequest(t, req, http.StatusOK)
|
||||
|
||||
var status cargo_router.StatusResponse
|
||||
DecodeJSON(t, resp, &status)
|
||||
assert.True(t, status.OK)
|
||||
|
||||
content := readGitContent(t, cargo_service.BuildPackagePath(packageName))
|
||||
|
||||
var entry cargo_service.IndexVersionEntry
|
||||
err := json.Unmarshal([]byte(content), &entry)
|
||||
assert.NoError(t, err)
|
||||
|
||||
assert.True(t, entry.Yanked)
|
||||
})
|
||||
|
||||
t.Run("Unyank", func(t *testing.T) {
|
||||
defer tests.PrintCurrentTest(t)()
|
||||
|
||||
req := NewRequest(t, "PUT", fmt.Sprintf("%s/%s/%s/unyank", url, neturl.PathEscape(packageName), neturl.PathEscape(packageVersion)))
|
||||
req = AddBasicAuthHeader(req, user.Name)
|
||||
resp := MakeRequest(t, req, http.StatusOK)
|
||||
|
||||
var status cargo_router.StatusResponse
|
||||
DecodeJSON(t, resp, &status)
|
||||
assert.True(t, status.OK)
|
||||
|
||||
content := readGitContent(t, cargo_service.BuildPackagePath(packageName))
|
||||
|
||||
var entry cargo_service.IndexVersionEntry
|
||||
err := json.Unmarshal([]byte(content), &entry)
|
||||
assert.NoError(t, err)
|
||||
|
||||
assert.False(t, entry.Yanked)
|
||||
})
|
||||
|
||||
t.Run("ListOwners", func(t *testing.T) {
|
||||
defer tests.PrintCurrentTest(t)()
|
||||
|
||||
req := NewRequest(t, "GET", fmt.Sprintf("%s/%s/owners", url, neturl.PathEscape(packageName)))
|
||||
resp := MakeRequest(t, req, http.StatusOK)
|
||||
|
||||
var owners cargo_router.Owners
|
||||
DecodeJSON(t, resp, &owners)
|
||||
|
||||
assert.Len(t, owners.Users, 1)
|
||||
assert.Equal(t, user.ID, owners.Users[0].ID)
|
||||
assert.Equal(t, user.Name, owners.Users[0].Login)
|
||||
assert.Equal(t, user.DisplayName(), owners.Users[0].Name)
|
||||
})
|
||||
}
|
560 tests/integration/api_packages_chef_test.go Normal file
@@ -0,0 +1,560 @@
|
||||
// Copyright 2023 The Gitea Authors. All rights reserved.
|
||||
// SPDX-License-Identifier: MIT
|
||||
|
||||
package integration
|
||||
|
||||
import (
|
||||
"archive/tar"
|
||||
"bytes"
|
||||
"compress/gzip"
|
||||
"crypto"
|
||||
"crypto/rand"
|
||||
"crypto/rsa"
|
||||
"crypto/sha1"
|
||||
"crypto/sha256"
|
||||
"crypto/x509"
|
||||
"encoding/base64"
|
||||
"encoding/pem"
|
||||
"fmt"
|
||||
"hash"
|
||||
"math/big"
|
||||
"mime/multipart"
|
||||
"net/http"
|
||||
"path"
|
||||
"strings"
|
||||
"testing"
|
||||
"time"
|
||||
|
||||
"code.gitea.io/gitea/models/db"
|
||||
"code.gitea.io/gitea/models/packages"
|
||||
"code.gitea.io/gitea/models/unittest"
|
||||
user_model "code.gitea.io/gitea/models/user"
|
||||
chef_module "code.gitea.io/gitea/modules/packages/chef"
|
||||
"code.gitea.io/gitea/modules/setting"
|
||||
chef_router "code.gitea.io/gitea/routers/api/packages/chef"
|
||||
"code.gitea.io/gitea/tests"
|
||||
|
||||
"github.com/stretchr/testify/assert"
|
||||
)
|
||||
|
||||
func TestPackageChef(t *testing.T) {
|
||||
defer tests.PrepareTestEnv(t)()
|
||||
|
||||
user := unittest.AssertExistsAndLoadBean(t, &user_model.User{ID: 2})
|
||||
|
||||
privPem := `-----BEGIN RSA PRIVATE KEY-----
|
||||
MIIEpQIBAAKCAQEAtWp2PZz4TSU5A6ixw41HdbfBuGJwPuTtrsdoUf0DQ0/DJBNP
|
||||
qOCBAgEu6ZdUqIbWJ5Da+nevjtncy5hENdi6XrXjyzlUxghMuXjE5SeLGpgfQvkq
|
||||
bTkYaFpMe8PTzNeze3fei8+Eu6mzeb6g1GrqXznuPIc7bNss0w5iX9RiBM9dWPuX
|
||||
onx9xSEy0LYqJm7yXmshNe1aRwkjG/y5C26BzBFnMKp9YRTua0DO1WqLNhcaRnda
|
||||
lIFYouDNVTbwxSlYL16bZVoebqzZvLGrPvZJkPuCu6vH9brvOuYo0q8hLVNkBeXc
|
||||
imRpsDjLhQYzEJjoMTbaiVGnjBky+PWNiofJnwIDAQABAoIBAQCotF1KxLt/ejr/
|
||||
9ROCh9JJXV3v6tL5GgkSPOv9Oq2bHgSZer/cixJNW+5VWd5nbiSe3K1WuJBw5pbW
|
||||
Wj4sWORPiRRR+3mjQzqeS/nGJDTOwWJo9K8IrUzOVhLEEYLX/ksxaXJyT8PehFyb
|
||||
vbNwdhCIB6ZNcXDItTWE+95twWJ5lxAIj2dNwZZni3UkwwjYnCnqFtvHCKOg0NH2
|
||||
RjQcFYmu3fncNeqLezUSdVyRyXxSCHsUdlYeX/e44StCnXdrmLUHlb2P27ZVdPGh
|
||||
SW7qTUPpmJKekYiRPOpTLj+ZKXIsANkyWO+7dVtZLBm5bIyAsmp0W/DmK+wRsejj
|
||||
alFbIsh5AoGBANJr7HSG695wkfn+kvu/V8qHbt+KDv4WjWHjGRsUqvxoHOUNkQmW
|
||||
vZWdk4gjHYn1l+QHWmoOE3AgyqtCZ4bFILkZPLN/F8Mh3+r4B0Ac4biJJt7XGMNQ
|
||||
Nv4wsk7TR7CCARsjO7GP1PT60hpjMvYmc1E36gNM7QIZE9jBE+L8eWYtAoGBANy2
|
||||
JOAWf+QeBlur6o9feH76cEmpQzUUq4Lj9mmnXgIirSsFoBnDb8VA6Ws+ltL9U9H2
|
||||
vaCoaTyi9twW9zWj+Ywg2mVR5nlSAPfdlTWS1GLUbDotlj5apc/lvnGuNlWzN+I4
|
||||
Tu64hhgBXqGvRZ0o7HzFodqRAkpVXp6CQCqBM7p7AoGAIgO0K3oL8t87ma/fTra1
|
||||
mFWgRJ5qogQ/Qo2VZ11F7ptd4GD7CxPE/cSFLsKOadi7fu75XJ994OhMGrcXSR/g
|
||||
lEtSFqn6y15UdgU2FtUUX+I72FXo+Nmkqh5xFHDu68d4Kkzdv2xCvn81K3LRsByz
|
||||
E3P4biQnQ+mN3cIIVu79KNkCgYEAm6uctrEn4y2KLn5DInyj8GuTZ2ELFhVOIzPG
|
||||
SR7TH451tTJyiblezDHMcOfkWUx0IlN1zCr8jtgiZXmNQzg0erFxWKU7ebZtGGYh
|
||||
J3g4dLx+2Unt/mzRJqFUgbnueOO/Nr+gbJ+ZdLUCmeeVohOLOTXrws0kYGl2Izab
|
||||
K1+VrKECgYEAxQohoOegA0f4mofisXItbwwqTIX3bLpxBc4woa1sB4kjNrLo4slc
|
||||
qtWZGVlRxwBvQUg0cYj+xtr5nyBdHLy0qwX/kMq4GqQnvW6NqsbrP3MjCZ8NX/Sj
|
||||
A2W0jx50Hs/XNw6IZFLYgWVoOzCaD+jYFpHhzUZyQD6/rYhwhHrNQmU=
|
||||
-----END RSA PRIVATE KEY-----`
|
||||
|
||||
tmp, _ := pem.Decode([]byte(privPem))
|
||||
privKey, _ := x509.ParsePKCS1PrivateKey(tmp.Bytes)
|
||||
|
||||
pubPem := `-----BEGIN PUBLIC KEY-----
|
||||
MIIBIjANBgkqhkiG9w0BAQEFAAOCAQ8AMIIBCgKCAQEAtWp2PZz4TSU5A6ixw41H
|
||||
dbfBuGJwPuTtrsdoUf0DQ0/DJBNPqOCBAgEu6ZdUqIbWJ5Da+nevjtncy5hENdi6
|
||||
XrXjyzlUxghMuXjE5SeLGpgfQvkqbTkYaFpMe8PTzNeze3fei8+Eu6mzeb6g1Grq
|
||||
XznuPIc7bNss0w5iX9RiBM9dWPuXonx9xSEy0LYqJm7yXmshNe1aRwkjG/y5C26B
|
||||
zBFnMKp9YRTua0DO1WqLNhcaRndalIFYouDNVTbwxSlYL16bZVoebqzZvLGrPvZJ
|
||||
kPuCu6vH9brvOuYo0q8hLVNkBeXcimRpsDjLhQYzEJjoMTbaiVGnjBky+PWNiofJ
|
||||
nwIDAQAB
|
||||
-----END PUBLIC KEY-----`
|
||||
|
||||
err := user_model.SetUserSetting(user.ID, chef_module.SettingPublicPem, pubPem)
|
||||
assert.NoError(t, err)
|
||||
|
||||
t.Run("Authenticate", func(t *testing.T) {
|
||||
auth := &chef_router.Auth{}
|
||||
|
||||
t.Run("MissingUser", func(t *testing.T) {
|
||||
defer tests.PrintCurrentTest(t)()
|
||||
|
||||
req := NewRequest(t, "POST", "/dummy")
|
||||
u, err := auth.Verify(req, nil, nil, nil)
|
||||
assert.Nil(t, u)
|
||||
assert.NoError(t, err)
|
||||
})
|
||||
|
||||
t.Run("NotExistingUser", func(t *testing.T) {
|
||||
defer tests.PrintCurrentTest(t)()
|
||||
|
||||
req := NewRequest(t, "POST", "/dummy")
|
||||
req.Header.Set("X-Ops-Userid", "not-existing-user")
|
||||
u, err := auth.Verify(req, nil, nil, nil)
|
||||
assert.Nil(t, u)
|
||||
assert.Error(t, err)
|
||||
})
|
||||
|
||||
t.Run("Timestamp", func(t *testing.T) {
|
||||
defer tests.PrintCurrentTest(t)()
|
||||
|
||||
req := NewRequest(t, "POST", "/dummy")
|
||||
req.Header.Set("X-Ops-Userid", user.Name)
|
||||
u, err := auth.Verify(req, nil, nil, nil)
|
||||
assert.Nil(t, u)
|
||||
assert.Error(t, err)
|
||||
|
||||
req.Header.Set("X-Ops-Timestamp", "2023-01-01T00:00:00Z")
|
||||
u, err = auth.Verify(req, nil, nil, nil)
|
||||
assert.Nil(t, u)
|
||||
assert.Error(t, err)
|
||||
})
|
||||
|
||||
t.Run("SigningVersion", func(t *testing.T) {
|
||||
defer tests.PrintCurrentTest(t)()
|
||||
|
||||
req := NewRequest(t, "POST", "/dummy")
|
||||
req.Header.Set("X-Ops-Userid", user.Name)
|
||||
req.Header.Set("X-Ops-Timestamp", time.Now().UTC().Format(time.RFC3339))
|
||||
u, err := auth.Verify(req, nil, nil, nil)
|
||||
assert.Nil(t, u)
|
||||
assert.Error(t, err)
|
||||
|
||||
req.Header.Set("X-Ops-Sign", "version=none")
|
||||
u, err = auth.Verify(req, nil, nil, nil)
|
||||
assert.Nil(t, u)
|
||||
assert.Error(t, err)
|
||||
|
||||
req.Header.Set("X-Ops-Sign", "version=1.4")
|
||||
u, err = auth.Verify(req, nil, nil, nil)
|
||||
assert.Nil(t, u)
|
||||
assert.Error(t, err)
|
||||
|
||||
req.Header.Set("X-Ops-Sign", "version=1.0;algorithm=sha2")
|
||||
u, err = auth.Verify(req, nil, nil, nil)
|
||||
assert.Nil(t, u)
|
||||
assert.Error(t, err)
|
||||
|
||||
req.Header.Set("X-Ops-Sign", "version=1.0;algorithm=sha256")
|
||||
u, err = auth.Verify(req, nil, nil, nil)
|
||||
assert.Nil(t, u)
|
||||
assert.Error(t, err)
|
||||
})
|
||||
|
||||
t.Run("SignedHeaders", func(t *testing.T) {
|
||||
defer tests.PrintCurrentTest(t)()
|
||||
|
||||
ts := time.Now().UTC().Format(time.RFC3339)
|
||||
|
||||
req := NewRequest(t, "POST", "/dummy")
|
||||
req.Header.Set("X-Ops-Userid", user.Name)
|
||||
req.Header.Set("X-Ops-Timestamp", ts)
|
||||
req.Header.Set("X-Ops-Sign", "version=1.0;algorithm=sha1")
|
||||
req.Header.Set("X-Ops-Content-Hash", "unused")
|
||||
req.Header.Set("X-Ops-Authorization-4", "dummy")
|
||||
u, err := auth.Verify(req, nil, nil, nil)
|
||||
assert.Nil(t, u)
|
||||
assert.Error(t, err)
|
||||
|
||||
signRequest := func(t *testing.T, req *http.Request, version string) {
|
||||
username := req.Header.Get("X-Ops-Userid")
|
||||
if version != "1.0" && version != "1.3" {
|
||||
sum := sha1.Sum([]byte(username))
|
||||
username = base64.StdEncoding.EncodeToString(sum[:])
|
||||
}
|
||||
|
||||
req.Header.Set("X-Ops-Sign", "version="+version)
|
||||
|
||||
var data []byte
|
||||
if version == "1.3" {
|
||||
data = []byte(fmt.Sprintf(
|
||||
"Method:%s\nPath:%s\nX-Ops-Content-Hash:%s\nX-Ops-Sign:version=%s\nX-Ops-Timestamp:%s\nX-Ops-UserId:%s\nX-Ops-Server-API-Version:%s",
|
||||
req.Method,
|
||||
path.Clean(req.URL.Path),
|
||||
req.Header.Get("X-Ops-Content-Hash"),
|
||||
version,
|
||||
req.Header.Get("X-Ops-Timestamp"),
|
||||
username,
|
||||
req.Header.Get("X-Ops-Server-Api-Version"),
|
||||
))
|
||||
} else {
|
||||
sum := sha1.Sum([]byte(path.Clean(req.URL.Path)))
|
||||
data = []byte(fmt.Sprintf(
|
||||
"Method:%s\nHashed Path:%s\nX-Ops-Content-Hash:%s\nX-Ops-Timestamp:%s\nX-Ops-UserId:%s",
|
||||
req.Method,
|
||||
base64.StdEncoding.EncodeToString(sum[:]),
|
||||
req.Header.Get("X-Ops-Content-Hash"),
|
||||
req.Header.Get("X-Ops-Timestamp"),
|
||||
username,
|
||||
))
|
||||
}
|
||||
|
||||
for k := range req.Header {
|
||||
if strings.HasPrefix(k, "X-Ops-Authorization-") {
|
||||
req.Header.Del(k)
|
||||
}
|
||||
}
|
||||
|
||||
var signature []byte
|
||||
if version == "1.3" || version == "1.2" {
|
||||
var h hash.Hash
|
||||
var ch crypto.Hash
|
||||
if version == "1.3" {
|
||||
h = sha256.New()
|
||||
ch = crypto.SHA256
|
||||
} else {
|
||||
h = sha1.New()
|
||||
ch = crypto.SHA1
|
||||
}
|
||||
h.Write(data)
|
||||
|
||||
signature, _ = rsa.SignPKCS1v15(rand.Reader, privKey, ch, h.Sum(nil))
|
||||
} else {
|
||||
c := new(big.Int).SetBytes(data)
|
||||
m := new(big.Int).Exp(c, privKey.D, privKey.N)
|
||||
|
||||
signature = m.Bytes()
|
||||
}
|
||||
|
||||
enc := base64.StdEncoding.EncodeToString(signature)
|
||||
|
||||
const chunkSize = 60
|
||||
chunks := make([]string, 0, (len(enc)-1)/chunkSize+1)
|
||||
currentLen := 0
|
||||
currentStart := 0
|
||||
for i := range enc {
|
||||
if currentLen == chunkSize {
|
||||
chunks = append(chunks, enc[currentStart:i])
|
||||
currentLen = 0
|
||||
currentStart = i
|
||||
}
|
||||
currentLen++
|
||||
}
|
||||
chunks = append(chunks, enc[currentStart:])
|
||||
|
||||
for i, chunk := range chunks {
|
||||
req.Header.Set(fmt.Sprintf("X-Ops-Authorization-%d", i+1), chunk)
|
||||
}
|
||||
}
|
||||
|
||||
for _, v := range []string{"1.0", "1.1", "1.2", "1.3"} {
|
||||
t.Run(v, func(t *testing.T) {
|
||||
defer tests.PrintCurrentTest(t)()
|
||||
|
||||
signRequest(t, req, v)
|
||||
u, err = auth.Verify(req, nil, nil, nil)
|
||||
assert.NotNil(t, u)
|
||||
assert.NoError(t, err)
|
||||
})
|
||||
}
|
||||
})
|
||||
})
|
||||
|
||||
packageName := "test"
|
||||
packageVersion := "1.0.1"
|
||||
packageDescription := "Test Description"
|
||||
packageAuthor := "KN4CK3R"
|
||||
|
||||
root := fmt.Sprintf("/api/packages/%s/chef/api/v1", user.Name)
|
||||
|
||||
uploadPackage := func(t *testing.T, version string, expectedStatus int) {
|
||||
var body bytes.Buffer
|
||||
mpw := multipart.NewWriter(&body)
|
||||
part, _ := mpw.CreateFormFile("tarball", fmt.Sprintf("%s.tar.gz", version))
|
||||
zw := gzip.NewWriter(part)
|
||||
tw := tar.NewWriter(zw)
|
||||
|
||||
content := `{"name":"` + packageName + `","version":"` + version + `","description":"` + packageDescription + `","maintainer":"` + packageAuthor + `"}`
|
||||
|
||||
hdr := &tar.Header{
|
||||
Name: packageName + "/metadata.json",
|
||||
Mode: 0o600,
|
||||
Size: int64(len(content)),
|
||||
}
|
||||
tw.WriteHeader(hdr)
|
||||
tw.Write([]byte(content))
|
||||
|
||||
tw.Close()
|
||||
zw.Close()
|
||||
mpw.Close()
|
||||
|
||||
req := NewRequestWithBody(t, "POST", root+"/cookbooks", &body)
|
||||
req.Header.Add("Content-Type", mpw.FormDataContentType())
|
||||
AddBasicAuthHeader(req, user.Name)
|
||||
MakeRequest(t, req, expectedStatus)
|
||||
}
|
||||
|
||||
t.Run("Upload", func(t *testing.T) {
|
||||
defer tests.PrintCurrentTest(t)()
|
||||
|
||||
req := NewRequestWithBody(t, "POST", root+"/cookbooks", bytes.NewReader([]byte{}))
|
||||
MakeRequest(t, req, http.StatusUnauthorized)
|
||||
|
||||
uploadPackage(t, packageVersion, http.StatusCreated)
|
||||
|
||||
pvs, err := packages.GetVersionsByPackageType(db.DefaultContext, user.ID, packages.TypeChef)
|
||||
assert.NoError(t, err)
|
||||
assert.Len(t, pvs, 1)
|
||||
|
||||
pd, err := packages.GetPackageDescriptor(db.DefaultContext, pvs[0])
|
||||
assert.NoError(t, err)
|
||||
assert.NotNil(t, pd.SemVer)
|
||||
assert.IsType(t, &chef_module.Metadata{}, pd.Metadata)
|
||||
assert.Equal(t, packageName, pd.Package.Name)
|
||||
assert.Equal(t, packageVersion, pd.Version.Version)
|
||||
|
||||
pfs, err := packages.GetFilesByVersionID(db.DefaultContext, pvs[0].ID)
|
||||
assert.NoError(t, err)
|
||||
assert.Len(t, pfs, 1)
|
||||
assert.Equal(t, fmt.Sprintf("%s.tar.gz", packageVersion), pfs[0].Name)
|
||||
assert.True(t, pfs[0].IsLead)
|
||||
|
||||
uploadPackage(t, packageVersion, http.StatusBadRequest)
|
||||
})
|
||||
|
||||
t.Run("Download", func(t *testing.T) {
|
||||
defer tests.PrintCurrentTest(t)()
|
||||
|
||||
req := NewRequest(t, "GET", fmt.Sprintf("%s/cookbooks/%s/versions/%s/download", root, packageName, packageVersion))
|
||||
MakeRequest(t, req, http.StatusOK)
|
||||
})
|
||||
|
||||
t.Run("Universe", func(t *testing.T) {
|
||||
defer tests.PrintCurrentTest(t)()
|
||||
|
||||
req := NewRequest(t, "GET", root+"/universe")
|
||||
resp := MakeRequest(t, req, http.StatusOK)
|
||||
|
||||
type VersionInfo struct {
|
||||
LocationType string `json:"location_type"`
|
||||
LocationPath string `json:"location_path"`
|
||||
DownloadURL string `json:"download_url"`
|
||||
Dependencies map[string]string `json:"dependencies"`
|
||||
}
|
||||
|
||||
var result map[string]map[string]*VersionInfo
|
||||
DecodeJSON(t, resp, &result)
|
||||
|
||||
assert.Len(t, result, 1)
|
||||
assert.Contains(t, result, packageName)
|
||||
|
||||
versions := result[packageName]
|
||||
|
||||
assert.Len(t, versions, 1)
|
||||
assert.Contains(t, versions, packageVersion)
|
||||
|
||||
info := versions[packageVersion]
|
||||
|
||||
assert.Equal(t, "opscode", info.LocationType)
|
||||
assert.Equal(t, setting.AppURL+root[1:], info.LocationPath)
|
||||
assert.Equal(t, fmt.Sprintf("%s%s/cookbooks/%s/versions/%s/download", setting.AppURL, root[1:], packageName, packageVersion), info.DownloadURL)
|
||||
})
|
||||
|
||||
t.Run("Search", func(t *testing.T) {
|
||||
defer tests.PrintCurrentTest(t)()
|
||||
|
||||
cases := []struct {
|
||||
Query string
|
||||
Start int
|
||||
Items int
|
||||
ExpectedTotal int
|
||||
ExpectedResults int
|
||||
}{
|
||||
{"", 0, 0, 1, 1},
|
||||
{"", 0, 10, 1, 1},
|
||||
{"gitea", 0, 10, 0, 0},
|
||||
{"test", 0, 10, 1, 1},
|
||||
{"test", 1, 10, 1, 0},
|
||||
}
|
||||
|
||||
type Item struct {
|
||||
CookbookName string `json:"cookbook_name"`
|
||||
CookbookMaintainer string `json:"cookbook_maintainer"`
|
||||
CookbookDescription string `json:"cookbook_description"`
|
||||
Cookbook string `json:"cookbook"`
|
||||
}
|
||||
|
||||
type Result struct {
|
||||
Start int `json:"start"`
|
||||
Total int `json:"total"`
|
||||
Items []*Item `json:"items"`
|
||||
}
|
||||
|
||||
for i, c := range cases {
|
||||
req := NewRequest(t, "GET", fmt.Sprintf("%s/search?q=%s&start=%d&items=%d", root, c.Query, c.Start, c.Items))
|
||||
req = AddBasicAuthHeader(req, user.Name)
|
||||
resp := MakeRequest(t, req, http.StatusOK)
|
||||
|
||||
var result Result
|
||||
DecodeJSON(t, resp, &result)
|
||||
|
||||
assert.Equal(t, c.ExpectedTotal, result.Total, "case %d: unexpected total hits", i)
|
||||
assert.Len(t, result.Items, c.ExpectedResults, "case %d: unexpected result count", i)
|
||||
|
||||
if len(result.Items) == 1 {
|
||||
item := result.Items[0]
|
||||
assert.Equal(t, packageName, item.CookbookName)
|
||||
assert.Equal(t, packageAuthor, item.CookbookMaintainer)
|
||||
assert.Equal(t, packageDescription, item.CookbookDescription)
|
||||
assert.Equal(t, fmt.Sprintf("%s%s/cookbooks/%s", setting.AppURL, root[1:], packageName), item.Cookbook)
|
||||
}
|
||||
}
|
||||
})
|
||||
|
||||
t.Run("EnumeratePackages", func(t *testing.T) {
|
||||
defer tests.PrintCurrentTest(t)()
|
||||
|
||||
cases := []struct {
|
||||
Sort string
|
||||
Start int
|
||||
Items int
|
||||
ExpectedTotal int
|
||||
ExpectedResults int
|
||||
}{
|
||||
{"", 0, 0, 1, 1},
|
||||
{"", 0, 10, 1, 1},
|
||||
{"RECENTLY_ADDED", 0, 10, 1, 1},
|
||||
{"RECENTLY_UPDATED", 0, 10, 1, 1},
|
||||
{"", 1, 10, 1, 0},
|
||||
}
|
||||
|
||||
type Item struct {
|
||||
CookbookName string `json:"cookbook_name"`
|
||||
CookbookMaintainer string `json:"cookbook_maintainer"`
|
||||
CookbookDescription string `json:"cookbook_description"`
|
||||
Cookbook string `json:"cookbook"`
|
||||
}
|
||||
|
||||
type Result struct {
|
||||
Start int `json:"start"`
|
||||
Total int `json:"total"`
|
||||
Items []*Item `json:"items"`
|
||||
}
|
||||
|
||||
for i, c := range cases {
|
||||
req := NewRequest(t, "GET", fmt.Sprintf("%s/cookbooks?start=%d&items=%d&sort=%s", root, c.Start, c.Items, c.Sort))
|
||||
req = AddBasicAuthHeader(req, user.Name)
|
||||
resp := MakeRequest(t, req, http.StatusOK)
|
||||
|
||||
var result Result
|
||||
DecodeJSON(t, resp, &result)
|
||||
|
||||
assert.Equal(t, c.ExpectedTotal, result.Total, "case %d: unexpected total hits", i)
|
||||
assert.Len(t, result.Items, c.ExpectedResults, "case %d: unexpected result count", i)
|
||||
|
||||
if len(result.Items) == 1 {
|
||||
item := result.Items[0]
|
||||
assert.Equal(t, packageName, item.CookbookName)
|
||||
assert.Equal(t, packageAuthor, item.CookbookMaintainer)
|
||||
assert.Equal(t, packageDescription, item.CookbookDescription)
|
||||
assert.Equal(t, fmt.Sprintf("%s%s/cookbooks/%s", setting.AppURL, root[1:], packageName), item.Cookbook)
|
||||
}
|
||||
}
|
||||
})
|
||||
|
||||
t.Run("PackageMetadata", func(t *testing.T) {
|
||||
defer tests.PrintCurrentTest(t)()
|
||||
|
||||
req := NewRequest(t, "GET", fmt.Sprintf("%s/cookbooks/%s", root, packageName))
|
||||
resp := MakeRequest(t, req, http.StatusOK)
|
||||
|
||||
type Result struct {
|
||||
Name string `json:"name"`
|
||||
Maintainer string `json:"maintainer"`
|
||||
Description string `json:"description"`
|
||||
Category string `json:"category"`
|
||||
LatestVersion string `json:"latest_version"`
|
||||
SourceURL string `json:"source_url"`
|
||||
CreatedAt time.Time `json:"created_at"`
|
||||
UpdatedAt time.Time `json:"updated_at"`
|
||||
Deprecated bool `json:"deprecated"`
|
||||
Versions []string `json:"versions"`
|
||||
}
|
||||
|
||||
var result Result
|
||||
DecodeJSON(t, resp, &result)
|
||||
|
||||
versionURL := fmt.Sprintf("%s%s/cookbooks/%s/versions/%s", setting.AppURL, root[1:], packageName, packageVersion)
|
||||
|
||||
assert.Equal(t, packageName, result.Name)
|
||||
assert.Equal(t, packageAuthor, result.Maintainer)
|
||||
assert.Equal(t, packageDescription, result.Description)
|
||||
assert.Equal(t, versionURL, result.LatestVersion)
|
||||
assert.False(t, result.Deprecated)
|
||||
assert.ElementsMatch(t, []string{versionURL}, result.Versions)
|
||||
})
|
||||
|
||||
t.Run("PackageVersionMetadata", func(t *testing.T) {
|
||||
defer tests.PrintCurrentTest(t)()
|
||||
|
||||
req := NewRequest(t, "GET", fmt.Sprintf("%s/cookbooks/%s/versions/%s", root, packageName, packageVersion))
|
||||
resp := MakeRequest(t, req, http.StatusOK)
|
||||
|
||||
type Result struct {
|
||||
Version string `json:"version"`
|
||||
TarballFileSize int64 `json:"tarball_file_size"`
|
||||
PublishedAt time.Time `json:"published_at"`
|
||||
Cookbook string `json:"cookbook"`
|
||||
File string `json:"file"`
|
||||
License string `json:"license"`
|
||||
Dependencies map[string]string `json:"dependencies"`
|
||||
}
|
||||
|
||||
var result Result
|
||||
DecodeJSON(t, resp, &result)
|
||||
|
||||
packageURL := fmt.Sprintf("%s%s/cookbooks/%s", setting.AppURL, root[1:], packageName)
|
||||
|
||||
assert.Equal(t, packageVersion, result.Version)
|
||||
assert.Equal(t, packageURL, result.Cookbook)
|
||||
assert.Equal(t, fmt.Sprintf("%s/versions/%s/download", packageURL, packageVersion), result.File)
|
||||
})
|
||||
|
||||
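// Uploads two more versions, then checks that unauthenticated DELETE requests are rejected while the authenticated owner can delete a single version or the whole package.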
t.Run("Delete", func(t *testing.T) {
|
||||
uploadPackage(t, "1.0.2", http.StatusCreated)
|
||||
uploadPackage(t, "1.0.3", http.StatusCreated)
|
||||
|
||||
t.Run("Version", func(t *testing.T) {
|
||||
defer tests.PrintCurrentTest(t)()
|
||||
|
||||
req := NewRequest(t, "DELETE", fmt.Sprintf("%s/cookbooks/%s/versions/%s", root, packageName, "1.0.2"))
|
||||
MakeRequest(t, req, http.StatusUnauthorized)
|
||||
|
||||
req = NewRequest(t, "DELETE", fmt.Sprintf("%s/cookbooks/%s/versions/%s", root, packageName, "1.0.2"))
|
||||
AddBasicAuthHeader(req, user.Name)
|
||||
MakeRequest(t, req, http.StatusOK)
|
||||
|
||||
pv, err := packages.GetVersionByNameAndVersion(db.DefaultContext, user.ID, packages.TypeChef, packageName, "1.0.2")
|
||||
assert.Nil(t, pv)
|
||||
assert.Error(t, err)
|
||||
})
|
||||
|
||||
t.Run("Package", func(t *testing.T) {
|
||||
defer tests.PrintCurrentTest(t)()
|
||||
|
||||
req := NewRequest(t, "DELETE", fmt.Sprintf("%s/cookbooks/%s", root, packageName))
|
||||
MakeRequest(t, req, http.StatusUnauthorized)
|
||||
|
||||
req = NewRequest(t, "DELETE", fmt.Sprintf("%s/cookbooks/%s", root, packageName))
|
||||
AddBasicAuthHeader(req, user.Name)
|
||||
MakeRequest(t, req, http.StatusOK)
|
||||
|
||||
pvs, err := packages.GetVersionsByPackageType(db.DefaultContext, user.ID, packages.TypeChef)
|
||||
assert.NoError(t, err)
|
||||
assert.Empty(t, pvs)
|
||||
})
|
||||
})
|
||||
}
|
@ -21,6 +21,7 @@ import (
"code.gitea.io/gitea/modules/setting"
api "code.gitea.io/gitea/modules/structs"
packages_service "code.gitea.io/gitea/services/packages"
packages_cleanup_service "code.gitea.io/gitea/services/packages/cleanup"
"code.gitea.io/gitea/tests"

"github.com/stretchr/testify/assert"
@ -246,7 +247,7 @@ func TestPackageCleanup(t *testing.T) {
_, err = packages_model.GetInternalVersionByNameAndVersion(db.DefaultContext, 2, packages_model.TypeContainer, "test", container_model.UploadVersion)
assert.NoError(t, err)

err = packages_service.Cleanup(db.DefaultContext, duration)
err = packages_cleanup_service.Cleanup(db.DefaultContext, duration)
assert.NoError(t, err)

pbs, err = packages_model.FindExpiredUnreferencedBlobs(db.DefaultContext, duration)
@ -383,7 +384,7 @@ func TestPackageCleanup(t *testing.T) {
pcr, err := packages_model.InsertCleanupRule(db.DefaultContext, c.Rule)
assert.NoError(t, err)

err = packages_service.Cleanup(db.DefaultContext, duration)
err = packages_cleanup_service.Cleanup(db.DefaultContext, duration)
assert.NoError(t, err)

for _, v := range c.Versions {
51 web_src/js/features/captcha.js Normal file
@ -0,0 +1,51 @@
import {isDarkTheme} from '../utils.js';

export async function initCaptcha() {
const captchaEl = document.querySelector('#captcha');
if (!captchaEl) return;

const siteKey = captchaEl.getAttribute('data-sitekey');
const isDark = isDarkTheme();

const params = {
sitekey: siteKey,
theme: isDark ? 'dark' : 'light'
};

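// The provider scripts (grecaptcha, turnstile, hcaptcha) are expected to be loaded elsewhere on the page; each case only renders into the element when the corresponding global object exists.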
switch (captchaEl.getAttribute('data-captcha-type')) {
case 'g-recaptcha': {
if (window.grecaptcha) {
window.grecaptcha.ready(() => {
window.grecaptcha.render(captchaEl, params);
});
}
break;
}
case 'cf-turnstile': {
if (window.turnstile) {
window.turnstile.render(captchaEl, params);
}
break;
}
case 'h-captcha': {
if (window.hcaptcha) {
window.hcaptcha.render(captchaEl, params);
}
break;
}
case 'm-captcha': {
const {default: mCaptcha} = await import(/* webpackChunkName: "mcaptcha-vanilla-glue" */'@mcaptcha/vanilla-glue');
mCaptcha.INPUT_NAME = 'm-captcha-response';
const instanceURL = captchaEl.getAttribute('data-instance-url');

mCaptcha.default({
siteKey: {
instanceUrl: new URL(instanceURL),
key: siteKey,
}
});
break;
}
default:
}
}
@ -1,16 +0,0 @@
export async function initMcaptcha() {
const mCaptchaEl = document.querySelector('.m-captcha');
if (!mCaptchaEl) return;

const {default: mCaptcha} = await import(/* webpackChunkName: "mcaptcha-vanilla-glue" */'@mcaptcha/vanilla-glue');
mCaptcha.INPUT_NAME = 'm-captcha-response';
const siteKey = mCaptchaEl.getAttribute('data-sitekey');
const instanceURL = mCaptchaEl.getAttribute('data-instance-url');

mCaptcha.default({
siteKey: {
instanceUrl: new URL(instanceURL),
key: siteKey,
}
});
}
@ -88,8 +88,8 @@ import {initCommonOrganization} from './features/common-organization.js';
import {initRepoWikiForm} from './features/repo-wiki.js';
import {initRepoCommentForm, initRepository} from './features/repo-legacy.js';
import {initFormattingReplacements} from './features/formatting.js';
import {initMcaptcha} from './features/mcaptcha.js';
import {initCopyContent} from './features/copycontent.js';
import {initCaptcha} from './features/captcha.js';
import {initRepositoryActionView} from './components/RepoActionView.vue';

// Run time-critical code as soon as possible. This is safe to do because this
@ -191,7 +191,7 @@ $(document).ready(() => {
initRepositoryActionView();

initCommitStatuses();
initMcaptcha();
initCaptcha();

initUserAuthLinkAccountView();
initUserAuthOauth2();
@ -220,18 +220,24 @@ textarea:focus,
}

@media @mediaMdAndUp {
.g-recaptcha,
.h-captcha {
.g-recaptcha-style,
.h-captcha-style {
margin: 0 auto !important;
width: 304px;
padding-left: 30px;

iframe {
border-radius: 5px !important;
width: 302px !important;
height: 76px !important;
}
}
}

@media (max-height: 575px) {
#rc-imageselect,
.g-recaptcha,
.h-captcha {
.g-recaptcha-style,
.h-captcha-style {
transform: scale(.77);
transform-origin: 0 0;
}
3 web_src/svg/gitea-cargo.svg Normal file
@ -0,0 +1,3 @@
<?xml version="1.0" encoding="UTF-8"?>
<svg clip-rule="evenodd" fill-rule="evenodd" stroke-linecap="round" stroke-linejoin="round" version="1.1" viewBox="0 0 32 32" xml:space="preserve" xmlns="http://www.w3.org/2000/svg">
<path d="m15.993 1.54c-7.972 0-14.461 6.492-14.461 14.462 0 7.969 6.492 14.461 14.461 14.461 7.97 0 14.462-6.492 14.462-14.461 0-7.97-6.492-14.462-14.462-14.462zm-0.021 1.285c0.511 0.013 0.924 0.439 0.924 0.951 0 0.522-0.43 0.952-0.952 0.952s-0.951-0.43-0.951-0.952 0.429-0.952 0.951-0.952c0.01 0 0.019 1e-3 0.028 1e-3zm2.178 1.566c3.379 0.633 6.313 2.723 8.016 5.709l-1.123 2.533c-0.193 0.438 6e-3 0.952 0.44 1.147l2.16 0.958c0.067 0.675 0.076 1.355 0.025 2.031h-1.202c-0.12 0-0.169 0.08-0.169 0.196v0.551c0 1.297-0.731 1.582-1.373 1.652-0.612 0.07-1.288-0.257-1.374-0.63-0.361-2.029-0.961-2.46-1.909-3.21 1.178-0.746 2.401-1.85 2.401-3.325 0-1.594-1.092-2.597-1.835-3.09-1.046-0.688-2.203-0.826-2.515-0.826h-12.421c1.717-1.918 4.02-3.218 6.55-3.696l1.466 1.536c0.33 0.346 0.878 0.361 1.223 0.028l1.64-1.564zm-13.522 7.043c0.511 0.015 0.924 0.44 0.924 0.951 0 0.522-0.43 0.952-0.952 0.952s-0.951-0.43-0.951-0.952 0.429-0.951 0.951-0.951h0.028zm22.685 0.043c0.511 0.015 0.924 0.44 0.924 0.951 0 0.522-0.43 0.952-0.952 0.952s-0.951-0.43-0.951-0.952 0.429-0.952 0.951-0.952c0.01 0 0.019 0 0.028 1e-3zm-20.892 0.153h1.658v7.477h-3.347c-0.414-1.452-0.542-2.97-0.38-4.47l2.05-0.912c0.438-0.195 0.637-0.706 0.441-1.144l-0.422-0.951zm6.92 0.079h3.949c0.205 0 1.441 0.236 1.441 1.163 0 0.768-0.948 1.043-1.728 1.043h-3.665l3e-3 -2.206zm0 5.373h3.026c0.275 0 1.477 0.079 1.86 1.615 0.119 0.471 0.385 2.007 0.566 2.499 0.18 0.551 0.911 1.652 1.691 1.652h4.938c-0.331 0.444-0.693 0.863-1.083 1.255l-2.01-0.432c-0.468-0.101-0.93 0.199-1.031 0.667l-0.477 2.228c-3.104 1.406-6.672 1.389-9.762-0.046l-0.478-2.228c-0.101-0.468-0.56-0.767-1.028-0.667l-1.967 0.423c-0.365-0.377-0.704-0.778-1.016-1.2h9.567c0.107 0 0.181-0.018 0.181-0.119v-3.384c0-0.097-0.074-0.119-0.181-0.119h-2.799l3e-3 -2.144zm-4.415 7.749c0.512 0.015 0.924 0.44 0.924 0.951 0 0.522-0.429 0.952-0.951 0.952s-0.952-0.43-0.952-0.952 0.43-0.952 0.952-0.952c9e-3 0 0.018 1e-3 0.027 1e-3zm14.089 0.043c0.511 0.015 0.924 0.439 0.923 0.951 0 0.522-0.429 0.952-0.951 0.952s-0.951-0.43-0.951-0.952 0.429-0.952 0.951-0.952c9e-3 0 0.018 0 0.028 1e-3z"/><path d="m29.647 16.002c0 7.49-6.163 13.653-13.654 13.653-7.49 0-13.654-6.163-13.654-13.653 0-7.491 6.164-13.654 13.654-13.654 7.491 0 13.654 6.163 13.654 13.654zm-0.257-1.319 2.13 1.319-2.13 1.318 1.83 1.71-2.344 0.878 1.463 2.035-2.475 0.404 1.04 2.282-2.506-0.089 0.575 2.442-2.441-0.576 0.089 2.506-2.283-1.04-0.403 2.475-2.035-1.462-0.878 2.343-1.71-1.829-1.319 2.129-1.318-2.129-1.71 1.829-0.878-2.343-2.035 1.462-0.404-2.475-2.282 1.04 0.089-2.506-2.442 0.576 0.575-2.442-2.505 0.089 1.04-2.282-2.475-0.404 1.462-2.035-2.343-0.878 1.829-1.71-2.129-1.318 2.129-1.319-1.829-1.71 2.343-0.878-1.462-2.035 2.475-0.404-1.04-2.282 2.505 0.089-0.575-2.441 2.442 0.575-0.089-2.506 2.282 1.04 0.404-2.475 2.035 1.463 0.878-2.344 1.71 1.83 1.318-2.13 1.319 2.13 1.71-1.83 0.878 2.344 2.035-1.463 0.403 2.475 2.283-1.04-0.089 2.506 2.441-0.575-0.575 2.441 2.506-0.089-1.04 2.282 2.475 0.404-1.463 2.035 2.344 0.878-1.83 1.71z"/></svg>
20 web_src/svg/gitea-chef.svg Normal file
@ -0,0 +1,20 @@
<?xml version="1.0" encoding="UTF-8"?>
<svg version="1.1" viewBox="0 0 36 36" xmlns="http://www.w3.org/2000/svg">
<g fill="none" fill-rule="evenodd">
<g transform="translate(-54 -32)">
<path d="m72 57.8c-4.3 0-7.7-3.6-7.7-8s3.4-7.9 7.7-7.9c3.5 0 6.4 2.4 7.3 5.7h3c-1-5-5.2-8.7-10.3-8.7-5.9 0-10.6 4.9-10.6 10.9 0 6.1 4.7 11 10.6 11 5.1 0 9.3-3.7 10.3-8.7h-3c-0.9 3.3-3.8 5.7-7.3 5.7" fill="#435363"/>
<path d="m66.8 55.2c1.3 1.4 3.1 2.3 5.2 2.3v-3.2c-1.2 0-2.3-0.5-3.1-1.3l-2.1 2.2" fill="#435363"/>
<path d="m64.6 49.8c0 1.1 0.3 2.2 0.6 3.1l2.9-1.3c-0.3-0.5-0.4-1.1-0.4-1.8 0-2.4 1.9-4.4 4.3-4.4v-3.2c-4.1 0-7.4 3.4-7.4 7.6" fill="#F38B00"/>
<path d="m74.6 42.7-1.1 3c0.9 0.4 1.7 1.1 2.2 1.9h3.3c-0.7-2.2-2.3-4-4.4-4.9" fill="#435363"/>
<path d="m73.5 54 1.1 2.9c2.1-0.8 3.7-2.6 4.4-4.8h-3.3c-0.5 0.8-1.3 1.5-2.2 1.9" fill="#F38B00"/>
<path d="m58.4 54.1c-0.1-0.2-0.1-0.3-0.1-0.5-0.1-0.2-0.1-0.3-0.2-0.5v-0.1c0-0.1 0-0.3-0.1-0.4v-0.2-0.3c-0.1-0.1-0.1-0.2-0.1-0.3-0.1-0.6-0.1-1.3-0.1-2h-2.9c0 0.8 0 1.5 0.1 2.2 0 0.2 0.1 0.4 0.1 0.6v0.1c0 0.2 0.1 0.4 0.1 0.5s0 0.2 0.1 0.3v0.3c0.1 0.1 0.1 0.2 0.1 0.4 0 0 0.1 0.1 0.1 0.2 0 0.2 0 0.3 0.1 0.4v0.2c0.2 0.7 0.5 1.3 0.7 2l2.7-1.2c-0.2-0.6-0.4-1.1-0.6-1.7" fill="#435363"/>
<path d="m72 64.6c-3.9 0-7.5-1.7-10.1-4.4l-2 2.2c3.1 3.2 7.3 5.2 12.1 5.2 8.7 0 15.8-6.8 16.9-15.5h-2.9c-1.1 7-7 12.5-14 12.5" fill="#F38B00"/>
<path d="m72 35.1c3.1 0 6.1 1.1 8.4 2.9l1.8-2.4c-2.9-2.2-6.4-3.5-10.2-3.5-7.3 0-13.5 4.7-15.9 11.3l2.7 1.1c2-5.5 7.2-9.4 13.2-9.4" fill="#F38B00"/>
<path d="m86 47.6h2.9c-0.3-2.6-1.2-5-2.5-7.2l-2.4 1.6c1 1.7 1.7 3.6 2 5.6" fill="#435363"/>
<path d="m82.7 47.6h2.9c-0.8-5.1-4.1-9.3-8.6-11.1l-1.1 2.8c3.5 1.3 6 4.5 6.8 8.3" fill="#F38B00"/>
<path d="m72 38.5v-3c-5.9 0-10.9 3.8-12.9 9.1l2.7 1.1c1.6-4.2 5.5-7.2 10.2-7.2" fill="#435363"/>
<path d="m61 49.8h-2.9c0 6.1 3.6 11.2 8.7 13.4l1.1-2.8c-4-1.7-6.9-5.8-6.9-10.6" fill="#F38B00"/>
<path d="m72 61.2v3c6.9 0 12.6-5.3 13.6-12.1h-2.9c-1 5.2-5.4 9.1-10.7 9.1" fill="#435363"/>
</g>
</g>
</svg>