Compare commits

...

9 Commits

Author SHA1 Message Date
KN4CK3R
3510d7e33a
Fix container blob mount (#22226) 2023-01-16 17:35:48 -05:00
Bradley D. Thornton
1aba53db62
Update overview.en-us.md (#22463)
Fix grammar
2023-01-16 16:24:24 -06:00
KN4CK3R
8117e41dfa
Add reply hint to mail text (#22459)
Addition to #22056

This PR adds a hint to the mail text if replies are supported.
I can't tell if the text structure works in every language. Maybe
we need to put the whole line in the translation file and use
parameters.
2023-01-16 14:58:01 -06:00
zeripath
2cc3a6381c
Add cron method to gc LFS MetaObjects (#22385)
This PR adds a task to the cron service to allow garbage collection of
LFS meta objects. As repositories may have a large number of
LFSMetaObjects, an `updated` column is added to this table and used
to perform a generational GC that tries to reduce the amount of work per run.
(There may need to be a bit more work here, but this is probably enough
for the moment.)
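
To illustrate the generational idea, here is a minimal, self-contained sketch (not the actual implementation; the two cut-offs correspond to the new `OLDER_THAN` and `LAST_UPDATED_MORE_THAN_AGO` settings, and `metaObject` stands in for `LFSMetaObject`):

```go
package main

import (
	"fmt"
	"time"
)

// metaObject stands in for an LFSMetaObject: the new `updated` column records
// when the GC last looked at the object.
type metaObject struct {
	ID      int64
	Created time.Time
	Updated time.Time
}

// selectCandidates keeps only objects older than olderThan that have not been
// re-checked since updatedBefore, so each run scans one "generation" of stale
// objects instead of the whole table.
func selectCandidates(all []metaObject, olderThan, updatedBefore time.Time) []metaObject {
	var out []metaObject
	for _, m := range all {
		if m.Created.Before(olderThan) && m.Updated.Before(updatedBefore) {
			out = append(out, m)
		}
	}
	return out
}

func main() {
	now := time.Now()
	objects := []metaObject{
		{ID: 1, Created: now.Add(-30 * 24 * time.Hour), Updated: now.Add(-10 * 24 * time.Hour)},
		{ID: 2, Created: now.Add(-30 * 24 * time.Hour), Updated: now.Add(-time.Hour)}, // checked recently, skipped
		{ID: 3, Created: now.Add(-2 * time.Hour), Updated: now.Add(-2 * time.Hour)},   // too new, skipped
	}
	// Defaults from the new [cron.gc_lfs] settings: OLDER_THAN=168h, LAST_UPDATED_MORE_THAN_AGO=72h.
	for _, m := range selectCandidates(objects, now.Add(-168*time.Hour), now.Add(-72*time.Hour)) {
		fmt.Println("would check LFSMetaObject", m.ID)
	}
}
```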

Fix #7045

Signed-off-by: Andrew Thornton <art27@cantab.net>
2023-01-16 13:50:53 -06:00
Felipe Leopoldo Sologuren Gutiérrez
04c97aa364
Change use of Walk to WalkDir to improve disk performance (#22462)
As suggested by the Go developers, use `filepath.WalkDir` instead of
`filepath.Walk`, because [*Walk is less efficient than WalkDir,
introduced in Go 1.16, which avoids calling `os.Lstat` on every file or
directory visited*](https://pkg.go.dev/path/filepath#Walk).

This proposal addresses that, in a similar way to
https://github.com/go-gitea/gitea/pull/22392.
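
A minimal, standalone illustration of the difference (not Gitea code): `WalkDir` hands the callback a lightweight `fs.DirEntry` and only stats an entry when `Info()` is called, whereas `Walk` stats every entry up front.

```go
package main

import (
	"fmt"
	"io/fs"
	"os"
	"path/filepath"
)

func main() {
	root := "."

	// filepath.Walk calls os.Lstat on every visited entry to build the FileInfo.
	_ = filepath.Walk(root, func(path string, info os.FileInfo, err error) error {
		if err != nil {
			return err
		}
		fmt.Println("Walk:", path, info.Size())
		return nil
	})

	// filepath.WalkDir defers the stat: call d.Info() only when size/mtime is needed.
	_ = filepath.WalkDir(root, func(path string, d fs.DirEntry, err error) error {
		if err != nil {
			return err
		}
		if d.IsDir() {
			return nil
		}
		info, err := d.Info()
		if err != nil {
			return err
		}
		fmt.Println("WalkDir:", path, info.Size())
		return nil
	})
}
```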


Co-authored-by: zeripath <art27@cantab.net>
Co-authored-by: Lunny Xiao <xiaolunwen@gmail.com>
2023-01-16 16:21:44 +00:00
wxiaoguang
da274380a7
Remove the netgo tag for Windows build (#22467)
Fix #22370 and more.

Before Go 1.19, the `netgo` tag did nothing for Windows builds.

But Go 1.19 rewrote the net package code for Windows DNS, and there is a
bug:

* https://github.com/golang/go/issues/57757

This PR just removes the `netgo` tag for the Windows build, so Gitea
for Windows keeps the old DNS behavior.
2023-01-16 13:29:10 +00:00
zeripath
16e9dec827
Fix Operator does not exist bug on explore page with ONLY_SHOW_RELEVANT_REPOS (#22454)
There is a mistake in the code for `SearchRepositoryCondition` where it
tests `topics` as a string. This is incorrect for PostgreSQL, where `topics` is
stored as json: it needs to be cast to text for the comparison to
work. (For some reason `JSON_ARRAY_LENGTH` does not work, so I have taken
the simplest solution of casting to text and doing a string comparison.)

Ref https://github.com/go-gitea/gitea/pull/21962#issuecomment-1379584057
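
A standalone sketch of the condition the patch builds with `xorm.io/builder` (the `usePostgreSQL` flag stands in for Gitea's `setting.Database.UsePostgreSQL`):

```go
package main

import (
	"fmt"

	"xorm.io/builder"
)

func main() {
	usePostgreSQL := true // stand-in for setting.Database.UsePostgreSQL

	topicCond := builder.NewCond()
	if usePostgreSQL {
		// Postgres stores topics as json, so cast to text before the string comparison.
		topicCond = topicCond.Or(builder.And(builder.NotNull{"topics"}, builder.Neq{"(topics)::text": "[]"}))
	} else {
		topicCond = topicCond.Or(builder.And(builder.Neq{"topics": "null"}, builder.Neq{"topics": "[]"}))
	}

	sql, args, err := builder.ToSQL(topicCond)
	fmt.Println(sql, args, err)
}
```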

Signed-off-by: Andrew Thornton <art27@cantab.net>
Co-authored-by: delvh <dev.lh@web.de>
2023-01-16 11:25:22 +00:00
zeripath
1e7f3c16a4
Fix environments for KaTeX and error reporting (#22453)
In #22447 it was noticed that display environments were not working
correctly. This was due to the `displayMode` setting not being set.

Further, it was noticed that errors were not being displayed correctly.

This PR fixes both of these issues by forcibly setting the `displayMode`
option and correcting an error in `displayError`.

Fix #22447

Signed-off-by: Andrew Thornton <art27@cantab.net>
Co-authored-by: Lunny Xiao <xiaolunwen@gmail.com>
2023-01-16 18:25:46 +08:00
Lunny Xiao
2782c14396
Supports wildcard protected branch (#20825)
This PR introduces glob matching for protected branch names. The separator is
`/`: `*` matches non-separator characters and `**` matches across
separators.

It also supports entering an existing or non-existing branch name as a matching
condition, and a plain branch name condition has higher priority than a glob rule.

Should fix #2529 and #15705
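
A small standalone example of the matching semantics, using the `github.com/gobwas/glob` library that the rules are compiled with (mirroring `glob.Compile(ruleName, '/')` in the new code):

```go
package main

import (
	"fmt"

	"github.com/gobwas/glob"
)

func main() {
	// '/' is registered as the separator, so '*' stays within one path segment
	// while '**' may cross segments.
	single := glob.MustCompile("release/*", '/')
	multi := glob.MustCompile("release/**", '/')

	fmt.Println(single.Match("release/v1.17"))      // true
	fmt.Println(single.Match("release/test/v1.17")) // false: '*' does not cross '/'
	fmt.Println(multi.Match("release/test/v1.17"))  // true: '**' crosses '/'
}
```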

Screenshots:

<img width="1160" alt="image"
src="https://user-images.githubusercontent.com/81045/205651179-ebb5492a-4ade-4bb4-a13c-965e8c927063.png">

Co-authored-by: zeripath <art27@cantab.net>
2023-01-16 16:00:22 +08:00
64 changed files with 1650 additions and 955 deletions

View File

@ -758,9 +758,9 @@ $(DIST_DIRS):
.PHONY: release-windows
release-windows: | $(DIST_DIRS)
CGO_CFLAGS="$(CGO_CFLAGS)" $(GO) run $(XGO_PACKAGE) -go $(XGO_VERSION) -buildmode exe -dest $(DIST)/binaries -tags 'netgo osusergo $(TAGS)' -ldflags '-linkmode external -extldflags "-static" $(LDFLAGS)' -targets 'windows/*' -out gitea-$(VERSION) .
CGO_CFLAGS="$(CGO_CFLAGS)" $(GO) run $(XGO_PACKAGE) -go $(XGO_VERSION) -buildmode exe -dest $(DIST)/binaries -tags 'osusergo $(TAGS)' -ldflags '-linkmode external -extldflags "-static" $(LDFLAGS)' -targets 'windows/*' -out gitea-$(VERSION) .
ifeq (,$(findstring gogit,$(TAGS)))
CGO_CFLAGS="$(CGO_CFLAGS)" $(GO) run $(XGO_PACKAGE) -go $(XGO_VERSION) -buildmode exe -dest $(DIST)/binaries -tags 'netgo osusergo gogit $(TAGS)' -ldflags '-linkmode external -extldflags "-static" $(LDFLAGS)' -targets 'windows/*' -out gitea-$(VERSION)-gogit .
CGO_CFLAGS="$(CGO_CFLAGS)" $(GO) run $(XGO_PACKAGE) -go $(XGO_VERSION) -buildmode exe -dest $(DIST)/binaries -tags 'osusergo gogit $(TAGS)' -ldflags '-linkmode external -extldflags "-static" $(LDFLAGS)' -targets 'windows/*' -out gitea-$(VERSION)-gogit .
endif
ifeq ($(CI),true)
cp /build/* $(DIST)/binaries

View File

@ -32,11 +32,15 @@ func needsUpdate(dir, filename string) (bool, []byte) {
hasher := sha1.New()
err = filepath.Walk(dir, func(path string, info os.FileInfo, err error) error {
err = filepath.WalkDir(dir, func(path string, d os.DirEntry, err error) error {
if err != nil {
return err
}
_, _ = hasher.Write([]byte(info.Name()))
info, err := d.Info()
if err != nil {
return err
}
_, _ = hasher.Write([]byte(d.Name()))
_, _ = hasher.Write([]byte(info.ModTime().String()))
_, _ = hasher.Write([]byte(strconv.FormatInt(info.Size(), 16)))
return nil

View File

@ -2213,6 +2213,28 @@ ROUTER = console
;SCHEDULE = @every 168h
;OLDER_THAN = 8760h
;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;
;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;
;; Garbage collect LFS pointers in repositories
;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;
;[cron.gc_lfs]
;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;
;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;
;ENABLED = false
;; Garbage collect LFS pointers in repositories (default false)
;RUN_AT_START = false
;; Interval as a duration between each gc run (default every 24h)
;SCHEDULE = @every 24h
;; Only attempt to garbage collect LFSMetaObjects older than this (default 7 days)
;OLDER_THAN = 168h
;; Only attempt to garbage collect LFSMetaObjects that have not been attempted to be garbage collected for this long (default 3 days)
;LAST_UPDATED_MORE_THAN_AGO = 72h
; Minimum number of stale LFSMetaObjects to check per repo. Set to `0` to always check all.
;NUMBER_TO_CHECK_PER_REPO = 100
;Check at least this proportion of LFSMetaObjects per repo. (This may cause all stale LFSMetaObjects to be checked.)
;PROPORTION_TO_CHECK_PER_REPO = 0.6
;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;
;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;
;; Git Operation timeout in seconds

View File

@ -1039,6 +1039,16 @@ Default templates for project boards:
- `SCHEDULE`: **@every 168h**: Cron syntax to set how often to check.
- `OLDER_THAN`: **@every 8760h**: any system notice older than this expression will be deleted from database.
#### Cron - Garbage collect LFS pointers in repositories ('cron.gc_lfs')
- `ENABLED`: **false**: Enable service.
- `RUN_AT_START`: **false**: Run tasks at start up time (if ENABLED).
- `SCHEDULE`: **@every 24h**: Cron syntax to set how often to check.
- `OLDER_THAN`: **168h**: Only attempt to garbage collect LFSMetaObjects older than this (default 7 days)
- `LAST_UPDATED_MORE_THAN_AGO`: **72h**: Only attempt to garbage collect LFSMetaObjects that have not been attempted to be garbage collected for this long (default 3 days)
- `NUMBER_TO_CHECK_PER_REPO`: **100**: Minimum number of stale LFSMetaObjects to check per repo. Set to `0` to always check all.
- `PROPORTION_TO_CHECK_PER_REPO`: **0.6**: Check at least this proportion of LFSMetaObjects per repo. (This may cause all stale LFSMetaObjects to be checked.)
## Git (`git`)
- `PATH`: **""**: The path of Git executable. If empty, Gitea searches through the PATH environment.

View File

@ -55,7 +55,7 @@ and shows a link to the repository on the package site (as well as a link to the
| Package owner type | User | Organization |
|--------------------|------|--------------|
| **read** access | public, if user is public too; otherwise for this user only | public, if org is public, otherwise org members only |
| **read** access | public, if user is public too; otherwise for this user only | public, if org is public, otherwise for org members only |
| **write** access | owner only | org members with admin or write access to the org |
N.B.: These access restrictions are [subject to change](https://github.com/go-gitea/gitea/issues/19270), where more finegrained control will be added via a dedicated organization team permission.
@ -83,7 +83,7 @@ To download a package from your repository:
## Delete a package
You cannot edit a package after you published it in the Package Registry. Instead, you
You cannot edit a package after you have published it in the Package Registry. Instead, you
must delete and recreate it.
To delete a package from your repository:

View File

@ -6,428 +6,15 @@ package git
import (
"context"
"fmt"
"strings"
"time"
"code.gitea.io/gitea/models/db"
"code.gitea.io/gitea/models/organization"
"code.gitea.io/gitea/models/perm"
access_model "code.gitea.io/gitea/models/perm/access"
repo_model "code.gitea.io/gitea/models/repo"
"code.gitea.io/gitea/models/unit"
user_model "code.gitea.io/gitea/models/user"
"code.gitea.io/gitea/modules/base"
"code.gitea.io/gitea/modules/log"
"code.gitea.io/gitea/modules/timeutil"
"code.gitea.io/gitea/modules/util"
"github.com/gobwas/glob"
)
// ProtectedBranch struct
type ProtectedBranch struct {
ID int64 `xorm:"pk autoincr"`
RepoID int64 `xorm:"UNIQUE(s)"`
BranchName string `xorm:"UNIQUE(s)"`
CanPush bool `xorm:"NOT NULL DEFAULT false"`
EnableWhitelist bool
WhitelistUserIDs []int64 `xorm:"JSON TEXT"`
WhitelistTeamIDs []int64 `xorm:"JSON TEXT"`
EnableMergeWhitelist bool `xorm:"NOT NULL DEFAULT false"`
WhitelistDeployKeys bool `xorm:"NOT NULL DEFAULT false"`
MergeWhitelistUserIDs []int64 `xorm:"JSON TEXT"`
MergeWhitelistTeamIDs []int64 `xorm:"JSON TEXT"`
EnableStatusCheck bool `xorm:"NOT NULL DEFAULT false"`
StatusCheckContexts []string `xorm:"JSON TEXT"`
EnableApprovalsWhitelist bool `xorm:"NOT NULL DEFAULT false"`
ApprovalsWhitelistUserIDs []int64 `xorm:"JSON TEXT"`
ApprovalsWhitelistTeamIDs []int64 `xorm:"JSON TEXT"`
RequiredApprovals int64 `xorm:"NOT NULL DEFAULT 0"`
BlockOnRejectedReviews bool `xorm:"NOT NULL DEFAULT false"`
BlockOnOfficialReviewRequests bool `xorm:"NOT NULL DEFAULT false"`
BlockOnOutdatedBranch bool `xorm:"NOT NULL DEFAULT false"`
DismissStaleApprovals bool `xorm:"NOT NULL DEFAULT false"`
RequireSignedCommits bool `xorm:"NOT NULL DEFAULT false"`
ProtectedFilePatterns string `xorm:"TEXT"`
UnprotectedFilePatterns string `xorm:"TEXT"`
CreatedUnix timeutil.TimeStamp `xorm:"created"`
UpdatedUnix timeutil.TimeStamp `xorm:"updated"`
}
func init() {
db.RegisterModel(new(ProtectedBranch))
db.RegisterModel(new(DeletedBranch))
db.RegisterModel(new(RenamedBranch))
}
// IsProtected returns if the branch is protected
func (protectBranch *ProtectedBranch) IsProtected() bool {
return protectBranch.ID > 0
}
// CanUserPush returns if some user could push to this protected branch
func (protectBranch *ProtectedBranch) CanUserPush(ctx context.Context, userID int64) bool {
if !protectBranch.CanPush {
return false
}
if !protectBranch.EnableWhitelist {
if user, err := user_model.GetUserByID(ctx, userID); err != nil {
log.Error("GetUserByID: %v", err)
return false
} else if repo, err := repo_model.GetRepositoryByID(ctx, protectBranch.RepoID); err != nil {
log.Error("repo_model.GetRepositoryByID: %v", err)
return false
} else if writeAccess, err := access_model.HasAccessUnit(ctx, user, repo, unit.TypeCode, perm.AccessModeWrite); err != nil {
log.Error("HasAccessUnit: %v", err)
return false
} else {
return writeAccess
}
}
if base.Int64sContains(protectBranch.WhitelistUserIDs, userID) {
return true
}
if len(protectBranch.WhitelistTeamIDs) == 0 {
return false
}
in, err := organization.IsUserInTeams(ctx, userID, protectBranch.WhitelistTeamIDs)
if err != nil {
log.Error("IsUserInTeams: %v", err)
return false
}
return in
}
// IsUserMergeWhitelisted checks if some user is whitelisted to merge to this branch
func IsUserMergeWhitelisted(ctx context.Context, protectBranch *ProtectedBranch, userID int64, permissionInRepo access_model.Permission) bool {
if !protectBranch.EnableMergeWhitelist {
// Then we need to fall back on whether the user has write permission
return permissionInRepo.CanWrite(unit.TypeCode)
}
if base.Int64sContains(protectBranch.MergeWhitelistUserIDs, userID) {
return true
}
if len(protectBranch.MergeWhitelistTeamIDs) == 0 {
return false
}
in, err := organization.IsUserInTeams(ctx, userID, protectBranch.MergeWhitelistTeamIDs)
if err != nil {
log.Error("IsUserInTeams: %v", err)
return false
}
return in
}
// IsUserOfficialReviewer check if user is official reviewer for the branch (counts towards required approvals)
func IsUserOfficialReviewer(ctx context.Context, protectBranch *ProtectedBranch, user *user_model.User) (bool, error) {
repo, err := repo_model.GetRepositoryByID(ctx, protectBranch.RepoID)
if err != nil {
return false, err
}
if !protectBranch.EnableApprovalsWhitelist {
// Anyone with write access is considered official reviewer
writeAccess, err := access_model.HasAccessUnit(ctx, user, repo, unit.TypeCode, perm.AccessModeWrite)
if err != nil {
return false, err
}
return writeAccess, nil
}
if base.Int64sContains(protectBranch.ApprovalsWhitelistUserIDs, user.ID) {
return true, nil
}
inTeam, err := organization.IsUserInTeams(ctx, user.ID, protectBranch.ApprovalsWhitelistTeamIDs)
if err != nil {
return false, err
}
return inTeam, nil
}
// GetProtectedFilePatterns parses a semicolon separated list of protected file patterns and returns a glob.Glob slice
func (protectBranch *ProtectedBranch) GetProtectedFilePatterns() []glob.Glob {
return getFilePatterns(protectBranch.ProtectedFilePatterns)
}
// GetUnprotectedFilePatterns parses a semicolon separated list of unprotected file patterns and returns a glob.Glob slice
func (protectBranch *ProtectedBranch) GetUnprotectedFilePatterns() []glob.Glob {
return getFilePatterns(protectBranch.UnprotectedFilePatterns)
}
func getFilePatterns(filePatterns string) []glob.Glob {
extarr := make([]glob.Glob, 0, 10)
for _, expr := range strings.Split(strings.ToLower(filePatterns), ";") {
expr = strings.TrimSpace(expr)
if expr != "" {
if g, err := glob.Compile(expr, '.', '/'); err != nil {
log.Info("Invalid glob expression '%s' (skipped): %v", expr, err)
} else {
extarr = append(extarr, g)
}
}
}
return extarr
}
// MergeBlockedByProtectedFiles returns true if merge is blocked by protected files change
func (protectBranch *ProtectedBranch) MergeBlockedByProtectedFiles(changedProtectedFiles []string) bool {
glob := protectBranch.GetProtectedFilePatterns()
if len(glob) == 0 {
return false
}
return len(changedProtectedFiles) > 0
}
// IsProtectedFile return if path is protected
func (protectBranch *ProtectedBranch) IsProtectedFile(patterns []glob.Glob, path string) bool {
if len(patterns) == 0 {
patterns = protectBranch.GetProtectedFilePatterns()
if len(patterns) == 0 {
return false
}
}
lpath := strings.ToLower(strings.TrimSpace(path))
r := false
for _, pat := range patterns {
if pat.Match(lpath) {
r = true
break
}
}
return r
}
// IsUnprotectedFile return if path is unprotected
func (protectBranch *ProtectedBranch) IsUnprotectedFile(patterns []glob.Glob, path string) bool {
if len(patterns) == 0 {
patterns = protectBranch.GetUnprotectedFilePatterns()
if len(patterns) == 0 {
return false
}
}
lpath := strings.ToLower(strings.TrimSpace(path))
r := false
for _, pat := range patterns {
if pat.Match(lpath) {
r = true
break
}
}
return r
}
// GetProtectedBranchBy getting protected branch by ID/Name
func GetProtectedBranchBy(ctx context.Context, repoID int64, branchName string) (*ProtectedBranch, error) {
rel := &ProtectedBranch{RepoID: repoID, BranchName: branchName}
has, err := db.GetByBean(ctx, rel)
if err != nil {
return nil, err
}
if !has {
return nil, nil
}
return rel, nil
}
// WhitelistOptions represent all sorts of whitelists used for protected branches
type WhitelistOptions struct {
UserIDs []int64
TeamIDs []int64
MergeUserIDs []int64
MergeTeamIDs []int64
ApprovalsUserIDs []int64
ApprovalsTeamIDs []int64
}
// UpdateProtectBranch saves branch protection options of repository.
// If ID is 0, it creates a new record. Otherwise, updates existing record.
// This function also performs check if whitelist user and team's IDs have been changed
// to avoid unnecessary whitelist delete and regenerate.
func UpdateProtectBranch(ctx context.Context, repo *repo_model.Repository, protectBranch *ProtectedBranch, opts WhitelistOptions) (err error) {
if err = repo.GetOwner(ctx); err != nil {
return fmt.Errorf("GetOwner: %w", err)
}
whitelist, err := updateUserWhitelist(ctx, repo, protectBranch.WhitelistUserIDs, opts.UserIDs)
if err != nil {
return err
}
protectBranch.WhitelistUserIDs = whitelist
whitelist, err = updateUserWhitelist(ctx, repo, protectBranch.MergeWhitelistUserIDs, opts.MergeUserIDs)
if err != nil {
return err
}
protectBranch.MergeWhitelistUserIDs = whitelist
whitelist, err = updateApprovalWhitelist(ctx, repo, protectBranch.ApprovalsWhitelistUserIDs, opts.ApprovalsUserIDs)
if err != nil {
return err
}
protectBranch.ApprovalsWhitelistUserIDs = whitelist
// if the repo is in an organization
whitelist, err = updateTeamWhitelist(ctx, repo, protectBranch.WhitelistTeamIDs, opts.TeamIDs)
if err != nil {
return err
}
protectBranch.WhitelistTeamIDs = whitelist
whitelist, err = updateTeamWhitelist(ctx, repo, protectBranch.MergeWhitelistTeamIDs, opts.MergeTeamIDs)
if err != nil {
return err
}
protectBranch.MergeWhitelistTeamIDs = whitelist
whitelist, err = updateTeamWhitelist(ctx, repo, protectBranch.ApprovalsWhitelistTeamIDs, opts.ApprovalsTeamIDs)
if err != nil {
return err
}
protectBranch.ApprovalsWhitelistTeamIDs = whitelist
// Make sure protectBranch.ID is not 0 for whitelists
if protectBranch.ID == 0 {
if _, err = db.GetEngine(ctx).Insert(protectBranch); err != nil {
return fmt.Errorf("Insert: %w", err)
}
return nil
}
if _, err = db.GetEngine(ctx).ID(protectBranch.ID).AllCols().Update(protectBranch); err != nil {
return fmt.Errorf("Update: %w", err)
}
return nil
}
// GetProtectedBranches get all protected branches
func GetProtectedBranches(ctx context.Context, repoID int64) ([]*ProtectedBranch, error) {
protectedBranches := make([]*ProtectedBranch, 0)
return protectedBranches, db.GetEngine(ctx).Find(&protectedBranches, &ProtectedBranch{RepoID: repoID})
}
// IsProtectedBranch checks if branch is protected
func IsProtectedBranch(ctx context.Context, repoID int64, branchName string) (bool, error) {
protectedBranch := &ProtectedBranch{
RepoID: repoID,
BranchName: branchName,
}
has, err := db.GetEngine(ctx).Exist(protectedBranch)
if err != nil {
return true, err
}
return has, nil
}
// updateApprovalWhitelist checks whether the user whitelist changed and returns a whitelist with
// the users from newWhitelist which have explicit read or write access to the repo.
func updateApprovalWhitelist(ctx context.Context, repo *repo_model.Repository, currentWhitelist, newWhitelist []int64) (whitelist []int64, err error) {
hasUsersChanged := !util.SliceSortedEqual(currentWhitelist, newWhitelist)
if !hasUsersChanged {
return currentWhitelist, nil
}
whitelist = make([]int64, 0, len(newWhitelist))
for _, userID := range newWhitelist {
if reader, err := access_model.IsRepoReader(ctx, repo, userID); err != nil {
return nil, err
} else if !reader {
continue
}
whitelist = append(whitelist, userID)
}
return whitelist, err
}
// updateUserWhitelist checks whether the user whitelist changed and returns a whitelist with
// the users from newWhitelist which have write access to the repo.
func updateUserWhitelist(ctx context.Context, repo *repo_model.Repository, currentWhitelist, newWhitelist []int64) (whitelist []int64, err error) {
hasUsersChanged := !util.SliceSortedEqual(currentWhitelist, newWhitelist)
if !hasUsersChanged {
return currentWhitelist, nil
}
whitelist = make([]int64, 0, len(newWhitelist))
for _, userID := range newWhitelist {
user, err := user_model.GetUserByID(ctx, userID)
if err != nil {
return nil, fmt.Errorf("GetUserByID [user_id: %d, repo_id: %d]: %w", userID, repo.ID, err)
}
perm, err := access_model.GetUserRepoPermission(ctx, repo, user)
if err != nil {
return nil, fmt.Errorf("GetUserRepoPermission [user_id: %d, repo_id: %d]: %w", userID, repo.ID, err)
}
if !perm.CanWrite(unit.TypeCode) {
continue // Drop invalid user ID
}
whitelist = append(whitelist, userID)
}
return whitelist, err
}
// updateTeamWhitelist checks whether the team whitelist changed and returns a whitelist with
// the teams from newWhitelist which have write access to the repo.
func updateTeamWhitelist(ctx context.Context, repo *repo_model.Repository, currentWhitelist, newWhitelist []int64) (whitelist []int64, err error) {
hasTeamsChanged := !util.SliceSortedEqual(currentWhitelist, newWhitelist)
if !hasTeamsChanged {
return currentWhitelist, nil
}
teams, err := organization.GetTeamsWithAccessToRepo(ctx, repo.OwnerID, repo.ID, perm.AccessModeRead)
if err != nil {
return nil, fmt.Errorf("GetTeamsWithAccessToRepo [org_id: %d, repo_id: %d]: %w", repo.OwnerID, repo.ID, err)
}
whitelist = make([]int64, 0, len(teams))
for i := range teams {
if util.SliceContains(newWhitelist, teams[i].ID) {
whitelist = append(whitelist, teams[i].ID)
}
}
return whitelist, err
}
// DeleteProtectedBranch removes ProtectedBranch relation between the user and repository.
func DeleteProtectedBranch(ctx context.Context, repoID, id int64) (err error) {
protectedBranch := &ProtectedBranch{
RepoID: repoID,
ID: id,
}
if affected, err := db.GetEngine(ctx).Delete(protectedBranch); err != nil {
return err
} else if affected != 1 {
return fmt.Errorf("delete protected branch ID(%v) failed", id)
}
return nil
}
// DeletedBranch struct
type DeletedBranch struct {
ID int64 `xorm:"pk autoincr"`
@ -439,6 +26,11 @@ type DeletedBranch struct {
DeletedUnix timeutil.TimeStamp `xorm:"INDEX created"`
}
func init() {
db.RegisterModel(new(DeletedBranch))
db.RegisterModel(new(RenamedBranch))
}
// AddDeletedBranch adds a deleted branch to the database
func AddDeletedBranch(ctx context.Context, repoID int64, branchName, commit string, deletedByID int64) error {
deletedBranch := &DeletedBranch{
@ -556,17 +148,25 @@ func RenameBranch(ctx context.Context, repo *repo_model.Repository, from, to str
}
// 2. Update protected branch if needed
protectedBranch, err := GetProtectedBranchBy(ctx, repo.ID, from)
protectedBranch, err := GetProtectedBranchRuleByName(ctx, repo.ID, from)
if err != nil {
return err
}
if protectedBranch != nil {
protectedBranch.BranchName = to
protectedBranch.RuleName = to
_, err = sess.ID(protectedBranch.ID).Cols("branch_name").Update(protectedBranch)
if err != nil {
return err
}
} else {
protected, err := IsBranchProtected(ctx, repo.ID, from)
if err != nil {
return err
}
if protected {
return ErrBranchIsProtected
}
}
// 3. Update all not merged pull request base branch name

View File

@ -105,8 +105,8 @@ func TestRenameBranch(t *testing.T) {
defer committer.Close()
assert.NoError(t, err)
assert.NoError(t, git_model.UpdateProtectBranch(ctx, repo1, &git_model.ProtectedBranch{
RepoID: repo1.ID,
BranchName: "master",
RepoID: repo1.ID,
RuleName: "master",
}, git_model.WhitelistOptions{}))
assert.NoError(t, committer.Commit())
@ -131,8 +131,8 @@ func TestRenameBranch(t *testing.T) {
assert.Equal(t, int64(1), renamedBranch.RepoID)
unittest.AssertExistsAndLoadBean(t, &git_model.ProtectedBranch{
RepoID: repo1.ID,
BranchName: "main",
RepoID: repo1.ID,
RuleName: "main",
})
}

View File

@ -115,6 +115,7 @@ type LFSMetaObject struct {
RepositoryID int64 `xorm:"UNIQUE(s) INDEX NOT NULL"`
Existing bool `xorm:"-"`
CreatedUnix timeutil.TimeStamp `xorm:"created"`
UpdatedUnix timeutil.TimeStamp `xorm:"INDEX updated"`
}
func init() {
@ -334,8 +335,45 @@ func GetRepoLFSSize(ctx context.Context, repoID int64) (int64, error) {
return lfsSize, nil
}
// IterateRepositoryIDsWithLFSMetaObjects iterates across the repositories that have LFSMetaObjects
func IterateRepositoryIDsWithLFSMetaObjects(ctx context.Context, f func(ctx context.Context, repoID, count int64) error) error {
batchSize := setting.Database.IterateBufferSize
sess := db.GetEngine(ctx)
id := int64(0)
type RepositoryCount struct {
RepositoryID int64
Count int64
}
for {
counts := make([]*RepositoryCount, 0, batchSize)
sess.Select("repository_id, COUNT(id) AS count").
Table("lfs_meta_object").
Where("repository_id > ?", id).
GroupBy("repository_id").
OrderBy("repository_id ASC")
if err := sess.Limit(batchSize, 0).Find(&counts); err != nil {
return err
}
if len(counts) == 0 {
return nil
}
for _, count := range counts {
if err := f(ctx, count.RepositoryID, count.Count); err != nil {
return err
}
}
id = counts[len(counts)-1].RepositoryID
}
}
// IterateLFSMetaObjectsForRepoOptions provides options for IterateLFSMetaObjectsForRepo
type IterateLFSMetaObjectsForRepoOptions struct {
OlderThan time.Time
OlderThan time.Time
UpdatedLessRecentlyThan time.Time
OrderByUpdated bool
LoopFunctionAlwaysUpdates bool
}
// IterateLFSMetaObjectsForRepo provides a iterator for LFSMetaObjects per Repo
@ -348,28 +386,53 @@ func IterateLFSMetaObjectsForRepo(ctx context.Context, repoID int64, f func(cont
LFSMetaObject
}
id := int64(0)
for {
beans := make([]*CountLFSMetaObject, 0, batchSize)
// SELECT `lfs_meta_object`.*, COUNT(`l1`.id) as `count` FROM lfs_meta_object INNER JOIN lfs_meta_object AS l1 ON l1.oid = lfs_meta_object.oid WHERE lfs_meta_object.repository_id = ? GROUP BY lfs_meta_object.id
sess := engine.Select("`lfs_meta_object`.*, COUNT(`l1`.oid) AS `count`").
Join("INNER", "`lfs_meta_object` AS l1", "`lfs_meta_object`.oid = `l1`.oid").
Where("`lfs_meta_object`.repository_id = ?", repoID)
if !opts.OlderThan.IsZero() {
sess.And("`lfs_meta_object`.created_unix < ?", opts.OlderThan)
}
if !opts.UpdatedLessRecentlyThan.IsZero() {
sess.And("`lfs_meta_object`.updated_unix < ?", opts.UpdatedLessRecentlyThan)
}
sess.GroupBy("`lfs_meta_object`.id")
if opts.OrderByUpdated {
sess.OrderBy("`lfs_meta_object`.updated_unix ASC")
} else {
sess.And("`lfs_meta_object`.id > ?", id)
sess.OrderBy("`lfs_meta_object`.id ASC")
}
if err := sess.Limit(batchSize, start).Find(&beans); err != nil {
return err
}
if len(beans) == 0 {
return nil
}
start += len(beans)
if !opts.LoopFunctionAlwaysUpdates {
start += len(beans)
}
for _, bean := range beans {
if err := f(ctx, &bean.LFSMetaObject, bean.Count); err != nil {
return err
}
}
id = beans[len(beans)-1].ID
}
}
// MarkLFSMetaObject updates the updated time for the provided LFSMetaObject
func MarkLFSMetaObject(ctx context.Context, id int64) error {
obj := &LFSMetaObject{
UpdatedUnix: timeutil.TimeStampNow(),
}
count, err := db.GetEngine(ctx).ID(id).Update(obj)
if count != 1 {
log.Error("Unexpectedly updated %d LFSMetaObjects with ID: %d", count, id)
}
return err
}

View File

@ -0,0 +1,501 @@
// Copyright 2022 The Gitea Authors. All rights reserved.
// SPDX-License-Identifier: MIT
package git
import (
"context"
"errors"
"fmt"
"strings"
"code.gitea.io/gitea/models/db"
"code.gitea.io/gitea/models/organization"
"code.gitea.io/gitea/models/perm"
access_model "code.gitea.io/gitea/models/perm/access"
repo_model "code.gitea.io/gitea/models/repo"
"code.gitea.io/gitea/models/unit"
user_model "code.gitea.io/gitea/models/user"
"code.gitea.io/gitea/modules/base"
"code.gitea.io/gitea/modules/log"
"code.gitea.io/gitea/modules/timeutil"
"code.gitea.io/gitea/modules/util"
"github.com/gobwas/glob"
"github.com/gobwas/glob/syntax"
)
var ErrBranchIsProtected = errors.New("branch is protected")
// ProtectedBranch struct
type ProtectedBranch struct {
ID int64 `xorm:"pk autoincr"`
RepoID int64 `xorm:"UNIQUE(s)"`
Repo *repo_model.Repository `xorm:"-"`
RuleName string `xorm:"'branch_name' UNIQUE(s)"` // a branch name or a glob match to branch name
globRule glob.Glob `xorm:"-"`
isPlainName bool `xorm:"-"`
CanPush bool `xorm:"NOT NULL DEFAULT false"`
EnableWhitelist bool
WhitelistUserIDs []int64 `xorm:"JSON TEXT"`
WhitelistTeamIDs []int64 `xorm:"JSON TEXT"`
EnableMergeWhitelist bool `xorm:"NOT NULL DEFAULT false"`
WhitelistDeployKeys bool `xorm:"NOT NULL DEFAULT false"`
MergeWhitelistUserIDs []int64 `xorm:"JSON TEXT"`
MergeWhitelistTeamIDs []int64 `xorm:"JSON TEXT"`
EnableStatusCheck bool `xorm:"NOT NULL DEFAULT false"`
StatusCheckContexts []string `xorm:"JSON TEXT"`
EnableApprovalsWhitelist bool `xorm:"NOT NULL DEFAULT false"`
ApprovalsWhitelistUserIDs []int64 `xorm:"JSON TEXT"`
ApprovalsWhitelistTeamIDs []int64 `xorm:"JSON TEXT"`
RequiredApprovals int64 `xorm:"NOT NULL DEFAULT 0"`
BlockOnRejectedReviews bool `xorm:"NOT NULL DEFAULT false"`
BlockOnOfficialReviewRequests bool `xorm:"NOT NULL DEFAULT false"`
BlockOnOutdatedBranch bool `xorm:"NOT NULL DEFAULT false"`
DismissStaleApprovals bool `xorm:"NOT NULL DEFAULT false"`
RequireSignedCommits bool `xorm:"NOT NULL DEFAULT false"`
ProtectedFilePatterns string `xorm:"TEXT"`
UnprotectedFilePatterns string `xorm:"TEXT"`
CreatedUnix timeutil.TimeStamp `xorm:"created"`
UpdatedUnix timeutil.TimeStamp `xorm:"updated"`
}
func init() {
db.RegisterModel(new(ProtectedBranch))
}
// IsRuleNameSpecial return true if it contains special character
func IsRuleNameSpecial(ruleName string) bool {
for i := 0; i < len(ruleName); i++ {
if syntax.Special(ruleName[i]) {
return true
}
}
return false
}
func (protectBranch *ProtectedBranch) loadGlob() {
if protectBranch.globRule == nil {
var err error
protectBranch.globRule, err = glob.Compile(protectBranch.RuleName, '/')
if err != nil {
log.Warn("Invalid glob rule for ProtectedBranch[%d]: %s %v", protectBranch.ID, protectBranch.RuleName, err)
protectBranch.globRule = glob.MustCompile(glob.QuoteMeta(protectBranch.RuleName), '/')
}
protectBranch.isPlainName = !IsRuleNameSpecial(protectBranch.RuleName)
}
}
// Match tests if branchName matches the rule
func (protectBranch *ProtectedBranch) Match(branchName string) bool {
protectBranch.loadGlob()
if protectBranch.isPlainName {
return strings.EqualFold(protectBranch.RuleName, branchName)
}
return protectBranch.globRule.Match(branchName)
}
func (protectBranch *ProtectedBranch) LoadRepo(ctx context.Context) (err error) {
if protectBranch.Repo != nil {
return nil
}
protectBranch.Repo, err = repo_model.GetRepositoryByID(ctx, protectBranch.RepoID)
return err
}
// CanUserPush returns if some user could push to this protected branch
func (protectBranch *ProtectedBranch) CanUserPush(ctx context.Context, user *user_model.User) bool {
if !protectBranch.CanPush {
return false
}
if !protectBranch.EnableWhitelist {
if err := protectBranch.LoadRepo(ctx); err != nil {
log.Error("LoadRepo: %v", err)
return false
}
writeAccess, err := access_model.HasAccessUnit(ctx, user, protectBranch.Repo, unit.TypeCode, perm.AccessModeWrite)
if err != nil {
log.Error("HasAccessUnit: %v", err)
return false
}
return writeAccess
}
if base.Int64sContains(protectBranch.WhitelistUserIDs, user.ID) {
return true
}
if len(protectBranch.WhitelistTeamIDs) == 0 {
return false
}
in, err := organization.IsUserInTeams(ctx, user.ID, protectBranch.WhitelistTeamIDs)
if err != nil {
log.Error("IsUserInTeams: %v", err)
return false
}
return in
}
// IsUserMergeWhitelisted checks if some user is whitelisted to merge to this branch
func IsUserMergeWhitelisted(ctx context.Context, protectBranch *ProtectedBranch, userID int64, permissionInRepo access_model.Permission) bool {
if !protectBranch.EnableMergeWhitelist {
// Then we need to fall back on whether the user has write permission
return permissionInRepo.CanWrite(unit.TypeCode)
}
if base.Int64sContains(protectBranch.MergeWhitelistUserIDs, userID) {
return true
}
if len(protectBranch.MergeWhitelistTeamIDs) == 0 {
return false
}
in, err := organization.IsUserInTeams(ctx, userID, protectBranch.MergeWhitelistTeamIDs)
if err != nil {
log.Error("IsUserInTeams: %v", err)
return false
}
return in
}
// IsUserOfficialReviewer check if user is official reviewer for the branch (counts towards required approvals)
func IsUserOfficialReviewer(ctx context.Context, protectBranch *ProtectedBranch, user *user_model.User) (bool, error) {
repo, err := repo_model.GetRepositoryByID(ctx, protectBranch.RepoID)
if err != nil {
return false, err
}
if !protectBranch.EnableApprovalsWhitelist {
// Anyone with write access is considered official reviewer
writeAccess, err := access_model.HasAccessUnit(ctx, user, repo, unit.TypeCode, perm.AccessModeWrite)
if err != nil {
return false, err
}
return writeAccess, nil
}
if base.Int64sContains(protectBranch.ApprovalsWhitelistUserIDs, user.ID) {
return true, nil
}
inTeam, err := organization.IsUserInTeams(ctx, user.ID, protectBranch.ApprovalsWhitelistTeamIDs)
if err != nil {
return false, err
}
return inTeam, nil
}
// GetProtectedFilePatterns parses a semicolon separated list of protected file patterns and returns a glob.Glob slice
func (protectBranch *ProtectedBranch) GetProtectedFilePatterns() []glob.Glob {
return getFilePatterns(protectBranch.ProtectedFilePatterns)
}
// GetUnprotectedFilePatterns parses a semicolon separated list of unprotected file patterns and returns a glob.Glob slice
func (protectBranch *ProtectedBranch) GetUnprotectedFilePatterns() []glob.Glob {
return getFilePatterns(protectBranch.UnprotectedFilePatterns)
}
func getFilePatterns(filePatterns string) []glob.Glob {
extarr := make([]glob.Glob, 0, 10)
for _, expr := range strings.Split(strings.ToLower(filePatterns), ";") {
expr = strings.TrimSpace(expr)
if expr != "" {
if g, err := glob.Compile(expr, '.', '/'); err != nil {
log.Info("Invalid glob expression '%s' (skipped): %v", expr, err)
} else {
extarr = append(extarr, g)
}
}
}
return extarr
}
// MergeBlockedByProtectedFiles returns true if merge is blocked by protected files change
func (protectBranch *ProtectedBranch) MergeBlockedByProtectedFiles(changedProtectedFiles []string) bool {
glob := protectBranch.GetProtectedFilePatterns()
if len(glob) == 0 {
return false
}
return len(changedProtectedFiles) > 0
}
// IsProtectedFile return if path is protected
func (protectBranch *ProtectedBranch) IsProtectedFile(patterns []glob.Glob, path string) bool {
if len(patterns) == 0 {
patterns = protectBranch.GetProtectedFilePatterns()
if len(patterns) == 0 {
return false
}
}
lpath := strings.ToLower(strings.TrimSpace(path))
r := false
for _, pat := range patterns {
if pat.Match(lpath) {
r = true
break
}
}
return r
}
// IsUnprotectedFile return if path is unprotected
func (protectBranch *ProtectedBranch) IsUnprotectedFile(patterns []glob.Glob, path string) bool {
if len(patterns) == 0 {
patterns = protectBranch.GetUnprotectedFilePatterns()
if len(patterns) == 0 {
return false
}
}
lpath := strings.ToLower(strings.TrimSpace(path))
r := false
for _, pat := range patterns {
if pat.Match(lpath) {
r = true
break
}
}
return r
}
// GetProtectedBranchRuleByName getting protected branch rule by name
func GetProtectedBranchRuleByName(ctx context.Context, repoID int64, ruleName string) (*ProtectedBranch, error) {
rel := &ProtectedBranch{RepoID: repoID, RuleName: ruleName}
has, err := db.GetByBean(ctx, rel)
if err != nil {
return nil, err
}
if !has {
return nil, nil
}
return rel, nil
}
// GetProtectedBranchRuleByID getting protected branch rule by rule ID
func GetProtectedBranchRuleByID(ctx context.Context, repoID, ruleID int64) (*ProtectedBranch, error) {
rel := &ProtectedBranch{ID: ruleID, RepoID: repoID}
has, err := db.GetByBean(ctx, rel)
if err != nil {
return nil, err
}
if !has {
return nil, nil
}
return rel, nil
}
// WhitelistOptions represent all sorts of whitelists used for protected branches
type WhitelistOptions struct {
UserIDs []int64
TeamIDs []int64
MergeUserIDs []int64
MergeTeamIDs []int64
ApprovalsUserIDs []int64
ApprovalsTeamIDs []int64
}
// UpdateProtectBranch saves branch protection options of repository.
// If ID is 0, it creates a new record. Otherwise, updates existing record.
// This function also performs check if whitelist user and team's IDs have been changed
// to avoid unnecessary whitelist delete and regenerate.
func UpdateProtectBranch(ctx context.Context, repo *repo_model.Repository, protectBranch *ProtectedBranch, opts WhitelistOptions) (err error) {
if err = repo.GetOwner(ctx); err != nil {
return fmt.Errorf("GetOwner: %v", err)
}
whitelist, err := updateUserWhitelist(ctx, repo, protectBranch.WhitelistUserIDs, opts.UserIDs)
if err != nil {
return err
}
protectBranch.WhitelistUserIDs = whitelist
whitelist, err = updateUserWhitelist(ctx, repo, protectBranch.MergeWhitelistUserIDs, opts.MergeUserIDs)
if err != nil {
return err
}
protectBranch.MergeWhitelistUserIDs = whitelist
whitelist, err = updateApprovalWhitelist(ctx, repo, protectBranch.ApprovalsWhitelistUserIDs, opts.ApprovalsUserIDs)
if err != nil {
return err
}
protectBranch.ApprovalsWhitelistUserIDs = whitelist
// if the repo is in an organization
whitelist, err = updateTeamWhitelist(ctx, repo, protectBranch.WhitelistTeamIDs, opts.TeamIDs)
if err != nil {
return err
}
protectBranch.WhitelistTeamIDs = whitelist
whitelist, err = updateTeamWhitelist(ctx, repo, protectBranch.MergeWhitelistTeamIDs, opts.MergeTeamIDs)
if err != nil {
return err
}
protectBranch.MergeWhitelistTeamIDs = whitelist
whitelist, err = updateTeamWhitelist(ctx, repo, protectBranch.ApprovalsWhitelistTeamIDs, opts.ApprovalsTeamIDs)
if err != nil {
return err
}
protectBranch.ApprovalsWhitelistTeamIDs = whitelist
// Make sure protectBranch.ID is not 0 for whitelists
if protectBranch.ID == 0 {
if _, err = db.GetEngine(ctx).Insert(protectBranch); err != nil {
return fmt.Errorf("Insert: %v", err)
}
return nil
}
if _, err = db.GetEngine(ctx).ID(protectBranch.ID).AllCols().Update(protectBranch); err != nil {
return fmt.Errorf("Update: %v", err)
}
return nil
}
// updateApprovalWhitelist checks whether the user whitelist changed and returns a whitelist with
// the users from newWhitelist which have explicit read or write access to the repo.
func updateApprovalWhitelist(ctx context.Context, repo *repo_model.Repository, currentWhitelist, newWhitelist []int64) (whitelist []int64, err error) {
hasUsersChanged := !util.SliceSortedEqual(currentWhitelist, newWhitelist)
if !hasUsersChanged {
return currentWhitelist, nil
}
whitelist = make([]int64, 0, len(newWhitelist))
for _, userID := range newWhitelist {
if reader, err := access_model.IsRepoReader(ctx, repo, userID); err != nil {
return nil, err
} else if !reader {
continue
}
whitelist = append(whitelist, userID)
}
return whitelist, err
}
// updateUserWhitelist checks whether the user whitelist changed and returns a whitelist with
// the users from newWhitelist which have write access to the repo.
func updateUserWhitelist(ctx context.Context, repo *repo_model.Repository, currentWhitelist, newWhitelist []int64) (whitelist []int64, err error) {
hasUsersChanged := !util.SliceSortedEqual(currentWhitelist, newWhitelist)
if !hasUsersChanged {
return currentWhitelist, nil
}
whitelist = make([]int64, 0, len(newWhitelist))
for _, userID := range newWhitelist {
user, err := user_model.GetUserByID(ctx, userID)
if err != nil {
return nil, fmt.Errorf("GetUserByID [user_id: %d, repo_id: %d]: %v", userID, repo.ID, err)
}
perm, err := access_model.GetUserRepoPermission(ctx, repo, user)
if err != nil {
return nil, fmt.Errorf("GetUserRepoPermission [user_id: %d, repo_id: %d]: %v", userID, repo.ID, err)
}
if !perm.CanWrite(unit.TypeCode) {
continue // Drop invalid user ID
}
whitelist = append(whitelist, userID)
}
return whitelist, err
}
// updateTeamWhitelist checks whether the team whitelist changed and returns a whitelist with
// the teams from newWhitelist which have write access to the repo.
func updateTeamWhitelist(ctx context.Context, repo *repo_model.Repository, currentWhitelist, newWhitelist []int64) (whitelist []int64, err error) {
hasTeamsChanged := !util.SliceSortedEqual(currentWhitelist, newWhitelist)
if !hasTeamsChanged {
return currentWhitelist, nil
}
teams, err := organization.GetTeamsWithAccessToRepo(ctx, repo.OwnerID, repo.ID, perm.AccessModeRead)
if err != nil {
return nil, fmt.Errorf("GetTeamsWithAccessToRepo [org_id: %d, repo_id: %d]: %v", repo.OwnerID, repo.ID, err)
}
whitelist = make([]int64, 0, len(teams))
for i := range teams {
if util.SliceContains(newWhitelist, teams[i].ID) {
whitelist = append(whitelist, teams[i].ID)
}
}
return whitelist, err
}
// DeleteProtectedBranch removes ProtectedBranch relation between the user and repository.
func DeleteProtectedBranch(ctx context.Context, repoID, id int64) (err error) {
protectedBranch := &ProtectedBranch{
RepoID: repoID,
ID: id,
}
if affected, err := db.GetEngine(ctx).Delete(protectedBranch); err != nil {
return err
} else if affected != 1 {
return fmt.Errorf("delete protected branch ID(%v) failed", id)
}
return nil
}
// RemoveUserIDFromProtectedBranch remove all user ids from protected branch options
func RemoveUserIDFromProtectedBranch(ctx context.Context, p *ProtectedBranch, userID int64) error {
lenIDs, lenApprovalIDs, lenMergeIDs := len(p.WhitelistUserIDs), len(p.ApprovalsWhitelistUserIDs), len(p.MergeWhitelistUserIDs)
p.WhitelistUserIDs = util.SliceRemoveAll(p.WhitelistUserIDs, userID)
p.ApprovalsWhitelistUserIDs = util.SliceRemoveAll(p.ApprovalsWhitelistUserIDs, userID)
p.MergeWhitelistUserIDs = util.SliceRemoveAll(p.MergeWhitelistUserIDs, userID)
if lenIDs != len(p.WhitelistUserIDs) || lenApprovalIDs != len(p.ApprovalsWhitelistUserIDs) ||
lenMergeIDs != len(p.MergeWhitelistUserIDs) {
if _, err := db.GetEngine(ctx).ID(p.ID).Cols(
"whitelist_user_i_ds",
"merge_whitelist_user_i_ds",
"approvals_whitelist_user_i_ds",
).Update(p); err != nil {
return fmt.Errorf("updateProtectedBranches: %v", err)
}
}
return nil
}
// RemoveTeamIDFromProtectedBranch remove all team ids from protected branch options
func RemoveTeamIDFromProtectedBranch(ctx context.Context, p *ProtectedBranch, teamID int64) error {
lenIDs, lenApprovalIDs, lenMergeIDs := len(p.WhitelistTeamIDs), len(p.ApprovalsWhitelistTeamIDs), len(p.MergeWhitelistTeamIDs)
p.WhitelistTeamIDs = util.SliceRemoveAll(p.WhitelistTeamIDs, teamID)
p.ApprovalsWhitelistTeamIDs = util.SliceRemoveAll(p.ApprovalsWhitelistTeamIDs, teamID)
p.MergeWhitelistTeamIDs = util.SliceRemoveAll(p.MergeWhitelistTeamIDs, teamID)
if lenIDs != len(p.WhitelistTeamIDs) ||
lenApprovalIDs != len(p.ApprovalsWhitelistTeamIDs) ||
lenMergeIDs != len(p.MergeWhitelistTeamIDs) {
if _, err := db.GetEngine(ctx).ID(p.ID).Cols(
"whitelist_team_i_ds",
"merge_whitelist_team_i_ds",
"approvals_whitelist_team_i_ds",
).Update(p); err != nil {
return fmt.Errorf("updateProtectedBranches: %v", err)
}
}
return nil
}

View File

@ -0,0 +1,86 @@
// Copyright 2022 The Gitea Authors. All rights reserved.
// SPDX-License-Identifier: MIT
package git
import (
"context"
"sort"
"code.gitea.io/gitea/models/db"
"code.gitea.io/gitea/modules/git"
"github.com/gobwas/glob"
)
type ProtectedBranchRules []*ProtectedBranch
func (rules ProtectedBranchRules) GetFirstMatched(branchName string) *ProtectedBranch {
for _, rule := range rules {
if rule.Match(branchName) {
return rule
}
}
return nil
}
func (rules ProtectedBranchRules) sort() {
sort.Slice(rules, func(i, j int) bool {
rules[i].loadGlob()
rules[j].loadGlob()
if rules[i].isPlainName {
if !rules[j].isPlainName {
return true
}
} else if rules[j].isPlainName {
return true
}
return rules[i].CreatedUnix < rules[j].CreatedUnix
})
}
// FindRepoProtectedBranchRules load all repository's protected rules
func FindRepoProtectedBranchRules(ctx context.Context, repoID int64) (ProtectedBranchRules, error) {
var rules ProtectedBranchRules
err := db.GetEngine(ctx).Where("repo_id = ?", repoID).Asc("created_unix").Find(&rules)
if err != nil {
return nil, err
}
rules.sort()
return rules, nil
}
// FindAllMatchedBranches find all matched branches
func FindAllMatchedBranches(ctx context.Context, gitRepo *git.Repository, ruleName string) ([]string, error) {
// FIXME: how many should we get?
branches, _, err := gitRepo.GetBranchNames(0, 9999999)
if err != nil {
return nil, err
}
rule := glob.MustCompile(ruleName)
results := make([]string, 0, len(branches))
for _, branch := range branches {
if rule.Match(branch) {
results = append(results, branch)
}
}
return results, nil
}
// GetFirstMatchProtectedBranchRule returns the first matched rules
func GetFirstMatchProtectedBranchRule(ctx context.Context, repoID int64, branchName string) (*ProtectedBranch, error) {
rules, err := FindRepoProtectedBranchRules(ctx, repoID)
if err != nil {
return nil, err
}
return rules.GetFirstMatched(branchName), nil
}
// IsBranchProtected checks if branch is protected
func IsBranchProtected(ctx context.Context, repoID int64, branchName string) (bool, error) {
rule, err := GetFirstMatchProtectedBranchRule(ctx, repoID, branchName)
if err != nil {
return false, err
}
return rule != nil, nil
}

View File

@ -0,0 +1,78 @@
// Copyright 2022 The Gitea Authors. All rights reserved.
// SPDX-License-Identifier: MIT
package git
import (
"fmt"
"testing"
"github.com/stretchr/testify/assert"
)
func TestBranchRuleMatch(t *testing.T) {
kases := []struct {
Rule string
BranchName string
ExpectedMatch bool
}{
{
Rule: "release/*",
BranchName: "release/v1.17",
ExpectedMatch: true,
},
{
Rule: "release/**/v1.17",
BranchName: "release/test/v1.17",
ExpectedMatch: true,
},
{
Rule: "release/**/v1.17",
BranchName: "release/test/1/v1.17",
ExpectedMatch: true,
},
{
Rule: "release/*/v1.17",
BranchName: "release/test/1/v1.17",
ExpectedMatch: false,
},
{
Rule: "release/v*",
BranchName: "release/v1.16",
ExpectedMatch: true,
},
{
Rule: "*",
BranchName: "release/v1.16",
ExpectedMatch: false,
},
{
Rule: "**",
BranchName: "release/v1.16",
ExpectedMatch: true,
},
{
Rule: "main",
BranchName: "main",
ExpectedMatch: true,
},
{
Rule: "master",
BranchName: "main",
ExpectedMatch: false,
},
}
for _, kase := range kases {
pb := ProtectedBranch{RuleName: kase.Rule}
var should, infact string
if !kase.ExpectedMatch {
should = " not"
} else {
infact = " not"
}
assert.EqualValues(t, kase.ExpectedMatch, pb.Match(kase.BranchName),
fmt.Sprintf("%s should%s match %s but it is%s", kase.BranchName, should, kase.Rule, infact),
)
}
}

View File

@ -164,9 +164,8 @@ type PullRequest struct {
HeadBranch string
HeadCommitID string `xorm:"-"`
BaseBranch string
ProtectedBranch *git_model.ProtectedBranch `xorm:"-"`
MergeBase string `xorm:"VARCHAR(40)"`
AllowMaintainerEdit bool `xorm:"NOT NULL DEFAULT false"`
MergeBase string `xorm:"VARCHAR(40)"`
AllowMaintainerEdit bool `xorm:"NOT NULL DEFAULT false"`
HasMerged bool `xorm:"INDEX"`
MergedCommitID string `xorm:"VARCHAR(40)"`
@ -293,23 +292,6 @@ func (pr *PullRequest) LoadIssue(ctx context.Context) (err error) {
return err
}
// LoadProtectedBranch loads the protected branch of the base branch
func (pr *PullRequest) LoadProtectedBranch(ctx context.Context) (err error) {
if pr.ProtectedBranch == nil {
if pr.BaseRepo == nil {
if pr.BaseRepoID == 0 {
return nil
}
pr.BaseRepo, err = repo_model.GetRepositoryByID(ctx, pr.BaseRepoID)
if err != nil {
return
}
}
pr.ProtectedBranch, err = git_model.GetProtectedBranchBy(ctx, pr.BaseRepo.ID, pr.BaseBranch)
}
return err
}
// ReviewCount represents a count of Reviews
type ReviewCount struct {
IssueID int64

View File

@ -263,15 +263,17 @@ func IsOfficialReviewer(ctx context.Context, issue *Issue, reviewers ...*user_mo
if err != nil {
return false, err
}
if err = pr.LoadProtectedBranch(ctx); err != nil {
rule, err := git_model.GetFirstMatchProtectedBranchRule(ctx, pr.BaseRepoID, pr.BaseBranch)
if err != nil {
return false, err
}
if pr.ProtectedBranch == nil {
if rule == nil {
return false, nil
}
for _, reviewer := range reviewers {
official, err := git_model.IsUserOfficialReviewer(ctx, pr.ProtectedBranch, reviewer)
official, err := git_model.IsUserOfficialReviewer(ctx, rule, reviewer)
if official || err != nil {
return official, err
}
@ -286,18 +288,19 @@ func IsOfficialReviewerTeam(ctx context.Context, issue *Issue, team *organizatio
if err != nil {
return false, err
}
if err = pr.LoadProtectedBranch(ctx); err != nil {
pb, err := git_model.GetFirstMatchProtectedBranchRule(ctx, pr.BaseRepoID, pr.BaseBranch)
if err != nil {
return false, err
}
if pr.ProtectedBranch == nil {
if pb == nil {
return false, nil
}
if !pr.ProtectedBranch.EnableApprovalsWhitelist {
if !pb.EnableApprovalsWhitelist {
return team.UnitAccessMode(ctx, unit.TypeCode) >= perm.AccessModeWrite, nil
}
return base.Int64sContains(pr.ProtectedBranch.ApprovalsWhitelistTeamIDs, team.ID), nil
return base.Int64sContains(pb.ApprovalsWhitelistTeamIDs, team.ID), nil
}
// CreateReview creates a new review based on opts

View File

@ -432,6 +432,9 @@ var migrations = []Migration{
NewMigration("Update counts of all open milestones", v1_18.UpdateOpenMilestoneCounts),
// v230 -> v231
NewMigration("Add ConfidentialClient column (default true) to OAuth2Application table", v1_18.AddConfidentialClientColumnToOAuth2ApplicationTable),
// Gitea 1.18.0 ends at v231
// v231 -> v232
NewMigration("Add index for hook_task", v1_19.AddIndexForHookTask),
// v232 -> v233
@ -446,6 +449,8 @@ var migrations = []Migration{
NewMigration("Create secrets table", v1_19.CreateSecretsTable),
// v237 -> v238
NewMigration("Drop ForeignReference table", v1_19.DropForeignReferenceTable),
// v238 -> v239
NewMigration("Add updated unix to LFSMetaObject", v1_19.AddUpdatedUnixToLFSMetaObject),
}
// GetCurrentDBVersion returns the current db version

View File

@ -0,0 +1,27 @@
// Copyright 2022 The Gitea Authors. All rights reserved.
// SPDX-License-Identifier: MIT
package v1_19 //nolint
import (
"code.gitea.io/gitea/modules/timeutil"
"xorm.io/xorm"
)
// AddUpdatedUnixToLFSMetaObject adds an updated column to the LFSMetaObject to allow for garbage collection
func AddUpdatedUnixToLFSMetaObject(x *xorm.Engine) error {
// Drop the table introduced in `v211`, it's considered badly designed and doesn't look like to be used.
// See: https://github.com/go-gitea/gitea/issues/21086#issuecomment-1318217453
// LFSMetaObject stores metadata for LFS tracked files.
type LFSMetaObject struct {
ID int64 `xorm:"pk autoincr"`
Oid string `json:"oid" xorm:"UNIQUE(s) INDEX NOT NULL"`
Size int64 `json:"size" xorm:"NOT NULL"`
RepositoryID int64 `xorm:"UNIQUE(s) INDEX NOT NULL"`
CreatedUnix timeutil.TimeStamp `xorm:"created"`
UpdatedUnix timeutil.TimeStamp `xorm:"INDEX updated"`
}
return x.Sync(new(LFSMetaObject))
}

View File

@ -378,7 +378,6 @@ func DeleteTeam(t *organization.Team) error {
return err
}
defer committer.Close()
sess := db.GetEngine(ctx)
if err := t.LoadRepositories(ctx); err != nil {
return err
@ -391,27 +390,15 @@ func DeleteTeam(t *organization.Team) error {
// update branch protections
{
protections := make([]*git_model.ProtectedBranch, 0, 10)
err := sess.In("repo_id",
err := db.GetEngine(ctx).In("repo_id",
builder.Select("id").From("repository").Where(builder.Eq{"owner_id": t.OrgID})).
Find(&protections)
if err != nil {
return fmt.Errorf("findProtectedBranches: %w", err)
}
for _, p := range protections {
lenIDs, lenApprovalIDs, lenMergeIDs := len(p.WhitelistTeamIDs), len(p.ApprovalsWhitelistTeamIDs), len(p.MergeWhitelistTeamIDs)
p.WhitelistTeamIDs = util.SliceRemoveAll(p.WhitelistTeamIDs, t.ID)
p.ApprovalsWhitelistTeamIDs = util.SliceRemoveAll(p.ApprovalsWhitelistTeamIDs, t.ID)
p.MergeWhitelistTeamIDs = util.SliceRemoveAll(p.MergeWhitelistTeamIDs, t.ID)
if lenIDs != len(p.WhitelistTeamIDs) ||
lenApprovalIDs != len(p.ApprovalsWhitelistTeamIDs) ||
lenMergeIDs != len(p.MergeWhitelistTeamIDs) {
if _, err = sess.ID(p.ID).Cols(
"whitelist_team_i_ds",
"merge_whitelist_team_i_ds",
"approvals_whitelist_team_i_ds",
).Update(p); err != nil {
return fmt.Errorf("updateProtectedBranches: %w", err)
}
if err := git_model.RemoveTeamIDFromProtectedBranch(ctx, p, t.ID); err != nil {
return err
}
}
}
@ -432,7 +419,7 @@ func DeleteTeam(t *organization.Team) error {
}
// Update organization number of teams.
if _, err := sess.Exec("UPDATE `user` SET num_teams=num_teams-1 WHERE id=?", t.OrgID); err != nil {
if _, err := db.Exec(ctx, "UPDATE `user` SET num_teams=num_teams-1 WHERE id=?", t.OrgID); err != nil {
return err
}

View File

@ -25,6 +25,7 @@ type BlobSearchOptions struct {
Digest string
Tag string
IsManifest bool
Repository string
}
func (opts *BlobSearchOptions) toConds() builder.Cond {
@ -53,6 +54,15 @@ func (opts *BlobSearchOptions) toConds() builder.Cond {
cond = cond.And(builder.In("package_file.id", builder.Select("package_property.ref_id").Where(propsCond).From("package_property")))
}
if opts.Repository != "" {
var propsCond builder.Cond = builder.Eq{
"package_property.ref_type": packages.PropertyTypePackage,
"package_property.name": container_module.PropertyRepository,
"package_property.value": opts.Repository,
}
cond = cond.And(builder.In("package.id", builder.Select("package_property.ref_id").Where(propsCond).From("package_property")))
}
return cond
}

View File

@ -13,6 +13,7 @@ import (
"code.gitea.io/gitea/models/unit"
user_model "code.gitea.io/gitea/models/user"
"code.gitea.io/gitea/modules/container"
"code.gitea.io/gitea/modules/setting"
"code.gitea.io/gitea/modules/structs"
"code.gitea.io/gitea/modules/util"
@ -496,8 +497,12 @@ func SearchRepositoryCondition(opts *SearchRepoOptions) builder.Cond {
// Only show a repo that either has a topic or description.
subQueryCond := builder.NewCond()
// Topic checking. Topics is non-null.
subQueryCond = subQueryCond.Or(builder.And(builder.Neq{"topics": "null"}, builder.Neq{"topics": "[]"}))
// Topic checking. Topics are present.
if setting.Database.UsePostgreSQL { // postgres stores the topics as json and not as text
subQueryCond = subQueryCond.Or(builder.And(builder.NotNull{"topics"}, builder.Neq{"(topics)::text": "[]"}))
} else {
subQueryCond = subQueryCond.Or(builder.And(builder.Neq{"topics": "null"}, builder.Neq{"topics": "[]"}))
}
// Description checking. Description not empty.
subQueryCond = subQueryCond.Or(builder.Neq{"description": ""})

View File

@ -23,7 +23,6 @@ import (
repo_model "code.gitea.io/gitea/models/repo"
user_model "code.gitea.io/gitea/models/user"
"code.gitea.io/gitea/modules/setting"
"code.gitea.io/gitea/modules/util"
)
// DeleteUser deletes models associated to an user.
@ -141,20 +140,8 @@ func DeleteUser(ctx context.Context, u *user_model.User, purge bool) (err error)
break
}
for _, p := range protections {
lenIDs, lenApprovalIDs, lenMergeIDs := len(p.WhitelistUserIDs), len(p.ApprovalsWhitelistUserIDs), len(p.MergeWhitelistUserIDs)
p.WhitelistUserIDs = util.SliceRemoveAll(p.WhitelistUserIDs, u.ID)
p.ApprovalsWhitelistUserIDs = util.SliceRemoveAll(p.ApprovalsWhitelistUserIDs, u.ID)
p.MergeWhitelistUserIDs = util.SliceRemoveAll(p.MergeWhitelistUserIDs, u.ID)
if lenIDs != len(p.WhitelistUserIDs) ||
lenApprovalIDs != len(p.ApprovalsWhitelistUserIDs) ||
lenMergeIDs != len(p.MergeWhitelistUserIDs) {
if _, err = e.ID(p.ID).Cols(
"whitelist_user_i_ds",
"merge_whitelist_user_i_ds",
"approvals_whitelist_user_i_ds",
).Update(p); err != nil {
return fmt.Errorf("updateProtectedBranches: %w", err)
}
if err := git_model.RemoveUserIDFromProtectedBranch(ctx, p, u.ID); err != nil {
return err
}
}
}

View File

@ -119,14 +119,15 @@ type CanCommitToBranchResults struct {
//
// and branch is not protected for push
func (r *Repository) CanCommitToBranch(ctx context.Context, doer *user_model.User) (CanCommitToBranchResults, error) {
protectedBranch, err := git_model.GetProtectedBranchBy(ctx, r.Repository.ID, r.BranchName)
protectedBranch, err := git_model.GetFirstMatchProtectedBranchRule(ctx, r.Repository.ID, r.BranchName)
if err != nil {
return CanCommitToBranchResults{}, err
}
userCanPush := true
requireSigned := false
if protectedBranch != nil {
userCanPush = protectedBranch.CanUserPush(ctx, doer.ID)
protectedBranch.Repo = r.Repository
userCanPush = protectedBranch.CanUserPush(ctx, doer)
requireSigned = protectedBranch.RequireSignedCommits
}

View File

@ -6,6 +6,7 @@ package doctor
import (
"context"
"fmt"
"time"
"code.gitea.io/gitea/modules/log"
"code.gitea.io/gitea/modules/setting"
@ -29,7 +30,20 @@ func garbageCollectLFSCheck(ctx context.Context, logger log.Logger, autofix bool
return fmt.Errorf("LFS support is disabled")
}
if err := repository.GarbageCollectLFSMetaObjects(ctx, logger, autofix); err != nil {
if err := repository.GarbageCollectLFSMetaObjects(ctx, repository.GarbageCollectLFSMetaObjectsOptions{
Logger: logger,
AutoFix: autofix,
// Only attempt to garbage collect lfs meta objects older than a week as the order of git lfs upload
// and git object upload is not necessarily guaranteed. It's possible to imagine a situation whereby
// an LFS object is uploaded but the git branch is not uploaded immediately, or there are some rapid
// changes in new branches that might lead to lfs objects becoming temporarily unassociated with git
// objects.
//
// It is likely that a week is potentially excessive but it should definitely be enough that any
// unassociated LFS object is genuinely unassociated.
OlderThan: time.Now().Add(-24 * time.Hour * 7),
// We don't set the UpdatedLessRecentlyThan because we want to do a full GC
}); err != nil {
return err
}

View File

@ -225,14 +225,24 @@ func compressOldLogFile(fname string, compressionLevel int) error {
func (log *FileLogger) deleteOldLog() {
dir := filepath.Dir(log.Filename)
_ = filepath.Walk(dir, func(path string, info os.FileInfo, err error) (returnErr error) {
_ = filepath.WalkDir(dir, func(path string, d os.DirEntry, err error) (returnErr error) {
defer func() {
if r := recover(); r != nil {
returnErr = fmt.Errorf("Unable to delete old log '%s', error: %+v", path, r)
}
}()
if !info.IsDir() && info.ModTime().Unix() < (time.Now().Unix()-60*60*24*log.Maxdays) {
if err != nil {
return err
}
if d.IsDir() {
return nil
}
info, err := d.Info()
if err != nil {
return err
}
if info.ModTime().Unix() < (time.Now().Unix() - 60*60*24*log.Maxdays) {
if strings.HasPrefix(filepath.Base(path), filepath.Base(log.Filename)) {
if err := util.Remove(path); err != nil {
returnErr = fmt.Errorf("Failed to remove %s: %w", path, err)

View File
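
Several files in this change set (the rotating file logger above, local storage iteration, template generation, the admin repository listing) move from filepath.Walk to filepath.WalkDir, which avoids an lstat per visited entry. The callback then receives a DirEntry, so it must check the incoming error itself and call d.Info() only when it actually needs metadata such as the modification time. A self-contained sketch of that pattern using only the standard library:

package main

import (
	"fmt"
	"io/fs"
	"path/filepath"
	"time"
)

// listOldFiles shows the WalkDir pattern used in these hunks: handle the walk error
// first, skip directories cheaply via the DirEntry, and only call d.Info() (which
// stats the file) when the modification time is actually needed.
func listOldFiles(dir string, olderThan time.Duration) ([]string, error) {
	var old []string
	err := filepath.WalkDir(dir, func(path string, d fs.DirEntry, err error) error {
		if err != nil {
			return err
		}
		if d.IsDir() {
			return nil
		}
		info, err := d.Info()
		if err != nil {
			return err
		}
		if time.Since(info.ModTime()) > olderThan {
			old = append(old, path)
		}
		return nil
	})
	return old, err
}

func main() {
	files, err := listOldFiles(".", 7*24*time.Hour)
	fmt.Println(files, err)
}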

@ -173,12 +173,12 @@ func generateRepoCommit(ctx context.Context, repo, templateRepo, generateRepo *r
// Avoid walking tree if there are no globs
if len(gt.Globs()) > 0 {
tmpDirSlash := strings.TrimSuffix(filepath.ToSlash(tmpDir), "/") + "/"
if err := filepath.Walk(tmpDirSlash, func(path string, info os.FileInfo, walkErr error) error {
if err := filepath.WalkDir(tmpDirSlash, func(path string, d os.DirEntry, walkErr error) error {
if walkErr != nil {
return walkErr
}
if info.IsDir() {
if d.IsDir() {
return nil
}

View File

@ -129,7 +129,7 @@ func (l *LocalStorage) URL(path, name string) (*url.URL, error) {
// IterateObjects iterates across the objects in the local storage
func (l *LocalStorage) IterateObjects(fn func(path string, obj Object) error) error {
return filepath.Walk(l.dir, func(path string, info os.FileInfo, err error) error {
return filepath.WalkDir(l.dir, func(path string, d os.DirEntry, err error) error {
if err != nil {
return err
}
@ -141,7 +141,7 @@ func (l *LocalStorage) IterateObjects(fn func(path string, obj Object) error) er
if path == l.dir {
return nil
}
if info.IsDir() {
if d.IsDir() {
return nil
}
relPath, err := filepath.Rel(l.dir, path)

View File

@ -22,7 +22,9 @@ type Branch struct {
// BranchProtection represents a branch protection for a repository
type BranchProtection struct {
// Deprecated: true
BranchName string `json:"branch_name"`
RuleName string `json:"rule_name"`
EnablePush bool `json:"enable_push"`
EnablePushWhitelist bool `json:"enable_push_whitelist"`
PushWhitelistUsernames []string `json:"push_whitelist_usernames"`
@ -52,7 +54,9 @@ type BranchProtection struct {
// CreateBranchProtectionOption options for creating a branch protection
type CreateBranchProtectionOption struct {
// Deprecated: true
BranchName string `json:"branch_name"`
RuleName string `json:"rule_name"`
EnablePush bool `json:"enable_push"`
EnablePushWhitelist bool `json:"enable_push_whitelist"`
PushWhitelistUsernames []string `json:"push_whitelist_usernames"`

View File
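
In the API types above, branch_name is kept only as a deprecated field and protections are keyed by rule_name. A sketch of how a client might build the new create option with these structs; the rule, the username and the endpoint mentioned in the comment are illustrative values, not part of this change:

package main

import (
	"encoding/json"
	"fmt"

	api "code.gitea.io/gitea/modules/structs"
)

func main() {
	// A create request keyed by rule name; BranchName is left empty because it is
	// deprecated here. The rule may be a glob such as "release/**".
	opt := api.CreateBranchProtectionOption{
		RuleName:               "release/**",
		EnablePush:             true,
		EnablePushWhitelist:    true,
		PushWhitelistUsernames: []string{"maintainer"},
	}
	payload, _ := json.MarshalIndent(opt, "", "  ")
	// This body would be POSTed to /api/v1/repos/{owner}/{repo}/branch_protections.
	fmt.Println(string(payload))
}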

@ -365,6 +365,7 @@ password_pwned_err = Could not complete request to HaveIBeenPwned
[mail]
view_it_on = View it on %s
reply = or reply to this email directly
link_not_working_do_paste = Not working? Try copying and pasting it to your browser.
hi_user_x = Hi <b>%s</b>,
@ -1824,6 +1825,7 @@ settings.mirror_sync_in_progress = Mirror synchronization is in progress. Check
settings.site = Website
settings.update_settings = Update Settings
settings.branches.update_default_branch = Update Default Branch
settings.branches.add_new_rule = Add New Rule
settings.advanced_settings = Advanced Settings
settings.wiki_desc = Enable Repository Wiki
settings.use_internal_wiki = Use Built-In Wiki
@ -2069,6 +2071,8 @@ settings.deploy_key_deletion_desc = Removing a deploy key will revoke its access
settings.deploy_key_deletion_success = The deploy key has been removed.
settings.branches = Branches
settings.protected_branch = Branch Protection
settings.protected_branch.save_rule = Save Rule
settings.protected_branch.delete_rule = Delete Rule
settings.protected_branch_can_push = Allow push?
settings.protected_branch_can_push_yes = You can push
settings.protected_branch_can_push_no = You cannot push
@ -2103,15 +2107,17 @@ settings.dismiss_stale_approvals = Dismiss stale approvals
settings.dismiss_stale_approvals_desc = When new commits that change the content of the pull request are pushed to the branch, old approvals will be dismissed.
settings.require_signed_commits = Require Signed Commits
settings.require_signed_commits_desc = Reject pushes to this branch if they are unsigned or unverifiable.
settings.protect_branch_name_pattern = Protected Branch Name Pattern
settings.protect_protected_file_patterns = Protected file patterns (separated using semicolon '\;'):
settings.protect_protected_file_patterns_desc = Protected files that are not allowed to be changed directly even if user has rights to add, edit, or delete files in this branch. Multiple patterns can be separated using semicolon ('\;'). See <a href="https://pkg.go.dev/github.com/gobwas/glob#Compile">github.com/gobwas/glob</a> documentation for pattern syntax. Examples: <code>.drone.yml</code>, <code>/docs/**/*.txt</code>.
settings.protect_unprotected_file_patterns = Unprotected file patterns (separated using semicolon '\;'):
settings.protect_unprotected_file_patterns_desc = Unprotected files that are allowed to be changed directly if user has write access, bypassing push restriction. Multiple patterns can be separated using semicolon ('\;'). See <a href="https://pkg.go.dev/github.com/gobwas/glob#Compile">github.com/gobwas/glob</a> documentation for pattern syntax. Examples: <code>.drone.yml</code>, <code>/docs/**/*.txt</code>.
settings.add_protected_branch = Enable protection
settings.delete_protected_branch = Disable protection
settings.update_protect_branch_success = Branch protection for branch '%s' has been updated.
settings.remove_protected_branch_success = Branch protection for branch '%s' has been disabled.
settings.protected_branch_deletion = Disable Branch Protection
settings.update_protect_branch_success = Branch protection for rule '%s' has been updated.
settings.remove_protected_branch_success = Branch protection for rule '%s' has been removed.
settings.remove_protected_branch_failed = Removing branch protection rule '%s' failed.
settings.protected_branch_deletion = Delete Branch Protection
settings.protected_branch_deletion_desc = Disabling branch protection allows users with write permission to push to the branch. Continue?
settings.block_rejected_reviews = Block merge on rejected reviews
settings.block_rejected_reviews_desc = Merging will not be possible when changes are requested by official reviewers, even if there are enough approvals.
@ -2124,6 +2130,7 @@ settings.default_merge_style_desc = Default merge style for pull requests:
settings.choose_branch = Choose a branch…
settings.no_protected_branch = There are no protected branches.
settings.edit_protected_branch = Edit
settings.protected_branch_required_rule_name = Required rule name
settings.protected_branch_required_approvals_min = Required approvals cannot be negative.
settings.tags = Tags
settings.tags.protection = Tag Protection
@ -2548,6 +2555,7 @@ dashboard.delete_old_actions = Delete all old actions from database
dashboard.delete_old_actions.started = Delete all old actions from database started.
dashboard.update_checker = Update checker
dashboard.delete_old_system_notices = Delete all old system notices from database
dashboard.gc_lfs = Garbage collect LFS meta objects
users.user_manage_panel = User Account Management
users.new_account = Create User Account

View File

@ -33,6 +33,60 @@ func saveAsPackageBlob(hsr packages_module.HashedSizeReader, pi *packages_servic
contentStore := packages_module.NewContentStore()
uploadVersion, err := getOrCreateUploadVersion(pi)
if err != nil {
return nil, err
}
err = db.WithTx(db.DefaultContext, func(ctx context.Context) error {
pb, exists, err = packages_model.GetOrInsertBlob(ctx, pb)
if err != nil {
log.Error("Error inserting package blob: %v", err)
return err
}
// FIXME: Workaround to be removed in v1.20
// https://github.com/go-gitea/gitea/issues/19586
if exists {
err = contentStore.Has(packages_module.BlobHash256Key(pb.HashSHA256))
if err != nil && (errors.Is(err, util.ErrNotExist) || errors.Is(err, os.ErrNotExist)) {
log.Debug("Package registry inconsistent: blob %s does not exist on file system", pb.HashSHA256)
exists = false
}
}
if !exists {
if err := contentStore.Save(packages_module.BlobHash256Key(pb.HashSHA256), hsr, hsr.Size()); err != nil {
log.Error("Error saving package blob in content store: %v", err)
return err
}
}
return createFileForBlob(ctx, uploadVersion, pb)
})
if err != nil {
if !exists {
if err := contentStore.Delete(packages_module.BlobHash256Key(pb.HashSHA256)); err != nil {
log.Error("Error deleting package blob from content store: %v", err)
}
}
return nil, err
}
return pb, nil
}
// mountBlob mounts the specific blob to a different package
func mountBlob(pi *packages_service.PackageInfo, pb *packages_model.PackageBlob) error {
uploadVersion, err := getOrCreateUploadVersion(pi)
if err != nil {
return err
}
return db.WithTx(db.DefaultContext, func(ctx context.Context) error {
return createFileForBlob(ctx, uploadVersion, pb)
})
}
func getOrCreateUploadVersion(pi *packages_service.PackageInfo) (*packages_model.PackageVersion, error) {
var uploadVersion *packages_model.PackageVersion
// FIXME: Replace usage of mutex with database transaction
@ -83,66 +137,35 @@ func saveAsPackageBlob(hsr packages_module.HashedSizeReader, pi *packages_servic
return nil
})
uploadVersionMutex.Unlock()
if err != nil {
return nil, err
return uploadVersion, err
}
func createFileForBlob(ctx context.Context, pv *packages_model.PackageVersion, pb *packages_model.PackageBlob) error {
filename := strings.ToLower(fmt.Sprintf("sha256_%s", pb.HashSHA256))
pf := &packages_model.PackageFile{
VersionID: pv.ID,
BlobID: pb.ID,
Name: filename,
LowerName: filename,
CompositeKey: packages_model.EmptyFileKey,
}
var err error
if pf, err = packages_model.TryInsertFile(ctx, pf); err != nil {
if err == packages_model.ErrDuplicatePackageFile {
return nil
}
log.Error("Error inserting package file: %v", err)
return err
}
err = db.WithTx(db.DefaultContext, func(ctx context.Context) error {
pb, exists, err = packages_model.GetOrInsertBlob(ctx, pb)
if err != nil {
log.Error("Error inserting package blob: %v", err)
return err
}
// FIXME: Workaround to be removed in v1.20
// https://github.com/go-gitea/gitea/issues/19586
if exists {
err = contentStore.Has(packages_module.BlobHash256Key(pb.HashSHA256))
if err != nil && (errors.Is(err, util.ErrNotExist) || errors.Is(err, os.ErrNotExist)) {
log.Debug("Package registry inconsistent: blob %s does not exist on file system", pb.HashSHA256)
exists = false
}
}
if !exists {
if err := contentStore.Save(packages_module.BlobHash256Key(pb.HashSHA256), hsr, hsr.Size()); err != nil {
log.Error("Error saving package blob in content store: %v", err)
return err
}
}
filename := strings.ToLower(fmt.Sprintf("sha256_%s", pb.HashSHA256))
pf := &packages_model.PackageFile{
VersionID: uploadVersion.ID,
BlobID: pb.ID,
Name: filename,
LowerName: filename,
CompositeKey: packages_model.EmptyFileKey,
}
if pf, err = packages_model.TryInsertFile(ctx, pf); err != nil {
if err == packages_model.ErrDuplicatePackageFile {
return nil
}
log.Error("Error inserting package file: %v", err)
return err
}
if _, err := packages_model.InsertProperty(ctx, packages_model.PropertyTypeFile, pf.ID, container_module.PropertyDigest, digestFromPackageBlob(pb)); err != nil {
log.Error("Error setting package file property: %v", err)
return err
}
return nil
})
if err != nil {
if !exists {
if err := contentStore.Delete(packages_module.BlobHash256Key(pb.HashSHA256)); err != nil {
log.Error("Error deleting package blob from content store: %v", err)
}
}
return nil, err
if _, err := packages_model.InsertProperty(ctx, packages_model.PropertyTypeFile, pf.ID, container_module.PropertyDigest, digestFromPackageBlob(pb)); err != nil {
log.Error("Error setting package file property: %v", err)
return err
}
return pb, nil
return nil
}
func deleteBlob(ownerID int64, image, digest string) error {

View File

@ -195,10 +195,15 @@ func InitiateUploadBlob(ctx *context.Context) {
from := ctx.FormTrim("from")
if mount != "" {
blob, _ := workaroundGetContainerBlob(ctx, &container_model.BlobSearchOptions{
Image: from,
Digest: mount,
Repository: from,
Digest: mount,
})
if blob != nil {
if err := mountBlob(&packages_service.PackageInfo{Owner: ctx.Package.Owner, Name: image}, blob.Blob); err != nil {
apiError(ctx, http.StatusInternalServerError, err)
return
}
setResponseHeaders(ctx.Resp, &containerHeaders{
Location: fmt.Sprintf("/v2/%s/%s/blobs/%s", ctx.Package.Owner.LowerName, image, mount),
ContentDigest: mount,

View File
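
The InitiateUploadBlob change above makes the container registry honour the mount and from parameters, linking an existing blob into the target image instead of requiring a fresh upload. In the OCI distribution API the client requests this with a single POST; a hedged Go sketch of such a request, where host, image names, digest and token are placeholders:

package main

import (
	"fmt"
	"net/http"
)

// mountRequest issues the cross-repository blob mount described by the OCI
// distribution spec: POST /v2/<name>/blobs/uploads/?mount=<digest>&from=<source>.
// On success the Location header points at the mounted blob, matching what the
// handler above returns.
func mountRequest(host, target, source, digest, token string) (*http.Response, error) {
	url := fmt.Sprintf("https://%s/v2/%s/blobs/uploads/?mount=%s&from=%s", host, target, digest, source)
	req, err := http.NewRequest(http.MethodPost, url, nil)
	if err != nil {
		return nil, err
	}
	req.Header.Set("Authorization", "Bearer "+token)
	return http.DefaultClient.Do(req)
}

func main() {
	resp, err := mountRequest("gitea.example.com", "owner/new-image", "owner/old-image", "sha256:…", "token")
	if err == nil {
		fmt.Println(resp.Status)
	}
}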

@ -70,7 +70,7 @@ func GetBranch(ctx *context.APIContext) {
return
}
branchProtection, err := git_model.GetProtectedBranchBy(ctx, ctx.Repo.Repository.ID, branchName)
branchProtection, err := git_model.GetFirstMatchProtectedBranchRule(ctx, ctx.Repo.Repository.ID, branchName)
if err != nil {
ctx.Error(http.StatusInternalServerError, "GetBranchProtection", err)
return
@ -124,7 +124,7 @@ func DeleteBranch(ctx *context.APIContext) {
ctx.NotFound(err)
case errors.Is(err, repo_service.ErrBranchIsDefault):
ctx.Error(http.StatusForbidden, "DefaultBranch", fmt.Errorf("can not delete default branch"))
case errors.Is(err, repo_service.ErrBranchIsProtected):
case errors.Is(err, git_model.ErrBranchIsProtected):
ctx.Error(http.StatusForbidden, "IsProtectedBranch", fmt.Errorf("branch protected"))
default:
ctx.Error(http.StatusInternalServerError, "DeleteBranch", err)
@ -206,7 +206,7 @@ func CreateBranch(ctx *context.APIContext) {
return
}
branchProtection, err := git_model.GetProtectedBranchBy(ctx, ctx.Repo.Repository.ID, branch.Name)
branchProtection, err := git_model.GetFirstMatchProtectedBranchRule(ctx, ctx.Repo.Repository.ID, branch.Name)
if err != nil {
ctx.Error(http.StatusInternalServerError, "GetBranchProtection", err)
return
@ -257,6 +257,12 @@ func ListBranches(ctx *context.APIContext) {
listOptions := utils.GetListOptions(ctx)
if !ctx.Repo.Repository.IsEmpty && ctx.Repo.GitRepo != nil {
rules, err := git_model.FindRepoProtectedBranchRules(ctx, ctx.Repo.Repository.ID)
if err != nil {
ctx.Error(http.StatusInternalServerError, "FindMatchedProtectedBranchRules", err)
return
}
skip, _ := listOptions.GetStartEnd()
branches, total, err := ctx.Repo.GitRepo.GetBranches(skip, listOptions.PageSize)
if err != nil {
@ -276,11 +282,8 @@ func ListBranches(ctx *context.APIContext) {
ctx.Error(http.StatusInternalServerError, "GetCommit", err)
return
}
branchProtection, err := git_model.GetProtectedBranchBy(ctx, ctx.Repo.Repository.ID, branches[i].Name)
if err != nil {
ctx.Error(http.StatusInternalServerError, "GetProtectedBranchBy", err)
return
}
branchProtection := rules.GetFirstMatched(branches[i].Name)
apiBranch, err := convert.ToBranch(ctx.Repo.Repository, branches[i], c, branchProtection, ctx.Doer, ctx.Repo.IsAdmin())
if err != nil {
ctx.Error(http.StatusInternalServerError, "convert.ToBranch", err)
@ -328,7 +331,7 @@ func GetBranchProtection(ctx *context.APIContext) {
repo := ctx.Repo.Repository
bpName := ctx.Params(":name")
bp, err := git_model.GetProtectedBranchBy(ctx, repo.ID, bpName)
bp, err := git_model.GetProtectedBranchRuleByName(ctx, repo.ID, bpName)
if err != nil {
ctx.Error(http.StatusInternalServerError, "GetProtectedBranchByID", err)
return
@ -364,7 +367,7 @@ func ListBranchProtections(ctx *context.APIContext) {
// "$ref": "#/responses/BranchProtectionList"
repo := ctx.Repo.Repository
bps, err := git_model.GetProtectedBranches(ctx, repo.ID)
bps, err := git_model.FindRepoProtectedBranchRules(ctx, repo.ID)
if err != nil {
ctx.Error(http.StatusInternalServerError, "GetProtectedBranches", err)
return
@ -414,13 +417,18 @@ func CreateBranchProtection(ctx *context.APIContext) {
form := web.GetForm(ctx).(*api.CreateBranchProtectionOption)
repo := ctx.Repo.Repository
// Currently protection must match an actual branch
if !git.IsBranchExist(ctx.Req.Context(), ctx.Repo.Repository.RepoPath(), form.BranchName) {
ctx.NotFound()
return
ruleName := form.RuleName
if ruleName == "" {
ruleName = form.BranchName //nolint
}
protectBranch, err := git_model.GetProtectedBranchBy(ctx, repo.ID, form.BranchName)
isPlainRule := !git_model.IsRuleNameSpecial(ruleName)
var isBranchExist bool
if isPlainRule {
isBranchExist = git.IsBranchExist(ctx.Req.Context(), ctx.Repo.Repository.RepoPath(), ruleName)
}
protectBranch, err := git_model.GetProtectedBranchRuleByName(ctx, repo.ID, ruleName)
if err != nil {
ctx.Error(http.StatusInternalServerError, "GetProtectBranchOfRepoByName", err)
return
@ -494,7 +502,7 @@ func CreateBranchProtection(ctx *context.APIContext) {
protectBranch = &git_model.ProtectedBranch{
RepoID: ctx.Repo.Repository.ID,
BranchName: form.BranchName,
RuleName: form.RuleName,
CanPush: form.EnablePush,
EnableWhitelist: form.EnablePush && form.EnablePushWhitelist,
EnableMergeWhitelist: form.EnableMergeWhitelist,
@ -525,13 +533,42 @@ func CreateBranchProtection(ctx *context.APIContext) {
return
}
if err = pull_service.CheckPrsForBaseBranch(ctx.Repo.Repository, protectBranch.BranchName); err != nil {
ctx.Error(http.StatusInternalServerError, "CheckPrsForBaseBranch", err)
return
if isBranchExist {
if err = pull_service.CheckPRsForBaseBranch(ctx.Repo.Repository, form.RuleName); err != nil {
ctx.Error(http.StatusInternalServerError, "CheckPRsForBaseBranch", err)
return
}
} else {
if !isPlainRule {
if ctx.Repo.GitRepo == nil {
ctx.Repo.GitRepo, err = git.OpenRepository(ctx, ctx.Repo.Repository.RepoPath())
if err != nil {
ctx.Error(http.StatusInternalServerError, "OpenRepository", err)
return
}
defer func() {
ctx.Repo.GitRepo.Close()
ctx.Repo.GitRepo = nil
}()
}
// FIXME: since we only need to recheck files protected rules, we could improve this
matchedBranches, err := git_model.FindAllMatchedBranches(ctx, ctx.Repo.GitRepo, form.RuleName)
if err != nil {
ctx.Error(http.StatusInternalServerError, "FindAllMatchedBranches", err)
return
}
for _, branchName := range matchedBranches {
if err = pull_service.CheckPRsForBaseBranch(ctx.Repo.Repository, branchName); err != nil {
ctx.Error(http.StatusInternalServerError, "CheckPRsForBaseBranch", err)
return
}
}
}
}
// Reload from db to get all whitelists
bp, err := git_model.GetProtectedBranchBy(ctx, ctx.Repo.Repository.ID, form.BranchName)
bp, err := git_model.GetProtectedBranchRuleByName(ctx, ctx.Repo.Repository.ID, form.RuleName)
if err != nil {
ctx.Error(http.StatusInternalServerError, "GetProtectedBranchByID", err)
return
@ -583,7 +620,7 @@ func EditBranchProtection(ctx *context.APIContext) {
form := web.GetForm(ctx).(*api.EditBranchProtectionOption)
repo := ctx.Repo.Repository
bpName := ctx.Params(":name")
protectBranch, err := git_model.GetProtectedBranchBy(ctx, repo.ID, bpName)
protectBranch, err := git_model.GetProtectedBranchRuleByName(ctx, repo.ID, bpName)
if err != nil {
ctx.Error(http.StatusInternalServerError, "GetProtectedBranchByID", err)
return
@ -760,13 +797,49 @@ func EditBranchProtection(ctx *context.APIContext) {
return
}
if err = pull_service.CheckPrsForBaseBranch(ctx.Repo.Repository, protectBranch.BranchName); err != nil {
ctx.Error(http.StatusInternalServerError, "CheckPrsForBaseBranch", err)
return
isPlainRule := !git_model.IsRuleNameSpecial(bpName)
var isBranchExist bool
if isPlainRule {
isBranchExist = git.IsBranchExist(ctx.Req.Context(), ctx.Repo.Repository.RepoPath(), bpName)
}
if isBranchExist {
if err = pull_service.CheckPRsForBaseBranch(ctx.Repo.Repository, bpName); err != nil {
ctx.Error(http.StatusInternalServerError, "CheckPrsForBaseBranch", err)
return
}
} else {
if !isPlainRule {
if ctx.Repo.GitRepo == nil {
ctx.Repo.GitRepo, err = git.OpenRepository(ctx, ctx.Repo.Repository.RepoPath())
if err != nil {
ctx.Error(http.StatusInternalServerError, "OpenRepository", err)
return
}
defer func() {
ctx.Repo.GitRepo.Close()
ctx.Repo.GitRepo = nil
}()
}
// FIXME: since we only need to recheck files protected rules, we could improve this
matchedBranches, err := git_model.FindAllMatchedBranches(ctx, ctx.Repo.GitRepo, protectBranch.RuleName)
if err != nil {
ctx.Error(http.StatusInternalServerError, "FindAllMatchedBranches", err)
return
}
for _, branchName := range matchedBranches {
if err = pull_service.CheckPRsForBaseBranch(ctx.Repo.Repository, branchName); err != nil {
ctx.Error(http.StatusInternalServerError, "CheckPrsForBaseBranch", err)
return
}
}
}
}
// Reload from db to ensure we get all whitelists
bp, err := git_model.GetProtectedBranchBy(ctx, repo.ID, bpName)
bp, err := git_model.GetProtectedBranchRuleByName(ctx, repo.ID, bpName)
if err != nil {
ctx.Error(http.StatusInternalServerError, "GetProtectedBranchBy", err)
return
@ -810,7 +883,7 @@ func DeleteBranchProtection(ctx *context.APIContext) {
repo := ctx.Repo.Repository
bpName := ctx.Params(":name")
bp, err := git_model.GetProtectedBranchBy(ctx, repo.ID, bpName)
bp, err := git_model.GetProtectedBranchRuleByName(ctx, repo.ID, bpName)
if err != nil {
ctx.Error(http.StatusInternalServerError, "GetProtectedBranchByID", err)
return

View File
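
CreateBranchProtection and EditBranchProtection above now distinguish plain rule names from glob rules via IsRuleNameSpecial and, for globs, re-run CheckPRsForBaseBranch on every branch returned by FindAllMatchedBranches. The sketch below is only a guess at what the "special" test amounts to — a rule name is treated as a glob once it contains wildcard characters — and the exact wildcard set is an assumption, not the actual implementation:

package main

import (
	"fmt"
	"strings"
)

// isRuleNameSpecial (assumed behaviour): a rule name containing any wildcard is a
// glob that may match several branches; a plain name protects exactly one branch.
func isRuleNameSpecial(ruleName string) bool {
	return strings.ContainsAny(ruleName, "*?[]")
}

func main() {
	fmt.Println(isRuleNameSpecial("main"))       // false: check that single branch
	fmt.Println(isRuleNameSpecial("release/**")) // true: recheck every matched branch
}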

@ -14,6 +14,7 @@ import (
"code.gitea.io/gitea/models"
activities_model "code.gitea.io/gitea/models/activities"
git_model "code.gitea.io/gitea/models/git"
issues_model "code.gitea.io/gitea/models/issues"
access_model "code.gitea.io/gitea/models/perm/access"
pull_model "code.gitea.io/gitea/models/pull"
@ -902,7 +903,7 @@ func MergePullRequest(ctx *context.APIContext) {
ctx.NotFound(err)
case errors.Is(err, repo_service.ErrBranchIsDefault):
ctx.Error(http.StatusForbidden, "DefaultBranch", fmt.Errorf("can not delete default branch"))
case errors.Is(err, repo_service.ErrBranchIsProtected):
case errors.Is(err, git_model.ErrBranchIsProtected):
ctx.Error(http.StatusForbidden, "IsProtectedBranch", fmt.Errorf("branch protected"))
default:
ctx.Error(http.StatusInternalServerError, "DeleteBranch", err)

View File

@ -156,7 +156,7 @@ func preReceiveBranch(ctx *preReceiveContext, oldCommitID, newCommitID, refFullN
return
}
protectBranch, err := git_model.GetProtectedBranchBy(ctx, repo.ID, branchName)
protectBranch, err := git_model.GetFirstMatchProtectedBranchRule(ctx, repo.ID, branchName)
if err != nil {
log.Error("Unable to get protected branch: %s in %-v Error: %v", branchName, repo, err)
ctx.JSON(http.StatusInternalServerError, private.Response{
@ -166,9 +166,10 @@ func preReceiveBranch(ctx *preReceiveContext, oldCommitID, newCommitID, refFullN
}
// Allow pushes to non-protected branches
if protectBranch == nil || !protectBranch.IsProtected() {
if protectBranch == nil {
return
}
protectBranch.Repo = repo
// This ref is a protected branch.
//
@ -238,7 +239,6 @@ func preReceiveBranch(ctx *preReceiveContext, oldCommitID, newCommitID, refFullN
Err: fmt.Sprintf("Unable to check file protection for commits from %s to %s: %v", oldCommitID, newCommitID, err),
})
return
}
changedProtectedfiles = true
@ -251,7 +251,15 @@ func preReceiveBranch(ctx *preReceiveContext, oldCommitID, newCommitID, refFullN
if ctx.opts.DeployKeyID != 0 {
canPush = !changedProtectedfiles && protectBranch.CanPush && (!protectBranch.EnableWhitelist || protectBranch.WhitelistDeployKeys)
} else {
canPush = !changedProtectedfiles && protectBranch.CanUserPush(ctx, ctx.opts.UserID)
user, err := user_model.GetUserByID(ctx, ctx.opts.UserID)
if err != nil {
log.Error("Unable to GetUserByID for commits from %s to %s in %-v: %v", oldCommitID, newCommitID, repo, err)
ctx.JSON(http.StatusInternalServerError, private.Response{
Err: fmt.Sprintf("Unable to GetUserByID for commits from %s to %s: %v", oldCommitID, newCommitID, err),
})
return
}
canPush = !changedProtectedfiles && protectBranch.CanUserPush(ctx, user)
}
// 6. If we're not allowed to push directly

View File

@ -99,7 +99,7 @@ func DeleteBranchPost(ctx *context.Context) {
case errors.Is(err, repo_service.ErrBranchIsDefault):
log.Debug("DeleteBranch: Can't delete default branch '%s'", branchName)
ctx.Flash.Error(ctx.Tr("repo.branch.default_deletion_failed", branchName))
case errors.Is(err, repo_service.ErrBranchIsProtected):
case errors.Is(err, git_model.ErrBranchIsProtected):
log.Debug("DeleteBranch: Can't delete protected branch '%s'", branchName)
ctx.Flash.Error(ctx.Tr("repo.branch.protected_deletion_failed", branchName))
default:
@ -189,9 +189,9 @@ func loadBranches(ctx *context.Context, skip, limit int) (*Branch, []*Branch, in
return nil, nil, 0
}
protectedBranches, err := git_model.GetProtectedBranches(ctx, ctx.Repo.Repository.ID)
rules, err := git_model.FindRepoProtectedBranchRules(ctx, ctx.Repo.Repository.ID)
if err != nil {
ctx.ServerError("GetProtectedBranches", err)
ctx.ServerError("FindRepoProtectedBranchRules", err)
return nil, nil, 0
}
@ -208,7 +208,7 @@ func loadBranches(ctx *context.Context, skip, limit int) (*Branch, []*Branch, in
continue
}
branch := loadOneBranch(ctx, rawBranches[i], defaultBranch, protectedBranches, repoIDToRepo, repoIDToGitRepo)
branch := loadOneBranch(ctx, rawBranches[i], defaultBranch, &rules, repoIDToRepo, repoIDToGitRepo)
if branch == nil {
return nil, nil, 0
}
@ -220,7 +220,7 @@ func loadBranches(ctx *context.Context, skip, limit int) (*Branch, []*Branch, in
if defaultBranch != nil {
// Always add the default branch
log.Debug("loadOneBranch: load default: '%s'", defaultBranch.Name)
defaultBranchBranch = loadOneBranch(ctx, defaultBranch, defaultBranch, protectedBranches, repoIDToRepo, repoIDToGitRepo)
defaultBranchBranch = loadOneBranch(ctx, defaultBranch, defaultBranch, &rules, repoIDToRepo, repoIDToGitRepo)
branches = append(branches, defaultBranchBranch)
}
@ -236,7 +236,7 @@ func loadBranches(ctx *context.Context, skip, limit int) (*Branch, []*Branch, in
return defaultBranchBranch, branches, totalNumOfBranches
}
func loadOneBranch(ctx *context.Context, rawBranch, defaultBranch *git.Branch, protectedBranches []*git_model.ProtectedBranch,
func loadOneBranch(ctx *context.Context, rawBranch, defaultBranch *git.Branch, protectedBranches *git_model.ProtectedBranchRules,
repoIDToRepo map[int64]*repo_model.Repository,
repoIDToGitRepo map[int64]*git.Repository,
) *Branch {
@ -249,13 +249,8 @@ func loadOneBranch(ctx *context.Context, rawBranch, defaultBranch *git.Branch, p
}
branchName := rawBranch.Name
var isProtected bool
for _, b := range protectedBranches {
if b.BranchName == branchName {
isProtected = true
break
}
}
p := protectedBranches.GetFirstMatched(branchName)
isProtected := p != nil
divergence := &git.DivergeObject{
Ahead: -1,

View File

@ -1604,7 +1604,7 @@ func ViewIssue(ctx *context.Context) {
if perm.CanWrite(unit.TypeCode) {
// Check if branch is not protected
if pull.HeadBranch != pull.HeadRepo.DefaultBranch {
if protected, err := git_model.IsProtectedBranch(ctx, pull.HeadRepo.ID, pull.HeadBranch); err != nil {
if protected, err := git_model.IsBranchProtected(ctx, pull.HeadRepo.ID, pull.HeadBranch); err != nil {
log.Error("IsProtectedBranch: %v", err)
} else if !protected {
canDelete = true
@ -1680,22 +1680,25 @@ func ViewIssue(ctx *context.Context) {
ctx.Data["DefaultSquashMergeMessage"] = defaultSquashMergeMessage
ctx.Data["DefaultSquashMergeBody"] = defaultSquashMergeBody
if err = pull.LoadProtectedBranch(ctx); err != nil {
pb, err := git_model.GetFirstMatchProtectedBranchRule(ctx, pull.BaseRepoID, pull.BaseBranch)
if err != nil {
ctx.ServerError("LoadProtectedBranch", err)
return
}
ctx.Data["ShowMergeInstructions"] = true
if pull.ProtectedBranch != nil {
if pb != nil {
pb.Repo = pull.BaseRepo
var showMergeInstructions bool
if ctx.Doer != nil {
showMergeInstructions = pull.ProtectedBranch.CanUserPush(ctx, ctx.Doer.ID)
showMergeInstructions = pb.CanUserPush(ctx, ctx.Doer)
}
ctx.Data["IsBlockedByApprovals"] = !issues_model.HasEnoughApprovals(ctx, pull.ProtectedBranch, pull)
ctx.Data["IsBlockedByRejection"] = issues_model.MergeBlockedByRejectedReview(ctx, pull.ProtectedBranch, pull)
ctx.Data["IsBlockedByOfficialReviewRequests"] = issues_model.MergeBlockedByOfficialReviewRequests(ctx, pull.ProtectedBranch, pull)
ctx.Data["IsBlockedByOutdatedBranch"] = issues_model.MergeBlockedByOutdatedBranch(pull.ProtectedBranch, pull)
ctx.Data["GrantedApprovals"] = issues_model.GetGrantedApprovalsCount(ctx, pull.ProtectedBranch, pull)
ctx.Data["RequireSigned"] = pull.ProtectedBranch.RequireSignedCommits
ctx.Data["ProtectedBranch"] = pb
ctx.Data["IsBlockedByApprovals"] = !issues_model.HasEnoughApprovals(ctx, pb, pull)
ctx.Data["IsBlockedByRejection"] = issues_model.MergeBlockedByRejectedReview(ctx, pb, pull)
ctx.Data["IsBlockedByOfficialReviewRequests"] = issues_model.MergeBlockedByOfficialReviewRequests(ctx, pb, pull)
ctx.Data["IsBlockedByOutdatedBranch"] = issues_model.MergeBlockedByOutdatedBranch(pb, pull)
ctx.Data["GrantedApprovals"] = issues_model.GetGrantedApprovalsCount(ctx, pb, pull)
ctx.Data["RequireSigned"] = pb.RequireSignedCommits
ctx.Data["ChangedProtectedFiles"] = pull.ChangedProtectedFiles
ctx.Data["IsBlockedByChangedProtectedFiles"] = len(pull.ChangedProtectedFiles) != 0
ctx.Data["ChangedProtectedFilesNum"] = len(pull.ChangedProtectedFiles)

View File

@ -440,11 +440,12 @@ func PrepareViewPullInfo(ctx *context.Context, issue *issues_model.Issue) *git.C
setMergeTarget(ctx, pull)
if err := pull.LoadProtectedBranch(ctx); err != nil {
pb, err := git_model.GetFirstMatchProtectedBranchRule(ctx, repo.ID, pull.BaseBranch)
if err != nil {
ctx.ServerError("LoadProtectedBranch", err)
return nil
}
ctx.Data["EnableStatusCheck"] = pull.ProtectedBranch != nil && pull.ProtectedBranch.EnableStatusCheck
ctx.Data["EnableStatusCheck"] = pb != nil && pb.EnableStatusCheck
var baseGitRepo *git.Repository
if pull.BaseRepoID == ctx.Repo.Repository.ID && ctx.Repo.GitRepo != nil {
@ -570,16 +571,16 @@ func PrepareViewPullInfo(ctx *context.Context, issue *issues_model.Issue) *git.C
ctx.Data["LatestCommitStatus"] = git_model.CalcCommitStatus(commitStatuses)
}
if pull.ProtectedBranch != nil && pull.ProtectedBranch.EnableStatusCheck {
if pb != nil && pb.EnableStatusCheck {
ctx.Data["is_context_required"] = func(context string) bool {
for _, c := range pull.ProtectedBranch.StatusCheckContexts {
for _, c := range pb.StatusCheckContexts {
if c == context {
return true
}
}
return false
}
ctx.Data["RequiredStatusCheckState"] = pull_service.MergeRequiredContextsCommitStatus(commitStatuses, pull.ProtectedBranch.StatusCheckContexts)
ctx.Data["RequiredStatusCheckState"] = pull_service.MergeRequiredContextsCommitStatus(commitStatuses, pb.StatusCheckContexts)
}
ctx.Data["HeadBranchMovedOn"] = headBranchSha != sha
@ -752,16 +753,17 @@ func ViewPullFiles(ctx *context.Context) {
return
}
if err = pull.LoadProtectedBranch(ctx); err != nil {
pb, err := git_model.GetFirstMatchProtectedBranchRule(ctx, pull.BaseRepoID, pull.BaseBranch)
if err != nil {
ctx.ServerError("LoadProtectedBranch", err)
return
}
if pull.ProtectedBranch != nil {
glob := pull.ProtectedBranch.GetProtectedFilePatterns()
if pb != nil {
glob := pb.GetProtectedFilePatterns()
if len(glob) != 0 {
for _, file := range diff.Files {
file.IsProtected = pull.ProtectedBranch.IsProtectedFile(glob, file.Name)
file.IsProtected = pb.IsProtectedFile(glob, file.Name)
}
}
}
@ -1400,7 +1402,7 @@ func deleteBranch(ctx *context.Context, pr *issues_model.PullRequest, gitRepo *g
ctx.Flash.Error(ctx.Tr("repo.branch.deletion_failed", fullBranchName))
case errors.Is(err, repo_service.ErrBranchIsDefault):
ctx.Flash.Error(ctx.Tr("repo.branch.deletion_failed", fullBranchName))
case errors.Is(err, repo_service.ErrBranchIsProtected):
case errors.Is(err, git_model.ErrBranchIsProtected):
ctx.Flash.Error(ctx.Tr("repo.branch.deletion_failed", fullBranchName))
default:
log.Error("DeleteBranch: %v", err)

View File

@ -56,7 +56,6 @@ const (
tplGithooks base.TplName = "repo/settings/githooks"
tplGithookEdit base.TplName = "repo/settings/githook_edit"
tplDeployKeys base.TplName = "repo/settings/deploy_keys"
tplProtectedBranch base.TplName = "repo/settings/protected_branch"
)
// SettingsCtxData is a middleware that sets all the general context data for the

View File

@ -19,47 +19,33 @@ import (
"code.gitea.io/gitea/modules/git"
"code.gitea.io/gitea/modules/log"
"code.gitea.io/gitea/modules/setting"
"code.gitea.io/gitea/modules/util"
"code.gitea.io/gitea/modules/web"
"code.gitea.io/gitea/services/forms"
pull_service "code.gitea.io/gitea/services/pull"
"code.gitea.io/gitea/services/repository"
)
// ProtectedBranch render the page to protect the repository
func ProtectedBranch(ctx *context.Context) {
const (
tplProtectedBranch base.TplName = "repo/settings/protected_branch"
)
// ProtectedBranchRules render the page to protect the repository
func ProtectedBranchRules(ctx *context.Context) {
ctx.Data["Title"] = ctx.Tr("repo.settings")
ctx.Data["PageIsSettingsBranches"] = true
protectedBranches, err := git_model.GetProtectedBranches(ctx, ctx.Repo.Repository.ID)
rules, err := git_model.FindRepoProtectedBranchRules(ctx, ctx.Repo.Repository.ID)
if err != nil {
ctx.ServerError("GetProtectedBranches", err)
return
}
ctx.Data["ProtectedBranches"] = protectedBranches
branches := ctx.Data["Branches"].([]string)
leftBranches := make([]string, 0, len(branches)-len(protectedBranches))
for _, b := range branches {
var protected bool
for _, pb := range protectedBranches {
if b == pb.BranchName {
protected = true
break
}
}
if !protected {
leftBranches = append(leftBranches, b)
}
}
ctx.Data["LeftBranches"] = leftBranches
ctx.Data["ProtectedBranches"] = rules
ctx.HTML(http.StatusOK, tplBranches)
}
// ProtectedBranchPost response for protect for a branch of a repository
func ProtectedBranchPost(ctx *context.Context) {
// SetDefaultBranchPost set default branch
func SetDefaultBranchPost(ctx *context.Context) {
ctx.Data["Title"] = ctx.Tr("repo.settings")
ctx.Data["PageIsSettingsBranches"] = true
@ -101,41 +87,36 @@ func ProtectedBranchPost(ctx *context.Context) {
// SettingsProtectedBranch renders the protected branch setting page
func SettingsProtectedBranch(c *context.Context) {
branch := c.Params("*")
if !c.Repo.GitRepo.IsBranchExist(branch) {
c.NotFound("IsBranchExist", nil)
return
}
c.Data["Title"] = c.Tr("repo.settings.protected_branch") + " - " + branch
c.Data["PageIsSettingsBranches"] = true
protectBranch, err := git_model.GetProtectedBranchBy(c, c.Repo.Repository.ID, branch)
if err != nil {
if !git.IsErrBranchNotExist(err) {
ruleName := c.FormString("rule_name")
var rule *git_model.ProtectedBranch
if ruleName != "" {
var err error
rule, err = git_model.GetProtectedBranchRuleByName(c, c.Repo.Repository.ID, ruleName)
if err != nil {
c.ServerError("GetProtectBranchOfRepoByName", err)
return
}
}
if protectBranch == nil {
if rule == nil {
// No options found, create defaults.
protectBranch = &git_model.ProtectedBranch{
BranchName: branch,
}
rule = &git_model.ProtectedBranch{}
}
c.Data["PageIsSettingsBranches"] = true
c.Data["Title"] = c.Tr("repo.settings.protected_branch") + " - " + rule.RuleName
users, err := access_model.GetRepoReaders(c.Repo.Repository)
if err != nil {
c.ServerError("Repo.Repository.GetReaders", err)
return
}
c.Data["Users"] = users
c.Data["whitelist_users"] = strings.Join(base.Int64sToStrings(protectBranch.WhitelistUserIDs), ",")
c.Data["merge_whitelist_users"] = strings.Join(base.Int64sToStrings(protectBranch.MergeWhitelistUserIDs), ",")
c.Data["approvals_whitelist_users"] = strings.Join(base.Int64sToStrings(protectBranch.ApprovalsWhitelistUserIDs), ",")
c.Data["whitelist_users"] = strings.Join(base.Int64sToStrings(rule.WhitelistUserIDs), ",")
c.Data["merge_whitelist_users"] = strings.Join(base.Int64sToStrings(rule.MergeWhitelistUserIDs), ",")
c.Data["approvals_whitelist_users"] = strings.Join(base.Int64sToStrings(rule.ApprovalsWhitelistUserIDs), ",")
contexts, _ := git_model.FindRepoRecentCommitStatusContexts(c, c.Repo.Repository.ID, 7*24*time.Hour) // Find last week status check contexts
for _, ctx := range protectBranch.StatusCheckContexts {
for _, ctx := range rule.StatusCheckContexts {
var found bool
for i := range contexts {
if contexts[i] == ctx {
@ -150,7 +131,7 @@ func SettingsProtectedBranch(c *context.Context) {
c.Data["branch_status_check_contexts"] = contexts
c.Data["is_context_required"] = func(context string) bool {
for _, c := range protectBranch.StatusCheckContexts {
for _, c := range rule.StatusCheckContexts {
if c == context {
return true
}
@ -165,130 +146,173 @@ func SettingsProtectedBranch(c *context.Context) {
return
}
c.Data["Teams"] = teams
c.Data["whitelist_teams"] = strings.Join(base.Int64sToStrings(protectBranch.WhitelistTeamIDs), ",")
c.Data["merge_whitelist_teams"] = strings.Join(base.Int64sToStrings(protectBranch.MergeWhitelistTeamIDs), ",")
c.Data["approvals_whitelist_teams"] = strings.Join(base.Int64sToStrings(protectBranch.ApprovalsWhitelistTeamIDs), ",")
c.Data["whitelist_teams"] = strings.Join(base.Int64sToStrings(rule.WhitelistTeamIDs), ",")
c.Data["merge_whitelist_teams"] = strings.Join(base.Int64sToStrings(rule.MergeWhitelistTeamIDs), ",")
c.Data["approvals_whitelist_teams"] = strings.Join(base.Int64sToStrings(rule.ApprovalsWhitelistTeamIDs), ",")
}
c.Data["Branch"] = protectBranch
c.Data["Rule"] = rule
c.HTML(http.StatusOK, tplProtectedBranch)
}
// SettingsProtectedBranchPost updates the protected branch settings
func SettingsProtectedBranchPost(ctx *context.Context) {
f := web.GetForm(ctx).(*forms.ProtectBranchForm)
branch := ctx.Params("*")
if !ctx.Repo.GitRepo.IsBranchExist(branch) {
ctx.NotFound("IsBranchExist", nil)
var protectBranch *git_model.ProtectedBranch
if f.RuleName == "" {
ctx.Flash.Error(ctx.Tr("repo.settings.protected_branch_required_rule_name"))
ctx.Redirect(fmt.Sprintf("%s/settings/branches/edit", ctx.Repo.RepoLink))
return
}
protectBranch, err := git_model.GetProtectedBranchBy(ctx, ctx.Repo.Repository.ID, branch)
var err error
protectBranch, err = git_model.GetProtectedBranchRuleByName(ctx, ctx.Repo.Repository.ID, f.RuleName)
if err != nil {
if !git.IsErrBranchNotExist(err) {
ctx.ServerError("GetProtectBranchOfRepoByName", err)
return
ctx.ServerError("GetProtectBranchOfRepoByName", err)
return
}
if protectBranch == nil {
// No options found, create defaults.
protectBranch = &git_model.ProtectedBranch{
RepoID: ctx.Repo.Repository.ID,
RuleName: f.RuleName,
}
}
if f.Protected {
if protectBranch == nil {
// No options found, create defaults.
protectBranch = &git_model.ProtectedBranch{
RepoID: ctx.Repo.Repository.ID,
BranchName: branch,
}
}
if f.RequiredApprovals < 0 {
ctx.Flash.Error(ctx.Tr("repo.settings.protected_branch_required_approvals_min"))
ctx.Redirect(fmt.Sprintf("%s/settings/branches/%s", ctx.Repo.RepoLink, util.PathEscapeSegments(branch)))
}
var whitelistUsers, whitelistTeams, mergeWhitelistUsers, mergeWhitelistTeams, approvalsWhitelistUsers, approvalsWhitelistTeams []int64
protectBranch.RuleName = f.RuleName
if f.RequiredApprovals < 0 {
ctx.Flash.Error(ctx.Tr("repo.settings.protected_branch_required_approvals_min"))
ctx.Redirect(fmt.Sprintf("%s/settings/branches/edit?rule_name=%s", ctx.Repo.RepoLink, f.RuleName))
return
}
var whitelistUsers, whitelistTeams, mergeWhitelistUsers, mergeWhitelistTeams, approvalsWhitelistUsers, approvalsWhitelistTeams []int64
switch f.EnablePush {
case "all":
protectBranch.CanPush = true
protectBranch.EnableWhitelist = false
protectBranch.WhitelistDeployKeys = false
case "whitelist":
protectBranch.CanPush = true
protectBranch.EnableWhitelist = true
protectBranch.WhitelistDeployKeys = f.WhitelistDeployKeys
if strings.TrimSpace(f.WhitelistUsers) != "" {
whitelistUsers, _ = base.StringsToInt64s(strings.Split(f.WhitelistUsers, ","))
}
if strings.TrimSpace(f.WhitelistTeams) != "" {
whitelistTeams, _ = base.StringsToInt64s(strings.Split(f.WhitelistTeams, ","))
}
default:
protectBranch.CanPush = false
protectBranch.EnableWhitelist = false
protectBranch.WhitelistDeployKeys = false
switch f.EnablePush {
case "all":
protectBranch.CanPush = true
protectBranch.EnableWhitelist = false
protectBranch.WhitelistDeployKeys = false
case "whitelist":
protectBranch.CanPush = true
protectBranch.EnableWhitelist = true
protectBranch.WhitelistDeployKeys = f.WhitelistDeployKeys
if strings.TrimSpace(f.WhitelistUsers) != "" {
whitelistUsers, _ = base.StringsToInt64s(strings.Split(f.WhitelistUsers, ","))
}
if strings.TrimSpace(f.WhitelistTeams) != "" {
whitelistTeams, _ = base.StringsToInt64s(strings.Split(f.WhitelistTeams, ","))
}
default:
protectBranch.CanPush = false
protectBranch.EnableWhitelist = false
protectBranch.WhitelistDeployKeys = false
}
protectBranch.EnableMergeWhitelist = f.EnableMergeWhitelist
if f.EnableMergeWhitelist {
if strings.TrimSpace(f.MergeWhitelistUsers) != "" {
mergeWhitelistUsers, _ = base.StringsToInt64s(strings.Split(f.MergeWhitelistUsers, ","))
}
if strings.TrimSpace(f.MergeWhitelistTeams) != "" {
mergeWhitelistTeams, _ = base.StringsToInt64s(strings.Split(f.MergeWhitelistTeams, ","))
}
protectBranch.EnableMergeWhitelist = f.EnableMergeWhitelist
if f.EnableMergeWhitelist {
if strings.TrimSpace(f.MergeWhitelistUsers) != "" {
mergeWhitelistUsers, _ = base.StringsToInt64s(strings.Split(f.MergeWhitelistUsers, ","))
}
if strings.TrimSpace(f.MergeWhitelistTeams) != "" {
mergeWhitelistTeams, _ = base.StringsToInt64s(strings.Split(f.MergeWhitelistTeams, ","))
}
}
protectBranch.EnableStatusCheck = f.EnableStatusCheck
if f.EnableStatusCheck {
protectBranch.StatusCheckContexts = f.StatusCheckContexts
} else {
protectBranch.StatusCheckContexts = nil
}
protectBranch.RequiredApprovals = f.RequiredApprovals
protectBranch.EnableApprovalsWhitelist = f.EnableApprovalsWhitelist
if f.EnableApprovalsWhitelist {
if strings.TrimSpace(f.ApprovalsWhitelistUsers) != "" {
approvalsWhitelistUsers, _ = base.StringsToInt64s(strings.Split(f.ApprovalsWhitelistUsers, ","))
}
if strings.TrimSpace(f.ApprovalsWhitelistTeams) != "" {
approvalsWhitelistTeams, _ = base.StringsToInt64s(strings.Split(f.ApprovalsWhitelistTeams, ","))
}
}
protectBranch.BlockOnRejectedReviews = f.BlockOnRejectedReviews
protectBranch.BlockOnOfficialReviewRequests = f.BlockOnOfficialReviewRequests
protectBranch.DismissStaleApprovals = f.DismissStaleApprovals
protectBranch.RequireSignedCommits = f.RequireSignedCommits
protectBranch.ProtectedFilePatterns = f.ProtectedFilePatterns
protectBranch.UnprotectedFilePatterns = f.UnprotectedFilePatterns
protectBranch.BlockOnOutdatedBranch = f.BlockOnOutdatedBranch
err = git_model.UpdateProtectBranch(ctx, ctx.Repo.Repository, protectBranch, git_model.WhitelistOptions{
UserIDs: whitelistUsers,
TeamIDs: whitelistTeams,
MergeUserIDs: mergeWhitelistUsers,
MergeTeamIDs: mergeWhitelistTeams,
ApprovalsUserIDs: approvalsWhitelistUsers,
ApprovalsTeamIDs: approvalsWhitelistTeams,
})
if err != nil {
ctx.ServerError("UpdateProtectBranch", err)
return
}
if err = pull_service.CheckPrsForBaseBranch(ctx.Repo.Repository, protectBranch.BranchName); err != nil {
ctx.ServerError("CheckPrsForBaseBranch", err)
return
}
ctx.Flash.Success(ctx.Tr("repo.settings.update_protect_branch_success", branch))
ctx.Redirect(fmt.Sprintf("%s/settings/branches/%s", ctx.Repo.RepoLink, util.PathEscapeSegments(branch)))
protectBranch.EnableStatusCheck = f.EnableStatusCheck
if f.EnableStatusCheck {
protectBranch.StatusCheckContexts = f.StatusCheckContexts
} else {
if protectBranch != nil {
if err := git_model.DeleteProtectedBranch(ctx, ctx.Repo.Repository.ID, protectBranch.ID); err != nil {
ctx.ServerError("DeleteProtectedBranch", err)
return
}
}
ctx.Flash.Success(ctx.Tr("repo.settings.remove_protected_branch_success", branch))
ctx.Redirect(fmt.Sprintf("%s/settings/branches", ctx.Repo.RepoLink))
protectBranch.StatusCheckContexts = nil
}
protectBranch.RequiredApprovals = f.RequiredApprovals
protectBranch.EnableApprovalsWhitelist = f.EnableApprovalsWhitelist
if f.EnableApprovalsWhitelist {
if strings.TrimSpace(f.ApprovalsWhitelistUsers) != "" {
approvalsWhitelistUsers, _ = base.StringsToInt64s(strings.Split(f.ApprovalsWhitelistUsers, ","))
}
if strings.TrimSpace(f.ApprovalsWhitelistTeams) != "" {
approvalsWhitelistTeams, _ = base.StringsToInt64s(strings.Split(f.ApprovalsWhitelistTeams, ","))
}
}
protectBranch.BlockOnRejectedReviews = f.BlockOnRejectedReviews
protectBranch.BlockOnOfficialReviewRequests = f.BlockOnOfficialReviewRequests
protectBranch.DismissStaleApprovals = f.DismissStaleApprovals
protectBranch.RequireSignedCommits = f.RequireSignedCommits
protectBranch.ProtectedFilePatterns = f.ProtectedFilePatterns
protectBranch.UnprotectedFilePatterns = f.UnprotectedFilePatterns
protectBranch.BlockOnOutdatedBranch = f.BlockOnOutdatedBranch
err = git_model.UpdateProtectBranch(ctx, ctx.Repo.Repository, protectBranch, git_model.WhitelistOptions{
UserIDs: whitelistUsers,
TeamIDs: whitelistTeams,
MergeUserIDs: mergeWhitelistUsers,
MergeTeamIDs: mergeWhitelistTeams,
ApprovalsUserIDs: approvalsWhitelistUsers,
ApprovalsTeamIDs: approvalsWhitelistTeams,
})
if err != nil {
ctx.ServerError("UpdateProtectBranch", err)
return
}
// FIXME: since we only need to recheck files protected rules, we could improve this
matchedBranches, err := git_model.FindAllMatchedBranches(ctx, ctx.Repo.GitRepo, protectBranch.RuleName)
if err != nil {
ctx.ServerError("FindAllMatchedBranches", err)
return
}
for _, branchName := range matchedBranches {
if err = pull_service.CheckPRsForBaseBranch(ctx.Repo.Repository, branchName); err != nil {
ctx.ServerError("CheckPRsForBaseBranch", err)
return
}
}
ctx.Flash.Success(ctx.Tr("repo.settings.update_protect_branch_success", protectBranch.RuleName))
ctx.Redirect(fmt.Sprintf("%s/settings/branches?rule_name=%s", ctx.Repo.RepoLink, protectBranch.RuleName))
}
// DeleteProtectedBranchRulePost delete protected branch rule by id
func DeleteProtectedBranchRulePost(ctx *context.Context) {
ruleID := ctx.ParamsInt64("id")
if ruleID <= 0 {
ctx.Flash.Error(ctx.Tr("repo.settings.remove_protected_branch_failed", fmt.Sprintf("%d", ruleID)))
ctx.JSON(http.StatusOK, map[string]interface{}{
"redirect": fmt.Sprintf("%s/settings/branches", ctx.Repo.RepoLink),
})
return
}
rule, err := git_model.GetProtectedBranchRuleByID(ctx, ctx.Repo.Repository.ID, ruleID)
if err != nil {
ctx.Flash.Error(ctx.Tr("repo.settings.remove_protected_branch_failed", fmt.Sprintf("%d", ruleID)))
ctx.JSON(http.StatusOK, map[string]interface{}{
"redirect": fmt.Sprintf("%s/settings/branches", ctx.Repo.RepoLink),
})
return
}
if rule == nil {
ctx.Flash.Error(ctx.Tr("repo.settings.remove_protected_branch_failed", fmt.Sprintf("%d", ruleID)))
ctx.JSON(http.StatusOK, map[string]interface{}{
"redirect": fmt.Sprintf("%s/settings/branches", ctx.Repo.RepoLink),
})
return
}
if err := git_model.DeleteProtectedBranch(ctx, ctx.Repo.Repository.ID, ruleID); err != nil {
ctx.Flash.Error(ctx.Tr("repo.settings.remove_protected_branch_failed", rule.RuleName))
ctx.JSON(http.StatusOK, map[string]interface{}{
"redirect": fmt.Sprintf("%s/settings/branches", ctx.Repo.RepoLink),
})
return
}
ctx.Flash.Success(ctx.Tr("repo.settings.remove_protected_branch_success", rule.RuleName))
ctx.JSON(http.StatusOK, map[string]interface{}{
"redirect": fmt.Sprintf("%s/settings/branches", ctx.Repo.RepoLink),
})
}
// RenameBranchPost responds to a request to rename a branch

View File

@ -280,17 +280,17 @@ func Repos(ctx *context.Context) {
repos := map[string]*repo_model.Repository{}
// We're going to iterate by pagesize.
root := user_model.UserPath(ctxUser.Name)
if err := filepath.Walk(root, func(path string, info os.FileInfo, err error) error {
if err := filepath.WalkDir(root, func(path string, d os.DirEntry, err error) error {
if err != nil {
if os.IsNotExist(err) {
return nil
}
return err
}
if !info.IsDir() || path == root {
if !d.IsDir() || path == root {
return nil
}
name := info.Name()
name := d.Name()
if !strings.HasSuffix(name, ".git") {
return filepath.SkipDir
}
@ -304,7 +304,7 @@ func Repos(ctx *context.Context) {
count++
return filepath.SkipDir
}); err != nil {
ctx.ServerError("filepath.Walk", err)
ctx.ServerError("filepath.WalkDir", err)
return
}

View File

@ -861,10 +861,16 @@ func RegisterRoutes(m *web.Route) {
})
m.Group("/branches", func() {
m.Combo("").Get(repo.ProtectedBranch).Post(repo.ProtectedBranchPost)
m.Combo("/*").Get(repo.SettingsProtectedBranch).
Post(web.Bind(forms.ProtectBranchForm{}), context.RepoMustNotBeArchived(), repo.SettingsProtectedBranchPost)
m.Post("/", repo.SetDefaultBranchPost)
}, repo.MustBeNotEmpty)
m.Group("/branches", func() {
m.Get("/", repo.ProtectedBranchRules)
m.Combo("/edit").Get(repo.SettingsProtectedBranch).
Post(web.Bind(forms.ProtectBranchForm{}), context.RepoMustNotBeArchived(), repo.SettingsProtectedBranchPost)
m.Post("/{id}/delete", repo.DeleteProtectedBranchRulePost)
}, repo.MustBeNotEmpty)
m.Post("/rename_branch", web.Bind(forms.RenameBranchForm{}), context.RepoMustNotBeArchived(), repo.RenameBranchPost)
m.Group("/tags", func() {

View File

@ -310,7 +310,7 @@ Loop:
return false, "", nil, &ErrWontSign{twofa}
}
case approved:
protectedBranch, err := git_model.GetProtectedBranchBy(ctx, repo.ID, pr.BaseBranch)
protectedBranch, err := git_model.GetFirstMatchProtectedBranchRule(ctx, repo.ID, pr.BaseBranch)
if err != nil {
return false, "", nil, err
}

View File

@ -79,7 +79,7 @@ func ToBranch(repo *repo_model.Repository, b *git.Branch, c *git.Commit, bp *git
}
if isRepoAdmin {
branch.EffectiveBranchProtectionName = bp.BranchName
branch.EffectiveBranchProtectionName = bp.RuleName
}
if user != nil {
@ -87,7 +87,8 @@ func ToBranch(repo *repo_model.Repository, b *git.Branch, c *git.Commit, bp *git
if err != nil {
return nil, err
}
branch.UserCanPush = bp.CanUserPush(db.DefaultContext, user.ID)
bp.Repo = repo
branch.UserCanPush = bp.CanUserPush(db.DefaultContext, user)
branch.UserCanMerge = git_model.IsUserMergeWhitelisted(db.DefaultContext, bp, user.ID, permission)
}
@ -121,8 +122,14 @@ func ToBranchProtection(bp *git_model.ProtectedBranch) *api.BranchProtection {
log.Error("GetTeamNamesByID (ApprovalsWhitelistTeamIDs): %v", err)
}
branchName := ""
if !git_model.IsRuleNameSpecial(bp.RuleName) {
branchName = bp.RuleName
}
return &api.BranchProtection{
BranchName: bp.BranchName,
BranchName: branchName,
RuleName: bp.RuleName,
EnablePush: bp.CanPush,
EnablePushWhitelist: bp.EnableWhitelist,
PushWhitelistUsernames: pushWhitelistUsernames,

View File

@ -175,6 +175,48 @@ func registerDeleteOldSystemNotices() {
})
}
func registerGCLFS() {
if !setting.LFS.StartServer {
return
}
type GCLFSConfig struct {
OlderThanConfig
LastUpdatedMoreThanAgo time.Duration
NumberToCheckPerRepo int64
ProportionToCheckPerRepo float64
}
RegisterTaskFatal("gc_lfs", &GCLFSConfig{
OlderThanConfig: OlderThanConfig{
BaseConfig: BaseConfig{
Enabled: false,
RunAtStart: false,
Schedule: "@every 24h",
},
// Only attempt to garbage collect lfs meta objects older than a week as the order of git lfs upload
// and git object upload is not necessarily guaranteed. It's possible to imagine a situation whereby
// an LFS object is uploaded but the git branch is not uploaded immediately, or there are some rapid
// changes in new branches that might lead to lfs objects becoming temporarily unassociated with git
// objects.
//
// It is likely that a week is potentially excessive but it should definitely be enough that any
// unassociated LFS object is genuinely unassociated.
OlderThan: 24 * time.Hour * 7,
},
// Only GC things that haven't been looked at in the past 3 days
LastUpdatedMoreThanAgo: 24 * time.Hour * 3,
NumberToCheckPerRepo: 100,
ProportionToCheckPerRepo: 0.6,
}, func(ctx context.Context, _ *user_model.User, config Config) error {
gcLFSConfig := config.(*GCLFSConfig)
return repo_service.GarbageCollectLFSMetaObjects(ctx, repo_service.GarbageCollectLFSMetaObjectsOptions{
AutoFix: true,
OlderThan: time.Now().Add(-gcLFSConfig.OlderThan),
UpdatedLessRecentlyThan: time.Now().Add(-gcLFSConfig.LastUpdatedMoreThanAgo),
})
})
}
func initExtendedTasks() {
registerDeleteInactiveUsers()
registerDeleteRepositoryArchives()
@ -188,4 +230,5 @@ func initExtendedTasks() {
registerDeleteOldActions()
registerUpdateGiteaChecker()
registerDeleteOldSystemNotices()
registerGCLFS()
}

View File
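
The gc_lfs cron task above works with two durations: OlderThan keeps very recent LFS objects out of scope (their git counterpart may simply not have arrived yet, as the comment explains), while LastUpdatedMoreThanAgo makes the pass generational by skipping rows that were checked recently. A trivial sketch of how the durations become absolute cutoffs at run time; the real selection logic lives in repo_service.GarbageCollectLFSMetaObjects:

package main

import (
	"fmt"
	"time"
)

// gcCutoffs converts the configured durations into the absolute timestamps passed
// to the garbage collector on each run.
func gcCutoffs(olderThan, lastUpdatedMoreThanAgo time.Duration) (createdBefore, checkedBefore time.Time) {
	now := time.Now()
	return now.Add(-olderThan), now.Add(-lastUpdatedMoreThanAgo)
}

func main() {
	createdBefore, checkedBefore := gcCutoffs(7*24*time.Hour, 3*24*time.Hour)
	fmt.Println("collect LFS meta objects created before", createdBefore.Format(time.RFC3339))
	fmt.Println("and not checked since", checkedBefore.Format(time.RFC3339))
}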

@ -186,7 +186,7 @@ func (f *RepoSettingForm) Validate(req *http.Request, errs binding.Errors) bindi
// ProtectBranchForm form for changing protected branch settings
type ProtectBranchForm struct {
Protected bool
RuleName string `binding:"Required"`
EnablePush string
WhitelistUsers string
WhitelistTeams string

View File

@ -274,6 +274,7 @@ func composeIssueCommentMessages(ctx *mailCommentContext, lang string, recipient
"ActionName": actName,
"ReviewComments": reviewComments,
"Language": locale.Language(),
"CanReply": setting.IncomingEmail.Enabled && commentType != issues_model.CommentTypePullRequestPush,
// helper
"locale": locale,
"Str2html": templates.Str2html,

View File

@ -14,6 +14,7 @@ import (
"code.gitea.io/gitea/models"
"code.gitea.io/gitea/models/db"
git_model "code.gitea.io/gitea/models/git"
issues_model "code.gitea.io/gitea/models/issues"
access_model "code.gitea.io/gitea/models/perm/access"
repo_model "code.gitea.io/gitea/models/repo"
@ -126,11 +127,12 @@ func CheckPullMergable(stdCtx context.Context, doer *user_model.User, perm *acce
// isSignedIfRequired check if merge will be signed if required
func isSignedIfRequired(ctx context.Context, pr *issues_model.PullRequest, doer *user_model.User) (bool, error) {
if err := pr.LoadProtectedBranch(ctx); err != nil {
pb, err := git_model.GetFirstMatchProtectedBranchRule(ctx, pr.BaseRepoID, pr.BaseBranch)
if err != nil {
return false, err
}
if pr.ProtectedBranch == nil || !pr.ProtectedBranch.RequireSignedCommits {
if pb == nil || !pb.RequireSignedCommits {
return true, nil
}
@ -348,8 +350,8 @@ func testPR(id int64) {
checkAndUpdateStatus(ctx, pr)
}
// CheckPrsForBaseBranch check all pulls with bseBrannch
func CheckPrsForBaseBranch(baseRepo *repo_model.Repository, baseBranchName string) error {
// CheckPRsForBaseBranch checks all pulls with the given base branch
func CheckPRsForBaseBranch(baseRepo *repo_model.Repository, baseBranchName string) error {
prs, err := issues_model.GetUnmergedPullRequestsByBaseInfo(baseRepo.ID, baseBranchName)
if err != nil {
return err

View File

@ -83,10 +83,11 @@ func IsCommitStatusContextSuccess(commitStatuses []*git_model.CommitStatus, requ
// IsPullCommitStatusPass returns if all required status checks PASS
func IsPullCommitStatusPass(ctx context.Context, pr *issues_model.PullRequest) (bool, error) {
if err := pr.LoadProtectedBranch(ctx); err != nil {
pb, err := git_model.GetFirstMatchProtectedBranchRule(ctx, pr.BaseRepoID, pr.BaseBranch)
if err != nil {
return false, errors.Wrap(err, "GetLatestCommitStatus")
}
if pr.ProtectedBranch == nil || !pr.ProtectedBranch.EnableStatusCheck {
if pb == nil || !pb.EnableStatusCheck {
return true, nil
}
@ -137,12 +138,13 @@ func GetPullRequestCommitStatusState(ctx context.Context, pr *issues_model.PullR
return "", errors.Wrap(err, "GetLatestCommitStatus")
}
if err := pr.LoadProtectedBranch(ctx); err != nil {
pb, err := git_model.GetFirstMatchProtectedBranchRule(ctx, pr.BaseRepoID, pr.BaseBranch)
if err != nil {
return "", errors.Wrap(err, "LoadProtectedBranch")
}
var requiredContexts []string
if pr.ProtectedBranch != nil {
requiredContexts = pr.ProtectedBranch.StatusCheckContexts
if pb != nil {
requiredContexts = pb.StatusCheckContexts
}
return MergeRequiredContextsCommitStatus(commitStatuses, requiredContexts), nil

View File

@ -760,12 +760,12 @@ func IsUserAllowedToMerge(ctx context.Context, pr *issues_model.PullRequest, p a
return false, nil
}
err := pr.LoadProtectedBranch(ctx)
pb, err := git_model.GetFirstMatchProtectedBranchRule(ctx, pr.BaseRepoID, pr.BaseBranch)
if err != nil {
return false, err
}
if (p.CanWrite(unit.TypeCode) && pr.ProtectedBranch == nil) || (pr.ProtectedBranch != nil && git_model.IsUserMergeWhitelisted(ctx, pr.ProtectedBranch, user.ID, p)) {
if (p.CanWrite(unit.TypeCode) && pb == nil) || (pb != nil && git_model.IsUserMergeWhitelisted(ctx, pb, user.ID, p)) {
return true, nil
}
@ -778,10 +778,11 @@ func CheckPullBranchProtections(ctx context.Context, pr *issues_model.PullReques
return fmt.Errorf("LoadBaseRepo: %w", err)
}
if err = pr.LoadProtectedBranch(ctx); err != nil {
return fmt.Errorf("LoadProtectedBranch: %w", err)
pb, err := git_model.GetFirstMatchProtectedBranchRule(ctx, pr.BaseRepoID, pr.BaseBranch)
if err != nil {
return fmt.Errorf("LoadProtectedBranch: %v", err)
}
if pr.ProtectedBranch == nil {
if pb == nil {
return nil
}
@ -795,23 +796,23 @@ func CheckPullBranchProtections(ctx context.Context, pr *issues_model.PullReques
}
}
if !issues_model.HasEnoughApprovals(ctx, pr.ProtectedBranch, pr) {
if !issues_model.HasEnoughApprovals(ctx, pb, pr) {
return models.ErrDisallowedToMerge{
Reason: "Does not have enough approvals",
}
}
if issues_model.MergeBlockedByRejectedReview(ctx, pr.ProtectedBranch, pr) {
if issues_model.MergeBlockedByRejectedReview(ctx, pb, pr) {
return models.ErrDisallowedToMerge{
Reason: "There are requested changes",
}
}
if issues_model.MergeBlockedByOfficialReviewRequests(ctx, pr.ProtectedBranch, pr) {
if issues_model.MergeBlockedByOfficialReviewRequests(ctx, pb, pr) {
return models.ErrDisallowedToMerge{
Reason: "There are official review requests",
}
}
if issues_model.MergeBlockedByOutdatedBranch(pr.ProtectedBranch, pr) {
if issues_model.MergeBlockedByOutdatedBranch(pb, pr) {
return models.ErrDisallowedToMerge{
Reason: "The head branch is behind the base branch",
}
@ -821,7 +822,7 @@ func CheckPullBranchProtections(ctx context.Context, pr *issues_model.PullReques
return nil
}
if pr.ProtectedBranch.MergeBlockedByProtectedFiles(pr.ChangedProtectedFiles) {
if pb.MergeBlockedByProtectedFiles(pr.ChangedProtectedFiles) {
return models.ErrDisallowedToMerge{
Reason: "Changed protected files",
}
@ -836,6 +837,9 @@ func MergedManually(pr *issues_model.PullRequest, doer *user_model.User, baseGit
defer pullWorkingPool.CheckOut(fmt.Sprint(pr.ID))
if err := db.WithTx(db.DefaultContext, func(ctx context.Context) error {
if err := pr.LoadBaseRepo(ctx); err != nil {
return err
}
prUnit, err := pr.BaseRepo.GetUnit(ctx, unit.TypePullRequests)
if err != nil {
return err
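
CheckPullBranchProtections now evaluates every blocking condition against the resolved rule pb rather than a field on the pull request. A condensed sketch of the gate order visible in this hunk (the helper is illustrative, further gates in the full function are elided, and the rule type is assumed to be git_model.ProtectedBranch as in the models package used above):

func mergeBlockReason(ctx context.Context, pb *git_model.ProtectedBranch, pr *issues_model.PullRequest) string {
	switch {
	case !issues_model.HasEnoughApprovals(ctx, pb, pr):
		return "Does not have enough approvals"
	case issues_model.MergeBlockedByRejectedReview(ctx, pb, pr):
		return "There are requested changes"
	case issues_model.MergeBlockedByOfficialReviewRequests(ctx, pb, pr):
		return "There are official review requests"
	case issues_model.MergeBlockedByOutdatedBranch(pb, pr):
		return "The head branch is behind the base branch"
	case pb.MergeBlockedByProtectedFiles(pr.ChangedProtectedFiles):
		return "Changed protected files"
	default:
		return "" // not blocked by the checks shown here
	}
}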

View File

@ -14,7 +14,7 @@ import (
"strings"
"code.gitea.io/gitea/models"
"code.gitea.io/gitea/models/db"
git_model "code.gitea.io/gitea/models/git"
issues_model "code.gitea.io/gitea/models/issues"
"code.gitea.io/gitea/models/unit"
"code.gitea.io/gitea/modules/container"
@ -106,8 +106,8 @@ func TestPatch(pr *issues_model.PullRequest) error {
}
// 3. Check for protected files changes
if err = checkPullFilesProtection(pr, gitRepo); err != nil {
return fmt.Errorf("pr.CheckPullFilesProtection(): %w", err)
if err = checkPullFilesProtection(ctx, pr, gitRepo); err != nil {
return fmt.Errorf("pr.CheckPullFilesProtection(): %v", err)
}
if len(pr.ChangedProtectedFiles) > 0 {
@ -544,23 +544,23 @@ func CheckUnprotectedFiles(repo *git.Repository, oldCommitID, newCommitID string
}
// checkPullFilesProtection check if pr changed protected files and save results
func checkPullFilesProtection(pr *issues_model.PullRequest, gitRepo *git.Repository) error {
func checkPullFilesProtection(ctx context.Context, pr *issues_model.PullRequest, gitRepo *git.Repository) error {
if pr.Status == issues_model.PullRequestStatusEmpty {
pr.ChangedProtectedFiles = nil
return nil
}
if err := pr.LoadProtectedBranch(db.DefaultContext); err != nil {
pb, err := git_model.GetFirstMatchProtectedBranchRule(ctx, pr.BaseRepoID, pr.BaseBranch)
if err != nil {
return err
}
if pr.ProtectedBranch == nil {
if pb == nil {
pr.ChangedProtectedFiles = nil
return nil
}
var err error
pr.ChangedProtectedFiles, err = CheckFileProtection(gitRepo, pr.MergeBase, "tracking", pr.ProtectedBranch.GetProtectedFilePatterns(), 10, os.Environ())
pr.ChangedProtectedFiles, err = CheckFileProtection(gitRepo, pr.MergeBase, "tracking", pb.GetProtectedFilePatterns(), 10, os.Environ())
if err != nil && !models.IsErrFilePathProtected(err) {
return err
}

View File

@ -8,6 +8,7 @@ import (
"fmt"
"code.gitea.io/gitea/models"
git_model "code.gitea.io/gitea/models/git"
issues_model "code.gitea.io/gitea/models/issues"
access_model "code.gitea.io/gitea/models/perm/access"
repo_model "code.gitea.io/gitea/models/repo"
@ -92,20 +93,29 @@ func IsUserAllowedToUpdate(ctx context.Context, pull *issues_model.PullRequest,
return false, false, err
}
if err := pull.LoadBaseRepo(ctx); err != nil {
return false, false, err
}
pr := &issues_model.PullRequest{
HeadRepoID: pull.BaseRepoID,
HeadRepo: pull.BaseRepo,
BaseRepoID: pull.HeadRepoID,
BaseRepo: pull.HeadRepo,
HeadBranch: pull.BaseBranch,
BaseBranch: pull.HeadBranch,
}
err = pr.LoadProtectedBranch(ctx)
pb, err := git_model.GetFirstMatchProtectedBranchRule(ctx, pull.BaseRepoID, pull.BaseBranch)
if err != nil {
return false, false, err
}
// can't do a rebase on a protected branch because it needs a force push
if pr.ProtectedBranch == nil {
if pb == nil {
if err := pr.LoadBaseRepo(ctx); err != nil {
return false, false, err
}
prUnit, err := pr.BaseRepo.GetUnit(ctx, unit.TypePullRequests)
if err != nil {
log.Error("pr.BaseRepo.GetUnit(unit.TypePullRequests): %v", err)
@ -115,8 +125,11 @@ func IsUserAllowedToUpdate(ctx context.Context, pull *issues_model.PullRequest,
}
// Update function needs push permission
if pr.ProtectedBranch != nil && !pr.ProtectedBranch.CanUserPush(ctx, user.ID) {
return false, false, nil
if pb != nil {
pb.Repo = pull.BaseRepo
if !pb.CanUserPush(ctx, user) {
return false, false, nil
}
}
baseRepoPerm, err := access_model.GetUserRepoPermission(ctx, pull.BaseRepo, user)
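
CanUserPush now takes the *user_model.User itself instead of a bare ID, and the hunks here populate the rule's Repo before calling it. A minimal sketch of the new call shape (the wrapper is illustrative; the rule type is assumed to be git_model.ProtectedBranch):

func canPushToRule(ctx context.Context, pb *git_model.ProtectedBranch, repo *repo_model.Repository, u *user_model.User) bool {
	if pb == nil {
		return true // no rule matches this branch, nothing to enforce
	}
	// The hunks above set pb.Repo before the check, so the sketch does the same.
	pb.Repo = repo
	return pb.CanUserPush(ctx, u)
}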

View File

@ -303,14 +303,16 @@ func ListUnadoptedRepositories(query string, opts *db.ListOptions) ([]string, in
// We're going to iterate by pagesize.
root := filepath.Clean(setting.RepoRootPath)
if err := filepath.Walk(root, func(path string, info os.FileInfo, err error) error {
if err := filepath.WalkDir(root, func(path string, d os.DirEntry, err error) error {
if err != nil {
return err
}
if !info.IsDir() || path == root {
if !d.IsDir() || path == root {
return nil
}
name := d.Name()
if !strings.ContainsRune(path[len(root)+1:], filepath.Separator) {
// Got a new user
if err = checkUnadoptedRepositories(userName, repoNamesToCheck, unadopted); err != nil {
@ -318,16 +320,14 @@ func ListUnadoptedRepositories(query string, opts *db.ListOptions) ([]string, in
}
repoNamesToCheck = repoNamesToCheck[:0]
if !globUser.Match(info.Name()) {
if !globUser.Match(name) {
return filepath.SkipDir
}
userName = info.Name()
userName = name
return nil
}
name := info.Name()
if !strings.HasSuffix(name, ".git") {
return filepath.SkipDir
}
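
filepath.WalkDir (Go 1.16+) hands the callback an fs.DirEntry taken straight from the directory read, so it avoids the extra os.Lstat per entry that filepath.Walk performs. A self-contained illustration of the switch made above (the root value is an example; the real code walks setting.RepoRootPath):

package main

import (
	"fmt"
	"io/fs"
	"path/filepath"
)

func main() {
	root := "." // example root
	_ = filepath.WalkDir(root, func(path string, d fs.DirEntry, err error) error {
		if err != nil {
			return err
		}
		// Directory entries expose Name() and IsDir() without a separate stat call.
		if d.IsDir() && path != root {
			fmt.Println("dir:", d.Name())
		}
		return nil
	})
}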

View File

@ -149,8 +149,7 @@ func RenameBranch(repo *repo_model.Repository, doer *user_model.User, gitRepo *g
// enumerates all branch-related errors
var (
ErrBranchIsDefault = errors.New("branch is default")
ErrBranchIsProtected = errors.New("branch is protected")
ErrBranchIsDefault = errors.New("branch is default")
)
// DeleteBranch delete branch
@ -159,13 +158,12 @@ func DeleteBranch(doer *user_model.User, repo *repo_model.Repository, gitRepo *g
return ErrBranchIsDefault
}
isProtected, err := git_model.IsProtectedBranch(db.DefaultContext, repo.ID, branchName)
isProtected, err := git_model.IsBranchProtected(db.DefaultContext, repo.ID, branchName)
if err != nil {
return err
}
if isProtected {
return ErrBranchIsProtected
return git_model.ErrBranchIsProtected
}
commit, err := gitRepo.GetBranchCommit(branchName)
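
With the sentinel moved into git_model, web and API callers can match the same value. An illustrative error check (the call itself is omitted because the full DeleteBranch signature is truncated above; package qualifiers depend on where the caller lives):

// err is assumed to come from a DeleteBranch call as in the hunk above.
switch {
case errors.Is(err, ErrBranchIsDefault):
	// the default branch can never be deleted
case errors.Is(err, git_model.ErrBranchIsProtected):
	// a protection rule matches this branch
case err != nil:
	// some other failure
}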

View File

@ -66,13 +66,16 @@ func (opts *ApplyDiffPatchOptions) Validate(ctx context.Context, repo *repo_mode
return err
}
} else {
protectedBranch, err := git_model.GetProtectedBranchBy(ctx, repo.ID, opts.OldBranch)
protectedBranch, err := git_model.GetFirstMatchProtectedBranchRule(ctx, repo.ID, opts.OldBranch)
if err != nil {
return err
}
if protectedBranch != nil && !protectedBranch.CanUserPush(ctx, doer.ID) {
return models.ErrUserCannotCommit{
UserName: doer.LowerName,
if protectedBranch != nil {
protectedBranch.Repo = repo
if !protectedBranch.CanUserPush(ctx, doer) {
return models.ErrUserCannotCommit{
UserName: doer.LowerName,
}
}
}
if protectedBranch != nil && protectedBranch.RequireSignedCommits {

View File

@ -463,17 +463,18 @@ func CreateOrUpdateRepoFile(ctx context.Context, repo *repo_model.Repository, do
// VerifyBranchProtection verify the branch protection for modifying the given treePath on the given branch
func VerifyBranchProtection(ctx context.Context, repo *repo_model.Repository, doer *user_model.User, branchName, treePath string) error {
protectedBranch, err := git_model.GetProtectedBranchBy(ctx, repo.ID, branchName)
protectedBranch, err := git_model.GetFirstMatchProtectedBranchRule(ctx, repo.ID, branchName)
if err != nil {
return err
}
if protectedBranch != nil {
protectedBranch.Repo = repo
isUnprotectedFile := false
glob := protectedBranch.GetUnprotectedFilePatterns()
if len(glob) != 0 {
isUnprotectedFile = protectedBranch.IsUnprotectedFile(glob, treePath)
}
if !protectedBranch.CanUserPush(ctx, doer.ID) && !isUnprotectedFile {
if !protectedBranch.CanUserPush(ctx, doer) && !isUnprotectedFile {
return models.ErrUserCannotCommit{
UserName: doer.LowerName,
}

View File

@ -5,49 +5,67 @@ package repository
import (
"context"
"errors"
"fmt"
"time"
"code.gitea.io/gitea/models/db"
git_model "code.gitea.io/gitea/models/git"
repo_model "code.gitea.io/gitea/models/repo"
"code.gitea.io/gitea/modules/git"
"code.gitea.io/gitea/modules/lfs"
"code.gitea.io/gitea/modules/log"
"xorm.io/builder"
"code.gitea.io/gitea/modules/setting"
)
func GarbageCollectLFSMetaObjects(ctx context.Context, logger log.Logger, autofix bool) error {
log.Trace("Doing: GarbageCollectLFSMetaObjects")
if err := db.Iterate(
ctx,
builder.And(builder.Gt{"id": 0}),
func(ctx context.Context, repo *repo_model.Repository) error {
return GarbageCollectLFSMetaObjectsForRepo(ctx, repo, logger, autofix)
},
); err != nil {
return err
}
log.Trace("Finished: GarbageCollectLFSMetaObjects")
return nil
// GarbageCollectLFSMetaObjectsOptions provides options for GarbageCollectLFSMetaObjects function
type GarbageCollectLFSMetaObjectsOptions struct {
Logger log.Logger
AutoFix bool
OlderThan time.Time
UpdatedLessRecentlyThan time.Time
NumberToCheckPerRepo int64
ProportionToCheckPerRepo float64
}
func GarbageCollectLFSMetaObjectsForRepo(ctx context.Context, repo *repo_model.Repository, logger log.Logger, autofix bool) error {
if logger != nil {
logger.Info("Checking %-v", repo)
// GarbageCollectLFSMetaObjects garbage collects LFS objects for all repositories
func GarbageCollectLFSMetaObjects(ctx context.Context, opts GarbageCollectLFSMetaObjectsOptions) error {
log.Trace("Doing: GarbageCollectLFSMetaObjects")
defer log.Trace("Finished: GarbageCollectLFSMetaObjects")
if !setting.LFS.StartServer {
if opts.Logger != nil {
opts.Logger.Info("LFS support is disabled")
}
return nil
}
total, orphaned, collected, deleted := 0, 0, 0, 0
if logger != nil {
return git_model.IterateRepositoryIDsWithLFSMetaObjects(ctx, func(ctx context.Context, repoID, count int64) error {
repo, err := repo_model.GetRepositoryByID(ctx, repoID)
if err != nil {
return err
}
if newMinimum := int64(float64(count) * opts.ProportionToCheckPerRepo); newMinimum > opts.NumberToCheckPerRepo && opts.NumberToCheckPerRepo != 0 {
opts.NumberToCheckPerRepo = newMinimum
}
return GarbageCollectLFSMetaObjectsForRepo(ctx, repo, opts)
})
}
// GarbageCollectLFSMetaObjectsForRepo garbage collects LFS objects for a specific repository
func GarbageCollectLFSMetaObjectsForRepo(ctx context.Context, repo *repo_model.Repository, opts GarbageCollectLFSMetaObjectsOptions) error {
if opts.Logger != nil {
opts.Logger.Info("Checking %-v", repo)
}
total, orphaned, collected, deleted := int64(0), 0, 0, 0
if opts.Logger != nil {
defer func() {
if orphaned == 0 {
logger.Info("Found %d total LFSMetaObjects in %-v", total, repo)
} else if !autofix {
logger.Info("Found %d/%d orphaned LFSMetaObjects in %-v", orphaned, total, repo)
opts.Logger.Info("Found %d total LFSMetaObjects in %-v", total, repo)
} else if !opts.AutoFix {
opts.Logger.Info("Found %d/%d orphaned LFSMetaObjects in %-v", orphaned, total, repo)
} else {
logger.Info("Collected %d/%d orphaned/%d total LFSMetaObjects in %-v. %d removed from storage.", collected, orphaned, total, repo, deleted)
opts.Logger.Info("Collected %d/%d orphaned/%d total LFSMetaObjects in %-v. %d removed from storage.", collected, orphaned, total, repo, deleted)
}
}()
}
@ -60,17 +78,21 @@ func GarbageCollectLFSMetaObjectsForRepo(ctx context.Context, repo *repo_model.R
defer gitRepo.Close()
store := lfs.NewContentStore()
errStop := errors.New("STOPERR")
return git_model.IterateLFSMetaObjectsForRepo(ctx, repo.ID, func(ctx context.Context, metaObject *git_model.LFSMetaObject, count int64) error {
err = git_model.IterateLFSMetaObjectsForRepo(ctx, repo.ID, func(ctx context.Context, metaObject *git_model.LFSMetaObject, count int64) error {
if opts.NumberToCheckPerRepo > 0 && total > opts.NumberToCheckPerRepo {
return errStop
}
total++
pointerSha := git.ComputeBlobHash([]byte(metaObject.Pointer.StringContent()))
if gitRepo.IsObjectExist(pointerSha.String()) {
return nil
return git_model.MarkLFSMetaObject(ctx, metaObject.ID)
}
orphaned++
if !autofix {
if !opts.AutoFix {
return nil
}
// Non-existent pointer file
@ -100,6 +122,19 @@ func GarbageCollectLFSMetaObjectsForRepo(ctx context.Context, repo *repo_model.R
//
// It is likely that a week is potentially excessive but it should definitely be enough that any
// unassociated LFS object is genuinely unassociated.
OlderThan: time.Now().Add(-24 * 7 * time.Hour),
OlderThan: opts.OlderThan,
UpdatedLessRecentlyThan: opts.UpdatedLessRecentlyThan,
OrderByUpdated: true,
LoopFunctionAlwaysUpdates: true,
})
if err == errStop {
if opts.Logger != nil {
opts.Logger.Info("Processing stopped at %d total LFSMetaObjects in %-v", total, repo)
}
return nil
} else if err != nil {
return err
}
return nil
}
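
A hypothetical caller of the new options-based API (field values are examples, not the shipped cron defaults; Logger may be left nil since the function checks for it): one GC pass over every repository with LFS meta objects, collecting orphans older than a week that have not been rechecked in the last three days, capped per repository.

if err := repository.GarbageCollectLFSMetaObjects(ctx, repository.GarbageCollectLFSMetaObjectsOptions{
	AutoFix:                  true,
	OlderThan:                time.Now().Add(-7 * 24 * time.Hour),
	UpdatedLessRecentlyThan:  time.Now().Add(-3 * 24 * time.Hour),
	NumberToCheckPerRepo:     100,
	ProportionToCheckPerRepo: 0.6,
}); err != nil {
	log.Error("LFS GC failed: %v", err)
}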

View File

@ -84,7 +84,7 @@
<p>
---
<br>
<a href="{{.Link}}">{{.locale.Tr "mail.view_it_on" AppName}}</a>.
<a href="{{.Link}}">{{.locale.Tr "mail.view_it_on" AppName}}</a>{{if .CanReply}} {{.locale.Tr "mail.reply"}}{{end}}.
</p>
</div>
</body>

View File

@ -51,7 +51,7 @@
{{.locale.Tr "packages.settings.delete"}}
</div>
<div class="content">
<div class="ui warning message text left">
<div class="ui warning message text left word-break">
{{.locale.Tr "packages.settings.delete.notice" .PackageDescriptor.Package.Name .PackageDescriptor.Version.Version}}
</div>
<form class="ui form" action="{{.Link}}" method="post">

View File

@ -204,7 +204,7 @@
{{if .IsBlockedByApprovals}}
<div class="item">
<i class="icon icon-octicon">{{svg "octicon-x"}}</i>
{{$.locale.Tr "repo.pulls.blocked_by_approvals" .GrantedApprovals .Issue.PullRequest.ProtectedBranch.RequiredApprovals}}
{{$.locale.Tr "repo.pulls.blocked_by_approvals" .GrantedApprovals .ProtectedBranch.RequiredApprovals}}
</div>
{{else if .IsBlockedByRejection}}
<div class="item">
@ -444,7 +444,7 @@
{{if .IsBlockedByApprovals}}
<div class="item text red">
{{svg "octicon-x"}}
{{$.locale.Tr "repo.pulls.blocked_by_approvals" .GrantedApprovals .Issue.PullRequest.ProtectedBranch.RequiredApprovals}}
{{$.locale.Tr "repo.pulls.blocked_by_approvals" .GrantedApprovals .ProtectedBranch.RequiredApprovals}}
</div>
{{else if .IsBlockedByRejection}}
<div class="item text red">

View File

@ -43,31 +43,24 @@
<h4 class="ui top attached header">
{{.locale.Tr "repo.settings.protected_branch"}}
<div class="ui right">
<a class="ui primary tiny button" href="{{$.Repository.Link}}/settings/branches/edit">{{$.locale.Tr "repo.settings.branches.add_new_rule"}}</a>
</div>
</h4>
<div class="ui attached table segment">
<div class="ui grid padded">
<div class="eight wide column">
<div class="ui fluid dropdown selection" tabindex="0">
{{svg "octicon-triangle-down" 14 "dropdown icon"}}
<div class="default text">{{.locale.Tr "repo.settings.choose_branch"}}</div>
<div class="menu transition hidden" tabindex="-1" style="display: block !important;">
{{range .LeftBranches}}
<a class="item" href="{{$.Repository.Link}}/settings/branches/{{. | PathEscapeSegments}}">{{.}}</a>
{{end}}
</div>
</div>
</div>
</div>
<div class="ui grid padded">
<div class="sixteen wide column">
<table class="ui single line table padded">
<tbody>
{{range .ProtectedBranches}}
<tr>
<td><div class="ui basic primary label">{{.BranchName}}</div></td>
<td class="right aligned"><a class="rm ui button" href="{{$.Repository.Link}}/settings/branches/{{.BranchName | PathEscapeSegments}}">{{$.locale.Tr "repo.settings.edit_protected_branch"}}</a></td>
<td><div class="ui basic primary label">{{.RuleName}}</div></td>
<td class="right aligned">
<a class="rm ui button" href="{{$.Repository.Link}}/settings/branches/edit?rule_name={{.RuleName}}">{{$.locale.Tr "repo.settings.edit_protected_branch"}}</a>
<button class="ui red tiny button delete-button" data-url="{{$.Repository.Link}}/settings/branches/{{.ID}}/delete" data-id="{{.ID}}">
{{$.locale.Tr "repo.settings.protected_branch.delete_rule"}}</button>
</td>
</tr>
{{else}}
<tr class="center aligned"><td>{{.locale.Tr "repo.settings.no_protected_branch"}}</td></tr>
@ -102,4 +95,16 @@
{{end}}
</div>
</div>
<div class="ui small basic delete modal">
<div class="ui header">
{{svg "octicon-trash" 16 "mr-2"}}
{{.locale.Tr "repo.settings.protected_branch_deletion"}}
</div>
<div class="content">
<p>{{.locale.Tr "repo.settings.protected_branch_deletion_desc"}}</p>
</div>
{{template "base/delete_modal_actions" .}}
</div>
{{template "base/footer" .}}

View File

@ -4,42 +4,43 @@
{{template "repo/settings/navbar" .}}
<div class="ui container">
{{template "base/alert" .}}
<h4 class="ui top attached header">
{{.locale.Tr "repo.settings.branch_protection" (.Branch.BranchName|Escape) | Str2html}}
</h4>
<div class="ui attached segment branch-protection">
<form class="ui form" action="{{.Link}}" method="post">
{{.CsrfTokenHtml}}
<div class="inline field">
<div class="ui checkbox">
<input class="enable-protection" name="protected" type="checkbox" data-target="#protection_box" {{if .Branch.IsProtected}}checked{{end}}>
<label>{{.locale.Tr "repo.settings.protect_this_branch"}}</label>
<p class="help">{{.locale.Tr "repo.settings.protect_this_branch_desc"}}</p>
</div>
<form class="ui form" action="{{.Link}}" method="post">
<h4 class="ui top attached header">
{{.locale.Tr "repo.settings.branch_protection" (.Rule.RuleName|Escape) | Str2html}}
</h4>
<div class="ui attached segment branch-protection">
<div class="field">
<label for="protected_file_patterns">{{.locale.Tr "repo.settings.protect_branch_name_pattern"}}</label>
<input name="rule_name" type="text" value="{{.Rule.RuleName}}">
<input name="rule_id" type="hidden" value="{{.Rule.ID}}">
</div>
<div id="protection_box" class="fields {{if not .Branch.IsProtected}}disabled{{end}}">
<div class="ui divider"></div>
{{.CsrfTokenHtml}}
<div id="protection_box" class="fields">
<div class="field">
<div class="ui radio checkbox">
<input name="enable_push" type="radio" value="none" class="disable-whitelist" data-target="#whitelist_box" {{if not .Branch.CanPush}}checked{{end}}>
<input name="enable_push" type="radio" value="none" class="disable-whitelist" data-target="#whitelist_box" {{if not .Rule.CanPush}}checked{{end}}>
<label>{{.locale.Tr "repo.settings.protect_disable_push"}}</label>
<p class="help">{{.locale.Tr "repo.settings.protect_disable_push_desc"}}</p>
</div>
</div>
<div class="field">
<div class="ui radio checkbox">
<input name="enable_push" type="radio" value="all" class="disable-whitelist" data-target="#whitelist_box" {{if and (.Branch.CanPush) (not .Branch.EnableWhitelist)}}checked{{end}}>
<input name="enable_push" type="radio" value="all" class="disable-whitelist" data-target="#whitelist_box" {{if and (.Rule.CanPush) (not .Rule.EnableWhitelist)}}checked{{end}}>
<label>{{.locale.Tr "repo.settings.protect_enable_push"}}</label>
<p class="help">{{.locale.Tr "repo.settings.protect_enable_push_desc"}}</p>
</div>
</div>
<div class="field">
<div class="ui radio checkbox">
<input name="enable_push" type="radio" value="whitelist" class="enable-whitelist" data-target="#whitelist_box" {{if and (.Branch.CanPush) (.Branch.EnableWhitelist)}}checked{{end}}>
<input name="enable_push" type="radio" value="whitelist" class="enable-whitelist" data-target="#whitelist_box" {{if and (.Rule.CanPush) (.Rule.EnableWhitelist)}}checked{{end}}>
<label>{{.locale.Tr "repo.settings.protect_whitelist_committers"}}</label>
<p class="help">{{.locale.Tr "repo.settings.protect_whitelist_committers_desc"}}</p>
</div>
</div>
<div id="whitelist_box" class="fields {{if not .Branch.EnableWhitelist}}disabled{{end}}">
<div id="whitelist_box" class="fields {{if not .Rule.EnableWhitelist}}disabled{{end}}">
<div class="whitelist field">
<label>{{.locale.Tr "repo.settings.protect_whitelist_users"}}</label>
<div class="ui multiple search selection dropdown">
@ -76,20 +77,22 @@
<br>
<div class="whitelist field">
<div class="ui checkbox">
<input type="checkbox" name="whitelist_deploy_keys" {{if .Branch.WhitelistDeployKeys}}checked{{end}}>
<input type="checkbox" name="whitelist_deploy_keys" {{if .Rule.WhitelistDeployKeys}}checked{{end}}>
<label for="whitelist_deploy_keys">{{.locale.Tr "repo.settings.protect_whitelist_deploy_keys"}}</label>
</div>
</div>
</div>
<div class="ui divider"></div>
<div class="field">
<div class="ui checkbox">
<input class="enable-whitelist" name="enable_merge_whitelist" type="checkbox" data-target="#merge_whitelist_box" {{if .Branch.EnableMergeWhitelist}}checked{{end}}>
<input class="enable-whitelist" name="enable_merge_whitelist" type="checkbox" data-target="#merge_whitelist_box" {{if .Rule.EnableMergeWhitelist}}checked{{end}}>
<label>{{.locale.Tr "repo.settings.protect_merge_whitelist_committers"}}</label>
<p class="help">{{.locale.Tr "repo.settings.protect_merge_whitelist_committers_desc"}}</p>
</div>
</div>
<div id="merge_whitelist_box" class="fields {{if not .Branch.EnableMergeWhitelist}}disabled{{end}}">
<div id="merge_whitelist_box" class="fields {{if not .Rule.EnableMergeWhitelist}}disabled{{end}}">
<div class="whitelist field">
<label>{{.locale.Tr "repo.settings.protect_merge_whitelist_users"}}</label>
<div class="ui multiple search selection dropdown">
@ -127,13 +130,13 @@
<div class="field">
<div class="ui checkbox">
<input class="enable-statuscheck" name="enable_status_check" type="checkbox" data-target="#statuscheck_contexts_box" {{if eq (len .branch_status_check_contexts) 0}}disabled{{end}} {{if .Branch.EnableStatusCheck}}checked{{end}}>
<input class="enable-statuscheck" name="enable_status_check" type="checkbox" data-target="#statuscheck_contexts_box" {{if eq (len .branch_status_check_contexts) 0}}disabled{{end}} {{if .Rule.EnableStatusCheck}}checked{{end}}>
<label>{{.locale.Tr "repo.settings.protect_check_status_contexts"}}</label>
<p class="help">{{.locale.Tr "repo.settings.protect_check_status_contexts_desc"}}</p>
</div>
</div>
<div id="statuscheck_contexts_box" class="fields {{if not .Branch.EnableStatusCheck}}disabled{{end}}">
<div id="statuscheck_contexts_box" class="fields {{if not .Rule.EnableStatusCheck}}disabled{{end}}">
<div class="field">
<table class="ui celled table six column">
<thead>
@ -159,17 +162,17 @@
<div class="field">
<label for="required-approvals">{{.locale.Tr "repo.settings.protect_required_approvals"}}</label>
<input name="required_approvals" id="required-approvals" type="number" value="{{.Branch.RequiredApprovals}}">
<input name="required_approvals" id="required-approvals" type="number" value="{{.Rule.RequiredApprovals}}">
<p class="help">{{.locale.Tr "repo.settings.protect_required_approvals_desc"}}</p>
</div>
<div class="field">
<div class="ui checkbox">
<input class="enable-whitelist" name="enable_approvals_whitelist" type="checkbox" data-target="#approvals_whitelist_box" {{if .Branch.EnableApprovalsWhitelist}}checked{{end}}>
<input class="enable-whitelist" name="enable_approvals_whitelist" type="checkbox" data-target="#approvals_whitelist_box" {{if .Rule.EnableApprovalsWhitelist}}checked{{end}}>
<label>{{.locale.Tr "repo.settings.protect_approvals_whitelist_enabled"}}</label>
<p class="help">{{.locale.Tr "repo.settings.protect_approvals_whitelist_enabled_desc"}}</p>
</div>
</div>
<div id="approvals_whitelist_box" class="fields {{if not .Branch.EnableApprovalsWhitelist}}disabled{{end}}">
<div id="approvals_whitelist_box" class="fields {{if not .Rule.EnableApprovalsWhitelist}}disabled{{end}}">
<div class="whitelist field">
<label>{{.locale.Tr "repo.settings.protect_approvals_whitelist_users"}}</label>
<div class="ui multiple search selection dropdown">
@ -206,59 +209,59 @@
</div>
<div class="field">
<div class="ui checkbox">
<input name="block_on_rejected_reviews" type="checkbox" {{if .Branch.BlockOnRejectedReviews}}checked{{end}}>
<input name="block_on_rejected_reviews" type="checkbox" {{if .Rule.BlockOnRejectedReviews}}checked{{end}}>
<label for="block_on_rejected_reviews">{{.locale.Tr "repo.settings.block_rejected_reviews"}}</label>
<p class="help">{{.locale.Tr "repo.settings.block_rejected_reviews_desc"}}</p>
</div>
</div>
<div class="field">
<div class="ui checkbox">
<input name="block_on_official_review_requests" type="checkbox" {{if .Branch.BlockOnOfficialReviewRequests}}checked{{end}}>
<input name="block_on_official_review_requests" type="checkbox" {{if .Rule.BlockOnOfficialReviewRequests}}checked{{end}}>
<label for="block_on_official_review_requests">{{.locale.Tr "repo.settings.block_on_official_review_requests"}}</label>
<p class="help">{{.locale.Tr "repo.settings.block_on_official_review_requests_desc"}}</p>
</div>
</div>
<div class="field">
<div class="ui checkbox">
<input name="dismiss_stale_approvals" type="checkbox" {{if .Branch.DismissStaleApprovals}}checked{{end}}>
<input name="dismiss_stale_approvals" type="checkbox" {{if .Rule.DismissStaleApprovals}}checked{{end}}>
<label for="dismiss_stale_approvals">{{.locale.Tr "repo.settings.dismiss_stale_approvals"}}</label>
<p class="help">{{.locale.Tr "repo.settings.dismiss_stale_approvals_desc"}}</p>
</div>
</div>
<div class="field">
<div class="ui checkbox">
<input name="require_signed_commits" type="checkbox" {{if .Branch.RequireSignedCommits}}checked{{end}}>
<input name="require_signed_commits" type="checkbox" {{if .Rule.RequireSignedCommits}}checked{{end}}>
<label for="require_signed_commits">{{.locale.Tr "repo.settings.require_signed_commits"}}</label>
<p class="help">{{.locale.Tr "repo.settings.require_signed_commits_desc"}}</p>
</div>
</div>
<div class="field">
<div class="ui checkbox">
<input name="block_on_outdated_branch" type="checkbox" {{if .Branch.BlockOnOutdatedBranch}}checked{{end}}>
<input name="block_on_outdated_branch" type="checkbox" {{if .Rule.BlockOnOutdatedBranch}}checked{{end}}>
<label for="block_on_outdated_branch">{{.locale.Tr "repo.settings.block_outdated_branch"}}</label>
<p class="help">{{.locale.Tr "repo.settings.block_outdated_branch_desc"}}</p>
</div>
</div>
<div class="field">
<label for="protected_file_patterns">{{.locale.Tr "repo.settings.protect_protected_file_patterns"}}</label>
<input name="protected_file_patterns" id="protected_file_patterns" type="text" value="{{.Branch.ProtectedFilePatterns}}">
<input name="protected_file_patterns" id="protected_file_patterns" type="text" value="{{.Rule.ProtectedFilePatterns}}">
<p class="help">{{.locale.Tr "repo.settings.protect_protected_file_patterns_desc" | Safe}}</p>
</div>
<div class="field">
<label for="unprotected_file_patterns">{{.locale.Tr "repo.settings.protect_unprotected_file_patterns"}}</label>
<input name="unprotected_file_patterns" id="unprotected_file_patterns" type="text" value="{{.Branch.UnprotectedFilePatterns}}">
<input name="unprotected_file_patterns" id="unprotected_file_patterns" type="text" value="{{.Rule.UnprotectedFilePatterns}}">
<p class="help">{{.locale.Tr "repo.settings.protect_unprotected_file_patterns_desc" | Safe}}</p>
</div>
</div>
<div class="ui divider"></div>
<div class="field">
<button class="ui green button">{{$.locale.Tr "repo.settings.update_settings"}}</button>
<button class="ui green button">{{$.locale.Tr "repo.settings.protected_branch.save_rule"}}</button>
<button class="ui gray button">{{$.locale.Tr "cancel"}}</button>
</div>
</form>
</div>
</div>
</form>
</div>
</div>
{{template "base/footer" .}}

View File

@ -14233,6 +14233,7 @@
"x-go-name": "BlockOnRejectedReviews"
},
"branch_name": {
"description": "Deprecated: true",
"type": "string",
"x-go-name": "BranchName"
},
@ -14310,6 +14311,10 @@
"format": "int64",
"x-go-name": "RequiredApprovals"
},
"rule_name": {
"type": "string",
"x-go-name": "RuleName"
},
"status_check_contexts": {
"type": "array",
"items": {
@ -14772,6 +14777,7 @@
"x-go-name": "BlockOnRejectedReviews"
},
"branch_name": {
"description": "Deprecated: true",
"type": "string",
"x-go-name": "BranchName"
},
@ -14844,6 +14850,10 @@
"format": "int64",
"x-go-name": "RequiredApprovals"
},
"rule_name": {
"type": "string",
"x-go-name": "RuleName"
},
"status_check_contexts": {
"type": "array",
"items": {

View File

@ -38,21 +38,21 @@ func testAPIGetBranchProtection(t *testing.T, branchName string, expectedHTTPSta
if resp.Code == http.StatusOK {
var branchProtection api.BranchProtection
DecodeJSON(t, resp, &branchProtection)
assert.EqualValues(t, branchName, branchProtection.BranchName)
assert.EqualValues(t, branchName, branchProtection.RuleName)
}
}
func testAPICreateBranchProtection(t *testing.T, branchName string, expectedHTTPStatus int) {
token := getUserToken(t, "user2")
req := NewRequestWithJSON(t, "POST", "/api/v1/repos/user2/repo1/branch_protections?token="+token, &api.BranchProtection{
BranchName: branchName,
RuleName: branchName,
})
resp := MakeRequest(t, req, expectedHTTPStatus)
if resp.Code == http.StatusCreated {
var branchProtection api.BranchProtection
DecodeJSON(t, resp, &branchProtection)
assert.EqualValues(t, branchName, branchProtection.BranchName)
assert.EqualValues(t, branchName, branchProtection.RuleName)
}
}
@ -64,7 +64,7 @@ func testAPIEditBranchProtection(t *testing.T, branchName string, body *api.Bran
if resp.Code == http.StatusOK {
var branchProtection api.BranchProtection
DecodeJSON(t, resp, &branchProtection)
assert.EqualValues(t, branchName, branchProtection.BranchName)
assert.EqualValues(t, branchName, branchProtection.RuleName)
}
}
@ -169,8 +169,8 @@ func testAPICreateBranch(t testing.TB, session *TestSession, user, repo, oldBran
func TestAPIBranchProtection(t *testing.T) {
defer tests.PrepareTestEnv(t)()
// Branch protection only on branch that exist
testAPICreateBranchProtection(t, "master/doesnotexist", http.StatusNotFound)
// Branch protection on a branch that does not exist
testAPICreateBranchProtection(t, "master/doesnotexist", http.StatusCreated)
// Get branch protection on branch that exist but not branch protection
testAPIGetBranchProtection(t, "master", http.StatusNotFound)

View File

@ -256,6 +256,32 @@ func TestPackageContainer(t *testing.T) {
})
})
t.Run("UploadBlob/Mount", func(t *testing.T) {
defer tests.PrintCurrentTest(t)()
req := NewRequest(t, "POST", fmt.Sprintf("%s/blobs/uploads?mount=%s", url, unknownDigest))
addTokenAuthHeader(req, userToken)
MakeRequest(t, req, http.StatusAccepted)
req = NewRequest(t, "POST", fmt.Sprintf("%s/blobs/uploads?mount=%s", url, blobDigest))
addTokenAuthHeader(req, userToken)
resp := MakeRequest(t, req, http.StatusCreated)
assert.Equal(t, fmt.Sprintf("/v2/%s/%s/blobs/%s", user.Name, image, blobDigest), resp.Header().Get("Location"))
assert.Equal(t, blobDigest, resp.Header().Get("Docker-Content-Digest"))
req = NewRequest(t, "POST", fmt.Sprintf("%s/blobs/uploads?mount=%s&from=%s", url, unknownDigest, "unknown/image"))
addTokenAuthHeader(req, userToken)
MakeRequest(t, req, http.StatusAccepted)
req = NewRequest(t, "POST", fmt.Sprintf("%s/blobs/uploads?mount=%s&from=%s/%s", url, blobDigest, user.Name, image))
addTokenAuthHeader(req, userToken)
resp = MakeRequest(t, req, http.StatusCreated)
assert.Equal(t, fmt.Sprintf("/v2/%s/%s/blobs/%s", user.Name, image, blobDigest), resp.Header().Get("Location"))
assert.Equal(t, blobDigest, resp.Header().Get("Docker-Content-Digest"))
})
for _, tag := range tags {
t.Run(fmt.Sprintf("[Tag:%s]", tag), func(t *testing.T) {
t.Run("UploadManifest", func(t *testing.T) {
@ -444,21 +470,6 @@ func TestPackageContainer(t *testing.T) {
assert.Equal(t, indexManifestDigest, pd.Files[0].Properties.GetByName(container_module.PropertyDigest))
})
t.Run("UploadBlob/Mount", func(t *testing.T) {
defer tests.PrintCurrentTest(t)()
req := NewRequest(t, "POST", fmt.Sprintf("%s/blobs/uploads?mount=%s", url, unknownDigest))
addTokenAuthHeader(req, userToken)
MakeRequest(t, req, http.StatusAccepted)
req = NewRequest(t, "POST", fmt.Sprintf("%s/blobs/uploads?mount=%s", url, blobDigest))
addTokenAuthHeader(req, userToken)
resp := MakeRequest(t, req, http.StatusCreated)
assert.Equal(t, fmt.Sprintf("/v2/%s/%s/blobs/%s", user.Name, image, blobDigest), resp.Header().Get("Location"))
assert.Equal(t, blobDigest, resp.Header().Get("Docker-Content-Digest"))
})
t.Run("HeadBlob", func(t *testing.T) {
defer tests.PrintCurrentTest(t)()

View File

@ -10,6 +10,8 @@ import (
"path"
"testing"
"code.gitea.io/gitea/modules/json"
"github.com/stretchr/testify/assert"
)
@ -43,15 +45,16 @@ func TestCreateFileOnProtectedBranch(t *testing.T) {
csrf := GetCSRF(t, session, "/user2/repo1/settings/branches")
// Change master branch to protected
req := NewRequestWithValues(t, "POST", "/user2/repo1/settings/branches/master", map[string]string{
"_csrf": csrf,
"protected": "on",
req := NewRequestWithValues(t, "POST", "/user2/repo1/settings/branches/edit", map[string]string{
"_csrf": csrf,
"rule_name": "master",
"enable_push": "true",
})
session.MakeRequest(t, req, http.StatusSeeOther)
// Check if master branch has been locked successfully
flashCookie := session.GetCookie("macaron_flash")
assert.NotNil(t, flashCookie)
assert.EqualValues(t, "success%3DBranch%2Bprotection%2Bfor%2Bbranch%2B%2527master%2527%2Bhas%2Bbeen%2Bupdated.", flashCookie.Value)
assert.EqualValues(t, "success%3DBranch%2Bprotection%2Bfor%2Brule%2B%2527master%2527%2Bhas%2Bbeen%2Bupdated.", flashCookie.Value)
// Request editor page
req = NewRequest(t, "GET", "/user2/repo1/_new/master/")
@ -76,16 +79,22 @@ func TestCreateFileOnProtectedBranch(t *testing.T) {
// remove the protected branch
csrf = GetCSRF(t, session, "/user2/repo1/settings/branches")
// Change master branch to protected
req = NewRequestWithValues(t, "POST", "/user2/repo1/settings/branches/master", map[string]string{
"_csrf": csrf,
"protected": "off",
req = NewRequestWithValues(t, "POST", "/user2/repo1/settings/branches/1/delete", map[string]string{
"_csrf": csrf,
})
session.MakeRequest(t, req, http.StatusSeeOther)
resp = session.MakeRequest(t, req, http.StatusOK)
res := make(map[string]string)
assert.NoError(t, json.NewDecoder(resp.Body).Decode(&res))
assert.EqualValues(t, "/user2/repo1/settings/branches", res["redirect"])
// Check if master branch has been locked successfully
flashCookie = session.GetCookie("macaron_flash")
assert.NotNil(t, flashCookie)
assert.EqualValues(t, "success%3DBranch%2Bprotection%2Bfor%2Bbranch%2B%2527master%2527%2Bhas%2Bbeen%2Bdisabled.", flashCookie.Value)
assert.EqualValues(t, "error%3DRemoving%2Bbranch%2Bprotection%2Brule%2B%25271%2527%2Bfailed.", flashCookie.Value)
})
}

View File

@ -414,9 +414,9 @@ func doProtectBranch(ctx APITestContext, branch, userToWhitelist, unprotectedFil
if userToWhitelist == "" {
// Change branch to protected
req := NewRequestWithValues(t, "POST", fmt.Sprintf("/%s/%s/settings/branches/%s", url.PathEscape(ctx.Username), url.PathEscape(ctx.Reponame), url.PathEscape(branch)), map[string]string{
req := NewRequestWithValues(t, "POST", fmt.Sprintf("/%s/%s/settings/branches/edit", url.PathEscape(ctx.Username), url.PathEscape(ctx.Reponame)), map[string]string{
"_csrf": csrf,
"protected": "on",
"rule_name": branch,
"unprotected_file_patterns": unprotectedFilePatterns,
})
ctx.Session.MakeRequest(t, req, http.StatusSeeOther)
@ -424,9 +424,9 @@ func doProtectBranch(ctx APITestContext, branch, userToWhitelist, unprotectedFil
user, err := user_model.GetUserByName(db.DefaultContext, userToWhitelist)
assert.NoError(t, err)
// Change branch to protected
req := NewRequestWithValues(t, "POST", fmt.Sprintf("/%s/%s/settings/branches/%s", url.PathEscape(ctx.Username), url.PathEscape(ctx.Reponame), url.PathEscape(branch)), map[string]string{
req := NewRequestWithValues(t, "POST", fmt.Sprintf("/%s/%s/settings/branches/edit", url.PathEscape(ctx.Username), url.PathEscape(ctx.Reponame)), map[string]string{
"_csrf": csrf,
"protected": "on",
"rule_name": branch,
"enable_push": "whitelist",
"enable_whitelist": "on",
"whitelist_users": strconv.FormatInt(user.ID, 10),
@ -437,7 +437,7 @@ func doProtectBranch(ctx APITestContext, branch, userToWhitelist, unprotectedFil
// Check if master branch has been locked successfully
flashCookie := ctx.Session.GetCookie("macaron_flash")
assert.NotNil(t, flashCookie)
assert.EqualValues(t, "success%3DBranch%2Bprotection%2Bfor%2Bbranch%2B%2527"+url.QueryEscape(branch)+"%2527%2Bhas%2Bbeen%2Bupdated.", flashCookie.Value)
assert.EqualValues(t, "success%3DBranch%2Bprotection%2Bfor%2Brule%2B%2527"+url.QueryEscape(branch)+"%2527%2Bhas%2Bbeen%2Bupdated.", flashCookie.Value)
}
}

View File

@ -1,6 +1,6 @@
function displayError(el, err) {
const target = targetElement(el);
target.remove('is-loading');
target.classList.remove('is-loading');
const errorNode = document.createElement('div');
errorNode.setAttribute('class', 'ui message error markup-block-error mono');
errorNode.textContent = err.str || err.message || String(err);
@ -23,13 +23,15 @@ export async function renderMath() {
for (const el of els) {
const source = el.textContent;
const nodeName = el.classList.contains('display') ? 'p' : 'span';
const displayMode = el.classList.contains('display');
const nodeName = displayMode ? 'p' : 'span';
try {
const tempEl = document.createElement(nodeName);
katex.render(source, tempEl, {
maxSize: 25,
maxExpand: 50,
displayMode,
});
targetElement(el).replaceWith(tempEl);
} catch (error) {