Compare commits

...

8 Commits

Author SHA1 Message Date
wxiaoguang
7e3b7c2346
Do not recognize text files as audio (#23355)
Close #17108

This PR uses a trick (removing the ID3 tag) to detect the content again
and see whether the content is of text type.
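
For reference, a minimal sketch of that trick using only the standard library's content sniffing (a simplification of the typesniffer change further down, not the exact Gitea code):

```go
package main

import (
	"bytes"
	"fmt"
	"net/http"
	"strings"
)

// detectWithID3Fallback sketches the idea: http.DetectContentType reports any
// content starting with "ID3" as "audio/mpeg", so when that happens the "ID3"
// prefix is stripped and the remaining bytes are sniffed again. If the second
// pass says the content is text, the text type wins.
func detectWithID3Fallback(data []byte) string {
	ct := http.DetectContentType(data)
	if strings.HasPrefix(ct, "audio/") && bytes.HasPrefix(data, []byte("ID3")) {
		ct2 := http.DetectContentType(data[3:])
		if strings.HasPrefix(ct2, "text/") {
			return ct2
		}
	}
	return ct
}

func main() {
	fmt.Println(detectWithID3Fallback([]byte("ID3Toy\n==== plain text, not an MP3"))) // text/plain; charset=utf-8
	fmt.Println(detectWithID3Fallback([]byte("plain text")))                          // text/plain; charset=utf-8
}
```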

---------

Co-authored-by: delvh <dev.lh@web.de>
Co-authored-by: techknowlogick <techknowlogick@gitea.io>
Co-authored-by: Lunny Xiao <xiaolunwen@gmail.com>
2023-03-07 22:40:41 -05:00
wxiaoguang
b70c7f8660
Fix incorrect display for comment context menu (#23343)
Replace #23342 

Fix a regression of #23014: an `a` element couldn't be used here because
Fomantic UI has a conflicting style: `.ui.comments .comment .actions a {
display: inline-block; }`


It also completes one more of my TODOs: "in the future there could be a
special CSS class for it"
2023-03-08 11:26:37 +08:00
GiteaBot
2dc4f80af0 [skip ci] Updated translations via Crowdin 2023-03-08 00:15:50 +00:00
delvh
608a3eeb59
Pass context to avatar for projects view (#23359)
Previously, a 500 response was returned when
- an issue had assignees
- the issue was assigned to a project
- you tried to view this project

Co-authored-by: techknowlogick <techknowlogick@gitea.io>
2023-03-07 18:17:35 -05:00
zeripath
8598356df1
Refactor and tidy-up the merge/update branch code (#22568)
The merge and update-branch code was previously a little tangled and had
some very long functions. The functions were not very clear in their
reasoning, their logging was deficient, and there was at least one bug in
the handling of LFS for update by rebase.

This PR substantially refactors this code and splits things out into
separate functions. It also attempts to tidy up the calls by wrapping
things in "context"s, and tries to improve logging when errors occur.
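
A rough sketch of that "context" wrapping, with field names approximated from the `mergeContext` usage visible in the pull-service diff below (an illustration of the pattern, not the full type):

```go
package main

import (
	"fmt"
	"strings"
)

// runOpts stands in for Gitea's git.RunOpts (assumption: reduced here to the
// three fields the merge helpers actually reuse).
type runOpts struct {
	Dir    string
	Stdout *strings.Builder
	Stderr *strings.Builder
}

// mergeContext bundles the state that every merge step needs: the temporary
// repo path, the pushing environment and shared output buffers. Helpers then
// take this one value instead of a long parameter list.
type mergeContext struct {
	tmpBasePath string
	env         []string
	outbuf      strings.Builder
	errbuf      strings.Builder
}

// RunOpts returns command options pointing at the temporary merge repo,
// mirroring the ctx.RunOpts() calls that appear throughout the refactored code.
func (ctx *mergeContext) RunOpts() *runOpts {
	return &runOpts{Dir: ctx.tmpBasePath, Stdout: &ctx.outbuf, Stderr: &ctx.errbuf}
}

func main() {
	ctx := &mergeContext{tmpBasePath: "/tmp/gitea-merge-123"}
	fmt.Println(ctx.RunOpts().Dir) // every git command for this merge runs in the same temp repo
}
```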

Signed-off-by: Andrew Thornton <art27@cantab.net>

---------

Signed-off-by: Andrew Thornton <art27@cantab.net>
Co-authored-by: techknowlogick <techknowlogick@gitea.io>
Co-authored-by: delvh <dev.lh@web.de>
2023-03-07 15:07:35 -05:00
Hester Gong
a2f44463f0
Fix adding of empty class name (#23352)
This PR fixes the error shown below. It happens because the
[`class-name`
prop](https://github.com/go-gitea/gitea/blob/main/web_src/js/components/ActionRunStatus.vue#L6)
given to the `svg` component contains a space, and classList cannot add an
empty string.

https://user-images.githubusercontent.com/17645053/223346720-c7f9de43-5e69-4ecf-93c0-90bf04090693.mov

---------

Co-authored-by: Lunny Xiao <xiaolunwen@gmail.com>
2023-03-07 23:08:22 +08:00
wxiaoguang
4c59c8c768
Fix various ImageDiff/SVG bugs (#23312)
Replace #23310, Close #19733

It also fixes various UI problems, including regressions from #22959, #22950
and more.

## SVG Detection

The old regexp could incorrectly match non-SVG files. This PR adds new tests for
those cases.
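
A trimmed-down sketch of the new detection flow (only the plain-text path; the real code in the `typesniffer` package also checks the sniffed MIME type first and has a separate regexp for content with an XML prolog):

```go
package main

import (
	"bytes"
	"fmt"
	"regexp"
)

// Simplified versions of the regexps from this PR: comments are now stripped
// in a separate pass instead of being matched inside the SVG-tag expression.
var (
	svgComment  = regexp.MustCompile(`(?s)<!--.*?-->`)
	svgTagRegex = regexp.MustCompile(`(?si)\A\s*(?:(<!DOCTYPE\s+svg([\s:]+.*?>|>))\s*)*<svg\b`)
)

// looksLikeSVG sketches the new flow: remove comments, trim whitespace, then
// require the remaining content to start with an (optionally DOCTYPE-prefixed) <svg> tag.
func looksLikeSVG(data []byte) bool {
	data = svgComment.ReplaceAll(data, nil)
	data = bytes.TrimSpace(data)
	return svgTagRegex.Match(data)
}

func main() {
	fmt.Println(looksLikeSVG([]byte("<!-- comment --> <svg></svg>")))     // true
	fmt.Println(looksLikeSVG([]byte("<div><!-- x --><svg></svg></div>"))) // false: <svg> is not at the start
}
```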

## UI Changes

### Before

![image](https://user-images.githubusercontent.com/2114189/222967716-f6ad8721-f46a-4a3f-9eb0-a89e488d3436.png)

![image](https://user-images.githubusercontent.com/2114189/222967780-8af8981a-e69d-4304-9dc4-0235582fa4f4.png)

### After

![image](https://user-images.githubusercontent.com/2114189/222967575-c21c23d4-0200-4e09-aac3-57895e853000.png)

![image](https://user-images.githubusercontent.com/2114189/222967585-8b8da262-bc96-441a-9851-8d3845f2659d.png)

![image](https://user-images.githubusercontent.com/2114189/222967595-58d9bea5-6df4-41fa-bf8a-86704117959d.png)

![image](https://user-images.githubusercontent.com/2114189/222967608-38757c1a-b8bd-4ebf-b7a8-3b30edb7f303.png)

![image](https://user-images.githubusercontent.com/2114189/222967623-9849a339-6fae-4484-8fa5-939e2fdacbf5.png)

![image](https://user-images.githubusercontent.com/2114189/222967633-4383d7dd-62ba-47a3-8c10-86f7ca7757ae.png)

---------

Co-authored-by: Lunny Xiao <xiaolunwen@gmail.com>
2023-03-07 20:11:24 +08:00
Jason Song
c84238800b
Refactor setting.Database.UseXXX to methods (#23354)
Replace #23350.

Refactor `setting.Database.UseMySQL` to
`setting.Database.Type.IsMySQL()`.

This avoids mismatches between `Type` and the `UseXXX` flags.

This refactor can fix the bug mentioned in #23350, so it should be
backported.
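
Condensed, the new type and its helpers from the `setting` package diff below look like this; the `main` function is a hypothetical usage example showing how call sites change:

```go
package main

import "fmt"

// DatabaseType is the new string-based type introduced by this PR: the single
// Type value replaces the four UseXXX booleans, so the two can no longer disagree.
type DatabaseType string

func (t DatabaseType) String() string     { return string(t) }
func (t DatabaseType) IsSQLite3() bool    { return t == "sqlite3" }
func (t DatabaseType) IsMySQL() bool      { return t == "mysql" }
func (t DatabaseType) IsMSSQL() bool      { return t == "mssql" }
func (t DatabaseType) IsPostgreSQL() bool { return t == "postgres" }

func main() {
	dbType := DatabaseType("mysql")
	// Call sites change from `if setting.Database.UseMySQL { ... }`
	// to `if setting.Database.Type.IsMySQL() { ... }`.
	fmt.Println(dbType.IsMySQL(), dbType.IsPostgreSQL()) // true false
}
```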
2023-03-07 18:51:06 +08:00
57 changed files with 938 additions and 732 deletions

View File

@ -35,7 +35,7 @@ func runConvert(ctx *cli.Context) error {
log.Info("Log path: %s", setting.Log.RootPath)
log.Info("Configuration file: %s", setting.CustomConf)
if !setting.Database.UseMySQL {
if !setting.Database.Type.IsMySQL() {
fmt.Println("This command can only be used with a MySQL database")
return nil
}

View File

@ -279,7 +279,7 @@ func runDump(ctx *cli.Context) error {
}()
targetDBType := ctx.String("database")
if len(targetDBType) > 0 && targetDBType != setting.Database.Type {
if len(targetDBType) > 0 && targetDBType != setting.Database.Type.String() {
log.Info("Dumping database %s => %s...", setting.Database.Type, targetDBType)
} else {
log.Info("Dumping database...")

View File

@ -99,7 +99,7 @@ func (a *Action) TableIndices() []*schemas.Index {
actUserIndex.AddColumn("act_user_id", "repo_id", "created_unix", "user_id", "is_deleted")
indices := []*schemas.Index{actUserIndex, repoIndex}
if setting.Database.UsePostgreSQL {
if setting.Database.Type.IsPostgreSQL() {
cudIndex := schemas.NewIndex("c_u_d", schemas.IndexType)
cudIndex.AddColumn("created_unix", "user_id", "is_deleted")
indices = append(indices, cudIndex)
@ -640,7 +640,7 @@ func DeleteIssueActions(ctx context.Context, repoID, issueID int64) error {
// CountActionCreatedUnixString count actions where created_unix is an empty string
func CountActionCreatedUnixString(ctx context.Context) (int64, error) {
if setting.Database.UseSQLite3 {
if setting.Database.Type.IsSQLite3() {
return db.GetEngine(ctx).Where(`created_unix = ""`).Count(new(Action))
}
return 0, nil
@ -648,7 +648,7 @@ func CountActionCreatedUnixString(ctx context.Context) (int64, error) {
// FixActionCreatedUnixString set created_unix to zero if it is an empty string
func FixActionCreatedUnixString(ctx context.Context) (int64, error) {
if setting.Database.UseSQLite3 {
if setting.Database.Type.IsSQLite3() {
res, err := db.GetEngine(ctx).Exec(`UPDATE action SET created_unix = 0 WHERE created_unix = ""`)
if err != nil {
return 0, err

View File

@ -243,7 +243,7 @@ func TestGetFeedsCorrupted(t *testing.T) {
}
func TestConsistencyUpdateAction(t *testing.T) {
if !setting.Database.UseSQLite3 {
if !setting.Database.Type.IsSQLite3() {
t.Skip("Test is only for SQLite database.")
}
assert.NoError(t, unittest.PrepareTestDatabase())

View File

@ -39,9 +39,9 @@ func getUserHeatmapData(user *user_model.User, team *organization.Team, doer *us
groupBy := "created_unix / 900 * 900"
groupByName := "timestamp" // We need this extra case because mssql doesn't allow grouping by alias
switch {
case setting.Database.UseMySQL:
case setting.Database.Type.IsMySQL():
groupBy = "created_unix DIV 900 * 900"
case setting.Database.UseMSSQL:
case setting.Database.Type.IsMSSQL():
groupByName = groupBy
}

View File

@ -15,7 +15,7 @@ import (
// BuildCaseInsensitiveLike returns a condition to check if the given value is like the given key case-insensitively.
// Handles especially SQLite correctly as UPPER there only transforms ASCII letters.
func BuildCaseInsensitiveLike(key, value string) builder.Cond {
if setting.Database.UseSQLite3 {
if setting.Database.Type.IsSQLite3() {
return builder.Like{"UPPER(" + key + ")", util.ToUpperASCII(value)}
}
return builder.Like{"UPPER(" + key + ")", strings.ToUpper(value)}

View File

@ -101,12 +101,12 @@ func newXORMEngine() (*xorm.Engine, error) {
var engine *xorm.Engine
if setting.Database.UsePostgreSQL && len(setting.Database.Schema) > 0 {
if setting.Database.Type.IsPostgreSQL() && len(setting.Database.Schema) > 0 {
// OK whilst we sort out our schema issues - create a schema aware postgres
registerPostgresSchemaDriver()
engine, err = xorm.NewEngine("postgresschema", connStr)
} else {
engine, err = xorm.NewEngine(setting.Database.Type, connStr)
engine, err = xorm.NewEngine(setting.Database.Type.String(), connStr)
}
if err != nil {

View File

@ -73,7 +73,7 @@ func postgresGetNextResourceIndex(ctx context.Context, tableName string, groupID
// GetNextResourceIndex generates a resource index, it must run in the same transaction where the resource is created
func GetNextResourceIndex(ctx context.Context, tableName string, groupID int64) (int64, error) {
if setting.Database.UsePostgreSQL {
if setting.Database.Type.IsPostgreSQL() {
return postgresGetNextResourceIndex(ctx, tableName, groupID)
}

View File

@ -13,7 +13,7 @@ import (
// CountBadSequences looks for broken sequences from recreate-table mistakes
func CountBadSequences(_ context.Context) (int64, error) {
if !setting.Database.UsePostgreSQL {
if !setting.Database.Type.IsPostgreSQL() {
return 0, nil
}
@ -34,7 +34,7 @@ func CountBadSequences(_ context.Context) (int64, error) {
// FixBadSequences fixes for broken sequences from recreate-table mistakes
func FixBadSequences(_ context.Context) error {
if !setting.Database.UsePostgreSQL {
if !setting.Database.Type.IsPostgreSQL() {
return nil
}

View File

@ -65,7 +65,7 @@ func postgresGetCommitStatusIndex(ctx context.Context, repoID int64, sha string)
// GetNextCommitStatusIndex retried 3 times to generate a resource index
func GetNextCommitStatusIndex(ctx context.Context, repoID int64, sha string) (int64, error) {
if setting.Database.UsePostgreSQL {
if setting.Database.Type.IsPostgreSQL() {
return postgresGetCommitStatusIndex(ctx, repoID, sha)
}

View File

@ -89,7 +89,7 @@ func RecreateTable(sess *xorm.Session, bean interface{}) error {
hasID = hasID || (column.IsPrimaryKey && column.IsAutoIncrement)
}
if hasID && setting.Database.UseMSSQL {
if hasID && setting.Database.Type.IsMSSQL() {
if _, err := sess.Exec(fmt.Sprintf("SET IDENTITY_INSERT `%s` ON", tempTableName)); err != nil {
log.Error("Unable to set identity insert for table %s. Error: %v", tempTableName, err)
return err
@ -143,7 +143,7 @@ func RecreateTable(sess *xorm.Session, bean interface{}) error {
return err
}
if hasID && setting.Database.UseMSSQL {
if hasID && setting.Database.Type.IsMSSQL() {
if _, err := sess.Exec(fmt.Sprintf("SET IDENTITY_INSERT `%s` OFF", tempTableName)); err != nil {
log.Error("Unable to switch off identity insert for table %s. Error: %v", tempTableName, err)
return err
@ -151,7 +151,7 @@ func RecreateTable(sess *xorm.Session, bean interface{}) error {
}
switch {
case setting.Database.UseSQLite3:
case setting.Database.Type.IsSQLite3():
// SQLite will drop all the constraints on the old table
if _, err := sess.Exec(fmt.Sprintf("DROP TABLE `%s`", tableName)); err != nil {
log.Error("Unable to drop old table %s. Error: %v", tableName, err)
@ -178,7 +178,7 @@ func RecreateTable(sess *xorm.Session, bean interface{}) error {
return err
}
case setting.Database.UseMySQL:
case setting.Database.Type.IsMySQL():
// MySQL will drop all the constraints on the old table
if _, err := sess.Exec(fmt.Sprintf("DROP TABLE `%s`", tableName)); err != nil {
log.Error("Unable to drop old table %s. Error: %v", tableName, err)
@ -205,7 +205,7 @@ func RecreateTable(sess *xorm.Session, bean interface{}) error {
log.Error("Unable to recreate uniques on table %s. Error: %v", tableName, err)
return err
}
case setting.Database.UsePostgreSQL:
case setting.Database.Type.IsPostgreSQL():
var originalSequences []string
type sequenceData struct {
LastValue int `xorm:"'last_value'"`
@ -296,7 +296,7 @@ func RecreateTable(sess *xorm.Session, bean interface{}) error {
}
case setting.Database.UseMSSQL:
case setting.Database.Type.IsMSSQL():
// MSSQL will drop all the constraints on the old table
if _, err := sess.Exec(fmt.Sprintf("DROP TABLE `%s`", tableName)); err != nil {
log.Error("Unable to drop old table %s. Error: %v", tableName, err)
@ -323,7 +323,7 @@ func DropTableColumns(sess *xorm.Session, tableName string, columnNames ...strin
// TODO: This will not work if there are foreign keys
switch {
case setting.Database.UseSQLite3:
case setting.Database.Type.IsSQLite3():
// First drop the indexes on the columns
res, errIndex := sess.Query(fmt.Sprintf("PRAGMA index_list(`%s`)", tableName))
if errIndex != nil {
@ -405,7 +405,7 @@ func DropTableColumns(sess *xorm.Session, tableName string, columnNames ...strin
return err
}
case setting.Database.UsePostgreSQL:
case setting.Database.Type.IsPostgreSQL():
cols := ""
for _, col := range columnNames {
if cols != "" {
@ -416,7 +416,7 @@ func DropTableColumns(sess *xorm.Session, tableName string, columnNames ...strin
if _, err := sess.Exec(fmt.Sprintf("ALTER TABLE `%s` %s", tableName, cols)); err != nil {
return fmt.Errorf("Drop table `%s` columns %v: %v", tableName, columnNames, err)
}
case setting.Database.UseMySQL:
case setting.Database.Type.IsMySQL():
// Drop indexes on columns first
sql := fmt.Sprintf("SHOW INDEX FROM %s WHERE column_name IN ('%s')", tableName, strings.Join(columnNames, "','"))
res, err := sess.Query(sql)
@ -444,7 +444,7 @@ func DropTableColumns(sess *xorm.Session, tableName string, columnNames ...strin
if _, err := sess.Exec(fmt.Sprintf("ALTER TABLE `%s` %s", tableName, cols)); err != nil {
return fmt.Errorf("Drop table `%s` columns %v: %v", tableName, columnNames, err)
}
case setting.Database.UseMSSQL:
case setting.Database.Type.IsMSSQL():
cols := ""
for _, col := range columnNames {
if cols != "" {
@ -543,13 +543,13 @@ func newXORMEngine() (*xorm.Engine, error) {
func deleteDB() error {
switch {
case setting.Database.UseSQLite3:
case setting.Database.Type.IsSQLite3():
if err := util.Remove(setting.Database.Path); err != nil {
return err
}
return os.MkdirAll(path.Dir(setting.Database.Path), os.ModePerm)
case setting.Database.UseMySQL:
case setting.Database.Type.IsMySQL():
db, err := sql.Open("mysql", fmt.Sprintf("%s:%s@tcp(%s)/",
setting.Database.User, setting.Database.Passwd, setting.Database.Host))
if err != nil {
@ -565,7 +565,7 @@ func deleteDB() error {
return err
}
return nil
case setting.Database.UsePostgreSQL:
case setting.Database.Type.IsPostgreSQL():
db, err := sql.Open("postgres", fmt.Sprintf("postgres://%s:%s@%s/?sslmode=%s",
setting.Database.User, setting.Database.Passwd, setting.Database.Host, setting.Database.SSLMode))
if err != nil {
@ -612,7 +612,7 @@ func deleteDB() error {
}
return nil
}
case setting.Database.UseMSSQL:
case setting.Database.Type.IsMSSQL():
host, port := setting.ParseMSSQLHostPort(setting.Database.Host)
db, err := sql.Open("mssql", fmt.Sprintf("server=%s; port=%s; database=%s; user id=%s; password=%s;",
host, port, "master", setting.Database.User, setting.Database.Passwd))

View File

@ -13,9 +13,9 @@ func PrependRefsHeadsToIssueRefs(x *xorm.Engine) error {
var query string
switch {
case setting.Database.UseMSSQL:
case setting.Database.Type.IsMSSQL():
query = "UPDATE `issue` SET `ref` = 'refs/heads/' + `ref` WHERE `ref` IS NOT NULL AND `ref` <> '' AND `ref` NOT LIKE 'refs/%'"
case setting.Database.UseMySQL:
case setting.Database.Type.IsMySQL():
query = "UPDATE `issue` SET `ref` = CONCAT('refs/heads/', `ref`) WHERE `ref` IS NOT NULL AND `ref` <> '' AND `ref` NOT LIKE 'refs/%';"
default:
query = "UPDATE `issue` SET `ref` = 'refs/heads/' || `ref` WHERE `ref` IS NOT NULL AND `ref` <> '' AND `ref` NOT LIKE 'refs/%'"

View File

@ -41,7 +41,7 @@ func FixLanguageStatsToSaveSize(x *xorm.Engine) error {
// Delete language stat statuses
truncExpr := "TRUNCATE TABLE"
if setting.Database.UseSQLite3 {
if setting.Database.Type.IsSQLite3() {
truncExpr = "DELETE FROM"
}

View File

@ -21,7 +21,7 @@ func IncreaseLanguageField(x *xorm.Engine) error {
return err
}
if setting.Database.UseSQLite3 {
if setting.Database.Type.IsSQLite3() {
// SQLite maps VARCHAR to TEXT without size so we're done
return nil
}
@ -41,11 +41,11 @@ func IncreaseLanguageField(x *xorm.Engine) error {
}
switch {
case setting.Database.UseMySQL:
case setting.Database.Type.IsMySQL():
if _, err := sess.Exec(fmt.Sprintf("ALTER TABLE language_stat MODIFY COLUMN language %s", sqlType)); err != nil {
return err
}
case setting.Database.UseMSSQL:
case setting.Database.Type.IsMSSQL():
// Yet again MSSQL just has to be awkward.
// Here we have to drop the constraints first and then rebuild them
constraints := make([]string, 0)
@ -71,7 +71,7 @@ func IncreaseLanguageField(x *xorm.Engine) error {
if err := sess.CreateUniques(new(LanguageStat)); err != nil {
return err
}
case setting.Database.UsePostgreSQL:
case setting.Database.Type.IsPostgreSQL():
if _, err := sess.Exec(fmt.Sprintf("ALTER TABLE language_stat ALTER COLUMN language TYPE %s", sqlType)); err != nil {
return err
}

View File

@ -17,13 +17,13 @@ import (
func SetDefaultPasswordToArgon2(x *xorm.Engine) error {
switch {
case setting.Database.UseMySQL:
case setting.Database.Type.IsMySQL():
_, err := x.Exec("ALTER TABLE `user` ALTER passwd_hash_algo SET DEFAULT 'argon2';")
return err
case setting.Database.UsePostgreSQL:
case setting.Database.Type.IsPostgreSQL():
_, err := x.Exec("ALTER TABLE `user` ALTER COLUMN passwd_hash_algo SET DEFAULT 'argon2';")
return err
case setting.Database.UseMSSQL:
case setting.Database.Type.IsMSSQL():
// need to find the constraint and drop it, then recreate it.
sess := x.NewSession()
defer sess.Close()
@ -53,7 +53,7 @@ func SetDefaultPasswordToArgon2(x *xorm.Engine) error {
}
return sess.Commit()
case setting.Database.UseSQLite3:
case setting.Database.Type.IsSQLite3():
// drop through
default:
log.Fatal("Unrecognized DB")

View File

@ -62,7 +62,7 @@ func UpdateCodeCommentReplies(x *xorm.Engine) error {
return err
}
if setting.Database.UseMSSQL {
if setting.Database.Type.IsMSSQL() {
if _, err := sess.Exec(sqlSelect + " INTO #temp_comments" + sqlTail); err != nil {
log.Error("unable to create temporary table")
return err
@ -72,13 +72,13 @@ func UpdateCodeCommentReplies(x *xorm.Engine) error {
comments := make([]*Comment, 0, batchSize)
switch {
case setting.Database.UseMySQL:
case setting.Database.Type.IsMySQL():
sqlCmd = sqlSelect + sqlTail + " LIMIT " + strconv.Itoa(batchSize) + ", " + strconv.Itoa(start)
case setting.Database.UsePostgreSQL:
case setting.Database.Type.IsPostgreSQL():
fallthrough
case setting.Database.UseSQLite3:
case setting.Database.Type.IsSQLite3():
sqlCmd = sqlSelect + sqlTail + " LIMIT " + strconv.Itoa(batchSize) + " OFFSET " + strconv.Itoa(start)
case setting.Database.UseMSSQL:
case setting.Database.Type.IsMSSQL():
sqlCmd = "SELECT TOP " + strconv.Itoa(batchSize) + " * FROM #temp_comments WHERE " +
"(id NOT IN ( SELECT TOP " + strconv.Itoa(start) + " id FROM #temp_comments ORDER BY id )) ORDER BY id"
default:

View File

@ -14,7 +14,7 @@ import (
)
func FixPostgresIDSequences(x *xorm.Engine) error {
if !setting.Database.UsePostgreSQL {
if !setting.Database.Type.IsPostgreSQL() {
return nil
}

View File

@ -54,11 +54,11 @@ func RenameTaskErrorsToMessage(x *xorm.Engine) error {
}
switch {
case setting.Database.UseMySQL:
case setting.Database.Type.IsMySQL():
if _, err := sess.Exec("ALTER TABLE `task` CHANGE errors message text"); err != nil {
return err
}
case setting.Database.UseMSSQL:
case setting.Database.Type.IsMSSQL():
if _, err := sess.Exec("sp_rename 'task.errors', 'message', 'COLUMN'"); err != nil {
return err
}

View File

@ -16,7 +16,7 @@ func AlterIssueAndCommentTextFieldsToLongText(x *xorm.Engine) error {
return err
}
if setting.Database.UseMySQL {
if setting.Database.Type.IsMySQL() {
if _, err := sess.Exec("ALTER TABLE `issue` CHANGE `content` `content` LONGTEXT"); err != nil {
return err
}

View File

@ -16,7 +16,7 @@ func AlterHookTaskTextFieldsToLongText(x *xorm.Engine) error {
return err
}
if setting.Database.UseMySQL {
if setting.Database.Type.IsMySQL() {
if _, err := sess.Exec("ALTER TABLE `hook_task` CHANGE `payload_content` `payload_content` LONGTEXT, CHANGE `request_content` `request_content` LONGTEXT, change `response_content` `response_content` LONGTEXT"); err != nil {
return err
}

View File

@ -38,7 +38,7 @@ func (*improveActionTableIndicesAction) TableIndices() []*schemas.Index {
actUserIndex := schemas.NewIndex("au_r_c_u_d", schemas.IndexType)
actUserIndex.AddColumn("act_user_id", "repo_id", "created_unix", "user_id", "is_deleted")
indices := []*schemas.Index{actUserIndex, repoIndex}
if setting.Database.UsePostgreSQL {
if setting.Database.Type.IsPostgreSQL() {
cudIndex := schemas.NewIndex("c_u_d", schemas.IndexType)
cudIndex.AddColumn("created_unix", "user_id", "is_deleted")
indices = append(indices, cudIndex)

View File

@ -65,11 +65,11 @@ func RenameCredentialIDBytes(x *xorm.Engine) error {
}
switch {
case setting.Database.UseMySQL:
case setting.Database.Type.IsMySQL():
if _, err := sess.Exec("ALTER TABLE `webauthn_credential` CHANGE credential_id_bytes credential_id VARBINARY(1024)"); err != nil {
return err
}
case setting.Database.UseMSSQL:
case setting.Database.Type.IsMSSQL():
if _, err := sess.Exec("sp_rename 'webauthn_credential.credential_id_bytes', 'credential_id', 'COLUMN'"); err != nil {
return err
}

View File

@ -16,7 +16,7 @@ func AlterPublicGPGKeyContentFieldsToMediumText(x *xorm.Engine) error {
return err
}
if setting.Database.UseMySQL {
if setting.Database.Type.IsMySQL() {
if _, err := sess.Exec("ALTER TABLE `gpg_key` CHANGE `content` `content` MEDIUMTEXT"); err != nil {
return err
}

View File

@ -16,7 +16,7 @@ func AlterPackageVersionMetadataToLongText(x *xorm.Engine) error {
return err
}
if setting.Database.UseMySQL {
if setting.Database.Type.IsMySQL() {
if _, err := sess.Exec("ALTER TABLE `package_version` MODIFY COLUMN `metadata_json` LONGTEXT"); err != nil {
return err
}

View File

@ -17,7 +17,7 @@ func AlterPublicGPGKeyImportContentFieldToMediumText(x *xorm.Engine) error {
return err
}
if setting.Database.UseMySQL {
if setting.Database.Type.IsMySQL() {
if _, err := sess.Exec("ALTER TABLE `gpg_key_import` CHANGE `content` `content` MEDIUMTEXT"); err != nil {
return err
}

View File

@ -409,7 +409,7 @@ func DeleteProjectByID(ctx context.Context, id int64) error {
func DeleteProjectByRepoID(ctx context.Context, repoID int64) error {
switch {
case setting.Database.UseSQLite3:
case setting.Database.Type.IsSQLite3():
if _, err := db.GetEngine(ctx).Exec("DELETE FROM project_issue WHERE project_issue.id IN (SELECT project_issue.id FROM project_issue INNER JOIN project WHERE project.id = project_issue.project_id AND project.repo_id = ?)", repoID); err != nil {
return err
}
@ -419,7 +419,7 @@ func DeleteProjectByRepoID(ctx context.Context, repoID int64) error {
if _, err := db.GetEngine(ctx).Table("project").Where("repo_id = ? ", repoID).Delete(&Project{}); err != nil {
return err
}
case setting.Database.UsePostgreSQL:
case setting.Database.Type.IsPostgreSQL():
if _, err := db.GetEngine(ctx).Exec("DELETE FROM project_issue USING project WHERE project.id = project_issue.project_id AND project.repo_id = ? ", repoID); err != nil {
return err
}

View File

@ -498,7 +498,7 @@ func SearchRepositoryCondition(opts *SearchRepoOptions) builder.Cond {
subQueryCond := builder.NewCond()
// Topic checking. Topics are present.
if setting.Database.UsePostgreSQL { // postgres stores the topics as json and not as text
if setting.Database.Type.IsPostgreSQL() { // postgres stores the topics as json and not as text
subQueryCond = subQueryCond.Or(builder.And(builder.NotNull{"topics"}, builder.Neq{"(topics)::text": "[]"}))
} else {
subQueryCond = subQueryCond.Or(builder.And(builder.Neq{"topics": "null"}, builder.Neq{"topics": "[]"}))

View File

@ -76,7 +76,7 @@ func MainTest(m *testing.M, testOpts *TestOptions) {
setting.SSH.BuiltinServerUser = "builtinuser"
setting.SSH.Port = 3000
setting.SSH.Domain = "try.gitea.io"
setting.Database.UseSQLite3 = true
setting.Database.Type = "sqlite3"
setting.Repository.DefaultBranch = "master" // many test code still assume that default branch is called "master"
repoRootPath, err := os.MkdirTemp(os.TempDir(), "repos")
if err != nil {

View File

@ -155,7 +155,7 @@ func checkDBConsistency(ctx context.Context, logger log.Logger, autofix bool) er
// TODO: function to recalc all counters
if setting.Database.UsePostgreSQL {
if setting.Database.Type.IsPostgreSQL() {
consistencyChecks = append(consistencyChecks, consistencyCheck{
Name: "Sequence values",
Counter: db.CountBadSequences,

View File

@ -42,7 +42,10 @@ func RevListObjects(ctx context.Context, revListWriter *io.PipeWriter, wg *sync.
defer revListWriter.Close()
stderr := new(bytes.Buffer)
var errbuf strings.Builder
cmd := git.NewCommand(ctx, "rev-list", "--objects").AddDynamicArguments(headSHA).AddArguments("--not").AddDynamicArguments(baseSHA)
cmd := git.NewCommand(ctx, "rev-list", "--objects").AddDynamicArguments(headSHA)
if baseSHA != "" {
cmd = cmd.AddArguments("--not").AddDynamicArguments(baseSHA)
}
if err := cmd.Run(&git.RunOpts{
Dir: tmpBasePath,
Stdout: revListWriter,

View File

@ -27,7 +27,7 @@ var (
// Database holds the database settings
Database = struct {
Type string
Type DatabaseType
Host string
Name string
User string
@ -39,10 +39,6 @@ var (
Charset string
Timeout int // seconds
SQLiteJournalMode string
UseSQLite3 bool
UseMySQL bool
UseMSSQL bool
UsePostgreSQL bool
DBConnectRetries int
DBConnectBackoff time.Duration
MaxIdleConns int
@ -59,24 +55,13 @@ var (
// LoadDBSetting loads the database settings
func LoadDBSetting() {
sec := CfgProvider.Section("database")
Database.Type = sec.Key("DB_TYPE").String()
Database.Type = DatabaseType(sec.Key("DB_TYPE").String())
defaultCharset := "utf8"
Database.UseMySQL = false
Database.UseSQLite3 = false
Database.UsePostgreSQL = false
Database.UseMSSQL = false
switch Database.Type {
case "sqlite3":
Database.UseSQLite3 = true
case "mysql":
Database.UseMySQL = true
if Database.Type.IsMySQL() {
defaultCharset = "utf8mb4"
case "postgres":
Database.UsePostgreSQL = true
case "mssql":
Database.UseMSSQL = true
}
Database.Host = sec.Key("HOST").String()
Database.Name = sec.Key("NAME").String()
Database.User = sec.Key("USER").String()
@ -86,7 +71,7 @@ func LoadDBSetting() {
Database.Schema = sec.Key("SCHEMA").String()
Database.SSLMode = sec.Key("SSL_MODE").MustString("disable")
Database.Charset = sec.Key("CHARSET").In(defaultCharset, []string{"utf8", "utf8mb4"})
if Database.UseMySQL && defaultCharset != "utf8mb4" {
if Database.Type.IsMySQL() && defaultCharset != "utf8mb4" {
log.Error("Deprecated database mysql charset utf8 support, please use utf8mb4 or convert utf8 to utf8mb4.")
}
@ -95,7 +80,7 @@ func LoadDBSetting() {
Database.SQLiteJournalMode = sec.Key("SQLITE_JOURNAL_MODE").MustString("")
Database.MaxIdleConns = sec.Key("MAX_IDLE_CONNS").MustInt(2)
if Database.UseMySQL {
if Database.Type.IsMySQL() {
Database.ConnMaxLifetime = sec.Key("CONN_MAX_LIFETIME").MustDuration(3 * time.Second)
} else {
Database.ConnMaxLifetime = sec.Key("CONN_MAX_LIFETIME").MustDuration(0)
@ -207,3 +192,25 @@ func ParseMSSQLHostPort(info string) (string, string) {
}
return host, port
}
type DatabaseType string
func (t DatabaseType) String() string {
return string(t)
}
func (t DatabaseType) IsSQLite3() bool {
return t == "sqlite3"
}
func (t DatabaseType) IsMySQL() bool {
return t == "mysql"
}
func (t DatabaseType) IsMSSQL() bool {
return t == "mssql"
}
func (t DatabaseType) IsPostgreSQL() bool {
return t == "postgres"
}

View File

@ -4,6 +4,7 @@
package typesniffer
import (
"bytes"
"fmt"
"io"
"net/http"
@ -24,8 +25,9 @@ const (
)
var (
svgTagRegex = regexp.MustCompile(`(?si)\A\s*(?:(<!--.*?-->|<!DOCTYPE\s+svg([\s:]+.*?>|>))\s*)*<svg[\s>\/]`)
svgTagInXMLRegex = regexp.MustCompile(`(?si)\A<\?xml\b.*?\?>\s*(?:(<!--.*?-->|<!DOCTYPE\s+svg([\s:]+.*?>|>))\s*)*<svg[\s>\/]`)
svgComment = regexp.MustCompile(`(?s)<!--.*?-->`)
svgTagRegex = regexp.MustCompile(`(?si)\A\s*(?:(<!DOCTYPE\s+svg([\s:]+.*?>|>))\s*)*<svg\b`)
svgTagInXMLRegex = regexp.MustCompile(`(?si)\A<\?xml\b.*?\?>\s*(?:(<!DOCTYPE\s+svg([\s:]+.*?>|>))\s*)*<svg\b`)
)
// SniffedType contains information about a blobs type.
@ -91,10 +93,27 @@ func DetectContentType(data []byte) SniffedType {
data = data[:sniffLen]
}
if (strings.Contains(ct, "text/plain") || strings.Contains(ct, "text/html")) && svgTagRegex.Match(data) ||
strings.Contains(ct, "text/xml") && svgTagInXMLRegex.Match(data) {
// SVG is unsupported. https://github.com/golang/go/issues/15888
ct = SvgMimeType
// SVG is unsupported by http.DetectContentType, https://github.com/golang/go/issues/15888
detectByHTML := strings.Contains(ct, "text/plain") || strings.Contains(ct, "text/html")
detectByXML := strings.Contains(ct, "text/xml")
if detectByHTML || detectByXML {
dataProcessed := svgComment.ReplaceAll(data, nil)
dataProcessed = bytes.TrimSpace(dataProcessed)
if detectByHTML && svgTagRegex.Match(dataProcessed) ||
detectByXML && svgTagInXMLRegex.Match(dataProcessed) {
ct = SvgMimeType
}
}
if strings.HasPrefix(ct, "audio/") && bytes.HasPrefix(data, []byte("ID3")) {
// The MP3 detection is quite inaccurate, any content with "ID3" prefix will result in "audio/mpeg".
// So remove the "ID3" prefix and detect again, if result is text, then it must be text content.
// This works especially because audio files contain many unprintable/invalid characters like `0x00`
ct2 := http.DetectContentType(data[3:])
if strings.HasPrefix(ct2, "text/") {
ct = ct2
}
}
return SniffedType{ct}

View File

@ -28,7 +28,6 @@ func TestIsSvgImage(t *testing.T) {
assert.True(t, DetectContentType([]byte("<svg></svg>")).IsSvgImage())
assert.True(t, DetectContentType([]byte(" <svg></svg>")).IsSvgImage())
assert.True(t, DetectContentType([]byte(`<svg width="100"></svg>`)).IsSvgImage())
assert.True(t, DetectContentType([]byte("<svg/>")).IsSvgImage())
assert.True(t, DetectContentType([]byte(`<?xml version="1.0" encoding="UTF-8"?><svg></svg>`)).IsSvgImage())
assert.True(t, DetectContentType([]byte(`<!-- Comment -->
<svg></svg>`)).IsSvgImage())
@ -57,6 +56,10 @@ func TestIsSvgImage(t *testing.T) {
<!-- Multline
Comment -->
<svg></svg>`)).IsSvgImage())
// the DetectContentType should work for incomplete data, because only beginning bytes are used for detection
assert.True(t, DetectContentType([]byte(`<svg>....`)).IsSvgImage())
assert.False(t, DetectContentType([]byte{}).IsSvgImage())
assert.False(t, DetectContentType([]byte("svg")).IsSvgImage())
assert.False(t, DetectContentType([]byte("<svgfoo></svgfoo>")).IsSvgImage())
@ -68,6 +71,26 @@ func TestIsSvgImage(t *testing.T) {
assert.False(t, DetectContentType([]byte(`<?xml version="1.0" encoding="UTF-8"?>
<!-- <svg></svg> inside comment -->
<foo></foo>`)).IsSvgImage())
assert.False(t, DetectContentType([]byte(`
<!-- comment1 -->
<div>
<!-- comment2 -->
<svg></svg>
</div>
`)).IsSvgImage())
assert.False(t, DetectContentType([]byte(`
<!-- comment1
-->
<div>
<!-- comment2
-->
<svg></svg>
</div>
`)).IsSvgImage())
assert.False(t, DetectContentType([]byte(`<html><body><!DOCTYPE svg PUBLIC "-//W3C//DTD SVG 1.1//EN" "http://www.w3.org/Graphics/SVG/1.1/DTD/svg11.dtd"><svg></svg></body></html>`)).IsSvgImage())
assert.False(t, DetectContentType([]byte(`<html><body><?xml version="1.0" encoding="UTF-8"?><svg></svg></body></html>`)).IsSvgImage())
}
func TestIsPDF(t *testing.T) {
@ -86,6 +109,10 @@ func TestIsAudio(t *testing.T) {
mp3, _ := base64.StdEncoding.DecodeString("SUQzBAAAAAABAFRYWFgAAAASAAADbWFqb3JfYnJhbmQAbXA0MgBUWFhYAAAAEQAAA21pbm9yX3Zl")
assert.True(t, DetectContentType(mp3).IsAudio())
assert.False(t, DetectContentType([]byte("plain text")).IsAudio())
assert.True(t, DetectContentType([]byte("ID3Toy\000")).IsAudio())
assert.True(t, DetectContentType([]byte("ID3Toy\n====\t* hi 🌞, ...")).IsText()) // test ID3 tag for plain text
assert.True(t, DetectContentType([]byte("ID3Toy\n====\t* hi 🌞, ..."+"🌛"[0:2])).IsText()) // test ID3 tag with incomplete UTF8 char
}
func TestDetectContentTypeFromReader(t *testing.T) {

View File

@ -247,6 +247,7 @@ default_enable_timetracking_popup=新しいリポジトリのタイムトラッ
no_reply_address=メールを隠すときのドメイン
no_reply_address_helper=メールアドレスを隠しているユーザーに使用するドメイン名。 例えば 'noreply.example.org' と設定した場合、ユーザー名 'joe' はGitに 'joe@noreply.example.org' としてログインすることになります。
password_algorithm=パスワードハッシュアルゴリズム
invalid_password_algorithm=無効なパスワードハッシュアルゴリズム
password_algorithm_helper=パスワードハッシュアルゴリズムを設定します。 アルゴリズムにより動作要件と強度が異なります。 `argon2`は良い特性を備えていますが、多くのメモリを使用するため小さなシステムには適さない場合があります。
enable_update_checker=アップデートチェッカーを有効にする
enable_update_checker_helper=gitea.ioに接続して定期的に新しいバージョンのリリースを確認します。

View File

@ -141,7 +141,7 @@ func GlobalInitInstalled(ctx context.Context) {
if setting.EnableSQLite3 {
log.Info("SQLite3 support is enabled")
} else if setting.Database.UseSQLite3 {
} else if setting.Database.Type.IsSQLite3() {
log.Fatal("SQLite3 support is disabled, but it is used for database setting. Please get or build a Gitea release with SQLite3 support.")
}

View File

@ -104,7 +104,7 @@ func Install(ctx *context.Context) {
form.DbSchema = setting.Database.Schema
form.Charset = setting.Database.Charset
curDBType := setting.Database.Type
curDBType := setting.Database.Type.String()
var isCurDBTypeSupported bool
for _, dbType := range setting.SupportedDatabaseTypes {
if dbType == curDBType {
@ -272,7 +272,7 @@ func SubmitInstall(ctx *context.Context) {
// ---- Basic checks are passed, now test configuration.
// Test database setting.
setting.Database.Type = form.DbType
setting.Database.Type = setting.DatabaseType(form.DbType)
setting.Database.Host = form.DbHost
setting.Database.User = form.DbUser
setting.Database.Passwd = form.DbPasswd
@ -392,7 +392,7 @@ func SubmitInstall(ctx *context.Context) {
log.Error("Failed to load custom conf '%s': %v", setting.CustomConf, err)
}
}
cfg.Section("database").Key("DB_TYPE").SetValue(setting.Database.Type)
cfg.Section("database").Key("DB_TYPE").SetValue(setting.Database.Type.String())
cfg.Section("database").Key("HOST").SetValue(setting.Database.Host)
cfg.Section("database").Key("NAME").SetValue(setting.Database.Name)
cfg.Section("database").Key("USER").SetValue(setting.Database.User)

View File

@ -100,7 +100,7 @@ func checkDatabase(checks checks) status {
st.Time = getCheckTime()
}
if setting.Database.UseSQLite3 && st.Status == pass {
if setting.Database.Type.IsSQLite3() && st.Status == pass {
if !setting.EnableSQLite3 {
st.Status = fail
st.Time = getCheckTime()

View File

@ -116,7 +116,7 @@ func createLFSMetaObjectsFromCatFileBatch(catFileBatchReader *io.PipeReader, wg
}
// Then we need to check that this pointer is in the db
if _, err := git_model.GetLFSMetaObjectByOid(db.DefaultContext, pr.HeadRepo.ID, pointer.Oid); err != nil {
if _, err := git_model.GetLFSMetaObjectByOid(db.DefaultContext, pr.HeadRepoID, pointer.Oid); err != nil {
if err == git_model.ErrLFSObjectNotExist {
log.Warn("During merge of: %d in %-v, there is a pointer to LFS Oid: %s which although present in the LFS store is not associated with the head repo %-v", pr.Index, pr.BaseRepo, pointer.Oid, pr.HeadRepo)
continue

View File

@ -5,8 +5,6 @@
package pull
import (
"bufio"
"bytes"
"context"
"fmt"
"os"
@ -14,7 +12,6 @@ import (
"regexp"
"strconv"
"strings"
"time"
"code.gitea.io/gitea/models"
"code.gitea.io/gitea/models/db"
@ -34,22 +31,17 @@ import (
repo_module "code.gitea.io/gitea/modules/repository"
"code.gitea.io/gitea/modules/setting"
"code.gitea.io/gitea/modules/timeutil"
asymkey_service "code.gitea.io/gitea/services/asymkey"
issue_service "code.gitea.io/gitea/services/issue"
)
// GetDefaultMergeMessage returns default message used when merging pull request
func GetDefaultMergeMessage(ctx context.Context, baseGitRepo *git.Repository, pr *issues_model.PullRequest, mergeStyle repo_model.MergeStyle) (message, body string, err error) {
if err := pr.LoadHeadRepo(ctx); err != nil {
return "", "", err
}
if err := pr.LoadBaseRepo(ctx); err != nil {
return "", "", err
}
if pr.BaseRepo == nil {
return "", "", repo_model.ErrRepoNotExist{ID: pr.BaseRepoID}
if err := pr.LoadHeadRepo(ctx); err != nil {
return "", "", err
}
if err := pr.LoadIssue(ctx); err != nil {
return "", "", err
}
@ -144,18 +136,19 @@ func expandDefaultMergeMessage(template string, vars map[string]string) (message
// Merge merges pull request to base repository.
// Caller should check PR is ready to be merged (review and status checks)
func Merge(ctx context.Context, pr *issues_model.PullRequest, doer *user_model.User, baseGitRepo *git.Repository, mergeStyle repo_model.MergeStyle, expectedHeadCommitID, message string, wasAutoMerged bool) error {
if err := pr.LoadHeadRepo(ctx); err != nil {
log.Error("LoadHeadRepo: %v", err)
return fmt.Errorf("LoadHeadRepo: %w", err)
} else if err := pr.LoadBaseRepo(ctx); err != nil {
log.Error("LoadBaseRepo: %v", err)
return fmt.Errorf("LoadBaseRepo: %w", err)
if err := pr.LoadBaseRepo(ctx); err != nil {
log.Error("Unable to load base repo: %v", err)
return fmt.Errorf("unable to load base repo: %w", err)
} else if err := pr.LoadHeadRepo(ctx); err != nil {
log.Error("Unable to load head repo: %v", err)
return fmt.Errorf("unable to load head repo: %w", err)
}
pullWorkingPool.CheckIn(fmt.Sprint(pr.ID))
defer pullWorkingPool.CheckOut(fmt.Sprint(pr.ID))
// Removing an auto merge pull and ignore if not exist
// FIXME: is this the correct point to do this? Shouldn't this be after IsMergeStyleAllowed?
if err := pull_model.DeleteScheduledAutoMerge(ctx, pr.ID); err != nil && !db.IsErrNotExist(err) {
return err
}
@ -179,7 +172,7 @@ func Merge(ctx context.Context, pr *issues_model.PullRequest, doer *user_model.U
// Run the merge in the hammer context to prevent cancellation
hammerCtx := graceful.GetManager().HammerContext()
pr.MergedCommitID, err = rawMerge(hammerCtx, pr, doer, mergeStyle, expectedHeadCommitID, message)
pr.MergedCommitID, err = doMergeAndPush(hammerCtx, pr, doer, mergeStyle, expectedHeadCommitID, message)
if err != nil {
return err
}
@ -189,18 +182,18 @@ func Merge(ctx context.Context, pr *issues_model.PullRequest, doer *user_model.U
pr.MergerID = doer.ID
if _, err := pr.SetMerged(hammerCtx); err != nil {
log.Error("SetMerged [%d]: %v", pr.ID, err)
log.Error("SetMerged %-v: %v", pr, err)
}
if err := pr.LoadIssue(hammerCtx); err != nil {
log.Error("LoadIssue [%d]: %v", pr.ID, err)
log.Error("LoadIssue %-v: %v", pr, err)
}
if err := pr.Issue.LoadRepo(hammerCtx); err != nil {
log.Error("LoadRepo for issue [%d]: %v", pr.ID, err)
log.Error("pr.Issue.LoadRepo %-v: %v", pr, err)
}
if err := pr.Issue.Repo.LoadOwner(hammerCtx); err != nil {
log.Error("LoadOwner for PR [%d]: %v", pr.ID, err)
log.Error("LoadOwner for %-v: %v", pr, err)
}
if wasAutoMerged {
@ -239,326 +232,43 @@ func Merge(ctx context.Context, pr *issues_model.PullRequest, doer *user_model.U
return nil
}
// rawMerge perform the merge operation without changing any pull information in database
func rawMerge(ctx context.Context, pr *issues_model.PullRequest, doer *user_model.User, mergeStyle repo_model.MergeStyle, expectedHeadCommitID, message string) (string, error) {
// doMergeAndPush performs the merge operation without changing any pull information in database and pushes it up to the base repository
func doMergeAndPush(ctx context.Context, pr *issues_model.PullRequest, doer *user_model.User, mergeStyle repo_model.MergeStyle, expectedHeadCommitID, message string) (string, error) {
// Clone base repo.
tmpBasePath, err := createTemporaryRepo(ctx, pr)
mergeCtx, cancel, err := createTemporaryRepoForMerge(ctx, pr, doer, expectedHeadCommitID)
if err != nil {
log.Error("CreateTemporaryPath: %v", err)
return "", err
}
defer func() {
if err := repo_module.RemoveTemporaryPath(tmpBasePath); err != nil {
log.Error("Merge: RemoveTemporaryPath: %s", err)
}
}()
baseBranch := "base"
trackingBranch := "tracking"
stagingBranch := "staging"
if expectedHeadCommitID != "" {
trackingCommitID, _, err := git.NewCommand(ctx, "show-ref", "--hash").AddDynamicArguments(git.BranchPrefix + trackingBranch).RunStdString(&git.RunOpts{Dir: tmpBasePath})
if err != nil {
log.Error("show-ref[%s] --hash refs/heads/trackingn: %v", tmpBasePath, git.BranchPrefix+trackingBranch, err)
return "", fmt.Errorf("getDiffTree: %w", err)
}
if strings.TrimSpace(trackingCommitID) != expectedHeadCommitID {
return "", models.ErrSHADoesNotMatch{
GivenSHA: expectedHeadCommitID,
CurrentSHA: trackingCommitID,
}
}
}
var outbuf, errbuf strings.Builder
// Enable sparse-checkout
sparseCheckoutList, err := getDiffTree(ctx, tmpBasePath, baseBranch, trackingBranch)
if err != nil {
log.Error("getDiffTree(%s, %s, %s): %v", tmpBasePath, baseBranch, trackingBranch, err)
return "", fmt.Errorf("getDiffTree: %w", err)
}
infoPath := filepath.Join(tmpBasePath, ".git", "info")
if err := os.MkdirAll(infoPath, 0o700); err != nil {
log.Error("Unable to create .git/info in %s: %v", tmpBasePath, err)
return "", fmt.Errorf("Unable to create .git/info in tmpBasePath: %w", err)
}
sparseCheckoutListPath := filepath.Join(infoPath, "sparse-checkout")
if err := os.WriteFile(sparseCheckoutListPath, []byte(sparseCheckoutList), 0o600); err != nil {
log.Error("Unable to write .git/info/sparse-checkout file in %s: %v", tmpBasePath, err)
return "", fmt.Errorf("Unable to write .git/info/sparse-checkout file in tmpBasePath: %w", err)
}
gitConfigCommand := func() *git.Command {
return git.NewCommand(ctx, "config", "--local")
}
// Switch off LFS process (set required, clean and smudge here also)
if err := gitConfigCommand().AddArguments("filter.lfs.process", "").
Run(&git.RunOpts{
Dir: tmpBasePath,
Stdout: &outbuf,
Stderr: &errbuf,
}); err != nil {
log.Error("git config [filter.lfs.process -> <> ]: %v\n%s\n%s", err, outbuf.String(), errbuf.String())
return "", fmt.Errorf("git config [filter.lfs.process -> <> ]: %w\n%s\n%s", err, outbuf.String(), errbuf.String())
}
outbuf.Reset()
errbuf.Reset()
if err := gitConfigCommand().AddArguments("filter.lfs.required", "false").
Run(&git.RunOpts{
Dir: tmpBasePath,
Stdout: &outbuf,
Stderr: &errbuf,
}); err != nil {
log.Error("git config [filter.lfs.required -> <false> ]: %v\n%s\n%s", err, outbuf.String(), errbuf.String())
return "", fmt.Errorf("git config [filter.lfs.required -> <false> ]: %w\n%s\n%s", err, outbuf.String(), errbuf.String())
}
outbuf.Reset()
errbuf.Reset()
if err := gitConfigCommand().AddArguments("filter.lfs.clean", "").
Run(&git.RunOpts{
Dir: tmpBasePath,
Stdout: &outbuf,
Stderr: &errbuf,
}); err != nil {
log.Error("git config [filter.lfs.clean -> <> ]: %v\n%s\n%s", err, outbuf.String(), errbuf.String())
return "", fmt.Errorf("git config [filter.lfs.clean -> <> ]: %w\n%s\n%s", err, outbuf.String(), errbuf.String())
}
outbuf.Reset()
errbuf.Reset()
if err := gitConfigCommand().AddArguments("filter.lfs.smudge", "").
Run(&git.RunOpts{
Dir: tmpBasePath,
Stdout: &outbuf,
Stderr: &errbuf,
}); err != nil {
log.Error("git config [filter.lfs.smudge -> <> ]: %v\n%s\n%s", err, outbuf.String(), errbuf.String())
return "", fmt.Errorf("git config [filter.lfs.smudge -> <> ]: %w\n%s\n%s", err, outbuf.String(), errbuf.String())
}
outbuf.Reset()
errbuf.Reset()
if err := gitConfigCommand().AddArguments("core.sparseCheckout", "true").
Run(&git.RunOpts{
Dir: tmpBasePath,
Stdout: &outbuf,
Stderr: &errbuf,
}); err != nil {
log.Error("git config [core.sparseCheckout -> true ]: %v\n%s\n%s", err, outbuf.String(), errbuf.String())
return "", fmt.Errorf("git config [core.sparsecheckout -> true]: %w\n%s\n%s", err, outbuf.String(), errbuf.String())
}
outbuf.Reset()
errbuf.Reset()
// Read base branch index
if err := git.NewCommand(ctx, "read-tree", "HEAD").
Run(&git.RunOpts{
Dir: tmpBasePath,
Stdout: &outbuf,
Stderr: &errbuf,
}); err != nil {
log.Error("git read-tree HEAD: %v\n%s\n%s", err, outbuf.String(), errbuf.String())
return "", fmt.Errorf("Unable to read base branch in to the index: %w\n%s\n%s", err, outbuf.String(), errbuf.String())
}
outbuf.Reset()
errbuf.Reset()
sig := doer.NewGitSig()
committer := sig
// Determine if we should sign. If no signKeyID, use --no-gpg-sign to countermand the sign config (from gitconfig)
var signArgs git.TrustedCmdArgs
sign, signKeyID, signer, _ := asymkey_service.SignMerge(ctx, pr, doer, tmpBasePath, "HEAD", trackingBranch)
if sign {
if pr.BaseRepo.GetTrustModel() == repo_model.CommitterTrustModel || pr.BaseRepo.GetTrustModel() == repo_model.CollaboratorCommitterTrustModel {
committer = signer
}
signArgs = git.ToTrustedCmdArgs([]string{"-S" + signKeyID})
} else {
signArgs = append(signArgs, "--no-gpg-sign")
}
commitTimeStr := time.Now().Format(time.RFC3339)
// Because this may call hooks we should pass in the environment
env := append(os.Environ(),
"GIT_AUTHOR_NAME="+sig.Name,
"GIT_AUTHOR_EMAIL="+sig.Email,
"GIT_AUTHOR_DATE="+commitTimeStr,
"GIT_COMMITTER_NAME="+committer.Name,
"GIT_COMMITTER_EMAIL="+committer.Email,
"GIT_COMMITTER_DATE="+commitTimeStr,
)
defer cancel()
// Merge commits.
switch mergeStyle {
case repo_model.MergeStyleMerge:
cmd := git.NewCommand(ctx, "merge", "--no-ff", "--no-commit").AddDynamicArguments(trackingBranch)
if err := runMergeCommand(pr, mergeStyle, cmd, tmpBasePath); err != nil {
log.Error("Unable to merge tracking into base: %v", err)
if err := doMergeStyleMerge(mergeCtx, message); err != nil {
return "", err
}
if err := commitAndSignNoAuthor(ctx, pr, message, signArgs, tmpBasePath, env); err != nil {
log.Error("Unable to make final commit: %v", err)
case repo_model.MergeStyleRebase, repo_model.MergeStyleRebaseMerge:
if err := doMergeStyleRebase(mergeCtx, mergeStyle, message); err != nil {
return "", err
}
case repo_model.MergeStyleRebase:
fallthrough
case repo_model.MergeStyleRebaseUpdate:
fallthrough
case repo_model.MergeStyleRebaseMerge:
// Checkout head branch
if err := git.NewCommand(ctx, "checkout", "-b").AddDynamicArguments(stagingBranch, trackingBranch).
Run(&git.RunOpts{
Dir: tmpBasePath,
Stdout: &outbuf,
Stderr: &errbuf,
}); err != nil {
log.Error("git checkout base prior to merge post staging rebase [%s:%s -> %s:%s]: %v\n%s\n%s", pr.HeadRepo.FullName(), pr.HeadBranch, pr.BaseRepo.FullName(), pr.BaseBranch, err, outbuf.String(), errbuf.String())
return "", fmt.Errorf("git checkout base prior to merge post staging rebase [%s:%s -> %s:%s]: %w\n%s\n%s", pr.HeadRepo.FullName(), pr.HeadBranch, pr.BaseRepo.FullName(), pr.BaseBranch, err, outbuf.String(), errbuf.String())
}
outbuf.Reset()
errbuf.Reset()
// Rebase before merging
if err := git.NewCommand(ctx, "rebase").AddDynamicArguments(baseBranch).
Run(&git.RunOpts{
Dir: tmpBasePath,
Stdout: &outbuf,
Stderr: &errbuf,
}); err != nil {
// Rebase will leave a REBASE_HEAD file in .git if there is a conflict
if _, statErr := os.Stat(filepath.Join(tmpBasePath, ".git", "REBASE_HEAD")); statErr == nil {
var commitSha string
ok := false
failingCommitPaths := []string{
filepath.Join(tmpBasePath, ".git", "rebase-apply", "original-commit"), // Git < 2.26
filepath.Join(tmpBasePath, ".git", "rebase-merge", "stopped-sha"), // Git >= 2.26
}
for _, failingCommitPath := range failingCommitPaths {
if _, statErr := os.Stat(failingCommitPath); statErr == nil {
commitShaBytes, readErr := os.ReadFile(failingCommitPath)
if readErr != nil {
// Abandon this attempt to handle the error
log.Error("git rebase staging on to base [%s:%s -> %s:%s]: %v\n%s\n%s", pr.HeadRepo.FullName(), pr.HeadBranch, pr.BaseRepo.FullName(), pr.BaseBranch, err, outbuf.String(), errbuf.String())
return "", fmt.Errorf("git rebase staging on to base [%s:%s -> %s:%s]: %w\n%s\n%s", pr.HeadRepo.FullName(), pr.HeadBranch, pr.BaseRepo.FullName(), pr.BaseBranch, err, outbuf.String(), errbuf.String())
}
commitSha = strings.TrimSpace(string(commitShaBytes))
ok = true
break
}
}
if !ok {
log.Error("Unable to determine failing commit sha for this rebase message. Cannot cast as models.ErrRebaseConflicts.")
log.Error("git rebase staging on to base [%s:%s -> %s:%s]: %v\n%s\n%s", pr.HeadRepo.FullName(), pr.HeadBranch, pr.BaseRepo.FullName(), pr.BaseBranch, err, outbuf.String(), errbuf.String())
return "", fmt.Errorf("git rebase staging on to base [%s:%s -> %s:%s]: %w\n%s\n%s", pr.HeadRepo.FullName(), pr.HeadBranch, pr.BaseRepo.FullName(), pr.BaseBranch, err, outbuf.String(), errbuf.String())
}
log.Debug("RebaseConflict at %s [%s:%s -> %s:%s]: %v\n%s\n%s", commitSha, pr.HeadRepo.FullName(), pr.HeadBranch, pr.BaseRepo.FullName(), pr.BaseBranch, err, outbuf.String(), errbuf.String())
return "", models.ErrRebaseConflicts{
Style: mergeStyle,
CommitSHA: commitSha,
StdOut: outbuf.String(),
StdErr: errbuf.String(),
Err: err,
}
}
log.Error("git rebase staging on to base [%s:%s -> %s:%s]: %v\n%s\n%s", pr.HeadRepo.FullName(), pr.HeadBranch, pr.BaseRepo.FullName(), pr.BaseBranch, err, outbuf.String(), errbuf.String())
return "", fmt.Errorf("git rebase staging on to base [%s:%s -> %s:%s]: %w\n%s\n%s", pr.HeadRepo.FullName(), pr.HeadBranch, pr.BaseRepo.FullName(), pr.BaseBranch, err, outbuf.String(), errbuf.String())
}
outbuf.Reset()
errbuf.Reset()
// not need merge, just update by rebase. so skip
if mergeStyle == repo_model.MergeStyleRebaseUpdate {
break
}
// Checkout base branch again
if err := git.NewCommand(ctx, "checkout").AddDynamicArguments(baseBranch).
Run(&git.RunOpts{
Dir: tmpBasePath,
Stdout: &outbuf,
Stderr: &errbuf,
}); err != nil {
log.Error("git checkout base prior to merge post staging rebase [%s:%s -> %s:%s]: %v\n%s\n%s", pr.HeadRepo.FullName(), pr.HeadBranch, pr.BaseRepo.FullName(), pr.BaseBranch, err, outbuf.String(), errbuf.String())
return "", fmt.Errorf("git checkout base prior to merge post staging rebase [%s:%s -> %s:%s]: %w\n%s\n%s", pr.HeadRepo.FullName(), pr.HeadBranch, pr.BaseRepo.FullName(), pr.BaseBranch, err, outbuf.String(), errbuf.String())
}
outbuf.Reset()
errbuf.Reset()
cmd := git.NewCommand(ctx, "merge")
if mergeStyle == repo_model.MergeStyleRebase {
cmd.AddArguments("--ff-only")
} else {
cmd.AddArguments("--no-ff", "--no-commit")
}
cmd.AddDynamicArguments(stagingBranch)
// Prepare merge with commit
if err := runMergeCommand(pr, mergeStyle, cmd, tmpBasePath); err != nil {
log.Error("Unable to merge staging into base: %v", err)
return "", err
}
if mergeStyle == repo_model.MergeStyleRebaseMerge {
if err := commitAndSignNoAuthor(ctx, pr, message, signArgs, tmpBasePath, env); err != nil {
log.Error("Unable to make final commit: %v", err)
return "", err
}
}
case repo_model.MergeStyleSquash:
// Merge with squash
cmd := git.NewCommand(ctx, "merge", "--squash").AddDynamicArguments(trackingBranch)
if err := runMergeCommand(pr, mergeStyle, cmd, tmpBasePath); err != nil {
log.Error("Unable to merge --squash tracking into base: %v", err)
if err := doMergeStyleSquash(mergeCtx, message); err != nil {
return "", err
}
if err = pr.Issue.LoadPoster(ctx); err != nil {
log.Error("LoadPoster: %v", err)
return "", fmt.Errorf("LoadPoster: %w", err)
}
sig := pr.Issue.Poster.NewGitSig()
if setting.Repository.PullRequest.AddCoCommitterTrailers && committer.String() != sig.String() {
// add trailer
message += fmt.Sprintf("\nCo-authored-by: %s\nCo-committed-by: %s\n", sig.String(), sig.String())
}
if err := git.NewCommand(ctx, "commit").
AddArguments(signArgs...).
AddOptionFormat("--author='%s <%s>'", sig.Name, sig.Email).
AddOptionFormat("--message=%s", message).
Run(&git.RunOpts{
Env: env,
Dir: tmpBasePath,
Stdout: &outbuf,
Stderr: &errbuf,
}); err != nil {
log.Error("git commit [%s:%s -> %s:%s]: %v\n%s\n%s", pr.HeadRepo.FullName(), pr.HeadBranch, pr.BaseRepo.FullName(), pr.BaseBranch, err, outbuf.String(), errbuf.String())
return "", fmt.Errorf("git commit [%s:%s -> %s:%s]: %w\n%s\n%s", pr.HeadRepo.FullName(), pr.HeadBranch, pr.BaseRepo.FullName(), pr.BaseBranch, err, outbuf.String(), errbuf.String())
}
outbuf.Reset()
errbuf.Reset()
default:
return "", models.ErrInvalidMergeStyle{ID: pr.BaseRepo.ID, Style: mergeStyle}
}
// OK we should cache our current head and origin/headbranch
mergeHeadSHA, err := git.GetFullCommitID(ctx, tmpBasePath, "HEAD")
mergeHeadSHA, err := git.GetFullCommitID(ctx, mergeCtx.tmpBasePath, "HEAD")
if err != nil {
return "", fmt.Errorf("Failed to get full commit id for HEAD: %w", err)
}
mergeBaseSHA, err := git.GetFullCommitID(ctx, tmpBasePath, "original_"+baseBranch)
mergeBaseSHA, err := git.GetFullCommitID(ctx, mergeCtx.tmpBasePath, "original_"+baseBranch)
if err != nil {
return "", fmt.Errorf("Failed to get full commit id for origin/%s: %w", pr.BaseBranch, err)
}
mergeCommitID, err := git.GetFullCommitID(ctx, tmpBasePath, baseBranch)
mergeCommitID, err := git.GetFullCommitID(ctx, mergeCtx.tmpBasePath, baseBranch)
if err != nil {
return "", fmt.Errorf("Failed to get full commit id for the new merge: %w", err)
}
@ -567,7 +277,7 @@ func rawMerge(ctx context.Context, pr *issues_model.PullRequest, doer *user_mode
// I think in the interests of data safety - failures to push to the lfs should prevent
// the merge as you can always remerge.
if setting.LFS.StartServer {
if err := LFSPush(ctx, tmpBasePath, mergeHeadSHA, mergeBaseSHA, pr); err != nil {
if err := LFSPush(ctx, mergeCtx.tmpBasePath, mergeHeadSHA, mergeBaseSHA, pr); err != nil {
return "", err
}
}
@ -576,167 +286,92 @@ func rawMerge(ctx context.Context, pr *issues_model.PullRequest, doer *user_mode
err = pr.HeadRepo.LoadOwner(ctx)
if err != nil {
if !user_model.IsErrUserNotExist(err) {
log.Error("Can't find user: %d for head repository - %v", pr.HeadRepo.OwnerID, err)
log.Error("Can't find user: %d for head repository in %-v: %v", pr.HeadRepo.OwnerID, pr, err)
return "", err
}
log.Error("Can't find user: %d for head repository - defaulting to doer: %s - %v", pr.HeadRepo.OwnerID, doer.Name, err)
log.Warn("Can't find user: %d for head repository in %-v - defaulting to doer: %s - %v", pr.HeadRepo.OwnerID, pr, doer.Name, err)
headUser = doer
} else {
headUser = pr.HeadRepo.Owner
}
var pushCmd *git.Command
if mergeStyle == repo_model.MergeStyleRebaseUpdate {
// force push the rebase result to head branch
env = repo_module.FullPushingEnvironment(
headUser,
doer,
pr.HeadRepo,
pr.HeadRepo.Name,
pr.ID,
)
pushCmd = git.NewCommand(ctx, "push", "-f", "head_repo").AddDynamicArguments(stagingBranch + ":" + git.BranchPrefix + pr.HeadBranch)
} else {
env = repo_module.FullPushingEnvironment(
headUser,
doer,
pr.BaseRepo,
pr.BaseRepo.Name,
pr.ID,
)
pushCmd = git.NewCommand(ctx, "push", "origin").AddDynamicArguments(baseBranch + ":" + git.BranchPrefix + pr.BaseBranch)
}
mergeCtx.env = repo_module.FullPushingEnvironment(
headUser,
doer,
pr.BaseRepo,
pr.BaseRepo.Name,
pr.ID,
)
pushCmd := git.NewCommand(ctx, "push", "origin").AddDynamicArguments(baseBranch + ":" + git.BranchPrefix + pr.BaseBranch)
// Push back to upstream.
// TODO: this cause an api call to "/api/internal/hook/post-receive/...",
// that prevents us from doint the whole merge in one db transaction
if err := pushCmd.Run(&git.RunOpts{
Env: env,
Dir: tmpBasePath,
Stdout: &outbuf,
Stderr: &errbuf,
}); err != nil {
if strings.Contains(errbuf.String(), "non-fast-forward") {
if err := pushCmd.Run(mergeCtx.RunOpts()); err != nil {
if strings.Contains(mergeCtx.errbuf.String(), "non-fast-forward") {
return "", &git.ErrPushOutOfDate{
StdOut: outbuf.String(),
StdErr: errbuf.String(),
StdOut: mergeCtx.outbuf.String(),
StdErr: mergeCtx.errbuf.String(),
Err: err,
}
} else if strings.Contains(errbuf.String(), "! [remote rejected]") {
} else if strings.Contains(mergeCtx.errbuf.String(), "! [remote rejected]") {
err := &git.ErrPushRejected{
StdOut: outbuf.String(),
StdErr: errbuf.String(),
StdOut: mergeCtx.outbuf.String(),
StdErr: mergeCtx.errbuf.String(),
Err: err,
}
err.GenerateMessage()
return "", err
}
return "", fmt.Errorf("git push: %s", errbuf.String())
return "", fmt.Errorf("git push: %s", mergeCtx.errbuf.String())
}
outbuf.Reset()
errbuf.Reset()
mergeCtx.outbuf.Reset()
mergeCtx.errbuf.Reset()
return mergeCommitID, nil
}
func commitAndSignNoAuthor(ctx context.Context, pr *issues_model.PullRequest, message string, signArgs git.TrustedCmdArgs, tmpBasePath string, env []string) error {
var outbuf, errbuf strings.Builder
if err := git.NewCommand(ctx, "commit").AddArguments(signArgs...).AddOptionFormat("--message=%s", message).
Run(&git.RunOpts{
Env: env,
Dir: tmpBasePath,
Stdout: &outbuf,
Stderr: &errbuf,
}); err != nil {
log.Error("git commit [%s:%s -> %s:%s]: %v\n%s\n%s", pr.HeadRepo.FullName(), pr.HeadBranch, pr.BaseRepo.FullName(), pr.BaseBranch, err, outbuf.String(), errbuf.String())
return fmt.Errorf("git commit [%s:%s -> %s:%s]: %w\n%s\n%s", pr.HeadRepo.FullName(), pr.HeadBranch, pr.BaseRepo.FullName(), pr.BaseBranch, err, outbuf.String(), errbuf.String())
func commitAndSignNoAuthor(ctx *mergeContext, message string) error {
if err := git.NewCommand(ctx, "commit").AddArguments(ctx.signArg...).AddOptionFormat("--message=%s", message).
Run(ctx.RunOpts()); err != nil {
log.Error("git commit %-v: %v\n%s\n%s", ctx.pr, err, ctx.outbuf.String(), ctx.errbuf.String())
return fmt.Errorf("git commit %v: %w\n%s\n%s", ctx.pr, err, ctx.outbuf.String(), ctx.errbuf.String())
}
return nil
}
func runMergeCommand(pr *issues_model.PullRequest, mergeStyle repo_model.MergeStyle, cmd *git.Command, tmpBasePath string) error {
var outbuf, errbuf strings.Builder
if err := cmd.Run(&git.RunOpts{
Dir: tmpBasePath,
Stdout: &outbuf,
Stderr: &errbuf,
}); err != nil {
func runMergeCommand(ctx *mergeContext, mergeStyle repo_model.MergeStyle, cmd *git.Command) error {
if err := cmd.Run(ctx.RunOpts()); err != nil {
// Merge will leave a MERGE_HEAD file in the .git folder if there is a conflict
if _, statErr := os.Stat(filepath.Join(tmpBasePath, ".git", "MERGE_HEAD")); statErr == nil {
if _, statErr := os.Stat(filepath.Join(ctx.tmpBasePath, ".git", "MERGE_HEAD")); statErr == nil {
// We have a merge conflict error
log.Debug("MergeConflict [%s:%s -> %s:%s]: %v\n%s\n%s", pr.HeadRepo.FullName(), pr.HeadBranch, pr.BaseRepo.FullName(), pr.BaseBranch, err, outbuf.String(), errbuf.String())
log.Debug("MergeConflict %-v: %v\n%s\n%s", ctx.pr, err, ctx.outbuf.String(), ctx.errbuf.String())
return models.ErrMergeConflicts{
Style: mergeStyle,
StdOut: outbuf.String(),
StdErr: errbuf.String(),
StdOut: ctx.outbuf.String(),
StdErr: ctx.errbuf.String(),
Err: err,
}
} else if strings.Contains(errbuf.String(), "refusing to merge unrelated histories") {
log.Debug("MergeUnrelatedHistories [%s:%s -> %s:%s]: %v\n%s\n%s", pr.HeadRepo.FullName(), pr.HeadBranch, pr.BaseRepo.FullName(), pr.BaseBranch, err, outbuf.String(), errbuf.String())
} else if strings.Contains(ctx.errbuf.String(), "refusing to merge unrelated histories") {
log.Debug("MergeUnrelatedHistories %-v: %v\n%s\n%s", ctx.pr, err, ctx.outbuf.String(), ctx.errbuf.String())
return models.ErrMergeUnrelatedHistories{
Style: mergeStyle,
StdOut: outbuf.String(),
StdErr: errbuf.String(),
StdOut: ctx.outbuf.String(),
StdErr: ctx.errbuf.String(),
Err: err,
}
}
log.Error("git merge [%s:%s -> %s:%s]: %v\n%s\n%s", pr.HeadRepo.FullName(), pr.HeadBranch, pr.BaseRepo.FullName(), pr.BaseBranch, err, outbuf.String(), errbuf.String())
return fmt.Errorf("git merge [%s:%s -> %s:%s]: %w\n%s\n%s", pr.HeadRepo.FullName(), pr.HeadBranch, pr.BaseRepo.FullName(), pr.BaseBranch, err, outbuf.String(), errbuf.String())
log.Error("git merge %-v: %v\n%s\n%s", ctx.pr, err, ctx.outbuf.String(), ctx.errbuf.String())
return fmt.Errorf("git merge %v: %w\n%s\n%s", ctx.pr, err, ctx.outbuf.String(), ctx.errbuf.String())
}
ctx.outbuf.Reset()
ctx.errbuf.Reset()
return nil
}
var escapedSymbols = regexp.MustCompile(`([*[?! \\])`)
func getDiffTree(ctx context.Context, repoPath, baseBranch, headBranch string) (string, error) {
getDiffTreeFromBranch := func(repoPath, baseBranch, headBranch string) (string, error) {
var outbuf, errbuf strings.Builder
// Compute the diff-tree for sparse-checkout
if err := git.NewCommand(ctx, "diff-tree", "--no-commit-id", "--name-only", "-r", "-z", "--root").AddDynamicArguments(baseBranch, headBranch).
Run(&git.RunOpts{
Dir: repoPath,
Stdout: &outbuf,
Stderr: &errbuf,
}); err != nil {
return "", fmt.Errorf("git diff-tree [%s base:%s head:%s]: %s", repoPath, baseBranch, headBranch, errbuf.String())
}
return outbuf.String(), nil
}
scanNullTerminatedStrings := func(data []byte, atEOF bool) (advance int, token []byte, err error) {
if atEOF && len(data) == 0 {
return 0, nil, nil
}
if i := bytes.IndexByte(data, '\x00'); i >= 0 {
return i + 1, data[0:i], nil
}
if atEOF {
return len(data), data, nil
}
return 0, nil, nil
}
list, err := getDiffTreeFromBranch(repoPath, baseBranch, headBranch)
if err != nil {
return "", err
}
// Prefix each entry with '/', otherwise all files with the same name in subdirectories would be matched.
out := bytes.Buffer{}
scanner := bufio.NewScanner(strings.NewReader(list))
scanner.Split(scanNullTerminatedStrings)
for scanner.Scan() {
filepath := scanner.Text()
// escape '*', '?', '[', spaces and '!' prefix
filepath = escapedSymbols.ReplaceAllString(filepath, `\$1`)
// not necessary to escape the first '#' symbol because the first symbol is '/'
fmt.Fprintf(&out, "/%s\n", filepath)
}
return out.String(), nil
}
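For illustration only (not part of the diff): a minimal, self-contained sketch of how a NUL-terminated SplitFunc like scanNullTerminatedStrings above behaves when fed "git diff-tree ... -z"-style output. The sample file names are invented.

package main

import (
	"bufio"
	"bytes"
	"fmt"
	"strings"
)

func main() {
	// mirrors scanNullTerminatedStrings: split the stream on '\x00' bytes
	scanNullTerminatedStrings := func(data []byte, atEOF bool) (advance int, token []byte, err error) {
		if atEOF && len(data) == 0 {
			return 0, nil, nil
		}
		if i := bytes.IndexByte(data, '\x00'); i >= 0 {
			return i + 1, data[0:i], nil
		}
		if atEOF {
			return len(data), data, nil
		}
		return 0, nil, nil
	}

	// hypothetical `git diff-tree -z` output: NUL-separated changed file names
	input := "README.md\x00docs/new file.md\x00web_src/js/index.js\x00"
	scanner := bufio.NewScanner(strings.NewReader(input))
	scanner.Split(scanNullTerminatedStrings)
	for scanner.Scan() {
		fmt.Printf("%q\n", scanner.Text()) // "README.md", "docs/new file.md", "web_src/js/index.js"
	}
}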
// IsUserAllowedToMerge check if user is allowed to merge PR with given permissions and branch protections
func IsUserAllowedToMerge(ctx context.Context, pr *issues_model.PullRequest, p access_model.Permission, user *user_model.User) (bool, error) {
if user == nil {

View File

@ -0,0 +1,25 @@
// Copyright 2023 The Gitea Authors. All rights reserved.
// SPDX-License-Identifier: MIT
package pull
import (
repo_model "code.gitea.io/gitea/models/repo"
"code.gitea.io/gitea/modules/git"
"code.gitea.io/gitea/modules/log"
)
// doMergeStyleMerge merges the tracking branch into the current HEAD - which is assumed to be the staging branch (equal to the pr.BaseBranch)
func doMergeStyleMerge(ctx *mergeContext, message string) error {
cmd := git.NewCommand(ctx, "merge", "--no-ff", "--no-commit").AddDynamicArguments(trackingBranch)
if err := runMergeCommand(ctx, repo_model.MergeStyleMerge, cmd); err != nil {
log.Error("%-v Unable to merge tracking into base: %v", ctx.pr, err)
return err
}
if err := commitAndSignNoAuthor(ctx, message); err != nil {
log.Error("%-v Unable to make final commit: %v", ctx.pr, err)
return err
}
return nil
}

View File

@ -0,0 +1,297 @@
// Copyright 2023 The Gitea Authors. All rights reserved.
// SPDX-License-Identifier: MIT
package pull
import (
"bufio"
"bytes"
"context"
"fmt"
"io"
"os"
"path/filepath"
"strings"
"time"
"code.gitea.io/gitea/models"
issues_model "code.gitea.io/gitea/models/issues"
repo_model "code.gitea.io/gitea/models/repo"
user_model "code.gitea.io/gitea/models/user"
"code.gitea.io/gitea/modules/git"
"code.gitea.io/gitea/modules/log"
asymkey_service "code.gitea.io/gitea/services/asymkey"
)
type mergeContext struct {
*prContext
doer *user_model.User
sig *git.Signature
committer *git.Signature
signArg git.TrustedCmdArgs
env []string
}
func (ctx *mergeContext) RunOpts() *git.RunOpts {
ctx.outbuf.Reset()
ctx.errbuf.Reset()
return &git.RunOpts{
Env: ctx.env,
Dir: ctx.tmpBasePath,
Stdout: ctx.outbuf,
Stderr: ctx.errbuf,
}
}
func createTemporaryRepoForMerge(ctx context.Context, pr *issues_model.PullRequest, doer *user_model.User, expectedHeadCommitID string) (mergeCtx *mergeContext, cancel context.CancelFunc, err error) {
// Clone base repo.
prCtx, cancel, err := createTemporaryRepoForPR(ctx, pr)
if err != nil {
log.Error("createTemporaryRepoForPR: %v", err)
return nil, cancel, err
}
mergeCtx = &mergeContext{
prContext: prCtx,
doer: doer,
}
if expectedHeadCommitID != "" {
trackingCommitID, _, err := git.NewCommand(ctx, "show-ref", "--hash").AddDynamicArguments(git.BranchPrefix + trackingBranch).RunStdString(&git.RunOpts{Dir: mergeCtx.tmpBasePath})
if err != nil {
defer cancel()
log.Error("failed to get sha of head branch in %-v: show-ref[%s] --hash refs/heads/tracking: %v", mergeCtx.pr, mergeCtx.tmpBasePath, err)
return nil, nil, fmt.Errorf("unable to get sha of head branch in %v %w", pr, err)
}
if strings.TrimSpace(trackingCommitID) != expectedHeadCommitID {
defer cancel()
return nil, nil, models.ErrSHADoesNotMatch{
GivenSHA: expectedHeadCommitID,
CurrentSHA: trackingCommitID,
}
}
}
mergeCtx.outbuf.Reset()
mergeCtx.errbuf.Reset()
if err := prepareTemporaryRepoForMerge(mergeCtx); err != nil {
defer cancel()
return nil, nil, err
}
mergeCtx.sig = doer.NewGitSig()
mergeCtx.committer = mergeCtx.sig
// Determine if we should sign
sign, keyID, signer, _ := asymkey_service.SignMerge(ctx, mergeCtx.pr, mergeCtx.doer, mergeCtx.tmpBasePath, "HEAD", trackingBranch)
if sign {
mergeCtx.signArg = git.ToTrustedCmdArgs([]string{"-S" + keyID})
if pr.BaseRepo.GetTrustModel() == repo_model.CommitterTrustModel || pr.BaseRepo.GetTrustModel() == repo_model.CollaboratorCommitterTrustModel {
mergeCtx.committer = signer
}
} else {
mergeCtx.signArg = git.ToTrustedCmdArgs([]string{"--no-gpg-sign"})
}
commitTimeStr := time.Now().Format(time.RFC3339)
// Because this may call hooks we should pass in the environment
mergeCtx.env = append(os.Environ(),
"GIT_AUTHOR_NAME="+mergeCtx.sig.Name,
"GIT_AUTHOR_EMAIL="+mergeCtx.sig.Email,
"GIT_AUTHOR_DATE="+commitTimeStr,
"GIT_COMMITTER_NAME="+mergeCtx.committer.Name,
"GIT_COMMITTER_EMAIL="+mergeCtx.committer.Email,
"GIT_COMMITTER_DATE="+commitTimeStr,
)
return mergeCtx, cancel, nil
}
// prepareTemporaryRepoForMerge takes a repository that has been created using createTemporaryRepo
// it then sets up the sparse-checkout and other things
func prepareTemporaryRepoForMerge(ctx *mergeContext) error {
infoPath := filepath.Join(ctx.tmpBasePath, ".git", "info")
if err := os.MkdirAll(infoPath, 0o700); err != nil {
log.Error("%-v Unable to create .git/info in %s: %v", ctx.pr, ctx.tmpBasePath, err)
return fmt.Errorf("Unable to create .git/info in tmpBasePath: %w", err)
}
// Enable sparse-checkout
// Here we use the .git/info/sparse-checkout file as described in the git documentation
sparseCheckoutListFile, err := os.OpenFile(filepath.Join(infoPath, "sparse-checkout"), os.O_WRONLY|os.O_CREATE|os.O_TRUNC, 0o600)
if err != nil {
log.Error("%-v Unable to write .git/info/sparse-checkout file in %s: %v", ctx.pr, ctx.tmpBasePath, err)
return fmt.Errorf("Unable to write .git/info/sparse-checkout file in tmpBasePath: %w", err)
}
defer sparseCheckoutListFile.Close() // we will close it earlier but we need to ensure it is closed if there is an error
if err := getDiffTree(ctx, ctx.tmpBasePath, baseBranch, trackingBranch, sparseCheckoutListFile); err != nil {
log.Error("%-v getDiffTree(%s, %s, %s): %v", ctx.pr, ctx.tmpBasePath, baseBranch, trackingBranch, err)
return fmt.Errorf("getDiffTree: %w", err)
}
if err := sparseCheckoutListFile.Close(); err != nil {
log.Error("%-v Unable to close .git/info/sparse-checkout file in %s: %v", ctx.pr, ctx.tmpBasePath, err)
return fmt.Errorf("Unable to close .git/info/sparse-checkout file in tmpBasePath: %w", err)
}
gitConfigCommand := func() *git.Command {
return git.NewCommand(ctx, "config", "--local")
}
setConfig := func(key, value string) error {
if err := gitConfigCommand().AddArguments(git.ToTrustedCmdArgs([]string{key, value})...).
Run(ctx.RunOpts()); err != nil {
if value == "" {
value = "<>"
}
log.Error("git config [%s -> %s ]: %v\n%s\n%s", key, value, err, ctx.outbuf.String(), ctx.errbuf.String())
return fmt.Errorf("git config [%s -> %s ]: %w\n%s\n%s", key, value, err, ctx.outbuf.String(), ctx.errbuf.String())
}
ctx.outbuf.Reset()
ctx.errbuf.Reset()
return nil
}
// Switch off LFS process (set required, clean and smudge here also)
if err := setConfig("filter.lfs.process", ""); err != nil {
return err
}
if err := setConfig("filter.lfs.required", "false"); err != nil {
return err
}
if err := setConfig("filter.lfs.clean", ""); err != nil {
return err
}
if err := setConfig("filter.lfs.smudge", ""); err != nil {
return err
}
if err := setConfig("core.sparseCheckout", "true"); err != nil {
return err
}
// Read base branch index
if err := git.NewCommand(ctx, "read-tree", "HEAD").
Run(ctx.RunOpts()); err != nil {
log.Error("git read-tree HEAD: %v\n%s\n%s", err, ctx.outbuf.String(), ctx.errbuf.String())
return fmt.Errorf("Unable to read base branch in to the index: %w\n%s\n%s", err, ctx.outbuf.String(), ctx.errbuf.String())
}
ctx.outbuf.Reset()
ctx.errbuf.Reset()
return nil
}
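Roughly speaking (an illustrative summary, not part of the diff), a successful prepareTemporaryRepoForMerge leaves the temporary clone in this state:
- filter.lfs.process, filter.lfs.clean and filter.lfs.smudge are set to empty strings and filter.lfs.required to false, so LFS filters stay out of the merge;
- core.sparseCheckout is true and .git/info/sparse-checkout lists only the '/'-prefixed, escaped paths that differ between base and tracking (written by getDiffTree below);
- the index has been loaded from HEAD via git read-tree, so later checkout/merge steps only need to materialise the listed paths.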
// getDiffTree returns a string containing all the files that were changed between headBranch and baseBranch
// the filenames are escaped so as to fit the format required for .git/info/sparse-checkout
func getDiffTree(ctx context.Context, repoPath, baseBranch, headBranch string, out io.Writer) error {
diffOutReader, diffOutWriter, err := os.Pipe()
if err != nil {
log.Error("Unable to create os.Pipe for %s", repoPath)
return err
}
defer func() {
_ = diffOutReader.Close()
_ = diffOutWriter.Close()
}()
scanNullTerminatedStrings := func(data []byte, atEOF bool) (advance int, token []byte, err error) {
if atEOF && len(data) == 0 {
return 0, nil, nil
}
if i := bytes.IndexByte(data, '\x00'); i >= 0 {
return i + 1, data[0:i], nil
}
if atEOF {
return len(data), data, nil
}
return 0, nil, nil
}
err = git.NewCommand(ctx, "diff-tree", "--no-commit-id", "--name-only", "-r", "-r", "-z", "--root").AddDynamicArguments(baseBranch, headBranch).
Run(&git.RunOpts{
Dir: repoPath,
Stdout: diffOutWriter,
PipelineFunc: func(ctx context.Context, cancel context.CancelFunc) error {
// Close the writer end of the pipe to begin processing
_ = diffOutWriter.Close()
defer func() {
// Close the reader on return to terminate the git command if necessary
_ = diffOutReader.Close()
}()
// Now scan the output from the command
scanner := bufio.NewScanner(diffOutReader)
scanner.Split(scanNullTerminatedStrings)
for scanner.Scan() {
filepath := scanner.Text()
// escape '*', '?', '[', spaces and '!' prefix
filepath = escapedSymbols.ReplaceAllString(filepath, `\$1`)
// not necessary to escape the first '#' symbol because the first symbol is '/'
fmt.Fprintf(out, "/%s\n", filepath)
}
return scanner.Err()
},
})
return err
}
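A small, hypothetical example (not part of the diff) of what the escaping above produces for the sparse-checkout file; the file names are invented, the regexp is the package-level escapedSymbols shown earlier.

package main

import (
	"fmt"
	"regexp"
)

// mirrors the package-level escapedSymbols regexp used by getDiffTree
var escapedSymbols = regexp.MustCompile(`([*[?! \\])`)

func main() {
	for _, p := range []string{"docs/new file.md", "pkg/[tmp]/a*.go", "!important.txt"} {
		// each changed file becomes a '/'-prefixed, escaped sparse-checkout pattern
		fmt.Printf("/%s\n", escapedSymbols.ReplaceAllString(p, `\$1`))
	}
	// Output:
	// /docs/new\ file.md
	// /pkg/\[tmp]/a\*.go
	// /\!important.txt
}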
// rebaseTrackingOnToBase checks out the tracking branch as staging and rebases it on to the base branch
// if there is a conflict it will return a models.ErrRebaseConflicts
func rebaseTrackingOnToBase(ctx *mergeContext, mergeStyle repo_model.MergeStyle) error {
// Checkout head branch
if err := git.NewCommand(ctx, "checkout", "-b").AddDynamicArguments(stagingBranch, trackingBranch).
Run(ctx.RunOpts()); err != nil {
return fmt.Errorf("unable to git checkout tracking as staging in temp repo for %v: %w\n%s\n%s", ctx.pr, err, ctx.outbuf.String(), ctx.errbuf.String())
}
ctx.outbuf.Reset()
ctx.errbuf.Reset()
// Rebase before merging
if err := git.NewCommand(ctx, "rebase").AddDynamicArguments(baseBranch).
Run(ctx.RunOpts()); err != nil {
// Rebase will leave a REBASE_HEAD file in .git if there is a conflict
if _, statErr := os.Stat(filepath.Join(ctx.tmpBasePath, ".git", "REBASE_HEAD")); statErr == nil {
var commitSha string
ok := false
failingCommitPaths := []string{
filepath.Join(ctx.tmpBasePath, ".git", "rebase-apply", "original-commit"), // Git < 2.26
filepath.Join(ctx.tmpBasePath, ".git", "rebase-merge", "stopped-sha"), // Git >= 2.26
}
for _, failingCommitPath := range failingCommitPaths {
if _, statErr := os.Stat(failingCommitPath); statErr == nil {
commitShaBytes, readErr := os.ReadFile(failingCommitPath)
if readErr != nil {
// Abandon this attempt to handle the error
return fmt.Errorf("unable to git rebase staging on to base in temp repo for %v: %w\n%s\n%s", ctx.pr, err, ctx.outbuf.String(), ctx.errbuf.String())
}
commitSha = strings.TrimSpace(string(commitShaBytes))
ok = true
break
}
}
if !ok {
log.Error("Unable to determine failing commit sha for failing rebase in temp repo for %-v. Cannot cast as models.ErrRebaseConflicts.", ctx.pr)
return fmt.Errorf("unable to git rebase staging on to base in temp repo for %v: %w\n%s\n%s", ctx.pr, err, ctx.outbuf.String(), ctx.errbuf.String())
}
log.Debug("Conflict when rebasing staging on to base in %-v at %s: %v\n%s\n%s", ctx.pr, commitSha, err, ctx.outbuf.String(), ctx.errbuf.String())
return models.ErrRebaseConflicts{
CommitSHA: commitSha,
Style: mergeStyle,
StdOut: ctx.outbuf.String(),
StdErr: ctx.errbuf.String(),
Err: err,
}
}
return fmt.Errorf("unable to git rebase staging on to base in temp repo for %v: %w\n%s\n%s", ctx.pr, err, ctx.outbuf.String(), ctx.errbuf.String())
}
ctx.outbuf.Reset()
ctx.errbuf.Reset()
return nil
}

View File

@ -0,0 +1,50 @@
// Copyright 2023 The Gitea Authors. All rights reserved.
// SPDX-License-Identifier: MIT
package pull
import (
"fmt"
repo_model "code.gitea.io/gitea/models/repo"
"code.gitea.io/gitea/modules/git"
"code.gitea.io/gitea/modules/log"
)
// doMergeStyleRebase rebases the tracking branch onto the base branch as the current HEAD, with or without a merge commit to the original pr branch
func doMergeStyleRebase(ctx *mergeContext, mergeStyle repo_model.MergeStyle, message string) error {
if err := rebaseTrackingOnToBase(ctx, mergeStyle); err != nil {
return err
}
// Checkout base branch again
if err := git.NewCommand(ctx, "checkout").AddDynamicArguments(baseBranch).
Run(ctx.RunOpts()); err != nil {
log.Error("git checkout base prior to merge post staging rebase %-v: %v\n%s\n%s", ctx.pr, err, ctx.outbuf.String(), ctx.errbuf.String())
return fmt.Errorf("git checkout base prior to merge post staging rebase %v: %w\n%s\n%s", ctx.pr, err, ctx.outbuf.String(), ctx.errbuf.String())
}
ctx.outbuf.Reset()
ctx.errbuf.Reset()
cmd := git.NewCommand(ctx, "merge")
if mergeStyle == repo_model.MergeStyleRebase {
cmd.AddArguments("--ff-only")
} else {
cmd.AddArguments("--no-ff", "--no-commit")
}
cmd.AddDynamicArguments(stagingBranch)
// Prepare merge with commit
if err := runMergeCommand(ctx, mergeStyle, cmd); err != nil {
log.Error("Unable to merge staging into base: %v", err)
return err
}
if mergeStyle == repo_model.MergeStyleRebaseMerge {
if err := commitAndSignNoAuthor(ctx, message); err != nil {
log.Error("Unable to make final commit: %v", err)
return err
}
}
return nil
}

View File

@ -0,0 +1,53 @@
// Copyright 2023 The Gitea Authors. All rights reserved.
// SPDX-License-Identifier: MIT
package pull
import (
"fmt"
repo_model "code.gitea.io/gitea/models/repo"
"code.gitea.io/gitea/modules/git"
"code.gitea.io/gitea/modules/log"
"code.gitea.io/gitea/modules/setting"
)
// doMergeStyleSquash squashes the tracking branch on the current HEAD (=base)
func doMergeStyleSquash(ctx *mergeContext, message string) error {
cmd := git.NewCommand(ctx, "merge", "--squash").AddDynamicArguments(trackingBranch)
if err := runMergeCommand(ctx, repo_model.MergeStyleSquash, cmd); err != nil {
log.Error("%-v Unable to merge --squash tracking into base: %v", ctx.pr, err)
return err
}
if err := ctx.pr.Issue.LoadPoster(ctx); err != nil {
log.Error("%-v Issue[%d].LoadPoster: %v", ctx.pr, ctx.pr.Issue.ID, err)
return fmt.Errorf("LoadPoster: %w", err)
}
sig := ctx.pr.Issue.Poster.NewGitSig()
if len(ctx.signArg) == 0 {
if err := git.NewCommand(ctx, "commit").
AddOptionFormat("--author='%s <%s>'", sig.Name, sig.Email).
AddOptionFormat("--message=%s", message).
Run(ctx.RunOpts()); err != nil {
log.Error("git commit %-v: %v\n%s\n%s", ctx.pr, err, ctx.outbuf.String(), ctx.errbuf.String())
return fmt.Errorf("git commit [%s:%s -> %s:%s]: %w\n%s\n%s", ctx.pr.HeadRepo.FullName(), ctx.pr.HeadBranch, ctx.pr.BaseRepo.FullName(), ctx.pr.BaseBranch, err, ctx.outbuf.String(), ctx.errbuf.String())
}
} else {
if setting.Repository.PullRequest.AddCoCommitterTrailers && ctx.committer.String() != sig.String() {
// add trailer
message += fmt.Sprintf("\nCo-authored-by: %s\nCo-committed-by: %s\n", sig.String(), sig.String())
}
if err := git.NewCommand(ctx, "commit").
AddArguments(ctx.signArg...).
AddOptionFormat("--author='%s <%s>'", sig.Name, sig.Email).
AddOptionFormat("--message=%s", message).
Run(ctx.RunOpts()); err != nil {
log.Error("git commit %-v: %v\n%s\n%s", ctx.pr, err, ctx.outbuf.String(), ctx.errbuf.String())
return fmt.Errorf("git commit [%s:%s -> %s:%s]: %w\n%s\n%s", ctx.pr.HeadRepo.FullName(), ctx.pr.HeadBranch, ctx.pr.BaseRepo.FullName(), ctx.pr.BaseBranch, err, ctx.outbuf.String(), ctx.errbuf.String())
}
}
ctx.outbuf.Reset()
ctx.errbuf.Reset()
return nil
}
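For illustration (not part of the diff; names and message are invented): with AddCoCommitterTrailers enabled and a committer different from the PR poster, the squash commit message produced above would end with trailers such as

Fix the widget frobnicator (#123)

Co-authored-by: Jane Doe <jane@example.com>
Co-committed-by: Jane Doe <jane@example.com>

where "Jane Doe <jane@example.com>" is sig.String() for the original PR poster.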

View File

@ -22,7 +22,6 @@ import (
"code.gitea.io/gitea/modules/graceful"
"code.gitea.io/gitea/modules/log"
"code.gitea.io/gitea/modules/process"
repo_module "code.gitea.io/gitea/modules/repository"
"code.gitea.io/gitea/modules/setting"
"code.gitea.io/gitea/modules/util"
@ -64,25 +63,21 @@ func TestPatch(pr *issues_model.PullRequest) error {
defer finished()
// Clone base repo.
tmpBasePath, err := createTemporaryRepo(ctx, pr)
prCtx, cancel, err := createTemporaryRepoForPR(ctx, pr)
if err != nil {
log.Error("CreateTemporaryPath: %v", err)
log.Error("createTemporaryRepoForPR %-v: %v", pr, err)
return err
}
defer func() {
if err := repo_module.RemoveTemporaryPath(tmpBasePath); err != nil {
log.Error("Merge: RemoveTemporaryPath: %s", err)
}
}()
defer cancel()
gitRepo, err := git.OpenRepository(ctx, tmpBasePath)
gitRepo, err := git.OpenRepository(ctx, prCtx.tmpBasePath)
if err != nil {
return fmt.Errorf("OpenRepository: %w", err)
}
defer gitRepo.Close()
// 1. update merge base
pr.MergeBase, _, err = git.NewCommand(ctx, "merge-base", "--", "base", "tracking").RunStdString(&git.RunOpts{Dir: tmpBasePath})
pr.MergeBase, _, err = git.NewCommand(ctx, "merge-base", "--", "base", "tracking").RunStdString(&git.RunOpts{Dir: prCtx.tmpBasePath})
if err != nil {
var err2 error
pr.MergeBase, err2 = gitRepo.GetRefCommitID(git.BranchPrefix + "base")
@ -101,7 +96,7 @@ func TestPatch(pr *issues_model.PullRequest) error {
}
// 2. Check for conflicts
if conflicts, err := checkConflicts(ctx, pr, gitRepo, tmpBasePath); err != nil || conflicts || pr.Status == issues_model.PullRequestStatusEmpty {
if conflicts, err := checkConflicts(ctx, pr, gitRepo, prCtx.tmpBasePath); err != nil || conflicts || pr.Status == issues_model.PullRequestStatusEmpty {
return err
}

View File

@ -349,18 +349,14 @@ func AddTestPullRequestTask(doer *user_model.User, repoID int64, branch string,
// checkIfPRContentChanged checks if diff to target branch has changed by push
// A commit can be considered to leave the PR untouched if the patch/diff with its merge base is unchanged
func checkIfPRContentChanged(ctx context.Context, pr *issues_model.PullRequest, oldCommitID, newCommitID string) (hasChanged bool, err error) {
tmpBasePath, err := createTemporaryRepo(ctx, pr)
prCtx, cancel, err := createTemporaryRepoForPR(ctx, pr)
if err != nil {
log.Error("CreateTemporaryRepo: %v", err)
log.Error("CreateTemporaryRepoForPR %-v: %v", pr, err)
return false, err
}
defer func() {
if err := repo_module.RemoveTemporaryPath(tmpBasePath); err != nil {
log.Error("checkIfPRContentChanged: RemoveTemporaryPath: %s", err)
}
}()
defer cancel()
tmpRepo, err := git.OpenRepository(ctx, tmpBasePath)
tmpRepo, err := git.OpenRepository(ctx, prCtx.tmpBasePath)
if err != nil {
return false, fmt.Errorf("OpenRepository: %w", err)
}
@ -379,7 +375,7 @@ func checkIfPRContentChanged(ctx context.Context, pr *issues_model.PullRequest,
}
if err := cmd.Run(&git.RunOpts{
Dir: tmpBasePath,
Dir: prCtx.tmpBasePath,
Stdout: stdoutWriter,
PipelineFunc: func(ctx context.Context, cancel context.CancelFunc) error {
_ = stdoutWriter.Close()

View File

@ -19,49 +19,85 @@ import (
repo_module "code.gitea.io/gitea/modules/repository"
)
// createTemporaryRepo creates a temporary repo with "base" for pr.BaseBranch and "tracking" for pr.HeadBranch
// Temporary repos created here use standard branch names to help simplify
// merging code
const (
baseBranch = "base" // equivalent to pr.BaseBranch
trackingBranch = "tracking" // equivalent to pr.HeadBranch
stagingBranch = "staging" // this is used for a working branch
)
type prContext struct {
context.Context
tmpBasePath string
pr *issues_model.PullRequest
outbuf *strings.Builder // we keep these around to help reduce needless buffer recreation,
errbuf *strings.Builder // any use should be preceded by a Reset, and preferably followed by one as well
}
func (ctx *prContext) RunOpts() *git.RunOpts {
ctx.outbuf.Reset()
ctx.errbuf.Reset()
return &git.RunOpts{
Dir: ctx.tmpBasePath,
Stdout: ctx.outbuf,
Stderr: ctx.errbuf,
}
}
// createTemporaryRepoForPR creates a temporary repo with "base" for pr.BaseBranch and "tracking" for pr.HeadBranch
// it also creates a second base branch called "original_base"
func createTemporaryRepo(ctx context.Context, pr *issues_model.PullRequest) (string, error) {
func createTemporaryRepoForPR(ctx context.Context, pr *issues_model.PullRequest) (prCtx *prContext, cancel context.CancelFunc, err error) {
if err := pr.LoadHeadRepo(ctx); err != nil {
log.Error("LoadHeadRepo: %v", err)
return "", fmt.Errorf("LoadHeadRepo: %w", err)
log.Error("%-v LoadHeadRepo: %v", pr, err)
return nil, nil, fmt.Errorf("%v LoadHeadRepo: %w", pr, err)
} else if pr.HeadRepo == nil {
log.Error("Pr %d HeadRepo %d does not exist", pr.ID, pr.HeadRepoID)
return "", &repo_model.ErrRepoNotExist{
log.Error("%-v HeadRepo %d does not exist", pr, pr.HeadRepoID)
return nil, nil, &repo_model.ErrRepoNotExist{
ID: pr.HeadRepoID,
}
} else if err := pr.LoadBaseRepo(ctx); err != nil {
log.Error("LoadBaseRepo: %v", err)
return "", fmt.Errorf("LoadBaseRepo: %w", err)
log.Error("%-v LoadBaseRepo: %v", pr, err)
return nil, nil, fmt.Errorf("%v LoadBaseRepo: %w", pr, err)
} else if pr.BaseRepo == nil {
log.Error("Pr %d BaseRepo %d does not exist", pr.ID, pr.BaseRepoID)
return "", &repo_model.ErrRepoNotExist{
log.Error("%-v BaseRepo %d does not exist", pr, pr.BaseRepoID)
return nil, nil, &repo_model.ErrRepoNotExist{
ID: pr.BaseRepoID,
}
} else if err := pr.HeadRepo.LoadOwner(ctx); err != nil {
log.Error("HeadRepo.LoadOwner: %v", err)
return "", fmt.Errorf("HeadRepo.LoadOwner: %w", err)
log.Error("%-v HeadRepo.LoadOwner: %v", pr, err)
return nil, nil, fmt.Errorf("%v HeadRepo.LoadOwner: %w", pr, err)
} else if err := pr.BaseRepo.LoadOwner(ctx); err != nil {
log.Error("BaseRepo.LoadOwner: %v", err)
return "", fmt.Errorf("BaseRepo.LoadOwner: %w", err)
log.Error("%-v BaseRepo.LoadOwner: %v", pr, err)
return nil, nil, fmt.Errorf("%v BaseRepo.LoadOwner: %w", pr, err)
}
// Clone base repo.
tmpBasePath, err := repo_module.CreateTemporaryPath("pull")
if err != nil {
log.Error("CreateTemporaryPath: %v", err)
return "", err
log.Error("CreateTemporaryPath[%-v]: %v", pr, err)
return nil, nil, err
}
prCtx = &prContext{
Context: ctx,
tmpBasePath: tmpBasePath,
pr: pr,
outbuf: &strings.Builder{},
errbuf: &strings.Builder{},
}
cancel = func() {
if err := repo_module.RemoveTemporaryPath(tmpBasePath); err != nil {
log.Error("Error whilst removing removing temporary repo for %-v: %v", pr, err)
}
}
baseRepoPath := pr.BaseRepo.RepoPath()
headRepoPath := pr.HeadRepo.RepoPath()
if err := git.InitRepository(ctx, tmpBasePath, false); err != nil {
log.Error("git init tmpBasePath: %v", err)
if err := repo_module.RemoveTemporaryPath(tmpBasePath); err != nil {
log.Error("CreateTempRepo: RemoveTemporaryPath: %s", err)
}
return "", err
log.Error("Unable to init tmpBasePath for %-v: %v", pr, err)
cancel()
return nil, nil, err
}
remoteRepoName := "head_repo"
@ -73,99 +109,63 @@ func createTemporaryRepo(ctx context.Context, pr *issues_model.PullRequest) (str
fetchArgs = append(fetchArgs, "--no-write-commit-graph")
}
// Add head repo remote.
addCacheRepo := func(staging, cache string) error {
p := filepath.Join(staging, ".git", "objects", "info", "alternates")
// addCacheRepo adds git alternatives for the cacheRepoPath in the repoPath
addCacheRepo := func(repoPath, cacheRepoPath string) error {
p := filepath.Join(repoPath, ".git", "objects", "info", "alternates")
f, err := os.OpenFile(p, os.O_APPEND|os.O_CREATE|os.O_WRONLY, 0o600)
if err != nil {
log.Error("Could not create .git/objects/info/alternates file in %s: %v", staging, err)
log.Error("Could not create .git/objects/info/alternates file in %s: %v", repoPath, err)
return err
}
defer f.Close()
data := filepath.Join(cache, "objects")
data := filepath.Join(cacheRepoPath, "objects")
if _, err := fmt.Fprintln(f, data); err != nil {
log.Error("Could not write to .git/objects/info/alternates file in %s: %v", staging, err)
log.Error("Could not write to .git/objects/info/alternates file in %s: %v", repoPath, err)
return err
}
return nil
}
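As a concrete illustration (not part of the diff; the path is invented): after addCacheRepo(tmpBasePath, baseRepoPath), the temporary repo's .git/objects/info/alternates would contain a single line such as

/data/gitea-repositories/someuser/somerepo.git/objects

which lets git in the temporary clone resolve objects directly from the base repository's object store instead of copying them, so the fetches below stay cheap.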
// Add head repo remote.
if err := addCacheRepo(tmpBasePath, baseRepoPath); err != nil {
log.Error("Unable to add base repository to temporary repo [%s -> %s]: %v", pr.BaseRepo.FullName(), tmpBasePath, err)
if err := repo_module.RemoveTemporaryPath(tmpBasePath); err != nil {
log.Error("CreateTempRepo: RemoveTemporaryPath: %s", err)
}
return "", fmt.Errorf("Unable to add base repository to temporary repo [%s -> tmpBasePath]: %w", pr.BaseRepo.FullName(), err)
log.Error("%-v Unable to add base repository to temporary repo [%s -> %s]: %v", pr, pr.BaseRepo.FullName(), tmpBasePath, err)
cancel()
return nil, nil, fmt.Errorf("Unable to add base repository to temporary repo [%s -> tmpBasePath]: %w", pr.BaseRepo.FullName(), err)
}
var outbuf, errbuf strings.Builder
if err := git.NewCommand(ctx, "remote", "add", "-t").AddDynamicArguments(pr.BaseBranch).AddArguments("-m").AddDynamicArguments(pr.BaseBranch).AddDynamicArguments("origin", baseRepoPath).
Run(&git.RunOpts{
Dir: tmpBasePath,
Stdout: &outbuf,
Stderr: &errbuf,
}); err != nil {
log.Error("Unable to add base repository as origin [%s -> %s]: %v\n%s\n%s", pr.BaseRepo.FullName(), tmpBasePath, err, outbuf.String(), errbuf.String())
if err := repo_module.RemoveTemporaryPath(tmpBasePath); err != nil {
log.Error("CreateTempRepo: RemoveTemporaryPath: %s", err)
}
return "", fmt.Errorf("Unable to add base repository as origin [%s -> tmpBasePath]: %w\n%s\n%s", pr.BaseRepo.FullName(), err, outbuf.String(), errbuf.String())
Run(prCtx.RunOpts()); err != nil {
log.Error("%-v Unable to add base repository as origin [%s -> %s]: %v\n%s\n%s", pr, pr.BaseRepo.FullName(), tmpBasePath, err, prCtx.outbuf.String(), prCtx.errbuf.String())
cancel()
return nil, nil, fmt.Errorf("Unable to add base repository as origin [%s -> tmpBasePath]: %w\n%s\n%s", pr.BaseRepo.FullName(), err, prCtx.outbuf.String(), prCtx.errbuf.String())
}
outbuf.Reset()
errbuf.Reset()
if err := git.NewCommand(ctx, "fetch", "origin").AddArguments(fetchArgs...).AddDashesAndList(pr.BaseBranch+":"+baseBranch, pr.BaseBranch+":original_"+baseBranch).
Run(&git.RunOpts{
Dir: tmpBasePath,
Stdout: &outbuf,
Stderr: &errbuf,
}); err != nil {
log.Error("Unable to fetch origin base branch [%s:%s -> base, original_base in %s]: %v:\n%s\n%s", pr.BaseRepo.FullName(), pr.BaseBranch, tmpBasePath, err, outbuf.String(), errbuf.String())
if err := repo_module.RemoveTemporaryPath(tmpBasePath); err != nil {
log.Error("CreateTempRepo: RemoveTemporaryPath: %s", err)
}
return "", fmt.Errorf("Unable to fetch origin base branch [%s:%s -> base, original_base in tmpBasePath]: %w\n%s\n%s", pr.BaseRepo.FullName(), pr.BaseBranch, err, outbuf.String(), errbuf.String())
Run(prCtx.RunOpts()); err != nil {
log.Error("%-v Unable to fetch origin base branch [%s:%s -> base, original_base in %s]: %v:\n%s\n%s", pr, pr.BaseRepo.FullName(), pr.BaseBranch, tmpBasePath, err, prCtx.outbuf.String(), prCtx.errbuf.String())
cancel()
return nil, nil, fmt.Errorf("Unable to fetch origin base branch [%s:%s -> base, original_base in tmpBasePath]: %w\n%s\n%s", pr.BaseRepo.FullName(), pr.BaseBranch, err, prCtx.outbuf.String(), prCtx.errbuf.String())
}
outbuf.Reset()
errbuf.Reset()
if err := git.NewCommand(ctx, "symbolic-ref").AddDynamicArguments("HEAD", git.BranchPrefix+baseBranch).
Run(&git.RunOpts{
Dir: tmpBasePath,
Stdout: &outbuf,
Stderr: &errbuf,
}); err != nil {
log.Error("Unable to set HEAD as base branch [%s]: %v\n%s\n%s", tmpBasePath, err, outbuf.String(), errbuf.String())
if err := repo_module.RemoveTemporaryPath(tmpBasePath); err != nil {
log.Error("CreateTempRepo: RemoveTemporaryPath: %s", err)
}
return "", fmt.Errorf("Unable to set HEAD as base branch [tmpBasePath]: %w\n%s\n%s", err, outbuf.String(), errbuf.String())
Run(prCtx.RunOpts()); err != nil {
log.Error("%-v Unable to set HEAD as base branch in [%s]: %v\n%s\n%s", pr, tmpBasePath, err, prCtx.outbuf.String(), prCtx.errbuf.String())
cancel()
return nil, nil, fmt.Errorf("Unable to set HEAD as base branch in tmpBasePath: %w\n%s\n%s", err, prCtx.outbuf.String(), prCtx.errbuf.String())
}
outbuf.Reset()
errbuf.Reset()
if err := addCacheRepo(tmpBasePath, headRepoPath); err != nil {
log.Error("Unable to add head repository to temporary repo [%s -> %s]: %v", pr.HeadRepo.FullName(), tmpBasePath, err)
if err := repo_module.RemoveTemporaryPath(tmpBasePath); err != nil {
log.Error("CreateTempRepo: RemoveTemporaryPath: %s", err)
}
return "", fmt.Errorf("Unable to head base repository to temporary repo [%s -> tmpBasePath]: %w", pr.HeadRepo.FullName(), err)
log.Error("%-v Unable to add head repository to temporary repo [%s -> %s]: %v", pr, pr.HeadRepo.FullName(), tmpBasePath, err)
cancel()
return nil, nil, fmt.Errorf("Unable to add head base repository to temporary repo [%s -> tmpBasePath]: %w", pr.HeadRepo.FullName(), err)
}
if err := git.NewCommand(ctx, "remote", "add").AddDynamicArguments(remoteRepoName, headRepoPath).
Run(&git.RunOpts{
Dir: tmpBasePath,
Stdout: &outbuf,
Stderr: &errbuf,
}); err != nil {
log.Error("Unable to add head repository as head_repo [%s -> %s]: %v\n%s\n%s", pr.HeadRepo.FullName(), tmpBasePath, err, outbuf.String(), errbuf.String())
if err := repo_module.RemoveTemporaryPath(tmpBasePath); err != nil {
log.Error("CreateTempRepo: RemoveTemporaryPath: %s", err)
}
return "", fmt.Errorf("Unable to add head repository as head_repo [%s -> tmpBasePath]: %w\n%s\n%s", pr.HeadRepo.FullName(), err, outbuf.String(), errbuf.String())
Run(prCtx.RunOpts()); err != nil {
log.Error("%-v Unable to add head repository as head_repo [%s -> %s]: %v\n%s\n%s", pr, pr.HeadRepo.FullName(), tmpBasePath, err, prCtx.outbuf.String(), prCtx.errbuf.String())
cancel()
return nil, nil, fmt.Errorf("Unable to add head repository as head_repo [%s -> tmpBasePath]: %w\n%s\n%s", pr.HeadRepo.FullName(), err, prCtx.outbuf.String(), prCtx.errbuf.String())
}
outbuf.Reset()
errbuf.Reset()
trackingBranch := "tracking"
// Fetch head branch
@ -178,24 +178,18 @@ func createTemporaryRepo(ctx context.Context, pr *issues_model.PullRequest) (str
headBranch = pr.GetGitRefName()
}
if err := git.NewCommand(ctx, "fetch").AddArguments(fetchArgs...).AddDynamicArguments(remoteRepoName, headBranch+":"+trackingBranch).
Run(&git.RunOpts{
Dir: tmpBasePath,
Stdout: &outbuf,
Stderr: &errbuf,
}); err != nil {
if err := repo_module.RemoveTemporaryPath(tmpBasePath); err != nil {
log.Error("CreateTempRepo: RemoveTemporaryPath: %s", err)
}
Run(prCtx.RunOpts()); err != nil {
cancel()
if !git.IsBranchExist(ctx, pr.HeadRepo.RepoPath(), pr.HeadBranch) {
return "", models.ErrBranchDoesNotExist{
return nil, nil, models.ErrBranchDoesNotExist{
BranchName: pr.HeadBranch,
}
}
log.Error("Unable to fetch head_repo head branch [%s:%s -> tracking in %s]: %v:\n%s\n%s", pr.HeadRepo.FullName(), pr.HeadBranch, tmpBasePath, err, outbuf.String(), errbuf.String())
return "", fmt.Errorf("Unable to fetch head_repo head branch [%s:%s -> tracking in tmpBasePath]: %w\n%s\n%s", pr.HeadRepo.FullName(), headBranch, err, outbuf.String(), errbuf.String())
log.Error("%-v Unable to fetch head_repo head branch [%s:%s -> tracking in %s]: %v:\n%s\n%s", pr, pr.HeadRepo.FullName(), pr.HeadBranch, tmpBasePath, err, prCtx.outbuf.String(), prCtx.errbuf.String())
return nil, nil, fmt.Errorf("Unable to fetch head_repo head branch [%s:%s -> tracking in tmpBasePath]: %w\n%s\n%s", pr.HeadRepo.FullName(), headBranch, err, prCtx.outbuf.String(), prCtx.errbuf.String())
}
outbuf.Reset()
errbuf.Reset()
prCtx.outbuf.Reset()
prCtx.errbuf.Reset()
return tmpBasePath, nil
return prCtx, cancel, nil
}

View File

@ -16,61 +16,67 @@ import (
user_model "code.gitea.io/gitea/models/user"
"code.gitea.io/gitea/modules/git"
"code.gitea.io/gitea/modules/log"
repo_module "code.gitea.io/gitea/modules/repository"
)
// Update updates pull request with base branch.
func Update(ctx context.Context, pull *issues_model.PullRequest, doer *user_model.User, message string, rebase bool) error {
var (
pr *issues_model.PullRequest
style repo_model.MergeStyle
)
pullWorkingPool.CheckIn(fmt.Sprint(pull.ID))
defer pullWorkingPool.CheckOut(fmt.Sprint(pull.ID))
if rebase {
pr = pull
style = repo_model.MergeStyleRebaseUpdate
} else {
// use merge functions but switch repo's and branch's
pr = &issues_model.PullRequest{
HeadRepoID: pull.BaseRepoID,
BaseRepoID: pull.HeadRepoID,
HeadBranch: pull.BaseBranch,
BaseBranch: pull.HeadBranch,
}
style = repo_model.MergeStyleMerge
func Update(ctx context.Context, pr *issues_model.PullRequest, doer *user_model.User, message string, rebase bool) error {
if pr.Flow == issues_model.PullRequestFlowAGit {
// TODO: update of agit flow pull request's head branch is unsupported
return fmt.Errorf("update of agit flow pull request's head branch is unsupported")
}
if pull.Flow == issues_model.PullRequestFlowAGit {
// TODO: Not support update agit flow pull request's head branch
return fmt.Errorf("Not support update agit flow pull request's head branch")
}
pullWorkingPool.CheckIn(fmt.Sprint(pr.ID))
defer pullWorkingPool.CheckOut(fmt.Sprint(pr.ID))
if err := pr.LoadHeadRepo(ctx); err != nil {
log.Error("LoadHeadRepo: %v", err)
return fmt.Errorf("LoadHeadRepo: %w", err)
} else if err = pr.LoadBaseRepo(ctx); err != nil {
log.Error("LoadBaseRepo: %v", err)
return fmt.Errorf("LoadBaseRepo: %w", err)
}
diffCount, err := GetDiverging(ctx, pull)
diffCount, err := GetDiverging(ctx, pr)
if err != nil {
return err
} else if diffCount.Behind == 0 {
return fmt.Errorf("HeadBranch of PR %d is up to date", pull.Index)
return fmt.Errorf("HeadBranch of PR %d is up to date", pr.Index)
}
_, err = rawMerge(ctx, pr, doer, style, "", message)
if rebase {
defer func() {
go AddTestPullRequestTask(doer, pr.BaseRepo.ID, pr.BaseBranch, false, "", "")
}()
return updateHeadByRebaseOnToBase(ctx, pr, doer, message)
}
if err := pr.LoadBaseRepo(ctx); err != nil {
log.Error("unable to load BaseRepo for %-v during update-by-merge: %v", pr, err)
return fmt.Errorf("unable to load BaseRepo for PR[%d] during update-by-merge: %w", pr.ID, err)
}
if err := pr.LoadHeadRepo(ctx); err != nil {
log.Error("unable to load HeadRepo for PR %-v during update-by-merge: %v", pr, err)
return fmt.Errorf("unable to load HeadRepo for PR[%d] during update-by-merge: %w", pr.ID, err)
}
if pr.HeadRepo == nil {
// LoadHeadRepo will swallow ErrRepoNotExist so if pr.HeadRepo is still nil recreate the error
err := repo_model.ErrRepoNotExist{
ID: pr.HeadRepoID,
}
log.Error("unable to load HeadRepo for PR %-v during update-by-merge: %v", pr, err)
return fmt.Errorf("unable to load HeadRepo for PR[%d] during update-by-merge: %w", pr.ID, err)
}
// use merge functions but switch repos and branches
reversePR := &issues_model.PullRequest{
ID: pr.ID,
HeadRepoID: pr.BaseRepoID,
HeadRepo: pr.BaseRepo,
HeadBranch: pr.BaseBranch,
BaseRepoID: pr.HeadRepoID,
BaseRepo: pr.HeadRepo,
BaseBranch: pr.HeadBranch,
}
_, err = doMergeAndPush(ctx, reversePR, doer, repo_model.MergeStyleMerge, "", message)
defer func() {
if rebase {
go AddTestPullRequestTask(doer, pr.BaseRepo.ID, pr.BaseBranch, false, "", "")
return
}
go AddTestPullRequestTask(doer, pr.HeadRepo.ID, pr.HeadBranch, false, "", "")
go AddTestPullRequestTask(doer, reversePR.HeadRepo.ID, reversePR.HeadBranch, false, "", "")
}()
return err
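To make the repo/branch swap above concrete (an illustration, not part of the diff; branch names are hypothetical): for a pull request from a feature branch into main, the update-by-merge path builds reversePR with head = main and base = feature, so doMergeAndPush with repo_model.MergeStyleMerge merges main into feature using the ordinary merge machinery and pushes the result back to feature, i.e. the PR's real head branch.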
@ -159,27 +165,16 @@ func IsUserAllowedToUpdate(ctx context.Context, pull *issues_model.PullRequest,
// GetDiverging determines how many commits a PR is ahead or behind the PR base branch
func GetDiverging(ctx context.Context, pr *issues_model.PullRequest) (*git.DivergeObject, error) {
log.Trace("GetDiverging[%d]: compare commits", pr.ID)
if err := pr.LoadBaseRepo(ctx); err != nil {
return nil, err
}
if err := pr.LoadHeadRepo(ctx); err != nil {
return nil, err
}
tmpRepo, err := createTemporaryRepo(ctx, pr)
log.Trace("GetDiverging[%-v]: compare commits", pr)
prCtx, cancel, err := createTemporaryRepoForPR(ctx, pr)
if err != nil {
if !models.IsErrBranchDoesNotExist(err) {
log.Error("CreateTemporaryRepo: %v", err)
log.Error("CreateTemporaryRepoForPR %-v: %v", pr, err)
}
return nil, err
}
defer func() {
if err := repo_module.RemoveTemporaryPath(tmpRepo); err != nil {
log.Error("Merge: RemoveTemporaryPath: %s", err)
}
}()
defer cancel()
diff, err := git.GetDivergingCommits(ctx, tmpRepo, "base", "tracking")
diff, err := git.GetDivergingCommits(ctx, prCtx.tmpBasePath, baseBranch, trackingBranch)
return &diff, err
}

View File

@ -0,0 +1,107 @@
// Copyright 2023 The Gitea Authors. All rights reserved.
// SPDX-License-Identifier: MIT
package pull
import (
"context"
"fmt"
"strings"
issues_model "code.gitea.io/gitea/models/issues"
repo_model "code.gitea.io/gitea/models/repo"
user_model "code.gitea.io/gitea/models/user"
"code.gitea.io/gitea/modules/git"
"code.gitea.io/gitea/modules/log"
repo_module "code.gitea.io/gitea/modules/repository"
"code.gitea.io/gitea/modules/setting"
)
// updateHeadByRebaseOnToBase handles updating a PR's head branch by rebasing it on the PR current base branch
func updateHeadByRebaseOnToBase(ctx context.Context, pr *issues_model.PullRequest, doer *user_model.User, message string) error {
// "Clone" base repo and add the cache headers for the head repo and branch
mergeCtx, cancel, err := createTemporaryRepoForMerge(ctx, pr, doer, "")
if err != nil {
return err
}
defer cancel()
// Determine the old merge-base before the rebase - we use this for LFS push later on
oldMergeBase, _, _ := git.NewCommand(ctx, "merge-base").AddDashesAndList(baseBranch, trackingBranch).RunStdString(&git.RunOpts{Dir: mergeCtx.tmpBasePath})
oldMergeBase = strings.TrimSpace(oldMergeBase)
// Rebase the tracking branch on to the base as the staging branch
if err := rebaseTrackingOnToBase(mergeCtx, repo_model.MergeStyleRebaseUpdate); err != nil {
return err
}
if setting.LFS.StartServer {
// Now we need to ensure that the head repository contains any LFS objects between the new base and the old mergebase
// It's questionable where this should go - either after or before the push.
// In the interests of data safety, failures to push to the LFS server should prevent
// the push, as you can always re-rebase.
if err := LFSPush(ctx, mergeCtx.tmpBasePath, baseBranch, oldMergeBase, &issues_model.PullRequest{
HeadRepoID: pr.BaseRepoID,
BaseRepoID: pr.HeadRepoID,
}); err != nil {
log.Error("Unable to push lfs objects between %s and %s up to head branch in %-v: %v", baseBranch, oldMergeBase, pr, err)
return err
}
}
// Now determine who the pushing author should be
var headUser *user_model.User
if err := pr.HeadRepo.LoadOwner(ctx); err != nil {
if !user_model.IsErrUserNotExist(err) {
log.Error("Can't find user: %d for head repository in %-v - %v", pr.HeadRepo.OwnerID, pr, err)
return err
}
log.Error("Can't find user: %d for head repository in %-v - defaulting to doer: %-v - %v", pr.HeadRepo.OwnerID, pr, doer, err)
headUser = doer
} else {
headUser = pr.HeadRepo.Owner
}
pushCmd := git.NewCommand(ctx, "push", "-f", "head_repo").
AddDynamicArguments(stagingBranch + ":" + git.BranchPrefix + pr.HeadBranch)
// Push back to the head repository.
// TODO: this causes an api call to "/api/internal/hook/post-receive/...",
// that prevents us from doing the whole merge in one db transaction
mergeCtx.outbuf.Reset()
mergeCtx.errbuf.Reset()
if err := pushCmd.Run(&git.RunOpts{
Env: repo_module.FullPushingEnvironment(
headUser,
doer,
pr.HeadRepo,
pr.HeadRepo.Name,
pr.ID,
),
Dir: mergeCtx.tmpBasePath,
Stdout: mergeCtx.outbuf,
Stderr: mergeCtx.errbuf,
}); err != nil {
if strings.Contains(mergeCtx.errbuf.String(), "non-fast-forward") {
return &git.ErrPushOutOfDate{
StdOut: mergeCtx.outbuf.String(),
StdErr: mergeCtx.errbuf.String(),
Err: err,
}
} else if strings.Contains(mergeCtx.errbuf.String(), "! [remote rejected]") {
err := &git.ErrPushRejected{
StdOut: mergeCtx.outbuf.String(),
StdErr: mergeCtx.errbuf.String(),
Err: err,
}
err.GenerateMessage()
return err
}
return fmt.Errorf("git push: %s", mergeCtx.errbuf.String())
}
mergeCtx.outbuf.Reset()
mergeCtx.errbuf.Reset()
return nil
}

View File

@ -238,7 +238,7 @@
{{end}}
<div class="right floated">
{{range .Assignees}}
<a class="tooltip" target="_blank" href="{{.HomeLink}}" data-content="{{$.locale.Tr "repo.projects.column.assigned_to"}} {{.Name}}">{{avatar . 28 "mini gt-mr-3"}}</a>
<a class="tooltip" target="_blank" href="{{.HomeLink}}" data-content="{{$.locale.Tr "repo.projects.column.assigned_to"}} {{.Name}}">{{avatar $.Context . 28 "mini gt-mr-3"}}</a>
{{end}}
</div>
</div>

View File

@ -10,16 +10,16 @@
{{else}}
{{$referenceUrl = Printf "%s/files#%s" .ctxData.Issue.Link .item.HashTag}}
{{end}}
<a class="item context" data-clipboard-text-type="url" data-clipboard-text="{{AppSubUrl}}{{$referenceUrl}}">{{.ctxData.locale.Tr "repo.issues.context.copy_link"}}</a>
<a class="item context quote-reply {{if .diff}}quote-reply-diff{{end}}" data-target="{{.item.HashTag}}-raw">{{.ctxData.locale.Tr "repo.issues.context.quote_reply"}}</a>
<div class="item context js-aria-clickable" data-clipboard-text-type="url" data-clipboard-text="{{AppSubUrl}}{{$referenceUrl}}">{{.ctxData.locale.Tr "repo.issues.context.copy_link"}}</div>
<div class="item context js-aria-clickable quote-reply {{if .diff}}quote-reply-diff{{end}}" data-target="{{.item.HashTag}}-raw">{{.ctxData.locale.Tr "repo.issues.context.quote_reply"}}</div>
{{if not .ctxData.UnitIssuesGlobalDisabled}}
<a class="item context reference-issue" data-target="{{.item.HashTag}}-raw" data-modal="#reference-issue-modal" data-poster="{{.item.Poster.GetDisplayName}}" data-poster-username="{{.item.Poster.Name}}" data-reference="{{$referenceUrl}}">{{.ctxData.locale.Tr "repo.issues.context.reference_issue"}}</a>
<div class="item context js-aria-clickable reference-issue" data-target="{{.item.HashTag}}-raw" data-modal="#reference-issue-modal" data-poster="{{.item.Poster.GetDisplayName}}" data-poster-username="{{.item.Poster.Name}}" data-reference="{{$referenceUrl}}">{{.ctxData.locale.Tr "repo.issues.context.reference_issue"}}</div>
{{end}}
{{if or .ctxData.Permission.IsAdmin .IsCommentPoster .ctxData.HasIssuesOrPullsWritePermission}}
<div class="divider"></div>
<a class="item context edit-content">{{.ctxData.locale.Tr "repo.issues.context.edit"}}</a>
<div class="item context js-aria-clickable edit-content">{{.ctxData.locale.Tr "repo.issues.context.edit"}}</div>
{{if .delete}}
<a class="item context delete-comment" data-comment-id={{.item.HashTag}} data-url="{{.ctxData.RepoLink}}/comments/{{.item.ID}}/delete" data-locale="{{.ctxData.locale.Tr "repo.issues.delete_comment_confirm"}}">{{.ctxData.locale.Tr "repo.issues.context.delete"}}</a>
<div class="item context js-aria-clickable delete-comment" data-comment-id={{.item.HashTag}} data-url="{{.ctxData.RepoLink}}/comments/{{.item.ID}}/delete" data-locale="{{.ctxData.locale.Tr "repo.issues.delete_comment_confirm"}}">{{.ctxData.locale.Tr "repo.issues.context.delete"}}</div>
{{end}}
{{end}}
</div>

View File

@ -19,7 +19,7 @@ import (
func TestExternalMarkupRenderer(t *testing.T) {
defer tests.PrepareTestEnv(t)()
if !setting.Database.UseSQLite3 {
if !setting.Database.Type.IsSQLite3() {
t.Skip()
return
}

View File

@ -94,7 +94,7 @@ func availableVersions() ([]string, error) {
return nil, err
}
defer migrationsDir.Close()
versionRE, err := regexp.Compile("gitea-v(?P<version>.+)\\." + regexp.QuoteMeta(setting.Database.Type) + "\\.sql.gz")
versionRE, err := regexp.Compile("gitea-v(?P<version>.+)\\." + regexp.QuoteMeta(setting.Database.Type.String()) + "\\.sql.gz")
if err != nil {
return nil, err
}
@ -149,7 +149,7 @@ func restoreOldDB(t *testing.T, version string) bool {
}
switch {
case setting.Database.UseSQLite3:
case setting.Database.Type.IsSQLite3():
util.Remove(setting.Database.Path)
err := os.MkdirAll(path.Dir(setting.Database.Path), os.ModePerm)
assert.NoError(t, err)
@ -162,7 +162,7 @@ func restoreOldDB(t *testing.T, version string) bool {
assert.NoError(t, err)
db.Close()
case setting.Database.UseMySQL:
case setting.Database.Type.IsMySQL():
db, err := sql.Open("mysql", fmt.Sprintf("%s:%s@tcp(%s)/",
setting.Database.User, setting.Database.Passwd, setting.Database.Host))
assert.NoError(t, err)
@ -184,7 +184,7 @@ func restoreOldDB(t *testing.T, version string) bool {
assert.NoError(t, err)
db.Close()
case setting.Database.UsePostgreSQL:
case setting.Database.Type.IsPostgreSQL():
var db *sql.DB
var err error
if setting.Database.Host[0] == '/' {
@ -252,7 +252,7 @@ func restoreOldDB(t *testing.T, version string) bool {
assert.NoError(t, err)
db.Close()
case setting.Database.UseMSSQL:
case setting.Database.Type.IsMSSQL():
host, port := setting.ParseMSSQLHostPort(setting.Database.Host)
db, err := sql.Open("mssql", fmt.Sprintf("server=%s; port=%s; database=%s; user id=%s; password=%s;",
host, port, "master", setting.Database.User, setting.Database.Passwd))

View File

@ -74,7 +74,7 @@ func InitTest(requireGitea bool) {
}
switch {
case setting.Database.UseMySQL:
case setting.Database.Type.IsMySQL():
connType := "tcp"
if len(setting.Database.Host) > 0 && setting.Database.Host[0] == '/' { // looks like a unix socket
connType = "unix"
@ -89,7 +89,7 @@ func InitTest(requireGitea bool) {
if _, err = db.Exec(fmt.Sprintf("CREATE DATABASE IF NOT EXISTS %s", setting.Database.Name)); err != nil {
log.Fatal("db.Exec: %v", err)
}
case setting.Database.UsePostgreSQL:
case setting.Database.Type.IsPostgreSQL():
var db *sql.DB
var err error
if setting.Database.Host[0] == '/' {
@ -146,7 +146,7 @@ func InitTest(requireGitea bool) {
}
}
case setting.Database.UseMSSQL:
case setting.Database.Type.IsMSSQL():
host, port := setting.ParseMSSQLHostPort(setting.Database.Host)
db, err := sql.Open("mssql", fmt.Sprintf("server=%s; port=%s; database=%s; user id=%s; password=%s;",
host, port, "master", setting.Database.User, setting.Database.Passwd))

View File

@ -83,8 +83,9 @@ function attachOneDropdownAria($dropdown) {
if (e.key === 'Enter') {
let $item = $dropdown.dropdown('get item', $dropdown.dropdown('get value'));
if (!$item) $item = $menu.find('> .item.selected'); // when dropdown filters items by input, there is no "value", so query the "selected" item
// if the selected item is clickable, then trigger the click event. in the future there could be a special CSS class for it.
if ($item && $item.is('a')) $item[0].click();
// if the selected item is clickable, then trigger the click event.
// we can not click any item without checking, because Fomantic code might also handle the Enter event, which would result in a double click.
if ($item && ($item.is('a') || $item.is('.js-aria-clickable'))) $item[0].click();
}
});

View File

@ -129,8 +129,8 @@ export function initImageDiff() {
initOverlay(createContext($imageAfter[2], $imageBefore[2]));
}
hideElem($container.find('> .loader'));
$container.find('> .gt-hidden').removeClass('gt-hidden');
hideElem($container.find('.ui.loader'));
}
function initSideBySide(sizes) {
@ -155,7 +155,7 @@ export function initImageDiff() {
height: sizes.size1.height * factor
});
sizes.image1.parent().css({
margin: `${sizes.ratio[1] * factor + 15}px ${sizes.ratio[0] * factor}px ${sizes.ratio[1] * factor}px`,
margin: `10px auto`,
width: sizes.size1.width * factor + 2,
height: sizes.size1.height * factor + 2
});
@ -164,7 +164,7 @@ export function initImageDiff() {
height: sizes.size2.height * factor
});
sizes.image2.parent().css({
margin: `${sizes.ratio[3] * factor}px ${sizes.ratio[2] * factor}px`,
margin: `10px auto`,
width: sizes.size2.width * factor + 2,
height: sizes.size2.height * factor + 2
});
@ -255,13 +255,12 @@ export function initImageDiff() {
width: sizes.size2.width * factor + 2,
height: sizes.size2.height * factor + 2
});
// some inner elements are `position: absolute`, so the container's height must be large enough
// the "css(width, height)" is somewhat hacky and not easy to understand, it could be improved in the future
sizes.image2.parent().parent().css({
width: sizes.max.width * factor + 2,
height: sizes.max.height * factor + 2
});
$container.find('.onion-skin').css({
width: sizes.max.width * factor + 2,
height: sizes.max.height * factor + 4
height: sizes.max.height * factor + 2 + 20 /* extra height for inner "position: absolute" elements */,
});
const $range = $container.find("input[type='range']");

View File

@ -80,7 +80,8 @@ export function svg(name, size = 16, className = '') {
const svgNode = document.firstChild;
if (size !== 16) svgNode.setAttribute('width', String(size));
if (size !== 16) svgNode.setAttribute('height', String(size));
if (className) svgNode.classList.add(...className.split(/\s+/));
// filter array to remove empty string
if (className) svgNode.classList.add(...className.split(/\s+/).filter(Boolean));
return serializer.serializeToString(svgNode);
}

View File

@ -1,6 +1,6 @@
.image-diff-container {
text-align: center;
padding: 30px 0;
padding: 1em 0;
img {
border: 1px solid var(--color-primary-light-7);
@ -22,6 +22,7 @@
display: inline-block;
line-height: 0;
vertical-align: top;
margin: 0 1em;
.side-header {
font-weight: bold;
@ -98,7 +99,7 @@
}
input {
width: 300px;
max-width: 300px;
}
}
}