Compare commits

No commits in common. "7e3b7c23463bf71c4e2ab93f184a675f1e30df0e" and "84a299310d9a8f6387f18a1711485b7f33e6f6b5" have entirely different histories.

57 changed files with 733 additions and 939 deletions

View File

@ -35,7 +35,7 @@ func runConvert(ctx *cli.Context) error {
log.Info("Log path: %s", setting.Log.RootPath)
log.Info("Configuration file: %s", setting.CustomConf)
if !setting.Database.Type.IsMySQL() {
if !setting.Database.UseMySQL {
fmt.Println("This command can only be used with a MySQL database")
return nil
}

View File

@ -279,7 +279,7 @@ func runDump(ctx *cli.Context) error {
}()
targetDBType := ctx.String("database")
if len(targetDBType) > 0 && targetDBType != setting.Database.Type.String() {
if len(targetDBType) > 0 && targetDBType != setting.Database.Type {
log.Info("Dumping database %s => %s...", setting.Database.Type, targetDBType)
} else {
log.Info("Dumping database...")

View File

@ -99,7 +99,7 @@ func (a *Action) TableIndices() []*schemas.Index {
actUserIndex.AddColumn("act_user_id", "repo_id", "created_unix", "user_id", "is_deleted")
indices := []*schemas.Index{actUserIndex, repoIndex}
if setting.Database.Type.IsPostgreSQL() {
if setting.Database.UsePostgreSQL {
cudIndex := schemas.NewIndex("c_u_d", schemas.IndexType)
cudIndex.AddColumn("created_unix", "user_id", "is_deleted")
indices = append(indices, cudIndex)
@ -640,7 +640,7 @@ func DeleteIssueActions(ctx context.Context, repoID, issueID int64) error {
// CountActionCreatedUnixString count actions where created_unix is an empty string
func CountActionCreatedUnixString(ctx context.Context) (int64, error) {
if setting.Database.Type.IsSQLite3() {
if setting.Database.UseSQLite3 {
return db.GetEngine(ctx).Where(`created_unix = ""`).Count(new(Action))
}
return 0, nil
@ -648,7 +648,7 @@ func CountActionCreatedUnixString(ctx context.Context) (int64, error) {
// FixActionCreatedUnixString set created_unix to zero if it is an empty string
func FixActionCreatedUnixString(ctx context.Context) (int64, error) {
if setting.Database.Type.IsSQLite3() {
if setting.Database.UseSQLite3 {
res, err := db.GetEngine(ctx).Exec(`UPDATE action SET created_unix = 0 WHERE created_unix = ""`)
if err != nil {
return 0, err

View File

@ -243,7 +243,7 @@ func TestGetFeedsCorrupted(t *testing.T) {
}
func TestConsistencyUpdateAction(t *testing.T) {
if !setting.Database.Type.IsSQLite3() {
if !setting.Database.UseSQLite3 {
t.Skip("Test is only for SQLite database.")
}
assert.NoError(t, unittest.PrepareTestDatabase())

View File

@ -39,9 +39,9 @@ func getUserHeatmapData(user *user_model.User, team *organization.Team, doer *us
groupBy := "created_unix / 900 * 900"
groupByName := "timestamp" // We need this extra case because mssql doesn't allow grouping by alias
switch {
case setting.Database.Type.IsMySQL():
case setting.Database.UseMySQL:
groupBy = "created_unix DIV 900 * 900"
case setting.Database.Type.IsMSSQL():
case setting.Database.UseMSSQL:
groupByName = groupBy
}
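
For illustration only (not part of this diff): a minimal sketch of how the dialect-specific groupBy / groupByName strings above are typically consumed, assuming the usual imports ("context", "code.gitea.io/gitea/models/db") and an xorm engine; the helper name and the act_user_id filter are hypothetical.

// countHeatmapBuckets is a hypothetical helper: it groups a user's actions into
// 15-minute (900-second) buckets using the dialect-specific SQL fragments above.
func countHeatmapBuckets(ctx context.Context, userID int64, groupBy, groupByName string) ([]int64, error) {
	type bucket struct {
		Timestamp     int64 `xorm:"timestamp"`
		Contributions int64 `xorm:"contributions"`
	}
	buckets := make([]bucket, 0)
	if err := db.GetEngine(ctx).
		Select(groupBy + " AS timestamp, COUNT(user_id) AS contributions").
		Table("action").
		Where("act_user_id = ?", userID).
		GroupBy(groupByName).
		OrderBy("timestamp").
		Find(&buckets); err != nil {
		return nil, err
	}
	counts := make([]int64, 0, len(buckets))
	for _, b := range buckets {
		counts = append(counts, b.Contributions)
	}
	return counts, nil
}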

View File

@ -15,7 +15,7 @@ import (
// BuildCaseInsensitiveLike returns a condition to check if the given value is like the given key case-insensitively.
// Handles especially SQLite correctly as UPPER there only transforms ASCII letters.
func BuildCaseInsensitiveLike(key, value string) builder.Cond {
if setting.Database.Type.IsSQLite3() {
if setting.Database.UseSQLite3 {
return builder.Like{"UPPER(" + key + ")", util.ToUpperASCII(value)}
}
return builder.Like{"UPPER(" + key + ")", strings.ToUpper(value)}
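
For illustration only (not part of this diff): a minimal sketch of how a condition built by BuildCaseInsensitiveLike might be used in a query. It assumes the usual imports ("context", "code.gitea.io/gitea/models/db", repo_model "code.gitea.io/gitea/models/repo", "xorm.io/builder"); the wrapper function and the chosen columns are hypothetical.

// findReposByKeyword is a hypothetical caller: it matches repositories whose
// name or description contains the keyword, case-insensitively on every dialect.
func findReposByKeyword(ctx context.Context, keyword string) ([]*repo_model.Repository, error) {
	repos := make([]*repo_model.Repository, 0, 10)
	cond := builder.Or(
		db.BuildCaseInsensitiveLike("lower_name", keyword),
		db.BuildCaseInsensitiveLike("description", keyword),
	)
	if err := db.GetEngine(ctx).Where(cond).Find(&repos); err != nil {
		return nil, err
	}
	return repos, nil
}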

View File

@ -101,12 +101,12 @@ func newXORMEngine() (*xorm.Engine, error) {
var engine *xorm.Engine
if setting.Database.Type.IsPostgreSQL() && len(setting.Database.Schema) > 0 {
if setting.Database.UsePostgreSQL && len(setting.Database.Schema) > 0 {
// OK whilst we sort out our schema issues - create a schema aware postgres
registerPostgresSchemaDriver()
engine, err = xorm.NewEngine("postgresschema", connStr)
} else {
engine, err = xorm.NewEngine(setting.Database.Type.String(), connStr)
engine, err = xorm.NewEngine(setting.Database.Type, connStr)
}
if err != nil {

View File

@ -73,7 +73,7 @@ func postgresGetNextResourceIndex(ctx context.Context, tableName string, groupID
// GetNextResourceIndex generates a resource index, it must run in the same transaction where the resource is created
func GetNextResourceIndex(ctx context.Context, tableName string, groupID int64) (int64, error) {
if setting.Database.Type.IsPostgreSQL() {
if setting.Database.UsePostgreSQL {
return postgresGetNextResourceIndex(ctx, tableName, groupID)
}
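
For illustration only (not part of this diff): a minimal sketch of calling GetNextResourceIndex inside the same transaction that creates the resource, as the comment above requires. It assumes the usual imports ("context", "code.gitea.io/gitea/models/db", issues_model "code.gitea.io/gitea/models/issues"); the wrapper function and the "issue_index" group table name are illustrative, not taken from this diff.

// createIssueWithIndex is a hypothetical wrapper: the per-repo index is
// allocated and the issue inserted in one transaction, so a failed insert
// rolls the allocated index back as well.
func createIssueWithIndex(ctx context.Context, issue *issues_model.Issue) error {
	return db.WithTx(ctx, func(ctx context.Context) error {
		idx, err := db.GetNextResourceIndex(ctx, "issue_index", issue.RepoID)
		if err != nil {
			return err
		}
		issue.Index = idx
		_, err = db.GetEngine(ctx).Insert(issue)
		return err
	})
}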

View File

@ -13,7 +13,7 @@ import (
// CountBadSequences looks for broken sequences from recreate-table mistakes
func CountBadSequences(_ context.Context) (int64, error) {
if !setting.Database.Type.IsPostgreSQL() {
if !setting.Database.UsePostgreSQL {
return 0, nil
}
@ -34,7 +34,7 @@ func CountBadSequences(_ context.Context) (int64, error) {
// FixBadSequences fixes for broken sequences from recreate-table mistakes
func FixBadSequences(_ context.Context) error {
if !setting.Database.Type.IsPostgreSQL() {
if !setting.Database.UsePostgreSQL {
return nil
}

View File

@ -65,7 +65,7 @@ func postgresGetCommitStatusIndex(ctx context.Context, repoID int64, sha string)
// GetNextCommitStatusIndex retries up to 3 times to generate a resource index
func GetNextCommitStatusIndex(ctx context.Context, repoID int64, sha string) (int64, error) {
if setting.Database.Type.IsPostgreSQL() {
if setting.Database.UsePostgreSQL {
return postgresGetCommitStatusIndex(ctx, repoID, sha)
}

View File

@ -89,7 +89,7 @@ func RecreateTable(sess *xorm.Session, bean interface{}) error {
hasID = hasID || (column.IsPrimaryKey && column.IsAutoIncrement)
}
if hasID && setting.Database.Type.IsMSSQL() {
if hasID && setting.Database.UseMSSQL {
if _, err := sess.Exec(fmt.Sprintf("SET IDENTITY_INSERT `%s` ON", tempTableName)); err != nil {
log.Error("Unable to set identity insert for table %s. Error: %v", tempTableName, err)
return err
@ -143,7 +143,7 @@ func RecreateTable(sess *xorm.Session, bean interface{}) error {
return err
}
if hasID && setting.Database.Type.IsMSSQL() {
if hasID && setting.Database.UseMSSQL {
if _, err := sess.Exec(fmt.Sprintf("SET IDENTITY_INSERT `%s` OFF", tempTableName)); err != nil {
log.Error("Unable to switch off identity insert for table %s. Error: %v", tempTableName, err)
return err
@ -151,7 +151,7 @@ func RecreateTable(sess *xorm.Session, bean interface{}) error {
}
switch {
case setting.Database.Type.IsSQLite3():
case setting.Database.UseSQLite3:
// SQLite will drop all the constraints on the old table
if _, err := sess.Exec(fmt.Sprintf("DROP TABLE `%s`", tableName)); err != nil {
log.Error("Unable to drop old table %s. Error: %v", tableName, err)
@ -178,7 +178,7 @@ func RecreateTable(sess *xorm.Session, bean interface{}) error {
return err
}
case setting.Database.Type.IsMySQL():
case setting.Database.UseMySQL:
// MySQL will drop all the constraints on the old table
if _, err := sess.Exec(fmt.Sprintf("DROP TABLE `%s`", tableName)); err != nil {
log.Error("Unable to drop old table %s. Error: %v", tableName, err)
@ -205,7 +205,7 @@ func RecreateTable(sess *xorm.Session, bean interface{}) error {
log.Error("Unable to recreate uniques on table %s. Error: %v", tableName, err)
return err
}
case setting.Database.Type.IsPostgreSQL():
case setting.Database.UsePostgreSQL:
var originalSequences []string
type sequenceData struct {
LastValue int `xorm:"'last_value'"`
@ -296,7 +296,7 @@ func RecreateTable(sess *xorm.Session, bean interface{}) error {
}
case setting.Database.Type.IsMSSQL():
case setting.Database.UseMSSQL:
// MSSQL will drop all the constraints on the old table
if _, err := sess.Exec(fmt.Sprintf("DROP TABLE `%s`", tableName)); err != nil {
log.Error("Unable to drop old table %s. Error: %v", tableName, err)
@ -323,7 +323,7 @@ func DropTableColumns(sess *xorm.Session, tableName string, columnNames ...strin
// TODO: This will not work if there are foreign keys
switch {
case setting.Database.Type.IsSQLite3():
case setting.Database.UseSQLite3:
// First drop the indexes on the columns
res, errIndex := sess.Query(fmt.Sprintf("PRAGMA index_list(`%s`)", tableName))
if errIndex != nil {
@ -405,7 +405,7 @@ func DropTableColumns(sess *xorm.Session, tableName string, columnNames ...strin
return err
}
case setting.Database.Type.IsPostgreSQL():
case setting.Database.UsePostgreSQL:
cols := ""
for _, col := range columnNames {
if cols != "" {
@ -416,7 +416,7 @@ func DropTableColumns(sess *xorm.Session, tableName string, columnNames ...strin
if _, err := sess.Exec(fmt.Sprintf("ALTER TABLE `%s` %s", tableName, cols)); err != nil {
return fmt.Errorf("Drop table `%s` columns %v: %v", tableName, columnNames, err)
}
case setting.Database.Type.IsMySQL():
case setting.Database.UseMySQL:
// Drop indexes on columns first
sql := fmt.Sprintf("SHOW INDEX FROM %s WHERE column_name IN ('%s')", tableName, strings.Join(columnNames, "','"))
res, err := sess.Query(sql)
@ -444,7 +444,7 @@ func DropTableColumns(sess *xorm.Session, tableName string, columnNames ...strin
if _, err := sess.Exec(fmt.Sprintf("ALTER TABLE `%s` %s", tableName, cols)); err != nil {
return fmt.Errorf("Drop table `%s` columns %v: %v", tableName, columnNames, err)
}
case setting.Database.Type.IsMSSQL():
case setting.Database.UseMSSQL:
cols := ""
for _, col := range columnNames {
if cols != "" {
@ -543,13 +543,13 @@ func newXORMEngine() (*xorm.Engine, error) {
func deleteDB() error {
switch {
case setting.Database.Type.IsSQLite3():
case setting.Database.UseSQLite3:
if err := util.Remove(setting.Database.Path); err != nil {
return err
}
return os.MkdirAll(path.Dir(setting.Database.Path), os.ModePerm)
case setting.Database.Type.IsMySQL():
case setting.Database.UseMySQL:
db, err := sql.Open("mysql", fmt.Sprintf("%s:%s@tcp(%s)/",
setting.Database.User, setting.Database.Passwd, setting.Database.Host))
if err != nil {
@ -565,7 +565,7 @@ func deleteDB() error {
return err
}
return nil
case setting.Database.Type.IsPostgreSQL():
case setting.Database.UsePostgreSQL:
db, err := sql.Open("postgres", fmt.Sprintf("postgres://%s:%s@%s/?sslmode=%s",
setting.Database.User, setting.Database.Passwd, setting.Database.Host, setting.Database.SSLMode))
if err != nil {
@ -612,7 +612,7 @@ func deleteDB() error {
}
return nil
}
case setting.Database.Type.IsMSSQL():
case setting.Database.UseMSSQL:
host, port := setting.ParseMSSQLHostPort(setting.Database.Host)
db, err := sql.Open("mssql", fmt.Sprintf("server=%s; port=%s; database=%s; user id=%s; password=%s;",
host, port, "master", setting.Database.User, setting.Database.Passwd))

View File

@ -13,9 +13,9 @@ func PrependRefsHeadsToIssueRefs(x *xorm.Engine) error {
var query string
switch {
case setting.Database.Type.IsMSSQL():
case setting.Database.UseMSSQL:
query = "UPDATE `issue` SET `ref` = 'refs/heads/' + `ref` WHERE `ref` IS NOT NULL AND `ref` <> '' AND `ref` NOT LIKE 'refs/%'"
case setting.Database.Type.IsMySQL():
case setting.Database.UseMySQL:
query = "UPDATE `issue` SET `ref` = CONCAT('refs/heads/', `ref`) WHERE `ref` IS NOT NULL AND `ref` <> '' AND `ref` NOT LIKE 'refs/%';"
default:
query = "UPDATE `issue` SET `ref` = 'refs/heads/' || `ref` WHERE `ref` IS NOT NULL AND `ref` <> '' AND `ref` NOT LIKE 'refs/%'"

View File

@ -41,7 +41,7 @@ func FixLanguageStatsToSaveSize(x *xorm.Engine) error {
// Delete language stat statuses
truncExpr := "TRUNCATE TABLE"
if setting.Database.Type.IsSQLite3() {
if setting.Database.UseSQLite3 {
truncExpr = "DELETE FROM"
}

View File

@ -21,7 +21,7 @@ func IncreaseLanguageField(x *xorm.Engine) error {
return err
}
if setting.Database.Type.IsSQLite3() {
if setting.Database.UseSQLite3 {
// SQLite maps VARCHAR to TEXT without size so we're done
return nil
}
@ -41,11 +41,11 @@ func IncreaseLanguageField(x *xorm.Engine) error {
}
switch {
case setting.Database.Type.IsMySQL():
case setting.Database.UseMySQL:
if _, err := sess.Exec(fmt.Sprintf("ALTER TABLE language_stat MODIFY COLUMN language %s", sqlType)); err != nil {
return err
}
case setting.Database.Type.IsMSSQL():
case setting.Database.UseMSSQL:
// Yet again MSSQL just has to be awkward.
// Here we have to drop the constraints first and then rebuild them
constraints := make([]string, 0)
@ -71,7 +71,7 @@ func IncreaseLanguageField(x *xorm.Engine) error {
if err := sess.CreateUniques(new(LanguageStat)); err != nil {
return err
}
case setting.Database.Type.IsPostgreSQL():
case setting.Database.UsePostgreSQL:
if _, err := sess.Exec(fmt.Sprintf("ALTER TABLE language_stat ALTER COLUMN language TYPE %s", sqlType)); err != nil {
return err
}

View File

@ -17,13 +17,13 @@ import (
func SetDefaultPasswordToArgon2(x *xorm.Engine) error {
switch {
case setting.Database.Type.IsMySQL():
case setting.Database.UseMySQL:
_, err := x.Exec("ALTER TABLE `user` ALTER passwd_hash_algo SET DEFAULT 'argon2';")
return err
case setting.Database.Type.IsPostgreSQL():
case setting.Database.UsePostgreSQL:
_, err := x.Exec("ALTER TABLE `user` ALTER COLUMN passwd_hash_algo SET DEFAULT 'argon2';")
return err
case setting.Database.Type.IsMSSQL():
case setting.Database.UseMSSQL:
// need to find the constraint and drop it, then recreate it.
sess := x.NewSession()
defer sess.Close()
@ -53,7 +53,7 @@ func SetDefaultPasswordToArgon2(x *xorm.Engine) error {
}
return sess.Commit()
case setting.Database.Type.IsSQLite3():
case setting.Database.UseSQLite3:
// drop through
default:
log.Fatal("Unrecognized DB")

View File

@ -62,7 +62,7 @@ func UpdateCodeCommentReplies(x *xorm.Engine) error {
return err
}
if setting.Database.Type.IsMSSQL() {
if setting.Database.UseMSSQL {
if _, err := sess.Exec(sqlSelect + " INTO #temp_comments" + sqlTail); err != nil {
log.Error("unable to create temporary table")
return err
@ -72,13 +72,13 @@ func UpdateCodeCommentReplies(x *xorm.Engine) error {
comments := make([]*Comment, 0, batchSize)
switch {
case setting.Database.Type.IsMySQL():
case setting.Database.UseMySQL:
sqlCmd = sqlSelect + sqlTail + " LIMIT " + strconv.Itoa(batchSize) + ", " + strconv.Itoa(start)
case setting.Database.Type.IsPostgreSQL():
case setting.Database.UsePostgreSQL:
fallthrough
case setting.Database.Type.IsSQLite3():
case setting.Database.UseSQLite3:
sqlCmd = sqlSelect + sqlTail + " LIMIT " + strconv.Itoa(batchSize) + " OFFSET " + strconv.Itoa(start)
case setting.Database.Type.IsMSSQL():
case setting.Database.UseMSSQL:
sqlCmd = "SELECT TOP " + strconv.Itoa(batchSize) + " * FROM #temp_comments WHERE " +
"(id NOT IN ( SELECT TOP " + strconv.Itoa(start) + " id FROM #temp_comments ORDER BY id )) ORDER BY id"
default:

View File

@ -14,7 +14,7 @@ import (
)
func FixPostgresIDSequences(x *xorm.Engine) error {
if !setting.Database.Type.IsPostgreSQL() {
if !setting.Database.UsePostgreSQL {
return nil
}

View File

@ -54,11 +54,11 @@ func RenameTaskErrorsToMessage(x *xorm.Engine) error {
}
switch {
case setting.Database.Type.IsMySQL():
case setting.Database.UseMySQL:
if _, err := sess.Exec("ALTER TABLE `task` CHANGE errors message text"); err != nil {
return err
}
case setting.Database.Type.IsMSSQL():
case setting.Database.UseMSSQL:
if _, err := sess.Exec("sp_rename 'task.errors', 'message', 'COLUMN'"); err != nil {
return err
}

View File

@ -16,7 +16,7 @@ func AlterIssueAndCommentTextFieldsToLongText(x *xorm.Engine) error {
return err
}
if setting.Database.Type.IsMySQL() {
if setting.Database.UseMySQL {
if _, err := sess.Exec("ALTER TABLE `issue` CHANGE `content` `content` LONGTEXT"); err != nil {
return err
}

View File

@ -16,7 +16,7 @@ func AlterHookTaskTextFieldsToLongText(x *xorm.Engine) error {
return err
}
if setting.Database.Type.IsMySQL() {
if setting.Database.UseMySQL {
if _, err := sess.Exec("ALTER TABLE `hook_task` CHANGE `payload_content` `payload_content` LONGTEXT, CHANGE `request_content` `request_content` LONGTEXT, change `response_content` `response_content` LONGTEXT"); err != nil {
return err
}

View File

@ -38,7 +38,7 @@ func (*improveActionTableIndicesAction) TableIndices() []*schemas.Index {
actUserIndex := schemas.NewIndex("au_r_c_u_d", schemas.IndexType)
actUserIndex.AddColumn("act_user_id", "repo_id", "created_unix", "user_id", "is_deleted")
indices := []*schemas.Index{actUserIndex, repoIndex}
if setting.Database.Type.IsPostgreSQL() {
if setting.Database.UsePostgreSQL {
cudIndex := schemas.NewIndex("c_u_d", schemas.IndexType)
cudIndex.AddColumn("created_unix", "user_id", "is_deleted")
indices = append(indices, cudIndex)

View File

@ -65,11 +65,11 @@ func RenameCredentialIDBytes(x *xorm.Engine) error {
}
switch {
case setting.Database.Type.IsMySQL():
case setting.Database.UseMySQL:
if _, err := sess.Exec("ALTER TABLE `webauthn_credential` CHANGE credential_id_bytes credential_id VARBINARY(1024)"); err != nil {
return err
}
case setting.Database.Type.IsMSSQL():
case setting.Database.UseMSSQL:
if _, err := sess.Exec("sp_rename 'webauthn_credential.credential_id_bytes', 'credential_id', 'COLUMN'"); err != nil {
return err
}

View File

@ -16,7 +16,7 @@ func AlterPublicGPGKeyContentFieldsToMediumText(x *xorm.Engine) error {
return err
}
if setting.Database.Type.IsMySQL() {
if setting.Database.UseMySQL {
if _, err := sess.Exec("ALTER TABLE `gpg_key` CHANGE `content` `content` MEDIUMTEXT"); err != nil {
return err
}

View File

@ -16,7 +16,7 @@ func AlterPackageVersionMetadataToLongText(x *xorm.Engine) error {
return err
}
if setting.Database.Type.IsMySQL() {
if setting.Database.UseMySQL {
if _, err := sess.Exec("ALTER TABLE `package_version` MODIFY COLUMN `metadata_json` LONGTEXT"); err != nil {
return err
}

View File

@ -17,7 +17,7 @@ func AlterPublicGPGKeyImportContentFieldToMediumText(x *xorm.Engine) error {
return err
}
if setting.Database.Type.IsMySQL() {
if setting.Database.UseMySQL {
if _, err := sess.Exec("ALTER TABLE `gpg_key_import` CHANGE `content` `content` MEDIUMTEXT"); err != nil {
return err
}

View File

@ -409,7 +409,7 @@ func DeleteProjectByID(ctx context.Context, id int64) error {
func DeleteProjectByRepoID(ctx context.Context, repoID int64) error {
switch {
case setting.Database.Type.IsSQLite3():
case setting.Database.UseSQLite3:
if _, err := db.GetEngine(ctx).Exec("DELETE FROM project_issue WHERE project_issue.id IN (SELECT project_issue.id FROM project_issue INNER JOIN project WHERE project.id = project_issue.project_id AND project.repo_id = ?)", repoID); err != nil {
return err
}
@ -419,7 +419,7 @@ func DeleteProjectByRepoID(ctx context.Context, repoID int64) error {
if _, err := db.GetEngine(ctx).Table("project").Where("repo_id = ? ", repoID).Delete(&Project{}); err != nil {
return err
}
case setting.Database.Type.IsPostgreSQL():
case setting.Database.UsePostgreSQL:
if _, err := db.GetEngine(ctx).Exec("DELETE FROM project_issue USING project WHERE project.id = project_issue.project_id AND project.repo_id = ? ", repoID); err != nil {
return err
}

View File

@ -498,7 +498,7 @@ func SearchRepositoryCondition(opts *SearchRepoOptions) builder.Cond {
subQueryCond := builder.NewCond()
// Topic checking. Topics are present.
if setting.Database.Type.IsPostgreSQL() { // postgres stores the topics as json and not as text
if setting.Database.UsePostgreSQL { // postgres stores the topics as json and not as text
subQueryCond = subQueryCond.Or(builder.And(builder.NotNull{"topics"}, builder.Neq{"(topics)::text": "[]"}))
} else {
subQueryCond = subQueryCond.Or(builder.And(builder.Neq{"topics": "null"}, builder.Neq{"topics": "[]"}))

View File

@ -76,7 +76,7 @@ func MainTest(m *testing.M, testOpts *TestOptions) {
setting.SSH.BuiltinServerUser = "builtinuser"
setting.SSH.Port = 3000
setting.SSH.Domain = "try.gitea.io"
setting.Database.Type = "sqlite3"
setting.Database.UseSQLite3 = true
setting.Repository.DefaultBranch = "master" // many test code still assume that default branch is called "master"
repoRootPath, err := os.MkdirTemp(os.TempDir(), "repos")
if err != nil {

View File

@ -155,7 +155,7 @@ func checkDBConsistency(ctx context.Context, logger log.Logger, autofix bool) er
// TODO: function to recalc all counters
if setting.Database.Type.IsPostgreSQL() {
if setting.Database.UsePostgreSQL {
consistencyChecks = append(consistencyChecks, consistencyCheck{
Name: "Sequence values",
Counter: db.CountBadSequences,

View File

@ -42,10 +42,7 @@ func RevListObjects(ctx context.Context, revListWriter *io.PipeWriter, wg *sync.
defer revListWriter.Close()
stderr := new(bytes.Buffer)
var errbuf strings.Builder
cmd := git.NewCommand(ctx, "rev-list", "--objects").AddDynamicArguments(headSHA)
if baseSHA != "" {
cmd = cmd.AddArguments("--not").AddDynamicArguments(baseSHA)
}
cmd := git.NewCommand(ctx, "rev-list", "--objects").AddDynamicArguments(headSHA).AddArguments("--not").AddDynamicArguments(baseSHA)
if err := cmd.Run(&git.RunOpts{
Dir: tmpBasePath,
Stdout: revListWriter,

View File

@ -27,7 +27,7 @@ var (
// Database holds the database settings
Database = struct {
Type DatabaseType
Type string
Host string
Name string
User string
@ -39,6 +39,10 @@ var (
Charset string
Timeout int // seconds
SQLiteJournalMode string
UseSQLite3 bool
UseMySQL bool
UseMSSQL bool
UsePostgreSQL bool
DBConnectRetries int
DBConnectBackoff time.Duration
MaxIdleConns int
@ -55,13 +59,24 @@ var (
// LoadDBSetting loads the database settings
func LoadDBSetting() {
sec := CfgProvider.Section("database")
Database.Type = DatabaseType(sec.Key("DB_TYPE").String())
Database.Type = sec.Key("DB_TYPE").String()
defaultCharset := "utf8"
Database.UseMySQL = false
Database.UseSQLite3 = false
Database.UsePostgreSQL = false
Database.UseMSSQL = false
if Database.Type.IsMySQL() {
switch Database.Type {
case "sqlite3":
Database.UseSQLite3 = true
case "mysql":
Database.UseMySQL = true
defaultCharset = "utf8mb4"
case "postgres":
Database.UsePostgreSQL = true
case "mssql":
Database.UseMSSQL = true
}
Database.Host = sec.Key("HOST").String()
Database.Name = sec.Key("NAME").String()
Database.User = sec.Key("USER").String()
@ -71,7 +86,7 @@ func LoadDBSetting() {
Database.Schema = sec.Key("SCHEMA").String()
Database.SSLMode = sec.Key("SSL_MODE").MustString("disable")
Database.Charset = sec.Key("CHARSET").In(defaultCharset, []string{"utf8", "utf8mb4"})
if Database.Type.IsMySQL() && defaultCharset != "utf8mb4" {
if Database.UseMySQL && defaultCharset != "utf8mb4" {
log.Error("Deprecated database mysql charset utf8 support, please use utf8mb4 or convert utf8 to utf8mb4.")
}
@ -80,7 +95,7 @@ func LoadDBSetting() {
Database.SQLiteJournalMode = sec.Key("SQLITE_JOURNAL_MODE").MustString("")
Database.MaxIdleConns = sec.Key("MAX_IDLE_CONNS").MustInt(2)
if Database.Type.IsMySQL() {
if Database.UseMySQL {
Database.ConnMaxLifetime = sec.Key("CONN_MAX_LIFETIME").MustDuration(3 * time.Second)
} else {
Database.ConnMaxLifetime = sec.Key("CONN_MAX_LIFETIME").MustDuration(0)
@ -192,25 +207,3 @@ func ParseMSSQLHostPort(info string) (string, string) {
}
return host, port
}
type DatabaseType string
func (t DatabaseType) String() string {
return string(t)
}
func (t DatabaseType) IsSQLite3() bool {
return t == "sqlite3"
}
func (t DatabaseType) IsMySQL() bool {
return t == "mysql"
}
func (t DatabaseType) IsMSSQL() bool {
return t == "mssql"
}
func (t DatabaseType) IsPostgreSQL() bool {
return t == "postgres"
}

View File

@ -4,7 +4,6 @@
package typesniffer
import (
"bytes"
"fmt"
"io"
"net/http"
@ -25,9 +24,8 @@ const (
)
var (
svgComment = regexp.MustCompile(`(?s)<!--.*?-->`)
svgTagRegex = regexp.MustCompile(`(?si)\A\s*(?:(<!DOCTYPE\s+svg([\s:]+.*?>|>))\s*)*<svg\b`)
svgTagInXMLRegex = regexp.MustCompile(`(?si)\A<\?xml\b.*?\?>\s*(?:(<!DOCTYPE\s+svg([\s:]+.*?>|>))\s*)*<svg\b`)
svgTagRegex = regexp.MustCompile(`(?si)\A\s*(?:(<!--.*?-->|<!DOCTYPE\s+svg([\s:]+.*?>|>))\s*)*<svg[\s>\/]`)
svgTagInXMLRegex = regexp.MustCompile(`(?si)\A<\?xml\b.*?\?>\s*(?:(<!--.*?-->|<!DOCTYPE\s+svg([\s:]+.*?>|>))\s*)*<svg[\s>\/]`)
)
// SniffedType contains information about a blobs type.
@ -93,27 +91,10 @@ func DetectContentType(data []byte) SniffedType {
data = data[:sniffLen]
}
// SVG is unsupported by http.DetectContentType, https://github.com/golang/go/issues/15888
detectByHTML := strings.Contains(ct, "text/plain") || strings.Contains(ct, "text/html")
detectByXML := strings.Contains(ct, "text/xml")
if detectByHTML || detectByXML {
dataProcessed := svgComment.ReplaceAll(data, nil)
dataProcessed = bytes.TrimSpace(dataProcessed)
if detectByHTML && svgTagRegex.Match(dataProcessed) ||
detectByXML && svgTagInXMLRegex.Match(dataProcessed) {
ct = SvgMimeType
}
}
if strings.HasPrefix(ct, "audio/") && bytes.HasPrefix(data, []byte("ID3")) {
// The MP3 detection is quite inaccurate, any content with "ID3" prefix will result in "audio/mpeg".
// So remove the "ID3" prefix and detect again, if result is text, then it must be text content.
// This works especially because audio files contain many unprintable/invalid characters like `0x00`
ct2 := http.DetectContentType(data[3:])
if strings.HasPrefix(ct2, "text/") {
ct = ct2
}
if (strings.Contains(ct, "text/plain") || strings.Contains(ct, "text/html")) && svgTagRegex.Match(data) ||
strings.Contains(ct, "text/xml") && svgTagInXMLRegex.Match(data) {
// SVG is unsupported. https://github.com/golang/go/issues/15888
ct = SvgMimeType
}
return SniffedType{ct}

View File

@ -28,6 +28,7 @@ func TestIsSvgImage(t *testing.T) {
assert.True(t, DetectContentType([]byte("<svg></svg>")).IsSvgImage())
assert.True(t, DetectContentType([]byte(" <svg></svg>")).IsSvgImage())
assert.True(t, DetectContentType([]byte(`<svg width="100"></svg>`)).IsSvgImage())
assert.True(t, DetectContentType([]byte("<svg/>")).IsSvgImage())
assert.True(t, DetectContentType([]byte(`<?xml version="1.0" encoding="UTF-8"?><svg></svg>`)).IsSvgImage())
assert.True(t, DetectContentType([]byte(`<!-- Comment -->
<svg></svg>`)).IsSvgImage())
@ -56,10 +57,6 @@ func TestIsSvgImage(t *testing.T) {
<!-- Multline
Comment -->
<svg></svg>`)).IsSvgImage())
// the DetectContentType should work for incomplete data, because only beginning bytes are used for detection
assert.True(t, DetectContentType([]byte(`<svg>....`)).IsSvgImage())
assert.False(t, DetectContentType([]byte{}).IsSvgImage())
assert.False(t, DetectContentType([]byte("svg")).IsSvgImage())
assert.False(t, DetectContentType([]byte("<svgfoo></svgfoo>")).IsSvgImage())
@ -71,26 +68,6 @@ func TestIsSvgImage(t *testing.T) {
assert.False(t, DetectContentType([]byte(`<?xml version="1.0" encoding="UTF-8"?>
<!-- <svg></svg> inside comment -->
<foo></foo>`)).IsSvgImage())
assert.False(t, DetectContentType([]byte(`
<!-- comment1 -->
<div>
<!-- comment2 -->
<svg></svg>
</div>
`)).IsSvgImage())
assert.False(t, DetectContentType([]byte(`
<!-- comment1
-->
<div>
<!-- comment2
-->
<svg></svg>
</div>
`)).IsSvgImage())
assert.False(t, DetectContentType([]byte(`<html><body><!DOCTYPE svg PUBLIC "-//W3C//DTD SVG 1.1//EN" "http://www.w3.org/Graphics/SVG/1.1/DTD/svg11.dtd"><svg></svg></body></html>`)).IsSvgImage())
assert.False(t, DetectContentType([]byte(`<html><body><?xml version="1.0" encoding="UTF-8"?><svg></svg></body></html>`)).IsSvgImage())
}
func TestIsPDF(t *testing.T) {
@ -109,10 +86,6 @@ func TestIsAudio(t *testing.T) {
mp3, _ := base64.StdEncoding.DecodeString("SUQzBAAAAAABAFRYWFgAAAASAAADbWFqb3JfYnJhbmQAbXA0MgBUWFhYAAAAEQAAA21pbm9yX3Zl")
assert.True(t, DetectContentType(mp3).IsAudio())
assert.False(t, DetectContentType([]byte("plain text")).IsAudio())
assert.True(t, DetectContentType([]byte("ID3Toy\000")).IsAudio())
assert.True(t, DetectContentType([]byte("ID3Toy\n====\t* hi 🌞, ...")).IsText()) // test ID3 tag for plain text
assert.True(t, DetectContentType([]byte("ID3Toy\n====\t* hi 🌞, ..."+"🌛"[0:2])).IsText()) // test ID3 tag with incomplete UTF8 char
}
func TestDetectContentTypeFromReader(t *testing.T) {

View File

@ -247,7 +247,6 @@ default_enable_timetracking_popup=新しいリポジトリのタイムトラッ
no_reply_address=メールを隠すときのドメイン
no_reply_address_helper=メールアドレスを隠しているユーザーに使用するドメイン名。 例えば 'noreply.example.org' と設定した場合、ユーザー名 'joe' はGitに 'joe@noreply.example.org' としてログインすることになります。
password_algorithm=パスワードハッシュアルゴリズム
invalid_password_algorithm=無効なパスワードハッシュアルゴリズム
password_algorithm_helper=パスワードハッシュアルゴリズムを設定します。 アルゴリズムにより動作要件と強度が異なります。 `argon2`は良い特性を備えていますが、多くのメモリを使用するため小さなシステムには適さない場合があります。
enable_update_checker=アップデートチェッカーを有効にする
enable_update_checker_helper=gitea.ioに接続して定期的に新しいバージョンのリリースを確認します。

View File

@ -141,7 +141,7 @@ func GlobalInitInstalled(ctx context.Context) {
if setting.EnableSQLite3 {
log.Info("SQLite3 support is enabled")
} else if setting.Database.Type.IsSQLite3() {
} else if setting.Database.UseSQLite3 {
log.Fatal("SQLite3 support is disabled, but it is used for database setting. Please get or build a Gitea release with SQLite3 support.")
}

View File

@ -104,7 +104,7 @@ func Install(ctx *context.Context) {
form.DbSchema = setting.Database.Schema
form.Charset = setting.Database.Charset
curDBType := setting.Database.Type.String()
curDBType := setting.Database.Type
var isCurDBTypeSupported bool
for _, dbType := range setting.SupportedDatabaseTypes {
if dbType == curDBType {
@ -272,7 +272,7 @@ func SubmitInstall(ctx *context.Context) {
// ---- Basic checks are passed, now test configuration.
// Test database setting.
setting.Database.Type = setting.DatabaseType(form.DbType)
setting.Database.Type = form.DbType
setting.Database.Host = form.DbHost
setting.Database.User = form.DbUser
setting.Database.Passwd = form.DbPasswd
@ -392,7 +392,7 @@ func SubmitInstall(ctx *context.Context) {
log.Error("Failed to load custom conf '%s': %v", setting.CustomConf, err)
}
}
cfg.Section("database").Key("DB_TYPE").SetValue(setting.Database.Type.String())
cfg.Section("database").Key("DB_TYPE").SetValue(setting.Database.Type)
cfg.Section("database").Key("HOST").SetValue(setting.Database.Host)
cfg.Section("database").Key("NAME").SetValue(setting.Database.Name)
cfg.Section("database").Key("USER").SetValue(setting.Database.User)

View File

@ -100,7 +100,7 @@ func checkDatabase(checks checks) status {
st.Time = getCheckTime()
}
if setting.Database.Type.IsSQLite3() && st.Status == pass {
if setting.Database.UseSQLite3 && st.Status == pass {
if !setting.EnableSQLite3 {
st.Status = fail
st.Time = getCheckTime()

View File

@ -116,7 +116,7 @@ func createLFSMetaObjectsFromCatFileBatch(catFileBatchReader *io.PipeReader, wg
}
// Then we need to check that this pointer is in the db
if _, err := git_model.GetLFSMetaObjectByOid(db.DefaultContext, pr.HeadRepoID, pointer.Oid); err != nil {
if _, err := git_model.GetLFSMetaObjectByOid(db.DefaultContext, pr.HeadRepo.ID, pointer.Oid); err != nil {
if err == git_model.ErrLFSObjectNotExist {
log.Warn("During merge of: %d in %-v, there is a pointer to LFS Oid: %s which although present in the LFS store is not associated with the head repo %-v", pr.Index, pr.BaseRepo, pointer.Oid, pr.HeadRepo)
continue

View File

@ -5,6 +5,8 @@
package pull
import (
"bufio"
"bytes"
"context"
"fmt"
"os"
@ -12,6 +14,7 @@ import (
"regexp"
"strconv"
"strings"
"time"
"code.gitea.io/gitea/models"
"code.gitea.io/gitea/models/db"
@ -31,17 +34,22 @@ import (
repo_module "code.gitea.io/gitea/modules/repository"
"code.gitea.io/gitea/modules/setting"
"code.gitea.io/gitea/modules/timeutil"
asymkey_service "code.gitea.io/gitea/services/asymkey"
issue_service "code.gitea.io/gitea/services/issue"
)
// GetDefaultMergeMessage returns default message used when merging pull request
func GetDefaultMergeMessage(ctx context.Context, baseGitRepo *git.Repository, pr *issues_model.PullRequest, mergeStyle repo_model.MergeStyle) (message, body string, err error) {
if err := pr.LoadBaseRepo(ctx); err != nil {
return "", "", err
}
if err := pr.LoadHeadRepo(ctx); err != nil {
return "", "", err
}
if err := pr.LoadBaseRepo(ctx); err != nil {
return "", "", err
}
if pr.BaseRepo == nil {
return "", "", repo_model.ErrRepoNotExist{ID: pr.BaseRepoID}
}
if err := pr.LoadIssue(ctx); err != nil {
return "", "", err
}
@ -136,19 +144,18 @@ func expandDefaultMergeMessage(template string, vars map[string]string) (message
// Merge merges pull request to base repository.
// Caller should check PR is ready to be merged (review and status checks)
func Merge(ctx context.Context, pr *issues_model.PullRequest, doer *user_model.User, baseGitRepo *git.Repository, mergeStyle repo_model.MergeStyle, expectedHeadCommitID, message string, wasAutoMerged bool) error {
if err := pr.LoadBaseRepo(ctx); err != nil {
log.Error("Unable to load base repo: %v", err)
return fmt.Errorf("unable to load base repo: %w", err)
} else if err := pr.LoadHeadRepo(ctx); err != nil {
log.Error("Unable to load head repo: %v", err)
return fmt.Errorf("unable to load head repo: %w", err)
if err := pr.LoadHeadRepo(ctx); err != nil {
log.Error("LoadHeadRepo: %v", err)
return fmt.Errorf("LoadHeadRepo: %w", err)
} else if err := pr.LoadBaseRepo(ctx); err != nil {
log.Error("LoadBaseRepo: %v", err)
return fmt.Errorf("LoadBaseRepo: %w", err)
}
pullWorkingPool.CheckIn(fmt.Sprint(pr.ID))
defer pullWorkingPool.CheckOut(fmt.Sprint(pr.ID))
// Removing an auto merge pull and ignore if not exist
// FIXME: is this the correct point to do this? Shouldn't this be after IsMergeStyleAllowed?
if err := pull_model.DeleteScheduledAutoMerge(ctx, pr.ID); err != nil && !db.IsErrNotExist(err) {
return err
}
@ -172,7 +179,7 @@ func Merge(ctx context.Context, pr *issues_model.PullRequest, doer *user_model.U
// Run the merge in the hammer context to prevent cancellation
hammerCtx := graceful.GetManager().HammerContext()
pr.MergedCommitID, err = doMergeAndPush(hammerCtx, pr, doer, mergeStyle, expectedHeadCommitID, message)
pr.MergedCommitID, err = rawMerge(hammerCtx, pr, doer, mergeStyle, expectedHeadCommitID, message)
if err != nil {
return err
}
@ -182,18 +189,18 @@ func Merge(ctx context.Context, pr *issues_model.PullRequest, doer *user_model.U
pr.MergerID = doer.ID
if _, err := pr.SetMerged(hammerCtx); err != nil {
log.Error("SetMerged %-v: %v", pr, err)
log.Error("SetMerged [%d]: %v", pr.ID, err)
}
if err := pr.LoadIssue(hammerCtx); err != nil {
log.Error("LoadIssue %-v: %v", pr, err)
log.Error("LoadIssue [%d]: %v", pr.ID, err)
}
if err := pr.Issue.LoadRepo(hammerCtx); err != nil {
log.Error("pr.Issue.LoadRepo %-v: %v", pr, err)
log.Error("LoadRepo for issue [%d]: %v", pr.ID, err)
}
if err := pr.Issue.Repo.LoadOwner(hammerCtx); err != nil {
log.Error("LoadOwner for %-v: %v", pr, err)
log.Error("LoadOwner for PR [%d]: %v", pr.ID, err)
}
if wasAutoMerged {
@ -232,43 +239,326 @@ func Merge(ctx context.Context, pr *issues_model.PullRequest, doer *user_model.U
return nil
}
// doMergeAndPush performs the merge operation without changing any pull information in database and pushes it up to the base repository
func doMergeAndPush(ctx context.Context, pr *issues_model.PullRequest, doer *user_model.User, mergeStyle repo_model.MergeStyle, expectedHeadCommitID, message string) (string, error) {
// rawMerge perform the merge operation without changing any pull information in database
func rawMerge(ctx context.Context, pr *issues_model.PullRequest, doer *user_model.User, mergeStyle repo_model.MergeStyle, expectedHeadCommitID, message string) (string, error) {
// Clone base repo.
mergeCtx, cancel, err := createTemporaryRepoForMerge(ctx, pr, doer, expectedHeadCommitID)
tmpBasePath, err := createTemporaryRepo(ctx, pr)
if err != nil {
log.Error("CreateTemporaryPath: %v", err)
return "", err
}
defer cancel()
defer func() {
if err := repo_module.RemoveTemporaryPath(tmpBasePath); err != nil {
log.Error("Merge: RemoveTemporaryPath: %s", err)
}
}()
baseBranch := "base"
trackingBranch := "tracking"
stagingBranch := "staging"
if expectedHeadCommitID != "" {
trackingCommitID, _, err := git.NewCommand(ctx, "show-ref", "--hash").AddDynamicArguments(git.BranchPrefix + trackingBranch).RunStdString(&git.RunOpts{Dir: tmpBasePath})
if err != nil {
log.Error("show-ref[%s] --hash %s: %v", tmpBasePath, git.BranchPrefix+trackingBranch, err)
return "", fmt.Errorf("getDiffTree: %w", err)
}
if strings.TrimSpace(trackingCommitID) != expectedHeadCommitID {
return "", models.ErrSHADoesNotMatch{
GivenSHA: expectedHeadCommitID,
CurrentSHA: trackingCommitID,
}
}
}
var outbuf, errbuf strings.Builder
// Enable sparse-checkout
sparseCheckoutList, err := getDiffTree(ctx, tmpBasePath, baseBranch, trackingBranch)
if err != nil {
log.Error("getDiffTree(%s, %s, %s): %v", tmpBasePath, baseBranch, trackingBranch, err)
return "", fmt.Errorf("getDiffTree: %w", err)
}
infoPath := filepath.Join(tmpBasePath, ".git", "info")
if err := os.MkdirAll(infoPath, 0o700); err != nil {
log.Error("Unable to create .git/info in %s: %v", tmpBasePath, err)
return "", fmt.Errorf("Unable to create .git/info in tmpBasePath: %w", err)
}
sparseCheckoutListPath := filepath.Join(infoPath, "sparse-checkout")
if err := os.WriteFile(sparseCheckoutListPath, []byte(sparseCheckoutList), 0o600); err != nil {
log.Error("Unable to write .git/info/sparse-checkout file in %s: %v", tmpBasePath, err)
return "", fmt.Errorf("Unable to write .git/info/sparse-checkout file in tmpBasePath: %w", err)
}
gitConfigCommand := func() *git.Command {
return git.NewCommand(ctx, "config", "--local")
}
// Switch off LFS process (set required, clean and smudge here also)
if err := gitConfigCommand().AddArguments("filter.lfs.process", "").
Run(&git.RunOpts{
Dir: tmpBasePath,
Stdout: &outbuf,
Stderr: &errbuf,
}); err != nil {
log.Error("git config [filter.lfs.process -> <> ]: %v\n%s\n%s", err, outbuf.String(), errbuf.String())
return "", fmt.Errorf("git config [filter.lfs.process -> <> ]: %w\n%s\n%s", err, outbuf.String(), errbuf.String())
}
outbuf.Reset()
errbuf.Reset()
if err := gitConfigCommand().AddArguments("filter.lfs.required", "false").
Run(&git.RunOpts{
Dir: tmpBasePath,
Stdout: &outbuf,
Stderr: &errbuf,
}); err != nil {
log.Error("git config [filter.lfs.required -> <false> ]: %v\n%s\n%s", err, outbuf.String(), errbuf.String())
return "", fmt.Errorf("git config [filter.lfs.required -> <false> ]: %w\n%s\n%s", err, outbuf.String(), errbuf.String())
}
outbuf.Reset()
errbuf.Reset()
if err := gitConfigCommand().AddArguments("filter.lfs.clean", "").
Run(&git.RunOpts{
Dir: tmpBasePath,
Stdout: &outbuf,
Stderr: &errbuf,
}); err != nil {
log.Error("git config [filter.lfs.clean -> <> ]: %v\n%s\n%s", err, outbuf.String(), errbuf.String())
return "", fmt.Errorf("git config [filter.lfs.clean -> <> ]: %w\n%s\n%s", err, outbuf.String(), errbuf.String())
}
outbuf.Reset()
errbuf.Reset()
if err := gitConfigCommand().AddArguments("filter.lfs.smudge", "").
Run(&git.RunOpts{
Dir: tmpBasePath,
Stdout: &outbuf,
Stderr: &errbuf,
}); err != nil {
log.Error("git config [filter.lfs.smudge -> <> ]: %v\n%s\n%s", err, outbuf.String(), errbuf.String())
return "", fmt.Errorf("git config [filter.lfs.smudge -> <> ]: %w\n%s\n%s", err, outbuf.String(), errbuf.String())
}
outbuf.Reset()
errbuf.Reset()
if err := gitConfigCommand().AddArguments("core.sparseCheckout", "true").
Run(&git.RunOpts{
Dir: tmpBasePath,
Stdout: &outbuf,
Stderr: &errbuf,
}); err != nil {
log.Error("git config [core.sparseCheckout -> true ]: %v\n%s\n%s", err, outbuf.String(), errbuf.String())
return "", fmt.Errorf("git config [core.sparsecheckout -> true]: %w\n%s\n%s", err, outbuf.String(), errbuf.String())
}
outbuf.Reset()
errbuf.Reset()
// Read base branch index
if err := git.NewCommand(ctx, "read-tree", "HEAD").
Run(&git.RunOpts{
Dir: tmpBasePath,
Stdout: &outbuf,
Stderr: &errbuf,
}); err != nil {
log.Error("git read-tree HEAD: %v\n%s\n%s", err, outbuf.String(), errbuf.String())
return "", fmt.Errorf("Unable to read base branch in to the index: %w\n%s\n%s", err, outbuf.String(), errbuf.String())
}
outbuf.Reset()
errbuf.Reset()
sig := doer.NewGitSig()
committer := sig
// Determine if we should sign. If no signKeyID, use --no-gpg-sign to countermand the sign config (from gitconfig)
var signArgs git.TrustedCmdArgs
sign, signKeyID, signer, _ := asymkey_service.SignMerge(ctx, pr, doer, tmpBasePath, "HEAD", trackingBranch)
if sign {
if pr.BaseRepo.GetTrustModel() == repo_model.CommitterTrustModel || pr.BaseRepo.GetTrustModel() == repo_model.CollaboratorCommitterTrustModel {
committer = signer
}
signArgs = git.ToTrustedCmdArgs([]string{"-S" + signKeyID})
} else {
signArgs = append(signArgs, "--no-gpg-sign")
}
commitTimeStr := time.Now().Format(time.RFC3339)
// Because this may call hooks we should pass in the environment
env := append(os.Environ(),
"GIT_AUTHOR_NAME="+sig.Name,
"GIT_AUTHOR_EMAIL="+sig.Email,
"GIT_AUTHOR_DATE="+commitTimeStr,
"GIT_COMMITTER_NAME="+committer.Name,
"GIT_COMMITTER_EMAIL="+committer.Email,
"GIT_COMMITTER_DATE="+commitTimeStr,
)
// Merge commits.
switch mergeStyle {
case repo_model.MergeStyleMerge:
if err := doMergeStyleMerge(mergeCtx, message); err != nil {
cmd := git.NewCommand(ctx, "merge", "--no-ff", "--no-commit").AddDynamicArguments(trackingBranch)
if err := runMergeCommand(pr, mergeStyle, cmd, tmpBasePath); err != nil {
log.Error("Unable to merge tracking into base: %v", err)
return "", err
}
case repo_model.MergeStyleRebase, repo_model.MergeStyleRebaseMerge:
if err := doMergeStyleRebase(mergeCtx, mergeStyle, message); err != nil {
if err := commitAndSignNoAuthor(ctx, pr, message, signArgs, tmpBasePath, env); err != nil {
log.Error("Unable to make final commit: %v", err)
return "", err
}
case repo_model.MergeStyleRebase:
fallthrough
case repo_model.MergeStyleRebaseUpdate:
fallthrough
case repo_model.MergeStyleRebaseMerge:
// Checkout head branch
if err := git.NewCommand(ctx, "checkout", "-b").AddDynamicArguments(stagingBranch, trackingBranch).
Run(&git.RunOpts{
Dir: tmpBasePath,
Stdout: &outbuf,
Stderr: &errbuf,
}); err != nil {
log.Error("git checkout base prior to merge post staging rebase [%s:%s -> %s:%s]: %v\n%s\n%s", pr.HeadRepo.FullName(), pr.HeadBranch, pr.BaseRepo.FullName(), pr.BaseBranch, err, outbuf.String(), errbuf.String())
return "", fmt.Errorf("git checkout base prior to merge post staging rebase [%s:%s -> %s:%s]: %w\n%s\n%s", pr.HeadRepo.FullName(), pr.HeadBranch, pr.BaseRepo.FullName(), pr.BaseBranch, err, outbuf.String(), errbuf.String())
}
outbuf.Reset()
errbuf.Reset()
// Rebase before merging
if err := git.NewCommand(ctx, "rebase").AddDynamicArguments(baseBranch).
Run(&git.RunOpts{
Dir: tmpBasePath,
Stdout: &outbuf,
Stderr: &errbuf,
}); err != nil {
// Rebase will leave a REBASE_HEAD file in .git if there is a conflict
if _, statErr := os.Stat(filepath.Join(tmpBasePath, ".git", "REBASE_HEAD")); statErr == nil {
var commitSha string
ok := false
failingCommitPaths := []string{
filepath.Join(tmpBasePath, ".git", "rebase-apply", "original-commit"), // Git < 2.26
filepath.Join(tmpBasePath, ".git", "rebase-merge", "stopped-sha"), // Git >= 2.26
}
for _, failingCommitPath := range failingCommitPaths {
if _, statErr := os.Stat(failingCommitPath); statErr == nil {
commitShaBytes, readErr := os.ReadFile(failingCommitPath)
if readErr != nil {
// Abandon this attempt to handle the error
log.Error("git rebase staging on to base [%s:%s -> %s:%s]: %v\n%s\n%s", pr.HeadRepo.FullName(), pr.HeadBranch, pr.BaseRepo.FullName(), pr.BaseBranch, err, outbuf.String(), errbuf.String())
return "", fmt.Errorf("git rebase staging on to base [%s:%s -> %s:%s]: %w\n%s\n%s", pr.HeadRepo.FullName(), pr.HeadBranch, pr.BaseRepo.FullName(), pr.BaseBranch, err, outbuf.String(), errbuf.String())
}
commitSha = strings.TrimSpace(string(commitShaBytes))
ok = true
break
}
}
if !ok {
log.Error("Unable to determine failing commit sha for this rebase message. Cannot cast as models.ErrRebaseConflicts.")
log.Error("git rebase staging on to base [%s:%s -> %s:%s]: %v\n%s\n%s", pr.HeadRepo.FullName(), pr.HeadBranch, pr.BaseRepo.FullName(), pr.BaseBranch, err, outbuf.String(), errbuf.String())
return "", fmt.Errorf("git rebase staging on to base [%s:%s -> %s:%s]: %w\n%s\n%s", pr.HeadRepo.FullName(), pr.HeadBranch, pr.BaseRepo.FullName(), pr.BaseBranch, err, outbuf.String(), errbuf.String())
}
log.Debug("RebaseConflict at %s [%s:%s -> %s:%s]: %v\n%s\n%s", commitSha, pr.HeadRepo.FullName(), pr.HeadBranch, pr.BaseRepo.FullName(), pr.BaseBranch, err, outbuf.String(), errbuf.String())
return "", models.ErrRebaseConflicts{
Style: mergeStyle,
CommitSHA: commitSha,
StdOut: outbuf.String(),
StdErr: errbuf.String(),
Err: err,
}
}
log.Error("git rebase staging on to base [%s:%s -> %s:%s]: %v\n%s\n%s", pr.HeadRepo.FullName(), pr.HeadBranch, pr.BaseRepo.FullName(), pr.BaseBranch, err, outbuf.String(), errbuf.String())
return "", fmt.Errorf("git rebase staging on to base [%s:%s -> %s:%s]: %w\n%s\n%s", pr.HeadRepo.FullName(), pr.HeadBranch, pr.BaseRepo.FullName(), pr.BaseBranch, err, outbuf.String(), errbuf.String())
}
outbuf.Reset()
errbuf.Reset()
// no need to merge, just update by rebase, so skip
if mergeStyle == repo_model.MergeStyleRebaseUpdate {
break
}
// Checkout base branch again
if err := git.NewCommand(ctx, "checkout").AddDynamicArguments(baseBranch).
Run(&git.RunOpts{
Dir: tmpBasePath,
Stdout: &outbuf,
Stderr: &errbuf,
}); err != nil {
log.Error("git checkout base prior to merge post staging rebase [%s:%s -> %s:%s]: %v\n%s\n%s", pr.HeadRepo.FullName(), pr.HeadBranch, pr.BaseRepo.FullName(), pr.BaseBranch, err, outbuf.String(), errbuf.String())
return "", fmt.Errorf("git checkout base prior to merge post staging rebase [%s:%s -> %s:%s]: %w\n%s\n%s", pr.HeadRepo.FullName(), pr.HeadBranch, pr.BaseRepo.FullName(), pr.BaseBranch, err, outbuf.String(), errbuf.String())
}
outbuf.Reset()
errbuf.Reset()
cmd := git.NewCommand(ctx, "merge")
if mergeStyle == repo_model.MergeStyleRebase {
cmd.AddArguments("--ff-only")
} else {
cmd.AddArguments("--no-ff", "--no-commit")
}
cmd.AddDynamicArguments(stagingBranch)
// Prepare merge with commit
if err := runMergeCommand(pr, mergeStyle, cmd, tmpBasePath); err != nil {
log.Error("Unable to merge staging into base: %v", err)
return "", err
}
if mergeStyle == repo_model.MergeStyleRebaseMerge {
if err := commitAndSignNoAuthor(ctx, pr, message, signArgs, tmpBasePath, env); err != nil {
log.Error("Unable to make final commit: %v", err)
return "", err
}
}
case repo_model.MergeStyleSquash:
if err := doMergeStyleSquash(mergeCtx, message); err != nil {
// Merge with squash
cmd := git.NewCommand(ctx, "merge", "--squash").AddDynamicArguments(trackingBranch)
if err := runMergeCommand(pr, mergeStyle, cmd, tmpBasePath); err != nil {
log.Error("Unable to merge --squash tracking into base: %v", err)
return "", err
}
if err = pr.Issue.LoadPoster(ctx); err != nil {
log.Error("LoadPoster: %v", err)
return "", fmt.Errorf("LoadPoster: %w", err)
}
sig := pr.Issue.Poster.NewGitSig()
if setting.Repository.PullRequest.AddCoCommitterTrailers && committer.String() != sig.String() {
// add trailer
message += fmt.Sprintf("\nCo-authored-by: %s\nCo-committed-by: %s\n", sig.String(), sig.String())
}
if err := git.NewCommand(ctx, "commit").
AddArguments(signArgs...).
AddOptionFormat("--author='%s <%s>'", sig.Name, sig.Email).
AddOptionFormat("--message=%s", message).
Run(&git.RunOpts{
Env: env,
Dir: tmpBasePath,
Stdout: &outbuf,
Stderr: &errbuf,
}); err != nil {
log.Error("git commit [%s:%s -> %s:%s]: %v\n%s\n%s", pr.HeadRepo.FullName(), pr.HeadBranch, pr.BaseRepo.FullName(), pr.BaseBranch, err, outbuf.String(), errbuf.String())
return "", fmt.Errorf("git commit [%s:%s -> %s:%s]: %w\n%s\n%s", pr.HeadRepo.FullName(), pr.HeadBranch, pr.BaseRepo.FullName(), pr.BaseBranch, err, outbuf.String(), errbuf.String())
}
outbuf.Reset()
errbuf.Reset()
default:
return "", models.ErrInvalidMergeStyle{ID: pr.BaseRepo.ID, Style: mergeStyle}
}
// OK we should cache our current head and origin/headbranch
mergeHeadSHA, err := git.GetFullCommitID(ctx, mergeCtx.tmpBasePath, "HEAD")
mergeHeadSHA, err := git.GetFullCommitID(ctx, tmpBasePath, "HEAD")
if err != nil {
return "", fmt.Errorf("Failed to get full commit id for HEAD: %w", err)
}
mergeBaseSHA, err := git.GetFullCommitID(ctx, mergeCtx.tmpBasePath, "original_"+baseBranch)
mergeBaseSHA, err := git.GetFullCommitID(ctx, tmpBasePath, "original_"+baseBranch)
if err != nil {
return "", fmt.Errorf("Failed to get full commit id for origin/%s: %w", pr.BaseBranch, err)
}
mergeCommitID, err := git.GetFullCommitID(ctx, mergeCtx.tmpBasePath, baseBranch)
mergeCommitID, err := git.GetFullCommitID(ctx, tmpBasePath, baseBranch)
if err != nil {
return "", fmt.Errorf("Failed to get full commit id for the new merge: %w", err)
}
@ -277,7 +567,7 @@ func doMergeAndPush(ctx context.Context, pr *issues_model.PullRequest, doer *use
// I think in the interests of data safety - failures to push to the lfs should prevent
// the merge as you can always remerge.
if setting.LFS.StartServer {
if err := LFSPush(ctx, mergeCtx.tmpBasePath, mergeHeadSHA, mergeBaseSHA, pr); err != nil {
if err := LFSPush(ctx, tmpBasePath, mergeHeadSHA, mergeBaseSHA, pr); err != nil {
return "", err
}
}
@ -286,92 +576,167 @@ func doMergeAndPush(ctx context.Context, pr *issues_model.PullRequest, doer *use
err = pr.HeadRepo.LoadOwner(ctx)
if err != nil {
if !user_model.IsErrUserNotExist(err) {
log.Error("Can't find user: %d for head repository in %-v: %v", pr.HeadRepo.OwnerID, pr, err)
log.Error("Can't find user: %d for head repository - %v", pr.HeadRepo.OwnerID, err)
return "", err
}
log.Warn("Can't find user: %d for head repository in %-v - defaulting to doer: %s - %v", pr.HeadRepo.OwnerID, pr, doer.Name, err)
log.Error("Can't find user: %d for head repository - defaulting to doer: %s - %v", pr.HeadRepo.OwnerID, doer.Name, err)
headUser = doer
} else {
headUser = pr.HeadRepo.Owner
}
mergeCtx.env = repo_module.FullPushingEnvironment(
headUser,
doer,
pr.BaseRepo,
pr.BaseRepo.Name,
pr.ID,
)
pushCmd := git.NewCommand(ctx, "push", "origin").AddDynamicArguments(baseBranch + ":" + git.BranchPrefix + pr.BaseBranch)
var pushCmd *git.Command
if mergeStyle == repo_model.MergeStyleRebaseUpdate {
// force push the rebase result to head branch
env = repo_module.FullPushingEnvironment(
headUser,
doer,
pr.HeadRepo,
pr.HeadRepo.Name,
pr.ID,
)
pushCmd = git.NewCommand(ctx, "push", "-f", "head_repo").AddDynamicArguments(stagingBranch + ":" + git.BranchPrefix + pr.HeadBranch)
} else {
env = repo_module.FullPushingEnvironment(
headUser,
doer,
pr.BaseRepo,
pr.BaseRepo.Name,
pr.ID,
)
pushCmd = git.NewCommand(ctx, "push", "origin").AddDynamicArguments(baseBranch + ":" + git.BranchPrefix + pr.BaseBranch)
}
// Push back to upstream.
// TODO: this causes an api call to "/api/internal/hook/post-receive/...",
// which prevents us from doing the whole merge in one db transaction
if err := pushCmd.Run(mergeCtx.RunOpts()); err != nil {
if strings.Contains(mergeCtx.errbuf.String(), "non-fast-forward") {
if err := pushCmd.Run(&git.RunOpts{
Env: env,
Dir: tmpBasePath,
Stdout: &outbuf,
Stderr: &errbuf,
}); err != nil {
if strings.Contains(errbuf.String(), "non-fast-forward") {
return "", &git.ErrPushOutOfDate{
StdOut: mergeCtx.outbuf.String(),
StdErr: mergeCtx.errbuf.String(),
StdOut: outbuf.String(),
StdErr: errbuf.String(),
Err: err,
}
} else if strings.Contains(mergeCtx.errbuf.String(), "! [remote rejected]") {
} else if strings.Contains(errbuf.String(), "! [remote rejected]") {
err := &git.ErrPushRejected{
StdOut: mergeCtx.outbuf.String(),
StdErr: mergeCtx.errbuf.String(),
StdOut: outbuf.String(),
StdErr: errbuf.String(),
Err: err,
}
err.GenerateMessage()
return "", err
}
return "", fmt.Errorf("git push: %s", mergeCtx.errbuf.String())
return "", fmt.Errorf("git push: %s", errbuf.String())
}
mergeCtx.outbuf.Reset()
mergeCtx.errbuf.Reset()
outbuf.Reset()
errbuf.Reset()
return mergeCommitID, nil
}
func commitAndSignNoAuthor(ctx *mergeContext, message string) error {
if err := git.NewCommand(ctx, "commit").AddArguments(ctx.signArg...).AddOptionFormat("--message=%s", message).
Run(ctx.RunOpts()); err != nil {
log.Error("git commit %-v: %v\n%s\n%s", ctx.pr, err, ctx.outbuf.String(), ctx.errbuf.String())
return fmt.Errorf("git commit %v: %w\n%s\n%s", ctx.pr, err, ctx.outbuf.String(), ctx.errbuf.String())
func commitAndSignNoAuthor(ctx context.Context, pr *issues_model.PullRequest, message string, signArgs git.TrustedCmdArgs, tmpBasePath string, env []string) error {
var outbuf, errbuf strings.Builder
if err := git.NewCommand(ctx, "commit").AddArguments(signArgs...).AddOptionFormat("--message=%s", message).
Run(&git.RunOpts{
Env: env,
Dir: tmpBasePath,
Stdout: &outbuf,
Stderr: &errbuf,
}); err != nil {
log.Error("git commit [%s:%s -> %s:%s]: %v\n%s\n%s", pr.HeadRepo.FullName(), pr.HeadBranch, pr.BaseRepo.FullName(), pr.BaseBranch, err, outbuf.String(), errbuf.String())
return fmt.Errorf("git commit [%s:%s -> %s:%s]: %w\n%s\n%s", pr.HeadRepo.FullName(), pr.HeadBranch, pr.BaseRepo.FullName(), pr.BaseBranch, err, outbuf.String(), errbuf.String())
}
return nil
}
func runMergeCommand(ctx *mergeContext, mergeStyle repo_model.MergeStyle, cmd *git.Command) error {
if err := cmd.Run(ctx.RunOpts()); err != nil {
func runMergeCommand(pr *issues_model.PullRequest, mergeStyle repo_model.MergeStyle, cmd *git.Command, tmpBasePath string) error {
var outbuf, errbuf strings.Builder
if err := cmd.Run(&git.RunOpts{
Dir: tmpBasePath,
Stdout: &outbuf,
Stderr: &errbuf,
}); err != nil {
// Merge will leave a MERGE_HEAD file in the .git folder if there is a conflict
if _, statErr := os.Stat(filepath.Join(ctx.tmpBasePath, ".git", "MERGE_HEAD")); statErr == nil {
if _, statErr := os.Stat(filepath.Join(tmpBasePath, ".git", "MERGE_HEAD")); statErr == nil {
// We have a merge conflict error
log.Debug("MergeConflict %-v: %v\n%s\n%s", ctx.pr, err, ctx.outbuf.String(), ctx.errbuf.String())
log.Debug("MergeConflict [%s:%s -> %s:%s]: %v\n%s\n%s", pr.HeadRepo.FullName(), pr.HeadBranch, pr.BaseRepo.FullName(), pr.BaseBranch, err, outbuf.String(), errbuf.String())
return models.ErrMergeConflicts{
Style: mergeStyle,
StdOut: ctx.outbuf.String(),
StdErr: ctx.errbuf.String(),
StdOut: outbuf.String(),
StdErr: errbuf.String(),
Err: err,
}
} else if strings.Contains(ctx.errbuf.String(), "refusing to merge unrelated histories") {
log.Debug("MergeUnrelatedHistories %-v: %v\n%s\n%s", ctx.pr, err, ctx.outbuf.String(), ctx.errbuf.String())
} else if strings.Contains(errbuf.String(), "refusing to merge unrelated histories") {
log.Debug("MergeUnrelatedHistories [%s:%s -> %s:%s]: %v\n%s\n%s", pr.HeadRepo.FullName(), pr.HeadBranch, pr.BaseRepo.FullName(), pr.BaseBranch, err, outbuf.String(), errbuf.String())
return models.ErrMergeUnrelatedHistories{
Style: mergeStyle,
StdOut: ctx.outbuf.String(),
StdErr: ctx.errbuf.String(),
StdOut: outbuf.String(),
StdErr: errbuf.String(),
Err: err,
}
}
log.Error("git merge %-v: %v\n%s\n%s", ctx.pr, err, ctx.outbuf.String(), ctx.errbuf.String())
return fmt.Errorf("git merge %v: %w\n%s\n%s", ctx.pr, err, ctx.outbuf.String(), ctx.errbuf.String())
log.Error("git merge [%s:%s -> %s:%s]: %v\n%s\n%s", pr.HeadRepo.FullName(), pr.HeadBranch, pr.BaseRepo.FullName(), pr.BaseBranch, err, outbuf.String(), errbuf.String())
return fmt.Errorf("git merge [%s:%s -> %s:%s]: %w\n%s\n%s", pr.HeadRepo.FullName(), pr.HeadBranch, pr.BaseRepo.FullName(), pr.BaseBranch, err, outbuf.String(), errbuf.String())
}
ctx.outbuf.Reset()
ctx.errbuf.Reset()
return nil
}
var escapedSymbols = regexp.MustCompile(`([*[?! \\])`)
func getDiffTree(ctx context.Context, repoPath, baseBranch, headBranch string) (string, error) {
getDiffTreeFromBranch := func(repoPath, baseBranch, headBranch string) (string, error) {
var outbuf, errbuf strings.Builder
// Compute the diff-tree for sparse-checkout
if err := git.NewCommand(ctx, "diff-tree", "--no-commit-id", "--name-only", "-r", "-z", "--root").AddDynamicArguments(baseBranch, headBranch).
Run(&git.RunOpts{
Dir: repoPath,
Stdout: &outbuf,
Stderr: &errbuf,
}); err != nil {
return "", fmt.Errorf("git diff-tree [%s base:%s head:%s]: %s", repoPath, baseBranch, headBranch, errbuf.String())
}
return outbuf.String(), nil
}
scanNullTerminatedStrings := func(data []byte, atEOF bool) (advance int, token []byte, err error) {
if atEOF && len(data) == 0 {
return 0, nil, nil
}
if i := bytes.IndexByte(data, '\x00'); i >= 0 {
return i + 1, data[0:i], nil
}
if atEOF {
return len(data), data, nil
}
return 0, nil, nil
}
list, err := getDiffTreeFromBranch(repoPath, baseBranch, headBranch)
if err != nil {
return "", err
}
// Prefixing '/' for each entry, otherwise all files with the same name in subdirectories would be matched.
out := bytes.Buffer{}
scanner := bufio.NewScanner(strings.NewReader(list))
scanner.Split(scanNullTerminatedStrings)
for scanner.Scan() {
filepath := scanner.Text()
// escape '*', '?', '[', spaces and '!' prefix
filepath = escapedSymbols.ReplaceAllString(filepath, `\$1`)
// no necessary to escape the first '#' symbol because the first symbol is '/'
fmt.Fprintf(&out, "/%s\n", filepath)
}
return out.String(), nil
}
// IsUserAllowedToMerge check if user is allowed to merge PR with given permissions and branch protections
func IsUserAllowedToMerge(ctx context.Context, pr *issues_model.PullRequest, p access_model.Permission, user *user_model.User) (bool, error) {
if user == nil {
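
For illustration only (not part of this diff): a self-contained sketch of the sparse-checkout escaping performed by getDiffTree above. The regular expression and the '/' prefix are copied from the hunk; the file names are made up.

package main

import (
	"fmt"
	"regexp"
)

// escapedSymbols mirrors the pattern used above: '*', '[', '?', '!', spaces and
// backslashes must be escaped before paths are written to .git/info/sparse-checkout.
var escapedSymbols = regexp.MustCompile(`([*[?! \\])`)

func main() {
	for _, p := range []string{"README.md", "docs/My File[1].md"} {
		// Each entry is prefixed with '/' so only the exact path matches.
		fmt.Printf("/%s\n", escapedSymbols.ReplaceAllString(p, `\$1`))
	}
	// Output:
	// /README.md
	// /docs/My\ File\[1].md
}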

View File

@ -1,25 +0,0 @@
// Copyright 2023 The Gitea Authors. All rights reserved.
// SPDX-License-Identifier: MIT
package pull
import (
repo_model "code.gitea.io/gitea/models/repo"
"code.gitea.io/gitea/modules/git"
"code.gitea.io/gitea/modules/log"
)
// doMergeStyleMerge merges the tracking branch into the current HEAD - which is assumed to be the staging branch (equal to the pr.BaseBranch)
func doMergeStyleMerge(ctx *mergeContext, message string) error {
cmd := git.NewCommand(ctx, "merge", "--no-ff", "--no-commit").AddDynamicArguments(trackingBranch)
if err := runMergeCommand(ctx, repo_model.MergeStyleMerge, cmd); err != nil {
log.Error("%-v Unable to merge tracking into base: %v", ctx.pr, err)
return err
}
if err := commitAndSignNoAuthor(ctx, message); err != nil {
log.Error("%-v Unable to make final commit: %v", ctx.pr, err)
return err
}
return nil
}

View File

@@ -1,297 +0,0 @@
// Copyright 2023 The Gitea Authors. All rights reserved.
// SPDX-License-Identifier: MIT
package pull
import (
"bufio"
"bytes"
"context"
"fmt"
"io"
"os"
"path/filepath"
"strings"
"time"
"code.gitea.io/gitea/models"
issues_model "code.gitea.io/gitea/models/issues"
repo_model "code.gitea.io/gitea/models/repo"
user_model "code.gitea.io/gitea/models/user"
"code.gitea.io/gitea/modules/git"
"code.gitea.io/gitea/modules/log"
asymkey_service "code.gitea.io/gitea/services/asymkey"
)
type mergeContext struct {
*prContext
doer *user_model.User
sig *git.Signature
committer *git.Signature
signArg git.TrustedCmdArgs
env []string
}
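// RunOpts returns git.RunOpts bound to the temporary base repository, resetting the shared output buffers before each run.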
func (ctx *mergeContext) RunOpts() *git.RunOpts {
ctx.outbuf.Reset()
ctx.errbuf.Reset()
return &git.RunOpts{
Env: ctx.env,
Dir: ctx.tmpBasePath,
Stdout: ctx.outbuf,
Stderr: ctx.errbuf,
}
}
func createTemporaryRepoForMerge(ctx context.Context, pr *issues_model.PullRequest, doer *user_model.User, expectedHeadCommitID string) (mergeCtx *mergeContext, cancel context.CancelFunc, err error) {
// Clone base repo.
prCtx, cancel, err := createTemporaryRepoForPR(ctx, pr)
if err != nil {
log.Error("createTemporaryRepoForPR: %v", err)
return nil, cancel, err
}
mergeCtx = &mergeContext{
prContext: prCtx,
doer: doer,
}
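// If an expected head commit was supplied, verify that the tracking branch still points at it before going any further.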
if expectedHeadCommitID != "" {
trackingCommitID, _, err := git.NewCommand(ctx, "show-ref", "--hash").AddDynamicArguments(git.BranchPrefix + trackingBranch).RunStdString(&git.RunOpts{Dir: mergeCtx.tmpBasePath})
if err != nil {
defer cancel()
log.Error("failed to get sha of head branch in %-v: show-ref[%s] --hash refs/heads/tracking: %v", mergeCtx.pr, mergeCtx.tmpBasePath, err)
return nil, nil, fmt.Errorf("unable to get sha of head branch in %v %w", pr, err)
}
if strings.TrimSpace(trackingCommitID) != expectedHeadCommitID {
defer cancel()
return nil, nil, models.ErrSHADoesNotMatch{
GivenSHA: expectedHeadCommitID,
CurrentSHA: trackingCommitID,
}
}
}
mergeCtx.outbuf.Reset()
mergeCtx.errbuf.Reset()
if err := prepareTemporaryRepoForMerge(mergeCtx); err != nil {
defer cancel()
return nil, nil, err
}
mergeCtx.sig = doer.NewGitSig()
mergeCtx.committer = mergeCtx.sig
// Determine if we should sign
sign, keyID, signer, _ := asymkey_service.SignMerge(ctx, mergeCtx.pr, mergeCtx.doer, mergeCtx.tmpBasePath, "HEAD", trackingBranch)
if sign {
mergeCtx.signArg = git.ToTrustedCmdArgs([]string{"-S" + keyID})
if pr.BaseRepo.GetTrustModel() == repo_model.CommitterTrustModel || pr.BaseRepo.GetTrustModel() == repo_model.CollaboratorCommitterTrustModel {
mergeCtx.committer = signer
}
} else {
mergeCtx.signArg = git.ToTrustedCmdArgs([]string{"--no-gpg-sign"})
}
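// The same timestamp is used for the author and committer dates of the merge commit.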
commitTimeStr := time.Now().Format(time.RFC3339)
// Because this may call hooks we should pass in the environment
mergeCtx.env = append(os.Environ(),
"GIT_AUTHOR_NAME="+mergeCtx.sig.Name,
"GIT_AUTHOR_EMAIL="+mergeCtx.sig.Email,
"GIT_AUTHOR_DATE="+commitTimeStr,
"GIT_COMMITTER_NAME="+mergeCtx.committer.Name,
"GIT_COMMITTER_EMAIL="+mergeCtx.committer.Email,
"GIT_COMMITTER_DATE="+commitTimeStr,
)
return mergeCtx, cancel, nil
}
// prepareTemporaryRepoForMerge takes a repository that has been created using createTemporaryRepo
// it then sets up the sparse-checkout and other things
func prepareTemporaryRepoForMerge(ctx *mergeContext) error {
infoPath := filepath.Join(ctx.tmpBasePath, ".git", "info")
if err := os.MkdirAll(infoPath, 0o700); err != nil {
log.Error("%-v Unable to create .git/info in %s: %v", ctx.pr, ctx.tmpBasePath, err)
return fmt.Errorf("Unable to create .git/info in tmpBasePath: %w", err)
}
// Enable sparse-checkout
// Here we use the .git/info/sparse-checkout file as described in the git documentation
sparseCheckoutListFile, err := os.OpenFile(filepath.Join(infoPath, "sparse-checkout"), os.O_WRONLY|os.O_CREATE|os.O_TRUNC, 0o600)
if err != nil {
log.Error("%-v Unable to write .git/info/sparse-checkout file in %s: %v", ctx.pr, ctx.tmpBasePath, err)
return fmt.Errorf("Unable to write .git/info/sparse-checkout file in tmpBasePath: %w", err)
}
defer sparseCheckoutListFile.Close() // we will close it earlier but we need to ensure it is closed if there is an error
if err := getDiffTree(ctx, ctx.tmpBasePath, baseBranch, trackingBranch, sparseCheckoutListFile); err != nil {
log.Error("%-v getDiffTree(%s, %s, %s): %v", ctx.pr, ctx.tmpBasePath, baseBranch, trackingBranch, err)
return fmt.Errorf("getDiffTree: %w", err)
}
if err := sparseCheckoutListFile.Close(); err != nil {
log.Error("%-v Unable to close .git/info/sparse-checkout file in %s: %v", ctx.pr, ctx.tmpBasePath, err)
return fmt.Errorf("Unable to close .git/info/sparse-checkout file in tmpBasePath: %w", err)
}
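// setConfig applies a single "git config --local" option to the temporary repository.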
gitConfigCommand := func() *git.Command {
return git.NewCommand(ctx, "config", "--local")
}
setConfig := func(key, value string) error {
if err := gitConfigCommand().AddArguments(git.ToTrustedCmdArgs([]string{key, value})...).
Run(ctx.RunOpts()); err != nil {
if value == "" {
value = "<>"
}
log.Error("git config [%s -> %s ]: %v\n%s\n%s", key, value, err, ctx.outbuf.String(), ctx.errbuf.String())
return fmt.Errorf("git config [%s -> %s ]: %w\n%s\n%s", key, value, err, ctx.outbuf.String(), ctx.errbuf.String())
}
ctx.outbuf.Reset()
ctx.errbuf.Reset()
return nil
}
// Switch off LFS process (set required, clean and smudge here also)
if err := setConfig("filter.lfs.process", ""); err != nil {
return err
}
if err := setConfig("filter.lfs.required", "false"); err != nil {
return err
}
if err := setConfig("filter.lfs.clean", ""); err != nil {
return err
}
if err := setConfig("filter.lfs.smudge", ""); err != nil {
return err
}
if err := setConfig("core.sparseCheckout", "true"); err != nil {
return err
}
// Read base branch index
if err := git.NewCommand(ctx, "read-tree", "HEAD").
Run(ctx.RunOpts()); err != nil {
log.Error("git read-tree HEAD: %v\n%s\n%s", err, ctx.outbuf.String(), ctx.errbuf.String())
return fmt.Errorf("Unable to read base branch in to the index: %w\n%s\n%s", err, ctx.outbuf.String(), ctx.errbuf.String())
}
ctx.outbuf.Reset()
ctx.errbuf.Reset()
return nil
}
// getDiffTree returns a string containing all the files that were changed between headBranch and baseBranch
// the filenames are escaped so as to fit the format required for .git/info/sparse-checkout
func getDiffTree(ctx context.Context, repoPath, baseBranch, headBranch string, out io.Writer) error {
diffOutReader, diffOutWriter, err := os.Pipe()
if err != nil {
log.Error("Unable to create os.Pipe for %s", repoPath)
return err
}
defer func() {
_ = diffOutReader.Close()
_ = diffOutWriter.Close()
}()
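// The diff-tree output is written to the pipe and consumed by the PipelineFunc below while the command is still running.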
scanNullTerminatedStrings := func(data []byte, atEOF bool) (advance int, token []byte, err error) {
if atEOF && len(data) == 0 {
return 0, nil, nil
}
if i := bytes.IndexByte(data, '\x00'); i >= 0 {
return i + 1, data[0:i], nil
}
if atEOF {
return len(data), data, nil
}
return 0, nil, nil
}
err = git.NewCommand(ctx, "diff-tree", "--no-commit-id", "--name-only", "-r", "-z", "--root").AddDynamicArguments(baseBranch, headBranch).
Run(&git.RunOpts{
Dir: repoPath,
Stdout: diffOutWriter,
PipelineFunc: func(ctx context.Context, cancel context.CancelFunc) error {
// Close the writer end of the pipe to begin processing
_ = diffOutWriter.Close()
defer func() {
// Close the reader on return to terminate the git command if necessary
_ = diffOutReader.Close()
}()
// Now scan the output from the command
scanner := bufio.NewScanner(diffOutReader)
scanner.Split(scanNullTerminatedStrings)
for scanner.Scan() {
filepath := scanner.Text()
// escape '*', '?', '[', spaces and '!' prefix
filepath = escapedSymbols.ReplaceAllString(filepath, `\$1`)
// not necessary to escape the first '#' symbol because the first symbol is '/'
fmt.Fprintf(out, "/%s\n", filepath)
}
return scanner.Err()
},
})
return err
}
// rebaseTrackingOnToBase checks out the tracking branch as staging and rebases it on to the base branch
// if there is a conflict it will return a models.ErrRebaseConflicts
func rebaseTrackingOnToBase(ctx *mergeContext, mergeStyle repo_model.MergeStyle) error {
// Checkout head branch
if err := git.NewCommand(ctx, "checkout", "-b").AddDynamicArguments(stagingBranch, trackingBranch).
Run(ctx.RunOpts()); err != nil {
return fmt.Errorf("unable to git checkout tracking as staging in temp repo for %v: %w\n%s\n%s", ctx.pr, err, ctx.outbuf.String(), ctx.errbuf.String())
}
ctx.outbuf.Reset()
ctx.errbuf.Reset()
// Rebase before merging
if err := git.NewCommand(ctx, "rebase").AddDynamicArguments(baseBranch).
Run(ctx.RunOpts()); err != nil {
// Rebase will leave a REBASE_HEAD file in .git if there is a conflict
if _, statErr := os.Stat(filepath.Join(ctx.tmpBasePath, ".git", "REBASE_HEAD")); statErr == nil {
var commitSha string
ok := false
failingCommitPaths := []string{
filepath.Join(ctx.tmpBasePath, ".git", "rebase-apply", "original-commit"), // Git < 2.26
filepath.Join(ctx.tmpBasePath, ".git", "rebase-merge", "stopped-sha"), // Git >= 2.26
}
for _, failingCommitPath := range failingCommitPaths {
if _, statErr := os.Stat(failingCommitPath); statErr == nil {
commitShaBytes, readErr := os.ReadFile(failingCommitPath)
if readErr != nil {
// Abandon this attempt to handle the error
return fmt.Errorf("unable to git rebase staging on to base in temp repo for %v: %w\n%s\n%s", ctx.pr, err, ctx.outbuf.String(), ctx.errbuf.String())
}
commitSha = strings.TrimSpace(string(commitShaBytes))
ok = true
break
}
}
if !ok {
log.Error("Unable to determine failing commit sha for failing rebase in temp repo for %-v. Cannot cast as models.ErrRebaseConflicts.", ctx.pr)
return fmt.Errorf("unable to git rebase staging on to base in temp repo for %v: %w\n%s\n%s", ctx.pr, err, ctx.outbuf.String(), ctx.errbuf.String())
}
log.Debug("Conflict when rebasing staging on to base in %-v at %s: %v\n%s\n%s", ctx.pr, commitSha, err, ctx.outbuf.String(), ctx.errbuf.String())
return models.ErrRebaseConflicts{
CommitSHA: commitSha,
Style: mergeStyle,
StdOut: ctx.outbuf.String(),
StdErr: ctx.errbuf.String(),
Err: err,
}
}
return fmt.Errorf("unable to git rebase staging on to base in temp repo for %v: %w\n%s\n%s", ctx.pr, err, ctx.outbuf.String(), ctx.errbuf.String())
}
ctx.outbuf.Reset()
ctx.errbuf.Reset()
return nil
}

View File

@@ -1,50 +0,0 @@
// Copyright 2023 The Gitea Authors. All rights reserved.
// SPDX-License-Identifier: MIT
package pull
import (
"fmt"
repo_model "code.gitea.io/gitea/models/repo"
"code.gitea.io/gitea/modules/git"
"code.gitea.io/gitea/modules/log"
)
// doMergeStyleRebase rebases the tracking branch on the base branch as the current HEAD, with or without a merge commit to the original pr branch
func doMergeStyleRebase(ctx *mergeContext, mergeStyle repo_model.MergeStyle, message string) error {
if err := rebaseTrackingOnToBase(ctx, mergeStyle); err != nil {
return err
}
// Checkout base branch again
if err := git.NewCommand(ctx, "checkout").AddDynamicArguments(baseBranch).
Run(ctx.RunOpts()); err != nil {
log.Error("git checkout base prior to merge post staging rebase %-v: %v\n%s\n%s", ctx.pr, err, ctx.outbuf.String(), ctx.errbuf.String())
return fmt.Errorf("git checkout base prior to merge post staging rebase %v: %w\n%s\n%s", ctx.pr, err, ctx.outbuf.String(), ctx.errbuf.String())
}
ctx.outbuf.Reset()
ctx.errbuf.Reset()
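// A plain rebase merge fast-forwards the base branch onto the rebased staging branch; rebase-merge instead prepares a real merge commit.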
cmd := git.NewCommand(ctx, "merge")
if mergeStyle == repo_model.MergeStyleRebase {
cmd.AddArguments("--ff-only")
} else {
cmd.AddArguments("--no-ff", "--no-commit")
}
cmd.AddDynamicArguments(stagingBranch)
// Prepare merge with commit
if err := runMergeCommand(ctx, mergeStyle, cmd); err != nil {
log.Error("Unable to merge staging into base: %v", err)
return err
}
if mergeStyle == repo_model.MergeStyleRebaseMerge {
if err := commitAndSignNoAuthor(ctx, message); err != nil {
log.Error("Unable to make final commit: %v", err)
return err
}
}
return nil
}

View File

@@ -1,53 +0,0 @@
// Copyright 2023 The Gitea Authors. All rights reserved.
// SPDX-License-Identifier: MIT
package pull
import (
"fmt"
repo_model "code.gitea.io/gitea/models/repo"
"code.gitea.io/gitea/modules/git"
"code.gitea.io/gitea/modules/log"
"code.gitea.io/gitea/modules/setting"
)
// doMergeStyleSquash squashes the tracking branch on the current HEAD (=base)
func doMergeStyleSquash(ctx *mergeContext, message string) error {
cmd := git.NewCommand(ctx, "merge", "--squash").AddDynamicArguments(trackingBranch)
if err := runMergeCommand(ctx, repo_model.MergeStyleSquash, cmd); err != nil {
log.Error("%-v Unable to merge --squash tracking into base: %v", ctx.pr, err)
return err
}
if err := ctx.pr.Issue.LoadPoster(ctx); err != nil {
log.Error("%-v Issue[%d].LoadPoster: %v", ctx.pr, ctx.pr.Issue.ID, err)
return fmt.Errorf("LoadPoster: %w", err)
}
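// The squash commit is authored by the pull request poster; the committer identity comes from the merge context.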
sig := ctx.pr.Issue.Poster.NewGitSig()
if len(ctx.signArg) == 0 {
if err := git.NewCommand(ctx, "commit").
AddOptionFormat("--author='%s <%s>'", sig.Name, sig.Email).
AddOptionFormat("--message=%s", message).
Run(ctx.RunOpts()); err != nil {
log.Error("git commit %-v: %v\n%s\n%s", ctx.pr, err, ctx.outbuf.String(), ctx.errbuf.String())
return fmt.Errorf("git commit [%s:%s -> %s:%s]: %w\n%s\n%s", ctx.pr.HeadRepo.FullName(), ctx.pr.HeadBranch, ctx.pr.BaseRepo.FullName(), ctx.pr.BaseBranch, err, ctx.outbuf.String(), ctx.errbuf.String())
}
} else {
if setting.Repository.PullRequest.AddCoCommitterTrailers && ctx.committer.String() != sig.String() {
// add trailer
message += fmt.Sprintf("\nCo-authored-by: %s\nCo-committed-by: %s\n", sig.String(), sig.String())
}
if err := git.NewCommand(ctx, "commit").
AddArguments(ctx.signArg...).
AddOptionFormat("--author='%s <%s>'", sig.Name, sig.Email).
AddOptionFormat("--message=%s", message).
Run(ctx.RunOpts()); err != nil {
log.Error("git commit %-v: %v\n%s\n%s", ctx.pr, err, ctx.outbuf.String(), ctx.errbuf.String())
return fmt.Errorf("git commit [%s:%s -> %s:%s]: %w\n%s\n%s", ctx.pr.HeadRepo.FullName(), ctx.pr.HeadBranch, ctx.pr.BaseRepo.FullName(), ctx.pr.BaseBranch, err, ctx.outbuf.String(), ctx.errbuf.String())
}
}
ctx.outbuf.Reset()
ctx.errbuf.Reset()
return nil
}

View File

@@ -22,6 +22,7 @@ import (
"code.gitea.io/gitea/modules/graceful"
"code.gitea.io/gitea/modules/log"
"code.gitea.io/gitea/modules/process"
repo_module "code.gitea.io/gitea/modules/repository"
"code.gitea.io/gitea/modules/setting"
"code.gitea.io/gitea/modules/util"
@@ -63,21 +64,25 @@ func TestPatch(pr *issues_model.PullRequest) error {
defer finished()
// Clone base repo.
prCtx, cancel, err := createTemporaryRepoForPR(ctx, pr)
tmpBasePath, err := createTemporaryRepo(ctx, pr)
if err != nil {
log.Error("createTemporaryRepoForPR %-v: %v", pr, err)
log.Error("CreateTemporaryPath: %v", err)
return err
}
defer cancel()
defer func() {
if err := repo_module.RemoveTemporaryPath(tmpBasePath); err != nil {
log.Error("Merge: RemoveTemporaryPath: %s", err)
}
}()
gitRepo, err := git.OpenRepository(ctx, prCtx.tmpBasePath)
gitRepo, err := git.OpenRepository(ctx, tmpBasePath)
if err != nil {
return fmt.Errorf("OpenRepository: %w", err)
}
defer gitRepo.Close()
// 1. update merge base
pr.MergeBase, _, err = git.NewCommand(ctx, "merge-base", "--", "base", "tracking").RunStdString(&git.RunOpts{Dir: prCtx.tmpBasePath})
pr.MergeBase, _, err = git.NewCommand(ctx, "merge-base", "--", "base", "tracking").RunStdString(&git.RunOpts{Dir: tmpBasePath})
if err != nil {
var err2 error
pr.MergeBase, err2 = gitRepo.GetRefCommitID(git.BranchPrefix + "base")
@@ -96,7 +101,7 @@ func TestPatch(pr *issues_model.PullRequest) error {
}
// 2. Check for conflicts
if conflicts, err := checkConflicts(ctx, pr, gitRepo, prCtx.tmpBasePath); err != nil || conflicts || pr.Status == issues_model.PullRequestStatusEmpty {
if conflicts, err := checkConflicts(ctx, pr, gitRepo, tmpBasePath); err != nil || conflicts || pr.Status == issues_model.PullRequestStatusEmpty {
return err
}

View File

@@ -349,14 +349,18 @@ func AddTestPullRequestTask(doer *user_model.User, repoID int64, branch string,
// checkIfPRContentChanged checks if diff to target branch has changed by push
// A commit can be considered to leave the PR untouched if the patch/diff with its merge base is unchanged
func checkIfPRContentChanged(ctx context.Context, pr *issues_model.PullRequest, oldCommitID, newCommitID string) (hasChanged bool, err error) {
prCtx, cancel, err := createTemporaryRepoForPR(ctx, pr)
tmpBasePath, err := createTemporaryRepo(ctx, pr)
if err != nil {
log.Error("CreateTemporaryRepoForPR %-v: %v", pr, err)
log.Error("CreateTemporaryRepo: %v", err)
return false, err
}
defer cancel()
defer func() {
if err := repo_module.RemoveTemporaryPath(tmpBasePath); err != nil {
log.Error("checkIfPRContentChanged: RemoveTemporaryPath: %s", err)
}
}()
tmpRepo, err := git.OpenRepository(ctx, prCtx.tmpBasePath)
tmpRepo, err := git.OpenRepository(ctx, tmpBasePath)
if err != nil {
return false, fmt.Errorf("OpenRepository: %w", err)
}
@@ -375,7 +379,7 @@ func checkIfPRContentChanged(ctx context.Context, pr *issues_model.PullRequest,
}
if err := cmd.Run(&git.RunOpts{
Dir: prCtx.tmpBasePath,
Dir: tmpBasePath,
Stdout: stdoutWriter,
PipelineFunc: func(ctx context.Context, cancel context.CancelFunc) error {
_ = stdoutWriter.Close()

View File

@@ -19,85 +19,49 @@ import (
repo_module "code.gitea.io/gitea/modules/repository"
)
// Temporary repos created here use standard branch names to help simplify
// merging code
const (
baseBranch = "base" // equivalent to pr.BaseBranch
trackingBranch = "tracking" // equivalent to pr.HeadBranch
stagingBranch = "staging" // this is used for a working branch
)
type prContext struct {
context.Context
tmpBasePath string
pr *issues_model.PullRequest
outbuf *strings.Builder // we keep these around to help reduce needless buffer recreation,
errbuf *strings.Builder // any use should be preceded by a Reset, and preferably followed by one after use
}
func (ctx *prContext) RunOpts() *git.RunOpts {
ctx.outbuf.Reset()
ctx.errbuf.Reset()
return &git.RunOpts{
Dir: ctx.tmpBasePath,
Stdout: ctx.outbuf,
Stderr: ctx.errbuf,
}
}
// createTemporaryRepoForPR creates a temporary repo with "base" for pr.BaseBranch and "tracking" for pr.HeadBranch
// createTemporaryRepo creates a temporary repo with "base" for pr.BaseBranch and "tracking" for pr.HeadBranch
// it also creates a second base branch called "original_base"
func createTemporaryRepoForPR(ctx context.Context, pr *issues_model.PullRequest) (prCtx *prContext, cancel context.CancelFunc, err error) {
func createTemporaryRepo(ctx context.Context, pr *issues_model.PullRequest) (string, error) {
if err := pr.LoadHeadRepo(ctx); err != nil {
log.Error("%-v LoadHeadRepo: %v", pr, err)
return nil, nil, fmt.Errorf("%v LoadHeadRepo: %w", pr, err)
log.Error("LoadHeadRepo: %v", err)
return "", fmt.Errorf("LoadHeadRepo: %w", err)
} else if pr.HeadRepo == nil {
log.Error("%-v HeadRepo %d does not exist", pr, pr.HeadRepoID)
return nil, nil, &repo_model.ErrRepoNotExist{
log.Error("Pr %d HeadRepo %d does not exist", pr.ID, pr.HeadRepoID)
return "", &repo_model.ErrRepoNotExist{
ID: pr.HeadRepoID,
}
} else if err := pr.LoadBaseRepo(ctx); err != nil {
log.Error("%-v LoadBaseRepo: %v", pr, err)
return nil, nil, fmt.Errorf("%v LoadBaseRepo: %w", pr, err)
log.Error("LoadBaseRepo: %v", err)
return "", fmt.Errorf("LoadBaseRepo: %w", err)
} else if pr.BaseRepo == nil {
log.Error("%-v BaseRepo %d does not exist", pr, pr.BaseRepoID)
return nil, nil, &repo_model.ErrRepoNotExist{
log.Error("Pr %d BaseRepo %d does not exist", pr.ID, pr.BaseRepoID)
return "", &repo_model.ErrRepoNotExist{
ID: pr.BaseRepoID,
}
} else if err := pr.HeadRepo.LoadOwner(ctx); err != nil {
log.Error("%-v HeadRepo.LoadOwner: %v", pr, err)
return nil, nil, fmt.Errorf("%v HeadRepo.LoadOwner: %w", pr, err)
log.Error("HeadRepo.LoadOwner: %v", err)
return "", fmt.Errorf("HeadRepo.LoadOwner: %w", err)
} else if err := pr.BaseRepo.LoadOwner(ctx); err != nil {
log.Error("%-v BaseRepo.LoadOwner: %v", pr, err)
return nil, nil, fmt.Errorf("%v BaseRepo.LoadOwner: %w", pr, err)
log.Error("BaseRepo.LoadOwner: %v", err)
return "", fmt.Errorf("BaseRepo.LoadOwner: %w", err)
}
// Clone base repo.
tmpBasePath, err := repo_module.CreateTemporaryPath("pull")
if err != nil {
log.Error("CreateTemporaryPath[%-v]: %v", pr, err)
return nil, nil, err
}
prCtx = &prContext{
Context: ctx,
tmpBasePath: tmpBasePath,
pr: pr,
outbuf: &strings.Builder{},
errbuf: &strings.Builder{},
}
cancel = func() {
if err := repo_module.RemoveTemporaryPath(tmpBasePath); err != nil {
log.Error("Error whilst removing removing temporary repo for %-v: %v", pr, err)
}
log.Error("CreateTemporaryPath: %v", err)
return "", err
}
baseRepoPath := pr.BaseRepo.RepoPath()
headRepoPath := pr.HeadRepo.RepoPath()
if err := git.InitRepository(ctx, tmpBasePath, false); err != nil {
log.Error("Unable to init tmpBasePath for %-v: %v", pr, err)
cancel()
return nil, nil, err
log.Error("git init tmpBasePath: %v", err)
if err := repo_module.RemoveTemporaryPath(tmpBasePath); err != nil {
log.Error("CreateTempRepo: RemoveTemporaryPath: %s", err)
}
return "", err
}
remoteRepoName := "head_repo"
@@ -109,63 +73,99 @@ func createTemporaryRepoForPR(ctx context.Context, pr *issues_model.PullRequest)
fetchArgs = append(fetchArgs, "--no-write-commit-graph")
}
// addCacheRepo adds a git alternates entry for the cacheRepoPath in the repoPath
addCacheRepo := func(repoPath, cacheRepoPath string) error {
p := filepath.Join(repoPath, ".git", "objects", "info", "alternates")
// Add head repo remote.
addCacheRepo := func(staging, cache string) error {
p := filepath.Join(staging, ".git", "objects", "info", "alternates")
f, err := os.OpenFile(p, os.O_APPEND|os.O_CREATE|os.O_WRONLY, 0o600)
if err != nil {
log.Error("Could not create .git/objects/info/alternates file in %s: %v", repoPath, err)
log.Error("Could not create .git/objects/info/alternates file in %s: %v", staging, err)
return err
}
defer f.Close()
data := filepath.Join(cacheRepoPath, "objects")
data := filepath.Join(cache, "objects")
if _, err := fmt.Fprintln(f, data); err != nil {
log.Error("Could not write to .git/objects/info/alternates file in %s: %v", repoPath, err)
log.Error("Could not write to .git/objects/info/alternates file in %s: %v", staging, err)
return err
}
return nil
}
// Add head repo remote.
if err := addCacheRepo(tmpBasePath, baseRepoPath); err != nil {
log.Error("%-v Unable to add base repository to temporary repo [%s -> %s]: %v", pr, pr.BaseRepo.FullName(), tmpBasePath, err)
cancel()
return nil, nil, fmt.Errorf("Unable to add base repository to temporary repo [%s -> tmpBasePath]: %w", pr.BaseRepo.FullName(), err)
log.Error("Unable to add base repository to temporary repo [%s -> %s]: %v", pr.BaseRepo.FullName(), tmpBasePath, err)
if err := repo_module.RemoveTemporaryPath(tmpBasePath); err != nil {
log.Error("CreateTempRepo: RemoveTemporaryPath: %s", err)
}
return "", fmt.Errorf("Unable to add base repository to temporary repo [%s -> tmpBasePath]: %w", pr.BaseRepo.FullName(), err)
}
var outbuf, errbuf strings.Builder
if err := git.NewCommand(ctx, "remote", "add", "-t").AddDynamicArguments(pr.BaseBranch).AddArguments("-m").AddDynamicArguments(pr.BaseBranch).AddDynamicArguments("origin", baseRepoPath).
Run(prCtx.RunOpts()); err != nil {
log.Error("%-v Unable to add base repository as origin [%s -> %s]: %v\n%s\n%s", pr, pr.BaseRepo.FullName(), tmpBasePath, err, prCtx.outbuf.String(), prCtx.errbuf.String())
cancel()
return nil, nil, fmt.Errorf("Unable to add base repository as origin [%s -> tmpBasePath]: %w\n%s\n%s", pr.BaseRepo.FullName(), err, prCtx.outbuf.String(), prCtx.errbuf.String())
Run(&git.RunOpts{
Dir: tmpBasePath,
Stdout: &outbuf,
Stderr: &errbuf,
}); err != nil {
log.Error("Unable to add base repository as origin [%s -> %s]: %v\n%s\n%s", pr.BaseRepo.FullName(), tmpBasePath, err, outbuf.String(), errbuf.String())
if err := repo_module.RemoveTemporaryPath(tmpBasePath); err != nil {
log.Error("CreateTempRepo: RemoveTemporaryPath: %s", err)
}
return "", fmt.Errorf("Unable to add base repository as origin [%s -> tmpBasePath]: %w\n%s\n%s", pr.BaseRepo.FullName(), err, outbuf.String(), errbuf.String())
}
outbuf.Reset()
errbuf.Reset()
if err := git.NewCommand(ctx, "fetch", "origin").AddArguments(fetchArgs...).AddDashesAndList(pr.BaseBranch+":"+baseBranch, pr.BaseBranch+":original_"+baseBranch).
Run(prCtx.RunOpts()); err != nil {
log.Error("%-v Unable to fetch origin base branch [%s:%s -> base, original_base in %s]: %v:\n%s\n%s", pr, pr.BaseRepo.FullName(), pr.BaseBranch, tmpBasePath, err, prCtx.outbuf.String(), prCtx.errbuf.String())
cancel()
return nil, nil, fmt.Errorf("Unable to fetch origin base branch [%s:%s -> base, original_base in tmpBasePath]: %w\n%s\n%s", pr.BaseRepo.FullName(), pr.BaseBranch, err, prCtx.outbuf.String(), prCtx.errbuf.String())
Run(&git.RunOpts{
Dir: tmpBasePath,
Stdout: &outbuf,
Stderr: &errbuf,
}); err != nil {
log.Error("Unable to fetch origin base branch [%s:%s -> base, original_base in %s]: %v:\n%s\n%s", pr.BaseRepo.FullName(), pr.BaseBranch, tmpBasePath, err, outbuf.String(), errbuf.String())
if err := repo_module.RemoveTemporaryPath(tmpBasePath); err != nil {
log.Error("CreateTempRepo: RemoveTemporaryPath: %s", err)
}
return "", fmt.Errorf("Unable to fetch origin base branch [%s:%s -> base, original_base in tmpBasePath]: %w\n%s\n%s", pr.BaseRepo.FullName(), pr.BaseBranch, err, outbuf.String(), errbuf.String())
}
outbuf.Reset()
errbuf.Reset()
if err := git.NewCommand(ctx, "symbolic-ref").AddDynamicArguments("HEAD", git.BranchPrefix+baseBranch).
Run(prCtx.RunOpts()); err != nil {
log.Error("%-v Unable to set HEAD as base branch in [%s]: %v\n%s\n%s", pr, tmpBasePath, err, prCtx.outbuf.String(), prCtx.errbuf.String())
cancel()
return nil, nil, fmt.Errorf("Unable to set HEAD as base branch in tmpBasePath: %w\n%s\n%s", err, prCtx.outbuf.String(), prCtx.errbuf.String())
Run(&git.RunOpts{
Dir: tmpBasePath,
Stdout: &outbuf,
Stderr: &errbuf,
}); err != nil {
log.Error("Unable to set HEAD as base branch [%s]: %v\n%s\n%s", tmpBasePath, err, outbuf.String(), errbuf.String())
if err := repo_module.RemoveTemporaryPath(tmpBasePath); err != nil {
log.Error("CreateTempRepo: RemoveTemporaryPath: %s", err)
}
return "", fmt.Errorf("Unable to set HEAD as base branch [tmpBasePath]: %w\n%s\n%s", err, outbuf.String(), errbuf.String())
}
outbuf.Reset()
errbuf.Reset()
if err := addCacheRepo(tmpBasePath, headRepoPath); err != nil {
log.Error("%-v Unable to add head repository to temporary repo [%s -> %s]: %v", pr, pr.HeadRepo.FullName(), tmpBasePath, err)
cancel()
return nil, nil, fmt.Errorf("Unable to add head base repository to temporary repo [%s -> tmpBasePath]: %w", pr.HeadRepo.FullName(), err)
log.Error("Unable to add head repository to temporary repo [%s -> %s]: %v", pr.HeadRepo.FullName(), tmpBasePath, err)
if err := repo_module.RemoveTemporaryPath(tmpBasePath); err != nil {
log.Error("CreateTempRepo: RemoveTemporaryPath: %s", err)
}
return "", fmt.Errorf("Unable to head base repository to temporary repo [%s -> tmpBasePath]: %w", pr.HeadRepo.FullName(), err)
}
if err := git.NewCommand(ctx, "remote", "add").AddDynamicArguments(remoteRepoName, headRepoPath).
Run(prCtx.RunOpts()); err != nil {
log.Error("%-v Unable to add head repository as head_repo [%s -> %s]: %v\n%s\n%s", pr, pr.HeadRepo.FullName(), tmpBasePath, err, prCtx.outbuf.String(), prCtx.errbuf.String())
cancel()
return nil, nil, fmt.Errorf("Unable to add head repository as head_repo [%s -> tmpBasePath]: %w\n%s\n%s", pr.HeadRepo.FullName(), err, prCtx.outbuf.String(), prCtx.errbuf.String())
Run(&git.RunOpts{
Dir: tmpBasePath,
Stdout: &outbuf,
Stderr: &errbuf,
}); err != nil {
log.Error("Unable to add head repository as head_repo [%s -> %s]: %v\n%s\n%s", pr.HeadRepo.FullName(), tmpBasePath, err, outbuf.String(), errbuf.String())
if err := repo_module.RemoveTemporaryPath(tmpBasePath); err != nil {
log.Error("CreateTempRepo: RemoveTemporaryPath: %s", err)
}
return "", fmt.Errorf("Unable to add head repository as head_repo [%s -> tmpBasePath]: %w\n%s\n%s", pr.HeadRepo.FullName(), err, outbuf.String(), errbuf.String())
}
outbuf.Reset()
errbuf.Reset()
trackingBranch := "tracking"
// Fetch head branch
@@ -178,18 +178,24 @@ func createTemporaryRepoForPR(ctx context.Context, pr *issues_model.PullRequest)
headBranch = pr.GetGitRefName()
}
if err := git.NewCommand(ctx, "fetch").AddArguments(fetchArgs...).AddDynamicArguments(remoteRepoName, headBranch+":"+trackingBranch).
Run(prCtx.RunOpts()); err != nil {
cancel()
Run(&git.RunOpts{
Dir: tmpBasePath,
Stdout: &outbuf,
Stderr: &errbuf,
}); err != nil {
if err := repo_module.RemoveTemporaryPath(tmpBasePath); err != nil {
log.Error("CreateTempRepo: RemoveTemporaryPath: %s", err)
}
if !git.IsBranchExist(ctx, pr.HeadRepo.RepoPath(), pr.HeadBranch) {
return nil, nil, models.ErrBranchDoesNotExist{
return "", models.ErrBranchDoesNotExist{
BranchName: pr.HeadBranch,
}
}
log.Error("%-v Unable to fetch head_repo head branch [%s:%s -> tracking in %s]: %v:\n%s\n%s", pr, pr.HeadRepo.FullName(), pr.HeadBranch, tmpBasePath, err, prCtx.outbuf.String(), prCtx.errbuf.String())
return nil, nil, fmt.Errorf("Unable to fetch head_repo head branch [%s:%s -> tracking in tmpBasePath]: %w\n%s\n%s", pr.HeadRepo.FullName(), headBranch, err, prCtx.outbuf.String(), prCtx.errbuf.String())
log.Error("Unable to fetch head_repo head branch [%s:%s -> tracking in %s]: %v:\n%s\n%s", pr.HeadRepo.FullName(), pr.HeadBranch, tmpBasePath, err, outbuf.String(), errbuf.String())
return "", fmt.Errorf("Unable to fetch head_repo head branch [%s:%s -> tracking in tmpBasePath]: %w\n%s\n%s", pr.HeadRepo.FullName(), headBranch, err, outbuf.String(), errbuf.String())
}
prCtx.outbuf.Reset()
prCtx.errbuf.Reset()
outbuf.Reset()
errbuf.Reset()
return prCtx, cancel, nil
return tmpBasePath, nil
}

View File

@@ -16,67 +16,61 @@ import (
user_model "code.gitea.io/gitea/models/user"
"code.gitea.io/gitea/modules/git"
"code.gitea.io/gitea/modules/log"
repo_module "code.gitea.io/gitea/modules/repository"
)
// Update updates pull request with base branch.
func Update(ctx context.Context, pr *issues_model.PullRequest, doer *user_model.User, message string, rebase bool) error {
if pr.Flow == issues_model.PullRequestFlowAGit {
// TODO: update of agit flow pull request's head branch is unsupported
return fmt.Errorf("update of agit flow pull request's head branch is unsupported")
func Update(ctx context.Context, pull *issues_model.PullRequest, doer *user_model.User, message string, rebase bool) error {
var (
pr *issues_model.PullRequest
style repo_model.MergeStyle
)
pullWorkingPool.CheckIn(fmt.Sprint(pull.ID))
defer pullWorkingPool.CheckOut(fmt.Sprint(pull.ID))
if rebase {
pr = pull
style = repo_model.MergeStyleRebaseUpdate
} else {
// use merge functions but switch repos and branches
pr = &issues_model.PullRequest{
HeadRepoID: pull.BaseRepoID,
BaseRepoID: pull.HeadRepoID,
HeadBranch: pull.BaseBranch,
BaseBranch: pull.HeadBranch,
}
style = repo_model.MergeStyleMerge
}
pullWorkingPool.CheckIn(fmt.Sprint(pr.ID))
defer pullWorkingPool.CheckOut(fmt.Sprint(pr.ID))
if pull.Flow == issues_model.PullRequestFlowAGit {
// TODO: Not support update agit flow pull request's head branch
return fmt.Errorf("Not support update agit flow pull request's head branch")
}
diffCount, err := GetDiverging(ctx, pr)
if err := pr.LoadHeadRepo(ctx); err != nil {
log.Error("LoadHeadRepo: %v", err)
return fmt.Errorf("LoadHeadRepo: %w", err)
} else if err = pr.LoadBaseRepo(ctx); err != nil {
log.Error("LoadBaseRepo: %v", err)
return fmt.Errorf("LoadBaseRepo: %w", err)
}
diffCount, err := GetDiverging(ctx, pull)
if err != nil {
return err
} else if diffCount.Behind == 0 {
return fmt.Errorf("HeadBranch of PR %d is up to date", pr.Index)
return fmt.Errorf("HeadBranch of PR %d is up to date", pull.Index)
}
if rebase {
defer func() {
go AddTestPullRequestTask(doer, pr.BaseRepo.ID, pr.BaseBranch, false, "", "")
}()
return updateHeadByRebaseOnToBase(ctx, pr, doer, message)
}
if err := pr.LoadBaseRepo(ctx); err != nil {
log.Error("unable to load BaseRepo for %-v during update-by-merge: %v", pr, err)
return fmt.Errorf("unable to load BaseRepo for PR[%d] during update-by-merge: %w", pr.ID, err)
}
if err := pr.LoadHeadRepo(ctx); err != nil {
log.Error("unable to load HeadRepo for PR %-v during update-by-merge: %v", pr, err)
return fmt.Errorf("unable to load HeadRepo for PR[%d] during update-by-merge: %w", pr.ID, err)
}
if pr.HeadRepo == nil {
// LoadHeadRepo will swallow ErrRepoNotExist so if pr.HeadRepo is still nil recreate the error
err := repo_model.ErrRepoNotExist{
ID: pr.HeadRepoID,
}
log.Error("unable to load HeadRepo for PR %-v during update-by-merge: %v", pr, err)
return fmt.Errorf("unable to load HeadRepo for PR[%d] during update-by-merge: %w", pr.ID, err)
}
// use merge functions but switch repos and branches
reversePR := &issues_model.PullRequest{
ID: pr.ID,
HeadRepoID: pr.BaseRepoID,
HeadRepo: pr.BaseRepo,
HeadBranch: pr.BaseBranch,
BaseRepoID: pr.HeadRepoID,
BaseRepo: pr.HeadRepo,
BaseBranch: pr.HeadBranch,
}
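// Merging the reversed pull request merges the real base branch into the real head branch, which is exactly what an update does.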
_, err = doMergeAndPush(ctx, reversePR, doer, repo_model.MergeStyleMerge, "", message)
_, err = rawMerge(ctx, pr, doer, style, "", message)
defer func() {
go AddTestPullRequestTask(doer, reversePR.HeadRepo.ID, reversePR.HeadBranch, false, "", "")
if rebase {
go AddTestPullRequestTask(doer, pr.BaseRepo.ID, pr.BaseBranch, false, "", "")
return
}
go AddTestPullRequestTask(doer, pr.HeadRepo.ID, pr.HeadBranch, false, "", "")
}()
return err
@@ -165,16 +159,27 @@ func IsUserAllowedToUpdate(ctx context.Context, pull *issues_model.PullRequest,
// GetDiverging determines how many commits a PR is ahead or behind the PR base branch
func GetDiverging(ctx context.Context, pr *issues_model.PullRequest) (*git.DivergeObject, error) {
log.Trace("GetDiverging[%-v]: compare commits", pr)
prCtx, cancel, err := createTemporaryRepoForPR(ctx, pr)
log.Trace("GetDiverging[%d]: compare commits", pr.ID)
if err := pr.LoadBaseRepo(ctx); err != nil {
return nil, err
}
if err := pr.LoadHeadRepo(ctx); err != nil {
return nil, err
}
tmpRepo, err := createTemporaryRepo(ctx, pr)
if err != nil {
if !models.IsErrBranchDoesNotExist(err) {
log.Error("CreateTemporaryRepoForPR %-v: %v", pr, err)
log.Error("CreateTemporaryRepo: %v", err)
}
return nil, err
}
defer cancel()
defer func() {
if err := repo_module.RemoveTemporaryPath(tmpRepo); err != nil {
log.Error("Merge: RemoveTemporaryPath: %s", err)
}
}()
diff, err := git.GetDivergingCommits(ctx, prCtx.tmpBasePath, baseBranch, trackingBranch)
diff, err := git.GetDivergingCommits(ctx, tmpRepo, "base", "tracking")
return &diff, err
}

View File

@@ -1,107 +0,0 @@
// Copyright 2023 The Gitea Authors. All rights reserved.
// SPDX-License-Identifier: MIT
package pull
import (
"context"
"fmt"
"strings"
issues_model "code.gitea.io/gitea/models/issues"
repo_model "code.gitea.io/gitea/models/repo"
user_model "code.gitea.io/gitea/models/user"
"code.gitea.io/gitea/modules/git"
"code.gitea.io/gitea/modules/log"
repo_module "code.gitea.io/gitea/modules/repository"
"code.gitea.io/gitea/modules/setting"
)
// updateHeadByRebaseOnToBase handles updating a PR's head branch by rebasing it on the PR current base branch
func updateHeadByRebaseOnToBase(ctx context.Context, pr *issues_model.PullRequest, doer *user_model.User, message string) error {
// "Clone" base repo and add the cache headers for the head repo and branch
mergeCtx, cancel, err := createTemporaryRepoForMerge(ctx, pr, doer, "")
if err != nil {
return err
}
defer cancel()
// Determine the old merge-base before the rebase - we use this for LFS push later on
oldMergeBase, _, _ := git.NewCommand(ctx, "merge-base").AddDashesAndList(baseBranch, trackingBranch).RunStdString(&git.RunOpts{Dir: mergeCtx.tmpBasePath})
oldMergeBase = strings.TrimSpace(oldMergeBase)
// Rebase the tracking branch on to the base as the staging branch
if err := rebaseTrackingOnToBase(mergeCtx, repo_model.MergeStyleRebaseUpdate); err != nil {
return err
}
if setting.LFS.StartServer {
// Now we need to ensure that the head repository contains any LFS objects between the new base and the old mergebase
// It's questionable where this should go - either after or before the push
// I think in the interests of data safety - failures to push to the lfs should prevent
// the push as you can always re-rebase.
if err := LFSPush(ctx, mergeCtx.tmpBasePath, baseBranch, oldMergeBase, &issues_model.PullRequest{
HeadRepoID: pr.BaseRepoID,
BaseRepoID: pr.HeadRepoID,
}); err != nil {
log.Error("Unable to push lfs objects between %s and %s up to head branch in %-v: %v", baseBranch, oldMergeBase, pr, err)
return err
}
}
// Now determine who the pushing author should be
var headUser *user_model.User
if err := pr.HeadRepo.LoadOwner(ctx); err != nil {
if !user_model.IsErrUserNotExist(err) {
log.Error("Can't find user: %d for head repository in %-v - %v", pr.HeadRepo.OwnerID, pr, err)
return err
}
log.Error("Can't find user: %d for head repository in %-v - defaulting to doer: %-v - %v", pr.HeadRepo.OwnerID, pr, doer, err)
headUser = doer
} else {
headUser = pr.HeadRepo.Owner
}
pushCmd := git.NewCommand(ctx, "push", "-f", "head_repo").
AddDynamicArguments(stagingBranch + ":" + git.BranchPrefix + pr.HeadBranch)
// Push back to the head repository.
// TODO: this causes an api call to "/api/internal/hook/post-receive/...",
// which prevents us from doing the whole merge in one db transaction
mergeCtx.outbuf.Reset()
mergeCtx.errbuf.Reset()
if err := pushCmd.Run(&git.RunOpts{
Env: repo_module.FullPushingEnvironment(
headUser,
doer,
pr.HeadRepo,
pr.HeadRepo.Name,
pr.ID,
),
Dir: mergeCtx.tmpBasePath,
Stdout: mergeCtx.outbuf,
Stderr: mergeCtx.errbuf,
}); err != nil {
if strings.Contains(mergeCtx.errbuf.String(), "non-fast-forward") {
return &git.ErrPushOutOfDate{
StdOut: mergeCtx.outbuf.String(),
StdErr: mergeCtx.errbuf.String(),
Err: err,
}
} else if strings.Contains(mergeCtx.errbuf.String(), "! [remote rejected]") {
err := &git.ErrPushRejected{
StdOut: mergeCtx.outbuf.String(),
StdErr: mergeCtx.errbuf.String(),
Err: err,
}
err.GenerateMessage()
return err
}
return fmt.Errorf("git push: %s", mergeCtx.errbuf.String())
}
mergeCtx.outbuf.Reset()
mergeCtx.errbuf.Reset()
return nil
}

View File

@@ -238,7 +238,7 @@
{{end}}
<div class="right floated">
{{range .Assignees}}
<a class="tooltip" target="_blank" href="{{.HomeLink}}" data-content="{{$.locale.Tr "repo.projects.column.assigned_to"}} {{.Name}}">{{avatar $.Context . 28 "mini gt-mr-3"}}</a>
<a class="tooltip" target="_blank" href="{{.HomeLink}}" data-content="{{$.locale.Tr "repo.projects.column.assigned_to"}} {{.Name}}">{{avatar . 28 "mini gt-mr-3"}}</a>
{{end}}
</div>
</div>

View File

@@ -10,16 +10,16 @@
{{else}}
{{$referenceUrl = Printf "%s/files#%s" .ctxData.Issue.Link .item.HashTag}}
{{end}}
<div class="item context js-aria-clickable" data-clipboard-text-type="url" data-clipboard-text="{{AppSubUrl}}{{$referenceUrl}}">{{.ctxData.locale.Tr "repo.issues.context.copy_link"}}</div>
<div class="item context js-aria-clickable quote-reply {{if .diff}}quote-reply-diff{{end}}" data-target="{{.item.HashTag}}-raw">{{.ctxData.locale.Tr "repo.issues.context.quote_reply"}}</div>
<a class="item context" data-clipboard-text-type="url" data-clipboard-text="{{AppSubUrl}}{{$referenceUrl}}">{{.ctxData.locale.Tr "repo.issues.context.copy_link"}}</a>
<a class="item context quote-reply {{if .diff}}quote-reply-diff{{end}}" data-target="{{.item.HashTag}}-raw">{{.ctxData.locale.Tr "repo.issues.context.quote_reply"}}</a>
{{if not .ctxData.UnitIssuesGlobalDisabled}}
<div class="item context js-aria-clickable reference-issue" data-target="{{.item.HashTag}}-raw" data-modal="#reference-issue-modal" data-poster="{{.item.Poster.GetDisplayName}}" data-poster-username="{{.item.Poster.Name}}" data-reference="{{$referenceUrl}}">{{.ctxData.locale.Tr "repo.issues.context.reference_issue"}}</div>
<a class="item context reference-issue" data-target="{{.item.HashTag}}-raw" data-modal="#reference-issue-modal" data-poster="{{.item.Poster.GetDisplayName}}" data-poster-username="{{.item.Poster.Name}}" data-reference="{{$referenceUrl}}">{{.ctxData.locale.Tr "repo.issues.context.reference_issue"}}</a>
{{end}}
{{if or .ctxData.Permission.IsAdmin .IsCommentPoster .ctxData.HasIssuesOrPullsWritePermission}}
<div class="divider"></div>
<div class="item context js-aria-clickable edit-content">{{.ctxData.locale.Tr "repo.issues.context.edit"}}</div>
<a class="item context edit-content">{{.ctxData.locale.Tr "repo.issues.context.edit"}}</a>
{{if .delete}}
<div class="item context js-aria-clickable delete-comment" data-comment-id={{.item.HashTag}} data-url="{{.ctxData.RepoLink}}/comments/{{.item.ID}}/delete" data-locale="{{.ctxData.locale.Tr "repo.issues.delete_comment_confirm"}}">{{.ctxData.locale.Tr "repo.issues.context.delete"}}</div>
<a class="item context delete-comment" data-comment-id={{.item.HashTag}} data-url="{{.ctxData.RepoLink}}/comments/{{.item.ID}}/delete" data-locale="{{.ctxData.locale.Tr "repo.issues.delete_comment_confirm"}}">{{.ctxData.locale.Tr "repo.issues.context.delete"}}</a>
{{end}}
{{end}}
</div>

View File

@@ -19,7 +19,7 @@ import (
func TestExternalMarkupRenderer(t *testing.T) {
defer tests.PrepareTestEnv(t)()
if !setting.Database.Type.IsSQLite3() {
if !setting.Database.UseSQLite3 {
t.Skip()
return
}

View File

@@ -94,7 +94,7 @@ func availableVersions() ([]string, error) {
return nil, err
}
defer migrationsDir.Close()
versionRE, err := regexp.Compile("gitea-v(?P<version>.+)\\." + regexp.QuoteMeta(setting.Database.Type.String()) + "\\.sql.gz")
versionRE, err := regexp.Compile("gitea-v(?P<version>.+)\\." + regexp.QuoteMeta(setting.Database.Type) + "\\.sql.gz")
if err != nil {
return nil, err
}
@@ -149,7 +149,7 @@ func restoreOldDB(t *testing.T, version string) bool {
}
switch {
case setting.Database.Type.IsSQLite3():
case setting.Database.UseSQLite3:
util.Remove(setting.Database.Path)
err := os.MkdirAll(path.Dir(setting.Database.Path), os.ModePerm)
assert.NoError(t, err)
@@ -162,7 +162,7 @@ func restoreOldDB(t *testing.T, version string) bool {
assert.NoError(t, err)
db.Close()
case setting.Database.Type.IsMySQL():
case setting.Database.UseMySQL:
db, err := sql.Open("mysql", fmt.Sprintf("%s:%s@tcp(%s)/",
setting.Database.User, setting.Database.Passwd, setting.Database.Host))
assert.NoError(t, err)
@@ -184,7 +184,7 @@ func restoreOldDB(t *testing.T, version string) bool {
assert.NoError(t, err)
db.Close()
case setting.Database.Type.IsPostgreSQL():
case setting.Database.UsePostgreSQL:
var db *sql.DB
var err error
if setting.Database.Host[0] == '/' {
@@ -252,7 +252,7 @@ func restoreOldDB(t *testing.T, version string) bool {
assert.NoError(t, err)
db.Close()
case setting.Database.Type.IsMSSQL():
case setting.Database.UseMSSQL:
host, port := setting.ParseMSSQLHostPort(setting.Database.Host)
db, err := sql.Open("mssql", fmt.Sprintf("server=%s; port=%s; database=%s; user id=%s; password=%s;",
host, port, "master", setting.Database.User, setting.Database.Passwd))

View File

@@ -74,7 +74,7 @@ func InitTest(requireGitea bool) {
}
switch {
case setting.Database.Type.IsMySQL():
case setting.Database.UseMySQL:
connType := "tcp"
if len(setting.Database.Host) > 0 && setting.Database.Host[0] == '/' { // looks like a unix socket
connType = "unix"
@@ -89,7 +89,7 @@ func InitTest(requireGitea bool) {
if _, err = db.Exec(fmt.Sprintf("CREATE DATABASE IF NOT EXISTS %s", setting.Database.Name)); err != nil {
log.Fatal("db.Exec: %v", err)
}
case setting.Database.Type.IsPostgreSQL():
case setting.Database.UsePostgreSQL:
var db *sql.DB
var err error
if setting.Database.Host[0] == '/' {
@@ -146,7 +146,7 @@ func InitTest(requireGitea bool) {
}
}
case setting.Database.Type.IsMSSQL():
case setting.Database.UseMSSQL:
host, port := setting.ParseMSSQLHostPort(setting.Database.Host)
db, err := sql.Open("mssql", fmt.Sprintf("server=%s; port=%s; database=%s; user id=%s; password=%s;",
host, port, "master", setting.Database.User, setting.Database.Passwd))

View File

@@ -83,9 +83,8 @@ function attachOneDropdownAria($dropdown) {
if (e.key === 'Enter') {
let $item = $dropdown.dropdown('get item', $dropdown.dropdown('get value'));
if (!$item) $item = $menu.find('> .item.selected'); // when dropdown filters items by input, there is no "value", so query the "selected" item
// if the selected item is clickable, then trigger the click event.
// we cannot click an item without this check, because Fomantic code might also handle the Enter event; that would result in a double click.
if ($item && ($item.is('a') || $item.is('.js-aria-clickable'))) $item[0].click();
// if the selected item is clickable, then trigger the click event. in the future there could be a special CSS class for it.
if ($item && $item.is('a')) $item[0].click();
}
});

View File

@@ -129,8 +129,8 @@ export function initImageDiff() {
initOverlay(createContext($imageAfter[2], $imageBefore[2]));
}
hideElem($container.find('> .loader'));
$container.find('> .gt-hidden').removeClass('gt-hidden');
hideElem($container.find('.ui.loader'));
}
function initSideBySide(sizes) {
@@ -155,7 +155,7 @@ export function initImageDiff() {
height: sizes.size1.height * factor
});
sizes.image1.parent().css({
margin: `10px auto`,
margin: `${sizes.ratio[1] * factor + 15}px ${sizes.ratio[0] * factor}px ${sizes.ratio[1] * factor}px`,
width: sizes.size1.width * factor + 2,
height: sizes.size1.height * factor + 2
});
@@ -164,7 +164,7 @@ export function initImageDiff() {
height: sizes.size2.height * factor
});
sizes.image2.parent().css({
margin: `10px auto`,
margin: `${sizes.ratio[3] * factor}px ${sizes.ratio[2] * factor}px`,
width: sizes.size2.width * factor + 2,
height: sizes.size2.height * factor + 2
});
@@ -255,12 +255,13 @@ export function initImageDiff() {
width: sizes.size2.width * factor + 2,
height: sizes.size2.height * factor + 2
});
// some inner elements are `position: absolute`, so the container's height must be large enough
// the "css(width, height)" is somewhat hacky and not easy to understand, it could be improved in the future
sizes.image2.parent().parent().css({
width: sizes.max.width * factor + 2,
height: sizes.max.height * factor + 2 + 20 /* extra height for inner "position: absolute" elements */,
height: sizes.max.height * factor + 2
});
$container.find('.onion-skin').css({
width: sizes.max.width * factor + 2,
height: sizes.max.height * factor + 4
});
const $range = $container.find("input[type='range']");

View File

@@ -80,8 +80,7 @@ export function svg(name, size = 16, className = '') {
const svgNode = document.firstChild;
if (size !== 16) svgNode.setAttribute('width', String(size));
if (size !== 16) svgNode.setAttribute('height', String(size));
// filter array to remove empty string
if (className) svgNode.classList.add(...className.split(/\s+/).filter(Boolean));
if (className) svgNode.classList.add(...className.split(/\s+/));
return serializer.serializeToString(svgNode);
}

View File

@@ -1,6 +1,6 @@
.image-diff-container {
text-align: center;
padding: 1em 0;
padding: 30px 0;
img {
border: 1px solid var(--color-primary-light-7);
@@ -22,7 +22,6 @@
display: inline-block;
line-height: 0;
vertical-align: top;
margin: 0 1em;
.side-header {
font-weight: bold;
@@ -99,7 +98,7 @@
}
input {
max-width: 300px;
width: 300px;
}
}
}