Compare commits


No commits in common. "ed25e094abfc93e83c6fecb5d82ce64d0e220717" and "6be6c19daf93c720718ce65c71fcf288d6e070e3" have entirely different histories.

47 changed files with 130 additions and 184 deletions

View File

@@ -35,7 +35,7 @@ func runConvert(ctx *cli.Context) error {
 	log.Info("Log path: %s", setting.Log.RootPath)
 	log.Info("Configuration file: %s", setting.CustomConf)
-	if !setting.Database.Type.IsMySQL() {
+	if !setting.Database.UseMySQL {
 		fmt.Println("This command can only be used with a MySQL database")
 		return nil
 	}

View File

@@ -279,7 +279,7 @@ func runDump(ctx *cli.Context) error {
 	}()
 	targetDBType := ctx.String("database")
-	if len(targetDBType) > 0 && targetDBType != setting.Database.Type.String() {
+	if len(targetDBType) > 0 && targetDBType != setting.Database.Type {
 		log.Info("Dumping database %s => %s...", setting.Database.Type, targetDBType)
 	} else {
 		log.Info("Dumping database...")

View File

@@ -99,7 +99,7 @@ func (a *Action) TableIndices() []*schemas.Index {
 	actUserIndex.AddColumn("act_user_id", "repo_id", "created_unix", "user_id", "is_deleted")
 	indices := []*schemas.Index{actUserIndex, repoIndex}
-	if setting.Database.Type.IsPostgreSQL() {
+	if setting.Database.UsePostgreSQL {
 		cudIndex := schemas.NewIndex("c_u_d", schemas.IndexType)
 		cudIndex.AddColumn("created_unix", "user_id", "is_deleted")
 		indices = append(indices, cudIndex)
@@ -640,7 +640,7 @@ func DeleteIssueActions(ctx context.Context, repoID, issueID int64) error {
 // CountActionCreatedUnixString count actions where created_unix is an empty string
 func CountActionCreatedUnixString(ctx context.Context) (int64, error) {
-	if setting.Database.Type.IsSQLite3() {
+	if setting.Database.UseSQLite3 {
 		return db.GetEngine(ctx).Where(`created_unix = ""`).Count(new(Action))
 	}
 	return 0, nil
@@ -648,7 +648,7 @@ func CountActionCreatedUnixString(ctx context.Context) (int64, error) {
 // FixActionCreatedUnixString set created_unix to zero if it is an empty string
 func FixActionCreatedUnixString(ctx context.Context) (int64, error) {
-	if setting.Database.Type.IsSQLite3() {
+	if setting.Database.UseSQLite3 {
 		res, err := db.GetEngine(ctx).Exec(`UPDATE action SET created_unix = 0 WHERE created_unix = ""`)
 		if err != nil {
 			return 0, err

View File

@@ -234,7 +234,7 @@ func TestGetFeedsCorrupted(t *testing.T) {
 }
 
 func TestConsistencyUpdateAction(t *testing.T) {
-	if !setting.Database.Type.IsSQLite3() {
+	if !setting.Database.UseSQLite3 {
 		t.Skip("Test is only for SQLite database.")
 	}
 	assert.NoError(t, unittest.PrepareTestDatabase())

View File

@@ -39,9 +39,9 @@ func getUserHeatmapData(user *user_model.User, team *organization.Team, doer *us
 	groupBy := "created_unix / 900 * 900"
 	groupByName := "timestamp" // We need this extra case because mssql doesn't allow grouping by alias
 	switch {
-	case setting.Database.Type.IsMySQL():
+	case setting.Database.UseMySQL:
 		groupBy = "created_unix DIV 900 * 900"
-	case setting.Database.Type.IsMSSQL():
+	case setting.Database.UseMSSQL:
 		groupByName = groupBy
 	}

View File

@@ -15,7 +15,7 @@ import (
 // BuildCaseInsensitiveLike returns a condition to check if the given value is like the given key case-insensitively.
 // Handles especially SQLite correctly as UPPER there only transforms ASCII letters.
 func BuildCaseInsensitiveLike(key, value string) builder.Cond {
-	if setting.Database.Type.IsSQLite3() {
+	if setting.Database.UseSQLite3 {
 		return builder.Like{"UPPER(" + key + ")", util.ToUpperASCII(value)}
 	}
 	return builder.Like{"UPPER(" + key + ")", strings.ToUpper(value)}

View File

@@ -100,12 +100,12 @@ func newXORMEngine() (*xorm.Engine, error) {
 	var engine *xorm.Engine
 
-	if setting.Database.Type.IsPostgreSQL() && len(setting.Database.Schema) > 0 {
+	if setting.Database.UsePostgreSQL && len(setting.Database.Schema) > 0 {
 		// OK whilst we sort out our schema issues - create a schema aware postgres
 		registerPostgresSchemaDriver()
 		engine, err = xorm.NewEngine("postgresschema", connStr)
 	} else {
-		engine, err = xorm.NewEngine(setting.Database.Type.String(), connStr)
+		engine, err = xorm.NewEngine(setting.Database.Type, connStr)
 	}
 	if err != nil {

View File

@@ -73,7 +73,7 @@ func postgresGetNextResourceIndex(ctx context.Context, tableName string, groupID
 // GetNextResourceIndex generates a resource index, it must run in the same transaction where the resource is created
 func GetNextResourceIndex(ctx context.Context, tableName string, groupID int64) (int64, error) {
-	if setting.Database.Type.IsPostgreSQL() {
+	if setting.Database.UsePostgreSQL {
 		return postgresGetNextResourceIndex(ctx, tableName, groupID)
 	}

View File

@@ -13,7 +13,7 @@ import (
 // CountBadSequences looks for broken sequences from recreate-table mistakes
 func CountBadSequences(_ context.Context) (int64, error) {
-	if !setting.Database.Type.IsPostgreSQL() {
+	if !setting.Database.UsePostgreSQL {
 		return 0, nil
 	}
@@ -34,7 +34,7 @@ func CountBadSequences(_ context.Context) (int64, error) {
 // FixBadSequences fixes for broken sequences from recreate-table mistakes
 func FixBadSequences(_ context.Context) error {
-	if !setting.Database.Type.IsPostgreSQL() {
+	if !setting.Database.UsePostgreSQL {
 		return nil
 	}

View File

@@ -65,7 +65,7 @@ func postgresGetCommitStatusIndex(ctx context.Context, repoID int64, sha string)
 // GetNextCommitStatusIndex retried 3 times to generate a resource index
 func GetNextCommitStatusIndex(ctx context.Context, repoID int64, sha string) (int64, error) {
-	if setting.Database.Type.IsPostgreSQL() {
+	if setting.Database.UsePostgreSQL {
 		return postgresGetCommitStatusIndex(ctx, repoID, sha)
 	}

View File

@@ -52,16 +52,13 @@ func listPullRequestStatement(baseRepoID int64, opts *PullRequestsOptions) (*xor
 // GetUnmergedPullRequestsByHeadInfo returns all pull requests that are open and has not been merged
 // by given head information (repo and branch).
-// arg `includeClosed` controls whether the SQL returns closed PRs
-func GetUnmergedPullRequestsByHeadInfo(repoID int64, branch string, includeClosed bool) ([]*PullRequest, error) {
+func GetUnmergedPullRequestsByHeadInfo(repoID int64, branch string) ([]*PullRequest, error) {
 	prs := make([]*PullRequest, 0, 2)
-	sess := db.GetEngine(db.DefaultContext).
-		Join("INNER", "issue", "issue.id = pull_request.issue_id").
-		Where("head_repo_id = ? AND head_branch = ? AND has_merged = ? AND flow = ?", repoID, branch, false, PullRequestFlowGithub)
-	if !includeClosed {
-		sess.Where("issue.is_closed = ?", false)
-	}
-	return prs, sess.Find(&prs)
+	return prs, db.GetEngine(db.DefaultContext).
+		Where("head_repo_id = ? AND head_branch = ? AND has_merged = ? AND issue.is_closed = ? AND flow = ?",
+			repoID, branch, false, false, PullRequestFlowGithub).
+		Join("INNER", "issue", "issue.id = pull_request.issue_id").
+		Find(&prs)
 }
 
 // CanMaintainerWriteToBranch check whether user is a maintainer and could write to the branch
@@ -74,7 +71,7 @@ func CanMaintainerWriteToBranch(p access_model.Permission, branch string, user *
 		return false
 	}
 
-	prs, err := GetUnmergedPullRequestsByHeadInfo(p.Units[0].RepoID, branch, false)
+	prs, err := GetUnmergedPullRequestsByHeadInfo(p.Units[0].RepoID, branch)
 	if err != nil {
 		return false
 	}

View File

@@ -118,7 +118,7 @@ func TestHasUnmergedPullRequestsByHeadInfo(t *testing.T) {
 func TestGetUnmergedPullRequestsByHeadInfo(t *testing.T) {
 	assert.NoError(t, unittest.PrepareTestDatabase())
-	prs, err := issues_model.GetUnmergedPullRequestsByHeadInfo(1, "branch2", false)
+	prs, err := issues_model.GetUnmergedPullRequestsByHeadInfo(1, "branch2")
 	assert.NoError(t, err)
 	assert.Len(t, prs, 1)
 	for _, pr := range prs {

View File

@@ -89,7 +89,7 @@ func RecreateTable(sess *xorm.Session, bean interface{}) error {
 		hasID = hasID || (column.IsPrimaryKey && column.IsAutoIncrement)
 	}
 
-	if hasID && setting.Database.Type.IsMSSQL() {
+	if hasID && setting.Database.UseMSSQL {
 		if _, err := sess.Exec(fmt.Sprintf("SET IDENTITY_INSERT `%s` ON", tempTableName)); err != nil {
 			log.Error("Unable to set identity insert for table %s. Error: %v", tempTableName, err)
 			return err
@@ -143,7 +143,7 @@ func RecreateTable(sess *xorm.Session, bean interface{}) error {
 		return err
 	}
 
-	if hasID && setting.Database.Type.IsMSSQL() {
+	if hasID && setting.Database.UseMSSQL {
 		if _, err := sess.Exec(fmt.Sprintf("SET IDENTITY_INSERT `%s` OFF", tempTableName)); err != nil {
 			log.Error("Unable to switch off identity insert for table %s. Error: %v", tempTableName, err)
 			return err
@@ -151,7 +151,7 @@ func RecreateTable(sess *xorm.Session, bean interface{}) error {
 	}
 
 	switch {
-	case setting.Database.Type.IsSQLite3():
+	case setting.Database.UseSQLite3:
 		// SQLite will drop all the constraints on the old table
 		if _, err := sess.Exec(fmt.Sprintf("DROP TABLE `%s`", tableName)); err != nil {
 			log.Error("Unable to drop old table %s. Error: %v", tableName, err)
@@ -178,7 +178,7 @@ func RecreateTable(sess *xorm.Session, bean interface{}) error {
 			return err
 		}
 
-	case setting.Database.Type.IsMySQL():
+	case setting.Database.UseMySQL:
 		// MySQL will drop all the constraints on the old table
 		if _, err := sess.Exec(fmt.Sprintf("DROP TABLE `%s`", tableName)); err != nil {
 			log.Error("Unable to drop old table %s. Error: %v", tableName, err)
@@ -205,7 +205,7 @@ func RecreateTable(sess *xorm.Session, bean interface{}) error {
 			log.Error("Unable to recreate uniques on table %s. Error: %v", tableName, err)
 			return err
 		}
-	case setting.Database.Type.IsPostgreSQL():
+	case setting.Database.UsePostgreSQL:
 		var originalSequences []string
 		type sequenceData struct {
 			LastValue int `xorm:"'last_value'"`
@@ -296,7 +296,7 @@ func RecreateTable(sess *xorm.Session, bean interface{}) error {
 		}
 
-	case setting.Database.Type.IsMSSQL():
+	case setting.Database.UseMSSQL:
 		// MSSQL will drop all the constraints on the old table
 		if _, err := sess.Exec(fmt.Sprintf("DROP TABLE `%s`", tableName)); err != nil {
 			log.Error("Unable to drop old table %s. Error: %v", tableName, err)
@@ -323,7 +323,7 @@ func DropTableColumns(sess *xorm.Session, tableName string, columnNames ...strin
 	// TODO: This will not work if there are foreign keys
 
 	switch {
-	case setting.Database.Type.IsSQLite3():
+	case setting.Database.UseSQLite3:
 		// First drop the indexes on the columns
 		res, errIndex := sess.Query(fmt.Sprintf("PRAGMA index_list(`%s`)", tableName))
 		if errIndex != nil {
@@ -405,7 +405,7 @@ func DropTableColumns(sess *xorm.Session, tableName string, columnNames ...strin
 			return err
 		}
-	case setting.Database.Type.IsPostgreSQL():
+	case setting.Database.UsePostgreSQL:
 		cols := ""
 		for _, col := range columnNames {
 			if cols != "" {
@@ -416,7 +416,7 @@ func DropTableColumns(sess *xorm.Session, tableName string, columnNames ...strin
 		if _, err := sess.Exec(fmt.Sprintf("ALTER TABLE `%s` %s", tableName, cols)); err != nil {
 			return fmt.Errorf("Drop table `%s` columns %v: %v", tableName, columnNames, err)
 		}
-	case setting.Database.Type.IsMySQL():
+	case setting.Database.UseMySQL:
 		// Drop indexes on columns first
 		sql := fmt.Sprintf("SHOW INDEX FROM %s WHERE column_name IN ('%s')", tableName, strings.Join(columnNames, "','"))
 		res, err := sess.Query(sql)
@@ -444,7 +444,7 @@ func DropTableColumns(sess *xorm.Session, tableName string, columnNames ...strin
 		if _, err := sess.Exec(fmt.Sprintf("ALTER TABLE `%s` %s", tableName, cols)); err != nil {
 			return fmt.Errorf("Drop table `%s` columns %v: %v", tableName, columnNames, err)
 		}
-	case setting.Database.Type.IsMSSQL():
+	case setting.Database.UseMSSQL:
 		cols := ""
 		for _, col := range columnNames {
 			if cols != "" {
@@ -543,13 +543,13 @@ func newXORMEngine() (*xorm.Engine, error) {
 func deleteDB() error {
 	switch {
-	case setting.Database.Type.IsSQLite3():
+	case setting.Database.UseSQLite3:
 		if err := util.Remove(setting.Database.Path); err != nil {
 			return err
 		}
 		return os.MkdirAll(path.Dir(setting.Database.Path), os.ModePerm)
 
-	case setting.Database.Type.IsMySQL():
+	case setting.Database.UseMySQL:
 		db, err := sql.Open("mysql", fmt.Sprintf("%s:%s@tcp(%s)/",
 			setting.Database.User, setting.Database.Passwd, setting.Database.Host))
 		if err != nil {
@@ -565,7 +565,7 @@ func deleteDB() error {
 			return err
 		}
 		return nil
-	case setting.Database.Type.IsPostgreSQL():
+	case setting.Database.UsePostgreSQL:
 		db, err := sql.Open("postgres", fmt.Sprintf("postgres://%s:%s@%s/?sslmode=%s",
 			setting.Database.User, setting.Database.Passwd, setting.Database.Host, setting.Database.SSLMode))
 		if err != nil {
@@ -612,7 +612,7 @@ func deleteDB() error {
 			}
 			return nil
 		}
-	case setting.Database.Type.IsMSSQL():
+	case setting.Database.UseMSSQL:
 		host, port := setting.ParseMSSQLHostPort(setting.Database.Host)
 		db, err := sql.Open("mssql", fmt.Sprintf("server=%s; port=%s; database=%s; user id=%s; password=%s;",
 			host, port, "master", setting.Database.User, setting.Database.Passwd))

View File

@@ -13,9 +13,9 @@ func PrependRefsHeadsToIssueRefs(x *xorm.Engine) error {
 	var query string
 
 	switch {
-	case setting.Database.Type.IsMSSQL():
+	case setting.Database.UseMSSQL:
 		query = "UPDATE `issue` SET `ref` = 'refs/heads/' + `ref` WHERE `ref` IS NOT NULL AND `ref` <> '' AND `ref` NOT LIKE 'refs/%'"
-	case setting.Database.Type.IsMySQL():
+	case setting.Database.UseMySQL:
 		query = "UPDATE `issue` SET `ref` = CONCAT('refs/heads/', `ref`) WHERE `ref` IS NOT NULL AND `ref` <> '' AND `ref` NOT LIKE 'refs/%';"
 	default:
 		query = "UPDATE `issue` SET `ref` = 'refs/heads/' || `ref` WHERE `ref` IS NOT NULL AND `ref` <> '' AND `ref` NOT LIKE 'refs/%'"

View File

@@ -41,7 +41,7 @@ func FixLanguageStatsToSaveSize(x *xorm.Engine) error {
 	// Delete language stat statuses
 	truncExpr := "TRUNCATE TABLE"
-	if setting.Database.Type.IsSQLite3() {
+	if setting.Database.UseSQLite3 {
 		truncExpr = "DELETE FROM"
 	}

View File

@@ -21,7 +21,7 @@ func IncreaseLanguageField(x *xorm.Engine) error {
 		return err
 	}
 
-	if setting.Database.Type.IsSQLite3() {
+	if setting.Database.UseSQLite3 {
 		// SQLite maps VARCHAR to TEXT without size so we're done
 		return nil
 	}
@@ -41,11 +41,11 @@ func IncreaseLanguageField(x *xorm.Engine) error {
 	}
 
 	switch {
-	case setting.Database.Type.IsMySQL():
+	case setting.Database.UseMySQL:
 		if _, err := sess.Exec(fmt.Sprintf("ALTER TABLE language_stat MODIFY COLUMN language %s", sqlType)); err != nil {
 			return err
 		}
-	case setting.Database.Type.IsMSSQL():
+	case setting.Database.UseMSSQL:
 		// Yet again MSSQL just has to be awkward.
 		// Here we have to drop the constraints first and then rebuild them
 		constraints := make([]string, 0)
@@ -71,7 +71,7 @@ func IncreaseLanguageField(x *xorm.Engine) error {
 		if err := sess.CreateUniques(new(LanguageStat)); err != nil {
 			return err
 		}
-	case setting.Database.Type.IsPostgreSQL():
+	case setting.Database.UsePostgreSQL:
 		if _, err := sess.Exec(fmt.Sprintf("ALTER TABLE language_stat ALTER COLUMN language TYPE %s", sqlType)); err != nil {
 			return err
 		}

View File

@@ -17,13 +17,13 @@ import (
 func SetDefaultPasswordToArgon2(x *xorm.Engine) error {
 	switch {
-	case setting.Database.Type.IsMySQL():
+	case setting.Database.UseMySQL:
 		_, err := x.Exec("ALTER TABLE `user` ALTER passwd_hash_algo SET DEFAULT 'argon2';")
 		return err
-	case setting.Database.Type.IsPostgreSQL():
+	case setting.Database.UsePostgreSQL:
 		_, err := x.Exec("ALTER TABLE `user` ALTER COLUMN passwd_hash_algo SET DEFAULT 'argon2';")
 		return err
-	case setting.Database.Type.IsMSSQL():
+	case setting.Database.UseMSSQL:
 		// need to find the constraint and drop it, then recreate it.
 		sess := x.NewSession()
 		defer sess.Close()
@@ -53,7 +53,7 @@ func SetDefaultPasswordToArgon2(x *xorm.Engine) error {
 		}
 
 		return sess.Commit()
-	case setting.Database.Type.IsSQLite3():
+	case setting.Database.UseSQLite3:
 		// drop through
 	default:
 		log.Fatal("Unrecognized DB")

View File

@@ -62,7 +62,7 @@ func UpdateCodeCommentReplies(x *xorm.Engine) error {
 		return err
 	}
 
-	if setting.Database.Type.IsMSSQL() {
+	if setting.Database.UseMSSQL {
 		if _, err := sess.Exec(sqlSelect + " INTO #temp_comments" + sqlTail); err != nil {
 			log.Error("unable to create temporary table")
 			return err
@@ -72,13 +72,13 @@ func UpdateCodeCommentReplies(x *xorm.Engine) error {
 		comments := make([]*Comment, 0, batchSize)
 
 		switch {
-		case setting.Database.Type.IsMySQL():
+		case setting.Database.UseMySQL:
 			sqlCmd = sqlSelect + sqlTail + " LIMIT " + strconv.Itoa(batchSize) + ", " + strconv.Itoa(start)
-		case setting.Database.Type.IsPostgreSQL():
+		case setting.Database.UsePostgreSQL:
 			fallthrough
-		case setting.Database.Type.IsSQLite3():
+		case setting.Database.UseSQLite3:
 			sqlCmd = sqlSelect + sqlTail + " LIMIT " + strconv.Itoa(batchSize) + " OFFSET " + strconv.Itoa(start)
-		case setting.Database.Type.IsMSSQL():
+		case setting.Database.UseMSSQL:
 			sqlCmd = "SELECT TOP " + strconv.Itoa(batchSize) + " * FROM #temp_comments WHERE " +
 				"(id NOT IN ( SELECT TOP " + strconv.Itoa(start) + " id FROM #temp_comments ORDER BY id )) ORDER BY id"
 		default:

View File

@@ -14,7 +14,7 @@ import (
 )
 
 func FixPostgresIDSequences(x *xorm.Engine) error {
-	if !setting.Database.Type.IsPostgreSQL() {
+	if !setting.Database.UsePostgreSQL {
 		return nil
 	}

View File

@@ -54,11 +54,11 @@ func RenameTaskErrorsToMessage(x *xorm.Engine) error {
 	}
 
 	switch {
-	case setting.Database.Type.IsMySQL():
+	case setting.Database.UseMySQL:
 		if _, err := sess.Exec("ALTER TABLE `task` CHANGE errors message text"); err != nil {
 			return err
 		}
-	case setting.Database.Type.IsMSSQL():
+	case setting.Database.UseMSSQL:
 		if _, err := sess.Exec("sp_rename 'task.errors', 'message', 'COLUMN'"); err != nil {
 			return err
 		}

View File

@@ -16,7 +16,7 @@ func AlterIssueAndCommentTextFieldsToLongText(x *xorm.Engine) error {
 		return err
 	}
 
-	if setting.Database.Type.IsMySQL() {
+	if setting.Database.UseMySQL {
 		if _, err := sess.Exec("ALTER TABLE `issue` CHANGE `content` `content` LONGTEXT"); err != nil {
 			return err
 		}

View File

@@ -16,7 +16,7 @@ func AlterHookTaskTextFieldsToLongText(x *xorm.Engine) error {
 		return err
 	}
 
-	if setting.Database.Type.IsMySQL() {
+	if setting.Database.UseMySQL {
 		if _, err := sess.Exec("ALTER TABLE `hook_task` CHANGE `payload_content` `payload_content` LONGTEXT, CHANGE `request_content` `request_content` LONGTEXT, change `response_content` `response_content` LONGTEXT"); err != nil {
 			return err
 		}

View File

@@ -38,7 +38,7 @@ func (*improveActionTableIndicesAction) TableIndices() []*schemas.Index {
 	actUserIndex := schemas.NewIndex("au_r_c_u_d", schemas.IndexType)
 	actUserIndex.AddColumn("act_user_id", "repo_id", "created_unix", "user_id", "is_deleted")
 	indices := []*schemas.Index{actUserIndex, repoIndex}
-	if setting.Database.Type.IsPostgreSQL() {
+	if setting.Database.UsePostgreSQL {
 		cudIndex := schemas.NewIndex("c_u_d", schemas.IndexType)
 		cudIndex.AddColumn("created_unix", "user_id", "is_deleted")
 		indices = append(indices, cudIndex)

View File

@@ -65,11 +65,11 @@ func RenameCredentialIDBytes(x *xorm.Engine) error {
 	}
 
 	switch {
-	case setting.Database.Type.IsMySQL():
+	case setting.Database.UseMySQL:
 		if _, err := sess.Exec("ALTER TABLE `webauthn_credential` CHANGE credential_id_bytes credential_id VARBINARY(1024)"); err != nil {
 			return err
 		}
-	case setting.Database.Type.IsMSSQL():
+	case setting.Database.UseMSSQL:
 		if _, err := sess.Exec("sp_rename 'webauthn_credential.credential_id_bytes', 'credential_id', 'COLUMN'"); err != nil {
 			return err
 		}

View File

@@ -16,7 +16,7 @@ func AlterPublicGPGKeyContentFieldsToMediumText(x *xorm.Engine) error {
 		return err
 	}
 
-	if setting.Database.Type.IsMySQL() {
+	if setting.Database.UseMySQL {
 		if _, err := sess.Exec("ALTER TABLE `gpg_key` CHANGE `content` `content` MEDIUMTEXT"); err != nil {
 			return err
 		}

View File

@@ -16,7 +16,7 @@ func AlterPackageVersionMetadataToLongText(x *xorm.Engine) error {
 		return err
 	}
 
-	if setting.Database.Type.IsMySQL() {
+	if setting.Database.UseMySQL {
 		if _, err := sess.Exec("ALTER TABLE `package_version` MODIFY COLUMN `metadata_json` LONGTEXT"); err != nil {
 			return err
 		}

View File

@@ -17,7 +17,7 @@ func AlterPublicGPGKeyImportContentFieldToMediumText(x *xorm.Engine) error {
 		return err
 	}
 
-	if setting.Database.Type.IsMySQL() {
+	if setting.Database.UseMySQL {
 		if _, err := sess.Exec("ALTER TABLE `gpg_key_import` CHANGE `content` `content` MEDIUMTEXT"); err != nil {
 			return err
 		}

View File

@@ -416,7 +416,7 @@ func DeleteProjectByID(ctx context.Context, id int64) error {
 func DeleteProjectByRepoID(ctx context.Context, repoID int64) error {
 	switch {
-	case setting.Database.Type.IsSQLite3():
+	case setting.Database.UseSQLite3:
 		if _, err := db.GetEngine(ctx).Exec("DELETE FROM project_issue WHERE project_issue.id IN (SELECT project_issue.id FROM project_issue INNER JOIN project WHERE project.id = project_issue.project_id AND project.repo_id = ?)", repoID); err != nil {
 			return err
 		}
@@ -426,7 +426,7 @@ func DeleteProjectByRepoID(ctx context.Context, repoID int64) error {
 		if _, err := db.GetEngine(ctx).Table("project").Where("repo_id = ? ", repoID).Delete(&Project{}); err != nil {
 			return err
 		}
-	case setting.Database.Type.IsPostgreSQL():
+	case setting.Database.UsePostgreSQL:
 		if _, err := db.GetEngine(ctx).Exec("DELETE FROM project_issue USING project WHERE project.id = project_issue.project_id AND project.repo_id = ? ", repoID); err != nil {
 			return err
 		}

View File

@@ -498,7 +498,7 @@ func SearchRepositoryCondition(opts *SearchRepoOptions) builder.Cond {
 		subQueryCond := builder.NewCond()
 
 		// Topic checking. Topics are present.
-		if setting.Database.Type.IsPostgreSQL() { // postgres stores the topics as json and not as text
+		if setting.Database.UsePostgreSQL { // postgres stores the topics as json and not as text
 			subQueryCond = subQueryCond.Or(builder.And(builder.NotNull{"topics"}, builder.Neq{"(topics)::text": "[]"}))
 		} else {
 			subQueryCond = subQueryCond.Or(builder.And(builder.Neq{"topics": "null"}, builder.Neq{"topics": "[]"}))

View File

@@ -76,7 +76,7 @@ func MainTest(m *testing.M, testOpts *TestOptions) {
 	setting.SSH.BuiltinServerUser = "builtinuser"
 	setting.SSH.Port = 3000
 	setting.SSH.Domain = "try.gitea.io"
-	setting.Database.Type = "sqlite3"
+	setting.Database.UseSQLite3 = true
 	setting.Repository.DefaultBranch = "master" // many test code still assume that default branch is called "master"
 	repoRootPath, err := os.MkdirTemp(os.TempDir(), "repos")
 	if err != nil {

View File

@@ -155,7 +155,7 @@ func checkDBConsistency(ctx context.Context, logger log.Logger, autofix bool) er
 	// TODO: function to recalc all counters
 
-	if setting.Database.Type.IsPostgreSQL() {
+	if setting.Database.UsePostgreSQL {
 		consistencyChecks = append(consistencyChecks, consistencyCheck{
 			Name:    "Sequence values",
 			Counter: db.CountBadSequences,

View File

@@ -27,7 +27,7 @@ var (
 	// Database holds the database settings
 	Database = struct {
-		Type              DatabaseType
+		Type              string
 		Host              string
 		Name              string
 		User              string
@@ -39,6 +39,10 @@
 		Charset           string
 		Timeout           int // seconds
 		SQLiteJournalMode string
+		UseSQLite3        bool
+		UseMySQL          bool
+		UseMSSQL          bool
+		UsePostgreSQL     bool
 		DBConnectRetries  int
 		DBConnectBackoff  time.Duration
 		MaxIdleConns      int
@@ -55,13 +59,24 @@ var (
 // LoadDBSetting loads the database settings
 func LoadDBSetting() {
 	sec := CfgProvider.Section("database")
-	Database.Type = DatabaseType(sec.Key("DB_TYPE").String())
+	Database.Type = sec.Key("DB_TYPE").String()
 	defaultCharset := "utf8"
+	Database.UseMySQL = false
+	Database.UseSQLite3 = false
+	Database.UsePostgreSQL = false
+	Database.UseMSSQL = false
 
-	if Database.Type.IsMySQL() {
+	switch Database.Type {
+	case "sqlite3":
+		Database.UseSQLite3 = true
+	case "mysql":
+		Database.UseMySQL = true
 		defaultCharset = "utf8mb4"
+	case "postgres":
+		Database.UsePostgreSQL = true
+	case "mssql":
+		Database.UseMSSQL = true
 	}
 
 	Database.Host = sec.Key("HOST").String()
 	Database.Name = sec.Key("NAME").String()
 	Database.User = sec.Key("USER").String()
@@ -71,7 +86,7 @@ func LoadDBSetting() {
 	Database.Schema = sec.Key("SCHEMA").String()
 	Database.SSLMode = sec.Key("SSL_MODE").MustString("disable")
 	Database.Charset = sec.Key("CHARSET").In(defaultCharset, []string{"utf8", "utf8mb4"})
-	if Database.Type.IsMySQL() && defaultCharset != "utf8mb4" {
+	if Database.UseMySQL && defaultCharset != "utf8mb4" {
 		log.Error("Deprecated database mysql charset utf8 support, please use utf8mb4 or convert utf8 to utf8mb4.")
 	}
@@ -80,7 +95,7 @@ func LoadDBSetting() {
 	Database.SQLiteJournalMode = sec.Key("SQLITE_JOURNAL_MODE").MustString("")
 
 	Database.MaxIdleConns = sec.Key("MAX_IDLE_CONNS").MustInt(2)
-	if Database.Type.IsMySQL() {
+	if Database.UseMySQL {
 		Database.ConnMaxLifetime = sec.Key("CONN_MAX_LIFETIME").MustDuration(3 * time.Second)
 	} else {
 		Database.ConnMaxLifetime = sec.Key("CONN_MAX_LIFETIME").MustDuration(0)
@@ -192,25 +207,3 @@ func ParseMSSQLHostPort(info string) (string, string) {
 	}
 	return host, port
 }
-
-type DatabaseType string
-
-func (t DatabaseType) String() string {
-	return string(t)
-}
-
-func (t DatabaseType) IsSQLite3() bool {
-	return t == "sqlite3"
-}
-
-func (t DatabaseType) IsMySQL() bool {
-	return t == "mysql"
-}
-
-func (t DatabaseType) IsMSSQL() bool {
-	return t == "mssql"
-}
-
-func (t DatabaseType) IsPostgreSQL() bool {
-	return t == "postgres"
-}

View File

@@ -4,7 +4,6 @@
 package typesniffer
 
 import (
-	"bytes"
 	"fmt"
 	"io"
 	"net/http"
@@ -25,9 +24,8 @@
 )
 
 var (
-	svgComment       = regexp.MustCompile(`(?s)<!--.*?-->`)
-	svgTagRegex      = regexp.MustCompile(`(?si)\A\s*(?:(<!DOCTYPE\s+svg([\s:]+.*?>|>))\s*)*<svg\b`)
-	svgTagInXMLRegex = regexp.MustCompile(`(?si)\A<\?xml\b.*?\?>\s*(?:(<!DOCTYPE\s+svg([\s:]+.*?>|>))\s*)*<svg\b`)
+	svgTagRegex      = regexp.MustCompile(`(?si)\A\s*(?:(<!--.*?-->|<!DOCTYPE\s+svg([\s:]+.*?>|>))\s*)*<svg[\s>\/]`)
+	svgTagInXMLRegex = regexp.MustCompile(`(?si)\A<\?xml\b.*?\?>\s*(?:(<!--.*?-->|<!DOCTYPE\s+svg([\s:]+.*?>|>))\s*)*<svg[\s>\/]`)
 )
 
 // SniffedType contains information about a blobs type.
@@ -93,18 +91,11 @@ func DetectContentType(data []byte) SniffedType {
 		data = data[:sniffLen]
 	}
 
-	// SVG is unsupported by http.DetectContentType, https://github.com/golang/go/issues/15888
-	detectByHTML := strings.Contains(ct, "text/plain") || strings.Contains(ct, "text/html")
-	detectByXML := strings.Contains(ct, "text/xml")
-	if detectByHTML || detectByXML {
-		dataProcessed := svgComment.ReplaceAll(data, nil)
-		dataProcessed = bytes.TrimSpace(dataProcessed)
-		if detectByHTML && svgTagRegex.Match(dataProcessed) ||
-			detectByXML && svgTagInXMLRegex.Match(dataProcessed) {
+	if (strings.Contains(ct, "text/plain") || strings.Contains(ct, "text/html")) && svgTagRegex.Match(data) ||
+		strings.Contains(ct, "text/xml") && svgTagInXMLRegex.Match(data) {
+		// SVG is unsupported. https://github.com/golang/go/issues/15888
 		ct = SvgMimeType
 	}
-	}
 
 	return SniffedType{ct}
 }

View File

@@ -28,6 +28,7 @@ func TestIsSvgImage(t *testing.T) {
 	assert.True(t, DetectContentType([]byte("<svg></svg>")).IsSvgImage())
 	assert.True(t, DetectContentType([]byte(" <svg></svg>")).IsSvgImage())
 	assert.True(t, DetectContentType([]byte(`<svg width="100"></svg>`)).IsSvgImage())
+	assert.True(t, DetectContentType([]byte("<svg/>")).IsSvgImage())
 	assert.True(t, DetectContentType([]byte(`<?xml version="1.0" encoding="UTF-8"?><svg></svg>`)).IsSvgImage())
 	assert.True(t, DetectContentType([]byte(`<!-- Comment -->
 	<svg></svg>`)).IsSvgImage())
@@ -56,10 +57,6 @@ func TestIsSvgImage(t *testing.T) {
 	<!-- Multline
 	Comment -->
 	<svg></svg>`)).IsSvgImage())
-
-	// the DetectContentType should work for incomplete data, because only beginning bytes are used for detection
-	assert.True(t, DetectContentType([]byte(`<svg>....`)).IsSvgImage())
 
 	assert.False(t, DetectContentType([]byte{}).IsSvgImage())
 	assert.False(t, DetectContentType([]byte("svg")).IsSvgImage())
 	assert.False(t, DetectContentType([]byte("<svgfoo></svgfoo>")).IsSvgImage())
@@ -71,26 +68,6 @@ func TestIsSvgImage(t *testing.T) {
 	assert.False(t, DetectContentType([]byte(`<?xml version="1.0" encoding="UTF-8"?>
 	<!-- <svg></svg> inside comment -->
 	<foo></foo>`)).IsSvgImage())
-
-	assert.False(t, DetectContentType([]byte(`
-	<!-- comment1 -->
-	<div>
-		<!-- comment2 -->
-		<svg></svg>
-	</div>
-	`)).IsSvgImage())
-	assert.False(t, DetectContentType([]byte(`
-	<!-- comment1
-	-->
-	<div>
-		<!-- comment2
-	-->
-		<svg></svg>
-	</div>
-	`)).IsSvgImage())
-	assert.False(t, DetectContentType([]byte(`<html><body><!DOCTYPE svg PUBLIC "-//W3C//DTD SVG 1.1//EN" "http://www.w3.org/Graphics/SVG/1.1/DTD/svg11.dtd"><svg></svg></body></html>`)).IsSvgImage())
-	assert.False(t, DetectContentType([]byte(`<html><body><?xml version="1.0" encoding="UTF-8"?><svg></svg></body></html>`)).IsSvgImage())
 }
 
 func TestIsPDF(t *testing.T) {

View File

@@ -141,7 +141,7 @@ func GlobalInitInstalled(ctx context.Context) {
 	if setting.EnableSQLite3 {
 		log.Info("SQLite3 support is enabled")
-	} else if setting.Database.Type.IsSQLite3() {
+	} else if setting.Database.UseSQLite3 {
 		log.Fatal("SQLite3 support is disabled, but it is used for database setting. Please get or build a Gitea release with SQLite3 support.")
 	}
} }

View File

@@ -104,7 +104,7 @@ func Install(ctx *context.Context) {
 	form.DbSchema = setting.Database.Schema
 	form.Charset = setting.Database.Charset
 
-	curDBType := setting.Database.Type.String()
+	curDBType := setting.Database.Type
 	var isCurDBTypeSupported bool
 	for _, dbType := range setting.SupportedDatabaseTypes {
 		if dbType == curDBType {
@@ -272,7 +272,7 @@ func SubmitInstall(ctx *context.Context) {
 	// ---- Basic checks are passed, now test configuration.
 
 	// Test database setting.
-	setting.Database.Type = setting.DatabaseType(form.DbType)
+	setting.Database.Type = form.DbType
 	setting.Database.Host = form.DbHost
 	setting.Database.User = form.DbUser
 	setting.Database.Passwd = form.DbPasswd
@@ -392,7 +392,7 @@ func SubmitInstall(ctx *context.Context) {
 			log.Error("Failed to load custom conf '%s': %v", setting.CustomConf, err)
 		}
 	}
-	cfg.Section("database").Key("DB_TYPE").SetValue(setting.Database.Type.String())
+	cfg.Section("database").Key("DB_TYPE").SetValue(setting.Database.Type)
 	cfg.Section("database").Key("HOST").SetValue(setting.Database.Host)
 	cfg.Section("database").Key("NAME").SetValue(setting.Database.Name)
 	cfg.Section("database").Key("USER").SetValue(setting.Database.User)

View File

@@ -100,7 +100,7 @@ func checkDatabase(checks checks) status {
 		st.Time = getCheckTime()
 	}
 
-	if setting.Database.Type.IsSQLite3() && st.Status == pass {
+	if setting.Database.UseSQLite3 && st.Status == pass {
 		if !setting.EnableSQLite3 {
 			st.Status = fail
 			st.Time = getCheckTime()

View File

@@ -1420,7 +1420,6 @@ func ViewIssue(ctx *context.Context) {
 		marked       = make(map[int64]issues_model.RoleDescriptor)
 		comment      *issues_model.Comment
 		participants = make([]*user_model.User, 1, 10)
-		latestCloseCommentID int64
 	)
 	if ctx.Repo.Repository.IsTimetrackerEnabled(ctx) {
 		if ctx.IsSigned {
@@ -1627,15 +1626,9 @@
 			comment.Type == issues_model.CommentTypeStopTracking {
 			// drop error since times could be pruned from DB..
 			_ = comment.LoadTime()
-		} else if comment.Type == issues_model.CommentTypeClose {
-			// record ID of latest closed comment.
-			// if PR is closed, the comments whose type is CommentTypePullRequestPush(29) after latestCloseCommentID won't be rendered.
-			latestCloseCommentID = comment.ID
 		}
 	}
 
-	ctx.Data["LatestCloseCommentID"] = latestCloseCommentID
-
 	// Combine multiple label assignments into a single comment
 	combineLabelComments(issue)

View File

@@ -587,7 +587,7 @@ func PrepareViewPullInfo(ctx *context.Context, issue *issues_model.Issue) *git.C
 	ctx.Data["HeadBranchCommitID"] = headBranchSha
 	ctx.Data["PullHeadCommitID"] = sha
 
-	if pull.HeadRepo == nil || !headBranchExist || (!pull.Issue.IsClosed && (headBranchSha != sha)) {
+	if pull.HeadRepo == nil || !headBranchExist || headBranchSha != sha {
 		ctx.Data["IsPullRequestBroken"] = true
 		if pull.IsSameRepo() {
 			ctx.Data["HeadTarget"] = pull.HeadBranch

View File

@@ -257,7 +257,7 @@ func AddTestPullRequestTask(doer *user_model.User, repoID int64, branch string,
 	// If you don't let it run all the way then you will lose data
 	// TODO: graceful: AddTestPullRequestTask needs to become a queue!
 
-	prs, err := issues_model.GetUnmergedPullRequestsByHeadInfo(repoID, branch, true)
+	prs, err := issues_model.GetUnmergedPullRequestsByHeadInfo(repoID, branch)
 	if err != nil {
 		log.Error("Find pull requests [head_repo_id: %d, head_branch: %s]: %v", repoID, branch, err)
 		return
@@ -500,7 +500,7 @@ func (errs errlist) Error() string {
 // CloseBranchPulls close all the pull requests who's head branch is the branch
 func CloseBranchPulls(doer *user_model.User, repoID int64, branch string) error {
-	prs, err := issues_model.GetUnmergedPullRequestsByHeadInfo(repoID, branch, false)
+	prs, err := issues_model.GetUnmergedPullRequestsByHeadInfo(repoID, branch)
 	if err != nil {
 		return err
 	}
@@ -536,7 +536,7 @@ func CloseRepoBranchesPulls(ctx context.Context, doer *user_model.User, repo *re
 	var errs errlist
 	for _, branch := range branches {
-		prs, err := issues_model.GetUnmergedPullRequestsByHeadInfo(repo.ID, branch.Name, false)
+		prs, err := issues_model.GetUnmergedPullRequestsByHeadInfo(repo.ID, branch.Name)
 		if err != nil {
 			return err
 		}

View File

@@ -697,10 +697,6 @@
 				</span>
 			</div>
 		{{else if and (eq .Type 29) (or (gt .CommitsNum 0) .IsForcePush)}}
-			<!-- If PR is closed, the comments whose type is CommentTypePullRequestPush(29) after latestCloseCommentID won't be rendered. //-->
-			{{if and .Issue.IsClosed (gt .ID $.LatestCloseCommentID)}}
-				{{continue}}
-			{{end}}
 			<div class="timeline-item event" id="{{.HashTag}}">
 				<span class="badge">{{svg "octicon-repo-push"}}</span>
 				<span class="text grey muted-links">

View File

@@ -19,7 +19,7 @@ import (
 func TestExternalMarkupRenderer(t *testing.T) {
 	defer tests.PrepareTestEnv(t)()
-	if !setting.Database.Type.IsSQLite3() {
+	if !setting.Database.UseSQLite3 {
 		t.Skip()
 		return
 	}

View File

@@ -94,7 +94,7 @@ func availableVersions() ([]string, error) {
 		return nil, err
 	}
 	defer migrationsDir.Close()
-	versionRE, err := regexp.Compile("gitea-v(?P<version>.+)\\." + regexp.QuoteMeta(setting.Database.Type.String()) + "\\.sql.gz")
+	versionRE, err := regexp.Compile("gitea-v(?P<version>.+)\\." + regexp.QuoteMeta(setting.Database.Type) + "\\.sql.gz")
 	if err != nil {
 		return nil, err
 	}
@@ -149,7 +149,7 @@ func restoreOldDB(t *testing.T, version string) bool {
 	}
 
 	switch {
-	case setting.Database.Type.IsSQLite3():
+	case setting.Database.UseSQLite3:
 		util.Remove(setting.Database.Path)
 		err := os.MkdirAll(path.Dir(setting.Database.Path), os.ModePerm)
 		assert.NoError(t, err)
@@ -162,7 +162,7 @@ func restoreOldDB(t *testing.T, version string) bool {
 		assert.NoError(t, err)
 		db.Close()
 
-	case setting.Database.Type.IsMySQL():
+	case setting.Database.UseMySQL:
 		db, err := sql.Open("mysql", fmt.Sprintf("%s:%s@tcp(%s)/",
 			setting.Database.User, setting.Database.Passwd, setting.Database.Host))
 		assert.NoError(t, err)
@@ -184,7 +184,7 @@ func restoreOldDB(t *testing.T, version string) bool {
 		assert.NoError(t, err)
 		db.Close()
 
-	case setting.Database.Type.IsPostgreSQL():
+	case setting.Database.UsePostgreSQL:
 		var db *sql.DB
 		var err error
 		if setting.Database.Host[0] == '/' {
@@ -252,7 +252,7 @@ func restoreOldDB(t *testing.T, version string) bool {
 		assert.NoError(t, err)
 		db.Close()
 
-	case setting.Database.Type.IsMSSQL():
+	case setting.Database.UseMSSQL:
 		host, port := setting.ParseMSSQLHostPort(setting.Database.Host)
 		db, err := sql.Open("mssql", fmt.Sprintf("server=%s; port=%s; database=%s; user id=%s; password=%s;",
 			host, port, "master", setting.Database.User, setting.Database.Passwd))

View File

@@ -72,7 +72,7 @@ func InitTest(requireGitea bool) {
 	}
 
 	switch {
-	case setting.Database.Type.IsMySQL():
+	case setting.Database.UseMySQL:
 		connType := "tcp"
 		if len(setting.Database.Host) > 0 && setting.Database.Host[0] == '/' { // looks like a unix socket
 			connType = "unix"
@@ -87,7 +87,7 @@ func InitTest(requireGitea bool) {
 		if _, err = db.Exec(fmt.Sprintf("CREATE DATABASE IF NOT EXISTS %s", setting.Database.Name)); err != nil {
 			log.Fatal("db.Exec: %v", err)
 		}
-	case setting.Database.Type.IsPostgreSQL():
+	case setting.Database.UsePostgreSQL:
 		var db *sql.DB
 		var err error
 		if setting.Database.Host[0] == '/' {
@@ -144,7 +144,7 @@ func InitTest(requireGitea bool) {
 			}
 		}
 
-	case setting.Database.Type.IsMSSQL():
+	case setting.Database.UseMSSQL:
 		host, port := setting.ParseMSSQLHostPort(setting.Database.Host)
 		db, err := sql.Open("mssql", fmt.Sprintf("server=%s; port=%s; database=%s; user id=%s; password=%s;",
 			host, port, "master", setting.Database.User, setting.Database.Passwd))

View File

@@ -129,8 +129,8 @@ export function initImageDiff() {
 			initOverlay(createContext($imageAfter[2], $imageBefore[2]));
 		}
-		hideElem($container.find('> .loader'));
 		$container.find('> .gt-hidden').removeClass('gt-hidden');
+		hideElem($container.find('.ui.loader'));
 	}
 
 	function initSideBySide(sizes) {
@@ -155,7 +155,7 @@ export function initImageDiff() {
 			height: sizes.size1.height * factor
 		});
 		sizes.image1.parent().css({
-			margin: `10px auto`,
+			margin: `${sizes.ratio[1] * factor + 15}px ${sizes.ratio[0] * factor}px ${sizes.ratio[1] * factor}px`,
 			width: sizes.size1.width * factor + 2,
 			height: sizes.size1.height * factor + 2
 		});
@@ -164,7 +164,7 @@ export function initImageDiff() {
 			height: sizes.size2.height * factor
 		});
 		sizes.image2.parent().css({
-			margin: `10px auto`,
+			margin: `${sizes.ratio[3] * factor}px ${sizes.ratio[2] * factor}px`,
 			width: sizes.size2.width * factor + 2,
 			height: sizes.size2.height * factor + 2
 		});
@@ -255,12 +255,13 @@ export function initImageDiff() {
 			width: sizes.size2.width * factor + 2,
 			height: sizes.size2.height * factor + 2
 		});
-		// some inner elements are `position: absolute`, so the container's height must be large enough
-		// the "css(width, height)" is somewhat hacky and not easy to understand, it could be improved in the future
 		sizes.image2.parent().parent().css({
 			width: sizes.max.width * factor + 2,
-			height: sizes.max.height * factor + 2 + 20 /* extra height for inner "position: absolute" elements */,
+			height: sizes.max.height * factor + 2
+		});
+		$container.find('.onion-skin').css({
+			width: sizes.max.width * factor + 2,
+			height: sizes.max.height * factor + 4
 		});
 
 		const $range = $container.find("input[type='range']");

View File

@@ -80,8 +80,7 @@ export function svg(name, size = 16, className = '') {
 	const svgNode = document.firstChild;
 	if (size !== 16) svgNode.setAttribute('width', String(size));
 	if (size !== 16) svgNode.setAttribute('height', String(size));
-	// filter array to remove empty string
-	if (className) svgNode.classList.add(...className.split(/\s+/).filter(Boolean));
+	if (className) svgNode.classList.add(...className.split(/\s+/));
 	return serializer.serializeToString(svgNode);
 }

View File

@@ -1,6 +1,6 @@
 .image-diff-container {
 	text-align: center;
-	padding: 1em 0;
+	padding: 30px 0;
 
 	img {
 		border: 1px solid var(--color-primary-light-7);
@@ -22,7 +22,6 @@
 		display: inline-block;
 		line-height: 0;
 		vertical-align: top;
-		margin: 0 1em;
 
 		.side-header {
 			font-weight: bold;
@@ -99,7 +98,7 @@
 		}
 
 		input {
-			max-width: 300px;
+			width: 300px;
 		}
 	}
 }