Mirror of https://github.com/go-gitea/gitea, synced 2024-11-21 21:23:36 +01:00

Merge branch 'main' into lunny/fix_edit_team

Lunny Xiao 2024-10-30 10:59:20 -07:00
commit c2ff613582
71 changed files with 1929 additions and 1640 deletions


@@ -324,6 +324,10 @@ RUN_USER = ; git
;; Maximum number of locks returned per page
;LFS_LOCKS_PAGING_NUM = 50
;;
;; When clients make lfs batch requests, reject them if there are more pointers than this number
;; zero means 'unlimited'
;LFS_MAX_BATCH_SIZE = 0
;;
;; Allow graceful restarts using SIGHUP to fork
;ALLOW_GRACEFUL_RESTARTS = true
;;
@@ -2638,6 +2642,10 @@ LEVEL = Info
;; override the azure blob base path if storage type is azureblob
;AZURE_BLOB_BASE_PATH = lfs/
;[lfs_client]
;; When mirroring an upstream lfs endpoint, limit the number of pointers in each batch request to this number
;BATCH_SIZE = 20
;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;
;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;
;; settings for packages, will override storage setting
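
The two new settings above form a pair: the server-side LFS_MAX_BATCH_SIZE caps how many pointers an incoming LFS batch request may carry (zero meaning unlimited), while the client-side [lfs_client] BATCH_SIZE limits how many pointers Gitea itself puts into each batch request when mirroring an upstream LFS endpoint. A minimal standalone sketch of both rules, using hypothetical helper names rather than Gitea's actual code:

package main

import "fmt"

type pointer struct {
	Oid  string
	Size int64
}

// checkServerBatchSize mimics a server-side LFS_MAX_BATCH_SIZE check:
// zero means unlimited, otherwise oversized batch requests are rejected.
func checkServerBatchSize(pointers []pointer, maxBatchSize int) error {
	if maxBatchSize > 0 && len(pointers) > maxBatchSize {
		return fmt.Errorf("%d pointers exceed the batch limit of %d", len(pointers), maxBatchSize)
	}
	return nil
}

// splitClientBatches mimics a client-side BATCH_SIZE: pointers are split into
// chunks of at most batchSize before each batch request to the upstream.
func splitClientBatches(pointers []pointer, batchSize int) [][]pointer {
	if batchSize <= 0 {
		return [][]pointer{pointers}
	}
	var batches [][]pointer
	for len(pointers) > 0 {
		n := min(batchSize, len(pointers))
		batches = append(batches, pointers[:n])
		pointers = pointers[n:]
	}
	return batches
}

func main() {
	ps := make([]pointer, 45)
	fmt.Println(checkServerBatchSize(ps, 0))     // <nil>: zero means unlimited
	fmt.Println(checkServerBatchSize(ps, 30))    // error: 45 pointers exceed the limit of 30
	fmt.Println(len(splitClientBatches(ps, 20))) // 3 batches: 20 + 20 + 5
}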


@@ -6,6 +6,12 @@ package db
import (
"context"
"database/sql"
"errors"
"runtime"
"slices"
"sync"
"code.gitea.io/gitea/modules/setting"
"xorm.io/builder"
"xorm.io/xorm"
@@ -15,45 +21,23 @@ import (
// will be overwritten by Init with HammerContext
var DefaultContext context.Context
// contextKey is a value for use with context.WithValue.
type contextKey struct {
name string
}
type engineContextKeyType struct{}
// enginedContextKey is a context key. It is used with context.Value() to get the current Engined for the context
var (
enginedContextKey = &contextKey{"engined"}
_ Engined = &Context{}
)
var engineContextKey = engineContextKeyType{}
// Context represents a db context
type Context struct {
context.Context
e Engine
transaction bool
engine Engine
}
func newContext(ctx context.Context, e Engine, transaction bool) *Context {
return &Context{
Context: ctx,
e: e,
transaction: transaction,
}
}
// InTransaction if context is in a transaction
func (ctx *Context) InTransaction() bool {
return ctx.transaction
}
// Engine returns db engine
func (ctx *Context) Engine() Engine {
return ctx.e
func newContext(ctx context.Context, e Engine) *Context {
return &Context{Context: ctx, engine: e}
}
// Value shadows Value for context.Context but allows us to get ourselves and an Engined object
func (ctx *Context) Value(key any) any {
if key == enginedContextKey {
if key == engineContextKey {
return ctx
}
return ctx.Context.Value(key)
@@ -61,30 +45,66 @@ func (ctx *Context) Value(key any) any {
// WithContext returns this engine tied to this context
func (ctx *Context) WithContext(other context.Context) *Context {
return newContext(ctx, ctx.e.Context(other), ctx.transaction)
return newContext(ctx, ctx.engine.Context(other))
}
// Engined structs provide an Engine
type Engined interface {
Engine() Engine
var (
contextSafetyOnce sync.Once
contextSafetyDeniedFuncPCs []uintptr
)
func contextSafetyCheck(e Engine) {
if setting.IsProd && !setting.IsInTesting {
return
}
if e == nil {
return
}
// Only do this check for non-end-users. If the problem could be fixed in the future, this code could be removed.
contextSafetyOnce.Do(func() {
// try to figure out the bad functions to deny
type m struct{}
_ = e.SQL("SELECT 1").Iterate(&m{}, func(int, any) error {
callers := make([]uintptr, 32)
callerNum := runtime.Callers(1, callers)
for i := 0; i < callerNum; i++ {
if funcName := runtime.FuncForPC(callers[i]).Name(); funcName == "xorm.io/xorm.(*Session).Iterate" {
contextSafetyDeniedFuncPCs = append(contextSafetyDeniedFuncPCs, callers[i])
}
}
return nil
})
if len(contextSafetyDeniedFuncPCs) != 1 {
panic(errors.New("unable to determine the functions to deny"))
}
})
// it should be very fast: xxxx ns/op
callers := make([]uintptr, 32)
callerNum := runtime.Callers(3, callers) // skip 3: runtime.Callers, contextSafetyCheck, GetEngine
for i := 0; i < callerNum; i++ {
if slices.Contains(contextSafetyDeniedFuncPCs, callers[i]) {
panic(errors.New("using database context in an iterator would cause corrupted results"))
}
}
}
// GetEngine will get a db Engine from this context or return an Engine restricted to this context
// GetEngine gets an existing db Engine/Statement or creates a new Session
func GetEngine(ctx context.Context) Engine {
if e := getEngine(ctx); e != nil {
if e := getExistingEngine(ctx); e != nil {
return e
}
return x.Context(ctx)
}
// getEngine will get a db Engine from this context or return nil
func getEngine(ctx context.Context) Engine {
if engined, ok := ctx.(Engined); ok {
return engined.Engine()
// getExistingEngine gets an existing db Engine/Statement from this context or returns nil
func getExistingEngine(ctx context.Context) (e Engine) {
defer func() { contextSafetyCheck(e) }()
if engined, ok := ctx.(*Context); ok {
return engined.engine
}
enginedInterface := ctx.Value(enginedContextKey)
if enginedInterface != nil {
return enginedInterface.(Engined).Engine()
if engined, ok := ctx.Value(engineContextKey).(*Context); ok {
return engined.engine
}
return nil
}
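
The contextSafetyCheck function above relies on a runtime.Callers trick: it records, once, the program counter of xorm's Iterate frame by invoking a probe callback, and later panics if that PC appears on the stack of a GetEngine call. A minimal standalone sketch of the same technique, with hypothetical names and no XORM dependency:

package main

import (
	"errors"
	"fmt"
	"runtime"
	"slices"
	"sync"
)

var (
	deniedOnce sync.Once
	deniedPCs  []uintptr
)

// insideIterator stands in for xorm's Iterate: it invokes a callback, and we want to
// forbid calling guard() from inside that callback.
//
//go:noinline
func insideIterator(probe func()) { probe() }

// recordDeniedPC runs a probe callback once and remembers the program counter of the
// insideIterator frame, mirroring contextSafetyCheck's one-time setup.
func recordDeniedPC() {
	deniedOnce.Do(func() {
		insideIterator(func() {
			callers := make([]uintptr, 32)
			n := runtime.Callers(1, callers)
			for i := 0; i < n; i++ {
				if runtime.FuncForPC(callers[i]).Name() == "main.insideIterator" {
					deniedPCs = append(deniedPCs, callers[i])
				}
			}
		})
	})
}

// guard panics when any recorded "denied" PC appears in the current call stack.
func guard() {
	callers := make([]uintptr, 32)
	n := runtime.Callers(2, callers) // skip runtime.Callers and guard itself
	for i := 0; i < n; i++ {
		if slices.Contains(deniedPCs, callers[i]) {
			panic(errors.New("guarded function called from inside the iterator"))
		}
	}
}

func main() {
	recordDeniedPC()
	guard() // allowed: not called through insideIterator
	defer func() { fmt.Println("recovered:", recover()) }()
	insideIterator(guard) // denied: insideIterator's PC is on the stack
}
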
@@ -132,23 +152,23 @@ func (c *halfCommitter) Close() error {
// d. It doesn't mean rollback is forbidden, but always do it only when there is an error, and you do want to rollback.
func TxContext(parentCtx context.Context) (*Context, Committer, error) {
if sess, ok := inTransaction(parentCtx); ok {
return newContext(parentCtx, sess, true), &halfCommitter{committer: sess}, nil
return newContext(parentCtx, sess), &halfCommitter{committer: sess}, nil
}
sess := x.NewSession()
if err := sess.Begin(); err != nil {
sess.Close()
_ = sess.Close()
return nil, nil, err
}
return newContext(DefaultContext, sess, true), sess, nil
return newContext(DefaultContext, sess), sess, nil
}
// WithTx represents executing database operations on a transaction, if the transaction exist,
// this function will reuse it otherwise will create a new one and close it when finished.
func WithTx(parentCtx context.Context, f func(ctx context.Context) error) error {
if sess, ok := inTransaction(parentCtx); ok {
err := f(newContext(parentCtx, sess, true))
err := f(newContext(parentCtx, sess))
if err != nil {
// rollback immediately, in case the caller ignores returned error and tries to commit the transaction.
_ = sess.Close()
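
The comments above describe WithTx's reuse-or-create behaviour: if the parent context already carries a transaction it is reused, otherwise a new one is opened and committed or rolled back when the callback returns. A toy standalone sketch of that shape, with hypothetical names and a fake transaction in place of *xorm.Session:

package main

import (
	"context"
	"errors"
	"fmt"
)

// fakeTx is a toy stand-in for *xorm.Session that just records statements.
type fakeTx struct{ stmts []string }

type txCtxKey struct{}

// withTx mirrors the documented behaviour: reuse a transaction already carried by the
// context, otherwise create one, commit on success and roll back on error.
func withTx(ctx context.Context, f func(ctx context.Context) error) error {
	if _, ok := ctx.Value(txCtxKey{}).(*fakeTx); ok {
		return f(ctx) // nested call: run inside the caller's transaction
	}
	t := &fakeTx{}
	if err := f(context.WithValue(ctx, txCtxKey{}, t)); err != nil {
		fmt.Println("rollback:", err)
		return err
	}
	fmt.Println("commit:", t.stmts)
	return nil
}

// exec records a statement on the transaction carried by ctx, if any.
func exec(ctx context.Context, stmt string) {
	if t, ok := ctx.Value(txCtxKey{}).(*fakeTx); ok {
		t.stmts = append(t.stmts, stmt)
	}
}

func main() {
	_ = withTx(context.Background(), func(ctx context.Context) error {
		exec(ctx, "INSERT a")
		return withTx(ctx, func(ctx context.Context) error {
			exec(ctx, "INSERT b") // reuses the outer transaction
			return nil
		})
	}) // prints: commit: [INSERT a INSERT b]
	_ = withTx(context.Background(), func(ctx context.Context) error {
		exec(ctx, "INSERT c")
		return errors.New("boom")
	}) // prints: rollback: boom
}
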
@@ -165,7 +185,7 @@ func txWithNoCheck(parentCtx context.Context, f func(ctx context.Context) error)
return err
}
if err := f(newContext(parentCtx, sess, true)); err != nil {
if err := f(newContext(parentCtx, sess)); err != nil {
return err
}
@@ -312,7 +332,7 @@ func InTransaction(ctx context.Context) bool {
}
func inTransaction(ctx context.Context) (*xorm.Session, bool) {
e := getEngine(ctx)
e := getExistingEngine(ctx)
if e == nil {
return nil, false
}


@@ -84,3 +84,47 @@ func TestTxContext(t *testing.T) {
}))
}
}
func TestContextSafety(t *testing.T) {
type TestModel1 struct {
ID int64
}
type TestModel2 struct {
ID int64
}
assert.NoError(t, unittest.GetXORMEngine().Sync(&TestModel1{}, &TestModel2{}))
assert.NoError(t, db.TruncateBeans(db.DefaultContext, &TestModel1{}, &TestModel2{}))
testCount := 10
for i := 1; i <= testCount; i++ {
assert.NoError(t, db.Insert(db.DefaultContext, &TestModel1{ID: int64(i)}))
assert.NoError(t, db.Insert(db.DefaultContext, &TestModel2{ID: int64(-i)}))
}
actualCount := 0
// here: db.GetEngine(db.DefaultContext) is a new *Session created from *Engine
_ = db.WithTx(db.DefaultContext, func(ctx context.Context) error {
_ = db.GetEngine(ctx).Iterate(&TestModel1{}, func(i int, bean any) error {
// here: db.GetEngine(ctx) is always the unclosed "Iterate" *Session with autoResetStatement=false,
// and the internal states (including "cond" and others) are always there and not be reset in this callback.
m1 := bean.(*TestModel1)
assert.EqualValues(t, i+1, m1.ID)
// here: XORM bug, it fails because the SQL becomes "WHERE id=-1", "WHERE id=-1 AND id=-2", "WHERE id=-1 AND id=-2 AND id=-3" ...
// and it conflicts with the "Iterate"'s internal states.
// has, err := db.GetEngine(ctx).Get(&TestModel2{ID: -m1.ID})
actualCount++
return nil
})
return nil
})
assert.EqualValues(t, testCount, actualCount)
// deny the bad usages
assert.PanicsWithError(t, "using database context in an iterator would cause corrupted results", func() {
_ = unittest.GetXORMEngine().Iterate(&TestModel1{}, func(i int, bean any) error {
_ = db.GetEngine(db.DefaultContext)
return nil
})
})
}


@@ -161,10 +161,7 @@ func InitEngine(ctx context.Context) error {
// SetDefaultEngine sets the default engine for db
func SetDefaultEngine(ctx context.Context, eng *xorm.Engine) {
x = eng
DefaultContext = &Context{
Context: ctx,
e: x,
}
DefaultContext = &Context{Context: ctx, engine: x}
}
// UnsetDefaultEngine closes and unsets the default engine


@@ -11,7 +11,7 @@ import (
)
func getXORMEngine() *xorm.Engine {
return db.DefaultContext.(*db.Context).Engine().(*xorm.Engine)
return db.GetEngine(db.DefaultContext).(*xorm.Engine)
}
// CheckDatabaseConnection checks the database connection


@@ -11,7 +11,7 @@ import (
"xorm.io/builder"
)
// Iterate iterate all the Bean object
// Iterate iterates all the Bean object
func Iterate[Bean any](ctx context.Context, cond builder.Cond, f func(ctx context.Context, bean *Bean) error) error {
var start int
batchSize := setting.Database.IterateBufferSize
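
db.Iterate only appears here as a signature plus the IterateBufferSize read, so a small standalone sketch of the batched-iteration shape it follows may help: fetch beans in fixed-size pages and invoke the callback once per bean. The fetch helper below is a hypothetical stand-in for the real query, not Gitea code:

package main

import (
	"context"
	"fmt"
)

// iterate pages through fetch in chunks of batchSize (assumed positive) and calls f
// once per bean; fetch is expected to return at most limit items starting at start.
func iterate[Bean any](ctx context.Context, batchSize int, fetch func(start, limit int) ([]Bean, error), f func(ctx context.Context, bean *Bean) error) error {
	for start := 0; ; start += batchSize {
		beans, err := fetch(start, batchSize)
		if err != nil {
			return err
		}
		for i := range beans {
			if err := f(ctx, &beans[i]); err != nil {
				return err
			}
		}
		if len(beans) < batchSize {
			return nil // short page: nothing left to fetch
		}
	}
}

func main() {
	data := []int{1, 2, 3, 4, 5}
	fetch := func(start, limit int) ([]int, error) {
		if start >= len(data) {
			return nil, nil
		}
		return data[start:min(start+limit, len(data))], nil
	}
	_ = iterate(context.Background(), 2, fetch, func(_ context.Context, n *int) error {
		fmt.Println(*n) // 1 2 3 4 5, fetched two at a time
		return nil
	})
}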


@@ -63,6 +63,7 @@ type ProtectedBranch struct {
RequireSignedCommits bool `xorm:"NOT NULL DEFAULT false"`
ProtectedFilePatterns string `xorm:"TEXT"`
UnprotectedFilePatterns string `xorm:"TEXT"`
BlockAdminMergeOverride bool `xorm:"NOT NULL DEFAULT false"`
CreatedUnix timeutil.TimeStamp `xorm:"created"`
UpdatedUnix timeutil.TimeStamp `xorm:"updated"`
@@ -83,14 +84,20 @@ func IsRuleNameSpecial(ruleName string) bool {
}
func (protectBranch *ProtectedBranch) loadGlob() {
if protectBranch.globRule == nil {
var err error
protectBranch.globRule, err = glob.Compile(protectBranch.RuleName, '/')
if err != nil {
log.Warn("Invalid glob rule for ProtectedBranch[%d]: %s %v", protectBranch.ID, protectBranch.RuleName, err)
protectBranch.globRule = glob.MustCompile(glob.QuoteMeta(protectBranch.RuleName), '/')
}
protectBranch.isPlainName = !IsRuleNameSpecial(protectBranch.RuleName)
if protectBranch.isPlainName || protectBranch.globRule != nil {
return
}
// detect if it is not glob
if !IsRuleNameSpecial(protectBranch.RuleName) {
protectBranch.isPlainName = true
return
}
// now we load the glob
var err error
protectBranch.globRule, err = glob.Compile(protectBranch.RuleName, '/')
if err != nil {
log.Warn("Invalid glob rule for ProtectedBranch[%d]: %s %v", protectBranch.ID, protectBranch.RuleName, err)
protectBranch.globRule = glob.MustCompile(glob.QuoteMeta(protectBranch.RuleName), '/')
}
}
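
The rewritten loadGlob above compiles the rule lazily and, when the pattern is invalid, falls back to a literal match by quoting the rule name. A minimal sketch of that fallback, assuming the github.com/gobwas/glob package used here:

package main

import (
	"fmt"

	"github.com/gobwas/glob"
)

// compileRule compiles ruleName as a glob with '/' as the separator; if the pattern
// is invalid, it falls back to matching the rule name literally, as loadGlob does.
func compileRule(ruleName string) glob.Glob {
	g, err := glob.Compile(ruleName, '/')
	if err != nil {
		// QuoteMeta escapes the glob metacharacters, so the fallback matches the exact name.
		return glob.MustCompile(glob.QuoteMeta(ruleName), '/')
	}
	return g
}

func main() {
	fmt.Println(compileRule("release/*").Match("release/v1")) // true
	fmt.Println(compileRule("main").Match("main"))            // true
}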


@@ -74,3 +74,32 @@ func TestBranchRuleMatchPriority(t *testing.T) {
}
}
}
func TestBranchRuleSort(t *testing.T) {
in := []*ProtectedBranch{{
RuleName: "b",
CreatedUnix: 1,
}, {
RuleName: "b/*",
CreatedUnix: 3,
}, {
RuleName: "a/*",
CreatedUnix: 2,
}, {
RuleName: "c",
CreatedUnix: 0,
}, {
RuleName: "a",
CreatedUnix: 4,
}}
expect := []string{"c", "b", "a", "a/*", "b/*"}
pbr := ProtectedBranchRules(in)
pbr.sort()
var got []string
for i := range pbr {
got = append(got, pbr[i].RuleName)
}
assert.Equal(t, expect, got)
}
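
The expected order in this test suggests the sort puts plain branch names before glob rules and breaks ties by creation time. The comparator below is only an inference from the test, with toy types, not a copy of ProtectedBranchRules.sort:

package main

import (
	"fmt"
	"sort"
	"strings"
)

type rule struct {
	Name        string
	CreatedUnix int64
}

// isPlain approximates IsRuleNameSpecial's intent for this sketch: a rule without
// glob metacharacters is a plain branch name.
func isPlain(name string) bool { return !strings.ContainsAny(name, "*?[]{}") }

// sortRules orders rules the way TestBranchRuleSort expects: plain names first,
// then glob patterns, ties broken by creation time (oldest first).
func sortRules(rules []rule) {
	sort.Slice(rules, func(i, j int) bool {
		pi, pj := isPlain(rules[i].Name), isPlain(rules[j].Name)
		if pi != pj {
			return pi
		}
		return rules[i].CreatedUnix < rules[j].CreatedUnix
	})
}

func main() {
	rules := []rule{{"b", 1}, {"b/*", 3}, {"a/*", 2}, {"c", 0}, {"a", 4}}
	sortRules(rules)
	for _, r := range rules {
		fmt.Print(r.Name, " ") // c b a a/* b/*
	}
	fmt.Println()
}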


@@ -36,25 +36,15 @@ import (
const minDBVersion = 70 // Gitea 1.5.3
// Migration describes on migration from lower version to high version
type Migration interface {
Description() string
Migrate(*xorm.Engine) error
}
type migration struct {
idNumber int64 // DB version is "the last migration's idNumber" + 1
description string
migrate func(*xorm.Engine) error
}
// NewMigration creates a new migration
func NewMigration(desc string, fn func(*xorm.Engine) error) Migration {
return &migration{desc, fn}
}
// Description returns the migration's description
func (m *migration) Description() string {
return m.description
// newMigration creates a new migration
func newMigration(idNumber int64, desc string, fn func(*xorm.Engine) error) *migration {
return &migration{idNumber, desc, fn}
}
// Migrate executes the migration
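
With the explicit idNumber on each migration, the stored database version is simply the last applied migration's idNumber plus one, so selecting the pending migrations is a single comparison. An illustrative sketch of that selection rule, using toy types rather than the actual migration engine:

package main

import "fmt"

// toyMigration mirrors the fields used above: an explicit idNumber plus a
// description; the real migrate func is omitted.
type toyMigration struct {
	idNumber    int64
	description string
}

// pendingMigrations returns the migrations that still need to run, given the stored
// Version value, which is "the last applied migration's idNumber" + 1.
func pendingMigrations(all []toyMigration, currentVersion int64) []toyMigration {
	var pending []toyMigration
	for _, m := range all {
		if m.idNumber >= currentVersion {
			pending = append(pending, m)
		}
	}
	return pending
}

func main() {
	all := []toyMigration{
		{70, "add issue_dependencies"},
		{71, "protect each scratch token"},
		{72, "add review"},
	}
	// A database at version 72 has applied idNumbers 70 and 71; only 72 is pending.
	for _, m := range pendingMigrations(all, 72) {
		fmt.Println(m.idNumber, m.description)
	}
	// After running it, the stored version becomes 72 + 1 = 73.
}
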
@@ -65,544 +55,317 @@ func (m *migration) Migrate(x *xorm.Engine) error {
// Version describes the version table. Should have only one row with id==1
type Version struct {
ID int64 `xorm:"pk autoincr"`
Version int64
Version int64 // DB version is "the last migration's idNumber" + 1
}
// Use noopMigration when there is a migration that has been no-oped
var noopMigration = func(_ *xorm.Engine) error { return nil }
var preparedMigrations []*migration
// This is a sequence of migrations. Add new migrations to the bottom of the list.
// If you want to "retire" a migration, remove it from the top of the list and
// update minDBVersion accordingly
var migrations = []Migration{
// Gitea 1.5.0 ends at v69
func prepareMigrationTasks() []*migration {
if preparedMigrations != nil {
return preparedMigrations
}
preparedMigrations = []*migration{
// Gitea 1.5.0 ends at database version 69
// v70 -> v71
NewMigration("add issue_dependencies", v1_6.AddIssueDependencies),
// v71 -> v72
NewMigration("protect each scratch token", v1_6.AddScratchHash),
// v72 -> v73
NewMigration("add review", v1_6.AddReview),
newMigration(70, "add issue_dependencies", v1_6.AddIssueDependencies),
newMigration(71, "protect each scratch token", v1_6.AddScratchHash),
newMigration(72, "add review", v1_6.AddReview),
// Gitea 1.6.0 ends at v73
// Gitea 1.6.0 ends at database version 73
// v73 -> v74
NewMigration("add must_change_password column for users table", v1_7.AddMustChangePassword),
// v74 -> v75
NewMigration("add approval whitelists to protected branches", v1_7.AddApprovalWhitelistsToProtectedBranches),
// v75 -> v76
NewMigration("clear nonused data which not deleted when user was deleted", v1_7.ClearNonusedData),
newMigration(73, "add must_change_password column for users table", v1_7.AddMustChangePassword),
newMigration(74, "add approval whitelists to protected branches", v1_7.AddApprovalWhitelistsToProtectedBranches),
newMigration(75, "clear nonused data which not deleted when user was deleted", v1_7.ClearNonusedData),
// Gitea 1.7.0 ends at v76
// Gitea 1.7.0 ends at database version 76
// v76 -> v77
NewMigration("add pull request rebase with merge commit", v1_8.AddPullRequestRebaseWithMerge),
// v77 -> v78
NewMigration("add theme to users", v1_8.AddUserDefaultTheme),
// v78 -> v79
NewMigration("rename repo is_bare to repo is_empty", v1_8.RenameRepoIsBareToIsEmpty),
// v79 -> v80
NewMigration("add can close issues via commit in any branch", v1_8.AddCanCloseIssuesViaCommitInAnyBranch),
// v80 -> v81
NewMigration("add is locked to issues", v1_8.AddIsLockedToIssues),
// v81 -> v82
NewMigration("update U2F counter type", v1_8.ChangeU2FCounterType),
newMigration(76, "add pull request rebase with merge commit", v1_8.AddPullRequestRebaseWithMerge),
newMigration(77, "add theme to users", v1_8.AddUserDefaultTheme),
newMigration(78, "rename repo is_bare to repo is_empty", v1_8.RenameRepoIsBareToIsEmpty),
newMigration(79, "add can close issues via commit in any branch", v1_8.AddCanCloseIssuesViaCommitInAnyBranch),
newMigration(80, "add is locked to issues", v1_8.AddIsLockedToIssues),
newMigration(81, "update U2F counter type", v1_8.ChangeU2FCounterType),
// Gitea 1.8.0 ends at v82
// Gitea 1.8.0 ends at database version 82
// v82 -> v83
NewMigration("hot fix for wrong release sha1 on release table", v1_9.FixReleaseSha1OnReleaseTable),
// v83 -> v84
NewMigration("add uploader id for table attachment", v1_9.AddUploaderIDForAttachment),
// v84 -> v85
NewMigration("add table to store original imported gpg keys", v1_9.AddGPGKeyImport),
// v85 -> v86
NewMigration("hash application token", v1_9.HashAppToken),
// v86 -> v87
NewMigration("add http method to webhook", v1_9.AddHTTPMethodToWebhook),
// v87 -> v88
NewMigration("add avatar field to repository", v1_9.AddAvatarFieldToRepository),
newMigration(82, "hot fix for wrong release sha1 on release table", v1_9.FixReleaseSha1OnReleaseTable),
newMigration(83, "add uploader id for table attachment", v1_9.AddUploaderIDForAttachment),
newMigration(84, "add table to store original imported gpg keys", v1_9.AddGPGKeyImport),
newMigration(85, "hash application token", v1_9.HashAppToken),
newMigration(86, "add http method to webhook", v1_9.AddHTTPMethodToWebhook),
newMigration(87, "add avatar field to repository", v1_9.AddAvatarFieldToRepository),
// Gitea 1.9.0 ends at v88
// Gitea 1.9.0 ends at database version 88
// v88 -> v89
NewMigration("add commit status context field to commit_status", v1_10.AddCommitStatusContext),
// v89 -> v90
NewMigration("add original author/url migration info to issues, comments, and repo ", v1_10.AddOriginalMigrationInfo),
// v90 -> v91
NewMigration("change length of some repository columns", v1_10.ChangeSomeColumnsLengthOfRepo),
// v91 -> v92
NewMigration("add index on owner_id of repository and type, review_id of comment", v1_10.AddIndexOnRepositoryAndComment),
// v92 -> v93
NewMigration("remove orphaned repository index statuses", v1_10.RemoveLingeringIndexStatus),
// v93 -> v94
NewMigration("add email notification enabled preference to user", v1_10.AddEmailNotificationEnabledToUser),
// v94 -> v95
NewMigration("add enable_status_check, status_check_contexts to protected_branch", v1_10.AddStatusCheckColumnsForProtectedBranches),
// v95 -> v96
NewMigration("add table columns for cross referencing issues", v1_10.AddCrossReferenceColumns),
// v96 -> v97
NewMigration("delete orphaned attachments", v1_10.DeleteOrphanedAttachments),
// v97 -> v98
NewMigration("add repo_admin_change_team_access to user", v1_10.AddRepoAdminChangeTeamAccessColumnForUser),
// v98 -> v99
NewMigration("add original author name and id on migrated release", v1_10.AddOriginalAuthorOnMigratedReleases),
// v99 -> v100
NewMigration("add task table and status column for repository table", v1_10.AddTaskTable),
// v100 -> v101
NewMigration("update migration repositories' service type", v1_10.UpdateMigrationServiceTypes),
// v101 -> v102
NewMigration("change length of some external login users columns", v1_10.ChangeSomeColumnsLengthOfExternalLoginUser),
newMigration(88, "add commit status context field to commit_status", v1_10.AddCommitStatusContext),
newMigration(89, "add original author/url migration info to issues, comments, and repo ", v1_10.AddOriginalMigrationInfo),
newMigration(90, "change length of some repository columns", v1_10.ChangeSomeColumnsLengthOfRepo),
newMigration(91, "add index on owner_id of repository and type, review_id of comment", v1_10.AddIndexOnRepositoryAndComment),
newMigration(92, "remove orphaned repository index statuses", v1_10.RemoveLingeringIndexStatus),
newMigration(93, "add email notification enabled preference to user", v1_10.AddEmailNotificationEnabledToUser),
newMigration(94, "add enable_status_check, status_check_contexts to protected_branch", v1_10.AddStatusCheckColumnsForProtectedBranches),
newMigration(95, "add table columns for cross referencing issues", v1_10.AddCrossReferenceColumns),
newMigration(96, "delete orphaned attachments", v1_10.DeleteOrphanedAttachments),
newMigration(97, "add repo_admin_change_team_access to user", v1_10.AddRepoAdminChangeTeamAccessColumnForUser),
newMigration(98, "add original author name and id on migrated release", v1_10.AddOriginalAuthorOnMigratedReleases),
newMigration(99, "add task table and status column for repository table", v1_10.AddTaskTable),
newMigration(100, "update migration repositories' service type", v1_10.UpdateMigrationServiceTypes),
newMigration(101, "change length of some external login users columns", v1_10.ChangeSomeColumnsLengthOfExternalLoginUser),
// Gitea 1.10.0 ends at v102
// Gitea 1.10.0 ends at database version 102
// v102 -> v103
NewMigration("update migration repositories' service type", v1_11.DropColumnHeadUserNameOnPullRequest),
// v103 -> v104
NewMigration("Add WhitelistDeployKeys to protected branch", v1_11.AddWhitelistDeployKeysToBranches),
// v104 -> v105
NewMigration("remove unnecessary columns from label", v1_11.RemoveLabelUneededCols),
// v105 -> v106
NewMigration("add includes_all_repositories to teams", v1_11.AddTeamIncludesAllRepositories),
// v106 -> v107
NewMigration("add column `mode` to table watch", v1_11.AddModeColumnToWatch),
// v107 -> v108
NewMigration("Add template options to repository", v1_11.AddTemplateToRepo),
// v108 -> v109
NewMigration("Add comment_id on table notification", v1_11.AddCommentIDOnNotification),
// v109 -> v110
NewMigration("add can_create_org_repo to team", v1_11.AddCanCreateOrgRepoColumnForTeam),
// v110 -> v111
NewMigration("change review content type to text", v1_11.ChangeReviewContentToText),
// v111 -> v112
NewMigration("update branch protection for can push and whitelist enable", v1_11.AddBranchProtectionCanPushAndEnableWhitelist),
// v112 -> v113
NewMigration("remove release attachments which repository deleted", v1_11.RemoveAttachmentMissedRepo),
// v113 -> v114
NewMigration("new feature: change target branch of pull requests", v1_11.FeatureChangeTargetBranch),
// v114 -> v115
NewMigration("Remove authentication credentials from stored URL", v1_11.SanitizeOriginalURL),
// v115 -> v116
NewMigration("add user_id prefix to existing user avatar name", v1_11.RenameExistingUserAvatarName),
// v116 -> v117
NewMigration("Extend TrackedTimes", v1_11.ExtendTrackedTimes),
newMigration(102, "update migration repositories' service type", v1_11.DropColumnHeadUserNameOnPullRequest),
newMigration(103, "Add WhitelistDeployKeys to protected branch", v1_11.AddWhitelistDeployKeysToBranches),
newMigration(104, "remove unnecessary columns from label", v1_11.RemoveLabelUneededCols),
newMigration(105, "add includes_all_repositories to teams", v1_11.AddTeamIncludesAllRepositories),
newMigration(106, "add column `mode` to table watch", v1_11.AddModeColumnToWatch),
newMigration(107, "Add template options to repository", v1_11.AddTemplateToRepo),
newMigration(108, "Add comment_id on table notification", v1_11.AddCommentIDOnNotification),
newMigration(109, "add can_create_org_repo to team", v1_11.AddCanCreateOrgRepoColumnForTeam),
newMigration(110, "change review content type to text", v1_11.ChangeReviewContentToText),
newMigration(111, "update branch protection for can push and whitelist enable", v1_11.AddBranchProtectionCanPushAndEnableWhitelist),
newMigration(112, "remove release attachments which repository deleted", v1_11.RemoveAttachmentMissedRepo),
newMigration(113, "new feature: change target branch of pull requests", v1_11.FeatureChangeTargetBranch),
newMigration(114, "Remove authentication credentials from stored URL", v1_11.SanitizeOriginalURL),
newMigration(115, "add user_id prefix to existing user avatar name", v1_11.RenameExistingUserAvatarName),
newMigration(116, "Extend TrackedTimes", v1_11.ExtendTrackedTimes),
// Gitea 1.11.0 ends at v117
// Gitea 1.11.0 ends at database version 117
// v117 -> v118
NewMigration("Add block on rejected reviews branch protection", v1_12.AddBlockOnRejectedReviews),
// v118 -> v119
NewMigration("Add commit id and stale to reviews", v1_12.AddReviewCommitAndStale),
// v119 -> v120
NewMigration("Fix migrated repositories' git service type", v1_12.FixMigratedRepositoryServiceType),
// v120 -> v121
NewMigration("Add owner_name on table repository", v1_12.AddOwnerNameOnRepository),
// v121 -> v122
NewMigration("add is_restricted column for users table", v1_12.AddIsRestricted),
// v122 -> v123
NewMigration("Add Require Signed Commits to ProtectedBranch", v1_12.AddRequireSignedCommits),
// v123 -> v124
NewMigration("Add original information for reactions", v1_12.AddReactionOriginals),
// v124 -> v125
NewMigration("Add columns to user and repository", v1_12.AddUserRepoMissingColumns),
// v125 -> v126
NewMigration("Add some columns on review for migration", v1_12.AddReviewMigrateInfo),
// v126 -> v127
NewMigration("Fix topic repository count", v1_12.FixTopicRepositoryCount),
// v127 -> v128
NewMigration("add repository code language statistics", v1_12.AddLanguageStats),
// v128 -> v129
NewMigration("fix merge base for pull requests", v1_12.FixMergeBase),
// v129 -> v130
NewMigration("remove dependencies from deleted repositories", v1_12.PurgeUnusedDependencies),
// v130 -> v131
NewMigration("Expand webhooks for more granularity", v1_12.ExpandWebhooks),
// v131 -> v132
NewMigration("Add IsSystemWebhook column to webhooks table", v1_12.AddSystemWebhookColumn),
// v132 -> v133
NewMigration("Add Branch Protection Protected Files Column", v1_12.AddBranchProtectionProtectedFilesColumn),
// v133 -> v134
NewMigration("Add EmailHash Table", v1_12.AddEmailHashTable),
// v134 -> v135
NewMigration("Refix merge base for merged pull requests", v1_12.RefixMergeBase),
// v135 -> v136
NewMigration("Add OrgID column to Labels table", v1_12.AddOrgIDLabelColumn),
// v136 -> v137
NewMigration("Add CommitsAhead and CommitsBehind Column to PullRequest Table", v1_12.AddCommitDivergenceToPulls),
// v137 -> v138
NewMigration("Add Branch Protection Block Outdated Branch", v1_12.AddBlockOnOutdatedBranch),
// v138 -> v139
NewMigration("Add ResolveDoerID to Comment table", v1_12.AddResolveDoerIDCommentColumn),
// v139 -> v140
NewMigration("prepend refs/heads/ to issue refs", v1_12.PrependRefsHeadsToIssueRefs),
newMigration(117, "Add block on rejected reviews branch protection", v1_12.AddBlockOnRejectedReviews),
newMigration(118, "Add commit id and stale to reviews", v1_12.AddReviewCommitAndStale),
newMigration(119, "Fix migrated repositories' git service type", v1_12.FixMigratedRepositoryServiceType),
newMigration(120, "Add owner_name on table repository", v1_12.AddOwnerNameOnRepository),
newMigration(121, "add is_restricted column for users table", v1_12.AddIsRestricted),
newMigration(122, "Add Require Signed Commits to ProtectedBranch", v1_12.AddRequireSignedCommits),
newMigration(123, "Add original information for reactions", v1_12.AddReactionOriginals),
newMigration(124, "Add columns to user and repository", v1_12.AddUserRepoMissingColumns),
newMigration(125, "Add some columns on review for migration", v1_12.AddReviewMigrateInfo),
newMigration(126, "Fix topic repository count", v1_12.FixTopicRepositoryCount),
newMigration(127, "add repository code language statistics", v1_12.AddLanguageStats),
newMigration(128, "fix merge base for pull requests", v1_12.FixMergeBase),
newMigration(129, "remove dependencies from deleted repositories", v1_12.PurgeUnusedDependencies),
newMigration(130, "Expand webhooks for more granularity", v1_12.ExpandWebhooks),
newMigration(131, "Add IsSystemWebhook column to webhooks table", v1_12.AddSystemWebhookColumn),
newMigration(132, "Add Branch Protection Protected Files Column", v1_12.AddBranchProtectionProtectedFilesColumn),
newMigration(133, "Add EmailHash Table", v1_12.AddEmailHashTable),
newMigration(134, "Refix merge base for merged pull requests", v1_12.RefixMergeBase),
newMigration(135, "Add OrgID column to Labels table", v1_12.AddOrgIDLabelColumn),
newMigration(136, "Add CommitsAhead and CommitsBehind Column to PullRequest Table", v1_12.AddCommitDivergenceToPulls),
newMigration(137, "Add Branch Protection Block Outdated Branch", v1_12.AddBlockOnOutdatedBranch),
newMigration(138, "Add ResolveDoerID to Comment table", v1_12.AddResolveDoerIDCommentColumn),
newMigration(139, "prepend refs/heads/ to issue refs", v1_12.PrependRefsHeadsToIssueRefs),
// Gitea 1.12.0 ends at v140
// Gitea 1.12.0 ends at database version 140
// v140 -> v141
NewMigration("Save detected language file size to database instead of percent", v1_13.FixLanguageStatsToSaveSize),
// v141 -> v142
NewMigration("Add KeepActivityPrivate to User table", v1_13.AddKeepActivityPrivateUserColumn),
// v142 -> v143
NewMigration("Ensure Repository.IsArchived is not null", v1_13.SetIsArchivedToFalse),
// v143 -> v144
NewMigration("recalculate Stars number for all user", v1_13.RecalculateStars),
// v144 -> v145
NewMigration("update Matrix Webhook http method to 'PUT'", v1_13.UpdateMatrixWebhookHTTPMethod),
// v145 -> v146
NewMigration("Increase Language field to 50 in LanguageStats", v1_13.IncreaseLanguageField),
// v146 -> v147
NewMigration("Add projects info to repository table", v1_13.AddProjectsInfo),
// v147 -> v148
NewMigration("create review for 0 review id code comments", v1_13.CreateReviewsForCodeComments),
// v148 -> v149
NewMigration("remove issue dependency comments who refer to non existing issues", v1_13.PurgeInvalidDependenciesComments),
// v149 -> v150
NewMigration("Add Created and Updated to Milestone table", v1_13.AddCreatedAndUpdatedToMilestones),
// v150 -> v151
NewMigration("add primary key to repo_topic", v1_13.AddPrimaryKeyToRepoTopic),
// v151 -> v152
NewMigration("set default password algorithm to Argon2", v1_13.SetDefaultPasswordToArgon2),
// v152 -> v153
NewMigration("add TrustModel field to Repository", v1_13.AddTrustModelToRepository),
// v153 > v154
NewMigration("add Team review request support", v1_13.AddTeamReviewRequestSupport),
// v154 > v155
NewMigration("add timestamps to Star, Label, Follow, Watch and Collaboration", v1_13.AddTimeStamps),
newMigration(140, "Save detected language file size to database instead of percent", v1_13.FixLanguageStatsToSaveSize),
newMigration(141, "Add KeepActivityPrivate to User table", v1_13.AddKeepActivityPrivateUserColumn),
newMigration(142, "Ensure Repository.IsArchived is not null", v1_13.SetIsArchivedToFalse),
newMigration(143, "recalculate Stars number for all user", v1_13.RecalculateStars),
newMigration(144, "update Matrix Webhook http method to 'PUT'", v1_13.UpdateMatrixWebhookHTTPMethod),
newMigration(145, "Increase Language field to 50 in LanguageStats", v1_13.IncreaseLanguageField),
newMigration(146, "Add projects info to repository table", v1_13.AddProjectsInfo),
newMigration(147, "create review for 0 review id code comments", v1_13.CreateReviewsForCodeComments),
newMigration(148, "remove issue dependency comments who refer to non existing issues", v1_13.PurgeInvalidDependenciesComments),
newMigration(149, "Add Created and Updated to Milestone table", v1_13.AddCreatedAndUpdatedToMilestones),
newMigration(150, "add primary key to repo_topic", v1_13.AddPrimaryKeyToRepoTopic),
newMigration(151, "set default password algorithm to Argon2", v1_13.SetDefaultPasswordToArgon2),
newMigration(152, "add TrustModel field to Repository", v1_13.AddTrustModelToRepository),
newMigration(153, "add Team review request support", v1_13.AddTeamReviewRequestSupport),
newMigration(154, "add timestamps to Star, Label, Follow, Watch and Collaboration", v1_13.AddTimeStamps),
// Gitea 1.13.0 ends at v155
// Gitea 1.13.0 ends at database version 155
// v155 -> v156
NewMigration("add changed_protected_files column for pull_request table", v1_14.AddChangedProtectedFilesPullRequestColumn),
// v156 -> v157
NewMigration("fix publisher ID for tag releases", v1_14.FixPublisherIDforTagReleases),
// v157 -> v158
NewMigration("ensure repo topics are up-to-date", v1_14.FixRepoTopics),
// v158 -> v159
NewMigration("code comment replies should have the commitID of the review they are replying to", v1_14.UpdateCodeCommentReplies),
// v159 -> v160
NewMigration("update reactions constraint", v1_14.UpdateReactionConstraint),
// v160 -> v161
NewMigration("Add block on official review requests branch protection", v1_14.AddBlockOnOfficialReviewRequests),
// v161 -> v162
NewMigration("Convert task type from int to string", v1_14.ConvertTaskTypeToString),
// v162 -> v163
NewMigration("Convert webhook task type from int to string", v1_14.ConvertWebhookTaskTypeToString),
// v163 -> v164
NewMigration("Convert topic name from 25 to 50", v1_14.ConvertTopicNameFrom25To50),
// v164 -> v165
NewMigration("Add scope and nonce columns to oauth2_grant table", v1_14.AddScopeAndNonceColumnsToOAuth2Grant),
// v165 -> v166
NewMigration("Convert hook task type from char(16) to varchar(16) and trim the column", v1_14.ConvertHookTaskTypeToVarcharAndTrim),
// v166 -> v167
NewMigration("Where Password is Valid with Empty String delete it", v1_14.RecalculateUserEmptyPWD),
// v167 -> v168
NewMigration("Add user redirect", v1_14.AddUserRedirect),
// v168 -> v169
NewMigration("Recreate user table to fix default values", v1_14.RecreateUserTableToFixDefaultValues),
// v169 -> v170
NewMigration("Update DeleteBranch comments to set the old_ref to the commit_sha", v1_14.CommentTypeDeleteBranchUseOldRef),
// v170 -> v171
NewMigration("Add Dismissed to Review table", v1_14.AddDismissedReviewColumn),
// v171 -> v172
NewMigration("Add Sorting to ProjectBoard table", v1_14.AddSortingColToProjectBoard),
// v172 -> v173
NewMigration("Add sessions table for go-chi/session", v1_14.AddSessionTable),
// v173 -> v174
NewMigration("Add time_id column to Comment", v1_14.AddTimeIDCommentColumn),
// v174 -> v175
NewMigration("Create repo transfer table", v1_14.AddRepoTransfer),
// v175 -> v176
NewMigration("Fix Postgres ID Sequences broken by recreate-table", v1_14.FixPostgresIDSequences),
// v176 -> v177
NewMigration("Remove invalid labels from comments", v1_14.RemoveInvalidLabels),
// v177 -> v178
NewMigration("Delete orphaned IssueLabels", v1_14.DeleteOrphanedIssueLabels),
newMigration(155, "add changed_protected_files column for pull_request table", v1_14.AddChangedProtectedFilesPullRequestColumn),
newMigration(156, "fix publisher ID for tag releases", v1_14.FixPublisherIDforTagReleases),
newMigration(157, "ensure repo topics are up-to-date", v1_14.FixRepoTopics),
newMigration(158, "code comment replies should have the commitID of the review they are replying to", v1_14.UpdateCodeCommentReplies),
newMigration(159, "update reactions constraint", v1_14.UpdateReactionConstraint),
newMigration(160, "Add block on official review requests branch protection", v1_14.AddBlockOnOfficialReviewRequests),
newMigration(161, "Convert task type from int to string", v1_14.ConvertTaskTypeToString),
newMigration(162, "Convert webhook task type from int to string", v1_14.ConvertWebhookTaskTypeToString),
newMigration(163, "Convert topic name from 25 to 50", v1_14.ConvertTopicNameFrom25To50),
newMigration(164, "Add scope and nonce columns to oauth2_grant table", v1_14.AddScopeAndNonceColumnsToOAuth2Grant),
newMigration(165, "Convert hook task type from char(16) to varchar(16) and trim the column", v1_14.ConvertHookTaskTypeToVarcharAndTrim),
newMigration(166, "Where Password is Valid with Empty String delete it", v1_14.RecalculateUserEmptyPWD),
newMigration(167, "Add user redirect", v1_14.AddUserRedirect),
newMigration(168, "Recreate user table to fix default values", v1_14.RecreateUserTableToFixDefaultValues),
newMigration(169, "Update DeleteBranch comments to set the old_ref to the commit_sha", v1_14.CommentTypeDeleteBranchUseOldRef),
newMigration(170, "Add Dismissed to Review table", v1_14.AddDismissedReviewColumn),
newMigration(171, "Add Sorting to ProjectBoard table", v1_14.AddSortingColToProjectBoard),
newMigration(172, "Add sessions table for go-chi/session", v1_14.AddSessionTable),
newMigration(173, "Add time_id column to Comment", v1_14.AddTimeIDCommentColumn),
newMigration(174, "Create repo transfer table", v1_14.AddRepoTransfer),
newMigration(175, "Fix Postgres ID Sequences broken by recreate-table", v1_14.FixPostgresIDSequences),
newMigration(176, "Remove invalid labels from comments", v1_14.RemoveInvalidLabels),
newMigration(177, "Delete orphaned IssueLabels", v1_14.DeleteOrphanedIssueLabels),
// Gitea 1.14.0 ends at v178
// Gitea 1.14.0 ends at database version 178
// v178 -> v179
NewMigration("Add LFS columns to Mirror", v1_15.AddLFSMirrorColumns),
// v179 -> v180
NewMigration("Convert avatar url to text", v1_15.ConvertAvatarURLToText),
// v180 -> v181
NewMigration("Delete credentials from past migrations", v1_15.DeleteMigrationCredentials),
// v181 -> v182
NewMigration("Always save primary email on email address table", v1_15.AddPrimaryEmail2EmailAddress),
// v182 -> v183
NewMigration("Add issue resource index table", v1_15.AddIssueResourceIndexTable),
// v183 -> v184
NewMigration("Create PushMirror table", v1_15.CreatePushMirrorTable),
// v184 -> v185
NewMigration("Rename Task errors to message", v1_15.RenameTaskErrorsToMessage),
// v185 -> v186
NewMigration("Add new table repo_archiver", v1_15.AddRepoArchiver),
// v186 -> v187
NewMigration("Create protected tag table", v1_15.CreateProtectedTagTable),
// v187 -> v188
NewMigration("Drop unneeded webhook related columns", v1_15.DropWebhookColumns),
// v188 -> v189
NewMigration("Add key is verified to gpg key", v1_15.AddKeyIsVerified),
newMigration(178, "Add LFS columns to Mirror", v1_15.AddLFSMirrorColumns),
newMigration(179, "Convert avatar url to text", v1_15.ConvertAvatarURLToText),
newMigration(180, "Delete credentials from past migrations", v1_15.DeleteMigrationCredentials),
newMigration(181, "Always save primary email on email address table", v1_15.AddPrimaryEmail2EmailAddress),
newMigration(182, "Add issue resource index table", v1_15.AddIssueResourceIndexTable),
newMigration(183, "Create PushMirror table", v1_15.CreatePushMirrorTable),
newMigration(184, "Rename Task errors to message", v1_15.RenameTaskErrorsToMessage),
newMigration(185, "Add new table repo_archiver", v1_15.AddRepoArchiver),
newMigration(186, "Create protected tag table", v1_15.CreateProtectedTagTable),
newMigration(187, "Drop unneeded webhook related columns", v1_15.DropWebhookColumns),
newMigration(188, "Add key is verified to gpg key", v1_15.AddKeyIsVerified),
// Gitea 1.15.0 ends at v189
// Gitea 1.15.0 ends at database version 189
// v189 -> v190
NewMigration("Unwrap ldap.Sources", v1_16.UnwrapLDAPSourceCfg),
// v190 -> v191
NewMigration("Add agit flow pull request support", v1_16.AddAgitFlowPullRequest),
// v191 -> v192
NewMigration("Alter issue/comment table TEXT fields to LONGTEXT", v1_16.AlterIssueAndCommentTextFieldsToLongText),
// v192 -> v193
NewMigration("RecreateIssueResourceIndexTable to have a primary key instead of an unique index", v1_16.RecreateIssueResourceIndexTable),
// v193 -> v194
NewMigration("Add repo id column for attachment table", v1_16.AddRepoIDForAttachment),
// v194 -> v195
NewMigration("Add Branch Protection Unprotected Files Column", v1_16.AddBranchProtectionUnprotectedFilesColumn),
// v195 -> v196
NewMigration("Add table commit_status_index", v1_16.AddTableCommitStatusIndex),
// v196 -> v197
NewMigration("Add Color to ProjectBoard table", v1_16.AddColorColToProjectBoard),
// v197 -> v198
NewMigration("Add renamed_branch table", v1_16.AddRenamedBranchTable),
// v198 -> v199
NewMigration("Add issue content history table", v1_16.AddTableIssueContentHistory),
// v199 -> v200
NewMigration("No-op (remote version is using AppState now)", noopMigration),
// v200 -> v201
NewMigration("Add table app_state", v1_16.AddTableAppState),
// v201 -> v202
NewMigration("Drop table remote_version (if exists)", v1_16.DropTableRemoteVersion),
// v202 -> v203
NewMigration("Create key/value table for user settings", v1_16.CreateUserSettingsTable),
// v203 -> v204
NewMigration("Add Sorting to ProjectIssue table", v1_16.AddProjectIssueSorting),
// v204 -> v205
NewMigration("Add key is verified to ssh key", v1_16.AddSSHKeyIsVerified),
// v205 -> v206
NewMigration("Migrate to higher varchar on user struct", v1_16.MigrateUserPasswordSalt),
// v206 -> v207
NewMigration("Add authorize column to team_unit table", v1_16.AddAuthorizeColForTeamUnit),
// v207 -> v208
NewMigration("Add webauthn table and migrate u2f data to webauthn - NO-OPED", v1_16.AddWebAuthnCred),
// v208 -> v209
NewMigration("Use base32.HexEncoding instead of base64 encoding for cred ID as it is case insensitive - NO-OPED", v1_16.UseBase32HexForCredIDInWebAuthnCredential),
// v209 -> v210
NewMigration("Increase WebAuthentication CredentialID size to 410 - NO-OPED", v1_16.IncreaseCredentialIDTo410),
// v210 -> v211
NewMigration("v208 was completely broken - remigrate", v1_16.RemigrateU2FCredentials),
newMigration(189, "Unwrap ldap.Sources", v1_16.UnwrapLDAPSourceCfg),
newMigration(190, "Add agit flow pull request support", v1_16.AddAgitFlowPullRequest),
newMigration(191, "Alter issue/comment table TEXT fields to LONGTEXT", v1_16.AlterIssueAndCommentTextFieldsToLongText),
newMigration(192, "RecreateIssueResourceIndexTable to have a primary key instead of an unique index", v1_16.RecreateIssueResourceIndexTable),
newMigration(193, "Add repo id column for attachment table", v1_16.AddRepoIDForAttachment),
newMigration(194, "Add Branch Protection Unprotected Files Column", v1_16.AddBranchProtectionUnprotectedFilesColumn),
newMigration(195, "Add table commit_status_index", v1_16.AddTableCommitStatusIndex),
newMigration(196, "Add Color to ProjectBoard table", v1_16.AddColorColToProjectBoard),
newMigration(197, "Add renamed_branch table", v1_16.AddRenamedBranchTable),
newMigration(198, "Add issue content history table", v1_16.AddTableIssueContentHistory),
newMigration(199, "No-op (remote version is using AppState now)", noopMigration),
newMigration(200, "Add table app_state", v1_16.AddTableAppState),
newMigration(201, "Drop table remote_version (if exists)", v1_16.DropTableRemoteVersion),
newMigration(202, "Create key/value table for user settings", v1_16.CreateUserSettingsTable),
newMigration(203, "Add Sorting to ProjectIssue table", v1_16.AddProjectIssueSorting),
newMigration(204, "Add key is verified to ssh key", v1_16.AddSSHKeyIsVerified),
newMigration(205, "Migrate to higher varchar on user struct", v1_16.MigrateUserPasswordSalt),
newMigration(206, "Add authorize column to team_unit table", v1_16.AddAuthorizeColForTeamUnit),
newMigration(207, "Add webauthn table and migrate u2f data to webauthn - NO-OPED", v1_16.AddWebAuthnCred),
newMigration(208, "Use base32.HexEncoding instead of base64 encoding for cred ID as it is case insensitive - NO-OPED", v1_16.UseBase32HexForCredIDInWebAuthnCredential),
newMigration(209, "Increase WebAuthentication CredentialID size to 410 - NO-OPED", v1_16.IncreaseCredentialIDTo410),
newMigration(210, "v208 was completely broken - remigrate", v1_16.RemigrateU2FCredentials),
// Gitea 1.16.2 ends at v211
// Gitea 1.16.2 ends at database version 211
// v211 -> v212
NewMigration("Create ForeignReference table", v1_17.CreateForeignReferenceTable),
// v212 -> v213
NewMigration("Add package tables", v1_17.AddPackageTables),
// v213 -> v214
NewMigration("Add allow edits from maintainers to PullRequest table", v1_17.AddAllowMaintainerEdit),
// v214 -> v215
NewMigration("Add auto merge table", v1_17.AddAutoMergeTable),
// v215 -> v216
NewMigration("allow to view files in PRs", v1_17.AddReviewViewedFiles),
// v216 -> v217
NewMigration("No-op (Improve Action table indices v1)", noopMigration),
// v217 -> v218
NewMigration("Alter hook_task table TEXT fields to LONGTEXT", v1_17.AlterHookTaskTextFieldsToLongText),
// v218 -> v219
NewMigration("Improve Action table indices v2", v1_17.ImproveActionTableIndices),
// v219 -> v220
NewMigration("Add sync_on_commit column to push_mirror table", v1_17.AddSyncOnCommitColForPushMirror),
// v220 -> v221
NewMigration("Add container repository property", v1_17.AddContainerRepositoryProperty),
// v221 -> v222
NewMigration("Store WebAuthentication CredentialID as bytes and increase size to at least 1024", v1_17.StoreWebauthnCredentialIDAsBytes),
// v222 -> v223
NewMigration("Drop old CredentialID column", v1_17.DropOldCredentialIDColumn),
// v223 -> v224
NewMigration("Rename CredentialIDBytes column to CredentialID", v1_17.RenameCredentialIDBytes),
newMigration(211, "Create ForeignReference table", v1_17.CreateForeignReferenceTable),
newMigration(212, "Add package tables", v1_17.AddPackageTables),
newMigration(213, "Add allow edits from maintainers to PullRequest table", v1_17.AddAllowMaintainerEdit),
newMigration(214, "Add auto merge table", v1_17.AddAutoMergeTable),
newMigration(215, "allow to view files in PRs", v1_17.AddReviewViewedFiles),
newMigration(216, "No-op (Improve Action table indices v1)", noopMigration),
newMigration(217, "Alter hook_task table TEXT fields to LONGTEXT", v1_17.AlterHookTaskTextFieldsToLongText),
newMigration(218, "Improve Action table indices v2", v1_17.ImproveActionTableIndices),
newMigration(219, "Add sync_on_commit column to push_mirror table", v1_17.AddSyncOnCommitColForPushMirror),
newMigration(220, "Add container repository property", v1_17.AddContainerRepositoryProperty),
newMigration(221, "Store WebAuthentication CredentialID as bytes and increase size to at least 1024", v1_17.StoreWebauthnCredentialIDAsBytes),
newMigration(222, "Drop old CredentialID column", v1_17.DropOldCredentialIDColumn),
newMigration(223, "Rename CredentialIDBytes column to CredentialID", v1_17.RenameCredentialIDBytes),
// Gitea 1.17.0 ends at v224
// Gitea 1.17.0 ends at database version 224
// v224 -> v225
NewMigration("Add badges to users", v1_18.CreateUserBadgesTable),
// v225 -> v226
NewMigration("Alter gpg_key/public_key content TEXT fields to MEDIUMTEXT", v1_18.AlterPublicGPGKeyContentFieldsToMediumText),
// v226 -> v227
NewMigration("Conan and generic packages do not need to be semantically versioned", v1_18.FixPackageSemverField),
// v227 -> v228
NewMigration("Create key/value table for system settings", v1_18.CreateSystemSettingsTable),
// v228 -> v229
NewMigration("Add TeamInvite table", v1_18.AddTeamInviteTable),
// v229 -> v230
NewMigration("Update counts of all open milestones", v1_18.UpdateOpenMilestoneCounts),
// v230 -> v231
NewMigration("Add ConfidentialClient column (default true) to OAuth2Application table", v1_18.AddConfidentialClientColumnToOAuth2ApplicationTable),
newMigration(224, "Add badges to users", v1_18.CreateUserBadgesTable),
newMigration(225, "Alter gpg_key/public_key content TEXT fields to MEDIUMTEXT", v1_18.AlterPublicGPGKeyContentFieldsToMediumText),
newMigration(226, "Conan and generic packages do not need to be semantically versioned", v1_18.FixPackageSemverField),
newMigration(227, "Create key/value table for system settings", v1_18.CreateSystemSettingsTable),
newMigration(228, "Add TeamInvite table", v1_18.AddTeamInviteTable),
newMigration(229, "Update counts of all open milestones", v1_18.UpdateOpenMilestoneCounts),
newMigration(230, "Add ConfidentialClient column (default true) to OAuth2Application table", v1_18.AddConfidentialClientColumnToOAuth2ApplicationTable),
// Gitea 1.18.0 ends at v231
// Gitea 1.18.0 ends at database version 231
// v231 -> v232
NewMigration("Add index for hook_task", v1_19.AddIndexForHookTask),
// v232 -> v233
NewMigration("Alter package_version.metadata_json to LONGTEXT", v1_19.AlterPackageVersionMetadataToLongText),
// v233 -> v234
NewMigration("Add header_authorization_encrypted column to webhook table", v1_19.AddHeaderAuthorizationEncryptedColWebhook),
// v234 -> v235
NewMigration("Add package cleanup rule table", v1_19.CreatePackageCleanupRuleTable),
// v235 -> v236
NewMigration("Add index for access_token", v1_19.AddIndexForAccessToken),
// v236 -> v237
NewMigration("Create secrets table", v1_19.CreateSecretsTable),
// v237 -> v238
NewMigration("Drop ForeignReference table", v1_19.DropForeignReferenceTable),
// v238 -> v239
NewMigration("Add updated unix to LFSMetaObject", v1_19.AddUpdatedUnixToLFSMetaObject),
// v239 -> v240
NewMigration("Add scope for access_token", v1_19.AddScopeForAccessTokens),
// v240 -> v241
NewMigration("Add actions tables", v1_19.AddActionsTables),
// v241 -> v242
NewMigration("Add card_type column to project table", v1_19.AddCardTypeToProjectTable),
// v242 -> v243
NewMigration("Alter gpg_key_import content TEXT field to MEDIUMTEXT", v1_19.AlterPublicGPGKeyImportContentFieldToMediumText),
// v243 -> v244
NewMigration("Add exclusive label", v1_19.AddExclusiveLabel),
newMigration(231, "Add index for hook_task", v1_19.AddIndexForHookTask),
newMigration(232, "Alter package_version.metadata_json to LONGTEXT", v1_19.AlterPackageVersionMetadataToLongText),
newMigration(233, "Add header_authorization_encrypted column to webhook table", v1_19.AddHeaderAuthorizationEncryptedColWebhook),
newMigration(234, "Add package cleanup rule table", v1_19.CreatePackageCleanupRuleTable),
newMigration(235, "Add index for access_token", v1_19.AddIndexForAccessToken),
newMigration(236, "Create secrets table", v1_19.CreateSecretsTable),
newMigration(237, "Drop ForeignReference table", v1_19.DropForeignReferenceTable),
newMigration(238, "Add updated unix to LFSMetaObject", v1_19.AddUpdatedUnixToLFSMetaObject),
newMigration(239, "Add scope for access_token", v1_19.AddScopeForAccessTokens),
newMigration(240, "Add actions tables", v1_19.AddActionsTables),
newMigration(241, "Add card_type column to project table", v1_19.AddCardTypeToProjectTable),
newMigration(242, "Alter gpg_key_import content TEXT field to MEDIUMTEXT", v1_19.AlterPublicGPGKeyImportContentFieldToMediumText),
newMigration(243, "Add exclusive label", v1_19.AddExclusiveLabel),
// Gitea 1.19.0 ends at v244
// Gitea 1.19.0 ends at database version 244
// v244 -> v245
NewMigration("Add NeedApproval to actions tables", v1_20.AddNeedApprovalToActionRun),
// v245 -> v246
NewMigration("Rename Webhook org_id to owner_id", v1_20.RenameWebhookOrgToOwner),
// v246 -> v247
NewMigration("Add missed column owner_id for project table", v1_20.AddNewColumnForProject),
// v247 -> v248
NewMigration("Fix incorrect project type", v1_20.FixIncorrectProjectType),
// v248 -> v249
NewMigration("Add version column to action_runner table", v1_20.AddVersionToActionRunner),
// v249 -> v250
NewMigration("Improve Action table indices v3", v1_20.ImproveActionTableIndices),
// v250 -> v251
NewMigration("Change Container Metadata", v1_20.ChangeContainerMetadataMultiArch),
// v251 -> v252
NewMigration("Fix incorrect owner team unit access mode", v1_20.FixIncorrectOwnerTeamUnitAccessMode),
// v252 -> v253
NewMigration("Fix incorrect admin team unit access mode", v1_20.FixIncorrectAdminTeamUnitAccessMode),
// v253 -> v254
NewMigration("Fix ExternalTracker and ExternalWiki accessMode in owner and admin team", v1_20.FixExternalTrackerAndExternalWikiAccessModeInOwnerAndAdminTeam),
// v254 -> v255
NewMigration("Add ActionTaskOutput table", v1_20.AddActionTaskOutputTable),
// v255 -> v256
NewMigration("Add ArchivedUnix Column", v1_20.AddArchivedUnixToRepository),
// v256 -> v257
NewMigration("Add is_internal column to package", v1_20.AddIsInternalColumnToPackage),
// v257 -> v258
NewMigration("Add Actions Artifact table", v1_20.CreateActionArtifactTable),
// v258 -> v259
NewMigration("Add PinOrder Column", v1_20.AddPinOrderToIssue),
// v259 -> v260
NewMigration("Convert scoped access tokens", v1_20.ConvertScopedAccessTokens),
newMigration(244, "Add NeedApproval to actions tables", v1_20.AddNeedApprovalToActionRun),
newMigration(245, "Rename Webhook org_id to owner_id", v1_20.RenameWebhookOrgToOwner),
newMigration(246, "Add missed column owner_id for project table", v1_20.AddNewColumnForProject),
newMigration(247, "Fix incorrect project type", v1_20.FixIncorrectProjectType),
newMigration(248, "Add version column to action_runner table", v1_20.AddVersionToActionRunner),
newMigration(249, "Improve Action table indices v3", v1_20.ImproveActionTableIndices),
newMigration(250, "Change Container Metadata", v1_20.ChangeContainerMetadataMultiArch),
newMigration(251, "Fix incorrect owner team unit access mode", v1_20.FixIncorrectOwnerTeamUnitAccessMode),
newMigration(252, "Fix incorrect admin team unit access mode", v1_20.FixIncorrectAdminTeamUnitAccessMode),
newMigration(253, "Fix ExternalTracker and ExternalWiki accessMode in owner and admin team", v1_20.FixExternalTrackerAndExternalWikiAccessModeInOwnerAndAdminTeam),
newMigration(254, "Add ActionTaskOutput table", v1_20.AddActionTaskOutputTable),
newMigration(255, "Add ArchivedUnix Column", v1_20.AddArchivedUnixToRepository),
newMigration(256, "Add is_internal column to package", v1_20.AddIsInternalColumnToPackage),
newMigration(257, "Add Actions Artifact table", v1_20.CreateActionArtifactTable),
newMigration(258, "Add PinOrder Column", v1_20.AddPinOrderToIssue),
newMigration(259, "Convert scoped access tokens", v1_20.ConvertScopedAccessTokens),
// Gitea 1.20.0 ends at v260
// Gitea 1.20.0 ends at database version 260
// v260 -> v261
NewMigration("Drop custom_labels column of action_runner table", v1_21.DropCustomLabelsColumnOfActionRunner),
// v261 -> v262
NewMigration("Add variable table", v1_21.CreateVariableTable),
// v262 -> v263
NewMigration("Add TriggerEvent to action_run table", v1_21.AddTriggerEventToActionRun),
// v263 -> v264
NewMigration("Add git_size and lfs_size columns to repository table", v1_21.AddGitSizeAndLFSSizeToRepositoryTable),
// v264 -> v265
NewMigration("Add branch table", v1_21.AddBranchTable),
// v265 -> v266
NewMigration("Alter Actions Artifact table", v1_21.AlterActionArtifactTable),
// v266 -> v267
NewMigration("Reduce commit status", v1_21.ReduceCommitStatus),
// v267 -> v268
NewMigration("Add action_tasks_version table", v1_21.CreateActionTasksVersionTable),
// v268 -> v269
NewMigration("Update Action Ref", v1_21.UpdateActionsRefIndex),
// v269 -> v270
NewMigration("Drop deleted branch table", v1_21.DropDeletedBranchTable),
// v270 -> v271
NewMigration("Fix PackageProperty typo", v1_21.FixPackagePropertyTypo),
// v271 -> v272
NewMigration("Allow archiving labels", v1_21.AddArchivedUnixColumInLabelTable),
// v272 -> v273
NewMigration("Add Version to ActionRun table", v1_21.AddVersionToActionRunTable),
// v273 -> v274
NewMigration("Add Action Schedule Table", v1_21.AddActionScheduleTable),
// v274 -> v275
NewMigration("Add Actions artifacts expiration date", v1_21.AddExpiredUnixColumnInActionArtifactTable),
// v275 -> v276
NewMigration("Add ScheduleID for ActionRun", v1_21.AddScheduleIDForActionRun),
// v276 -> v277
NewMigration("Add RemoteAddress to mirrors", v1_21.AddRemoteAddressToMirrors),
// v277 -> v278
NewMigration("Add Index to issue_user.issue_id", v1_21.AddIndexToIssueUserIssueID),
// v278 -> v279
NewMigration("Add Index to comment.dependent_issue_id", v1_21.AddIndexToCommentDependentIssueID),
// v279 -> v280
NewMigration("Add Index to action.user_id", v1_21.AddIndexToActionUserID),
newMigration(260, "Drop custom_labels column of action_runner table", v1_21.DropCustomLabelsColumnOfActionRunner),
newMigration(261, "Add variable table", v1_21.CreateVariableTable),
newMigration(262, "Add TriggerEvent to action_run table", v1_21.AddTriggerEventToActionRun),
newMigration(263, "Add git_size and lfs_size columns to repository table", v1_21.AddGitSizeAndLFSSizeToRepositoryTable),
newMigration(264, "Add branch table", v1_21.AddBranchTable),
newMigration(265, "Alter Actions Artifact table", v1_21.AlterActionArtifactTable),
newMigration(266, "Reduce commit status", v1_21.ReduceCommitStatus),
newMigration(267, "Add action_tasks_version table", v1_21.CreateActionTasksVersionTable),
newMigration(268, "Update Action Ref", v1_21.UpdateActionsRefIndex),
newMigration(269, "Drop deleted branch table", v1_21.DropDeletedBranchTable),
newMigration(270, "Fix PackageProperty typo", v1_21.FixPackagePropertyTypo),
newMigration(271, "Allow archiving labels", v1_21.AddArchivedUnixColumInLabelTable),
newMigration(272, "Add Version to ActionRun table", v1_21.AddVersionToActionRunTable),
newMigration(273, "Add Action Schedule Table", v1_21.AddActionScheduleTable),
newMigration(274, "Add Actions artifacts expiration date", v1_21.AddExpiredUnixColumnInActionArtifactTable),
newMigration(275, "Add ScheduleID for ActionRun", v1_21.AddScheduleIDForActionRun),
newMigration(276, "Add RemoteAddress to mirrors", v1_21.AddRemoteAddressToMirrors),
newMigration(277, "Add Index to issue_user.issue_id", v1_21.AddIndexToIssueUserIssueID),
newMigration(278, "Add Index to comment.dependent_issue_id", v1_21.AddIndexToCommentDependentIssueID),
newMigration(279, "Add Index to action.user_id", v1_21.AddIndexToActionUserID),
// Gitea 1.21.0 ends at 280
// Gitea 1.21.0 ends at database version 280
// v280 -> v281
NewMigration("Rename user themes", v1_22.RenameUserThemes),
// v281 -> v282
NewMigration("Add auth_token table", v1_22.CreateAuthTokenTable),
// v282 -> v283
NewMigration("Add Index to pull_auto_merge.doer_id", v1_22.AddIndexToPullAutoMergeDoerID),
// v283 -> v284
NewMigration("Add combined Index to issue_user.uid and issue_id", v1_22.AddCombinedIndexToIssueUser),
// v284 -> v285
NewMigration("Add ignore stale approval column on branch table", v1_22.AddIgnoreStaleApprovalsColumnToProtectedBranchTable),
// v285 -> v286
NewMigration("Add PreviousDuration to ActionRun", v1_22.AddPreviousDurationToActionRun),
// v286 -> v287
NewMigration("Add support for SHA256 git repositories", v1_22.AdjustDBForSha256),
// v287 -> v288
NewMigration("Use Slug instead of ID for Badges", v1_22.UseSlugInsteadOfIDForBadges),
// v288 -> v289
NewMigration("Add user_blocking table", v1_22.AddUserBlockingTable),
// v289 -> v290
NewMigration("Add default_wiki_branch to repository table", v1_22.AddDefaultWikiBranch),
// v290 -> v291
NewMigration("Add PayloadVersion to HookTask", v1_22.AddPayloadVersionToHookTaskTable),
// v291 -> v292
NewMigration("Add Index to attachment.comment_id", v1_22.AddCommentIDIndexofAttachment),
// v292 -> v293
NewMigration("Ensure every project has exactly one default column - No Op", noopMigration),
// v293 -> v294
NewMigration("Ensure every project has exactly one default column", v1_22.CheckProjectColumnsConsistency),
newMigration(280, "Rename user themes", v1_22.RenameUserThemes),
newMigration(281, "Add auth_token table", v1_22.CreateAuthTokenTable),
newMigration(282, "Add Index to pull_auto_merge.doer_id", v1_22.AddIndexToPullAutoMergeDoerID),
newMigration(283, "Add combined Index to issue_user.uid and issue_id", v1_22.AddCombinedIndexToIssueUser),
newMigration(284, "Add ignore stale approval column on branch table", v1_22.AddIgnoreStaleApprovalsColumnToProtectedBranchTable),
newMigration(285, "Add PreviousDuration to ActionRun", v1_22.AddPreviousDurationToActionRun),
newMigration(286, "Add support for SHA256 git repositories", v1_22.AdjustDBForSha256),
newMigration(287, "Use Slug instead of ID for Badges", v1_22.UseSlugInsteadOfIDForBadges),
newMigration(288, "Add user_blocking table", v1_22.AddUserBlockingTable),
newMigration(289, "Add default_wiki_branch to repository table", v1_22.AddDefaultWikiBranch),
newMigration(290, "Add PayloadVersion to HookTask", v1_22.AddPayloadVersionToHookTaskTable),
newMigration(291, "Add Index to attachment.comment_id", v1_22.AddCommentIDIndexofAttachment),
newMigration(292, "Ensure every project has exactly one default column - No Op", noopMigration),
newMigration(293, "Ensure every project has exactly one default column", v1_22.CheckProjectColumnsConsistency),
// Gitea 1.22.0-rc0 ends at 294
// Gitea 1.22.0-rc0 ends at database version 294
// v294 -> v295
NewMigration("Add unique index for project issue table", v1_22.AddUniqueIndexForProjectIssue),
// v295 -> v296
NewMigration("Add commit status summary table", v1_22.AddCommitStatusSummary),
// v296 -> v297
NewMigration("Add missing field of commit status summary table", v1_22.AddCommitStatusSummary2),
// v297 -> v298
NewMigration("Add everyone_access_mode for repo_unit", v1_22.AddRepoUnitEveryoneAccessMode),
// v298 -> v299
NewMigration("Drop wrongly created table o_auth2_application", v1_22.DropWronglyCreatedTable),
newMigration(294, "Add unique index for project issue table", v1_22.AddUniqueIndexForProjectIssue),
newMigration(295, "Add commit status summary table", v1_22.AddCommitStatusSummary),
newMigration(296, "Add missing field of commit status summary table", v1_22.AddCommitStatusSummary2),
newMigration(297, "Add everyone_access_mode for repo_unit", v1_22.AddRepoUnitEveryoneAccessMode),
newMigration(298, "Drop wrongly created table o_auth2_application", v1_22.DropWronglyCreatedTable),
// Gitea 1.22.0-rc1 ends at 299
// Gitea 1.22.0-rc1 ends at migration ID number 298 (database version 299)
// v299 -> v300
NewMigration("Add content version to issue and comment table", v1_23.AddContentVersionToIssueAndComment),
// v300 -> v301
NewMigration("Add force-push branch protection support", v1_23.AddForcePushBranchProtection),
// v301 -> v302
NewMigration("Add skip_secondary_authorization option to oauth2 application table", v1_23.AddSkipSecondaryAuthColumnToOAuth2ApplicationTable),
// v302 -> v303
NewMigration("Add index to action_task stopped log_expired", v1_23.AddIndexToActionTaskStoppedLogExpired),
// v303 -> v304
NewMigration("Add metadata column for comment table", v1_23.AddCommentMetaDataColumn),
// v304 -> v305
NewMigration("Add index for release sha1", v1_23.AddIndexForReleaseSha1),
// v305 -> v306
NewMigration("Add Repository Licenses", v1_23.AddRepositoryLicenses),
newMigration(299, "Add content version to issue and comment table", v1_23.AddContentVersionToIssueAndComment),
newMigration(300, "Add force-push branch protection support", v1_23.AddForcePushBranchProtection),
newMigration(301, "Add skip_secondary_authorization option to oauth2 application table", v1_23.AddSkipSecondaryAuthColumnToOAuth2ApplicationTable),
newMigration(302, "Add index to action_task stopped log_expired", v1_23.AddIndexToActionTaskStoppedLogExpired),
newMigration(303, "Add metadata column for comment table", v1_23.AddCommentMetaDataColumn),
newMigration(304, "Add index for release sha1", v1_23.AddIndexForReleaseSha1),
newMigration(305, "Add Repository Licenses", v1_23.AddRepositoryLicenses),
newMigration(306, "Add BlockAdminMergeOverride to ProtectedBranch", v1_23.AddBlockAdminMergeOverrideBranchProtection),
}
return preparedMigrations
}
// GetCurrentDBVersion returns the current db version
@ -622,9 +385,20 @@ func GetCurrentDBVersion(x *xorm.Engine) (int64, error) {
return currentVersion.Version, nil
}
// ExpectedVersion returns the expected db version
func ExpectedVersion() int64 {
return int64(minDBVersion + len(migrations))
func calcDBVersion(migrations []*migration) int64 {
dbVer := int64(minDBVersion + len(migrations))
if migrations[0].idNumber != minDBVersion {
panic("migrations should start at minDBVersion")
}
if dbVer != migrations[len(migrations)-1].idNumber+1 {
panic("migrations are not in order")
}
return dbVer
}
// ExpectedDBVersion returns the expected db version
func ExpectedDBVersion() int64 {
return calcDBVersion(prepareMigrationTasks())
}
// EnsureUpToDate will check if the db is at the correct version
@ -635,24 +409,35 @@ func EnsureUpToDate(x *xorm.Engine) error {
}
if currentDB < 0 {
return fmt.Errorf("Database has not been initialized")
return fmt.Errorf("database has not been initialized")
}
if minDBVersion > currentDB {
return fmt.Errorf("DB version %d (<= %d) is too old for auto-migration. Upgrade to Gitea 1.6.4 first then upgrade to this version", currentDB, minDBVersion)
}
expected := ExpectedVersion()
expectedDB := ExpectedDBVersion()
if currentDB != expected {
return fmt.Errorf(`Current database version %d is not equal to the expected version %d. Please run "gitea [--config /path/to/app.ini] migrate" to update the database version`, currentDB, expected)
if currentDB != expectedDB {
return fmt.Errorf(`current database version %d is not equal to the expected version %d. Please run "gitea [--config /path/to/app.ini] migrate" to update the database version`, currentDB, expectedDB)
}
return nil
}
func getPendingMigrations(curDBVer int64, migrations []*migration) []*migration {
return migrations[curDBVer-minDBVersion:]
}
func migrationIDNumberToDBVersion(idNumber int64) int64 {
return idNumber + 1
}
// Migrate database to current version
func Migrate(x *xorm.Engine) error {
migrations := prepareMigrationTasks()
maxDBVer := calcDBVersion(migrations)
// Set a new clean the default mapper to GonicMapper as that is the default for Gitea.
x.SetMapper(names.GonicMapper{})
if err := x.Sync(new(Version)); err != nil {
@ -664,29 +449,29 @@ func Migrate(x *xorm.Engine) error {
if err != nil {
return fmt.Errorf("get: %w", err)
} else if !has {
// If the version record does not exist we think
// it is a fresh installation and we can skip all migrations.
// If the version record does not exist, it is a fresh installation, and we can skip all migrations.
// XORM model framework will create all tables when initializing.
currentVersion.ID = 0
currentVersion.Version = int64(minDBVersion + len(migrations))
currentVersion.Version = maxDBVer
if _, err = x.InsertOne(currentVersion); err != nil {
return fmt.Errorf("insert: %w", err)
}
}
v := currentVersion.Version
if minDBVersion > v {
curDBVer := currentVersion.Version
// Outdated Gitea database version is not supported
if curDBVer < minDBVersion {
log.Fatal(`Gitea no longer supports auto-migration from your previously installed version.
Please try upgrading to a lower version first (suggested v1.6.4), then upgrade to this version.`)
return nil
}
// Downgrading Gitea's database version not supported
if int(v-minDBVersion) > len(migrations) {
msg := fmt.Sprintf("Your database (migration version: %d) is for a newer Gitea, you can not use the newer database for this old Gitea release (%d).", v, minDBVersion+len(migrations))
if maxDBVer < curDBVer {
msg := fmt.Sprintf("Your database (migration version: %d) is for a newer Gitea, you can not use the newer database for this old Gitea release (%d).", curDBVer, maxDBVer)
msg += "\nGitea will exit to keep your database safe and unchanged. Please use the correct Gitea release, do not change the migration version manually (incorrect manual operation may lose data)."
if !setting.IsProd {
msg += fmt.Sprintf("\nIf you are in development and really know what you're doing, you can force changing the migration version by executing: UPDATE version SET version=%d WHERE id=1;", minDBVersion+len(migrations))
msg += fmt.Sprintf("\nIf you are in development and really know what you're doing, you can force changing the migration version by executing: UPDATE version SET version=%d WHERE id=1;", maxDBVer)
}
log.Fatal("Migration Error: %s", msg)
return nil
@ -700,14 +485,14 @@ Please try upgrading to a lower version first (suggested v1.6.4), then upgrade t
}
// Migrate
for i, m := range migrations[v-minDBVersion:] {
log.Info("Migration[%d]: %s", v+int64(i), m.Description())
for _, m := range getPendingMigrations(curDBVer, migrations) {
log.Info("Migration[%d]: %s", m.idNumber, m.description)
// Reset the mapper between each migration - migrations are not supposed to depend on each other
x.SetMapper(names.GonicMapper{})
if err = m.Migrate(x); err != nil {
return fmt.Errorf("migration[%d]: %s failed: %w", v+int64(i), m.Description(), err)
return fmt.Errorf("migration[%d]: %s failed: %w", m.idNumber, m.description, err)
}
currentVersion.Version = v + int64(i) + 1
currentVersion.Version = migrationIDNumberToDBVersion(m.idNumber)
if _, err = x.ID(1).Update(currentVersion); err != nil {
return err
}
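A minimal, self-contained sketch of the new ID-based bookkeeping, with the struct and helpers re-declared locally for illustration (assumed values, not part of the patch): the database version stored in the version table is always the last applied migration ID plus one.
package main
import "fmt"
type migration struct {
	idNumber    int64
	description string
}
const minDBVersion = 70
// dbVersion is always "last applied migration ID + 1"
func migrationIDNumberToDBVersion(idNumber int64) int64 { return idNumber + 1 }
func getPendingMigrations(curDBVer int64, migrations []*migration) []*migration {
	return migrations[curDBVer-minDBVersion:]
}
func main() {
	migrations := []*migration{{idNumber: 70, description: "first"}, {idNumber: 71, description: "second"}}
	// a database at version 71 has applied migration 70 and still needs migration 71
	for _, m := range getPendingMigrations(71, migrations) {
		fmt.Printf("pending migration %d: %s -> new db version %d\n", m.idNumber, m.description, migrationIDNumberToDBVersion(m.idNumber))
	}
}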

View file

@ -0,0 +1,28 @@
// Copyright 2024 The Gitea Authors. All rights reserved.
// SPDX-License-Identifier: MIT
package migrations
import (
"testing"
"code.gitea.io/gitea/modules/test"
"github.com/stretchr/testify/assert"
)
func TestMigrations(t *testing.T) {
defer test.MockVariableValue(&preparedMigrations)()
preparedMigrations = []*migration{
{idNumber: 70},
{idNumber: 71},
}
assert.EqualValues(t, 72, calcDBVersion(preparedMigrations))
assert.EqualValues(t, 72, ExpectedDBVersion())
assert.EqualValues(t, 71, migrationIDNumberToDBVersion(70))
assert.EqualValues(t, []*migration{{idNumber: 70}, {idNumber: 71}}, getPendingMigrations(70, preparedMigrations))
assert.EqualValues(t, []*migration{{idNumber: 71}}, getPendingMigrations(71, preparedMigrations))
assert.EqualValues(t, []*migration{}, getPendingMigrations(72, preparedMigrations))
}

View file

@ -0,0 +1,13 @@
// Copyright 2024 The Gitea Authors. All rights reserved.
// SPDX-License-Identifier: MIT
package v1_23 //nolint
import "xorm.io/xorm"
func AddBlockAdminMergeOverrideBranchProtection(x *xorm.Engine) error {
type ProtectedBranch struct {
BlockAdminMergeOverride bool `xorm:"NOT NULL DEFAULT false"`
}
return x.Sync(new(ProtectedBranch))
}

View file

@ -75,26 +75,27 @@ func ExistPackages(ctx context.Context, opts *PackageSearchOptions) (bool, error
}
// SearchPackages gets the packages matching the search options
func SearchPackages(ctx context.Context, opts *PackageSearchOptions, iter func(*packages.PackageFileDescriptor)) error {
return db.GetEngine(ctx).
func SearchPackages(ctx context.Context, opts *PackageSearchOptions) ([]*packages.PackageFileDescriptor, error) {
var pkgFiles []*packages.PackageFile
err := db.GetEngine(ctx).
Table("package_file").
Select("package_file.*").
Join("INNER", "package_version", "package_version.id = package_file.version_id").
Join("INNER", "package", "package.id = package_version.package_id").
Where(opts.toCond()).
Asc("package.lower_name", "package_version.created_unix").
Iterate(new(packages.PackageFile), func(_ int, bean any) error {
pf := bean.(*packages.PackageFile)
pfd, err := packages.GetPackageFileDescriptor(ctx, pf)
if err != nil {
return err
}
iter(pfd)
return nil
})
Asc("package.lower_name", "package_version.created_unix").Find(&pkgFiles)
if err != nil {
return nil, err
}
pfds := make([]*packages.PackageFileDescriptor, 0, len(pkgFiles))
for _, pf := range pkgFiles {
pfd, err := packages.GetPackageFileDescriptor(ctx, pf)
if err != nil {
return nil, err
}
pfds = append(pfds, pfd)
}
return pfds, nil
}
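A hypothetical caller of the new slice-returning signature (the name processPackageFile is an assumption for illustration; the real caller change in the Debian index builder appears further down):
func listDebianPackageFiles(ctx context.Context, opts *PackageSearchOptions) error {
	pfds, err := SearchPackages(ctx, opts)
	if err != nil {
		return err
	}
	for _, pfd := range pfds {
		processPackageFile(pfd) // hypothetical helper acting on a *packages.PackageFileDescriptor
	}
	return nil
}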
// GetDistributions gets all available distributions

View file

@ -25,7 +25,7 @@ func GetXORMEngine(engine ...*xorm.Engine) (x *xorm.Engine) {
if len(engine) == 1 {
return engine[0]
}
return db.DefaultContext.(*db.Context).Engine().(*xorm.Engine)
return db.GetEngine(db.DefaultContext).(*xorm.Engine)
}
// InitFixtures initialize test fixtures for a test database

View file

@ -50,25 +50,35 @@ func (repo *Repository) readTreeToIndex(id ObjectID, indexFilename ...string) er
}
// ReadTreeToTemporaryIndex reads a treeish to a temporary index file
func (repo *Repository) ReadTreeToTemporaryIndex(treeish string) (filename, tmpDir string, cancel context.CancelFunc, err error) {
tmpDir, err = os.MkdirTemp("", "index")
if err != nil {
return filename, tmpDir, cancel, err
}
func (repo *Repository) ReadTreeToTemporaryIndex(treeish string) (tmpIndexFilename, tmpDir string, cancel context.CancelFunc, err error) {
defer func() {
// if error happens and there is a cancel function, do clean up
if err != nil && cancel != nil {
cancel()
cancel = nil
}
}()
filename = filepath.Join(tmpDir, ".tmp-index")
cancel = func() {
err := util.RemoveAll(tmpDir)
if err != nil {
log.Error("failed to remove tmp index file: %v", err)
removeDirFn := func(dir string) func() { // it can't use the return value "tmpDir" directly because it is empty when error occurs
return func() {
if err := util.RemoveAll(dir); err != nil {
log.Error("failed to remove tmp index dir: %v", err)
}
}
}
err = repo.ReadTreeToIndex(treeish, filename)
tmpDir, err = os.MkdirTemp("", "index")
if err != nil {
defer cancel()
return "", "", func() {}, err
return "", "", nil, err
}
return filename, tmpDir, cancel, err
tmpIndexFilename = filepath.Join(tmpDir, ".tmp-index")
cancel = removeDirFn(tmpDir)
err = repo.ReadTreeToIndex(treeish, tmpIndexFilename)
if err != nil {
return "", "", cancel, err
}
return tmpIndexFilename, tmpDir, cancel, err
}
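A minimal caller sketch for the adjusted contract (assumed example, not part of the patch): on error the temporary directory has already been removed and the returned cancel may be nil, so only successful callers defer it.
func withTemporaryIndex(gitRepo *Repository) error {
	// "HEAD" is an example treeish
	tmpIndexFile, tmpDir, cancel, err := gitRepo.ReadTreeToTemporaryIndex("HEAD")
	if err != nil {
		return err // any temporary directory was already cleaned up by the function itself
	}
	defer cancel() // removes tmpDir together with the .tmp-index file inside it
	return useIndex(tmpIndexFile, tmpDir) // hypothetical helper running git commands against the temporary index
}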
// EmptyIndex empties the index

View file

@ -16,10 +16,9 @@ import (
"code.gitea.io/gitea/modules/json"
"code.gitea.io/gitea/modules/log"
"code.gitea.io/gitea/modules/proxy"
"code.gitea.io/gitea/modules/setting"
)
const httpBatchSize = 20
// HTTPClient is used to communicate with the LFS server
// https://github.com/git-lfs/git-lfs/blob/main/docs/api/batch.md
type HTTPClient struct {
@ -30,7 +29,7 @@ type HTTPClient struct {
// BatchSize returns the preferred size of batches to process
func (c *HTTPClient) BatchSize() int {
return httpBatchSize
return setting.LFSClient.BatchSize
}
func newHTTPClient(endpoint *url.URL, httpTransport *http.Transport) *HTTPClient {

View file

@ -10,7 +10,10 @@ import (
"code.gitea.io/gitea/modules/generate"
)
// LFS represents the configuration for Git LFS
// LFS represents the server-side configuration for Git LFS.
// Ideally these options should be in a section like "[lfs_server]",
// but they are in "[server]" section due to historical reasons.
// Could be refactored in the future while keeping backwards compatibility.
var LFS = struct {
StartServer bool `ini:"LFS_START_SERVER"`
AllowPureSSH bool `ini:"LFS_ALLOW_PURE_SSH"`
@ -18,15 +21,21 @@ var LFS = struct {
HTTPAuthExpiry time.Duration `ini:"LFS_HTTP_AUTH_EXPIRY"`
MaxFileSize int64 `ini:"LFS_MAX_FILE_SIZE"`
LocksPagingNum int `ini:"LFS_LOCKS_PAGING_NUM"`
MaxBatchSize int `ini:"LFS_MAX_BATCH_SIZE"`
Storage *Storage
}{}
// LFSClient represents configuration for Gitea's LFS clients, for example: mirroring upstream Git LFS
var LFSClient = struct {
BatchSize int `ini:"BATCH_SIZE"`
}{}
func loadLFSFrom(rootCfg ConfigProvider) error {
mustMapSetting(rootCfg, "lfs_client", &LFSClient)
mustMapSetting(rootCfg, "server", &LFS)
sec := rootCfg.Section("server")
if err := sec.MapTo(&LFS); err != nil {
return fmt.Errorf("failed to map LFS settings: %v", err)
}
lfsSec, _ := rootCfg.GetSection("lfs")
@ -53,6 +62,10 @@ func loadLFSFrom(rootCfg ConfigProvider) error {
LFS.LocksPagingNum = 50
}
if LFSClient.BatchSize < 1 {
LFSClient.BatchSize = 20
}
LFS.HTTPAuthExpiry = sec.Key("LFS_HTTP_AUTH_EXPIRY").MustDuration(24 * time.Hour)
if !LFS.StartServer || !InstallLock {

View file

@ -99,3 +99,19 @@ STORAGE_TYPE = minio
assert.EqualValues(t, "gitea", LFS.Storage.MinioConfig.Bucket)
assert.EqualValues(t, "lfs/", LFS.Storage.MinioConfig.BasePath)
}
func Test_LFSClientServerConfigs(t *testing.T) {
iniStr := `
[server]
LFS_MAX_BATCH_SIZE = 100
[lfs_client]
# will default to 20
BATCH_SIZE = 0
`
cfg, err := NewConfigProviderFromData(iniStr)
assert.NoError(t, err)
assert.NoError(t, loadLFSFrom(cfg))
assert.EqualValues(t, 100, LFS.MaxBatchSize)
assert.EqualValues(t, 20, LFSClient.BatchSize)
}

View file

@ -52,6 +52,7 @@ type BranchProtection struct {
RequireSignedCommits bool `json:"require_signed_commits"`
ProtectedFilePatterns string `json:"protected_file_patterns"`
UnprotectedFilePatterns string `json:"unprotected_file_patterns"`
BlockAdminMergeOverride bool `json:"block_admin_merge_override"`
// swagger:strfmt date-time
Created time.Time `json:"created_at"`
// swagger:strfmt date-time
@ -90,6 +91,7 @@ type CreateBranchProtectionOption struct {
RequireSignedCommits bool `json:"require_signed_commits"`
ProtectedFilePatterns string `json:"protected_file_patterns"`
UnprotectedFilePatterns string `json:"unprotected_file_patterns"`
BlockAdminMergeOverride bool `json:"block_admin_merge_override"`
}
// EditBranchProtectionOption options for editing a branch protection
@ -121,4 +123,5 @@ type EditBranchProtectionOption struct {
RequireSignedCommits *bool `json:"require_signed_commits"`
ProtectedFilePatterns *string `json:"protected_file_patterns"`
UnprotectedFilePatterns *string `json:"unprotected_file_patterns"`
BlockAdminMergeOverride *bool `json:"block_admin_merge_override"`
}

View file

@ -2461,6 +2461,8 @@ settings.block_on_official_review_requests = Block merge on official review requ
settings.block_on_official_review_requests_desc = Merging will not be possible when it has official review requests, even if there are enough approvals.
settings.block_outdated_branch = Block merge if pull request is outdated
settings.block_outdated_branch_desc = Merging will not be possible when head branch is behind base branch.
settings.block_admin_merge_override = Administrators must follow branch protection rules
settings.block_admin_merge_override_desc = Administrators must follow branch protection rules and cannot circumvent them.
settings.default_branch_desc = Select a default repository branch for pull requests and code commits:
settings.merge_style_desc = Merge Styles
settings.default_merge_style_desc = Default Merge Style

package-lock.json (generated file, 384 changed lines)
View file

@ -11,7 +11,7 @@
"@citation-js/plugin-software-formats": "0.6.1",
"@github/markdown-toolbar-element": "2.2.3",
"@github/relative-time-element": "4.4.3",
"@github/text-expander-element": "2.7.1",
"@github/text-expander-element": "2.8.0",
"@mcaptcha/vanilla-glue": "0.1.0-alpha-3",
"@primer/octicons": "19.11.0",
"@silverwind/vue3-calendar-heatmap": "2.0.6",
@ -40,13 +40,13 @@
"monaco-editor": "0.51.0",
"monaco-editor-webpack-plugin": "7.1.0",
"pdfobject": "2.3.0",
"perfect-debounce": "1.0.0",
"postcss": "8.4.41",
"postcss-loader": "8.1.1",
"postcss-nesting": "13.0.0",
"sortablejs": "1.15.2",
"swagger-ui-dist": "5.17.14",
"tailwindcss": "3.4.10",
"temporal-polyfill": "0.2.5",
"throttle-debounce": "5.0.2",
"tinycolor2": "1.6.0",
"tippy.js": "6.3.7",
@ -55,7 +55,7 @@
"typescript": "5.5.4",
"uint8-to-base64": "0.2.0",
"vanilla-colorful": "0.7.2",
"vue": "3.4.38",
"vue": "3.5.12",
"vue-bar-graph": "2.1.0",
"vue-chartjs": "5.3.1",
"vue-loader": "17.4.2",
@ -3115,13 +3115,13 @@
"license": "MIT"
},
"node_modules/@github/text-expander-element": {
"version": "2.7.1",
"resolved": "https://registry.npmjs.org/@github/text-expander-element/-/text-expander-element-2.7.1.tgz",
"integrity": "sha512-CWxfYxJRkeWVCUhJveproLs6pHsPrWtK8TsjL8ByYVcSCs8CJmNzF8b7ZawrUgfai0F2jb4aIdw2FoBTykj9XA==",
"version": "2.8.0",
"resolved": "https://registry.npmjs.org/@github/text-expander-element/-/text-expander-element-2.8.0.tgz",
"integrity": "sha512-kkS2rZ/CG8HGKblpLDQ8vcK/K7l/Jsvzi/N4ovwPAsFSOImcIbJh2MgCv9tzqE3wAm/qXlscvh3Ms4Hh1vtZvw==",
"license": "MIT",
"dependencies": {
"@github/combobox-nav": "^2.0.2",
"dom-input-range": "^1.1.6"
"dom-input-range": "^1.2.0"
}
},
"node_modules/@humanwhocodes/config-array": {
@ -3618,224 +3618,208 @@
"license": "MIT"
},
"node_modules/@rollup/rollup-android-arm-eabi": {
"version": "4.21.1",
"resolved": "https://registry.npmjs.org/@rollup/rollup-android-arm-eabi/-/rollup-android-arm-eabi-4.21.1.tgz",
"integrity": "sha512-2thheikVEuU7ZxFXubPDOtspKn1x0yqaYQwvALVtEcvFhMifPADBrgRPyHV0TF3b+9BgvgjgagVyvA/UqPZHmg==",
"version": "4.24.0",
"resolved": "https://registry.npmjs.org/@rollup/rollup-android-arm-eabi/-/rollup-android-arm-eabi-4.24.0.tgz",
"integrity": "sha512-Q6HJd7Y6xdB48x8ZNVDOqsbh2uByBhgK8PiQgPhwkIw/HC/YX5Ghq2mQY5sRMZWHb3VsFkWooUVOZHKr7DmDIA==",
"cpu": [
"arm"
],
"dev": true,
"license": "MIT",
"optional": true,
"os": [
"android"
]
},
"node_modules/@rollup/rollup-android-arm64": {
"version": "4.21.1",
"resolved": "https://registry.npmjs.org/@rollup/rollup-android-arm64/-/rollup-android-arm64-4.21.1.tgz",
"integrity": "sha512-t1lLYn4V9WgnIFHXy1d2Di/7gyzBWS8G5pQSXdZqfrdCGTwi1VasRMSS81DTYb+avDs/Zz4A6dzERki5oRYz1g==",
"version": "4.24.0",
"resolved": "https://registry.npmjs.org/@rollup/rollup-android-arm64/-/rollup-android-arm64-4.24.0.tgz",
"integrity": "sha512-ijLnS1qFId8xhKjT81uBHuuJp2lU4x2yxa4ctFPtG+MqEE6+C5f/+X/bStmxapgmwLwiL3ih122xv8kVARNAZA==",
"cpu": [
"arm64"
],
"dev": true,
"license": "MIT",
"optional": true,
"os": [
"android"
]
},
"node_modules/@rollup/rollup-darwin-arm64": {
"version": "4.21.1",
"resolved": "https://registry.npmjs.org/@rollup/rollup-darwin-arm64/-/rollup-darwin-arm64-4.21.1.tgz",
"integrity": "sha512-AH/wNWSEEHvs6t4iJ3RANxW5ZCK3fUnmf0gyMxWCesY1AlUj8jY7GC+rQE4wd3gwmZ9XDOpL0kcFnCjtN7FXlA==",
"version": "4.24.0",
"resolved": "https://registry.npmjs.org/@rollup/rollup-darwin-arm64/-/rollup-darwin-arm64-4.24.0.tgz",
"integrity": "sha512-bIv+X9xeSs1XCk6DVvkO+S/z8/2AMt/2lMqdQbMrmVpgFvXlmde9mLcbQpztXm1tajC3raFDqegsH18HQPMYtA==",
"cpu": [
"arm64"
],
"dev": true,
"license": "MIT",
"optional": true,
"os": [
"darwin"
]
},
"node_modules/@rollup/rollup-darwin-x64": {
"version": "4.21.1",
"resolved": "https://registry.npmjs.org/@rollup/rollup-darwin-x64/-/rollup-darwin-x64-4.21.1.tgz",
"integrity": "sha512-dO0BIz/+5ZdkLZrVgQrDdW7m2RkrLwYTh2YMFG9IpBtlC1x1NPNSXkfczhZieOlOLEqgXOFH3wYHB7PmBtf+Bg==",
"version": "4.24.0",
"resolved": "https://registry.npmjs.org/@rollup/rollup-darwin-x64/-/rollup-darwin-x64-4.24.0.tgz",
"integrity": "sha512-X6/nOwoFN7RT2svEQWUsW/5C/fYMBe4fnLK9DQk4SX4mgVBiTA9h64kjUYPvGQ0F/9xwJ5U5UfTbl6BEjaQdBQ==",
"cpu": [
"x64"
],
"dev": true,
"license": "MIT",
"optional": true,
"os": [
"darwin"
]
},
"node_modules/@rollup/rollup-linux-arm-gnueabihf": {
"version": "4.21.1",
"resolved": "https://registry.npmjs.org/@rollup/rollup-linux-arm-gnueabihf/-/rollup-linux-arm-gnueabihf-4.21.1.tgz",
"integrity": "sha512-sWWgdQ1fq+XKrlda8PsMCfut8caFwZBmhYeoehJ05FdI0YZXk6ZyUjWLrIgbR/VgiGycrFKMMgp7eJ69HOF2pQ==",
"version": "4.24.0",
"resolved": "https://registry.npmjs.org/@rollup/rollup-linux-arm-gnueabihf/-/rollup-linux-arm-gnueabihf-4.24.0.tgz",
"integrity": "sha512-0KXvIJQMOImLCVCz9uvvdPgfyWo93aHHp8ui3FrtOP57svqrF/roSSR5pjqL2hcMp0ljeGlU4q9o/rQaAQ3AYA==",
"cpu": [
"arm"
],
"dev": true,
"license": "MIT",
"optional": true,
"os": [
"linux"
]
},
"node_modules/@rollup/rollup-linux-arm-musleabihf": {
"version": "4.21.1",
"resolved": "https://registry.npmjs.org/@rollup/rollup-linux-arm-musleabihf/-/rollup-linux-arm-musleabihf-4.21.1.tgz",
"integrity": "sha512-9OIiSuj5EsYQlmwhmFRA0LRO0dRRjdCVZA3hnmZe1rEwRk11Jy3ECGGq3a7RrVEZ0/pCsYWx8jG3IvcrJ6RCew==",
"version": "4.24.0",
"resolved": "https://registry.npmjs.org/@rollup/rollup-linux-arm-musleabihf/-/rollup-linux-arm-musleabihf-4.24.0.tgz",
"integrity": "sha512-it2BW6kKFVh8xk/BnHfakEeoLPv8STIISekpoF+nBgWM4d55CZKc7T4Dx1pEbTnYm/xEKMgy1MNtYuoA8RFIWw==",
"cpu": [
"arm"
],
"dev": true,
"license": "MIT",
"optional": true,
"os": [
"linux"
]
},
"node_modules/@rollup/rollup-linux-arm64-gnu": {
"version": "4.21.1",
"resolved": "https://registry.npmjs.org/@rollup/rollup-linux-arm64-gnu/-/rollup-linux-arm64-gnu-4.21.1.tgz",
"integrity": "sha512-0kuAkRK4MeIUbzQYu63NrJmfoUVicajoRAL1bpwdYIYRcs57iyIV9NLcuyDyDXE2GiZCL4uhKSYAnyWpjZkWow==",
"version": "4.24.0",
"resolved": "https://registry.npmjs.org/@rollup/rollup-linux-arm64-gnu/-/rollup-linux-arm64-gnu-4.24.0.tgz",
"integrity": "sha512-i0xTLXjqap2eRfulFVlSnM5dEbTVque/3Pi4g2y7cxrs7+a9De42z4XxKLYJ7+OhE3IgxvfQM7vQc43bwTgPwA==",
"cpu": [
"arm64"
],
"dev": true,
"license": "MIT",
"optional": true,
"os": [
"linux"
]
},
"node_modules/@rollup/rollup-linux-arm64-musl": {
"version": "4.21.1",
"resolved": "https://registry.npmjs.org/@rollup/rollup-linux-arm64-musl/-/rollup-linux-arm64-musl-4.21.1.tgz",
"integrity": "sha512-/6dYC9fZtfEY0vozpc5bx1RP4VrtEOhNQGb0HwvYNwXD1BBbwQ5cKIbUVVU7G2d5WRE90NfB922elN8ASXAJEA==",
"version": "4.24.0",
"resolved": "https://registry.npmjs.org/@rollup/rollup-linux-arm64-musl/-/rollup-linux-arm64-musl-4.24.0.tgz",
"integrity": "sha512-9E6MKUJhDuDh604Qco5yP/3qn3y7SLXYuiC0Rpr89aMScS2UAmK1wHP2b7KAa1nSjWJc/f/Lc0Wl1L47qjiyQw==",
"cpu": [
"arm64"
],
"dev": true,
"license": "MIT",
"optional": true,
"os": [
"linux"
]
},
"node_modules/@rollup/rollup-linux-powerpc64le-gnu": {
"version": "4.21.1",
"resolved": "https://registry.npmjs.org/@rollup/rollup-linux-powerpc64le-gnu/-/rollup-linux-powerpc64le-gnu-4.21.1.tgz",
"integrity": "sha512-ltUWy+sHeAh3YZ91NUsV4Xg3uBXAlscQe8ZOXRCVAKLsivGuJsrkawYPUEyCV3DYa9urgJugMLn8Z3Z/6CeyRQ==",
"version": "4.24.0",
"resolved": "https://registry.npmjs.org/@rollup/rollup-linux-powerpc64le-gnu/-/rollup-linux-powerpc64le-gnu-4.24.0.tgz",
"integrity": "sha512-2XFFPJ2XMEiF5Zi2EBf4h73oR1V/lycirxZxHZNc93SqDN/IWhYYSYj8I9381ikUFXZrz2v7r2tOVk2NBwxrWw==",
"cpu": [
"ppc64"
],
"dev": true,
"license": "MIT",
"optional": true,
"os": [
"linux"
]
},
"node_modules/@rollup/rollup-linux-riscv64-gnu": {
"version": "4.21.1",
"resolved": "https://registry.npmjs.org/@rollup/rollup-linux-riscv64-gnu/-/rollup-linux-riscv64-gnu-4.21.1.tgz",
"integrity": "sha512-BggMndzI7Tlv4/abrgLwa/dxNEMn2gC61DCLrTzw8LkpSKel4o+O+gtjbnkevZ18SKkeN3ihRGPuBxjaetWzWg==",
"version": "4.24.0",
"resolved": "https://registry.npmjs.org/@rollup/rollup-linux-riscv64-gnu/-/rollup-linux-riscv64-gnu-4.24.0.tgz",
"integrity": "sha512-M3Dg4hlwuntUCdzU7KjYqbbd+BLq3JMAOhCKdBE3TcMGMZbKkDdJ5ivNdehOssMCIokNHFOsv7DO4rlEOfyKpg==",
"cpu": [
"riscv64"
],
"dev": true,
"license": "MIT",
"optional": true,
"os": [
"linux"
]
},
"node_modules/@rollup/rollup-linux-s390x-gnu": {
"version": "4.21.1",
"resolved": "https://registry.npmjs.org/@rollup/rollup-linux-s390x-gnu/-/rollup-linux-s390x-gnu-4.21.1.tgz",
"integrity": "sha512-z/9rtlGd/OMv+gb1mNSjElasMf9yXusAxnRDrBaYB+eS1shFm6/4/xDH1SAISO5729fFKUkJ88TkGPRUh8WSAA==",
"version": "4.24.0",
"resolved": "https://registry.npmjs.org/@rollup/rollup-linux-s390x-gnu/-/rollup-linux-s390x-gnu-4.24.0.tgz",
"integrity": "sha512-mjBaoo4ocxJppTorZVKWFpy1bfFj9FeCMJqzlMQGjpNPY9JwQi7OuS1axzNIk0nMX6jSgy6ZURDZ2w0QW6D56g==",
"cpu": [
"s390x"
],
"dev": true,
"license": "MIT",
"optional": true,
"os": [
"linux"
]
},
"node_modules/@rollup/rollup-linux-x64-gnu": {
"version": "4.21.1",
"resolved": "https://registry.npmjs.org/@rollup/rollup-linux-x64-gnu/-/rollup-linux-x64-gnu-4.21.1.tgz",
"integrity": "sha512-kXQVcWqDcDKw0S2E0TmhlTLlUgAmMVqPrJZR+KpH/1ZaZhLSl23GZpQVmawBQGVhyP5WXIsIQ/zqbDBBYmxm5w==",
"version": "4.24.0",
"resolved": "https://registry.npmjs.org/@rollup/rollup-linux-x64-gnu/-/rollup-linux-x64-gnu-4.24.0.tgz",
"integrity": "sha512-ZXFk7M72R0YYFN5q13niV0B7G8/5dcQ9JDp8keJSfr3GoZeXEoMHP/HlvqROA3OMbMdfr19IjCeNAnPUG93b6A==",
"cpu": [
"x64"
],
"dev": true,
"license": "MIT",
"optional": true,
"os": [
"linux"
]
},
"node_modules/@rollup/rollup-linux-x64-musl": {
"version": "4.21.1",
"resolved": "https://registry.npmjs.org/@rollup/rollup-linux-x64-musl/-/rollup-linux-x64-musl-4.21.1.tgz",
"integrity": "sha512-CbFv/WMQsSdl+bpX6rVbzR4kAjSSBuDgCqb1l4J68UYsQNalz5wOqLGYj4ZI0thGpyX5kc+LLZ9CL+kpqDovZA==",
"version": "4.24.0",
"resolved": "https://registry.npmjs.org/@rollup/rollup-linux-x64-musl/-/rollup-linux-x64-musl-4.24.0.tgz",
"integrity": "sha512-w1i+L7kAXZNdYl+vFvzSZy8Y1arS7vMgIy8wusXJzRrPyof5LAb02KGr1PD2EkRcl73kHulIID0M501lN+vobQ==",
"cpu": [
"x64"
],
"dev": true,
"license": "MIT",
"optional": true,
"os": [
"linux"
]
},
"node_modules/@rollup/rollup-win32-arm64-msvc": {
"version": "4.21.1",
"resolved": "https://registry.npmjs.org/@rollup/rollup-win32-arm64-msvc/-/rollup-win32-arm64-msvc-4.21.1.tgz",
"integrity": "sha512-3Q3brDgA86gHXWHklrwdREKIrIbxC0ZgU8lwpj0eEKGBQH+31uPqr0P2v11pn0tSIxHvcdOWxa4j+YvLNx1i6g==",
"version": "4.24.0",
"resolved": "https://registry.npmjs.org/@rollup/rollup-win32-arm64-msvc/-/rollup-win32-arm64-msvc-4.24.0.tgz",
"integrity": "sha512-VXBrnPWgBpVDCVY6XF3LEW0pOU51KbaHhccHw6AS6vBWIC60eqsH19DAeeObl+g8nKAz04QFdl/Cefta0xQtUQ==",
"cpu": [
"arm64"
],
"dev": true,
"license": "MIT",
"optional": true,
"os": [
"win32"
]
},
"node_modules/@rollup/rollup-win32-ia32-msvc": {
"version": "4.21.1",
"resolved": "https://registry.npmjs.org/@rollup/rollup-win32-ia32-msvc/-/rollup-win32-ia32-msvc-4.21.1.tgz",
"integrity": "sha512-tNg+jJcKR3Uwe4L0/wY3Ro0H+u3nrb04+tcq1GSYzBEmKLeOQF2emk1whxlzNqb6MMrQ2JOcQEpuuiPLyRcSIw==",
"version": "4.24.0",
"resolved": "https://registry.npmjs.org/@rollup/rollup-win32-ia32-msvc/-/rollup-win32-ia32-msvc-4.24.0.tgz",
"integrity": "sha512-xrNcGDU0OxVcPTH/8n/ShH4UevZxKIO6HJFK0e15XItZP2UcaiLFd5kiX7hJnqCbSztUF8Qot+JWBC/QXRPYWQ==",
"cpu": [
"ia32"
],
"dev": true,
"license": "MIT",
"optional": true,
"os": [
"win32"
]
},
"node_modules/@rollup/rollup-win32-x64-msvc": {
"version": "4.21.1",
"resolved": "https://registry.npmjs.org/@rollup/rollup-win32-x64-msvc/-/rollup-win32-x64-msvc-4.21.1.tgz",
"integrity": "sha512-xGiIH95H1zU7naUyTKEyOA/I0aexNMUdO9qRv0bLKN3qu25bBdrxZHqA3PTJ24YNN/GdMzG4xkDcd/GvjuhfLg==",
"version": "4.24.0",
"resolved": "https://registry.npmjs.org/@rollup/rollup-win32-x64-msvc/-/rollup-win32-x64-msvc-4.24.0.tgz",
"integrity": "sha512-fbMkAF7fufku0N2dE5TBXcNlg0pt0cJue4xBRE2Qc5Vqikxr4VCgKj/ht6SMdFcOacVA9rqF70APJ8RN/4vMJw==",
"cpu": [
"x64"
],
"dev": true,
"license": "MIT",
"optional": true,
"os": [
"win32"
@ -5202,113 +5186,130 @@
}
},
"node_modules/@vue/compiler-core": {
"version": "3.4.38",
"resolved": "https://registry.npmjs.org/@vue/compiler-core/-/compiler-core-3.4.38.tgz",
"integrity": "sha512-8IQOTCWnLFqfHzOGm9+P8OPSEDukgg3Huc92qSG49if/xI2SAwLHQO2qaPQbjCWPBcQoO1WYfXfTACUrWV3c5A==",
"license": "MIT",
"version": "3.5.12",
"resolved": "https://registry.npmjs.org/@vue/compiler-core/-/compiler-core-3.5.12.tgz",
"integrity": "sha512-ISyBTRMmMYagUxhcpyEH0hpXRd/KqDU4ymofPgl2XAkY9ZhQ+h0ovEZJIiPop13UmR/54oA2cgMDjgroRelaEw==",
"dependencies": {
"@babel/parser": "^7.24.7",
"@vue/shared": "3.4.38",
"@babel/parser": "^7.25.3",
"@vue/shared": "3.5.12",
"entities": "^4.5.0",
"estree-walker": "^2.0.2",
"source-map-js": "^1.2.0"
}
},
"node_modules/@vue/compiler-dom": {
"version": "3.4.38",
"resolved": "https://registry.npmjs.org/@vue/compiler-dom/-/compiler-dom-3.4.38.tgz",
"integrity": "sha512-Osc/c7ABsHXTsETLgykcOwIxFktHfGSUDkb05V61rocEfsFDcjDLH/IHJSNJP+/Sv9KeN2Lx1V6McZzlSb9EhQ==",
"license": "MIT",
"version": "3.5.12",
"resolved": "https://registry.npmjs.org/@vue/compiler-dom/-/compiler-dom-3.5.12.tgz",
"integrity": "sha512-9G6PbJ03uwxLHKQ3P42cMTi85lDRvGLB2rSGOiQqtXELat6uI4n8cNz9yjfVHRPIu+MsK6TE418Giruvgptckg==",
"dependencies": {
"@vue/compiler-core": "3.4.38",
"@vue/shared": "3.4.38"
"@vue/compiler-core": "3.5.12",
"@vue/shared": "3.5.12"
}
},
"node_modules/@vue/compiler-sfc": {
"version": "3.4.38",
"resolved": "https://registry.npmjs.org/@vue/compiler-sfc/-/compiler-sfc-3.4.38.tgz",
"integrity": "sha512-s5QfZ+9PzPh3T5H4hsQDJtI8x7zdJaew/dCGgqZ2630XdzaZ3AD8xGZfBqpT8oaD/p2eedd+pL8tD5vvt5ZYJQ==",
"license": "MIT",
"version": "3.5.12",
"resolved": "https://registry.npmjs.org/@vue/compiler-sfc/-/compiler-sfc-3.5.12.tgz",
"integrity": "sha512-2k973OGo2JuAa5+ZlekuQJtitI5CgLMOwgl94BzMCsKZCX/xiqzJYzapl4opFogKHqwJk34vfsaKpfEhd1k5nw==",
"dependencies": {
"@babel/parser": "^7.24.7",
"@vue/compiler-core": "3.4.38",
"@vue/compiler-dom": "3.4.38",
"@vue/compiler-ssr": "3.4.38",
"@vue/shared": "3.4.38",
"@babel/parser": "^7.25.3",
"@vue/compiler-core": "3.5.12",
"@vue/compiler-dom": "3.5.12",
"@vue/compiler-ssr": "3.5.12",
"@vue/shared": "3.5.12",
"estree-walker": "^2.0.2",
"magic-string": "^0.30.10",
"postcss": "^8.4.40",
"magic-string": "^0.30.11",
"postcss": "^8.4.47",
"source-map-js": "^1.2.0"
}
},
"node_modules/@vue/compiler-sfc/node_modules/magic-string": {
"version": "0.30.11",
"resolved": "https://registry.npmjs.org/magic-string/-/magic-string-0.30.11.tgz",
"integrity": "sha512-+Wri9p0QHMy+545hKww7YAu5NyzF8iomPL/RQazugQ9+Ez4Ic3mERMd8ZTX5rfK944j+560ZJi8iAwgak1Ac7A==",
"license": "MIT",
"version": "0.30.12",
"resolved": "https://registry.npmjs.org/magic-string/-/magic-string-0.30.12.tgz",
"integrity": "sha512-Ea8I3sQMVXr8JhN4z+H/d8zwo+tYDgHE9+5G4Wnrwhs0gaK9fXTKx0Tw5Xwsd/bCPTTZNRAdpyzvoeORe9LYpw==",
"dependencies": {
"@jridgewell/sourcemap-codec": "^1.5.0"
}
},
"node_modules/@vue/compiler-ssr": {
"version": "3.4.38",
"resolved": "https://registry.npmjs.org/@vue/compiler-ssr/-/compiler-ssr-3.4.38.tgz",
"integrity": "sha512-YXznKFQ8dxYpAz9zLuVvfcXhc31FSPFDcqr0kyujbOwNhlmaNvL2QfIy+RZeJgSn5Fk54CWoEUeW+NVBAogGaw==",
"license": "MIT",
"node_modules/@vue/compiler-sfc/node_modules/postcss": {
"version": "8.4.47",
"resolved": "https://registry.npmjs.org/postcss/-/postcss-8.4.47.tgz",
"integrity": "sha512-56rxCq7G/XfB4EkXq9Egn5GCqugWvDFjafDOThIdMBsI15iqPqR5r15TfSr1YPYeEI19YeaXMCbY6u88Y76GLQ==",
"funding": [
{
"type": "opencollective",
"url": "https://opencollective.com/postcss/"
},
{
"type": "tidelift",
"url": "https://tidelift.com/funding/github/npm/postcss"
},
{
"type": "github",
"url": "https://github.com/sponsors/ai"
}
],
"dependencies": {
"@vue/compiler-dom": "3.4.38",
"@vue/shared": "3.4.38"
"nanoid": "^3.3.7",
"picocolors": "^1.1.0",
"source-map-js": "^1.2.1"
},
"engines": {
"node": "^10 || ^12 || >=14"
}
},
"node_modules/@vue/compiler-ssr": {
"version": "3.5.12",
"resolved": "https://registry.npmjs.org/@vue/compiler-ssr/-/compiler-ssr-3.5.12.tgz",
"integrity": "sha512-eLwc7v6bfGBSM7wZOGPmRavSWzNFF6+PdRhE+VFJhNCgHiF8AM7ccoqcv5kBXA2eWUfigD7byekvf/JsOfKvPA==",
"dependencies": {
"@vue/compiler-dom": "3.5.12",
"@vue/shared": "3.5.12"
}
},
"node_modules/@vue/reactivity": {
"version": "3.4.38",
"resolved": "https://registry.npmjs.org/@vue/reactivity/-/reactivity-3.4.38.tgz",
"integrity": "sha512-4vl4wMMVniLsSYYeldAKzbk72+D3hUnkw9z8lDeJacTxAkXeDAP1uE9xr2+aKIN0ipOL8EG2GPouVTH6yF7Gnw==",
"license": "MIT",
"version": "3.5.12",
"resolved": "https://registry.npmjs.org/@vue/reactivity/-/reactivity-3.5.12.tgz",
"integrity": "sha512-UzaN3Da7xnJXdz4Okb/BGbAaomRHc3RdoWqTzlvd9+WBR5m3J39J1fGcHes7U3za0ruYn/iYy/a1euhMEHvTAg==",
"dependencies": {
"@vue/shared": "3.4.38"
"@vue/shared": "3.5.12"
}
},
"node_modules/@vue/runtime-core": {
"version": "3.4.38",
"resolved": "https://registry.npmjs.org/@vue/runtime-core/-/runtime-core-3.4.38.tgz",
"integrity": "sha512-21z3wA99EABtuf+O3IhdxP0iHgkBs1vuoCAsCKLVJPEjpVqvblwBnTj42vzHRlWDCyxu9ptDm7sI2ZMcWrQqlA==",
"license": "MIT",
"version": "3.5.12",
"resolved": "https://registry.npmjs.org/@vue/runtime-core/-/runtime-core-3.5.12.tgz",
"integrity": "sha512-hrMUYV6tpocr3TL3Ad8DqxOdpDe4zuQY4HPY3X/VRh+L2myQO8MFXPAMarIOSGNu0bFAjh1yBkMPXZBqCk62Uw==",
"dependencies": {
"@vue/reactivity": "3.4.38",
"@vue/shared": "3.4.38"
"@vue/reactivity": "3.5.12",
"@vue/shared": "3.5.12"
}
},
"node_modules/@vue/runtime-dom": {
"version": "3.4.38",
"resolved": "https://registry.npmjs.org/@vue/runtime-dom/-/runtime-dom-3.4.38.tgz",
"integrity": "sha512-afZzmUreU7vKwKsV17H1NDThEEmdYI+GCAK/KY1U957Ig2NATPVjCROv61R19fjZNzMmiU03n79OMnXyJVN0UA==",
"license": "MIT",
"version": "3.5.12",
"resolved": "https://registry.npmjs.org/@vue/runtime-dom/-/runtime-dom-3.5.12.tgz",
"integrity": "sha512-q8VFxR9A2MRfBr6/55Q3umyoN7ya836FzRXajPB6/Vvuv0zOPL+qltd9rIMzG/DbRLAIlREmnLsplEF/kotXKA==",
"dependencies": {
"@vue/reactivity": "3.4.38",
"@vue/runtime-core": "3.4.38",
"@vue/shared": "3.4.38",
"@vue/reactivity": "3.5.12",
"@vue/runtime-core": "3.5.12",
"@vue/shared": "3.5.12",
"csstype": "^3.1.3"
}
},
"node_modules/@vue/server-renderer": {
"version": "3.4.38",
"resolved": "https://registry.npmjs.org/@vue/server-renderer/-/server-renderer-3.4.38.tgz",
"integrity": "sha512-NggOTr82FbPEkkUvBm4fTGcwUY8UuTsnWC/L2YZBmvaQ4C4Jl/Ao4HHTB+l7WnFCt5M/dN3l0XLuyjzswGYVCA==",
"license": "MIT",
"version": "3.5.12",
"resolved": "https://registry.npmjs.org/@vue/server-renderer/-/server-renderer-3.5.12.tgz",
"integrity": "sha512-I3QoeDDeEPZm8yR28JtY+rk880Oqmj43hreIBVTicisFTx/Dl7JpG72g/X7YF8hnQD3IFhkky5i2bPonwrTVPg==",
"dependencies": {
"@vue/compiler-ssr": "3.4.38",
"@vue/shared": "3.4.38"
"@vue/compiler-ssr": "3.5.12",
"@vue/shared": "3.5.12"
},
"peerDependencies": {
"vue": "3.4.38"
"vue": "3.5.12"
}
},
"node_modules/@vue/shared": {
"version": "3.4.38",
"resolved": "https://registry.npmjs.org/@vue/shared/-/shared-3.4.38.tgz",
"integrity": "sha512-q0xCiLkuWWQLzVrecPb0RMsNWyxICOjPrcrwxTUEHb1fsnvni4dcuyG7RT/Ie7VPTvnjzIaWzRMUBsrqNj/hhw==",
"license": "MIT"
"version": "3.5.12",
"resolved": "https://registry.npmjs.org/@vue/shared/-/shared-3.5.12.tgz",
"integrity": "sha512-L2RPSAwUFbgZH20etwrXyVyCBu9OxRSi8T/38QsvnkJyvq2LufW2lDCOzm7t/U9C1mkhJGWYfCuFBCmIuNivrg=="
},
"node_modules/@webassemblyjs/ast": {
"version": "1.12.1",
@ -7408,9 +7409,9 @@
}
},
"node_modules/dom-input-range": {
"version": "1.1.6",
"resolved": "https://registry.npmjs.org/dom-input-range/-/dom-input-range-1.1.6.tgz",
"integrity": "sha512-4o/SkTpscD0n81BeErrrtmE58lG8vTks++92vk//ld0NmkQTb4AVJ2rexh2yor6rtBf5IMte26u+fF3EgCppPQ==",
"version": "1.2.0",
"resolved": "https://registry.npmjs.org/dom-input-range/-/dom-input-range-1.2.0.tgz",
"integrity": "sha512-8HVA5Oy5Vt872S7IXsjjp6/5Hqsm5YZLhurxwwQXp80T9qVsj8/mEUH3sQlFujLLUoWfxiaThHHuJ3/q1MHVuA==",
"license": "MIT",
"workspaces": [
"demos"
@ -12459,11 +12460,16 @@
"integrity": "sha512-w/9pXDXTDs3IDmOri/w8lM/w6LHR0/F4fcBLLzH+4csSoyshQ5su0TE7k0FLHZO7aOjVLDGecqd1M89+PVpVAA==",
"license": "MIT"
},
"node_modules/perfect-debounce": {
"version": "1.0.0",
"resolved": "https://registry.npmjs.org/perfect-debounce/-/perfect-debounce-1.0.0.tgz",
"integrity": "sha512-xCy9V055GLEqoFaHoC1SoLIaLmWctgCUaBaWxDZ7/Zx4CTyX7cJQLJOok/orfjZAh9kEYpjJa4d0KcJmCbctZA==",
"license": "MIT"
},
"node_modules/picocolors": {
"version": "1.0.1",
"resolved": "https://registry.npmjs.org/picocolors/-/picocolors-1.0.1.tgz",
"integrity": "sha512-anP1Z8qwhkbmu7MFP5iTt+wQKXgwzf7zTyGlcdzabySa9vd0Xt392U0rVmz9poOaBj0uHJKyyo9/upk0HrEQew==",
"license": "ISC"
"version": "1.1.1",
"resolved": "https://registry.npmjs.org/picocolors/-/picocolors-1.1.1.tgz",
"integrity": "sha512-xceH2snhtb5M9liqDsmEw56le376mTZkEX/jEb/RxNFyegNul7eNslCXP9FDj/Lcu0X8KEyMceP2ntpaHrDEVA=="
},
"node_modules/picomatch": {
"version": "2.3.1",
@ -13512,11 +13518,10 @@
"license": "Unlicense"
},
"node_modules/rollup": {
"version": "2.79.1",
"resolved": "https://registry.npmjs.org/rollup/-/rollup-2.79.1.tgz",
"integrity": "sha512-uKxbd0IhMZOhjAiD5oAFp7BqvkA4Dv47qpOCtaNvng4HBwdbWtdOh8f5nZNuk2rp51PMGk3bzfWu5oayNEuYnw==",
"version": "2.79.2",
"resolved": "https://registry.npmjs.org/rollup/-/rollup-2.79.2.tgz",
"integrity": "sha512-fS6iqSPZDs3dr/y7Od6y5nha8dW1YnbgtsyotCVvoFGKbERG++CVRFv1meyGDE1SNItQA8BrnCw7ScdAhRJ3XQ==",
"dev": true,
"license": "MIT",
"bin": {
"rollup": "dist/bin/rollup"
},
@ -13847,10 +13852,9 @@
}
},
"node_modules/source-map-js": {
"version": "1.2.0",
"resolved": "https://registry.npmjs.org/source-map-js/-/source-map-js-1.2.0.tgz",
"integrity": "sha512-itJW8lvSA0TXEphiRoawsCksnlf8SyvmFzIhltqAHluXd88pkCd+cXJVHTDwdCr0IzwptSm035IHQktUu1QUMg==",
"license": "BSD-3-Clause",
"version": "1.2.1",
"resolved": "https://registry.npmjs.org/source-map-js/-/source-map-js-1.2.1.tgz",
"integrity": "sha512-UXWMKhLOwVKb728IUtQPXxfYU+usdybtUrK/8uGE8CQMvrhOpwvzDBwj0QhSL7MQc7vIsISBG8VQ8+IDQxpfQA==",
"engines": {
"node": ">=0.10.0"
}
@ -14743,21 +14747,6 @@
"node": ">=6"
}
},
"node_modules/temporal-polyfill": {
"version": "0.2.5",
"resolved": "https://registry.npmjs.org/temporal-polyfill/-/temporal-polyfill-0.2.5.tgz",
"integrity": "sha512-ye47xp8Cb0nDguAhrrDS1JT1SzwEV9e26sSsrWzVu+yPZ7LzceEcH0i2gci9jWfOfSCCgM3Qv5nOYShVUUFUXA==",
"license": "MIT",
"dependencies": {
"temporal-spec": "^0.2.4"
}
},
"node_modules/temporal-spec": {
"version": "0.2.4",
"resolved": "https://registry.npmjs.org/temporal-spec/-/temporal-spec-0.2.4.tgz",
"integrity": "sha512-lDMFv4nKQrSjlkHKAlHVqKrBG4DyFfa9F74cmBZ3Iy3ed8yvWnlWSIdi4IKfSqwmazAohBNwiN64qGx4y5Q3IQ==",
"license": "ISC"
},
"node_modules/terser": {
"version": "5.31.6",
"resolved": "https://registry.npmjs.org/terser/-/terser-5.31.6.tgz",
@ -15378,11 +15367,10 @@
"license": "BSD-2-Clause"
},
"node_modules/vite/node_modules/@types/estree": {
"version": "1.0.5",
"resolved": "https://registry.npmjs.org/@types/estree/-/estree-1.0.5.tgz",
"integrity": "sha512-/kYRxGDLWzHOB7q+wtSUQlFrtcdUccpfy+X+9iMBpHK8QLLhx2wIPYuS5DYtR9Wa/YlZAbIovy7qVdB1Aq6Lyw==",
"dev": true,
"license": "MIT"
"version": "1.0.6",
"resolved": "https://registry.npmjs.org/@types/estree/-/estree-1.0.6.tgz",
"integrity": "sha512-AYnb1nQyY49te+VRAVgmzfcgjYS91mY5P0TKUDCLEM+gNnA+3T6rWITXRLYCpahpqSQbN5cE+gHpnPyXjHWxcw==",
"dev": true
},
"node_modules/vite/node_modules/fsevents": {
"version": "2.3.3",
@ -15400,13 +15388,12 @@
}
},
"node_modules/vite/node_modules/rollup": {
"version": "4.21.1",
"resolved": "https://registry.npmjs.org/rollup/-/rollup-4.21.1.tgz",
"integrity": "sha512-ZnYyKvscThhgd3M5+Qt3pmhO4jIRR5RGzaSovB6Q7rGNrK5cUncrtLmcTTJVSdcKXyZjW8X8MB0JMSuH9bcAJg==",
"version": "4.24.0",
"resolved": "https://registry.npmjs.org/rollup/-/rollup-4.24.0.tgz",
"integrity": "sha512-DOmrlGSXNk1DM0ljiQA+i+o0rSLhtii1je5wgk60j49d1jHT5YYttBv1iWOnYSTG+fZZESUOSNiAl89SIet+Cg==",
"dev": true,
"license": "MIT",
"dependencies": {
"@types/estree": "1.0.5"
"@types/estree": "1.0.6"
},
"bin": {
"rollup": "dist/bin/rollup"
@ -15416,22 +15403,22 @@
"npm": ">=8.0.0"
},
"optionalDependencies": {
"@rollup/rollup-android-arm-eabi": "4.21.1",
"@rollup/rollup-android-arm64": "4.21.1",
"@rollup/rollup-darwin-arm64": "4.21.1",
"@rollup/rollup-darwin-x64": "4.21.1",
"@rollup/rollup-linux-arm-gnueabihf": "4.21.1",
"@rollup/rollup-linux-arm-musleabihf": "4.21.1",
"@rollup/rollup-linux-arm64-gnu": "4.21.1",
"@rollup/rollup-linux-arm64-musl": "4.21.1",
"@rollup/rollup-linux-powerpc64le-gnu": "4.21.1",
"@rollup/rollup-linux-riscv64-gnu": "4.21.1",
"@rollup/rollup-linux-s390x-gnu": "4.21.1",
"@rollup/rollup-linux-x64-gnu": "4.21.1",
"@rollup/rollup-linux-x64-musl": "4.21.1",
"@rollup/rollup-win32-arm64-msvc": "4.21.1",
"@rollup/rollup-win32-ia32-msvc": "4.21.1",
"@rollup/rollup-win32-x64-msvc": "4.21.1",
"@rollup/rollup-android-arm-eabi": "4.24.0",
"@rollup/rollup-android-arm64": "4.24.0",
"@rollup/rollup-darwin-arm64": "4.24.0",
"@rollup/rollup-darwin-x64": "4.24.0",
"@rollup/rollup-linux-arm-gnueabihf": "4.24.0",
"@rollup/rollup-linux-arm-musleabihf": "4.24.0",
"@rollup/rollup-linux-arm64-gnu": "4.24.0",
"@rollup/rollup-linux-arm64-musl": "4.24.0",
"@rollup/rollup-linux-powerpc64le-gnu": "4.24.0",
"@rollup/rollup-linux-riscv64-gnu": "4.24.0",
"@rollup/rollup-linux-s390x-gnu": "4.24.0",
"@rollup/rollup-linux-x64-gnu": "4.24.0",
"@rollup/rollup-linux-x64-musl": "4.24.0",
"@rollup/rollup-win32-arm64-msvc": "4.24.0",
"@rollup/rollup-win32-ia32-msvc": "4.24.0",
"@rollup/rollup-win32-x64-msvc": "4.24.0",
"fsevents": "~2.3.2"
}
},
@ -15560,16 +15547,15 @@
"license": "MIT"
},
"node_modules/vue": {
"version": "3.4.38",
"resolved": "https://registry.npmjs.org/vue/-/vue-3.4.38.tgz",
"integrity": "sha512-f0ZgN+mZ5KFgVv9wz0f4OgVKukoXtS3nwET4c2vLBGQR50aI8G0cqbFtLlX9Yiyg3LFGBitruPHt2PxwTduJEw==",
"license": "MIT",
"version": "3.5.12",
"resolved": "https://registry.npmjs.org/vue/-/vue-3.5.12.tgz",
"integrity": "sha512-CLVZtXtn2ItBIi/zHZ0Sg1Xkb7+PU32bJJ8Bmy7ts3jxXTcbfsEfBivFYYWz1Hur+lalqGAh65Coin0r+HRUfg==",
"dependencies": {
"@vue/compiler-dom": "3.4.38",
"@vue/compiler-sfc": "3.4.38",
"@vue/runtime-dom": "3.4.38",
"@vue/server-renderer": "3.4.38",
"@vue/shared": "3.4.38"
"@vue/compiler-dom": "3.5.12",
"@vue/compiler-sfc": "3.5.12",
"@vue/runtime-dom": "3.5.12",
"@vue/server-renderer": "3.5.12",
"@vue/shared": "3.5.12"
},
"peerDependencies": {
"typescript": "*"

View file

@ -10,7 +10,7 @@
"@citation-js/plugin-software-formats": "0.6.1",
"@github/markdown-toolbar-element": "2.2.3",
"@github/relative-time-element": "4.4.3",
"@github/text-expander-element": "2.7.1",
"@github/text-expander-element": "2.8.0",
"@mcaptcha/vanilla-glue": "0.1.0-alpha-3",
"@primer/octicons": "19.11.0",
"@silverwind/vue3-calendar-heatmap": "2.0.6",
@ -39,13 +39,13 @@
"monaco-editor": "0.51.0",
"monaco-editor-webpack-plugin": "7.1.0",
"pdfobject": "2.3.0",
"perfect-debounce": "1.0.0",
"postcss": "8.4.41",
"postcss-loader": "8.1.1",
"postcss-nesting": "13.0.0",
"sortablejs": "1.15.2",
"swagger-ui-dist": "5.17.14",
"tailwindcss": "3.4.10",
"temporal-polyfill": "0.2.5",
"throttle-debounce": "5.0.2",
"tinycolor2": "1.6.0",
"tippy.js": "6.3.7",
@ -54,7 +54,7 @@
"typescript": "5.5.4",
"uint8-to-base64": "0.2.0",
"vanilla-colorful": "0.7.2",
"vue": "3.4.38",
"vue": "3.5.12",
"vue-bar-graph": "2.1.0",
"vue-chartjs": "5.3.1",
"vue-loader": "17.4.2",

View file

@ -642,6 +642,7 @@ func CreateBranchProtection(ctx *context.APIContext) {
ProtectedFilePatterns: form.ProtectedFilePatterns,
UnprotectedFilePatterns: form.UnprotectedFilePatterns,
BlockOnOutdatedBranch: form.BlockOnOutdatedBranch,
BlockAdminMergeOverride: form.BlockAdminMergeOverride,
}
err = git_model.UpdateProtectBranch(ctx, ctx.Repo.Repository, protectBranch, git_model.WhitelistOptions{
@ -852,6 +853,10 @@ func EditBranchProtection(ctx *context.APIContext) {
protectBranch.BlockOnOutdatedBranch = *form.BlockOnOutdatedBranch
}
if form.BlockAdminMergeOverride != nil {
protectBranch.BlockAdminMergeOverride = *form.BlockAdminMergeOverride
}
var whitelistUsers, forcePushAllowlistUsers, mergeWhitelistUsers, approvalsWhitelistUsers []int64
if form.PushWhitelistUsernames != nil {
whitelistUsers, err = user_model.GetUserIDsByNames(ctx, form.PushWhitelistUsernames, false)

View file

@ -51,7 +51,7 @@ func migrateWithSetting(x *xorm.Engine) error {
} else if current < 0 {
// execute migrations when the database isn't initialized even if AutoMigration is false
return migrations.Migrate(x)
} else if expected := migrations.ExpectedVersion(); current != expected {
} else if expected := migrations.ExpectedDBVersion(); current != expected {
log.Fatal(`"database.AUTO_MIGRATION" is disabled, but current database version %d is not equal to the expected version %d.`+
`You can set "database.AUTO_MIGRATION" to true or migrate manually by running "gitea [--config /path/to/app.ini] migrate"`, current, expected)
}

View file

@ -9,6 +9,7 @@ import (
"code.gitea.io/gitea/modules/container"
"code.gitea.io/gitea/modules/setting"
"code.gitea.io/gitea/modules/structs"
"code.gitea.io/gitea/modules/util"
"code.gitea.io/gitea/services/context"
)
@ -39,7 +40,7 @@ func Organizations(ctx *context.Context) {
)
sortOrder := ctx.FormString("sort")
if sortOrder == "" {
sortOrder = "newest"
sortOrder = util.Iif(supportedSortOrders.Contains(setting.UI.ExploreDefaultSort), setting.UI.ExploreDefaultSort, "newest")
ctx.SetFormString("sort", sortOrder)
}

View file

@ -16,6 +16,7 @@ import (
"code.gitea.io/gitea/modules/setting"
"code.gitea.io/gitea/modules/sitemap"
"code.gitea.io/gitea/modules/structs"
"code.gitea.io/gitea/modules/util"
"code.gitea.io/gitea/services/context"
)
@ -149,7 +150,7 @@ func Users(ctx *context.Context) {
)
sortOrder := ctx.FormString("sort")
if sortOrder == "" {
sortOrder = "newest"
sortOrder = util.Iif(supportedSortOrders.Contains(setting.UI.ExploreDefaultSort), setting.UI.ExploreDefaultSort, "newest")
ctx.SetFormString("sort", sortOrder)
}

View file

@ -0,0 +1,93 @@
// Copyright 2024 The Gitea Authors. All rights reserved.
// SPDX-License-Identifier: MIT
package repo
import (
"net/http"
"code.gitea.io/gitea/models/db"
issues_model "code.gitea.io/gitea/models/issues"
"code.gitea.io/gitea/models/unit"
issue_indexer "code.gitea.io/gitea/modules/indexer/issues"
"code.gitea.io/gitea/modules/optional"
"code.gitea.io/gitea/services/context"
)
type issueSuggestion struct {
ID int64 `json:"id"`
Title string `json:"title"`
State string `json:"state"`
PullRequest *struct {
Merged bool `json:"merged"`
Draft bool `json:"draft"`
} `json:"pull_request,omitempty"`
}
// IssueSuggestions returns a list of issue suggestions
func IssueSuggestions(ctx *context.Context) {
keyword := ctx.Req.FormValue("q")
canReadIssues := ctx.Repo.CanRead(unit.TypeIssues)
canReadPulls := ctx.Repo.CanRead(unit.TypePullRequests)
var isPull optional.Option[bool]
if canReadPulls && !canReadIssues {
isPull = optional.Some(true)
} else if canReadIssues && !canReadPulls {
isPull = optional.Some(false)
}
searchOpt := &issue_indexer.SearchOptions{
Paginator: &db.ListOptions{
Page: 0,
PageSize: 5,
},
Keyword: keyword,
RepoIDs: []int64{ctx.Repo.Repository.ID},
IsPull: isPull,
IsClosed: nil,
SortBy: issue_indexer.SortByUpdatedDesc,
}
ids, _, err := issue_indexer.SearchIssues(ctx, searchOpt)
if err != nil {
ctx.ServerError("SearchIssues", err)
return
}
issues, err := issues_model.GetIssuesByIDs(ctx, ids, true)
if err != nil {
ctx.ServerError("FindIssuesByIDs", err)
return
}
suggestions := make([]*issueSuggestion, 0, len(issues))
for _, issue := range issues {
suggestion := &issueSuggestion{
ID: issue.ID,
Title: issue.Title,
State: string(issue.State()),
}
if issue.IsPull {
if err := issue.LoadPullRequest(ctx); err != nil {
ctx.ServerError("LoadPullRequest", err)
return
}
if issue.PullRequest != nil {
suggestion.PullRequest = &struct {
Merged bool `json:"merged"`
Draft bool `json:"draft"`
}{
Merged: issue.PullRequest.HasMerged,
Draft: issue.PullRequest.IsWorkInProgress(ctx),
}
}
}
suggestions = append(suggestions, suggestion)
}
ctx.JSON(http.StatusOK, suggestions)
}
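For illustration only (assumed sample data): a request such as GET /{owner}/{repo}/issues/suggestions?q=crash returns up to five entries shaped like {"id": 123, "title": "Fix crash on login", "state": "open", "pull_request": {"merged": false, "draft": true}}, where the pull_request object is present only for pull requests.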

View file

@ -256,6 +256,7 @@ func SettingsProtectedBranchPost(ctx *context.Context) {
protectBranch.ProtectedFilePatterns = f.ProtectedFilePatterns
protectBranch.UnprotectedFilePatterns = f.UnprotectedFilePatterns
protectBranch.BlockOnOutdatedBranch = f.BlockOnOutdatedBranch
protectBranch.BlockAdminMergeOverride = f.BlockAdminMergeOverride
err = git_model.UpdateProtectBranch(ctx, ctx.Repo.Repository, protectBranch, git_model.WhitelistOptions{
UserIDs: whitelistUsers,

View file

@ -39,8 +39,9 @@ func RegenerateScratchTwoFactor(ctx *context.Context) {
if auth.IsErrTwoFactorNotEnrolled(err) {
ctx.Flash.Error(ctx.Tr("settings.twofa_not_enrolled"))
ctx.Redirect(setting.AppSubURL + "/user/settings/security")
} else {
ctx.ServerError("SettingsTwoFactor: Failed to GetTwoFactorByUID", err)
}
ctx.ServerError("SettingsTwoFactor: Failed to GetTwoFactorByUID", err)
return
}
@ -74,8 +75,9 @@ func DisableTwoFactor(ctx *context.Context) {
if auth.IsErrTwoFactorNotEnrolled(err) {
ctx.Flash.Error(ctx.Tr("settings.twofa_not_enrolled"))
ctx.Redirect(setting.AppSubURL + "/user/settings/security")
} else {
ctx.ServerError("SettingsTwoFactor: Failed to GetTwoFactorByUID", err)
}
ctx.ServerError("SettingsTwoFactor: Failed to GetTwoFactorByUID", err)
return
}
@ -84,8 +86,9 @@ func DisableTwoFactor(ctx *context.Context) {
// There is a potential DB race here - we must have been disabled by another request in the intervening period
ctx.Flash.Success(ctx.Tr("settings.twofa_disabled"))
ctx.Redirect(setting.AppSubURL + "/user/settings/security")
} else {
ctx.ServerError("SettingsTwoFactor: Failed to DeleteTwoFactorByID", err)
}
ctx.ServerError("SettingsTwoFactor: Failed to DeleteTwoFactorByID", err)
return
}

View file

@ -1178,6 +1178,7 @@ func registerRoutes(m *web.Router) {
})
})
}, context.RepoRef())
m.Get("/issues/suggestions", repo.IssueSuggestions)
}, ignSignIn, context.RepoAssignment, reqRepoIssuesOrPullsReader)
// end "/{username}/{reponame}": view milestone, label, issue, pull, etc

View file

@ -185,6 +185,7 @@ func ToBranchProtection(ctx context.Context, bp *git_model.ProtectedBranch, repo
RequireSignedCommits: bp.RequireSignedCommits,
ProtectedFilePatterns: bp.ProtectedFilePatterns,
UnprotectedFilePatterns: bp.UnprotectedFilePatterns,
BlockAdminMergeOverride: bp.BlockAdminMergeOverride,
Created: bp.CreatedUnix.AsTime(),
Updated: bp.UpdatedUnix.AsTime(),
}

View file

@ -12,7 +12,7 @@ import (
)
func checkDBVersion(ctx context.Context, logger log.Logger, autofix bool) error {
logger.Info("Expected database version: %d", migrations.ExpectedVersion())
logger.Info("Expected database version: %d", migrations.ExpectedDBVersion())
if err := db.InitEngineWithMigration(ctx, migrations.EnsureUpToDate); err != nil {
if !autofix {
logger.Critical("Error: %v during ensure up to date", err)

View file

@ -219,6 +219,7 @@ type ProtectBranchForm struct {
RequireSignedCommits bool
ProtectedFilePatterns string
UnprotectedFilePatterns string
BlockAdminMergeOverride bool
}
// Validate validates the fields

View file

@ -179,6 +179,11 @@ func BatchHandler(ctx *context.Context) {
return
}
if setting.LFS.MaxBatchSize != 0 && len(br.Objects) > setting.LFS.MaxBatchSize {
writeStatus(ctx, http.StatusRequestEntityTooLarge)
return
}
contentStore := lfs_module.NewContentStore()
var responseObjects []*lfs_module.ObjectResponse

View file

@ -22,7 +22,7 @@ import (
rpm_service "code.gitea.io/gitea/services/packages/rpm"
)
// Task method to execute cleanup rules and cleanup expired package data
// CleanupTask executes cleanup rules and cleanup expired package data
func CleanupTask(ctx context.Context, olderThan time.Duration) error {
if err := ExecuteCleanupRules(ctx); err != nil {
return err

View file

@ -206,7 +206,11 @@ func buildPackagesIndices(ctx context.Context, ownerID int64, repoVersion *packa
w := io.MultiWriter(packagesContent, gzw, xzw)
addSeparator := false
if err := debian_model.SearchPackages(ctx, opts, func(pfd *packages_model.PackageFileDescriptor) {
pfds, err := debian_model.SearchPackages(ctx, opts)
if err != nil {
return err
}
for _, pfd := range pfds {
if addSeparator {
fmt.Fprintln(w)
}
@ -220,10 +224,7 @@ func buildPackagesIndices(ctx context.Context, ownerID int64, repoVersion *packa
fmt.Fprintf(w, "SHA1: %s\n", pfd.Blob.HashSHA1)
fmt.Fprintf(w, "SHA256: %s\n", pfd.Blob.HashSHA256)
fmt.Fprintf(w, "SHA512: %s\n", pfd.Blob.HashSHA512)
}); err != nil {
return err
}
gzw.Close()
xzw.Close()

View file

@ -68,7 +68,7 @@ const (
)
// CheckPullMergeable check if the pull mergeable based on all conditions (branch protection, merge options, ...)
func CheckPullMergeable(stdCtx context.Context, doer *user_model.User, perm *access_model.Permission, pr *issues_model.PullRequest, mergeCheckType MergeCheckType, adminSkipProtectionCheck bool) error {
func CheckPullMergeable(stdCtx context.Context, doer *user_model.User, perm *access_model.Permission, pr *issues_model.PullRequest, mergeCheckType MergeCheckType, adminForceMerge bool) error {
return db.WithTx(stdCtx, func(ctx context.Context) error {
if pr.HasMerged {
return ErrHasMerged
@ -118,13 +118,22 @@ func CheckPullMergeable(stdCtx context.Context, doer *user_model.User, perm *acc
err = nil
}
// * if the doer is admin, they could skip the branch protection check
if adminSkipProtectionCheck {
if isRepoAdmin, errCheckAdmin := access_model.IsUserRepoAdmin(ctx, pr.BaseRepo, doer); errCheckAdmin != nil {
log.Error("Unable to check if %-v is a repo admin in %-v: %v", doer, pr.BaseRepo, errCheckAdmin)
return errCheckAdmin
} else if isRepoAdmin {
err = nil // repo admin can skip the check, so clear the error
// * if admin tries to "Force Merge", they could sometimes skip the branch protection check
if adminForceMerge {
isRepoAdmin, errForceMerge := access_model.IsUserRepoAdmin(ctx, pr.BaseRepo, doer)
if errForceMerge != nil {
return fmt.Errorf("IsUserRepoAdmin failed, repo: %v, doer: %v, err: %w", pr.BaseRepoID, doer.ID, errForceMerge)
}
protectedBranchRule, errForceMerge := git_model.GetFirstMatchProtectedBranchRule(ctx, pr.BaseRepoID, pr.BaseBranch)
if errForceMerge != nil {
return fmt.Errorf("GetFirstMatchProtectedBranchRule failed, repo: %v, base branch: %v, err: %w", pr.BaseRepoID, pr.BaseBranch, errForceMerge)
}
// if doer is admin and the "Force Merge" is not blocked, then clear the branch protection check error
blockAdminForceMerge := protectedBranchRule != nil && protectedBranchRule.BlockAdminMergeOverride
if isRepoAdmin && !blockAdminForceMerge {
err = nil
}
}

View file

@ -164,7 +164,7 @@
{{$notAllOverridableChecksOk := or .IsBlockedByApprovals .IsBlockedByRejection .IsBlockedByOfficialReviewRequests .IsBlockedByOutdatedBranch .IsBlockedByChangedProtectedFiles (and .EnableStatusCheck (not .RequiredStatusCheckState.IsSuccess))}}
{{/* admin can merge without checks, writer can merge when checks succeed */}}
{{$canMergeNow := and (or $.IsRepoAdmin (not $notAllOverridableChecksOk)) (or (not .AllowMerge) (not .RequireSigned) .WillSign)}}
{{$canMergeNow := and (or (and (not $.ProtectedBranch.BlockAdminMergeOverride) $.IsRepoAdmin) (not $notAllOverridableChecksOk)) (or (not .AllowMerge) (not .RequireSigned) .WillSign)}}
{{/* admin and writer both can make an auto merge schedule */}}
{{if $canMergeNow}}

View file

@ -102,7 +102,7 @@
{{$sameBase := ne $.BaseName $.HeadUserName}}
{{$differentBranch := ne . $.HeadBranch}}
{{if or $sameBase $differentBranch}}
<div class="item {{if eq $.BaseBranch .}}selected{{end}}" data-branch="{{.}}">{{$.BaseName}}{{if $.HeadRepo}}/{{$.HeadRepo}}{{end}}:{{.}}</div>
<div class="item {{if eq $.BaseBranch .}}selected{{end}}" data-branch="{{.}}">{{$.BaseName}}:{{.}}</div>
{{end}}
{{end}}
</div>

View file

@ -323,6 +323,13 @@
<p class="help">{{ctx.Locale.Tr "repo.settings.block_outdated_branch_desc"}}</p>
</div>
</div>
<div class="field">
<div class="ui checkbox">
<input name="block_admin_merge_override" type="checkbox" {{if .Rule.BlockAdminMergeOverride}}checked{{end}}>
<label>{{ctx.Locale.Tr "repo.settings.block_admin_merge_override"}}</label>
<p class="help">{{ctx.Locale.Tr "repo.settings.block_admin_merge_override_desc"}}</p>
</div>
</div>
<div class="divider"></div>
<div class="field">

View file

@ -44,7 +44,7 @@ Template Attributes:
<button class="markdown-toolbar-button markdown-switch-easymde" data-tooltip-content="{{ctx.Locale.Tr "editor.buttons.switch_to_legacy.tooltip"}}">{{svg "octicon-arrow-switch"}}</button>
</div>
</markdown-toolbar>
<text-expander keys=": @" suffix="">
<text-expander keys=": @ #" multiword="#" suffix="">
<textarea class="markdown-text-editor"{{if .TextareaName}} name="{{.TextareaName}}"{{end}}{{if .TextareaPlaceholder}} placeholder="{{.TextareaPlaceholder}}"{{end}}{{if .TextareaAriaLabel}} aria-label="{{.TextareaAriaLabel}}"{{end}}{{if .DisableAutosize}} data-disable-autosize="{{.DisableAutosize}}"{{end}}>{{.TextareaContent}}</textarea>
</text-expander>
<script>

View file

@ -18771,6 +18771,10 @@
},
"x-go-name": "ApprovalsWhitelistUsernames"
},
"block_admin_merge_override": {
"type": "boolean",
"x-go-name": "BlockAdminMergeOverride"
},
"block_on_official_review_requests": {
"type": "boolean",
"x-go-name": "BlockOnOfficialReviewRequests"
@ -19466,6 +19470,10 @@
},
"x-go-name": "ApprovalsWhitelistUsernames"
},
"block_admin_merge_override": {
"type": "boolean",
"x-go-name": "BlockAdminMergeOverride"
},
"block_on_official_review_requests": {
"type": "boolean",
"x-go-name": "BlockOnOfficialReviewRequests"
@ -20685,6 +20693,10 @@
},
"x-go-name": "ApprovalsWhitelistUsernames"
},
"block_admin_merge_override": {
"type": "boolean",
"x-go-name": "BlockAdminMergeOverride"
},
"block_on_official_review_requests": {
"type": "boolean",
"x-go-name": "BlockOnOfficialReviewRequests"

View file

@ -10,6 +10,7 @@ import (
"fmt"
"io"
"net/http"
"strconv"
"strings"
"testing"
@ -19,6 +20,7 @@ import (
user_model "code.gitea.io/gitea/models/user"
"code.gitea.io/gitea/modules/base"
debian_module "code.gitea.io/gitea/modules/packages/debian"
packages_cleanup_service "code.gitea.io/gitea/services/packages/cleanup"
"code.gitea.io/gitea/tests"
"github.com/blakesmith/ar"
@ -263,4 +265,37 @@ func TestPackageDebian(t *testing.T) {
assert.Contains(t, body, "Components: "+strings.Join(components, " ")+"\n")
assert.Contains(t, body, "Architectures: "+architectures[1]+"\n")
})
t.Run("Cleanup", func(t *testing.T) {
defer tests.PrintCurrentTest(t)()
rule := &packages.PackageCleanupRule{
Enabled: true,
RemovePattern: `.*`,
MatchFullName: true,
OwnerID: user.ID,
Type: packages.TypeDebian,
}
_, err := packages.InsertCleanupRule(db.DefaultContext, rule)
assert.NoError(t, err)
// When there were a lot of packages (> 50 or 100) and the code used "Iterate" to get all packages, it caused bugs,
// because "Iterate" keeps a dangling SQL session while the callback function still uses the same session to execute statements.
// The "Iterate" problem is now covered by TestContextSafety, so here we only need to check the cleanup logic with a small number of packages
packagesCount := 2
for i := 0; i < packagesCount; i++ {
uploadURL := fmt.Sprintf("%s/pool/%s/%s/upload", rootURL, "test", "main")
req := NewRequestWithBody(t, "PUT", uploadURL, createArchive(packageName, "1.0."+strconv.Itoa(i), "all")).AddBasicAuth(user.Name)
MakeRequest(t, req, http.StatusCreated)
}
req := NewRequest(t, "GET", fmt.Sprintf("%s/dists/%s/Release", rootURL, "test"))
MakeRequest(t, req, http.StatusOK)
err = packages_cleanup_service.CleanupTask(db.DefaultContext, 0)
assert.NoError(t, err)
req = NewRequest(t, "GET", fmt.Sprintf("%s/dists/%s/Release", rootURL, "test"))
MakeRequest(t, req, http.StatusNotFound)
})
}

View file

@ -976,3 +976,50 @@ func TestPullAutoMergeAfterCommitStatusSucceedAndApprovalForAgitFlow(t *testing.
unittest.AssertNotExistsBean(t, &pull_model.AutoMerge{PullID: pr.ID})
})
}
func TestPullNonMergeForAdminWithBranchProtection(t *testing.T) {
onGiteaRun(t, func(t *testing.T, u *url.URL) {
// create a pull request
session := loginUser(t, "user1")
forkedName := "repo1-1"
testRepoFork(t, session, "user2", "repo1", "user1", forkedName, "")
defer testDeleteRepository(t, session, "user1", forkedName)
testEditFile(t, session, "user1", forkedName, "master", "README.md", "Hello, World (Edited)\n")
testPullCreate(t, session, "user1", forkedName, false, "master", "master", "Indexer notifier test pull")
baseRepo := unittest.AssertExistsAndLoadBean(t, &repo_model.Repository{OwnerName: "user2", Name: "repo1"})
forkedRepo := unittest.AssertExistsAndLoadBean(t, &repo_model.Repository{OwnerName: "user1", Name: forkedName})
unittest.AssertExistsAndLoadBean(t, &issues_model.PullRequest{
BaseRepoID: baseRepo.ID,
BaseBranch: "master",
HeadRepoID: forkedRepo.ID,
HeadBranch: "master",
})
// add protected branch for commit status
csrf := GetUserCSRFToken(t, session)
// Change master branch to protected
pbCreateReq := NewRequestWithValues(t, "POST", "/user2/repo1/settings/branches/edit", map[string]string{
"_csrf": csrf,
"rule_name": "master",
"enable_push": "true",
"enable_status_check": "true",
"status_check_contexts": "gitea/actions",
"block_admin_merge_override": "true",
})
session.MakeRequest(t, pbCreateReq, http.StatusSeeOther)
token := getTokenForLoggedInUser(t, session, auth_model.AccessTokenScopeWriteRepository)
mergeReq := NewRequestWithValues(t, "POST", "/api/v1/repos/user2/repo1/pulls/6/merge", map[string]string{
"_csrf": csrf,
"head_commit_id": "",
"merge_when_checks_succeed": "false",
"force_merge": "true",
"do": "rebase",
}).AddTokenAuth(token)
session.MakeRequest(t, mergeReq, http.StatusMethodNotAllowed)
})
}

View file

@ -2,31 +2,21 @@
Please also update the template file above if this vue is modified.
action status accepted: success, skipped, waiting, blocked, running, failure, cancelled, unknown
-->
<script lang="ts">
<script lang="ts" setup>
import {SvgIcon} from '../svg.ts';
export default {
components: {SvgIcon},
props: {
status: {
type: String,
required: true,
},
size: {
type: Number,
default: 16,
},
className: {
type: String,
default: '',
},
localeStatus: {
type: String,
default: '',
},
},
};
withDefaults(defineProps<{
status: 'success' | 'skipped' | 'waiting' | 'blocked' | 'running' | 'failure' | 'cancelled' | 'unknown',
size?: number,
className?: string,
localeStatus?: string,
}>(), {
size: 16,
className: undefined,
localeStatus: undefined,
});
</script>
<template>
<span class="tw-flex tw-items-center" :data-tooltip-content="localeStatus" v-if="status">
<SvgIcon name="octicon-check-circle-fill" class="text green" :size="size" :class-name="className" v-if="status === 'success'"/>

View file

@ -1,58 +1,56 @@
<script lang="ts">
<script lang="ts" setup>
// TODO: Switch to upstream after https://github.com/razorness/vue3-calendar-heatmap/pull/34 is merged
import {CalendarHeatmap} from '@silverwind/vue3-calendar-heatmap';
import {onMounted, ref} from 'vue';
import type {Value as HeatmapValue, Locale as HeatmapLocale} from '@silverwind/vue3-calendar-heatmap';
export default {
components: {CalendarHeatmap},
props: {
values: {
type: Array,
default: () => [],
},
locale: {
type: Object,
default: () => {},
},
},
data: () => ({
colorRange: [
'var(--color-secondary-alpha-60)',
'var(--color-secondary-alpha-60)',
'var(--color-primary-light-4)',
'var(--color-primary-light-2)',
'var(--color-primary)',
'var(--color-primary-dark-2)',
'var(--color-primary-dark-4)',
],
endDate: new Date(),
}),
mounted() {
// work around issue with first legend color being rendered twice and legend cut off
const legend = document.querySelector('.vch__external-legend-wrapper');
legend.setAttribute('viewBox', '12 0 80 10');
legend.style.marginRight = '-12px';
},
methods: {
handleDayClick(e) {
// Reset filter if same date is clicked
const params = new URLSearchParams(document.location.search);
const queryDate = params.get('date');
// Timezone has to be stripped because toISOString() converts to UTC
const clickedDate = new Date(e.date - (e.date.getTimezoneOffset() * 60000)).toISOString().substring(0, 10);
defineProps<{
values?: HeatmapValue[];
locale: {
textTotalContributions: string;
heatMapLocale: Partial<HeatmapLocale>;
noDataText: string;
tooltipUnit: string;
};
}>();
if (queryDate && queryDate === clickedDate) {
params.delete('date');
} else {
params.set('date', clickedDate);
}
const colorRange = [
'var(--color-secondary-alpha-60)',
'var(--color-secondary-alpha-60)',
'var(--color-primary-light-4)',
'var(--color-primary-light-2)',
'var(--color-primary)',
'var(--color-primary-dark-2)',
'var(--color-primary-dark-4)',
];
params.delete('page');
const endDate = ref(new Date());
const newSearch = params.toString();
window.location.search = newSearch.length ? `?${newSearch}` : '';
},
},
};
onMounted(() => {
// work around issue with first legend color being rendered twice and legend cut off
const legend = document.querySelector<HTMLElement>('.vch__external-legend-wrapper');
legend.setAttribute('viewBox', '12 0 80 10');
legend.style.marginRight = '-12px';
});
function handleDayClick(e: Event & {date: Date}) {
// Reset filter if same date is clicked
const params = new URLSearchParams(document.location.search);
const queryDate = params.get('date');
// Timezone has to be stripped because toISOString() converts to UTC
const clickedDate = new Date(e.date.getTime() - (e.date.getTimezoneOffset() * 60000)).toISOString().substring(0, 10);
if (queryDate && queryDate === clickedDate) {
params.delete('date');
} else {
params.set('date', clickedDate);
}
params.delete('page');
const newSearch = params.toString();
window.location.search = newSearch.length ? `?${newSearch}` : '';
}
</script>
<template>
<div class="total-contributions">

View file

@ -1,100 +1,65 @@
<script lang="ts">
<script lang="ts" setup>
import {SvgIcon} from '../svg.ts';
import {GET} from '../modules/fetch.ts';
import {getIssueColor, getIssueIcon} from '../features/issue.ts';
import {computed, onMounted, ref} from 'vue';
const {appSubUrl, i18n} = window.config;
export default {
components: {SvgIcon},
data: () => ({
loading: false,
issue: null,
renderedLabels: '',
i18nErrorOccurred: i18n.error_occurred,
i18nErrorMessage: null,
}),
computed: {
createdAt() {
return new Date(this.issue.created_at).toLocaleDateString(undefined, {year: 'numeric', month: 'short', day: 'numeric'});
},
const loading = ref(false);
const issue = ref(null);
const renderedLabels = ref('');
const i18nErrorOccurred = i18n.error_occurred;
const i18nErrorMessage = ref(null);
body() {
const body = this.issue.body.replace(/\n+/g, ' ');
if (body.length > 85) {
return `${body.substring(0, 85)}`;
}
return body;
},
const createdAt = computed(() => new Date(issue.value.created_at).toLocaleDateString(undefined, {year: 'numeric', month: 'short', day: 'numeric'}));
const body = computed(() => {
const body = issue.value.body.replace(/\n+/g, ' ');
if (body.length > 85) {
return `${body.substring(0, 85)}`;
}
return body;
});
icon() {
if (this.issue.pull_request !== null) {
if (this.issue.state === 'open') {
if (this.issue.pull_request.draft === true) {
return 'octicon-git-pull-request-draft'; // WIP PR
}
return 'octicon-git-pull-request'; // Open PR
} else if (this.issue.pull_request.merged === true) {
return 'octicon-git-merge'; // Merged PR
}
return 'octicon-git-pull-request'; // Closed PR
} else if (this.issue.state === 'open') {
return 'octicon-issue-opened'; // Open Issue
}
return 'octicon-issue-closed'; // Closed Issue
},
const root = ref<HTMLElement | null>(null);
color() {
if (this.issue.pull_request !== null) {
if (this.issue.pull_request.draft === true) {
return 'grey'; // WIP PR
} else if (this.issue.pull_request.merged === true) {
return 'purple'; // Merged PR
}
}
if (this.issue.state === 'open') {
return 'green'; // Open Issue
}
return 'red'; // Closed Issue
},
},
mounted() {
this.$refs.root.addEventListener('ce-load-context-popup', (e) => {
const data = e.detail;
if (!this.loading && this.issue === null) {
this.load(data);
}
});
},
methods: {
async load(data) {
this.loading = true;
this.i18nErrorMessage = null;
onMounted(() => {
root.value.addEventListener('ce-load-context-popup', (e: CustomEvent) => {
const data = e.detail;
if (!loading.value && issue.value === null) {
load(data);
}
});
});
try {
const response = await GET(`${appSubUrl}/${data.owner}/${data.repo}/issues/${data.index}/info`); // backend: GetIssueInfo
const respJson = await response.json();
if (!response.ok) {
this.i18nErrorMessage = respJson.message ?? i18n.network_error;
return;
}
this.issue = respJson.convertedIssue;
this.renderedLabels = respJson.renderedLabels;
} catch {
this.i18nErrorMessage = i18n.network_error;
} finally {
this.loading = false;
}
},
},
};
async function load(data) {
loading.value = true;
i18nErrorMessage.value = null;
try {
const response = await GET(`${appSubUrl}/${data.owner}/${data.repo}/issues/${data.index}/info`); // backend: GetIssueInfo
const respJson = await response.json();
if (!response.ok) {
i18nErrorMessage.value = respJson.message ?? i18n.network_error;
return;
}
issue.value = respJson.convertedIssue;
renderedLabels.value = respJson.renderedLabels;
} catch {
i18nErrorMessage.value = i18n.network_error;
} finally {
loading.value = false;
}
}
</script>
<template>
<div ref="root">
<div v-if="loading" class="tw-h-12 tw-w-12 is-loading"/>
<div v-if="!loading && issue !== null" class="tw-flex tw-flex-col tw-gap-2">
<div class="tw-text-12">{{ issue.repository.full_name }} on {{ createdAt }}</div>
<div class="flex-text-block">
<svg-icon :name="icon" :class="['text', color]"/>
<svg-icon :name="getIssueIcon(issue)" :class="['text', getIssueColor(issue)]"/>
<span class="issue-title tw-font-semibold tw-break-anywhere">
{{ issue.title }}
<span class="index">#{{ issue.number }}</span>

View file

@ -1,40 +1,42 @@
<script lang="ts">
<script lang="ts" setup>
import {onMounted, onUnmounted} from 'vue';
import {loadMoreFiles} from '../features/repo-diff.ts';
import {diffTreeStore} from '../modules/stores.ts';
export default {
data: () => {
return {store: diffTreeStore()};
},
mounted() {
document.querySelector('#show-file-list-btn').addEventListener('click', this.toggleFileList);
},
unmounted() {
document.querySelector('#show-file-list-btn').removeEventListener('click', this.toggleFileList);
},
methods: {
toggleFileList() {
this.store.fileListIsVisible = !this.store.fileListIsVisible;
},
diffTypeToString(pType) {
const diffTypes = {
1: 'add',
2: 'modify',
3: 'del',
4: 'rename',
5: 'copy',
};
return diffTypes[pType];
},
diffStatsWidth(adds, dels) {
return `${adds / (adds + dels) * 100}%`;
},
loadMoreData() {
loadMoreFiles(this.store.linkLoadMore);
},
},
};
const store = diffTreeStore();
onMounted(() => {
document.querySelector('#show-file-list-btn').addEventListener('click', toggleFileList);
});
onUnmounted(() => {
document.querySelector('#show-file-list-btn').removeEventListener('click', toggleFileList);
});
function toggleFileList() {
store.fileListIsVisible = !store.fileListIsVisible;
}
function diffTypeToString(pType) {
const diffTypes = {
1: 'add',
2: 'modify',
3: 'del',
4: 'rename',
5: 'copy',
};
return diffTypes[pType];
}
function diffStatsWidth(adds, dels) {
return `${adds / (adds + dels) * 100}%`;
}
function loadMoreData() {
loadMoreFiles(store.linkLoadMore);
}
</script>
<template>
<ol class="diff-stats tw-m-0" ref="root" v-if="store.fileListIsVisible">
<li v-for="file in store.files" :key="file.NameHash">

View file

@ -1,130 +1,137 @@
<script lang="ts">
<script lang="ts" setup>
import DiffFileTreeItem from './DiffFileTreeItem.vue';
import {loadMoreFiles} from '../features/repo-diff.ts';
import {toggleElem} from '../utils/dom.ts';
import {diffTreeStore} from '../modules/stores.ts';
import {setFileFolding} from '../features/file-fold.ts';
import {computed, onMounted, onUnmounted} from 'vue';
const LOCAL_STORAGE_KEY = 'diff_file_tree_visible';
export default {
components: {DiffFileTreeItem},
data: () => {
return {store: diffTreeStore()};
},
computed: {
fileTree() {
const result = [];
for (const file of this.store.files) {
// Split file into directories
const splits = file.Name.split('/');
let index = 0;
let parent = null;
let isFile = false;
for (const split of splits) {
index += 1;
// reached the end
if (index === splits.length) {
isFile = true;
}
let newParent = {
name: split,
children: [],
isFile,
};
const store = diffTreeStore();
if (isFile === true) {
newParent.file = file;
}
if (parent) {
// check if the folder already exists
const existingFolder = parent.children.find(
(x) => x.name === split,
);
if (existingFolder) {
newParent = existingFolder;
} else {
parent.children.push(newParent);
}
} else {
const existingFolder = result.find((x) => x.name === split);
if (existingFolder) {
newParent = existingFolder;
} else {
result.push(newParent);
}
}
parent = newParent;
}
const fileTree = computed(() => {
const result = [];
for (const file of store.files) {
// Split file into directories
const splits = file.Name.split('/');
let index = 0;
let parent = null;
let isFile = false;
for (const split of splits) {
index += 1;
// reached the end
if (index === splits.length) {
isFile = true;
}
const mergeChildIfOnlyOneDir = (entries) => {
for (const entry of entries) {
if (entry.children) {
mergeChildIfOnlyOneDir(entry.children);
}
if (entry.children.length === 1 && entry.children[0].isFile === false) {
// Merge it to the parent
entry.name = `${entry.name}/${entry.children[0].name}`;
entry.children = entry.children[0].children;
}
}
let newParent = {
name: split,
children: [],
isFile,
} as {
name: string,
children: any[],
isFile: boolean,
file?: any,
};
// Merge folders with just a folder as children in order to
// reduce the depth of our tree.
mergeChildIfOnlyOneDir(result);
return result;
},
},
mounted() {
// Default to true if unset
this.store.fileTreeIsVisible = localStorage.getItem(LOCAL_STORAGE_KEY) !== 'false';
document.querySelector('.diff-toggle-file-tree-button').addEventListener('click', this.toggleVisibility);
this.hashChangeListener = () => {
this.store.selectedItem = window.location.hash;
this.expandSelectedFile();
};
this.hashChangeListener();
window.addEventListener('hashchange', this.hashChangeListener);
},
unmounted() {
document.querySelector('.diff-toggle-file-tree-button').removeEventListener('click', this.toggleVisibility);
window.removeEventListener('hashchange', this.hashChangeListener);
},
methods: {
expandSelectedFile() {
// expand file if the selected file is folded
if (this.store.selectedItem) {
const box = document.querySelector(this.store.selectedItem);
const folded = box?.getAttribute('data-folded') === 'true';
if (folded) setFileFolding(box, box.querySelector('.fold-file'), false);
if (isFile === true) {
newParent.file = file;
}
},
toggleVisibility() {
this.updateVisibility(!this.store.fileTreeIsVisible);
},
updateVisibility(visible) {
this.store.fileTreeIsVisible = visible;
localStorage.setItem(LOCAL_STORAGE_KEY, this.store.fileTreeIsVisible);
this.updateState(this.store.fileTreeIsVisible);
},
updateState(visible) {
const btn = document.querySelector('.diff-toggle-file-tree-button');
const [toShow, toHide] = btn.querySelectorAll('.icon');
const tree = document.querySelector('#diff-file-tree');
const newTooltip = btn.getAttribute(visible ? 'data-hide-text' : 'data-show-text');
btn.setAttribute('data-tooltip-content', newTooltip);
toggleElem(tree, visible);
toggleElem(toShow, !visible);
toggleElem(toHide, visible);
},
loadMoreData() {
loadMoreFiles(this.store.linkLoadMore);
},
},
};
if (parent) {
// check if the folder already exists
const existingFolder = parent.children.find(
(x) => x.name === split,
);
if (existingFolder) {
newParent = existingFolder;
} else {
parent.children.push(newParent);
}
} else {
const existingFolder = result.find((x) => x.name === split);
if (existingFolder) {
newParent = existingFolder;
} else {
result.push(newParent);
}
}
parent = newParent;
}
}
const mergeChildIfOnlyOneDir = (entries) => {
for (const entry of entries) {
if (entry.children) {
mergeChildIfOnlyOneDir(entry.children);
}
if (entry.children.length === 1 && entry.children[0].isFile === false) {
// Merge it to the parent
entry.name = `${entry.name}/${entry.children[0].name}`;
entry.children = entry.children[0].children;
}
}
};
// Merge folders with just a folder as children in order to
// reduce the depth of our tree.
mergeChildIfOnlyOneDir(result);
return result;
});
onMounted(() => {
// Default to true if unset
store.fileTreeIsVisible = localStorage.getItem(LOCAL_STORAGE_KEY) !== 'false';
document.querySelector('.diff-toggle-file-tree-button').addEventListener('click', toggleVisibility);
hashChangeListener();
window.addEventListener('hashchange', hashChangeListener);
});
onUnmounted(() => {
document.querySelector('.diff-toggle-file-tree-button').removeEventListener('click', toggleVisibility);
window.removeEventListener('hashchange', hashChangeListener);
});
function hashChangeListener() {
store.selectedItem = window.location.hash;
expandSelectedFile();
}
function expandSelectedFile() {
// expand file if the selected file is folded
if (store.selectedItem) {
const box = document.querySelector(store.selectedItem);
const folded = box?.getAttribute('data-folded') === 'true';
if (folded) setFileFolding(box, box.querySelector('.fold-file'), false);
}
}
function toggleVisibility() {
updateVisibility(!store.fileTreeIsVisible);
}
function updateVisibility(visible) {
store.fileTreeIsVisible = visible;
localStorage.setItem(LOCAL_STORAGE_KEY, store.fileTreeIsVisible);
updateState(store.fileTreeIsVisible);
}
function updateState(visible) {
const btn = document.querySelector('.diff-toggle-file-tree-button');
const [toShow, toHide] = btn.querySelectorAll('.icon');
const tree = document.querySelector('#diff-file-tree');
const newTooltip = btn.getAttribute(visible ? 'data-hide-text' : 'data-show-text');
btn.setAttribute('data-tooltip-content', newTooltip);
toggleElem(tree, visible);
toggleElem(toShow, !visible);
toggleElem(toHide, visible);
}
function loadMoreData() {
loadMoreFiles(store.linkLoadMore);
}
</script>
<template>
<div v-if="store.fileTreeIsVisible" class="diff-file-tree-items">
<!-- only render the tree if we're visible. in many cases this is something that doesn't change very often -->
@ -134,6 +141,7 @@ export default {
</div>
</div>
</template>
<style scoped>
.diff-file-tree-items {
display: flex;

View file

@ -1,33 +1,41 @@
<script lang="ts">
<script lang="ts" setup>
import {SvgIcon} from '../svg.ts';
import {diffTreeStore} from '../modules/stores.ts';
import {ref} from 'vue';
export default {
components: {SvgIcon},
props: {
item: {
type: Object,
required: true,
},
},
data: () => ({
store: diffTreeStore(),
collapsed: false,
}),
methods: {
getIconForDiffType(pType) {
const diffTypes = {
1: {name: 'octicon-diff-added', classes: ['text', 'green']},
2: {name: 'octicon-diff-modified', classes: ['text', 'yellow']},
3: {name: 'octicon-diff-removed', classes: ['text', 'red']},
4: {name: 'octicon-diff-renamed', classes: ['text', 'teal']},
5: {name: 'octicon-diff-renamed', classes: ['text', 'green']}, // there is no octicon for copied, so renamed should be ok
};
return diffTypes[pType];
},
},
type File = {
Name: string;
NameHash: string;
Type: number;
IsViewed: boolean;
}
type Item = {
name: string;
isFile: boolean;
file?: File;
children?: Item[];
};
defineProps<{
item: Item,
}>();
const store = diffTreeStore();
const collapsed = ref(false);
function getIconForDiffType(pType) {
const diffTypes = {
1: {name: 'octicon-diff-added', classes: ['text', 'green']},
2: {name: 'octicon-diff-modified', classes: ['text', 'yellow']},
3: {name: 'octicon-diff-removed', classes: ['text', 'red']},
4: {name: 'octicon-diff-renamed', classes: ['text', 'teal']},
5: {name: 'octicon-diff-renamed', classes: ['text', 'green']}, // there is no octicon for copied, so renamed should be ok
};
return diffTypes[pType];
}
</script>
<template>
<!--title instead of tooltip above as the tooltip needs too much work with the current methods, i.e. not being loaded or staying open for "too long"-->
<a

View file

@ -1,84 +1,83 @@
<script lang="ts">
<script lang="ts" setup>
import {computed, onMounted, onUnmounted, ref, watch} from 'vue';
import {SvgIcon} from '../svg.ts';
import {toggleElem} from '../utils/dom.ts';
const {csrfToken, pageData} = window.config;
export default {
components: {SvgIcon},
data: () => ({
csrfToken,
mergeForm: pageData.pullRequestMergeForm,
const mergeForm = ref(pageData.pullRequestMergeForm);
mergeTitleFieldValue: '',
mergeMessageFieldValue: '',
deleteBranchAfterMerge: false,
autoMergeWhenSucceed: false,
const mergeTitleFieldValue = ref('');
const mergeMessageFieldValue = ref('');
const deleteBranchAfterMerge = ref(false);
const autoMergeWhenSucceed = ref(false);
mergeStyle: '',
mergeStyleDetail: { // dummy only, these values will come from one of the mergeForm.mergeStyles
hideMergeMessageTexts: false,
textDoMerge: '',
mergeTitleFieldText: '',
mergeMessageFieldText: '',
hideAutoMerge: false,
},
mergeStyleAllowedCount: 0,
const mergeStyle = ref('');
const mergeStyleDetail = ref({
hideMergeMessageTexts: false,
textDoMerge: '',
mergeTitleFieldText: '',
mergeMessageFieldText: '',
hideAutoMerge: false,
});
showMergeStyleMenu: false,
showActionForm: false,
}),
computed: {
mergeButtonStyleClass() {
if (this.mergeForm.allOverridableChecksOk) return 'primary';
return this.autoMergeWhenSucceed ? 'primary' : 'red';
},
forceMerge() {
return this.mergeForm.canMergeNow && !this.mergeForm.allOverridableChecksOk;
},
},
watch: {
mergeStyle(val) {
this.mergeStyleDetail = this.mergeForm.mergeStyles.find((e) => e.name === val);
for (const elem of document.querySelectorAll('[data-pull-merge-style]')) {
toggleElem(elem, elem.getAttribute('data-pull-merge-style') === val);
}
},
},
created() {
this.mergeStyleAllowedCount = this.mergeForm.mergeStyles.reduce((v, msd) => v + (msd.allowed ? 1 : 0), 0);
const mergeStyleAllowedCount = ref(0);
let mergeStyle = this.mergeForm.mergeStyles.find((e) => e.allowed && e.name === this.mergeForm.defaultMergeStyle)?.name;
if (!mergeStyle) mergeStyle = this.mergeForm.mergeStyles.find((e) => e.allowed)?.name;
this.switchMergeStyle(mergeStyle, !this.mergeForm.canMergeNow);
},
mounted() {
document.addEventListener('mouseup', this.hideMergeStyleMenu);
},
unmounted() {
document.removeEventListener('mouseup', this.hideMergeStyleMenu);
},
methods: {
hideMergeStyleMenu() {
this.showMergeStyleMenu = false;
},
toggleActionForm(show) {
this.showActionForm = show;
if (!show) return;
this.deleteBranchAfterMerge = this.mergeForm.defaultDeleteBranchAfterMerge;
this.mergeTitleFieldValue = this.mergeStyleDetail.mergeTitleFieldText;
this.mergeMessageFieldValue = this.mergeStyleDetail.mergeMessageFieldText;
},
switchMergeStyle(name, autoMerge = false) {
this.mergeStyle = name;
this.autoMergeWhenSucceed = autoMerge;
},
clearMergeMessage() {
this.mergeMessageFieldValue = this.mergeForm.defaultMergeMessage;
},
},
};
const showMergeStyleMenu = ref(false);
const showActionForm = ref(false);
const mergeButtonStyleClass = computed(() => {
if (mergeForm.value.allOverridableChecksOk) return 'primary';
return autoMergeWhenSucceed.value ? 'primary' : 'red';
});
const forceMerge = computed(() => {
return mergeForm.value.canMergeNow && !mergeForm.value.allOverridableChecksOk;
});
watch(mergeStyle, (val) => {
mergeStyleDetail.value = mergeForm.value.mergeStyles.find((e) => e.name === val);
for (const elem of document.querySelectorAll('[data-pull-merge-style]')) {
toggleElem(elem, elem.getAttribute('data-pull-merge-style') === val);
}
});
onMounted(() => {
mergeStyleAllowedCount.value = mergeForm.value.mergeStyles.reduce((v, msd) => v + (msd.allowed ? 1 : 0), 0);
let mergeStyle = mergeForm.value.mergeStyles.find((e) => e.allowed && e.name === mergeForm.value.defaultMergeStyle)?.name;
if (!mergeStyle) mergeStyle = mergeForm.value.mergeStyles.find((e) => e.allowed)?.name;
switchMergeStyle(mergeStyle, !mergeForm.value.canMergeNow);
document.addEventListener('mouseup', hideMergeStyleMenu);
});
onUnmounted(() => {
document.removeEventListener('mouseup', hideMergeStyleMenu);
});
function hideMergeStyleMenu() {
showMergeStyleMenu.value = false;
}
function toggleActionForm(show: boolean) {
showActionForm.value = show;
if (!show) return;
deleteBranchAfterMerge.value = mergeForm.value.defaultDeleteBranchAfterMerge;
mergeTitleFieldValue.value = mergeStyleDetail.value.mergeTitleFieldText;
mergeMessageFieldValue.value = mergeStyleDetail.value.mergeMessageFieldText;
}
function switchMergeStyle(name, autoMerge = false) {
mergeStyle.value = name;
autoMergeWhenSucceed.value = autoMerge;
}
function clearMergeMessage() {
mergeMessageFieldValue.value = mergeForm.value.defaultMergeMessage;
}
</script>
<template>
<!--
if this component is shown, either the user is an admin (can do a merge without checks), or they are a writer who has the permission to do a merge
@ -186,6 +185,7 @@ export default {
</div>
</div>
</template>
<style scoped>
/* to keep UI the same, at the moment we are still using some Fomantic UI styles, but we do not use their scripts, so we need to fine tune some styles */
.ui.dropdown .menu.show {

View file

@ -1,68 +1,62 @@
<script lang="ts">
<script lang="ts" setup>
import {VueBarGraph} from 'vue-bar-graph';
import {createApp} from 'vue';
import {computed, onMounted, ref} from 'vue';
const sfc = {
components: {VueBarGraph},
data: () => ({
colors: {
barColor: 'green',
textColor: 'black',
textAltColor: 'white',
},
const colors = ref({
barColor: 'green',
textColor: 'black',
textAltColor: 'white',
});
// possible keys:
// * avatar_link: (...)
// * commits: (...)
// * home_link: (...)
// * login: (...)
// * name: (...)
activityTopAuthors: window.config.pageData.repoActivityTopAuthors || [],
}),
computed: {
graphPoints() {
return this.activityTopAuthors.map((item) => {
return {
value: item.commits,
label: item.name,
};
});
},
graphAuthors() {
return this.activityTopAuthors.map((item, idx) => {
return {
position: idx + 1,
...item,
};
});
},
graphWidth() {
return this.activityTopAuthors.length * 40;
},
},
mounted() {
const refStyle = window.getComputedStyle(this.$refs.style);
const refAltStyle = window.getComputedStyle(this.$refs.altStyle);
// possible keys:
// * avatar_link: (...)
// * commits: (...)
// * home_link: (...)
// * login: (...)
// * name: (...)
const activityTopAuthors = window.config.pageData.repoActivityTopAuthors || [];
this.colors.barColor = refStyle.backgroundColor;
this.colors.textColor = refStyle.color;
this.colors.textAltColor = refAltStyle.color;
},
};
const graphPoints = computed(() => {
return activityTopAuthors.map((item) => {
return {
value: item.commits,
label: item.name,
};
});
});
export function initRepoActivityTopAuthorsChart() {
const el = document.querySelector('#repo-activity-top-authors-chart');
if (el) {
createApp(sfc).mount(el);
}
}
const graphAuthors = computed(() => {
return activityTopAuthors.map((item, idx) => {
return {
position: idx + 1,
...item,
};
});
});
export default sfc; // activate the IDE's Vue plugin
const graphWidth = computed(() => {
return activityTopAuthors.length * 40;
});
const styleElement = ref<HTMLElement | null>(null);
const altStyleElement = ref<HTMLElement | null>(null);
onMounted(() => {
const refStyle = window.getComputedStyle(styleElement.value);
const refAltStyle = window.getComputedStyle(altStyleElement.value);
colors.value = {
barColor: refStyle.backgroundColor,
textColor: refStyle.color,
textAltColor: refAltStyle.color,
};
});
</script>
<template>
<div>
<div class="activity-bar-graph" ref="style" style="width: 0; height: 0;"/>
<div class="activity-bar-graph-alt" ref="altStyle" style="width: 0; height: 0;"/>
<div class="activity-bar-graph" ref="styleElement" style="width: 0; height: 0;"/>
<div class="activity-bar-graph-alt" ref="altStyleElement" style="width: 0; height: 0;"/>
<vue-bar-graph
:points="graphPoints"
:show-x-axis="true"

View file

@ -1,4 +1,4 @@
<script lang="ts">
<script lang="ts" setup>
import {SvgIcon} from '../svg.ts';
import {
Chart,
@ -15,10 +15,12 @@ import {
startDaysBetween,
firstStartDateAfterDate,
fillEmptyStartDaysWithZeroes,
type DayData,
} from '../utils/time.ts';
import {chartJsColors} from '../utils/color.ts';
import {sleep} from '../utils.ts';
import 'chartjs-adapter-dayjs-4/dist/chartjs-adapter-dayjs-4.esm';
import {onMounted, ref} from 'vue';
const {pageData} = window.config;
@ -34,114 +36,110 @@ Chart.register(
Filler,
);
export default {
components: {ChartLine, SvgIcon},
props: {
locale: {
type: Object,
required: true,
},
},
data: () => ({
isLoading: false,
errorText: '',
repoLink: pageData.repoLink || [],
data: [],
}),
mounted() {
this.fetchGraphData();
},
methods: {
async fetchGraphData() {
this.isLoading = true;
try {
let response;
do {
response = await GET(`${this.repoLink}/activity/code-frequency/data`);
if (response.status === 202) {
await sleep(1000); // wait for 1 second before retrying
}
} while (response.status === 202);
if (response.ok) {
this.data = await response.json();
const weekValues = Object.values(this.data);
const start = weekValues[0].week;
const end = firstStartDateAfterDate(new Date());
const startDays = startDaysBetween(start, end);
this.data = fillEmptyStartDaysWithZeroes(startDays, this.data);
this.errorText = '';
} else {
this.errorText = response.statusText;
}
} catch (err) {
this.errorText = err.message;
} finally {
this.isLoading = false;
defineProps<{
locale: {
loadingTitle: string;
loadingTitleFailed: string;
loadingInfo: string;
};
}>();
const isLoading = ref(false);
const errorText = ref('');
const repoLink = ref(pageData.repoLink || []);
const data = ref<DayData[]>([]);
onMounted(() => {
fetchGraphData();
});
async function fetchGraphData() {
isLoading.value = true;
try {
let response: Response;
do {
response = await GET(`${repoLink.value}/activity/code-frequency/data`);
if (response.status === 202) {
await sleep(1000); // wait for 1 second before retrying
}
},
} while (response.status === 202);
if (response.ok) {
data.value = await response.json();
const weekValues = Object.values(data.value);
const start = weekValues[0].week;
const end = firstStartDateAfterDate(new Date());
const startDays = startDaysBetween(start, end);
data.value = fillEmptyStartDaysWithZeroes(startDays, data.value);
errorText.value = '';
} else {
errorText.value = response.statusText;
}
} catch (err) {
errorText.value = err.message;
} finally {
isLoading.value = false;
}
}
toGraphData(data) {
return {
datasets: [
{
data: data.map((i) => ({x: i.week, y: i.additions})),
pointRadius: 0,
pointHitRadius: 0,
fill: true,
label: 'Additions',
backgroundColor: chartJsColors['additions'],
borderWidth: 0,
tension: 0.3,
},
{
data: data.map((i) => ({x: i.week, y: -i.deletions})),
pointRadius: 0,
pointHitRadius: 0,
fill: true,
label: 'Deletions',
backgroundColor: chartJsColors['deletions'],
borderWidth: 0,
tension: 0.3,
},
],
};
},
function toGraphData(data) {
return {
datasets: [
{
data: data.map((i) => ({x: i.week, y: i.additions})),
pointRadius: 0,
pointHitRadius: 0,
fill: true,
label: 'Additions',
backgroundColor: chartJsColors['additions'],
borderWidth: 0,
tension: 0.3,
},
{
data: data.map((i) => ({x: i.week, y: -i.deletions})),
pointRadius: 0,
pointHitRadius: 0,
fill: true,
label: 'Deletions',
backgroundColor: chartJsColors['deletions'],
borderWidth: 0,
tension: 0.3,
},
],
};
}
getOptions() {
return {
responsive: true,
maintainAspectRatio: false,
animation: true,
plugins: {
legend: {
display: true,
},
},
scales: {
x: {
type: 'time',
grid: {
display: false,
},
time: {
minUnit: 'month',
},
ticks: {
maxRotation: 0,
maxTicksLimit: 12,
},
},
y: {
ticks: {
maxTicksLimit: 6,
},
},
},
};
const options = {
responsive: true,
maintainAspectRatio: false,
animation: true,
plugins: {
legend: {
display: true,
},
},
scales: {
x: {
type: 'time',
grid: {
display: false,
},
time: {
minUnit: 'month',
},
ticks: {
maxRotation: 0,
maxTicksLimit: 12,
},
},
y: {
ticks: {
maxTicksLimit: 6,
},
},
},
};
</script>
<template>
<div>
<div class="ui header tw-flex tw-items-center tw-justify-between">
@ -160,11 +158,12 @@ export default {
</div>
<ChartLine
v-memo="data" v-if="data.length !== 0"
:data="toGraphData(data)" :options="getOptions()"
:data="toGraphData(data)" :options="options"
/>
</div>
</div>
</template>
<style scoped>
.main-graph {
height: 440px;

View file

@ -1,4 +1,4 @@
<script lang="ts">
<script lang="ts" setup>
import {SvgIcon} from '../svg.ts';
import {
Chart,
@ -6,6 +6,7 @@ import {
BarElement,
LinearScale,
TimeScale,
type ChartOptions,
} from 'chart.js';
import {GET} from '../modules/fetch.ts';
import {Bar} from 'vue-chartjs';
@ -13,10 +14,12 @@ import {
startDaysBetween,
firstStartDateAfterDate,
fillEmptyStartDaysWithZeroes,
type DayData,
} from '../utils/time.ts';
import {chartJsColors} from '../utils/color.ts';
import {sleep} from '../utils.ts';
import 'chartjs-adapter-dayjs-4/dist/chartjs-adapter-dayjs-4.esm';
import {onMounted, ref} from 'vue';
const {pageData} = window.config;
@ -30,95 +33,91 @@ Chart.register(
Tooltip,
);
export default {
components: {Bar, SvgIcon},
props: {
locale: {
type: Object,
required: true,
},
},
data: () => ({
isLoading: false,
errorText: '',
repoLink: pageData.repoLink || [],
data: [],
}),
mounted() {
this.fetchGraphData();
},
methods: {
async fetchGraphData() {
this.isLoading = true;
try {
let response;
do {
response = await GET(`${this.repoLink}/activity/recent-commits/data`);
if (response.status === 202) {
await sleep(1000); // wait for 1 second before retrying
}
} while (response.status === 202);
if (response.ok) {
const data = await response.json();
const start = Object.values(data)[0].week;
const end = firstStartDateAfterDate(new Date());
const startDays = startDaysBetween(start, end);
this.data = fillEmptyStartDaysWithZeroes(startDays, data).slice(-52);
this.errorText = '';
} else {
this.errorText = response.statusText;
}
} catch (err) {
this.errorText = err.message;
} finally {
this.isLoading = false;
defineProps<{
locale: {
loadingTitle: string;
loadingTitleFailed: string;
loadingInfo: string;
};
}>();
const isLoading = ref(false);
const errorText = ref('');
const repoLink = ref(pageData.repoLink || []);
const data = ref<DayData[]>([]);
onMounted(() => {
fetchGraphData();
});
async function fetchGraphData() {
isLoading.value = true;
try {
let response: Response;
do {
response = await GET(`${repoLink.value}/activity/recent-commits/data`);
if (response.status === 202) {
await sleep(1000); // wait for 1 second before retrying
}
},
} while (response.status === 202);
if (response.ok) {
const data = await response.json();
const start = Object.values(data)[0].week;
const end = firstStartDateAfterDate(new Date());
const startDays = startDaysBetween(start, end);
data.value = fillEmptyStartDaysWithZeroes(startDays, data).slice(-52);
errorText.value = '';
} else {
errorText.value = response.statusText;
}
} catch (err) {
errorText.value = err.message;
} finally {
isLoading.value = false;
}
}
toGraphData(data) {
return {
datasets: [
{
data: data.map((i) => ({x: i.week, y: i.commits})),
label: 'Commits',
backgroundColor: chartJsColors['commits'],
borderWidth: 0,
tension: 0.3,
},
],
};
},
function toGraphData(data) {
return {
datasets: [
{
data: data.map((i) => ({x: i.week, y: i.commits})),
label: 'Commits',
backgroundColor: chartJsColors['commits'],
borderWidth: 0,
tension: 0.3,
},
],
};
}
getOptions() {
return {
responsive: true,
maintainAspectRatio: false,
animation: true,
scales: {
x: {
type: 'time',
grid: {
display: false,
},
time: {
minUnit: 'week',
},
ticks: {
maxRotation: 0,
maxTicksLimit: 52,
},
},
y: {
ticks: {
maxTicksLimit: 6,
},
},
},
};
const options = {
responsive: true,
maintainAspectRatio: false,
animation: true,
scales: {
x: {
type: 'time',
grid: {
display: false,
},
time: {
minUnit: 'week',
},
ticks: {
maxRotation: 0,
maxTicksLimit: 52,
},
},
y: {
ticks: {
maxTicksLimit: 6,
},
},
},
};
} satisfies ChartOptions;
</script>
<template>
<div>
<div class="ui header tw-flex tw-items-center tw-justify-between">
@ -137,7 +136,7 @@ export default {
</div>
<Bar
v-memo="data" v-if="data.length !== 0"
:data="toGraphData(data)" :options="getOptions()"
:data="toGraphData(data)" :options="options"
/>
</div>
</div>

View file

@ -1,78 +1,60 @@
<script lang="ts">
<script lang="ts" setup>
import {computed, onMounted, onUnmounted} from 'vue';
import {hideElem, showElem} from '../utils/dom.ts';
const sfc = {
props: {
isAdmin: {
type: Boolean,
required: true,
},
noAccessLabel: {
type: String,
required: true,
},
readLabel: {
type: String,
required: true,
},
writeLabel: {
type: String,
required: true,
},
},
const props = defineProps<{
isAdmin: boolean;
noAccessLabel: string;
readLabel: string;
writeLabel: string;
}>();
computed: {
categories() {
const categories = [
'activitypub',
];
if (this.isAdmin) {
categories.push('admin');
}
categories.push(
'issue',
'misc',
'notification',
'organization',
'package',
'repository',
'user');
return categories;
},
},
const categories = computed(() => {
const categories = [
'activitypub',
];
if (props.isAdmin) {
categories.push('admin');
}
categories.push(
'issue',
'misc',
'notification',
'organization',
'package',
'repository',
'user');
return categories;
});
mounted() {
document.querySelector('#scoped-access-submit').addEventListener('click', this.onClickSubmit);
},
onMounted(() => {
document.querySelector('#scoped-access-submit').addEventListener('click', onClickSubmit);
});
unmounted() {
document.querySelector('#scoped-access-submit').removeEventListener('click', this.onClickSubmit);
},
onUnmounted(() => {
document.querySelector('#scoped-access-submit').removeEventListener('click', onClickSubmit);
});
methods: {
onClickSubmit(e) {
e.preventDefault();
function onClickSubmit(e) {
e.preventDefault();
const warningEl = document.querySelector('#scoped-access-warning');
// check that at least one scope has been selected
for (const el of document.querySelectorAll('.access-token-select')) {
if (el.value) {
// Hide the error if it was visible from previous attempt.
hideElem(warningEl);
// Submit the form.
document.querySelector('#scoped-access-form').submit();
// Don't show the warning.
return;
}
}
// no scopes selected, show validation error
showElem(warningEl);
},
},
};
export default sfc;
const warningEl = document.querySelector('#scoped-access-warning');
// check that at least one scope has been selected
for (const el of document.querySelectorAll<HTMLInputElement>('.access-token-select')) {
if (el.value) {
// Hide the error if it was visible from previous attempt.
hideElem(warningEl);
// Submit the form.
document.querySelector<HTMLFormElement>('#scoped-access-form').submit();
// Don't show the warning.
return;
}
}
// no scopes selected, show validation error
showElem(warningEl);
}
</script>
<template>
<div v-for="category in categories" :key="category" class="field tw-pl-1 tw-pb-1 access-token-category">
<label class="category-label" :for="'access-token-scope-' + category">

View file

@ -3,14 +3,19 @@ import '@github/text-expander-element';
import $ from 'jquery';
import {attachTribute} from '../tribute.ts';
import {hideElem, showElem, autosize, isElemVisible} from '../../utils/dom.ts';
import {initEasyMDEPaste, initTextareaEvents} from './EditorUpload.ts';
import {
EventUploadStateChanged,
initEasyMDEPaste,
initTextareaEvents,
triggerUploadStateChanged,
} from './EditorUpload.ts';
import {handleGlobalEnterQuickSubmit} from './QuickSubmit.ts';
import {renderPreviewPanelContent} from '../repo-editor.ts';
import {easyMDEToolbarActions} from './EasyMDEToolbarActions.ts';
import {initTextExpander} from './TextExpander.ts';
import {showErrorToast} from '../../modules/toast.ts';
import {POST} from '../../modules/fetch.ts';
import {initTextareaMarkdown} from './EditorMarkdown.ts';
import {EventEditorContentChanged, initTextareaMarkdown, triggerEditorContentChanged} from './EditorMarkdown.ts';
import {DropzoneCustomEventReloadFiles, initDropzone} from '../dropzone.ts';
let elementIdCounter = 0;
@ -37,7 +42,34 @@ export function validateTextareaNonEmpty(textarea) {
return true;
}
class ComboMarkdownEditor {
export class ComboMarkdownEditor {
static EventEditorContentChanged = EventEditorContentChanged;
static EventUploadStateChanged = EventUploadStateChanged;
public container : HTMLElement;
// TODO: use correct types to replace these "any" types
options: any;
tabEditor: HTMLElement;
tabPreviewer: HTMLElement;
easyMDE: any;
easyMDEToolbarActions: any;
easyMDEToolbarDefault: any;
textarea: HTMLTextAreaElement & {_giteaComboMarkdownEditor: any};
textareaMarkdownToolbar: HTMLElement;
textareaAutosize: any;
dropzone: HTMLElement;
attachedDropzoneInst: any;
previewUrl: string;
previewContext: string;
previewMode: string;
previewWiki: boolean;
constructor(container, options = {}) {
container._giteaComboMarkdownEditor = this;
this.options = options;
@ -63,14 +95,13 @@ class ComboMarkdownEditor {
setupContainer() {
initTextExpander(this.container.querySelector('text-expander'));
this.container.addEventListener('ce-editor-content-changed', (e) => this.options?.onContentChanged?.(this, e));
}
setupTextarea() {
this.textarea = this.container.querySelector('.markdown-text-editor');
this.textarea._giteaComboMarkdownEditor = this;
this.textarea.id = `_combo_markdown_editor_${String(elementIdCounter++)}`;
this.textarea.addEventListener('input', (e) => this.options?.onContentChanged?.(this, e));
this.textarea.addEventListener('input', () => triggerEditorContentChanged(this.container));
this.applyEditorHeights(this.textarea, this.options.editorHeights);
if (this.textarea.getAttribute('data-disable-autosize') !== 'true') {
@ -115,15 +146,21 @@ class ComboMarkdownEditor {
async setupDropzone() {
const dropzoneParentContainer = this.container.getAttribute('data-dropzone-parent-container');
if (dropzoneParentContainer) {
this.dropzone = this.container.closest(this.container.getAttribute('data-dropzone-parent-container'))?.querySelector('.dropzone');
if (this.dropzone) this.attachedDropzoneInst = await initDropzone(this.dropzone);
}
if (!dropzoneParentContainer) return;
this.dropzone = this.container.closest(this.container.getAttribute('data-dropzone-parent-container'))?.querySelector('.dropzone');
if (!this.dropzone) return;
this.attachedDropzoneInst = await initDropzone(this.dropzone);
// dropzone events
// * "processing" means a file is being uploaded
// * "queuecomplete" means all files have been uploaded
this.attachedDropzoneInst.on('processing', () => triggerUploadStateChanged(this.container));
this.attachedDropzoneInst.on('queuecomplete', () => triggerUploadStateChanged(this.container));
}
dropzoneGetFiles() {
if (!this.dropzone) return null;
return Array.from(this.dropzone.querySelectorAll('.files [name=files]'), (el) => el.value);
return Array.from(this.dropzone.querySelectorAll<HTMLInputElement>('.files [name=files]'), (el) => el.value);
}
dropzoneReloadFiles() {
@ -137,8 +174,13 @@ class ComboMarkdownEditor {
this.attachedDropzoneInst.emit(DropzoneCustomEventReloadFiles);
}
isUploading() {
if (!this.dropzone) return false;
return this.attachedDropzoneInst.getQueuedFiles().length || this.attachedDropzoneInst.getUploadingFiles().length;
}
setupTab() {
const tabs = this.container.querySelectorAll('.tabular.menu > .item');
const tabs = this.container.querySelectorAll<HTMLElement>('.tabular.menu > .item');
// Fomantic Tab requires the "data-tab" to be globally unique.
// So here it uses our defined "data-tab-for" and "data-tab-panel" to generate the "data-tab" attribute for Fomantic.
@ -170,7 +212,7 @@ class ComboMarkdownEditor {
formData.append('mode', this.previewMode);
formData.append('context', this.previewContext);
formData.append('text', this.value());
formData.append('wiki', this.previewWiki);
formData.append('wiki', String(this.previewWiki));
const response = await POST(this.previewUrl, {data: formData});
const data = await response.text();
renderPreviewPanelContent($(panelPreviewer), data);
@ -237,24 +279,24 @@ class ComboMarkdownEditor {
easyMDEOpt.toolbar = this.parseEasyMDEToolbar(EasyMDE, easyMDEOpt.toolbar ?? this.easyMDEToolbarDefault);
this.easyMDE = new EasyMDE(easyMDEOpt);
this.easyMDE.codemirror.on('change', (...args) => {this.options?.onContentChanged?.(this, ...args)});
this.easyMDE.codemirror.on('change', () => triggerEditorContentChanged(this.container));
this.easyMDE.codemirror.setOption('extraKeys', {
'Cmd-Enter': (cm) => handleGlobalEnterQuickSubmit(cm.getTextArea()),
'Ctrl-Enter': (cm) => handleGlobalEnterQuickSubmit(cm.getTextArea()),
Enter: (cm) => {
const tributeContainer = document.querySelector('.tribute-container');
const tributeContainer = document.querySelector<HTMLElement>('.tribute-container');
if (!tributeContainer || tributeContainer.style.display === 'none') {
cm.execCommand('newlineAndIndent');
}
},
Up: (cm) => {
const tributeContainer = document.querySelector('.tribute-container');
const tributeContainer = document.querySelector<HTMLElement>('.tribute-container');
if (!tributeContainer || tributeContainer.style.display === 'none') {
return cm.execCommand('goLineUp');
}
},
Down: (cm) => {
const tributeContainer = document.querySelector('.tribute-container');
const tributeContainer = document.querySelector<HTMLElement>('.tribute-container');
if (!tributeContainer || tributeContainer.style.display === 'none') {
return cm.execCommand('goLineDown');
}
@ -314,13 +356,7 @@ export function getComboMarkdownEditor(el) {
return el?._giteaComboMarkdownEditor;
}
export async function initComboMarkdownEditor(container, options = {}) {
if (container instanceof $) {
if (container.length !== 1) {
throw new Error('initComboMarkdownEditor: container must be a single element');
}
container = container[0];
}
export async function initComboMarkdownEditor(container: HTMLElement, options = {}) {
if (!container) {
throw new Error('initComboMarkdownEditor: container is null');
}

View file

@ -1,5 +1,7 @@
export const EventEditorContentChanged = 'ce-editor-content-changed';
export function triggerEditorContentChanged(target) {
target.dispatchEvent(new CustomEvent('ce-editor-content-changed', {bubbles: true}));
target.dispatchEvent(new CustomEvent(EventEditorContentChanged, {bubbles: true}));
}
function handleIndentSelection(textarea, e) {

View file

@ -7,9 +7,16 @@ import {
DropzoneCustomEventUploadDone,
generateMarkdownLinkForAttachment,
} from '../dropzone.ts';
import type CodeMirror from 'codemirror';
let uploadIdCounter = 0;
export const EventUploadStateChanged = 'ce-upload-state-changed';
export function triggerUploadStateChanged(target) {
target.dispatchEvent(new CustomEvent(EventUploadStateChanged, {bubbles: true}));
}
function uploadFile(dropzoneEl, file) {
return new Promise((resolve) => {
const curUploadId = uploadIdCounter++;
@ -18,7 +25,7 @@ function uploadFile(dropzoneEl, file) {
const onUploadDone = ({file}) => {
if (file._giteaUploadId === curUploadId) {
dropzoneInst.off(DropzoneCustomEventUploadDone, onUploadDone);
resolve();
resolve(file);
}
};
dropzoneInst.on(DropzoneCustomEventUploadDone, onUploadDone);
@ -27,6 +34,8 @@ function uploadFile(dropzoneEl, file) {
}
class TextareaEditor {
editor : HTMLTextAreaElement;
constructor(editor) {
this.editor = editor;
}
@ -61,6 +70,8 @@ class TextareaEditor {
}
class CodeMirrorEditor {
editor: CodeMirror.EditorFromTextArea;
constructor(editor) {
this.editor = editor;
}

View file

@ -1,5 +1,41 @@
import {matchEmoji, matchMention} from '../../utils/match.ts';
import {matchEmoji, matchMention, matchIssue} from '../../utils/match.ts';
import {emojiString} from '../emoji.ts';
import {svg} from '../../svg.ts';
import {parseIssueHref} from '../../utils.ts';
import {createElementFromAttrs, createElementFromHTML} from '../../utils/dom.ts';
import {getIssueColor, getIssueIcon} from '../issue.ts';
import {debounce} from 'perfect-debounce';
const debouncedSuggestIssues = debounce((key: string, text: string) => new Promise<{matched:boolean; fragment?: HTMLElement}>(async (resolve) => {
const {owner, repo, index} = parseIssueHref(window.location.href);
const matches = await matchIssue(owner, repo, index, text);
if (!matches.length) return resolve({matched: false});
const ul = document.createElement('ul');
ul.classList.add('suggestions');
for (const issue of matches) {
const li = createElementFromAttrs('li', {
role: 'option',
'data-value': `${key}${issue.id}`,
class: 'tw-flex tw-gap-2',
});
const icon = svg(getIssueIcon(issue), 16, ['text', getIssueColor(issue)].join(' '));
li.append(createElementFromHTML(icon));
const id = document.createElement('span');
id.textContent = issue.id.toString();
li.append(id);
const nameSpan = document.createElement('span');
nameSpan.textContent = issue.title;
li.append(nameSpan);
ul.append(li);
}
resolve({matched: true, fragment: ul});
}), 100);
export function initTextExpander(expander) {
expander?.addEventListener('text-expander-change', ({detail: {key, provide, text}}) => {
@ -49,12 +85,14 @@ export function initTextExpander(expander) {
}
provide({matched: true, fragment: ul});
} else if (key === '#') {
provide(debouncedSuggestIssues(key, text));
}
});
expander?.addEventListener('text-expander-value', ({detail}) => {
if (detail?.item) {
// add a space after @mentions as it's likely the user wants one
const suffix = detail.key === '@' ? ' ' : '';
// add a space after @mentions and #issue as it's likely the user wants one
const suffix = ['@', '#'].includes(detail.key) ? ' ' : '';
detail.value = `${detail.item.getAttribute('data-value')}${suffix}`;
}
});

View file

@ -128,10 +128,12 @@ export async function initDropzone(dropzoneEl) {
fileUuidDict = {};
for (const attachment of respData) {
const file = {name: attachment.name, uuid: attachment.uuid, size: attachment.size};
const imgSrc = `${attachmentBaseLinkUrl}/${file.uuid}`;
dzInst.emit('addedfile', file);
dzInst.emit('thumbnail', file, imgSrc);
dzInst.emit('complete', file);
if (isImageFile(file.name)) {
const imgSrc = `${attachmentBaseLinkUrl}/${file.uuid}`;
dzInst.emit('thumbnail', file, imgSrc);
}
addCopyLink(file); // it is from server response, so no "type"
fileUuidDict[file.uuid] = {submitted: true};
const input = createElementFromAttrs('input', {name: 'files', type: 'hidden', id: `dropzone-file-${file.uuid}`, value: file.uuid});

View file

@ -0,0 +1,32 @@
import type {Issue} from '../types.ts';
export function getIssueIcon(issue: Issue) {
if (issue.pull_request) {
if (issue.state === 'open') {
if (issue.pull_request.draft === true) {
return 'octicon-git-pull-request-draft'; // WIP PR
}
return 'octicon-git-pull-request'; // Open PR
} else if (issue.pull_request.merged === true) {
return 'octicon-git-merge'; // Merged PR
}
return 'octicon-git-pull-request'; // Closed PR
} else if (issue.state === 'open') {
return 'octicon-issue-opened'; // Open Issue
}
return 'octicon-issue-closed'; // Closed Issue
}
export function getIssueColor(issue: Issue) {
if (issue.pull_request) {
if (issue.pull_request.draft === true) {
return 'grey'; // WIP PR
} else if (issue.pull_request.merged === true) {
return 'purple'; // Merged PR
}
}
if (issue.state === 'open') {
return 'green'; // Open Issue
}
return 'red'; // Closed Issue
}

View file

@ -3,6 +3,8 @@ import {hideElem, queryElems, showElem} from '../utils/dom.ts';
import {POST} from '../modules/fetch.ts';
import {showErrorToast} from '../modules/toast.ts';
import {sleep} from '../utils.ts';
import RepoActivityTopAuthors from '../components/RepoActivityTopAuthors.vue';
import {createApp} from 'vue';
async function onDownloadArchive(e) {
e.preventDefault();
@ -32,6 +34,13 @@ export function initRepoArchiveLinks() {
queryElems('a.archive-link[href]', (el) => el.addEventListener('click', onDownloadArchive));
}
export function initRepoActivityTopAuthorsChart() {
const el = document.querySelector('#repo-activity-top-authors-chart');
if (el) {
createApp(RepoActivityTopAuthors).mount(el);
}
}
export function initRepoCloneLink() {
const $repoCloneSsh = $('#repo-clone-ssh');
const $repoCloneHttps = $('#repo-clone-https');


@ -1,11 +1,12 @@
import $ from 'jquery';
import {handleReply} from './repo-issue.ts';
import {getComboMarkdownEditor, initComboMarkdownEditor} from './comp/ComboMarkdownEditor.ts';
import {getComboMarkdownEditor, initComboMarkdownEditor, ComboMarkdownEditor} from './comp/ComboMarkdownEditor.ts';
import {POST} from '../modules/fetch.ts';
import {showErrorToast} from '../modules/toast.ts';
import {hideElem, showElem} from '../utils/dom.ts';
import {attachRefIssueContextPopup} from './contextpopup.ts';
import {initCommentContent, initMarkupContent} from '../markup/content.ts';
import {triggerUploadStateChanged} from './comp/EditorUpload.ts';
async function onEditContent(event) {
event.preventDefault();
@ -15,7 +16,7 @@ async function onEditContent(event) {
const renderContent = segment.querySelector('.render-content');
const rawContent = segment.querySelector('.raw-content');
let comboMarkdownEditor;
let comboMarkdownEditor : ComboMarkdownEditor;
const cancelAndReset = (e) => {
e.preventDefault();
@ -79,9 +80,12 @@ async function onEditContent(event) {
comboMarkdownEditor = getComboMarkdownEditor(editContentZone.querySelector('.combo-markdown-editor'));
if (!comboMarkdownEditor) {
editContentZone.innerHTML = document.querySelector('#issue-comment-editor-template').innerHTML;
const saveButton = editContentZone.querySelector('.ui.primary.button');
comboMarkdownEditor = await initComboMarkdownEditor(editContentZone.querySelector('.combo-markdown-editor'));
const syncUiState = () => saveButton.disabled = comboMarkdownEditor.isUploading();
comboMarkdownEditor.container.addEventListener(ComboMarkdownEditor.EventUploadStateChanged, syncUiState);
editContentZone.querySelector('.ui.cancel.button').addEventListener('click', cancelAndReset);
editContentZone.querySelector('.ui.primary.button').addEventListener('click', saveAndRefresh);
saveButton.addEventListener('click', saveAndRefresh);
}
// Show write/preview tab and copy raw content as needed
@ -93,6 +97,7 @@ async function onEditContent(event) {
}
comboMarkdownEditor.switchTabToEditor();
comboMarkdownEditor.focus();
triggerUploadStateChanged(comboMarkdownEditor.container);
}
export function initRepoIssueCommentEdit() {
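For context, triggerUploadStateChanged is imported from comp/EditorUpload.ts but its body is not part of this diff; presumably it just re-dispatches the upload-state event on the editor container so the save-button sync above runs once on init. A hedged sketch of that assumption:
    // Assumed shape only — the real helper in comp/EditorUpload.ts may differ.
    import {ComboMarkdownEditor} from './ComboMarkdownEditor.ts';
    export function triggerUploadStateChanged(container: HTMLElement) {
      container.dispatchEvent(new CustomEvent(ComboMarkdownEditor.EventUploadStateChanged, {bubbles: true}));
    }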


@ -3,7 +3,7 @@ import {htmlEscape} from 'escape-goat';
import {createTippy, showTemporaryTooltip} from '../modules/tippy.ts';
import {hideElem, showElem, toggleElem} from '../utils/dom.ts';
import {setFileFolding} from './file-fold.ts';
import {getComboMarkdownEditor, initComboMarkdownEditor} from './comp/ComboMarkdownEditor.ts';
import {ComboMarkdownEditor, getComboMarkdownEditor, initComboMarkdownEditor} from './comp/ComboMarkdownEditor.ts';
import {toAbsoluteUrl} from '../utils.ts';
import {GET, POST} from '../modules/fetch.ts';
import {showErrorToast} from '../modules/toast.ts';
@ -483,9 +483,9 @@ export function initRepoPullRequestReview() {
await handleReply(this);
});
const $reviewBox = $('.review-box-panel');
if ($reviewBox.length === 1) {
const _promise = initComboMarkdownEditor($reviewBox.find('.combo-markdown-editor'));
const elReviewBox = document.querySelector('.review-box-panel');
if (elReviewBox) {
initComboMarkdownEditor(elReviewBox.querySelector('.combo-markdown-editor'));
}
// The following part is only for diff views
@ -548,7 +548,7 @@ export function initRepoPullRequestReview() {
$td.find("input[name='line']").val(idx);
$td.find("input[name='side']").val(side === 'left' ? 'previous' : 'proposed');
$td.find("input[name='path']").val(path);
const editor = await initComboMarkdownEditor($td.find('.combo-markdown-editor'));
const editor = await initComboMarkdownEditor($td[0].querySelector('.combo-markdown-editor'));
editor.focus();
} catch (error) {
console.error(error);
@ -669,20 +669,22 @@ export async function initSingleCommentEditor($commentForm) {
// pages:
// * normal new issue/pr page: no status-button, no comment-button (there is only a normal submit button which can submit empty content)
// * issue/pr view page: with comment form, has status-button and comment-button
const opts = {};
const statusButton = document.querySelector('#status-button');
const commentButton = document.querySelector('#comment-button');
opts.onContentChanged = (editor) => {
const editorText = editor.value().trim();
const editor = await initComboMarkdownEditor($commentForm[0].querySelector('.combo-markdown-editor'));
const statusButton = document.querySelector<HTMLButtonElement>('#status-button');
const commentButton = document.querySelector<HTMLButtonElement>('#comment-button');
const syncUiState = () => {
const editorText = editor.value().trim(), isUploading = editor.isUploading();
if (statusButton) {
statusButton.textContent = statusButton.getAttribute(editorText ? 'data-status-and-comment' : 'data-status');
statusButton.disabled = isUploading;
}
if (commentButton) {
commentButton.disabled = !editorText;
commentButton.disabled = !editorText || isUploading;
}
};
const editor = await initComboMarkdownEditor($commentForm.find('.combo-markdown-editor'), opts);
opts.onContentChanged(editor); // sync state of buttons with the initial content
editor.container.addEventListener(ComboMarkdownEditor.EventUploadStateChanged, syncUiState);
editor.container.addEventListener(ComboMarkdownEditor.EventEditorContentChanged, syncUiState);
syncUiState();
}
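The onContentChanged option is gone in favor of events dispatched on editor.container, which also makes it easy to keep any additional control in sync. A small sketch in the same scope as the function above, with a hypothetical extra button (the #draft-button id is made up for illustration):
    // Hypothetical extra control kept in sync the same way as the status/comment buttons above.
    const draftButton = document.querySelector<HTMLButtonElement>('#draft-button'); // id is illustrative
    const syncDraftButton = () => {
      if (draftButton) draftButton.disabled = !editor.value().trim() || editor.isUploading();
    };
    editor.container.addEventListener(ComboMarkdownEditor.EventEditorContentChanged, syncDraftButton);
    editor.container.addEventListener(ComboMarkdownEditor.EventUploadStateChanged, syncDraftButton);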
export function initIssueTemplateCommentEditors($commentForm) {
@ -690,16 +692,13 @@ export function initIssueTemplateCommentEditors($commentForm) {
// * new issue with issue template
const $comboFields = $commentForm.find('.combo-editor-dropzone');
const initCombo = async ($combo) => {
const $dropzoneContainer = $combo.find('.form-field-dropzone');
const $formField = $combo.find('.form-field-real');
const $markdownEditor = $combo.find('.combo-markdown-editor');
const initCombo = async (elCombo) => {
const $formField = $(elCombo.querySelector('.form-field-real'));
const dropzoneContainer = elCombo.querySelector('.form-field-dropzone');
const markdownEditor = elCombo.querySelector('.combo-markdown-editor');
const editor = await initComboMarkdownEditor($markdownEditor, {
onContentChanged: (editor) => {
$formField.val(editor.value());
},
});
const editor = await initComboMarkdownEditor(markdownEditor);
editor.container.addEventListener(ComboMarkdownEditor.EventEditorContentChanged, () => $formField.val(editor.value()));
$formField.on('focus', async () => {
// deactivate all markdown editors
@ -709,8 +708,8 @@ export function initIssueTemplateCommentEditors($commentForm) {
// activate this markdown editor
hideElem($formField);
showElem($markdownEditor);
showElem($dropzoneContainer);
showElem(markdownEditor);
showElem(dropzoneContainer);
await editor.switchToUserPreference();
editor.focus();
@ -718,7 +717,7 @@ export function initIssueTemplateCommentEditors($commentForm) {
};
for (const el of $comboFields) {
initCombo($(el));
initCombo(el);
}
}


@ -50,7 +50,7 @@ function initTagNameEditor() {
}
function initRepoReleaseEditor() {
const editor = document.querySelector('.repository.new.release .combo-markdown-editor');
const editor = document.querySelector<HTMLElement>('.repository.new.release .combo-markdown-editor');
if (!editor) {
return;
}


@ -4,11 +4,11 @@ import {fomanticMobileScreen} from '../modules/fomantic.ts';
import {POST} from '../modules/fetch.ts';
async function initRepoWikiFormEditor() {
const editArea = document.querySelector('.repository.wiki .combo-markdown-editor textarea');
const editArea = document.querySelector<HTMLTextAreaElement>('.repository.wiki .combo-markdown-editor textarea');
if (!editArea) return;
const form = document.querySelector('.repository.wiki.new .ui.form');
const editorContainer = form.querySelector('.combo-markdown-editor');
const editorContainer = form.querySelector<HTMLElement>('.combo-markdown-editor');
let editor;
let renderRequesting = false;


@ -2,7 +2,6 @@
import './bootstrap.ts';
import './htmx.ts';
import {initRepoActivityTopAuthorsChart} from './components/RepoActivityTopAuthors.vue';
import {initDashboardRepoList} from './components/DashboardRepoList.vue';
import {initGlobalCopyToClipboardListener} from './features/clipboard.ts';
@ -42,7 +41,7 @@ import {initRepoTemplateSearch} from './features/repo-template.ts';
import {initRepoCodeView} from './features/repo-code.ts';
import {initSshKeyFormParser} from './features/sshkey-helper.ts';
import {initUserSettings} from './features/user-settings.ts';
import {initRepoArchiveLinks} from './features/repo-common.ts';
import {initRepoActivityTopAuthorsChart, initRepoArchiveLinks} from './features/repo-common.ts';
import {initRepoMigrationStatusChecker} from './features/repo-migrate.ts';
import {
initRepoSettingGitHook,


@ -36,3 +36,13 @@ export type IssueData = {
type: string,
index: string,
}
export type Issue = {
id: number;
title: string;
state: 'open' | 'closed';
pull_request?: {
draft: boolean;
merged: boolean;
};
};
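For illustration, a single entry from the suggestions endpoint would deserialize into this type roughly like so (values are made up; a plain issue simply omits pull_request):
    // Made-up example values; only the field shapes matter.
    const example: Issue = {
      id: 42,
      title: 'Dropdown lists closed issues first',
      state: 'closed',
    };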


@ -1,8 +1,10 @@
import emojis from '../../../assets/emoji.json';
import type {Issue} from '../features/issue.ts';
import {GET} from '../modules/fetch.ts';
const maxMatches = 6;
function sortAndReduce(map: Map<string, number>) {
function sortAndReduce<T>(map: Map<T, number>): T[] {
const sortedMap = new Map(Array.from(map.entries()).sort((a, b) => a[1] - b[1]));
return Array.from(sortedMap.keys()).slice(0, maxMatches);
}
@ -27,11 +29,12 @@ export function matchEmoji(queryText: string): string[] {
return sortAndReduce(results);
}
export function matchMention(queryText: string): string[] {
type MentionSuggestion = {value: string; name: string; fullname: string; avatar: string};
export function matchMention(queryText: string): MentionSuggestion[] {
const query = queryText.toLowerCase();
// results is a map of weights, lower is better
const results = new Map();
const results = new Map<MentionSuggestion, number>();
for (const obj of window.config.mentionValues ?? []) {
const index = obj.key.toLowerCase().indexOf(query);
if (index === -1) continue;
@ -41,3 +44,13 @@ export function matchMention(queryText: string): string[] {
return sortAndReduce(results);
}
export async function matchIssue(owner: string, repo: string, issueIndexStr: string, query: string): Promise<Issue[]> {
const res = await GET(`${window.config.appSubUrl}/${owner}/${repo}/issues/suggestions?q=${encodeURIComponent(query)}`);
const issues: Issue[] = await res.json();
const issueIndex = parseInt(issueIndexStr);
// filter out the issue with the same id (i.e. the one currently being viewed)
return issues.filter((i) => i.id !== issueIndex);
}


@ -42,14 +42,14 @@ export function firstStartDateAfterDate(inputDate: Date): number {
return resultDate.valueOf();
}
type DayData = {
export type DayData = {
week: number,
additions: number,
deletions: number,
commits: number,
}
export function fillEmptyStartDaysWithZeroes(startDays: number[], data: DayData): DayData[] {
export function fillEmptyStartDaysWithZeroes(startDays: number[], data: DayData[]): DayData[] {
const result = {};
for (const startDay of startDays) {


@ -7,9 +7,15 @@ test('toAbsoluteLocaleDate', () => {
day: 'numeric',
})).toEqual('March 15, 2024');
expect(toAbsoluteLocaleDate('2024-03-15', 'de-DE', {
expect(toAbsoluteLocaleDate('2024-03-15T01:02:03', 'de-DE', {
year: 'numeric',
month: 'long',
day: 'numeric',
})).toEqual('15. März 2024');
expect(toAbsoluteLocaleDate('12345-03-15 01:02:03', '', {
year: 'numeric',
month: 'short',
day: 'numeric',
})).toEqual('Mar 15, 12345');
});


@ -1,30 +1,28 @@
import {Temporal} from 'temporal-polyfill';
export function toAbsoluteLocaleDate(dateStr, lang, opts) {
return Temporal.PlainDate.from(dateStr).toLocaleString(lang ?? [], opts);
export function toAbsoluteLocaleDate(date: string, lang: string, opts: Intl.DateTimeFormatOptions) {
return new Date(date).toLocaleString(lang || [], opts);
}
window.customElements.define('absolute-date', class extends HTMLElement {
static observedAttributes = ['date', 'year', 'month', 'weekday', 'day'];
initialized = false;
update = () => {
const year = this.getAttribute('year') ?? '';
const month = this.getAttribute('month') ?? '';
const weekday = this.getAttribute('weekday') ?? '';
const day = this.getAttribute('day') ?? '';
const opt: Intl.DateTimeFormatOptions = {};
for (const attr of ['year', 'month', 'weekday', 'day']) {
if (this.getAttribute(attr)) opt[attr] = this.getAttribute(attr);
}
const lang = this.closest('[lang]')?.getAttribute('lang') ||
this.ownerDocument.documentElement.getAttribute('lang') || '';
// only use the first 10 characters, e.g. the `yyyy-mm-dd` part
const dateStr = this.getAttribute('date').substring(0, 10);
// only use the date part, it is guaranteed to be in ISO format (YYYY-MM-DDTHH:mm:ss.sssZ)
let date = this.getAttribute('date');
let dateSep = date.indexOf('T');
dateSep = dateSep === -1 ? date.indexOf(' ') : dateSep;
date = dateSep === -1 ? date : date.substring(0, dateSep);
if (!this.shadowRoot) this.attachShadow({mode: 'open'});
this.shadowRoot.textContent = toAbsoluteLocaleDate(dateStr, lang, {
...(year && {year}),
...(month && {month}),
...(weekday && {weekday}),
...(day && {day}),
});
this.shadowRoot.textContent = toAbsoluteLocaleDate(date, lang, opt);
};
attributeChangedCallback(_name, oldValue, newValue) {