mirror of https://codeberg.org/forgejo/forgejo.git, synced 2024-12-26 21:14:07 +01:00
Fix bug hidden on CI and make CI fail if tests fail (#29254)
The migration tests failed, but CI reported success: https://github.com/go-gitea/gitea/actions/runs/7364373807/job/20044685969#step:8:141

This PR fixes the bug in migration v283 and also the CI behaviour that hid the failure. The reason is in the Makefile: `GITEA_ROOT="$(CURDIR)" GITEA_CONF=tests/mysql.ini $(GO) test $(GOTESTFLAGS) -tags='$(TEST_TAGS)' $(MIGRATE_TEST_PACKAGES)` returns the error exit code, but `for pkg in $(shell $(GO) list code.gitea.io/gitea/models/migrations/...); do \ GITEA_ROOT="$(CURDIR)" GITEA_CONF=tests/mysql.ini $(GO) test $(GOTESTFLAGS) -tags '$(TEST_TAGS)' $$pkg; \ done` does not: the shell for-loop only reports the exit status of its last iteration, so a failing package earlier in the list is swallowed. This also fixes #29602.

(cherry picked from commit 45277486c2c6213b7766b1da708a991cdb1f3565)

Conflicts:
	.github/workflows/pull-db-tests.yml
	Makefile
	models/migrations/v1_22/v283.go
	models/migrations/v1_22/v286_test.go
	models/migrations/v1_22/v287_test.go
Already in Forgejo for the Makefile & CI logic, but the Gitea changes rule otherwise.
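To illustrate the exit-code problem, here is a minimal sketch, not the repository's actual Makefile; the target names and package path are hypothetical, and appending `|| exit 1` is just one way to propagate the failure:

# Broken: a recipe's exit status is that of the last command executed, so
# the for-loop hides a failure from any package except the final one.
test-migrations-broken:
	@for pkg in $$(go list ./models/migrations/...); do \
		go test $$pkg; \
	done

# Fixed: return a non-zero status as soon as one package fails,
# so `make` (and therefore CI) fails too.
test-migrations:
	@for pkg in $$(go list ./models/migrations/...); do \
		go test $$pkg || exit 1; \
	done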
parent 82b7de1360
commit 97c8dbf332
17 changed files with 148 additions and 47 deletions
@@ -36,12 +36,14 @@ func Test_DropTableColumns(t *testing.T) {
 		"updated_unix",
 	}
 
+	x.SetMapper(names.GonicMapper{})
+
 	for i := range columns {
-		x.SetMapper(names.GonicMapper{})
 		if err := x.Sync(new(DropTest)); err != nil {
 			t.Errorf("unable to create DropTest table: %v", err)
 			return
 		}
+
 		sess := x.NewSession()
 		if err := sess.Begin(); err != nil {
 			sess.Close()
@@ -64,7 +66,6 @@ func Test_DropTableColumns(t *testing.T) {
 			return
 		}
 		for j := range columns[i+1:] {
-			x.SetMapper(names.GonicMapper{})
 			if err := x.Sync(new(DropTest)); err != nil {
 				t.Errorf("unable to create DropTest table: %v", err)
 				return
@@ -0,0 +1,4 @@
+-
+  id: 1
+  repo_id: 1
+  index: 1
@@ -0,0 +1,11 @@
+-
+  id: 1
+  uuid: a0eebc99-9c0b-4ef8-bb6d-6bb9bd380a11
+  issue_id: 1
+  release_id: 0
+
+-
+  id: 2
+  uuid: a0eebc99-9c0b-4ef8-bb6d-6bb9bd380a12
+  issue_id: 0
+  release_id: 1
@@ -0,0 +1,3 @@
+-
+  id: 1
+  repo_id: 1
@@ -0,0 +1,3 @@
+-
+  id: 1
+  repo_id: 1
@@ -0,0 +1,3 @@
+-
+  id: 1
+  commit_sha: 19fe5caf872476db265596eaac1dc35ad1c6422d
@@ -0,0 +1,3 @@
+-
+  id: 1
+  context_hash: 19fe5caf872476db265596eaac1dc35ad1c6422d
@@ -0,0 +1,5 @@
+-
+  id: 1
+  commit_sha: 19fe5caf872476db265596eaac1dc35ad1c6422d
+  merge_base: 19fe5caf872476db265596eaac1dc35ad1c6422d
+  merged_commit_id: 19fe5caf872476db265596eaac1dc35ad1c6422d
@@ -0,0 +1,3 @@
+-
+  id: 1
+  sha1: 19fe5caf872476db265596eaac1dc35ad1c6422d
@@ -0,0 +1,3 @@
+-
+  id: 1
+  commit_id: 19fe5caf872476db265596eaac1dc35ad1c6422d
@@ -0,0 +1,3 @@
+-
+  id: 1
+  commit_sha: 19fe5caf872476db265596eaac1dc35ad1c6422d
@@ -0,0 +1,3 @@
+-
+  id: 1
+  commit_sha: 19fe5caf872476db265596eaac1dc35ad1c6422d
@@ -0,0 +1,4 @@
+-
+  id: 1
+  description: the badge
+  image_url: https://gitea.com/myimage.png
@@ -15,7 +15,6 @@ func Test_AddRepoIDForAttachment(t *testing.T) {
 	type Attachment struct {
 		ID int64 `xorm:"pk autoincr"`
 		UUID string `xorm:"uuid UNIQUE"`
-		RepoID int64 `xorm:"INDEX"` // this should not be zero
 		IssueID int64 `xorm:"INDEX"` // maybe zero when creating
 		ReleaseID int64 `xorm:"INDEX"` // maybe zero when creating
 		UploaderID int64 `xorm:"INDEX DEFAULT 0"`
@@ -44,12 +43,21 @@ func Test_AddRepoIDForAttachment(t *testing.T) {
 		return
 	}
 
-	var issueAttachments []*Attachment
-	err := x.Where("issue_id > 0").Find(&issueAttachments)
+	type NewAttachment struct {
+		ID int64 `xorm:"pk autoincr"`
+		UUID string `xorm:"uuid UNIQUE"`
+		RepoID int64 `xorm:"INDEX"` // this should not be zero
+		IssueID int64 `xorm:"INDEX"` // maybe zero when creating
+		ReleaseID int64 `xorm:"INDEX"` // maybe zero when creating
+		UploaderID int64 `xorm:"INDEX DEFAULT 0"`
+	}
+
+	var issueAttachments []*NewAttachment
+	err := x.Table("attachment").Where("issue_id > 0").Find(&issueAttachments)
 	assert.NoError(t, err)
 	for _, attach := range issueAttachments {
-		assert.Greater(t, attach.RepoID, 0)
-		assert.Greater(t, attach.IssueID, 0)
+		assert.Greater(t, attach.RepoID, int64(0))
+		assert.Greater(t, attach.IssueID, int64(0))
 		var issue Issue
 		has, err := x.ID(attach.IssueID).Get(&issue)
 		assert.NoError(t, err)
@@ -57,12 +65,12 @@ func Test_AddRepoIDForAttachment(t *testing.T) {
 		assert.EqualValues(t, attach.RepoID, issue.RepoID)
 	}
 
-	var releaseAttachments []*Attachment
-	err = x.Where("release_id > 0").Find(&releaseAttachments)
+	var releaseAttachments []*NewAttachment
+	err = x.Table("attachment").Where("release_id > 0").Find(&releaseAttachments)
 	assert.NoError(t, err)
 	for _, attach := range releaseAttachments {
-		assert.Greater(t, attach.RepoID, 0)
-		assert.Greater(t, attach.IssueID, 0)
+		assert.Greater(t, attach.RepoID, int64(0))
+		assert.Greater(t, attach.ReleaseID, int64(0))
 		var release Release
 		has, err := x.ID(attach.ReleaseID).Get(&release)
 		assert.NoError(t, err)
@@ -4,10 +4,40 @@
 package v1_22 //nolint
 
 import (
+	"fmt"
+
 	"xorm.io/xorm"
+	"xorm.io/xorm/schemas"
 )
 
 func AddCombinedIndexToIssueUser(x *xorm.Engine) error {
+	type OldIssueUser struct {
+		IssueID int64
+		UID int64
+		Cnt int64
+	}
+
+	var duplicatedIssueUsers []OldIssueUser
+	if err := x.SQL("select * from (select issue_id, uid, count(1) as cnt from issue_user group by issue_id, uid) a where a.cnt > 1").
+		Find(&duplicatedIssueUsers); err != nil {
+		return err
+	}
+	for _, issueUser := range duplicatedIssueUsers {
+		if x.Dialect().URI().DBType == schemas.MSSQL {
+			if _, err := x.Exec(fmt.Sprintf("delete from issue_user where id in (SELECT top %d id FROM issue_user WHERE issue_id = ? and uid = ?)", issueUser.Cnt-1), issueUser.IssueID, issueUser.UID); err != nil {
+				return err
+			}
+		} else {
+			var ids []int64
+			if err := x.SQL("SELECT id FROM issue_user WHERE issue_id = ? and uid = ? limit ?", issueUser.IssueID, issueUser.UID, issueUser.Cnt-1).Find(&ids); err != nil {
+				return err
+			}
+			if _, err := x.Table("issue_user").In("id", ids).Delete(); err != nil {
+				return err
+			}
+		}
+	}
+
 	type IssueUser struct {
 		UID int64 `xorm:"INDEX unique(uid_to_issue)"` // User ID.
 		IssueID int64 `xorm:"INDEX unique(uid_to_issue)"`
@@ -36,9 +36,9 @@ func expandHashReferencesToSha256(x *xorm.Engine) error {
 	if setting.Database.Type.IsMSSQL() {
 		// drop indexes that need to be re-created afterwards
 		droppedIndexes := []string{
-			"DROP INDEX commit_status.IDX_commit_status_context_hash",
-			"DROP INDEX review_state.UQE_review_state_pull_commit_user",
-			"DROP INDEX repo_archiver.UQE_repo_archiver_s",
+			"DROP INDEX IF EXISTS [IDX_commit_status_context_hash] ON [commit_status]",
+			"DROP INDEX IF EXISTS [UQE_review_state_pull_commit_user] ON [review_state]",
+			"DROP INDEX IF EXISTS [UQE_repo_archiver_s] ON [repo_archiver]",
 		}
 		for _, s := range droppedIndexes {
 			_, err := db.Exec(s)
@@ -53,7 +53,7 @@ func expandHashReferencesToSha256(x *xorm.Engine) error {
 		if setting.Database.Type.IsMySQL() {
 			_, err = db.Exec(fmt.Sprintf("ALTER TABLE `%s` MODIFY COLUMN `%s` VARCHAR(64)", alts[0], alts[1]))
 		} else if setting.Database.Type.IsMSSQL() {
-			_, err = db.Exec(fmt.Sprintf("ALTER TABLE `%s` ALTER COLUMN `%s` VARCHAR(64)", alts[0], alts[1]))
+			_, err = db.Exec(fmt.Sprintf("ALTER TABLE [%s] ALTER COLUMN [%s] VARCHAR(64)", alts[0], alts[1]))
 		} else {
 			_, err = db.Exec(fmt.Sprintf("ALTER TABLE `%s` ALTER COLUMN `%s` TYPE VARCHAR(64)", alts[0], alts[1]))
 		}
@@ -14,59 +14,75 @@ import (
 
 func PrepareOldRepository(t *testing.T) (*xorm.Engine, func()) {
 	type Repository struct { // old struct
 		ID int64 `xorm:"pk autoincr"`
-		ObjectFormatName string `xorm:"VARCHAR(6) NOT NULL DEFAULT 'sha1'"`
 	}
 
-	type CommitStatus struct { // old struct
-		ID int64 `xorm:"pk autoincr"`
-		ContextHash string `xorm:"char(40)"`
+	type CommitStatus struct {
+		ID int64
+		ContextHash string
 	}
 
-	type Comment struct { // old struct
-		ID int64 `xorm:"pk autoincr"`
-		CommitSHA string `xorm:"VARCHAR(40)"`
+	type RepoArchiver struct {
+		ID int64
+		RepoID int64
+		Type int
+		CommitID string
 	}
 
-	type PullRequest struct { // old struct
-		ID int64 `xorm:"pk autoincr"`
-		MergeBase string `xorm:"VARCHAR(40)"`
-		MergedCommitID string `xorm:"VARCHAR(40)"`
+	type ReviewState struct {
+		ID int64
+		CommitSHA string
+		UserID int64
+		PullID int64
 	}
 
-	type Review struct { // old struct
-		ID int64 `xorm:"pk autoincr"`
-		CommitID string `xorm:"VARCHAR(40)"`
+	type Comment struct {
+		ID int64
+		CommitSHA string
 	}
 
-	type ReviewState struct { // old struct
-		ID int64 `xorm:"pk autoincr"`
-		CommitSHA string `xorm:"VARCHAR(40)"`
+	type PullRequest struct {
+		ID int64
+		CommitSHA string
+		MergeBase string
+		MergedCommitID string
 	}
 
-	type RepoArchiver struct { // old struct
-		ID int64 `xorm:"pk autoincr"`
-		CommitID string `xorm:"VARCHAR(40)"`
+	type Release struct {
+		ID int64
+		Sha1 string
 	}
 
-	type Release struct { // old struct
-		ID int64 `xorm:"pk autoincr"`
-		Sha1 string `xorm:"VARCHAR(40)"`
+	type RepoIndexerStatus struct {
+		ID int64
+		CommitSHA string
 	}
 
-	type RepoIndexerStatus struct { // old struct
-		ID int64 `xorm:"pk autoincr"`
-		CommitSha string `xorm:"VARCHAR(40)"`
+	type Review struct {
+		ID int64
+		CommitID string
 	}
 
 	// Prepare and load the testing database
-	return base.PrepareTestEnv(t, 0, new(Repository), new(CommitStatus), new(Comment), new(PullRequest), new(Review), new(ReviewState), new(RepoArchiver), new(Release), new(RepoIndexerStatus))
+	return base.PrepareTestEnv(t, 0,
+		new(Repository),
+		new(CommitStatus),
+		new(RepoArchiver),
+		new(ReviewState),
+		new(Review),
+		new(Comment),
+		new(PullRequest),
+		new(Release),
+		new(RepoIndexerStatus),
+	)
 }
 
 func Test_RepositoryFormat(t *testing.T) {
 	x, deferable := PrepareOldRepository(t)
 	defer deferable()
 
+	assert.NoError(t, AdjustDBForSha256(x))
+
 	type Repository struct {
 		ID int64 `xorm:"pk autoincr"`
 		ObjectFormatName string `xorg:"not null default('sha1')"`
@@ -79,12 +95,10 @@ func Test_RepositoryFormat(t *testing.T) {
 	assert.NoError(t, err)
 	assert.EqualValues(t, 4, count)
 
-	assert.NoError(t, AdjustDBForSha256(x))
-
-	repo.ID = 20
 	repo.ObjectFormatName = "sha256"
 	_, err = x.Insert(repo)
 	assert.NoError(t, err)
+	id := repo.ID
 
 	count, err = x.Count(new(Repository))
 	assert.NoError(t, err)
|
||||||
assert.EqualValues(t, "sha1", repo.ObjectFormatName)
|
assert.EqualValues(t, "sha1", repo.ObjectFormatName)
|
||||||
|
|
||||||
repo = new(Repository)
|
repo = new(Repository)
|
||||||
ok, err = x.ID(20).Get(repo)
|
ok, err = x.ID(id).Get(repo)
|
||||||
assert.NoError(t, err)
|
assert.NoError(t, err)
|
||||||
assert.EqualValues(t, true, ok)
|
assert.EqualValues(t, true, ok)
|
||||||
assert.EqualValues(t, "sha256", repo.ObjectFormatName)
|
assert.EqualValues(t, "sha256", repo.ObjectFormatName)
|
||||||
|
|