Fix a bug hidden by CI and make CI fail when tests fail (#29254)

The migration tests failed, but CI reported success:


https://github.com/go-gitea/gitea/actions/runs/7364373807/job/20044685969#step:8:141

This PR fixes the bug in migration v283 and also the CI behaviour that hid the failure.

The reason is in the Makefile. The single-command form

```makefile
GITEA_ROOT="$(CURDIR)" GITEA_CONF=tests/mysql.ini $(GO) test $(GOTESTFLAGS) -tags='$(TEST_TAGS)' $(MIGRATE_TEST_PACKAGES)
```

returns a non-zero exit code when any test fails, so make (and CI) sees the failure.

But the per-package loop

```makefile
for pkg in $(shell $(GO) list code.gitea.io/gitea/models/migrations/...); do \
	GITEA_ROOT="$(CURDIR)" GITEA_CONF=tests/mysql.ini $(GO) test $(GOTESTFLAGS) -tags '$(TEST_TAGS)' $$pkg; \
done
```

does not: the loop discards the exit status of every iteration except the last, so the recipe exits 0 and CI reports success even when tests fail.
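As an illustration only (not part of this patch; the target names are made up, with `false`/`true` standing in for failing/passing `go test` runs), a minimal Makefile sketch of the difference:

```makefile
# The loop's exit status is that of its LAST iteration, so `make loop-style`
# exits 0 even though the first iteration failed -- exactly how the old
# per-package migration target hid test failures on CI.
.PHONY: loop-style
loop-style:
	for pkg in failing passing; do \
		if [ "$$pkg" = failing ]; then false; else true; fi; \
	done

# A single command's exit status is propagated, so `make single-style` fails
# as soon as the (stand-in) test run fails.
.PHONY: single-style
single-style:
	false
```

This commit takes the single-command route via `$(MIGRATE_TEST_PACKAGES)`; another common fix is appending `|| exit 1` inside the loop body.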

This also fixes #29602.
Lunny Xiao 2024-03-08 00:43:32 +08:00 committed by GitHub
parent c1331d1f7a
commit 45277486c2
20 changed files with 174 additions and 46 deletions

View File

@@ -49,7 +49,10 @@ jobs:
       - run: make backend
         env:
           TAGS: bindata
-      - run: make test-pgsql-migration test-pgsql
+      - name: run migration tests
+        run: make test-pgsql-migration
+      - name: run tests
+        run: make test-pgsql
         timeout-minutes: 50
         env:
           TAGS: bindata gogit
@@ -72,7 +75,10 @@ jobs:
       - run: make backend
         env:
           TAGS: bindata gogit sqlite sqlite_unlock_notify
-      - run: make test-sqlite-migration test-sqlite
+      - name: run migration tests
+        run: make test-sqlite-migration
+      - name: run tests
+        run: make test-sqlite
         timeout-minutes: 50
         env:
           TAGS: bindata gogit sqlite sqlite_unlock_notify
@@ -175,8 +181,10 @@ jobs:
       - run: make backend
         env:
           TAGS: bindata
+      - name: run migration tests
+        run: make test-mysql-migration
       - name: run tests
-        run: make test-mysql-migration integration-test-coverage
+        run: make integration-test-coverage
         env:
           TAGS: bindata
           RACE_ENABLED: true
@@ -208,7 +216,9 @@ jobs:
       - run: make backend
         env:
           TAGS: bindata
-      - run: make test-mssql-migration test-mssql
+      - run: make test-mssql-migration
+      - name: run tests
+        run: make test-mssql
         timeout-minutes: 50
         env:
           TAGS: bindata

View File

@@ -115,6 +115,7 @@ LINUX_ARCHS ?= linux/amd64,linux/386,linux/arm-5,linux/arm-6,linux/arm64
 GO_PACKAGES ?= $(filter-out code.gitea.io/gitea/tests/integration/migration-test code.gitea.io/gitea/tests code.gitea.io/gitea/tests/integration code.gitea.io/gitea/tests/e2e,$(shell $(GO) list ./... | grep -v /vendor/))
 GO_TEST_PACKAGES ?= $(filter-out $(shell $(GO) list code.gitea.io/gitea/models/migrations/...) code.gitea.io/gitea/tests/integration/migration-test code.gitea.io/gitea/tests code.gitea.io/gitea/tests/integration code.gitea.io/gitea/tests/e2e,$(shell $(GO) list ./... | grep -v /vendor/))
+MIGRATE_TEST_PACKAGES ?= $(shell $(GO) list code.gitea.io/gitea/models/migrations/...)

 FOMANTIC_WORK_DIR := web_src/fomantic
@@ -710,9 +711,7 @@ migrations.sqlite.test: $(GO_SOURCES) generate-ini-sqlite
 .PHONY: migrations.individual.mysql.test
 migrations.individual.mysql.test: $(GO_SOURCES)
-	for pkg in $(shell $(GO) list code.gitea.io/gitea/models/migrations/...); do \
-		GITEA_ROOT="$(CURDIR)" GITEA_CONF=tests/mysql.ini $(GO) test $(GOTESTFLAGS) -tags '$(TEST_TAGS)' $$pkg; \
-	done
+	GITEA_ROOT="$(CURDIR)" GITEA_CONF=tests/mysql.ini $(GO) test $(GOTESTFLAGS) -tags='$(TEST_TAGS)' -p 1 $(MIGRATE_TEST_PACKAGES)

 .PHONY: migrations.individual.sqlite.test\#%
 migrations.individual.sqlite.test\#%: $(GO_SOURCES) generate-ini-sqlite
@@ -720,20 +719,15 @@ migrations.individual.sqlite.test\#%: $(GO_SOURCES) generate-ini-sqlite
 .PHONY: migrations.individual.pgsql.test
 migrations.individual.pgsql.test: $(GO_SOURCES)
-	for pkg in $(shell $(GO) list code.gitea.io/gitea/models/migrations/...); do \
-		GITEA_ROOT="$(CURDIR)" GITEA_CONF=tests/pgsql.ini $(GO) test $(GOTESTFLAGS) -tags '$(TEST_TAGS)' $$pkg; \
-	done
+	GITEA_ROOT="$(CURDIR)" GITEA_CONF=tests/pgsql.ini $(GO) test $(GOTESTFLAGS) -tags='$(TEST_TAGS)' -p 1 $(MIGRATE_TEST_PACKAGES)

 .PHONY: migrations.individual.pgsql.test\#%
 migrations.individual.pgsql.test\#%: $(GO_SOURCES) generate-ini-pgsql
 	GITEA_ROOT="$(CURDIR)" GITEA_CONF=tests/pgsql.ini $(GO) test $(GOTESTFLAGS) -tags '$(TEST_TAGS)' code.gitea.io/gitea/models/migrations/$*

 .PHONY: migrations.individual.mssql.test
 migrations.individual.mssql.test: $(GO_SOURCES) generate-ini-mssql
-	for pkg in $(shell $(GO) list code.gitea.io/gitea/models/migrations/...); do \
-		GITEA_ROOT="$(CURDIR)" GITEA_CONF=tests/mssql.ini $(GO) test $(GOTESTFLAGS) -tags '$(TEST_TAGS)' $$pkg -test.failfast; \
-	done
+	GITEA_ROOT="$(CURDIR)" GITEA_CONF=tests/mssql.ini $(GO) test $(GOTESTFLAGS) -tags='$(TEST_TAGS)' -p 1 $(MIGRATE_TEST_PACKAGES)

 .PHONY: migrations.individual.mssql.test\#%
 migrations.individual.mssql.test\#%: $(GO_SOURCES) generate-ini-mssql
@@ -741,9 +735,7 @@ migrations.individual.mssql.test\#%: $(GO_SOURCES) generate-ini-mssql
 .PHONY: migrations.individual.sqlite.test
 migrations.individual.sqlite.test: $(GO_SOURCES) generate-ini-sqlite
-	for pkg in $(shell $(GO) list code.gitea.io/gitea/models/migrations/...); do \
-		GITEA_ROOT="$(CURDIR)" GITEA_CONF=tests/sqlite.ini $(GO) test $(GOTESTFLAGS) -tags '$(TEST_TAGS)' $$pkg; \
-	done
+	GITEA_ROOT="$(CURDIR)" GITEA_CONF=tests/sqlite.ini $(GO) test $(GOTESTFLAGS) -tags='$(TEST_TAGS)' -p 1 $(MIGRATE_TEST_PACKAGES)

 .PHONY: migrations.individual.sqlite.test\#%
 migrations.individual.sqlite.test\#%: $(GO_SOURCES) generate-ini-sqlite

View File

@@ -36,12 +36,14 @@ func Test_DropTableColumns(t *testing.T) {
 		"updated_unix",
 	}

+	x.SetMapper(names.GonicMapper{})
 	for i := range columns {
-		x.SetMapper(names.GonicMapper{})
 		if err := x.Sync(new(DropTest)); err != nil {
 			t.Errorf("unable to create DropTest table: %v", err)
 			return
 		}
 		sess := x.NewSession()
 		if err := sess.Begin(); err != nil {
 			sess.Close()
@@ -64,7 +66,6 @@ func Test_DropTableColumns(t *testing.T) {
 			return
 		}
 		for j := range columns[i+1:] {
-			x.SetMapper(names.GonicMapper{})
 			if err := x.Sync(new(DropTest)); err != nil {
 				t.Errorf("unable to create DropTest table: %v", err)
 				return

View File

@@ -0,0 +1,4 @@
+-
+  id: 1
+  repo_id: 1
+  index: 1

View File

@@ -0,0 +1,11 @@
+-
+  id: 1
+  uuid: a0eebc99-9c0b-4ef8-bb6d-6bb9bd380a11
+  issue_id: 1
+  release_id: 0
+-
+  id: 2
+  uuid: a0eebc99-9c0b-4ef8-bb6d-6bb9bd380a12
+  issue_id: 0
+  release_id: 1

View File

@@ -0,0 +1,3 @@
+-
+  id: 1
+  repo_id: 1

View File

@@ -0,0 +1,3 @@
+-
+  id: 1
+  repo_id: 1

View File

@@ -0,0 +1,3 @@
+-
+  id: 1
+  commit_sha: 19fe5caf872476db265596eaac1dc35ad1c6422d

View File

@@ -0,0 +1,3 @@
+-
+  id: 1
+  context_hash: 19fe5caf872476db265596eaac1dc35ad1c6422d

View File

@@ -0,0 +1,5 @@
+-
+  id: 1
+  commit_sha: 19fe5caf872476db265596eaac1dc35ad1c6422d
+  merge_base: 19fe5caf872476db265596eaac1dc35ad1c6422d
+  merged_commit_id: 19fe5caf872476db265596eaac1dc35ad1c6422d

View File

@@ -0,0 +1,3 @@
+-
+  id: 1
+  sha1: 19fe5caf872476db265596eaac1dc35ad1c6422d

View File

@@ -0,0 +1,3 @@
+-
+  id: 1
+  commit_id: 19fe5caf872476db265596eaac1dc35ad1c6422d

View File

@@ -0,0 +1,3 @@
+-
+  id: 1
+  commit_sha: 19fe5caf872476db265596eaac1dc35ad1c6422d

View File

@@ -0,0 +1,3 @@
+-
+  id: 1
+  commit_sha: 19fe5caf872476db265596eaac1dc35ad1c6422d

View File

@@ -0,0 +1,4 @@
+-
+  id: 1
+  description: the badge
+  image_url: https://gitea.com/myimage.png

View File

@@ -15,7 +15,6 @@ func Test_AddRepoIDForAttachment(t *testing.T) {
 	type Attachment struct {
 		ID         int64  `xorm:"pk autoincr"`
 		UUID       string `xorm:"uuid UNIQUE"`
-		RepoID     int64  `xorm:"INDEX"` // this should not be zero
 		IssueID    int64  `xorm:"INDEX"` // maybe zero when creating
 		ReleaseID  int64  `xorm:"INDEX"` // maybe zero when creating
 		UploaderID int64  `xorm:"INDEX DEFAULT 0"`
@@ -44,12 +43,21 @@ func Test_AddRepoIDForAttachment(t *testing.T) {
 		return
 	}

-	var issueAttachments []*Attachment
-	err := x.Where("issue_id > 0").Find(&issueAttachments)
+	type NewAttachment struct {
+		ID         int64  `xorm:"pk autoincr"`
+		UUID       string `xorm:"uuid UNIQUE"`
+		RepoID     int64  `xorm:"INDEX"` // this should not be zero
+		IssueID    int64  `xorm:"INDEX"` // maybe zero when creating
+		ReleaseID  int64  `xorm:"INDEX"` // maybe zero when creating
+		UploaderID int64  `xorm:"INDEX DEFAULT 0"`
+	}
+
+	var issueAttachments []*NewAttachment
+	err := x.Table("attachment").Where("issue_id > 0").Find(&issueAttachments)
 	assert.NoError(t, err)
 	for _, attach := range issueAttachments {
-		assert.Greater(t, attach.RepoID, 0)
-		assert.Greater(t, attach.IssueID, 0)
+		assert.Greater(t, attach.RepoID, int64(0))
+		assert.Greater(t, attach.IssueID, int64(0))
 		var issue Issue
 		has, err := x.ID(attach.IssueID).Get(&issue)
 		assert.NoError(t, err)
@@ -57,12 +65,12 @@ func Test_AddRepoIDForAttachment(t *testing.T) {
 		assert.EqualValues(t, attach.RepoID, issue.RepoID)
 	}

-	var releaseAttachments []*Attachment
-	err = x.Where("release_id > 0").Find(&releaseAttachments)
+	var releaseAttachments []*NewAttachment
+	err = x.Table("attachment").Where("release_id > 0").Find(&releaseAttachments)
 	assert.NoError(t, err)
 	for _, attach := range releaseAttachments {
-		assert.Greater(t, attach.RepoID, 0)
-		assert.Greater(t, attach.IssueID, 0)
+		assert.Greater(t, attach.RepoID, int64(0))
+		assert.Greater(t, attach.ReleaseID, int64(0))
 		var release Release
 		has, err := x.ID(attach.ReleaseID).Get(&release)
 		assert.NoError(t, err)

View File

@@ -4,7 +4,10 @@
 package v1_22 //nolint

 import (
+	"fmt"
+
 	"xorm.io/xorm"
+	"xorm.io/xorm/schemas"
 )

 func AddCombinedIndexToIssueUser(x *xorm.Engine) error {
@@ -20,9 +23,19 @@ func AddCombinedIndexToIssueUser(x *xorm.Engine) error {
 		return err
 	}
 	for _, issueUser := range duplicatedIssueUsers {
-		if _, err := x.Exec("delete from issue_user where id in (SELECT id FROM issue_user WHERE issue_id = ? and uid = ? limit ?)", issueUser.IssueID, issueUser.UID, issueUser.Cnt-1); err != nil {
-			return err
+		if x.Dialect().URI().DBType == schemas.MSSQL {
+			if _, err := x.Exec(fmt.Sprintf("delete from issue_user where id in (SELECT top %d id FROM issue_user WHERE issue_id = ? and uid = ?)", issueUser.Cnt-1), issueUser.IssueID, issueUser.UID); err != nil {
+				return err
+			}
+		} else {
+			var ids []int64
+			if err := x.SQL("SELECT id FROM issue_user WHERE issue_id = ? and uid = ? limit ?", issueUser.IssueID, issueUser.UID, issueUser.Cnt-1).Find(&ids); err != nil {
+				return err
+			}
+			if _, err := x.Table("issue_user").In("id", ids).Delete(); err != nil {
+				return err
+			}
 		}
 	}

 type IssueUser struct {

View File

@@ -36,9 +36,9 @@ func expandHashReferencesToSha256(x *xorm.Engine) error {
 	if setting.Database.Type.IsMSSQL() {
 		// drop indexes that need to be re-created afterwards
 		droppedIndexes := []string{
-			"DROP INDEX commit_status.IDX_commit_status_context_hash",
-			"DROP INDEX review_state.UQE_review_state_pull_commit_user",
-			"DROP INDEX repo_archiver.UQE_repo_archiver_s",
+			"DROP INDEX IF EXISTS [IDX_commit_status_context_hash] ON [commit_status]",
+			"DROP INDEX IF EXISTS [UQE_review_state_pull_commit_user] ON [review_state]",
+			"DROP INDEX IF EXISTS [UQE_repo_archiver_s] ON [repo_archiver]",
 		}
 		for _, s := range droppedIndexes {
 			_, err := db.Exec(s)
@@ -53,7 +53,7 @@ func expandHashReferencesToSha256(x *xorm.Engine) error {
 			if setting.Database.Type.IsMySQL() {
 				_, err = db.Exec(fmt.Sprintf("ALTER TABLE `%s` MODIFY COLUMN `%s` VARCHAR(64)", alts[0], alts[1]))
 			} else if setting.Database.Type.IsMSSQL() {
-				_, err = db.Exec(fmt.Sprintf("ALTER TABLE `%s` ALTER COLUMN `%s` VARCHAR(64)", alts[0], alts[1]))
+				_, err = db.Exec(fmt.Sprintf("ALTER TABLE [%s] ALTER COLUMN [%s] VARCHAR(64)", alts[0], alts[1]))
 			} else {
 				_, err = db.Exec(fmt.Sprintf("ALTER TABLE `%s` ALTER COLUMN `%s` TYPE VARCHAR(64)", alts[0], alts[1]))
 			}

View File

@@ -17,14 +17,72 @@ func PrepareOldRepository(t *testing.T) (*xorm.Engine, func()) {
 		ID int64 `xorm:"pk autoincr"`
 	}

+	type CommitStatus struct {
+		ID          int64
+		ContextHash string
+	}
+
+	type RepoArchiver struct {
+		ID       int64
+		RepoID   int64
+		Type     int
+		CommitID string
+	}
+
+	type ReviewState struct {
+		ID        int64
+		CommitSHA string
+		UserID    int64
+		PullID    int64
+	}
+
+	type Comment struct {
+		ID        int64
+		CommitSHA string
+	}
+
+	type PullRequest struct {
+		ID             int64
+		CommitSHA      string
+		MergeBase      string
+		MergedCommitID string
+	}
+
+	type Release struct {
+		ID   int64
+		Sha1 string
+	}
+
+	type RepoIndexerStatus struct {
+		ID        int64
+		CommitSHA string
+	}
+
+	type Review struct {
+		ID       int64
+		CommitID string
+	}
+
 	// Prepare and load the testing database
-	return base.PrepareTestEnv(t, 0, new(Repository))
+	return base.PrepareTestEnv(t, 0,
+		new(Repository),
+		new(CommitStatus),
+		new(RepoArchiver),
+		new(ReviewState),
+		new(Review),
+		new(Comment),
+		new(PullRequest),
+		new(Release),
+		new(RepoIndexerStatus),
+	)
 }

 func Test_RepositoryFormat(t *testing.T) {
 	x, deferable := PrepareOldRepository(t)
 	defer deferable()

+	assert.NoError(t, AdjustDBForSha256(x))
+
 	type Repository struct {
 		ID               int64  `xorm:"pk autoincr"`
 		ObjectFormatName string `xorg:"not null default('sha1')"`
@@ -37,12 +95,10 @@ func Test_RepositoryFormat(t *testing.T) {
 	assert.NoError(t, err)
 	assert.EqualValues(t, 4, count)

-	assert.NoError(t, AdjustDBForSha256(x))
-	repo.ID = 20
 	repo.ObjectFormatName = "sha256"
 	_, err = x.Insert(repo)
 	assert.NoError(t, err)
+	id := repo.ID

 	count, err = x.Count(new(Repository))
 	assert.NoError(t, err)
@@ -55,7 +111,7 @@ func Test_RepositoryFormat(t *testing.T) {
 	assert.EqualValues(t, "sha1", repo.ObjectFormatName)

 	repo = new(Repository)
-	ok, err = x.ID(20).Get(repo)
+	ok, err = x.ID(id).Get(repo)
 	assert.NoError(t, err)
 	assert.EqualValues(t, true, ok)
 	assert.EqualValues(t, "sha256", repo.ObjectFormatName)

View File

@@ -20,20 +20,20 @@ func Test_UpdateBadgeColName(t *testing.T) {
 	}

 	// Prepare and load the testing database
-	x, deferable := base.PrepareTestEnv(t, 0, new(BadgeUnique), new(Badge))
+	x, deferable := base.PrepareTestEnv(t, 0, new(Badge))
 	defer deferable()
 	if x == nil || t.Failed() {
 		return
 	}

-	oldBadges := []Badge{
-		{ID: 1, Description: "Test Badge 1", ImageURL: "https://example.com/badge1.png"},
-		{ID: 2, Description: "Test Badge 2", ImageURL: "https://example.com/badge2.png"},
-		{ID: 3, Description: "Test Badge 3", ImageURL: "https://example.com/badge3.png"},
+	oldBadges := []*Badge{
+		{Description: "Test Badge 1", ImageURL: "https://example.com/badge1.png"},
+		{Description: "Test Badge 2", ImageURL: "https://example.com/badge2.png"},
+		{Description: "Test Badge 3", ImageURL: "https://example.com/badge3.png"},
 	}

 	for _, badge := range oldBadges {
-		_, err := x.Insert(&badge)
+		_, err := x.Insert(badge)
 		assert.NoError(t, err)
 	}
@@ -48,7 +48,7 @@ func Test_UpdateBadgeColName(t *testing.T) {
 	}

 	for i, e := range oldBadges {
-		got := got[i]
+		got := got[i+1] // 1 is in the badge.yml
 		assert.Equal(t, e.ID, got.ID)
 		assert.Equal(t, fmt.Sprintf("%d", e.ID), got.Slug)
 	}