Merge branch 'main' into lunny/add_line_through_deleted_branch
This commit is contained in: commit 0de6b114d8

59 changed files with 660 additions and 577 deletions
.github/workflows/pull-db-tests.yml (vendored): 10 changed lines

@@ -154,12 +154,15 @@ jobs:
     runs-on: ubuntu-latest
     services:
       mysql:
-        image: mysql:8.0
+        # the bitnami mysql image has more options than the official one, it's easier to customize
+        image: bitnami/mysql:8.0
         env:
-          MYSQL_ALLOW_EMPTY_PASSWORD: true
+          ALLOW_EMPTY_PASSWORD: true
           MYSQL_DATABASE: testgitea
         ports:
           - "3306:3306"
+        options: >-
+          --mount type=tmpfs,destination=/bitnami/mysql/data
       elasticsearch:
         image: elasticsearch:7.5.0
         env:
@@ -188,7 +191,8 @@ jobs:
       - name: run migration tests
         run: make test-mysql-migration
       - name: run tests
-        run: make integration-test-coverage
+        # run: make integration-test-coverage (at the moment, no coverage is really handled)
+        run: make test-mysql
         env:
           TAGS: bindata
           RACE_ENABLED: true

@@ -1912,7 +1912,7 @@ LEVEL = Info
 ;ENABLED = true
 ;;
 ;; Comma-separated list of allowed file extensions (`.zip`), mime types (`text/plain`) or wildcard type (`image/*`, `audio/*`, `video/*`). Empty value or `*/*` allows all types.
-;ALLOWED_TYPES = .csv,.docx,.fodg,.fodp,.fods,.fodt,.gif,.gz,.jpeg,.jpg,.log,.md,.mov,.mp4,.odf,.odg,.odp,.ods,.odt,.patch,.pdf,.png,.pptx,.svg,.tgz,.txt,.webm,.xls,.xlsx,.zip
+;ALLOWED_TYPES = .avif,.cpuprofile,.csv,.dmp,.docx,.fodg,.fodp,.fods,.fodt,.gif,.gz,.jpeg,.jpg,.json,.jsonc,.log,.md,.mov,.mp4,.odf,.odg,.odp,.ods,.odt,.patch,.pdf,.png,.pptx,.svg,.tgz,.txt,.webm,.webp,.xls,.xlsx,.zip
 ;;
 ;; Max size of each file. Defaults to 2048MB
 ;MAX_SIZE = 2048

@@ -15,7 +15,6 @@ import (
     "code.gitea.io/gitea/models/unittest"
     "code.gitea.io/gitea/modules/base"
     "code.gitea.io/gitea/modules/git"
-    "code.gitea.io/gitea/modules/log"
     "code.gitea.io/gitea/modules/setting"
     "code.gitea.io/gitea/modules/testlogger"

@@ -91,12 +90,11 @@ func PrepareTestEnv(t *testing.T, skip int, syncModels ...any) (*xorm.Engine, fu
 }

 func MainTest(m *testing.M) {
-    log.RegisterEventWriter("test", testlogger.NewTestLoggerWriter)
+    testlogger.Init()

     giteaRoot := base.SetupGiteaRoot()
     if giteaRoot == "" {
-        fmt.Println("Environment variable $GITEA_ROOT not set")
-        os.Exit(1)
+        testlogger.Fatalf("Environment variable $GITEA_ROOT not set\n")
     }
     giteaBinary := "gitea"
     if runtime.GOOS == "windows" {
@@ -104,8 +102,7 @@ func MainTest(m *testing.M) {
     }
     setting.AppPath = filepath.Join(giteaRoot, giteaBinary)
     if _, err := os.Stat(setting.AppPath); err != nil {
-        fmt.Printf("Could not find gitea binary at %s\n", setting.AppPath)
-        os.Exit(1)
+        testlogger.Fatalf("Could not find gitea binary at %s\n", setting.AppPath)
     }

     giteaConf := os.Getenv("GITEA_CONF")
@@ -122,8 +119,7 @@ func MainTest(m *testing.M) {

     tmpDataPath, err := os.MkdirTemp("", "data")
     if err != nil {
-        fmt.Printf("Unable to create temporary data path %v\n", err)
-        os.Exit(1)
+        testlogger.Fatalf("Unable to create temporary data path %v\n", err)
     }

     setting.CustomPath = filepath.Join(setting.AppWorkPath, "custom")
@@ -131,8 +127,7 @@ func MainTest(m *testing.M) {

     unittest.InitSettings()
     if err = git.InitFull(context.Background()); err != nil {
-        fmt.Printf("Unable to InitFull: %v\n", err)
-        os.Exit(1)
+        testlogger.Fatalf("Unable to InitFull: %v\n", err)
     }
     setting.LoadDBSetting()
     setting.InitLoggersForTest()

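For readers skimming the hunks above: the change replaces each fmt.Printf/fmt.Println plus os.Exit(1) pair with a single testlogger.Fatalf call. The sketch below is only a hypothetical stand-in for such a fail-fast helper, not Gitea's testlogger implementation.

package main

import (
	"fmt"
	"os"
)

// fatalf is an assumed helper: print the message and abort, replacing the old
// print-then-exit pairs with one call.
func fatalf(format string, args ...any) {
	fmt.Fprintf(os.Stderr, format, args...)
	os.Exit(1)
}

func main() {
	if os.Getenv("GITEA_ROOT") == "" {
		fatalf("Environment variable $GITEA_ROOT not set\n")
	}
	fmt.Println("GITEA_ROOT =", os.Getenv("GITEA_ROOT"))
}
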
@@ -7,6 +7,7 @@ import (
     "context"
     "fmt"
     "html/template"
+    "maps"
     "net"
     "net/url"
     "path/filepath"
@@ -165,8 +166,8 @@ type Repository struct {

     Status RepositoryStatus `xorm:"NOT NULL DEFAULT 0"`

-    RenderingMetas         map[string]string `xorm:"-"`
-    DocumentRenderingMetas map[string]string `xorm:"-"`
+    commonRenderingMetas map[string]string `xorm:"-"`
     Units           []*RepoUnit   `xorm:"-"`
     PrimaryLanguage *LanguageStat `xorm:"-"`

@@ -473,9 +474,8 @@ func (repo *Repository) MustOwner(ctx context.Context) *user_model.User {
     return repo.Owner
 }

-// ComposeMetas composes a map of metas for properly rendering issue links and external issue trackers.
-func (repo *Repository) ComposeMetas(ctx context.Context) map[string]string {
-    if len(repo.RenderingMetas) == 0 {
+func (repo *Repository) composeCommonMetas(ctx context.Context) map[string]string {
+    if len(repo.commonRenderingMetas) == 0 {
         metas := map[string]string{
             "user": repo.OwnerName,
             "repo": repo.Name,
@@ -508,21 +508,34 @@ func (repo *Repository) ComposeMetas(ctx context.Context) map[string]string {
             metas["org"] = strings.ToLower(repo.OwnerName)
         }

-        repo.RenderingMetas = metas
+        repo.commonRenderingMetas = metas
     }
-    return repo.RenderingMetas
+    return repo.commonRenderingMetas
 }

-// ComposeDocumentMetas composes a map of metas for properly rendering documents
+// ComposeMetas composes a map of metas for properly rendering comments or comment-like contents (commit message)
+func (repo *Repository) ComposeMetas(ctx context.Context) map[string]string {
+    metas := maps.Clone(repo.composeCommonMetas(ctx))
+    metas["markdownLineBreakStyle"] = "comment"
+    metas["markupAllowShortIssuePattern"] = "true"
+    return metas
+}
+
+// ComposeWikiMetas composes a map of metas for properly rendering wikis
+func (repo *Repository) ComposeWikiMetas(ctx context.Context) map[string]string {
+    // does wiki need the "teams" and "org" from common metas?
+    metas := maps.Clone(repo.composeCommonMetas(ctx))
+    metas["markdownLineBreakStyle"] = "document"
+    metas["markupAllowShortIssuePattern"] = "true"
+    return metas
+}
+
+// ComposeDocumentMetas composes a map of metas for properly rendering documents (repo files)
 func (repo *Repository) ComposeDocumentMetas(ctx context.Context) map[string]string {
-    if len(repo.DocumentRenderingMetas) == 0 {
-        metas := map[string]string{}
-        for k, v := range repo.ComposeMetas(ctx) {
-            metas[k] = v
-        }
-        repo.DocumentRenderingMetas = metas
-    }
-    return repo.DocumentRenderingMetas
+    // does document(file) need the "teams" and "org" from common metas?
+    metas := maps.Clone(repo.composeCommonMetas(ctx))
+    metas["markdownLineBreakStyle"] = "document"
+    return metas
 }

 // GetBaseRepo populates repo.BaseRepo for a fork repository and

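The refactor above replaces the two cached maps (RenderingMetas, DocumentRenderingMetas) with one cached common map that each public helper clones and then specializes per rendering mode. Below is a self-contained sketch of that clone-then-specialize pattern; the names buildCommonMetas and composeFor are illustrative, not Gitea's API.

package main

import (
	"fmt"
	"maps"
)

// buildCommonMetas stands in for composeCommonMetas: one shared base map.
func buildCommonMetas(owner, repo string) map[string]string {
	return map[string]string{"user": owner, "repo": repo}
}

// composeFor clones the base and layers on per-mode keys, so callers can
// mutate their copy without corrupting the shared (cached) map.
func composeFor(mode string, base map[string]string) map[string]string {
	m := maps.Clone(base)
	switch mode {
	case "comment":
		m["markdownLineBreakStyle"] = "comment"
		m["markupAllowShortIssuePattern"] = "true"
	case "wiki":
		m["markdownLineBreakStyle"] = "document"
		m["markupAllowShortIssuePattern"] = "true"
	case "document":
		m["markdownLineBreakStyle"] = "document"
	}
	return m
}

func main() {
	base := buildCommonMetas("test-owner", "test-repo")
	fmt.Println(composeFor("comment", base))
	fmt.Println(composeFor("document", base))
	fmt.Println(base) // unchanged: the clones protect the shared map
}
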
@@ -1,13 +1,12 @@
 // Copyright 2017 The Gitea Authors. All rights reserved.
 // SPDX-License-Identifier: MIT

-package repo_test
+package repo

 import (
     "testing"

     "code.gitea.io/gitea/models/db"
-    repo_model "code.gitea.io/gitea/models/repo"
     "code.gitea.io/gitea/models/unit"
     "code.gitea.io/gitea/models/unittest"
     user_model "code.gitea.io/gitea/models/user"
@@ -20,18 +19,18 @@ import (
 )

 var (
-    countRepospts        = repo_model.CountRepositoryOptions{OwnerID: 10}
-    countReposptsPublic  = repo_model.CountRepositoryOptions{OwnerID: 10, Private: optional.Some(false)}
-    countReposptsPrivate = repo_model.CountRepositoryOptions{OwnerID: 10, Private: optional.Some(true)}
+    countRepospts        = CountRepositoryOptions{OwnerID: 10}
+    countReposptsPublic  = CountRepositoryOptions{OwnerID: 10, Private: optional.Some(false)}
+    countReposptsPrivate = CountRepositoryOptions{OwnerID: 10, Private: optional.Some(true)}
 )

 func TestGetRepositoryCount(t *testing.T) {
     assert.NoError(t, unittest.PrepareTestDatabase())

     ctx := db.DefaultContext
-    count, err1 := repo_model.CountRepositories(ctx, countRepospts)
-    privateCount, err2 := repo_model.CountRepositories(ctx, countReposptsPrivate)
-    publicCount, err3 := repo_model.CountRepositories(ctx, countReposptsPublic)
+    count, err1 := CountRepositories(ctx, countRepospts)
+    privateCount, err2 := CountRepositories(ctx, countReposptsPrivate)
+    publicCount, err3 := CountRepositories(ctx, countReposptsPublic)
     assert.NoError(t, err1)
     assert.NoError(t, err2)
     assert.NoError(t, err3)
@@ -42,7 +41,7 @@ func TestGetRepositoryCount(t *testing.T) {
 func TestGetPublicRepositoryCount(t *testing.T) {
     assert.NoError(t, unittest.PrepareTestDatabase())

-    count, err := repo_model.CountRepositories(db.DefaultContext, countReposptsPublic)
+    count, err := CountRepositories(db.DefaultContext, countReposptsPublic)
     assert.NoError(t, err)
     assert.Equal(t, int64(1), count)
 }
@@ -50,14 +49,14 @@ func TestGetPublicRepositoryCount(t *testing.T) {
 func TestGetPrivateRepositoryCount(t *testing.T) {
     assert.NoError(t, unittest.PrepareTestDatabase())

-    count, err := repo_model.CountRepositories(db.DefaultContext, countReposptsPrivate)
+    count, err := CountRepositories(db.DefaultContext, countReposptsPrivate)
     assert.NoError(t, err)
     assert.Equal(t, int64(2), count)
 }

 func TestRepoAPIURL(t *testing.T) {
     assert.NoError(t, unittest.PrepareTestDatabase())
-    repo := unittest.AssertExistsAndLoadBean(t, &repo_model.Repository{ID: 10})
+    repo := unittest.AssertExistsAndLoadBean(t, &Repository{ID: 10})

     assert.Equal(t, "https://try.gitea.io/api/v1/repos/user12/repo10", repo.APIURL())
 }
@@ -65,22 +64,22 @@ func TestRepoAPIURL(t *testing.T) {
 func TestWatchRepo(t *testing.T) {
     assert.NoError(t, unittest.PrepareTestDatabase())

-    repo := unittest.AssertExistsAndLoadBean(t, &repo_model.Repository{ID: 3})
+    repo := unittest.AssertExistsAndLoadBean(t, &Repository{ID: 3})
     user := unittest.AssertExistsAndLoadBean(t, &user_model.User{ID: 2})

-    assert.NoError(t, repo_model.WatchRepo(db.DefaultContext, user, repo, true))
-    unittest.AssertExistsAndLoadBean(t, &repo_model.Watch{RepoID: repo.ID, UserID: user.ID})
-    unittest.CheckConsistencyFor(t, &repo_model.Repository{ID: repo.ID})
+    assert.NoError(t, WatchRepo(db.DefaultContext, user, repo, true))
+    unittest.AssertExistsAndLoadBean(t, &Watch{RepoID: repo.ID, UserID: user.ID})
+    unittest.CheckConsistencyFor(t, &Repository{ID: repo.ID})

-    assert.NoError(t, repo_model.WatchRepo(db.DefaultContext, user, repo, false))
-    unittest.AssertNotExistsBean(t, &repo_model.Watch{RepoID: repo.ID, UserID: user.ID})
-    unittest.CheckConsistencyFor(t, &repo_model.Repository{ID: repo.ID})
+    assert.NoError(t, WatchRepo(db.DefaultContext, user, repo, false))
+    unittest.AssertNotExistsBean(t, &Watch{RepoID: repo.ID, UserID: user.ID})
+    unittest.CheckConsistencyFor(t, &Repository{ID: repo.ID})
 }

 func TestMetas(t *testing.T) {
     assert.NoError(t, unittest.PrepareTestDatabase())

-    repo := &repo_model.Repository{Name: "testRepo"}
+    repo := &Repository{Name: "testRepo"}
     repo.Owner = &user_model.User{Name: "testOwner"}
     repo.OwnerName = repo.Owner.Name

@@ -90,16 +89,16 @@ func TestMetas(t *testing.T) {
     assert.Equal(t, "testRepo", metas["repo"])
     assert.Equal(t, "testOwner", metas["user"])

-    externalTracker := repo_model.RepoUnit{
+    externalTracker := RepoUnit{
         Type: unit.TypeExternalTracker,
-        Config: &repo_model.ExternalTrackerConfig{
+        Config: &ExternalTrackerConfig{
             ExternalTrackerFormat: "https://someurl.com/{user}/{repo}/{issue}",
         },
     }

     testSuccess := func(expectedStyle string) {
-        repo.Units = []*repo_model.RepoUnit{&externalTracker}
-        repo.RenderingMetas = nil
+        repo.Units = []*RepoUnit{&externalTracker}
+        repo.commonRenderingMetas = nil
         metas := repo.ComposeMetas(db.DefaultContext)
         assert.Equal(t, expectedStyle, metas["style"])
         assert.Equal(t, "testRepo", metas["repo"])
@@ -118,7 +117,7 @@ func TestMetas(t *testing.T) {
     externalTracker.ExternalTrackerConfig().ExternalTrackerStyle = markup.IssueNameStyleRegexp
     testSuccess(markup.IssueNameStyleRegexp)

-    repo, err := repo_model.GetRepositoryByID(db.DefaultContext, 3)
+    repo, err := GetRepositoryByID(db.DefaultContext, 3)
     assert.NoError(t, err)

     metas = repo.ComposeMetas(db.DefaultContext)
@@ -132,7 +131,7 @@ func TestGetRepositoryByURL(t *testing.T) {
     assert.NoError(t, unittest.PrepareTestDatabase())

     t.Run("InvalidPath", func(t *testing.T) {
-        repo, err := repo_model.GetRepositoryByURL(db.DefaultContext, "something")
+        repo, err := GetRepositoryByURL(db.DefaultContext, "something")

         assert.Nil(t, repo)
         assert.Error(t, err)
@@ -140,7 +139,7 @@ func TestGetRepositoryByURL(t *testing.T) {

     t.Run("ValidHttpURL", func(t *testing.T) {
         test := func(t *testing.T, url string) {
-            repo, err := repo_model.GetRepositoryByURL(db.DefaultContext, url)
+            repo, err := GetRepositoryByURL(db.DefaultContext, url)

             assert.NotNil(t, repo)
             assert.NoError(t, err)
@@ -155,7 +154,7 @@ func TestGetRepositoryByURL(t *testing.T) {

     t.Run("ValidGitSshURL", func(t *testing.T) {
         test := func(t *testing.T, url string) {
-            repo, err := repo_model.GetRepositoryByURL(db.DefaultContext, url)
+            repo, err := GetRepositoryByURL(db.DefaultContext, url)

             assert.NotNil(t, repo)
             assert.NoError(t, err)
@@ -173,7 +172,7 @@ func TestGetRepositoryByURL(t *testing.T) {

     t.Run("ValidImplicitSshURL", func(t *testing.T) {
         test := func(t *testing.T, url string) {
-            repo, err := repo_model.GetRepositoryByURL(db.DefaultContext, url)
+            repo, err := GetRepositoryByURL(db.DefaultContext, url)

             assert.NotNil(t, repo)
             assert.NoError(t, err)
@@ -200,21 +199,21 @@ func TestComposeSSHCloneURL(t *testing.T) {
     setting.SSH.Domain = "domain"
     setting.SSH.Port = 22
     setting.Repository.UseCompatSSHURI = false
-    assert.Equal(t, "git@domain:user/repo.git", repo_model.ComposeSSHCloneURL("user", "repo"))
+    assert.Equal(t, "git@domain:user/repo.git", ComposeSSHCloneURL("user", "repo"))
     setting.Repository.UseCompatSSHURI = true
-    assert.Equal(t, "ssh://git@domain/user/repo.git", repo_model.ComposeSSHCloneURL("user", "repo"))
+    assert.Equal(t, "ssh://git@domain/user/repo.git", ComposeSSHCloneURL("user", "repo"))
     // test SSH_DOMAIN while use non-standard SSH port
     setting.SSH.Port = 123
     setting.Repository.UseCompatSSHURI = false
-    assert.Equal(t, "ssh://git@domain:123/user/repo.git", repo_model.ComposeSSHCloneURL("user", "repo"))
+    assert.Equal(t, "ssh://git@domain:123/user/repo.git", ComposeSSHCloneURL("user", "repo"))
     setting.Repository.UseCompatSSHURI = true
-    assert.Equal(t, "ssh://git@domain:123/user/repo.git", repo_model.ComposeSSHCloneURL("user", "repo"))
+    assert.Equal(t, "ssh://git@domain:123/user/repo.git", ComposeSSHCloneURL("user", "repo"))

     // test IPv6 SSH_DOMAIN
     setting.Repository.UseCompatSSHURI = false
     setting.SSH.Domain = "::1"
     setting.SSH.Port = 22
-    assert.Equal(t, "git@[::1]:user/repo.git", repo_model.ComposeSSHCloneURL("user", "repo"))
+    assert.Equal(t, "git@[::1]:user/repo.git", ComposeSSHCloneURL("user", "repo"))
     setting.SSH.Port = 123
-    assert.Equal(t, "ssh://git@[::1]:123/user/repo.git", repo_model.ComposeSSHCloneURL("user", "repo"))
+    assert.Equal(t, "ssh://git@[::1]:123/user/repo.git", ComposeSSHCloneURL("user", "repo"))
 }

@@ -36,6 +36,7 @@ var OrderByMap = map[string]map[string]db.SearchOrderBy{
 var OrderByFlatMap = map[string]db.SearchOrderBy{
     "newest": OrderByMap["desc"]["created"],
     "oldest": OrderByMap["asc"]["created"],
+    "recentupdate": OrderByMap["desc"]["updated"],
     "leastupdate": OrderByMap["asc"]["updated"],
     "reversealphabetically": OrderByMap["desc"]["alpha"],
     "alphabetically": OrderByMap["asc"]["alpha"],

@@ -46,7 +46,7 @@ func ServeSetHeaders(w http.ResponseWriter, opts *ServeHeaderOptions) {
         w.Header().Add(gzhttp.HeaderNoCompression, "1")
     }

-    contentType := typesniffer.ApplicationOctetStream
+    contentType := typesniffer.MimeTypeApplicationOctetStream
     if opts.ContentType != "" {
         if opts.ContentTypeCharset != "" {
             contentType = opts.ContentType + "; charset=" + strings.ToLower(opts.ContentTypeCharset)
@@ -107,7 +107,7 @@ func setServeHeadersByFile(r *http.Request, w http.ResponseWriter, filePath stri
     } else if isPlain {
         opts.ContentType = "text/plain"
     } else {
-        opts.ContentType = typesniffer.ApplicationOctetStream
+        opts.ContentType = typesniffer.MimeTypeApplicationOctetStream
     }
 }

@@ -86,6 +86,8 @@ type ColoredValue struct {
     colors []ColorAttribute
 }

+var _ fmt.Formatter = (*ColoredValue)(nil)
+
 func (c *ColoredValue) Format(f fmt.State, verb rune) {
     _, _ = f.Write(ColorBytes(c.colors...))
     s := fmt.Sprintf(fmt.FormatString(f, verb), c.v)
@@ -93,6 +95,10 @@ func (c *ColoredValue) Format(f fmt.State, verb rune) {
     _, _ = f.Write(resetBytes)
 }

+func (c *ColoredValue) Value() any {
+    return c.v
+}
+
 func NewColoredValue(v any, color ...ColorAttribute) *ColoredValue {
     return &ColoredValue{v: v, colors: color}
 }

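The added `var _ fmt.Formatter = (*ColoredValue)(nil)` line is the standard compile-time interface assertion: it makes the build fail if the type ever stops satisfying fmt.Formatter. A tiny standalone illustration of the idiom follows; the Temp type is made up for the example.

package main

import "fmt"

type Temp struct{ celsius float64 }

// Compile-time check: if Temp ever loses its Format method, the build breaks
// here instead of at some distant call site.
var _ fmt.Formatter = (*Temp)(nil)

func (t *Temp) Format(f fmt.State, verb rune) {
	// Delegate to the requested verb for the underlying value.
	fmt.Fprintf(f, "%"+string(verb), t.celsius)
}

func main() {
	fmt.Printf("%v degrees C\n", &Temp{celsius: 21.5})
}
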
@@ -7,11 +7,11 @@ import (
     "bytes"
     "io"
     "regexp"
+    "slices"
     "strings"
     "sync"

     "code.gitea.io/gitea/modules/markup/common"
-    "code.gitea.io/gitea/modules/setting"

     "golang.org/x/net/html"
     "golang.org/x/net/html/atom"
@@ -25,7 +25,27 @@ const (
     IssueNameStyleRegexp = "regexp"
 )

-var (
+// CSS class for action keywords (e.g. "closes: #1")
+const keywordClass = "issue-keyword"
+
+type globalVarsType struct {
+    hashCurrentPattern      *regexp.Regexp
+    shortLinkPattern        *regexp.Regexp
+    anyHashPattern          *regexp.Regexp
+    comparePattern          *regexp.Regexp
+    fullURLPattern          *regexp.Regexp
+    emailRegex              *regexp.Regexp
+    blackfridayExtRegex     *regexp.Regexp
+    emojiShortCodeRegex     *regexp.Regexp
+    issueFullPattern        *regexp.Regexp
+    filesChangedFullPattern *regexp.Regexp
+
+    tagCleaner *regexp.Regexp
+    nulCleaner *strings.Replacer
+}
+
+var globalVars = sync.OnceValue[*globalVarsType](func() *globalVarsType {
+    v := &globalVarsType{}
     // NOTE: All below regex matching do not perform any extra validation.
     // Thus a link is produced even if the linked entity does not exist.
     // While fast, this is also incorrect and lead to false positives.
@@ -36,79 +56,56 @@ var (
     // hashCurrentPattern matches string that represents a commit SHA, e.g. d8a994ef243349f321568f9e36d5c3f444b99cae
     // Although SHA1 hashes are 40 chars long, SHA256 are 64, the regex matches the hash from 7 to 64 chars in length
     // so that abbreviated hash links can be used as well. This matches git and GitHub usability.
-    hashCurrentPattern = regexp.MustCompile(`(?:\s|^|\(|\[)([0-9a-f]{7,64})(?:\s|$|\)|\]|[.,:](\s|$))`)
+    v.hashCurrentPattern = regexp.MustCompile(`(?:\s|^|\(|\[)([0-9a-f]{7,64})(?:\s|$|\)|\]|[.,:](\s|$))`)

     // shortLinkPattern matches short but difficult to parse [[name|link|arg=test]] syntax
-    shortLinkPattern = regexp.MustCompile(`\[\[(.*?)\]\](\w*)`)
+    v.shortLinkPattern = regexp.MustCompile(`\[\[(.*?)\]\](\w*)`)

     // anyHashPattern splits url containing SHA into parts
-    anyHashPattern = regexp.MustCompile(`https?://(?:\S+/){4,5}([0-9a-f]{40,64})(/[-+~%./\w]+)?(\?[-+~%.\w&=]+)?(#[-+~%.\w]+)?`)
+    v.anyHashPattern = regexp.MustCompile(`https?://(?:\S+/){4,5}([0-9a-f]{40,64})(/[-+~%./\w]+)?(\?[-+~%.\w&=]+)?(#[-+~%.\w]+)?`)

     // comparePattern matches "http://domain/org/repo/compare/COMMIT1...COMMIT2#hash"
-    comparePattern = regexp.MustCompile(`https?://(?:\S+/){4,5}([0-9a-f]{7,64})(\.\.\.?)([0-9a-f]{7,64})?(#[-+~_%.a-zA-Z0-9]+)?`)
+    v.comparePattern = regexp.MustCompile(`https?://(?:\S+/){4,5}([0-9a-f]{7,64})(\.\.\.?)([0-9a-f]{7,64})?(#[-+~_%.a-zA-Z0-9]+)?`)

     // fullURLPattern matches full URL like "mailto:...", "https://..." and "ssh+git://..."
-    fullURLPattern = regexp.MustCompile(`^[a-z][-+\w]+:`)
+    v.fullURLPattern = regexp.MustCompile(`^[a-z][-+\w]+:`)

     // emailRegex is definitely not perfect with edge cases,
     // it is still accepted by the CommonMark specification, as well as the HTML5 spec:
     // http://spec.commonmark.org/0.28/#email-address
     // https://html.spec.whatwg.org/multipage/input.html#e-mail-state-(type%3Demail)
-    emailRegex = regexp.MustCompile("(?:\\s|^|\\(|\\[)([a-zA-Z0-9.!#$%&'*+\\/=?^_`{|}~-]+@[a-zA-Z0-9](?:[a-zA-Z0-9-]{0,61}[a-zA-Z0-9])?(?:\\.[a-zA-Z0-9]{2,}(?:[a-zA-Z0-9-]{0,61}[a-zA-Z0-9])?)+)(?:\\s|$|\\)|\\]|;|,|\\?|!|\\.(\\s|$))")
+    v.emailRegex = regexp.MustCompile("(?:\\s|^|\\(|\\[)([a-zA-Z0-9.!#$%&'*+\\/=?^_`{|}~-]+@[a-zA-Z0-9](?:[a-zA-Z0-9-]{0,61}[a-zA-Z0-9])?(?:\\.[a-zA-Z0-9]{2,}(?:[a-zA-Z0-9-]{0,61}[a-zA-Z0-9])?)+)(?:\\s|$|\\)|\\]|;|,|\\?|!|\\.(\\s|$))")

     // blackfridayExtRegex is for blackfriday extensions create IDs like fn:user-content-footnote
-    blackfridayExtRegex = regexp.MustCompile(`[^:]*:user-content-`)
+    v.blackfridayExtRegex = regexp.MustCompile(`[^:]*:user-content-`)

     // emojiShortCodeRegex find emoji by alias like :smile:
-    emojiShortCodeRegex = regexp.MustCompile(`:[-+\w]+:`)
-)
+    v.emojiShortCodeRegex = regexp.MustCompile(`:[-+\w]+:`)

-// CSS class for action keywords (e.g. "closes: #1")
-const keywordClass = "issue-keyword"
+    // example: https://domain/org/repo/pulls/27#hash
+    v.issueFullPattern = regexp.MustCompile(`https?://(?:\S+/)[\w_.-]+/[\w_.-]+/(?:issues|pulls)/((?:\w{1,10}-)?[1-9][0-9]*)([\?|#](\S+)?)?\b`)

+    // example: https://domain/org/repo/pulls/27/files#hash
+    v.filesChangedFullPattern = regexp.MustCompile(`https?://(?:\S+/)[\w_.-]+/[\w_.-]+/pulls/((?:\w{1,10}-)?[1-9][0-9]*)/files([\?|#](\S+)?)?\b`)
+
+    v.tagCleaner = regexp.MustCompile(`<((?:/?\w+/\w+)|(?:/[\w ]+/)|(/?[hH][tT][mM][lL]\b)|(/?[hH][eE][aA][dD]\b))`)
+    v.nulCleaner = strings.NewReplacer("\000", "")
+    return v
+})

 // IsFullURLBytes reports whether link fits valid format.
 func IsFullURLBytes(link []byte) bool {
-    return fullURLPattern.Match(link)
+    return globalVars().fullURLPattern.Match(link)
 }

 func IsFullURLString(link string) bool {
-    return fullURLPattern.MatchString(link)
+    return globalVars().fullURLPattern.MatchString(link)
 }

 func IsNonEmptyRelativePath(link string) bool {
     return link != "" && !IsFullURLString(link) && link[0] != '/' && link[0] != '?' && link[0] != '#'
 }

-// regexp for full links to issues/pulls
-var issueFullPattern *regexp.Regexp
-
-// Once for to prevent races
-var issueFullPatternOnce sync.Once
-
-// regexp for full links to hash comment in pull request files changed tab
-var filesChangedFullPattern *regexp.Regexp
-
-// Once for to prevent races
-var filesChangedFullPatternOnce sync.Once
-
-func getIssueFullPattern() *regexp.Regexp {
-    issueFullPatternOnce.Do(func() {
-        // example: https://domain/org/repo/pulls/27#hash
-        issueFullPattern = regexp.MustCompile(regexp.QuoteMeta(setting.AppURL) +
-            `[\w_.-]+/[\w_.-]+/(?:issues|pulls)/((?:\w{1,10}-)?[1-9][0-9]*)([\?|#](\S+)?)?\b`)
-    })
-    return issueFullPattern
-}
-
-func getFilesChangedFullPattern() *regexp.Regexp {
-    filesChangedFullPatternOnce.Do(func() {
-        // example: https://domain/org/repo/pulls/27/files#hash
-        filesChangedFullPattern = regexp.MustCompile(regexp.QuoteMeta(setting.AppURL) +
-            `[\w_.-]+/[\w_.-]+/pulls/((?:\w{1,10}-)?[1-9][0-9]*)/files([\?|#](\S+)?)?\b`)
-    })
-    return filesChangedFullPattern
-}
-
 // CustomLinkURLSchemes allows for additional schemes to be detected when parsing links within text
 func CustomLinkURLSchemes(schemes []string) {
     schemes = append(schemes, "http", "https")
@@ -197,13 +194,6 @@ func RenderCommitMessage(
     content string,
 ) (string, error) {
     procs := commitMessageProcessors
-    if ctx.DefaultLink != "" {
-        // we don't have to fear data races, because being
-        // commitMessageProcessors of fixed len and cap, every time we append
-        // something to it the slice is realloc+copied, so append always
-        // generates the slice ex-novo.
-        procs = append(procs, genDefaultLinkProcessor(ctx.DefaultLink))
-    }
     return renderProcessString(ctx, procs, content)
 }

@@ -231,16 +221,17 @@ var emojiProcessors = []processor{
 // which changes every text node into a link to the passed default link.
 func RenderCommitMessageSubject(
     ctx *RenderContext,
-    content string,
+    defaultLink, content string,
 ) (string, error) {
-    procs := commitMessageSubjectProcessors
-    if ctx.DefaultLink != "" {
-        // we don't have to fear data races, because being
-        // commitMessageSubjectProcessors of fixed len and cap, every time we
-        // append something to it the slice is realloc+copied, so append always
-        // generates the slice ex-novo.
-        procs = append(procs, genDefaultLinkProcessor(ctx.DefaultLink))
-    }
+    procs := slices.Clone(commitMessageSubjectProcessors)
+    procs = append(procs, func(ctx *RenderContext, node *html.Node) {
+        ch := &html.Node{Parent: node, Type: html.TextNode, Data: node.Data}
+        node.Type = html.ElementNode
+        node.Data = "a"
+        node.DataAtom = atom.A
+        node.Attr = []html.Attribute{{Key: "href", Val: defaultLink}, {Key: "class", Val: "muted"}}
+        node.FirstChild, node.LastChild = ch, ch
+    })
     return renderProcessString(ctx, procs, content)
 }

@@ -249,10 +240,8 @@ func RenderIssueTitle(
     ctx *RenderContext,
     title string,
 ) (string, error) {
+    // do not render other issue/commit links in an issue's title - which in most cases is already a link.
     return renderProcessString(ctx, []processor{
-        issueIndexPatternProcessor,
-        commitCrossReferencePatternProcessor,
-        hashCurrentPatternProcessor,
         emojiShortCodeProcessor,
         emojiProcessor,
     }, title)
@@ -288,11 +277,6 @@ func RenderEmoji(
     return renderProcessString(ctx, emojiProcessors, content)
 }

-var (
-    tagCleaner = regexp.MustCompile(`<((?:/?\w+/\w+)|(?:/[\w ]+/)|(/?[hH][tT][mM][lL]\b)|(/?[hH][eE][aA][dD]\b))`)
-    nulCleaner = strings.NewReplacer("\000", "")
-)
-
 func postProcess(ctx *RenderContext, procs []processor, input io.Reader, output io.Writer) error {
     defer ctx.Cancel()
     // FIXME: don't read all content to memory
@@ -306,7 +290,7 @@ func postProcess(ctx *RenderContext, procs []processor, input io.Reader, output
         // prepend "<html><body>"
         strings.NewReader("<html><body>"),
         // Strip out nuls - they're always invalid
-        bytes.NewReader(tagCleaner.ReplaceAll([]byte(nulCleaner.Replace(string(rawHTML))), []byte("<$1"))),
+        bytes.NewReader(globalVars().tagCleaner.ReplaceAll([]byte(globalVars().nulCleaner.Replace(string(rawHTML))), []byte("<$1"))),
         // close the tags
         strings.NewReader("</body></html>"),
     ))
@@ -353,7 +337,7 @@ func visitNode(ctx *RenderContext, procs []processor, node *html.Node) *html.Nod
     // Add user-content- to IDs and "#" links if they don't already have them
     for idx, attr := range node.Attr {
         val := strings.TrimPrefix(attr.Val, "#")
-        notHasPrefix := !(strings.HasPrefix(val, "user-content-") || blackfridayExtRegex.MatchString(val))
+        notHasPrefix := !(strings.HasPrefix(val, "user-content-") || globalVars().blackfridayExtRegex.MatchString(val))

         if attr.Key == "id" && notHasPrefix {
             node.Attr[idx].Val = "user-content-" + attr.Val

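The rewrite above folds a dozen package-level regexps (plus two sync.Once-guarded getters) into a single lazily built struct behind sync.OnceValue. A minimal sketch of that lazy-initialization pattern is below; the struct and pattern names are made up for the example and are not Gitea's.

package main

import (
	"fmt"
	"regexp"
	"sync"
)

// patterns groups regexps that are relatively expensive to compile and are
// only needed once rendering actually happens.
type patterns struct {
	hash  *regexp.Regexp
	email *regexp.Regexp
}

// getPatterns compiles the group exactly once, on first use; every later call
// returns the same value. sync.OnceValue (Go 1.21+) handles the synchronization.
var getPatterns = sync.OnceValue(func() *patterns {
	return &patterns{
		hash:  regexp.MustCompile(`\b[0-9a-f]{7,64}\b`),
		email: regexp.MustCompile(`[\w.+-]+@[\w-]+\.[\w.-]+`),
	}
})

func main() {
	fmt.Println(getPatterns().hash.MatchString("d8a994ef243349f321568f9e36d5c3f444b99cae"))
	fmt.Println(getPatterns().email.MatchString("someone@example.com"))
}
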
@@ -54,7 +54,7 @@ func createCodeLink(href, content, class string) *html.Node {
 }

 func anyHashPatternExtract(s string) (ret anyHashPatternResult, ok bool) {
-    m := anyHashPattern.FindStringSubmatchIndex(s)
+    m := globalVars().anyHashPattern.FindStringSubmatchIndex(s)
     if m == nil {
         return ret, false
     }
@@ -120,7 +120,7 @@ func comparePatternProcessor(ctx *RenderContext, node *html.Node) {
             node = node.NextSibling
             continue
         }
-        m := comparePattern.FindStringSubmatchIndex(node.Data)
+        m := globalVars().comparePattern.FindStringSubmatchIndex(node.Data)
         if m == nil || slices.Contains(m[:8], -1) { // ensure that every group (m[0]...m[7]) has a match
             node = node.NextSibling
             continue
@@ -173,7 +173,7 @@ func hashCurrentPatternProcessor(ctx *RenderContext, node *html.Node) {
         ctx.ShaExistCache = make(map[string]bool)
     }
     for node != nil && node != next && start < len(node.Data) {
-        m := hashCurrentPattern.FindStringSubmatchIndex(node.Data[start:])
+        m := globalVars().hashCurrentPattern.FindStringSubmatchIndex(node.Data[start:])
         if m == nil {
             return
         }

@@ -9,7 +9,7 @@ import "golang.org/x/net/html"
 func emailAddressProcessor(ctx *RenderContext, node *html.Node) {
     next := node.NextSibling
     for node != nil && node != next {
-        m := emailRegex.FindStringSubmatchIndex(node.Data)
+        m := globalVars().emailRegex.FindStringSubmatchIndex(node.Data)
         if m == nil {
             return
         }

@@ -62,7 +62,7 @@ func emojiShortCodeProcessor(ctx *RenderContext, node *html.Node) {
     start := 0
     next := node.NextSibling
     for node != nil && node != next && start < len(node.Data) {
-        m := emojiShortCodeRegex.FindStringSubmatchIndex(node.Data[start:])
+        m := globalVars().emojiShortCodeRegex.FindStringSubmatchIndex(node.Data[start:])
         if m == nil {
             return
         }

@@ -44,6 +44,7 @@ var numericMetas = map[string]string{
     "user": "someUser",
     "repo": "someRepo",
     "style": IssueNameStyleNumeric,
+    "markupAllowShortIssuePattern": "true",
 }

 var alphanumericMetas = map[string]string{
@@ -51,6 +52,7 @@ var alphanumericMetas = map[string]string{
     "user": "someUser",
     "repo": "someRepo",
     "style": IssueNameStyleAlphanumeric,
+    "markupAllowShortIssuePattern": "true",
 }

 var regexpMetas = map[string]string{
@@ -64,6 +66,13 @@ var regexpMetas = map[string]string{
 var localMetas = map[string]string{
     "user": "test-owner",
     "repo": "test-repo",
+    "markupAllowShortIssuePattern": "true",
+}
+
+var localWikiMetas = map[string]string{
+    "user": "test-owner",
+    "repo": "test-repo",
+    "markupContentMode": "wiki",
 }

 func TestRender_IssueIndexPattern(t *testing.T) {
@@ -126,7 +135,6 @@ func TestRender_IssueIndexPattern2(t *testing.T) {
     testRenderIssueIndexPattern(t, s, expectedNil, &RenderContext{
         Ctx: git.DefaultContext,
         Metas: localMetas,
-        ContentMode: RenderContentAsComment,
     })

     class := "ref-issue"
@@ -141,7 +149,6 @@ func TestRender_IssueIndexPattern2(t *testing.T) {
     testRenderIssueIndexPattern(t, s, expectedNum, &RenderContext{
         Ctx: git.DefaultContext,
         Metas: numericMetas,
-        ContentMode: RenderContentAsComment,
     })
 }

@@ -262,7 +269,7 @@ func TestRender_IssueIndexPattern5(t *testing.T) {
     })
 }

-func TestRender_IssueIndexPattern_Document(t *testing.T) {
+func TestRender_IssueIndexPattern_NoShortPattern(t *testing.T) {
     setting.AppURL = TestAppURL
     metas := map[string]string{
         "format": "https://someurl.com/{user}/{repo}/{index}",
@@ -285,6 +292,22 @@ func TestRender_IssueIndexPattern_Document(t *testing.T) {
     })
 }

+func TestRender_RenderIssueTitle(t *testing.T) {
+    setting.AppURL = TestAppURL
+    metas := map[string]string{
+        "format": "https://someurl.com/{user}/{repo}/{index}",
+        "user": "someUser",
+        "repo": "someRepo",
+        "style": IssueNameStyleNumeric,
+    }
+    actual, err := RenderIssueTitle(&RenderContext{
+        Ctx: git.DefaultContext,
+        Metas: metas,
+    }, "#1")
+    assert.NoError(t, err)
+    assert.Equal(t, "#1", actual)
+}
+
 func testRenderIssueIndexPattern(t *testing.T, input, expected string, ctx *RenderContext) {
     ctx.Links.AbsolutePrefix = true
     if ctx.Links.Base == "" {
@@ -318,8 +341,7 @@ func TestRender_AutoLink(t *testing.T) {
     Links: Links{
         Base: TestRepoURL,
     },
-    Metas: localMetas,
-    ContentMode: RenderContentAsWiki,
+    Metas: localWikiMetas,
 }, strings.NewReader(input), &buffer)
 assert.Equal(t, err, nil)
 assert.Equal(t, strings.TrimSpace(expected), strings.TrimSpace(buffer.String()))
@@ -391,10 +413,10 @@ func TestRegExp_sha1CurrentPattern(t *testing.T) {
     }

     for _, testCase := range trueTestCases {
-        assert.True(t, hashCurrentPattern.MatchString(testCase))
+        assert.True(t, globalVars().hashCurrentPattern.MatchString(testCase))
     }
     for _, testCase := range falseTestCases {
-        assert.False(t, hashCurrentPattern.MatchString(testCase))
+        assert.False(t, globalVars().hashCurrentPattern.MatchString(testCase))
     }
 }

@@ -474,9 +496,9 @@ func TestRegExp_shortLinkPattern(t *testing.T) {
     }

     for _, testCase := range trueTestCases {
-        assert.True(t, shortLinkPattern.MatchString(testCase))
+        assert.True(t, globalVars().shortLinkPattern.MatchString(testCase))
     }
     for _, testCase := range falseTestCases {
-        assert.False(t, shortLinkPattern.MatchString(testCase))
+        assert.False(t, globalVars().shortLinkPattern.MatchString(testCase))
     }
 }

@@ -7,6 +7,7 @@ import (
     "strings"

     "code.gitea.io/gitea/modules/base"
+    "code.gitea.io/gitea/modules/httplib"
     "code.gitea.io/gitea/modules/log"
     "code.gitea.io/gitea/modules/references"
     "code.gitea.io/gitea/modules/regexplru"
@@ -23,18 +24,21 @@ func fullIssuePatternProcessor(ctx *RenderContext, node *html.Node) {
     }
     next := node.NextSibling
     for node != nil && node != next {
-        m := getIssueFullPattern().FindStringSubmatchIndex(node.Data)
+        m := globalVars().issueFullPattern.FindStringSubmatchIndex(node.Data)
         if m == nil {
             return
         }

-        mDiffView := getFilesChangedFullPattern().FindStringSubmatchIndex(node.Data)
+        mDiffView := globalVars().filesChangedFullPattern.FindStringSubmatchIndex(node.Data)
         // leave it as it is if the link is from "Files Changed" tab in PR Diff View https://domain/org/repo/pulls/27/files
         if mDiffView != nil {
             return
         }

         link := node.Data[m[0]:m[1]]
+        if !httplib.IsCurrentGiteaSiteURL(ctx.Ctx, link) {
+            return
+        }
         text := "#" + node.Data[m[2]:m[3]]
         // if m[4] and m[5] is not -1, then link is to a comment
         // indicate that in the text by appending (comment)
@@ -67,8 +71,10 @@ func issueIndexPatternProcessor(ctx *RenderContext, node *html.Node) {
         return
     }

-    // crossLinkOnly if not comment and not wiki
-    crossLinkOnly := ctx.ContentMode != RenderContentAsTitle && ctx.ContentMode != RenderContentAsComment && ctx.ContentMode != RenderContentAsWiki
+    // crossLinkOnly: do not parse "#123", only parse "owner/repo#123"
+    // if there is no repo in the context, then the "#123" format can't be parsed
+    // old logic: crossLinkOnly := ctx.Metas["mode"] == "document" && !ctx.IsWiki
+    crossLinkOnly := ctx.Metas["markupAllowShortIssuePattern"] != "true"

     var (
         found bool

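The new httplib.IsCurrentGiteaSiteURL guard keeps full issue/PR links from being rewritten when they point at a different host. Gitea's actual helper is not shown in this diff; the sketch below is only an assumed approximation of such a same-site check using net/url, not the library's implementation.

package main

import (
	"fmt"
	"net/url"
	"strings"
)

// isSameSiteURL is a hypothetical helper: it reports whether link points at the
// same scheme, host, and path prefix as appURL.
func isSameSiteURL(appURL, link string) bool {
	base, err := url.Parse(appURL)
	if err != nil {
		return false
	}
	u, err := url.Parse(link)
	if err != nil {
		return false
	}
	return u.Scheme == base.Scheme && u.Host == base.Host && strings.HasPrefix(u.Path, base.Path)
}

func main() {
	fmt.Println(isSameSiteURL("https://gitea.example.com/", "https://gitea.example.com/org/repo/issues/1")) // true
	fmt.Println(isSameSiteURL("https://gitea.example.com/", "https://other.example.com/org/repo/issues/1")) // false
}
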
@@ -20,9 +20,9 @@ func ResolveLink(ctx *RenderContext, link, userContentAnchorPrefix string) (resu
     isAnchorFragment := link != "" && link[0] == '#'
     if !isAnchorFragment && !IsFullURLString(link) {
         linkBase := ctx.Links.Base
-        if ctx.ContentMode == RenderContentAsWiki {
+        if ctx.IsMarkupContentWiki() {
             // no need to check if the link should be resolved as a wiki link or a wiki raw link
-            // just use wiki link here and it will be redirected to a wiki raw link if necessary
+            // just use wiki link here, and it will be redirected to a wiki raw link if necessary
             linkBase = ctx.Links.WikiLink()
         } else if ctx.Links.BranchPath != "" || ctx.Links.TreePath != "" {
             // if there is no BranchPath, then the link will be something like "/owner/repo/src/{the-file-path}"
@@ -40,7 +40,7 @@ func ResolveLink(ctx *RenderContext, link, userContentAnchorPrefix string) (resu
 func shortLinkProcessor(ctx *RenderContext, node *html.Node) {
     next := node.NextSibling
     for node != nil && node != next {
-        m := shortLinkPattern.FindStringSubmatchIndex(node.Data)
+        m := globalVars().shortLinkPattern.FindStringSubmatchIndex(node.Data)
         if m == nil {
             return
         }
@@ -147,7 +147,7 @@ func shortLinkProcessor(ctx *RenderContext, node *html.Node) {
         }
         if image {
             if !absoluteLink {
-                link = util.URLJoin(ctx.Links.ResolveMediaLink(ctx.ContentMode == RenderContentAsWiki), link)
+                link = util.URLJoin(ctx.Links.ResolveMediaLink(ctx.IsMarkupContentWiki()), link)
             }
             title := props["title"]
             if title == "" {
@@ -200,25 +200,6 @@ func linkProcessor(ctx *RenderContext, node *html.Node) {
     }
 }

-func genDefaultLinkProcessor(defaultLink string) processor {
-    return func(ctx *RenderContext, node *html.Node) {
-        ch := &html.Node{
-            Parent: node,
-            Type:   html.TextNode,
-            Data:   node.Data,
-        }
-
-        node.Type = html.ElementNode
-        node.Data = "a"
-        node.DataAtom = atom.A
-        node.Attr = []html.Attribute{
-            {Key: "href", Val: defaultLink},
-            {Key: "class", Val: "default-link muted"},
-        }
-        node.FirstChild, node.LastChild = ch, ch
-    }
-}
-
 // descriptionLinkProcessor creates links for DescriptionHTML
 func descriptionLinkProcessor(ctx *RenderContext, node *html.Node) {
     next := node.NextSibling

@@ -17,7 +17,7 @@ func visitNodeImg(ctx *RenderContext, img *html.Node) (next *html.Node) {
         }

         if IsNonEmptyRelativePath(attr.Val) {
-            attr.Val = util.URLJoin(ctx.Links.ResolveMediaLink(ctx.ContentMode == RenderContentAsWiki), attr.Val)
+            attr.Val = util.URLJoin(ctx.Links.ResolveMediaLink(ctx.IsMarkupContentWiki()), attr.Val)

             // By default, the "<img>" tag should also be clickable,
             // because frontend use `<img>` to paste the re-scaled image into the markdown,
@@ -53,7 +53,7 @@ func visitNodeVideo(ctx *RenderContext, node *html.Node) (next *html.Node) {
             continue
         }
         if IsNonEmptyRelativePath(attr.Val) {
-            attr.Val = util.URLJoin(ctx.Links.ResolveMediaLink(ctx.ContentMode == RenderContentAsWiki), attr.Val)
+            attr.Val = util.URLJoin(ctx.Links.ResolveMediaLink(ctx.IsMarkupContentWiki()), attr.Val)
         }
         attr.Val = camoHandleLink(attr.Val)
         node.Attr[i] = attr

@@ -27,6 +27,11 @@ var (
         "user": testRepoOwnerName,
         "repo": testRepoName,
     }
+    localWikiMetas = map[string]string{
+        "user": testRepoOwnerName,
+        "repo": testRepoName,
+        "markupContentMode": "wiki",
+    }
 )

 type mockRepo struct {
@@ -413,8 +418,7 @@ func TestRender_ShortLinks(t *testing.T) {
     Links: markup.Links{
         Base: markup.TestRepoURL,
     },
-    Metas: localMetas,
-    ContentMode: markup.RenderContentAsWiki,
+    Metas: localWikiMetas,
 }, input)
 assert.NoError(t, err)
 assert.Equal(t, strings.TrimSpace(expectedWiki), strings.TrimSpace(string(buffer)))
@@ -528,8 +532,7 @@ func TestRender_RelativeMedias(t *testing.T) {
     buffer, err := markdown.RenderString(&markup.RenderContext{
         Ctx: git.DefaultContext,
         Links: links,
-        Metas: localMetas,
-        ContentMode: util.Iif(isWiki, markup.RenderContentAsWiki, markup.RenderContentAsComment),
+        Metas: util.Iif(isWiki, localWikiMetas, localMetas),
     }, input)
     assert.NoError(t, err)
     return strings.TrimSpace(string(buffer))

@@ -75,11 +75,12 @@ func (g *ASTTransformer) Transform(node *ast.Document, reader text.Reader, pc pa
 		// TODO: this was a quite unclear part, old code: `if metas["mode"] != "document" { use comment link break setting }`
 		// many places render non-comment contents with no mode=document, then these contents also use comment's hard line break setting
 		// especially in many tests.
+		markdownLineBreakStyle := ctx.Metas["markdownLineBreakStyle"]
 		if markup.RenderBehaviorForTesting.ForceHardLineBreak {
 			v.SetHardLineBreak(true)
-		} else if ctx.ContentMode == markup.RenderContentAsComment {
+		} else if markdownLineBreakStyle == "comment" {
 			v.SetHardLineBreak(setting.Markdown.EnableHardLineBreakInComments)
-		} else {
+		} else if markdownLineBreakStyle == "document" {
 			v.SetHardLineBreak(setting.Markdown.EnableHardLineBreakInDocuments)
 		}
 	}
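The hunk above drops the `ContentMode` check in favor of a `markdownLineBreakStyle` entry in `ctx.Metas`. A minimal standalone sketch of that decision, assuming the two booleans stand in for `setting.Markdown.EnableHardLineBreakInComments` / `EnableHardLineBreakInDocuments`; the helper itself is hypothetical, not Gitea's API:

```go
package main

import "fmt"

// hardLineBreak mirrors the decision in the hunk above: the style string
// ("comment", "document", or empty) comes from the render metas, and any
// other value leaves soft line breaks untouched.
func hardLineBreak(style string, inComments, inDocuments bool) bool {
	switch style {
	case "comment":
		return inComments
	case "document":
		return inDocuments
	default:
		return false
	}
}

func main() {
	fmt.Println(hardLineBreak("comment", true, false))  // true
	fmt.Println(hardLineBreak("document", true, false)) // false
	fmt.Println(hardLineBreak("", true, true))          // false: unknown style, keep soft breaks
}
```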
@@ -37,6 +37,12 @@ var localMetas = map[string]string{
 	"repo": testRepoName,
 }
 
+var localWikiMetas = map[string]string{
+	"user":              testRepoOwnerName,
+	"repo":              testRepoName,
+	"markupContentMode": "wiki",
+}
+
 type mockRepo struct {
 	OwnerName string
 	RepoName string
@@ -75,7 +81,7 @@ func TestRender_StandardLinks(t *testing.T) {
 		Links: markup.Links{
 			Base: FullURL,
 		},
-		ContentMode: markup.RenderContentAsWiki,
+		Metas: localWikiMetas,
 	}, input)
 	assert.NoError(t, err)
 	assert.Equal(t, strings.TrimSpace(expectedWiki), strings.TrimSpace(string(buffer)))
@@ -308,8 +314,7 @@ func TestTotal_RenderWiki(t *testing.T) {
 			Base: FullURL,
 		},
 		Repo: newMockRepo(testRepoOwnerName, testRepoName),
-		Metas: localMetas,
-		ContentMode: markup.RenderContentAsWiki,
+		Metas: localWikiMetas,
 	}, sameCases[i])
 	assert.NoError(t, err)
 	assert.Equal(t, answers[i], string(line))
@@ -334,7 +339,7 @@ func TestTotal_RenderWiki(t *testing.T) {
 		Links: markup.Links{
 			Base: FullURL,
 		},
-		ContentMode: markup.RenderContentAsWiki,
+		Metas: localWikiMetas,
 	}, testCases[i])
 	assert.NoError(t, err)
 	assert.EqualValues(t, testCases[i+1], string(line))
@@ -657,9 +662,9 @@ mail@domain.com
 <a href="https://example.com/image.jpg" target="_blank" rel="nofollow noopener"><img src="https://example.com/image.jpg" alt="remote image"/></a><br/>
 <a href="/image.jpg" rel="nofollow"><img src="/image.jpg" title="local image" alt="local image"/></a><br/>
 <a href="https://example.com/image.jpg" rel="nofollow"><img src="https://example.com/image.jpg" title="remote link" alt="remote link"/></a><br/>
-<a href="https://example.com/user/repo/compare/88fc37a3c0a4dda553bdcfc80c178a58247f42fb...12fc37a3c0a4dda553bdcfc80c178a58247f42fb#hash" rel="nofollow">https://example.com/user/repo/compare/88fc37a3c0a4dda553bdcfc80c178a58247f42fb...12fc37a3c0a4dda553bdcfc80c178a58247f42fb#hash</a><br/>
+<a href="https://example.com/user/repo/compare/88fc37a3c0a4dda553bdcfc80c178a58247f42fb...12fc37a3c0a4dda553bdcfc80c178a58247f42fb#hash" rel="nofollow"><code>88fc37a3c0...12fc37a3c0 (hash)</code></a><br/>
 com 88fc37a3c0a4dda553bdcfc80c178a58247f42fb...12fc37a3c0a4dda553bdcfc80c178a58247f42fb pare<br/>
-<a href="https://example.com/user/repo/commit/88fc37a3c0a4dda553bdcfc80c178a58247f42fb" rel="nofollow">https://example.com/user/repo/commit/88fc37a3c0a4dda553bdcfc80c178a58247f42fb</a><br/>
+<a href="https://example.com/user/repo/commit/88fc37a3c0a4dda553bdcfc80c178a58247f42fb" rel="nofollow"><code>88fc37a3c0</code></a><br/>
 com 88fc37a3c0a4dda553bdcfc80c178a58247f42fb mit<br/>
 <span class="emoji" aria-label="thumbs up">👍</span><br/>
 <a href="mailto:mail@domain.com" rel="nofollow">mail@domain.com</a><br/>
@@ -684,9 +689,9 @@ space</p>
 <a href="https://example.com/image.jpg" target="_blank" rel="nofollow noopener"><img src="https://example.com/image.jpg" alt="remote image"/></a><br/>
 <a href="/wiki/raw/image.jpg" rel="nofollow"><img src="/wiki/raw/image.jpg" title="local image" alt="local image"/></a><br/>
 <a href="https://example.com/image.jpg" rel="nofollow"><img src="https://example.com/image.jpg" title="remote link" alt="remote link"/></a><br/>
-<a href="https://example.com/user/repo/compare/88fc37a3c0a4dda553bdcfc80c178a58247f42fb...12fc37a3c0a4dda553bdcfc80c178a58247f42fb#hash" rel="nofollow">https://example.com/user/repo/compare/88fc37a3c0a4dda553bdcfc80c178a58247f42fb...12fc37a3c0a4dda553bdcfc80c178a58247f42fb#hash</a><br/>
+<a href="https://example.com/user/repo/compare/88fc37a3c0a4dda553bdcfc80c178a58247f42fb...12fc37a3c0a4dda553bdcfc80c178a58247f42fb#hash" rel="nofollow"><code>88fc37a3c0...12fc37a3c0 (hash)</code></a><br/>
 com 88fc37a3c0a4dda553bdcfc80c178a58247f42fb...12fc37a3c0a4dda553bdcfc80c178a58247f42fb pare<br/>
-<a href="https://example.com/user/repo/commit/88fc37a3c0a4dda553bdcfc80c178a58247f42fb" rel="nofollow">https://example.com/user/repo/commit/88fc37a3c0a4dda553bdcfc80c178a58247f42fb</a><br/>
+<a href="https://example.com/user/repo/commit/88fc37a3c0a4dda553bdcfc80c178a58247f42fb" rel="nofollow"><code>88fc37a3c0</code></a><br/>
 com 88fc37a3c0a4dda553bdcfc80c178a58247f42fb mit<br/>
 <span class="emoji" aria-label="thumbs up">👍</span><br/>
 <a href="mailto:mail@domain.com" rel="nofollow">mail@domain.com</a><br/>
@@ -713,9 +718,9 @@ space</p>
 <a href="https://example.com/image.jpg" target="_blank" rel="nofollow noopener"><img src="https://example.com/image.jpg" alt="remote image"/></a><br/>
 <a href="https://gitea.io/image.jpg" rel="nofollow"><img src="https://gitea.io/image.jpg" title="local image" alt="local image"/></a><br/>
 <a href="https://example.com/image.jpg" rel="nofollow"><img src="https://example.com/image.jpg" title="remote link" alt="remote link"/></a><br/>
-<a href="https://example.com/user/repo/compare/88fc37a3c0a4dda553bdcfc80c178a58247f42fb...12fc37a3c0a4dda553bdcfc80c178a58247f42fb#hash" rel="nofollow">https://example.com/user/repo/compare/88fc37a3c0a4dda553bdcfc80c178a58247f42fb...12fc37a3c0a4dda553bdcfc80c178a58247f42fb#hash</a><br/>
+<a href="https://example.com/user/repo/compare/88fc37a3c0a4dda553bdcfc80c178a58247f42fb...12fc37a3c0a4dda553bdcfc80c178a58247f42fb#hash" rel="nofollow"><code>88fc37a3c0...12fc37a3c0 (hash)</code></a><br/>
 com 88fc37a3c0a4dda553bdcfc80c178a58247f42fb...12fc37a3c0a4dda553bdcfc80c178a58247f42fb pare<br/>
-<a href="https://example.com/user/repo/commit/88fc37a3c0a4dda553bdcfc80c178a58247f42fb" rel="nofollow">https://example.com/user/repo/commit/88fc37a3c0a4dda553bdcfc80c178a58247f42fb</a><br/>
+<a href="https://example.com/user/repo/commit/88fc37a3c0a4dda553bdcfc80c178a58247f42fb" rel="nofollow"><code>88fc37a3c0</code></a><br/>
 com 88fc37a3c0a4dda553bdcfc80c178a58247f42fb mit<br/>
 <span class="emoji" aria-label="thumbs up">👍</span><br/>
 <a href="mailto:mail@domain.com" rel="nofollow">mail@domain.com</a><br/>
@@ -742,9 +747,9 @@ space</p>
 <a href="https://example.com/image.jpg" target="_blank" rel="nofollow noopener"><img src="https://example.com/image.jpg" alt="remote image"/></a><br/>
 <a href="https://gitea.io/wiki/raw/image.jpg" rel="nofollow"><img src="https://gitea.io/wiki/raw/image.jpg" title="local image" alt="local image"/></a><br/>
 <a href="https://example.com/image.jpg" rel="nofollow"><img src="https://example.com/image.jpg" title="remote link" alt="remote link"/></a><br/>
-<a href="https://example.com/user/repo/compare/88fc37a3c0a4dda553bdcfc80c178a58247f42fb...12fc37a3c0a4dda553bdcfc80c178a58247f42fb#hash" rel="nofollow">https://example.com/user/repo/compare/88fc37a3c0a4dda553bdcfc80c178a58247f42fb...12fc37a3c0a4dda553bdcfc80c178a58247f42fb#hash</a><br/>
+<a href="https://example.com/user/repo/compare/88fc37a3c0a4dda553bdcfc80c178a58247f42fb...12fc37a3c0a4dda553bdcfc80c178a58247f42fb#hash" rel="nofollow"><code>88fc37a3c0...12fc37a3c0 (hash)</code></a><br/>
 com 88fc37a3c0a4dda553bdcfc80c178a58247f42fb...12fc37a3c0a4dda553bdcfc80c178a58247f42fb pare<br/>
-<a href="https://example.com/user/repo/commit/88fc37a3c0a4dda553bdcfc80c178a58247f42fb" rel="nofollow">https://example.com/user/repo/commit/88fc37a3c0a4dda553bdcfc80c178a58247f42fb</a><br/>
+<a href="https://example.com/user/repo/commit/88fc37a3c0a4dda553bdcfc80c178a58247f42fb" rel="nofollow"><code>88fc37a3c0</code></a><br/>
 com 88fc37a3c0a4dda553bdcfc80c178a58247f42fb mit<br/>
 <span class="emoji" aria-label="thumbs up">👍</span><br/>
 <a href="mailto:mail@domain.com" rel="nofollow">mail@domain.com</a><br/>
@@ -771,9 +776,9 @@ space</p>
 <a href="https://example.com/image.jpg" target="_blank" rel="nofollow noopener"><img src="https://example.com/image.jpg" alt="remote image"/></a><br/>
 <a href="/relative/path/image.jpg" rel="nofollow"><img src="/relative/path/image.jpg" title="local image" alt="local image"/></a><br/>
 <a href="https://example.com/image.jpg" rel="nofollow"><img src="https://example.com/image.jpg" title="remote link" alt="remote link"/></a><br/>
-<a href="https://example.com/user/repo/compare/88fc37a3c0a4dda553bdcfc80c178a58247f42fb...12fc37a3c0a4dda553bdcfc80c178a58247f42fb#hash" rel="nofollow">https://example.com/user/repo/compare/88fc37a3c0a4dda553bdcfc80c178a58247f42fb...12fc37a3c0a4dda553bdcfc80c178a58247f42fb#hash</a><br/>
+<a href="https://example.com/user/repo/compare/88fc37a3c0a4dda553bdcfc80c178a58247f42fb...12fc37a3c0a4dda553bdcfc80c178a58247f42fb#hash" rel="nofollow"><code>88fc37a3c0...12fc37a3c0 (hash)</code></a><br/>
 com 88fc37a3c0a4dda553bdcfc80c178a58247f42fb...12fc37a3c0a4dda553bdcfc80c178a58247f42fb pare<br/>
-<a href="https://example.com/user/repo/commit/88fc37a3c0a4dda553bdcfc80c178a58247f42fb" rel="nofollow">https://example.com/user/repo/commit/88fc37a3c0a4dda553bdcfc80c178a58247f42fb</a><br/>
+<a href="https://example.com/user/repo/commit/88fc37a3c0a4dda553bdcfc80c178a58247f42fb" rel="nofollow"><code>88fc37a3c0</code></a><br/>
 com 88fc37a3c0a4dda553bdcfc80c178a58247f42fb mit<br/>
 <span class="emoji" aria-label="thumbs up">👍</span><br/>
 <a href="mailto:mail@domain.com" rel="nofollow">mail@domain.com</a><br/>
@@ -800,9 +805,9 @@ space</p>
 <a href="https://example.com/image.jpg" target="_blank" rel="nofollow noopener"><img src="https://example.com/image.jpg" alt="remote image"/></a><br/>
 <a href="/relative/path/wiki/raw/image.jpg" rel="nofollow"><img src="/relative/path/wiki/raw/image.jpg" title="local image" alt="local image"/></a><br/>
 <a href="https://example.com/image.jpg" rel="nofollow"><img src="https://example.com/image.jpg" title="remote link" alt="remote link"/></a><br/>
-<a href="https://example.com/user/repo/compare/88fc37a3c0a4dda553bdcfc80c178a58247f42fb...12fc37a3c0a4dda553bdcfc80c178a58247f42fb#hash" rel="nofollow">https://example.com/user/repo/compare/88fc37a3c0a4dda553bdcfc80c178a58247f42fb...12fc37a3c0a4dda553bdcfc80c178a58247f42fb#hash</a><br/>
+<a href="https://example.com/user/repo/compare/88fc37a3c0a4dda553bdcfc80c178a58247f42fb...12fc37a3c0a4dda553bdcfc80c178a58247f42fb#hash" rel="nofollow"><code>88fc37a3c0...12fc37a3c0 (hash)</code></a><br/>
 com 88fc37a3c0a4dda553bdcfc80c178a58247f42fb...12fc37a3c0a4dda553bdcfc80c178a58247f42fb pare<br/>
-<a href="https://example.com/user/repo/commit/88fc37a3c0a4dda553bdcfc80c178a58247f42fb" rel="nofollow">https://example.com/user/repo/commit/88fc37a3c0a4dda553bdcfc80c178a58247f42fb</a><br/>
+<a href="https://example.com/user/repo/commit/88fc37a3c0a4dda553bdcfc80c178a58247f42fb" rel="nofollow"><code>88fc37a3c0</code></a><br/>
 com 88fc37a3c0a4dda553bdcfc80c178a58247f42fb mit<br/>
 <span class="emoji" aria-label="thumbs up">👍</span><br/>
 <a href="mailto:mail@domain.com" rel="nofollow">mail@domain.com</a><br/>
@@ -830,9 +835,9 @@ space</p>
 <a href="https://example.com/image.jpg" target="_blank" rel="nofollow noopener"><img src="https://example.com/image.jpg" alt="remote image"/></a><br/>
 <a href="/user/repo/media/branch/main/image.jpg" rel="nofollow"><img src="/user/repo/media/branch/main/image.jpg" title="local image" alt="local image"/></a><br/>
 <a href="https://example.com/image.jpg" rel="nofollow"><img src="https://example.com/image.jpg" title="remote link" alt="remote link"/></a><br/>
-<a href="https://example.com/user/repo/compare/88fc37a3c0a4dda553bdcfc80c178a58247f42fb...12fc37a3c0a4dda553bdcfc80c178a58247f42fb#hash" rel="nofollow">https://example.com/user/repo/compare/88fc37a3c0a4dda553bdcfc80c178a58247f42fb...12fc37a3c0a4dda553bdcfc80c178a58247f42fb#hash</a><br/>
+<a href="https://example.com/user/repo/compare/88fc37a3c0a4dda553bdcfc80c178a58247f42fb...12fc37a3c0a4dda553bdcfc80c178a58247f42fb#hash" rel="nofollow"><code>88fc37a3c0...12fc37a3c0 (hash)</code></a><br/>
 com 88fc37a3c0a4dda553bdcfc80c178a58247f42fb...12fc37a3c0a4dda553bdcfc80c178a58247f42fb pare<br/>
-<a href="https://example.com/user/repo/commit/88fc37a3c0a4dda553bdcfc80c178a58247f42fb" rel="nofollow">https://example.com/user/repo/commit/88fc37a3c0a4dda553bdcfc80c178a58247f42fb</a><br/>
+<a href="https://example.com/user/repo/commit/88fc37a3c0a4dda553bdcfc80c178a58247f42fb" rel="nofollow"><code>88fc37a3c0</code></a><br/>
 com 88fc37a3c0a4dda553bdcfc80c178a58247f42fb mit<br/>
 <span class="emoji" aria-label="thumbs up">👍</span><br/>
 <a href="mailto:mail@domain.com" rel="nofollow">mail@domain.com</a><br/>
@@ -860,9 +865,9 @@ space</p>
 <a href="https://example.com/image.jpg" target="_blank" rel="nofollow noopener"><img src="https://example.com/image.jpg" alt="remote image"/></a><br/>
 <a href="/relative/path/wiki/raw/image.jpg" rel="nofollow"><img src="/relative/path/wiki/raw/image.jpg" title="local image" alt="local image"/></a><br/>
 <a href="https://example.com/image.jpg" rel="nofollow"><img src="https://example.com/image.jpg" title="remote link" alt="remote link"/></a><br/>
-<a href="https://example.com/user/repo/compare/88fc37a3c0a4dda553bdcfc80c178a58247f42fb...12fc37a3c0a4dda553bdcfc80c178a58247f42fb#hash" rel="nofollow">https://example.com/user/repo/compare/88fc37a3c0a4dda553bdcfc80c178a58247f42fb...12fc37a3c0a4dda553bdcfc80c178a58247f42fb#hash</a><br/>
+<a href="https://example.com/user/repo/compare/88fc37a3c0a4dda553bdcfc80c178a58247f42fb...12fc37a3c0a4dda553bdcfc80c178a58247f42fb#hash" rel="nofollow"><code>88fc37a3c0...12fc37a3c0 (hash)</code></a><br/>
 com 88fc37a3c0a4dda553bdcfc80c178a58247f42fb...12fc37a3c0a4dda553bdcfc80c178a58247f42fb pare<br/>
-<a href="https://example.com/user/repo/commit/88fc37a3c0a4dda553bdcfc80c178a58247f42fb" rel="nofollow">https://example.com/user/repo/commit/88fc37a3c0a4dda553bdcfc80c178a58247f42fb</a><br/>
+<a href="https://example.com/user/repo/commit/88fc37a3c0a4dda553bdcfc80c178a58247f42fb" rel="nofollow"><code>88fc37a3c0</code></a><br/>
 com 88fc37a3c0a4dda553bdcfc80c178a58247f42fb mit<br/>
 <span class="emoji" aria-label="thumbs up">👍</span><br/>
 <a href="mailto:mail@domain.com" rel="nofollow">mail@domain.com</a><br/>
@@ -890,9 +895,9 @@ space</p>
 <a href="https://example.com/image.jpg" target="_blank" rel="nofollow noopener"><img src="https://example.com/image.jpg" alt="remote image"/></a><br/>
 <a href="/user/repo/image.jpg" rel="nofollow"><img src="/user/repo/image.jpg" title="local image" alt="local image"/></a><br/>
 <a href="https://example.com/image.jpg" rel="nofollow"><img src="https://example.com/image.jpg" title="remote link" alt="remote link"/></a><br/>
-<a href="https://example.com/user/repo/compare/88fc37a3c0a4dda553bdcfc80c178a58247f42fb...12fc37a3c0a4dda553bdcfc80c178a58247f42fb#hash" rel="nofollow">https://example.com/user/repo/compare/88fc37a3c0a4dda553bdcfc80c178a58247f42fb...12fc37a3c0a4dda553bdcfc80c178a58247f42fb#hash</a><br/>
+<a href="https://example.com/user/repo/compare/88fc37a3c0a4dda553bdcfc80c178a58247f42fb...12fc37a3c0a4dda553bdcfc80c178a58247f42fb#hash" rel="nofollow"><code>88fc37a3c0...12fc37a3c0 (hash)</code></a><br/>
 com 88fc37a3c0a4dda553bdcfc80c178a58247f42fb...12fc37a3c0a4dda553bdcfc80c178a58247f42fb pare<br/>
-<a href="https://example.com/user/repo/commit/88fc37a3c0a4dda553bdcfc80c178a58247f42fb" rel="nofollow">https://example.com/user/repo/commit/88fc37a3c0a4dda553bdcfc80c178a58247f42fb</a><br/>
+<a href="https://example.com/user/repo/commit/88fc37a3c0a4dda553bdcfc80c178a58247f42fb" rel="nofollow"><code>88fc37a3c0</code></a><br/>
 com 88fc37a3c0a4dda553bdcfc80c178a58247f42fb mit<br/>
 <span class="emoji" aria-label="thumbs up">👍</span><br/>
 <a href="mailto:mail@domain.com" rel="nofollow">mail@domain.com</a><br/>
@@ -920,9 +925,9 @@ space</p>
 <a href="https://example.com/image.jpg" target="_blank" rel="nofollow noopener"><img src="https://example.com/image.jpg" alt="remote image"/></a><br/>
 <a href="/relative/path/wiki/raw/image.jpg" rel="nofollow"><img src="/relative/path/wiki/raw/image.jpg" title="local image" alt="local image"/></a><br/>
 <a href="https://example.com/image.jpg" rel="nofollow"><img src="https://example.com/image.jpg" title="remote link" alt="remote link"/></a><br/>
-<a href="https://example.com/user/repo/compare/88fc37a3c0a4dda553bdcfc80c178a58247f42fb...12fc37a3c0a4dda553bdcfc80c178a58247f42fb#hash" rel="nofollow">https://example.com/user/repo/compare/88fc37a3c0a4dda553bdcfc80c178a58247f42fb...12fc37a3c0a4dda553bdcfc80c178a58247f42fb#hash</a><br/>
+<a href="https://example.com/user/repo/compare/88fc37a3c0a4dda553bdcfc80c178a58247f42fb...12fc37a3c0a4dda553bdcfc80c178a58247f42fb#hash" rel="nofollow"><code>88fc37a3c0...12fc37a3c0 (hash)</code></a><br/>
 com 88fc37a3c0a4dda553bdcfc80c178a58247f42fb...12fc37a3c0a4dda553bdcfc80c178a58247f42fb pare<br/>
-<a href="https://example.com/user/repo/commit/88fc37a3c0a4dda553bdcfc80c178a58247f42fb" rel="nofollow">https://example.com/user/repo/commit/88fc37a3c0a4dda553bdcfc80c178a58247f42fb</a><br/>
+<a href="https://example.com/user/repo/commit/88fc37a3c0a4dda553bdcfc80c178a58247f42fb" rel="nofollow"><code>88fc37a3c0</code></a><br/>
 com 88fc37a3c0a4dda553bdcfc80c178a58247f42fb mit<br/>
 <span class="emoji" aria-label="thumbs up">👍</span><br/>
 <a href="mailto:mail@domain.com" rel="nofollow">mail@domain.com</a><br/>
@@ -951,9 +956,9 @@ space</p>
 <a href="https://example.com/image.jpg" target="_blank" rel="nofollow noopener"><img src="https://example.com/image.jpg" alt="remote image"/></a><br/>
 <a href="/user/repo/media/branch/main/sub/folder/image.jpg" rel="nofollow"><img src="/user/repo/media/branch/main/sub/folder/image.jpg" title="local image" alt="local image"/></a><br/>
 <a href="https://example.com/image.jpg" rel="nofollow"><img src="https://example.com/image.jpg" title="remote link" alt="remote link"/></a><br/>
-<a href="https://example.com/user/repo/compare/88fc37a3c0a4dda553bdcfc80c178a58247f42fb...12fc37a3c0a4dda553bdcfc80c178a58247f42fb#hash" rel="nofollow">https://example.com/user/repo/compare/88fc37a3c0a4dda553bdcfc80c178a58247f42fb...12fc37a3c0a4dda553bdcfc80c178a58247f42fb#hash</a><br/>
+<a href="https://example.com/user/repo/compare/88fc37a3c0a4dda553bdcfc80c178a58247f42fb...12fc37a3c0a4dda553bdcfc80c178a58247f42fb#hash" rel="nofollow"><code>88fc37a3c0...12fc37a3c0 (hash)</code></a><br/>
 com 88fc37a3c0a4dda553bdcfc80c178a58247f42fb...12fc37a3c0a4dda553bdcfc80c178a58247f42fb pare<br/>
-<a href="https://example.com/user/repo/commit/88fc37a3c0a4dda553bdcfc80c178a58247f42fb" rel="nofollow">https://example.com/user/repo/commit/88fc37a3c0a4dda553bdcfc80c178a58247f42fb</a><br/>
+<a href="https://example.com/user/repo/commit/88fc37a3c0a4dda553bdcfc80c178a58247f42fb" rel="nofollow"><code>88fc37a3c0</code></a><br/>
 com 88fc37a3c0a4dda553bdcfc80c178a58247f42fb mit<br/>
 <span class="emoji" aria-label="thumbs up">👍</span><br/>
 <a href="mailto:mail@domain.com" rel="nofollow">mail@domain.com</a><br/>
@@ -982,9 +987,9 @@ space</p>
 <a href="https://example.com/image.jpg" target="_blank" rel="nofollow noopener"><img src="https://example.com/image.jpg" alt="remote image"/></a><br/>
 <a href="/relative/path/wiki/raw/image.jpg" rel="nofollow"><img src="/relative/path/wiki/raw/image.jpg" title="local image" alt="local image"/></a><br/>
 <a href="https://example.com/image.jpg" rel="nofollow"><img src="https://example.com/image.jpg" title="remote link" alt="remote link"/></a><br/>
-<a href="https://example.com/user/repo/compare/88fc37a3c0a4dda553bdcfc80c178a58247f42fb...12fc37a3c0a4dda553bdcfc80c178a58247f42fb#hash" rel="nofollow">https://example.com/user/repo/compare/88fc37a3c0a4dda553bdcfc80c178a58247f42fb...12fc37a3c0a4dda553bdcfc80c178a58247f42fb#hash</a><br/>
+<a href="https://example.com/user/repo/compare/88fc37a3c0a4dda553bdcfc80c178a58247f42fb...12fc37a3c0a4dda553bdcfc80c178a58247f42fb#hash" rel="nofollow"><code>88fc37a3c0...12fc37a3c0 (hash)</code></a><br/>
 com 88fc37a3c0a4dda553bdcfc80c178a58247f42fb...12fc37a3c0a4dda553bdcfc80c178a58247f42fb pare<br/>
-<a href="https://example.com/user/repo/commit/88fc37a3c0a4dda553bdcfc80c178a58247f42fb" rel="nofollow">https://example.com/user/repo/commit/88fc37a3c0a4dda553bdcfc80c178a58247f42fb</a><br/>
+<a href="https://example.com/user/repo/commit/88fc37a3c0a4dda553bdcfc80c178a58247f42fb" rel="nofollow"><code>88fc37a3c0</code></a><br/>
 com 88fc37a3c0a4dda553bdcfc80c178a58247f42fb mit<br/>
 <span class="emoji" aria-label="thumbs up">👍</span><br/>
 <a href="mailto:mail@domain.com" rel="nofollow">mail@domain.com</a><br/>
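The expected-output hunks above all make the same change: absolute commit and compare URLs are now rendered as shortened `<code>` labels instead of the full URL text, with the URL fragment (for example `(hash)`) appended for compare links. A small illustrative sketch of the label shape only, not the renderer's real implementation:

```go
package main

import (
	"fmt"
	"strings"
)

// shortCommitLabel shortens a 40-character SHA to its first 10 characters;
// a "sha1...sha2" compare range keeps both shortened ends.
func shortCommitLabel(ref string) string {
	if before, after, ok := strings.Cut(ref, "..."); ok {
		return shortCommitLabel(before) + "..." + shortCommitLabel(after)
	}
	if len(ref) >= 10 {
		return ref[:10]
	}
	return ref
}

func main() {
	fmt.Println(shortCommitLabel("88fc37a3c0a4dda553bdcfc80c178a58247f42fb"))
	// 88fc37a3c0
	fmt.Println(shortCommitLabel("88fc37a3c0a4dda553bdcfc80c178a58247f42fb...12fc37a3c0a4dda553bdcfc80c178a58247f42fb"))
	// 88fc37a3c0...12fc37a3c0
}
```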
@@ -1001,7 +1006,7 @@ space</p>
 		result, err := markdown.RenderString(&markup.RenderContext{
 			Ctx:   context.Background(),
 			Links: c.Links,
-			ContentMode: util.Iif(c.IsWiki, markup.RenderContentAsWiki, markup.RenderContentAsDefault),
+			Metas: util.Iif(c.IsWiki, map[string]string{"markupContentMode": "wiki"}, map[string]string{}),
 		}, input)
 		assert.NoError(t, err, "Unexpected error in testcase: %v", i)
 		assert.Equal(t, c.Expected, string(result), "Unexpected result in testcase %v", i)
@@ -21,7 +21,7 @@ func (g *ASTTransformer) transformImage(ctx *markup.RenderContext, v *ast.Image)
 	// Check if the destination is a real link
 	if len(v.Destination) > 0 && !markup.IsFullURLBytes(v.Destination) {
 		v.Destination = []byte(giteautil.URLJoin(
-			ctx.Links.ResolveMediaLink(ctx.ContentMode == markup.RenderContentAsWiki),
+			ctx.Links.ResolveMediaLink(ctx.IsMarkupContentWiki()),
 			strings.TrimLeft(string(v.Destination), "/"),
 		))
 	}
@@ -144,15 +144,14 @@ func (r *Writer) resolveLink(kind, link string) string {
 	}
 
 	base := r.Ctx.Links.Base
-	isWiki := r.Ctx.ContentMode == markup.RenderContentAsWiki
-	if isWiki {
+	if r.Ctx.IsMarkupContentWiki() {
 		base = r.Ctx.Links.WikiLink()
 	} else if r.Ctx.Links.HasBranchInfo() {
 		base = r.Ctx.Links.SrcLink()
 	}
 
 	if kind == "image" || kind == "video" {
-		base = r.Ctx.Links.ResolveMediaLink(isWiki)
+		base = r.Ctx.Links.ResolveMediaLink(r.Ctx.IsMarkupContentWiki())
 	}
 
 	link = util.URLJoin(base, link)
@@ -27,7 +27,7 @@ func TestRender_StandardLinks(t *testing.T) {
 			Base: "/relative-path",
 			BranchPath: "branch/main",
 		},
-		ContentMode: util.Iif(isWiki, markup.RenderContentAsWiki, markup.RenderContentAsDefault),
+		Metas: map[string]string{"markupContentMode": util.Iif(isWiki, "wiki", "")},
 	}, input)
 	assert.NoError(t, err)
 	assert.Equal(t, strings.TrimSpace(expected), strings.TrimSpace(buffer))
@@ -27,15 +27,6 @@ const (
 	RenderMetaAsTable RenderMetaMode = "table"
 )
 
-type RenderContentMode string
-
-const (
-	RenderContentAsDefault RenderContentMode = "" // empty means "default", no special handling, maybe just a simple "document"
-	RenderContentAsComment RenderContentMode = "comment"
-	RenderContentAsTitle RenderContentMode = "title"
-	RenderContentAsWiki RenderContentMode = "wiki"
-)
-
 var RenderBehaviorForTesting struct {
 	// Markdown line break rendering has 2 default behaviors:
 	// * Use hard: replace "\n" with "<br>" for comments, setting.Markdown.EnableHardLineBreakInComments=true
@@ -59,12 +50,14 @@ type RenderContext struct {
 	// for file mode, it could be left as empty, and will be detected by file extension in RelativePath
 	MarkupType string
 
-	// what the content will be used for: eg: for comment or for wiki? or just render a file?
-	ContentMode RenderContentMode
-
 	Links Links // special link references for rendering, especially when there is a branch/tree path
-	Metas map[string]string // user&repo, format&style&regexp (for external issue pattern), teams&org (for mention), BranchNameSubURL(for iframe&asciicast)
-	DefaultLink string // TODO: need to figure out
+
+	// user&repo, format&style&regexp (for external issue pattern), teams&org (for mention)
+	// BranchNameSubURL (for iframe&asciicast)
+	// markupAllowShortIssuePattern, markupContentMode (wiki)
+	// markdownLineBreakStyle (comment, document)
+	Metas map[string]string
 
 	GitRepo *git.Repository
 	Repo gitrepo.Repository
 	ShaExistCache map[string]bool
@@ -102,6 +95,10 @@ func (ctx *RenderContext) AddCancel(fn func()) {
 	}
 }
 
+func (ctx *RenderContext) IsMarkupContentWiki() bool {
+	return ctx.Metas != nil && ctx.Metas["markupContentMode"] == "wiki"
+}
+
 // Render renders markup file to HTML with all specific handling stuff.
 func Render(ctx *RenderContext, input io.Reader, output io.Writer) error {
 	if ctx.MarkupType == "" && ctx.RelativePath != "" {
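`IsMarkupContentWiki` replaces the earlier `ContentMode == RenderContentAsWiki` checks: wiki rendering is now signalled purely through the metas map. A standalone sketch of the same check on a plain map, using the key name from the diff (the helper itself is hypothetical):

```go
package main

import "fmt"

// isWikiMetas reports whether a metas map marks the content as wiki content.
func isWikiMetas(metas map[string]string) bool {
	return metas != nil && metas["markupContentMode"] == "wiki"
}

func main() {
	fmt.Println(isWikiMetas(map[string]string{"markupContentMode": "wiki"})) // true
	fmt.Println(isWikiMetas(map[string]string{"user": "u", "repo": "r"}))    // false
	fmt.Println(isWikiMetas(nil))                                            // false: nil map is safe
}
```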
@@ -232,3 +229,7 @@ func Init(ph *ProcessorHelper) {
 		}
 	}
 }
+
+func ComposeSimpleDocumentMetas() map[string]string {
+	return map[string]string{"markdownLineBreakStyle": "document"}
+}
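`ComposeSimpleDocumentMetas` shows the direction of the refactor: presentation hints travel in the metas map together with the repo identifiers. A sketch of how a caller might assemble such a map, using the key names from this diff (`markdownLineBreakStyle`, `markupContentMode`); the helper is illustrative only, not part of the package:

```go
package main

import "fmt"

// composeMetas builds a metas map with repo identifiers plus the presentation
// hints that used to be separate fields on the render context.
func composeMetas(user, repo string, wiki bool) map[string]string {
	m := map[string]string{
		"user":                   user,
		"repo":                   repo,
		"markdownLineBreakStyle": "document",
	}
	if wiki {
		m["markupContentMode"] = "wiki"
	}
	return m
}

func main() {
	fmt.Println(composeMetas("user13", "repo11", true))
	fmt.Println(composeMetas("user13", "repo11", false))
}
```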
@@ -10,7 +10,7 @@ import (
 
 type Links struct {
 	AbsolutePrefix bool // add absolute URL prefix to auto-resolved links like "#issue", but not for pre-provided links and medias
-	Base string // base prefix for pre-provided links and medias (images, videos)
+	Base string // base prefix for pre-provided links and medias (images, videos), usually it is the path to the repo
 	BranchPath string // actually it is the ref path, eg: "branch/features/feat-12", "tag/v1.0"
 	TreePath string // the dir of the file, eg: "doc" if the file "doc/CHANGE.md" is being rendered
 }
@@ -23,7 +23,7 @@ var (
 )
 
 func init() {
-	unhandledItemRequeueDuration.Store(int64(5 * time.Second))
+	unhandledItemRequeueDuration.Store(int64(time.Second))
 }
 
 // workerGroup is a group of workers to work with a WorkerPoolQueue
@@ -104,7 +104,12 @@ func (q *WorkerPoolQueue[T]) doWorkerHandle(batch []T) {
 	// if none of the items were handled, it should back-off for a few seconds
 	// in this case the handler (eg: document indexer) may have encountered some errors/failures
 	if len(unhandled) == len(batch) && unhandledItemRequeueDuration.Load() != 0 {
+		if q.isFlushing.Load() {
+			return // do not requeue items when flushing, since all items failed, requeue them will continue failing.
+		}
 		log.Error("Queue %q failed to handle batch of %d items, backoff for a few seconds", q.GetName(), len(batch))
+		// TODO: ideally it shouldn't "sleep" here (blocks the worker, then blocks flush).
+		// It could debounce the requeue operation, and try to requeue the items in the future.
 		select {
 		case <-q.ctxRun.Done():
 		case <-time.After(time.Duration(unhandledItemRequeueDuration.Load())):
|
||||||
// doFlush flushes the queue: it tries to read all items from the queue and handles them.
|
// doFlush flushes the queue: it tries to read all items from the queue and handles them.
|
||||||
// It is for testing purpose only. It's not designed to work for a cluster.
|
// It is for testing purpose only. It's not designed to work for a cluster.
|
||||||
func (q *WorkerPoolQueue[T]) doFlush(wg *workerGroup[T], flush flushType) {
|
func (q *WorkerPoolQueue[T]) doFlush(wg *workerGroup[T], flush flushType) {
|
||||||
|
q.isFlushing.Store(true)
|
||||||
|
defer q.isFlushing.Store(false)
|
||||||
|
|
||||||
log.Debug("Queue %q starts flushing", q.GetName())
|
log.Debug("Queue %q starts flushing", q.GetName())
|
||||||
defer log.Debug("Queue %q finishes flushing", q.GetName())
|
defer log.Debug("Queue %q finishes flushing", q.GetName())
|
||||||
|
|
||||||
|
@ -236,6 +244,9 @@ loop:
|
||||||
emptyCounter := 0
|
emptyCounter := 0
|
||||||
for {
|
for {
|
||||||
select {
|
select {
|
||||||
|
case <-q.ctxRun.Done():
|
||||||
|
log.Debug("Queue %q is shutting down", q.GetName())
|
||||||
|
return
|
||||||
case data, dataOk := <-wg.popItemChan:
|
case data, dataOk := <-wg.popItemChan:
|
||||||
if !dataOk {
|
if !dataOk {
|
||||||
return
|
return
|
||||||
|
@ -251,9 +262,6 @@ loop:
|
||||||
log.Error("Failed to pop item from queue %q (doFlush): %v", q.GetName(), err)
|
log.Error("Failed to pop item from queue %q (doFlush): %v", q.GetName(), err)
|
||||||
}
|
}
|
||||||
return
|
return
|
||||||
case <-q.ctxRun.Done():
|
|
||||||
log.Debug("Queue %q is shutting down", q.GetName())
|
|
||||||
return
|
|
||||||
case <-time.After(20 * time.Millisecond):
|
case <-time.After(20 * time.Millisecond):
|
||||||
// There is no reliable way to make sure all queue items are consumed by the Flush, there always might be some items stored in some buffers/temp variables.
|
// There is no reliable way to make sure all queue items are consumed by the Flush, there always might be some items stored in some buffers/temp variables.
|
||||||
// If we run Gitea in a cluster, we can even not guarantee all items are consumed in a deterministic instance.
|
// If we run Gitea in a cluster, we can even not guarantee all items are consumed in a deterministic instance.
|
||||||
|
@ -331,6 +339,15 @@ func (q *WorkerPoolQueue[T]) doRun() {
|
||||||
var batchDispatchC <-chan time.Time = infiniteTimerC
|
var batchDispatchC <-chan time.Time = infiniteTimerC
|
||||||
for {
|
for {
|
||||||
select {
|
select {
|
||||||
|
case flush := <-q.flushChan:
|
||||||
|
// before flushing, it needs to try to dispatch the batch to worker first, in case there is no worker running
|
||||||
|
// after the flushing, there is at least one worker running, so "doFlush" could wait for workers to finish
|
||||||
|
// since we are already in a "flush" operation, so the dispatching function shouldn't read the flush chan.
|
||||||
|
q.doDispatchBatchToWorker(wg, skipFlushChan)
|
||||||
|
q.doFlush(wg, flush)
|
||||||
|
case <-q.ctxRun.Done():
|
||||||
|
log.Debug("Queue %q is shutting down", q.GetName())
|
||||||
|
return
|
||||||
case data, dataOk := <-wg.popItemChan:
|
case data, dataOk := <-wg.popItemChan:
|
||||||
if !dataOk {
|
if !dataOk {
|
||||||
return
|
return
|
||||||
|
@@ -349,20 +366,11 @@ func (q *WorkerPoolQueue[T]) doRun() {
 		case <-batchDispatchC:
 			batchDispatchC = infiniteTimerC
 			q.doDispatchBatchToWorker(wg, q.flushChan)
-		case flush := <-q.flushChan:
-			// before flushing, it needs to try to dispatch the batch to worker first, in case there is no worker running
-			// after the flushing, there is at least one worker running, so "doFlush" could wait for workers to finish
-			// since we are already in a "flush" operation, so the dispatching function shouldn't read the flush chan.
-			q.doDispatchBatchToWorker(wg, skipFlushChan)
-			q.doFlush(wg, flush)
 		case err := <-wg.popItemErr:
 			if !q.isCtxRunCanceled() {
 				log.Error("Failed to pop item from queue %q (doRun): %v", q.GetName(), err)
 			}
 			return
-		case <-q.ctxRun.Done():
-			log.Debug("Queue %q is shutting down", q.GetName())
-			return
 		}
 	}
 }
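Taken together, the two `doRun` hunks above consolidate flush handling and shutdown into the one main select loop rather than keeping them in separate cases further down. A stripped-down sketch of that loop shape, with illustrative channel and function names only:

```go
package main

import (
	"context"
	"fmt"
	"time"
)

// runLoop shows the event-loop shape after the change: a single select handles
// flush requests, shutdown, and incoming items, so every path goes through the
// same loop.
func runLoop(ctx context.Context, flushCh <-chan struct{}, items <-chan string) {
	for {
		select {
		case <-flushCh:
			fmt.Println("flush requested")
		case <-ctx.Done():
			fmt.Println("shutting down")
			return
		case it := <-items:
			fmt.Println("handle", it)
		}
	}
}

func main() {
	ctx, cancel := context.WithCancel(context.Background())
	items := make(chan string, 1)
	flush := make(chan struct{}, 1)
	items <- "a"
	flush <- struct{}{}
	go func() { time.Sleep(50 * time.Millisecond); cancel() }()
	runLoop(ctx, flush, items)
}
```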
@@ -34,6 +34,7 @@ type WorkerPoolQueue[T any] struct {
 
 	batchChan chan []T
 	flushChan chan flushType
+	isFlushing atomic.Bool
 
 	batchLength int
 	workerNum int
@@ -3,33 +3,33 @@
 
 package setting
 
-// Attachment settings
-var Attachment = struct {
+type AttachmentSettingType struct {
 	Storage *Storage
 	AllowedTypes string
 	MaxSize int64
 	MaxFiles int
 	Enabled bool
-}{
-	Storage: &Storage{},
-	AllowedTypes: ".cpuprofile,.csv,.dmp,.docx,.fodg,.fodp,.fods,.fodt,.gif,.gz,.jpeg,.jpg,.json,.jsonc,.log,.md,.mov,.mp4,.odf,.odg,.odp,.ods,.odt,.patch,.pdf,.png,.pptx,.svg,.tgz,.txt,.webm,.xls,.xlsx,.zip",
+}
+
+var Attachment AttachmentSettingType
+
+func loadAttachmentFrom(rootCfg ConfigProvider) (err error) {
+	Attachment = AttachmentSettingType{
+		AllowedTypes: ".avif,.cpuprofile,.csv,.dmp,.docx,.fodg,.fodp,.fods,.fodt,.gif,.gz,.jpeg,.jpg,.json,.jsonc,.log,.md,.mov,.mp4,.odf,.odg,.odp,.ods,.odt,.patch,.pdf,.png,.pptx,.svg,.tgz,.txt,.webm,.webp,.xls,.xlsx,.zip",
 		MaxSize: 2048,
 		MaxFiles: 5,
 		Enabled: true,
 	}
 
-func loadAttachmentFrom(rootCfg ConfigProvider) (err error) {
 	sec, _ := rootCfg.GetSection("attachment")
 	if sec == nil {
 		Attachment.Storage, err = getStorage(rootCfg, "attachments", "", nil)
 		return err
 	}
 
-	Attachment.AllowedTypes = sec.Key("ALLOWED_TYPES").MustString(".cpuprofile,.csv,.dmp,.docx,.fodg,.fodp,.fods,.fodt,.gif,.gz,.jpeg,.jpg,.json,.jsonc,.log,.md,.mov,.mp4,.odf,.odg,.odp,.ods,.odt,.patch,.pdf,.png,.pptx,.svg,.tgz,.txt,.webm,.xls,.xlsx,.zip")
-	Attachment.MaxSize = sec.Key("MAX_SIZE").MustInt64(2048)
-	Attachment.MaxFiles = sec.Key("MAX_FILES").MustInt(5)
-	Attachment.Enabled = sec.Key("ENABLED").MustBool(true)
+	Attachment.AllowedTypes = sec.Key("ALLOWED_TYPES").MustString(Attachment.AllowedTypes)
+	Attachment.MaxSize = sec.Key("MAX_SIZE").MustInt64(Attachment.MaxSize)
+	Attachment.MaxFiles = sec.Key("MAX_FILES").MustInt(Attachment.MaxFiles)
+	Attachment.Enabled = sec.Key("ENABLED").MustBool(Attachment.Enabled)
 
 	Attachment.Storage, err = getStorage(rootCfg, "attachments", "", sec)
 	return err
 }
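The attachment settings rewrite above moves to a defaults-in-struct pattern: the default values live in one typed literal, and each `sec.Key(...).Must*` call falls back to the current field instead of repeating the default literal at the read site. A small sketch of the pattern with a stand-in section type (not the real `ConfigProvider`):

```go
package main

import "fmt"

// attachmentSettings carries the defaults; load() only overrides keys that
// are actually present in the configuration section.
type attachmentSettings struct {
	AllowedTypes string
	MaxSize      int64
	Enabled      bool
}

// iniSection is a hypothetical stand-in for a config section lookup.
type iniSection map[string]string

func (s iniSection) mustString(key, fallback string) string {
	if v, ok := s[key]; ok {
		return v
	}
	return fallback
}

func load(sec iniSection) attachmentSettings {
	a := attachmentSettings{AllowedTypes: ".png,.zip", MaxSize: 2048, Enabled: true}
	a.AllowedTypes = sec.mustString("ALLOWED_TYPES", a.AllowedTypes) // default stated once
	return a
}

func main() {
	fmt.Println(load(iniSection{}))                       // keeps defaults
	fmt.Println(load(iniSection{"ALLOWED_TYPES": "*/*"})) // overrides one key
}
```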
@@ -86,6 +86,7 @@ var UI = struct {
 	Reactions: []string{`+1`, `-1`, `laugh`, `hooray`, `confused`, `heart`, `rocket`, `eyes`},
 	CustomEmojis: []string{`git`, `gitea`, `codeberg`, `gitlab`, `github`, `gogs`},
 	CustomEmojisMap: map[string]string{"git": ":git:", "gitea": ":gitea:", "codeberg": ":codeberg:", "gitlab": ":gitlab:", "github": ":github:", "gogs": ":gogs:"},
+	ExploreDefaultSort: "recentupdate",
 	PreferredTimestampTense: "mixed",
 
 	AmbiguousUnicodeDetection: true,
@@ -62,19 +62,18 @@ func (ut *RenderUtils) RenderCommitMessageLinkSubject(msg, urlDefault string, me
 	}
 	msgLine = strings.TrimRightFunc(msgLine, unicode.IsSpace)
 	if len(msgLine) == 0 {
-		return template.HTML("")
+		return ""
 	}
 
 	// we can safely assume that it will not return any error, since there
 	// shouldn't be any special HTML.
 	renderedMessage, err := markup.RenderCommitMessageSubject(&markup.RenderContext{
 		Ctx: ut.ctx,
-		DefaultLink: urlDefault,
 		Metas: metas,
-	}, template.HTMLEscapeString(msgLine))
+	}, urlDefault, template.HTMLEscapeString(msgLine))
 	if err != nil {
 		log.Error("RenderCommitMessageSubject: %v", err)
-		return template.HTML("")
+		return ""
 	}
 	return renderCodeBlock(template.HTML(renderedMessage))
 }
@@ -96,7 +95,6 @@ func (ut *RenderUtils) RenderCommitBody(msg string, metas map[string]string) tem
 	renderedMessage, err := markup.RenderCommitMessage(&markup.RenderContext{
 		Ctx: ut.ctx,
 		Metas: metas,
-		ContentMode: markup.RenderContentAsComment,
 	}, template.HTMLEscapeString(msgLine))
 	if err != nil {
 		log.Error("RenderCommitMessage: %v", err)
@@ -118,7 +116,6 @@ func renderCodeBlock(htmlEscapedTextToRender template.HTML) template.HTML {
 func (ut *RenderUtils) RenderIssueTitle(text string, metas map[string]string) template.HTML {
 	renderedText, err := markup.RenderIssueTitle(&markup.RenderContext{
 		Ctx: ut.ctx,
-		ContentMode: markup.RenderContentAsTitle,
 		Metas: metas,
 	}, template.HTMLEscapeString(text))
 	if err != nil {
@@ -212,7 +209,7 @@ func reactionToEmoji(reaction string) template.HTML {
 func (ut *RenderUtils) MarkdownToHtml(input string) template.HTML { //nolint:revive
 	output, err := markdown.RenderString(&markup.RenderContext{
 		Ctx: ut.ctx,
-		Metas: map[string]string{"mode": "document"},
+		Metas: markup.ComposeSimpleDocumentMetas(),
 	}, input)
 	if err != nil {
 		log.Error("RenderString: %v", err)
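The template-helper hunks above pass the default link to `markup.RenderCommitMessageSubject` as an explicit argument instead of a `DefaultLink` context field, and the rendered anchor now uses the plain `muted` class (see the test expectation below). A toy sketch of the calling convention only; the rendering here is faked and is not the package's real implementation:

```go
package main

import (
	"fmt"
	"strings"
)

// renderSubject takes the default link as an explicit parameter rather than
// reading it from a context value, mirroring the signature change above.
func renderSubject(metas map[string]string, defaultLink, subject string) string {
	if strings.TrimSpace(subject) == "" {
		return ""
	}
	return fmt.Sprintf(`<a href=%q class="muted">%s</a>`, defaultLink, subject)
}

func main() {
	fmt.Println(renderSubject(map[string]string{"user": "u", "repo": "r"},
		"https://example.com/link", "fix: render subject"))
}
```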
@@ -50,7 +50,8 @@ var testMetas = map[string]string{
 	"user": "user13",
 	"repo": "repo11",
 	"repoPath": "../../tests/gitea-repositories-meta/user13/repo11.git/",
-	"mode": "comment",
+	"markdownLineBreakStyle": "comment",
+	"markupAllowShortIssuePattern": "true",
 }
 
 func TestMain(m *testing.M) {
@@ -76,7 +77,6 @@ func TestRenderCommitBody(t *testing.T) {
 	defer test.MockVariableValue(&markup.RenderBehaviorForTesting.DisableInternalAttributes, true)()
 	type args struct {
 		msg string
-		metas map[string]string
 	}
 	tests := []struct {
 		name string
@@ -108,7 +108,7 @@ func TestRenderCommitBody(t *testing.T) {
 	ut := newTestRenderUtils()
 	for _, tt := range tests {
 		t.Run(tt.name, func(t *testing.T) {
-			assert.Equalf(t, tt.want, ut.RenderCommitBody(tt.args.msg, tt.args.metas), "RenderCommitBody(%v, %v)", tt.args.msg, tt.args.metas)
+			assert.Equalf(t, tt.want, ut.RenderCommitBody(tt.args.msg, nil), "RenderCommitBody(%v, %v)", tt.args.msg, nil)
 		})
 	}
 
@@ -140,7 +140,7 @@ func TestRenderCommitMessage(t *testing.T) {
 }
 
 func TestRenderCommitMessageLinkSubject(t *testing.T) {
-	expected := `<a href="https://example.com/link" class="default-link muted">space </a><a href="/mention-user" data-markdown-generated-content="" class="mention">@mention-user</a>`
+	expected := `<a href="https://example.com/link" class="muted">space </a><a href="/mention-user" data-markdown-generated-content="" class="mention">@mention-user</a>`
 	assert.EqualValues(t, expected, newTestRenderUtils().RenderCommitMessageLinkSubject(testInput(), "https://example.com/link", testMetas))
 }
 
@@ -164,11 +164,11 @@ com 88fc37a3c0a4dda553bdcfc80c178a58247f42fb mit
 <span class="emoji" aria-label="thumbs up">👍</span>
 mail@domain.com
 @mention-user test
-<a href="/user13/repo11/issues/123" class="ref-issue">#123</a>
+#123
 space<SPACE><SPACE>
 `
 	expected = strings.ReplaceAll(expected, "<SPACE>", " ")
-	assert.EqualValues(t, expected, string(newTestRenderUtils().RenderIssueTitle(testInput(), testMetas)))
+	assert.EqualValues(t, expected, string(newTestRenderUtils().RenderIssueTitle(testInput(), nil)))
 }
 
 func TestRenderMarkdownToHtml(t *testing.T) {
@@ -20,8 +20,9 @@ import (
 
 var (
     prefix string
-    SlowTest  = 10 * time.Second
-    SlowFlush = 5 * time.Second
+    TestTimeout   = 10 * time.Minute
+    TestSlowRun   = 10 * time.Second
+    TestSlowFlush = 1 * time.Second
 )
 
 var WriterCloser = &testLoggerWriterCloser{}

@@ -89,79 +90,97 @@ func (w *testLoggerWriterCloser) Reset() {
     w.Unlock()
 }
 
+// Printf takes a format and args and prints the string to os.Stdout
+func Printf(format string, args ...any) {
+    if !log.CanColorStdout {
+        for i := 0; i < len(args); i++ {
+            if c, ok := args[i].(*log.ColoredValue); ok {
+                args[i] = c.Value()
+            }
+        }
+    }
+    _, _ = fmt.Fprintf(os.Stdout, format, args...)
+}
+
 // PrintCurrentTest prints the current test to os.Stdout
 func PrintCurrentTest(t testing.TB, skip ...int) func() {
     t.Helper()
-    start := time.Now()
+    runStart := time.Now()
     actualSkip := util.OptionalArg(skip) + 1
     _, filename, line, _ := runtime.Caller(actualSkip)
 
-    if log.CanColorStdout {
-        _, _ = fmt.Fprintf(os.Stdout, "=== %s (%s:%d)\n", fmt.Formatter(log.NewColoredValue(t.Name())), strings.TrimPrefix(filename, prefix), line)
-    } else {
-        _, _ = fmt.Fprintf(os.Stdout, "=== %s (%s:%d)\n", t.Name(), strings.TrimPrefix(filename, prefix), line)
-    }
+    Printf("=== %s (%s:%d)\n", log.NewColoredValue(t.Name()), strings.TrimPrefix(filename, prefix), line)
 
     WriterCloser.pushT(t)
+    timeoutChecker := time.AfterFunc(TestTimeout, func() {
+        l := 128 * 1024
+        var stack []byte
+        for {
+            stack = make([]byte, l)
+            n := runtime.Stack(stack, true)
+            if n <= l {
+                stack = stack[:n]
+                break
+            }
+            l = n
+        }
+        Printf("!!! %s ... timeout: %v ... stacktrace:\n%s\n\n", log.NewColoredValue(t.Name(), log.Bold, log.FgRed), TestTimeout, string(stack))
+    })
     return func() {
-        took := time.Since(start)
-        if took > SlowTest {
-            if log.CanColorStdout {
-                _, _ = fmt.Fprintf(os.Stdout, "+++ %s is a slow test (took %v)\n", fmt.Formatter(log.NewColoredValue(t.Name(), log.Bold, log.FgYellow)), fmt.Formatter(log.NewColoredValue(took, log.Bold, log.FgYellow)))
-            } else {
-                _, _ = fmt.Fprintf(os.Stdout, "+++ %s is a slow test (took %v)\n", t.Name(), took)
-            }
-        }
-        timer := time.AfterFunc(SlowFlush, func() {
-            if log.CanColorStdout {
-                _, _ = fmt.Fprintf(os.Stdout, "+++ %s ... still flushing after %v ...\n", fmt.Formatter(log.NewColoredValue(t.Name(), log.Bold, log.FgRed)), SlowFlush)
-            } else {
-                _, _ = fmt.Fprintf(os.Stdout, "+++ %s ... still flushing after %v ...\n", t.Name(), SlowFlush)
-            }
+        flushStart := time.Now()
+        slowFlushChecker := time.AfterFunc(TestSlowFlush, func() {
+            Printf("+++ %s ... still flushing after %v ...\n", log.NewColoredValue(t.Name(), log.Bold, log.FgRed), TestSlowFlush)
         })
         if err := queue.GetManager().FlushAll(context.Background(), -1); err != nil {
            t.Errorf("Flushing queues failed with error %v", err)
         }
-        timer.Stop()
-        flushTook := time.Since(start) - took
-        if flushTook > SlowFlush {
-            if log.CanColorStdout {
-                _, _ = fmt.Fprintf(os.Stdout, "+++ %s had a slow clean-up flush (took %v)\n", fmt.Formatter(log.NewColoredValue(t.Name(), log.Bold, log.FgRed)), fmt.Formatter(log.NewColoredValue(flushTook, log.Bold, log.FgRed)))
-            } else {
-                _, _ = fmt.Fprintf(os.Stdout, "+++ %s had a slow clean-up flush (took %v)\n", t.Name(), flushTook)
-            }
+        slowFlushChecker.Stop()
+        timeoutChecker.Stop()
+        runDuration := time.Since(runStart)
+        flushDuration := time.Since(flushStart)
+        if runDuration > TestSlowRun {
+            Printf("+++ %s is a slow test (run: %v, flush: %v)\n", log.NewColoredValue(t.Name(), log.Bold, log.FgYellow), runDuration, flushDuration)
         }
         WriterCloser.popT()
     }
 }
 
-// Printf takes a format and args and prints the string to os.Stdout
-func Printf(format string, args ...any) {
-    if log.CanColorStdout {
-        for i := 0; i < len(args); i++ {
-            args[i] = log.NewColoredValue(args[i])
-        }
-    }
-    _, _ = fmt.Fprintf(os.Stdout, "\t"+format, args...)
-}
-
 // TestLogEventWriter is a logger which will write to the testing log
 type TestLogEventWriter struct {
     *log.EventWriterBaseImpl
 }
 
-// NewTestLoggerWriter creates a TestLogEventWriter as a log.LoggerProvider
-func NewTestLoggerWriter(name string, mode log.WriterMode) log.EventWriter {
+// newTestLoggerWriter creates a TestLogEventWriter as a log.LoggerProvider
+func newTestLoggerWriter(name string, mode log.WriterMode) log.EventWriter {
     w := &TestLogEventWriter{}
     w.EventWriterBaseImpl = log.NewEventWriterBase(name, "test-log-writer", mode)
     w.OutputWriteCloser = WriterCloser
     return w
 }
 
-func init() {
+func Init() {
     const relFilePath = "modules/testlogger/testlogger.go"
     _, filename, _, _ := runtime.Caller(0)
     if !strings.HasSuffix(filename, relFilePath) {
         panic("source code file path doesn't match expected: " + relFilePath)
     }
     prefix = strings.TrimSuffix(filename, relFilePath)
+
+    log.RegisterEventWriter("test", newTestLoggerWriter)
+
+    duration, err := time.ParseDuration(os.Getenv("GITEA_TEST_SLOW_RUN"))
+    if err == nil && duration > 0 {
+        TestSlowRun = duration
+    }
+
+    duration, err = time.ParseDuration(os.Getenv("GITEA_TEST_SLOW_FLUSH"))
+    if err == nil && duration > 0 {
+        TestSlowFlush = duration
+    }
+}
+
+func Fatalf(format string, args ...any) {
+    Printf(format+"\n", args...)
+    os.Exit(1)
 }

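The new TestTimeout watchdog above is a reusable pattern: arm a timer that, if the work has not finished in time, dumps every goroutine stack so a hung test is diagnosable from CI logs alone. The sketch below is a standalone illustration of that idea only; the helper name `watchTimeout` and the 30-second limit are invented for the example and are not part of the change.

```go
package main

import (
	"fmt"
	"runtime"
	"time"
)

// watchTimeout arms a watchdog that prints all goroutine stacks if stop()
// is not called within limit. The buffer is grown until runtime.Stack can
// hold the full dump, mirroring the timeoutChecker added above.
func watchTimeout(name string, limit time.Duration) (stop func()) {
	t := time.AfterFunc(limit, func() {
		buf := make([]byte, 128*1024)
		for {
			n := runtime.Stack(buf, true)
			if n < len(buf) {
				buf = buf[:n]
				break
			}
			buf = make([]byte, 2*len(buf))
		}
		fmt.Printf("!!! %s timed out after %v, stacktrace:\n%s\n", name, limit, buf)
	})
	return func() { t.Stop() }
}

func main() {
	stop := watchTimeout("example-task", 30*time.Second)
	defer stop()
	// ... do the actual work here ...
}
```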
@@ -5,10 +5,12 @@ package typesniffer
 
 import (
     "bytes"
+    "encoding/binary"
     "fmt"
     "io"
     "net/http"
     "regexp"
+    "slices"
     "strings"
 
     "code.gitea.io/gitea/modules/util"

@@ -18,10 +20,10 @@ import (
 const sniffLen = 1024
 
 const (
-    // SvgMimeType MIME type of SVG images.
-    SvgMimeType = "image/svg+xml"
-    // ApplicationOctetStream MIME type of binary files.
-    ApplicationOctetStream = "application/octet-stream"
+    MimeTypeImageSvg  = "image/svg+xml"
+    MimeTypeImageAvif = "image/avif"
+
+    MimeTypeApplicationOctetStream = "application/octet-stream"
 )
 
 var (

@@ -47,7 +49,7 @@ func (ct SniffedType) IsImage() bool {
 
 // IsSvgImage detects if data is an SVG image format
 func (ct SniffedType) IsSvgImage() bool {
-    return strings.Contains(ct.contentType, SvgMimeType)
+    return strings.Contains(ct.contentType, MimeTypeImageSvg)
 }
 
 // IsPDF detects if data is a PDF format

@@ -81,6 +83,26 @@ func (ct SniffedType) GetMimeType() string {
     return strings.SplitN(ct.contentType, ";", 2)[0]
 }
 
+// https://en.wikipedia.org/wiki/ISO_base_media_file_format#File_type_box
+func detectFileTypeBox(data []byte) (brands []string, found bool) {
+    if len(data) < 12 {
+        return nil, false
+    }
+    boxSize := int(binary.BigEndian.Uint32(data[:4]))
+    if boxSize < 12 || boxSize > len(data) {
+        return nil, false
+    }
+    tag := string(data[4:8])
+    if tag != "ftyp" {
+        return nil, false
+    }
+    brands = append(brands, string(data[8:12]))
+    for i := 16; i+4 <= boxSize; i += 4 {
+        brands = append(brands, string(data[i:i+4]))
+    }
+    return brands, true
+}
+
 // DetectContentType extends http.DetectContentType with more content types. Defaults to text/unknown if input is empty.
 func DetectContentType(data []byte) SniffedType {
     if len(data) == 0 {

@@ -94,7 +116,6 @@ func DetectContentType(data []byte) SniffedType {
     }
 
     // SVG is unsupported by http.DetectContentType, https://github.com/golang/go/issues/15888
-
     detectByHTML := strings.Contains(ct, "text/plain") || strings.Contains(ct, "text/html")
     detectByXML := strings.Contains(ct, "text/xml")
     if detectByHTML || detectByXML {

@@ -102,7 +123,7 @@ func DetectContentType(data []byte) SniffedType {
         dataProcessed = bytes.TrimSpace(dataProcessed)
         if detectByHTML && svgTagRegex.Match(dataProcessed) ||
             detectByXML && svgTagInXMLRegex.Match(dataProcessed) {
-            ct = SvgMimeType
+            ct = MimeTypeImageSvg
         }
     }
 

@@ -116,6 +137,11 @@ func DetectContentType(data []byte) SniffedType {
         }
     }
 
+    fileTypeBrands, found := detectFileTypeBox(data)
+    if found && slices.Contains(fileTypeBrands, "avif") {
+        ct = MimeTypeImageAvif
+    }
+
     if ct == "application/ogg" {
         dataHead := data
         if len(dataHead) > 256 {

@@ -134,3 +134,33 @@ func TestDetectContentTypeOgg(t *testing.T) {
     assert.NoError(t, err)
     assert.True(t, st.IsVideo())
 }
+
+func TestDetectFileTypeBox(t *testing.T) {
+    _, found := detectFileTypeBox([]byte("\x00\x00\xff\xffftypAAAA...."))
+    assert.False(t, found)
+
+    brands, found := detectFileTypeBox([]byte("\x00\x00\x00\x0cftypAAAA"))
+    assert.True(t, found)
+    assert.Equal(t, []string{"AAAA"}, brands)
+
+    brands, found = detectFileTypeBox([]byte("\x00\x00\x00\x10ftypAAAA....BBBB"))
+    assert.True(t, found)
+    assert.Equal(t, []string{"AAAA"}, brands)
+
+    brands, found = detectFileTypeBox([]byte("\x00\x00\x00\x14ftypAAAA....BBBB"))
+    assert.True(t, found)
+    assert.Equal(t, []string{"AAAA", "BBBB"}, brands)
+
+    _, found = detectFileTypeBox([]byte("\x00\x00\x00\x14ftypAAAA....BBB"))
+    assert.False(t, found)
+
+    brands, found = detectFileTypeBox([]byte("\x00\x00\x00\x13ftypAAAA....BBB"))
+    assert.True(t, found)
+    assert.Equal(t, []string{"AAAA"}, brands)
+}
+
+func TestDetectContentTypeAvif(t *testing.T) {
+    buf := []byte("\x00\x00\x00\x20ftypavif.......................")
+    st := DetectContentType(buf)
+    assert.Equal(t, MimeTypeImageAvif, st.contentType)
+}

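To make the byte layout behind detectFileTypeBox and its tests easier to follow: an ISO BMFF file starts with a file-type box whose first 4 bytes are a big-endian box size, followed by the ASCII tag "ftyp", a 4-byte major brand at offset 8, a 4-byte minor version at offset 12, and 4-byte compatible brands from offset 16 up to the box size (which is why the 0x10-sized test vector yields only the major brand). The standalone sketch below builds and walks such a box; the helper name `buildFtypBox` is invented for the example and is independent of the Gitea code.

```go
package main

import (
	"encoding/binary"
	"fmt"
)

// buildFtypBox assembles a minimal ISO BMFF file-type box: size, "ftyp",
// major brand, minor version, then any compatible brands (4 bytes each).
func buildFtypBox(major string, compatible ...string) []byte {
	size := 16 + 4*len(compatible)
	box := make([]byte, 0, size)
	box = binary.BigEndian.AppendUint32(box, uint32(size))
	box = append(box, "ftyp"...)
	box = append(box, major...)      // must be exactly 4 bytes, e.g. "avif"
	box = append(box, 0, 0, 0, 0)    // minor version, ignored by the sniffer
	for _, b := range compatible {
		box = append(box, b...)
	}
	return box
}

func main() {
	data := buildFtypBox("avif", "avif", "mif1")
	boxSize := binary.BigEndian.Uint32(data[:4])
	fmt.Printf("box size: %d, tag: %s, major brand: %s\n", boxSize, data[4:8], data[8:12])
	for i := 16; i+4 <= int(boxSize); i += 4 {
		fmt.Printf("compatible brand: %s\n", data[i:i+4])
	}
}
```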
@@ -47,11 +47,12 @@ func RenderMarkup(ctx *context.Base, repo *context.Repository, mode, text, urlPa
     switch mode {
     case "gfm": // legacy mode, do nothing
     case "comment":
-        renderCtx.ContentMode = markup.RenderContentAsComment
+        renderCtx.Metas = map[string]string{"markdownLineBreakStyle": "comment"}
     case "wiki":
-        renderCtx.ContentMode = markup.RenderContentAsWiki
+        renderCtx.Metas = map[string]string{"markdownLineBreakStyle": "document", "markupContentMode": "wiki"}
     case "file":
         // render the repo file content by its extension
+        renderCtx.Metas = map[string]string{"markdownLineBreakStyle": "document"}
         renderCtx.MarkupType = ""
         renderCtx.RelativePath = filePath
         renderCtx.InStandalonePage = true

@@ -74,10 +75,12 @@ func RenderMarkup(ctx *context.Base, repo *context.Repository, mode, text, urlPa
 
     if repo != nil && repo.Repository != nil {
         renderCtx.Repo = repo.Repository
-        if renderCtx.ContentMode == markup.RenderContentAsComment {
-            renderCtx.Metas = repo.Repository.ComposeMetas(ctx)
-        } else {
+        if mode == "file" {
             renderCtx.Metas = repo.Repository.ComposeDocumentMetas(ctx)
+        } else if mode == "wiki" {
+            renderCtx.Metas = repo.Repository.ComposeWikiMetas(ctx)
+        } else if mode == "comment" {
+            renderCtx.Metas = repo.Repository.ComposeMetas(ctx)
         }
     }
     if err := markup.Render(renderCtx, strings.NewReader(text), ctx.Resp); err != nil {

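Taken together, the two hunks above replace the old ContentMode field with per-mode metas. As a reading aid only, the net mode-to-metas selection once a repository is known can be condensed roughly as below; the helper and the interface are simplifications of mine, and the assumption that the Compose*Metas methods accept a context.Context is inferred from the call sites shown in the diff, not asserted.

```go
package sketch

import "context"

// metasComposer captures just the three methods used above; in the real
// code the receiver is the repository model.
type metasComposer interface {
	ComposeMetas(ctx context.Context) map[string]string
	ComposeWikiMetas(ctx context.Context) map[string]string
	ComposeDocumentMetas(ctx context.Context) map[string]string
}

// composeMetasForMode condenses the switch introduced above.
func composeMetasForMode(ctx context.Context, mode string, repo metasComposer) map[string]string {
	switch mode {
	case "comment":
		return repo.ComposeMetas(ctx) // comment line breaks, issue references
	case "wiki":
		return repo.ComposeWikiMetas(ctx) // document line breaks, wiki content mode
	case "file":
		return repo.ComposeDocumentMetas(ctx) // document line breaks
	default:
		return nil // legacy "gfm" and unknown modes are handled elsewhere in the real code
	}
}
```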
@@ -122,6 +122,8 @@ func SignInOAuthCallback(ctx *context.Context) {
         }
         if err, ok := err.(*go_oauth2.RetrieveError); ok {
             ctx.Flash.Error("OAuth2 RetrieveError: "+err.Error(), true)
+            ctx.Redirect(setting.AppSubURL + "/user/login")
+            return
         }
         ctx.ServerError("UserSignIn", err)
         return

@@ -56,7 +56,7 @@ func renderMarkdown(ctx *context.Context, act *activities_model.Action, content
         Links: markup.Links{
             Base: act.GetRepoLink(ctx),
         },
-        Metas: map[string]string{
+        Metas: map[string]string{ // FIXME: not right here, it should use issue to compose the metas
             "user": act.GetRepoUserName(ctx),
             "repo": act.GetRepoName(ctx),
         },

@@ -46,9 +46,7 @@ func showUserFeed(ctx *context.Context, formatType string) {
         Links: markup.Links{
             Base: ctx.ContextUser.HTMLURL(),
         },
-        Metas: map[string]string{
-            "user": ctx.ContextUser.GetDisplayName(),
-        },
+        Metas: markup.ComposeSimpleDocumentMetas(),
     }, ctx.ContextUser.Description)
     if err != nil {
         ctx.ServerError("RenderString", err)

@@ -189,7 +189,7 @@ func prepareOrgProfileReadme(ctx *context.Context, viewRepositories bool) bool {
             Base:       profileDbRepo.Link(),
             BranchPath: path.Join("branch", util.PathEscapeSegments(profileDbRepo.DefaultBranch)),
         },
-        Metas: map[string]string{"mode": "document"},
+        Metas: markup.ComposeSimpleDocumentMetas(),
     }, bytes); err != nil {
         log.Error("failed to RenderString: %v", err)
     } else {

@@ -290,8 +290,7 @@ func renderViewPage(ctx *context.Context) (*git.Repository, *git.TreeEntry) {
 
     rctx := &markup.RenderContext{
         Ctx:         ctx,
-        ContentMode: markup.RenderContentAsWiki,
-        Metas:       ctx.Repo.Repository.ComposeDocumentMetas(ctx),
+        Metas: ctx.Repo.Repository.ComposeWikiMetas(ctx),
         Links: markup.Links{
             Base: ctx.Repo.RepoLink,
         },

@@ -50,7 +50,7 @@ func PrepareContextForProfileBigAvatar(ctx *context.Context) {
     ctx.Data["OpenIDs"] = openIDs
     if len(ctx.ContextUser.Description) != 0 {
         content, err := markdown.RenderString(&markup.RenderContext{
-            Metas: map[string]string{"mode": "document"},
+            Metas: markup.ComposeSimpleDocumentMetas(),
             Ctx:   ctx,
         }, ctx.ContextUser.Description)
         if err != nil {

@@ -5,6 +5,7 @@
 package auth
 
 import (
+    "errors"
     "net/http"
     "strings"
 

@@ -141,6 +142,15 @@ func (b *Basic) Verify(req *http.Request, w http.ResponseWriter, store DataStore
     }
 
     if skipper, ok := source.Cfg.(LocalTwoFASkipper); !ok || !skipper.IsSkipLocalTwoFA() {
+        // Check if the user has webAuthn registration
+        hasWebAuthn, err := auth_model.HasWebAuthnRegistrationsByUID(req.Context(), u.ID)
+        if err != nil {
+            return nil, err
+        }
+        if hasWebAuthn {
+            return nil, errors.New("Basic authorization is not allowed while webAuthn enrolled")
+        }
+
         if err := validateTOTP(req, u); err != nil {
             return nil, err
         }

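The Basic.Verify change above means an account with an enrolled WebAuthn credential can no longer authenticate API or Git HTTP requests with only a username and password. A client-side sketch of what that looks like against an instance follows; the URL and credentials are placeholders, and the 401 outcome assumes the account really has a WebAuthn credential registered.

```go
package main

import (
	"fmt"
	"net/http"
)

func main() {
	// Placeholder endpoint and credentials; any route behind basic auth behaves the same.
	req, err := http.NewRequest(http.MethodGet, "https://gitea.example.com/api/v1/user", nil)
	if err != nil {
		panic(err)
	}
	req.SetBasicAuth("someuser", "correct-password")

	resp, err := http.DefaultClient.Do(req)
	if err != nil {
		panic(err)
	}
	defer resp.Body.Close()

	// With a WebAuthn credential enrolled the server is expected to answer
	// 401 Unauthorized even though the password is correct.
	fmt.Println("status:", resp.Status)
}
```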
@@ -68,7 +68,7 @@ func (e RepoRefNotFoundError) Is(err error) bool {
 }
 
 // NewRequest creates an archival request, based on the URI. The
-// resulting ArchiveRequest is suitable for being passed to ArchiveRepository()
+// resulting ArchiveRequest is suitable for being passed to Await()
 // if it's determined that the request still needs to be satisfied.
 func NewRequest(repoID int64, repo *git.Repository, uri string) (*ArchiveRequest, error) {
     r := &ArchiveRequest{

@@ -151,13 +151,14 @@ func (aReq *ArchiveRequest) Await(ctx context.Context) (*repo_model.RepoArchiver
         }
 }
 
+// doArchive satisfies the ArchiveRequest being passed in. Processing
+// will occur in a separate goroutine, as this phase may take a while to
+// complete. If the archive already exists, doArchive will not do
+// anything. In all cases, the caller should be examining the *ArchiveRequest
+// being returned for completion, as it may be different than the one they passed
+// in.
 func doArchive(ctx context.Context, r *ArchiveRequest) (*repo_model.RepoArchiver, error) {
-    txCtx, committer, err := db.TxContext(ctx)
-    if err != nil {
-        return nil, err
-    }
-    defer committer.Close()
-    ctx, _, finished := process.GetManager().AddContext(txCtx, fmt.Sprintf("ArchiveRequest[%d]: %s", r.RepoID, r.GetArchiveName()))
+    ctx, _, finished := process.GetManager().AddContext(ctx, fmt.Sprintf("ArchiveRequest[%d]: %s", r.RepoID, r.GetArchiveName()))
     defer finished()
 
     archiver, err := repo_model.GetRepoArchiver(ctx, r.RepoID, r.Type, r.CommitID)

@@ -192,7 +193,7 @@ func doArchive(ctx context.Context, r *ArchiveRequest) (*repo_model.RepoArchiver
                 return nil, err
             }
         }
-        return archiver, committer.Commit()
+        return archiver, nil
     }
 
     if !errors.Is(err, os.ErrNotExist) {

@@ -261,17 +262,7 @@ func doArchive(ctx context.Context, r *ArchiveRequest) (*repo_model.RepoArchiver
         }
     }
 
-    return archiver, committer.Commit()
-}
-
-// ArchiveRepository satisfies the ArchiveRequest being passed in. Processing
-// will occur in a separate goroutine, as this phase may take a while to
-// complete. If the archive already exists, ArchiveRepository will not do
-// anything. In all cases, the caller should be examining the *ArchiveRequest
-// being returned for completion, as it may be different than the one they passed
-// in.
-func ArchiveRepository(ctx context.Context, request *ArchiveRequest) (*repo_model.RepoArchiver, error) {
-    return doArchive(ctx, request)
+    return archiver, nil
 }
 
 var archiverQueue *queue.WorkerPoolQueue[*ArchiveRequest]

@@ -281,8 +272,10 @@ func Init(ctx context.Context) error {
     handler := func(items ...*ArchiveRequest) []*ArchiveRequest {
         for _, archiveReq := range items {
             log.Trace("ArchiverData Process: %#v", archiveReq)
-            if _, err := doArchive(ctx, archiveReq); err != nil {
+            if archiver, err := doArchive(ctx, archiveReq); err != nil {
                 log.Error("Archive %v failed: %v", archiveReq, err)
+            } else {
+                log.Trace("ArchiverData Success: %#v", archiver)
             }
         }
         return nil

@@ -80,13 +80,13 @@ func TestArchive_Basic(t *testing.T) {
     inFlight[1] = tgzReq
     inFlight[2] = secondReq
 
-    ArchiveRepository(db.DefaultContext, zipReq)
-    ArchiveRepository(db.DefaultContext, tgzReq)
-    ArchiveRepository(db.DefaultContext, secondReq)
+    doArchive(db.DefaultContext, zipReq)
+    doArchive(db.DefaultContext, tgzReq)
+    doArchive(db.DefaultContext, secondReq)
 
     // Make sure sending an unprocessed request through doesn't affect the queue
     // count.
-    ArchiveRepository(db.DefaultContext, zipReq)
+    doArchive(db.DefaultContext, zipReq)
 
     // Sleep two seconds to make sure the queue doesn't change.
     time.Sleep(2 * time.Second)

@@ -101,7 +101,7 @@ func TestArchive_Basic(t *testing.T) {
     // We still have the other three stalled at completion, waiting to remove
     // from archiveInProgress. Try to submit this new one before its
     // predecessor has cleared out of the queue.
-    ArchiveRepository(db.DefaultContext, zipReq2)
+    doArchive(db.DefaultContext, zipReq2)
 
     // Now we'll submit a request and TimedWaitForCompletion twice, before and
     // after we release it. We should trigger both the timeout and non-timeout

@@ -109,7 +109,7 @@ func TestArchive_Basic(t *testing.T) {
     timedReq, err := NewRequest(ctx.Repo.Repository.ID, ctx.Repo.GitRepo, secondCommit+".tar.gz")
     assert.NoError(t, err)
     assert.NotNil(t, timedReq)
-    ArchiveRepository(db.DefaultContext, timedReq)
+    doArchive(db.DefaultContext, timedReq)
 
     zipReq2, err = NewRequest(ctx.Repo.Repository.ID, ctx.Repo.GitRepo, firstCommit+".zip")
     assert.NoError(t, err)

@@ -1,3 +1,3 @@
 <div class="field {{if not .item.VisibleOnForm}}tw-hidden{{end}}">
-    <div>{{ctx.RenderUtils.MarkdownToHtml .item.Attributes.value}}</div>
+    <div class="markup">{{ctx.RenderUtils.MarkdownToHtml .item.Attributes.value}}</div>
 </div>

@@ -1,5 +1,5 @@
 {{template "base/head" .}}
-<div role="main" aria-label="{{.Title}}" class="page-content repository projects edit-project new milestone">
+<div role="main" aria-label="{{.Title}}" class="page-content repository projects edit-project new">
     {{template "repo/header" .}}
     <div class="ui container">
         {{template "projects/new" .}}

@@ -29,7 +29,7 @@
     <div class="file-header-left tw-flex tw-items-center tw-py-2 tw-pr-4">
         {{if .ReadmeInList}}
             {{svg "octicon-book" 16 "tw-mr-2"}}
-            <strong><a class="default-link muted" href="#readme">{{.FileName}}</a></strong>
+            <strong><a class="muted" href="#readme">{{.FileName}}</a></strong>
         {{else}}
             {{template "repo/file_info" .}}
         {{end}}

@@ -99,18 +99,8 @@ We appreciate that some testing machines may not be very powerful and
 the default timeouts for declaring a slow test or a slow clean-up flush
 may not be appropriate.
 
-You can either:
+You can set the following environment variables:
 
-* Within the test ini file set the following section:
-
-```ini
-[integration-tests]
-SLOW_TEST = 10s ; 10s is the default value
-SLOW_FLUSH = 5S ; 5s is the default value
-```
-
-* Set the following environment variables:
-
 ```bash
-GITEA_SLOW_TEST_TIME="10s" GITEA_SLOW_FLUSH_TIME="5s" make test-sqlite
+GITEA_TEST_SLOW_RUN="10s" GITEA_TEST_SLOW_FLUSH="1s" make test-sqlite
 ```

@@ -39,11 +39,11 @@ func TestAPIGetRawFileOrLFS(t *testing.T) {
 
         t.Run("Partial Clone", doPartialGitClone(dstPath2, u))
 
-        lfs := lfsCommitAndPushTest(t, dstPath, littleSize)[0]
+        lfs := lfsCommitAndPushTest(t, dstPath, testFileSizeSmall)[0]
 
         reqLFS := NewRequest(t, "GET", "/api/v1/repos/user2/repo1/media/"+lfs)
         respLFS := MakeRequestNilResponseRecorder(t, reqLFS, http.StatusOK)
-        assert.Equal(t, littleSize, respLFS.Length)
+        assert.Equal(t, testFileSizeSmall, respLFS.Length)
 
         doAPIDeleteRepository(httpContext)
     })

@@ -53,3 +53,56 @@ func TestAPITwoFactor(t *testing.T) {
     req.Header.Set("X-Gitea-OTP", passcode)
     MakeRequest(t, req, http.StatusOK)
 }
+
+func TestBasicAuthWithWebAuthn(t *testing.T) {
+    defer tests.PrepareTestEnv(t)()
+
+    // user1 has no webauthn enrolled, he can request API with basic auth
+    user1 := unittest.AssertExistsAndLoadBean(t, &user_model.User{ID: 1})
+    unittest.AssertNotExistsBean(t, &auth_model.WebAuthnCredential{UserID: user1.ID})
+    req := NewRequest(t, "GET", "/api/v1/user")
+    req.SetBasicAuth(user1.Name, "password")
+    MakeRequest(t, req, http.StatusOK)
+
+    // user1 has no webauthn enrolled, he can request git protocol with basic auth
+    req = NewRequest(t, "GET", "/user2/repo1/info/refs")
+    req.SetBasicAuth(user1.Name, "password")
+    MakeRequest(t, req, http.StatusOK)
+
+    // user1 has no webauthn enrolled, he can request container package with basic auth
+    req = NewRequest(t, "GET", "/v2/token")
+    req.SetBasicAuth(user1.Name, "password")
+    resp := MakeRequest(t, req, http.StatusOK)
+
+    type tokenResponse struct {
+        Token string `json:"token"`
+    }
+    var tokenParsed tokenResponse
+    DecodeJSON(t, resp, &tokenParsed)
+    assert.NotEmpty(t, tokenParsed.Token)
+
+    // user32 has webauthn enrolled, he can't request API with basic auth
+    user32 := unittest.AssertExistsAndLoadBean(t, &user_model.User{ID: 32})
+    unittest.AssertExistsAndLoadBean(t, &auth_model.WebAuthnCredential{UserID: user32.ID})
+
+    req = NewRequest(t, "GET", "/api/v1/user")
+    req.SetBasicAuth(user32.Name, "notpassword")
+    resp = MakeRequest(t, req, http.StatusUnauthorized)
+
+    type userResponse struct {
+        Message string `json:"message"`
+    }
+    var userParsed userResponse
+    DecodeJSON(t, resp, &userParsed)
+    assert.EqualValues(t, "Basic authorization is not allowed while webAuthn enrolled", userParsed.Message)
+
+    // user32 has webauthn enrolled, he can't request git protocol with basic auth
+    req = NewRequest(t, "GET", "/user2/repo1/info/refs")
+    req.SetBasicAuth(user32.Name, "notpassword")
+    MakeRequest(t, req, http.StatusUnauthorized)
+
+    // user32 has webauthn enrolled, he can't request container package with basic auth
+    req = NewRequest(t, "GET", "/v2/token")
+    req.SetBasicAuth(user1.Name, "notpassword")
+    MakeRequest(t, req, http.StatusUnauthorized)
+}

@@ -4,9 +4,10 @@
 package integration
 
 import (
-    "crypto/rand"
     "encoding/hex"
     "fmt"
+    "io"
+    mathRand "math/rand/v2"
     "net/http"
     "net/url"
     "os"

@@ -34,8 +35,8 @@ import (
 )
 
 const (
-    littleSize = 1024              // 1K
-    bigSize    = 128 * 1024 * 1024 // 128M
+    testFileSizeSmall = 10
+    testFileSizeLarge = 10 * 1024 * 1024 // 10M
 )
 
 func TestGitGeneral(t *testing.T) {

@@ -73,8 +74,8 @@ func testGitGeneral(t *testing.T, u *url.URL) {
 
         t.Run("Partial Clone", doPartialGitClone(dstPath2, u))
 
-        pushedFilesStandard := standardCommitAndPushTest(t, dstPath, littleSize, bigSize)
-        pushedFilesLFS := lfsCommitAndPushTest(t, dstPath, littleSize, bigSize)
+        pushedFilesStandard := standardCommitAndPushTest(t, dstPath, testFileSizeSmall, testFileSizeLarge)
+        pushedFilesLFS := lfsCommitAndPushTest(t, dstPath, testFileSizeSmall, testFileSizeLarge)
         rawTest(t, &httpContext, pushedFilesStandard[0], pushedFilesStandard[1], pushedFilesLFS[0], pushedFilesLFS[1])
         mediaTest(t, &httpContext, pushedFilesStandard[0], pushedFilesStandard[1], pushedFilesLFS[0], pushedFilesLFS[1])
 

@@ -114,8 +115,8 @@ func testGitGeneral(t *testing.T, u *url.URL) {
 
         t.Run("Clone", doGitClone(dstPath, sshURL))
 
-        pushedFilesStandard := standardCommitAndPushTest(t, dstPath, littleSize, bigSize)
-        pushedFilesLFS := lfsCommitAndPushTest(t, dstPath, littleSize, bigSize)
+        pushedFilesStandard := standardCommitAndPushTest(t, dstPath, testFileSizeSmall, testFileSizeLarge)
+        pushedFilesLFS := lfsCommitAndPushTest(t, dstPath, testFileSizeSmall, testFileSizeLarge)
         rawTest(t, &sshContext, pushedFilesStandard[0], pushedFilesStandard[1], pushedFilesLFS[0], pushedFilesLFS[1])
         mediaTest(t, &sshContext, pushedFilesStandard[0], pushedFilesStandard[1], pushedFilesLFS[0], pushedFilesLFS[1])
 

@@ -202,14 +203,14 @@ func rawTest(t *testing.T, ctx *APITestContext, little, big, littleLFS, bigLFS s
         // Request raw paths
         req := NewRequest(t, "GET", path.Join("/", username, reponame, "/raw/branch/master/", little))
         resp := session.MakeRequestNilResponseRecorder(t, req, http.StatusOK)
-        assert.Equal(t, littleSize, resp.Length)
+        assert.Equal(t, testFileSizeSmall, resp.Length)
 
         if setting.LFS.StartServer {
             req = NewRequest(t, "GET", path.Join("/", username, reponame, "/raw/branch/master/", littleLFS))
             resp := session.MakeRequest(t, req, http.StatusOK)
-            assert.NotEqual(t, littleSize, resp.Body.Len())
+            assert.NotEqual(t, testFileSizeSmall, resp.Body.Len())
             assert.LessOrEqual(t, resp.Body.Len(), 1024)
-            if resp.Body.Len() != littleSize && resp.Body.Len() <= 1024 {
+            if resp.Body.Len() != testFileSizeSmall && resp.Body.Len() <= 1024 {
                 assert.Contains(t, resp.Body.String(), lfs.MetaFileIdentifier)
             }
         }

@@ -217,13 +218,13 @@ func rawTest(t *testing.T, ctx *APITestContext, little, big, littleLFS, bigLFS s
         if !testing.Short() {
             req = NewRequest(t, "GET", path.Join("/", username, reponame, "/raw/branch/master/", big))
             resp := session.MakeRequestNilResponseRecorder(t, req, http.StatusOK)
-            assert.Equal(t, bigSize, resp.Length)
+            assert.Equal(t, testFileSizeLarge, resp.Length)
 
             if setting.LFS.StartServer {
                 req = NewRequest(t, "GET", path.Join("/", username, reponame, "/raw/branch/master/", bigLFS))
                 resp := session.MakeRequest(t, req, http.StatusOK)
-                assert.NotEqual(t, bigSize, resp.Body.Len())
-                if resp.Body.Len() != bigSize && resp.Body.Len() <= 1024 {
+                assert.NotEqual(t, testFileSizeLarge, resp.Body.Len())
+                if resp.Body.Len() != testFileSizeLarge && resp.Body.Len() <= 1024 {
                     assert.Contains(t, resp.Body.String(), lfs.MetaFileIdentifier)
                 }
             }

@@ -243,21 +244,21 @@ func mediaTest(t *testing.T, ctx *APITestContext, little, big, littleLFS, bigLFS
         // Request media paths
         req := NewRequest(t, "GET", path.Join("/", username, reponame, "/media/branch/master/", little))
         resp := session.MakeRequestNilResponseRecorder(t, req, http.StatusOK)
-        assert.Equal(t, littleSize, resp.Length)
+        assert.Equal(t, testFileSizeSmall, resp.Length)
 
         req = NewRequest(t, "GET", path.Join("/", username, reponame, "/media/branch/master/", littleLFS))
         resp = session.MakeRequestNilResponseRecorder(t, req, http.StatusOK)
-        assert.Equal(t, littleSize, resp.Length)
+        assert.Equal(t, testFileSizeSmall, resp.Length)
 
         if !testing.Short() {
             req = NewRequest(t, "GET", path.Join("/", username, reponame, "/media/branch/master/", big))
             resp = session.MakeRequestNilResponseRecorder(t, req, http.StatusOK)
-            assert.Equal(t, bigSize, resp.Length)
+            assert.Equal(t, testFileSizeLarge, resp.Length)
 
             if setting.LFS.StartServer {
                 req = NewRequest(t, "GET", path.Join("/", username, reponame, "/media/branch/master/", bigLFS))
                 resp = session.MakeRequestNilResponseRecorder(t, req, http.StatusOK)
-                assert.Equal(t, bigSize, resp.Length)
+                assert.Equal(t, testFileSizeLarge, resp.Length)
             }
         }
     })

@@ -287,35 +288,19 @@ func doCommitAndPush(t *testing.T, size int, repoPath, prefix string) string {
 }
 
 func generateCommitWithNewData(size int, repoPath, email, fullName, prefix string) (string, error) {
-    // Generate random file
-    bufSize := 4 * 1024
-    if bufSize > size {
-        bufSize = size
-    }
-
-    buffer := make([]byte, bufSize)
-
     tmpFile, err := os.CreateTemp(repoPath, prefix)
     if err != nil {
         return "", err
     }
     defer tmpFile.Close()
-    written := 0
-    for written < size {
-        n := size - written
-        if n > bufSize {
-            n = bufSize
-        }
-        _, err := rand.Read(buffer[:n])
+    var seed [32]byte
+    rander := mathRand.NewChaCha8(seed) // for testing only, no need to seed
+    _, err = io.CopyN(tmpFile, rander, int64(size))
     if err != nil {
         return "", err
     }
-        n, err = tmpFile.Write(buffer[:n])
-        if err != nil {
-            return "", err
-        }
-        written += n
-    }
+    _ = tmpFile.Close()
 
     // Commit
     // Now here we should explicitly allow lfs filters to run

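The rewritten generateCommitWithNewData above swaps crypto/rand plus a hand-rolled copy loop for math/rand/v2's ChaCha8 stream piped through io.CopyN. A standalone sketch of the same idea follows; the file name prefix and size are arbitrary, and it assumes a Go toolchain where *rand.ChaCha8 implements io.Reader (as the diff itself relies on).

```go
package main

import (
	"fmt"
	"io"
	mathRand "math/rand/v2"
	"os"
)

func main() {
	// A fixed (all-zero) seed gives a repeatable byte stream, which is fine
	// for test fixtures where the content only has to be "random-looking".
	var seed [32]byte
	rander := mathRand.NewChaCha8(seed)

	f, err := os.CreateTemp("", "random-data-")
	if err != nil {
		panic(err)
	}
	defer os.Remove(f.Name())

	// io.CopyN streams exactly size bytes from the ChaCha8 reader into the
	// file without managing an intermediate buffer by hand.
	const size = 10 * 1024
	if _, err := io.CopyN(f, rander, size); err != nil {
		panic(err)
	}
	if err := f.Close(); err != nil {
		panic(err)
	}
	fmt.Println("wrote", size, "bytes to", f.Name())
}
```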
@@ -355,7 +340,7 @@ func doBranchProtectPRMerge(baseCtx *APITestContext, dstPath string) func(t *tes
 
         // Try to push without permissions, which should fail
         t.Run("TryPushWithoutPermissions", func(t *testing.T) {
-            _, err := generateCommitWithNewData(littleSize, dstPath, "user2@example.com", "User Two", "branch-data-file-")
+            _, err := generateCommitWithNewData(testFileSizeSmall, dstPath, "user2@example.com", "User Two", "branch-data-file-")
             assert.NoError(t, err)
             doGitPushTestRepositoryFail(dstPath, "origin", "protected")
         })

@@ -367,7 +352,7 @@ func doBranchProtectPRMerge(baseCtx *APITestContext, dstPath string) func(t *tes
 
         // Normal push should work
         t.Run("NormalPushWithPermissions", func(t *testing.T) {
-            _, err := generateCommitWithNewData(littleSize, dstPath, "user2@example.com", "User Two", "branch-data-file-")
+            _, err := generateCommitWithNewData(testFileSizeSmall, dstPath, "user2@example.com", "User Two", "branch-data-file-")
             assert.NoError(t, err)
             doGitPushTestRepository(dstPath, "origin", "protected")
         })

@@ -376,7 +361,7 @@ func doBranchProtectPRMerge(baseCtx *APITestContext, dstPath string) func(t *tes
         t.Run("ForcePushWithoutForcePermissions", func(t *testing.T) {
             t.Run("CreateDivergentHistory", func(t *testing.T) {
                 git.NewCommand(git.DefaultContext, "reset", "--hard", "HEAD~1").Run(&git.RunOpts{Dir: dstPath})
-                _, err := generateCommitWithNewData(littleSize, dstPath, "user2@example.com", "User Two", "branch-data-file-new")
+                _, err := generateCommitWithNewData(testFileSizeSmall, dstPath, "user2@example.com", "User Two", "branch-data-file-new")
                 assert.NoError(t, err)
             })
             doGitPushTestRepositoryFail(dstPath, "-f", "origin", "protected")

@@ -411,7 +396,7 @@ func doBranchProtectPRMerge(baseCtx *APITestContext, dstPath string) func(t *tes
             assert.NoError(t, err)
         })
         t.Run("GenerateCommit", func(t *testing.T) {
-            _, err := generateCommitWithNewData(littleSize, dstPath, "user2@example.com", "User Two", "branch-data-file-")
+            _, err := generateCommitWithNewData(testFileSizeSmall, dstPath, "user2@example.com", "User Two", "branch-data-file-")
             assert.NoError(t, err)
         })
         t.Run("PushToUnprotectedBranch", doGitPushTestRepository(dstPath, "origin", "protected:unprotected-2"))

@@ -426,7 +411,7 @@ func doBranchProtectPRMerge(baseCtx *APITestContext, dstPath string) func(t *tes
 
         t.Run("ProtectProtectedBranchUnprotectedFilePaths", doProtectBranch(ctx, "protected", "", "", "unprotected-file-*"))
         t.Run("GenerateCommit", func(t *testing.T) {
-            _, err := generateCommitWithNewData(littleSize, dstPath, "user2@example.com", "User Two", "unprotected-file-")
+            _, err := generateCommitWithNewData(testFileSizeSmall, dstPath, "user2@example.com", "User Two", "unprotected-file-")
             assert.NoError(t, err)
         })
         t.Run("PushUnprotectedFilesToProtectedBranch", doGitPushTestRepository(dstPath, "origin", "protected"))

@@ -436,7 +421,7 @@ func doBranchProtectPRMerge(baseCtx *APITestContext, dstPath string) func(t *tes
         t.Run("CheckoutMaster", doGitCheckoutBranch(dstPath, "master"))
         t.Run("CreateBranchForced", doGitCreateBranch(dstPath, "toforce"))
         t.Run("GenerateCommit", func(t *testing.T) {
-            _, err := generateCommitWithNewData(littleSize, dstPath, "user2@example.com", "User Two", "branch-data-file-")
+            _, err := generateCommitWithNewData(testFileSizeSmall, dstPath, "user2@example.com", "User Two", "branch-data-file-")
             assert.NoError(t, err)
         })
         t.Run("FailToForcePushToProtectedBranch", doGitPushTestRepositoryFail(dstPath, "-f", "origin", "toforce:protected"))

@@ -649,7 +634,7 @@ func doAutoPRMerge(baseCtx *APITestContext, dstPath string) func(t *testing.T) {
         t.Run("CheckoutProtected", doGitCheckoutBranch(dstPath, "protected"))
         t.Run("PullProtected", doGitPull(dstPath, "origin", "protected"))
         t.Run("GenerateCommit", func(t *testing.T) {
-            _, err := generateCommitWithNewData(littleSize, dstPath, "user2@example.com", "User Two", "branch-data-file-")
+            _, err := generateCommitWithNewData(testFileSizeSmall, dstPath, "user2@example.com", "User Two", "branch-data-file-")
             assert.NoError(t, err)
         })
         t.Run("PushToUnprotectedBranch", doGitPushTestRepository(dstPath, "origin", "protected:unprotected3"))

@@ -98,7 +98,7 @@ func TestAgitPullPush(t *testing.T) {
         doGitCreateBranch(dstPath, "test-agit-push")
 
         // commit 1
-        _, err = generateCommitWithNewData(littleSize, dstPath, "user2@example.com", "User Two", "branch-data-file-")
+        _, err = generateCommitWithNewData(testFileSizeSmall, dstPath, "user2@example.com", "User Two", "branch-data-file-")
         assert.NoError(t, err)
 
         // push to create an agit pull request

@@ -115,7 +115,7 @@ func TestAgitPullPush(t *testing.T) {
         assert.Equal(t, "test-description", pr.Issue.Content)
 
         // commit 2
-        _, err = generateCommitWithNewData(littleSize, dstPath, "user2@example.com", "User Two", "branch-data-file-2-")
+        _, err = generateCommitWithNewData(testFileSizeSmall, dstPath, "user2@example.com", "User Two", "branch-data-file-2-")
         assert.NoError(t, err)
 
         // push 2

@@ -20,7 +20,6 @@ import (
     "strings"
     "sync/atomic"
     "testing"
-    "time"
 
     "code.gitea.io/gitea/models/auth"
     "code.gitea.io/gitea/models/unittest"

@@ -28,7 +27,6 @@ import (
     "code.gitea.io/gitea/modules/json"
     "code.gitea.io/gitea/modules/log"
     "code.gitea.io/gitea/modules/setting"
-    "code.gitea.io/gitea/modules/testlogger"
     "code.gitea.io/gitea/modules/util"
     "code.gitea.io/gitea/modules/web"
     "code.gitea.io/gitea/routers"

@@ -90,27 +88,6 @@ func TestMain(m *testing.M) {
     tests.InitTest(true)
     testWebRoutes = routers.NormalRoutes()
 
-    // integration test settings...
-    if setting.CfgProvider != nil {
-        testingCfg := setting.CfgProvider.Section("integration-tests")
-        testlogger.SlowTest = testingCfg.Key("SLOW_TEST").MustDuration(testlogger.SlowTest)
-        testlogger.SlowFlush = testingCfg.Key("SLOW_FLUSH").MustDuration(testlogger.SlowFlush)
-    }
-
-    if os.Getenv("GITEA_SLOW_TEST_TIME") != "" {
-        duration, err := time.ParseDuration(os.Getenv("GITEA_SLOW_TEST_TIME"))
-        if err == nil {
-            testlogger.SlowTest = duration
-        }
-    }
-
-    if os.Getenv("GITEA_SLOW_FLUSH_TIME") != "" {
-        duration, err := time.ParseDuration(os.Getenv("GITEA_SLOW_FLUSH_TIME"))
-        if err == nil {
-            testlogger.SlowFlush = duration
-        }
-    }
-
     os.Unsetenv("GIT_AUTHOR_NAME")
     os.Unsetenv("GIT_AUTHOR_EMAIL")
     os.Unsetenv("GIT_AUTHOR_DATE")

@@ -132,8 +109,6 @@ func TestMain(m *testing.M) {
     // Instead, "No tests were found", last nonsense log is "According to the configuration, subsequent logs will not be printed to the console"
     exitCode := m.Run()
 
-    testlogger.WriterCloser.Reset()
-
     if err = util.RemoveAll(setting.Indexer.IssuePath); err != nil {
         fmt.Printf("util.RemoveAll: %v\n", err)
         os.Exit(1)

@@ -6,6 +6,7 @@ package integration
 import (
     "context"
     "net/url"
+    "strconv"
     "strings"
     "testing"
     "time"

@@ -19,6 +20,7 @@ import (
     "code.gitea.io/gitea/modules/queue"
     repo_service "code.gitea.io/gitea/services/repository"
     files_service "code.gitea.io/gitea/services/repository/files"
+    "code.gitea.io/gitea/tests"
 
     "github.com/stretchr/testify/assert"
 )

@@ -218,8 +220,10 @@ func TestLinguist(t *testing.T) {
         }
 
         for i, c := range cases {
+            t.Run("Case-"+strconv.Itoa(i), func(t *testing.T) {
+                defer tests.PrintCurrentTest(t)()
             repo, err := repo_service.CreateRepository(db.DefaultContext, user, user, repo_service.CreateRepoOptions{
-                Name: "linguist-test",
+                Name: "linguist-test-" + strconv.Itoa(i),
             })
             assert.NoError(t, err)
 

@@ -252,8 +256,7 @@ func TestLinguist(t *testing.T) {
                 languages = append(languages, s.Language)
             }
             assert.Equal(t, c.ExpectedLanguageOrder, languages, "case %d: unexpected language stats", i)
-
-            assert.NoError(t, repo_service.DeleteRepository(db.DefaultContext, user, repo, false))
+            })
         }
     })
 }

@@ -28,33 +28,29 @@ import (
 	"code.gitea.io/gitea/modules/setting"
 	"code.gitea.io/gitea/modules/testlogger"
 	"code.gitea.io/gitea/modules/util"
-	"code.gitea.io/gitea/tests"
 
 	"github.com/stretchr/testify/assert"
+	"github.com/stretchr/testify/require"
 	"xorm.io/xorm"
 )
 
 var currentEngine *xorm.Engine
 
 func initMigrationTest(t *testing.T) func() {
-	log.RegisterEventWriter("test", testlogger.NewTestLoggerWriter)
+	testlogger.Init()
 
-	deferFn := tests.PrintCurrentTest(t, 2)
+	deferFn := testlogger.PrintCurrentTest(t, 2)
 	giteaRoot := base.SetupGiteaRoot()
 	if giteaRoot == "" {
-		tests.Printf("Environment variable $GITEA_ROOT not set\n")
-		os.Exit(1)
+		testlogger.Fatalf("Environment variable $GITEA_ROOT not set\n")
 	}
 	setting.AppPath = path.Join(giteaRoot, "gitea")
 	if _, err := os.Stat(setting.AppPath); err != nil {
-		tests.Printf("Could not find gitea binary at %s\n", setting.AppPath)
-		os.Exit(1)
+		testlogger.Fatalf(fmt.Sprintf("Could not find gitea binary at %s\n", setting.AppPath))
 	}
 
 	giteaConf := os.Getenv("GITEA_CONF")
 	if giteaConf == "" {
-		tests.Printf("Environment variable $GITEA_CONF not set\n")
-		os.Exit(1)
+		testlogger.Fatalf("Environment variable $GITEA_CONF not set\n")
 	} else if !path.IsAbs(giteaConf) {
 		setting.CustomConf = path.Join(giteaRoot, giteaConf)
 	} else {
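The hunk above replaces the repeated `tests.Printf(...)` plus `os.Exit(1)` pairs with single `testlogger.Fatalf(...)` calls. The `testlogger` implementation itself is not part of this diff; purely as an assumption, a fatal-log helper of this kind usually boils down to something like the sketch below, which is illustrative only and may differ from Gitea's real code.

```go
package testlogger

import (
	"fmt"
	"os"
)

// Fatalf prints a formatted message and aborts the test process.
// Illustrative sketch only; the actual testlogger.Fatalf may behave differently.
func Fatalf(format string, args ...any) {
	fmt.Fprintf(os.Stderr, format, args...)
	os.Exit(1)
}
```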
@@ -123,13 +119,10 @@ func readSQLFromFile(version string) (string, error) {
 	return string(charset.MaybeRemoveBOM(bytes, charset.ConvertOpts{})), nil
 }
 
-func restoreOldDB(t *testing.T, version string) bool {
+func restoreOldDB(t *testing.T, version string) {
 	data, err := readSQLFromFile(version)
-	assert.NoError(t, err)
-	if len(data) == 0 {
-		tests.Printf("No db found to restore for %s version: %s\n", setting.Database.Type, version)
-		return false
-	}
+	require.NoError(t, err)
+	require.NotEmpty(t, data, "No data found for %s version: %s", setting.Database.Type, version)
 
 	switch {
 	case setting.Database.Type.IsSQLite3():
@@ -197,15 +190,12 @@ func restoreOldDB(t *testing.T, version string) bool {
 		db, err = sql.Open("postgres", fmt.Sprintf("postgres://%s:%s@%s/%s?sslmode=%s",
 			setting.Database.User, setting.Database.Passwd, setting.Database.Host, setting.Database.Name, setting.Database.SSLMode))
 	}
-	if !assert.NoError(t, err) {
-		return false
-	}
+	require.NoError(t, err)
 	defer db.Close()
 
 	schrows, err := db.Query(fmt.Sprintf("SELECT 1 FROM information_schema.schemata WHERE schema_name = '%s'", setting.Database.Schema))
-	if !assert.NoError(t, err) || !assert.NotEmpty(t, schrows) {
-		return false
-	}
+	require.NoError(t, err)
+	require.NotEmpty(t, schrows)
 
 	if !schrows.Next() {
 		// Create and setup a DB schema
@@ -260,7 +250,6 @@ func restoreOldDB(t *testing.T, version string) bool {
 		}
 		db.Close()
 	}
-	return true
 }
 
 func wrappedMigrate(x *xorm.Engine) error {
@@ -269,11 +258,8 @@ func wrappedMigrate(x *xorm.Engine) error {
 }
 
 func doMigrationTest(t *testing.T, version string) {
-	defer tests.PrintCurrentTest(t)()
-	tests.Printf("Performing migration test for %s version: %s\n", setting.Database.Type, version)
-	if !restoreOldDB(t, version) {
-		return
-	}
+	defer testlogger.PrintCurrentTest(t)()
+	restoreOldDB(t, version)
 
 	setting.InitSQLLoggersForCli(log.INFO)
 
@@ -305,14 +291,9 @@ func TestMigrations(t *testing.T) {
 
 	dialect := setting.Database.Type
 	versions, err := availableVersions()
-	assert.NoError(t, err)
+	require.NoError(t, err)
+	require.NotEmpty(t, versions, "No old database versions available to migration test for %s", dialect)
 
-	if len(versions) == 0 {
-		tests.Printf("No old database versions available to migration test for %s\n", dialect)
-		return
-	}
-
-	tests.Printf("Preparing to test %d migrations for %s\n", len(versions), dialect)
 	for _, version := range versions {
 		t.Run(fmt.Sprintf("Migrate-%s-%s", dialect, version), func(t *testing.T) {
 			doMigrationTest(t, version)
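The migration-test hunks above switch from testify's `assert.*` to `require.*`, which is why `restoreOldDB` no longer needs a boolean return: `require` marks the test failed and stops it immediately via `t.FailNow()`, while `assert` records the failure and lets execution continue. A small standalone illustration of that difference (not Gitea code; the test fails on purpose to show the behaviour):

```go
package example

import (
	"errors"
	"testing"

	"github.com/stretchr/testify/assert"
	"github.com/stretchr/testify/require"
)

func TestAssertVsRequire(t *testing.T) {
	err := errors.New("boom")

	assert.NoError(t, err)  // marks the test failed, but execution continues
	require.NoError(t, err) // marks the test failed and calls t.FailNow(), so nothing below runs

	t.Log("never reached when err != nil")
}
```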
@@ -209,8 +209,6 @@ func checkRecentlyPushedNewBranches(t *testing.T, session *TestSession, repoPath
 }
 
 func TestRecentlyPushedNewBranches(t *testing.T) {
-	defer tests.PrepareTestEnv(t)()
-
 	onGiteaRun(t, func(t *testing.T, u *url.URL) {
 		user1Session := loginUser(t, "user1")
 		user2Session := loginUser(t, "user2")
@@ -29,17 +29,12 @@ import (
 	"github.com/stretchr/testify/assert"
 )
 
-func exitf(format string, args ...any) {
-	fmt.Printf(format+"\n", args...)
-	os.Exit(1)
-}
-
 func InitTest(requireGitea bool) {
-	log.RegisterEventWriter("test", testlogger.NewTestLoggerWriter)
+	testlogger.Init()
 
 	giteaRoot := base.SetupGiteaRoot()
 	if giteaRoot == "" {
-		exitf("Environment variable $GITEA_ROOT not set")
+		testlogger.Fatalf("Environment variable $GITEA_ROOT not set\n")
 	}
 
 	// TODO: Speedup tests that rely on the event source ticker, confirm whether there is any bug or failure.
@@ -54,7 +49,7 @@ func InitTest(requireGitea bool) {
 		}
 		setting.AppPath = filepath.Join(giteaRoot, giteaBinary)
 		if _, err := os.Stat(setting.AppPath); err != nil {
-			exitf("Could not find gitea binary at %s", setting.AppPath)
+			testlogger.Fatalf("Could not find gitea binary at %s\n", setting.AppPath)
 		}
 	}
 	giteaConf := os.Getenv("GITEA_CONF")
@@ -66,7 +61,7 @@ func InitTest(requireGitea bool) {
 		_ = os.Setenv("GITEA_CONF", giteaConf)
 		fmt.Printf("Environment variable $GITEA_CONF not set, use default: %s\n", giteaConf)
 		if !setting.EnableSQLite3 {
-			exitf(`sqlite3 requires: import _ "github.com/mattn/go-sqlite3" or -tags sqlite,sqlite_unlock_notify`)
+			testlogger.Fatalf(`sqlite3 requires: import _ "github.com/mattn/go-sqlite3" or -tags sqlite,sqlite_unlock_notify` + "\n")
 		}
 	}
 	if !filepath.IsAbs(giteaConf) {
@@ -85,7 +80,7 @@ func InitTest(requireGitea bool) {
 
 	setting.LoadDBSetting()
 	if err := storage.Init(); err != nil {
-		exitf("Init storage failed: %v", err)
+		testlogger.Fatalf("Init storage failed: %v\n", err)
 	}
 
 	switch {
@@ -258,8 +253,3 @@ func PrintCurrentTest(t testing.TB, skip ...int) func() {
 	t.Helper()
 	return testlogger.PrintCurrentTest(t, util.OptionalArg(skip)+1)
 }
-
-// Printf takes a format and args and prints the string to os.Stdout
-func Printf(format string, args ...any) {
-	testlogger.Printf(format, args...)
-}
@@ -53,6 +53,7 @@
   display: flex;
   flex-direction: column;
   flex: 1;
+  min-width: 0;
 }
 
 .ui.comments .comment > .avatar ~ .content {
@@ -118,7 +118,7 @@ test('encodeURLEncodedBase64, decodeURLEncodedBase64', () => {
 });
 
 test('file detection', () => {
-  for (const name of ['a.jpg', '/a.jpeg', '.file.png', '.webp', 'file.svg']) {
+  for (const name of ['a.avif', 'a.jpg', '/a.jpeg', '.file.png', '.webp', 'file.svg']) {
     expect(isImageFile({name})).toBeTruthy();
   }
   for (const name of ['', 'a.jpg.x', '/path.png/x', 'webp']) {
@@ -165,7 +165,7 @@ export function sleep(ms: number): Promise<void> {
 }
 
 export function isImageFile({name, type}: {name: string, type?: string}): boolean {
-  return /\.(jpe?g|png|gif|webp|svg|heic)$/i.test(name || '') || type?.startsWith('image/');
+  return /\.(avif|jpe?g|png|gif|webp|svg|heic)$/i.test(name || '') || type?.startsWith('image/');
 }
 
 export function isVideoFile({name, type}: {name: string, type?: string}): boolean {