Merge branch 'main' into lunny/rename_board_column
commit 3d0fad89c7
373 changed files with 6817 additions and 10320 deletions
@@ -2,9 +2,10 @@ root = "."
tmp_dir = ".air"

[build]
pre_cmd = ["killall -9 gitea 2>/dev/null || true"] # kill off potential zombie processes from previous runs
cmd = "make --no-print-directory backend"
bin = "gitea"
delay = 1000
delay = 2000
include_ext = ["go", "tmpl"]
include_file = ["main.go"]
include_dir = ["cmd", "models", "modules", "options", "routers", "services"]

@@ -95,6 +95,9 @@ cpu.out
/.air
/.go-licenses

# Files and folders that were previously generated
/public/assets/img/webpack

# Snapcraft
snap/.snapcraft/
parts/

@@ -318,7 +318,7 @@ rules:
jquery/no-serialize: [2]
jquery/no-show: [2]
jquery/no-size: [2]
jquery/no-sizzle: [0]
jquery/no-sizzle: [2]
jquery/no-slide: [0]
jquery/no-submit: [0]
jquery/no-text: [0]

@@ -470,7 +470,7 @@ rules:
no-jquery/no-selector-prop: [2]
no-jquery/no-serialize: [2]
no-jquery/no-size: [2]
no-jquery/no-sizzle: [0]
no-jquery/no-sizzle: [2]
no-jquery/no-slide: [2]
no-jquery/no-sub: [2]
no-jquery/no-support: [2]

@@ -537,7 +537,7 @@ rules:
no-underscore-dangle: [0]
no-unexpected-multiline: [2]
no-unmodified-loop-condition: [2]
no-unneeded-ternary: [0]
no-unneeded-ternary: [2]
no-unreachable-loop: [2]
no-unreachable: [2]
no-unsafe-finally: [2]

@@ -716,12 +716,14 @@ rules:
unicorn/import-style: [0]
unicorn/new-for-builtins: [2]
unicorn/no-abusive-eslint-disable: [0]
unicorn/no-anonymous-default-export: [0]
unicorn/no-array-callback-reference: [0]
unicorn/no-array-for-each: [2]
unicorn/no-array-method-this-argument: [2]
unicorn/no-array-push-push: [2]
unicorn/no-array-reduce: [2]
unicorn/no-await-expression-member: [0]
unicorn/no-await-in-promise-methods: [2]
unicorn/no-console-spaces: [0]
unicorn/no-document-cookie: [2]
unicorn/no-empty-file: [2]

@@ -738,6 +740,7 @@ rules:
unicorn/no-null: [0]
unicorn/no-object-as-default-parameter: [0]
unicorn/no-process-exit: [0]
unicorn/no-single-promise-in-promise-methods: [2]
unicorn/no-static-only-class: [2]
unicorn/no-thenable: [2]
unicorn/no-this-assignment: [2]

.gitattributes (vendored, 1 change)

@@ -1,5 +1,6 @@
* text=auto eol=lf
*.tmpl linguist-language=Handlebars
*.pb.go linguist-generated
/assets/*.json linguist-generated
/public/assets/img/svg/*.svg linguist-generated
/templates/swagger/v1_json.tmpl linguist-generated

.gitignore (vendored, 3 changes)

@@ -94,6 +94,9 @@ cpu.out
/.air
/.go-licenses

# Files and folders that were previously generated
/public/assets/img/webpack

# Snapcraft
/gitea_a*.txt
snap/.snapcraft/

@@ -30,10 +30,6 @@ linters:

run:
timeout: 10m
skip-dirs:
- node_modules
- public
- web_src

linters-settings:
stylecheck:

@@ -90,10 +86,13 @@ linters-settings:
desc: do not use the internal package, use AddXxx function instead
- pkg: gopkg.in/ini.v1
desc: do not use the ini package, use gitea's config system instead
- pkg: gitea.com/go-chi/cache
desc: do not use the go-chi cache package, use gitea's cache system

issues:
max-issues-per-linter: 0
max-same-issues: 0
exclude-dirs: [node_modules, public, web_src]
exclude-rules:
# Exclude some linters from running on tests files.
- path: _test\.go

.ignore (4 changes)

@@ -4,6 +4,8 @@
/modules/options/bindata.go
/modules/public/bindata.go
/modules/templates/bindata.go
/vendor
/options/gitignore
/options/license
/public/assets
/vendor
node_modules

Makefile (12 changes)

@@ -25,17 +25,17 @@ COMMA := ,

XGO_VERSION := go-1.22.x

AIR_PACKAGE ?= github.com/cosmtrek/air@v1.49.0
AIR_PACKAGE ?= github.com/cosmtrek/air@v1
EDITORCONFIG_CHECKER_PACKAGE ?= github.com/editorconfig-checker/editorconfig-checker/cmd/editorconfig-checker@2.7.0
GOFUMPT_PACKAGE ?= mvdan.cc/gofumpt@v0.6.0
GOLANGCI_LINT_PACKAGE ?= github.com/golangci/golangci-lint/cmd/golangci-lint@v1.56.1
GOLANGCI_LINT_PACKAGE ?= github.com/golangci/golangci-lint/cmd/golangci-lint@v1.57.2
GXZ_PACKAGE ?= github.com/ulikunitz/xz/cmd/gxz@v0.5.11
MISSPELL_PACKAGE ?= github.com/golangci/misspell/cmd/misspell@v0.4.1
SWAGGER_PACKAGE ?= github.com/go-swagger/go-swagger/cmd/swagger@db51e79a0e37c572d8b59ae0c58bf2bbbbe53285
XGO_PACKAGE ?= src.techknowlogick.com/xgo@latest
GO_LICENSES_PACKAGE ?= github.com/google/go-licenses@v1.6.0
GOVULNCHECK_PACKAGE ?= golang.org/x/vuln/cmd/govulncheck@v1.0.3
ACTIONLINT_PACKAGE ?= github.com/rhysd/actionlint/cmd/actionlint@v1.6.26
GO_LICENSES_PACKAGE ?= github.com/google/go-licenses@v1
GOVULNCHECK_PACKAGE ?= golang.org/x/vuln/cmd/govulncheck@v1
ACTIONLINT_PACKAGE ?= github.com/rhysd/actionlint/cmd/actionlint@v1

DOCKER_IMAGE ?= gitea/gitea
DOCKER_TAG ?= latest

@@ -295,7 +295,7 @@ clean:

.PHONY: fmt
fmt:
GOFUMPT_PACKAGE=$(GOFUMPT_PACKAGE) $(GO) run build/code-batch-process.go gitea-fmt -w '{file-list}'
@GOFUMPT_PACKAGE=$(GOFUMPT_PACKAGE) $(GO) run build/code-batch-process.go gitea-fmt -w '{file-list}'
$(eval TEMPLATES := $(shell find templates -type f -name '*.tmpl'))
@# strip whitespace after '{{' or '(' and before '}}' or ')' unless there is only
@# whitespace before it

assets/go-licenses.json (generated, 15 changes)

@@ -304,11 +304,6 @@
"path": "github.com/davecgh/go-spew/spew/LICENSE",
"licenseText": "ISC License\n\nCopyright (c) 2012-2016 Dave Collins \u003cdave@davec.name\u003e\n\nPermission to use, copy, modify, and/or distribute this software for any\npurpose with or without fee is hereby granted, provided that the above\ncopyright notice and this permission notice appear in all copies.\n\nTHE SOFTWARE IS PROVIDED \"AS IS\" AND THE AUTHOR DISCLAIMS ALL WARRANTIES\nWITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF\nMERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR\nANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES\nWHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN\nACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF\nOR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE.\n"
},
{
"name": "github.com/denisenkom/go-mssqldb",
"path": "github.com/denisenkom/go-mssqldb/LICENSE.txt",
"licenseText": "Copyright (c) 2012 The Go Authors. All rights reserved.\n\nRedistribution and use in source and binary forms, with or without\nmodification, are permitted provided that the following conditions are\nmet:\n\n * Redistributions of source code must retain the above copyright\nnotice, this list of conditions and the following disclaimer.\n * Redistributions in binary form must reproduce the above\ncopyright notice, this list of conditions and the following disclaimer\nin the documentation and/or other materials provided with the\ndistribution.\n * Neither the name of Google Inc. nor the names of its\ncontributors may be used to endorse or promote products derived from\nthis software without specific prior written permission.\n\nTHIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS\n\"AS IS\" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT\nLIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR\nA PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT\nOWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,\nSPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT\nLIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,\nDATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY\nTHEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT\n(INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE\nOF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.\n"
},
{
"name": "github.com/dgryski/go-rendezvous",
"path": "github.com/dgryski/go-rendezvous/LICENSE",

@@ -759,6 +754,16 @@
"path": "github.com/microcosm-cc/bluemonday/LICENSE.md",
"licenseText": "SPDX short identifier: BSD-3-Clause\nhttps://opensource.org/licenses/BSD-3-Clause\n\nCopyright (c) 2014, David Kitchen \u003cdavid@buro9.com\u003e\n\nAll rights reserved.\n\nRedistribution and use in source and binary forms, with or without\nmodification, are permitted provided that the following conditions are met:\n\n* Redistributions of source code must retain the above copyright notice, this\n list of conditions and the following disclaimer.\n\n* Redistributions in binary form must reproduce the above copyright notice,\n this list of conditions and the following disclaimer in the documentation\n and/or other materials provided with the distribution.\n\n* Neither the name of the organisation (Microcosm) nor the names of its\n contributors may be used to endorse or promote products derived from\n this software without specific prior written permission.\n\nTHIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS \"AS IS\"\nAND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE\nIMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE\nDISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE\nFOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL\nDAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR\nSERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER\nCAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY,\nOR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE\nOF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.\n"
},
{
"name": "github.com/microsoft/go-mssqldb",
"path": "github.com/microsoft/go-mssqldb/LICENSE.txt",
"licenseText": "Copyright (c) 2012 The Go Authors. All rights reserved.\nCopyright (c) Microsoft Corporation.\n\nRedistribution and use in source and binary forms, with or without\nmodification, are permitted provided that the following conditions are\nmet:\n\n * Redistributions of source code must retain the above copyright\nnotice, this list of conditions and the following disclaimer.\n * Redistributions in binary form must reproduce the above\ncopyright notice, this list of conditions and the following disclaimer\nin the documentation and/or other materials provided with the\ndistribution.\n * Neither the name of Google Inc. nor the names of its\ncontributors may be used to endorse or promote products derived from\nthis software without specific prior written permission.\n\nTHIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS\n\"AS IS\" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT\nLIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR\nA PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT\nOWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,\nSPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT\nLIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,\nDATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY\nTHEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT\n(INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE\nOF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.\n"
},
{
"name": "github.com/microsoft/go-mssqldb/internal/github.com/swisscom/mssql-always-encrypted/pkg",
"path": "github.com/microsoft/go-mssqldb/internal/github.com/swisscom/mssql-always-encrypted/pkg/LICENSE.txt",
"licenseText": "Copyright (c) 2021 Swisscom (Switzerland) Ltd\n\nPermission is hereby granted, free of charge, to any person obtaining a copy of\nthis software and associated documentation files (the \"Software\"), to deal in\nthe Software without restriction, including without limitation the rights to\nuse, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of\nthe Software, and to permit persons to whom the Software is furnished to do so,\nsubject to the following conditions:\n\nThe above copyright notice and this permission notice shall be included in all\ncopies or substantial portions of the Software.\n\nTHE SOFTWARE IS PROVIDED \"AS IS\", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR\nIMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS\nFOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR\nCOPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER\nIN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN\nCONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.\n\n\n"
},
{
"name": "github.com/miekg/dns",
"path": "github.com/miekg/dns/LICENSE",

@@ -69,6 +69,7 @@ func newFileCollector(fileFilter string, batchSize int) (*fileCollector, error)
co.includePatterns = append(co.includePatterns, regexp.MustCompile(`.*\.go$`))

co.excludePatterns = append(co.excludePatterns, regexp.MustCompile(`.*\bbindata\.go$`))
co.excludePatterns = append(co.excludePatterns, regexp.MustCompile(`\.pb\.go$`))
co.excludePatterns = append(co.excludePatterns, regexp.MustCompile(`tests/gitea-repositories-meta`))
co.excludePatterns = append(co.excludePatterns, regexp.MustCompile(`tests/integration/migration-test`))
co.excludePatterns = append(co.excludePatterns, regexp.MustCompile(`modules/git/tests`))

@@ -203,17 +204,6 @@ Example:
`, "file-batch-exec")
}

func getGoVersion() string {
goModFile, err := os.ReadFile("go.mod")
if err != nil {
log.Fatalf(`Faild to read "go.mod": %v`, err)
os.Exit(1)
}
goModVersionRegex := regexp.MustCompile(`go \d+\.\d+`)
goModVersionLine := goModVersionRegex.Find(goModFile)
return string(goModVersionLine[3:])
}

func newFileCollectorFromMainOptions(mainOptions map[string]string) (fc *fileCollector, err error) {
fileFilter := mainOptions["file-filter"]
if fileFilter == "" {

@@ -278,7 +268,8 @@ func main() {
log.Print("the -d option is not supported by gitea-fmt")
}
cmdErrors = append(cmdErrors, giteaFormatGoImports(files, containsString(subArgs, "-w")))
cmdErrors = append(cmdErrors, passThroughCmd("go", append([]string{"run", os.Getenv("GOFUMPT_PACKAGE"), "-extra", "-lang", getGoVersion()}, substArgs...)))
cmdErrors = append(cmdErrors, passThroughCmd("gofmt", append([]string{"-w", "-r", "interface{} -> any"}, substArgs...)))
cmdErrors = append(cmdErrors, passThroughCmd("go", append([]string{"run", os.Getenv("GOFUMPT_PACKAGE"), "-extra"}, substArgs...)))
default:
log.Fatalf("unknown cmd: %s %v", subCmd, subArgs)
}

@@ -36,6 +36,7 @@ var microcmdUserChangePassword = &cli.Command{
&cli.BoolFlag{
Name: "must-change-password",
Usage: "User must change password",
Value: true,
},
},
}

@@ -57,23 +58,18 @@ func runChangePassword(c *cli.Context) error {
return err
}

var mustChangePassword optional.Option[bool]
if c.IsSet("must-change-password") {
mustChangePassword = optional.Some(c.Bool("must-change-password"))
}

opts := &user_service.UpdateAuthOptions{
Password: optional.Some(c.String("password")),
MustChangePassword: mustChangePassword,
MustChangePassword: optional.Some(c.Bool("must-change-password")),
}
if err := user_service.UpdateAuth(ctx, user, opts); err != nil {
switch {
case errors.Is(err, password.ErrMinLength):
return fmt.Errorf("Password is not long enough. Needs to be at least %d", setting.MinPasswordLength)
return fmt.Errorf("password is not long enough, needs to be at least %d characters", setting.MinPasswordLength)
case errors.Is(err, password.ErrComplexity):
return errors.New("Password does not meet complexity requirements")
return errors.New("password does not meet complexity requirements")
case errors.Is(err, password.ErrIsPwned):
return errors.New("The password you chose is on a list of stolen passwords previously exposed in public data breaches. Please try again with a different password.\nFor more details, see https://haveibeenpwned.com/Passwords")
return errors.New("the password is in a list of stolen passwords previously exposed in public data breaches, please try again with a different password, to see more details: https://haveibeenpwned.com/Passwords")
default:
return err
}

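The change-password hunk above works together with the new `Value: true` on the flag definition: `c.Bool("must-change-password")` now returns `true` even when the flag is not passed, so the command always sends an explicit `optional.Some(...)` value to `UpdateAuthOptions` instead of only setting it when `c.IsSet(...)` is true. The following sketch is not Gitea code; it is a minimal standalone program, assuming only `github.com/urfave/cli/v2` (the CLI library these commands already use), that shows how `IsSet` and `Bool` behave around such a default.

```go
package main

import (
	"fmt"
	"os"

	"github.com/urfave/cli/v2"
)

func main() {
	app := &cli.App{
		Name: "flag-demo", // hypothetical demo app, not part of Gitea
		Flags: []cli.Flag{
			&cli.BoolFlag{
				Name:  "must-change-password",
				Value: true, // same default the new flag definition declares
			},
		},
		Action: func(c *cli.Context) error {
			// No flag given:                set=false value=true
			// --must-change-password:       set=true  value=true
			// --must-change-password=false: set=true  value=false
			fmt.Printf("set=%v value=%v\n",
				c.IsSet("must-change-password"), c.Bool("must-change-password"))
			return nil
		},
	}
	if err := app.Run(os.Args); err != nil {
		fmt.Fprintln(os.Stderr, err)
		os.Exit(1)
	}
}
```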
@@ -8,6 +8,7 @@ import (
"fmt"

auth_model "code.gitea.io/gitea/models/auth"
"code.gitea.io/gitea/models/db"
user_model "code.gitea.io/gitea/models/user"
pwd "code.gitea.io/gitea/modules/auth/password"
"code.gitea.io/gitea/modules/optional"

@@ -46,8 +47,9 @@ var microcmdUserCreate = &cli.Command{
Usage: "Generate a random password for the user",
},
&cli.BoolFlag{
Name: "must-change-password",
Usage: "Set this option to false to prevent forcing the user to change their password after initial login, (Default: true)",
Name: "must-change-password",
Usage: "Set to false to prevent forcing the user to change their password after initial login",
DisableDefaultText: true,
},
&cli.IntFlag{
Name: "random-password-length",

@@ -71,10 +73,10 @@ func runCreateUser(c *cli.Context) error {
}

if c.IsSet("name") && c.IsSet("username") {
return errors.New("Cannot set both --name and --username flags")
return errors.New("cannot set both --name and --username flags")
}
if !c.IsSet("name") && !c.IsSet("username") {
return errors.New("One of --name or --username flags must be set")
return errors.New("one of --name or --username flags must be set")
}

if c.IsSet("password") && c.IsSet("random-password") {

@@ -110,17 +112,21 @@ func runCreateUser(c *cli.Context) error {
return errors.New("must set either password or random-password flag")
}

// always default to true
changePassword := true

// If this is the first user being created.
// Take it as the admin and don't force a password update.
if n := user_model.CountUsers(ctx, nil); n == 0 {
changePassword = false
}

isAdmin := c.Bool("admin")
mustChangePassword := true // always default to true
if c.IsSet("must-change-password") {
changePassword = c.Bool("must-change-password")
// if the flag is set, use the value provided by the user
mustChangePassword = c.Bool("must-change-password")
} else {
// check whether there are users in the database
hasUserRecord, err := db.IsTableNotEmpty(&user_model.User{})
if err != nil {
return fmt.Errorf("IsTableNotEmpty: %w", err)
}
if !hasUserRecord && isAdmin {
// if this is the first admin being created, don't force to change password (keep the old behavior)
mustChangePassword = false
}
}

restricted := optional.None[bool]()

@@ -136,8 +142,8 @@ func runCreateUser(c *cli.Context) error {
Name: username,
Email: c.String("email"),
Passwd: password,
IsAdmin: c.Bool("admin"),
MustChangePassword: changePassword,
IsAdmin: isAdmin,
MustChangePassword: mustChangePassword,
Visibility: visibility,
}

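The `runCreateUser` hunk above replaces the old `CountUsers == 0` shortcut with a slightly longer decision: an explicit `--must-change-password` value always wins, and otherwise the password change is forced unless the user table is still empty and the account being created is an admin. As a reading aid, here is that decision restated as a small pure function; this is a hypothetical helper for illustration, not code from the commit, and the `db.IsTableNotEmpty` lookup is replaced by a plain boolean parameter.

```go
package main

import "fmt"

// mustChangePassword restates the decision made in the runCreateUser hunk:
// an explicit --must-change-password value always wins; otherwise the new
// user is forced to change the password unless the user table is still empty
// and the account being created is an admin (the "first admin" keeps the old
// behavior and is not forced).
func mustChangePassword(flagSet, flagValue, hasUserRecord, isAdmin bool) bool {
	if flagSet {
		return flagValue
	}
	if !hasUserRecord && isAdmin {
		return false
	}
	return true
}

func main() {
	fmt.Println(mustChangePassword(false, false, false, true)) // first admin: false
	fmt.Println(mustChangePassword(false, false, true, false)) // later user: true
	fmt.Println(mustChangePassword(true, false, true, false))  // explicit =false: false
}
```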
cmd/dump.go (296 changes)

@@ -6,14 +6,13 @@ package cmd

import (
"fmt"
"io"
"os"
"path"
"path/filepath"
"strings"
"time"

"code.gitea.io/gitea/models/db"
"code.gitea.io/gitea/modules/dump"
"code.gitea.io/gitea/modules/json"
"code.gitea.io/gitea/modules/log"
"code.gitea.io/gitea/modules/setting"
@@ -25,89 +24,17 @@ import (
|
|||
"github.com/urfave/cli/v2"
|
||||
)
|
||||
|
||||
func addReader(w archiver.Writer, r io.ReadCloser, info os.FileInfo, customName string, verbose bool) error {
|
||||
if verbose {
|
||||
log.Info("Adding file %s", customName)
|
||||
}
|
||||
|
||||
return w.Write(archiver.File{
|
||||
FileInfo: archiver.FileInfo{
|
||||
FileInfo: info,
|
||||
CustomName: customName,
|
||||
},
|
||||
ReadCloser: r,
|
||||
})
|
||||
}
|
||||
|
||||
func addFile(w archiver.Writer, filePath, absPath string, verbose bool) error {
|
||||
file, err := os.Open(absPath)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
defer file.Close()
|
||||
fileInfo, err := file.Stat()
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
|
||||
return addReader(w, file, fileInfo, filePath, verbose)
|
||||
}
|
||||
|
||||
func isSubdir(upper, lower string) (bool, error) {
|
||||
if relPath, err := filepath.Rel(upper, lower); err != nil {
|
||||
return false, err
|
||||
} else if relPath == "." || !strings.HasPrefix(relPath, ".") {
|
||||
return true, nil
|
||||
}
|
||||
return false, nil
|
||||
}
|
||||
|
||||
type outputType struct {
|
||||
Enum []string
|
||||
Default string
|
||||
selected string
|
||||
}
|
||||
|
||||
func (o outputType) Join() string {
|
||||
return strings.Join(o.Enum, ", ")
|
||||
}
|
||||
|
||||
func (o *outputType) Set(value string) error {
|
||||
for _, enum := range o.Enum {
|
||||
if enum == value {
|
||||
o.selected = value
|
||||
return nil
|
||||
}
|
||||
}
|
||||
|
||||
return fmt.Errorf("allowed values are %s", o.Join())
|
||||
}
|
||||
|
||||
func (o outputType) String() string {
|
||||
if o.selected == "" {
|
||||
return o.Default
|
||||
}
|
||||
return o.selected
|
||||
}
|
||||
|
||||
var outputTypeEnum = &outputType{
|
||||
Enum: []string{"zip", "tar", "tar.sz", "tar.gz", "tar.xz", "tar.bz2", "tar.br", "tar.lz4", "tar.zst"},
|
||||
Default: "zip",
|
||||
}
|
||||
|
||||
// CmdDump represents the available dump sub-command.
|
||||
var CmdDump = &cli.Command{
|
||||
Name: "dump",
|
||||
Usage: "Dump Gitea files and database",
|
||||
Description: `Dump compresses all related files and database into zip file.
|
||||
It can be used for backup and capture Gitea server image to send to maintainer`,
|
||||
Action: runDump,
|
||||
Name: "dump",
|
||||
Usage: "Dump Gitea files and database",
|
||||
Description: `Dump compresses all related files and database into zip file. It can be used for backup and capture Gitea server image to send to maintainer`,
|
||||
Action: runDump,
|
||||
Flags: []cli.Flag{
|
||||
&cli.StringFlag{
|
||||
Name: "file",
|
||||
Aliases: []string{"f"},
|
||||
Value: fmt.Sprintf("gitea-dump-%d.zip", time.Now().Unix()),
|
||||
Usage: "Name of the dump file which will be created. Supply '-' for stdout. See type for available types.",
|
||||
Usage: `Name of the dump file which will be created, default to "gitea-dump-{time}.zip". Supply '-' for stdout. See type for available types.`,
|
||||
},
|
||||
&cli.BoolFlag{
|
||||
Name: "verbose",
|
||||
|
@@ -160,64 +87,52 @@ It can be used for backup and capture Gitea server image to send to maintainer`,
|
|||
Name: "skip-index",
|
||||
Usage: "Skip bleve index data",
|
||||
},
|
||||
&cli.GenericFlag{
|
||||
&cli.StringFlag{
|
||||
Name: "type",
|
||||
Value: outputTypeEnum,
|
||||
Usage: fmt.Sprintf("Dump output format: %s", outputTypeEnum.Join()),
|
||||
Usage: fmt.Sprintf(`Dump output format, default to "zip", supported types: %s`, strings.Join(dump.SupportedOutputTypes, ", ")),
|
||||
},
|
||||
},
|
||||
}
|
||||
|
||||
func fatal(format string, args ...any) {
|
||||
fmt.Fprintf(os.Stderr, format+"\n", args...)
|
||||
log.Fatal(format, args...)
|
||||
}
|
||||
|
||||
func runDump(ctx *cli.Context) error {
|
||||
var file *os.File
|
||||
fileName := ctx.String("file")
|
||||
outType := ctx.String("type")
|
||||
if fileName == "-" {
|
||||
file = os.Stdout
|
||||
setupConsoleLogger(log.FATAL, log.CanColorStderr, os.Stderr)
|
||||
} else {
|
||||
for _, suffix := range outputTypeEnum.Enum {
|
||||
if strings.HasSuffix(fileName, "."+suffix) {
|
||||
fileName = strings.TrimSuffix(fileName, "."+suffix)
|
||||
break
|
||||
}
|
||||
}
|
||||
fileName += "." + outType
|
||||
}
|
||||
setting.MustInstalled()
|
||||
|
||||
// make sure we are logging to the console no matter what the configuration tells us do to
|
||||
// FIXME: don't use CfgProvider directly
|
||||
if _, err := setting.CfgProvider.Section("log").NewKey("MODE", "console"); err != nil {
|
||||
fatal("Setting logging mode to console failed: %v", err)
|
||||
}
|
||||
if _, err := setting.CfgProvider.Section("log.console").NewKey("STDERR", "true"); err != nil {
|
||||
fatal("Setting console logger to stderr failed: %v", err)
|
||||
}
|
||||
|
||||
// Set loglevel to Warn if quiet-mode is requested
|
||||
if ctx.Bool("quiet") {
|
||||
if _, err := setting.CfgProvider.Section("log.console").NewKey("LEVEL", "Warn"); err != nil {
|
||||
fatal("Setting console log-level failed: %v", err)
|
||||
}
|
||||
}
|
||||
|
||||
if !setting.InstallLock {
|
||||
log.Error("Is '%s' really the right config path?\n", setting.CustomConf)
|
||||
return fmt.Errorf("gitea is not initialized")
|
||||
}
|
||||
setting.LoadSettings() // cannot access session settings otherwise
|
||||
|
||||
quite := ctx.Bool("quiet")
|
||||
verbose := ctx.Bool("verbose")
|
||||
if verbose && ctx.Bool("quiet") {
|
||||
return fmt.Errorf("--quiet and --verbose cannot both be set")
|
||||
if verbose && quite {
|
||||
fatal("Option --quiet and --verbose cannot both be set")
|
||||
}
|
||||
|
||||
// outFileName is either "-" or a file name (will be made absolute)
|
||||
outFileName, outType := dump.PrepareFileNameAndType(ctx.String("file"), ctx.String("type"))
|
||||
if outType == "" {
|
||||
fatal("Invalid output type")
|
||||
}
|
||||
|
||||
outFile := os.Stdout
|
||||
if outFileName != "-" {
|
||||
var err error
|
||||
if outFileName, err = filepath.Abs(outFileName); err != nil {
|
||||
fatal("Unable to get absolute path of dump file: %v", err)
|
||||
}
|
||||
if exist, _ := util.IsExist(outFileName); exist {
|
||||
fatal("Dump file %q exists", outFileName)
|
||||
}
|
||||
if outFile, err = os.Create(outFileName); err != nil {
|
||||
fatal("Unable to create dump file %q: %v", outFileName, err)
|
||||
}
|
||||
defer outFile.Close()
|
||||
}
|
||||
|
||||
setupConsoleLogger(util.Iif(quite, log.WARN, log.INFO), log.CanColorStderr, os.Stderr)
|
||||
|
||||
setting.DisableLoggerInit()
|
||||
setting.LoadSettings() // cannot access session settings otherwise
|
||||
|
||||
stdCtx, cancel := installSignals()
|
||||
defer cancel()
|
||||
|
||||
|
@@ -226,44 +141,32 @@ func runDump(ctx *cli.Context) error {
|
|||
return err
|
||||
}
|
||||
|
||||
if err := storage.Init(); err != nil {
|
||||
if err = storage.Init(); err != nil {
|
||||
return err
|
||||
}
|
||||
|
||||
if file == nil {
|
||||
file, err = os.Create(fileName)
|
||||
if err != nil {
|
||||
fatal("Unable to open %s: %v", fileName, err)
|
||||
}
|
||||
}
|
||||
defer file.Close()
|
||||
|
||||
absFileName, err := filepath.Abs(fileName)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
|
||||
var iface any
|
||||
if fileName == "-" {
|
||||
iface, err = archiver.ByExtension(fmt.Sprintf(".%s", outType))
|
||||
} else {
|
||||
iface, err = archiver.ByExtension(fileName)
|
||||
}
|
||||
archiverGeneric, err := archiver.ByExtension("." + outType)
|
||||
if err != nil {
|
||||
fatal("Unable to get archiver for extension: %v", err)
|
||||
}
|
||||
|
||||
w, _ := iface.(archiver.Writer)
|
||||
if err := w.Create(file); err != nil {
|
||||
archiverWriter := archiverGeneric.(archiver.Writer)
|
||||
if err := archiverWriter.Create(outFile); err != nil {
|
||||
fatal("Creating archiver.Writer failed: %v", err)
|
||||
}
|
||||
defer w.Close()
|
||||
defer archiverWriter.Close()
|
||||
|
||||
dumper := &dump.Dumper{
|
||||
Writer: archiverWriter,
|
||||
Verbose: verbose,
|
||||
}
|
||||
dumper.GlobalExcludeAbsPath(outFileName)
|
||||
|
||||
if ctx.IsSet("skip-repository") && ctx.Bool("skip-repository") {
|
||||
log.Info("Skip dumping local repositories")
|
||||
} else {
|
||||
log.Info("Dumping local repositories... %s", setting.RepoRootPath)
|
||||
if err := addRecursiveExclude(w, "repos", setting.RepoRootPath, []string{absFileName}, verbose); err != nil {
|
||||
if err := dumper.AddRecursiveExclude("repos", setting.RepoRootPath, nil); err != nil {
|
||||
fatal("Failed to include repositories: %v", err)
|
||||
}
|
||||
|
||||
|
@@ -276,8 +179,7 @@ func runDump(ctx *cli.Context) error {
|
|||
if err != nil {
|
||||
return err
|
||||
}
|
||||
|
||||
return addReader(w, object, info, path.Join("data", "lfs", objPath), verbose)
|
||||
return dumper.AddReader(object, info, path.Join("data", "lfs", objPath))
|
||||
}); err != nil {
|
||||
fatal("Failed to dump LFS objects: %v", err)
|
||||
}
|
||||
|
@@ -310,15 +212,13 @@ func runDump(ctx *cli.Context) error {
|
|||
fatal("Failed to dump database: %v", err)
|
||||
}
|
||||
|
||||
if err := addFile(w, "gitea-db.sql", dbDump.Name(), verbose); err != nil {
|
||||
if err = dumper.AddFile("gitea-db.sql", dbDump.Name()); err != nil {
|
||||
fatal("Failed to include gitea-db.sql: %v", err)
|
||||
}
|
||||
|
||||
if len(setting.CustomConf) > 0 {
|
||||
log.Info("Adding custom configuration file from %s", setting.CustomConf)
|
||||
if err := addFile(w, "app.ini", setting.CustomConf, verbose); err != nil {
|
||||
fatal("Failed to include specified app.ini: %v", err)
|
||||
}
|
||||
log.Info("Adding custom configuration file from %s", setting.CustomConf)
|
||||
if err = dumper.AddFile("app.ini", setting.CustomConf); err != nil {
|
||||
fatal("Failed to include specified app.ini: %v", err)
|
||||
}
|
||||
|
||||
if ctx.IsSet("skip-custom-dir") && ctx.Bool("skip-custom-dir") {
|
||||
|
@@ -326,8 +226,8 @@ func runDump(ctx *cli.Context) error {
|
|||
} else {
|
||||
customDir, err := os.Stat(setting.CustomPath)
|
||||
if err == nil && customDir.IsDir() {
|
||||
if is, _ := isSubdir(setting.AppDataPath, setting.CustomPath); !is {
|
||||
if err := addRecursiveExclude(w, "custom", setting.CustomPath, []string{absFileName}, verbose); err != nil {
|
||||
if is, _ := dump.IsSubdir(setting.AppDataPath, setting.CustomPath); !is {
|
||||
if err := dumper.AddRecursiveExclude("custom", setting.CustomPath, nil); err != nil {
|
||||
fatal("Failed to include custom: %v", err)
|
||||
}
|
||||
} else {
|
||||
|
@@ -364,8 +264,7 @@ func runDump(ctx *cli.Context) error {
|
|||
excludes = append(excludes, setting.Attachment.Storage.Path)
|
||||
excludes = append(excludes, setting.Packages.Storage.Path)
|
||||
excludes = append(excludes, setting.Log.RootPath)
|
||||
excludes = append(excludes, absFileName)
|
||||
if err := addRecursiveExclude(w, "data", setting.AppDataPath, excludes, verbose); err != nil {
|
||||
if err := dumper.AddRecursiveExclude("data", setting.AppDataPath, excludes); err != nil {
|
||||
fatal("Failed to include data directory: %v", err)
|
||||
}
|
||||
}
|
||||
|
@@ -377,8 +276,7 @@ func runDump(ctx *cli.Context) error {
|
|||
if err != nil {
|
||||
return err
|
||||
}
|
||||
|
||||
return addReader(w, object, info, path.Join("data", "attachments", objPath), verbose)
|
||||
return dumper.AddReader(object, info, path.Join("data", "attachments", objPath))
|
||||
}); err != nil {
|
||||
fatal("Failed to dump attachments: %v", err)
|
||||
}
|
||||
|
@@ -392,8 +290,7 @@ func runDump(ctx *cli.Context) error {
|
|||
if err != nil {
|
||||
return err
|
||||
}
|
||||
|
||||
return addReader(w, object, info, path.Join("data", "packages", objPath), verbose)
|
||||
return dumper.AddReader(object, info, path.Join("data", "packages", objPath))
|
||||
}); err != nil {
|
||||
fatal("Failed to dump packages: %v", err)
|
||||
}
|
||||
|
@@ -409,80 +306,23 @@ func runDump(ctx *cli.Context) error {
|
|||
log.Error("Unable to check if %s exists. Error: %v", setting.Log.RootPath, err)
|
||||
}
|
||||
if isExist {
|
||||
if err := addRecursiveExclude(w, "log", setting.Log.RootPath, []string{absFileName}, verbose); err != nil {
|
||||
if err := dumper.AddRecursiveExclude("log", setting.Log.RootPath, nil); err != nil {
|
||||
fatal("Failed to include log: %v", err)
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
if fileName != "-" {
|
||||
if err = w.Close(); err != nil {
|
||||
_ = util.Remove(fileName)
|
||||
fatal("Failed to save %s: %v", fileName, err)
|
||||
if outFileName == "-" {
|
||||
log.Info("Finish dumping to stdout")
|
||||
} else {
|
||||
if err = archiverWriter.Close(); err != nil {
|
||||
_ = os.Remove(outFileName)
|
||||
fatal("Failed to save %q: %v", outFileName, err)
|
||||
}
|
||||
|
||||
if err := os.Chmod(fileName, 0o600); err != nil {
|
||||
if err = os.Chmod(outFileName, 0o600); err != nil {
|
||||
log.Info("Can't change file access permissions mask to 0600: %v", err)
|
||||
}
|
||||
}
|
||||
|
||||
if fileName != "-" {
|
||||
log.Info("Finish dumping in file %s", fileName)
|
||||
} else {
|
||||
log.Info("Finish dumping to stdout")
|
||||
}
|
||||
|
||||
return nil
|
||||
}
|
||||
|
||||
// addRecursiveExclude zips absPath to specified insidePath inside writer excluding excludeAbsPath
|
||||
func addRecursiveExclude(w archiver.Writer, insidePath, absPath string, excludeAbsPath []string, verbose bool) error {
|
||||
absPath, err := filepath.Abs(absPath)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
dir, err := os.Open(absPath)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
defer dir.Close()
|
||||
|
||||
files, err := dir.Readdir(0)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
for _, file := range files {
|
||||
currentAbsPath := filepath.Join(absPath, file.Name())
|
||||
currentInsidePath := path.Join(insidePath, file.Name())
|
||||
if file.IsDir() {
|
||||
if !util.SliceContainsString(excludeAbsPath, currentAbsPath) {
|
||||
if err := addFile(w, currentInsidePath, currentAbsPath, false); err != nil {
|
||||
return err
|
||||
}
|
||||
if err = addRecursiveExclude(w, currentInsidePath, currentAbsPath, excludeAbsPath, verbose); err != nil {
|
||||
return err
|
||||
}
|
||||
}
|
||||
} else {
|
||||
// only copy regular files and symlink regular files, skip non-regular files like socket/pipe/...
|
||||
shouldAdd := file.Mode().IsRegular()
|
||||
if !shouldAdd && file.Mode()&os.ModeSymlink == os.ModeSymlink {
|
||||
target, err := filepath.EvalSymlinks(currentAbsPath)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
targetStat, err := os.Stat(target)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
shouldAdd = targetStat.Mode().IsRegular()
|
||||
}
|
||||
if shouldAdd {
|
||||
if err = addFile(w, currentInsidePath, currentAbsPath, verbose); err != nil {
|
||||
return err
|
||||
}
|
||||
}
|
||||
}
|
||||
log.Info("Finish dumping in file %s", outFileName)
|
||||
}
|
||||
return nil
|
||||
}
|
||||
|
|
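The cmd/dump.go rewrite above moves file collection into the new `dump.Dumper` helper (`AddFile`, `AddReader`, `AddRecursiveExclude`, `GlobalExcludeAbsPath`), but the archive itself is still produced the way the old code did it: `archiver.ByExtension` from `github.com/mholt/archiver/v3` returns a value that is type-asserted to `archiver.Writer`, `Create` is called on the output stream, and entries are written as `archiver.File` values. The sketch below is not Gitea code; it only exercises that archiver pattern using the same calls that appear in this diff, with `example.zip` and `go.mod` as placeholder file names.

```go
package main

import (
	"log"
	"os"

	"github.com/mholt/archiver/v3"
)

func main() {
	// Pick an archiver implementation from the output type, as dump.go does
	// with "." + outType ("zip", "tar.gz", ...).
	iface, err := archiver.ByExtension(".zip")
	if err != nil {
		log.Fatalf("no archiver for extension: %v", err)
	}
	w, ok := iface.(archiver.Writer)
	if !ok {
		log.Fatal("archiver does not support streaming writes")
	}

	// "example.zip" is just a placeholder output name for this sketch.
	out, err := os.Create("example.zip")
	if err != nil {
		log.Fatal(err)
	}
	defer out.Close()

	if err := w.Create(out); err != nil {
		log.Fatalf("creating archiver.Writer failed: %v", err)
	}
	defer w.Close()

	// Add one file under a custom name inside the archive, mirroring the
	// archiver.File / archiver.FileInfo usage of the removed addReader helper.
	src, err := os.Open("go.mod")
	if err != nil {
		log.Fatal(err)
	}
	defer src.Close()
	info, err := src.Stat()
	if err != nil {
		log.Fatal(err)
	}
	if err := w.Write(archiver.File{
		FileInfo: archiver.FileInfo{
			FileInfo:   info,
			CustomName: "data/go.mod",
		},
		ReadCloser: src,
	}); err != nil {
		log.Fatalf("adding file failed: %v", err)
	}
}
```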
cmd/hook.go (37 changes)

@@ -448,23 +448,26 @@ Gitea or set your environment appropriately.`, "")

func hookPrintResults(results []private.HookPostReceiveBranchResult) {
for _, res := range results {
if !res.Message {
continue
}

fmt.Fprintln(os.Stderr, "")
if res.Create {
fmt.Fprintf(os.Stderr, "Create a new pull request for '%s':\n", res.Branch)
fmt.Fprintf(os.Stderr, " %s\n", res.URL)
} else {
fmt.Fprint(os.Stderr, "Visit the existing pull request:\n")
fmt.Fprintf(os.Stderr, " %s\n", res.URL)
}
fmt.Fprintln(os.Stderr, "")
os.Stderr.Sync()
hookPrintResult(res.Message, res.Create, res.Branch, res.URL)
}
}

func hookPrintResult(output, isCreate bool, branch, url string) {
if !output {
return
}
fmt.Fprintln(os.Stderr, "")
if isCreate {
fmt.Fprintf(os.Stderr, "Create a new pull request for '%s':\n", branch)
fmt.Fprintf(os.Stderr, " %s\n", url)
} else {
fmt.Fprint(os.Stderr, "Visit the existing pull request:\n")
fmt.Fprintf(os.Stderr, " %s\n", url)
}
fmt.Fprintln(os.Stderr, "")
os.Stderr.Sync()
}

func pushOptions() map[string]string {
opts := make(map[string]string)
if pushCount, err := strconv.Atoi(os.Getenv(private.GitPushOptionCount)); err == nil {

@@ -691,6 +694,12 @@ Gitea or set your environment appropriately.`, "")
}
err = writeFlushPktLine(ctx, os.Stdout)

if err == nil {
for _, res := range resp.Results {
hookPrintResult(res.ShouldShowMessage, res.IsCreatePR, res.HeadBranch, res.URL)
}
}

return err
}

@@ -114,7 +114,7 @@ func showWebStartupMessage(msg string) {
log.Info("* WorkPath: %s", setting.AppWorkPath)
log.Info("* CustomPath: %s", setting.CustomPath)
log.Info("* ConfigFile: %s", setting.CustomConf)
log.Info("%s", msg)
log.Info("%s", msg) // show startup message
}

func serveInstall(ctx *cli.Context) error {

@@ -1553,8 +1553,9 @@ LEVEL = Info
;; The source of the username for new oauth2 accounts:
;; userid = use the userid / sub attribute
;; nickname = use the nickname attribute
;; preferred_username = use the preferred_username attribute
;; email = use the username part of the email attribute
;; Note: `nickname` and `email` options will normalize input strings using the following criteria:
;; Note: `nickname`, `preferred_username` and `email` options will normalize input strings using the following criteria:
;; - diacritics are removed
;; - the characters in the set `['´\x60]` are removed
;; - the characters in the set `[\s~+]` are replaced with `-`

@@ -2315,6 +2316,8 @@ LEVEL = Info
;SHOW_FOOTER_VERSION = true
;; Show template execution time in the footer
;SHOW_FOOTER_TEMPLATE_LOAD_TIME = true
;; Show the "powered by" text in the footer
;SHOW_FOOTER_POWERED_BY = true
;; Generate sitemap. Defaults to `true`.
;ENABLE_SITEMAP = true
;; Enable/Disable RSS/Atom feed

@@ -83,8 +83,7 @@ Admin operations:
- `--email value`: Email. Required.
- `--admin`: If provided, this makes the user an admin. Optional.
- `--access-token`: If provided, an access token will be created for the user. Optional. (default: false).
- `--must-change-password`: If provided, the created user will be required to choose a newer password after the
initial login. Optional. (default: true).
- `--must-change-password`: The created user will be required to set a new password after the initial login, default: true. It can be disabled with `--must-change-password=false`.
- `--random-password`: If provided, a randomly generated password will be used as the password of the created
user. The value of `--password` will be discarded. Optional.
- `--random-password-length`: If provided, it will be used to configure the length of the randomly generated

@@ -95,7 +94,7 @@ Admin operations:
- Options:
- `--username value`, `-u value`: Username. Required.
- `--password value`, `-p value`: New password. Required.
- `--must-change-password`: If provided, the user is required to choose a new password after the login. Optional.
- `--must-change-password`: The user is required to set a new password after the login, default: true. It can be disabled with `--must-change-password=false`.
- Examples:
- `gitea admin user change-password --username myname --password asecurepassword`
- `must-change-password`:

@@ -608,9 +608,10 @@ And the following unique queues:
- `ENABLE_AUTO_REGISTRATION`: **false**: Automatically create user accounts for new oauth2 users.
- `USERNAME`: **nickname**: The source of the username for new oauth2 accounts:
- `userid` - use the userid / sub attribute
- `nickname` - use the nickname attribute
- `nickname` - use the nickname
- `preferred_username` - use the preferred_username
- `email` - use the username part of the email attribute
- Note: `nickname` and `email` options will normalize input strings using the following criteria:
- Note: `nickname`, `preferred_username` and `email` options will normalize input strings using the following criteria:
- diacritics are removed
- the characters in the set `['´\x60]` are removed
- the characters in the set `[\s~+]` are replaced with `-`

@@ -1429,5 +1430,6 @@ Like `uses: https://gitea.com/actions/checkout@v4` or `uses: http://your-git-ser

- `SHOW_FOOTER_VERSION`: **true**: Show Gitea and Go version information in the footer.
- `SHOW_FOOTER_TEMPLATE_LOAD_TIME`: **true**: Show time of template execution in the footer.
- `SHOW_FOOTER_POWERED_BY`: **true**: Show the "powered by" text in the footer.
- `ENABLE_SITEMAP`: **true**: Generate sitemap.
- `ENABLE_FEED`: **true**: Enable/Disable RSS/Atom feed.

@@ -1353,5 +1353,6 @@ PROXY_HOSTS = *.github.com

- `SHOW_FOOTER_VERSION`: **true**: Show the Gitea version at the bottom of the page.
- `SHOW_FOOTER_TEMPLATE_LOAD_TIME`: **true**: Show template execution time in the footer.
- `SHOW_FOOTER_POWERED_BY`: **true**: Show the "powered by" text in the footer.
- `ENABLE_SITEMAP`: **true**: Generate sitemap.
- `ENABLE_FEED`: **true**: Enable/Disable RSS/Atom feed.

@@ -304,7 +304,8 @@ services:
- GITEA__mailer__ENABLED=true
- GITEA__mailer__FROM=${GITEA__mailer__FROM:?GITEA__mailer__FROM not set}
- GITEA__mailer__PROTOCOL=smtps
- GITEA__mailer__HOST=${GITEA__mailer__HOST:?GITEA__mailer__HOST not set}
- GITEA__mailer__SMTP_ADDR=${GITEA__mailer__SMTP_ADDR:?GITEA__mailer__SMTP_ADDR not set}
- GITEA__mailer__SMTP_PORT=${GITEA__mailer__SMTP_PORT:?GITEA__mailer__SMTP_PORT not set}
- GITEA__mailer__USER=${GITEA__mailer__USER:-apikey}
- GITEA__mailer__PASSWD="""${GITEA__mailer__PASSWD:?GITEA__mailer__PASSWD not set}"""
```

@@ -545,7 +546,7 @@ In this option, the idea is that the host SSH uses an `AuthorizedKeysCommand` in
```bash
cat <<"EOF" | sudo tee /home/git/docker-shell
#!/bin/sh
/usr/bin/docker exec -i --env SSH_ORIGINAL_COMMAND="$SSH_ORIGINAL_COMMAND" gitea sh "$@"
/usr/bin/docker exec -i -u git --env SSH_ORIGINAL_COMMAND="$SSH_ORIGINAL_COMMAND" gitea sh "$@"
EOF
sudo chmod +x /home/git/docker-shell
sudo usermod -s /home/git/docker-shell git

@@ -560,7 +561,7 @@ Add the following block to `/etc/ssh/sshd_config`, on the host:
```bash
Match User git
AuthorizedKeysCommandUser git
AuthorizedKeysCommand /usr/bin/docker exec -i gitea /usr/local/bin/gitea keys -c /data/gitea/conf/app.ini -e git -u %u -t %t -k %k
AuthorizedKeysCommand /usr/bin/docker exec -i -u git gitea /usr/local/bin/gitea keys -c /data/gitea/conf/app.ini -e git -u %u -t %t -k %k
```

(From 1.16.0 you will not need to set the `-c /data/gitea/conf/app.ini` option.)

@@ -303,34 +303,3 @@ sudo systemctl enable act_runner --now
```

If using Docker, the `act_runner` user should also be added to the `docker` group before starting the service. Keep in mind that this effectively gives `act_runner` root access to the system [[1]](https://docs.docker.com/engine/security/#docker-daemon-attack-surface).

## Configuration variable

You can create configuration variables on the user, organization and repository level.
The level of the variable depends on where you created it.

### Naming conventions

The following rules apply to variable names:

- Variable names can only contain alphanumeric characters (`[a-z]`, `[A-Z]`, `[0-9]`) or underscores (`_`). Spaces are not allowed.

- Variable names must not start with the `GITHUB_` and `GITEA_` prefix.

- Variable names must not start with a number.

- Variable names are case-insensitive.

- Variable names must be unique at the level they are created at.

- Variable names must not be `CI`.

### Using variable

After creating configuration variables, they will be automatically filled in the `vars` context.
They can be accessed through expressions like `{{ vars.VARIABLE_NAME }}` in the workflow.

### Precedence

If a variable with the same name exists at multiple levels, the variable at the lowest level takes precedence:
A repository variable will always be chosen over an organization/user variable.

@@ -258,32 +258,3 @@ Runner labels are used to determine which jobs a runner can run and how to run them.
The runner will fetch jobs from the Gitea instance and run them automatically.

Since Act Runner is still under development, it is recommended to check for the latest version regularly and upgrade.

## Variables

You can create variables at the user, organization and repository level. The level of a variable depends on where it was created.

### Naming conventions

The following rules apply to variable names:

- Variable names can only contain alphanumeric characters (`[a-z]`, `[A-Z]`, `[0-9]`) or underscores (`_`). Spaces are not allowed.

- Variable names must not start with the `GITHUB_` and `GITEA_` prefixes.

- Variable names must not start with a number.

- Variable names are case-insensitive.

- Variable names must be unique at the level they are created at.

- Variable names must not be `CI`.

### Usage

After configuration variables are created, they will automatically be filled into the `vars` context. They can be used in the workflow through expressions like `{{ vars.VARIABLE_NAME }}`.

### Precedence

If a variable with the same name exists at multiple levels, the variable at the lowest level takes precedence.
A repository-level variable is always chosen over an organization or user level variable.

@@ -104,7 +104,7 @@ However, if a job container tries to fetch code from localhost, it will fail bec
### Connection 3, act runner to internet

When you use some actions like `actions/checkout@v4`, the act runner downloads the scripts, not the job containers.
By default, it downloads from [gitea.com](http://gitea.com/), so it requires access to the internet.
By default, it downloads from [github.com](http://github.com/), so it requires access to the internet. If you configure the `DEFAULT_ACTIONS_URL` to `self`, then it will download from your Gitea instance by default. Then it will not connect to internet when downloading the action itself.
It also downloads some docker images from Docker Hub by default, which also requires internet access.

However, internet access is not strictly necessary.

@@ -105,7 +105,8 @@ The act runner must be able to connect to Gitea to receive tasks and send back the execution results.
### Connection 3, act runner to the internet

When you use some actions like `actions/checkout@v4`, what the act runner downloads are the scripts, not the job containers.
By default, it downloads from [gitea.com](http://gitea.com/), so it requires access to the internet.
By default, it downloads from [github.com](http://github.com/), so it requires access to the internet. If you configure it to `self`,
it will download from your current Gitea instance by default, and this step then does not need to connect to the internet.
It also downloads some Docker images from Docker Hub by default, which also requires internet access.

However, internet access is not strictly necessary.

docs/content/usage/actions/variables.en-us.md (new file, 41 lines)

@@ -0,0 +1,41 @@
---
date: "2024-04-10T22:21:00+08:00"
title: "Variables"
slug: "actions-variables"
sidebar_position: 25
draft: false
toc: false
menu:
  sidebar:
    parent: "actions"
    name: "Variables"
    sidebar_position: 25
    identifier: "actions-variables"
---

## Variables

You can create configuration variables on the user, organization and repository level.
The level of the variable depends on where you created it. When creating a variable, the
key will be converted to uppercase. You need to use the uppercase name in the yaml file.

### Naming conventions

The following rules apply to variable names:

- Variable names can only contain alphanumeric characters (`[a-z]`, `[A-Z]`, `[0-9]`) or underscores (`_`). Spaces are not allowed.
- Variable names must not start with the `GITHUB_` and `GITEA_` prefix.
- Variable names must not start with a number.
- Variable names are case-insensitive.
- Variable names must be unique at the level they are created at.
- Variable names must not be `CI`.

### Using variable

After creating configuration variables, they will be automatically filled in the `vars` context.
They can be accessed through expressions like `${{ vars.VARIABLE_NAME }}` in the workflow.

### Precedence

If a variable with the same name exists at multiple levels, the variable at the lowest level takes precedence:
A repository variable will always be chosen over an organization/user variable.

docs/content/usage/actions/variables.zh-cn.md (new file, 39 lines)

@@ -0,0 +1,39 @@
---
date: "2024-04-10T22:21:00+08:00"
title: "Variables"
slug: "actions-variables"
sidebar_position: 25
draft: false
toc: false
menu:
  sidebar:
    parent: "actions"
    name: "Variables"
    sidebar_position: 25
    identifier: "actions-variables"
---

## Variables

You can create variables at the user, organization and repository level. The level of a variable depends on where it was created. When creating a variable, its name is
converted to uppercase, and the uppercase form must be used when referencing it in the yaml file.

### Naming conventions

The following rules apply to variable names:

- Variable names can only contain alphanumeric characters (`[a-z]`, `[A-Z]`, `[0-9]`) or underscores (`_`). Spaces are not allowed.
- Variable names must not start with the `GITHUB_` and `GITEA_` prefixes.
- Variable names must not start with a number.
- Variable names are case-insensitive.
- Variable names must be unique at the level they are created at.
- Variable names must not be `CI`.

### Usage

After configuration variables are created, they will automatically be filled into the `vars` context. They can be used in the workflow through expressions like `${{ vars.VARIABLE_NAME }}`.

### Precedence

If a variable with the same name exists at multiple levels, the variable at the lowest level takes precedence.
A repository-level variable is always chosen over an organization or user level variable.

go.mod (8 changes)

@@ -24,7 +24,6 @@ require (
github.com/buildkite/terminal-to-html/v3 v3.11.0
github.com/caddyserver/certmagic v0.20.0
github.com/chi-middleware/proxy v1.1.1
github.com/denisenkom/go-mssqldb v0.12.3
github.com/dimiro1/reply v0.0.0-20200315094148-d0136a4c9e21
github.com/djherbis/buffer v1.2.0
github.com/djherbis/nio/v3 v3.0.1

@@ -77,6 +76,7 @@ require (
github.com/meilisearch/meilisearch-go v0.26.2
github.com/mholt/archiver/v3 v3.5.1
github.com/microcosm-cc/bluemonday v1.0.26
github.com/microsoft/go-mssqldb v1.7.0
github.com/minio/minio-go/v7 v7.0.69
github.com/msteinert/pam v1.2.0
github.com/nektos/act v0.2.52

@@ -105,11 +105,11 @@ require (
github.com/yuin/goldmark v1.7.0
github.com/yuin/goldmark-highlighting/v2 v2.0.0-20230729083705-37449abec8cc
github.com/yuin/goldmark-meta v1.1.0
golang.org/x/crypto v0.21.0
golang.org/x/crypto v0.22.0
golang.org/x/image v0.15.0
golang.org/x/net v0.22.0
golang.org/x/net v0.24.0
golang.org/x/oauth2 v0.18.0
golang.org/x/sys v0.18.0
golang.org/x/sys v0.19.0
golang.org/x/text v0.14.0
golang.org/x/tools v0.19.0
google.golang.org/grpc v1.62.1

go.sum (44 changes)

@@ -38,11 +38,20 @@ github.com/42wim/sshsig v0.0.0-20211121163825-841cf5bbc121 h1:r3qt8PCHnfjOv9PN3H
|
|||
github.com/42wim/sshsig v0.0.0-20211121163825-841cf5bbc121/go.mod h1:Ock8XgA7pvULhIaHGAk/cDnRfNrF9Jey81nPcc403iU=
|
||||
github.com/6543/go-version v1.3.1 h1:HvOp+Telns7HWJ2Xo/05YXQSB2bE0WmVgbHqwMPZT4U=
|
||||
github.com/6543/go-version v1.3.1/go.mod h1:oqFAHCwtLVUTLdhQmVZWYvaHXTdsbB4SY85at64SQEo=
|
||||
github.com/Azure/azure-sdk-for-go/sdk/azcore v0.19.0/go.mod h1:h6H6c8enJmmocHUbLiiGY6sx7f9i+X3m1CHdd5c6Rdw=
|
||||
github.com/Azure/azure-sdk-for-go/sdk/azidentity v0.11.0/go.mod h1:HcM1YX14R7CJcghJGOYCgdezslRSVzqwLf/q+4Y2r/0=
|
||||
github.com/Azure/azure-sdk-for-go/sdk/internal v0.7.0/go.mod h1:yqy467j36fJxcRV2TzfVZ1pCb5vxm4BtZPUdYWe/Xo8=
|
||||
github.com/Azure/azure-sdk-for-go/sdk/azcore v1.9.1 h1:lGlwhPtrX6EVml1hO0ivjkUxsSyl4dsiw9qcA1k/3IQ=
|
||||
github.com/Azure/azure-sdk-for-go/sdk/azcore v1.9.1/go.mod h1:RKUqNu35KJYcVG/fqTRqmuXJZYNhYkBrnC/hX7yGbTA=
|
||||
github.com/Azure/azure-sdk-for-go/sdk/azidentity v1.5.1 h1:sO0/P7g68FrryJzljemN+6GTssUXdANk6aJ7T1ZxnsQ=
|
||||
github.com/Azure/azure-sdk-for-go/sdk/azidentity v1.5.1/go.mod h1:h8hyGFDsU5HMivxiS2iYFZsgDbU9OnnJ163x5UGVKYo=
|
||||
github.com/Azure/azure-sdk-for-go/sdk/internal v1.5.1 h1:6oNBlSdi1QqM1PNW7FPA6xOGA5UNsXnkaYZz9vdPGhA=
|
||||
github.com/Azure/azure-sdk-for-go/sdk/internal v1.5.1/go.mod h1:s4kgfzA0covAXNicZHDMN58jExvcng2mC/DepXiF1EI=
|
||||
github.com/Azure/azure-sdk-for-go/sdk/security/keyvault/azkeys v1.0.1 h1:MyVTgWR8qd/Jw1Le0NZebGBUCLbtak3bJ3z1OlqZBpw=
|
||||
github.com/Azure/azure-sdk-for-go/sdk/security/keyvault/azkeys v1.0.1/go.mod h1:GpPjLhVR9dnUoJMyHWSPy71xY9/lcmpzIPZXmF0FCVY=
|
||||
github.com/Azure/azure-sdk-for-go/sdk/security/keyvault/internal v1.0.0 h1:D3occbWoio4EBLkbkevetNMAVX197GkzbUMtqjGWn80=
|
||||
github.com/Azure/azure-sdk-for-go/sdk/security/keyvault/internal v1.0.0/go.mod h1:bTSOgj05NGRuHHhQwAdPnYr9TOdNmKlZTgGLL6nyAdI=
|
||||
github.com/Azure/go-ntlmssp v0.0.0-20221128193559-754e69321358 h1:mFRzDkZVAjdal+s7s0MwaRv9igoPqLRdzOLzw/8Xvq8=
|
||||
github.com/Azure/go-ntlmssp v0.0.0-20221128193559-754e69321358/go.mod h1:chxPXzSsl7ZWRAuOIE23GDNzjWuZquvFlgA8xmpunjU=
|
||||
github.com/AzureAD/microsoft-authentication-library-for-go v1.2.1 h1:DzHpqpoJVaCgOUdVHxE8QB52S6NiVdDQvGlny1qvPqA=
|
||||
github.com/AzureAD/microsoft-authentication-library-for-go v1.2.1/go.mod h1:wP83P5OoQ5p6ip3ScPr0BAq0BvuPAvacpEuSzyouqAI=
|
||||
github.com/BurntSushi/toml v0.3.1/go.mod h1:xHWCNGjB5oqiDr8zfno3MHue2Ht5sIBksp03qcyfWMU=
|
||||
github.com/ClickHouse/ch-go v0.61.5 h1:zwR8QbYI0tsMiEcze/uIMK+Tz1D3XZXLdNrlaOpeEI4=
|
||||
github.com/ClickHouse/ch-go v0.61.5/go.mod h1:s1LJW/F/LcFs5HJnuogFMta50kKDO0lf9zzfrbl0RQg=
|
||||
|
@ -220,7 +229,6 @@ github.com/dlclark/regexp2 v1.4.0/go.mod h1:2pZnwuY/m+8K6iRw6wQdMtk+rH5tNGR1i55k
|
|||
github.com/dlclark/regexp2 v1.7.0/go.mod h1:DHkYz0B9wPfa6wondMfaivmHpzrQ3v9q8cnmRbL6yW8=
|
||||
github.com/dlclark/regexp2 v1.11.0 h1:G/nrcoOa7ZXlpoa/91N3X7mM3r8eIlMBBJZvsz/mxKI=
|
||||
github.com/dlclark/regexp2 v1.11.0/go.mod h1:DHkYz0B9wPfa6wondMfaivmHpzrQ3v9q8cnmRbL6yW8=
|
||||
github.com/dnaeon/go-vcr v1.2.0/go.mod h1:R4UdLID7HZT3taECzJs4YgbbH6PIGXB6W/sc5OLb6RQ=
|
||||
github.com/dsnet/compress v0.0.2-0.20210315054119-f66993602bf5 h1:iFaUwBSo5Svw6L7HYpRu/0lE3e0BaElwnNO1qkNQxBY=
|
||||
github.com/dsnet/compress v0.0.2-0.20210315054119-f66993602bf5/go.mod h1:qssHWj60/X5sZFNxpG4HBPDHVqxNm4DfnCKgrbZOT+s=
|
||||
github.com/dsnet/golib v0.0.0-20171103203638-1ea166775780/go.mod h1:Lj+Z9rebOhdfkVLjJ8T6VcRQv3SXugXy999NBtR9aFY=
|
||||
|
@ -355,7 +363,6 @@ github.com/golang-jwt/jwt/v4 v4.5.0 h1:7cYmW1XlMY7h7ii7UhUyChSgS5wUJEnm9uZVTGqOW
|
|||
github.com/golang-jwt/jwt/v4 v4.5.0/go.mod h1:m21LjoU+eqJr34lmDMbreY2eSTRJ1cv77w39/MY0Ch0=
|
||||
github.com/golang-jwt/jwt/v5 v5.2.1 h1:OuVbFODueb089Lh128TAcimifWaLhJwVflnrgM17wHk=
|
||||
github.com/golang-jwt/jwt/v5 v5.2.1/go.mod h1:pqrtFR0X4osieyHYxtmOUWsAWrfe1Q5UVIyoH402zdk=
|
||||
github.com/golang-sql/civil v0.0.0-20190719163853-cb61b32ac6fe/go.mod h1:8vg3r2VgvsThLBIFL93Qb5yWzgyZWhEmBwUJWevAkK0=
|
||||
github.com/golang-sql/civil v0.0.0-20220223132316-b832511892a9 h1:au07oEsX2xN0ktxqI+Sida1w446QrXBRJ0nee3SNZlA=
|
||||
github.com/golang-sql/civil v0.0.0-20220223132316-b832511892a9/go.mod h1:8vg3r2VgvsThLBIFL93Qb5yWzgyZWhEmBwUJWevAkK0=
|
||||
github.com/golang-sql/sqlexp v0.1.0 h1:ZCD6MBpcuOVfGVqsEmY5/4FtYiKz6tSyUv9LPEDei6A=
|
||||
|
@ -513,6 +520,8 @@ github.com/kr/pty v1.1.1/go.mod h1:pFQYn66WHrOpPYNljwOMqo10TkYh1fy3cYio2l3bCsQ=
|
|||
github.com/kr/text v0.1.0/go.mod h1:4Jbv+DJW3UT/LiOwJeYQe1efqtUx/iVham/4vfdArNI=
|
||||
github.com/kr/text v0.2.0 h1:5Nx0Ya0ZqY2ygV366QzturHI13Jq95ApcVaJBhpS+AY=
|
||||
github.com/kr/text v0.2.0/go.mod h1:eLer722TekiGuMkidMxC/pM04lWEeraHUUmBw8l2grE=
|
||||
github.com/kylelemons/godebug v1.1.0 h1:RPNrshWIDI6G2gRW9EHilWtl7Z6Sb1BR0xunSBf0SNc=
|
||||
github.com/kylelemons/godebug v1.1.0/go.mod h1:9/0rRGxNHcop5bhtWyNeEfOS8JIWk580+fNqagV/RAw=
|
||||
github.com/ledongthuc/pdf v0.0.0-20220302134840-0c2507a12d80/go.mod h1:imJHygn/1yfhB7XSJJKlFZKl/J+dCPAknuiaGOshXAs=
|
||||
github.com/lib/pq v1.2.0/go.mod h1:5WUZQaWbwv1U+lTReE5YruASi9Al49XbQIvNi/34Woo=
|
||||
github.com/lib/pq v1.10.9 h1:YXG7RB+JIjhP29X+OtkiDnYaXQwpS4JEWq7dtCCRUEw=
|
||||
|
@ -551,6 +560,8 @@ github.com/mholt/archiver/v3 v3.5.1 h1:rDjOBX9JSF5BvoJGvjqK479aL70qh9DIpZCl+k7Cl
|
|||
github.com/mholt/archiver/v3 v3.5.1/go.mod h1:e3dqJ7H78uzsRSEACH1joayhuSyhnonssnDhppzS1L4=
|
||||
github.com/microcosm-cc/bluemonday v1.0.26 h1:xbqSvqzQMeEHCqMi64VAs4d8uy6Mequs3rQ0k/Khz58=
|
||||
github.com/microcosm-cc/bluemonday v1.0.26/go.mod h1:JyzOCs9gkyQyjs+6h10UEVSe02CGwkhd72Xdqh78TWs=
|
||||
github.com/microsoft/go-mssqldb v1.7.0 h1:sgMPW0HA6Ihd37Yx0MzHyKD726C2kY/8KJsQtXHNaAs=
|
||||
github.com/microsoft/go-mssqldb v1.7.0/go.mod h1:kOvZKUdrhhFQmxLZqbwUV0rHkNkZpthMITIb2Ko1IoA=
|
||||
github.com/miekg/dns v1.1.58 h1:ca2Hdkz+cDg/7eNF6V56jjzuZ4aCAE+DbVkILdQWG/4=
|
||||
github.com/miekg/dns v1.1.58/go.mod h1:Ypv+3b/KadlvW9vJfXOTf300O4UqaHFzFCuHz+rPkBY=
|
||||
github.com/minio/md5-simd v1.1.2 h1:Gdi1DZK69+ZVMoNHRXJyNcxrMA4dSxoYHZSQbirFg34=
|
||||
|
@ -574,7 +585,6 @@ github.com/modern-go/concurrent v0.0.0-20180306012644-bacd9c7ef1dd h1:TRLaZ9cD/w
|
|||
github.com/modern-go/concurrent v0.0.0-20180306012644-bacd9c7ef1dd/go.mod h1:6dJC0mAP4ikYIbvyc7fijjWJddQyLn8Ig3JB5CqoB9Q=
|
||||
github.com/modern-go/reflect2 v1.0.2 h1:xBagoLtFs94CBntxluKeaWgTMpvLxC4ur3nMaC9Gz0M=
|
||||
github.com/modern-go/reflect2 v1.0.2/go.mod h1:yWuevngMOJpCy52FWWMvUC8ws7m/LJsjYzDa0/r8luk=
|
||||
github.com/modocache/gover v0.0.0-20171022184752-b58185e213c5/go.mod h1:caMODM3PzxT8aQXRPkAt8xlV/e7d7w8GM5g0fa5F0D8=
|
||||
github.com/montanaflynn/stats v0.0.0-20171201202039-1bf9dbcd8cbe/go.mod h1:wL8QJuTMNUDYhXwkmfOly8iTdp5TEcJFWZD2D7SIkUc=
|
||||
github.com/mrjones/oauth v0.0.0-20190623134757-126b35219450 h1:j2kD3MT1z4PXCiUllUJF9mWUESr9TWKS7iEKsQ/IipM=
|
||||
github.com/mrjones/oauth v0.0.0-20190623134757-126b35219450/go.mod h1:skjdDftzkFALcuGzYSklqYd8gvat6F1gZJ4YPVbkZpM=
|
||||
|
@ -627,7 +637,8 @@ github.com/pierrec/lz4/v4 v4.1.21 h1:yOVMLb6qSIDP67pl/5F7RepeKYu/VmTyEXvuMI5d9mQ
|
|||
github.com/pierrec/lz4/v4 v4.1.21/go.mod h1:gZWDp/Ze/IJXGXf23ltt2EXimqmTUXEy0GFuRQyBid4=
|
||||
github.com/pjbgf/sha1cd v0.3.0 h1:4D5XXmUUBUl/xQ6IjCkEAbqXskkq/4O7LmGn0AqMDs4=
|
||||
github.com/pjbgf/sha1cd v0.3.0/go.mod h1:nZ1rrWOcGJ5uZgEEVL1VUM9iRQiZvWdbZjkKyFzPPsI=
|
||||
github.com/pkg/browser v0.0.0-20180916011732-0a3d74bf9ce4/go.mod h1:4OwLy04Bl9Ef3GJJCoec+30X3LQs/0/m4HFRt/2LUSA=
|
||||
github.com/pkg/browser v0.0.0-20240102092130-5ac0b6a4141c h1:+mdjkGKdHQG3305AYmdv1U2eRNDiU2ErMBj1gwrq8eQ=
|
||||
github.com/pkg/browser v0.0.0-20240102092130-5ac0b6a4141c/go.mod h1:7rwL4CYBLnjLxUqIJNnCWiEdr3bn6IUYi15bNlnbCCU=
|
||||
github.com/pkg/diff v0.0.0-20210226163009-20ebb0f2a09e/go.mod h1:pJLUxLENpZxwdsKMEsNbx1VGcRFpLqf3715MtcvvzbA=
|
||||
github.com/pkg/errors v0.9.1 h1:FEBLx1zS214owpjy7qsBeixbURkuhQAwrK5UwLGTwt4=
|
||||
github.com/pkg/errors v0.9.1/go.mod h1:bwawxfHBFNV+L2hUp1rHADufV3IMtnDRdf1r5NINEl0=
|
||||
|
@ -836,7 +847,6 @@ golang.org/x/crypto v0.0.0-20190308221718-c2843e01d9a2/go.mod h1:djNgcEr1/C05ACk
|
|||
golang.org/x/crypto v0.0.0-20190605123033-f99c8df09eb5/go.mod h1:yigFU9vqHzYiE8UmvKecakEJjdnWj3jj499lnFckfCI=
|
||||
golang.org/x/crypto v0.0.0-20191011191535-87dc89f01550/go.mod h1:yigFU9vqHzYiE8UmvKecakEJjdnWj3jj499lnFckfCI=
|
||||
golang.org/x/crypto v0.0.0-20200622213623-75b288015ac9/go.mod h1:LzIPMQfyMNhhGPhUkYOs5KpL4U8rLKemX1yGLhDgUto=
|
||||
golang.org/x/crypto v0.0.0-20201016220609-9e8e0b390897/go.mod h1:LzIPMQfyMNhhGPhUkYOs5KpL4U8rLKemX1yGLhDgUto=
|
||||
golang.org/x/crypto v0.0.0-20210513164829-c07d793c2f9a/go.mod h1:P+XmwS30IXTQdn5tA2iutPOUgjI07+tq3H3K9MVA1s8=
|
||||
golang.org/x/crypto v0.0.0-20210921155107-089bfa567519/go.mod h1:GvvjBRRGRdwPK5ydBHafDWAxML/pGHZbMvKqRZ5+Abc=
|
||||
golang.org/x/crypto v0.0.0-20220214200702-86341886e292/go.mod h1:IxCIyHEi3zRg3s0A5j5BB6A9Jmi73HwBIUl50j+osU4=
|
||||
|
@ -846,8 +856,8 @@ golang.org/x/crypto v0.3.1-0.20221117191849-2c476679df9a/go.mod h1:hebNnKkNXi2Uz
|
|||
golang.org/x/crypto v0.7.0/go.mod h1:pYwdfH91IfpZVANVyUOhSIPZaFoJGxTFbZhFTx+dXZU=
|
||||
golang.org/x/crypto v0.13.0/go.mod h1:y6Z2r+Rw4iayiXXAIxJIDAJ1zMW4yaTpebo8fPOliYc=
|
||||
golang.org/x/crypto v0.17.0/go.mod h1:gCAAfMLgwOJRpTjQ2zCCt2OcSfYMTeZVSRtQlPC7Nq4=
|
||||
golang.org/x/crypto v0.21.0 h1:X31++rzVUdKhX5sWmSOFZxx8UW/ldWx55cbf08iNAMA=
|
||||
golang.org/x/crypto v0.21.0/go.mod h1:0BP7YvVV9gBbVKyeTG0Gyn+gZm94bibOW5BjDEYAOMs=
|
||||
golang.org/x/crypto v0.22.0 h1:g1v0xeRhjcugydODzvb3mEM9SQ0HGp9s/nh3COQ/C30=
|
||||
golang.org/x/crypto v0.22.0/go.mod h1:vr6Su+7cTlO45qkww3VDJlzDn0ctJvRgYbC2NvXHt+M=
|
||||
golang.org/x/exp v0.0.0-20240314144324-c7f7c6466f7f h1:3CW0unweImhOzd5FmYuRsD4Y4oQFKZIjAnKbjV4WIrw=
|
||||
golang.org/x/exp v0.0.0-20240314144324-c7f7c6466f7f/go.mod h1:CxmFvTBINI24O/j8iY7H1xHzx2i4OsyguNBmN/uPtqc=
|
||||
golang.org/x/image v0.15.0 h1:kOELfmgrmJlw4Cdb7g/QGuB3CvDrXbqEIww/pNtNBm8=
|
||||
|
@ -871,7 +881,6 @@ golang.org/x/net v0.0.0-20200520004742-59133d7f0dd7/go.mod h1:qpuaurCH72eLCgpAm/
|
|||
golang.org/x/net v0.0.0-20200822124328-c89045814202/go.mod h1:/O7V0waA8r7cgGh81Ro3o1hOxt32SMVPicZroKQ2sZA=
|
||||
golang.org/x/net v0.0.0-20201021035429-f5854403a974/go.mod h1:sp8m0HH+o8qH0wwXwYZr8TS3Oi6o0r6Gce1SSxlDquU=
|
||||
golang.org/x/net v0.0.0-20210226172049-e18ecbb05110/go.mod h1:m0MpNAwzfU5UDzcl9v0D8zg8gWTRqZa9RBIspLL5mdg=
|
||||
golang.org/x/net v0.0.0-20210610132358-84b48f89b13b/go.mod h1:9nx3DQGgdP8bBQD5qxJ1jj9UTztislL4KSBs9R2vV5Y=
|
||||
golang.org/x/net v0.0.0-20211112202133-69e39bad7dc2/go.mod h1:9nx3DQGgdP8bBQD5qxJ1jj9UTztislL4KSBs9R2vV5Y=
|
||||
golang.org/x/net v0.0.0-20220225172249-27dd8689420f/go.mod h1:CfG3xpIq0wQ8r1q4Su4UZFWDARRcnwPjda9FqA0JpMk=
|
||||
golang.org/x/net v0.0.0-20220722155237-a158d28d115b/go.mod h1:XRhObCWvk6IyKnWLug+ECip1KBveYUHfp+8e9klMJ9c=
|
||||
|
@ -881,8 +890,8 @@ golang.org/x/net v0.7.0/go.mod h1:2Tu9+aMcznHK/AK1HMvgo6xiTLG5rD5rZLDS+rp2Bjs=
|
|||
golang.org/x/net v0.8.0/go.mod h1:QVkue5JL9kW//ek3r6jTKnTFis1tRmNAW2P1shuFdJc=
|
||||
golang.org/x/net v0.9.0/go.mod h1:d48xBJpPfHeWQsugry2m+kC02ZBRGRgulfHnEXEuWns=
|
||||
golang.org/x/net v0.10.0/go.mod h1:0qNGK6F8kojg2nk9dLZ2mShWaEBan6FAoqfSigmmuDg=
|
||||
golang.org/x/net v0.22.0 h1:9sGLhx7iRIHEiX0oAJ3MRZMUCElJgy7Br1nO+AMN3Tc=
|
||||
golang.org/x/net v0.22.0/go.mod h1:JKghWKKOSdJwpW2GEx0Ja7fmaKnMsbu+MWVZTokSYmg=
|
||||
golang.org/x/net v0.24.0 h1:1PcaxkF854Fu3+lvBIx5SYn9wRlBzzcnHZSiaFFAb0w=
|
||||
golang.org/x/net v0.24.0/go.mod h1:2Q7sJY5mzlzWjKtYUEXSlBWCdyaioyXzRB2RtU8KVE8=
|
||||
golang.org/x/oauth2 v0.18.0 h1:09qnuIAgzdx1XplqJvW6CQqMCtGZykZWcXzPMPUusvI=
|
||||
golang.org/x/oauth2 v0.18.0/go.mod h1:Wf7knwG0MPoWIMMBgFlEaSUDaKskp0dCfrlJRJXbBi8=
|
||||
golang.org/x/sync v0.0.0-20180314180146-1d60e4601c6f/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM=
|
||||
|
@ -932,8 +941,8 @@ golang.org/x/sys v0.7.0/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=
|
|||
golang.org/x/sys v0.8.0/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=
|
||||
golang.org/x/sys v0.12.0/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=
|
||||
golang.org/x/sys v0.15.0/go.mod h1:/VUhepiaJMQUp4+oa/7Zr1D23ma6VTLIYjOOTFZPUcA=
|
||||
golang.org/x/sys v0.18.0 h1:DBdB3niSjOA/O0blCZBqDefyWNYveAYMNF1Wum0DYQ4=
|
||||
golang.org/x/sys v0.18.0/go.mod h1:/VUhepiaJMQUp4+oa/7Zr1D23ma6VTLIYjOOTFZPUcA=
|
||||
golang.org/x/sys v0.19.0 h1:q5f1RH2jigJ1MoAWp2KTp3gm5zAGFUTarQZ5U386+4o=
|
||||
golang.org/x/sys v0.19.0/go.mod h1:/VUhepiaJMQUp4+oa/7Zr1D23ma6VTLIYjOOTFZPUcA=
|
||||
golang.org/x/term v0.0.0-20201126162022-7de9c90e9dd1/go.mod h1:bj7SfCRtBDWHUb9snDiAeCFNEtKQo2Wmx5Cou7ajbmo=
|
||||
golang.org/x/term v0.0.0-20210927222741-03fcf44c2211/go.mod h1:jbD1KX2456YbFQfuXm/mYQcufACuNUgVhRMnK/tPxf8=
|
||||
golang.org/x/term v0.2.0/go.mod h1:TVmDHMZPmdnySmBfhjOoOdhjzdE1h4u1VwSiw2l1Nuc=
|
||||
|
@ -943,8 +952,8 @@ golang.org/x/term v0.7.0/go.mod h1:P32HKFT3hSsZrRxla30E9HqToFYAQPCMs/zFMBUFqPY=
|
|||
golang.org/x/term v0.8.0/go.mod h1:xPskH00ivmX89bAKVGSKKtLOWNx2+17Eiy94tnKShWo=
|
||||
golang.org/x/term v0.12.0/go.mod h1:owVbMEjm3cBLCHdkQu9b1opXd4ETQWc3BhuQGKgXgvU=
|
||||
golang.org/x/term v0.15.0/go.mod h1:BDl952bC7+uMoWR75FIrCDx79TPU9oHkTZ9yRbYOrX0=
|
||||
golang.org/x/term v0.18.0 h1:FcHjZXDMxI8mM3nwhX9HlKop4C0YQvCVCdwYl2wOtE8=
|
||||
golang.org/x/term v0.18.0/go.mod h1:ILwASektA3OnRv7amZ1xhE/KTR+u50pbXfZ03+6Nx58=
|
||||
golang.org/x/term v0.19.0 h1:+ThwsDv+tYfnJFhF4L8jITxu1tdTWRTZpdsWgEgjL6Q=
|
||||
golang.org/x/term v0.19.0/go.mod h1:2CuTdWZ7KHSQwUzKva0cbMg6q2DMI3Mmxp+gKJbskEk=
|
||||
golang.org/x/text v0.3.0/go.mod h1:NqM8EUOU14njkJ3fqMW+pc6Ldnwhi/IjpwHt7yyuwOQ=
|
||||
golang.org/x/text v0.3.2/go.mod h1:bEr9sfX3Q8Zfm5fL9x+3itogRgK3+ptLWKqgva+5dAk=
|
||||
golang.org/x/text v0.3.3/go.mod h1:5Zoc/QRtKVWzQhOtBMvqHzDpF6irO9z98xDceosuGiQ=
|
||||
|
@ -1022,7 +1031,6 @@ gopkg.in/yaml.v2 v2.3.0/go.mod h1:hI93XBmqTisBFMUTm0b8Fm+jr3Dg1NNxqwp+5A1VGuI=
|
|||
gopkg.in/yaml.v2 v2.4.0 h1:D8xgwECY7CYvx+Y2n4sBz93Jn9JRvxdiyyo8CTfuKaY=
|
||||
gopkg.in/yaml.v2 v2.4.0/go.mod h1:RDklbk79AGWmwhnvt/jBztapEOGDOx6ZbXqjP6csGnQ=
|
||||
gopkg.in/yaml.v3 v3.0.0-20200313102051-9f266ea9e77c/go.mod h1:K4uyk7z7BCEPqu6E+C64Yfv1cQ7kz7rIZviUmN+EgEM=
|
||||
gopkg.in/yaml.v3 v3.0.0-20210107192922-496545a6307b/go.mod h1:K4uyk7z7BCEPqu6E+C64Yfv1cQ7kz7rIZviUmN+EgEM=
|
||||
gopkg.in/yaml.v3 v3.0.1 h1:fxVm/GzAzEWqLHuvctI91KS9hhNmmWOoWu0XTYJS7CA=
|
||||
gopkg.in/yaml.v3 v3.0.1/go.mod h1:K4uyk7z7BCEPqu6E+C64Yfv1cQ7kz7rIZviUmN+EgEM=
|
||||
lukechampine.com/uint128 v1.2.0 h1:mBi/5l91vocEN8otkC5bDLhi2KdCticRiwbdB0O+rjI=
|
||||
|
|
|
@ -16,14 +16,9 @@ import (
|
|||
type ActionJobList []*ActionRunJob
|
||||
|
||||
func (jobs ActionJobList) GetRunIDs() []int64 {
|
||||
ids := make(container.Set[int64], len(jobs))
|
||||
for _, j := range jobs {
|
||||
if j.RunID == 0 {
|
||||
continue
|
||||
}
|
||||
ids.Add(j.RunID)
|
||||
}
|
||||
return ids.Values()
|
||||
return container.FilterSlice(jobs, func(j *ActionRunJob) (int64, bool) {
|
||||
return j.RunID, j.RunID != 0
|
||||
})
|
||||
}
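// The refactor above (and the equivalent changes in the lists below) replaces
// hand-written container.Set loops with container.FilterSlice. A minimal sketch
// of such a helper, assuming it mirrors the Set-based loops it replaces (the
// real implementation in modules/container may differ in details):
func FilterSlice[E any, T comparable](s []E, include func(E) (T, bool)) []T {
	seen := make(map[T]struct{}, len(s))
	out := make([]T, 0, len(s))
	for _, e := range s {
		v, ok := include(e)
		if !ok {
			continue
		}
		if _, dup := seen[v]; dup {
			continue // de-duplicate, like container.Set did
		}
		seen[v] = struct{}{}
		out = append(out, v)
	}
	return out
}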
|
||||
|
||||
func (jobs ActionJobList) LoadRuns(ctx context.Context, withRepo bool) error {
|
||||
|
|
|
@ -19,19 +19,15 @@ type RunList []*ActionRun
|
|||
|
||||
// GetUserIDs returns a slice of user IDs
|
||||
func (runs RunList) GetUserIDs() []int64 {
|
||||
ids := make(container.Set[int64], len(runs))
|
||||
for _, run := range runs {
|
||||
ids.Add(run.TriggerUserID)
|
||||
}
|
||||
return ids.Values()
|
||||
return container.FilterSlice(runs, func(run *ActionRun) (int64, bool) {
|
||||
return run.TriggerUserID, true
|
||||
})
|
||||
}
|
||||
|
||||
func (runs RunList) GetRepoIDs() []int64 {
|
||||
ids := make(container.Set[int64], len(runs))
|
||||
for _, run := range runs {
|
||||
ids.Add(run.RepoID)
|
||||
}
|
||||
return ids.Values()
|
||||
return container.FilterSlice(runs, func(run *ActionRun) (int64, bool) {
|
||||
return run.RepoID, true
|
||||
})
|
||||
}
|
||||
|
||||
func (runs RunList) LoadTriggerUser(ctx context.Context) error {
|
||||
|
|
|
@ -16,14 +16,9 @@ type RunnerList []*ActionRunner
|
|||
|
||||
// GetUserIDs returns a slice of user IDs
|
||||
func (runners RunnerList) GetUserIDs() []int64 {
|
||||
ids := make(container.Set[int64], len(runners))
|
||||
for _, runner := range runners {
|
||||
if runner.OwnerID == 0 {
|
||||
continue
|
||||
}
|
||||
ids.Add(runner.OwnerID)
|
||||
}
|
||||
return ids.Values()
|
||||
return container.FilterSlice(runners, func(runner *ActionRunner) (int64, bool) {
|
||||
return runner.OwnerID, runner.OwnerID != 0
|
||||
})
|
||||
}
|
||||
|
||||
func (runners RunnerList) LoadOwners(ctx context.Context) error {
|
||||
|
@ -41,16 +36,9 @@ func (runners RunnerList) LoadOwners(ctx context.Context) error {
|
|||
}
|
||||
|
||||
func (runners RunnerList) getRepoIDs() []int64 {
|
||||
repoIDs := make(container.Set[int64], len(runners))
|
||||
for _, runner := range runners {
|
||||
if runner.RepoID == 0 {
|
||||
continue
|
||||
}
|
||||
if _, ok := repoIDs[runner.RepoID]; !ok {
|
||||
repoIDs[runner.RepoID] = struct{}{}
|
||||
}
|
||||
}
|
||||
return repoIDs.Values()
|
||||
return container.FilterSlice(runners, func(runner *ActionRunner) (int64, bool) {
|
||||
return runner.RepoID, runner.RepoID > 0
|
||||
})
|
||||
}
|
||||
|
||||
func (runners RunnerList) LoadRepos(ctx context.Context) error {
|
||||
|
|
|
@ -18,19 +18,15 @@ type ScheduleList []*ActionSchedule
|
|||
|
||||
// GetUserIDs returns a slice of user IDs
|
||||
func (schedules ScheduleList) GetUserIDs() []int64 {
|
||||
ids := make(container.Set[int64], len(schedules))
|
||||
for _, schedule := range schedules {
|
||||
ids.Add(schedule.TriggerUserID)
|
||||
}
|
||||
return ids.Values()
|
||||
return container.FilterSlice(schedules, func(schedule *ActionSchedule) (int64, bool) {
|
||||
return schedule.TriggerUserID, true
|
||||
})
|
||||
}
|
||||
|
||||
func (schedules ScheduleList) GetRepoIDs() []int64 {
|
||||
ids := make(container.Set[int64], len(schedules))
|
||||
for _, schedule := range schedules {
|
||||
ids.Add(schedule.RepoID)
|
||||
}
|
||||
return ids.Values()
|
||||
return container.FilterSlice(schedules, func(schedule *ActionSchedule) (int64, bool) {
|
||||
return schedule.RepoID, true
|
||||
})
|
||||
}
|
||||
|
||||
func (schedules ScheduleList) LoadTriggerUser(ctx context.Context) error {
|
||||
|
@ -44,6 +40,9 @@ func (schedules ScheduleList) LoadTriggerUser(ctx context.Context) error {
|
|||
schedule.TriggerUser = user_model.NewActionsUser()
|
||||
} else {
|
||||
schedule.TriggerUser = users[schedule.TriggerUserID]
|
||||
if schedule.TriggerUser == nil {
|
||||
schedule.TriggerUser = user_model.NewGhostUser()
|
||||
}
|
||||
}
|
||||
}
|
||||
return nil
|
||||
|
|
|
@ -16,11 +16,9 @@ import (
|
|||
type SpecList []*ActionScheduleSpec
|
||||
|
||||
func (specs SpecList) GetScheduleIDs() []int64 {
|
||||
ids := make(container.Set[int64], len(specs))
|
||||
for _, spec := range specs {
|
||||
ids.Add(spec.ScheduleID)
|
||||
}
|
||||
return ids.Values()
|
||||
return container.FilterSlice(specs, func(spec *ActionScheduleSpec) (int64, bool) {
|
||||
return spec.ScheduleID, true
|
||||
})
|
||||
}
|
||||
|
||||
func (specs SpecList) LoadSchedules(ctx context.Context) error {
|
||||
|
@ -46,11 +44,9 @@ func (specs SpecList) LoadSchedules(ctx context.Context) error {
|
|||
}
|
||||
|
||||
func (specs SpecList) GetRepoIDs() []int64 {
|
||||
ids := make(container.Set[int64], len(specs))
|
||||
for _, spec := range specs {
|
||||
ids.Add(spec.RepoID)
|
||||
}
|
||||
return ids.Values()
|
||||
return container.FilterSlice(specs, func(spec *ActionScheduleSpec) (int64, bool) {
|
||||
return spec.RepoID, true
|
||||
})
|
||||
}
|
||||
|
||||
func (specs SpecList) LoadRepos(ctx context.Context) error {
|
||||
|
|
|
@ -11,6 +11,7 @@ import (
|
|||
|
||||
auth_model "code.gitea.io/gitea/models/auth"
|
||||
"code.gitea.io/gitea/models/db"
|
||||
"code.gitea.io/gitea/models/unit"
|
||||
"code.gitea.io/gitea/modules/container"
|
||||
"code.gitea.io/gitea/modules/log"
|
||||
"code.gitea.io/gitea/modules/setting"
|
||||
|
@ -227,7 +228,9 @@ func CreateTaskForRunner(ctx context.Context, runner *ActionRunner) (*ActionTask
|
|||
if runner.RepoID != 0 {
|
||||
jobCond = builder.Eq{"repo_id": runner.RepoID}
|
||||
} else if runner.OwnerID != 0 {
|
||||
jobCond = builder.In("repo_id", builder.Select("id").From("repository").Where(builder.Eq{"owner_id": runner.OwnerID}))
|
||||
jobCond = builder.In("repo_id", builder.Select("`repository`.id").From("repository").
|
||||
Join("INNER", "repo_unit", "`repository`.id = `repo_unit`.repo_id").
|
||||
Where(builder.Eq{"`repository`.owner_id": runner.OwnerID, "`repo_unit`.type": unit.TypeActions}))
|
||||
}
|
||||
if jobCond.IsValid() {
|
||||
jobCond = builder.In("run_id", builder.Select("id").From("action_run").Where(jobCond))
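// For owner-level runners, the builder expression above roughly corresponds to
// the following nested subquery (a sketch; the exact SQL and quoting produced
// by xorm/builder depend on the database dialect):
//
//	run_id IN (
//	    SELECT id FROM action_run WHERE repo_id IN (
//	        SELECT `repository`.id
//	        FROM repository
//	        INNER JOIN repo_unit ON `repository`.id = `repo_unit`.repo_id
//	        WHERE `repository`.owner_id = ? AND `repo_unit`.type = ? -- unit.TypeActions
//	    )
//	)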
|
||||
|
|
|
@ -16,14 +16,9 @@ import (
|
|||
type TaskList []*ActionTask
|
||||
|
||||
func (tasks TaskList) GetJobIDs() []int64 {
|
||||
ids := make(container.Set[int64], len(tasks))
|
||||
for _, t := range tasks {
|
||||
if t.JobID == 0 {
|
||||
continue
|
||||
}
|
||||
ids.Add(t.JobID)
|
||||
}
|
||||
return ids.Values()
|
||||
return container.FilterSlice(tasks, func(t *ActionTask) (int64, bool) {
|
||||
return t.JobID, t.JobID != 0
|
||||
})
|
||||
}
|
||||
|
||||
func (tasks TaskList) LoadJobs(ctx context.Context) error {
|
||||
|
|
|
@ -22,11 +22,9 @@ import (
|
|||
type ActionList []*Action
|
||||
|
||||
func (actions ActionList) getUserIDs() []int64 {
|
||||
userIDs := make(container.Set[int64], len(actions))
|
||||
for _, action := range actions {
|
||||
userIDs.Add(action.ActUserID)
|
||||
}
|
||||
return userIDs.Values()
|
||||
return container.FilterSlice(actions, func(action *Action) (int64, bool) {
|
||||
return action.ActUserID, true
|
||||
})
|
||||
}
|
||||
|
||||
func (actions ActionList) LoadActUsers(ctx context.Context) (map[int64]*user_model.User, error) {
|
||||
|
@ -50,11 +48,9 @@ func (actions ActionList) LoadActUsers(ctx context.Context) (map[int64]*user_mod
|
|||
}
|
||||
|
||||
func (actions ActionList) getRepoIDs() []int64 {
|
||||
repoIDs := make(container.Set[int64], len(actions))
|
||||
for _, action := range actions {
|
||||
repoIDs.Add(action.RepoID)
|
||||
}
|
||||
return repoIDs.Values()
|
||||
return container.FilterSlice(actions, func(action *Action) (int64, bool) {
|
||||
return action.RepoID, true
|
||||
})
|
||||
}
|
||||
|
||||
func (actions ActionList) LoadRepositories(ctx context.Context) error {
|
||||
|
@ -80,18 +76,16 @@ func (actions ActionList) loadRepoOwner(ctx context.Context, userMap map[int64]*
|
|||
userMap = make(map[int64]*user_model.User)
|
||||
}
|
||||
|
||||
userSet := make(container.Set[int64], len(actions))
|
||||
for _, action := range actions {
|
||||
missingUserIDs := container.FilterSlice(actions, func(action *Action) (int64, bool) {
|
||||
if action.Repo == nil {
|
||||
continue
|
||||
return 0, false
|
||||
}
|
||||
if _, ok := userMap[action.Repo.OwnerID]; !ok {
|
||||
userSet.Add(action.Repo.OwnerID)
|
||||
}
|
||||
}
|
||||
_, alreadyLoaded := userMap[action.Repo.OwnerID]
|
||||
return action.Repo.OwnerID, !alreadyLoaded
|
||||
})
|
||||
|
||||
if err := db.GetEngine(ctx).
|
||||
In("id", userSet.Values()).
|
||||
In("id", missingUserIDs).
|
||||
Find(&userMap); err != nil {
|
||||
return fmt.Errorf("find user: %w", err)
|
||||
}
|
||||
|
|
|
@ -190,14 +190,12 @@ func (nl NotificationList) LoadAttributes(ctx context.Context) error {
|
|||
}
|
||||
|
||||
func (nl NotificationList) getPendingRepoIDs() []int64 {
|
||||
ids := make(container.Set[int64], len(nl))
|
||||
for _, notification := range nl {
|
||||
if notification.Repository != nil {
|
||||
continue
|
||||
return container.FilterSlice(nl, func(n *Notification) (int64, bool) {
|
||||
if n.Repository != nil {
|
||||
return 0, false
|
||||
}
|
||||
ids.Add(notification.RepoID)
|
||||
}
|
||||
return ids.Values()
|
||||
return n.RepoID, true
|
||||
})
|
||||
}
|
||||
|
||||
// LoadRepos loads repositories from database
|
||||
|
|
|
@ -139,13 +139,7 @@ func ParseCommitWithSignature(ctx context.Context, c *git.Commit) *CommitVerific
|
|||
}
|
||||
}
|
||||
|
||||
keyID := ""
|
||||
if sig.IssuerKeyId != nil && (*sig.IssuerKeyId) != 0 {
|
||||
keyID = fmt.Sprintf("%X", *sig.IssuerKeyId)
|
||||
}
|
||||
if keyID == "" && sig.IssuerFingerprint != nil && len(sig.IssuerFingerprint) > 0 {
|
||||
keyID = fmt.Sprintf("%X", sig.IssuerFingerprint[12:20])
|
||||
}
|
||||
keyID := tryGetKeyIDFromSignature(sig)
|
||||
defaultReason := NoKeyFound
|
||||
|
||||
// First check if the sig has a keyID and if so just look at that
|
||||
|
|
|
@ -134,3 +134,13 @@ func extractSignature(s string) (*packet.Signature, error) {
|
|||
}
|
||||
return sig, nil
|
||||
}
|
||||
|
||||
func tryGetKeyIDFromSignature(sig *packet.Signature) string {
|
||||
if sig.IssuerKeyId != nil && (*sig.IssuerKeyId) != 0 {
|
||||
return fmt.Sprintf("%016X", *sig.IssuerKeyId)
|
||||
}
|
||||
if sig.IssuerFingerprint != nil && len(sig.IssuerFingerprint) > 0 {
|
||||
return fmt.Sprintf("%016X", sig.IssuerFingerprint[12:20])
|
||||
}
|
||||
return ""
|
||||
}
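// Why the verb changed from %X to %016X: GPG key IDs are conventionally shown
// as 16 zero-padded hex digits, so an ID whose top nibble is zero would
// otherwise lose its leading zero. For example (values are illustrative):
//
//	fmt.Sprintf("%X", uint64(0x038D1A3EADDBEA9C))    // "38D1A3EADDBEA9C"  – 15 digits
//	fmt.Sprintf("%016X", uint64(0x038D1A3EADDBEA9C)) // "038D1A3EADDBEA9C" – zero-padded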
|
||||
|
|
|
@ -11,7 +11,9 @@ import (
|
|||
"code.gitea.io/gitea/models/unittest"
|
||||
user_model "code.gitea.io/gitea/models/user"
|
||||
"code.gitea.io/gitea/modules/timeutil"
|
||||
"code.gitea.io/gitea/modules/util"
|
||||
|
||||
"github.com/keybase/go-crypto/openpgp/packet"
|
||||
"github.com/stretchr/testify/assert"
|
||||
)
|
||||
|
||||
|
@ -391,3 +393,13 @@ epiDVQ==
|
|||
assert.Equal(t, time.Unix(1586105389, 0), expire)
|
||||
}
|
||||
}
|
||||
|
||||
func TestTryGetKeyIDFromSignature(t *testing.T) {
|
||||
assert.Empty(t, tryGetKeyIDFromSignature(&packet.Signature{}))
|
||||
assert.Equal(t, "038D1A3EADDBEA9C", tryGetKeyIDFromSignature(&packet.Signature{
|
||||
IssuerKeyId: util.ToPointer(uint64(0x38D1A3EADDBEA9C)),
|
||||
}))
|
||||
assert.Equal(t, "038D1A3EADDBEA9C", tryGetKeyIDFromSignature(&packet.Signature{
|
||||
IssuerFingerprint: []uint8{0xb, 0x23, 0x24, 0xc7, 0xe6, 0xfe, 0x4f, 0x3a, 0x6, 0x26, 0xc1, 0x21, 0x3, 0x8d, 0x1a, 0x3e, 0xad, 0xdb, 0xea, 0x9c},
|
||||
}))
|
||||
}
|
||||
|
|
|
@ -76,23 +76,14 @@ func calcFingerprintNative(publicKeyContent string) (string, error) {
|
|||
// CalcFingerprint calculates the public key's fingerprint
|
||||
func CalcFingerprint(publicKeyContent string) (string, error) {
|
||||
// Call the method based on configuration
|
||||
var (
|
||||
fnName, fp string
|
||||
err error
|
||||
)
|
||||
if len(setting.SSH.KeygenPath) == 0 {
|
||||
fnName = "calcFingerprintNative"
|
||||
fp, err = calcFingerprintNative(publicKeyContent)
|
||||
} else {
|
||||
fnName = "calcFingerprintSSHKeygen"
|
||||
fp, err = calcFingerprintSSHKeygen(publicKeyContent)
|
||||
}
|
||||
useNative := setting.SSH.KeygenPath == ""
|
||||
calcFn := util.Iif(useNative, calcFingerprintNative, calcFingerprintSSHKeygen)
|
||||
fp, err := calcFn(publicKeyContent)
|
||||
if err != nil {
|
||||
if IsErrKeyUnableVerify(err) {
|
||||
log.Info("%s", publicKeyContent)
|
||||
return "", err
|
||||
}
|
||||
return "", fmt.Errorf("%s: %w", fnName, err)
|
||||
return "", fmt.Errorf("CalcFingerprint(%s): %w", util.Iif(useNative, "native", "ssh-keygen"), err)
|
||||
}
|
||||
return fp, nil
|
||||
}
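// util.Iif is used above as a generic conditional-expression helper. A minimal
// sketch of such a helper (the actual signature in modules/util may differ):
func Iif[T any](condition bool, trueVal, falseVal T) T {
	if condition {
		return trueVal
	}
	return falseVal
}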
|
||||
|
|
|
@ -137,6 +137,11 @@ func (app *OAuth2Application) TableName() string {
|
|||
|
||||
// ContainsRedirectURI checks if redirectURI is allowed for app
|
||||
func (app *OAuth2Application) ContainsRedirectURI(redirectURI string) bool {
|
||||
// OAuth2 requires the redirect URI to be an exact match, no dynamic parts are allowed.
|
||||
// https://stackoverflow.com/questions/55524480/should-dynamic-query-parameters-be-present-in-the-redirection-uri-for-an-oauth2
|
||||
// https://www.rfc-editor.org/rfc/rfc6819#section-5.2.3.3
|
||||
// https://openid.net/specs/openid-connect-core-1_0.html#AuthRequest
|
||||
// https://datatracker.ietf.org/doc/html/draft-ietf-oauth-security-topics-12#section-3.1
|
||||
contains := func(s string) bool {
|
||||
s = strings.TrimSuffix(strings.ToLower(s), "/")
|
||||
for _, u := range app.RedirectURIs {
|
||||
|
|
|
@ -10,21 +10,21 @@ import (
|
|||
)
|
||||
|
||||
// CountOrphanedObjects counts subjects that no longer have an existing refObject
|
||||
func CountOrphanedObjects(ctx context.Context, subject, refobject, joinCond string) (int64, error) {
|
||||
func CountOrphanedObjects(ctx context.Context, subject, refObject, joinCond string) (int64, error) {
|
||||
return GetEngine(ctx).
|
||||
Table("`"+subject+"`").
|
||||
Join("LEFT", "`"+refobject+"`", joinCond).
|
||||
Where(builder.IsNull{"`" + refobject + "`.id"}).
|
||||
Join("LEFT", "`"+refObject+"`", joinCond).
|
||||
Where(builder.IsNull{"`" + refObject + "`.id"}).
|
||||
Select("COUNT(`" + subject + "`.`id`)").
|
||||
Count()
|
||||
}
|
||||
|
||||
// DeleteOrphanedObjects deletes subjects that no longer have an existing refObject
|
||||
func DeleteOrphanedObjects(ctx context.Context, subject, refobject, joinCond string) error {
|
||||
func DeleteOrphanedObjects(ctx context.Context, subject, refObject, joinCond string) error {
|
||||
subQuery := builder.Select("`"+subject+"`.id").
|
||||
From("`"+subject+"`").
|
||||
Join("LEFT", "`"+refobject+"`", joinCond).
|
||||
Where(builder.IsNull{"`" + refobject + "`.id"})
|
||||
Join("LEFT", "`"+refObject+"`", joinCond).
|
||||
Where(builder.IsNull{"`" + refObject + "`.id"})
|
||||
b := builder.Delete(builder.In("id", subQuery)).From("`" + subject + "`")
|
||||
_, err := GetEngine(ctx).Exec(b)
|
||||
return err
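// A concrete feel for the statement built above: deleting, for example, comments
// whose issue row no longer exists comes out roughly as the SQL below (a sketch;
// table names are illustrative and exact quoting varies by database):
//
//	DELETE FROM `comment` WHERE id IN (
//	    SELECT `comment`.id
//	    FROM `comment`
//	    LEFT JOIN `issue` ON comment.issue_id = issue.id
//	    WHERE `issue`.id IS NULL
//	)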
|
||||
|
|
|
@ -21,9 +21,9 @@ import (
|
|||
"xorm.io/xorm/names"
|
||||
"xorm.io/xorm/schemas"
|
||||
|
||||
_ "github.com/denisenkom/go-mssqldb" // Needed for the MSSQL driver
|
||||
_ "github.com/go-sql-driver/mysql" // Needed for the MySQL driver
|
||||
_ "github.com/lib/pq" // Needed for the Postgresql driver
|
||||
_ "github.com/go-sql-driver/mysql" // Needed for the MySQL driver
|
||||
_ "github.com/lib/pq" // Needed for the Postgresql driver
|
||||
_ "github.com/microsoft/go-mssqldb" // Needed for the MSSQL driver
|
||||
)
|
||||
|
||||
var (
|
||||
|
@ -284,8 +284,8 @@ func MaxBatchInsertSize(bean any) int {
|
|||
}
|
||||
|
||||
// IsTableNotEmpty returns true if table has at least one record
|
||||
func IsTableNotEmpty(tableName string) (bool, error) {
|
||||
return x.Table(tableName).Exist()
|
||||
func IsTableNotEmpty(beanOrTableName any) (bool, error) {
|
||||
return x.Table(beanOrTableName).Exist()
|
||||
}
|
||||
|
||||
// DeleteAllRecords will delete all the records of this table
|
||||
|
|
|
@ -297,6 +297,7 @@ func RenameBranch(ctx context.Context, repo *repo_model.Repository, from, to str
|
|||
|
||||
sess := db.GetEngine(ctx)
|
||||
|
||||
// check whether the source ("from") branch exists
|
||||
var branch Branch
|
||||
exist, err := db.GetEngine(ctx).Where("repo_id=? AND name=?", repo.ID, from).Get(&branch)
|
||||
if err != nil {
|
||||
|
@ -308,6 +309,24 @@ func RenameBranch(ctx context.Context, repo *repo_model.Repository, from, to str
|
|||
}
|
||||
}
|
||||
|
||||
// check whether the target ("to") branch exists or is soft-deleted (is_deleted)
|
||||
var dstBranch Branch
|
||||
exist, err = db.GetEngine(ctx).Where("repo_id=? AND name=?", repo.ID, to).Get(&dstBranch)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
if exist {
|
||||
if !dstBranch.IsDeleted {
|
||||
return ErrBranchAlreadyExists{
|
||||
BranchName: to,
|
||||
}
|
||||
}
|
||||
|
||||
if _, err := db.GetEngine(ctx).ID(dstBranch.ID).NoAutoCondition().Delete(&dstBranch); err != nil {
|
||||
return err
|
||||
}
|
||||
}
|
||||
|
||||
// 1. update branch in database
|
||||
if n, err := sess.Where("repo_id=? AND name=?", repo.ID, from).Update(&Branch{
|
||||
Name: to,
|
||||
|
@ -362,12 +381,7 @@ func RenameBranch(ctx context.Context, repo *repo_model.Repository, from, to str
|
|||
return err
|
||||
}
|
||||
|
||||
// 5. do git action
|
||||
if err = gitAction(ctx, isDefault); err != nil {
|
||||
return err
|
||||
}
|
||||
|
||||
// 6. insert renamed branch record
|
||||
// 5. insert renamed branch record
|
||||
renamedBranch := &RenamedBranch{
|
||||
RepoID: repo.ID,
|
||||
From: from,
|
||||
|
@ -378,6 +392,11 @@ func RenameBranch(ctx context.Context, repo *repo_model.Repository, from, to str
|
|||
return err
|
||||
}
|
||||
|
||||
// 6. do git action
|
||||
if err = gitAction(ctx, isDefault); err != nil {
|
||||
return err
|
||||
}
|
||||
|
||||
return committer.Commit()
|
||||
}
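// Caller-side sketch of the behaviour this change enables (branch names and the
// gitAction callback below are placeholders, not taken from the real call sites):
// renaming onto a name that only survives as a soft-deleted branch record now
// succeeds, because that record is purged first, while renaming onto a live
// branch still returns ErrBranchAlreadyExists.
//
//	err := RenameBranch(ctx, repo, "old-feature", "feature", gitAction) // ok if "feature" is only soft-deleted
//	err = RenameBranch(ctx, repo, "old-feature", "main", gitAction)     // ErrBranchAlreadyExists{BranchName: "main"}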
|
||||
|
||||
|
|
|
@ -17,15 +17,12 @@ import (
|
|||
type BranchList []*Branch
|
||||
|
||||
func (branches BranchList) LoadDeletedBy(ctx context.Context) error {
|
||||
ids := container.Set[int64]{}
|
||||
for _, branch := range branches {
|
||||
if !branch.IsDeleted {
|
||||
continue
|
||||
}
|
||||
ids.Add(branch.DeletedByID)
|
||||
}
|
||||
ids := container.FilterSlice(branches, func(branch *Branch) (int64, bool) {
|
||||
return branch.DeletedByID, branch.IsDeleted
|
||||
})
|
||||
|
||||
usersMap := make(map[int64]*user_model.User, len(ids))
|
||||
if err := db.GetEngine(ctx).In("id", ids.Values()).Find(&usersMap); err != nil {
|
||||
if err := db.GetEngine(ctx).In("id", ids).Find(&usersMap); err != nil {
|
||||
return err
|
||||
}
|
||||
for _, branch := range branches {
|
||||
|
@ -41,14 +38,13 @@ func (branches BranchList) LoadDeletedBy(ctx context.Context) error {
|
|||
}
|
||||
|
||||
func (branches BranchList) LoadPusher(ctx context.Context) error {
|
||||
ids := container.Set[int64]{}
|
||||
for _, branch := range branches {
|
||||
if branch.PusherID > 0 { // pusher_id maybe zero because some branches are sync by backend with no pusher
|
||||
ids.Add(branch.PusherID)
|
||||
}
|
||||
}
|
||||
ids := container.FilterSlice(branches, func(branch *Branch) (int64, bool) {
|
||||
// pusher_id maybe zero because some branches are sync by backend with no pusher
|
||||
return branch.PusherID, branch.PusherID > 0
|
||||
})
|
||||
|
||||
usersMap := make(map[int64]*user_model.User, len(ids))
|
||||
if err := db.GetEngine(ctx).In("id", ids.Values()).Find(&usersMap); err != nil {
|
||||
if err := db.GetEngine(ctx).In("id", ids).Find(&usersMap); err != nil {
|
||||
return err
|
||||
}
|
||||
for _, branch := range branches {
|
||||
|
|
|
@ -292,30 +292,27 @@ func GetLatestCommitStatus(ctx context.Context, repoID int64, sha string, listOp
|
|||
}
|
||||
|
||||
// GetLatestCommitStatusForPairs returns all statuses with a unique context for a given list of repo-sha pairs
|
||||
func GetLatestCommitStatusForPairs(ctx context.Context, repoIDsToLatestCommitSHAs map[int64]string, listOptions db.ListOptions) (map[int64][]*CommitStatus, error) {
|
||||
func GetLatestCommitStatusForPairs(ctx context.Context, repoSHAs []RepoSHA) (map[int64][]*CommitStatus, error) {
|
||||
type result struct {
|
||||
Index int64
|
||||
RepoID int64
|
||||
SHA string
|
||||
}
|
||||
|
||||
results := make([]result, 0, len(repoIDsToLatestCommitSHAs))
|
||||
results := make([]result, 0, len(repoSHAs))
|
||||
|
||||
getBase := func() *xorm.Session {
|
||||
return db.GetEngine(ctx).Table(&CommitStatus{})
|
||||
}
|
||||
|
||||
// Create a disjunction of conditions for each repoID and SHA pair
|
||||
conds := make([]builder.Cond, 0, len(repoIDsToLatestCommitSHAs))
|
||||
for repoID, sha := range repoIDsToLatestCommitSHAs {
|
||||
conds = append(conds, builder.Eq{"repo_id": repoID, "sha": sha})
|
||||
conds := make([]builder.Cond, 0, len(repoSHAs))
|
||||
for _, repoSHA := range repoSHAs {
|
||||
conds = append(conds, builder.Eq{"repo_id": repoSHA.RepoID, "sha": repoSHA.SHA})
|
||||
}
|
||||
sess := getBase().Where(builder.Or(conds...)).
|
||||
Select("max( `index` ) as `index`, repo_id").
|
||||
GroupBy("context_hash, repo_id").OrderBy("max( `index` ) desc")
|
||||
|
||||
if !listOptions.IsListAll() {
|
||||
sess = db.SetSessionPagination(sess, &listOptions)
|
||||
}
|
||||
Select("max( `index` ) as `index`, repo_id, sha").
|
||||
GroupBy("context_hash, repo_id, sha").OrderBy("max( `index` ) desc")
|
||||
|
||||
err := sess.Find(&results)
|
||||
if err != nil {
|
||||
|
@ -332,7 +329,7 @@ func GetLatestCommitStatusForPairs(ctx context.Context, repoIDsToLatestCommitSHA
|
|||
cond := builder.Eq{
|
||||
"`index`": result.Index,
|
||||
"repo_id": result.RepoID,
|
||||
"sha": repoIDsToLatestCommitSHAs[result.RepoID],
|
||||
"sha": result.SHA,
|
||||
}
|
||||
conds = append(conds, cond)
|
||||
}
|
||||
|
|
models/git/commit_status_summary.go (new file, 88 lines)
|
@ -0,0 +1,88 @@
|
|||
// Copyright 2024 Gitea. All rights reserved.
|
||||
// SPDX-License-Identifier: MIT
|
||||
|
||||
package git
|
||||
|
||||
import (
|
||||
"context"
|
||||
|
||||
"code.gitea.io/gitea/models/db"
|
||||
"code.gitea.io/gitea/modules/setting"
|
||||
api "code.gitea.io/gitea/modules/structs"
|
||||
|
||||
"xorm.io/builder"
|
||||
)
|
||||
|
||||
// CommitStatusSummary holds the latest commit Status of a single Commit
|
||||
type CommitStatusSummary struct {
|
||||
ID int64 `xorm:"pk autoincr"`
|
||||
RepoID int64 `xorm:"INDEX UNIQUE(repo_id_sha)"`
|
||||
SHA string `xorm:"VARCHAR(64) NOT NULL INDEX UNIQUE(repo_id_sha)"`
|
||||
State api.CommitStatusState `xorm:"VARCHAR(7) NOT NULL"`
|
||||
TargetURL string `xorm:"TEXT"`
|
||||
}
|
||||
|
||||
func init() {
|
||||
db.RegisterModel(new(CommitStatusSummary))
|
||||
}
|
||||
|
||||
type RepoSHA struct {
|
||||
RepoID int64
|
||||
SHA string
|
||||
}
|
||||
|
||||
func GetLatestCommitStatusForRepoAndSHAs(ctx context.Context, repoSHAs []RepoSHA) ([]*CommitStatus, error) {
|
||||
cond := builder.NewCond()
|
||||
for _, rs := range repoSHAs {
|
||||
cond = cond.Or(builder.Eq{"repo_id": rs.RepoID, "sha": rs.SHA})
|
||||
}
|
||||
|
||||
var summaries []CommitStatusSummary
|
||||
if err := db.GetEngine(ctx).Where(cond).Find(&summaries); err != nil {
|
||||
return nil, err
|
||||
}
|
||||
|
||||
commitStatuses := make([]*CommitStatus, 0, len(repoSHAs))
|
||||
for _, summary := range summaries {
|
||||
commitStatuses = append(commitStatuses, &CommitStatus{
|
||||
RepoID: summary.RepoID,
|
||||
SHA: summary.SHA,
|
||||
State: summary.State,
|
||||
TargetURL: summary.TargetURL,
|
||||
})
|
||||
}
|
||||
return commitStatuses, nil
|
||||
}
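// Usage sketch for the batch summary lookup (repo IDs and SHAs are placeholders):
//
//	statuses, err := GetLatestCommitStatusForRepoAndSHAs(ctx, []RepoSHA{
//		{RepoID: 1, SHA: "0123456789abcdef0123456789abcdef01234567"},
//		{RepoID: 2, SHA: "89abcdef0123456789abcdef0123456789abcdef"},
//	})
//	// each returned CommitStatus carries the combined State/TargetURL recorded by
//	// UpdateCommitStatusSummary below, one entry per (repo, sha) found.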
|
||||
|
||||
func UpdateCommitStatusSummary(ctx context.Context, repoID int64, sha string) error {
|
||||
commitStatuses, _, err := GetLatestCommitStatus(ctx, repoID, sha, db.ListOptionsAll)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
state := CalcCommitStatus(commitStatuses)
|
||||
// MySQL reports 0 affected rows when an UPDATE does not actually change the record's state,
// which differs from other databases, so we use INSERT ... ON DUPLICATE KEY UPDATE there instead
|
||||
if setting.Database.Type.IsMySQL() {
|
||||
_, err := db.GetEngine(ctx).Exec("INSERT INTO commit_status_summary (repo_id,sha,state,target_url) VALUES (?,?,?,?) ON DUPLICATE KEY UPDATE state=?",
|
||||
repoID, sha, state.State, state.TargetURL, state.State)
|
||||
return err
|
||||
}
|
||||
|
||||
if cnt, err := db.GetEngine(ctx).Where("repo_id=? AND sha=?", repoID, sha).
|
||||
Cols("state, target_url").
|
||||
Update(&CommitStatusSummary{
|
||||
State: state.State,
|
||||
TargetURL: state.TargetURL,
|
||||
}); err != nil {
|
||||
return err
|
||||
} else if cnt == 0 {
|
||||
_, err = db.GetEngine(ctx).Insert(&CommitStatusSummary{
|
||||
RepoID: repoID,
|
||||
SHA: sha,
|
||||
State: state.State,
|
||||
TargetURL: state.TargetURL,
|
||||
})
|
||||
return err
|
||||
}
|
||||
return nil
|
||||
}
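// Hypothetical wiring sketch (the real call sites live in the services layer and
// may differ): whenever a status is stored, refresh the per-commit summary row so
// list pages can read one row instead of aggregating the commit_status table.
func storeStatusAndRefreshSummary(ctx context.Context, status *CommitStatus) error {
	if err := db.Insert(ctx, status); err != nil { // assumes a plain insert; real code goes through NewCommitStatus
		return err
	}
	return UpdateCommitStatusSummary(ctx, status.RepoID, status.SHA)
}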
|
|
@ -1272,10 +1272,9 @@ func InsertIssueComments(ctx context.Context, comments []*Comment) error {
|
|||
return nil
|
||||
}
|
||||
|
||||
issueIDs := make(container.Set[int64])
|
||||
for _, comment := range comments {
|
||||
issueIDs.Add(comment.IssueID)
|
||||
}
|
||||
issueIDs := container.FilterSlice(comments, func(comment *Comment) (int64, bool) {
|
||||
return comment.IssueID, true
|
||||
})
|
||||
|
||||
ctx, committer, err := db.TxContext(ctx)
|
||||
if err != nil {
|
||||
|
@ -1298,7 +1297,7 @@ func InsertIssueComments(ctx context.Context, comments []*Comment) error {
|
|||
}
|
||||
}
|
||||
|
||||
for issueID := range issueIDs {
|
||||
for _, issueID := range issueIDs {
|
||||
if _, err := db.Exec(ctx, "UPDATE issue set num_comments = (SELECT count(*) FROM comment WHERE issue_id = ? AND `type`=?) WHERE id = ?",
|
||||
issueID, CommentTypeComment, issueID); err != nil {
|
||||
return err
|
||||
|
|
|
@ -17,13 +17,9 @@ import (
|
|||
type CommentList []*Comment
|
||||
|
||||
func (comments CommentList) getPosterIDs() []int64 {
|
||||
posterIDs := make(container.Set[int64], len(comments))
|
||||
for _, comment := range comments {
|
||||
if comment.PosterID > 0 {
|
||||
posterIDs.Add(comment.PosterID)
|
||||
}
|
||||
}
|
||||
return posterIDs.Values()
|
||||
return container.FilterSlice(comments, func(c *Comment) (int64, bool) {
|
||||
return c.PosterID, c.PosterID > 0
|
||||
})
|
||||
}
|
||||
|
||||
// LoadPosters loads posters
|
||||
|
@ -44,13 +40,9 @@ func (comments CommentList) LoadPosters(ctx context.Context) error {
|
|||
}
|
||||
|
||||
func (comments CommentList) getLabelIDs() []int64 {
|
||||
ids := make(container.Set[int64], len(comments))
|
||||
for _, comment := range comments {
|
||||
if comment.LabelID > 0 {
|
||||
ids.Add(comment.LabelID)
|
||||
}
|
||||
}
|
||||
return ids.Values()
|
||||
return container.FilterSlice(comments, func(comment *Comment) (int64, bool) {
|
||||
return comment.LabelID, comment.LabelID > 0
|
||||
})
|
||||
}
|
||||
|
||||
func (comments CommentList) loadLabels(ctx context.Context) error {
|
||||
|
@ -94,13 +86,9 @@ func (comments CommentList) loadLabels(ctx context.Context) error {
|
|||
}
|
||||
|
||||
func (comments CommentList) getMilestoneIDs() []int64 {
|
||||
ids := make(container.Set[int64], len(comments))
|
||||
for _, comment := range comments {
|
||||
if comment.MilestoneID > 0 {
|
||||
ids.Add(comment.MilestoneID)
|
||||
}
|
||||
}
|
||||
return ids.Values()
|
||||
return container.FilterSlice(comments, func(comment *Comment) (int64, bool) {
|
||||
return comment.MilestoneID, comment.MilestoneID > 0
|
||||
})
|
||||
}
|
||||
|
||||
func (comments CommentList) loadMilestones(ctx context.Context) error {
|
||||
|
@ -137,13 +125,9 @@ func (comments CommentList) loadMilestones(ctx context.Context) error {
|
|||
}
|
||||
|
||||
func (comments CommentList) getOldMilestoneIDs() []int64 {
|
||||
ids := make(container.Set[int64], len(comments))
|
||||
for _, comment := range comments {
|
||||
if comment.OldMilestoneID > 0 {
|
||||
ids.Add(comment.OldMilestoneID)
|
||||
}
|
||||
}
|
||||
return ids.Values()
|
||||
return container.FilterSlice(comments, func(comment *Comment) (int64, bool) {
|
||||
return comment.OldMilestoneID, comment.OldMilestoneID > 0
|
||||
})
|
||||
}
|
||||
|
||||
func (comments CommentList) loadOldMilestones(ctx context.Context) error {
|
||||
|
@ -180,13 +164,9 @@ func (comments CommentList) loadOldMilestones(ctx context.Context) error {
|
|||
}
|
||||
|
||||
func (comments CommentList) getAssigneeIDs() []int64 {
|
||||
ids := make(container.Set[int64], len(comments))
|
||||
for _, comment := range comments {
|
||||
if comment.AssigneeID > 0 {
|
||||
ids.Add(comment.AssigneeID)
|
||||
}
|
||||
}
|
||||
return ids.Values()
|
||||
return container.FilterSlice(comments, func(comment *Comment) (int64, bool) {
|
||||
return comment.AssigneeID, comment.AssigneeID > 0
|
||||
})
|
||||
}
|
||||
|
||||
func (comments CommentList) loadAssignees(ctx context.Context) error {
|
||||
|
@ -237,14 +217,9 @@ func (comments CommentList) loadAssignees(ctx context.Context) error {
|
|||
|
||||
// getIssueIDs returns the IDs of issues in this comment list whose issue hasn't been loaded yet
|
||||
func (comments CommentList) getIssueIDs() []int64 {
|
||||
ids := make(container.Set[int64], len(comments))
|
||||
for _, comment := range comments {
|
||||
if comment.Issue != nil {
|
||||
continue
|
||||
}
|
||||
ids.Add(comment.IssueID)
|
||||
}
|
||||
return ids.Values()
|
||||
return container.FilterSlice(comments, func(comment *Comment) (int64, bool) {
|
||||
return comment.IssueID, comment.Issue == nil
|
||||
})
|
||||
}
|
||||
|
||||
// Issues returns all the issues of comments
|
||||
|
@ -311,16 +286,12 @@ func (comments CommentList) LoadIssues(ctx context.Context) error {
|
|||
}
|
||||
|
||||
func (comments CommentList) getDependentIssueIDs() []int64 {
|
||||
ids := make(container.Set[int64], len(comments))
|
||||
for _, comment := range comments {
|
||||
return container.FilterSlice(comments, func(comment *Comment) (int64, bool) {
|
||||
if comment.DependentIssue != nil {
|
||||
continue
|
||||
return 0, false
|
||||
}
|
||||
if comment.DependentIssueID > 0 {
|
||||
ids.Add(comment.DependentIssueID)
|
||||
}
|
||||
}
|
||||
return ids.Values()
|
||||
return comment.DependentIssueID, comment.DependentIssueID > 0
|
||||
})
|
||||
}
|
||||
|
||||
func (comments CommentList) loadDependentIssues(ctx context.Context) error {
|
||||
|
@ -375,15 +346,9 @@ func (comments CommentList) loadDependentIssues(ctx context.Context) error {
|
|||
|
||||
// getAttachmentCommentIDs only returns the IDs of comments that may have attachments
|
||||
func (comments CommentList) getAttachmentCommentIDs() []int64 {
|
||||
ids := make(container.Set[int64], len(comments))
|
||||
for _, comment := range comments {
|
||||
if comment.Type == CommentTypeComment ||
|
||||
comment.Type == CommentTypeReview ||
|
||||
comment.Type == CommentTypeCode {
|
||||
ids.Add(comment.ID)
|
||||
}
|
||||
}
|
||||
return ids.Values()
|
||||
return container.FilterSlice(comments, func(comment *Comment) (int64, bool) {
|
||||
return comment.ID, comment.Type.HasAttachmentSupport()
|
||||
})
|
||||
}
|
||||
|
||||
// LoadAttachmentsByIssue loads attachments by issue id
|
||||
|
@ -451,13 +416,9 @@ func (comments CommentList) LoadAttachments(ctx context.Context) (err error) {
|
|||
}
|
||||
|
||||
func (comments CommentList) getReviewIDs() []int64 {
|
||||
ids := make(container.Set[int64], len(comments))
|
||||
for _, comment := range comments {
|
||||
if comment.ReviewID > 0 {
|
||||
ids.Add(comment.ReviewID)
|
||||
}
|
||||
}
|
||||
return ids.Values()
|
||||
return container.FilterSlice(comments, func(comment *Comment) (int64, bool) {
|
||||
return comment.ReviewID, comment.ReviewID > 0
|
||||
})
|
||||
}
|
||||
|
||||
func (comments CommentList) loadReviews(ctx context.Context) error {
|
||||
|
|
|
@ -21,16 +21,15 @@ type IssueList []*Issue
|
|||
|
||||
// get the repo IDs to be loaded later, these IDs are for issue.Repo and issue.PullRequest.HeadRepo
|
||||
func (issues IssueList) getRepoIDs() []int64 {
|
||||
repoIDs := make(container.Set[int64], len(issues))
|
||||
for _, issue := range issues {
|
||||
return container.FilterSlice(issues, func(issue *Issue) (int64, bool) {
|
||||
if issue.Repo == nil {
|
||||
repoIDs.Add(issue.RepoID)
|
||||
return issue.RepoID, true
|
||||
}
|
||||
if issue.PullRequest != nil && issue.PullRequest.HeadRepo == nil {
|
||||
repoIDs.Add(issue.PullRequest.HeadRepoID)
|
||||
return issue.PullRequest.HeadRepoID, true
|
||||
}
|
||||
}
|
||||
return repoIDs.Values()
|
||||
return 0, false
|
||||
})
|
||||
}
|
||||
|
||||
// LoadRepositories loads issues' all repositories
|
||||
|
@ -74,11 +73,9 @@ func (issues IssueList) LoadRepositories(ctx context.Context) (repo_model.Reposi
|
|||
}
|
||||
|
||||
func (issues IssueList) getPosterIDs() []int64 {
|
||||
posterIDs := make(container.Set[int64], len(issues))
|
||||
for _, issue := range issues {
|
||||
posterIDs.Add(issue.PosterID)
|
||||
}
|
||||
return posterIDs.Values()
|
||||
return container.FilterSlice(issues, func(issue *Issue) (int64, bool) {
|
||||
return issue.PosterID, true
|
||||
})
|
||||
}
|
||||
|
||||
func (issues IssueList) loadPosters(ctx context.Context) error {
|
||||
|
@ -193,11 +190,9 @@ func (issues IssueList) loadLabels(ctx context.Context) error {
|
|||
}
|
||||
|
||||
func (issues IssueList) getMilestoneIDs() []int64 {
|
||||
ids := make(container.Set[int64], len(issues))
|
||||
for _, issue := range issues {
|
||||
ids.Add(issue.MilestoneID)
|
||||
}
|
||||
return ids.Values()
|
||||
return container.FilterSlice(issues, func(issue *Issue) (int64, bool) {
|
||||
return issue.MilestoneID, true
|
||||
})
|
||||
}
|
||||
|
||||
func (issues IssueList) loadMilestones(ctx context.Context) error {
|
||||
|
|
|
@ -305,14 +305,12 @@ func (list ReactionList) GroupByType() map[string]ReactionList {
|
|||
}
|
||||
|
||||
func (list ReactionList) getUserIDs() []int64 {
|
||||
userIDs := make(container.Set[int64], len(list))
|
||||
for _, reaction := range list {
|
||||
return container.FilterSlice(list, func(reaction *Reaction) (int64, bool) {
|
||||
if reaction.OriginalAuthor != "" {
|
||||
continue
|
||||
return 0, false
|
||||
}
|
||||
userIDs.Add(reaction.UserID)
|
||||
}
|
||||
return userIDs.Values()
|
||||
return reaction.UserID, true
|
||||
})
|
||||
}
|
||||
|
||||
func valuesUser(m map[int64]*user_model.User) []*user_model.User {
|
||||
|
|
|
@ -38,12 +38,11 @@ func (reviews ReviewList) LoadReviewers(ctx context.Context) error {
|
|||
}
|
||||
|
||||
func (reviews ReviewList) LoadIssues(ctx context.Context) error {
|
||||
issueIDs := container.Set[int64]{}
|
||||
for i := 0; i < len(reviews); i++ {
|
||||
issueIDs.Add(reviews[i].IssueID)
|
||||
}
|
||||
issueIDs := container.FilterSlice(reviews, func(review *Review) (int64, bool) {
|
||||
return review.IssueID, true
|
||||
})
|
||||
|
||||
issues, err := GetIssuesByIDs(ctx, issueIDs.Values())
|
||||
issues, err := GetIssuesByIDs(ctx, issueIDs)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
|
|
|
@ -0,0 +1,9 @@
|
|||
-
|
||||
id: 1
|
||||
project_id: 1
|
||||
issue_id: 1
|
||||
|
||||
-
|
||||
id: 2
|
||||
project_id: 1
|
||||
issue_id: 1
|
|
@ -21,6 +21,7 @@ import (
|
|||
"code.gitea.io/gitea/models/migrations/v1_20"
|
||||
"code.gitea.io/gitea/models/migrations/v1_21"
|
||||
"code.gitea.io/gitea/models/migrations/v1_22"
|
||||
"code.gitea.io/gitea/models/migrations/v1_23"
|
||||
"code.gitea.io/gitea/models/migrations/v1_6"
|
||||
"code.gitea.io/gitea/models/migrations/v1_7"
|
||||
"code.gitea.io/gitea/models/migrations/v1_8"
|
||||
|
@ -572,6 +573,15 @@ var migrations = []Migration{
|
|||
NewMigration("Ensure every project has exactly one default column - No Op", noopMigration),
|
||||
// v293 -> v294
|
||||
NewMigration("Ensure every project has exactly one default column", v1_22.CheckProjectColumnsConsistency),
|
||||
|
||||
// Gitea 1.22.0 ends at 294
|
||||
|
||||
// v294 -> v295
|
||||
NewMigration("Add unique index for project issue table", v1_23.AddUniqueIndexForProjectIssue),
|
||||
// v295 -> v296
|
||||
NewMigration("Add commit status summary table", v1_23.AddCommitStatusSummary),
|
||||
// v296 -> v297
|
||||
NewMigration("Add missing field of commit status summary table", v1_23.AddCommitStatusSummary2),
|
||||
}
|
||||
|
||||
// GetCurrentDBVersion returns the current db version
|
||||
|
|
models/migrations/v1_23/main_test.go (new file, 14 lines)
|
@ -0,0 +1,14 @@
|
|||
// Copyright 2024 The Gitea Authors. All rights reserved.
|
||||
// SPDX-License-Identifier: MIT
|
||||
|
||||
package v1_23 //nolint
|
||||
|
||||
import (
|
||||
"testing"
|
||||
|
||||
"code.gitea.io/gitea/models/migrations/base"
|
||||
)
|
||||
|
||||
func TestMain(m *testing.M) {
|
||||
base.MainTest(m)
|
||||
}
|
models/migrations/v1_23/v294.go (new file, 53 lines)
|
@ -0,0 +1,53 @@
|
|||
// Copyright 2024 The Gitea Authors. All rights reserved.
|
||||
// SPDX-License-Identifier: MIT
|
||||
|
||||
package v1_23 //nolint
|
||||
|
||||
import (
|
||||
"fmt"
|
||||
|
||||
"xorm.io/xorm"
|
||||
"xorm.io/xorm/schemas"
|
||||
)
|
||||
|
||||
// AddUniqueIndexForProjectIssue adds unique indexes for project issue table
|
||||
func AddUniqueIndexForProjectIssue(x *xorm.Engine) error {
|
||||
// remove possible duplicated records in table project_issue
|
||||
type result struct {
|
||||
IssueID int64
|
||||
ProjectID int64
|
||||
Cnt int
|
||||
}
|
||||
var results []result
|
||||
if err := x.Select("issue_id, project_id, count(*) as cnt").
|
||||
Table("project_issue").
|
||||
GroupBy("issue_id, project_id").
|
||||
Having("count(*) > 1").
|
||||
Find(&results); err != nil {
|
||||
return err
|
||||
}
|
||||
for _, r := range results {
|
||||
if x.Dialect().URI().DBType == schemas.MSSQL {
|
||||
if _, err := x.Exec(fmt.Sprintf("delete from project_issue where id in (SELECT top %d id FROM project_issue WHERE issue_id = ? and project_id = ?)", r.Cnt-1), r.IssueID, r.ProjectID); err != nil {
|
||||
return err
|
||||
}
|
||||
} else {
|
||||
var ids []int64
|
||||
if err := x.SQL("SELECT id FROM project_issue WHERE issue_id = ? and project_id = ? limit ?", r.IssueID, r.ProjectID, r.Cnt-1).Find(&ids); err != nil {
|
||||
return err
|
||||
}
|
||||
if _, err := x.Table("project_issue").In("id", ids).Delete(); err != nil {
|
||||
return err
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
// add unique index for project_issue table
|
||||
type ProjectIssue struct { //revive:disable-line:exported
|
||||
ID int64 `xorm:"pk autoincr"`
|
||||
IssueID int64 `xorm:"INDEX unique(s)"`
|
||||
ProjectID int64 `xorm:"INDEX unique(s)"`
|
||||
}
|
||||
|
||||
return x.Sync(new(ProjectIssue))
|
||||
}
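// After the de-duplication pass, the final x.Sync of the struct with the
// `unique(s)` tags is what creates the composite unique index. The resulting
// DDL is roughly (a sketch; the generated index name and exact syntax vary by
// database and xorm version):
//
//	CREATE UNIQUE INDEX UQE_project_issue_s ON project_issue (issue_id, project_id);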
|
models/migrations/v1_23/v294_test.go (new file, 52 lines)
|
@ -0,0 +1,52 @@
|
|||
// Copyright 2024 The Gitea Authors. All rights reserved.
|
||||
// SPDX-License-Identifier: MIT
|
||||
|
||||
package v1_23 //nolint
|
||||
|
||||
import (
|
||||
"slices"
|
||||
"testing"
|
||||
|
||||
"code.gitea.io/gitea/models/migrations/base"
|
||||
|
||||
"github.com/stretchr/testify/assert"
|
||||
"xorm.io/xorm/schemas"
|
||||
)
|
||||
|
||||
func Test_AddUniqueIndexForProjectIssue(t *testing.T) {
|
||||
type ProjectIssue struct { //revive:disable-line:exported
|
||||
ID int64 `xorm:"pk autoincr"`
|
||||
IssueID int64 `xorm:"INDEX"`
|
||||
ProjectID int64 `xorm:"INDEX"`
|
||||
}
|
||||
|
||||
// Prepare and load the testing database
|
||||
x, deferable := base.PrepareTestEnv(t, 0, new(ProjectIssue))
|
||||
defer deferable()
|
||||
if x == nil || t.Failed() {
|
||||
return
|
||||
}
|
||||
|
||||
cnt, err := x.Table("project_issue").Where("project_id=1 AND issue_id=1").Count()
|
||||
assert.NoError(t, err)
|
||||
assert.EqualValues(t, 2, cnt)
|
||||
|
||||
assert.NoError(t, AddUniqueIndexForProjectIssue(x))
|
||||
|
||||
cnt, err = x.Table("project_issue").Where("project_id=1 AND issue_id=1").Count()
|
||||
assert.NoError(t, err)
|
||||
assert.EqualValues(t, 1, cnt)
|
||||
|
||||
tables, err := x.DBMetas()
|
||||
assert.NoError(t, err)
|
||||
assert.EqualValues(t, 1, len(tables))
|
||||
found := false
|
||||
for _, index := range tables[0].Indexes {
|
||||
if index.Type == schemas.UniqueType {
|
||||
found = true
|
||||
slices.Equal(index.Cols, []string{"project_id", "issue_id"})
|
||||
break
|
||||
}
|
||||
}
|
||||
assert.True(t, found)
|
||||
}
|
models/migrations/v1_23/v295.go (new file, 18 lines)
|
@ -0,0 +1,18 @@
|
|||
// Copyright 2024 The Gitea Authors. All rights reserved.
|
||||
// SPDX-License-Identifier: MIT
|
||||
|
||||
package v1_23 //nolint
|
||||
|
||||
import "xorm.io/xorm"
|
||||
|
||||
func AddCommitStatusSummary(x *xorm.Engine) error {
|
||||
type CommitStatusSummary struct {
|
||||
ID int64 `xorm:"pk autoincr"`
|
||||
RepoID int64 `xorm:"INDEX UNIQUE(repo_id_sha)"`
|
||||
SHA string `xorm:"VARCHAR(64) NOT NULL INDEX UNIQUE(repo_id_sha)"`
|
||||
State string `xorm:"VARCHAR(7) NOT NULL"`
|
||||
}
|
||||
// there is no data migration here: when this table has no rows, callers fall back to
// reading the commit_status table directly
|
||||
return x.Sync2(new(CommitStatusSummary))
|
||||
}
|
models/migrations/v1_23/v296.go (new file, 16 lines)
|
@ -0,0 +1,16 @@
|
|||
// Copyright 2024 The Gitea Authors. All rights reserved.
|
||||
// SPDX-License-Identifier: MIT
|
||||
|
||||
package v1_23 //nolint
|
||||
|
||||
import "xorm.io/xorm"
|
||||
|
||||
func AddCommitStatusSummary2(x *xorm.Engine) error {
|
||||
type CommitStatusSummary struct {
|
||||
ID int64 `xorm:"pk autoincr"`
|
||||
TargetURL string `xorm:"TEXT"`
|
||||
}
|
||||
// there is no data migration here: when this table has no rows, callers fall back to
// reading the commit_status table directly
|
||||
return x.Sync(new(CommitStatusSummary))
|
||||
}
|
|
@ -9,6 +9,7 @@ import (
|
|||
"fmt"
|
||||
"strings"
|
||||
|
||||
actions_model "code.gitea.io/gitea/models/actions"
|
||||
"code.gitea.io/gitea/models/db"
|
||||
"code.gitea.io/gitea/models/perm"
|
||||
repo_model "code.gitea.io/gitea/models/repo"
|
||||
|
@ -402,6 +403,8 @@ func DeleteOrganization(ctx context.Context, org *Organization) error {
|
|||
&TeamInvite{OrgID: org.ID},
|
||||
&secret_model.Secret{OwnerID: org.ID},
|
||||
&user_model.Blocking{BlockerID: org.ID},
|
||||
&actions_model.ActionRunner{OwnerID: org.ID},
|
||||
&actions_model.ActionRunnerToken{OwnerID: org.ID},
|
||||
); err != nil {
|
||||
return fmt.Errorf("DeleteBeans: %w", err)
|
||||
}
|
||||
|
|
|
@ -53,7 +53,7 @@ func (repo *Repository) IsDependenciesEnabled(ctx context.Context) bool {
|
|||
var u *RepoUnit
|
||||
var err error
|
||||
if u, err = repo.GetUnit(ctx, unit.TypeIssues); err != nil {
|
||||
log.Trace("%s", err)
|
||||
log.Trace("IsDependenciesEnabled: %v", err)
|
||||
return setting.Service.DefaultEnableDependencies
|
||||
}
|
||||
return u.IssuesConfig().EnableDependencies
|
||||
|
|
|
@ -104,18 +104,19 @@ func (repos RepositoryList) LoadAttributes(ctx context.Context) error {
|
|||
return nil
|
||||
}
|
||||
|
||||
set := make(container.Set[int64])
|
||||
userIDs := container.FilterSlice(repos, func(repo *Repository) (int64, bool) {
|
||||
return repo.OwnerID, true
|
||||
})
|
||||
repoIDs := make([]int64, len(repos))
|
||||
for i := range repos {
|
||||
set.Add(repos[i].OwnerID)
|
||||
repoIDs[i] = repos[i].ID
|
||||
}
|
||||
|
||||
// Load owners.
|
||||
users := make(map[int64]*user_model.User, len(set))
|
||||
users := make(map[int64]*user_model.User, len(userIDs))
|
||||
if err := db.GetEngine(ctx).
|
||||
Where("id > 0").
|
||||
In("id", set.Values()).
|
||||
In("id", userIDs).
|
||||
Find(&users); err != nil {
|
||||
return fmt.Errorf("find users: %w", err)
|
||||
}
|
||||
|
|
|
@ -256,14 +256,6 @@ func IsEmailUsed(ctx context.Context, email string) (bool, error) {
|
|||
return db.GetEngine(ctx).Where("lower_email=?", strings.ToLower(email)).Get(&EmailAddress{})
|
||||
}
|
||||
|
||||
// DeleteInactiveEmailAddresses deletes inactive email addresses
|
||||
func DeleteInactiveEmailAddresses(ctx context.Context) error {
|
||||
_, err := db.GetEngine(ctx).
|
||||
Where("is_activated = ?", false).
|
||||
Delete(new(EmailAddress))
|
||||
return err
|
||||
}
|
||||
|
||||
// ActivateEmail activates the email address to given user.
|
||||
func ActivateEmail(ctx context.Context, email *EmailAddress) error {
|
||||
ctx, committer, err := db.TxContext(ctx)
|
||||
|
|
modules/cache/cache.go (vendored, 138 lines)
|
@ -4,149 +4,75 @@
|
|||
package cache
|
||||
|
||||
import (
|
||||
"fmt"
|
||||
"strconv"
|
||||
"time"
|
||||
|
||||
"code.gitea.io/gitea/modules/setting"
|
||||
|
||||
mc "gitea.com/go-chi/cache"
|
||||
|
||||
_ "gitea.com/go-chi/cache/memcache" // memcache plugin for cache
|
||||
)
|
||||
|
||||
var conn mc.Cache
|
||||
|
||||
func newCache(cacheConfig setting.Cache) (mc.Cache, error) {
|
||||
return mc.NewCacher(mc.Options{
|
||||
Adapter: cacheConfig.Adapter,
|
||||
AdapterConfig: cacheConfig.Conn,
|
||||
Interval: cacheConfig.Interval,
|
||||
})
|
||||
}
|
||||
var defaultCache StringCache
|
||||
|
||||
// Init start cache service
|
||||
func Init() error {
|
||||
var err error
|
||||
|
||||
if conn == nil {
|
||||
if conn, err = newCache(setting.CacheService.Cache); err != nil {
|
||||
if defaultCache == nil {
|
||||
c, err := NewStringCache(setting.CacheService.Cache)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
if err = conn.Ping(); err != nil {
|
||||
for i := 0; i < 10; i++ {
|
||||
if err = c.Ping(); err == nil {
|
||||
break
|
||||
}
|
||||
time.Sleep(time.Second)
|
||||
}
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
defaultCache = c
|
||||
}
|
||||
|
||||
return err
|
||||
return nil
|
||||
}
|
||||
|
||||
// GetCache returns the currently configured cache
|
||||
func GetCache() mc.Cache {
|
||||
return conn
|
||||
func GetCache() StringCache {
|
||||
return defaultCache
|
||||
}
|
||||
|
||||
// GetString returns the key value from cache with callback when no key exists in cache
|
||||
func GetString(key string, getFunc func() (string, error)) (string, error) {
|
||||
if conn == nil || setting.CacheService.TTL == 0 {
|
||||
if defaultCache == nil || setting.CacheService.TTL == 0 {
|
||||
return getFunc()
|
||||
}
|
||||
|
||||
cached := conn.Get(key)
|
||||
|
||||
if cached == nil {
|
||||
cached, exist := defaultCache.Get(key)
|
||||
if !exist {
|
||||
value, err := getFunc()
|
||||
if err != nil {
|
||||
return value, err
|
||||
}
|
||||
return value, conn.Put(key, value, setting.CacheService.TTLSeconds())
|
||||
}
|
||||
|
||||
if value, ok := cached.(string); ok {
|
||||
return value, nil
|
||||
}
|
||||
|
||||
if stringer, ok := cached.(fmt.Stringer); ok {
|
||||
return stringer.String(), nil
|
||||
}
|
||||
|
||||
return fmt.Sprintf("%s", cached), nil
|
||||
}
|
||||
|
||||
// GetInt returns key value from cache with callback when no key exists in cache
|
||||
func GetInt(key string, getFunc func() (int, error)) (int, error) {
|
||||
if conn == nil || setting.CacheService.TTL == 0 {
|
||||
return getFunc()
|
||||
}
|
||||
|
||||
cached := conn.Get(key)
|
||||
|
||||
if cached == nil {
|
||||
value, err := getFunc()
|
||||
if err != nil {
|
||||
return value, err
|
||||
}
|
||||
|
||||
return value, conn.Put(key, value, setting.CacheService.TTLSeconds())
|
||||
}
|
||||
|
||||
switch v := cached.(type) {
|
||||
case int:
|
||||
return v, nil
|
||||
case string:
|
||||
value, err := strconv.Atoi(v)
|
||||
if err != nil {
|
||||
return 0, err
|
||||
}
|
||||
return value, nil
|
||||
default:
|
||||
value, err := getFunc()
|
||||
if err != nil {
|
||||
return value, err
|
||||
}
|
||||
return value, conn.Put(key, value, setting.CacheService.TTLSeconds())
|
||||
return value, defaultCache.Put(key, value, setting.CacheService.TTLSeconds())
|
||||
}
|
||||
return cached, nil
|
||||
}
|
||||
|
||||
// GetInt64 returns key value from cache with callback when no key exists in cache
|
||||
func GetInt64(key string, getFunc func() (int64, error)) (int64, error) {
|
||||
if conn == nil || setting.CacheService.TTL == 0 {
|
||||
return getFunc()
|
||||
s, err := GetString(key, func() (string, error) {
|
||||
v, err := getFunc()
|
||||
return strconv.FormatInt(v, 10), err
|
||||
})
|
||||
if err != nil {
|
||||
return 0, err
|
||||
}
|
||||
|
||||
cached := conn.Get(key)
|
||||
|
||||
if cached == nil {
|
||||
value, err := getFunc()
|
||||
if err != nil {
|
||||
return value, err
|
||||
}
|
||||
|
||||
return value, conn.Put(key, value, setting.CacheService.TTLSeconds())
|
||||
}
|
||||
|
||||
switch v := conn.Get(key).(type) {
|
||||
case int64:
|
||||
return v, nil
|
||||
case string:
|
||||
value, err := strconv.ParseInt(v, 10, 64)
|
||||
if err != nil {
|
||||
return 0, err
|
||||
}
|
||||
return value, nil
|
||||
default:
|
||||
value, err := getFunc()
|
||||
if err != nil {
|
||||
return value, err
|
||||
}
|
||||
|
||||
return value, conn.Put(key, value, setting.CacheService.TTLSeconds())
|
||||
if s == "" {
|
||||
return 0, nil
|
||||
}
|
||||
return strconv.ParseInt(s, 10, 64)
|
||||
}
|
||||
|
||||
// Remove key from cache
|
||||
func Remove(key string) {
|
||||
if conn == nil {
|
||||
if defaultCache == nil {
|
||||
return
|
||||
}
|
||||
_ = conn.Delete(key)
|
||||
_ = defaultCache.Delete(key)
|
||||
}
|
||||
|
|
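For context, a minimal usage sketch of the refactored cache.GetString helper (not part of this commit; the key name and fallback value are illustrative):

// Sketch only: the callback runs on a cache miss and its result is stored with the configured TTL.
package main

import (
	"fmt"

	"code.gitea.io/gitea/modules/cache"
)

func main() {
	val, err := cache.GetString("example-key", func() (string, error) {
		return "computed-value", nil // hypothetical fallback
	})
	fmt.Println(val, err)
}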
2 modules/cache/cache_redis.go vendored
@@ -11,7 +11,7 @@ import (
"code.gitea.io/gitea/modules/graceful"
"code.gitea.io/gitea/modules/nosql"

"gitea.com/go-chi/cache"
"gitea.com/go-chi/cache" //nolint:depguard
"github.com/redis/go-redis/v9"
)

40 modules/cache/cache_test.go vendored
@@ -14,7 +14,7 @@ import (
)

func createTestCache() {
conn, _ = newCache(setting.Cache{
defaultCache, _ = NewStringCache(setting.Cache{
Adapter: "memory",
TTL: time.Minute,
})
@@ -25,7 +25,7 @@ func TestNewContext(t *testing.T) {
assert.NoError(t, Init())

setting.CacheService.Cache = setting.Cache{Adapter: "redis", Conn: "some random string"}
con, err := newCache(setting.Cache{
con, err := NewStringCache(setting.Cache{
Adapter: "rand",
Conn: "false conf",
Interval: 100,
@@ -76,42 +76,6 @@ func TestGetString(t *testing.T) {
Remove("key")
}

func TestGetInt(t *testing.T) {
createTestCache()

data, err := GetInt("key", func() (int, error) {
return 0, fmt.Errorf("some error")
})
assert.Error(t, err)
assert.Equal(t, 0, data)

data, err = GetInt("key", func() (int, error) {
return 0, nil
})
assert.NoError(t, err)
assert.Equal(t, 0, data)

data, err = GetInt("key", func() (int, error) {
return 100, nil
})
assert.NoError(t, err)
assert.Equal(t, 0, data)
Remove("key")

data, err = GetInt("key", func() (int, error) {
return 100, nil
})
assert.NoError(t, err)
assert.Equal(t, 100, data)

data, err = GetInt("key", func() (int, error) {
return 0, fmt.Errorf("some error")
})
assert.NoError(t, err)
assert.Equal(t, 100, data)
Remove("key")
}

func TestGetInt64(t *testing.T) {
createTestCache()

2 modules/cache/cache_twoqueue.go vendored
@@ -10,7 +10,7 @@ import (

"code.gitea.io/gitea/modules/json"

mc "gitea.com/go-chi/cache"
mc "gitea.com/go-chi/cache" //nolint:depguard
lru "github.com/hashicorp/golang-lru/v2"
)

120 modules/cache/string_cache.go vendored (new file)
@@ -0,0 +1,120 @@
// Copyright 2024 The Gitea Authors. All rights reserved.
// SPDX-License-Identifier: MIT

package cache

import (
"errors"
"strings"

"code.gitea.io/gitea/modules/json"
"code.gitea.io/gitea/modules/setting"
"code.gitea.io/gitea/modules/util"

chi_cache "gitea.com/go-chi/cache" //nolint:depguard
)

type GetJSONError struct {
err error
cachedError string // Golang error can't be stored in cache, only the string message could be stored
}

func (e *GetJSONError) ToError() error {
if e.err != nil {
return e.err
}
return errors.New("cached error: " + e.cachedError)
}

type StringCache interface {
Ping() error

Get(key string) (string, bool)
Put(key, value string, ttl int64) error
Delete(key string) error
IsExist(key string) bool

PutJSON(key string, v any, ttl int64) error
GetJSON(key string, ptr any) (exist bool, err *GetJSONError)

ChiCache() chi_cache.Cache
}

type stringCache struct {
chiCache chi_cache.Cache
}

func NewStringCache(cacheConfig setting.Cache) (StringCache, error) {
adapter := util.IfZero(cacheConfig.Adapter, "memory")
interval := util.IfZero(cacheConfig.Interval, 60)
cc, err := chi_cache.NewCacher(chi_cache.Options{
Adapter: adapter,
AdapterConfig: cacheConfig.Conn,
Interval: interval,
})
if err != nil {
return nil, err
}
return &stringCache{chiCache: cc}, nil
}

func (sc *stringCache) Ping() error {
return sc.chiCache.Ping()
}

func (sc *stringCache) Get(key string) (string, bool) {
v := sc.chiCache.Get(key)
if v == nil {
return "", false
}
s, ok := v.(string)
return s, ok
}

func (sc *stringCache) Put(key, value string, ttl int64) error {
return sc.chiCache.Put(key, value, ttl)
}

func (sc *stringCache) Delete(key string) error {
return sc.chiCache.Delete(key)
}

func (sc *stringCache) IsExist(key string) bool {
return sc.chiCache.IsExist(key)
}

const cachedErrorPrefix = "<CACHED-ERROR>:"

func (sc *stringCache) PutJSON(key string, v any, ttl int64) error {
var s string
switch v := v.(type) {
case error:
s = cachedErrorPrefix + v.Error()
default:
b, err := json.Marshal(v)
if err != nil {
return err
}
s = util.UnsafeBytesToString(b)
}
return sc.chiCache.Put(key, s, ttl)
}

func (sc *stringCache) GetJSON(key string, ptr any) (exist bool, getErr *GetJSONError) {
s, ok := sc.Get(key)
if !ok || s == "" {
return false, nil
}
s, isCachedError := strings.CutPrefix(s, cachedErrorPrefix)
if isCachedError {
return true, &GetJSONError{cachedError: s}
}
if err := json.Unmarshal(util.UnsafeStringToBytes(s), ptr); err != nil {
return false, &GetJSONError{err: err}
}
return true, nil
}

func (sc *stringCache) ChiCache() chi_cache.Cache {
return sc.chiCache
}
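A minimal sketch of a PutJSON/GetJSON round-trip on the new StringCache (not part of this commit; the struct and key are illustrative):

// Sketch only: store a JSON-encoded value and read it back through the new interface.
package main

import (
	"fmt"

	"code.gitea.io/gitea/modules/cache"
	"code.gitea.io/gitea/modules/setting"
)

type payload struct {
	Name string
}

func main() {
	c, err := cache.NewStringCache(setting.Cache{Adapter: "memory"})
	if err != nil {
		panic(err)
	}
	_ = c.PutJSON("example-key", payload{Name: "gitea"}, 60) // TTL in seconds

	var out payload
	if exist, getErr := c.GetJSON("example-key", &out); exist && getErr == nil {
		fmt.Println(out.Name)
	}
}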
@@ -4,6 +4,7 @@
package charset

import (
"regexp"
"strings"
"testing"

@@ -156,13 +157,16 @@ func TestEscapeControlReader(t *testing.T) {
tests = append(tests, test)
}

re := regexp.MustCompile(`repo.ambiguous_character:\d+,\d+`) // simplify the output for the tests, remove the translation variants
for _, tt := range tests {
t.Run(tt.name, func(t *testing.T) {
output := &strings.Builder{}
status, err := EscapeControlReader(strings.NewReader(tt.text), output, &translation.MockLocale{})
assert.NoError(t, err)
assert.Equal(t, tt.status, *status)
assert.Equal(t, tt.result, output.String())
outStr := output.String()
outStr = re.ReplaceAllString(outStr, "repo.ambiguous_character")
assert.Equal(t, tt.result, outStr)
})
}
}
21 modules/container/filter.go (new file)
@@ -0,0 +1,21 @@
// Copyright 2024 The Gitea Authors. All rights reserved.
// SPDX-License-Identifier: MIT

package container

import "slices"

// FilterSlice ranges over the slice and calls include() for each element.
// If the second returned value is true, the first returned value will be included in the resulting
// slice (after deduplication).
func FilterSlice[E any, T comparable](s []E, include func(E) (T, bool)) []T {
filtered := make([]T, 0, len(s)) // slice will be clipped before returning
seen := make(map[T]bool, len(s))
for i := range s {
if v, ok := include(s[i]); ok && !seen[v] {
filtered = append(filtered, v)
seen[v] = true
}
}
return slices.Clip(filtered)
}
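A minimal usage sketch of the new container.FilterSlice helper, mirroring the owner-ID collection change earlier in this diff (not part of this commit; the repo type below is illustrative):

// Sketch only: collect deduplicated owner IDs from a slice of repositories.
package main

import (
	"fmt"

	"code.gitea.io/gitea/modules/container"
)

type repo struct{ OwnerID int64 }

func main() {
	repos := []*repo{{OwnerID: 1}, {OwnerID: 2}, {OwnerID: 1}}
	ownerIDs := container.FilterSlice(repos, func(r *repo) (int64, bool) {
		return r.OwnerID, true // include every element; duplicates are dropped
	})
	fmt.Println(ownerIDs) // [1 2]
}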
28 modules/container/filter_test.go (new file)
@@ -0,0 +1,28 @@
// Copyright 2024 The Gitea Authors. All rights reserved.
// SPDX-License-Identifier: MIT

package container

import (
"testing"

"github.com/stretchr/testify/assert"
)

func TestFilterMapUnique(t *testing.T) {
result := FilterSlice([]int{
0, 1, 2, 3, 4, 5, 6, 7, 8, 9,
}, func(i int) (int, bool) {
switch i {
case 0:
return 0, true // included later
case 1:
return 0, true // duplicate of previous (should be ignored)
case 2:
return 2, false // not included
default:
return i, true
}
})
assert.Equal(t, []int{0, 3, 4, 5, 6, 7, 8, 9}, result)
}
@@ -561,14 +561,14 @@ func TestFormatError(t *testing.T) {
err: &csv.ParseError{
Err: csv.ErrFieldCount,
},
expectedMessage: "repo.error.csv.invalid_field_count",
expectedMessage: "repo.error.csv.invalid_field_count:0",
expectsError: false,
},
{
err: &csv.ParseError{
Err: csv.ErrBareQuote,
},
expectedMessage: "repo.error.csv.unexpected",
expectedMessage: "repo.error.csv.unexpected:0,0",
expectsError: false,
},
{
174 modules/dump/dumper.go (new file)
@@ -0,0 +1,174 @@
// Copyright 2024 The Gitea Authors. All rights reserved.
// SPDX-License-Identifier: MIT

package dump

import (
"fmt"
"io"
"os"
"path"
"path/filepath"
"slices"
"strings"

"code.gitea.io/gitea/modules/log"
"code.gitea.io/gitea/modules/setting"
"code.gitea.io/gitea/modules/timeutil"

"github.com/mholt/archiver/v3"
)

var SupportedOutputTypes = []string{"zip", "tar", "tar.sz", "tar.gz", "tar.xz", "tar.bz2", "tar.br", "tar.lz4", "tar.zst"}

// PrepareFileNameAndType prepares the output file name and type, if the type is not supported, it returns an empty "outType"
func PrepareFileNameAndType(argFile, argType string) (outFileName, outType string) {
if argFile == "" && argType == "" {
outType = SupportedOutputTypes[0]
outFileName = fmt.Sprintf("gitea-dump-%d.%s", timeutil.TimeStampNow(), outType)
} else if argFile == "" {
outType = argType
outFileName = fmt.Sprintf("gitea-dump-%d.%s", timeutil.TimeStampNow(), outType)
} else if argType == "" {
if filepath.Ext(outFileName) == "" {
outType = SupportedOutputTypes[0]
outFileName = argFile
} else {
for _, t := range SupportedOutputTypes {
if strings.HasSuffix(argFile, "."+t) {
outFileName = argFile
outType = t
}
}
}
} else {
outFileName, outType = argFile, argType
}
if !slices.Contains(SupportedOutputTypes, outType) {
return "", ""
}
return outFileName, outType
}

func IsSubdir(upper, lower string) (bool, error) {
if relPath, err := filepath.Rel(upper, lower); err != nil {
return false, err
} else if relPath == "." || !strings.HasPrefix(relPath, ".") {
return true, nil
}
return false, nil
}

type Dumper struct {
Writer archiver.Writer
Verbose bool

globalExcludeAbsPaths []string
}

func (dumper *Dumper) AddReader(r io.ReadCloser, info os.FileInfo, customName string) error {
if dumper.Verbose {
log.Info("Adding file %s", customName)
}

return dumper.Writer.Write(archiver.File{
FileInfo: archiver.FileInfo{
FileInfo: info,
CustomName: customName,
},
ReadCloser: r,
})
}

func (dumper *Dumper) AddFile(filePath, absPath string) error {
file, err := os.Open(absPath)
if err != nil {
return err
}
defer file.Close()
fileInfo, err := file.Stat()
if err != nil {
return err
}
return dumper.AddReader(file, fileInfo, filePath)
}

func (dumper *Dumper) normalizeFilePath(absPath string) string {
absPath = filepath.Clean(absPath)
if setting.IsWindows {
absPath = strings.ToLower(absPath)
}
return absPath
}

func (dumper *Dumper) GlobalExcludeAbsPath(absPaths ...string) {
for _, absPath := range absPaths {
dumper.globalExcludeAbsPaths = append(dumper.globalExcludeAbsPaths, dumper.normalizeFilePath(absPath))
}
}

func (dumper *Dumper) shouldExclude(absPath string, excludes []string) bool {
norm := dumper.normalizeFilePath(absPath)
return slices.Contains(dumper.globalExcludeAbsPaths, norm) || slices.Contains(excludes, norm)
}

func (dumper *Dumper) AddRecursiveExclude(insidePath, absPath string, excludes []string) error {
excludes = slices.Clone(excludes)
for i := range excludes {
excludes[i] = dumper.normalizeFilePath(excludes[i])
}
return dumper.addFileOrDir(insidePath, absPath, excludes)
}

func (dumper *Dumper) addFileOrDir(insidePath, absPath string, excludes []string) error {
absPath, err := filepath.Abs(absPath)
if err != nil {
return err
}
dir, err := os.Open(absPath)
if err != nil {
return err
}
defer dir.Close()

files, err := dir.Readdir(0)
if err != nil {
return err
}
for _, file := range files {
currentAbsPath := filepath.Join(absPath, file.Name())
if dumper.shouldExclude(currentAbsPath, excludes) {
continue
}

currentInsidePath := path.Join(insidePath, file.Name())
if file.IsDir() {
if err := dumper.AddFile(currentInsidePath, currentAbsPath); err != nil {
return err
}
if err = dumper.addFileOrDir(currentInsidePath, currentAbsPath, excludes); err != nil {
return err
}
} else {
// only copy regular files and symlink regular files, skip non-regular files like socket/pipe/...
shouldAdd := file.Mode().IsRegular()
if !shouldAdd && file.Mode()&os.ModeSymlink == os.ModeSymlink {
target, err := filepath.EvalSymlinks(currentAbsPath)
if err != nil {
return err
}
targetStat, err := os.Stat(target)
if err != nil {
return err
}
shouldAdd = targetStat.Mode().IsRegular()
}
if shouldAdd {
if err = dumper.AddFile(currentInsidePath, currentAbsPath); err != nil {
return err
}
}
}
}
return nil
}
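A minimal sketch of resolving the dump output name and format with dump.PrepareFileNameAndType (not part of this commit; the argument values are illustrative, and the test file below exercises the full matrix):

// Sketch only: default naming and rejection of an unsupported type.
package main

import (
	"fmt"

	"code.gitea.io/gitea/modules/dump"
)

func main() {
	// No file and no type given: fall back to a timestamped gitea-dump-*.zip.
	name, typ := dump.PrepareFileNameAndType("", "")
	fmt.Println(name, typ)

	// An unsupported type yields empty results, which callers treat as an error.
	name, typ = dump.PrepareFileNameAndType("backup.tar.gz", "no-such")
	fmt.Println(name == "" && typ == "")
}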
113 modules/dump/dumper_test.go (new file)
@@ -0,0 +1,113 @@
// Copyright 2024 The Gitea Authors. All rights reserved.
// SPDX-License-Identifier: MIT

package dump

import (
"fmt"
"io"
"os"
"path/filepath"
"sort"
"testing"
"time"

"code.gitea.io/gitea/modules/timeutil"

"github.com/mholt/archiver/v3"
"github.com/stretchr/testify/assert"
)

func TestPrepareFileNameAndType(t *testing.T) {
defer timeutil.MockSet(time.Unix(1234, 0))()
test := func(argFile, argType, expFile, expType string) {
outFile, outType := PrepareFileNameAndType(argFile, argType)
assert.Equal(t,
fmt.Sprintf("outFile=%s, outType=%s", expFile, expType),
fmt.Sprintf("outFile=%s, outType=%s", outFile, outType),
fmt.Sprintf("argFile=%s, argType=%s", argFile, argType),
)
}

test("", "", "gitea-dump-1234.zip", "zip")
test("", "tar.gz", "gitea-dump-1234.tar.gz", "tar.gz")
test("", "no-such", "", "")

test("-", "", "-", "zip")
test("-", "tar.gz", "-", "tar.gz")
test("-", "no-such", "", "")

test("a", "", "a", "zip")
test("a", "tar.gz", "a", "tar.gz")
test("a", "no-such", "", "")

test("a.zip", "", "a.zip", "zip")
test("a.zip", "tar.gz", "a.zip", "tar.gz")
test("a.zip", "no-such", "", "")

test("a.tar.gz", "", "a.tar.gz", "zip")
test("a.tar.gz", "tar.gz", "a.tar.gz", "tar.gz")
test("a.tar.gz", "no-such", "", "")
}

func TestIsSubDir(t *testing.T) {
tmpDir := t.TempDir()
_ = os.MkdirAll(filepath.Join(tmpDir, "include/sub"), 0o755)

isSub, err := IsSubdir(filepath.Join(tmpDir, "include"), filepath.Join(tmpDir, "include"))
assert.NoError(t, err)
assert.True(t, isSub)

isSub, err = IsSubdir(filepath.Join(tmpDir, "include"), filepath.Join(tmpDir, "include/sub"))
assert.NoError(t, err)
assert.True(t, isSub)

isSub, err = IsSubdir(filepath.Join(tmpDir, "include/sub"), filepath.Join(tmpDir, "include"))
assert.NoError(t, err)
assert.False(t, isSub)
}

type testWriter struct {
added []string
}

func (t *testWriter) Create(out io.Writer) error {
return nil
}

func (t *testWriter) Write(f archiver.File) error {
t.added = append(t.added, f.Name())
return nil
}

func (t *testWriter) Close() error {
return nil
}

func TestDumper(t *testing.T) {
sortStrings := func(s []string) []string {
sort.Strings(s)
return s
}
tmpDir := t.TempDir()
_ = os.MkdirAll(filepath.Join(tmpDir, "include/exclude1"), 0o755)
_ = os.MkdirAll(filepath.Join(tmpDir, "include/exclude2"), 0o755)
_ = os.MkdirAll(filepath.Join(tmpDir, "include/sub"), 0o755)
_ = os.WriteFile(filepath.Join(tmpDir, "include/a"), nil, 0o644)
_ = os.WriteFile(filepath.Join(tmpDir, "include/sub/b"), nil, 0o644)
_ = os.WriteFile(filepath.Join(tmpDir, "include/exclude1/a-1"), nil, 0o644)
_ = os.WriteFile(filepath.Join(tmpDir, "include/exclude2/a-2"), nil, 0o644)

tw := &testWriter{}
d := &Dumper{Writer: tw}
d.GlobalExcludeAbsPath(filepath.Join(tmpDir, "include/exclude1"))
err := d.AddRecursiveExclude("include", filepath.Join(tmpDir, "include"), []string{filepath.Join(tmpDir, "include/exclude2")})
assert.NoError(t, err)
assert.EqualValues(t, sortStrings([]string{"include/a", "include/sub", "include/sub/b"}), sortStrings(tw.added))

tw = &testWriter{}
d = &Dumper{Writer: tw}
err = d.AddRecursiveExclude("include", filepath.Join(tmpDir, "include"), nil)
assert.NoError(t, err)
assert.EqualValues(t, sortStrings([]string{"include/exclude2", "include/exclude2/a-2", "include/a", "include/sub", "include/sub/b", "include/exclude1", "include/exclude1/a-1"}), sortStrings(tw.added))
}
@ -26,14 +26,14 @@ type Commit struct {
|
|||
Author *Signature
|
||||
Committer *Signature
|
||||
CommitMessage string
|
||||
Signature *CommitGPGSignature
|
||||
Signature *CommitSignature
|
||||
|
||||
Parents []ObjectID // ID strings
|
||||
submoduleCache *ObjectCache
|
||||
}
|
||||
|
||||
// CommitGPGSignature represents a git commit signature part.
|
||||
type CommitGPGSignature struct {
|
||||
// CommitSignature represents a git commit signature part.
|
||||
type CommitSignature struct {
|
||||
Signature string
|
||||
Payload string // TODO check if can be reconstruct from the rest of commit information to not have duplicate data
|
||||
}
|
||||
|
|
|
@ -13,7 +13,7 @@ import (
|
|||
"github.com/go-git/go-git/v5/plumbing/object"
|
||||
)
|
||||
|
||||
func convertPGPSignature(c *object.Commit) *CommitGPGSignature {
|
||||
func convertPGPSignature(c *object.Commit) *CommitSignature {
|
||||
if c.PGPSignature == "" {
|
||||
return nil
|
||||
}
|
||||
|
@ -57,7 +57,7 @@ func convertPGPSignature(c *object.Commit) *CommitGPGSignature {
|
|||
return nil
|
||||
}
|
||||
|
||||
return &CommitGPGSignature{
|
||||
return &CommitSignature{
|
||||
Signature: c.PGPSignature,
|
||||
Payload: w.String(),
|
||||
}
|
||||
|
|
|
@ -99,7 +99,7 @@ readLoop:
|
|||
}
|
||||
}
|
||||
commit.CommitMessage = messageSB.String()
|
||||
commit.Signature = &CommitGPGSignature{
|
||||
commit.Signature = &CommitSignature{
|
||||
Signature: signatureSB.String(),
|
||||
Payload: payloadSB.String(),
|
||||
}
|
||||
|
|
|
@ -10,6 +10,7 @@ import (
|
|||
"errors"
|
||||
"fmt"
|
||||
"os"
|
||||
"slices"
|
||||
"strconv"
|
||||
"strings"
|
||||
|
||||
|
@ -27,6 +28,7 @@ type GrepOptions struct {
|
|||
MaxResultLimit int
|
||||
ContextLineNumber int
|
||||
IsFuzzy bool
|
||||
MaxLineLength int // the maximum length of a line to parse, exceeding chars will be truncated
|
||||
}
|
||||
|
||||
func GrepSearch(ctx context.Context, repo *Repository, search string, opts GrepOptions) ([]*GrepResult, error) {
|
||||
|
@ -71,10 +73,20 @@ func GrepSearch(ctx context.Context, repo *Repository, search string, opts GrepO
|
|||
defer stdoutReader.Close()
|
||||
|
||||
isInBlock := false
|
||||
scanner := bufio.NewScanner(stdoutReader)
|
||||
rd := bufio.NewReaderSize(stdoutReader, util.IfZero(opts.MaxLineLength, 16*1024))
|
||||
var res *GrepResult
|
||||
for scanner.Scan() {
|
||||
line := scanner.Text()
|
||||
for {
|
||||
lineBytes, isPrefix, err := rd.ReadLine()
|
||||
if isPrefix {
|
||||
lineBytes = slices.Clone(lineBytes)
|
||||
for isPrefix && err == nil {
|
||||
_, isPrefix, err = rd.ReadLine()
|
||||
}
|
||||
}
|
||||
if len(lineBytes) == 0 && err != nil {
|
||||
break
|
||||
}
|
||||
line := string(lineBytes) // the memory of lineBytes is mutable
|
||||
if !isInBlock {
|
||||
if _ /* ref */, filename, ok := strings.Cut(line, ":"); ok {
|
||||
isInBlock = true
|
||||
|
@ -100,7 +112,7 @@ func GrepSearch(ctx context.Context, repo *Repository, search string, opts GrepO
|
|||
res.LineCodes = append(res.LineCodes, lineCode)
|
||||
}
|
||||
}
|
||||
return scanner.Err()
|
||||
return nil
|
||||
},
|
||||
})
|
||||
// git grep exits by cancel (killed), usually it is caused by the limit of results
|
||||
|
|
|
@ -41,6 +41,16 @@ func TestGrepSearch(t *testing.T) {
|
|||
},
|
||||
}, res)
|
||||
|
||||
res, err = GrepSearch(context.Background(), repo, "void", GrepOptions{MaxResultLimit: 1, MaxLineLength: 39})
|
||||
assert.NoError(t, err)
|
||||
assert.Equal(t, []*GrepResult{
|
||||
{
|
||||
Filename: "java-hello/main.java",
|
||||
LineNumbers: []int{3},
|
||||
LineCodes: []string{" public static void main(String[] arg"},
|
||||
},
|
||||
}, res)
|
||||
|
||||
res, err = GrepSearch(context.Background(), repo, "no-such-content", GrepOptions{})
|
||||
assert.NoError(t, err)
|
||||
assert.Len(t, res, 0)
|
||||
|
|
|
@ -7,18 +7,11 @@ import (
|
|||
"crypto/sha256"
|
||||
"fmt"
|
||||
|
||||
"code.gitea.io/gitea/modules/cache"
|
||||
"code.gitea.io/gitea/modules/log"
|
||||
"code.gitea.io/gitea/modules/setting"
|
||||
)
|
||||
|
||||
// Cache represents a caching interface
|
||||
type Cache interface {
|
||||
// Put puts value into cache with key and expire time.
|
||||
Put(key string, val any, timeout int64) error
|
||||
// Get gets cached value by given key.
|
||||
Get(key string) any
|
||||
}
|
||||
|
||||
func getCacheKey(repoPath, commitID, entryPath string) string {
|
||||
hashBytes := sha256.Sum256([]byte(fmt.Sprintf("%s:%s:%s", repoPath, commitID, entryPath)))
|
||||
return fmt.Sprintf("last_commit:%x", hashBytes)
|
||||
|
@ -30,11 +23,11 @@ type LastCommitCache struct {
|
|||
ttl func() int64
|
||||
repo *Repository
|
||||
commitCache map[string]*Commit
|
||||
cache Cache
|
||||
cache cache.StringCache
|
||||
}
|
||||
|
||||
// NewLastCommitCache creates a new last commit cache for repo
|
||||
func NewLastCommitCache(count int64, repoPath string, gitRepo *Repository, cache Cache) *LastCommitCache {
|
||||
func NewLastCommitCache(count int64, repoPath string, gitRepo *Repository, cache cache.StringCache) *LastCommitCache {
|
||||
if cache == nil {
|
||||
return nil
|
||||
}
|
||||
|
@ -65,7 +58,7 @@ func (c *LastCommitCache) Get(ref, entryPath string) (*Commit, error) {
|
|||
return nil, nil
|
||||
}
|
||||
|
||||
commitID, ok := c.cache.Get(getCacheKey(c.repoPath, ref, entryPath)).(string)
|
||||
commitID, ok := c.cache.Get(getCacheKey(c.repoPath, ref, entryPath))
|
||||
if !ok || commitID == "" {
|
||||
return nil, nil
|
||||
}
|
||||
|
|
|
@ -185,17 +185,15 @@ func parseTagRef(ref map[string]string) (tag *Tag, err error) {
|
|||
|
||||
tag.Tagger = parseSignatureFromCommitLine(ref["creator"])
|
||||
tag.Message = ref["contents"]
|
||||
// strip PGP signature if present in contents field
|
||||
pgpStart := strings.Index(tag.Message, beginpgp)
|
||||
if pgpStart >= 0 {
|
||||
tag.Message = tag.Message[0:pgpStart]
|
||||
}
|
||||
|
||||
// strip any signature if present in contents field
|
||||
_, tag.Message, _ = parsePayloadSignature(util.UnsafeStringToBytes(tag.Message), 0)
|
||||
|
||||
// annotated tag with GPG signature
|
||||
if tag.Type == "tag" && ref["contents:signature"] != "" {
|
||||
payload := fmt.Sprintf("object %s\ntype commit\ntag %s\ntagger %s\n\n%s\n",
|
||||
tag.Object, tag.Name, ref["creator"], strings.TrimSpace(tag.Message))
|
||||
tag.Signature = &CommitGPGSignature{
|
||||
tag.Signature = &CommitSignature{
|
||||
Signature: ref["contents:signature"],
|
||||
Payload: payload,
|
||||
}
|
||||
|
|
|
@ -315,7 +315,7 @@ qbHDASXl
|
|||
Type: "tag",
|
||||
Tagger: parseSignatureFromCommitLine("Foo Bar <foo@bar.com> 1565789218 +0300"),
|
||||
Message: "Add changelog of v1.9.1 (#7859)\n\n* add changelog of v1.9.1\n* Update CHANGELOG.md",
|
||||
Signature: &CommitGPGSignature{
|
||||
Signature: &CommitSignature{
|
||||
Signature: `-----BEGIN PGP SIGNATURE-----
|
||||
|
||||
aBCGzBAABCgAdFiEEyWRwv/q1Q6IjSv+D4IPOwzt33PoFAmI8jbIACgkQ4IPOwzt3
|
||||
|
|
|
@ -6,16 +6,10 @@ package git
|
|||
import (
|
||||
"bytes"
|
||||
"sort"
|
||||
"strings"
|
||||
|
||||
"code.gitea.io/gitea/modules/util"
|
||||
)
|
||||
|
||||
const (
|
||||
beginpgp = "\n-----BEGIN PGP SIGNATURE-----\n"
|
||||
endpgp = "\n-----END PGP SIGNATURE-----"
|
||||
)
|
||||
|
||||
// Tag represents a Git tag.
|
||||
type Tag struct {
|
||||
Name string
|
||||
|
@ -24,7 +18,7 @@ type Tag struct {
|
|||
Type string
|
||||
Tagger *Signature
|
||||
Message string
|
||||
Signature *CommitGPGSignature
|
||||
Signature *CommitSignature
|
||||
}
|
||||
|
||||
// Commit return the commit of the tag reference
|
||||
|
@ -32,6 +26,36 @@ func (tag *Tag) Commit(gitRepo *Repository) (*Commit, error) {
|
|||
return gitRepo.getCommit(tag.Object)
|
||||
}
|
||||
|
||||
func parsePayloadSignature(data []byte, messageStart int) (payload, msg, sign string) {
|
||||
pos := messageStart
|
||||
signStart, signEnd := -1, -1
|
||||
for {
|
||||
eol := bytes.IndexByte(data[pos:], '\n')
|
||||
if eol < 0 {
|
||||
break
|
||||
}
|
||||
line := data[pos : pos+eol]
|
||||
signType, hasPrefix := bytes.CutPrefix(line, []byte("-----BEGIN "))
|
||||
signType, hasSuffix := bytes.CutSuffix(signType, []byte(" SIGNATURE-----"))
|
||||
if hasPrefix && hasSuffix {
|
||||
signEndBytes := append([]byte("\n-----END "), signType...)
|
||||
signEndBytes = append(signEndBytes, []byte(" SIGNATURE-----")...)
|
||||
signEnd = bytes.Index(data[pos:], signEndBytes)
|
||||
if signEnd != -1 {
|
||||
signStart = pos
|
||||
signEnd = pos + signEnd + len(signEndBytes)
|
||||
}
|
||||
}
|
||||
pos += eol + 1
|
||||
}
|
||||
|
||||
if signStart != -1 && signEnd != -1 {
|
||||
msgEnd := max(messageStart, signStart-1)
|
||||
return string(data[:msgEnd]), string(data[messageStart:msgEnd]), string(data[signStart:signEnd])
|
||||
}
|
||||
return string(data), string(data[messageStart:]), ""
|
||||
}
|
||||
|
||||
// Parse commit information from the (uncompressed) raw
|
||||
// data from the commit object.
|
||||
// \n\n separate headers from message
|
||||
|
@ -40,47 +64,37 @@ func parseTagData(objectFormat ObjectFormat, data []byte) (*Tag, error) {
|
|||
tag.ID = objectFormat.EmptyObjectID()
|
||||
tag.Object = objectFormat.EmptyObjectID()
|
||||
tag.Tagger = &Signature{}
|
||||
// we now have the contents of the commit object. Let's investigate...
|
||||
nextline := 0
|
||||
l:
|
||||
|
||||
pos := 0
|
||||
for {
|
||||
eol := bytes.IndexByte(data[nextline:], '\n')
|
||||
switch {
|
||||
case eol > 0:
|
||||
line := data[nextline : nextline+eol]
|
||||
spacepos := bytes.IndexByte(line, ' ')
|
||||
reftype := line[:spacepos]
|
||||
switch string(reftype) {
|
||||
case "object":
|
||||
id, err := NewIDFromString(string(line[spacepos+1:]))
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
tag.Object = id
|
||||
case "type":
|
||||
// A commit can have one or more parents
|
||||
tag.Type = string(line[spacepos+1:])
|
||||
case "tagger":
|
||||
tag.Tagger = parseSignatureFromCommitLine(util.UnsafeBytesToString(line[spacepos+1:]))
|
||||
}
|
||||
nextline += eol + 1
|
||||
case eol == 0:
|
||||
tag.Message = string(data[nextline+1:])
|
||||
break l
|
||||
default:
|
||||
break l
|
||||
eol := bytes.IndexByte(data[pos:], '\n')
|
||||
if eol == -1 {
|
||||
break // shouldn't happen, but could just tolerate it
|
||||
}
|
||||
if eol == 0 {
|
||||
pos++
|
||||
break // end of headers
|
||||
}
|
||||
line := data[pos : pos+eol]
|
||||
key, val, _ := bytes.Cut(line, []byte(" "))
|
||||
switch string(key) {
|
||||
case "object":
|
||||
id, err := NewIDFromString(string(val))
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
tag.Object = id
|
||||
case "type":
|
||||
tag.Type = string(val) // A commit can have one or more parents
|
||||
case "tagger":
|
||||
tag.Tagger = parseSignatureFromCommitLine(util.UnsafeBytesToString(val))
|
||||
}
|
||||
pos += eol + 1
|
||||
}
|
||||
idx := strings.LastIndex(tag.Message, beginpgp)
|
||||
if idx > 0 {
|
||||
endSigIdx := strings.Index(tag.Message[idx:], endpgp)
|
||||
if endSigIdx > 0 {
|
||||
tag.Signature = &CommitGPGSignature{
|
||||
Signature: tag.Message[idx+1 : idx+endSigIdx+len(endpgp)],
|
||||
Payload: string(data[:bytes.LastIndex(data, []byte(beginpgp))+1]),
|
||||
}
|
||||
tag.Message = tag.Message[:idx+1]
|
||||
}
|
||||
payload, msg, sign := parsePayloadSignature(data, pos)
|
||||
tag.Message = msg
|
||||
if len(sign) > 0 {
|
||||
tag.Signature = &CommitSignature{Signature: sign, Payload: payload}
|
||||
}
|
||||
return tag, nil
|
||||
}
|
||||
|
|
|
@ -12,24 +12,28 @@ import (
|
|||
|
||||
func Test_parseTagData(t *testing.T) {
|
||||
testData := []struct {
|
||||
data []byte
|
||||
tag Tag
|
||||
data string
|
||||
expected Tag
|
||||
}{
|
||||
{data: []byte(`object 3b114ab800c6432ad42387ccf6bc8d4388a2885a
|
||||
{
|
||||
data: `object 3b114ab800c6432ad42387ccf6bc8d4388a2885a
|
||||
type commit
|
||||
tag 1.22.0
|
||||
tagger Lucas Michot <lucas@semalead.com> 1484491741 +0100
|
||||
|
||||
`), tag: Tag{
|
||||
Name: "",
|
||||
ID: Sha1ObjectFormat.EmptyObjectID(),
|
||||
Object: &Sha1Hash{0x3b, 0x11, 0x4a, 0xb8, 0x0, 0xc6, 0x43, 0x2a, 0xd4, 0x23, 0x87, 0xcc, 0xf6, 0xbc, 0x8d, 0x43, 0x88, 0xa2, 0x88, 0x5a},
|
||||
Type: "commit",
|
||||
Tagger: &Signature{Name: "Lucas Michot", Email: "lucas@semalead.com", When: time.Unix(1484491741, 0)},
|
||||
Message: "",
|
||||
Signature: nil,
|
||||
}},
|
||||
{data: []byte(`object 7cdf42c0b1cc763ab7e4c33c47a24e27c66bfccc
|
||||
`,
|
||||
expected: Tag{
|
||||
Name: "",
|
||||
ID: Sha1ObjectFormat.EmptyObjectID(),
|
||||
Object: MustIDFromString("3b114ab800c6432ad42387ccf6bc8d4388a2885a"),
|
||||
Type: "commit",
|
||||
Tagger: &Signature{Name: "Lucas Michot", Email: "lucas@semalead.com", When: time.Unix(1484491741, 0).In(time.FixedZone("", 3600))},
|
||||
Message: "",
|
||||
Signature: nil,
|
||||
},
|
||||
},
|
||||
{
|
||||
data: `object 7cdf42c0b1cc763ab7e4c33c47a24e27c66bfccc
|
||||
type commit
|
||||
tag 1.22.1
|
||||
tagger Lucas Michot <lucas@semalead.com> 1484553735 +0100
|
||||
|
@ -37,37 +41,57 @@ tagger Lucas Michot <lucas@semalead.com> 1484553735 +0100
|
|||
test message
|
||||
o
|
||||
|
||||
ono`), tag: Tag{
|
||||
Name: "",
|
||||
ID: Sha1ObjectFormat.EmptyObjectID(),
|
||||
Object: &Sha1Hash{0x7c, 0xdf, 0x42, 0xc0, 0xb1, 0xcc, 0x76, 0x3a, 0xb7, 0xe4, 0xc3, 0x3c, 0x47, 0xa2, 0x4e, 0x27, 0xc6, 0x6b, 0xfc, 0xcc},
|
||||
Type: "commit",
|
||||
Tagger: &Signature{Name: "Lucas Michot", Email: "lucas@semalead.com", When: time.Unix(1484553735, 0)},
|
||||
Message: "test message\no\n\nono",
|
||||
Signature: nil,
|
||||
}},
|
||||
ono`,
|
||||
expected: Tag{
|
||||
Name: "",
|
||||
ID: Sha1ObjectFormat.EmptyObjectID(),
|
||||
Object: MustIDFromString("7cdf42c0b1cc763ab7e4c33c47a24e27c66bfccc"),
|
||||
Type: "commit",
|
||||
Tagger: &Signature{Name: "Lucas Michot", Email: "lucas@semalead.com", When: time.Unix(1484553735, 0).In(time.FixedZone("", 3600))},
|
||||
Message: "test message\no\n\nono",
|
||||
Signature: nil,
|
||||
},
|
||||
},
|
||||
{
|
||||
data: `object 7cdf42c0b1cc763ab7e4c33c47a24e27c66bfaaa
|
||||
type commit
|
||||
tag v0
|
||||
tagger dummy user <dummy-email@example.com> 1484491741 +0100
|
||||
|
||||
dummy message
|
||||
-----BEGIN SSH SIGNATURE-----
|
||||
dummy signature
|
||||
-----END SSH SIGNATURE-----
|
||||
`,
|
||||
expected: Tag{
|
||||
Name: "",
|
||||
ID: Sha1ObjectFormat.EmptyObjectID(),
|
||||
Object: MustIDFromString("7cdf42c0b1cc763ab7e4c33c47a24e27c66bfaaa"),
|
||||
Type: "commit",
|
||||
Tagger: &Signature{Name: "dummy user", Email: "dummy-email@example.com", When: time.Unix(1484491741, 0).In(time.FixedZone("", 3600))},
|
||||
Message: "dummy message",
|
||||
Signature: &CommitSignature{
|
||||
Signature: `-----BEGIN SSH SIGNATURE-----
|
||||
dummy signature
|
||||
-----END SSH SIGNATURE-----`,
|
||||
Payload: `object 7cdf42c0b1cc763ab7e4c33c47a24e27c66bfaaa
|
||||
type commit
|
||||
tag v0
|
||||
tagger dummy user <dummy-email@example.com> 1484491741 +0100
|
||||
|
||||
dummy message`,
|
||||
},
|
||||
},
|
||||
},
|
||||
}
|
||||
|
||||
for _, test := range testData {
|
||||
tag, err := parseTagData(Sha1ObjectFormat, test.data)
|
||||
tag, err := parseTagData(Sha1ObjectFormat, []byte(test.data))
|
||||
assert.NoError(t, err)
|
||||
assert.EqualValues(t, test.tag.ID, tag.ID)
|
||||
assert.EqualValues(t, test.tag.Object, tag.Object)
|
||||
assert.EqualValues(t, test.tag.Name, tag.Name)
|
||||
assert.EqualValues(t, test.tag.Message, tag.Message)
|
||||
assert.EqualValues(t, test.tag.Type, tag.Type)
|
||||
if test.tag.Signature != nil && assert.NotNil(t, tag.Signature) {
|
||||
assert.EqualValues(t, test.tag.Signature.Signature, tag.Signature.Signature)
|
||||
assert.EqualValues(t, test.tag.Signature.Payload, tag.Signature.Payload)
|
||||
} else {
|
||||
assert.Nil(t, tag.Signature)
|
||||
}
|
||||
if test.tag.Tagger != nil && assert.NotNil(t, tag.Tagger) {
|
||||
assert.EqualValues(t, test.tag.Tagger.Name, tag.Tagger.Name)
|
||||
assert.EqualValues(t, test.tag.Tagger.Email, tag.Tagger.Email)
|
||||
assert.EqualValues(t, test.tag.Tagger.When.Unix(), tag.Tagger.When.Unix())
|
||||
} else {
|
||||
assert.Nil(t, tag.Tagger)
|
||||
}
|
||||
assert.Equal(t, test.expected, *tag)
|
||||
}
|
||||
|
||||
tag, err := parseTagData(Sha1ObjectFormat, []byte("type commit\n\nfoo\n-----BEGIN SSH SIGNATURE-----\ncorrupted..."))
|
||||
assert.NoError(t, err)
|
||||
assert.Equal(t, "foo\n-----BEGIN SSH SIGNATURE-----\ncorrupted...", tag.Message)
|
||||
}
|
||||
|
|
|
@ -22,7 +22,7 @@ type Result struct {
|
|||
UpdatedUnix timeutil.TimeStamp
|
||||
Language string
|
||||
Color string
|
||||
Lines []ResultLine
|
||||
Lines []*ResultLine
|
||||
}
|
||||
|
||||
type ResultLine struct {
|
||||
|
@ -70,16 +70,18 @@ func writeStrings(buf *bytes.Buffer, strs ...string) error {
|
|||
return nil
|
||||
}
|
||||
|
||||
func HighlightSearchResultCode(filename string, lineNums []int, code string) []ResultLine {
|
||||
func HighlightSearchResultCode(filename, language string, lineNums []int, code string) []*ResultLine {
|
||||
// we should highlight the whole code block first, otherwise it doesn't work well with multiple line highlighting
|
||||
hl, _ := highlight.Code(filename, "", code)
|
||||
hl, _ := highlight.Code(filename, language, code)
|
||||
highlightedLines := strings.Split(string(hl), "\n")
|
||||
|
||||
// The lineNums outputted by highlight.Code might not match the original lineNums, because "highlight" removes the last `\n`
|
||||
lines := make([]ResultLine, min(len(highlightedLines), len(lineNums)))
|
||||
lines := make([]*ResultLine, min(len(highlightedLines), len(lineNums)))
|
||||
for i := 0; i < len(lines); i++ {
|
||||
lines[i].Num = lineNums[i]
|
||||
lines[i].FormattedContent = template.HTML(highlightedLines[i])
|
||||
lines[i] = &ResultLine{
|
||||
Num: lineNums[i],
|
||||
FormattedContent: template.HTML(highlightedLines[i]),
|
||||
}
|
||||
}
|
||||
return lines
|
||||
}
|
||||
|
@ -122,7 +124,7 @@ func searchResult(result *internal.SearchResult, startIndex, endIndex int) (*Res
|
|||
UpdatedUnix: result.UpdatedUnix,
|
||||
Language: result.Language,
|
||||
Color: result.Color,
|
||||
Lines: HighlightSearchResultCode(result.Filename, lineNums, formattedLinesBuffer.String()),
|
||||
Lines: HighlightSearchResultCode(result.Filename, result.Language, lineNums, formattedLinesBuffer.String()),
|
||||
}, nil
|
||||
}
|
||||
|
||||
|
|
|
@ -44,7 +44,7 @@ func (c *FilesystemClient) Download(ctx context.Context, objects []Pointer, call
|
|||
if err != nil {
|
||||
return err
|
||||
}
|
||||
|
||||
defer f.Close()
|
||||
if err := callback(p, f, nil); err != nil {
|
||||
return err
|
||||
}
|
||||
|
@ -75,7 +75,7 @@ func (c *FilesystemClient) Upload(ctx context.Context, objects []Pointer, callba
|
|||
if err != nil {
|
||||
return err
|
||||
}
|
||||
|
||||
defer f.Close()
|
||||
_, err = io.Copy(f, content)
|
||||
|
||||
return err
|
||||
|
|
|
@ -171,6 +171,7 @@ type processor func(ctx *RenderContext, node *html.Node)
|
|||
var defaultProcessors = []processor{
|
||||
fullIssuePatternProcessor,
|
||||
comparePatternProcessor,
|
||||
codePreviewPatternProcessor,
|
||||
fullHashPatternProcessor,
|
||||
shortLinkProcessor,
|
||||
linkProcessor,
|
||||
|
@ -708,7 +709,8 @@ func shortLinkProcessor(ctx *RenderContext, node *html.Node) {
|
|||
|
||||
name += tail
|
||||
image := false
|
||||
switch ext := filepath.Ext(link); ext {
|
||||
ext := filepath.Ext(link)
|
||||
switch ext {
|
||||
// fast path: empty string, ignore
|
||||
case "":
|
||||
// leave image as false
|
||||
|
@ -766,11 +768,26 @@ func shortLinkProcessor(ctx *RenderContext, node *html.Node) {
|
|||
}
|
||||
} else {
|
||||
if !absoluteLink {
|
||||
var base string
|
||||
if ctx.IsWiki {
|
||||
link = util.URLJoin(ctx.Links.WikiLink(), link)
|
||||
switch ext {
|
||||
case "":
|
||||
// no file extension, create a regular wiki link
|
||||
base = ctx.Links.WikiLink()
|
||||
default:
|
||||
// we have a file extension:
|
||||
// return a regular wiki link if it's a renderable file (extension),
|
||||
// raw link otherwise
|
||||
if Type(link) != "" {
|
||||
base = ctx.Links.WikiLink()
|
||||
} else {
|
||||
base = ctx.Links.WikiRawLink()
|
||||
}
|
||||
}
|
||||
} else {
|
||||
link = util.URLJoin(ctx.Links.SrcLink(), link)
|
||||
base = ctx.Links.SrcLink()
|
||||
}
|
||||
link = util.URLJoin(base, link)
|
||||
}
|
||||
childNode.Type = html.TextNode
|
||||
childNode.Data = name
|
||||
|
|
92
modules/markup/html_codepreview.go
Normal file
92
modules/markup/html_codepreview.go
Normal file
|
@ -0,0 +1,92 @@
|
|||
// Copyright 2024 The Gitea Authors. All rights reserved.
|
||||
// SPDX-License-Identifier: MIT
|
||||
|
||||
package markup
|
||||
|
||||
import (
|
||||
"html/template"
|
||||
"net/url"
|
||||
"regexp"
|
||||
"strconv"
|
||||
"strings"
|
||||
|
||||
"code.gitea.io/gitea/modules/httplib"
|
||||
"code.gitea.io/gitea/modules/log"
|
||||
|
||||
"golang.org/x/net/html"
|
||||
)
|
||||
|
||||
// codePreviewPattern matches "http://domain/.../{owner}/{repo}/src/commit/{commit}/{filepath}#L10-L20"
|
||||
var codePreviewPattern = regexp.MustCompile(`https?://\S+/([^\s/]+)/([^\s/]+)/src/commit/([0-9a-f]{7,64})(/\S+)#(L\d+(-L\d+)?)`)
|
||||
|
||||
type RenderCodePreviewOptions struct {
|
||||
FullURL string
|
||||
OwnerName string
|
||||
RepoName string
|
||||
CommitID string
|
||||
FilePath string
|
||||
|
||||
LineStart, LineStop int
|
||||
}
|
||||
|
||||
func renderCodeBlock(ctx *RenderContext, node *html.Node) (urlPosStart, urlPosStop int, htm template.HTML, err error) {
|
||||
m := codePreviewPattern.FindStringSubmatchIndex(node.Data)
|
||||
if m == nil {
|
||||
return 0, 0, "", nil
|
||||
}
|
||||
|
||||
opts := RenderCodePreviewOptions{
|
||||
FullURL: node.Data[m[0]:m[1]],
|
||||
OwnerName: node.Data[m[2]:m[3]],
|
||||
RepoName: node.Data[m[4]:m[5]],
|
||||
CommitID: node.Data[m[6]:m[7]],
|
||||
FilePath: node.Data[m[8]:m[9]],
|
||||
}
|
||||
if !httplib.IsCurrentGiteaSiteURL(opts.FullURL) {
|
||||
return 0, 0, "", nil
|
||||
}
|
||||
u, err := url.Parse(opts.FilePath)
|
||||
if err != nil {
|
||||
return 0, 0, "", err
|
||||
}
|
||||
opts.FilePath = strings.TrimPrefix(u.Path, "/")
|
||||
|
||||
lineStartStr, lineStopStr, _ := strings.Cut(node.Data[m[10]:m[11]], "-")
|
||||
lineStart, _ := strconv.Atoi(strings.TrimPrefix(lineStartStr, "L"))
|
||||
lineStop, _ := strconv.Atoi(strings.TrimPrefix(lineStopStr, "L"))
|
||||
opts.LineStart, opts.LineStop = lineStart, lineStop
|
||||
h, err := DefaultProcessorHelper.RenderRepoFileCodePreview(ctx.Ctx, opts)
|
||||
return m[0], m[1], h, err
|
||||
}
|
||||
|
||||
func codePreviewPatternProcessor(ctx *RenderContext, node *html.Node) {
|
||||
for node != nil {
|
||||
if node.Type != html.TextNode {
|
||||
node = node.NextSibling
|
||||
continue
|
||||
}
|
||||
urlPosStart, urlPosEnd, h, err := renderCodeBlock(ctx, node)
|
||||
if err != nil || h == "" {
|
||||
if err != nil {
|
||||
log.Error("Unable to render code preview: %v", err)
|
||||
}
|
||||
node = node.NextSibling
|
||||
continue
|
||||
}
|
||||
next := node.NextSibling
|
||||
textBefore := node.Data[:urlPosStart]
|
||||
textAfter := node.Data[urlPosEnd:]
|
||||
// "textBefore" could be empty if there is only a URL in the text node, then an empty node (p, or li) will be left here.
|
||||
// However, the empty node can't be simply removed, because:
|
||||
// 1. the following processors will still try to access it (need to double-check undefined behaviors)
|
||||
// 2. the new node is inserted as "<p>{TextBefore}<div NewNode/>{TextAfter}</p>" (the parent could also be "li")
|
||||
// then it is resolved as: "<p>{TextBefore}</p><div NewNode/><p>{TextAfter}</p>",
|
||||
// so unless it could correctly replace the parent "p/li" node, it is very difficult to eliminate the "TextBefore" empty node.
|
||||
node.Data = textBefore
|
||||
node.Parent.InsertBefore(&html.Node{Type: html.RawNode, Data: string(h)}, next)
|
||||
if textAfter != "" {
|
||||
node.Parent.InsertBefore(&html.Node{Type: html.TextNode, Data: textAfter}, next)
|
||||
}
|
||||
node = next
|
||||
}
|
||||
}
|
34
modules/markup/html_codepreview_test.go
Normal file
34
modules/markup/html_codepreview_test.go
Normal file
|
@ -0,0 +1,34 @@
|
|||
// Copyright 2024 The Gitea Authors. All rights reserved.
|
||||
// SPDX-License-Identifier: MIT
|
||||
|
||||
package markup_test
|
||||
|
||||
import (
|
||||
"context"
|
||||
"html/template"
|
||||
"strings"
|
||||
"testing"
|
||||
|
||||
"code.gitea.io/gitea/modules/git"
|
||||
"code.gitea.io/gitea/modules/markup"
|
||||
|
||||
"github.com/stretchr/testify/assert"
|
||||
)
|
||||
|
||||
func TestRenderCodePreview(t *testing.T) {
|
||||
markup.Init(&markup.ProcessorHelper{
|
||||
RenderRepoFileCodePreview: func(ctx context.Context, opts markup.RenderCodePreviewOptions) (template.HTML, error) {
|
||||
return "<div>code preview</div>", nil
|
||||
},
|
||||
})
|
||||
test := func(input, expected string) {
|
||||
buffer, err := markup.RenderString(&markup.RenderContext{
|
||||
Ctx: git.DefaultContext,
|
||||
Type: "markdown",
|
||||
}, input)
|
||||
assert.NoError(t, err)
|
||||
assert.Equal(t, strings.TrimSpace(expected), strings.TrimSpace(buffer))
|
||||
}
|
||||
test("http://localhost:3000/owner/repo/src/commit/0123456789/foo/bar.md#L10-L20", "<p><div>code preview</div></p>")
|
||||
test("http://other/owner/repo/src/commit/0123456789/foo/bar.md#L10-L20", `<p><a href="http://other/owner/repo/src/commit/0123456789/foo/bar.md#L10-L20" rel="nofollow">http://other/owner/repo/src/commit/0123456789/foo/bar.md#L10-L20</a></p>`)
|
||||
}
|
|
@ -427,6 +427,10 @@ func TestRender_ShortLinks(t *testing.T) {
|
|||
otherImgurlWiki := util.URLJoin(markup.TestRepoURL, "wiki", "raw", "Link+Other.jpg")
|
||||
encodedImgurlWiki := util.URLJoin(markup.TestRepoURL, "wiki", "raw", "Link+%23.jpg")
|
||||
notencodedImgurlWiki := util.URLJoin(markup.TestRepoURL, "wiki", "raw", "some", "path", "Link+#.jpg")
|
||||
renderableFileURL := util.URLJoin(tree, "markdown_file.md")
|
||||
renderableFileURLWiki := util.URLJoin(markup.TestRepoURL, "wiki", "markdown_file.md")
|
||||
unrenderableFileURL := util.URLJoin(tree, "file.zip")
|
||||
unrenderableFileURLWiki := util.URLJoin(markup.TestRepoURL, "wiki", "raw", "file.zip")
|
||||
favicon := "http://google.com/favicon.ico"
|
||||
|
||||
test(
|
||||
|
@ -481,6 +485,14 @@ func TestRender_ShortLinks(t *testing.T) {
|
|||
"[[Link]] [[Other Link]] [[Link?]]",
|
||||
`<p><a href="`+url+`" rel="nofollow">Link</a> <a href="`+otherURL+`" rel="nofollow">Other Link</a> <a href="`+encodedURL+`" rel="nofollow">Link?</a></p>`,
|
||||
`<p><a href="`+urlWiki+`" rel="nofollow">Link</a> <a href="`+otherURLWiki+`" rel="nofollow">Other Link</a> <a href="`+encodedURLWiki+`" rel="nofollow">Link?</a></p>`)
|
||||
test(
|
||||
"[[markdown_file.md]]",
|
||||
`<p><a href="`+renderableFileURL+`" rel="nofollow">markdown_file.md</a></p>`,
|
||||
`<p><a href="`+renderableFileURLWiki+`" rel="nofollow">markdown_file.md</a></p>`)
|
||||
test(
|
||||
"[[file.zip]]",
|
||||
`<p><a href="`+unrenderableFileURL+`" rel="nofollow">file.zip</a></p>`,
|
||||
`<p><a href="`+unrenderableFileURLWiki+`" rel="nofollow">file.zip</a></p>`)
|
||||
test(
|
||||
"[[Link #.jpg]]",
|
||||
`<p><a href="`+encodedImgurl+`" rel="nofollow"><img src="`+encodedImgurl+`" title="Link #.jpg" alt="Link #.jpg"/></a></p>`,
|
||||
|
|
|
@ -511,9 +511,17 @@ func TestMathBlock(t *testing.T) {
|
|||
`\(a\) \(b\)`,
|
||||
`<p><code class="language-math is-loading">a</code> <code class="language-math is-loading">b</code></p>` + nl,
|
||||
},
|
||||
{
|
||||
`$a$.`,
|
||||
`<p><code class="language-math is-loading">a</code>.</p>` + nl,
|
||||
},
|
||||
{
|
||||
`.$a$`,
|
||||
`<p>.$a$</p>` + nl,
|
||||
},
|
||||
{
|
||||
`$a a$b b$`,
|
||||
`<p><code class="language-math is-loading">a a$b b</code></p>` + nl,
|
||||
`<p>$a a$b b$</p>` + nl,
|
||||
},
|
||||
{
|
||||
`a a$b b`,
|
||||
|
@ -521,7 +529,15 @@ func TestMathBlock(t *testing.T) {
|
|||
},
|
||||
{
|
||||
`a$b $a a$b b$`,
|
||||
`<p>a$b <code class="language-math is-loading">a a$b b</code></p>` + nl,
|
||||
`<p>a$b $a a$b b$</p>` + nl,
|
||||
},
|
||||
{
|
||||
"a$x$",
|
||||
`<p>a$x$</p>` + nl,
|
||||
},
|
||||
{
|
||||
"$x$a",
|
||||
`<p>$x$a</p>` + nl,
|
||||
},
|
||||
{
|
||||
"$$a$$",
|
||||
|
@ -637,9 +653,9 @@ space</p>
|
|||
Expected: `<p>space @mention-user<br/>
|
||||
/just/a/path.bin<br/>
|
||||
<a href="https://example.com/file.bin" rel="nofollow">https://example.com/file.bin</a><br/>
|
||||
<a href="/wiki/file.bin" rel="nofollow">local link</a><br/>
|
||||
<a href="/wiki/raw/file.bin" rel="nofollow">local link</a><br/>
|
||||
<a href="https://example.com" rel="nofollow">remote link</a><br/>
|
||||
<a href="/wiki/file.bin" rel="nofollow">local link</a><br/>
|
||||
<a href="/wiki/raw/file.bin" rel="nofollow">local link</a><br/>
|
||||
<a href="https://example.com" rel="nofollow">remote link</a><br/>
|
||||
<a href="/wiki/raw/image.jpg" target="_blank" rel="nofollow noopener"><img src="/wiki/raw/image.jpg" alt="local image"/></a><br/>
|
||||
<a href="/wiki/raw/path/file" target="_blank" rel="nofollow noopener"><img src="/wiki/raw/path/file" alt="local image"/></a><br/>
|
||||
|
@ -695,9 +711,9 @@ space</p>
|
|||
Expected: `<p>space @mention-user<br/>
|
||||
/just/a/path.bin<br/>
|
||||
<a href="https://example.com/file.bin" rel="nofollow">https://example.com/file.bin</a><br/>
|
||||
<a href="https://gitea.io/wiki/file.bin" rel="nofollow">local link</a><br/>
|
||||
<a href="https://gitea.io/wiki/raw/file.bin" rel="nofollow">local link</a><br/>
|
||||
<a href="https://example.com" rel="nofollow">remote link</a><br/>
|
||||
<a href="https://gitea.io/wiki/file.bin" rel="nofollow">local link</a><br/>
|
||||
<a href="https://gitea.io/wiki/raw/file.bin" rel="nofollow">local link</a><br/>
|
||||
<a href="https://example.com" rel="nofollow">remote link</a><br/>
|
||||
<a href="https://gitea.io/wiki/raw/image.jpg" target="_blank" rel="nofollow noopener"><img src="https://gitea.io/wiki/raw/image.jpg" alt="local image"/></a><br/>
|
||||
<a href="https://gitea.io/wiki/raw/path/file" target="_blank" rel="nofollow noopener"><img src="https://gitea.io/wiki/raw/path/file" alt="local image"/></a><br/>
|
||||
|
@ -753,9 +769,9 @@ space</p>
|
|||
Expected: `<p>space @mention-user<br/>
|
||||
/just/a/path.bin<br/>
|
||||
<a href="https://example.com/file.bin" rel="nofollow">https://example.com/file.bin</a><br/>
|
||||
<a href="/relative/path/wiki/file.bin" rel="nofollow">local link</a><br/>
|
||||
<a href="/relative/path/wiki/raw/file.bin" rel="nofollow">local link</a><br/>
|
||||
<a href="https://example.com" rel="nofollow">remote link</a><br/>
|
||||
<a href="/relative/path/wiki/file.bin" rel="nofollow">local link</a><br/>
|
||||
<a href="/relative/path/wiki/raw/file.bin" rel="nofollow">local link</a><br/>
|
||||
<a href="https://example.com" rel="nofollow">remote link</a><br/>
|
||||
<a href="/relative/path/wiki/raw/image.jpg" target="_blank" rel="nofollow noopener"><img src="/relative/path/wiki/raw/image.jpg" alt="local image"/></a><br/>
|
||||
<a href="/relative/path/wiki/raw/path/file" target="_blank" rel="nofollow noopener"><img src="/relative/path/wiki/raw/path/file" alt="local image"/></a><br/>
|
||||
|
@ -813,9 +829,9 @@ space</p>
|
|||
Expected: `<p>space @mention-user<br/>
|
||||
/just/a/path.bin<br/>
|
||||
<a href="https://example.com/file.bin" rel="nofollow">https://example.com/file.bin</a><br/>
|
||||
<a href="/relative/path/wiki/file.bin" rel="nofollow">local link</a><br/>
|
||||
<a href="/relative/path/wiki/raw/file.bin" rel="nofollow">local link</a><br/>
|
||||
<a href="https://example.com" rel="nofollow">remote link</a><br/>
|
||||
<a href="/relative/path/wiki/file.bin" rel="nofollow">local link</a><br/>
|
||||
<a href="/relative/path/wiki/raw/file.bin" rel="nofollow">local link</a><br/>
|
||||
<a href="https://example.com" rel="nofollow">remote link</a><br/>
|
||||
<a href="/relative/path/wiki/raw/image.jpg" target="_blank" rel="nofollow noopener"><img src="/relative/path/wiki/raw/image.jpg" alt="local image"/></a><br/>
|
||||
<a href="/relative/path/wiki/raw/path/file" target="_blank" rel="nofollow noopener"><img src="/relative/path/wiki/raw/path/file" alt="local image"/></a><br/>
|
||||
|
@ -873,9 +889,9 @@ space</p>
|
|||
Expected: `<p>space @mention-user<br/>
|
||||
/just/a/path.bin<br/>
|
||||
<a href="https://example.com/file.bin" rel="nofollow">https://example.com/file.bin</a><br/>
|
||||
<a href="/relative/path/wiki/file.bin" rel="nofollow">local link</a><br/>
|
||||
<a href="/relative/path/wiki/raw/file.bin" rel="nofollow">local link</a><br/>
|
||||
<a href="https://example.com" rel="nofollow">remote link</a><br/>
|
||||
<a href="/relative/path/wiki/file.bin" rel="nofollow">local link</a><br/>
|
||||
<a href="/relative/path/wiki/raw/file.bin" rel="nofollow">local link</a><br/>
|
||||
<a href="https://example.com" rel="nofollow">remote link</a><br/>
|
||||
<a href="/relative/path/wiki/raw/image.jpg" target="_blank" rel="nofollow noopener"><img src="/relative/path/wiki/raw/image.jpg" alt="local image"/></a><br/>
|
||||
<a href="/relative/path/wiki/raw/path/file" target="_blank" rel="nofollow noopener"><img src="/relative/path/wiki/raw/path/file" alt="local image"/></a><br/>
|
||||
|
@ -935,9 +951,9 @@ space</p>
|
|||
Expected: `<p>space @mention-user<br/>
|
||||
/just/a/path.bin<br/>
|
||||
<a href="https://example.com/file.bin" rel="nofollow">https://example.com/file.bin</a><br/>
|
||||
<a href="/relative/path/wiki/file.bin" rel="nofollow">local link</a><br/>
|
||||
<a href="/relative/path/wiki/raw/file.bin" rel="nofollow">local link</a><br/>
|
||||
<a href="https://example.com" rel="nofollow">remote link</a><br/>
|
||||
<a href="/relative/path/wiki/file.bin" rel="nofollow">local link</a><br/>
|
||||
<a href="/relative/path/wiki/raw/file.bin" rel="nofollow">local link</a><br/>
|
||||
<a href="https://example.com" rel="nofollow">remote link</a><br/>
|
||||
<a href="/relative/path/wiki/raw/image.jpg" target="_blank" rel="nofollow noopener"><img src="/relative/path/wiki/raw/image.jpg" alt="local image"/></a><br/>
|
||||
<a href="/relative/path/wiki/raw/path/file" target="_blank" rel="nofollow noopener"><img src="/relative/path/wiki/raw/path/file" alt="local image"/></a><br/>
|
||||
|
|
|
@ -41,9 +41,12 @@ func (parser *inlineParser) Trigger() []byte {
return parser.start[0:1]
}

func isPunctuation(b byte) bool {
return b == '.' || b == '!' || b == '?' || b == ',' || b == ';' || b == ':'
}

func isAlphanumeric(b byte) bool {
// Github only cares about 0-9A-Za-z
return (b >= '0' && b <= '9') || (b >= 'A' && b <= 'Z') || (b >= 'a' && b <= 'z')
return (b >= 'a' && b <= 'z') || (b >= 'A' && b <= 'Z') || (b >= '0' && b <= '9')
}

// Parse parses the current line and returns a result of parsing.

@ -56,7 +59,7 @@ func (parser *inlineParser) Parse(parent ast.Node, block text.Reader, pc parser.
}

precedingCharacter := block.PrecendingCharacter()
if precedingCharacter < 256 && isAlphanumeric(byte(precedingCharacter)) {
if precedingCharacter < 256 && (isAlphanumeric(byte(precedingCharacter)) || isPunctuation(byte(precedingCharacter))) {
// need to exclude things like `a$` from being considered a start
return nil
}

@ -75,14 +78,19 @@ func (parser *inlineParser) Parse(parent ast.Node, block text.Reader, pc parser.
ender += pos

// Now we want to check the character at the end of our parser section
// that is ender + len(parser.end)
// that is ender + len(parser.end) and check if char before ender is '\'
pos = ender + len(parser.end)
if len(line) <= pos {
break
}
if !isAlphanumeric(line[pos]) {
suceedingCharacter := line[pos]
if !isPunctuation(suceedingCharacter) && !(suceedingCharacter == ' ') {
return nil
}
if line[ender-1] != '\\' {
break
}

// move the pointer onwards
ender += len(parser.end)
}
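The two predicates above decide where an inline $...$ span may legally start and end: the byte before the opening $ must be neither alphanumeric nor punctuation, and the byte after the closing $ must be punctuation or a space. A minimal standalone sketch of those boundary rules follows; the helper names are illustrative and not part of the goldmark extension itself.

package main

import "fmt"

func isPunct(b byte) bool {
	return b == '.' || b == '!' || b == '?' || b == ',' || b == ';' || b == ':'
}

func isAlnum(b byte) bool {
	return (b >= 'a' && b <= 'z') || (b >= 'A' && b <= 'Z') || (b >= '0' && b <= '9')
}

// canOpen reports whether a "$" preceded by b may start a math span:
// "a$x$" and ".$x$" are rejected, "( $x$" is fine.
func canOpen(b byte) bool { return !isAlnum(b) && !isPunct(b) }

// canClose reports whether the byte following the closing "$" lets the span end:
// only punctuation or a space is accepted, e.g. "$x$." or "$x$ y".
func canClose(b byte) bool { return isPunct(b) || b == ' ' }

func main() {
	fmt.Println(canOpen('a'), canOpen('('))   // false true
	fmt.Println(canClose('.'), canClose('z')) // true false
}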
@ -4,6 +4,8 @@
package markdown

import (
"path/filepath"

"code.gitea.io/gitea/modules/markup"
giteautil "code.gitea.io/gitea/modules/util"

@ -18,7 +20,16 @@ func (g *ASTTransformer) transformLink(ctx *markup.RenderContext, v *ast.Link, r
if !isAnchorFragment && !markup.IsFullURLBytes(link) {
base := ctx.Links.Base
if ctx.IsWiki {
base = ctx.Links.WikiLink()
if filepath.Ext(string(link)) == "" {
// This link doesn't have a file extension - assume a regular wiki link
base = ctx.Links.WikiLink()
} else if markup.Type(string(link)) != "" {
// If it's a file type we can render, use a regular wiki link
base = ctx.Links.WikiLink()
} else {
// Otherwise, use a raw link instead
base = ctx.Links.WikiRawLink()
}
} else if ctx.Links.HasBranchInfo() {
base = ctx.Links.SrcLink()
}
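In effect, a wiki link now keeps the pretty /wiki/ base when the target has no file extension or is a renderable file type, and falls back to /wiki/raw/ otherwise, which is what the updated test expectations above encode. A simplified, self-contained sketch of that decision follows; chooseWikiBase and the renderable callback are illustrative stand-ins for the RenderContext and markup.Type plumbing.

package main

import (
	"fmt"
	"path/filepath"
	"strings"
)

// chooseWikiBase mirrors the branch above in isolation: extension-less targets and
// renderable files keep the /wiki/ base, everything else uses /wiki/raw/.
func chooseWikiBase(link string, renderable func(string) bool) string {
	if filepath.Ext(link) == "" || renderable(link) {
		return "/relative/path/wiki"
	}
	return "/relative/path/wiki/raw"
}

func main() {
	isMarkdown := func(l string) bool { return strings.HasSuffix(l, ".md") }
	fmt.Println(chooseWikiBase("Home", isMarkdown))     // /relative/path/wiki
	fmt.Println(chooseWikiBase("Page.md", isMarkdown))  // /relative/path/wiki
	fmt.Println(chooseWikiBase("file.bin", isMarkdown)) // /relative/path/wiki/raw
}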
@ -8,6 +8,7 @@ import (
"context"
"errors"
"fmt"
"html/template"
"io"
"net/url"
"path/filepath"

@ -33,6 +34,8 @@ type ProcessorHelper struct {
IsUsernameMentionable func(ctx context.Context, username string) bool

ElementDir string // the direction of the elements, eg: "ltr", "rtl", "auto", default to no direction attribute

RenderRepoFileCodePreview func(ctx context.Context, options RenderCodePreviewOptions) (template.HTML, error)
}

var DefaultProcessorHelper ProcessorHelper
@ -60,6 +60,21 @@ func createDefaultPolicy() *bluemonday.Policy {
// For JS code copy and Mermaid loading state
policy.AllowAttrs("class").Matching(regexp.MustCompile(`^code-block( is-loading)?$`)).OnElements("pre")

// For code preview
policy.AllowAttrs("class").Matching(regexp.MustCompile(`^code-preview-[-\w]+( file-content)?$`)).Globally()
policy.AllowAttrs("class").Matching(regexp.MustCompile(`^lines-num$`)).OnElements("td")
policy.AllowAttrs("data-line-number").OnElements("span")
policy.AllowAttrs("class").Matching(regexp.MustCompile(`^lines-code chroma$`)).OnElements("td")
policy.AllowAttrs("class").Matching(regexp.MustCompile(`^code-inner$`)).OnElements("div")

// For code preview (unicode escape)
policy.AllowAttrs("class").Matching(regexp.MustCompile(`^file-view( unicode-escaped)?$`)).OnElements("table")
policy.AllowAttrs("class").Matching(regexp.MustCompile(`^lines-escape$`)).OnElements("td")
policy.AllowAttrs("class").Matching(regexp.MustCompile(`^toggle-escape-button btn interact-bg$`)).OnElements("a") // don't use button, button might submit a form
policy.AllowAttrs("class").Matching(regexp.MustCompile(`^(ambiguous-code-point|escaped-code-point|broken-code-point)$`)).OnElements("span")
policy.AllowAttrs("class").Matching(regexp.MustCompile(`^char$`)).OnElements("span")
policy.AllowAttrs("data-tooltip-content", "data-escaped").OnElements("span")

// For color preview
policy.AllowAttrs("class").Matching(regexp.MustCompile(`^color-preview$`)).OnElements("span")
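Each of these rules whitelists a class value by exact regular expression on specific elements; anything else is stripped by bluemonday. A small example of the same pattern outside Gitea (the sample HTML and output comments are illustrative):

package main

import (
	"fmt"
	"regexp"

	"github.com/microcosm-cc/bluemonday"
)

func main() {
	p := bluemonday.NewPolicy()
	// Same pattern as above: only class="color-preview" survives on <span>.
	p.AllowAttrs("class").Matching(regexp.MustCompile(`^color-preview$`)).OnElements("span")

	fmt.Println(p.Sanitize(`<span class="color-preview">#ffffff</span>`))
	// A non-matching class value is dropped while the element itself is kept.
	fmt.Println(p.Sanitize(`<span class="something-else">#ffffff</span>`))
}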
@ -35,7 +35,7 @@ func (o *Option[T]) UnmarshalYAML(value *yaml.Node) error {
return nil
}

func (o Option[T]) MarshalYAML() (interface{}, error) {
func (o Option[T]) MarshalYAML() (any, error) {
if !o.Has() {
return nil, nil
}
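Since Go 1.18, any is a built-in alias for interface{}, so the new signature still satisfies yaml.Marshaler, and returning (nil, nil) for an empty option renders the field as null. A self-contained sketch of that contract with a local stand-in Option type (not the actual Gitea type) and gopkg.in/yaml.v3:

package main

import (
	"fmt"

	"gopkg.in/yaml.v3"
)

// Option is a local stand-in used only for this sketch.
type Option[T any] struct {
	value *T
}

func Some[T any](v T) Option[T] { return Option[T]{value: &v} }

func (o Option[T]) Has() bool { return o.value != nil }

// MarshalYAML returns nil for an empty option, so the field is emitted as null.
func (o Option[T]) MarshalYAML() (any, error) {
	if !o.Has() {
		return nil, nil
	}
	return *o.value, nil
}

func main() {
	type doc struct {
		Timeout Option[int] `yaml:"timeout"`
	}
	out, _ := yaml.Marshal(doc{})
	fmt.Print(string(out)) // timeout: null
	out, _ = yaml.Marshal(doc{Timeout: Some(30)})
	fmt.Print(string(out)) // timeout: 30
}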
@ -58,6 +58,7 @@ type Package struct {
type Metadata struct {
Description string `json:"description,omitempty"`
ReleaseNotes string `json:"release_notes,omitempty"`
Readme string `json:"readme,omitempty"`
Authors string `json:"authors,omitempty"`
ProjectURL string `json:"project_url,omitempty"`
RepositoryURL string `json:"repository_url,omitempty"`

@ -71,6 +72,7 @@ type Dependency struct {
Version string `json:"version"`
}

// https://learn.microsoft.com/en-us/nuget/reference/nuspec
type nuspecPackage struct {
Metadata struct {
ID string `xml:"id"`

@ -80,6 +82,7 @@ type nuspecPackage struct {
ProjectURL string `xml:"projectUrl"`
Description string `xml:"description"`
ReleaseNotes string `xml:"releaseNotes"`
Readme string `xml:"readme"`
PackageTypes struct {
PackageType []struct {
Name string `xml:"name,attr"`

@ -89,6 +92,11 @@ type nuspecPackage struct {
URL string `xml:"url,attr"`
} `xml:"repository"`
Dependencies struct {
Dependency []struct {
ID string `xml:"id,attr"`
Version string `xml:"version,attr"`
Exclude string `xml:"exclude,attr"`
} `xml:"dependency"`
Group []struct {
TargetFramework string `xml:"targetFramework,attr"`
Dependency []struct {

@ -122,14 +130,14 @@ func ParsePackageMetaData(r io.ReaderAt, size int64) (*Package, error) {
}
defer f.Close()

return ParseNuspecMetaData(f)
return ParseNuspecMetaData(archive, f)
}
}
return nil, ErrMissingNuspecFile
}

// ParseNuspecMetaData parses a Nuspec file to retrieve the metadata of a Nuget package
func ParseNuspecMetaData(r io.Reader) (*Package, error) {
func ParseNuspecMetaData(archive *zip.Reader, r io.Reader) (*Package, error) {
var p nuspecPackage
if err := xml.NewDecoder(r).Decode(&p); err != nil {
return nil, err

@ -166,6 +174,28 @@ func ParseNuspecMetaData(r io.Reader) (*Package, error) {
Dependencies: make(map[string][]Dependency),
}

if p.Metadata.Readme != "" {
f, err := archive.Open(p.Metadata.Readme)
if err == nil {
buf, _ := io.ReadAll(f)
m.Readme = string(buf)
_ = f.Close()
}
}

if len(p.Metadata.Dependencies.Dependency) > 0 {
deps := make([]Dependency, 0, len(p.Metadata.Dependencies.Dependency))
for _, dep := range p.Metadata.Dependencies.Dependency {
if dep.ID == "" || dep.Version == "" {
continue
}
deps = append(deps, Dependency{
ID: dep.ID,
Version: dep.Version,
})
}
m.Dependencies[""] = deps
}
for _, group := range p.Metadata.Dependencies.Group {
deps := make([]Dependency, 0, len(group.Dependency))
for _, dep := range group.Dependency {
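The readme is now read straight out of the .nupkg archive: the path from the nuspec's readme element is opened via the zip reader and its contents stored on the metadata. A small standalone sketch of that stdlib pattern (the entry name and content are arbitrary):

package main

import (
	"archive/zip"
	"bytes"
	"fmt"
	"io"
)

func main() {
	// Build an in-memory archive, then read one entry by name the same way the
	// readme is pulled from the package above (zip.Reader.Open + io.ReadAll).
	var buf bytes.Buffer
	w := zip.NewWriter(&buf)
	f, _ := w.Create("README.md")
	_, _ = f.Write([]byte("hello readme"))
	_ = w.Close()

	r, err := zip.NewReader(bytes.NewReader(buf.Bytes()), int64(buf.Len()))
	if err != nil {
		panic(err)
	}
	rc, err := r.Open("README.md")
	if err == nil {
		content, _ := io.ReadAll(rc)
		_ = rc.Close()
		fmt.Println(string(content)) // hello readme
	}
}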
@ -6,7 +6,6 @@ package nuget
import (
"archive/zip"
"bytes"
"strings"
"testing"

"github.com/stretchr/testify/assert"

@ -19,6 +18,7 @@
projectURL = "https://gitea.io"
description = "Package Description"
releaseNotes = "Package Release Notes"
readme = "Readme"
repositoryURL = "https://gitea.io/gitea/gitea"
targetFramework = ".NETStandard2.1"
dependencyID = "System.Text.Json"

@ -36,6 +36,7 @@ const nuspecContent = `<?xml version="1.0" encoding="utf-8"?>
<description>` + description + `</description>
<releaseNotes>` + releaseNotes + `</releaseNotes>
<repository url="` + repositoryURL + `" />
<readme>README.md</readme>
<dependencies>
<group targetFramework="` + targetFramework + `">
<dependency id="` + dependencyID + `" version="` + dependencyVersion + `" exclude="Build,Analyzers" />

@ -60,17 +61,19 @@ const symbolsNuspecContent = `<?xml version="1.0" encoding="utf-8"?>
</package>`

func TestParsePackageMetaData(t *testing.T) {
createArchive := func(name, content string) []byte {
createArchive := func(files map[string]string) []byte {
var buf bytes.Buffer
archive := zip.NewWriter(&buf)
w, _ := archive.Create(name)
w.Write([]byte(content))
for name, content := range files {
w, _ := archive.Create(name)
w.Write([]byte(content))
}
archive.Close()
return buf.Bytes()
}

t.Run("MissingNuspecFile", func(t *testing.T) {
data := createArchive("dummy.txt", "")
data := createArchive(map[string]string{"dummy.txt": ""})

np, err := ParsePackageMetaData(bytes.NewReader(data), int64(len(data)))
assert.Nil(t, np)

@ -78,7 +81,7 @@ func TestParsePackageMetaData(t *testing.T) {
})

t.Run("MissingNuspecFileInRoot", func(t *testing.T) {
data := createArchive("sub/package.nuspec", "")
data := createArchive(map[string]string{"sub/package.nuspec": ""})

np, err := ParsePackageMetaData(bytes.NewReader(data), int64(len(data)))
assert.Nil(t, np)

@ -86,7 +89,7 @@ func TestParsePackageMetaData(t *testing.T) {
})

t.Run("InvalidNuspecFile", func(t *testing.T) {
data := createArchive("package.nuspec", "")
data := createArchive(map[string]string{"package.nuspec": ""})

np, err := ParsePackageMetaData(bytes.NewReader(data), int64(len(data)))
assert.Nil(t, np)

@ -94,10 +97,10 @@ func TestParsePackageMetaData(t *testing.T) {
})

t.Run("InvalidPackageId", func(t *testing.T) {
data := createArchive("package.nuspec", `<?xml version="1.0" encoding="utf-8"?>
data := createArchive(map[string]string{"package.nuspec": `<?xml version="1.0" encoding="utf-8"?>
<package xmlns="http://schemas.microsoft.com/packaging/2013/05/nuspec.xsd">
<metadata></metadata>
</package>`)
</package>`})

np, err := ParsePackageMetaData(bytes.NewReader(data), int64(len(data)))
assert.Nil(t, np)

@ -105,30 +108,34 @@ func TestParsePackageMetaData(t *testing.T) {
})

t.Run("InvalidPackageVersion", func(t *testing.T) {
data := createArchive("package.nuspec", `<?xml version="1.0" encoding="utf-8"?>
data := createArchive(map[string]string{"package.nuspec": `<?xml version="1.0" encoding="utf-8"?>
<package xmlns="http://schemas.microsoft.com/packaging/2013/05/nuspec.xsd">
<metadata>
<id>`+id+`</id>
<id>` + id + `</id>
</metadata>
</package>`)
</package>`})

np, err := ParsePackageMetaData(bytes.NewReader(data), int64(len(data)))
assert.Nil(t, np)
assert.ErrorIs(t, err, ErrNuspecInvalidVersion)
})

t.Run("Valid", func(t *testing.T) {
data := createArchive("package.nuspec", nuspecContent)
t.Run("MissingReadme", func(t *testing.T) {
data := createArchive(map[string]string{"package.nuspec": nuspecContent})

np, err := ParsePackageMetaData(bytes.NewReader(data), int64(len(data)))
assert.NoError(t, err)
assert.NotNil(t, np)
assert.Empty(t, np.Metadata.Readme)
})
}

func TestParseNuspecMetaData(t *testing.T) {
t.Run("Dependency Package", func(t *testing.T) {
np, err := ParseNuspecMetaData(strings.NewReader(nuspecContent))
data := createArchive(map[string]string{
"package.nuspec": nuspecContent,
"README.md": readme,
})

np, err := ParsePackageMetaData(bytes.NewReader(data), int64(len(data)))
assert.NoError(t, err)
assert.NotNil(t, np)
assert.Equal(t, DependencyPackage, np.PackageType)

@ -139,6 +146,7 @@ func TestParseNuspecMetaData(t *testing.T) {
assert.Equal(t, projectURL, np.Metadata.ProjectURL)
assert.Equal(t, description, np.Metadata.Description)
assert.Equal(t, releaseNotes, np.Metadata.ReleaseNotes)
assert.Equal(t, readme, np.Metadata.Readme)
assert.Equal(t, repositoryURL, np.Metadata.RepositoryURL)
assert.Len(t, np.Metadata.Dependencies, 1)
assert.Contains(t, np.Metadata.Dependencies, targetFramework)

@ -148,13 +156,15 @@ func TestParseNuspecMetaData(t *testing.T) {
assert.Equal(t, dependencyVersion, deps[0].Version)

t.Run("NormalizedVersion", func(t *testing.T) {
np, err := ParseNuspecMetaData(strings.NewReader(`<?xml version="1.0" encoding="utf-8"?>
<package xmlns="http://schemas.microsoft.com/packaging/2013/05/nuspec.xsd">
<metadata>
<id>test</id>
<version>1.04.5.2.5-rc.1+metadata</version>
</metadata>
</package>`))
data := createArchive(map[string]string{"package.nuspec": `<?xml version="1.0" encoding="utf-8"?>
<package xmlns="http://schemas.microsoft.com/packaging/2013/05/nuspec.xsd">
<metadata>
<id>test</id>
<version>1.04.5.2.5-rc.1+metadata</version>
</metadata>
</package>`})

np, err := ParsePackageMetaData(bytes.NewReader(data), int64(len(data)))
assert.NoError(t, err)
assert.NotNil(t, np)
assert.Equal(t, "1.4.5.2-rc.1", np.Version)

@ -162,7 +172,9 @@ func TestParseNuspecMetaData(t *testing.T) {
})

t.Run("Symbols Package", func(t *testing.T) {
np, err := ParseNuspecMetaData(strings.NewReader(symbolsNuspecContent))
data := createArchive(map[string]string{"package.nuspec": symbolsNuspecContent})

np, err := ParsePackageMetaData(bytes.NewReader(data), int64(len(data)))
assert.NoError(t, err)
assert.NotNil(t, np)
assert.Equal(t, SymbolsPackage, np.PackageType)
|
|||
"time"
|
||||
|
||||
"code.gitea.io/gitea/modules/git"
|
||||
"code.gitea.io/gitea/modules/optional"
|
||||
"code.gitea.io/gitea/modules/setting"
|
||||
)
|
||||
|
||||
|
@ -32,13 +33,13 @@ const (
|
|||
)
|
||||
|
||||
// Bool checks for a key in the map and parses as a boolean
|
||||
func (g GitPushOptions) Bool(key string, def bool) bool {
|
||||
func (g GitPushOptions) Bool(key string) optional.Option[bool] {
|
||||
if val, ok := g[key]; ok {
|
||||
if b, err := strconv.ParseBool(val); err == nil {
|
||||
return b
|
||||
return optional.Some(b)
|
||||
}
|
||||
}
|
||||
return def
|
||||
return optional.None[bool]()
|
||||
}
|
||||
|
||||
// HookOptions represents the options for the Hook calls
|
||||
|
@ -87,13 +88,17 @@ type HookProcReceiveResult struct {
|
|||
|
||||
// HookProcReceiveRefResult represents an individual result from ProcReceive
|
||||
type HookProcReceiveRefResult struct {
|
||||
OldOID string
|
||||
NewOID string
|
||||
Ref string
|
||||
OriginalRef git.RefName
|
||||
IsForcePush bool
|
||||
IsNotMatched bool
|
||||
Err string
|
||||
OldOID string
|
||||
NewOID string
|
||||
Ref string
|
||||
OriginalRef git.RefName
|
||||
IsForcePush bool
|
||||
IsNotMatched bool
|
||||
Err string
|
||||
IsCreatePR bool
|
||||
URL string
|
||||
ShouldShowMessage bool
|
||||
HeadBranch string
|
||||
}
|
||||
|
||||
// HookPreReceive check whether the provided commits are allowed
|
||||
|
|
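Returning optional.Option[bool] instead of taking a default lets callers tell "the pusher did not send this option" apart from "the pusher explicitly sent false", moving the defaulting decision to the call site. A self-contained sketch of that shape with local stand-ins (optBool and the option keys are illustrative, not Gitea's actual optional package):

package main

import (
	"fmt"
	"strconv"
)

// optBool is a minimal stand-in for an optional boolean.
type optBool struct {
	has bool
	v   bool
}

type gitPushOptions map[string]string

// Bool reports the parsed value only when the key is present and parseable.
func (g gitPushOptions) Bool(key string) optBool {
	if val, ok := g[key]; ok {
		if b, err := strconv.ParseBool(val); err == nil {
			return optBool{has: true, v: b}
		}
	}
	return optBool{}
}

func main() {
	opts := gitPushOptions{"force-push": "true"}
	if o := opts.Bool("force-push"); o.has {
		fmt.Println("caller uses the explicit value:", o.v)
	}
	if o := opts.Bool("topic"); !o.has {
		fmt.Println("no value pushed; caller applies its own default")
	}
}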