Mirror of https://github.com/go-gitea/gitea, synced 2024-11-05 05:39:14 +01:00

Merge branch 'main' into lunny/fix_move_column
Commit aeb0bd89a5

34 changed files with 431 additions and 282 deletions
@@ -1558,8 +1558,8 @@ LEVEL = Info
;; email = use the username part of the email attribute
;; Note: `nickname`, `preferred_username` and `email` options will normalize input strings using the following criteria:
;; - diacritics are removed
;; - the characters in the set `['´\x60]` are removed
;; - the characters in the set `[\s~+]` are replaced with `-`
;; - the characters in the set ['´`] are removed
;; - the characters in the set [\s~+] are replaced with "-"
;USERNAME = nickname
;;
;; Update avatar if available from oauth2 provider.
@@ -612,7 +612,7 @@ And the following unique queues:
- `email` - use the username part of the email attribute
- Note: `nickname`, `preferred_username` and `email` options will normalize input strings using the following criteria:
  - diacritics are removed
  - the characters in the set `['´\x60]` are removed
  - the characters in the set ```['´`]``` are removed
  - the characters in the set `[\s~+]` are replaced with `-`
- `UPDATE_AVATAR`: **false**: Update avatar if available from oauth2 provider. Update will be performed on each login.
- `ACCOUNT_LINKING`: **login**: How to handle if an account / email already exists:
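The normalization criteria listed above boil down to a Unicode decomposition pass plus two small regular expressions. Below is a minimal, self-contained sketch of that behaviour (illustrative only, not Gitea's actual NormalizeUserName: the real function also maps "Æ" to "AE" and reports transform errors); it assumes the golang.org/x/text packages are available.

package main

import (
    "fmt"
    "regexp"
    "strings"
    "unicode"

    "golang.org/x/text/runes"
    "golang.org/x/text/transform"
    "golang.org/x/text/unicode/norm"
)

var (
    removeChars = regexp.MustCompile("['`´]")  // quotes and backticks are dropped
    hyphenChars = regexp.MustCompile(`[\s~+]`) // whitespace, ~ and + become "-"
    dropMarks   = transform.Chain(norm.NFD, runes.Remove(runes.In(unicode.Mn)), norm.NFC)
)

// normalize is an illustrative helper, not the Gitea API.
func normalize(s string) string {
    s, _, _ = strings.Cut(s, "@")            // keep only the local part of an email address
    s, _, _ = transform.String(dropMarks, s) // strip diacritics (é -> e); error ignored in this sketch
    s = removeChars.ReplaceAllLiteralString(s, "")
    return hyphenChars.ReplaceAllLiteralString(s, "-")
}

func main() {
    fmt.Println(normalize("Sinéad.O'Connor@example.com")) // Sinead.OConnor
    fmt.Println(normalize("Awareness Hub"))               // Awareness-Hub
}

Running it prints Sinead.OConnor and Awareness-Hub, matching the test cases added in models/user/user_test.go further down.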
@@ -74,6 +74,13 @@ func (run *ActionRun) Link() string {
    return fmt.Sprintf("%s/actions/runs/%d", run.Repo.Link(), run.Index)
}

func (run *ActionRun) WorkflowLink() string {
    if run.Repo == nil {
        return ""
    }
    return fmt.Sprintf("%s/actions/?workflow=%s", run.Repo.Link(), run.WorkflowID)
}

// RefLink return the url of run's ref
func (run *ActionRun) RefLink() string {
    refName := git.RefName(run.Ref)

@@ -156,6 +163,10 @@ func (run *ActionRun) GetPullRequestEventPayload() (*api.PullRequestPayload, err
    return nil, fmt.Errorf("event %s is not a pull request event", run.Event)
}

func (run *ActionRun) IsSchedule() bool {
    return run.ScheduleID > 0
}

func updateRepoRunsNumbers(ctx context.Context, repo *repo_model.Repository) error {
    _, err := db.GetEngine(ctx).ID(repo.ID).
        SetExpr("num_action_runs",
@@ -16,6 +16,7 @@ import (
    "code.gitea.io/gitea/models/system"
    "code.gitea.io/gitea/modules/auth/password/hash"
    "code.gitea.io/gitea/modules/base"
    "code.gitea.io/gitea/modules/cache"
    "code.gitea.io/gitea/modules/git"
    "code.gitea.io/gitea/modules/setting"
    "code.gitea.io/gitea/modules/setting/config"

@@ -106,6 +107,7 @@ func MainTest(m *testing.M, testOpts ...*TestOptions) {
        fatalTestError("Error creating test engine: %v\n", err)
    }

    setting.IsInTesting = true
    setting.AppURL = "https://try.gitea.io/"
    setting.RunUser = "runuser"
    setting.SSH.User = "sshuser"

@@ -148,6 +150,9 @@ func MainTest(m *testing.M, testOpts ...*TestOptions) {

    config.SetDynGetter(system.NewDatabaseDynKeyGetter())

    if err = cache.Init(); err != nil {
        fatalTestError("cache.Init: %v\n", err)
    }
    if err = storage.Init(); err != nil {
        fatalTestError("storage.Init: %v\n", err)
    }
@ -501,19 +501,19 @@ func GetUserSalt() (string, error) {
|
|||
// Note: The set of characters here can safely expand without a breaking change,
|
||||
// but characters removed from this set can cause user account linking to break
|
||||
var (
|
||||
customCharsReplacement = strings.NewReplacer("Æ", "AE")
|
||||
removeCharsRE = regexp.MustCompile(`['´\x60]`)
|
||||
removeDiacriticsTransform = transform.Chain(norm.NFD, runes.Remove(runes.In(unicode.Mn)), norm.NFC)
|
||||
replaceCharsHyphenRE = regexp.MustCompile(`[\s~+]`)
|
||||
customCharsReplacement = strings.NewReplacer("Æ", "AE")
|
||||
removeCharsRE = regexp.MustCompile("['`´]")
|
||||
transformDiacritics = transform.Chain(norm.NFD, runes.Remove(runes.In(unicode.Mn)), norm.NFC)
|
||||
replaceCharsHyphenRE = regexp.MustCompile(`[\s~+]`)
|
||||
)
|
||||
|
||||
// normalizeUserName returns a string with single-quotes and diacritics
|
||||
// removed, and any other non-supported username characters replaced with
|
||||
// a `-` character
|
||||
// NormalizeUserName only takes the name part if it is an email address, transforms it diacritics to ASCII characters.
|
||||
// It returns a string with the single-quotes removed, and any other non-supported username characters are replaced with a `-` character
|
||||
func NormalizeUserName(s string) (string, error) {
|
||||
strDiacriticsRemoved, n, err := transform.String(removeDiacriticsTransform, customCharsReplacement.Replace(s))
|
||||
s, _, _ = strings.Cut(s, "@")
|
||||
strDiacriticsRemoved, n, err := transform.String(transformDiacritics, customCharsReplacement.Replace(s))
|
||||
if err != nil {
|
||||
return "", fmt.Errorf("Failed to normalize character `%v` in provided username `%v`", s[n], s)
|
||||
return "", fmt.Errorf("failed to normalize the string of provided username %q at position %d", s, n)
|
||||
}
|
||||
return replaceCharsHyphenRE.ReplaceAllLiteralString(removeCharsRE.ReplaceAllLiteralString(strDiacriticsRemoved, ""), "-"), nil
|
||||
}
|
||||
|
|
|
@@ -506,15 +506,16 @@ func Test_NormalizeUserFromEmail(t *testing.T) {
        Expected string
        IsNormalizedValid bool
    }{
        {"test", "test", true},
        {"name@example.com", "name", true},
        {"test'`´name", "testname", true},
        {"Sinéad.O'Connor", "Sinead.OConnor", true},
        {"Æsir", "AEsir", true},
        // \u00e9\u0065\u0301
        {"éé", "ee", true},
        {"éé", "ee", true}, // \u00e9\u0065\u0301
        {"Awareness Hub", "Awareness-Hub", true},
        {"double__underscore", "double__underscore", false}, // We should consider squashing double non-alpha characters
        {".bad.", ".bad.", false},
        {"new😀user", "new😀user", false}, // No plans to support
        {`"quoted"`, `"quoted"`, false}, // No plans to support
    }
    for _, testCase := range testCases {
        normalizedName, err := user_model.NormalizeUserName(testCase.Input)
modules/git/pipeline/lfs_common.go (new file, 32 lines)

@@ -0,0 +1,32 @@
// Copyright 2024 The Gitea Authors. All rights reserved.
// SPDX-License-Identifier: MIT

package pipeline

import (
    "fmt"
    "time"

    "code.gitea.io/gitea/modules/git"
)

// LFSResult represents commits found using a provided pointer file hash
type LFSResult struct {
    Name string
    SHA string
    Summary string
    When time.Time
    ParentHashes []git.ObjectID
    BranchName string
    FullCommitName string
}

type lfsResultSlice []*LFSResult

func (a lfsResultSlice) Len() int { return len(a) }
func (a lfsResultSlice) Swap(i, j int) { a[i], a[j] = a[j], a[i] }
func (a lfsResultSlice) Less(i, j int) bool { return a[j].When.After(a[i].When) }

func lfsError(msg string, err error) error {
    return fmt.Errorf("LFS error occurred, %s: err: %w", msg, err)
}
@@ -7,12 +7,10 @@ package pipeline

import (
    "bufio"
    "fmt"
    "io"
    "sort"
    "strings"
    "sync"
    "time"

    "code.gitea.io/gitea/modules/git"

@@ -21,23 +19,6 @@ import (
    "github.com/go-git/go-git/v5/plumbing/object"
)

// LFSResult represents commits found using a provided pointer file hash
type LFSResult struct {
    Name string
    SHA string
    Summary string
    When time.Time
    ParentHashes []git.ObjectID
    BranchName string
    FullCommitName string
}

type lfsResultSlice []*LFSResult

func (a lfsResultSlice) Len() int { return len(a) }
func (a lfsResultSlice) Swap(i, j int) { a[i], a[j] = a[j], a[i] }
func (a lfsResultSlice) Less(i, j int) bool { return a[j].When.After(a[i].When) }

// FindLFSFile finds commits that contain a provided pointer file hash
func FindLFSFile(repo *git.Repository, objectID git.ObjectID) ([]*LFSResult, error) {
    resultsMap := map[string]*LFSResult{}

@@ -51,7 +32,7 @@ func FindLFSFile(repo *git.Repository, objectID git.ObjectID) ([]*LFSResult, err
        All: true,
    })
    if err != nil {
        return nil, fmt.Errorf("Failed to get GoGit CommitsIter. Error: %w", err)
        return nil, lfsError("failed to get GoGit CommitsIter", err)
    }

    err = commitsIter.ForEach(func(gitCommit *object.Commit) error {

@@ -85,7 +66,7 @@ func FindLFSFile(repo *git.Repository, objectID git.ObjectID) ([]*LFSResult, err
        return nil
    })
    if err != nil && err != io.EOF {
        return nil, fmt.Errorf("Failure in CommitIter.ForEach: %w", err)
        return nil, lfsError("failure in CommitIter.ForEach", err)
    }

    for _, result := range resultsMap {

@@ -156,7 +137,7 @@ func FindLFSFile(repo *git.Repository, objectID git.ObjectID) ([]*LFSResult, err
    select {
    case err, has := <-errChan:
        if has {
            return nil, fmt.Errorf("Unable to obtain name for LFS files. Error: %w", err)
            return nil, lfsError("unable to obtain name for LFS files", err)
        }
    default:
    }
@@ -8,33 +8,14 @@ package pipeline
import (
    "bufio"
    "bytes"
    "fmt"
    "io"
    "sort"
    "strings"
    "sync"
    "time"

    "code.gitea.io/gitea/modules/git"
)

// LFSResult represents commits found using a provided pointer file hash
type LFSResult struct {
    Name string
    SHA string
    Summary string
    When time.Time
    ParentIDs []git.ObjectID
    BranchName string
    FullCommitName string
}

type lfsResultSlice []*LFSResult

func (a lfsResultSlice) Len() int { return len(a) }
func (a lfsResultSlice) Swap(i, j int) { a[i], a[j] = a[j], a[i] }
func (a lfsResultSlice) Less(i, j int) bool { return a[j].When.After(a[i].When) }

// FindLFSFile finds commits that contain a provided pointer file hash
func FindLFSFile(repo *git.Repository, objectID git.ObjectID) ([]*LFSResult, error) {
    resultsMap := map[string]*LFSResult{}

@@ -137,11 +118,11 @@ func FindLFSFile(repo *git.Repository, objectID git.ObjectID) ([]*LFSResult, err
    n += int64(count)
    if bytes.Equal(binObjectID, objectID.RawValue()) {
        result := LFSResult{
            Name: curPath + string(fname),
            SHA: curCommit.ID.String(),
            Summary: strings.Split(strings.TrimSpace(curCommit.CommitMessage), "\n")[0],
            When: curCommit.Author.When,
            ParentIDs: curCommit.Parents,
            Name: curPath + string(fname),
            SHA: curCommit.ID.String(),
            Summary: strings.Split(strings.TrimSpace(curCommit.CommitMessage), "\n")[0],
            When: curCommit.Author.When,
            ParentHashes: curCommit.Parents,
        }
        resultsMap[curCommit.ID.String()+":"+curPath+string(fname)] = &result
    } else if string(mode) == git.EntryModeTree.String() {

@@ -183,7 +164,7 @@ func FindLFSFile(repo *git.Repository, objectID git.ObjectID) ([]*LFSResult, err

    for _, result := range resultsMap {
        hasParent := false
        for _, parentID := range result.ParentIDs {
        for _, parentID := range result.ParentHashes {
            if _, hasParent = resultsMap[parentID.String()+":"+result.Name]; hasParent {
                break
            }

@@ -240,7 +221,7 @@ func FindLFSFile(repo *git.Repository, objectID git.ObjectID) ([]*LFSResult, err
    select {
    case err, has := <-errChan:
        if has {
            return nil, fmt.Errorf("Unable to obtain name for LFS files. Error: %w", err)
            return nil, lfsError("unable to obtain name for LFS files", err)
        }
    default:
    }
modules/session/mock.go (new file, 26 lines)

@@ -0,0 +1,26 @@
// Copyright 2024 The Gitea Authors. All rights reserved.
// SPDX-License-Identifier: MIT

package session

import (
    "net/http"

    "gitea.com/go-chi/session"
)

type MockStore struct {
    *session.MemStore
}

func (m *MockStore) Destroy(writer http.ResponseWriter, request *http.Request) error {
    return nil
}

type mockStoreContextKeyStruct struct{}

var MockStoreContextKey = mockStoreContextKeyStruct{}

func NewMockStore(sid string) *MockStore {
    return &MockStore{session.NewMemStore(sid)}
}
@@ -6,6 +6,8 @@ package session
import (
    "net/http"

    "code.gitea.io/gitea/modules/setting"

    "gitea.com/go-chi/session"
)

@@ -14,6 +16,10 @@ type Store interface {
    Get(any) any
    Set(any, any) error
    Delete(any) error
    ID() string
    Release() error
    Flush() error
    Destroy(http.ResponseWriter, *http.Request) error
}

// RegenerateSession regenerates the underlying session and returns the new store

@@ -21,8 +27,21 @@ func RegenerateSession(resp http.ResponseWriter, req *http.Request) (Store, erro
    for _, f := range BeforeRegenerateSession {
        f(resp, req)
    }
    s, err := session.RegenerateSession(resp, req)
    return s, err
    if setting.IsInTesting {
        if store, ok := req.Context().Value(MockStoreContextKey).(*MockStore); ok {
            return store, nil
        }
    }
    return session.RegenerateSession(resp, req)
}

func GetContextSession(req *http.Request) Store {
    if setting.IsInTesting {
        if store, ok := req.Context().Value(MockStoreContextKey).(*MockStore); ok {
            return store
        }
    }
    return session.GetSession(req)
}

// BeforeRegenerateSession is a list of functions that are called before a session is regenerated.
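A hedged sketch of how the mock store above is meant to be reached (MockStoreContextKey, NewMockStore, GetContextSession and setting.IsInTesting are taken from this diff; the test function itself and the /ping path are invented for illustration): the store is put into the request context under MockStoreContextKey, and GetContextSession returns it instead of a real go-chi session while setting.IsInTesting is true.

package session_test

import (
    "context"
    "net/http/httptest"
    "testing"

    "code.gitea.io/gitea/modules/session"
    "code.gitea.io/gitea/modules/setting"
)

func TestMockStoreLookup(t *testing.T) {
    setting.IsInTesting = true // GetContextSession only consults the context in testing mode

    store := session.NewMockStore("dummy-sid")
    req := httptest.NewRequest("GET", "/ping", nil)
    // put the mock store where GetContextSession (and RegenerateSession) will look for it
    req = req.WithContext(context.WithValue(req.Context(), session.MockStoreContextKey, store))

    if got := session.GetContextSession(req); got != store {
        t.Fatalf("expected the mock store to be returned, got %v", got)
    }
}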
@@ -16,14 +16,10 @@ import (
type OAuth2UsernameType string

const (
    // OAuth2UsernameUserid oauth2 userid field will be used as gitea name
    OAuth2UsernameUserid OAuth2UsernameType = "userid"
    // OAuth2UsernameNickname oauth2 nickname field will be used as gitea name
    OAuth2UsernameNickname OAuth2UsernameType = "nickname"
    // OAuth2UsernameEmail username of oauth2 email field will be used as gitea name
    OAuth2UsernameEmail OAuth2UsernameType = "email"
    // OAuth2UsernameEmail username of oauth2 preferred_username field will be used as gitea name
    OAuth2UsernamePreferredUsername OAuth2UsernameType = "preferred_username"
    OAuth2UsernameUserid OAuth2UsernameType = "userid" // use user id (sub) field as gitea's username
    OAuth2UsernameNickname OAuth2UsernameType = "nickname" // use nickname field
    OAuth2UsernameEmail OAuth2UsernameType = "email" // use email field
    OAuth2UsernamePreferredUsername OAuth2UsernameType = "preferred_username" // use preferred_username field
)

func (username OAuth2UsernameType) isValid() bool {

@@ -71,8 +67,8 @@ func loadOAuth2ClientFrom(rootCfg ConfigProvider) {
    OAuth2Client.EnableAutoRegistration = sec.Key("ENABLE_AUTO_REGISTRATION").MustBool()
    OAuth2Client.Username = OAuth2UsernameType(sec.Key("USERNAME").MustString(string(OAuth2UsernameNickname)))
    if !OAuth2Client.Username.isValid() {
        log.Warn("Username setting is not valid: '%s', will fallback to '%s'", OAuth2Client.Username, OAuth2UsernameNickname)
        OAuth2Client.Username = OAuth2UsernameNickname
        log.Warn("[oauth2_client].USERNAME setting is invalid, falls back to %q", OAuth2Client.Username)
    }
    OAuth2Client.UpdateAvatar = sec.Key("UPDATE_AVATAR").MustBool()
    OAuth2Client.AccountLinking = OAuth2AccountLinkingType(sec.Key("ACCOUNT_LINKING").MustString(string(OAuth2AccountLinkingLogin)))
@@ -436,6 +436,7 @@ oauth_signin_submit = Link Account
oauth.signin.error = There was an error processing the authorization request. If this error persists, please contact the site administrator.
oauth.signin.error.access_denied = The authorization request was denied.
oauth.signin.error.temporarily_unavailable = Authorization failed because the authentication server is temporarily unavailable. Please try again later.
oauth_callback_unable_auto_reg = Auto Registration is enabled, but OAuth2 Provider %[1]s returned missing fields: %[2]s, unable to create an account automatically, please create or link to an account, or contact the site administrator.
openid_connect_submit = Connect
openid_connect_title = Connect to an existing account
openid_connect_desc = The chosen OpenID URI is unknown. Associate it with a new account here.
@@ -2358,7 +2358,7 @@ settings.protected_branch.delete_rule=Eliminar regra
settings.protected_branch_can_push=Permitir envios?
settings.protected_branch_can_push_yes=Pode enviar
settings.protected_branch_can_push_no=Não pode enviar
settings.branch_protection=Salvaguarda do ramo '<b>%s</b>'
settings.branch_protection=Regras de salvaguarda do ramo '<b>%s</b>'
settings.protect_this_branch=Habilitar salvaguarda do ramo
settings.protect_this_branch_desc=Impede a eliminação e restringe envios e integrações do Git no ramo.
settings.protect_disable_push=Desabilitar envios

@@ -2402,7 +2402,7 @@ settings.protect_patterns=Padrões
settings.protect_protected_file_patterns=Padrões de ficheiros protegidos (separados com ponto e vírgula ';'):
settings.protect_protected_file_patterns_desc=Ficheiros protegidos não podem ser modificados imediatamente, mesmo que o utilizador tenha direitos para adicionar, editar ou eliminar ficheiros neste ramo. Múltiplos padrões podem ser separados com ponto e vírgula (';'). Veja a documentação em <a href='https://pkg.go.dev/github.com/gobwas/glob#Compile'>github.com/gobwas/glob</a> para ver a sintaxe. Exemplos: <code>.drone.yml</code>, <code>/docs/**/*.txt</code>.
settings.protect_unprotected_file_patterns=Padrões de ficheiros desprotegidos (separados com ponto e vírgula ';'):
settings.protect_unprotected_file_patterns_desc=Ficheiros desprotegidos que podem ser modificados imediatamente se o utilizador tiver direitos de escrita, contornando a restrição no envio. Múltiplos padrões podem ser separados com ponto e vírgula (';'). Veja a documentação em <a href='https://pkg.go.dev/github.com/gobwas/glob#Compile'>github.com/gobwas/glob</a> para ver a sintaxe. Exemplos: <code>.drone.yml</code>, <code>/docs/**/*.txt</code>.
settings.protect_unprotected_file_patterns_desc=Ficheiros desprotegidos que podem ser modificados imediatamente se o utilizador tiver direitos de escrita, contornando a restrição no envio. Padrões múltiplos podem ser separados com ponto e vírgula (';'). Veja a documentação em <a href='https://pkg.go.dev/github.com/gobwas/glob#Compile'>github.com/gobwas/glob</a> para ver a sintaxe. Exemplos: <code>.drone.yml</code>, <code>/docs/**/*.txt</code>.
settings.add_protected_branch=Habilitar salvaguarda
settings.delete_protected_branch=Desabilitar salvaguarda
settings.update_protect_branch_success=A salvaguarda do ramo "%s" foi modificada.

@@ -2418,7 +2418,7 @@ settings.block_outdated_branch=Bloquear integração se o pedido de integração
settings.block_outdated_branch_desc=A integração não será possível quando o ramo de topo estiver abaixo do ramo base.
settings.default_branch_desc=Escolha um ramo do repositório como sendo o predefinido para pedidos de integração e cometimentos:
settings.merge_style_desc=Estilos de integração
settings.default_merge_style_desc=Tipo de integração predefinido para pedidos de integração:
settings.default_merge_style_desc=Tipo de integração predefinido
settings.choose_branch=Escolha um ramo…
settings.no_protected_branch=Não existem ramos protegidos.
settings.edit_protected_branch=Editar

@@ -2788,7 +2788,7 @@ self_check=Auto-verificação
identity_access=Identidade e acesso
users=Contas de utilizador
organizations=Organizações
assets=Recursos de código
assets=Recursos do código-fonte
repositories=Repositórios
hooks=Automatismos web
integrations=Integrações

@@ -2869,14 +2869,14 @@ dashboard.mspan_structures_obtained=Estruturas MSpan obtidas
dashboard.mcache_structures_usage=Uso das estruturas MCache
dashboard.mcache_structures_obtained=Estruturas MCache obtidas
dashboard.profiling_bucket_hash_table_obtained=Perfil obtido da tabela de hash do balde
dashboard.gc_metadata_obtained=Metadados da recolha de lixo obtidos
dashboard.gc_metadata_obtained=Metadados obtidos da recolha de lixo
dashboard.other_system_allocation_obtained=Outras alocações de sistema obtidas
dashboard.next_gc_recycle=Próxima reciclagem da recolha de lixo
dashboard.last_gc_time=Tempo decorrido desde a última recolha de lixo
dashboard.total_gc_time=Pausa total da recolha de lixo
dashboard.total_gc_pause=Pausa total da recolha de lixo
dashboard.last_gc_pause=Última pausa da recolha de lixo
dashboard.gc_times=Tempos da recolha de lixo
dashboard.gc_times=N.º de recolhas de lixo
dashboard.delete_old_actions=Eliminar todas as operações antigas da base de dados
dashboard.delete_old_actions.started=Foi iniciado o processo de eliminação de todas as operações antigas da base de dados.
dashboard.update_checker=Verificador de novas versões

@@ -3025,7 +3025,7 @@ auths.attribute_surname=Atributo do Sobrenome
auths.attribute_mail=Atributo do email
auths.attribute_ssh_public_key=Atributo da chave pública SSH
auths.attribute_avatar=Atributo do avatar
auths.attributes_in_bind=Buscar os atributos no contexto de Bind DN
auths.attributes_in_bind=Buscar atributos no contexto do Bind DN
auths.allow_deactivate_all=Permitir que um resultado de pesquisa vazio desabilite todos os utilizadores
auths.use_paged_search=Usar pesquisa paginada
auths.search_page_size=Tamanho da página

@@ -3224,7 +3224,7 @@ config.session_config=Configuração de sessão
config.session_provider=Fornecedor da sessão
config.provider_config=Configuração do fornecedor
config.cookie_name=Nome do cookie
config.gc_interval_time=Intervalo da recolha do lixo
config.gc_interval_time=Intervalo de tempo entre recolhas do lixo
config.session_life_time=Tempo de vida da sessão
config.https_only=Apenas HTTPS
config.cookie_life_time=Tempo de vida do cookie
package-lock.json (generated, 8 lines)

@@ -28,7 +28,7 @@
    "esbuild-loader": "4.1.0",
    "escape-goat": "4.0.0",
    "fast-glob": "3.3.2",
    "htmx.org": "1.9.11",
    "htmx.org": "1.9.12",
    "idiomorph": "0.3.0",
    "jquery": "3.7.1",
    "katex": "0.16.10",

@@ -6728,9 +6728,9 @@
    }
  },
  "node_modules/htmx.org": {
    "version": "1.9.11",
    "resolved": "https://registry.npmjs.org/htmx.org/-/htmx.org-1.9.11.tgz",
    "integrity": "sha512-WlVuICn8dfNOOgYmdYzYG8zSnP3++AdHkMHooQAzGZObWpVXYathpz/I37ycF4zikR6YduzfCvEcxk20JkIUsw=="
    "version": "1.9.12",
    "resolved": "https://registry.npmjs.org/htmx.org/-/htmx.org-1.9.12.tgz",
    "integrity": "sha512-VZAohXyF7xPGS52IM8d1T1283y+X4D+Owf3qY1NZ9RuBypyu9l8cGsxUMAG5fEAb/DhT7rDoJ9Hpu5/HxFD3cw=="
  },
  "node_modules/human-signals": {
    "version": "5.0.0",

@@ -27,7 +27,7 @@
    "esbuild-loader": "4.1.0",
    "escape-goat": "4.0.0",
    "fast-glob": "3.3.2",
    "htmx.org": "1.9.11",
    "htmx.org": "1.9.12",
    "idiomorph": "0.3.0",
    "jquery": "3.7.1",
    "katex": "0.16.10",
@@ -5,6 +5,7 @@ package routers

import (
    "context"
    "net/http"
    "reflect"
    "runtime"

@@ -25,6 +26,7 @@ import (
    "code.gitea.io/gitea/modules/templates"
    "code.gitea.io/gitea/modules/translation"
    "code.gitea.io/gitea/modules/web"
    "code.gitea.io/gitea/modules/web/routing"
    actions_router "code.gitea.io/gitea/routers/api/actions"
    packages_router "code.gitea.io/gitea/routers/api/packages"
    apiv1 "code.gitea.io/gitea/routers/api/v1"

@@ -202,5 +204,9 @@ func NormalRoutes() *web.Route {
        r.Mount(prefix, actions_router.ArtifactsV4Routes(prefix))
    }

    r.NotFound(func(w http.ResponseWriter, req *http.Request) {
        routing.UpdateFuncInfo(req.Context(), routing.GetFuncInfo(http.NotFound, "GlobalNotFound"))
        http.NotFound(w, req)
    })
    return r
}
@@ -382,17 +382,17 @@ func handleSignInFull(ctx *context.Context, u *user_model.User, remember, obeyRe
    return setting.AppSubURL + "/"
}

func getUserName(gothUser *goth.User) (string, error) {
// extractUserNameFromOAuth2 tries to extract a normalized username from the given OAuth2 user.
// It returns ("", nil) if the required field doesn't exist.
func extractUserNameFromOAuth2(gothUser *goth.User) (string, error) {
    switch setting.OAuth2Client.Username {
    case setting.OAuth2UsernameEmail:
        return user_model.NormalizeUserName(strings.Split(gothUser.Email, "@")[0])
        return user_model.NormalizeUserName(gothUser.Email)
    case setting.OAuth2UsernamePreferredUsername:
        preferredUsername, exists := gothUser.RawData["preferred_username"]
        if exists {
            return user_model.NormalizeUserName(preferredUsername.(string))
        } else {
            return "", fmt.Errorf("preferred_username is missing in received user data but configured as username source for user_id %q. Check if OPENID_CONNECT_SCOPES contains profile", gothUser.UserID)
        if preferredUsername, ok := gothUser.RawData["preferred_username"].(string); ok {
            return user_model.NormalizeUserName(preferredUsername)
        }
        return "", nil
    case setting.OAuth2UsernameNickname:
        return user_model.NormalizeUserName(gothUser.NickName)
    default: // OAuth2UsernameUserid
@@ -8,12 +8,31 @@ import (
    "net/url"
    "testing"

    auth_model "code.gitea.io/gitea/models/auth"
    "code.gitea.io/gitea/models/db"
    "code.gitea.io/gitea/modules/session"
    "code.gitea.io/gitea/modules/setting"
    "code.gitea.io/gitea/modules/test"
    "code.gitea.io/gitea/modules/util"
    "code.gitea.io/gitea/services/auth/source/oauth2"
    "code.gitea.io/gitea/services/contexttest"

    "github.com/markbates/goth"
    "github.com/markbates/goth/gothic"
    "github.com/stretchr/testify/assert"
)

func addOAuth2Source(t *testing.T, authName string, cfg oauth2.Source) {
    cfg.Provider = util.IfZero(cfg.Provider, "gitea")
    err := auth_model.CreateSource(db.DefaultContext, &auth_model.Source{
        Type: auth_model.OAuth2,
        Name: authName,
        IsActive: true,
        Cfg: &cfg,
    })
    assert.NoError(t, err)
}

func TestUserLogin(t *testing.T) {
    ctx, resp := contexttest.MockContext(t, "/user/login")
    SignIn(ctx)

@@ -41,3 +60,24 @@ func TestUserLogin(t *testing.T) {
    SignIn(ctx)
    assert.Equal(t, "/", test.RedirectURL(resp))
}

func TestSignUpOAuth2ButMissingFields(t *testing.T) {
    defer test.MockVariableValue(&setting.OAuth2Client.EnableAutoRegistration, true)()
    defer test.MockVariableValue(&gothic.CompleteUserAuth, func(res http.ResponseWriter, req *http.Request) (goth.User, error) {
        return goth.User{Provider: "dummy-auth-source", UserID: "dummy-user"}, nil
    })()

    addOAuth2Source(t, "dummy-auth-source", oauth2.Source{})

    mockOpt := contexttest.MockContextOption{SessionStore: session.NewMockStore("dummy-sid")}
    ctx, resp := contexttest.MockContext(t, "/user/oauth2/dummy-auth-source/callback?code=dummy-code", mockOpt)
    ctx.SetParams("provider", "dummy-auth-source")
    SignInOAuthCallback(ctx)
    assert.Equal(t, http.StatusSeeOther, resp.Code)
    assert.Equal(t, "/user/link_account", test.RedirectURL(resp))

    // then the user will be redirected to the link account page, and see a message about the missing fields
    ctx, _ = contexttest.MockContext(t, "/user/link_account", mockOpt)
    LinkAccount(ctx)
    assert.EqualValues(t, "auth.oauth_callback_unable_auto_reg:dummy-auth-source,email", ctx.Data["AutoRegistrationFailedPrompt"])
}
@@ -48,23 +48,27 @@ func LinkAccount(ctx *context.Context) {
    ctx.Data["SignInLink"] = setting.AppSubURL + "/user/link_account_signin"
    ctx.Data["SignUpLink"] = setting.AppSubURL + "/user/link_account_signup"

    gothUser := ctx.Session.Get("linkAccountGothUser")
    if gothUser == nil {
        ctx.ServerError("UserSignIn", errors.New("not in LinkAccount session"))
    gothUser, ok := ctx.Session.Get("linkAccountGothUser").(goth.User)
    if !ok {
        // no account in session, so just redirect to the login page, then the user could restart the process
        ctx.Redirect(setting.AppSubURL + "/user/login")
        return
    }

    gu, _ := gothUser.(goth.User)
    uname, err := getUserName(&gu)
    if missingFields, ok := gothUser.RawData["__giteaAutoRegMissingFields"].([]string); ok {
        ctx.Data["AutoRegistrationFailedPrompt"] = ctx.Tr("auth.oauth_callback_unable_auto_reg", gothUser.Provider, strings.Join(missingFields, ","))
    }

    uname, err := extractUserNameFromOAuth2(&gothUser)
    if err != nil {
        ctx.ServerError("UserSignIn", err)
        return
    }
    email := gu.Email
    email := gothUser.Email
    ctx.Data["user_name"] = uname
    ctx.Data["email"] = email

    if len(email) != 0 {
    if email != "" {
        u, err := user_model.GetUserByEmail(ctx, email)
        if err != nil && !user_model.IsErrUserNotExist(err) {
            ctx.ServerError("UserSignIn", err)

@@ -73,7 +77,7 @@ func LinkAccount(ctx *context.Context) {
        if u != nil {
            ctx.Data["user_exists"] = true
        }
    } else if len(uname) != 0 {
    } else if uname != "" {
        u, err := user_model.GetUserByName(ctx, uname)
        if err != nil && !user_model.IsErrUserNotExist(err) {
            ctx.ServerError("UserSignIn", err)
@@ -934,7 +934,7 @@ func SignInOAuthCallback(ctx *context.Context) {

    if u == nil {
        if ctx.Doer != nil {
            // attach user to already logged in user
            // attach user to the current signed-in user
            err = externalaccount.LinkAccountToUser(ctx, ctx.Doer, gothUser)
            if err != nil {
                ctx.ServerError("UserLinkAccount", err)

@@ -952,23 +952,32 @@ func SignInOAuthCallback(ctx *context.Context) {
            if gothUser.Email == "" {
                missingFields = append(missingFields, "email")
            }
            if setting.OAuth2Client.Username == setting.OAuth2UsernameNickname && gothUser.NickName == "" {
                missingFields = append(missingFields, "nickname")
            }
            if len(missingFields) > 0 {
                log.Error("OAuth2 Provider %s returned empty or missing fields: %s", authSource.Name, missingFields)
                if authSource.IsOAuth2() && authSource.Cfg.(*oauth2.Source).Provider == "openidConnect" {
                    log.Error("You may need to change the 'OPENID_CONNECT_SCOPES' setting to request all required fields")
                }
                err = fmt.Errorf("OAuth2 Provider %s returned empty or missing fields: %s", authSource.Name, missingFields)
                ctx.ServerError("CreateUser", err)
                return
            }
            uname, err := getUserName(&gothUser)
            uname, err := extractUserNameFromOAuth2(&gothUser)
            if err != nil {
                ctx.ServerError("UserSignIn", err)
                return
            }
            if uname == "" {
                if setting.OAuth2Client.Username == setting.OAuth2UsernameNickname {
                    missingFields = append(missingFields, "nickname")
                } else if setting.OAuth2Client.Username == setting.OAuth2UsernamePreferredUsername {
                    missingFields = append(missingFields, "preferred_username")
                } // else: "UserID" and "Email" have been handled above separately
            }
            if len(missingFields) > 0 {
                log.Error(`OAuth2 auto registration (ENABLE_AUTO_REGISTRATION) is enabled but OAuth2 provider %q doesn't return required fields: %s. `+
                    `Suggest to: disable auto registration, or make OPENID_CONNECT_SCOPES (for OpenIDConnect) / Authentication Source Scopes (for Admin panel) to request all required fields, and the fields shouldn't be empty.`,
                    authSource.Name, strings.Join(missingFields, ","))
                // The RawData is the only way to pass the missing fields to the another page at the moment, other ways all have various problems:
                // by session or cookie: difficult to clean or reset; by URL: could be injected with uncontrollable content; by ctx.Flash: the link_account page is a mess ...
                // Since the RawData is for the provider's data, so we need to use our own prefix here to avoid conflict.
                if gothUser.RawData == nil {
                    gothUser.RawData = make(map[string]any)
                }
                gothUser.RawData["__giteaAutoRegMissingFields"] = missingFields
                showLinkingLogin(ctx, gothUser)
                return
            }
            u = &user_model.User{
                Name: uname,
                FullName: gothUser.Name,
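Taken together with the LinkAccount hunk earlier, the hand-off works like this: the callback stores the list of empty required claims in the goth user's RawData under a Gitea-prefixed key, and the link-account page reads the same key back to build the prompt. A tiny illustrative sketch of just that hand-off (the RawData key and translation key come from this diff; everything else is simplified and not Gitea code):

package main

import (
    "fmt"
    "strings"

    "github.com/markbates/goth"
)

func main() {
    u := goth.User{Provider: "dummy-auth-source", RawData: map[string]interface{}{}}

    // producer side (SignInOAuthCallback): record which required fields were empty
    u.RawData["__giteaAutoRegMissingFields"] = []string{"email"}

    // consumer side (LinkAccount): read the list back and build the prompt
    if missing, ok := u.RawData["__giteaAutoRegMissingFields"].([]string); ok {
        fmt.Printf("auth.oauth_callback_unable_auto_reg: %s, %s\n", u.Provider, strings.Join(missing, ","))
    }
}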
@@ -67,6 +67,9 @@ type ViewResponse struct {
        CanRerun bool `json:"canRerun"`
        CanDeleteArtifact bool `json:"canDeleteArtifact"`
        Done bool `json:"done"`
        WorkflowID string `json:"workflowID"`
        WorkflowLink string `json:"workflowLink"`
        IsSchedule bool `json:"isSchedule"`
        Jobs []*ViewJob `json:"jobs"`
        Commit ViewCommit `json:"commit"`
    } `json:"run"`

@@ -90,12 +93,10 @@ type ViewJob struct {
}

type ViewCommit struct {
    LocaleCommit string `json:"localeCommit"`
    LocalePushedBy string `json:"localePushedBy"`
    ShortSha string `json:"shortSHA"`
    Link string `json:"link"`
    Pusher ViewUser `json:"pusher"`
    Branch ViewBranch `json:"branch"`
    ShortSha string `json:"shortSHA"`
    Link string `json:"link"`
    Pusher ViewUser `json:"pusher"`
    Branch ViewBranch `json:"branch"`
}

type ViewUser struct {

@@ -151,6 +152,9 @@ func ViewPost(ctx *context_module.Context) {
    resp.State.Run.CanRerun = run.Status.IsDone() && ctx.Repo.CanWrite(unit.TypeActions)
    resp.State.Run.CanDeleteArtifact = run.Status.IsDone() && ctx.Repo.CanWrite(unit.TypeActions)
    resp.State.Run.Done = run.Status.IsDone()
    resp.State.Run.WorkflowID = run.WorkflowID
    resp.State.Run.WorkflowLink = run.WorkflowLink()
    resp.State.Run.IsSchedule = run.IsSchedule()
    resp.State.Run.Jobs = make([]*ViewJob, 0, len(jobs)) // marshal to '[]' instead fo 'null' in json
    resp.State.Run.Status = run.Status.String()
    for _, v := range jobs {

@@ -172,12 +176,10 @@ func ViewPost(ctx *context_module.Context) {
        Link: run.RefLink(),
    }
    resp.State.Run.Commit = ViewCommit{
        LocaleCommit: ctx.Locale.TrString("actions.runs.commit"),
        LocalePushedBy: ctx.Locale.TrString("actions.runs.pushed_by"),
        ShortSha: base.ShortSha(run.CommitSHA),
        Link: fmt.Sprintf("%s/commit/%s", run.Repo.Link(), run.CommitSHA),
        Pusher: pusher,
        Branch: branch,
        ShortSha: base.ShortSha(run.CommitSHA),
        Link: fmt.Sprintf("%s/commit/%s", run.Repo.Link(), run.CommitSHA),
        Pusher: pusher,
        Branch: branch,
    }

    var task *actions_model.ActionTask
@@ -3149,13 +3149,10 @@ func UpdateCommentContent(ctx *context.Context) {
    }

    oldContent := comment.Content
    comment.Content = ctx.FormString("content")
    if len(comment.Content) == 0 {
        ctx.JSON(http.StatusOK, map[string]any{
            "content": "",
        })
        return
    }
    newContent := ctx.FormString("content")

    // allow to save empty content
    comment.Content = newContent
    if err = issue_service.UpdateComment(ctx, comment, ctx.Doer, oldContent); err != nil {
        if errors.Is(err, user_model.ErrBlockedUser) {
            ctx.JSONError(ctx.Tr("repo.issues.comment.blocked_user"))

@@ -3178,21 +3175,27 @@ func UpdateCommentContent(ctx *context.Context) {
        }
    }

    content, err := markdown.RenderString(&markup.RenderContext{
        Links: markup.Links{
            Base: ctx.FormString("context"), // FIXME: <- IS THIS SAFE ?
        },
        Metas: ctx.Repo.Repository.ComposeMetas(ctx),
        GitRepo: ctx.Repo.GitRepo,
        Ctx: ctx,
    }, comment.Content)
    if err != nil {
        ctx.ServerError("RenderString", err)
        return
    var renderedContent template.HTML
    if comment.Content != "" {
        renderedContent, err = markdown.RenderString(&markup.RenderContext{
            Links: markup.Links{
                Base: ctx.FormString("context"), // FIXME: <- IS THIS SAFE ?
            },
            Metas: ctx.Repo.Repository.ComposeMetas(ctx),
            GitRepo: ctx.Repo.GitRepo,
            Ctx: ctx,
        }, comment.Content)
        if err != nil {
            ctx.ServerError("RenderString", err)
            return
        }
    } else {
        contentEmpty := fmt.Sprintf(`<span class="no-content">%s</span>`, ctx.Tr("repo.issues.no_content"))
        renderedContent = template.HTML(contentEmpty)
    }

    ctx.JSON(http.StatusOK, map[string]any{
        "content": content,
        "content": renderedContent,
        "attachments": attachmentsHTML(ctx, comment.Attachments, comment.Content),
    })
}
@@ -1614,7 +1614,7 @@ func registerRoutes(m *web.Route) {

    m.NotFound(func(w http.ResponseWriter, req *http.Request) {
        ctx := context.GetWebContext(req)
        routing.UpdateFuncInfo(ctx, routing.GetFuncInfo(ctx.NotFound, "GlobalNotFound"))
        routing.UpdateFuncInfo(ctx, routing.GetFuncInfo(ctx.NotFound, "WebNotFound"))
        ctx.NotFound("", nil)
    })
}
@@ -20,14 +20,13 @@ import (
    "code.gitea.io/gitea/modules/cache"
    "code.gitea.io/gitea/modules/gitrepo"
    "code.gitea.io/gitea/modules/httpcache"
    "code.gitea.io/gitea/modules/session"
    "code.gitea.io/gitea/modules/setting"
    "code.gitea.io/gitea/modules/templates"
    "code.gitea.io/gitea/modules/translation"
    "code.gitea.io/gitea/modules/web"
    "code.gitea.io/gitea/modules/web/middleware"
    web_types "code.gitea.io/gitea/modules/web/types"

    "gitea.com/go-chi/session"
)

// Render represents a template render

@@ -154,7 +153,7 @@ func Contexter() func(next http.Handler) http.Handler {
    return http.HandlerFunc(func(resp http.ResponseWriter, req *http.Request) {
        base, baseCleanUp := NewBaseContext(resp, req)
        defer baseCleanUp()
        ctx := NewWebContext(base, rnd, session.GetSession(req))
        ctx := NewWebContext(base, rnd, session.GetContextSession(req))

        ctx.Data.MergeFrom(middleware.CommonTemplateContextData())
        ctx.Data["Context"] = ctx // TODO: use "ctx" in template and remove this
@@ -19,7 +19,9 @@ import (
    repo_model "code.gitea.io/gitea/models/repo"
    "code.gitea.io/gitea/models/unittest"
    user_model "code.gitea.io/gitea/models/user"
    "code.gitea.io/gitea/modules/cache"
    "code.gitea.io/gitea/modules/gitrepo"
    "code.gitea.io/gitea/modules/session"
    "code.gitea.io/gitea/modules/templates"
    "code.gitea.io/gitea/modules/translation"
    "code.gitea.io/gitea/modules/web/middleware"

@@ -43,7 +45,8 @@ func mockRequest(t *testing.T, reqPath string) *http.Request {
}

type MockContextOption struct {
    Render context.Render
    Render       context.Render
    SessionStore *session.MockStore
}

// MockContext mock context for unit tests

@@ -62,12 +65,17 @@ func MockContext(t *testing.T, reqPath string, opts ...MockContextOption) (*cont
    base.Data = middleware.GetContextData(req.Context())
    base.Locale = &translation.MockLocale{}

    chiCtx := chi.NewRouteContext()
    ctx := context.NewWebContext(base, opt.Render, nil)
    ctx.AppendContextValue(context.WebContextKey, ctx)
    ctx.AppendContextValue(chi.RouteCtxKey, chiCtx)
    if opt.SessionStore != nil {
        ctx.AppendContextValue(session.MockStoreContextKey, opt.SessionStore)
        ctx.Session = opt.SessionStore
    }
    ctx.Cache = cache.GetCache()
    ctx.PageData = map[string]any{}
    ctx.Data["PageStartTime"] = time.Now()
    chiCtx := chi.NewRouteContext()
    ctx.Base.AppendContextValue(chi.RouteCtxKey, chiCtx)
    return ctx, resp
}
@@ -38,12 +38,10 @@ func getCommitStatusCache(repoID int64, branchName string) *commitStatusCacheVal
    if ok && statusStr != "" {
        var cv commitStatusCacheValue
        err := json.Unmarshal([]byte(statusStr), &cv)
        if err == nil && cv.State != "" {
        if err == nil {
            return &cv
        }
        if err != nil {
            log.Warn("getCommitStatusCache: json.Unmarshal failed: %v", err)
        }
        log.Warn("getCommitStatusCache: json.Unmarshal failed: %v", err)
    }
    return nil
}

@@ -128,15 +126,22 @@ func CreateCommitStatus(ctx context.Context, repo *repo_model.Repository, creato
// FindReposLastestCommitStatuses loading repository default branch latest combinded commit status with cache
func FindReposLastestCommitStatuses(ctx context.Context, repos []*repo_model.Repository) ([]*git_model.CommitStatus, error) {
    results := make([]*git_model.CommitStatus, len(repos))
    allCached := true
    for i, repo := range repos {
        if cv := getCommitStatusCache(repo.ID, repo.DefaultBranch); cv != nil {
            results[i] = &git_model.CommitStatus{
                State: api.CommitStatusState(cv.State),
                TargetURL: cv.TargetURL,
            }
        } else {
            allCached = false
        }
    }

    if allCached {
        return results, nil
    }

    // collect the latest commit of each repo
    // at most there are dozens of repos (limited by MaxResponseItems), so it's not a big problem at the moment
    repoBranchNames := make(map[int64]string, len(repos))

@@ -165,10 +170,10 @@ func FindReposLastestCommitStatuses(ctx context.Context, repos []*repo_model.Rep
    for i, repo := range repos {
        if repo.ID == summary.RepoID {
            results[i] = summary
            _ = slices.DeleteFunc(repoSHAs, func(repoSHA git_model.RepoSHA) bool {
            repoSHAs = slices.DeleteFunc(repoSHAs, func(repoSHA git_model.RepoSHA) bool {
                return repoSHA.RepoID == repo.ID
            })
            if results[i].State != "" {
            if results[i] != nil {
                if err := updateCommitStatusCache(repo.ID, repo.DefaultBranch, results[i].State, results[i].TargetURL); err != nil {
                    log.Error("updateCommitStatusCache[%d:%s] failed: %v", repo.ID, repo.DefaultBranch, err)
                }

@@ -177,6 +182,9 @@ func FindReposLastestCommitStatuses(ctx context.Context, repos []*repo_model.Rep
            }
        }
    }
    if len(repoSHAs) == 0 {
        return results, nil
    }

    // call the database O(1) times to get the commit statuses for all repos
    repoToItsLatestCommitStatuses, err := git_model.GetLatestCommitStatusForPairs(ctx, repoSHAs)

@@ -187,7 +195,7 @@ func FindReposLastestCommitStatuses(ctx context.Context, repos []*repo_model.Rep
    for i, repo := range repos {
        if results[i] == nil {
            results[i] = git_model.CalcCommitStatus(repoToItsLatestCommitStatuses[repo.ID])
            if results[i].State != "" {
            if results[i] != nil {
                if err := updateCommitStatusCache(repo.ID, repo.DefaultBranch, results[i].State, results[i].TargetURL); err != nil {
                    log.Error("updateCommitStatusCache[%d:%s] failed: %v", repo.ID, repo.DefaultBranch, err)
                }
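The comments in this hunk describe the intended pattern: answer each repo from a per-repo cache when possible, make one batched database query only for the misses, then backfill the cache. A generic sketch of that pattern, with made-up names and plain Go types rather than Gitea's models:

package main

import "fmt"

type status struct{ State string }

// latestStatuses is an illustrative helper, not Gitea code.
func latestStatuses(ids []int64, cacheGet func(int64) *status, batchFetch func([]int64) map[int64]*status, cachePut func(int64, *status)) []*status {
    results := make([]*status, len(ids))
    var misses []int64
    for i, id := range ids {
        if s := cacheGet(id); s != nil {
            results[i] = s
        } else {
            misses = append(misses, id)
        }
    }
    if len(misses) == 0 {
        return results // everything was cached, no DB round-trip needed
    }
    fetched := batchFetch(misses) // one query covering all cache misses
    for i, id := range ids {
        if results[i] == nil {
            if s := fetched[id]; s != nil {
                results[i] = s
                cachePut(id, s) // backfill the cache for next time
            }
        }
    }
    return results
}

func main() {
    cache := map[int64]*status{1: {State: "success"}}
    db := map[int64]*status{1: {State: "success"}, 2: {State: "failure"}}
    got := latestStatuses([]int64{1, 2},
        func(id int64) *status { return cache[id] },
        func(ids []int64) map[int64]*status {
            out := map[int64]*status{}
            for _, id := range ids {
                out[id] = db[id]
            }
            return out
        },
        func(id int64, s *status) { cache[id] = s },
    )
    fmt.Println(got[0].State, got[1].State) // success failure
}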
@@ -15,7 +15,7 @@
  {{if .Title}}{{.Title}}{{else}}{{ctx.Locale.Tr "actions.runs.empty_commit_message"}}{{end}}
</a>
<div class="flex-item-body">
  <b>{{if not $.CurWorkflow}}{{.WorkflowID}} {{end}}#{{.Index}}</b>:
  <span><b>{{if not $.CurWorkflow}}{{.WorkflowID}} {{end}}#{{.Index}}</b>:</span>
  {{- if .ScheduleID -}}
    {{ctx.Locale.Tr "actions.runs.scheduled"}}
  {{- else -}}
@@ -10,6 +10,9 @@
  data-locale-cancel="{{ctx.Locale.Tr "cancel"}}"
  data-locale-rerun="{{ctx.Locale.Tr "rerun"}}"
  data-locale-rerun-all="{{ctx.Locale.Tr "rerun_all"}}"
  data-locale-runs-scheduled="{{ctx.Locale.Tr "actions.runs.scheduled"}}"
  data-locale-runs-commit="{{ctx.Locale.Tr "actions.runs.commit"}}"
  data-locale-runs-pushed-by="{{ctx.Locale.Tr "actions.runs.pushed_by"}}"
  data-locale-status-unknown="{{ctx.Locale.Tr "actions.status.unknown"}}"
  data-locale-status-waiting="{{ctx.Locale.Tr "actions.status.waiting"}}"
  data-locale-status-running="{{ctx.Locale.Tr "actions.status.running"}}"
@@ -17,15 +17,12 @@
</overflow-menu>
<div class="ui middle very relaxed page grid">
  <div class="column">
    <div class="ui tab {{if not .user_exists}}active{{end}}"
      data-tab="auth-link-signup-tab">
    <div class="ui tab {{if not .user_exists}}active{{end}}" data-tab="auth-link-signup-tab">
      {{if .AutoRegistrationFailedPrompt}}<div class="ui message">{{.AutoRegistrationFailedPrompt}}</div>{{end}}
      {{template "user/auth/signup_inner" .}}
    </div>
    <div class="ui tab {{if .user_exists}}active{{end}}"
      data-tab="auth-link-signin-tab">
      <div class="ui user signin container icon">
        {{template "user/auth/signin_inner" .}}
      </div>
    <div class="ui tab {{if .user_exists}}active{{end}}" data-tab="auth-link-signin-tab">
      {{template "user/auth/signin_inner" .}}
    </div>
  </div>
</div>
@@ -4,11 +4,13 @@
package integration

import (
    "fmt"
    "net/http"
    "net/url"
    "testing"

    "code.gitea.io/gitea/models/db"
    issues_model "code.gitea.io/gitea/models/issues"
    repo_model "code.gitea.io/gitea/models/repo"
    "code.gitea.io/gitea/models/unittest"
    user_model "code.gitea.io/gitea/models/user"

@@ -46,22 +48,25 @@ func TestPullCompare(t *testing.T) {
    testRepoFork(t, session, "user2", "repo1", "user1", "repo1")
    testCreateBranch(t, session, "user1", "repo1", "branch/master", "master1", http.StatusSeeOther)
    testEditFile(t, session, "user1", "repo1", "master1", "README.md", "Hello, World (Edited)\n")
    resp = testPullCreate(t, session, "user1", "repo1", false, "master", "master1", "This is a pull title")
    testPullCreate(t, session, "user1", "repo1", false, "master", "master1", "This is a pull title")

    // the max value on issue_index.yml for repo_id=1 is 5
    req = NewRequest(t, "GET", "/user2/repo1/pulls/6/files")
    repo1 := unittest.AssertExistsAndLoadBean(t, &repo_model.Repository{OwnerName: "user2", Name: "repo1"})
    issueIndex := unittest.AssertExistsAndLoadBean(t, &issues_model.IssueIndex{GroupID: repo1.ID}, unittest.OrderBy("group_id ASC"))
    prFilesURL := fmt.Sprintf("/user2/repo1/pulls/%d/files", issueIndex.MaxIndex)
    req = NewRequest(t, "GET", prFilesURL)
    resp = session.MakeRequest(t, req, http.StatusOK)
    doc := NewHTMLParser(t, resp.Body)
    editButtonCount := doc.doc.Find(".diff-file-header-actions a[href*='/_edit/']").Length()
    assert.Greater(t, editButtonCount, 0, "Expected to find a button to edit a file in the PR diff view but there were none")

    user2 := unittest.AssertExistsAndLoadBean(t, &user_model.User{ID: 2})
    repoForked := unittest.AssertExistsAndLoadBean(t, &repo_model.Repository{OwnerName: "user1", Name: "repo1"})
    user2 := unittest.AssertExistsAndLoadBean(t, &user_model.User{ID: 2})

    // delete the head repository and revisit the PR diff view
    err := repo_service.DeleteRepositoryDirectly(db.DefaultContext, user2, repoForked.ID)
    assert.NoError(t, err)

    req = NewRequest(t, "GET", "/user2/repo1/pulls/6/files")
    req = NewRequest(t, "GET", prFilesURL)
    resp = session.MakeRequest(t, req, http.StatusOK)
    doc = NewHTMLParser(t, resp.Body)
    editButtonCount = doc.doc.Find(".diff-file-header-actions a[href*='/_edit/']").Length()
@@ -403,7 +403,7 @@
  background: var(--color-body);
  border-top-width: 1px;
  border-color: var(--color-secondary);
  font-weight: var(--font-weight-medium);
  color: var(--color-text-dark);
  margin-bottom: -1px;
  border-radius: 0.28571429rem 0.28571429rem 0 0 !important;
}
@@ -44,6 +44,9 @@ const sfc = {
      canApprove: false,
      canRerun: false,
      done: false,
      workflowID: '',
      workflowLink: '',
      isSchedule: false,
      jobs: [
        // {
        //   id: 0,

@@ -338,10 +341,13 @@ export function initRepositoryActionView() {
    approve: el.getAttribute('data-locale-approve'),
    cancel: el.getAttribute('data-locale-cancel'),
    rerun: el.getAttribute('data-locale-rerun'),
    rerun_all: el.getAttribute('data-locale-rerun-all'),
    scheduled: el.getAttribute('data-locale-runs-scheduled'),
    commit: el.getAttribute('data-locale-runs-commit'),
    pushedBy: el.getAttribute('data-locale-runs-pushed-by'),
    artifactsTitle: el.getAttribute('data-locale-artifacts-title'),
    areYouSure: el.getAttribute('data-locale-are-you-sure'),
    confirmDeleteArtifact: el.getAttribute('data-locale-confirm-delete-artifact'),
    rerun_all: el.getAttribute('data-locale-rerun-all'),
    showTimeStamps: el.getAttribute('data-locale-show-timestamps'),
    showLogSeconds: el.getAttribute('data-locale-show-log-seconds'),
    showFullScreen: el.getAttribute('data-locale-show-full-screen'),

@@ -382,10 +388,16 @@ export function initRepositoryActionView() {
  </button>
</div>
<div class="action-commit-summary">
  {{ run.commit.localeCommit }}
  <a class="muted" :href="run.commit.link">{{ run.commit.shortSHA }}</a>
  {{ run.commit.localePushedBy }}
  <a class="muted" :href="run.commit.pusher.link">{{ run.commit.pusher.displayName }}</a>
  <span><a class="muted" :href="run.workflowLink"><b>{{ run.workflowID }}</b></a>:</span>
  <template v-if="run.isSchedule">
    {{ locale.scheduled }}
  </template>
  <template v-else>
    {{ locale.commit }}
    <a class="muted" :href="run.commit.link">{{ run.commit.shortSHA }}</a>
    {{ locale.pushedBy }}
    <a class="muted" :href="run.commit.pusher.link">{{ run.commit.pusher.displayName }}</a>
  </template>
  <span class="ui label tw-max-w-full" v-if="run.commit.shortSHA">
    <a class="gt-ellipsis" :href="run.commit.branch.link">{{ run.commit.branch.name }}</a>
  </span>
@ -1,6 +1,6 @@
|
|||
import $ from 'jquery';
|
||||
import {GET} from '../modules/fetch.js';
|
||||
import {hideElem, loadElem} from '../utils/dom.js';
|
||||
import {hideElem, loadElem, queryElemChildren} from '../utils/dom.js';
|
||||
import {parseDom} from '../utils.js';
|
||||
|
||||
function getDefaultSvgBoundsIfUndefined(text, src) {
|
||||
|
@ -38,36 +38,36 @@ function getDefaultSvgBoundsIfUndefined(text, src) {
|
|||
return null;
|
||||
}
|
||||
|
||||
function createContext(imageAfter, imageBefore) {
|
||||
const sizeAfter = {
|
||||
width: imageAfter?.width || 0,
|
||||
height: imageAfter?.height || 0,
|
||||
};
|
||||
const sizeBefore = {
|
||||
width: imageBefore?.width || 0,
|
||||
height: imageBefore?.height || 0,
|
||||
};
|
||||
const maxSize = {
|
||||
width: Math.max(sizeBefore.width, sizeAfter.width),
|
||||
height: Math.max(sizeBefore.height, sizeAfter.height),
|
||||
};
|
||||
|
||||
return {
|
||||
imageAfter,
|
||||
imageBefore,
|
||||
sizeAfter,
|
||||
sizeBefore,
|
||||
maxSize,
|
||||
ratio: [
|
||||
Math.floor(maxSize.width - sizeAfter.width) / 2,
|
||||
Math.floor(maxSize.height - sizeAfter.height) / 2,
|
||||
Math.floor(maxSize.width - sizeBefore.width) / 2,
|
||||
Math.floor(maxSize.height - sizeBefore.height) / 2,
|
||||
],
|
||||
};
|
||||
}
|
||||
|
||||
export function initImageDiff() {
|
||||
function createContext(image1, image2) {
|
||||
const size1 = {
|
||||
width: image1 && image1.width || 0,
|
||||
height: image1 && image1.height || 0,
|
||||
};
|
||||
const size2 = {
|
||||
width: image2 && image2.width || 0,
|
||||
height: image2 && image2.height || 0,
|
||||
};
|
||||
const max = {
|
||||
width: Math.max(size2.width, size1.width),
|
||||
height: Math.max(size2.height, size1.height),
|
||||
};
|
||||
|
||||
return {
|
||||
$image1: $(image1),
|
||||
$image2: $(image2),
|
||||
size1,
|
||||
size2,
|
||||
max,
|
||||
ratio: [
|
||||
Math.floor(max.width - size1.width) / 2,
|
||||
Math.floor(max.height - size1.height) / 2,
|
||||
Math.floor(max.width - size2.width) / 2,
|
||||
Math.floor(max.height - size2.height) / 2,
|
||||
],
|
||||
};
|
||||
}
|
||||
|
||||
$('.image-diff:not([data-image-diff-loaded])').each(async function() {
|
||||
const $container = $(this);
|
||||
this.setAttribute('data-image-diff-loaded', 'true');
|
||||
|
@ -116,94 +116,96 @@ export function initImageDiff() {
|
|||
      initOverlay(createContext($imagesAfter[2], $imagesBefore[2]));
    }

    this.querySelector(':scope > .image-diff-tabs')?.classList.remove('is-loading');
    queryElemChildren(this, '.image-diff-tabs', (el) => el.classList.remove('is-loading'));

    function initSideBySide(container, sizes) {
      let factor = 1;
      if (sizes.max.width > (diffContainerWidth - 24) / 2) {
        factor = (diffContainerWidth - 24) / 2 / sizes.max.width;
      if (sizes.maxSize.width > (diffContainerWidth - 24) / 2) {
        factor = (diffContainerWidth - 24) / 2 / sizes.maxSize.width;
      }

      const widthChanged = sizes.$image1.length !== 0 && sizes.$image2.length !== 0 && sizes.$image1[0].naturalWidth !== sizes.$image2[0].naturalWidth;
      const heightChanged = sizes.$image1.length !== 0 && sizes.$image2.length !== 0 && sizes.$image1[0].naturalHeight !== sizes.$image2[0].naturalHeight;
      if (sizes.$image1?.length) {
      const widthChanged = sizes.imageAfter && sizes.imageBefore && sizes.imageAfter.naturalWidth !== sizes.imageBefore.naturalWidth;
      const heightChanged = sizes.imageAfter && sizes.imageBefore && sizes.imageAfter.naturalHeight !== sizes.imageBefore.naturalHeight;
      if (sizes.imageAfter) {
        const boundsInfoAfterWidth = container.querySelector('.bounds-info-after .bounds-info-width');
        boundsInfoAfterWidth.textContent = `${sizes.$image1[0].naturalWidth}px`;
        if (widthChanged) boundsInfoAfterWidth.classList.add('green');

        if (boundsInfoAfterWidth) {
          boundsInfoAfterWidth.textContent = `${sizes.imageAfter.naturalWidth}px`;
          boundsInfoAfterWidth.classList.toggle('green', widthChanged);
        }
        const boundsInfoAfterHeight = container.querySelector('.bounds-info-after .bounds-info-height');
        boundsInfoAfterHeight.textContent = `${sizes.$image1[0].naturalHeight}px`;
        if (heightChanged) boundsInfoAfterHeight.classList.add('green');
        if (boundsInfoAfterHeight) {
          boundsInfoAfterHeight.textContent = `${sizes.imageAfter.naturalHeight}px`;
          boundsInfoAfterHeight.classList.toggle('green', heightChanged);
        }
      }

      if (sizes.$image2?.length) {
      if (sizes.imageBefore) {
        const boundsInfoBeforeWidth = container.querySelector('.bounds-info-before .bounds-info-width');
        boundsInfoBeforeWidth.textContent = `${sizes.$image2[0].naturalWidth}px`;
        if (widthChanged) boundsInfoBeforeWidth.classList.add('red');

        if (boundsInfoBeforeWidth) {
          boundsInfoBeforeWidth.textContent = `${sizes.imageBefore.naturalWidth}px`;
          boundsInfoBeforeWidth.classList.toggle('red', widthChanged);
        }
        const boundsInfoBeforeHeight = container.querySelector('.bounds-info-before .bounds-info-height');
        boundsInfoBeforeHeight.textContent = `${sizes.$image2[0].naturalHeight}px`;
        if (heightChanged) boundsInfoBeforeHeight.classList.add('red');
        if (boundsInfoBeforeHeight) {
          boundsInfoBeforeHeight.textContent = `${sizes.imageBefore.naturalHeight}px`;
          boundsInfoBeforeHeight.classList.toggle('red', heightChanged);
        }
      }

      const image1 = sizes.$image1[0];
      if (image1) {
        const container = image1.parentNode;
        image1.style.width = `${sizes.size1.width * factor}px`;
        image1.style.height = `${sizes.size1.height * factor}px`;
      if (sizes.imageAfter) {
        const container = sizes.imageAfter.parentNode;
        sizes.imageAfter.style.width = `${sizes.sizeAfter.width * factor}px`;
        sizes.imageAfter.style.height = `${sizes.sizeAfter.height * factor}px`;
        container.style.margin = '10px auto';
        container.style.width = `${sizes.size1.width * factor + 2}px`;
        container.style.height = `${sizes.size1.height * factor + 2}px`;
        container.style.width = `${sizes.sizeAfter.width * factor + 2}px`;
        container.style.height = `${sizes.sizeAfter.height * factor + 2}px`;
      }

      const image2 = sizes.$image2[0];
      if (image2) {
        const container = image2.parentNode;
        image2.style.width = `${sizes.size2.width * factor}px`;
        image2.style.height = `${sizes.size2.height * factor}px`;
      if (sizes.imageBefore) {
        const container = sizes.imageBefore.parentNode;
        sizes.imageBefore.style.width = `${sizes.sizeBefore.width * factor}px`;
        sizes.imageBefore.style.height = `${sizes.sizeBefore.height * factor}px`;
        container.style.margin = '10px auto';
        container.style.width = `${sizes.size2.width * factor + 2}px`;
        container.style.height = `${sizes.size2.height * factor + 2}px`;
        container.style.width = `${sizes.sizeBefore.width * factor + 2}px`;
        container.style.height = `${sizes.sizeBefore.height * factor + 2}px`;
      }
    }

    function initSwipe(sizes) {
      let factor = 1;
      if (sizes.max.width > diffContainerWidth - 12) {
        factor = (diffContainerWidth - 12) / sizes.max.width;
      if (sizes.maxSize.width > diffContainerWidth - 12) {
        factor = (diffContainerWidth - 12) / sizes.maxSize.width;
      }

      const image1 = sizes.$image1[0];
      if (image1) {
        const container = image1.parentNode;
      if (sizes.imageAfter) {
        const container = sizes.imageAfter.parentNode;
        const swipeFrame = container.parentNode;
        image1.style.width = `${sizes.size1.width * factor}px`;
        image1.style.height = `${sizes.size1.height * factor}px`;
        sizes.imageAfter.style.width = `${sizes.sizeAfter.width * factor}px`;
        sizes.imageAfter.style.height = `${sizes.sizeAfter.height * factor}px`;
        container.style.margin = `0px ${sizes.ratio[0] * factor}px`;
        container.style.width = `${sizes.size1.width * factor + 2}px`;
        container.style.height = `${sizes.size1.height * factor + 2}px`;
        container.style.width = `${sizes.sizeAfter.width * factor + 2}px`;
        container.style.height = `${sizes.sizeAfter.height * factor + 2}px`;
        swipeFrame.style.padding = `${sizes.ratio[1] * factor}px 0 0 0`;
        swipeFrame.style.width = `${sizes.max.width * factor + 2}px`;
        swipeFrame.style.width = `${sizes.maxSize.width * factor + 2}px`;
      }

      const image2 = sizes.$image2[0];
      if (image2) {
        const container = image2.parentNode;
      if (sizes.imageBefore) {
        const container = sizes.imageBefore.parentNode;
        const swipeFrame = container.parentNode;
        image2.style.width = `${sizes.size2.width * factor}px`;
        image2.style.height = `${sizes.size2.height * factor}px`;
        sizes.imageBefore.style.width = `${sizes.sizeBefore.width * factor}px`;
        sizes.imageBefore.style.height = `${sizes.sizeBefore.height * factor}px`;
        container.style.margin = `${sizes.ratio[3] * factor}px ${sizes.ratio[2] * factor}px`;
        container.style.width = `${sizes.size2.width * factor + 2}px`;
        container.style.height = `${sizes.size2.height * factor + 2}px`;
        swipeFrame.style.width = `${sizes.max.width * factor + 2}px`;
        swipeFrame.style.height = `${sizes.max.height * factor + 2}px`;
        container.style.width = `${sizes.sizeBefore.width * factor + 2}px`;
        container.style.height = `${sizes.sizeBefore.height * factor + 2}px`;
        swipeFrame.style.width = `${sizes.maxSize.width * factor + 2}px`;
        swipeFrame.style.height = `${sizes.maxSize.height * factor + 2}px`;
      }

      // extra height for inner "position: absolute" elements
      const swipe = $container.find('.diff-swipe')[0];
      if (swipe) {
        swipe.style.width = `${sizes.max.width * factor + 2}px`;
        swipe.style.height = `${sizes.max.height * factor + 30}px`;
        swipe.style.width = `${sizes.maxSize.width * factor + 2}px`;
        swipe.style.height = `${sizes.maxSize.height * factor + 30}px`;
      }

      $container.find('.swipe-bar').on('mousedown', function(e) {
@@ -229,39 +231,37 @@ export function initImageDiff() {
    function initOverlay(sizes) {
      let factor = 1;
      if (sizes.max.width > diffContainerWidth - 12) {
        factor = (diffContainerWidth - 12) / sizes.max.width;
      if (sizes.maxSize.width > diffContainerWidth - 12) {
        factor = (diffContainerWidth - 12) / sizes.maxSize.width;
      }

      const image1 = sizes.$image1[0];
      if (image1) {
        const container = image1.parentNode;
        image1.style.width = `${sizes.size1.width * factor}px`;
        image1.style.height = `${sizes.size1.height * factor}px`;
      if (sizes.imageAfter) {
        const container = sizes.imageAfter.parentNode;
        sizes.imageAfter.style.width = `${sizes.sizeAfter.width * factor}px`;
        sizes.imageAfter.style.height = `${sizes.sizeAfter.height * factor}px`;
        container.style.margin = `${sizes.ratio[1] * factor}px ${sizes.ratio[0] * factor}px`;
        container.style.width = `${sizes.size1.width * factor + 2}px`;
        container.style.height = `${sizes.size1.height * factor + 2}px`;
        container.style.width = `${sizes.sizeAfter.width * factor + 2}px`;
        container.style.height = `${sizes.sizeAfter.height * factor + 2}px`;
      }

      const image2 = sizes.$image2[0];
      if (image2) {
        const container = image2.parentNode;
      if (sizes.imageBefore) {
        const container = sizes.imageBefore.parentNode;
        const overlayFrame = container.parentNode;
        image2.style.width = `${sizes.size2.width * factor}px`;
        image2.style.height = `${sizes.size2.height * factor}px`;
        sizes.imageBefore.style.width = `${sizes.sizeBefore.width * factor}px`;
        sizes.imageBefore.style.height = `${sizes.sizeBefore.height * factor}px`;
        container.style.margin = `${sizes.ratio[3] * factor}px ${sizes.ratio[2] * factor}px`;
        container.style.width = `${sizes.size2.width * factor + 2}px`;
        container.style.height = `${sizes.size2.height * factor + 2}px`;
        container.style.width = `${sizes.sizeBefore.width * factor + 2}px`;
        container.style.height = `${sizes.sizeBefore.height * factor + 2}px`;

        // some inner elements are `position: absolute`, so the container's height must be large enough
        overlayFrame.style.width = `${sizes.max.width * factor + 2}px`;
        overlayFrame.style.height = `${sizes.max.height * factor + 2}px`;
        overlayFrame.style.width = `${sizes.maxSize.width * factor + 2}px`;
        overlayFrame.style.height = `${sizes.maxSize.height * factor + 2}px`;
      }

      const rangeInput = $container[0].querySelector('input[type="range"]');
      function updateOpacity() {
        if (sizes?.$image1?.[0]) {
          sizes.$image1[0].parentNode.style.opacity = `${rangeInput.value / 100}`;
        if (sizes.imageAfter) {
          sizes.imageAfter.parentNode.style.opacity = `${rangeInput.value / 100}`;
        }
      }
      rangeInput?.addEventListener('input', updateOpacity);
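For readers skimming the rename above (size1/size2/max to sizeAfter/sizeBefore/maxSize), the sketch below illustrates the size and offset math that the refactored createContext return value carries. It is illustrative only and not part of this commit: the standalone function name, the plain-object inputs, and the sample dimensions are assumptions for demonstration, while the property names and the ratio arithmetic mirror the diff.

// Minimal sketch, not exported by imagediff.js: plain objects with width/height
// stand in for the <img> elements the real code receives.
function sketchCreateContext(imageAfter, imageBefore) {
  const sizeAfter = {
    width: imageAfter?.width || 0,
    height: imageAfter?.height || 0,
  };
  const sizeBefore = {
    width: imageBefore?.width || 0,
    height: imageBefore?.height || 0,
  };
  const maxSize = {
    width: Math.max(sizeBefore.width, sizeAfter.width),
    height: Math.max(sizeBefore.height, sizeAfter.height),
  };
  return {
    sizeAfter,
    sizeBefore,
    maxSize,
    // Half of the width/height gap between each image and the bounding frame;
    // initSwipe/initOverlay use these values as margins to center the smaller image.
    ratio: [
      Math.floor(maxSize.width - sizeAfter.width) / 2,
      Math.floor(maxSize.height - sizeAfter.height) / 2,
      Math.floor(maxSize.width - sizeBefore.width) / 2,
      Math.floor(maxSize.height - sizeBefore.height) / 2,
    ],
  };
}

// Example: a 100x80 "after" image and a 60x80 "before" image yield
// maxSize 100x80 and ratio [0, 0, 20, 0], i.e. a 20px horizontal offset
// that centers the narrower "before" image inside the frame.
console.log(sketchCreateContext({width: 100, height: 80}, {width: 60, height: 80}));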