Mirror of https://codeberg.org/forgejo/forgejo.git, synced 2024-11-01 23:29:12 +01:00
375fd15fbf
Refactor `modules/indexer` to make it more maintainable and easier to extend with new features. I'm trying to solve some issues with issue searching; this is a precursor to making functional changes.

Currently supported engines and index versions:

| engines | issues | code |
| - | - | - |
| db | Just a wrapper for database queries, doesn't need a version | - |
| bleve | The index version is **2** | The index version is **6** |
| elasticsearch | The old index has no version and will be treated as version **0** in this PR | The index version is **1** |
| meilisearch | The old index has no version and will be treated as version **0** in this PR | - |

## Changes

### Split

Split it into multiple packages:

```text
indexer
├── internal
│   ├── bleve
│   ├── db
│   ├── elasticsearch
│   └── meilisearch
├── code
│   ├── bleve
│   ├── elasticsearch
│   └── internal
└── issues
    ├── bleve
    ├── db
    ├── elasticsearch
    ├── internal
    └── meilisearch
```

- `indexer/internal`: Internal shared package for the indexer.
- `indexer/internal/[engine]`: Internal shared package for each engine (bleve/db/elasticsearch/meilisearch).
- `indexer/code`: Implementations of the code indexer.
- `indexer/code/internal`: Internal shared package for the code indexer.
- `indexer/code/[engine]`: Implementation of the code indexer via each engine.
- `indexer/issues`: Implementations of the issues indexer.
- `indexer/issues/internal`: Internal shared package for the issues indexer.
- `indexer/issues/[engine]`: Implementation of the issues indexer via each engine.

### Deduplication

- Combine `Init/Ping/Close` for the code indexer and the issues indexer.
- ~~Combine `issues.indexerHolder` and `code.wrappedIndexer` into `internal.IndexHolder`.~~ Removed; a dummy indexer is used instead when the indexer is not ready.
- Deduplicate the two copies of ES client creation.
- Deduplicate the two copies of `indexerID()`.

### Enhancement

- [x] Support an index version for the elasticsearch issues indexer; an old index without a version is treated as version 0.
- [x] Fix the spelling of `elastic_search/ElasticSearch`; it should be `Elasticsearch`.
- [x] Improve versioning of the ES index (see the naming sketch after this message). We don't need `Aliases`:
  - Gitea doesn't need aliases for "zero downtime" because it never deletes old indexes.
  - The old issues indexer code uses the original name to create the issue index, so it's tricky to convert it to an alias.
- [x] Support an index version for the meilisearch issues indexer; an old index without a version is treated as version 0.
- [x] Ping only when `Ping` is actually called; don't ping periodically and cache the status.
- [x] Support the context parameter whenever possible.
- [x] Fix the outdated example config.
- [x] Give up the issues indexer's requeue logic (when indexing fails, call `Ping` to check whether the failure was caused by the engine being unavailable, and requeue the task only in that case):
  - It is fragile and tricky, could cause data loss (it did happen while I was testing this PR), and it only works for ES.
  - Just always requeue a failed task; if the failure is caused by bad data, that's a bug in Gitea which should be fixed.

---------

Co-authored-by: Giteabot <teabot@gitea.io>
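To make the versioning scheme above concrete, here is a minimal sketch (the helper name and index names are hypothetical, not the code from this commit) of how an engine could map a base index name plus version to a physical index name, with version 0 standing in for the legacy, unversioned index:

```go
package main

import "fmt"

// versionedIndexName is a hypothetical helper: engines such as Elasticsearch
// or Meilisearch could derive the physical index name from a base name and the
// index version, while version 0 keeps the legacy, unversioned name so that
// indexes created before this change are still found.
func versionedIndexName(baseName string, version int) string {
	if version == 0 {
		// Old indexes were created without a version suffix.
		return baseName
	}
	return fmt.Sprintf("%s.v%d", baseName, version)
}

func main() {
	fmt.Println(versionedIndexName("gitea_issues", 0)) // gitea_issues
	fmt.Println(versionedIndexName("gitea_issues", 1)) // gitea_issues.v1
}
```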
135 lines
3.4 KiB
Go
// Copyright 2017 The Gitea Authors. All rights reserved.
// SPDX-License-Identifier: MIT

package code

import (
	"bytes"
	"context"
	"strings"

	"code.gitea.io/gitea/modules/highlight"
	"code.gitea.io/gitea/modules/indexer/code/internal"
	"code.gitea.io/gitea/modules/timeutil"
	"code.gitea.io/gitea/modules/util"
)
// Result is a single code search result prepared for display.
type Result struct {
	RepoID         int64
	Filename       string
	CommitID       string
	UpdatedUnix    timeutil.TimeStamp
	Language       string
	Color          string
	LineNumbers    []int
	FormattedLines string
}

// SearchResultLanguages is re-exported here so that callers don't have to
// import the internal package.
type SearchResultLanguages = internal.SearchResultLanguages
// indices expands the selection [selectionStartIndex, selectionEndIndex) so
// that it also covers one full line of context before and after the match.
func indices(content string, selectionStartIndex, selectionEndIndex int) (int, int) {
	// Walk backwards until two newlines have been passed, i.e. to the
	// beginning of the line preceding the one that contains the selection.
	startIndex := selectionStartIndex
	numLinesBefore := 0
	for ; startIndex > 0; startIndex-- {
		if content[startIndex-1] == '\n' {
			if numLinesBefore == 1 {
				break
			}
			numLinesBefore++
		}
	}

	// Walk forwards until two newlines have been passed, i.e. to the end of
	// the line following the one that contains the selection.
	endIndex := selectionEndIndex
	numLinesAfter := 0
	for ; endIndex < len(content); endIndex++ {
		if content[endIndex] == '\n' {
			if numLinesAfter == 1 {
				break
			}
			numLinesAfter++
		}
	}

	return startIndex, endIndex
}
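// Example: for content "aa\nbb\ncc\ndd\nee" and a selection covering "cc",
// indices returns offsets spanning "bb\ncc\ndd", i.e. one full line of
// context before and after the line containing the match.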
// writeStrings writes each string to buf, returning the first error, if any.
func writeStrings(buf *bytes.Buffer, strs ...string) error {
	for _, s := range strs {
		_, err := buf.WriteString(s)
		if err != nil {
			return err
		}
	}
	return nil
}
// searchResult converts an internal search result into a display Result,
// splitting the expanded content range back into lines and running it through
// the syntax highlighter.
func searchResult(result *internal.SearchResult, startIndex, endIndex int) (*Result, error) {
	startLineNum := 1 + strings.Count(result.Content[:startIndex], "\n")

	var formattedLinesBuffer bytes.Buffer

	contentLines := strings.SplitAfter(result.Content[startIndex:endIndex], "\n")
	lineNumbers := make([]int, len(contentLines))
	index := startIndex
	for i, line := range contentLines {
		var err error
		if index < result.EndIndex &&
			result.StartIndex < index+len(line) &&
			result.StartIndex < result.EndIndex {
			// This line overlaps the matched range: split it at the match
			// boundaries before writing it out.
			openActiveIndex := util.Max(result.StartIndex-index, 0)
			closeActiveIndex := util.Min(result.EndIndex-index, len(line))
			err = writeStrings(&formattedLinesBuffer,
				line[:openActiveIndex],
				line[openActiveIndex:closeActiveIndex],
				line[closeActiveIndex:],
			)
		} else {
			err = writeStrings(&formattedLinesBuffer,
				line,
			)
		}
		if err != nil {
			return nil, err
		}

		lineNumbers[i] = startLineNum + i
		index += len(line)
	}

	highlighted, _ := highlight.Code(result.Filename, "", formattedLinesBuffer.String())

	return &Result{
		RepoID:         result.RepoID,
		Filename:       result.Filename,
		CommitID:       result.CommitID,
		UpdatedUnix:    result.UpdatedUnix,
		Language:       result.Language,
		Color:          result.Color,
		LineNumbers:    lineNumbers,
		FormattedLines: highlighted,
	}, nil
}
// PerformSearch performs a code search on the given repositories.
func PerformSearch(ctx context.Context, repoIDs []int64, language, keyword string, page, pageSize int, isMatch bool) (int, []*Result, []*internal.SearchResultLanguages, error) {
	if len(keyword) == 0 {
		return 0, nil, nil, nil
	}

	total, results, resultLanguages, err := (*globalIndexer.Load()).Search(ctx, repoIDs, language, keyword, page, pageSize, isMatch)
	if err != nil {
		return 0, nil, nil, err
	}

	displayResults := make([]*Result, len(results))

	for i, result := range results {
		startIndex, endIndex := indices(result.Content, result.StartIndex, result.EndIndex)
		displayResults[i], err = searchResult(result, startIndex, endIndex)
		if err != nil {
			return 0, nil, nil, err
		}
	}
	return int(total), displayResults, resultLanguages, nil
}
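A minimal usage sketch of `PerformSearch` (the repository IDs, keyword, and paging values are made up, and the sketch assumes the code indexer has already been initialized elsewhere):

```go
package main

import (
	"context"
	"log"

	code_indexer "code.gitea.io/gitea/modules/indexer/code"
)

func main() {
	ctx := context.Background()
	// Hypothetical query: search repositories 1 and 2 for "parseConfig",
	// first page of 20 results, fuzzy (non-exact) matching.
	total, results, languages, err := code_indexer.PerformSearch(ctx, []int64{1, 2}, "", "parseConfig", 1, 20, false)
	if err != nil {
		log.Fatal(err)
	}
	log.Printf("%d results, %d languages", total, len(languages))
	for _, r := range results {
		log.Printf("repo %d: %s (lines %v)", r.RepoID, r.Filename, r.LineNumbers)
	}
}
```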