// Copyright 2014 The Gogs Authors. All rights reserved.
// Copyright 2016 The Gitea Authors. All rights reserved.
// SPDX-License-Identifier: MIT

package cmd

import (
	"fmt"
	"io"
	"os"
	"path"
	"path/filepath"
	"strings"
	"time"

	"code.gitea.io/gitea/models/db"
	"code.gitea.io/gitea/modules/json"
	"code.gitea.io/gitea/modules/log"
	"code.gitea.io/gitea/modules/setting"
	"code.gitea.io/gitea/modules/storage"
	"code.gitea.io/gitea/modules/util"

	"gitea.com/go-chi/session"
	"github.com/mholt/archiver/v3"
	"github.com/urfave/cli/v2"
)
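
// addReader writes a single entry to the archive from an already-open reader,
// storing it under customName and logging the addition when verbose is set.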
func addReader(w archiver.Writer, r io.ReadCloser, info os.FileInfo, customName string, verbose bool) error {
	if verbose {
		log.Info("Adding file %s", customName)
	}

	return w.Write(archiver.File{
		FileInfo: archiver.FileInfo{
			FileInfo:   info,
			CustomName: customName,
		},
		ReadCloser: r,
	})
}
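
// addFile opens the file at absPath and stores it in the archive as filePath.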
func addFile(w archiver.Writer, filePath, absPath string, verbose bool) error {
	file, err := os.Open(absPath)
	if err != nil {
		return err
	}
	defer file.Close()
	fileInfo, err := file.Stat()
	if err != nil {
		return err
	}

	return addReader(w, file, fileInfo, filePath, verbose)
}
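
// isSubdir reports whether lower is the same directory as upper or is located
// somewhere beneath it.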
func isSubdir(upper, lower string) (bool, error) {
	if relPath, err := filepath.Rel(upper, lower); err != nil {
		return false, err
	} else if relPath == "." || !strings.HasPrefix(relPath, ".") {
		return true, nil
	}
	return false, nil
}
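
// outputType is a cli flag value that restricts the dump output format to a
// fixed set of archive types.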
type outputType struct {
	Enum     []string
	Default  string
	selected string
}
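
// Join returns the allowed output types as a comma-separated list.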
func (o outputType) Join() string {
	return strings.Join(o.Enum, ", ")
}
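
// Set records value as the selected output type, rejecting values that are
// not in the allowed enum.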
func (o *outputType) Set(value string) error {
	for _, enum := range o.Enum {
		if enum == value {
			o.selected = value
			return nil
		}
	}

	return fmt.Errorf("allowed values are %s", o.Join())
}
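
// String returns the selected output type, or the default when nothing has
// been selected yet.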
func (o outputType) String() string {
	if o.selected == "" {
		return o.Default
	}
	return o.selected
}
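
// outputTypeEnum lists the archive formats the dump command can produce.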
var outputTypeEnum = &outputType{
	Enum:    []string{"zip", "tar", "tar.sz", "tar.gz", "tar.xz", "tar.bz2", "tar.br", "tar.lz4", "tar.zst"},
	Default: "zip",
}

// CmdDump represents the available dump sub-command.
var CmdDump = &cli.Command{
	Name:  "dump",
	Usage: "Dump Forgejo files and database",
	Description: `Dump compresses all related files and database into zip file.
It can be used for backup and capture Forgejo server image to send to maintainer`,
	Action: runDump,
	Flags: []cli.Flag{
		&cli.StringFlag{
			Name:    "file",
			Aliases: []string{"f"},
			Value:   fmt.Sprintf("forgejo-dump-%d.zip", time.Now().Unix()),
			Usage:   "Name of the dump file which will be created. Supply '-' for stdout. See type for available types.",
		},
		&cli.BoolFlag{
			Name:    "verbose",
			Aliases: []string{"V"},
			Usage:   "Show process details",
		},
		&cli.BoolFlag{
			Name:    "quiet",
			Aliases: []string{"q"},
			Usage:   "Only display warnings and errors",
		},
		&cli.StringFlag{
			Name:    "tempdir",
			Aliases: []string{"t"},
			Value:   os.TempDir(),
			Usage:   "Temporary dir path",
		},
		&cli.StringFlag{
			Name:    "database",
			Aliases: []string{"d"},
			Usage:   "Specify the database SQL syntax: sqlite3, mysql, mssql, postgres",
		},
		&cli.BoolFlag{
			Name:    "skip-repository",
			Aliases: []string{"R"},
			Usage:   "Skip the repository dumping",
		},
		&cli.BoolFlag{
			Name:    "skip-log",
			Aliases: []string{"L"},
			Usage:   "Skip the log dumping",
		},
		&cli.BoolFlag{
			Name:  "skip-custom-dir",
			Usage: "Skip custom directory",
		},
		&cli.BoolFlag{
			Name:  "skip-lfs-data",
			Usage: "Skip LFS data",
		},
		&cli.BoolFlag{
			Name:  "skip-attachment-data",
			Usage: "Skip attachment data",
		},
		&cli.BoolFlag{
			Name:  "skip-package-data",
			Usage: "Skip package data",
		},
		&cli.BoolFlag{
			Name:  "skip-index",
			Usage: "Skip bleve index data",
		},
		&cli.GenericFlag{
			Name:  "type",
			Value: outputTypeEnum,
			Usage: fmt.Sprintf("Dump output format: %s", outputTypeEnum.Join()),
		},
	},
}
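
// fatal writes the formatted message to stderr and then aborts the process
// via log.Fatal.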
func fatal(format string, args ...any) {
	fmt.Fprintf(os.Stderr, format+"\n", args...)
	log.Fatal(format, args...)
}
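
// runDump implements the dump sub-command: it packs the database dump,
// repositories, data, custom and log directories into a single archive,
// honoring the various skip-* flags.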
func runDump(ctx *cli.Context) error {
	var file *os.File
	fileName := ctx.String("file")
	outType := ctx.String("type")
	if fileName == "-" {
		file = os.Stdout
		setupConsoleLogger(log.FATAL, log.CanColorStderr, os.Stderr)
	} else {
		for _, suffix := range outputTypeEnum.Enum {
			if strings.HasSuffix(fileName, "."+suffix) {
				fileName = strings.TrimSuffix(fileName, "."+suffix)
				break
			}
		}
		fileName += "." + outType
	}
	setting.MustInstalled()

	// make sure we are logging to the console no matter what the configuration tells us to do
	// FIXME: don't use CfgProvider directly
	if _, err := setting.CfgProvider.Section("log").NewKey("MODE", "console"); err != nil {
		fatal("Setting logging mode to console failed: %v", err)
	}
	if _, err := setting.CfgProvider.Section("log.console").NewKey("STDERR", "true"); err != nil {
		fatal("Setting console logger to stderr failed: %v", err)
	}

	// Set loglevel to Warn if quiet-mode is requested
	if ctx.Bool("quiet") {
		if _, err := setting.CfgProvider.Section("log.console").NewKey("LEVEL", "Warn"); err != nil {
			fatal("Setting console log-level failed: %v", err)
		}
	}

	if !setting.InstallLock {
		log.Error("Is '%s' really the right config path?\n", setting.CustomConf)
		return fmt.Errorf("forgejo is not initialized")
	}
	setting.LoadSettings() // cannot access session settings otherwise

	verbose := ctx.Bool("verbose")
	if verbose && ctx.Bool("quiet") {
		return fmt.Errorf("--quiet and --verbose cannot both be set")
	}

	stdCtx, cancel := installSignals()
	defer cancel()

	err := db.InitEngine(stdCtx)
	if err != nil {
		return err
	}

	if err := storage.Init(); err != nil {
		return err
	}

	if file == nil {
		file, err = os.Create(fileName)
		if err != nil {
			fatal("Unable to open %s: %v", fileName, err)
		}
	}
	defer file.Close()

	absFileName, err := filepath.Abs(fileName)
	if err != nil {
		return err
	}

	var iface any
	if fileName == "-" {
		iface, err = archiver.ByExtension(fmt.Sprintf(".%s", outType))
	} else {
		iface, err = archiver.ByExtension(fileName)
	}
	if err != nil {
		fatal("Unable to get archiver for extension: %v", err)
	}

	w, _ := iface.(archiver.Writer)
	if err := w.Create(file); err != nil {
		fatal("Creating archiver.Writer failed: %v", err)
	}
	defer w.Close()

	if ctx.IsSet("skip-repository") && ctx.Bool("skip-repository") {
		log.Info("Skip dumping local repositories")
	} else {
		log.Info("Dumping local repositories... %s", setting.RepoRootPath)
		if err := addRecursiveExclude(w, "repos", setting.RepoRootPath, []string{absFileName}, verbose); err != nil {
			fatal("Failed to include repositories: %v", err)
		}

		if ctx.IsSet("skip-lfs-data") && ctx.Bool("skip-lfs-data") {
			log.Info("Skip dumping LFS data")
		} else if !setting.LFS.StartServer {
			log.Info("LFS isn't enabled. Skip dumping LFS data")
		} else if err := storage.LFS.IterateObjects("", func(objPath string, object storage.Object) error {
			info, err := object.Stat()
			if err != nil {
				return err
			}

			return addReader(w, object, info, path.Join("data", "lfs", objPath), verbose)
		}); err != nil {
			fatal("Failed to dump LFS objects: %v", err)
		}
	}

	tmpDir := ctx.String("tempdir")
	if _, err := os.Stat(tmpDir); os.IsNotExist(err) {
		fatal("Path does not exist: %s", tmpDir)
	}

	dbDump, err := os.CreateTemp(tmpDir, "forgejo-db.sql")
	if err != nil {
		fatal("Failed to create tmp file: %v", err)
	}
	defer func() {
		_ = dbDump.Close()
		if err := util.Remove(dbDump.Name()); err != nil {
			log.Warn("Unable to remove temporary file: %s: Error: %v", dbDump.Name(), err)
		}
	}()

	targetDBType := ctx.String("database")
	if len(targetDBType) > 0 && targetDBType != setting.Database.Type.String() {
		log.Info("Dumping database %s => %s...", setting.Database.Type, targetDBType)
	} else {
		log.Info("Dumping database...")
	}

	if err := db.DumpDatabase(dbDump.Name(), targetDBType); err != nil {
		fatal("Failed to dump database: %v", err)
	}

	if err := addFile(w, "forgejo-db.sql", dbDump.Name(), verbose); err != nil {
		fatal("Failed to include forgejo-db.sql: %v", err)
	}

	if len(setting.CustomConf) > 0 {
		log.Info("Adding custom configuration file from %s", setting.CustomConf)
		if err := addFile(w, "app.ini", setting.CustomConf, verbose); err != nil {
			fatal("Failed to include specified app.ini: %v", err)
		}
	}

	if ctx.IsSet("skip-custom-dir") && ctx.Bool("skip-custom-dir") {
		log.Info("Skipping custom directory")
	} else {
		customDir, err := os.Stat(setting.CustomPath)
		if err == nil && customDir.IsDir() {
			if is, _ := isSubdir(setting.AppDataPath, setting.CustomPath); !is {
				if err := addRecursiveExclude(w, "custom", setting.CustomPath, []string{absFileName}, verbose); err != nil {
					fatal("Failed to include custom: %v", err)
				}
			} else {
				log.Info("Custom dir %s is inside data dir %s, skipped", setting.CustomPath, setting.AppDataPath)
			}
		} else {
			log.Info("Custom dir %s doesn't exist, skipped", setting.CustomPath)
		}
	}

	isExist, err := util.IsExist(setting.AppDataPath)
	if err != nil {
		log.Error("Unable to check if %s exists. Error: %v", setting.AppDataPath, err)
	}
	if isExist {
		log.Info("Packing data directory...%s", setting.AppDataPath)

		var excludes []string
		if setting.SessionConfig.OriginalProvider == "file" {
			var opts session.Options
			if err = json.Unmarshal([]byte(setting.SessionConfig.ProviderConfig), &opts); err != nil {
				return err
			}
			excludes = append(excludes, opts.ProviderConfig)
		}

		if ctx.IsSet("skip-index") && ctx.Bool("skip-index") {
			excludes = append(excludes, setting.Indexer.RepoPath)
			excludes = append(excludes, setting.Indexer.IssuePath)
		}

		excludes = append(excludes, setting.RepoRootPath)
		excludes = append(excludes, setting.LFS.Storage.Path)
		excludes = append(excludes, setting.Attachment.Storage.Path)
		excludes = append(excludes, setting.Packages.Storage.Path)
		excludes = append(excludes, setting.Log.RootPath)
		excludes = append(excludes, absFileName)
		if err := addRecursiveExclude(w, "data", setting.AppDataPath, excludes, verbose); err != nil {
			fatal("Failed to include data directory: %v", err)
		}
	}

	if ctx.IsSet("skip-attachment-data") && ctx.Bool("skip-attachment-data") {
		log.Info("Skip dumping attachment data")
	} else if err := storage.Attachments.IterateObjects("", func(objPath string, object storage.Object) error {
		info, err := object.Stat()
		if err != nil {
			return err
		}

		return addReader(w, object, info, path.Join("data", "attachments", objPath), verbose)
	}); err != nil {
		fatal("Failed to dump attachments: %v", err)
	}

	if ctx.IsSet("skip-package-data") && ctx.Bool("skip-package-data") {
		log.Info("Skip dumping package data")
	} else if !setting.Packages.Enabled {
		log.Info("Packages isn't enabled. Skip dumping package data")
	} else if err := storage.Packages.IterateObjects("", func(objPath string, object storage.Object) error {
		info, err := object.Stat()
		if err != nil {
			return err
		}

		return addReader(w, object, info, path.Join("data", "packages", objPath), verbose)
	}); err != nil {
		fatal("Failed to dump packages: %v", err)
	}

	// Doesn't check if LogRootPath exists before processing --skip-log intentionally,
	// ensuring that it's clear the dump is skipped whether the directory's initialized
	// yet or not.
	if ctx.IsSet("skip-log") && ctx.Bool("skip-log") {
		log.Info("Skip dumping log files")
	} else {
		isExist, err := util.IsExist(setting.Log.RootPath)
		if err != nil {
			log.Error("Unable to check if %s exists. Error: %v", setting.Log.RootPath, err)
		}
		if isExist {
			if err := addRecursiveExclude(w, "log", setting.Log.RootPath, []string{absFileName}, verbose); err != nil {
				fatal("Failed to include log: %v", err)
			}
		}
	}

	if fileName != "-" {
		if err = w.Close(); err != nil {
			_ = util.Remove(fileName)
			fatal("Failed to save %s: %v", fileName, err)
		}

		if err := os.Chmod(fileName, 0o600); err != nil {
			log.Info("Can't change file access permissions mask to 0600: %v", err)
		}
	}

	if fileName != "-" {
		log.Info("Finish dumping in file %s", fileName)
	} else {
		log.Info("Finish dumping to stdout")
	}

	return nil
}

// addRecursiveExclude recursively adds the contents of absPath to the archive
// under insidePath, skipping any path listed in excludeAbsPath.
func addRecursiveExclude(w archiver.Writer, insidePath, absPath string, excludeAbsPath []string, verbose bool) error {
	absPath, err := filepath.Abs(absPath)
	if err != nil {
		return err
	}
	dir, err := os.Open(absPath)
	if err != nil {
		return err
	}
	defer dir.Close()

	files, err := dir.Readdir(0)
	if err != nil {
		return err
	}
	for _, file := range files {
		currentAbsPath := path.Join(absPath, file.Name())
		currentInsidePath := path.Join(insidePath, file.Name())
		if file.IsDir() {
			if !util.SliceContainsString(excludeAbsPath, currentAbsPath) {
				if err := addFile(w, currentInsidePath, currentAbsPath, false); err != nil {
					return err
				}
				if err = addRecursiveExclude(w, currentInsidePath, currentAbsPath, excludeAbsPath, verbose); err != nil {
					return err
				}
			}
		} else {
			// only copy regular files and symlink regular files, skip non-regular files like socket/pipe/...
			shouldAdd := file.Mode().IsRegular()
			if !shouldAdd && file.Mode()&os.ModeSymlink == os.ModeSymlink {
				target, err := filepath.EvalSymlinks(currentAbsPath)
				if err != nil {
					return err
				}
				targetStat, err := os.Stat(target)
				if err != nil {
					return err
				}
				shouldAdd = targetStat.Mode().IsRegular()
			}
			if shouldAdd {
				if err = addFile(w, currentInsidePath, currentAbsPath, verbose); err != nil {
					return err
				}
			}
		}
	}
	return nil
}