1Panel/backend/app/service/backup.go

package service
import (
"bufio"
"context"
"encoding/base64"
"encoding/json"
"fmt"
"os"
"path"
"sort"
"strings"
"sync"
"time"
"github.com/1Panel-dev/1Panel/backend/app/dto"
"github.com/1Panel-dev/1Panel/backend/app/model"
"github.com/1Panel-dev/1Panel/backend/buserr"
"github.com/1Panel-dev/1Panel/backend/constant"
"github.com/1Panel-dev/1Panel/backend/global"
"github.com/1Panel-dev/1Panel/backend/utils/cloud_storage"
"github.com/1Panel-dev/1Panel/backend/utils/cloud_storage/client"
fileUtils "github.com/1Panel-dev/1Panel/backend/utils/files"
"github.com/jinzhu/copier"
"github.com/pkg/errors"
)
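// BackupService implements IBackupService, which covers backup account management
// and the database/app/website backup and restore operations exposed to the handlers.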
type BackupService struct{}
type IBackupService interface {
List() ([]dto.BackupInfo, error)
SearchRecordsWithPage(search dto.RecordSearch) (int64, []dto.BackupRecords, error)
SearchRecordsByCronjobWithPage(search dto.RecordSearchByCronjob) (int64, []dto.BackupRecords, error)
LoadOneDriveInfo() (dto.OneDriveInfo, error)
DownloadRecord(info dto.DownloadRecord) (string, error)
Create(backupDto dto.BackupOperate) error
GetBuckets(backupDto dto.ForBuckets) ([]interface{}, error)
Update(ireq dto.BackupOperate) error
Delete(id uint) error
BatchDeleteRecord(ids []uint) error
NewClient(backup *model.BackupAccount) (cloud_storage.CloudStorageClient, error)
ListFiles(req dto.BackupSearchFile) []string
MysqlBackup(db dto.CommonBackup) error
PostgresqlBackup(db dto.CommonBackup) error
MysqlRecover(db dto.CommonRecover) error
PostgresqlRecover(db dto.CommonRecover) error
MysqlRecoverByUpload(req dto.CommonRecover) error
PostgresqlRecoverByUpload(req dto.CommonRecover) error
RedisBackup() error
RedisRecover(db dto.CommonRecover) error
WebsiteBackup(db dto.CommonBackup) error
WebsiteRecover(req dto.CommonRecover) error
AppBackup(db dto.CommonBackup) error
AppRecover(req dto.CommonRecover) error
Run()
}
func NewIBackupService() IBackupService {
return &BackupService{}
}
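// List returns backup account information grouped by provider type
// (LOCAL, OSS, S3, SFTP, MINIO, COS, KODO, OneDrive, WebDAV).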
func (u *BackupService) List() ([]dto.BackupInfo, error) {
ops, err := backupRepo.List(commonRepo.WithOrderBy("created_at desc"))
var dtobas []dto.BackupInfo
dtobas = append(dtobas, u.loadByType("LOCAL", ops))
dtobas = append(dtobas, u.loadByType("OSS", ops))
dtobas = append(dtobas, u.loadByType("S3", ops))
dtobas = append(dtobas, u.loadByType("SFTP", ops))
dtobas = append(dtobas, u.loadByType("MINIO", ops))
dtobas = append(dtobas, u.loadByType("COS", ops))
dtobas = append(dtobas, u.loadByType("KODO", ops))
dtobas = append(dtobas, u.loadByType("OneDrive", ops))
dtobas = append(dtobas, u.loadByType("WebDAV", ops))
return dtobas, err
}
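// SearchRecordsWithPage pages backup records filtered by name, type and detail name,
// loads each record's size from its storage backend, and sorts the results newest first.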
func (u *BackupService) SearchRecordsWithPage(search dto.RecordSearch) (int64, []dto.BackupRecords, error) {
total, records, err := backupRepo.PageRecord(
search.Page, search.PageSize,
commonRepo.WithOrderBy("created_at desc"),
commonRepo.WithByName(search.Name),
commonRepo.WithByType(search.Type),
backupRepo.WithByDetailName(search.DetailName),
)
if err != nil {
return 0, nil, err
}
datas, err := u.loadRecordSize(records)
sort.Slice(datas, func(i, j int) bool {
return datas[i].CreatedAt.After(datas[j].CreatedAt)
})
return total, datas, err
}
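// SearchRecordsByCronjobWithPage pages the backup records produced by a specific cronjob
// and loads their sizes from storage.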
func (u *BackupService) SearchRecordsByCronjobWithPage(search dto.RecordSearchByCronjob) (int64, []dto.BackupRecords, error) {
total, records, err := backupRepo.PageRecord(
search.Page, search.PageSize,
commonRepo.WithOrderBy("created_at desc"),
backupRepo.WithByCronID(search.CronjobID),
)
if err != nil {
return 0, nil, err
}
datas, err := u.loadRecordSize(records)
sort.Slice(datas, func(i, j int) bool {
return datas[i].CreatedAt.After(datas[j].CreatedAt)
})
return total, datas, err
}
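// loadSizeHelper caches a storage client (and its backup path) per backup source
// so record sizes can be queried without recreating clients.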
type loadSizeHelper struct {
isOk bool
backupPath string
client cloud_storage.CloudStorageClient
}
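// LoadOneDriveInfo returns the OneDrive redirect URI together with the client ID and
// client secret stored base64-encoded in the settings table.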
func (u *BackupService) LoadOneDriveInfo() (dto.OneDriveInfo, error) {
var data dto.OneDriveInfo
data.RedirectUri = constant.OneDriveRedirectURI
clientID, err := settingRepo.Get(settingRepo.WithByKey("OneDriveID"))
if err != nil {
return data, err
}
idItem, err := base64.StdEncoding.DecodeString(clientID.Value)
if err != nil {
return data, err
}
data.ClientID = string(idItem)
clientSecret, err := settingRepo.Get(settingRepo.WithByKey("OneDriveSc"))
if err != nil {
return data, err
}
secretItem, err := base64.StdEncoding.DecodeString(clientSecret.Value)
if err != nil {
return data, err
}
data.ClientSecret = string(secretItem)
return data, err
}
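// DownloadRecord resolves the local path of a backup record; for non-local sources it
// downloads the file from the configured cloud storage into the data directory first.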
func (u *BackupService) DownloadRecord(info dto.DownloadRecord) (string, error) {
if info.Source == "LOCAL" {
localDir, err := loadLocalDir()
if err != nil {
return "", err
}
return path.Join(localDir, info.FileDir, info.FileName), nil
}
backup, _ := backupRepo.Get(commonRepo.WithByType(info.Source))
if backup.ID == 0 {
return "", constant.ErrRecordNotFound
}
varMap := make(map[string]interface{})
if err := json.Unmarshal([]byte(backup.Vars), &varMap); err != nil {
return "", err
}
varMap["bucket"] = backup.Bucket
switch backup.Type {
case constant.Sftp, constant.WebDAV:
varMap["username"] = backup.AccessKey
varMap["password"] = backup.Credential
case constant.OSS, constant.S3, constant.MinIo, constant.Cos, constant.Kodo:
varMap["accessKey"] = backup.AccessKey
varMap["secretKey"] = backup.Credential
case constant.OneDrive:
varMap["accessToken"] = backup.Credential
}
backClient, err := cloud_storage.NewCloudStorageClient(backup.Type, varMap)
if err != nil {
return "", fmt.Errorf("new cloud storage client failed, err: %v", err)
}
targetPath := fmt.Sprintf("%s/download/%s/%s", constant.DataDir, info.FileDir, info.FileName)
if _, err := os.Stat(path.Dir(targetPath)); err != nil && os.IsNotExist(err) {
if err = os.MkdirAll(path.Dir(targetPath), os.ModePerm); err != nil {
global.LOG.Errorf("mkdir %s failed, err: %v", path.Dir(targetPath), err)
}
}
srcPath := fmt.Sprintf("%s/%s", info.FileDir, info.FileName)
if len(backup.BackupPath) != 0 {
srcPath = path.Join(strings.TrimPrefix(backup.BackupPath, "/"), srcPath)
}
if exist, _ := backClient.Exist(srcPath); exist {
isOK, err := backClient.Download(srcPath, targetPath)
if !isOK {
return "", fmt.Errorf("cloud storage download failed, err: %v", err)
}
}
return targetPath, nil
}
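// Create registers a new backup account: for OneDrive it exchanges the authorization code
// for tokens and starts the hourly refresh job; for non-local types it verifies
// connectivity before saving.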
func (u *BackupService) Create(req dto.BackupOperate) error {
backup, _ := backupRepo.Get(commonRepo.WithByType(req.Type))
if backup.ID != 0 {
return constant.ErrRecordExist
}
if err := copier.Copy(&backup, &req); err != nil {
return errors.WithMessage(constant.ErrStructTransform, err.Error())
}
if req.Type == constant.OneDrive {
if err := u.loadAccessToken(&backup); err != nil {
return err
}
}
if req.Type != "LOCAL" {
if _, err := u.checkBackupConn(&backup); err != nil {
return buserr.WithMap("ErrBackupCheck", map[string]interface{}{"err": err.Error()}, err)
}
}
if backup.Type == constant.OneDrive {
StartRefreshOneDriveToken()
}
if err := backupRepo.Create(&backup); err != nil {
return err
}
return nil
}
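// GetBuckets lists the buckets visible to the given backup account credentials.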
func (u *BackupService) GetBuckets(backupDto dto.ForBuckets) ([]interface{}, error) {
varMap := make(map[string]interface{})
if err := json.Unmarshal([]byte(backupDto.Vars), &varMap); err != nil {
return nil, err
}
switch backupDto.Type {
case constant.Sftp, constant.WebDAV:
varMap["username"] = backupDto.AccessKey
varMap["password"] = backupDto.Credential
case constant.OSS, constant.S3, constant.MinIo, constant.Cos, constant.Kodo:
varMap["accessKey"] = backupDto.AccessKey
varMap["secretKey"] = backupDto.Credential
}
client, err := cloud_storage.NewCloudStorageClient(backupDto.Type, varMap)
if err != nil {
return nil, err
}
return client.ListBuckets()
}
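// Delete removes a backup account by ID, stops the OneDrive refresh job if needed, and
// refuses to delete an account that is still used as a cronjob download target.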
func (u *BackupService) Delete(id uint) error {
backup, _ := backupRepo.Get(commonRepo.WithByID(id))
if backup.ID == 0 {
return constant.ErrRecordNotFound
}
if backup.Type == constant.OneDrive {
global.Cron.Remove(global.OneDriveCronID)
}
cronjobs, _ := cronjobRepo.List(cronjobRepo.WithByDefaultDownload(backup.Type))
if len(cronjobs) != 0 {
return buserr.New(constant.ErrBackupInUsed)
}
return backupRepo.Delete(commonRepo.WithByID(id))
}
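// BatchDeleteRecord deletes backup records from their storage backends (best effort)
// and then removes the database entries.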
func (u *BackupService) BatchDeleteRecord(ids []uint) error {
records, err := backupRepo.ListRecord(commonRepo.WithIdsIn(ids))
if err != nil {
return err
}
for _, record := range records {
backupAccount, err := backupRepo.Get(commonRepo.WithByType(record.Source))
if err != nil {
return err
}
client, err := u.NewClient(&backupAccount)
if err != nil {
return err
}
if _, err = client.Delete(path.Join(record.FileDir, record.FileName)); err != nil {
global.LOG.Errorf("remove file %s from %s failed, err: %v", path.Join(record.FileDir, record.FileName), record.Source, err)
}
}
return backupRepo.DeleteRecord(context.Background(), commonRepo.WithIdsIn(ids))
}
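// Update modifies a backup account, re-checking connectivity for non-local providers
// and migrating the local backup directory when its path changes.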
func (u *BackupService) Update(req dto.BackupOperate) error {
backup, err := backupRepo.Get(commonRepo.WithByID(req.ID))
if err != nil {
return constant.ErrRecordNotFound
}
varMap := make(map[string]interface{})
if err := json.Unmarshal([]byte(req.Vars), &varMap); err != nil {
return err
}
oldVars := backup.Vars
oldDir, err := loadLocalDir()
if err != nil {
return err
}
upMap := make(map[string]interface{})
upMap["bucket"] = req.Bucket
upMap["access_key"] = req.AccessKey
upMap["credential"] = req.Credential
upMap["backup_path"] = req.BackupPath
upMap["vars"] = req.Vars
backup.Bucket = req.Bucket
backup.Vars = req.Vars
backup.Credential = req.Credential
backup.AccessKey = req.AccessKey
backup.BackupPath = req.BackupPath
if req.Type == constant.OneDrive {
if err := u.loadAccessToken(&backup); err != nil {
return err
}
upMap["credential"] = backup.Credential
upMap["vars"] = backup.Vars
}
if backup.Type != "LOCAL" {
isOk, err := u.checkBackupConn(&backup)
if err != nil || !isOk {
return buserr.WithMap("ErrBackupCheck", map[string]interface{}{"err": err.Error()}, err)
}
}
if err := backupRepo.Update(req.ID, upMap); err != nil {
return err
}
if backup.Type == "LOCAL" {
if dir, ok := varMap["dir"]; ok {
if dirStr, isStr := dir.(string); isStr {
if strings.HasSuffix(dirStr, "/") && dirStr != "/" {
dirStr = dirStr[:strings.LastIndex(dirStr, "/")]
}
if err := copyDir(oldDir, dirStr); err != nil {
_ = backupRepo.Update(req.ID, map[string]interface{}{"vars": oldVars})
return err
}
global.CONF.System.Backup = dirStr
}
}
}
return nil
}
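// ListFiles returns the file names found under the system_snapshot prefix
// of the given backup account.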
func (u *BackupService) ListFiles(req dto.BackupSearchFile) []string {
var datas []string
backup, err := backupRepo.Get(backupRepo.WithByType(req.Type))
if err != nil {
return datas
}
client, err := u.NewClient(&backup)
if err != nil {
return datas
}
prefix := "system_snapshot"
if len(backup.BackupPath) != 0 {
prefix = path.Join(strings.TrimPrefix(backup.BackupPath, "/"), prefix)
}
files, err := client.ListObjects(prefix)
if err != nil {
global.LOG.Debugf("load files from %s failed, err: %v", req.Type, err)
return datas
}
for _, file := range files {
if len(file) != 0 {
datas = append(datas, path.Base(file))
}
}
return datas
}
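// NewClient builds a cloud storage client from a backup account's type, bucket,
// credentials and extra vars.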
func (u *BackupService) NewClient(backup *model.BackupAccount) (cloud_storage.CloudStorageClient, error) {
varMap := make(map[string]interface{})
if err := json.Unmarshal([]byte(backup.Vars), &varMap); err != nil {
return nil, err
}
varMap["bucket"] = backup.Bucket
switch backup.Type {
case constant.Sftp, constant.WebDAV:
varMap["username"] = backup.AccessKey
varMap["password"] = backup.Credential
case constant.OSS, constant.S3, constant.MinIo, constant.Cos, constant.Kodo:
varMap["accessKey"] = backup.AccessKey
varMap["secretKey"] = backup.Credential
case constant.OneDrive:
varMap["accessToken"] = backup.Credential
}
backClient, err := cloud_storage.NewCloudStorageClient(backup.Type, varMap)
if err != nil {
return nil, err
}
return backClient, nil
}
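// loadByType picks the account of the given type from the list and converts it to a DTO,
// stripping the OneDrive refresh token from the returned vars.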
func (u *BackupService) loadByType(accountType string, accounts []model.BackupAccount) dto.BackupInfo {
for _, account := range accounts {
if account.Type == accountType {
var item dto.BackupInfo
if err := copier.Copy(&item, &account); err != nil {
global.LOG.Errorf("copy backup account to dto backup info failed, err: %v", err)
}
if account.Type == constant.OneDrive {
varMap := make(map[string]interface{})
if err := json.Unmarshal([]byte(item.Vars), &varMap); err != nil {
return dto.BackupInfo{Type: accountType}
}
delete(varMap, "refresh_token")
itemVars, _ := json.Marshal(varMap)
item.Vars = string(itemVars)
}
return item
}
}
return dto.BackupInfo{Type: accountType}
}
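// loadAccessToken exchanges the OneDrive authorization code stored in vars for an
// access token and refresh token, and records the refresh status on the account.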
func (u *BackupService) loadAccessToken(backup *model.BackupAccount) error {
varMap := make(map[string]interface{})
if err := json.Unmarshal([]byte(backup.Vars), &varMap); err != nil {
return fmt.Errorf("unmarshal backup vars failed, err: %v", err)
}
token, refreshToken, err := client.RefreshToken("authorization_code", varMap)
if err != nil {
return err
}
delete(varMap, "code")
backup.Credential = token
varMap["refresh_status"] = constant.StatusSuccess
varMap["refresh_time"] = time.Now().Format("2006-01-02 15:04:05")
varMap["refresh_token"] = refreshToken
itemVars, err := json.Marshal(varMap)
if err != nil {
return fmt.Errorf("json marshal var map failed, err: %v", err)
}
backup.Vars = string(itemVars)
return nil
}
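// loadRecordSize converts backup records to DTOs and fills in their sizes by querying
// each record's storage backend, reusing one client per source.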
func (u *BackupService) loadRecordSize(records []model.BackupRecord) ([]dto.BackupRecords, error) {
var datas []dto.BackupRecords
clientMap := make(map[string]loadSizeHelper)
var wg sync.WaitGroup
var mu sync.Mutex // protects datas, which is appended to concurrently below
for i := 0; i < len(records); i++ {
var item dto.BackupRecords
if err := copier.Copy(&item, &records[i]); err != nil {
return nil, errors.WithMessage(constant.ErrStructTransform, err.Error())
}
itemPath := path.Join(records[i].FileDir, records[i].FileName)
// guard every append to datas with mu: sizes for cached sources are loaded in
// goroutines below, so the slice is shared between the main loop and those goroutines
if _, ok := clientMap[records[i].Source]; !ok {
backup, err := backupRepo.Get(commonRepo.WithByType(records[i].Source))
if err != nil {
global.LOG.Errorf("load backup model %s from db failed, err: %v", records[i].Source, err)
clientMap[records[i].Source] = loadSizeHelper{}
mu.Lock()
datas = append(datas, item)
mu.Unlock()
continue
}
client, err := u.NewClient(&backup)
if err != nil {
global.LOG.Errorf("load backup client %s from db failed, err: %v", records[i].Source, err)
clientMap[records[i].Source] = loadSizeHelper{}
mu.Lock()
datas = append(datas, item)
mu.Unlock()
continue
}
item.Size, _ = client.Size(path.Join(strings.TrimLeft(backup.BackupPath, "/"), itemPath))
mu.Lock()
datas = append(datas, item)
mu.Unlock()
clientMap[records[i].Source] = loadSizeHelper{backupPath: strings.TrimLeft(backup.BackupPath, "/"), client: client, isOk: true}
continue
}
if helper := clientMap[records[i].Source]; helper.isOk {
wg.Add(1)
// pass helper, itemPath and item by value so the goroutine does not read
// clientMap or loop variables concurrently with the main loop
go func(h loadSizeHelper, p string, data dto.BackupRecords) {
defer wg.Done()
data.Size, _ = h.client.Size(path.Join(h.backupPath, p))
mu.Lock()
datas = append(datas, data)
mu.Unlock()
}(helper, itemPath, item)
} else {
mu.Lock()
datas = append(datas, item)
mu.Unlock()
}
}
wg.Wait()
return datas, nil
}
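// loadLocalDir reads the local backup directory from the LOCAL account vars,
// creating it if it does not exist.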
func loadLocalDir() (string, error) {
backup, err := backupRepo.Get(commonRepo.WithByType("LOCAL"))
if err != nil {
return "", err
}
varMap := make(map[string]interface{})
if err := json.Unmarshal([]byte(backup.Vars), &varMap); err != nil {
return "", err
}
if _, ok := varMap["dir"]; !ok {
return "", errors.New("load local backup dir failed")
}
baseDir, ok := varMap["dir"].(string)
if ok {
if _, err := os.Stat(baseDir); err != nil && os.IsNotExist(err) {
if err = os.MkdirAll(baseDir, os.ModePerm); err != nil {
return "", fmt.Errorf("mkdir %s failed, err: %v", baseDir, err)
}
}
return baseDir, nil
}
return "", fmt.Errorf("error type dir: %T", varMap["dir"])
}
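// copyDir recursively copies the contents of src into dst, logging (but not aborting on)
// individual copy failures.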
func copyDir(src, dst string) error {
srcInfo, err := os.Stat(src)
if err != nil {
return err
}
if err = os.MkdirAll(dst, srcInfo.Mode()); err != nil {
return err
}
files, err := os.ReadDir(src)
if err != nil {
return err
}
fileOP := fileUtils.NewFileOp()
for _, file := range files {
srcPath := fmt.Sprintf("%s/%s", src, file.Name())
dstPath := fmt.Sprintf("%s/%s", dst, file.Name())
if file.IsDir() {
if err = copyDir(srcPath, dstPath); err != nil {
global.LOG.Errorf("copy dir %s to %s failed, err: %v", srcPath, dstPath, err)
}
} else {
if err := fileOP.CopyFile(srcPath, dst); err != nil {
global.LOG.Errorf("copy file %s to %s failed, err: %v", srcPath, dstPath, err)
}
}
}
return nil
}
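// checkBackupConn verifies a backup account by uploading a small test file
// to its storage backend.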
func (u *BackupService) checkBackupConn(backup *model.BackupAccount) (bool, error) {
client, err := u.NewClient(backup)
if err != nil {
return false, err
}
fileItem := path.Join(global.CONF.System.TmpDir, "test", "1panel")
if _, err := os.Stat(path.Dir(fileItem)); err != nil && os.IsNotExist(err) {
if err = os.MkdirAll(path.Dir(fileItem), os.ModePerm); err != nil {
return false, err
}
}
file, err := os.OpenFile(fileItem, os.O_WRONLY|os.O_CREATE, 0666)
if err != nil {
return false, err
}
defer file.Close()
write := bufio.NewWriter(file)
_, _ = write.WriteString(string("1Panel 备份账号测试文件。\n"))
_, _ = write.WriteString(string("1Panel 備份賬號測試文件。\n"))
_, _ = write.WriteString(string("1Panel Backs up account test files.\n"))
_, _ = write.WriteString(string("1Panelアカウントのテストファイルをバックアップします。\n"))
write.Flush()
targetPath := strings.TrimPrefix(path.Join(backup.BackupPath, "test/1panel"), "/")
return client.Upload(fileItem, targetPath)
}
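// StartRefreshOneDriveToken schedules an hourly cron job that refreshes the OneDrive token.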
func StartRefreshOneDriveToken() {
service := NewIBackupService()
oneDriveCronID, err := global.Cron.AddJob("0 * * * *", service)
if err != nil {
global.LOG.Errorf("can not add OneDrive corn job: %s", err.Error())
return
}
global.OneDriveCronID = oneDriveCronID
}
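// Run implements the cron job: it refreshes the OneDrive access token and persists
// the new credential and refresh status on the backup account.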
func (u *BackupService) Run() {
var backupItem model.BackupAccount
_ = global.DB.Where("`type` = ?", "OneDrive").First(&backupItem)
if backupItem.ID == 0 {
return
}
if len(backupItem.Credential) == 0 {
global.LOG.Error("OneDrive configuration lacks token information, please rebind.")
return
}
global.LOG.Info("start to refresh token of OneDrive ...")
varMap := make(map[string]interface{})
if err := json.Unmarshal([]byte(backupItem.Vars), &varMap); err != nil {
global.LOG.Errorf("Failed to refresh OneDrive token, please retry, err: %v", err)
return
}
token, refreshToken, err := client.RefreshToken("refresh_token", varMap)
varMap["refresh_status"] = constant.StatusSuccess
varMap["refresh_time"] = time.Now().Format("2006-01-02 15:04:05")
if err != nil {
varMap["refresh_status"] = constant.StatusFailed
varMap["refresh_msg"] = err.Error()
global.LOG.Errorf("Failed to refresh OneDrive token, please retry, err: %v", err)
return
}
varMap["refresh_token"] = refreshToken
varsItem, _ := json.Marshal(varMap)
_ = global.DB.Model(&model.BackupAccount{}).
Where("id = ?", backupItem.ID).
Updates(map[string]interface{}{
"credential": token,
"vars": varsItem,
}).Error
global.LOG.Info("Successfully refreshed OneDrive token.")
}