fix: resolve the failure of scheduled-task website backups (#3572)
Parent: ce3b841ca7
Commit: 48713b51f1
@@ -13,6 +13,12 @@ type WebsiteDTO struct {
 	RuntimeName string `json:"runtimeName"`
 }
 
+type WebsiteOption struct {
+	ID            uint   `json:"id"`
+	PrimaryDomain string `json:"primaryDomain"`
+	Alias         string `json:"alias"`
+}
+
 type WebsitePreInstallCheck struct {
 	Name   string `json:"name"`
 	Status string `json:"status"`
@@ -340,20 +340,11 @@ func (u *CronjobService) handleDatabase(cronjob model.Cronjob, backup model.Back
 
 func (u *CronjobService) handleCutWebsiteLog(cronjob *model.Cronjob, startTime time.Time) ([]string, string, error) {
 	var (
-		websites  []string
 		err       error
 		filePaths []string
 		msgs      []string
 	)
-	if cronjob.Website == "all" {
-		websites, _ = NewIWebsiteService().GetWebsiteOptions()
-		if len(websites) == 0 {
-			return msgs, "", nil
-		}
-	} else {
-		websites = append(websites, cronjob.Website)
-	}
-
+	websites := loadWebsForJob(*cronjob)
 	nginx, err := getAppInstallByKey(constant.AppOpenresty)
 	if err != nil {
 		return msgs, "", nil
@@ -362,42 +353,32 @@ func (u *CronjobService) handleCutWebsiteLog(cronjob *model.Cronjob, startTime t
 	fileOp := files.NewFileOp()
-	var wg sync.WaitGroup
-	wg.Add(len(websites))
-	for _, websiteName := range websites {
-		name := websiteName
-		go func() {
-			website, _ := websiteRepo.GetFirst(websiteRepo.WithDomain(name))
-			if website.ID == 0 {
-				wg.Done()
-				return
-			}
-			websiteLogDir := path.Join(baseDir, website.Alias, "log")
-			srcAccessLogPath := path.Join(websiteLogDir, "access.log")
-			srcErrorLogPath := path.Join(websiteLogDir, "error.log")
-			dstLogDir := path.Join(global.CONF.System.Backup, "log", "website", website.Alias)
-			if !fileOp.Stat(dstLogDir) {
-				_ = os.MkdirAll(dstLogDir, 0755)
-			}
+	for _, website := range websites {
+		websiteLogDir := path.Join(baseDir, website.Alias, "log")
+		srcAccessLogPath := path.Join(websiteLogDir, "access.log")
+		srcErrorLogPath := path.Join(websiteLogDir, "error.log")
+		dstLogDir := path.Join(global.CONF.System.Backup, "log", "website", website.Alias)
+		if !fileOp.Stat(dstLogDir) {
+			_ = os.MkdirAll(dstLogDir, 0755)
+		}
 
-			dstName := fmt.Sprintf("%s_log_%s.gz", website.PrimaryDomain, startTime.Format("20060102150405"))
-			dstFilePath := path.Join(dstLogDir, dstName)
-			filePaths = append(filePaths, dstFilePath)
+		dstName := fmt.Sprintf("%s_log_%s.gz", website.PrimaryDomain, startTime.Format("20060102150405"))
+		dstFilePath := path.Join(dstLogDir, dstName)
+		filePaths = append(filePaths, dstFilePath)
 
-			if err = backupLogFile(dstFilePath, websiteLogDir, fileOp); err != nil {
-				websiteErr := buserr.WithNameAndErr("ErrCutWebsiteLog", name, err)
-				err = websiteErr
-				msgs = append(msgs, websiteErr.Error())
-				global.LOG.Error(websiteErr.Error())
-				wg.Done()
-				return
-			} else {
-				_ = fileOp.WriteFile(srcAccessLogPath, strings.NewReader(""), 0755)
-				_ = fileOp.WriteFile(srcErrorLogPath, strings.NewReader(""), 0755)
-			}
-			msg := i18n.GetMsgWithMap("CutWebsiteLogSuccess", map[string]interface{}{"name": name, "path": dstFilePath})
-			global.LOG.Infof(msg)
-			msgs = append(msgs, msg)
-			wg.Done()
-		}()
+		if err = backupLogFile(dstFilePath, websiteLogDir, fileOp); err != nil {
+			websiteErr := buserr.WithNameAndErr("ErrCutWebsiteLog", website.PrimaryDomain, err)
+			err = websiteErr
+			msgs = append(msgs, websiteErr.Error())
+			global.LOG.Error(websiteErr.Error())
+			continue
+		} else {
+			_ = fileOp.WriteFile(srcAccessLogPath, strings.NewReader(""), 0755)
+			_ = fileOp.WriteFile(srcErrorLogPath, strings.NewReader(""), 0755)
+		}
+		msg := i18n.GetMsgWithMap("CutWebsiteLogSuccess", map[string]interface{}{"name": website.PrimaryDomain, "path": dstFilePath})
+		global.LOG.Infof(msg)
+		msgs = append(msgs, msg)
 	}
-	wg.Wait()
 	u.HandleRmExpired("LOCAL", "", "", cronjob, nil)
@@ -505,16 +486,7 @@ func (u *CronjobService) handleWebsite(cronjob model.Cronjob, backup model.Backu
 		return paths, err
 	}
 
-	var weblist []string
-	if cronjob.Website == "all" {
-		weblist, err = NewIWebsiteService().GetWebsiteOptions()
-		if err != nil {
-			return paths, err
-		}
-	} else {
-		weblist = append(weblist, cronjob.Website)
-	}
-
+	weblist := loadWebsForJob(cronjob)
 	var client cloud_storage.CloudStorageClient
 	if backup.Type != "LOCAL" {
 		client, err = NewIBackupService().NewClient(&backup)
@@ -526,25 +498,21 @@ func (u *CronjobService) handleWebsite(cronjob model.Cronjob, backup model.Backu
 	for _, websiteItem := range weblist {
 		var record model.BackupRecord
 		record.Type = "website"
-		record.Name = cronjob.Website
+		record.Name = websiteItem.PrimaryDomain
+		record.DetailName = websiteItem.Alias
 		record.Source = "LOCAL"
 		record.BackupType = backup.Type
-		website, err := websiteRepo.GetFirst(websiteRepo.WithDomain(websiteItem))
-		if err != nil {
-			return paths, err
-		}
-		backupDir := path.Join(localDir, fmt.Sprintf("website/%s", website.PrimaryDomain))
+		backupDir := path.Join(localDir, fmt.Sprintf("website/%s", websiteItem.PrimaryDomain))
 		record.FileDir = backupDir
 		itemFileDir := strings.TrimPrefix(backupDir, localDir+"/")
 		if !cronjob.KeepLocal && backup.Type != "LOCAL" {
 			record.Source = backup.Type
 			record.FileDir = strings.TrimPrefix(backupDir, localDir+"/")
 		}
-		record.FileName = fmt.Sprintf("website_%s_%s.tar.gz", website.PrimaryDomain, startTime.Format("20060102150405"))
-		if err := handleWebsiteBackup(&website, backupDir, record.FileName); err != nil {
+		record.FileName = fmt.Sprintf("website_%s_%s.tar.gz", websiteItem.PrimaryDomain, startTime.Format("20060102150405"))
+		if err := handleWebsiteBackup(&websiteItem, backupDir, record.FileName); err != nil {
 			return paths, err
 		}
-		record.Name = website.PrimaryDomain
 		if err := backupRepo.CreateRecord(&record); err != nil {
 			global.LOG.Errorf("save backup record failed, err: %v", err)
 			return paths, err
@@ -759,3 +727,17 @@ func loadDbsForJob(cronjob model.Cronjob) []databaseHelper {
 	}
 	return dbs
 }
+
+func loadWebsForJob(cronjob model.Cronjob) []model.Website {
+	var weblist []model.Website
+	if cronjob.Website == "all" {
+		weblist, _ = websiteRepo.List()
+		return weblist
+	}
+	itemID, _ := (strconv.Atoi(cronjob.Website))
+	webItem, _ := websiteRepo.GetFirst(commonRepo.WithByID(uint(itemID)))
+	if webItem.ID != 0 {
+		weblist = append(weblist, webItem)
+	}
+	return weblist
+}
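Note: the new loadWebsForJob helper is what lets a cronjob store either "all" or a single website ID (as a string) in its Website field instead of a primary domain. Below is a minimal, database-free sketch of the same resolution logic; the parseWebsiteSelection name, the Website struct, and the in-memory lookup are illustrative assumptions and are not part of this commit.

package main

import (
	"fmt"
	"strconv"
)

// Website mirrors only the fields the cronjob code relies on (illustrative, not the real model).
type Website struct {
	ID            uint
	PrimaryDomain string
	Alias         string
}

// parseWebsiteSelection is a hypothetical stand-in for loadWebsForJob: it turns the
// cronjob's Website field ("all", or a website ID stored as a string) into the list
// of websites the job should operate on.
func parseWebsiteSelection(selection string, all []Website) []Website {
	if selection == "all" {
		return all
	}
	id, err := strconv.Atoi(selection)
	if err != nil {
		// a leftover primary domain from the old format will not parse; return nothing
		return nil
	}
	for _, w := range all {
		if w.ID == uint(id) {
			return []Website{w}
		}
	}
	return nil
}

func main() {
	sites := []Website{{ID: 1, PrimaryDomain: "example.com", Alias: "example"}}
	fmt.Println(parseWebsiteSelection("1", sites))   // [{1 example.com example}]
	fmt.Println(parseWebsiteSelection("all", sites)) // every configured website
}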
@@ -9,7 +9,6 @@ import (
 	"encoding/pem"
 	"errors"
 	"fmt"
-	"github.com/1Panel-dev/1Panel/backend/utils/common"
 	"os"
 	"path"
 	"reflect"
@@ -19,6 +18,9 @@ import (
 	"syscall"
 	"time"
 
+	"github.com/1Panel-dev/1Panel/backend/utils/common"
+	"github.com/jinzhu/copier"
+
 	"github.com/1Panel-dev/1Panel/backend/i18n"
 	"github.com/spf13/afero"
@@ -55,7 +57,7 @@ type IWebsiteService interface {
 	GetWebsites() ([]response.WebsiteDTO, error)
 	CreateWebsite(create request.WebsiteCreate) error
 	OpWebsite(req request.WebsiteOp) error
-	GetWebsiteOptions() ([]string, error)
+	GetWebsiteOptions() ([]response.WebsiteOption, error)
 	UpdateWebsite(req request.WebsiteUpdate) error
 	DeleteWebsite(req request.WebsiteDelete) error
 	GetWebsite(id uint) (response.WebsiteDTO, error)
@@ -348,14 +350,18 @@ func (w WebsiteService) OpWebsite(req request.WebsiteOp) error {
 	return websiteRepo.Save(context.Background(), &website)
 }
 
-func (w WebsiteService) GetWebsiteOptions() ([]string, error) {
-	webs, err := websiteRepo.GetBy()
+func (w WebsiteService) GetWebsiteOptions() ([]response.WebsiteOption, error) {
+	webs, err := websiteRepo.List()
 	if err != nil {
 		return nil, err
 	}
-	var datas []string
+	var datas []response.WebsiteOption
 	for _, web := range webs {
-		datas = append(datas, web.PrimaryDomain)
+		var item response.WebsiteOption
+		if err := copier.Copy(&item, &web); err != nil {
+			return nil, err
+		}
+		datas = append(datas, item)
 	}
 	return datas, nil
 }
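For reference, a minimal sketch of the payload shape the reworked GetWebsiteOptions now hands to the frontend, assuming standard encoding/json marshaling of the WebsiteOption DTO added above; the example values are made up.

package main

import (
	"encoding/json"
	"fmt"
)

// WebsiteOption copies the struct and json tags added to the DTO layer in this commit.
type WebsiteOption struct {
	ID            uint   `json:"id"`
	PrimaryDomain string `json:"primaryDomain"`
	Alias         string `json:"alias"`
}

func main() {
	// Example values are made up; the real data comes from websiteRepo.List().
	opts := []WebsiteOption{{ID: 1, PrimaryDomain: "example.com", Alias: "example"}}
	out, _ := json.Marshal(opts)
	fmt.Println(string(out)) // [{"id":1,"primaryDomain":"example.com","alias":"example"}]
}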
@@ -65,6 +65,7 @@ func Init() {
 		migrations.AddTablePHPExtensions,
 		migrations.AddTableDatabasePostgresql,
 		migrations.AddPostgresqlSuperUser,
+		migrations.UpdateCronjobWithWebsite,
 	})
 	if err := m.Migrate(); err != nil {
 		global.LOG.Error(err)
@@ -132,6 +132,12 @@ var AddTableDatabasePostgresql = &gormigrate.Migration{
 			return err
 		}
 		for _, job := range jobs {
+			if job.DBName == "all" {
+				if err := tx.Model(&model.Cronjob{}).Where("id = ?", job.ID).Update("db_type", "mysql").Error; err != nil {
+					global.LOG.Errorf("update db type of cronjob %s failed, err: %v", job.Name, err)
+					continue
+				}
+			}
 			var db model.DatabaseMysql
 			if err := tx.Where("id == ?", job.DBName).First(&db).Error; err != nil {
 				continue
@@ -158,3 +164,27 @@ var AddPostgresqlSuperUser = &gormigrate.Migration{
 		return nil
 	},
 }
+
+var UpdateCronjobWithWebsite = &gormigrate.Migration{
+	ID: "20230809-update-cronjob-with-website",
+	Migrate: func(tx *gorm.DB) error {
+		var cronjobs []model.Cronjob
+		if err := tx.Where("(type = ? OR type = ?) AND website != ?", "website", "cutWebsiteLog", "all").Find(&cronjobs).Error; err != nil {
+			return err
+		}
+
+		for _, job := range cronjobs {
+			var web model.Website
+			if err := tx.Where("primary_domain = ?", job.Website).First(&web).Error; err != nil {
+				continue
+			}
+			if err := tx.Model(&model.Cronjob{}).
+				Where("id = ?", job.ID).
+				Updates(map[string]interface{}{"website": web.ID}).Error; err != nil {
+				continue
+			}
+		}
+
+		return nil
+	},
+}
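The UpdateCronjobWithWebsite migration exists so that cronjobs created before this change, which stored a primary domain in the website column, keep working once the column holds website IDs. Below is a database-free sketch of the same remapping with hypothetical names; it is not part of the commit.

package main

import "fmt"

// remapCronjobWebsites mimics the intent of the UpdateCronjobWithWebsite migration
// without a database: cronjobs that still store a primary domain are rewritten to
// store the matching website ID; "all" and unknown domains are left untouched.
// All names here are illustrative.
func remapCronjobWebsites(jobs map[string]string, idByDomain map[string]uint) map[string]string {
	out := make(map[string]string, len(jobs))
	for name, website := range jobs {
		if website == "all" {
			out[name] = website
			continue
		}
		if id, ok := idByDomain[website]; ok {
			out[name] = fmt.Sprint(id)
			continue
		}
		out[name] = website // no matching website: leave as-is, like the migration's continue
	}
	return out
}

func main() {
	jobs := map[string]string{"backup-site": "example.com", "cut-logs": "all"}
	ids := map[string]uint{"example.com": 1}
	fmt.Println(remapCronjobWebsites(jobs, ids)) // map[backup-site:1 cut-logs:all]
}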
@@ -141,7 +141,17 @@
                     :label="$t('commons.table.all')"
                     value="all"
                 />
-                <el-option v-for="item in websiteOptions" :key="item" :value="item" :label="item" />
+                <el-option
+                    v-for="(item, index) in websiteOptions"
+                    :key="index"
+                    :value="item.id + ''"
+                    :label="item.primaryDomain"
+                >
+                    <span>{{ item.primaryDomain }}</span>
+                    <el-tag class="tagClass">
+                        {{ item.alias }}
+                    </el-tag>
+                </el-option>
             </el-select>
             <span class="input-help" v-if="dialogData.rowData!.type === 'cutWebsiteLog'">
                 {{ $t('cronjob.cutWebsiteLogHelper') }}
@@ -351,7 +361,7 @@ const handleClose = () => {
 const localDirID = ref();
 
 const containerOptions = ref();
-const websiteOptions = ref();
+const websiteOptions = ref([]);
 const backupOptions = ref();
 const appOptions = ref();
 