fix: Resolve the issue where the backup list fails to open when backup records are abnormal (#3717)

ssongliu 2024-01-26 17:08:19 +08:00 committed by GitHub
parent 338ade095a
commit 9f2227cf19
6 changed files with 61 additions and 58 deletions
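
In essence, the backend change makes the per-record size lookup fail soft: when the backup account or its storage client for a record cannot be loaded, the record is still returned with a zero size (and the failure is cached per source), instead of the whole query returning an error, which is what previously kept the backup list from opening. A minimal, self-contained sketch of that pattern follows; the record, sizeClient, newClient and loadSizes names are illustrative stand-ins, not the project's actual types or functions.

    package main

    import (
        "errors"
        "fmt"
    )

    // record is a stand-in for a backup record row.
    type record struct {
        Source, Path string
    }

    // sizeClient is a stand-in for a storage client that can report object sizes.
    type sizeClient interface {
        Size(path string) (int64, error)
    }

    type localClient struct{}

    func (localClient) Size(path string) (int64, error) { return 42, nil }

    // newClient fails for unknown sources, mimicking a broken backup account.
    func newClient(source string) (sizeClient, error) {
        if source != "LOCAL" {
            return nil, errors.New("no such backup account")
        }
        return localClient{}, nil
    }

    // helper caches the per-source lookup result so a broken account is only tried once.
    type helper struct {
        ok     bool
        client sizeClient
    }

    // loadSizes returns one size per record; records whose client cannot be
    // built keep a zero size instead of failing the whole listing.
    func loadSizes(records []record) []int64 {
        cache := map[string]helper{}
        sizes := make([]int64, len(records))
        for i, r := range records {
            h, seen := cache[r.Source]
            if !seen {
                c, err := newClient(r.Source)
                if err != nil {
                    cache[r.Source] = helper{} // remember the failure and keep going
                    continue
                }
                h = helper{ok: true, client: c}
                cache[r.Source] = h
            }
            if h.ok {
                if s, err := h.client.Size(r.Path); err == nil {
                    sizes[i] = s
                }
            }
        }
        return sizes
    }

    func main() {
        recs := []record{{"LOCAL", "a.tar.gz"}, {"S3", "b.tar.gz"}, {"LOCAL", "c.tar.gz"}}
        fmt.Println(loadSizes(recs)) // prints [42 0 42]
    }

The frontend half of the fix builds on this behavior: rows whose size stays 0 get their download/recover buttons disabled, and the record-search requests are given a 5-minute timeout, presumably so slow size lookups against remote storage do not hit the default request timeout.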

View File

@@ -86,34 +86,11 @@ func (u *BackupService) SearchRecordsWithPage(search dto.RecordSearch) (int64, [
         commonRepo.WithByType(search.Type),
         backupRepo.WithByDetailName(search.DetailName),
     )
-    var datas []dto.BackupRecords
-    clientMap := make(map[string]loadSizeHelper)
-    for i := 0; i < len(records); i++ {
-        var item dto.BackupRecords
-        if err := copier.Copy(&item, &records[i]); err != nil {
-            return 0, nil, errors.WithMessage(constant.ErrStructTransform, err.Error())
-        }
-        itemPath := path.Join(records[i].FileDir, records[i].FileName)
-        if _, ok := clientMap[records[i].Source]; !ok {
-            backup, err := backupRepo.Get(commonRepo.WithByType(records[i].Source))
-            if err != nil {
-                global.LOG.Errorf("load backup model %s from db failed, err: %v", records[i].Source, err)
-                return total, datas, err
-            }
-            client, err := u.NewClient(&backup)
-            if err != nil {
-                global.LOG.Errorf("load backup client %s from db failed, err: %v", records[i].Source, err)
-                return total, datas, err
-            }
-            item.Size, _ = client.Size(path.Join(strings.TrimLeft(backup.BackupPath, "/"), itemPath))
-            datas = append(datas, item)
-            clientMap[records[i].Source] = loadSizeHelper{backupPath: strings.TrimLeft(backup.BackupPath, "/"), client: client}
-            continue
-        }
-        item.Size, _ = clientMap[records[i].Source].client.Size(path.Join(clientMap[records[i].Source].backupPath, itemPath))
-        datas = append(datas, item)
-    }
+    if err != nil {
+        return 0, nil, err
+    }
+    datas, err := u.loadRecordSize(records)
     return total, datas, err
 }
@@ -123,38 +100,16 @@ func (u *BackupService) SearchRecordsByCronjobWithPage(search dto.RecordSearchBy
         commonRepo.WithOrderBy("created_at desc"),
         backupRepo.WithByCronID(search.CronjobID),
     )
-    var datas []dto.BackupRecords
-    clientMap := make(map[string]loadSizeHelper)
-    for i := 0; i < len(records); i++ {
-        var item dto.BackupRecords
-        if err := copier.Copy(&item, &records[i]); err != nil {
-            return 0, nil, errors.WithMessage(constant.ErrStructTransform, err.Error())
-        }
-        itemPath := path.Join(records[i].FileDir, records[i].FileName)
-        if _, ok := clientMap[records[i].Source]; !ok {
-            backup, err := backupRepo.Get(commonRepo.WithByType(records[i].Source))
-            if err != nil {
-                global.LOG.Errorf("load backup model %s from db failed, err: %v", records[i].Source, err)
-                return total, datas, err
-            }
-            client, err := u.NewClient(&backup)
-            if err != nil {
-                global.LOG.Errorf("load backup client %s from db failed, err: %v", records[i].Source, err)
-                return total, datas, err
-            }
-            item.Size, _ = client.Size(path.Join(strings.TrimLeft(backup.BackupPath, "/"), itemPath))
-            datas = append(datas, item)
-            clientMap[records[i].Source] = loadSizeHelper{backupPath: strings.TrimLeft(backup.BackupPath, "/"), client: client}
-            continue
-        }
-        item.Size, _ = clientMap[records[i].Source].client.Size(path.Join(clientMap[records[i].Source].backupPath, itemPath))
-        datas = append(datas, item)
-    }
+    if err != nil {
+        return 0, nil, err
+    }
+    datas, err := u.loadRecordSize(records)
     return total, datas, err
 }
 type loadSizeHelper struct {
+    isOk       bool
     backupPath string
     client     cloud_storage.CloudStorageClient
 }
@@ -479,6 +434,43 @@ func (u *BackupService) loadAccessToken(backup *model.BackupAccount) error {
     return nil
 }
+func (u *BackupService) loadRecordSize(records []model.BackupRecord) ([]dto.BackupRecords, error) {
+    var datas []dto.BackupRecords
+    clientMap := make(map[string]loadSizeHelper)
+    for i := 0; i < len(records); i++ {
+        var item dto.BackupRecords
+        if err := copier.Copy(&item, &records[i]); err != nil {
+            return nil, errors.WithMessage(constant.ErrStructTransform, err.Error())
+        }
+        itemPath := path.Join(records[i].FileDir, records[i].FileName)
+        if _, ok := clientMap[records[i].Source]; !ok {
+            backup, err := backupRepo.Get(commonRepo.WithByType(records[i].Source))
+            if err != nil {
+                global.LOG.Errorf("load backup model %s from db failed, err: %v", records[i].Source, err)
+                clientMap[records[i].Source] = loadSizeHelper{}
+                datas = append(datas, item)
+                continue
+            }
+            client, err := u.NewClient(&backup)
+            if err != nil {
+                global.LOG.Errorf("load backup client %s from db failed, err: %v", records[i].Source, err)
+                clientMap[records[i].Source] = loadSizeHelper{}
+                datas = append(datas, item)
+                continue
+            }
+            item.Size, _ = client.Size(path.Join(strings.TrimLeft(backup.BackupPath, "/"), itemPath))
+            datas = append(datas, item)
+            clientMap[records[i].Source] = loadSizeHelper{backupPath: strings.TrimLeft(backup.BackupPath, "/"), client: client, isOk: true}
+            continue
+        }
+        if clientMap[records[i].Source].isOk {
+            item.Size, _ = clientMap[records[i].Source].client.Size(path.Join(clientMap[records[i].Source].backupPath, itemPath))
+        }
+        datas = append(datas, item)
+    }
+    return datas, nil
+}
 func loadLocalDir() (string, error) {
     backup, err := backupRepo.Get(commonRepo.WithByType("LOCAL"))
     if err != nil {

View File

@@ -290,9 +290,11 @@ func (u *CronjobService) uploadCronjobBackFile(cronjob model.Cronjob, accountMap
     cloudSrc := strings.TrimPrefix(file, global.CONF.System.TmpDir+"/")
     for _, account := range accounts {
         if len(account) != 0 {
+            global.LOG.Debugf("start upload file to %s, dir: %s", account, path.Join(accountMap[account].backupPath, cloudSrc))
             if _, err := accountMap[account].client.Upload(file, path.Join(accountMap[account].backupPath, cloudSrc)); err != nil {
                 return "", err
             }
+            global.LOG.Debugf("upload successful!")
         }
     }
     return cloudSrc, nil

View File

@@ -94,10 +94,10 @@ export const deleteBackupRecord = (params: { ids: number[] }) => {
     return http.post(`/settings/backup/record/del`, params);
 };
 export const searchBackupRecords = (params: Backup.SearchBackupRecord) => {
-    return http.post<ResPage<Backup.RecordInfo>>(`/settings/backup/record/search`, params);
+    return http.post<ResPage<Backup.RecordInfo>>(`/settings/backup/record/search`, params, TimeoutEnum.T_5M);
 };
 export const searchBackupRecordsByCronjob = (params: Backup.SearchBackupRecordByCronjob) => {
-    return http.post<ResPage<Backup.RecordInfo>>(`/settings/backup/record/search/bycronjob`, params);
+    return http.post<ResPage<Backup.RecordInfo>>(`/settings/backup/record/search/bycronjob`, params, TimeoutEnum.T_5M);
 };
 export const getBackupList = () => {

View File

@@ -220,12 +220,18 @@ const buttons = [
     },
     {
         label: i18n.global.t('commons.button.recover'),
+        disabled: (row: any) => {
+            return row.size === 0;
+        },
         click: (row: Backup.RecordInfo) => {
            onRecover(row);
         },
     },
     {
         label: i18n.global.t('commons.button.download'),
+        disabled: (row: any) => {
+            return row.size === 0;
+        },
         click: (row: Backup.RecordInfo) => {
            onDownload(row);
         },

View File

@@ -69,10 +69,10 @@
                 </el-row>
             </template>
             <template #rightButton>
-                <el-button @click="sync" type="primary" link v-if="mode === 'installed' && data != null">
+                <el-button @click="sync" type="primary" plain v-if="mode === 'installed' && data != null">
                     {{ $t('app.sync') }}
                 </el-button>
-                <el-button @click="openIngore" type="primary" link v-if="mode === 'upgrade'">
+                <el-button @click="openIngore" type="primary" plain v-if="mode === 'upgrade'">
                     {{ $t('app.showIgnore') }}
                 </el-button>
             </template>

View File

@@ -121,6 +121,9 @@ const onDownload = async (row: Backup.RecordInfo) => {
 const buttons = [
     {
         label: i18n.global.t('commons.button.download'),
+        disabled: (row: any) => {
+            return row.size === 0;
+        },
         click: (row: Backup.RecordInfo) => {
            onDownload(row);
         },