commit bfa111541b (parent c8971dd0ef)
@@ -34,6 +34,7 @@ type CronjobCreate struct {
     DBType           string `json:"dbType"`
     DBName           string `json:"dbName"`
     URL              string `json:"url"`
+    IsDir            bool   `json:"isDir"`
     SourceDir        string `json:"sourceDir"`
 
     SourceAccountIDs string `json:"sourceAccountIDs"`
@@ -61,6 +62,7 @@ type CronjobUpdate struct {
     DBType           string `json:"dbType"`
     DBName           string `json:"dbName"`
     URL              string `json:"url"`
+    IsDir            bool   `json:"isDir"`
     SourceDir        string `json:"sourceDir"`
 
     SourceAccountIDs string `json:"sourceAccountIDs"`
@@ -110,6 +112,7 @@ type CronjobInfo struct {
     DBType            string `json:"dbType"`
     DBName            string `json:"dbName"`
     URL               string `json:"url"`
+    IsDir             bool   `json:"isDir"`
     SourceDir         string `json:"sourceDir"`
     SourceAccountIDs  string `json:"sourceAccountIDs"`
     DownloadAccountID uint   `json:"downloadAccountID"`
@@ -26,6 +26,7 @@ type Cronjob struct {
     DBType         string `json:"dbType"`
     DBName         string `json:"dbName"`
     URL            string `json:"url"`
+    IsDir          bool   `json:"isDir"`
     SourceDir      string `json:"sourceDir"`
     ExclusionRules string `json:"exclusionRules"`
 
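Taken together, the DTO and model hunks above add the same `IsDir` flag everywhere the cronjob passes through. A minimal sketch of the payload shape this implies, assuming the trimmed-down struct below (it mirrors only two fields of the real DTO): when `isDir` is false, `sourceDir` doubles as a comma-separated file list.

```go
package main

import (
	"encoding/json"
	"fmt"
)

// Trimmed-down stand-in for the cronjob DTO above, kept to two fields purely
// for illustration; the real struct carries many more.
type cronjobPayload struct {
	IsDir     bool   `json:"isDir"`
	SourceDir string `json:"sourceDir"`
}

func main() {
	// Directory mode: sourceDir holds a single path.
	dir, _ := json.Marshal(cronjobPayload{IsDir: true, SourceDir: "/opt/data"})
	fmt.Println(string(dir)) // {"isDir":true,"sourceDir":"/opt/data"}

	// File mode: sourceDir carries a comma-separated list of files, which the
	// backend later splits with strings.Split(sourceDir, ",").
	list, _ := json.Marshal(cronjobPayload{IsDir: false, SourceDir: "/opt/a.log,/opt/b.txt"})
	fmt.Println(string(list)) // {"isDir":false,"sourceDir":"/opt/a.log,/opt/b.txt"}
}
```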
@@ -13,6 +13,7 @@ import (
     "github.com/1Panel-dev/1Panel/agent/constant"
     "github.com/1Panel-dev/1Panel/agent/global"
     "github.com/1Panel-dev/1Panel/agent/utils/common"
+    "github.com/1Panel-dev/1Panel/agent/utils/files"
 )
 
 func (u *CronjobService) handleApp(cronjob model.Cronjob, startTime time.Time) error {
@@ -138,9 +139,18 @@ func (u *CronjobService) handleDirectory(cronjob model.Cronjob, startTime time.T
     }
     fileName := fmt.Sprintf("directory%s_%s.tar.gz", strings.ReplaceAll(cronjob.SourceDir, "/", "_"), startTime.Format(constant.DateTimeSlimLayout)+common.RandStrAndNum(5))
     backupDir := path.Join(global.CONF.System.TmpDir, fmt.Sprintf("%s/%s", cronjob.Type, cronjob.Name))
-    if err := handleTar(cronjob.SourceDir, backupDir, fileName, cronjob.ExclusionRules, cronjob.Secret); err != nil {
+    fileOp := files.NewFileOp()
+    if cronjob.IsDir {
+        if err := fileOp.TarGzCompressPro(true, cronjob.SourceDir, path.Join(backupDir, fileName), cronjob.ExclusionRules, cronjob.Secret); err != nil {
             return err
         }
+    } else {
+        fileLists := strings.Split(cronjob.SourceDir, ",")
+        if err := fileOp.Compress(fileLists, backupDir, fileName, files.TarGz, cronjob.Secret); err != nil {
+            return err
+        }
+    }
     var record model.BackupRecord
     record.From = "cronjob"
     record.Type = "directory"
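The branch above is the behavioral core of the change: a directory source keeps exclusion-rule support through `TarGzCompressPro`, while a file-mode source is split on commas and handed to the generic `Compress` helper, which takes no exclusion rules. A small self-contained sketch of just the splitting step (the paths are invented for the example):

```go
package main

import (
	"fmt"
	"strings"
)

func main() {
	// In file mode, SourceDir stores the selected files joined by commas
	// (the frontend builds this string; see the onSubmit hunk further down).
	sourceDir := "/opt/app/config.yaml,/var/log/app/app.log"

	// The handler splits it back into the slice passed to Compress.
	fileLists := strings.Split(sourceDir, ",")
	fmt.Println(fileLists) // [/opt/app/config.yaml /var/log/app/app.log]
}
```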
@@ -278,7 +278,7 @@ func (u *CronjobService) uploadCronjobBackFile(cronjob model.Cronjob, accountMap
     cloudSrc := strings.TrimPrefix(file, global.CONF.System.TmpDir+"/")
     for _, account := range accounts {
         if len(account) != 0 {
-            global.LOG.Debugf("start upload file to %s, dir: %s", account, path.Join(accountMap[account].backupPath, cloudSrc))
+            global.LOG.Debugf("start upload file to %s, dir: %s", accountMap[account].name, path.Join(accountMap[account].backupPath, cloudSrc))
             if _, err := accountMap[account].client.Upload(file, path.Join(accountMap[account].backupPath, cloudSrc)); err != nil {
                 return "", err
             }
@@ -268,7 +268,7 @@ var UpdateSnapshot = &gormigrate.Migration{
 }
 
 var UpdateCronjob = &gormigrate.Migration{
-    ID: "20241011-update-cronjob",
+    ID: "20241017-update-cronjob",
     Migrate: func(tx *gorm.DB) error {
         return tx.AutoMigrate(&model.Cronjob{}, &model.JobRecords{})
     },
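A hedged note on the ID bump above: gormigrate records every migration ID it has executed, so installs that already ran `20241011-update-cronjob` would skip it forever; renaming it to `20241017-update-cronjob` makes it look like a new migration and re-triggers the `AutoMigrate`, which is what picks up the new column. A minimal standalone sketch of that mechanism (the model and database here are placeholders, not 1Panel's):

```go
package main

import (
	"log"

	"github.com/go-gormigrate/gormigrate/v2"
	"gorm.io/driver/sqlite"
	"gorm.io/gorm"
)

// Placeholder model carrying the newly migrated flag; not the real 1Panel model.
type Cronjob struct {
	ID    uint
	IsDir bool
}

func main() {
	db, err := gorm.Open(sqlite.Open("example.db"), &gorm.Config{})
	if err != nil {
		log.Fatal(err)
	}
	m := gormigrate.New(db, gormigrate.DefaultOptions, []*gormigrate.Migration{
		{
			// A fresh ID is treated as a brand-new migration and runs once.
			ID: "20241017-update-cronjob",
			Migrate: func(tx *gorm.DB) error {
				return tx.AutoMigrate(&Cronjob{})
			},
		},
	})
	if err := m.Migrate(); err != nil {
		log.Fatalf("migration failed: %v", err)
	}
}
```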
@@ -2,8 +2,6 @@ package files
 
 import (
     "fmt"
-    "os"
-    "path"
     "path/filepath"
     "strings"
 
@@ -36,81 +34,26 @@ func (t TarGzArchiver) Extract(filePath, dstDir string, secret string) error {
 }
 
 func (t TarGzArchiver) Compress(sourcePaths []string, dstFile string, secret string) error {
-    var err error
-    path := ""
-    itemDir := ""
+    var itemDirs []string
     for _, item := range sourcePaths {
-        itemDir += filepath.Base(item) + " "
+        itemDirs = append(itemDirs, fmt.Sprintf("\"%s\"", filepath.Base(item)))
     }
-    aheadDir := dstFile[:strings.LastIndex(dstFile, "/")]
+    itemDir := strings.Join(itemDirs, " ")
+    aheadDir := filepath.Dir(sourcePaths[0])
     if len(aheadDir) == 0 {
         aheadDir = "/"
     }
-    path += fmt.Sprintf("- -C %s %s", aheadDir, itemDir)
     commands := ""
     if len(secret) != 0 {
-        extraCmd := "| openssl enc -aes-256-cbc -salt -k '" + secret + "' -out"
-        commands = fmt.Sprintf("tar -zcf %s %s %s", path, extraCmd, dstFile)
+        extraCmd := fmt.Sprintf("| openssl enc -aes-256-cbc -salt -k '%s' -out '%s'", secret, dstFile)
+        commands = fmt.Sprintf("tar -zcf - -C \"%s\" %s %s", aheadDir, itemDir, extraCmd)
         global.LOG.Debug(strings.ReplaceAll(commands, fmt.Sprintf(" %s ", secret), "******"))
     } else {
-        commands = fmt.Sprintf("tar -zcf %s -C %s %s", dstFile, aheadDir, itemDir)
+        commands = fmt.Sprintf("tar -zcf \"%s\" -C \"%s\" %s", dstFile, aheadDir, itemDir)
         global.LOG.Debug(commands)
     }
-    if err = cmd.ExecCmd(commands); err != nil {
+    if err := cmd.ExecCmd(commands); err != nil {
         return err
     }
     return nil
 }
-
-func (t TarGzArchiver) CompressPro(withDir bool, src, dst, secret, exclusionRules string) error {
-    workdir := src
-    srcItem := "."
-    if withDir {
-        workdir = path.Dir(src)
-        srcItem = path.Base(src)
-    }
-    commands := ""
-
-    exMap := make(map[string]struct{})
-    exStr := ""
-    excludes := strings.Split(exclusionRules, ";")
-    excludes = append(excludes, "*.sock")
-    for _, exclude := range excludes {
-        if len(exclude) == 0 {
-            continue
-        }
-        if _, ok := exMap[exclude]; ok {
-            continue
-        }
-        exStr += " --exclude "
-        exStr += exclude
-        exMap[exclude] = struct{}{}
-    }
-
-    if len(secret) != 0 {
-        commands = fmt.Sprintf("tar -zcf - %s | openssl enc -aes-256-cbc -salt -pbkdf2 -k '%s' -out %s", srcItem, secret, dst)
-        global.LOG.Debug(strings.ReplaceAll(commands, fmt.Sprintf(" %s ", secret), "******"))
-    } else {
-        commands = fmt.Sprintf("tar zcf %s %s %s", dst, exStr, srcItem)
-        global.LOG.Debug(commands)
-    }
-    return cmd.ExecCmdWithDir(commands, workdir)
-}
-
-func (t TarGzArchiver) ExtractPro(src, dst string, secret string) error {
-    if _, err := os.Stat(path.Dir(dst)); err != nil && os.IsNotExist(err) {
-        if err = os.MkdirAll(path.Dir(dst), os.ModePerm); err != nil {
-            return err
-        }
-    }
-
-    commands := ""
-    if len(secret) != 0 {
-        commands = fmt.Sprintf("openssl enc -d -aes-256-cbc -salt -pbkdf2 -k '%s' -in %s | tar -zxf - > /root/log", secret, src)
-        global.LOG.Debug(strings.ReplaceAll(commands, fmt.Sprintf(" %s ", secret), "******"))
-    } else {
-        commands = fmt.Sprintf("tar zxvf %s", src)
-        global.LOG.Debug(commands)
-    }
-    return cmd.ExecCmdWithDir(commands, dst)
-}
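For the `Compress` rewrite above, a standalone reproduction of the new command construction may help: item names are now quoted and `-C` points at the directory of the first source instead of being derived from the destination file, so paths containing spaces no longer break the shell command. The removed `CompressPro`/`ExtractPro` helpers appear to be superseded by the `fileOp.TarGzCompressPro` path used in `handleDirectory` above. The snippet only prints the command string and never runs tar; the sample paths are invented.

```go
package main

import (
	"fmt"
	"path/filepath"
	"strings"
)

// Mirrors the command-building part of the new Compress (unencrypted branch).
func buildTarCommand(sourcePaths []string, dstFile string) string {
	var itemDirs []string
	for _, item := range sourcePaths {
		itemDirs = append(itemDirs, fmt.Sprintf("\"%s\"", filepath.Base(item)))
	}
	itemDir := strings.Join(itemDirs, " ")

	aheadDir := filepath.Dir(sourcePaths[0])
	if len(aheadDir) == 0 {
		aheadDir = "/"
	}
	return fmt.Sprintf("tar -zcf \"%s\" -C \"%s\" %s", dstFile, aheadDir, itemDir)
}

func main() {
	cmd := buildTarCommand(
		[]string{"/opt/data/my file.txt", "/opt/data/logs"},
		"/opt/1panel/tmp/directory_backup.tar.gz",
	)
	fmt.Println(cmd)
	// tar -zcf "/opt/1panel/tmp/directory_backup.tar.gz" -C "/opt/data" "my file.txt" "logs"
}
```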
@@ -25,6 +25,8 @@ export namespace Cronjob {
         dbType: string;
         dbName: string;
         url: string;
+        isDir: boolean;
+        files: Array<Item>;
         sourceDir: string;
 
         sourceAccountIDs: string;
@@ -34,6 +36,9 @@ export namespace Cronjob {
         status: string;
         secret: string;
     }
+    export interface Item {
+        val: string;
+    }
     export interface CronjobCreate {
         name: string;
         type: string;
@@ -887,7 +887,8 @@ const message = {
         cronSpecHelper: 'Enter the correct execution period',
         cleanHelper:
             'This operation records all job execution records, backup files, and log files. Do you want to continue?',
-        directory: 'Backup Directory',
+        backupContent: 'Backup Content',
+        directory: 'Backup Directory / File',
         sourceDir: 'Backup Directory',
         snapshot: 'System Snapshot',
         allOptionHelper:
@@ -843,7 +843,8 @@ const message = {
         cronSpec: '執行周期',
         cronSpecHelper: '請輸入正確的執行周期',
         cleanHelper: '該操作將所有任務執行記錄、備份文件和日誌文件,是否繼續?',
-        directory: '備份目錄',
+        backupContent: '備份內容',
+        directory: '備份目錄 / 檔案',
         sourceDir: '備份目錄',
         snapshot: '系統快照',
         allOptionHelper: '當前計劃任務為備份所有【{0}】,暫不支持直接下載,可在【{0}】備份列表中查看',
@@ -844,7 +844,8 @@ const message = {
         cronSpec: '执行周期',
         cronSpecHelper: '请输入正确的执行周期',
         cleanHelper: '该操作将所有任务执行记录、备份文件和日志文件,是否继续?',
-        directory: '备份目录',
+        backupContent: '备份内容',
+        directory: '备份目录 / 文件',
         sourceDir: '备份目录',
         snapshot: '系统快照',
         allOptionHelper: '当前计划任务为备份所有【{0}】,暂不支持直接下载,可在【{0}】备份列表中查看',
@@ -67,7 +67,7 @@
             <el-form-item :label="$t('cronjob.taskName')" prop="name">
                 <el-input :disabled="dialogData.title === 'edit'" clearable v-model.trim="dialogData.rowData!.name" />
             </el-form-item>
-            <el-card>
+            <el-card class="mb-5">
                 <el-form-item :label="$t('cronjob.cronSpec')" prop="specCustom">
                     <el-checkbox :label="$t('container.custom')" v-model="dialogData.rowData!.specCustom" />
                 </el-form-item>
@@ -376,17 +376,45 @@
                     </el-form-item>
                 </div>
 
-                <el-form-item
-                    v-if="dialogData.rowData!.type === 'directory'"
-                    :label="$t('cronjob.sourceDir')"
-                    prop="sourceDir"
-                >
+                <el-form-item :label="$t('cronjob.backupContent')">
+                    <el-radio-group v-model="dialogData.rowData!.isDir">
+                        <el-radio :value="true">{{ $t('file.dir') }}</el-radio>
+                        <el-radio :value="false">{{ $t('file.file') }}</el-radio>
+                    </el-radio-group>
+                </el-form-item>
+
+                <el-form-item v-if="dialogData.rowData!.type === 'directory' && dialogData.rowData!.isDir" prop="sourceDir">
                     <el-input v-model="dialogData.rowData!.sourceDir">
                         <template #prepend>
                             <FileList @choose="loadDir" :dir="true"></FileList>
                         </template>
                     </el-input>
                 </el-form-item>
+                <div v-if="dialogData.rowData!.type === 'directory' && !dialogData.rowData!.isDir" class="mb-5">
+                    <el-input>
+                        <template #prepend>
+                            <FileList @choose="loadFile" :dir="false" />
+                        </template>
+                    </el-input>
+                    <el-form-item prop="files">
+                        <div style="width: 100%">
+                            <ComplexTable
+                                :show-header="false"
+                                :data="dialogData.rowData.files"
+                                v-if="dialogData.rowData.files"
+                            >
+                                <el-table-column prop="val" />
+                                <el-table-column width="60">
+                                    <template #default="scope">
+                                        <el-button link type="primary" @click="handleFileDelete(scope.$index)">
+                                            {{ $t('commons.button.delete') }}
+                                        </el-button>
+                                    </template>
+                                </el-table-column>
+                            </ComplexTable>
+                        </div>
+                    </el-form-item>
+                </div>
 
                 <div v-if="isBackup()">
                     <el-form-item :label="$t('setting.backupAccount')" prop="backupAccountList">
@@ -530,11 +558,19 @@ const acceptParams = (params: DialogProps): void => {
         dialogData.value.rowData.specs = dialogData.value.rowData.spec.split(',');
     }
     dialogData.value.rowData.specs = dialogData.value.rowData.specs || [];
+    dialogData.value.rowData.files = [];
+    if (!dialogData.value.rowData.isDir) {
+        let files = dialogData.value.rowData.sourceDir?.split(',') || [];
+        for (const item of files) {
+            dialogData.value.rowData.files.push({ val: item });
+        }
+    }
     if (dialogData.value.title === 'create') {
         changeType();
         dialogData.value.rowData.scriptMode = 'input';
         dialogData.value.rowData.dbType = 'mysql';
         dialogData.value.rowData.downloadAccountID = 1;
+        dialogData.value.rowData.isDir = true;
     }
     if (dialogData.value.rowData.sourceAccountIDs) {
         dialogData.value.rowData.sourceAccounts = [];
@@ -697,6 +733,14 @@ const verifySpec = (rule: any, value: any, callback: any) => {
     callback();
 };
 
+const verifyFiles = (rule: any, value: any, callback: any) => {
+    if (!dialogData.value.rowData!.files || dialogData.value.rowData!.files.length === 0) {
+        callback(new Error(i18n.global.t('commons.rule.requiredInput')));
+        return;
+    }
+    callback();
+};
+
 const rules = reactive({
     name: [Rules.requiredInput, Rules.noSpace],
     type: [Rules.requiredSelect],
@@ -709,6 +753,7 @@ const rules = reactive({
     website: [Rules.requiredSelect],
     dbName: [Rules.requiredSelect],
     url: [Rules.requiredInput],
+    files: [{ validator: verifyFiles, trigger: 'blur', required: true }],
     sourceDir: [Rules.requiredInput],
     backupAccounts: [Rules.requiredSelect],
     defaultDownload: [Rules.requiredSelect],
@@ -726,6 +771,15 @@ const loadScriptDir = async (path: string) => {
     dialogData.value.rowData!.script = path;
 };
 
+const loadFile = async (path: string) => {
+    for (const item of dialogData.value.rowData!.files) {
+        if (item.val === path) {
+            return;
+        }
+    }
+    dialogData.value.rowData!.files.push({ val: path });
+};
+
 const hasDay = (item: any) => {
     return item.specType === 'perMonth' || item.specType === 'perNDay';
 };
@@ -812,6 +866,10 @@ const handleSpecCustomDelete = (index: number) => {
     dialogData.value.rowData!.specs.splice(index, 1);
 };
 
+const handleFileDelete = (index: number) => {
+    dialogData.value.rowData!.files.splice(index, 1);
+};
+
 const loadBackups = async () => {
     const res = await getBackupList();
     backupOptions.value = [];
@@ -885,7 +943,7 @@ function hasExclusionRules() {
     return (
         dialogData.value.rowData!.type === 'app' ||
         dialogData.value.rowData!.type === 'website' ||
-        dialogData.value.rowData!.type === 'directory'
+        (dialogData.value.rowData!.type === 'directory' && dialogData.value.rowData!.isDir)
     );
 }
 
@@ -907,6 +965,13 @@ const onSubmit = async (formEl: FormInstance | undefined) => {
     } else {
         specs = dialogData.value.rowData.specs;
     }
+    if (!dialogData.value.rowData.isDir) {
+        let files = [];
+        for (const item of dialogData.value.rowData.files) {
+            files.push(item.val);
+        }
+        dialogData.value.rowData.sourceDir = files.join(',');
+    }
     dialogData.value.rowData.sourceAccountIDs = dialogData.value.rowData.sourceAccounts.join(',');
     dialogData.value.rowData.spec = specs.join(',');
     if (!formEl) return;