@tachybase/module-backup 0.23.8
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/.turbo/turbo-build.log +12 -0
- package/README.md +118 -0
- package/README.zh-CN.md +118 -0
- package/client.d.ts +2 -0
- package/client.js +1 -0
- package/dist/client/Configuration.d.ts +2 -0
- package/dist/client/DuplicatorProvider.d.ts +5 -0
- package/dist/client/index.d.ts +5 -0
- package/dist/client/index.js +1 -0
- package/dist/client/locale/index.d.ts +4 -0
- package/dist/externalVersion.js +14 -0
- package/dist/index.d.ts +2 -0
- package/dist/index.js +39 -0
- package/dist/locale/en-US.json +44 -0
- package/dist/locale/ja-JP.d.ts +25 -0
- package/dist/locale/ja-JP.js +46 -0
- package/dist/locale/ko_KR.json +50 -0
- package/dist/locale/pt-BR.d.ts +26 -0
- package/dist/locale/pt-BR.js +48 -0
- package/dist/locale/zh-CN.json +50 -0
- package/dist/node_modules/@hapi/topo/lib/index.d.ts +60 -0
- package/dist/node_modules/@hapi/topo/lib/index.js +1 -0
- package/dist/node_modules/@hapi/topo/package.json +1 -0
- package/dist/node_modules/archiver/LICENSE +22 -0
- package/dist/node_modules/archiver/index.js +68 -0
- package/dist/node_modules/archiver/lib/core.js +974 -0
- package/dist/node_modules/archiver/lib/error.js +40 -0
- package/dist/node_modules/archiver/lib/plugins/json.js +110 -0
- package/dist/node_modules/archiver/lib/plugins/tar.js +167 -0
- package/dist/node_modules/archiver/lib/plugins/zip.js +120 -0
- package/dist/node_modules/archiver/package.json +1 -0
- package/dist/node_modules/decompress/index.js +16 -0
- package/dist/node_modules/decompress/license +9 -0
- package/dist/node_modules/decompress/package.json +1 -0
- package/dist/node_modules/mkdirp/LICENSE +21 -0
- package/dist/node_modules/mkdirp/bin/cmd.js +68 -0
- package/dist/node_modules/mkdirp/index.js +1 -0
- package/dist/node_modules/mkdirp/lib/find-made.js +29 -0
- package/dist/node_modules/mkdirp/lib/mkdirp-manual.js +64 -0
- package/dist/node_modules/mkdirp/lib/mkdirp-native.js +39 -0
- package/dist/node_modules/mkdirp/lib/opts-arg.js +23 -0
- package/dist/node_modules/mkdirp/lib/path-arg.js +29 -0
- package/dist/node_modules/mkdirp/lib/use-native.js +10 -0
- package/dist/node_modules/mkdirp/package.json +1 -0
- package/dist/node_modules/mkdirp/readme.markdown +266 -0
- package/dist/node_modules/semver/LICENSE +15 -0
- package/dist/node_modules/semver/bin/semver.js +188 -0
- package/dist/node_modules/semver/classes/comparator.js +141 -0
- package/dist/node_modules/semver/classes/index.js +5 -0
- package/dist/node_modules/semver/classes/range.js +554 -0
- package/dist/node_modules/semver/classes/semver.js +302 -0
- package/dist/node_modules/semver/functions/clean.js +6 -0
- package/dist/node_modules/semver/functions/cmp.js +52 -0
- package/dist/node_modules/semver/functions/coerce.js +60 -0
- package/dist/node_modules/semver/functions/compare-build.js +7 -0
- package/dist/node_modules/semver/functions/compare-loose.js +3 -0
- package/dist/node_modules/semver/functions/compare.js +5 -0
- package/dist/node_modules/semver/functions/diff.js +65 -0
- package/dist/node_modules/semver/functions/eq.js +3 -0
- package/dist/node_modules/semver/functions/gt.js +3 -0
- package/dist/node_modules/semver/functions/gte.js +3 -0
- package/dist/node_modules/semver/functions/inc.js +19 -0
- package/dist/node_modules/semver/functions/lt.js +3 -0
- package/dist/node_modules/semver/functions/lte.js +3 -0
- package/dist/node_modules/semver/functions/major.js +3 -0
- package/dist/node_modules/semver/functions/minor.js +3 -0
- package/dist/node_modules/semver/functions/neq.js +3 -0
- package/dist/node_modules/semver/functions/parse.js +16 -0
- package/dist/node_modules/semver/functions/patch.js +3 -0
- package/dist/node_modules/semver/functions/prerelease.js +6 -0
- package/dist/node_modules/semver/functions/rcompare.js +3 -0
- package/dist/node_modules/semver/functions/rsort.js +3 -0
- package/dist/node_modules/semver/functions/satisfies.js +10 -0
- package/dist/node_modules/semver/functions/sort.js +3 -0
- package/dist/node_modules/semver/functions/valid.js +6 -0
- package/dist/node_modules/semver/index.js +1 -0
- package/dist/node_modules/semver/internal/constants.js +35 -0
- package/dist/node_modules/semver/internal/debug.js +9 -0
- package/dist/node_modules/semver/internal/identifiers.js +23 -0
- package/dist/node_modules/semver/internal/lrucache.js +40 -0
- package/dist/node_modules/semver/internal/parse-options.js +15 -0
- package/dist/node_modules/semver/internal/re.js +217 -0
- package/dist/node_modules/semver/package.json +1 -0
- package/dist/node_modules/semver/preload.js +2 -0
- package/dist/node_modules/semver/range.bnf +16 -0
- package/dist/node_modules/semver/ranges/gtr.js +4 -0
- package/dist/node_modules/semver/ranges/intersects.js +7 -0
- package/dist/node_modules/semver/ranges/ltr.js +4 -0
- package/dist/node_modules/semver/ranges/max-satisfying.js +25 -0
- package/dist/node_modules/semver/ranges/min-satisfying.js +24 -0
- package/dist/node_modules/semver/ranges/min-version.js +61 -0
- package/dist/node_modules/semver/ranges/outside.js +80 -0
- package/dist/node_modules/semver/ranges/simplify.js +47 -0
- package/dist/node_modules/semver/ranges/subset.js +247 -0
- package/dist/node_modules/semver/ranges/to-comparators.js +8 -0
- package/dist/node_modules/semver/ranges/valid.js +11 -0
- package/dist/server/app-migrator.d.ts +16 -0
- package/dist/server/app-migrator.js +61 -0
- package/dist/server/collection-group-manager.d.ts +4 -0
- package/dist/server/collection-group-manager.js +29 -0
- package/dist/server/commands/restore-command.d.ts +2 -0
- package/dist/server/commands/restore-command.js +67 -0
- package/dist/server/dumper.d.ts +71 -0
- package/dist/server/dumper.js +421 -0
- package/dist/server/errors/restore-check-error.d.ts +3 -0
- package/dist/server/errors/restore-check-error.js +32 -0
- package/dist/server/field-value-writer.d.ts +9 -0
- package/dist/server/field-value-writer.js +99 -0
- package/dist/server/index.d.ts +1 -0
- package/dist/server/index.js +33 -0
- package/dist/server/locale/zh-CN.d.ts +9 -0
- package/dist/server/locale/zh-CN.js +30 -0
- package/dist/server/resourcers/backup-files.d.ts +25 -0
- package/dist/server/resourcers/backup-files.js +206 -0
- package/dist/server/restorer.d.ts +35 -0
- package/dist/server/restorer.js +320 -0
- package/dist/server/server.d.ts +8 -0
- package/dist/server/server.js +52 -0
- package/dist/server/utils.d.ts +5 -0
- package/dist/server/utils.js +78 -0
- package/dist/swagger/index.d.ts +392 -0
- package/dist/swagger/index.js +447 -0
- package/package.json +48 -0
- package/server.d.ts +2 -0
- package/server.js +1 -0

package/dist/server/resourcers/backup-files.js
@@ -0,0 +1,206 @@
+var __create = Object.create;
+var __defProp = Object.defineProperty;
+var __getOwnPropDesc = Object.getOwnPropertyDescriptor;
+var __getOwnPropNames = Object.getOwnPropertyNames;
+var __getProtoOf = Object.getPrototypeOf;
+var __hasOwnProp = Object.prototype.hasOwnProperty;
+var __export = (target, all) => {
+  for (var name in all)
+    __defProp(target, name, { get: all[name], enumerable: true });
+};
+var __copyProps = (to, from, except, desc) => {
+  if (from && typeof from === "object" || typeof from === "function") {
+    for (let key of __getOwnPropNames(from))
+      if (!__hasOwnProp.call(to, key) && key !== except)
+        __defProp(to, key, { get: () => from[key], enumerable: !(desc = __getOwnPropDesc(from, key)) || desc.enumerable });
+  }
+  return to;
+};
+var __toESM = (mod, isNodeMode, target) => (target = mod != null ? __create(__getProtoOf(mod)) : {}, __copyProps(
+  // If the importer is in node compatibility mode or this is not an ESM
+  // file that has been converted to a CommonJS file using a Babel-
+  // compatible transform (i.e. "__esModule" has not been set), then set
+  // "default" to the CommonJS "module.exports" for node compatibility.
+  isNodeMode || !mod || !mod.__esModule ? __defProp(target, "default", { value: mod, enumerable: true }) : target,
+  mod
+));
+var __toCommonJS = (mod) => __copyProps(__defProp({}, "__esModule", { value: true }), mod);
+var backup_files_exports = {};
+__export(backup_files_exports, {
+  default: () => backup_files_default
+});
+module.exports = __toCommonJS(backup_files_exports);
+var import_node_fs = __toESM(require("node:fs"));
+var import_promises = __toESM(require("node:fs/promises"));
+var import_node_os = __toESM(require("node:os"));
+var import_node_path = __toESM(require("node:path"));
+var import_actions = require("@tachybase/actions");
+var import_utils = require("@tachybase/utils");
+var import_dumper = require("../dumper");
+var import_restorer = require("../restorer");
+var import_server2 = __toESM(require("../server"));
+var backup_files_default = {
+  name: "backupFiles",
+  middleware: async (ctx, next) => {
+    if (ctx.action.actionName !== "upload") {
+      return next();
+    }
+    const storage = import_utils.koaMulter.diskStorage({
+      destination: import_node_os.default.tmpdir(),
+      filename: function(req, file, cb) {
+        const randomName = Date.now().toString() + Math.random().toString().slice(2);
+        cb(null, randomName);
+      }
+    });
+    const upload = (0, import_utils.koaMulter)({ storage }).single("file");
+    return upload(ctx, next);
+  },
+  actions: {
+    async list(ctx, next) {
+      const { page = import_actions.DEFAULT_PAGE, pageSize = import_actions.DEFAULT_PER_PAGE } = ctx.action.params;
+      const dumper = new import_dumper.Dumper(ctx.app);
+      const backupFiles = await dumper.allBackUpFilePaths({
+        includeInProgress: true
+      });
+      const count = backupFiles.length;
+      const rows = await Promise.all(
+        backupFiles.slice((page - 1) * pageSize, page * pageSize).map(async (file) => {
+          return await import_dumper.Dumper.getFileStatus(file.endsWith(".lock") ? file.replace(".lock", "") : file);
+        })
+      );
+      ctx.body = {
+        count,
+        rows,
+        page: Number(page),
+        pageSize: Number(pageSize),
+        totalPage: Math.ceil(count / pageSize)
+      };
+      await next();
+    },
+    async get(ctx, next) {
+      const { filterByTk } = ctx.action.params;
+      const dumper = new import_dumper.Dumper(ctx.app);
+      const filePath = dumper.backUpFilePath(filterByTk);
+      async function sendError(message, status = 404) {
+        ctx.body = { status: "error", message };
+        ctx.status = status;
+      }
+      try {
+        const fileState = await import_dumper.Dumper.getFileStatus(filePath);
+        if (fileState.status !== "ok") {
+          await sendError(`Backup file ${filterByTk} not found`);
+        } else {
+          const restorer = new import_restorer.Restorer(ctx.app, {
+            backUpFilePath: filePath
+          });
+          const restoreMeta = await restorer.parseBackupFile();
+          ctx.body = {
+            ...fileState,
+            meta: restoreMeta
+          };
+        }
+      } catch (e) {
+        if (e.code === "ENOENT") {
+          await sendError(`Backup file ${filterByTk} not found`);
+        }
+      }
+      await next();
+    },
+    /**
+     * create dump task
+     * @param ctx
+     * @param next
+     */
+    async create(ctx, next) {
+      var _a;
+      const data = ctx.request.body;
+      let taskId;
+      const app = ctx.app;
+      if ((_a = app.worker) == null ? void 0 : _a.available) {
+        taskId = await ctx.app.worker.callPluginMethod({
+          plugin: import_server2.default,
+          method: "workerCreateBackUp",
+          params: {
+            dataTypes: data.dataTypes
+          }
+        });
+        ctx.app.noticeManager.notify("backup", { msg: "done" });
+      } else {
+        const plugin = app.pm.get(import_server2.default);
+        taskId = await plugin.workerCreateBackUp(data);
+      }
+      ctx.body = {
+        key: taskId
+      };
+      await next();
+    },
+    /**
+     * download backup file
+     * @param ctx
+     * @param next
+     */
+    async download(ctx, next) {
+      const { filterByTk } = ctx.action.params;
+      const dumper = new import_dumper.Dumper(ctx.app);
+      const filePath = dumper.backUpFilePath(filterByTk);
+      const fileState = await import_dumper.Dumper.getFileStatus(filePath);
+      if (fileState.status !== "ok") {
+        throw new Error(`Backup file ${filterByTk} not found`);
+      }
+      ctx.attachment(filePath);
+      ctx.body = import_node_fs.default.createReadStream(filePath);
+      await next();
+    },
+    async restore(ctx, next) {
+      const { dataTypes, filterByTk, key } = ctx.action.params.values;
+      const filePath = (() => {
+        if (key) {
+          const tmpDir = import_node_os.default.tmpdir();
+          return import_node_path.default.resolve(tmpDir, key);
+        }
+        if (filterByTk) {
+          const dumper = new import_dumper.Dumper(ctx.app);
+          return dumper.backUpFilePath(filterByTk);
+        }
+      })();
+      if (!filePath) {
+        throw new Error(`Backup file ${filterByTk} not found`);
+      }
+      const args = ["restore", "-f", filePath];
+      for (const dataType of dataTypes) {
+        args.push("-g", dataType);
+      }
+      await ctx.app.runCommand(...args);
+      await next();
+    },
+    async destroy(ctx, next) {
+      const { filterByTk } = ctx.action.params;
+      const dumper = new import_dumper.Dumper(ctx.app);
+      const filePath = dumper.backUpFilePath(filterByTk);
+      await import_promises.default.unlink(filePath);
+      ctx.body = {
+        status: "ok"
+      };
+      await next();
+    },
+    async upload(ctx, next) {
+      const file = ctx.file;
+      const fileName = file.filename;
+      const restorer = new import_restorer.Restorer(ctx.app, {
+        backUpFilePath: file.path
+      });
+      const restoreMeta = await restorer.parseBackupFile();
+      ctx.body = {
+        key: fileName,
+        meta: restoreMeta
+      };
+      await next();
+    },
+    async dumpableCollections(ctx, next) {
+      ctx.withoutDataWrapping = true;
+      const dumper = new import_dumper.Dumper(ctx.app);
+      ctx.body = await dumper.dumpableCollectionsGroupByGroup();
+      await next();
+    }
+  }
+};
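
The resourcer above is the module's whole HTTP surface: `list` and `get` report backup-file status, `create` schedules a dump (on a worker when one is available), `upload` stages a file in the OS temp dir, and `restore` shells out to the `restore` CLI command. A minimal client sketch, assuming the standard Tachybase `/api/<resource>:<action>` routing and a hypothetical base URL and token:

```ts
// Hypothetical client for the backupFiles resource. BASE and the token are
// assumptions; the ":action" URL style is the usual resourcer routing.
const BASE = 'http://localhost:3000/api';
const headers = { Authorization: 'Bearer <token>', 'Content-Type': 'application/json' };

// Page through existing backups (mirrors the paging done by the `list` action).
// Note: a data-wrapping middleware may nest this payload under `data`.
const files = await fetch(`${BASE}/backupFiles:list?page=1&pageSize=20`, { headers })
  .then((res) => res.json());

// Schedule a dump of selected data groups; the action answers { key: taskId }.
// The group names here are illustrative; see backupFiles:dumpableCollections.
const task = await fetch(`${BASE}/backupFiles:create`, {
  method: 'POST',
  headers,
  body: JSON.stringify({ dataTypes: ['required', 'user'] }),
}).then((res) => res.json());
```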
package/dist/server/restorer.d.ts
@@ -0,0 +1,35 @@
+import { DumpRulesGroupType } from '@tachybase/database';
+import { Application } from '@tachybase/server';
+import { AppMigrator, AppMigratorOptions } from './app-migrator';
+type RestoreOptions = {
+    groups: Set<DumpRulesGroupType>;
+};
+export declare class Restorer extends AppMigrator {
+    direction: "restore";
+    backUpFilePath: string;
+    decompressed: boolean;
+    importedCollections: string[];
+    constructor(app: Application, options: AppMigratorOptions & {
+        backUpFilePath?: string;
+    });
+    static sortCollectionsByInherits(collections: Array<{
+        name: string;
+        inherits: string[];
+    }>): any;
+    setBackUpFilePath(backUpFilePath: string): void;
+    parseBackupFile(): Promise<any>;
+    restore(options: RestoreOptions): Promise<void>;
+    getImportMeta(): Promise<any>;
+    checkMeta(): Promise<void>;
+    importCollections(options: RestoreOptions): Promise<void>;
+    decompressBackup(backupFilePath: string): Promise<void>;
+    readCollectionMeta(collectionName: string): Promise<any>;
+    importCollection(options: {
+        name: string;
+        insert?: boolean;
+        clear?: boolean;
+        rowCondition?: (row: any) => boolean;
+    }): Promise<any>;
+    importDb(options: RestoreOptions): Promise<void>;
+}
+export {};
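
The declaration file lays the restore pipeline out plainly: decompress, check metadata, import collections, then replay dumped SQL. A minimal server-side sketch, assuming `app` is a configured `Application`, that the package's `server` entry re-exports `Restorer` (an assumption based on the file list above), and that `'user'` is a valid dump group on the install:

```ts
import { Application } from '@tachybase/server';
// Assumed import path; the server entry is expected to re-export Restorer.
import { Restorer } from '@tachybase/module-backup/server';

async function restoreBackup(app: Application, fileName: string) {
  // A bare file name resolves against storage/duplicator (see setBackUpFilePath).
  const restorer = new Restorer(app, { backUpFilePath: fileName });
  const meta = await restorer.parseBackupFile(); // decompress + read the meta file
  app.logger.info(`backup was produced by tachybase ${meta.version}`);
  // restore() runs checkMeta() itself, then imports collections and raw SQL.
  await restorer.restore({ groups: new Set(['required', 'user']) });
}
```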
package/dist/server/restorer.js
@@ -0,0 +1,320 @@
+var __create = Object.create;
+var __defProp = Object.defineProperty;
+var __getOwnPropDesc = Object.getOwnPropertyDescriptor;
+var __getOwnPropNames = Object.getOwnPropertyNames;
+var __getProtoOf = Object.getPrototypeOf;
+var __hasOwnProp = Object.prototype.hasOwnProperty;
+var __export = (target, all) => {
+  for (var name in all)
+    __defProp(target, name, { get: all[name], enumerable: true });
+};
+var __copyProps = (to, from, except, desc) => {
+  if (from && typeof from === "object" || typeof from === "function") {
+    for (let key of __getOwnPropNames(from))
+      if (!__hasOwnProp.call(to, key) && key !== except)
+        __defProp(to, key, { get: () => from[key], enumerable: !(desc = __getOwnPropDesc(from, key)) || desc.enumerable });
+  }
+  return to;
+};
+var __toESM = (mod, isNodeMode, target) => (target = mod != null ? __create(__getProtoOf(mod)) : {}, __copyProps(
+  // If the importer is in node compatibility mode or this is not an ESM
+  // file that has been converted to a CommonJS file using a Babel-
+  // compatible transform (i.e. "__esModule" has not been set), then set
+  // "default" to the CommonJS "module.exports" for node compatibility.
+  isNodeMode || !mod || !mod.__esModule ? __defProp(target, "default", { value: mod, enumerable: true }) : target,
+  mod
+));
+var __toCommonJS = (mod) => __copyProps(__defProp({}, "__esModule", { value: true }), mod);
+var restorer_exports = {};
+__export(restorer_exports, {
+  Restorer: () => Restorer
+});
+module.exports = __toCommonJS(restorer_exports);
+var import_fs = __toESM(require("fs"));
+var import_promises = __toESM(require("fs/promises"));
+var import_path = __toESM(require("path"));
+var import_database = require("@tachybase/database");
+var Topo = __toESM(require("@hapi/topo"));
+var import_decompress = __toESM(require("decompress"));
+var import_lodash = __toESM(require("lodash"));
+var import_semver = __toESM(require("semver"));
+var import_app_migrator = require("./app-migrator");
+var import_restore_check_error = require("./errors/restore-check-error");
+var import_field_value_writer = require("./field-value-writer");
+var import_utils = require("./utils");
+class Restorer extends import_app_migrator.AppMigrator {
+  direction = "restore";
+  backUpFilePath;
+  decompressed = false;
+  importedCollections = [];
+  constructor(app, options) {
+    super(app, options);
+    const { backUpFilePath } = options;
+    if (backUpFilePath) {
+      this.setBackUpFilePath(backUpFilePath);
+    }
+  }
+  static sortCollectionsByInherits(collections) {
+    var _a;
+    const sorter = new Topo.Sorter();
+    for (const collection of collections) {
+      const options = {
+        group: collection.name
+      };
+      if ((_a = collection.inherits) == null ? void 0 : _a.length) {
+        options.after = collection.inherits;
+      }
+      sorter.add(collection, options);
+    }
+    return sorter.sort();
+  }
+  setBackUpFilePath(backUpFilePath) {
+    if (import_path.default.isAbsolute(backUpFilePath)) {
+      this.backUpFilePath = backUpFilePath;
+    } else if (import_path.default.basename(backUpFilePath) === backUpFilePath) {
+      const dirname = import_path.default.resolve(process.cwd(), "storage", "duplicator");
+      this.backUpFilePath = import_path.default.resolve(dirname, backUpFilePath);
+    } else {
+      this.backUpFilePath = import_path.default.resolve(process.cwd(), backUpFilePath);
+    }
+  }
+  async parseBackupFile() {
+    await this.decompressBackup(this.backUpFilePath);
+    return await this.getImportMeta();
+  }
+  async restore(options) {
+    await this.decompressBackup(this.backUpFilePath);
+    await this.checkMeta();
+    await this.importCollections(options);
+    await this.importDb(options);
+    await this.clearWorkDir();
+  }
+  async getImportMeta() {
+    const metaFile = import_path.default.resolve(this.workDir, "meta");
+    return JSON.parse(await import_promises.default.readFile(metaFile, "utf8"));
+  }
+  async checkMeta() {
+    const meta = await this.getImportMeta();
+    if (!this.app.db.inDialect(meta["dialect"])) {
+      throw new import_restore_check_error.RestoreCheckError(`this backup file can only be imported in database ${meta["dialect"]}`);
+    }
+    const checkEnv = (envName) => {
+      const valueInPackage = meta[envName] || "";
+      const valueInEnv = process.env[envName] || "";
+      if (valueInPackage && valueInEnv !== valueInPackage) {
+        throw new import_restore_check_error.RestoreCheckError(`for use this backup file, please set ${envName}=${valueInPackage}`);
+      }
+    };
+    for (const envName of ["DB_UNDERSCORED", "DB_SCHEMA", "COLLECTION_MANAGER_SCHEMA", "DB_TABLE_PREFIX"]) {
+      checkEnv(envName);
+    }
+    const version = meta["version"];
+    if (import_semver.default.lt(version, "0.18.0-alpha.2")) {
+      throw new import_restore_check_error.RestoreCheckError(`this backup file can only be imported in tachybase ${version}`);
+    }
+  }
+  async importCollections(options) {
+    const importCollection = async (collectionName) => {
+      await this.importCollection({
+        name: collectionName
+      });
+    };
+    const { dumpableCollectionsGroupByGroup, delayCollections } = await this.parseBackupFile();
+    await importCollection("applicationPlugins");
+    await this.app.reload();
+    const metaCollections = dumpableCollectionsGroupByGroup.required;
+    for (const collection of metaCollections) {
+      if (collection.name === "applicationPlugins") {
+        continue;
+      }
+      if (delayCollections.includes(collection.name)) {
+        continue;
+      }
+      await importCollection(collection.name);
+    }
+    options.groups.delete("required");
+    const importGroups = [...options.groups];
+    for (const group of importGroups) {
+      const collections = dumpableCollectionsGroupByGroup[group];
+      if (!collections) {
+        this.app.logger.warn(`group ${group} not found`);
+        continue;
+      }
+      for (const collection of Restorer.sortCollectionsByInherits(collections)) {
+        await importCollection(collection.name);
+      }
+    }
+    await this.app.reload();
+    await this.app.db.getRepository("collections").load();
+    await this.app.db.sync();
+    for (const collectionName of delayCollections) {
+      const delayRestore = this.app.db.getCollection(collectionName).options.dumpRules["delayRestore"];
+      await delayRestore(this);
+    }
+    await this.emitAsync("restoreCollectionsFinished");
+  }
+  async decompressBackup(backupFilePath) {
+    if (!this.decompressed) await (0, import_decompress.default)(backupFilePath, this.workDir);
+  }
+  async readCollectionMeta(collectionName) {
+    const dir = this.workDir;
+    const collectionMetaPath = import_path.default.resolve(dir, "collections", collectionName, "meta");
+    const metaContent = await import_promises.default.readFile(collectionMetaPath, "utf8");
+    return JSON.parse(metaContent);
+  }
+  async importCollection(options) {
+    const app = this.app;
+    const db = app.db;
+    const collectionName = options.name;
+    if (!collectionName) {
+      throw new Error("collection name is required");
+    }
+    const dir = this.workDir;
+    const collectionDataPath = import_path.default.resolve(dir, "collections", collectionName, "data");
+    const collectionMetaPath = import_path.default.resolve(dir, "collections", collectionName, "meta");
+    try {
+      await import_promises.default.stat(collectionMetaPath);
+    } catch (e) {
+      app.logger.info(`${collectionName} has no meta`);
+      return;
+    }
+    const metaContent = await import_promises.default.readFile(collectionMetaPath, "utf8");
+    const meta = JSON.parse(metaContent);
+    let addSchemaTableName = meta.tableName;
+    if (!this.app.db.inDialect("postgres") && (0, import_lodash.isPlainObject)(addSchemaTableName)) {
+      addSchemaTableName = addSchemaTableName.tableName;
+    }
+    const columns = meta["columns"];
+    if (columns.length === 0) {
+      app.logger.info(`${collectionName} has no columns`);
+      return;
+    }
+    const fieldAttributes = import_lodash.default.mapValues(meta.attributes, (attr) => {
+      if (attr.isCollectionField) {
+        const fieldClass = db.fieldTypes.get(attr.type);
+        if (!fieldClass) throw new Error(`field type ${attr.type} not found`);
+        return new fieldClass(attr.typeOptions, {
+          database: db
+        });
+      }
+      return void 0;
+    });
+    const rawAttributes = import_lodash.default.mapValues(meta.attributes, (attr, key) => {
+      if (attr.isCollectionField) {
+        const field = fieldAttributes[key];
+        return {
+          ...field.toSequelize(),
+          field: attr.field
+        };
+      }
+      const DataTypeClass = import_database.DataTypes[db.options.dialect][attr.type] || import_database.DataTypes[attr.type];
+      const obj = {
+        ...attr,
+        type: new DataTypeClass()
+      };
+      if (attr.defaultValue && ["JSON", "JSONB", "JSONTYPE"].includes(attr.type)) {
+        obj.defaultValue = JSON.stringify(attr.defaultValue);
+      }
+      return obj;
+    });
+    if (options.clear !== false) {
+      await db.sequelize.getQueryInterface().dropTable(addSchemaTableName, {
+        cascade: true
+      });
+      await db.sequelize.getQueryInterface().createTable(addSchemaTableName, rawAttributes);
+      if (meta.inherits) {
+        for (const inherit of import_lodash.default.uniq(meta.inherits)) {
+          const parentMeta = await this.readCollectionMeta(inherit);
+          const sql2 = `ALTER TABLE ${app.db.utils.quoteTable(addSchemaTableName)} INHERIT ${app.db.utils.quoteTable(
+            parentMeta.tableName
+          )};`;
+          await db.sequelize.query(sql2);
+        }
+      }
+    }
+    const rows = await (0, import_utils.readLines)(collectionDataPath);
+    if (rows.length === 0) {
+      app.logger.info(`${collectionName} has no data to import`);
+      this.importedCollections.push(collectionName);
+      return;
+    }
+    const rowsWithMeta = rows.map(
+      (row) => JSON.parse(row).map((val, index) => [columns[index], val]).reduce((carry, [column, val]) => {
+        const field = fieldAttributes[column];
+        carry[column] = field ? import_field_value_writer.FieldValueWriter.write(field, val) : val;
+        return carry;
+      }, {})
+    ).filter((row) => {
+      if (options.rowCondition) {
+        return options.rowCondition(row);
+      }
+      return true;
+    });
+    if (rowsWithMeta.length === 0) {
+      app.logger.info(`${collectionName} has no data to import`);
+      this.importedCollections.push(collectionName);
+      return;
+    }
+    const insertGeneratorAttributes = import_lodash.default.mapKeys(rawAttributes, (value, key) => {
+      return value.field;
+    });
+    const sql = db.sequelize.queryInterface.queryGenerator.bulkInsertQuery(
+      addSchemaTableName,
+      rowsWithMeta,
+      {},
+      insertGeneratorAttributes
+    );
+    if (options.insert === false) {
+      return sql;
+    }
+    await app.db.sequelize.query(sql, {
+      type: "INSERT"
+    });
+    app.logger.info(`${collectionName} imported with ${rowsWithMeta.length} rows`);
+    if (meta.autoIncrement) {
+      const queryInterface = app.db.queryInterface;
+      await queryInterface.setAutoIncrementVal({
+        tableInfo: (0, import_lodash.isPlainObject)(meta.tableName) ? meta.tableName : {
+          schema: "public",
+          tableName: meta.tableName
+        },
+        columnName: meta.autoIncrement.fieldName,
+        seqName: meta.autoIncrement.seqName,
+        currentVal: meta.autoIncrement.currentVal
+      });
+    }
+    this.importedCollections.push(collectionName);
+  }
+  async importDb(options) {
+    const sqlContentPath = import_path.default.resolve(this.workDir, "sql-content.json");
+    if (!import_fs.default.existsSync(sqlContentPath)) {
+      return;
+    }
+    const sqlData = JSON.parse(await import_promises.default.readFile(sqlContentPath, "utf8"));
+    const sqlContent = Object.keys(sqlData).filter((key) => options.groups.has(sqlData[key].group)).reduce((acc, key) => {
+      acc[key] = sqlData[key];
+      return acc;
+    }, {});
+    const queries = Object.values(
+      sqlContent
+    );
+    for (const sqlData2 of queries) {
+      try {
+        this.app.logger.info(`import sql: ${sqlData2.sql}`);
+        for (const sql of import_lodash.default.castArray(sqlData2.sql)) {
+          await this.app.db.sequelize.query(sql);
+        }
+      } catch (e) {
+        if (e.name === "SequelizeDatabaseError") {
+          this.app.logger.error(e.message);
+        } else {
+          throw e;
+        }
+      }
+    }
+  }
+}
+// Annotate the CommonJS export names for ESM import in node:
+0 && (module.exports = {
+  Restorer
+});
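
`sortCollectionsByInherits` leans on `@hapi/topo` so that, for Postgres table inheritance, parent collections are imported before the children whose tables `INHERIT` from them. The same ordering in isolation, with hypothetical collection names:

```ts
import * as Topo from '@hapi/topo';

type CollectionMeta = { name: string; inherits?: string[] };

// Hypothetical collections: `students` inherits from `people`, so `people`
// must be imported (and its table created) first.
const collections: CollectionMeta[] = [
  { name: 'students', inherits: ['people'] },
  { name: 'people' },
];

const sorter = new Topo.Sorter<CollectionMeta>();
for (const collection of collections) {
  sorter.add(collection, {
    group: collection.name,
    // `after` defers a node until every named group has been placed.
    ...(collection.inherits?.length ? { after: collection.inherits } : {}),
  });
}
console.log(sorter.sort().map((c) => c.name)); // ['people', 'students']
```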
package/dist/server/server.js
@@ -0,0 +1,52 @@
+var __create = Object.create;
+var __defProp = Object.defineProperty;
+var __getOwnPropDesc = Object.getOwnPropertyDescriptor;
+var __getOwnPropNames = Object.getOwnPropertyNames;
+var __getProtoOf = Object.getPrototypeOf;
+var __hasOwnProp = Object.prototype.hasOwnProperty;
+var __export = (target, all) => {
+  for (var name in all)
+    __defProp(target, name, { get: all[name], enumerable: true });
+};
+var __copyProps = (to, from, except, desc) => {
+  if (from && typeof from === "object" || typeof from === "function") {
+    for (let key of __getOwnPropNames(from))
+      if (!__hasOwnProp.call(to, key) && key !== except)
+        __defProp(to, key, { get: () => from[key], enumerable: !(desc = __getOwnPropDesc(from, key)) || desc.enumerable });
+  }
+  return to;
+};
+var __toESM = (mod, isNodeMode, target) => (target = mod != null ? __create(__getProtoOf(mod)) : {}, __copyProps(
+  // If the importer is in node compatibility mode or this is not an ESM
+  // file that has been converted to a CommonJS file using a Babel-
+  // compatible transform (i.e. "__esModule" has not been set), then set
+  // "default" to the CommonJS "module.exports" for node compatibility.
+  isNodeMode || !mod || !mod.__esModule ? __defProp(target, "default", { value: mod, enumerable: true }) : target,
+  mod
+));
+var __toCommonJS = (mod) => __copyProps(__defProp({}, "__esModule", { value: true }), mod);
+var server_exports = {};
+__export(server_exports, {
+  default: () => PluginBackupRestoreServer
+});
+module.exports = __toCommonJS(server_exports);
+var import_server = require("@tachybase/server");
+var import_dumper = require("./dumper");
+var import_backup_files = __toESM(require("./resourcers/backup-files"));
+class PluginBackupRestoreServer extends import_server.Plugin {
+  beforeLoad() {
+    this.app.acl.registerSnippet({
+      name: `pm.${this.name}`,
+      actions: ["backupFiles:*"]
+    });
+  }
+  async load() {
+    this.app.resourcer.define(import_backup_files.default);
+  }
+  async workerCreateBackUp(data) {
+    const dumper = new import_dumper.Dumper(this.app);
+    return dumper.runDumpTask({
+      groups: new Set(data.dataTypes)
+    });
+  }
+}
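
The plugin itself stays thin: an ACL snippet, the resource definition, and `workerCreateBackUp`, which both the worker path and the in-process fallback of the `create` action end up calling. A sketch of triggering a dump from another server-side plugin, using the same `pm.get` lookup as the resourcer (the import path and plugin name are assumptions):

```ts
import { Plugin } from '@tachybase/server';
// Assumed import path for the plugin class shown above.
import PluginBackupRestoreServer from '@tachybase/module-backup/server';

export class NightlyBackupPlugin extends Plugin {
  async triggerBackup() {
    const backup = this.app.pm.get(PluginBackupRestoreServer) as PluginBackupRestoreServer;
    // 'required' is the group the restorer always imports; others are install-specific.
    return backup.workerCreateBackUp({ dataTypes: ['required'] });
  }
}
```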
package/dist/server/utils.d.ts
@@ -0,0 +1,5 @@
+import { Database } from '@tachybase/database';
+export declare const DUMPED_EXTENSION = "tbdump";
+export declare function sqlAdapter(database: Database, sql: string): string;
+export declare function readLines(filePath: string): Promise<any[]>;
+export declare function humanFileSize(bytes: any, si?: boolean, dp?: number): string;
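
Finally, the small utils surface: `DUMPED_EXTENSION` names the backup archive suffix and `humanFileSize` formats byte counts for the file listing. A usage sketch, with the deep import path being an assumption about the published layout:

```ts
// Assumed deep import path into the built package.
import { DUMPED_EXTENSION, humanFileSize } from '@tachybase/module-backup/dist/server/utils';

const archiveName = `backup-20240101.${DUMPED_EXTENSION}`; // "backup-20240101.tbdump"

// si=true uses powers of 1000 (kB/MB/...), dp controls decimal places.
console.log(humanFileSize(3_351_000, true, 1)); // e.g. "3.4 MB"
```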