@tachybase/module-backup 0.23.8
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/.turbo/turbo-build.log +12 -0
- package/README.md +118 -0
- package/README.zh-CN.md +118 -0
- package/client.d.ts +2 -0
- package/client.js +1 -0
- package/dist/client/Configuration.d.ts +2 -0
- package/dist/client/DuplicatorProvider.d.ts +5 -0
- package/dist/client/index.d.ts +5 -0
- package/dist/client/index.js +1 -0
- package/dist/client/locale/index.d.ts +4 -0
- package/dist/externalVersion.js +14 -0
- package/dist/index.d.ts +2 -0
- package/dist/index.js +39 -0
- package/dist/locale/en-US.json +44 -0
- package/dist/locale/ja-JP.d.ts +25 -0
- package/dist/locale/ja-JP.js +46 -0
- package/dist/locale/ko_KR.json +50 -0
- package/dist/locale/pt-BR.d.ts +26 -0
- package/dist/locale/pt-BR.js +48 -0
- package/dist/locale/zh-CN.json +50 -0
- package/dist/node_modules/@hapi/topo/lib/index.d.ts +60 -0
- package/dist/node_modules/@hapi/topo/lib/index.js +1 -0
- package/dist/node_modules/@hapi/topo/package.json +1 -0
- package/dist/node_modules/archiver/LICENSE +22 -0
- package/dist/node_modules/archiver/index.js +68 -0
- package/dist/node_modules/archiver/lib/core.js +974 -0
- package/dist/node_modules/archiver/lib/error.js +40 -0
- package/dist/node_modules/archiver/lib/plugins/json.js +110 -0
- package/dist/node_modules/archiver/lib/plugins/tar.js +167 -0
- package/dist/node_modules/archiver/lib/plugins/zip.js +120 -0
- package/dist/node_modules/archiver/package.json +1 -0
- package/dist/node_modules/decompress/index.js +16 -0
- package/dist/node_modules/decompress/license +9 -0
- package/dist/node_modules/decompress/package.json +1 -0
- package/dist/node_modules/mkdirp/LICENSE +21 -0
- package/dist/node_modules/mkdirp/bin/cmd.js +68 -0
- package/dist/node_modules/mkdirp/index.js +1 -0
- package/dist/node_modules/mkdirp/lib/find-made.js +29 -0
- package/dist/node_modules/mkdirp/lib/mkdirp-manual.js +64 -0
- package/dist/node_modules/mkdirp/lib/mkdirp-native.js +39 -0
- package/dist/node_modules/mkdirp/lib/opts-arg.js +23 -0
- package/dist/node_modules/mkdirp/lib/path-arg.js +29 -0
- package/dist/node_modules/mkdirp/lib/use-native.js +10 -0
- package/dist/node_modules/mkdirp/package.json +1 -0
- package/dist/node_modules/mkdirp/readme.markdown +266 -0
- package/dist/node_modules/semver/LICENSE +15 -0
- package/dist/node_modules/semver/bin/semver.js +188 -0
- package/dist/node_modules/semver/classes/comparator.js +141 -0
- package/dist/node_modules/semver/classes/index.js +5 -0
- package/dist/node_modules/semver/classes/range.js +554 -0
- package/dist/node_modules/semver/classes/semver.js +302 -0
- package/dist/node_modules/semver/functions/clean.js +6 -0
- package/dist/node_modules/semver/functions/cmp.js +52 -0
- package/dist/node_modules/semver/functions/coerce.js +60 -0
- package/dist/node_modules/semver/functions/compare-build.js +7 -0
- package/dist/node_modules/semver/functions/compare-loose.js +3 -0
- package/dist/node_modules/semver/functions/compare.js +5 -0
- package/dist/node_modules/semver/functions/diff.js +65 -0
- package/dist/node_modules/semver/functions/eq.js +3 -0
- package/dist/node_modules/semver/functions/gt.js +3 -0
- package/dist/node_modules/semver/functions/gte.js +3 -0
- package/dist/node_modules/semver/functions/inc.js +19 -0
- package/dist/node_modules/semver/functions/lt.js +3 -0
- package/dist/node_modules/semver/functions/lte.js +3 -0
- package/dist/node_modules/semver/functions/major.js +3 -0
- package/dist/node_modules/semver/functions/minor.js +3 -0
- package/dist/node_modules/semver/functions/neq.js +3 -0
- package/dist/node_modules/semver/functions/parse.js +16 -0
- package/dist/node_modules/semver/functions/patch.js +3 -0
- package/dist/node_modules/semver/functions/prerelease.js +6 -0
- package/dist/node_modules/semver/functions/rcompare.js +3 -0
- package/dist/node_modules/semver/functions/rsort.js +3 -0
- package/dist/node_modules/semver/functions/satisfies.js +10 -0
- package/dist/node_modules/semver/functions/sort.js +3 -0
- package/dist/node_modules/semver/functions/valid.js +6 -0
- package/dist/node_modules/semver/index.js +1 -0
- package/dist/node_modules/semver/internal/constants.js +35 -0
- package/dist/node_modules/semver/internal/debug.js +9 -0
- package/dist/node_modules/semver/internal/identifiers.js +23 -0
- package/dist/node_modules/semver/internal/lrucache.js +40 -0
- package/dist/node_modules/semver/internal/parse-options.js +15 -0
- package/dist/node_modules/semver/internal/re.js +217 -0
- package/dist/node_modules/semver/package.json +1 -0
- package/dist/node_modules/semver/preload.js +2 -0
- package/dist/node_modules/semver/range.bnf +16 -0
- package/dist/node_modules/semver/ranges/gtr.js +4 -0
- package/dist/node_modules/semver/ranges/intersects.js +7 -0
- package/dist/node_modules/semver/ranges/ltr.js +4 -0
- package/dist/node_modules/semver/ranges/max-satisfying.js +25 -0
- package/dist/node_modules/semver/ranges/min-satisfying.js +24 -0
- package/dist/node_modules/semver/ranges/min-version.js +61 -0
- package/dist/node_modules/semver/ranges/outside.js +80 -0
- package/dist/node_modules/semver/ranges/simplify.js +47 -0
- package/dist/node_modules/semver/ranges/subset.js +247 -0
- package/dist/node_modules/semver/ranges/to-comparators.js +8 -0
- package/dist/node_modules/semver/ranges/valid.js +11 -0
- package/dist/server/app-migrator.d.ts +16 -0
- package/dist/server/app-migrator.js +61 -0
- package/dist/server/collection-group-manager.d.ts +4 -0
- package/dist/server/collection-group-manager.js +29 -0
- package/dist/server/commands/restore-command.d.ts +2 -0
- package/dist/server/commands/restore-command.js +67 -0
- package/dist/server/dumper.d.ts +71 -0
- package/dist/server/dumper.js +421 -0
- package/dist/server/errors/restore-check-error.d.ts +3 -0
- package/dist/server/errors/restore-check-error.js +32 -0
- package/dist/server/field-value-writer.d.ts +9 -0
- package/dist/server/field-value-writer.js +99 -0
- package/dist/server/index.d.ts +1 -0
- package/dist/server/index.js +33 -0
- package/dist/server/locale/zh-CN.d.ts +9 -0
- package/dist/server/locale/zh-CN.js +30 -0
- package/dist/server/resourcers/backup-files.d.ts +25 -0
- package/dist/server/resourcers/backup-files.js +206 -0
- package/dist/server/restorer.d.ts +35 -0
- package/dist/server/restorer.js +320 -0
- package/dist/server/server.d.ts +8 -0
- package/dist/server/server.js +52 -0
- package/dist/server/utils.d.ts +5 -0
- package/dist/server/utils.js +78 -0
- package/dist/swagger/index.d.ts +392 -0
- package/dist/swagger/index.js +447 -0
- package/package.json +48 -0
- package/server.d.ts +2 -0
- package/server.js +1 -0
|
@@ -0,0 +1,421 @@
|
|
|
1
|
+
// esbuild-generated CommonJS interop helpers (machine output; do not hand-edit).
var __create = Object.create;
var __defProp = Object.defineProperty;
var __getOwnPropDesc = Object.getOwnPropertyDescriptor;
var __getOwnPropNames = Object.getOwnPropertyNames;
var __getProtoOf = Object.getPrototypeOf;
var __hasOwnProp = Object.prototype.hasOwnProperty;
// Defines lazy, enumerable getters on `target` for every key of `all`.
var __export = (target, all) => {
  for (var name in all)
    __defProp(target, name, { get: all[name], enumerable: true });
};
// Copies own properties of `from` onto `to` as getters, skipping `except`
// and keys already present; preserves each property's enumerability.
var __copyProps = (to, from, except, desc) => {
  if (from && typeof from === "object" || typeof from === "function") {
    for (let key of __getOwnPropNames(from))
      if (!__hasOwnProp.call(to, key) && key !== except)
        __defProp(to, key, { get: () => from[key], enumerable: !(desc = __getOwnPropDesc(from, key)) || desc.enumerable });
  }
  return to;
};
// Wraps a required CommonJS module so it can be consumed like an ES module
// (synthesizes a `default` export when the module is not flagged __esModule).
var __toESM = (mod, isNodeMode, target) => (target = mod != null ? __create(__getProtoOf(mod)) : {}, __copyProps(
  // If the importer is in node compatibility mode or this is not an ESM
  // file that has been converted to a CommonJS file using a Babel-
  // compatible transform (i.e. "__esModule" has not been set), then set
  // "default" to the CommonJS "module.exports" for node compatibility.
  isNodeMode || !mod || !mod.__esModule ? __defProp(target, "default", { value: mod, enumerable: true }) : target,
  mod
));
// Produces the CommonJS `module.exports` object, marked as an ES module.
var __toCommonJS = (mod) => __copyProps(__defProp({}, "__esModule", { value: true }), mod);
// Public surface of this module: the Dumper class only.
var dumper_exports = {};
__export(dumper_exports, {
  Dumper: () => Dumper
});
module.exports = __toCommonJS(dumper_exports);
var import_fs = __toESM(require("fs"));
var import_promises = __toESM(require("fs/promises"));
var import_path = __toESM(require("path"));
var process = __toESM(require("process"));
var import_stream = __toESM(require("stream"));
var import_util = __toESM(require("util"));
var import_worker_threads = require("worker_threads");
var import_database = require("@tachybase/database");
var import_archiver = __toESM(require("archiver"));
var import_dayjs = __toESM(require("dayjs"));
var import_lodash = __toESM(require("lodash"));
var import_mkdirp = __toESM(require("mkdirp"));
var import_app_migrator = require("./app-migrator");
var import_field_value_writer = require("./field-value-writer");
var import_utils = require("./utils");
// Promisified stream.finished — used to await flush of collection data streams.
const finished = import_util.default.promisify(import_stream.default.finished);
|
|
49
|
+
// Dumper: creates application backups. It walks the dumpable collections,
// writes each collection's rows and metadata into a temporary work directory
// (this.workDir, provided by AppMigrator), then packs that directory into a
// zip archive under <cwd>/storage/backups. A "<name>.lock" sibling file marks
// a backup as in progress.
class Dumper extends import_app_migrator.AppMigrator {
  // In-flight dump promises keyed by backup file name (main thread only).
  static dumpTasks = /* @__PURE__ */ new Map();
  direction = "dump";
  // Raw SQL snippets contributed by collection-type onDump hooks, keyed by name.
  sqlContent = {};
  // Returns the pending dump promise for a task id, if one is running.
  static getTaskPromise(taskId) {
    return this.dumpTasks.get(taskId);
  }
  // Describes a backup file: "in_progress" when a .lock sibling exists,
  // otherwise "ok" with size/ctime. Rejects if neither path is a regular file.
  static async getFileStatus(filePath) {
    const lockFile = filePath + ".lock";
    const fileName = import_path.default.basename(filePath);
    return import_fs.default.promises.stat(lockFile).then((lockFileStat) => {
      if (lockFileStat.isFile()) {
        return {
          name: fileName,
          inProgress: true,
          status: "in_progress"
        };
      } else {
        throw new Error("Lock file is not a file");
      }
    }).catch((error) => {
      // ENOENT on the lock file means the dump finished; stat the backup itself.
      if (error.code === "ENOENT") {
        return import_fs.default.promises.stat(filePath).then((backupFileStat) => {
          if (backupFileStat.isFile()) {
            return {
              name: fileName,
              createdAt: backupFileStat.ctime,
              fileSize: (0, import_utils.humanFileSize)(backupFileStat.size),
              status: "ok"
            };
          } else {
            throw new Error("Path is not a file");
          }
        });
      }
      throw error;
    });
  }
  // Builds a unique backup file name: backup_<timestamp>_<4-digit random>.<ext>.
  // NOTE(review): the random suffix is embedded in the dayjs format string —
  // it works because digits are not dayjs format tokens, but it is fragile.
  static generateFileName() {
    return `backup_${(0, import_dayjs.default)().format(`YYYYMMDD_HHmmss_${Math.floor(1e3 + Math.random() * 9e3)}`)}.${import_utils.DUMPED_EXTENSION}`;
  }
  // Stores a SQL snippet to be emitted into sql-content.json by dumpDb().
  writeSQLContent(key, data) {
    this.sqlContent[key] = data;
  }
  getSQLContent(key) {
    return this.sqlContent[key];
  }
  // Flattens the group->names mapping into one list of collection names for
  // the requested groups (unknown groups contribute nothing).
  async getCollectionsByDataTypes(groups) {
    const dumpableCollectionsGroupByDataTypes = await this.collectionsGroupByDataTypes();
    return [...groups].reduce((acc, key) => {
      return acc.concat(dumpableCollectionsGroupByDataTypes[key] || []);
    }, []);
  }
  // Describes every collection in the db with its normalized dump group.
  // Collections without dump rules fall into the "unknown" group.
  async dumpableCollections() {
    return (await Promise.all(
      [...this.app.db.collections.values()].map(async (c) => {
        try {
          const dumpRules = import_database.CollectionGroupManager.unifyDumpRules(c.options.dumpRules);
          const options = {
            name: c.name,
            title: c.options.title || c.name,
            options: c.options,
            group: dumpRules == null ? void 0 : dumpRules.group,
            isView: c.isView(),
            origin: c.origin
          };
          if (c.options.inherits && c.options.inherits.length > 0) {
            options.inherits = c.options.inherits;
          }
          return options;
        } catch (e) {
          console.error(e);
          throw new Error(`collection ${c.name} has invalid dumpRules option`, { cause: e });
        }
      })
    )).map((item) => {
      if (!item.group) {
        item.group = "unknown";
      }
      return item;
    });
  }
  // Maps dump group -> array of collection names.
  async collectionsGroupByDataTypes() {
    const grouped = import_lodash.default.groupBy(await this.dumpableCollections(), "group");
    return Object.fromEntries(Object.entries(grouped).map(([key, value]) => [key, value.map((item) => item.name)]));
  }
  // Directory where finished backup archives (and .lock markers) live.
  backUpStorageDir() {
    return import_path.default.resolve(process.cwd(), "storage", "backups");
  }
  // Lists backup file paths, newest first. Lock-marked dumps are excluded
  // unless options.includeInProgress is set (then the .lock files themselves
  // are listed). Returns [] when the directory does not exist.
  async allBackUpFilePaths(options) {
    const dirname = (options == null ? void 0 : options.dir) || this.backUpStorageDir();
    const includeInProgress = options == null ? void 0 : options.includeInProgress;
    try {
      const files = await import_promises.default.readdir(dirname);
      // Base names (without .lock) of dumps still being written.
      const lockFilesSet = new Set(
        files.filter((file) => import_path.default.extname(file) === ".lock").map((file) => import_path.default.basename(file, ".lock"))
      );
      const filteredFiles = files.filter((file) => {
        const baseName = import_path.default.basename(file);
        const isLockFile = import_path.default.extname(file) === ".lock";
        const isDumpFile = import_path.default.extname(file) === `.${import_utils.DUMPED_EXTENSION}`;
        return includeInProgress && isLockFile || isDumpFile && !lockFilesSet.has(baseName);
      }).map(async (file) => {
        const filePath = import_path.default.resolve(dirname, file);
        const stats = await import_promises.default.stat(filePath);
        return { filePath, birthtime: stats.birthtime.getTime() };
      });
      const filesData = await Promise.all(filteredFiles);
      // Newest first.
      filesData.sort((a, b) => b.birthtime - a.birthtime);
      return filesData.map((fileData) => fileData.filePath);
    } catch (error) {
      // Missing backup dir is expected on fresh installs; anything else is logged.
      if (!error.message.includes("no such file or directory")) {
        console.error("Error reading directory:", error);
      }
      return [];
    }
  }
  // Absolute path for a backup file name inside the storage dir.
  backUpFilePath(fileName) {
    const dirname = this.backUpStorageDir();
    return import_path.default.resolve(dirname, fileName);
  }
  // Absolute path of the .lock marker for a backup file name.
  lockFilePath(fileName) {
    const lockFile = fileName + ".lock";
    const dirname = this.backUpStorageDir();
    return import_path.default.resolve(dirname, lockFile);
  }
  // Creates the storage dir if needed and writes the .lock marker.
  async writeLockFile(fileName) {
    const dirname = this.backUpStorageDir();
    await (0, import_mkdirp.default)(dirname);
    const filePath = this.lockFilePath(fileName);
    await import_promises.default.writeFile(filePath, "lock", "utf8");
  }
  // Removes the .lock marker (throws if it is already gone).
  async cleanLockFile(fileName) {
    const filePath = this.lockFilePath(fileName);
    await import_promises.default.unlink(filePath);
  }
  // Starts a dump. On the main thread the dump runs in the background and its
  // promise is tracked in dumpTasks; in a worker the dump is awaited inline.
  // Returns the backup file name either way.
  async runDumpTask(options) {
    const backupFileName = Dumper.generateFileName();
    await this.writeLockFile(backupFileName);
    if (import_worker_threads.isMainThread) {
      const promise = this.dump({
        groups: options.groups,
        fileName: backupFileName
      }).finally(() => {
        // NOTE(review): cleanLockFile is fire-and-forget here (not awaited),
        // so an unlink failure is silently dropped — confirm intentional.
        this.cleanLockFile(backupFileName);
        Dumper.dumpTasks.delete(backupFileName);
        this.app.noticeManager.notify("backup", { msg: "done" });
      });
      Dumper.dumpTasks.set(backupFileName, promise);
    } else {
      // NOTE(review): no try/finally in the worker path — if dump() throws,
      // the .lock file is left behind and the backup stays "in_progress".
      await this.dump({
        groups: options.groups,
        fileName: backupFileName
      });
      await this.cleanLockFile(backupFileName);
    }
    return backupFileName;
  }
  // Maps group -> collection descriptors (name/group/origin/title/isView/
  // inherits), each group sorted by collection name.
  async dumpableCollectionsGroupByGroup() {
    return (0, import_lodash.default)(await this.dumpableCollections()).map((c) => import_lodash.default.pick(c, ["name", "group", "origin", "title", "isView", "inherits"])).groupBy("group").mapValues((items) => import_lodash.default.sortBy(items, (item) => item.name)).value();
  }
  // Full dump pipeline: dump each collection in the requested groups (plus
  // "required"), write the meta file, emit SQL content, pack the work dir
  // into an archive, then clear the work dir. Returns {filePath, dirname}.
  // NOTE(review): mutates the caller's options.groups Set by adding "required".
  async dump(options) {
    const dumpingGroups = options.groups;
    dumpingGroups.add("required");
    const delayCollections = /* @__PURE__ */ new Set();
    const dumpedCollections = await this.getCollectionsByDataTypes(dumpingGroups);
    for (const collectionName of dumpedCollections) {
      const collection = this.app.db.getCollection(collectionName);
      // Collections flagged delayRestore are recorded so the restorer can
      // import them last.
      if (import_lodash.default.get(collection.options, "dumpRules.delayRestore")) {
        delayCollections.add(collectionName);
      }
      await this.dumpCollection({
        name: collectionName
      });
    }
    await this.dumpMeta({
      dumpableCollectionsGroupByGroup: import_lodash.default.pick(await this.dumpableCollectionsGroupByGroup(), [...dumpingGroups]),
      dumpedGroups: [...dumpingGroups],
      delayCollections: [...delayCollections]
    });
    await this.dumpDb(options);
    const backupFileName = options.fileName || Dumper.generateFileName();
    const filePath = await this.packDumpedDir(backupFileName);
    await this.clearWorkDir();
    return filePath;
  }
  // Runs each collection type's onDump hook (which may contribute SQL via
  // writeSQLContent), then writes the accumulated, group-filtered SQL
  // snippets to sql-content.json in the work dir.
  async dumpDb(options) {
    var _a;
    for (const collection of this.app.db.collections.values()) {
      const collectionOnDumpOption = (_a = this.app.db.collectionFactory.collectionTypes.get(
        collection.constructor
      )) == null ? void 0 : _a.onDump;
      if (collectionOnDumpOption) {
        await collectionOnDumpOption(this, collection);
      }
    }
    if (this.hasSqlContent()) {
      const dbDumpPath = import_path.default.resolve(this.workDir, "sql-content.json");
      await import_promises.default.writeFile(
        dbDumpPath,
        JSON.stringify(
          // Keep only snippets whose group was actually dumped.
          Object.keys(this.sqlContent).filter((key) => options.groups.has(this.sqlContent[key].group)).reduce((acc, key) => {
            acc[key] = this.sqlContent[key];
            return acc;
          }, {})
        ),
        "utf8"
      );
    }
  }
  // True when any onDump hook contributed raw SQL.
  hasSqlContent() {
    return Object.keys(this.sqlContent).length > 0;
  }
  // Writes the backup's "meta" file: app version, dialect, relevant env vars,
  // plus whatever the caller passes in additionalMeta. dialectOnly marks
  // backups that can only be restored on the same database dialect.
  async dumpMeta(additionalMeta = {}) {
    const metaPath = import_path.default.resolve(this.workDir, "meta");
    const metaObj = {
      version: await this.app.version.get(),
      dialect: this.app.db.sequelize.getDialect(),
      DB_UNDERSCORED: process.env.DB_UNDERSCORED,
      DB_TABLE_PREFIX: process.env.DB_TABLE_PREFIX,
      DB_SCHEMA: process.env.DB_SCHEMA,
      COLLECTION_MANAGER_SCHEMA: process.env.COLLECTION_MANAGER_SCHEMA,
      ...additionalMeta
    };
    // Postgres table inheritance cannot be recreated on other dialects.
    if (this.app.db.inDialect("postgres")) {
      if (this.app.db.inheritanceMap.nodes.size > 0) {
        metaObj["dialectOnly"] = true;
      }
    }
    // Raw SQL snippets are dialect-specific by nature.
    if (this.hasSqlContent()) {
      metaObj["dialectOnly"] = true;
    }
    await import_promises.default.writeFile(metaPath, JSON.stringify(metaObj), "utf8");
  }
  // Dumps one collection into workDir/collections/<name>/: a "data" file with
  // one JSON row-array per line (CRLF-separated) and a "meta" file describing
  // columns, attributes, inheritance and auto-increment state. Collections
  // whose type defines onDump are skipped here (handled by dumpDb instead).
  async dumpCollection(options) {
    var _a;
    const app = this.app;
    const dir = this.workDir;
    const collectionName = options.name;
    app.logger.info(`dumping collection ${collectionName}`);
    const collection = app.db.getCollection(collectionName);
    if (!collection) {
      this.app.logger.warn(`collection ${collectionName} not found`);
      return;
    }
    const collectionOnDumpOption = (_a = this.app.db.collectionFactory.collectionTypes.get(
      collection.constructor
    )) == null ? void 0 : _a.onDump;
    if (collectionOnDumpOption) {
      return;
    }
    const attributes = collection.model.tableAttributes;
    const columns = [...new Set(import_lodash.default.map(attributes, "field"))];
    const collectionDataDir = import_path.default.resolve(dir, "collections", collectionName);
    await import_promises.default.mkdir(collectionDataDir, { recursive: true });
    let count = 0;
    if (columns.length !== 0) {
      const dataFilePath = import_path.default.resolve(collectionDataDir, "data");
      const dataStream = import_fs.default.createWriteStream(dataFilePath);
      // ONLY keeps inherited-child rows out of the parent's dump (postgres).
      const rows = await app.db.sequelize.query(
        (0, import_utils.sqlAdapter)(
          app.db,
          `SELECT *
           FROM ${collection.isParent() ? "ONLY" : ""} ${collection.quotedTableName()}`
        ),
        {
          type: "SELECT"
        }
      );
      // NOTE(review): all rows are loaded into memory before streaming out —
      // very large collections may need a cursor-based approach.
      for (const row of rows) {
        const rowData = JSON.stringify(
          columns.map((col) => {
            const val = row[col];
            const field = collection.getField(col);
            return field ? import_field_value_writer.FieldValueWriter.toDumpedValue(field, val) : val;
          })
        );
        dataStream.write(rowData + "\r\n", "utf8");
      }
      dataStream.end();
      await finished(dataStream);
      count = rows.length;
    }
    const metaAttributes = import_lodash.default.mapValues(attributes, (attr, key) => {
      var _a2, _b, _c;
      const collectionField = collection.getField(key);
      const fieldOptionKeys = ["field", "primaryKey", "autoIncrement", "allowNull", "defaultValue", "unique"];
      if (collectionField) {
        const fieldAttributes = {
          field: attr.field,
          isCollectionField: true,
          type: collectionField.type,
          typeOptions: collectionField.options
        };
        // UUIDV4 defaults are function objects and can't be serialized; drop them.
        if (((_c = (_b = (_a2 = fieldAttributes.typeOptions) == null ? void 0 : _a2.defaultValue) == null ? void 0 : _b.constructor) == null ? void 0 : _c.name) === "UUIDV4") {
          delete fieldAttributes.typeOptions.defaultValue;
        }
        return fieldAttributes;
      }
      return {
        ...import_lodash.default.pick(attr, fieldOptionKeys),
        type: attr.type.constructor.toString(),
        isCollectionField: false,
        typeOptions: attr.type.options
      };
    });
    const meta = {
      name: collectionName,
      tableName: collection.getTableNameWithSchema(),
      count,
      columns,
      attributes: metaAttributes
    };
    if (collection.options.inherits) {
      meta["inherits"] = import_lodash.default.uniq(collection.options.inherits);
    }
    // Record the auto-increment position so restore can resume the sequence.
    const autoIncrAttr = collection.model.autoIncrementAttribute;
    if (autoIncrAttr && collection.model.rawAttributes[autoIncrAttr] && collection.model.rawAttributes[autoIncrAttr].autoIncrement) {
      const queryInterface = app.db.queryInterface;
      const autoIncrInfo = await queryInterface.getAutoIncrementInfo({
        tableInfo: {
          tableName: collection.model.tableName,
          schema: collection.collectionSchema()
        },
        fieldName: autoIncrAttr
      });
      meta["autoIncrement"] = {
        ...autoIncrInfo,
        fieldName: autoIncrAttr
      };
    }
    await import_promises.default.writeFile(import_path.default.resolve(collectionDataDir, "meta"), JSON.stringify(meta), "utf8");
  }
  // Zips the work dir into storage/backups/<fileName> (max compression) and
  // resolves with {filePath, dirname} once the output stream closes.
  async packDumpedDir(fileName) {
    const dirname = this.backUpStorageDir();
    await (0, import_mkdirp.default)(dirname);
    const filePath = import_path.default.resolve(dirname, fileName);
    const output = import_fs.default.createWriteStream(filePath);
    const archive = (0, import_archiver.default)("zip", {
      zlib: { level: 9 }
    });
    const onClose = new Promise((resolve, reject) => {
      output.on("close", function() {
        console.log("dumped file size: " + (0, import_utils.humanFileSize)(archive.pointer(), true));
        resolve(true);
      });
      output.on("end", function() {
        console.log("Data has been drained");
      });
      archive.on("warning", function(err) {
        // ENOENT warnings (e.g. a file vanishing mid-archive) are tolerated.
        if (err.code === "ENOENT") {
        } else {
          reject(err);
        }
      });
      archive.on("error", function(err) {
        reject(err);
      });
    });
    archive.pipe(output);
    // false: archive entries are rooted at the work dir, not nested under it.
    archive.directory(this.workDir, false);
    await archive.finalize();
    await onClose;
    return {
      filePath,
      dirname
    };
  }
}
|
|
418
|
+
// Annotate the CommonJS export names for ESM import in node:
// (dead code at runtime — `0 &&` never executes; it exists only so static
// analyzers like cjs-module-lexer can discover the named exports).
0 && (module.exports = {
  Dumper
});
|
|
@@ -0,0 +1,32 @@
|
|
|
1
|
+
// esbuild-generated CommonJS interop helpers (machine output; do not hand-edit).
var __defProp = Object.defineProperty;
var __getOwnPropDesc = Object.getOwnPropertyDescriptor;
var __getOwnPropNames = Object.getOwnPropertyNames;
var __hasOwnProp = Object.prototype.hasOwnProperty;
// Defines lazy, enumerable getters on `target` for every key of `all`.
var __export = (target, all) => {
  for (var name in all)
    __defProp(target, name, { get: all[name], enumerable: true });
};
// Copies own properties of `from` onto `to` as getters, skipping `except`
// and keys already present; preserves each property's enumerability.
var __copyProps = (to, from, except, desc) => {
  if (from && typeof from === "object" || typeof from === "function") {
    for (let key of __getOwnPropNames(from))
      if (!__hasOwnProp.call(to, key) && key !== except)
        __defProp(to, key, { get: () => from[key], enumerable: !(desc = __getOwnPropDesc(from, key)) || desc.enumerable });
  }
  return to;
};
// Produces the CommonJS `module.exports` object, marked as an ES module.
var __toCommonJS = (mod) => __copyProps(__defProp({}, "__esModule", { value: true }), mod);
// Public surface of this module: the RestoreCheckError class only.
var restore_check_error_exports = {};
__export(restore_check_error_exports, {
  RestoreCheckError: () => RestoreCheckError
});
module.exports = __toCommonJS(restore_check_error_exports);
|
|
23
|
+
/**
 * Error thrown when a backup file fails pre-restore validation
 * (e.g. dialect or version mismatch between dump and target app).
 */
class RestoreCheckError extends Error {
  /**
   * @param message human-readable reason the restore check failed
   */
  constructor(message) {
    super(message);
    // Tag the instance so callers can distinguish it via `err.name`.
    this.name = "RestoreCheckError";
  }
}
|
|
29
|
+
// Annotate the CommonJS export names for ESM import in node:
// (dead code at runtime — `0 &&` never executes; it exists only so static
// analyzers like cjs-module-lexer can discover the named exports).
0 && (module.exports = {
  RestoreCheckError
});
|
|
@@ -0,0 +1,9 @@
|
|
|
1
|
+
import { Field } from '@tachybase/database';
/** Transforms a single raw column value; shape depends on the field type. */
type WriterFunc = (val: any) => any;
/**
 * Converts field values between their database representation and the form
 * stored in backup (dump) files, using per-type writer functions registered
 * at module load via {@link FieldValueWriter.registerWriter}.
 */
export declare class FieldValueWriter {
    /** Registered writer functions, keyed by a field's type string. */
    static writers: Map<string, WriterFunc>;
    /** Applies the writer registered for `field`'s type when restoring `val`. */
    static write(field: Field, val: any): any;
    /** Converts `val` to the representation written into a dump file. */
    static toDumpedValue(field: Field, val: any): any;
    /** Registers `writer` for one or more field type strings. */
    static registerWriter(types: string | string[], writer: WriterFunc): void;
}
export {};
|
|
@@ -0,0 +1,99 @@
|
|
|
1
|
+
// esbuild-generated CommonJS interop helpers (machine output; do not hand-edit).
var __create = Object.create;
var __defProp = Object.defineProperty;
var __getOwnPropDesc = Object.getOwnPropertyDescriptor;
var __getOwnPropNames = Object.getOwnPropertyNames;
var __getProtoOf = Object.getPrototypeOf;
var __hasOwnProp = Object.prototype.hasOwnProperty;
// Defines lazy, enumerable getters on `target` for every key of `all`.
var __export = (target, all) => {
  for (var name in all)
    __defProp(target, name, { get: all[name], enumerable: true });
};
// Copies own properties of `from` onto `to` as getters, skipping `except`
// and keys already present; preserves each property's enumerability.
var __copyProps = (to, from, except, desc) => {
  if (from && typeof from === "object" || typeof from === "function") {
    for (let key of __getOwnPropNames(from))
      if (!__hasOwnProp.call(to, key) && key !== except)
        __defProp(to, key, { get: () => from[key], enumerable: !(desc = __getOwnPropDesc(from, key)) || desc.enumerable });
  }
  return to;
};
// Wraps a required CommonJS module so it can be consumed like an ES module
// (synthesizes a `default` export when the module is not flagged __esModule).
var __toESM = (mod, isNodeMode, target) => (target = mod != null ? __create(__getProtoOf(mod)) : {}, __copyProps(
  // If the importer is in node compatibility mode or this is not an ESM
  // file that has been converted to a CommonJS file using a Babel-
  // compatible transform (i.e. "__esModule" has not been set), then set
  // "default" to the CommonJS "module.exports" for node compatibility.
  isNodeMode || !mod || !mod.__esModule ? __defProp(target, "default", { value: mod, enumerable: true }) : target,
  mod
));
// Produces the CommonJS `module.exports` object, marked as an ES module.
var __toCommonJS = (mod) => __copyProps(__defProp({}, "__esModule", { value: true }), mod);
// Public surface of this module: the FieldValueWriter class only.
var field_value_writer_exports = {};
__export(field_value_writer_exports, {
  FieldValueWriter: () => FieldValueWriter
});
module.exports = __toCommonJS(field_value_writer_exports);
var import_database = require("@tachybase/database");
var import_lodash = __toESM(require("lodash"));
|
|
33
|
+
var import_database = require("@tachybase/database");
|
|
34
|
+
var import_lodash = __toESM(require("lodash"));
|
|
35
|
+
// Builds a writer for map-style geometry fields (point/lineString/circle/
// polygon) by replaying the field's custom `set` accessor against a stub
// model instance and capturing whatever value the accessor stores through
// setDataValue. If the accessor never calls setDataValue, the input value
// is returned unchanged.
const getMapFieldWriter = (field) => (val) => {
  let captured = val;
  const stubInstance = {
    setDataValue(_name, stored) {
      captured = stored;
    }
  };
  field.options.set.call(stubInstance, val);
  return captured;
};
|
|
46
|
+
// Converts field values between their database representation and the form
// stored in backup (dump) files. Per-type conversion functions are
// registered in `writers` via registerWriter (see registrations at the
// bottom of this module).
class FieldValueWriter {
  // Writer functions keyed by field type string (Field#typeToString()).
  static writers = /* @__PURE__ */ new Map();
  // Applies the registered writer (if any) to `val` when restoring it.
  // Geometry fields are replayed through the field's `set` accessor instead
  // (parsing JSON strings first); null passes through untouched.
  static write(field, val) {
    if (val === null) return val;
    if (field.type == "point" || field.type == "lineString" || field.type == "circle" || field.type === "polygon") {
      return getMapFieldWriter(field)(import_lodash.default.isString(val) ? JSON.parse(val) : val);
    }
    // FIX: writers were previously read with bracket access on the Map
    // (expando properties on the Map instance); use the Map API so the
    // implementation matches the declared Map<string, WriterFunc> type.
    const writer = FieldValueWriter.writers.get(field.typeToString());
    return writer ? writer(val) : val;
  }
  // Converts `val` to its dump-file representation. Geometry fields go
  // through the field's `get` accessor; everything else (and null) passes
  // through unchanged.
  static toDumpedValue(field, val) {
    if (val === null) return val;
    if (field.type == "point" || field.type == "lineString" || field.type == "circle" || field.type === "polygon") {
      const mockObj = {
        getDataValue: () => val
      };
      return field.options.get.call(mockObj);
    }
    return val;
  }
  // Registers `writer` for one or more field type strings.
  static registerWriter(types, writer) {
    for (const type of import_lodash.default.castArray(types)) {
      // FIX: store via Map.set (was bracket assignment, which bypassed the Map).
      FieldValueWriter.writers.set(type, writer);
    }
  }
}
|
|
77
|
+
// True when `str` parses as JSON *and* the parsed result is an object or
// array (i.e. not a scalar and not the literal `null`). Invalid JSON yields
// false rather than throwing.
function isJSONObjectOrArrayString(str) {
  let result;
  try {
    result = JSON.parse(str);
  } catch {
    return false;
  }
  return result !== null && typeof result === "object";
}
|
|
85
|
+
// JSON/JSONB columns: dump files may hold either already-parsed values or
// JSON text, so only parse strings containing a JSON object/array; scalars
// and non-JSON text pass through unchanged.
FieldValueWriter.registerWriter([import_database.DataTypes.JSON.toString(), import_database.DataTypes.JSONB.toString()], (val) => {
  try {
    return isJSONObjectOrArrayString(val) ? JSON.parse(val) : val;
  } catch (err) {
    // NOTE(review): this guard looks unreachable — isJSONObjectOrArrayString
    // only returns true after a successful JSON.parse — kept as a safety net.
    if (err instanceof SyntaxError && err.message.includes("Unexpected")) {
      return val;
    }
    throw err;
  }
});
// BOOLEAN columns: coerce dumped truthy/falsy values (e.g. 0/1) to booleans.
FieldValueWriter.registerWriter(import_database.DataTypes.BOOLEAN.toString(), (val) => Boolean(val));
// Annotate the CommonJS export names for ESM import in node:
0 && (module.exports = {
  FieldValueWriter
});
|
|
@@ -0,0 +1 @@
|
|
|
1
|
+
// Entry shim: re-export the server module's default export unchanged.
export { default } from './server';
|
|
@@ -0,0 +1,33 @@
|
|
|
1
|
+
// Cached references to Object intrinsics used by the bundler-generated
// CommonJS/ESM interop helpers below.
var __create = Object.create;
var __defProp = Object.defineProperty;
var __getOwnPropDesc = Object.getOwnPropertyDescriptor;
var __getOwnPropNames = Object.getOwnPropertyNames;
var __getProtoOf = Object.getPrototypeOf;
var __hasOwnProp = Object.prototype.hasOwnProperty;
|
|
7
|
+
var __export = (target, all) => {
|
|
8
|
+
for (var name in all)
|
|
9
|
+
__defProp(target, name, { get: all[name], enumerable: true });
|
|
10
|
+
};
|
|
11
|
+
// Copies every own property of `from` onto `to` as a lazy getter,
// skipping `except` and keys already present on `to`, and preserving
// the source property's enumerability. Returns `to`.
var __copyProps = (to, from, except, desc) => {
  const copyable = (from && typeof from === "object") || typeof from === "function";
  if (copyable) {
    for (const key of Object.getOwnPropertyNames(from)) {
      if (key === except || Object.prototype.hasOwnProperty.call(to, key)) continue;
      desc = Object.getOwnPropertyDescriptor(from, key);
      Object.defineProperty(to, key, { get: () => from[key], enumerable: !desc || desc.enumerable });
    }
  }
  return to;
};
|
|
19
|
+
// Wraps a CommonJS module for ESM-style consumption: clones `mod`'s
// prototype chain onto a fresh target, then copies its properties across.
var __toESM = (mod, isNodeMode, target) => (target = mod != null ? __create(__getProtoOf(mod)) : {}, __copyProps(
  // If the importer is in node compatibility mode or this is not an ESM
  // file that has been converted to a CommonJS file using a Babel-
  // compatible transform (i.e. "__esModule" has not been set), then set
  // "default" to the CommonJS "module.exports" for node compatibility.
  isNodeMode || !mod || !mod.__esModule ? __defProp(target, "default", { value: mod, enumerable: true }) : target,
  mod
));
|
|
27
|
+
// Marks the export object as an ES module (`__esModule: true`) and
// re-exposes its properties on a fresh object for CommonJS consumers.
var __toCommonJS = (mod) => __copyProps(__defProp({}, "__esModule", { value: true }), mod);
|
|
28
|
+
// CommonJS wrapper for the package entry: re-export ./server's default.
var server_exports = {};
__export(server_exports, {
  // Lazy getter: safe even though `import_server` is assigned below,
  // after __export has already run.
  default: () => import_server.default
});
module.exports = __toCommonJS(server_exports);
var import_server = __toESM(require("./server"));
|
|
@@ -0,0 +1,30 @@
|
|
|
1
|
+
// Cached references to Object intrinsics used by the bundler-generated
// CommonJS interop helpers below (locale module copy).
var __defProp = Object.defineProperty;
var __getOwnPropDesc = Object.getOwnPropertyDescriptor;
var __getOwnPropNames = Object.getOwnPropertyNames;
var __hasOwnProp = Object.prototype.hasOwnProperty;
|
|
5
|
+
var __export = (target, all) => {
|
|
6
|
+
for (var name in all)
|
|
7
|
+
__defProp(target, name, { get: all[name], enumerable: true });
|
|
8
|
+
};
|
|
9
|
+
// Copies all own properties of `from` onto `to` as lazy getters, skipping
// `except` and already-present keys, preserving enumerability. Returns `to`.
var __copyProps = (to, from, except, desc) => {
  const copyable = (from && typeof from === "object") || typeof from === "function";
  if (copyable) {
    for (const key of Object.getOwnPropertyNames(from)) {
      if (key === except || Object.prototype.hasOwnProperty.call(to, key)) continue;
      desc = Object.getOwnPropertyDescriptor(from, key);
      Object.defineProperty(to, key, { get: () => from[key], enumerable: !desc || desc.enumerable });
    }
  }
  return to;
};
|
|
17
|
+
// Flags the export object as an ES module and copies its properties onto a
// fresh object for CommonJS consumers.
var __toCommonJS = (mod) => __copyProps(__defProp({}, "__esModule", { value: true }), mod);
|
|
18
|
+
// CommonJS wrapper for the zh-CN locale module: expose the translation
// table as the default export.
var zh_CN_exports = {};
__export(zh_CN_exports, {
  default: () => zh_CN_default
});
module.exports = __toCommonJS(zh_CN_exports);
|
|
23
|
+
// Simplified-Chinese (zh-CN) locale strings for the backup UI.
// Keys are the English source strings; values are the translations,
// emitted by the bundler as \uXXXX escapes (do not re-encode).
var zh_CN_default = {
  "Select Import data": "\u8BF7\u9009\u62E9\u5BFC\u5165\u6570\u636E",
  "Select Import Plugins": "\u8BF7\u9009\u62E9\u5BFC\u5165\u63D2\u4EF6",
  "Select User Collections": "\u8BF7\u9009\u62E9\u7528\u6237\u6570\u636E",
  "Basic Data": "\u57FA\u7840\u6570\u636E",
  "Optional Data": "\u53EF\u9009\u6570\u636E",
  "User Data": "\u7528\u6237\u6570\u636E"
};
|
|
@@ -0,0 +1,25 @@
|
|
|
1
|
+
/**
 * Declaration for the backup-files resource handler: a middleware hook plus
 * the action set exposed to the resource router.
 *
 * NOTE(review): parameters are emitted as `any` in this generated d.ts —
 * presumably Koa-style (ctx, next) handlers; confirm against the server
 * implementation before tightening these types.
 */
declare const _default: {
  name: string;
  middleware: (ctx: any, next: any) => Promise<any>;
  actions: {
    list(ctx: any, next: any): Promise<void>;
    get(ctx: any, next: any): Promise<void>;
    /**
     * create dump task
     * @param ctx
     * @param next
     */
    create(ctx: any, next: any): Promise<void>;
    /**
     * download backup file
     * @param ctx
     * @param next
     */
    download(ctx: any, next: any): Promise<void>;
    restore(ctx: any, next: any): Promise<void>;
    destroy(ctx: any, next: any): Promise<void>;
    upload(ctx: any, next: any): Promise<void>;
    dumpableCollections(ctx: any, next: any): Promise<void>;
  };
};
export default _default;
|