@tachybase/module-backup 1.5.0 → 1.6.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/dist/client/components/BackupProgressCell.d.ts +36 -0
- package/dist/client/hooks/useBackupProgress.d.ts +53 -0
- package/dist/client/hooks/useDownloadProgress.d.ts +40 -0
- package/dist/client/index.js +3 -3
- package/dist/externalVersion.js +3 -3
- package/dist/locale/en-US.json +7 -0
- package/dist/locale/ja-JP.d.ts +6 -0
- package/dist/locale/ja-JP.js +6 -0
- package/dist/locale/ko_KR.json +6 -0
- package/dist/locale/pt-BR.d.ts +6 -0
- package/dist/locale/pt-BR.js +6 -0
- package/dist/locale/zh-CN.json +7 -0
- package/dist/node_modules/@hapi/topo/package.json +1 -1
- package/dist/node_modules/archiver/package.json +1 -1
- package/dist/node_modules/cron-parser/package.json +1 -1
- package/dist/node_modules/semver/package.json +1 -1
- package/dist/node_modules/yauzl/package.json +1 -1
- package/dist/server/dumper.d.ts +9 -3
- package/dist/server/dumper.js +78 -15
- package/dist/server/progress-tracker.d.ts +76 -0
- package/dist/server/progress-tracker.js +286 -0
- package/dist/server/resourcers/backup-files.js +9 -6
- package/dist/server/server.d.ts +1 -0
- package/dist/server/server.js +49 -1
- package/package.json +7 -7
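The new client hooks (useBackupProgress, useDownloadProgress) and the server-side progress tracker exchange a small progress payload. Below is a minimal TypeScript sketch of the shapes involved, inferred from the server diff further down; it is not copied from the published client code, and the hook implementations themselves are not shown in this diff.

// Sketch only: shapes inferred from progress-tracker.d.ts and dumper.js below.
type ProgressInfo = {
  percent: number;      // 0-100, following the phase bands used by the Dumper
  currentStep: string;  // e.g. "Dumping collection: <name> (3/42)"
};

// Message pushed over WebSocket by ProgressManager.pushProgressViaWebSocket
type BackupProgressMessage = {
  type: 'backup:progress';
  payload: {
    fileName: string;       // backup file the progress belongs to
    progress: ProgressInfo;
  };
};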

package/dist/node_modules/cron-parser/package.json
CHANGED
@@ -1 +1 @@
-{"name":"cron-parser","version":"4.9.0","description":"Node.js library for parsing crontab instructions","main":"lib/parser.js","types":"types/index.d.ts","typesVersions":{"<4.1":{"*":["types/ts3/*"]}},"directories":{"test":"test"},"scripts":{"test:tsd":"tsd","test:unit":"TZ=UTC tap ./test/*.js","test:cover":"TZ=UTC tap --coverage-report=html ./test/*.js","lint":"eslint .","lint:fix":"eslint --fix .","test":"npm run lint && npm run test:unit && npm run test:tsd"},"repository":{"type":"git","url":"https://github.com/harrisiirak/cron-parser.git"},"keywords":["cron","crontab","parser"],"author":"Harri Siirak","contributors":["Nicholas Clawson","Daniel Prentis <daniel@salsitasoft.com>","Renault John Lecoultre","Richard Astbury <richard.astbury@gmail.com>","Meaglin Wasabi <Meaglin.wasabi@gmail.com>","Mike Kusold <hello@mikekusold.com>","Alex Kit <alex.kit@atmajs.com>","Santiago Gimeno <santiago.gimeno@gmail.com>","Daniel <darc.tec@gmail.com>","Christian Steininger <christian.steininger.cs@gmail.com>","Mykola Piskovyi <m.piskovyi@gmail.com>","Brian Vaughn <brian.david.vaughn@gmail.com>","Nicholas Clawson <nickclaw@gmail.com>","Yasuhiroki <yasuhiroki.duck@gmail.com>","Nicholas Clawson <nickclaw@gmail.com>","Brendan Warkentin <faazshift@gmail.com>","Charlie Fish <fishcharlie.code@gmail.com>","Ian Graves <ian+diskimage@iangrav.es>","Andy Thompson <me@andytson.com>","Regev Brody <regevbr@gmail.com>"],"license":"MIT","dependencies":{"luxon":"^3.2.1"},"devDependencies":{"eslint":"^8.27.0","sinon":"^15.0.1","tap":"^16.3.3","tsd":"^0.26.0"},"engines":{"node":">=12.0.0"},"browser":{"fs":false},"tap":{"check-coverage":false},"tsd":{"directory":"test","compilerOptions":{"lib":["es2017","dom"]}},"files":["lib","types","LICENSE","README.md"],"_lastModified":"2025-11-
+{"name":"cron-parser","version":"4.9.0","description":"Node.js library for parsing crontab instructions","main":"lib/parser.js","types":"types/index.d.ts","typesVersions":{"<4.1":{"*":["types/ts3/*"]}},"directories":{"test":"test"},"scripts":{"test:tsd":"tsd","test:unit":"TZ=UTC tap ./test/*.js","test:cover":"TZ=UTC tap --coverage-report=html ./test/*.js","lint":"eslint .","lint:fix":"eslint --fix .","test":"npm run lint && npm run test:unit && npm run test:tsd"},"repository":{"type":"git","url":"https://github.com/harrisiirak/cron-parser.git"},"keywords":["cron","crontab","parser"],"author":"Harri Siirak","contributors":["Nicholas Clawson","Daniel Prentis <daniel@salsitasoft.com>","Renault John Lecoultre","Richard Astbury <richard.astbury@gmail.com>","Meaglin Wasabi <Meaglin.wasabi@gmail.com>","Mike Kusold <hello@mikekusold.com>","Alex Kit <alex.kit@atmajs.com>","Santiago Gimeno <santiago.gimeno@gmail.com>","Daniel <darc.tec@gmail.com>","Christian Steininger <christian.steininger.cs@gmail.com>","Mykola Piskovyi <m.piskovyi@gmail.com>","Brian Vaughn <brian.david.vaughn@gmail.com>","Nicholas Clawson <nickclaw@gmail.com>","Yasuhiroki <yasuhiroki.duck@gmail.com>","Nicholas Clawson <nickclaw@gmail.com>","Brendan Warkentin <faazshift@gmail.com>","Charlie Fish <fishcharlie.code@gmail.com>","Ian Graves <ian+diskimage@iangrav.es>","Andy Thompson <me@andytson.com>","Regev Brody <regevbr@gmail.com>"],"license":"MIT","dependencies":{"luxon":"^3.2.1"},"devDependencies":{"eslint":"^8.27.0","sinon":"^15.0.1","tap":"^16.3.3","tsd":"^0.26.0"},"engines":{"node":">=12.0.0"},"browser":{"fs":false},"tap":{"check-coverage":false},"tsd":{"directory":"test","compilerOptions":{"lib":["es2017","dom"]}},"files":["lib","types","LICENSE","README.md"],"_lastModified":"2025-11-20T08:10:25.628Z"}

package/dist/node_modules/semver/package.json
CHANGED
@@ -1 +1 @@
-{"name":"semver","version":"7.7.2","description":"The semantic version parser used by npm.","main":"index.js","scripts":{"test":"tap","snap":"tap","lint":"npm run eslint","postlint":"template-oss-check","lintfix":"npm run eslint -- --fix","posttest":"npm run lint","template-oss-apply":"template-oss-apply --force","eslint":"eslint \"**/*.{js,cjs,ts,mjs,jsx,tsx}\""},"devDependencies":{"@npmcli/eslint-config":"^5.0.0","@npmcli/template-oss":"4.24.3","benchmark":"^2.1.4","tap":"^16.0.0"},"license":"ISC","repository":{"type":"git","url":"git+https://github.com/npm/node-semver.git"},"bin":{"semver":"bin/semver.js"},"files":["bin/","lib/","classes/","functions/","internal/","ranges/","index.js","preload.js","range.bnf"],"tap":{"timeout":30,"coverage-map":"map.js","nyc-arg":["--exclude","tap-snapshots/**"]},"engines":{"node":">=10"},"author":"GitHub Inc.","templateOSS":{"//@npmcli/template-oss":"This file is partially managed by @npmcli/template-oss. Edits may be overwritten.","version":"4.24.3","engines":">=10","distPaths":["classes/","functions/","internal/","ranges/","index.js","preload.js","range.bnf"],"allowPaths":["/classes/","/functions/","/internal/","/ranges/","/index.js","/preload.js","/range.bnf","/benchmarks"],"publish":"true"},"_lastModified":"2025-11-
+{"name":"semver","version":"7.7.2","description":"The semantic version parser used by npm.","main":"index.js","scripts":{"test":"tap","snap":"tap","lint":"npm run eslint","postlint":"template-oss-check","lintfix":"npm run eslint -- --fix","posttest":"npm run lint","template-oss-apply":"template-oss-apply --force","eslint":"eslint \"**/*.{js,cjs,ts,mjs,jsx,tsx}\""},"devDependencies":{"@npmcli/eslint-config":"^5.0.0","@npmcli/template-oss":"4.24.3","benchmark":"^2.1.4","tap":"^16.0.0"},"license":"ISC","repository":{"type":"git","url":"git+https://github.com/npm/node-semver.git"},"bin":{"semver":"bin/semver.js"},"files":["bin/","lib/","classes/","functions/","internal/","ranges/","index.js","preload.js","range.bnf"],"tap":{"timeout":30,"coverage-map":"map.js","nyc-arg":["--exclude","tap-snapshots/**"]},"engines":{"node":">=10"},"author":"GitHub Inc.","templateOSS":{"//@npmcli/template-oss":"This file is partially managed by @npmcli/template-oss. Edits may be overwritten.","version":"4.24.3","engines":">=10","distPaths":["classes/","functions/","internal/","ranges/","index.js","preload.js","range.bnf"],"allowPaths":["/classes/","/functions/","/internal/","/ranges/","/index.js","/preload.js","/range.bnf","/benchmarks"],"publish":"true"},"_lastModified":"2025-11-20T08:10:24.966Z"}

package/dist/node_modules/yauzl/package.json
CHANGED
@@ -1 +1 @@
-{"name":"yauzl","version":"3.2.0","description":"yet another unzip library for node","engines":{"node":">=12"},"main":"index.js","scripts":{"test":"node test/test.js"},"repository":{"type":"git","url":"git+https://github.com/thejoshwolfe/yauzl.git"},"keywords":["unzip","zip","stream","archive","file"],"author":"Josh Wolfe <thejoshwolfe@gmail.com>","license":"MIT","bugs":{"url":"https://github.com/thejoshwolfe/yauzl/issues"},"homepage":"https://github.com/thejoshwolfe/yauzl","dependencies":{"buffer-crc32":"~0.2.3","pend":"~1.2.0"},"devDependencies":{"bl":"^6.0.11"},"files":["fd-slicer.js","index.js"],"_lastModified":"2025-11-
+{"name":"yauzl","version":"3.2.0","description":"yet another unzip library for node","engines":{"node":">=12"},"main":"index.js","scripts":{"test":"node test/test.js"},"repository":{"type":"git","url":"git+https://github.com/thejoshwolfe/yauzl.git"},"keywords":["unzip","zip","stream","archive","file"],"author":"Josh Wolfe <thejoshwolfe@gmail.com>","license":"MIT","bugs":{"url":"https://github.com/thejoshwolfe/yauzl/issues"},"homepage":"https://github.com/thejoshwolfe/yauzl","dependencies":{"buffer-crc32":"~0.2.3","pend":"~1.2.0"},"devDependencies":{"bl":"^6.0.11"},"files":["fd-slicer.js","index.js"],"_lastModified":"2025-11-20T08:10:25.158Z"}
package/dist/server/dumper.d.ts
CHANGED
@@ -1,9 +1,11 @@
 import { DumpRulesGroupType } from '@tego/server';
 import { AppMigrator } from './app-migrator';
+import { ProgressTracker } from './progress-tracker';
 type DumpOptions = {
     groups: Set<DumpRulesGroupType>;
     fileName?: string;
     appName?: string;
+    userId?: number;
 };
 type BackUpStatusOk = {
     name: string;
@@ -15,6 +17,8 @@ type BackUpStatusDoing = {
     name: string;
     inProgress: true;
     status: 'in_progress';
+    progress?: number;
+    currentStep?: string;
 };
 type BackUpStatusError = {
     name: string;
@@ -24,6 +28,8 @@ type BackUpStatusError = {
 export declare class Dumper extends AppMigrator {
     static dumpTasks: Map<string, Promise<any>>;
     direction: "dump";
+    private get progressManager();
+    private _progressManager?;
     sqlContent: {
         [key: string]: {
             sql: string | string[];
@@ -31,7 +37,7 @@ export declare class Dumper extends AppMigrator {
         };
     };
     static getTaskPromise(taskId: string): Promise<any> | undefined;
-    static getFileStatus(filePath: string): Promise<BackUpStatusOk | BackUpStatusDoing | BackUpStatusError>;
+    static getFileStatus(filePath: string, appName?: string): Promise<BackUpStatusOk | BackUpStatusDoing | BackUpStatusError>;
     static generateFileName(): string;
     writeSQLContent(key: string, data: {
         sql: string | string[];
@@ -65,13 +71,13 @@ export declare class Dumper extends AppMigrator {
         filePath: string;
         dirname: string;
     }>;
-    dumpDb(options: DumpOptions): Promise<void>;
+    dumpDb(options: DumpOptions, progressTracker?: ProgressTracker): Promise<void>;
     hasSqlContent(): boolean;
     dumpMeta(additionalMeta?: object): Promise<void>;
     dumpCollection(options: {
         name: string;
     }): Promise<void>;
-    packDumpedDir(fileName: string, appName?: string): Promise<{
+    packDumpedDir(fileName: string, appName?: string, progressTracker?: ProgressTracker): Promise<{
         filePath: string;
         dirname: string;
     }>;
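With the new optional appName argument and the progress/currentStep fields on the in-progress status, a caller of Dumper.getFileStatus can surface live progress for a backup that is still being written. A hedged TypeScript sketch against the declarations above; the helper name is hypothetical, and it assumes the ok/error variants also expose a status field (visible for the error variant in dumper.js below).

// Sketch only; not code from the package's resourcers.
import { Dumper } from './dumper';

async function describeBackup(filePath: string, appName?: string): Promise<string> {
  const status = await Dumper.getFileStatus(filePath, appName);
  if (status.status === 'in_progress') {
    // progress / currentStep are optional: they are read from the
    // "<backup file>.progress" sidecar while the ".lock" file still exists
    return `${status.name}: ${status.progress ?? 0}% - ${status.currentStep ?? 'working'}`;
  }
  return `${status.name}: ${status.status}`;
}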
package/dist/server/dumper.js
CHANGED
@@ -42,6 +42,7 @@ var import_dayjs = __toESM(require("dayjs"));
 var import_lodash = __toESM(require("lodash"));
 var import_app_migrator = require("./app-migrator");
 var import_field_value_writer = require("./field-value-writer");
+var import_progress_tracker = require("./progress-tracker");
 var import_utils = require("./utils");
 const finished = import_node_util.default.promisify(import_node_stream.default.finished);
 const _Dumper = class _Dumper extends import_app_migrator.AppMigrator {
@@ -50,13 +51,24 @@ const _Dumper = class _Dumper extends import_app_migrator.AppMigrator {
     this.direction = "dump";
     this.sqlContent = {};
   }
+  get progressManager() {
+    if (!this._progressManager) {
+      this._progressManager = new import_progress_tracker.ProgressManager(
+        (appName) => this.backUpStorageDir(appName),
+        this.workDir,
+        this.app
+      );
+    }
+    return this._progressManager;
+  }
   static getTaskPromise(taskId) {
     return this.dumpTasks.get(taskId);
   }
-  static async getFileStatus(filePath) {
+  static async getFileStatus(filePath, appName) {
     const lockFile = filePath + ".lock";
+    const progressFile = filePath + ".progress";
     const fileName = import_node_path.default.basename(filePath);
-    return import_node_fs.default.promises.stat(lockFile).then((lockFileStat) => {
+    return import_node_fs.default.promises.stat(lockFile).then(async (lockFileStat) => {
       if (lockFileStat.isFile()) {
         if (lockFileStat.ctime.getTime() < Date.now() - 2 * 60 * 60 * 1e3) {
           return {
@@ -65,10 +77,18 @@ const _Dumper = class _Dumper extends import_app_migrator.AppMigrator {
             status: "error"
           };
         } else {
+          let progress = null;
+          try {
+            const progressContent = await import_promises.default.readFile(progressFile, "utf8");
+            progress = JSON.parse(progressContent);
+          } catch (error) {
+          }
           return {
             name: fileName,
             inProgress: true,
-            status: "in_progress"
+            status: "in_progress",
+            progress: progress == null ? void 0 : progress.percent,
+            currentStep: progress == null ? void 0 : progress.currentStep
           };
         }
       } else {
@@ -193,6 +213,7 @@ const _Dumper = class _Dumper extends import_app_migrator.AppMigrator {
   async cleanLockFile(fileName, appName) {
     const filePath = this.lockFilePath(fileName, appName);
     await import_promises.default.unlink(filePath);
+    await this.progressManager.cleanProgressFile(fileName, appName);
   }
   async getLockFile(appName) {
     const backupFileName = _Dumper.generateFileName();
@@ -203,7 +224,8 @@ const _Dumper = class _Dumper extends import_app_migrator.AppMigrator {
     await this.dump({
       groups: options.groups,
       fileName: options.fileName,
-      appName: options.appName
+      appName: options.appName,
+      userId: options.userId
     });
   }
   async dumpableCollectionsGroupByGroup() {
@@ -212,9 +234,14 @@ const _Dumper = class _Dumper extends import_app_migrator.AppMigrator {
   async dump(options) {
     const dumpingGroups = options.groups;
     dumpingGroups.add("required");
+    const backupFileName = options.fileName || _Dumper.generateFileName();
+    const progressTracker = this.progressManager.createProgressTracker(backupFileName, options.appName, options.userId);
+    await progressTracker.update(0, "Preparing...");
     const delayCollections = /* @__PURE__ */ new Set();
     const dumpedCollections = await this.getCollectionsByDataTypes(dumpingGroups);
-
+    const totalCollections = dumpedCollections.length;
+    for (let i = 0; i < dumpedCollections.length; i++) {
+      const collectionName = dumpedCollections[i];
       const collection = this.app.db.getCollection(collectionName);
       if (import_lodash.default.get(collection.options, "dumpRules.delayRestore")) {
        delayCollections.add(collectionName);
@@ -222,26 +249,44 @@ const _Dumper = class _Dumper extends import_app_migrator.AppMigrator {
       await this.dumpCollection({
         name: collectionName
       });
+      const progress = progressTracker.getCollectionProgress(i, totalCollections);
+      await progressTracker.update(progress, `Dumping collection: ${collectionName} (${i + 1}/${totalCollections})`);
     }
+    await progressTracker.update(70, "Dumping metadata...");
     await this.dumpMeta({
       dumpableCollectionsGroupByGroup: import_lodash.default.pick(await this.dumpableCollectionsGroupByGroup(), [...dumpingGroups]),
       dumpedGroups: [...dumpingGroups],
       delayCollections: [...delayCollections]
     });
-    await
-
-
+    await progressTracker.update(75, "Dumping metadata...");
+    await progressTracker.update(80, "Dumping database content...");
+    await this.dumpDb(options, progressTracker);
+    await progressTracker.update(90, "Packing backup file...");
+    const filePath = await this.packDumpedDir(backupFileName, options.appName, progressTracker);
     await this.clearWorkDir();
+    await progressTracker.update(100, "Completed");
+    await this.progressManager.cleanProgressFile(backupFileName, options.appName);
     return filePath;
   }
-  async dumpDb(options) {
+  async dumpDb(options, progressTracker) {
     var _a;
-
+    const collections = Array.from(this.app.db.collections.values());
+    const totalCollections = collections.length;
+    let processedCollections = 0;
+    for (const collection of collections) {
      const collectionOnDumpOption = (_a = this.app.db.collectionFactory.collectionTypes.get(
        collection.constructor
      )) == null ? void 0 : _a.onDump;
      if (collectionOnDumpOption) {
        await collectionOnDumpOption(this, collection);
+        processedCollections++;
+        if (progressTracker && totalCollections > 0) {
+          const progress = progressTracker.getDbContentProgress(processedCollections, totalCollections);
+          await progressTracker.update(
+            progress,
+            `Dumping database content... (${processedCollections}/${totalCollections})`
+          );
+        }
      }
    }
    if (this.hasSqlContent()) {
@@ -257,6 +302,9 @@ const _Dumper = class _Dumper extends import_app_migrator.AppMigrator {
         "utf8"
       );
     }
+    if (progressTracker) {
+      await progressTracker.update(88, "Dumping database content...");
+    }
   }
   hasSqlContent() {
     return Object.keys(this.sqlContent).length > 0;
@@ -375,7 +423,7 @@ const _Dumper = class _Dumper extends import_app_migrator.AppMigrator {
     }
     await import_promises.default.writeFile(import_node_path.default.resolve(collectionDataDir, "meta"), JSON.stringify(meta), "utf8");
   }
-  async packDumpedDir(fileName, appName) {
+  async packDumpedDir(fileName, appName, progressTracker) {
     const dirname = this.backUpStorageDir(appName);
     await import_promises.default.mkdir(dirname, { recursive: true });
     const filePath = import_node_path.default.resolve(dirname, fileName);
@@ -383,13 +431,22 @@ const _Dumper = class _Dumper extends import_app_migrator.AppMigrator {
     const archive = (0, import_archiver.default)("zip", {
       zlib: { level: 9 }
     });
+    let cleanupProgress = null;
+    if (progressTracker) {
+      cleanupProgress = this.progressManager.setupPackingProgress(archive, progressTracker);
+    }
+    const app = this.app;
     const onClose = new Promise((resolve, reject) => {
-      output.on("close",
-
+      output.on("close", () => {
+        if (app == null ? void 0 : app.logger) {
+          app.logger.info(`Backup file created: ${(0, import_utils.humanFileSize)(archive.pointer(), true)}`);
+        }
+        if (cleanupProgress) {
+          cleanupProgress();
+        }
         resolve(true);
       });
-      output.on("end",
-      console.log("Data has been drained");
+      output.on("end", () => {
      });
      archive.on("warning", function(err) {
        if (err.code === "ENOENT") {
@@ -398,6 +455,9 @@ const _Dumper = class _Dumper extends import_app_migrator.AppMigrator {
        }
      });
      archive.on("error", function(err) {
+        if (cleanupProgress) {
+          cleanupProgress();
+        }
        reject(err);
      });
    });
@@ -405,6 +465,9 @@ const _Dumper = class _Dumper extends import_app_migrator.AppMigrator {
     archive.directory(this.workDir, false);
     await archive.finalize();
     await onClose;
+    if (progressTracker) {
+      await progressTracker.update(99, "Packing backup file...");
+    }
     return {
       filePath,
       dirname

package/dist/server/progress-tracker.d.ts
ADDED
@@ -0,0 +1,76 @@
+import { Application } from '@tego/server';
+import archiver from 'archiver';
+/**
+ * Progress info type
+ */
+export type ProgressInfo = {
+    percent: number;
+    currentStep: string;
+};
+/**
+ * Progress tracker interface
+ */
+export interface ProgressTracker {
+    /**
+     * Update the progress
+     */
+    update(percent: number, currentStep: string): Promise<void>;
+    /**
+     * Compute progress for the collection dump phase (5-70%)
+     */
+    getCollectionProgress(currentIndex: number, totalCollections: number): number;
+    /**
+     * Compute progress for the database content dump phase (80-88%)
+     */
+    getDbContentProgress(processedCollections: number, totalCollections: number): number;
+}
+/**
+ * Progress manager class, responsible for the progress files written during a backup
+ */
+export declare class ProgressManager {
+    private backupStorageDir;
+    private workDir;
+    private app?;
+    private static wsUnavailableWarned;
+    constructor(backupStorageDir: (appName?: string) => string, workDir: string, app?: Application);
+    /**
+     * Get the progress file path (static method, for cases where no instance is needed)
+     */
+    static getProgressFilePath(filePath: string): string;
+    /**
+     * Get the progress file path
+     */
+    private progressFilePath;
+    /**
+     * Push a progress update via WebSocket
+     * @param fileName Backup file name
+     * @param progress Progress info
+     * @param userId User ID (optional; if not provided nothing is pushed, which suits automatic backups)
+     * @param appName Application name
+     */
+    private pushProgressViaWebSocket;
+    /**
+     * Write progress info
+     */
+    writeProgress(fileName: string, progress: ProgressInfo, appName?: string, userId?: number): Promise<void>;
+    /**
+     * Read progress info
+     */
+    readProgress(fileName: string, appName?: string): Promise<ProgressInfo | null>;
+    /**
+     * Clean up the progress file
+     */
+    cleanProgressFile(fileName: string, appName: string): Promise<void>;
+    /**
+     * Create a progress tracker
+     */
+    createProgressTracker(backupFileName: string, appName?: string, userId?: number): ProgressTracker;
+    /**
+     * Count the total number of files in a directory
+     */
+    private countFiles;
+    /**
+     * Set up progress updates for the packing phase
+     */
+    setupPackingProgress(archive: archiver.Archiver, progressTracker: ProgressTracker): () => void;
+}
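Taken together, these declarations describe a small API: a ProgressManager owns the "<backup file>.progress" sidecar and the optional WebSocket push, and hands out per-backup ProgressTracker objects. A usage sketch against the declarations above; the directory paths, backup file name, and app value are placeholders, not values taken from the package.

// Sketch only, based on progress-tracker.d.ts above.
import { Application } from '@tego/server';
import { ProgressManager } from './progress-tracker';

declare const app: Application; // assumed: the running application instance

async function trackExampleBackup() {
  const manager = new ProgressManager(
    (appName) => `/backups/${appName ?? 'main'}`, // backupStorageDir resolver (placeholder path)
    '/tmp/backup-workdir',                        // workDir (placeholder path)
    app,                                          // optional; enables the WebSocket push
  );
  const tracker = manager.createProgressTracker('example-backup', 'main', 1); // placeholder file name

  await tracker.update(0, 'Preparing...');
  // Collection phase maps onto the 5-70% band:
  await tracker.update(tracker.getCollectionProgress(0, 12), 'Dumping collection: users (1/12)');
  // Raw database content maps onto the 80-88% band:
  await tracker.update(tracker.getDbContentProgress(3, 12), 'Dumping database content... (3/12)');
  await tracker.update(100, 'Completed');
  await manager.cleanProgressFile('example-backup', 'main');
}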

package/dist/server/progress-tracker.js
ADDED
@@ -0,0 +1,286 @@
+var __create = Object.create;
+var __defProp = Object.defineProperty;
+var __getOwnPropDesc = Object.getOwnPropertyDescriptor;
+var __getOwnPropNames = Object.getOwnPropertyNames;
+var __getProtoOf = Object.getPrototypeOf;
+var __hasOwnProp = Object.prototype.hasOwnProperty;
+var __export = (target, all) => {
+  for (var name in all)
+    __defProp(target, name, { get: all[name], enumerable: true });
+};
+var __copyProps = (to, from, except, desc) => {
+  if (from && typeof from === "object" || typeof from === "function") {
+    for (let key of __getOwnPropNames(from))
+      if (!__hasOwnProp.call(to, key) && key !== except)
+        __defProp(to, key, { get: () => from[key], enumerable: !(desc = __getOwnPropDesc(from, key)) || desc.enumerable });
+  }
+  return to;
+};
+var __toESM = (mod, isNodeMode, target) => (target = mod != null ? __create(__getProtoOf(mod)) : {}, __copyProps(
+  // If the importer is in node compatibility mode or this is not an ESM
+  // file that has been converted to a CommonJS file using a Babel-
+  // compatible transform (i.e. "__esModule" has not been set), then set
+  // "default" to the CommonJS "module.exports" for node compatibility.
+  isNodeMode || !mod || !mod.__esModule ? __defProp(target, "default", { value: mod, enumerable: true }) : target,
+  mod
+));
+var __toCommonJS = (mod) => __copyProps(__defProp({}, "__esModule", { value: true }), mod);
+var progress_tracker_exports = {};
+__export(progress_tracker_exports, {
+  ProgressManager: () => ProgressManager
+});
+module.exports = __toCommonJS(progress_tracker_exports);
+var import_promises = __toESM(require("node:fs/promises"));
+var import_node_path = __toESM(require("node:path"));
+var import_server = require("@tego/server");
+const _ProgressManager = class _ProgressManager {
+  constructor(backupStorageDir, workDir, app) {
+    this.backupStorageDir = backupStorageDir;
+    this.workDir = workDir;
+    this.app = app;
+  }
+  /**
+   * Get the progress file path (static method, for cases where no instance is needed)
+   */
+  static getProgressFilePath(filePath) {
+    return filePath + ".progress";
+  }
+  /**
+   * Get the progress file path
+   */
+  progressFilePath(fileName, appName) {
+    const progressFile = fileName + ".progress";
+    const dirname = this.backupStorageDir(appName);
+    return import_node_path.default.resolve(dirname, progressFile);
+  }
+  /**
+   * Push a progress update via WebSocket
+   * @param fileName Backup file name
+   * @param progress Progress info
+   * @param userId User ID (optional; if not provided nothing is pushed, which suits automatic backups)
+   * @param appName Application name
+   */
+  pushProgressViaWebSocket(fileName, progress, userId, appName) {
+    var _a, _b, _c, _d, _e;
+    if (!this.app) {
+      return;
+    }
+    if (!userId || userId <= 0) {
+      if ((_a = this.app) == null ? void 0 : _a.logger) {
+        this.app.logger.debug(`[ProgressManager] Skipping WebSocket push for ${fileName}: no userId`);
+      }
+      return;
+    }
+    try {
+      const gateway = import_server.Gateway.getInstance();
+      const ws = gateway["wsServer"];
+      if (!ws) {
+        if (!_ProgressManager.wsUnavailableWarned) {
+          _ProgressManager.wsUnavailableWarned = true;
+          if ((_b = this.app) == null ? void 0 : _b.logger) {
+            this.app.logger.warn(
+              "[ProgressManager] WebSocket server not available (this is normal in worker threads, progress will be saved to file only)"
+            );
+          }
+        }
+        return;
+      }
+      const finalAppName = appName || this.app.name;
+      const tagPrefix = `app:${finalAppName}`;
+      const tagValue = `${userId}`;
+      const expectedTag = `${tagPrefix}#${tagValue}`;
+      if ((_c = this.app) == null ? void 0 : _c.logger) {
+        const matchingConnections = Array.from(ws.webSocketClients.values()).filter((client) => {
+          var _a2, _b2;
+          return (_b2 = (_a2 = client.tags) == null ? void 0 : _a2.has) == null ? void 0 : _b2.call(_a2, expectedTag);
+        });
+        this.app.logger.debug(
+          `[ProgressManager] Sending backup progress via WebSocket: fileName=${fileName}, userId=${userId}, appName=${finalAppName}, tag=${expectedTag}, progress=${progress.percent}%, matchingConnections=${matchingConnections.length}`
+        );
+        if (matchingConnections.length === 0) {
+          const allTags = Array.from(ws.webSocketClients.values()).flatMap((client) => Array.from(client.tags || [])).filter((tag) => typeof tag === "string" && tag.startsWith("app:"));
+          this.app.logger.warn(
+            `[ProgressManager] No matching connections found for tag ${expectedTag}. Available tags: ${allTags.join(", ")}`
+          );
+        }
+      }
+      try {
+        ws.sendToConnectionsByTag(tagPrefix, tagValue, {
+          type: "backup:progress",
+          payload: {
+            fileName,
+            progress
+          }
+        });
+      } catch (sendError) {
+        if ((_d = this.app) == null ? void 0 : _d.logger) {
+          this.app.logger.error(
+            `[ProgressManager] Failed to send WebSocket message: tagPrefix=${tagPrefix}, tagValue=${tagValue}, error=`,
+            sendError
+          );
+        }
+        throw sendError;
+      }
+    } catch (error) {
+      if ((_e = this.app) == null ? void 0 : _e.logger) {
+        this.app.logger.warn(`[ProgressManager] WebSocket push error for ${fileName} (userId=${userId}):`, error);
+      } else {
+        console.error(`[ProgressManager] WebSocket push error for ${fileName} (userId=${userId}):`, error);
+      }
+    }
+  }
+  /**
+   * Write progress info
+   */
+  async writeProgress(fileName, progress, appName, userId) {
+    const filePath = this.progressFilePath(fileName, appName);
+    await import_promises.default.writeFile(filePath, JSON.stringify(progress), "utf8");
+    this.pushProgressViaWebSocket(fileName, progress, userId, appName);
+  }
+  /**
+   * Read progress info
+   */
+  async readProgress(fileName, appName) {
+    const filePath = this.progressFilePath(fileName, appName);
+    try {
+      const content = await import_promises.default.readFile(filePath, "utf8");
+      return JSON.parse(content);
+    } catch (error) {
+      if (error.code === "ENOENT") {
+        return null;
+      }
+      throw error;
+    }
+  }
+  /**
+   * Clean up the progress file
+   */
+  async cleanProgressFile(fileName, appName) {
+    const filePath = this.progressFilePath(fileName, appName);
+    try {
+      await import_promises.default.unlink(filePath);
+    } catch (error) {
+      if (error.code !== "ENOENT") {
+        throw error;
+      }
+    }
+  }
+  /**
+   * Create a progress tracker
+   */
+  createProgressTracker(backupFileName, appName, userId) {
+    return {
+      /**
+       * Update the progress
+       */
+      update: async (percent, currentStep) => {
+        await this.writeProgress(backupFileName, { percent, currentStep }, appName, userId);
+      },
+      /**
+       * Compute progress for the collection dump phase (5-70%)
+       */
+      getCollectionProgress: (currentIndex, totalCollections) => {
+        return 5 + Math.floor((currentIndex + 1) / totalCollections * 65);
+      },
+      /**
+       * Compute progress for the database content dump phase (80-88%)
+       */
+      getDbContentProgress: (processedCollections, totalCollections) => {
+        if (totalCollections === 0) return 80;
+        return 80 + Math.floor(processedCollections / totalCollections * 8);
+      }
+    };
+  }
+  /**
+   * Count the total number of files in a directory
+   */
+  async countFiles(dir) {
+    let count = 0;
+    try {
+      const entries = await import_promises.default.readdir(dir, { withFileTypes: true });
+      for (const entry of entries) {
+        const fullPath = import_node_path.default.join(dir, entry.name);
+        if (entry.isDirectory()) {
+          count += await this.countFiles(fullPath);
+        } else {
+          count++;
+        }
+      }
+    } catch (error) {
+    }
+    return count;
+  }
+  /**
+   * Set up progress updates for the packing phase
+   */
+  setupPackingProgress(archive, progressTracker) {
+    let processedEntries = 0;
+    let totalEntries = 0;
+    let startTime = Date.now();
+    let progressInterval = null;
+    let currentProgress = 90;
+    let isFinished = false;
+    this.countFiles(this.workDir).then((count) => {
+      totalEntries = count || 1;
+    }).catch(() => {
+      totalEntries = 1;
+    });
+    archive.on("entry", () => {
+      processedEntries++;
+    });
+    const handleFinish = async () => {
+      var _a;
+      if (isFinished) {
+        return;
+      }
+      isFinished = true;
+      if (progressInterval) {
+        clearInterval(progressInterval);
+        progressInterval = null;
+      }
+      try {
+        await progressTracker.update(99, "Packing backup file...");
+      } catch (error) {
+        if ((_a = this.app) == null ? void 0 : _a.logger) {
+          this.app.logger.warn("[ProgressManager] Failed to update progress after packing:", error);
+        }
+      }
+    };
+    archive.on("end", handleFinish);
+    archive.on("finish", handleFinish);
+    progressInterval = setInterval(async () => {
+      if (isFinished) {
+        if (progressInterval) {
+          clearInterval(progressInterval);
+          progressInterval = null;
+        }
+        return;
+      }
+      const elapsed = Date.now() - startTime;
+      if (processedEntries > 0 && totalEntries > 0) {
+        const fileBasedProgress = 90 + Math.floor(processedEntries / totalEntries * 9);
+        const timeBasedProgress = Math.min(90 + Math.floor(elapsed / 6e4 * 9), 99);
+        currentProgress = Math.max(currentProgress, Math.min(fileBasedProgress, timeBasedProgress, 99));
+      } else {
+        currentProgress = Math.min(90 + Math.floor(elapsed / 6e4 * 9), 99);
+      }
+      if (currentProgress < 99) {
+        await progressTracker.update(currentProgress, "Packing backup file...");
+      }
+    }, 300);
+    return () => {
+      isFinished = true;
+      if (progressInterval) {
+        clearInterval(progressInterval);
+        progressInterval = null;
+      }
+    };
+  }
+};
+// Static flag used to avoid repeated warnings that WebSocket is unavailable (common in worker threads)
+_ProgressManager.wsUnavailableWarned = false;
+let ProgressManager = _ProgressManager;
+// Annotate the CommonJS export names for ESM import in node:
+0 && (module.exports = {
+  ProgressManager
+});
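The percent values written by the tracker fall into fixed bands: roughly 0-5% preparation, 5-70% collection dump, 70-80% metadata, 80-88% raw database content, 90-99% packing, and 100% on completion. As a worked check of the collection formula above (not additional package code), the 10th of 40 collections reports 21%:

// 5 + floor((currentIndex + 1) / totalCollections * 65), from createProgressTracker above
const percent = 5 + Math.floor(((9 + 1) / 40) * 65); // currentIndex = 9, i.e. the 10th of 40 collections
console.log(percent); // 21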