@superblocksteam/vite-plugin-file-sync 2.0.89 → 2.0.90
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/dist/ai-service/agent/prompts/api-prompts.js +1 -1
- package/dist/ai-service/agent/prompts/build-base-system-prompt.d.ts +2 -2
- package/dist/ai-service/agent/prompts/build-base-system-prompt.d.ts.map +1 -1
- package/dist/ai-service/agent/prompts/build-base-system-prompt.js +132 -28
- package/dist/ai-service/agent/prompts/build-base-system-prompt.js.map +1 -1
- package/dist/ai-service/agent/tool-message-utils.js +1 -1
- package/dist/ai-service/agent/tool-message-utils.js.map +1 -1
- package/dist/ai-service/agent/tools/apis/test-api.d.ts +44 -0
- package/dist/ai-service/agent/tools/apis/test-api.d.ts.map +1 -1
- package/dist/ai-service/agent/tools/apis/test-api.js +226 -92
- package/dist/ai-service/agent/tools/apis/test-api.js.map +1 -1
- package/dist/ai-service/agent/tools/build-capture-screenshot.d.ts +13 -11
- package/dist/ai-service/agent/tools/build-capture-screenshot.d.ts.map +1 -1
- package/dist/ai-service/agent/tools/build-capture-screenshot.js +33 -3
- package/dist/ai-service/agent/tools/build-capture-screenshot.js.map +1 -1
- package/dist/ai-service/agent/tools/build-debug.d.ts.map +1 -1
- package/dist/ai-service/agent/tools/build-debug.js +8 -3
- package/dist/ai-service/agent/tools/build-debug.js.map +1 -1
- package/dist/ai-service/agent/tools/build-edit-file.d.ts.map +1 -1
- package/dist/ai-service/agent/tools/build-edit-file.js +12 -2
- package/dist/ai-service/agent/tools/build-edit-file.js.map +1 -1
- package/dist/ai-service/agent/tools/build-list-files.js +1 -1
- package/dist/ai-service/agent/tools/build-list-files.js.map +1 -1
- package/dist/ai-service/agent/tools/build-multi-edit-file.d.ts.map +1 -1
- package/dist/ai-service/agent/tools/build-multi-edit-file.js +6 -1
- package/dist/ai-service/agent/tools/build-multi-edit-file.js.map +1 -1
- package/dist/ai-service/agent/tools/get-logs.d.ts +1 -1
- package/dist/ai-service/agent/tools.d.ts.map +1 -1
- package/dist/ai-service/agent/tools.js +5 -1
- package/dist/ai-service/agent/tools.js.map +1 -1
- package/dist/ai-service/agent/tools2/access-control.d.ts.map +1 -1
- package/dist/ai-service/agent/tools2/access-control.js +3 -0
- package/dist/ai-service/agent/tools2/access-control.js.map +1 -1
- package/dist/ai-service/agent/tools2/tools/exit-plan-mode.d.ts +2 -2
- package/dist/ai-service/agent/tools2/tools/exit-plan-mode.js +3 -3
- package/dist/ai-service/agent/tools2/tools/exit-plan-mode.js.map +1 -1
- package/dist/ai-service/agent/tools2/tools/grep-metadata.d.ts +1 -1
- package/dist/ai-service/agent/tools2/tools/grep.d.ts +1 -1
- package/dist/ai-service/agent/tools2/tools/index.d.ts +1 -0
- package/dist/ai-service/agent/tools2/tools/index.d.ts.map +1 -1
- package/dist/ai-service/agent/tools2/tools/index.js +1 -0
- package/dist/ai-service/agent/tools2/tools/index.js.map +1 -1
- package/dist/ai-service/agent/tools2/tools/remember-facts.d.ts +29 -0
- package/dist/ai-service/agent/tools2/tools/remember-facts.d.ts.map +1 -0
- package/dist/ai-service/agent/tools2/tools/remember-facts.js +104 -0
- package/dist/ai-service/agent/tools2/tools/remember-facts.js.map +1 -0
- package/dist/ai-service/agent/tools2/types.d.ts +1 -0
- package/dist/ai-service/agent/tools2/types.d.ts.map +1 -1
- package/dist/ai-service/agent/tools2/types.js +1 -0
- package/dist/ai-service/agent/tools2/types.js.map +1 -1
- package/dist/ai-service/agent/utils.d.ts.map +1 -1
- package/dist/ai-service/agent/utils.js +77 -0
- package/dist/ai-service/agent/utils.js.map +1 -1
- package/dist/ai-service/app-interface/filesystem/handlers/skill-handler.d.ts +22 -1
- package/dist/ai-service/app-interface/filesystem/handlers/skill-handler.d.ts.map +1 -1
- package/dist/ai-service/app-interface/filesystem/handlers/skill-handler.js +121 -11
- package/dist/ai-service/app-interface/filesystem/handlers/skill-handler.js.map +1 -1
- package/dist/ai-service/app-interface/filesystem/sdk-path-validator.d.ts +1 -1
- package/dist/ai-service/app-interface/filesystem/sdk-path-validator.d.ts.map +1 -1
- package/dist/ai-service/app-interface/filesystem/sdk-path-validator.js +4 -2
- package/dist/ai-service/app-interface/filesystem/sdk-path-validator.js.map +1 -1
- package/dist/ai-service/app-interface/filesystem/virtual-file-system.d.ts +2 -0
- package/dist/ai-service/app-interface/filesystem/virtual-file-system.d.ts.map +1 -1
- package/dist/ai-service/app-interface/filesystem/virtual-file-system.js +28 -3
- package/dist/ai-service/app-interface/filesystem/virtual-file-system.js.map +1 -1
- package/dist/ai-service/app-interface/shell.d.ts.map +1 -1
- package/dist/ai-service/app-interface/shell.js +9 -3
- package/dist/ai-service/app-interface/shell.js.map +1 -1
- package/dist/ai-service/app-skills/helpers.d.ts +32 -0
- package/dist/ai-service/app-skills/helpers.d.ts.map +1 -0
- package/dist/ai-service/app-skills/helpers.js +262 -0
- package/dist/ai-service/app-skills/helpers.js.map +1 -0
- package/dist/ai-service/app-skills/manager.d.ts +60 -0
- package/dist/ai-service/app-skills/manager.d.ts.map +1 -0
- package/dist/ai-service/app-skills/manager.js +203 -0
- package/dist/ai-service/app-skills/manager.js.map +1 -0
- package/dist/ai-service/app-skills/parser.d.ts +53 -0
- package/dist/ai-service/app-skills/parser.d.ts.map +1 -0
- package/dist/ai-service/app-skills/parser.js +82 -0
- package/dist/ai-service/app-skills/parser.js.map +1 -0
- package/dist/ai-service/attachment-upload.d.ts +24 -0
- package/dist/ai-service/attachment-upload.d.ts.map +1 -0
- package/dist/ai-service/attachment-upload.js +75 -0
- package/dist/ai-service/attachment-upload.js.map +1 -0
- package/dist/ai-service/chat/chat-session-store.d.ts +1 -1
- package/dist/ai-service/chat/chat-session-store.d.ts.map +1 -1
- package/dist/ai-service/chat/chat-session-store.js +82 -0
- package/dist/ai-service/chat/chat-session-store.js.map +1 -1
- package/dist/ai-service/facts/helpers.d.ts +8 -1
- package/dist/ai-service/facts/helpers.d.ts.map +1 -1
- package/dist/ai-service/facts/helpers.js +89 -7
- package/dist/ai-service/facts/helpers.js.map +1 -1
- package/dist/ai-service/facts/knowledge-manager.d.ts +39 -3
- package/dist/ai-service/facts/knowledge-manager.d.ts.map +1 -1
- package/dist/ai-service/facts/knowledge-manager.js +108 -27
- package/dist/ai-service/facts/knowledge-manager.js.map +1 -1
- package/dist/ai-service/features.d.ts +5 -0
- package/dist/ai-service/features.d.ts.map +1 -1
- package/dist/ai-service/features.js +5 -0
- package/dist/ai-service/features.js.map +1 -1
- package/dist/ai-service/index.d.ts +20 -0
- package/dist/ai-service/index.d.ts.map +1 -1
- package/dist/ai-service/index.js +38 -1
- package/dist/ai-service/index.js.map +1 -1
- package/dist/ai-service/integrations/store.d.ts.map +1 -1
- package/dist/ai-service/integrations/store.js +3 -1
- package/dist/ai-service/integrations/store.js.map +1 -1
- package/dist/ai-service/llm/context-v2/context.d.ts +2 -0
- package/dist/ai-service/llm/context-v2/context.d.ts.map +1 -1
- package/dist/ai-service/llm/context-v2/context.js +65 -0
- package/dist/ai-service/llm/context-v2/context.js.map +1 -1
- package/dist/ai-service/recording/block-traversal.d.ts +30 -0
- package/dist/ai-service/recording/block-traversal.d.ts.map +1 -0
- package/dist/ai-service/recording/block-traversal.js +118 -0
- package/dist/ai-service/recording/block-traversal.js.map +1 -0
- package/dist/ai-service/recording/index.d.ts +1 -0
- package/dist/ai-service/recording/index.d.ts.map +1 -1
- package/dist/ai-service/recording/index.js +2 -0
- package/dist/ai-service/recording/index.js.map +1 -1
- package/dist/ai-service/recording/snapshot-manager.d.ts +200 -0
- package/dist/ai-service/recording/snapshot-manager.d.ts.map +1 -0
- package/dist/ai-service/recording/snapshot-manager.js +708 -0
- package/dist/ai-service/recording/snapshot-manager.js.map +1 -0
- package/dist/ai-service/recording/storage/session-recording-storage.d.ts +5 -1
- package/dist/ai-service/recording/storage/session-recording-storage.d.ts.map +1 -1
- package/dist/ai-service/recording/storage/session-recording-storage.js +46 -21
- package/dist/ai-service/recording/storage/session-recording-storage.js.map +1 -1
- package/dist/ai-service/sdk-api-templates.d.ts +2 -0
- package/dist/ai-service/sdk-api-templates.d.ts.map +1 -0
- package/dist/ai-service/sdk-api-templates.js +2 -0
- package/dist/ai-service/sdk-api-templates.js.map +1 -0
- package/dist/ai-service/skills/index.d.ts +3 -1
- package/dist/ai-service/skills/index.d.ts.map +1 -1
- package/dist/ai-service/skills/index.js +5 -2
- package/dist/ai-service/skills/index.js.map +1 -1
- package/dist/ai-service/skills/system/superblocks-frontend/skill.generated.d.ts +1 -1
- package/dist/ai-service/skills/system/superblocks-frontend/skill.generated.js +4 -4
- package/dist/ai-service/state-machine/clark-fsm.d.ts +18 -1
- package/dist/ai-service/state-machine/clark-fsm.d.ts.map +1 -1
- package/dist/ai-service/state-machine/clark-fsm.js +1 -0
- package/dist/ai-service/state-machine/clark-fsm.js.map +1 -1
- package/dist/ai-service/state-machine/handlers/agent-planning.d.ts.map +1 -1
- package/dist/ai-service/state-machine/handlers/agent-planning.js +93 -11
- package/dist/ai-service/state-machine/handlers/agent-planning.js.map +1 -1
- package/dist/ai-service/state-machine/handlers/llm-generating.d.ts.map +1 -1
- package/dist/ai-service/state-machine/handlers/llm-generating.js +67 -7
- package/dist/ai-service/state-machine/handlers/llm-generating.js.map +1 -1
- package/dist/ai-service/state-machine/mocks.d.ts.map +1 -1
- package/dist/ai-service/state-machine/mocks.js +2 -0
- package/dist/ai-service/state-machine/mocks.js.map +1 -1
- package/dist/ai-service/types.d.ts +3 -0
- package/dist/ai-service/types.d.ts.map +1 -1
- package/dist/ai-service/types.js.map +1 -1
- package/dist/ai-service/util/mode-message.d.ts.map +1 -1
- package/dist/ai-service/util/mode-message.js +3 -0
- package/dist/ai-service/util/mode-message.js.map +1 -1
- package/dist/ai-service/util/rpc-timeout.d.ts +37 -0
- package/dist/ai-service/util/rpc-timeout.d.ts.map +1 -0
- package/dist/ai-service/util/rpc-timeout.js +58 -0
- package/dist/ai-service/util/rpc-timeout.js.map +1 -0
- package/dist/file-sync-vite-plugin.d.ts.map +1 -1
- package/dist/file-sync-vite-plugin.js +72 -21
- package/dist/file-sync-vite-plugin.js.map +1 -1
- package/dist/file-system-helpers.d.ts +9 -0
- package/dist/file-system-helpers.d.ts.map +1 -1
- package/dist/file-system-helpers.js +21 -0
- package/dist/file-system-helpers.js.map +1 -1
- package/dist/injected-index.d.ts.map +1 -1
- package/dist/injected-index.js +4 -3
- package/dist/injected-index.js.map +1 -1
- package/dist/plugin-options.d.ts +2 -0
- package/dist/plugin-options.d.ts.map +1 -1
- package/dist/plugin-options.js.map +1 -1
- package/dist/snapshot-routes.d.ts +17 -0
- package/dist/snapshot-routes.d.ts.map +1 -0
- package/dist/snapshot-routes.js +247 -0
- package/dist/snapshot-routes.js.map +1 -0
- package/dist/socket-manager.d.ts +1 -0
- package/dist/socket-manager.d.ts.map +1 -1
- package/dist/socket-manager.js +34 -0
- package/dist/socket-manager.js.map +1 -1
- package/package.json +9 -5
|
@@ -0,0 +1,708 @@
|
|
|
1
|
+
/**
|
|
2
|
+
* Manager for handling uploaded session recording snapshots.
|
|
3
|
+
*
|
|
4
|
+
* This manager handles receiving archives (zip or tar.gz, same format as
|
|
5
|
+
* sb-download), extracting them to a temporary directory, and tracking the
|
|
6
|
+
* current upload state.
|
|
7
|
+
*
|
|
8
|
+
* Directory structure:
|
|
9
|
+
* /tmp/superblocks-replay/{userId}/{uploadId}/
|
|
10
|
+
* ├── main/ <- main archive extraction
|
|
11
|
+
* │ ├── .superblocks/
|
|
12
|
+
* │ │ ├── superblocks.json
|
|
13
|
+
* │ │ └── recordings/
|
|
14
|
+
* │ │ ├── sessions.json
|
|
15
|
+
* │ │ └── {sessionId}/
|
|
16
|
+
* │ │ ├── recording.jsonl
|
|
17
|
+
* │ │ └── snapshot.zip (or snapshot.tar.gz for older snapshots)
|
|
18
|
+
* │ ├── apis/
|
|
19
|
+
* │ ├── pages/
|
|
20
|
+
* │ └── ... (rest of app structure)
|
|
21
|
+
* └── {sessionId}/ <- session snapshot extraction
|
|
22
|
+
* ├── apis/
|
|
23
|
+
* ├── pages/
|
|
24
|
+
* └── ...
|
|
25
|
+
*/
|
|
26
|
+
import { createHash } from "node:crypto";
|
|
27
|
+
import { createReadStream, createWriteStream } from "node:fs";
|
|
28
|
+
import fs from "node:fs/promises";
|
|
29
|
+
import os from "node:os";
|
|
30
|
+
import path from "node:path";
|
|
31
|
+
import { Readable } from "node:stream";
|
|
32
|
+
import { pipeline } from "node:stream/promises";
|
|
33
|
+
import { createGunzip } from "node:zlib";
|
|
34
|
+
import * as tar from "tar";
|
|
35
|
+
import yaml from "yaml";
|
|
36
|
+
import yauzl from "yauzl";
|
|
37
|
+
import { getLogger, getErrorMeta } from "../../util/logger.js";
|
|
38
|
+
import { sanitizeBlockIntegrations } from "./block-traversal.js";
|
|
39
|
+
/** Maximum age for a snapshot to be considered recent (1 day) */
|
|
40
|
+
const MAX_UPLOAD_AGE_MS = 24 * 60 * 60 * 1000;
|
|
41
|
+
/** Relative path to the snapshot restore tracking file within app root */
|
|
42
|
+
const TRACKING_FILE_REL_PATH = path.join(".superblocks", "snapshot-restore.json");
|
|
43
|
+
/**
|
|
44
|
+
* Get file size in bytes, returns null if file doesn't exist.
|
|
45
|
+
*/
|
|
46
|
+
async function getFileSize(filePath) {
|
|
47
|
+
try {
|
|
48
|
+
const stats = await fs.stat(filePath);
|
|
49
|
+
return stats.size;
|
|
50
|
+
}
|
|
51
|
+
catch {
|
|
52
|
+
return null;
|
|
53
|
+
}
|
|
54
|
+
}
|
|
55
|
+
/**
|
|
56
|
+
* Read the first two bytes from a file to determine its archive format.
|
|
57
|
+
*/
|
|
58
|
+
async function readMagicBytes(filePath) {
|
|
59
|
+
const handle = await fs.open(filePath, "r");
|
|
60
|
+
try {
|
|
61
|
+
const buf = Buffer.alloc(2);
|
|
62
|
+
await handle.read(buf, 0, 2, 0);
|
|
63
|
+
return buf;
|
|
64
|
+
}
|
|
65
|
+
finally {
|
|
66
|
+
await handle.close();
|
|
67
|
+
}
|
|
68
|
+
}
|
|
69
|
+
/**
|
|
70
|
+
* Extract a zip archive (from a Buffer or file path) into `destDir` using yauzl.
|
|
71
|
+
*/
|
|
72
|
+
function extractZip(input, destDir) {
|
|
73
|
+
return new Promise((resolve, reject) => {
|
|
74
|
+
const opts = { lazyEntries: true };
|
|
75
|
+
const cb = (err, zipfile) => {
|
|
76
|
+
if (err || !zipfile)
|
|
77
|
+
return reject(err ?? new Error("Failed to open zip"));
|
|
78
|
+
zipfile.on("error", reject);
|
|
79
|
+
zipfile.readEntry();
|
|
80
|
+
const resolvedDestDir = path.resolve(destDir) + path.sep;
|
|
81
|
+
zipfile.on("entry", (entry) => {
|
|
82
|
+
const dest = path.resolve(destDir, entry.fileName);
|
|
83
|
+
if (!dest.startsWith(resolvedDestDir) &&
|
|
84
|
+
dest !== path.resolve(destDir)) {
|
|
85
|
+
return reject(new Error(`Zip entry escapes target directory: ${entry.fileName}`));
|
|
86
|
+
}
|
|
87
|
+
if (/\/$/.test(entry.fileName)) {
|
|
88
|
+
fs.mkdir(dest, { recursive: true }).then(() => zipfile.readEntry(), reject);
|
|
89
|
+
return;
|
|
90
|
+
}
|
|
91
|
+
fs.mkdir(path.dirname(dest), { recursive: true }).then(() => {
|
|
92
|
+
zipfile.openReadStream(entry, (streamErr, readStream) => {
|
|
93
|
+
if (streamErr || !readStream) {
|
|
94
|
+
return reject(streamErr ?? new Error("Failed to open read stream"));
|
|
95
|
+
}
|
|
96
|
+
const writeStream = createWriteStream(dest);
|
|
97
|
+
readStream.on("end", () => zipfile.readEntry());
|
|
98
|
+
readStream.on("error", reject);
|
|
99
|
+
writeStream.on("error", reject);
|
|
100
|
+
readStream.pipe(writeStream);
|
|
101
|
+
});
|
|
102
|
+
}, reject);
|
|
103
|
+
});
|
|
104
|
+
zipfile.on("end", resolve);
|
|
105
|
+
};
|
|
106
|
+
if (typeof input === "string") {
|
|
107
|
+
yauzl.open(input, opts, cb);
|
|
108
|
+
}
|
|
109
|
+
else {
|
|
110
|
+
yauzl.fromBuffer(input, opts, cb);
|
|
111
|
+
}
|
|
112
|
+
});
|
|
113
|
+
}
|
|
114
|
+
/**
|
|
115
|
+
* Manager for uploaded session recording snapshots.
|
|
116
|
+
*
|
|
117
|
+
* Tracks the current upload state and handles extraction of uploaded archives
|
|
118
|
+
* to a deterministic temporary directory structure.
|
|
119
|
+
*/
|
|
120
|
+
export class SnapshotManager {
|
|
121
|
+
userId;
|
|
122
|
+
currentUpload = null;
|
|
123
|
+
baseDir;
|
|
124
|
+
/**
|
|
125
|
+
* Create a new SnapshotManager.
|
|
126
|
+
*
|
|
127
|
+
* @param userId - Current user ID, used to namespace the temp directory
|
|
128
|
+
*/
|
|
129
|
+
constructor(userId) {
|
|
130
|
+
this.userId = userId;
|
|
131
|
+
// Base directory: /tmp/superblocks-replay/{userId}
|
|
132
|
+
this.baseDir = path.join(os.tmpdir(), "superblocks-replay", userId);
|
|
133
|
+
}
|
|
134
|
+
/**
|
|
135
|
+
* Get the main extraction subdirectory within an upload directory.
|
|
136
|
+
* The main tar is extracted here; session extractions are siblings.
|
|
137
|
+
*/
|
|
138
|
+
mainDir(uploadDir) {
|
|
139
|
+
return path.join(uploadDir, "main");
|
|
140
|
+
}
|
|
141
|
+
/**
|
|
142
|
+
* Scan the base directory for recent uploads.
|
|
143
|
+
* Returns the most recent upload within MAX_UPLOAD_AGE_MS, or null if none found.
|
|
144
|
+
*/
|
|
145
|
+
async findRecentUpload() {
|
|
146
|
+
try {
|
|
147
|
+
// Check if base directory exists and list its contents
|
|
148
|
+
const entries = await fs.readdir(this.baseDir, { withFileTypes: true });
|
|
149
|
+
// Find directories (each is a hash from a previous upload)
|
|
150
|
+
const directories = entries.filter((e) => e.isDirectory());
|
|
151
|
+
// Check each directory's stats and find the most recent one within MAX_UPLOAD_AGE_MS
|
|
152
|
+
const now = Date.now();
|
|
153
|
+
let mostRecent = null;
|
|
154
|
+
for (const dir of directories) {
|
|
155
|
+
const dirPath = path.join(this.baseDir, dir.name);
|
|
156
|
+
const stats = await fs.stat(dirPath);
|
|
157
|
+
const age = now - stats.mtime.getTime();
|
|
158
|
+
if (age < MAX_UPLOAD_AGE_MS) {
|
|
159
|
+
if (!mostRecent || stats.mtime > mostRecent.mtime) {
|
|
160
|
+
mostRecent = { uploadId: dir.name, mtime: stats.mtime };
|
|
161
|
+
}
|
|
162
|
+
}
|
|
163
|
+
}
|
|
164
|
+
if (mostRecent) {
|
|
165
|
+
return {
|
|
166
|
+
uploadId: mostRecent.uploadId,
|
|
167
|
+
extractedDir: path.join(this.baseDir, mostRecent.uploadId),
|
|
168
|
+
uploadedAt: mostRecent.mtime,
|
|
169
|
+
};
|
|
170
|
+
}
|
|
171
|
+
return null;
|
|
172
|
+
}
|
|
173
|
+
catch {
|
|
174
|
+
// Directory doesn't exist or other error - no recent upload
|
|
175
|
+
return null;
|
|
176
|
+
}
|
|
177
|
+
}
|
|
178
|
+
/**
|
|
179
|
+
* Check if there's a currently uploaded snapshot.
|
|
180
|
+
* Checks disk for recent uploads if no in-memory state exists.
|
|
181
|
+
*/
|
|
182
|
+
async hasUpload() {
|
|
183
|
+
const upload = await this.getUpload();
|
|
184
|
+
return upload !== null;
|
|
185
|
+
}
|
|
186
|
+
/**
|
|
187
|
+
* Get the current upload state.
|
|
188
|
+
* If no in-memory state exists, checks disk for recent uploads (within 1 hour).
|
|
189
|
+
*
|
|
190
|
+
* @returns The current upload state, or null if no upload exists
|
|
191
|
+
*/
|
|
192
|
+
async getUpload() {
|
|
193
|
+
// Return in-memory state if available
|
|
194
|
+
if (this.currentUpload) {
|
|
195
|
+
return this.currentUpload;
|
|
196
|
+
}
|
|
197
|
+
// Check disk for recent uploads
|
|
198
|
+
const recentUpload = await this.findRecentUpload();
|
|
199
|
+
if (recentUpload) {
|
|
200
|
+
this.currentUpload = recentUpload;
|
|
201
|
+
getLogger().info(`[snapshot] Restored upload from disk`, {
|
|
202
|
+
uploadId: recentUpload.uploadId,
|
|
203
|
+
extractedDir: recentUpload.extractedDir,
|
|
204
|
+
});
|
|
205
|
+
}
|
|
206
|
+
return this.currentUpload;
|
|
207
|
+
}
|
|
208
|
+
/**
|
|
209
|
+
* Get detailed information about the uploaded snapshot.
|
|
210
|
+
* Reads superblocks.json for app ID and sessions.json for recording sessions.
|
|
211
|
+
*
|
|
212
|
+
* @returns Snapshot details, or null if no upload exists
|
|
213
|
+
*/
|
|
214
|
+
async getDetails() {
|
|
215
|
+
const upload = await this.getUpload();
|
|
216
|
+
if (!upload) {
|
|
217
|
+
return null;
|
|
218
|
+
}
|
|
219
|
+
const mainContentDir = this.mainDir(upload.extractedDir);
|
|
220
|
+
// Read superblocks.json for app ID
|
|
221
|
+
const superblocksJsonPath = path.join(mainContentDir, ".superblocks", "superblocks.json");
|
|
222
|
+
let appId = null;
|
|
223
|
+
try {
|
|
224
|
+
const content = await fs.readFile(superblocksJsonPath, "utf-8");
|
|
225
|
+
const config = JSON.parse(content);
|
|
226
|
+
appId = config.id || null;
|
|
227
|
+
}
|
|
228
|
+
catch {
|
|
229
|
+
// File missing or invalid
|
|
230
|
+
}
|
|
231
|
+
// Read sessions.json
|
|
232
|
+
const sessionsPath = path.join(mainContentDir, ".superblocks", "recordings", "sessions.json");
|
|
233
|
+
let sessions = [];
|
|
234
|
+
try {
|
|
235
|
+
const content = await fs.readFile(sessionsPath, "utf-8");
|
|
236
|
+
const sessionsList = JSON.parse(content);
|
|
237
|
+
// Get file sizes for each session
|
|
238
|
+
sessions = await Promise.all(sessionsList.map(async (session) => {
|
|
239
|
+
const sessionRecDir = path.join(mainContentDir, ".superblocks", "recordings", session.id);
|
|
240
|
+
const recordingSize = await getFileSize(path.join(sessionRecDir, "recording.jsonl"));
|
|
241
|
+
const snapshotSize = (await getFileSize(path.join(sessionRecDir, "snapshot.zip"))) ??
|
|
242
|
+
(await getFileSize(path.join(sessionRecDir, "snapshot.tar.gz")));
|
|
243
|
+
// Check if session was previously extracted
|
|
244
|
+
const { sessionDir: extractionDir } = await this.getSessionExtractionDir(upload.extractedDir, session.id);
|
|
245
|
+
let meta;
|
|
246
|
+
try {
|
|
247
|
+
await fs.access(extractionDir);
|
|
248
|
+
meta = await this.computeSnapshotMeta(extractionDir);
|
|
249
|
+
}
|
|
250
|
+
catch {
|
|
251
|
+
// Not yet extracted — leave meta undefined
|
|
252
|
+
}
|
|
253
|
+
return {
|
|
254
|
+
id: session.id,
|
|
255
|
+
startedAt: session.startedAt,
|
|
256
|
+
endedAt: session.endedAt,
|
|
257
|
+
startingCommitId: session.startingCommitId,
|
|
258
|
+
currentCommitId: session.currentCommitId,
|
|
259
|
+
recordingFileSize: recordingSize ?? 0,
|
|
260
|
+
snapshotFileSize: snapshotSize,
|
|
261
|
+
meta,
|
|
262
|
+
};
|
|
263
|
+
}));
|
|
264
|
+
}
|
|
265
|
+
catch {
|
|
266
|
+
// Directory missing or invalid
|
|
267
|
+
}
|
|
268
|
+
const meta = await this.computeSnapshotMeta(mainContentDir);
|
|
269
|
+
return { appId, sessions, meta };
|
|
270
|
+
}
|
|
271
|
+
/**
|
|
272
|
+
* Upload and extract an archive (zip or tar.gz).
|
|
273
|
+
*
|
|
274
|
+
* This will clear any previous upload before extracting the new one.
|
|
275
|
+
* The archive should be in the same format as sb-download.
|
|
276
|
+
*
|
|
277
|
+
* @param archiveBuffer - The archive buffer to extract (zip or tar.gz)
|
|
278
|
+
* @returns The new upload state
|
|
279
|
+
*/
|
|
280
|
+
async upload(archiveBuffer) {
|
|
281
|
+
const logger = getLogger();
|
|
282
|
+
// Clean up all pre-existing uploads for this user
|
|
283
|
+
try {
|
|
284
|
+
await fs.rm(this.baseDir, { recursive: true, force: true });
|
|
285
|
+
}
|
|
286
|
+
catch {
|
|
287
|
+
// ignore
|
|
288
|
+
}
|
|
289
|
+
this.currentUpload = null;
|
|
290
|
+
const uploadId = createHash("md5").update(archiveBuffer).digest("hex");
|
|
291
|
+
const uploadDir = path.join(this.baseDir, uploadId);
|
|
292
|
+
const mainExtractDir = this.mainDir(uploadDir);
|
|
293
|
+
logger.info(`[snapshot-upload] Starting extraction to ${mainExtractDir}`, {
|
|
294
|
+
uploadId,
|
|
295
|
+
bufferSize: archiveBuffer.length,
|
|
296
|
+
});
|
|
297
|
+
try {
|
|
298
|
+
await fs.mkdir(mainExtractDir, { recursive: true });
|
|
299
|
+
await this.extractArchive(archiveBuffer, mainExtractDir);
|
|
300
|
+
const uploadState = {
|
|
301
|
+
uploadId,
|
|
302
|
+
extractedDir: uploadDir,
|
|
303
|
+
uploadedAt: new Date(),
|
|
304
|
+
};
|
|
305
|
+
this.currentUpload = uploadState;
|
|
306
|
+
logger.info(`[snapshot-upload] Extraction complete`, {
|
|
307
|
+
uploadId,
|
|
308
|
+
extractedDir: uploadDir,
|
|
309
|
+
});
|
|
310
|
+
return uploadState;
|
|
311
|
+
}
|
|
312
|
+
catch (error) {
|
|
313
|
+
// Clean up on failure
|
|
314
|
+
logger.error(`[snapshot-upload] Extraction failed`, getErrorMeta(error));
|
|
315
|
+
try {
|
|
316
|
+
await fs.rm(uploadDir, { recursive: true, force: true });
|
|
317
|
+
}
|
|
318
|
+
catch (cleanupError) {
|
|
319
|
+
logger.warn(`[snapshot-upload] Failed to cleanup after extraction error`, getErrorMeta(cleanupError));
|
|
320
|
+
}
|
|
321
|
+
throw error;
|
|
322
|
+
}
|
|
323
|
+
}
|
|
324
|
+
/**
|
|
325
|
+
* Clear the current upload and delete extracted files.
|
|
326
|
+
*/
|
|
327
|
+
async clear() {
|
|
328
|
+
if (!this.currentUpload) {
|
|
329
|
+
return;
|
|
330
|
+
}
|
|
331
|
+
const logger = getLogger();
|
|
332
|
+
const { uploadId, extractedDir } = this.currentUpload;
|
|
333
|
+
logger.info(`[snapshot-upload] Clearing upload`, {
|
|
334
|
+
uploadId,
|
|
335
|
+
extractedDir,
|
|
336
|
+
});
|
|
337
|
+
try {
|
|
338
|
+
await fs.rm(extractedDir, { recursive: true, force: true });
|
|
339
|
+
}
|
|
340
|
+
catch (error) {
|
|
341
|
+
logger.warn(`[snapshot-upload] Failed to delete extracted directory`, getErrorMeta(error));
|
|
342
|
+
}
|
|
343
|
+
this.currentUpload = null;
|
|
344
|
+
}
|
|
345
|
+
/**
|
|
346
|
+
* Prepare the source directory for a restore operation.
|
|
347
|
+
*
|
|
348
|
+
* - If no `sessionId` is provided, returns the main upload's extractedDir.
|
|
349
|
+
* - If a `sessionId` is provided, extracts the session's snapshot
|
|
350
|
+
* to a temporary directory and returns that.
|
|
351
|
+
* - Writes the current app's `.superblocks/superblocks.json` from `appRootDir`
|
|
352
|
+
* into the source dir so the restored tree keeps the app's identity/config.
|
|
353
|
+
* - When `sanitizeIntegrations` is true and `availableIntegrationIds` is
|
|
354
|
+
* provided, replaces unrecognised integration steps with javascript stubs.
|
|
355
|
+
*
|
|
356
|
+
* The returned `dispose` function cleans up any temporary directory created
|
|
357
|
+
* for session-based restores. It is a no-op when no temp dir was needed.
|
|
358
|
+
*
|
|
359
|
+
* @param uploadId - The upload ID to restore from (must match current upload)
|
|
360
|
+
* @param sessionId - Optional session ID whose snapshot to extract
|
|
361
|
+
* @param appRootDir - Current app root; its .superblocks/superblocks.json is copied into the source dir
|
|
362
|
+
* @param availableIntegrationIds - Integration IDs available in the current environment
|
|
363
|
+
* @param sanitizeIntegrations - When true, unknown integration steps are replaced with JS stubs
|
|
364
|
+
* @returns The source directory and a dispose callback
|
|
365
|
+
*/
|
|
366
|
+
async prepareRestoreSource(uploadId, sessionId, appRootDir, availableIntegrationIds, sanitizeIntegrations) {
|
|
367
|
+
const logger = getLogger();
|
|
368
|
+
const upload = await this.getUpload();
|
|
369
|
+
if (!upload || upload.uploadId !== uploadId) {
|
|
370
|
+
throw new SnapshotRestoreError("Upload not found or no longer available");
|
|
371
|
+
}
|
|
372
|
+
const writeSuperblocksJsonInto = async (targetDir) => {
|
|
373
|
+
const sjPath = path.join(appRootDir, ".superblocks", "superblocks.json");
|
|
374
|
+
let sjBuffer = null;
|
|
375
|
+
try {
|
|
376
|
+
sjBuffer = await fs.readFile(sjPath);
|
|
377
|
+
}
|
|
378
|
+
catch {
|
|
379
|
+
// May not exist
|
|
380
|
+
}
|
|
381
|
+
if (sjBuffer) {
|
|
382
|
+
const destPath = path.join(targetDir, ".superblocks", "superblocks.json");
|
|
383
|
+
await fs.mkdir(path.dirname(destPath), { recursive: true });
|
|
384
|
+
await fs.writeFile(destPath, new Uint8Array(sjBuffer));
|
|
385
|
+
}
|
|
386
|
+
};
|
|
387
|
+
let sourceDir;
|
|
388
|
+
// No session — use the main extraction subdirectory
|
|
389
|
+
if (!sessionId) {
|
|
390
|
+
sourceDir = this.mainDir(upload.extractedDir);
|
|
391
|
+
}
|
|
392
|
+
else {
|
|
393
|
+
// Session restore — extract to deterministic dir (or reuse)
|
|
394
|
+
const { sessionDir, snapshotPath } = await this.getSessionExtractionDir(upload.extractedDir, sessionId);
|
|
395
|
+
try {
|
|
396
|
+
await fs.access(snapshotPath);
|
|
397
|
+
}
|
|
398
|
+
catch {
|
|
399
|
+
throw new SnapshotRestoreError(`Session snapshot not found for session "${sessionId}"`);
|
|
400
|
+
}
|
|
401
|
+
logger.info(`[snapshot-restore] Extracting session snapshot to ${sessionDir}`, { uploadId, sessionId });
|
|
402
|
+
await this.ensureSessionExtracted(sessionDir, snapshotPath);
|
|
403
|
+
sourceDir = sessionDir;
|
|
404
|
+
}
|
|
405
|
+
await writeSuperblocksJsonInto(sourceDir);
|
|
406
|
+
// Sanitize unknown integration IDs when the caller opts in
|
|
407
|
+
if (availableIntegrationIds && sanitizeIntegrations) {
|
|
408
|
+
const report = await this.sanitizeSourceIntegrations(sourceDir, availableIntegrationIds);
|
|
409
|
+
logger.info("[snapshot-restore] Integration sanitization", report);
|
|
410
|
+
}
|
|
411
|
+
return {
|
|
412
|
+
sourceDir,
|
|
413
|
+
dispose: async () => {
|
|
414
|
+
/* no-op — cleaned up when clear() removes extractedDir */
|
|
415
|
+
},
|
|
416
|
+
};
|
|
417
|
+
}
|
|
418
|
+
/**
|
|
419
|
+
* List immediate subdirectory names under `apis/` and `pages/` within a directory.
|
|
420
|
+
*/
|
|
421
|
+
async computeSnapshotMeta(dir) {
|
|
422
|
+
const listSubdirs = async (subdir) => {
|
|
423
|
+
const fullPath = path.join(dir, subdir);
|
|
424
|
+
try {
|
|
425
|
+
const entries = await fs.readdir(fullPath, { withFileTypes: true });
|
|
426
|
+
return entries
|
|
427
|
+
.filter((e) => e.isDirectory())
|
|
428
|
+
.map((e) => e.name)
|
|
429
|
+
.sort();
|
|
430
|
+
}
|
|
431
|
+
catch {
|
|
432
|
+
return [];
|
|
433
|
+
}
|
|
434
|
+
};
|
|
435
|
+
const [apiNames, pageNames] = await Promise.all([
|
|
436
|
+
listSubdirs("apis"),
|
|
437
|
+
listSubdirs("pages"),
|
|
438
|
+
]);
|
|
439
|
+
return { apiNames, pageNames };
|
|
440
|
+
}
|
|
441
|
+
/**
|
|
442
|
+
* Detect archive format from magic bytes and extract into `destDir`.
|
|
443
|
+
*
|
|
444
|
+
* Accepts either a Buffer (for uploaded archives) or a file path string
|
|
445
|
+
* (for session snapshots on disk). Supports both zip and tar.gz formats.
|
|
446
|
+
*/
|
|
447
|
+
async extractArchive(input, destDir) {
|
|
448
|
+
const magic = typeof input === "string"
|
|
449
|
+
? await readMagicBytes(input)
|
|
450
|
+
: input.subarray(0, 2);
|
|
451
|
+
const isZip = magic[0] === 0x50 && magic[1] === 0x4b;
|
|
452
|
+
const isGzip = magic[0] === 0x1f && magic[1] === 0x8b;
|
|
453
|
+
if (isZip) {
|
|
454
|
+
await extractZip(input, destDir);
|
|
455
|
+
}
|
|
456
|
+
else if (isGzip) {
|
|
457
|
+
const stream = typeof input === "string"
|
|
458
|
+
? createReadStream(input)
|
|
459
|
+
: Readable.from(input);
|
|
460
|
+
await pipeline(stream, createGunzip(), tar.x({ cwd: destDir }));
|
|
461
|
+
}
|
|
462
|
+
else {
|
|
463
|
+
throw new Error(`Unrecognized archive format (magic: 0x${magic[0]?.toString(16)}${magic[1]?.toString(16)})`);
|
|
464
|
+
}
|
|
465
|
+
}
|
|
466
|
+
/**
|
|
467
|
+
* Get the deterministic extraction directory and snapshot path for a session.
|
|
468
|
+
*
|
|
469
|
+
* The snapshot lives inside the main extraction; the session extraction dir
|
|
470
|
+
* is a sibling of `main/` under the upload root. Prefers `snapshot.zip`,
|
|
471
|
+
* falling back to `snapshot.tar.gz` for older snapshots.
|
|
472
|
+
*/
|
|
473
|
+
async getSessionExtractionDir(uploadDir, sessionId) {
|
|
474
|
+
const recDir = path.join(this.mainDir(uploadDir), ".superblocks", "recordings", sessionId);
|
|
475
|
+
const zipPath = path.join(recDir, "snapshot.zip");
|
|
476
|
+
try {
|
|
477
|
+
await fs.access(zipPath);
|
|
478
|
+
return {
|
|
479
|
+
sessionDir: path.join(uploadDir, sessionId),
|
|
480
|
+
snapshotPath: zipPath,
|
|
481
|
+
};
|
|
482
|
+
}
|
|
483
|
+
catch {
|
|
484
|
+
// fall through to tar.gz
|
|
485
|
+
}
|
|
486
|
+
return {
|
|
487
|
+
sessionDir: path.join(uploadDir, sessionId),
|
|
488
|
+
snapshotPath: path.join(recDir, "snapshot.tar.gz"),
|
|
489
|
+
};
|
|
490
|
+
}
|
|
491
|
+
/**
|
|
492
|
+
* Extract a session snapshot to its deterministic directory if not already extracted.
|
|
493
|
+
*/
|
|
494
|
+
async ensureSessionExtracted(sessionDir, snapshotPath) {
|
|
495
|
+
try {
|
|
496
|
+
await fs.access(sessionDir);
|
|
497
|
+
return; // Already extracted
|
|
498
|
+
}
|
|
499
|
+
catch {
|
|
500
|
+
// Need to extract
|
|
501
|
+
}
|
|
502
|
+
await fs.mkdir(sessionDir, { recursive: true });
|
|
503
|
+
await this.extractArchive(snapshotPath, sessionDir);
|
|
504
|
+
}
|
|
505
|
+
/**
|
|
506
|
+
* Analyze a snapshot (or session snapshot) and return metadata about its contents.
|
|
507
|
+
*
|
|
508
|
+
* - If no `sessionId` is provided, scans the main upload's extracted directory.
|
|
509
|
+
* - If a `sessionId` is provided, extracts the session's snapshot to a
|
|
510
|
+
* deterministic directory (reused by prepareRestoreSource) and scans it.
|
|
511
|
+
*
|
|
512
|
+
* @param uploadId - The upload ID to analyze (must match current upload)
|
|
513
|
+
* @param sessionId - Optional session ID whose snapshot to analyze
|
|
514
|
+
* @returns Metadata about the snapshot contents
|
|
515
|
+
*/
|
|
516
|
+
async analyze(uploadId, sessionId) {
|
|
517
|
+
const logger = getLogger();
|
|
518
|
+
const upload = await this.getUpload();
|
|
519
|
+
if (!upload || upload.uploadId !== uploadId) {
|
|
520
|
+
throw new SnapshotRestoreError("Upload not found or no longer available");
|
|
521
|
+
}
|
|
522
|
+
// No session — scan the main extraction subdirectory directly
|
|
523
|
+
if (!sessionId) {
|
|
524
|
+
logger.info("[snapshot-analyze] Analyzing main upload", { uploadId });
|
|
525
|
+
return this.computeSnapshotMeta(this.mainDir(upload.extractedDir));
|
|
526
|
+
}
|
|
527
|
+
// Session — extract to deterministic dir (or reuse if already extracted)
|
|
528
|
+
const { sessionDir, snapshotPath } = await this.getSessionExtractionDir(upload.extractedDir, sessionId);
|
|
529
|
+
try {
|
|
530
|
+
await fs.access(snapshotPath);
|
|
531
|
+
}
|
|
532
|
+
catch {
|
|
533
|
+
throw new SnapshotRestoreError(`Session snapshot not found for session "${sessionId}"`);
|
|
534
|
+
}
|
|
535
|
+
logger.info("[snapshot-analyze] Analyzing session snapshot", {
|
|
536
|
+
uploadId,
|
|
537
|
+
sessionId,
|
|
538
|
+
sessionDir,
|
|
539
|
+
});
|
|
540
|
+
await this.ensureSessionExtracted(sessionDir, snapshotPath);
|
|
541
|
+
const meta = await this.computeSnapshotMeta(sessionDir);
|
|
542
|
+
logger.info("[snapshot-analyze] Analysis complete", {
|
|
543
|
+
uploadId,
|
|
544
|
+
sessionId,
|
|
545
|
+
meta,
|
|
546
|
+
});
|
|
547
|
+
return meta;
|
|
548
|
+
}
|
|
549
|
+
/**
|
|
550
|
+
* Scan the `apis/` directory within a snapshot source, compare the
|
|
551
|
+
* integration IDs found in each API's blocks against a set of IDs
|
|
552
|
+
* available in the current environment, and replace any unrecognised
|
|
553
|
+
* integration steps with a javascript stub.
|
|
554
|
+
*
|
|
555
|
+
* @param sourceDir - Root of the extracted snapshot (contains `apis/`)
|
|
556
|
+
* @param availableIntegrationIds - IDs present in the current environment
|
|
557
|
+
* @returns A report listing the integration IDs that were sanitized
|
|
558
|
+
*/
|
|
559
|
+
async sanitizeSourceIntegrations(sourceDir, availableIntegrationIds) {
|
|
560
|
+
const logger = getLogger();
|
|
561
|
+
logger.info("[snapshot-restore] sanitizeSourceIntegrations", {
|
|
562
|
+
sourceDir,
|
|
563
|
+
});
|
|
564
|
+
const apisDir = path.join(sourceDir, "apis");
|
|
565
|
+
let apiDirs = [];
|
|
566
|
+
try {
|
|
567
|
+
const entries = await fs.readdir(apisDir, { withFileTypes: true });
|
|
568
|
+
apiDirs = entries.filter((e) => e.isDirectory()).map((e) => e.name);
|
|
569
|
+
}
|
|
570
|
+
catch {
|
|
571
|
+
// No apis/ directory — nothing to sanitize
|
|
572
|
+
}
|
|
573
|
+
const allSanitizedIds = new Set();
|
|
574
|
+
for (const apiName of apiDirs) {
|
|
575
|
+
const yamlPath = path.join(apisDir, apiName, "api.yaml");
|
|
576
|
+
try {
|
|
577
|
+
const content = await fs.readFile(yamlPath, "utf-8");
|
|
578
|
+
const parsed = yaml.parse(content);
|
|
579
|
+
const sanitizedIds = sanitizeBlockIntegrations(parsed.blocks ?? [], availableIntegrationIds);
|
|
580
|
+
if (sanitizedIds.size > 0) {
|
|
581
|
+
logger.info(`[snapshot-restore] Sanitized ${sanitizedIds.size} step(s) in "${apiName}"`);
|
|
582
|
+
await fs.writeFile(yamlPath, yaml.stringify(parsed));
|
|
583
|
+
for (const id of sanitizedIds) {
|
|
584
|
+
allSanitizedIds.add(id);
|
|
585
|
+
}
|
|
586
|
+
}
|
|
587
|
+
}
|
|
588
|
+
catch (error) {
|
|
589
|
+
logger.warn(`[snapshot-restore] Failed to parse api.yaml for "${apiName}"`, getErrorMeta(error));
|
|
590
|
+
}
|
|
591
|
+
}
|
|
592
|
+
return {
|
|
593
|
+
sanitizedIntegrationIds: [...allSanitizedIds],
|
|
594
|
+
};
|
|
595
|
+
}
|
|
596
|
+
/**
|
|
597
|
+
* Restore snapshot files into `appRootDir`.
|
|
598
|
+
*
|
|
599
|
+
* Clears everything in `appRootDir` except node_modules, .git, build, then
|
|
600
|
+
* copies the prepared source directory (which already has the correct
|
|
601
|
+
* .superblocks/superblocks.json from prepareRestoreSource) into app root.
|
|
602
|
+
*/
|
|
603
|
+
async copySnapshotToAppRoot(sourceDir, appRootDir) {
|
|
604
|
+
const logger = getLogger();
|
|
605
|
+
const SKIP_DIRS = new Set(["node_modules", ".git", "build"]);
|
|
606
|
+
logger.info(`[snapshot-restore] Clearing app root`, { appRootDir });
|
|
607
|
+
const entries = await fs.readdir(appRootDir, {
|
|
608
|
+
withFileTypes: true,
|
|
609
|
+
});
|
|
610
|
+
for (const entry of entries) {
|
|
611
|
+
if (SKIP_DIRS.has(entry.name))
|
|
612
|
+
continue;
|
|
613
|
+
await fs.rm(path.join(appRootDir, entry.name), {
|
|
614
|
+
recursive: true,
|
|
615
|
+
force: true,
|
|
616
|
+
});
|
|
617
|
+
}
|
|
618
|
+
logger.info(`[snapshot-restore] Copying snapshot to app root`, {
|
|
619
|
+
sourceDir,
|
|
620
|
+
appRootDir,
|
|
621
|
+
});
|
|
622
|
+
await fs.cp(sourceDir, appRootDir, { recursive: true });
|
|
623
|
+
logger.info(`[snapshot-restore] Copy complete`);
|
|
624
|
+
}
|
|
625
|
+
/**
|
|
626
|
+
* Write a tracking file that records a pending snapshot restore.
|
|
627
|
+
* On the next dev-server startup, `executePendingRestore` will read it
|
|
628
|
+
* and perform the actual restore.
|
|
629
|
+
*/
|
|
630
|
+
async createPendingRestore(appRootDir, uploadId, sourceDir) {
|
|
631
|
+
const logger = getLogger();
|
|
632
|
+
const trackingFilePath = path.join(appRootDir, TRACKING_FILE_REL_PATH);
|
|
633
|
+
const tracking = {
|
|
634
|
+
uploadId,
|
|
635
|
+
sourceDir,
|
|
636
|
+
restoredAt: null,
|
|
637
|
+
};
|
|
638
|
+
await fs.mkdir(path.dirname(trackingFilePath), { recursive: true });
|
|
639
|
+
await fs.writeFile(trackingFilePath, JSON.stringify(tracking, null, 2));
|
|
640
|
+
logger.info("[snapshot-restore] Created pending restore tracking file", {
|
|
641
|
+
uploadId,
|
|
642
|
+
sourceDir,
|
|
643
|
+
});
|
|
644
|
+
}
|
|
645
|
+
/**
|
|
646
|
+
* Check for a pending snapshot restore and execute it if found.
|
|
647
|
+
*
|
|
648
|
+
* Note: `copySnapshotToAppRoot` clears the app root, so the tracking data
|
|
649
|
+
* is read into memory first and re-written with a `restoredAt` timestamp
|
|
650
|
+
* after the copy.
|
|
651
|
+
*
|
|
652
|
+
* @returns `true` if a restore was performed, `false` otherwise
|
|
653
|
+
*/
|
|
654
|
+
async executePendingRestore(appRootDir) {
|
|
655
|
+
const logger = getLogger();
|
|
656
|
+
const trackingFilePath = path.join(appRootDir, TRACKING_FILE_REL_PATH);
|
|
657
|
+
let tracking;
|
|
658
|
+
try {
|
|
659
|
+
const raw = await fs.readFile(trackingFilePath, "utf-8");
|
|
660
|
+
tracking = JSON.parse(raw);
|
|
661
|
+
}
|
|
662
|
+
catch {
|
|
663
|
+
return false;
|
|
664
|
+
}
|
|
665
|
+
if (tracking.restoredAt) {
|
|
666
|
+
return false;
|
|
667
|
+
}
|
|
668
|
+
try {
|
|
669
|
+
await fs.access(tracking.sourceDir);
|
|
670
|
+
}
|
|
671
|
+
catch {
|
|
672
|
+
logger.warn("[snapshot-restore] Source directory from tracking file no longer exists", tracking.sourceDir);
|
|
673
|
+
return false;
|
|
674
|
+
}
|
|
675
|
+
logger.info("[snapshot-restore] Executing pending restore", {
|
|
676
|
+
uploadId: tracking.uploadId,
|
|
677
|
+
sourceDir: tracking.sourceDir,
|
|
678
|
+
});
|
|
679
|
+
await this.copySnapshotToAppRoot(tracking.sourceDir, appRootDir);
|
|
680
|
+
// Re-write tracking file with timestamp (the copy above wiped it)
|
|
681
|
+
const restoredAt = new Date().toLocaleString("en-US", {
|
|
682
|
+
dateStyle: "long",
|
|
683
|
+
timeStyle: "long",
|
|
684
|
+
});
|
|
685
|
+
const updatedTracking = {
|
|
686
|
+
...tracking,
|
|
687
|
+
restoredAt,
|
|
688
|
+
};
|
|
689
|
+
await fs.mkdir(path.dirname(trackingFilePath), { recursive: true });
|
|
690
|
+
await fs.writeFile(trackingFilePath, JSON.stringify(updatedTracking, null, 2));
|
|
691
|
+
logger.info("[snapshot-restore] Restore complete", {
|
|
692
|
+
uploadId: tracking.uploadId,
|
|
693
|
+
restoredAt,
|
|
694
|
+
});
|
|
695
|
+
return true;
|
|
696
|
+
}
|
|
697
|
+
}
|
|
698
|
+
/**
 * Error thrown when a restore operation fails due to a user-facing issue
 * (e.g. upload not found, session snapshot missing).
 */
export class SnapshotRestoreError extends Error {
    /**
     * @param {string} message - Human-readable description of the failure
     * @param {{ cause?: unknown }} [options] - Optional standard Error options;
     *   pass `cause` to preserve the underlying error when wrapping. Omitting
     *   it keeps the original single-argument behavior.
     */
    constructor(message, options) {
        super(message, options);
        this.name = "SnapshotRestoreError";
    }
}
|
|
708
|
+
//# sourceMappingURL=snapshot-manager.js.map
|