@reshotdev/screenshot 0.0.1-beta.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/LICENSE +190 -0
- package/README.md +388 -0
- package/package.json +64 -0
- package/src/commands/auth.js +259 -0
- package/src/commands/chrome.js +140 -0
- package/src/commands/ci-run.js +123 -0
- package/src/commands/ci-setup.js +288 -0
- package/src/commands/drifts.js +423 -0
- package/src/commands/import-tests.js +309 -0
- package/src/commands/ingest.js +458 -0
- package/src/commands/init.js +633 -0
- package/src/commands/publish.js +1721 -0
- package/src/commands/pull.js +303 -0
- package/src/commands/record.js +94 -0
- package/src/commands/run.js +476 -0
- package/src/commands/setup-wizard.js +740 -0
- package/src/commands/setup.js +137 -0
- package/src/commands/status.js +275 -0
- package/src/commands/sync.js +621 -0
- package/src/commands/ui.js +248 -0
- package/src/commands/validate-docs.js +529 -0
- package/src/index.js +462 -0
- package/src/lib/api-client.js +815 -0
- package/src/lib/capture-engine.js +1623 -0
- package/src/lib/capture-script-runner.js +3120 -0
- package/src/lib/ci-detect.js +137 -0
- package/src/lib/config.js +1240 -0
- package/src/lib/diff-engine.js +642 -0
- package/src/lib/hash.js +74 -0
- package/src/lib/image-crop.js +396 -0
- package/src/lib/matrix.js +89 -0
- package/src/lib/output-path-template.js +318 -0
- package/src/lib/playwright-runner.js +252 -0
- package/src/lib/polished-clip.js +553 -0
- package/src/lib/privacy-engine.js +408 -0
- package/src/lib/progress-tracker.js +142 -0
- package/src/lib/record-browser-injection.js +654 -0
- package/src/lib/record-cdp.js +612 -0
- package/src/lib/record-clip.js +343 -0
- package/src/lib/record-config.js +623 -0
- package/src/lib/record-screenshot.js +360 -0
- package/src/lib/record-terminal.js +123 -0
- package/src/lib/recorder-service.js +781 -0
- package/src/lib/secrets.js +51 -0
- package/src/lib/selector-strategies.js +859 -0
- package/src/lib/standalone-mode.js +400 -0
- package/src/lib/storage-providers.js +569 -0
- package/src/lib/style-engine.js +684 -0
- package/src/lib/ui-api.js +4677 -0
- package/src/lib/ui-assets.js +373 -0
- package/src/lib/ui-executor.js +587 -0
- package/src/lib/variant-injector.js +591 -0
- package/src/lib/viewport-presets.js +454 -0
- package/src/lib/worker-pool.js +118 -0
- package/web/cropper/index.html +436 -0
- package/web/manager/dist/assets/index--ZgioErz.js +507 -0
- package/web/manager/dist/assets/index-n468W0Wr.css +1 -0
- package/web/manager/dist/index.html +27 -0
- package/web/subtitle-editor/index.html +295 -0
|
@@ -0,0 +1,621 @@
|
|
|
1
|
+
// sync.js - Upload Playwright traces and documentation for processing
|
|
2
|
+
// This is the renamed/enhanced version of ingest.js
|
|
3
|
+
// Implements the "Smart Handoff" protocol from the DocSync specification
|
|
4
|
+
|
|
5
|
+
const chalk = require("chalk");
|
|
6
|
+
const crypto = require("crypto");
|
|
7
|
+
const fs = require("fs-extra");
|
|
8
|
+
const path = require("path");
|
|
9
|
+
const { execSync } = require("child_process");
|
|
10
|
+
const config = require("../lib/config");
|
|
11
|
+
const apiClient = require("../lib/api-client");
|
|
12
|
+
const { hashFile } = require("../lib/hash");
|
|
13
|
+
const pkg = require("../../package.json");
|
|
14
|
+
|
|
15
|
+
// File extension allowlists.
// Playwright writes traces as .zip archives; documentation is plain or MDX markdown.
const TRACE_EXTENSIONS = [".zip"];
const DOC_EXTENSIONS = [".md", ".mdx"];
// Per-file size ceilings. Oversized files are skipped with a console warning
// during discovery rather than failing the whole sync.
const MAX_DOC_SIZE = 5 * 1024 * 1024; // 5MB per doc file
const MAX_TRACE_SIZE = 100 * 1024 * 1024; // 100MB per trace
|
|
20
|
+
|
|
21
|
+
/**
 * Parse YAML-style frontmatter from a markdown document.
 *
 * Only flat `key: value` pairs are supported (no nesting, lists, or
 * multi-line values). A single pair of surrounding single or double
 * quotes around a value is stripped.
 *
 * Fix: the previous regex and line split only accepted LF line endings,
 * so frontmatter in CRLF (Windows-authored) files was silently ignored.
 *
 * @param {string} content - Raw markdown file contents.
 * @returns {{frontmatter: Object<string, string>, content: string}}
 *   Parsed key/value map plus the body with the frontmatter block (and
 *   surrounding whitespace) removed. When no frontmatter is present,
 *   `frontmatter` is empty and `content` is returned unchanged.
 */
function parseFrontmatter(content) {
  // \r?\n accepts both LF and CRLF delimiters.
  const frontmatterRegex = /^---\r?\n([\s\S]*?)\r?\n---/;
  const match = content.match(frontmatterRegex);

  if (!match) return { frontmatter: {}, content };

  const frontmatter = {};
  const lines = match[1].split(/\r?\n/);

  for (const line of lines) {
    const colonIndex = line.indexOf(":");
    // colonIndex > 0 skips malformed lines and lines that start with ":".
    if (colonIndex > 0) {
      const key = line.slice(0, colonIndex).trim();
      let value = line.slice(colonIndex + 1).trim();
      // Strip one matching pair of surrounding quotes, if any.
      if (
        (value.startsWith('"') && value.endsWith('"')) ||
        (value.startsWith("'") && value.endsWith("'"))
      ) {
        value = value.slice(1, -1);
      }
      frontmatter[key] = value;
    }
  }

  return {
    frontmatter,
    content: content.slice(match[0].length).trim(),
  };
}
|
|
53
|
+
|
|
54
|
+
/**
 * Discover documentation files bound to a journey under the configured
 * docs root.
 *
 * A file is collected when it has a .md/.mdx extension, matches one of
 * the `include` globs, is not excluded, and is bound to a journey —
 * either via `reshot_journey:` frontmatter or an entry in
 * `docConfig.mappings` keyed by relative or absolute path.
 *
 * Fixes over the previous version:
 *  - `include` patterns were read from config but never applied; they
 *    are enforced now.
 *  - Glob-to-regex conversion escapes regex metacharacters and anchors
 *    the pattern, so "." in "*.md" only matches a literal dot.
 *  - "**" followed by "/" is treated as optional, so "**" + "/*.md"
 *    style patterns also match files directly under the docs root.
 *  - Paths are normalized to forward slashes before matching, so the
 *    same globs behave identically on Windows.
 *
 * @param {Object} docConfig - `documentation` section of the docsync config.
 * @param {string} projectRoot - Absolute project root directory.
 * @returns {Promise<Array<Object>>} Descriptors with `path`,
 *   `relativePath`, `journeyKey`, `contentHash` (sha256 hex), `size`,
 *   and parsed `frontmatter`.
 */
async function discoverDocumentation(docConfig, projectRoot) {
  const files = [];
  const root = path.resolve(projectRoot, docConfig.root || "./docs");

  if (!fs.existsSync(root)) {
    return files;
  }

  const include = docConfig.include || ["**/*.md", "**/*.mdx"];
  const exclude = docConfig.exclude || ["**/_*.mdx", "node_modules"];
  const mappings = docConfig.mappings || {};

  // Convert a glob pattern to an anchored RegExp.
  function globToRegex(pattern) {
    const escaped = pattern.replace(/[.+^${}()|[\]\\]/g, "\\$&");
    const body = escaped
      .replace(/\*\*\//g, "(?:.*/)?")
      .replace(/\*\*/g, ".*")
      .replace(/\*/g, "[^/]*");
    return new RegExp(`^${body}$`);
  }

  const includeRegexes = include.map(globToRegex);

  function walkDir(dir, relativePath = "") {
    const items = fs.readdirSync(dir);

    for (const item of items) {
      const fullPath = path.join(dir, item);
      const relPath = path.join(relativePath, item);
      // Forward-slash form used for all glob matching (Windows-safe).
      const matchPath = relPath.split(path.sep).join("/");
      const stat = fs.statSync(fullPath);

      const shouldExclude = exclude.some((pattern) => {
        if (pattern.includes("*")) {
          return globToRegex(pattern).test(matchPath);
        }
        // Literal patterns (e.g. "node_modules") keep the original lax
        // semantics: substring of the path or exact entry name.
        return matchPath.includes(pattern) || item === pattern;
      });

      if (shouldExclude) continue;

      if (stat.isDirectory()) {
        walkDir(fullPath, relPath);
      } else {
        const ext = path.extname(item).toLowerCase();
        if (
          DOC_EXTENSIONS.includes(ext) &&
          includeRegexes.some((re) => re.test(matchPath))
        ) {
          const content = fs.readFileSync(fullPath, "utf-8");
          const { frontmatter } = parseFrontmatter(content);

          // Frontmatter wins; mappings may be keyed by OS-relative path,
          // absolute path, or (new, backward-compatible) slash-normalized
          // relative path.
          const journeyKey =
            frontmatter.reshot_journey ||
            mappings[relPath] ||
            mappings[fullPath] ||
            mappings[matchPath];

          if (journeyKey) {
            const fileSize = stat.size;

            if (fileSize > MAX_DOC_SIZE) {
              console.log(
                chalk.yellow(` ⚠ Skipping ${relPath}: exceeds size limit`),
              );
              continue;
            }

            files.push({
              path: fullPath,
              relativePath: relPath,
              journeyKey,
              contentHash: crypto
                .createHash("sha256")
                .update(content)
                .digest("hex"),
              size: fileSize,
              frontmatter,
            });
          }
        }
      }
    }
  }

  walkDir(root);
  return files;
}
|
|
132
|
+
|
|
133
|
+
/**
 * Derive a journey key from a Playwright test-results directory name.
 *
 * Playwright emits directories like "describe-test-name-chromium" and
 * truncates long names, inserting short hashes (e.g. "s-83f3d-" in
 * place of "should-"). This routine normalizes the directory name and
 * fuzzily matches it against configured journey mappings before falling
 * back to a "prefix/suffix" key derived from the name itself.
 *
 * @param {string} dirName - Raw test-results directory name.
 * @param {Object<string, string>} [journeyMappings] - Map of test-name
 *   pattern to journey key.
 * @returns {string} The matched or derived journey key.
 */
function extractJourneyKey(dirName, journeyMappings = {}) {
  // Drop a trailing browser/project suffix; array order mirrors the
  // original check order (plain "-chromium" tried before variants).
  const suffixes = [
    "-chromium",
    "-firefox",
    "-webkit",
    "-chromium-light",
    "-chromium-dark",
  ];
  const trailing = suffixes.find((s) => dirName.endsWith(s));
  const stripped = trailing ? dirName.slice(0, -trailing.length) : dirName;

  // Normalize: lowercase, drop a leading "NN-" file-number prefix,
  // collapse hyphen-bounded 5-6 char truncation hashes, and expand the
  // common "-s-" truncation back to "-should-".
  const normalized = stripped
    .toLowerCase()
    .replace(/^\d+-/, "")
    .replace(/-[a-z0-9]{5,6}-/g, "-")
    .replace(/-s-/g, "-should-");

  const words = normalized.split("-").filter((w) => w.length > 0);

  // All joined windows of up to four consecutive entries, started
  // wherever at least three entries remain.
  const windowsOf = (list) => {
    const out = [];
    for (let start = 0; start + 3 <= list.length; start++) {
      out.push(list.slice(start, start + 4).join("-"));
    }
    return out;
  };

  const nameWindows = windowsOf(words);

  for (const [pattern, journeyKey] of Object.entries(journeyMappings)) {
    const patternWords = pattern.toLowerCase().split("-");
    const patternWindows = windowsOf(patternWords);

    // 1) Consecutive-word overlap between the name and the pattern.
    const consecutiveHit = nameWindows.some((w) =>
      patternWindows.some(
        (pw) => w === pw || w.includes(pw) || pw.includes(w),
      ),
    );
    if (consecutiveHit) return journeyKey;

    // 2) Fuzzy overlap of "meaningful" (4+ char) words: accept when at
    // least half of the pattern's meaningful words (and no fewer than
    // two) appear in the name.
    const keyWords = patternWords.filter((w) => w.length >= 4);
    const nameKeyWords = words.filter((w) => w.length >= 4);
    const shared = keyWords.filter((w) =>
      nameKeyWords.some((nw) => nw.includes(w) || w.includes(nw)),
    );
    if (
      shared.length >= Math.ceil(keyWords.length * 0.5) &&
      shared.length >= 2
    ) {
      return journeyKey;
    }
  }

  // Fallback: derive "prefix/suffix" from the normalized name. A
  // surviving "should" marks the boundary; otherwise split after the
  // first two words.
  if (words.length > 2) {
    const shouldAt = words.indexOf("should");
    const head = words.slice(0, shouldAt > 0 ? shouldAt : 2).join("-");
    const tail = words.slice(shouldAt > 0 ? shouldAt + 1 : 2).join("-");
    return `${head}/${tail}`;
  }

  return words.join("-");
}
|
|
237
|
+
|
|
238
|
+
/**
 * Recursively collect Playwright trace archives under `traceDir`.
 *
 * A file qualifies when its extension is in TRACE_EXTENSIONS and its
 * name contains "trace". Files over MAX_TRACE_SIZE are skipped with a
 * warning. The journey key is derived from the first path segment below
 * `traceDir` (or the archive's base name for files at the top level).
 *
 * @param {string} traceDir - Absolute directory to scan.
 * @param {Object<string, string>} [journeyMappings] - Passed through to
 *   extractJourneyKey for fuzzy matching.
 * @returns {Promise<Array<Object>>} Descriptors with path, filename,
 *   relativePath, journeyKey, parentDir, contentHash (sha256 hex), size.
 */
async function discoverTraces(traceDir, journeyMappings = {}) {
  const traces = [];

  if (!fs.existsSync(traceDir)) {
    return traces;
  }

  const visit = (dir) => {
    for (const entry of fs.readdirSync(dir)) {
      const entryPath = path.join(dir, entry);
      const info = fs.statSync(entryPath);

      if (info.isDirectory()) {
        visit(entryPath);
        continue;
      }

      const extension = path.extname(entry).toLowerCase();
      if (!TRACE_EXTENSIONS.includes(extension) || !entry.includes("trace")) {
        continue;
      }

      const byteSize = info.size;
      if (byteSize > MAX_TRACE_SIZE) {
        console.log(
          chalk.yellow(` ⚠ Skipping ${entry}: exceeds size limit`),
        );
        continue;
      }

      // Journey key comes from the top-level results directory name;
      // top-level zips fall back to their own base name.
      const relativePath = path.relative(traceDir, entryPath);
      const segments = relativePath.split(path.sep);
      const parentDir =
        segments.length > 1 ? segments[0] : path.basename(entry, ".zip");
      const journeyKey = extractJourneyKey(parentDir, journeyMappings);

      // Whole-file sha256 used for server-side dedupe / caching.
      const contentHash = crypto
        .createHash("sha256")
        .update(fs.readFileSync(entryPath))
        .digest("hex");

      traces.push({
        path: entryPath,
        filename: path.basename(entryPath),
        relativePath,
        journeyKey,
        parentDir,
        contentHash,
        size: byteSize,
      });
    }
  };

  visit(traceDir);
  return traces;
}
|
|
300
|
+
|
|
301
|
+
/**
 * Collect git metadata for the current working directory's repository.
 *
 * @returns {{commitHash: string, branch: string, commitMessage: string}}
 *   Head commit hash, current branch, and full commit message; when git
 *   is unavailable or cwd is not a repository, "unknown"/"unknown"/"".
 */
function getGitMetadata() {
  const run = (cmd) => execSync(cmd, { encoding: "utf8" }).trim();

  try {
    return {
      commitHash: run("git rev-parse HEAD"),
      branch: run("git rev-parse --abbrev-ref HEAD"),
      commitMessage: run("git log -1 --pretty=%B"),
    };
  } catch (error) {
    // Any git failure (missing binary, not a repo) degrades to placeholders.
    return { commitHash: "unknown", branch: "unknown", commitMessage: "" };
  }
}
|
|
321
|
+
|
|
322
|
+
/**
 * Main sync command: discovers Playwright traces and journey-bound docs,
 * then uploads both to the Reshot platform via a three-step protocol
 * (manifest init → presigned uploads → commit).
 *
 * Exits the process with code 1 on missing config, missing credentials,
 * or any upload failure.
 *
 * @param {Object} options - Command options
 * @param {string} options.traceDir - Override trace directory
 * @param {boolean} options.traces - Sync traces only
 * @param {boolean} options.docs - Sync documentation only
 * @param {boolean} options.dryRun - Preview without uploading
 * @param {boolean} options.verbose - Show detailed output
 */
async function syncCommand(options = {}) {
  const {
    traceDir: traceDirOverride,
    traces: tracesOnly = false,
    docs: docsOnly = false,
    dryRun = false,
    verbose = false,
  } = options;

  console.log(chalk.cyan.bold("\n🔄 Reshot Sync\n"));

  // Load configuration (use readDocSyncConfig which is less strict about scenarios)
  let docSyncConfig;
  try {
    docSyncConfig = config.readDocSyncConfig();
  } catch (error) {
    console.error(chalk.red("✖ No docsync.config.json found."));
    console.log(
      chalk.gray(" Run"),
      chalk.cyan("reshot setup"),
      chalk.gray("to initialize.\n"),
    );
    process.exit(1);
  }

  // Load settings for API access; missing settings are tolerated here
  // because projectId may come from the docsync config instead.
  let settings;
  try {
    settings = config.readSettings();
  } catch {
    settings = null;
  }

  const projectId = docSyncConfig.projectId || settings?.projectId;
  const apiKey = settings?.apiKey;

  if (!projectId || !apiKey) {
    console.error(chalk.red("✖ Not authenticated with Reshot Cloud."));
    console.log(
      chalk.gray(" Run"),
      chalk.cyan("reshot setup"),
      chalk.gray("to connect.\n"),
    );
    process.exit(1);
  }

  // Feature flags default to everything-on when metadata is absent.
  const features = docSyncConfig._metadata?.features || {
    visuals: true,
    docsync: true,
  };
  const projectRoot = process.cwd();

  // Determine what to sync. Docs additionally require a `documentation`
  // section in the config.
  const syncTraces = !docsOnly && features.visuals !== false;
  const syncDocs =
    !tracesOnly && features.docsync !== false && docSyncConfig.documentation;

  if (dryRun) {
    console.log(chalk.yellow("DRY RUN - No files will be uploaded\n"));
  }

  let traceFiles = [];
  let docFiles = [];

  // ========================================
  // PHASE 1: Discover Traces
  // ========================================
  if (syncTraces) {
    const traceDir =
      traceDirOverride || docSyncConfig.visuals?.traceDir || "./test-results";
    const resolvedTraceDir = path.resolve(projectRoot, traceDir);
    const journeyMappings = docSyncConfig.visuals?.journeyMappings || {};

    console.log(chalk.gray(`Scanning traces: ${traceDir}`));
    traceFiles = await discoverTraces(resolvedTraceDir, journeyMappings);

    if (traceFiles.length === 0) {
      console.log(chalk.yellow(" No trace files found"));
      if (!fs.existsSync(resolvedTraceDir)) {
        console.log(
          chalk.gray(" Run your Playwright tests first: npx playwright test"),
        );
      }
    } else {
      console.log(chalk.green(` Found ${traceFiles.length} trace file(s)`));
      if (verbose) {
        for (const trace of traceFiles) {
          console.log(
            chalk.gray(` → ${trace.parentDir || trace.relativePath}`),
          );
          console.log(chalk.gray(` journey: ${trace.journeyKey}`));
        }
      }
    }
    console.log();
  }

  // ========================================
  // PHASE 2: Discover Documentation
  // ========================================
  if (syncDocs) {
    console.log(
      chalk.gray(`Scanning docs: ${docSyncConfig.documentation.root}`),
    );
    docFiles = await discoverDocumentation(
      docSyncConfig.documentation,
      projectRoot,
    );

    if (docFiles.length === 0) {
      console.log(chalk.yellow(" No bound documentation files found"));
      console.log(
        chalk.gray(" Add reshot_journey frontmatter to your markdown files"),
      );
    } else {
      console.log(chalk.green(` Found ${docFiles.length} bound doc file(s)`));
      if (verbose) {
        for (const doc of docFiles) {
          console.log(
            chalk.gray(` → ${doc.relativePath} → ${doc.journeyKey}`),
          );
        }
      }
    }
    console.log();
  }

  // ========================================
  // PHASE 3: Upload to Platform
  // ========================================
  if (dryRun) {
    console.log(chalk.cyan("━━━ Dry Run Summary ━━━\n"));
    console.log(chalk.gray(` Traces: ${traceFiles.length} files`));
    console.log(chalk.gray(` Docs: ${docFiles.length} files`));
    console.log(chalk.yellow("\nNo files uploaded (dry run).\n"));
    return;
  }

  if (traceFiles.length === 0 && docFiles.length === 0) {
    console.log(chalk.yellow("Nothing to sync.\n"));
    return;
  }

  console.log(chalk.cyan("━━━ Uploading ━━━\n"));

  try {
    // Initialize sync job with manifest. The server responds with
    // presigned URLs keyed by contentHash, plus hashes it already has.
    const manifest = {
      docs: docFiles.map((d) => ({
        relativePath: d.relativePath,
        journeyKey: d.journeyKey,
        contentHash: d.contentHash,
        size: d.size,
      })),
      traces: traceFiles.map((t) => ({
        filename: t.filename,
        journeyKey: t.journeyKey,
        contentHash: t.contentHash,
        size: t.size,
      })),
    };

    const initResponse = await apiClient.post(
      "/v1/ingest/init",
      {
        projectId,
        manifest,
      },
      {
        headers: { Authorization: `Bearer ${apiKey}` },
      },
    );

    const presignedUrls = initResponse.presignedUrls || {};
    const skippedFiles = initResponse.skippedFiles || [];

    if (verbose) {
      console.log(chalk.gray(` Manifest acknowledged`));
      console.log(
        chalk.gray(` Presigned URLs: ${Object.keys(presignedUrls).length}`),
      );
      console.log(chalk.gray(` Skipped files: ${skippedFiles.length}`));
      if (skippedFiles.length > 0) {
        console.log(
          chalk.gray(` ${skippedFiles.length} file(s) unchanged (cached)`),
        );
      }
    }

    // Upload traces to presigned URLs. Server-side cache hits count as
    // uploaded. NOTE(review): a file with neither a presigned URL nor a
    // skip entry is silently dropped from the count — confirm the server
    // always returns one or the other for every manifest entry.
    let tracesUploaded = 0;
    for (const trace of traceFiles) {
      if (skippedFiles.includes(trace.contentHash)) {
        if (verbose) {
          console.log(chalk.gray(` ⊘ ${trace.journeyKey} (cached)`));
        }
        tracesUploaded++;
        continue;
      }

      const presigned = presignedUrls[trace.contentHash];
      if (presigned) {
        if (verbose) {
          console.log(chalk.gray(` → uploading to: ${presigned.url}`));
        }
        const content = fs.readFileSync(trace.path);
        // NOTE(review): a bearer Authorization header is sent to the
        // presigned URL; presigned URLs usually embed auth themselves —
        // confirm the storage backend expects this header.
        await apiClient.uploadToPresignedUrl(presigned.url, content, {
          contentType: presigned.contentType || "application/zip",
          headers: { Authorization: `Bearer ${apiKey}` },
        });
        tracesUploaded++;
        if (verbose) {
          console.log(chalk.gray(` ✔ ${trace.journeyKey}`));
        }
      }
    }

    // Upload docs to presigned URLs (same skip/presign protocol as traces).
    let docsUploaded = 0;
    for (const doc of docFiles) {
      if (skippedFiles.includes(doc.contentHash)) {
        if (verbose) {
          console.log(chalk.gray(` ⊘ ${doc.relativePath} (cached)`));
        }
        docsUploaded++;
        continue;
      }

      const presigned = presignedUrls[doc.contentHash];
      if (presigned) {
        const content = fs.readFileSync(doc.path, "utf-8");
        await apiClient.uploadToPresignedUrl(presigned.url, content, {
          contentType: presigned.contentType || "text/markdown",
          headers: { Authorization: `Bearer ${apiKey}` },
        });
        docsUploaded++;
        if (verbose) {
          console.log(chalk.gray(` ✔ ${doc.relativePath}`));
        }
      }
    }

    // Get git metadata (best-effort; placeholders when unavailable).
    const git = getGitMetadata();

    // Commit the sync job: echo every manifest entry back with the
    // storage key the server assigned (falling back to the content hash).
    await apiClient.post(
      "/v1/ingest/commit",
      {
        projectId,
        uploadResults: {
          docs: docFiles.map((d) => ({
            relativePath: d.relativePath,
            journeyKey: d.journeyKey,
            storageKey:
              presignedUrls[d.contentHash]?.storageKey || d.contentHash,
          })),
          traces: traceFiles.map((t) => ({
            filename: t.filename,
            journeyKey: t.journeyKey,
            storageKey:
              presignedUrls[t.contentHash]?.storageKey || t.contentHash,
          })),
        },
        git,
        cli: {
          version: pkg.version,
          timestamp: new Date().toISOString(),
        },
      },
      {
        headers: { Authorization: `Bearer ${apiKey}` },
      },
    );

    console.log(chalk.green(`\n✔ Sync complete!`));
    console.log(chalk.gray(` Traces: ${tracesUploaded} uploaded`));
    console.log(chalk.gray(` Docs: ${docsUploaded} uploaded`));
    console.log();
    console.log(chalk.gray(" View status:"), chalk.cyan("reshot status"));
    console.log(chalk.gray(" Check drifts:"), chalk.cyan("reshot drifts\n"));
  } catch (error) {
    // Any init/upload/commit failure aborts the command.
    console.error(chalk.red("\n✖ Sync failed:"), error.message);
    if (verbose && error.stack) {
      console.error(chalk.gray(error.stack));
    }
    process.exit(1);
  }
}
|
|
620
|
+
|
|
621
|
+
// Sole export: the `reshot sync` command handler.
module.exports = syncCommand;
|