@reshotdev/screenshot 0.0.1-beta.0

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (59)
  1. package/LICENSE +190 -0
  2. package/README.md +388 -0
  3. package/package.json +64 -0
  4. package/src/commands/auth.js +259 -0
  5. package/src/commands/chrome.js +140 -0
  6. package/src/commands/ci-run.js +123 -0
  7. package/src/commands/ci-setup.js +288 -0
  8. package/src/commands/drifts.js +423 -0
  9. package/src/commands/import-tests.js +309 -0
  10. package/src/commands/ingest.js +458 -0
  11. package/src/commands/init.js +633 -0
  12. package/src/commands/publish.js +1721 -0
  13. package/src/commands/pull.js +303 -0
  14. package/src/commands/record.js +94 -0
  15. package/src/commands/run.js +476 -0
  16. package/src/commands/setup-wizard.js +740 -0
  17. package/src/commands/setup.js +137 -0
  18. package/src/commands/status.js +275 -0
  19. package/src/commands/sync.js +621 -0
  20. package/src/commands/ui.js +248 -0
  21. package/src/commands/validate-docs.js +529 -0
  22. package/src/index.js +462 -0
  23. package/src/lib/api-client.js +815 -0
  24. package/src/lib/capture-engine.js +1623 -0
  25. package/src/lib/capture-script-runner.js +3120 -0
  26. package/src/lib/ci-detect.js +137 -0
  27. package/src/lib/config.js +1240 -0
  28. package/src/lib/diff-engine.js +642 -0
  29. package/src/lib/hash.js +74 -0
  30. package/src/lib/image-crop.js +396 -0
  31. package/src/lib/matrix.js +89 -0
  32. package/src/lib/output-path-template.js +318 -0
  33. package/src/lib/playwright-runner.js +252 -0
  34. package/src/lib/polished-clip.js +553 -0
  35. package/src/lib/privacy-engine.js +408 -0
  36. package/src/lib/progress-tracker.js +142 -0
  37. package/src/lib/record-browser-injection.js +654 -0
  38. package/src/lib/record-cdp.js +612 -0
  39. package/src/lib/record-clip.js +343 -0
  40. package/src/lib/record-config.js +623 -0
  41. package/src/lib/record-screenshot.js +360 -0
  42. package/src/lib/record-terminal.js +123 -0
  43. package/src/lib/recorder-service.js +781 -0
  44. package/src/lib/secrets.js +51 -0
  45. package/src/lib/selector-strategies.js +859 -0
  46. package/src/lib/standalone-mode.js +400 -0
  47. package/src/lib/storage-providers.js +569 -0
  48. package/src/lib/style-engine.js +684 -0
  49. package/src/lib/ui-api.js +4677 -0
  50. package/src/lib/ui-assets.js +373 -0
  51. package/src/lib/ui-executor.js +587 -0
  52. package/src/lib/variant-injector.js +591 -0
  53. package/src/lib/viewport-presets.js +454 -0
  54. package/src/lib/worker-pool.js +118 -0
  55. package/web/cropper/index.html +436 -0
  56. package/web/manager/dist/assets/index--ZgioErz.js +507 -0
  57. package/web/manager/dist/assets/index-n468W0Wr.css +1 -0
  58. package/web/manager/dist/index.html +27 -0
  59. package/web/subtitle-editor/index.html +295 -0
@@ -0,0 +1,1721 @@
1
+ // publish.js - Upload generated assets to platform or BYOS (Bring Your Own Storage)
2
+ const chalk = require("chalk");
3
+ const crypto = require("crypto");
4
+ const fs = require("fs-extra");
5
+ const path = require("path");
6
+ const { execSync } = require("child_process");
7
+ const config = require("../lib/config");
8
+ const apiClient = require("../lib/api-client");
9
+ const { mergeContexts } = require("../lib/matrix");
10
+ const { hashFile, getMimeType } = require("../lib/hash");
11
+ const {
12
+ validateStorageConfig,
13
+ getStorageSetupHelp,
14
+ createStorageProvider,
15
+ getStorageMode,
16
+ isPlatformAvailable,
17
+ } = require("../lib/storage-providers");
18
+ const pkg = require("../../package.json");
19
+
20
+ // Check if transactional flow should be used (R2 configured on server)
21
+ const USE_TRANSACTIONAL_FLOW =
22
+ process.env.RESHOT_USE_TRANSACTIONAL !== "false";
23
+
24
/**
 * Load the most recent diff-manifest.json for every scenario under the
 * output directory.
 *
 * Layout: <outputBaseDir>/<scenarioKey>/<YYYY-MM-DD_HH-MM-SS>/diff-manifest.json
 *
 * @param {string} outputBaseDir - Root output directory.
 * @returns {Map<string, Object>} "scenarioKey/timestamp" -> parsed manifest.
 */
function loadDiffManifests(outputBaseDir) {
  const manifests = new Map();

  if (!fs.existsSync(outputBaseDir)) {
    return manifests;
  }

  const TIMESTAMP_RE = /^\d{4}-\d{2}-\d{2}_\d{2}-\d{2}-\d{2}$/;
  const isDir = (p) => fs.statSync(p).isDirectory();

  try {
    const scenarioKeys = fs
      .readdirSync(outputBaseDir)
      .filter((entry) => isDir(path.join(outputBaseDir, entry)));

    for (const scenarioKey of scenarioKeys) {
      const scenarioDir = path.join(outputBaseDir, scenarioKey);
      const timestamps = fs
        .readdirSync(scenarioDir)
        .filter(
          (entry) =>
            isDir(path.join(scenarioDir, entry)) && TIMESTAMP_RE.test(entry),
        );

      // Timestamps are zero-padded, so a lexical sort is chronological;
      // the last entry after sorting is the most recent run.
      const sorted = timestamps.sort();
      const latest = sorted[sorted.length - 1];
      if (!latest) continue;

      const manifestPath = path.join(scenarioDir, latest, "diff-manifest.json");
      if (!fs.existsSync(manifestPath)) continue;

      try {
        manifests.set(`${scenarioKey}/${latest}`, fs.readJSONSync(manifestPath));
      } catch (e) {
        // Skip malformed manifests
      }
    }
  } catch (e) {
    // Return whatever was collected if the directory layout is unexpected
  }

  return manifests;
}
76
+
77
/**
 * Look up diff information for a single asset in the loaded manifests.
 *
 * Searches the latest manifest(s) for the scenario; tries an exact
 * capture-key match first, then variant-prefixed keys such as
 * "theme-light/step-0".
 *
 * @param {Map} manifests - Output of loadDiffManifests().
 * @param {string} scenarioKey - Scenario key.
 * @param {string} captureKey - Asset capture key.
 * @returns {Object|null} { diffPercentage, diffStatus, previousVersion } or null.
 */
function getDiffDataForAsset(manifests, scenarioKey, captureKey) {
  const toResult = (entry, manifest) => ({
    // Manifest scores are 0..1; the API expects a percentage.
    diffPercentage: entry.score != null ? entry.score * 100 : null,
    diffStatus: entry.status || null,
    previousVersion: manifest.comparedAgainst || null,
  });

  const prefix = `${scenarioKey}/`;
  for (const [manifestKey, manifest] of manifests.entries()) {
    if (!manifestKey.startsWith(prefix)) continue;

    const assets = manifest.assets || {};

    // Exact capture-key match first.
    const direct = assets[captureKey];
    if (direct) return toResult(direct, manifest);

    // Fall back to variant-prefixed key patterns.
    for (const [assetKey, entry] of Object.entries(assets)) {
      if (assetKey === captureKey || assetKey.endsWith(`/${captureKey}`)) {
        return toResult(entry, manifest);
      }
    }
  }

  return null;
}
111
+
112
/**
 * Recursively collect publishable asset files under `dir`.
 *
 * Debug artifacts (debug-failure.*), diff images (*.diff.png / *.diff.jpg)
 * and anything inside a "diffs" directory are excluded from publishing.
 *
 * @param {string} dir - Directory to scan.
 * @param {string[]} [extensions] - Lower-case extensions to include.
 * @returns {string[]} Matching file paths.
 */
function findAssetFiles(
  dir,
  extensions = [".png", ".gif", ".mp4", ".jpg", ".jpeg"],
) {
  const EXCLUDED_FILENAMES = ["debug-failure.png", "debug-failure.jpg"];
  const EXCLUDED_DIRS = ["diffs"];
  const EXCLUDED_SUFFIXES = [".diff.png", ".diff.jpg"];

  const files = [];

  const isExcluded = (filename, filePath) => {
    if (EXCLUDED_FILENAMES.includes(filename)) return true;
    if (EXCLUDED_SUFFIXES.some((suffix) => filename.endsWith(suffix))) {
      return true;
    }
    // Defensive: also reject files whose path passes through an excluded
    // directory (checked with both the platform separator and "/").
    return EXCLUDED_DIRS.some(
      (name) =>
        filePath.includes(path.sep + name + path.sep) ||
        filePath.includes("/" + name + "/"),
    );
  };

  const walk = (currentDir) => {
    for (const entry of fs.readdirSync(currentDir)) {
      const fullPath = path.join(currentDir, entry);
      if (fs.statSync(fullPath).isDirectory()) {
        // Never descend into excluded directories.
        if (EXCLUDED_DIRS.includes(entry)) continue;
        walk(fullPath);
        continue;
      }
      const ext = path.extname(entry).toLowerCase();
      if (extensions.includes(ext) && !isExcluded(entry, fullPath)) {
        files.push(fullPath);
      }
    }
  };

  walk(dir);
  return files;
}
174
+
175
/**
 * Find exported video files (.mp4/.webm/.mov) in the common export
 * directories:
 *   - ./exports
 *   - ./.reshot/exports
 *
 * @param {string} projectRoot - Project root directory.
 * @returns {string[]} Paths of video files found (may be empty).
 */
function collectExportVideoFiles(projectRoot) {
  const VIDEO_EXTS = new Set([".mp4", ".webm", ".mov"]);
  const results = [];

  const walk = (currentDir) => {
    if (!fs.existsSync(currentDir)) return;
    for (const entry of fs.readdirSync(currentDir)) {
      const fullPath = path.join(currentDir, entry);
      let stat;
      try {
        stat = fs.statSync(fullPath);
      } catch {
        continue; // entry vanished or is unreadable — skip it
      }

      if (stat.isDirectory()) {
        walk(fullPath);
      } else if (
        stat.isFile() &&
        VIDEO_EXTS.has(path.extname(entry).toLowerCase())
      ) {
        results.push(fullPath);
      }
    }
  };

  walk(path.join(projectRoot, "exports"));
  walk(path.join(projectRoot, ".reshot", "exports"));

  return results;
}
218
+
219
/**
 * Resolve a user-supplied video path against the project root.
 *
 * @param {string} projectRoot - Base directory for relative paths.
 * @param {string} candidatePath - Absolute or project-relative path.
 * @returns {string|null} Resolved path to an existing .mp4/.webm/.mov file,
 *   or null when the value is empty, missing, not a file, or not a
 *   supported video type.
 */
function resolveVideoPath(projectRoot, candidatePath) {
  const trimmed = String(candidatePath || "").trim();
  if (!trimmed) return null;

  const resolved = path.isAbsolute(trimmed)
    ? trimmed
    : path.resolve(projectRoot, trimmed);

  if (!fs.existsSync(resolved) || !fs.statSync(resolved).isFile()) {
    return null;
  }

  const SUPPORTED_EXTS = [".mp4", ".webm", ".mov"];
  return SUPPORTED_EXTS.includes(path.extname(resolved).toLowerCase())
    ? resolved
    : null;
}
232
+
233
/**
 * Normalize a scenario hint into a lower-case slug: runs of characters
 * outside [a-z0-9_-] collapse to "-", repeated dashes are squeezed, and
 * leading/trailing dashes are stripped.
 *
 * @param {*} value - Raw hint (falsy values become "").
 * @returns {string} Sanitized slug (possibly empty).
 */
function sanitizeScenarioHint(value) {
  const lowered = String(value || "").toLowerCase();
  return lowered
    .replace(/[^a-z0-9_-]+/g, "-")
    .replace(/-+/g, "-")
    .replace(/^-|-$/g, "");
}
240
+
241
/**
 * Read the ".reshot-last-render.json" marker written by the most recent
 * export and return the video path it points at, when valid.
 *
 * Checks ./exports first, then ./.reshot/exports.
 *
 * @param {string} projectRoot - Project root directory.
 * @returns {string|null} Resolved video path, or null when no usable marker exists.
 */
function readVideoTargetMarker(projectRoot) {
  const markerLocations = [
    path.join(projectRoot, "exports", ".reshot-last-render.json"),
    path.join(projectRoot, ".reshot", "exports", ".reshot-last-render.json"),
  ];

  for (const markerPath of markerLocations) {
    if (!fs.existsSync(markerPath)) continue;
    try {
      const marker = fs.readJSONSync(markerPath);
      const videoPath = resolveVideoPath(projectRoot, marker?.videoPath);
      if (videoPath) return videoPath;
    } catch {
      // ignore malformed marker files
    }
  }

  return null;
}
259
+
260
/**
 * Decide which exported video file(s) to publish.
 *
 * Resolution order (first hit wins):
 *   1. `explicitVideoPath` argument — an invalid value throws.
 *   2. RESHOT_PUBLISH_VIDEO_PATH env var — set-but-invalid throws.
 *   3. The ".reshot-last-render.json" marker from the last export.
 *   4. Scanning the export directories; with several candidates, a unique
 *      match against `scenarioHints` is used, otherwise the upload is
 *      skipped with a warning to avoid publishing the wrong file.
 *
 * @param {string} projectRoot - Project root directory.
 * @param {Object} [options]
 * @param {string} [options.explicitVideoPath] - User-requested video path.
 * @param {string[]} [options.scenarioHints] - Names/keys used to disambiguate
 *   when multiple exported videos exist.
 * @returns {{files: string[], warning?: string}}
 * @throws {Error} When an explicitly requested path (argument or env) is invalid.
 */
function findExportVideoFiles(
  projectRoot,
  { explicitVideoPath, scenarioHints = [] } = {},
) {
  // 1. Explicit argument: must resolve to a supported video or we fail loudly.
  const explicit = resolveVideoPath(projectRoot, explicitVideoPath);
  if (explicitVideoPath) {
    if (!explicit) {
      throw new Error(
        `Requested video target was not found or is not a supported video: ${explicitVideoPath}`,
      );
    }
    return { files: [explicit] };
  }

  // 2. Environment override: same contract as the explicit argument.
  const envExplicit = resolveVideoPath(
    projectRoot,
    process.env.RESHOT_PUBLISH_VIDEO_PATH,
  );
  if (process.env.RESHOT_PUBLISH_VIDEO_PATH && !envExplicit) {
    throw new Error(
      `RESHOT_PUBLISH_VIDEO_PATH was set but invalid: ${process.env.RESHOT_PUBLISH_VIDEO_PATH}`,
    );
  }
  if (envExplicit) {
    return { files: [envExplicit] };
  }

  // 3. Marker file left behind by the most recent render.
  const markerTarget = readVideoTargetMarker(projectRoot);
  if (markerTarget) {
    return { files: [markerTarget] };
  }

  // 4. Directory scan. Zero or one candidate is unambiguous.
  const candidates = collectExportVideoFiles(projectRoot);
  if (candidates.length <= 1) {
    return { files: candidates };
  }

  // Multiple candidates: try to narrow down via scenario hints matched
  // against the sanitized file basename. Only a single unique match is
  // trusted — the loose `includes(hint)` clause can match several files.
  const normalizedHints = scenarioHints
    .map((hint) => sanitizeScenarioHint(hint))
    .filter(Boolean);
  if (normalizedHints.length > 0) {
    const matched = candidates.filter((filePath) => {
      const base = sanitizeScenarioHint(path.basename(filePath, path.extname(filePath)));
      return normalizedHints.some(
        (hint) =>
          base === hint || base.startsWith(`${hint}-`) || base.includes(`-${hint}-`) || base.includes(hint),
      );
    });
    if (matched.length === 1) {
      return { files: [matched[0]] };
    }
  }

  // Still ambiguous: skip rather than guess.
  return {
    files: [],
    warning:
      "Multiple exported videos found. Skipping video upload to avoid uploading the wrong target. Set RESHOT_PUBLISH_VIDEO_PATH or run a fresh export.",
  };
}
319
+
320
/**
 * Normalize a capture key into a lower-case slug (same rules as
 * sanitizeScenarioHint; kept as a separate function for call-site clarity).
 *
 * @param {*} value - Raw key (falsy values become "").
 * @returns {string} Sanitized slug (possibly empty).
 */
function sanitizeCaptureKey(value) {
  const lowered = String(value || "").toLowerCase();
  return lowered
    .replace(/[^a-z0-9_-]+/g, "-")
    .replace(/-+/g, "-")
    .replace(/^-|-$/g, "");
}
327
+
328
/**
 * Wrap exported video files in a single synthetic asset group so they can
 * flow through the same publish pipeline as screenshots.
 *
 * Capture keys come from the sanitized file basename and are de-duplicated
 * with a numeric suffix ("clip", "clip-2", ...).
 *
 * @param {string[]} videoFiles - Paths of video files to publish.
 * @returns {Array<Object>} Zero or one group under scenario "video-exports".
 */
function buildVideoAssetGroups(videoFiles) {
  if (!videoFiles || videoFiles.length === 0) return [];

  const seenKeys = new Set();
  const assets = videoFiles.map((filePath) => {
    const base = path.basename(filePath, path.extname(filePath));
    const preferred = sanitizeCaptureKey(base) || "video-export";

    let captureKey = preferred;
    for (let suffix = 2; seenKeys.has(captureKey); suffix++) {
      captureKey = `${preferred}-${suffix}`;
    }
    seenKeys.add(captureKey);

    return {
      captureKey,
      path: filePath,
      filename: path.basename(filePath),
    };
  });

  return [
    {
      scenarioKey: "video-exports",
      variationSlug: "default",
      // ISO timestamp truncated to seconds, with ":" made path-safe.
      timestamp: new Date().toISOString().slice(0, 19).replace(/:/g, "-"),
      assets,
    },
  ];
}
360
+
361
/**
 * NOTE: the path-convention description below documents extractMetadata()
 * further down in this file, not the adjacent readSettingsSafe().
 * Convention: .reshot/output/<scenarioKey>/<variationSlug>/<filename>
 */
365
/**
 * Read CLI settings via config.readSettings() without throwing.
 *
 * @returns {Object|null} Parsed settings, or null when they cannot be read.
 */
function readSettingsSafe() {
  let settings = null;
  try {
    settings = config.readSettings();
  } catch (error) {
    // Settings file missing or unreadable — treat as absent.
  }
  return settings;
}
372
+
373
/**
 * Determine API credentials and project ID for this publish run.
 *
 * Precedence: RESHOT_API_KEY / RESHOT_PROJECT_ID environment variables,
 * then settings.json, then docsync.config.json metadata. In BYOS mode no
 * credentials are required and the project ID falls back to "local".
 *
 * @param {Object} args
 * @param {Object|null} args.settings - Parsed .reshot/settings.json.
 * @param {Object|null} args.docSyncConfig - Parsed docsync.config.json.
 * @param {string} args.storageMode - "byos" or "platform".
 * @returns {{apiKey: string|null, projectId: string, storageMode: string}}
 * @throws {Error} In platform mode when the API key or project ID is missing.
 */
function resolveProjectContext({ settings, docSyncConfig, storageMode }) {
  const fromEnv = (name) => process.env[name]?.trim();

  const apiKey = fromEnv("RESHOT_API_KEY") || settings?.apiKey;
  const projectId =
    fromEnv("RESHOT_PROJECT_ID") ||
    settings?.projectId ||
    docSyncConfig?._metadata?.projectId;

  // BYOS publishes straight to user-owned storage — no platform auth needed.
  if (storageMode === "byos") {
    return {
      apiKey: null,
      projectId: projectId || "local",
      storageMode: "byos",
    };
  }

  if (!apiKey) {
    throw new Error(
      "No API key found. Set RESHOT_API_KEY in your environment or run `reshot auth` locally to create .reshot/settings.json.\n" +
        "Alternatively, configure BYOS (Bring Your Own Storage) in docsync.config.json to publish without authentication.",
    );
  }

  if (!projectId) {
    throw new Error(
      "No project ID found. Set RESHOT_PROJECT_ID in your environment or ensure docsync.config.json contains _metadata.projectId.",
    );
  }

  return { apiKey, projectId, storageMode: "platform" };
}
405
+
406
/**
 * Derive publish metadata from an asset's location on disk.
 *
 * Expected layout: <outputBaseDir>/<scenarioKey>/<timestamp>/<contextKey>/<filename>
 * e.g. visuals-review-queue/2026-01-08_10-34-55/theme-light/step-0-initial.png
 *
 * The context key doubles as the variation slug so published URLs stay
 * semantic; the timestamp is carried along as plain metadata.
 *
 * @param {string} filePath - Path of the asset file.
 * @param {string} outputBaseDir - Root output directory.
 * @returns {{scenarioKey, variationSlug, timestamp, contextKey, captureKey, filename}}
 */
function extractMetadata(filePath, outputBaseDir) {
  const segments = path.relative(outputBaseDir, filePath).split(path.sep);
  const [scenarioKey, timestamp] = segments;

  // Context folder (e.g. "theme-light"); shallow layouts fall back to "default".
  const contextKey = segments.length > 2 ? segments[2] : "default";

  // Everything past the context folder is the filename (joined with "/"
  // for nested captures); shallow layouts reuse the third segment.
  const filename = segments.slice(3).join("/") || segments[2] || "";

  // Capture key is the filename minus its extension.
  const captureKey =
    path.basename(filename, path.extname(filename)) || contextKey;

  return {
    scenarioKey,
    variationSlug: contextKey,
    timestamp,
    contextKey,
    captureKey,
    filename,
  };
}
433
+
434
/**
 * Group flat asset file paths into {scenarioKey, variationSlug} buckets,
 * keeping only the files from the newest timestamped run of each bucket.
 *
 * @param {string[]} assetFiles - Paths returned by findAssetFiles().
 * @param {string} outputBaseDir - Root output directory (passed to extractMetadata).
 * @returns {Array<{scenarioKey, variationSlug, timestamp, assets: Array}>}
 */
function groupAssetsByScenario(assetFiles, outputBaseDir) {
  const groups = new Map();
  // Track the latest timestamp per group for deduplication
  const latestTimestamps = new Map();

  for (const assetPath of assetFiles) {
    const metadata = extractMetadata(assetPath, outputBaseDir);
    const { scenarioKey, variationSlug, captureKey, timestamp } = metadata;

    // Skip anything whose path doesn't decompose into the expected fields.
    if (!scenarioKey || !variationSlug || !captureKey) {
      console.warn(
        chalk.yellow(` ⚠ Skipping asset with unrecognized path: ${assetPath}`),
      );
      continue;
    }

    const groupKey = `${scenarioKey}::${variationSlug}`;
    const currentLatest = latestTimestamps.get(groupKey);

    // Only process if this is from the latest timestamp for this scenario+context.
    // Timestamps are zero-padded "YYYY-MM-DD_HH-MM-SS" strings, so plain
    // string comparison is chronological.
    if (currentLatest && timestamp < currentLatest) {
      // Skip older timestamped versions
      continue;
    }

    // If this is a newer timestamp, clear the old assets
    // (the group entry is replaced wholesale, dropping stale files).
    if (currentLatest && timestamp > currentLatest) {
      groups.set(groupKey, {
        scenarioKey,
        variationSlug,
        timestamp,
        assets: [],
      });
    }

    latestTimestamps.set(groupKey, timestamp);

    // First sighting of this group: create its (empty) bucket.
    if (!groups.has(groupKey)) {
      groups.set(groupKey, {
        scenarioKey,
        variationSlug,
        timestamp,
        assets: [],
      });
    }

    groups.get(groupKey).assets.push({
      captureKey,
      path: assetPath,
      filename: metadata.filename,
    });
  }

  return Array.from(groups.values());
}
489
+
490
/**
 * Build the context payload for one variation of a scenario.
 *
 * The variation slug is split on "_" into context keys and merged over
 * the scenario's base context via mergeContexts; the slug itself is
 * echoed back as the `variation` field.
 *
 * @param {Object|null} scenario - Scenario config (may lack contexts).
 * @param {string} variationSlug - Variation slug, or falsy for "default".
 * @returns {Object} Merged context including a `variation` field.
 */
function buildContextForVariation(scenario, variationSlug) {
  const variation = variationSlug || "default";

  if (!scenario?.contexts) {
    return { variation };
  }

  const variationKeys =
    variation === "default" ? [] : variation.split("_").filter(Boolean);

  const merged = mergeContexts(
    scenario.contexts.base || {},
    variationKeys,
    scenario.contexts,
  );

  return { ...merged, variation };
}
510
+
511
/**
 * Convert a local scenario config into the platform's scenario-definition
 * payload (name, target URL, context keys, normalized steps).
 *
 * @param {Object|null} scenario - Scenario from docsync.config.json.
 * @returns {Object|undefined} Definition payload, or undefined when absent.
 */
function buildScenarioDefinition(scenario) {
  if (!scenario) return undefined;

  // Normalize one configured step; the index supplies fallbacks for
  // missing order and key values.
  const toStep = (step, index) => ({
    order: typeof step.order === "number" ? step.order : index,
    action: step.action,
    key:
      step.key ||
      step.captureKey ||
      path.basename(step.path || `step-${index}`),
    id: step.id || step.stepId || null,
    selector: step.selector || null,
    clip: step.clip,
    selectorPadding: step.selectorPadding,
    deviceScaleFactor: step.deviceScaleFactor || null,
  });

  return {
    name: scenario.name,
    targetUrl: scenario.url,
    outputType: "screenshot",
    contexts: Object.keys(scenario.contexts || {}),
    steps: (scenario.steps || []).map(toStep),
  };
}
538
+
539
/**
 * Assemble the metadata payload attached to every publish request for one
 * scenario/variation pair.
 *
 * @param {Object} args
 * @param {string} args.projectId - Target project.
 * @param {string} args.publishSessionId - Unique ID for this CLI publish run.
 * @param {string} args.scenarioKey - Scenario key (used as name fallback).
 * @param {Object|null} args.scenarioConfig - Scenario config, if known.
 * @param {string} args.variationSlug - Variation slug ("default" fallback).
 * @param {Object} args.contextData - Merged context values.
 * @param {Object} args.gitInfo - Carries commitHash and commitMessage.
 * @returns {Object} Metadata payload for the publish API.
 */
function buildPublishMetadata({
  projectId,
  publishSessionId,
  scenarioKey,
  scenarioConfig,
  variationSlug,
  contextData,
  gitInfo,
}) {
  return {
    projectId,
    publishSessionId, // Unique ID for this CLI publish run
    scenarioName: scenarioConfig?.name || scenarioKey,
    scenario: buildScenarioDefinition(scenarioConfig),
    context: {
      name: variationSlug || "default",
      data: contextData,
    },
    autoCreateVisuals: true,
    git: {
      commitHash: gitInfo.commitHash,
      commitMessage: gitInfo.commitMessage,
    },
    cli: {
      version: pkg.version,
      captureTimestamp: new Date().toISOString(),
    },
  };
}
570
+
571
/**
 * Publish using transactional flow (direct R2 upload with presigned URLs).
 *
 * Pipeline: flatten groups -> (optional) ffmpeg thumbnails for videos ->
 * hash files -> request presigned URLs -> upload in batches of 5 ->
 * commit metadata per scenario/variation group.
 *
 * @param {string} apiKey - Platform API key.
 * @param {string} projectId - Target project.
 * @param {Array<Object>} groupedAssets - Output of groupAssetsByScenario().
 * @param {Object|null} docSyncConfig - Parsed docsync.config.json (for scenario lookups).
 * @param {Object} gitInfo - Git + publish-session metadata.
 * @param {Map|null} [diffManifests] - Output of loadDiffManifests(), if available.
 * @returns {Promise<{successCount, failCount, skippedCount, viewUrl}>}
 */
async function publishWithTransactionalFlow(
  apiKey,
  projectId,
  groupedAssets,
  docSyncConfig,
  gitInfo,
  diffManifests = null,
) {
  console.log(
    chalk.cyan(" 🚀 Using transactional upload (direct to R2)...\n"),
  );

  let successCount = 0;
  let failCount = 0;
  let skippedCount = 0;
  let viewUrl = null;

  // Flatten all assets with metadata
  const allFiles = [];
  for (const group of groupedAssets) {
    const scenarioConfig = docSyncConfig?.scenarios?.find(
      (s) => s.key === group.scenarioKey,
    );

    for (const asset of group.assets) {
      // Use semantic key: scenarioKey/captureKey to match ReshotSteps URL expectations
      // e.g., "visuals-rollback/step-0-initial" for multi-step scenarios
      // This allows the CDN to serve URLs like: /{projectId}/visuals-rollback/step-0-initial?context=theme-light
      const visualKey = `${group.scenarioKey}/${asset.captureKey}`;
      const fileStat = fs.statSync(asset.path);

      // Look up diff data for this asset
      const diffData = diffManifests
        ? getDiffDataForAsset(
            diffManifests,
            group.scenarioKey,
            asset.captureKey,
          )
        : null;

      allFiles.push({
        group,
        asset,
        scenarioConfig,
        key: asset.captureKey,
        visualKey,
        path: asset.path,
        size: fileStat.size,
        contentType: getMimeType(asset.path),
        hash: null, // Will be calculated
        diffData, // Attach diff data from manifest
        thumbnailPath: null, // Will be set for video files
      });
    }
  }

  // Generate thumbnails for video files (first frame as PNG)
  const videoExts = new Set([".mp4", ".webm", ".mov"]);
  const videoFiles = allFiles.filter((f) =>
    videoExts.has(path.extname(f.path).toLowerCase()),
  );
  if (videoFiles.length > 0) {
    // Probe for ffmpeg once; thumbnails are best-effort and skipped when absent.
    let ffmpegAvailable = false;
    try {
      execSync("ffmpeg -version", { stdio: "ignore" });
      ffmpegAvailable = true;
    } catch {
      // ffmpeg not installed
    }

    if (ffmpegAvailable) {
      console.log(
        chalk.gray(
          ` Generating thumbnails for ${videoFiles.length} video(s)...`,
        ),
      );
      for (const file of videoFiles) {
        try {
          const thumbPath = file.path.replace(
            path.extname(file.path),
            "-thumb.png",
          );
          // NOTE(review): file.path is interpolated into a shell command;
          // a path containing a double quote would break (or alter) the
          // command — consider execFileSync with an argv array.
          execSync(
            `ffmpeg -y -i "${file.path}" -vframes 1 -vf "scale=640:-2" "${thumbPath}"`,
            { stdio: "ignore", timeout: 15000 },
          );
          if (fs.existsSync(thumbPath)) {
            file.thumbnailPath = thumbPath;
            const thumbStat = fs.statSync(thumbPath);
            // Add thumbnail as a separate file to upload
            allFiles.push({
              group: file.group,
              asset: {
                captureKey: `${file.asset.captureKey}-thumb`,
                path: thumbPath,
                filename: path.basename(thumbPath),
              },
              scenarioConfig: file.scenarioConfig,
              key: `${file.asset.captureKey}-thumb`,
              visualKey: `${file.visualKey}-thumb`,
              path: thumbPath,
              size: thumbStat.size,
              contentType: "image/png",
              hash: null,
              diffData: null,
              thumbnailPath: null,
              // Marker flags so the commit step can attach the thumbnail to
              // its parent instead of committing it as a standalone asset.
              _isThumbnail: true,
              _parentVisualKey: file.visualKey,
            });
            console.log(
              chalk.green(` ✔ Thumbnail: ${path.basename(thumbPath)}`),
            );
          }
        } catch (err) {
          console.log(
            chalk.yellow(
              ` ⚠ Thumbnail generation failed for ${path.basename(file.path)}: ${err.message}`,
            ),
          );
        }
      }
    }
  }

  // Step 1: Calculate hashes in parallel (with progress)
  console.log(
    chalk.gray(` Calculating hashes for ${allFiles.length} files...`),
  );
  const hashResults = await Promise.all(
    allFiles.map(async (file) => {
      const hash = await hashFile(file.path);
      return { file, hash };
    }),
  );
  for (const { file, hash } of hashResults) {
    file.hash = hash;
  }

  // Step 2: Get presigned URLs
  console.log(chalk.gray(" Requesting presigned URLs..."));
  const signPayload = {
    files: allFiles.map((f) => ({
      key: f.key,
      contentType: f.contentType,
      size: f.size,
      hash: f.hash,
      visualKey: f.visualKey,
    })),
  };

  const signResponse = await apiClient.signAssets(apiKey, signPayload);
  const { urls } = signResponse;

  // Step 3: Upload files directly to R2 (parallel with concurrency limit)
  console.log(chalk.gray(" Uploading files directly to R2..."));
  const CONCURRENCY = 5;
  const uploadQueue = [...allFiles];
  const uploadResults = [];

  while (uploadQueue.length > 0) {
    const batch = uploadQueue.splice(0, CONCURRENCY);
    const batchPromises = batch.map(async (file) => {
      // Look up by visualKey:hash (unique per file), fall back to visualKey, then key for backwards compatibility
      const compositeKey = `${file.visualKey}:${file.hash}`;
      const urlInfo =
        urls[compositeKey] || urls[file.visualKey] || urls[file.key];
      if (!urlInfo) {
        // NOTE(review): this throw is not caught below — it rejects the
        // whole Promise.all batch (and therefore this function) rather
        // than counting a single failed file.
        throw new Error(
          `No presigned URL for ${file.visualKey} (key: ${file.key}, hash: ${file.hash})`,
        );
      }

      try {
        const fileBuffer = fs.readFileSync(file.path);
        await apiClient.uploadToPresignedUrl(urlInfo.uploadUrl, fileBuffer, {
          contentType: file.contentType,
        });

        console.log(chalk.green(` ✔ Uploaded ${file.visualKey}`));
        return { success: true, file, s3Path: urlInfo.path };
      } catch (err) {
        console.log(
          chalk.red(` ✖ Failed ${file.visualKey}: ${err.message}`),
        );
        return { success: false, file, error: err.message };
      }
    });

    const results = await Promise.all(batchPromises);
    uploadResults.push(...results);
  }

  // Step 4: Commit metadata (grouped by scenario/variant)
  console.log(chalk.gray("\n Committing metadata to platform..."));

  // Build a map of thumbnail s3Paths keyed by parent visualKey
  const thumbnailS3Paths = new Map();
  for (const result of uploadResults) {
    if (result.success && result.file._isThumbnail) {
      thumbnailS3Paths.set(result.file._parentVisualKey, result.s3Path);
    }
  }

  // Group successful uploads by scenario/variant (skip thumbnails — they're metadata)
  const groupMap = new Map();
  for (const result of uploadResults) {
    if (!result.success) {
      // Failed thumbnails are not counted as asset failures.
      if (!result.file._isThumbnail) failCount++;
      continue;
    }
    // Thumbnails are attached to their parent asset, not committed separately
    if (result.file._isThumbnail) continue;

    const groupKey = `${result.file.group.scenarioKey}::${result.file.group.variationSlug}`;
    if (!groupMap.has(groupKey)) {
      groupMap.set(groupKey, {
        group: result.file.group,
        scenarioConfig: result.file.scenarioConfig,
        assets: [],
      });
    }
    groupMap.get(groupKey).assets.push({
      key: result.file.key,
      s3Path: result.s3Path,
      hash: result.file.hash,
      visualKey: result.file.visualKey,
      size: result.file.size,
      contentType: result.file.contentType,
      // Include diff data from CLI analysis
      diffPercentage: result.file.diffData?.diffPercentage ?? null,
      diffStatus: result.file.diffData?.diffStatus ?? null,
      // Attach thumbnail for video assets
      thumbnailS3Path: thumbnailS3Paths.get(result.file.visualKey) ?? null,
    });
  }

  // Commit each group
  for (const { group, scenarioConfig, assets } of groupMap.values()) {
    const contextObj = buildContextForVariation(
      scenarioConfig,
      group.variationSlug,
    );
    const metadata = buildPublishMetadata({
      projectId,
      publishSessionId: gitInfo.publishSessionId,
      scenarioKey: group.scenarioKey,
      scenarioConfig,
      variationSlug: group.variationSlug,
      contextData: contextObj,
      gitInfo,
    });

    // Advertise the capabilities this payload was produced with.
    if (metadata.cli) {
      metadata.cli.features = ["steps", "transactional"];
    }

    try {
      const result = await apiClient.publishTransactional(apiKey, {
        metadata,
        assets,
      });

      const processedCount = result?.assetsProcessed ?? assets.length;
      console.log(
        chalk.green(
          ` ✔ Committed "${group.scenarioKey}" (${group.variationSlug}): ${processedCount} asset(s)`,
        ),
      );
      successCount += processedCount;

      // Handle skipped assets (visual limit)
      if (result?.skippedAssets?.length > 0) {
        for (const key of result.skippedAssets) {
          console.log(chalk.yellow(` ⚠ Skipped "${key}" (plan limit reached)`));
        }
        skippedCount += result.skippedAssets.length;
      }

      // Capture viewUrl from first successful response
      if (!viewUrl && result?.viewUrl) {
        viewUrl = result.viewUrl;
      }
    } catch (error) {
      console.log(chalk.red(` ✖ Commit failed: ${error.message}`));
      failCount += assets.length;
    }
  }

  return { successCount, failCount, skippedCount, viewUrl };
}
864
+
865
/**
 * Publish using legacy flow (multipart form upload through serverless).
 *
 * One publishAssetsV1 call per scenario/variation group; a failed group is
 * tallied and the loop continues rather than aborting the whole run.
 *
 * @param {string} apiKey - Platform API key.
 * @param {string} projectId - Target project.
 * @param {Array<Object>} groupedAssets - Output of groupAssetsByScenario().
 * @param {Object|null} docSyncConfig - Parsed docsync.config.json.
 * @param {Object} gitInfo - Git + publish-session metadata.
 * @returns {Promise<{successCount: number, failCount: number}>}
 */
async function publishWithLegacyFlow(
  apiKey,
  projectId,
  groupedAssets,
  docSyncConfig,
  gitInfo,
) {
  let successCount = 0;
  let failCount = 0;

  for (const group of groupedAssets) {
    const scenarioConfig = docSyncConfig
      ? docSyncConfig.scenarios.find((s) => s.key === group.scenarioKey)
      : null;

    const metadata = buildPublishMetadata({
      projectId,
      publishSessionId: gitInfo.publishSessionId,
      scenarioKey: group.scenarioKey,
      scenarioConfig,
      variationSlug: group.variationSlug,
      contextData: buildContextForVariation(scenarioConfig, group.variationSlug),
      gitInfo,
    });

    if (metadata.cli) {
      metadata.cli.features = ["steps"];
    }

    // The legacy API takes a flat { captureKey: filePath } map.
    const assetsPayload = Object.fromEntries(
      group.assets.map((asset) => [asset.captureKey, asset.path]),
    );

    console.log(
      chalk.cyan(
        ` 📦 Uploading scenario "${group.scenarioKey}" (${group.variationSlug}) with ${group.assets.length} asset(s)`,
      ),
    );

    try {
      const result = await apiClient.publishAssetsV1(
        apiKey,
        metadata,
        assetsPayload,
      );
      const processedCount = result?.assetsProcessed ?? group.assets.length;
      console.log(
        chalk.green(
          ` ✔ Uploaded ${processedCount} asset(s); review items: ${result?.reviewQueueItems ?? "n/a"}`,
        ),
      );
      successCount += processedCount;
    } catch (error) {
      console.log(chalk.red(` ✖ Failed: ${error.message}`));
      failCount += group.assets.length;
    }
  }

  return { successCount, failCount };
}
935
+
936
/**
 * Publish using BYOS (Bring Your Own Storage)
 * Uploads directly to S3/R2/local without platform involvement
 *
 * Iterates every asset group, uploads each asset through the configured
 * storage provider, and finally writes two manifests next to the provider's
 * own manifest: the provider-generated one plus a visual-grouped companion
 * ("visual-groups.json") built by buildGroupedManifest().
 *
 * Per-asset upload failures are logged and counted but do NOT abort the run;
 * manifest generation failure is likewise non-fatal (warning only).
 *
 * @param {object} storageConfig - Storage settings (at least `type`; passed to createStorageProvider).
 * @param {Array<object>} groupedAssets - Groups of { scenarioKey, variationSlug, assets: [{ path, captureKey }] }.
 * @param {object|null} docSyncConfig - Parsed docsync config; only `variants.dimensions` is read here.
 * @param {object} gitInfo - { commitHash, commitMessage } embedded in the grouped manifest.
 * @returns {Promise<{successCount: number, failCount: number, manifestPath: string|null}>}
 * @throws {Error} If the storage provider cannot be created.
 */
async function publishWithBYOS(
  storageConfig,
  groupedAssets,
  docSyncConfig,
  gitInfo,
) {
  const storageProvider = createStorageProvider(storageConfig);

  if (!storageProvider) {
    throw new Error("Failed to create storage provider");
  }

  console.log(
    chalk.cyan(` 🚀 Using BYOS (${storageConfig.type}) storage...\n`),
  );

  let successCount = 0;
  let failCount = 0;
  const uploadResults = [];

  // Build variation context from config for grouping visuals
  const variantDimensions = docSyncConfig?.variants?.dimensions || {};

  for (const group of groupedAssets) {
    const { scenarioKey, variationSlug, assets } = group;

    // Parse variation dimensions for manifest metadata
    const parsedVariation = parseVariationForManifest(
      variationSlug,
      variantDimensions,
    );

    for (const asset of assets) {
      const contentType = getMimeType(asset.path);
      // Key format: scenarioKey/variationSlug/captureKey.ext
      const ext = path.extname(asset.path);
      const assetKey = `${scenarioKey}/${variationSlug}/${asset.captureKey}${ext}`;

      console.log(chalk.gray(` Uploading: ${assetKey}`));

      try {
        const result = await storageProvider.upload(
          asset.path,
          assetKey,
          contentType,
        );

        // Record everything needed to rebuild manifests later; `variation`
        // carries the parsed dimension metadata for this whole group.
        uploadResults.push({
          key: assetKey,
          scenarioKey,
          variationSlug,
          captureKey: asset.captureKey,
          path: result.path,
          publicUrl: result.publicUrl,
          hash: result.hash,
          contentType,
          variation: parsedVariation,
        });

        console.log(chalk.green(` ✔ ${assetKey}`));
        successCount++;
      } catch (error) {
        // A single failed upload is reported but does not stop the batch.
        console.log(chalk.red(` ✖ ${assetKey}: ${error.message}`));
        failCount++;
      }
    }
  }

  // Generate manifest for BYOS uploads
  let manifestPath = null;
  if (uploadResults.length > 0) {
    try {
      const manifestResult =
        await storageProvider.generateManifest(uploadResults);
      manifestPath = manifestResult.manifestPath;

      // Also save a visual-grouped manifest for easier consumption
      // (written alongside the provider manifest by swapping the filename).
      const groupedManifest = buildGroupedManifest(
        uploadResults,
        gitInfo,
        storageConfig,
      );
      const groupedManifestPath = manifestResult.manifestPath.replace(
        "manifest-latest.json",
        "visual-groups.json",
      );
      await fs.writeJSON(groupedManifestPath, groupedManifest, { spaces: 2 });
    } catch (error) {
      // Manifest problems are downgraded to a warning: the uploads themselves
      // already succeeded, so the caller still gets accurate counts.
      console.warn(
        chalk.yellow(` ⚠ Failed to generate manifest: ${error.message}`),
      );
    }
  }

  return { successCount, failCount, manifestPath };
}
1036
+
1037
/**
 * Parse variation slug into structured dimensions for manifest
 *
 * A slug like "en_admin_dark" is split on "_" and each segment is matched
 * against the configured dimension options. Segments that match no configured
 * dimension become `custom_<n>` entries; without any dimension config the
 * segments are assigned positionally (locale, role, theme, variant, dim_N).
 *
 * @param {string|null} slug - Variation slug, or "default"/falsy for the default variation.
 * @param {object} dimensionConfig - Map of dimension name -> { options: {...} }.
 * @returns {{isDefault: boolean, dimensions: object}}
 */
function parseVariationForManifest(slug, dimensionConfig) {
  // The default variation carries no dimension data at all.
  if (!slug || slug === "default") {
    return { isDefault: true, dimensions: {} };
  }

  const segments = slug.split("_");
  const dimensions = {};
  const hasConfig =
    dimensionConfig && Object.keys(dimensionConfig).length > 0;

  if (!hasConfig) {
    // No config: assign segments positionally to conventional names.
    const positional = ["locale", "role", "theme", "variant"];
    segments.forEach((segment, i) => {
      dimensions[positional[i] || `dim_${i}`] = segment;
    });
    return { isDefault: false, dimensions };
  }

  // Match each configured dimension to the first segment found among its
  // declared option keys; claimed segments are removed from the pool.
  const unclaimed = segments.slice();
  for (const [dimName, dimConfig] of Object.entries(dimensionConfig)) {
    const validOptions = new Set(Object.keys(dimConfig.options || {}));
    const hit = unclaimed.findIndex((segment) => validOptions.has(segment));
    if (hit !== -1) {
      dimensions[dimName] = unclaimed[hit];
      unclaimed.splice(hit, 1);
    }
  }

  // Whatever is left over becomes an indexed custom dimension.
  unclaimed.forEach((segment, i) => {
    dimensions[`custom_${i}`] = segment;
  });

  return { isDefault: false, dimensions };
}
1078
+
1079
/**
 * Build a grouped manifest that organizes assets by visual key with variations
 *
 * Produces a two-level structure: visualGroups[scenarioKey].variations[slug]
 * .captures[captureKey] -> { url, hash, storagePath }, plus a flat
 * `assets` map of storage key -> public URL for quick lookups.
 *
 * @param {Array<object>} uploadResults - Entries produced by publishWithBYOS.
 * @param {object} gitInfo - { commitHash, commitMessage } (missing values become null).
 * @param {object} storageConfig - Storage settings; only `type` is recorded.
 * @returns {object} Serializable manifest object.
 */
function buildGroupedManifest(uploadResults, gitInfo, storageConfig) {
  const visualGroups = {};

  for (const entry of uploadResults) {
    // Lazily create the group for this scenario (visual key).
    let group = visualGroups[entry.scenarioKey];
    if (!group) {
      group = { visualKey: entry.scenarioKey, variations: {} };
      visualGroups[entry.scenarioKey] = group;
    }

    // Lazily create the variation bucket, carrying parsed dimension metadata.
    let variation = group.variations[entry.variationSlug];
    if (!variation) {
      variation = {
        slug: entry.variationSlug,
        ...entry.variation,
        captures: {},
      };
      group.variations[entry.variationSlug] = variation;
    }

    variation.captures[entry.captureKey] = {
      url: entry.publicUrl,
      hash: entry.hash,
      storagePath: entry.path,
    };
  }

  // Flat asset map for quick lookups
  const assets = {};
  for (const { key, publicUrl } of uploadResults) {
    assets[key] = publicUrl;
  }

  return {
    generated: new Date().toISOString(),
    provider: storageConfig.type,
    git: {
      commitHash: gitInfo.commitHash || null,
      commitMessage: gitInfo.commitMessage || null,
    },
    visualGroups,
    assets,
  };
}
1126
+
1127
/**
 * Get git commit information
 *
 * Reads the current HEAD hash and its full commit message via `git`.
 * On any failure (e.g. not a git repository) a warning is printed and
 * empty strings are returned so callers never have to handle a throw.
 *
 * @returns {{commitHash: string, commitMessage: string}}
 */
function getGitInfo() {
  // Small helper so both git invocations share the same options/trim.
  const runGit = (command) => execSync(command, { encoding: "utf-8" }).trim();

  try {
    return {
      commitHash: runGit("git rev-parse HEAD"),
      commitMessage: runGit("git log -1 --pretty=%B"),
    };
  } catch (error) {
    console.warn(chalk.yellow(" ⚠ Could not read git information"));
    return { commitHash: "", commitMessage: "" };
  }
}
1144
+
1145
/**
 * Get recent commit messages since last publish
 *
 * With a valid previous commit hash, returns messages for commits in
 * `<hash>..HEAD`; otherwise falls back to the last 5 commit messages.
 * Blank lines are filtered out of the result.
 *
 * Fix: the previous hash is now validated as a plain hex object id before
 * being interpolated into the shell command — a corrupted or malicious
 * settings value can no longer inject shell syntax. The two branches also
 * shared a duplicated split/filter pipeline, now unified.
 *
 * @param {string|undefined} lastCommitHash - Commit hash recorded at last publish.
 * @returns {string[]} Non-empty commit message lines (empty array on git failure).
 */
function getRecentCommits(lastCommitHash) {
  try {
    // Only accept a plain git object id (4-40 hex chars). Anything else is
    // treated as "no previous hash" rather than interpolated into the shell.
    const isValidHash =
      typeof lastCommitHash === "string" &&
      /^[0-9a-fA-F]{4,40}$/.test(lastCommitHash);

    // Get last 5 commits if no (usable) previous hash.
    const logCommand = isValidHash
      ? `git log ${lastCommitHash}..HEAD --pretty=%B`
      : "git log -5 --pretty=%B";

    return execSync(logCommand, { encoding: "utf-8" })
      .split("\n")
      .filter((line) => line.trim());
  } catch (error) {
    console.warn(chalk.yellow(" ⚠ Could not read git commit history"));
    return [];
  }
}
1169
+
1170
/**
 * Recursively find all markdown files matching include/exclude patterns
 *
 * Walks `docsRoot` (resolved against process.cwd()), skipping any entry
 * whose relative or absolute path matches an exclude pattern, and collects
 * `.md`/`.mdx` files that also satisfy an include pattern (matching either
 * the glob or simply having a markdown extension, as before).
 *
 * Fix: the previous glob→regex conversion did
 * `replace(/\*\*\/g, ".*").replace(/\*\/g, "[^/]*")`, so the second replace
 * also rewrote the "*" inside the just-inserted ".*" — "**" effectively
 * became ".[^/]*" and never crossed directory boundaries. Dots were also
 * left unescaped (".md" matched "Xmd"). Conversion now escapes regex
 * metacharacters and substitutes "**" via a placeholder; compiled regexes
 * are hoisted out of the per-entry loop.
 *
 * @param {string} docsRoot - Root directory (relative to cwd or absolute).
 * @param {string[]} [includePatterns] - Glob patterns for files to include.
 * @param {string[]} [excludePatterns] - Glob patterns for paths to skip.
 * @returns {Array<{fullPath: string, relativePath: string}>}
 */
function findDocFiles(
  docsRoot,
  includePatterns = ["**/*.md", "**/*.mdx"],
  excludePatterns = [],
) {
  const files = [];
  const rootPath = path.resolve(process.cwd(), docsRoot);

  if (!fs.existsSync(rootPath)) {
    return files;
  }

  // Convert a glob pattern to an (unanchored, as before) RegExp.
  // "**" -> ".*" (crosses "/"), "*" -> "[^/]*" (single segment); all other
  // regex metacharacters are escaped literally.
  const globToRegExp = (pattern) => {
    const GLOBSTAR = "\u0000";
    const source = pattern
      .replace(/\*\*/g, GLOBSTAR)
      .replace(/[.+?^${}()|[\]\\]/g, "\\$&")
      .replace(/\*/g, "[^/]*")
      .replace(new RegExp(GLOBSTAR, "g"), ".*");
    return new RegExp(source);
  };

  // Compile once instead of per directory entry (was O(entries * patterns)
  // RegExp constructions).
  const excludeRegexes = excludePatterns.map(globToRegExp);
  const includeRegexes = includePatterns.map(globToRegExp);

  function walkDir(dir, relativePath = "") {
    const items = fs.readdirSync(dir);

    for (const item of items) {
      const fullPath = path.join(dir, item);
      const relativeItemPath = path
        .join(relativePath, item)
        .replace(/\\/g, "/");
      const stat = fs.statSync(fullPath);

      // Check exclude patterns (against both relative and absolute path,
      // matching the original behavior).
      const shouldExclude = excludeRegexes.some(
        (regex) => regex.test(relativeItemPath) || regex.test(fullPath),
      );

      if (shouldExclude) {
        continue;
      }

      if (stat.isDirectory()) {
        walkDir(fullPath, relativeItemPath);
      } else if (stat.isFile()) {
        const ext = path.extname(item).toLowerCase();
        // Preserved quirk: a markdown extension satisfies any include
        // pattern, but an empty include list matches nothing.
        const matchesInclude = includeRegexes.some(
          (regex) =>
            regex.test(relativeItemPath) || ext === ".md" || ext === ".mdx",
        );

        if (matchesInclude && (ext === ".md" || ext === ".mdx")) {
          files.push({
            fullPath,
            relativePath: relativeItemPath,
          });
        }
      }
    }
  }

  walkDir(rootPath);
  return files;
}
1233
+
1234
/**
 * Parse frontmatter from markdown content
 *
 * Extracts a leading `--- ... ---` block and parses it as simple
 * key: value pairs (no nested YAML). Returns the parsed frontmatter and the
 * remaining document body; content without frontmatter is returned as-is
 * with an empty frontmatter object.
 *
 * Fixes over the previous version:
 * - Surrounding quotes are stripped only when they form a matching pair
 *   (`/^["']|["']$/g` used to strip a lone quote from either end, mangling
 *   values like `'half`).
 * - YAML comment lines (`# note: ...`) are skipped instead of becoming
 *   bogus keys.
 * - Removed a try/catch around code that cannot throw.
 *
 * @param {string} content - Raw markdown file content.
 * @returns {{frontmatter: object, content: string}}
 */
function parseFrontmatter(content) {
  const frontmatterRegex = /^---\s*\n([\s\S]*?)\n---\s*\n([\s\S]*)$/;
  const match = content.match(frontmatterRegex);

  if (!match) {
    return { frontmatter: {}, content };
  }

  // Simple YAML parsing (basic key-value pairs)
  const frontmatter = {};
  for (const line of match[1].split("\n")) {
    const trimmedLine = line.trim();
    // Skip blank lines and YAML comments.
    if (!trimmedLine || trimmedLine.startsWith("#")) {
      continue;
    }

    const colonIndex = line.indexOf(":");
    if (colonIndex <= 0) {
      continue;
    }

    const key = line.substring(0, colonIndex).trim();
    let value = line.substring(colonIndex + 1).trim();

    // Strip surrounding quotes only when both ends carry the same quote.
    const first = value[0];
    if (
      value.length >= 2 &&
      (first === '"' || first === "'") &&
      value.endsWith(first)
    ) {
      value = value.slice(1, -1);
    }

    frontmatter[key] = value;
  }

  return {
    frontmatter,
    content: match[2],
  };
}
1269
+
1270
/**
 * Entry point for the `reshot publish` command.
 *
 * Orchestrates up to three publish streams, gated by the project's feature
 * toggles: (A) visual assets from `.reshot/output` plus export videos,
 * (B) documentation markdown files, and (C) changelog drafts built from
 * recent git commits. Supports two storage modes: "byos" (direct upload to
 * the user's own storage) and the Reshot platform (transactional flow with a
 * legacy-upload fallback).
 *
 * Exits the process (code 1) on invalid storage configuration or when the
 * project context cannot be resolved.
 *
 * @param {object} [options] - CLI options.
 * @param {string} [options.tag] - Version tag to record for this publish.
 * @param {string} [options.message] - Override commit message for the tag.
 * @param {boolean} [options.dryRun] - Announce dry-run mode (no tag is saved).
 * @param {boolean} [options.force] - NOTE(review): destructured but never read below — confirm intent.
 * @param {string} [options.video] - Explicit export-video path hint.
 * @param {boolean} [options.outputJson] - Also write publish-result.json.
 * @returns {Promise<object>} Summary: counts, viewUrl, tag, dryRun, timestamp.
 */
async function publishCommand(options = {}) {
  const { tag, message, dryRun, force, video, outputJson } = options;

  // Result tracking for --output-json and programmatic callers
  // NOTE(review): reviewQueueItems is initialized but never updated in this
  // function — confirm whether the transactional flow should populate it.
  const publishResult = {
    assetsProcessed: 0,
    assetsFailed: 0,
    assetsSkipped: 0,
    reviewQueueItems: 0,
    viewUrl: null,
    tag: tag || null,
    dryRun: !!dryRun,
    timestamp: new Date().toISOString(),
  };

  console.log(chalk.cyan("📤 Publishing assets...\n"));

  // If tagging, show the tag
  if (tag) {
    console.log(chalk.cyan(`🏷️ Tagging release: ${tag}\n`));
  }

  // Dry run mode
  if (dryRun) {
    console.log(chalk.yellow("🔍 DRY RUN MODE - No assets will be uploaded\n"));
  }

  // Read config + settings (if available); a missing/unreadable config is
  // tolerated and leaves docSyncConfig null.
  const settings = readSettingsSafe();
  let docSyncConfig = null;
  try {
    docSyncConfig = config.readConfig();
  } catch (error) {
    console.warn(
      chalk.yellow(" ⚠ Could not read config file, using minimal context"),
    );
  }

  // Determine storage mode and validate configuration
  const storageConfig = docSyncConfig?.storage;
  const storageMode = getStorageMode(docSyncConfig);

  // Validate storage configuration
  const validation = validateStorageConfig(storageConfig);

  // Print warnings
  for (const warning of validation.warnings) {
    console.log(chalk.yellow(` ⚠ ${warning}`));
  }

  // Print errors and exit if invalid
  if (!validation.valid) {
    console.log(chalk.red("\n❌ Storage configuration errors:"));
    for (const error of validation.errors) {
      console.log(chalk.red(` • ${error}`));
    }
    console.log(getStorageSetupHelp(storageConfig?.type || "reshot"));
    process.exit(1);
  }

  // Resolve project context based on storage mode; failure here is fatal
  // (BYOS users are pointed at the storage setup help).
  let projectContext;
  try {
    projectContext = resolveProjectContext({
      settings,
      docSyncConfig,
      storageMode,
    });
  } catch (error) {
    console.log(chalk.red(`\n❌ ${error.message}`));
    if (storageMode !== "byos") {
      console.log(
        chalk.gray("\nTip: Configure BYOS to publish without authentication:"),
      );
      console.log(getStorageSetupHelp("s3"));
    }
    process.exit(1);
  }

  const { apiKey, projectId, storageMode: resolvedMode } = projectContext;
  const projectName =
    docSyncConfig?._metadata?.projectName ||
    settings?.projectName ||
    "Local Project";

  // Announce where we are publishing to.
  if (resolvedMode === "byos") {
    console.log(
      chalk.cyan(`📦 BYOS Mode: Publishing to ${storageConfig.type} storage`),
    );
    console.log(chalk.gray(` Bucket: ${storageConfig.bucket || "N/A"}`));
    console.log(
      chalk.gray(` Path Prefix: ${storageConfig.pathPrefix || "/"}\n`),
    );
  } else {
    console.log(chalk.gray(`Project: ${projectName} (${projectId})`));
    console.log(chalk.gray(`API Key: ****${apiKey.slice(-4)}`));
  }

  // Get feature toggles (defaults: visuals+changelog on, docs off).
  const features = docSyncConfig?._metadata?.features || {
    visuals: true,
    docs: false,
    changelog: true,
  };

  // Get git information
  const { commitHash, commitMessage } = getGitInfo();

  // Generate unique session ID for this publish run
  const publishSessionId = crypto.randomUUID();

  // Handle version tagging (skipped on dry runs); failure to persist the tag
  // locally is non-fatal.
  if (tag && !dryRun) {
    try {
      // NOTE(review): tagData is never read — consider dropping the binding.
      const tagData = config.addVersionTag(tag, {
        commitHash,
        commitMessage: message || commitMessage,
        publishSessionId,
      });
      console.log(chalk.green(` ✔ Version tag "${tag}" created`));
      console.log(
        chalk.gray(` Pinned URL: cdn.reshot.dev/assets/{key}?tag=${tag}\n`),
      );
    } catch (tagError) {
      console.log(
        chalk.yellow(` ⚠ Failed to save tag locally: ${tagError.message}`),
      );
    }
  }

  // Stream A: Visual Assets
  if (features.visuals === true) {
    const projectRoot = process.cwd();
    const outputBaseDir = path.join(projectRoot, ".reshot", "output");
    const screenshotFiles = fs.existsSync(outputBaseDir)
      ? findAssetFiles(outputBaseDir)
      : [];
    const screenshotGroups =
      screenshotFiles.length > 0
        ? groupAssetsByScenario(screenshotFiles, outputBaseDir)
        : [];

    // Export videos are discovered separately and may exist even when the
    // screenshot output directory does not.
    const { files: exportVideoFiles, warning: exportVideoWarning } =
      findExportVideoFiles(projectRoot, {
        explicitVideoPath: video,
        scenarioHints: screenshotGroups.map((g) => g.scenarioKey),
      });
    if (exportVideoWarning) {
      console.log(chalk.yellow(` ⚠ ${exportVideoWarning}`));
    }
    const videoGroups = buildVideoAssetGroups(exportVideoFiles);

    const groupedAssets = [...screenshotGroups, ...videoGroups];

    if (!fs.existsSync(outputBaseDir) && videoGroups.length === 0) {
      console.log(
        chalk.yellow("No output directory found. Run `reshot run` first."),
      );
    } else if (groupedAssets.length === 0) {
      console.log(chalk.yellow("No asset files found to publish."));
    } else {
      console.log(
        chalk.cyan(
          `\nFound ${screenshotFiles.length + exportVideoFiles.length} asset(s) to publish` +
            ` (${screenshotFiles.length} screenshots, ${exportVideoFiles.length} videos)\n`,
        ),
      );

      let successCount = 0;
      let failCount = 0;
      let skippedCount = 0;
      let viewUrl = null;

      // Load diff manifests for attaching diff data to screenshot assets
      const diffManifests = fs.existsSync(outputBaseDir)
        ? loadDiffManifests(outputBaseDir)
        : new Map();
      if (diffManifests.size > 0) {
        console.log(
          chalk.gray(
            ` Loaded diff data from ${diffManifests.size} scenario(s)\n`,
          ),
        );
      }

      // Use BYOS or Platform flow based on mode
      if (resolvedMode === "byos") {
        const result = await publishWithBYOS(
          storageConfig,
          groupedAssets,
          docSyncConfig,
          { commitHash, commitMessage },
        );
        successCount = result.successCount;
        failCount = result.failCount;

        if (result.manifestPath) {
          console.log(
            chalk.cyan(`\n📄 Manifest generated: ${result.manifestPath}`),
          );
        }
      } else {
        // Try transactional flow first (direct R2 upload)
        if (USE_TRANSACTIONAL_FLOW) {
          try {
            const result = await publishWithTransactionalFlow(
              apiKey,
              projectId,
              groupedAssets,
              docSyncConfig,
              { commitHash, commitMessage, publishSessionId },
              diffManifests,
            );
            successCount = result.successCount;
            failCount = result.failCount;
            skippedCount = result.skippedCount || 0;
            viewUrl = result.viewUrl || null;
          } catch (txError) {
            // Fall back to legacy flow if transactional fails
            console.log(
              chalk.yellow(
                `\n ⚠ Transactional flow unavailable (${txError.message}), using legacy upload...\n`,
              ),
            );
            const result = await publishWithLegacyFlow(
              apiKey,
              projectId,
              groupedAssets,
              docSyncConfig,
              { commitHash, commitMessage, publishSessionId },
            );
            successCount = result.successCount;
            failCount = result.failCount;
          }
        } else {
          // Use legacy flow directly
          const result = await publishWithLegacyFlow(
            apiKey,
            projectId,
            groupedAssets,
            docSyncConfig,
            { commitHash, commitMessage, publishSessionId },
          );
          successCount = result.successCount;
          failCount = result.failCount;
        }
      }

      // Update result tracking
      publishResult.assetsProcessed = successCount;
      publishResult.assetsFailed = failCount;
      publishResult.assetsSkipped = skippedCount;
      publishResult.viewUrl = viewUrl;

      console.log(chalk.cyan("\n📊 Visual Assets Summary:"));
      console.log(chalk.green(` ✔ Successfully published: ${successCount}`));
      if (skippedCount > 0) {
        console.log(chalk.yellow(` ⚠ Skipped (plan limit): ${skippedCount}`));
        // Derive the web UI origin from the API base URL.
        const platformUrl = apiClient.getApiBaseUrl().replace(/\/api\/?$/, "");
        console.log(
          chalk.yellow(` Upgrade: ${platformUrl}/app/settings/billing`),
        );
      }
      if (failCount > 0) {
        console.log(chalk.red(` ✖ Failed: ${failCount}`));
      }
      if (viewUrl) {
        console.log(chalk.cyan(`\n 🔗 View in platform: ${viewUrl}`));
      }

      // Helpful guidance about diff percentages
      if (diffManifests && diffManifests.size > 0) {
        console.log(
          chalk.gray(
            "\n 💡 Diff data included! View change percentages in the platform.",
          ),
        );
      } else {
        console.log(
          chalk.gray(
            "\n 💡 Tip: Run 'reshot run' before publish to see change percentages.",
          ),
        );
      }
    }
  } else {
    console.log(
      chalk.yellow(" ⚠ Visual asset publishing is disabled for this project."),
    );
  }

  // Stream B: Documentation
  if (features.docs === true) {
    console.log(chalk.cyan("\n📚 Publishing documentation...\n"));

    if (!docSyncConfig || !docSyncConfig.docs) {
      console.log(
        chalk.yellow(
          " ⚠ No docs configuration found in docsync.config.json. Skipping docs stream.",
        ),
      );
    } else {
      const docsConfig = docSyncConfig.docs;
      const docsRoot = docsConfig.root || "./docs";
      const includePatterns = docsConfig.include || ["**/*.md", "**/*.mdx"];
      const excludePatterns = docsConfig.exclude || [
        "**/node_modules/**",
        "**/.git/**",
      ];

      const docFiles = findDocFiles(docsRoot, includePatterns, excludePatterns);

      if (docFiles.length === 0) {
        console.log(
          chalk.yellow(" ⚠ No documentation files found to publish."),
        );
      } else {
        console.log(
          chalk.cyan(` Found ${docFiles.length} documentation file(s)\n`),
        );

        // Build the payload: body text plus optional frontmatter; doc status
        // defaults to "draft" when the frontmatter does not specify one.
        const docsPayload = [];
        for (const docFile of docFiles) {
          const content = fs.readFileSync(docFile.fullPath, "utf-8");
          const { frontmatter, content: docContent } =
            parseFrontmatter(content);

          docsPayload.push({
            path: docFile.relativePath,
            content: docContent,
            frontmatter:
              Object.keys(frontmatter).length > 0 ? frontmatter : undefined,
            status: frontmatter.status || "draft",
          });
        }

        // Docs publishing requires platform (no BYOS support yet)
        if (resolvedMode === "byos") {
          console.log(
            chalk.yellow(
              " ⚠ Documentation publishing requires Reshot platform.",
            ),
          );
          console.log(
            chalk.gray(
              " Run 'reshot auth' to enable doc hosting with review workflow.",
            ),
          );
        } else {
          try {
            const result = await apiClient.publishDocs(apiKey, {
              projectId,
              docs: docsPayload,
            });
            console.log(
              chalk.green(
                ` ✔ Published ${result.created || 0} new doc(s), updated ${
                  result.updated || 0
                } doc(s)`,
              ),
            );
          } catch (error) {
            console.log(
              chalk.red(` ✖ Failed to publish docs: ${error.message}`),
            );
          }
        }
      }
    }
  } else {
    console.log(
      chalk.yellow(
        " ⚠ Documentation publishing is disabled for this project.",
      ),
    );
  }

  // Stream C: Changelog
  if (features.changelog === true) {
    // Changelog requires platform (no BYOS support)
    if (resolvedMode === "byos") {
      console.log(chalk.cyan("\n📝 Changelog drafts...\n"));
      console.log(
        chalk.yellow(
          " ⚠ Changelog requires Reshot platform for tracking and publishing.",
        ),
      );
      console.log(
        chalk.gray(" Run 'reshot auth' to enable changelog generation."),
      );
    } else {
      console.log(chalk.cyan("\n📝 Posting changelog drafts...\n"));
      const lastPublishedHash = settings?.lastPublishedCommitHash;
      const recentCommits = getRecentCommits(lastPublishedHash);

      if (recentCommits.length > 0) {
        try {
          await apiClient.postChangelogDrafts(projectId, recentCommits, apiKey);
          console.log(
            chalk.green(
              ` ✔ Posted ${recentCommits.length} changelog draft(s)`,
            ),
          );

          // Update last published commit hash (only persisted after a
          // successful post, so failed posts are retried next publish).
          if (settings) {
            settings.lastPublishedCommitHash = commitHash;
            config.writeSettings(settings);
          }
        } catch (error) {
          console.log(
            chalk.yellow(
              ` ⚠ Failed to post changelog drafts: ${error.message}`,
            ),
          );
        }
      } else {
        console.log(chalk.gray(" No new commits to publish"));
      }
    }
  } else {
    console.log(
      chalk.yellow(" ⚠ Changelog publishing is disabled for this project."),
    );
  }

  // Print upgrade path for BYOS users
  if (resolvedMode === "byos") {
    console.log(chalk.cyan("\n💡 Upgrade to Reshot Platform for:"));
    console.log(chalk.gray(" • Visual review queue with approval workflow"));
    console.log(chalk.gray(" • Unbreakable URLs that never change"));
    console.log(chalk.gray(" • Version history and rollback"));
    console.log(chalk.gray(" • Team collaboration and RBAC"));
    console.log(chalk.gray(" • Automatic changelog generation"));
    console.log(chalk.gray("\n Run 'reshot auth' to connect your project."));
  }

  // Write structured JSON output if requested
  if (outputJson) {
    const outputDir = path.join(process.cwd(), ".reshot", "output");
    fs.ensureDirSync(outputDir);
    const outputPath = path.join(outputDir, "publish-result.json");
    fs.writeJsonSync(outputPath, publishResult, { spaces: 2 });
    console.log(chalk.gray(` 📄 JSON result written to: ${outputPath}`));
  }

  console.log();

  return publishResult;
}
1720
+
1721
// Single export: the `reshot publish` command handler.
module.exports = publishCommand;