@reshotdev/screenshot 0.0.1-beta.0

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (59) hide show
  1. package/LICENSE +190 -0
  2. package/README.md +388 -0
  3. package/package.json +64 -0
  4. package/src/commands/auth.js +259 -0
  5. package/src/commands/chrome.js +140 -0
  6. package/src/commands/ci-run.js +123 -0
  7. package/src/commands/ci-setup.js +288 -0
  8. package/src/commands/drifts.js +423 -0
  9. package/src/commands/import-tests.js +309 -0
  10. package/src/commands/ingest.js +458 -0
  11. package/src/commands/init.js +633 -0
  12. package/src/commands/publish.js +1721 -0
  13. package/src/commands/pull.js +303 -0
  14. package/src/commands/record.js +94 -0
  15. package/src/commands/run.js +476 -0
  16. package/src/commands/setup-wizard.js +740 -0
  17. package/src/commands/setup.js +137 -0
  18. package/src/commands/status.js +275 -0
  19. package/src/commands/sync.js +621 -0
  20. package/src/commands/ui.js +248 -0
  21. package/src/commands/validate-docs.js +529 -0
  22. package/src/index.js +462 -0
  23. package/src/lib/api-client.js +815 -0
  24. package/src/lib/capture-engine.js +1623 -0
  25. package/src/lib/capture-script-runner.js +3120 -0
  26. package/src/lib/ci-detect.js +137 -0
  27. package/src/lib/config.js +1240 -0
  28. package/src/lib/diff-engine.js +642 -0
  29. package/src/lib/hash.js +74 -0
  30. package/src/lib/image-crop.js +396 -0
  31. package/src/lib/matrix.js +89 -0
  32. package/src/lib/output-path-template.js +318 -0
  33. package/src/lib/playwright-runner.js +252 -0
  34. package/src/lib/polished-clip.js +553 -0
  35. package/src/lib/privacy-engine.js +408 -0
  36. package/src/lib/progress-tracker.js +142 -0
  37. package/src/lib/record-browser-injection.js +654 -0
  38. package/src/lib/record-cdp.js +612 -0
  39. package/src/lib/record-clip.js +343 -0
  40. package/src/lib/record-config.js +623 -0
  41. package/src/lib/record-screenshot.js +360 -0
  42. package/src/lib/record-terminal.js +123 -0
  43. package/src/lib/recorder-service.js +781 -0
  44. package/src/lib/secrets.js +51 -0
  45. package/src/lib/selector-strategies.js +859 -0
  46. package/src/lib/standalone-mode.js +400 -0
  47. package/src/lib/storage-providers.js +569 -0
  48. package/src/lib/style-engine.js +684 -0
  49. package/src/lib/ui-api.js +4677 -0
  50. package/src/lib/ui-assets.js +373 -0
  51. package/src/lib/ui-executor.js +587 -0
  52. package/src/lib/variant-injector.js +591 -0
  53. package/src/lib/viewport-presets.js +454 -0
  54. package/src/lib/worker-pool.js +118 -0
  55. package/web/cropper/index.html +436 -0
  56. package/web/manager/dist/assets/index--ZgioErz.js +507 -0
  57. package/web/manager/dist/assets/index-n468W0Wr.css +1 -0
  58. package/web/manager/dist/index.html +27 -0
  59. package/web/subtitle-editor/index.html +295 -0
@@ -0,0 +1,458 @@
1
+ // ingest.js - Upload traces and documentation for DocSync processing
2
+ // Implements the "Smart Handoff" protocol from the DocSync specification
3
+
4
+ const chalk = require("chalk");
5
+ const crypto = require("crypto");
6
+ const fs = require("fs-extra");
7
+ const path = require("path");
8
+ const { execSync } = require("child_process");
9
+ const config = require("../lib/config");
10
+ const apiClient = require("../lib/api-client");
11
+ const { hashFile } = require("../lib/hash");
12
+ const pkg = require("../../package.json");
13
+
14
// File extension allowlists and per-file size caps enforced during discovery.
const TRACE_EXTENSIONS = [".zip"]; // Playwright traces are uploaded as zip archives
const DOC_EXTENSIONS = [".md", ".mdx"]; // markdown files eligible for journey binding
const MAX_DOC_SIZE = 5 * 1024 * 1024; // 5MB per doc file
const MAX_TRACE_SIZE = 100 * 1024 * 1024; // 100MB per trace
19
+
20
/**
 * Parse frontmatter from a markdown file.
 *
 * Extracts the simple `key: value` subset of YAML between leading `---`
 * delimiters; a single layer of matching quotes around a value is removed.
 * Handles both LF and CRLF line endings — the previous regex required bare
 * `\n` and silently skipped frontmatter in files saved with Windows line
 * endings, which dropped their journey bindings.
 *
 * @param {string} content - Full markdown file contents.
 * @returns {{ frontmatter: Object<string, string>, content: string }}
 *   Parsed key/value pairs and the body with the frontmatter stripped
 *   (unchanged content when no frontmatter is present).
 */
function parseFrontmatter(content) {
  // \r?\n lets the delimiter match CRLF as well as LF files.
  const frontmatterRegex = /^---\r?\n([\s\S]*?)\r?\n---/;
  const match = content.match(frontmatterRegex);

  if (!match) return { frontmatter: {}, content };

  const frontmatter = {};

  for (const line of match[1].split(/\r?\n/)) {
    const colonIndex = line.indexOf(':');
    // colonIndex > 0 skips blank lines and lines with no key before ':'.
    if (colonIndex > 0) {
      const key = line.slice(0, colonIndex).trim();
      let value = line.slice(colonIndex + 1).trim();
      // Remove one layer of matching quotes if present.
      if ((value.startsWith('"') && value.endsWith('"')) ||
          (value.startsWith("'") && value.endsWith("'"))) {
        value = value.slice(1, -1);
      }
      frontmatter[key] = value;
    }
  }

  return {
    frontmatter,
    content: content.slice(match[0].length).trim()
  };
}
52
+
53
/**
 * Convert a simple glob pattern (from the docsync.config.json
 * include/exclude lists) into an unanchored RegExp.
 *
 * Fixes two defects in the previous inline conversion:
 *  - regex metacharacters (notably the `.` in `.mdx`) are now escaped,
 *    so `*.mdx` no longer matches strings like `Xmdx`;
 *  - the double-star wildcard is expanded via a placeholder, so the `*`
 *    inside its `.*` replacement is not re-expanded by the single-`*`
 *    pass (the old code produced `.[^/]*` for a double star, which could
 *    never cross a directory boundary).
 *
 * @param {string} pattern - Glob pattern from the config.
 * @returns {RegExp} Unanchored regex; matching remains substring-based,
 *   as in the original implementation.
 */
function globToRegExp(pattern) {
  const DOUBLE_STAR = '\u0000'; // placeholder that cannot appear in a path
  const source = pattern
    .replace(/[.+^${}()|[\]\\]/g, '\\$&')         // escape regex metacharacters
    .replace(/\*\*/g, DOUBLE_STAR)                // protect double stars from the next pass
    .replace(/\*/g, '[^/]*')                      // single star stays within one segment
    .replace(new RegExp(DOUBLE_STAR, 'g'), '.*'); // double star may cross segments
  return new RegExp(source);
}

/**
 * Discover documentation files based on docsync.config.json.
 *
 * Walks docConfig.root recursively, applies the exclude patterns, and keeps
 * only .md/.mdx files bound to a journey — via a `reshot_journey`
 * frontmatter key or an explicit entry in docConfig.mappings. Files over
 * MAX_DOC_SIZE are skipped with a warning.
 *
 * @param {object} docConfig - The `documentation` block of docsync.config.json.
 * @param {string} projectRoot - Project root the doc root is resolved against.
 * @returns {Promise<Array<object>>} Discovered files with path, relativePath,
 *   journeyKey, contentHash (sha256 of the file text), size and frontmatter.
 */
async function discoverDocumentation(docConfig, projectRoot) {
  const files = [];
  const root = path.resolve(projectRoot, docConfig.root || './docs');

  if (!fs.existsSync(root)) {
    console.log(chalk.yellow(` ⚠ Documentation root not found: ${root}`));
    return files;
  }

  // FIXME(review): `include` is read but never applied — only the
  // DOC_EXTENSIONS allowlist below filters files. Confirm whether include
  // patterns should be enforced before wiring them in (behavior change).
  const include = docConfig.include || ['**/*.md', '**/*.mdx'];
  const exclude = docConfig.exclude || ['**/_*.mdx', 'node_modules'];
  const mappings = docConfig.mappings || {};

  // Simple glob-like recursive file discovery.
  function walkDir(dir, relativePath = '') {
    const items = fs.readdirSync(dir);

    for (const item of items) {
      const fullPath = path.join(dir, item);
      const relPath = path.join(relativePath, item);
      // Normalize to forward slashes so glob patterns behave identically on
      // Windows, where path.join produces backslashes.
      const relPosix = relPath.split(path.sep).join('/');
      const stat = fs.statSync(fullPath);

      // Check exclusions.
      const shouldExclude = exclude.some(pattern => {
        if (pattern.includes('*')) {
          return globToRegExp(pattern).test(relPosix);
        }
        // Non-wildcard patterns: substring / exact-name match (unchanged).
        return relPath.includes(pattern) || item === pattern;
      });

      if (shouldExclude) continue;

      if (stat.isDirectory()) {
        walkDir(fullPath, relPath);
      } else {
        const ext = path.extname(item).toLowerCase();
        if (DOC_EXTENSIONS.includes(ext)) {
          // Read the file and check for a reshot_journey frontmatter key.
          const content = fs.readFileSync(fullPath, 'utf-8');
          const { frontmatter } = parseFrontmatter(content);

          // Journey key from frontmatter, or from explicit mappings keyed by
          // either the relative or the absolute path.
          const journeyKey = frontmatter.reshot_journey ||
            mappings[relPath] ||
            mappings[fullPath];

          // Only include files that have a binding.
          if (journeyKey) {
            const fileSize = stat.size;

            if (fileSize > MAX_DOC_SIZE) {
              console.log(chalk.yellow(` ⚠ Skipping ${relPath}: exceeds ${MAX_DOC_SIZE / 1024 / 1024}MB limit`));
              continue;
            }

            files.push({
              path: fullPath,
              relativePath: relPath,
              journeyKey,
              contentHash: crypto.createHash('sha256').update(content).digest('hex'),
              size: fileSize,
              frontmatter
            });
          }
        }
      }
    }
  }

  walkDir(root);
  return files;
}
130
+
131
/**
 * Discover Playwright trace archives under traceDir.
 *
 * Recursively collects files whose extension is in TRACE_EXTENSIONS
 * (case-insensitive). Previously the '.zip' literal was compared inline,
 * leaving the TRACE_EXTENSIONS allowlist declared-but-unused; this now
 * honors the allowlist. Files over MAX_TRACE_SIZE are skipped with a
 * warning.
 *
 * Journey key convention: test-results/<test-name>/trace.zip — the parent
 * directory name is used, falling back to the archive basename (without
 * its extension) when the file sits directly in `test-results`.
 *
 * @param {string} traceDir - Directory to scan (usually <cwd>/test-results).
 * @returns {Promise<Array<object>>} Discovered traces with path, filename,
 *   journeyKey, contentHash and size.
 */
async function discoverTraces(traceDir) {
  const traces = [];

  if (!fs.existsSync(traceDir)) {
    console.log(chalk.yellow(` ⚠ Trace directory not found: ${traceDir}`));
    return traces;
  }

  function walkDir(dir) {
    const items = fs.readdirSync(dir);

    for (const item of items) {
      const fullPath = path.join(dir, item);
      const stat = fs.statSync(fullPath);
      const ext = path.extname(item);

      if (stat.isDirectory()) {
        walkDir(fullPath);
      } else if (TRACE_EXTENSIONS.includes(ext.toLowerCase())) {
        const fileSize = stat.size;

        if (fileSize > MAX_TRACE_SIZE) {
          console.log(chalk.yellow(` ⚠ Skipping trace ${item}: exceeds ${MAX_TRACE_SIZE / 1024 / 1024}MB limit`));
          continue;
        }

        // Extract the journey key from the directory structure or filename.
        // Convention: test-results/<test-name>/trace.zip
        const parentDir = path.basename(path.dirname(fullPath));
        // Strip the actual extension (not a hard-coded '.zip') so uppercase
        // extensions like '.ZIP', accepted by the lowercased check above,
        // are also removed from the fallback key.
        const journeyKey = parentDir !== 'test-results' ? parentDir : path.basename(item, ext);

        traces.push({
          path: fullPath,
          filename: item,
          journeyKey,
          contentHash: hashFile(fullPath),
          size: fileSize
        });
      }
    }
  }

  walkDir(traceDir);
  return traces;
}
178
+
179
/**
 * Collect git metadata for the current working directory's HEAD commit.
 *
 * Shells out to git three times (hash, branch, last commit message).
 * When git is unavailable or the cwd is not a repository, a warning is
 * printed and placeholder values are returned instead of throwing.
 *
 * @returns {{ commitHash: string, branch: string, commitMessage: string }}
 */
function getGitMetadata() {
  const run = (cmd) => execSync(cmd, { encoding: 'utf-8' }).trim();

  try {
    return {
      commitHash: run('git rev-parse HEAD'),
      branch: run('git rev-parse --abbrev-ref HEAD'),
      commitMessage: run('git log -1 --pretty=%B')
    };
  } catch {
    console.log(chalk.yellow(' ⚠ Not a git repository or git not available'));
    return { commitHash: 'unknown', branch: 'unknown', commitMessage: '' };
  }
}
194
+
195
/**
 * Phase 1: initialize ingestion with the manifest handshake.
 *
 * Sends the content-hash manifest to the server, which responds with
 * presigned upload URLs and any file hashes it already has cached.
 *
 * @param {string} apiKey - Bearer token for the API.
 * @param {string} projectId - Target project identifier.
 * @param {{ docs: Array, traces: Array }} manifest - Hash manifest of local files.
 * @returns {Promise<object>} Server response (presignedUrls, skippedFiles, ...).
 */
async function initializeIngestion(apiKey, projectId, manifest) {
  const headers = { Authorization: `Bearer ${apiKey}` };
  return apiClient.post('/v1/ingest/init', { projectId, manifest }, { headers });
}
208
+
209
/**
 * Phase 2: upload file contents to their presigned URLs.
 *
 * Files whose contentHash has no presigned URL are treated as already
 * present on the server (deduplicated) and marked `skipped`. Upload
 * failures are captured per-file rather than aborting the batch.
 *
 * @param {Array<object>} files - Discovered docs/traces (need .path, .contentHash).
 * @param {Object<string, object>} presignedUrls - Map of contentHash → { url, contentType, storageKey }.
 * @param {(current: number, total: number) => void} [onProgress] - Progress callback,
 *   invoked once per processed file (uploaded, skipped, or failed).
 * @returns {Promise<Array<object>>} One result per input file, flagged with
 *   `skipped`, `uploaded` + `storageKey`, or `error`.
 */
async function uploadFiles(files, presignedUrls, onProgress) {
  const results = [];
  let processed = 0;

  const tick = () => {
    processed += 1;
    onProgress?.(processed, files.length);
  };

  for (const file of files) {
    const target = presignedUrls[file.contentHash];

    if (!target) {
      // Server already has this content hash (deduplication) — nothing to send.
      results.push({ ...file, skipped: true });
      tick();
      continue;
    }

    try {
      const body = fs.readFileSync(file.path);
      await apiClient.uploadToPresignedUrl(target.url, body, {
        contentType: target.contentType || 'application/octet-stream'
      });
      results.push({ ...file, uploaded: true, storageKey: target.storageKey });
    } catch (error) {
      // Record the failure per-file; the caller reports failed uploads.
      results.push({ ...file, error: error.message });
    }

    tick();
  }

  return results;
}
246
+
247
/**
 * Phase 3: commit the ingestion job so server-side processing can begin.
 *
 * Posts the upload results together with git metadata and CLI version
 * information (timestamped at call time).
 *
 * @param {string} apiKey - Bearer token for the API.
 * @param {string} projectId - Target project identifier.
 * @param {object} uploadResults - Storage keys for uploaded docs and traces.
 * @param {object} git - Commit hash / branch / message from getGitMetadata().
 * @param {string} cliVersion - CLI package version.
 * @returns {Promise<object>} Server response (jobId, ...).
 */
async function commitIngestion(apiKey, projectId, uploadResults, git, cliVersion) {
  const payload = {
    projectId,
    uploadResults,
    git,
    cli: {
      version: cliVersion,
      timestamp: new Date().toISOString()
    }
  };
  return apiClient.post('/v1/ingest/commit', payload, {
    headers: { Authorization: `Bearer ${apiKey}` }
  });
}
265
+
266
/**
 * Main ingest command — implements the DocSync "Smart Handoff" protocol:
 *   1. Discover journey-bound docs and Playwright traces locally.
 *   2. Send a content-hash manifest to the server (handshake).
 *   3. Upload only the files the server does not already have.
 *   4. Commit the job so the background worker can process it.
 *
 * Exits the process with code 1 on configuration, init, or commit failures.
 *
 * @param {object} [options]
 * @param {string} [options.traceDir] - Override for the Playwright trace
 *   directory (defaults to <cwd>/test-results).
 * @param {boolean} [options.dryRun] - Print what would be uploaded, then return.
 * @returns {Promise<object|undefined>} The commit response on success;
 *   undefined when there is nothing to ingest or in dry-run mode.
 */
async function ingestCommand(options = {}) {
  console.log(chalk.blue('\nšŸ“„ Reshot DocSync Ingest\n'));

  // Read configuration; readDocSyncConfig throws when the file is missing.
  let docSyncConfig;
  try {
    docSyncConfig = config.readDocSyncConfig();
  } catch (error) {
    console.error(chalk.red('Error:'), 'docsync.config.json not found. Run `reshot init` first.');
    process.exit(1);
  }

  // Get API key and project ID — environment variables take precedence
  // over stored settings, which take precedence over config metadata.
  const settings = config.readSettings();
  const apiKey = process.env.RESHOT_API_KEY || settings?.apiKey;
  const projectId = process.env.RESHOT_PROJECT_ID ||
    settings?.projectId ||
    docSyncConfig._metadata?.projectId;

  if (!apiKey) {
    console.error(chalk.red('Error:'), 'API key not found. Set RESHOT_API_KEY or run `reshot auth`.');
    process.exit(1);
  }

  if (!projectId) {
    console.error(chalk.red('Error:'), 'Project ID not found. Set RESHOT_PROJECT_ID or run `reshot init`.');
    process.exit(1);
  }

  // Validate documentation configuration.
  const docConfig = docSyncConfig.documentation;
  if (!docConfig) {
    console.error(chalk.red('Error:'), 'No "documentation" block found in docsync.config.json');
    process.exit(1);
  }

  if (!docConfig.strategy) {
    console.error(chalk.red('Error:'), 'documentation.strategy is required (git_pr or external_host)');
    process.exit(1);
  }

  const projectRoot = process.cwd();
  const traceDir = options.traceDir || path.join(projectRoot, 'test-results');

  // Phase 1: Discovery — docs and traces are scanned concurrently.
  console.log(chalk.gray('šŸ“ Discovering files...'));

  const [docs, traces] = await Promise.all([
    discoverDocumentation(docConfig, projectRoot),
    discoverTraces(traceDir)
  ]);

  console.log(chalk.green(` āœ“ Found ${docs.length} documentation file(s) with journey bindings`));
  console.log(chalk.green(` āœ“ Found ${traces.length} trace file(s)`));

  if (docs.length === 0 && traces.length === 0) {
    console.log(chalk.yellow('\n⚠ No files to ingest. Make sure:'));
    console.log(chalk.yellow(' - Documentation files have reshot_journey frontmatter'));
    console.log(chalk.yellow(' - Playwright traces are in test-results/'));
    return;
  }

  // Validate unique bindings (1:1 doc-to-journey relationship).
  // NOTE(review): only docs are checked for duplicate journey keys; traces
  // are not — confirm whether duplicate trace keys are intended/allowed.
  const journeyKeys = new Set();
  const duplicates = [];
  for (const doc of docs) {
    if (journeyKeys.has(doc.journeyKey)) {
      duplicates.push(doc.journeyKey);
    }
    journeyKeys.add(doc.journeyKey);
  }

  if (duplicates.length > 0) {
    console.error(chalk.red('\nError: Duplicate journey bindings found:'));
    duplicates.forEach(key => console.error(chalk.red(` - ${key}`)));
    console.error(chalk.yellow('Each document must have a unique reshot_journey key.'));
    process.exit(1);
  }

  // Git metadata is attached to the commit for drift attribution.
  const git = getGitMetadata();
  console.log(chalk.gray(`\nšŸ“ Git: ${git.branch} @ ${git.commitHash.slice(0, 7)}`));

  // Dry run mode: list the planned uploads and stop before any network call.
  if (options.dryRun) {
    console.log(chalk.blue('\nšŸ” Dry run - would upload:'));
    console.log(chalk.gray('\nDocumentation:'));
    docs.forEach(doc => {
      console.log(chalk.white(` ${doc.relativePath} → ${doc.journeyKey}`));
    });
    console.log(chalk.gray('\nTraces:'));
    traces.forEach(trace => {
      console.log(chalk.white(` ${trace.filename} → ${trace.journeyKey}`));
    });
    return;
  }

  // Phase 2: Manifest handshake — send hashes only, no file contents yet.
  console.log(chalk.gray('\nšŸ¤ Initializing ingestion...'));

  const manifest = {
    docs: docs.map(d => ({
      relativePath: d.relativePath,
      journeyKey: d.journeyKey,
      contentHash: d.contentHash,
      size: d.size
    })),
    traces: traces.map(t => ({
      filename: t.filename,
      journeyKey: t.journeyKey,
      contentHash: t.contentHash,
      size: t.size
    }))
  };

  let initResult;
  try {
    initResult = await initializeIngestion(apiKey, projectId, manifest);
    console.log(chalk.green(' āœ“ Server acknowledged manifest'));

    if (initResult.skippedFiles?.length > 0) {
      console.log(chalk.gray(` ℹ ${initResult.skippedFiles.length} file(s) unchanged (cached)`));
    }
  } catch (error) {
    console.error(chalk.red('\nError during init:'), error.message);
    process.exit(1);
  }

  // Phase 3: Upload files to presigned URLs, skipping server-cached hashes.
  const filesToUpload = [...docs, ...traces].filter(
    f => !initResult.skippedFiles?.includes(f.contentHash)
  );

  if (filesToUpload.length > 0) {
    console.log(chalk.gray(`\nšŸ“¤ Uploading ${filesToUpload.length} file(s)...`));

    const uploadResults = await uploadFiles(
      filesToUpload,
      initResult.presignedUrls || {},
      (current, total) => {
        // \r rewrites the progress line in place; later logs start with \n.
        process.stdout.write(`\r Progress: ${current}/${total}`);
      }
    );

    const failed = uploadResults.filter(r => r.error);
    if (failed.length > 0) {
      console.log(chalk.red(`\n āœ— ${failed.length} upload(s) failed`));
      failed.forEach(f => console.log(chalk.red(` - ${f.relativePath || f.filename}: ${f.error}`)));
    }

    // NOTE(review): this total counts deduplicated/skipped files as
    // "uploaded"; confirm whether skipped files should be reported separately.
    console.log(chalk.green(`\n āœ“ Uploaded ${filesToUpload.length - failed.length} file(s)`));
  }

  // Phase 4: Commit and trigger server-side processing. Storage keys fall
  // back to the content hash for files the server already had (no presigned
  // URL was issued for them).
  console.log(chalk.gray('\nšŸ”’ Committing ingestion job...'));

  try {
    const commitResult = await commitIngestion(apiKey, projectId, {
      docs: docs.map(d => ({
        relativePath: d.relativePath,
        journeyKey: d.journeyKey,
        storageKey: initResult.presignedUrls?.[d.contentHash]?.storageKey || d.contentHash
      })),
      traces: traces.map(t => ({
        filename: t.filename,
        journeyKey: t.journeyKey,
        storageKey: initResult.presignedUrls?.[t.contentHash]?.storageKey || t.contentHash
      }))
    }, git, pkg.version);

    console.log(chalk.green(' āœ“ Ingestion job created'));
    console.log(chalk.blue(`\nšŸ“Š Job ID: ${commitResult.jobId}`));
    console.log(chalk.gray(' The background worker will process traces and detect drift.'));

    if (docConfig.strategy === 'git_pr') {
      console.log(chalk.gray(' A Pull Request will be created if drift is detected.'));
    } else {
      console.log(chalk.gray(' Check the Sync Kit in the dashboard for update proposals.'));
    }

    console.log(chalk.blue('\n✨ Ingestion complete!\n'));

    return commitResult;
  } catch (error) {
    console.error(chalk.red('\nError during commit:'), error.message);
    process.exit(1);
  }
}

module.exports = ingestCommand;