@reshotdev/screenshot 0.0.1-beta.0

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (59) hide show
  1. package/LICENSE +190 -0
  2. package/README.md +388 -0
  3. package/package.json +64 -0
  4. package/src/commands/auth.js +259 -0
  5. package/src/commands/chrome.js +140 -0
  6. package/src/commands/ci-run.js +123 -0
  7. package/src/commands/ci-setup.js +288 -0
  8. package/src/commands/drifts.js +423 -0
  9. package/src/commands/import-tests.js +309 -0
  10. package/src/commands/ingest.js +458 -0
  11. package/src/commands/init.js +633 -0
  12. package/src/commands/publish.js +1721 -0
  13. package/src/commands/pull.js +303 -0
  14. package/src/commands/record.js +94 -0
  15. package/src/commands/run.js +476 -0
  16. package/src/commands/setup-wizard.js +740 -0
  17. package/src/commands/setup.js +137 -0
  18. package/src/commands/status.js +275 -0
  19. package/src/commands/sync.js +621 -0
  20. package/src/commands/ui.js +248 -0
  21. package/src/commands/validate-docs.js +529 -0
  22. package/src/index.js +462 -0
  23. package/src/lib/api-client.js +815 -0
  24. package/src/lib/capture-engine.js +1623 -0
  25. package/src/lib/capture-script-runner.js +3120 -0
  26. package/src/lib/ci-detect.js +137 -0
  27. package/src/lib/config.js +1240 -0
  28. package/src/lib/diff-engine.js +642 -0
  29. package/src/lib/hash.js +74 -0
  30. package/src/lib/image-crop.js +396 -0
  31. package/src/lib/matrix.js +89 -0
  32. package/src/lib/output-path-template.js +318 -0
  33. package/src/lib/playwright-runner.js +252 -0
  34. package/src/lib/polished-clip.js +553 -0
  35. package/src/lib/privacy-engine.js +408 -0
  36. package/src/lib/progress-tracker.js +142 -0
  37. package/src/lib/record-browser-injection.js +654 -0
  38. package/src/lib/record-cdp.js +612 -0
  39. package/src/lib/record-clip.js +343 -0
  40. package/src/lib/record-config.js +623 -0
  41. package/src/lib/record-screenshot.js +360 -0
  42. package/src/lib/record-terminal.js +123 -0
  43. package/src/lib/recorder-service.js +781 -0
  44. package/src/lib/secrets.js +51 -0
  45. package/src/lib/selector-strategies.js +859 -0
  46. package/src/lib/standalone-mode.js +400 -0
  47. package/src/lib/storage-providers.js +569 -0
  48. package/src/lib/style-engine.js +684 -0
  49. package/src/lib/ui-api.js +4677 -0
  50. package/src/lib/ui-assets.js +373 -0
  51. package/src/lib/ui-executor.js +587 -0
  52. package/src/lib/variant-injector.js +591 -0
  53. package/src/lib/viewport-presets.js +454 -0
  54. package/src/lib/worker-pool.js +118 -0
  55. package/web/cropper/index.html +436 -0
  56. package/web/manager/dist/assets/index--ZgioErz.js +507 -0
  57. package/web/manager/dist/assets/index-n468W0Wr.css +1 -0
  58. package/web/manager/dist/index.html +27 -0
  59. package/web/subtitle-editor/index.html +295 -0
@@ -0,0 +1,569 @@
1
+ /**
2
+ * Storage Providers for BYOS (Bring Your Own Storage)
3
+ * Supports: reshot (platform), s3, r2, local
4
+ *
5
+ * This module allows CLI to work standalone or paired with the platform
6
+ */
7
+ const fs = require("fs-extra");
8
+ const path = require("path");
9
+ const crypto = require("crypto");
10
+ const chalk = require("chalk");
11
+
12
+ /**
13
+ * Storage configuration schema
14
+ * @typedef {Object} StorageConfig
15
+ * @property {'reshot'|'s3'|'r2'|'local'} type - Storage provider type
16
+ * @property {string} [bucket] - Bucket name (for s3/r2)
17
+ * @property {string} [region] - AWS region (for s3)
18
+ * @property {string} [pathPrefix] - Path prefix for assets
19
+ * @property {string} [endpoint] - Custom endpoint (for r2)
20
+ * @property {string} [accountId] - Cloudflare account ID (for r2)
21
+ * @property {string} [publicDomain] - Public domain for asset URLs
22
+ * @property {string} [outputDir] - Output directory (for local)
23
+ */
24
+
25
/**
 * Validate storage configuration and check for required credentials.
 *
 * Performs static checks only (config keys + environment variables);
 * no network calls. The input `config` is never mutated — defaults such
 * as the local output directory are applied by the provider constructors.
 *
 * @param {StorageConfig} config - Storage configuration (null/undefined => platform mode)
 * @returns {{valid: boolean, errors: string[], warnings: string[]}}
 *   `valid` is true iff no hard errors were found; warnings are advisory.
 */
function validateStorageConfig(config) {
  const errors = [];
  const warnings = [];

  // Missing or typeless config means platform mode; always "valid" here
  // (authentication itself is checked elsewhere).
  if (!config || !config.type) {
    return { valid: true, errors: [], warnings: ['No storage configuration - using platform mode (requires auth)'] };
  }

  switch (config.type) {
    case 'reshot':
      // Platform mode - requires API key (from auth or env)
      if (!process.env.RESHOT_API_KEY) {
        warnings.push('Platform storage requires API key. Run "reshot auth" or set RESHOT_API_KEY environment variable.');
      }
      break;

    case 's3':
      // AWS S3 - requires credentials and bucket
      if (!config.bucket) {
        errors.push('S3 storage requires "bucket" in storage config');
      }
      if (!process.env.AWS_ACCESS_KEY_ID) {
        errors.push('S3 storage requires AWS_ACCESS_KEY_ID environment variable');
      }
      if (!process.env.AWS_SECRET_ACCESS_KEY) {
        errors.push('S3 storage requires AWS_SECRET_ACCESS_KEY environment variable');
      }
      if (!config.region && !process.env.AWS_REGION) {
        warnings.push('No AWS region specified. Defaulting to us-east-1');
      }
      break;

    case 'r2':
      // Cloudflare R2 - requires credentials, account ID, and bucket
      if (!config.bucket) {
        errors.push('R2 storage requires "bucket" in storage config');
      }
      if (!config.accountId && !process.env.CLOUDFLARE_ACCOUNT_ID) {
        errors.push('R2 storage requires "accountId" in config or CLOUDFLARE_ACCOUNT_ID environment variable');
      }
      if (!process.env.R2_ACCESS_KEY_ID && !process.env.AWS_ACCESS_KEY_ID) {
        errors.push('R2 storage requires R2_ACCESS_KEY_ID (or AWS_ACCESS_KEY_ID) environment variable');
      }
      if (!process.env.R2_SECRET_ACCESS_KEY && !process.env.AWS_SECRET_ACCESS_KEY) {
        errors.push('R2 storage requires R2_SECRET_ACCESS_KEY (or AWS_SECRET_ACCESS_KEY) environment variable');
      }
      break;

    case 'local': {
      // Local storage - just needs an output directory.
      // FIX: the previous version assigned config.outputDir here, mutating
      // the caller's object. LocalStorageProvider applies the identical
      // default itself, so only the warning is needed.
      if (!config.outputDir) {
        const outputDir = './.reshot/published';
        warnings.push(`No output directory specified. Using default: ${outputDir}`);
      }
      break;
    }

    default:
      errors.push(`Unknown storage type: ${config.type}. Supported: reshot, s3, r2, local`);
  }

  return {
    valid: errors.length === 0,
    errors,
    warnings,
  };
}
96
+
97
/**
 * Get storage provider help text.
 *
 * Returns a multi-line, chalk-colored setup guide for the given provider,
 * including a sample docsync.config.json snippet rendered via
 * JSON.stringify. Unknown types fall through to the 'reshot' (platform)
 * help text.
 *
 * @param {string} type - Storage type ('s3' | 'r2' | 'local' | 'reshot')
 * @returns {string} human-readable setup instructions for the terminal
 */
function getStorageSetupHelp(type) {
  switch (type) {
    // AWS S3: env-var credentials + bucket/region config.
    case 's3':
      return `
${chalk.cyan('AWS S3 Setup:')}

1. ${chalk.yellow('Create or get your AWS credentials:')}
   - Go to AWS Console > IAM > Users > Your User > Security credentials
   - Create an access key pair

2. ${chalk.yellow('Set environment variables:')}
   ${chalk.gray('export AWS_ACCESS_KEY_ID="your-access-key-id"')}
   ${chalk.gray('export AWS_SECRET_ACCESS_KEY="your-secret-access-key"')}
   ${chalk.gray('export AWS_REGION="us-east-1" # optional, defaults to us-east-1')}

3. ${chalk.yellow('Update docsync.config.json:')}
   ${chalk.gray(JSON.stringify({
    storage: {
      type: 's3',
      bucket: 'your-bucket-name',
      region: 'us-east-1',
      pathPrefix: 'docs-assets/',
      publicDomain: 'https://your-bucket.s3.amazonaws.com'
    }
  }, null, 2))}

4. ${chalk.yellow('Ensure bucket permissions:')}
   - Bucket must allow uploads from your credentials
   - For public assets, configure bucket policy for public read access
`;

    // Cloudflare R2: S3-compatible; needs account ID on top of key pair.
    case 'r2':
      return `
${chalk.cyan('Cloudflare R2 Setup:')}

1. ${chalk.yellow('Create R2 bucket and API token:')}
   - Go to Cloudflare Dashboard > R2 > Create bucket
   - Create an API token with R2 read/write permissions

2. ${chalk.yellow('Set environment variables:')}
   ${chalk.gray('export CLOUDFLARE_ACCOUNT_ID="your-account-id"')}
   ${chalk.gray('export R2_ACCESS_KEY_ID="your-r2-access-key"')}
   ${chalk.gray('export R2_SECRET_ACCESS_KEY="your-r2-secret-key"')}

3. ${chalk.yellow('Update docsync.config.json:')}
   ${chalk.gray(JSON.stringify({
    storage: {
      type: 'r2',
      bucket: 'your-bucket-name',
      accountId: 'your-cloudflare-account-id',
      pathPrefix: 'docs-assets/',
      publicDomain: 'https://assets.yourdomain.com'
    }
  }, null, 2))}

4. ${chalk.yellow('(Optional) Configure custom domain:')}
   - Set up R2 custom domain for public access
   - Add publicDomain to config for correct manifest URLs
`;

    // Local file system: no credentials, just an output directory.
    case 'local':
      return `
${chalk.cyan('Local Storage Setup:')}

For local testing or self-hosted scenarios:

1. ${chalk.yellow('Update docsync.config.json:')}
   ${chalk.gray(JSON.stringify({
    storage: {
      type: 'local',
      outputDir: './published-assets',
      publicDomain: 'https://your-domain.com/assets'
    }
  }, null, 2))}

2. ${chalk.yellow('Assets will be saved to:')}
   <projectRoot>/<outputDir>/

3. ${chalk.yellow('Deploy the output directory')} to your web server
   and update publicDomain to match the public URL.
`;

    // Platform mode (also the fallback for unrecognized types).
    case 'reshot':
    default:
      return `
${chalk.cyan('Reshot Platform Setup:')}

Use Reshot for full governance features (review queue, version control, etc.):

1. ${chalk.yellow('Authenticate:')}
   ${chalk.gray('reshot auth')}

2. ${chalk.yellow('Or set environment variable for CI/CD:')}
   ${chalk.gray('export RESHOT_API_KEY="your-api-key"')}

3. ${chalk.yellow('Config (optional):')}
   ${chalk.gray(JSON.stringify({
    storage: {
      type: 'reshot'
    }
  }, null, 2))}

Benefits of Reshot Platform:
- Visual review queue with approval workflow
- Version history and rollback
- Unbreakable URLs that never change
- Team collaboration and RBAC
- Changelog generation from commits
`;
  }
}
213
+
214
/**
 * Abstract contract for storage providers.
 *
 * Concrete implementations (local / s3 / r2) must override each method
 * below; the base versions exist only to fail loudly when a subclass
 * forgets one.
 */
class BaseStorageProvider {
  /** @param {StorageConfig} config - Raw storage config, kept as-is. */
  constructor(config) {
    this.config = config;
  }

  /** Upload a file; subclasses return { success, path, publicUrl, hash }. */
  async upload(filePath, key, contentType) {
    throw new Error('upload() must be implemented by subclass');
  }

  /** Resolve the publicly reachable URL for a stored key. */
  getPublicUrl(key) {
    throw new Error('getPublicUrl() must be implemented by subclass');
  }

  /** Write a manifest describing a batch of completed uploads. */
  async generateManifest(uploads) {
    throw new Error('generateManifest() must be implemented by subclass');
  }
}
234
+
235
/**
 * Local file system storage provider.
 *
 * Copies assets into an output directory that is required to live inside
 * the current working directory (a guard against path escapes), and writes
 * a manifest.json alongside them.
 */
class LocalStorageProvider extends BaseStorageProvider {
  /**
   * @param {StorageConfig} config - Uses `outputDir` (default
   *   './.reshot/published') and optional `publicDomain`.
   * @throws {Error} if the resolved outputDir escapes the project root.
   */
  constructor(config) {
    super(config);
    const root = process.cwd();
    const target = path.resolve(root, config.outputDir || './.reshot/published');
    const insideRoot = target === root || target.startsWith(root + path.sep);
    if (!insideRoot) {
      throw new Error(`Output directory must be within the project root: ${root}. Got: ${target}`);
    }
    this.outputDir = target;
    this.publicDomain = config.publicDomain || '';
  }

  /**
   * Copy a file into the output directory under `key`.
   * @param {string} filePath - Source file on disk
   * @param {string} key - Destination key (may contain slashes)
   * @param {string} contentType - Unused here; kept for interface parity
   * @returns {Promise<{success: boolean, path: string, publicUrl: string, hash: string}>}
   */
  async upload(filePath, key, contentType) {
    const destination = path.join(this.outputDir, key);
    await fs.ensureDir(path.dirname(destination));
    await fs.copy(filePath, destination);
    const hash = await this._hashFile(filePath);
    return {
      success: true,
      path: key,
      publicUrl: this.getPublicUrl(key),
      hash,
    };
  }

  /**
   * Public URL for a key: publicDomain-based when configured, otherwise a
   * file:// URL pointing at the output directory.
   */
  getPublicUrl(key) {
    if (!this.publicDomain) {
      return `file://${path.join(this.outputDir, key)}`;
    }
    return `${this.publicDomain.replace(/\/$/, '')}/${key}`;
  }

  /**
   * Write manifest.json into the output directory.
   * NOTE(review): entries are keyed by `upload.key`, a field that upload()
   * itself does not return — callers appear to attach it; confirm upstream.
   * @param {Array<object>} uploads - Completed upload descriptors
   * @returns {Promise<{manifestPath: string, manifest: object}>}
   */
  async generateManifest(uploads) {
    const assets = {};
    for (const entry of uploads) {
      assets[entry.key] = {
        localPath: path.join(this.outputDir, entry.path),
        publicUrl: entry.publicUrl,
        hash: entry.hash,
        contentType: entry.contentType,
      };
    }

    const manifest = {
      generated: new Date().toISOString(),
      provider: 'local',
      outputDir: this.outputDir,
      publicDomain: this.publicDomain || null,
      assets,
    };

    const manifestPath = path.join(this.outputDir, 'manifest.json');
    await fs.writeJSON(manifestPath, manifest, { spaces: 2 });

    return { manifestPath, manifest };
  }

  /** sha256 of the file contents, truncated to 16 hex chars. */
  async _hashFile(filePath) {
    const bytes = await fs.readFile(filePath);
    return crypto.createHash('sha256').update(bytes).digest('hex').slice(0, 16);
  }
}
300
+
301
/**
 * AWS S3 storage provider.
 *
 * Uploads assets with a content-hash suffix embedded in the object key so
 * every published asset is immutable and can be cached indefinitely.
 */
class S3StorageProvider extends BaseStorageProvider {
  /**
   * @param {StorageConfig} config - Must include `bucket`; `region` falls
   *   back to AWS_REGION then 'us-east-1'.
   */
  constructor(config) {
    super(config);
    this.bucket = config.bucket;
    this.region = config.region || process.env.AWS_REGION || 'us-east-1';
    // Normalized without trailing slash; joined with '/' in upload().
    this.pathPrefix = (config.pathPrefix || '').replace(/\/$/, '');
    this.publicDomain = config.publicDomain;
    this._s3Client = null;
  }

  /**
   * Lazily create and cache the S3 client. Dynamic import keeps
   * @aws-sdk/client-s3 optional for users who never select this provider.
   * @returns {Promise<object>} the cached S3Client instance
   */
  async _getClient() {
    if (!this._s3Client) {
      // Dynamic import to avoid requiring AWS SDK when not using S3
      const { S3Client, PutObjectCommand } = await import('@aws-sdk/client-s3');
      this.S3Client = S3Client;
      this.PutObjectCommand = PutObjectCommand;

      this._s3Client = new S3Client({
        region: this.region,
        credentials: {
          accessKeyId: process.env.AWS_ACCESS_KEY_ID,
          secretAccessKey: process.env.AWS_SECRET_ACCESS_KEY,
        },
      });
    }
    return this._s3Client;
  }

  /**
   * Upload a file to S3 under `<pathPrefix>/<key>` with a content-hash
   * suffix and immutable cache headers.
   * @param {string} filePath - Local file to read
   * @param {string} key - Logical asset key (may contain slashes)
   * @param {string} contentType - MIME type for the object
   * @returns {Promise<{success: boolean, path: string, publicUrl: string, hash: string}>}
   */
  async upload(filePath, key, contentType) {
    const client = await this._getClient();
    const content = await fs.readFile(filePath);
    const fullKey = this.pathPrefix ? `${this.pathPrefix}/${key}` : key;
    const hash = crypto.createHash('sha256').update(content).digest('hex').slice(0, 16);

    // Include hash in path for immutability
    const hashedKey = this._addHashToKey(fullKey, hash);

    const command = new this.PutObjectCommand({
      Bucket: this.bucket,
      Key: hashedKey,
      Body: content,
      ContentType: contentType,
      CacheControl: 'public, max-age=31536000, immutable', // Long cache for immutable assets
    });

    await client.send(command);

    return {
      success: true,
      path: hashedKey,
      publicUrl: this.getPublicUrl(hashedKey),
      hash,
    };
  }

  /**
   * Insert the content hash before the file extension:
   * "img/shot.png" -> "img/shot-<hash>.png".
   * @param {string} key
   * @param {string} hash
   * @returns {string}
   */
  _addHashToKey(key, hash) {
    const ext = path.extname(key);
    // FIX: for extension-less keys path.extname() returns '', and
    // key.slice(0, -0) === key.slice(0, 0) === '' would drop the entire
    // key, producing just "-<hash>". Guard the empty-extension case.
    const base = ext ? key.slice(0, -ext.length) : key;
    return `${base}-${hash}${ext}`;
  }

  /**
   * Public URL for an object key: publicDomain when configured, otherwise
   * the virtual-hosted S3 URL for the bucket/region.
   */
  getPublicUrl(key) {
    if (this.publicDomain) {
      const domain = this.publicDomain.replace(/\/$/, '');
      return `${domain}/${key}`;
    }
    return `https://${this.bucket}.s3.${this.region}.amazonaws.com/${key}`;
  }

  /**
   * Write a timestamped manifest plus a manifest-latest.json copy under
   * .reshot/manifests in the current working directory.
   * NOTE(review): entries are keyed by `upload.key`, which upload() itself
   * does not return — callers appear to attach it; confirm upstream.
   * @param {Array<object>} uploads - Completed upload descriptors
   * @returns {Promise<{manifestPath: string, manifest: object}>}
   */
  async generateManifest(uploads) {
    const manifest = {
      generated: new Date().toISOString(),
      provider: 's3',
      bucket: this.bucket,
      region: this.region,
      publicDomain: this.publicDomain || null,
      assets: {},
    };

    for (const upload of uploads) {
      manifest.assets[upload.key] = {
        s3Key: upload.path,
        publicUrl: upload.publicUrl,
        hash: upload.hash,
        contentType: upload.contentType,
      };
    }

    // Save manifest locally
    const manifestDir = path.join(process.cwd(), '.reshot', 'manifests');
    await fs.ensureDir(manifestDir);
    const manifestPath = path.join(manifestDir, `manifest-${Date.now()}.json`);
    await fs.writeJSON(manifestPath, manifest, { spaces: 2 });

    // Also save as latest
    const latestPath = path.join(manifestDir, 'manifest-latest.json');
    await fs.writeJSON(latestPath, manifest, { spaces: 2 });

    return { manifestPath: latestPath, manifest };
  }
}
405
+
406
/**
 * Cloudflare R2 storage provider.
 *
 * Uses R2's S3-compatible API: same upload/manifest behavior as the S3
 * provider, but authenticated against the account-scoped R2 endpoint.
 */
class R2StorageProvider extends BaseStorageProvider {
  /**
   * @param {StorageConfig} config - Must include `bucket`; `accountId`
   *   falls back to CLOUDFLARE_ACCOUNT_ID.
   */
  constructor(config) {
    super(config);
    this.bucket = config.bucket;
    this.accountId = config.accountId || process.env.CLOUDFLARE_ACCOUNT_ID;
    // Normalized without trailing slash; joined with '/' in upload().
    this.pathPrefix = (config.pathPrefix || '').replace(/\/$/, '');
    this.publicDomain = config.publicDomain;
    this._s3Client = null;
  }

  /**
   * Lazily create and cache the S3-compatible client pointed at the R2
   * endpoint. R2 credentials take precedence over AWS ones.
   * @returns {Promise<object>} the cached S3Client instance
   */
  async _getClient() {
    if (!this._s3Client) {
      // R2 uses S3-compatible API
      const { S3Client, PutObjectCommand } = await import('@aws-sdk/client-s3');
      this.S3Client = S3Client;
      this.PutObjectCommand = PutObjectCommand;

      const endpoint = `https://${this.accountId}.r2.cloudflarestorage.com`;

      this._s3Client = new S3Client({
        region: 'auto', // R2 requires region 'auto'
        endpoint,
        credentials: {
          accessKeyId: process.env.R2_ACCESS_KEY_ID || process.env.AWS_ACCESS_KEY_ID,
          secretAccessKey: process.env.R2_SECRET_ACCESS_KEY || process.env.AWS_SECRET_ACCESS_KEY,
        },
      });
    }
    return this._s3Client;
  }

  /**
   * Upload a file to R2 under `<pathPrefix>/<key>` with a content-hash
   * suffix and immutable cache headers.
   * @param {string} filePath - Local file to read
   * @param {string} key - Logical asset key (may contain slashes)
   * @param {string} contentType - MIME type for the object
   * @returns {Promise<{success: boolean, path: string, publicUrl: string, hash: string}>}
   */
  async upload(filePath, key, contentType) {
    const client = await this._getClient();
    const content = await fs.readFile(filePath);
    const fullKey = this.pathPrefix ? `${this.pathPrefix}/${key}` : key;
    const hash = crypto.createHash('sha256').update(content).digest('hex').slice(0, 16);

    // Include hash in path for immutability
    const hashedKey = this._addHashToKey(fullKey, hash);

    const command = new this.PutObjectCommand({
      Bucket: this.bucket,
      Key: hashedKey,
      Body: content,
      ContentType: contentType,
      CacheControl: 'public, max-age=31536000, immutable',
    });

    await client.send(command);

    return {
      success: true,
      path: hashedKey,
      publicUrl: this.getPublicUrl(hashedKey),
      hash,
    };
  }

  /**
   * Insert the content hash before the file extension:
   * "img/shot.png" -> "img/shot-<hash>.png".
   * @param {string} key
   * @param {string} hash
   * @returns {string}
   */
  _addHashToKey(key, hash) {
    const ext = path.extname(key);
    // FIX: for extension-less keys path.extname() returns '', and
    // key.slice(0, -0) === key.slice(0, 0) === '' would drop the entire
    // key, producing just "-<hash>". Guard the empty-extension case.
    const base = ext ? key.slice(0, -ext.length) : key;
    return `${base}-${hash}${ext}`;
  }

  /**
   * Public URL for an object key. R2 has no default public URL; prefer a
   * configured publicDomain (custom domain or r2.dev).
   * NOTE(review): the r2.dev fallback below guesses a
   * `<bucket>.<accountId>.r2.dev` host; public r2.dev domains are
   * bucket-specific — verify this resolves or require publicDomain.
   */
  getPublicUrl(key) {
    if (this.publicDomain) {
      const domain = this.publicDomain.replace(/\/$/, '');
      return `${domain}/${key}`;
    }
    // R2 doesn't have a default public URL - must use custom domain or R2.dev
    return `https://${this.bucket}.${this.accountId}.r2.dev/${key}`;
  }

  /**
   * Write a timestamped manifest plus a manifest-latest.json copy under
   * .reshot/manifests in the current working directory.
   * NOTE(review): entries are keyed by `upload.key`, which upload() itself
   * does not return — callers appear to attach it; confirm upstream.
   * @param {Array<object>} uploads - Completed upload descriptors
   * @returns {Promise<{manifestPath: string, manifest: object}>}
   */
  async generateManifest(uploads) {
    const manifest = {
      generated: new Date().toISOString(),
      provider: 'r2',
      bucket: this.bucket,
      accountId: this.accountId,
      publicDomain: this.publicDomain || null,
      assets: {},
    };

    for (const upload of uploads) {
      manifest.assets[upload.key] = {
        r2Key: upload.path,
        publicUrl: upload.publicUrl,
        hash: upload.hash,
        contentType: upload.contentType,
      };
    }

    // Save manifest locally
    const manifestDir = path.join(process.cwd(), '.reshot', 'manifests');
    await fs.ensureDir(manifestDir);
    const manifestPath = path.join(manifestDir, `manifest-${Date.now()}.json`);
    await fs.writeJSON(manifestPath, manifest, { spaces: 2 });

    // Also save as latest
    const latestPath = path.join(manifestDir, 'manifest-latest.json');
    await fs.writeJSON(latestPath, manifest, { spaces: 2 });

    return { manifestPath: latestPath, manifest };
  }
}
514
+
515
/**
 * Factory: build the storage provider matching the given config.
 *
 * @param {StorageConfig} config - Storage configuration
 * @returns {BaseStorageProvider|null} a concrete provider, or null when
 *   the platform API client should be used instead (no config / 'reshot').
 * @throws {Error} for unrecognized storage types
 */
function createStorageProvider(config) {
  const type = config?.type;

  // No config, or explicit platform storage: caller should talk to the
  // platform API client rather than a BYOS provider.
  if (!type || type === 'reshot') {
    return null;
  }

  if (type === 's3') {
    return new S3StorageProvider(config);
  }
  if (type === 'r2') {
    return new R2StorageProvider(config);
  }
  if (type === 'local') {
    return new LocalStorageProvider(config);
  }
  throw new Error(`Unknown storage type: ${config.type}`);
}
536
+
537
/**
 * Determine storage mode from config.
 *
 * @param {object} docSyncConfig - The docsync.config.json content
 * @returns {'platform'|'byos'} 'platform' when storage is absent,
 *   typeless, or 'reshot'; otherwise 'byos' (bring your own storage).
 */
function getStorageMode(docSyncConfig) {
  const type = docSyncConfig?.storage?.type;
  return !type || type === 'reshot' ? 'platform' : 'byos';
}
549
+
550
/**
 * Check if platform features are available (auth present).
 *
 * True when RESHOT_API_KEY is set, or when .reshot/settings.json in the
 * current working directory parses and contains an apiKey.
 * @returns {boolean}
 */
function isPlatformAvailable() {
  if (process.env.RESHOT_API_KEY) {
    return true;
  }
  const settingsPath = path.join(process.cwd(), '.reshot', 'settings.json');
  if (!fs.existsSync(settingsPath)) {
    return false;
  }
  // throws:false => returns null instead of raising on unreadable JSON.
  const settings = fs.readJSONSync(settingsPath, { throws: false });
  return !!settings?.apiKey;
}
559
+
560
// Public API: config validation and setup help, the provider factory and
// mode helpers, plus the concrete provider classes (exported for direct
// use and testing).
module.exports = {
  validateStorageConfig,
  getStorageSetupHelp,
  createStorageProvider,
  getStorageMode,
  isPlatformAvailable,
  LocalStorageProvider,
  S3StorageProvider,
  R2StorageProvider,
};