@scrymore/scry-deployer 0.0.6 → 0.1.1

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
package/README.md CHANGED
@@ -217,6 +217,28 @@ The configuration is resolved in the following order of precedence:
217
217
  3. **Configuration File**: Values from `.storybook-deployer.json` in your project directory (automatically created during installation).
218
218
  4. **Programmatic Defaults**: Lowest precedence (e.g., for `--api-url`).
219
219
 
220
+ ## Private Projects
221
+
222
+ If your project is set to **private** in the Scry dashboard, uploaded Storybook
223
+ and coverage reports will only be accessible to logged-in project members.
224
+
225
+ ### How it works
226
+
227
+ 1. Upload works the same way (using your API key)
228
+ 2. The generated links work for anyone who is:
229
+ - Logged into the Scry dashboard
230
+ - A member of your project
231
+
232
+ ### Sharing with team members
233
+
234
+ To give someone access to a private project:
235
+
236
+ 1. Go to your project in the [Scry Dashboard](https://dashboard.scrymore.com)
237
+ 2. Navigate to **Settings** → **Members**
238
+ 3. Add their email address
239
+
240
+ They'll need to log in once, then all project links will work automatically.
241
+
220
242
  ### Configuration File
221
243
 
222
244
  The configuration file (`.storybook-deployer.json`) is automatically created in your project directory when you install the package. You can edit this file to set default values for common options:
package/bin/cli.js CHANGED
@@ -17,6 +17,7 @@ const { analyzeStorybook } = require('../lib/analysis.js');
17
17
  const { runCoverageAnalysis, loadCoverageReport, extractCoverageSummary } = require('../lib/coverage.js');
18
18
  const { postPRComment } = require('../lib/pr-comment.js');
19
19
  const { runInit } = require('../lib/init.js');
20
+ const { runUpdateWorkflows } = require('../lib/update-workflows.js');
20
21
 
21
22
  async function runAnalysis(argv) {
22
23
  const logger = createLogger(argv);
@@ -84,88 +85,46 @@ async function runDeployment(argv) {
84
85
  logger.debug(`Received arguments: ${JSON.stringify(argv)}`);
85
86
 
86
87
  const outPath = path.join(os.tmpdir(), `storybook-deployment-${Date.now()}.zip`);
88
+ let metadataZipPath = null;
87
89
 
88
90
  try {
89
- const { coverageReport, coverageSummary } = await resolveCoverage(argv, logger);
91
+ const coverage = await resolveCoverage(argv, logger);
92
+ const coverageReport = coverage.coverageReport;
93
+ const coverageSummary = coverage.coverageSummary;
94
+ metadataZipPath = coverage.metadataZipPath;
90
95
 
91
96
  if (argv.withAnalysis) {
92
- // Full deployment with analysis
93
97
  logger.info('Running deployment with analysis...');
98
+ }
94
99
 
95
- // 1. Capture screenshots if storybook URL provided
96
- if (argv.storybookUrl) {
97
- logger.info(`1/5: Capturing screenshots from '${argv.storybookUrl}'...`);
98
- await captureScreenshots(argv.storybookUrl, argv.storycapOptions || {});
99
- logger.success('✅ Screenshots captured');
100
- } else {
101
- logger.info('1/5: Skipping screenshot capture (no Storybook URL provided)');
102
- }
100
+ // 1. Archive only the static Storybook files.
101
+ logger.info(`1/3: Zipping directory '${argv.dir}'...`);
102
+ await zipDirectory(argv.dir, outPath);
103
+ logger.success(`✅ Archive created: ${outPath}`);
104
+ logger.debug(`Archive size: ${fs.statSync(outPath).size} bytes`);
103
105
 
104
- // 2. Analyze stories and map screenshots
105
- logger.info('2/5: Analyzing stories and mapping screenshots...');
106
- const analysisResults = analyzeStorybook({
107
- storiesDir: argv.storiesDir,
108
- screenshotsDir: argv.screenshotsDir,
106
+ // 2. Upload Storybook ZIP + coverage + metadata ZIP (if present).
107
+ logger.info('2/3: Uploading to deployment service...');
108
+ const apiClient = getApiClient(argv.apiUrl, argv.apiKey);
109
+ const uploadResult = await uploadBuild(
110
+ apiClient,
111
+ {
109
112
  project: argv.project,
110
- version: argv.version
111
- });
112
- logger.success(`✅ Found ${analysisResults.summary.totalStories} stories (${analysisResults.summary.withScreenshots} with screenshots)`);
113
-
114
- // 3. Create master ZIP with staticsite, images, and metadata
115
- logger.info('3/5: Creating master archive with static site, images, and metadata...');
116
- await createMasterZip({
117
- outPath: outPath,
118
- staticsiteDir: argv.dir,
119
- screenshotsDir: argv.screenshotsDir,
120
- metadata: analysisResults
121
- });
122
- logger.success(`✅ Master archive created: ${outPath}`);
123
- logger.debug(`Archive size: ${fs.statSync(outPath).size} bytes`);
124
-
125
- // 4. Upload archive (+ optional coverage)
126
- logger.info('4/5: Uploading to deployment service...');
127
- const apiClient = getApiClient(argv.apiUrl, argv.apiKey);
128
- const uploadResult = await uploadBuild(
129
- apiClient,
130
- {
131
- project: argv.project,
132
- version: argv.version,
133
- },
134
- { zipPath: outPath, coverageReport }
135
- );
136
- logger.success('✅ Archive uploaded.');
137
- logger.debug(`Upload result: ${JSON.stringify(uploadResult)}`);
138
-
139
- await postPRComment(buildDeployResult(argv, coverageSummary), coverageSummary);
140
-
141
- logger.success('\n🎉 Deployment with analysis successful! 🎉');
113
+ version: argv.version,
114
+ },
115
+ {
116
+ zipPath: outPath,
117
+ coverageReport,
118
+ metadataZipPath,
119
+ }
120
+ );
121
+ logger.success('✅ Archive uploaded.');
122
+ logger.debug(`Upload result: ${JSON.stringify(uploadResult)}`);
142
123
 
143
- } else {
144
- // Simple deployment without analysis
145
- // 1. Archive the directory
146
- logger.info(`1/3: Zipping directory '${argv.dir}'...`);
147
- await zipDirectory(argv.dir, outPath);
148
- logger.success(`✅ Archive created: ${outPath}`);
149
- logger.debug(`Archive size: ${fs.statSync(outPath).size} bytes`);
150
-
151
- // 2. Authenticate and upload directly (+ optional coverage)
152
- logger.info('2/3: Uploading to deployment service...');
153
- const apiClient = getApiClient(argv.apiUrl, argv.apiKey);
154
- const uploadResult = await uploadBuild(
155
- apiClient,
156
- {
157
- project: argv.project,
158
- version: argv.version,
159
- },
160
- { zipPath: outPath, coverageReport }
161
- );
162
- logger.success('✅ Archive uploaded.');
163
- logger.debug(`Upload result: ${JSON.stringify(uploadResult)}`);
164
-
165
- await postPRComment(buildDeployResult(argv, coverageSummary), coverageSummary);
166
-
167
- logger.success('\n🎉 Deployment successful! 🎉');
168
- }
124
+ await postPRComment(buildDeployResult(argv, coverageSummary, uploadResult), coverageSummary);
125
+
126
+ logger.success('\n🎉 Deployment successful! 🎉');
127
+ logUploadLinks(argv, coverageSummary, uploadResult, logger);
169
128
 
170
129
  } finally {
171
130
  // 4. Clean up the local archive
@@ -173,6 +132,10 @@ async function runDeployment(argv) {
173
132
  fs.unlinkSync(outPath);
174
133
  logger.info(`🧹 Cleaned up temporary file: ${outPath}`);
175
134
  }
135
+ if (metadataZipPath && fs.existsSync(metadataZipPath)) {
136
+ fs.unlinkSync(metadataZipPath);
137
+ logger.info(`🧹 Cleaned up temporary file: ${metadataZipPath}`);
138
+ }
176
139
  }
177
140
  }
178
141
 
@@ -352,7 +315,64 @@ async function main() {
352
315
 
353
316
  await runAnalysis(config);
354
317
  })
355
- .command('init', 'Setup GitHub Actions workflows for automatic deployment', (yargs) => {
318
+
319
+ .command('coverage', 'Run only Storybook coverage analysis and write the report to disk', (yargs) => {
320
+ return yargs
321
+ .option('dir', {
322
+ describe: 'Path to the built Storybook directory (e.g., storybook-static)',
323
+ type: 'string',
324
+ demandOption: true,
325
+ })
326
+ .option('coverage-base', {
327
+ describe: 'Base ref/branch for new code analysis (supports SHAs, origin/main, HEAD~1)',
328
+ type: 'string',
329
+ default: 'main',
330
+ alias: 'coverageBase'
331
+ })
332
+ .option('coverage-fail-on-threshold', {
333
+ describe: 'Fail (exit 1) if coverage thresholds are not met',
334
+ type: 'boolean',
335
+ default: false,
336
+ alias: 'coverageFailOnThreshold'
337
+ })
338
+ .option('coverage-execute', {
339
+ describe: 'Execute stories during coverage analysis (requires playwright in the project)',
340
+ type: 'boolean',
341
+ default: false,
342
+ alias: 'coverageExecute'
343
+ })
344
+ .option('output', {
345
+ describe: 'Where to write the JSON coverage report',
346
+ type: 'string',
347
+ default: './scry-sbcov-report.json'
348
+ })
349
+ .option('verbose', {
350
+ describe: 'Enable verbose logging',
351
+ type: 'boolean',
352
+ default: false,
353
+ });
354
+ }, async (argv) => {
355
+ const logger = createLogger(argv);
356
+
357
+ const result = await runCoverageAnalysis({
358
+ storybookDir: argv.dir,
359
+ baseBranch: argv.coverageBase || 'main',
360
+ failOnThreshold: Boolean(argv.coverageFailOnThreshold),
361
+ execute: Boolean(argv.coverageExecute),
362
+ outputPath: argv.output,
363
+ keepReport: true,
364
+ });
365
+ const report = result.report;
366
+
367
+ if (!report) {
368
+ logger.error('Coverage: no report generated (tool failed or returned null)');
369
+ process.exit(1);
370
+ }
371
+
372
+ logger.success(`✅ Coverage report written to ${argv.output}`);
373
+ })
374
+
375
+ .command('init', 'Setup GitHub Actions workflows for automatic deployment', (yargs) => {
356
376
  return yargs
357
377
  .option('project-id', {
358
378
  describe: 'Project ID from Scry dashboard',
@@ -426,22 +446,32 @@ async function resolveCoverage(argv, logger) {
426
446
  const enabled = argv.coverage !== false;
427
447
  if (!enabled) {
428
448
  logger.info('Coverage: disabled (--no-coverage)');
429
- return { coverageReport: null, coverageSummary: null };
449
+ return { coverageReport: null, coverageSummary: null, metadataZipPath: null };
430
450
  }
431
451
 
432
452
  try {
433
453
  let report = null;
454
+ let metadataZipPath = null;
434
455
 
435
456
  if (argv.coverageReport) {
436
457
  logger.info(`Coverage: using existing report at ${argv.coverageReport}`);
437
458
  report = loadCoverageReport(argv.coverageReport);
438
459
  } else {
439
- report = await runCoverageAnalysis({
460
+ const needsScreenshots = Boolean(argv.withAnalysis);
461
+ const outputZipPath = needsScreenshots
462
+ ? path.join(os.tmpdir(), `scry-metadata-${Date.now()}.zip`)
463
+ : null;
464
+
465
+ const result = await runCoverageAnalysis({
440
466
  storybookDir: argv.dir,
441
467
  baseBranch: argv.coverageBase || 'main',
442
468
  failOnThreshold: Boolean(argv.coverageFailOnThreshold),
443
- execute: Boolean(argv.coverageExecute),
469
+ execute: Boolean(argv.coverageExecute) || needsScreenshots,
470
+ screenshots: needsScreenshots,
471
+ outputZipPath,
444
472
  });
473
+ report = result.report;
474
+ metadataZipPath = result.metadataZipPath;
445
475
  }
446
476
 
447
477
  const summary = extractCoverageSummary(report);
@@ -452,7 +482,7 @@ async function resolveCoverage(argv, logger) {
452
482
  logger.info('Coverage: no report generated (tool failed or report shape unexpected)');
453
483
  }
454
484
 
455
- return { coverageReport: report, coverageSummary: summary };
485
+ return { coverageReport: report, coverageSummary: summary, metadataZipPath };
456
486
  } catch (err) {
457
487
  logger.error(`Coverage: failed (${err.message})`);
458
488
  throw err;
@@ -465,7 +495,7 @@ async function resolveCoverage(argv, logger) {
465
495
  * @param {any} argv
466
496
  * @param {any|null} coverageSummary
467
497
  */
468
- function buildDeployResult(argv, coverageSummary) {
498
+ function buildDeployResult(argv, coverageSummary, uploadResult) {
469
499
  const project = argv.project || 'main';
470
500
  const version = argv.version || 'latest';
471
501
  const viewBaseUrl = process.env.SCRY_VIEW_URL || 'https://view.scrymore.com';
@@ -482,9 +512,24 @@ function buildDeployResult(argv, coverageSummary) {
482
512
  viewUrl,
483
513
  coverageUrl,
484
514
  coveragePageUrl: coverageUrl,
515
+ visibility: uploadResult?.zipUpload?.visibility,
485
516
  };
486
517
  }
487
518
 
519
+ function logUploadLinks(argv, coverageSummary, uploadResult, logger) {
520
+ const deployResult = buildDeployResult(argv, coverageSummary, uploadResult);
521
+
522
+ logger.success('\n✅ Upload successful!\n');
523
+ logger.info(`📖 Storybook: ${deployResult.viewUrl}`);
524
+ if (deployResult.coverageUrl) {
525
+ logger.info(`📊 Coverage: ${deployResult.coverageUrl}`);
526
+ }
527
+
528
+ if (deployResult.visibility === 'private') {
529
+ logger.info('\n🔒 This project is private. Viewers must be logged in to access.');
530
+ }
531
+ }
532
+
488
533
  if (require.main === module) {
489
534
  main();
490
535
  }
@@ -495,4 +540,5 @@ module.exports = {
495
540
  runAnalysis,
496
541
  resolveCoverage,
497
542
  buildDeployResult,
543
+ logUploadLinks,
498
544
  };
package/lib/apiClient.js CHANGED
@@ -1,6 +1,24 @@
1
1
  const axios = require('axios');
2
2
  const fs = require('fs');
3
3
  const { ApiError } = require('./errors.js');
4
+ const { createLogger } = require('./logger.js');
5
+
6
+ const isVerbose =
7
+ process.env.SCRY_VERBOSE === 'true' ||
8
+ process.env.STORYBOOK_DEPLOYER_VERBOSE === 'true' ||
9
+ process.env.VERBOSE === 'true' ||
10
+ process.env.SCRY_API_DEBUG === 'true' ||
11
+ process.env.SCRY_DEBUG === 'true' ||
12
+ process.argv.includes('--verbose');
13
+
14
+ const logger = createLogger({ verbose: isVerbose });
15
+
16
+ const COVERAGE_UPLOAD_DELAY_MS = 5000;
17
+ const COVERAGE_RETRY_DELAY_MS = 60000;
18
+
19
+ function sleep(ms) {
20
+ return new Promise((resolve) => setTimeout(resolve, ms));
21
+ }
4
22
 
5
23
  /**
6
24
  * Creates a pre-configured axios instance for making API calls.
@@ -9,8 +27,10 @@ const { ApiError } = require('./errors.js');
9
27
  * @returns {axios.AxiosInstance} A configured axios instance.
10
28
  */
11
29
  function getApiClient(apiUrl, apiKey) {
30
+ logger.debug(`Initializing API client with baseURL: ${apiUrl}`);
12
31
  // This is a mock check to allow testing of a 401 error case.
13
32
  if (apiKey === 'fail-me-401') {
33
+ logger.debug('Mock 401 failure triggered by API key');
14
34
  throw new ApiError('The provided API key is invalid or has expired.', 401);
15
35
  }
16
36
 
@@ -27,6 +47,7 @@ function getApiClient(apiUrl, apiKey) {
27
47
  return axios.create({
28
48
  baseURL: apiUrl,
29
49
  headers: headers,
50
+ timeout: 60000, // 60 second timeout for large uploads
30
51
  });
31
52
  }
32
53
 
@@ -36,12 +57,14 @@ function getApiClient(apiUrl, apiKey) {
36
57
  * @param {axios.AxiosInstance} apiClient
37
58
  * @param {{project: string, version: string}} target
38
59
  * @param {{fileName: string, contentType: string}} file
39
- * @returns {Promise<string>} presigned URL
60
+ * @returns {Promise<{url: string, visibility?: string}>} presigned URL details
40
61
  */
41
62
  async function requestPresignedUrl(apiClient, target, file) {
42
63
  const projectName = target.project || 'main';
43
64
  const versionName = target.version || 'latest';
44
65
 
66
+ logger.debug(`Requesting presigned URL for ${projectName}/${versionName}/${file.fileName}`);
67
+
45
68
  const presignedResponse = await apiClient.post(
46
69
  `/presigned-url/${projectName}/${versionName}/${file.fileName}`,
47
70
  { contentType: file.contentType },
@@ -52,21 +75,78 @@ async function requestPresignedUrl(apiClient, target, file) {
52
75
  }
53
76
  );
54
77
 
78
+ logger.debug(`Presigned URL response status: ${presignedResponse.status}`);
79
+ if (presignedResponse.data?.buildId || presignedResponse.data?.buildNumber) {
80
+ logger.info(
81
+ `Build record confirmed by presigned URL response (buildId: ${presignedResponse.data?.buildId || 'n/a'}, buildNumber: ${presignedResponse.data?.buildNumber || 'n/a'}).`
82
+ );
83
+ } else {
84
+ logger.debug(
85
+ `Presigned URL response did not include buildId/buildNumber. Response keys: ${Object.keys(presignedResponse.data || {}).join(', ') || 'none'}`
86
+ );
87
+ }
55
88
  const presignedUrl = presignedResponse.data?.url;
89
+ const visibility = presignedResponse.data?.visibility;
56
90
  if (!presignedUrl || typeof presignedUrl !== 'string' || presignedUrl.trim() === '') {
91
+ logger.debug(`Invalid presigned URL received: ${JSON.stringify(presignedResponse.data)}`);
57
92
  throw new ApiError(
58
93
  `Failed to get valid presigned URL from server response. Received: ${JSON.stringify(presignedResponse.data)}`
59
94
  );
60
95
  }
61
96
 
62
- // Validate URL format
97
+ const parsedUrl = validatePresignedUrl(presignedUrl);
98
+ logger.debug(`Validated presigned URL host: ${parsedUrl.hostname}`);
99
+
100
+ return { url: presignedUrl, visibility };
101
+ }
102
+
103
+ function getAxiosErrorDetails(error, fallbackUrl) {
104
+ if (error.response) {
105
+ return {
106
+ message: `HTTP ${error.response.status} ${error.response.statusText}${error.response.data ? ` - ${JSON.stringify(error.response.data)}` : ''}`,
107
+ statusCode: error.response.status,
108
+ kind: 'response'
109
+ };
110
+ }
111
+
112
+ if (error.request) {
113
+ const code = error.code ? ` (${error.code})` : '';
114
+ const url = error.config?.url || fallbackUrl || 'unknown URL';
115
+ const baseURL = error.config?.baseURL ? ` (baseURL: ${error.config.baseURL})` : '';
116
+ return {
117
+ message: `No response received from ${url}${baseURL}${code}`,
118
+ statusCode: undefined,
119
+ kind: 'request'
120
+ };
121
+ }
122
+
123
+ return {
124
+ message: error.message || 'Unknown error',
125
+ statusCode: undefined,
126
+ kind: 'unknown'
127
+ };
128
+ }
129
+
130
+ function validatePresignedUrl(presignedUrl) {
131
+ let parsedUrl;
63
132
  try {
64
- new URL(presignedUrl);
133
+ parsedUrl = new URL(presignedUrl);
65
134
  } catch (urlError) {
66
135
  throw new ApiError(`Received invalid URL format from server: "${presignedUrl}". URL validation error: ${urlError.message}`);
67
136
  }
68
137
 
69
- return presignedUrl;
138
+ const hostname = parsedUrl.hostname || '';
139
+ if (hostname.includes('undefined')) {
140
+ throw new ApiError(
141
+ `Presigned URL hostname contains "undefined": ${hostname}. This usually means the upload service is missing its R2 account ID or bucket configuration.`
142
+ );
143
+ }
144
+
145
+ if (!hostname.endsWith('.r2.cloudflarestorage.com')) {
146
+ logger.debug(`Presigned URL hostname does not look like a standard R2 host: ${hostname}`);
147
+ }
148
+
149
+ return parsedUrl;
70
150
  }
71
151
 
72
152
  /**
@@ -78,14 +158,19 @@ async function requestPresignedUrl(apiClient, target, file) {
78
158
  * @returns {Promise<{status:number}>}
79
159
  */
80
160
  async function putToPresignedUrl(presignedUrl, buffer, contentType) {
161
+ logger.debug(`Starting PUT upload to presigned URL. Size: ${buffer.length} bytes, Content-Type: ${contentType}`);
162
+
81
163
  const uploadResponse = await axios.put(presignedUrl, buffer, {
82
164
  headers: {
83
165
  'Content-Type': contentType,
84
166
  },
85
167
  maxContentLength: Infinity,
86
168
  maxBodyLength: Infinity,
169
+ // Use a separate timeout for the actual upload if needed,
170
+ // but here we rely on the global axios or the one passed in.
87
171
  });
88
172
 
173
+ logger.debug(`PUT upload completed with status: ${uploadResponse.status}`);
89
174
  return { status: uploadResponse.status };
90
175
  }
91
176
 
@@ -101,30 +186,36 @@ async function putToPresignedUrl(presignedUrl, buffer, contentType) {
101
186
  * @returns {Promise<object>} A promise that resolves to the upload result.
102
187
  */
103
188
  async function uploadFileDirectly(apiClient, { project, version }, filePath, file = {}) {
189
+ logger.debug(`uploadFileDirectly called for file: ${filePath}`);
190
+
104
191
  // This is a mock check to allow testing of a 500 server error.
105
192
  if (project === 'fail-me-500') {
193
+ logger.debug('Mock 500 failure triggered by project name');
106
194
  throw new ApiError('The deployment service encountered an internal error.', 500);
107
195
  }
108
196
 
197
+ if (!fs.existsSync(filePath)) {
198
+ logger.debug(`File not found: ${filePath}`);
199
+ throw new Error(`File not found: ${filePath}`);
200
+ }
201
+
109
202
  const fileBuffer = fs.readFileSync(filePath);
110
203
  const fileName = file.fileName || 'storybook.zip';
111
204
  const contentType = file.contentType || 'application/zip';
112
205
 
113
206
  try {
114
- const presignedUrl = await requestPresignedUrl(apiClient, { project, version }, { fileName, contentType });
115
- const upload = await putToPresignedUrl(presignedUrl, fileBuffer, contentType);
116
- return { success: true, url: presignedUrl, status: upload.status };
207
+ const presigned = await requestPresignedUrl(apiClient, { project, version }, { fileName, contentType });
208
+ const upload = await putToPresignedUrl(presigned.url, fileBuffer, contentType);
209
+ return { success: true, url: presigned.url, status: upload.status, visibility: presigned.visibility };
117
210
  } catch (error) {
118
- if (error.response) {
119
- throw new ApiError(
120
- `Failed to upload file: ${error.response.status} ${error.response.statusText}${error.response.data ? ` - ${JSON.stringify(error.response.data)}` : ''}`,
121
- error.response.status
122
- );
123
- } else if (error.request) {
124
- throw new ApiError(`Failed to upload file: No response from server at ${apiClient.defaults.baseURL}`);
125
- } else {
126
- throw new ApiError(`Failed to upload file: ${error.message}`);
211
+ logger.debug(`Upload failed. Error type: ${error.constructor.name}, Message: ${error.message}`);
212
+ const details = getAxiosErrorDetails(error, apiClient.defaults.baseURL);
213
+ if (details.kind === 'response') {
214
+ logger.debug(`Error response status: ${details.statusCode}`);
215
+ } else if (details.kind === 'request') {
216
+ logger.debug(`Error request details: ${details.message}`);
127
217
  }
218
+ throw new ApiError(`Failed to upload file: ${details.message}`, details.statusCode);
128
219
  }
129
220
  }
130
221
 
@@ -141,6 +232,9 @@ async function uploadCoverageReportDirectly(apiClient, target, coverageReport) {
141
232
  const projectName = target.project || 'main';
142
233
  const versionName = target.version || 'latest';
143
234
 
235
+ logger.info(`Uploading coverage report for ${projectName}/${versionName}...`);
236
+ logger.debug(`Uploading coverage report for ${projectName}/${versionName}`);
237
+
144
238
  try {
145
239
  const response = await apiClient.post(
146
240
  `/upload/${projectName}/${versionName}/coverage`,
@@ -152,22 +246,83 @@ async function uploadCoverageReportDirectly(apiClient, target, coverageReport) {
152
246
  }
153
247
  );
154
248
 
249
+ logger.debug(`Coverage upload response status: ${response.status}`);
250
+ logger.info(`Coverage report upload complete (status ${response.status}).`);
155
251
  return {
156
252
  success: response.data?.success ?? true,
157
253
  buildId: response.data?.buildId,
158
254
  coverageUrl: response.data?.coverageUrl,
159
255
  };
160
256
  } catch (error) {
161
- if (error.response) {
162
- throw new ApiError(
163
- `Failed to upload coverage: ${error.response.status} ${error.response.statusText}${error.response.data ? ` - ${JSON.stringify(error.response.data)}` : ''}`,
164
- error.response.status
165
- );
166
- } else if (error.request) {
167
- throw new ApiError(`Failed to upload coverage: No response from server at ${apiClient.defaults.baseURL}`);
168
- } else {
169
- throw new ApiError(`Failed to upload coverage: ${error.message}`);
257
+ logger.debug(`Coverage upload failed. Message: ${error.message}`);
258
+ const details = getAxiosErrorDetails(error, apiClient.defaults.baseURL);
259
+
260
+ if (
261
+ details.statusCode === 404 &&
262
+ error.response?.data &&
263
+ typeof error.response.data === 'object' &&
264
+ String(error.response.data.error || '').includes('Build not found')
265
+ ) {
266
+ logger.error('Coverage upload failed with 404: Build not found for this version.');
267
+ logger.info('This is not a missing-secret error. The production Worker did not find a Firestore build record for the project + version you are attaching coverage to.');
268
+ logger.info('Coverage requires an existing build record created by a prior build upload or presigned URL generation.');
269
+ logger.info('Most common causes and fixes:');
270
+ logger.info('1) Coverage called before build exists. Upload the build ZIP first (or call the presigned URL endpoint) and then upload coverage.');
271
+ logger.info('2) Project/version mismatch. Coverage must use the same {project}/{version} as the build upload or presigned URL call.');
272
+ logger.info('3) Firestore secrets present but invalid. A malformed FIREBASE_PRIVATE_KEY (missing literal \\n sequences) or wrong project ID can prevent build creation.');
273
+ logger.info('4) Firestore integration disabled in prod. Ensure FIREBASE_PROJECT_ID, FIREBASE_CLIENT_EMAIL, FIREBASE_PRIVATE_KEY, and FIRESTORE_SERVICE_ACCOUNT_ID are set.');
274
+ logger.info('Recommended checks:');
275
+ logger.info('- Trigger a production upload or presigned URL call first and verify it returns buildId/buildNumber (confirms Firestore created a build).');
276
+ logger.info('- Then call the coverage endpoint for the same project/version.');
277
+ logger.info('- If upload does not return buildId/buildNumber, fix Firestore secrets and ensure FIREBASE_PRIVATE_KEY preserves literal \\n as documented.');
278
+ logger.info('See README.md and docs/PRODUCTION_SETUP.md for details.');
170
279
  }
280
+
281
+ throw new ApiError(`Failed to upload coverage: ${details.message}`, details.statusCode);
282
+ }
283
+ }
284
+
285
+ /**
286
+ * Upload metadata+screenshots ZIP.
287
+ * This triggers async build-processing through the upload service queue.
288
+ *
289
+ * @param {axios.AxiosInstance} apiClient
290
+ * @param {{project: string, version: string}} target
291
+ * @param {string} metadataZipPath
292
+ * @param {{info:Function,success:Function,warn:Function}} customLogger
293
+ * @returns {Promise<{success:boolean, status?:number, queued?:boolean, buildNumber?:number, zipKey?:string, error?:string}>}
294
+ */
295
+ async function uploadMetadataZip(apiClient, target, metadataZipPath, customLogger = logger) {
296
+ const projectName = target.project || 'main';
297
+ const versionName = target.version || 'latest';
298
+ const url = `/upload/${projectName}/${versionName}/metadata`;
299
+
300
+ customLogger.info('Uploading metadata ZIP...');
301
+
302
+ try {
303
+ const fileBuffer = fs.readFileSync(metadataZipPath);
304
+ const response = await apiClient.post(url, fileBuffer, {
305
+ headers: { 'Content-Type': 'application/zip' },
306
+ maxContentLength: 100 * 1024 * 1024,
307
+ maxBodyLength: 100 * 1024 * 1024,
308
+ });
309
+
310
+ const data = response.data || {};
311
+ customLogger.success(
312
+ `Metadata ZIP uploaded (build #${data.buildNumber ?? 'n/a'}, queued: ${Boolean(data.queued)})`
313
+ );
314
+
315
+ return {
316
+ success: true,
317
+ status: response.status,
318
+ queued: Boolean(data.queued),
319
+ buildNumber: data.buildNumber,
320
+ zipKey: data.zipKey,
321
+ };
322
+ } catch (error) {
323
+ const message = error.response?.data?.error || error.message || 'Unknown error';
324
+ customLogger.warn(`Metadata ZIP upload failed: ${message}`);
325
+ return { success: false, error: message };
171
326
  }
172
327
  }
173
328
 
@@ -179,9 +334,10 @@ async function uploadCoverageReportDirectly(apiClient, target, coverageReport) {
179
334
  *
180
335
  * @param {axios.AxiosInstance} apiClient
181
336
  * @param {{project: string, version: string}} target
182
- * @param {{zipPath: string, coverageReport?: any|null}} options
337
+ * @param {{zipPath: string, coverageReport?: any|null, metadataZipPath?: string|null}} options
183
338
  */
184
339
  async function uploadBuild(apiClient, target, options) {
340
+ logger.debug('uploadBuild orchestration started');
185
341
  const zipUpload = await uploadFileDirectly(apiClient, target, options.zipPath, {
186
342
  fileName: 'storybook.zip',
187
343
  contentType: 'application/zip',
@@ -189,16 +345,31 @@ async function uploadBuild(apiClient, target, options) {
189
345
 
190
346
  let coverageUpload = null;
191
347
  if (options.coverageReport) {
192
- coverageUpload = await uploadCoverageReportDirectly(apiClient, target, options.coverageReport);
348
+ logger.info(`Waiting ${COVERAGE_UPLOAD_DELAY_MS / 1000}s before uploading coverage report...`);
349
+ await sleep(COVERAGE_UPLOAD_DELAY_MS);
350
+
351
+ try {
352
+ coverageUpload = await uploadCoverageReportDirectly(apiClient, target, options.coverageReport);
353
+ } catch (error) {
354
+ logger.info('Coverage upload failed; retrying in 60s...');
355
+ await sleep(COVERAGE_RETRY_DELAY_MS);
356
+ coverageUpload = await uploadCoverageReportDirectly(apiClient, target, options.coverageReport);
357
+ }
358
+ }
359
+
360
+ let metadataUpload = null;
361
+ if (options.metadataZipPath) {
362
+ metadataUpload = await uploadMetadataZip(apiClient, target, options.metadataZipPath, logger);
193
363
  }
194
364
 
195
- return { zipUpload, coverageUpload };
365
+ return { zipUpload, coverageUpload, metadataUpload };
196
366
  }
197
367
 
198
368
  module.exports = {
199
369
  getApiClient,
200
370
  uploadFileDirectly,
201
371
  uploadCoverageReportDirectly,
372
+ uploadMetadataZip,
202
373
  uploadBuild,
203
374
  requestPresignedUrl,
204
375
  putToPresignedUrl,
package/lib/coverage.js CHANGED
@@ -8,6 +8,10 @@ const chalk = require('chalk');
8
8
  * @property {string} storybookDir Path to a built Storybook static directory (e.g. ./storybook-static)
9
9
  * @property {string} [baseBranch='main'] Base branch name to compare for "new code" analysis
10
10
  * @property {boolean} [failOnThreshold=false] If true, pass "--ci" to the coverage tool and rethrow errors
11
+ * @property {string} [outputPath] If provided, write the report to this path (relative to cwd allowed)
12
+ * @property {boolean} [keepReport=false] If true, do not delete the output file after reading
13
+ * @property {boolean} [screenshots=false] Enable passing-story screenshots in scry-sbcov
14
+ * @property {string|null} [outputZipPath=null] Where to write metadata+screenshots ZIP
11
15
  */
12
16
 
13
17
  /**
@@ -19,13 +23,23 @@ const chalk = require('chalk');
19
23
  * - Reads and returns the parsed JSON report
20
24
  * - Deletes the temporary report file
21
25
  *
22
- * If the underlying tool fails and `failOnThreshold` is false, returns `null`.
26
+ * If the underlying tool fails and `failOnThreshold` is false, returns
27
+ * `{ report: null, metadataZipPath: null }`.
23
28
  *
24
29
  * @param {RunCoverageOptions} options
25
- * @returns {Promise<any|null>} The full coverage report JSON, or null if skipped/failed (non-fatal)
30
+ * @returns {Promise<{report:any|null, metadataZipPath:string|null}>}
26
31
  */
27
32
  async function runCoverageAnalysis(options) {
28
- const { storybookDir, baseBranch = 'main', failOnThreshold = false, execute = false } = options || {};
33
+ const {
34
+ storybookDir,
35
+ baseBranch = 'main',
36
+ failOnThreshold = false,
37
+ execute = false,
38
+ outputPath: providedOutputPath,
39
+ keepReport = false,
40
+ screenshots = false,
41
+ outputZipPath = null,
42
+ } = options || {};
29
43
 
30
44
  if (!storybookDir || typeof storybookDir !== 'string') {
31
45
  throw new Error('runCoverageAnalysis: options.storybookDir is required');
@@ -33,7 +47,11 @@ async function runCoverageAnalysis(options) {
33
47
 
34
48
  console.log(chalk.blue('Running Storybook coverage analysis...'));
35
49
 
36
- const outputPath = path.join(process.cwd(), `.scry-coverage-report-${Date.now()}.json`);
50
+ const outputPath = providedOutputPath
51
+ ? (path.isAbsolute(providedOutputPath) ? providedOutputPath : path.resolve(process.cwd(), providedOutputPath))
52
+ : path.join(process.cwd(), `.scry-coverage-report-${Date.now()}.json`);
53
+
54
+ const resolvedBaseRef = resolveCoverageBaseRef(baseBranch);
37
55
 
38
56
  /** @type {string[]} */
39
57
  const cliArgs = [
@@ -42,7 +60,7 @@ async function runCoverageAnalysis(options) {
42
60
  '--output',
43
61
  outputPath,
44
62
  '--base',
45
- `origin/${baseBranch}`,
63
+ normalizeGitBaseRef(resolvedBaseRef),
46
64
  '--verbose', // Enable verbose logging to debug component detection
47
65
  ];
48
66
 
@@ -50,13 +68,21 @@ async function runCoverageAnalysis(options) {
50
68
  cliArgs.push('--ci');
51
69
  }
52
70
 
53
- if (execute) {
71
+ if (execute || screenshots) {
54
72
  cliArgs.push('--execute');
55
73
  }
74
+ if (screenshots) {
75
+ cliArgs.push('--screenshots');
76
+ if (outputZipPath) {
77
+ cliArgs.push('--output-zip', outputZipPath);
78
+ }
79
+ }
56
80
 
57
- // Use npx with -p flag to ensure package is installed, then run the binary
58
- // This is more reliable than `npx @scrymore/scry-sbcov` which can fail to find the binary
59
- const npxCommand = `npx -y -p @scrymore/scry-sbcov scry-sbcov ${cliArgs.map(shellEscape).join(' ')}`;
81
+ // Allow local override for E2E testing before package publication.
82
+ // Example:
83
+ // SCRY_SBCOV_CMD="node /abs/path/to/scry-sbcov/dist/cli/index.js"
84
+ const sbcovCommandPrefix = (process.env.SCRY_SBCOV_CMD || 'npx -y @scrymore/scry-sbcov').trim();
85
+ const npxCommand = `${sbcovCommandPrefix} ${cliArgs.map(shellEscape).join(' ')}`;
60
86
 
61
87
  // Debug logging to show the exact command being executed
62
88
  console.log(chalk.yellow('\n━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━'));
@@ -85,13 +111,16 @@ async function runCoverageAnalysis(options) {
85
111
 
86
112
  const raw = fs.readFileSync(outputPath, 'utf-8');
87
113
  const report = JSON.parse(raw);
114
+ const metadataZipPath = (screenshots && outputZipPath && fs.existsSync(outputZipPath))
115
+ ? outputZipPath
116
+ : null;
88
117
 
89
- safeUnlink(outputPath);
90
- return report;
118
+ if (!keepReport && !providedOutputPath) safeUnlink(outputPath);
119
+ return { report, metadataZipPath };
91
120
  } catch (error) {
92
- safeUnlink(outputPath);
121
+ if (!keepReport && !providedOutputPath) safeUnlink(outputPath);
93
122
  if (failOnThreshold) throw error;
94
- return null;
123
+ return { report: null, metadataZipPath: null };
95
124
  }
96
125
  }
97
126
 
@@ -181,8 +210,91 @@ function shellEscape(value) {
181
210
  return `'${value.replace(/'/g, "'\\''")}'`;
182
211
  }
183
212
 
213
+
214
+
215
/**
 * Normalize a user-supplied base ref into something git understands.
 *
 * Why this exists:
 * - In CI, the best base for "push" is often a SHA (e.g. github.event.before)
 * - In PRs, the best base is often a remote-tracking branch (e.g. origin/main)
 * - Locally, users may pass branch names (e.g. main) or rev expressions (e.g. HEAD~1)
 *
 * `scry-sbcov` expects a value it can pass to git commands as the base reference.
 *
 * @param {string} baseBranch - Raw base ref from the user or CI environment.
 * @returns {string} A ref suitable for git comparison commands.
 */
function normalizeGitBaseRef(baseBranch) {
  const ref = (baseBranch || '').trim();

  if (ref === '') return 'origin/main';

  // A commit SHA (short or full) is already an absolute reference.
  const looksLikeSha = /^[0-9a-f]{7,40}$/i.test(ref);

  // Rev expressions (HEAD, anything with ~ or ^) must not be prefixed.
  const looksLikeRevExpression = ref === 'HEAD' || /[~^]/.test(ref);

  if (looksLikeSha || looksLikeRevExpression) return ref;

  // Already-qualified refs are used verbatim.
  const qualifiedPrefixes = ['origin/', 'refs/', 'remotes/'];
  if (qualifiedPrefixes.some((prefix) => ref.startsWith(prefix))) return ref;

  // Otherwise treat it as a branch name and compare against the remote.
  // This also works for branch names containing slashes (e.g. feature/foo).
  return `origin/${ref}`;
}
249
+
250
/**
 * Resolve the base ref to pass into `scry-sbcov`, preferring PR base SHAs
 * from CI providers when available.
 *
 * Provider precedence: GitHub pull_request event payload, then GitLab MR
 * environment, then Bitbucket PR environment, then the caller's branch.
 *
 * @param {string} baseBranch - Fallback base ref when no CI base SHA is found.
 * @returns {string} The preferred base ref (SHA or branch name).
 */
function resolveCoverageBaseRef(baseBranch) {
  const env = process.env || {};

  // Candidates are evaluated lazily, in provider-precedence order; the
  // first truthy value wins.
  const candidates = [
    () => readGithubPullRequestBaseSha(env.GITHUB_EVENT_PATH),
    () => env.CI_MERGE_REQUEST_TARGET_BRANCH_SHA,
    () => env.BITBUCKET_PR_DESTINATION_COMMIT || env.BITBUCKET_PR_BASE_COMMIT,
  ];

  for (const candidate of candidates) {
    const sha = candidate();
    if (sha) return sha;
  }

  return baseBranch || 'main';
}
271
+
272
/**
 * Read GitHub pull_request base.sha from the event payload.
 *
 * Best-effort: a missing path, unreadable file, malformed JSON, or a
 * payload without pull_request data all yield null rather than throwing.
 *
 * @param {string|undefined} eventPath - Value of GITHUB_EVENT_PATH.
 * @returns {string|null} Trimmed base SHA, or null when unavailable.
 */
function readGithubPullRequestBaseSha(eventPath) {
  if (typeof eventPath !== 'string' || !eventPath) return null;

  try {
    if (!fs.existsSync(eventPath)) return null;

    const payload = JSON.parse(fs.readFileSync(eventPath, 'utf-8'));
    const sha = payload?.pull_request?.base?.sha;

    if (typeof sha !== 'string') return null;
    const trimmed = sha.trim();
    return trimmed ? trimmed : null;
  } catch (error) {
    // Any I/O or parse failure means "no usable base SHA".
    return null;
  }
}
292
+
184
293
// Public surface of lib/coverage.js.
// The git-ref helpers are exported primarily so they can be unit-tested;
// typical consumers only use the coverage entry points.
module.exports = {
  runCoverageAnalysis,
  loadCoverageReport,
  extractCoverageSummary,
  normalizeGitBaseRef,
  resolveCoverageBaseRef,
  readGithubPullRequestBaseSha,
};
package/lib/templates.js CHANGED
@@ -102,7 +102,8 @@ ${cache}
102
102
  --dir ./storybook-static \\
103
103
  \${{ vars.SCRY_COVERAGE_ENABLED == 'false' && '--no-coverage' || '' }} \\
104
104
  \${{ vars.SCRY_COVERAGE_FAIL_ON_THRESHOLD == 'true' && '--coverage-fail-on-threshold' || '' }} \\
105
- --coverage-base \${{ vars.SCRY_COVERAGE_BASE || 'main' }}
105
+ \${{ vars.SCRY_COVERAGE_EXECUTE == 'false' && '' || '--coverage-execute' }} \\
106
+ --coverage-base \${{ vars.SCRY_COVERAGE_BASE || github.event.before }}
106
107
  env:
107
108
  STORYBOOK_DEPLOYER_API_URL: \${{ vars.SCRY_API_URL }}
108
109
  STORYBOOK_DEPLOYER_PROJECT: \${{ vars.SCRY_PROJECT_ID }}
@@ -165,7 +166,8 @@ ${cache}
165
166
  \${{ github.event.pull_request.draft == true && '--no-coverage' || '' }} \\
166
167
  \${{ vars.SCRY_COVERAGE_ENABLED == 'false' && '--no-coverage' || '' }} \\
167
168
  \${{ vars.SCRY_COVERAGE_FAIL_ON_THRESHOLD == 'true' && '--coverage-fail-on-threshold' || '' }} \\
168
- --coverage-base \${{ vars.SCRY_COVERAGE_BASE || 'main' }}
169
+ \${{ vars.SCRY_COVERAGE_EXECUTE == 'false' && '' || '--coverage-execute' }} \\
170
+ --coverage-base \${{ vars.SCRY_COVERAGE_BASE || format('origin/{0}', github.base_ref) }}
169
171
 
170
172
  # Construct deployment URL using VIEW_URL (where users access the deployed Storybook)
171
173
  # Defaults to https://view.scrymore.com if SCRY_VIEW_URL is not set
@@ -0,0 +1,110 @@
1
+ const { createLogger } = require('./logger');
2
+ const { generateMainWorkflow, generatePRWorkflow } = require('./templates');
3
+ const fs = require('fs');
4
+ const path = require('path');
5
+ const { execSync } = require('child_process');
6
+
7
+ /**
8
+ * Upgrade-only workflow generator.
9
+ *
10
+ * Goal: allow users to refresh .github/workflows/* to the latest templates
11
+ * WITHOUT needing the Scry API key.
12
+ *
13
+ * This intentionally does NOT:
14
+ * - validate credentials
15
+ * - touch .storybook-deployer.json
16
+ * - set GitHub variables/secrets
17
+ */
18
+ async function runUpdateWorkflows(argv) {
19
+ const logger = createLogger({ verbose: Boolean(argv.verbose) });
20
+
21
+ logger.info('🛠️ Updating Scry GitHub Actions workflows...');
22
+
23
+ const envInfo = await checkEnvironment();
24
+ if (!envInfo.isGit) {
25
+ throw new Error('Not a git repository. Please run this from a git repo.');
26
+ }
27
+
28
+ const workflowsDir = '.github/workflows';
29
+ fs.mkdirSync(workflowsDir, { recursive: true });
30
+
31
+ const buildCmd = envInfo.storybookBuildCmd || 'build-storybook';
32
+
33
+ const mainWorkflow = generateMainWorkflow('', '', envInfo.packageManager, buildCmd);
34
+ const prWorkflow = generatePRWorkflow('', '', envInfo.packageManager, buildCmd);
35
+
36
+ const mainWorkflowPath = path.join(workflowsDir, 'deploy-storybook.yml');
37
+ const prWorkflowPath = path.join(workflowsDir, 'deploy-pr-preview.yml');
38
+
39
+ fs.writeFileSync(mainWorkflowPath, mainWorkflow, 'utf8');
40
+ fs.writeFileSync(prWorkflowPath, prWorkflow, 'utf8');
41
+
42
+ logger.success(`✅ Updated ${mainWorkflowPath}`);
43
+ logger.success(`✅ Updated ${prWorkflowPath}`);
44
+
45
+ if (argv.commit) {
46
+ gitAdd([mainWorkflowPath, prWorkflowPath], logger);
47
+ gitCommit(argv.commitMessage || 'chore: update Scry workflows', logger);
48
+ }
49
+
50
+ logger.success('✅ Workflow update complete');
51
+ }
52
+
53
/**
 * Inspect the current working directory to detect the environment:
 * whether it is a git repository, which package manager is in use
 * (inferred from the lockfile), and which package.json script builds
 * Storybook.
 *
 * Never throws: every detection failure falls back to a safe default
 * (not a git repo, npm, no build script).
 *
 * @returns {Promise<{isGit: boolean, packageManager: string, storybookBuildCmd: string|null}>}
 */
async function checkEnvironment() {
  const envInfo = {
    isGit: false,
    packageManager: 'npm', // default when no recognizable lockfile exists
    storybookBuildCmd: null,
  };

  try {
    execSync('git rev-parse --git-dir', { stdio: 'ignore' });
    envInfo.isGit = true;
  } catch {
    envInfo.isGit = false;
  }

  if (fs.existsSync('pnpm-lock.yaml')) {
    envInfo.packageManager = 'pnpm';
  } else if (fs.existsSync('yarn.lock')) {
    envInfo.packageManager = 'yarn';
  } else if (fs.existsSync('bun.lockb') || fs.existsSync('bun.lock')) {
    // Bun >= 1.2 writes a text-based bun.lock; older versions wrote the
    // binary bun.lockb. Recognize both so Bun projects aren't treated as npm.
    envInfo.packageManager = 'bun';
  }

  try {
    const pkg = JSON.parse(fs.readFileSync('package.json', 'utf8'));
    const scripts = pkg.scripts || {};
    // Conventional Storybook build script names, checked in priority order.
    const candidates = ['build-storybook', 'storybook:build', 'build:storybook'];
    envInfo.storybookBuildCmd = candidates.find((name) => scripts[name]) || null;
  } catch {
    // Missing or invalid package.json: leave storybookBuildCmd as null.
  }

  return envInfo;
}
88
+
89
/**
 * Stage the given files with `git add`, silently skipping any that do
 * not exist on disk.
 *
 * @param {string[]} files - Paths to stage, relative to the repo root.
 * @param {object} logger - Logger with a debug(message) method.
 */
function gitAdd(files, logger) {
  const existing = files.filter((file) => fs.existsSync(file));
  for (const file of existing) {
    // NOTE(review): the path is shell-interpolated; safe for our generated
    // workflow paths, but verify before calling with arbitrary input.
    execSync(`git add "${file}"`, { stdio: 'pipe' });
    logger.debug(`  ✓ Added ${file}`);
  }
}
97
+
98
/**
 * Commit all currently staged changes, logging the resulting short SHA.
 * No-ops (with an info log) when there is nothing to commit.
 *
 * @param {string} message - Commit message (used verbatim).
 * @param {object} logger - Logger with info(message) and success(message).
 * @throws {Error} Propagates git failures (e.g. missing user identity).
 */
function gitCommit(message, logger) {
  const status = execSync('git status --porcelain', { encoding: 'utf8' });
  if (!status.trim()) {
    logger.info('No changes to commit.');
    return;
  }

  // Pass the message on stdin (`-F -`) instead of interpolating it into a
  // shell string: quotes, backticks, and $() in the message would otherwise
  // break the command or be executed by the shell.
  execSync('git commit -F -', { input: message, stdio: 'pipe' });
  const sha = execSync('git rev-parse --short HEAD', { encoding: 'utf8' }).trim();
  logger.success(`✅ Committed workflow update: ${sha}`);
}
109
+
110
// Export only the entry point; checkEnvironment/gitAdd/gitCommit stay module-private.
module.exports = { runUpdateWorkflows };
package/package.json CHANGED
@@ -1,6 +1,6 @@
1
1
  {
2
2
  "name": "@scrymore/scry-deployer",
3
- "version": "0.0.6",
3
+ "version": "0.1.1",
4
4
  "description": "A CLI to automate the deployment of Storybook static builds.",
5
5
  "main": "index.js",
6
6
  "bin": {
@@ -37,7 +37,7 @@
37
37
  },
38
38
  "dependencies": {
39
39
  "@octokit/rest": "^20.0.0",
40
- "@scrymore/scry-sbcov": "^0.2.1",
40
+ "@scrymore/scry-sbcov": "^0.3.0",
41
41
  "@sentry/node": "^10.33.0",
42
42
  "archiver": "^7.0.1",
43
43
  "axios": "^1.12.2",