@scrymore/scry-deployer 0.0.5 → 0.1.0

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
package/README.md CHANGED
@@ -217,6 +217,28 @@ The configuration is resolved in the following order of precedence:
217
217
  3. **Configuration File**: Values from `.storybook-deployer.json` in your project directory (automatically created during installation).
218
218
  4. **Programmatic Defaults**: Lowest precedence (e.g., for `--api-url`).
219
219
 
220
+ ## Private Projects
221
+
222
+ If your project is set to **private** in the Scry dashboard, uploaded Storybook
223
+ and coverage reports will only be accessible to logged-in project members.
224
+
225
+ ### How it works
226
+
227
+ 1. Upload works the same way (using your API key)
228
+ 2. The generated links work for anyone who is:
229
+ - Logged into the Scry dashboard
230
+ - A member of your project
231
+
232
+ ### Sharing with team members
233
+
234
+ To give someone access to a private project:
235
+
236
+ 1. Go to your project in the [Scry Dashboard](https://dashboard.scrymore.com)
237
+ 2. Navigate to **Settings** → **Members**
238
+ 3. Add their email address
239
+
240
+ They'll need to log in once, then all project links will work automatically.
241
+
220
242
  ### Configuration File
221
243
 
222
244
  The configuration file (`.storybook-deployer.json`) is automatically created in your project directory when you install the package. You can edit this file to set default values for common options:
package/bin/cli.js CHANGED
@@ -1,5 +1,6 @@
1
1
  #!/usr/bin/env node
2
2
 
3
+ const Sentry = require('@sentry/node');
3
4
  const yargs = require('yargs/yargs');
4
5
  const { hideBin } = require('yargs/helpers');
5
6
  const fs = require('fs');
@@ -16,6 +17,7 @@ const { analyzeStorybook } = require('../lib/analysis.js');
16
17
  const { runCoverageAnalysis, loadCoverageReport, extractCoverageSummary } = require('../lib/coverage.js');
17
18
  const { postPRComment } = require('../lib/pr-comment.js');
18
19
  const { runInit } = require('../lib/init.js');
20
+ const { runUpdateWorkflows } = require('../lib/update-workflows.js');
19
21
 
20
22
  async function runAnalysis(argv) {
21
23
  const logger = createLogger(argv);
@@ -135,9 +137,10 @@ async function runDeployment(argv) {
135
137
  logger.success('✅ Archive uploaded.');
136
138
  logger.debug(`Upload result: ${JSON.stringify(uploadResult)}`);
137
139
 
138
- await postPRComment(buildDeployResult(argv, coverageSummary), coverageSummary);
140
+ await postPRComment(buildDeployResult(argv, coverageSummary, uploadResult), coverageSummary);
139
141
 
140
142
  logger.success('\n🎉 Deployment with analysis successful! 🎉');
143
+ logUploadLinks(argv, coverageSummary, uploadResult, logger);
141
144
 
142
145
  } else {
143
146
  // Simple deployment without analysis
@@ -161,9 +164,10 @@ async function runDeployment(argv) {
161
164
  logger.success('✅ Archive uploaded.');
162
165
  logger.debug(`Upload result: ${JSON.stringify(uploadResult)}`);
163
166
 
164
- await postPRComment(buildDeployResult(argv, coverageSummary), coverageSummary);
167
+ await postPRComment(buildDeployResult(argv, coverageSummary, uploadResult), coverageSummary);
165
168
 
166
169
  logger.success('\n🎉 Deployment successful! 🎉');
170
+ logUploadLinks(argv, coverageSummary, uploadResult, logger);
167
171
  }
168
172
 
169
173
  } finally {
@@ -175,10 +179,26 @@ async function runDeployment(argv) {
175
179
  }
176
180
  }
177
181
 
178
- function handleError(error, argv) {
182
+ async function handleError(error, argv) {
179
183
  const logger = createLogger(argv || {});
180
184
  logger.error(`\n❌ Error: ${error.message}`);
181
185
 
186
+ // Capture error in Sentry with additional context
187
+ Sentry.withScope((scope) => {
188
+ if (argv) {
189
+ scope.setTags({
190
+ project: argv.project,
191
+ version: argv.version,
192
+ command: argv._ ? argv._[0] : 'unknown',
193
+ });
194
+ scope.setExtra('argv', argv);
195
+ }
196
+ Sentry.captureException(error);
197
+ });
198
+
199
+ // Ensure the event is sent before the process exits
200
+ await Sentry.close(2000);
201
+
182
202
  if (error instanceof ApiError) {
183
203
  if (error.statusCode === 401) {
184
204
  logger.error('Suggestion: Check that your API key is correct and has not expired.');
@@ -195,6 +215,13 @@ function handleError(error, argv) {
195
215
  }
196
216
 
197
217
  async function main() {
218
+ // Initialize Sentry
219
+ Sentry.init({
220
+ dsn: "https://c66ce229a1db2289f145eebd02436d9c@o4507889391828992.ingest.us.sentry.io/4510699330732032", // Fallback to hardcoded DSN for user reporting
221
+ tracesSampleRate: 1.0,
222
+ environment: process.env.NODE_ENV || 'production',
223
+ });
224
+
198
225
  let config;
199
226
  try {
200
227
  const args = await yargs(hideBin(process.argv))
@@ -328,7 +355,63 @@ async function main() {
328
355
 
329
356
  await runAnalysis(config);
330
357
  })
331
- .command('init', 'Setup GitHub Actions workflows for automatic deployment', (yargs) => {
358
+
359
+ .command('coverage', 'Run only Storybook coverage analysis and write the report to disk', (yargs) => {
360
+ return yargs
361
+ .option('dir', {
362
+ describe: 'Path to the built Storybook directory (e.g., storybook-static)',
363
+ type: 'string',
364
+ demandOption: true,
365
+ })
366
+ .option('coverage-base', {
367
+ describe: 'Base ref/branch for new code analysis (supports SHAs, origin/main, HEAD~1)',
368
+ type: 'string',
369
+ default: 'main',
370
+ alias: 'coverageBase'
371
+ })
372
+ .option('coverage-fail-on-threshold', {
373
+ describe: 'Fail (exit 1) if coverage thresholds are not met',
374
+ type: 'boolean',
375
+ default: false,
376
+ alias: 'coverageFailOnThreshold'
377
+ })
378
+ .option('coverage-execute', {
379
+ describe: 'Execute stories during coverage analysis (requires playwright in the project)',
380
+ type: 'boolean',
381
+ default: false,
382
+ alias: 'coverageExecute'
383
+ })
384
+ .option('output', {
385
+ describe: 'Where to write the JSON coverage report',
386
+ type: 'string',
387
+ default: './scry-sbcov-report.json'
388
+ })
389
+ .option('verbose', {
390
+ describe: 'Enable verbose logging',
391
+ type: 'boolean',
392
+ default: false,
393
+ });
394
+ }, async (argv) => {
395
+ const logger = createLogger(argv);
396
+
397
+ const report = await runCoverageAnalysis({
398
+ storybookDir: argv.dir,
399
+ baseBranch: argv.coverageBase || 'main',
400
+ failOnThreshold: Boolean(argv.coverageFailOnThreshold),
401
+ execute: Boolean(argv.coverageExecute),
402
+ outputPath: argv.output,
403
+ keepReport: true,
404
+ });
405
+
406
+ if (!report) {
407
+ logger.error('Coverage: no report generated (tool failed or returned null)');
408
+ process.exit(1);
409
+ }
410
+
411
+ logger.success(`✅ Coverage report written to ${argv.output}`);
412
+ })
413
+
414
+ .command('init', 'Setup GitHub Actions workflows for automatic deployment', (yargs) => {
332
415
  return yargs
333
416
  .option('project-id', {
334
417
  describe: 'Project ID from Scry dashboard',
@@ -378,6 +461,9 @@ async function main() {
378
461
 
379
462
  await runInit(initConfig);
380
463
  })
464
+ .command('debug-sentry', 'Test Sentry integration by throwing an error', () => {}, () => {
465
+ throw new Error('Sentry debug error from scry-node CLI');
466
+ })
381
467
  .env('STORYBOOK_DEPLOYER')
382
468
  .help()
383
469
  .alias('help', 'h')
@@ -385,7 +471,7 @@ async function main() {
385
471
  .parse();
386
472
 
387
473
  } catch (error) {
388
- handleError(error, config);
474
+ await handleError(error, config);
389
475
  }
390
476
  }
391
477
 
@@ -438,7 +524,7 @@ async function resolveCoverage(argv, logger) {
438
524
  * @param {any} argv
439
525
  * @param {any|null} coverageSummary
440
526
  */
441
- function buildDeployResult(argv, coverageSummary) {
527
+ function buildDeployResult(argv, coverageSummary, uploadResult) {
442
528
  const project = argv.project || 'main';
443
529
  const version = argv.version || 'latest';
444
530
  const viewBaseUrl = process.env.SCRY_VIEW_URL || 'https://view.scrymore.com';
@@ -455,9 +541,24 @@ function buildDeployResult(argv, coverageSummary) {
455
541
  viewUrl,
456
542
  coverageUrl,
457
543
  coveragePageUrl: coverageUrl,
544
+ visibility: uploadResult?.zipUpload?.visibility,
458
545
  };
459
546
  }
460
547
 
548
+ function logUploadLinks(argv, coverageSummary, uploadResult, logger) {
549
+ const deployResult = buildDeployResult(argv, coverageSummary, uploadResult);
550
+
551
+ logger.success('\n✅ Upload successful!\n');
552
+ logger.info(`📖 Storybook: ${deployResult.viewUrl}`);
553
+ if (deployResult.coverageUrl) {
554
+ logger.info(`📊 Coverage: ${deployResult.coverageUrl}`);
555
+ }
556
+
557
+ if (deployResult.visibility === 'private') {
558
+ logger.info('\n🔒 This project is private. Viewers must be logged in to access.');
559
+ }
560
+ }
561
+
461
562
  if (require.main === module) {
462
563
  main();
463
564
  }
@@ -468,4 +569,5 @@ module.exports = {
468
569
  runAnalysis,
469
570
  resolveCoverage,
470
571
  buildDeployResult,
572
+ logUploadLinks,
471
573
  };
package/lib/apiClient.js CHANGED
@@ -1,6 +1,24 @@
1
1
  const axios = require('axios');
2
2
  const fs = require('fs');
3
3
  const { ApiError } = require('./errors.js');
4
+ const { createLogger } = require('./logger.js');
5
+
6
+ const isVerbose =
7
+ process.env.SCRY_VERBOSE === 'true' ||
8
+ process.env.STORYBOOK_DEPLOYER_VERBOSE === 'true' ||
9
+ process.env.VERBOSE === 'true' ||
10
+ process.env.SCRY_API_DEBUG === 'true' ||
11
+ process.env.SCRY_DEBUG === 'true' ||
12
+ process.argv.includes('--verbose');
13
+
14
+ const logger = createLogger({ verbose: isVerbose });
15
+
16
+ const COVERAGE_UPLOAD_DELAY_MS = 5000;
17
+ const COVERAGE_RETRY_DELAY_MS = 60000;
18
+
19
+ function sleep(ms) {
20
+ return new Promise((resolve) => setTimeout(resolve, ms));
21
+ }
4
22
 
5
23
  /**
6
24
  * Creates a pre-configured axios instance for making API calls.
@@ -9,8 +27,10 @@ const { ApiError } = require('./errors.js');
9
27
  * @returns {axios.AxiosInstance} A configured axios instance.
10
28
  */
11
29
  function getApiClient(apiUrl, apiKey) {
30
+ logger.debug(`Initializing API client with baseURL: ${apiUrl}`);
12
31
  // This is a mock check to allow testing of a 401 error case.
13
32
  if (apiKey === 'fail-me-401') {
33
+ logger.debug('Mock 401 failure triggered by API key');
14
34
  throw new ApiError('The provided API key is invalid or has expired.', 401);
15
35
  }
16
36
 
@@ -27,6 +47,7 @@ function getApiClient(apiUrl, apiKey) {
27
47
  return axios.create({
28
48
  baseURL: apiUrl,
29
49
  headers: headers,
50
+ timeout: 60000, // 60 second timeout for large uploads
30
51
  });
31
52
  }
32
53
 
@@ -36,12 +57,14 @@ function getApiClient(apiUrl, apiKey) {
36
57
  * @param {axios.AxiosInstance} apiClient
37
58
  * @param {{project: string, version: string}} target
38
59
  * @param {{fileName: string, contentType: string}} file
39
- * @returns {Promise<string>} presigned URL
60
+ * @returns {Promise<{url: string, visibility?: string}>} presigned URL details
40
61
  */
41
62
  async function requestPresignedUrl(apiClient, target, file) {
42
63
  const projectName = target.project || 'main';
43
64
  const versionName = target.version || 'latest';
44
65
 
66
+ logger.debug(`Requesting presigned URL for ${projectName}/${versionName}/${file.fileName}`);
67
+
45
68
  const presignedResponse = await apiClient.post(
46
69
  `/presigned-url/${projectName}/${versionName}/${file.fileName}`,
47
70
  { contentType: file.contentType },
@@ -52,21 +75,78 @@ async function requestPresignedUrl(apiClient, target, file) {
52
75
  }
53
76
  );
54
77
 
78
+ logger.debug(`Presigned URL response status: ${presignedResponse.status}`);
79
+ if (presignedResponse.data?.buildId || presignedResponse.data?.buildNumber) {
80
+ logger.info(
81
+ `Build record confirmed by presigned URL response (buildId: ${presignedResponse.data?.buildId || 'n/a'}, buildNumber: ${presignedResponse.data?.buildNumber || 'n/a'}).`
82
+ );
83
+ } else {
84
+ logger.debug(
85
+ `Presigned URL response did not include buildId/buildNumber. Response keys: ${Object.keys(presignedResponse.data || {}).join(', ') || 'none'}`
86
+ );
87
+ }
55
88
  const presignedUrl = presignedResponse.data?.url;
89
+ const visibility = presignedResponse.data?.visibility;
56
90
  if (!presignedUrl || typeof presignedUrl !== 'string' || presignedUrl.trim() === '') {
91
+ logger.debug(`Invalid presigned URL received: ${JSON.stringify(presignedResponse.data)}`);
57
92
  throw new ApiError(
58
93
  `Failed to get valid presigned URL from server response. Received: ${JSON.stringify(presignedResponse.data)}`
59
94
  );
60
95
  }
61
96
 
62
- // Validate URL format
97
+ const parsedUrl = validatePresignedUrl(presignedUrl);
98
+ logger.debug(`Validated presigned URL host: ${parsedUrl.hostname}`);
99
+
100
+ return { url: presignedUrl, visibility };
101
+ }
102
+
103
+ function getAxiosErrorDetails(error, fallbackUrl) {
104
+ if (error.response) {
105
+ return {
106
+ message: `HTTP ${error.response.status} ${error.response.statusText}${error.response.data ? ` - ${JSON.stringify(error.response.data)}` : ''}`,
107
+ statusCode: error.response.status,
108
+ kind: 'response'
109
+ };
110
+ }
111
+
112
+ if (error.request) {
113
+ const code = error.code ? ` (${error.code})` : '';
114
+ const url = error.config?.url || fallbackUrl || 'unknown URL';
115
+ const baseURL = error.config?.baseURL ? ` (baseURL: ${error.config.baseURL})` : '';
116
+ return {
117
+ message: `No response received from ${url}${baseURL}${code}`,
118
+ statusCode: undefined,
119
+ kind: 'request'
120
+ };
121
+ }
122
+
123
+ return {
124
+ message: error.message || 'Unknown error',
125
+ statusCode: undefined,
126
+ kind: 'unknown'
127
+ };
128
+ }
129
+
130
+ function validatePresignedUrl(presignedUrl) {
131
+ let parsedUrl;
63
132
  try {
64
- new URL(presignedUrl);
133
+ parsedUrl = new URL(presignedUrl);
65
134
  } catch (urlError) {
66
135
  throw new ApiError(`Received invalid URL format from server: "${presignedUrl}". URL validation error: ${urlError.message}`);
67
136
  }
68
137
 
69
- return presignedUrl;
138
+ const hostname = parsedUrl.hostname || '';
139
+ if (hostname.includes('undefined')) {
140
+ throw new ApiError(
141
+ `Presigned URL hostname contains "undefined": ${hostname}. This usually means the upload service is missing its R2 account ID or bucket configuration.`
142
+ );
143
+ }
144
+
145
+ if (!hostname.endsWith('.r2.cloudflarestorage.com')) {
146
+ logger.debug(`Presigned URL hostname does not look like a standard R2 host: ${hostname}`);
147
+ }
148
+
149
+ return parsedUrl;
70
150
  }
71
151
 
72
152
  /**
@@ -78,14 +158,19 @@ async function requestPresignedUrl(apiClient, target, file) {
78
158
  * @returns {Promise<{status:number}>}
79
159
  */
80
160
  async function putToPresignedUrl(presignedUrl, buffer, contentType) {
161
+ logger.debug(`Starting PUT upload to presigned URL. Size: ${buffer.length} bytes, Content-Type: ${contentType}`);
162
+
81
163
  const uploadResponse = await axios.put(presignedUrl, buffer, {
82
164
  headers: {
83
165
  'Content-Type': contentType,
84
166
  },
85
167
  maxContentLength: Infinity,
86
168
  maxBodyLength: Infinity,
169
+ // Use a separate timeout for the actual upload if needed,
170
+ // but here we rely on the global axios or the one passed in.
87
171
  });
88
172
 
173
+ logger.debug(`PUT upload completed with status: ${uploadResponse.status}`);
89
174
  return { status: uploadResponse.status };
90
175
  }
91
176
 
@@ -101,30 +186,36 @@ async function putToPresignedUrl(presignedUrl, buffer, contentType) {
101
186
  * @returns {Promise<object>} A promise that resolves to the upload result.
102
187
  */
103
188
  async function uploadFileDirectly(apiClient, { project, version }, filePath, file = {}) {
189
+ logger.debug(`uploadFileDirectly called for file: ${filePath}`);
190
+
104
191
  // This is a mock check to allow testing of a 500 server error.
105
192
  if (project === 'fail-me-500') {
193
+ logger.debug('Mock 500 failure triggered by project name');
106
194
  throw new ApiError('The deployment service encountered an internal error.', 500);
107
195
  }
108
196
 
197
+ if (!fs.existsSync(filePath)) {
198
+ logger.debug(`File not found: ${filePath}`);
199
+ throw new Error(`File not found: ${filePath}`);
200
+ }
201
+
109
202
  const fileBuffer = fs.readFileSync(filePath);
110
203
  const fileName = file.fileName || 'storybook.zip';
111
204
  const contentType = file.contentType || 'application/zip';
112
205
 
113
206
  try {
114
- const presignedUrl = await requestPresignedUrl(apiClient, { project, version }, { fileName, contentType });
115
- const upload = await putToPresignedUrl(presignedUrl, fileBuffer, contentType);
116
- return { success: true, url: presignedUrl, status: upload.status };
207
+ const presigned = await requestPresignedUrl(apiClient, { project, version }, { fileName, contentType });
208
+ const upload = await putToPresignedUrl(presigned.url, fileBuffer, contentType);
209
+ return { success: true, url: presigned.url, status: upload.status, visibility: presigned.visibility };
117
210
  } catch (error) {
118
- if (error.response) {
119
- throw new ApiError(
120
- `Failed to upload file: ${error.response.status} ${error.response.statusText}${error.response.data ? ` - ${JSON.stringify(error.response.data)}` : ''}`,
121
- error.response.status
122
- );
123
- } else if (error.request) {
124
- throw new ApiError(`Failed to upload file: No response from server at ${apiClient.defaults.baseURL}`);
125
- } else {
126
- throw new ApiError(`Failed to upload file: ${error.message}`);
211
+ logger.debug(`Upload failed. Error type: ${error.constructor.name}, Message: ${error.message}`);
212
+ const details = getAxiosErrorDetails(error, apiClient.defaults.baseURL);
213
+ if (details.kind === 'response') {
214
+ logger.debug(`Error response status: ${details.statusCode}`);
215
+ } else if (details.kind === 'request') {
216
+ logger.debug(`Error request details: ${details.message}`);
127
217
  }
218
+ throw new ApiError(`Failed to upload file: ${details.message}`, details.statusCode);
128
219
  }
129
220
  }
130
221
 
@@ -141,6 +232,9 @@ async function uploadCoverageReportDirectly(apiClient, target, coverageReport) {
141
232
  const projectName = target.project || 'main';
142
233
  const versionName = target.version || 'latest';
143
234
 
235
+ logger.info(`Uploading coverage report for ${projectName}/${versionName}...`);
236
+ logger.debug(`Uploading coverage report for ${projectName}/${versionName}`);
237
+
144
238
  try {
145
239
  const response = await apiClient.post(
146
240
  `/upload/${projectName}/${versionName}/coverage`,
@@ -152,22 +246,39 @@ async function uploadCoverageReportDirectly(apiClient, target, coverageReport) {
152
246
  }
153
247
  );
154
248
 
249
+ logger.debug(`Coverage upload response status: ${response.status}`);
250
+ logger.info(`Coverage report upload complete (status ${response.status}).`);
155
251
  return {
156
252
  success: response.data?.success ?? true,
157
253
  buildId: response.data?.buildId,
158
254
  coverageUrl: response.data?.coverageUrl,
159
255
  };
160
256
  } catch (error) {
161
- if (error.response) {
162
- throw new ApiError(
163
- `Failed to upload coverage: ${error.response.status} ${error.response.statusText}${error.response.data ? ` - ${JSON.stringify(error.response.data)}` : ''}`,
164
- error.response.status
165
- );
166
- } else if (error.request) {
167
- throw new ApiError(`Failed to upload coverage: No response from server at ${apiClient.defaults.baseURL}`);
168
- } else {
169
- throw new ApiError(`Failed to upload coverage: ${error.message}`);
257
+ logger.debug(`Coverage upload failed. Message: ${error.message}`);
258
+ const details = getAxiosErrorDetails(error, apiClient.defaults.baseURL);
259
+
260
+ if (
261
+ details.statusCode === 404 &&
262
+ error.response?.data &&
263
+ typeof error.response.data === 'object' &&
264
+ String(error.response.data.error || '').includes('Build not found')
265
+ ) {
266
+ logger.error('Coverage upload failed with 404: Build not found for this version.');
267
+ logger.info('This is not a missing-secret error. The production Worker did not find a Firestore build record for the project + version you are attaching coverage to.');
268
+ logger.info('Coverage requires an existing build record created by a prior build upload or presigned URL generation.');
269
+ logger.info('Most common causes and fixes:');
270
+ logger.info('1) Coverage called before build exists. Upload the build ZIP first (or call the presigned URL endpoint) and then upload coverage.');
271
+ logger.info('2) Project/version mismatch. Coverage must use the same {project}/{version} as the build upload or presigned URL call.');
272
+ logger.info('3) Firestore secrets present but invalid. A malformed FIREBASE_PRIVATE_KEY (missing literal \\n sequences) or wrong project ID can prevent build creation.');
273
+ logger.info('4) Firestore integration disabled in prod. Ensure FIREBASE_PROJECT_ID, FIREBASE_CLIENT_EMAIL, FIREBASE_PRIVATE_KEY, and FIRESTORE_SERVICE_ACCOUNT_ID are set.');
274
+ logger.info('Recommended checks:');
275
+ logger.info('- Trigger a production upload or presigned URL call first and verify it returns buildId/buildNumber (confirms Firestore created a build).');
276
+ logger.info('- Then call the coverage endpoint for the same project/version.');
277
+ logger.info('- If upload does not return buildId/buildNumber, fix Firestore secrets and ensure FIREBASE_PRIVATE_KEY preserves literal \\n as documented.');
278
+ logger.info('See README.md and docs/PRODUCTION_SETUP.md for details.');
170
279
  }
280
+
281
+ throw new ApiError(`Failed to upload coverage: ${details.message}`, details.statusCode);
171
282
  }
172
283
  }
173
284
 
@@ -182,6 +293,7 @@ async function uploadCoverageReportDirectly(apiClient, target, coverageReport) {
182
293
  * @param {{zipPath: string, coverageReport?: any|null}} options
183
294
  */
184
295
  async function uploadBuild(apiClient, target, options) {
296
+ logger.debug('uploadBuild orchestration started');
185
297
  const zipUpload = await uploadFileDirectly(apiClient, target, options.zipPath, {
186
298
  fileName: 'storybook.zip',
187
299
  contentType: 'application/zip',
@@ -189,7 +301,16 @@ async function uploadBuild(apiClient, target, options) {
189
301
 
190
302
  let coverageUpload = null;
191
303
  if (options.coverageReport) {
192
- coverageUpload = await uploadCoverageReportDirectly(apiClient, target, options.coverageReport);
304
+ logger.info(`Waiting ${COVERAGE_UPLOAD_DELAY_MS / 1000}s before uploading coverage report...`);
305
+ await sleep(COVERAGE_UPLOAD_DELAY_MS);
306
+
307
+ try {
308
+ coverageUpload = await uploadCoverageReportDirectly(apiClient, target, options.coverageReport);
309
+ } catch (error) {
310
+ logger.info('Coverage upload failed; retrying in 60s...');
311
+ await sleep(COVERAGE_RETRY_DELAY_MS);
312
+ coverageUpload = await uploadCoverageReportDirectly(apiClient, target, options.coverageReport);
313
+ }
193
314
  }
194
315
 
195
316
  return { zipUpload, coverageUpload };
package/lib/coverage.js CHANGED
@@ -8,6 +8,8 @@ const chalk = require('chalk');
8
8
  * @property {string} storybookDir Path to a built Storybook static directory (e.g. ./storybook-static)
9
9
  * @property {string} [baseBranch='main'] Base branch name to compare for "new code" analysis
10
10
  * @property {boolean} [failOnThreshold=false] If true, pass "--ci" to the coverage tool and rethrow errors
11
+ * @property {string} [outputPath] If provided, write the report to this path (relative to cwd allowed)
12
+ * @property {boolean} [keepReport=false] If true, do not delete the output file after reading
11
13
  */
12
14
 
13
15
  /**
@@ -25,7 +27,7 @@ const chalk = require('chalk');
25
27
  * @returns {Promise<any|null>} The full coverage report JSON, or null if skipped/failed (non-fatal)
26
28
  */
27
29
  async function runCoverageAnalysis(options) {
28
- const { storybookDir, baseBranch = 'main', failOnThreshold = false, execute = false } = options || {};
30
+ const { storybookDir, baseBranch = 'main', failOnThreshold = false, execute = false, outputPath: providedOutputPath, keepReport = false } = options || {};
29
31
 
30
32
  if (!storybookDir || typeof storybookDir !== 'string') {
31
33
  throw new Error('runCoverageAnalysis: options.storybookDir is required');
@@ -33,32 +35,39 @@ async function runCoverageAnalysis(options) {
33
35
 
34
36
  console.log(chalk.blue('Running Storybook coverage analysis...'));
35
37
 
36
- const outputPath = path.join(process.cwd(), `.scry-coverage-report-${Date.now()}.json`);
38
+ const outputPath = providedOutputPath
39
+ ? (path.isAbsolute(providedOutputPath) ? providedOutputPath : path.resolve(process.cwd(), providedOutputPath))
40
+ : path.join(process.cwd(), `.scry-coverage-report-${Date.now()}.json`);
41
+
42
+ const resolvedBaseRef = resolveCoverageBaseRef(baseBranch);
37
43
 
38
44
  /** @type {string[]} */
39
- const args = [
40
- '@scrymore/scry-sbcov',
45
+ const cliArgs = [
41
46
  '--storybook-static',
42
47
  storybookDir,
43
48
  '--output',
44
49
  outputPath,
45
50
  '--base',
46
- `origin/${baseBranch}`,
51
+ normalizeGitBaseRef(resolvedBaseRef),
47
52
  '--verbose', // Enable verbose logging to debug component detection
48
53
  ];
49
54
 
50
55
  if (failOnThreshold) {
51
- args.push('--ci');
56
+ cliArgs.push('--ci');
52
57
  }
53
58
 
54
59
  if (execute) {
55
- args.push('--execute');
60
+ cliArgs.push('--execute');
56
61
  }
57
62
 
63
+ // Use npx with the package name directly.
64
+ // This is more reliable than `npx -p` which can fail to find the binary in some environments.
65
+ const npxCommand = `npx -y @scrymore/scry-sbcov ${cliArgs.map(shellEscape).join(' ')}`;
66
+
58
67
  // Debug logging to show the exact command being executed
59
68
  console.log(chalk.yellow('\n━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━'));
60
69
  console.log(chalk.yellow('DEBUG: Executing coverage command:'));
61
- console.log(chalk.gray(`npx ${args.join(' ')}`));
70
+ console.log(chalk.gray(npxCommand));
62
71
  console.log(chalk.yellow('Working directory: ' + process.cwd()));
63
72
  console.log(chalk.yellow('━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━\n'));
64
73
 
@@ -75,7 +84,7 @@ async function runCoverageAnalysis(options) {
75
84
  console.log(chalk.yellow('Project root: ' + projectRoot));
76
85
  console.log(chalk.yellow('━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━\n'));
77
86
 
78
- execSync(`npx ${args.map(shellEscape).join(' ')}`, {
87
+ execSync(npxCommand, {
79
88
  stdio: 'inherit',
80
89
  cwd: projectRoot // Run from the project root, not scry-node directory
81
90
  });
@@ -83,10 +92,10 @@ async function runCoverageAnalysis(options) {
83
92
  const raw = fs.readFileSync(outputPath, 'utf-8');
84
93
  const report = JSON.parse(raw);
85
94
 
86
- safeUnlink(outputPath);
95
+ if (!keepReport && !providedOutputPath) safeUnlink(outputPath);
87
96
  return report;
88
97
  } catch (error) {
89
- safeUnlink(outputPath);
98
+ if (!keepReport && !providedOutputPath) safeUnlink(outputPath);
90
99
  if (failOnThreshold) throw error;
91
100
  return null;
92
101
  }
@@ -178,8 +187,91 @@ function shellEscape(value) {
178
187
  return `'${value.replace(/'/g, "'\\''")}'`;
179
188
  }
180
189
 
190
+
191
+
192
+ /**
193
+ * Normalize a user-supplied base ref into something git understands.
194
+ *
195
+ * Why this exists:
196
+ * - In CI, the best base for "push" is often a SHA (e.g. github.event.before)
197
+ * - In PRs, the best base is often a remote-tracking branch (e.g. origin/main)
198
+ * - Locally, users may pass branch names (e.g. main) or rev expressions (e.g. HEAD~1)
199
+ *
200
+ * `scry-sbcov` expects a value it can pass to git commands as the base reference.
201
+ *
202
+ * @param {string} baseBranch
203
+ * @returns {string}
204
+ */
205
+ function normalizeGitBaseRef(baseBranch) {
206
+ const value = (baseBranch || '').trim();
207
+
208
+ if (!value) return 'origin/main';
209
+
210
+ // Commit SHA (short or full)
211
+ if (/^[0-9a-f]{7,40}$/i.test(value)) return value;
212
+
213
+ // Common rev expressions that should not be prefixed.
214
+ if (value === 'HEAD' || value.startsWith('HEAD~') || value.startsWith('HEAD^')) return value;
215
+ if (/[~^]/.test(value)) return value;
216
+
217
+ // If user already provided a qualified ref, use it as-is.
218
+ if (value.startsWith('origin/')) return value;
219
+ if (value.startsWith('refs/')) return value;
220
+ if (value.startsWith('remotes/')) return value;
221
+
222
+ // Otherwise, treat it as a branch name and compare against the remote.
223
+ // This also works for branch names that contain slashes (e.g. feature/foo).
224
+ return `origin/${value}`;
225
+ }
226
+
227
+ /**
228
+ * Resolve the base ref to pass into `scry-sbcov`, preferring PR base SHAs
229
+ * from CI providers when available.
230
+ *
231
+ * @param {string} baseBranch
232
+ * @returns {string}
233
+ */
234
+ function resolveCoverageBaseRef(baseBranch) {
235
+ const env = process.env || {};
236
+
237
+ const githubBaseSha = readGithubPullRequestBaseSha(env.GITHUB_EVENT_PATH);
238
+ if (githubBaseSha) return githubBaseSha;
239
+
240
+ const gitlabBaseSha = env.CI_MERGE_REQUEST_TARGET_BRANCH_SHA;
241
+ if (gitlabBaseSha) return gitlabBaseSha;
242
+
243
+ const bitbucketBaseSha = env.BITBUCKET_PR_DESTINATION_COMMIT || env.BITBUCKET_PR_BASE_COMMIT;
244
+ if (bitbucketBaseSha) return bitbucketBaseSha;
245
+
246
+ return baseBranch || 'main';
247
+ }
248
+
249
+ /**
250
+ * Read GitHub pull_request base.sha from the event payload.
251
+ *
252
+ * @param {string|undefined} eventPath
253
+ * @returns {string|null}
254
+ */
255
+ function readGithubPullRequestBaseSha(eventPath) {
256
+ if (!eventPath || typeof eventPath !== 'string') return null;
257
+
258
+ try {
259
+ if (!fs.existsSync(eventPath)) return null;
260
+ const raw = fs.readFileSync(eventPath, 'utf-8');
261
+ const payload = JSON.parse(raw);
262
+ const baseSha = payload?.pull_request?.base?.sha;
263
+ if (typeof baseSha === 'string' && baseSha.trim()) return baseSha.trim();
264
+ return null;
265
+ } catch (error) {
266
+ return null;
267
+ }
268
+ }
269
+
181
270
  module.exports = {
182
271
  runCoverageAnalysis,
183
272
  loadCoverageReport,
184
273
  extractCoverageSummary,
274
+ normalizeGitBaseRef,
275
+ resolveCoverageBaseRef,
276
+ readGithubPullRequestBaseSha,
185
277
  };
package/lib/logger.js CHANGED
@@ -8,6 +8,7 @@ const chalk = require('chalk');
8
8
  * @returns {{info: Function, error: Function, debug: Function, success: Function}}
9
9
  */
10
10
  function createLogger({ verbose = false }) {
11
+ const Sentry = require('@sentry/node');
11
12
  return {
12
13
  /**
13
14
  * Logs an informational message.
@@ -15,6 +16,11 @@ function createLogger({ verbose = false }) {
15
16
  */
16
17
  info: (message) => {
17
18
  console.log(message);
19
+ Sentry.addBreadcrumb({
20
+ category: 'log',
21
+ message: message,
22
+ level: 'info',
23
+ });
18
24
  },
19
25
 
20
26
  /**
@@ -31,6 +37,11 @@ function createLogger({ verbose = false }) {
31
37
  */
32
38
  error: (message) => {
33
39
  console.error(chalk.red(message));
40
+ Sentry.addBreadcrumb({
41
+ category: 'log',
42
+ message: message,
43
+ level: 'error',
44
+ });
34
45
  },
35
46
 
36
47
  /**
@@ -41,6 +52,11 @@ function createLogger({ verbose = false }) {
41
52
  if (verbose) {
42
53
  console.log(chalk.dim(`[debug] ${message}`));
43
54
  }
55
+ Sentry.addBreadcrumb({
56
+ category: 'log',
57
+ message: message,
58
+ level: 'debug',
59
+ });
44
60
  },
45
61
  };
46
62
  }
package/lib/templates.js CHANGED
@@ -102,7 +102,8 @@ ${cache}
102
102
  --dir ./storybook-static \\
103
103
  \${{ vars.SCRY_COVERAGE_ENABLED == 'false' && '--no-coverage' || '' }} \\
104
104
  \${{ vars.SCRY_COVERAGE_FAIL_ON_THRESHOLD == 'true' && '--coverage-fail-on-threshold' || '' }} \\
105
- --coverage-base \${{ vars.SCRY_COVERAGE_BASE || 'main' }}
105
+ \${{ vars.SCRY_COVERAGE_EXECUTE == 'false' && '' || '--coverage-execute' }} \\
106
+ --coverage-base \${{ vars.SCRY_COVERAGE_BASE || github.event.before }}
106
107
  env:
107
108
  STORYBOOK_DEPLOYER_API_URL: \${{ vars.SCRY_API_URL }}
108
109
  STORYBOOK_DEPLOYER_PROJECT: \${{ vars.SCRY_PROJECT_ID }}
@@ -165,7 +166,8 @@ ${cache}
165
166
  \${{ github.event.pull_request.draft == true && '--no-coverage' || '' }} \\
166
167
  \${{ vars.SCRY_COVERAGE_ENABLED == 'false' && '--no-coverage' || '' }} \\
167
168
  \${{ vars.SCRY_COVERAGE_FAIL_ON_THRESHOLD == 'true' && '--coverage-fail-on-threshold' || '' }} \\
168
- --coverage-base \${{ vars.SCRY_COVERAGE_BASE || 'main' }}
169
+ \${{ vars.SCRY_COVERAGE_EXECUTE == 'false' && '' || '--coverage-execute' }} \\
170
+ --coverage-base \${{ vars.SCRY_COVERAGE_BASE || format('origin/{0}', github.base_ref) }}
169
171
 
170
172
  # Construct deployment URL using VIEW_URL (where users access the deployed Storybook)
171
173
  # Defaults to https://view.scrymore.com if SCRY_VIEW_URL is not set
@@ -0,0 +1,110 @@
1
// Node built-ins
const { execFileSync, execSync } = require('child_process');
const fs = require('fs');
const path = require('path');

// Local modules
const { createLogger } = require('./logger');
const { generateMainWorkflow, generatePRWorkflow } = require('./templates');
6
+
7
+ /**
8
+ * Upgrade-only workflow generator.
9
+ *
10
+ * Goal: allow users to refresh .github/workflows/* to the latest templates
11
+ * WITHOUT needing the Scry API key.
12
+ *
13
+ * This intentionally does NOT:
14
+ * - validate credentials
15
+ * - touch .storybook-deployer.json
16
+ * - set GitHub variables/secrets
17
+ */
18
/**
 * Regenerates the Scry GitHub Actions workflow files from the latest bundled
 * templates, without requiring a Scry API key.
 *
 * Intentionally does NOT validate credentials, touch .storybook-deployer.json,
 * or set GitHub variables/secrets.
 *
 * @param {object} argv - Parsed CLI args ({verbose, commit, commitMessage}).
 * @returns {Promise<void>}
 * @throws {Error} When the current directory is not a git repository.
 */
async function runUpdateWorkflows(argv) {
  const logger = createLogger({ verbose: Boolean(argv.verbose) });

  logger.info('🛠️ Updating Scry GitHub Actions workflows...');

  const env = await checkEnvironment();
  if (!env.isGit) {
    throw new Error('Not a git repository. Please run this from a git repo.');
  }

  // Make sure the target directory exists before writing into it.
  const workflowsDir = '.github/workflows';
  fs.mkdirSync(workflowsDir, { recursive: true });

  const buildCommand = env.storybookBuildCmd || 'build-storybook';

  // Render both workflow files from the current templates.
  const targets = [
    {
      file: path.join(workflowsDir, 'deploy-storybook.yml'),
      content: generateMainWorkflow('', '', env.packageManager, buildCommand),
    },
    {
      file: path.join(workflowsDir, 'deploy-pr-preview.yml'),
      content: generatePRWorkflow('', '', env.packageManager, buildCommand),
    },
  ];

  for (const target of targets) {
    fs.writeFileSync(target.file, target.content, 'utf8');
  }
  for (const target of targets) {
    logger.success(`✅ Updated ${target.file}`);
  }

  // Optionally stage and commit the refreshed workflows.
  if (argv.commit) {
    gitAdd(targets.map((t) => t.file), logger);
    gitCommit(argv.commitMessage || 'chore: update Scry workflows', logger);
  }

  logger.success('✅ Workflow update complete');
}
52
+
53
/**
 * Inspects the current working directory to infer repository/tooling context.
 *
 * @returns {Promise<{isGit: boolean, packageManager: string, storybookBuildCmd: string|null}>}
 *   isGit             - whether cwd is inside a git repository
 *   packageManager    - 'npm' (default), 'pnpm', 'yarn', or 'bun', from lockfiles
 *   storybookBuildCmd - detected package.json Storybook build script, or null
 */
async function checkEnvironment() {
  const envInfo = {
    isGit: false,
    packageManager: 'npm',
    storybookBuildCmd: null,
  };

  // `git rev-parse` exits non-zero (so execSync throws) outside a repository
  // or when git itself is missing.
  try {
    execSync('git rev-parse --git-dir', { stdio: 'ignore' });
    envInfo.isGit = true;
  } catch {
    envInfo.isGit = false;
  }

  // Lockfile-based package-manager detection. Fix: also recognize `bun.lock`,
  // the text lockfile that Bun >= 1.2 writes by default (`bun.lockb` is the
  // older binary format) — previously such repos were misdetected as npm.
  if (fs.existsSync('pnpm-lock.yaml')) {
    envInfo.packageManager = 'pnpm';
  } else if (fs.existsSync('yarn.lock')) {
    envInfo.packageManager = 'yarn';
  } else if (fs.existsSync('bun.lockb') || fs.existsSync('bun.lock')) {
    envInfo.packageManager = 'bun';
  }

  // Look for a conventional Storybook build script, first match wins.
  // A missing or unparseable package.json is not an error; defaults remain.
  try {
    const pkg = JSON.parse(fs.readFileSync('package.json', 'utf8'));
    const scripts = pkg.scripts || {};
    for (const candidate of ['build-storybook', 'storybook:build', 'build:storybook']) {
      if (scripts[candidate]) {
        envInfo.storybookBuildCmd = candidate;
        break;
      }
    }
  } catch {
    // ignore — fall back to defaults
  }

  return envInfo;
}
88
+
89
/**
 * Stages the given files with git, skipping any that do not exist on disk.
 *
 * Fix: the previous implementation interpolated each path into a shell
 * command string (`git add "${file}"`), which breaks on paths containing
 * double quotes and lets shell metacharacters be interpreted. Using
 * execFileSync with an argument vector bypasses the shell entirely, and
 * `--` guards against paths that look like git options.
 *
 * @param {string[]} files - Paths (relative to the repo root) to stage.
 * @param {{debug: Function}} logger - Logger for per-file debug output.
 */
function gitAdd(files, logger) {
  for (const file of files) {
    if (!fs.existsSync(file)) continue;
    execFileSync('git', ['add', '--', file], { stdio: 'pipe' });
    logger.debug(` ✓ Added ${file}`);
  }
}
97
+
98
/**
 * Commits any staged changes with the given message and logs the short SHA.
 *
 * Fix: the previous implementation interpolated the user-supplied message
 * into a shell string (`git commit -m "${message}"`). A --commit-message
 * containing `"`, backticks, or `$()` would break the command or execute
 * arbitrary shell code. Passing the message as an argv element via
 * execFileSync makes it inert.
 *
 * @param {string} message - Commit message (user-controlled CLI input).
 * @param {{info: Function, success: Function}} logger - Logger for output.
 */
function gitCommit(message, logger) {
  // Nothing to commit? Exit quietly instead of letting `git commit` fail.
  const status = execSync('git status --porcelain', { encoding: 'utf8' });
  if (!status.trim()) {
    logger.info('No changes to commit.');
    return;
  }

  execFileSync('git', ['commit', '-m', message], { stdio: 'pipe' });
  const sha = execSync('git rev-parse --short HEAD', { encoding: 'utf8' }).trim();
  logger.success(`✅ Committed workflow update: ${sha}`);
}
109
+
110
// Only the CLI entry point is exported; the env/git helpers stay private.
module.exports = { runUpdateWorkflows };
package/package.json CHANGED
@@ -1,6 +1,6 @@
1
1
  {
2
2
  "name": "@scrymore/scry-deployer",
3
- "version": "0.0.5",
3
+ "version": "0.1.0",
4
4
  "description": "A CLI to automate the deployment of Storybook static builds.",
5
5
  "main": "index.js",
6
6
  "bin": {
@@ -37,7 +37,8 @@
37
37
  },
38
38
  "dependencies": {
39
39
  "@octokit/rest": "^20.0.0",
40
- "@scrymore/scry-sbcov": "^0.2.1",
40
+ "@scrymore/scry-sbcov": "^0.2.2",
41
+ "@sentry/node": "^10.33.0",
41
42
  "archiver": "^7.0.1",
42
43
  "axios": "^1.12.2",
43
44
  "chalk": "^4.1.2",