@scrymore/scry-deployer 0.0.6 → 0.1.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/README.md +22 -0
- package/bin/cli.js +79 -4
- package/lib/apiClient.js +147 -26
- package/lib/coverage.js +97 -8
- package/lib/templates.js +4 -2
- package/lib/update-workflows.js +110 -0
- package/package.json +2 -2
package/README.md
CHANGED
|
@@ -217,6 +217,28 @@ The configuration is resolved in the following order of precedence:
|
|
|
217
217
|
3. **Configuration File**: Values from `.storybook-deployer.json` in your project directory (automatically created during installation).
|
|
218
218
|
4. **Programmatic Defaults**: Lowest precedence (e.g., for `--api-url`).
|
|
219
219
|
|
|
220
|
+
## Private Projects
|
|
221
|
+
|
|
222
|
+
If your project is set to **private** in the Scry dashboard, uploaded Storybook
|
|
223
|
+
and coverage reports will only be accessible to logged-in project members.
|
|
224
|
+
|
|
225
|
+
### How it works
|
|
226
|
+
|
|
227
|
+
1. Upload works the same way (using your API key)
|
|
228
|
+
2. The generated links work for anyone who is:
|
|
229
|
+
- Logged into the Scry dashboard
|
|
230
|
+
- A member of your project
|
|
231
|
+
|
|
232
|
+
### Sharing with team members
|
|
233
|
+
|
|
234
|
+
To give someone access to a private project:
|
|
235
|
+
|
|
236
|
+
1. Go to your project in the [Scry Dashboard](https://dashboard.scrymore.com)
|
|
237
|
+
2. Navigate to **Settings** → **Members**
|
|
238
|
+
3. Add their email address
|
|
239
|
+
|
|
240
|
+
They'll need to log in once, then all project links will work automatically.
|
|
241
|
+
|
|
220
242
|
### Configuration File
|
|
221
243
|
|
|
222
244
|
The configuration file (`.storybook-deployer.json`) is automatically created in your project directory when you install the package. You can edit this file to set default values for common options:
|
package/bin/cli.js
CHANGED
|
@@ -17,6 +17,7 @@ const { analyzeStorybook } = require('../lib/analysis.js');
|
|
|
17
17
|
const { runCoverageAnalysis, loadCoverageReport, extractCoverageSummary } = require('../lib/coverage.js');
|
|
18
18
|
const { postPRComment } = require('../lib/pr-comment.js');
|
|
19
19
|
const { runInit } = require('../lib/init.js');
|
|
20
|
+
const { runUpdateWorkflows } = require('../lib/update-workflows.js');
|
|
20
21
|
|
|
21
22
|
async function runAnalysis(argv) {
|
|
22
23
|
const logger = createLogger(argv);
|
|
@@ -136,9 +137,10 @@ async function runDeployment(argv) {
|
|
|
136
137
|
logger.success('✅ Archive uploaded.');
|
|
137
138
|
logger.debug(`Upload result: ${JSON.stringify(uploadResult)}`);
|
|
138
139
|
|
|
139
|
-
await postPRComment(buildDeployResult(argv, coverageSummary), coverageSummary);
|
|
140
|
+
await postPRComment(buildDeployResult(argv, coverageSummary, uploadResult), coverageSummary);
|
|
140
141
|
|
|
141
142
|
logger.success('\n🎉 Deployment with analysis successful! 🎉');
|
|
143
|
+
logUploadLinks(argv, coverageSummary, uploadResult, logger);
|
|
142
144
|
|
|
143
145
|
} else {
|
|
144
146
|
// Simple deployment without analysis
|
|
@@ -162,9 +164,10 @@ async function runDeployment(argv) {
|
|
|
162
164
|
logger.success('✅ Archive uploaded.');
|
|
163
165
|
logger.debug(`Upload result: ${JSON.stringify(uploadResult)}`);
|
|
164
166
|
|
|
165
|
-
await postPRComment(buildDeployResult(argv, coverageSummary), coverageSummary);
|
|
167
|
+
await postPRComment(buildDeployResult(argv, coverageSummary, uploadResult), coverageSummary);
|
|
166
168
|
|
|
167
169
|
logger.success('\n🎉 Deployment successful! 🎉');
|
|
170
|
+
logUploadLinks(argv, coverageSummary, uploadResult, logger);
|
|
168
171
|
}
|
|
169
172
|
|
|
170
173
|
} finally {
|
|
@@ -352,7 +355,63 @@ async function main() {
|
|
|
352
355
|
|
|
353
356
|
await runAnalysis(config);
|
|
354
357
|
})
|
|
355
|
-
|
|
358
|
+
|
|
359
|
+
.command('coverage', 'Run only Storybook coverage analysis and write the report to disk', (yargs) => {
|
|
360
|
+
return yargs
|
|
361
|
+
.option('dir', {
|
|
362
|
+
describe: 'Path to the built Storybook directory (e.g., storybook-static)',
|
|
363
|
+
type: 'string',
|
|
364
|
+
demandOption: true,
|
|
365
|
+
})
|
|
366
|
+
.option('coverage-base', {
|
|
367
|
+
describe: 'Base ref/branch for new code analysis (supports SHAs, origin/main, HEAD~1)',
|
|
368
|
+
type: 'string',
|
|
369
|
+
default: 'main',
|
|
370
|
+
alias: 'coverageBase'
|
|
371
|
+
})
|
|
372
|
+
.option('coverage-fail-on-threshold', {
|
|
373
|
+
describe: 'Fail (exit 1) if coverage thresholds are not met',
|
|
374
|
+
type: 'boolean',
|
|
375
|
+
default: false,
|
|
376
|
+
alias: 'coverageFailOnThreshold'
|
|
377
|
+
})
|
|
378
|
+
.option('coverage-execute', {
|
|
379
|
+
describe: 'Execute stories during coverage analysis (requires playwright in the project)',
|
|
380
|
+
type: 'boolean',
|
|
381
|
+
default: false,
|
|
382
|
+
alias: 'coverageExecute'
|
|
383
|
+
})
|
|
384
|
+
.option('output', {
|
|
385
|
+
describe: 'Where to write the JSON coverage report',
|
|
386
|
+
type: 'string',
|
|
387
|
+
default: './scry-sbcov-report.json'
|
|
388
|
+
})
|
|
389
|
+
.option('verbose', {
|
|
390
|
+
describe: 'Enable verbose logging',
|
|
391
|
+
type: 'boolean',
|
|
392
|
+
default: false,
|
|
393
|
+
});
|
|
394
|
+
}, async (argv) => {
|
|
395
|
+
const logger = createLogger(argv);
|
|
396
|
+
|
|
397
|
+
const report = await runCoverageAnalysis({
|
|
398
|
+
storybookDir: argv.dir,
|
|
399
|
+
baseBranch: argv.coverageBase || 'main',
|
|
400
|
+
failOnThreshold: Boolean(argv.coverageFailOnThreshold),
|
|
401
|
+
execute: Boolean(argv.coverageExecute),
|
|
402
|
+
outputPath: argv.output,
|
|
403
|
+
keepReport: true,
|
|
404
|
+
});
|
|
405
|
+
|
|
406
|
+
if (!report) {
|
|
407
|
+
logger.error('Coverage: no report generated (tool failed or returned null)');
|
|
408
|
+
process.exit(1);
|
|
409
|
+
}
|
|
410
|
+
|
|
411
|
+
logger.success(`✅ Coverage report written to ${argv.output}`);
|
|
412
|
+
})
|
|
413
|
+
|
|
414
|
+
.command('init', 'Setup GitHub Actions workflows for automatic deployment', (yargs) => {
|
|
356
415
|
return yargs
|
|
357
416
|
.option('project-id', {
|
|
358
417
|
describe: 'Project ID from Scry dashboard',
|
|
@@ -465,7 +524,7 @@ async function resolveCoverage(argv, logger) {
|
|
|
465
524
|
* @param {any} argv
|
|
466
525
|
* @param {any|null} coverageSummary
|
|
467
526
|
*/
|
|
468
|
-
function buildDeployResult(argv, coverageSummary) {
|
|
527
|
+
function buildDeployResult(argv, coverageSummary, uploadResult) {
|
|
469
528
|
const project = argv.project || 'main';
|
|
470
529
|
const version = argv.version || 'latest';
|
|
471
530
|
const viewBaseUrl = process.env.SCRY_VIEW_URL || 'https://view.scrymore.com';
|
|
@@ -482,9 +541,24 @@ function buildDeployResult(argv, coverageSummary) {
|
|
|
482
541
|
viewUrl,
|
|
483
542
|
coverageUrl,
|
|
484
543
|
coveragePageUrl: coverageUrl,
|
|
544
|
+
visibility: uploadResult?.zipUpload?.visibility,
|
|
485
545
|
};
|
|
486
546
|
}
|
|
487
547
|
|
|
548
|
+
function logUploadLinks(argv, coverageSummary, uploadResult, logger) {
|
|
549
|
+
const deployResult = buildDeployResult(argv, coverageSummary, uploadResult);
|
|
550
|
+
|
|
551
|
+
logger.success('\n✅ Upload successful!\n');
|
|
552
|
+
logger.info(`📖 Storybook: ${deployResult.viewUrl}`);
|
|
553
|
+
if (deployResult.coverageUrl) {
|
|
554
|
+
logger.info(`📊 Coverage: ${deployResult.coverageUrl}`);
|
|
555
|
+
}
|
|
556
|
+
|
|
557
|
+
if (deployResult.visibility === 'private') {
|
|
558
|
+
logger.info('\n🔒 This project is private. Viewers must be logged in to access.');
|
|
559
|
+
}
|
|
560
|
+
}
|
|
561
|
+
|
|
488
562
|
if (require.main === module) {
|
|
489
563
|
main();
|
|
490
564
|
}
|
|
@@ -495,4 +569,5 @@ module.exports = {
|
|
|
495
569
|
runAnalysis,
|
|
496
570
|
resolveCoverage,
|
|
497
571
|
buildDeployResult,
|
|
572
|
+
logUploadLinks,
|
|
498
573
|
};
|
package/lib/apiClient.js
CHANGED
|
@@ -1,6 +1,24 @@
|
|
|
1
1
|
const axios = require('axios');
|
|
2
2
|
const fs = require('fs');
|
|
3
3
|
const { ApiError } = require('./errors.js');
|
|
4
|
+
const { createLogger } = require('./logger.js');
|
|
5
|
+
|
|
6
|
+
const isVerbose =
|
|
7
|
+
process.env.SCRY_VERBOSE === 'true' ||
|
|
8
|
+
process.env.STORYBOOK_DEPLOYER_VERBOSE === 'true' ||
|
|
9
|
+
process.env.VERBOSE === 'true' ||
|
|
10
|
+
process.env.SCRY_API_DEBUG === 'true' ||
|
|
11
|
+
process.env.SCRY_DEBUG === 'true' ||
|
|
12
|
+
process.argv.includes('--verbose');
|
|
13
|
+
|
|
14
|
+
const logger = createLogger({ verbose: isVerbose });
|
|
15
|
+
|
|
16
|
+
const COVERAGE_UPLOAD_DELAY_MS = 5000;
|
|
17
|
+
const COVERAGE_RETRY_DELAY_MS = 60000;
|
|
18
|
+
|
|
19
|
+
function sleep(ms) {
|
|
20
|
+
return new Promise((resolve) => setTimeout(resolve, ms));
|
|
21
|
+
}
|
|
4
22
|
|
|
5
23
|
/**
|
|
6
24
|
* Creates a pre-configured axios instance for making API calls.
|
|
@@ -9,8 +27,10 @@ const { ApiError } = require('./errors.js');
|
|
|
9
27
|
* @returns {axios.AxiosInstance} A configured axios instance.
|
|
10
28
|
*/
|
|
11
29
|
function getApiClient(apiUrl, apiKey) {
|
|
30
|
+
logger.debug(`Initializing API client with baseURL: ${apiUrl}`);
|
|
12
31
|
// This is a mock check to allow testing of a 401 error case.
|
|
13
32
|
if (apiKey === 'fail-me-401') {
|
|
33
|
+
logger.debug('Mock 401 failure triggered by API key');
|
|
14
34
|
throw new ApiError('The provided API key is invalid or has expired.', 401);
|
|
15
35
|
}
|
|
16
36
|
|
|
@@ -27,6 +47,7 @@ function getApiClient(apiUrl, apiKey) {
|
|
|
27
47
|
return axios.create({
|
|
28
48
|
baseURL: apiUrl,
|
|
29
49
|
headers: headers,
|
|
50
|
+
timeout: 60000, // 60 second timeout for large uploads
|
|
30
51
|
});
|
|
31
52
|
}
|
|
32
53
|
|
|
@@ -36,12 +57,14 @@ function getApiClient(apiUrl, apiKey) {
|
|
|
36
57
|
* @param {axios.AxiosInstance} apiClient
|
|
37
58
|
* @param {{project: string, version: string}} target
|
|
38
59
|
* @param {{fileName: string, contentType: string}} file
|
|
39
|
-
* @returns {Promise<string>} presigned URL
|
|
60
|
+
* @returns {Promise<{url: string, visibility?: string}>} presigned URL details
|
|
40
61
|
*/
|
|
41
62
|
async function requestPresignedUrl(apiClient, target, file) {
|
|
42
63
|
const projectName = target.project || 'main';
|
|
43
64
|
const versionName = target.version || 'latest';
|
|
44
65
|
|
|
66
|
+
logger.debug(`Requesting presigned URL for ${projectName}/${versionName}/${file.fileName}`);
|
|
67
|
+
|
|
45
68
|
const presignedResponse = await apiClient.post(
|
|
46
69
|
`/presigned-url/${projectName}/${versionName}/${file.fileName}`,
|
|
47
70
|
{ contentType: file.contentType },
|
|
@@ -52,21 +75,78 @@ async function requestPresignedUrl(apiClient, target, file) {
|
|
|
52
75
|
}
|
|
53
76
|
);
|
|
54
77
|
|
|
78
|
+
logger.debug(`Presigned URL response status: ${presignedResponse.status}`);
|
|
79
|
+
if (presignedResponse.data?.buildId || presignedResponse.data?.buildNumber) {
|
|
80
|
+
logger.info(
|
|
81
|
+
`Build record confirmed by presigned URL response (buildId: ${presignedResponse.data?.buildId || 'n/a'}, buildNumber: ${presignedResponse.data?.buildNumber || 'n/a'}).`
|
|
82
|
+
);
|
|
83
|
+
} else {
|
|
84
|
+
logger.debug(
|
|
85
|
+
`Presigned URL response did not include buildId/buildNumber. Response keys: ${Object.keys(presignedResponse.data || {}).join(', ') || 'none'}`
|
|
86
|
+
);
|
|
87
|
+
}
|
|
55
88
|
const presignedUrl = presignedResponse.data?.url;
|
|
89
|
+
const visibility = presignedResponse.data?.visibility;
|
|
56
90
|
if (!presignedUrl || typeof presignedUrl !== 'string' || presignedUrl.trim() === '') {
|
|
91
|
+
logger.debug(`Invalid presigned URL received: ${JSON.stringify(presignedResponse.data)}`);
|
|
57
92
|
throw new ApiError(
|
|
58
93
|
`Failed to get valid presigned URL from server response. Received: ${JSON.stringify(presignedResponse.data)}`
|
|
59
94
|
);
|
|
60
95
|
}
|
|
61
96
|
|
|
62
|
-
|
|
97
|
+
const parsedUrl = validatePresignedUrl(presignedUrl);
|
|
98
|
+
logger.debug(`Validated presigned URL host: ${parsedUrl.hostname}`);
|
|
99
|
+
|
|
100
|
+
return { url: presignedUrl, visibility };
|
|
101
|
+
}
|
|
102
|
+
|
|
103
|
+
function getAxiosErrorDetails(error, fallbackUrl) {
|
|
104
|
+
if (error.response) {
|
|
105
|
+
return {
|
|
106
|
+
message: `HTTP ${error.response.status} ${error.response.statusText}${error.response.data ? ` - ${JSON.stringify(error.response.data)}` : ''}`,
|
|
107
|
+
statusCode: error.response.status,
|
|
108
|
+
kind: 'response'
|
|
109
|
+
};
|
|
110
|
+
}
|
|
111
|
+
|
|
112
|
+
if (error.request) {
|
|
113
|
+
const code = error.code ? ` (${error.code})` : '';
|
|
114
|
+
const url = error.config?.url || fallbackUrl || 'unknown URL';
|
|
115
|
+
const baseURL = error.config?.baseURL ? ` (baseURL: ${error.config.baseURL})` : '';
|
|
116
|
+
return {
|
|
117
|
+
message: `No response received from ${url}${baseURL}${code}`,
|
|
118
|
+
statusCode: undefined,
|
|
119
|
+
kind: 'request'
|
|
120
|
+
};
|
|
121
|
+
}
|
|
122
|
+
|
|
123
|
+
return {
|
|
124
|
+
message: error.message || 'Unknown error',
|
|
125
|
+
statusCode: undefined,
|
|
126
|
+
kind: 'unknown'
|
|
127
|
+
};
|
|
128
|
+
}
|
|
129
|
+
|
|
130
|
+
function validatePresignedUrl(presignedUrl) {
|
|
131
|
+
let parsedUrl;
|
|
63
132
|
try {
|
|
64
|
-
new URL(presignedUrl);
|
|
133
|
+
parsedUrl = new URL(presignedUrl);
|
|
65
134
|
} catch (urlError) {
|
|
66
135
|
throw new ApiError(`Received invalid URL format from server: "${presignedUrl}". URL validation error: ${urlError.message}`);
|
|
67
136
|
}
|
|
68
137
|
|
|
69
|
-
|
|
138
|
+
const hostname = parsedUrl.hostname || '';
|
|
139
|
+
if (hostname.includes('undefined')) {
|
|
140
|
+
throw new ApiError(
|
|
141
|
+
`Presigned URL hostname contains "undefined": ${hostname}. This usually means the upload service is missing its R2 account ID or bucket configuration.`
|
|
142
|
+
);
|
|
143
|
+
}
|
|
144
|
+
|
|
145
|
+
if (!hostname.endsWith('.r2.cloudflarestorage.com')) {
|
|
146
|
+
logger.debug(`Presigned URL hostname does not look like a standard R2 host: ${hostname}`);
|
|
147
|
+
}
|
|
148
|
+
|
|
149
|
+
return parsedUrl;
|
|
70
150
|
}
|
|
71
151
|
|
|
72
152
|
/**
|
|
@@ -78,14 +158,19 @@ async function requestPresignedUrl(apiClient, target, file) {
|
|
|
78
158
|
* @returns {Promise<{status:number}>}
|
|
79
159
|
*/
|
|
80
160
|
async function putToPresignedUrl(presignedUrl, buffer, contentType) {
|
|
161
|
+
logger.debug(`Starting PUT upload to presigned URL. Size: ${buffer.length} bytes, Content-Type: ${contentType}`);
|
|
162
|
+
|
|
81
163
|
const uploadResponse = await axios.put(presignedUrl, buffer, {
|
|
82
164
|
headers: {
|
|
83
165
|
'Content-Type': contentType,
|
|
84
166
|
},
|
|
85
167
|
maxContentLength: Infinity,
|
|
86
168
|
maxBodyLength: Infinity,
|
|
169
|
+
// Use a separate timeout for the actual upload if needed,
|
|
170
|
+
// but here we rely on the global axios or the one passed in.
|
|
87
171
|
});
|
|
88
172
|
|
|
173
|
+
logger.debug(`PUT upload completed with status: ${uploadResponse.status}`);
|
|
89
174
|
return { status: uploadResponse.status };
|
|
90
175
|
}
|
|
91
176
|
|
|
@@ -101,30 +186,36 @@ async function putToPresignedUrl(presignedUrl, buffer, contentType) {
|
|
|
101
186
|
* @returns {Promise<object>} A promise that resolves to the upload result.
|
|
102
187
|
*/
|
|
103
188
|
async function uploadFileDirectly(apiClient, { project, version }, filePath, file = {}) {
|
|
189
|
+
logger.debug(`uploadFileDirectly called for file: ${filePath}`);
|
|
190
|
+
|
|
104
191
|
// This is a mock check to allow testing of a 500 server error.
|
|
105
192
|
if (project === 'fail-me-500') {
|
|
193
|
+
logger.debug('Mock 500 failure triggered by project name');
|
|
106
194
|
throw new ApiError('The deployment service encountered an internal error.', 500);
|
|
107
195
|
}
|
|
108
196
|
|
|
197
|
+
if (!fs.existsSync(filePath)) {
|
|
198
|
+
logger.debug(`File not found: ${filePath}`);
|
|
199
|
+
throw new Error(`File not found: ${filePath}`);
|
|
200
|
+
}
|
|
201
|
+
|
|
109
202
|
const fileBuffer = fs.readFileSync(filePath);
|
|
110
203
|
const fileName = file.fileName || 'storybook.zip';
|
|
111
204
|
const contentType = file.contentType || 'application/zip';
|
|
112
205
|
|
|
113
206
|
try {
|
|
114
|
-
const
|
|
115
|
-
const upload = await putToPresignedUrl(
|
|
116
|
-
return { success: true, url:
|
|
207
|
+
const presigned = await requestPresignedUrl(apiClient, { project, version }, { fileName, contentType });
|
|
208
|
+
const upload = await putToPresignedUrl(presigned.url, fileBuffer, contentType);
|
|
209
|
+
return { success: true, url: presigned.url, status: upload.status, visibility: presigned.visibility };
|
|
117
210
|
} catch (error) {
|
|
118
|
-
|
|
119
|
-
|
|
120
|
-
|
|
121
|
-
|
|
122
|
-
|
|
123
|
-
|
|
124
|
-
throw new ApiError(`Failed to upload file: No response from server at ${apiClient.defaults.baseURL}`);
|
|
125
|
-
} else {
|
|
126
|
-
throw new ApiError(`Failed to upload file: ${error.message}`);
|
|
211
|
+
logger.debug(`Upload failed. Error type: ${error.constructor.name}, Message: ${error.message}`);
|
|
212
|
+
const details = getAxiosErrorDetails(error, apiClient.defaults.baseURL);
|
|
213
|
+
if (details.kind === 'response') {
|
|
214
|
+
logger.debug(`Error response status: ${details.statusCode}`);
|
|
215
|
+
} else if (details.kind === 'request') {
|
|
216
|
+
logger.debug(`Error request details: ${details.message}`);
|
|
127
217
|
}
|
|
218
|
+
throw new ApiError(`Failed to upload file: ${details.message}`, details.statusCode);
|
|
128
219
|
}
|
|
129
220
|
}
|
|
130
221
|
|
|
@@ -141,6 +232,9 @@ async function uploadCoverageReportDirectly(apiClient, target, coverageReport) {
|
|
|
141
232
|
const projectName = target.project || 'main';
|
|
142
233
|
const versionName = target.version || 'latest';
|
|
143
234
|
|
|
235
|
+
logger.info(`Uploading coverage report for ${projectName}/${versionName}...`);
|
|
236
|
+
logger.debug(`Uploading coverage report for ${projectName}/${versionName}`);
|
|
237
|
+
|
|
144
238
|
try {
|
|
145
239
|
const response = await apiClient.post(
|
|
146
240
|
`/upload/${projectName}/${versionName}/coverage`,
|
|
@@ -152,22 +246,39 @@ async function uploadCoverageReportDirectly(apiClient, target, coverageReport) {
|
|
|
152
246
|
}
|
|
153
247
|
);
|
|
154
248
|
|
|
249
|
+
logger.debug(`Coverage upload response status: ${response.status}`);
|
|
250
|
+
logger.info(`Coverage report upload complete (status ${response.status}).`);
|
|
155
251
|
return {
|
|
156
252
|
success: response.data?.success ?? true,
|
|
157
253
|
buildId: response.data?.buildId,
|
|
158
254
|
coverageUrl: response.data?.coverageUrl,
|
|
159
255
|
};
|
|
160
256
|
} catch (error) {
|
|
161
|
-
|
|
162
|
-
|
|
163
|
-
|
|
164
|
-
|
|
165
|
-
|
|
166
|
-
|
|
167
|
-
|
|
168
|
-
|
|
169
|
-
|
|
257
|
+
logger.debug(`Coverage upload failed. Message: ${error.message}`);
|
|
258
|
+
const details = getAxiosErrorDetails(error, apiClient.defaults.baseURL);
|
|
259
|
+
|
|
260
|
+
if (
|
|
261
|
+
details.statusCode === 404 &&
|
|
262
|
+
error.response?.data &&
|
|
263
|
+
typeof error.response.data === 'object' &&
|
|
264
|
+
String(error.response.data.error || '').includes('Build not found')
|
|
265
|
+
) {
|
|
266
|
+
logger.error('Coverage upload failed with 404: Build not found for this version.');
|
|
267
|
+
logger.info('This is not a missing-secret error. The production Worker did not find a Firestore build record for the project + version you are attaching coverage to.');
|
|
268
|
+
logger.info('Coverage requires an existing build record created by a prior build upload or presigned URL generation.');
|
|
269
|
+
logger.info('Most common causes and fixes:');
|
|
270
|
+
logger.info('1) Coverage called before build exists. Upload the build ZIP first (or call the presigned URL endpoint) and then upload coverage.');
|
|
271
|
+
logger.info('2) Project/version mismatch. Coverage must use the same {project}/{version} as the build upload or presigned URL call.');
|
|
272
|
+
logger.info('3) Firestore secrets present but invalid. A malformed FIREBASE_PRIVATE_KEY (missing literal \\n sequences) or wrong project ID can prevent build creation.');
|
|
273
|
+
logger.info('4) Firestore integration disabled in prod. Ensure FIREBASE_PROJECT_ID, FIREBASE_CLIENT_EMAIL, FIREBASE_PRIVATE_KEY, and FIRESTORE_SERVICE_ACCOUNT_ID are set.');
|
|
274
|
+
logger.info('Recommended checks:');
|
|
275
|
+
logger.info('- Trigger a production upload or presigned URL call first and verify it returns buildId/buildNumber (confirms Firestore created a build).');
|
|
276
|
+
logger.info('- Then call the coverage endpoint for the same project/version.');
|
|
277
|
+
logger.info('- If upload does not return buildId/buildNumber, fix Firestore secrets and ensure FIREBASE_PRIVATE_KEY preserves literal \\n as documented.');
|
|
278
|
+
logger.info('See README.md and docs/PRODUCTION_SETUP.md for details.');
|
|
170
279
|
}
|
|
280
|
+
|
|
281
|
+
throw new ApiError(`Failed to upload coverage: ${details.message}`, details.statusCode);
|
|
171
282
|
}
|
|
172
283
|
}
|
|
173
284
|
|
|
@@ -182,6 +293,7 @@ async function uploadCoverageReportDirectly(apiClient, target, coverageReport) {
|
|
|
182
293
|
* @param {{zipPath: string, coverageReport?: any|null}} options
|
|
183
294
|
*/
|
|
184
295
|
async function uploadBuild(apiClient, target, options) {
|
|
296
|
+
logger.debug('uploadBuild orchestration started');
|
|
185
297
|
const zipUpload = await uploadFileDirectly(apiClient, target, options.zipPath, {
|
|
186
298
|
fileName: 'storybook.zip',
|
|
187
299
|
contentType: 'application/zip',
|
|
@@ -189,7 +301,16 @@ async function uploadBuild(apiClient, target, options) {
|
|
|
189
301
|
|
|
190
302
|
let coverageUpload = null;
|
|
191
303
|
if (options.coverageReport) {
|
|
192
|
-
|
|
304
|
+
logger.info(`Waiting ${COVERAGE_UPLOAD_DELAY_MS / 1000}s before uploading coverage report...`);
|
|
305
|
+
await sleep(COVERAGE_UPLOAD_DELAY_MS);
|
|
306
|
+
|
|
307
|
+
try {
|
|
308
|
+
coverageUpload = await uploadCoverageReportDirectly(apiClient, target, options.coverageReport);
|
|
309
|
+
} catch (error) {
|
|
310
|
+
logger.info('Coverage upload failed; retrying in 60s...');
|
|
311
|
+
await sleep(COVERAGE_RETRY_DELAY_MS);
|
|
312
|
+
coverageUpload = await uploadCoverageReportDirectly(apiClient, target, options.coverageReport);
|
|
313
|
+
}
|
|
193
314
|
}
|
|
194
315
|
|
|
195
316
|
return { zipUpload, coverageUpload };
|
package/lib/coverage.js
CHANGED
|
@@ -8,6 +8,8 @@ const chalk = require('chalk');
|
|
|
8
8
|
* @property {string} storybookDir Path to a built Storybook static directory (e.g. ./storybook-static)
|
|
9
9
|
* @property {string} [baseBranch='main'] Base branch name to compare for "new code" analysis
|
|
10
10
|
* @property {boolean} [failOnThreshold=false] If true, pass "--ci" to the coverage tool and rethrow errors
|
|
11
|
+
* @property {string} [outputPath] If provided, write the report to this path (relative to cwd allowed)
|
|
12
|
+
* @property {boolean} [keepReport=false] If true, do not delete the output file after reading
|
|
11
13
|
*/
|
|
12
14
|
|
|
13
15
|
/**
|
|
@@ -25,7 +27,7 @@ const chalk = require('chalk');
|
|
|
25
27
|
* @returns {Promise<any|null>} The full coverage report JSON, or null if skipped/failed (non-fatal)
|
|
26
28
|
*/
|
|
27
29
|
async function runCoverageAnalysis(options) {
|
|
28
|
-
const { storybookDir, baseBranch = 'main', failOnThreshold = false, execute = false } = options || {};
|
|
30
|
+
const { storybookDir, baseBranch = 'main', failOnThreshold = false, execute = false, outputPath: providedOutputPath, keepReport = false } = options || {};
|
|
29
31
|
|
|
30
32
|
if (!storybookDir || typeof storybookDir !== 'string') {
|
|
31
33
|
throw new Error('runCoverageAnalysis: options.storybookDir is required');
|
|
@@ -33,7 +35,11 @@ async function runCoverageAnalysis(options) {
|
|
|
33
35
|
|
|
34
36
|
console.log(chalk.blue('Running Storybook coverage analysis...'));
|
|
35
37
|
|
|
36
|
-
const outputPath =
|
|
38
|
+
const outputPath = providedOutputPath
|
|
39
|
+
? (path.isAbsolute(providedOutputPath) ? providedOutputPath : path.resolve(process.cwd(), providedOutputPath))
|
|
40
|
+
: path.join(process.cwd(), `.scry-coverage-report-${Date.now()}.json`);
|
|
41
|
+
|
|
42
|
+
const resolvedBaseRef = resolveCoverageBaseRef(baseBranch);
|
|
37
43
|
|
|
38
44
|
/** @type {string[]} */
|
|
39
45
|
const cliArgs = [
|
|
@@ -42,7 +48,7 @@ async function runCoverageAnalysis(options) {
|
|
|
42
48
|
'--output',
|
|
43
49
|
outputPath,
|
|
44
50
|
'--base',
|
|
45
|
-
|
|
51
|
+
normalizeGitBaseRef(resolvedBaseRef),
|
|
46
52
|
'--verbose', // Enable verbose logging to debug component detection
|
|
47
53
|
];
|
|
48
54
|
|
|
@@ -54,9 +60,9 @@ async function runCoverageAnalysis(options) {
|
|
|
54
60
|
cliArgs.push('--execute');
|
|
55
61
|
}
|
|
56
62
|
|
|
57
|
-
// Use npx with
|
|
58
|
-
// This is more reliable than `npx
|
|
59
|
-
const npxCommand = `npx -y
|
|
63
|
+
// Use npx with the package name directly.
|
|
64
|
+
// This is more reliable than `npx -p` which can fail to find the binary in some environments.
|
|
65
|
+
const npxCommand = `npx -y @scrymore/scry-sbcov ${cliArgs.map(shellEscape).join(' ')}`;
|
|
60
66
|
|
|
61
67
|
// Debug logging to show the exact command being executed
|
|
62
68
|
console.log(chalk.yellow('\n━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━'));
|
|
@@ -86,10 +92,10 @@ async function runCoverageAnalysis(options) {
|
|
|
86
92
|
const raw = fs.readFileSync(outputPath, 'utf-8');
|
|
87
93
|
const report = JSON.parse(raw);
|
|
88
94
|
|
|
89
|
-
safeUnlink(outputPath);
|
|
95
|
+
if (!keepReport && !providedOutputPath) safeUnlink(outputPath);
|
|
90
96
|
return report;
|
|
91
97
|
} catch (error) {
|
|
92
|
-
safeUnlink(outputPath);
|
|
98
|
+
if (!keepReport && !providedOutputPath) safeUnlink(outputPath);
|
|
93
99
|
if (failOnThreshold) throw error;
|
|
94
100
|
return null;
|
|
95
101
|
}
|
|
@@ -181,8 +187,91 @@ function shellEscape(value) {
|
|
|
181
187
|
return `'${value.replace(/'/g, "'\\''")}'`;
|
|
182
188
|
}
|
|
183
189
|
|
|
190
|
+
|
|
191
|
+
|
|
192
|
+
/**
|
|
193
|
+
* Normalize a user-supplied base ref into something git understands.
|
|
194
|
+
*
|
|
195
|
+
* Why this exists:
|
|
196
|
+
* - In CI, the best base for "push" is often a SHA (e.g. github.event.before)
|
|
197
|
+
* - In PRs, the best base is often a remote-tracking branch (e.g. origin/main)
|
|
198
|
+
* - Locally, users may pass branch names (e.g. main) or rev expressions (e.g. HEAD~1)
|
|
199
|
+
*
|
|
200
|
+
* `scry-sbcov` expects a value it can pass to git commands as the base reference.
|
|
201
|
+
*
|
|
202
|
+
* @param {string} baseBranch
|
|
203
|
+
* @returns {string}
|
|
204
|
+
*/
|
|
205
|
+
function normalizeGitBaseRef(baseBranch) {
|
|
206
|
+
const value = (baseBranch || '').trim();
|
|
207
|
+
|
|
208
|
+
if (!value) return 'origin/main';
|
|
209
|
+
|
|
210
|
+
// Commit SHA (short or full)
|
|
211
|
+
if (/^[0-9a-f]{7,40}$/i.test(value)) return value;
|
|
212
|
+
|
|
213
|
+
// Common rev expressions that should not be prefixed.
|
|
214
|
+
if (value === 'HEAD' || value.startsWith('HEAD~') || value.startsWith('HEAD^')) return value;
|
|
215
|
+
if (/[~^]/.test(value)) return value;
|
|
216
|
+
|
|
217
|
+
// If user already provided a qualified ref, use it as-is.
|
|
218
|
+
if (value.startsWith('origin/')) return value;
|
|
219
|
+
if (value.startsWith('refs/')) return value;
|
|
220
|
+
if (value.startsWith('remotes/')) return value;
|
|
221
|
+
|
|
222
|
+
// Otherwise, treat it as a branch name and compare against the remote.
|
|
223
|
+
// This also works for branch names that contain slashes (e.g. feature/foo).
|
|
224
|
+
return `origin/${value}`;
|
|
225
|
+
}
|
|
226
|
+
|
|
227
|
+
/**
|
|
228
|
+
* Resolve the base ref to pass into `scry-sbcov`, preferring PR base SHAs
|
|
229
|
+
* from CI providers when available.
|
|
230
|
+
*
|
|
231
|
+
* @param {string} baseBranch
|
|
232
|
+
* @returns {string}
|
|
233
|
+
*/
|
|
234
|
+
function resolveCoverageBaseRef(baseBranch) {
|
|
235
|
+
const env = process.env || {};
|
|
236
|
+
|
|
237
|
+
const githubBaseSha = readGithubPullRequestBaseSha(env.GITHUB_EVENT_PATH);
|
|
238
|
+
if (githubBaseSha) return githubBaseSha;
|
|
239
|
+
|
|
240
|
+
const gitlabBaseSha = env.CI_MERGE_REQUEST_TARGET_BRANCH_SHA;
|
|
241
|
+
if (gitlabBaseSha) return gitlabBaseSha;
|
|
242
|
+
|
|
243
|
+
const bitbucketBaseSha = env.BITBUCKET_PR_DESTINATION_COMMIT || env.BITBUCKET_PR_BASE_COMMIT;
|
|
244
|
+
if (bitbucketBaseSha) return bitbucketBaseSha;
|
|
245
|
+
|
|
246
|
+
return baseBranch || 'main';
|
|
247
|
+
}
|
|
248
|
+
|
|
249
|
+
/**
|
|
250
|
+
* Read GitHub pull_request base.sha from the event payload.
|
|
251
|
+
*
|
|
252
|
+
* @param {string|undefined} eventPath
|
|
253
|
+
* @returns {string|null}
|
|
254
|
+
*/
|
|
255
|
+
function readGithubPullRequestBaseSha(eventPath) {
|
|
256
|
+
if (!eventPath || typeof eventPath !== 'string') return null;
|
|
257
|
+
|
|
258
|
+
try {
|
|
259
|
+
if (!fs.existsSync(eventPath)) return null;
|
|
260
|
+
const raw = fs.readFileSync(eventPath, 'utf-8');
|
|
261
|
+
const payload = JSON.parse(raw);
|
|
262
|
+
const baseSha = payload?.pull_request?.base?.sha;
|
|
263
|
+
if (typeof baseSha === 'string' && baseSha.trim()) return baseSha.trim();
|
|
264
|
+
return null;
|
|
265
|
+
} catch (error) {
|
|
266
|
+
return null;
|
|
267
|
+
}
|
|
268
|
+
}
|
|
269
|
+
|
|
184
270
|
module.exports = {
|
|
185
271
|
runCoverageAnalysis,
|
|
186
272
|
loadCoverageReport,
|
|
187
273
|
extractCoverageSummary,
|
|
274
|
+
normalizeGitBaseRef,
|
|
275
|
+
resolveCoverageBaseRef,
|
|
276
|
+
readGithubPullRequestBaseSha,
|
|
188
277
|
};
|
package/lib/templates.js
CHANGED
|
@@ -102,7 +102,8 @@ ${cache}
|
|
|
102
102
|
--dir ./storybook-static \\
|
|
103
103
|
\${{ vars.SCRY_COVERAGE_ENABLED == 'false' && '--no-coverage' || '' }} \\
|
|
104
104
|
\${{ vars.SCRY_COVERAGE_FAIL_ON_THRESHOLD == 'true' && '--coverage-fail-on-threshold' || '' }} \\
|
|
105
|
-
|
|
105
|
+
\${{ vars.SCRY_COVERAGE_EXECUTE == 'false' && '' || '--coverage-execute' }} \\
|
|
106
|
+
--coverage-base \${{ vars.SCRY_COVERAGE_BASE || github.event.before }}
|
|
106
107
|
env:
|
|
107
108
|
STORYBOOK_DEPLOYER_API_URL: \${{ vars.SCRY_API_URL }}
|
|
108
109
|
STORYBOOK_DEPLOYER_PROJECT: \${{ vars.SCRY_PROJECT_ID }}
|
|
@@ -165,7 +166,8 @@ ${cache}
|
|
|
165
166
|
\${{ github.event.pull_request.draft == true && '--no-coverage' || '' }} \\
|
|
166
167
|
\${{ vars.SCRY_COVERAGE_ENABLED == 'false' && '--no-coverage' || '' }} \\
|
|
167
168
|
\${{ vars.SCRY_COVERAGE_FAIL_ON_THRESHOLD == 'true' && '--coverage-fail-on-threshold' || '' }} \\
|
|
168
|
-
|
|
169
|
+
\${{ vars.SCRY_COVERAGE_EXECUTE == 'false' && '' || '--coverage-execute' }} \\
|
|
170
|
+
--coverage-base \${{ vars.SCRY_COVERAGE_BASE || format('origin/{0}', github.base_ref) }}
|
|
169
171
|
|
|
170
172
|
# Construct deployment URL using VIEW_URL (where users access the deployed Storybook)
|
|
171
173
|
# Defaults to https://view.scrymore.com if SCRY_VIEW_URL is not set
|
|
@@ -0,0 +1,110 @@
|
|
|
1
|
+
const { execSync, execFileSync } = require('child_process');
const fs = require('fs');
const path = require('path');

const { createLogger } = require('./logger');
const { generateMainWorkflow, generatePRWorkflow } = require('./templates');
|
|
6
|
+
|
|
7
|
+
/**
 * Upgrade-only workflow generator.
 *
 * Lets users refresh `.github/workflows/*` to the latest templates WITHOUT
 * needing the Scry API key.
 *
 * This intentionally does NOT:
 * - validate credentials
 * - touch .storybook-deployer.json
 * - set GitHub variables/secrets
 *
 * @param {object} argv - Parsed CLI arguments (verbose, commit, commitMessage).
 * @throws {Error} When the current directory is not a git repository.
 */
async function runUpdateWorkflows(argv) {
  const logger = createLogger({ verbose: Boolean(argv.verbose) });

  logger.info('🛠️ Updating Scry GitHub Actions workflows...');

  const env = await checkEnvironment();
  if (!env.isGit) {
    throw new Error('Not a git repository. Please run this from a git repo.');
  }

  const workflowsDir = '.github/workflows';
  fs.mkdirSync(workflowsDir, { recursive: true });

  // Fall back to the conventional script name when none was detected.
  const buildCmd = env.storybookBuildCmd || 'build-storybook';

  // Empty project/API values: the templates read those from GitHub
  // variables at runtime, which is why no API key is needed here.
  const mainPath = path.join(workflowsDir, 'deploy-storybook.yml');
  const prPath = path.join(workflowsDir, 'deploy-pr-preview.yml');

  fs.writeFileSync(mainPath, generateMainWorkflow('', '', env.packageManager, buildCmd), 'utf8');
  fs.writeFileSync(prPath, generatePRWorkflow('', '', env.packageManager, buildCmd), 'utf8');

  logger.success(`✅ Updated ${mainPath}`);
  logger.success(`✅ Updated ${prPath}`);

  if (argv.commit) {
    gitAdd([mainPath, prPath], logger);
    gitCommit(argv.commitMessage || 'chore: update Scry workflows', logger);
  }

  logger.success('✅ Workflow update complete');
}
|
|
52
|
+
|
|
53
|
+
/**
 * Inspect the current working directory to infer the facts needed for
 * workflow generation. Never throws; anything it cannot determine falls
 * back to a safe default.
 *
 * @returns {Promise<{isGit: boolean, packageManager: string, storybookBuildCmd: ?string}>}
 *   isGit: whether cwd is inside a git repository;
 *   packageManager: 'npm' | 'pnpm' | 'yarn' | 'bun' (npm is the default);
 *   storybookBuildCmd: the detected Storybook build script name, or null.
 */
async function checkEnvironment() {
  const envInfo = {
    isGit: false,
    packageManager: 'npm',
    storybookBuildCmd: null,
  };

  // Detect whether we are inside a git work tree.
  try {
    execSync('git rev-parse --git-dir', { stdio: 'ignore' });
    envInfo.isGit = true;
  } catch {
    envInfo.isGit = false;
  }

  // Infer the package manager from the lockfile present.
  // NOTE: bun >= 1.2 writes a text lockfile (bun.lock) instead of the
  // binary bun.lockb, so both spellings must be checked.
  if (fs.existsSync('pnpm-lock.yaml')) {
    envInfo.packageManager = 'pnpm';
  } else if (fs.existsSync('yarn.lock')) {
    envInfo.packageManager = 'yarn';
  } else if (fs.existsSync('bun.lockb') || fs.existsSync('bun.lock')) {
    envInfo.packageManager = 'bun';
  }

  // Look for the Storybook build script under its common names,
  // in order of preference.
  try {
    const pkg = JSON.parse(fs.readFileSync('package.json', 'utf8'));
    const scripts = pkg.scripts || {};
    for (const name of ['build-storybook', 'storybook:build', 'build:storybook']) {
      if (scripts[name]) {
        envInfo.storybookBuildCmd = name;
        break;
      }
    }
  } catch {
    // package.json missing or unparsable — keep the null default.
  }

  return envInfo;
}
|
|
88
|
+
|
|
89
|
+
/**
 * Stage the given files with git, silently skipping any that do not exist.
 *
 * Fix: the previous `execSync(`git add "${file}"`)` interpolated the path
 * into a shell string, so paths containing `"`, `$`, or backticks would
 * break the command (or be interpreted by the shell). Passing the path as
 * an argv element via execFileSync avoids shell parsing entirely.
 *
 * @param {string[]} files - Paths to stage.
 * @param {object} logger - Logger exposing debug(msg).
 */
function gitAdd(files, logger) {
  for (const file of files) {
    if (!fs.existsSync(file)) continue;
    execFileSync('git', ['add', file], { stdio: 'pipe' });
    logger.debug(` ✓ Added ${file}`);
  }
}
|
|
97
|
+
|
|
98
|
+
/**
 * Commit staged changes, no-opping when the working tree is clean.
 *
 * Fix: the previous `execSync(`git commit -m "${message}"`)` interpolated
 * the user-supplied commit message (from --commit-message) into a shell
 * string, so a message containing `"`, `$`, or backticks would break the
 * command or be evaluated by the shell. The message is now passed as an
 * argv element via execFileSync, which bypasses the shell.
 *
 * @param {string} message - Commit message.
 * @param {object} logger - Logger exposing info(msg) and success(msg).
 */
function gitCommit(message, logger) {
  const status = execSync('git status --porcelain', { encoding: 'utf8' });
  if (!status.trim()) {
    logger.info('No changes to commit.');
    return;
  }

  execFileSync('git', ['commit', '-m', message], { stdio: 'pipe' });
  const sha = execSync('git rev-parse --short HEAD', { encoding: 'utf8' }).trim();
  logger.success(`✅ Committed workflow update: ${sha}`);
}
|
|
109
|
+
|
|
110
|
+
// Only the command runner is public; the environment/git helpers stay internal.
module.exports = { runUpdateWorkflows };
|
package/package.json
CHANGED
|
@@ -1,6 +1,6 @@
|
|
|
1
1
|
{
|
|
2
2
|
"name": "@scrymore/scry-deployer",
|
|
3
|
-
"version": "0.0.6",
|
|
3
|
+
"version": "0.1.0",
|
|
4
4
|
"description": "A CLI to automate the deployment of Storybook static builds.",
|
|
5
5
|
"main": "index.js",
|
|
6
6
|
"bin": {
|
|
@@ -37,7 +37,7 @@
|
|
|
37
37
|
},
|
|
38
38
|
"dependencies": {
|
|
39
39
|
"@octokit/rest": "^20.0.0",
|
|
40
|
-
"@scrymore/scry-sbcov": "^0.2.
|
|
40
|
+
"@scrymore/scry-sbcov": "^0.2.2",
|
|
41
41
|
"@sentry/node": "^10.33.0",
|
|
42
42
|
"archiver": "^7.0.1",
|
|
43
43
|
"axios": "^1.12.2",
|