@quiltdata/benchling-webhook 0.4.13
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/AGENTS.md +226 -0
- package/CHANGELOG.md +91 -0
- package/LICENSE +201 -0
- package/README.benchling.md +77 -0
- package/README.md +53 -0
- package/bin/benchling-webhook.ts +172 -0
- package/bin/check-logs.js +231 -0
- package/bin/cli-auth.sh +74 -0
- package/bin/get-env.js +564 -0
- package/bin/publish-manual.js +211 -0
- package/bin/release-notes.sh +82 -0
- package/bin/release.js +118 -0
- package/bin/send-event.js +203 -0
- package/bin/sync-version.js +72 -0
- package/bin/test-invalid-signature.js +125 -0
- package/bin/version.js +178 -0
- package/cdk.context.json +58 -0
- package/cdk.json +85 -0
- package/doc/NPM_OIDC_SETUP.md +95 -0
- package/doc/PARAMETERS.md +203 -0
- package/doc/RELEASE.md +297 -0
- package/doc/RELEASE_NOTES.md +64 -0
- package/env.template +67 -0
- package/jest.config.js +14 -0
- package/lib/README.md +50 -0
- package/lib/index.ts +31 -0
- package/lib/oauth-tester.json +35 -0
- package/package.json +79 -0
- package/tsconfig.json +34 -0
package/bin/get-env.js
ADDED
@@ -0,0 +1,564 @@
#!/usr/bin/env node
/**
 * Get environment configuration from a Quilt catalog's config.json
 *
 * This script fetches config.json from a Quilt catalog URL and attempts to:
 * 1. Parse the configuration
 * 2. Query AWS CloudFormation to find the stack using resource identifiers
 * 3. Extract stack outputs and parameters
 * 4. Generate environment variables for .env
 *
 * Usage:
 *   node bin/get-env.js https://quilt-catalog.yourcompany.com
 *   node bin/get-env.js https://quilt-catalog.yourcompany.com --write
 */

const https = require("https");
const http = require("http");
const { execSync } = require("child_process");
const fs = require("fs");
const path = require("path");

// Parse command line arguments (only when run directly)
let args, catalogUrl, outputFile, writeFile;
if (require.main === module) {
    args = process.argv.slice(2);
    catalogUrl = args.find((arg) => !arg.startsWith("--"));
    outputFile = args.find((arg) => arg.startsWith("--output="))?.split("=")[1];
    writeFile = args.includes("--write");

    if (!catalogUrl || args.includes("--help") || args.includes("-h")) {
        printHelp();
        process.exit(catalogUrl ? 0 : 1);
    }
}

/**
 * Fetch JSON from a URL
 */
function fetchJson(url) {
    return new Promise((resolve, reject) => {
        const parsedUrl = new URL(url);
        const client = parsedUrl.protocol === "https:" ? https : http;

        const options = {
            headers: {
                "User-Agent": "benchling-webhook-config-tool/1.0",
                "Accept": "application/json"
            }
        };

        client
            .get(url, options, (res) => {
                let data = "";

                if (res.statusCode !== 200) {
                    reject(new Error(`HTTP ${res.statusCode}: ${res.statusMessage}`));
                    return;
                }

                res.on("data", (chunk) => {
                    data += chunk;
                });

                res.on("end", () => {
                    try {
                        resolve(JSON.parse(data));
                    } catch (e) {
                        reject(new Error(`Failed to parse JSON: ${e.message}`));
                    }
                });
            })
            .on("error", reject);
    });
}

/**
 * Extract bucket name from S3 ARN or bucket name string
 */
function extractBucketName(bucketString) {
    if (bucketString.startsWith("arn:aws:s3:::")) {
        return bucketString.replace("arn:aws:s3:::", "").split("/")[0];
    }
    return bucketString.split("/")[0];
}

/**
 * Try to find CloudFormation stack by searching for resource
 */
function findStackByResource(region, resourceId) {
    try {
        const result = execSync(
            `aws cloudformation describe-stack-resources --region ${region} --physical-resource-id "${resourceId}" --query "StackResources[0].StackName" --output text 2>/dev/null`,
            { encoding: "utf-8" }
        );
        const stackName = result.trim();
        return stackName && stackName !== "None" ? stackName : null;
    } catch (error) {
        return null;
    }
}

/**
 * Search for stacks by name pattern
 */
function searchStacksByPattern(region, pattern) {
    try {
        const result = execSync(
            `aws cloudformation list-stacks --region ${region} --stack-status-filter CREATE_COMPLETE UPDATE_COMPLETE UPDATE_ROLLBACK_COMPLETE --query "StackSummaries[?contains(StackName, '${pattern}')].StackName" --output json`,
            { encoding: "utf-8" }
        );
        return JSON.parse(result);
    } catch (error) {
        return [];
    }
}

/**
 * Get stack outputs and parameters
 */
function getStackDetails(region, stackName) {
    try {
        const outputsResult = execSync(
            `aws cloudformation describe-stacks --region ${region} --stack-name "${stackName}" --query "Stacks[0].Outputs" --output json`,
            { encoding: "utf-8" }
        );

        const paramsResult = execSync(
            `aws cloudformation describe-stacks --region ${region} --stack-name "${stackName}" --query "Stacks[0].Parameters" --output json`,
            { encoding: "utf-8" }
        );

        return {
            outputs: JSON.parse(outputsResult) || [],
            parameters: JSON.parse(paramsResult) || []
        };
    } catch (error) {
        console.error(`Warning: Could not get stack details: ${error.message}`);
        return { outputs: [], parameters: [] };
    }
}

/**
 * Get AWS account ID
 */
function getAwsAccountId() {
    try {
        const result = execSync(
            `aws sts get-caller-identity --query Account --output text`,
            { encoding: "utf-8" }
        );
        return result.trim();
    } catch (error) {
        return null;
    }
}

/**
 * Extract API Gateway ID from endpoint URL
 */
function extractApiGatewayId(endpoint) {
    const match = endpoint.match(/https:\/\/([a-z0-9]+)\.execute-api/);
    return match ? match[1] : null;
}

/**
 * Extract stack name prefix from bucket names
 */
function inferStackPrefix(analyticsBucket, serviceBucket) {
    // Both buckets typically follow pattern: {prefix}-{suffix}-{resource}-{hash}
    // e.g., "quilt-staging-analyticsbucket-10ort3e91tnoa"

    const patterns = [analyticsBucket, serviceBucket]
        .filter(Boolean)
        .map(bucket => {
            const parts = bucket.split("-");
            // Try to find common prefix (usually first 1-2 parts)
            if (parts.length >= 3) {
                return parts.slice(0, 2).join("-"); // e.g., "quilt-staging"
            }
            return parts[0];
        });

    // Return most common pattern
    return patterns[0] || "quilt";
}

/**
 * Parse config.json and infer stack information
 */
async function inferStackConfig(catalogUrl) {
    console.log(`Fetching config from: ${catalogUrl}`);
    console.log("");

    // Normalize URL and construct config.json URL
    let configUrl = catalogUrl.replace(/\/$/, "");
    if (!configUrl.endsWith("/config.json")) {
        configUrl += "/config.json";
    }

    // Fetch config.json
    let config;
    try {
        config = await fetchJson(configUrl);
    } catch (error) {
        // If direct fetch fails, try with just /config.json path
        if (error.message.includes("403") || error.message.includes("404")) {
            const baseUrl = catalogUrl.match(/https?:\/\/[^/]+/)?.[0];
            if (baseUrl) {
                console.log(`Direct fetch failed, trying: ${baseUrl}/config.json`);
                config = await fetchJson(`${baseUrl}/config.json`);
            } else {
                throw error;
            }
        } else {
            throw error;
        }
    }

    console.log("✓ Successfully fetched config.json");
    console.log("");
    console.log("Configuration Summary:");
    console.log("=".repeat(80));
    console.log(`Region: ${config.region}`);
    console.log(`API Gateway: ${config.apiGatewayEndpoint}`);
    console.log(`Analytics Bucket: ${config.analyticsBucket}`);
    console.log(`Service Bucket: ${config.serviceBucket}`);
    console.log(`Stack Version: ${config.stackVersion}`);
    console.log("=".repeat(80));
    console.log("");

    // Extract identifiable resources
    const region = config.region;
    const apiGatewayId = extractApiGatewayId(config.apiGatewayEndpoint);
    const analyticsBucket = extractBucketName(config.analyticsBucket);
    const serviceBucket = extractBucketName(config.serviceBucket);
    const stackPrefix = inferStackPrefix(analyticsBucket, serviceBucket);

    console.log("Searching for CloudFormation stack...");
    console.log(` Region: ${region}`);
    console.log(` API Gateway ID: ${apiGatewayId || "not found"}`);
    console.log(` Inferred stack prefix: ${stackPrefix}`);
    console.log("");

    // Try to find the stack
    let stackName = null;

    // Method 1: Search by API Gateway ID
    if (apiGatewayId) {
        console.log(`Searching by API Gateway ID: ${apiGatewayId}...`);
        stackName = findStackByResource(region, apiGatewayId);
        if (stackName) {
            console.log(`✓ Found stack by API Gateway: ${stackName}`);
        }
    }

    // Method 2: Search by Analytics Bucket
    if (!stackName && analyticsBucket) {
        console.log(`Searching by Analytics Bucket: ${analyticsBucket}...`);
        stackName = findStackByResource(region, analyticsBucket);
        if (stackName) {
            console.log(`✓ Found stack by Analytics Bucket: ${stackName}`);
        }
    }

    // Method 3: Search by Service Bucket
    if (!stackName && serviceBucket) {
        console.log(`Searching by Service Bucket: ${serviceBucket}...`);
        stackName = findStackByResource(region, serviceBucket);
        if (stackName) {
            console.log(`✓ Found stack by Service Bucket: ${stackName}`);
        }
    }

    // Method 4: Search by name pattern
    if (!stackName && stackPrefix) {
        console.log(`Searching by stack name pattern: *${stackPrefix}*...`);
        const stacks = searchStacksByPattern(region, stackPrefix);
        if (stacks.length > 0) {
            console.log(`✓ Found ${stacks.length} potential stack(s):`);
            stacks.forEach((name, i) => console.log(` ${i + 1}. ${name}`));

            if (stacks.length === 1) {
                stackName = stacks[0];
                console.log(` Using: ${stackName}`);
            } else {
                console.log("");
                console.log("⚠️ Multiple stacks found. Using first match: " + stacks[0]);
                console.log(" If this is incorrect, manually verify the stack name.");
                stackName = stacks[0];
            }
        }
    }

    console.log("");

    if (!stackName) {
        console.log("⚠️ Could not automatically find CloudFormation stack.");
        console.log(" You may need to manually specify stack resources.");
        console.log("");
    }

    // Get stack details if found
    let stackDetails = { outputs: [], parameters: [] };
    if (stackName) {
        console.log(`Fetching stack details for: ${stackName}...`);
        stackDetails = getStackDetails(region, stackName);
        console.log(`✓ Retrieved ${stackDetails.outputs.length} outputs and ${stackDetails.parameters.length} parameters`);
        console.log("");
    }

    // Get AWS account ID
    const accountId = getAwsAccountId();
    if (accountId) {
        console.log(`✓ AWS Account ID: ${accountId}`);
        console.log("");
    }

    // Build inferred environment variables
    const inferredVars = buildInferredConfig(
        config,
        stackName,
        stackDetails,
        region,
        accountId,
        catalogUrl.replace(/\/config\.json$/, "")
    );

    return {
        config,
        stackName,
        stackDetails,
        inferredVars
    };
}

/**
 * Build inferred configuration
 */
function buildInferredConfig(config, stackName, stackDetails, region, accountId, catalogDomain) {
    const vars = {};

    // Extract catalog domain
    const catalogMatch = catalogDomain.match(/https?:\/\/([^/]+)/);
    const catalog = catalogMatch ? catalogMatch[1] : "";

    // AWS Configuration
    if (accountId) {
        vars.CDK_DEFAULT_ACCOUNT = accountId;
    }
    vars.CDK_DEFAULT_REGION = region;
    vars.AWS_REGION = region;

    // Quilt Configuration
    if (catalog) {
        vars.QUILT_CATALOG = catalog;
    }

    // Try to infer bucket and database from stack or config
    const analyticsBucket = extractBucketName(config.analyticsBucket);
    const serviceBucket = extractBucketName(config.serviceBucket);

    // Find data bucket from stack outputs or use service bucket as fallback
    const bucketOutput = stackDetails.outputs.find(
        (o) => o.OutputKey === "Bucket" || o.OutputKey === "DataBucket"
    );
    const dataBucket = bucketOutput?.OutputValue || serviceBucket;

    if (dataBucket) {
        vars.QUILT_USER_BUCKET = `${dataBucket} # Verify this is YOUR data bucket`;
    }

    // Try to find database name from stack
    const databaseOutput = stackDetails.outputs.find(
        (o) => o.OutputKey === "Database" || o.OutputKey === "AthenaDatabase"
    );
    if (databaseOutput) {
        vars.QUILT_DATABASE = databaseOutput.OutputValue;
    } else if (catalog) {
        // Infer database name from catalog (common pattern)
        const dbGuess = catalog.replace(/[.-]/g, "_") + "_db";
        vars.QUILT_DATABASE = `${dbGuess} # VERIFY THIS - inferred from catalog name`;
    }

    // SQS Queue
    const queueOutput = stackDetails.outputs.find(
        (o) => o.OutputKey === "PackagerQueue" || o.OutputKey.includes("Queue")
    );
    if (queueOutput) {
        const queueValue = queueOutput.OutputValue;

        // Parse queue name from ARN or URL
        let queueName;
        if (queueValue.startsWith("arn:aws:sqs:")) {
            // ARN format: arn:aws:sqs:region:account:queue-name
            queueName = queueValue.split(":").pop();
        } else if (queueValue.includes("sqs.")) {
            // URL format: https://sqs.region.amazonaws.com/account/queue-name
            queueName = queueValue.split("/").pop();
        } else {
            // Assume it's just the queue name
            queueName = queueValue;
        }

        vars.QUEUE_NAME = queueName;

        // Build SQS URL
        if (accountId && region && queueName) {
            vars.SQS_QUEUE_URL = `https://sqs.${region}.amazonaws.com/${accountId}/${queueName}`;
        }
    } else if (stackName) {
        vars.QUEUE_NAME = `${stackName}-PackagerQueue-XXXXX # VERIFY THIS - not found in outputs`;
    }

    // Additional useful info
    if (stackName) {
        vars["# CloudFormation Stack"] = stackName;
    }
    vars["# Stack Version"] = config.stackVersion;
    vars["# API Gateway Endpoint"] = config.apiGatewayEndpoint;

    return vars;
}

/**
 * Format environment variables for output
 */
function formatEnvVars(vars) {
    const lines = [];

    lines.push("# ==============================================================================");
    lines.push("# INFERRED CONFIGURATION");
    lines.push("# ==============================================================================");
    lines.push("# Generated by: bin/get-env.js");
    lines.push("# Date: " + new Date().toISOString());
    lines.push("#");
    lines.push("# ⚠️ IMPORTANT: Review and verify all values before using!");
    lines.push("# Some values may need manual verification or completion.");
    lines.push("# ==============================================================================");
    lines.push("");

    for (const [key, value] of Object.entries(vars)) {
        if (key.startsWith("#")) {
            lines.push(`${key}: ${value}`);
        } else {
            lines.push(`${key}=${value}`);
        }
    }

    lines.push("");
    lines.push("# ==============================================================================");
    lines.push("# REQUIRED VALUES NOT INFERRED - Must be filled manually");
    lines.push("# ==============================================================================");
    lines.push("PREFIX=benchling-webhook");
    lines.push("BENCHLING_TENANT=your-tenant");
    lines.push("BENCHLING_CLIENT_ID=your-client-id");
    lines.push("BENCHLING_CLIENT_SECRET=your-client-secret");
    lines.push("BENCHLING_APP=benchling-webhook");
    lines.push("BENCHLING_API_KEY=your-api-key");
    lines.push("BENCHLING_APP_DEFINITION_ID=appdef_your_id_here");
    lines.push("ENABLE_WEBHOOK_VERIFICATION=true");
    lines.push("BENCHLING_TEST_ENTRY=etr_123456789");
    lines.push("");

    return lines.join("\n");
}

/**
 * Print help
 */
function printHelp() {
    console.log("Usage: node bin/get-env.js <catalog-url> [options]");
    console.log("");
    console.log("Arguments:");
    console.log(" catalog-url URL of Quilt catalog (e.g., https://quilt-catalog.yourcompany.com)");
    console.log("");
    console.log("Options:");
    console.log(" --output=FILE Write output to FILE instead of stdout");
    console.log(" --write Write to env.inferred by default (without dot - user-visible)");
    console.log(" --help, -h Show this help message");
    console.log("");
    console.log("Examples:");
    console.log(" node bin/get-env.js https://nightly.quilttest.com");
    console.log(" node bin/get-env.js https://nightly.quilttest.com --write");
    console.log(" node bin/get-env.js https://nightly.quilttest.com --output=env.staging");
    console.log("");
    console.log("Description:");
    console.log(" This script fetches config.json from a Quilt catalog and infers");
    console.log(" environment variables needed for benchling-webhook deployment by:");
    console.log(" 1. Parsing the catalog configuration");
    console.log(" 2. Querying AWS CloudFormation to find the associated stack");
    console.log(" 3. Extracting stack outputs and parameters");
    console.log(" 4. Generating environment variable assignments");
    console.log("");
    console.log("Requirements:");
    console.log(" - AWS CLI installed and configured");
    console.log(" - AWS credentials with CloudFormation read permissions");
    console.log(" - Network access to the catalog URL");
}

/**
 * Main execution
 */
async function main() {
    try {
        const result = await inferStackConfig(catalogUrl);

        // Format output
        const output = formatEnvVars(result.inferredVars);

        // Print summary
        console.log("=".repeat(80));
        console.log("INFERRED CONFIGURATION");
        console.log("=".repeat(80));
        console.log("");
        console.log(output);
        console.log("");

        // Write to file if requested
        const targetFile = outputFile || (writeFile ? "env.inferred" : null);
        if (targetFile) {
            const fullPath = path.resolve(targetFile);

            // Check if .env already exists and warn before proceeding
            const envPath = path.resolve(".env");
            if (fs.existsSync(envPath)) {
                console.log("⚠️ NOTICE: A .env file already exists!");
                console.log(` Writing to ${targetFile} instead to avoid overwriting your configuration.`);
                console.log("");
            }

            fs.writeFileSync(fullPath, output);
            console.log("=".repeat(80));
            console.log(`✓ Configuration written to: ${fullPath}`);
            console.log("=".repeat(80));
            console.log("");
            console.log("Next steps:");
            console.log(" 1. Review the generated file and verify all values");
            console.log(" 2. Fill in the REQUIRED VALUES section with your Benchling credentials");
            if (fs.existsSync(envPath)) {
                console.log(" 3. Carefully merge with your existing .env file (DO NOT overwrite!)");
                console.log(" Compare: diff .env env.inferred");
            } else {
                console.log(" 3. Copy to .env when ready: cp env.inferred .env");
            }
            console.log("");
        } else {
            console.log("=".repeat(80));
            console.log("To save this configuration, run:");
            console.log(` node bin/get-env.js ${catalogUrl} --write`);
            console.log("=".repeat(80));
            console.log("");
        }

    } catch (error) {
        console.error("Error:", error.message);
        process.exit(1);
    }
}

if (require.main === module) {
    main();
}

module.exports = { inferStackConfig, buildInferredConfig };