@argos-ci/core 5.1.2 → 5.2.0

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
package/dist/index.mjs ADDED
@@ -0,0 +1,1389 @@
1
+ import { createRequire } from "node:module";
2
+ import convict from "convict";
3
+ import { execSync } from "node:child_process";
4
+ import createDebug from "debug";
5
+ import { createReadStream, existsSync, readFileSync } from "node:fs";
6
+ import { createClient, throwAPIError } from "@argos-ci/api-client";
7
+ import { basename, extname, resolve } from "node:path";
8
+ import glob from "fast-glob";
9
+ import { promisify } from "node:util";
10
+ import sharp from "sharp";
11
+ import tmp from "tmp";
12
+ import { createHash } from "node:crypto";
13
+ import { readFile } from "node:fs/promises";
14
+ import { getPlaywrightTracePath, readMetadata, readVersionFromPackage } from "@argos-ci/util";
15
+ import mime from "mime-types";
16
//#region src/debug.ts
/** Namespace under which this package emits debug logs (see the `debug` package). */
const KEY = "@argos-ci/core";
// Logger bound to the package namespace.
const debug = createDebug(KEY);
// Evaluated once at load time: true when the DEBUG env matches our namespace.
const isDebugEnabled = createDebug.enabled(KEY);
/** Start a console timer labelled `arg`, only when debug logging is on. */
const debugTime = (arg) => {
  if (!isDebugEnabled) return;
  console.time(arg);
};
/** Stop the console timer labelled `arg`, only when debug logging is on. */
const debugTimeEnd = (arg) => {
  if (!isDebugEnabled) return;
  console.timeEnd(arg);
};
//#endregion
27
//#region src/ci-environment/git.ts
// Run a git command synchronously and return its trimmed stdout.
// execSync errors (non-zero exit, missing git) propagate to the caller.
const runGit = (command) => execSync(command).toString().trim();
/**
 * Check if the current directory is a git repository.
 */
function checkIsGitRepository() {
  try {
    return runGit("git rev-parse --is-inside-work-tree") === "true";
  } catch {
    // Not a repository, or git is unavailable.
    return false;
  }
}
/**
 * Returns the head commit.
 */
function head() {
  try {
    return runGit("git rev-parse HEAD");
  } catch {
    return null;
  }
}
/**
 * Returns the current branch.
 */
function branch() {
  try {
    const headRef = runGit("git rev-parse --abbrev-ref HEAD");
    // "HEAD" means a detached head: there is no branch name to report.
    return headRef === "HEAD" ? null : headRef;
  } catch {
    return null;
  }
}
/**
 * Returns the repository URL.
 */
function getRepositoryURL() {
  try {
    return runGit("git config --get remote.origin.url");
  } catch {
    return null;
  }
}
/**
 * Run git merge-base command.
 * Returns null when the refs share no common ancestor (git exits with
 * status 1 and an empty stderr in that specific case).
 */
function gitMergeBase(input) {
  try {
    return runGit(`git merge-base ${input.head} ${input.base}`);
  } catch (error) {
    const noCommonAncestor = checkIsExecError(error) && error.status === 1 && error.stderr.toString() === "";
    if (noCommonAncestor) return null;
    throw error;
  }
}
/**
 * Run git fetch with a specific ref and depth.
 */
function gitFetch(input) {
  execSync(`git fetch --force --update-head-ok --depth ${input.depth} origin ${input.ref}:${input.target}`);
}
/**
 * Check if an error is an exec error that includes stderr.
 */
function checkIsExecError(error) {
  if (!(error instanceof Error)) return false;
  if (!("status" in error) || typeof error.status !== "number") return false;
  return "stderr" in error && Buffer.isBuffer(error.stderr);
}
/**
 * Get the merge base commit SHA.
 * Fetch both base and head with depth and then run merge base.
 * Try to find a merge base with a depth of 1000 max.
 */
function getMergeBaseCommitSha$1(input) {
  const argosBaseRef = `argos/${input.base}`;
  const argosHeadRef = `argos/${input.head}`;
  // Deepen the local refs in steps of 200 commits until a common
  // ancestor shows up or we give up.
  let depth = 200;
  for (; depth < 1e3; depth += 200) {
    gitFetch({
      ref: input.head,
      depth,
      target: argosHeadRef
    });
    gitFetch({
      ref: input.base,
      depth,
      target: argosBaseRef
    });
    const mergeBase = gitMergeBase({
      base: argosBaseRef,
      head: argosHeadRef
    });
    if (mergeBase) return mergeBase;
  }
  if (isDebugEnabled) {
    const headShas = listShas(argosHeadRef);
    const baseShas = listShas(argosBaseRef);
    debug(`No merge base found for ${input.head} and ${input.base} with depth ${depth}`);
    debug(`Found ${headShas.length} commits in ${input.head}: ${headShas.join(", ")}`);
    debug(`Found ${baseShas.length} commits in ${input.base}: ${baseShas.join(", ")}`);
  }
  return null;
}
// List commit SHAs reachable from `path`, optionally capped at `maxCount`.
function listShas(path, maxCount) {
  const countFlag = maxCount ? `--max-count=${maxCount}` : "";
  return runGit(`git log --format="%H" ${countFlag} ${path}`.trim()).split("\n");
}
/**
 * List the SHAs of the commits reachable from `input.sha` (up to 200).
 * Best effort: fetch failures other than "not our ref" are ignored and we
 * fall back to whatever history is available locally.
 */
function listParentCommits$1(input) {
  const limit = 200;
  try {
    execSync(`git fetch --depth=${limit} origin ${input.sha}`);
  } catch (error) {
    // The remote refuses to serve arbitrary SHAs ("not our ref"):
    // there is nothing to list in that case.
    if (error instanceof Error && error.message.includes("not our ref")) return [];
    // Other fetch errors are deliberately swallowed; the local log
    // below may still succeed.
  }
  return listShas(input.sha, limit);
}
//#endregion
142
//#region src/ci-environment/services/bitrise.ts
// Pull request number from BITRISE_PULL_REQUEST, or null when unset.
function getPrNumber$2(context) {
  const { env } = context;
  return env.BITRISE_PULL_REQUEST ? Number(env.BITRISE_PULL_REQUEST) : null;
}
// "owner/slug" built from the Bitrise repository env vars, or null when
// either part is missing.
function getRepository$6(context) {
  const { env } = context;
  if (env.BITRISEIO_GIT_REPOSITORY_OWNER && env.BITRISEIO_GIT_REPOSITORY_SLUG) return `${env.BITRISEIO_GIT_REPOSITORY_OWNER}/${env.BITRISEIO_GIT_REPOSITORY_SLUG}`;
  return null;
}
// CI service descriptor for Bitrise.
const service$7 = {
  name: "Bitrise",
  key: "bitrise",
  // BITRISE_IO is set on Bitrise builds.
  detect: ({ env }) => Boolean(env.BITRISE_IO),
  config: (context) => {
    const { env } = context;
    const repository = getRepository$6(context);
    return {
      commit: env.BITRISE_GIT_COMMIT || null,
      branch: env.BITRISE_GIT_BRANCH || null,
      repository,
      // Bitrise does not expose a separate fork repository here.
      originalRepository: repository,
      jobId: null,
      runId: null,
      runAttempt: null,
      prNumber: getPrNumber$2({ env }),
      prHeadCommit: null,
      prBaseBranch: null,
      // Used as the default parallel nonce by readConfig.
      nonce: env.BITRISEIO_PIPELINE_ID || null
    };
  },
  // Generic git-based implementations shared by all services.
  getMergeBaseCommitSha: getMergeBaseCommitSha$1,
  listParentCommits: listParentCommits$1
};
//#endregion
177
//#region src/util/url.ts
/**
 * Utility functions for parsing Git remote URLs.
 * Supports SSH, HTTPS, and git protocols.
 */
function getRepositoryNameFromURL(url) {
  // Try the SSH shape first (git@host:owner/repo[.git]), then the
  // http(s)/git protocol shape (proto://host/owner/repo[.git]).
  const patterns = [
    /^git@[^:]+:([^/]+)\/(.+?)(?:\.git)?$/,
    /^(?:https?|git):\/\/[^/]+\/([^/]+)\/(.+?)(?:\.git)?$/
  ];
  for (const pattern of patterns) {
    const match = url.match(pattern);
    if (match && match[1] && match[2]) return `${match[1]}/${match[2]}`;
  }
  return null;
}
//#endregion
190
//#region src/ci-environment/services/buildkite.ts
/**
 * "owner/repo" parsed from the BUILDKITE_REPO remote URL, or null.
 */
function getRepository$5(context) {
  const { env } = context;
  if (env.BUILDKITE_REPO) return getRepositoryNameFromURL(env.BUILDKITE_REPO);
  return null;
}
// CI service descriptor for Buildkite.
const service$6 = {
  name: "Buildkite",
  key: "buildkite",
  // BUILDKITE is set on every Buildkite build.
  detect: ({ env }) => Boolean(env.BUILDKITE),
  config: (context) => {
    const { env } = context;
    const repository = getRepository$5(context);
    // Buildkite sets BUILDKITE_PULL_REQUEST to the literal string "false"
    // on non-PR builds; Number("false") would yield NaN, so treat it as null.
    const rawPrNumber = env.BUILDKITE_PULL_REQUEST;
    const prNumber = rawPrNumber && rawPrNumber !== "false" ? Number(rawPrNumber) : null;
    return {
      // Fall back to the local git clone when the env vars are missing.
      commit: env.BUILDKITE_COMMIT || head() || null,
      branch: env.BUILDKITE_BRANCH || branch() || null,
      repository,
      originalRepository: repository,
      jobId: null,
      runId: null,
      runAttempt: null,
      prNumber,
      prHeadCommit: null,
      prBaseBranch: null,
      // Used as the default parallel nonce by readConfig.
      nonce: env.BUILDKITE_BUILD_ID || null
    };
  },
  // Generic git-based implementations shared by all services.
  getMergeBaseCommitSha: getMergeBaseCommitSha$1,
  listParentCommits: listParentCommits$1
};
//#endregion
221
//#region src/ci-environment/services/heroku.ts
// CI service descriptor for Heroku CI.
const service$5 = {
  name: "Heroku",
  key: "heroku",
  // HEROKU_TEST_RUN_ID is set for Heroku CI test runs.
  detect: ({ env }) => Boolean(env.HEROKU_TEST_RUN_ID),
  config: ({ env }) => ({
    commit: env.HEROKU_TEST_RUN_COMMIT_VERSION || null,
    branch: env.HEROKU_TEST_RUN_BRANCH || null,
    // NOTE(review): no other service config carries an `owner` key —
    // looks vestigial; confirm before relying on it.
    owner: null,
    // Heroku CI does not expose the repository name.
    repository: null,
    originalRepository: null,
    jobId: null,
    runId: null,
    runAttempt: null,
    prNumber: null,
    prHeadCommit: null,
    prBaseBranch: null,
    // Used as the default parallel nonce by readConfig.
    nonce: env.HEROKU_TEST_RUN_ID || null
  }),
  // Generic git-based implementations shared by all services.
  getMergeBaseCommitSha: getMergeBaseCommitSha$1,
  listParentCommits: listParentCommits$1
};
//#endregion
244
//#region src/ci-environment/github.ts
/**
 * Get the full repository name (account/repo) from environment variable.
 */
function getGitHubRepository(ctx) {
  const { GITHUB_REPOSITORY } = ctx.env;
  return GITHUB_REPOSITORY ? GITHUB_REPOSITORY : null;
}
/**
 * Get the full repository name (account/repo) from environment variable or throws.
 */
function assertGitHubRepository(ctx) {
  const repository = getGitHubRepository(ctx);
  if (repository) return repository;
  throw new Error("GITHUB_REPOSITORY is missing");
}
/**
 * Get a GitHub token from environment variables.
 * Returns null (after an optional one-time warning) when no token is set.
 */
function getGitHubToken({ env }) {
  if (env.GITHUB_TOKEN) return env.GITHUB_TOKEN;
  if (!env.DISABLE_GITHUB_TOKEN_WARNING) {
    console.log(`
Argos couldn’t find a relevant pull request in the current environment.
To resolve this, Argos requires a GITHUB_TOKEN to fetch the pull request associated with the head SHA. Please ensure the following environment variable is added:

GITHUB_TOKEN: \${{ secrets.GITHUB_TOKEN }}

For more details, check out the documentation: Read more at https://argos-ci.com/docs/run-on-preview-deployment

If you want to disable this warning, you can set the following environment variable:

DISABLE_GITHUB_TOKEN_WARNING: true
`.trim());
  }
  return null;
}
/**
 * Fetch GitHub API. Returns null when no token is available.
 */
async function fetchGitHubAPI(ctx, url) {
  const githubToken = getGitHubToken(ctx);
  if (!githubToken) return null;
  const headers = {
    Accept: "application/vnd.github+json",
    Authorization: `Bearer ${githubToken}`,
    "X-GitHub-Api-Version": "2022-11-28"
  };
  // Abort after 10s so a stalled API call cannot hang the build.
  return await fetch(url, { headers, signal: AbortSignal.timeout(1e4) });
}
const GITHUB_API_BASE_URL = "https://api.github.com";
/**
 * Get a pull request from a head sha.
 * Fetch the last 30 pull requests sorted by updated date
 * then try to find the one that matches the head sha.
 * If no pull request is found, return null.
 */
async function getPullRequestFromHeadSha(ctx, sha) {
  debug(`Fetching pull request details from head sha: ${sha}`);
  const githubRepository = assertGitHubRepository(ctx);
  const url = new URL(`/repos/${githubRepository}/pulls`, GITHUB_API_BASE_URL);
  url.search = new URLSearchParams({
    state: "open",
    sort: "updated",
    per_page: "30",
    page: "1"
  }).toString();
  const response = await fetchGitHubAPI(ctx, url);
  if (!response) return null;
  if (!response.ok) throw new Error(`Non-OK response (status: ${response.status}) while fetching pull request details from head sha (${sha})`);
  const pulls = await response.json();
  if (pulls.length === 0) {
    debug("No results, no pull request found");
    return null;
  }
  const matchingPr = pulls.find((pr) => pr.head.sha === sha);
  if (!matchingPr) {
    debug("No matching pull request found");
    return null;
  }
  debug("Pull request found", matchingPr);
  return matchingPr;
}
/**
 * Get a pull request from a PR number.
 */
async function getPullRequestFromPrNumber(ctx, prNumber) {
  debug(`Fetching pull request #${prNumber}`);
  const githubRepository = assertGitHubRepository(ctx);
  const url = new URL(`/repos/${githubRepository}/pulls/${prNumber}`, GITHUB_API_BASE_URL);
  const response = await fetchGitHubAPI(ctx, url);
  if (!response) return null;
  if (response.status === 404) {
    debug("No pull request found, pr detection from branch was probably a mistake");
    return null;
  }
  if (!response.ok) throw new Error(`Non-OK response (status: ${response.status}) while fetching pull request #${prNumber}`);
  return await response.json();
}
/**
 * Get the PR number from a merge group branch.
 * Example: gh-readonly-queue/master/pr-1529-c1c25caabaade7a8ddc1178c449b872b5d3e51a4
 */
function getPRNumberFromMergeGroupBranch(branch) {
  const match = branch.match(/queue\/[^/]*\/pr-(\d+)-/);
  return match ? Number(match[1]) : null;
}
//#endregion
353
//#region src/ci-environment/services/github-actions.ts
/**
 * Read the event payload, or null when GITHUB_EVENT_PATH is unset or missing.
 */
function readEventPayload({ env }) {
  const eventPath = env.GITHUB_EVENT_PATH;
  if (!eventPath || !existsSync(eventPath)) return null;
  return JSON.parse(readFileSync(eventPath, "utf-8"));
}
/**
 * Get a payload from a Vercel deployment "repository_dispatch"
 * @see https://vercel.com/docs/git/vercel-for-github#repository-dispatch-events
 */
function getVercelDeploymentPayload(payload) {
  const isVercelSuccess = process.env.GITHUB_EVENT_NAME === "repository_dispatch" && payload && "action" in payload && payload.action === "vercel.deployment.success";
  return isVercelSuccess ? payload : null;
}
/**
 * Get a merge group payload from a "merge_group" event.
 */
function getMergeGroupPayload(payload) {
  const isMergeGroup = payload && process.env.GITHUB_EVENT_NAME === "merge_group" && "action" in payload && payload.action === "checks_requested";
  return isMergeGroup ? payload : null;
}
/**
 * PR numbers associated with a merge queue run, or null outside a merge queue.
 */
function getMergeQueuePrNumbers(args) {
  const { mergeGroupPayload, pullRequest } = args;
  if (!mergeGroupPayload) return null;
  if (pullRequest) return [pullRequest.number];
  // Fall back to parsing the PR number out of the merge queue branch name.
  const prNumberFromBranch = getPRNumberFromMergeGroupBranch(mergeGroupPayload.merge_group.head_ref);
  return prNumberFromBranch != null ? [prNumberFromBranch] : [];
}
/**
 * Get the branch from the local context.
 */
function getBranchFromContext(context) {
  const { env } = context;
  if (env.GITHUB_HEAD_REF) return env.GITHUB_HEAD_REF;
  const match = env.GITHUB_REF ? /refs\/heads\/(.*)/.exec(env.GITHUB_REF) : null;
  return match?.[1] ?? null;
}
/**
 * Get the branch from the payload.
 */
function getBranchFromPayload(payload) {
  const workflowRun = "workflow_run" in payload ? payload.workflow_run : null;
  if (workflowRun) return workflowRun.head_branch;
  const deployment = "deployment" in payload ? payload.deployment : null;
  if (deployment) return deployment.environment;
  return null;
}
/**
 * Get the branch, preferring merge-queue / Vercel data, then the payload,
 * then the local context, then the pull request itself.
 */
function getBranch(args) {
  const { payload, mergeGroupPayload, vercelPayload, pullRequest, context } = args;
  if (mergeGroupPayload && pullRequest?.head.ref) return pullRequest.head.ref;
  if (vercelPayload) return vercelPayload.client_payload.git.ref;
  const fromPayload = payload ? getBranchFromPayload(payload) : null;
  if (fromPayload) return fromPayload;
  const fromContext = getBranchFromContext(context);
  if (fromContext) return fromContext;
  return pullRequest ? pullRequest.head.ref : null;
}
/**
 * Get the repository either from payload or from environment variables.
 */
function getRepository$4(context, payload) {
  const headRepoFullName = payload && "pull_request" in payload ? payload.pull_request?.head?.repo?.full_name : null;
  if (headRepoFullName) return headRepoFullName;
  return getGitHubRepository(context);
}
/**
 * Get the head sha.
 */
function getSha(context, vercelPayload, payload) {
  if (context.env.GITHUB_EVENT_NAME === "pull_request_target") {
    if (!payload) throw new Error("Payload is missing in \"pull_request_target\" event");
    const pullRequest = getPullRequestFromPayload(payload);
    if (!pullRequest) throw new Error("Pull request missing in \"pull_request_target\" event");
    return pullRequest.head.sha;
  }
  if (vercelPayload) return vercelPayload.client_payload.git.sha;
  const sha = context.env.GITHUB_SHA;
  if (!sha) throw new Error("GITHUB_SHA is missing");
  return sha;
}
/**
 * Get the pull request from an event payload.
 */
function getPullRequestFromPayload(payload) {
  if ("pull_request" in payload && payload.pull_request) return payload.pull_request;
  const workflowRunPr = "workflow_run" in payload && payload.workflow_run ? payload.workflow_run.pull_requests[0] : null;
  if (workflowRunPr) return workflowRunPr;
  const checkRunPr = "check_run" in payload && payload.check_run && "pull_requests" in payload.check_run ? payload.check_run.pull_requests[0] : null;
  if (checkRunPr) return checkRunPr;
  return null;
}
/**
 * Get the pull request either from payload or local fetching.
 */
async function getPullRequest(args) {
  const { payload, vercelPayload, mergeGroupPayload, context, sha } = args;
  // Without a usable payload we must look the PR up through the API.
  if (vercelPayload || !payload) return getPullRequestFromHeadSha(context, sha);
  if (mergeGroupPayload) {
    const headRef = mergeGroupPayload.merge_group.head_ref;
    const prNumber = getPRNumberFromMergeGroupBranch(headRef);
    if (!prNumber) {
      debug(`No PR found from merge group head ref: ${headRef}`);
      return null;
    }
    debug(`PR #${prNumber} found from merge group head ref (${headRef})`);
    return getPullRequestFromPrNumber(context, prNumber);
  }
  return getPullRequestFromPayload(payload);
}
469
// CI service descriptor for GitHub Actions.
const service$4 = {
  name: "GitHub Actions",
  key: "github-actions",
  detect: (context) => Boolean(context.env.GITHUB_ACTIONS),
  // Resolves commit/branch/PR information from the workflow event payload,
  // with special handling for Vercel deployments and merge queues.
  config: async (context) => {
    const { env } = context;
    const payload = readEventPayload(context);
    // Each of these is non-null only for its matching event type.
    const vercelPayload = getVercelDeploymentPayload(payload);
    const mergeGroupPayload = getMergeGroupPayload(payload);
    const sha = getSha(context, vercelPayload, payload);
    // May call the GitHub API (requires GITHUB_TOKEN) when the payload
    // does not carry the pull request.
    const pullRequest = await getPullRequest({
      payload,
      vercelPayload,
      mergeGroupPayload,
      sha,
      context
    });
    const branch = getBranch({
      payload,
      vercelPayload,
      mergeGroupPayload,
      context,
      pullRequest
    });
    return {
      commit: sha,
      // Head repository (the fork for fork PRs).
      repository: getRepository$4(context, payload),
      // Repository the workflow runs in (GITHUB_REPOSITORY).
      originalRepository: getGitHubRepository(context),
      jobId: env.GITHUB_JOB || null,
      runId: env.GITHUB_RUN_ID || null,
      runAttempt: env.GITHUB_RUN_ATTEMPT ? Number(env.GITHUB_RUN_ATTEMPT) : null,
      // Unique per run attempt; used as the default parallel nonce.
      nonce: `${env.GITHUB_RUN_ID}-${env.GITHUB_RUN_ATTEMPT}`,
      branch,
      prNumber: pullRequest?.number || null,
      prHeadCommit: pullRequest?.head.sha ?? null,
      prBaseBranch: pullRequest?.base.ref ?? null,
      mergeQueuePrNumbers: getMergeQueuePrNumbers({
        mergeGroupPayload,
        pullRequest
      })
    };
  },
  // Generic git-based implementations shared by all services.
  getMergeBaseCommitSha: getMergeBaseCommitSha$1,
  listParentCommits: listParentCommits$1
};
//#endregion
515
//#region src/ci-environment/services/circleci.ts
// Pull request number parsed from the CIRCLE_PULL_REQUEST URL, or null.
function getPrNumber$1(context) {
  const { env } = context;
  const match = /pull\/(\d+)/.exec(env.CIRCLE_PULL_REQUEST || "");
  return match ? Number(match[1]) : null;
}
// Repository the PR head lives on (the fork for forked PRs),
// falling back to the project repository.
function getRepository$3(context) {
  const { CIRCLE_PR_USERNAME, CIRCLE_PR_REPONAME } = context.env;
  if (CIRCLE_PR_USERNAME && CIRCLE_PR_REPONAME) return `${CIRCLE_PR_USERNAME}/${CIRCLE_PR_REPONAME}`;
  return getOriginalRepository$2(context);
}
// Project repository ("owner/name"), or null when the env vars are missing.
function getOriginalRepository$2(context) {
  const { CIRCLE_PROJECT_USERNAME, CIRCLE_PROJECT_REPONAME } = context.env;
  if (!CIRCLE_PROJECT_USERNAME || !CIRCLE_PROJECT_REPONAME) return null;
  return `${CIRCLE_PROJECT_USERNAME}/${CIRCLE_PROJECT_REPONAME}`;
}
532
// CI service descriptor for CircleCI.
const service$3 = {
  name: "CircleCI",
  key: "circleci",
  detect: ({ env }) => Boolean(env.CIRCLECI),
  config: (context) => {
    const { env } = context;
    return {
      commit: env.CIRCLE_SHA1 || null,
      branch: env.CIRCLE_BRANCH || null,
      // Fork repository for forked PRs, else the project repository.
      repository: getRepository$3(context),
      originalRepository: getOriginalRepository$2(context),
      jobId: null,
      runId: null,
      runAttempt: null,
      prNumber: getPrNumber$1({ env }),
      prHeadCommit: null,
      prBaseBranch: null,
      // Workflow id when available, else the build number;
      // used as the default parallel nonce by readConfig.
      nonce: env.CIRCLE_WORKFLOW_ID || env.CIRCLE_BUILD_NUM || null
    };
  },
  // Generic git-based implementations shared by all services.
  getMergeBaseCommitSha: getMergeBaseCommitSha$1,
  listParentCommits: listParentCommits$1
};
//#endregion
556
//#region src/ci-environment/services/travis.ts
/**
 * Repository containing the PR head: the fork slug for PR builds,
 * otherwise the project slug.
 */
function getRepository$2(context) {
  const { env } = context;
  if (env.TRAVIS_PULL_REQUEST_SLUG) return env.TRAVIS_PULL_REQUEST_SLUG;
  return getOriginalRepository$1(context);
}
/** Project repository slug (TRAVIS_REPO_SLUG) or null. */
function getOriginalRepository$1(context) {
  const { env } = context;
  return env.TRAVIS_REPO_SLUG || null;
}
/**
 * Pull request number, or null for non-PR builds.
 * Travis sets TRAVIS_PULL_REQUEST to the literal string "false" on push
 * builds; the previous code ran Number("false") and yielded NaN.
 */
function getPrNumber(context) {
  const raw = context.env.TRAVIS_PULL_REQUEST;
  if (!raw || raw === "false") return null;
  return Number(raw);
}
571
// CI service descriptor for Travis CI.
const service$2 = {
  name: "Travis CI",
  key: "travis",
  detect: ({ env }) => Boolean(env.TRAVIS),
  config: (ctx) => {
    const { env } = ctx;
    return {
      commit: env.TRAVIS_COMMIT || null,
      branch: env.TRAVIS_BRANCH || null,
      // Fork slug for PR builds, else the project slug.
      repository: getRepository$2(ctx),
      originalRepository: getOriginalRepository$1(ctx),
      jobId: null,
      runId: null,
      runAttempt: null,
      prNumber: getPrNumber(ctx),
      prHeadCommit: null,
      prBaseBranch: null,
      // Used as the default parallel nonce by readConfig.
      nonce: env.TRAVIS_BUILD_ID || null
    };
  },
  // Generic git-based implementations shared by all services.
  getMergeBaseCommitSha: getMergeBaseCommitSha$1,
  listParentCommits: listParentCommits$1
};
//#endregion
595
//#region src/ci-environment/services/gitlab.ts
// Source project of the merge request when present, else the CI project path.
function getRepository$1(context) {
  const mrProjectPath = context.env.CI_MERGE_REQUEST_PROJECT_PATH;
  return mrProjectPath ? mrProjectPath : getOriginalRepository(context);
}
// CI project path ("group/project") or null.
function getOriginalRepository(context) {
  return context.env.CI_PROJECT_PATH || null;
}
605
// CI service descriptor for GitLab CI.
const service$1 = {
  name: "GitLab",
  key: "gitlab",
  // Strict string compare: GITLAB_CI is documented as the string "true".
  detect: ({ env }) => env.GITLAB_CI === "true",
  config: (context) => {
    const { env } = context;
    return {
      commit: env.CI_COMMIT_SHA || null,
      branch: env.CI_COMMIT_REF_NAME || null,
      // Merge request source project when present, else the CI project.
      repository: getRepository$1(context),
      originalRepository: getOriginalRepository(context),
      jobId: null,
      runId: null,
      runAttempt: null,
      prNumber: null,
      prHeadCommit: null,
      prBaseBranch: null,
      // Used as the default parallel nonce by readConfig.
      nonce: env.CI_PIPELINE_ID || null
    };
  },
  // Generic git-based implementations shared by all services.
  getMergeBaseCommitSha: getMergeBaseCommitSha$1,
  listParentCommits: listParentCommits$1
};
//#endregion
629
//#region src/ci-environment/services/git.ts
/**
 * Derive "owner/repo" from the local `origin` remote URL, or null when
 * there is no remote or its URL cannot be parsed.
 */
function getRepository() {
  const repositoryURL = getRepositoryURL();
  return repositoryURL ? getRepositoryNameFromURL(repositoryURL) : null;
}
//#endregion
636
//#region src/ci-environment/index.ts
// Known CI services, probed in order; the generic "Git" fallback comes last.
const services = [
  service$5,
  service$4,
  service$3,
  service$2,
  service$6,
  service$1,
  service$7,
  {
    name: "Git",
    key: "git",
    detect: () => checkIsGitRepository(),
    // Everything we can learn from a bare local clone.
    config: () => {
      const repository = getRepository();
      return {
        commit: head() || null,
        branch: branch() || null,
        repository,
        originalRepository: repository,
        jobId: null,
        runId: null,
        runAttempt: null,
        prNumber: null,
        prHeadCommit: null,
        prBaseBranch: null,
        nonce: null
      };
    },
    getMergeBaseCommitSha: getMergeBaseCommitSha$1,
    listParentCommits: listParentCommits$1
  }
];
/**
 * Create the context for the CI service detection.
 */
function createContext() {
  return { env: process.env };
}
/**
 * Get the CI service that is currently running.
 */
function getCiService(context) {
  for (const service of services) {
    if (service.detect(context)) return service;
  }
  return undefined;
}
/**
 * Get the merge base commit.
 */
function getMergeBaseCommitSha(input) {
  const context = createContext();
  const service = getCiService(context);
  return service ? service.getMergeBaseCommitSha(input, context) : null;
}
/**
 * List the parent commits of the current head.
 */
function listParentCommits(input) {
  const context = createContext();
  const service = getCiService(context);
  return service ? service.listParentCommits(input, context) : null;
}
/**
 * Get the CI environment.
 */
async function getCiEnvironment() {
  const context = createContext();
  debug("Detecting CI environment", context);
  const service = getCiService(context);
  if (!service) return null;
  debug("Internal service matched", service.name);
  const variables = await service.config(context);
  const ciEnvironment = {
    name: service.name,
    key: service.key,
    ...variables
  };
  debug("CI environment", ciEnvironment);
  return ciEnvironment;
}
//#endregion
720
//#region src/config.ts
/** Validate that `value` looks like an http(s) URL usable as the Argos API base. */
const mustBeApiBaseUrl = (value) => {
  if (!/https?:\/\/(www\.)?[-a-zA-Z0-9@:%._+~#=]{1,256}\.[a-zA-Z0-9()]{1,6}\b([-a-zA-Z0-9()@:%_+.~#?&//=]*)/.test(value)) throw new Error("Invalid Argos API base URL");
};
/** Validate a full 40-char hex SHA1; rejects short SHAs with a dedicated message. */
const mustBeCommit = (value) => {
  if (!/^[0-9a-f]{40}$/.test(value)) {
    if (/^[0-9a-f]{7}$/.test(value)) throw new Error("Short SHA1 is not allowed");
    throw new Error("Invalid commit");
  }
};
/** Validate the Argos repository token (40 characters); empty/null is allowed. */
const mustBeArgosToken = (value) => {
  if (value && value.length !== 40) throw new Error("Invalid Argos repository token (must be 40 characters)");
};
/** Build a validator requiring an integer >= min. */
const minInteger = (min) => (value) => {
  if (!Number.isInteger(value)) throw new Error("must be an integer");
  if (value < min) throw new Error(`must be at least ${min}`);
};
/**
 * Coerce an env string to a number ("" becomes null).
 * Non-numeric input intentionally yields NaN so the integer format
 * validators above reject it with a precise message.
 * (The previous version had a dead `if (!Number.isInteger(num) ||
 * Number.isNaN(num))` branch whose body was identical to the
 * fall-through — both returned `num` — removed with no behavior change.)
 */
const toInt = (value) => {
  if (value === "") return null;
  return Number(value);
};
/** Coerce an env string to a float. */
const toFloat = (value) => parseFloat(value);
/** Coerce a comma-separated env string to an array of numbers ("" becomes null). */
const toIntArray = (value) => {
  if (Array.isArray(value)) return value;
  if (value === "") return null;
  return value.split(",").map(toInt);
};
749
// Register the custom convict formats used by the schema below.
for (const format of [
  {
    // Integer >= -1 (coerced from its env string).
    name: "parallel-total",
    validate: minInteger(-1),
    coerce: toInt
  },
  {
    // Integer >= 1 (coerced from its env string).
    name: "parallel-index",
    validate: minInteger(1),
    coerce: toInt
  },
  {
    // Float in [0, 1].
    name: "float-percent",
    validate: (val) => {
      if (val !== 0 && (!val || val > 1 || val < 0)) throw new Error("Must be a float between 0 and 1, inclusive.");
    },
    coerce: toFloat
  },
  {
    // Null or an array of integers (coerced from a comma-separated string).
    name: "int-array",
    validate: (value) => {
      if (value === null) return;
      if (!Array.isArray(value)) throw new Error("must be an array");
      for (const item of value) {
        if (!Number.isInteger(item)) throw new Error("must be an array of integers");
      }
    },
    coerce: toIntArray
  }
]) convict.addFormat(format);
775
// convict schema describing every Argos client setting.
// `env`-backed keys may be set through environment variables; keys without
// an `env` entry are filled programmatically (typically from the detected
// CI environment in readConfig). `nullable` keys may legitimately stay null.
const schema = {
  // Argos API endpoint.
  apiBaseUrl: {
    env: "ARGOS_API_BASE_URL",
    default: "https://api.argos-ci.com/v2/",
    format: mustBeApiBaseUrl
  },
  // Full 40-character commit SHA of the build.
  commit: {
    env: "ARGOS_COMMIT",
    default: null,
    format: mustBeCommit
  },
  branch: {
    env: "ARGOS_BRANCH",
    default: null,
    format: String
  },
  // Argos repository token (40 characters).
  token: {
    env: "ARGOS_TOKEN",
    default: null,
    format: mustBeArgosToken
  },
  buildName: {
    env: "ARGOS_BUILD_NAME",
    default: null,
    format: String,
    nullable: true
  },
  mode: {
    env: "ARGOS_MODE",
    format: ["ci", "monitoring"],
    default: null,
    nullable: true
  },
  // Pull request metadata, normally resolved from the CI environment.
  prNumber: {
    env: "ARGOS_PR_NUMBER",
    format: Number,
    default: null,
    nullable: true
  },
  prHeadCommit: {
    env: "ARGOS_PR_HEAD_COMMIT",
    format: String,
    default: null,
    nullable: true
  },
  prBaseBranch: {
    env: "ARGOS_PR_BASE_BRANCH",
    format: String,
    default: null,
    nullable: true
  },
  // Parallel (sharded) upload settings.
  parallel: {
    env: "ARGOS_PARALLEL",
    default: false,
    format: Boolean
  },
  parallelNonce: {
    env: "ARGOS_PARALLEL_NONCE",
    format: String,
    default: null,
    nullable: true
  },
  parallelIndex: {
    env: "ARGOS_PARALLEL_INDEX",
    format: "parallel-index",
    default: null,
    nullable: true
  },
  parallelTotal: {
    env: "ARGOS_PARALLEL_TOTAL",
    format: "parallel-total",
    default: null,
    nullable: true
  },
  // Baseline overrides.
  referenceBranch: {
    env: "ARGOS_REFERENCE_BRANCH",
    format: String,
    default: null,
    nullable: true
  },
  referenceCommit: {
    env: "ARGOS_REFERENCE_COMMIT",
    format: String,
    default: null,
    nullable: true
  },
  // CI run identifiers (no env mapping: provided by CI detection only).
  jobId: {
    format: String,
    default: null,
    nullable: true
  },
  runId: {
    format: String,
    default: null,
    nullable: true
  },
  runAttempt: {
    // "nat" is convict's built-in natural-number format.
    format: "nat",
    default: null,
    nullable: true
  },
  repository: {
    format: String,
    default: null,
    nullable: true
  },
  originalRepository: {
    format: String,
    default: null,
    nullable: true
  },
  ciProvider: {
    format: String,
    default: null,
    nullable: true
  },
  // Screenshot diff threshold, a float in [0, 1].
  threshold: {
    env: "ARGOS_THRESHOLD",
    format: "float-percent",
    default: null,
    nullable: true
  },
  previewBaseUrl: {
    env: "ARGOS_PREVIEW_BASE_URL",
    format: String,
    default: null,
    nullable: true
  },
  skipped: {
    env: "ARGOS_SKIPPED",
    format: Boolean,
    default: false
  },
  // PR numbers grouped in a merge queue run.
  mergeQueuePrNumbers: {
    env: "ARGOS_MERGE_QUEUE_PRS",
    format: "int-array",
    default: null,
    nullable: true
  },
  mergeQueue: {
    format: Boolean,
    default: false
  },
  subset: {
    env: "ARGOS_SUBSET",
    format: Boolean,
    default: false
  }
};
924
/** Build an empty convict config from the schema (no CLI args / env auto-load). */
function createConfig() {
  return convict(schema, {
    args: [],
    env: {}
  });
}
/**
 * Compute the default value for every schema key: the matching environment
 * variable when one is declared and set to a non-empty value, otherwise the
 * schema default.
 */
function getDefaultConfig() {
  const defaults = {};
  for (const [key, entry] of Object.entries(schema)) {
    const envName = "env" in entry ? entry.env : void 0;
    const envValue = envName ? process.env[envName] : void 0;
    defaults[key] = envValue ? envValue : entry.default;
  }
  return defaults;
}
936
/**
 * Build the final configuration by merging, in order of precedence:
 * explicit options > ARGOS_* environment defaults > detected CI environment.
 * @throws when branch or commit cannot be resolved, or validation fails.
 */
async function readConfig(options = {}) {
  const config = createConfig();
  const ciEnv = await getCiEnvironment();
  const defaultConfig = getDefaultConfig();
  config.load({
    apiBaseUrl: options.apiBaseUrl || defaultConfig.apiBaseUrl,
    commit: options.commit || defaultConfig.commit || ciEnv?.commit || null,
    branch: options.branch || defaultConfig.branch || ciEnv?.branch || null,
    token: options.token || defaultConfig.token || null,
    buildName: options.buildName || defaultConfig.buildName || null,
    prNumber: options.prNumber || defaultConfig.prNumber || ciEnv?.prNumber || null,
    prHeadCommit: defaultConfig.prHeadCommit || ciEnv?.prHeadCommit || null,
    prBaseBranch: defaultConfig.prBaseBranch || ciEnv?.prBaseBranch || null,
    referenceBranch: options.referenceBranch || defaultConfig.referenceBranch || null,
    referenceCommit: options.referenceCommit || defaultConfig.referenceCommit || null,
    // CI-only values: not configurable through options.
    repository: ciEnv?.repository || null,
    originalRepository: ciEnv?.originalRepository || null,
    jobId: ciEnv?.jobId || null,
    runId: ciEnv?.runId || null,
    runAttempt: ciEnv?.runAttempt || null,
    // `??` (not `||`) so explicit false/0 values are preserved.
    parallel: options.parallel ?? defaultConfig.parallel ?? false,
    parallelNonce: options.parallelNonce || defaultConfig.parallelNonce || ciEnv?.nonce || null,
    parallelTotal: options.parallelTotal ?? defaultConfig.parallelTotal ?? null,
    parallelIndex: options.parallelIndex ?? defaultConfig.parallelIndex ?? null,
    mode: options.mode || defaultConfig.mode || null,
    ciProvider: ciEnv?.key || null,
    previewBaseUrl: defaultConfig.previewBaseUrl || null,
    skipped: options.skipped ?? defaultConfig.skipped ?? false,
    subset: options.subset ?? defaultConfig.subset ?? false,
    mergeQueuePrNumbers: options.mergeQueuePrNumbers ?? defaultConfig.mergeQueuePrNumbers ?? ciEnv?.mergeQueuePrNumbers ?? null
  });
  if (!config.get("branch") || !config.get("commit")) throw new Error("Argos requires a branch and a commit to be set. If you are running in a non-git environment consider setting ARGOS_BRANCH and ARGOS_COMMIT environment variables.");
  config.validate();
  return config.get();
}
971
/**
 * Resolve the configuration from user-facing options, flattening the
 * nested `parallel` option object into the flat keys `readConfig` expects.
 */
async function getConfigFromOptions({ parallel, ...options }) {
  const hasParallelOption = parallel !== undefined;
  // Explicit `undefined` values are intentional: they override any flat
  // parallel keys that might have been passed inside `options`.
  return readConfig({
    ...options,
    parallel: hasParallelOption ? Boolean(parallel) : undefined,
    parallelNonce: parallel ? parallel.nonce : undefined,
    parallelTotal: parallel ? parallel.total : undefined,
    parallelIndex: parallel ? parallel.index : undefined
  });
}
980
+ //#endregion
981
+ //#region src/auth.ts
982
/**
 * Serialize an object as JSON and return the base64 encoding of the
 * resulting UTF-8 bytes.
 */
const base64Encode = (obj) => {
  const json = JSON.stringify(obj);
  return Buffer.from(json, "utf8").toString("base64");
};
983
+ /**
984
+ * Get the authentication token.
985
+ */
986
/**
 * Get the authentication token.
 *
 * An explicit `token` always wins. Otherwise, on GitHub Actions, a
 * "tokenless" token is derived from the CI context; any other provider
 * without a token is an error.
 */
function getAuthToken(args) {
  const { token, ciProvider, originalRepository: repository, jobId, runId, prNumber } = args;
  if (token) return token;
  if (ciProvider === "github-actions") {
    if (!repository || !jobId || !runId) {
      throw new Error(`Automatic GitHub Actions variables detection failed. Please add the 'ARGOS_TOKEN'`);
    }
    const [owner, repo] = repository.split("/");
    const payload = {
      owner,
      repository: repo,
      jobId,
      runId,
      prNumber: prNumber ?? void 0
    };
    return `tokenless-github-${base64Encode(payload)}`;
  }
  throw new Error("Missing Argos repository token 'ARGOS_TOKEN'");
}
1004
+ //#endregion
1005
+ //#region src/finalize.ts
1006
+ /**
1007
+ * Finalize pending builds.
1008
+ */
1009
/**
 * Finalize pending builds.
 *
 * Requires a parallel nonce (from `params.parallel.nonce` or the
 * environment/CI) identifying the group of parallel builds to finalize.
 */
async function finalize(params) {
  const config = await readConfig({ parallelNonce: params.parallel?.nonce });
  const apiClient = createClient({
    baseUrl: config.apiBaseUrl,
    authToken: getAuthToken(config)
  });
  if (!config.parallelNonce) {
    throw new Error("parallel.nonce is required to finalize the build");
  }
  const { data, error } = await apiClient.POST("/builds/finalize", {
    body: { parallelNonce: config.parallelNonce }
  });
  if (error) throwAPIError(error);
  return data;
}
1021
+ //#endregion
1022
+ //#region src/discovery.ts
1023
+ /**
1024
+ * Discover snapshots in the given root directory matching the provided patterns.
1025
+ */
1026
/**
 * Discover snapshots in the given root directory matching the provided
 * glob patterns.
 *
 * @returns {Promise<Array<{name: string, path: string}>>} One entry per
 *   matched file: the relative match as `name`, the absolute path as `path`.
 */
async function discoverSnapshots(patterns, { root = process.cwd(), ignore } = {}) {
  const patternLabel = Array.isArray(patterns) ? patterns.join(", ") : patterns;
  debug(`Discovering snapshots with patterns: ${patternLabel} in ${root}`);
  const matches = await glob(patterns, {
    onlyFiles: true,
    ignore,
    cwd: root
  });
  const snapshots = [];
  for (const match of matches) {
    debug(`Found screenshot: ${match}`);
    snapshots.push({
      name: match,
      path: resolve(root, match)
    });
  }
  return snapshots;
}
1040
+ /**
1041
+ * Check if the given filename corresponds to an Argos image.
1042
+ */
1043
/**
 * Check if the given filename corresponds to an Argos image
 * (PNG or JPEG, by extension, case-insensitive).
 */
function checkIsValidImageFile(filename) {
  const extension = extname(filename).toLowerCase();
  return [".png", ".jpg", ".jpeg"].includes(extension);
}
1047
+ //#endregion
1048
+ //#region src/optimize.ts
1049
// Promisified tmp.file: resolves with the path of a freshly created
// temporary file (used as the output of screenshot optimization).
const tmpFile = promisify(tmp.file);
/**
 * Maximum number of pixels allowed in a screenshot.
 */
const MAX_PIXELS = 8e7;
/**
 * Default maximum width of a screenshot.
 * Used when the width or height of the image is not available.
 */
const DEFAULT_MAX_WIDTH = 2048;
1059
/**
 * Optimize a screenshot before upload: re-encode it as PNG and downscale
 * it so that it stays under `MAX_PIXELS` total pixels.
 *
 * Files that are not recognized images (see `checkIsValidImageFile`) are
 * returned untouched.
 *
 * @param {string} filepath - Path of the screenshot to optimize.
 * @returns {Promise<string>} Path of the optimized temp file, or the
 *   original path when no processing applies.
 * @throws {Error} Wraps any sharp/tmp failure, naming the offending file.
 */
async function optimizeScreenshot(filepath) {
  if (!checkIsValidImageFile(filepath)) return filepath;
  try {
    // Allocate the output temp file and read image metadata in parallel.
    const [resultFilePath, metadata] = await Promise.all([tmpFile(), sharp(filepath).metadata()]);
    const { width, height } = metadata;
    // Target bounding box for the resize, or null when the image already
    // fits within the pixel budget.
    const maxDimensions = (() => {
      // Unknown dimensions: fall back to a conservative default box.
      if (!width || !height) return {
        width: DEFAULT_MAX_WIDTH,
        height: Math.floor(MAX_PIXELS / DEFAULT_MAX_WIDTH)
      };
      const nbPixels = width * height;
      if (nbPixels <= MAX_PIXELS) return null;
      // Portrait images: cap the width, give the rest of the budget to height.
      if (width < height) return {
        width: DEFAULT_MAX_WIDTH,
        height: Math.floor(MAX_PIXELS / DEFAULT_MAX_WIDTH)
      };
      // Landscape: scale both axes uniformly down to the pixel budget.
      const scaleFactor = Math.sqrt(MAX_PIXELS / nbPixels);
      return {
        width: Math.floor(width * scaleFactor),
        height: Math.floor(height * scaleFactor)
      };
    })();
    let operation = sharp(filepath);
    if (maxDimensions) operation = operation.resize(maxDimensions.width, maxDimensions.height, {
      fit: "inside",
      withoutEnlargement: true
    });
    // Always force PNG output, even for JPEG inputs.
    await operation.png({ force: true }).toFile(resultFilePath);
    if (width && height && maxDimensions) {
      // Re-derive the final dimensions (mirrors sharp's "inside" fit:
      // uniform scale by the smaller ratio) to report the applied resize.
      const { width: maxWidth, height: maxHeight } = maxDimensions;
      const widthRatio = maxWidth / width;
      const heightRatio = maxHeight / height;
      const scaleFactor = Math.min(widthRatio, heightRatio);
      const newWidth = Math.floor(width * scaleFactor);
      const newHeight = Math.floor(height * scaleFactor);
      console.warn(`Image ${basename(filepath)} resized from ${width}x${height} to ${newWidth}x${newHeight}.`);
    }
    return resultFilePath;
  } catch (error) {
    const message = error instanceof Error ? error.message : "Unknown Error";
    throw new Error(`Error while processing image (${filepath}): ${message}`, { cause: error });
  }
}
1102
+ //#endregion
1103
+ //#region src/hashing.ts
1104
/**
 * Compute the SHA-256 hex digest of a file by streaming its contents.
 *
 * @param {string} filepath - Path of the file to hash.
 * @returns {Promise<string>} Hex-encoded SHA-256 digest.
 */
const hashFile = async (filepath) => {
  const digest = createHash("sha256");
  const source = createReadStream(filepath);
  await new Promise((resolvePromise, reject) => {
    source.on("error", reject);
    digest.on("error", reject);
    // "finish" fires once the whole file has been written into the hash.
    digest.on("finish", resolvePromise);
    source.pipe(digest);
  });
  return digest.digest("hex");
};
1115
+ //#endregion
1116
+ //#region src/s3.ts
1117
/**
 * Upload a local file to a pre-signed URL with an HTTP PUT request.
 *
 * @param {{url: string, path: string, contentType: string}} input
 * @throws {Error} When the server responds with a non-2xx status, or the
 *   request exceeds the 30s timeout.
 */
async function uploadFile(input) {
  const { url, path, contentType } = input;
  const buffer = await readFile(path);
  const headers = {
    "Content-Type": contentType,
    "Content-Length": buffer.length.toString()
  };
  const response = await fetch(url, {
    method: "PUT",
    headers,
    // Abort uploads that take longer than 30 seconds.
    signal: AbortSignal.timeout(3e4),
    body: new Uint8Array(buffer)
  });
  if (!response.ok) {
    throw new Error(`Failed to upload file to ${url}: ${response.status} ${response.statusText}`);
  }
}
1130
+ //#endregion
1131
+ //#region src/util/chunk.ts
1132
+ /**
1133
+ * Split an array into chunks of a given size.
1134
+ */
1135
/**
 * Split an array into consecutive chunks of a given size.
 * The last chunk may be shorter when the length is not a multiple of size.
 *
 * Fixes: the previous version recomputed `Math.ceil(length / size)` on
 * every loop iteration and looped forever when `size <= 0` (the bound
 * became `Infinity`); a non-positive size now fails fast instead.
 *
 * @param {Array} collection - Items to split.
 * @param {number} size - Maximum chunk length; must be > 0.
 * @returns {Array<Array>} The chunks, in order.
 * @throws {RangeError} If `size` is not strictly positive.
 */
const chunk = (collection, size) => {
  if (!(size > 0)) throw new RangeError(`Invalid chunk size: ${size}`);
  const result = [];
  for (let start = 0; start < collection.length; start += size) {
    result.push(collection.slice(start, start + size));
  }
  return result;
};
1144
+ //#endregion
1145
+ //#region src/version.ts
1146
// CommonJS `require` shim, needed for `require.resolve` from an ES module.
const require = createRequire(import.meta.url);
/**
 * Get the version of the @argos-ci/core package.
 *
 * @returns {Promise<string>} Identifier like `@argos-ci/core@1.2.3`.
 */
async function getArgosCoreSDKIdentifier() {
  const packageJsonPath = require.resolve("@argos-ci/core/package.json");
  const version = await readVersionFromPackage(packageJsonPath);
  return `@argos-ci/core@${version}`;
}
1153
+ //#endregion
1154
+ //#region src/mime-type.ts
1155
+ /**
1156
+ * Get the mime type of a snapshot file based on its extension.
1157
+ */
1158
/**
 * Get the mime type of a snapshot file based on its extension.
 *
 * @throws {Error} When the extension is unknown to the mime database.
 */
function getSnapshotMimeType(filepath) {
  const detected = mime.lookup(filepath);
  if (detected) return detected;
  throw new Error(`Unable to determine snapshot file type for: ${filepath}`);
}
1163
+ //#endregion
1164
+ //#region src/skip.ts
1165
+ /**
1166
+ * Mark a build as skipped.
1167
+ */
1168
/**
 * Mark a build as skipped.
 *
 * Creates a build with `skipped: true` and no screenshots, so the commit
 * still gets a (neutral) Argos status.
 *
 * @returns {Promise<{build: object}>} The created build.
 */
async function skip(params) {
  const [config, argosSdk] = await Promise.all([
    getConfigFromOptions(params),
    getArgosCoreSDKIdentifier()
  ]);
  const apiClient = createClient({
    baseUrl: config.apiBaseUrl,
    authToken: getAuthToken(config)
  });
  // A skipped build carries no screenshot, trace, or parent-commit data.
  const body = {
    commit: config.commit,
    branch: config.branch,
    name: config.buildName,
    mode: config.mode,
    prNumber: config.prNumber,
    prHeadCommit: config.prHeadCommit,
    referenceBranch: config.referenceBranch,
    referenceCommit: config.referenceCommit,
    argosSdk,
    ciProvider: config.ciProvider,
    runId: config.runId,
    runAttempt: config.runAttempt,
    skipped: true,
    screenshotKeys: [],
    pwTraceKeys: [],
    parentCommits: []
  };
  const { data, error } = await apiClient.POST("/builds", { body });
  if (error) throwAPIError(error);
  return { build: data.build };
}
1195
+ //#endregion
1196
+ //#region src/upload.ts
1197
+ /**
1198
+ * Size of the chunks used to upload screenshots to Argos.
1199
+ */
1200
+ const CHUNK_SIZE = 10;
1201
+ /**
1202
+ * Upload screenshots to Argos.
1203
+ */
1204
/**
 * Upload screenshots to Argos.
 *
 * Pipeline: resolve config → discover snapshot files → optimize/hash them
 * → create the build → PUT files to pre-signed URLs → attach screenshot
 * metadata to the build.
 *
 * @param {object} params - Upload options (files, root, ignore, previewUrl,
 *   metadata, plus everything accepted by `getConfigFromOptions`).
 * @returns {Promise<{build: object, screenshots: Array}>}
 */
async function upload(params) {
  debug("Starting upload with params", params);
  const [config, argosSdk] = await Promise.all([getConfigFromOptions(params), getArgosCoreSDKIdentifier()]);
  const authToken = getAuthToken(config);
  const apiClient = createClient({
    baseUrl: config.apiBaseUrl,
    authToken
  });
  // Skipped builds short-circuit: create an empty skipped build and return.
  if (config.skipped) {
    const { build } = await skip(params);
    return {
      build,
      screenshots: []
    };
  }
  // Preview URL rewriting: explicit formatter wins over the configured base URL.
  const previewUrlFormatter = params.previewUrl ?? (config.previewBaseUrl ? { baseUrl: config.previewBaseUrl } : void 0);
  const globs = params.files ?? ["**/*.{png,jpg,jpeg}"];
  debug("Using config and files", config, globs);
  const files = await discoverSnapshots(globs, {
    root: params.root,
    ignore: params.ignore
  });
  debug("Found snapshots", files);
  // For each file: read its sidecar metadata, locate an optional Playwright
  // trace, optimize images, and hash the payloads that will be uploaded.
  const snapshots = await Promise.all(files.map(async (snapshot) => {
    const contentType = getSnapshotMimeType(snapshot.path);
    const [metadata, pwTracePath, optimizedPath] = await Promise.all([
      readMetadata(snapshot.path),
      getPlaywrightTracePath(snapshot.path),
      contentType.startsWith("image/") ? optimizeScreenshot(snapshot.path) : snapshot.path
    ]);
    const [hash, pwTraceHash] = await Promise.all([hashFile(optimizedPath), pwTracePath ? hashFile(pwTracePath) : null]);
    // `transient` metadata is consumed client-side and stripped before upload.
    const threshold = metadata?.transient?.threshold ?? null;
    const baseName = metadata?.transient?.baseName ?? null;
    const parentName = metadata?.transient?.parentName ?? null;
    if (metadata) {
      delete metadata.transient;
      if (metadata.url && previewUrlFormatter) metadata.previewUrl = formatPreviewUrl(metadata.url, previewUrlFormatter);
    }
    return {
      ...snapshot,
      hash,
      optimizedPath,
      metadata,
      threshold,
      baseName,
      parentName,
      pwTrace: pwTracePath && pwTraceHash ? {
        path: pwTracePath,
        hash: pwTraceHash
      } : null,
      contentType
    };
  }));
  debug("Fetch project");
  const projectResponse = await apiClient.GET("/project");
  if (projectResponse.error) throwAPIError(projectResponse.error);
  debug("Project fetched", projectResponse.data);
  const { defaultBaseBranch, hasRemoteContentAccess } = projectResponse.data;
  // When the server cannot inspect the repo itself, compute the comparison
  // base locally from git (merge-base of the reference/base branch and HEAD).
  const referenceCommit = (() => {
    if (config.referenceCommit) {
      debug("Found reference commit in config", config.referenceCommit);
      return config.referenceCommit;
    }
    if (hasRemoteContentAccess) return null;
    const sha = getMergeBaseCommitSha({
      base: config.referenceBranch || config.prBaseBranch || defaultBaseBranch,
      head: config.branch
    });
    if (sha) debug("Found merge base", sha);
    else debug("No merge base found");
    return sha;
  })();
  // Likewise, list ancestor commits locally only when the server cannot.
  const parentCommits = (() => {
    if (hasRemoteContentAccess) return null;
    if (referenceCommit) {
      const commits = listParentCommits({ sha: referenceCommit });
      if (commits) debug("Found parent commits", commits);
      else debug("No parent commits found");
      return commits;
    }
    return null;
  })();
  debug("Creating build");
  // Deduplicated content hashes: the server answers with pre-signed PUT URLs
  // only for keys it does not already store.
  const [pwTraceKeys, snapshotKeys] = snapshots.reduce(([pwTraceKeys, snapshotKeys], snapshot) => {
    if (snapshot.pwTrace && !pwTraceKeys.includes(snapshot.pwTrace.hash)) pwTraceKeys.push(snapshot.pwTrace.hash);
    if (!snapshotKeys.includes(snapshot.hash)) snapshotKeys.push(snapshot.hash);
    return [pwTraceKeys, snapshotKeys];
  }, [[], []]);
  const createBuildResponse = await apiClient.POST("/builds", { body: {
    commit: config.commit,
    branch: config.branch,
    name: config.buildName,
    mode: config.mode,
    parallel: config.parallel,
    parallelNonce: config.parallelNonce,
    screenshotKeys: snapshotKeys,
    pwTraceKeys,
    prNumber: config.prNumber,
    prHeadCommit: config.prHeadCommit,
    referenceBranch: config.referenceBranch,
    referenceCommit,
    parentCommits,
    argosSdk,
    ciProvider: config.ciProvider,
    runId: config.runId,
    runAttempt: config.runAttempt,
    mergeQueue: Boolean(config.mergeQueuePrNumbers),
    mergeQueuePrNumbers: config.mergeQueuePrNumbers,
    subset: config.subset
  } });
  if (createBuildResponse.error) throwAPIError(createBuildResponse.error);
  const result = createBuildResponse.data;
  debug("Got uploads url", result);
  // Upload screenshots and traces to the pre-signed URLs returned above.
  await uploadFilesToS3([...result.screenshots.map(({ key, putUrl }) => {
    const snapshot = snapshots.find((s) => s.hash === key);
    if (!snapshot) throw new Error(`Invariant: snapshot with hash ${key} not found`);
    return {
      url: putUrl,
      path: snapshot.optimizedPath,
      contentType: snapshot.contentType
    };
  }), ...result.pwTraces?.map(({ key, putUrl }) => {
    const snapshot = snapshots.find((s) => s.pwTrace && s.pwTrace.hash === key);
    if (!snapshot || !snapshot.pwTrace) throw new Error(`Invariant: trace with ${key} not found`);
    return {
      url: putUrl,
      path: snapshot.pwTrace.path,
      contentType: "application/json"
    };
  }) ?? []]);
  debug("Updating build");
  // Attach per-screenshot metadata to the build now that files are stored.
  const uploadBuildResponse = await apiClient.PUT("/builds/{buildId}", {
    params: { path: { buildId: result.build.id } },
    body: {
      screenshots: snapshots.map((snapshot) => ({
        key: snapshot.hash,
        name: snapshot.name,
        metadata: snapshot.metadata,
        pwTraceKey: snapshot.pwTrace?.hash ?? null,
        // Per-snapshot threshold wins over the global configured threshold.
        threshold: snapshot.threshold ?? config?.threshold ?? null,
        baseName: snapshot.baseName,
        parentName: snapshot.parentName,
        contentType: snapshot.contentType
      })),
      parallel: config.parallel,
      parallelTotal: config.parallelTotal,
      parallelIndex: config.parallelIndex,
      metadata: params.metadata
    }
  });
  if (uploadBuildResponse.error) throwAPIError(uploadBuildResponse.error);
  return {
    build: uploadBuildResponse.data.build,
    screenshots: snapshots
  };
}
1360
/**
 * Upload files to their pre-signed URLs, in batches of `CHUNK_SIZE`.
 * Batches run sequentially; files within a batch upload in parallel.
 *
 * @param {Array<{url: string, path: string, contentType: string}>} files
 */
async function uploadFilesToS3(files) {
  debug(`Split files in chunks of ${CHUNK_SIZE}`);
  const batches = chunk(files, CHUNK_SIZE);
  debug(`Starting upload of ${batches.length} chunks`);
  for (const [index, batch] of batches.entries()) {
    debug(`Uploading chunk ${index + 1}/${batches.length}`);
    const timeLabel = `Chunk ${index + 1}/${batches.length}`;
    debugTime(timeLabel);
    if (!batch) throw new Error(`Invariant: chunk ${index} is empty`);
    await Promise.all(batch.map(({ url, path, contentType }) =>
      uploadFile({
        url,
        path,
        contentType
      })));
    debugTimeEnd(timeLabel);
  }
}
1380
+ /**
1381
+ * Format the preview URL.
1382
+ */
1383
/**
 * Format the preview URL.
 *
 * A function formatter is applied directly; otherwise the URL's path,
 * query, and fragment are rebased onto `formatter.baseUrl`.
 */
function formatPreviewUrl(url, formatter) {
  if (typeof formatter === "function") {
    return formatter(url);
  }
  const { pathname, search, hash } = new URL(url);
  const rebased = new URL(`${pathname}${search}${hash}`, formatter.baseUrl);
  return rebased.href;
}
1388
+ //#endregion
1389
+ export { finalize, getConfigFromOptions, readConfig, skip, upload };