@argos-ci/core 5.1.2 → 5.1.3

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
package/dist/index.mjs ADDED
@@ -0,0 +1,1363 @@
1
+ import { createRequire } from "node:module";
2
+ import convict from "convict";
3
+ import { execSync } from "node:child_process";
4
+ import createDebug from "debug";
5
+ import { createReadStream, existsSync, readFileSync } from "node:fs";
6
+ import { createClient, throwAPIError } from "@argos-ci/api-client";
7
+ import { basename, extname, resolve } from "node:path";
8
+ import glob from "fast-glob";
9
+ import { promisify } from "node:util";
10
+ import sharp from "sharp";
11
+ import tmp from "tmp";
12
+ import { createHash } from "node:crypto";
13
+ import { readFile } from "node:fs/promises";
14
+ import { getPlaywrightTracePath, readMetadata, readVersionFromPackage } from "@argos-ci/util";
15
+ import mime from "mime-types";
16
+ //#region src/debug.ts
17
// Debug logging helpers for the whole package, namespaced under the
// "@argos-ci/core" key (enable with DEBUG=@argos-ci/core).
const KEY = "@argos-ci/core";
const debug = createDebug(KEY);
const isDebugEnabled = createDebug.enabled(KEY);
// Start a console timer, but only when debug logging is active.
const debugTime = (arg) => {
  if (!isDebugEnabled) return;
  console.time(arg);
};
// Stop a console timer previously started with `debugTime`.
const debugTimeEnd = (arg) => {
  if (!isDebugEnabled) return;
  console.timeEnd(arg);
};
26
+ //#endregion
27
+ //#region src/ci-environment/git.ts
28
/**
 * Check if the current directory is a git repository.
 * Returns false when git is unavailable or the command fails.
 */
function checkIsGitRepository() {
  try {
    const output = execSync("git rev-parse --is-inside-work-tree");
    return output.toString().trim() === "true";
  } catch {
    return false;
  }
}
38
/**
 * Returns the head commit sha, or null when it cannot be resolved.
 */
function head() {
  try {
    const sha = execSync("git rev-parse HEAD");
    return sha.toString().trim();
  } catch {
    return null;
  }
}
48
/**
 * Returns the current branch, or null in detached HEAD state or when
 * git fails.
 */
function branch() {
  try {
    const ref = execSync("git rev-parse --abbrev-ref HEAD").toString().trim();
    // "HEAD" means detached HEAD: there is no current branch.
    return ref === "HEAD" ? null : ref;
  } catch {
    return null;
  }
}
60
/**
 * Returns the "origin" remote URL, or null when it is not configured.
 */
function getRepositoryURL() {
  try {
    const url = execSync("git config --get remote.origin.url");
    return url.toString().trim();
  } catch {
    return null;
  }
}
70
/**
 * Run git merge-base command.
 * Returns null when the two refs share no common ancestor (git exits
 * with status 1 and an empty stderr in that case); rethrows any other
 * failure.
 */
function gitMergeBase(input) {
  try {
    const output = execSync(`git merge-base ${input.head} ${input.base}`);
    return output.toString().trim();
  } catch (error) {
    const isNoCommonAncestor = checkIsExecError(error) && error.status === 1 && error.stderr.toString() === "";
    if (isNoCommonAncestor) return null;
    throw error;
  }
}
81
/**
 * Run git fetch for a specific ref at a given depth, storing the result
 * under the `target` ref.
 */
function gitFetch({ ref, depth, target }) {
  execSync(`git fetch --force --update-head-ok --depth ${depth} origin ${ref}:${target}`);
}
87
/**
 * Check if an error is an exec error that includes stderr
 * (i.e. has a numeric `status` and a Buffer `stderr`).
 */
function checkIsExecError(error) {
  if (!(error instanceof Error)) return false;
  if (!("status" in error) || typeof error.status !== "number") return false;
  return "stderr" in error && Buffer.isBuffer(error.stderr);
}
93
/**
 * Get the merge base commit SHA.
 * Fetch both base and head with depth and then run merge base.
 * Try to find a merge base with a depth of 1000 max.
 */
function getMergeBaseCommitSha$1(input) {
  let depth = 200;
  // Fetch into argos/* refs so local branches are never clobbered.
  const argosBaseRef = `argos/${input.base}`;
  const argosHeadRef = `argos/${input.head}`;
  // NOTE(review): with `< 1e3` the loop only tries depths 200/400/600/800 —
  // depth 1000 is never attempted despite the "1000 max" wording above.
  // Confirm whether `<=` was intended.
  while (depth < 1e3) {
    gitFetch({
      ref: input.head,
      depth,
      target: argosHeadRef
    });
    gitFetch({
      ref: input.base,
      depth,
      target: argosBaseRef
    });
    const mergeBase = gitMergeBase({
      base: argosBaseRef,
      head: argosHeadRef
    });
    if (mergeBase) return mergeBase;
    // No common ancestor yet: deepen the fetch and retry.
    depth += 200;
  }
  // On failure, dump the commits we did fetch to help diagnose.
  if (isDebugEnabled) {
    const headShas = listShas(argosHeadRef);
    const baseShas = listShas(argosBaseRef);
    debug(`No merge base found for ${input.head} and ${input.base} with depth ${depth}`);
    debug(`Found ${headShas.length} commits in ${input.head}: ${headShas.join(", ")}`);
    debug(`Found ${baseShas.length} commits in ${input.base}: ${baseShas.join(", ")}`);
  }
  return null;
}
129
/**
 * List commit shas reachable from `path` (newest first), optionally
 * capped at `maxCount`.
 */
function listShas(path, maxCount) {
  const countFlag = maxCount ? `--max-count=${maxCount}` : "";
  const command = `git log --format="%H" ${countFlag} ${path}`.trim();
  return execSync(command).toString().trim().split("\n");
}
132
// List up to 200 parent commits of the given sha (newest first).
function listParentCommits$1(input) {
  const limit = 200;
  try {
    // Deepen local history so `git log` below can see the ancestors.
    execSync(`git fetch --depth=${limit} origin ${input.sha}`);
  } catch (error) {
    // The remote refuses to serve the object ("not our ref"): no ancestry.
    if (error instanceof Error && error.message.includes("not our ref")) return [];
    // NOTE(review): any other fetch error is silently ignored and we fall
    // through to `git log` on whatever local history exists — presumably a
    // deliberate best-effort; confirm this is intended.
  }
  return listShas(input.sha, limit);
}
141
+ //#endregion
142
+ //#region src/ci-environment/services/bitrise.ts
143
/**
 * Read the pull request number from the Bitrise environment, if any.
 */
function getPrNumber$2(context) {
  const pullRequest = context.env.BITRISE_PULL_REQUEST;
  return pullRequest ? Number(pullRequest) : null;
}
147
/**
 * Build "owner/slug" from the Bitrise repository variables, or null when
 * either part is missing.
 */
function getRepository$6(context) {
  const { BITRISEIO_GIT_REPOSITORY_OWNER: owner, BITRISEIO_GIT_REPOSITORY_SLUG: slug } = context.env;
  if (!owner || !slug) return null;
  return `${owner}/${slug}`;
}
152
// CI service descriptor for Bitrise.
const service$7 = {
  name: "Bitrise",
  key: "bitrise",
  // BITRISE_IO is set on every Bitrise build.
  detect: ({ env }) => Boolean(env.BITRISE_IO),
  config: (context) => {
    const { env } = context;
    const repository = getRepository$6(context);
    return {
      commit: env.BITRISE_GIT_COMMIT || null,
      branch: env.BITRISE_GIT_BRANCH || null,
      repository,
      // No separate fork repository is exposed, so both are the same.
      originalRepository: repository,
      jobId: null,
      runId: null,
      runAttempt: null,
      prNumber: getPrNumber$2({ env }),
      prHeadCommit: null,
      prBaseBranch: null,
      // Used as the parallel nonce fallback (see readConfig).
      nonce: env.BITRISEIO_PIPELINE_ID || null,
      mergeQueue: false
    };
  },
  getMergeBaseCommitSha: getMergeBaseCommitSha$1,
  listParentCommits: listParentCommits$1
};
177
+ //#endregion
178
+ //#region src/util/url.ts
179
/**
 * Extract "owner/repo" from a Git remote URL.
 * Supports SSH (git@host:owner/repo.git), HTTPS, and git protocols.
 * Returns null when the URL matches none of the known shapes.
 */
function getRepositoryNameFromURL(url) {
  const patterns = [
    /^git@[^:]+:([^/]+)\/(.+?)(?:\.git)?$/,
    /^(?:https?|git):\/\/[^/]+\/([^/]+)\/(.+?)(?:\.git)?$/
  ];
  for (const pattern of patterns) {
    const match = pattern.exec(url);
    if (match && match[1] && match[2]) return `${match[1]}/${match[2]}`;
  }
  return null;
}
190
+ //#endregion
191
+ //#region src/ci-environment/services/buildkite.ts
192
/**
 * Resolve "owner/repo" from Buildkite's BUILDKITE_REPO remote URL, or
 * null when it is not set.
 */
function getRepository$5(context) {
  const repoUrl = context.env.BUILDKITE_REPO;
  return repoUrl ? getRepositoryNameFromURL(repoUrl) : null;
}
197
// CI service descriptor for Buildkite.
// @see https://buildkite.com/docs/pipelines/environment-variables
const service$6 = {
  name: "Buildkite",
  key: "buildkite",
  detect: ({ env }) => Boolean(env.BUILDKITE),
  config: (context) => {
    const { env } = context;
    const repository = getRepository$5(context);
    return {
      // Fall back to local git when the Buildkite variables are absent.
      commit: env.BUILDKITE_COMMIT || head() || null,
      branch: env.BUILDKITE_BRANCH || branch() || null,
      repository,
      originalRepository: repository,
      jobId: null,
      runId: null,
      runAttempt: null,
      // Buildkite sets BUILDKITE_PULL_REQUEST to the literal string "false"
      // on non-PR builds; without this guard Number("false") yields NaN.
      prNumber: env.BUILDKITE_PULL_REQUEST && env.BUILDKITE_PULL_REQUEST !== "false" ? Number(env.BUILDKITE_PULL_REQUEST) : null,
      prHeadCommit: null,
      prBaseBranch: null,
      nonce: env.BUILDKITE_BUILD_ID || null,
      mergeQueue: false
    };
  },
  getMergeBaseCommitSha: getMergeBaseCommitSha$1,
  listParentCommits: listParentCommits$1
};
222
+ //#endregion
223
+ //#region src/ci-environment/services/heroku.ts
224
// CI service descriptor for Heroku CI.
const service$5 = {
  name: "Heroku",
  key: "heroku",
  detect: ({ env }) => Boolean(env.HEROKU_TEST_RUN_ID),
  config: ({ env }) => ({
    commit: env.HEROKU_TEST_RUN_COMMIT_VERSION || null,
    branch: env.HEROKU_TEST_RUN_BRANCH || null,
    // NOTE(review): "owner" does not appear in any other service config
    // in this file — possibly vestigial; confirm before relying on it.
    owner: null,
    repository: null,
    originalRepository: null,
    jobId: null,
    runId: null,
    runAttempt: null,
    prNumber: null,
    prHeadCommit: null,
    prBaseBranch: null,
    nonce: env.HEROKU_TEST_RUN_ID || null,
    mergeQueue: false
  }),
  getMergeBaseCommitSha: getMergeBaseCommitSha$1,
  listParentCommits: listParentCommits$1
};
246
+ //#endregion
247
+ //#region src/ci-environment/github.ts
248
/**
 * Get the full repository name (account/repo) from environment variable.
 * Returns null when GITHUB_REPOSITORY is unset or empty.
 */
function getGitHubRepository(ctx) {
  const { GITHUB_REPOSITORY } = ctx.env;
  return GITHUB_REPOSITORY || null;
}
254
/**
 * Get the full repository name (account/repo) from environment variable
 * or throws.
 * @throws {Error} when GITHUB_REPOSITORY is not set.
 */
function assertGitHubRepository(ctx) {
  const repository = getGitHubRepository(ctx);
  if (repository) return repository;
  throw new Error("GITHUB_REPOSITORY is missing");
}
262
/**
 * Get a GitHub token from environment variables.
 * When GITHUB_TOKEN is absent, prints a setup hint (suppressed by
 * DISABLE_GITHUB_TOKEN_WARNING) and returns null.
 */
function getGitHubToken({ env }) {
  if (!env.GITHUB_TOKEN) {
    if (!env.DISABLE_GITHUB_TOKEN_WARNING) console.log(`
Argos couldn’t find a relevant pull request in the current environment.
To resolve this, Argos requires a GITHUB_TOKEN to fetch the pull request associated with the head SHA. Please ensure the following environment variable is added:

GITHUB_TOKEN: \${{ secrets.GITHUB_TOKEN }}

For more details, check out the documentation: Read more at https://argos-ci.com/docs/run-on-preview-deployment

If you want to disable this warning, you can set the following environment variable:

DISABLE_GITHUB_TOKEN_WARNING: true
`.trim());
    return null;
  }
  return env.GITHUB_TOKEN;
}
283
/**
 * Fetch GitHub API.
 * Returns null when no token is available, otherwise the raw Response.
 * Requests are aborted after 10 seconds.
 */
async function fetchGitHubAPI(ctx, url) {
  const githubToken = getGitHubToken(ctx);
  if (!githubToken) return null;
  const headers = {
    Accept: "application/vnd.github+json",
    Authorization: `Bearer ${githubToken}`,
    "X-GitHub-Api-Version": "2022-11-28"
  };
  return await fetch(url, {
    headers,
    signal: AbortSignal.timeout(1e4)
  });
}
const GITHUB_API_BASE_URL = "https://api.github.com";
299
/**
 * Get a pull request from a head sha.
 * Fetch the last 30 pull requests sorted by updated date
 * then try to find the one that matches the head sha.
 * If no pull request is found, return null.
 * @throws {Error} on a non-OK API response.
 */
async function getPullRequestFromHeadSha(ctx, sha) {
  debug(`Fetching pull request details from head sha: ${sha}`);
  const githubRepository = assertGitHubRepository(ctx);
  const url = new URL(`/repos/${githubRepository}/pulls`, GITHUB_API_BASE_URL);
  url.search = new URLSearchParams({
    state: "open",
    sort: "updated",
    per_page: "30",
    page: "1"
  }).toString();
  const response = await fetchGitHubAPI(ctx, url);
  // A null response means no GitHub token is configured.
  if (!response) return null;
  if (!response.ok) throw new Error(`Non-OK response (status: ${response.status}) while fetching pull request details from head sha (${sha})`);
  const result = await response.json();
  if (result.length === 0) {
    debug("No results, no pull request found");
    return null;
  }
  const matchingPr = result.find((pr) => pr.head.sha === sha);
  if (matchingPr) {
    debug("Pull request found", matchingPr);
    return matchingPr;
  }
  debug("No matching pull request found");
  return null;
}
331
/**
 * Get a pull request from a PR number.
 * Returns null when no token is configured or the PR does not exist.
 * @throws {Error} on any other non-OK API response.
 */
async function getPullRequestFromPrNumber(ctx, prNumber) {
  debug(`Fetching pull request #${prNumber}`);
  const githubRepository = assertGitHubRepository(ctx);
  const url = new URL(`/repos/${githubRepository}/pulls/${prNumber}`, GITHUB_API_BASE_URL);
  const response = await fetchGitHubAPI(ctx, url);
  if (!response) return null;
  if (response.status === 404) {
    debug("No pull request found, pr detection from branch was probably a mistake");
    return null;
  }
  if (!response.ok) throw new Error(`Non-OK response (status: ${response.status}) while fetching pull request #${prNumber}`);
  return await response.json();
}
346
/**
 * Get the PR number from a merge group branch.
 * Example: gh-readonly-queue/master/pr-1529-c1c25caabaade7a8ddc1178c449b872b5d3e51a4
 * Returns null when the branch does not look like a merge-queue ref.
 */
function getPRNumberFromMergeGroupBranch(branch) {
  const match = /queue\/[^/]*\/pr-(\d+)-/.exec(branch);
  return match ? Number(match[1]) : null;
}
355
+ //#endregion
356
+ //#region src/ci-environment/services/github-actions.ts
357
/**
 * Read the event payload.
 * Returns the parsed JSON at GITHUB_EVENT_PATH, or null when the
 * variable is unset or the file does not exist.
 */
function readEventPayload({ env }) {
  const eventPath = env.GITHUB_EVENT_PATH;
  if (!eventPath) return null;
  if (!existsSync(eventPath)) return null;
  return JSON.parse(readFileSync(eventPath, "utf-8"));
}
365
/**
 * Get a payload from a Vercel deployment "repository_dispatch"
 * @see https://vercel.com/docs/git/vercel-for-github#repository-dispatch-events
 * Returns null unless the current event is a successful Vercel deployment.
 */
function getVercelDeploymentPayload(payload) {
  if (process.env.GITHUB_EVENT_NAME !== "repository_dispatch") return null;
  if (!payload || !("action" in payload)) return null;
  return payload.action === "vercel.deployment.success" ? payload : null;
}
373
/**
 * Get a merge group payload from a "merge_group" event.
 * Returns null unless the current event is a merge-queue "checks_requested".
 */
function getMergeGroupPayload(payload) {
  if (!payload) return null;
  if (process.env.GITHUB_EVENT_NAME !== "merge_group") return null;
  if (!("action" in payload) || payload.action !== "checks_requested") return null;
  return payload;
}
380
/**
 * Get the branch from the local context.
 * Prefers GITHUB_HEAD_REF, then parses GITHUB_REF when it points to a
 * branch (refs/heads/...); null otherwise.
 */
function getBranchFromContext(context) {
  const { GITHUB_HEAD_REF, GITHUB_REF } = context.env;
  if (GITHUB_HEAD_REF) return GITHUB_HEAD_REF;
  if (!GITHUB_REF) return null;
  const match = /refs\/heads\/(.*)/.exec(GITHUB_REF);
  return match?.[1] ?? null;
}
389
/**
 * Get the branch from the payload.
 * - workflow_run events: the run's head branch.
 * - deployment events: the deployment environment name.
 */
function getBranchFromPayload(payload) {
  const { workflow_run: workflowRun, deployment } = payload;
  if (workflowRun) return workflowRun.head_branch;
  if (deployment) return deployment.environment;
  return null;
}
397
/**
 * Get the branch.
 * Resolution order: merge-queue PR head ref, Vercel payload git ref,
 * event payload, local context, then the pull request head ref.
 */
function getBranch(args) {
  const { payload, mergeGroupPayload, vercelPayload, pullRequest, context } = args;
  if (mergeGroupPayload && pullRequest?.head.ref) return pullRequest.head.ref;
  if (vercelPayload) return vercelPayload.client_payload.git.ref;
  const fromPayload = payload ? getBranchFromPayload(payload) : null;
  if (fromPayload) return fromPayload;
  const fromContext = getBranchFromContext(context);
  if (fromContext) return fromContext;
  if (pullRequest) return pullRequest.head.ref;
  return null;
}
413
/**
 * Get the repository either from payload or from environment variables.
 * Prefers the PR head repository (covers forks) over GITHUB_REPOSITORY.
 */
function getRepository$4(context, payload) {
  const headRepository = payload?.pull_request?.head?.repo?.full_name;
  if (headRepository) return headRepository;
  return getGitHubRepository(context);
}
423
/**
 * Get the head sha.
 * - "pull_request_target" events: the sha is taken from the PR payload,
 *   not from GITHUB_SHA (which would not be the PR head here).
 * - Vercel repository_dispatch events: the sha comes from the client payload.
 * - Otherwise: GITHUB_SHA.
 * @throws {Error} when the sha cannot be determined.
 */
function getSha(context, vercelPayload, payload) {
  if (context.env.GITHUB_EVENT_NAME === "pull_request_target") {
    if (!payload) throw new Error("Payload is missing in \"pull_request_target\" event");
    const pullRequest = getPullRequestFromPayload(payload);
    if (!pullRequest) throw new Error("Pull request missing in \"pull_request_target\" event");
    return pullRequest.head.sha;
  }
  if (vercelPayload) return vercelPayload.client_payload.git.sha;
  if (!context.env.GITHUB_SHA) throw new Error("GITHUB_SHA is missing");
  return context.env.GITHUB_SHA;
}
437
/**
 * Get the pull request from an event payload.
 * Looks in order at: pull_request events, workflow_run events (first
 * linked PR), then check_run events (first linked PR).
 */
function getPullRequestFromPayload(payload) {
  // (Fixed: the condition previously repeated `payload.pull_request` twice.)
  if ("pull_request" in payload && payload.pull_request) return payload.pull_request;
  if ("workflow_run" in payload && payload.workflow_run && payload.workflow_run.pull_requests[0]) return payload.workflow_run.pull_requests[0];
  if ("check_run" in payload && payload.check_run && "pull_requests" in payload.check_run && payload.check_run.pull_requests[0]) return payload.check_run.pull_requests[0];
  return null;
}
446
/**
 * Get the pull request either from payload or local fetching.
 * - Vercel deployments or missing payloads: look the PR up by head sha.
 * - Merge-queue builds: derive the PR number from the queue branch name.
 * - Otherwise: read it straight from the event payload.
 */
async function getPullRequest(args) {
  const { payload, vercelPayload, mergeGroupPayload, context, sha } = args;
  if (vercelPayload || !payload) return getPullRequestFromHeadSha(context, sha);
  if (mergeGroupPayload) {
    const prNumber = getPRNumberFromMergeGroupBranch(mergeGroupPayload.merge_group.head_ref);
    if (!prNumber) {
      debug(`No PR found from merge group head ref: ${mergeGroupPayload.merge_group.head_ref}`);
      return null;
    }
    debug(`PR #${prNumber} found from merge group head ref (${mergeGroupPayload.merge_group.head_ref})`);
    return getPullRequestFromPrNumber(context, prNumber);
  }
  return getPullRequestFromPayload(payload);
}
463
// CI service descriptor for GitHub Actions. The only service with an
// async `config`: it may call the GitHub API to resolve the pull request.
const service$4 = {
  name: "GitHub Actions",
  key: "github-actions",
  detect: (context) => Boolean(context.env.GITHUB_ACTIONS),
  config: async (context) => {
    const { env } = context;
    const payload = readEventPayload(context);
    const vercelPayload = getVercelDeploymentPayload(payload);
    const mergeGroupPayload = getMergeGroupPayload(payload);
    const sha = getSha(context, vercelPayload, payload);
    const pullRequest = await getPullRequest({
      payload,
      vercelPayload,
      mergeGroupPayload,
      sha,
      context
    });
    const branch = getBranch({
      payload,
      vercelPayload,
      mergeGroupPayload,
      context,
      pullRequest
    });
    return {
      commit: sha,
      repository: getRepository$4(context, payload),
      originalRepository: getGitHubRepository(context),
      jobId: env.GITHUB_JOB || null,
      runId: env.GITHUB_RUN_ID || null,
      runAttempt: env.GITHUB_RUN_ATTEMPT ? Number(env.GITHUB_RUN_ATTEMPT) : null,
      // Run id + attempt identifies this exact workflow execution.
      nonce: `${env.GITHUB_RUN_ID}-${env.GITHUB_RUN_ATTEMPT}`,
      branch,
      prNumber: pullRequest?.number || null,
      prHeadCommit: pullRequest?.head.sha ?? null,
      prBaseBranch: pullRequest?.base.ref ?? null,
      mergeQueue: Boolean(mergeGroupPayload)
    };
  },
  getMergeBaseCommitSha: getMergeBaseCommitSha$1,
  listParentCommits: listParentCommits$1
};
505
+ //#endregion
506
+ //#region src/ci-environment/services/circleci.ts
507
/**
 * Extract the PR number from CIRCLE_PULL_REQUEST (a pull request URL).
 */
function getPrNumber$1(context) {
  const prUrl = context.env.CIRCLE_PULL_REQUEST || "";
  const match = /pull\/(\d+)/.exec(prUrl);
  return match ? Number(match[1]) : null;
}
513
/**
 * Repository for CircleCI: prefer the PR's source repo (covers forks),
 * falling back to the project repo.
 */
function getRepository$3(context) {
  const { CIRCLE_PR_USERNAME: username, CIRCLE_PR_REPONAME: reponame } = context.env;
  if (username && reponame) return `${username}/${reponame}`;
  return getOriginalRepository$2(context);
}
518
/**
 * Build "owner/repo" from the CircleCI project variables, or null when
 * either part is missing.
 */
function getOriginalRepository$2(context) {
  const { CIRCLE_PROJECT_USERNAME: username, CIRCLE_PROJECT_REPONAME: reponame } = context.env;
  if (!username || !reponame) return null;
  return `${username}/${reponame}`;
}
523
// CI service descriptor for CircleCI.
const service$3 = {
  name: "CircleCI",
  key: "circleci",
  detect: ({ env }) => Boolean(env.CIRCLECI),
  config: (context) => {
    const { env } = context;
    return {
      commit: env.CIRCLE_SHA1 || null,
      branch: env.CIRCLE_BRANCH || null,
      repository: getRepository$3(context),
      originalRepository: getOriginalRepository$2(context),
      jobId: null,
      runId: null,
      runAttempt: null,
      prNumber: getPrNumber$1({ env }),
      prHeadCommit: null,
      prBaseBranch: null,
      // Workflow id first, falling back to the single build number.
      nonce: env.CIRCLE_WORKFLOW_ID || env.CIRCLE_BUILD_NUM || null,
      mergeQueue: false
    };
  },
  getMergeBaseCommitSha: getMergeBaseCommitSha$1,
  listParentCommits: listParentCommits$1
};
547
+ //#endregion
548
+ //#region src/ci-environment/services/travis.ts
549
/**
 * Repository for Travis: prefer the PR's source repo slug (covers forks),
 * falling back to the project repo slug.
 */
function getRepository$2(context) {
  return context.env.TRAVIS_PULL_REQUEST_SLUG || getOriginalRepository$1(context);
}
554
/**
 * The project repository slug on Travis, or null when unset.
 */
function getOriginalRepository$1(context) {
  const { TRAVIS_REPO_SLUG } = context.env;
  return TRAVIS_REPO_SLUG || null;
}
558
/**
 * Read the pull request number from the Travis CI environment.
 * Travis sets TRAVIS_PULL_REQUEST to the literal string "false" on
 * non-PR builds; without the guard Number("false") would yield NaN.
 */
function getPrNumber(context) {
  const { env } = context;
  if (env.TRAVIS_PULL_REQUEST && env.TRAVIS_PULL_REQUEST !== "false") return Number(env.TRAVIS_PULL_REQUEST);
  return null;
}
563
// CI service descriptor for Travis CI.
const service$2 = {
  name: "Travis CI",
  key: "travis",
  detect: ({ env }) => Boolean(env.TRAVIS),
  config: (ctx) => {
    const { env } = ctx;
    return {
      commit: env.TRAVIS_COMMIT || null,
      branch: env.TRAVIS_BRANCH || null,
      repository: getRepository$2(ctx),
      originalRepository: getOriginalRepository$1(ctx),
      jobId: null,
      runId: null,
      runAttempt: null,
      prNumber: getPrNumber(ctx),
      prHeadCommit: null,
      prBaseBranch: null,
      nonce: env.TRAVIS_BUILD_ID || null,
      mergeQueue: false
    };
  },
  getMergeBaseCommitSha: getMergeBaseCommitSha$1,
  listParentCommits: listParentCommits$1
};
587
+ //#endregion
588
+ //#region src/ci-environment/services/gitlab.ts
589
/**
 * Repository for GitLab: prefer the merge request project path, falling
 * back to the current project path.
 */
function getRepository$1(context) {
  return context.env.CI_MERGE_REQUEST_PROJECT_PATH || getOriginalRepository(context);
}
594
/**
 * The GitLab project path, or null when unset.
 */
function getOriginalRepository(context) {
  const { CI_PROJECT_PATH } = context.env;
  return CI_PROJECT_PATH || null;
}
598
// CI service descriptor for GitLab CI.
const service$1 = {
  name: "GitLab",
  key: "gitlab",
  // GitLab sets GITLAB_CI to the string "true"; strict comparison here.
  detect: ({ env }) => env.GITLAB_CI === "true",
  config: (context) => {
    const { env } = context;
    return {
      commit: env.CI_COMMIT_SHA || null,
      branch: env.CI_COMMIT_REF_NAME || null,
      repository: getRepository$1(context),
      originalRepository: getOriginalRepository(context),
      jobId: null,
      runId: null,
      runAttempt: null,
      prNumber: null,
      prHeadCommit: null,
      prBaseBranch: null,
      nonce: env.CI_PIPELINE_ID || null,
      mergeQueue: false
    };
  },
  getMergeBaseCommitSha: getMergeBaseCommitSha$1,
  listParentCommits: listParentCommits$1
};
622
+ //#endregion
623
+ //#region src/ci-environment/services/git.ts
624
/**
 * Resolve "owner/repo" from the local git "origin" remote URL, or null
 * when no remote is configured or the URL is unparseable.
 */
function getRepository() {
  const repositoryURL = getRepositoryURL();
  return repositoryURL ? getRepositoryNameFromURL(repositoryURL) : null;
}
629
+ //#endregion
630
+ //#region src/ci-environment/index.ts
631
// All supported CI services, in detection order: the first service whose
// `detect` returns true wins (see getCiService). The inline "Git" entry is
// the last-resort fallback for any plain git repository.
const services = [
  service$5,
  service$4,
  service$3,
  service$2,
  service$6,
  service$1,
  service$7,
  {
    name: "Git",
    key: "git",
    detect: () => checkIsGitRepository(),
    config: () => {
      const repository = getRepository();
      return {
        commit: head() || null,
        branch: branch() || null,
        repository,
        originalRepository: repository,
        jobId: null,
        runId: null,
        runAttempt: null,
        prNumber: null,
        prHeadCommit: null,
        prBaseBranch: null,
        nonce: null,
        mergeQueue: false
      };
    },
    getMergeBaseCommitSha: getMergeBaseCommitSha$1,
    listParentCommits: listParentCommits$1
  }
];
664
/**
 * Create the context for the CI service detection.
 * Wraps the live process environment (not a copy).
 */
function createContext() {
  const env = process.env;
  return { env };
}
670
/**
 * Get the CI service that is currently running, or undefined when no
 * service matches.
 */
function getCiService(context) {
  for (const service of services) {
    if (service.detect(context)) return service;
  }
  return undefined;
}
676
/**
 * Get the merge base commit sha via the detected CI service, or null
 * when no CI service matches.
 */
function getMergeBaseCommitSha(input) {
  const context = createContext();
  const service = getCiService(context);
  return service ? service.getMergeBaseCommitSha(input, context) : null;
}
685
/**
 * List the parent commits via the detected CI service, or null when no
 * CI service matches. (The original comment here said "Get the merge
 * base commit" — a copy/paste slip from the function above.)
 */
function listParentCommits(input) {
  const context = createContext();
  const service = getCiService(context);
  if (!service) return null;
  return service.listParentCommits(input, context);
}
694
/**
 * Get the CI environment.
 * Detects the current CI service and returns its name/key plus the
 * variables it resolved, or null when no service (not even plain git)
 * matches.
 */
async function getCiEnvironment() {
  const context = createContext();
  debug("Detecting CI environment", context);
  const service = getCiService(context);
  if (service) {
    debug("Internal service matched", service.name);
    // `config` may be sync (most services) or async (GitHub Actions).
    const variables = await service.config(context);
    const ciEnvironment = {
      name: service.name,
      key: service.key,
      ...variables
    };
    debug("CI environment", ciEnvironment);
    return ciEnvironment;
  }
  return null;
}
714
+ //#endregion
715
+ //#region src/config.ts
716
/**
 * Convict format validator: the value must look like an http(s)/URL.
 * @throws {Error} when the value is not a valid Argos API base URL.
 */
const mustBeApiBaseUrl = (value) => {
  const urlPattern = /https?:\/\/(www\.)?[-a-zA-Z0-9@:%._+~#=]{1,256}\.[a-zA-Z0-9()]{1,6}\b([-a-zA-Z0-9()@:%_+.~#?&//=]*)/;
  if (!urlPattern.test(value)) throw new Error("Invalid Argos API base URL");
};
719
/**
 * Convict format validator: the value must be a full 40-character hex
 * SHA1. Abbreviated SHAs (git shortens to 7-39 hex chars) get a
 * dedicated error message — previously only exactly-7-char values were
 * recognized as "short".
 * @throws {Error} when the value is not a full-length commit sha.
 */
const mustBeCommit = (value) => {
  if (!/^[0-9a-f]{40}$/.test(value)) {
    if (/^[0-9a-f]{7,39}$/.test(value)) throw new Error("Short SHA1 is not allowed");
    throw new Error("Invalid commit");
  }
};
725
/**
 * Convict format validator: when a token is set, it must be exactly 40
 * characters long. Null/empty values are accepted (token is optional).
 */
const mustBeArgosToken = (value) => {
  if (!value) return;
  if (value.length !== 40) throw new Error("Invalid Argos repository token (must be 40 characters)");
};
728
/**
 * Build a convict validator that requires an integer >= min.
 */
const minInteger = (min) => {
  return (value) => {
    if (!Number.isInteger(value)) throw new Error("must be an integer");
    if (value < min) throw new Error(`must be at least ${min}`);
  };
};
732
/**
 * Coerce an environment string to a number. The empty string means
 * "unset" and maps to null; any other value goes through Number()
 * (non-numeric strings become NaN, which the validators then reject).
 * (Fixed: the previous `if (!Number.isInteger(num) || Number.isNaN(num))
 * return num; return num;` had two identical branches — dead code.)
 */
const toInt = (value) => {
  if (value === "") return null;
  return Number(value);
};
/** Coerce an environment string to a float. */
const toFloat = (value) => parseFloat(value);
739
// Register the custom convict formats used by the schema below.
// "parallel-total": integer >= -1.
convict.addFormat({
  name: "parallel-total",
  validate: minInteger(-1),
  coerce: toInt
});
// "parallel-index": integer >= 1 (1-based index).
convict.addFormat({
  name: "parallel-index",
  validate: minInteger(1),
  coerce: toInt
});
// "float-percent": float in [0, 1], inclusive.
convict.addFormat({
  name: "float-percent",
  validate: (val) => {
    // `val !== 0` lets exact zero through; all other falsy values
    // (null, undefined, NaN) and out-of-range numbers are rejected.
    if (val !== 0 && (!val || val > 1 || val < 0)) throw new Error("Must be a float between 0 and 1, inclusive.");
  },
  coerce: toFloat
});
756
// Convict schema describing every Argos configuration option.
// Entries with an `env` key can be supplied through environment
// variables; entries without one are derived from the CI environment at
// runtime (see readConfig).
const schema = {
  apiBaseUrl: {
    env: "ARGOS_API_BASE_URL",
    default: "https://api.argos-ci.com/v2/",
    format: mustBeApiBaseUrl
  },
  // Full 40-char head commit sha.
  commit: {
    env: "ARGOS_COMMIT",
    default: null,
    format: mustBeCommit
  },
  branch: {
    env: "ARGOS_BRANCH",
    default: null,
    format: String
  },
  // Argos repository token (40 chars); optional on GitHub Actions where
  // a tokenless credential can be derived (see getAuthToken).
  token: {
    env: "ARGOS_TOKEN",
    default: null,
    format: mustBeArgosToken
  },
  buildName: {
    env: "ARGOS_BUILD_NAME",
    default: null,
    format: String,
    nullable: true
  },
  mode: {
    env: "ARGOS_MODE",
    format: ["ci", "monitoring"],
    default: null,
    nullable: true
  },
  prNumber: {
    env: "ARGOS_PR_NUMBER",
    format: Number,
    default: null,
    nullable: true
  },
  prHeadCommit: {
    env: "ARGOS_PR_HEAD_COMMIT",
    format: String,
    default: null,
    nullable: true
  },
  prBaseBranch: {
    env: "ARGOS_PR_BASE_BRANCH",
    format: String,
    default: null,
    nullable: true
  },
  // Parallel-upload settings: shards of the same build share a nonce.
  parallel: {
    env: "ARGOS_PARALLEL",
    default: false,
    format: Boolean
  },
  parallelNonce: {
    env: "ARGOS_PARALLEL_NONCE",
    format: String,
    default: null,
    nullable: true
  },
  parallelIndex: {
    env: "ARGOS_PARALLEL_INDEX",
    format: "parallel-index",
    default: null,
    nullable: true
  },
  parallelTotal: {
    env: "ARGOS_PARALLEL_TOTAL",
    format: "parallel-total",
    default: null,
    nullable: true
  },
  referenceBranch: {
    env: "ARGOS_REFERENCE_BRANCH",
    format: String,
    default: null,
    nullable: true
  },
  referenceCommit: {
    env: "ARGOS_REFERENCE_COMMIT",
    format: String,
    default: null,
    nullable: true
  },
  // The following entries have no `env`: they come from CI detection.
  jobId: {
    format: String,
    default: null,
    nullable: true
  },
  runId: {
    format: String,
    default: null,
    nullable: true
  },
  runAttempt: {
    format: "nat",
    default: null,
    nullable: true
  },
  repository: {
    format: String,
    default: null,
    nullable: true
  },
  originalRepository: {
    format: String,
    default: null,
    nullable: true
  },
  ciProvider: {
    format: String,
    default: null,
    nullable: true
  },
  // Diff threshold in [0, 1] (see the "float-percent" format).
  threshold: {
    env: "ARGOS_THRESHOLD",
    format: "float-percent",
    default: null,
    nullable: true
  },
  previewBaseUrl: {
    env: "ARGOS_PREVIEW_BASE_URL",
    format: String,
    default: null,
    nullable: true
  },
  skipped: {
    env: "ARGOS_SKIPPED",
    format: Boolean,
    default: false
  },
  mergeQueue: {
    format: Boolean,
    default: false
  },
  subset: {
    env: "ARGOS_SUBSET",
    format: Boolean,
    default: false
  }
};
899
/**
 * Instantiate a convict config from the schema, isolated from process
 * arguments and the ambient environment (values are loaded explicitly
 * in readConfig).
 */
function createConfig() {
  const isolation = { args: [], env: {} };
  return convict(schema, isolation);
}
905
/**
 * Build a plain-object config from the schema defaults, letting a
 * declared (and non-empty) environment variable override each default.
 */
function getDefaultConfig() {
  const result = {};
  for (const [key, entry] of Object.entries(schema)) {
    const envName = "env" in entry ? entry.env : undefined;
    const envValue = envName ? process.env[envName] : undefined;
    result[key] = envValue ? envValue : entry.default;
  }
  return result;
}
911
/**
 * Resolve the final configuration.
 * Priority per key (highest first): programmatic options, environment
 * variables / schema defaults (via getDefaultConfig), then values
 * detected from the CI environment.
 * @throws {Error} when no branch or commit can be resolved, or when the
 * loaded values fail schema validation.
 */
async function readConfig(options = {}) {
  const config = createConfig();
  const ciEnv = await getCiEnvironment();
  const defaultConfig = getDefaultConfig();
  config.load({
    apiBaseUrl: options.apiBaseUrl || defaultConfig.apiBaseUrl,
    commit: options.commit || defaultConfig.commit || ciEnv?.commit || null,
    branch: options.branch || defaultConfig.branch || ciEnv?.branch || null,
    token: options.token || defaultConfig.token || null,
    buildName: options.buildName || defaultConfig.buildName || null,
    prNumber: options.prNumber || defaultConfig.prNumber || ciEnv?.prNumber || null,
    prHeadCommit: defaultConfig.prHeadCommit || ciEnv?.prHeadCommit || null,
    prBaseBranch: defaultConfig.prBaseBranch || ciEnv?.prBaseBranch || null,
    referenceBranch: options.referenceBranch || defaultConfig.referenceBranch || null,
    referenceCommit: options.referenceCommit || defaultConfig.referenceCommit || null,
    repository: ciEnv?.repository || null,
    originalRepository: ciEnv?.originalRepository || null,
    jobId: ciEnv?.jobId || null,
    runId: ciEnv?.runId || null,
    runAttempt: ciEnv?.runAttempt || null,
    // `??` (not `||`) below so an explicit `false`/`0` option is kept.
    parallel: options.parallel ?? defaultConfig.parallel ?? false,
    parallelNonce: options.parallelNonce || defaultConfig.parallelNonce || ciEnv?.nonce || null,
    parallelTotal: options.parallelTotal ?? defaultConfig.parallelTotal ?? null,
    parallelIndex: options.parallelIndex ?? defaultConfig.parallelIndex ?? null,
    mode: options.mode || defaultConfig.mode || null,
    ciProvider: ciEnv?.key || null,
    previewBaseUrl: defaultConfig.previewBaseUrl || null,
    skipped: options.skipped ?? defaultConfig.skipped ?? false,
    subset: options.subset ?? defaultConfig.subset ?? false,
    mergeQueue: ciEnv?.mergeQueue ?? false
  });
  if (!config.get("branch") || !config.get("commit")) throw new Error("Argos requires a branch and a commit to be set. If you are running in a non-git environment consider setting ARGOS_BRANCH and ARGOS_COMMIT environment variables.");
  config.validate();
  return config.get();
}
946
/**
 * Build a config from SDK options, flattening the `parallel` option
 * (a flag or a `{ nonce, total, index }` object) into the scalar
 * `parallel*` fields expected by `readConfig`.
 */
async function getConfigFromOptions({ parallel, ...options }) {
  const flattened = { ...options };
  flattened.parallel = parallel === undefined ? undefined : Boolean(parallel);
  flattened.parallelNonce = parallel ? parallel.nonce : undefined;
  flattened.parallelTotal = parallel ? parallel.total : undefined;
  flattened.parallelIndex = parallel ? parallel.index : undefined;
  return readConfig(flattened);
}
955
//#endregion
//#region src/auth.ts
// Serialize an object to JSON and encode it as base64 (tokenless auth payloads).
const base64Encode = (obj) => {
  const json = JSON.stringify(obj);
  return Buffer.from(json, "utf8").toString("base64");
};
958
/**
 * Get the authentication token.
 * An explicit token wins; otherwise, on GitHub Actions, a "tokenless"
 * token is derived from the repository and run coordinates.
 * @throws when no token can be produced.
 */
function getAuthToken(args) {
  const { token, ciProvider, originalRepository: repository, jobId, runId, prNumber } = args;
  if (token) return token;
  if (ciProvider === "github-actions") {
    if (!repository || !jobId || !runId) throw new Error(`Automatic GitHub Actions variables detection failed. Please add the 'ARGOS_TOKEN'`);
    const [owner, repo] = repository.split("/");
    const payload = {
      owner,
      repository: repo,
      jobId,
      runId,
      prNumber: prNumber ?? void 0
    };
    return `tokenless-github-${base64Encode(payload)}`;
  }
  throw new Error("Missing Argos repository token 'ARGOS_TOKEN'");
}
979
//#endregion
//#region src/finalize.ts
/**
 * Finalize pending builds.
 * Requires a parallel nonce (from params or the environment) identifying
 * the group of parallel builds to close.
 */
async function finalize(params) {
  const config = await readConfig({ parallelNonce: params.parallel?.nonce });
  const apiClient = createClient({
    baseUrl: config.apiBaseUrl,
    authToken: getAuthToken(config)
  });
  if (!config.parallelNonce) throw new Error("parallel.nonce is required to finalize the build");
  const response = await apiClient.POST("/builds/finalize", { body: { parallelNonce: config.parallelNonce } });
  if (response.error) throwAPIError(response.error);
  return response.data;
}
996
//#endregion
//#region src/discovery.ts
/**
 * Discover snapshots in the given root directory matching the provided patterns.
 * @returns entries with the glob-relative `name` and absolute `path`.
 */
async function discoverSnapshots(patterns, { root = process.cwd(), ignore } = {}) {
  const patternLabel = Array.isArray(patterns) ? patterns.join(", ") : patterns;
  debug(`Discovering snapshots with patterns: ${patternLabel} in ${root}`);
  const matches = await glob(patterns, {
    onlyFiles: true,
    ignore,
    cwd: root
  });
  const snapshots = [];
  for (const match of matches) {
    debug(`Found screenshot: ${match}`);
    snapshots.push({
      name: match,
      path: resolve(root, match)
    });
  }
  return snapshots;
}
1015
/**
 * Check if the given filename corresponds to an Argos image.
 * Only PNG and JPEG extensions (case-insensitive) are accepted.
 */
function checkIsValidImageFile(filename) {
  const extension = extname(filename).toLowerCase();
  return [".png", ".jpg", ".jpeg"].includes(extension);
}
1022
//#endregion
//#region src/optimize.ts
// Promisified tmp.file: resolves with the path of a fresh temporary file.
const tmpFile = promisify(tmp.file);
/**
 * Maximum number of pixels allowed in a screenshot.
 * Images above this area are downscaled before upload (see optimizeScreenshot).
 */
const MAX_PIXELS = 8e7;
/**
 * Default maximum width of a screenshot.
 * Used when the width or height of the image is not available.
 */
const DEFAULT_MAX_WIDTH = 2048;
1034
/**
 * Optimize a screenshot before upload.
 * Non-image files are returned untouched. Images are re-encoded as PNG
 * into a temporary file, downscaled first when they exceed MAX_PIXELS.
 * @param {string} filepath - path of the input snapshot
 * @returns {Promise<string>} path of the optimized file (or the input path)
 * @throws wraps any processing error with the offending file path as cause
 */
async function optimizeScreenshot(filepath) {
  // Only .png/.jpg/.jpeg are processed; anything else passes through as-is.
  if (!checkIsValidImageFile(filepath)) return filepath;
  try {
    const [resultFilePath, metadata] = await Promise.all([tmpFile(), sharp(filepath).metadata()]);
    const { width, height } = metadata;
    // Target bounding box for the resize, or null when no resize is needed.
    const maxDimensions = (() => {
      // Dimensions unknown: fall back to a conservative fixed box.
      if (!width || !height) return {
        width: DEFAULT_MAX_WIDTH,
        height: Math.floor(MAX_PIXELS / DEFAULT_MAX_WIDTH)
      };
      const nbPixels = width * height;
      if (nbPixels <= MAX_PIXELS) return null;
      // Portrait images: cap the width and give the height the remaining budget.
      if (width < height) return {
        width: DEFAULT_MAX_WIDTH,
        height: Math.floor(MAX_PIXELS / DEFAULT_MAX_WIDTH)
      };
      // Landscape images: scale both sides uniformly so the area fits MAX_PIXELS.
      const scaleFactor = Math.sqrt(MAX_PIXELS / nbPixels);
      return {
        width: Math.floor(width * scaleFactor),
        height: Math.floor(height * scaleFactor)
      };
    })();
    let operation = sharp(filepath);
    // "inside" preserves aspect ratio; withoutEnlargement prevents upscaling.
    if (maxDimensions) operation = operation.resize(maxDimensions.width, maxDimensions.height, {
      fit: "inside",
      withoutEnlargement: true
    });
    await operation.png({ force: true }).toFile(resultFilePath);
    if (width && height && maxDimensions) {
      // Estimate the output dimensions to warn the user about the resize.
      // NOTE(review): with fit "inside" the actual output could differ from
      // this floor-based estimate by a pixel — warning is informational only.
      const { width: maxWidth, height: maxHeight } = maxDimensions;
      const widthRatio = maxWidth / width;
      const heightRatio = maxHeight / height;
      const scaleFactor = Math.min(widthRatio, heightRatio);
      const newWidth = Math.floor(width * scaleFactor);
      const newHeight = Math.floor(height * scaleFactor);
      console.warn(`Image ${basename(filepath)} resized from ${width}x${height} to ${newWidth}x${newHeight}.`);
    }
    return resultFilePath;
  } catch (error) {
    const message = error instanceof Error ? error.message : "Unknown Error";
    throw new Error(`Error while processing image (${filepath}): ${message}`, { cause: error });
  }
}
1077
//#endregion
//#region src/hashing.ts
/**
 * Compute the SHA-256 hex digest of a file by streaming its content.
 * @param {string} filepath - path of the file to hash
 * @returns {Promise<string>} hex-encoded digest
 */
const hashFile = async (filepath) => {
  const digest = createHash("sha256");
  const stream = createReadStream(filepath);
  await new Promise((fulfill, reject) => {
    stream.on("error", reject);
    digest.on("error", reject);
    // Hash objects are writable streams: "finish" fires once piping completes.
    digest.on("finish", fulfill);
    stream.pipe(digest);
  });
  return digest.digest("hex");
};
1090
//#endregion
//#region src/s3.ts
/**
 * Upload a single file to a pre-signed URL via HTTP PUT.
 * Aborts after 30 seconds; throws on any non-2xx response.
 */
async function uploadFile(input) {
  const { url, path, contentType } = input;
  const buffer = await readFile(path);
  const response = await fetch(url, {
    method: "PUT",
    headers: {
      "Content-Type": contentType,
      "Content-Length": buffer.length.toString()
    },
    signal: AbortSignal.timeout(3e4),
    body: new Uint8Array(buffer)
  });
  if (!response.ok) throw new Error(`Failed to upload file to ${url}: ${response.status} ${response.statusText}`);
}
1105
//#endregion
//#region src/util/chunk.ts
/**
 * Split an array into chunks of a given size.
 * The last chunk may be shorter when the length is not a multiple of `size`.
 */
const chunk = (collection, size) => {
  const chunks = [];
  const total = Math.ceil(collection.length / size);
  for (let index = 0; index < total; index += 1) {
    const start = index * size;
    chunks.push(collection.slice(start, start + size));
  }
  return chunks;
};
1119
//#endregion
//#region src/version.ts
// CommonJS-style require, used only to resolve the package.json path from ESM.
const require = createRequire(import.meta.url);
/**
 * Get the version of the @argos-ci/core package.
 * Returns a "name@version" identifier (version read from package.json),
 * sent to the API as the SDK identifier.
 */
async function getArgosCoreSDKIdentifier() {
  return `@argos-ci/core@${await readVersionFromPackage(require.resolve("@argos-ci/core/package.json"))}`;
}
1128
//#endregion
//#region src/mime-type.ts
/**
 * Get the mime type of a snapshot file based on its extension.
 * @throws when the extension is unknown to the mime database.
 */
function getSnapshotMimeType(filepath) {
  const detected = mime.lookup(filepath);
  if (!detected) throw new Error(`Unable to determine snapshot file type for: ${filepath}`);
  return detected;
}
1138
//#endregion
//#region src/skip.ts
/**
 * Mark a build as skipped.
 * Creates a build flagged `skipped: true` with no screenshots or traces.
 */
async function skip(params) {
  const [config, argosSdk] = await Promise.all([getConfigFromOptions(params), getArgosCoreSDKIdentifier()]);
  const apiClient = createClient({
    baseUrl: config.apiBaseUrl,
    authToken: getAuthToken(config)
  });
  const body = {
    commit: config.commit,
    branch: config.branch,
    name: config.buildName,
    mode: config.mode,
    prNumber: config.prNumber,
    prHeadCommit: config.prHeadCommit,
    referenceBranch: config.referenceBranch,
    referenceCommit: config.referenceCommit,
    argosSdk,
    ciProvider: config.ciProvider,
    runId: config.runId,
    runAttempt: config.runAttempt,
    skipped: true,
    screenshotKeys: [],
    pwTraceKeys: [],
    parentCommits: []
  };
  const response = await apiClient.POST("/builds", { body });
  if (response.error) throwAPIError(response.error);
  return { build: response.data.build };
}
1170
//#endregion
//#region src/upload.ts
/**
 * Size of the chunks used to upload screenshots to Argos.
 * Files are PUT in batches of this many concurrent requests.
 */
const CHUNK_SIZE = 10;
1176
/**
 * Upload screenshots to Argos.
 * Pipeline: resolve config → discover files → optimize/hash each snapshot →
 * create the build → PUT files to pre-signed URLs → finalize build metadata.
 * When the config says the build is skipped, delegates to `skip` instead.
 * @returns the created build and the processed snapshot descriptors.
 */
async function upload(params) {
  debug("Starting upload with params", params);
  const [config, argosSdk] = await Promise.all([getConfigFromOptions(params), getArgosCoreSDKIdentifier()]);
  const authToken = getAuthToken(config);
  const apiClient = createClient({
    baseUrl: config.apiBaseUrl,
    authToken
  });
  // Short-circuit: record a skipped build with no screenshots.
  if (config.skipped) {
    const { build } = await skip(params);
    return {
      build,
      screenshots: []
    };
  }
  // Preview URL formatter: explicit param wins over the configured base URL.
  const previewUrlFormatter = params.previewUrl ?? (config.previewBaseUrl ? { baseUrl: config.previewBaseUrl } : void 0);
  const globs = params.files ?? ["**/*.{png,jpg,jpeg}"];
  debug("Using config and files", config, globs);
  const files = await discoverSnapshots(globs, {
    root: params.root,
    ignore: params.ignore
  });
  debug("Found snapshots", files);
  // Optimize, hash, and read metadata for every discovered snapshot in parallel.
  const snapshots = await Promise.all(files.map(async (snapshot) => {
    const contentType = getSnapshotMimeType(snapshot.path);
    const [metadata, pwTracePath, optimizedPath] = await Promise.all([
      readMetadata(snapshot.path),
      getPlaywrightTracePath(snapshot.path),
      contentType.startsWith("image/") ? optimizeScreenshot(snapshot.path) : snapshot.path
    ]);
    const [hash, pwTraceHash] = await Promise.all([hashFile(optimizedPath), pwTracePath ? hashFile(pwTracePath) : null]);
    // `transient` metadata is consumed here and stripped before upload.
    const threshold = metadata?.transient?.threshold ?? null;
    const baseName = metadata?.transient?.baseName ?? null;
    const parentName = metadata?.transient?.parentName ?? null;
    if (metadata) {
      delete metadata.transient;
      if (metadata.url && previewUrlFormatter) metadata.previewUrl = formatPreviewUrl(metadata.url, previewUrlFormatter);
    }
    return {
      ...snapshot,
      hash,
      optimizedPath,
      metadata,
      threshold,
      baseName,
      parentName,
      pwTrace: pwTracePath && pwTraceHash ? {
        path: pwTracePath,
        hash: pwTraceHash
      } : null,
      contentType
    };
  }));
  debug("Fetch project");
  const projectResponse = await apiClient.GET("/project");
  if (projectResponse.error) throwAPIError(projectResponse.error);
  debug("Project fetched", projectResponse.data);
  const { defaultBaseBranch, hasRemoteContentAccess } = projectResponse.data;
  // Resolve the reference commit locally only when the server has no remote
  // content access; otherwise the server computes it itself.
  const referenceCommit = (() => {
    if (config.referenceCommit) {
      debug("Found reference commit in config", config.referenceCommit);
      return config.referenceCommit;
    }
    if (hasRemoteContentAccess) return null;
    const sha = getMergeBaseCommitSha({
      base: config.referenceBranch || config.prBaseBranch || defaultBaseBranch,
      head: config.branch
    });
    if (sha) debug("Found merge base", sha);
    else debug("No merge base found");
    return sha;
  })();
  // Parent commits of the reference commit, same remote-access caveat as above.
  const parentCommits = (() => {
    if (hasRemoteContentAccess) return null;
    if (referenceCommit) {
      const commits = listParentCommits({ sha: referenceCommit });
      if (commits) debug("Found parent commits", commits);
      else debug("No parent commits found");
      return commits;
    }
    return null;
  })();
  debug("Creating build");
  // Deduplicated content keys (hashes) for screenshots and Playwright traces.
  const [pwTraceKeys, snapshotKeys] = snapshots.reduce(([pwTraceKeys, snapshotKeys], snapshot) => {
    if (snapshot.pwTrace && !pwTraceKeys.includes(snapshot.pwTrace.hash)) pwTraceKeys.push(snapshot.pwTrace.hash);
    if (!snapshotKeys.includes(snapshot.hash)) snapshotKeys.push(snapshot.hash);
    return [pwTraceKeys, snapshotKeys];
  }, [[], []]);
  const createBuildResponse = await apiClient.POST("/builds", { body: {
    commit: config.commit,
    branch: config.branch,
    name: config.buildName,
    mode: config.mode,
    parallel: config.parallel,
    parallelNonce: config.parallelNonce,
    screenshotKeys: snapshotKeys,
    pwTraceKeys,
    prNumber: config.prNumber,
    prHeadCommit: config.prHeadCommit,
    referenceBranch: config.referenceBranch,
    referenceCommit,
    parentCommits,
    argosSdk,
    ciProvider: config.ciProvider,
    runId: config.runId,
    runAttempt: config.runAttempt,
    mergeQueue: config.mergeQueue,
    subset: config.subset
  } });
  if (createBuildResponse.error) throwAPIError(createBuildResponse.error);
  const result = createBuildResponse.data;
  debug("Got uploads url", result);
  // The server returns pre-signed PUT URLs only for keys it does not already
  // have; map each one back to the local file and upload.
  await uploadFilesToS3([...result.screenshots.map(({ key, putUrl }) => {
    const snapshot = snapshots.find((s) => s.hash === key);
    if (!snapshot) throw new Error(`Invariant: snapshot with hash ${key} not found`);
    return {
      url: putUrl,
      path: snapshot.optimizedPath,
      contentType: snapshot.contentType
    };
  }), ...result.pwTraces?.map(({ key, putUrl }) => {
    const snapshot = snapshots.find((s) => s.pwTrace && s.pwTrace.hash === key);
    if (!snapshot || !snapshot.pwTrace) throw new Error(`Invariant: trace with ${key} not found`);
    return {
      url: putUrl,
      path: snapshot.pwTrace.path,
      contentType: "application/json"
    };
  }) ?? []]);
  debug("Updating build");
  // Attach per-screenshot metadata now that all files are stored.
  const uploadBuildResponse = await apiClient.PUT("/builds/{buildId}", {
    params: { path: { buildId: result.build.id } },
    body: {
      screenshots: snapshots.map((snapshot) => ({
        key: snapshot.hash,
        name: snapshot.name,
        metadata: snapshot.metadata,
        pwTraceKey: snapshot.pwTrace?.hash ?? null,
        threshold: snapshot.threshold ?? config?.threshold ?? null,
        baseName: snapshot.baseName,
        parentName: snapshot.parentName,
        contentType: snapshot.contentType
      })),
      parallel: config.parallel,
      parallelTotal: config.parallelTotal,
      parallelIndex: config.parallelIndex,
      metadata: params.metadata
    }
  });
  if (uploadBuildResponse.error) throwAPIError(uploadBuildResponse.error);
  return {
    build: uploadBuildResponse.data.build,
    screenshots: snapshots
  };
}
1334
/**
 * Upload files to their pre-signed URLs, CHUNK_SIZE at a time.
 * Chunks run sequentially; files within a chunk upload concurrently.
 */
async function uploadFilesToS3(files) {
  debug(`Split files in chunks of ${CHUNK_SIZE}`);
  const batches = chunk(files, CHUNK_SIZE);
  debug(`Starting upload of ${batches.length} chunks`);
  for (const [index, batch] of batches.entries()) {
    debug(`Uploading chunk ${index + 1}/${batches.length}`);
    const timeLabel = `Chunk ${index + 1}/${batches.length}`;
    debugTime(timeLabel);
    if (!batch) throw new Error(`Invariant: chunk ${index} is empty`);
    await Promise.all(batch.map(({ url, path, contentType }) => uploadFile({
      url,
      path,
      contentType
    })));
    debugTimeEnd(timeLabel);
  }
}
1354
/**
 * Format the preview URL.
 * A function formatter is applied directly; an object formatter rebases
 * the URL's path, query and fragment onto `formatter.baseUrl`.
 */
function formatPreviewUrl(url, formatter) {
  if (typeof formatter === "function") return formatter(url);
  const { pathname, search, hash } = new URL(url);
  return new URL(`${pathname}${search}${hash}`, formatter.baseUrl).href;
}
1362
+ //#endregion
1363
+ export { finalize, getConfigFromOptions, readConfig, skip, upload };