@argos-ci/core 2.9.2 → 2.10.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/dist/index.cjs +11 -12
- package/dist/index.d.ts +140 -23
- package/dist/index.js +1032 -0
- package/package.json +11 -12
- package/dist/index.mjs +0 -1039
package/dist/index.mjs
DELETED
|
@@ -1,1039 +0,0 @@
|
|
|
1
|
-
import { createClient, throwAPIError } from '@argos-ci/api-client';
|
|
2
|
-
import convict from 'convict';
|
|
3
|
-
import { execSync } from 'node:child_process';
|
|
4
|
-
import { existsSync, readFileSync, createReadStream } from 'node:fs';
|
|
5
|
-
import axios from 'axios';
|
|
6
|
-
import createDebug from 'debug';
|
|
7
|
-
import { resolve } from 'node:path';
|
|
8
|
-
import glob from 'fast-glob';
|
|
9
|
-
import { promisify } from 'node:util';
|
|
10
|
-
import sharp from 'sharp';
|
|
11
|
-
import tmp from 'tmp';
|
|
12
|
-
import { createHash } from 'node:crypto';
|
|
13
|
-
import { readFile } from 'node:fs/promises';
|
|
14
|
-
import { readVersionFromPackage, readMetadata, getPlaywrightTracePath } from '@argos-ci/util';
|
|
15
|
-
import { createRequire } from 'node:module';
|
|
16
|
-
|
|
17
|
-
/**
|
|
18
|
-
* Check if the current directory is a git repository.
|
|
19
|
-
*/ function checkIsGitRepository() {
|
|
20
|
-
try {
|
|
21
|
-
return execSync("git rev-parse --is-inside-work-tree").toString().trim() === "true";
|
|
22
|
-
} catch {
|
|
23
|
-
return false;
|
|
24
|
-
}
|
|
25
|
-
}
|
|
26
|
-
/**
|
|
27
|
-
* Returns the head commit.
|
|
28
|
-
*/ function head() {
|
|
29
|
-
try {
|
|
30
|
-
return execSync("git rev-parse HEAD").toString().trim();
|
|
31
|
-
} catch {
|
|
32
|
-
return null;
|
|
33
|
-
}
|
|
34
|
-
}
|
|
35
|
-
/**
|
|
36
|
-
* Returns the current branch.
|
|
37
|
-
*/ function branch() {
|
|
38
|
-
try {
|
|
39
|
-
const headRef = execSync("git rev-parse --abbrev-ref HEAD").toString().trim();
|
|
40
|
-
if (headRef === "HEAD") {
|
|
41
|
-
return null;
|
|
42
|
-
}
|
|
43
|
-
return headRef;
|
|
44
|
-
} catch {
|
|
45
|
-
return null;
|
|
46
|
-
}
|
|
47
|
-
}
|
|
48
|
-
function getMergeBaseCommitShaWithDepth(input) {
|
|
49
|
-
try {
|
|
50
|
-
execSync(`git fetch --update-head-ok --depth ${input.depth} origin ${input.head}:${input.head}`);
|
|
51
|
-
execSync(`git fetch --update-head-ok --depth ${input.depth} origin ${input.base}:${input.base}`);
|
|
52
|
-
const mergeBase = execSync(`git merge-base ${input.head} ${input.base}`).toString().trim();
|
|
53
|
-
return mergeBase || null;
|
|
54
|
-
} catch {
|
|
55
|
-
return null;
|
|
56
|
-
}
|
|
57
|
-
}
|
|
58
|
-
function getMergeBaseCommitSha$1(input) {
|
|
59
|
-
let depth = 200;
|
|
60
|
-
while(depth < 1000){
|
|
61
|
-
const mergeBase = getMergeBaseCommitShaWithDepth({
|
|
62
|
-
depth,
|
|
63
|
-
...input
|
|
64
|
-
});
|
|
65
|
-
if (mergeBase) {
|
|
66
|
-
return mergeBase;
|
|
67
|
-
}
|
|
68
|
-
depth += 200;
|
|
69
|
-
}
|
|
70
|
-
return null;
|
|
71
|
-
}
|
|
72
|
-
function listParentCommits$1(input) {
|
|
73
|
-
try {
|
|
74
|
-
execSync(`git fetch --depth=200 origin ${input.sha}`);
|
|
75
|
-
const raw = execSync(`git log --format="%H" --max-count=200 ${input.sha}`);
|
|
76
|
-
const shas = raw.toString().trim().split("\n");
|
|
77
|
-
return shas;
|
|
78
|
-
} catch {
|
|
79
|
-
return null;
|
|
80
|
-
}
|
|
81
|
-
}
|
|
82
|
-
|
|
83
|
-
const getPrNumber$2 = ({ env })=>{
|
|
84
|
-
return env.BITRISE_PULL_REQUEST ? Number(env.BITRISE_PULL_REQUEST) : null;
|
|
85
|
-
};
|
|
86
|
-
const service$7 = {
|
|
87
|
-
name: "Bitrise",
|
|
88
|
-
key: "bitrise",
|
|
89
|
-
detect: ({ env })=>Boolean(env.BITRISE_IO),
|
|
90
|
-
config: ({ env })=>{
|
|
91
|
-
return {
|
|
92
|
-
commit: env.BITRISE_GIT_COMMIT || null,
|
|
93
|
-
branch: env.BITRISE_GIT_BRANCH || null,
|
|
94
|
-
owner: env.BITRISEIO_GIT_REPOSITORY_OWNER || null,
|
|
95
|
-
repository: env.BITRISEIO_GIT_REPOSITORY_SLUG || null,
|
|
96
|
-
jobId: null,
|
|
97
|
-
runId: null,
|
|
98
|
-
runAttempt: null,
|
|
99
|
-
prNumber: getPrNumber$2({
|
|
100
|
-
env
|
|
101
|
-
}),
|
|
102
|
-
prHeadCommit: null,
|
|
103
|
-
prBaseBranch: null,
|
|
104
|
-
nonce: env.BITRISEIO_PIPELINE_ID || null
|
|
105
|
-
};
|
|
106
|
-
},
|
|
107
|
-
getMergeBaseCommitSha: getMergeBaseCommitSha$1,
|
|
108
|
-
listParentCommits: listParentCommits$1
|
|
109
|
-
};
|
|
110
|
-
|
|
111
|
-
const service$6 = {
|
|
112
|
-
name: "Buildkite",
|
|
113
|
-
key: "buildkite",
|
|
114
|
-
detect: ({ env })=>Boolean(env.BUILDKITE),
|
|
115
|
-
config: ({ env })=>{
|
|
116
|
-
return {
|
|
117
|
-
// Buildkite doesn't work well so we fallback to git to ensure we have commit and branch
|
|
118
|
-
commit: env.BUILDKITE_COMMIT || head() || null,
|
|
119
|
-
branch: env.BUILDKITE_BRANCH || branch() || null,
|
|
120
|
-
owner: env.BUILDKITE_ORGANIZATION_SLUG || null,
|
|
121
|
-
repository: env.BUILDKITE_PROJECT_SLUG || null,
|
|
122
|
-
jobId: null,
|
|
123
|
-
runId: null,
|
|
124
|
-
runAttempt: null,
|
|
125
|
-
prNumber: env.BUILDKITE_PULL_REQUEST ? Number(env.BUILDKITE_PULL_REQUEST) : null,
|
|
126
|
-
prHeadCommit: null,
|
|
127
|
-
prBaseBranch: null,
|
|
128
|
-
nonce: env.BUILDKITE_BUILD_ID || null
|
|
129
|
-
};
|
|
130
|
-
},
|
|
131
|
-
getMergeBaseCommitSha: getMergeBaseCommitSha$1,
|
|
132
|
-
listParentCommits: listParentCommits$1
|
|
133
|
-
};
|
|
134
|
-
|
|
135
|
-
const service$5 = {
|
|
136
|
-
name: "Heroku",
|
|
137
|
-
key: "heroku",
|
|
138
|
-
detect: ({ env })=>Boolean(env.HEROKU_TEST_RUN_ID),
|
|
139
|
-
config: ({ env })=>({
|
|
140
|
-
commit: env.HEROKU_TEST_RUN_COMMIT_VERSION || null,
|
|
141
|
-
branch: env.HEROKU_TEST_RUN_BRANCH || null,
|
|
142
|
-
owner: null,
|
|
143
|
-
repository: null,
|
|
144
|
-
jobId: null,
|
|
145
|
-
runId: null,
|
|
146
|
-
runAttempt: null,
|
|
147
|
-
prNumber: null,
|
|
148
|
-
prHeadCommit: null,
|
|
149
|
-
prBaseBranch: null,
|
|
150
|
-
nonce: env.HEROKU_TEST_RUN_ID || null
|
|
151
|
-
}),
|
|
152
|
-
getMergeBaseCommitSha: getMergeBaseCommitSha$1,
|
|
153
|
-
listParentCommits: listParentCommits$1
|
|
154
|
-
};
|
|
155
|
-
|
|
156
|
-
const KEY = "@argos-ci/core";
|
|
157
|
-
const debug = createDebug(KEY);
|
|
158
|
-
const debugTime = (arg)=>{
|
|
159
|
-
const enabled = createDebug.enabled(KEY);
|
|
160
|
-
if (enabled) {
|
|
161
|
-
console.time(arg);
|
|
162
|
-
}
|
|
163
|
-
};
|
|
164
|
-
const debugTimeEnd = (arg)=>{
|
|
165
|
-
const enabled = createDebug.enabled(KEY);
|
|
166
|
-
if (enabled) {
|
|
167
|
-
console.timeEnd(arg);
|
|
168
|
-
}
|
|
169
|
-
};
|
|
170
|
-
|
|
171
|
-
/**
|
|
172
|
-
* Get a pull request from a head sha.
|
|
173
|
-
* Fetch the last 30 pull requests sorted by updated date
|
|
174
|
-
* then try to find the one that matches the head sha.
|
|
175
|
-
* If no pull request is found, return null.
|
|
176
|
-
*/ async function getPullRequestFromHeadSha({ env }, sha) {
|
|
177
|
-
debug("Fetching pull request number from head sha", sha);
|
|
178
|
-
if (!env.GITHUB_REPOSITORY) {
|
|
179
|
-
throw new Error("GITHUB_REPOSITORY is missing");
|
|
180
|
-
}
|
|
181
|
-
if (!env.GITHUB_TOKEN) {
|
|
182
|
-
// For security reasons, people doesn't want to expose their GITHUB_TOKEN
|
|
183
|
-
// That's why we allow to disable this warning.
|
|
184
|
-
if (!env.DISABLE_GITHUB_TOKEN_WARNING) {
|
|
185
|
-
console.log(`
|
|
186
|
-
Running argos from a "deployment_status" event requires a GITHUB_TOKEN.
|
|
187
|
-
Please add \`GITHUB_TOKEN: \${{ secrets.GITHUB_TOKEN }}\` as environment variable.
|
|
188
|
-
|
|
189
|
-
Read more at https://argos-ci.com/docs/run-on-preview-deployment
|
|
190
|
-
|
|
191
|
-
To disable this warning, add \`DISABLE_GITHUB_TOKEN_WARNING: true\` as environment variable.
|
|
192
|
-
`.trim());
|
|
193
|
-
}
|
|
194
|
-
return null;
|
|
195
|
-
}
|
|
196
|
-
try {
|
|
197
|
-
const result = await axios.get(`https://api.github.com/repos/${env.GITHUB_REPOSITORY}/pulls`, {
|
|
198
|
-
params: {
|
|
199
|
-
state: "open",
|
|
200
|
-
sort: "updated",
|
|
201
|
-
per_page: 30,
|
|
202
|
-
page: 1
|
|
203
|
-
},
|
|
204
|
-
headers: {
|
|
205
|
-
Accept: "application/vnd.github+json",
|
|
206
|
-
Authorization: `Bearer ${process.env.GITHUB_TOKEN}`,
|
|
207
|
-
"X-GitHub-Api-Version": "2022-11-28"
|
|
208
|
-
}
|
|
209
|
-
});
|
|
210
|
-
if (result.data.length === 0) {
|
|
211
|
-
debug("Aborting because no pull request found");
|
|
212
|
-
return null;
|
|
213
|
-
}
|
|
214
|
-
const matchingPr = result.data.find((pr)=>pr.head.sha === sha);
|
|
215
|
-
if (matchingPr) {
|
|
216
|
-
debug("Pull request found", matchingPr);
|
|
217
|
-
return matchingPr;
|
|
218
|
-
}
|
|
219
|
-
debug("Aborting because no pull request found");
|
|
220
|
-
return null;
|
|
221
|
-
} catch (error) {
|
|
222
|
-
debug("Error while fetching pull request from head sha", error);
|
|
223
|
-
return null;
|
|
224
|
-
}
|
|
225
|
-
}
|
|
226
|
-
function getBranch(context, eventPayload) {
|
|
227
|
-
if (eventPayload?.pull_request?.head.ref) {
|
|
228
|
-
return eventPayload.pull_request.head.ref;
|
|
229
|
-
}
|
|
230
|
-
const { env } = context;
|
|
231
|
-
if (env.GITHUB_HEAD_REF) {
|
|
232
|
-
return env.GITHUB_HEAD_REF;
|
|
233
|
-
}
|
|
234
|
-
if (!env.GITHUB_REF) {
|
|
235
|
-
return null;
|
|
236
|
-
}
|
|
237
|
-
const branchRegex = /refs\/heads\/(.*)/;
|
|
238
|
-
const matches = branchRegex.exec(env.GITHUB_REF);
|
|
239
|
-
return matches?.[1] ?? null;
|
|
240
|
-
}
|
|
241
|
-
function getRepository$1({ env }) {
|
|
242
|
-
if (!env.GITHUB_REPOSITORY) return null;
|
|
243
|
-
return env.GITHUB_REPOSITORY.split("/")[1] || null;
|
|
244
|
-
}
|
|
245
|
-
function readEventPayload({ env }) {
|
|
246
|
-
if (!env.GITHUB_EVENT_PATH) return null;
|
|
247
|
-
if (!existsSync(env.GITHUB_EVENT_PATH)) return null;
|
|
248
|
-
return JSON.parse(readFileSync(env.GITHUB_EVENT_PATH, "utf-8"));
|
|
249
|
-
}
|
|
250
|
-
const service$4 = {
|
|
251
|
-
name: "GitHub Actions",
|
|
252
|
-
key: "github-actions",
|
|
253
|
-
detect: (context)=>Boolean(context.env.GITHUB_ACTIONS),
|
|
254
|
-
config: async (context)=>{
|
|
255
|
-
const { env } = context;
|
|
256
|
-
const payload = readEventPayload(context);
|
|
257
|
-
const sha = process.env.GITHUB_SHA || null;
|
|
258
|
-
if (!sha) {
|
|
259
|
-
throw new Error(`GITHUB_SHA is missing`);
|
|
260
|
-
}
|
|
261
|
-
const commonConfig = {
|
|
262
|
-
commit: sha,
|
|
263
|
-
owner: env.GITHUB_REPOSITORY_OWNER || null,
|
|
264
|
-
repository: getRepository$1(context),
|
|
265
|
-
jobId: env.GITHUB_JOB || null,
|
|
266
|
-
runId: env.GITHUB_RUN_ID || null,
|
|
267
|
-
runAttempt: env.GITHUB_RUN_ATTEMPT ? Number(env.GITHUB_RUN_ATTEMPT) : null,
|
|
268
|
-
nonce: `${env.GITHUB_RUN_ID}-${env.GITHUB_RUN_ATTEMPT}` || null
|
|
269
|
-
};
|
|
270
|
-
// If the job is triggered by from a "deployment" or a "deployment_status"
|
|
271
|
-
if (payload?.deployment) {
|
|
272
|
-
debug("Deployment event detected");
|
|
273
|
-
// Try to find a relevant pull request for the sha
|
|
274
|
-
const pullRequest = await getPullRequestFromHeadSha(context, sha);
|
|
275
|
-
return {
|
|
276
|
-
...commonConfig,
|
|
277
|
-
// If no pull request is found, we fallback to the deployment environment as branch name
|
|
278
|
-
// Branch name is required to create a build but has no real impact on the build.
|
|
279
|
-
branch: pullRequest?.head.ref || payload.deployment.environment || null,
|
|
280
|
-
prNumber: pullRequest?.number || null,
|
|
281
|
-
prHeadCommit: pullRequest?.head.sha || null,
|
|
282
|
-
prBaseBranch: null
|
|
283
|
-
};
|
|
284
|
-
}
|
|
285
|
-
return {
|
|
286
|
-
...commonConfig,
|
|
287
|
-
branch: payload?.pull_request?.head.ref || getBranch(context, payload) || null,
|
|
288
|
-
prNumber: payload?.pull_request?.number || null,
|
|
289
|
-
prHeadCommit: payload?.pull_request?.head.sha ?? null,
|
|
290
|
-
prBaseBranch: payload?.pull_request?.base.ref ?? null
|
|
291
|
-
};
|
|
292
|
-
},
|
|
293
|
-
getMergeBaseCommitSha: getMergeBaseCommitSha$1,
|
|
294
|
-
listParentCommits: listParentCommits$1
|
|
295
|
-
};
|
|
296
|
-
|
|
297
|
-
const getPrNumber$1 = ({ env })=>{
|
|
298
|
-
const branchRegex = /pull\/(\d+)/;
|
|
299
|
-
const matches = branchRegex.exec(env.CIRCLE_PULL_REQUEST || "");
|
|
300
|
-
if (matches) {
|
|
301
|
-
return Number(matches[1]);
|
|
302
|
-
}
|
|
303
|
-
return null;
|
|
304
|
-
};
|
|
305
|
-
const service$3 = {
|
|
306
|
-
name: "CircleCI",
|
|
307
|
-
key: "circleci",
|
|
308
|
-
detect: ({ env })=>Boolean(env.CIRCLECI),
|
|
309
|
-
config: ({ env })=>{
|
|
310
|
-
return {
|
|
311
|
-
commit: env.CIRCLE_SHA1 || null,
|
|
312
|
-
branch: env.CIRCLE_BRANCH || null,
|
|
313
|
-
owner: env.CIRCLE_PROJECT_USERNAME || null,
|
|
314
|
-
repository: env.CIRCLE_PROJECT_REPONAME || null,
|
|
315
|
-
jobId: null,
|
|
316
|
-
runId: null,
|
|
317
|
-
runAttempt: null,
|
|
318
|
-
prNumber: getPrNumber$1({
|
|
319
|
-
env
|
|
320
|
-
}),
|
|
321
|
-
prHeadCommit: null,
|
|
322
|
-
prBaseBranch: null,
|
|
323
|
-
nonce: env.CIRCLE_WORKFLOW_ID || env.CIRCLE_BUILD_NUM || null
|
|
324
|
-
};
|
|
325
|
-
},
|
|
326
|
-
getMergeBaseCommitSha: getMergeBaseCommitSha$1,
|
|
327
|
-
listParentCommits: listParentCommits$1
|
|
328
|
-
};
|
|
329
|
-
|
|
330
|
-
const getOwner = ({ env })=>{
|
|
331
|
-
if (!env.TRAVIS_REPO_SLUG) return null;
|
|
332
|
-
return env.TRAVIS_REPO_SLUG.split("/")[0] || null;
|
|
333
|
-
};
|
|
334
|
-
const getRepository = ({ env })=>{
|
|
335
|
-
if (!env.TRAVIS_REPO_SLUG) return null;
|
|
336
|
-
return env.TRAVIS_REPO_SLUG.split("/")[1] || null;
|
|
337
|
-
};
|
|
338
|
-
const getPrNumber = ({ env })=>{
|
|
339
|
-
if (env.TRAVIS_PULL_REQUEST) return Number(env.TRAVIS_PULL_REQUEST);
|
|
340
|
-
return null;
|
|
341
|
-
};
|
|
342
|
-
const service$2 = {
|
|
343
|
-
name: "Travis CI",
|
|
344
|
-
key: "travis",
|
|
345
|
-
detect: ({ env })=>Boolean(env.TRAVIS),
|
|
346
|
-
config: (ctx)=>{
|
|
347
|
-
const { env } = ctx;
|
|
348
|
-
return {
|
|
349
|
-
commit: env.TRAVIS_COMMIT || null,
|
|
350
|
-
branch: env.TRAVIS_BRANCH || null,
|
|
351
|
-
owner: getOwner(ctx),
|
|
352
|
-
repository: getRepository(ctx),
|
|
353
|
-
jobId: null,
|
|
354
|
-
runId: null,
|
|
355
|
-
runAttempt: null,
|
|
356
|
-
prNumber: getPrNumber(ctx),
|
|
357
|
-
prHeadCommit: null,
|
|
358
|
-
prBaseBranch: null,
|
|
359
|
-
nonce: env.TRAVIS_BUILD_ID || null
|
|
360
|
-
};
|
|
361
|
-
},
|
|
362
|
-
getMergeBaseCommitSha: getMergeBaseCommitSha$1,
|
|
363
|
-
listParentCommits: listParentCommits$1
|
|
364
|
-
};
|
|
365
|
-
|
|
366
|
-
const service$1 = {
|
|
367
|
-
name: "GitLab",
|
|
368
|
-
key: "gitlab",
|
|
369
|
-
detect: ({ env })=>env.GITLAB_CI === "true",
|
|
370
|
-
config: ({ env })=>{
|
|
371
|
-
return {
|
|
372
|
-
commit: env.CI_COMMIT_SHA || null,
|
|
373
|
-
branch: env.CI_COMMIT_REF_NAME || null,
|
|
374
|
-
owner: null,
|
|
375
|
-
repository: null,
|
|
376
|
-
jobId: null,
|
|
377
|
-
runId: null,
|
|
378
|
-
runAttempt: null,
|
|
379
|
-
prNumber: null,
|
|
380
|
-
prHeadCommit: null,
|
|
381
|
-
prBaseBranch: null,
|
|
382
|
-
nonce: env.CI_PIPELINE_ID || null
|
|
383
|
-
};
|
|
384
|
-
},
|
|
385
|
-
getMergeBaseCommitSha: getMergeBaseCommitSha$1,
|
|
386
|
-
listParentCommits: listParentCommits$1
|
|
387
|
-
};
|
|
388
|
-
|
|
389
|
-
const service = {
|
|
390
|
-
name: "Git",
|
|
391
|
-
key: "git",
|
|
392
|
-
detect: ()=>checkIsGitRepository(),
|
|
393
|
-
config: ()=>{
|
|
394
|
-
return {
|
|
395
|
-
commit: head() || null,
|
|
396
|
-
branch: branch() || null,
|
|
397
|
-
owner: null,
|
|
398
|
-
repository: null,
|
|
399
|
-
jobId: null,
|
|
400
|
-
runId: null,
|
|
401
|
-
runAttempt: null,
|
|
402
|
-
prNumber: null,
|
|
403
|
-
prHeadCommit: null,
|
|
404
|
-
prBaseBranch: null,
|
|
405
|
-
nonce: null
|
|
406
|
-
};
|
|
407
|
-
},
|
|
408
|
-
getMergeBaseCommitSha: getMergeBaseCommitSha$1,
|
|
409
|
-
listParentCommits: listParentCommits$1
|
|
410
|
-
};
|
|
411
|
-
|
|
412
|
-
// List of services ordered by usage
|
|
413
|
-
// "git" must be the last one
|
|
414
|
-
const services = [
|
|
415
|
-
service$5,
|
|
416
|
-
service$4,
|
|
417
|
-
service$3,
|
|
418
|
-
service$2,
|
|
419
|
-
service$6,
|
|
420
|
-
service$1,
|
|
421
|
-
service$7,
|
|
422
|
-
service
|
|
423
|
-
];
|
|
424
|
-
/**
|
|
425
|
-
* Create the context for the CI service detection.
|
|
426
|
-
*/ function createContext() {
|
|
427
|
-
return {
|
|
428
|
-
env: process.env
|
|
429
|
-
};
|
|
430
|
-
}
|
|
431
|
-
/**
|
|
432
|
-
* Get the CI service that is currently running.
|
|
433
|
-
*/ function getCiService(context) {
|
|
434
|
-
return services.find((service)=>service.detect(context));
|
|
435
|
-
}
|
|
436
|
-
/**
|
|
437
|
-
* Get the merge base commit.
|
|
438
|
-
*/ function getMergeBaseCommitSha(input) {
|
|
439
|
-
const context = createContext();
|
|
440
|
-
const service = getCiService(context);
|
|
441
|
-
if (!service) {
|
|
442
|
-
return null;
|
|
443
|
-
}
|
|
444
|
-
return service.getMergeBaseCommitSha(input, context);
|
|
445
|
-
}
|
|
446
|
-
/**
|
|
447
|
-
* Get the merge base commit.
|
|
448
|
-
*/ function listParentCommits(input) {
|
|
449
|
-
const context = createContext();
|
|
450
|
-
const service = getCiService(context);
|
|
451
|
-
if (!service) {
|
|
452
|
-
return null;
|
|
453
|
-
}
|
|
454
|
-
return service.listParentCommits(input, context);
|
|
455
|
-
}
|
|
456
|
-
/**
|
|
457
|
-
* Get the CI environment.
|
|
458
|
-
*/ async function getCiEnvironment() {
|
|
459
|
-
const context = createContext();
|
|
460
|
-
debug("Detecting CI environment", context);
|
|
461
|
-
const service = getCiService(context);
|
|
462
|
-
// Service matched
|
|
463
|
-
if (service) {
|
|
464
|
-
debug("Internal service matched", service.name);
|
|
465
|
-
const variables = await service.config(context);
|
|
466
|
-
const ciEnvironment = {
|
|
467
|
-
name: service.name,
|
|
468
|
-
key: service.key,
|
|
469
|
-
...variables
|
|
470
|
-
};
|
|
471
|
-
debug("CI environment", ciEnvironment);
|
|
472
|
-
return ciEnvironment;
|
|
473
|
-
}
|
|
474
|
-
return null;
|
|
475
|
-
}
|
|
476
|
-
|
|
477
|
-
const mustBeApiBaseUrl = (value)=>{
|
|
478
|
-
const URL_REGEX = /https?:\/\/(www\.)?[-a-zA-Z0-9@:%._\+~#=]{1,256}\.[a-zA-Z0-9()]{1,6}\b([-a-zA-Z0-9()@:%_\+.~#?&//=]*)/;
|
|
479
|
-
if (!URL_REGEX.test(value)) {
|
|
480
|
-
throw new Error("Invalid Argos API base URL");
|
|
481
|
-
}
|
|
482
|
-
};
|
|
483
|
-
const mustBeCommit = (value)=>{
|
|
484
|
-
const SHA1_REGEX = /^[0-9a-f]{40}$/;
|
|
485
|
-
if (!SHA1_REGEX.test(value)) {
|
|
486
|
-
const SHA1_SHORT_REGEX = /^[0-9a-f]{7}$/;
|
|
487
|
-
if (SHA1_SHORT_REGEX.test(value)) {
|
|
488
|
-
throw new Error("Short SHA1 is not allowed");
|
|
489
|
-
}
|
|
490
|
-
throw new Error("Invalid commit");
|
|
491
|
-
}
|
|
492
|
-
};
|
|
493
|
-
const mustBeArgosToken = (value)=>{
|
|
494
|
-
if (value && value.length !== 40) {
|
|
495
|
-
throw new Error("Invalid Argos repository token (must be 40 characters)");
|
|
496
|
-
}
|
|
497
|
-
};
|
|
498
|
-
convict.addFormat({
|
|
499
|
-
name: "float-percent",
|
|
500
|
-
validate: function(val) {
|
|
501
|
-
if (val !== 0 && (!val || val > 1 || val < 0)) {
|
|
502
|
-
throw new Error("Must be a float between 0 and 1, inclusive.");
|
|
503
|
-
}
|
|
504
|
-
},
|
|
505
|
-
coerce: function(val) {
|
|
506
|
-
return parseFloat(val);
|
|
507
|
-
}
|
|
508
|
-
});
|
|
509
|
-
const schema = {
|
|
510
|
-
apiBaseUrl: {
|
|
511
|
-
env: "ARGOS_API_BASE_URL",
|
|
512
|
-
default: "https://api.argos-ci.com/v2/",
|
|
513
|
-
format: mustBeApiBaseUrl
|
|
514
|
-
},
|
|
515
|
-
commit: {
|
|
516
|
-
env: "ARGOS_COMMIT",
|
|
517
|
-
default: null,
|
|
518
|
-
format: mustBeCommit
|
|
519
|
-
},
|
|
520
|
-
branch: {
|
|
521
|
-
env: "ARGOS_BRANCH",
|
|
522
|
-
default: null,
|
|
523
|
-
format: String
|
|
524
|
-
},
|
|
525
|
-
token: {
|
|
526
|
-
env: "ARGOS_TOKEN",
|
|
527
|
-
default: null,
|
|
528
|
-
format: mustBeArgosToken
|
|
529
|
-
},
|
|
530
|
-
buildName: {
|
|
531
|
-
env: "ARGOS_BUILD_NAME",
|
|
532
|
-
default: null,
|
|
533
|
-
format: String,
|
|
534
|
-
nullable: true
|
|
535
|
-
},
|
|
536
|
-
mode: {
|
|
537
|
-
env: "ARGOS_MODE",
|
|
538
|
-
format: [
|
|
539
|
-
"ci",
|
|
540
|
-
"monitoring"
|
|
541
|
-
],
|
|
542
|
-
default: null,
|
|
543
|
-
nullable: true
|
|
544
|
-
},
|
|
545
|
-
prNumber: {
|
|
546
|
-
env: "ARGOS_PR_NUMBER",
|
|
547
|
-
format: Number,
|
|
548
|
-
default: null,
|
|
549
|
-
nullable: true
|
|
550
|
-
},
|
|
551
|
-
prHeadCommit: {
|
|
552
|
-
env: "ARGOS_PR_HEAD_COMMIT",
|
|
553
|
-
format: String,
|
|
554
|
-
default: null,
|
|
555
|
-
nullable: true
|
|
556
|
-
},
|
|
557
|
-
prBaseBranch: {
|
|
558
|
-
env: "ARGOS_PR_BASE_BRANCH",
|
|
559
|
-
format: String,
|
|
560
|
-
default: null,
|
|
561
|
-
nullable: true
|
|
562
|
-
},
|
|
563
|
-
parallel: {
|
|
564
|
-
env: "ARGOS_PARALLEL",
|
|
565
|
-
default: false,
|
|
566
|
-
format: Boolean
|
|
567
|
-
},
|
|
568
|
-
parallelNonce: {
|
|
569
|
-
env: "ARGOS_PARALLEL_NONCE",
|
|
570
|
-
format: String,
|
|
571
|
-
default: null,
|
|
572
|
-
nullable: true
|
|
573
|
-
},
|
|
574
|
-
parallelIndex: {
|
|
575
|
-
env: "ARGOS_PARALLEL_INDEX",
|
|
576
|
-
format: "nat",
|
|
577
|
-
default: null,
|
|
578
|
-
nullable: true
|
|
579
|
-
},
|
|
580
|
-
parallelTotal: {
|
|
581
|
-
env: "ARGOS_PARALLEL_TOTAL",
|
|
582
|
-
format: "int",
|
|
583
|
-
default: null,
|
|
584
|
-
nullable: true
|
|
585
|
-
},
|
|
586
|
-
referenceBranch: {
|
|
587
|
-
env: "ARGOS_REFERENCE_BRANCH",
|
|
588
|
-
format: String,
|
|
589
|
-
default: null,
|
|
590
|
-
nullable: true
|
|
591
|
-
},
|
|
592
|
-
referenceCommit: {
|
|
593
|
-
env: "ARGOS_REFERENCE_COMMIT",
|
|
594
|
-
format: String,
|
|
595
|
-
default: null,
|
|
596
|
-
nullable: true
|
|
597
|
-
},
|
|
598
|
-
jobId: {
|
|
599
|
-
format: String,
|
|
600
|
-
default: null,
|
|
601
|
-
nullable: true
|
|
602
|
-
},
|
|
603
|
-
runId: {
|
|
604
|
-
format: String,
|
|
605
|
-
default: null,
|
|
606
|
-
nullable: true
|
|
607
|
-
},
|
|
608
|
-
runAttempt: {
|
|
609
|
-
format: "nat",
|
|
610
|
-
default: null,
|
|
611
|
-
nullable: true
|
|
612
|
-
},
|
|
613
|
-
owner: {
|
|
614
|
-
format: String,
|
|
615
|
-
default: null,
|
|
616
|
-
nullable: true
|
|
617
|
-
},
|
|
618
|
-
repository: {
|
|
619
|
-
format: String,
|
|
620
|
-
default: null,
|
|
621
|
-
nullable: true
|
|
622
|
-
},
|
|
623
|
-
ciProvider: {
|
|
624
|
-
format: String,
|
|
625
|
-
default: null,
|
|
626
|
-
nullable: true
|
|
627
|
-
},
|
|
628
|
-
threshold: {
|
|
629
|
-
env: "ARGOS_THRESHOLD",
|
|
630
|
-
format: "float-percent",
|
|
631
|
-
default: null,
|
|
632
|
-
nullable: true
|
|
633
|
-
}
|
|
634
|
-
};
|
|
635
|
-
const createConfig = ()=>{
|
|
636
|
-
return convict(schema, {
|
|
637
|
-
args: []
|
|
638
|
-
});
|
|
639
|
-
};
|
|
640
|
-
async function readConfig(options = {}) {
|
|
641
|
-
const config = createConfig();
|
|
642
|
-
const ciEnv = await getCiEnvironment();
|
|
643
|
-
config.load({
|
|
644
|
-
apiBaseUrl: options.apiBaseUrl || config.get("apiBaseUrl"),
|
|
645
|
-
commit: options.commit || config.get("commit") || ciEnv?.commit || null,
|
|
646
|
-
branch: options.branch || config.get("branch") || ciEnv?.branch || null,
|
|
647
|
-
token: options.token || config.get("token") || null,
|
|
648
|
-
buildName: options.buildName || config.get("buildName") || null,
|
|
649
|
-
prNumber: options.prNumber || config.get("prNumber") || ciEnv?.prNumber || null,
|
|
650
|
-
prHeadCommit: config.get("prHeadCommit") || ciEnv?.prHeadCommit || null,
|
|
651
|
-
prBaseBranch: config.get("prBaseBranch") || ciEnv?.prBaseBranch || null,
|
|
652
|
-
referenceBranch: options.referenceBranch || config.get("referenceBranch") || null,
|
|
653
|
-
referenceCommit: options.referenceCommit || config.get("referenceCommit") || null,
|
|
654
|
-
owner: ciEnv?.owner || null,
|
|
655
|
-
repository: ciEnv?.repository || null,
|
|
656
|
-
jobId: ciEnv?.jobId || null,
|
|
657
|
-
runId: ciEnv?.runId || null,
|
|
658
|
-
runAttempt: ciEnv?.runAttempt || null,
|
|
659
|
-
parallel: options.parallel ?? config.get("parallel") ?? false,
|
|
660
|
-
parallelNonce: options.parallelNonce || config.get("parallelNonce") || ciEnv?.nonce || null,
|
|
661
|
-
parallelTotal: options.parallelTotal ?? config.get("parallelTotal") ?? null,
|
|
662
|
-
parallelIndex: options.parallelIndex ?? config.get("parallelIndex") ?? null,
|
|
663
|
-
mode: options.mode || config.get("mode") || null,
|
|
664
|
-
ciProvider: ciEnv?.key || null
|
|
665
|
-
});
|
|
666
|
-
config.validate();
|
|
667
|
-
return config.get();
|
|
668
|
-
}
|
|
669
|
-
|
|
670
|
-
const discoverScreenshots = async (patterns, { root = process.cwd(), ignore } = {})=>{
|
|
671
|
-
debug(`Discovering screenshots with patterns: ${Array.isArray(patterns) ? patterns.join(", ") : patterns} in ${root}`);
|
|
672
|
-
const matches = await glob(patterns, {
|
|
673
|
-
onlyFiles: true,
|
|
674
|
-
ignore,
|
|
675
|
-
cwd: root
|
|
676
|
-
});
|
|
677
|
-
return matches.map((match)=>{
|
|
678
|
-
debug(`Found screenshot: ${match}`);
|
|
679
|
-
const path = resolve(root, match);
|
|
680
|
-
return {
|
|
681
|
-
name: match,
|
|
682
|
-
path
|
|
683
|
-
};
|
|
684
|
-
});
|
|
685
|
-
};
|
|
686
|
-
|
|
687
|
-
const tmpFile = promisify(tmp.file);
|
|
688
|
-
const optimizeScreenshot = async (filepath)=>{
|
|
689
|
-
try {
|
|
690
|
-
const resultFilePath = await tmpFile();
|
|
691
|
-
await sharp(filepath).resize(2048, 20480, {
|
|
692
|
-
fit: "inside",
|
|
693
|
-
withoutEnlargement: true
|
|
694
|
-
}).png({
|
|
695
|
-
force: true
|
|
696
|
-
}).toFile(resultFilePath);
|
|
697
|
-
return resultFilePath;
|
|
698
|
-
} catch (error) {
|
|
699
|
-
const message = error instanceof Error ? error.message : "Unknown Error";
|
|
700
|
-
throw new Error(`Error while processing image (${filepath}): ${message}`, {
|
|
701
|
-
cause: error
|
|
702
|
-
});
|
|
703
|
-
}
|
|
704
|
-
};
|
|
705
|
-
|
|
706
|
-
const hashFile = async (filepath)=>{
|
|
707
|
-
const fileStream = createReadStream(filepath);
|
|
708
|
-
const hash = createHash("sha256");
|
|
709
|
-
await new Promise((resolve, reject)=>{
|
|
710
|
-
fileStream.on("error", reject);
|
|
711
|
-
hash.on("error", reject);
|
|
712
|
-
hash.on("finish", resolve);
|
|
713
|
-
fileStream.pipe(hash);
|
|
714
|
-
});
|
|
715
|
-
return hash.digest("hex");
|
|
716
|
-
};
|
|
717
|
-
|
|
718
|
-
const base64Encode = (obj)=>Buffer.from(JSON.stringify(obj), "utf8").toString("base64");
|
|
719
|
-
/**
|
|
720
|
-
* Get the authentication token.
|
|
721
|
-
*/ function getAuthToken({ token, ciProvider, owner, repository, jobId, runId, prNumber }) {
|
|
722
|
-
if (token) {
|
|
723
|
-
return token;
|
|
724
|
-
}
|
|
725
|
-
switch(ciProvider){
|
|
726
|
-
case "github-actions":
|
|
727
|
-
{
|
|
728
|
-
if (!owner || !repository || !jobId || !runId) {
|
|
729
|
-
throw new Error(`Automatic GitHub Actions variables detection failed. Please add the 'ARGOS_TOKEN'`);
|
|
730
|
-
}
|
|
731
|
-
return `tokenless-github-${base64Encode({
|
|
732
|
-
owner,
|
|
733
|
-
repository,
|
|
734
|
-
jobId,
|
|
735
|
-
runId,
|
|
736
|
-
prNumber
|
|
737
|
-
})}`;
|
|
738
|
-
}
|
|
739
|
-
default:
|
|
740
|
-
throw new Error("Missing Argos repository token 'ARGOS_TOKEN'");
|
|
741
|
-
}
|
|
742
|
-
}
|
|
743
|
-
|
|
744
|
-
const upload$1 = async (input)=>{
|
|
745
|
-
const file = await readFile(input.path);
|
|
746
|
-
await axios({
|
|
747
|
-
method: "PUT",
|
|
748
|
-
url: input.url,
|
|
749
|
-
data: file,
|
|
750
|
-
headers: {
|
|
751
|
-
"Content-Type": input.contentType
|
|
752
|
-
}
|
|
753
|
-
});
|
|
754
|
-
};
|
|
755
|
-
|
|
756
|
-
/**
|
|
757
|
-
* Split an array into chunks of a given size.
|
|
758
|
-
*/ const chunk = (collection, size)=>{
|
|
759
|
-
const result = [];
|
|
760
|
-
// add each chunk to the result
|
|
761
|
-
for(let x = 0; x < Math.ceil(collection.length / size); x++){
|
|
762
|
-
let start = x * size;
|
|
763
|
-
let end = start + size;
|
|
764
|
-
result.push(collection.slice(start, end));
|
|
765
|
-
}
|
|
766
|
-
return result;
|
|
767
|
-
};
|
|
768
|
-
|
|
769
|
-
const require = createRequire(import.meta.url);
|
|
770
|
-
/**
|
|
771
|
-
* Get the version of the @argos-ci/core package.
|
|
772
|
-
*/ async function getArgosCoreSDKIdentifier() {
|
|
773
|
-
const pkgPath = require.resolve("@argos-ci/core/package.json");
|
|
774
|
-
const version = await readVersionFromPackage(pkgPath);
|
|
775
|
-
return `@argos-ci/core@${version}`;
|
|
776
|
-
}
|
|
777
|
-
|
|
778
|
-
/**
|
|
779
|
-
* Size of the chunks used to upload screenshots to Argos.
|
|
780
|
-
*/ const CHUNK_SIZE = 10;
|
|
781
|
-
async function getConfigFromOptions({ parallel, ...options }) {
|
|
782
|
-
return readConfig({
|
|
783
|
-
...options,
|
|
784
|
-
parallel: Boolean(parallel),
|
|
785
|
-
parallelNonce: parallel ? parallel.nonce : null,
|
|
786
|
-
parallelTotal: parallel ? parallel.total : null,
|
|
787
|
-
parallelIndex: parallel ? parallel.index : null
|
|
788
|
-
});
|
|
789
|
-
}
|
|
790
|
-
async function uploadFilesToS3(files) {
|
|
791
|
-
debug(`Split files in chunks of ${CHUNK_SIZE}`);
|
|
792
|
-
const chunks = chunk(files, CHUNK_SIZE);
|
|
793
|
-
debug(`Starting upload of ${chunks.length} chunks`);
|
|
794
|
-
for(let i = 0; i < chunks.length; i++){
|
|
795
|
-
// Upload files
|
|
796
|
-
debug(`Uploading chunk ${i + 1}/${chunks.length}`);
|
|
797
|
-
const timeLabel = `Chunk ${i + 1}/${chunks.length}`;
|
|
798
|
-
debugTime(timeLabel);
|
|
799
|
-
const chunk = chunks[i];
|
|
800
|
-
if (!chunk) {
|
|
801
|
-
throw new Error(`Invariant: chunk ${i} is empty`);
|
|
802
|
-
}
|
|
803
|
-
await Promise.all(chunk.map(async ({ url, path, contentType })=>{
|
|
804
|
-
await upload$1({
|
|
805
|
-
url,
|
|
806
|
-
path,
|
|
807
|
-
contentType
|
|
808
|
-
});
|
|
809
|
-
}));
|
|
810
|
-
debugTimeEnd(timeLabel);
|
|
811
|
-
}
|
|
812
|
-
}
|
|
813
|
-
/**
|
|
814
|
-
* Upload screenshots to Argos.
|
|
815
|
-
*/ async function upload(params) {
    debug("Starting upload with params", params);
    // Read config and resolve the SDK identifier concurrently — they are independent.
    const [config, argosSdk] = await Promise.all([
        getConfigFromOptions(params),
        getArgosCoreSDKIdentifier()
    ]);
    // Default glob: every PNG/JPEG anywhere under the root, unless the caller
    // supplied explicit patterns.
    const files = params.files ?? [
        "**/*.{png,jpg,jpeg}"
    ];
    debug("Using config and files", config, files);
    const authToken = getAuthToken(config);
    const apiClient = createClient({
        baseUrl: config.apiBaseUrl,
        authToken
    });
    // Collect screenshots matching the glob patterns.
    const foundScreenshots = await discoverScreenshots(files, {
        root: params.root,
        ignore: params.ignore
    });
    debug("Found screenshots", foundScreenshots);
    // Optimize & compute hashes. For each screenshot we read its sidecar
    // metadata, locate an optional Playwright trace, and produce an optimized
    // image; hashes of the optimized image and trace become the upload keys.
    const screenshots = await Promise.all(foundScreenshots.map(async (screenshot)=>{
        const [metadata, pwTracePath, optimizedPath] = await Promise.all([
            readMetadata(screenshot.path),
            getPlaywrightTracePath(screenshot.path),
            optimizeScreenshot(screenshot.path)
        ]);
        const [hash, pwTraceHash] = await Promise.all([
            hashFile(optimizedPath),
            pwTracePath ? hashFile(pwTracePath) : null
        ]);
        // `transient` holds per-screenshot settings that must not be sent to
        // the API as metadata; extract what we need, then strip it.
        const threshold = metadata?.transient?.threshold ?? null;
        const baseName = metadata?.transient?.baseName ?? null;
        if (metadata) {
            delete metadata.transient;
        }
        return {
            ...screenshot,
            hash,
            optimizedPath,
            metadata,
            threshold,
            baseName,
            // A trace is only attached when both its path and hash resolved.
            pwTrace: pwTracePath && pwTraceHash ? {
                path: pwTracePath,
                hash: pwTraceHash
            } : null
        };
    }));
    debug("Fetch project");
    const projectResponse = await apiClient.GET("/project");
    if (projectResponse.error) {
        throwAPIError(projectResponse.error);
    }
    debug("Project fetched", projectResponse.data);
    const { defaultBaseBranch, hasRemoteContentAccess } = projectResponse.data;
    // Resolve the reference commit used as the comparison baseline.
    const referenceCommit = (()=>{
        if (config.referenceCommit) {
            debug("Found reference commit in config", config.referenceCommit);
            return config.referenceCommit;
        }
        // If we have remote access, we will fetch it from the Git Provider.
        if (hasRemoteContentAccess) {
            return null;
        }
        // We use the pull request as base branch if possible
        // else branch specified by the user or the default branch.
        const base = config.referenceBranch || config.prBaseBranch || defaultBaseBranch;
        const sha = getMergeBaseCommitSha({
            base,
            head: config.branch
        });
        if (sha) {
            debug("Found merge base", sha);
        } else {
            debug("No merge base found");
        }
        return sha;
    })();
    // Resolve the ancestor commits of the reference commit (only needed when
    // the server cannot fetch them itself).
    const parentCommits = (()=>{
        // If we have remote access, we will fetch them from the Git Provider.
        if (hasRemoteContentAccess) {
            return null;
        }
        if (referenceCommit) {
            const commits = listParentCommits({
                sha: referenceCommit
            });
            if (commits) {
                debug("Found parent commits", commits);
            } else {
                debug("No parent commits found");
            }
            return commits;
        }
        return null;
    })();
    // Create build
    debug("Creating build");
    // Collect the deduplicated sets of screenshot and trace hashes in a single
    // pass; the server answers with presigned upload URLs keyed by these hashes.
    const [pwTraceKeys, screenshotKeys] = screenshots.reduce(([pwTraceKeys, screenshotKeys], screenshot)=>{
        if (screenshot.pwTrace && !pwTraceKeys.includes(screenshot.pwTrace.hash)) {
            pwTraceKeys.push(screenshot.pwTrace.hash);
        }
        if (!screenshotKeys.includes(screenshot.hash)) {
            screenshotKeys.push(screenshot.hash);
        }
        return [
            pwTraceKeys,
            screenshotKeys
        ];
    }, [
        [],
        []
    ]);
    const createBuildResponse = await apiClient.POST("/builds", {
        body: {
            commit: config.commit,
            branch: config.branch,
            name: config.buildName,
            mode: config.mode,
            parallel: config.parallel,
            parallelNonce: config.parallelNonce,
            screenshotKeys,
            pwTraceKeys,
            prNumber: config.prNumber,
            prHeadCommit: config.prHeadCommit,
            referenceBranch: config.referenceBranch,
            referenceCommit,
            parentCommits,
            argosSdk,
            ciProvider: config.ciProvider,
            runId: config.runId,
            runAttempt: config.runAttempt
        }
    });
    if (createBuildResponse.error) {
        throwAPIError(createBuildResponse.error);
    }
    const result = createBuildResponse.data;
    debug("Got uploads url", result);
    // Pair each returned upload URL back with the local file carrying that
    // hash. A missing match means local and server state diverged — abort.
    const uploadFiles = [
        ...result.screenshots.map(({ key, putUrl })=>{
            const screenshot = screenshots.find((s)=>s.hash === key);
            if (!screenshot) {
                throw new Error(`Invariant: screenshot with hash ${key} not found`);
            }
            return {
                url: putUrl,
                path: screenshot.optimizedPath,
                contentType: "image/png"
            };
        }),
        ...result.pwTraces?.map(({ key, putUrl })=>{
            const screenshot = screenshots.find((s)=>s.pwTrace && s.pwTrace.hash === key);
            if (!screenshot || !screenshot.pwTrace) {
                throw new Error(`Invariant: trace with ${key} not found`);
            }
            return {
                url: putUrl,
                path: screenshot.pwTrace.path,
                contentType: "application/json"
            };
        }) ?? []
    ];
    await uploadFilesToS3(uploadFiles);
    // Update build
    debug("Updating build");
    const uploadBuildResponse = await apiClient.PUT("/builds/{buildId}", {
        params: {
            path: {
                buildId: result.build.id
            }
        },
        body: {
            screenshots: screenshots.map((screenshot)=>({
                    key: screenshot.hash,
                    name: screenshot.name,
                    metadata: screenshot.metadata,
                    pwTraceKey: screenshot.pwTrace?.hash ?? null,
                    // Per-screenshot threshold wins over the global config one.
                    threshold: screenshot.threshold ?? config?.threshold ?? null,
                    baseName: screenshot.baseName
                })),
            parallel: config.parallel,
            parallelTotal: config.parallelTotal,
            parallelIndex: config.parallelIndex,
            metadata: params.metadata
        }
    });
    if (uploadBuildResponse.error) {
        throwAPIError(uploadBuildResponse.error);
    }
    return {
        build: uploadBuildResponse.data.build,
        screenshots
    };
}
|
|
1013
|
-
|
|
1014
|
-
/**
 * Finalize pending builds.
 *
 * Used with parallel uploads: once every shard has uploaded, calling this
 * with the shared nonce tells the API the build group is complete.
 *
 * @param {{ parallel?: { nonce?: string | null } }} params
 * @returns the `/builds/finalize` API response data
 * @throws {Error} if `parallel.nonce` is missing after config resolution,
 *   or (via `throwAPIError`) if the API call fails
 */ async function finalize(params) {
    const config = await readConfig({
        parallelNonce: params.parallel?.nonce ?? null
    });
    // Fail fast before resolving auth or constructing an API client:
    // the nonce is mandatory to identify the parallel build group.
    if (!config.parallelNonce) {
        throw new Error("parallel.nonce is required to finalize the build");
    }
    const authToken = getAuthToken(config);
    const apiClient = createClient({
        baseUrl: config.apiBaseUrl,
        authToken
    });
    const finalizeBuildsResult = await apiClient.POST("/builds/finalize", {
        body: {
            parallelNonce: config.parallelNonce
        }
    });
    if (finalizeBuildsResult.error) {
        throwAPIError(finalizeBuildsResult.error);
    }
    return finalizeBuildsResult.data;
}
|
|
1038
|
-
|
|
1039
|
-
// Public API surface of the package.
export { finalize, readConfig, upload };
|