screwdriver-api 7.0.130 → 7.0.132
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/package.json +1 -1
- package/plugins/builds/index.js +174 -1137
- package/plugins/builds/triggers/and.js +100 -0
- package/plugins/builds/triggers/helpers.js +1069 -0
- package/plugins/builds/triggers/joinBase.js +115 -0
- package/plugins/builds/triggers/or.js +27 -0
- package/plugins/builds/triggers/orBase.js +80 -0
- package/plugins/builds/triggers/remoteJoin.js +70 -0
- package/plugins/builds/triggers/remoteTrigger.js +27 -0

package/plugins/builds/triggers/helpers.js (new file)
@@ -0,0 +1,1069 @@
'use strict';

const logger = require('screwdriver-logger');
const workflowParser = require('screwdriver-workflow-parser');
const merge = require('lodash.mergewith');
const schema = require('screwdriver-data-schema');
const { EXTERNAL_TRIGGER_ALL } = schema.config.regex;
const { getFullStageJobName } = require('../../helper');

/**
 * @typedef {import('screwdriver-models').JobFactory} JobFactory
 * @typedef {import('screwdriver-models').BuildFactory} BuildFactory
 * @typedef {import('screwdriver-models').EventFactory} EventFactory
 * @typedef {import('screwdriver-models').PipelineFactory} PipelineFactory
 * @typedef {import('screwdriver-models/lib/pipeline')} Pipeline
 * @typedef {import('screwdriver-models/lib/event')} Event
 * @typedef {import('screwdriver-models/lib/build')} Build
 * @typedef {import('screwdriver-models/lib/job')} Job
 */

const Status = {
    ABORTED: 'ABORTED',
    CREATED: 'CREATED',
    FAILURE: 'FAILURE',
    QUEUED: 'QUEUED',
    RUNNING: 'RUNNING',
    SUCCESS: 'SUCCESS',
    BLOCKED: 'BLOCKED',
    UNSTABLE: 'UNSTABLE',
    COLLAPSED: 'COLLAPSED',
    FROZEN: 'FROZEN',
    ENABLED: 'ENABLED',

    isAborted(status) {
        return status === this.ABORTED;
    },

    isCreated(status) {
        return status === this.CREATED;
    },

    isFailure(status) {
        return status === this.FAILURE;
    },

    isQueued(status) {
        return status === this.QUEUED;
    },

    isRunning(status) {
        return status === this.RUNNING;
    },

    isSuccess(status) {
        return status === this.SUCCESS;
    },

    isBlocked(status) {
        return status === this.BLOCKED;
    },

    isUnstable(status) {
        return status === this.UNSTABLE;
    },

    isCollapsed(status) {
        return status === this.COLLAPSED;
    },

    isFrozen(status) {
        return status === this.FROZEN;
    },

    isEnabled(status) {
        return status === this.ENABLED;
    },

    isStarted(status) {
        return !['CREATED', null, undefined].includes(status);
    }
};
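
For illustration only (not part of the diff), the Status helpers are plain string comparisons against the constants above; isStarted treats anything other than CREATED, null, or undefined as already started:

Status.isSuccess('SUCCESS'); // true
Status.isStarted('CREATED'); // false
Status.isStarted(null);      // false
Status.isStarted('QUEUED');  // true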

/**
 * Converts a string to an integer.
 * Throws an error if the string is not a valid integer representation.
 *
 * @param {String} text The string to be converted to an integer.
 * @returns {Number} The converted integer.
 * @throws {Error} An error is thrown if the string can't be converted to a finite number.
 */
function strToInt(text) {
    const value = Number.parseInt(text, 10);

    if (Number.isFinite(value)) {
        return value;
    }
    throw new Error(`Failed to cast '${text}' to integer`);
}
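
For illustration only (not part of the diff), strToInt accepts anything Number.parseInt can read and throws otherwise; it is used below to compare pipeline ids that arrive as object keys:

strToInt('12345'); // 12345
strToInt('abc');   // throws Error: Failed to cast 'abc' to integer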

/**
 * Delete a build
 * @param {Object} buildConfig build object to delete
 * @param {BuildFactory} buildFactory build factory
 * @returns {Promise}
 */
async function deleteBuild(buildConfig, buildFactory) {
    const buildToDelete = await buildFactory.get(buildConfig);

    if (buildToDelete && Status.isCreated(buildToDelete.status)) {
        return buildToDelete.remove();
    }

    return null;
}

/**
 * Checks if job is external trigger
 * @param {String} jobName Job name
 * @returns {Boolean}
 */
function isExternalTrigger(jobName) {
    return EXTERNAL_TRIGGER_ALL.test(jobName);
}

/**
 * Get external pipelineId and job name from the `name`
 * @param {String} name Job name
 * @returns {{externalPipelineId: String, externalJobName: String}}
 */
function getExternalPipelineAndJob(name) {
    const [, externalPipelineId, externalJobName] = EXTERNAL_TRIGGER_ALL.exec(name);

    return { externalPipelineId, externalJobName };
}
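
For illustration only (not part of the diff), assuming external trigger names follow the sd@<pipelineId>:<jobName> form matched by EXTERNAL_TRIGGER_ALL (the same form mergeParentBuilds constructs further down), these helpers behave like this with made-up values:

isExternalTrigger('sd@123:publish'); // true
isExternalTrigger('publish');        // false
getExternalPipelineAndJob('sd@123:publish');
// => { externalPipelineId: '123', externalJobName: 'publish' }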

/**
 * Helper function to fetch external event from parentBuilds
 * @param {Build} currentBuild Build for current completed job
 * @param {String} pipelineId Pipeline ID for next job to be triggered.
 * @param {EventFactory} eventFactory Factory for querying event data store.
 * @returns {Promise<Event>} Event where the next job to be triggered belongs to.
 */
function getExternalEvent(currentBuild, pipelineId, eventFactory) {
    if (!currentBuild.parentBuilds || !currentBuild.parentBuilds[pipelineId]) {
        return null;
    }

    const { eventId } = currentBuild.parentBuilds[pipelineId];

    return eventFactory.get(eventId);
}

/**
 * Create event for downstream pipeline that need to be rebuilt
 * @param {Object} config Configuration object
 * @param {PipelineFactory} config.pipelineFactory Pipeline Factory
 * @param {EventFactory} config.eventFactory Event Factory
 * @param {Number} config.pipelineId Pipeline to be rebuilt
 * @param {String} config.startFrom Job to be rebuilt
 * @param {String} config.causeMessage Caused message, e.g. triggered by 1234(buildId)
 * @param {String} config.parentBuildId ID of the build that triggers this event
 * @param {Record<String, ParentBuild>} [config.parentBuilds] Builds that triggered this build
 * @param {Number} [config.parentEventId] Parent event ID
 * @param {Number} [config.groupEventId] Group parent event ID
 * @returns {Promise<Event>} New event
 */
async function createEvent(config) {
    const {
        pipelineFactory,
        eventFactory,
        pipelineId,
        startFrom,
        skipMessage,
        causeMessage,
        parentBuildId,
        parentBuilds,
        parentEventId,
        groupEventId
    } = config;

    const pipeline = await pipelineFactory.get(pipelineId);
    const realAdmin = await pipeline.admin;
    const { scmContext, scmUri } = pipeline;

    // get pipeline admin's token
    const token = await realAdmin.unsealToken();
    const scmConfig = {
        scmContext,
        scmUri,
        token
    };

    // Get commit sha
    const { scm } = eventFactory;
    const sha = await scm.getCommitSha(scmConfig);

    const payload = {
        pipelineId,
        startFrom,
        skipMessage,
        type: 'pipeline',
        causeMessage,
        parentBuildId,
        scmContext,
        username: realAdmin.username,
        sha,
        ...(parentEventId ? { parentEventId } : {}),
        // for backward compatibility, this field is optional
        ...(parentBuilds ? { parentBuilds } : {}),
        ...(groupEventId ? { groupEventId } : {})
    };

    // Set configPipelineSha for child pipeline
    if (pipeline.configPipelineId) {
        const configPipeline = await pipelineFactory.get(pipeline.configPipelineId);
        const configAdmin = await configPipeline.admin;
        const configToken = await configAdmin.unsealToken();
        const configScmConfig = {
            scmContext: configPipeline.scmContext,
            scmUri: configPipeline.scmUri,
            token: configToken
        };

        payload.configPipelineSha = await scm.getCommitSha(configScmConfig);
    }

    return eventFactory.create(payload);
}

/**
 * Create external event (returns event with `builds` field)
 * @param {Object} config Configuration object
 * @param {PipelineFactory} config.pipelineFactory Pipeline Factory
 * @param {EventFactory} config.eventFactory Event Factory
 * @param {Number} config.externalPipelineId External pipeline ID
 * @param {String} config.startFrom External trigger to start from
 * @param {String} config.skipMessage If this is set then build won't be created
 * @param {Number} config.parentBuildId Parent Build ID
 * @param {Object} config.parentBuilds Builds that triggered this build
 * @param {String} config.causeMessage Cause message of this event
 * @param {Number} [config.parentEventId] Parent event ID
 * @param {Number} [config.groupEventId] Group parent event ID
 * @returns {Promise<Event>}
 */
async function createExternalEvent(config) {
    const {
        pipelineFactory,
        eventFactory,
        externalPipelineId,
        startFrom,
        skipMessage,
        parentBuildId,
        parentBuilds,
        causeMessage,
        parentEventId,
        groupEventId
    } = config;

    const createEventConfig = {
        pipelineFactory,
        eventFactory,
        pipelineId: externalPipelineId,
        startFrom,
        skipMessage,
        parentBuildId, // current build
        causeMessage,
        parentBuilds,
        ...(parentEventId ? { parentEventId } : {}),
        ...(groupEventId ? { groupEventId } : {})
    };

    return createEvent(createEventConfig);
}

/**
 * Create internal build. If config.start is false or not passed in then do not start the job
 * Need to pass in (jobName and pipelineId) or (jobId) to get job data
 * @param {Object} config Configuration object
 * @param {JobFactory} config.jobFactory Job Factory
 * @param {BuildFactory} config.buildFactory Build Factory
 * @param {Number} config.pipelineId Pipeline Id
 * @param {String} config.jobName Job name
 * @param {String} config.username Username of build
 * @param {String} config.scmContext SCM context
 * @param {Record<String, ParentBuild>} config.parentBuilds Builds that triggered this build
 * @param {String|null} config.baseBranch Branch name
 * @param {Number} config.parentBuildId Parent build ID
 * @param {Boolean} config.start Whether to start the build or not
 * @param {Number|undefined} config.jobId Job ID
 * @param {EventModel} config.event Event build belongs to
 * @returns {Promise<BuildModel|null>}
 */
async function createInternalBuild(config) {
    const {
        jobFactory,
        buildFactory,
        pipelineId,
        jobName,
        username,
        scmContext,
        event,
        parentBuilds,
        start,
        baseBranch,
        parentBuildId,
        jobId
    } = config;
    const { ref = '', prSource = '', prBranchName = '', url = '' } = event.pr || {};
    const prInfo = prBranchName ? { url, prBranchName } : '';
    /** @type {Job} */
    const job = jobId
        ? await jobFactory.get(jobId)
        : await jobFactory.get({
              name: jobName,
              pipelineId
          });

    const internalBuildConfig = {
        jobId: job.id,
        sha: event.sha,
        parentBuildId,
        parentBuilds: parentBuilds || {},
        eventId: event.id,
        username,
        configPipelineSha: event.configPipelineSha,
        scmContext,
        prRef: ref,
        prSource,
        prInfo,
        start: start !== false,
        baseBranch
    };

    let jobState = job.state;

    if (ref) {
        // Whether a job is enabled is determined by the state of the original job.
        // If the original job does not exist, it will be enabled.
        const originalJobName = job.parsePRJobName('job');
        const originalJob = await jobFactory.get({
            name: originalJobName,
            pipelineId
        });

        jobState = originalJob ? originalJob.state : Status.ENABLED;
    }

    if (Status.isEnabled(jobState)) {
        // return build
        return buildFactory.create(internalBuildConfig);
    }

    return null;
}

/**
 * Return PR job or not
 * PR job name certainly has ":". e.g. "PR-1:jobName"
 * @param {String} jobName
 * @returns {Boolean}
 */
function isPR(jobName) {
    return jobName.startsWith('PR-');
}

/**
 * Trim Job name to follow data-schema
 * @param {String} jobName
 * @returns {String} trimmed jobName
 */
function trimJobName(jobName) {
    if (isPR(jobName)) {
        return jobName.split(':')[1];
    }

    return jobName;
}
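
For illustration only (not part of the diff), PR builds prefix the job name with the pull-request identifier and a colon, which trimJobName strips before the job is looked up in the workflow graph:

isPR('PR-15:main');        // true
trimJobName('PR-15:main'); // 'main'
trimJobName('main');       // 'main'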

/**
 * @typedef {Object} ParentBuild
 * @property {String} eventId
 * @property {Record<String, Number>} jobs Job name and build ID
 */
/**
 * @typedef {Record<String, ParentBuild>} ParentBuilds
 */
/**
 * Generates a parent builds object
 * @param {Number} config.buildId Build ID
 * @param {Number} config.eventId Event ID
 * @param {Number} config.pipelineId Pipeline ID
 * @param {String} config.jobName Job name
 * @param {Array} [config.joinListNames] Job names in join list
 * @returns {ParentBuilds} Returns parent builds object
 */
function createParentBuildsObj(config) {
    const { buildId, eventId, pipelineId, jobName, joinListNames } = config;

    // For getting multiple parent builds
    if (!joinListNames) {
        return { [pipelineId]: { eventId, jobs: { [jobName]: buildId } } };
    }

    const joinParentBuilds = {};

    joinListNames.forEach(name => {
        let parentBuildPipelineId = pipelineId;
        let parentBuildJobName = name;

        if (isExternalTrigger(name)) {
            const { externalPipelineId, externalJobName } = getExternalPipelineAndJob(name);

            parentBuildPipelineId = externalPipelineId;
            parentBuildJobName = externalJobName;
        }

        joinParentBuilds[parentBuildPipelineId] = joinParentBuilds[parentBuildPipelineId] || {
            eventId: null,
            jobs: {}
        };
        joinParentBuilds[parentBuildPipelineId].jobs[parentBuildJobName] = null;
    });

    return joinParentBuilds;
}
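
For illustration only (not part of the diff), with made-up ids createParentBuildsObj returns either a single entry for the current build or an empty skeleton keyed by the join jobs:

createParentBuildsObj({ buildId: 987, eventId: 2, pipelineId: 111, jobName: 'D' });
// => { 111: { eventId: 2, jobs: { D: 987 } } }
createParentBuildsObj({ pipelineId: 111, joinListNames: ['A', 'sd@222:B'] });
// => { 111: { eventId: null, jobs: { A: null } }, 222: { eventId: null, jobs: { B: null } } }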

/**
 * Parse job info into important variables
 * - parentBuilds: parent build information
 * - joinListNames: array of join jobs
 * - joinParentBuilds: parent build information for join jobs
 * @param {Object} arg
 * @param {Object} arg.joinObj Join object
 * @param {Build} arg.currentBuild Object holding current event, job & pipeline
 * @param {Pipeline} arg.currentPipeline Object holding current event, job & pipeline
 * @param {Job} arg.currentJob Object holding current event, job & pipeline
 * @param {String} arg.nextJobName Next job's name
 * @param {Number} arg.nextPipelineId Next job's Pipeline Id
 * @returns {{parentBuilds: ParentBuilds, joinListNames: String[], joinParentBuilds: ParentBuilds}}
 */
function parseJobInfo({ joinObj, currentBuild, currentPipeline, currentJob, nextJobName, nextPipelineId }) {
    const joinList = joinObj && joinObj[nextJobName] && joinObj[nextJobName].join ? joinObj[nextJobName].join : [];
    const joinListNames = joinList.map(j => j.name);

    /* CONSTRUCT AN OBJ LIKE {111: {eventId: 2, D:987}}
     * FOR EASY LOOKUP OF BUILD STATUS */
    // current job's parentBuilds
    const currentJobParentBuilds = currentBuild.parentBuilds || {};
    // join jobs, with eventId and buildId empty
    const joinParentBuilds = createParentBuildsObj({
        pipelineId: nextPipelineId || currentPipeline.id,
        joinListNames
    });
    // override currentBuild in the joinParentBuilds
    const currentBuildInfo = createParentBuildsObj({
        buildId: currentBuild.id,
        eventId: currentBuild.eventId,
        pipelineId: currentPipeline.id,
        jobName: currentJob.name
    });

    // need to merge because it's possible same event has multiple builds
    const parentBuilds = merge({}, joinParentBuilds, currentJobParentBuilds, currentBuildInfo);

    return {
        parentBuilds,
        joinListNames,
        joinParentBuilds
    };
}

/**
 * Get builds whose groupEventId is event.groupEventId. Only the latest build is retrieved for each job.
 * @param {Number} groupEventId Group parent event ID
 * @param {BuildFactory} buildFactory Build factory
 * @returns {Promise<Build[]>} All finished builds
 */
async function getBuildsForGroupEvent(groupEventId, buildFactory) {
    const builds = await buildFactory.getLatestBuilds({ groupEventId, readOnly: false });

    builds.forEach(b => {
        try {
            b.environment = JSON.parse(b.environment);
            b.parentBuilds = JSON.parse(b.parentBuilds);
            b.stats = JSON.parse(b.stats);
            b.meta = JSON.parse(b.meta);
            b.parentBuildId = JSON.parse(b.parentBuildId);

            if (b.parentBuildId) {
                // parentBuildId could be the string '123', the number 123, or an array
                b.parentBuildId = Array.isArray(b.parentBuildId)
                    ? b.parentBuildId.map(Number)
                    : [Number(b.parentBuildId)];
            }
        } catch (err) {
            logger.error(`Failed to parse objects for ${b.id}`);
        }
    });

    return builds;
}

/**
 * Update parent builds info when next build already exists
 * @param {Object} arg
 * @param {ParentBuilds} arg.joinParentBuilds Parent builds object for join job
 * @param {Build} arg.nextBuild Next build
 * @param {Build} arg.build Build for current completed job
 * @returns {Promise<Build>} Updated next build
 */
async function updateParentBuilds({ joinParentBuilds, nextBuild, build }) {
    // Override old parentBuilds info
    const newParentBuilds = merge({}, joinParentBuilds, nextBuild.parentBuilds, (objVal, srcVal) =>
        // passthrough objects, else mergeWith mutates source
        srcVal && typeof srcVal === 'object' ? undefined : objVal || srcVal
    );

    nextBuild.parentBuilds = newParentBuilds;
    // nextBuild.parentBuildId may be int or Array, so it needs to be flattened
    nextBuild.parentBuildId = Array.from(new Set([build.id, nextBuild.parentBuildId || []].flat()));

    return nextBuild.update();
}

/**
 * Check if all parent builds of the new build are done
 * @param {Object} arg
 * @param {Build} arg.newBuild Updated build
 * @param {String[]} arg.joinListNames Join list names
 * @param {Number} arg.pipelineId Pipeline ID
 * @param {BuildFactory} arg.buildFactory Build factory
 * @returns {Promise<{hasFailure: Boolean, done: Boolean}>} Object with done and hasFailure statuses
 */
async function getParentBuildStatus({ newBuild, joinListNames, pipelineId, buildFactory }) {
    const upstream = newBuild.parentBuilds || {};

    // Get buildId
    const joinBuildIds = joinListNames.map(name => {
        let upstreamPipelineId = pipelineId;
        let upstreamJobName = name;

        if (isExternalTrigger(name)) {
            const { externalPipelineId, externalJobName } = getExternalPipelineAndJob(name);

            upstreamPipelineId = externalPipelineId;
            upstreamJobName = externalJobName;
        }

        if (upstream[upstreamPipelineId] && upstream[upstreamPipelineId].jobs[upstreamJobName]) {
            return upstream[upstreamPipelineId].jobs[upstreamJobName];
        }

        return undefined;
    });

    // If buildId is empty, the job hasn't executed yet and the join is not done
    const isExecuted = !joinBuildIds.includes(undefined);

    // Get the status of the builds
    const buildIds = joinBuildIds.filter(buildId => buildId !== undefined);
    const promisesToAwait = buildIds.map(buildId => buildFactory.get(buildId));
    const joinedBuilds = await Promise.all(promisesToAwait);

    const hasFailure = joinedBuilds
        .map(build => {
            // Do not need to run the next build; terminal status
            return [Status.FAILURE, Status.ABORTED, Status.COLLAPSED, Status.UNSTABLE].includes(build.status);
        })
        .includes(true);

    const isDoneStatus = joinedBuilds.every(build => {
        // All builds are done
        return [Status.FAILURE, Status.SUCCESS, Status.ABORTED, Status.UNSTABLE, Status.COLLAPSED].includes(
            build.status
        );
    });

    const done = isExecuted && isDoneStatus;

    return { hasFailure, done };
}

/**
 * Handle new build logic: update, start, or remove
 * If the build is done, check if it has a failure:
 *     if failure, delete new build
 *     if no failure, start new build
 * Otherwise, do nothing
 * @param {Object} arg If the build is done or not
 * @param {Boolean} arg.done If the build is done or not
 * @param {Boolean} arg.hasFailure If the build has a failure or not
 * @param {Build} arg.newBuild Next build
 * @param {String|undefined} arg.jobName Job name
 * @param {String|undefined} arg.pipelineId Pipeline ID
 * @param {Object|undefined} arg.stage Stage
 * @returns {Promise<Build|null>} The newly updated/created build
 */
async function handleNewBuild({ done, hasFailure, newBuild, jobName, pipelineId, stage }) {
    if (!done || Status.isStarted(newBuild.status)) {
        return null;
    }

    // Delete new build since previous build failed
    if (hasFailure) {
        const stageTeardownName = stage ? getFullStageJobName({ stageName: stage.name, jobName: 'teardown' }) : '';

        // New build is not stage teardown job
        if (jobName !== stageTeardownName) {
            logger.info(
                `Failure occurred in upstream job, removing new build - build:${newBuild.id} pipeline:${pipelineId}-${jobName} event:${newBuild.eventId} `
            );
            await newBuild.remove();
        }

        return null;
    }

    // All join builds finished successfully and it's clear that a new build has not been started before.
    // Start new build.
    newBuild.status = Status.QUEUED;
    await newBuild.update();

    return newBuild.start();
}

/**
 * Get all builds with a given event ID as the parentEventID
 * @param {Object} arg
 * @param {EventFactory} eventFactory Event factory
 * @param {Number} parentEventId Parent event ID
 * @param {Number} pipelineId Pipeline ID
 * @returns {Promise<Build[]>} Array of builds with same parent event ID
 */
async function getParallelBuilds({ eventFactory, parentEventId, pipelineId }) {
    let parallelEvents = await eventFactory.list({
        params: {
            parentEventId
        }
    });

    // Remove previous events from same pipeline
    parallelEvents = parallelEvents.filter(pe => pe.pipelineId !== pipelineId);

    // Fetch builds for each parallel event and combine them into one array
    const parallelBuildsPromises = parallelEvents.map(pe => pe.getBuilds());
    const parallelBuildsArrays = await Promise.all(parallelBuildsPromises);

    // Flatten the array of arrays into a single array
    const parallelBuilds = [].concat(...parallelBuildsArrays);

    return parallelBuilds;
}

/**
 * Merge parentBuilds object with missing job information from latest builds object
 * @param {ParentBuilds} parentBuilds parent builds
 * @param {Build[]} relatedBuilds Related builds which is used to fill parentBuilds data
 * @param {Event} currentEvent Current event
 * @param {Event} nextEvent Next triggered event (Remote trigger or Same pipeline event triggered as external)
 * @returns {ParentBuilds} Merged parent builds { "${pipelineId}": { jobs: { "${jobName}": ${buildId} }, eventId: 123 } }
 *
 * @example
 * >>> mergeParentBuilds(...)
 * {
 *   "1": {
 *     jobs: { "job-name-a": 1, "job-name-b": 2 }
 *     eventId: 123
 *   },
 *   "2": {
 *     jobs: { "job-name-a": 4, "job-name-b": 5 }
 *     eventId: 456
 *   },
 * }
 */
function mergeParentBuilds(parentBuilds, relatedBuilds, currentEvent, nextEvent) {
    const newParentBuilds = {};

    Object.entries(parentBuilds).forEach(([pipelineId, { jobs, eventId }]) => {
        const newBuilds = {
            jobs,
            eventId
        };

        Object.entries(jobs).forEach(([jobName, build]) => {
            if (build !== null) {
                newBuilds.jobs[jobName] = build;

                return;
            }

            let { workflowGraph } = currentEvent;
            let nodeName = trimJobName(jobName);

            if (strToInt(pipelineId) !== currentEvent.pipelineId) {
                if (nextEvent) {
                    if (strToInt(pipelineId) !== nextEvent.pipelineId) {
                        nodeName = `sd@${pipelineId}:${nodeName}`;
                    }
                    workflowGraph = nextEvent.workflowGraph;
                } else {
                    nodeName = `sd@${pipelineId}:${nodeName}`;
                }
            }

            const targetJob = workflowGraph.nodes.find(node => node.name === nodeName);

            if (!targetJob) {
                logger.warn(`Job ${jobName}:${pipelineId} not found in workflowGraph for event ${currentEvent.id}`);

                return;
            }

            const targetBuild = relatedBuilds.find(b => b.jobId === targetJob.id);

            if (!targetBuild) {
                logger.warn(`Job ${jobName}:${pipelineId} not found in builds`);

                return;
            }

            newBuilds.jobs[jobName] = targetBuild.id;
            newBuilds.eventId = targetBuild.eventId;
        });

        newParentBuilds[pipelineId] = newBuilds;
    });

    return newParentBuilds;
}

/**
 * @typedef {Object} JoinPipeline
 * @property {String} event event id
 * @property {Record<String, {id: String, join: String[]}>} jobs
 */
/**
 * @typedef {Record<String, JoinPipeline>} JoinPipelines
 */

/**
 * Create joinObject for nextJobs to trigger
 * For A & D in nextJobs for currentJobName B, create
 * {A:[B,C], D:[B,F], X: []} where [B,C] join on A,
 * [B,F] join on D and X has no join
 * This can include external jobs
 * @param {String[]} nextJobNames List of jobs to run next from workflow parser.
 * @param {Object} current Object holding current job's build, event data
 * @param {Build} current.build Current build
 * @param {Event} current.event Current event
 * @param {Pipeline} current.pipeline Current pipeline
 * @param {EventFactory} eventFactory Object for querying DB for event data
 * @returns {Promise<JoinPipelines>} Object representing join data for next jobs grouped by pipeline id
 *
 * @example
 * >>> await createJoinObject(...)
 * {
 *   "{pipelineId}" :{
 *     event: "{externalEventId}",
 *     jobs: {
 *       "{nextJobName}": {
 *         id: "{jobId}"
 *         join: ["{joinJobName1}", "{joinJobName2}"]
 *       }
 *     }
 *   }
 * }
 */
async function createJoinObject(nextJobNames, current, eventFactory) {
    const { build, event, pipeline } = current;
    const joinObj = {};

    for (const jobName of nextJobNames) {
        let nextJobPipelineId = pipeline.id;
        let nextJobName = jobName;
        let isExternal = false;

        if (isExternalTrigger(jobName)) {
            const { externalPipelineId, externalJobName } = getExternalPipelineAndJob(jobName);

            nextJobPipelineId = externalPipelineId;
            nextJobName = externalJobName;
            isExternal = true;
        }

        const jId = event.workflowGraph.nodes.find(n => n.name === trimJobName(jobName)).id;

        if (!joinObj[nextJobPipelineId]) joinObj[nextJobPipelineId] = {};
        const pipelineObj = joinObj[nextJobPipelineId];
        let jobs;

        if (nextJobPipelineId !== pipeline.id) {
            jobs = [];

            const externalEvent = pipelineObj.event || (await getExternalEvent(build, nextJobPipelineId, eventFactory));

            if (externalEvent) {
                pipelineObj.event = externalEvent;
                jobs = workflowParser.getSrcForJoin(externalEvent.workflowGraph, { jobName: nextJobName });
            }
        } else {
            jobs = workflowParser.getSrcForJoin(event.workflowGraph, { jobName });
        }

        if (!pipelineObj.jobs) pipelineObj.jobs = {};
        pipelineObj.jobs[nextJobName] = { id: jId, join: jobs, isExternal };
    }

    return joinObj;
}

/**
 * Create stage teardown build if it doesn't already exist
 * @param {Object} arg
 * @param {JobFactory} arg.jobFactory Job factory
 * @param {BuildFactory} arg.buildFactory Build factory
 * @param {Object} arg.current Current object
 * @param {Event} arg.current.event Current event
 * @param {String} arg.stageTeardownName Stage teardown name
 * @param {String} arg.username Username
 * @param {String} arg.scmContext SCM context
 */
async function ensureStageTeardownBuildExists({
    jobFactory,
    buildFactory,
    current,
    stageTeardownName,
    username,
    scmContext
}) {
    // Check if stage teardown build already exists
    const stageTeardownJob = await jobFactory.get({
        pipelineId: current.pipeline.id,
        name: stageTeardownName
    });
    const existingStageTeardownBuild = await buildFactory.get({
        eventId: current.event.id,
        jobId: stageTeardownJob.id
    });

    // Doesn't exist, create stage teardown job
    if (!existingStageTeardownBuild) {
        await createInternalBuild({
            jobFactory,
            buildFactory,
            pipelineId: current.pipeline.id,
            jobName: stageTeardownName,
            username,
            scmContext,
            event: current.event, // this is the parentBuild for the next build
            baseBranch: current.event.baseBranch || null,
            start: false
        });
    }
}

/**
 * Delete nextBuild, create teardown build if it doesn't exist, and return teardown build or return null
 * @param {Object} arg
 * @param {String} arg.nextJobName Next job name
 * @param {Object} arg.current Object with stage, event, pipeline info
 * @param {Object} arg.buildConfig Build config
 * @param {JobFactory} arg.jobFactory Job factory
 * @param {BuildFactory} arg.buildFactory Build factory
 * @param {String} arg.username Username
 * @param {String} arg.scmContext Scm context
 * @returns {Promise<Array>} Array of promises
 */
async function handleStageFailure({
    nextJobName,
    current,
    buildConfig,
    jobFactory,
    buildFactory,
    username,
    scmContext
}) {
    const buildDeletePromises = [];
    const stageTeardownName = getFullStageJobName({ stageName: current.stage.name, jobName: 'teardown' });

    // Remove next build
    if (buildConfig.eventId && nextJobName !== stageTeardownName) {
        buildDeletePromises.push(deleteBuild(buildConfig, buildFactory));
    }

    await ensureStageTeardownBuildExists({
        jobFactory,
        buildFactory,
        current,
        stageTeardownName,
        username,
        scmContext
    });

    return buildDeletePromises;
}

/**
 * Get parentBuildId from parentBuilds object
 * @param {Object} arg
 * @param {ParentBuilds} arg.parentBuilds Builds that triggered this build
 * @param {String[]} arg.joinListNames Array of join job name
 * @param {Number} arg.pipelineId Pipeline ID
 * @returns {String[]} Array of parentBuildId
 */
function getParentBuildIds({ currentBuildId, parentBuilds, joinListNames, pipelineId }) {
    const parentBuildIds = joinListNames
        .map(name => {
            let parentBuildPipelineId = pipelineId;
            let parentBuildJobName = name;

            if (isExternalTrigger(name)) {
                const { externalPipelineId, externalJobName } = getExternalPipelineAndJob(name);

                parentBuildPipelineId = externalPipelineId;
                parentBuildJobName = externalJobName;
            }

            if (parentBuilds[parentBuildPipelineId] && parentBuilds[parentBuildPipelineId].jobs[parentBuildJobName]) {
                return parentBuilds[parentBuildPipelineId].jobs[parentBuildJobName];
            }

            return null;
        })
        .filter(Boolean); // Remove undefined or null values

    return Array.from(new Set([currentBuildId, ...parentBuildIds]));
}
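
For illustration only (not part of the diff), with made-up ids getParentBuildIds resolves each join job against parentBuilds, drops unresolved entries, and de-duplicates with the current build id first:

getParentBuildIds({
    currentBuildId: 100,
    parentBuilds: { 111: { eventId: 2, jobs: { A: 101, B: null } }, 222: { eventId: 3, jobs: { C: 102 } } },
    joinListNames: ['A', 'B', 'sd@222:C'],
    pipelineId: 111
});
// => [100, 101, 102]  (B is dropped because its build id is still null)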

/**
 * Extract a current pipeline's next jobs from pipeline join data
 * (Next jobs triggered as external are not included)
 *
 * @param {JoinPipelines} joinedPipelines
 * @param {Number} currentPipelineId
 * @returns {JoinPipeline}
 */
function extractCurrentPipelineJoinData(joinedPipelines, currentPipelineId) {
    const currentPipelineJoinData = joinedPipelines[currentPipelineId.toString()];

    if (currentPipelineJoinData === undefined) {
        return {};
    }

    return Object.fromEntries(Object.entries(currentPipelineJoinData.jobs).filter(([, join]) => !join.isExternal));
}

/**
 * Extract next jobs in current and external pipelines from pipeline join data
 *
 * @param {JoinPipelines} joinedPipelines
 * @param {Number} currentPipelineId
 * @returns {JoinPipelines}
 */
function extractExternalJoinData(joinedPipelines, currentPipelineId) {
    const externalJoinData = {};

    Object.entries(joinedPipelines).forEach(([joinedPipelineId, joinedPipeline]) => {
        const isExternalPipeline = strToInt(joinedPipelineId) !== currentPipelineId;

        if (isExternalPipeline) {
            externalJoinData[joinedPipelineId] = joinedPipeline;
        } else {
            const nextJobsTriggeredAsExternal = Object.entries(joinedPipeline.jobs).filter(
                ([, join]) => join.isExternal
            );

            if (nextJobsTriggeredAsExternal.length === 0) {
                return;
            }

            externalJoinData[joinedPipelineId] = {
                jobs: Object.fromEntries(nextJobsTriggeredAsExternal),
                event: joinedPipeline.event
            };
        }
    });

    return externalJoinData;
}
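
For illustration only (not part of the diff), the two extractors split one createJoinObject result between in-pipeline joins and joins that must be triggered as external events; assuming the current pipeline id is 111 and made-up job data:

const joined = {
    111: { jobs: { A: { id: 4, join: [], isExternal: false }, B: { id: 5, join: [], isExternal: true } } },
    333: { jobs: { C: { id: 6, join: [], isExternal: false } } }
};

extractCurrentPipelineJoinData(joined, 111);
// => { A: { id: 4, join: [], isExternal: false } }
extractExternalJoinData(joined, 111);
// => { 111: { jobs: { B: { id: 5, join: [], isExternal: true } }, event: undefined },
//      333: { jobs: { C: { id: 6, join: [], isExternal: false } } } }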

/**
 * Get job id from job name
 * @param {String} jobName Job name
 * @param {String} pipelineId Pipeline id
 * @param {JobFactory} jobFactory Job factory
 * @returns {Promise<Number>}
 */
async function getJobId(jobName, pipelineId, jobFactory) {
    const job = await jobFactory.get({
        name: jobName,
        pipelineId
    });

    return job.id;
}

/**
 * @typedef {Object} WorkflowGraph
 * @property {Array<{src: String, dest: String, join: Boolean}} edges
 * @property {Array<{name: String, id: Number}>} nodes
 */
/**
 * Check trigger is OR trigger
 * @param {WorkflowGraph} workflowGraph
 * @param {String} currentJobName current job name
 * @param {String} nextJobName next job name
 * @returns {Boolean}
 */
function isOrTrigger(workflowGraph, currentJobName, nextJobName) {
    return workflowGraph.edges.some(edge => {
        return edge.src === currentJobName && edge.dest === nextJobName && edge.join !== true;
    });
}
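
For illustration only (not part of the diff), isOrTrigger reports whether at least one edge from the current job to the next job is a plain (non-join) trigger, using a made-up graph:

const graph = {
    nodes: [{ name: 'build', id: 1 }, { name: 'deploy', id: 2 }, { name: 'verify', id: 3 }],
    edges: [
        { src: 'build', dest: 'deploy' }, // OR trigger
        { src: 'build', dest: 'verify', join: true } // join (AND) trigger
    ]
};

isOrTrigger(graph, 'build', 'deploy'); // true
isOrTrigger(graph, 'build', 'verify'); // false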

/**
 * Filter builds to restart
 * @param {JoinPipeline} joinPipeline join job names
 * @param {Build[]} groupEventBuilds Builds belong to current event group
 * @param {Event} currentEvent Current event
 * @param {Build} currentBuild Current build
 * @returns {Build[]}
 */
function buildsToRestartFilter(joinPipeline, groupEventBuilds, currentEvent, currentBuild) {
    return Object.values(joinPipeline.jobs)
        .map(joinJob => {
            // Next triggered job's build belonging to same event group
            const existBuild = groupEventBuilds.find(build => build.jobId === joinJob.id);

            // If there is no same job's build, then first time trigger
            if (!existBuild) return null;

            // CREATED build is not triggered yet
            if (Status.isCreated(existBuild.status)) return null;

            // Exist build is triggered from current build
            // Prevent double triggering same build object
            if (existBuild.parentBuildId.includes(currentBuild.id)) return null;

            // Circle back trigger (Remote Join case)
            if (existBuild.eventId === currentEvent.parentEventId) return null;

            return existBuild;
        })
        .filter(build => build !== null);
}

module.exports = {
    Status,
    parseJobInfo,
    createInternalBuild,
    getParallelBuilds,
    mergeParentBuilds,
    updateParentBuilds,
    getParentBuildStatus,
    handleNewBuild,
    handleStageFailure,
    getBuildsForGroupEvent,
    createJoinObject,
    createExternalEvent,
    getParentBuildIds,
    strToInt,
    createEvent,
    deleteBuild,
    getJobId,
    isOrTrigger,
    extractCurrentPipelineJoinData,
    extractExternalJoinData,
    buildsToRestartFilter,
    trimJobName
};