screwdriver-api 4.1.181 → 4.1.185
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/bin/server +7 -0
- package/config/custom-environment-variables.yaml +5 -0
- package/config/default.yaml +4 -0
- package/lib/registerPlugins.js +2 -1
- package/lib/server.js +2 -1
- package/package.json +1 -1
- package/plugins/builds/index.js +6 -4
- package/plugins/events/README.md +3 -0
- package/plugins/events/create.js +1 -1
- package/plugins/events/listBuilds.js +26 -3
- package/plugins/jobs/README.md +2 -0
- package/plugins/jobs/listBuilds.js +25 -4
- package/plugins/processHooks/README.md +33 -0
- package/plugins/processHooks/index.js +47 -0
- package/plugins/webhooks/helper.js +1192 -0
- package/plugins/webhooks/index.js +18 -1158
package/bin/server
CHANGED
@@ -50,6 +50,9 @@ const notificationConfig = config.get('notifications');
 // Multiple build cluster feature flag
 const multiBuildClusterEnabled = convertToBool(config.get('multiBuildCluster').enabled);
 
+// Queue Webhook feature flag
+const queueWebhookEnabled = convertToBool(config.get('queueWebhook').enabled);
+
 // Default cluster environment variable
 const clusterEnvConfig = config.get('build').environment; // readonly
 const clusterEnv = { ...clusterEnvConfig };
@@ -254,6 +257,10 @@ datastore.setup(datastoreConfig.ddlSyncEnabled).then(() =>
             validator: {
                 externalJoin: true,
                 notificationsValidationErr
+            },
+            queueWebhook: {
+                executor,
+                queueWebhookEnabled
             }
         })
         .then(instance => logger.info('Server running at %s', instance.info.uri))
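The new flag follows the same pattern as the multiBuildCluster switch above: a nested `enabled` value is read through node-config and normalized to a boolean. A minimal sketch of that pattern, assuming `convertToBool` behaves like a typical string-to-boolean helper (the helper body below is illustrative, not the actual bin/server source):

```javascript
'use strict';

// Illustrative sketch only; bin/server already defines its own convertToBool.
const config = require('config');

function convertToBool(value) {
    if (typeof value === 'boolean') {
        return value;
    }

    // 'true' / 'TRUE' / '1' → true, anything else → false (assumed behaviour)
    return ['true', '1'].includes(String(value).toLowerCase());
}

// queueWebhook.enabled is expected to be supplied by config/default.yaml or the
// environment mapping in custom-environment-variables.yaml (both appear in the changed-file list at the top).
const queueWebhookEnabled = convertToBool(config.get('queueWebhook').enabled);

console.log(`queue webhook enabled: ${queueWebhookEnabled}`);
```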
package/config/default.yaml
CHANGED
package/lib/registerPlugins.js
CHANGED
package/lib/server.js
CHANGED
@@ -136,7 +136,8 @@ module.exports = async config => {
         collectionFactory: config.collectionFactory,
         buildClusterFactory: config.buildClusterFactory,
         ecosystem: config.ecosystem,
-        release: config.release
+        release: config.release,
+        queueWebhook: config.queueWebhook
     };
 
     const bellConfigs = await config.auth.scm.getBellConfiguration();
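Combined with the bin/server hunk above, this threads a single object from the process entry point down to the Hapi server: bin/server bundles the executor with the feature flag, and lib/server.js forwards it unchanged next to `release`. A condensed, hypothetical view of that hand-off (placeholder values, not the real wiring):

```javascript
// Placeholder stand-ins; in the real API these come from config and the executor setup.
const executor = {};
const queueWebhookEnabled = false;

// bin/server: bundle the executor and the flag once...
const queueWebhook = { executor, queueWebhookEnabled };

// ...lib/server.js: forward it untouched alongside the existing settings.
const buildServerAppConfig = config => ({
    ecosystem: config.ecosystem,
    release: config.release,
    queueWebhook: config.queueWebhook
});

console.log(buildServerAppConfig({ ecosystem: {}, release: '4.1.185', queueWebhook }));
```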
package/package.json
CHANGED
package/plugins/builds/index.js
CHANGED
@@ -529,11 +529,13 @@ async function handleNewBuild({ done, hasFailure, newBuild, jobName, pipelineId
             return null;
         }
 
-        // If all join builds finished successfully, start new build
-        newBuild.status = 'QUEUED';
-        const queuedBuild = await newBuild.update();
+        // If all join builds finished successfully and it's clear that a new build has not been started before, start new build
+        if ([ 'CREATED', null, undefined ].includes(newBuild.status)) {
+            newBuild.status = 'QUEUED';
+            const queuedBuild = await newBuild.update();
 
-        return queuedBuild.start();
+            return queuedBuild.start();
+        }
     }
 
     return null;
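The added guard matters when several join parents finish close together: each one runs handleNewBuild against the same downstream build, and without the status check that build could be queued and started more than once. A stripped-down sketch of the behaviour, using a hypothetical in-memory build object rather than the real model:

```javascript
// Hypothetical stand-in for the build model used in handleNewBuild.
const newBuild = {
    status: 'CREATED',
    async update() { return this; },
    async start() { console.log('started once'); return this; }
};

async function maybeStart(build) {
    // Same guard as the diff: only builds that were never started may be queued.
    if (['CREATED', null, undefined].includes(build.status)) {
        build.status = 'QUEUED';
        const queuedBuild = await build.update();

        return queuedBuild.start();
    }

    return null;
}

(async () => {
    await maybeStart(newBuild); // flips CREATED → QUEUED and starts the build
    await maybeStart(newBuild); // sees QUEUED, returns null instead of starting again
})();
```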
package/plugins/events/README.md
CHANGED
package/plugins/events/create.js
CHANGED
package/plugins/events/listBuilds.js
CHANGED
@@ -24,14 +24,23 @@ module.exports = () => ({
         handler: async (request, h) => {
             const { eventFactory } = request.server.app;
             const event = await eventFactory.get(request.params.id);
+            const { fetchSteps, readOnly } = request.query;
 
             if (!event) {
                 throw boom.notFound('Event does not exist');
             }
 
-            const
+            const config = readOnly ? { readOnly: true } : {};
 
-            const
+            const buildsModel = await event.getBuilds(config);
+
+            let data;
+
+            if (fetchSteps) {
+                data = await Promise.all(buildsModel.map(async buildModel => buildModel.toJsonWithSteps()));
+            } else {
+                data = await Promise.all(buildsModel.map(async buildModel => buildModel.toJson()));
+            }
 
             return h.response(data);
         },
@@ -41,7 +50,21 @@ module.exports = () => ({
         validate: {
             params: joi.object({
                 id: eventIdSchema
-            })
+            }),
+            query: schema.api.pagination.concat(
+                joi.object({
+                    readOnly: joi
+                        .boolean()
+                        .truthy('true')
+                        .falsy('false')
+                        .default(false),
+                    fetchSteps: joi
+                        .boolean()
+                        .truthy('true')
+                        .falsy('false')
+                        .default(true)
+                })
+            )
         }
     }
});
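For reference, a call against the updated events endpoint could look like the sketch below; the host, event id, and token are placeholders, and query handling follows the joi schema above (`fetchSteps` defaults to true, `readOnly` to false):

```javascript
// Hypothetical client call; URL, id, and token are placeholders.
const response = await fetch(
    'https://api.screwdriver.example.com/v4/events/123456/builds?fetchSteps=false&readOnly=true',
    { headers: { Authorization: 'Bearer <token>' } }
);

// Per the handler above, readOnly=true is passed to event.getBuilds() as { readOnly: true },
// and fetchSteps=false serializes each build with toJson() instead of toJsonWithSteps().
const builds = await response.json();
```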
package/plugins/jobs/README.md
CHANGED
@@ -46,6 +46,8 @@ Example payload:
 #### Get list of builds for a single job
 `GET /jobs/{id}/builds`
 
+`GET /jobs/{id}/builds?fetchSteps=false&readOnly=true`
+
 `GET /jobs/{id}/builds?page=2&count=30&sort=ascending`
 
 `GET /jobs/{id}/builds?page=2&count=30&sort=ascending&sortBy=id`
package/plugins/jobs/listBuilds.js
CHANGED
@@ -23,7 +23,7 @@ module.exports = () => ({
 
         handler: async (request, h) => {
             const factory = request.server.app.jobFactory;
-            const { sort, sortBy, page, count } = request.query;
+            const { sort, sortBy, page, count, fetchSteps, readOnly } = request.query;
 
             return factory
                 .get(request.params.id)
@@ -32,7 +32,9 @@ module.exports = () => ({
                         throw boom.notFound('Job does not exist');
                     }
 
-                    const config =
+                    const config = readOnly
+                        ? { sort, sortBy: 'createTime', readOnly: true }
+                        : { sort, sortBy: 'createTime' };
 
                     if (sortBy) {
                         config.sortBy = sortBy;
@@ -45,7 +47,13 @@ module.exports = () => ({
                     return job.getBuilds(config);
                 })
                 .then(async builds => {
-
+                    let data;
+
+                    if (fetchSteps) {
+                        data = await Promise.all(builds.map(b => b.toJsonWithSteps()));
+                    } else {
+                        data = await Promise.all(builds.map(b => b.toJson()));
+                    }
 
                     return h.response(data);
                 })
@@ -60,7 +68,20 @@ module.exports = () => ({
             params: joi.object({
                 id: jobIdSchema
             }),
-            query: schema.api.pagination
+            query: schema.api.pagination.concat(
+                joi.object({
+                    readOnly: joi
+                        .boolean()
+                        .truthy('true')
+                        .falsy('false')
+                        .default(false),
+                    fetchSteps: joi
+                        .boolean()
+                        .truthy('true')
+                        .falsy('false')
+                        .default(true)
+                })
+            )
         }
     }
});
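The jobs route gets the same treatment; a hypothetical request combining the new flags with the existing pagination and sort options (host, job id, and token are placeholders):

```javascript
// Hypothetical client call; URL, id, and token are placeholders.
const response = await fetch(
    'https://api.screwdriver.example.com/v4/jobs/42/builds?fetchSteps=false&readOnly=true&sort=ascending',
    { headers: { Authorization: 'Bearer <token>' } }
);

// Per the handler above, readOnly=true adds { readOnly: true } to the getBuilds() config
// (sortBy still defaults to createTime), and fetchSteps=false uses the lighter toJson().
const builds = await response.json();
```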
package/plugins/processHooks/README.md
ADDED
@@ -0,0 +1,33 @@
+# Process Hooks Plugin
+> Hapi processHooks plugin for the Screwdriver API
+
+## Usage
+
+### Register plugin
+
+```javascript
+const Hapi = require('@hapi/hapi');
+const server = new Hapi.Server();
+const processHooksPlugin = require('./');
+
+server.connection({ port: 3000 });
+
+server.register({
+    register: processHooksPlugin,
+    options: {}
+}, () => {
+    server.start((err) => {
+        if (err) {
+            throw err;
+        }
+        console.log('Server running at:', server.info.uri);
+    });
+});
+```
+
+### Routes
+
+#### Start pipeline events from scm webhook config
+
+`POST /processHooks`
+
package/plugins/processHooks/index.js
ADDED
@@ -0,0 +1,47 @@
+'use strict';
+
+const logger = require('screwdriver-logger');
+const { startHookEvent } = require('../webhooks/helper');
+
+/**
+ * Process Hooks API Plugin
+ * - Start pipeline events with scm webhook config via queue-service
+ * @method register
+ * @param {Hapi} server Hapi Server
+ * @param {Object} options Configuration
+ * @param {Function} next Function to call when done
+ */
+const processHooksPlugin = {
+    name: 'processHooks',
+    async register(server, options) {
+        server.route({
+            method: 'POST',
+            path: '/processHooks',
+            options: {
+                description: 'Handle process hook events',
+                notes: 'Acts on pull request, pushes, comments, etc.',
+                tags: ['api', 'processHook'],
+                auth: {
+                    strategies: ['token'],
+                    scope: ['webhook_worker']
+                },
+                plugins: {
+                    'hapi-rate-limit': {
+                        enabled: false
+                    }
+                },
+                handler: async (request, h) => {
+                    try {
+                        return await startHookEvent(request, h, request.payload);
+                    } catch (err) {
+                        logger.error(`Error starting hook events for ${request.payload.hookId}:${err}`);
+
+                        throw err;
+                    }
+                }
+            }
+        });
+    }
+};
+
+module.exports = processHooksPlugin;
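To illustrate the new route: a webhook worker holding a JWT with the `webhook_worker` scope replays the parsed SCM webhook config to the API, which hands it to `startHookEvent` from plugins/webhooks/helper.js. The URL, token, and payload fields below are placeholders; the only field the plugin itself references is `hookId`, used in the error log above:

```javascript
// Hypothetical call from a queue-service webhook worker.
const response = await fetch('https://api.screwdriver.example.com/v4/processHooks', {
    method: 'POST',
    headers: {
        Authorization: 'Bearer <webhook_worker-scoped JWT>',
        'Content-Type': 'application/json'
    },
    // Placeholder body: in practice this is the parsed webhook config that startHookEvent expects.
    body: JSON.stringify({ hookId: '<scm-delivery-id>' })
});

console.log(response.status);
```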