screwdriver-api 7.0.211 → 7.0.213
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
@@ -863,7 +863,7 @@ We will introduce new commands to validate, publish and tag pipeline templates b
 
 CLI can be installed using below command:
 
-``` $ npm install
+``` $ npm install screwdriver-template-main ```
 
 
 ***1. Validate***
package/package.json CHANGED
package/plugins/builds/README.md CHANGED
@@ -43,6 +43,10 @@ Arguments:
 #### List the build step by status
 `GET /builds/{id}/steps`
 
+Arguments:
+
+* `status` - Status to filter by
+
 `GET /builds/{id}/steps?status=active`
 
 `GET /builds/{id}/steps?status=failure`
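For reference, a minimal client sketch of the new `status` filter documented above, assuming the API is served under `/v4` with a bearer token; the host, build id, and token below are placeholders, not values from this diff:

```js
// Hypothetical client sketch (not part of this package): list only failed steps.
const got = require('got');

async function listFailedSteps() {
    // Placeholder host, build id and token; substitute real values.
    const steps = await got('https://api.screwdriver.example/v4/builds/12345/steps?status=failure', {
        headers: { Authorization: 'Bearer <token>' }
    }).json();

    console.log(steps.map(step => step.name));
}

listFailedSteps().catch(console.error);
```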
@@ -62,6 +66,18 @@ Example payload:
 }
 ```
 
+#### Gets all build artifacts as ZIP file
+`GET /builds/{id}/artifacts`
+
+#### Gets a build artifact or directory as ZIP file
+`GET /builds/{id}/artifacts/{name*}`
+
+Arguments:
+
+* `type` - Return type for build artifact, `download` or `preview`
+
+`GET /builds/{id}/artifacts/{name*}?type=preview`
+
 #### Get build statuses
 `GET /builds/statuses`
 
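The two artifact endpoints added above respond with a streamed ZIP archive. A hedged sketch of saving that archive to disk with `got`, again with a placeholder host, build id, and token:

```js
// Hypothetical client sketch (not part of this package): save all build artifacts as a ZIP.
const fs = require('fs');
const { pipeline } = require('stream/promises');
const got = require('got');

async function downloadArtifactsZip() {
    // Placeholder host, build id and token; substitute real values.
    await pipeline(
        got.stream('https://api.screwdriver.example/v4/builds/12345/artifacts', {
            headers: { Authorization: 'Bearer <token>' }
        }),
        fs.createWriteStream('artifacts.zip')
    );
}

downloadArtifactsZip().catch(console.error);
```

The same pattern applies to `GET /builds/{id}/artifacts/{name*}` with `?type=download` or `?type=preview` for a single artifact or directory.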
@@ -3,8 +3,11 @@
 const boom = require('@hapi/boom');
 const joi = require('joi');
 const jwt = require('jsonwebtoken');
-const request = require('
+const request = require('got');
 const schema = require('screwdriver-data-schema');
+const archiver = require('archiver');
+const { PassThrough } = require('stream');
+const logger = require('screwdriver-logger');
 const { v4: uuidv4 } = require('uuid');
 const idSchema = schema.models.build.base.extract('id');
 const artifactSchema = joi.string().label('Artifact Name');
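The swapped-in `got` client is used in two shapes by the route code in the next hunk: a promise-style call resolved as text (for the manifest) and a stream-style call (for individual files). A small illustration of those two call shapes, with placeholder URLs rather than real store endpoints:

```js
// Illustration only: the two `got` call shapes the new route code relies on.
const request = require('got');

(async () => {
    // Promise-style request, resolved to the response body as text.
    const manifest = await request({ url: 'https://store.example/manifest.txt', method: 'GET' }).text();
    console.log(manifest.trim().split('\n'));

    // Stream-style request; the route appends streams like this to a ZIP archive
    // instead of buffering whole files in memory.
    request.stream('https://store.example/some/file').pipe(process.stdout);
})().catch(console.error);
```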
@@ -45,41 +48,113 @@ module.exports = config => ({
         return canAccessPipeline(credentials, event.pipelineId, 'pull', req.server.app);
     })
     .then(async () => {
-
+        // Directory should fetch manifest and
+        // gather all files that belong to that directory
+        if (artifact.endsWith('/')) {
+            try {
+                const token = jwt.sign({
+                    buildId, artifact, scope: ['user']
+                }, config.authConfig.jwtPrivateKey, {
+                    algorithm: 'RS256',
+                    expiresIn: '10m',
+                    jwtid: uuidv4()
+                });
+                const baseUrl = `${config.ecosystem.store}/v1/builds/${buildId}/ARTIFACTS`;
+                // Fetch the manifest
+                const manifest = await request({
+                    url: `${baseUrl}/manifest.txt?token=${token}`,
+                    method: 'GET'
+                }).text();
+                const manifestArray = manifest.trim().split('\n');
+                const directoryArray = manifestArray.filter(f => f.startsWith(`./${artifact}`));
 
-
-
-
-            algorithm: 'RS256',
-            expiresIn: '5s',
-            jwtid: uuidv4()
-        });
+                // Create a stream and set up archiver
+                const archive = archiver('zip', { zlib: { level: 9 } });
+                const passThrough = new PassThrough();
 
-
-
+                // Handle archiver errors
+                archive.on('error', (err) => {
+                    logger.error('Archiver error:', err);
+                    passThrough.emit('error', err); // Propagate the error to the PassThrough stream
+                });
 
-
+                // Handle passThrough errors
+                passThrough.on('error', (err) => {
+                    logger.error('PassThrough stream error:', err);
+                });
 
-
+                // Pipe the archive to PassThrough
+                archive.pipe(passThrough);
 
-
-
-
-
-
-
-
-
-
+                // Fetch and append the directory files
+                for (const file of directoryArray) {
+                    if (file) {
+                        const fileStream = request.stream(`${baseUrl}/${file}?token=${token}&type=download`);
+
+                        // Handle errors from file streaming
+                        fileStream.on('error', (err) => {
+                            logger.error(`Error downloading file: ${file}`, err);
+                            archive.emit('error', err); // Emit error to stop the archive process
+                        });
+
+                        // Append the file stream to the archive
+                        archive.append(fileStream, { name: file });
+                    }
                 }
+
+                // Finalize the archive once all files are appended
+                archive.finalize();
+
+                // Create a zip name from the directory structure
+                const zipName = artifact.split('/').slice(-2)[0];
+
+                // Respond with the PassThrough stream (which is readable by Hapi)
+                return h.response(passThrough)
+                    .type('application/zip')
+                    .header('Content-Disposition', `attachment; filename="${zipName}_dir.zip"`);
+            } catch (err) {
+                // Catch errors related to the manifest request or other async issues
+                logger.error('Error while streaming artifact files:', err);
+
+                return h.response({ error: 'Failed to generate ZIP file' }).code(500);
+            }
+        } else {
+            const token = jwt.sign({
+                buildId, artifact, scope: ['user']
+            }, config.authConfig.jwtPrivateKey, {
+                algorithm: 'RS256',
+                expiresIn: '5s',
+                jwtid: uuidv4()
             });
-
-        response.headers['content-type'] = headers['content-type'];
-        response.headers['content-disposition'] = headers['content-disposition'];
-        response.headers['content-length'] = headers['content-length'];
+            const encodedArtifact = encodeURIComponent(artifact);
 
-
-
+            // Fetch single file
+            let baseUrl = `${config.ecosystem.store}/v1/builds/`
+                + `${buildId}/ARTIFACTS/${encodedArtifact}?token=${token}&type=${req.query.type}`;
+
+            const requestStream = request.stream(baseUrl);
+
+            let response = h.response(requestStream);
+
+            return new Promise((resolve, reject) => {
+                requestStream.on('response', response => {
+                    resolve(response.headers);
+                });
+                requestStream.on('error', err => {
+                    if (err.response && err.response.statusCode === 404) {
+                        reject(boom.notFound('File not found'));
+                    } else {
+                        reject(err);
+                    }
+                });
+            }).then(headers => {
+                response.headers['content-type'] = headers['content-type'];
+                response.headers['content-disposition'] = headers['content-disposition'];
+                response.headers['content-length'] = headers['content-length'];
+
+                return response;
+            });
+        }
     })
     .catch(err => {
         throw err;