@taskcluster/client 88.0.1

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
package/src/download.js ADDED
@@ -0,0 +1,150 @@
+ import got, { HTTPError } from 'got';
+ import { pipeline } from 'node:stream/promises';
+ import retry from './retry.js';
+ import { HashStream, ACCEPTABLE_HASHES } from './hashstream.js';
+ import { clients } from './client.js';
+
+ // apply default retry config
+ const makeRetryCfg = ({ retries, delayFactor, randomizationFactor, maxDelay }) => ({
+   retries: retries === undefined ? 5 : retries,
+   delayFactor: delayFactor === undefined ? 100 : delayFactor,
+   randomizationFactor: randomizationFactor === undefined ? 0.25 : randomizationFactor,
+   maxDelay: maxDelay === undefined ? 30 * 1000 : maxDelay,
+ });
+
+ const s3 = async ({ url, streamFactory, retryCfg }) => {
+   return await retry(retryCfg, async (retriableError, attempt) => {
+     let contentType = 'application/binary';
+     try {
+       const src = got.stream(url, { retry: { limit: 0 } });
+       src.on('response', res => {
+         contentType = res.headers['content-type'] || contentType;
+       });
+       const dest = await streamFactory();
+       await pipeline(src, dest);
+       return contentType;
+     } catch (err) {
+       // treat non-500 HTTP responses as fatal errors, and retry everything else
+       if (err instanceof HTTPError && err.response.statusCode < 500) {
+         throw err;
+       }
+       return retriableError(err);
+     }
+   });
+ };
+
+ const getUrl = async ({ object, name, resp, streamFactory, retryCfg }) => {
+   let responseUsed = false;
+   let hashStream;
+   let contentType = 'application/binary';
+
+   await retry(retryCfg, async (retriableError, attempt) => {
+     // renew the download URL if necessary (note that we assume the object-service
+     // credentials are good for long enough)
+     if (responseUsed && new Date(resp.expires) < new Date()) {
+       resp = await object.startDownload(name, { acceptDownloadMethods: { getUrl: true } });
+       responseUsed = false;
+     }
+
+     try {
+       responseUsed = true;
+       const src = got.stream(resp.url, { retry: { limit: 0 } });
+       src.on('response', res => {
+         contentType = res.headers['content-type'] || contentType;
+       });
+       const dest = await streamFactory();
+       hashStream = new HashStream();
+       await pipeline(src, hashStream, dest);
+
+       return;
+     } catch (err) {
+       // treat non-500 HTTP responses as fatal errors, and retry everything else
+       if (err instanceof HTTPError && err.response.statusCode < 500) {
+         throw err;
+       }
+       return retriableError(err);
+     }
+   });
+
+   // now that the download is complete, check the hashes. Note that a hash
+   // verification failure does not result in a retry.
+   const observedHashes = hashStream.hashes();
+   verifyHashes(observedHashes, resp.hashes);
+
+   return contentType;
+ };
+
+ // verify that all known hashes match, and that at least one of them is an
+ // "acceptable" hash algorithm. Throws an exception on verification failure.
+ const verifyHashes = (observedHashes, expectedHashes) => {
+   let someValidAcceptableHash = false;
+   for (let algo of Object.keys(expectedHashes)) {
+     const computed = observedHashes[algo];
+     if (!computed) {
+       // ignore unknown hash algorithms
+       continue;
+     }
+     if (computed !== expectedHashes[algo]) {
+       throw new Error(`Computed ${algo} hash does not match that from object service`);
+     }
+
+     if (ACCEPTABLE_HASHES.has(algo)) {
+       someValidAcceptableHash = true;
+     }
+   }
+
+   if (!someValidAcceptableHash) {
+     throw new Error("No acceptable hash algorithm found");
+   }
+ };
+
+ export const download = async ({ name, object, streamFactory,
+   retries, delayFactor, randomizationFactor, maxDelay }) => {
+   const retryCfg = makeRetryCfg({ retries, delayFactor, randomizationFactor, maxDelay });
+
+   const acceptDownloadMethods = {
+     getUrl: true,
+   };
+
+   const resp = await object.startDownload(name, { acceptDownloadMethods });
+
+   if (resp.method === 'getUrl') {
+     return await getUrl({ object, name, resp, streamFactory, retryCfg });
+   } else {
+     throw new Error("Could not negotiate a download method");
+   }
+ };
+
+ export const downloadArtifact = async ({
+   taskId, runId, name, queue, streamFactory, retries, delayFactor, randomizationFactor, maxDelay,
+ }) => {
+   const retryCfg = makeRetryCfg({ retries, delayFactor, randomizationFactor, maxDelay });
+
+   let artifact = await (runId === undefined ? queue.latestArtifact(taskId, name) : queue.artifact(taskId, runId, name));
+
+   switch (artifact.storageType) {
+     case "reference":
+     case "s3": {
+       return await s3({ url: artifact.url, streamFactory, retryCfg });
+     }
+
+     case "object": {
+       const object = new clients.Object({
+         rootUrl: queue._options._trueRootUrl,
+         credentials: artifact.credentials,
+       });
+       return await download({ name: artifact.name, object, streamFactory, ...retryCfg });
+     }
+
+     case "error": {
+       const err = new Error(artifact.message);
+       err.reason = artifact.reason;
+       throw err;
+     }
+
+     default:
+       throw new Error(`Unsupported artifact storageType '${artifact.storageType}'`);
+   }
+ };
+
+ export default { download, downloadArtifact };
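As a rough usage sketch (not part of the package diff), `downloadArtifact` can be driven from a consumer module roughly as below. The `rootUrl`, `taskId`, and artifact name are placeholders, and `taskcluster.Queue` is assumed to be one of the client classes flattened into the default export by index.js.

```js
// Minimal sketch: download a task artifact to a local file.
// Assumptions: valid rootUrl/taskId, and Queue available on the default export.
import fs from 'node:fs';
import taskcluster, { downloadArtifact } from '@taskcluster/client';

const queue = new taskcluster.Queue({ rootUrl: 'https://tc.example.com' }); // hypothetical rootUrl

// streamFactory may be called more than once (on retry), so it must return a
// fresh writable stream each time.
const contentType = await downloadArtifact({
  taskId: 'abc123',                                   // hypothetical taskId
  name: 'public/build/target.tar.gz',                 // hypothetical artifact name
  queue,
  streamFactory: async () => fs.createWriteStream('/tmp/target.tar.gz'),
});
console.log(`downloaded with content type ${contentType}`);
```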
package/src/hashstream.js ADDED
@@ -0,0 +1,37 @@
+ import { Transform } from 'stream';
+ import { createHash } from 'crypto';
+
+ // The subset of hashes supported by HashStream which are "accepted" as per the
+ // object service's schemas.
+ export const ACCEPTABLE_HASHES = new Set(["sha256", "sha512"]);
+
+ /**
+  * A stream that hashes the bytes passing through it
+  */
+ export class HashStream extends Transform {
+   constructor() {
+     super();
+     this.sha256 = createHash('sha256');
+     this.sha512 = createHash('sha512');
+     this.bytes = 0;
+   }
+
+   _transform(chunk, enc, cb) {
+     this.sha256.update(chunk);
+     this.sha512.update(chunk);
+     this.bytes += chunk.length;
+     cb(null, chunk);
+   }
+
+   // Return the calculated hashes in a format suitable for finishUpload,
+   // checking that the content length matches the bytes hashed, if given.
+   hashes(contentLength) {
+     if (contentLength !== undefined && contentLength !== this.bytes) {
+       throw new Error(`Hashed ${this.bytes} bytes but content length is ${contentLength}`);
+     }
+     return {
+       sha256: this.sha256.digest('hex'),
+       sha512: this.sha512.digest('hex'),
+     };
+   }
+ }
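To illustrate the pass-through behaviour, the short sketch below (assuming a local input file and Node's built-in stream utilities; HashStream is an internal module, imported here by relative path) pipes bytes through a `HashStream` and then reads the digests.

```js
// Sketch: hash a file's bytes while streaming them somewhere else.
import fs from 'node:fs';
import { pipeline } from 'node:stream/promises';
import { HashStream } from './hashstream.js';

const hasher = new HashStream();
await pipeline(
  fs.createReadStream('payload.bin'),   // hypothetical input file
  hasher,
  fs.createWriteStream('/dev/null'),    // discard the bytes; only the hashes matter here
);
// hashes() optionally checks that the byte count matches an expected content length
console.log(hasher.hashes());           // { sha256: '...', sha512: '...' }
```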
package/src/index.js ADDED
@@ -0,0 +1,21 @@
+ /* This Source Code Form is subject to the terms of the Mozilla Public
+  * License, v. 2.0. If a copy of the MPL was not distributed with this
+  * file, You can obtain one at http://mozilla.org/MPL/2.0/. */
+
+ export * from './client.js';
+ export * from './utils.js';
+ export * from './upload.js';
+ export * from './download.js';
+
+ import * as client from './client.js';
+ import * as utils from './utils.js';
+ import * as upload from './upload.js';
+ import * as download from './download.js';
+
+ export default {
+   ...client,
+   ...client.clients,
+   ...utils,
+   ...upload,
+   ...download,
+ };
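Given that index.js both re-exports everything and flattens it (plus the individual client classes) into the default export, consumers should be able to use either import style interchangeably; a small sketch, assuming the published package name from the top of this diff:

```js
// Both styles resolve to the same functions per index.js above; the default
// export also exposes client classes (e.g. Queue) directly as properties.
import taskcluster, { fromNow, upload, download } from '@taskcluster/client';

console.log(taskcluster.fromNow === fromNow); // true
```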
package/src/parsetime.js ADDED
@@ -0,0 +1,37 @@
+ // Regular expression matching:
+ // A years B months C weeks D days E hours F minutes G seconds
+ let timeExp = new RegExp([
+   '^(\\s*(-|\\+))?',
+   '(\\s*(\\d+)\\s*y((ears?)|r)?)?',
+   '(\\s*(\\d+)\\s*mo(nths?)?)?',
+   '(\\s*(\\d+)\\s*w((eeks?)|k)?)?',
+   '(\\s*(\\d+)\\s*d(ays?)?)?',
+   '(\\s*(\\d+)\\s*h((ours?)|r)?)?',
+   '(\\s*(\\d+)\\s*m(in(utes?)?)?)?',
+   '(\\s*(\\d+)\\s*s(ec(onds?)?)?)?',
+   '\\s*$',
+ ].join(''), 'i');
+
+ /** Parse time string */
+ let parseTime = function(str) {
+   // Parse the string
+   let match = timeExp.exec(str || '');
+   if (!match) {
+     throw new Error('String: \'' + str + '\' isn\'t a time expression');
+   }
+   // Negate if needed
+   let neg = match[2] === '-' ? -1 : 1;
+   // Return parsed values
+   return {
+     years: parseInt(match[4] || 0, 10) * neg,
+     months: parseInt(match[8] || 0, 10) * neg,
+     weeks: parseInt(match[11] || 0, 10) * neg,
+     days: parseInt(match[15] || 0, 10) * neg,
+     hours: parseInt(match[18] || 0, 10) * neg,
+     minutes: parseInt(match[22] || 0, 10) * neg,
+     seconds: parseInt(match[25] || 0, 10) * neg,
+   };
+ };
+
+ // Export parseTime
+ export default parseTime;
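For illustration, here is roughly what `parseTime` returns for typical inputs (an internal module, so imported by relative path):

```js
// Sketch: parseTime turns a relative-time string into per-unit integers.
import parseTime from './parsetime.js';

console.log(parseTime('1 day 2 hours 3 minutes'));
// => { years: 0, months: 0, weeks: 0, days: 1, hours: 2, minutes: 3, seconds: 0 }

console.log(parseTime('-30 min'));
// minutes is -30 here; a leading '-' negates every field
```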
package/src/retry.js ADDED
@@ -0,0 +1,39 @@
+ /**
+  * Call the given function based on the given retry configuration. Each call to the
+  * function is given a `retriableError` callback. For retriable failures, call this
+  * callback with an Error object (in case retries are exhausted) and return. For
+  * fatal errors, simply throw the error as usual. The second argument is the attempt number.
+  *
+  * Note that as per the existing API, `retries` is the number of retries after the first
+  * try; that is, a `retries` value of 3 means that `func` will be called 4 times.
+  */
+ export default async ({ retries, delayFactor, randomizationFactor, maxDelay }, func) => {
+   let attempt = 0;
+
+   while (true) {
+     attempt += 1;
+
+     let retriableError = null;
+
+     const rv = await func(err => retriableError = err, attempt);
+     if (!retriableError) {
+       // success!
+       return rv;
+     }
+
+     if (attempt > retries) {
+       throw retriableError;
+     }
+
+     // Sleep for 2 * delayFactor on the first attempt, and 2x as long
+     // each time thereafter
+     let delay = Math.pow(2, attempt) * delayFactor;
+     // Apply randomization factor
+     let rf = randomizationFactor;
+     delay *= Math.random() * 2 * rf + 1 - rf;
+     // Always limit with a maximum delay
+     delay = Math.min(delay, maxDelay);
+     // Sleep before looping again
+     await new Promise(accept => setTimeout(accept, delay));
+   }
+ };
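The calling convention documented above can be sketched as follows (the URL is a placeholder, and `fetch` is assumed from Node 18+; this module is internal, so it is imported by relative path): retriable failures are reported through the callback and then returned, fatal failures are thrown.

```js
// Sketch: retry a flaky HTTP GET with exponential backoff.
import retry from './retry.js';

const body = await retry(
  { retries: 3, delayFactor: 100, randomizationFactor: 0.25, maxDelay: 30 * 1000 },
  async (retriableError, attempt) => {
    try {
      const res = await fetch('https://example.com/resource'); // hypothetical URL
      if (res.status >= 500) {
        // server error: worth retrying
        return retriableError(new Error(`attempt ${attempt} failed: ${res.status}`));
      }
      if (!res.ok) {
        // other HTTP errors are fatal: throw as usual
        throw new Error(`request failed: ${res.status}`);
      }
      return await res.text();
    } catch (err) {
      // network-level failures are also retriable
      return retriableError(err);
    }
  });
```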
package/src/upload.js ADDED
@@ -0,0 +1,101 @@
+ import got, { HTTPError } from 'got';
+ import { slugid } from './utils.js';
+ import retry from './retry.js';
+ import { HashStream } from './hashstream.js';
+
+ const DATA_INLINE_MAX_SIZE = 8192;
+
+ const putUrl = async ({ streamFactory, contentLength, uploadMethod, retryCfg }) => {
+   const { url, headers } = uploadMethod.putUrl;
+   await retry(retryCfg, async (retriableError, attempt) => {
+     try {
+       await got.put(url, {
+         headers,
+         retry: { limit: 0 }, // use our own retry logic
+         body: await streamFactory(),
+       });
+     } catch (err) {
+       // treat non-500 HTTP responses as fatal errors, and retry everything else
+       if (err instanceof HTTPError && err.response.statusCode < 500) {
+         throw err;
+       }
+       return retriableError(err);
+     }
+   });
+ };
+
+ const readFullStream = stream => {
+   const chunks = [];
+   return new Promise((accept, reject) => {
+     stream.on('data', chunk => chunks.push(chunk));
+     stream.on('error', err => reject(err));
+     stream.on('end', () => accept(Buffer.concat(chunks)));
+   });
+ };
+
+ export const upload = async ({
+   projectId,
+   name,
+   contentType,
+   contentLength,
+   expires,
+   object,
+   streamFactory,
+   retries,
+   delayFactor,
+   randomizationFactor,
+   maxDelay,
+   uploadId,
+ }) => {
+   if (!uploadId) {
+     uploadId = slugid();
+   }
+
+   // set up to hash streams as we read them
+   let hashStream;
+   const hashStreamFactory = async () => {
+     hashStream = new HashStream();
+     return (await streamFactory()).pipe(hashStream);
+   };
+
+   // apply default retry config
+   const retryCfg = {
+     retries: retries === undefined ? 5 : retries,
+     delayFactor: delayFactor === undefined ? 100 : delayFactor,
+     randomizationFactor: randomizationFactor === undefined ? 0.25 : randomizationFactor,
+     maxDelay: maxDelay === undefined ? 30 * 1000 : maxDelay,
+   };
+
+   const proposedUploadMethods = {};
+
+   if (contentLength < DATA_INLINE_MAX_SIZE) {
+     // get the (small) data as a buffer to include in the request
+     const data = await readFullStream(await hashStreamFactory());
+
+     proposedUploadMethods.dataInline = {
+       contentType,
+       objectData: data.toString('base64'),
+     };
+   }
+
+   proposedUploadMethods.putUrl = {
+     contentType,
+     contentLength,
+   };
+
+   const res = await object.createUpload(name, { expires, projectId, uploadId, proposedUploadMethods });
+
+   if (res.uploadMethod.dataInline) {
+     // nothing to do
+   } else if (res.uploadMethod.putUrl) {
+     await putUrl({ streamFactory: hashStreamFactory, contentLength, uploadMethod: res.uploadMethod, retryCfg });
+   } else {
+     throw new Error("Could not negotiate an upload method");
+   }
+
+   const hashes = hashStream.hashes(contentLength);
+
+   await object.finishUpload(name, { projectId, uploadId, hashes });
+ };
+
+ export default upload;
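A usage sketch for `upload`, assuming an Object service client and a local file; the `rootUrl`, credentials, `projectId`, and object name are placeholders, and `taskcluster.Object` is assumed to be one of the client classes flattened into the default export.

```js
// Sketch: upload a local file to the object service.
import fs from 'node:fs';
import taskcluster, { upload, fromNowJSON } from '@taskcluster/client';

const object = new taskcluster.Object({
  rootUrl: 'https://tc.example.com',                    // hypothetical rootUrl
  credentials: { clientId: '...', accessToken: '...' }, // hypothetical credentials
});

const filename = 'payload.bin';
await upload({
  projectId: 'my-project',                              // hypothetical projectId
  name: 'some/object/name',                             // hypothetical object name
  contentType: 'application/octet-stream',
  contentLength: fs.statSync(filename).size,
  expires: fromNowJSON('1 day'),
  object,
  // called again if a retry is needed, so it must return a fresh stream each time
  streamFactory: async () => fs.createReadStream(filename),
});
```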
package/src/utils.js ADDED
@@ -0,0 +1,53 @@
+ import parseTime from './parsetime.js';
+ import sluglib from 'slugid';
+
+ /**
+  * Create a Date object offset = '1d 2h 3min' into the future
+  *
+  * Offset format: The argument `offset` (if given) is a string on the form
+  * `1 day 2 hours 3 minutes`
+  * where specification of days, hours and minutes is optional. You can also use
+  * the shorthand `1d2h3min`; it's fairly tolerant of different spellings and
+  * whitespace, but only really meant to be used with constants.
+  */
+ export const fromNow = function(offset, reference) {
+   if (reference === undefined) {
+     reference = new Date();
+   }
+   offset = parseTime(offset || '');
+
+   offset.days += 30 * offset.months;
+   offset.days += 365 * offset.years;
+
+   let retval = new Date(
+     reference.getTime()
+     // + offset.years * 365 * 24 * 60 * 60 * 1000
+     // + offset.month * 30 * 24 * 60 * 60 * 1000
+     + offset.weeks * 7 * 24 * 60 * 60 * 1000
+     + offset.days * 24 * 60 * 60 * 1000
+     + offset.hours * 60 * 60 * 1000
+     + offset.minutes * 60 * 1000
+     + offset.seconds * 1000,
+   );
+   return retval;
+ };
+
+ /**
+  * Create an ISO 8601 time stamp offset = '1d 2h 3min' into the future
+  *
+  * This returns a time stamp in the format expected by Taskcluster,
+  * compatible with Date.toJSON() from JavaScript. These time stamps are
+  * strings with UTC as the timezone.
+  *
+  * Offset format: The argument `offset` (if given) is a string on the form
+  * `1 day 2 hours 3 minutes`
+  * where specification of days, hours and minutes is optional. You can also use
+  * the shorthand `1d2h3min`; it's fairly tolerant of different spellings and
+  * whitespace, but only really meant to be used with constants.
+  */
+ export const fromNowJSON = function(offset, reference) {
+   return fromNow(offset, reference).toJSON();
+ };
+
+ // Export function to generate _nice_ slugids
+ export const slugid = () => sluglib.nice();
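A few illustrative calls for the offset format accepted by `fromNow` / `fromNowJSON`; the reference date is fixed here only to make the output deterministic.

```js
// Sketch: relative timestamps from a fixed reference date.
import { fromNow, fromNowJSON } from '@taskcluster/client';

const ref = new Date('2024-01-01T00:00:00Z');
console.log(fromNow('1 day 2 hours 3 minutes', ref)); // 2024-01-02T02:03:00.000Z
console.log(fromNowJSON('2 weeks', ref));             // "2024-01-15T00:00:00.000Z"
console.log(fromNow('- 30 min', ref));                // 2023-12-31T23:30:00.000Z
```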