@actions/artifact 1.1.2 → 2.0.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/README.md +111 -174
- package/lib/artifact.d.ts +6 -0
- package/lib/artifact.js +23 -0
- package/lib/artifact.js.map +1 -0
- package/lib/generated/google/protobuf/timestamp.d.ts +145 -0
- package/lib/generated/google/protobuf/timestamp.js +136 -0
- package/lib/generated/google/protobuf/timestamp.js.map +1 -0
- package/lib/generated/google/protobuf/wrappers.d.ts +307 -0
- package/lib/generated/google/protobuf/wrappers.js +609 -0
- package/lib/generated/google/protobuf/wrappers.js.map +1 -0
- package/lib/generated/index.d.ts +4 -0
- package/lib/generated/index.js +21 -0
- package/lib/generated/index.js.map +1 -0
- package/lib/generated/results/api/v1/artifact.d.ts +286 -0
- package/lib/generated/results/api/v1/artifact.js +588 -0
- package/lib/generated/results/api/v1/artifact.js.map +1 -0
- package/lib/generated/results/api/v1/artifact.twirp.d.ts +43 -0
- package/lib/generated/results/api/v1/artifact.twirp.js +416 -0
- package/lib/generated/results/api/v1/artifact.twirp.js.map +1 -0
- package/lib/internal/client.d.ts +61 -0
- package/lib/internal/client.js +121 -0
- package/lib/internal/client.js.map +1 -0
- package/lib/internal/download/download-artifact.d.ts +3 -0
- package/lib/internal/download/download-artifact.js +168 -0
- package/lib/internal/download/download-artifact.js.map +1 -0
- package/lib/internal/find/get-artifact.d.ts +3 -0
- package/lib/internal/find/get-artifact.js +118 -0
- package/lib/internal/find/get-artifact.js.map +1 -0
- package/lib/internal/find/list-artifacts.d.ts +3 -0
- package/lib/internal/find/list-artifacts.js +139 -0
- package/lib/internal/find/list-artifacts.js.map +1 -0
- package/lib/internal/find/retry-options.d.ts +7 -0
- package/lib/internal/find/retry-options.js +50 -0
- package/lib/internal/find/retry-options.js.map +1 -0
- package/lib/internal/shared/artifact-twirp-client.d.ts +6 -0
- package/lib/internal/shared/artifact-twirp-client.js +153 -0
- package/lib/internal/shared/artifact-twirp-client.js.map +1 -0
- package/lib/internal/shared/config.d.ts +6 -0
- package/lib/internal/shared/config.js +55 -0
- package/lib/internal/shared/config.js.map +1 -0
- package/lib/internal/shared/errors.d.ts +22 -0
- package/lib/internal/shared/errors.js +70 -0
- package/lib/internal/shared/errors.js.map +1 -0
- package/lib/internal/shared/interfaces.d.ts +136 -0
- package/lib/internal/{contracts.js → shared/interfaces.js} +1 -1
- package/lib/internal/shared/interfaces.js.map +1 -0
- package/lib/internal/shared/user-agent.d.ts +4 -0
- package/lib/internal/shared/user-agent.js +13 -0
- package/lib/internal/shared/user-agent.js.map +1 -0
- package/lib/internal/shared/util.d.ts +5 -0
- package/lib/internal/shared/util.js +81 -0
- package/lib/internal/shared/util.js.map +1 -0
- package/lib/internal/upload/blob-upload.d.ts +12 -0
- package/lib/internal/upload/blob-upload.js +87 -0
- package/lib/internal/upload/blob-upload.js.map +1 -0
- package/lib/internal/upload/path-and-artifact-name-validation.d.ts +8 -0
- package/lib/internal/{path-and-artifact-name-validation.js → upload/path-and-artifact-name-validation.js} +11 -11
- package/lib/internal/upload/path-and-artifact-name-validation.js.map +1 -0
- package/lib/internal/upload/retention.d.ts +2 -0
- package/lib/internal/upload/retention.js +54 -0
- package/lib/internal/upload/retention.js.map +1 -0
- package/lib/internal/upload/upload-artifact.d.ts +2 -0
- package/lib/internal/upload/upload-artifact.js +103 -0
- package/lib/internal/upload/upload-artifact.js.map +1 -0
- package/lib/internal/upload/upload-zip-specification.d.ts +21 -0
- package/lib/internal/{upload-specification.js → upload/upload-zip-specification.js} +44 -36
- package/lib/internal/upload/upload-zip-specification.js.map +1 -0
- package/lib/internal/upload/zip.d.ts +9 -0
- package/lib/internal/upload/zip.js +113 -0
- package/lib/internal/upload/zip.js.map +1 -0
- package/package.json +25 -11
- package/lib/artifact-client.d.ts +0 -10
- package/lib/artifact-client.js +0 -12
- package/lib/artifact-client.js.map +0 -1
- package/lib/internal/artifact-client.d.ts +0 -41
- package/lib/internal/artifact-client.js +0 -182
- package/lib/internal/artifact-client.js.map +0 -1
- package/lib/internal/config-variables.d.ts +0 -12
- package/lib/internal/config-variables.js +0 -77
- package/lib/internal/config-variables.js.map +0 -1
- package/lib/internal/contracts.d.ts +0 -67
- package/lib/internal/contracts.js.map +0 -1
- package/lib/internal/crc64.d.ts +0 -21
- package/lib/internal/crc64.js +0 -303
- package/lib/internal/crc64.js.map +0 -1
- package/lib/internal/download-http-client.d.ts +0 -39
- package/lib/internal/download-http-client.js +0 -296
- package/lib/internal/download-http-client.js.map +0 -1
- package/lib/internal/download-options.d.ts +0 -7
- package/lib/internal/download-options.js +0 -3
- package/lib/internal/download-options.js.map +0 -1
- package/lib/internal/download-response.d.ts +0 -10
- package/lib/internal/download-response.js +0 -3
- package/lib/internal/download-response.js.map +0 -1
- package/lib/internal/download-specification.d.ts +0 -19
- package/lib/internal/download-specification.js +0 -78
- package/lib/internal/download-specification.js.map +0 -1
- package/lib/internal/http-manager.d.ts +0 -12
- package/lib/internal/http-manager.js +0 -32
- package/lib/internal/http-manager.js.map +0 -1
- package/lib/internal/path-and-artifact-name-validation.d.ts +0 -8
- package/lib/internal/path-and-artifact-name-validation.js.map +0 -1
- package/lib/internal/requestUtils.d.ts +0 -3
- package/lib/internal/requestUtils.js +0 -92
- package/lib/internal/requestUtils.js.map +0 -1
- package/lib/internal/status-reporter.d.ts +0 -21
- package/lib/internal/status-reporter.js +0 -52
- package/lib/internal/status-reporter.js.map +0 -1
- package/lib/internal/upload-gzip.d.ts +0 -14
- package/lib/internal/upload-gzip.js +0 -147
- package/lib/internal/upload-gzip.js.map +0 -1
- package/lib/internal/upload-http-client.d.ts +0 -48
- package/lib/internal/upload-http-client.js +0 -415
- package/lib/internal/upload-http-client.js.map +0 -1
- package/lib/internal/upload-options.d.ts +0 -34
- package/lib/internal/upload-options.js +0 -3
- package/lib/internal/upload-options.js.map +0 -1
- package/lib/internal/upload-response.d.ts +0 -19
- package/lib/internal/upload-response.js +0 -3
- package/lib/internal/upload-response.js.map +0 -1
- package/lib/internal/upload-specification.d.ts +0 -11
- package/lib/internal/upload-specification.js.map +0 -1
- package/lib/internal/utils.d.ts +0 -71
- package/lib/internal/utils.js +0 -292
- package/lib/internal/utils.js.map +0 -1
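Taken together, the list above shows the v1 REST-style upload/download pipeline (upload-http-client, download-http-client, http-manager, crc64, upload-gzip, status-reporter) being removed and replaced in 2.0.0 by a generated Twirp client (lib/generated/**, shared/artifact-twirp-client), a zip-then-blob upload path (upload/zip, upload/blob-upload) and new find/download modules, all exposed through lib/internal/client. The sketch below illustrates the kind of public surface this layout implies; the method names, signatures and return shapes are assumptions inferred from the new upload-artifact, download-artifact, get-artifact and list-artifacts modules, not text quoted from the package.

```ts
// Sketch only: assumes @actions/artifact 2.0.0 default-exports a client whose
// methods mirror the new lib/internal/{upload,download,find} modules.
// Names, parameters and return fields here are illustrative assumptions.
import artifact from '@actions/artifact'

async function roundTrip(): Promise<void> {
  // upload/upload-artifact.js replaces the v1 chunked UploadHttpClient:
  // files are zipped locally and pushed as a single blob.
  const upload = await artifact.uploadArtifact(
    'my-artifact',                      // artifact name
    ['dist/app.js', 'dist/app.js.map'], // files to include
    'dist',                             // root directory stripped from each path
    {retentionDays: 7}
  )
  console.log(`uploaded artifact ${upload.id} (${upload.size} bytes)`)

  // find/list-artifacts.js and download/download-artifact.js replace the v1
  // DownloadHttpClient; artifacts appear to be addressed by id rather than name.
  const {artifacts} = await artifact.listArtifacts()
  for (const a of artifacts) {
    await artifact.downloadArtifact(a.id, {path: `downloads/${a.name}`})
  }
}

roundTrip().catch(err => console.error(err))
```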
package/lib/internal/upload-http-client.js
DELETED
@@ -1,415 +0,0 @@
-"use strict";
-var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) {
-    if (k2 === undefined) k2 = k;
-    var desc = Object.getOwnPropertyDescriptor(m, k);
-    if (!desc || ("get" in desc ? !m.__esModule : desc.writable || desc.configurable)) {
-      desc = { enumerable: true, get: function() { return m[k]; } };
-    }
-    Object.defineProperty(o, k2, desc);
-}) : (function(o, m, k, k2) {
-    if (k2 === undefined) k2 = k;
-    o[k2] = m[k];
-}));
-var __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? (function(o, v) {
-    Object.defineProperty(o, "default", { enumerable: true, value: v });
-}) : function(o, v) {
-    o["default"] = v;
-});
-var __importStar = (this && this.__importStar) || function (mod) {
-    if (mod && mod.__esModule) return mod;
-    var result = {};
-    if (mod != null) for (var k in mod) if (k !== "default" && Object.prototype.hasOwnProperty.call(mod, k)) __createBinding(result, mod, k);
-    __setModuleDefault(result, mod);
-    return result;
-};
-var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) {
-    function adopt(value) { return value instanceof P ? value : new P(function (resolve) { resolve(value); }); }
-    return new (P || (P = Promise))(function (resolve, reject) {
-        function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } }
-        function rejected(value) { try { step(generator["throw"](value)); } catch (e) { reject(e); } }
-        function step(result) { result.done ? resolve(result.value) : adopt(result.value).then(fulfilled, rejected); }
-        step((generator = generator.apply(thisArg, _arguments || [])).next());
-    });
-};
-Object.defineProperty(exports, "__esModule", { value: true });
-exports.UploadHttpClient = void 0;
-const fs = __importStar(require("fs"));
-const core = __importStar(require("@actions/core"));
-const tmp = __importStar(require("tmp-promise"));
-const stream = __importStar(require("stream"));
-const utils_1 = require("./utils");
-const config_variables_1 = require("./config-variables");
-const util_1 = require("util");
-const url_1 = require("url");
-const perf_hooks_1 = require("perf_hooks");
-const status_reporter_1 = require("./status-reporter");
-const http_client_1 = require("@actions/http-client");
-const http_manager_1 = require("./http-manager");
-const upload_gzip_1 = require("./upload-gzip");
-const requestUtils_1 = require("./requestUtils");
-const stat = (0, util_1.promisify)(fs.stat);
-class UploadHttpClient {
-    constructor() {
-        this.uploadHttpManager = new http_manager_1.HttpManager((0, config_variables_1.getUploadFileConcurrency)(), '@actions/artifact-upload');
-        this.statusReporter = new status_reporter_1.StatusReporter(10000);
-    }
-    /**
-     * Creates a file container for the new artifact in the remote blob storage/file service
-     * @param {string} artifactName Name of the artifact being created
-     * @returns The response from the Artifact Service if the file container was successfully created
-     */
-    createArtifactInFileContainer(artifactName, options) {
-        return __awaiter(this, void 0, void 0, function* () {
-            const parameters = {
-                Type: 'actions_storage',
-                Name: artifactName
-            };
-            // calculate retention period
-            if (options && options.retentionDays) {
-                const maxRetentionStr = (0, config_variables_1.getRetentionDays)();
-                parameters.RetentionDays = (0, utils_1.getProperRetention)(options.retentionDays, maxRetentionStr);
-            }
-            const data = JSON.stringify(parameters, null, 2);
-            const artifactUrl = (0, utils_1.getArtifactUrl)();
-            // use the first client from the httpManager, `keep-alive` is not used so the connection will close immediately
-            const client = this.uploadHttpManager.getClient(0);
-            const headers = (0, utils_1.getUploadHeaders)('application/json', false);
-            // Extra information to display when a particular HTTP code is returned
-            // If a 403 is returned when trying to create a file container, the customer has exceeded
-            // their storage quota so no new artifact containers can be created
-            const customErrorMessages = new Map([
-                [
-                    http_client_1.HttpCodes.Forbidden,
-                    (0, config_variables_1.isGhes)()
-                        ? 'Please reference [Enabling GitHub Actions for GitHub Enterprise Server](https://docs.github.com/en/enterprise-server@3.8/admin/github-actions/enabling-github-actions-for-github-enterprise-server) to ensure Actions storage is configured correctly.'
-                        : 'Artifact storage quota has been hit. Unable to upload any new artifacts'
-                ],
-                [
-                    http_client_1.HttpCodes.BadRequest,
-                    `The artifact name ${artifactName} is not valid. Request URL ${artifactUrl}`
-                ]
-            ]);
-            const response = yield (0, requestUtils_1.retryHttpClientRequest)('Create Artifact Container', () => __awaiter(this, void 0, void 0, function* () { return client.post(artifactUrl, data, headers); }), customErrorMessages);
-            const body = yield response.readBody();
-            return JSON.parse(body);
-        });
-    }
-    /**
-     * Concurrently upload all of the files in chunks
-     * @param {string} uploadUrl Base Url for the artifact that was created
-     * @param {SearchResult[]} filesToUpload A list of information about the files being uploaded
-     * @returns The size of all the files uploaded in bytes
-     */
-    uploadArtifactToFileContainer(uploadUrl, filesToUpload, options) {
-        return __awaiter(this, void 0, void 0, function* () {
-            const FILE_CONCURRENCY = (0, config_variables_1.getUploadFileConcurrency)();
-            const MAX_CHUNK_SIZE = (0, config_variables_1.getUploadChunkSize)();
-            core.debug(`File Concurrency: ${FILE_CONCURRENCY}, and Chunk Size: ${MAX_CHUNK_SIZE}`);
-            const parameters = [];
-            // by default, file uploads will continue if there is an error unless specified differently in the options
-            let continueOnError = true;
-            if (options) {
-                if (options.continueOnError === false) {
-                    continueOnError = false;
-                }
-            }
-            // prepare the necessary parameters to upload all the files
-            for (const file of filesToUpload) {
-                const resourceUrl = new url_1.URL(uploadUrl);
-                resourceUrl.searchParams.append('itemPath', file.uploadFilePath);
-                parameters.push({
-                    file: file.absoluteFilePath,
-                    resourceUrl: resourceUrl.toString(),
-                    maxChunkSize: MAX_CHUNK_SIZE,
-                    continueOnError
-                });
-            }
-            const parallelUploads = [...new Array(FILE_CONCURRENCY).keys()];
-            const failedItemsToReport = [];
-            let currentFile = 0;
-            let completedFiles = 0;
-            let uploadFileSize = 0;
-            let totalFileSize = 0;
-            let abortPendingFileUploads = false;
-            this.statusReporter.setTotalNumberOfFilesToProcess(filesToUpload.length);
-            this.statusReporter.start();
-            // only allow a certain amount of files to be uploaded at once, this is done to reduce potential errors
-            yield Promise.all(parallelUploads.map((index) => __awaiter(this, void 0, void 0, function* () {
-                while (currentFile < filesToUpload.length) {
-                    const currentFileParameters = parameters[currentFile];
-                    currentFile += 1;
-                    if (abortPendingFileUploads) {
-                        failedItemsToReport.push(currentFileParameters.file);
-                        continue;
-                    }
-                    const startTime = perf_hooks_1.performance.now();
-                    const uploadFileResult = yield this.uploadFileAsync(index, currentFileParameters);
-                    if (core.isDebug()) {
-                        core.debug(`File: ${++completedFiles}/${filesToUpload.length}. ${currentFileParameters.file} took ${(perf_hooks_1.performance.now() - startTime).toFixed(3)} milliseconds to finish upload`);
-                    }
-                    uploadFileSize += uploadFileResult.successfulUploadSize;
-                    totalFileSize += uploadFileResult.totalSize;
-                    if (uploadFileResult.isSuccess === false) {
-                        failedItemsToReport.push(currentFileParameters.file);
-                        if (!continueOnError) {
-                            // fail fast
-                            core.error(`aborting artifact upload`);
-                            abortPendingFileUploads = true;
-                        }
-                    }
-                    this.statusReporter.incrementProcessedCount();
-                }
-            })));
-            this.statusReporter.stop();
-            // done uploading, safety dispose all connections
-            this.uploadHttpManager.disposeAndReplaceAllClients();
-            core.info(`Total size of all the files uploaded is ${uploadFileSize} bytes`);
-            return {
-                uploadSize: uploadFileSize,
-                totalSize: totalFileSize,
-                failedItems: failedItemsToReport
-            };
-        });
-    }
-    /**
-     * Asynchronously uploads a file. The file is compressed and uploaded using GZip if it is determined to save space.
-     * If the upload file is bigger than the max chunk size it will be uploaded via multiple calls
-     * @param {number} httpClientIndex The index of the httpClient that is being used to make all of the calls
-     * @param {UploadFileParameters} parameters Information about the file that needs to be uploaded
-     * @returns The size of the file that was uploaded in bytes along with any failed uploads
-     */
-    uploadFileAsync(httpClientIndex, parameters) {
-        return __awaiter(this, void 0, void 0, function* () {
-            const fileStat = yield stat(parameters.file);
-            const totalFileSize = fileStat.size;
-            const isFIFO = fileStat.isFIFO();
-            let offset = 0;
-            let isUploadSuccessful = true;
-            let failedChunkSizes = 0;
-            let uploadFileSize = 0;
-            let isGzip = true;
-            // the file that is being uploaded is less than 64k in size to increase throughput and to minimize disk I/O
-            // for creating a new GZip file, an in-memory buffer is used for compression
-            // with named pipes the file size is reported as zero in that case don't read the file in memory
-            if (!isFIFO && totalFileSize < 65536) {
-                core.debug(`${parameters.file} is less than 64k in size. Creating a gzip file in-memory to potentially reduce the upload size`);
-                const buffer = yield (0, upload_gzip_1.createGZipFileInBuffer)(parameters.file);
-                // An open stream is needed in the event of a failure and we need to retry. If a NodeJS.ReadableStream is directly passed in,
-                // it will not properly get reset to the start of the stream if a chunk upload needs to be retried
-                let openUploadStream;
-                if (totalFileSize < buffer.byteLength) {
-                    // compression did not help with reducing the size, use a readable stream from the original file for upload
-                    core.debug(`The gzip file created for ${parameters.file} did not help with reducing the size of the file. The original file will be uploaded as-is`);
-                    openUploadStream = () => fs.createReadStream(parameters.file);
-                    isGzip = false;
-                    uploadFileSize = totalFileSize;
-                }
-                else {
-                    // create a readable stream using a PassThrough stream that is both readable and writable
-                    core.debug(`A gzip file created for ${parameters.file} helped with reducing the size of the original file. The file will be uploaded using gzip.`);
-                    openUploadStream = () => {
-                        const passThrough = new stream.PassThrough();
-                        passThrough.end(buffer);
-                        return passThrough;
-                    };
-                    uploadFileSize = buffer.byteLength;
-                }
-                const result = yield this.uploadChunk(httpClientIndex, parameters.resourceUrl, openUploadStream, 0, uploadFileSize - 1, uploadFileSize, isGzip, totalFileSize);
-                if (!result) {
-                    // chunk failed to upload
-                    isUploadSuccessful = false;
-                    failedChunkSizes += uploadFileSize;
-                    core.warning(`Aborting upload for ${parameters.file} due to failure`);
-                }
-                return {
-                    isSuccess: isUploadSuccessful,
-                    successfulUploadSize: uploadFileSize - failedChunkSizes,
-                    totalSize: totalFileSize
-                };
-            }
-            else {
-                // the file that is being uploaded is greater than 64k in size, a temporary file gets created on disk using the
-                // npm tmp-promise package and this file gets used to create a GZipped file
-                const tempFile = yield tmp.file();
-                core.debug(`${parameters.file} is greater than 64k in size. Creating a gzip file on-disk ${tempFile.path} to potentially reduce the upload size`);
-                // create a GZip file of the original file being uploaded, the original file should not be modified in any way
-                uploadFileSize = yield (0, upload_gzip_1.createGZipFileOnDisk)(parameters.file, tempFile.path);
-                let uploadFilePath = tempFile.path;
-                // compression did not help with size reduction, use the original file for upload and delete the temp GZip file
-                // for named pipes totalFileSize is zero, this assumes compression did help
-                if (!isFIFO && totalFileSize < uploadFileSize) {
-                    core.debug(`The gzip file created for ${parameters.file} did not help with reducing the size of the file. The original file will be uploaded as-is`);
-                    uploadFileSize = totalFileSize;
-                    uploadFilePath = parameters.file;
-                    isGzip = false;
-                }
-                else {
-                    core.debug(`The gzip file created for ${parameters.file} is smaller than the original file. The file will be uploaded using gzip.`);
-                }
-                let abortFileUpload = false;
-                // upload only a single chunk at a time
-                while (offset < uploadFileSize) {
-                    const chunkSize = Math.min(uploadFileSize - offset, parameters.maxChunkSize);
-                    const startChunkIndex = offset;
-                    const endChunkIndex = offset + chunkSize - 1;
-                    offset += parameters.maxChunkSize;
-                    if (abortFileUpload) {
-                        // if we don't want to continue in the event of an error, any pending upload chunks will be marked as failed
-                        failedChunkSizes += chunkSize;
-                        continue;
-                    }
-                    const result = yield this.uploadChunk(httpClientIndex, parameters.resourceUrl, () => fs.createReadStream(uploadFilePath, {
-                        start: startChunkIndex,
-                        end: endChunkIndex,
-                        autoClose: false
-                    }), startChunkIndex, endChunkIndex, uploadFileSize, isGzip, totalFileSize);
-                    if (!result) {
-                        // Chunk failed to upload, report as failed and do not continue uploading any more chunks for the file. It is possible that part of a chunk was
-                        // successfully uploaded so the server may report a different size for what was uploaded
-                        isUploadSuccessful = false;
-                        failedChunkSizes += chunkSize;
-                        core.warning(`Aborting upload for ${parameters.file} due to failure`);
-                        abortFileUpload = true;
-                    }
-                    else {
-                        // if an individual file is greater than 8MB (1024*1024*8) in size, display extra information about the upload status
-                        if (uploadFileSize > 8388608) {
-                            this.statusReporter.updateLargeFileStatus(parameters.file, startChunkIndex, endChunkIndex, uploadFileSize);
-                        }
-                    }
-                }
-                // Delete the temporary file that was created as part of the upload. If the temp file does not get manually deleted by
-                // calling cleanup, it gets removed when the node process exits. For more info see: https://www.npmjs.com/package/tmp-promise#about
-                core.debug(`deleting temporary gzip file ${tempFile.path}`);
-                yield tempFile.cleanup();
-                return {
-                    isSuccess: isUploadSuccessful,
-                    successfulUploadSize: uploadFileSize - failedChunkSizes,
-                    totalSize: totalFileSize
-                };
-            }
-        });
-    }
-    /**
-     * Uploads a chunk of an individual file to the specified resourceUrl. If the upload fails and the status code
-     * indicates a retryable status, we try to upload the chunk as well
-     * @param {number} httpClientIndex The index of the httpClient being used to make all the necessary calls
-     * @param {string} resourceUrl Url of the resource that the chunk will be uploaded to
-     * @param {NodeJS.ReadableStream} openStream Stream of the file that will be uploaded
-     * @param {number} start Starting byte index of file that the chunk belongs to
-     * @param {number} end Ending byte index of file that the chunk belongs to
-     * @param {number} uploadFileSize Total size of the file in bytes that is being uploaded
-     * @param {boolean} isGzip Denotes if we are uploading a Gzip compressed stream
-     * @param {number} totalFileSize Original total size of the file that is being uploaded
-     * @returns if the chunk was successfully uploaded
-     */
-    uploadChunk(httpClientIndex, resourceUrl, openStream, start, end, uploadFileSize, isGzip, totalFileSize) {
-        return __awaiter(this, void 0, void 0, function* () {
-            // open a new stream and read it to compute the digest
-            const digest = yield (0, utils_1.digestForStream)(openStream());
-            // prepare all the necessary headers before making any http call
-            const headers = (0, utils_1.getUploadHeaders)('application/octet-stream', true, isGzip, totalFileSize, end - start + 1, (0, utils_1.getContentRange)(start, end, uploadFileSize), digest);
-            const uploadChunkRequest = () => __awaiter(this, void 0, void 0, function* () {
-                const client = this.uploadHttpManager.getClient(httpClientIndex);
-                return yield client.sendStream('PUT', resourceUrl, openStream(), headers);
-            });
-            let retryCount = 0;
-            const retryLimit = (0, config_variables_1.getRetryLimit)();
-            // Increments the current retry count and then checks if the retry limit has been reached
-            // If there have been too many retries, fail so the download stops
-            const incrementAndCheckRetryLimit = (response) => {
-                retryCount++;
-                if (retryCount > retryLimit) {
-                    if (response) {
-                        (0, utils_1.displayHttpDiagnostics)(response);
-                    }
-                    core.info(`Retry limit has been reached for chunk at offset ${start} to ${resourceUrl}`);
-                    return true;
-                }
-                return false;
-            };
-            const backOff = (retryAfterValue) => __awaiter(this, void 0, void 0, function* () {
-                this.uploadHttpManager.disposeAndReplaceClient(httpClientIndex);
-                if (retryAfterValue) {
-                    core.info(`Backoff due to too many requests, retry #${retryCount}. Waiting for ${retryAfterValue} milliseconds before continuing the upload`);
-                    yield (0, utils_1.sleep)(retryAfterValue);
-                }
-                else {
-                    const backoffTime = (0, utils_1.getExponentialRetryTimeInMilliseconds)(retryCount);
-                    core.info(`Exponential backoff for retry #${retryCount}. Waiting for ${backoffTime} milliseconds before continuing the upload at offset ${start}`);
-                    yield (0, utils_1.sleep)(backoffTime);
-                }
-                core.info(`Finished backoff for retry #${retryCount}, continuing with upload`);
-                return;
-            });
-            // allow for failed chunks to be retried multiple times
-            while (retryCount <= retryLimit) {
-                let response;
-                try {
-                    response = yield uploadChunkRequest();
-                }
-                catch (error) {
-                    // if an error is caught, it is usually indicative of a timeout so retry the upload
-                    core.info(`An error has been caught http-client index ${httpClientIndex}, retrying the upload`);
-                    // eslint-disable-next-line no-console
-                    console.log(error);
-                    if (incrementAndCheckRetryLimit()) {
-                        return false;
-                    }
-                    yield backOff();
-                    continue;
-                }
-                // Always read the body of the response. There is potential for a resource leak if the body is not read which will
-                // result in the connection remaining open along with unintended consequences when trying to dispose of the client
-                yield response.readBody();
-                if ((0, utils_1.isSuccessStatusCode)(response.message.statusCode)) {
-                    return true;
-                }
-                else if ((0, utils_1.isRetryableStatusCode)(response.message.statusCode)) {
-                    core.info(`A ${response.message.statusCode} status code has been received, will attempt to retry the upload`);
-                    if (incrementAndCheckRetryLimit(response)) {
-                        return false;
-                    }
-                    (0, utils_1.isThrottledStatusCode)(response.message.statusCode)
-                        ? yield backOff((0, utils_1.tryGetRetryAfterValueTimeInMilliseconds)(response.message.headers))
-                        : yield backOff();
-                }
-                else {
-                    core.error(`Unexpected response. Unable to upload chunk to ${resourceUrl}`);
-                    (0, utils_1.displayHttpDiagnostics)(response);
-                    return false;
-                }
-            }
-            return false;
-        });
-    }
-    /**
-     * Updates the size of the artifact from -1 which was initially set when the container was first created for the artifact.
-     * Updating the size indicates that we are done uploading all the contents of the artifact
-     */
-    patchArtifactSize(size, artifactName) {
-        return __awaiter(this, void 0, void 0, function* () {
-            const resourceUrl = new url_1.URL((0, utils_1.getArtifactUrl)());
-            resourceUrl.searchParams.append('artifactName', artifactName);
-            const parameters = { Size: size };
-            const data = JSON.stringify(parameters, null, 2);
-            core.debug(`URL is ${resourceUrl.toString()}`);
-            // use the first client from the httpManager, `keep-alive` is not used so the connection will close immediately
-            const client = this.uploadHttpManager.getClient(0);
-            const headers = (0, utils_1.getUploadHeaders)('application/json', false);
-            // Extra information to display when a particular HTTP code is returned
-            const customErrorMessages = new Map([
-                [
-                    http_client_1.HttpCodes.NotFound,
-                    `An Artifact with the name ${artifactName} was not found`
-                ]
-            ]);
-            // TODO retry for all possible response codes, the artifact upload is pretty much complete so it at all costs we should try to finish this
-            const response = yield (0, requestUtils_1.retryHttpClientRequest)('Finalize artifact upload', () => __awaiter(this, void 0, void 0, function* () { return client.patch(resourceUrl.toString(), data, headers); }), customErrorMessages);
-            yield response.readBody();
-            core.debug(`Artifact ${artifactName} has been successfully uploaded, total size in bytes: ${size}`);
-        });
-    }
-}
-exports.UploadHttpClient = UploadHttpClient;
-//# sourceMappingURL=upload-http-client.js.map
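The 415 lines deleted above are the v1 per-file pipeline: gzip each file, split it into chunks, PUT each chunk with retry and backoff, then PATCH the final size. The new upload/zip.js and upload/blob-upload.js entries in the file list suggest 2.0.0 collapses this into "zip everything once, upload one blob". The sketch below is not the package's code; it is a generic archiver plus @azure/storage-blob rendering of that idea, and the pre-signed upload URL is an assumed input (presumably obtained from the new Twirp CreateArtifact call).

```ts
// Generic sketch of a zip-once, single-blob upload; not @actions/artifact code.
// `archiver` and the SAS-style signedUploadUrl are assumptions for illustration.
import * as archiver from 'archiver'
import {BlockBlobClient} from '@azure/storage-blob'

async function uploadAsSingleBlob(
  files: {source: string; destination: string}[],
  signedUploadUrl: string // assumed to be returned by the artifact service
): Promise<void> {
  // Stream all files into one zip archive instead of gzipping them one by one.
  const zip = archiver.create('zip', {zlib: {level: 6}})
  for (const f of files) {
    zip.file(f.source, {name: f.destination})
  }
  const finalized = zip.finalize() // starts producing the zip stream

  // One streamed block-blob upload replaces the per-chunk PUT/retry loop.
  const blobClient = new BlockBlobClient(signedUploadUrl)
  await blobClient.uploadStream(zip, 8 * 1024 * 1024, 4, {
    blobHTTPHeaders: {blobContentType: 'application/zip'}
  })
  await finalized
}
```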
package/lib/internal/upload-http-client.js.map
DELETED
@@ -1 +0,0 @@
{"version":3,"file":"upload-http-client.js","sourceRoot":"","sources":["../../src/internal/upload-http-client.ts"],"names":[],"mappings":";;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;AAAA,uCAAwB;AACxB,oDAAqC;AACrC,iDAAkC;AAClC,+CAAgC;AAOhC,mCAagB;AAChB,yDAM2B;AAC3B,+BAA8B;AAC9B,6BAAuB;AACvB,2CAAsC;AACtC,uDAAgD;AAChD,sDAAkE;AAClE,iDAA0C;AAG1C,+CAA0E;AAC1E,iDAAqD;AACrD,MAAM,IAAI,GAAG,IAAA,gBAAS,EAAC,EAAE,CAAC,IAAI,CAAC,CAAA;AAE/B,MAAa,gBAAgB;IAI3B;QACE,IAAI,CAAC,iBAAiB,GAAG,IAAI,0BAAW,CACtC,IAAA,2CAAwB,GAAE,EAC1B,0BAA0B,CAC3B,CAAA;QACD,IAAI,CAAC,cAAc,GAAG,IAAI,gCAAc,CAAC,KAAK,CAAC,CAAA;IACjD,CAAC;IAED;;;;OAIG;IACG,6BAA6B,CACjC,YAAoB,EACpB,OAAmC;;YAEnC,MAAM,UAAU,GAA6B;gBAC3C,IAAI,EAAE,iBAAiB;gBACvB,IAAI,EAAE,YAAY;aACnB,CAAA;YAED,6BAA6B;YAC7B,IAAI,OAAO,IAAI,OAAO,CAAC,aAAa,EAAE;gBACpC,MAAM,eAAe,GAAG,IAAA,mCAAgB,GAAE,CAAA;gBAC1C,UAAU,CAAC,aAAa,GAAG,IAAA,0BAAkB,EAC3C,OAAO,CAAC,aAAa,EACrB,eAAe,CAChB,CAAA;aACF;YAED,MAAM,IAAI,GAAW,IAAI,CAAC,SAAS,CAAC,UAAU,EAAE,IAAI,EAAE,CAAC,CAAC,CAAA;YACxD,MAAM,WAAW,GAAG,IAAA,sBAAc,GAAE,CAAA;YAEpC,+GAA+G;YAC/G,MAAM,MAAM,GAAG,IAAI,CAAC,iBAAiB,CAAC,SAAS,CAAC,CAAC,CAAC,CAAA;YAClD,MAAM,OAAO,GAAG,IAAA,wBAAgB,EAAC,kBAAkB,EAAE,KAAK,CAAC,CAAA;YAE3D,uEAAuE;YACvE,yFAAyF;YACzF,mEAAmE;YACnE,MAAM,mBAAmB,GAAwB,IAAI,GAAG,CAAC;gBACvD;oBACE,uBAAS,CAAC,SAAS;oBACnB,IAAA,yBAAM,GAAE;wBACN,CAAC,CAAC,wPAAwP;wBAC1P,CAAC,CAAC,yEAAyE;iBAC9E;gBACD;oBACE,uBAAS,CAAC,UAAU;oBACpB,qBAAqB,YAAY,8BAA8B,WAAW,EAAE;iBAC7E;aACF,CAAC,CAAA;YAEF,MAAM,QAAQ,GAAG,MAAM,IAAA,qCAAsB,EAC3C,2BAA2B,EAC3B,GAAS,EAAE,gDAAC,OAAA,MAAM,CAAC,IAAI,CAAC,WAAW,EAAE,IAAI,EAAE,OAAO,CAAC,CAAA,GAAA,EACnD,mBAAmB,CACpB,CAAA;YACD,MAAM,IAAI,GAAW,MAAM,QAAQ,CAAC,QAAQ,EAAE,CAAA;YAC9C,OAAO,IAAI,CAAC,KAAK,CAAC,IAAI,CAAC,CAAA;QACzB,CAAC;KAAA;IAED;;;;;OAKG;IACG,6BAA6B,CACjC,SAAiB,EACjB,aAAoC,EACpC,OAAuB;;YAEvB,MAAM,gBAAgB,GAAG,IAAA,2CAAwB,GAAE,CAAA;YACnD,MAAM,cAAc,GAAG,IAAA,qCAAkB,GAAE,CAAA;YAC3C,IAAI,CAAC,KAAK,CACR,qBAAqB,gBAAgB,qBAAqB,cAAc,EAAE,CAC3E,CAAA;YAED,MAAM,UAAU,GAA2B,EAAE,CAAA;YAC7C,0GAA0G;YAC1G,IAAI,eAAe,GAAG,IAAI,CAAA;YAC1B,IAAI,OAAO,EAAE;gBACX,IAAI,OAAO,CAAC,eAAe,KAAK,KAAK,EAAE;oBACrC,eAAe,GAAG,KAAK,CAAA;iBACxB;aACF;YAED,2DAA2D;YAC3D,KAAK,MAAM,IAAI,IAAI,aAAa,EAAE;gBAChC,MAAM,WAAW,GAAG,IAAI,SAAG,CAAC,SAAS,CAAC,CAAA;gBACtC,WAAW,CAAC,YAAY,CAAC,MAAM,CAAC,UAAU,EAAE,IAAI,CAAC,cAAc,CAAC,CAAA;gBAChE,UAAU,CAAC,IAAI,CAAC;oBACd,IAAI,EAAE,IAAI,CAAC,gBAAgB;oBAC3B,WAAW,EAAE,WAAW,CAAC,QAAQ,EAAE;oBACnC,YAAY,EAAE,cAAc;oBAC5B,eAAe;iBAChB,CAAC,CAAA;aACH;YAED,MAAM,eAAe,GAAG,CAAC,GAAG,IAAI,KAAK,CAAC,gBAAgB,CAAC,CAAC,IAAI,EAAE,CAAC,CAAA;YAC/D,MAAM,mBAAmB,GAAa,EAAE,CAAA;YACxC,IAAI,WAAW,GAAG,CAAC,CAAA;YACnB,IAAI,cAAc,GAAG,CAAC,CAAA;YACtB,IAAI,cAAc,GAAG,CAAC,CAAA;YACtB,IAAI,aAAa,GAAG,CAAC,CAAA;YACrB,IAAI,uBAAuB,GAAG,KAAK,CAAA;YAEnC,IAAI,CAAC,cAAc,CAAC,8BAA8B,CAAC,aAAa,CAAC,MAAM,CAAC,CAAA;YACxE,IAAI,CAAC,cAAc,CAAC,KAAK,EAAE,CAAA;YAE3B,uGAAuG;YACvG,MAAM,OAAO,CAAC,GAAG,CACf,eAAe,CAAC,GAAG,CAAC,CAAM,KAAK,EAAC,EAAE;gBAChC,OAAO,WAAW,GAAG,aAAa,CAAC,MAAM,EAAE;oBACzC,MAAM,qBAAqB,GAAG,UAAU,CAAC,WAAW,CAAC,CAAA;oBACrD,WAAW,IAAI,CAAC,CAAA;oBAChB,IAAI,uBAAuB,EAAE;wBAC3B,mBAAmB,CAAC,IAAI,CAAC,qBAAqB,CAAC,IAAI,CAAC,CAAA;wBACpD,SAAQ;qBACT;oBAED,MAAM,SAAS,GAAG,wBAAW,CAAC,GAAG,EAAE,CAAA;oBACnC,MAAM,gBAAgB,GAAG,MAAM,IAAI,CAAC,eAAe,CACjD,KAAK,EACL,qBAAqB,CACtB,CAAA;oBAED,IAAI,IAAI,CAAC,OAAO,EAAE,EAAE;wBAClB,IAAI,CAAC,KAAK,CACR,SAAS,EAAE,cAAc,IAAI,aAAa,CAAC,MAAM,KAC/C,qBAAqB,CAAC,IACxB,SAAS,CAAC,wBAAW,CAAC,GAAG,EAAE,GAAG,SAAS,CAAC,CAAC,OAAO,CAC9C,CAAC,CACF,gCAAgC,CAClC,CAAA;qBACF;oBAED,cAAc,IAAI,gBAAgB,CAAC,oBAAoB,CAAA;oBACvD,aAAa,IAAI,gBAAgB,CAAC,SAAS,CAAA;oBAC3C,IAAI,gBAAgB,CAAC,SA
AS,KAAK,KAAK,EAAE;wBACxC,mBAAmB,CAAC,IAAI,CAAC,qBAAqB,CAAC,IAAI,CAAC,CAAA;wBACpD,IAAI,CAAC,eAAe,EAAE;4BACpB,YAAY;4BACZ,IAAI,CAAC,KAAK,CAAC,0BAA0B,CAAC,CAAA;4BACtC,uBAAuB,GAAG,IAAI,CAAA;yBAC/B;qBACF;oBACD,IAAI,CAAC,cAAc,CAAC,uBAAuB,EAAE,CAAA;iBAC9C;YACH,CAAC,CAAA,CAAC,CACH,CAAA;YAED,IAAI,CAAC,cAAc,CAAC,IAAI,EAAE,CAAA;YAC1B,iDAAiD;YACjD,IAAI,CAAC,iBAAiB,CAAC,2BAA2B,EAAE,CAAA;YAEpD,IAAI,CAAC,IAAI,CAAC,2CAA2C,cAAc,QAAQ,CAAC,CAAA;YAC5E,OAAO;gBACL,UAAU,EAAE,cAAc;gBAC1B,SAAS,EAAE,aAAa;gBACxB,WAAW,EAAE,mBAAmB;aACjC,CAAA;QACH,CAAC;KAAA;IAED;;;;;;OAMG;IACW,eAAe,CAC3B,eAAuB,EACvB,UAAgC;;YAEhC,MAAM,QAAQ,GAAa,MAAM,IAAI,CAAC,UAAU,CAAC,IAAI,CAAC,CAAA;YACtD,MAAM,aAAa,GAAG,QAAQ,CAAC,IAAI,CAAA;YACnC,MAAM,MAAM,GAAG,QAAQ,CAAC,MAAM,EAAE,CAAA;YAChC,IAAI,MAAM,GAAG,CAAC,CAAA;YACd,IAAI,kBAAkB,GAAG,IAAI,CAAA;YAC7B,IAAI,gBAAgB,GAAG,CAAC,CAAA;YACxB,IAAI,cAAc,GAAG,CAAC,CAAA;YACtB,IAAI,MAAM,GAAG,IAAI,CAAA;YAEjB,2GAA2G;YAC3G,4EAA4E;YAC5E,gGAAgG;YAChG,IAAI,CAAC,MAAM,IAAI,aAAa,GAAG,KAAK,EAAE;gBACpC,IAAI,CAAC,KAAK,CACR,GAAG,UAAU,CAAC,IAAI,iGAAiG,CACpH,CAAA;gBACD,MAAM,MAAM,GAAG,MAAM,IAAA,oCAAsB,EAAC,UAAU,CAAC,IAAI,CAAC,CAAA;gBAE5D,6HAA6H;gBAC7H,kGAAkG;gBAClG,IAAI,gBAA6C,CAAA;gBAEjD,IAAI,aAAa,GAAG,MAAM,CAAC,UAAU,EAAE;oBACrC,2GAA2G;oBAC3G,IAAI,CAAC,KAAK,CACR,6BAA6B,UAAU,CAAC,IAAI,4FAA4F,CACzI,CAAA;oBACD,gBAAgB,GAAG,GAAG,EAAE,CAAC,EAAE,CAAC,gBAAgB,CAAC,UAAU,CAAC,IAAI,CAAC,CAAA;oBAC7D,MAAM,GAAG,KAAK,CAAA;oBACd,cAAc,GAAG,aAAa,CAAA;iBAC/B;qBAAM;oBACL,yFAAyF;oBACzF,IAAI,CAAC,KAAK,CACR,2BAA2B,UAAU,CAAC,IAAI,4FAA4F,CACvI,CAAA;oBACD,gBAAgB,GAAG,GAAG,EAAE;wBACtB,MAAM,WAAW,GAAG,IAAI,MAAM,CAAC,WAAW,EAAE,CAAA;wBAC5C,WAAW,CAAC,GAAG,CAAC,MAAM,CAAC,CAAA;wBACvB,OAAO,WAAW,CAAA;oBACpB,CAAC,CAAA;oBACD,cAAc,GAAG,MAAM,CAAC,UAAU,CAAA;iBACnC;gBAED,MAAM,MAAM,GAAG,MAAM,IAAI,CAAC,WAAW,CACnC,eAAe,EACf,UAAU,CAAC,WAAW,EACtB,gBAAgB,EAChB,CAAC,EACD,cAAc,GAAG,CAAC,EAClB,cAAc,EACd,MAAM,EACN,aAAa,CACd,CAAA;gBAED,IAAI,CAAC,MAAM,EAAE;oBACX,yBAAyB;oBACzB,kBAAkB,GAAG,KAAK,CAAA;oBAC1B,gBAAgB,IAAI,cAAc,CAAA;oBAClC,IAAI,CAAC,OAAO,CAAC,uBAAuB,UAAU,CAAC,IAAI,iBAAiB,CAAC,CAAA;iBACtE;gBAED,OAAO;oBACL,SAAS,EAAE,kBAAkB;oBAC7B,oBAAoB,EAAE,cAAc,GAAG,gBAAgB;oBACvD,SAAS,EAAE,aAAa;iBACzB,CAAA;aACF;iBAAM;gBACL,+GAA+G;gBAC/G,2EAA2E;gBAC3E,MAAM,QAAQ,GAAG,MAAM,GAAG,CAAC,IAAI,EAAE,CAAA;gBACjC,IAAI,CAAC,KAAK,CACR,GAAG,UAAU,CAAC,IAAI,8DAA8D,QAAQ,CAAC,IAAI,wCAAwC,CACtI,CAAA;gBAED,8GAA8G;gBAC9G,cAAc,GAAG,MAAM,IAAA,kCAAoB,EACzC,UAAU,CAAC,IAAI,EACf,QAAQ,CAAC,IAAI,CACd,CAAA;gBAED,IAAI,cAAc,GAAG,QAAQ,CAAC,IAAI,CAAA;gBAElC,+GAA+G;gBAC/G,2EAA2E;gBAC3E,IAAI,CAAC,MAAM,IAAI,aAAa,GAAG,cAAc,EAAE;oBAC7C,IAAI,CAAC,KAAK,CACR,6BAA6B,UAAU,CAAC,IAAI,4FAA4F,CACzI,CAAA;oBACD,cAAc,GAAG,aAAa,CAAA;oBAC9B,cAAc,GAAG,UAAU,CAAC,IAAI,CAAA;oBAChC,MAAM,GAAG,KAAK,CAAA;iBACf;qBAAM;oBACL,IAAI,CAAC,KAAK,CACR,6BAA6B,UAAU,CAAC,IAAI,2EAA2E,CACxH,CAAA;iBACF;gBAED,IAAI,eAAe,GAAG,KAAK,CAAA;gBAC3B,uCAAuC;gBACvC,OAAO,MAAM,GAAG,cAAc,EAAE;oBAC9B,MAAM,SAAS,GAAG,IAAI,CAAC,GAAG,CACxB,cAAc,GAAG,MAAM,EACvB,UAAU,CAAC,YAAY,CACxB,CAAA;oBAED,MAAM,eAAe,GAAG,MAAM,CAAA;oBAC9B,MAAM,aAAa,GAAG,MAAM,GAAG,SAAS,GAAG,CAAC,CAAA;oBAC5C,MAAM,IAAI,UAAU,CAAC,YAAY,CAAA;oBAEjC,IAAI,eAAe,EAAE;wBACnB,4GAA4G;wBAC5G,gBAAgB,IAAI,SAAS,CAAA;wBAC7B,SAAQ;qBACT;oBAED,MAAM,MAAM,GAAG,MAAM,IAAI,CAAC,WAAW,CACnC,eAAe,EACf,UAAU,CAAC,WAAW,EACtB,GAAG,EAAE,CACH,EAAE,CAAC,gBAAgB,CAAC,cAAc,EAAE;wBAClC,KAAK,EAAE,eAAe;wBACtB,GAAG,EAAE,aAAa;wBAClB,SAAS,EAAE,KAAK;qBACjB,CAAC,EACJ,eAAe,EACf,aAAa,EACb,cAAc,EACd,MAAM,EACN,aAAa,CACd,CAAA;oBAED,IAAI,CAAC,MAAM,EAAE;wBACX,+IAA+I;wBAC/I,wFAAwF;wBACxF,kBAAkB,GAAG,KAAK,CAAA;wBAC1B,gBAAgB,IAAI,SAAS,CAAA;wBAC7B,IAAI,CAAC,OAAO,CAAC,uBAAuB,UA
AU,CAAC,IAAI,iBAAiB,CAAC,CAAA;wBACrE,eAAe,GAAG,IAAI,CAAA;qBACvB;yBAAM;wBACL,qHAAqH;wBACrH,IAAI,cAAc,GAAG,OAAO,EAAE;4BAC5B,IAAI,CAAC,cAAc,CAAC,qBAAqB,CACvC,UAAU,CAAC,IAAI,EACf,eAAe,EACf,aAAa,EACb,cAAc,CACf,CAAA;yBACF;qBACF;iBACF;gBAED,sHAAsH;gBACtH,mIAAmI;gBACnI,IAAI,CAAC,KAAK,CAAC,gCAAgC,QAAQ,CAAC,IAAI,EAAE,CAAC,CAAA;gBAC3D,MAAM,QAAQ,CAAC,OAAO,EAAE,CAAA;gBAExB,OAAO;oBACL,SAAS,EAAE,kBAAkB;oBAC7B,oBAAoB,EAAE,cAAc,GAAG,gBAAgB;oBACvD,SAAS,EAAE,aAAa;iBACzB,CAAA;aACF;QACH,CAAC;KAAA;IAED;;;;;;;;;;;;OAYG;IACW,WAAW,CACvB,eAAuB,EACvB,WAAmB,EACnB,UAAuC,EACvC,KAAa,EACb,GAAW,EACX,cAAsB,EACtB,MAAe,EACf,aAAqB;;YAErB,sDAAsD;YACtD,MAAM,MAAM,GAAG,MAAM,IAAA,uBAAe,EAAC,UAAU,EAAE,CAAC,CAAA;YAElD,gEAAgE;YAChE,MAAM,OAAO,GAAG,IAAA,wBAAgB,EAC9B,0BAA0B,EAC1B,IAAI,EACJ,MAAM,EACN,aAAa,EACb,GAAG,GAAG,KAAK,GAAG,CAAC,EACf,IAAA,uBAAe,EAAC,KAAK,EAAE,GAAG,EAAE,cAAc,CAAC,EAC3C,MAAM,CACP,CAAA;YAED,MAAM,kBAAkB,GAAG,GAAsC,EAAE;gBACjE,MAAM,MAAM,GAAG,IAAI,CAAC,iBAAiB,CAAC,SAAS,CAAC,eAAe,CAAC,CAAA;gBAChE,OAAO,MAAM,MAAM,CAAC,UAAU,CAAC,KAAK,EAAE,WAAW,EAAE,UAAU,EAAE,EAAE,OAAO,CAAC,CAAA;YAC3E,CAAC,CAAA,CAAA;YAED,IAAI,UAAU,GAAG,CAAC,CAAA;YAClB,MAAM,UAAU,GAAG,IAAA,gCAAa,GAAE,CAAA;YAElC,yFAAyF;YACzF,kEAAkE;YAClE,MAAM,2BAA2B,GAAG,CAClC,QAA6B,EACpB,EAAE;gBACX,UAAU,EAAE,CAAA;gBACZ,IAAI,UAAU,GAAG,UAAU,EAAE;oBAC3B,IAAI,QAAQ,EAAE;wBACZ,IAAA,8BAAsB,EAAC,QAAQ,CAAC,CAAA;qBACjC;oBACD,IAAI,CAAC,IAAI,CACP,oDAAoD,KAAK,OAAO,WAAW,EAAE,CAC9E,CAAA;oBACD,OAAO,IAAI,CAAA;iBACZ;gBACD,OAAO,KAAK,CAAA;YACd,CAAC,CAAA;YAED,MAAM,OAAO,GAAG,CAAO,eAAwB,EAAiB,EAAE;gBAChE,IAAI,CAAC,iBAAiB,CAAC,uBAAuB,CAAC,eAAe,CAAC,CAAA;gBAC/D,IAAI,eAAe,EAAE;oBACnB,IAAI,CAAC,IAAI,CACP,4CAA4C,UAAU,iBAAiB,eAAe,4CAA4C,CACnI,CAAA;oBACD,MAAM,IAAA,aAAK,EAAC,eAAe,CAAC,CAAA;iBAC7B;qBAAM;oBACL,MAAM,WAAW,GAAG,IAAA,6CAAqC,EAAC,UAAU,CAAC,CAAA;oBACrE,IAAI,CAAC,IAAI,CACP,kCAAkC,UAAU,iBAAiB,WAAW,wDAAwD,KAAK,EAAE,CACxI,CAAA;oBACD,MAAM,IAAA,aAAK,EAAC,WAAW,CAAC,CAAA;iBACzB;gBACD,IAAI,CAAC,IAAI,CACP,+BAA+B,UAAU,0BAA0B,CACpE,CAAA;gBACD,OAAM;YACR,CAAC,CAAA,CAAA;YAED,uDAAuD;YACvD,OAAO,UAAU,IAAI,UAAU,EAAE;gBAC/B,IAAI,QAA4B,CAAA;gBAEhC,IAAI;oBACF,QAAQ,GAAG,MAAM,kBAAkB,EAAE,CAAA;iBACtC;gBAAC,OAAO,KAAK,EAAE;oBACd,mFAAmF;oBACnF,IAAI,CAAC,IAAI,CACP,8CAA8C,eAAe,uBAAuB,CACrF,CAAA;oBACD,sCAAsC;oBACtC,OAAO,CAAC,GAAG,CAAC,KAAK,CAAC,CAAA;oBAElB,IAAI,2BAA2B,EAAE,EAAE;wBACjC,OAAO,KAAK,CAAA;qBACb;oBACD,MAAM,OAAO,EAAE,CAAA;oBACf,SAAQ;iBACT;gBAED,kHAAkH;gBAClH,kHAAkH;gBAClH,MAAM,QAAQ,CAAC,QAAQ,EAAE,CAAA;gBAEzB,IAAI,IAAA,2BAAmB,EAAC,QAAQ,CAAC,OAAO,CAAC,UAAU,CAAC,EAAE;oBACpD,OAAO,IAAI,CAAA;iBACZ;qBAAM,IAAI,IAAA,6BAAqB,EAAC,QAAQ,CAAC,OAAO,CAAC,UAAU,CAAC,EAAE;oBAC7D,IAAI,CAAC,IAAI,CACP,KAAK,QAAQ,CAAC,OAAO,CAAC,UAAU,kEAAkE,CACnG,CAAA;oBACD,IAAI,2BAA2B,CAAC,QAAQ,CAAC,EAAE;wBACzC,OAAO,KAAK,CAAA;qBACb;oBACD,IAAA,6BAAqB,EAAC,QAAQ,CAAC,OAAO,CAAC,UAAU,CAAC;wBAChD,CAAC,CAAC,MAAM,OAAO,CACX,IAAA,+CAAuC,EAAC,QAAQ,CAAC,OAAO,CAAC,OAAO,CAAC,CAClE;wBACH,CAAC,CAAC,MAAM,OAAO,EAAE,CAAA;iBACpB;qBAAM;oBACL,IAAI,CAAC,KAAK,CACR,kDAAkD,WAAW,EAAE,CAChE,CAAA;oBACD,IAAA,8BAAsB,EAAC,QAAQ,CAAC,CAAA;oBAChC,OAAO,KAAK,CAAA;iBACb;aACF;YACD,OAAO,KAAK,CAAA;QACd,CAAC;KAAA;IAED;;;OAGG;IACG,iBAAiB,CAAC,IAAY,EAAE,YAAoB;;YACxD,MAAM,WAAW,GAAG,IAAI,SAAG,CAAC,IAAA,sBAAc,GAAE,CAAC,CAAA;YAC7C,WAAW,CAAC,YAAY,CAAC,MAAM,CAAC,cAAc,EAAE,YAAY,CAAC,CAAA;YAE7D,MAAM,UAAU,GAAsB,EAAC,IAAI,EAAE,IAAI,EAAC,CAAA;YAClD,MAAM,IAAI,GAAW,IAAI,CAAC,SAAS,CAAC,UAAU,EAAE,IAAI,EAAE,CAAC,CAAC,CAAA;YACxD,IAAI,CAAC,KAAK,CAAC,UAAU,WAAW,CAAC,QAAQ,EAAE,EAAE,CAAC,CAAA;YAE9C,+GAA+G;YAC/G,MAAM,MAAM,GAAG,IAAI,CAAC,iBAAiB,CAAC,SAAS,CAAC,CAAC,CAAC,CAAA;YAClD,MAAM,OAAO,GAAG,IAAA,wBAAgB,EAAC,kBAAkB,EAAE,KAAK,
CAAC,CAAA;YAE3D,uEAAuE;YACvE,MAAM,mBAAmB,GAAwB,IAAI,GAAG,CAAC;gBACvD;oBACE,uBAAS,CAAC,QAAQ;oBAClB,6BAA6B,YAAY,gBAAgB;iBAC1D;aACF,CAAC,CAAA;YAEF,0IAA0I;YAC1I,MAAM,QAAQ,GAAG,MAAM,IAAA,qCAAsB,EAC3C,0BAA0B,EAC1B,GAAS,EAAE,gDAAC,OAAA,MAAM,CAAC,KAAK,CAAC,WAAW,CAAC,QAAQ,EAAE,EAAE,IAAI,EAAE,OAAO,CAAC,CAAA,GAAA,EAC/D,mBAAmB,CACpB,CAAA;YACD,MAAM,QAAQ,CAAC,QAAQ,EAAE,CAAA;YACzB,IAAI,CAAC,KAAK,CACR,YAAY,YAAY,yDAAyD,IAAI,EAAE,CACxF,CAAA;QACH,CAAC;KAAA;CACF;AAjgBD,4CAigBC"}
package/lib/internal/upload-options.d.ts
DELETED
@@ -1,34 +0,0 @@
-export interface UploadOptions {
-    /**
-     * Indicates if the artifact upload should continue if file or chunk fails to upload from any error.
-     * If there is a error during upload, a partial artifact will always be associated and available for
-     * download at the end. The size reported will be the amount of storage that the user or org will be
-     * charged for the partial artifact. Defaults to true if not specified
-     *
-     * If set to false, and an error is encountered, all other uploads will stop and any files or chunks
-     * that were queued will not be attempted to be uploaded. The partial artifact available will only
-     * include files and chunks up until the failure
-     *
-     * If set to true and an error is encountered, the failed file will be skipped and ignored and all
-     * other queued files will be attempted to be uploaded. The partial artifact at the end will have all
-     * files with the exception of the problematic files(s)/chunks(s) that failed to upload
-     *
-     */
-    continueOnError?: boolean;
-    /**
-     * Duration after which artifact will expire in days.
-     *
-     * By default artifact expires after 90 days:
-     * https://docs.github.com/en/actions/configuring-and-managing-workflows/persisting-workflow-data-using-artifacts#downloading-and-deleting-artifacts-after-a-workflow-run-is-complete
-     *
-     * Use this option to override the default expiry.
-     *
-     * Min value: 1
-     * Max value: 90 unless changed by repository setting
-     *
-     * If this is set to a greater value than the retention settings allowed, the retention on artifacts
-     * will be reduced to match the max value allowed on server, and the upload process will continue. An
-     * input of 0 assumes default retention setting.
-     */
-    retentionDays?: number;
-}
package/lib/internal/upload-options.js.map
DELETED
@@ -1 +0,0 @@
-{"version":3,"file":"upload-options.js","sourceRoot":"","sources":["../../src/internal/upload-options.ts"],"names":[],"mappings":""}
package/lib/internal/upload-response.d.ts
DELETED
@@ -1,19 +0,0 @@
-export interface UploadResponse {
-    /**
-     * The name of the artifact that was uploaded
-     */
-    artifactName: string;
-    /**
-     * A list of all items that are meant to be uploaded as part of the artifact
-     */
-    artifactItems: string[];
-    /**
-     * Total size of the artifact in bytes that was uploaded
-     */
-    size: number;
-    /**
-     * A list of items that were not uploaded as part of the artifact (includes queued items that were not uploaded if
-     * continueOnError is set to false). This is a subset of artifactItems.
-     */
-    failedItems: string[];
-}
package/lib/internal/upload-response.js.map
DELETED
@@ -1 +0,0 @@
-{"version":3,"file":"upload-response.js","sourceRoot":"","sources":["../../src/internal/upload-response.ts"],"names":[],"mappings":""}
package/lib/internal/upload-specification.d.ts
DELETED
@@ -1,11 +0,0 @@
-export interface UploadSpecification {
-    absoluteFilePath: string;
-    uploadFilePath: string;
-}
-/**
- * Creates a specification that describes how each file that is part of the artifact will be uploaded
- * @param artifactName the name of the artifact being uploaded. Used during upload to denote where the artifact is stored on the server
- * @param rootDirectory an absolute file path that denotes the path that should be removed from the beginning of each artifact file
- * @param artifactFiles a list of absolute file paths that denote what should be uploaded as part of the artifact
- */
-export declare function getUploadSpecification(artifactName: string, rootDirectory: string, artifactFiles: string[]): UploadSpecification[];
package/lib/internal/upload-specification.js.map
DELETED
@@ -1 +0,0 @@
{"version":3,"file":"upload-specification.js","sourceRoot":"","sources":["../../src/internal/upload-specification.ts"],"names":[],"mappings":";;;;;;;;;;;;;;;;;;;;;;;;;;AAAA,uCAAwB;AACxB,wCAAmC;AACnC,+BAA6C;AAC7C,2FAAyE;AAOzE;;;;;GAKG;AACH,SAAgB,sBAAsB,CACpC,YAAoB,EACpB,aAAqB,EACrB,aAAuB;IAEvB,+DAA+D;IAC/D,MAAM,cAAc,GAA0B,EAAE,CAAA;IAEhD,IAAI,CAAC,EAAE,CAAC,UAAU,CAAC,aAAa,CAAC,EAAE;QACjC,MAAM,IAAI,KAAK,CAAC,0BAA0B,aAAa,iBAAiB,CAAC,CAAA;KAC1E;IACD,IAAI,CAAC,EAAE,CAAC,QAAQ,CAAC,aAAa,CAAC,CAAC,WAAW,EAAE,EAAE;QAC7C,MAAM,IAAI,KAAK,CACb,0BAA0B,aAAa,2BAA2B,CACnE,CAAA;KACF;IACD,sFAAsF;IACtF,aAAa,GAAG,IAAA,gBAAS,EAAC,aAAa,CAAC,CAAA;IACxC,aAAa,GAAG,IAAA,cAAO,EAAC,aAAa,CAAC,CAAA;IAEtC;;;;;;;;;;;;;;;;;;MAkBE;IACF,KAAK,IAAI,IAAI,IAAI,aAAa,EAAE;QAC9B,IAAI,CAAC,EAAE,CAAC,UAAU,CAAC,IAAI,CAAC,EAAE;YACxB,MAAM,IAAI,KAAK,CAAC,QAAQ,IAAI,iBAAiB,CAAC,CAAA;SAC/C;QACD,IAAI,CAAC,EAAE,CAAC,QAAQ,CAAC,IAAI,CAAC,CAAC,WAAW,EAAE,EAAE;YACpC,sFAAsF;YACtF,IAAI,GAAG,IAAA,gBAAS,EAAC,IAAI,CAAC,CAAA;YACtB,IAAI,GAAG,IAAA,cAAO,EAAC,IAAI,CAAC,CAAA;YACpB,IAAI,CAAC,IAAI,CAAC,UAAU,CAAC,aAAa,CAAC,EAAE;gBACnC,MAAM,IAAI,KAAK,CACb,sBAAsB,aAAa,2CAA2C,IAAI,EAAE,CACrF,CAAA;aACF;YAED,mFAAmF;YACnF,MAAM,UAAU,GAAG,IAAI,CAAC,OAAO,CAAC,aAAa,EAAE,EAAE,CAAC,CAAA;YAClD,IAAA,yDAAqB,EAAC,UAAU,CAAC,CAAA;YAEjC;;;;;;;;;cASE;YACF,cAAc,CAAC,IAAI,CAAC;gBAClB,gBAAgB,EAAE,IAAI;gBACtB,cAAc,EAAE,IAAA,WAAI,EAAC,YAAY,EAAE,UAAU,CAAC;aAC/C,CAAC,CAAA;SACH;aAAM;YACL,uDAAuD;YACvD,IAAA,YAAK,EAAC,YAAY,IAAI,kDAAkD,CAAC,CAAA;SAC1E;KACF;IACD,OAAO,cAAc,CAAA;AACvB,CAAC;AA7ED,wDA6EC"}
package/lib/internal/utils.d.ts
DELETED
@@ -1,71 +0,0 @@
-/// <reference types="node" />
-/// <reference types="node" />
-import { IncomingHttpHeaders, OutgoingHttpHeaders } from 'http';
-import { HttpClient, HttpClientResponse } from '@actions/http-client';
-/**
- * Returns a retry time in milliseconds that exponentially gets larger
- * depending on the amount of retries that have been attempted
- */
-export declare function getExponentialRetryTimeInMilliseconds(retryCount: number): number;
-/**
- * Parses a env variable that is a number
- */
-export declare function parseEnvNumber(key: string): number | undefined;
-/**
- * Various utility functions to help with the necessary API calls
- */
-export declare function getApiVersion(): string;
-export declare function isSuccessStatusCode(statusCode?: number): boolean;
-export declare function isForbiddenStatusCode(statusCode?: number): boolean;
-export declare function isRetryableStatusCode(statusCode: number | undefined): boolean;
-export declare function isThrottledStatusCode(statusCode?: number): boolean;
-/**
- * Attempts to get the retry-after value from a set of http headers. The retry time
- * is originally denoted in seconds, so if present, it is converted to milliseconds
- * @param headers all the headers received when making an http call
- */
-export declare function tryGetRetryAfterValueTimeInMilliseconds(headers: IncomingHttpHeaders): number | undefined;
-export declare function getContentRange(start: number, end: number, total: number): string;
-/**
- * Sets all the necessary headers when downloading an artifact
- * @param {string} contentType the type of content being uploaded
- * @param {boolean} isKeepAlive is the same connection being used to make multiple calls
- * @param {boolean} acceptGzip can we accept a gzip encoded response
- * @param {string} acceptType the type of content that we can accept
- * @returns appropriate headers to make a specific http call during artifact download
- */
-export declare function getDownloadHeaders(contentType: string, isKeepAlive?: boolean, acceptGzip?: boolean): OutgoingHttpHeaders;
-/**
- * Sets all the necessary headers when uploading an artifact
- * @param {string} contentType the type of content being uploaded
- * @param {boolean} isKeepAlive is the same connection being used to make multiple calls
- * @param {boolean} isGzip is the connection being used to upload GZip compressed content
- * @param {number} uncompressedLength the original size of the content if something is being uploaded that has been compressed
- * @param {number} contentLength the length of the content that is being uploaded
- * @param {string} contentRange the range of the content that is being uploaded
- * @returns appropriate headers to make a specific http call during artifact upload
- */
-export declare function getUploadHeaders(contentType: string, isKeepAlive?: boolean, isGzip?: boolean, uncompressedLength?: number, contentLength?: number, contentRange?: string, digest?: StreamDigest): OutgoingHttpHeaders;
-export declare function createHttpClient(userAgent: string): HttpClient;
-export declare function getArtifactUrl(): string;
-/**
- * Uh oh! Something might have gone wrong during either upload or download. The IHtttpClientResponse object contains information
- * about the http call that was made by the actions http client. This information might be useful to display for diagnostic purposes, but
- * this entire object is really big and most of the information is not really useful. This function takes the response object and displays only
- * the information that we want.
- *
- * Certain information such as the TLSSocket and the Readable state are not really useful for diagnostic purposes so they can be avoided.
- * Other information such as the headers, the response code and message might be useful, so this is displayed.
- */
-export declare function displayHttpDiagnostics(response: HttpClientResponse): void;
-export declare function createDirectoriesForArtifact(directories: string[]): Promise<void>;
-export declare function createEmptyFilesForArtifact(emptyFilesToCreate: string[]): Promise<void>;
-export declare function getFileSize(filePath: string): Promise<number>;
-export declare function rmFile(filePath: string): Promise<void>;
-export declare function getProperRetention(retentionInput: number, retentionSetting: string | undefined): number;
-export declare function sleep(milliseconds: number): Promise<void>;
-export interface StreamDigest {
-    crc64: string;
-    md5: string;
-}
-export declare function digestForStream(stream: NodeJS.ReadableStream): Promise<StreamDigest>;