@empiricalrun/r2-uploader 0.2.0 → 0.3.1
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/CHANGELOG.md +12 -0
- package/dist/fetch.d.ts +7 -0
- package/dist/fetch.d.ts.map +1 -0
- package/dist/fetch.js +28 -0
- package/dist/index.d.ts +2 -25
- package/dist/index.d.ts.map +1 -1
- package/dist/index.js +4 -157
- package/dist/types.d.ts +20 -0
- package/dist/types.d.ts.map +1 -0
- package/dist/types.js +2 -0
- package/dist/upload.d.ts +26 -0
- package/dist/upload.d.ts.map +1 -0
- package/dist/upload.js +168 -0
- package/package.json +1 -1
package/CHANGELOG.md
CHANGED

@@ -1,5 +1,17 @@
 # @empiricalrun/r2-uploader
 
+## 0.3.1
+
+### Patch Changes
+
+- fccb68a: chore: remove uploader logs for cleaner ci runs
+
+## 0.3.0
+
+### Minor Changes
+
+- bac164e: feat: add report assets page which fetches from r2 bucket
+
 ## 0.2.0
 
 ### Minor Changes
package/dist/fetch.d.ts
ADDED

@@ -0,0 +1,7 @@
+import { R2FetchConfig } from "./types";
+export declare const fetchFiles: (config: R2FetchConfig) => Promise<{
+    key: string | undefined;
+    lastModified: Date | undefined;
+    size: number | undefined;
+}[] | undefined>;
+//# sourceMappingURL=fetch.d.ts.map

package/dist/fetch.d.ts.map
ADDED

@@ -0,0 +1 @@
+{"version":3,"file":"fetch.d.ts","sourceRoot":"","sources":["../src/fetch.ts"],"names":[],"mappings":"AAMA,OAAO,EAAE,aAAa,EAAE,MAAM,SAAS,CAAC;AAExC,eAAO,MAAM,UAAU,WAAkB,aAAa;;;;gBAsBrD,CAAC"}
package/dist/fetch.js
ADDED

@@ -0,0 +1,28 @@
+"use strict";
+Object.defineProperty(exports, "__esModule", { value: true });
+exports.fetchFiles = void 0;
+const client_s3_1 = require("@aws-sdk/client-s3");
+const fetchFiles = async (config) => {
+    const S3 = new client_s3_1.S3Client({
+        region: "auto",
+        endpoint: `https://${config.accountId}.r2.cloudflarestorage.com`,
+        credentials: {
+            accessKeyId: config.accessKeyId,
+            secretAccessKey: config.secretAccessKey,
+        },
+    });
+    const params = {
+        Bucket: config.bucket,
+        Prefix: config.prefix,
+    };
+    const cmd = new client_s3_1.ListObjectsV2Command(params);
+    const data = await S3.send(cmd);
+    return data.Contents?.map((c) => {
+        return {
+            key: c.Key,
+            lastModified: c.LastModified,
+            size: c.Size,
+        };
+    });
+};
+exports.fetchFiles = fetchFiles;
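The new `fetchFiles` export lists objects under a key prefix in an R2 bucket, which is what the 0.3.0 report assets page builds on. A minimal usage sketch in TypeScript; the bucket name, prefix, and the use of `R2_*` environment variables for credentials are assumptions for illustration (`fetchFiles` itself takes explicit credentials and has no env fallback):

```ts
import { fetchFiles } from "@empiricalrun/r2-uploader";

async function listReportAssets() {
  // Placeholder credentials and names; fetchFiles requires every field
  // of its config explicitly (no R2_* env fallback in fetch.js above).
  const files = await fetchFiles({
    accountId: process.env.R2_ACCOUNT_ID!,
    accessKeyId: process.env.R2_ACCESS_KEY_ID!,
    secretAccessKey: process.env.R2_SECRET_ACCESS_KEY!,
    bucket: "reports",    // hypothetical bucket name
    prefix: "runs/1234/", // hypothetical key prefix
  });
  // Each entry mirrors the ListObjectsV2 response: key, lastModified, size.
  for (const file of files ?? []) {
    console.log(file.key, file.size, file.lastModified);
  }
}
```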
package/dist/index.d.ts
CHANGED

@@ -1,26 +1,3 @@
-/// <reference types="node" />
-export interface FileMap {
-    [file: string]: string;
-}
-export declare function uploadDirectory({ sourceDir, fileList, destinationDir, uploadBucket, accountId, accessKeyId, secretAccessKey, }: {
-    sourceDir: string;
-    fileList?: string[];
-    destinationDir: string;
-    uploadBucket: string;
-    accountId?: string;
-    accessKeyId?: string;
-    secretAccessKey?: string;
-}): Promise<FileMap>;
-export declare function uploadInMemoryFiles({ files, destinationDir, uploadBucket, accountId, accessKeyId, secretAccessKey, }: {
-    files: {
-        buffer: Buffer;
-        fileName: string;
-        mimeType?: string;
-    }[];
-    destinationDir: string;
-    uploadBucket: string;
-    accountId?: string;
-    accessKeyId?: string;
-    secretAccessKey?: string;
-}): Promise<FileMap>;
+export * from "./fetch";
+export * from "./upload";
 //# sourceMappingURL=index.d.ts.map
package/dist/index.d.ts.map
CHANGED

@@ -1 +1 @@
-{"version":3,"file":"index.d.ts","sourceRoot":"","sources":["../src/index.ts"],"names":[],"mappings":"
+{"version":3,"file":"index.d.ts","sourceRoot":"","sources":["../src/index.ts"],"names":[],"mappings":"AAAA,cAAc,SAAS,CAAC;AACxB,cAAc,UAAU,CAAC"}
package/dist/index.js
CHANGED

@@ -10,162 +10,9 @@ var __createBinding = (this && this.__createBinding) || (Object.create ? (functi
     if (k2 === undefined) k2 = k;
     o[k2] = m[k];
 }));
-var __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? (function(o, v) {
-    Object.defineProperty(o, "default", { enumerable: true, value: v });
-}) : function(o, v) {
-    o["default"] = v;
-});
-var __importStar = (this && this.__importStar) || function (mod) {
-    if (mod && mod.__esModule) return mod;
-    var result = {};
-    if (mod != null) for (var k in mod) if (k !== "default" && Object.prototype.hasOwnProperty.call(mod, k)) __createBinding(result, mod, k);
-    __setModuleDefault(result, mod);
-    return result;
-};
-var __importDefault = (this && this.__importDefault) || function (mod) {
-    return (mod && mod.__esModule) ? mod : { "default": mod };
+var __exportStar = (this && this.__exportStar) || function(m, exports) {
+    for (var p in m) if (p !== "default" && !Object.prototype.hasOwnProperty.call(exports, p)) __createBinding(exports, m, p);
 };
 Object.defineProperty(exports, "__esModule", { value: true });
-exports.uploadInMemoryFiles = exports.uploadDirectory = void 0;
-const client_s3_1 = require("@aws-sdk/client-s3");
-const s3_request_presigner_1 = require("@aws-sdk/s3-request-presigner");
-const async_retry_1 = __importDefault(require("async-retry"));
-const fs = __importStar(require("fs"));
-const md5_1 = __importDefault(require("md5"));
-const mime_1 = __importDefault(require("mime"));
-const path_1 = __importDefault(require("path"));
-const getFileList = (dir) => {
-    let files = [];
-    const items = fs.readdirSync(dir, {
-        withFileTypes: true,
-    });
-    for (const item of items) {
-        const isDir = item.isDirectory();
-        const absolutePath = `${dir}/${item.name}`;
-        if (isDir) {
-            files = [...files, ...getFileList(absolutePath)];
-        }
-        else {
-            files.push(absolutePath);
-        }
-    }
-    return files;
-};
-const run = async (config) => {
-    const map = new Map();
-    const urls = {};
-    const S3 = new client_s3_1.S3Client({
-        region: "auto",
-        endpoint: `https://${config.accountId}.r2.cloudflarestorage.com`,
-        credentials: {
-            accessKeyId: config.accessKeyId,
-            secretAccessKey: config.secretAccessKey,
-        },
-    });
-    await Promise.all(config.files.map(async (file) => {
-        let fileKey = path_1.default.join(config.destinationDir, file.fileName);
-        if (fileKey.includes(".gitkeep")) {
-            return;
-        }
-        const mimeType = file.mimeType || "application/octet-stream";
-        const uploadParams = {
-            Bucket: config.bucket,
-            Key: fileKey,
-            Body: file.buffer,
-            ContentLength: file.buffer.length,
-            ContentType: mimeType ?? "application/octet-stream",
-        };
-        const cmd = new client_s3_1.PutObjectCommand(uploadParams);
-        const digest = (0, md5_1.default)(file.buffer);
-        cmd.middlewareStack.add((next) => async (args) => {
-            args.request.headers["if-none-match"] = `"${digest}"`;
-            return await next(args);
-        }, {
-            step: "build",
-            name: "addETag",
-        });
-        try {
-            await (0, async_retry_1.default)(async () => {
-                try {
-                    console.log("\nStarting file upload for:", file.fileName, "\n");
-                    const data = await S3.send(cmd);
-                    map.set(file.fileName, data);
-                    const fileUrl = await (0, s3_request_presigner_1.getSignedUrl)(S3, cmd);
-                    urls[file.fileName] = fileUrl;
-                    console.log("\nFinished file upload for:", file.fileName, "\n");
-                }
-                catch (err) {
-                    console.log("\nError uploading file: ", file.fileName, err, "\n");
-                    const error = err;
-                    if (error["$metadata"]) {
-                        // throw only those errors that are not 412 Precondition Failed
-                        // 412 errors are errors while accessing the asset, which is post upload and can be ignored
-                        if (error.$metadata.httpStatusCode !== 412) {
-                            throw error;
-                        }
-                    }
-                }
-            }, {
-                retries: 5,
-                factor: 3,
-                minTimeout: 1000,
-                maxTimeout: 60000,
-                randomize: true,
-                onRetry: (err, i) => {
-                    if (err) {
-                        const error = err;
-                        console.log("Upload retry attempt:", i, ":", file.fileName);
-                        console.log("Response status:", error.$response?.statusCode);
-                    }
-                },
-            });
-        }
-        catch (err) {
-            const error = err;
-            console.log(`R2 Error - ${file.fileName} \nError: ${error}`);
-            console.log("Upload response", error.$response, "with status", error.$metadata.httpStatusCode);
-        }
-        return;
-    }));
-    return urls;
-};
-async function uploadDirectory({ sourceDir, fileList, destinationDir, uploadBucket, accountId, accessKeyId, secretAccessKey, }) {
-    const filePaths = fileList || getFileList(sourceDir);
-    const files = filePaths
-        .map((filePath) => ({
-        path: filePath,
-        fileName: filePath.replace(sourceDir, ""),
-        mimeType: undefined,
-    }))
-        .map((file) => {
-        const mimeType = mime_1.default.getType(file.path) || "application/octet-stream";
-        return {
-            buffer: fs.readFileSync(file.path),
-            fileName: file.fileName,
-            mimeType,
-        };
-    });
-    let config = {
-        accountId: accountId || process.env.R2_ACCOUNT_ID,
-        accessKeyId: accessKeyId || process.env.R2_ACCESS_KEY_ID,
-        secretAccessKey: secretAccessKey || process.env.R2_SECRET_ACCESS_KEY,
-        bucket: uploadBucket,
-        destinationDir,
-        files,
-    };
-    const uploadedFiles = await run(config);
-    return uploadedFiles;
-}
-exports.uploadDirectory = uploadDirectory;
-async function uploadInMemoryFiles({ files, destinationDir, uploadBucket, accountId, accessKeyId, secretAccessKey, }) {
-    const config = {
-        accountId: accountId || process.env.R2_ACCOUNT_ID,
-        accessKeyId: accessKeyId || process.env.R2_ACCESS_KEY_ID,
-        secretAccessKey: secretAccessKey || process.env.R2_SECRET_ACCESS_KEY,
-        bucket: uploadBucket,
-        destinationDir,
-        files,
-    };
-    return await run(config);
-}
-exports.uploadInMemoryFiles = uploadInMemoryFiles;
+__exportStar(require("./fetch"), exports);
+__exportStar(require("./upload"), exports);
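With index.js reduced to a barrel module, existing imports keep working and the new fetch helper arrives through the same entry point. A sketch of the consumer-facing effect:

```ts
// The barrel re-exports everything from ./upload (uploadDirectory,
// uploadInMemoryFiles) and ./fetch (fetchFiles), so one import serves both.
import {
  uploadDirectory,
  uploadInMemoryFiles,
  fetchFiles,
} from "@empiricalrun/r2-uploader";
```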
package/dist/types.d.ts
ADDED

@@ -0,0 +1,20 @@
+/// <reference types="node" />
+interface R2BaseConfig {
+    accountId: string;
+    accessKeyId: string;
+    secretAccessKey: string;
+    bucket: string;
+}
+export interface R2UploadConfig extends R2BaseConfig {
+    destinationDir: string;
+    files: {
+        buffer: Buffer;
+        fileName: string;
+        mimeType?: string;
+    }[];
+}
+export interface R2FetchConfig extends R2BaseConfig {
+    prefix: string;
+}
+export {};
+//# sourceMappingURL=types.d.ts.map

package/dist/types.d.ts.map
ADDED

@@ -0,0 +1 @@
+{"version":3,"file":"types.d.ts","sourceRoot":"","sources":["../src/types.ts"],"names":[],"mappings":";AAAA,UAAU,YAAY;IACpB,SAAS,EAAE,MAAM,CAAC;IAClB,WAAW,EAAE,MAAM,CAAC;IACpB,eAAe,EAAE,MAAM,CAAC;IACxB,MAAM,EAAE,MAAM,CAAC;CAChB;AAED,MAAM,WAAW,cAAe,SAAQ,YAAY;IAClD,cAAc,EAAE,MAAM,CAAC;IACvB,KAAK,EAAE;QAAE,MAAM,EAAE,MAAM,CAAC;QAAC,QAAQ,EAAE,MAAM,CAAC;QAAC,QAAQ,CAAC,EAAE,MAAM,CAAA;KAAE,EAAE,CAAC;CAClE;AAED,MAAM,WAAW,aAAc,SAAQ,YAAY;IACjD,MAAM,EAAE,MAAM,CAAC;CAChB"}
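The base interface carries the four connection fields shared by both operations, and the derived interfaces add the per-operation extras. These types appear to stay internal to dist/types (the barrel only re-exports ./fetch and ./upload), so a config is most easily written structurally; all values below are placeholders:

```ts
// Matches the R2FetchConfig shape from types.d.ts above.
const fetchConfig = {
  accountId: "abc123",       // placeholder Cloudflare account id
  accessKeyId: "key-id",     // placeholder R2 access key id
  secretAccessKey: "secret", // placeholder R2 secret
  bucket: "reports",
  prefix: "runs/1234/",
};

// Matches the R2UploadConfig shape: same base fields plus destinationDir and files.
const uploadConfig = {
  accountId: "abc123",
  accessKeyId: "key-id",
  secretAccessKey: "secret",
  bucket: "reports",
  destinationDir: "runs/1234",
  files: [
    { buffer: Buffer.from("{}"), fileName: "summary.json", mimeType: "application/json" },
  ],
};
```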
package/dist/types.js
ADDED

@@ -0,0 +1,2 @@
+"use strict";
+Object.defineProperty(exports, "__esModule", { value: true });
package/dist/upload.d.ts
ADDED

@@ -0,0 +1,26 @@
+/// <reference types="node" />
+export interface FileMap {
+    [file: string]: string;
+}
+export declare function uploadDirectory({ sourceDir, fileList, destinationDir, uploadBucket, accountId, accessKeyId, secretAccessKey, }: {
+    sourceDir: string;
+    fileList?: string[];
+    destinationDir: string;
+    uploadBucket: string;
+    accountId?: string;
+    accessKeyId?: string;
+    secretAccessKey?: string;
+}): Promise<FileMap>;
+export declare function uploadInMemoryFiles({ files, destinationDir, uploadBucket, accountId, accessKeyId, secretAccessKey, }: {
+    files: {
+        buffer: Buffer;
+        fileName: string;
+        mimeType?: string;
+    }[];
+    destinationDir: string;
+    uploadBucket: string;
+    accountId?: string;
+    accessKeyId?: string;
+    secretAccessKey?: string;
+}): Promise<FileMap>;
+//# sourceMappingURL=upload.d.ts.map

package/dist/upload.d.ts.map
ADDED

@@ -0,0 +1 @@
+{"version":3,"file":"upload.d.ts","sourceRoot":"","sources":["../src/upload.ts"],"names":[],"mappings":";AAgBA,MAAM,WAAW,OAAO;IACtB,CAAC,IAAI,EAAE,MAAM,GAAG,MAAM,CAAC;CACxB;AAmHD,wBAAsB,eAAe,CAAC,EACpC,SAAS,EACT,QAAQ,EACR,cAAc,EACd,YAAY,EACZ,SAAS,EACT,WAAW,EACX,eAAe,GAChB,EAAE;IACD,SAAS,EAAE,MAAM,CAAC;IAClB,QAAQ,CAAC,EAAE,MAAM,EAAE,CAAC;IACpB,cAAc,EAAE,MAAM,CAAC;IACvB,YAAY,EAAE,MAAM,CAAC;IACrB,SAAS,CAAC,EAAE,MAAM,CAAC;IACnB,WAAW,CAAC,EAAE,MAAM,CAAC;IACrB,eAAe,CAAC,EAAE,MAAM,CAAC;CAC1B,GAAG,OAAO,CAAC,OAAO,CAAC,CA6BnB;AAED,wBAAsB,mBAAmB,CAAC,EACxC,KAAK,EACL,cAAc,EACd,YAAY,EACZ,SAAS,EACT,WAAW,EACX,eAAe,GAChB,EAAE;IACD,KAAK,EAAE;QAAE,MAAM,EAAE,MAAM,CAAC;QAAC,QAAQ,EAAE,MAAM,CAAC;QAAC,QAAQ,CAAC,EAAE,MAAM,CAAA;KAAE,EAAE,CAAC;IACjE,cAAc,EAAE,MAAM,CAAC;IACvB,YAAY,EAAE,MAAM,CAAC;IACrB,SAAS,CAAC,EAAE,MAAM,CAAC;IACnB,WAAW,CAAC,EAAE,MAAM,CAAC;IACrB,eAAe,CAAC,EAAE,MAAM,CAAC;CAC1B,GAAG,OAAO,CAAC,OAAO,CAAC,CAUnB"}
package/dist/upload.js
ADDED

@@ -0,0 +1,168 @@
+"use strict";
+var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) {
+    if (k2 === undefined) k2 = k;
+    var desc = Object.getOwnPropertyDescriptor(m, k);
+    if (!desc || ("get" in desc ? !m.__esModule : desc.writable || desc.configurable)) {
+      desc = { enumerable: true, get: function() { return m[k]; } };
+    }
+    Object.defineProperty(o, k2, desc);
+}) : (function(o, m, k, k2) {
+    if (k2 === undefined) k2 = k;
+    o[k2] = m[k];
+}));
+var __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? (function(o, v) {
+    Object.defineProperty(o, "default", { enumerable: true, value: v });
+}) : function(o, v) {
+    o["default"] = v;
+});
+var __importStar = (this && this.__importStar) || function (mod) {
+    if (mod && mod.__esModule) return mod;
+    var result = {};
+    if (mod != null) for (var k in mod) if (k !== "default" && Object.prototype.hasOwnProperty.call(mod, k)) __createBinding(result, mod, k);
+    __setModuleDefault(result, mod);
+    return result;
+};
+var __importDefault = (this && this.__importDefault) || function (mod) {
+    return (mod && mod.__esModule) ? mod : { "default": mod };
+};
+Object.defineProperty(exports, "__esModule", { value: true });
+exports.uploadInMemoryFiles = exports.uploadDirectory = void 0;
+const client_s3_1 = require("@aws-sdk/client-s3");
+const s3_request_presigner_1 = require("@aws-sdk/s3-request-presigner");
+const async_retry_1 = __importDefault(require("async-retry"));
+const fs = __importStar(require("fs"));
+const md5_1 = __importDefault(require("md5"));
+const mime_1 = __importDefault(require("mime"));
+const path_1 = __importDefault(require("path"));
+const getFileList = (dir) => {
+    let files = [];
+    const items = fs.readdirSync(dir, {
+        withFileTypes: true,
+    });
+    for (const item of items) {
+        const isDir = item.isDirectory();
+        const absolutePath = `${dir}/${item.name}`;
+        if (isDir) {
+            files = [...files, ...getFileList(absolutePath)];
+        }
+        else {
+            files.push(absolutePath);
+        }
+    }
+    return files;
+};
+const run = async (config) => {
+    const map = new Map();
+    const urls = {};
+    const S3 = new client_s3_1.S3Client({
+        region: "auto",
+        endpoint: `https://${config.accountId}.r2.cloudflarestorage.com`,
+        credentials: {
+            accessKeyId: config.accessKeyId,
+            secretAccessKey: config.secretAccessKey,
+        },
+    });
+    await Promise.all(config.files.map(async (file) => {
+        let fileKey = path_1.default.join(config.destinationDir, file.fileName);
+        if (fileKey.includes(".gitkeep")) {
+            return;
+        }
+        const mimeType = file.mimeType || "application/octet-stream";
+        const uploadParams = {
+            Bucket: config.bucket,
+            Key: fileKey,
+            Body: file.buffer,
+            ContentLength: file.buffer.length,
+            ContentType: mimeType ?? "application/octet-stream",
+        };
+        const cmd = new client_s3_1.PutObjectCommand(uploadParams);
+        const digest = (0, md5_1.default)(file.buffer);
+        cmd.middlewareStack.add((next) => async (args) => {
+            args.request.headers["if-none-match"] = `"${digest}"`;
+            return await next(args);
+        }, {
+            step: "build",
+            name: "addETag",
+        });
+        try {
+            await (0, async_retry_1.default)(async () => {
+                try {
+                    const data = await S3.send(cmd);
+                    map.set(file.fileName, data);
+                    const fileUrl = await (0, s3_request_presigner_1.getSignedUrl)(S3, cmd);
+                    urls[file.fileName] = fileUrl;
+                }
+                catch (err) {
+                    const error = err;
+                    if (error["$metadata"]) {
+                        // throw only those errors that are not 412 Precondition Failed
+                        // 412 errors are errors while accessing the asset, which is post upload and can be ignored
+                        if (error.$metadata.httpStatusCode !== 412) {
+                            throw error;
+                        }
+                    }
+                }
+            }, {
+                retries: 5,
+                factor: 3,
+                minTimeout: 1000,
+                maxTimeout: 60000,
+                randomize: true,
+                onRetry: (err, i) => {
+                    if (err) {
+                        const error = err;
+                        console.log("Upload retry attempt:", i, ":", file.fileName);
+                        console.log("Response status:", error.$response?.statusCode);
+                    }
+                },
+            });
+        }
+        catch (err) {
+            const error = err;
+            console.log(`R2 Error - ${file.fileName} \nError: ${error}`);
+            console.log("Upload response", error.$response, "with status", error.$metadata.httpStatusCode);
+        }
+        return;
+    }));
+    return urls;
+};
+async function uploadDirectory({ sourceDir, fileList, destinationDir, uploadBucket, accountId, accessKeyId, secretAccessKey, }) {
+    const filePaths = fileList || getFileList(sourceDir);
+    const files = filePaths
+        .map((filePath) => ({
+        path: filePath,
+        fileName: filePath.replace(sourceDir, ""),
+        mimeType: undefined,
+    }))
+        .map((file) => {
+        const mimeType = mime_1.default.getType(file.path) || "application/octet-stream";
+        return {
+            buffer: fs.readFileSync(file.path),
+            fileName: file.fileName,
+            mimeType,
+        };
+    });
+    let config = {
+        accountId: accountId || process.env.R2_ACCOUNT_ID,
+        accessKeyId: accessKeyId || process.env.R2_ACCESS_KEY_ID,
+        secretAccessKey: secretAccessKey || process.env.R2_SECRET_ACCESS_KEY,
+        bucket: uploadBucket,
+        destinationDir,
+        files,
+    };
+    const uploadedFiles = await run(config);
+    return uploadedFiles;
+}
+exports.uploadDirectory = uploadDirectory;
+async function uploadInMemoryFiles({ files, destinationDir, uploadBucket, accountId, accessKeyId, secretAccessKey, }) {
+    const config = {
+        accountId: accountId || process.env.R2_ACCOUNT_ID,
+        accessKeyId: accessKeyId || process.env.R2_ACCESS_KEY_ID,
+        secretAccessKey: secretAccessKey || process.env.R2_SECRET_ACCESS_KEY,
+        bucket: uploadBucket,
+        destinationDir,
+        files,
+    };
+    return await run(config);
+}
+exports.uploadInMemoryFiles = uploadInMemoryFiles;
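The upload path itself is unchanged by this release apart from the module move and the dropped per-file logs. A usage sketch relying on the `R2_ACCOUNT_ID`, `R2_ACCESS_KEY_ID`, and `R2_SECRET_ACCESS_KEY` fallbacks visible in `uploadDirectory` above; directory, prefix, and bucket names are placeholders:

```ts
import { uploadDirectory, uploadInMemoryFiles } from "@empiricalrun/r2-uploader";

async function publishRun() {
  // Credential params are optional; they fall back to the R2_* env vars.
  const urls = await uploadDirectory({
    sourceDir: "./playwright-report", // hypothetical local report directory
    destinationDir: "runs/1234",      // hypothetical destination prefix
    uploadBucket: "reports",          // hypothetical bucket name
  });
  // Resolves to a FileMap of fileName -> signed URL for each uploaded file.
  console.log(urls);

  await uploadInMemoryFiles({
    files: [{ buffer: Buffer.from("{}"), fileName: "summary.json", mimeType: "application/json" }],
    destinationDir: "runs/1234",
    uploadBucket: "reports",
  });
}
```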