@empiricalrun/r2-uploader 0.1.3 → 0.2.0
- package/CHANGELOG.md +6 -0
- package/dist/index.d.ts +13 -0
- package/dist/index.d.ts.map +1 -1
- package/dist/index.js +44 -35
- package/package.json +1 -1
package/CHANGELOG.md
CHANGED
package/dist/index.d.ts
CHANGED
@@ -1,3 +1,4 @@
+/// <reference types="node" />
 export interface FileMap {
     [file: string]: string;
 }
@@ -10,4 +11,16 @@ export declare function uploadDirectory({ sourceDir, fileList, destinationDir, u
     accessKeyId?: string;
     secretAccessKey?: string;
 }): Promise<FileMap>;
+export declare function uploadInMemoryFiles({ files, destinationDir, uploadBucket, accountId, accessKeyId, secretAccessKey, }: {
+    files: {
+        buffer: Buffer;
+        fileName: string;
+        mimeType?: string;
+    }[];
+    destinationDir: string;
+    uploadBucket: string;
+    accountId?: string;
+    accessKeyId?: string;
+    secretAccessKey?: string;
+}): Promise<FileMap>;
 //# sourceMappingURL=index.d.ts.map
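
The new uploadInMemoryFiles export takes already-buffered content instead of paths on disk and resolves to the same FileMap of file name -> presigned URL that uploadDirectory returns. A minimal usage sketch, assuming credentials are supplied via the R2_ACCOUNT_ID, R2_ACCESS_KEY_ID, and R2_SECRET_ACCESS_KEY environment variables; the bucket name, destination prefix, and file content below are hypothetical:

import { uploadInMemoryFiles } from "@empiricalrun/r2-uploader";

async function main() {
  // Upload a generated report straight from memory; nothing is written to disk.
  const urls = await uploadInMemoryFiles({
    files: [
      {
        buffer: Buffer.from("<html>report</html>"), // hypothetical content
        fileName: "report.html",                    // key suffix under destinationDir
        mimeType: "text/html",                      // optional; defaults to application/octet-stream
      },
    ],
    destinationDir: "reports/run-123", // hypothetical destination prefix
    uploadBucket: "my-r2-bucket",      // hypothetical bucket name
  });
  // urls is a FileMap keyed by fileName, e.g. { "report.html": "<presigned URL>" }
  console.log(urls["report.html"]);
}

main();
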
package/dist/index.d.ts.map
CHANGED
@@ -1 +1 @@
-{"version":3,"file":"index.d.ts","sourceRoot":"","sources":["../src/index.ts"],"names":[],"mappings":"AAuBA,MAAM,WAAW,OAAO;IACtB,CAAC,IAAI,EAAE,MAAM,GAAG,MAAM,CAAC;CACxB;
+{"version":3,"file":"index.d.ts","sourceRoot":"","sources":["../src/index.ts"],"names":[],"mappings":";AAuBA,MAAM,WAAW,OAAO;IACtB,CAAC,IAAI,EAAE,MAAM,GAAG,MAAM,CAAC;CACxB;AAuHD,wBAAsB,eAAe,CAAC,EACpC,SAAS,EACT,QAAQ,EACR,cAAc,EACd,YAAY,EACZ,SAAS,EACT,WAAW,EACX,eAAe,GAChB,EAAE;IACD,SAAS,EAAE,MAAM,CAAC;IAClB,QAAQ,CAAC,EAAE,MAAM,EAAE,CAAC;IACpB,cAAc,EAAE,MAAM,CAAC;IACvB,YAAY,EAAE,MAAM,CAAC;IACrB,SAAS,CAAC,EAAE,MAAM,CAAC;IACnB,WAAW,CAAC,EAAE,MAAM,CAAC;IACrB,eAAe,CAAC,EAAE,MAAM,CAAC;CAC1B,GAAG,OAAO,CAAC,OAAO,CAAC,CA6BnB;AAED,wBAAsB,mBAAmB,CAAC,EACxC,KAAK,EACL,cAAc,EACd,YAAY,EACZ,SAAS,EACT,WAAW,EACX,eAAe,GAChB,EAAE;IACD,KAAK,EAAE;QAAE,MAAM,EAAE,MAAM,CAAC;QAAC,QAAQ,EAAE,MAAM,CAAC;QAAC,QAAQ,CAAC,EAAE,MAAM,CAAA;KAAE,EAAE,CAAC;IACjE,cAAc,EAAE,MAAM,CAAC;IACvB,YAAY,EAAE,MAAM,CAAC;IACrB,SAAS,CAAC,EAAE,MAAM,CAAC;IACnB,WAAW,CAAC,EAAE,MAAM,CAAC;IACrB,eAAe,CAAC,EAAE,MAAM,CAAC;CAC1B,GAAG,OAAO,CAAC,OAAO,CAAC,CAUnB"}
package/dist/index.js
CHANGED
@@ -26,12 +26,13 @@ var __importDefault = (this && this.__importDefault) || function (mod) {
     return (mod && mod.__esModule) ? mod : { "default": mod };
 };
 Object.defineProperty(exports, "__esModule", { value: true });
-exports.uploadDirectory = void 0;
+exports.uploadInMemoryFiles = exports.uploadDirectory = void 0;
 const client_s3_1 = require("@aws-sdk/client-s3");
 const s3_request_presigner_1 = require("@aws-sdk/s3-request-presigner");
 const async_retry_1 = __importDefault(require("async-retry"));
 const fs = __importStar(require("fs"));
 const md5_1 = __importDefault(require("md5"));
+const mime_1 = __importDefault(require("mime"));
 const path_1 = __importDefault(require("path"));
 const getFileList = (dir) => {
     let files = [];
@@ -61,38 +62,21 @@ const run = async (config) => {
             secretAccessKey: config.secretAccessKey,
         },
     });
-
-
-
-    }
-    else {
-        files = getFileList(config.sourceDir);
-    }
-    await Promise.all(files.map(async (file) => {
-        // console.log("\nStarting file upload for : ", file);
-        const fileStream = fs.readFileSync(file);
-        const fileName = file.replace(config.sourceDir, "");
-        const fileKey = path_1.default.join(config.destinationDir !== "" ? config.destinationDir : config.sourceDir, fileName);
-        if (fileKey.includes(".gitkeep"))
+    await Promise.all(config.files.map(async (file) => {
+        let fileKey = path_1.default.join(config.destinationDir, file.fileName);
+        if (fileKey.includes(".gitkeep")) {
             return;
-        // console.log(fileKey);
-        let mimeType = "application/octet-stream";
-        try {
-            const mime = (await import("mime")).default;
-            mimeType = mime.getType(file) || "application/octet-stream";
-        }
-        catch (err) {
-            console.warn("Failed to get mime type for file", file, err);
         }
+        const mimeType = file.mimeType || "application/octet-stream";
         const uploadParams = {
             Bucket: config.bucket,
             Key: fileKey,
-            Body:
-            ContentLength:
+            Body: file.buffer,
+            ContentLength: file.buffer.length,
             ContentType: mimeType ?? "application/octet-stream",
         };
         const cmd = new client_s3_1.PutObjectCommand(uploadParams);
-        const digest = (0, md5_1.default)(
+        const digest = (0, md5_1.default)(file.buffer);
         cmd.middlewareStack.add((next) => async (args) => {
             args.request.headers["if-none-match"] = `"${digest}"`;
             return await next(args);
@@ -103,16 +87,15 @@ const run = async (config) => {
         try {
             await (0, async_retry_1.default)(async () => {
                 try {
-                    console.log("\
+                    console.log("\nStarting file upload for:", file.fileName, "\n");
                     const data = await S3.send(cmd);
-
-                    map.set(file, data);
+                    map.set(file.fileName, data);
                     const fileUrl = await (0, s3_request_presigner_1.getSignedUrl)(S3, cmd);
-                    urls[file] = fileUrl;
-                    console.log("\
+                    urls[file.fileName] = fileUrl;
+                    console.log("\nFinished file upload for:", file.fileName, "\n");
                 }
                 catch (err) {
-                    console.log("\
+                    console.log("\nError uploading file: ", file.fileName, err, "\n");
                     const error = err;
                     if (error["$metadata"]) {
                         // throw only those errors that are not 412 Precondition Failed
@@ -131,7 +114,7 @@ const run = async (config) => {
             onRetry: (err, i) => {
                 if (err) {
                     const error = err;
-                    console.log("Upload retry attempt:", i, ":", file);
+                    console.log("Upload retry attempt:", i, ":", file.fileName);
                     console.log("Response status:", error.$response?.statusCode);
                 }
             },
@@ -139,7 +122,7 @@ const run = async (config) => {
         }
         catch (err) {
             const error = err;
-            console.log(`R2 Error - ${file
+            console.log(`R2 Error - ${file.fileName} \nError: ${error}`);
             console.log("Upload response", error.$response, "with status", error.$metadata.httpStatusCode);
         }
         return;
@@ -147,16 +130,42 @@ const run = async (config) => {
     return urls;
 };
 async function uploadDirectory({ sourceDir, fileList, destinationDir, uploadBucket, accountId, accessKeyId, secretAccessKey, }) {
+    const filePaths = fileList || getFileList(sourceDir);
+    const files = filePaths
+        .map((filePath) => ({
+        path: filePath,
+        fileName: filePath.replace(sourceDir, ""),
+        mimeType: undefined,
+    }))
+        .map((file) => {
+        const mimeType = mime_1.default.getType(file.path) || "application/octet-stream";
+        return {
+            buffer: fs.readFileSync(file.path),
+            fileName: file.fileName,
+            mimeType,
+        };
+    });
     let config = {
         accountId: accountId || process.env.R2_ACCOUNT_ID,
         accessKeyId: accessKeyId || process.env.R2_ACCESS_KEY_ID,
         secretAccessKey: secretAccessKey || process.env.R2_SECRET_ACCESS_KEY,
         bucket: uploadBucket,
-        sourceDir,
         destinationDir,
-
+        files,
     };
     const uploadedFiles = await run(config);
     return uploadedFiles;
 }
 exports.uploadDirectory = uploadDirectory;
+async function uploadInMemoryFiles({ files, destinationDir, uploadBucket, accountId, accessKeyId, secretAccessKey, }) {
+    const config = {
+        accountId: accountId || process.env.R2_ACCOUNT_ID,
+        accessKeyId: accessKeyId || process.env.R2_ACCESS_KEY_ID,
+        secretAccessKey: secretAccessKey || process.env.R2_SECRET_ACCESS_KEY,
+        bucket: uploadBucket,
+        destinationDir,
+        files,
+    };
+    return await run(config);
+}
+exports.uploadInMemoryFiles = uploadInMemoryFiles;
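
Net effect of the index.js changes: uploadDirectory now reads each file from sourceDir into a buffer, resolves its MIME type up front with the statically required mime package, and hands the resulting { buffer, fileName, mimeType } entries to the same internal run() helper that uploadInMemoryFiles uses. Each object is still sent with an if-none-match header set to the MD5 digest of its body, so re-uploading identical content comes back as 412 Precondition Failed, which the retry logic deliberately does not rethrow. A sketch of an equivalent directory upload from the caller's side; the directory, prefix, and bucket names are hypothetical, and credentials are again assumed to come from the R2_* environment variables:

import { uploadDirectory } from "@empiricalrun/r2-uploader";

async function main() {
  // Uploads every file under ./artifacts (skipping .gitkeep) to the given prefix
  // and resolves to a FileMap of file name -> presigned URL.
  const urls = await uploadDirectory({
    sourceDir: "./artifacts",     // hypothetical local directory
    destinationDir: "ci/run-123", // hypothetical destination prefix
    uploadBucket: "my-r2-bucket", // hypothetical bucket name
  });
  console.log(Object.keys(urls));
}

main();
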