@empiricalrun/r2-uploader 0.3.1 → 0.3.2

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
package/CHANGELOG.md CHANGED
@@ -1,5 +1,11 @@
  # @empiricalrun/r2-uploader

+ ## 0.3.2
+
+ ### Patch Changes
+
+ - f317919: feat: use multipart upload with file streams
+
  ## 0.3.1

  ### Patch Changes
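For orientation, here is a minimal usage sketch of the stream-based directory upload introduced in this release, written against the uploadDirectory signature declared later in this diff (dist/upload/index.d.ts); the paths and bucket name are placeholders:

```ts
import { uploadDirectory } from "@empiricalrun/r2-uploader";

// Placeholder paths and bucket; credentials are read from R2_ACCOUNT_ID,
// R2_ACCESS_KEY_ID and R2_SECRET_ACCESS_KEY when not passed explicitly.
const urls = await uploadDirectory({
  sourceDir: "./build",          // local directory, walked recursively
  destinationDir: "releases/v1", // key prefix inside the bucket
  uploadBucket: "my-bucket",
});
// urls maps each uploaded file name to a presigned GET URL.
```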
package/dist/delete.d.ts ADDED
@@ -0,0 +1,3 @@
+ import { R2DeleteConfig } from "./types";
+ export declare function deleteFile(fileKey: string, config: R2DeleteConfig): Promise<import("@aws-sdk/client-s3").DeleteObjectCommandOutput>;
+ //# sourceMappingURL=delete.d.ts.map
package/dist/delete.d.ts.map ADDED
@@ -0,0 +1 @@
+ {"version":3,"file":"delete.d.ts","sourceRoot":"","sources":["../src/delete.ts"],"names":[],"mappings":"AAEA,OAAO,EAAE,cAAc,EAAE,MAAM,SAAS,CAAC;AAEzC,wBAAsB,UAAU,CAAC,OAAO,EAAE,MAAM,EAAE,MAAM,EAAE,cAAc,mEAqBvE"}
package/dist/delete.js ADDED
@@ -0,0 +1,28 @@
+ "use strict";
+ Object.defineProperty(exports, "__esModule", { value: true });
+ exports.deleteFile = void 0;
+ const client_s3_1 = require("@aws-sdk/client-s3");
+ async function deleteFile(fileKey, config) {
+ const s3Client = new client_s3_1.S3Client({
+ region: "auto",
+ endpoint: `https://${config.accountId}.r2.cloudflarestorage.com`,
+ credentials: {
+ accessKeyId: config.accessKeyId,
+ secretAccessKey: config.secretAccessKey,
+ },
+ });
+ const params = {
+ Bucket: config.bucket,
+ Key: fileKey,
+ };
+ try {
+ const command = new client_s3_1.DeleteObjectCommand(params);
+ const response = await s3Client.send(command);
+ return response;
+ }
+ catch (error) {
+ console.error(`Error deleting file from R2: ${error}`);
+ throw error;
+ }
+ }
+ exports.deleteFile = deleteFile;
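A minimal sketch of calling the new deleteFile helper shown above. Note that dist/index.js does not re-export it in 0.3.2, so the deep import path below is an assumption; the key, bucket, and credentials are placeholders:

```ts
// Assumed deep import; deleteFile is not re-exported from the package root.
import { deleteFile } from "@empiricalrun/r2-uploader/dist/delete";

await deleteFile("releases/v1/app.js", {
  accountId: "<r2-account-id>",
  accessKeyId: "<access-key-id>",
  secretAccessKey: "<secret-access-key>",
  bucket: "my-bucket",
});
```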
package/dist/index.d.ts CHANGED
@@ -1,3 +1,4 @@
- export * from "./fetch";
- export * from "./upload";
+ export { fetchFiles } from "./fetch";
+ export type { FileMap } from "./types";
+ export { uploadDirectory, uploadInMemoryFiles } from "./upload";
  //# sourceMappingURL=index.d.ts.map
package/dist/index.d.ts.map CHANGED
@@ -1 +1 @@
- {"version":3,"file":"index.d.ts","sourceRoot":"","sources":["../src/index.ts"],"names":[],"mappings":"AAAA,cAAc,SAAS,CAAC;AACxB,cAAc,UAAU,CAAC"}
+ {"version":3,"file":"index.d.ts","sourceRoot":"","sources":["../src/index.ts"],"names":[],"mappings":"AAAA,OAAO,EAAE,UAAU,EAAE,MAAM,SAAS,CAAC;AACrC,YAAY,EAAE,OAAO,EAAE,MAAM,SAAS,CAAC;AACvC,OAAO,EAAE,eAAe,EAAE,mBAAmB,EAAE,MAAM,UAAU,CAAC"}
package/dist/index.js CHANGED
@@ -1,18 +1,8 @@
  "use strict";
- var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) {
- if (k2 === undefined) k2 = k;
- var desc = Object.getOwnPropertyDescriptor(m, k);
- if (!desc || ("get" in desc ? !m.__esModule : desc.writable || desc.configurable)) {
- desc = { enumerable: true, get: function() { return m[k]; } };
- }
- Object.defineProperty(o, k2, desc);
- }) : (function(o, m, k, k2) {
- if (k2 === undefined) k2 = k;
- o[k2] = m[k];
- }));
- var __exportStar = (this && this.__exportStar) || function(m, exports) {
- for (var p in m) if (p !== "default" && !Object.prototype.hasOwnProperty.call(exports, p)) __createBinding(exports, m, p);
- };
  Object.defineProperty(exports, "__esModule", { value: true });
- __exportStar(require("./fetch"), exports);
- __exportStar(require("./upload"), exports);
+ exports.uploadInMemoryFiles = exports.uploadDirectory = exports.fetchFiles = void 0;
+ var fetch_1 = require("./fetch");
+ Object.defineProperty(exports, "fetchFiles", { enumerable: true, get: function () { return fetch_1.fetchFiles; } });
+ var upload_1 = require("./upload");
+ Object.defineProperty(exports, "uploadDirectory", { enumerable: true, get: function () { return upload_1.uploadDirectory; } });
+ Object.defineProperty(exports, "uploadInMemoryFiles", { enumerable: true, get: function () { return upload_1.uploadInMemoryFiles; } });
package/dist/types.d.ts CHANGED
@@ -5,7 +5,15 @@ interface R2BaseConfig {
  secretAccessKey: string;
  bucket: string;
  }
- export interface R2UploadConfig extends R2BaseConfig {
+ export interface R2UploadStreamConfig extends R2BaseConfig {
+ destinationDir: string;
+ files: {
+ fullPath: string;
+ fileName: string;
+ mimeType?: string;
+ }[];
+ }
+ export interface R2UploadBufferConfig extends R2BaseConfig {
  destinationDir: string;
  files: {
  buffer: Buffer;
@@ -16,5 +24,10 @@ export interface R2UploadConfig extends R2BaseConfig {
  export interface R2FetchConfig extends R2BaseConfig {
  prefix: string;
  }
+ export interface R2DeleteConfig extends R2BaseConfig {
+ }
+ export interface FileMap {
+ [file: string]: string;
+ }
  export {};
  //# sourceMappingURL=types.d.ts.map
package/dist/types.d.ts.map CHANGED
@@ -1 +1 @@
- {"version":3,"file":"types.d.ts","sourceRoot":"","sources":["../src/types.ts"],"names":[],"mappings":";AAAA,UAAU,YAAY;IACpB,SAAS,EAAE,MAAM,CAAC;IAClB,WAAW,EAAE,MAAM,CAAC;IACpB,eAAe,EAAE,MAAM,CAAC;IACxB,MAAM,EAAE,MAAM,CAAC;CAChB;AAED,MAAM,WAAW,cAAe,SAAQ,YAAY;IAClD,cAAc,EAAE,MAAM,CAAC;IACvB,KAAK,EAAE;QAAE,MAAM,EAAE,MAAM,CAAC;QAAC,QAAQ,EAAE,MAAM,CAAC;QAAC,QAAQ,CAAC,EAAE,MAAM,CAAA;KAAE,EAAE,CAAC;CAClE;AAED,MAAM,WAAW,aAAc,SAAQ,YAAY;IACjD,MAAM,EAAE,MAAM,CAAC;CAChB"}
+ {"version":3,"file":"types.d.ts","sourceRoot":"","sources":["../src/types.ts"],"names":[],"mappings":";AAAA,UAAU,YAAY;IACpB,SAAS,EAAE,MAAM,CAAC;IAClB,WAAW,EAAE,MAAM,CAAC;IACpB,eAAe,EAAE,MAAM,CAAC;IACxB,MAAM,EAAE,MAAM,CAAC;CAChB;AAED,MAAM,WAAW,oBAAqB,SAAQ,YAAY;IACxD,cAAc,EAAE,MAAM,CAAC;IACvB,KAAK,EAAE;QAAE,QAAQ,EAAE,MAAM,CAAC;QAAC,QAAQ,EAAE,MAAM,CAAC;QAAC,QAAQ,CAAC,EAAE,MAAM,CAAA;KAAE,EAAE,CAAC;CACpE;AAED,MAAM,WAAW,oBAAqB,SAAQ,YAAY;IACxD,cAAc,EAAE,MAAM,CAAC;IACvB,KAAK,EAAE;QAAE,MAAM,EAAE,MAAM,CAAC;QAAC,QAAQ,EAAE,MAAM,CAAC;QAAC,QAAQ,CAAC,EAAE,MAAM,CAAA;KAAE,EAAE,CAAC;CAClE;AAED,MAAM,WAAW,aAAc,SAAQ,YAAY;IACjD,MAAM,EAAE,MAAM,CAAC;CAChB;AAED,MAAM,WAAW,cAAe,SAAQ,YAAY;CAAG;AAEvD,MAAM,WAAW,OAAO;IACtB,CAAC,IAAI,EAAE,MAAM,GAAG,MAAM,CAAC;CACxB"}
package/dist/upload/buffer.d.ts ADDED
@@ -0,0 +1,3 @@
+ import { FileMap, R2UploadBufferConfig } from "../types";
+ export declare const uploadFileBuffers: (config: R2UploadBufferConfig) => Promise<FileMap>;
+ //# sourceMappingURL=buffer.d.ts.map
package/dist/upload/buffer.d.ts.map ADDED
@@ -0,0 +1 @@
+ {"version":3,"file":"buffer.d.ts","sourceRoot":"","sources":["../../src/upload/buffer.ts"],"names":[],"mappings":"AAYA,OAAO,EAAE,OAAO,EAAE,oBAAoB,EAAE,MAAM,UAAU,CAAC;AAEzD,eAAO,MAAM,iBAAiB,WAAkB,oBAAoB,qBA6FnE,CAAC"}
@@ -1,57 +1,15 @@
  "use strict";
- var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) {
- if (k2 === undefined) k2 = k;
- var desc = Object.getOwnPropertyDescriptor(m, k);
- if (!desc || ("get" in desc ? !m.__esModule : desc.writable || desc.configurable)) {
- desc = { enumerable: true, get: function() { return m[k]; } };
- }
- Object.defineProperty(o, k2, desc);
- }) : (function(o, m, k, k2) {
- if (k2 === undefined) k2 = k;
- o[k2] = m[k];
- }));
- var __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? (function(o, v) {
- Object.defineProperty(o, "default", { enumerable: true, value: v });
- }) : function(o, v) {
- o["default"] = v;
- });
- var __importStar = (this && this.__importStar) || function (mod) {
- if (mod && mod.__esModule) return mod;
- var result = {};
- if (mod != null) for (var k in mod) if (k !== "default" && Object.prototype.hasOwnProperty.call(mod, k)) __createBinding(result, mod, k);
- __setModuleDefault(result, mod);
- return result;
- };
  var __importDefault = (this && this.__importDefault) || function (mod) {
  return (mod && mod.__esModule) ? mod : { "default": mod };
  };
  Object.defineProperty(exports, "__esModule", { value: true });
- exports.uploadInMemoryFiles = exports.uploadDirectory = void 0;
+ exports.uploadFileBuffers = void 0;
  const client_s3_1 = require("@aws-sdk/client-s3");
  const s3_request_presigner_1 = require("@aws-sdk/s3-request-presigner");
  const async_retry_1 = __importDefault(require("async-retry"));
- const fs = __importStar(require("fs"));
  const md5_1 = __importDefault(require("md5"));
- const mime_1 = __importDefault(require("mime"));
  const path_1 = __importDefault(require("path"));
- const getFileList = (dir) => {
- let files = [];
- const items = fs.readdirSync(dir, {
- withFileTypes: true,
- });
- for (const item of items) {
- const isDir = item.isDirectory();
- const absolutePath = `${dir}/${item.name}`;
- if (isDir) {
- files = [...files, ...getFileList(absolutePath)];
- }
- else {
- files.push(absolutePath);
- }
- }
- return files;
- };
- const run = async (config) => {
+ const uploadFileBuffers = async (config) => {
  const map = new Map();
  const urls = {};
  const S3 = new client_s3_1.S3Client({
@@ -126,43 +84,4 @@ const run = async (config) => {
  }));
  return urls;
  };
- async function uploadDirectory({ sourceDir, fileList, destinationDir, uploadBucket, accountId, accessKeyId, secretAccessKey, }) {
- const filePaths = fileList || getFileList(sourceDir);
- const files = filePaths
- .map((filePath) => ({
- path: filePath,
- fileName: filePath.replace(sourceDir, ""),
- mimeType: undefined,
- }))
- .map((file) => {
- const mimeType = mime_1.default.getType(file.path) || "application/octet-stream";
- return {
- buffer: fs.readFileSync(file.path),
- fileName: file.fileName,
- mimeType,
- };
- });
- let config = {
- accountId: accountId || process.env.R2_ACCOUNT_ID,
- accessKeyId: accessKeyId || process.env.R2_ACCESS_KEY_ID,
- secretAccessKey: secretAccessKey || process.env.R2_SECRET_ACCESS_KEY,
- bucket: uploadBucket,
- destinationDir,
- files,
- };
- const uploadedFiles = await run(config);
- return uploadedFiles;
- }
- exports.uploadDirectory = uploadDirectory;
- async function uploadInMemoryFiles({ files, destinationDir, uploadBucket, accountId, accessKeyId, secretAccessKey, }) {
- const config = {
- accountId: accountId || process.env.R2_ACCOUNT_ID,
- accessKeyId: accessKeyId || process.env.R2_ACCESS_KEY_ID,
- secretAccessKey: secretAccessKey || process.env.R2_SECRET_ACCESS_KEY,
- bucket: uploadBucket,
- destinationDir,
- files,
- };
- return await run(config);
- }
- exports.uploadInMemoryFiles = uploadInMemoryFiles;
+ exports.uploadFileBuffers = uploadFileBuffers;
@@ -1,7 +1,5 @@
  /// <reference types="node" />
- export interface FileMap {
- [file: string]: string;
- }
+ import { FileMap } from "../types";
  export declare function uploadDirectory({ sourceDir, fileList, destinationDir, uploadBucket, accountId, accessKeyId, secretAccessKey, }: {
  sourceDir: string;
  fileList?: string[];
@@ -23,4 +21,4 @@ export declare function uploadInMemoryFiles({ files, destinationDir, uploadBucke
  accessKeyId?: string;
  secretAccessKey?: string;
  }): Promise<FileMap>;
- //# sourceMappingURL=upload.d.ts.map
+ //# sourceMappingURL=index.d.ts.map
package/dist/upload/index.d.ts.map ADDED
@@ -0,0 +1 @@
+ {"version":3,"file":"index.d.ts","sourceRoot":"","sources":["../../src/upload/index.ts"],"names":[],"mappings":";AAEA,OAAO,EAAE,OAAO,EAA8C,MAAM,UAAU,CAAC;AAqB/E,wBAAsB,eAAe,CAAC,EACpC,SAAS,EACT,QAAQ,EACR,cAAc,EACd,YAAY,EACZ,SAAS,EACT,WAAW,EACX,eAAe,GAChB,EAAE;IACD,SAAS,EAAE,MAAM,CAAC;IAClB,QAAQ,CAAC,EAAE,MAAM,EAAE,CAAC;IACpB,cAAc,EAAE,MAAM,CAAC;IACvB,YAAY,EAAE,MAAM,CAAC;IACrB,SAAS,CAAC,EAAE,MAAM,CAAC;IACnB,WAAW,CAAC,EAAE,MAAM,CAAC;IACrB,eAAe,CAAC,EAAE,MAAM,CAAC;CAC1B,GAAG,OAAO,CAAC,OAAO,CAAC,CAiBnB;AAED,wBAAsB,mBAAmB,CAAC,EACxC,KAAK,EACL,cAAc,EACd,YAAY,EACZ,SAAS,EACT,WAAW,EACX,eAAe,GAChB,EAAE;IACD,KAAK,EAAE;QAAE,MAAM,EAAE,MAAM,CAAC;QAAC,QAAQ,EAAE,MAAM,CAAC;QAAC,QAAQ,CAAC,EAAE,MAAM,CAAA;KAAE,EAAE,CAAC;IACjE,cAAc,EAAE,MAAM,CAAC;IACvB,YAAY,EAAE,MAAM,CAAC;IACrB,SAAS,CAAC,EAAE,MAAM,CAAC;IACnB,WAAW,CAAC,EAAE,MAAM,CAAC;IACrB,eAAe,CAAC,EAAE,MAAM,CAAC;CAC1B,GAAG,OAAO,CAAC,OAAO,CAAC,CAUnB"}
package/dist/upload/index.js ADDED
@@ -0,0 +1,77 @@
+ "use strict";
+ var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) {
+ if (k2 === undefined) k2 = k;
+ var desc = Object.getOwnPropertyDescriptor(m, k);
+ if (!desc || ("get" in desc ? !m.__esModule : desc.writable || desc.configurable)) {
+ desc = { enumerable: true, get: function() { return m[k]; } };
+ }
+ Object.defineProperty(o, k2, desc);
+ }) : (function(o, m, k, k2) {
+ if (k2 === undefined) k2 = k;
+ o[k2] = m[k];
+ }));
+ var __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? (function(o, v) {
+ Object.defineProperty(o, "default", { enumerable: true, value: v });
+ }) : function(o, v) {
+ o["default"] = v;
+ });
+ var __importStar = (this && this.__importStar) || function (mod) {
+ if (mod && mod.__esModule) return mod;
+ var result = {};
+ if (mod != null) for (var k in mod) if (k !== "default" && Object.prototype.hasOwnProperty.call(mod, k)) __createBinding(result, mod, k);
+ __setModuleDefault(result, mod);
+ return result;
+ };
+ Object.defineProperty(exports, "__esModule", { value: true });
+ exports.uploadInMemoryFiles = exports.uploadDirectory = void 0;
+ const fs = __importStar(require("fs"));
+ const buffer_1 = require("./buffer");
+ const stream_1 = require("./stream");
+ const getFileList = (dir) => {
+ let files = [];
+ const items = fs.readdirSync(dir, {
+ withFileTypes: true,
+ });
+ for (const item of items) {
+ const isDir = item.isDirectory();
+ const absolutePath = `${dir}/${item.name}`;
+ if (isDir) {
+ files = [...files, ...getFileList(absolutePath)];
+ }
+ else {
+ files.push(absolutePath);
+ }
+ }
+ return files;
+ };
+ async function uploadDirectory({ sourceDir, fileList, destinationDir, uploadBucket, accountId, accessKeyId, secretAccessKey, }) {
+ const filePaths = fileList || getFileList(sourceDir);
+ const files = filePaths.map((filePath) => ({
+ fullPath: filePath,
+ fileName: filePath.replace(sourceDir, ""), // relative path
+ mimeType: undefined,
+ }));
+ const config = {
+ accountId: accountId || process.env.R2_ACCOUNT_ID,
+ accessKeyId: accessKeyId || process.env.R2_ACCESS_KEY_ID,
+ secretAccessKey: secretAccessKey || process.env.R2_SECRET_ACCESS_KEY,
+ bucket: uploadBucket,
+ destinationDir,
+ files,
+ };
+ const uploadedFiles = await (0, stream_1.uploadFileStreams)(config);
+ return uploadedFiles;
+ }
+ exports.uploadDirectory = uploadDirectory;
+ async function uploadInMemoryFiles({ files, destinationDir, uploadBucket, accountId, accessKeyId, secretAccessKey, }) {
+ const config = {
+ accountId: accountId || process.env.R2_ACCOUNT_ID,
+ accessKeyId: accessKeyId || process.env.R2_ACCESS_KEY_ID,
+ secretAccessKey: secretAccessKey || process.env.R2_SECRET_ACCESS_KEY,
+ bucket: uploadBucket,
+ destinationDir,
+ files,
+ };
+ return await (0, buffer_1.uploadFileBuffers)(config);
+ }
+ exports.uploadInMemoryFiles = uploadInMemoryFiles;
package/dist/upload/stream.d.ts ADDED
@@ -0,0 +1,3 @@
+ import { FileMap, R2UploadStreamConfig } from "../types";
+ export declare const uploadFileStreams: (config: R2UploadStreamConfig) => Promise<FileMap>;
+ //# sourceMappingURL=stream.d.ts.map
package/dist/upload/stream.d.ts.map ADDED
@@ -0,0 +1 @@
+ {"version":3,"file":"stream.d.ts","sourceRoot":"","sources":["../../src/upload/stream.ts"],"names":[],"mappings":"AAYA,OAAO,EAAE,OAAO,EAAE,oBAAoB,EAAE,MAAM,UAAU,CAAC;AAEzD,eAAO,MAAM,iBAAiB,WAAkB,oBAAoB,qBAkGnE,CAAC"}
package/dist/upload/stream.js ADDED
@@ -0,0 +1,89 @@
+ "use strict";
+ var __importDefault = (this && this.__importDefault) || function (mod) {
+ return (mod && mod.__esModule) ? mod : { "default": mod };
+ };
+ Object.defineProperty(exports, "__esModule", { value: true });
+ exports.uploadFileStreams = void 0;
+ const client_s3_1 = require("@aws-sdk/client-s3");
+ const s3_request_presigner_1 = require("@aws-sdk/s3-request-presigner");
+ const async_retry_1 = __importDefault(require("async-retry"));
+ const fs_1 = __importDefault(require("fs"));
+ const path_1 = __importDefault(require("path"));
+ const uploadFileStreams = async (config) => {
+ const urls = {};
+ const S3 = new client_s3_1.S3Client({
+ region: "auto",
+ endpoint: `https://${config.accountId}.r2.cloudflarestorage.com`,
+ credentials: {
+ accessKeyId: config.accessKeyId,
+ secretAccessKey: config.secretAccessKey,
+ },
+ });
+ await Promise.all(config.files.map(async (file) => {
+ let fileKey = path_1.default.join(config.destinationDir, file.fileName);
+ if (fileKey.includes(".gitkeep")) {
+ return;
+ }
+ const mimeType = file.mimeType || "application/octet-stream";
+ const createMultipartUploadParams = {
+ Bucket: config.bucket,
+ Key: fileKey,
+ ContentType: mimeType,
+ };
+ const createMultipartUploadCommand = new client_s3_1.CreateMultipartUploadCommand(createMultipartUploadParams);
+ const { UploadId } = await S3.send(createMultipartUploadCommand);
+ const partSize = 5 * 1024 * 1024; // 5MB
+ const fileStream = fs_1.default.createReadStream(file.fullPath, {
+ highWaterMark: partSize,
+ });
+ let partNumber = 1;
+ const parts = [];
+ for await (const chunk of fileStream) {
+ const uploadPartParams = {
+ Bucket: config.bucket,
+ Key: fileKey,
+ PartNumber: partNumber,
+ UploadId,
+ Body: chunk,
+ };
+ const uploadPartCommand = new client_s3_1.UploadPartCommand(uploadPartParams);
+ try {
+ await (0, async_retry_1.default)(async () => {
+ const { ETag } = await S3.send(uploadPartCommand);
+ parts.push({ ETag: ETag, PartNumber: partNumber });
+ partNumber++;
+ }, {
+ retries: 5,
+ factor: 3,
+ minTimeout: 1000,
+ maxTimeout: 60000,
+ randomize: true,
+ onRetry: (err, i) => {
+ console.log("Upload part retry attempt:", i, ":", file.fileName);
+ },
+ });
+ }
+ catch (err) {
+ console.log(`R2 Error - ${file.fileName} \nError: ${err}`);
+ }
+ }
+ const completeMultipartUploadParams = {
+ Bucket: config.bucket,
+ Key: fileKey,
+ UploadId,
+ MultipartUpload: {
+ Parts: parts,
+ },
+ };
+ const completeMultipartUploadCommand = new client_s3_1.CompleteMultipartUploadCommand(completeMultipartUploadParams);
+ await S3.send(completeMultipartUploadCommand);
+ const getObjectCommand = new client_s3_1.GetObjectCommand({
+ Bucket: config.bucket,
+ Key: fileKey,
+ });
+ const fileUrl = await (0, s3_request_presigner_1.getSignedUrl)(S3, getObjectCommand);
+ urls[file.fileName] = fileUrl;
+ }));
+ return urls;
+ };
+ exports.uploadFileStreams = uploadFileStreams;
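In-memory uploads keep the buffer-based path (dist/upload/buffer.js above). A minimal sketch of uploadInMemoryFiles with placeholder values, following the declaration in dist/upload/index.d.ts:

```ts
import { Buffer } from "node:buffer";
import { uploadInMemoryFiles } from "@empiricalrun/r2-uploader";

// Credentials fall back to R2_ACCOUNT_ID, R2_ACCESS_KEY_ID and
// R2_SECRET_ACCESS_KEY when omitted (see dist/upload/index.js above).
const urls = await uploadInMemoryFiles({
  files: [
    {
      buffer: Buffer.from("hello world"),
      fileName: "hello.txt",
      mimeType: "text/plain", // optional
    },
  ],
  destinationDir: "releases/v1", // key prefix inside the bucket
  uploadBucket: "my-bucket",
});
```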
package/package.json CHANGED
@@ -1,6 +1,6 @@
  {
  "name": "@empiricalrun/r2-uploader",
- "version": "0.3.1",
+ "version": "0.3.2",
  "publishConfig": {
  "registry": "https://registry.npmjs.org/",
  "access": "public"
Binary file
@@ -1 +0,0 @@
- {"version":3,"file":"upload.d.ts","sourceRoot":"","sources":["../src/upload.ts"],"names":[],"mappings":";AAgBA,MAAM,WAAW,OAAO;IACtB,CAAC,IAAI,EAAE,MAAM,GAAG,MAAM,CAAC;CACxB;AAmHD,wBAAsB,eAAe,CAAC,EACpC,SAAS,EACT,QAAQ,EACR,cAAc,EACd,YAAY,EACZ,SAAS,EACT,WAAW,EACX,eAAe,GAChB,EAAE;IACD,SAAS,EAAE,MAAM,CAAC;IAClB,QAAQ,CAAC,EAAE,MAAM,EAAE,CAAC;IACpB,cAAc,EAAE,MAAM,CAAC;IACvB,YAAY,EAAE,MAAM,CAAC;IACrB,SAAS,CAAC,EAAE,MAAM,CAAC;IACnB,WAAW,CAAC,EAAE,MAAM,CAAC;IACrB,eAAe,CAAC,EAAE,MAAM,CAAC;CAC1B,GAAG,OAAO,CAAC,OAAO,CAAC,CA6BnB;AAED,wBAAsB,mBAAmB,CAAC,EACxC,KAAK,EACL,cAAc,EACd,YAAY,EACZ,SAAS,EACT,WAAW,EACX,eAAe,GAChB,EAAE;IACD,KAAK,EAAE;QAAE,MAAM,EAAE,MAAM,CAAC;QAAC,QAAQ,EAAE,MAAM,CAAC;QAAC,QAAQ,CAAC,EAAE,MAAM,CAAA;KAAE,EAAE,CAAC;IACjE,cAAc,EAAE,MAAM,CAAC;IACvB,YAAY,EAAE,MAAM,CAAC;IACrB,SAAS,CAAC,EAAE,MAAM,CAAC;IACnB,WAAW,CAAC,EAAE,MAAM,CAAC;IACrB,eAAe,CAAC,EAAE,MAAM,CAAC;CAC1B,GAAG,OAAO,CAAC,OAAO,CAAC,CAUnB"}