@empiricalrun/r2-uploader 0.6.0 → 0.8.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/CHANGELOG.md +12 -0
- package/dist/client.d.ts +5 -0
- package/dist/client.d.ts.map +1 -0
- package/dist/client.js +50 -0
- package/dist/delete.d.ts +2 -2
- package/dist/delete.d.ts.map +1 -1
- package/dist/delete.js +2 -8
- package/dist/fetch.d.ts +2 -2
- package/dist/fetch.d.ts.map +1 -1
- package/dist/fetch.js +2 -8
- package/dist/index.d.ts +1 -1
- package/dist/index.d.ts.map +1 -1
- package/dist/types.d.ts +38 -9
- package/dist/types.d.ts.map +1 -1
- package/dist/upload/buffer.d.ts +2 -2
- package/dist/upload/buffer.d.ts.map +1 -1
- package/dist/upload/buffer.js +2 -8
- package/dist/upload/index.d.ts +11 -3
- package/dist/upload/index.d.ts.map +1 -1
- package/dist/upload/index.js +38 -9
- package/dist/upload/stream.d.ts +2 -2
- package/dist/upload/stream.d.ts.map +1 -1
- package/dist/upload/stream.js +2 -8
- package/dist/upload-task.d.ts +3 -3
- package/dist/upload-task.d.ts.map +1 -1
- package/dist/upload-task.js +6 -6
- package/dist/zip-utils.d.ts +14 -0
- package/dist/zip-utils.d.ts.map +1 -0
- package/dist/zip-utils.js +162 -0
- package/package.json +20 -6
- package/tsconfig.tsbuildinfo +1 -1
package/CHANGELOG.md
CHANGED
package/dist/client.d.ts
ADDED
@@ -0,0 +1,5 @@
+import { S3Client } from "@aws-sdk/client-s3";
+import { StorageBaseConfig, StorageCredentials } from "./types";
+export declare function getStorageCredentialsFromEnv(): StorageCredentials | null;
+export declare function createStorageClient(config: StorageBaseConfig): S3Client;
+//# sourceMappingURL=client.d.ts.map
package/dist/client.d.ts.map
ADDED
@@ -0,0 +1 @@
+{"version":3,"file":"client.d.ts","sourceRoot":"","sources":["../src/client.ts"],"names":[],"mappings":"AAAA,OAAO,EAAE,QAAQ,EAAE,MAAM,oBAAoB,CAAC;AAE9C,OAAO,EAAE,iBAAiB,EAAE,kBAAkB,EAAE,MAAM,SAAS,CAAC;AAEhE,wBAAgB,4BAA4B,IAAI,kBAAkB,GAAG,IAAI,CA4BxE;AAED,wBAAgB,mBAAmB,CAAC,MAAM,EAAE,iBAAiB,GAAG,QAAQ,CAuBvE"}
package/dist/client.js
ADDED
@@ -0,0 +1,50 @@
+"use strict";
+Object.defineProperty(exports, "__esModule", { value: true });
+exports.getStorageCredentialsFromEnv = getStorageCredentialsFromEnv;
+exports.createStorageClient = createStorageClient;
+const client_s3_1 = require("@aws-sdk/client-s3");
+function getStorageCredentialsFromEnv() {
+    if (process.env.S3_REGION &&
+        process.env.S3_ACCESS_KEY_ID &&
+        process.env.S3_SECRET_ACCESS_KEY) {
+        return {
+            provider: "s3",
+            region: process.env.S3_REGION,
+            accessKeyId: process.env.S3_ACCESS_KEY_ID,
+            secretAccessKey: process.env.S3_SECRET_ACCESS_KEY,
+        };
+    }
+    if (process.env.R2_ACCOUNT_ID &&
+        process.env.R2_ACCESS_KEY_ID &&
+        process.env.R2_SECRET_ACCESS_KEY) {
+        return {
+            provider: "r2",
+            accountId: process.env.R2_ACCOUNT_ID,
+            accessKeyId: process.env.R2_ACCESS_KEY_ID,
+            secretAccessKey: process.env.R2_SECRET_ACCESS_KEY,
+        };
+    }
+    return null;
+}
+function createStorageClient(config) {
+    if (config.provider === "s3") {
+        return new client_s3_1.S3Client({
+            region: config.region,
+            credentials: {
+                accessKeyId: config.accessKeyId,
+                secretAccessKey: config.secretAccessKey,
+            },
+        });
+    }
+    if (config.provider === "r2" || "accountId" in config) {
+        return new client_s3_1.S3Client({
+            region: "auto",
+            endpoint: `https://${config.accountId}.r2.cloudflarestorage.com`,
+            credentials: {
+                accessKeyId: config.accessKeyId,
+                secretAccessKey: config.secretAccessKey,
+            },
+        });
+    }
+    throw new Error("Invalid storage config: missing provider or accountId");
+}
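The new client module centralizes credential discovery and S3Client construction; the delete, fetch and upload modules below all switch to it. A minimal sketch of how a caller resolves credentials and builds a client (relative imports mirror the package's own source files; the bucket name is a placeholder):

```ts
import { createStorageClient, getStorageCredentialsFromEnv } from "./client";
import type { DeleteConfig } from "./types";

// Env-derived credentials: S3_* variables are checked first, then R2_*.
const credentials = getStorageCredentialsFromEnv();
if (!credentials) {
  throw new Error("Set S3_* or R2_* environment variables");
}

// DeleteConfig is an alias for StorageBaseConfig: credentials plus a bucket.
const config: DeleteConfig = { ...credentials, bucket: "my-bucket" };

// Returns an S3Client pointed at AWS for provider "s3", or at the
// Cloudflare R2 endpoint for provider "r2" (and legacy accountId configs).
const client = createStorageClient(config);
```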
package/dist/delete.d.ts
CHANGED
@@ -1,3 +1,3 @@
-import {
-export declare function deleteFile(fileKey: string, config:
+import { DeleteConfig } from "./types";
+export declare function deleteFile(fileKey: string, config: DeleteConfig): Promise<import("@aws-sdk/client-s3").DeleteObjectCommandOutput>;
 //# sourceMappingURL=delete.d.ts.map
package/dist/delete.d.ts.map
CHANGED
@@ -1 +1 @@
-{"version":3,"file":"delete.d.ts","sourceRoot":"","sources":["../src/delete.ts"],"names":[],"mappings":"
+{"version":3,"file":"delete.d.ts","sourceRoot":"","sources":["../src/delete.ts"],"names":[],"mappings":"AAIA,OAAO,EAAE,YAAY,EAAE,MAAM,SAAS,CAAC;AAEvC,wBAAsB,UAAU,CAAC,OAAO,EAAE,MAAM,EAAE,MAAM,EAAE,YAAY,mEAcrE"}
package/dist/delete.js
CHANGED
@@ -2,16 +2,10 @@
 Object.defineProperty(exports, "__esModule", { value: true });
 exports.deleteFile = deleteFile;
 const client_s3_1 = require("@aws-sdk/client-s3");
+const client_1 = require("./client");
 const logger_1 = require("./logger");
 async function deleteFile(fileKey, config) {
-    const s3Client =
-        region: "auto",
-        endpoint: `https://${config.accountId}.r2.cloudflarestorage.com`,
-        credentials: {
-            accessKeyId: config.accessKeyId,
-            secretAccessKey: config.secretAccessKey,
-        },
-    });
+    const s3Client = (0, client_1.createStorageClient)(config);
     const params = {
         Bucket: config.bucket,
         Key: fileKey,
package/dist/fetch.d.ts
CHANGED
@@ -1,5 +1,5 @@
-import {
-export declare const fetchFiles: (config:
+import { FetchConfig } from "./types";
+export declare const fetchFiles: (config: FetchConfig) => Promise<{
     key: string;
     lastModified: Date;
     size: number;
package/dist/fetch.d.ts.map
CHANGED
@@ -1 +1 @@
-{"version":3,"file":"fetch.d.ts","sourceRoot":"","sources":["../src/fetch.ts"],"names":[],"mappings":"AAQA,OAAO,EAAE,
+{"version":3,"file":"fetch.d.ts","sourceRoot":"","sources":["../src/fetch.ts"],"names":[],"mappings":"AAQA,OAAO,EAAE,WAAW,EAAE,MAAM,SAAS,CAAC;AAEtC,eAAO,MAAM,UAAU,GAAU,QAAQ,WAAW;;;;;;;;;;IA4CnD,CAAC"}
package/dist/fetch.js
CHANGED
@@ -3,15 +3,9 @@ Object.defineProperty(exports, "__esModule", { value: true });
 exports.fetchFiles = void 0;
 const client_s3_1 = require("@aws-sdk/client-s3");
 const s3_request_presigner_1 = require("@aws-sdk/s3-request-presigner");
+const client_1 = require("./client");
 const fetchFiles = async (config) => {
-    const S3 =
-        region: "auto",
-        endpoint: `https://${config.accountId}.r2.cloudflarestorage.com`,
-        credentials: {
-            accessKeyId: config.accessKeyId,
-            secretAccessKey: config.secretAccessKey,
-        },
-    });
+    const S3 = (0, client_1.createStorageClient)(config);
     const params = {
         Bucket: config.bucket,
         Prefix: config.prefix,
package/dist/index.d.ts
CHANGED
@@ -1,6 +1,6 @@
 export { fetchFiles } from "./fetch";
 export { sendTaskToQueue, waitForTaskQueueToFinish } from "./queue";
-export type { AsyncTask, FileMap } from "./types";
+export type { AsyncTask, DeleteConfig, FetchConfig, FileMap, StorageBaseConfig, StorageCredentials, StorageProvider, UploadBufferConfig, UploadStreamConfig, } from "./types";
 export { uploadDirectory, uploadInMemoryFiles } from "./upload";
 export type { CreateUploadTaskOptions } from "./upload-task";
 export { createUploadTask } from "./upload-task";
package/dist/index.d.ts.map
CHANGED
@@ -1 +1 @@
-{"version":3,"file":"index.d.ts","sourceRoot":"","sources":["../src/index.ts"],"names":[],"mappings":"AAAA,OAAO,EAAE,UAAU,EAAE,MAAM,SAAS,CAAC;AACrC,OAAO,EAAE,eAAe,EAAE,wBAAwB,EAAE,MAAM,SAAS,CAAC;AACpE,YAAY,
+{"version":3,"file":"index.d.ts","sourceRoot":"","sources":["../src/index.ts"],"names":[],"mappings":"AAAA,OAAO,EAAE,UAAU,EAAE,MAAM,SAAS,CAAC;AACrC,OAAO,EAAE,eAAe,EAAE,wBAAwB,EAAE,MAAM,SAAS,CAAC;AACpE,YAAY,EACV,SAAS,EACT,YAAY,EACZ,WAAW,EACX,OAAO,EACP,iBAAiB,EACjB,kBAAkB,EAClB,eAAe,EACf,kBAAkB,EAClB,kBAAkB,GACnB,MAAM,SAAS,CAAC;AACjB,OAAO,EAAE,eAAe,EAAE,mBAAmB,EAAE,MAAM,UAAU,CAAC;AAChE,YAAY,EAAE,uBAAuB,EAAE,MAAM,eAAe,CAAC;AAC7D,OAAO,EAAE,gBAAgB,EAAE,MAAM,eAAe,CAAC"}
package/dist/types.d.ts
CHANGED
@@ -1,32 +1,61 @@
-
+export type StorageProvider = "r2" | "s3";
+interface R2Credentials {
+    provider: "r2";
     accountId: string;
     accessKeyId: string;
     secretAccessKey: string;
+}
+interface S3Credentials {
+    provider: "s3";
+    region: string;
+    accessKeyId: string;
+    secretAccessKey: string;
+}
+/** @deprecated Legacy R2 credentials format without explicit provider */
+interface LegacyR2Credentials {
+    provider?: undefined;
+    accountId: string;
+    accessKeyId: string;
+    secretAccessKey: string;
+}
+export type StorageCredentials = R2Credentials | S3Credentials;
+interface BaseConfig {
     bucket: string;
 }
-
+type R2BaseConfig = BaseConfig & R2Credentials;
+type S3BaseConfig = BaseConfig & S3Credentials;
+type LegacyR2BaseConfig = BaseConfig & LegacyR2Credentials;
+export type StorageBaseConfig = R2BaseConfig | S3BaseConfig | LegacyR2BaseConfig;
+export type UploadStreamConfig = StorageBaseConfig & {
     destinationDir: string;
     files: {
         fullPath: string;
         fileName: string;
         mimeType?: string;
     }[];
-}
-export
+};
+export type UploadBufferConfig = StorageBaseConfig & {
     destinationDir: string;
     files: {
         buffer: Buffer;
         fileName: string;
         mimeType?: string;
     }[];
-}
-export
+};
+export type FetchConfig = StorageBaseConfig & {
     prefix: string;
     withSignedUrl?: boolean;
     suffix?: string;
-}
-export
-
+};
+export type DeleteConfig = StorageBaseConfig;
+/** @deprecated Use UploadStreamConfig instead */
+export type R2UploadStreamConfig = UploadStreamConfig;
+/** @deprecated Use UploadBufferConfig instead */
+export type R2UploadBufferConfig = UploadBufferConfig;
+/** @deprecated Use FetchConfig instead */
+export type R2FetchConfig = FetchConfig;
+/** @deprecated Use DeleteConfig instead */
+export type R2DeleteConfig = DeleteConfig;
 export interface FileMap {
     [file: string]: string;
 }
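The config types are now a tagged union over storage providers, with the old R2-only shapes kept as deprecated aliases. A sketch of the three accepted credential shapes, using the type names exported from the package root (all values are placeholders):

```ts
import type { DeleteConfig, StorageBaseConfig } from "@empiricalrun/r2-uploader";

// S3 variant: requires a region.
const s3Config: StorageBaseConfig = {
  provider: "s3",
  region: "us-east-1",
  accessKeyId: "…",
  secretAccessKey: "…",
  bucket: "my-bucket",
};

// R2 variant: requires an accountId instead of a region.
const r2Config: StorageBaseConfig = {
  provider: "r2",
  accountId: "…",
  accessKeyId: "…",
  secretAccessKey: "…",
  bucket: "my-bucket",
};

// Legacy shape without a provider field still type-checks
// and is routed to the R2 endpoint by createStorageClient.
const legacyConfig: DeleteConfig = {
  accountId: "…",
  accessKeyId: "…",
  secretAccessKey: "…",
  bucket: "my-bucket",
};
```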
package/dist/types.d.ts.map
CHANGED
@@ -1 +1 @@
-{"version":3,"file":"types.d.ts","sourceRoot":"","sources":["../src/types.ts"],"names":[],"mappings":"AAAA,UAAU,
+{"version":3,"file":"types.d.ts","sourceRoot":"","sources":["../src/types.ts"],"names":[],"mappings":"AAAA,MAAM,MAAM,eAAe,GAAG,IAAI,GAAG,IAAI,CAAC;AAE1C,UAAU,aAAa;IACrB,QAAQ,EAAE,IAAI,CAAC;IACf,SAAS,EAAE,MAAM,CAAC;IAClB,WAAW,EAAE,MAAM,CAAC;IACpB,eAAe,EAAE,MAAM,CAAC;CACzB;AAED,UAAU,aAAa;IACrB,QAAQ,EAAE,IAAI,CAAC;IACf,MAAM,EAAE,MAAM,CAAC;IACf,WAAW,EAAE,MAAM,CAAC;IACpB,eAAe,EAAE,MAAM,CAAC;CACzB;AAED,yEAAyE;AACzE,UAAU,mBAAmB;IAC3B,QAAQ,CAAC,EAAE,SAAS,CAAC;IACrB,SAAS,EAAE,MAAM,CAAC;IAClB,WAAW,EAAE,MAAM,CAAC;IACpB,eAAe,EAAE,MAAM,CAAC;CACzB;AAED,MAAM,MAAM,kBAAkB,GAAG,aAAa,GAAG,aAAa,CAAC;AAE/D,UAAU,UAAU;IAClB,MAAM,EAAE,MAAM,CAAC;CAChB;AAED,KAAK,YAAY,GAAG,UAAU,GAAG,aAAa,CAAC;AAC/C,KAAK,YAAY,GAAG,UAAU,GAAG,aAAa,CAAC;AAC/C,KAAK,kBAAkB,GAAG,UAAU,GAAG,mBAAmB,CAAC;AAE3D,MAAM,MAAM,iBAAiB,GACzB,YAAY,GACZ,YAAY,GACZ,kBAAkB,CAAC;AAEvB,MAAM,MAAM,kBAAkB,GAAG,iBAAiB,GAAG;IACnD,cAAc,EAAE,MAAM,CAAC;IACvB,KAAK,EAAE;QAAE,QAAQ,EAAE,MAAM,CAAC;QAAC,QAAQ,EAAE,MAAM,CAAC;QAAC,QAAQ,CAAC,EAAE,MAAM,CAAA;KAAE,EAAE,CAAC;CACpE,CAAC;AAEF,MAAM,MAAM,kBAAkB,GAAG,iBAAiB,GAAG;IACnD,cAAc,EAAE,MAAM,CAAC;IACvB,KAAK,EAAE;QAAE,MAAM,EAAE,MAAM,CAAC;QAAC,QAAQ,EAAE,MAAM,CAAC;QAAC,QAAQ,CAAC,EAAE,MAAM,CAAA;KAAE,EAAE,CAAC;CAClE,CAAC;AAEF,MAAM,MAAM,WAAW,GAAG,iBAAiB,GAAG;IAC5C,MAAM,EAAE,MAAM,CAAC;IACf,aAAa,CAAC,EAAE,OAAO,CAAC;IACxB,MAAM,CAAC,EAAE,MAAM,CAAC;CACjB,CAAC;AAEF,MAAM,MAAM,YAAY,GAAG,iBAAiB,CAAC;AAE7C,iDAAiD;AACjD,MAAM,MAAM,oBAAoB,GAAG,kBAAkB,CAAC;AACtD,iDAAiD;AACjD,MAAM,MAAM,oBAAoB,GAAG,kBAAkB,CAAC;AACtD,0CAA0C;AAC1C,MAAM,MAAM,aAAa,GAAG,WAAW,CAAC;AACxC,2CAA2C;AAC3C,MAAM,MAAM,cAAc,GAAG,YAAY,CAAC;AAE1C,MAAM,WAAW,OAAO;IACtB,CAAC,IAAI,EAAE,MAAM,GAAG,MAAM,CAAC;CACxB;AAED,MAAM,MAAM,SAAS,GAAG,MAAM,OAAO,CAAC,OAAO,GAAG,IAAI,CAAC,CAAC"}
package/dist/upload/buffer.d.ts
CHANGED
@@ -1,3 +1,3 @@
-import { FileMap,
-export declare const uploadFileBuffers: (config:
+import { FileMap, UploadBufferConfig } from "../types";
+export declare const uploadFileBuffers: (config: UploadBufferConfig) => Promise<FileMap>;
 //# sourceMappingURL=buffer.d.ts.map
package/dist/upload/buffer.d.ts.map
CHANGED
@@ -1 +1 @@
-{"version":3,"file":"buffer.d.ts","sourceRoot":"","sources":["../../src/upload/buffer.ts"],"names":[],"mappings":"AAaA,OAAO,EAAE,OAAO,EAAE,
+{"version":3,"file":"buffer.d.ts","sourceRoot":"","sources":["../../src/upload/buffer.ts"],"names":[],"mappings":"AAaA,OAAO,EAAE,OAAO,EAAE,kBAAkB,EAAE,MAAM,UAAU,CAAC;AAEvD,eAAO,MAAM,iBAAiB,GAAU,QAAQ,kBAAkB,qBAkGjE,CAAC"}
package/dist/upload/buffer.js
CHANGED
@@ -9,18 +9,12 @@ const s3_request_presigner_1 = require("@aws-sdk/s3-request-presigner");
 const async_retry_1 = __importDefault(require("async-retry"));
 const md5_1 = __importDefault(require("md5"));
 const path_1 = __importDefault(require("path"));
+const client_1 = require("../client");
 const logger_1 = require("../logger");
 const uploadFileBuffers = async (config) => {
     const map = new Map();
     const urls = {};
-    const S3 =
-        region: "auto",
-        endpoint: `https://${config.accountId}.r2.cloudflarestorage.com`,
-        credentials: {
-            accessKeyId: config.accessKeyId,
-            secretAccessKey: config.secretAccessKey,
-        },
-    });
+    const S3 = (0, client_1.createStorageClient)(config);
     await Promise.all(config.files.map(async (file) => {
         let fileKey = path_1.default.join(config.destinationDir, file.fileName);
         if (fileKey.includes(".gitkeep")) {
package/dist/upload/index.d.ts
CHANGED
@@ -1,14 +1,18 @@
-import { FileMap } from "../types";
-export declare function uploadDirectory({ sourceDir, fileList, destinationDir, uploadBucket, accountId, accessKeyId, secretAccessKey, }: {
+import { FileMap, StorageCredentials } from "../types";
+export declare function uploadDirectory({ sourceDir, fileList, destinationDir, uploadBucket, credentials, accountId, accessKeyId, secretAccessKey, }: {
     sourceDir: string;
     fileList?: string[];
     destinationDir: string;
     uploadBucket: string;
+    credentials?: StorageCredentials;
+    /** @deprecated Use credentials instead */
     accountId?: string;
+    /** @deprecated Use credentials instead */
     accessKeyId?: string;
+    /** @deprecated Use credentials instead */
     secretAccessKey?: string;
 }): Promise<FileMap>;
-export declare function uploadInMemoryFiles({ files, destinationDir, uploadBucket, accountId, accessKeyId, secretAccessKey, }: {
+export declare function uploadInMemoryFiles({ files, destinationDir, uploadBucket, credentials, accountId, accessKeyId, secretAccessKey, }: {
     files: {
         buffer: Buffer;
         fileName: string;
@@ -16,8 +20,12 @@ export declare function uploadInMemoryFiles({ files, destinationDir, uploadBucke
     }[];
     destinationDir: string;
     uploadBucket: string;
+    credentials?: StorageCredentials;
+    /** @deprecated Use credentials instead */
     accountId?: string;
+    /** @deprecated Use credentials instead */
     accessKeyId?: string;
+    /** @deprecated Use credentials instead */
     secretAccessKey?: string;
 }): Promise<FileMap>;
 //# sourceMappingURL=index.d.ts.map
package/dist/upload/index.d.ts.map
CHANGED
@@ -1 +1 @@
-{"version":3,"file":"index.d.ts","sourceRoot":"","sources":["../../src/upload/index.ts"],"names":[],"mappings":"
+{"version":3,"file":"index.d.ts","sourceRoot":"","sources":["../../src/upload/index.ts"],"names":[],"mappings":"AAKA,OAAO,EACL,OAAO,EACP,kBAAkB,EAGnB,MAAM,UAAU,CAAC;AAqBlB,wBAAsB,eAAe,CAAC,EACpC,SAAS,EACT,QAAQ,EACR,cAAc,EACd,YAAY,EACZ,WAAW,EACX,SAAS,EACT,WAAW,EACX,eAAe,GAChB,EAAE;IACD,SAAS,EAAE,MAAM,CAAC;IAClB,QAAQ,CAAC,EAAE,MAAM,EAAE,CAAC;IACpB,cAAc,EAAE,MAAM,CAAC;IACvB,YAAY,EAAE,MAAM,CAAC;IACrB,WAAW,CAAC,EAAE,kBAAkB,CAAC;IACjC,0CAA0C;IAC1C,SAAS,CAAC,EAAE,MAAM,CAAC;IACnB,0CAA0C;IAC1C,WAAW,CAAC,EAAE,MAAM,CAAC;IACrB,0CAA0C;IAC1C,eAAe,CAAC,EAAE,MAAM,CAAC;CAC1B,GAAG,OAAO,CAAC,OAAO,CAAC,CAwCnB;AAED,wBAAsB,mBAAmB,CAAC,EACxC,KAAK,EACL,cAAc,EACd,YAAY,EACZ,WAAW,EACX,SAAS,EACT,WAAW,EACX,eAAe,GAChB,EAAE;IACD,KAAK,EAAE;QAAE,MAAM,EAAE,MAAM,CAAC;QAAC,QAAQ,EAAE,MAAM,CAAC;QAAC,QAAQ,CAAC,EAAE,MAAM,CAAA;KAAE,EAAE,CAAC;IACjE,cAAc,EAAE,MAAM,CAAC;IACvB,YAAY,EAAE,MAAM,CAAC;IACrB,WAAW,CAAC,EAAE,kBAAkB,CAAC;IACjC,0CAA0C;IAC1C,SAAS,CAAC,EAAE,MAAM,CAAC;IACnB,0CAA0C;IAC1C,WAAW,CAAC,EAAE,MAAM,CAAC;IACrB,0CAA0C;IAC1C,eAAe,CAAC,EAAE,MAAM,CAAC;CAC1B,GAAG,OAAO,CAAC,OAAO,CAAC,CA6BnB"}
package/dist/upload/index.js
CHANGED
@@ -40,6 +40,7 @@ exports.uploadDirectory = uploadDirectory;
 exports.uploadInMemoryFiles = uploadInMemoryFiles;
 const fs = __importStar(require("fs"));
 const mime_1 = __importDefault(require("mime"));
+const client_1 = require("../client");
 const logger_1 = require("../logger");
 const buffer_1 = require("./buffer");
 const stream_1 = require("./stream");
@@ -60,18 +61,32 @@ const getFileList = (dir) => {
     }
     return files;
 };
-async function uploadDirectory({ sourceDir, fileList, destinationDir, uploadBucket, accountId, accessKeyId, secretAccessKey, }) {
+async function uploadDirectory({ sourceDir, fileList, destinationDir, uploadBucket, credentials, accountId, accessKeyId, secretAccessKey, }) {
+    let creds = credentials;
+    if (!creds && accountId && accessKeyId && secretAccessKey) {
+        creds = {
+            provider: "r2",
+            accountId,
+            accessKeyId,
+            secretAccessKey,
+        };
+    }
+    if (!creds) {
+        const envCreds = (0, client_1.getStorageCredentialsFromEnv)();
+        if (!envCreds) {
+            throw new Error("No storage credentials provided. Set S3_* or R2_* environment variables.");
+        }
+        creds = envCreds;
+    }
     const filePaths = fileList || getFileList(sourceDir);
     const files = filePaths.map((filePath) => ({
         fullPath: filePath,
-        fileName: filePath.replace(sourceDir, ""),
+        fileName: filePath.replace(sourceDir, ""),
         mimeType: mime_1.default.getType(filePath) || "application/octet-stream",
     }));
     logger_1.logger.debug("Got request to upload following files:", JSON.stringify(files, null, 2));
     const config = {
-
-        accessKeyId: accessKeyId || process.env.R2_ACCESS_KEY_ID,
-        secretAccessKey: secretAccessKey || process.env.R2_SECRET_ACCESS_KEY,
+        ...creds,
         bucket: uploadBucket,
         destinationDir,
         files,
@@ -79,11 +94,25 @@ async function uploadDirectory({ sourceDir, fileList, destinationDir, uploadBuck
     const uploadedFiles = await (0, stream_1.uploadFileStreams)(config);
     return uploadedFiles;
 }
-async function uploadInMemoryFiles({ files, destinationDir, uploadBucket, accountId, accessKeyId, secretAccessKey, }) {
+async function uploadInMemoryFiles({ files, destinationDir, uploadBucket, credentials, accountId, accessKeyId, secretAccessKey, }) {
+    let creds = credentials;
+    if (!creds && accountId && accessKeyId && secretAccessKey) {
+        creds = {
+            provider: "r2",
+            accountId,
+            accessKeyId,
+            secretAccessKey,
+        };
+    }
+    if (!creds) {
+        const envCreds = (0, client_1.getStorageCredentialsFromEnv)();
+        if (!envCreds) {
+            throw new Error("No storage credentials provided. Set S3_* or R2_* environment variables.");
+        }
+        creds = envCreds;
+    }
     const config = {
-
-        accessKeyId: accessKeyId || process.env.R2_ACCESS_KEY_ID,
-        secretAccessKey: secretAccessKey || process.env.R2_SECRET_ACCESS_KEY,
+        ...creds,
         bucket: uploadBucket,
         destinationDir,
         files,
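uploadDirectory and uploadInMemoryFiles now accept a credentials object, keep the old R2 fields as a deprecated fallback, and otherwise read S3_* or R2_* environment variables. A sketch of the two call styles (paths, bucket and key values are placeholders):

```ts
import { uploadDirectory } from "@empiricalrun/r2-uploader";

// Explicit credentials take precedence over everything else.
const uploaded = await uploadDirectory({
  sourceDir: "./build",
  destinationDir: "releases/v1",
  uploadBucket: "my-bucket",
  credentials: {
    provider: "s3",
    region: "us-east-1",
    accessKeyId: "…",
    secretAccessKey: "…",
  },
});

// Without credentials (and without the deprecated accountId/accessKeyId/
// secretAccessKey fields) the call falls back to S3_* or R2_* environment
// variables and throws if neither set is present.
await uploadDirectory({
  sourceDir: "./build",
  destinationDir: "releases/v1",
  uploadBucket: "my-bucket",
});
```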
package/dist/upload/stream.d.ts
CHANGED
@@ -1,3 +1,3 @@
-import { FileMap,
-export declare const uploadFileStreams: (config:
+import { FileMap, UploadStreamConfig } from "../types";
+export declare const uploadFileStreams: (config: UploadStreamConfig) => Promise<FileMap>;
 //# sourceMappingURL=stream.d.ts.map
package/dist/upload/stream.d.ts.map
CHANGED
@@ -1 +1 @@
-{"version":3,"file":"stream.d.ts","sourceRoot":"","sources":["../../src/upload/stream.ts"],"names":[],"mappings":"AAaA,OAAO,EAAE,OAAO,EAAE,
+{"version":3,"file":"stream.d.ts","sourceRoot":"","sources":["../../src/upload/stream.ts"],"names":[],"mappings":"AAaA,OAAO,EAAE,OAAO,EAAE,kBAAkB,EAAE,MAAM,UAAU,CAAC;AAEvD,eAAO,MAAM,iBAAiB,GAAU,QAAQ,kBAAkB,qBAmGjE,CAAC"}
package/dist/upload/stream.js
CHANGED
@@ -9,17 +9,11 @@ const s3_request_presigner_1 = require("@aws-sdk/s3-request-presigner");
 const async_retry_1 = __importDefault(require("async-retry"));
 const fs_1 = __importDefault(require("fs"));
 const path_1 = __importDefault(require("path"));
+const client_1 = require("../client");
 const logger_1 = require("../logger");
 const uploadFileStreams = async (config) => {
     const urls = {};
-    const S3 =
-        region: "auto",
-        endpoint: `https://${config.accountId}.r2.cloudflarestorage.com`,
-        credentials: {
-            accessKeyId: config.accessKeyId,
-            secretAccessKey: config.secretAccessKey,
-        },
-    });
+    const S3 = (0, client_1.createStorageClient)(config);
     await Promise.all(config.files.map(async (file) => {
         let fileKey = path_1.default.join(config.destinationDir, file.fileName);
         if (fileKey.includes(".gitkeep")) {
package/dist/upload-task.d.ts
CHANGED
@@ -7,13 +7,13 @@ export interface CreateUploadTaskOptions {
     baseUrl: string;
 }
 /**
- * Creates an async task for uploading files to R2 storage.
+ * Creates an async task for uploading files to R2 or S3 storage.
  *
  * @param {Object} params - The upload parameters
  * @param {string} params.sourceDir - The source directory containing files to upload
  * @param {string[]} [params.fileList] - Optional list of specific files to upload
- * @param {string} params.destinationDir - The destination directory in
- * @param {string} params.uploadBucket - The
+ * @param {string} params.destinationDir - The destination directory in storage
+ * @param {string} params.uploadBucket - The bucket to upload to
  * @param {string} params.baseUrl - The base URL for generating file URLs
  * @returns {AsyncTask} An async task that when executed will upload the files and return a map of file paths to URLs
  */
package/dist/upload-task.d.ts.map
CHANGED
@@ -1 +1 @@
-{"version":3,"file":"upload-task.d.ts","sourceRoot":"","sources":["../src/upload-task.ts"],"names":[],"mappings":"
+{"version":3,"file":"upload-task.d.ts","sourceRoot":"","sources":["../src/upload-task.ts"],"names":[],"mappings":"AAIA,OAAO,EAAE,SAAS,EAAW,MAAM,SAAS,CAAC;AAG7C,MAAM,WAAW,uBAAuB;IACtC,SAAS,EAAE,MAAM,CAAC;IAClB,QAAQ,CAAC,EAAE,MAAM,EAAE,CAAC;IACpB,cAAc,EAAE,MAAM,CAAC;IACvB,YAAY,EAAE,MAAM,CAAC;IACrB,OAAO,EAAE,MAAM,CAAC;CACjB;AAED;;;;;;;;;;GAUG;AACH,wBAAgB,gBAAgB,CAAC,EAC/B,SAAS,EACT,QAAQ,EACR,cAAc,EACd,YAAY,EACZ,OAAO,GACR,EAAE,uBAAuB,GAAG,SAAS,CAyBrC"}
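createUploadTask keeps the same options; the task it returns now resolves credentials through getStorageCredentialsFromEnv when it runs, so S3_* variables work as well as R2_*, and the task is a no-op when neither is set (see the upload-task.js diff below). A usage sketch with placeholder values:

```ts
import { createUploadTask } from "@empiricalrun/r2-uploader";

const task = createUploadTask({
  sourceDir: "./test-results",
  destinationDir: "runs/2024-01-01",
  uploadBucket: "my-bucket",
  baseUrl: "https://files.example.com",
});

// Credentials are read from the environment at execution time; with none
// set the task returns without uploading anything.
const fileMap = await task();
```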
package/dist/upload-task.js
CHANGED
@@ -5,24 +5,24 @@ var __importDefault = (this && this.__importDefault) || function (mod) {
 Object.defineProperty(exports, "__esModule", { value: true });
 exports.createUploadTask = createUploadTask;
 const path_1 = __importDefault(require("path"));
+const client_1 = require("./client");
 const logger_1 = require("./logger");
 const upload_1 = require("./upload");
 /**
- * Creates an async task for uploading files to R2 storage.
+ * Creates an async task for uploading files to R2 or S3 storage.
  *
  * @param {Object} params - The upload parameters
  * @param {string} params.sourceDir - The source directory containing files to upload
 * @param {string[]} [params.fileList] - Optional list of specific files to upload
- * @param {string} params.destinationDir - The destination directory in
- * @param {string} params.uploadBucket - The
+ * @param {string} params.destinationDir - The destination directory in storage
+ * @param {string} params.uploadBucket - The bucket to upload to
  * @param {string} params.baseUrl - The base URL for generating file URLs
  * @returns {AsyncTask} An async task that when executed will upload the files and return a map of file paths to URLs
  */
 function createUploadTask({ sourceDir, fileList, destinationDir, uploadBucket, baseUrl, }) {
     return async () => {
-
-
-        !process.env.R2_SECRET_ACCESS_KEY) {
+        const credentials = (0, client_1.getStorageCredentialsFromEnv)();
+        if (!credentials) {
             return;
         }
         try {
package/dist/zip-utils.d.ts
ADDED
@@ -0,0 +1,14 @@
+export declare function readZipEntry(zipPath: string, entryName: string): Promise<Buffer | null>;
+export declare function getZipEntryNames(zipPath: string): Promise<string[]>;
+export declare function extractZipToDirectory(zipPath: string, destDir: string): Promise<void>;
+export declare function extractZipBufferToDirectory(buffer: Buffer, destDir: string): Promise<void>;
+export declare function createZipFromDirectory(sourceDir: string): Promise<Buffer>;
+export interface PatchZipOptions {
+    filterEntry?: (filename: string) => boolean;
+    newEntries?: {
+        filename: string;
+        content: Buffer;
+    }[];
+}
+export declare function patchZipFile(zipPath: string, options: PatchZipOptions): Promise<void>;
+//# sourceMappingURL=zip-utils.d.ts.map
package/dist/zip-utils.d.ts.map
ADDED
@@ -0,0 +1 @@
+{"version":3,"file":"zip-utils.d.ts","sourceRoot":"","sources":["../src/zip-utils.ts"],"names":[],"mappings":"AAMA,wBAAsB,YAAY,CAChC,OAAO,EAAE,MAAM,EACf,SAAS,EAAE,MAAM,GAChB,OAAO,CAAC,MAAM,GAAG,IAAI,CAAC,CAexB;AAED,wBAAsB,gBAAgB,CAAC,OAAO,EAAE,MAAM,GAAG,OAAO,CAAC,MAAM,EAAE,CAAC,CAQzE;AAED,wBAAsB,qBAAqB,CACzC,OAAO,EAAE,MAAM,EACf,OAAO,EAAE,MAAM,GACd,OAAO,CAAC,IAAI,CAAC,CAiBf;AAED,wBAAsB,2BAA2B,CAC/C,MAAM,EAAE,MAAM,EACd,OAAO,EAAE,MAAM,GACd,OAAO,CAAC,IAAI,CAAC,CAiBf;AAED,wBAAsB,sBAAsB,CAC1C,SAAS,EAAE,MAAM,GAChB,OAAO,CAAC,MAAM,CAAC,CAyBjB;AAED,MAAM,WAAW,eAAe;IAC9B,WAAW,CAAC,EAAE,CAAC,QAAQ,EAAE,MAAM,KAAK,OAAO,CAAC;IAC5C,UAAU,CAAC,EAAE;QAAE,QAAQ,EAAE,MAAM,CAAC;QAAC,OAAO,EAAE,MAAM,CAAA;KAAE,EAAE,CAAC;CACtD;AAED,wBAAsB,YAAY,CAChC,OAAO,EAAE,MAAM,EACf,OAAO,EAAE,eAAe,GACvB,OAAO,CAAC,IAAI,CAAC,CAiCf"}
package/dist/zip-utils.js
ADDED
@@ -0,0 +1,162 @@
+"use strict";
+var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) {
+    if (k2 === undefined) k2 = k;
+    var desc = Object.getOwnPropertyDescriptor(m, k);
+    if (!desc || ("get" in desc ? !m.__esModule : desc.writable || desc.configurable)) {
+      desc = { enumerable: true, get: function() { return m[k]; } };
+    }
+    Object.defineProperty(o, k2, desc);
+}) : (function(o, m, k, k2) {
+    if (k2 === undefined) k2 = k;
+    o[k2] = m[k];
+}));
+var __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? (function(o, v) {
+    Object.defineProperty(o, "default", { enumerable: true, value: v });
+}) : function(o, v) {
+    o["default"] = v;
+});
+var __importStar = (this && this.__importStar) || (function () {
+    var ownKeys = function(o) {
+        ownKeys = Object.getOwnPropertyNames || function (o) {
+            var ar = [];
+            for (var k in o) if (Object.prototype.hasOwnProperty.call(o, k)) ar[ar.length] = k;
+            return ar;
+        };
+        return ownKeys(o);
+    };
+    return function (mod) {
+        if (mod && mod.__esModule) return mod;
+        var result = {};
+        if (mod != null) for (var k = ownKeys(mod), i = 0; i < k.length; i++) if (k[i] !== "default") __createBinding(result, mod, k[i]);
+        __setModuleDefault(result, mod);
+        return result;
+    };
+})();
+var __importDefault = (this && this.__importDefault) || function (mod) {
+    return (mod && mod.__esModule) ? mod : { "default": mod };
+};
+Object.defineProperty(exports, "__esModule", { value: true });
+exports.readZipEntry = readZipEntry;
+exports.getZipEntryNames = getZipEntryNames;
+exports.extractZipToDirectory = extractZipToDirectory;
+exports.extractZipBufferToDirectory = extractZipBufferToDirectory;
+exports.createZipFromDirectory = createZipFromDirectory;
+exports.patchZipFile = patchZipFile;
+const node_fs_1 = __importDefault(require("node:fs"));
+const node_path_1 = __importDefault(require("node:path"));
+const promises_1 = require("node:stream/promises");
+const yauzl = __importStar(require("yauzl-promise"));
+const yazl = __importStar(require("yazl"));
+async function readZipEntry(zipPath, entryName) {
+    const zipFile = await yauzl.open(zipPath);
+    for await (const entry of zipFile) {
+        if (entry.filename === entryName) {
+            const stream = await entry.openReadStream();
+            const chunks = [];
+            for await (const chunk of stream) {
+                chunks.push(chunk);
+            }
+            await zipFile.close();
+            return Buffer.concat(chunks);
+        }
+    }
+    await zipFile.close();
+    return null;
+}
+async function getZipEntryNames(zipPath) {
+    const zipFile = await yauzl.open(zipPath);
+    const names = [];
+    for await (const entry of zipFile) {
+        names.push(entry.filename);
+    }
+    await zipFile.close();
+    return names;
+}
+async function extractZipToDirectory(zipPath, destDir) {
+    const zipFile = await yauzl.open(zipPath);
+    try {
+        for await (const entry of zipFile) {
+            const destPath = node_path_1.default.join(destDir, entry.filename);
+            if (entry.filename.endsWith("/")) {
+                await node_fs_1.default.promises.mkdir(destPath, { recursive: true });
+            }
+            else {
+                await node_fs_1.default.promises.mkdir(node_path_1.default.dirname(destPath), { recursive: true });
+                const readStream = await entry.openReadStream();
+                const writeStream = node_fs_1.default.createWriteStream(destPath);
+                await (0, promises_1.pipeline)(readStream, writeStream);
+            }
+        }
+    }
+    finally {
+        await zipFile.close();
+    }
+}
+async function extractZipBufferToDirectory(buffer, destDir) {
+    const zipFile = await yauzl.fromBuffer(buffer);
+    try {
+        for await (const entry of zipFile) {
+            const destPath = node_path_1.default.join(destDir, entry.filename);
+            if (entry.filename.endsWith("/")) {
+                await node_fs_1.default.promises.mkdir(destPath, { recursive: true });
+            }
+            else {
+                await node_fs_1.default.promises.mkdir(node_path_1.default.dirname(destPath), { recursive: true });
+                const readStream = await entry.openReadStream();
+                const writeStream = node_fs_1.default.createWriteStream(destPath);
+                await (0, promises_1.pipeline)(readStream, writeStream);
+            }
+        }
+    }
+    finally {
+        await zipFile.close();
+    }
+}
+async function createZipFromDirectory(sourceDir) {
+    const zipFile = new yazl.ZipFile();
+    async function addDir(dir, prefix) {
+        const entries = await node_fs_1.default.promises.readdir(dir, { withFileTypes: true });
+        for (const entry of entries) {
+            const fullPath = node_path_1.default.join(dir, entry.name);
+            const zipPath = prefix ? `${prefix}/${entry.name}` : entry.name;
+            if (entry.isDirectory()) {
+                await addDir(fullPath, zipPath);
+            }
+            else {
+                zipFile.addFile(fullPath, zipPath);
+            }
+        }
+    }
+    await addDir(sourceDir, "");
+    zipFile.end();
+    return new Promise((resolve, reject) => {
+        const chunks = [];
+        zipFile.outputStream.on("data", (chunk) => chunks.push(chunk));
+        zipFile.outputStream.on("end", () => resolve(Buffer.concat(chunks)));
+        zipFile.outputStream.on("error", reject);
+    });
+}
+async function patchZipFile(zipPath, options) {
+    const { filterEntry, newEntries } = options;
+    const tempPath = `${zipPath}.tmp`;
+    const newZip = new yazl.ZipFile();
+    const oldZip = await yauzl.open(zipPath);
+    for await (const entry of oldZip) {
+        if (filterEntry && !filterEntry(entry.filename)) {
+            continue;
+        }
+        if (entry.filename.endsWith("/")) {
+            newZip.addEmptyDirectory(entry.filename);
+            continue;
+        }
+        const readStream = await entry.openReadStream();
+        newZip.addReadStream(readStream, entry.filename);
+    }
+    for (const { filename, content } of newEntries ?? []) {
+        newZip.addBuffer(content, filename);
+    }
+    newZip.end();
+    await (0, promises_1.pipeline)(newZip.outputStream, node_fs_1.default.createWriteStream(tempPath));
+    await oldZip.close();
+    node_fs_1.default.renameSync(tempPath, zipPath);
+}
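The zip helpers above are built on yauzl-promise and yazl and are published under the new "./zip" subpath (see the package.json diff below). A sketch of listing, reading and patching an archive (file and entry names are placeholders):

```ts
import {
  getZipEntryNames,
  patchZipFile,
  readZipEntry,
} from "@empiricalrun/r2-uploader/zip";

const names = await getZipEntryNames("./artifact.zip");
console.log(names);

// Returns the entry contents as a Buffer, or null if the entry is missing.
const manifest = await readZipEntry("./artifact.zip", "manifest.json");
console.log(manifest?.toString("utf8"));

// Rewrite the archive in place: drop *.log entries and append metadata.json.
await patchZipFile("./artifact.zip", {
  filterEntry: (filename) => !filename.endsWith(".log"),
  newEntries: [{ filename: "metadata.json", content: Buffer.from("{}") }],
});
```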
package/package.json
CHANGED
@@ -1,11 +1,21 @@
 {
   "name": "@empiricalrun/r2-uploader",
-  "version": "0.
+  "version": "0.8.0",
   "publishConfig": {
     "registry": "https://registry.npmjs.org/",
     "access": "public"
   },
   "main": "dist/index.js",
+  "exports": {
+    ".": {
+      "types": "./dist/index.d.ts",
+      "default": "./dist/index.js"
+    },
+    "./zip": {
+      "types": "./dist/zip-utils.d.ts",
+      "default": "./dist/zip-utils.js"
+    }
+  },
   "repository": {
     "type": "git",
     "url": "https://github.com/empirical-run/empirical.git"
@@ -14,16 +24,20 @@
   "dependencies": {
     "@aws-sdk/client-s3": "3.614.0",
     "@aws-sdk/s3-request-presigner": "3.614.0",
-    "@types/async-retry": "^1.4.8",
-    "@types/md5": "^2.3.5",
-    "@types/mime": "3.0.0",
     "async-retry": "^1.3.3",
     "console-log-level": "^1.4.1",
     "md5": "^2.3.0",
-    "mime": "3.0.0"
+    "mime": "3.0.0",
+    "yazl": "^3.3.1",
+    "yauzl-promise": "^4.0.0"
   },
   "devDependencies": {
-    "@types/
+    "@types/async-retry": "^1.4.8",
+    "@types/console-log-level": "^1.4.5",
+    "@types/md5": "^2.3.5",
+    "@types/mime": "3.0.0",
+    "@types/yauzl-promise": "^4.0.1",
+    "@types/yazl": "^2.4.5"
   },
   "scripts": {
     "dev": "tsc --build --watch",
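With the new exports map, only the root entry point and the "./zip" subpath are importable. A sketch combining the two, zipping a directory in memory and uploading the result (directory, bucket and file names are placeholders; credentials are assumed to come from the environment):

```ts
import { uploadInMemoryFiles, type FileMap } from "@empiricalrun/r2-uploader";
import { createZipFromDirectory } from "@empiricalrun/r2-uploader/zip";

const zip = await createZipFromDirectory("./build");

const map: FileMap = await uploadInMemoryFiles({
  files: [{ buffer: zip, fileName: "build.zip", mimeType: "application/zip" }],
  destinationDir: "artifacts",
  uploadBucket: "my-bucket",
});
console.log(map);
```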
package/tsconfig.tsbuildinfo
CHANGED
@@ -1 +1 @@
-{"root":["./src/delete.ts","./src/fetch.ts","./src/index.ts","./src/logger.ts","./src/queue.ts","./src/types.ts","./src/upload-task.ts","./src/upload/buffer.ts","./src/upload/index.ts","./src/upload/stream.ts"],"version":"5.8.3"}
+{"root":["./src/client.ts","./src/delete.ts","./src/fetch.ts","./src/index.ts","./src/logger.ts","./src/queue.ts","./src/types.ts","./src/upload-task.ts","./src/zip-utils.ts","./src/upload/buffer.ts","./src/upload/index.ts","./src/upload/stream.ts"],"version":"5.8.3"}