@empiricalrun/r2-uploader 0.3.5 → 0.3.7

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
package/CHANGELOG.md CHANGED
@@ -1,5 +1,17 @@
  # @empiricalrun/r2-uploader
 
+ ## 0.3.7
+
+ ### Patch Changes
+
+ - 5231c86: fix: print full path for files in log
+
+ ## 0.3.6
+
+ ### Patch Changes
+
+ - a1e8599: feat: add log levels for r2 uploader
+
  ## 0.3.5
 
  ### Patch Changes
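
Both changelog entries land in the compiled output shown below: 0.3.6 replaces the package's console.log/console.error calls with a leveled logger built on console-log-level and gated by the LOG_LEVEL environment variable (default "info"), and 0.3.7 switches the per-file messages from the relative fileName to the absolute fullPath. Because dist/logger.js reads LOG_LEVEL once at module load, a consumer that wants the new debug output has to set the variable before the package is imported. A minimal, hypothetical sketch (the consumer script and its use of dynamic import are assumptions, not part of the package):

    // Hypothetical consumer script. LOG_LEVEL must be set before dist/logger.js
    // is evaluated, because that module reads process.env.LOG_LEVEL at import
    // time (falling back to "info").
    process.env.LOG_LEVEL ??= "debug";

    async function main(): Promise<void> {
      // Dynamic import defers module evaluation until after the env var is set.
      const uploader = await import("@empiricalrun/r2-uploader");
      // ...uploader API calls here now emit the debug-level messages added in 0.3.6/0.3.7.
    }

    main();
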
package/dist/delete.d.ts.map CHANGED
@@ -1 +1 @@
- {"version":3,"file":"delete.d.ts","sourceRoot":"","sources":["../src/delete.ts"],"names":[],"mappings":"AAEA,OAAO,EAAE,cAAc,EAAE,MAAM,SAAS,CAAC;AAEzC,wBAAsB,UAAU,CAAC,OAAO,EAAE,MAAM,EAAE,MAAM,EAAE,cAAc,mEAqBvE"}
+ {"version":3,"file":"delete.d.ts","sourceRoot":"","sources":["../src/delete.ts"],"names":[],"mappings":"AAGA,OAAO,EAAE,cAAc,EAAE,MAAM,SAAS,CAAC;AAEzC,wBAAsB,UAAU,CAAC,OAAO,EAAE,MAAM,EAAE,MAAM,EAAE,cAAc,mEAqBvE"}
package/dist/delete.js CHANGED
@@ -2,6 +2,7 @@
  Object.defineProperty(exports, "__esModule", { value: true });
  exports.deleteFile = void 0;
  const client_s3_1 = require("@aws-sdk/client-s3");
+ const logger_1 = require("./logger");
  async function deleteFile(fileKey, config) {
  const s3Client = new client_s3_1.S3Client({
  region: "auto",
@@ -21,7 +22,7 @@ async function deleteFile(fileKey, config) {
  return response;
  }
  catch (error) {
- console.error(`Error deleting file from R2: ${error}`);
+ logger_1.logger.error(`Error deleting file from R2: ${error}`);
  throw error;
  }
  }
package/dist/logger.d.ts ADDED
@@ -0,0 +1,3 @@
+ import Logger from "console-log-level";
+ export declare const logger: Logger.Logger;
+ //# sourceMappingURL=logger.d.ts.map
package/dist/logger.d.ts.map ADDED
@@ -0,0 +1 @@
+ {"version":3,"file":"logger.d.ts","sourceRoot":"","sources":["../src/logger.ts"],"names":[],"mappings":"AAAA,OAAO,MAAyB,MAAM,mBAAmB,CAAC;AAE1D,eAAO,MAAM,MAAM,eAGjB,CAAC"}
package/dist/logger.js ADDED
@@ -0,0 +1,11 @@
+ "use strict";
+ var __importDefault = (this && this.__importDefault) || function (mod) {
+ return (mod && mod.__esModule) ? mod : { "default": mod };
+ };
+ Object.defineProperty(exports, "__esModule", { value: true });
+ exports.logger = void 0;
+ const console_log_level_1 = __importDefault(require("console-log-level"));
+ exports.logger = (0, console_log_level_1.default)({
+ prefix: "r2-uploader",
+ level: process.env.LOG_LEVEL || "info",
+ });
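
From the compiled dist/logger.js above and the generated logger.d.ts, the new source module plausibly looks like the sketch below. This is a reconstruction from the published build artifacts, not the actual src/logger.ts; the cast sidesteps the exact option typings of @types/console-log-level, which are not shown in this diff.

    // src/logger.ts (reconstructed sketch)
    import Logger from "console-log-level";

    // Verbosity comes from LOG_LEVEL and defaults to "info", so the debug-level
    // messages added in this release stay silent unless the consumer opts in.
    export const logger = Logger({
      prefix: "r2-uploader",
      level: process.env.LOG_LEVEL || "info",
    } as Parameters<typeof Logger>[0]);
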
package/dist/upload/buffer.d.ts.map CHANGED
@@ -1 +1 @@
- {"version":3,"file":"buffer.d.ts","sourceRoot":"","sources":["../../src/upload/buffer.ts"],"names":[],"mappings":"AAYA,OAAO,EAAE,OAAO,EAAE,oBAAoB,EAAE,MAAM,UAAU,CAAC;AAEzD,eAAO,MAAM,iBAAiB,WAAkB,oBAAoB,qBA6FnE,CAAC"}
+ {"version":3,"file":"buffer.d.ts","sourceRoot":"","sources":["../../src/upload/buffer.ts"],"names":[],"mappings":"AAaA,OAAO,EAAE,OAAO,EAAE,oBAAoB,EAAE,MAAM,UAAU,CAAC;AAEzD,eAAO,MAAM,iBAAiB,WAAkB,oBAAoB,qBA6FnE,CAAC"}
package/dist/upload/buffer.js CHANGED
@@ -9,6 +9,7 @@ const s3_request_presigner_1 = require("@aws-sdk/s3-request-presigner");
  const async_retry_1 = __importDefault(require("async-retry"));
  const md5_1 = __importDefault(require("md5"));
  const path_1 = __importDefault(require("path"));
+ const logger_1 = require("../logger");
  const uploadFileBuffers = async (config) => {
  const map = new Map();
  const urls = {};
@@ -69,16 +70,16 @@ const uploadFileBuffers = async (config) => {
  onRetry: (err, i) => {
  if (err) {
  const error = err;
- console.log("Upload retry attempt:", i, ":", file.fileName);
- console.log("Response status:", error.$response?.statusCode);
+ logger_1.logger.warn("Upload retry attempt:", i, ":", file.fileName);
+ logger_1.logger.warn("Response status:", error.$response?.statusCode);
  }
  },
  });
  }
  catch (err) {
  const error = err;
- console.log(`R2 Error - ${file.fileName} \nError: ${error}`);
- console.log("Upload response", error.$response, "with status", error.$metadata.httpStatusCode);
+ logger_1.logger.error(`R2 Error - ${file.fileName} \nError: ${error}`);
+ logger_1.logger.error("Upload response", error.$response, "with status", error.$metadata.httpStatusCode);
  }
  return;
  }));
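
In the buffer path, transient retries now go through logger.warn (still visible at the default "info" level) and exhausted or terminal failures through logger.error. A small standalone sketch of that pattern with async-retry and console-log-level; sendPart() and the retry count are hypothetical stand-ins, not the package's actual values:

    import retry from "async-retry";
    import Logger from "console-log-level";

    const logger = Logger({ prefix: "r2-uploader" }); // default level: "info"

    // Hypothetical stand-in for the per-file S3 upload call made by buffer.js.
    async function sendPart(): Promise<void> {
      // ...
    }

    async function uploadWithRetry(fileName: string): Promise<void> {
      try {
        await retry(() => sendPart(), {
          retries: 3, // hypothetical; the package's retry options are outside this hunk
          onRetry: (err, attempt) => {
            // warn >= info, so retry notices stay visible without LOG_LEVEL=debug.
            logger.warn("Upload retry attempt:", attempt, ":", fileName, err.message);
          },
        });
      } catch (err) {
        // Retries exhausted: surfaced at error level, matching the hunk above.
        logger.error(`R2 Error - ${fileName}`, err);
      }
    }
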
package/dist/upload/index.d.ts.map CHANGED
@@ -1 +1 @@
- {"version":3,"file":"index.d.ts","sourceRoot":"","sources":["../../src/upload/index.ts"],"names":[],"mappings":";AAGA,OAAO,EAAE,OAAO,EAA8C,MAAM,UAAU,CAAC;AAqB/E,wBAAsB,eAAe,CAAC,EACpC,SAAS,EACT,QAAQ,EACR,cAAc,EACd,YAAY,EACZ,SAAS,EACT,WAAW,EACX,eAAe,GAChB,EAAE;IACD,SAAS,EAAE,MAAM,CAAC;IAClB,QAAQ,CAAC,EAAE,MAAM,EAAE,CAAC;IACpB,cAAc,EAAE,MAAM,CAAC;IACvB,YAAY,EAAE,MAAM,CAAC;IACrB,SAAS,CAAC,EAAE,MAAM,CAAC;IACnB,WAAW,CAAC,EAAE,MAAM,CAAC;IACrB,eAAe,CAAC,EAAE,MAAM,CAAC;CAC1B,GAAG,OAAO,CAAC,OAAO,CAAC,CAqBnB;AAED,wBAAsB,mBAAmB,CAAC,EACxC,KAAK,EACL,cAAc,EACd,YAAY,EACZ,SAAS,EACT,WAAW,EACX,eAAe,GAChB,EAAE;IACD,KAAK,EAAE;QAAE,MAAM,EAAE,MAAM,CAAC;QAAC,QAAQ,EAAE,MAAM,CAAC;QAAC,QAAQ,CAAC,EAAE,MAAM,CAAA;KAAE,EAAE,CAAC;IACjE,cAAc,EAAE,MAAM,CAAC;IACvB,YAAY,EAAE,MAAM,CAAC;IACrB,SAAS,CAAC,EAAE,MAAM,CAAC;IACnB,WAAW,CAAC,EAAE,MAAM,CAAC;IACrB,eAAe,CAAC,EAAE,MAAM,CAAC;CAC1B,GAAG,OAAO,CAAC,OAAO,CAAC,CAUnB"}
+ {"version":3,"file":"index.d.ts","sourceRoot":"","sources":["../../src/upload/index.ts"],"names":[],"mappings":";AAIA,OAAO,EAAE,OAAO,EAA8C,MAAM,UAAU,CAAC;AAqB/E,wBAAsB,eAAe,CAAC,EACpC,SAAS,EACT,QAAQ,EACR,cAAc,EACd,YAAY,EACZ,SAAS,EACT,WAAW,EACX,eAAe,GAChB,EAAE;IACD,SAAS,EAAE,MAAM,CAAC;IAClB,QAAQ,CAAC,EAAE,MAAM,EAAE,CAAC;IACpB,cAAc,EAAE,MAAM,CAAC;IACvB,YAAY,EAAE,MAAM,CAAC;IACrB,SAAS,CAAC,EAAE,MAAM,CAAC;IACnB,WAAW,CAAC,EAAE,MAAM,CAAC;IACrB,eAAe,CAAC,EAAE,MAAM,CAAC;CAC1B,GAAG,OAAO,CAAC,OAAO,CAAC,CAqBnB;AAED,wBAAsB,mBAAmB,CAAC,EACxC,KAAK,EACL,cAAc,EACd,YAAY,EACZ,SAAS,EACT,WAAW,EACX,eAAe,GAChB,EAAE;IACD,KAAK,EAAE;QAAE,MAAM,EAAE,MAAM,CAAC;QAAC,QAAQ,EAAE,MAAM,CAAC;QAAC,QAAQ,CAAC,EAAE,MAAM,CAAA;KAAE,EAAE,CAAC;IACjE,cAAc,EAAE,MAAM,CAAC;IACvB,YAAY,EAAE,MAAM,CAAC;IACrB,SAAS,CAAC,EAAE,MAAM,CAAC;IACnB,WAAW,CAAC,EAAE,MAAM,CAAC;IACrB,eAAe,CAAC,EAAE,MAAM,CAAC;CAC1B,GAAG,OAAO,CAAC,OAAO,CAAC,CAUnB"}
package/dist/upload/index.js CHANGED
@@ -29,6 +29,7 @@ Object.defineProperty(exports, "__esModule", { value: true });
  exports.uploadInMemoryFiles = exports.uploadDirectory = void 0;
  const fs = __importStar(require("fs"));
  const mime_1 = __importDefault(require("mime"));
+ const logger_1 = require("../logger");
  const buffer_1 = require("./buffer");
  const stream_1 = require("./stream");
  const getFileList = (dir) => {
@@ -55,7 +56,7 @@ async function uploadDirectory({ sourceDir, fileList, destinationDir, uploadBuck
  fileName: filePath.replace(sourceDir, ""), // relative path
  mimeType: mime_1.default.getType(filePath) || "application/octet-stream",
  }));
- console.log("Got request to upload following files:", JSON.stringify(files, null, 2));
+ logger_1.logger.debug("Got request to upload following files:", JSON.stringify(files, null, 2));
  const config = {
  accountId: accountId || process.env.R2_ACCOUNT_ID,
  accessKeyId: accessKeyId || process.env.R2_ACCESS_KEY_ID,
package/dist/upload/stream.d.ts.map CHANGED
@@ -1 +1 @@
- {"version":3,"file":"stream.d.ts","sourceRoot":"","sources":["../../src/upload/stream.ts"],"names":[],"mappings":"AAYA,OAAO,EAAE,OAAO,EAAE,oBAAoB,EAAE,MAAM,UAAU,CAAC;AAEzD,eAAO,MAAM,iBAAiB,WAAkB,oBAAoB,qBA+GnE,CAAC"}
+ {"version":3,"file":"stream.d.ts","sourceRoot":"","sources":["../../src/upload/stream.ts"],"names":[],"mappings":"AAaA,OAAO,EAAE,OAAO,EAAE,oBAAoB,EAAE,MAAM,UAAU,CAAC;AAEzD,eAAO,MAAM,iBAAiB,WAAkB,oBAAoB,qBAkHnE,CAAC"}
package/dist/upload/stream.js CHANGED
@@ -9,6 +9,7 @@ const s3_request_presigner_1 = require("@aws-sdk/s3-request-presigner");
  const async_retry_1 = __importDefault(require("async-retry"));
  const fs_1 = __importDefault(require("fs"));
  const path_1 = __importDefault(require("path"));
+ const logger_1 = require("../logger");
  const uploadFileStreams = async (config) => {
  const urls = {};
  const S3 = new client_s3_1.S3Client({
@@ -31,13 +32,13 @@ const uploadFileStreams = async (config) => {
  ContentType: mimeType,
  };
  const createMultipartUploadCommand = new client_s3_1.CreateMultipartUploadCommand(createMultipartUploadParams);
- console.log("sending a multipart upload command for file", file.fileName);
+ logger_1.logger.debug("sending a multipart upload command for file", file.fullPath);
  const { UploadId } = await S3.send(createMultipartUploadCommand);
  const partSize = 5 * 1024 * 1024; // 5MB
  const fileStream = fs_1.default.createReadStream(file.fullPath, {
  highWaterMark: partSize,
  });
- console.log("file stream created successfully for file", file.fileName);
+ logger_1.logger.debug("file stream created successfully for file", file.fullPath);
  let partNumber = 1;
  const parts = [];
  for await (const chunk of fileStream) {
@@ -52,7 +53,7 @@ const uploadFileStreams = async (config) => {
  try {
  await (0, async_retry_1.default)(async () => {
  const { ETag } = await S3.send(uploadPartCommand);
- console.log("upload part command sent successfully for file", file.fileName);
+ logger_1.logger.debug("upload part command sent successfully for file", file.fullPath);
  parts.push({ ETag: ETag, PartNumber: partNumber });
  partNumber++;
  }, {
@@ -62,12 +63,12 @@ const uploadFileStreams = async (config) => {
  maxTimeout: 60000,
  randomize: true,
  onRetry: (err, i) => {
- console.log("Upload part retry attempt:", i, ":", file.fileName);
+ logger_1.logger.debug("Upload part retry attempt:", i, ":", file.fullPath);
  },
  });
  }
  catch (err) {
- console.log(`R2 Error - ${file.fileName} \nError: ${err}`);
+ logger_1.logger.error(`R2 Error - ${file.fullPath} \nError: ${err}`);
  }
  }
  const completeMultipartUploadParams = {
@@ -79,16 +80,16 @@ const uploadFileStreams = async (config) => {
  },
  };
  const completeMultipartUploadCommand = new client_s3_1.CompleteMultipartUploadCommand(completeMultipartUploadParams);
- console.log("file uploaded successfully for file. sending complete multipart request", file.fileName);
+ logger_1.logger.debug("file uploaded successfully for file. sending complete multipart request", file.fullPath);
  await S3.send(completeMultipartUploadCommand);
- console.log("multipart upload complete for file", file.fileName);
+ logger_1.logger.debug("multipart upload complete for file", file.fullPath);
  const getObjectCommand = new client_s3_1.GetObjectCommand({
  Bucket: config.bucket,
  Key: fileKey,
  });
- console.log("getting a signed url for file", file.fileName);
+ logger_1.logger.debug("getting a signed url for file", file.fullPath);
  const fileUrl = await (0, s3_request_presigner_1.getSignedUrl)(S3, getObjectCommand);
- console.log("signed url successfully fetched for file", file.fileName);
+ logger_1.logger.debug("signed url successfully fetched for file", file.fullPath);
  urls[file.fileName] = fileUrl;
  }));
  return urls;
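
For context, the flow these stream.js hunks instrument is the standard S3/R2 multipart sequence: create the multipart upload, read the file in 5 MB parts (the highWaterMark used above; 5 MB is also S3's minimum size for every part except the last), upload each part, complete the upload, then presign a GET URL for the result. A condensed TypeScript sketch of that sequence with the package-specific config, retries, and logging stripped out; bucket, key, and endpoint values are placeholders, not the package's API:

    import fs from "fs";
    import {
      S3Client,
      CreateMultipartUploadCommand,
      UploadPartCommand,
      CompleteMultipartUploadCommand,
      GetObjectCommand,
    } from "@aws-sdk/client-s3";
    import { getSignedUrl } from "@aws-sdk/s3-request-presigner";

    async function multipartUpload(s3: S3Client, bucket: string, key: string, fullPath: string): Promise<string> {
      const { UploadId } = await s3.send(
        new CreateMultipartUploadCommand({ Bucket: bucket, Key: key })
      );
      const partSize = 5 * 1024 * 1024; // 5MB, matching the highWaterMark in stream.js
      const parts: { ETag?: string; PartNumber: number }[] = [];
      let partNumber = 1;
      // Reading with highWaterMark=partSize yields ~5MB Buffer chunks per iteration.
      for await (const chunk of fs.createReadStream(fullPath, { highWaterMark: partSize })) {
        const { ETag } = await s3.send(
          new UploadPartCommand({ Bucket: bucket, Key: key, UploadId, PartNumber: partNumber, Body: chunk })
        );
        parts.push({ ETag, PartNumber: partNumber });
        partNumber++;
      }
      await s3.send(
        new CompleteMultipartUploadCommand({
          Bucket: bucket,
          Key: key,
          UploadId,
          MultipartUpload: { Parts: parts },
        })
      );
      // Presigned GET URL for the uploaded object (what stream.js stores per file).
      return getSignedUrl(s3, new GetObjectCommand({ Bucket: bucket, Key: key }));
    }

    // Example R2 wiring (placeholders): region "auto" as in the diff, plus the
    // account-scoped endpoint and credentials the package builds from its config
    // and R2_* environment variables.
    const s3 = new S3Client({
      region: "auto",
      endpoint: "https://<ACCOUNT_ID>.r2.cloudflarestorage.com",
      credentials: { accessKeyId: "<ACCESS_KEY_ID>", secretAccessKey: "<SECRET_ACCESS_KEY>" },
    });
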
package/package.json CHANGED
@@ -1,6 +1,6 @@
  {
  "name": "@empiricalrun/r2-uploader",
- "version": "0.3.5",
+ "version": "0.3.7",
  "publishConfig": {
  "registry": "https://registry.npmjs.org/",
  "access": "public"
@@ -18,9 +18,13 @@
  "@types/md5": "^2.3.5",
  "@types/mime": "3.0.0",
  "async-retry": "^1.3.3",
+ "console-log-level": "^1.4.1",
  "md5": "^2.3.0",
  "mime": "3.0.0"
  },
+ "devDependencies": {
+ "@types/console-log-level": "^1.4.5"
+ },
  "scripts": {
  "dev": "tsc --build --watch",
  "build": "tsc --build",