@arkyn/server 2.2.3 → 2.2.5

This diff shows the changes between publicly available package versions released to one of the supported registries. It is provided for informational purposes only and reflects the differences between the package versions as they appear in their respective public registries.
@@ -1 +1 @@
1
- {"version":3,"file":"httpDebug.d.ts","sourceRoot":"","sources":["../../src/services/httpDebug.ts"],"names":[],"mappings":"AAGA;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;GA+BG;AAEH,iBAAS,SAAS,CAAC,IAAI,EAAE,MAAM,EAAE,IAAI,EAAE,GAAG,EAAE,KAAK,CAAC,EAAE,GAAG,QA0BtD;AAED,OAAO,EAAE,SAAS,EAAE,CAAC"}
1
+ {"version":3,"file":"httpDebug.d.ts","sourceRoot":"","sources":["../../src/services/httpDebug.ts"],"names":[],"mappings":"AAGA;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;GA+BG;AAEH,iBAAS,SAAS,CAAC,IAAI,EAAE,MAAM,EAAE,IAAI,EAAE,GAAG,EAAE,KAAK,CAAC,EAAE,GAAG,QAuBtD;AAED,OAAO,EAAE,SAAS,EAAE,CAAC"}
@@ -47,9 +47,6 @@ function httpDebug(name, body, cause) {
47
47
  if (cause) {
48
48
  consoleData += `${debugName} Cause: ${JSON.stringify(cause, null, 2)}\n`;
49
49
  }
50
- const arkynKeys = InboxFlowInstance.getInboxConfig();
51
- if (arkynKeys)
52
- console.log(arkynKeys);
53
50
  console.log(consoleData);
54
51
  }
55
52
  }
@@ -1,52 +1,4 @@
1
1
  import type { SendFileToS3Function } from "@arkyn/types";
2
- /**
3
- * Handles file uploads to an AWS S3 bucket. This function processes a file
4
- * from a multipart form request, validates and optionally compresses the file,
5
- * and uploads it to S3. It supports image-specific operations such as resizing
6
- * validation and quality reduction.
7
- *
8
- * @param request - The HTTP request containing the multipart form data.
9
- * @param awsS3Config - Configuration object for AWS S3, including bucket name,
10
- * region, and credentials.
11
- * @param config - Optional configuration object for file handling.
12
- *
13
- * @param config.fileName - The name of the form field containing the file. Defaults to `"file"`.
14
- * @param config.maxPartSize - The maximum size (in bytes) for each part of the file. Defaults to `5_000_000`.
15
- * @param config.reduceImageQuality - The quality percentage for image compression. Defaults to `100`.
16
- * @param config.validateImageSize - Whether to validate the image dimensions. Defaults to `false`.
17
- * @param config.validateImageMessage - The error message template for invalid image dimensions.
18
- * Defaults to `"Invalid dimensions {{width}}px x {{height}}px"`.
19
- *
20
- * @returns A promise that resolves to an object containing the uploaded file's URL
21
- * or an error message if validation fails.
22
- *
23
- * @throws {BadRequest} If no file is uploaded.
24
- *
25
- * @example
26
- * ```typescript
27
- * const awsS3Config = {
28
- * AWS_S3_BUCKET: "my-bucket",
29
- * AWS_REGION: "us-east-1",
30
- * AWS_ACCESS_KEY_ID: "my-access-key",
31
- * AWS_SECRET_ACCESS_KEY: "my-secret-key",
32
- * };
33
- *
34
- * const config = {
35
- * fileName: "upload",
36
- * maxPartSize: 10_000_000,
37
- * reduceImageQuality: 80,
38
- * validateImageSize: true,
39
- * validateImageMessage: "Invalid dimensions {{width}}px x {{height}}px",
40
- * };
41
- *
42
- * const response = await sendFileToS3(request, awsS3Config, config);
43
- * if (response.error) {
44
- * console.error(response.error);
45
- * } else {
46
- * console.log("File uploaded to:", response.url);
47
- * }
48
- * ```
49
- */
50
2
  declare const sendFileToS3: SendFileToS3Function;
51
3
  export { sendFileToS3 };
52
4
  //# sourceMappingURL=sendFileToS3.d.ts.map
@@ -1 +1 @@
1
- {"version":3,"file":"sendFileToS3.d.ts","sourceRoot":"","sources":["../../src/services/sendFileToS3.ts"],"names":[],"mappings":"AACA,OAAO,KAAK,EAAa,oBAAoB,EAAE,MAAM,cAAc,CAAC;AA8CpE;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;GA+CG;AAEH,QAAA,MAAM,YAAY,EAAE,oBAkFnB,CAAC;AAEF,OAAO,EAAE,YAAY,EAAE,CAAC"}
1
+ {"version":3,"file":"sendFileToS3.d.ts","sourceRoot":"","sources":["../../src/services/sendFileToS3.ts"],"names":[],"mappings":"AACA,OAAO,KAAK,EAAa,oBAAoB,EAAE,MAAM,cAAc,CAAC;AA0CpE,QAAA,MAAM,YAAY,EAAE,oBAqFnB,CAAC;AAEF,OAAO,EAAE,YAAY,EAAE,CAAC"}
@@ -1,14 +1,15 @@
1
1
  import { generateId } from "@arkyn/shared";
2
2
  import { PutObjectCommand, S3Client } from "@aws-sdk/client-s3";
3
- import { unstable_composeUploadHandlers as composeUploadHandlers, unstable_createFileUploadHandler as createFileUploadHandler, unstable_parseMultipartFormData as parseMultipartFormData, } from "@remix-run/node";
3
+ import { MultipartParseError, parseMultipartRequest, } from "@mjackson/multipart-parser";
4
4
  import fs from "fs";
5
5
  import sharp from "sharp";
6
6
  import { BadRequest } from "../http/badResponses/badRequest";
7
7
  import { getScopedParams } from "./getScopedParams";
8
8
  async function s3Upload(fileStream, contentType, awsConfig) {
9
+ const Key = `uploads/${generateId("text", "v4")}`;
9
10
  const uploadParams = {
10
11
  Bucket: awsConfig.AWS_S3_BUCKET,
11
- Key: `uploads/${generateId("text", "v4")}`,
12
+ Key,
12
13
  Body: fileStream,
13
14
  ContentType: contentType,
14
15
  };
@@ -19,93 +20,60 @@ async function s3Upload(fileStream, contentType, awsConfig) {
19
20
  secretAccessKey: awsConfig.AWS_SECRET_ACCESS_KEY,
20
21
  },
21
22
  });
22
- const command = new PutObjectCommand(uploadParams);
23
- try {
24
- await s3Client.send(command);
25
- }
26
- catch (error) {
27
- console.error(error);
28
- }
23
+ await s3Client.send(new PutObjectCommand(uploadParams));
29
24
  return {
30
- location: `https://${awsConfig.AWS_S3_BUCKET}.s3.amazonaws.com/${uploadParams.Key}`,
25
+ location: `https://${awsConfig.AWS_S3_BUCKET}.s3.amazonaws.com/${Key}`,
31
26
  };
32
27
  }
33
- /**
34
- * Handles file uploads to an AWS S3 bucket. This function processes a file
35
- * from a multipart form request, validates and optionally compresses the file,
36
- * and uploads it to S3. It supports image-specific operations such as resizing
37
- * validation and quality reduction.
38
- *
39
- * @param request - The HTTP request containing the multipart form data.
40
- * @param awsS3Config - Configuration object for AWS S3, including bucket name,
41
- * region, and credentials.
42
- * @param config - Optional configuration object for file handling.
43
- *
44
- * @param config.fileName - The name of the form field containing the file. Defaults to `"file"`.
45
- * @param config.maxPartSize - The maximum size (in bytes) for each part of the file. Defaults to `5_000_000`.
46
- * @param config.reduceImageQuality - The quality percentage for image compression. Defaults to `100`.
47
- * @param config.validateImageSize - Whether to validate the image dimensions. Defaults to `false`.
48
- * @param config.validateImageMessage - The error message template for invalid image dimensions.
49
- * Defaults to `"Invalid dimensions {{width}}px x {{height}}px"`.
50
- *
51
- * @returns A promise that resolves to an object containing the uploaded file's URL
52
- * or an error message if validation fails.
53
- *
54
- * @throws {BadRequest} If no file is uploaded.
55
- *
56
- * @example
57
- * ```typescript
58
- * const awsS3Config = {
59
- * AWS_S3_BUCKET: "my-bucket",
60
- * AWS_REGION: "us-east-1",
61
- * AWS_ACCESS_KEY_ID: "my-access-key",
62
- * AWS_SECRET_ACCESS_KEY: "my-secret-key",
63
- * };
64
- *
65
- * const config = {
66
- * fileName: "upload",
67
- * maxPartSize: 10_000_000,
68
- * reduceImageQuality: 80,
69
- * validateImageSize: true,
70
- * validateImageMessage: "Invalid dimensions {{width}}px x {{height}}px",
71
- * };
72
- *
73
- * const response = await sendFileToS3(request, awsS3Config, config);
74
- * if (response.error) {
75
- * console.error(response.error);
76
- * } else {
77
- * console.log("File uploaded to:", response.url);
78
- * }
79
- * ```
80
- */
81
28
  const sendFileToS3 = async (request, awsS3Config, config) => {
82
- const fileName = config?.fileName || "file";
83
- const maxPartSize = config?.maxPartSize || 5_000_000;
84
- const reduceImageQuality = config?.reduceImageQuality || 100;
85
- const validateImageSize = config?.validateImageSize || false;
86
- const validateImageMessage = config?.validateImageMessage ||
29
+ const fileName = config?.fileName ?? "file";
30
+ const reduceImageQuality = config?.reduceImageQuality ?? 100;
31
+ const validateImageSize = config?.validateImageSize ?? false;
32
+ const validateImageMessage = config?.validateImageMessage ??
87
33
  "Invalid dimensions {{width}}px x {{height}}px";
88
- const uploadHandler = composeUploadHandlers(createFileUploadHandler({
89
- maxPartSize,
90
- file: ({ filename }) => filename,
91
- }));
92
- const formData = await parseMultipartFormData(request, uploadHandler);
93
- const file = formData.get(fileName);
94
- if (!file)
95
- throw new BadRequest("No file uploaded");
34
+ let saved = {
35
+ tempPath: "",
36
+ mediaType: "",
37
+ };
38
+ try {
39
+ await parseMultipartRequest(request, async (part) => {
40
+ if (part.isFile && part.name === fileName) {
41
+ const bytes = await part.bytes();
42
+ const tempPath = `/tmp/${generateId("text", "v4")}-${part.filename}`;
43
+ await Bun.write(tempPath, bytes);
44
+ if (!part.mediaType)
45
+ throw new Error("mediaType is undefined");
46
+ saved = {
47
+ tempPath,
48
+ mediaType: part.mediaType,
49
+ };
50
+ }
51
+ });
52
+ }
53
+ catch (err) {
54
+ if (err instanceof MultipartParseError) {
55
+ throw new BadRequest(`Falha ao processar upload: ${err.message}`);
56
+ }
57
+ throw err;
58
+ }
59
+ if (saved.mediaType === "")
60
+ throw new BadRequest("mediaType é indefinido");
61
+ if (saved.tempPath === "")
62
+ throw new BadRequest("tempPath é indefinido");
96
63
  const filterParams = getScopedParams(request);
97
64
  const width = filterParams.get("w");
98
65
  const height = filterParams.get("h");
99
- const reduceQuality = filterParams.get("reduceQuality");
100
- const quality = reduceQuality ? +reduceQuality : reduceImageQuality;
101
- const isImage = file.type.startsWith("image");
66
+ const quality = filterParams.get("reduceQuality") !== null
67
+ ? +filterParams.get("reduceQuality")
68
+ : reduceImageQuality;
69
+ const isImage = saved.mediaType.startsWith("image/");
102
70
  if (isImage && width && height && validateImageSize) {
103
- const image = sharp(file.getFilePath());
104
- const metadata = await image.metadata();
105
- if (metadata.width && metadata.height) {
106
- const widthDiff = Math.abs(metadata.width - +width);
107
- const heightDiff = Math.abs(metadata.height - +height);
108
- if (widthDiff > 10 || heightDiff > 10) {
71
+ const meta = await sharp(saved.tempPath).metadata();
72
+ if (meta.width && meta.height) {
73
+ const dw = Math.abs(meta.width - +width);
74
+ const dh = Math.abs(meta.height - +height);
75
+ if (dw > 10 || dh > 10) {
76
+ fs.unlink(saved.tempPath, () => { });
109
77
  return {
110
78
  error: validateImageMessage
111
79
  .replace("{{width}}", width)
@@ -114,30 +82,24 @@ const sendFileToS3 = async (request, awsS3Config, config) => {
114
82
  }
115
83
  }
116
84
  }
85
+ let uploadPath = saved.tempPath;
117
86
  if (isImage) {
118
- let image = sharp(file.getFilePath());
119
- if (file.type === "image/jpeg") {
120
- image = image.jpeg({ quality });
121
- }
122
- else if (file.type === "image/png") {
123
- image = image.png({ quality });
124
- }
125
- else if (file.type === "image/webp") {
126
- image = image.webp({ quality });
127
- }
128
- const compressedFilePath = file.getFilePath() + "_compressed";
129
- await image.toFile(compressedFilePath);
130
- file.getFilePath = () => compressedFilePath;
131
- const streamFile = fs.createReadStream(file.getFilePath());
132
- const apiResponse = await s3Upload(streamFile, file.type, awsS3Config);
133
- fs.unlink(compressedFilePath, (err) => {
134
- if (err)
135
- console.error(`Delete image error: ${err}`);
136
- });
137
- return { url: apiResponse.location };
87
+ const ext = saved.mediaType.split("/")[1];
88
+ const compressed = `${saved.tempPath}_cmp.${ext}`;
89
+ let img = sharp(saved.tempPath);
90
+ if (ext === "jpeg" || ext === "jpg")
91
+ img = img.jpeg({ quality });
92
+ else if (ext === "png")
93
+ img = img.png({ quality });
94
+ else if (ext === "webp")
95
+ img = img.webp({ quality });
96
+ await img.toFile(compressed);
97
+ fs.unlink(saved.tempPath, () => { });
98
+ uploadPath = compressed;
138
99
  }
139
- const streamFile = fs.createReadStream(file.getFilePath());
140
- const apiResponse = await s3Upload(streamFile, file.type, awsS3Config);
141
- return { url: apiResponse.location };
100
+ const stream = fs.createReadStream(uploadPath);
101
+ const { location } = await s3Upload(stream, saved.mediaType, awsS3Config);
102
+ fs.unlink(uploadPath, () => { });
103
+ return { url: location };
142
104
  };
143
105
  export { sendFileToS3 };
package/package.json CHANGED
@@ -1,31 +1,32 @@
1
1
  {
2
2
  "name": "@arkyn/server",
3
- "version": "2.2.3",
3
+ "version": "2.2.5",
4
+ "author": "Arkyn | Lucas Gonçalves",
4
5
  "main": "./dist/bundle.js",
5
6
  "module": "./src/index.ts",
6
- "type": "module",
7
- "types": "./dist/index.d.ts",
8
- "license": "Apache-2.0",
9
- "author": "Arkyn | Lucas Gonçalves",
10
- "description": "Server-side utilities for projects.",
11
- "scripts": {
12
- "clean": "rm -rf dist",
13
- "build": "bun run clean && bunx tsc --project tsconfig.json",
14
- "test": "vitest --config vitest.config.ts",
15
- "typecheck": "bunx tsc --project tsconfig.json --noEmit"
16
- },
17
7
  "dependencies": {
18
8
  "@arkyn/shared": "*",
19
9
  "@aws-sdk/client-s3": "^3.782.0",
10
+ "@mjackson/multipart-parser": "^0.8.2",
20
11
  "sharp": "^0.33.5",
21
12
  "zod": "^3.24.2"
22
13
  },
23
- "peerDependencies": {
24
- "@remix-run/node": "^2.16.4"
25
- },
26
14
  "devDependencies": {
27
15
  "bun-types": "latest",
28
16
  "vitest": "^3.1.1",
29
17
  "typescript": "^5.8.3"
30
- }
18
+ },
19
+ "peerDependencies": {
20
+ "@react-router/node": ">=7.6.0"
21
+ },
22
+ "description": "Server-side utilities for projects.",
23
+ "license": "Apache-2.0",
24
+ "scripts": {
25
+ "clean": "rm -rf dist",
26
+ "build": "bun run clean && bunx tsc --project tsconfig.json",
27
+ "test": "vitest --config vitest.config.ts",
28
+ "typecheck": "bunx tsc --project tsconfig.json --noEmit"
29
+ },
30
+ "type": "module",
31
+ "types": "./dist/index.d.ts"
31
32
  }
@@ -55,9 +55,6 @@ function httpDebug(name: string, body: any, cause?: any) {
55
55
  consoleData += `${debugName} Cause: ${JSON.stringify(cause, null, 2)}\n`;
56
56
  }
57
57
 
58
- const arkynKeys = InboxFlowInstance.getInboxConfig();
59
- if (arkynKeys) console.log(arkynKeys);
60
-
61
58
  console.log(consoleData);
62
59
  }
63
60
  }
@@ -2,24 +2,28 @@ import { generateId } from "@arkyn/shared";
2
2
  import type { AwsConfig, SendFileToS3Function } from "@arkyn/types";
3
3
  import { PutObjectCommand, S3Client } from "@aws-sdk/client-s3";
4
4
  import {
5
- unstable_composeUploadHandlers as composeUploadHandlers,
6
- unstable_createFileUploadHandler as createFileUploadHandler,
7
- unstable_parseMultipartFormData as parseMultipartFormData,
8
- type NodeOnDiskFile,
9
- } from "@remix-run/node";
5
+ MultipartParseError,
6
+ parseMultipartRequest,
7
+ } from "@mjackson/multipart-parser";
10
8
  import fs from "fs";
11
9
  import sharp from "sharp";
12
10
  import { BadRequest } from "../http/badResponses/badRequest";
13
11
  import { getScopedParams } from "./getScopedParams";
14
12
 
13
+ type Saved = {
14
+ tempPath: string;
15
+ mediaType: string;
16
+ };
17
+
15
18
  async function s3Upload(
16
19
  fileStream: fs.ReadStream,
17
20
  contentType: string,
18
21
  awsConfig: AwsConfig
19
22
  ) {
23
+ const Key = `uploads/${generateId("text", "v4")}`;
20
24
  const uploadParams = {
21
25
  Bucket: awsConfig.AWS_S3_BUCKET,
22
- Key: `uploads/${generateId("text", "v4")}`,
26
+ Key,
23
27
  Body: fileStream,
24
28
  ContentType: contentType,
25
29
  };
@@ -31,112 +35,70 @@ async function s3Upload(
31
35
  secretAccessKey: awsConfig.AWS_SECRET_ACCESS_KEY,
32
36
  },
33
37
  });
34
-
35
- const command = new PutObjectCommand(uploadParams);
36
-
37
- try {
38
- await s3Client.send(command);
39
- } catch (error) {
40
- console.error(error);
41
- }
42
-
38
+ await s3Client.send(new PutObjectCommand(uploadParams));
43
39
  return {
44
- location: `https://${awsConfig.AWS_S3_BUCKET}.s3.amazonaws.com/${uploadParams.Key}`,
40
+ location: `https://${awsConfig.AWS_S3_BUCKET}.s3.amazonaws.com/${Key}`,
45
41
  };
46
42
  }
47
43
 
48
- /**
49
- * Handles file uploads to an AWS S3 bucket. This function processes a file
50
- * from a multipart form request, validates and optionally compresses the file,
51
- * and uploads it to S3. It supports image-specific operations such as resizing
52
- * validation and quality reduction.
53
- *
54
- * @param request - The HTTP request containing the multipart form data.
55
- * @param awsS3Config - Configuration object for AWS S3, including bucket name,
56
- * region, and credentials.
57
- * @param config - Optional configuration object for file handling.
58
- *
59
- * @param config.fileName - The name of the form field containing the file. Defaults to `"file"`.
60
- * @param config.maxPartSize - The maximum size (in bytes) for each part of the file. Defaults to `5_000_000`.
61
- * @param config.reduceImageQuality - The quality percentage for image compression. Defaults to `100`.
62
- * @param config.validateImageSize - Whether to validate the image dimensions. Defaults to `false`.
63
- * @param config.validateImageMessage - The error message template for invalid image dimensions.
64
- * Defaults to `"Invalid dimensions {{width}}px x {{height}}px"`.
65
- *
66
- * @returns A promise that resolves to an object containing the uploaded file's URL
67
- * or an error message if validation fails.
68
- *
69
- * @throws {BadRequest} If no file is uploaded.
70
- *
71
- * @example
72
- * ```typescript
73
- * const awsS3Config = {
74
- * AWS_S3_BUCKET: "my-bucket",
75
- * AWS_REGION: "us-east-1",
76
- * AWS_ACCESS_KEY_ID: "my-access-key",
77
- * AWS_SECRET_ACCESS_KEY: "my-secret-key",
78
- * };
79
- *
80
- * const config = {
81
- * fileName: "upload",
82
- * maxPartSize: 10_000_000,
83
- * reduceImageQuality: 80,
84
- * validateImageSize: true,
85
- * validateImageMessage: "Invalid dimensions {{width}}px x {{height}}px",
86
- * };
87
- *
88
- * const response = await sendFileToS3(request, awsS3Config, config);
89
- * if (response.error) {
90
- * console.error(response.error);
91
- * } else {
92
- * console.log("File uploaded to:", response.url);
93
- * }
94
- * ```
95
- */
96
-
97
44
  const sendFileToS3: SendFileToS3Function = async (
98
45
  request,
99
46
  awsS3Config,
100
47
  config
101
48
  ) => {
102
- const fileName = config?.fileName || "file";
103
- const maxPartSize = config?.maxPartSize || 5_000_000;
104
- const reduceImageQuality = config?.reduceImageQuality || 100;
105
- const validateImageSize = config?.validateImageSize || false;
49
+ const fileName = config?.fileName ?? "file";
50
+ const reduceImageQuality = config?.reduceImageQuality ?? 100;
51
+ const validateImageSize = config?.validateImageSize ?? false;
106
52
  const validateImageMessage =
107
- config?.validateImageMessage ||
53
+ config?.validateImageMessage ??
108
54
  "Invalid dimensions {{width}}px x {{height}}px";
109
55
 
110
- const uploadHandler = composeUploadHandlers(
111
- createFileUploadHandler({
112
- maxPartSize,
113
- file: ({ filename }) => filename,
114
- })
115
- );
56
+ let saved: Saved = {
57
+ tempPath: "",
58
+ mediaType: "",
59
+ };
116
60
 
117
- const formData = await parseMultipartFormData(request, uploadHandler);
118
- const file = formData.get(fileName) as unknown as NodeOnDiskFile;
61
+ try {
62
+ await parseMultipartRequest(request, async (part) => {
63
+ if (part.isFile && part.name === fileName) {
64
+ const bytes = await part.bytes();
65
+ const tempPath = `/tmp/${generateId("text", "v4")}-${part.filename}`;
66
+ await Bun.write(tempPath, bytes);
67
+ if (!part.mediaType) throw new Error("mediaType is undefined");
68
+
69
+ saved = {
70
+ tempPath,
71
+ mediaType: part.mediaType,
72
+ };
73
+ }
74
+ });
75
+ } catch (err) {
76
+ if (err instanceof MultipartParseError) {
77
+ throw new BadRequest(`Falha ao processar upload: ${err.message}`);
78
+ }
79
+ throw err;
80
+ }
119
81
 
120
- if (!file) throw new BadRequest("No file uploaded");
82
+ if (saved.mediaType === "") throw new BadRequest("mediaType é indefinido");
83
+ if (saved.tempPath === "") throw new BadRequest("tempPath é indefinido");
121
84
 
122
85
  const filterParams = getScopedParams(request);
123
86
  const width = filterParams.get("w");
124
87
  const height = filterParams.get("h");
88
+ const quality =
89
+ filterParams.get("reduceQuality") !== null
90
+ ? +filterParams.get("reduceQuality")!
91
+ : reduceImageQuality;
125
92
 
126
- const reduceQuality = filterParams.get("reduceQuality");
127
- const quality = reduceQuality ? +reduceQuality : reduceImageQuality;
128
-
129
- const isImage = file.type.startsWith("image");
93
+ const isImage = saved.mediaType.startsWith("image/");
130
94
 
131
95
  if (isImage && width && height && validateImageSize) {
132
- const image = sharp(file.getFilePath());
133
- const metadata = await image.metadata();
134
-
135
- if (metadata.width && metadata.height) {
136
- const widthDiff = Math.abs(metadata.width - +width);
137
- const heightDiff = Math.abs(metadata.height - +height);
138
-
139
- if (widthDiff > 10 || heightDiff > 10) {
96
+ const meta = await sharp(saved.tempPath).metadata();
97
+ if (meta.width && meta.height) {
98
+ const dw = Math.abs(meta.width - +width);
99
+ const dh = Math.abs(meta.height - +height);
100
+ if (dw > 10 || dh > 10) {
101
+ fs.unlink(saved.tempPath, () => {});
140
102
  return {
141
103
  error: validateImageMessage
142
104
  .replace("{{width}}", width)
@@ -146,36 +108,24 @@ const sendFileToS3: SendFileToS3Function = async (
146
108
  }
147
109
  }
148
110
 
111
+ let uploadPath = saved.tempPath;
149
112
  if (isImage) {
150
- let image = sharp(file.getFilePath());
151
-
152
- if (file.type === "image/jpeg") {
153
- image = image.jpeg({ quality });
154
- } else if (file.type === "image/png") {
155
- image = image.png({ quality });
156
- } else if (file.type === "image/webp") {
157
- image = image.webp({ quality });
158
- }
159
-
160
- const compressedFilePath = file.getFilePath() + "_compressed";
161
- await image.toFile(compressedFilePath);
162
-
163
- file.getFilePath = () => compressedFilePath;
164
-
165
- const streamFile = fs.createReadStream(file.getFilePath());
166
- const apiResponse = await s3Upload(streamFile, file.type, awsS3Config);
167
-
168
- fs.unlink(compressedFilePath, (err) => {
169
- if (err) console.error(`Delete image error: ${err}`);
170
- });
171
-
172
- return { url: apiResponse.location };
113
+ const ext = saved.mediaType.split("/")[1];
114
+ const compressed = `${saved.tempPath}_cmp.${ext}`;
115
+ let img = sharp(saved.tempPath);
116
+ if (ext === "jpeg" || ext === "jpg") img = img.jpeg({ quality });
117
+ else if (ext === "png") img = img.png({ quality });
118
+ else if (ext === "webp") img = img.webp({ quality });
119
+ await img.toFile(compressed);
120
+ fs.unlink(saved.tempPath, () => {});
121
+ uploadPath = compressed;
173
122
  }
174
123
 
175
- const streamFile = fs.createReadStream(file.getFilePath());
176
- const apiResponse = await s3Upload(streamFile, file.type, awsS3Config);
124
+ const stream = fs.createReadStream(uploadPath);
125
+ const { location } = await s3Upload(stream, saved.mediaType, awsS3Config);
177
126
 
178
- return { url: apiResponse.location };
127
+ fs.unlink(uploadPath, () => {});
128
+ return { url: location };
179
129
  };
180
130
 
181
131
  export { sendFileToS3 };