@arkyn/server 1.4.1 → 1.4.3

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (37)
  1. package/dist/index.d.ts +1 -0
  2. package/dist/index.d.ts.map +1 -1
  3. package/dist/index.js +1 -0
  4. package/dist/services/sendImageToS3.d.ts +16 -0
  5. package/dist/services/sendImageToS3.d.ts.map +1 -0
  6. package/dist/services/sendImageToS3.js +72 -0
  7. package/package.json +7 -4
  8. package/src/index.ts +1 -0
  9. package/src/services/sendImageToS3.ts +115 -0
  10. package/tsconfig.json +2 -1
  11. package/dist/badRequest.d.ts +0 -3
  12. package/dist/badRequest.d.ts.map +0 -1
  13. package/dist/badRequest.js +0 -5
  14. package/dist/formParse.d.ts +0 -3
  15. package/dist/formParse.d.ts.map +0 -1
  16. package/dist/formParse.js +0 -15
  17. package/dist/getScopedParams.d.ts +0 -5
  18. package/dist/getScopedParams.d.ts.map +0 -1
  19. package/dist/getScopedParams.js +0 -10
  20. package/dist/helpers/formParse.d.ts +0 -3
  21. package/dist/helpers/formParse.d.ts.map +0 -1
  22. package/dist/helpers/formParse.js +0 -15
  23. package/dist/helpers/getScopedParams.d.ts +0 -3
  24. package/dist/helpers/getScopedParams.d.ts.map +0 -1
  25. package/dist/helpers/getScopedParams.js +0 -10
  26. package/dist/helpers/uuid.d.ts +0 -3
  27. package/dist/helpers/uuid.d.ts.map +0 -1
  28. package/dist/helpers/uuid.js +0 -5
  29. package/dist/helpers/validateCPF.d.ts +0 -3
  30. package/dist/helpers/validateCPF.d.ts.map +0 -1
  31. package/dist/helpers/validateCPF.js +0 -36
  32. package/dist/services/maskSensitiveData.d.ts +0 -3
  33. package/dist/services/maskSensitiveData.d.ts.map +0 -1
  34. package/dist/services/maskSensitiveData.js +0 -38
  35. package/dist/services/truncateLargeFields.d.ts +0 -3
  36. package/dist/services/truncateLargeFields.d.ts.map +0 -1
  37. package/dist/services/truncateLargeFields.js +0 -29
package/dist/index.d.ts CHANGED
@@ -9,4 +9,5 @@ export { globalErrorHandler } from "./helpers/globalErrorHandler";
9
9
  export { extractJsonFromRequest } from "./services/extractJsonFromRequest";
10
10
  export { formParse } from "./services/formParse";
11
11
  export { getScopedParams } from "./services/getScopedParams";
12
+ export { sendImageToS3 } from "./services/sendImageToS3";
12
13
  //# sourceMappingURL=index.d.ts.map
@@ -1 +1 @@
1
- {"version":3,"file":"index.d.ts","sourceRoot":"","sources":["../src/index.ts"],"names":[],"mappings":"AACA,OAAO,EAAE,eAAe,EAAE,MAAM,+BAA+B,CAAC;AAChE,OAAO,EAAE,aAAa,EAAE,MAAM,6BAA6B,CAAC;AAC5D,OAAO,EAAE,cAAc,EAAE,MAAM,8BAA8B,CAAC;AAC9D,OAAO,EAAE,aAAa,EAAE,MAAM,6BAA6B,CAAC;AAC5D,OAAO,EAAE,WAAW,EAAE,MAAM,gCAAgC,CAAC;AAC7D,OAAO,EAAE,iBAAiB,EAAE,MAAM,iCAAiC,CAAC;AACpE,OAAO,EAAE,wBAAwB,EAAE,MAAM,wCAAwC,CAAC;AAGlF,OAAO,EAAE,kBAAkB,EAAE,MAAM,8BAA8B,CAAC;AAGlE,OAAO,EAAE,sBAAsB,EAAE,MAAM,mCAAmC,CAAC;AAC3E,OAAO,EAAE,SAAS,EAAE,MAAM,sBAAsB,CAAC;AACjD,OAAO,EAAE,eAAe,EAAE,MAAM,4BAA4B,CAAC"}
1
+ {"version":3,"file":"index.d.ts","sourceRoot":"","sources":["../src/index.ts"],"names":[],"mappings":"AACA,OAAO,EAAE,eAAe,EAAE,MAAM,+BAA+B,CAAC;AAChE,OAAO,EAAE,aAAa,EAAE,MAAM,6BAA6B,CAAC;AAC5D,OAAO,EAAE,cAAc,EAAE,MAAM,8BAA8B,CAAC;AAC9D,OAAO,EAAE,aAAa,EAAE,MAAM,6BAA6B,CAAC;AAC5D,OAAO,EAAE,WAAW,EAAE,MAAM,gCAAgC,CAAC;AAC7D,OAAO,EAAE,iBAAiB,EAAE,MAAM,iCAAiC,CAAC;AACpE,OAAO,EAAE,wBAAwB,EAAE,MAAM,wCAAwC,CAAC;AAGlF,OAAO,EAAE,kBAAkB,EAAE,MAAM,8BAA8B,CAAC;AAGlE,OAAO,EAAE,sBAAsB,EAAE,MAAM,mCAAmC,CAAC;AAC3E,OAAO,EAAE,SAAS,EAAE,MAAM,sBAAsB,CAAC;AACjD,OAAO,EAAE,eAAe,EAAE,MAAM,4BAA4B,CAAC;AAC7D,OAAO,EAAE,aAAa,EAAE,MAAM,0BAA0B,CAAC"}
package/dist/index.js CHANGED
@@ -12,3 +12,4 @@ export { globalErrorHandler } from "./helpers/globalErrorHandler";
12
12
  export { extractJsonFromRequest } from "./services/extractJsonFromRequest";
13
13
  export { formParse } from "./services/formParse";
14
14
  export { getScopedParams } from "./services/getScopedParams";
15
+ export { sendImageToS3 } from "./services/sendImageToS3";
@@ -0,0 +1,16 @@
1
+ import { type ActionFunctionArgs } from "@remix-run/node";
2
+ type AWSConfig = {
3
+ AWS_REGION: string;
4
+ AWS_ACCESS_KEY_ID: string;
5
+ AWS_SECRET_ACCESS_KEY: string;
6
+ AWS_S3_BUCKET: string;
7
+ };
8
+ declare function sendImageToS3(props: ActionFunctionArgs, awsS3Config: AWSConfig): Promise<{
9
+ error: string;
10
+ url?: undefined;
11
+ } | {
12
+ url: string;
13
+ error?: undefined;
14
+ }>;
15
+ export { sendImageToS3 };
16
+ //# sourceMappingURL=sendImageToS3.d.ts.map
@@ -0,0 +1 @@
1
+ {"version":3,"file":"sendImageToS3.d.ts","sourceRoot":"","sources":["../../src/services/sendImageToS3.ts"],"names":[],"mappings":"AACA,OAAO,EAIL,KAAK,kBAAkB,EAExB,MAAM,iBAAiB,CAAC;AAOzB,KAAK,SAAS,GAAG;IACf,UAAU,EAAE,MAAM,CAAC;IACnB,iBAAiB,EAAE,MAAM,CAAC;IAC1B,qBAAqB,EAAE,MAAM,CAAC;IAC9B,aAAa,EAAE,MAAM,CAAC;CACvB,CAAC;AAmDF,iBAAe,aAAa,CAC1B,KAAK,EAAE,kBAAkB,EACzB,WAAW,EAAE,SAAS;;;;;;GAwCvB;AAED,OAAO,EAAE,aAAa,EAAE,CAAC"}
@@ -0,0 +1,72 @@
1
+ import { PutObjectCommand, S3Client } from "@aws-sdk/client-s3";
2
+ import { unstable_composeUploadHandlers as composeUploadHandlers, unstable_createFileUploadHandler as createFileUploadHandler, unstable_parseMultipartFormData as parseMultipartFormData, } from "@remix-run/node";
3
+ import { randomUUID } from "crypto";
4
+ import fs from "fs";
5
+ import sharp from "sharp";
6
+ import { BadRequestError } from "../httpBadResponses/badRequest";
7
+ import { getScopedParams } from "./getScopedParams";
8
+ async function s3_upload(fileStream, contentType, awsS3Config) {
9
+ const { AWS_ACCESS_KEY_ID, AWS_REGION, AWS_S3_BUCKET, AWS_SECRET_ACCESS_KEY, } = awsS3Config;
10
+ const filePath = fileStream.path;
11
+ let fileName = "";
12
+ if (typeof filePath === "string") {
13
+ fileName = filePath.split("/").pop() || "";
14
+ }
15
+ else {
16
+ fileName = randomUUID();
17
+ }
18
+ const uploadParams = {
19
+ Bucket: AWS_S3_BUCKET,
20
+ Key: `uploads/${Date.now()}-${fileName}`,
21
+ Body: fileStream,
22
+ ContentType: contentType,
23
+ };
24
+ const s3Client = new S3Client({
25
+ region: AWS_REGION,
26
+ credentials: {
27
+ accessKeyId: AWS_ACCESS_KEY_ID,
28
+ secretAccessKey: AWS_SECRET_ACCESS_KEY,
29
+ },
30
+ });
31
+ const command = new PutObjectCommand(uploadParams);
32
+ try {
33
+ await s3Client.send(command);
34
+ }
35
+ catch (error) {
36
+ console.error(error);
37
+ }
38
+ return {
39
+ location: `https://${AWS_S3_BUCKET}.s3.amazonaws.com/${uploadParams.Key}`,
40
+ };
41
+ }
42
+ async function sendImageToS3(props, awsS3Config) {
43
+ const { request } = props;
44
+ const uploadHandler = composeUploadHandlers(createFileUploadHandler({
45
+ maxPartSize: 5_000_000,
46
+ file: ({ filename }) => filename,
47
+ }));
48
+ const formData = await parseMultipartFormData(request, uploadHandler);
49
+ const file = formData.get("file");
50
+ if (!file)
51
+ throw new BadRequestError("No file uploaded");
52
+ const filterParams = getScopedParams(request);
53
+ const width = filterParams.get("w");
54
+ const height = filterParams.get("h");
55
+ if (width && height) {
56
+ const image = sharp(file.getFilePath());
57
+ const metadata = await image.metadata();
58
+ if (metadata.width && metadata.height) {
59
+ const widthDiff = Math.abs(metadata.width - +width);
60
+ const heightDiff = Math.abs(metadata.height - +height);
61
+ if (widthDiff > 10 || heightDiff > 10) {
62
+ return {
63
+ error: `Formato inválido ${metadata.width}px x ${metadata.height}px`,
64
+ };
65
+ }
66
+ }
67
+ }
68
+ const streamFile = fs.createReadStream(file.getFilePath());
69
+ const apiResponse = await s3_upload(streamFile, file.type, awsS3Config);
70
+ return { url: apiResponse.location };
71
+ }
72
+ export { sendImageToS3 };
package/package.json CHANGED
@@ -1,19 +1,22 @@
1
1
  {
2
2
  "name": "@arkyn/server",
3
- "version": "1.4.1",
3
+ "version": "1.4.3",
4
4
  "main": "./dist/bundle.js",
5
5
  "module": "./src/index.ts",
6
6
  "type": "module",
7
7
  "types": "./dist/index.d.ts",
8
8
  "scripts": {
9
- "build": "bunx tsc --project tsconfig.json",
9
+ "build": "rm -rf ./dist && bunx tsc --project tsconfig.json",
10
10
  "typecheck": "bunx tsc --project tsconfig.json --noEmit"
11
11
  },
12
12
  "dependencies": {
13
- "@remix-run/node": "^2.15.0"
13
+ "@arkyn/server": "*",
14
+ "@aws-sdk/client-s3": "^3.703.0",
15
+ "@remix-run/node": "^2.15.0",
16
+ "sharp": "^0.33.5"
14
17
  },
15
18
  "devDependencies": {
16
19
  "bun-types": "latest",
17
- "typescript": "^5.6.3"
20
+ "typescript": "^5.7.2"
18
21
  }
19
22
  }
package/src/index.ts CHANGED
@@ -14,3 +14,4 @@ export { globalErrorHandler } from "./helpers/globalErrorHandler";
14
14
  export { extractJsonFromRequest } from "./services/extractJsonFromRequest";
15
15
  export { formParse } from "./services/formParse";
16
16
  export { getScopedParams } from "./services/getScopedParams";
17
+ export { sendImageToS3 } from "./services/sendImageToS3";
@@ -0,0 +1,115 @@
1
+ import { PutObjectCommand, S3Client } from "@aws-sdk/client-s3";
2
+ import {
3
+ unstable_composeUploadHandlers as composeUploadHandlers,
4
+ unstable_createFileUploadHandler as createFileUploadHandler,
5
+ unstable_parseMultipartFormData as parseMultipartFormData,
6
+ type ActionFunctionArgs,
7
+ type NodeOnDiskFile,
8
+ } from "@remix-run/node";
9
+ import { randomUUID } from "crypto";
10
+ import fs from "fs";
11
+ import sharp from "sharp";
12
+ import { BadRequestError } from "../httpBadResponses/badRequest";
13
+ import { getScopedParams } from "./getScopedParams";
14
+
15
+ type AWSConfig = {
16
+ AWS_REGION: string;
17
+ AWS_ACCESS_KEY_ID: string;
18
+ AWS_SECRET_ACCESS_KEY: string;
19
+ AWS_S3_BUCKET: string;
20
+ };
21
+
22
+ async function s3_upload(
23
+ fileStream: fs.ReadStream,
24
+ contentType: string,
25
+ awsS3Config: AWSConfig
26
+ ) {
27
+ const {
28
+ AWS_ACCESS_KEY_ID,
29
+ AWS_REGION,
30
+ AWS_S3_BUCKET,
31
+ AWS_SECRET_ACCESS_KEY,
32
+ } = awsS3Config;
33
+
34
+ const filePath = fileStream.path;
35
+ let fileName = "";
36
+
37
+ if (typeof filePath === "string") {
38
+ fileName = filePath.split("/").pop() || "";
39
+ } else {
40
+ fileName = randomUUID();
41
+ }
42
+
43
+ const uploadParams = {
44
+ Bucket: AWS_S3_BUCKET,
45
+ Key: `uploads/${Date.now()}-${fileName}`,
46
+ Body: fileStream,
47
+ ContentType: contentType,
48
+ };
49
+
50
+ const s3Client = new S3Client({
51
+ region: AWS_REGION,
52
+ credentials: {
53
+ accessKeyId: AWS_ACCESS_KEY_ID,
54
+ secretAccessKey: AWS_SECRET_ACCESS_KEY,
55
+ },
56
+ });
57
+
58
+ const command = new PutObjectCommand(uploadParams);
59
+
60
+ try {
61
+ await s3Client.send(command);
62
+ } catch (error) {
63
+ console.error(error);
64
+ }
65
+
66
+ return {
67
+ location: `https://${AWS_S3_BUCKET}.s3.amazonaws.com/${uploadParams.Key}`,
68
+ };
69
+ }
70
+
71
+ async function sendImageToS3(
72
+ props: ActionFunctionArgs,
73
+ awsS3Config: AWSConfig
74
+ ) {
75
+ const { request } = props;
76
+
77
+ const uploadHandler = composeUploadHandlers(
78
+ createFileUploadHandler({
79
+ maxPartSize: 5_000_000,
80
+ file: ({ filename }) => filename,
81
+ })
82
+ );
83
+
84
+ const formData = await parseMultipartFormData(request, uploadHandler);
85
+ const file = formData.get("file") as unknown as NodeOnDiskFile;
86
+
87
+ if (!file) throw new BadRequestError("No file uploaded");
88
+
89
+ const filterParams = getScopedParams(request);
90
+ const width = filterParams.get("w");
91
+ const height = filterParams.get("h");
92
+
93
+ if (width && height) {
94
+ const image = sharp(file.getFilePath());
95
+ const metadata = await image.metadata();
96
+
97
+ if (metadata.width && metadata.height) {
98
+ const widthDiff = Math.abs(metadata.width - +width);
99
+ const heightDiff = Math.abs(metadata.height - +height);
100
+
101
+ if (widthDiff > 10 || heightDiff > 10) {
102
+ return {
103
+ error: `Formato inválido ${metadata.width}px x ${metadata.height}px`,
104
+ };
105
+ }
106
+ }
107
+ }
108
+
109
+ const streamFile = fs.createReadStream(file.getFilePath());
110
+ const apiResponse = await s3_upload(streamFile, file.type, awsS3Config);
111
+
112
+ return { url: apiResponse.location };
113
+ }
114
+
115
+ export { sendImageToS3 };
package/tsconfig.json CHANGED
@@ -1,5 +1,6 @@
1
1
  {
2
2
  "compilerOptions": {
3
+ "allowSyntheticDefaultImports": true,
3
4
  "declaration": true,
4
5
  "declarationDir": "./dist",
5
6
  "declarationMap": true,
@@ -15,6 +16,6 @@
15
16
  "types": ["bun-types"],
16
17
  "verbatimModuleSyntax": true
17
18
  },
18
- "exclude": ["dist", "node_modules"],
19
+ "exclude": ["node_modules", "dist"],
19
20
  "include": ["src/**/*.ts"]
20
21
  }
@@ -1,3 +0,0 @@
1
- declare function badRequest<T>(data: T, status?: number): import("@remix-run/node").TypedResponse<T>;
2
- export { badRequest };
3
- //# sourceMappingURL=badRequest.d.ts.map
@@ -1 +0,0 @@
1
- {"version":3,"file":"badRequest.d.ts","sourceRoot":"","sources":["../src/badRequest.ts"],"names":[],"mappings":"AAEA,iBAAS,UAAU,CAAC,CAAC,EAAE,IAAI,EAAE,CAAC,EAAE,MAAM,CAAC,EAAE,MAAM,8CAE9C;AAED,OAAO,EAAE,UAAU,EAAE,CAAC"}
@@ -1,5 +0,0 @@
1
- import { json } from "@remix-run/node";
2
- function badRequest(data, status) {
3
- return json(data, { status: status || 400 });
4
- }
5
- export { badRequest };
@@ -1,3 +0,0 @@
1
- import type { FormParseProps, FormParseReturnType } from "@arkyn/types";
2
- export declare function formParse<T extends FormParseProps>([formData, schema,]: T): FormParseReturnType<T>;
3
- //# sourceMappingURL=formParse.d.ts.map
@@ -1 +0,0 @@
1
- {"version":3,"file":"formParse.d.ts","sourceRoot":"","sources":["../src/formParse.ts"],"names":[],"mappings":"AAAA,OAAO,KAAK,EAAE,cAAc,EAAE,mBAAmB,EAAE,MAAM,cAAc,CAAC;AAExE,wBAAgB,SAAS,CAAC,CAAC,SAAS,cAAc,EAAE,CAClD,QAAQ,EACR,MAAM,EACP,EAAE,CAAC,GAAG,mBAAmB,CAAC,CAAC,CAAC,CAoB5B"}
package/dist/formParse.js DELETED
@@ -1,15 +0,0 @@
1
- export function formParse([formData, schema,]) {
2
- const zodResponse = schema.safeParse(formData);
3
- if (zodResponse.success === false) {
4
- const errorsArray = Object.entries(zodResponse.error.formErrors.fieldErrors);
5
- const errorsObject = Object.fromEntries(errorsArray.map((item) => [item[0], item[1]?.[0] || "Error"]));
6
- return {
7
- success: zodResponse.success,
8
- fieldErrors: errorsObject,
9
- fields: formData,
10
- };
11
- }
12
- else {
13
- return { success: zodResponse.success, data: zodResponse.data };
14
- }
15
- }
@@ -1,5 +0,0 @@
1
- /// <reference types="node" />
2
- /// <reference types="node" />
3
- declare function getScopedParams(request: Request, scope?: string): import("url").URLSearchParams;
4
- export { getScopedParams };
5
- //# sourceMappingURL=getScopedParams.d.ts.map
@@ -1 +0,0 @@
1
- {"version":3,"file":"getScopedParams.d.ts","sourceRoot":"","sources":["../src/getScopedParams.ts"],"names":[],"mappings":";;AAAA,iBAAS,eAAe,CAAC,OAAO,EAAE,OAAO,EAAE,KAAK,GAAE,MAAW,iCAW5D;AAED,OAAO,EAAE,eAAe,EAAE,CAAC"}
@@ -1,10 +0,0 @@
1
- function getScopedParams(request, scope = "") {
2
- const url = new URL(request.url);
3
- if (scope === "")
4
- return url.searchParams;
5
- const scopedSearchParams = Array.from(url.searchParams.entries())
6
- .filter(([key]) => key.startsWith(`${scope}:`))
7
- .map(([key, value]) => [key.replace(`${scope}:`, ""), value]);
8
- return new URLSearchParams(scopedSearchParams);
9
- }
10
- export { getScopedParams };
@@ -1,3 +0,0 @@
1
- import type { FormParseProps, FormParseReturnType } from "@arkyn/types";
2
- export declare function formParse<T extends FormParseProps>([formData, schema,]: T): FormParseReturnType<T>;
3
- //# sourceMappingURL=formParse.d.ts.map
@@ -1 +0,0 @@
1
- {"version":3,"file":"formParse.d.ts","sourceRoot":"","sources":["../../src/helpers/formParse.ts"],"names":[],"mappings":"AAAA,OAAO,KAAK,EAAE,cAAc,EAAE,mBAAmB,EAAE,MAAM,cAAc,CAAC;AAExE,wBAAgB,SAAS,CAAC,CAAC,SAAS,cAAc,EAAE,CAClD,QAAQ,EACR,MAAM,EACP,EAAE,CAAC,GAAG,mBAAmB,CAAC,CAAC,CAAC,CAoB5B"}
@@ -1,15 +0,0 @@
1
- export function formParse([formData, schema,]) {
2
- const zodResponse = schema.safeParse(formData);
3
- if (zodResponse.success === false) {
4
- const errorsArray = Object.entries(zodResponse.error.formErrors.fieldErrors);
5
- const errorsObject = Object.fromEntries(errorsArray.map((item) => [item[0], item[1]?.[0] || "Error"]));
6
- return {
7
- success: zodResponse.success,
8
- fieldErrors: errorsObject,
9
- fields: formData,
10
- };
11
- }
12
- else {
13
- return { success: zodResponse.success, data: zodResponse.data };
14
- }
15
- }
@@ -1,3 +0,0 @@
1
- declare function getScopedParams(request: Request, scope?: string): import("url").URLSearchParams;
2
- export { getScopedParams };
3
- //# sourceMappingURL=getScopedParams.d.ts.map
@@ -1 +0,0 @@
1
- {"version":3,"file":"getScopedParams.d.ts","sourceRoot":"","sources":["../../src/helpers/getScopedParams.ts"],"names":[],"mappings":"AAAA,iBAAS,eAAe,CAAC,OAAO,EAAE,OAAO,EAAE,KAAK,GAAE,MAAW,iCAW5D;AAED,OAAO,EAAE,eAAe,EAAE,CAAC"}
@@ -1,10 +0,0 @@
1
- function getScopedParams(request, scope = "") {
2
- const url = new URL(request.url);
3
- if (scope === "")
4
- return url.searchParams;
5
- const scopedSearchParams = Array.from(url.searchParams.entries())
6
- .filter(([key]) => key.startsWith(`${scope}:`))
7
- .map(([key, value]) => [key.replace(`${scope}:`, ""), value]);
8
- return new URLSearchParams(scopedSearchParams);
9
- }
10
- export { getScopedParams };
@@ -1,3 +0,0 @@
1
- declare function uuid(): string;
2
- export { uuid };
3
- //# sourceMappingURL=uuid.d.ts.map
@@ -1 +0,0 @@
1
- {"version":3,"file":"uuid.d.ts","sourceRoot":"","sources":["../../src/helpers/uuid.ts"],"names":[],"mappings":"AAEA,iBAAS,IAAI,IAAI,MAAM,CAEtB;AAED,OAAO,EAAE,IAAI,EAAE,CAAC"}
@@ -1,5 +0,0 @@
1
- import { v4 } from "uuid";
2
- function uuid() {
3
- return v4();
4
- }
5
- export { uuid };
@@ -1,3 +0,0 @@
1
- declare function validateCPF(rawCpf: string): boolean;
2
- export { validateCPF };
3
- //# sourceMappingURL=validateCPF.d.ts.map
@@ -1 +0,0 @@
1
- {"version":3,"file":"validateCPF.d.ts","sourceRoot":"","sources":["../../src/helpers/validateCPF.ts"],"names":[],"mappings":"AA2BA,iBAAS,WAAW,CAAC,MAAM,EAAE,MAAM,WAQlC;AAED,OAAO,EAAE,WAAW,EAAE,CAAC"}
@@ -1,36 +0,0 @@
1
- function removeNonDigits(cpf) {
2
- return cpf.replace(/\D/g, "");
3
- }
4
- function isInvalidLength(cpf) {
5
- const CPF_LENGTH = 11;
6
- return cpf.length !== CPF_LENGTH;
7
- }
8
- function hasAllDigitsEqual(cpf) {
9
- const [firstCpfDigit] = cpf;
10
- return [...cpf].every((digit) => digit === firstCpfDigit);
11
- }
12
- function calculateDigit(cpf, factor) {
13
- let total = 0;
14
- for (const digit of cpf) {
15
- if (factor > 1)
16
- total += parseInt(digit) * factor--;
17
- }
18
- const rest = total % 11;
19
- return rest < 2 ? 0 : 11 - rest;
20
- }
21
- function extractDigit(cpf) {
22
- return cpf.slice(9);
23
- }
24
- function validateCPF(rawCpf) {
25
- if (!rawCpf)
26
- return false;
27
- const cpf = removeNonDigits(rawCpf);
28
- if (isInvalidLength(cpf))
29
- return false;
30
- if (hasAllDigitsEqual(cpf))
31
- return false;
32
- const digit1 = calculateDigit(cpf, 10);
33
- const digit2 = calculateDigit(cpf, 11);
34
- return extractDigit(cpf) === `${digit1}${digit2}`;
35
- }
36
- export { validateCPF };
@@ -1,3 +0,0 @@
1
- declare function maskSensitiveData(jsonString: string, sensitiveKeys?: string[]): string;
2
- export { maskSensitiveData };
3
- //# sourceMappingURL=maskSensitiveData.d.ts.map
@@ -1 +0,0 @@
1
- {"version":3,"file":"maskSensitiveData.d.ts","sourceRoot":"","sources":["../../src/services/maskSensitiveData.ts"],"names":[],"mappings":"AAAA,iBAAS,iBAAiB,CACxB,UAAU,EAAE,MAAM,EAClB,aAAa,WAAgD,GAC5D,MAAM,CAkCR;AAED,OAAO,EAAE,iBAAiB,EAAE,CAAC"}
@@ -1,38 +0,0 @@
1
- function maskSensitiveData(jsonString, sensitiveKeys = ["password", "confirmPassword", "creditCard"]) {
2
- function maskValue(key, value) {
3
- if (sensitiveKeys.includes(key))
4
- return "****";
5
- return value;
6
- }
7
- function recursiveMask(obj) {
8
- if (Array.isArray(obj)) {
9
- return obj.map((item) => recursiveMask(item));
10
- }
11
- else if (obj !== null && typeof obj === "object") {
12
- return Object.keys(obj).reduce((acc, key) => {
13
- let value = obj[key];
14
- if (typeof value === "string") {
15
- try {
16
- const parsedValue = JSON.parse(value);
17
- if (typeof parsedValue === "object") {
18
- value = JSON.stringify(recursiveMask(parsedValue));
19
- }
20
- }
21
- catch (e) { }
22
- }
23
- acc[key] = recursiveMask(maskValue(key, value));
24
- return acc;
25
- }, {});
26
- }
27
- return obj;
28
- }
29
- try {
30
- const jsonObject = JSON.parse(jsonString);
31
- const maskedObject = recursiveMask(jsonObject);
32
- return JSON.stringify(maskedObject);
33
- }
34
- catch (error) {
35
- return jsonString;
36
- }
37
- }
38
- export { maskSensitiveData };
@@ -1,3 +0,0 @@
1
- declare function truncateLargeFields(jsonString: string, maxLength?: number): string;
2
- export { truncateLargeFields };
3
- //# sourceMappingURL=truncateLargeFields.d.ts.map
@@ -1 +0,0 @@
1
- {"version":3,"file":"truncateLargeFields.d.ts","sourceRoot":"","sources":["../../src/services/truncateLargeFields.ts"],"names":[],"mappings":"AAAA,iBAAS,mBAAmB,CAAC,UAAU,EAAE,MAAM,EAAE,SAAS,SAAO,GAAG,MAAM,CA6BzE;AAED,OAAO,EAAE,mBAAmB,EAAE,CAAC"}
@@ -1,29 +0,0 @@
1
- function truncateLargeFields(jsonString, maxLength = 1000) {
2
- function truncateValue(key, value) {
3
- if (typeof value === "string" && value.length > maxLength) {
4
- return `To large information: field as ${value.length} characters`;
5
- }
6
- return value;
7
- }
8
- function recursiveTruncate(obj) {
9
- if (Array.isArray(obj)) {
10
- return obj.map(recursiveTruncate);
11
- }
12
- else if (obj !== null && typeof obj === "object") {
13
- return Object.fromEntries(Object.entries(obj).map(([key, value]) => [
14
- key,
15
- truncateValue(key, recursiveTruncate(value)),
16
- ]));
17
- }
18
- return obj;
19
- }
20
- try {
21
- const parsedJson = JSON.parse(jsonString);
22
- const truncatedJson = recursiveTruncate(parsedJson);
23
- return JSON.stringify(truncatedJson);
24
- }
25
- catch (error) {
26
- throw new Error("Invalid JSON string");
27
- }
28
- }
29
- export { truncateLargeFields };