pim-import 5.0.3 → 5.1.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/dist/libs/s3.d.ts +5 -5
- package/dist/libs/s3.js +38 -37
- package/dist/pim/methods/products.js +1 -1
- package/package.json +3 -2
package/dist/libs/s3.d.ts
CHANGED
@@ -1,4 +1,4 @@
-import
+import { S3Client } from "@aws-sdk/client-s3";
 import { AvailableCatalogs } from "../types";
 interface AWSConfig {
     accessKeyId: string;
@@ -9,14 +9,14 @@ interface Config extends AWSConfig {
     bucket: string;
 }
 export declare let config: Config;
-export declare let
+export declare let s3Client: S3Client;
 export declare const checkConfig: (skipInit?: boolean) => void;
 export declare const init: (opts?: Config) => void;
-export declare const listObjects: (prefix?: string) => Promise<import("aws-sdk/
-export declare const upload: (url: string, fileName?: string, path?: string, metaData?: any) => Promise<
+export declare const listObjects: (prefix?: string) => Promise<import("@aws-sdk/client-s3").ListObjectsV2CommandOutput>;
+export declare const upload: (url: string, fileName?: string, path?: string, metaData?: any) => Promise<import("@aws-sdk/client-s3").CompleteMultipartUploadCommandOutput>;
 export declare const getFileFromS3: (path: string, returnUrl?: boolean) => Promise<any>;
 export declare const saveCatalogToS3: (catalog: AvailableCatalogs) => Promise<string>;
 export declare const saveAllProductsToS3: (catalog: AvailableCatalogs, lastModified: string) => Promise<string>;
-export declare const saveJsonToS3: (obj: any, fileName?: string, path?: string) => Promise<
+export declare const saveJsonToS3: (obj: any, fileName?: string, path?: string) => Promise<import("@aws-sdk/client-s3").CompleteMultipartUploadCommandOutput>;
 export declare const savePDFToS3: (pdf: any, fileName: string, path?: string) => Promise<string | undefined>;
 export {};
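Note on the declaration changes above: the v2 return types (truncated in this diff) are replaced by v3 command output types, whose fields are all optional. A minimal consumer-side sketch in TypeScript; the deep import path pim-import/dist/libs/s3 is an assumption based on the file layout, not a documented entry point:

import type { ListObjectsV2CommandOutput } from "@aws-sdk/client-s3";
// hypothetical deep import; the package only advertises ./dist/index.js
import { init, listObjects } from "pim-import/dist/libs/s3";

async function countObjects(prefix: string): Promise<number> {
    init(); // config may also be picked up from the environment (dotenv is loaded)
    const out: ListObjectsV2CommandOutput = await listObjects(prefix);
    // v3 output fields are optional, so guard before reading them
    return out.KeyCount ?? out.Contents?.length ?? 0;
}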
package/dist/libs/s3.js
CHANGED
@@ -3,10 +3,11 @@ var __importDefault = (this && this.__importDefault) || function (mod) {
     return (mod && mod.__esModule) ? mod : { "default": mod };
 };
 Object.defineProperty(exports, "__esModule", { value: true });
-exports.savePDFToS3 = exports.saveJsonToS3 = exports.saveAllProductsToS3 = exports.saveCatalogToS3 = exports.getFileFromS3 = exports.upload = exports.listObjects = exports.init = exports.checkConfig = exports.s3 = exports.config = void 0;
+exports.savePDFToS3 = exports.saveJsonToS3 = exports.saveAllProductsToS3 = exports.saveCatalogToS3 = exports.getFileFromS3 = exports.upload = exports.listObjects = exports.init = exports.checkConfig = exports.s3Client = exports.config = void 0;
 const dotenv_1 = __importDefault(require("dotenv"));
 const axios_1 = __importDefault(require("@atoms-studio/axios"));
-const aws_sdk_1 = __importDefault(require("aws-sdk"));
+const client_s3_1 = require("@aws-sdk/client-s3");
+const lib_storage_1 = require("@aws-sdk/lib-storage");
 const utils_1 = require("../utils");
 const config_1 = require("../pim/config");
 const https_1 = __importDefault(require("https"));
@@ -33,12 +34,13 @@ const checkConfig = (skipInit = false) => {
     else if (!exports.config.bucket) {
         throw new Error(beforeMessage + "bucket not found. " + afterMessage);
     }
-
-
-
+    exports.s3Client = new client_s3_1.S3Client({
+        credentials: {
+            accessKeyId: exports.config.accessKeyId,
+            secretAccessKey: exports.config.secretAccessKey,
+        },
         region: exports.config.region,
     });
-    exports.s3 = new aws_sdk_1.default.S3();
 };
 exports.checkConfig = checkConfig;
 const init = (opts) => {
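Note: the rewritten checkConfig constructs a scoped client rather than a shared v2 instance (the removed exports.s3 = new aws_sdk_1.default.S3()). A minimal standalone sketch of the same construction, assuming credentials come from hypothetical environment variables:

import { S3Client } from "@aws-sdk/client-s3";

// v3 scopes credentials to the client instance; no global AWS config is mutated
const s3Client = new S3Client({
    credentials: {
        accessKeyId: process.env.AWS_ACCESS_KEY_ID ?? "",
        secretAccessKey: process.env.AWS_SECRET_ACCESS_KEY ?? "",
    },
    region: process.env.AWS_REGION,
});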
@@ -56,20 +58,18 @@ const init = (opts) => {
 exports.init = init;
 const listObjects = async (prefix = "") => {
     (0, exports.checkConfig)();
-    const
+    const command = new client_s3_1.ListObjectsV2Command({
         Bucket: exports.config.bucket,
         Prefix: prefix || "",
-    };
-
-        .
-        .promise()
-        .then((data) => {
+    });
+    try {
+        const data = await exports.s3Client.send(command);
         return data;
-    }
-
+    }
+    catch (err) {
         console.log(err);
         throw new Error("Not exist folder exception is not catch here!");
-    }
+    }
 };
 exports.listObjects = listObjects;
 const upload = async (url, fileName = "", path = "", metaData = {}) => {
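Note: the v2 chain .listObjectsV2(params).promise() becomes an explicit command passed to s3Client.send(). Unchanged by the migration: one ListObjectsV2 response still carries at most 1000 keys. A sketch of draining every page with v3's paginator helper, which this package does not use:

import { S3Client, paginateListObjectsV2 } from "@aws-sdk/client-s3";

async function listAllKeys(client: S3Client, bucket: string, prefix = ""): Promise<string[]> {
    const keys: string[] = [];
    // the paginator re-issues ListObjectsV2 with ContinuationToken until exhausted
    for await (const page of paginateListObjectsV2({ client }, { Bucket: bucket, Prefix: prefix })) {
        for (const obj of page.Contents ?? []) {
            if (obj.Key) keys.push(obj.Key);
        }
    }
    return keys;
}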
@@ -92,23 +92,24 @@ const upload = async (url, fileName = "", path = "", metaData = {}) => {
     if (!contentLength) {
         contentLength = Number(fileRes.data.byteLength);
     }
-    const
+    const uploadParams = {
         Bucket: exports.config.bucket,
         Key: `${path}${fileName}`,
         ContentType: fileRes.headers["content-type"],
-        ContentLength: contentLength,
         Body: fileRes.data,
         Metadata: metaData,
     };
-
-        .
-
-
+    try {
+        const upload = new lib_storage_1.Upload({
+            client: exports.s3Client,
+            params: uploadParams,
+        });
+        const data = await upload.done();
         return data;
-    }
-
+    }
+    catch (err) {
         throw new Error(err);
-    }
+    }
 };
 exports.upload = upload;
 const getFileFromS3 = async (path, returnUrl = false) => {
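Note: upload() now goes through the Upload helper from @aws-sdk/lib-storage, which buffers the body into parts and switches to multipart automatically; that is presumably why the explicit ContentLength entry could be dropped from the params. A minimal sketch of the same pattern with a hypothetical local file:

import { S3Client } from "@aws-sdk/client-s3";
import { Upload } from "@aws-sdk/lib-storage";
import { createReadStream } from "node:fs";

async function uploadFile(client: S3Client, bucket: string, key: string, filePath: string) {
    const upload = new Upload({
        client,
        params: { Bucket: bucket, Key: key, Body: createReadStream(filePath) },
    });
    // optional progress hook, emitted per uploaded part
    upload.on("httpUploadProgress", (p) => console.log(p.loaded, "/", p.total));
    return upload.done(); // resolves to CompleteMultipartUploadCommandOutput
}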
@@ -175,26 +176,27 @@ const saveJsonToS3 = async (obj, fileName = "", path = "") => {
     if (path && path.substr(-1) !== "/") {
         path += "/";
     }
-    const
+    const uploadParams = {
         Bucket: exports.config.bucket,
         Key: `${path}${fileName}`,
         ContentEncoding: "base64",
         ContentType: "application/json",
-        ContentLength: Number(buf.byteLength),
         Body: buf,
     };
-
-        .
-
-
+    try {
+        const upload = new lib_storage_1.Upload({
+            client: exports.s3Client,
+            params: uploadParams,
+        });
+        const res = await upload.done();
         const timeEnd = new Date();
         const seconds = (0, utils_1.secondBetweenTwoDate)(timeStart, timeEnd);
         (0, logs_1.log)(`Request time: ${seconds} seconds - saveJsonToS3`);
         return res;
-    }
-
+    }
+    catch (err) {
         throw new Error(err);
-    }
+    }
 };
 exports.saveJsonToS3 = saveJsonToS3;
 const savePDFToS3 = async (pdf, fileName, path = "") => {
@@ -205,17 +207,16 @@ const savePDFToS3 = async (pdf, fileName, path = "") => {
     if (path && path.substr(-1) !== "/") {
         path += "/";
     }
-    const s3Params = {
+    const command = new client_s3_1.PutObjectCommand({
         Bucket: exports.config.bucket,
         Key: `${path}${fileName}`,
         Body: pdf,
         ContentType: "application/pdf",
         ServerSideEncryption: "AES256",
-    };
+    });
     let url;
     try {
-        exports.
-        await exports.s3.putObject(s3Params).promise();
+        await exports.s3Client.send(command);
         url = `https://${exports.config.bucket}.s3.${exports.config.region}.amazonaws.com/${path}${fileName}`;
     }
     catch (err) {
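Note: savePDFToS3 keeps a single-shot PutObjectCommand instead of lib-storage's Upload, a reasonable fit for a fully in-memory body such as a rendered PDF. An equivalent standalone sketch:

import { S3Client, PutObjectCommand } from "@aws-sdk/client-s3";

async function putPdf(client: S3Client, bucket: string, region: string, key: string, pdf: Buffer): Promise<string> {
    await client.send(new PutObjectCommand({
        Bucket: bucket,
        Key: key,
        Body: pdf,
        ContentType: "application/pdf",
        ServerSideEncryption: "AES256", // SSE-S3, as in the diff above
    }));
    // PutObject returns no URL, hence the hand-built virtual-hosted-style URL
    return `https://${bucket}.s3.${region}.amazonaws.com/${key}`;
}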
package/dist/pim/methods/products.js
CHANGED
@@ -430,7 +430,7 @@ const getProductAssets = async (pimAssets, productCode) => {
     (0, logs_1.log)(`Asset not exists, importing it to S3 path: ${path}/${fileName}`);
     try {
         const res = await (0, s3_1.upload)(pimAsset.url, fileName, path);
-        assetUrl = res.Location;
+        assetUrl = res.Location || "";
     }
     catch (err) {
         (0, logs_1.log)(`Unable to upload file: ${err.response.status} - ${err.response.statusText}`, "WARN");
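Note on the one-line products.js change: v2's ManagedUpload.SendData declared Location as a required string, while v3's CompleteMultipartUploadCommandOutput types it as string | undefined, so the new || "" fallback keeps assetUrl a plain string. As a type-level sketch:

import type { CompleteMultipartUploadCommandOutput } from "@aws-sdk/client-s3";

// hypothetical helper mirroring the res.Location || "" fallback in the diff
function assetLocation(res: CompleteMultipartUploadCommandOutput): string {
    return res.Location ?? "";
}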
package/package.json
CHANGED
@@ -1,6 +1,6 @@
 {
   "name": "pim-import",
-  "version": "5.0.3",
+  "version": "5.1.0",
   "description": "",
   "main": "./dist/index.js",
   "types": "./dist/index.d.ts",
@@ -36,8 +36,9 @@
     "@atoms-studio/axios": "^0.26.1",
     "@sentry/node": "^6.10.0",
     "@sentry/tracing": "^6.10.0",
+    "@aws-sdk/client-s3": "^3.600.0",
+    "@aws-sdk/lib-storage": "^3.600.0",
     "algoliasearch": "^4.10.3",
-    "aws-sdk": "^2.895.0",
     "contentful": "^10.5.0",
     "contentful-management": "^10.40.0",
     "csv-parser": "^3.0.0",