@servicelabsco/slabs-access-manager 0.1.244 → 0.1.246
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/dist/access/dtos/index.d.ts +1 -0
- package/dist/access/dtos/index.js +1 -0
- package/dist/access/dtos/index.js.map +1 -1
- package/dist/access/dtos/report.log.attributes.dto.d.ts +3 -0
- package/dist/access/dtos/report.log.attributes.dto.js +8 -0
- package/dist/access/dtos/report.log.attributes.dto.js.map +1 -0
- package/dist/access/entities/index.d.ts +1 -0
- package/dist/access/entities/index.js +1 -0
- package/dist/access/entities/index.js.map +1 -1
- package/dist/access/entities/report.log.entity.d.ts +17 -0
- package/dist/access/entities/report.log.entity.js +74 -0
- package/dist/access/entities/report.log.entity.js.map +1 -0
- package/dist/access/es6.classes.d.ts +9 -6
- package/dist/access/es6.classes.js +8 -0
- package/dist/access/es6.classes.js.map +1 -1
- package/dist/access/jobs/index.d.ts +1 -0
- package/dist/access/jobs/index.js +1 -0
- package/dist/access/jobs/index.js.map +1 -1
- package/dist/access/jobs/report.log.job.d.ts +7 -0
- package/dist/access/jobs/report.log.job.js +29 -0
- package/dist/access/jobs/report.log.job.js.map +1 -0
- package/dist/access/libraries/process.report.data.d.ts +3 -0
- package/dist/access/libraries/process.report.data.js +52 -4
- package/dist/access/libraries/process.report.data.js.map +1 -1
- package/dist/access/services/access.report.service.d.ts +1 -1
- package/dist/access/services/access.report.service.js.map +1 -1
- package/dist/access/services/es6.jobs.service.d.ts +3 -1
- package/dist/access/services/es6.jobs.service.js +5 -1
- package/dist/access/services/es6.jobs.service.js.map +1 -1
- package/dist/access/subscribers/index.d.ts +1 -0
- package/dist/access/subscribers/index.js +1 -0
- package/dist/access/subscribers/index.js.map +1 -1
- package/dist/access/subscribers/report.log.subscriber.d.ts +10 -0
- package/dist/access/subscribers/report.log.subscriber.js +34 -0
- package/dist/access/subscribers/report.log.subscriber.js.map +1 -0
- package/dist/accessUtility/es6.classes.d.ts +3 -3
- package/dist/accessUtility/jobs/analyse.bulk.upload.job.d.ts +6 -3
- package/dist/accessUtility/jobs/analyse.bulk.upload.job.js +8 -3
- package/dist/accessUtility/jobs/analyse.bulk.upload.job.js.map +1 -1
- package/dist/accessUtility/jobs/bulk.upload.item.job.js +2 -0
- package/dist/accessUtility/jobs/bulk.upload.item.job.js.map +1 -1
- package/dist/accessUtility/jobs/bulk.upload.job.d.ts +1 -0
- package/dist/accessUtility/jobs/bulk.upload.job.js +16 -3
- package/dist/accessUtility/jobs/bulk.upload.job.js.map +1 -1
- package/dist/accessUtility/jobs/push.to.bulk.item.job.d.ts +14 -2
- package/dist/accessUtility/jobs/push.to.bulk.item.job.js +24 -7
- package/dist/accessUtility/jobs/push.to.bulk.item.job.js.map +1 -1
- package/dist/accessUtility/libraries/analyse.bulk.upload.d.ts +16 -4
- package/dist/accessUtility/libraries/analyse.bulk.upload.js +53 -6
- package/dist/accessUtility/libraries/analyse.bulk.upload.js.map +1 -1
- package/dist/accessUtility/libraries/read.xls.file.d.ts +37 -29
- package/dist/accessUtility/libraries/read.xls.file.js +332 -191
- package/dist/accessUtility/libraries/read.xls.file.js.map +1 -1
- package/dist/app.controller.d.ts +5 -4
- package/dist/app.controller.js +10 -5
- package/dist/app.controller.js.map +1 -1
- package/dist/config/entity.constants.d.ts +2 -0
- package/dist/config/entity.constants.js +2 -0
- package/dist/config/entity.constants.js.map +1 -1
- package/dist/config/redis.config.d.ts +5 -0
- package/dist/config/redis.config.js +9 -0
- package/dist/config/redis.config.js.map +1 -0
- package/dist/migrations/1751443370363-CreateReportLogTable.d.ts +5 -0
- package/dist/migrations/1751443370363-CreateReportLogTable.js +25 -0
- package/dist/migrations/1751443370363-CreateReportLogTable.js.map +1 -0
- package/package.json +1 -1
package/dist/accessUtility/libraries/read.xls.file.js
@@ -4,175 +4,320 @@ exports.ReadXlsFile = void 0;
 const nestjs_utility_services_1 = require("@servicelabsco/nestjs-utility-services");
 const csv = require("csv-parser");
 const date_fns_1 = require("date-fns");
-const
-const
-const
+const http = require("http");
+const https = require("https");
+const stream_1 = require("stream");
+const url_1 = require("url");
 const ExcelJS = require("exceljs");
+var FileExtension;
+(function (FileExtension) {
+    FileExtension["CSV"] = "csv";
+    FileExtension["XLSX"] = "xlsx";
+})(FileExtension || (FileExtension = {}));
+var ColumnType;
+(function (ColumnType) {
+    ColumnType[ColumnType["STRING"] = 1] = "STRING";
+    ColumnType[ColumnType["NUMBER"] = 2] = "NUMBER";
+    ColumnType[ColumnType["DATE"] = 3] = "DATE";
+    ColumnType[ColumnType["DATETIME"] = 4] = "DATETIME";
+    ColumnType[ColumnType["BOOLEAN"] = 5] = "BOOLEAN";
+})(ColumnType || (ColumnType = {}));
 class ReadXlsFile {
     constructor() {
-        this.promises = [];
         this.rowNumber = 0;
         this.totalRows = 0;
-        this.throughCache = true;
         this.sheetCount = {};
     }
     async handle(types) {
-
-
-
-
-
-
-
-
-        if (extension === 'xlsx')
-            return this.processXls(types);
-        throw new nestjs_utility_services_1.OperationException(`Cannot read this extension file`);
-    }
-    getFiles() {
-        const upload = this.upload;
-        const url = decodeURIComponent(upload.file_url);
-        const size = upload.attributes?.size;
-        if (!size)
-            return [url];
-        if (size && size <= 20000000)
-            return [url];
-        const count = upload.attributes?.file_count;
-        if (!count)
-            return;
-        const extension = url.split('.').pop();
-        const baseFile = url.replace(`.${extension}`, '');
-        const urls = [];
-        for (let i = 0; i < count; ++i) {
-            const url = `${baseFile}_${i}.${extension}`;
-            urls.push(url);
+        try {
+            await this.initialize();
+            await this.processFiles(types);
+            await this.flushBatch();
+            return this.totalRows || this.rowNumber;
+        }
+        catch (error) {
+            throw new nestjs_utility_services_1.OperationException(`Failed to process files: ${error.message}`);
         }
-        return urls;
     }
-    async
-
+    async initialize() {
+        if (!this.upload?.file_url) {
+            throw new nestjs_utility_services_1.OperationException('Upload entity or file URL is missing');
+        }
+        this.config = await this.loadConfiguration();
+        this.resetCounters();
+    }
+    async loadConfiguration() {
+        const [stringExclusions, batchSize, maxFileLength] = await Promise.all([
+            this.loadStringExclusions(),
+            this.propertyService.get('bulk.item.batch.size', '100'),
+            this.propertyService.get('bulk.item.max.file.length', '1000000000'),
+        ]);
+        const redisKey = `bulk-upload-${this.upload.id}`;
+        await this.redisService.del(redisKey);
+        return {
+            stringExclusions,
+            batchSize: parseInt(batchSize, 10),
+            maxFileLength: parseInt(maxFileLength, 10),
+            redisKey,
+        };
+    }
+    async loadStringExclusions() {
+        const defaultValue = JSON.stringify(["'", '`', '"']);
+        const value = await this.propertyService.get('bulk.item.string.exclusions', defaultValue);
+        return JSON.parse(value);
+    }
+    resetCounters() {
+        this.rowNumber = 0;
+        this.totalRows = 0;
+        this.sheetCount = {};
+    }
+    async processFiles(types) {
+        const extension = this.getFileExtension();
+        const files = this.getFileUrls();
         for (const type of types) {
             this.rowNumber = 0;
-            for (const
-                await this.
-            await this.readWorksheet(type);
+            for (const fileUrl of files) {
+                await this.processFile(fileUrl, type, extension);
             }
             this.totalRows += this.rowNumber;
         }
     }
-
-        const
-
-
-        for (const file of files) {
-            const buffer = await this.uploadService.getBufferFromUrl(file);
-            const tempPath = (0, path_1.join)(tmpDirectory, `./${this.upload.uuid}.${nestjs_utility_services_1.PlatformUtility.generateRandomAlpha(6).toLowerCase()}.${sheet}.csv`);
-            (0, fs_1.writeFileSync)(tempPath, buffer);
-            await this.readCsv(tempPath, types[0], sheet);
-            await (0, fs_1.unlink)(tempPath, () => { });
+    getFileExtension() {
+        const extension = this.upload.file_url.split('.').pop()?.toLowerCase();
+        if (!extension || !Object.values(FileExtension).includes(extension)) {
+            throw new nestjs_utility_services_1.OperationException(`Unsupported file extension: ${extension}`);
         }
+        return extension;
     }
-
-
+    getFileUrls() {
+        const url = decodeURIComponent(this.upload.file_url);
+        const size = this.upload.attributes?.size;
+        if (!size || size <= this.config.maxFileLength) {
+            return [url];
+        }
+        const count = this.upload.attributes?.file_count;
+        if (!count) {
+            throw new nestjs_utility_services_1.OperationException('File count not specified for large file');
+        }
+        return this.generateSplitFileUrls(url, count);
     }
-
-
-        const
-        return
+    generateSplitFileUrls(baseUrl, count) {
+        const extension = baseUrl.split('.').pop();
+        const baseFile = baseUrl.replace(`.${extension}`, '');
+        return Array.from({ length: count }, (_, i) => `${baseFile}_${i}.${extension}`);
     }
-    async
-
-
-
-
-
-
-
+    async processFile(fileUrl, type, extension) {
+        try {
+            switch (extension) {
+                case FileExtension.CSV:
+                    await this.processCsvFile(fileUrl, type);
+                    break;
+                case FileExtension.XLSX:
+                    await this.processExcelFile(fileUrl, type);
+                    break;
+                default:
+                    throw new nestjs_utility_services_1.OperationException(`Unsupported file extension: ${extension}`);
+            }
+        }
+        catch (error) {
+            throw new nestjs_utility_services_1.OperationException(`Failed to process file ${fileUrl}: ${error.message}`);
+        }
     }
-    async
-
+    async processCsvFile(fileUrl, type) {
+        const sheet = type.name.toLowerCase();
+        await this.readCsvFromStream(fileUrl, type, sheet);
+        await this.flushBatch();
     }
-    async
-
-
+    async processExcelFile(fileUrl, type) {
+        await this.loadExcelWorkbook(fileUrl);
+        await this.processWorksheet(type);
     }
-    async
-
+    async loadExcelWorkbook(fileUrl) {
+        try {
+            this.workbook = new ExcelJS.Workbook();
+            const buffer = await this.uploadService.getBufferFromUrl(fileUrl);
+            await this.workbook.xlsx.load(buffer);
+        }
+        catch (error) {
+            throw new nestjs_utility_services_1.OperationException(`Failed to load Excel workbook: ${error.message}`);
+        }
+    }
+    async processWorksheet(type) {
+        if (type.sheet?.all) {
+            await this.processAllSheets(type);
+        }
+        else if (type.sheet?.index) {
+            await this.processSheetByIndex(type);
+        }
+        else if (type.sheet?.name) {
+            await this.processSheetByName(type);
+        }
+        else {
+            await this.processAllSheets(type);
+        }
+    }
+    async processAllSheets(type) {
+        const maxSheets = 100;
+        for (let i = 1; i <= maxSheets; i++) {
             const worksheet = this.workbook.getWorksheet(i);
-
-            if (!sheet)
+            if (!worksheet?.name)
                 continue;
+            const sheetName = worksheet.name;
             this.totalRows += this.rowNumber;
-            this.rowNumber = this.sheetCount[
-            await this.
-            this.
+            this.rowNumber = this.sheetCount[sheetName] || 0;
+            await this.processSheet(type, sheetName);
+            this.sheetCount[sheetName] = this.rowNumber;
+        }
+    }
+    async processSheetByIndex(type) {
+        const worksheet = this.workbook.getWorksheet(type.sheet.index);
+        if (!worksheet) {
+            throw new nestjs_utility_services_1.OperationException(`Worksheet at index ${type.sheet.index} not found`);
         }
+        await this.processSheet(type, worksheet.name);
     }
-
-
-
-
-
-
-
-
-
-        await this.workbook.csv.write(writeStream, { sheetName: sheet });
-        await this.readCsv(tempPath, type, sheet);
-        return (0, fs_1.unlink)(tempPath, () => { });
-    }
-    async readCsv(tempPath, type, sheet) {
-        const fn = this;
-        const columns = await this.getDictionaryOfType(type);
+    async processSheetByName(type) {
+        await this.processSheet(type, type.sheet.name);
+    }
+    async processSheet(type, sheetName) {
+        const csvBuffer = await this.convertSheetToCsvBuffer(sheetName);
+        await this.readCsvFromBuffer(csvBuffer, type, sheetName);
+        await this.flushBatch();
+    }
+    async convertSheetToCsvBuffer(sheetName) {
         return new Promise((resolve, reject) => {
-
-
-
-
-
-
-
-
-
+            const chunks = [];
+            const writeStream = new stream_1.Writable({
+                write(chunk, encoding, callback) {
+                    chunks.push(chunk);
+                    callback();
+                },
+            });
+            writeStream.on('finish', () => {
+                resolve(Buffer.concat(chunks));
+            });
+            writeStream.on('error', (error) => {
+                reject(error);
+            });
+            this.workbook.csv.write(writeStream, { sheetName });
+        });
+    }
+    async readCsvFromStream(fileUrl, type, sheetName) {
+        const columns = await this.buildColumnDictionary(type);
+        const stream = await this.createStreamFromUrl(fileUrl);
+        return new Promise((resolve, reject) => {
+            const csvStream = stream.pipe(csv({
+                mapHeaders: ({ header }) => this.mapHeader(header, columns.headers),
+                mapValues: ({ header, value }) => this.castValue(columns, header, value),
+            }));
+            csvStream
+                .on('data', async (data) => {
+                try {
+                    this.rowNumber++;
+                    await this.processRow(sheetName, this.rowNumber, data, type, columns);
+                }
+                catch (error) {
+                    reject(new Error(`Error processing row ${this.rowNumber}: ${error.message}`));
+                }
             })
-                .on('end', () =>
-
+                .on('end', () => resolve())
+                .on('error', (error) => reject(new Error(`CSV parsing error: ${error.message}`)));
+        });
+    }
+    async createStreamFromUrl(fileUrl) {
+        const url = new url_1.URL(fileUrl);
+        const protocol = url.protocol === 'https:' ? https : http;
+        return new Promise((resolve, reject) => {
+            const request = protocol.get(fileUrl, (response) => {
+                if (response.statusCode !== 200) {
+                    reject(new Error(`Failed to fetch file: ${response.statusCode}`));
+                    return;
+                }
+                resolve(response);
             });
+            request.on('error', (error) => {
+                reject(error);
+            });
+        });
+    }
+    async readCsvFromBuffer(buffer, type, sheetName) {
+        const columns = await this.buildColumnDictionary(type);
+        return new Promise((resolve, reject) => {
+            const bufferStream = new stream_1.Readable();
+            bufferStream.push(buffer);
+            bufferStream.push(null);
+            const csvStream = bufferStream.pipe(csv({
+                mapHeaders: ({ header }) => this.mapHeader(header, columns.headers),
+                mapValues: ({ header, value }) => this.castValue(columns, header, value),
+            }));
+            csvStream
+                .on('data', async (data) => {
+                try {
+                    this.rowNumber++;
+                    await this.processRow(sheetName, this.rowNumber, data, type, columns);
+                }
+                catch (error) {
+                    reject(new Error(`Error processing row ${this.rowNumber}: ${error.message}`));
+                }
+            })
+                .on('end', () => resolve())
+                .on('error', (error) => reject(new Error(`CSV parsing error: ${error.message}`)));
         });
     }
-    async processRow(sheet,
-        this.
-        const shouldIgnore = this.
-        if (shouldIgnore)
-
-        if (this.throughCache) {
-            const obj = { sheet, row, data, bulk_upload_id: this.upload.id, type_id: type.id };
-            return this.pushToBulkItemJob.dispatch(obj);
+    async processRow(sheet, row_num, data, type, columns) {
+        const processedData = this.applyDataTransformations(type, data);
+        const shouldIgnore = this.shouldIgnoreRow(processedData, columns.mandatory);
+        if (shouldIgnore) {
+            processedData.ignore_data = shouldIgnore;
         }
-        const
-
-
+        const rowData = {
+            sheet,
+            row_num,
+            data: processedData,
+            bulk_upload_id: this.upload.id,
+            type_id: type.id,
+        };
+        return this.addToBatch(rowData);
     }
-
-
+    applyDataTransformations(type, data) {
+        const script = type?.attributes?.script;
+        if (!script?.script) {
             return data;
-
-
+        }
+        try {
+            eval(script.script);
+            return data;
+        }
+        catch (error) {
+            throw new nestjs_utility_services_1.OperationException(`Data transformation script failed: ${error.message}`);
+        }
     }
-
-
-
-
-
-
-
-
-
-
+    shouldIgnoreRow(data, mandatoryFields) {
+        return mandatoryFields.some((field) => !data[field]);
+    }
+    async addToBatch(rowData) {
+        await this.redisService.lpush(this.config.redisKey, JSON.stringify(rowData));
+    }
+    async flushBatch() {
+        const key = this.config.redisKey;
+        let items = [];
+        while (true) {
+            const item = await this.redisService.lpop(key);
+            if (!item)
+                return this.pushToQueue(items);
+            items.push(JSON.parse(item));
+            if (items.length >= this.config.batchSize) {
+                await this.pushToQueue(items);
+                items = [];
+            }
         }
-        return this.getStringValue(value);
     }
-
+    async pushToQueue(items) {
+        if (!items?.length)
+            return;
+        return this.pushToBulkItemJob.dispatch(items);
+    }
+    async buildColumnDictionary(type) {
         const headers = {};
         const stringColumns = [];
         const numberColumns = [];
@@ -180,95 +325,91 @@ class ReadXlsFile {
         const dateColumns = [];
         const dateObject = {};
         const mandatory = [];
-        columns.forEach((column) => {
+        type.columns.forEach((column) => {
            const header = column.identifier;
            headers[column.name] = header;
-
-
-
-
-
-
-
-
-
+            switch (column.column_type_id) {
+                case ColumnType.STRING:
+                    stringColumns.push(header);
+                    break;
+                case ColumnType.NUMBER:
+                    numberColumns.push(header);
+                    break;
+                case ColumnType.BOOLEAN:
+                    booleanColumns.push(header);
+                    break;
+                case ColumnType.DATE:
+                case ColumnType.DATETIME:
+                    dateColumns.push(header);
+                    dateObject[header] = column;
+                    break;
            }
-            if (column.is_mandatory && column.column_type_id !==
+            if (column.is_mandatory && column.column_type_id !== ColumnType.BOOLEAN) {
                mandatory.push(header);
+            }
        });
        return { headers, stringColumns, numberColumns, booleanColumns, dateColumns, dateObject, mandatory };
    }
    mapHeader(header, headers) {
-        const
+        const normalizedHeader = header
            .replace(/[^a-zA-Z0-9_ ]/g, ' ')
            .trim()
            .replace(/\s+/g, '_')
            .trim();
-        return headers[
+        return headers[normalizedHeader] || normalizedHeader.toLowerCase().replaceAll(' ', '_');
    }
-
-        if (
+    castValue(columns, header, value) {
+        if (columns.stringColumns.includes(header)) {
+            return this.castToString(value);
+        }
+        if (columns.numberColumns.includes(header)) {
+            return this.castToNumber(value);
+        }
+        if (columns.booleanColumns.includes(header)) {
+            return this.castToBoolean(value);
+        }
+        if (columns.dateColumns.includes(header)) {
+            const column = columns.dateObject[header];
+            return this.castToDate(value, column);
+        }
+        return this.castToString(value);
+    }
+    castToString(value) {
+        if (!value)
            return null;
-        return
+        return value.replace(new RegExp(`[${this.config.stringExclusions.join('')}]`, 'g'), '').trim();
    }
-
-        if (!
+    castToNumber(value) {
+        if (!value)
            return null;
-
-
-
-            return null;
+        const cleanedValue = value.replace(/[^a-zA-Z0-9.-]/g, '').trim();
+        const parsed = parseFloat(cleanedValue);
+        return isNaN(parsed) ? null : parsed;
    }
-
-        if (!
-            return false;
-        if (!str.length)
+    castToBoolean(value) {
+        if (!value || !value.trim())
            return false;
-
-
-
-
+        const cleanedValue = value
+            .replace(/[^a-zA-Z0-9]/g, '')
+            .trim()
+            .toLowerCase();
+        return cleanedValue === 'true';
    }
-
-        if (!
+    castToDate(value, column) {
+        if (!value)
            return null;
        try {
-            const
+            const date = this.parseDate(value, column);
            const offset = column.attributes?.offset || 0;
-            return
+            return date instanceof Date && isFinite(date.getTime()) ? (0, date_fns_1.subMinutes)(date, offset) : null;
        }
        catch (error) {
            return null;
        }
    }
-
-        for (const str of mandatory) {
-            if (!data[str])
-                return true;
-        }
-        return false;
-    }
-    getDate(str, column) {
+    parseDate(value, column) {
        const format = column.attributes?.format;
-
-            return new Date(str);
-        return (0, date_fns_1.parse)(str, format, new Date());
-    }
-    async ensureTempDirectory() {
-        const appRootPath = (0, path_1.dirname)(require.main.filename);
-        const tempDir = (0, path_1.resolve)(appRootPath, '..', 'tmp');
-        if (!(0, fs_1.existsSync)(tempDir)) {
-            (0, fs_1.mkdirSync)(tempDir);
-        }
-        return tempDir;
-    }
-    async setStringExclusions() {
-        if (this.stringExclusions)
-            return this.stringExclusions;
-        const defaultValue = JSON.stringify(["'", '`', '"']);
-        const value = await this.propertyService.get('bulk.item.string.exclusions', defaultValue);
-        this.stringExclusions = JSON.parse(value);
-        return this.stringExclusions;
+        return format ? (0, date_fns_1.parse)(value, format, new Date()) : new Date(value);
    }
 }
 exports.ReadXlsFile = ReadXlsFile;