@solidstarters/solid-core 1.2.89 → 1.2.91

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (79)
  1. package/dist/controllers/import-transaction-error-log.controller.d.ts +41 -0
  2. package/dist/controllers/import-transaction-error-log.controller.d.ts.map +1 -0
  3. package/dist/controllers/import-transaction-error-log.controller.js +179 -0
  4. package/dist/controllers/import-transaction-error-log.controller.js.map +1 -0
  5. package/dist/controllers/import-transaction.controller.d.ts +46 -0
  6. package/dist/controllers/import-transaction.controller.d.ts.map +1 -0
  7. package/dist/controllers/import-transaction.controller.js +236 -0
  8. package/dist/controllers/import-transaction.controller.js.map +1 -0
  9. package/dist/dtos/create-import-transaction-error-log.dto.d.ts +10 -0
  10. package/dist/dtos/create-import-transaction-error-log.dto.d.ts.map +1 -0
  11. package/dist/dtos/create-import-transaction-error-log.dto.js +65 -0
  12. package/dist/dtos/create-import-transaction-error-log.dto.js.map +1 -0
  13. package/dist/dtos/create-import-transaction.dto.d.ts +14 -0
  14. package/dist/dtos/create-import-transaction.dto.d.ts.map +1 -0
  15. package/dist/dtos/create-import-transaction.dto.js +90 -0
  16. package/dist/dtos/create-import-transaction.dto.js.map +1 -0
  17. package/dist/dtos/import-instructions.dto.d.ts +19 -0
  18. package/dist/dtos/import-instructions.dto.d.ts.map +1 -0
  19. package/dist/dtos/import-instructions.dto.js +110 -0
  20. package/dist/dtos/import-instructions.dto.js.map +1 -0
  21. package/dist/dtos/update-import-transaction-error-log.dto.d.ts +11 -0
  22. package/dist/dtos/update-import-transaction-error-log.dto.d.ts.map +1 -0
  23. package/dist/dtos/update-import-transaction-error-log.dto.js +72 -0
  24. package/dist/dtos/update-import-transaction-error-log.dto.js.map +1 -0
  25. package/dist/dtos/update-import-transaction.dto.d.ts +15 -0
  26. package/dist/dtos/update-import-transaction.dto.d.ts.map +1 -0
  27. package/dist/dtos/update-import-transaction.dto.js +91 -0
  28. package/dist/dtos/update-import-transaction.dto.js.map +1 -0
  29. package/dist/entities/import-transaction-error-log.entity.d.ts +11 -0
  30. package/dist/entities/import-transaction-error-log.entity.d.ts.map +1 -0
  31. package/dist/entities/import-transaction-error-log.entity.js +53 -0
  32. package/dist/entities/import-transaction-error-log.entity.js.map +1 -0
  33. package/dist/entities/import-transaction.entity.d.ts +11 -0
  34. package/dist/entities/import-transaction.entity.d.ts.map +1 -0
  35. package/dist/entities/import-transaction.entity.js +55 -0
  36. package/dist/entities/import-transaction.entity.js.map +1 -0
  37. package/dist/index.d.ts +2 -0
  38. package/dist/index.d.ts.map +1 -1
  39. package/dist/index.js +2 -0
  40. package/dist/index.js.map +1 -1
  41. package/dist/seeders/seed-data/solid-core-metadata.json +316 -0
  42. package/dist/services/csv.service.d.ts +11 -1
  43. package/dist/services/csv.service.d.ts.map +1 -1
  44. package/dist/services/csv.service.js +72 -5
  45. package/dist/services/csv.service.js.map +1 -1
  46. package/dist/services/excel.service.d.ts +11 -1
  47. package/dist/services/excel.service.d.ts.map +1 -1
  48. package/dist/services/excel.service.js +70 -2
  49. package/dist/services/excel.service.js.map +1 -1
  50. package/dist/services/import-transaction-error-log.service.d.ts +22 -0
  51. package/dist/services/import-transaction-error-log.service.d.ts.map +1 -0
  52. package/dist/services/import-transaction-error-log.service.js +56 -0
  53. package/dist/services/import-transaction-error-log.service.js.map +1 -0
  54. package/dist/services/import-transaction.service.d.ts +64 -0
  55. package/dist/services/import-transaction.service.d.ts.map +1 -0
  56. package/dist/services/import-transaction.service.js +231 -0
  57. package/dist/services/import-transaction.service.js.map +1 -0
  58. package/dist/solid-core.module.d.ts.map +1 -1
  59. package/dist/solid-core.module.js +13 -1
  60. package/dist/solid-core.module.js.map +1 -1
  61. package/dist/tsconfig.tsbuildinfo +1 -1
  62. package/package.json +1 -1
  63. package/src/controllers/import-transaction-error-log.controller.ts +93 -0
  64. package/src/controllers/import-transaction.controller.ts +128 -0
  65. package/src/dtos/create-import-transaction-error-log.dto.ts +34 -0
  66. package/src/dtos/create-import-transaction.dto.ts +50 -0
  67. package/src/dtos/import-instructions.dto.ts +66 -0
  68. package/src/dtos/update-import-transaction-error-log.dto.ts +39 -0
  69. package/src/dtos/update-import-transaction.dto.ts +52 -0
  70. package/src/entities/import-transaction-error-log.entity.ts +22 -0
  71. package/src/entities/import-transaction.entity.ts +22 -0
  72. package/src/index.ts +2 -0
  73. package/src/seeders/seed-data/solid-core-metadata.json +318 -2
  74. package/src/services/csv.service.ts +116 -7
  75. package/src/services/excel.service.ts +111 -55
  76. package/src/services/export_issues.txt +5 -1
  77. package/src/services/import-transaction-error-log.service.ts +34 -0
  78. package/src/services/import-transaction.service.ts +281 -0
  79. package/src/solid-core.module.ts +13 -1
package/src/seeders/seed-data/solid-core-metadata.json

@@ -3414,7 +3414,7 @@
  "ormType": "varchar",
  "length": 256,
  "required": true,
- "unique": true,
+ "unique": true,
  "index": true,
  "private": false,
  "encrypt": false,
@@ -3506,7 +3506,7 @@
  {
  "name": "exportTransactionId",
  "displayName": "Transaction Id",
- "type":"computed",
+ "type": "computed",
  "ormType": "varchar",
  "length": 128,
  "required": true,
@@ -3577,6 +3577,201 @@
  "isSystem": true
  }
  ]
+ },
+ {
+ "singularName": "importTransaction",
+ "tableName": "ss_import_transaction",
+ "pluralName": "importTransactions",
+ "displayName": "Import Transactions",
+ "description": "Model to capture all information related to a file import transaction",
+ "dataSource": "default",
+ "dataSourceType": "postgres",
+ "isSystem": true,
+ "userKeyFieldUserKey": "importTransactionId",
+ "fields": [
+ {
+ "name": "status",
+ "displayName": "Status",
+ "type": "selectionStatic",
+ "ormType": "varchar",
+ "length": 25,
+ "required": false,
+ "index": true,
+ "isSystem": true,
+ "selectionValueType": "string",
+ "selectionStaticValues": [
+ "draft:draft",
+ "mapping_created:mapping_created",
+ "import_started:import_started",
+ "import_succeeded:import_succeeded",
+ "import_failed:import_failed"
+ ],
+ "defaultValue": "draft"
+ },
+ {
+ "name": "importTransactionId",
+ "displayName": "Transaction Id",
+ "type": "shortText",
+ "ormType": "varchar",
+ "length": 128,
+ "required": false,
+ "unique": true,
+ "index": true,
+ "private": false,
+ "encrypt": false,
+ "isSystem": true
+ },
+ {
+ "name": "fileLocation",
+ "displayName": "File Location",
+ "type": "mediaSingle",
+ "required": true,
+ "unique": true,
+ "index": false,
+ "private": false,
+ "encrypt": false,
+ "isSystem": true,
+ "mediaStorageProviderUserKey": "default-filesystem"
+ },
+ {
+ "name": "mapping",
+ "displayName": "Mapping",
+ "type": "json",
+ "ormType": "text",
+ "required": false,
+ "unique": false,
+ "index": false,
+ "private": false,
+ "encrypt": false,
+ "isSystem": true
+ },
+ {
+ "name": "modelMetadata",
+ "displayName": "Related Model Metadata",
+ "type": "relation",
+ "ormType": "int",
+ "required": false,
+ "unique": false,
+ "index": true,
+ "private": false,
+ "encrypt": false,
+ "relationType": "many-to-one",
+ "relationCoModelSingularName": "modelMetadata",
+ "relationCreateInverse": false,
+ "relationCascade": "cascade",
+ "relationModelModuleName": "solid-core",
+ "isSystem": true
+ },
+ {
+ "name": "importTransactionErrorLog",
+ "displayName": "Related Import Transaction Error Log",
+ "type": "relation",
+ "required": false,
+ "unique": false,
+ "index": true,
+ "private": false,
+ "encrypt": false,
+ "relationType": "one-to-many",
+ "relationCoModelFieldName": "importTransaction",
+ "relationCreateInverse": true,
+ "relationCoModelSingularName": "importTransactionErrorLog",
+ "relationCoModelColumnName": "",
+ "relationModelModuleName": "solid-core"
+ }
+ ]
+ },
+ {
+ "singularName": "importTransactionErrorLog",
+ "tableName": "ss_import_transaction_error_log",
+ "pluralName": "importTransactionErrorLogs",
+ "displayName": "Import Transaction Error Logs",
+ "description": "Model to capture all information related to a file import transaction error log",
+ "dataSource": "default",
+ "dataSourceType": "postgres",
+ "isSystem": true,
+ "userKeyFieldUserKey": "importTransactionErrorLogId",
+ "fields": [
+ {
+ "name": "importTransactionErrorLogId",
+ "displayName": "Import Transaction Error Log Id",
+ "type": "shortText",
+ "ormType": "varchar",
+ "length": 256,
+ "required": true,
+ "unique": true,
+ "index": true,
+ "private": false,
+ "encrypt": false,
+ "isSystem": true
+ },
+ {
+ "name": "rowNumber",
+ "displayName": "Row Number",
+ "type": "int",
+ "ormType": "integer",
+ "required": true,
+ "unique": false,
+ "index": false,
+ "private": false,
+ "encrypt": false,
+ "isSystem": true
+ },
+ {
+ "name": "rowData",
+ "displayName": "Row Data",
+ "type": "json",
+ "ormType": "text",
+ "required": false,
+ "unique": false,
+ "index": false,
+ "private": false,
+ "encrypt": false,
+ "isSystem": true
+ },
+ {
+ "name": "importTransaction",
+ "displayName": "Related Import Transaction",
+ "type": "relation",
+ "ormType": "int",
+ "required": true,
+ "unique": false,
+ "index": true,
+ "private": false,
+ "encrypt": false,
+ "relationType": "many-to-one",
+ "relationCoModelFieldName": "importTransactionErrorLogs",
+ "relationCreateInverse": true,
+ "relationCoModelSingularName": "importTransaction",
+ "relationCoModelColumnName": "",
+ "relationModelModuleName": "solid-core",
+ "relationCascade": "cascade"
+ },
+ {
+ "name": "errorMessage",
+ "displayName": "Error Message",
+ "type": "shortText",
+ "ormType": "varchar",
+ "length": 512,
+ "required": true,
+ "unique": false,
+ "index": false,
+ "private": false,
+ "encrypt": false,
+ "isSystem": true
+ },
+ {
+ "name": "errorTrace",
+ "displayName": "Error Trace",
+ "type": "longText",
+ "ormType": "text",
+ "required": false,
+ "unique": false,
+ "index": false,
+ "private": false,
+ "encrypt": false,
+ "isSystem": true
+ }
+ ]
  }
  ]
  },
@@ -3915,6 +4110,19 @@
  "viewUserKey": "listOfValues-list-view",
  "moduleUserKey": "solid-core",
  "modelUserKey": "listOfValues"
+ },
+ {
+ "displayName": "Import Transactions List View",
+ "name": "importTransaction-list-view",
+ "type": "solid",
+ "domain": "",
+ "context": "",
+ "customComponent": "/admin/address-master/importTransaction/all",
+ "customIsModal": true,
+ "serverEndpoint": "",
+ "viewUserKey": "importTransaction-list-view",
+ "moduleUserKey": "solid-core",
+ "modelUserKey": "importTransaction"
  }
  ],
  "menus": [
@@ -4149,6 +4357,14 @@
  "actionUserKey": "chatter-message-details-list-action",
  "moduleUserKey": "solid-core",
  "parentMenuItemUserKey": "other-menu-item"
+ },
+ {
+ "displayName": "Import Transactions",
+ "name": "importTransaction-menu-item",
+ "sequenceNumber": 1,
+ "actionUserKey": "importTransaction-list-view",
+ "moduleUserKey": "solid-core",
+ "parentMenuItemUserKey": ""
  }
  ],
  "views": [
@@ -8750,6 +8966,106 @@
  }
  ]
  }
+ },
+ {
+ "name": "importTransaction-list-view",
+ "displayName": "Import Transactions",
+ "type": "list",
+ "context": "{}",
+ "moduleUserKey": "solid-core",
+ "modelUserKey": "importTransaction",
+ "layout": {
+ "type": "list",
+ "attrs": {
+ "pagination": true,
+ "pageSizeOptions": [
+ 10,
+ 25,
+ 50
+ ],
+ "enableGlobalSearch": true,
+ "create": true,
+ "edit": true,
+ "delete": true
+ },
+ "children": [
+ {
+ "type": "field",
+ "attrs": {
+ "name": "id",
+ "sortable": true,
+ "filterable": true
+ }
+ },
+ {
+ "type": "field",
+ "attrs": {
+ "name": "importTransactionErrorLog",
+ "sortable": true,
+ "filterable": true
+ }
+ }
+ ]
+ }
+ },
+ {
+ "name": "importTransaction-form-view",
+ "displayName": "Import Transactions",
+ "type": "form",
+ "context": "{}",
+ "moduleUserKey": "solid-core",
+ "modelUserKey": "importTransaction",
+ "layout": {
+ "type": "form",
+ "attrs": {
+ "name": "form-1",
+ "label": "Import Transactions",
+ "className": "grid"
+ },
+ "children": [
+ {
+ "type": "sheet",
+ "attrs": {
+ "name": "sheet-1"
+ },
+ "children": [
+ {
+ "type": "row",
+ "attrs": {
+ "name": "sheet-1"
+ },
+ "children": [
+ {
+ "type": "column",
+ "attrs": {
+ "name": "group-1",
+ "label": "",
+ "className": "col-6"
+ },
+ "children": [
+ {
+ "type": "field",
+ "attrs": {
+ "name": "importTransactionErrorLog"
+ }
+ }
+ ]
+ },
+ {
+ "type": "column",
+ "attrs": {
+ "name": "group-2",
+ "label": "",
+ "className": "col-6"
+ },
+ "children": []
+ }
+ ]
+ }
+ ]
+ }
+ ]
+ }
  }
  ],
  "emailTemplates": [
package/src/services/csv.service.ts

@@ -1,22 +1,59 @@
- import { PassThrough, Readable } from 'stream';
- import { format } from 'fast-csv';
  import { Injectable, Logger } from '@nestjs/common';
+ import { format, parse } from 'fast-csv';
+ import { PassThrough, Readable } from 'stream';
+
+ export interface CsvReadOptions {
+ pageSize?: number; // Number of records per page
+ hasHeaderRow?: boolean;
+ providedHeaders?: string[]
+ };
+ const DEFAULT_PAGE_SIZE = 100; // Default page size if not provided
+ export interface CsvReadResult {
+ headers: string[]; // Headers of the CSV file
+ data: Record<string, any>[]; // Data records in the current page
+ }

  @Injectable()
  export class CsvService {
  private logger = new Logger(CsvService.name);
  public async createCsvStream(
- getDataRecords: (chunkIndex: number, chunkSize: number) => Promise<any[]>,
- chunkSize: number
+ getDataRecords: (chunkIndex: number, chunkSize: number) => Promise<any[]> = null,
+ chunkSize: number = 100,
+ headers: string[] = []
  ): Promise<Readable> {
+ // Validations
+ // If neither headers nor data records function is provided, throw an error
+ if (headers.length === 0 && typeof getDataRecords !== 'function') {
+ throw new Error('Either headers or data records function must be provided.');
+ }
+
+ // If data records function is provided, chunkSize must be greater than 0
+ if (getDataRecords && chunkSize <= 0) {
+ throw new Error('Chunk size must be greater than 0 when data records function is provided.');
+ }
+
  const passThrough = new PassThrough(); // ✅ Create a streaming pipe
- const csvStream = format({ headers: true });
+ const csvStream = headers.length
+ ? format({ headers })
+ : format({ headers: true });

  csvStream.pipe(passThrough); // ✅ Pipe CSV output to PassThrough stream

- let chunkIndex = 0;
-
  try {
+ // 🧠 If no data retrieval logic is provided, just write headers and close
+ if (typeof getDataRecords !== 'function') {
+
+ const dummyRow = headers.reduce((acc, header) => {
+ acc[header] = '';
+ return acc;
+ }, {} as Record<string, string>);
+ csvStream.write(dummyRow);
+ csvStream.end();
+ return passThrough;
+ }
+
+ // Write the data records in chunks
+ let chunkIndex = 0;
  while (true) {
  const records = await getDataRecords(chunkIndex, chunkSize); // ✅ Fetch chunked data
  if (records.length === 0) break; // ✅ Stop if no more records
@@ -38,4 +75,76 @@ export class CsvService {

  return passThrough; // ✅ Return the streaming response
  }
+
+ public async *readCsvInPagesFromStream(
+ stream: Readable,
+ options?: CsvReadOptions
+ ): AsyncGenerator<CsvReadResult> {
+ const { pageSize = DEFAULT_PAGE_SIZE, hasHeaderRow = true, providedHeaders = [] } = options || {};
+ let headers: string[] = [];
+ let page: Record<string, any>[] = [];
+ let isFirstRow = true;
+ let hasYieldedData = false;
+
+ // Create parser
+ const parser = parse({ headers: hasHeaderRow, renameHeaders: false, trim: true });
+
+ // Pipe the input stream into the parser
+ const parsingStream = stream.pipe(parser);
+
+ for await (const row of parsingStream) {
+ if (isFirstRow && !hasHeaderRow) {
+ isFirstRow = false;
+
+ if (providedHeaders.length) {
+ headers = providedHeaders;
+ } else {
+ // If no header row and no provided headers, generate index-based headers
+ headers = Object.keys(row).length > 0 ? Object.keys(row).map((_, i) => i.toString()) : [];
+ }
+ }
+
+ // If hasHeaderRow = true, fast-csv already assigns keys as headers, so capture once
+ if (hasHeaderRow && isFirstRow) {
+ headers = Object.keys(row);
+ isFirstRow = false;
+ }
+
+ // When headers are not set yet (edge case), set them now
+ if (!headers.length) {
+ headers = providedHeaders.length ? providedHeaders : Object.keys(row);
+ }
+
+ // Map row fields to headers - if keys mismatch, fallback to index-based mapping
+ const record: Record<string, any> = {};
+ for (let i = 0; i < headers.length; i++) {
+ // For safety, access by header name or fallback by index
+ const key = headers[i];
+ const value = row[key] ?? Object.values(row)[i] ?? null;
+ record[key] = value;
+ }
+
+ // Skip empty rows
+ if (Object.values(record).every(v => v === null || v === '')) continue;
+
+ page.push(record);
+
+ if (page.length === pageSize) {
+ yield { headers, data: page };
+ hasYieldedData = true;
+ page = [];
+ }
+ }
+
+ if (page.length > 0) {
+ yield { headers, data: page };
+ hasYieldedData = true;
+ }
+
+ // If only headers present but no data, yield headers with empty data array
+ if (!hasYieldedData && headers.length > 0) {
+ yield { headers, data: [] };
+ }
+ }
+
  }
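
The hunks above add a paged, streaming CSV reader (readCsvInPagesFromStream) and let createCsvStream emit a headers-only template. The following is a minimal consumer-side sketch against those published signatures, not code from the package; the import path, file name, and page size are assumptions for illustration.

import { createReadStream } from 'fs';
// Assumed export path; CsvService could also be injected via the Nest module.
import { CsvService } from '@solidstarters/solid-core';

async function demoCsv(csvService: CsvService) {
  // Read an uploaded file 50 rows at a time without buffering it all in memory.
  const stream = createReadStream('/tmp/import.csv'); // hypothetical file
  for await (const { headers, data } of csvService.readCsvInPagesFromStream(stream, {
    pageSize: 50,
    hasHeaderRow: true,
  })) {
    console.log(headers, data.length); // one page of parsed row objects
  }

  // Headers-only mode: omit the data loader to get a one-row template CSV.
  const template = await csvService.createCsvStream(undefined, 100, ['name', 'email']);
  template.pipe(process.stdout);
}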
package/src/services/excel.service.ts

@@ -3,79 +3,72 @@ import * as ExcelJS from 'exceljs';
  import { PassThrough, Readable } from 'stream';


+ export interface ExcelReadOptions {
+ pageSize?: number; // Number of records per page
+ hasHeaderRow?: boolean; // Whether the first row contains headers
+ providedHeaders?: string[]; // Custom headers if hasHeaderRow is false
+ }
+
+ const DEFAULT_PAGE_SIZE = 100; // Default page size if not provided
+
+ export interface ExcelReadResult {
+ headers: string[]; // Headers of the Excel file
+ data: Record<string, any>[]; // Data records in the current page
+ }
+
  @Injectable()
  export class ExcelService {
  private logger = new Logger(ExcelService.name);
- // Sample JSON data
- // const jsonData = [
- // { id: 1, name: 'John Doe', age: 25, email: 'john@example.com' },
- // { id: 2, name: 'Jane Doe', age: 28, email: 'jane@example.com' },
- // { id: 3, name: 'Alice Smith', age: 30, email: 'alice@example.com' }
- // ];
-
- // public async createExcelFromJson(data: any[], fileName: string) {
- // const workbook = new ExcelJS.Workbook();
- // const worksheet = workbook.addWorksheet('Data');
-
- // // Define Columns (Header)
- // worksheet.columns = Object.keys(data[0]).map((key) => ({
- // header: key.toUpperCase(), // Convert header names to uppercase
- // key: key,
- // width: 20, // Set column width
- // }));
-
- // // Add Data Rows
- // data.forEach((item) => {
- // worksheet.addRow(item);
- // });
-
- // // Apply basic formatting
- // worksheet.getRow(1).font = { bold: true }; // Make headers bold
-
- // // Save file
- // await workbook.xlsx.writeFile(fileName);
- // this.logger.log(`✅ Excel file "${fileName}" created successfully!`);
- // // console.log(`✅ Excel file "${fileName}" created successfully!`);
- // }
-
- // public async createExcelStreamFromJson(data: any[]): Promise<Readable> {
- // const passThrough = new PassThrough(); // Stream to pipe data
- // const workbook = new ExcelJS.stream.xlsx.WorkbookWriter({ stream: passThrough });
- // const worksheet = workbook.addWorksheet('Data');
- // worksheet.columns = Object.keys(data[0]).map((key) => ({
- // header: key.toUpperCase(),
- // key: key,
- // width: 20,
- // }));
-
- // data.forEach((item) => {
- // worksheet.addRow(item);
- // });
-
- // worksheet.getRow(1).font = { bold: true };
-
- // await workbook.commit();
- // return passThrough;
- // }
-

  public async createExcelStream(
  getDataRecords: (chunkIndex: number, chunkSize: number) => Promise<any[]>,
- chunkSize: number
+ chunkSize: number = 100,
+ headers: string[] = []
  ): Promise<Readable> {
+ // Validations
+ // If neither headers nor data records function is provided, throw an error
+ if (headers.length === 0 && typeof getDataRecords !== 'function') {
+ throw new Error('Either headers or data records function must be provided.');
+ }
+
+ // If data records function is provided, chunkSize must be greater than 0
+ if (getDataRecords && chunkSize <= 0) {
+ throw new Error('Chunk size must be greater than 0 when data records function is provided.');
+ }
+
  const passThrough = new PassThrough(); // Create streaming pipe
  try {
  const workbook = new ExcelJS.stream.xlsx.WorkbookWriter({ stream: passThrough });
  const worksheet = workbook.addWorksheet('Data');

- let chunkIndex = 0;
+ // If headers are provided, use them;
  let isHeaderWritten = false;
+ if (headers.length > 0) {
+ worksheet.columns = headers.map((header) => ({
+ header: header, // Convert header names to uppercase
+ key: header,
+ width: 20, // Set column width
+ }));
+ isHeaderWritten = true; // Mark headers as written
+ }

+ // ✅ If no data loader provided, write only headers and finish
+ if (typeof getDataRecords !== 'function') {
+ // worksheet.addRow(
+ // headers.reduce((acc, header) => ({ ...acc, [header]: '' }), {})
+ // ).commit(); // Write a dummy record with headers
+
+ await workbook.commit();
+ return passThrough;
+ }
+
+ // Write the data records in chunks
+ let chunkIndex = 0;
  while (true) {
  const records = await getDataRecords(chunkIndex, chunkSize); // Fetch chunked data
  if (records.length === 0) break; // Stop if no more records

- if (!isHeaderWritten) {
+ if (!isHeaderWritten) { // Falback because without columns being set, ExcelJS won't write data correctly
  worksheet.columns = Object.keys(records[0]).map((key) => ({
  header: key.toUpperCase(),
  key: key,
@@ -102,4 +95,67 @@ export class ExcelService {
  return passThrough; // Return streaming response
  }

+ public async *readExcelInPagesFromStream(
+ stream: Readable,
+ options?: ExcelReadOptions
+ ): AsyncGenerator<ExcelReadResult> {
+ const { pageSize = DEFAULT_PAGE_SIZE, hasHeaderRow = true, providedHeaders = [] } = options || {};
+ const workbookReader = new ExcelJS.stream.xlsx.WorkbookReader(stream, {});
+
+ let headers: string[] = [];
+ let page: Record<string, any>[] = [];
+ let isFirstRow = true;
+ let hasYieldedData = false;
+
+ for await (const worksheet of workbookReader) {
+ for await (const row of worksheet) {
+ const values = Array.isArray(row.values) ? row.values.slice(1) : [];
+
+ if (isFirstRow) {
+ isFirstRow = false;
+
+ if (hasHeaderRow) {
+ headers = values.map(v => v?.toString().trim() || '');
+ continue;
+ } else if (providedHeaders.length) {
+ headers = providedHeaders;
+ } else {
+ headers = values.map((_, idx) => `${idx}`);
+ }
+ }
+
+ while (values.length < headers.length) values.push(null);
+ if (values.length > headers.length) values.length = headers.length;
+
+ const record = headers.reduce((acc, key, i) => {
+ acc[key] = values[i] ?? null;
+ return acc;
+ }, {} as Record<string, any>);
+
+ if (Object.values(record).every(v => v === null || v === '')) continue;
+
+ page.push(record);
+
+ if (page.length === pageSize) {
+ yield { headers, data: page };
+ hasYieldedData = true;
+ page = [];
+ }
+ }
+
+ // Optional: break if only processing first worksheet
+ // break;
+ }
+
+ if (page.length > 0) {
+ yield { headers, data: page };
+ hasYieldedData = true;
+ }
+
+ // ✅ Yield headers with empty data if only headers were found
+ if (!hasYieldedData && headers.length > 0) {
+ yield { headers, data: [] };
+ }
+ }
+
  }
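
As with the CSV service, these hunks add a paged streaming reader for workbooks. A minimal sketch of how a caller might consume it follows, with the same caveats: the import path, file path, and page size are illustrative assumptions, not code from the package.

import { createReadStream } from 'fs';
import { ExcelService } from '@solidstarters/solid-core'; // assumed export path

async function demoExcel(excelService: ExcelService) {
  const stream = createReadStream('/tmp/import.xlsx'); // hypothetical workbook
  for await (const { headers, data } of excelService.readExcelInPagesFromStream(stream, {
    pageSize: 25,
    hasHeaderRow: true,
  })) {
    // Each page holds at most 25 row objects keyed by the header row.
    console.log(headers, data[0]);
  }
}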