@javalabs/prisma-client 1.0.27 → 1.0.29

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (50)
  1. package/.github/CODEOWNERS +1 -1
  2. package/README.md +269 -269
  3. package/migration-config.json +63 -63
  4. package/migration-config.json.bk +95 -95
  5. package/migrations/add_reserved_amount.sql +7 -7
  6. package/package.json +44 -44
  7. package/prisma/migrations/add_uuid_to_transactions.sql +13 -13
  8. package/prisma/schema.prisma +609 -601
  9. package/src/index.ts +23 -23
  10. package/src/prisma-factory.service.ts +40 -40
  11. package/src/prisma.module.ts +9 -9
  12. package/src/prisma.service.ts +16 -16
  13. package/src/scripts/add-uuid-to-table.ts +138 -138
  14. package/src/scripts/create-tenant-schemas.ts +145 -145
  15. package/src/scripts/data-migration/batch-migrator.ts +248 -248
  16. package/src/scripts/data-migration/data-transformer.ts +426 -426
  17. package/src/scripts/data-migration/db-connector.ts +120 -120
  18. package/src/scripts/data-migration/dependency-resolver.ts +174 -174
  19. package/src/scripts/data-migration/entity-discovery.ts +196 -196
  20. package/src/scripts/data-migration/foreign-key-manager.ts +277 -277
  21. package/src/scripts/data-migration/migration-config.json +63 -63
  22. package/src/scripts/data-migration/migration-tool.ts +509 -509
  23. package/src/scripts/data-migration/schema-utils.ts +248 -248
  24. package/src/scripts/data-migration/tenant-migrator.ts +201 -201
  25. package/src/scripts/data-migration/typecast-manager.ts +193 -193
  26. package/src/scripts/data-migration/types.ts +113 -113
  27. package/src/scripts/database-initializer.ts +49 -49
  28. package/src/scripts/drop-database.ts +104 -104
  29. package/src/scripts/dump-source-db.sh +61 -61
  30. package/src/scripts/encrypt-user-passwords.ts +36 -36
  31. package/src/scripts/error-handler.ts +117 -117
  32. package/src/scripts/fix-data-types.ts +241 -241
  33. package/src/scripts/fix-enum-values.ts +357 -357
  34. package/src/scripts/fix-schema-discrepancies.ts +317 -317
  35. package/src/scripts/fix-table-indexes.ts +601 -601
  36. package/src/scripts/migrate-schema-structure.ts +90 -90
  37. package/src/scripts/migrate-uuid.ts +76 -76
  38. package/src/scripts/post-migration-validator.ts +526 -526
  39. package/src/scripts/pre-migration-validator.ts +610 -610
  40. package/src/scripts/reset-database.ts +263 -263
  41. package/src/scripts/retry-failed-migrations.ts +416 -416
  42. package/src/scripts/run-migration.ts +707 -707
  43. package/src/scripts/schema-sync.ts +128 -128
  44. package/src/scripts/sequence-sync-cli.ts +416 -416
  45. package/src/scripts/sequence-synchronizer.ts +127 -127
  46. package/src/scripts/sync-enum-types.ts +170 -170
  47. package/src/scripts/sync-enum-values.ts +563 -563
  48. package/src/scripts/truncate-database.ts +123 -123
  49. package/src/scripts/verify-migration-setup.ts +135 -135
  50. package/tsconfig.json +17 -17
package/src/scripts/data-migration/batch-migrator.ts
@@ -1,248 +1,248 @@
- import { Logger } from "@nestjs/common";
- import { SchemaUtils } from "./schema-utils";
- import { DataTransformer } from "./data-transformer";
- import { TypecastManager } from "./typecast-manager";
- import {
-   ColumnSchema,
-   DatabaseConnections,
-   MigrationOptions,
-   TableConfig,
- } from "./types";
-
- const BATCH_SIZE = 100;
- const MAX_RETRIES = 3;
- const RETRY_BASE_DELAY = 1000;
-
- export class BatchMigrator {
-   private readonly logger = new Logger("BatchMigrator");
-   private readonly typecastManager: TypecastManager;
-
-   constructor(
-     private readonly schemaUtils: SchemaUtils,
-     private readonly dataTransformer: DataTransformer,
-     private readonly connections: DatabaseConnections,
-     private readonly options: MigrationOptions,
-     private readonly providerId?: string | null
-   ) {
-     this.typecastManager = new TypecastManager();
-   }
-
-   async validateSchema(
-     sourceColumns: ColumnSchema[],
-     targetColumns: ColumnSchema[]
-   ): Promise<void> {
-     for (const targetColumn of targetColumns) {
-       const sourceColumn = sourceColumns.find(
-         (col) => col.column_name === targetColumn.column_name
-       );
-
-       if (!sourceColumn) {
-         if (targetColumn.is_nullable === "NO") {
-           throw new Error(
-             `Required column ${targetColumn.column_name} not found in source schema`
-           );
-         }
-         this.logger.warn(
-           `Column ${targetColumn.column_name} not found in source schema but is nullable`
-         );
-         continue;
-       }
-
-       if (
-         !this.typecastManager.areTypesCompatible(
-           sourceColumn.data_type,
-           targetColumn.data_type
-         )
-       ) {
-         throw new Error(
-           `Incompatible data types for column ${targetColumn.column_name}: ` +
-             `source ${sourceColumn.data_type} -> target ${targetColumn.data_type}`
-         );
-       }
-     }
-   }
-
-   async migrateEntityDataInBatches(
-     sourceSchema: string,
-     targetSchema: string,
-     tableConfig: TableConfig,
-     tenantId: string
-   ): Promise<void> {
-     const sourceColumns = await this.schemaUtils.getTableColumns(
-       sourceSchema,
-       tableConfig.sourceTable
-     );
-     const targetColumns = await this.schemaUtils.getTableColumns(
-       targetSchema,
-       tableConfig.targetTable
-     );
-
-     await this.validateSchema(sourceColumns, targetColumns);
-
-     let offset = 0;
-     let hasMoreRecords = true;
-     let retryCount = 0;
-
-     while (hasMoreRecords) {
-       try {
-         const records = await this.fetchBatch(
-           sourceSchema,
-           tableConfig,
-           offset,
-           BATCH_SIZE
-         );
-
-         if (records.length === 0) {
-           hasMoreRecords = false;
-           continue;
-         }
-
-         await this.processBatchWithTransaction(
-           records,
-           targetSchema,
-           tableConfig,
-           sourceColumns,
-           targetColumns,
-           tenantId
-         );
-
-         offset += BATCH_SIZE;
-         retryCount = 0; // Reset retry count on success
-       } catch (error) {
-         if (retryCount < MAX_RETRIES) {
-           retryCount++;
-           const delay = RETRY_BASE_DELAY * Math.pow(2, retryCount - 1);
-           this.logger.warn(
-             `Error processing batch, retrying in ${delay}ms (attempt ${retryCount}/${MAX_RETRIES}): ${error.message}`
-           );
-           await new Promise((resolve) => setTimeout(resolve, delay));
-         } else {
-           throw new Error(
-             `Failed to process batch after ${MAX_RETRIES} retries: ${error.message}`
-           );
-         }
-       }
-     }
-   }
-
-   private async fetchBatch(
-     sourceSchema: string,
-     tableConfig: TableConfig,
-     offset: number,
-     limit: number
-   ): Promise<any[]> {
-     const query = `
-       SELECT *
-       FROM "${sourceSchema}"."${tableConfig.sourceTable}"
-       ${this.buildWhereClause(tableConfig)}
-       ORDER BY "${tableConfig.idField}"
-       LIMIT ${limit}
-       OFFSET ${offset}
-     `;
-
-     const result = await this.connections.sourcePool.query(query);
-     return result.rows;
-   }
-
-   private buildWhereClause(tableConfig: TableConfig): string {
-     const conditions = [];
-
-     if (this.providerId) {
-       conditions.push(`"${tableConfig.providerLink}" = '${this.providerId}'`);
-     }
-
-     if (tableConfig.filterColumn) {
-       conditions.push(`"${tableConfig.filterColumn}" IS NOT NULL`);
-     }
-
-     return conditions.length > 0 ? `WHERE ${conditions.join(" AND ")}` : "";
-   }
-
-   private async processBatchWithTransaction(
-     records: any[],
-     targetSchema: string,
-     tableConfig: TableConfig,
-     sourceColumns: ColumnSchema[],
-     targetColumns: ColumnSchema[],
-     tenantId: string
-   ): Promise<void> {
-     const client = await this.connections.targetPool.connect();
-     try {
-       await client.query("BEGIN");
-
-       for (const record of records) {
-         const transformedRecord = await this.transformRecord(
-           record,
-           sourceColumns,
-           targetColumns,
-           tenantId
-         );
-
-         await this.insertRecord(
-           client,
-           targetSchema,
-           tableConfig.targetTable,
-           transformedRecord
-         );
-       }
-
-       await client.query("COMMIT");
-     } catch (error) {
-       await client.query("ROLLBACK");
-       throw error;
-     } finally {
-       client.release();
-     }
-   }
-
-   private async transformRecord(
-     record: any,
-     sourceColumns: ColumnSchema[],
-     targetColumns: ColumnSchema[],
-     tenantId: string
-   ): Promise<any> {
-     const transformedRecord: any = {};
-
-     for (const targetColumn of targetColumns) {
-       const sourceColumn = sourceColumns.find(
-         (col) => col.column_name === targetColumn.column_name
-       );
-
-       if (!sourceColumn) {
-         transformedRecord[targetColumn.column_name] = null;
-         continue;
-       }
-
-       const value = record[targetColumn.column_name];
-       transformedRecord[targetColumn.column_name] =
-         await this.dataTransformer.transformColumnValue(
-           value,
-           targetColumn.column_name,
-           { ...targetColumn, source_type: sourceColumn.data_type },
-           tenantId
-         );
-     }
-
-     return transformedRecord;
-   }
-
-   private async insertRecord(
-     client: any,
-     schema: string,
-     table: string,
-     record: any
-   ): Promise<void> {
-     const columns = Object.keys(record);
-     const values = Object.values(record);
-     const placeholders = values.map((_, i) => `$${i + 1}`).join(", ");
-
-     const query = `
-       INSERT INTO "${schema}"."${table}"
-       (${columns.map((col) => `"${col}"`).join(", ")})
-       VALUES (${placeholders})
-       ON CONFLICT DO NOTHING
-     `;
-
-     await client.query(query, values);
-   }
- }
+ import { Logger } from "@nestjs/common";
+ import { SchemaUtils } from "./schema-utils";
+ import { DataTransformer } from "./data-transformer";
+ import { TypecastManager } from "./typecast-manager";
+ import {
+   ColumnSchema,
+   DatabaseConnections,
+   MigrationOptions,
+   TableConfig,
+ } from "./types";
+
+ const BATCH_SIZE = 100;
+ const MAX_RETRIES = 3;
+ const RETRY_BASE_DELAY = 1000;
+
+ export class BatchMigrator {
+   private readonly logger = new Logger("BatchMigrator");
+   private readonly typecastManager: TypecastManager;
+
+   constructor(
+     private readonly schemaUtils: SchemaUtils,
+     private readonly dataTransformer: DataTransformer,
+     private readonly connections: DatabaseConnections,
+     private readonly options: MigrationOptions,
+     private readonly providerId?: string | null
+   ) {
+     this.typecastManager = new TypecastManager();
+   }
+
+   async validateSchema(
+     sourceColumns: ColumnSchema[],
+     targetColumns: ColumnSchema[]
+   ): Promise<void> {
+     for (const targetColumn of targetColumns) {
+       const sourceColumn = sourceColumns.find(
+         (col) => col.column_name === targetColumn.column_name
+       );
+
+       if (!sourceColumn) {
+         if (targetColumn.is_nullable === "NO") {
+           throw new Error(
+             `Required column ${targetColumn.column_name} not found in source schema`
+           );
+         }
+         this.logger.warn(
+           `Column ${targetColumn.column_name} not found in source schema but is nullable`
+         );
+         continue;
+       }
+
+       if (
+         !this.typecastManager.areTypesCompatible(
+           sourceColumn.data_type,
+           targetColumn.data_type
+         )
+       ) {
+         throw new Error(
+           `Incompatible data types for column ${targetColumn.column_name}: ` +
+             `source ${sourceColumn.data_type} -> target ${targetColumn.data_type}`
+         );
+       }
+     }
+   }
+
+   async migrateEntityDataInBatches(
+     sourceSchema: string,
+     targetSchema: string,
+     tableConfig: TableConfig,
+     tenantId: string
+   ): Promise<void> {
+     const sourceColumns = await this.schemaUtils.getTableColumns(
+       sourceSchema,
+       tableConfig.sourceTable
+     );
+     const targetColumns = await this.schemaUtils.getTableColumns(
+       targetSchema,
+       tableConfig.targetTable
+     );
+
+     await this.validateSchema(sourceColumns, targetColumns);
+
+     let offset = 0;
+     let hasMoreRecords = true;
+     let retryCount = 0;
+
+     while (hasMoreRecords) {
+       try {
+         const records = await this.fetchBatch(
+           sourceSchema,
+           tableConfig,
+           offset,
+           BATCH_SIZE
+         );
+
+         if (records.length === 0) {
+           hasMoreRecords = false;
+           continue;
+         }
+
+         await this.processBatchWithTransaction(
+           records,
+           targetSchema,
+           tableConfig,
+           sourceColumns,
+           targetColumns,
+           tenantId
+         );
+
+         offset += BATCH_SIZE;
+         retryCount = 0; // Reset retry count on success
+       } catch (error) {
+         if (retryCount < MAX_RETRIES) {
+           retryCount++;
+           const delay = RETRY_BASE_DELAY * Math.pow(2, retryCount - 1);
+           this.logger.warn(
+             `Error processing batch, retrying in ${delay}ms (attempt ${retryCount}/${MAX_RETRIES}): ${error.message}`
+           );
+           await new Promise((resolve) => setTimeout(resolve, delay));
+         } else {
+           throw new Error(
+             `Failed to process batch after ${MAX_RETRIES} retries: ${error.message}`
+           );
+         }
+       }
+     }
+   }
+
+   private async fetchBatch(
+     sourceSchema: string,
+     tableConfig: TableConfig,
+     offset: number,
+     limit: number
+   ): Promise<any[]> {
+     const query = `
+       SELECT *
+       FROM "${sourceSchema}"."${tableConfig.sourceTable}"
+       ${this.buildWhereClause(tableConfig)}
+       ORDER BY "${tableConfig.idField}"
+       LIMIT ${limit}
+       OFFSET ${offset}
+     `;
+
+     const result = await this.connections.sourcePool.query(query);
+     return result.rows;
+   }
+
+   private buildWhereClause(tableConfig: TableConfig): string {
+     const conditions = [];
+
+     if (this.providerId) {
+       conditions.push(`"${tableConfig.providerLink}" = '${this.providerId}'`);
+     }
+
+     if (tableConfig.filterColumn) {
+       conditions.push(`"${tableConfig.filterColumn}" IS NOT NULL`);
+     }
+
+     return conditions.length > 0 ? `WHERE ${conditions.join(" AND ")}` : "";
+   }
+
+   private async processBatchWithTransaction(
+     records: any[],
+     targetSchema: string,
+     tableConfig: TableConfig,
+     sourceColumns: ColumnSchema[],
+     targetColumns: ColumnSchema[],
+     tenantId: string
+   ): Promise<void> {
+     const client = await this.connections.targetPool.connect();
+     try {
+       await client.query("BEGIN");
+
+       for (const record of records) {
+         const transformedRecord = await this.transformRecord(
+           record,
+           sourceColumns,
+           targetColumns,
+           tenantId
+         );
+
+         await this.insertRecord(
+           client,
+           targetSchema,
+           tableConfig.targetTable,
+           transformedRecord
+         );
+       }
+
+       await client.query("COMMIT");
+     } catch (error) {
+       await client.query("ROLLBACK");
+       throw error;
+     } finally {
+       client.release();
+     }
+   }
+
+   private async transformRecord(
+     record: any,
+     sourceColumns: ColumnSchema[],
+     targetColumns: ColumnSchema[],
+     tenantId: string
+   ): Promise<any> {
+     const transformedRecord: any = {};
+
+     for (const targetColumn of targetColumns) {
+       const sourceColumn = sourceColumns.find(
+         (col) => col.column_name === targetColumn.column_name
+       );
+
+       if (!sourceColumn) {
+         transformedRecord[targetColumn.column_name] = null;
+         continue;
+       }
+
+       const value = record[targetColumn.column_name];
+       transformedRecord[targetColumn.column_name] =
+         await this.dataTransformer.transformColumnValue(
+           value,
+           targetColumn.column_name,
+           { ...targetColumn, source_type: sourceColumn.data_type },
+           tenantId
+         );
+     }
+
+     return transformedRecord;
+   }
+
+   private async insertRecord(
+     client: any,
+     schema: string,
+     table: string,
+     record: any
+   ): Promise<void> {
+     const columns = Object.keys(record);
+     const values = Object.values(record);
+     const placeholders = values.map((_, i) => `$${i + 1}`).join(", ");
+
+     const query = `
+       INSERT INTO "${schema}"."${table}"
+       (${columns.map((col) => `"${col}"`).join(", ")})
+       VALUES (${placeholders})
+       ON CONFLICT DO NOTHING
+     `;
+
+     await client.query(query, values);
+   }
+ }
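
For orientation, the sketch below shows one way the BatchMigrator in this file might be wired up and invoked. It is not part of the package: the pg connection pools, the SchemaUtils and DataTransformer constructor calls, the TableConfig field values, and the schema/tenant names are assumptions inferred from how batch-migrator.ts uses them, not APIs confirmed by this diff.

import { Pool } from "pg";
import { BatchMigrator } from "./batch-migrator";
import { SchemaUtils } from "./schema-utils";
import { DataTransformer } from "./data-transformer";
import type {
  DatabaseConnections,
  MigrationOptions,
  TableConfig,
} from "./types";

async function runExampleMigration(): Promise<void> {
  // batch-migrator.ts only calls sourcePool.query() and targetPool.connect(),
  // which matches node-postgres pools; any other fields on DatabaseConnections
  // are unknown here, hence the cast.
  const connections = {
    sourcePool: new Pool({ connectionString: process.env.SOURCE_DATABASE_URL }),
    targetPool: new Pool({ connectionString: process.env.TARGET_DATABASE_URL }),
  } as unknown as DatabaseConnections;

  // Field names taken from how this file reads TableConfig; which fields are
  // required is an assumption.
  const tableConfig = {
    sourceTable: "transactions",
    targetTable: "transactions",
    idField: "id",
    providerLink: "provider_id",
  } as unknown as TableConfig;

  const migrator = new BatchMigrator(
    new SchemaUtils(connections), // constructor signature assumed
    new DataTransformer(), // constructor signature assumed
    connections,
    {} as MigrationOptions, // options shape is not visible in this file
    "provider-123" // optional provider filter used by buildWhereClause()
  );

  // Copies rows from "public"."transactions" into "tenant_acme"."transactions"
  // in batches of 100, one transaction per batch, retrying a failed batch up
  // to MAX_RETRIES times with exponential backoff.
  await migrator.migrateEntityDataInBatches(
    "public",
    "tenant_acme",
    tableConfig,
    "tenant_acme"
  );
}

runExampleMigration().catch((error) => {
  console.error("Example migration failed:", error);
  process.exit(1);
});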