@javalabs/prisma-client 1.0.0

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (150)
  1. package/README.md +220 -0
  2. package/dist/index.d.ts +7 -0
  3. package/dist/index.js +34 -0
  4. package/dist/index.js.map +1 -0
  5. package/dist/prisma-factory.service.d.ts +9 -0
  6. package/dist/prisma-factory.service.js +47 -0
  7. package/dist/prisma-factory.service.js.map +1 -0
  8. package/dist/prisma.module.d.ts +2 -0
  9. package/dist/prisma.module.js +23 -0
  10. package/dist/prisma.module.js.map +1 -0
  11. package/dist/prisma.service.d.ts +6 -0
  12. package/dist/prisma.service.js +27 -0
  13. package/dist/prisma.service.js.map +1 -0
  14. package/dist/scripts/create-tenant-schemas.d.ts +1 -0
  15. package/dist/scripts/create-tenant-schemas.js +117 -0
  16. package/dist/scripts/create-tenant-schemas.js.map +1 -0
  17. package/dist/scripts/data-migration/batch-migrator.d.ts +25 -0
  18. package/dist/scripts/data-migration/batch-migrator.js +333 -0
  19. package/dist/scripts/data-migration/batch-migrator.js.map +1 -0
  20. package/dist/scripts/data-migration/data-transformer.d.ts +17 -0
  21. package/dist/scripts/data-migration/data-transformer.js +242 -0
  22. package/dist/scripts/data-migration/data-transformer.js.map +1 -0
  23. package/dist/scripts/data-migration/db-connector.d.ts +7 -0
  24. package/dist/scripts/data-migration/db-connector.js +58 -0
  25. package/dist/scripts/data-migration/db-connector.js.map +1 -0
  26. package/dist/scripts/data-migration/dependency-manager.d.ts +9 -0
  27. package/dist/scripts/data-migration/dependency-manager.js +86 -0
  28. package/dist/scripts/data-migration/dependency-manager.js.map +1 -0
  29. package/dist/scripts/data-migration/dependency-resolver.d.ts +18 -0
  30. package/dist/scripts/data-migration/dependency-resolver.js +251 -0
  31. package/dist/scripts/data-migration/dependency-resolver.js.map +1 -0
  32. package/dist/scripts/data-migration/entity-discovery.d.ts +11 -0
  33. package/dist/scripts/data-migration/entity-discovery.js +152 -0
  34. package/dist/scripts/data-migration/entity-discovery.js.map +1 -0
  35. package/dist/scripts/data-migration/foreign-key-manager.d.ts +17 -0
  36. package/dist/scripts/data-migration/foreign-key-manager.js +70 -0
  37. package/dist/scripts/data-migration/foreign-key-manager.js.map +1 -0
  38. package/dist/scripts/data-migration/migration-phases.d.ts +5 -0
  39. package/dist/scripts/data-migration/migration-phases.js +55 -0
  40. package/dist/scripts/data-migration/migration-phases.js.map +1 -0
  41. package/dist/scripts/data-migration/migration-tool.d.ts +29 -0
  42. package/dist/scripts/data-migration/migration-tool.js +250 -0
  43. package/dist/scripts/data-migration/migration-tool.js.map +1 -0
  44. package/dist/scripts/data-migration/phase-generator.d.ts +15 -0
  45. package/dist/scripts/data-migration/phase-generator.js +187 -0
  46. package/dist/scripts/data-migration/phase-generator.js.map +1 -0
  47. package/dist/scripts/data-migration/schema-utils.d.ts +18 -0
  48. package/dist/scripts/data-migration/schema-utils.js +164 -0
  49. package/dist/scripts/data-migration/schema-utils.js.map +1 -0
  50. package/dist/scripts/data-migration/tenant-migrator.d.ts +15 -0
  51. package/dist/scripts/data-migration/tenant-migrator.js +110 -0
  52. package/dist/scripts/data-migration/tenant-migrator.js.map +1 -0
  53. package/dist/scripts/data-migration/typecast-manager.d.ts +5 -0
  54. package/dist/scripts/data-migration/typecast-manager.js +35 -0
  55. package/dist/scripts/data-migration/typecast-manager.js.map +1 -0
  56. package/dist/scripts/data-migration/types.d.ts +34 -0
  57. package/dist/scripts/data-migration/types.js +3 -0
  58. package/dist/scripts/data-migration/types.js.map +1 -0
  59. package/dist/scripts/data-migration.d.ts +22 -0
  60. package/dist/scripts/data-migration.js +593 -0
  61. package/dist/scripts/data-migration.js.map +1 -0
  62. package/dist/scripts/drop-database.d.ts +10 -0
  63. package/dist/scripts/drop-database.js +81 -0
  64. package/dist/scripts/drop-database.js.map +1 -0
  65. package/dist/scripts/error-handler.d.ts +12 -0
  66. package/dist/scripts/error-handler.js +82 -0
  67. package/dist/scripts/error-handler.js.map +1 -0
  68. package/dist/scripts/fix-data-types.d.ts +10 -0
  69. package/dist/scripts/fix-data-types.js +185 -0
  70. package/dist/scripts/fix-data-types.js.map +1 -0
  71. package/dist/scripts/fix-enum-values.d.ts +17 -0
  72. package/dist/scripts/fix-enum-values.js +234 -0
  73. package/dist/scripts/fix-enum-values.js.map +1 -0
  74. package/dist/scripts/fix-schema-discrepancies.d.ts +21 -0
  75. package/dist/scripts/fix-schema-discrepancies.js +240 -0
  76. package/dist/scripts/fix-schema-discrepancies.js.map +1 -0
  77. package/dist/scripts/migrate-schema-structure.d.ts +1 -0
  78. package/dist/scripts/migrate-schema-structure.js +76 -0
  79. package/dist/scripts/migrate-schema-structure.js.map +1 -0
  80. package/dist/scripts/post-migration-validator.d.ts +21 -0
  81. package/dist/scripts/post-migration-validator.js +341 -0
  82. package/dist/scripts/post-migration-validator.js.map +1 -0
  83. package/dist/scripts/pre-migration-validator.d.ts +25 -0
  84. package/dist/scripts/pre-migration-validator.js +491 -0
  85. package/dist/scripts/pre-migration-validator.js.map +1 -0
  86. package/dist/scripts/reset-database.d.ts +17 -0
  87. package/dist/scripts/reset-database.js +202 -0
  88. package/dist/scripts/reset-database.js.map +1 -0
  89. package/dist/scripts/retry-failed-migrations.d.ts +14 -0
  90. package/dist/scripts/retry-failed-migrations.js +301 -0
  91. package/dist/scripts/retry-failed-migrations.js.map +1 -0
  92. package/dist/scripts/run-migration.d.ts +1 -0
  93. package/dist/scripts/run-migration.js +525 -0
  94. package/dist/scripts/run-migration.js.map +1 -0
  95. package/dist/scripts/schema-sync.d.ts +1 -0
  96. package/dist/scripts/schema-sync.js +85 -0
  97. package/dist/scripts/schema-sync.js.map +1 -0
  98. package/dist/scripts/sync-enum-types.d.ts +13 -0
  99. package/dist/scripts/sync-enum-types.js +139 -0
  100. package/dist/scripts/sync-enum-types.js.map +1 -0
  101. package/dist/scripts/sync-enum-values.d.ts +20 -0
  102. package/dist/scripts/sync-enum-values.js +336 -0
  103. package/dist/scripts/sync-enum-values.js.map +1 -0
  104. package/dist/scripts/truncate-database.d.ts +10 -0
  105. package/dist/scripts/truncate-database.js +100 -0
  106. package/dist/scripts/truncate-database.js.map +1 -0
  107. package/dist/scripts/verify-migration-setup.d.ts +11 -0
  108. package/dist/scripts/verify-migration-setup.js +120 -0
  109. package/dist/scripts/verify-migration-setup.js.map +1 -0
  110. package/dist/tsconfig.tsbuildinfo +1 -0
  111. package/migration-config-public.json +95 -0
  112. package/migration-config.json +95 -0
  113. package/package.json +33 -0
  114. package/prisma/migrations/migration_lock.toml +3 -0
  115. package/prisma/schema.prisma +360 -0
  116. package/src/index.ts +23 -0
  117. package/src/prisma-factory.service.ts +41 -0
  118. package/src/prisma.module.ts +10 -0
  119. package/src/prisma.service.ts +17 -0
  120. package/src/scripts/create-tenant-schemas.ts +146 -0
  121. package/src/scripts/data-migration/batch-migrator.ts +569 -0
  122. package/src/scripts/data-migration/data-transformer.ts +377 -0
  123. package/src/scripts/data-migration/db-connector.ts +67 -0
  124. package/src/scripts/data-migration/dependency-resolver.ts +319 -0
  125. package/src/scripts/data-migration/entity-discovery.ts +197 -0
  126. package/src/scripts/data-migration/foreign-key-manager.ts +95 -0
  127. package/src/scripts/data-migration/migration-tool.ts +357 -0
  128. package/src/scripts/data-migration/schema-utils.ts +186 -0
  129. package/src/scripts/data-migration/tenant-migrator.ts +194 -0
  130. package/src/scripts/data-migration/typecast-manager.ts +38 -0
  131. package/src/scripts/data-migration/types.ts +40 -0
  132. package/src/scripts/drop-database.ts +105 -0
  133. package/src/scripts/dump-source-db.sh +62 -0
  134. package/src/scripts/dumps/source_dump_20250413_112626.sql +1527 -0
  135. package/src/scripts/error-handler.ts +118 -0
  136. package/src/scripts/fix-data-types.ts +242 -0
  137. package/src/scripts/fix-enum-values.ts +357 -0
  138. package/src/scripts/fix-schema-discrepancies.ts +318 -0
  139. package/src/scripts/migrate-schema-structure.ts +90 -0
  140. package/src/scripts/post-migration-validator.ts +427 -0
  141. package/src/scripts/pre-migration-validator.ts +611 -0
  142. package/src/scripts/reset-database.ts +264 -0
  143. package/src/scripts/retry-failed-migrations.ts +416 -0
  144. package/src/scripts/run-migration.ts +691 -0
  145. package/src/scripts/schema-sync.ts +129 -0
  146. package/src/scripts/sync-enum-types.ts +171 -0
  147. package/src/scripts/sync-enum-values.ts +563 -0
  148. package/src/scripts/truncate-database.ts +124 -0
  149. package/src/scripts/verify-migration-setup.ts +136 -0
  150. package/tsconfig.json +18 -0
package/src/scripts/data-migration/batch-migrator.ts
@@ -0,0 +1,569 @@
+ import { Logger } from "@nestjs/common";
+ import { PrismaClient } from "@prisma/client";
+ import { EntityType, ColumnSchema, EnumCastValue, DatabaseConnections } from "./types";
+ import { DataTransformer } from "./data-transformer";
+ import { SchemaUtils } from "./schema-utils";
+ import { DependencyResolver } from "./dependency-resolver";
+ import { ForeignKeyManager } from "./foreign-key-manager"; // Assuming this exists and might be useful later
+
+ export class BatchMigrator {
+   private readonly logger = new Logger("BatchMigrator");
+   private readonly BATCH_SIZE = 10; // Consider making this configurable
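+   // NOTE: BATCH_SIZE is not referenced elsewhere in this file -- the current
+   // implementation loads all matching rows in one query and upserts them
+   // row by row, so this constant is effectively unused.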
+   // Removed typecastManager as it wasn't used and casting is handled inline
+
+   constructor(
+     private readonly dataTransformer: DataTransformer,
+     private readonly schemaUtils: SchemaUtils,
+     private readonly connections: DatabaseConnections,
+     private readonly dependencyResolver: DependencyResolver, // Keep for dependency checks
+     private readonly schemaCache: Record<string, ColumnSchema[]> = {},
+     private readonly targetSchemaCache: Record<string, ColumnSchema[]> = {} // Removed ForeignKeyManager from constructor if not used directly here
+   ) {}
+
+   private async checkTableHasData(
+     tenantId: string,
+     tableName: string
+   ): Promise<boolean> {
+     // Keep this function as is
+     try {
+       const result = await this.connections.targetPool.query(
+         `SELECT EXISTS (SELECT 1 FROM "${tenantId}"."${tableName}" LIMIT 1)`
+       );
+       return result.rows[0]?.exists || false;
+     } catch (error) {
+       this.logger.warn(
+         `Error checking data existence for ${tableName}: ${error.message}`
+       );
+       return false;
+     }
+   }
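+   // (checkTableHasData is private and not currently called within this class.)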
+
+   async migrateEntityDataInBatches(
+     prisma: PrismaClient, // Prisma client (likely connected to target public schema)
+     entity: EntityType, // Config object for the table being migrated
+     providerId: number | null, // Provider ID to filter source data (null if not filtering)
+     targetSchema: string // The schema in the TARGET database where data should be inserted
+   ) {
+     try {
+       // Ensure target schema exists (important for tenant schemas if used)
+       if (targetSchema !== "public") {
+         await this.ensureSchemaExists(targetSchema);
+       }
+
+       // Optional: Dependency check (can be complex with dynamic filtering)
+       // Consider if this check is still reliable or needed with the new strategy
+       // const dependencies = await this.dependencyResolver.analyzeDependencies();
+       // ... dependency check logic ...
+
+       // Disable foreign key checks in the target schema for the duration of this batch
+       // Note: This applies to the connection used by targetPool, ensure it targets the correct DB if replicas are used
+       await this.connections.targetPool.query(
+         `SET session_replication_role = 'replica';`
+       );
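+       // NOTE: 'replica' suppresses trigger and FK enforcement for this session
+       // only, and setting session_replication_role typically requires
+       // superuser (or equivalently privileged) rights on the target database.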
+
+       const { name: tableName, idField, filterColumn, filterVia } = entity;
+
+       this.logger.log(
+         `Migrating ${tableName} -> target schema '${targetSchema}'. ${
+           providerId
+             ? `Filtering source by Provider ID: ${providerId}`
+             : "Migrating all source records."
+         }`
+       );
+
+       try {
+         // Get source and target schemas
+         const sourceTableSchema = await this.getSourceSchema(tableName);
+         const targetTableSchema = await this.getTargetSchema(
+           targetSchema,
+           tableName
+         );
+
+         // Validate schemas
+         if (!sourceTableSchema.length) {
+           this.logger.warn(
+             `Source table ${tableName} schema not found. Skipping.`
+           );
+           return;
+         }
+         if (!targetTableSchema.length) {
+           this.logger.warn(
+             `Target table ${tableName} not found in schema '${targetSchema}'. Skipping.`
+           );
+           return;
+         }
+
+         // --- DYNAMIC SOURCE QUERY GENERATION ---
+         let selectQuery: string;
+         let queryParams: any[] = [];
+
+         // The base query selects all columns from the source table via t.*
+         // ('t' aliases the primary table).
+         let fromClause = `FROM "${tableName}" t`;
+         let whereClause = "";
+
+         if (providerId && filterColumn) {
+           // Filtering is needed
+           if (filterVia) {
+             // Filter through an intermediate table (JOIN needed).
+             // Assumes the intermediate table links via its primary key 'id' to
+             // filterColumn, and links to providers via a 'provider_id' column.
+             // Example: migrating 'invoices' (t) via 'transactions' (j) needs
+             // filterColumn='transaction_id' (linking t to j) and
+             // filterVia='transactions'; the final filter is j.provider_id = $1
+             // NOTE: This makes assumptions! A more robust solution might need more config.
+             const joinTable = filterVia;
+             const joinCondition = `t."${filterColumn}" = j.id`; // Assuming PK of joinTable is 'id'
+             const providerFilter = `j.provider_id = $1`; // Assuming FK in joinTable is 'provider_id'
+
+             fromClause = `FROM "${tableName}" t JOIN "${joinTable}" j ON ${joinCondition}`;
+             whereClause = `WHERE ${providerFilter}`;
+             queryParams = [providerId];
+             this.logger.log(`Using JOIN filter: ${fromClause} ${whereClause}`);
+           } else {
+             // Direct filter on the table itself
+             whereClause = `WHERE t."${filterColumn}" = $1`;
+             queryParams = [providerId];
+             this.logger.log(`Using direct filter: ${whereClause}`);
+           }
+         } else {
+           // No filtering needed (e.g., migrating public tables or filteredPublic without providerId)
+           this.logger.log(
+             `No providerId filter applied for ${tableName}. Selecting all records.`
+           );
+         }
+
+         selectQuery = `SELECT t.* ${fromClause} ${whereClause}`;
+         // --- END DYNAMIC SOURCE QUERY GENERATION ---
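+         // Illustrative result (names from the example in the comments above):
+         // for { name: "invoices", filterColumn: "transaction_id",
+         // filterVia: "transactions" } and providerId = 42, this produces
+         //   SELECT t.* FROM "invoices" t JOIN "transactions" j
+         //     ON t."transaction_id" = j.id WHERE j.provider_id = $1
+         // with queryParams = [42].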
+
+         // Execute query and process data
+         const sourceData = await this.executeSourceQuery(
+           selectQuery,
+           queryParams
+         );
+         const totalRecords = sourceData.rows.length;
+
+         this.logger.log(
+           `Found ${totalRecords} ${tableName} records in source to migrate to '${targetSchema}'.`
+         );
+
+         if (totalRecords === 0) {
+           this.logger.log(
+             `No records to migrate for ${tableName} with current filter. Skipping processing.`
+           );
+           // Re-enable FK checks before returning
+           await this.connections.targetPool.query(
+             `SET session_replication_role = 'origin';`
+           );
+           return;
+         }
+
+         // Use the specific idField from the entity config
+         const primaryKeyField = idField;
+
+         await this.processRecords(
+           prisma, // Pass the main Prisma client
+           targetSchema, // Pass the specific target schema for insertion
+           tableName,
+           primaryKeyField,
+           sourceData.rows,
+           sourceTableSchema,
+           targetTableSchema
+         );
+       } catch (error) {
+         this.logger.error(
+           `Error during migration step for ${tableName} to schema '${targetSchema}': ${error.message}`
+         );
+         // Log context for better debugging
+         console.error(`Entity Config:`, JSON.stringify(entity));
+         console.error(`Provider ID used for filter:`, providerId);
+         // console.error(`Generated Select Query:`, selectQuery); // selectQuery is out of scope here
+         // console.error(`Query Params:`, JSON.stringify(queryParams)); // queryParams is out of scope here
+         // Rethrow to be caught by the outer try/catch in DataMigrationTool
+         throw error;
+       }
+     } catch (error) {
+       // Catch errors from initial checks or schema operations
+       this.logger.error(
+         `Error preparing migration for ${entity.name} to schema '${targetSchema}': ${error.message}`
+       );
+       throw error; // Rethrow to be caught by DataMigrationTool
+     } finally {
+       // ALWAYS re-enable foreign key checks, even if errors occurred
+       try {
+         await this.connections.targetPool.query(
+           `SET session_replication_role = 'origin';`
+         );
+       } catch (finallyError) {
+         this.logger.error(
+           `Failed to reset session_replication_role: ${finallyError.message}`
+         );
+       }
+     }
+   }
+
+   // --- Helper Functions (ensureSchemaExists, getSourceSchema, getTargetSchema, executeSourceQuery, getPrimaryKeyField) ---
+   // Keep these mostly as they are, but ensure getPrimaryKeyField uses the correct idField from EntityType if needed
+   // (Current implementation queries information_schema, which is fine, but ensure it uses the correct targetSchema)
+
+   private async ensureSchemaExists(schemaName: string): Promise<void> {
+     // Check if schema exists
+     const schemaExistsResult = await this.connections.targetPool.query(
+       `SELECT schema_name FROM information_schema.schemata WHERE schema_name = $1`,
+       [schemaName]
+     );
+
+     if (schemaExistsResult.rows.length === 0) {
+       this.logger.log(`Schema '${schemaName}' does not exist. Creating...`);
+       // Create schema if it doesn't exist - needs structure copied from public
+       await this.schemaUtils.createSchema(schemaName);
+       this.logger.log(`Schema '${schemaName}' created.`);
+
+       // Optional: Baseline migration record for the new schema if using Prisma Migrate
+       try {
+         await this.connections.targetPool.query(`
+           INSERT INTO "${schemaName}"."_prisma_migrations" (id, checksum, finished_at, migration_name, logs, rolled_back_at, started_at, applied_steps_count)
+           SELECT id, checksum, finished_at, migration_name, logs, rolled_back_at, started_at, applied_steps_count
+           FROM "public"."_prisma_migrations" WHERE migration_name LIKE '%_init'
+           ON CONFLICT DO NOTHING;
+         `);
+         this.logger.log(
+           `Attempted to copy initial migration record to schema '${schemaName}'.`
+         );
+       } catch (migrationError) {
+         this.logger.warn(
+           `Could not copy baseline migration to schema '${schemaName}': ${migrationError.message}`
+         );
+       }
+     }
+   }
+
+   private async getSourceSchema(tableName: string): Promise<ColumnSchema[]> {
+     const cacheKey = `source.${tableName}`;
+     if (!this.schemaCache[cacheKey]) {
+       this.logger.debug(`Cache miss for source schema: ${tableName}`);
+       this.schemaCache[cacheKey] = await this.schemaUtils.getTableSchema(
+         tableName,
+         "source",
+         "public" // Source is always public schema in this context
+       );
+     }
+     return this.schemaCache[cacheKey];
+   }
+
+   private async getTargetSchema(
+     schema: string, // Target schema name (could be 'public' or tenantId)
+     tableName: string
+   ): Promise<ColumnSchema[]> {
+     const cacheKey = `${schema}.${tableName}`;
+     if (!this.targetSchemaCache[cacheKey]) {
+       this.logger.debug(`Cache miss for target schema: ${cacheKey}`);
+       this.targetSchemaCache[cacheKey] = await this.schemaUtils.getTableSchema(
+         tableName,
+         "target",
+         schema // Use the provided target schema name
+       );
+     }
+     return this.targetSchemaCache[cacheKey];
+   }
+
+   private async executeSourceQuery(query: string, params: any[]): Promise<any> {
+     try {
+       this.logger.debug(
+         `Executing source query: ${query.replace(
+           /\s\s+/g,
+           " "
+         )} || PARAMS: ${JSON.stringify(params)}`
+       );
+       const result = await this.connections.sourcePool.query(query, params);
+       this.logger.debug(`Query returned ${result.rows?.length || 0} rows`);
+       return result;
+     } catch (error) {
+       this.logger.error(`Error executing source query: ${error.message}`);
+       this.logger.error(`Query was: ${query}`);
+       this.logger.error(`Params were: ${JSON.stringify(params)}`);
+       throw error;
+     }
+   }
+
+   private async getPrimaryKeyField(
+     schemaName: string,
+     tableName: string
+   ): Promise<string | null> {
+     // This function remains useful for verifying the actual PK if needed,
+     // but we primarily rely on the idField from the config now.
+     try {
+       const result = await this.connections.targetPool.query(
+         `
+         SELECT kcu.column_name
+         FROM information_schema.table_constraints tc
+         JOIN information_schema.key_column_usage kcu
+           ON tc.constraint_name = kcu.constraint_name
+           AND tc.table_schema = kcu.table_schema
+         WHERE tc.constraint_type = 'PRIMARY KEY'
+           AND tc.table_schema = $1
+           AND tc.table_name = $2
+         `,
+         [schemaName, tableName]
+       );
+       return result.rows[0]?.column_name || null;
+     } catch (error) {
+       this.logger.error(
+         `Error getting primary key for ${schemaName}.${tableName}: ${error.message}`
+       );
+       return null;
+     }
+   }
+
+   // --- processRecords ---
+   // This function needs significant changes to correctly handle the targetSchema
+
+   private async processRecords(
+     prisma: PrismaClient, // Main prisma client
+     targetSchema: string, // The ACTUAL schema to insert/update into
+     tableName: string,
+     idField: string, // Use idField from config
+     records: any[],
+     sourceSchema: ColumnSchema[],
+     targetSchemaInfo: ColumnSchema[] // Renamed to avoid confusion
+   ): Promise<void> {
+     if (!idField) {
+       this.logger.error(
+         `Cannot process records for ${tableName}: idField is missing in configuration.`
+       );
+       return; // Or throw an error
+     }
+
+     for (const record of records) {
+       const recordId = record[idField];
+       try {
+         this.logger.debug(
+           `Processing record ${
+             recordId ?? "(no id found)"
+           } for ${targetSchema}.${tableName}`
+         );
+
+         if (!record || Object.keys(record).length === 0) {
+           this.logger.warn(`Empty record found for ${tableName}, skipping`);
+           continue;
+         }
+         if (!recordId) {
+           this.logger.warn(
+             `Record missing configured ID field '${idField}' in source data, skipping: ${JSON.stringify(
+               record
+             )}`
+           );
+           continue;
+         }
+
+         // Transform data using the target schema *info* for type checking etc.
+         const transformedData = await this.dataTransformer.transformRecord(
+           record,
+           sourceSchema,
+           targetSchemaInfo, // Use the schema structure info
+           targetSchema // Pass target schema for enum validation context etc.
+         );
+
+         // Prepare data for raw query, ensuring correct types and casting strings
+         const processedData = Object.entries(transformedData).reduce(
+           (acc, [key, value]) => {
+             const columnSchema = targetSchemaInfo.find(
+               (col) => col.column_name === key
+             );
+             if (!columnSchema) return acc; // Skip columns not in target schema
+
+             const columnName = `"${columnSchema.column_name}"`; // Quote column names
+
+             // --- Helper function to escape values for SQL E'' strings ---
+             const escapeValue = (val: any): string => {
+               if (val === null || val === undefined) return "NULL";
+               if (typeof val === "boolean") return val ? "TRUE" : "FALSE";
+               if (typeof val === "number") return String(val);
+               // Escape backslashes first, then single quotes, for E'' syntax
+               return String(val).replace(/\\/g, "\\\\").replace(/'/g, "''");
+             };
+             // --- End escapeValue ---
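+             // e.g. escapeValue("O'Brien") -> "O''Brien", escapeValue(7) -> "7",
+             // escapeValue(null) -> "NULL" (rendered below as the bare SQL keyword)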
+
+             if (value === null || value === undefined) {
+               acc[columnName] = "NULL";
+               return acc;
+             }
+
+             // Special handling for EnumCastValue objects from transformer
+             if (
+               typeof value === "object" &&
+               value !== null &&
+               value["needsEnumCast"]
+             ) {
+               const enumValue = value as EnumCastValue;
+               const schemaPrefix =
+                 targetSchema === "public" ? '"public".' : `"${targetSchema}".`;
+               const quotedEnumType = `"${enumValue.enumType}"`;
+               const escapedEnumValue = escapeValue(enumValue.value);
+               // Enum values are typically strings, use E''
+               acc[
+                 columnName
+               ] = `CAST(E'${escapedEnumValue}' AS ${schemaPrefix}${quotedEnumType})`;
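+               // e.g. CAST(E'ACTIVE' AS "tenant_42"."enum_orders_status")
+               // (hypothetical schema and enum type names)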
+               return acc;
+             }
+
+             // Handle standard types - Use data_type primarily, fallback to udt_name for enums/user-defined
+             const targetType = columnSchema.data_type.toLowerCase();
+             const udtName = columnSchema.udt_name;
+             let sqlValue: string;
+             let requiresQuotes = false;
+
+             // Determine if quotes are needed based on type category
+             if (
+               [
+                 "text",
+                 "varchar",
+                 "character varying",
+                 "char",
+                 "timestamp with time zone",
+                 "timestamptz",
+                 "timestamp without time zone",
+                 "timestamp",
+                 "date",
+                 "uuid",
+                 "json",
+                 "jsonb",
+               ].includes(targetType) ||
+               targetType.includes("enum") ||
+               (targetType === "user-defined" && udtName?.startsWith("enum_"))
+             ) {
+               requiresQuotes = true;
+             }
+
+             // Escape the value appropriately
+             const escaped = escapeValue(value);
+             if (escaped === "NULL") {
+               // Use SQL NULL keyword directly
+               sqlValue = "NULL";
+             } else if (requiresQuotes) {
+               sqlValue = `E'${escaped}'`; // Use E'' for strings, dates, enums, json, etc.
+             } else {
+               sqlValue = escaped; // Use raw value for numbers, booleans
+             }
+
+             // Determine necessary casting based on target type
+             let castExpression = "";
+             if (targetType.includes("timestamp"))
+               castExpression = "::timestamp with time zone";
+             else if (targetType === "date") castExpression = "::date";
+             else if (
+               targetType === "integer" ||
+               targetType === "int" ||
+               targetType === "int4"
+             )
+               castExpression = "::integer";
+             else if (targetType === "bigint" || targetType === "int8")
+               castExpression = "::bigint";
+             else if (targetType === "smallint" || targetType === "int2")
+               castExpression = "::smallint";
+             else if (targetType === "numeric" || targetType === "decimal")
+               castExpression = "::numeric";
+             else if (targetType === "real" || targetType === "float4")
+               castExpression = "::real";
+             else if (
+               targetType === "double precision" ||
+               targetType === "float8"
+             )
+               castExpression = "::double precision";
+             else if (targetType === "boolean" || targetType === "bool")
+               castExpression = "::boolean";
+             else if (targetType === "json" || targetType === "jsonb")
+               castExpression = `::${targetType}`;
+             else if (targetType === "uuid") castExpression = "::uuid";
+             else if (targetType === "text" || targetType.includes("char"))
+               castExpression = "::text";
+             else if (
+               targetType === "user-defined" &&
+               udtName?.startsWith("enum_")
+             ) {
+               const schemaPrefix =
+                 targetSchema === "public" ? '"public".' : `"${targetSchema}".`;
+               castExpression = `::${schemaPrefix}"${udtName}"`;
+             }
+
+             acc[columnName] = `${sqlValue}${castExpression}`;
+
+             return acc;
+           },
+           {} as Record<string, string> // Accumulator holds SQL value strings
+         );
+
+         // Filter out entries where processedData might be undefined/invalid if needed
+         const validProcessedData = Object.entries(processedData).reduce(
+           (acc, [key, val]) => {
+             if (
+               val !== undefined &&
+               val !== "NULL" &&
+               val !== "E''" &&
+               val !== "E'undefined'" &&
+               val !== "E'null'"
+             ) {
+               // Additional checks for empty/invalid strings
+               acc[key] = val;
+             }
+             return acc;
+           },
+           {} as Record<string, string> // Typed like the first reduce so acc[key] type-checks
+         );
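+         // NOTE: values rendered as NULL (or as empty/placeholder strings) are
+         // dropped here entirely, so the upsert below omits those columns
+         // rather than overwriting existing target values with NULL.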
+
+         const columns = Object.keys(validProcessedData);
+         const valuesString = Object.values(validProcessedData).join(", "); // Values are already SQL strings
+
+         if (columns.length === 0) {
+           this.logger.warn(
+             `Record ${recordId} for ${tableName} resulted in no valid columns to insert/update after processing. Skipping.`
+           );
+           continue;
+         }
+
+         // Construct the SET clause for UPDATE
+         const updateSetClauses = columns
+           .filter((col) => col !== `"${idField}"`) // Don't update the PK itself
+           .map((col) => `${col} = EXCLUDED.${col}`) // Use EXCLUDED to get the value proposed for insertion
+           .join(", ");
+
+         // Ensure target schema and table name are quoted
+         const quotedSchemaTable = `"${targetSchema}"."${tableName}"`;
+         const quotedIdField = `"${idField}"`;
+
+         // Only include DO UPDATE clause if there are columns to update
+         const conflictClause = updateSetClauses
+           ? `ON CONFLICT (${quotedIdField}) DO UPDATE SET ${updateSetClauses}`
+           : `ON CONFLICT (${quotedIdField}) DO NOTHING`;
+
+         const query = `
+           INSERT INTO ${quotedSchemaTable} (${columns.join(", ")})
+           VALUES (${valuesString})
+           ${conflictClause}
+         `;
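+         // Illustrative shape of the generated statement (hypothetical names):
+         //   INSERT INTO "tenant_42"."orders" ("id", "status", "total")
+         //   VALUES (7::integer, CAST(E'ACTIVE' AS "tenant_42"."enum_orders_status"), 19.99::numeric)
+         //   ON CONFLICT ("id") DO UPDATE SET "status" = EXCLUDED."status", "total" = EXCLUDED."total"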
+         // Execute using targetPool connection for raw SQL flexibility
+         this.logger.debug(`Executing Upsert: ${query.replace(/\s\s+/g, " ")}`);
+         await this.connections.targetPool.query(query);
+       } catch (error) {
+         // Improved error logging
+         this.logger.error(
+           `Error processing record ID '${
+             recordId ?? "(unknown)"
+           }' for ${targetSchema}.${tableName}: ${error.message}`
+         );
+         this.logger.error(`Record data: ${JSON.stringify(record)}`);
+         // Consider logging transformedData and processedData as well for deep debugging
+         // this.logger.error(`Transformed data: ${JSON.stringify(transformedData)}`);
+         // this.logger.error(`Processed data (SQL values): ${JSON.stringify(processedData)}`);
+         // console.error("Underlying Error Stack:", error); // Log the original error stack
+         throw error; // Re-throw to stop the batch or be handled by the caller
+       }
+     }
+   }
+ }
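
For orientation, the sketch below shows one way this class might be driven. It is a minimal, hypothetical wiring: the collaborator constructors and the concrete entity values are assumptions inferred from the signatures in this diff, not excerpts from the package (in the package itself, BatchMigrator is orchestrated from the data-migration tool, per the "caught by DataMigrationTool" comments above, with entity definitions presumably coming from migration-config.json).

// Hypothetical usage sketch -- collaborator constructors and entity values
// are illustrative assumptions, not taken from the package source.
import { Pool } from "pg";
import { PrismaClient } from "@prisma/client";
import { BatchMigrator } from "./batch-migrator";
import { DataTransformer } from "./data-transformer";
import { SchemaUtils } from "./schema-utils";
import { DependencyResolver } from "./dependency-resolver";
import { DatabaseConnections, EntityType } from "./types";

async function migrateInvoicesForProvider(providerId: number, tenantSchema: string) {
  // Source and target pools matching the DatabaseConnections shape used above.
  const connections = {
    sourcePool: new Pool({ connectionString: process.env.SOURCE_DATABASE_URL }),
    targetPool: new Pool({ connectionString: process.env.TARGET_DATABASE_URL }),
  } as DatabaseConnections;

  const prisma = new PrismaClient();

  // Constructor arguments for the collaborators are assumed here;
  // the real classes in this package may take different parameters.
  const migrator = new BatchMigrator(
    new DataTransformer(),
    new SchemaUtils(connections),
    connections,
    new DependencyResolver(connections)
  );

  // Entity config mirroring the fields destructured in migrateEntityDataInBatches;
  // filterVia triggers the JOIN-based provider filter shown in the diff above.
  const invoices = {
    name: "invoices",
    idField: "id",
    filterColumn: "transaction_id",
    filterVia: "transactions",
  } as EntityType;

  try {
    await migrator.migrateEntityDataInBatches(prisma, invoices, providerId, tenantSchema);
  } finally {
    await prisma.$disconnect();
    await Promise.all([connections.sourcePool.end(), connections.targetPool.end()]);
  }
}

Note that migrateEntityDataInBatches toggles session_replication_role around each entity, so the target connection needs sufficient privileges for that; the signature suggests calling it once per entity and per target schema.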