@bdkinc/knex-ibmi 0.4.0 → 0.4.4

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
package/README.md CHANGED
@@ -14,6 +14,8 @@ For IBM i OSS docs, see https://ibmi-oss-docs.readthedocs.io/. ODBC guidance: ht
14
14
  - Query execution
15
15
  - Transactions
16
16
  - Streaming
17
+ - Multi-row insert strategies (auto | sequential | disabled)
18
+ - Emulated returning for UPDATE and DELETE
17
19
 
18
20
  ## Requirements
19
21
 
@@ -167,49 +169,36 @@ try {
167
169
 
168
170
  ### Streaming
169
171
 
172
+ There are two primary ways to consume a result stream: (1) classic Node stream piping with transform stages, and (2) async iteration with `for await` (which can be easier to reason about). Use a `fetchSize` to control how many rows are fetched from the driver per batch.
173
+
170
174
  ```ts
171
175
  import { knex } from "knex";
172
176
  import { DB2Dialect, DB2Config } from "@bdkinc/knex-ibmi";
173
177
  import { Transform } from "node:stream";
174
178
  import { finished } from "node:stream/promises";
175
179
 
176
- const config: DB2Config = {
177
- client: DB2Dialect,
178
- connection: {
179
- host: "your-ibm-i-host",
180
- database: "*LOCAL",
181
- user: "your-username",
182
- password: "your-password",
183
- driver: "IBM i Access ODBC Driver",
184
- connectionStringParams: {
185
- ALLOWPROCCALLS: 1,
186
- CMT: 0,
187
- DBQ: "MYLIB"
188
- },
189
- },
190
- pool: { min: 2, max: 10 },
191
- };
192
-
180
+ const config: DB2Config = { /* ...same as earlier examples... */ };
193
181
  const db = knex(config);
194
182
 
195
183
  try {
196
- const stream = await db.select("*").from("LARGETABLE").stream({ fetchSize: 100 });
184
+ const stream = await db("LARGETABLE").select("*").stream({ fetchSize: 100 });
197
185
 
186
+ // Approach 1: Pipe through a Transform stream
198
187
  const transform = new Transform({
199
188
  objectMode: true,
200
- transform(chunk, _enc, cb) {
201
- // Process each row
202
- console.log("Processing row:", chunk);
203
- cb(null, chunk);
189
+ transform(row, _enc, cb) {
190
+ // Process each row (side effects, enrichment, filtering, etc.)
191
+ console.log("Transforming row id=", row.ID);
192
+ cb(null, row);
204
193
  },
205
194
  });
206
-
207
195
  stream.pipe(transform);
208
- await finished(stream);
196
+ await finished(stream); // Wait until piping completes
209
197
 
210
- // Alternative: async iteration
211
- for await (const record of stream) {
212
- console.log(record);
198
+ // Approach 2: Async iteration (recommended for simplicity)
199
+ const iterStream = await db("LARGETABLE").select("*").stream({ fetchSize: 200 });
200
+ for await (const row of iterStream) {
201
+ console.log("Iter row id=", row.ID);
213
202
  }
214
203
  } catch (error) {
215
204
  console.error("Streaming error:", error);
@@ -332,16 +321,12 @@ ibmi-migrations migrate:currentVersion # Show current migration version
332
321
  ibmi-migrations migrate:list # List all migrations
333
322
  ibmi-migrations migrate:make <name> # Create new migration file
334
323
 
335
- # Legacy aliases (backward compatibility):
336
- ibmi-migrations latest # Same as migrate:latest
337
- ibmi-migrations rollback # Same as migrate:rollback
338
- ibmi-migrations status # Same as migrate:status
339
-
340
324
  # Options:
341
325
  ibmi-migrations migrate:status --env production
342
326
  ibmi-migrations migrate:latest --knexfile ./config/knexfile.js
327
+ ibmi-migrations migrate:latest --knexfile ./knexfile.ts # Use TypeScript knexfile
343
328
  ibmi-migrations migrate:make create_users_table
344
- ibmi-migrations migrate:make add_email_column -x ts # TypeScript migration
329
+ ibmi-migrations migrate:make add_email_column -x ts # TypeScript migration
345
330
  ```
346
331
 
347
332
  📖 **See [MIGRATIONS.md](./MIGRATIONS.md) for complete documentation**
@@ -365,6 +350,63 @@ const config = {
365
350
 
366
351
  **Warning**: Standard Knex migrations may still hang on lock operations. The built-in IBM i migration system is strongly recommended.
367
352
 
353
+ ## Multi-Row Insert Strategies
354
+
355
+ Configure via `ibmi.multiRowInsert` in the knex config:
356
+
357
+ ```ts
358
+ const db = knex({
359
+ client: DB2Dialect,
360
+ connection: { /* ... */ },
361
+ ibmi: { multiRowInsert: 'auto' } // 'auto' | 'sequential' | 'disabled'
362
+ });
363
+ ```
364
+
365
+ - `auto` (default): Generates a single INSERT with multiple VALUES lists. For `.returning('*')`, or when no explicit column list is given, it returns all inserted rows (lenient fallback). Identity values are whatever DB2 ODBC surfaces for that multi-row statement.
366
+ - `sequential`: The compiler emits a single-row statement (for the first row), but at execution time each row is inserted individually in a loop to reliably collect identity values (using `IDENTITY_VAL_LOCAL()` per row). Suitable when you need each generated identity.
367
+ - `disabled`: Falls back to the legacy behavior, in which only the first row is inserted (additional rows are ignored). Useful for strict backward compatibility.
368
+
369
+ If you specify `.returning(['COL1', 'COL2'])` with multi-row inserts, those columns are selected; otherwise `IDENTITY_VAL_LOCAL()` (single-row) or `*` (multi-row) is used as a lenient fallback.
370
+
371
+ ## Returning Behavior (INSERT / UPDATE / DELETE)
372
+
373
+ Native `RETURNING` is not broadly supported over ODBC on IBM i. The dialect provides pragmatic emulation:
374
+
375
+ ### INSERT
376
+ - `auto` multi-row: generates a single multi-values INSERT. When no explicit column list is requested it returns all inserted rows (`*`) as a lenient fallback. Some installations may see this internally wrapped using a `SELECT * FROM FINAL TABLE( INSERT ... )` pattern in logs or debug output; that wrapper is only an implementation detail to surface inserted rows.
377
+ - `sequential`: inserts each row one at a time so it can reliably call `IDENTITY_VAL_LOCAL()` after each insert; builds an array of returned rows.
378
+ - `disabled`: legacy single-row insert behavior; additional rows in the values array are ignored.
379
+
380
+ ### UPDATE
381
+ - Executes the UPDATE.
382
+ - Re-selects the affected rows using the original WHERE clause when `.returning(...)` is requested.
383
+
384
+ ### DELETE
385
+ - Selects the rows to be deleted (capturing requested returning columns or `*`).
386
+ - Executes the DELETE.
387
+ - Returns the previously selected rows.
388
+
389
+ ### Notes
390
+ - `returning('*')` can be expensive on large result sets—limit the column list when possible.
391
+ - For guaranteed, ordered identity values across many inserted rows use the `sequential` strategy.
392
+
393
+ ## Configuration Summary
394
+
395
+ ```ts
396
+ interface IbmiDialectConfig {
397
+ multiRowInsert?: 'auto' | 'sequential' | 'disabled';
398
+ sequentialInsertTransactional?: boolean; // if true, wraps sequential loop in BEGIN/COMMIT
399
+ }
400
+ ```
401
+
402
+ Attach under the root knex config as `ibmi`.
403
+
404
+ ### Transactional Sequential Inserts
405
+
406
+ When `ibmi.sequentialInsertTransactional` is `true`, the dialect will attempt `BEGIN` before the per-row loop and `COMMIT` after. On commit failure it will attempt a `ROLLBACK`. If `BEGIN` is not supported, it logs a warning and continues non-transactionally.
407
+
408
+ <!-- Benchmarks section intentionally removed. Benchmarking is handled in the external test harness project -->
409
+
368
410
  ## Links
369
411
 
370
412
  - Knex: https://knexjs.org/
package/dist/cli.cjs CHANGED
@@ -255,6 +255,9 @@ function showHelp() {
255
255
  console.log(
256
256
  " --knexfile <file> - Specify knexfile path (default: ./knexfile.js)"
257
257
  );
258
+ console.log(
259
+ " - Supports both .js and .ts knexfiles"
260
+ );
258
261
  console.log(
259
262
  " -x <extension> - File extension for new migrations (js|ts)"
260
263
  );
@@ -264,6 +267,7 @@ function showHelp() {
264
267
  console.log(" ibmi-migrations migrate:latest");
265
268
  console.log(" ibmi-migrations migrate:rollback");
266
269
  console.log(" ibmi-migrations migrate:status --env production");
270
+ console.log(" ibmi-migrations migrate:latest --knexfile knexfile.ts");
267
271
  console.log(" ibmi-migrations migrate:make create_users_table");
268
272
  console.log(" ibmi-migrations migrate:make add_email_to_users -x ts");
269
273
  console.log(" ibmi-migrations latest --knexfile ./config/knexfile.js");
@@ -334,7 +338,7 @@ export const down = (knex) => {
334
338
  `;
335
339
  }
336
340
  function getTsMigrationTemplate(migrationName) {
337
- return `import { Knex } from "knex";
341
+ return `import type { Knex } from "knex";
338
342
 
339
343
  export const up = async (knex: Knex): Promise<void> => {
340
344
  // Add your migration logic here
package/dist/index.d.mts CHANGED
@@ -54,6 +54,15 @@ declare class DB2Client extends knex.Client {
54
54
  destroyRawConnection(connection: any): Promise<any>;
55
55
  _getConnectionString(connectionConfig: DB2ConnectionConfig): string;
56
56
  _query(connection: Connection, obj: any): Promise<any>;
57
+ /**
58
+ * Execute UPDATE with returning clause using transaction + SELECT approach
59
+ * Since IBM i DB2 doesn't support FINAL TABLE with UPDATE, we:
60
+ * 1. Execute the UPDATE statement
61
+ * 2. Execute a SELECT to get the updated values using the same WHERE clause
62
+ */
63
+ private executeUpdateReturning;
64
+ private executeSequentialInsert;
65
+ private executeDeleteReturning;
57
66
  private normalizeQueryObject;
58
67
  private determineQueryMethod;
59
68
  private isSelectMethod;
@@ -71,6 +80,7 @@ declare class DB2Client extends knex.Client {
71
80
  }, stream: any, options: {
72
81
  fetchSize?: number;
73
82
  }): Promise<unknown>;
83
+ private calculateOptimalFetchSize;
74
84
  private _createCursorStream;
75
85
  transaction(container: any, config: any, outerTx: any): Knex.Transaction;
76
86
  schemaCompiler(tableBuilder: any): any;
@@ -80,7 +90,12 @@ declare class DB2Client extends knex.Client {
80
90
  createMigrationRunner(config?: Partial<IBMiMigrationConfig>): IBMiMigrationRunner;
81
91
  processResponse(obj: QueryObject | null, runner: any): any;
82
92
  private validateResponse;
93
+ private wrapError;
94
+ private shouldRetryQuery;
95
+ private retryQuery;
83
96
  private isConnectionError;
97
+ private isTimeoutError;
98
+ private isSQLError;
84
99
  private processSqlMethod;
85
100
  }
86
101
  interface DB2PoolConfig {
@@ -131,6 +146,10 @@ interface DB2Config extends Knex.Config {
131
146
  client: any;
132
147
  connection: DB2ConnectionConfig;
133
148
  pool?: DB2PoolConfig;
149
+ ibmi?: {
150
+ multiRowInsert?: "auto" | "sequential" | "disabled";
151
+ sequentialInsertTransactional?: boolean;
152
+ };
134
153
  }
135
154
  declare const DB2Dialect: typeof DB2Client;
136
155
 
package/dist/index.d.ts CHANGED
@@ -54,6 +54,15 @@ declare class DB2Client extends knex.Client {
54
54
  destroyRawConnection(connection: any): Promise<any>;
55
55
  _getConnectionString(connectionConfig: DB2ConnectionConfig): string;
56
56
  _query(connection: Connection, obj: any): Promise<any>;
57
+ /**
58
+ * Execute UPDATE with returning clause using transaction + SELECT approach
59
+ * Since IBM i DB2 doesn't support FINAL TABLE with UPDATE, we:
60
+ * 1. Execute the UPDATE statement
61
+ * 2. Execute a SELECT to get the updated values using the same WHERE clause
62
+ */
63
+ private executeUpdateReturning;
64
+ private executeSequentialInsert;
65
+ private executeDeleteReturning;
57
66
  private normalizeQueryObject;
58
67
  private determineQueryMethod;
59
68
  private isSelectMethod;
@@ -71,6 +80,7 @@ declare class DB2Client extends knex.Client {
71
80
  }, stream: any, options: {
72
81
  fetchSize?: number;
73
82
  }): Promise<unknown>;
83
+ private calculateOptimalFetchSize;
74
84
  private _createCursorStream;
75
85
  transaction(container: any, config: any, outerTx: any): Knex.Transaction;
76
86
  schemaCompiler(tableBuilder: any): any;
@@ -80,7 +90,12 @@ declare class DB2Client extends knex.Client {
80
90
  createMigrationRunner(config?: Partial<IBMiMigrationConfig>): IBMiMigrationRunner;
81
91
  processResponse(obj: QueryObject | null, runner: any): any;
82
92
  private validateResponse;
93
+ private wrapError;
94
+ private shouldRetryQuery;
95
+ private retryQuery;
83
96
  private isConnectionError;
97
+ private isTimeoutError;
98
+ private isSQLError;
84
99
  private processSqlMethod;
85
100
  }
86
101
  interface DB2PoolConfig {
@@ -131,6 +146,10 @@ interface DB2Config extends Knex.Config {
131
146
  client: any;
132
147
  connection: DB2ConnectionConfig;
133
148
  pool?: DB2PoolConfig;
149
+ ibmi?: {
150
+ multiRowInsert?: "auto" | "sequential" | "disabled";
151
+ sequentialInsertTransactional?: boolean;
152
+ };
134
153
  }
135
154
  declare const DB2Dialect: typeof DB2Client;
136
155