@bdkinc/knex-ibmi 0.3.22 → 0.4.3
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/README.md +290 -139
- package/dist/cli.cjs +531 -0
- package/dist/index.d.mts +156 -0
- package/dist/index.d.ts +55 -53
- package/dist/index.js +969 -158
- package/dist/index.js.map +1 -0
- package/dist/index.mjs +1359 -0
- package/dist/index.mjs.map +1 -0
- package/package.json +29 -20
package/README.md
CHANGED
|
@@ -1,231 +1,224 @@
|
|
|
1
|
-
|
|
1
|
+
# @bdkinc/knex-ibmi
|
|
2
2
|
|
|
3
|
-
|
|
3
|
+
[npm](https://npmjs.org/package/@bdkinc/knex-ibmi)
|
|
4
4
|
|
|
5
|
-
|
|
5
|
+
Knex.js dialect for DB2 on IBM i (via ODBC). Built for usage with the official IBM i Access ODBC driver and tested on IBM i.
|
|
6
6
|
|
|
7
|
-
|
|
8
|
-
This library uses the ODBC (as recommended here https://ibmi-oss-docs.readthedocs.io/en/latest/odbc/README.html)
|
|
9
|
-
driver and is only tested on IBM i.
|
|
7
|
+
For IBM i OSS docs, see https://ibmi-oss-docs.readthedocs.io/. ODBC guidance: https://ibmi-oss-docs.readthedocs.io/en/latest/odbc/README.html.
|
|
10
8
|
|
|
11
|
-
|
|
9
|
+
> Found an issue or have a question? Please open an issue.
|
|
12
10
|
|
|
13
|
-
##
|
|
11
|
+
## Features
|
|
14
12
|
|
|
15
13
|
- Query building
|
|
16
14
|
- Query execution
|
|
17
15
|
- Transactions
|
|
18
16
|
- Streaming
|
|
17
|
+
- Multi-row insert strategies (auto | sequential | disabled)
|
|
18
|
+
- Emulated returning for UPDATE and DELETE
|
|
19
|
+
|
|
20
|
+
## Requirements
|
|
21
|
+
|
|
22
|
+
- Node.js >= 16
|
|
23
|
+
- ODBC driver (IBM i Access ODBC Driver)
|
|
19
24
|
|
|
20
25
|
## Installation
|
|
21
26
|
|
|
27
|
+
```bash
|
|
28
|
+
npm install @bdkinc/knex-ibmi knex odbc
|
|
22
29
|
```
|
|
23
|
-
npm install --save odbc knex @bdkinc/knex-ibmi
|
|
24
|
-
```
|
|
25
30
|
|
|
26
|
-
|
|
31
|
+
## Quick Start
|
|
32
|
+
|
|
33
|
+
```js
|
|
34
|
+
import knex from "knex";
|
|
35
|
+
import { DB2Dialect } from "@bdkinc/knex-ibmi";
|
|
27
36
|
|
|
28
|
-
|
|
37
|
+
/** @type {import("@bdkinc/knex-ibmi").DB2Config} */
|
|
38
|
+
const config = {
|
|
39
|
+
client: DB2Dialect,
|
|
40
|
+
connection: {
|
|
41
|
+
host: "your-ibm-i-host",
|
|
42
|
+
database: "*LOCAL",
|
|
43
|
+
user: "your-username",
|
|
44
|
+
password: "your-password",
|
|
45
|
+
driver: "IBM i Access ODBC Driver",
|
|
46
|
+
connectionStringParams: { DBQ: "MYLIB" },
|
|
47
|
+
},
|
|
48
|
+
pool: { min: 2, max: 10 },
|
|
49
|
+
};
|
|
29
50
|
|
|
30
|
-
|
|
51
|
+
const db = knex(config);
|
|
31
52
|
|
|
32
|
-
|
|
53
|
+
try {
|
|
54
|
+
const results = await db.select("*").from("MYTABLE").where({ STATUS: "A" });
|
|
55
|
+
console.log(results);
|
|
56
|
+
} catch (error) {
|
|
57
|
+
console.error("Database error:", error);
|
|
58
|
+
} finally {
|
|
59
|
+
await db.destroy();
|
|
60
|
+
}
|
|
61
|
+
```
|
|
33
62
|
|
|
34
63
|
## Usage
|
|
35
64
|
|
|
36
|
-
This
|
|
65
|
+
This package can be used with CommonJS, ESM, or TypeScript.
|
|
37
66
|
|
|
38
|
-
###
|
|
67
|
+
### CommonJS
|
|
39
68
|
|
|
40
|
-
```
|
|
69
|
+
```js
|
|
41
70
|
const knex = require("knex");
|
|
42
71
|
const { DB2Dialect } = require("@bdkinc/knex-ibmi");
|
|
43
72
|
|
|
44
73
|
const db = knex({
|
|
45
74
|
client: DB2Dialect,
|
|
46
75
|
connection: {
|
|
47
|
-
host: "
|
|
48
|
-
database: "*LOCAL",
|
|
49
|
-
user: "
|
|
50
|
-
password: "
|
|
51
|
-
driver: "IBM i Access ODBC Driver",
|
|
52
|
-
connectionStringParams: {
|
|
53
|
-
|
|
54
|
-
|
|
55
|
-
|
|
56
|
-
DBQ: "MYLIB", // library or schema that holds the tables
|
|
76
|
+
host: "your-ibm-i-host",
|
|
77
|
+
database: "*LOCAL",
|
|
78
|
+
user: "your-username",
|
|
79
|
+
password: "your-password",
|
|
80
|
+
driver: "IBM i Access ODBC Driver",
|
|
81
|
+
connectionStringParams: {
|
|
82
|
+
ALLOWPROCCALLS: 1,
|
|
83
|
+
CMT: 0,
|
|
84
|
+
DBQ: "MYLIB"
|
|
57
85
|
},
|
|
58
86
|
},
|
|
59
|
-
pool: {
|
|
60
|
-
min: 2,
|
|
61
|
-
max: 10,
|
|
62
|
-
},
|
|
87
|
+
pool: { min: 2, max: 10 },
|
|
63
88
|
});
|
|
64
89
|
|
|
65
|
-
|
|
66
|
-
|
|
67
|
-
|
|
68
|
-
.
|
|
69
|
-
.
|
|
70
|
-
.
|
|
90
|
+
// Example query
|
|
91
|
+
db.select("*")
|
|
92
|
+
.from("MYTABLE")
|
|
93
|
+
.where({ STATUS: "A" })
|
|
94
|
+
.then(results => console.log(results))
|
|
95
|
+
.catch(error => console.error("Database error:", error))
|
|
96
|
+
.finally(() => db.destroy());
|
|
71
97
|
```
|
|
72
98
|
|
|
73
99
|
### ESM
|
|
74
100
|
|
|
75
|
-
```
|
|
101
|
+
```js
|
|
76
102
|
import knex from "knex";
|
|
77
103
|
import { DB2Dialect } from "@bdkinc/knex-ibmi";
|
|
78
104
|
|
|
79
|
-
/**
|
|
80
|
-
* @type {import("@bdkinc/knex-ibmi").DB2Config}
|
|
81
|
-
*/
|
|
105
|
+
/** @type {import("@bdkinc/knex-ibmi").DB2Config} */
|
|
82
106
|
const config = {
|
|
83
107
|
client: DB2Dialect,
|
|
84
108
|
connection: {
|
|
85
|
-
host: "
|
|
86
|
-
database: "*LOCAL",
|
|
87
|
-
user: "
|
|
88
|
-
password: "
|
|
89
|
-
driver: "IBM i Access ODBC Driver",
|
|
90
|
-
connectionStringParams: {
|
|
91
|
-
|
|
92
|
-
|
|
93
|
-
|
|
94
|
-
DBQ: "MYLIB", // library or schema that holds the tables
|
|
109
|
+
host: "your-ibm-i-host",
|
|
110
|
+
database: "*LOCAL",
|
|
111
|
+
user: "your-username",
|
|
112
|
+
password: "your-password",
|
|
113
|
+
driver: "IBM i Access ODBC Driver",
|
|
114
|
+
connectionStringParams: {
|
|
115
|
+
ALLOWPROCCALLS: 1,
|
|
116
|
+
CMT: 0,
|
|
117
|
+
DBQ: "MYLIB"
|
|
95
118
|
},
|
|
96
119
|
},
|
|
97
|
-
pool: {
|
|
98
|
-
min: 2,
|
|
99
|
-
max: 10,
|
|
100
|
-
},
|
|
120
|
+
pool: { min: 2, max: 10 },
|
|
101
121
|
};
|
|
102
122
|
|
|
103
123
|
const db = knex(config);
|
|
104
124
|
|
|
105
125
|
try {
|
|
106
|
-
const
|
|
107
|
-
console.log(
|
|
108
|
-
} catch (
|
|
109
|
-
|
|
126
|
+
const results = await db.select("*").from("MYTABLE").where({ STATUS: "A" });
|
|
127
|
+
console.log(results);
|
|
128
|
+
} catch (error) {
|
|
129
|
+
console.error("Database error:", error);
|
|
110
130
|
} finally {
|
|
111
|
-
|
|
131
|
+
await db.destroy();
|
|
112
132
|
}
|
|
113
133
|
```
|
|
114
134
|
|
|
115
135
|
### TypeScript
|
|
116
136
|
|
|
117
|
-
```
|
|
137
|
+
```ts
|
|
118
138
|
import { knex } from "knex";
|
|
119
139
|
import { DB2Dialect, DB2Config } from "@bdkinc/knex-ibmi";
|
|
120
140
|
|
|
121
141
|
const config: DB2Config = {
|
|
122
142
|
client: DB2Dialect,
|
|
123
143
|
connection: {
|
|
124
|
-
host: "
|
|
125
|
-
database: "*LOCAL",
|
|
126
|
-
user: "
|
|
127
|
-
password: "
|
|
128
|
-
driver: "IBM i Access ODBC Driver",
|
|
129
|
-
connectionStringParams: {
|
|
130
|
-
|
|
131
|
-
|
|
132
|
-
|
|
133
|
-
DBQ: "MYLIB", // library or schema that holds the tables
|
|
144
|
+
host: "your-ibm-i-host",
|
|
145
|
+
database: "*LOCAL",
|
|
146
|
+
user: "your-username",
|
|
147
|
+
password: "your-password",
|
|
148
|
+
driver: "IBM i Access ODBC Driver",
|
|
149
|
+
connectionStringParams: {
|
|
150
|
+
ALLOWPROCCALLS: 1,
|
|
151
|
+
CMT: 0,
|
|
152
|
+
DBQ: "MYLIB"
|
|
134
153
|
},
|
|
135
154
|
},
|
|
136
|
-
pool: {
|
|
137
|
-
min: 2,
|
|
138
|
-
max: 10,
|
|
139
|
-
},
|
|
155
|
+
pool: { min: 2, max: 10 },
|
|
140
156
|
};
|
|
141
157
|
|
|
142
158
|
const db = knex(config);
|
|
143
159
|
|
|
144
160
|
try {
|
|
145
|
-
const
|
|
146
|
-
console.log(
|
|
147
|
-
} catch (
|
|
148
|
-
|
|
161
|
+
const results = await db.select("*").from("MYTABLE").where({ STATUS: "A" });
|
|
162
|
+
console.log(results);
|
|
163
|
+
} catch (error) {
|
|
164
|
+
console.error("Database error:", error);
|
|
149
165
|
} finally {
|
|
150
|
-
|
|
166
|
+
await db.destroy();
|
|
151
167
|
}
|
|
152
168
|
```
|
|
153
169
|
|
|
154
|
-
### Streaming
|
|
170
|
+
### Streaming
|
|
171
|
+
|
|
172
|
+
There are two primary ways to consume a result stream: (1) classic Node stream piping with transform stages, and (2) async iteration with `for await` (which can be easier to reason about). Use a `fetchSize` to control how many rows are fetched from the driver per batch.
|
|
155
173
|
|
|
156
|
-
```
|
|
174
|
+
```ts
|
|
157
175
|
import { knex } from "knex";
|
|
158
176
|
import { DB2Dialect, DB2Config } from "@bdkinc/knex-ibmi";
|
|
159
177
|
import { Transform } from "node:stream";
|
|
160
178
|
import { finished } from "node:stream/promises";
|
|
161
179
|
|
|
162
|
-
const config: DB2Config = {
|
|
163
|
-
client: DB2Dialect,
|
|
164
|
-
connection: {
|
|
165
|
-
host: "localhost", // hostname or ip address of server
|
|
166
|
-
database: "*LOCAL", // usually named in your odbc.ini connection
|
|
167
|
-
user: "<user>", // IBMi username
|
|
168
|
-
password: "<password>", // IBMi password
|
|
169
|
-
driver: "IBM i Access ODBC Driver", // defined in odbcinst.ini
|
|
170
|
-
connectionStringParams: {
|
|
171
|
-
// DSN connection string parameters https://www.ibm.com/docs/en/i/7.5?topic=details-connection-string-keywords
|
|
172
|
-
ALLOWPROCCALLS: 1,
|
|
173
|
-
CMT: 0,
|
|
174
|
-
DBQ: "MYLIB", // library or schema that holds the tables
|
|
175
|
-
},
|
|
176
|
-
},
|
|
177
|
-
pool: {
|
|
178
|
-
min: 2,
|
|
179
|
-
max: 10,
|
|
180
|
-
},
|
|
181
|
-
};
|
|
182
|
-
|
|
180
|
+
const config: DB2Config = { /* ...same as earlier examples... */ };
|
|
183
181
|
const db = knex(config);
|
|
184
182
|
|
|
185
183
|
try {
|
|
186
|
-
const
|
|
187
|
-
.select("*")
|
|
188
|
-
.from("table")
|
|
189
|
-
.stream({ fetchSize: 1 }); // optional, fetchSize defaults to 1
|
|
184
|
+
const stream = await db("LARGETABLE").select("*").stream({ fetchSize: 100 });
|
|
190
185
|
|
|
191
|
-
//
|
|
186
|
+
// Approach 1: Pipe through a Transform stream
|
|
192
187
|
const transform = new Transform({
|
|
193
188
|
objectMode: true,
|
|
194
|
-
transform(
|
|
195
|
-
|
|
196
|
-
|
|
197
|
-
|
|
198
|
-
) {
|
|
199
|
-
// chunk will be an array of objects
|
|
200
|
-
// the length of the array is the chunk size
|
|
201
|
-
console.log(chunk);
|
|
202
|
-
callback(null, chunk);
|
|
189
|
+
transform(row, _enc, cb) {
|
|
190
|
+
// Process each row (side effects, enrichment, filtering, etc.)
|
|
191
|
+
console.log("Transforming row id=", row.ID);
|
|
192
|
+
cb(null, row);
|
|
203
193
|
},
|
|
204
194
|
});
|
|
195
|
+
stream.pipe(transform);
|
|
196
|
+
await finished(stream); // Wait until piping completes
|
|
205
197
|
|
|
206
|
-
//
|
|
207
|
-
|
|
208
|
-
|
|
209
|
-
|
|
210
|
-
|
|
211
|
-
// or we can iterate through each record
|
|
212
|
-
for await (const record of data) {
|
|
213
|
-
console.log(record);
|
|
198
|
+
// Approach 2: Async iteration (recommended for simplicity)
|
|
199
|
+
const iterStream = await db("LARGETABLE").select("*").stream({ fetchSize: 200 });
|
|
200
|
+
for await (const row of iterStream) {
|
|
201
|
+
console.log("Iter row id=", row.ID);
|
|
214
202
|
}
|
|
215
|
-
} catch (
|
|
216
|
-
|
|
203
|
+
} catch (error) {
|
|
204
|
+
console.error("Streaming error:", error);
|
|
217
205
|
} finally {
|
|
218
|
-
|
|
206
|
+
await db.destroy();
|
|
219
207
|
}
|
|
220
208
|
```
|
|
221
209
|
|
|
222
|
-
##
|
|
210
|
+
## ODBC Driver Setup
|
|
211
|
+
|
|
212
|
+
If you don't know the name of your installed driver, check `odbcinst.ini`. Find its path with:
|
|
213
|
+
|
|
214
|
+
```bash
|
|
215
|
+
odbcinst -j
|
|
216
|
+
```
|
|
223
217
|
|
|
224
|
-
|
|
225
|
-
There you should see an entry like the one below:
|
|
218
|
+
Example entries:
|
|
226
219
|
|
|
227
220
|
```
|
|
228
|
-
[IBM i Access ODBC Driver]
|
|
221
|
+
[IBM i Access ODBC Driver] # driver name in square brackets
|
|
229
222
|
Description=IBM i Access for Linux ODBC Driver
|
|
230
223
|
Driver=/opt/ibm/iaccess/lib/libcwbodbc.so
|
|
231
224
|
Setup=/opt/ibm/iaccess/lib/libcwbodbcs.so
|
|
@@ -244,20 +237,178 @@ DontDLClose=1
|
|
|
244
237
|
UsageCount=1
|
|
245
238
|
```
|
|
246
239
|
|
|
247
|
-
If
|
|
248
|
-
|
|
249
|
-
|
|
250
|
-
|
|
240
|
+
If unixODBC is using the wrong config directory (e.g., your configs are in `/etc` but it expects elsewhere), set:
|
|
241
|
+
|
|
242
|
+
```bash
|
|
243
|
+
export ODBCINI=/etc
|
|
244
|
+
export ODBCSYSINI=/etc
|
|
245
|
+
```
|
|
251
246
|
|
|
252
247
|
## Bundling with Vite
|
|
253
|
-
If you are bundling your application with Vite, then you will need to add this to your config.
|
|
254
248
|
|
|
255
|
-
|
|
256
|
-
// vite.config.js
|
|
249
|
+
If you bundle with Vite, exclude certain native dependencies during the dependency optimization step:
|
|
257
250
|
|
|
251
|
+
```js
|
|
252
|
+
// vite.config.js
|
|
258
253
|
export default {
|
|
259
254
|
optimizeDeps: {
|
|
260
255
|
exclude: ["@mapbox"],
|
|
256
|
+
},
|
|
257
|
+
};
|
|
258
|
+
```
|
|
259
|
+
|
|
260
|
+
## Migrations
|
|
261
|
+
|
|
262
|
+
⚠️ **Important**: Standard Knex migrations don't work reliably with IBM i DB2 due to auto-commit DDL operations and locking issues.
|
|
263
|
+
|
|
264
|
+
### Recommended: Use Built-in IBM i Migration System
|
|
265
|
+
|
|
266
|
+
The knex-ibmi library includes a custom migration system that bypasses Knex's problematic locking mechanism:
|
|
267
|
+
|
|
268
|
+
```js
|
|
269
|
+
import { createIBMiMigrationRunner } from "@bdkinc/knex-ibmi";
|
|
270
|
+
|
|
271
|
+
const migrationRunner = createIBMiMigrationRunner(db, {
|
|
272
|
+
directory: "./migrations",
|
|
273
|
+
tableName: "KNEX_MIGRATIONS",
|
|
274
|
+
schemaName: "MYSCHEMA"
|
|
275
|
+
});
|
|
276
|
+
|
|
277
|
+
// Run migrations
|
|
278
|
+
await migrationRunner.latest();
|
|
279
|
+
|
|
280
|
+
// Rollback
|
|
281
|
+
await migrationRunner.rollback();
|
|
282
|
+
|
|
283
|
+
// Check status
|
|
284
|
+
const pending = await migrationRunner.listPending();
|
|
285
|
+
```
|
|
286
|
+
|
|
287
|
+
**CLI Usage:** The package includes a built-in CLI that can be used via npm scripts or npx:
|
|
288
|
+
|
|
289
|
+
```bash
|
|
290
|
+
# Install globally (optional)
|
|
291
|
+
npm install -g @bdkinc/knex-ibmi
|
|
292
|
+
|
|
293
|
+
# Or use via npx (recommended)
|
|
294
|
+
npx ibmi-migrations migrate:latest # Run pending migrations
|
|
295
|
+
npx ibmi-migrations migrate:rollback # Rollback last batch
|
|
296
|
+
npx ibmi-migrations migrate:status # Show migration status
|
|
297
|
+
npx ibmi-migrations migrate:make create_users_table # Create new JS migration
|
|
298
|
+
npx ibmi-migrations migrate:make add_email_column -x ts # Create new TS migration
|
|
299
|
+
|
|
300
|
+
# Or add to your package.json scripts:
|
|
301
|
+
{
|
|
302
|
+
"scripts": {
|
|
303
|
+
"migrate:latest": "ibmi-migrations migrate:latest",
|
|
304
|
+
"migrate:rollback": "ibmi-migrations migrate:rollback",
|
|
305
|
+
"migrate:status": "ibmi-migrations migrate:status",
|
|
306
|
+
"migrate:make": "ibmi-migrations migrate:make"
|
|
261
307
|
}
|
|
262
308
|
}
|
|
309
|
+
|
|
310
|
+
# Then run with npm:
|
|
311
|
+
npm run migrate:latest
|
|
312
|
+
npm run migrate:status
|
|
313
|
+
```
|
|
314
|
+
|
|
315
|
+
**Full CLI API (similar to Knex):**
|
|
316
|
+
```bash
|
|
317
|
+
ibmi-migrations migrate:latest # Run all pending migrations
|
|
318
|
+
ibmi-migrations migrate:rollback # Rollback last migration batch
|
|
319
|
+
ibmi-migrations migrate:status # Show detailed migration status
|
|
320
|
+
ibmi-migrations migrate:currentVersion # Show current migration version
|
|
321
|
+
ibmi-migrations migrate:list # List all migrations
|
|
322
|
+
ibmi-migrations migrate:make <name> # Create new migration file
|
|
323
|
+
|
|
324
|
+
# Options:
|
|
325
|
+
ibmi-migrations migrate:status --env production
|
|
326
|
+
ibmi-migrations migrate:latest --knexfile ./config/knexfile.js
|
|
327
|
+
ibmi-migrations migrate:make create_users_table
|
|
328
|
+
ibmi-migrations migrate:make add_email_column -x ts # TypeScript migration
|
|
329
|
+
```
|
|
330
|
+
|
|
331
|
+
📖 **See [MIGRATIONS.md](./MIGRATIONS.md) for complete documentation**
|
|
332
|
+
|
|
333
|
+
### Alternative: Standard Knex with Transactions Disabled
|
|
334
|
+
|
|
335
|
+
If you must use standard Knex migrations, disable transactions to avoid issues:
|
|
336
|
+
|
|
337
|
+
```js
|
|
338
|
+
/** @type {import("@bdkinc/knex-ibmi").DB2Config} */
|
|
339
|
+
const config = {
|
|
340
|
+
client: DB2Dialect,
|
|
341
|
+
connection: { /* your connection config */ },
|
|
342
|
+
migrations: {
|
|
343
|
+
disableTransactions: true, // Required for IBM i
|
|
344
|
+
directory: './migrations',
|
|
345
|
+
tableName: 'knex_migrations',
|
|
346
|
+
},
|
|
347
|
+
};
|
|
348
|
+
```
|
|
349
|
+
|
|
350
|
+
**Warning**: Standard Knex migrations may still hang on lock operations. The built-in IBM i migration system is strongly recommended.
|
|
351
|
+
|
|
352
|
+
## Multi-Row Insert Strategies
|
|
353
|
+
|
|
354
|
+
Configure via `ibmi.multiRowInsert` in the knex config:
|
|
355
|
+
|
|
356
|
+
```ts
|
|
357
|
+
const db = knex({
|
|
358
|
+
client: DB2Dialect,
|
|
359
|
+
connection: { /* ... */ },
|
|
360
|
+
ibmi: { multiRowInsert: 'auto' } // 'auto' | 'sequential' | 'disabled'
|
|
361
|
+
});
|
|
362
|
+
```
|
|
363
|
+
|
|
364
|
+
- `auto` (default): Generates a single INSERT with multiple VALUES lists. For `.returning('*')` or no explicit column list it returns all inserted rows (lenient fallback). Identity values are whatever DB2 ODBC surfaces for that multi-row statement.
|
|
365
|
+
- `sequential`: Compiler shows a single-row statement (first row) but at execution time each row is inserted individually inside a loop to reliably collect identity values (using `IDENTITY_VAL_LOCAL()` per row). Suitable when you need each generated identity.
|
|
366
|
+
- `disabled`: Falls back to legacy behavior: only the first row is inserted (others ignored). Useful for strict backward compatibility.
|
|
367
|
+
|
|
368
|
+
If you specify `.returning(['COL1', 'COL2'])` with multi-row inserts, those columns are selected; otherwise `IDENTITY_VAL_LOCAL()` (single-row) or `*` (multi-row) is used as a lenient fallback.
|
|
369
|
+
|
|
370
|
+
## Returning Behavior (INSERT / UPDATE / DELETE)
|
|
371
|
+
|
|
372
|
+
Native `RETURNING` is not broadly supported over ODBC on IBM i. The dialect provides pragmatic emulation:
|
|
373
|
+
|
|
374
|
+
### INSERT
|
|
375
|
+
- `auto` multi-row: generates a single multi-values INSERT. When no explicit column list is requested it returns all inserted rows (`*`) as a lenient fallback. Some installations may see this internally wrapped using a `SELECT * FROM FINAL TABLE( INSERT ... )` pattern in logs or debug output; that wrapper is only an implementation detail to surface inserted rows.
|
|
376
|
+
- `sequential`: inserts each row one at a time so it can reliably call `IDENTITY_VAL_LOCAL()` after each insert; builds an array of returned rows.
|
|
377
|
+
- `disabled`: legacy single-row insert behavior; additional rows in the values array are ignored.
|
|
378
|
+
|
|
379
|
+
### UPDATE
|
|
380
|
+
- Executes the UPDATE.
|
|
381
|
+
- Re-selects the affected rows using the original WHERE clause when `.returning(...)` is requested.
|
|
382
|
+
|
|
383
|
+
### DELETE
|
|
384
|
+
- Selects the rows to be deleted (capturing requested returning columns or `*`).
|
|
385
|
+
- Executes the DELETE.
|
|
386
|
+
- Returns the previously selected rows.
|
|
387
|
+
|
|
388
|
+
### Notes
|
|
389
|
+
- `returning('*')` can be expensive on large result sets—limit the column list when possible.
|
|
390
|
+
- For guaranteed, ordered identity values across many inserted rows use the `sequential` strategy.
|
|
391
|
+
|
|
392
|
+
## Configuration Summary
|
|
393
|
+
|
|
394
|
+
```ts
|
|
395
|
+
interface IbmiDialectConfig {
|
|
396
|
+
multiRowInsert?: 'auto' | 'sequential' | 'disabled';
|
|
397
|
+
sequentialInsertTransactional?: boolean; // if true, wraps sequential loop in BEGIN/COMMIT
|
|
398
|
+
}
|
|
263
399
|
```
|
|
400
|
+
|
|
401
|
+
Attach under the root knex config as `ibmi`.
|
|
402
|
+
|
|
403
|
+
### Transactional Sequential Inserts
|
|
404
|
+
|
|
405
|
+
When `ibmi.sequentialInsertTransactional` is `true`, the dialect will attempt `BEGIN` before the per-row loop and `COMMIT` after. On commit failure it will attempt a `ROLLBACK`. If `BEGIN` is not supported, it logs a warning and continues non-transactionally.
|
|
406
|
+
|
|
407
|
+
<!-- Benchmarks section intentionally removed. Benchmarking is handled in the external test harness project -->
|
|
408
|
+
|
|
409
|
+
## Links
|
|
410
|
+
|
|
411
|
+
- Knex: https://knexjs.org/
|
|
412
|
+
- Knex repo: https://github.com/knex/knex
|
|
413
|
+
- ODBC driver: https://github.com/IBM/node-odbc
|
|
414
|
+
- IBM i OSS docs: https://ibmi-oss-docs.readthedocs.io/
|