@nesgarbo/node-jt400 6.0.3 → 6.0.4

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (152) hide show
  1. package/README.md +330 -369
  2. package/dist-cjs/index.d.cts +12 -5
  3. package/dist-cjs/index.js.map +1 -1
  4. package/dist-cjs/java/JT400.d.cts +3 -3
  5. package/dist-cjs/java/JT400.js.map +1 -1
  6. package/dist-cjs/java/index.js +4 -3
  7. package/dist-cjs/java/index.js.map +1 -1
  8. package/dist-cjs/lib/baseConnection.js +124 -166
  9. package/dist-cjs/lib/baseConnection.js.map +1 -1
  10. package/dist-cjs/lib/baseConnection.types.d.cts +11 -7
  11. package/dist-cjs/lib/baseConnection.types.js.map +1 -1
  12. package/dist-cjs/lib/connection.js +12 -25
  13. package/dist-cjs/lib/connection.js.map +1 -1
  14. package/dist-cjs/lib/connection.types.d.cts +39 -12
  15. package/dist-cjs/lib/connection.types.js.map +1 -1
  16. package/dist-cjs/lib/handleError.d.cts +1 -3
  17. package/dist-cjs/lib/handleError.js +3 -2
  18. package/dist-cjs/lib/handleError.js.map +1 -1
  19. package/dist-cjs/lib/ifs/read_stream.d.cts +6 -1
  20. package/dist-cjs/lib/ifs/read_stream.js +23 -32
  21. package/dist-cjs/lib/ifs/read_stream.js.map +1 -1
  22. package/dist-cjs/lib/ifs/write_stream.d.cts +7 -1
  23. package/dist-cjs/lib/ifs/write_stream.js +13 -24
  24. package/dist-cjs/lib/ifs/write_stream.js.map +1 -1
  25. package/dist-cjs/lib/inMemoryConnection.d.cts +1 -1
  26. package/dist-cjs/lib/inMemoryConnection.js +4 -4
  27. package/dist-cjs/lib/inMemoryConnection.js.map +1 -1
  28. package/dist-cjs/lib/insertList.js +12 -13
  29. package/dist-cjs/lib/insertList.js.map +1 -1
  30. package/dist-cjs/lib/jdbcstream.d.cts +16 -1
  31. package/dist-cjs/lib/jdbcstream.js +31 -36
  32. package/dist-cjs/lib/jdbcstream.js.map +1 -1
  33. package/dist-cjs/lib/jdbcwritestream.d.cts +4 -1
  34. package/dist-cjs/lib/jdbcwritestream.js +7 -20
  35. package/dist-cjs/lib/jdbcwritestream.js.map +1 -1
  36. package/dist-cjs/lib/logger.d.cts +4 -4
  37. package/dist-cjs/lib/logger.js.map +1 -1
  38. package/dist-cjs/lib/sqlutil.d.cts +4 -1
  39. package/dist-cjs/lib/sqlutil.js +1 -0
  40. package/dist-cjs/lib/sqlutil.js.map +1 -1
  41. package/dist-cjs/lib/streamTransformers.d.cts +2 -1
  42. package/dist-cjs/lib/streamTransformers.js +1 -1
  43. package/dist-cjs/lib/streamTransformers.js.map +1 -1
  44. package/dist-cjs/typings/jsonstream.d.js +1 -0
  45. package/dist-cjs/typings/jsonstream.d.js.map +1 -0
  46. package/dist-esm/index.d.ts +12 -5
  47. package/dist-esm/index.js.map +1 -1
  48. package/dist-esm/java/JT400.d.ts +3 -3
  49. package/dist-esm/java/index.js +3 -10
  50. package/dist-esm/java/index.js.map +1 -1
  51. package/dist-esm/lib/baseConnection.js +124 -166
  52. package/dist-esm/lib/baseConnection.js.map +1 -1
  53. package/dist-esm/lib/baseConnection.types.d.ts +11 -7
  54. package/dist-esm/lib/baseConnection.types.js.map +1 -1
  55. package/dist-esm/lib/connection.js +12 -25
  56. package/dist-esm/lib/connection.js.map +1 -1
  57. package/dist-esm/lib/connection.types.d.ts +39 -12
  58. package/dist-esm/lib/handleError.d.ts +1 -3
  59. package/dist-esm/lib/handleError.js +3 -2
  60. package/dist-esm/lib/handleError.js.map +1 -1
  61. package/dist-esm/lib/ifs/read_stream.d.ts +6 -1
  62. package/dist-esm/lib/ifs/read_stream.js +23 -22
  63. package/dist-esm/lib/ifs/read_stream.js.map +1 -1
  64. package/dist-esm/lib/ifs/write_stream.d.ts +7 -1
  65. package/dist-esm/lib/ifs/write_stream.js +13 -14
  66. package/dist-esm/lib/ifs/write_stream.js.map +1 -1
  67. package/dist-esm/lib/inMemoryConnection.d.ts +1 -1
  68. package/dist-esm/lib/inMemoryConnection.js +4 -4
  69. package/dist-esm/lib/inMemoryConnection.js.map +1 -1
  70. package/dist-esm/lib/insertList.js +12 -13
  71. package/dist-esm/lib/insertList.js.map +1 -1
  72. package/dist-esm/lib/jdbcstream.d.ts +16 -1
  73. package/dist-esm/lib/jdbcstream.js +31 -36
  74. package/dist-esm/lib/jdbcstream.js.map +1 -1
  75. package/dist-esm/lib/jdbcwritestream.d.ts +4 -1
  76. package/dist-esm/lib/jdbcwritestream.js +7 -10
  77. package/dist-esm/lib/jdbcwritestream.js.map +1 -1
  78. package/dist-esm/lib/logger.d.ts +4 -4
  79. package/dist-esm/lib/logger.js.map +1 -1
  80. package/dist-esm/lib/sqlutil.d.ts +4 -1
  81. package/dist-esm/lib/sqlutil.js +1 -0
  82. package/dist-esm/lib/sqlutil.js.map +1 -1
  83. package/dist-esm/lib/streamTransformers.d.ts +2 -1
  84. package/dist-esm/lib/streamTransformers.js +1 -1
  85. package/dist-esm/lib/streamTransformers.js.map +1 -1
  86. package/package.json +16 -21
  87. package/dist-cjs/integration-test/call-rpg-spec.d.cts +0 -2
  88. package/dist-cjs/integration-test/call-rpg-spec.js +0 -101
  89. package/dist-cjs/integration-test/call-rpg-spec.js.map +0 -1
  90. package/dist-cjs/integration-test/dataq-spec.d.cts +0 -2
  91. package/dist-cjs/integration-test/dataq-spec.js +0 -57
  92. package/dist-cjs/integration-test/dataq-spec.js.map +0 -1
  93. package/dist-cjs/integration-test/db.d.cts +0 -8
  94. package/dist-cjs/integration-test/db.js +0 -31
  95. package/dist-cjs/integration-test/db.js.map +0 -1
  96. package/dist-cjs/integration-test/db2-connect-spec.d.cts +0 -2
  97. package/dist-cjs/integration-test/db2-connect-spec.js +0 -42
  98. package/dist-cjs/integration-test/db2-connect-spec.js.map +0 -1
  99. package/dist-cjs/integration-test/db2-pool-spec.d.cts +0 -2
  100. package/dist-cjs/integration-test/db2-pool-spec.js +0 -213
  101. package/dist-cjs/integration-test/db2-pool-spec.js.map +0 -1
  102. package/dist-cjs/integration-test/ifs-spec.d.cts +0 -2
  103. package/dist-cjs/integration-test/ifs-spec.js +0 -140
  104. package/dist-cjs/integration-test/ifs-spec.js.map +0 -1
  105. package/dist-cjs/integration-test/msgf-spec.d.cts +0 -2
  106. package/dist-cjs/integration-test/msgf-spec.js +0 -34
  107. package/dist-cjs/integration-test/msgf-spec.js.map +0 -1
  108. package/dist-cjs/integration-test/msgq-spec.d.cts +0 -2
  109. package/dist-cjs/integration-test/msgq-spec.js +0 -45
  110. package/dist-cjs/integration-test/msgq-spec.js.map +0 -1
  111. package/dist-cjs/unit-test/hsql-spec.d.cts +0 -2
  112. package/dist-cjs/unit-test/hsql-spec.js +0 -488
  113. package/dist-cjs/unit-test/hsql-spec.js.map +0 -1
  114. package/dist-cjs/unit-test/sqlutil-spec.d.cts +0 -2
  115. package/dist-cjs/unit-test/sqlutil-spec.js +0 -43
  116. package/dist-cjs/unit-test/sqlutil-spec.js.map +0 -1
  117. package/dist-cjs/unit-test/streamTransformers-spec.d.cts +0 -2
  118. package/dist-cjs/unit-test/streamTransformers-spec.js +0 -121
  119. package/dist-cjs/unit-test/streamTransformers-spec.js.map +0 -1
  120. package/dist-esm/integration-test/call-rpg-spec.d.ts +0 -2
  121. package/dist-esm/integration-test/call-rpg-spec.js +0 -79
  122. package/dist-esm/integration-test/call-rpg-spec.js.map +0 -1
  123. package/dist-esm/integration-test/dataq-spec.d.ts +0 -2
  124. package/dist-esm/integration-test/dataq-spec.js +0 -35
  125. package/dist-esm/integration-test/dataq-spec.js.map +0 -1
  126. package/dist-esm/integration-test/db.d.ts +0 -8
  127. package/dist-esm/integration-test/db.js +0 -8
  128. package/dist-esm/integration-test/db.js.map +0 -1
  129. package/dist-esm/integration-test/db2-connect-spec.d.ts +0 -2
  130. package/dist-esm/integration-test/db2-connect-spec.js +0 -20
  131. package/dist-esm/integration-test/db2-connect-spec.js.map +0 -1
  132. package/dist-esm/integration-test/db2-pool-spec.d.ts +0 -2
  133. package/dist-esm/integration-test/db2-pool-spec.js +0 -197
  134. package/dist-esm/integration-test/db2-pool-spec.js.map +0 -1
  135. package/dist-esm/integration-test/ifs-spec.d.ts +0 -2
  136. package/dist-esm/integration-test/ifs-spec.js +0 -118
  137. package/dist-esm/integration-test/ifs-spec.js.map +0 -1
  138. package/dist-esm/integration-test/msgf-spec.d.ts +0 -2
  139. package/dist-esm/integration-test/msgf-spec.js +0 -12
  140. package/dist-esm/integration-test/msgf-spec.js.map +0 -1
  141. package/dist-esm/integration-test/msgq-spec.d.ts +0 -2
  142. package/dist-esm/integration-test/msgq-spec.js +0 -23
  143. package/dist-esm/integration-test/msgq-spec.js.map +0 -1
  144. package/dist-esm/unit-test/hsql-spec.d.ts +0 -2
  145. package/dist-esm/unit-test/hsql-spec.js +0 -466
  146. package/dist-esm/unit-test/hsql-spec.js.map +0 -1
  147. package/dist-esm/unit-test/sqlutil-spec.d.ts +0 -2
  148. package/dist-esm/unit-test/sqlutil-spec.js +0 -21
  149. package/dist-esm/unit-test/sqlutil-spec.js.map +0 -1
  150. package/dist-esm/unit-test/streamTransformers-spec.d.ts +0 -2
  151. package/dist-esm/unit-test/streamTransformers-spec.js +0 -99
  152. package/dist-esm/unit-test/streamTransformers-spec.js.map +0 -1
package/README.md CHANGED
@@ -1,521 +1,482 @@
1
- # node-jt400
1
+ # @nesgarbo/node-jt400
2
2
 
3
- NodeJS JT400 wrapper to connect to IBM iSeries and AS/400 systems (OS400 operating system, database like DB2, programs and filesystem).
3
+ Node.js/TypeScript wrapper for the IBM Toolbox for Java (JT400). Provides a Promise-based API to interact with IBM iSeries/AS400 systems: DB2 database, IFS filesystem, RPG/COBOL programs, message queues, and data queues.
4
4
 
5
- [![Version](https://img.shields.io/npm/v/node-jt400.svg)](https://npmjs.org/package/node-jt400)
5
+ Bridges Node.js to the JVM via [`java-bridge`](https://www.npmjs.com/package/java-bridge). Dual ESM/CJS package with full TypeScript declarations.
6
6
 
7
- ## Module System Support
7
+ ## Requirements
8
8
 
9
- This package is published as a **dual-mode package** supporting both ESM (ECMAScript Modules) and CommonJS:
9
+ - Node.js 18+
10
+ - Java 8+ (JRE or JDK) installed and available in `PATH`
10
11
 
11
- - Use `import` in ESM projects (Node.js 16+)
12
- - Use `require()` in CommonJS projects
13
- - Full TypeScript support with type definitions for both formats
14
-
15
- ## About
16
-
17
- This package is built on the IBM Toolbox for Java (http://jt400.sourceforge.net/). It maps the java functions to node using node-java. Not all of the Java code has been mapped over to node. The reason is that this module was originally written for internal use-only for Tryggingadmidstodin. Therefore we only implemented what Tryggingamidstodin needed, for example program calls, but not stored procedures.
18
-
19
- Tryggingamidstodin is an Icelandic insurance company dealing with legacy systems in AS400. We figured other people or companies might be dealing with the similar problems so this module was made open source. Most of the coding and documentation reflects this, although we are always trying to improve that. For example the library for programs was orignally not configurable, but is now.
12
+ ## Install
20
13
 
21
- We are always open to suggestions on how to improve and welcome most pull-requests.
14
+ ```sh
15
+ npm install @nesgarbo/node-jt400
16
+ ```
22
17
 
23
- ## Changes
18
+ ## Module system
24
19
 
25
- Check out our [changelog.md](https://github.com/tryggingamidstodin/node-jt400/blob/master/CHANGELOG.md) to see changes to this project. Please note that this changelog was added in version 4.0 so documentation on versions prior to that are incomplete. Feel free to add to this changelog and report an issue if you're having troubles with updating this package.
20
+ Both ESM and CommonJS are supported:
26
21
 
27
- ## Install
22
+ ```ts
23
+ // ESM
24
+ import { pool, connect, useInMemoryDb } from '@nesgarbo/node-jt400'
25
+ ```
28
26
 
29
- ```sh
30
- npm install node-jt400 --save
27
+ ```js
28
+ // CommonJS
29
+ const { pool, connect, useInMemoryDb } = require('@nesgarbo/node-jt400')
31
30
  ```
32
31
 
33
- #### Windows
32
+ ## Connecting
34
33
 
35
- Windows installations can be tricky because of node-java dependency. Make sure that that module works first. You can [check out the node-java documentation for windows installation](https://github.com/joeferner/node-java#installation-windows)
34
+ ### Connection pool (synchronous)
36
35
 
37
- We also have some solved issues you can take a look at like [#13](https://github.com/tryggingamidstodin/node-jt400/issues/13) and [#26](https://github.com/tryggingamidstodin/node-jt400/issues/26)
36
+ ```ts
37
+ import { pool } from '@nesgarbo/node-jt400'
38
38
 
39
- Other issues might be related to node-gyp, python and MS build tools or VS IDE.
39
+ const db = pool({
40
+ host: 'myhost',
41
+ user: 'myuser',
42
+ password: 'secret',
43
+ })
44
+ ```
40
45
 
41
- ## Configure
46
+ ### Single connection (async)
42
47
 
43
- Most basic configuration would be:
48
+ ```ts
49
+ import { connect } from '@nesgarbo/node-jt400'
44
50
 
45
- ```javascript
46
- const config = {
51
+ const db = await connect({
47
52
  host: 'myhost',
48
53
  user: 'myuser',
49
- password: 'xxx',
50
- }
51
- const pool = require('node-jt400').pool(config)
54
+ password: 'secret',
55
+ })
52
56
  ```
53
57
 
54
- But the config accepts all [JT400 JDBC Properties](https://www.ibm.com/support/knowledgecenter/en/ssw_ibm_i_73/rzahh/javadoc/com/ibm/as400/access/doc-files/JDBCProperties.html) so you can add other options like `translate binary`
58
+ ### Configuration
59
+
60
+ `host`, `user`, and `password` fall back to environment variables `AS400_HOST`, `AS400_USERNAME`, and `AS400_PASSWORD` when omitted. The `naming` option defaults to `'system'` (IBM iSeries naming convention).
55
61
 
56
- ```javascript
57
- const config = {
62
+ Any [JT400 JDBC property](https://www.ibm.com/support/knowledgecenter/en/ssw_ibm_i_73/rzahh/javadoc/com/ibm/as400/access/doc-files/JDBCProperties.html) can be passed in the config object:
63
+
64
+ ```ts
65
+ const db = pool({
58
66
  host: 'myhost',
59
67
  user: 'myuser',
60
- password: 'xxx',
68
+ password: 'secret',
61
69
  'translate binary': 'true',
62
70
  trace: 'true',
63
- }
64
- const pool = require('node-jt400').pool(config)
71
+ })
65
72
  ```
66
73
 
67
- To close the connection pool you can call `pool.close()`
68
-
69
74
  ### Logging
70
75
 
71
- You can also pass in you preferred logger. E.g. pino:
76
+ Pass any pino-compatible logger as the second argument:
72
77
 
73
- ```javascript
74
- import createLogger from 'pino'
75
- import { pool } from 'node-jt400'
78
+ ```ts
79
+ import pino from 'pino'
80
+ import { pool } from '@nesgarbo/node-jt400'
76
81
 
77
- const config = {}
78
- const options = {
79
- logger: createLogger(),
80
- }
81
- const connection = pool(config, options)
82
+ const db = pool({}, { logger: pino() })
82
83
  ```
83
84
 
84
- # SQL / Database
85
-
86
- ## Query
85
+ ---
87
86
 
88
- ###### Promises
87
+ ## SQL / Database
89
88
 
90
- ```javascript
91
- pool
92
- .query('SELECT field1, field2 FROM foo WHERE bar=? AND baz=?', [1, 'a'])
93
- .then((result) => {
94
- console.log('result')
95
- const field1 = result[0].FIELD1
96
- console.log(field1)
97
- })
98
- .catch((error) => {
99
- console.log('error')
100
- console.log(error)
101
- })
102
- ```
89
+ ### Query
103
90
 
104
- ###### Async/await
91
+ Returns rows as an array of objects. Column names are uppercased.
105
92
 
106
- ```javascript
107
- try {
108
- const results = await pool.query(
109
- 'SELECT field1, field2 FROM foo WHERE bar=? AND baz=?',
110
- [1, 'a'],
111
- )
112
- console.log('result')
113
- const field1 = result[0].FIELD1
114
- console.log(field1)
115
- } catch (error) {
116
- console.log('error')
117
- console.log(error)
118
- }
93
+ ```ts
94
+ const rows = await db.query<{ FIELD1: number; FIELD2: string }>(
95
+ 'SELECT field1, field2 FROM foo WHERE bar=? AND baz=?',
96
+ [1, 'a'],
97
+ )
98
+ console.log(rows[0].FIELD1)
119
99
  ```
120
100
 
121
- Please note that values from the database are automatically trimmed so 'abc ' will be returned as 'abc'.
122
- [Issue #27](https://github.com/tryggingamidstodin/node-jt400/issues/22)
123
- To override this use the optional QueryOptions parameter to set trim to false.
101
+ String values are automatically trimmed. Disable with `{ trim: false }`:
124
102
 
125
- ```javascript
126
- pool.query('SELECT field1, field2 FROM foo WHERE bar=? AND baz=?', [1, 'a'], {
127
- trim: false,
128
- })
103
+ ```ts
104
+ const rows = await db.query('SELECT name FROM foo', [], { trim: false })
129
105
  ```
130
106
 
131
- ## Update
107
+ ### Update / Delete
132
108
 
133
- ###### Promises
134
-
135
- ```javascript
136
- pool.update('UPDATE foo SET bar=? WHERE baz=?', [1, 'a']).then((nUpdated) => {
137
- console.log('Updated ' + nUpdated + ' rows')
138
- })
109
+ ```ts
110
+ const rowsUpdated = await db.update('UPDATE foo SET bar=? WHERE baz=?', [1, 'a'])
111
+ const rowsDeleted = await db.update('DELETE FROM foo WHERE bar=?', [1])
139
112
  ```
140
113
 
141
- ###### Async/await
114
+ ### Insert
142
115
 
143
- ```javascript
144
- try {
145
- const rowsUpdated = await pool.update('UPDATE foo SET bar=? WHERE baz=?', [
146
- 1,
147
- 'a',
148
- ])
149
- console.log('rows updated')
150
- console.log(rowsUpdated)
151
- } catch (error) {
152
- console.log('error')
153
- console.log(error)
154
- }
116
+ ```ts
117
+ // Insert and get the generated identity value
118
+ const id = await db.insertAndGetId('INSERT INTO foo (bar, baz) VALUES(?,?)', [2, 'b'])
155
119
  ```
156
120
 
157
- ### Delete
121
+ ### Insert list
158
122
 
159
- ###### Promises
123
+ Inserts multiple rows in a single round-trip and returns the generated IDs. All rows must have the same keys in the same order.
160
124
 
161
- ```javascript
162
- pool
163
- .update('DELETE FROM foo WHERE bar=?', [1])
164
- .then(nUpdated => {
165
- console.log('Deleted + ' nUpdated + ' rows');
166
- });
125
+ ```ts
126
+ const ids = await db.insertList('foo', 'fooid', [
127
+ { FIELD1: 1, FIELD2: 'a' },
128
+ { FIELD1: 2, FIELD2: 'b' },
129
+ ])
130
+ // ids: [1, 2]
167
131
  ```
168
132
 
169
- ###### Async/await
133
+ ### Batch update
170
134
 
171
- ```javascript
172
- try {
173
- const rowsDeleted = await pool.update('DELETE FROM foo WHERE bar=?', [1]);
174
- console.log('Deleted + ' rowsDeleted + ' rows');
175
- }
176
- catch (error) {
177
- console.log('error');
178
- console.log(error);
179
- }
135
+ Executes a parameterized statement once per row in a single JDBC batch, returning the number of affected rows for each.
136
+
137
+ ```ts
138
+ const counts = await db.batchUpdate('INSERT INTO foo (f1, f2) VALUES(?,?)', [
139
+ [1, 'a'],
140
+ [2, 'b'],
141
+ ])
142
+ // counts: [1, 1]
180
143
  ```
181
144
 
182
- ### Insert
145
+ ### Complex types (CLOB / BLOB)
183
146
 
184
- ###### Promises
147
+ Strings, numbers, and `null` are handled automatically. For CLOB or BLOB pass a typed object:
185
148
 
186
- ```javascript
187
- pool
188
- .insertAndGetId('INSERT INTO foo (bar, baz) VALUES(?,?)', [2, 'b'])
189
- .then((id) => {
190
- console.log('Inserted new row with id ' + id)
191
- })
149
+ ```ts
150
+ await db.update('INSERT INTO foo (id, notes, doc) VALUES(?,?,?)', [
151
+ 1,
152
+ { type: 'CLOB', value: 'A very long string...' },
153
+ { type: 'BLOB', value: base64String },
154
+ ])
192
155
  ```
193
156
 
194
- ###### Async/await
157
+ For BLOB, pass the base64 string representation of the file.
195
158
 
196
- ```javascript
197
- try {
198
- const id = await pool.insertAndGetId(
199
- 'INSERT INTO foo (bar, baz) VALUES(?,?)',
200
- [2, 'b'],
201
- )
202
- console.log('Inserted new row with id ' + id)
203
- } catch (error) {
204
- console.log('error')
205
- console.log(error)
206
- }
159
+ ### Date parameters
160
+
161
+ Pass JavaScript `Date` objects directly — they are converted to `"YYYY-MM-DD HH:mm:ss"` before being sent to JDBC:
162
+
163
+ ```ts
164
+ await db.update('INSERT INTO foo (id, ts) VALUES(?,?)', [1, new Date()])
207
165
  ```
208
166
 
209
- ### Insert list
167
+ ---
210
168
 
211
- ###### Promises
169
+ ## Streaming
212
170
 
213
- ```javascript
214
- const tableName = 'foo'
215
- const idColumn = 'fooid'
216
- const rows = [
217
- { FIELD1: 1, FIELD2: 'a' },
218
- { FIELD1: 2, FIELD2: 'b' },
219
- ]
171
+ ### SQL read stream
220
172
 
221
- pool.insertList(tableName, idColumn, rows).then((listOfGeneratedIds) => {
222
- console.log(listOfGeneratedIds)
223
- })
224
- ```
173
+ Returns a `Readable` emitting raw JSON chunks (one JSON array per row). Combine with `JSONStream` to pipe rows elsewhere:
225
174
 
226
- ###### Async/await
175
+ ```ts
176
+ import JSONStream from 'JSONStream'
227
177
 
228
- ```javascript
229
- try {
230
- const idList = await pool.insertList(tableName, idColumn, rows)
231
- console.log(idList)
232
- } catch (error) {
233
- console.log('error')
234
- console.log(error)
235
- }
178
+ db.createReadStream('SELECT f1, f2 FROM foo WHERE bar=?', [1])
179
+ .pipe(JSONStream.parse([true]))
180
+ .pipe(db.createWriteStream('INSERT INTO bar (f1, f2) VALUES(?,?)'))
236
181
  ```
237
182
 
238
- ### Batch update
183
+ ### SQL write stream
239
184
 
240
- ###### Promises
185
+ Returns an object-mode `Writable`. Write plain arrays of parameter values; rows are buffered (default 100) and flushed as a `batchUpdate`:
241
186
 
242
- ```javascript
243
- //insert list in one statement
244
- const data = [
245
- [1, 'a'],
246
- [2, 'b'],
247
- ]
187
+ ```ts
188
+ const ws = db.createWriteStream('INSERT INTO bar (f1, f2) VALUES(?,?)')
189
+ ws.write([1, 'a'])
190
+ ws.write([2, 'b'])
191
+ ws.end()
192
+ ```
193
+
194
+ ### Object stream
248
195
 
249
- pool
250
- .batchUpdate('INSERT INTO FOO (FIELD1, FIELD2) VALUES(?,?)', data)
251
- .then((result) => {
252
- console.log(result)
253
- //result is number of updated rows for each row. [1, 1] in this case.
254
- })
196
+ ```ts
197
+ const stmt = await db.execute('SELECT field1, field2 FROM foo', [])
198
+ const objectStream = await stmt.asObjectStream()
199
+ // objectStream emits plain JS objects per row
255
200
  ```
256
201
 
257
- ###### Async/await
202
+ ### Async iterable (row by row)
258
203
 
259
- ```javascript
260
- try {
261
- const result = await pool.batchUpdate(
262
- 'INSERT INTO FOO (FIELD1, FIELD2) VALUES(?,?)',
263
- data,
264
- )
265
- console.log(result)
266
- // result is the number of updated rows for each row. [1, 1] in this case.
267
- } catch (error) {
268
- console.log('error')
269
- console.log(error)
204
+ `execute()` returns a `Statement` whose `asIterable()` yields each row as a `string[]`:
205
+
206
+ ```ts
207
+ const stmt = await db.execute('SELECT f1, f2 FROM foo WHERE bar=?', [1])
208
+ for await (const [f1, f2] of stmt.asIterable()) {
209
+ console.log(f1, f2)
270
210
  }
271
211
  ```
272
212
 
273
- ### SQL stream
213
+ ### Cursor (lazy row-by-row)
274
214
 
275
- ```javascript
276
- pool
277
- .createReadStream('SELECT FIELD1, FIELD2 FROM FOO WHERE BAR=? AND BAZ=?', [
278
- 1,
279
- 'a',
280
- ])
281
- .pipe(JSONStream.parse([true]))
282
- .pipe(pool.createWriteStream('INSERT INTO FOO2 (F1, F2) VALUES(?, ?)'))
215
+ `queryCursor<T>()` returns an `AsyncIterable<T>` that fetches rows one at a time without loading the full result set. The underlying statement is automatically closed when iteration ends or is abandoned.
216
+
217
+ ```ts
218
+ for await (const row of db.queryCursor<{ ID: number; NAME: string }>(
219
+ 'SELECT id, name FROM bigtable ORDER BY id',
220
+ )) {
221
+ process.stdout.write(row.NAME + '\n')
222
+ }
283
223
  ```
284
224
 
285
- #### asObjectStream
225
+ ---
286
226
 
287
- ```javascript
288
- const streamOfObjects = await pool
289
- .execute('SELECT field1, field2 FROM foo', [])
290
- .then((statement) => statement.asObjectStream())
291
- ```
227
+ ## `Statement` API
292
228
 
293
- ### iterable
229
+ `execute(sql, params?)` returns a `Statement` with the following methods:
294
230
 
295
- ```javascript
296
- const statement = await pool.execute(
297
- 'SELECT FIELD1, FIELD2 FROM FOO WHERE BAR=? AND BAZ=?',
298
- [1, 'a'],
299
- )
300
- const rows = statement.asIterable()
301
- for await (const [field1, field2] of rows) {
302
- console.log(field1, field2)
303
- }
304
- ```
231
+ | Method | Description |
232
+ |--------|-------------|
233
+ | `asArray()` | All rows as `string[][]` |
234
+ | `asStream(options?)` | Node.js `Readable` of raw JSON chunks |
235
+ | `asObjectStream(options?)` | Node.js `Readable` of parsed row objects |
236
+ | `asIterable()` | `AsyncIterable<string[]>` for `for await...of` |
237
+ | `updated()` | Rows affected (DML statements) |
238
+ | `metadata()` | Column descriptors (`Metadata[]`) |
239
+ | `isQuery()` | Whether the statement is a SELECT |
240
+ | `close()` | Release the statement |
305
241
 
306
- ### Transactions
242
+ ---
307
243
 
308
- Transaction is commited on success and rolled back on failure.
309
- The transaction object has the same api as the pool object.
244
+ ## Transactions
310
245
 
311
- ```javascript
312
- pool.transaction((transaction) => {
313
- const fooId = 1
246
+ The transaction callback receives a connection object with the same API as the pool. The transaction is committed on success and rolled back on any thrown error.
314
247
 
315
- return transaction
316
- .update('INSERT INTO FOO (FOOID, FIELD2) VALUES(?,?)', [fooId, 'a'])
317
- .then(function () {
318
- return transaction.update('update BAR set FOOID=? where BARID=?', [
319
- fooId,
320
- 2,
321
- ])
322
- })
248
+ ```ts
249
+ await db.transaction(async (tx) => {
250
+ const fooId = await tx.insertAndGetId('INSERT INTO foo (name) VALUES(?)', ['bar'])
251
+ await tx.update('INSERT INTO baz (fooid, val) VALUES(?,?)', [fooId, 42])
323
252
  })
324
253
  ```
325
254
 
326
- ### Complex types
255
+ ### Low-level commit / rollback
327
256
 
328
- The node-jt400 module handles strings, longs, doubles and nulls automatically as types. When using other types like CLOB or BLOB you need to specify the type specifically.
257
+ For ODBC-compatibility scenarios where you manage the transaction boundary yourself:
329
258
 
330
- ```javascript
331
- pool
332
- .update('INSERT INTO foo (fooid, textfield, clobfield) VALUES(?, ?)', [
333
- 1,
334
- 'text',
335
- { type: 'CLOB', value: 'A really long string' },
336
- ])
337
- .then(() => {
338
- console.log('updated')
339
- })
259
+ ```ts
260
+ await db.update('INSERT INTO foo (name) VALUES(?)', ['bar'])
261
+ await db.commit()
262
+ // or:
263
+ await db.rollback()
340
264
  ```
341
265
 
342
- For BLOB pass the base64 string representation of a file. The module will convert it to a blob for the AS400 database.
266
+ ---
343
267
 
344
- ```javascript
345
- const fs = require('fs').promises
346
- const base64String = await fs.readFile('/path/to/file.jpg', {
347
- encoding: 'base64',
348
- })
349
- pool
350
- .update('INSERT INTO foo (fooid, textfield, blobfield) VALUES(?, ?)', [
351
- 1,
352
- 'text',
353
- { type: 'BLOB', value: base64String },
354
- ])
355
- .then(() => {
356
- console.log('updated')
357
- })
358
- ```
268
+ ## IFS Filesystem
359
269
 
360
- When querying a blob field you will recieve a string.
270
+ ```ts
271
+ const ifs = db.ifs()
272
+ ```
361
273
 
362
- ## Filesystem
274
+ ### Read a file
363
275
 
364
- ### IFS read
276
+ ```ts
277
+ import { createWriteStream } from 'fs'
365
278
 
366
- ```javascript
367
- const ifs = pool.ifs()
368
- const readStream = ifs.createReadStream('/foo/bar.txt') // readStream from IFS
279
+ ifs.createReadStream('/home/myuser/report.txt').pipe(createWriteStream('./report.txt'))
369
280
  ```
370
281
 
371
- As with any readable stream you can pipe it wherever you want. For example into the node filesystem.
282
+ ### Write a file
372
283
 
373
- ```javascript
374
- const createWriteStream = require('fs').createWriteStream
375
- const join = require('path').join
376
- const filename = join(__dirname, 'old.txt')
377
- const writeStream = createWriteStream(filename) // writeStream to nodeJS filesystem.
284
+ ```ts
285
+ import { createReadStream } from 'fs'
378
286
 
379
- const ifs = pool.ifs()
380
- const readStream = ifs.createReadStream('/new.txt') // Reading bar.txt from IFS
381
-
382
- readStream.pipe(writeStream) // Piping from IFS to nodeJS
287
+ createReadStream('./local.txt').pipe(ifs.createWriteStream('/home/myuser/remote.txt'))
383
288
  ```
384
289
 
385
- ### IFS write
290
+ Append mode and CCSID encoding:
386
291
 
387
- ```javascript
388
- const ifs = pool.ifs();
389
- const writeStream = ifs.createWriteStream(('/foo/bar.txt')
292
+ ```ts
293
+ ifs.createWriteStream('/home/myuser/log.txt', { append: true, ccsid: 1208 })
390
294
  ```
391
295
 
392
- As with any other writable streams you can pipe a readable stream into it.
393
-
394
- ```javascript
395
- const fs = require('fs').createReadStream
396
- const join = require('path').join
397
- const filename = join(__dirname, 'old.txt')
398
- const readStream = createReadStream(filename) // readStream from nodeJS filesystem
296
+ ### Delete a file
399
297
 
400
- const ifs = pool.ifs()
401
- const writeStream = ifs.createWriteStream('/new.txt')
402
-
403
- readStream.pipe(writeStream) // Piping from nodeJS to IFS
298
+ ```ts
299
+ const deleted = await ifs.deleteFile('/home/myuser/old.txt') // true | false
404
300
  ```
405
301
 
406
- You can see more examples in [issue #27](https://github.com/tryggingamidstodin/node-jt400/issues/27)
407
-
408
- ### IFS delete
302
+ ### Other IFS operations
409
303
 
410
- ```javascript
411
- const ifs = pool.ifs()
412
- ifs.deleteFile('/foo/bar.txt.old').then(console.log) // true or false
304
+ ```ts
305
+ await ifs.moveFile('/tmp/source.txt', '/home/myuser/dest.txt')
306
+ const files = await ifs.listFiles('/home/myuser')
307
+ const meta = await ifs.fileMetadata('/home/myuser/report.txt')
413
308
  ```
414
309
 
415
- ## Programs
310
+ ---
416
311
 
417
- With programs it is necessary to define your input parameters first. These must match your program defination in AS.
312
+ ## Programs (RPG / COBOL)
418
313
 
419
- ```javascript
420
- const myProgram = pool.defineProgram({
314
+ Define the program once with its parameter schema, then call the returned function.
315
+
316
+ ```ts
317
+ const myProgram = db.defineProgram({
421
318
  programName: 'MYPGM',
319
+ libraryName: 'MYLIB', // optional, defaults to *LIBL
422
320
  paramsSchema: [
423
- { type: 'DECIMAL', precision: 10, scale: 0, name: 'myId'},
424
- { type: 'NUMERIC', precision: 8, scale: 0, name: 'myDate'},
425
- { type: 'NUMERIC', precision: 12, scale: 2, name: 'myTotalValue' },
426
- { type: 'CHAR', precision: 32, scale: 0, name: 'myString'}
321
+ { type: 'DECIMAL', precision: 10, scale: 0, name: 'myId' },
322
+ { type: 'NUMERIC', precision: 8, scale: 0, name: 'myDate' },
323
+ { type: 'NUMERIC', precision: 12, scale: 2, name: 'myAmount' },
324
+ { type: 'CHAR', precision: 32, scale: 0, name: 'myString' },
427
325
  ],
428
- libraryName: 'WTMEXC' // Optional. Defaults to *LIBL
429
- );
430
- ```
431
-
432
- The Decimal type maps to com.ibm.as400.access.AS400PackedDecimal
433
- The Numeric type maps to com.ibm.as400.access.AS400ZonedDecimal
434
- Everything else (char) maps to com.ibm.as400.access.AS400Text
435
- Precision is the size and scale is the decimals.
436
-
437
- > ATTENTION: To make the API clearer we renamed .pgm to .defineProgram. The pgm function is deprecated in v3.0
438
-
439
- When you have defined your program, you can call/invoke it with the parameters you defined.
326
+ })
440
327
 
441
- ```javascript
442
- myProgram(
328
+ const result = await myProgram(
443
329
  {
444
- myId: 123
445
- myDate: '20170608',
446
- myTotalValue: 88450.57,
447
- myString: 'This is a test'
330
+ myId: 123,
331
+ myDate: '20240101',
332
+ myAmount: 1234.56,
333
+ myString: 'hello',
448
334
  },
449
- 10 // Optional timeout in sec
335
+ 10, // optional timeout in seconds (default: 3, pass 0 to disable)
450
336
  )
451
- .then(result => {
452
- console.log(result)
453
- });
454
337
  ```
455
338
 
456
- > ATTENTION: In version 3.0 we added a optional timeout parameter for program calls. This defaults to 3 sec. This is a breaking change since your programs will no longer halt or hang for extended period and therefore never give a response. If you have complicated programs that run for longer than 3 sec then you need to adjust the timeout parameter for those specific calls. Setting it to 0 will ignore the timeout limit.
339
+ Type mapping:
340
+
341
+ | Schema type | Java type |
342
+ |-------------|-----------|
343
+ | `DECIMAL` | `AS400PackedDecimal` |
344
+ | `NUMERIC` | `AS400ZonedDecimal` |
345
+ | anything else | `AS400Text` |
346
+
347
+ > `pgm()` is deprecated. Use `defineProgram()`.
348
+
349
+ ---
457
350
 
458
351
  ## Keyed Data Queues
459
352
 
460
- [IBM KeyedDataQueue Reference](https://javadoc.midrange.com/jtopen/index.html?com/ibm/as400/access/KeyedDataQueue.html)
353
+ ```ts
354
+ const dq = db.createKeyedDataQ({ name: 'MYDATAQ' })
355
+
356
+ // Write
357
+ await dq.write('mykey', 'hello')
358
+
359
+ // Read (wait up to 5 seconds)
360
+ const data = await dq.read({ key: 'mykey', wait: 5 })
361
+ console.log(data) // 'hello'
362
+ ```
363
+
364
+ Read with a reply queue (writeKeyLength enables response routing):
365
+
366
+ ```ts
367
+ void dq.read({ key: 'mykey', wait: 10, writeKeyLength: 11 }).then(async (res) => {
368
+ const { data, write } = res as { data: string; write: (d: string) => Promise<void> }
369
+ await write('pong')
370
+ })
461
371
 
462
- ```javascript
463
- const jt400 = pool(jt400config)
464
- // Open a keyed data queue for reading
465
- let queue = jt400.createKeyedDataQ({ name })
466
- // -1 waits until a message exists. (MSGW)
467
- let m = await queue.read({ key: inboxKey, wait: 2 })
372
+ await dq.write('mykey', 'returnkey ping')
373
+ const reply = await dq.read({ key: 'returnkey ', wait: 10 })
468
374
  ```
469
375
 
376
+ Pass `wait: -1` to wait indefinitely until a message arrives.
377
+
378
+ ---
379
+
470
380
  ## Message Queues
471
381
 
472
- [IBM MessageQueue Reference](https://javadoc.midrange.com/jtopen/index.html?com/ibm/as400/access/MessageQueue.html)
382
+ ```ts
383
+ const path = `/QSYS.LIB/${process.env.AS400_USERNAME}.MSGQ`
384
+ const msgq = await db.openMessageQ({ path })
473
385
 
474
- ```javascript
475
- const path = '/QSYS.LIB/' + process.env.AS400_USERNAME + '.MSGQ'
476
- const msgq = await jt400.openMessageQ({ path: path })
477
- const testMessage = 'Test Message'
478
- await msgq.sendInformational(testMessage) // Writes a basic message
479
- await msgq.read()
386
+ await msgq.sendInformational('Hello from Node.js')
387
+
388
+ const msg = await msgq.read()
389
+ console.log(msg?.text)
480
390
  ```
481
391
 
392
+ ---
393
+
482
394
  ## Message Files
483
395
 
484
- [IBM AS400Message Reference](https://javadoc.midrange.com/jtopen/index.html?com/ibm/as400/access/MessageFile.html)
396
+ ```ts
397
+ const file = await db.openMessageFile({
398
+ path: '/QSYS.LIB/MYLIB.LIB/MYMSGF.MSGF',
399
+ })
400
+ const msg = await file.read({ messageId: 'AMX0051' })
401
+ console.log(await msg.getText())
402
+ ```
403
+
404
+ ---
405
+
406
+ ## Error handling
407
+
408
+ All errors are wrapped using [`oops-error`](https://github.com/tryggingamidstodin/oops-error) and categorized:
409
+
410
+ - **`OperationalError`** — connection/network failures (e.g. `UnknownHostException`)
411
+ - **`ProgrammerError`** — SQL syntax errors, wrong parameter counts, data errors, etc.
412
+
413
+ ```ts
414
+ try {
415
+ await db.query('SELECT * FROM foo WHERE bar=?', [1, 'extra-param'])
416
+ } catch (err) {
417
+ console.log(err.category) // 'ProgrammerError'
418
+ console.log(err.message) // e.g. 'Descriptor index not valid.'
419
+ console.log(err.cause) // original Java exception
420
+ }
421
+ ```
422
+
423
+ ---
424
+
425
+ ## Testing with in-memory DB
485
426
 
486
- ```javascript
487
- const file = await pool.openMessageFile({
488
- path: '/QSYS.LIB/YOURLIB.LIB/YOURMSGF.MSGF',
427
+ `useInMemoryDb()` returns a connection backed by HSQLDB (an in-memory Java database). Use it in unit tests without an AS400 connection.
428
+
429
+ ```ts
430
+ import { useInMemoryDb } from '@nesgarbo/node-jt400'
431
+
432
+ const db = useInMemoryDb()
433
+
434
+ await db.update('CREATE TABLE foo (id INT GENERATED ALWAYS AS IDENTITY, name VARCHAR(100))')
435
+ await db.update('INSERT INTO foo (name) VALUES(?)', ['bar'])
436
+ const rows = await db.query<{ ID: number; NAME: string }>('SELECT id, name FROM foo')
437
+ ```
438
+
439
+ ### Mocking program calls
440
+
441
+ ```ts
442
+ const db = useInMemoryDb()
443
+
444
+ db.mockPgm('MYPGM', (input) => {
445
+ return [{ result: input.myId * 2 }]
489
446
  })
490
- let msg = await file.read({ messageId: 'AMX0051' })
491
- console.log('msg', await msg.getText())
447
+
448
+ const run = db.defineProgram({ programName: 'MYPGM', paramsSchema: [] })
449
+ const result = await run({ myId: 5 })
492
450
  ```
493
451
 
494
- # Error handling
452
+ Mock functions can return a value or a `Promise`. Calls can be chained:
495
453
 
496
- This module uses [oops-error](https://github.com/tryggingamidstodin/oops-error) to categorize errors into operational errors and programmer errors. We recommend you take a look at the [readme](https://github.com/tryggingamidstodin/oops-error/blob/master/README.md) for further information about these categories.
454
+ ```ts
455
+ db.mockPgm('PGM1', () => [{ ok: true }]).mockPgm('PGM2', () => [{ ok: true }])
456
+ ```
497
457
 
498
- The oops-error has few properties:
458
+ ---
499
459
 
500
- - category: Tells you the error is programmer or operational.
501
- - message: The basic error message.
502
- - cause: the original error.
503
- - fullstack: function that returns the fullstack of causes.
460
+ ## Metadata
504
461
 
505
- ## Examples
462
+ ```ts
463
+ const columns = await db.getColumns({
464
+ schema: 'MYLIB',
465
+ table: 'MYTABLE',
466
+ })
506
467
 
507
- Let's define too many parameters for our query.
468
+ const pks = await db.getPrimaryKeys({
469
+ schema: 'MYLIB',
470
+ table: 'MYTABLE',
471
+ })
508
472
 
509
- ```javascript
510
- pool
511
- .query('SELECT field1, field2 FROM foo WHERE bar=? AND baz=?', [1, 'a', 'b'])
512
- .then(result => {
513
- console.log('we will not go here')
514
- })
515
- .catch(error => {
516
- console.log('we got programmer error');
517
- console.log('category', error.category) // ProgrammerError
518
- console.log('message', error.message) // Descriptor index not valid.
519
- console.log('original error', error.cause)
520
- });
473
+ const tableStream = db.getTablesAsStream({
474
+ schema: 'MYLIB',
475
+ })
521
476
  ```
477
+
478
+ ---
479
+
480
+ ## License
481
+
482
+ MIT