@duckdb/node-api 1.1.3-alpha.9 → 1.2.0-alpha.14

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (54) hide show
  1. package/README.md +414 -13
  2. package/lib/DuckDBConnection.d.ts +10 -10
  3. package/lib/DuckDBDataChunk.d.ts +14 -2
  4. package/lib/DuckDBDataChunk.js +79 -18
  5. package/lib/DuckDBLogicalType.d.ts +1 -1
  6. package/lib/DuckDBLogicalType.js +1 -1
  7. package/lib/DuckDBPreparedStatement.d.ts +2 -1
  8. package/lib/DuckDBPreparedStatement.js +9 -1
  9. package/lib/DuckDBResult.d.ts +8 -0
  10. package/lib/DuckDBResult.js +52 -35
  11. package/lib/DuckDBResultReader.d.ts +8 -0
  12. package/lib/DuckDBResultReader.js +34 -36
  13. package/lib/DuckDBType.d.ts +10 -0
  14. package/lib/DuckDBType.js +33 -0
  15. package/lib/DuckDBValueConverter.d.ts +5 -0
  16. package/lib/DuckDBValueConverter.js +2 -0
  17. package/lib/DuckDBValueToJsonConverter.d.ts +10 -0
  18. package/lib/DuckDBValueToJsonConverter.js +101 -0
  19. package/lib/DuckDBVector.js +2 -2
  20. package/lib/conversion/dateTimeStringConversion.d.ts +10 -6
  21. package/lib/conversion/dateTimeStringConversion.js +64 -16
  22. package/lib/convertColumnsFromChunks.d.ts +3 -0
  23. package/lib/convertColumnsFromChunks.js +16 -0
  24. package/lib/convertColumnsObjectFromChunks.d.ts +3 -0
  25. package/lib/convertColumnsObjectFromChunks.js +19 -0
  26. package/lib/convertRowObjectsFromChunks.d.ts +3 -0
  27. package/lib/convertRowObjectsFromChunks.js +17 -0
  28. package/lib/convertRowsFromChunks.d.ts +3 -0
  29. package/lib/convertRowsFromChunks.js +13 -0
  30. package/lib/createValue.js +10 -1
  31. package/lib/getColumnsFromChunks.d.ts +3 -0
  32. package/lib/getColumnsFromChunks.js +16 -0
  33. package/lib/getColumnsObjectFromChunks.d.ts +3 -0
  34. package/lib/getColumnsObjectFromChunks.js +19 -0
  35. package/lib/getRowObjectsFromChunks.d.ts +3 -0
  36. package/lib/getRowObjectsFromChunks.js +17 -0
  37. package/lib/getRowsFromChunks.d.ts +3 -0
  38. package/lib/getRowsFromChunks.js +10 -0
  39. package/lib/index.d.ts +2 -0
  40. package/lib/index.js +2 -0
  41. package/lib/typeForValue.js +6 -1
  42. package/lib/values/DuckDBTimeTZValue.d.ts +10 -10
  43. package/lib/values/DuckDBTimeTZValue.js +16 -15
  44. package/lib/values/DuckDBTimestampMillisecondsValue.d.ts +8 -4
  45. package/lib/values/DuckDBTimestampMillisecondsValue.js +15 -6
  46. package/lib/values/DuckDBTimestampNanosecondsValue.d.ts +8 -4
  47. package/lib/values/DuckDBTimestampNanosecondsValue.js +15 -6
  48. package/lib/values/DuckDBTimestampSecondsValue.d.ts +5 -1
  49. package/lib/values/DuckDBTimestampSecondsValue.js +9 -0
  50. package/lib/values/DuckDBTimestampTZValue.d.ts +2 -0
  51. package/lib/values/DuckDBTimestampTZValue.js +5 -2
  52. package/lib/values/DuckDBTimestampValue.d.ts +1 -0
  53. package/lib/values/DuckDBTimestampValue.js +4 -1
  54. package/package.json +3 -2
package/README.md CHANGED
@@ -140,6 +140,16 @@ const result = await connection.run('select $a, $b, $c', {
140
140
  });
141
141
  ```
142
142
 
143
+ Unspecified types will be inferred:
144
+
145
+ ```ts
146
+ const result = await connection.run('select $a, $b, $c', {
147
+ 'a': 'duck',
148
+ 'b': 42,
149
+ 'c': listValue([10, 11, 12]),
150
+ });
151
+ ```
152
+
143
153
  ### Stream Results
144
154
 
145
155
  Streaming results evaluate lazily when rows are read.
@@ -148,7 +158,7 @@ Streaming results evaluate lazily when rows are read.
148
158
  const result = await connection.stream('from range(10_000)');
149
159
  ```
150
160
 
151
- ### Inspect Result
161
+ ### Inspect Result Metadata
152
162
 
153
163
  Get column names and types:
154
164
  ```ts
@@ -156,7 +166,7 @@ const columnNames = result.columnNames();
156
166
  const columnTypes = result.columnTypes();
157
167
  ```
158
168
 
159
- ### Result Reader
169
+ ### Read Result Data
160
170
 
161
171
  Run and read all data:
162
172
  ```ts
@@ -167,7 +177,10 @@ const rows = reader.getRows();
167
177
 
168
178
  Stream and read up to (at least) some number of rows:
169
179
  ```ts
170
- const reader = await connection.streamAndReadUntil('from range(5000)', 1000);
180
+ const reader = await connection.streamAndReadUntil(
181
+ 'from range(5000)',
182
+ 1000
183
+ );
171
184
  const rows = reader.getRows();
172
185
  // rows.length === 2048. (Rows are read in chunks of 2048.)
173
186
  ```
@@ -186,7 +199,183 @@ reader.readUntil(6000);
186
199
  // reader.done === true
187
200
  ```
188
201
 
189
- ### Read chunks
202
+ ### Get Result Data
203
+
204
+ Result data can be retrieved in a variety of forms:
205
+
206
+ ```ts
207
+ const reader = await connection.runAndReadAll(
208
+ 'from range(3) select range::int as i, 10 + i as n'
209
+ );
210
+
211
+ const rows = reader.getRows();
212
+ // [ [0, 10], [1, 11], [2, 12] ]
213
+
214
+ const rowObjects = reader.getRowObjects();
215
+ // [ { i: 0, n: 10 }, { i: 1, n: 11 }, { i: 2, n: 12 } ]
216
+
217
+ const columns = reader.getColumns();
218
+ // [ [0, 1, 2], [10, 11, 12] ]
219
+
220
+ const columnsObject = reader.getColumnsObject();
221
+ // { i: [0, 1, 2], n: [10, 11, 12] }
222
+ ```
223
+
224
+ ### Convert Result Data to JSON
225
+
226
+ By default, data values that cannot be represented as JS primitives
227
+ are returned as rich JS objects; see `Inspect Data Values` below.
228
+
229
+ To retrieve data in a form that can be losslessly serialized to JSON,
230
+ use the `Json` forms of the above result data methods:
231
+
232
+ ```ts
233
+ const reader = await connection.runAndReadAll(
234
+ 'from test_all_types() select bigint, date, interval limit 2'
235
+ );
236
+
237
+ const rows = reader.getRowsJson();
238
+ // [
239
+ // [
240
+ // "-9223372036854775808",
241
+ // "5877642-06-25 (BC)",
242
+ // { "months": 0, "days": 0, "micros": "0" }
243
+ // ],
244
+ // [
245
+ // "9223372036854775807",
246
+ // "5881580-07-10",
247
+ // { "months": 999, "days": 999, "micros": "999999999" }
248
+ // ]
249
+ // ]
250
+
251
+ const rowObjects = reader.getRowObjectsJson();
252
+ // [
253
+ // {
254
+ // "bigint": "-9223372036854775808",
255
+ // "date": "5877642-06-25 (BC)",
256
+ // "interval": { "months": 0, "days": 0, "micros": "0" }
257
+ // },
258
+ // {
259
+ // "bigint": "9223372036854775807",
260
+ // "date": "5881580-07-10",
261
+ // "interval": { "months": 999, "days": 999, "micros": "999999999" }
262
+ // }
263
+ // ]
264
+
265
+ const columns = reader.getColumnsJson();
266
+ // [
267
+ // [ "-9223372036854775808", "9223372036854775807" ],
268
+ // [ "5877642-06-25 (BC)", "5881580-07-10" ],
269
+ // [
270
+ // { "months": 0, "days": 0, "micros": "0" },
271
+ // { "months": 999, "days": 999, "micros": "999999999" }
272
+ // ]
273
+ // ]
274
+
275
+ const columnsObject = reader.getColumnsObjectJson();
276
+ // {
277
+ // "bigint": [ "-9223372036854775808", "9223372036854775807" ],
278
+ // "date": [ "5877642-06-25 (BC)", "5881580-07-10" ],
279
+ // "interval": [
280
+ // { "months": 0, "days": 0, "micros": "0" },
281
+ // { "months": 999, "days": 999, "micros": "999999999" }
282
+ // ]
283
+ // }
284
+ ```
285
+
286
+ These methods handle nested types as well:
287
+
288
+ ```ts
289
+ const reader = await connection.runAndReadAll(
290
+ 'from test_all_types() select int_array, struct, map, "union" limit 2'
291
+ );
292
+
293
+ const rows = reader.getRowsJson();
294
+ // [
295
+ // [
296
+ // [],
297
+ // { "a": null, "b": null },
298
+ // [],
299
+ // { "tag": "name", "value": "Frank" }
300
+ // ],
301
+ // [
302
+ // [ 42, 999, null, null, -42],
303
+ // { "a": 42, "b": "🦆🦆🦆🦆🦆🦆" },
304
+ // [
305
+ // { "key": "key1", "value": "🦆🦆🦆🦆🦆🦆" },
306
+ // { "key": "key2", "value": "goose" }
307
+ // ],
308
+ // { "tag": "age", "value": 5 }
309
+ // ]
310
+ // ]
311
+
312
+ const rowObjects = reader.getRowObjectsJson();
313
+ // [
314
+ // {
315
+ // "int_array": [],
316
+ // "struct": { "a": null, "b": null },
317
+ // "map": [],
318
+ // "union": { "tag": "name", "value": "Frank" }
319
+ // },
320
+ // {
321
+ // "int_array": [ 42, 999, null, null, -42 ],
322
+ // "struct": { "a": 42, "b": "🦆🦆🦆🦆🦆🦆" },
323
+ // "map": [
324
+ // { "key": "key1", "value": "🦆🦆🦆🦆🦆🦆" },
325
+ // { "key": "key2", "value": "goose" }
326
+ // ],
327
+ // "union": { "tag": "age", "value": 5 }
328
+ // }
329
+ // ]
330
+
331
+ const columns = reader.getColumnsJson();
332
+ // [
333
+ // [
334
+ // [],
335
+ // [42, 999, null, null, -42]
336
+ // ],
337
+ // [
338
+ // { "a": null, "b": null },
339
+ // { "a": 42, "b": "🦆🦆🦆🦆🦆🦆" }
340
+ // ],
341
+ // [
342
+ // [],
343
+ // [
344
+ // { "key": "key1", "value": "🦆🦆🦆🦆🦆🦆" },
345
+ // { "key": "key2", "value": "goose"}
346
+ // ]
347
+ // ],
348
+ // [
349
+ // { "tag": "name", "value": "Frank" },
350
+ // { "tag": "age", "value": 5 }
351
+ // ]
352
+ // ]
353
+
354
+ const columnsObject = reader.getColumnsObjectJson();
355
+ // {
356
+ // "int_array": [
357
+ // [],
358
+ // [42, 999, null, null, -42]
359
+ // ],
360
+ // "struct": [
361
+ // { "a": null, "b": null },
362
+ // { "a": 42, "b": "🦆🦆🦆🦆🦆🦆" }
363
+ // ],
364
+ // "map": [
365
+ // [],
366
+ // [
367
+ // { "key": "key1", "value": "🦆🦆🦆🦆🦆🦆" },
368
+ // { "key": "key2", "value": "goose" }
369
+ // ]
370
+ // ],
371
+ // "union": [
372
+ // { "tag": "name", "value": "Frank" },
373
+ // { "tag": "age", "value": 5 }
374
+ // ]
375
+ // }
376
+ ```
377
+
378
+ ### Fetch Chunks
190
379
 
191
380
  Fetch all chunks:
192
381
  ```ts
@@ -216,19 +405,18 @@ for (let i = 0; i < chunkCount; i++) {
216
405
  }
217
406
  ```
218
407
 
219
- Read chunk data (column-major):
408
+ Get chunk data:
220
409
  ```ts
221
- // array of columns, each as an array of values
222
- const columns = chunk.getColumns();
223
- ```
410
+ const rows = chunk.getRows();
224
411
 
225
- Read chunk data (row-major):
226
- ```ts
227
- // array of rows, each as an array of values
228
- const rows = chunk.getRows();
412
+ const rowObjects = chunk.getRowObjects();
413
+
414
+ const columns = chunk.getColumns();
415
+
416
+ const columnsObject = chunk.getColumnsObject();
229
417
  ```
230
418
 
231
- Read chunk data (one value at a time)
419
+ Get chunk data (one value at a time)
232
420
  ```ts
233
421
  const columns = [];
234
422
  const columnCount = chunk.columnCount;
@@ -417,6 +605,38 @@ if (columnType.typeId === DuckDBTypeId.UUID) {
417
605
  // other possible values are: null, boolean, number, bigint, or string
418
606
  ```
419
607
 
608
+ ### Displaying Timezones
609
+
610
+ Converting a TIMESTAMP_TZ value to a string depends on a timezone offset.
611
+ By default, this is set to the offset for the local timezone when the Node
612
+ process is started.
613
+
614
+ To change it, set the `timezoneOffsetInMinutes`
615
+ property of `DuckDBTimestampTZValue`:
616
+
617
+ ```ts
618
+ DuckDBTimestampTZValue.timezoneOffsetInMinutes = -8 * 60;
619
+ const pst = DuckDBTimestampTZValue.Epoch.toString();
620
+ // 1969-12-31 16:00:00-08
621
+
622
+ DuckDBTimestampTZValue.timezoneOffsetInMinutes = +1 * 60;
623
+ const cet = DuckDBTimestampTZValue.Epoch.toString();
624
+ // 1970-01-01 01:00:00+01
625
+ ```
626
+
627
+ Note that the timezone offset used for this string
628
+ conversion is distinct from the `TimeZone` setting of DuckDB.
629
+
630
+ The following sets this offset to match the `TimeZone` setting of DuckDB:
631
+
632
+ ```ts
633
+ const reader = await connection.runAndReadAll(
634
+ `select (timezone(current_timestamp) / 60)::int`
635
+ );
636
+ DuckDBTimestampTZValue.timezoneOffsetInMinutes =
637
+ reader.getColumns()[0][0];
638
+ ```
639
+
420
640
  ### Append To Table
421
641
 
422
642
  ```ts
@@ -510,3 +730,184 @@ console.log('ready');
510
730
  const result = await pending.getResult();
511
731
  // ...
512
732
  ```
733
+
734
+ ### Ways to run SQL
735
+
736
+ ```ts
737
+ // Run to completion but don't yet retrieve any rows.
738
+ // Optionally take values to bind to SQL parameters,
739
+ // and (optionally) types of those parameters,
740
+ // either as an array (for positional parameters),
741
+ // or an object keyed by parameter name.
742
+ const result = await connection.run(sql);
743
+ const result = await connection.run(sql, values);
744
+ const result = await connection.run(sql, values, types);
745
+
746
+ // Run to completion but don't yet retrieve any rows.
747
+ // Wrap in a DuckDBDataReader for convenient data retrieval.
748
+ const reader = await connection.runAndRead(sql);
749
+ const reader = await connection.runAndRead(sql, values);
750
+ const reader = await connection.runAndRead(sql, values, types);
751
+
752
+ // Run to completion, wrap in a reader, and read all rows.
753
+ const reader = await connection.runAndReadAll(sql);
754
+ const reader = await connection.runAndReadAll(sql, values);
755
+ const reader = await connection.runAndReadAll(sql, values, types);
756
+
757
+ // Run to completion, wrap in a reader, and read at least
758
+ // the given number of rows. (Rows are read in chunks, so more than
759
+ // the target may be read.)
760
+ const reader = await connection.runAndReadUntil(sql, targetRowCount);
761
+ const reader =
762
+ await connection.runAndReadUntil(sql, targetRowCount, values);
763
+ const reader =
764
+ await connection.runAndReadUntil(sql, targetRowCount, values, types);
765
+
766
+ // Create a streaming result and don't yet retrieve any rows.
767
+ const result = await connection.stream(sql);
768
+ const result = await connection.stream(sql, values);
769
+ const result = await connection.stream(sql, values, types);
770
+
771
+ // Create a streaming result and don't yet retrieve any rows.
772
+ // Wrap in a DuckDBDataReader for convenient data retrieval.
773
+ const reader = await connection.streamAndRead(sql);
774
+ const reader = await connection.streamAndRead(sql, values);
775
+ const reader = await connection.streamAndRead(sql, values, types);
776
+
777
+ // Create a streaming result, wrap in a reader, and read all rows.
778
+ const reader = await connection.streamAndReadAll(sql);
779
+ const reader = await connection.streamAndReadAll(sql, values);
780
+ const reader = await connection.streamAndReadAll(sql, values, types);
781
+
782
+ // Create a streaming result, wrap in a reader, and read at least
783
+ // the given number of rows.
784
+ const reader = await connection.streamAndReadUntil(sql, targetRowCount);
785
+ const reader =
786
+ await connection.streamAndReadUntil(sql, targetRowCount, values);
787
+ const reader =
788
+ await connection.streamAndReadUntil(sql, targetRowCount, values, types);
789
+
790
+ // Prepared Statements
791
+
792
+ // Prepare a possibly-parameterized SQL statement to run later.
793
+ const prepared = await connection.prepare(sql);
794
+
795
+ // Bind values to the parameters.
796
+ prepared.bind(values);
797
+ prepared.bind(values, types);
798
+
799
+ // Run the prepared statement. These mirror the methods on the connection.
800
+ const result = prepared.run();
801
+
802
+ const reader = prepared.runAndRead();
803
+ const reader = prepared.runAndReadAll();
804
+ const reader = prepared.runAndReadUntil(targetRowCount);
805
+
806
+ const result = prepared.stream();
807
+
808
+ const reader = prepared.streamAndRead();
809
+ const reader = prepared.streamAndReadAll();
810
+ const reader = prepared.streamAndReadUntil(targetRowCount);
811
+
812
+ // Pending Results
813
+
814
+ // Create a pending result.
815
+ const pending = await connection.start(sql);
816
+ const pending = await connection.start(sql, values);
817
+ const pending = await connection.start(sql, values, types);
818
+
819
+ // Create a pending, streaming result.
820
+ const pending = await connection.startStream(sql);
821
+ const pending = await connection.startStream(sql, values);
822
+ const pending = await connection.startStream(sql, values, types);
823
+
824
+ // Create a pending result from a prepared statement.
825
+ const pending = await prepared.start();
826
+ const pending = await prepared.startStream();
827
+
828
+ while (pending.runTask() !== DuckDBPendingResultState.RESULT_READY) {
829
+ // optionally sleep or do other work between tasks
830
+ }
831
+
832
+ // Retrieve the result. If not yet READY, will run until it is.
833
+ const result = await pending.getResult();
834
+
835
+ const reader = await pending.read();
836
+ const reader = await pending.readAll();
837
+ const reader = await pending.readUntil(targetRowCount);
838
+ ```
839
+
840
+ ### Ways to get result data
841
+
842
+ ```ts
843
+ // From a result
844
+
845
+ // Asynchronously retrieve data for all rows:
846
+ const columns = await result.getColumns();
847
+ const columnsJson = await result.getColumnsJson();
848
+ const columnsObject = await result.getColumnsObject();
849
+ const columnsObjectJson = await result.getColumnsObjectJson();
850
+ const rows = await result.getRows();
851
+ const rowsJson = await result.getRowsJson();
852
+ const rowObjects = await result.getRowObjects();
853
+ const rowObjectsJson = await result.getRowObjectsJson();
854
+
855
+ // From a reader
856
+
857
+ // First, (asynchronously) read some rows:
858
+ await reader.readAll();
859
+ // or:
860
+ await reader.readUntil(targetRowCount);
861
+
862
+ // Then, (synchronously) get result data for the rows read:
863
+ const columns = reader.getColumns();
864
+ const columnsJson = reader.getColumnsJson();
865
+ const columnsObject = reader.getColumnsObject();
866
+ const columnsObjectJson = reader.getColumnsObjectJson();
867
+ const rows = reader.getRows();
868
+ const rowsJson = reader.getRowsJson();
869
+ const rowObjects = reader.getRowObjects();
870
+ const rowObjectsJson = reader.getRowObjectsJson();
871
+
872
+ // Individual values can also be read directly:
873
+ const value = reader.value(columnIndex, rowIndex);
874
+
875
+ // Using chunks
876
+
877
+ // If desired, one or more chunks can be fetched from a result:
878
+ const chunk = await result.fetchChunk();
879
+ const chunks = await result.fetchAllChunks();
880
+
881
+ // And then data can be retrieved from each chunk:
882
+ const columnValues = chunk.getColumnValues(columnIndex);
883
+ const columns = chunk.getColumns();
884
+ const rowValues = chunk.getRowValues(rowIndex);
885
+ const rows = chunk.getRows();
886
+
887
+ // Or, values can be visited:
888
+ chunk.visitColumnValues(columnIndex,
889
+ (value, rowIndex, columnIndex, type) => { /* ... */ }
890
+ );
891
+ chunk.visitColumns((column, columnIndex, type) => { /* ... */ });
892
+ chunk.visitColumnMajor(
893
+ (value, rowIndex, columnIndex, type) => { /* ... */ }
894
+ );
895
+ chunk.visitRowValues(rowIndex,
896
+ (value, rowIndex, columnIndex, type) => { /* ... */ }
897
+ );
898
+ chunk.visitRows((row, rowIndex) => { /* ... */ });
899
+ chunk.visitRowMajor(
900
+ (value, rowIndex, columnIndex, type) => { /* ... */ }
901
+ );
902
+
903
+ // Or converted:
904
+ // The `converter` argument implements `DuckDBValueConverter`,
905
+ // which has the single method convertValue(value, type).
906
+ const columnValues = chunk.convertColumnValues(columnIndex, converter);
907
+ const columns = chunk.convertColumns(converter);
908
+ const rowValues = chunk.convertRowValues(rowIndex, converter);
909
+ const rows = chunk.convertRows(converter);
910
+
911
+ // The reader abstracts these low-level chunk manipulations
912
+ // and is recommended for most cases.
913
+ ```
@@ -18,16 +18,16 @@ export declare class DuckDBConnection {
18
18
  disconnect(): void;
19
19
  interrupt(): void;
20
20
  get progress(): duckdb.QueryProgress;
21
- run(sql: string, values?: DuckDBValue[] | Record<string, DuckDBValue>, types?: DuckDBType[] | Record<string, DuckDBType>): Promise<DuckDBMaterializedResult>;
22
- runAndRead(sql: string, values?: DuckDBValue[] | Record<string, DuckDBValue>, types?: DuckDBType[] | Record<string, DuckDBType>): Promise<DuckDBResultReader>;
23
- runAndReadAll(sql: string, values?: DuckDBValue[] | Record<string, DuckDBValue>, types?: DuckDBType[] | Record<string, DuckDBType>): Promise<DuckDBResultReader>;
24
- runAndReadUntil(sql: string, targetRowCount: number, values?: DuckDBValue[] | Record<string, DuckDBValue>, types?: DuckDBType[] | Record<string, DuckDBType>): Promise<DuckDBResultReader>;
25
- stream(sql: string, values?: DuckDBValue[] | Record<string, DuckDBValue>, types?: DuckDBType[] | Record<string, DuckDBType>): Promise<DuckDBResult>;
26
- streamAndRead(sql: string, values?: DuckDBValue[] | Record<string, DuckDBValue>, types?: DuckDBType[] | Record<string, DuckDBType>): Promise<DuckDBResultReader>;
27
- streamAndReadAll(sql: string, values?: DuckDBValue[] | Record<string, DuckDBValue>, types?: DuckDBType[] | Record<string, DuckDBType>): Promise<DuckDBResultReader>;
28
- streamAndReadUntil(sql: string, targetRowCount: number, values?: DuckDBValue[] | Record<string, DuckDBValue>, types?: DuckDBType[] | Record<string, DuckDBType>): Promise<DuckDBResultReader>;
29
- start(sql: string, values?: DuckDBValue[] | Record<string, DuckDBValue>, types?: DuckDBType[] | Record<string, DuckDBType>): Promise<DuckDBPendingResult>;
30
- startStream(sql: string, values?: DuckDBValue[] | Record<string, DuckDBValue>, types?: DuckDBType[] | Record<string, DuckDBType>): Promise<DuckDBPendingResult>;
21
+ run(sql: string, values?: DuckDBValue[] | Record<string, DuckDBValue>, types?: DuckDBType[] | Record<string, DuckDBType | undefined>): Promise<DuckDBMaterializedResult>;
22
+ runAndRead(sql: string, values?: DuckDBValue[] | Record<string, DuckDBValue>, types?: DuckDBType[] | Record<string, DuckDBType | undefined>): Promise<DuckDBResultReader>;
23
+ runAndReadAll(sql: string, values?: DuckDBValue[] | Record<string, DuckDBValue>, types?: DuckDBType[] | Record<string, DuckDBType | undefined>): Promise<DuckDBResultReader>;
24
+ runAndReadUntil(sql: string, targetRowCount: number, values?: DuckDBValue[] | Record<string, DuckDBValue>, types?: DuckDBType[] | Record<string, DuckDBType | undefined>): Promise<DuckDBResultReader>;
25
+ stream(sql: string, values?: DuckDBValue[] | Record<string, DuckDBValue>, types?: DuckDBType[] | Record<string, DuckDBType | undefined>): Promise<DuckDBResult>;
26
+ streamAndRead(sql: string, values?: DuckDBValue[] | Record<string, DuckDBValue>, types?: DuckDBType[] | Record<string, DuckDBType | undefined>): Promise<DuckDBResultReader>;
27
+ streamAndReadAll(sql: string, values?: DuckDBValue[] | Record<string, DuckDBValue>, types?: DuckDBType[] | Record<string, DuckDBType | undefined>): Promise<DuckDBResultReader>;
28
+ streamAndReadUntil(sql: string, targetRowCount: number, values?: DuckDBValue[] | Record<string, DuckDBValue>, types?: DuckDBType[] | Record<string, DuckDBType | undefined>): Promise<DuckDBResultReader>;
29
+ start(sql: string, values?: DuckDBValue[] | Record<string, DuckDBValue>, types?: DuckDBType[] | Record<string, DuckDBType | undefined>): Promise<DuckDBPendingResult>;
30
+ startStream(sql: string, values?: DuckDBValue[] | Record<string, DuckDBValue>, types?: DuckDBType[] | Record<string, DuckDBType | undefined>): Promise<DuckDBPendingResult>;
31
31
  prepare(sql: string): Promise<DuckDBPreparedStatement>;
32
32
  extractStatements(sql: string): Promise<DuckDBExtractedStatements>;
33
33
  createAppender(schema: string, table: string): Promise<DuckDBAppender>;
@@ -1,5 +1,6 @@
1
1
  import duckdb from '@duckdb/node-bindings';
2
2
  import { DuckDBType } from './DuckDBType';
3
+ import { DuckDBValueConverter } from './DuckDBValueConverter';
3
4
  import { DuckDBVector } from './DuckDBVector';
4
5
  import { DuckDBValue } from './values';
5
6
  export declare class DuckDBDataChunk {
@@ -9,13 +10,24 @@ export declare class DuckDBDataChunk {
9
10
  static create(types: readonly DuckDBType[], rowCount?: number): DuckDBDataChunk;
10
11
  reset(): void;
11
12
  get columnCount(): number;
13
+ get rowCount(): number;
14
+ set rowCount(count: number);
12
15
  getColumnVector(columnIndex: number): DuckDBVector;
16
+ visitColumnValues(columnIndex: number, visitValue: (value: DuckDBValue, rowIndex: number, columnIndex: number, type: DuckDBType) => void): void;
13
17
  getColumnValues(columnIndex: number): DuckDBValue[];
18
+ convertColumnValues<T>(columnIndex: number, converter: DuckDBValueConverter<T>): T[];
14
19
  setColumnValues(columnIndex: number, values: readonly DuckDBValue[]): void;
20
+ visitColumns(visitColumn: (column: DuckDBValue[], columnIndex: number, type: DuckDBType) => void): void;
15
21
  getColumns(): DuckDBValue[][];
22
+ convertColumns<T>(converter: DuckDBValueConverter<T>): T[][];
16
23
  setColumns(columns: readonly (readonly DuckDBValue[])[]): void;
24
+ visitColumnMajor(visitValue: (value: DuckDBValue, rowIndex: number, columnIndex: number, type: DuckDBType) => void): void;
25
+ visitRowValues(rowIndex: number, visitValue: (value: DuckDBValue, rowIndex: number, columnIndex: number, type: DuckDBType) => void): void;
26
+ getRowValues(rowIndex: number): DuckDBValue[];
27
+ convertRowValues<T>(rowIndex: number, converter: DuckDBValueConverter<T>): T[];
28
+ visitRows(visitRow: (row: DuckDBValue[], rowIndex: number) => void): void;
17
29
  getRows(): DuckDBValue[][];
30
+ convertRows<T>(converter: DuckDBValueConverter<T>): T[][];
18
31
  setRows(rows: readonly (readonly DuckDBValue[])[]): void;
19
- get rowCount(): number;
20
- set rowCount(count: number);
32
+ visitRowMajor(visitValue: (value: DuckDBValue, rowIndex: number, columnIndex: number, type: DuckDBType) => void): void;
21
33
  }