@sqlrooms/duckdb-core 0.26.1-rc.11

This diff shows the content of publicly available package versions as released to one of the supported registries. It is provided for informational purposes only and reflects the changes between package versions as they appear in their public registries.
Files changed (55)
  1. package/LICENSE.md +9 -0
  2. package/README.md +387 -0
  3. package/dist/BaseDuckDbConnector.d.ts +20 -0
  4. package/dist/BaseDuckDbConnector.d.ts.map +1 -0
  5. package/dist/BaseDuckDbConnector.js +122 -0
  6. package/dist/BaseDuckDbConnector.js.map +1 -0
  7. package/dist/DuckDbConnector.d.ts +312 -0
  8. package/dist/DuckDbConnector.d.ts.map +1 -0
  9. package/dist/DuckDbConnector.js +2 -0
  10. package/dist/DuckDbConnector.js.map +1 -0
  11. package/dist/arrow-utils.d.ts +8 -0
  12. package/dist/arrow-utils.d.ts.map +1 -0
  13. package/dist/arrow-utils.js +28 -0
  14. package/dist/arrow-utils.js.map +1 -0
  15. package/dist/duckdb-utils.d.ts +140 -0
  16. package/dist/duckdb-utils.d.ts.map +1 -0
  17. package/dist/duckdb-utils.js +290 -0
  18. package/dist/duckdb-utils.js.map +1 -0
  19. package/dist/index.d.ts +12 -0
  20. package/dist/index.d.ts.map +1 -0
  21. package/dist/index.js +8 -0
  22. package/dist/index.js.map +1 -0
  23. package/dist/load/create.d.ts +33 -0
  24. package/dist/load/create.d.ts.map +1 -0
  25. package/dist/load/create.js +33 -0
  26. package/dist/load/create.js.map +1 -0
  27. package/dist/load/load.d.ts +57 -0
  28. package/dist/load/load.d.ts.map +1 -0
  29. package/dist/load/load.js +153 -0
  30. package/dist/load/load.js.map +1 -0
  31. package/dist/load/sql-from.d.ts +18 -0
  32. package/dist/load/sql-from.d.ts.map +1 -0
  33. package/dist/load/sql-from.js +69 -0
  34. package/dist/load/sql-from.js.map +1 -0
  35. package/dist/schema-tree/schemaTree.d.ts +9 -0
  36. package/dist/schema-tree/schemaTree.d.ts.map +1 -0
  37. package/dist/schema-tree/schemaTree.js +75 -0
  38. package/dist/schema-tree/schemaTree.js.map +1 -0
  39. package/dist/schema-tree/typeCategories.d.ts +16 -0
  40. package/dist/schema-tree/typeCategories.d.ts.map +1 -0
  41. package/dist/schema-tree/typeCategories.js +72 -0
  42. package/dist/schema-tree/typeCategories.js.map +1 -0
  43. package/dist/schema-tree/types.d.ts +28 -0
  44. package/dist/schema-tree/types.d.ts.map +1 -0
  45. package/dist/schema-tree/types.js +2 -0
  46. package/dist/schema-tree/types.js.map +1 -0
  47. package/dist/typedRowAccessor.d.ts +19 -0
  48. package/dist/typedRowAccessor.d.ts.map +1 -0
  49. package/dist/typedRowAccessor.js +45 -0
  50. package/dist/typedRowAccessor.js.map +1 -0
  51. package/dist/types.d.ts +21 -0
  52. package/dist/types.d.ts.map +1 -0
  53. package/dist/types.js +2 -0
  54. package/dist/types.js.map +1 -0
  55. package/package.json +42 -0
package/dist/load/load.js ADDED
@@ -0,0 +1,153 @@
+ // Adapted from https://github.com/uwdata/mosaic/blob/main/packages/sql/src/load/
+ // BSD 3-Clause License Copyright (c) 2023, UW Interactive Data Lab
+ import { createSchema, createTable } from './create';
+ import { literalToSQL, sqlFrom } from './sql-from';
+ // Re-export for external use
+ export { literalToSQL, sqlFrom };
+ /**
+  * Generic function to load data from a file into a DuckDB table
+  * @param method - The DuckDB read method to use (e.g., 'read_csv', 'read_json')
+  * @param tableName - Name of the table to create
+  * @param fileName - Path to the input file
+  * @param options - Load options including select, where, view, temp, replace and file-specific options
+  * @param defaults - Default options to merge with provided options
+  * @returns SQL query string to create the table
+  */
+ export function load(method, tableName, fileName, options = {}, defaults = {}) {
+     const {
+     // eslint-disable-next-line @typescript-eslint/no-unused-vars
+     method: _method, // Remove from options
+     schema, select = ['*'], where, view, temp, replace, ...file } = options;
+     const params = parameters({ ...defaults, ...file });
+     const read = method === 'auto'
+         ? `'${fileName}'${params ? ', ' + params : ''}`
+         : `${method}('${fileName}'${params ? ', ' + params : ''})`;
+     const filter = where ? ` WHERE ${where}` : '';
+     const query = `SELECT ${select.join(', ')} FROM ${read}${filter}`;
+     return ((schema ? `${createSchema(schema)}; ` : '') +
+         createTable(schema ? `${schema}.${tableName}` : tableName, query, {
+             view,
+             temp,
+             replace,
+         }));
+ }
+ /**
+  * Load data from a CSV file into a DuckDB table
+  * @param tableName - Name of the table to create
+  * @param fileName - Path to the CSV file
+  * @param options - Load options
+  * @returns SQL query string to create the table
+  */
+ export function loadCSV(tableName, fileName, options) {
+     return load('read_csv', tableName, fileName, options, {
+         auto_detect: true,
+         sample_size: -1,
+     });
+ }
+ /**
+  * Load data from a JSON file into a DuckDB table
+  * @param tableName - Name of the table to create
+  * @param fileName - Path to the JSON file
+  * @param options - Load options
+  * @returns SQL query string to create the table
+  */
+ export function loadJSON(tableName, fileName, options) {
+     return load('read_json', tableName, fileName, options, {
+         auto_detect: true,
+         format: 'auto',
+     });
+ }
+ /**
+  * Load data from a Parquet file into a DuckDB table
+  * @param tableName - Name of the table to create
+  * @param fileName - Path to the Parquet file
+  * @param options - Load options
+  * @returns SQL query string to create the table
+  */
+ export function loadParquet(tableName, fileName, options) {
+     return load('read_parquet', tableName, fileName, options);
+ }
+ /**
+  * Load geometry data within a spatial file format.
+  * This method requires that the DuckDB spatial extension is loaded.
+  * Supports GeoJSON, TopoJSON, and other common spatial formats.
+  * For TopoJSON, set the layer option to indicate the feature to extract.
+  * @param tableName - Name of the table to create
+  * @param fileName - Path to the spatial data file
+  * @param options - Load options including spatial-specific options
+  * @returns SQL query string to create the table
+  */
+ export function loadSpatial(tableName, fileName, options = {}) {
+     // nested options map to the open_options argument of st_read
+     const { schema, options: opt, ...rest } = options;
+     if (opt) {
+         // TODO: check correct syntax for open_options
+         const open = Array.isArray(opt)
+             ? opt.join(', ')
+             : typeof opt === 'string'
+                 ? opt
+                 : Object.entries(opt)
+                     .map(([key, value]) => `${key}=${value}`)
+                     .join(', ');
+         Object.assign(rest, { open_options: open.toUpperCase() });
+     }
+     // TODO: handle box_2d for spatial_filter_box option
+     // TODO: handle wkb_blob for spatial_filter option
+     return load('st_read', schema ? `${schema}.${tableName}` : tableName, fileName, rest);
+ }
+ /**
+  * Load JavaScript objects directly into a DuckDB table
+  * @param tableName - Name of the table to create
+  * @param data - Array of objects to load
+  * @param options - Load options
+  * @returns SQL query string to create the table
+  */
+ export function loadObjects(tableName, data, options = {}) {
+     const { schema, select = ['*'], ...opt } = options;
+     const values = sqlFrom(data);
+     const query = select.length === 1 && select[0] === '*'
+         ? values
+         : `SELECT ${select} FROM ${values}`;
+     return createTable(schema ? `${schema}.${tableName}` : tableName, query, opt);
+ }
+ /**
+  * Convert options object to DuckDB parameter string
+  * @param options - Object containing parameter key-value pairs
+  * @returns Formatted parameter string
+  */
+ function parameters(options) {
+     return Object.entries(options)
+         .map(([key, value]) => `${key}=${toDuckDBValue(value)}`)
+         .join(', ');
+ }
+ /**
+  * Convert JavaScript value to DuckDB literal string representation
+  * @param value - Value to convert
+  * @returns DuckDB literal string
+  */
+ function toDuckDBValue(value) {
+     switch (typeof value) {
+         case 'boolean':
+             return String(value);
+         case 'string':
+             return `'${value}'`;
+         case 'undefined':
+         case 'object':
+             if (value == null) {
+                 return 'NULL';
+             }
+             else if (Array.isArray(value)) {
+                 return '[' + value.map((v) => toDuckDBValue(v)).join(', ') + ']';
+             }
+             else {
+                 return ('{' +
+                     Object.entries(value)
+                         .map(([k, v]) => `'${k}': ${toDuckDBValue(v)}`)
+                         .join(', ') +
+                     '}');
+             }
+         default:
+             return String(value);
+     }
+ }
+ //# sourceMappingURL=load.js.map
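A quick usage sketch (editorial, hedged: the import path assumes these helpers are re-exported from the package root, and the exact CREATE wrapper comes from createTable in ./create, which is not shown in this hunk):

import {loadCSV, loadObjects} from '@sqlrooms/duckdb-core';

// Produces something like:
//   CREATE TABLE quakes AS
//   SELECT * FROM read_csv('quakes.csv', auto_detect=true, sample_size=-1)
const csvSql = loadCSV('quakes', 'quakes.csv');

// In-memory objects are embedded through sqlFrom as a UNION ALL of SELECTs:
const objSql = loadObjects('users', [
  {id: 1, name: 'Ada'},
  {id: 2, name: 'Grace'},
]);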
package/dist/load/load.js.map ADDED
@@ -0,0 +1 @@
+ {"version":3,"file":"load.js","sourceRoot":"","sources":["../../src/load/load.ts"],"names":[],"mappings":"AAAA,iFAAiF;AACjF,mEAAmE;AAOnE,OAAO,EAAC,YAAY,EAAE,WAAW,EAAC,MAAM,UAAU,CAAC;AACnD,OAAO,EAAC,YAAY,EAAE,OAAO,EAAC,MAAM,YAAY,CAAC;AAEjD,6BAA6B;AAC7B,OAAO,EAAC,YAAY,EAAE,OAAO,EAAC,CAAC;AAE/B;;;;;;;;GAQG;AACH,MAAM,UAAU,IAAI,CAClB,MAAgB,EAChB,SAAiB,EACjB,QAAgB,EAChB,UAA+B,EAAE,EACjC,WAAoC,EAAE;IAEtC,MAAM;IACJ,6DAA6D;IAC7D,MAAM,EAAE,OAAO,EAAE,sBAAsB;IACvC,MAAM,EACN,MAAM,GAAG,CAAC,GAAG,CAAC,EACd,KAAK,EACL,IAAI,EACJ,IAAI,EACJ,OAAO,EACP,GAAG,IAAI,EACR,GAAG,OAAO,CAAC;IACZ,MAAM,MAAM,GAAG,UAAU,CAAC,EAAC,GAAG,QAAQ,EAAE,GAAG,IAAI,EAAC,CAAC,CAAC;IAClD,MAAM,IAAI,GACR,MAAM,KAAK,MAAM;QACf,CAAC,CAAC,IAAI,QAAQ,IAAI,MAAM,CAAC,CAAC,CAAC,IAAI,GAAG,MAAM,CAAC,CAAC,CAAC,EAAE,EAAE;QAC/C,CAAC,CAAC,GAAG,MAAM,KAAK,QAAQ,IAAI,MAAM,CAAC,CAAC,CAAC,IAAI,GAAG,MAAM,CAAC,CAAC,CAAC,EAAE,GAAG,CAAC;IAC/D,MAAM,MAAM,GAAG,KAAK,CAAC,CAAC,CAAC,UAAU,KAAK,EAAE,CAAC,CAAC,CAAC,EAAE,CAAC;IAC9C,MAAM,KAAK,GAAG,UAAU,MAAM,CAAC,IAAI,CAAC,IAAI,CAAC,SAAS,IAAI,GAAG,MAAM,EAAE,CAAC;IAClE,OAAO,CACL,CAAC,MAAM,CAAC,CAAC,CAAC,GAAG,YAAY,CAAC,MAAM,CAAC,IAAI,CAAC,CAAC,CAAC,EAAE,CAAC;QAC3C,WAAW,CAAC,MAAM,CAAC,CAAC,CAAC,GAAG,MAAM,IAAI,SAAS,EAAE,CAAC,CAAC,CAAC,SAAS,EAAE,KAAK,EAAE;YAChE,IAAI;YACJ,IAAI;YACJ,OAAO;SACR,CAAC,CACH,CAAC;AACJ,CAAC;AAED;;;;;;GAMG;AACH,MAAM,UAAU,OAAO,CACrB,SAAiB,EACjB,QAAgB,EAChB,OAA6B;IAE7B,OAAO,IAAI,CAAC,UAAU,EAAE,SAAS,EAAE,QAAQ,EAAE,OAAO,EAAE;QACpD,WAAW,EAAE,IAAI;QACjB,WAAW,EAAE,CAAC,CAAC;KAChB,CAAC,CAAC;AACL,CAAC;AAED;;;;;;GAMG;AACH,MAAM,UAAU,QAAQ,CACtB,SAAiB,EACjB,QAAgB,EAChB,OAA6B;IAE7B,OAAO,IAAI,CAAC,WAAW,EAAE,SAAS,EAAE,QAAQ,EAAE,OAAO,EAAE;QACrD,WAAW,EAAE,IAAI;QACjB,MAAM,EAAE,MAAM;KACf,CAAC,CAAC;AACL,CAAC;AAED;;;;;;GAMG;AACH,MAAM,UAAU,WAAW,CACzB,SAAiB,EACjB,QAAgB,EAChB,OAA6B;IAE7B,OAAO,IAAI,CAAC,cAAc,EAAE,SAAS,EAAE,QAAQ,EAAE,OAAO,CAAC,CAAC;AAC5D,CAAC;AAED;;;;;;;;;GASG;AACH,MAAM,UAAU,WAAW,CACzB,SAAiB,EACjB,QAAgB,EAChB,UAA8B,EAAE;IAEhC,6DAA6D;IAC7D,MAAM,EAAC,MAAM,EAAE,OAAO,EAAE,GAAG,EAAE,GAAG,IAAI,EAAC,GAAG,OAAO,CAAC;IAChD,IAAI,GAAG,EAAE,CAAC;QACR,8CAA8C;QAC9C,MAAM,IAAI,GAAG,KAAK,CAAC,OAAO,CAAC,GAAG,CAAC;YAC7B,CAAC,CAAC,GAAG,CAAC,IAAI,CAAC,IAAI,CAAC;YAChB,CAAC,CAAC,OAAO,GAAG,KAAK,QAAQ;gBACvB,CAAC,CAAC,GAAG;gBACL,CAAC,CAAC,MAAM,CAAC,OAAO,CAAC,GAAG,CAAC;qBAChB,GAAG,CAAC,CAAC,CAAC,GAAG,EAAE,KAAK,CAAC,EAAE,EAAE,CAAC,GAAG,GAAG,IAAI,KAAK,EAAE,CAAC;qBACxC,IAAI,CAAC,IAAI,CAAC,CAAC;QACpB,MAAM,CAAC,MAAM,CAAC,IAAI,EAAE,EAAC,YAAY,EAAE,IAAI,CAAC,WAAW,EAAE,EAAC,CAAC,CAAC;IAC1D,CAAC;IACD,oDAAoD;IACpD,kDAAkD;IAClD,OAAO,IAAI,CACT,SAAS,EACT,MAAM,CAAC,CAAC,CAAC,GAAG,MAAM,IAAI,SAAS,EAAE,CAAC,CAAC,CAAC,SAAS,EAC7C,QAAQ,EACR,IAAI,CACL,CAAC;AACJ,CAAC;AAED;;;;;;GAMG;AACH,MAAM,UAAU,WAAW,CACzB,SAAiB,EACjB,IAA+B,EAC/B,UAA+B,EAAE;IAEjC,MAAM,EAAC,MAAM,EAAE,MAAM,GAAG,CAAC,GAAG,CAAC,EAAE,GAAG,GAAG,EAAC,GAAG,OAAO,CAAC;IACjD,MAAM,MAAM,GAAG,OAAO,CAAC,IAAI,CAAC,CAAC;IAC7B,MAAM,KAAK,GACT,MAAM,CAAC,MAAM,KAAK,CAAC,IAAI,MAAM,CAAC,CAAC,CAAC,KAAK,GAAG;QACtC,CAAC,CAAC,MAAM;QACR,CAAC,CAAC,UAAU,MAAM,SAAS,MAAM,EAAE,CAAC;IACxC,OAAO,WAAW,CAAC,MAAM,CAAC,CAAC,CAAC,GAAG,MAAM,IAAI,SAAS,EAAE,CAAC,CAAC,CAAC,SAAS,EAAE,KAAK,EAAE,GAAG,CAAC,CAAC;AAChF,CAAC;AAED;;;;GAIG;AACH,SAAS,UAAU,CAAC,OAAgC;IAClD,OAAO,MAAM,CAAC,OAAO,CAAC,OAAO,CAAC;SAC3B,GAAG,CAAC,CAAC,CAAC,GAAG,EAAE,KAAK,CAAC,EAAE,EAAE,CAAC,GAAG,GAAG,IAAI,aAAa,CAAC,KAAK,CAAC,EAAE,CAAC;SACvD,IAAI,CAAC,IAAI,CAAC,CAAC;AAChB,CAAC;AAED;;;;GAIG;AACH,SAAS,aAAa,CAAC,KAAc;IACnC,QAAQ,OAAO,KAAK,EAAE,CAAC;QACrB,KAAK,SAAS;YACZ,OAAO,MAAM,CAAC,KAAK,CAAC,CAAC;QACvB,KAAK,QAAQ;YACX,OAAO,IAAI,KAAK,GAAG,CAAC;QACtB,KAAK,WAAW,CAAC;QACjB,KAAK,QAAQ;YACX
,IAAI,KAAK,IAAI,IAAI,EAAE,CAAC;gBAClB,OAAO,MAAM,CAAC;YAChB,CAAC;iBAAM,IAAI,KAAK,CAAC,OAAO,CAAC,KAAK,CAAC,EAAE,CAAC;gBAChC,OAAO,GAAG,GAAG,KAAK,CAAC,GAAG,CAAC,CAAC,CAAC,EAAE,EAAE,CAAC,aAAa,CAAC,CAAC,CAAC,CAAC,CAAC,IAAI,CAAC,IAAI,CAAC,GAAG,GAAG,CAAC;YACnE,CAAC;iBAAM,CAAC;gBACN,OAAO,CACL,GAAG;oBACH,MAAM,CAAC,OAAO,CAAC,KAAK,CAAC;yBAClB,GAAG,CAAC,CAAC,CAAC,CAAC,EAAE,CAAC,CAAC,EAAE,EAAE,CAAC,IAAI,CAAC,MAAM,aAAa,CAAC,CAAC,CAAC,EAAE,CAAC;yBAC9C,IAAI,CAAC,IAAI,CAAC;oBACb,GAAG,CACJ,CAAC;YACJ,CAAC;QACH;YACE,OAAO,MAAM,CAAC,KAAK,CAAC,CAAC;IACzB,CAAC;AACH,CAAC","sourcesContent":["// Adapted from https://github.com/uwdata/mosaic/blob/main/packages/sql/src/load/\n// BSD 3-Clause License Copyright (c) 2023, UW Interactive Data Lab\n\nimport {\n LoadFile,\n SpatialLoadOptions,\n StandardLoadOptions,\n} from '@sqlrooms/room-config';\nimport {createSchema, createTable} from './create';\nimport {literalToSQL, sqlFrom} from './sql-from';\n\n// Re-export for external use\nexport {literalToSQL, sqlFrom};\n\n/**\n * Generic function to load data from a file into a DuckDB table\n * @param method - The DuckDB read method to use (e.g., 'read_csv', 'read_json')\n * @param tableName - Name of the table to create\n * @param fileName - Path to the input file\n * @param options - Load options including select, where, view, temp, replace and file-specific options\n * @param defaults - Default options to merge with provided options\n * @returns SQL query string to create the table\n */\nexport function load(\n method: LoadFile,\n tableName: string,\n fileName: string,\n options: StandardLoadOptions = {},\n defaults: Record<string, unknown> = {},\n): string {\n const {\n // eslint-disable-next-line @typescript-eslint/no-unused-vars\n method: _method, // Remove from options\n schema,\n select = ['*'],\n where,\n view,\n temp,\n replace,\n ...file\n } = options;\n const params = parameters({...defaults, ...file});\n const read =\n method === 'auto'\n ? `'${fileName}'${params ? ', ' + params : ''}`\n : `${method}('${fileName}'${params ? ', ' + params : ''})`;\n const filter = where ? ` WHERE ${where}` : '';\n const query = `SELECT ${select.join(', ')} FROM ${read}${filter}`;\n return (\n (schema ? `${createSchema(schema)}; ` : '') +\n createTable(schema ? 
`${schema}.${tableName}` : tableName, query, {\n view,\n temp,\n replace,\n })\n );\n}\n\n/**\n * Load data from a CSV file into a DuckDB table\n * @param tableName - Name of the table to create\n * @param fileName - Path to the CSV file\n * @param options - Load options\n * @returns SQL query string to create the table\n */\nexport function loadCSV(\n tableName: string,\n fileName: string,\n options?: StandardLoadOptions,\n): string {\n return load('read_csv', tableName, fileName, options, {\n auto_detect: true,\n sample_size: -1,\n });\n}\n\n/**\n * Load data from a JSON file into a DuckDB table\n * @param tableName - Name of the table to create\n * @param fileName - Path to the JSON file\n * @param options - Load options\n * @returns SQL query string to create the table\n */\nexport function loadJSON(\n tableName: string,\n fileName: string,\n options?: StandardLoadOptions,\n): string {\n return load('read_json', tableName, fileName, options, {\n auto_detect: true,\n format: 'auto',\n });\n}\n\n/**\n * Load data from a Parquet file into a DuckDB table\n * @param tableName - Name of the table to create\n * @param fileName - Path to the Parquet file\n * @param options - Load options\n * @returns SQL query string to create the table\n */\nexport function loadParquet(\n tableName: string,\n fileName: string,\n options?: StandardLoadOptions,\n): string {\n return load('read_parquet', tableName, fileName, options);\n}\n\n/**\n * Load geometry data within a spatial file format.\n * This method requires that the DuckDB spatial extension is loaded.\n * Supports GeoJSON, TopoJSON, and other common spatial formats.\n * For TopoJSON, set the layer option to indicate the feature to extract.\n * @param tableName - Name of the table to create\n * @param fileName - Path to the spatial data file\n * @param options - Load options including spatial-specific options\n * @returns SQL query string to create the table\n */\nexport function loadSpatial(\n tableName: string,\n fileName: string,\n options: SpatialLoadOptions = {},\n): string {\n // nested options map to the open_options argument of st_read\n const {schema, options: opt, ...rest} = options;\n if (opt) {\n // TODO: check correct syntax for open_options\n const open = Array.isArray(opt)\n ? opt.join(', ')\n : typeof opt === 'string'\n ? opt\n : Object.entries(opt)\n .map(([key, value]) => `${key}=${value}`)\n .join(', ');\n Object.assign(rest, {open_options: open.toUpperCase()});\n }\n // TODO: handle box_2d for spatial_filter_box option\n // TODO: handle wkb_blob for spatial_filter option\n return load(\n 'st_read',\n schema ? `${schema}.${tableName}` : tableName,\n fileName,\n rest,\n );\n}\n\n/**\n * Load JavaScript objects directly into a DuckDB table\n * @param tableName - Name of the table to create\n * @param data - Array of objects to load\n * @param options - Load options\n * @returns SQL query string to create the table\n */\nexport function loadObjects(\n tableName: string,\n data: Record<string, unknown>[],\n options: StandardLoadOptions = {},\n): string {\n const {schema, select = ['*'], ...opt} = options;\n const values = sqlFrom(data);\n const query =\n select.length === 1 && select[0] === '*'\n ? values\n : `SELECT ${select} FROM ${values}`;\n return createTable(schema ? 
`${schema}.${tableName}` : tableName, query, opt);\n}\n\n/**\n * Convert options object to DuckDB parameter string\n * @param options - Object containing parameter key-value pairs\n * @returns Formatted parameter string\n */\nfunction parameters(options: Record<string, unknown>): string {\n return Object.entries(options)\n .map(([key, value]) => `${key}=${toDuckDBValue(value)}`)\n .join(', ');\n}\n\n/**\n * Convert JavaScript value to DuckDB literal string representation\n * @param value - Value to convert\n * @returns DuckDB literal string\n */\nfunction toDuckDBValue(value: unknown): string {\n switch (typeof value) {\n case 'boolean':\n return String(value);\n case 'string':\n return `'${value}'`;\n case 'undefined':\n case 'object':\n if (value == null) {\n return 'NULL';\n } else if (Array.isArray(value)) {\n return '[' + value.map((v) => toDuckDBValue(v)).join(', ') + ']';\n } else {\n return (\n '{' +\n Object.entries(value)\n .map(([k, v]) => `'${k}': ${toDuckDBValue(v)}`)\n .join(', ') +\n '}'\n );\n }\n default:\n return String(value);\n }\n}\n"]}
package/dist/load/sql-from.d.ts ADDED
@@ -0,0 +1,18 @@
+ /**
+  * Create a SQL query that embeds the given data for loading.
+  * @param {*} data The dataset as an array of objects.
+  * @param {object} [options] Loading options.
+  * @param {string[]|Record<string,string>} [options.columns] The columns to include.
+  * If not specified, the keys of the first data object are used.
+  * @returns {string} SQL query string to load data.
+  */
+ export declare function sqlFrom(data: Record<string, unknown>[], { columns, }?: {
+     columns?: string[] | Record<string, string>;
+ }): string;
+ /**
+  * Convert a value to a SQL literal.
+  * @param {*} value The value to convert.
+  * @returns {string} The SQL literal.
+  */
+ export declare function literalToSQL(value: unknown): string;
+ //# sourceMappingURL=sql-from.d.ts.map
package/dist/load/sql-from.d.ts.map ADDED
@@ -0,0 +1 @@
+ {"version":3,"file":"sql-from.d.ts","sourceRoot":"","sources":["../../src/load/sql-from.ts"],"names":[],"mappings":"AAGA;;;;;;;GAOG;AACH,wBAAgB,OAAO,CACrB,IAAI,EAAE,MAAM,CAAC,MAAM,EAAE,OAAO,CAAC,EAAE,EAC/B,EACE,OAAsC,GACvC,GAAE;IAAC,OAAO,CAAC,EAAE,MAAM,EAAE,GAAG,MAAM,CAAC,MAAM,EAAE,MAAM,CAAC,CAAA;CAAM,UAsBtD;AAED;;;;GAIG;AACH,wBAAgB,YAAY,CAAC,KAAK,EAAE,OAAO,UA2B1C"}
package/dist/load/sql-from.js ADDED
@@ -0,0 +1,69 @@
+ // Adapted from https://github.com/uwdata/mosaic/blob/main/packages/sql/src/load/
+ // BSD 3-Clause License Copyright (c) 2023, UW Interactive Data Lab
+ /**
+  * Create a SQL query that embeds the given data for loading.
+  * @param {*} data The dataset as an array of objects.
+  * @param {object} [options] Loading options.
+  * @param {string[]|Record<string,string>} [options.columns] The columns to include.
+  * If not specified, the keys of the first data object are used.
+  * @returns {string} SQL query string to load data.
+  */
+ export function sqlFrom(data, { columns = Object.keys(data?.[0] || {}), } = {}) {
+     let keys = [];
+     let columnMap;
+     if (Array.isArray(columns)) {
+         keys = columns;
+         columnMap = keys.reduce((m, k) => ({ ...m, [k]: k }), {});
+     }
+     else {
+         columnMap = columns;
+         keys = Object.keys(columns);
+     }
+     if (!keys.length) {
+         throw new Error('Can not create table from empty column set.');
+     }
+     const subq = [];
+     for (const datum of data) {
+         const sel = keys.map((k) => `${literalToSQL(datum[k])} AS "${columnMap[k]}"`);
+         subq.push(`(SELECT ${sel.join(', ')})`);
+     }
+     return subq.join(' UNION ALL ');
+ }
+ /**
+  * Convert a value to a SQL literal.
+  * @param {*} value The value to convert.
+  * @returns {string} The SQL literal.
+  */
+ export function literalToSQL(value) {
+     switch (typeof value) {
+         case 'number':
+             return Number.isFinite(value) ? `${value}` : 'NULL';
+         case 'string':
+             return `'${value.replace(/'/g, "''")}'`;
+         case 'boolean':
+             return value ? 'TRUE' : 'FALSE';
+         default:
+             if (value == null) {
+                 return 'NULL';
+             }
+             else if (value instanceof Date) {
+                 const ts = +value;
+                 if (Number.isNaN(ts))
+                     return 'NULL';
+                 const y = value.getUTCFullYear();
+                 const m = value.getUTCMonth();
+                 const d = value.getUTCDate();
+                 return ts === Date.UTC(y, m, d)
+                     ? `DATE '${y}-${m + 1}-${d}'` // utc date
+                     : `epoch_ms(${ts})`; // timestamp
+             }
+             else if (value instanceof RegExp) {
+                 return `'${value.source}'`;
+             }
+             else {
+                 // otherwise rely on string coercion
+                 return `${value}`;
+             }
+     }
+ }
+ //# sourceMappingURL=sql-from.js.map
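What these helpers emit, traced from the code above (import path assumed):

import {literalToSQL, sqlFrom} from '@sqlrooms/duckdb-core';

// Each datum becomes a one-row SELECT, chained with UNION ALL:
// (SELECT 1 AS "id", 'O''Brien' AS "name") UNION ALL (SELECT 2 AS "id", 'Lovelace' AS "name")
sqlFrom([
  {id: 1, name: "O'Brien"},
  {id: 2, name: 'Lovelace'},
]);

literalToSQL("it's");                         // 'it''s'  (single quotes doubled)
literalToSQL(new Date(Date.UTC(2024, 0, 2))); // DATE '2024-1-2'  (UTC midnight)
literalToSQL(new Date(1700000000123));        // epoch_ms(1700000000123)
literalToSQL(NaN);                            // NULL  (non-finite numbers)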
package/dist/load/sql-from.js.map ADDED
@@ -0,0 +1 @@
+ {"version":3,"file":"sql-from.js","sourceRoot":"","sources":["../../src/load/sql-from.ts"],"names":[],"mappings":"AAAA,iFAAiF;AACjF,mEAAmE;AAEnE;;;;;;;GAOG;AACH,MAAM,UAAU,OAAO,CACrB,IAA+B,EAC/B,EACE,OAAO,GAAG,MAAM,CAAC,IAAI,CAAC,IAAI,EAAE,CAAC,CAAC,CAAC,IAAI,EAAE,CAAC,MACW,EAAE;IAErD,IAAI,IAAI,GAAa,EAAE,CAAC;IACxB,IAAI,SAAiC,CAAC;IACtC,IAAI,KAAK,CAAC,OAAO,CAAC,OAAO,CAAC,EAAE,CAAC;QAC3B,IAAI,GAAG,OAAO,CAAC;QACf,SAAS,GAAG,IAAI,CAAC,MAAM,CAAC,CAAC,CAAC,EAAE,CAAC,EAAE,EAAE,CAAC,CAAC,EAAC,GAAG,CAAC,EAAE,CAAC,CAAC,CAAC,EAAE,CAAC,EAAC,CAAC,EAAE,EAAE,CAAC,CAAC;IAC1D,CAAC;SAAM,CAAC;QACN,SAAS,GAAG,OAAO,CAAC;QACpB,IAAI,GAAG,MAAM,CAAC,IAAI,CAAC,OAAO,CAAC,CAAC;IAC9B,CAAC;IACD,IAAI,CAAC,IAAI,CAAC,MAAM,EAAE,CAAC;QACjB,MAAM,IAAI,KAAK,CAAC,6CAA6C,CAAC,CAAC;IACjE,CAAC;IACD,MAAM,IAAI,GAAG,EAAE,CAAC;IAChB,KAAK,MAAM,KAAK,IAAI,IAAI,EAAE,CAAC;QACzB,MAAM,GAAG,GAAG,IAAI,CAAC,GAAG,CAClB,CAAC,CAAC,EAAE,EAAE,CAAC,GAAG,YAAY,CAAC,KAAK,CAAC,CAAC,CAAC,CAAC,QAAQ,SAAS,CAAC,CAAC,CAAC,GAAG,CACxD,CAAC;QACF,IAAI,CAAC,IAAI,CAAC,WAAW,GAAG,CAAC,IAAI,CAAC,IAAI,CAAC,GAAG,CAAC,CAAC;IAC1C,CAAC;IACD,OAAO,IAAI,CAAC,IAAI,CAAC,aAAa,CAAC,CAAC;AAClC,CAAC;AAED;;;;GAIG;AACH,MAAM,UAAU,YAAY,CAAC,KAAc;IACzC,QAAQ,OAAO,KAAK,EAAE,CAAC;QACrB,KAAK,QAAQ;YACX,OAAO,MAAM,CAAC,QAAQ,CAAC,KAAK,CAAC,CAAC,CAAC,CAAC,GAAG,KAAK,EAAE,CAAC,CAAC,CAAC,MAAM,CAAC;QACtD,KAAK,QAAQ;YACX,OAAO,IAAI,KAAK,CAAC,OAAO,CAAC,IAAI,EAAE,IAAI,CAAC,GAAG,CAAC;QAC1C,KAAK,SAAS;YACZ,OAAO,KAAK,CAAC,CAAC,CAAC,MAAM,CAAC,CAAC,CAAC,OAAO,CAAC;QAClC;YACE,IAAI,KAAK,IAAI,IAAI,EAAE,CAAC;gBAClB,OAAO,MAAM,CAAC;YAChB,CAAC;iBAAM,IAAI,KAAK,YAAY,IAAI,EAAE,CAAC;gBACjC,MAAM,EAAE,GAAG,CAAC,KAAK,CAAC;gBAClB,IAAI,MAAM,CAAC,KAAK,CAAC,EAAE,CAAC;oBAAE,OAAO,MAAM,CAAC;gBACpC,MAAM,CAAC,GAAG,KAAK,CAAC,cAAc,EAAE,CAAC;gBACjC,MAAM,CAAC,GAAG,KAAK,CAAC,WAAW,EAAE,CAAC;gBAC9B,MAAM,CAAC,GAAG,KAAK,CAAC,UAAU,EAAE,CAAC;gBAC7B,OAAO,EAAE,KAAK,IAAI,CAAC,GAAG,CAAC,CAAC,EAAE,CAAC,EAAE,CAAC,CAAC;oBAC7B,CAAC,CAAC,SAAS,CAAC,IAAI,CAAC,GAAG,CAAC,IAAI,CAAC,GAAG,CAAC,WAAW;oBACzC,CAAC,CAAC,YAAY,EAAE,GAAG,CAAC,CAAC,YAAY;YACrC,CAAC;iBAAM,IAAI,KAAK,YAAY,MAAM,EAAE,CAAC;gBACnC,OAAO,IAAI,KAAK,CAAC,MAAM,GAAG,CAAC;YAC7B,CAAC;iBAAM,CAAC;gBACN,oCAAoC;gBACpC,OAAO,GAAG,KAAK,EAAE,CAAC;YACpB,CAAC;IACL,CAAC;AACH,CAAC","sourcesContent":["// Adapted from https://github.com/uwdata/mosaic/blob/main/packages/sql/src/load/\n// BSD 3-Clause License Copyright (c) 2023, UW Interactive Data Lab\n\n/**\n * Create a SQL query that embeds the given data for loading.\n * @param {*} data The dataset as an array of objects.\n * @param {object} [options] Loading options.\n * @param {string[]|Record<string,string>} [options.columns] The columns to include.\n * If not specified, the keys of the first data object are used.\n * @returns {string} SQL query string to load data.\n */\nexport function sqlFrom(\n data: Record<string, unknown>[],\n {\n columns = Object.keys(data?.[0] || {}),\n }: {columns?: string[] | Record<string, string>} = {},\n) {\n let keys: string[] = [];\n let columnMap: Record<string, string>;\n if (Array.isArray(columns)) {\n keys = columns;\n columnMap = keys.reduce((m, k) => ({...m, [k]: k}), {});\n } else {\n columnMap = columns;\n keys = Object.keys(columns);\n }\n if (!keys.length) {\n throw new Error('Can not create table from empty column set.');\n }\n const subq = [];\n for (const datum of data) {\n const sel = keys.map(\n (k) => `${literalToSQL(datum[k])} AS \"${columnMap[k]}\"`,\n );\n subq.push(`(SELECT ${sel.join(', ')})`);\n }\n return subq.join(' UNION ALL ');\n}\n\n/**\n * Convert a value to a SQL literal.\n * @param {*} value The 
value to convert.\n * @returns {string} The SQL literal.\n */\nexport function literalToSQL(value: unknown) {\n switch (typeof value) {\n case 'number':\n return Number.isFinite(value) ? `${value}` : 'NULL';\n case 'string':\n return `'${value.replace(/'/g, \"''\")}'`;\n case 'boolean':\n return value ? 'TRUE' : 'FALSE';\n default:\n if (value == null) {\n return 'NULL';\n } else if (value instanceof Date) {\n const ts = +value;\n if (Number.isNaN(ts)) return 'NULL';\n const y = value.getUTCFullYear();\n const m = value.getUTCMonth();\n const d = value.getUTCDate();\n return ts === Date.UTC(y, m, d)\n ? `DATE '${y}-${m + 1}-${d}'` // utc date\n : `epoch_ms(${ts})`; // timestamp\n } else if (value instanceof RegExp) {\n return `'${value.source}'`;\n } else {\n // otherwise rely on string coercion\n return `${value}`;\n }\n }\n}\n"]}
package/dist/schema-tree/schemaTree.d.ts ADDED
@@ -0,0 +1,9 @@
+ import type { DataTable } from '../types';
+ import type { DbSchemaNode } from './types';
+ /**
+  * Group tables by database, schema and create a tree of databases, schemas, tables, and columns.
+  * @param tables - The tables to group
+  * @returns An array of database nodes containing schemas, tables and columns
+  */
+ export declare function createDbSchemaTrees(tables: DataTable[]): DbSchemaNode[];
+ //# sourceMappingURL=schemaTree.d.ts.map
package/dist/schema-tree/schemaTree.d.ts.map ADDED
@@ -0,0 +1 @@
+ {"version":3,"file":"schemaTree.d.ts","sourceRoot":"","sources":["../../src/schema-tree/schemaTree.ts"],"names":[],"mappings":"AAAA,OAAO,KAAK,EAAC,SAAS,EAAC,MAAM,UAAU,CAAC;AAExC,OAAO,KAAK,EAAC,YAAY,EAAC,MAAM,SAAS,CAAC;AAE1C;;;;GAIG;AACH,wBAAgB,mBAAmB,CAAC,MAAM,EAAE,SAAS,EAAE,GAAG,YAAY,EAAE,CAmCvE"}
package/dist/schema-tree/schemaTree.js ADDED
@@ -0,0 +1,75 @@
+ import { getDuckDbTypeCategory } from './typeCategories';
+ /**
+  * Group tables by database, schema and create a tree of databases, schemas, tables, and columns.
+  * @param tables - The tables to group
+  * @returns An array of database nodes containing schemas, tables and columns
+  */
+ export function createDbSchemaTrees(tables) {
+     const databaseMap = new Map();
+     for (const table of tables) {
+         const database = table.database ?? 'default';
+         const schema = table.schema;
+         const tableName = table.tableName;
+         const columnNodes = table.columns.map((column) => createColumnNode(schema, tableName, column.name, column.type));
+         const tableNode = createTableNode(table, columnNodes);
+         if (!databaseMap.has(database)) {
+             databaseMap.set(database, new Map());
+         }
+         const schemaMap = databaseMap.get(database);
+         if (!schemaMap.has(schema)) {
+             schemaMap.set(schema, []);
+         }
+         schemaMap.get(schema)?.push(tableNode);
+     }
+     // Create database nodes
+     return Array.from(databaseMap.entries()).map(([database, schemaMap]) => {
+         const schemaNodes = Array.from(schemaMap.entries()).map(([schema, tables]) => createSchemaTreeNode(schema, tables));
+         return createDatabaseTreeNode(database, schemaNodes);
+     });
+ }
+ function createColumnNode(schema, tableName, columnName, columnType) {
+     return {
+         key: `${schema}.${tableName}.${columnName}`,
+         object: {
+             type: 'column',
+             name: columnName,
+             columnType,
+             columnTypeCategory: getDuckDbTypeCategory(columnType),
+         },
+     };
+ }
+ function createTableNode(table, columnNodes) {
+     return {
+         key: `${table.schema}.${table.tableName}`,
+         object: {
+             type: 'table',
+             ...table,
+             name: table.tableName,
+         },
+         isInitialOpen: false,
+         children: columnNodes,
+     };
+ }
+ function createSchemaTreeNode(schema, tables) {
+     return {
+         key: schema,
+         object: {
+             type: 'schema',
+             name: schema,
+         },
+         isInitialOpen: true,
+         children: tables,
+     };
+ }
+ function createDatabaseTreeNode(database, schemas) {
+     return {
+         key: database,
+         object: {
+             type: 'database',
+             name: database,
+         },
+         isInitialOpen: true,
+         children: schemas,
+     };
+ }
+ //# sourceMappingURL=schemaTree.js.map
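A shape sketch for the tree builder (hedged: assumes the function is re-exported from the package root and that QualifiedTableName is a plain object with database/schema/table fields, as the deprecation notes in dist/types.d.ts later in this diff suggest):

import {createDbSchemaTrees} from '@sqlrooms/duckdb-core';

const [db] = createDbSchemaTrees([
  {
    table: {database: 'memory', schema: 'main', table: 'trips'},
    database: 'memory',
    schema: 'main',
    tableName: 'trips',
    isView: false,
    columns: [{name: 'distance', type: 'DOUBLE'}],
  },
]);
// db.key === 'memory'             (database node, open by default)
// db.children?.[0].key === 'main' (schema node)
// the leaf column node gets key 'main.trips.distance' and columnTypeCategory 'number'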
package/dist/schema-tree/schemaTree.js.map ADDED
@@ -0,0 +1 @@
+ {"version":3,"file":"schemaTree.js","sourceRoot":"","sources":["../../src/schema-tree/schemaTree.ts"],"names":[],"mappings":"AACA,OAAO,EAAC,qBAAqB,EAAC,MAAM,kBAAkB,CAAC;AAGvD;;;;GAIG;AACH,MAAM,UAAU,mBAAmB,CAAC,MAAmB;IACrD,MAAM,WAAW,GAAG,IAAI,GAAG,EAAuC,CAAC;IAEnE,KAAK,MAAM,KAAK,IAAI,MAAM,EAAE,CAAC;QAC3B,MAAM,QAAQ,GAAG,KAAK,CAAC,QAAQ,IAAI,SAAS,CAAC;QAC7C,MAAM,MAAM,GAAG,KAAK,CAAC,MAAM,CAAC;QAC5B,MAAM,SAAS,GAAG,KAAK,CAAC,SAAS,CAAC;QAElC,MAAM,WAAW,GAAG,KAAK,CAAC,OAAO,CAAC,GAAG,CAAC,CAAC,MAAM,EAAE,EAAE,CAC/C,gBAAgB,CAAC,MAAM,EAAE,SAAS,EAAE,MAAM,CAAC,IAAI,EAAE,MAAM,CAAC,IAAI,CAAC,CAC9D,CAAC;QAEF,MAAM,SAAS,GAAG,eAAe,CAAC,KAAK,EAAE,WAAW,CAAC,CAAC;QAEtD,IAAI,CAAC,WAAW,CAAC,GAAG,CAAC,QAAQ,CAAC,EAAE,CAAC;YAC/B,WAAW,CAAC,GAAG,CAAC,QAAQ,EAAE,IAAI,GAAG,EAA0B,CAAC,CAAC;QAC/D,CAAC;QAED,MAAM,SAAS,GAAG,WAAW,CAAC,GAAG,CAAC,QAAQ,CAAE,CAAC;QAE7C,IAAI,CAAC,SAAS,CAAC,GAAG,CAAC,MAAM,CAAC,EAAE,CAAC;YAC3B,SAAS,CAAC,GAAG,CAAC,MAAM,EAAE,EAAE,CAAC,CAAC;QAC5B,CAAC;QAED,SAAS,CAAC,GAAG,CAAC,MAAM,CAAC,EAAE,IAAI,CAAC,SAAS,CAAC,CAAC;IACzC,CAAC;IAED,wBAAwB;IACxB,OAAO,KAAK,CAAC,IAAI,CAAC,WAAW,CAAC,OAAO,EAAE,CAAC,CAAC,GAAG,CAAC,CAAC,CAAC,QAAQ,EAAE,SAAS,CAAC,EAAE,EAAE;QACrE,MAAM,WAAW,GAAG,KAAK,CAAC,IAAI,CAAC,SAAS,CAAC,OAAO,EAAE,CAAC,CAAC,GAAG,CACrD,CAAC,CAAC,MAAM,EAAE,MAAM,CAAC,EAAE,EAAE,CAAC,oBAAoB,CAAC,MAAM,EAAE,MAAM,CAAC,CAC3D,CAAC;QAEF,OAAO,sBAAsB,CAAC,QAAQ,EAAE,WAAW,CAAC,CAAC;IACvD,CAAC,CAAC,CAAC;AACL,CAAC;AAED,SAAS,gBAAgB,CACvB,MAAc,EACd,SAAiB,EACjB,UAAkB,EAClB,UAAkB;IAElB,OAAO;QACL,GAAG,EAAE,GAAG,MAAM,IAAI,SAAS,IAAI,UAAU,EAAE;QAC3C,MAAM,EAAE;YACN,IAAI,EAAE,QAAQ;YACd,IAAI,EAAE,UAAU;YAChB,UAAU;YACV,kBAAkB,EAAE,qBAAqB,CAAC,UAAU,CAAC;SACtD;KACF,CAAC;AACJ,CAAC;AAED,SAAS,eAAe,CACtB,KAAgB,EAChB,WAA2B;IAE3B,OAAO;QACL,GAAG,EAAE,GAAG,KAAK,CAAC,MAAM,IAAI,KAAK,CAAC,SAAS,EAAE;QACzC,MAAM,EAAE;YACN,IAAI,EAAE,OAAO;YACb,GAAG,KAAK;YACR,IAAI,EAAE,KAAK,CAAC,SAAS;SACtB;QACD,aAAa,EAAE,KAAK;QACpB,QAAQ,EAAE,WAAW;KACtB,CAAC;AACJ,CAAC;AAED,SAAS,oBAAoB,CAC3B,MAAc,EACd,MAAsB;IAEtB,OAAO;QACL,GAAG,EAAE,MAAM;QACX,MAAM,EAAE;YACN,IAAI,EAAE,QAAQ;YACd,IAAI,EAAE,MAAM;SACb;QACD,aAAa,EAAE,IAAI;QACnB,QAAQ,EAAE,MAAM;KACjB,CAAC;AACJ,CAAC;AAED,SAAS,sBAAsB,CAC7B,QAAgB,EAChB,OAAuB;IAEvB,OAAO;QACL,GAAG,EAAE,QAAQ;QACb,MAAM,EAAE;YACN,IAAI,EAAE,UAAU;YAChB,IAAI,EAAE,QAAQ;SACf;QACD,aAAa,EAAE,IAAI;QACnB,QAAQ,EAAE,OAAO;KAClB,CAAC;AACJ,CAAC","sourcesContent":["import type {DataTable} from '../types';\nimport {getDuckDbTypeCategory} from './typeCategories';\nimport type {DbSchemaNode} from './types';\n\n/**\n * Group tables by database, schema and create a tree of databases, schemas, tables, and columns.\n * @param tables - The tables to group\n * @returns An array of database nodes containing schemas, tables and columns\n */\nexport function createDbSchemaTrees(tables: DataTable[]): DbSchemaNode[] {\n const databaseMap = new Map<string, Map<string, DbSchemaNode[]>>();\n\n for (const table of tables) {\n const database = table.database ?? 
'default';\n const schema = table.schema;\n const tableName = table.tableName;\n\n const columnNodes = table.columns.map((column) =>\n createColumnNode(schema, tableName, column.name, column.type),\n );\n\n const tableNode = createTableNode(table, columnNodes);\n\n if (!databaseMap.has(database)) {\n databaseMap.set(database, new Map<string, DbSchemaNode[]>());\n }\n\n const schemaMap = databaseMap.get(database)!;\n\n if (!schemaMap.has(schema)) {\n schemaMap.set(schema, []);\n }\n\n schemaMap.get(schema)?.push(tableNode);\n }\n\n // Create database nodes\n return Array.from(databaseMap.entries()).map(([database, schemaMap]) => {\n const schemaNodes = Array.from(schemaMap.entries()).map(\n ([schema, tables]) => createSchemaTreeNode(schema, tables),\n );\n\n return createDatabaseTreeNode(database, schemaNodes);\n });\n}\n\nfunction createColumnNode(\n schema: string,\n tableName: string,\n columnName: string,\n columnType: string,\n): DbSchemaNode {\n return {\n key: `${schema}.${tableName}.${columnName}`,\n object: {\n type: 'column',\n name: columnName,\n columnType,\n columnTypeCategory: getDuckDbTypeCategory(columnType),\n },\n };\n}\n\nfunction createTableNode(\n table: DataTable,\n columnNodes: DbSchemaNode[],\n): DbSchemaNode {\n return {\n key: `${table.schema}.${table.tableName}`,\n object: {\n type: 'table',\n ...table,\n name: table.tableName,\n },\n isInitialOpen: false,\n children: columnNodes,\n };\n}\n\nfunction createSchemaTreeNode(\n schema: string,\n tables: DbSchemaNode[],\n): DbSchemaNode {\n return {\n key: schema,\n object: {\n type: 'schema',\n name: schema,\n },\n isInitialOpen: true,\n children: tables,\n };\n}\n\nfunction createDatabaseTreeNode(\n database: string,\n schemas: DbSchemaNode[],\n): DbSchemaNode {\n return {\n key: database,\n object: {\n type: 'database',\n name: database,\n },\n isInitialOpen: true,\n children: schemas,\n };\n}\n"]}
package/dist/schema-tree/typeCategories.d.ts ADDED
@@ -0,0 +1,16 @@
+ import { DataType } from 'apache-arrow';
+ export type ColumnTypeCategory = 'number' | 'string' | 'datetime' | 'boolean' | 'binary' | 'json' | 'struct' | 'geometry';
+ /**
+  * Get the category of a column type
+  * @param columnType - The type of the column
+  * @returns The category of the column type
+  */
+ export declare function getDuckDbTypeCategory(columnType: string): ColumnTypeCategory | undefined;
+ /**
+  * This function is used to get the type category of a column from an Arrow table.
+  *
+  * @param type - The Arrow DataType of the column.
+  * @returns The type category of the column.
+  */
+ export declare function getArrowColumnTypeCategory(type: DataType): ColumnTypeCategory;
+ //# sourceMappingURL=typeCategories.d.ts.map
package/dist/schema-tree/typeCategories.d.ts.map ADDED
@@ -0,0 +1 @@
+ {"version":3,"file":"typeCategories.d.ts","sourceRoot":"","sources":["../../src/schema-tree/typeCategories.ts"],"names":[],"mappings":"AAAA,OAAO,EAAC,QAAQ,EAAC,MAAM,cAAc,CAAC;AAEtC,MAAM,MAAM,kBAAkB,GAC1B,QAAQ,GACR,QAAQ,GACR,UAAU,GACV,SAAS,GACT,QAAQ,GACR,MAAM,GACN,QAAQ,GACR,UAAU,CAAC;AA4Bf;;;;GAIG;AACH,wBAAgB,qBAAqB,CACnC,UAAU,EAAE,MAAM,GACjB,kBAAkB,GAAG,SAAS,CAQhC;AACD;;;;;GAKG;AACH,wBAAgB,0BAA0B,CAAC,IAAI,EAAE,QAAQ,GAAG,kBAAkB,CAiC7E"}
package/dist/schema-tree/typeCategories.js ADDED
@@ -0,0 +1,72 @@
+ import { DataType } from 'apache-arrow';
+ const DUCKDB_TYPE_CATEGORIES = {
+     string: [/^varchar/, /^char/, /^text/, /^string/, /^uuid/, /^bit/],
+     number: [
+         /^tinyint/,
+         /^smallint/,
+         /^integer/,
+         /^bigint/,
+         /^hugeint/,
+         /^utinyint/,
+         /^usmallint/,
+         /^uinteger/,
+         /^ubigint/,
+         /^uhugeint/,
+         /^decimal/,
+         /^numeric/,
+         /^double/,
+         /^float/,
+     ],
+     boolean: [/^bool(ean)?/],
+     binary: [/^blob/, /^bytea/, /^binary/, /^varbinary/],
+     datetime: [/^date$/, /^time$/, /^timestamp$/, /^timestamptz$/, /^interval$/],
+     json: [/^json$/],
+     struct: [/^struct$/, /^list$/, /^map$/, /^array$/, /^union$/],
+     geometry: [/^geometry/],
+ };
+ /**
+  * Get the category of a column type
+  * @param columnType - The type of the column
+  * @returns The category of the column type
+  */
+ export function getDuckDbTypeCategory(columnType) {
+     const type = columnType.toLowerCase();
+     for (const [category, patterns] of Object.entries(DUCKDB_TYPE_CATEGORIES)) {
+         if (patterns.some((pattern) => type.match(pattern))) {
+             return category;
+         }
+     }
+     return undefined;
+ }
+ /**
+  * This function is used to get the type category of a column from an Arrow table.
+  *
+  * @param type - The Arrow DataType of the column.
+  * @returns The type category of the column.
+  */
+ export function getArrowColumnTypeCategory(type) {
+     if (DataType.isInt(type) ||
+         DataType.isFloat(type) ||
+         DataType.isDecimal(type)) {
+         return 'number';
+     }
+     if (DataType.isDate(type) ||
+         DataType.isTime(type) ||
+         DataType.isTimestamp(type)) {
+         return 'datetime';
+     }
+     if (DataType.isBool(type)) {
+         return 'boolean';
+     }
+     if (DataType.isBinary(type)) {
+         return 'binary';
+     }
+     // Note: Arrow doesn't have built-in geometry types, so we'll need to check the type name
+     // if your geometry types are custom implementations
+     if (type.toString().toLowerCase().includes('geometry')) {
+         return 'geometry';
+     }
+     // Default to string type for all other cases
+     return 'string';
+ }
+ //# sourceMappingURL=typeCategories.js.map
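Because getDuckDbTypeCategory does a first-hit regex scan over the lowercased type name, prefix patterns catch parameterized types while fully anchored ones do not — a sketch (import path assumed):

import {getDuckDbTypeCategory} from '@sqlrooms/duckdb-core';

getDuckDbTypeCategory('DECIMAL(18,3)');  // 'number'   (/^decimal/ matches the prefix)
getDuckDbTypeCategory('TIMESTAMP');      // 'datetime'
getDuckDbTypeCategory('BOOLEAN');        // 'boolean'
getDuckDbTypeCategory('MAP(INT, TEXT)'); // undefined  (/^map$/ is anchored at both ends)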
package/dist/schema-tree/typeCategories.js.map ADDED
@@ -0,0 +1 @@
+ {"version":3,"file":"typeCategories.js","sourceRoot":"","sources":["../../src/schema-tree/typeCategories.ts"],"names":[],"mappings":"AAAA,OAAO,EAAC,QAAQ,EAAC,MAAM,cAAc,CAAC;AAYtC,MAAM,sBAAsB,GAAG;IAC7B,MAAM,EAAE,CAAC,UAAU,EAAE,OAAO,EAAE,OAAO,EAAE,SAAS,EAAE,OAAO,EAAE,MAAM,CAAC;IAClE,MAAM,EAAE;QACN,UAAU;QACV,WAAW;QACX,UAAU;QACV,SAAS;QACT,UAAU;QACV,WAAW;QACX,YAAY;QACZ,WAAW;QACX,UAAU;QACV,WAAW;QACX,UAAU;QACV,UAAU;QACV,SAAS;QACT,QAAQ;KACT;IACD,OAAO,EAAE,CAAC,aAAa,CAAC;IACxB,MAAM,EAAE,CAAC,OAAO,EAAE,QAAQ,EAAE,SAAS,EAAE,YAAY,CAAC;IACpD,QAAQ,EAAE,CAAC,QAAQ,EAAE,QAAQ,EAAE,aAAa,EAAE,eAAe,EAAE,YAAY,CAAC;IAC5E,IAAI,EAAE,CAAC,QAAQ,CAAC;IAChB,MAAM,EAAE,CAAC,UAAU,EAAE,QAAQ,EAAE,OAAO,EAAE,SAAS,EAAE,SAAS,CAAC;IAC7D,QAAQ,EAAE,CAAC,WAAW,CAAC;CACuB,CAAC;AAEjD;;;;GAIG;AACH,MAAM,UAAU,qBAAqB,CACnC,UAAkB;IAElB,MAAM,IAAI,GAAG,UAAU,CAAC,WAAW,EAAE,CAAC;IACtC,KAAK,MAAM,CAAC,QAAQ,EAAE,QAAQ,CAAC,IAAI,MAAM,CAAC,OAAO,CAAC,sBAAsB,CAAC,EAAE,CAAC;QAC1E,IAAI,QAAQ,CAAC,IAAI,CAAC,CAAC,OAAO,EAAE,EAAE,CAAC,IAAI,CAAC,KAAK,CAAC,OAAO,CAAC,CAAC,EAAE,CAAC;YACpD,OAAO,QAA8B,CAAC;QACxC,CAAC;IACH,CAAC;IACD,OAAO,SAAS,CAAC;AACnB,CAAC;AACD;;;;;GAKG;AACH,MAAM,UAAU,0BAA0B,CAAC,IAAc;IACvD,IACE,QAAQ,CAAC,KAAK,CAAC,IAAI,CAAC;QACpB,QAAQ,CAAC,OAAO,CAAC,IAAI,CAAC;QACtB,QAAQ,CAAC,SAAS,CAAC,IAAI,CAAC,EACxB,CAAC;QACD,OAAO,QAAQ,CAAC;IAClB,CAAC;IAED,IACE,QAAQ,CAAC,MAAM,CAAC,IAAI,CAAC;QACrB,QAAQ,CAAC,MAAM,CAAC,IAAI,CAAC;QACrB,QAAQ,CAAC,WAAW,CAAC,IAAI,CAAC,EAC1B,CAAC;QACD,OAAO,UAAU,CAAC;IACpB,CAAC;IAED,IAAI,QAAQ,CAAC,MAAM,CAAC,IAAI,CAAC,EAAE,CAAC;QAC1B,OAAO,SAAS,CAAC;IACnB,CAAC;IAED,IAAI,QAAQ,CAAC,QAAQ,CAAC,IAAI,CAAC,EAAE,CAAC;QAC5B,OAAO,QAAQ,CAAC;IAClB,CAAC;IAED,yFAAyF;IACzF,oDAAoD;IACpD,IAAI,IAAI,CAAC,QAAQ,EAAE,CAAC,WAAW,EAAE,CAAC,QAAQ,CAAC,UAAU,CAAC,EAAE,CAAC;QACvD,OAAO,UAAU,CAAC;IACpB,CAAC;IAED,6CAA6C;IAC7C,OAAO,QAAQ,CAAC;AAClB,CAAC","sourcesContent":["import {DataType} from 'apache-arrow';\n\nexport type ColumnTypeCategory =\n | 'number'\n | 'string'\n | 'datetime'\n | 'boolean'\n | 'binary'\n | 'json'\n | 'struct'\n | 'geometry';\n\nconst DUCKDB_TYPE_CATEGORIES = {\n string: [/^varchar/, /^char/, /^text/, /^string/, /^uuid/, /^bit/],\n number: [\n /^tinyint/,\n /^smallint/,\n /^integer/,\n /^bigint/,\n /^hugeint/,\n /^utinyint/,\n /^usmallint/,\n /^uinteger/,\n /^ubigint/,\n /^uhugeint/,\n /^decimal/,\n /^numeric/,\n /^double/,\n /^float/,\n ],\n boolean: [/^bool(ean)?/],\n binary: [/^blob/, /^bytea/, /^binary/, /^varbinary/],\n datetime: [/^date$/, /^time$/, /^timestamp$/, /^timestamptz$/, /^interval$/],\n json: [/^json$/],\n struct: [/^struct$/, /^list$/, /^map$/, /^array$/, /^union$/],\n geometry: [/^geometry/],\n} satisfies Record<ColumnTypeCategory, RegExp[]>;\n\n/**\n * Get the category of a column type\n * @param columnType - The type of the column\n * @returns The category of the column type\n */\nexport function getDuckDbTypeCategory(\n columnType: string,\n): ColumnTypeCategory | undefined {\n const type = columnType.toLowerCase();\n for (const [category, patterns] of Object.entries(DUCKDB_TYPE_CATEGORIES)) {\n if (patterns.some((pattern) => type.match(pattern))) {\n return category as ColumnTypeCategory;\n }\n }\n return undefined;\n}\n/**\n * This function is used to get the type category of a column from an Arrow table.\n *\n * @param type - The Arrow DataType of the column.\n * @returns The type category of the column.\n */\nexport function getArrowColumnTypeCategory(type: DataType): ColumnTypeCategory {\n if (\n DataType.isInt(type) ||\n DataType.isFloat(type) ||\n DataType.isDecimal(type)\n ) {\n return 'number';\n }\n\n if (\n 
DataType.isDate(type) ||\n DataType.isTime(type) ||\n DataType.isTimestamp(type)\n ) {\n return 'datetime';\n }\n\n if (DataType.isBool(type)) {\n return 'boolean';\n }\n\n if (DataType.isBinary(type)) {\n return 'binary';\n }\n\n // Note: Arrow doesn't have built-in geometry types, so we'll need to check the type name\n // if your geometry types are custom implementations\n if (type.toString().toLowerCase().includes('geometry')) {\n return 'geometry';\n }\n\n // Default to string type for all other cases\n return 'string';\n}\n"]}
package/dist/schema-tree/types.d.ts ADDED
@@ -0,0 +1,28 @@
+ import { DataTable } from '../types';
+ import { ColumnTypeCategory } from './typeCategories';
+ export type DbSchemaNode<T extends NodeObject = NodeObject> = {
+     key: string;
+     object: T;
+     children?: DbSchemaNode[];
+     isInitialOpen?: boolean;
+ };
+ export type NodeObject = ColumnNodeObject | TableNodeObject | SchemaNodeObject | DatabaseNodeObject;
+ type BaseNodeObject = {
+     name: string;
+ };
+ export type ColumnNodeObject = BaseNodeObject & {
+     type: 'column';
+     columnType: string;
+     columnTypeCategory?: ColumnTypeCategory;
+ };
+ export type TableNodeObject = BaseNodeObject & {
+     type: 'table';
+ } & DataTable;
+ export type SchemaNodeObject = BaseNodeObject & {
+     type: 'schema';
+ };
+ export type DatabaseNodeObject = BaseNodeObject & {
+     type: 'database';
+ };
+ export {};
+ //# sourceMappingURL=types.d.ts.map
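object.type is the discriminant of NodeObject, so a switch narrows each node kind — a minimal sketch (type import path assumed):

import type {DbSchemaNode} from '@sqlrooms/duckdb-core';

function label(node: DbSchemaNode): string {
  switch (node.object.type) {
    case 'column':
      // narrowed to ColumnNodeObject here
      return `${node.object.name}: ${node.object.columnType}`;
    case 'table':
    case 'schema':
    case 'database':
      return node.object.name;
  }
}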
package/dist/schema-tree/types.d.ts.map ADDED
@@ -0,0 +1 @@
+ {"version":3,"file":"types.d.ts","sourceRoot":"","sources":["../../src/schema-tree/types.ts"],"names":[],"mappings":"AAAA,OAAO,EAAC,SAAS,EAAC,MAAM,UAAU,CAAC;AACnC,OAAO,EAAC,kBAAkB,EAAC,MAAM,kBAAkB,CAAC;AAEpD,MAAM,MAAM,YAAY,CAAC,CAAC,SAAS,UAAU,GAAG,UAAU,IAAI;IAC5D,GAAG,EAAE,MAAM,CAAC;IACZ,MAAM,EAAE,CAAC,CAAC;IACV,QAAQ,CAAC,EAAE,YAAY,EAAE,CAAC;IAC1B,aAAa,CAAC,EAAE,OAAO,CAAC;CACzB,CAAC;AAEF,MAAM,MAAM,UAAU,GAClB,gBAAgB,GAChB,eAAe,GACf,gBAAgB,GAChB,kBAAkB,CAAC;AAEvB,KAAK,cAAc,GAAG;IACpB,IAAI,EAAE,MAAM,CAAC;CACd,CAAC;AAEF,MAAM,MAAM,gBAAgB,GAAG,cAAc,GAAG;IAC9C,IAAI,EAAE,QAAQ,CAAC;IACf,UAAU,EAAE,MAAM,CAAC;IACnB,kBAAkB,CAAC,EAAE,kBAAkB,CAAC;CACzC,CAAC;AAEF,MAAM,MAAM,eAAe,GAAG,cAAc,GAAG;IAC7C,IAAI,EAAE,OAAO,CAAC;CACf,GAAG,SAAS,CAAC;AAEd,MAAM,MAAM,gBAAgB,GAAG,cAAc,GAAG;IAC9C,IAAI,EAAE,QAAQ,CAAC;CAChB,CAAC;AAEF,MAAM,MAAM,kBAAkB,GAAG,cAAc,GAAG;IAChD,IAAI,EAAE,UAAU,CAAC;CAClB,CAAC"}
package/dist/schema-tree/types.js ADDED
@@ -0,0 +1,2 @@
+ export {};
+ //# sourceMappingURL=types.js.map
package/dist/schema-tree/types.js.map ADDED
@@ -0,0 +1 @@
+ {"version":3,"file":"types.js","sourceRoot":"","sources":["../../src/schema-tree/types.ts"],"names":[],"mappings":"","sourcesContent":["import {DataTable} from '../types';\nimport {ColumnTypeCategory} from './typeCategories';\n\nexport type DbSchemaNode<T extends NodeObject = NodeObject> = {\n key: string;\n object: T;\n children?: DbSchemaNode[];\n isInitialOpen?: boolean;\n};\n\nexport type NodeObject =\n | ColumnNodeObject\n | TableNodeObject\n | SchemaNodeObject\n | DatabaseNodeObject;\n\ntype BaseNodeObject = {\n name: string;\n};\n\nexport type ColumnNodeObject = BaseNodeObject & {\n type: 'column';\n columnType: string;\n columnTypeCategory?: ColumnTypeCategory;\n};\n\nexport type TableNodeObject = BaseNodeObject & {\n type: 'table';\n} & DataTable;\n\nexport type SchemaNodeObject = BaseNodeObject & {\n type: 'schema';\n};\n\nexport type DatabaseNodeObject = BaseNodeObject & {\n type: 'database';\n};\n"]}
package/dist/typedRowAccessor.d.ts ADDED
@@ -0,0 +1,19 @@
+ import * as arrow from 'apache-arrow';
+ export interface TypedRowAccessor<T> extends Iterable<T> {
+     /** Returns a typed row at the specified index by converting on demand */
+     getRow(index: number): T;
+     /** Number of rows in the table */
+     length: number;
+     /** Returns an iterator that yields each row in the table */
+     rows(): IterableIterator<T>;
+     /** Returns an array containing all rows in the table. The array is cached and reused. */
+     toArray(): T[];
+ }
+ /**
+  * Creates a row accessor wrapper around an Arrow table that provides typed row access.
+  */
+ export declare function createTypedRowAccessor<T extends arrow.TypeMap = any>({ arrowTable, validate, }: {
+     arrowTable: arrow.Table<T>;
+     validate?: (row: unknown) => T;
+ }): TypedRowAccessor<T>;
+ //# sourceMappingURL=typedRowAccessor.d.ts.map
package/dist/typedRowAccessor.d.ts.map ADDED
@@ -0,0 +1 @@
+ {"version":3,"file":"typedRowAccessor.d.ts","sourceRoot":"","sources":["../src/typedRowAccessor.ts"],"names":[],"mappings":"AAAA,OAAO,KAAK,KAAK,MAAM,cAAc,CAAC;AAEtC,MAAM,WAAW,gBAAgB,CAAC,CAAC,CAAE,SAAQ,QAAQ,CAAC,CAAC,CAAC;IACtD,yEAAyE;IACzE,MAAM,CAAC,KAAK,EAAE,MAAM,GAAG,CAAC,CAAC;IACzB,kCAAkC;IAClC,MAAM,EAAE,MAAM,CAAC;IACf,4DAA4D;IAC5D,IAAI,IAAI,gBAAgB,CAAC,CAAC,CAAC,CAAC;IAC5B,yFAAyF;IACzF,OAAO,IAAI,CAAC,EAAE,CAAC;CAChB;AAED;;GAEG;AACH,wBAAgB,sBAAsB,CAAC,CAAC,SAAS,KAAK,CAAC,OAAO,GAAG,GAAG,EAAE,EACpE,UAAU,EACV,QAAQ,GACT,EAAE;IACD,UAAU,EAAE,KAAK,CAAC,KAAK,CAAC,CAAC,CAAC,CAAC;IAC3B,QAAQ,CAAC,EAAE,CAAC,GAAG,EAAE,OAAO,KAAK,CAAC,CAAC;CAChC,GAAG,gBAAgB,CAAC,CAAC,CAAC,CA0CtB"}
package/dist/typedRowAccessor.js ADDED
@@ -0,0 +1,45 @@
+ /**
+  * Creates a row accessor wrapper around an Arrow table that provides typed row access.
+  */
+ export function createTypedRowAccessor({ arrowTable, validate, }) {
+     let cachedArray;
+     return {
+         get length() {
+             return arrowTable.numRows;
+         },
+         getRow(index) {
+             const row = {};
+             arrowTable.schema.fields.forEach((field) => {
+                 const column = arrowTable.getChild(field.name);
+                 if (column) {
+                     row[field.name] = column.get(index);
+                 }
+             });
+             // If a validator is provided, use it to validate/parse the row
+             if (validate) {
+                 return validate(row);
+             }
+             return row;
+         },
+         *rows() {
+             for (let i = 0; i < this.length; i++) {
+                 yield this.getRow(i);
+             }
+         },
+         toArray() {
+             if (cachedArray) {
+                 return cachedArray;
+             }
+             const result = [];
+             for (let i = 0; i < this.length; i++) {
+                 result.push(this.getRow(i));
+             }
+             cachedArray = result;
+             return result;
+         },
+         [Symbol.iterator]() {
+             return this.rows();
+         },
+     };
+ }
+ //# sourceMappingURL=typedRowAccessor.js.map
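A minimal usage sketch (tableFromArrays is part of apache-arrow; the accessor's import path is assumed):

import {tableFromArrays} from 'apache-arrow';
import {createTypedRowAccessor} from '@sqlrooms/duckdb-core';

const arrowTable = tableFromArrays({id: Int32Array.from([1, 2, 3])});
const rows = createTypedRowAccessor({arrowTable});

rows.length;     // 3 — delegates to arrowTable.numRows
rows.getRow(0);  // {id: 1}, assembled on demand via column.get(index)
[...rows];       // iterates row by row; toArray() computes once and caches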
package/dist/typedRowAccessor.js.map ADDED
@@ -0,0 +1 @@
+ {"version":3,"file":"typedRowAccessor.js","sourceRoot":"","sources":["../src/typedRowAccessor.ts"],"names":[],"mappings":"AAaA;;GAEG;AACH,MAAM,UAAU,sBAAsB,CAAgC,EACpE,UAAU,EACV,QAAQ,GAIT;IACC,IAAI,WAA4B,CAAC;IAEjC,OAAO;QACL,IAAI,MAAM;YACR,OAAO,UAAU,CAAC,OAAO,CAAC;QAC5B,CAAC;QACD,MAAM,CAAC,KAAa;YAClB,MAAM,GAAG,GAA4B,EAAE,CAAC;YACxC,UAAU,CAAC,MAAM,CAAC,MAAM,CAAC,OAAO,CAAC,CAAC,KAAkB,EAAE,EAAE;gBACtD,MAAM,MAAM,GAAG,UAAU,CAAC,QAAQ,CAAC,KAAK,CAAC,IAAI,CAAC,CAAC;gBAC/C,IAAI,MAAM,EAAE,CAAC;oBACX,GAAG,CAAC,KAAK,CAAC,IAAI,CAAC,GAAG,MAAM,CAAC,GAAG,CAAC,KAAK,CAAC,CAAC;gBACtC,CAAC;YACH,CAAC,CAAC,CAAC;YAEH,+DAA+D;YAC/D,IAAI,QAAQ,EAAE,CAAC;gBACb,OAAO,QAAQ,CAAC,GAAG,CAAC,CAAC;YACvB,CAAC;YACD,OAAO,GAAQ,CAAC;QAClB,CAAC;QACD,CAAC,IAAI;YACH,KAAK,IAAI,CAAC,GAAG,CAAC,EAAE,CAAC,GAAG,IAAI,CAAC,MAAM,EAAE,CAAC,EAAE,EAAE,CAAC;gBACrC,MAAM,IAAI,CAAC,MAAM,CAAC,CAAC,CAAC,CAAC;YACvB,CAAC;QACH,CAAC;QACD,OAAO;YACL,IAAI,WAAW,EAAE,CAAC;gBAChB,OAAO,WAAW,CAAC;YACrB,CAAC;YACD,MAAM,MAAM,GAAQ,EAAE,CAAC;YACvB,KAAK,IAAI,CAAC,GAAG,CAAC,EAAE,CAAC,GAAG,IAAI,CAAC,MAAM,EAAE,CAAC,EAAE,EAAE,CAAC;gBACrC,MAAM,CAAC,IAAI,CAAC,IAAI,CAAC,MAAM,CAAC,CAAC,CAAC,CAAC,CAAC;YAC9B,CAAC;YACD,WAAW,GAAG,MAAM,CAAC;YACrB,OAAO,MAAM,CAAC;QAChB,CAAC;QACD,CAAC,MAAM,CAAC,QAAQ,CAAC;YACf,OAAO,IAAI,CAAC,IAAI,EAAE,CAAC;QACrB,CAAC;KACF,CAAC;AACJ,CAAC","sourcesContent":["import * as arrow from 'apache-arrow';\n\nexport interface TypedRowAccessor<T> extends Iterable<T> {\n /** Returns a typed row at the specified index by converting on demand */\n getRow(index: number): T;\n /** Number of rows in the table */\n length: number;\n /** Returns an iterator that yields each row in the table */\n rows(): IterableIterator<T>;\n /** Returns an array containing all rows in the table. The array is cached and reused. */\n toArray(): T[];\n}\n\n/**\n * Creates a row accessor wrapper around an Arrow table that provides typed row access.\n */\nexport function createTypedRowAccessor<T extends arrow.TypeMap = any>({\n arrowTable,\n validate,\n}: {\n arrowTable: arrow.Table<T>;\n validate?: (row: unknown) => T;\n}): TypedRowAccessor<T> {\n let cachedArray: T[] | undefined;\n\n return {\n get length() {\n return arrowTable.numRows;\n },\n getRow(index: number): T {\n const row: Record<string, unknown> = {};\n arrowTable.schema.fields.forEach((field: arrow.Field) => {\n const column = arrowTable.getChild(field.name);\n if (column) {\n row[field.name] = column.get(index);\n }\n });\n\n // If a validator is provided, use it to validate/parse the row\n if (validate) {\n return validate(row);\n }\n return row as T;\n },\n *rows(): IterableIterator<T> {\n for (let i = 0; i < this.length; i++) {\n yield this.getRow(i);\n }\n },\n toArray(): T[] {\n if (cachedArray) {\n return cachedArray;\n }\n const result: T[] = [];\n for (let i = 0; i < this.length; i++) {\n result.push(this.getRow(i));\n }\n cachedArray = result;\n return result;\n },\n [Symbol.iterator](): IterableIterator<T> {\n return this.rows();\n },\n };\n}\n"]}
package/dist/types.d.ts ADDED
@@ -0,0 +1,21 @@
+ import { QualifiedTableName } from './duckdb-utils';
+ export type TableColumn = {
+     name: string;
+     type: string;
+ };
+ export type DataTable = {
+     table: QualifiedTableName;
+     isView: boolean;
+     /** @deprecated Use table.database instead */
+     database?: string;
+     /** @deprecated Use table.schema instead */
+     schema: string;
+     /** @deprecated Use table.table instead */
+     tableName: string;
+     columns: TableColumn[];
+     rowCount?: number;
+     inputFileName?: string;
+     sql?: string;
+     comment?: string;
+ };
+ //# sourceMappingURL=types.d.ts.map
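Migration sketch for the deprecated flat fields (hedged: assumes QualifiedTableName exposes database/schema/table, which is what the deprecation notes point at):

import type {DataTable} from '@sqlrooms/duckdb-core';

// Prefer the qualified name over the deprecated flat fields:
function fullTableName(t: DataTable): string {
  const {database, schema, table} = t.table; // not t.database / t.schema / t.tableName
  return [database, schema, table].filter(Boolean).join('.');
}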
package/dist/types.d.ts.map ADDED
@@ -0,0 +1 @@
+ {"version":3,"file":"types.d.ts","sourceRoot":"","sources":["../src/types.ts"],"names":[],"mappings":"AAAA,OAAO,EAAC,kBAAkB,EAAC,MAAM,gBAAgB,CAAC;AAElD,MAAM,MAAM,WAAW,GAAG;IACxB,IAAI,EAAE,MAAM,CAAC;IACb,IAAI,EAAE,MAAM,CAAC;CACd,CAAC;AAEF,MAAM,MAAM,SAAS,GAAG;IACtB,KAAK,EAAE,kBAAkB,CAAC;IAC1B,MAAM,EAAE,OAAO,CAAC;IAChB,6CAA6C;IAC7C,QAAQ,CAAC,EAAE,MAAM,CAAC;IAClB,2CAA2C;IAC3C,MAAM,EAAE,MAAM,CAAC;IACf,0CAA0C;IAC1C,SAAS,EAAE,MAAM,CAAC;IAClB,OAAO,EAAE,WAAW,EAAE,CAAC;IACvB,QAAQ,CAAC,EAAE,MAAM,CAAC;IAClB,aAAa,CAAC,EAAE,MAAM,CAAC;IACvB,GAAG,CAAC,EAAE,MAAM,CAAC;IACb,OAAO,CAAC,EAAE,MAAM,CAAC;CAClB,CAAC"}
package/dist/types.js ADDED
@@ -0,0 +1,2 @@
+ export {};
+ //# sourceMappingURL=types.js.map