@sqlrooms/duckdb 0.15.0 → 0.16.1

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (40)
  1. package/dist/DuckDbSlice.d.ts +102 -23
  2. package/dist/DuckDbSlice.d.ts.map +1 -1
  3. package/dist/DuckDbSlice.js +201 -63
  4. package/dist/DuckDbSlice.js.map +1 -1
  5. package/dist/connectors/BaseDuckDbConnector.d.ts +22 -5
  6. package/dist/connectors/BaseDuckDbConnector.d.ts.map +1 -1
  7. package/dist/connectors/BaseDuckDbConnector.js +66 -15
  8. package/dist/connectors/BaseDuckDbConnector.js.map +1 -1
  9. package/dist/connectors/DuckDbConnector.d.ts +237 -4
  10. package/dist/connectors/DuckDbConnector.d.ts.map +1 -1
  11. package/dist/connectors/DuckDbConnector.js.map +1 -1
  12. package/dist/connectors/WasmDuckDbConnector.d.ts +2 -1
  13. package/dist/connectors/WasmDuckDbConnector.d.ts.map +1 -1
  14. package/dist/connectors/WasmDuckDbConnector.js +90 -17
  15. package/dist/connectors/WasmDuckDbConnector.js.map +1 -1
  16. package/dist/duckdb-utils.d.ts +67 -0
  17. package/dist/duckdb-utils.d.ts.map +1 -1
  18. package/dist/duckdb-utils.js +195 -0
  19. package/dist/duckdb-utils.js.map +1 -1
  20. package/dist/exportToCsv.js +1 -1
  21. package/dist/exportToCsv.js.map +1 -1
  22. package/dist/index.d.ts +3 -2
  23. package/dist/index.d.ts.map +1 -1
  24. package/dist/index.js +3 -2
  25. package/dist/index.js.map +1 -1
  26. package/dist/schemaTree.d.ts +8 -0
  27. package/dist/schemaTree.d.ts.map +1 -0
  28. package/dist/schemaTree.js +76 -0
  29. package/dist/schemaTree.js.map +1 -0
  30. package/dist/typeCategories.d.ts +16 -0
  31. package/dist/typeCategories.d.ts.map +1 -0
  32. package/dist/typeCategories.js +72 -0
  33. package/dist/typeCategories.js.map +1 -0
  34. package/dist/types.d.ts +35 -0
  35. package/dist/types.d.ts.map +1 -1
  36. package/dist/types.js.map +1 -1
  37. package/dist/useSql.d.ts.map +1 -1
  38. package/dist/useSql.js +11 -7
  39. package/dist/useSql.js.map +1 -1
  40. package/package.json +13 -12
@@ -0,0 +1,8 @@
1
+ import { DbSchemaNode, DataTable } from './types';
2
+ /**
3
+ * Group tables by database, schema and create a tree of databases, schemas, tables, and columns.
4
+ * @param tables - The tables to group
5
+ * @returns An array of database nodes containing schemas, tables and columns
6
+ */
7
+ export declare function createDbSchemaTrees(tables: DataTable[]): DbSchemaNode[];
8
+ //# sourceMappingURL=schemaTree.d.ts.map
@@ -0,0 +1 @@
1
+ {"version":3,"file":"schemaTree.d.ts","sourceRoot":"","sources":["../src/schemaTree.ts"],"names":[],"mappings":"AACA,OAAO,EAAC,YAAY,EAAE,SAAS,EAAC,MAAM,SAAS,CAAC;AAEhD;;;;GAIG;AACH,wBAAgB,mBAAmB,CAAC,MAAM,EAAE,SAAS,EAAE,GAAG,YAAY,EAAE,CAmCvE"}
@@ -0,0 +1,76 @@
1
+ import { getDuckDbTypeCategory } from './typeCategories';
2
+ /**
3
+ * Group tables by database, schema and create a tree of databases, schemas, tables, and columns.
4
+ * @param tables - The tables to group
5
+ * @returns An array of database nodes containing schemas, tables and columns
6
+ */
7
+ export function createDbSchemaTrees(tables) {
8
+ const databaseMap = new Map();
9
+ for (const table of tables) {
10
+ const database = table.database ?? 'default';
11
+ const schema = table.schema;
12
+ const tableName = table.tableName;
13
+ const columnNodes = table.columns.map((column) => createColumnNode(schema, tableName, column.name, column.type));
14
+ const tableNode = createTableNode(database, schema, tableName, columnNodes);
15
+ if (!databaseMap.has(database)) {
16
+ databaseMap.set(database, new Map());
17
+ }
18
+ const schemaMap = databaseMap.get(database);
19
+ if (!schemaMap.has(schema)) {
20
+ schemaMap.set(schema, []);
21
+ }
22
+ schemaMap.get(schema)?.push(tableNode);
23
+ }
24
+ // Create database nodes
25
+ return Array.from(databaseMap.entries()).map(([database, schemaMap]) => {
26
+ const schemaNodes = Array.from(schemaMap.entries()).map(([schema, tables]) => createSchemaTreeNode(schema, tables));
27
+ return createDatabaseTreeNode(database, schemaNodes);
28
+ });
29
+ }
30
+ function createColumnNode(schema, tableName, columnName, columnType) {
31
+ return {
32
+ key: `${schema}.${tableName}.${columnName}`,
33
+ object: {
34
+ type: 'column',
35
+ name: columnName,
36
+ columnType,
37
+ columnTypeCategory: getDuckDbTypeCategory(columnType),
38
+ },
39
+ };
40
+ }
41
+ function createTableNode(database, schema, tableName, columnNodes) {
42
+ return {
43
+ key: `${schema}.${tableName}`,
44
+ object: {
45
+ type: 'table',
46
+ schema,
47
+ database,
48
+ name: tableName,
49
+ },
50
+ isInitialOpen: false,
51
+ children: columnNodes,
52
+ };
53
+ }
54
+ function createSchemaTreeNode(schema, tables) {
55
+ return {
56
+ key: schema,
57
+ object: {
58
+ type: 'schema',
59
+ name: schema,
60
+ },
61
+ isInitialOpen: true,
62
+ children: tables,
63
+ };
64
+ }
65
+ function createDatabaseTreeNode(database, schemas) {
66
+ return {
67
+ key: database,
68
+ object: {
69
+ type: 'database',
70
+ name: database,
71
+ },
72
+ isInitialOpen: true,
73
+ children: schemas,
74
+ };
75
+ }
76
+ //# sourceMappingURL=schemaTree.js.map
@@ -0,0 +1 @@
1
+ {"version":3,"file":"schemaTree.js","sourceRoot":"","sources":["../src/schemaTree.ts"],"names":[],"mappings":"AAAA,OAAO,EAAC,qBAAqB,EAAC,MAAM,kBAAkB,CAAC;AAGvD;;;;GAIG;AACH,MAAM,UAAU,mBAAmB,CAAC,MAAmB;IACrD,MAAM,WAAW,GAAG,IAAI,GAAG,EAAuC,CAAC;IAEnE,KAAK,MAAM,KAAK,IAAI,MAAM,EAAE,CAAC;QAC3B,MAAM,QAAQ,GAAG,KAAK,CAAC,QAAQ,IAAI,SAAS,CAAC;QAC7C,MAAM,MAAM,GAAG,KAAK,CAAC,MAAM,CAAC;QAC5B,MAAM,SAAS,GAAG,KAAK,CAAC,SAAS,CAAC;QAElC,MAAM,WAAW,GAAG,KAAK,CAAC,OAAO,CAAC,GAAG,CAAC,CAAC,MAAM,EAAE,EAAE,CAC/C,gBAAgB,CAAC,MAAM,EAAE,SAAS,EAAE,MAAM,CAAC,IAAI,EAAE,MAAM,CAAC,IAAI,CAAC,CAC9D,CAAC;QAEF,MAAM,SAAS,GAAG,eAAe,CAAC,QAAQ,EAAE,MAAM,EAAE,SAAS,EAAE,WAAW,CAAC,CAAC;QAE5E,IAAI,CAAC,WAAW,CAAC,GAAG,CAAC,QAAQ,CAAC,EAAE,CAAC;YAC/B,WAAW,CAAC,GAAG,CAAC,QAAQ,EAAE,IAAI,GAAG,EAA0B,CAAC,CAAC;QAC/D,CAAC;QAED,MAAM,SAAS,GAAG,WAAW,CAAC,GAAG,CAAC,QAAQ,CAAE,CAAC;QAE7C,IAAI,CAAC,SAAS,CAAC,GAAG,CAAC,MAAM,CAAC,EAAE,CAAC;YAC3B,SAAS,CAAC,GAAG,CAAC,MAAM,EAAE,EAAE,CAAC,CAAC;QAC5B,CAAC;QAED,SAAS,CAAC,GAAG,CAAC,MAAM,CAAC,EAAE,IAAI,CAAC,SAAS,CAAC,CAAC;IACzC,CAAC;IAED,wBAAwB;IACxB,OAAO,KAAK,CAAC,IAAI,CAAC,WAAW,CAAC,OAAO,EAAE,CAAC,CAAC,GAAG,CAAC,CAAC,CAAC,QAAQ,EAAE,SAAS,CAAC,EAAE,EAAE;QACrE,MAAM,WAAW,GAAG,KAAK,CAAC,IAAI,CAAC,SAAS,CAAC,OAAO,EAAE,CAAC,CAAC,GAAG,CACrD,CAAC,CAAC,MAAM,EAAE,MAAM,CAAC,EAAE,EAAE,CAAC,oBAAoB,CAAC,MAAM,EAAE,MAAM,CAAC,CAC3D,CAAC;QAEF,OAAO,sBAAsB,CAAC,QAAQ,EAAE,WAAW,CAAC,CAAC;IACvD,CAAC,CAAC,CAAC;AACL,CAAC;AAED,SAAS,gBAAgB,CACvB,MAAc,EACd,SAAiB,EACjB,UAAkB,EAClB,UAAkB;IAElB,OAAO;QACL,GAAG,EAAE,GAAG,MAAM,IAAI,SAAS,IAAI,UAAU,EAAE;QAC3C,MAAM,EAAE;YACN,IAAI,EAAE,QAAQ;YACd,IAAI,EAAE,UAAU;YAChB,UAAU;YACV,kBAAkB,EAAE,qBAAqB,CAAC,UAAU,CAAC;SACtD;KACF,CAAC;AACJ,CAAC;AAED,SAAS,eAAe,CACtB,QAAgB,EAChB,MAAc,EACd,SAAiB,EACjB,WAA2B;IAE3B,OAAO;QACL,GAAG,EAAE,GAAG,MAAM,IAAI,SAAS,EAAE;QAC7B,MAAM,EAAE;YACN,IAAI,EAAE,OAAO;YACb,MAAM;YACN,QAAQ;YACR,IAAI,EAAE,SAAS;SAChB;QACD,aAAa,EAAE,KAAK;QACpB,QAAQ,EAAE,WAAW;KACtB,CAAC;AACJ,CAAC;AAED,SAAS,oBAAoB,CAC3B,MAAc,EACd,MAAsB;IAEtB,OAAO;QACL,GAAG,EAAE,MAAM;QACX,MAAM,EAAE;
YACN,IAAI,EAAE,QAAQ;YACd,IAAI,EAAE,MAAM;SACb;QACD,aAAa,EAAE,IAAI;QACnB,QAAQ,EAAE,MAAM;KACjB,CAAC;AACJ,CAAC;AAED,SAAS,sBAAsB,CAC7B,QAAgB,EAChB,OAAuB;IAEvB,OAAO;QACL,GAAG,EAAE,QAAQ;QACb,MAAM,EAAE;YACN,IAAI,EAAE,UAAU;YAChB,IAAI,EAAE,QAAQ;SACf;QACD,aAAa,EAAE,IAAI;QACnB,QAAQ,EAAE,OAAO;KAClB,CAAC;AACJ,CAAC","sourcesContent":["import {getDuckDbTypeCategory} from './typeCategories';\nimport {DbSchemaNode, DataTable} from './types';\n\n/**\n * Group tables by database, schema and create a tree of databases, schemas, tables, and columns.\n * @param tables - The tables to group\n * @returns An array of database nodes containing schemas, tables and columns\n */\nexport function createDbSchemaTrees(tables: DataTable[]): DbSchemaNode[] {\n const databaseMap = new Map<string, Map<string, DbSchemaNode[]>>();\n\n for (const table of tables) {\n const database = table.database ?? 'default';\n const schema = table.schema;\n const tableName = table.tableName;\n\n const columnNodes = table.columns.map((column) =>\n createColumnNode(schema, tableName, column.name, column.type),\n );\n\n const tableNode = createTableNode(database, schema, tableName, columnNodes);\n\n if (!databaseMap.has(database)) {\n databaseMap.set(database, new Map<string, DbSchemaNode[]>());\n }\n\n const schemaMap = databaseMap.get(database)!;\n\n if (!schemaMap.has(schema)) {\n schemaMap.set(schema, []);\n }\n\n schemaMap.get(schema)?.push(tableNode);\n }\n\n // Create database nodes\n return Array.from(databaseMap.entries()).map(([database, schemaMap]) => {\n const schemaNodes = Array.from(schemaMap.entries()).map(\n ([schema, tables]) => createSchemaTreeNode(schema, tables),\n );\n\n return createDatabaseTreeNode(database, schemaNodes);\n });\n}\n\nfunction createColumnNode(\n schema: string,\n tableName: string,\n columnName: string,\n columnType: string,\n): DbSchemaNode {\n return {\n key: `${schema}.${tableName}.${columnName}`,\n object: {\n type: 'column',\n name: columnName,\n columnType,\n 
columnTypeCategory: getDuckDbTypeCategory(columnType),\n },\n };\n}\n\nfunction createTableNode(\n database: string,\n schema: string,\n tableName: string,\n columnNodes: DbSchemaNode[],\n): DbSchemaNode {\n return {\n key: `${schema}.${tableName}`,\n object: {\n type: 'table',\n schema,\n database,\n name: tableName,\n },\n isInitialOpen: false,\n children: columnNodes,\n };\n}\n\nfunction createSchemaTreeNode(\n schema: string,\n tables: DbSchemaNode[],\n): DbSchemaNode {\n return {\n key: schema,\n object: {\n type: 'schema',\n name: schema,\n },\n isInitialOpen: true,\n children: tables,\n };\n}\n\nfunction createDatabaseTreeNode(\n database: string,\n schemas: DbSchemaNode[],\n): DbSchemaNode {\n return {\n key: database,\n object: {\n type: 'database',\n name: database,\n },\n isInitialOpen: true,\n children: schemas,\n };\n}\n"]}
@@ -0,0 +1,16 @@
1
+ import { ColumnTypeCategory } from './types';
2
+ import { DataType } from 'apache-arrow';
3
+ /**
4
+ * Get the category of a column type
5
+ * @param columnType - The type of the column
6
+ * @returns The category of the column type
7
+ */
8
+ export declare function getDuckDbTypeCategory(columnType: string): ColumnTypeCategory | undefined;
9
+ /**
10
+ * This function is used to get the type category of a column from an Arrow table.
11
+ *
12
+ * @param type - The Arrow DataType of the column.
13
+ * @returns The type category of the column.
14
+ */
15
+ export declare function getArrowColumnTypeCategory(type: DataType): ColumnTypeCategory;
16
+ //# sourceMappingURL=typeCategories.d.ts.map
@@ -0,0 +1 @@
1
+ {"version":3,"file":"typeCategories.d.ts","sourceRoot":"","sources":["../src/typeCategories.ts"],"names":[],"mappings":"AAAA,OAAO,EAAC,kBAAkB,EAAC,MAAM,SAAS,CAAC;AAC3C,OAAO,EAAC,QAAQ,EAAC,MAAM,cAAc,CAAC;AA4BtC;;;;GAIG;AACH,wBAAgB,qBAAqB,CACnC,UAAU,EAAE,MAAM,GACjB,kBAAkB,GAAG,SAAS,CAQhC;AACD;;;;;GAKG;AACH,wBAAgB,0BAA0B,CAAC,IAAI,EAAE,QAAQ,GAAG,kBAAkB,CAiC7E"}
@@ -0,0 +1,72 @@
1
+ import { DataType } from 'apache-arrow';
2
+ const DUCKDB_TYPE_CATEGORIES = {
3
+ string: [/^varchar/, /^char/, /^text/, /^string/, /^uuid/, /^bit/],
4
+ number: [
5
+ /^tinyint/,
6
+ /^smallint/,
7
+ /^integer/,
8
+ /^bigint/,
9
+ /^hugeint/,
10
+ /^utinyint/,
11
+ /^usmallint/,
12
+ /^uinteger/,
13
+ /^ubigint/,
14
+ /^uhugeint/,
15
+ /^decimal/,
16
+ /^numeric/,
17
+ /^double/,
18
+ /^float/,
19
+ ],
20
+ boolean: [/^bool(ean)?/],
21
+ binary: [/^blob/, /^bytea/, /^binary/, /^varbinary/],
22
+ datetime: [/^date$/, /^time$/, /^timestamp$/, /^timestamptz$/, /^interval$/],
23
+ json: [/^json$/],
24
+ struct: [/^struct$/, /^list$/, /^map$/, /^array$/, /^union$/],
25
+ geometry: [/^geometry/],
26
+ };
27
+ /**
28
+ * Get the category of a column type
29
+ * @param columnType - The type of the column
30
+ * @returns The category of the column type
31
+ */
32
+ export function getDuckDbTypeCategory(columnType) {
33
+ const type = columnType.toLowerCase();
34
+ for (const [category, patterns] of Object.entries(DUCKDB_TYPE_CATEGORIES)) {
35
+ if (patterns.some((pattern) => type.match(pattern))) {
36
+ return category;
37
+ }
38
+ }
39
+ return undefined;
40
+ }
41
+ /**
42
+ * This function is used to get the type category of a column from an Arrow table.
43
+ *
44
+ * @param type - The Arrow DataType of the column.
45
+ * @returns The type category of the column.
46
+ */
47
+ export function getArrowColumnTypeCategory(type) {
48
+ if (DataType.isInt(type) ||
49
+ DataType.isFloat(type) ||
50
+ DataType.isDecimal(type)) {
51
+ return 'number';
52
+ }
53
+ if (DataType.isDate(type) ||
54
+ DataType.isTime(type) ||
55
+ DataType.isTimestamp(type)) {
56
+ return 'datetime';
57
+ }
58
+ if (DataType.isBool(type)) {
59
+ return 'boolean';
60
+ }
61
+ if (DataType.isBinary(type)) {
62
+ return 'binary';
63
+ }
64
+ // Note: Arrow doesn't have built-in geometry types, so we'll need to check the type name
65
+ // if your geometry types are custom implementations
66
+ if (type.toString().toLowerCase().includes('geometry')) {
67
+ return 'geometry';
68
+ }
69
+ // Default to string type for all other cases
70
+ return 'string';
71
+ }
72
+ //# sourceMappingURL=typeCategories.js.map
@@ -0,0 +1 @@
1
+ {"version":3,"file":"typeCategories.js","sourceRoot":"","sources":["../src/typeCategories.ts"],"names":[],"mappings":"AACA,OAAO,EAAC,QAAQ,EAAC,MAAM,cAAc,CAAC;AAEtC,MAAM,sBAAsB,GAAG;IAC7B,MAAM,EAAE,CAAC,UAAU,EAAE,OAAO,EAAE,OAAO,EAAE,SAAS,EAAE,OAAO,EAAE,MAAM,CAAC;IAClE,MAAM,EAAE;QACN,UAAU;QACV,WAAW;QACX,UAAU;QACV,SAAS;QACT,UAAU;QACV,WAAW;QACX,YAAY;QACZ,WAAW;QACX,UAAU;QACV,WAAW;QACX,UAAU;QACV,UAAU;QACV,SAAS;QACT,QAAQ;KACT;IACD,OAAO,EAAE,CAAC,aAAa,CAAC;IACxB,MAAM,EAAE,CAAC,OAAO,EAAE,QAAQ,EAAE,SAAS,EAAE,YAAY,CAAC;IACpD,QAAQ,EAAE,CAAC,QAAQ,EAAE,QAAQ,EAAE,aAAa,EAAE,eAAe,EAAE,YAAY,CAAC;IAC5E,IAAI,EAAE,CAAC,QAAQ,CAAC;IAChB,MAAM,EAAE,CAAC,UAAU,EAAE,QAAQ,EAAE,OAAO,EAAE,SAAS,EAAE,SAAS,CAAC;IAC7D,QAAQ,EAAE,CAAC,WAAW,CAAC;CACuB,CAAC;AAEjD;;;;GAIG;AACH,MAAM,UAAU,qBAAqB,CACnC,UAAkB;IAElB,MAAM,IAAI,GAAG,UAAU,CAAC,WAAW,EAAE,CAAC;IACtC,KAAK,MAAM,CAAC,QAAQ,EAAE,QAAQ,CAAC,IAAI,MAAM,CAAC,OAAO,CAAC,sBAAsB,CAAC,EAAE,CAAC;QAC1E,IAAI,QAAQ,CAAC,IAAI,CAAC,CAAC,OAAO,EAAE,EAAE,CAAC,IAAI,CAAC,KAAK,CAAC,OAAO,CAAC,CAAC,EAAE,CAAC;YACpD,OAAO,QAA8B,CAAC;QACxC,CAAC;IACH,CAAC;IACD,OAAO,SAAS,CAAC;AACnB,CAAC;AACD;;;;;GAKG;AACH,MAAM,UAAU,0BAA0B,CAAC,IAAc;IACvD,IACE,QAAQ,CAAC,KAAK,CAAC,IAAI,CAAC;QACpB,QAAQ,CAAC,OAAO,CAAC,IAAI,CAAC;QACtB,QAAQ,CAAC,SAAS,CAAC,IAAI,CAAC,EACxB,CAAC;QACD,OAAO,QAAQ,CAAC;IAClB,CAAC;IAED,IACE,QAAQ,CAAC,MAAM,CAAC,IAAI,CAAC;QACrB,QAAQ,CAAC,MAAM,CAAC,IAAI,CAAC;QACrB,QAAQ,CAAC,WAAW,CAAC,IAAI,CAAC,EAC1B,CAAC;QACD,OAAO,UAAU,CAAC;IACpB,CAAC;IAED,IAAI,QAAQ,CAAC,MAAM,CAAC,IAAI,CAAC,EAAE,CAAC;QAC1B,OAAO,SAAS,CAAC;IACnB,CAAC;IAED,IAAI,QAAQ,CAAC,QAAQ,CAAC,IAAI,CAAC,EAAE,CAAC;QAC5B,OAAO,QAAQ,CAAC;IAClB,CAAC;IAED,yFAAyF;IACzF,oDAAoD;IACpD,IAAI,IAAI,CAAC,QAAQ,EAAE,CAAC,WAAW,EAAE,CAAC,QAAQ,CAAC,UAAU,CAAC,EAAE,CAAC;QACvD,OAAO,UAAU,CAAC;IACpB,CAAC;IAED,6CAA6C;IAC7C,OAAO,QAAQ,CAAC;AAClB,CAAC","sourcesContent":["import {ColumnTypeCategory} from './types';\nimport {DataType} from 'apache-arrow';\n\nconst DUCKDB_TYPE_CATEGORIES = {\n string: [/^varchar/, /^char/, /^text/, /^string/, /^uuid/, 
/^bit/],\n number: [\n /^tinyint/,\n /^smallint/,\n /^integer/,\n /^bigint/,\n /^hugeint/,\n /^utinyint/,\n /^usmallint/,\n /^uinteger/,\n /^ubigint/,\n /^uhugeint/,\n /^decimal/,\n /^numeric/,\n /^double/,\n /^float/,\n ],\n boolean: [/^bool(ean)?/],\n binary: [/^blob/, /^bytea/, /^binary/, /^varbinary/],\n datetime: [/^date$/, /^time$/, /^timestamp$/, /^timestamptz$/, /^interval$/],\n json: [/^json$/],\n struct: [/^struct$/, /^list$/, /^map$/, /^array$/, /^union$/],\n geometry: [/^geometry/],\n} satisfies Record<ColumnTypeCategory, RegExp[]>;\n\n/**\n * Get the category of a column type\n * @param columnType - The type of the column\n * @returns The category of the column type\n */\nexport function getDuckDbTypeCategory(\n columnType: string,\n): ColumnTypeCategory | undefined {\n const type = columnType.toLowerCase();\n for (const [category, patterns] of Object.entries(DUCKDB_TYPE_CATEGORIES)) {\n if (patterns.some((pattern) => type.match(pattern))) {\n return category as ColumnTypeCategory;\n }\n }\n return undefined;\n}\n/**\n * This function is used to get the type category of a column from an Arrow table.\n *\n * @param type - The Arrow DataType of the column.\n * @returns The type category of the column.\n */\nexport function getArrowColumnTypeCategory(type: DataType): ColumnTypeCategory {\n if (\n DataType.isInt(type) ||\n DataType.isFloat(type) ||\n DataType.isDecimal(type)\n ) {\n return 'number';\n }\n\n if (\n DataType.isDate(type) ||\n DataType.isTime(type) ||\n DataType.isTimestamp(type)\n ) {\n return 'datetime';\n }\n\n if (DataType.isBool(type)) {\n return 'boolean';\n }\n\n if (DataType.isBinary(type)) {\n return 'binary';\n }\n\n // Note: Arrow doesn't have built-in geometry types, so we'll need to check the type name\n // if your geometry types are custom implementations\n if (type.toString().toLowerCase().includes('geometry')) {\n return 'geometry';\n }\n\n // Default to string type for all other cases\n return 'string';\n}\n"]}
package/dist/types.d.ts CHANGED
@@ -1,11 +1,46 @@
1
+ import { QualifiedTableName } from './duckdb-utils';
1
2
  export type TableColumn = {
2
3
  name: string;
3
4
  type: string;
4
5
  };
5
6
  export type DataTable = {
7
+ table: QualifiedTableName;
8
+ /** @deprecated Use table.database instead */
9
+ database?: string;
10
+ /** @deprecated Use table.schema instead */
11
+ schema: string;
12
+ /** @deprecated Use table.table instead */
6
13
  tableName: string;
7
14
  columns: TableColumn[];
8
15
  rowCount?: number;
9
16
  inputFileName?: string;
10
17
  };
18
+ export type ColumnTypeCategory = 'number' | 'string' | 'datetime' | 'boolean' | 'binary' | 'json' | 'struct' | 'geometry';
19
+ export type DbSchemaNode<T extends NodeObject = NodeObject> = {
20
+ key: string;
21
+ object: T;
22
+ children?: DbSchemaNode[];
23
+ isInitialOpen?: boolean;
24
+ };
25
+ export type NodeObject = ColumnNodeObject | TableNodeObject | SchemaNodeObject | DatabaseNodeObject;
26
+ type BaseNodeObject = {
27
+ name: string;
28
+ };
29
+ export type ColumnNodeObject = BaseNodeObject & {
30
+ type: 'column';
31
+ columnType: string;
32
+ columnTypeCategory?: ColumnTypeCategory;
33
+ };
34
+ export type TableNodeObject = BaseNodeObject & {
35
+ type: 'table';
36
+ schema: string;
37
+ database: string;
38
+ };
39
+ export type SchemaNodeObject = BaseNodeObject & {
40
+ type: 'schema';
41
+ };
42
+ export type DatabaseNodeObject = BaseNodeObject & {
43
+ type: 'database';
44
+ };
45
+ export {};
11
46
  //# sourceMappingURL=types.d.ts.map
@@ -1 +1 @@
1
- {"version":3,"file":"types.d.ts","sourceRoot":"","sources":["../src/types.ts"],"names":[],"mappings":"AAAA,MAAM,MAAM,WAAW,GAAG;IACxB,IAAI,EAAE,MAAM,CAAC;IACb,IAAI,EAAE,MAAM,CAAC;CACd,CAAC;AAEF,MAAM,MAAM,SAAS,GAAG;IACtB,SAAS,EAAE,MAAM,CAAC;IAClB,OAAO,EAAE,WAAW,EAAE,CAAC;IACvB,QAAQ,CAAC,EAAE,MAAM,CAAC;IAClB,aAAa,CAAC,EAAE,MAAM,CAAC;CACxB,CAAC"}
1
+ {"version":3,"file":"types.d.ts","sourceRoot":"","sources":["../src/types.ts"],"names":[],"mappings":"AAAA,OAAO,EAAC,kBAAkB,EAAC,MAAM,gBAAgB,CAAC;AAElD,MAAM,MAAM,WAAW,GAAG;IACxB,IAAI,EAAE,MAAM,CAAC;IACb,IAAI,EAAE,MAAM,CAAC;CACd,CAAC;AAEF,MAAM,MAAM,SAAS,GAAG;IACtB,KAAK,EAAE,kBAAkB,CAAC;IAC1B,6CAA6C;IAC7C,QAAQ,CAAC,EAAE,MAAM,CAAC;IAClB,2CAA2C;IAC3C,MAAM,EAAE,MAAM,CAAC;IACf,0CAA0C;IAC1C,SAAS,EAAE,MAAM,CAAC;IAClB,OAAO,EAAE,WAAW,EAAE,CAAC;IACvB,QAAQ,CAAC,EAAE,MAAM,CAAC;IAClB,aAAa,CAAC,EAAE,MAAM,CAAC;CACxB,CAAC;AACF,MAAM,MAAM,kBAAkB,GAC1B,QAAQ,GACR,QAAQ,GACR,UAAU,GACV,SAAS,GACT,QAAQ,GACR,MAAM,GACN,QAAQ,GACR,UAAU,CAAC;AAEf,MAAM,MAAM,YAAY,CAAC,CAAC,SAAS,UAAU,GAAG,UAAU,IAAI;IAC5D,GAAG,EAAE,MAAM,CAAC;IACZ,MAAM,EAAE,CAAC,CAAC;IACV,QAAQ,CAAC,EAAE,YAAY,EAAE,CAAC;IAC1B,aAAa,CAAC,EAAE,OAAO,CAAC;CACzB,CAAC;AAEF,MAAM,MAAM,UAAU,GAClB,gBAAgB,GAChB,eAAe,GACf,gBAAgB,GAChB,kBAAkB,CAAC;AAEvB,KAAK,cAAc,GAAG;IACpB,IAAI,EAAE,MAAM,CAAC;CACd,CAAC;AAEF,MAAM,MAAM,gBAAgB,GAAG,cAAc,GAAG;IAC9C,IAAI,EAAE,QAAQ,CAAC;IACf,UAAU,EAAE,MAAM,CAAC;IACnB,kBAAkB,CAAC,EAAE,kBAAkB,CAAC;CACzC,CAAC;AAEF,MAAM,MAAM,eAAe,GAAG,cAAc,GAAG;IAC7C,IAAI,EAAE,OAAO,CAAC;IACd,MAAM,EAAE,MAAM,CAAC;IACf,QAAQ,EAAE,MAAM,CAAC;CAClB,CAAC;AAEF,MAAM,MAAM,gBAAgB,GAAG,cAAc,GAAG;IAC9C,IAAI,EAAE,QAAQ,CAAC;CAChB,CAAC;AAEF,MAAM,MAAM,kBAAkB,GAAG,cAAc,GAAG;IAChD,IAAI,EAAE,UAAU,CAAC;CAClB,CAAC"}
package/dist/types.js.map CHANGED
@@ -1 +1 @@
1
- {"version":3,"file":"types.js","sourceRoot":"","sources":["../src/types.ts"],"names":[],"mappings":"","sourcesContent":["export type TableColumn = {\n name: string;\n type: string;\n};\n\nexport type DataTable = {\n tableName: string;\n columns: TableColumn[];\n rowCount?: number;\n inputFileName?: string;\n};\n"]}
1
+ {"version":3,"file":"types.js","sourceRoot":"","sources":["../src/types.ts"],"names":[],"mappings":"","sourcesContent":["import {QualifiedTableName} from './duckdb-utils';\n\nexport type TableColumn = {\n name: string;\n type: string;\n};\n\nexport type DataTable = {\n table: QualifiedTableName;\n /** @deprecated Use table.database instead */\n database?: string;\n /** @deprecated Use table.schema instead */\n schema: string;\n /** @deprecated Use table.table instead */\n tableName: string;\n columns: TableColumn[];\n rowCount?: number;\n inputFileName?: string;\n};\nexport type ColumnTypeCategory =\n | 'number'\n | 'string'\n | 'datetime'\n | 'boolean'\n | 'binary'\n | 'json'\n | 'struct'\n | 'geometry';\n\nexport type DbSchemaNode<T extends NodeObject = NodeObject> = {\n key: string;\n object: T;\n children?: DbSchemaNode[];\n isInitialOpen?: boolean;\n};\n\nexport type NodeObject =\n | ColumnNodeObject\n | TableNodeObject\n | SchemaNodeObject\n | DatabaseNodeObject;\n\ntype BaseNodeObject = {\n name: string;\n};\n\nexport type ColumnNodeObject = BaseNodeObject & {\n type: 'column';\n columnType: string;\n columnTypeCategory?: ColumnTypeCategory;\n};\n\nexport type TableNodeObject = BaseNodeObject & {\n type: 'table';\n schema: string;\n database: string;\n};\n\nexport type SchemaNodeObject = BaseNodeObject & {\n type: 'schema';\n};\n\nexport type DatabaseNodeObject = BaseNodeObject & {\n type: 'database';\n};\n"]}
@@ -1 +1 @@
1
- {"version":3,"file":"useSql.d.ts","sourceRoot":"","sources":["../src/useSql.ts"],"names":[],"mappings":"AAAA,OAAO,KAAK,KAAK,MAAM,cAAc,CAAC;AAEtC,OAAO,EAAC,CAAC,EAAC,MAAM,KAAK,CAAC;AAEtB,OAAO,EAAyB,gBAAgB,EAAC,MAAM,oBAAoB,CAAC;AAE5E;;;GAGG;AACH,MAAM,WAAW,iBAAiB,CAAC,CAAC,CAAE,SAAQ,gBAAgB,CAAC,CAAC,CAAC;IAC/D,iCAAiC;IACjC,UAAU,EAAE,KAAK,CAAC,KAAK,CAAC;CACzB;AAED;;GAEG;AACH,MAAM,MAAM,iBAAiB,CAAC,CAAC,IAAI,iBAAiB,CAAC,CAAC,CAAC,CAAC;AAExD;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;GA4IG;AACH,wBAAgB,MAAM,CAAC,GAAG,EAAE,OAAO,EAAE;IAAC,KAAK,EAAE,MAAM,CAAC;IAAC,OAAO,CAAC,EAAE,OAAO,CAAA;CAAC,GAAG;IACxE,IAAI,EAAE,iBAAiB,CAAC,GAAG,CAAC,GAAG,SAAS,CAAC;IACzC,KAAK,EAAE,KAAK,GAAG,IAAI,CAAC;IACpB,SAAS,EAAE,OAAO,CAAC;CACpB,CAAC;AAEF,wBAAgB,MAAM,CAAC,MAAM,SAAS,CAAC,CAAC,OAAO,EAC7C,SAAS,EAAE,MAAM,EACjB,OAAO,EAAE;IACP,KAAK,EAAE,MAAM,CAAC;IACd,OAAO,CAAC,EAAE,OAAO,CAAC;CACnB,GACA;IACD,IAAI,EAAE,iBAAiB,CAAC,CAAC,CAAC,KAAK,CAAC,MAAM,CAAC,CAAC,GAAG,SAAS,CAAC;IACrD,KAAK,EAAE,KAAK,GAAG,IAAI,CAAC;IACpB,SAAS,EAAE,OAAO,CAAC;CACpB,CAAC;AA2EF;;GAEG;AACH,eAAO,MAAM,cAAc,eAAS,CAAC"}
1
+ {"version":3,"file":"useSql.d.ts","sourceRoot":"","sources":["../src/useSql.ts"],"names":[],"mappings":"AAAA,OAAO,KAAK,KAAK,MAAM,cAAc,CAAC;AAEtC,OAAO,EAAC,CAAC,EAAC,MAAM,KAAK,CAAC;AAEtB,OAAO,EAAyB,gBAAgB,EAAC,MAAM,oBAAoB,CAAC;AAG5E;;;GAGG;AACH,MAAM,WAAW,iBAAiB,CAAC,CAAC,CAAE,SAAQ,gBAAgB,CAAC,CAAC,CAAC;IAC/D,iCAAiC;IACjC,UAAU,EAAE,KAAK,CAAC,KAAK,CAAC;CACzB;AAED;;GAEG;AACH,MAAM,MAAM,iBAAiB,CAAC,CAAC,IAAI,iBAAiB,CAAC,CAAC,CAAC,CAAC;AAExD;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;GA4IG;AACH,wBAAgB,MAAM,CAAC,GAAG,EAAE,OAAO,EAAE;IAAC,KAAK,EAAE,MAAM,CAAC;IAAC,OAAO,CAAC,EAAE,OAAO,CAAA;CAAC,GAAG;IACxE,IAAI,EAAE,iBAAiB,CAAC,GAAG,CAAC,GAAG,SAAS,CAAC;IACzC,KAAK,EAAE,KAAK,GAAG,IAAI,CAAC;IACpB,SAAS,EAAE,OAAO,CAAC;CACpB,CAAC;AAEF,wBAAgB,MAAM,CAAC,MAAM,SAAS,CAAC,CAAC,OAAO,EAC7C,SAAS,EAAE,MAAM,EACjB,OAAO,EAAE;IACP,KAAK,EAAE,MAAM,CAAC;IACd,OAAO,CAAC,EAAE,OAAO,CAAC;CACnB,GACA;IACD,IAAI,EAAE,iBAAiB,CAAC,CAAC,CAAC,KAAK,CAAC,MAAM,CAAC,CAAC,GAAG,SAAS,CAAC;IACrD,KAAK,EAAE,KAAK,GAAG,IAAI,CAAC;IACpB,SAAS,EAAE,OAAO,CAAC;CACpB,CAAC;AAiFF;;GAEG;AACH,eAAO,MAAM,cAAc,eAAS,CAAC"}
package/dist/useSql.js CHANGED
@@ -14,7 +14,7 @@ export function useSql(zodSchemaOrOptions, maybeOptions) {
14
14
  const [data, setData] = useState(undefined);
15
15
  const [error, setError] = useState(null);
16
16
  const [isLoading, setIsLoading] = useState(false);
17
- const getConnector = useStoreWithDuckDb((state) => state.db.getConnector);
17
+ const executeSql = useStoreWithDuckDb((state) => state.db.executeSql);
18
18
  useEffect(() => {
19
19
  let isMounted = true;
20
20
  const fetchData = async () => {
@@ -24,16 +24,20 @@ export function useSql(zodSchemaOrOptions, maybeOptions) {
24
24
  setIsLoading(true);
25
25
  setError(null);
26
26
  try {
27
- const connector = await getConnector();
28
- const result = await connector.query(options.query);
27
+ const queryHandle = await executeSql(options.query);
28
+ if (!queryHandle || !isMounted) {
29
+ return;
30
+ }
31
+ const result = await queryHandle.result;
32
+ if (!isMounted) {
33
+ return;
34
+ }
29
35
  // Create a row accessor that optionally validates with the schema
30
36
  const rowAccessor = createTypedRowAccessor({
31
37
  arrowTable: result,
32
38
  validate: schema ? (row) => schema.parse(row) : undefined,
33
39
  });
34
- if (isMounted) {
35
- setData({ ...rowAccessor, arrowTable: result });
36
- }
40
+ setData({ ...rowAccessor, arrowTable: result });
37
41
  }
38
42
  catch (err) {
39
43
  if (isMounted) {
@@ -50,7 +54,7 @@ export function useSql(zodSchemaOrOptions, maybeOptions) {
50
54
  return () => {
51
55
  isMounted = false;
52
56
  };
53
- }, [options.query, options.enabled]);
57
+ }, [options.query, options.enabled, executeSql]);
54
58
  return {
55
59
  data,
56
60
  error,
@@ -1 +1 @@
1
- {"version":3,"file":"useSql.js","sourceRoot":"","sources":["../src/useSql.ts"],"names":[],"mappings":"AACA,OAAO,EAAC,SAAS,EAAE,QAAQ,EAAC,MAAM,OAAO,CAAC;AAE1C,OAAO,EAAC,kBAAkB,EAAC,MAAM,eAAe,CAAC;AACjD,OAAO,EAAC,sBAAsB,EAAmB,MAAM,oBAAoB,CAAC;AA+K5E;;GAEG;AACH,MAAM,UAAU,MAAM,CAIpB,kBAA+D,EAC/D,YAAiD;IAEjD,+CAA+C;IAC/C,MAAM,YAAY,GAAG,YAAY,KAAK,SAAS,CAAC;IAChD,MAAM,OAAO,GAAG,YAAY;QAC1B,CAAC,CAAC,YAAY;QACd,CAAC,CAAE,kBAAyD,CAAC;IAC/D,MAAM,MAAM,GAAG,YAAY,CAAC,CAAC,CAAE,kBAA6B,CAAC,CAAC,CAAC,SAAS,CAAC;IAEzE,MAAM,CAAC,IAAI,EAAE,OAAO,CAAC,GAAG,QAAQ,CAC9B,SAAS,CACV,CAAC;IACF,MAAM,CAAC,KAAK,EAAE,QAAQ,CAAC,GAAG,QAAQ,CAAe,IAAI,CAAC,CAAC;IACvD,MAAM,CAAC,SAAS,EAAE,YAAY,CAAC,GAAG,QAAQ,CAAC,KAAK,CAAC,CAAC;IAElD,MAAM,YAAY,GAAG,kBAAkB,CAAC,CAAC,KAAK,EAAE,EAAE,CAAC,KAAK,CAAC,EAAE,CAAC,YAAY,CAAC,CAAC;IAC1E,SAAS,CAAC,GAAG,EAAE;QACb,IAAI,SAAS,GAAG,IAAI,CAAC;QAErB,MAAM,SAAS,GAAG,KAAK,IAAI,EAAE;YAC3B,IAAI,CAAC,OAAO,CAAC,OAAO,IAAI,OAAO,CAAC,OAAO,KAAK,SAAS,EAAE,CAAC;gBACtD,OAAO;YACT,CAAC;YAED,YAAY,CAAC,IAAI,CAAC,CAAC;YACnB,QAAQ,CAAC,IAAI,CAAC,CAAC;YAEf,IAAI,CAAC;gBACH,MAAM,SAAS,GAAG,MAAM,YAAY,EAAE,CAAC;gBACvC,MAAM,MAAM,GAAG,MAAM,SAAS,CAAC,KAAK,CAAC,OAAO,CAAC,KAAK,CAAC,CAAC;gBAEpD,kEAAkE;gBAClE,MAAM,WAAW,GAAG,sBAAsB,CAAM;oBAC9C,UAAU,EAAE,MAAM;oBAClB,QAAQ,EAAE,MAAM,CAAC,CAAC,CAAC,CAAC,GAAY,EAAE,EAAE,CAAC,MAAM,CAAC,KAAK,CAAC,GAAG,CAAC,CAAC,CAAC,CAAC,SAAS;iBACnE,CAAC,CAAC;gBAEH,IAAI,SAAS,EAAE,CAAC;oBACd,OAAO,CAAC,EAAC,GAAG,WAAW,EAAE,UAAU,EAAE,MAAM,EAAC,CAAC,CAAC;gBAChD,CAAC;YACH,CAAC;YAAC,OAAO,GAAG,EAAE,CAAC;gBACb,IAAI,SAAS,EAAE,CAAC;oBACd,QAAQ,CAAC,GAAG,YAAY,KAAK,CAAC,CAAC,CAAC,GAAG,CAAC,CAAC,CAAC,IAAI,KAAK,CAAC,MAAM,CAAC,GAAG,CAAC,CAAC,CAAC,CAAC;gBAChE,CAAC;YACH,CAAC;oBAAS,CAAC;gBACT,IAAI,SAAS,EAAE,CAAC;oBACd,YAAY,CAAC,KAAK,CAAC,CAAC;gBACtB,CAAC;YACH,CAAC;QACH,CAAC,CAAC;QAEF,SAAS,EAAE,CAAC;QAEZ,OAAO,GAAG,EAAE;YACV,SAAS,GAAG,KAAK,CAAC;QACpB,CAAC,CAAC;IACJ,CAAC,EAAE,CAAC,OAAO,CAAC,KAAK,EAAE,OAAO,CAAC,OAAO,CAAC,CAAC,CAAC;IAErC,OAAO;QACL,IAAI;QACJ,KAAK;QACL,SAAS;KACV,CAAC;AACJ,CAAC;AAED;;GAEG;AAC
H,MAAM,CAAC,MAAM,cAAc,GAAG,MAAM,CAAC","sourcesContent":["import * as arrow from 'apache-arrow';\nimport {useEffect, useState} from 'react';\nimport {z} from 'zod';\nimport {useStoreWithDuckDb} from './DuckDbSlice';\nimport {createTypedRowAccessor, TypedRowAccessor} from './typedRowAccessor';\n\n/**\n * A wrapper interface that exposes the underlying Arrow table,\n * a typed row accessor, and the number of rows.\n */\nexport interface UseSqlQueryResult<T> extends TypedRowAccessor<T> {\n /** The underlying Arrow table */\n arrowTable: arrow.Table;\n}\n\n/**\n * @deprecated Use UseSqlQueryResult instead\n */\nexport type DuckDbQueryResult<T> = UseSqlQueryResult<T>;\n\n/**\n * A React hook for executing SQL queries with automatic state management.\n * Provides two ways to ensure type safety:\n * 1. Using TypeScript types (compile-time safety only)\n * 2. Using Zod schemas (both compile-time and runtime validation)\n *\n * @example\n * ```typescript\n * // Option 1: Using TypeScript types (faster, no runtime validation)\n * interface User {\n * id: number;\n * name: string;\n * email: string;\n * }\n *\n * const {data, isLoading, error} = useSql<User>({\n * query: 'SELECT id, name, email FROM users'\n * });\n *\n * // Option 2: Using Zod schema (slower but with runtime validation)\n * const userSchema = z.object({\n * id: z.number(),\n * name: z.string(),\n * email: z.string().email(),\n * createdAt: z.string().transform(str => new Date(str)) // Transform string to Date\n * });\n *\n * const {data: validatedData, isLoading, error} = useSql(\n * userSchema,\n * {query: 'SELECT id, name, email, created_at as createdAt FROM users'}\n * );\n * ```\n *\n * ## Error Handling\n * ```typescript\n * if (isLoading) return <div>Loading...</div>;\n * if (error) {\n * // With Zod, you can catch validation errors specifically\n * if (error instanceof z.ZodError) {\n * return <div>Validation Error: {error.errors[0].message}</div>;\n * }\n * return <div>Error: {error.message}</div>;\n 
* }\n * if (!data) return null;\n * ```\n *\n * ## Data Access Methods\n *\n * There are several ways to access data with different performance characteristics:\n *\n * ### 1. Typed Row Access (getRow, rows(), toArray())\n * - Provides type safety and validation\n * - Converts data to JavaScript objects\n * - Slower for large datasets due to object creation and validation\n *\n * ```typescript\n * // Iterate through rows using the rows() iterator (recommended)\n * for (const user of data.rows()) {\n * console.log(user.name, user.email);\n * }\n *\n * // Traditional for loop with index access\n * for (let i = 0; i < data.length; i++) {\n * const user = data.getRow(i);\n * console.log(`User ${i}: ${user.name} (${user.email})`);\n * }\n *\n * // Get all rows as an array\n * const allUsers = data.toArray();\n *\n * // With Zod schema, transformed fields are available\n * for (const user of validatedData.rows()) {\n * console.log(`Created: ${user.createdAt.toISOString()}`); // createdAt is a Date object\n * }\n * ```\n *\n * ### 2. Direct Arrow Table Access\n * - Much faster for large datasets\n * - Columnar access is more efficient for analytics\n * - No type safety or validation\n *\n * ```typescript\n * // For performance-critical operations with large datasets:\n * const nameColumn = data.arrowTable.getChild('name');\n * const emailColumn = data.arrowTable.getChild('email');\n *\n * // Fast columnar iteration (no object creation)\n * for (let i = 0; i < data.length; i++) {\n * console.log(nameColumn.get(i), emailColumn.get(i));\n * }\n *\n * // Note: For filtering data, it's most efficient to use SQL in your query\n * const { data } = useSql<User>({\n * query: \"SELECT * FROM users WHERE age > 30\"\n * });\n * ```\n *\n * ### 3. 
Using Flechette for Advanced Operations\n *\n * For more advanced Arrow operations, consider using [Flechette](https://idl.uw.edu/flechette/),\n * a faster and lighter alternative to the standard Arrow JS implementation.\n *\n * ```typescript\n * // Example using Flechette with SQL query results\n * import { tableFromIPC } from '@uwdata/flechette';\n *\n * // Convert Arrow table to Flechette table\n * const serializedData = data.arrowTable.serialize();\n * const flechetteTable = tableFromIPC(serializedData);\n *\n * // Extract all columns into a { name: array, ... } object\n * const columns = flechetteTable.toColumns();\n *\n * // Create a new table with a selected subset of columns\n * const subtable = flechetteTable.select(['name', 'email']);\n *\n * // Convert to array of objects with customization options\n * const objects = flechetteTable.toArray({\n * useDate: true, // Convert timestamps to Date objects\n * useMap: true // Create Map objects for key-value pairs\n * });\n *\n * // For large datasets, consider memory management\n * serializedData = null; // Allow garbage collection of the serialized data\n * ```\n *\n * Flechette provides several advantages:\n * - Better performance (1.3-1.6x faster value iteration, 7-11x faster row object extraction)\n * - Smaller footprint (~43k minified vs 163k for Arrow JS)\n * - Support for additional data types (including decimal-to-number conversion)\n * - More flexible data value conversion options\n *\n * @template Row The TypeScript type for each row in the result\n * @param options Configuration object containing the query and execution control\n * @returns Object containing the query result, loading state, and any error\n *\n * @template Schema The Zod schema type that defines the shape and validation of each row\n * @param zodSchema A Zod schema that defines the expected shape and validation rules for each row\n * @param options Configuration object containing the query and execution control\n * @returns Object 
containing the validated query result, loading state, and any error\n */\nexport function useSql<Row>(options: {query: string; enabled?: boolean}): {\n data: UseSqlQueryResult<Row> | undefined;\n error: Error | null;\n isLoading: boolean;\n};\n\nexport function useSql<Schema extends z.ZodType>(\n zodSchema: Schema,\n options: {\n query: string;\n enabled?: boolean;\n },\n): {\n data: UseSqlQueryResult<z.infer<Schema>> | undefined;\n error: Error | null;\n isLoading: boolean;\n};\n\n/**\n * Implementation of useSql that handles both overloads\n */\nexport function useSql<\n Row extends arrow.TypeMap,\n Schema extends z.ZodType = z.ZodType,\n>(\n zodSchemaOrOptions: Schema | {query: string; enabled?: boolean},\n maybeOptions?: {query: string; enabled?: boolean},\n) {\n // Determine if we're using the schema overload\n const hasZodSchema = maybeOptions !== undefined;\n const options = hasZodSchema\n ? maybeOptions\n : (zodSchemaOrOptions as {query: string; enabled?: boolean});\n const schema = hasZodSchema ? (zodSchemaOrOptions as Schema) : undefined;\n\n const [data, setData] = useState<UseSqlQueryResult<Row> | undefined>(\n undefined,\n );\n const [error, setError] = useState<Error | null>(null);\n const [isLoading, setIsLoading] = useState(false);\n\n const getConnector = useStoreWithDuckDb((state) => state.db.getConnector);\n useEffect(() => {\n let isMounted = true;\n\n const fetchData = async () => {\n if (!options.enabled && options.enabled !== undefined) {\n return;\n }\n\n setIsLoading(true);\n setError(null);\n\n try {\n const connector = await getConnector();\n const result = await connector.query(options.query);\n\n // Create a row accessor that optionally validates with the schema\n const rowAccessor = createTypedRowAccessor<Row>({\n arrowTable: result,\n validate: schema ? 
(row: unknown) => schema.parse(row) : undefined,\n });\n\n if (isMounted) {\n setData({...rowAccessor, arrowTable: result});\n }\n } catch (err) {\n if (isMounted) {\n setError(err instanceof Error ? err : new Error(String(err)));\n }\n } finally {\n if (isMounted) {\n setIsLoading(false);\n }\n }\n };\n\n fetchData();\n\n return () => {\n isMounted = false;\n };\n }, [options.query, options.enabled]);\n\n return {\n data,\n error,\n isLoading,\n };\n}\n\n/**\n * @deprecated Use useSql instead\n */\nexport const useDuckDbQuery = useSql;\n"]}
1
+ {"version":3,"file":"useSql.js","sourceRoot":"","sources":["../src/useSql.ts"],"names":[],"mappings":"AACA,OAAO,EAAC,SAAS,EAAE,QAAQ,EAAC,MAAM,OAAO,CAAC;AAE1C,OAAO,EAAC,kBAAkB,EAAC,MAAM,eAAe,CAAC;AACjD,OAAO,EAAC,sBAAsB,EAAmB,MAAM,oBAAoB,CAAC;AAgL5E;;GAEG;AACH,MAAM,UAAU,MAAM,CAIpB,kBAA+D,EAC/D,YAAiD;IAEjD,+CAA+C;IAC/C,MAAM,YAAY,GAAG,YAAY,KAAK,SAAS,CAAC;IAChD,MAAM,OAAO,GAAG,YAAY;QAC1B,CAAC,CAAC,YAAY;QACd,CAAC,CAAE,kBAAyD,CAAC;IAC/D,MAAM,MAAM,GAAG,YAAY,CAAC,CAAC,CAAE,kBAA6B,CAAC,CAAC,CAAC,SAAS,CAAC;IAEzE,MAAM,CAAC,IAAI,EAAE,OAAO,CAAC,GAAG,QAAQ,CAC9B,SAAS,CACV,CAAC;IACF,MAAM,CAAC,KAAK,EAAE,QAAQ,CAAC,GAAG,QAAQ,CAAe,IAAI,CAAC,CAAC;IACvD,MAAM,CAAC,SAAS,EAAE,YAAY,CAAC,GAAG,QAAQ,CAAC,KAAK,CAAC,CAAC;IAElD,MAAM,UAAU,GAAG,kBAAkB,CAAC,CAAC,KAAK,EAAE,EAAE,CAAC,KAAK,CAAC,EAAE,CAAC,UAAU,CAAC,CAAC;IAEtE,SAAS,CAAC,GAAG,EAAE;QACb,IAAI,SAAS,GAAG,IAAI,CAAC;QAErB,MAAM,SAAS,GAAG,KAAK,IAAI,EAAE;YAC3B,IAAI,CAAC,OAAO,CAAC,OAAO,IAAI,OAAO,CAAC,OAAO,KAAK,SAAS,EAAE,CAAC;gBACtD,OAAO;YACT,CAAC;YAED,YAAY,CAAC,IAAI,CAAC,CAAC;YACnB,QAAQ,CAAC,IAAI,CAAC,CAAC;YAEf,IAAI,CAAC;gBACH,MAAM,WAAW,GAAG,MAAM,UAAU,CAAC,OAAO,CAAC,KAAK,CAAC,CAAC;gBACpD,IAAI,CAAC,WAAW,IAAI,CAAC,SAAS,EAAE,CAAC;oBAC/B,OAAO;gBACT,CAAC;gBAED,MAAM,MAAM,GAAG,MAAM,WAAW,CAAC,MAAM,CAAC;gBACxC,IAAI,CAAC,SAAS,EAAE,CAAC;oBACf,OAAO;gBACT,CAAC;gBAED,kEAAkE;gBAClE,MAAM,WAAW,GAAG,sBAAsB,CAAM;oBAC9C,UAAU,EAAE,MAAM;oBAClB,QAAQ,EAAE,MAAM,CAAC,CAAC,CAAC,CAAC,GAAY,EAAE,EAAE,CAAC,MAAM,CAAC,KAAK,CAAC,GAAG,CAAC,CAAC,CAAC,CAAC,SAAS;iBACnE,CAAC,CAAC;gBAEH,OAAO,CAAC,EAAC,GAAG,WAAW,EAAE,UAAU,EAAE,MAAM,EAAC,CAAC,CAAC;YAChD,CAAC;YAAC,OAAO,GAAG,EAAE,CAAC;gBACb,IAAI,SAAS,EAAE,CAAC;oBACd,QAAQ,CAAC,GAAG,YAAY,KAAK,CAAC,CAAC,CAAC,GAAG,CAAC,CAAC,CAAC,IAAI,KAAK,CAAC,MAAM,CAAC,GAAG,CAAC,CAAC,CAAC,CAAC;gBAChE,CAAC;YACH,CAAC;oBAAS,CAAC;gBACT,IAAI,SAAS,EAAE,CAAC;oBACd,YAAY,CAAC,KAAK,CAAC,CAAC;gBACtB,CAAC;YACH,CAAC;QACH,CAAC,CAAC;QAEF,SAAS,EAAE,CAAC;QAEZ,OAAO,GAAG,EAAE;YACV,SAAS,GAAG,KAAK,CAAC;QACpB,CAAC,CAAC;IACJ,CAAC,EAAE,CAAC,OAAO,CAAC,KAAK,EAAE,OAAO,CAAC,OAAO,EAAE,UAAU,
CAAC,CAAC,CAAC;IAEjD,OAAO;QACL,IAAI;QACJ,KAAK;QACL,SAAS;KACV,CAAC;AACJ,CAAC;AAED;;GAEG;AACH,MAAM,CAAC,MAAM,cAAc,GAAG,MAAM,CAAC","sourcesContent":["import * as arrow from 'apache-arrow';\nimport {useEffect, useState} from 'react';\nimport {z} from 'zod';\nimport {useStoreWithDuckDb} from './DuckDbSlice';\nimport {createTypedRowAccessor, TypedRowAccessor} from './typedRowAccessor';\nimport {QueryHandle} from './connectors/DuckDbConnector';\n\n/**\n * A wrapper interface that exposes the underlying Arrow table,\n * a typed row accessor, and the number of rows.\n */\nexport interface UseSqlQueryResult<T> extends TypedRowAccessor<T> {\n /** The underlying Arrow table */\n arrowTable: arrow.Table;\n}\n\n/**\n * @deprecated Use UseSqlQueryResult instead\n */\nexport type DuckDbQueryResult<T> = UseSqlQueryResult<T>;\n\n/**\n * A React hook for executing SQL queries with automatic state management.\n * Provides two ways to ensure type safety:\n * 1. Using TypeScript types (compile-time safety only)\n * 2. 
Using Zod schemas (both compile-time and runtime validation)\n *\n * @example\n * ```typescript\n * // Option 1: Using TypeScript types (faster, no runtime validation)\n * interface User {\n * id: number;\n * name: string;\n * email: string;\n * }\n *\n * const {data, isLoading, error} = useSql<User>({\n * query: 'SELECT id, name, email FROM users'\n * });\n *\n * // Option 2: Using Zod schema (slower but with runtime validation)\n * const userSchema = z.object({\n * id: z.number(),\n * name: z.string(),\n * email: z.string().email(),\n * createdAt: z.string().transform(str => new Date(str)) // Transform string to Date\n * });\n *\n * const {data: validatedData, isLoading, error} = useSql(\n * userSchema,\n * {query: 'SELECT id, name, email, created_at as createdAt FROM users'}\n * );\n * ```\n *\n * ## Error Handling\n * ```typescript\n * if (isLoading) return <div>Loading...</div>;\n * if (error) {\n * // With Zod, you can catch validation errors specifically\n * if (error instanceof z.ZodError) {\n * return <div>Validation Error: {error.errors[0].message}</div>;\n * }\n * return <div>Error: {error.message}</div>;\n * }\n * if (!data) return null;\n * ```\n *\n * ## Data Access Methods\n *\n * There are several ways to access data with different performance characteristics:\n *\n * ### 1. 
Typed Row Access (getRow, rows(), toArray())\n * - Provides type safety and validation\n * - Converts data to JavaScript objects\n * - Slower for large datasets due to object creation and validation\n *\n * ```typescript\n * // Iterate through rows using the rows() iterator (recommended)\n * for (const user of data.rows()) {\n * console.log(user.name, user.email);\n * }\n *\n * // Traditional for loop with index access\n * for (let i = 0; i < data.length; i++) {\n * const user = data.getRow(i);\n * console.log(`User ${i}: ${user.name} (${user.email})`);\n * }\n *\n * // Get all rows as an array\n * const allUsers = data.toArray();\n *\n * // With Zod schema, transformed fields are available\n * for (const user of validatedData.rows()) {\n * console.log(`Created: ${user.createdAt.toISOString()}`); // createdAt is a Date object\n * }\n * ```\n *\n * ### 2. Direct Arrow Table Access\n * - Much faster for large datasets\n * - Columnar access is more efficient for analytics\n * - No type safety or validation\n *\n * ```typescript\n * // For performance-critical operations with large datasets:\n * const nameColumn = data.arrowTable.getChild('name');\n * const emailColumn = data.arrowTable.getChild('email');\n *\n * // Fast columnar iteration (no object creation)\n * for (let i = 0; i < data.length; i++) {\n * console.log(nameColumn.get(i), emailColumn.get(i));\n * }\n *\n * // Note: For filtering data, it's most efficient to use SQL in your query\n * const { data } = useSql<User>({\n * query: \"SELECT * FROM users WHERE age > 30\"\n * });\n * ```\n *\n * ### 3. 
Using Flechette for Advanced Operations\n *\n * For more advanced Arrow operations, consider using [Flechette](https://idl.uw.edu/flechette/),\n * a faster and lighter alternative to the standard Arrow JS implementation.\n *\n * ```typescript\n * // Example using Flechette with SQL query results\n * import { tableFromIPC } from '@uwdata/flechette';\n *\n * // Convert Arrow table to Flechette table\n * const serializedData = data.arrowTable.serialize();\n * const flechetteTable = tableFromIPC(serializedData);\n *\n * // Extract all columns into a { name: array, ... } object\n * const columns = flechetteTable.toColumns();\n *\n * // Create a new table with a selected subset of columns\n * const subtable = flechetteTable.select(['name', 'email']);\n *\n * // Convert to array of objects with customization options\n * const objects = flechetteTable.toArray({\n * useDate: true, // Convert timestamps to Date objects\n * useMap: true // Create Map objects for key-value pairs\n * });\n *\n * // For large datasets, consider memory management\n * serializedData = null; // Allow garbage collection of the serialized data\n * ```\n *\n * Flechette provides several advantages:\n * - Better performance (1.3-1.6x faster value iteration, 7-11x faster row object extraction)\n * - Smaller footprint (~43k minified vs 163k for Arrow JS)\n * - Support for additional data types (including decimal-to-number conversion)\n * - More flexible data value conversion options\n *\n * @template Row The TypeScript type for each row in the result\n * @param options Configuration object containing the query and execution control\n * @returns Object containing the query result, loading state, and any error\n *\n * @template Schema The Zod schema type that defines the shape and validation of each row\n * @param zodSchema A Zod schema that defines the expected shape and validation rules for each row\n * @param options Configuration object containing the query and execution control\n * @returns Object 
containing the validated query result, loading state, and any error\n */\nexport function useSql<Row>(options: {query: string; enabled?: boolean}): {\n data: UseSqlQueryResult<Row> | undefined;\n error: Error | null;\n isLoading: boolean;\n};\n\nexport function useSql<Schema extends z.ZodType>(\n zodSchema: Schema,\n options: {\n query: string;\n enabled?: boolean;\n },\n): {\n data: UseSqlQueryResult<z.infer<Schema>> | undefined;\n error: Error | null;\n isLoading: boolean;\n};\n\n/**\n * Implementation of useSql that handles both overloads\n */\nexport function useSql<\n Row extends arrow.TypeMap,\n Schema extends z.ZodType = z.ZodType,\n>(\n zodSchemaOrOptions: Schema | {query: string; enabled?: boolean},\n maybeOptions?: {query: string; enabled?: boolean},\n) {\n // Determine if we're using the schema overload\n const hasZodSchema = maybeOptions !== undefined;\n const options = hasZodSchema\n ? maybeOptions\n : (zodSchemaOrOptions as {query: string; enabled?: boolean});\n const schema = hasZodSchema ? (zodSchemaOrOptions as Schema) : undefined;\n\n const [data, setData] = useState<UseSqlQueryResult<Row> | undefined>(\n undefined,\n );\n const [error, setError] = useState<Error | null>(null);\n const [isLoading, setIsLoading] = useState(false);\n\n const executeSql = useStoreWithDuckDb((state) => state.db.executeSql);\n\n useEffect(() => {\n let isMounted = true;\n\n const fetchData = async () => {\n if (!options.enabled && options.enabled !== undefined) {\n return;\n }\n\n setIsLoading(true);\n setError(null);\n\n try {\n const queryHandle = await executeSql(options.query);\n if (!queryHandle || !isMounted) {\n return;\n }\n\n const result = await queryHandle.result;\n if (!isMounted) {\n return;\n }\n\n // Create a row accessor that optionally validates with the schema\n const rowAccessor = createTypedRowAccessor<Row>({\n arrowTable: result,\n validate: schema ? 
(row: unknown) => schema.parse(row) : undefined,\n });\n\n setData({...rowAccessor, arrowTable: result});\n } catch (err) {\n if (isMounted) {\n setError(err instanceof Error ? err : new Error(String(err)));\n }\n } finally {\n if (isMounted) {\n setIsLoading(false);\n }\n }\n };\n\n fetchData();\n\n return () => {\n isMounted = false;\n };\n }, [options.query, options.enabled, executeSql]);\n\n return {\n data,\n error,\n isLoading,\n };\n}\n\n/**\n * @deprecated Use useSql instead\n */\nexport const useDuckDbQuery = useSql;\n"]}
package/package.json CHANGED
@@ -1,11 +1,10 @@
1
1
  {
2
2
  "name": "@sqlrooms/duckdb",
3
- "version": "0.15.0",
3
+ "version": "0.16.1",
4
4
  "main": "dist/index.js",
5
5
  "types": "dist/index.d.ts",
6
6
  "module": "dist/index.js",
7
7
  "type": "module",
8
- "private": false,
9
8
  "author": "Ilya Boyandin <ilya@boyandin.me>",
10
9
  "license": "MIT",
11
10
  "repository": {
@@ -19,20 +18,22 @@
19
18
  "access": "public"
20
19
  },
21
20
  "dependencies": {
22
- "@duckdb/duckdb-wasm": "1.29.0",
23
- "@sqlrooms/project": "0.15.0",
24
- "@sqlrooms/utils": "0.15.0",
25
- "apache-arrow": "^18.1.0",
21
+ "@duckdb/duckdb-wasm": "1.29.1-dev204.0",
22
+ "@sqlrooms/project": "0.16.1",
23
+ "@sqlrooms/utils": "0.16.1",
26
24
  "fast-deep-equal": "^3.1.3",
27
25
  "immer": "^10.1.1",
28
- "zod": "^3.24.1",
29
- "zustand": "^5.0.3"
26
+ "zod": "^3.25.57",
27
+ "zustand": "^5.0.5"
30
28
  },
31
29
  "devDependencies": {
32
- "@sqlrooms/project-config": "0.15.0",
33
- "@types/jest": "^29.5.12",
30
+ "@sqlrooms/project-config": "0.16.1",
31
+ "@types/jest": "^29.5.14",
34
32
  "jest": "^29.7.0",
35
- "ts-jest": "^29.1.2"
33
+ "ts-jest": "^29.3.4"
34
+ },
35
+ "peerDependencies": {
36
+ "apache-arrow": ">=17"
36
37
  },
37
38
  "scripts": {
38
39
  "dev": "tsc -w",
@@ -43,5 +44,5 @@
43
44
  "test": "jest",
44
45
  "test:watch": "jest --watch"
45
46
  },
46
- "gitHead": "6eb76a841a2ccb40d190720d309db51f1e8a9088"
47
+ "gitHead": "c2015b30f09c327c14008143d5c9752d9ebc3949"
47
48
  }