@sqlrooms/duckdb 0.0.0

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
@@ -0,0 +1,4 @@
1
+
2
+ > @sqlrooms/duckdb@0.0.0 build /Users/ilya/Workspace/sqlrooms/packages/duckdb
3
+ > tsc
4
+
@@ -0,0 +1,58 @@
1
+
2
+ 
3
+ > @sqlrooms/duckdb@1.0.0 dev /Users/ilya/Workspace/sqlrooms/packages/duckdb
4
+ > tsc -w
5
+
6
+ [7:31:02 PM] Starting compilation in watch mode...
7
+
8
+ src/duckdb.ts:11:5 - error TS2345: Argument of type 'import("/Users/ilya/Workspace/sqlrooms/node_modules/.pnpm/apache-arrow@17.0.0/node_modules/apache-arrow/table").Table<any>' is not assignable to parameter of type 'import("/Users/ilya/Workspace/sqlrooms/node_modules/.pnpm/apache-arrow@18.1.0/node_modules/apache-arrow/table").Table<any>'.
9
+ The types of 'TType.TArray' are incompatible between these types.
10
+ Type 'import("/Users/ilya/Workspace/sqlrooms/node_modules/.pnpm/apache-arrow@17.0.0/node_modules/apache-arrow/row/struct").StructRowProxy<any>[]' is not assignable to type 'import("/Users/ilya/Workspace/sqlrooms/node_modules/.pnpm/apache-arrow@18.1.0/node_modules/apache-arrow/row/struct").StructRowProxy<any>[]'.
11
+ Type 'import("/Users/ilya/Workspace/sqlrooms/node_modules/.pnpm/apache-arrow@17.0.0/node_modules/apache-arrow/row/struct").StructRowProxy<any>' is not assignable to type 'import("/Users/ilya/Workspace/sqlrooms/node_modules/.pnpm/apache-arrow@18.1.0/node_modules/apache-arrow/row/struct").StructRowProxy<any>'.
12
+ Type 'StructRow<any> & { [x: string]: any; } & { [key: symbol]: any; }' is missing the following properties from type 'StructRow<any>': [kRowIndex], [kParent]
13
+
14
+  11 await conn.query(
15
+    ~~~~~~~~~~~~~~~~~
16
+  12 `CREATE OR REPLACE TABLE main.${tableName} AS (
17
+   ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
18
+ ...
19
+  14 )`,
20
+   ~~~~~~~~~
21
+  15 ),
22
+   ~~~~~
23
+
24
+ src/duckdb.ts:59:5 - error TS2345: Argument of type 'import("/Users/ilya/Workspace/sqlrooms/node_modules/.pnpm/apache-arrow@17.0.0/node_modules/apache-arrow/table").Table<any>' is not assignable to parameter of type 'import("/Users/ilya/Workspace/sqlrooms/node_modules/.pnpm/apache-arrow@18.1.0/node_modules/apache-arrow/table").Table<any>'.
25
+ The types of 'TType.TArray' are incompatible between these types.
26
+ Type 'import("/Users/ilya/Workspace/sqlrooms/node_modules/.pnpm/apache-arrow@17.0.0/node_modules/apache-arrow/row/struct").StructRowProxy<any>[]' is not assignable to type 'import("/Users/ilya/Workspace/sqlrooms/node_modules/.pnpm/apache-arrow@18.1.0/node_modules/apache-arrow/row/struct").StructRowProxy<any>[]'.
27
+ Type 'import("/Users/ilya/Workspace/sqlrooms/node_modules/.pnpm/apache-arrow@17.0.0/node_modules/apache-arrow/row/struct").StructRowProxy<any>' is not assignable to type 'import("/Users/ilya/Workspace/sqlrooms/node_modules/.pnpm/apache-arrow@18.1.0/node_modules/apache-arrow/row/struct").StructRowProxy<any>'.
28
+ Type 'StructRow<any> & { [x: string]: any; } & { [key: symbol]: any; }' is missing the following properties from type 'StructRow<any>': [kRowIndex], [kParent]
29
+
30
+ 59 await conn.query(
31
+    ~~~~~~~~~~~~~~~~~
32
+ 60 `CREATE OR REPLACE ${mode} ${schema}.${tableName} AS
33
+   ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
34
+ 61 SELECT * FROM ${readFileQuery}`,
35
+   ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
36
+ 62 ),
37
+   ~~~~~
38
+
39
+ src/exportToCsv.ts:26:35 - error TS2345: Argument of type 'import("/Users/ilya/Workspace/sqlrooms/node_modules/.pnpm/apache-arrow@17.0.0/node_modules/apache-arrow/table").Table<any>' is not assignable to parameter of type 'import("/Users/ilya/Workspace/sqlrooms/node_modules/.pnpm/apache-arrow@18.1.0/node_modules/apache-arrow/table").Table<any>'.
40
+ The types of 'TType.TArray' are incompatible between these types.
41
+ Type 'import("/Users/ilya/Workspace/sqlrooms/node_modules/.pnpm/apache-arrow@17.0.0/node_modules/apache-arrow/row/struct").StructRowProxy<any>[]' is not assignable to type 'import("/Users/ilya/Workspace/sqlrooms/node_modules/.pnpm/apache-arrow@18.1.0/node_modules/apache-arrow/row/struct").StructRowProxy<any>[]'.
42
+ Type 'import("/Users/ilya/Workspace/sqlrooms/node_modules/.pnpm/apache-arrow@17.0.0/node_modules/apache-arrow/row/struct").StructRowProxy<any>' is not assignable to type 'import("/Users/ilya/Workspace/sqlrooms/node_modules/.pnpm/apache-arrow@18.1.0/node_modules/apache-arrow/row/struct").StructRowProxy<any>'.
43
+ Type 'StructRow<any> & { [x: string]: any; } & { [key: symbol]: any; }' is missing the following properties from type 'StructRow<any>': [kRowIndex], [kParent]
44
+
45
+ 26 const csvChunk = convertToCsv(results, !headersAdded);
46
+    ~~~~~~~
47
+
48
+ src/useDuckConn.ts:242:28 - error TS2345: Argument of type 'import("/Users/ilya/Workspace/sqlrooms/node_modules/.pnpm/apache-arrow@17.0.0/node_modules/apache-arrow/table").Table<any>' is not assignable to parameter of type 'import("/Users/ilya/Workspace/sqlrooms/node_modules/.pnpm/apache-arrow@18.1.0/node_modules/apache-arrow/table").Table<any>'.
49
+ The types of 'TType.TArray' are incompatible between these types.
50
+ Type 'import("/Users/ilya/Workspace/sqlrooms/node_modules/.pnpm/apache-arrow@17.0.0/node_modules/apache-arrow/row/struct").StructRowProxy<any>[]' is not assignable to type 'import("/Users/ilya/Workspace/sqlrooms/node_modules/.pnpm/apache-arrow@18.1.0/node_modules/apache-arrow/row/struct").StructRowProxy<any>[]'.
51
+ Type 'import("/Users/ilya/Workspace/sqlrooms/node_modules/.pnpm/apache-arrow@17.0.0/node_modules/apache-arrow/row/struct").StructRowProxy<any>' is not assignable to type 'import("/Users/ilya/Workspace/sqlrooms/node_modules/.pnpm/apache-arrow@18.1.0/node_modules/apache-arrow/row/struct").StructRowProxy<any>'.
52
+ Type 'StructRow<any> & { [x: string]: any; } & { [key: symbol]: any; }' is missing the following properties from type 'StructRow<any>': [kRowIndex], [kParent]
53
+
54
+ 242 return getColValAsNumber(res) > 0;
55
+    ~~~
56
+
57
+ [7:31:04 PM] Found 4 errors. Watching for file changes.
58
+
@@ -0,0 +1,4 @@
1
+
2
+ > @sqlrooms/duckdb@0.0.0 lint /Users/ilya/Workspace/sqlrooms/packages/duckdb
3
+ > eslint .
4
+
package/CHANGELOG.md ADDED
@@ -0,0 +1,8 @@
1
+ # Change Log
2
+
3
+ All notable changes to this project will be documented in this file.
4
+ See [Conventional Commits](https://conventionalcommits.org) for commit guidelines.
5
+
6
+
7
+
8
+ **Note:** Version bump only for package @sqlrooms/duckdb
package/LICENSE.md ADDED
@@ -0,0 +1,9 @@
1
+ MIT License
2
+
3
+ Copyright 2025 Ilya Boyandin
4
+
5
+ Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files (the “Software”), to deal in the Software without restriction, including without limitation the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, and to permit persons to whom the Software is furnished to do so, subject to the following conditions:
6
+
7
+ The above copyright notice and this permission notice shall be included in all copies or substantial portions of the Software.
8
+
9
+ THE SOFTWARE IS PROVIDED “AS IS”, WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
@@ -0,0 +1,28 @@
1
+ import * as duckdb from '@duckdb/duckdb-wasm';
2
+ import * as arrow from 'apache-arrow';
3
+ import { ColumnTypeCategory } from '@fsq/ui';
4
+ export type DuckConnection = {
5
+ conn: duckdb.AsyncDuckDBConnection;
6
+ db: duckdb.AsyncDuckDB;
7
+ worker: Worker;
8
+ terminate: () => void;
9
+ };
10
+ export declare function getDuckDb(): Promise<DuckConnection>;
11
+ export declare function useDuckDb(): any;
12
+ export declare function getDuckDbTypeCategory(columnType: string): ColumnTypeCategory | undefined;
13
+ export declare function getDuckTables(schema?: string): Promise<string[]>;
14
+ export declare function getColValAsNumber(res: arrow.Table, column?: string | number, index?: number): number;
15
+ export declare const escapeVal: (val: any) => string;
16
+ export type FileTableInfo = {
17
+ fileName: string;
18
+ tableName: string;
19
+ numRows?: number;
20
+ };
21
+ export declare function checkTableExists(tableName: string, schema?: string): Promise<boolean>;
22
+ export declare function addFileToDb({ file, schema, tableName, existingTables }: {
23
+ file: File;
24
+ schema?: string;
25
+ tableName?: string;
26
+ existingTables?: string[];
27
+ }): Promise<FileTableInfo>;
28
+ //# sourceMappingURL=duckdb%20copy.d.ts.map
@@ -0,0 +1 @@
1
+ {"version":3,"file":"duckdb copy.d.ts","sourceRoot":"","sources":["../src/duckdb copy.ts"],"names":[],"mappings":"AAAA,OAAO,KAAK,MAAM,MAAM,qBAAqB,CAAC;AAC9C,OAAO,KAAK,KAAK,MAAM,cAAc,CAAC;AAKtC,OAAO,EAAC,kBAAkB,EAAC,MAAM,SAAS,CAAC;AAgB3C,MAAM,MAAM,cAAc,GAAG;IAC3B,IAAI,EAAE,MAAM,CAAC,qBAAqB,CAAC;IACnC,EAAE,EAAE,MAAM,CAAC,WAAW,CAAC;IACvB,MAAM,EAAE,MAAM,CAAC;IACf,SAAS,EAAE,MAAM,IAAI,CAAC;CACvB,CAAC;AAIF,wBAAsB,SAAS,4BAqC9B;AAED,wBAAgB,SAAS,QAWxB;AA4BD,wBAAgB,qBAAqB,CAAC,UAAU,EAAE,MAAM,GAAG,kBAAkB,GAAG,SAAS,CAQxF;AAED,wBAAsB,aAAa,CAAC,MAAM,SAAS,GAAG,OAAO,CAAC,MAAM,EAAE,CAAC,CAYtE;AAED,wBAAgB,iBAAiB,CAC/B,GAAG,EAAE,KAAK,CAAC,KAAK,EAChB,MAAM,GAAE,MAAM,GAAG,MAAU,EAC3B,KAAK,SAAI,GACR,MAAM,CASR;AAED,eAAO,MAAM,SAAS,QAAS,GAAG,WAEjC,CAAC;AAEF,MAAM,MAAM,aAAa,GAAG;IAC1B,QAAQ,EAAE,MAAM,CAAC;IACjB,SAAS,EAAE,MAAM,CAAC;IAClB,OAAO,CAAC,EAAE,MAAM,CAAC;CAClB,CAAC;AAEF,wBAAsB,gBAAgB,CAAC,SAAS,EAAE,MAAM,EAAE,MAAM,SAAS,oBAYxE;AAED,wBAAsB,WAAW,CAAC,EAChC,IAAI,EACJ,MAAe,EACf,SAAS,EACT,cAAmB,EACpB,EAAE;IACD,IAAI,EAAE,IAAI,CAAC;IACX,MAAM,CAAC,EAAE,MAAM,CAAC;IAChB,SAAS,CAAC,EAAE,MAAM,CAAC;IACnB,cAAc,CAAC,EAAE,MAAM,EAAE,CAAC;CAC3B,GAAG,OAAO,CAAC,aAAa,CAAC,CAmBzB"}
@@ -0,0 +1,140 @@
1
+ import * as duckdb from '@duckdb/duckdb-wasm';
2
+ import eh_worker from '@duckdb/duckdb-wasm/dist/duckdb-browser-eh.worker.js?url';
3
+ import mvp_worker from '@duckdb/duckdb-wasm/dist/duckdb-browser-mvp.worker.js?url';
4
+ import duckdb_wasm_next from '@duckdb/duckdb-wasm/dist/duckdb-eh.wasm?url';
5
+ import duckdb_wasm from '@duckdb/duckdb-wasm/dist/duckdb-mvp.wasm?url';
6
+ import { useSuspenseQuery } from '@tanstack/react-query';
7
+ import { convertToUniqueColumnOrTableName } from './unique-names';
// WASM bundle candidates for duckdb-wasm. `duckdb.selectBundle` picks the
// best supported one at runtime: `eh` (exception-handling build) when the
// browser supports it, otherwise the baseline `mvp` build.
const MANUAL_BUNDLES = {
  mvp: {
    mainModule: duckdb_wasm,
    mainWorker: mvp_worker
  },
  eh: {
    mainModule: duckdb_wasm_next,
    // mainWorker: new URL('@duckdb/duckdb-wasm/dist/duckdb-browser-eh.worker.js', import.meta.url).toString(),
    mainWorker: eh_worker
  }
};
// Module-level singleton holding the active DuckDB connection.
let duckConnection = null;
// In-flight initialization promise so that concurrent callers share a single
// initialization instead of each spawning their own worker and database.
let duckConnectionPromise = null;

/**
 * Returns the shared DuckDB connection, initializing it on first use.
 *
 * Fix over the original: the first N concurrent callers no longer race and
 * each create their own worker/DB — they all await the same in-flight
 * initialization promise.
 *
 * @returns {Promise<{conn, db, worker, terminate: () => void}>}
 */
export async function getDuckDb() {
  if (duckConnection) {
    return duckConnection;
  }
  if (!duckConnectionPromise) {
    duckConnectionPromise = initDuckDb().finally(() => {
      // Clear the in-flight marker whether init succeeded or failed, so a
      // failed init can be retried on the next call.
      duckConnectionPromise = null;
    });
  }
  return duckConnectionPromise;
}

/** Performs the actual one-time DuckDB/WASM initialization. */
async function initDuckDb() {
  // Select a bundle based on browser checks
  const bundle = await duckdb.selectBundle(MANUAL_BUNDLES);
  if (!bundle.mainWorker) {
    throw new Error('No worker found');
  }
  console.time('DuckDB initialization');
  const worker = new Worker(bundle.mainWorker);
  const logger = new duckdb.ConsoleLogger();
  const db = new duckdb.AsyncDuckDB(logger, worker);
  await db.instantiate(bundle.mainModule, bundle.pthreadWorker);
  // TODO: enable OPFS once duckdb-wasm supports it
  // await db.open({
  //   path: 'opfs://sql-workbooks.db',
  //   accessMode: duckdb.DuckDBAccessMode.READ_WRITE
  // });
  const conn = await db.connect(); // Connect to db
  console.timeEnd('DuckDB initialization');
  duckConnection = {
    conn,
    db,
    worker,
    terminate: () => {
      conn.close();
      db.terminate();
      worker.terminate();
      duckConnection = null;
    }
  };
  return duckConnection;
}
/**
 * React hook returning the shared DuckDB connection via a suspense query.
 * Suspends until the connection is ready; never retries and is never
 * garbage-collected from the query cache.
 */
export function useDuckDb() {
  const { data } = useSuspenseQuery({
    queryKey: ['duckConnection'],
    queryFn: () => {
      console.log('Attempting to get DuckDB connection');
      return getDuckDb();
    },
    retry: false,
    gcTime: Infinity
  });
  return data;
}
// Regex patterns (matched against the lowercased DuckDB column type) grouped
// by the coarse UI category they map to. Entry order matters: the first
// category with a matching pattern wins.
const DUCKDB_TYPE_CATEGORIES = {
  string: [/^varchar/, /^char/, /^text/, /^string/, /^uuid/, /^bit/],
  number: [
    /^tinyint/,
    /^smallint/,
    /^integer/,
    /^bigint/,
    /^hugeint/,
    /^utinyint/,
    /^usmallint/,
    /^uinteger/,
    /^ubigint/,
    /^uhugeint/,
    /^decimal/,
    /^numeric/,
    /^double/,
    /^float/
  ],
  boolean: [/^bool(ean)?/],
  binary: [/^blob/, /^bytea/, /^binary/, /^varbinary/],
  datetime: [/^date$/, /^time$/, /^timestamp$/, /^timestamptz$/, /^interval$/],
  json: [/^json$/],
  struct: [/^struct$/, /^list$/, /^map$/, /^array$/, /^union$/],
  geometry: [/^geometry/]
};

/**
 * Maps a DuckDB column type name (e.g. 'VARCHAR', 'DECIMAL(18,3)') to a
 * coarse category, or undefined when the type is not recognized.
 *
 * @param {string} columnType - DuckDB column type, any letter case.
 * @returns {string | undefined} The matching category key.
 */
export function getDuckDbTypeCategory(columnType) {
  const normalized = columnType.toLowerCase();
  const entry = Object.entries(DUCKDB_TYPE_CATEGORIES).find(([, patterns]) =>
    patterns.some((pattern) => pattern.test(normalized))
  );
  return entry?.[0];
}
/**
 * Lists the table names present in the given schema, alphabetically.
 *
 * @param {string} [schema='main'] - Schema to inspect.
 * @returns {Promise<string[]>} Table names.
 */
export async function getDuckTables(schema = 'main') {
  const { conn } = await getDuckDb();
  const tablesResults = await conn.query(`SELECT * FROM information_schema.tables
       WHERE table_schema = '${schema}'
       ORDER BY table_name`);
  const tableNames = [];
  // Hoist the column lookup out of the row loop.
  const nameColumn = tablesResults.getChild('table_name');
  for (let rowIndex = 0; rowIndex < tablesResults.numRows; rowIndex++) {
    tableNames.push(nameColumn?.get(rowIndex));
  }
  return tableNames;
}
/**
 * Reads a single cell from an Arrow result table and coerces it to a number.
 *
 * @param {import('apache-arrow').Table} res - Query result table.
 * @param {string | number} [column=0] - Column index or column name.
 * @param {number} [index=0] - Row index.
 * @returns {number} The cell as a number, or NaN when missing/null.
 */
export function getColValAsNumber(res, column = 0, index = 0) {
  const vector =
    typeof column === 'number' ? res.getChildAt(column) : res.getChild(column);
  const value = vector?.get(index);
  if (value == null) {
    return NaN;
  }
  // DuckDB may return the value wrapped in an array (e.g. for bigint);
  // unwrap the first element when present.
  const scalar = value[0] ?? value;
  return Number(scalar);
}
// Quotes a value as a SQL string literal, doubling any embedded single quotes.
export const escapeVal = (val) => `'${String(val).split("'").join("''")}'`;
/**
 * Checks whether a table exists in the given schema.
 *
 * Fix over the original: `tableName` and `schema` are now passed through
 * `escapeVal` instead of being interpolated raw into the string literals,
 * so names containing single quotes cannot break (or inject into) the query.
 *
 * @param {string} tableName - Table to look up.
 * @param {string} [schema='main'] - Schema to search.
 * @returns {Promise<boolean>} true when the table exists.
 */
export async function checkTableExists(tableName, schema = 'main') {
  const { conn } = await getDuckDb();
  const result = await conn.query(`SELECT EXISTS(
    SELECT table_name FROM information_schema.tables
    WHERE table_name = ${escapeVal(tableName)} AND table_schema = ${escapeVal(schema)}
  )`);
  return Boolean(getColValAsNumber(result));
}
/**
 * Registers a browser File with DuckDB and loads its contents into a table.
 *
 * @param {Object} params
 * @param {File} params.file - File to register and import.
 * @param {string} [params.schema='main'] - Target schema.
 * @param {string} [params.tableName] - Target table name; derived from the
 *   file name (made unique against existingTables) when omitted.
 * @param {string[]} [params.existingTables=[]] - Names to avoid colliding with.
 * @returns {Promise<{tableName: string, fileName: string, numRows: number}>}
 * @throws {Error} When the file produced no rows.
 */
export async function addFileToDb({ file, schema = 'main', tableName, existingTables = [] }) {
  const targetTable =
    tableName ?? convertToUniqueColumnOrTableName(file.name, existingTables);
  const { db, conn } = await getDuckDb();
  await db.registerFileHandle(
    file.name,
    file,
    duckdb.DuckDBDataProtocol.BROWSER_FILEREADER,
    true
  );
  const createResult = await conn.query(
    `CREATE OR REPLACE TABLE ${schema}.${targetTable} AS SELECT * FROM ${escapeVal(file.name)}`
  );
  const numRows = getColValAsNumber(createResult);
  if (!numRows) {
    throw new Error(`No data loaded from "${file.name}"`);
  }
  return { tableName: targetTable, fileName: file.name, numRows };
}
@@ -0,0 +1,15 @@
1
+ export declare function createTableFromQuery(tableName: string, query: string): Promise<{
2
+ tableName: string;
3
+ rowCount: number;
4
+ }>;
5
+ export declare function createViewFromRegisteredFile(filePath: string, schema: string, tableName: string, opts?: {
6
+ mode: 'table' | 'view';
7
+ }): Promise<{
8
+ tableName: string;
9
+ rowCount: number;
10
+ }>;
11
+ export declare function createViewFromFile(filePath: string, schema: string, tableName: string, file: File | Uint8Array): Promise<{
12
+ tableName: string;
13
+ rowCount: number;
14
+ }>;
15
+ //# sourceMappingURL=duckdb.d.ts.map
@@ -0,0 +1 @@
1
+ {"version":3,"file":"duckdb.d.ts","sourceRoot":"","sources":["../src/duckdb.ts"],"names":[],"mappings":"AAOA,wBAAsB,oBAAoB,CAAC,SAAS,EAAE,MAAM,EAAE,KAAK,EAAE,MAAM;;;GAU1E;AAED,wBAAsB,4BAA4B,CAChD,QAAQ,EAAE,MAAM,EAChB,MAAM,EAAE,MAAM,EACd,SAAS,EAAE,MAAM,EACjB,IAAI,CAAC,EAAE;IAEL,IAAI,EAAE,OAAO,GAAG,MAAM,CAAC;CACxB,GACA,OAAO,CAAC;IAAC,SAAS,EAAE,MAAM,CAAC;IAAC,QAAQ,EAAE,MAAM,CAAA;CAAC,CAAC,CAsChD;AAED,wBAAsB,kBAAkB,CACtC,QAAQ,EAAE,MAAM,EAChB,MAAM,EAAE,MAAM,EACd,SAAS,EAAE,MAAM,EACjB,IAAI,EAAE,IAAI,GAAG,UAAU,GACtB,OAAO,CAAC;IAAC,SAAS,EAAE,MAAM,CAAC;IAAC,QAAQ,EAAE,MAAM,CAAA;CAAC,CAAC,CAqDhD"}
package/dist/duckdb.js ADDED
@@ -0,0 +1,156 @@
1
+ import { DuckDBDataProtocol } from '@duckdb/duckdb-wasm';
2
+ import { escapeVal, getColValAsNumber, getDuckDb } from './useDuckDb';
3
+ // export function makeTableName(inputFileName: string): string {
4
+ // return inputFileName.replace(/\.[^\.]*$/, '').replace(/\W/g, '_');
5
+ // }
/**
 * Creates (or replaces) a table in the `main` schema from a SQL query.
 *
 * @param {string} tableName - Name of the table to create.
 * @param {string} query - SELECT query providing the rows.
 * @returns {Promise<{tableName: string, rowCount: number}>}
 */
export async function createTableFromQuery(tableName, query) {
  const { conn } = await getDuckDb();
  const result = await conn.query(`CREATE OR REPLACE TABLE main.${tableName} AS (
    ${query}
  )`);
  return { tableName, rowCount: getColValAsNumber(result) };
}
/**
 * Builds the DuckDB relation expression used to read a registered file,
 * chosen by file extension.
 */
function getReadFileQuery(fileNameLower, quotedFileName) {
  if (
    fileNameLower.endsWith('.json') ||
    fileNameLower.endsWith('.geojson') ||
    fileNameLower.endsWith('.ndjson')
  ) {
    // Allow JSON objects up to 100MB.
    return `read_json_auto(${quotedFileName}, maximum_object_size=104857600)`;
  }
  if (fileNameLower.endsWith('.parquet')) {
    return `parquet_scan(${quotedFileName})`;
  }
  if (fileNameLower.endsWith('.csv') || fileNameLower.endsWith('.tsv')) {
    return `read_csv(${quotedFileName}, SAMPLE_SIZE=-1, AUTO_DETECT=TRUE)`;
  }
  // Fall back to letting DuckDB infer the format from the file name.
  return quotedFileName;
}

/**
 * Creates a table or view over a file previously registered with DuckDB.
 *
 * @param {string} filePath - Path the file was registered under.
 * @param {string} schema - Target schema.
 * @param {string} tableName - Target table/view name.
 * @param {{mode?: 'table' | 'view'}} [opts] - 'table' (default) or 'view'.
 * @returns {Promise<{tableName: string, rowCount: number}>}
 */
export async function createViewFromRegisteredFile(filePath, schema, tableName, opts) {
  const { mode = 'table' } = opts ?? {};
  const { conn } = await getDuckDb();
  const readFileQuery = getReadFileQuery(filePath.toLowerCase(), escapeVal(filePath));
  const result = await conn.query(`CREATE OR REPLACE ${mode} ${schema}.${tableName} AS
    SELECT * FROM ${readFileQuery}`);
  return { tableName, rowCount: getColValAsNumber(result) };
}
/**
 * Registers a file (browser File or raw bytes) with DuckDB and creates a
 * table over it via createViewFromRegisteredFile.
 *
 * @param {string} filePath - Path to register the file under.
 * @param {string} schema - Target schema.
 * @param {string} tableName - Target table name.
 * @param {File | Uint8Array} file - File handle or in-memory buffer.
 * @returns {Promise<{tableName: string, rowCount: number}>}
 */
export async function createViewFromFile(filePath, schema, tableName, file) {
  const duckConn = await getDuckDb();
  // Drop any file previously registered under the same path before
  // re-registering, so stale handles don't shadow the new content.
  await duckConn.db.dropFile(filePath);
  if (file instanceof File) {
    await duckConn.db.registerFileHandle(
      filePath,
      file,
      DuckDBDataProtocol.BROWSER_FILEREADER,
      true
    );
  } else {
    await duckConn.db.registerFileBuffer(filePath, file);
  }
  return createViewFromRegisteredFile(filePath, schema, tableName);
}
90
+ // async function createViewFromFile2(
91
+ // file: File,
92
+ // duckConn: DuckDb,
93
+ // onTableCreated: (
94
+ // inputTableName: string,
95
+ // result: CreateTableDropzoneResult,
96
+ // ) => void,
97
+ // onError: (status:'error', message: string) => void,
98
+ // ) {
99
+ // try {
100
+ // const inputFileName = file.name;
101
+ // await duckConn.db.dropFile(inputFileName);
102
+ // await duckConn.db.registerFileHandle(
103
+ // inputFileName,
104
+ // file,
105
+ // DuckDBDataProtocol.BROWSER_FILEREADER,
106
+ // true,
107
+ // );
108
+ // const inputTableName = genRandomStr(10, inputFileName).toLowerCase();
109
+ // await duckConn.conn.query(`DROP TABLE IF EXISTS ${inputTableName}`);
110
+ // const readFileQuery = inputFileName.endsWith('.parquet')
111
+ // ? `parquet_scan(${escapeVal(inputFileName)})`
112
+ // : `read_csv(${escapeVal(
113
+ // inputFileName,
114
+ // )}, SAMPLE_SIZE=-1, AUTO_DETECT=TRUE)`;
115
+ // await duckConn.conn.query(
116
+ // `CREATE TABLE ${inputTableName} AS
117
+ // SELECT * FROM ${readFileQuery}`,
118
+ // );
119
+ // const res = await duckConn.conn.query(
120
+ // `SELECT count(*) FROM ${inputTableName}`,
121
+ // );
122
+ // const inputRowCount = getColValAsNumber(res, 0);
123
+ // const tableMeta = await duckConn.conn.query(
124
+ // `DESCRIBE TABLE ${inputTableName}`,
125
+ // );
126
+ // const inputTableFields = Array.from(tableMeta).map((row) => ({
127
+ // name: String(row?.column_name),
128
+ // type: String(row?.column_type),
129
+ // }));
130
+ // const nextResult: CreateTableDropzoneResult = {
131
+ // inputFileName,
132
+ // inputTableName,
133
+ // inputRowCount,
134
+ // // outputRowCount: undefined,
135
+ // inputTableFields,
136
+ // columns: {},
137
+ // };
138
+ // // setResult(nextResult);
139
+ // onTableCreated(inputTableName, nextResult);
140
+ // } catch (e) {
141
+ // console.error(e);
142
+ // onError(e instanceof Error ? e.message : String(e));
143
+ // }
144
+ // }
145
+ // async function maybeDropTable(
146
+ // value: CreateTableDropzoneResult,
147
+ // duckConn: DuckDb,
148
+ // ) {
149
+ // const {inputFileName, inputTableName} = value || {};
150
+ // if (inputFileName) {
151
+ // await duckConn.db.dropFile(inputFileName);
152
+ // }
153
+ // if (inputTableName) {
154
+ // await duckConn.conn.query(`DROP TABLE IF EXISTS ${inputTableName};`);
155
+ // }
156
+ // }
@@ -0,0 +1,2 @@
1
+ export declare function exportToCsv(query: string, fileName: string, pageSize?: number): Promise<void>;
2
+ //# sourceMappingURL=exportToCsv.d.ts.map
@@ -0,0 +1 @@
1
+ {"version":3,"file":"exportToCsv.d.ts","sourceRoot":"","sources":["../src/exportToCsv.ts"],"names":[],"mappings":"AAGA,wBAAsB,WAAW,CAC/B,KAAK,EAAE,MAAM,EACb,QAAQ,EAAE,MAAM,EAChB,QAAQ,SAAS,iBA+BlB"}
@@ -0,0 +1,71 @@
1
+ import { getDuckDb } from './useDuckDb';
/**
 * Runs a query in pages and downloads the combined result as a CSV file.
 *
 * NOTE(review): pagination relies on LIMIT/OFFSET over the raw query; without
 * an ORDER BY in `query` the page boundaries are not guaranteed stable.
 *
 * @param {string} query - SQL query producing the rows to export.
 * @param {string} fileName - Name for the downloaded file.
 * @param {number} [pageSize=100000] - Rows fetched per page.
 */
export async function exportToCsv(query, fileName, pageSize = 100000) {
  const { conn } = await getDuckDb();
  const blobs = [];
  let offset = 0;
  let headersAdded = false;
  for (;;) {
    const pageQuery = `(
      ${query}
    ) LIMIT ${pageSize} OFFSET ${offset}`;
    const results = await conn.query(pageQuery);
    // An empty page means we've consumed the whole result set.
    if (results.numRows === 0) {
      break;
    }
    const csvChunk = convertToCsv(results, !headersAdded);
    blobs.push(new Blob([csvChunk], { type: 'text/csv' }));
    headersAdded = true; // only the first page carries the header row
    offset += pageSize;
  }
  downloadBlob(new Blob(blobs, { type: 'text/csv' }), fileName);
}
/**
 * Serializes an Arrow table to CSV text (RFC 4180-style, CRLF row endings).
 *
 * Fix over the original: fields containing a bare carriage return are now
 * quoted too (the old check only looked for '"', ',' and '\n'), since an
 * unquoted \r would corrupt the \r\n row terminators.
 *
 * @param {import('apache-arrow').Table} arrowTable - Table to serialize.
 * @param {boolean} includeHeaders - Whether to emit the header row.
 * @returns {string} The CSV text.
 */
function convertToCsv(arrowTable, includeHeaders) {
  const columnNames = arrowTable.schema.fields.map((field) => field.name);
  const columnsByName = columnNames.reduce((acc, columnName) => {
    const col = arrowTable.getChild(columnName);
    if (col) acc[columnName] = col;
    return acc;
  }, {});
  // Add header
  let csvContent = includeHeaders ? columnNames.join(',') + '\r\n' : '';
  // Add data rows
  for (let i = 0; i < arrowTable.numRows; i++) {
    const csvRow = columnNames
      .map((columnName) => {
        const cellValue = columnsByName[columnName]?.get(i);
        // If the cell value is null or undefined, set it to an empty string.
        if (cellValue == null) return '';
        // Convert cell value to string
        let cellValueStr = String(cellValue);
        // Quote fields containing quotes, commas, or line breaks (including
        // bare \r), escaping embedded double quotes by doubling them.
        if (/[",\n\r]/.test(cellValueStr)) {
          cellValueStr = '"' + cellValueStr.replace(/"/g, '""') + '"';
        }
        return cellValueStr;
      })
      .join(',');
    csvContent += csvRow + '\r\n';
  }
  return csvContent;
}
/**
 * Triggers a browser download of the given blob under the given filename by
 * clicking a temporary anchor element.
 */
function downloadBlob(blob, filename) {
  const objectUrl = URL.createObjectURL(blob);
  const anchor = document.createElement('a');
  anchor.href = objectUrl;
  anchor.download = filename;
  document.body.appendChild(anchor);
  anchor.click();
  URL.revokeObjectURL(objectUrl);
  document.body.removeChild(anchor);
}
@@ -0,0 +1,5 @@
1
+ export * from './duckdb';
2
+ export * from './types';
3
+ export * from './useDuckDb';
4
+ export * from './exportToCsv';
5
+ //# sourceMappingURL=index.d.ts.map
@@ -0,0 +1 @@
1
+ {"version":3,"file":"index.d.ts","sourceRoot":"","sources":["../src/index.ts"],"names":[],"mappings":"AAAA,cAAc,UAAU,CAAC;AACzB,cAAc,SAAS,CAAC;AACxB,cAAc,aAAa,CAAC;AAC5B,cAAc,eAAe,CAAC"}
package/dist/index.js ADDED
@@ -0,0 +1,4 @@
1
+ export * from './duckdb';
2
+ export * from './types';
3
+ export * from './useDuckDb';
4
+ export * from './exportToCsv';