@sqlrooms/duckdb 0.29.0-rc.0 → 0.29.0-rc.2
This diff compares the published contents of the two package versions as they appear in their public registries and is provided for informational purposes only.
- package/dist/DuckDbSlice.d.ts +20 -9
- package/dist/DuckDbSlice.d.ts.map +1 -1
- package/dist/DuckDbSlice.js +119 -127
- package/dist/DuckDbSlice.js.map +1 -1
- package/dist/connectors/WasmDuckDbConnector.d.ts.map +1 -1
- package/dist/connectors/WasmDuckDbConnector.js +19 -0
- package/dist/connectors/WasmDuckDbConnector.js.map +1 -1
- package/dist/index.d.ts +5 -3
- package/dist/index.d.ts.map +1 -1
- package/dist/index.js +4 -3
- package/dist/index.js.map +1 -1
- package/dist/loadTableSchemas.d.ts +17 -0
- package/dist/loadTableSchemas.d.ts.map +1 -0
- package/dist/loadTableSchemas.js +134 -0
- package/dist/loadTableSchemas.js.map +1 -0
- package/dist/use-copy-as-tsv.d.ts +13 -0
- package/dist/use-copy-as-tsv.d.ts.map +1 -0
- package/dist/use-copy-as-tsv.js +68 -0
- package/dist/use-copy-as-tsv.js.map +1 -0
- package/dist/use-export-to-csv.d.ts +5 -0
- package/dist/use-export-to-csv.d.ts.map +1 -0
- package/dist/use-export-to-csv.js +77 -0
- package/dist/use-export-to-csv.js.map +1 -0
- package/package.json +9 -9
package/dist/use-copy-as-tsv.js
ADDED
@@ -0,0 +1,68 @@
+import { useStoreWithDuckDb } from './DuckDbSlice';
+export function useCopyAsTsv() {
+    const getConnector = useStoreWithDuckDb((state) => state.db.getConnector);
+    return {
+        copyAsTsv: async (query, options) => {
+            const { pageSize = 100000, maxSizeBytes = 50 * 1024 * 1024 } = options || {};
+            const dbConnector = await getConnector();
+            let offset = 0;
+            const chunks = [];
+            let headersAdded = false;
+            let totalRows = 0;
+            let totalBytes = 0;
+            let limitExceeded = false;
+            while (true) {
+                const currentQuery = `(
+        ${query}
+      ) LIMIT ${pageSize} OFFSET ${offset}`;
+                const results = await dbConnector.query(currentQuery);
+                if (results.numRows === 0) {
+                    break;
+                }
+                const tsvChunk = convertToTsv(results, !headersAdded);
+                const chunkBytes = new Blob([tsvChunk]).size;
+                // Check if adding this chunk would exceed the limit
+                if (totalBytes + chunkBytes > maxSizeBytes) {
+                    limitExceeded = true;
+                    break;
+                }
+                chunks.push(tsvChunk);
+                totalBytes += chunkBytes;
+                totalRows += results.numRows;
+                headersAdded = true;
+                offset += pageSize;
+            }
+            await navigator.clipboard.writeText(chunks.join(''));
+            return { rowCount: totalRows, limitExceeded };
+        },
+    };
+}
+function convertToTsv(arrowTable, includeHeaders) {
+    const columnNames = arrowTable.schema.fields.map((field) => field.name);
+    const columnsByName = columnNames.reduce((acc, columnName) => {
+        const col = arrowTable.getChild(columnName);
+        if (col)
+            acc[columnName] = col;
+        return acc;
+    }, {});
+    // Add header
+    let tsvContent = includeHeaders ? columnNames.join('\t') + '\r\n' : '';
+    // Add data rows
+    for (let i = 0; i < arrowTable.numRows; i++) {
+        const tsvRow = columnNames
+            .map((columnName) => {
+            const cellValue = columnsByName[columnName]?.get(i);
+            // If the cell value is null or undefined, set it to an empty string.
+            if (cellValue == null)
+                return '';
+            // Replace tabs and newlines to keep TSV structure intact.
+            // TSV has no standard escaping for tabs/newlines (unlike CSV with quoted
+            // fields), so we replace them with spaces for spreadsheet compatibility.
+            return String(cellValue).replace(/\t/g, ' ').replace(/\r?\n/g, ' ');
+        })
+            .join('\t');
+        tsvContent += tsvRow + '\r\n';
+    }
+    return tsvContent;
+}
+//# sourceMappingURL=use-copy-as-tsv.js.map
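The new hook pages through the query with LIMIT/OFFSET, converts each Arrow result chunk to TSV, stops before the configured byte budget would be exceeded, and writes the accumulated text to the clipboard. A minimal consumption sketch, assuming the hook is re-exported from the package index (which the index.js/index.d.ts changes listed above suggest); the component and query prop are hypothetical:

import React from 'react';
import {useCopyAsTsv} from '@sqlrooms/duckdb'; // assumed re-export from the package index

// Hypothetical toolbar button that copies a query result as TSV.
export function CopyResultsButton({query}: {query: string}) {
  const {copyAsTsv} = useCopyAsTsv();

  const handleCopy = async () => {
    // Override the defaults (100000 rows per page, 50 MB budget).
    const {rowCount, limitExceeded} = await copyAsTsv(query, {
      pageSize: 50000,
      maxSizeBytes: 10 * 1024 * 1024,
    });
    console.log(
      limitExceeded
        ? `Copied the first ${rowCount} rows (size limit reached)`
        : `Copied ${rowCount} rows`,
    );
  };

  return <button onClick={handleCopy}>Copy as TSV</button>;
}

Calling the hook from a click handler as above also fits the constraints of navigator.clipboard.writeText, which generally requires a secure context and a user gesture.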
package/dist/use-copy-as-tsv.js.map
ADDED
[Generated one-line source map. Its sourcesContent field embeds the original TypeScript of src/use-copy-as-tsv.ts, including the CopyAsTsvResult, CopyAsTsvOptions, and UseCopyAsTsvReturn interface declarations.]
package/dist/use-export-to-csv.d.ts.map
ADDED
@@ -0,0 +1 @@
+{"version":3,"file":"use-export-to-csv.d.ts","sourceRoot":"","sources":["../src/use-export-to-csv.ts"],"names":[],"mappings":"AAGA,MAAM,WAAW,oBAAoB;IACnC,WAAW,EAAE,CACX,KAAK,EAAE,MAAM,EACb,QAAQ,EAAE,MAAM,EAChB,QAAQ,CAAC,EAAE,MAAM,KACd,OAAO,CAAC,IAAI,CAAC,CAAC;CACpB;AAED,wBAAgB,cAAc,IAAI,oBAAoB,CAuCrD"}
package/dist/use-export-to-csv.js
ADDED
@@ -0,0 +1,77 @@
+import { useStoreWithDuckDb } from './DuckDbSlice';
+export function useExportToCsv() {
+    const getConnector = useStoreWithDuckDb((state) => state.db.getConnector);
+    return {
+        exportToCsv: async (query, fileName, pageSize = 100000) => {
+            const dbConnector = await getConnector();
+            let offset = 0;
+            const blobs = [];
+            let headersAdded = false;
+            while (true) {
+                const currentQuery = `(
+        ${query}
+      ) LIMIT ${pageSize} OFFSET ${offset}`;
+                const results = await dbConnector.query(currentQuery);
+                // Check if we received any results; if not, we are done.
+                if (results.numRows === 0) {
+                    break;
+                }
+                const csvChunk = convertToCsv(results, !headersAdded);
+                blobs.push(new Blob([csvChunk], { type: 'text/csv' }));
+                // Ensure that headers are not added in subsequent iterations
+                headersAdded = true;
+                // Increment offset to fetch the next chunk
+                offset += pageSize;
+            }
+            const fullCsvBlob = new Blob(blobs, { type: 'text/csv' });
+            downloadBlob(fullCsvBlob, fileName);
+        },
+    };
+}
+function convertToCsv(arrowTable, includeHeaders) {
+    // return includeHeaders
+    //   ? csvFormat(arrowTable.toArray())
+    //   : csvFormatBody(arrowTable.toArray());
+    const columnNames = arrowTable.schema.fields.map((field) => field.name);
+    const columnsByName = columnNames.reduce((acc, columnName) => {
+        const col = arrowTable.getChild(columnName);
+        if (col)
+            acc[columnName] = col;
+        return acc;
+    }, {});
+    // Add header
+    let csvContent = includeHeaders ? columnNames.join(',') + '\r\n' : '';
+    // Add data rows
+    for (let i = 0; i < arrowTable.numRows; i++) {
+        const csvRow = columnNames
+            .map((columnName) => {
+            const cellValue = columnsByName[columnName]?.get(i);
+            // If the cell value is null or undefined, set it to an empty string.
+            if (cellValue == null)
+                return '';
+            // Convert cell value to string
+            let cellValueStr = String(cellValue);
+            // Escape double quotes and wrap cell value in double quotes if necessary
+            if (cellValueStr.includes('"') ||
+                cellValueStr.includes(',') ||
+                cellValueStr.includes('\n')) {
+                cellValueStr = '"' + cellValueStr.replace(/"/g, '""') + '"';
+            }
+            return cellValueStr;
+        })
+            .join(',');
+        csvContent += csvRow + '\r\n';
+    }
+    return csvContent;
+}
+function downloadBlob(blob, filename) {
+    const url = URL.createObjectURL(blob);
+    const a = document.createElement('a');
+    a.href = url;
+    a.download = filename;
+    document.body.appendChild(a);
+    a.click();
+    URL.revokeObjectURL(url);
+    document.body.removeChild(a);
+}
+//# sourceMappingURL=use-export-to-csv.js.map
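A similar sketch for the CSV export hook; the table name, file name, and component are illustrative, and the re-export from the package index is again assumed:

import React from 'react';
import {useExportToCsv} from '@sqlrooms/duckdb'; // assumed re-export from the package index

// Hypothetical button that downloads a query result as a CSV file.
export function ExportOrdersButton() {
  const {exportToCsv} = useExportToCsv();

  // Fetches the result in 100000-row pages (the default pageSize),
  // concatenates the CSV chunks into a single Blob, and triggers a download.
  const handleExport = () => exportToCsv('SELECT * FROM orders', 'orders.csv');

  return <button onClick={handleExport}>Export CSV</button>;
}

Because both hooks paginate with plain LIMIT/OFFSET, a query without a deterministic ORDER BY can yield inconsistent pages across fetches, so an explicit ordering in the query is a sensible precaution.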
package/dist/use-export-to-csv.js.map
ADDED
[Generated one-line source map. Its sourcesContent field embeds the original TypeScript of src/use-export-to-csv.ts, including the UseExportToCsvReturn interface declaration.]
package/package.json
CHANGED
@@ -1,6 +1,6 @@
 {
   "name": "@sqlrooms/duckdb",
-  "version": "0.29.0-rc.0",
+  "version": "0.29.0-rc.2",
   "main": "dist/index.js",
   "types": "dist/index.d.ts",
   "module": "dist/index.js",
@@ -10,7 +10,7 @@
   "license": "MIT",
   "repository": {
     "type": "git",
-    "url": "https://github.com/sqlrooms/sqlrooms.git"
+    "url": "git+https://github.com/sqlrooms/sqlrooms.git"
   },
   "files": [
     "dist"
@@ -20,17 +20,17 @@
   },
   "dependencies": {
     "@duckdb/duckdb-wasm": "1.32.0",
-    "@sqlrooms/duckdb-core": "0.29.0-rc.0",
-    "@sqlrooms/room-config": "0.29.0-rc.0",
-    "@sqlrooms/room-store": "0.29.0-rc.0",
-    "@sqlrooms/utils": "0.29.0-rc.0",
+    "@sqlrooms/duckdb-core": "0.29.0-rc.2",
+    "@sqlrooms/room-config": "0.29.0-rc.2",
+    "@sqlrooms/room-store": "0.29.0-rc.2",
+    "@sqlrooms/utils": "0.29.0-rc.2",
     "fast-deep-equal": "^3.1.3",
     "immer": "^11.0.1",
     "zod": "^4.1.8",
     "zustand": "^5.0.8"
   },
   "devDependencies": {
-    "@sqlrooms/duckdb-node": "0.29.0-rc.0",
+    "@sqlrooms/duckdb-node": "0.29.0-rc.2",
     "@types/jest": "^30.0.0",
     "jest": "^30.1.3",
     "ts-jest": "^29.4.4"
@@ -47,5 +47,5 @@
     "test": "NODE_OPTIONS='--experimental-vm-modules --no-warnings' jest",
     "test:watch": "NODE_OPTIONS='--experimental-vm-modules --no-warnings' jest --watch"
   },
-  "gitHead": "
-}
+  "gitHead": "5d511631992c1af8852ea79ced488867aad4a555"
+}