@devrev/meerkat-node 0.0.89 → 0.0.90
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/package.json +3 -3
- package/src/duckdb-exec.d.ts +1 -1
- package/src/duckdb-exec.js +10 -7
- package/src/duckdb-exec.js.map +1 -1
- package/src/duckdb-manager/duckdb-manager.d.ts +15 -6
- package/src/duckdb-manager/duckdb-manager.js +40 -13
- package/src/duckdb-manager/duckdb-manager.js.map +1 -1
- package/src/duckdb-singleton.d.ts +2 -2
- package/src/duckdb-singleton.js +3 -3
- package/src/duckdb-singleton.js.map +1 -1
- package/src/file-manager/file-manager.d.ts +1 -0
- package/src/file-manager/file-manager.js +7 -4
- package/src/file-manager/file-manager.js.map +1 -1
- package/src/node-sql-to-serialization.js.map +1 -1
- package/src/utils/duckdb-type-convertor.d.ts +3 -6
- package/src/utils/duckdb-type-convertor.js +25 -25
- package/src/utils/duckdb-type-convertor.js.map +1 -1
- package/src/utils/hash-string.d.ts +4 -0
- package/src/utils/hash-string.js +13 -0
- package/src/utils/hash-string.js.map +1 -0
- package/src/utils/convert-rows-to-records.d.ts +0 -2
- package/src/utils/convert-rows-to-records.js +0 -17
- package/src/utils/convert-rows-to-records.js.map +0 -1
- package/src/utils/encrypt-string.d.ts +0 -8
- package/src/utils/encrypt-string.js +0 -39
- package/src/utils/encrypt-string.js.map +0 -1
- package/src/utils/transform-duckdb-result.d.ts +0 -13
- package/src/utils/transform-duckdb-result.js +0 -26
- package/src/utils/transform-duckdb-result.js.map +0 -1
package/package.json
CHANGED
@@ -1,12 +1,12 @@
 {
   "name": "@devrev/meerkat-node",
-  "version": "0.0.89",
+  "version": "0.0.90",
   "dependencies": {
     "@swc/helpers": "~0.5.0",
     "@devrev/meerkat-core": "*",
-    "@duckdb/node-api": "1.1.3-alpha.7",
     "axios": "^1.6.0",
-    "lodash": "^4.17.21"
+    "lodash": "^4.17.21",
+    "duckdb": "^1.0.0"
   },
   "scripts": {
     "release": "semantic-release"
package/src/duckdb-exec.d.ts
CHANGED
@@ -1 +1 @@
-export declare const duckdbExec: (query: string) => Promise<
+export declare const duckdbExec: <T = unknown>(query: string) => Promise<T>;
package/src/duckdb-exec.js
CHANGED
@@ -6,13 +6,16 @@ Object.defineProperty(exports, "duckdbExec", {
     }
 });
 const _duckdbsingleton = require("./duckdb-singleton");
-const
-const
-
-
-
-
-
+const duckdbExec = (query)=>{
+    const db = _duckdbsingleton.DuckDBSingleton.getInstance();
+    return new Promise((resolve, reject)=>{
+        db.all(query, (err, res)=>{
+            if (err) {
+                reject(err);
+            }
+            resolve(res);
+        });
+    });
 };
 
 //# sourceMappingURL=duckdb-exec.js.map
package/src/duckdb-exec.js.map
CHANGED
@@ -1 +1 @@
-{"version":3,"sources":["../../../meerkat-node/src/duckdb-exec.ts"],"sourcesContent":["import { DuckDBSingleton } from './duckdb-singleton';\
+
{"version":3,"sources":["../../../meerkat-node/src/duckdb-exec.ts"],"sourcesContent":["import { DuckDBSingleton } from './duckdb-singleton';\n\nexport const duckdbExec = <T = unknown>(query: string): Promise<T> => {\n const db = DuckDBSingleton.getInstance();\n return new Promise((resolve, reject) => {\n db.all(query, (err, res) => {\n if (err) {\n reject(err);\n }\n resolve(res as T);\n });\n });\n};\n"],"names":["duckdbExec","query","db","DuckDBSingleton","getInstance","Promise","resolve","reject","all","err","res"],"mappings":";+BAEaA;;;eAAAA;;;iCAFmB;AAEzB,MAAMA,aAAa,CAAcC;IACtC,MAAMC,KAAKC,gCAAe,CAACC,WAAW;IACtC,OAAO,IAAIC,QAAQ,CAACC,SAASC;QAC3BL,GAAGM,GAAG,CAACP,OAAO,CAACQ,KAAKC;YAClB,IAAID,KAAK;gBACPF,OAAOE;YACT;YACAH,QAAQI;QACV;IACF;AACF"}

package/src/duckdb-manager/duckdb-manager.d.ts
CHANGED

@@ -1,19 +1,28 @@
-import {
-import { QueryResult } from '../utils/transform-duckdb-result';
+import { ColumnInfo, Connection, Database, TableData } from 'duckdb';
 export declare class DuckDBManager {
     private db;
     private connection;
     private initPromise;
-    constructor({
-
+    constructor({ onInitialize, }: {
+        onInitialize?: (db: Database) => Promise<void>;
     });
     /**
      * Initialize the DuckDB instance
      */
     private initialize;
-
+    /**
+     * Get a DuckDB connection instance.
+     */
+    getConnection(): Promise<Connection | null>;
     /**
      * Execute a query on the DuckDB connection.
      */
-    query(query: string): Promise<
+    query(query: string): Promise<{
+        columns: ColumnInfo[];
+        data: TableData;
+    }>;
+    /**
+     * Close the DuckDB connection and cleanup resources.
+     */
+    close(): Promise<void>;
 }

package/src/duckdb-manager/duckdb-manager.js
CHANGED

@@ -6,20 +6,23 @@ Object.defineProperty(exports, "DuckDBManager", {
     }
 });
 const _duckdbsingleton = require("../duckdb-singleton");
-const
+const _duckdbtypeconvertor = require("../utils/duckdb-type-convertor");
 let DuckDBManager = class DuckDBManager {
     /**
    * Initialize the DuckDB instance
-   */ async initialize({
-        this.db =
-        await (
+   */ async initialize({ onInitialize }) {
+        this.db = _duckdbsingleton.DuckDBSingleton.getInstance();
+        await (onInitialize == null ? void 0 : onInitialize(this.db));
     }
-
+    /**
+   * Get a DuckDB connection instance.
+   */ async getConnection() {
+        // Ensure database is initialized before returning the connection
         await this.initPromise;
         if (!this.connection) {
             var _this_db;
-            var
-            this.connection = (
+            var _this_db_connect;
+            this.connection = (_this_db_connect = (_this_db = this.db) == null ? void 0 : _this_db.connect()) != null ? _this_db_connect : null;
         }
         return this.connection;
     }
@@ -27,16 +30,40 @@ let DuckDBManager = class DuckDBManager {
    * Execute a query on the DuckDB connection.
    */ async query(query) {
         const connection = await this.getConnection();
-
-
-
-
+        return new Promise((resolve, reject)=>{
+            connection == null ? void 0 : connection.prepare(query, (err, statement)=>{
+                if (err) {
+                    reject(new Error(`Query preparation failed: ${err.message}`));
+                    return;
+                }
+                const columns = statement.columns();
+                statement.all((err, data)=>{
+                    if (err) {
+                        reject(new Error(`Query execution failed: ${err.message}`));
+                        return;
+                    }
+                    const result = (0, _duckdbtypeconvertor.convertTableDataToJSON)(data, columns);
+                    resolve({
+                        columns,
+                        data: result
+                    });
+                });
+            });
+        });
+    }
+    /**
+   * Close the DuckDB connection and cleanup resources.
+   */ async close() {
+        if (this.connection) {
+            this.connection.close();
+            this.connection = null;
+        }
     }
-    constructor({
+    constructor({ onInitialize }){
         this.db = null;
         this.connection = null;
         this.initPromise = this.initialize({
-
+            onInitialize
         });
     }
 };

package/src/duckdb-manager/duckdb-manager.js.map
CHANGED

@@ -1 +1 @@
-{"version":3,"sources":["../../../../meerkat-node/src/duckdb-manager/duckdb-manager.ts"],"sourcesContent":["import {
+
{"version":3,"sources":["../../../../meerkat-node/src/duckdb-manager/duckdb-manager.ts"],"sourcesContent":["import { ColumnInfo, Connection, Database, TableData } from 'duckdb';\n\nimport { DuckDBSingleton } from '../duckdb-singleton';\nimport { convertTableDataToJSON } from '../utils/duckdb-type-convertor';\n\nexport class DuckDBManager {\n private db: Database | null = null;\n private connection: Connection | null = null;\n\n private initPromise: Promise<void>;\n\n constructor({\n onInitialize,\n }: {\n onInitialize?: (db: Database) => Promise<void>;\n }) {\n this.initPromise = this.initialize({ onInitialize });\n }\n\n /**\n * Initialize the DuckDB instance\n */\n private async initialize({\n onInitialize,\n }: {\n onInitialize?: (db: Database) => Promise<void>;\n }) {\n this.db = DuckDBSingleton.getInstance();\n\n await onInitialize?.(this.db);\n }\n\n /**\n * Get a DuckDB connection instance.\n */\n async getConnection() {\n // Ensure database is initialized before returning the connection\n await this.initPromise;\n\n if (!this.connection) {\n this.connection = this.db?.connect() ?? null;\n }\n\n return this.connection;\n }\n\n /**\n * Execute a query on the DuckDB connection.\n */\n async query(\n query: string\n ): Promise<{ columns: ColumnInfo[]; data: TableData }> {\n const connection = await this.getConnection();\n\n return new Promise((resolve, reject) => {\n connection?.prepare(query, (err, statement) => {\n if (err) {\n reject(new Error(`Query preparation failed: ${err.message}`));\n return;\n }\n\n const columns = statement.columns();\n\n statement.all((err, data) => {\n if (err) {\n reject(new Error(`Query execution failed: ${err.message}`));\n return;\n }\n\n const result = convertTableDataToJSON(data, columns);\n\n resolve({ columns, data: result });\n });\n });\n });\n }\n\n /**\n * Close the DuckDB connection and cleanup resources.\n */\n async close(): Promise<void> {\n if (this.connection) {\n this.connection.close();\n this.connection = null;\n }\n 
}\n}\n"],"names":["DuckDBManager","initialize","onInitialize","db","DuckDBSingleton","getInstance","getConnection","initPromise","connection","connect","query","Promise","resolve","reject","prepare","err","statement","Error","message","columns","all","data","result","convertTableDataToJSON","close","constructor"],"mappings":";+BAKaA;;;eAAAA;;;iCAHmB;qCACO;AAEhC,IAAA,AAAMA,gBAAN,MAAMA;IAcX;;GAEC,GACD,MAAcC,WAAW,EACvBC,YAAY,EAGb,EAAE;QACD,IAAI,CAACC,EAAE,GAAGC,gCAAe,CAACC,WAAW;QAErC,OAAMH,gCAAAA,aAAe,IAAI,CAACC,EAAE;IAC9B;IAEA;;GAEC,GACD,MAAMG,gBAAgB;QACpB,iEAAiE;QACjE,MAAM,IAAI,CAACC,WAAW;QAEtB,IAAI,CAAC,IAAI,CAACC,UAAU,EAAE;gBACF;gBAAA;YAAlB,IAAI,CAACA,UAAU,GAAG,CAAA,oBAAA,WAAA,IAAI,CAACL,EAAE,qBAAP,SAASM,OAAO,cAAhB,mBAAsB;QAC1C;QAEA,OAAO,IAAI,CAACD,UAAU;IACxB;IAEA;;GAEC,GACD,MAAME,MACJA,KAAa,EACwC;QACrD,MAAMF,aAAa,MAAM,IAAI,CAACF,aAAa;QAE3C,OAAO,IAAIK,QAAQ,CAACC,SAASC;YAC3BL,8BAAAA,WAAYM,OAAO,CAACJ,OAAO,CAACK,KAAKC;gBAC/B,IAAID,KAAK;oBACPF,OAAO,IAAII,MAAM,CAAC,0BAA0B,EAAEF,IAAIG,OAAO,CAAC,CAAC;oBAC3D;gBACF;gBAEA,MAAMC,UAAUH,UAAUG,OAAO;gBAEjCH,UAAUI,GAAG,CAAC,CAACL,KAAKM;oBAClB,IAAIN,KAAK;wBACPF,OAAO,IAAII,MAAM,CAAC,wBAAwB,EAAEF,IAAIG,OAAO,CAAC,CAAC;wBACzD;oBACF;oBAEA,MAAMI,SAASC,IAAAA,2CAAsB,EAACF,MAAMF;oBAE5CP,QAAQ;wBAAEO;wBAASE,MAAMC;oBAAO;gBAClC;YACF;QACF;IACF;IAEA;;GAEC,GACD,MAAME,QAAuB;QAC3B,IAAI,IAAI,CAAChB,UAAU,EAAE;YACnB,IAAI,CAACA,UAAU,CAACgB,KAAK;YACrB,IAAI,CAAChB,UAAU,GAAG;QACpB;IACF;IA1EAiB,YAAY,EACVvB,YAAY,EAGb,CAAE;aATKC,KAAsB;aACtBK,aAAgC;QAStC,IAAI,CAACD,WAAW,GAAG,IAAI,CAACN,UAAU,CAAC;YAAEC;QAAa;IACpD;AAqEF"}

package/src/duckdb-singleton.d.ts
CHANGED

@@ -1,4 +1,4 @@
-import {
+import { Database } from 'duckdb';
 /**
  * DuckDBSingleton is designed as a Singleton class, which ensures that only one Database connection exists across the entire application.
  * This reduces the overhead involved in establishing new connections for each database request.
@@ -14,5 +14,5 @@ import { DuckDBInstance } from '@duckdb/node-api';
 export declare class DuckDBSingleton {
     private static instance;
     private constructor();
-    static getInstance():
+    static getInstance(): Database;
 }
package/src/duckdb-singleton.js
CHANGED
@@ -5,11 +5,11 @@ Object.defineProperty(exports, "DuckDBSingleton", {
         return DuckDBSingleton;
     }
 });
-const
+const _duckdb = require("duckdb");
 let DuckDBSingleton = class DuckDBSingleton {
-    static
+    static getInstance() {
         if (!DuckDBSingleton.instance) {
-            DuckDBSingleton.instance =
+            DuckDBSingleton.instance = new _duckdb.Database(':memory:');
         }
         return DuckDBSingleton.instance;
     }

package/src/duckdb-singleton.js.map
CHANGED

@@ -1 +1 @@
-{"version":3,"sources":["../../../meerkat-node/src/duckdb-singleton.ts"],"sourcesContent":["import {
+
{"version":3,"sources":["../../../meerkat-node/src/duckdb-singleton.ts"],"sourcesContent":["import { Database } from 'duckdb';\n\n/**\n * DuckDBSingleton is designed as a Singleton class, which ensures that only one Database connection exists across the entire application.\n * This reduces the overhead involved in establishing new connections for each database request.\n *\n * The `getInstance` method returns a DuckDB Database instance. If an instance doesn't already exist, it creates one.\n * Subsequent calls to `getInstance` will return the pre-existing instance, ensuring there is only a single connection to the DuckDB instance throughout the use of the application.\n *\n * Usage: let duckDbConnection = DuckDBSingleton.getInstance();\n *\n * Note: In case of in-memory database, `new Database(':memory:')` in getInstance method. In-memory databases are faster for read/write operations\n * but are not persistent; they lose data as soon as the program ends or the machine is turned off, which is okay for our use-case.\n */\nexport class DuckDBSingleton {\n private static instance: Database;\n\n private constructor() {\n // private to prevent direct instantiation.\n }\n\n static getInstance(): Database {\n if (!DuckDBSingleton.instance) {\n DuckDBSingleton.instance = new Database(':memory:');\n }\n return DuckDBSingleton.instance;\n }\n}\n"],"names":["DuckDBSingleton","getInstance","instance","Database"],"mappings":";+BAcaA;;;eAAAA;;;wBAdY;AAclB,IAAA,AAAMA,kBAAN,MAAMA;IAOX,OAAOC,cAAwB;QAC7B,IAAI,CAACD,gBAAgBE,QAAQ,EAAE;YAC7BF,gBAAgBE,QAAQ,GAAG,IAAIC,gBAAQ,CAAC;QAC1C;QACA,OAAOH,gBAAgBE,QAAQ;IACjC;IATA,aAAsB;IACpB,2CAA2C;IAC7C;AAQF"}

package/src/file-manager/file-manager.js
CHANGED

@@ -9,16 +9,19 @@ const _extends = require("@swc/helpers/_/_extends");
 const _axios = require("axios");
 const _fs = require("fs");
 const _path = require("path");
-const
+const _hashstring = require("../utils/hash-string");
 let FileManager = class FileManager {
     getPath(tableName, fileName) {
         return fileName ? _path.join(this.baseDir, tableName, fileName) : _path.join(this.baseDir, tableName);
     }
+    getHashedFileName(fileName) {
+        return (0, _hashstring.hashString)(fileName);
+    }
     /**
    * Write a file buffer to the file system.
    */ async writeFileBuffer(file) {
         // Hash the file name to avoid file name length issues
-        const hashedFileName =
+        const hashedFileName = this.getHashedFileName(file.fileName);
         const filePath = this.getPath(file.tableName, hashedFileName);
         await _fs.promises.mkdir(_path.dirname(filePath), {
             recursive: true
@@ -40,7 +43,7 @@ let FileManager = class FileManager {
    */ async deleteTableFiles(tableName, files) {
         await Promise.all(files.map(async (file)=>{
             try {
-                await _fs.promises.unlink(this.getPath(tableName, file));
+                await _fs.promises.unlink(this.getPath(tableName, this.getHashedFileName(file)));
             } catch (err) {
                 console.error(err);
             }
@@ -56,7 +59,7 @@ let FileManager = class FileManager {
             responseType: 'stream',
             url
         });
-        const hashedFileName = (0,
+        const hashedFileName = (0, _hashstring.hashString)(fileName);
         const filePath = this.getPath(tableName, hashedFileName);
         await _fs.promises.mkdir(_path.dirname(filePath), {
             recursive: true

package/src/file-manager/file-manager.js.map
CHANGED

@@ -1 +1 @@
-{"version":3,"sources":["../../../../meerkat-node/src/file-manager/file-manager.ts"],"sourcesContent":["import axios from 'axios';\nimport { createWriteStream, promises as fs } from 'fs';\nimport * as path from 'path';\n\nimport {
+
{"version":3,"sources":["../../../../meerkat-node/src/file-manager/file-manager.ts"],"sourcesContent":["import axios from 'axios';\nimport { createWriteStream, promises as fs } from 'fs';\nimport * as path from 'path';\n\nimport { hashString } from '../utils/hash-string';\n\nexport class FileManager {\n private readonly baseDir: string;\n\n constructor(config: { baseDir?: string }) {\n this.baseDir = config.baseDir ?? './data';\n fs.mkdir(this.baseDir, { recursive: true }).catch(console.error);\n }\n\n public getPath(tableName: string, fileName?: string): string {\n return fileName\n ? path.join(this.baseDir, tableName, fileName)\n : path.join(this.baseDir, tableName);\n }\n\n private getHashedFileName(fileName: string): string {\n return hashString(fileName);\n }\n\n /**\n * Write a file buffer to the file system.\n */\n async writeFileBuffer(file: {\n tableName: string;\n fileName: string;\n buffer: Uint8Array;\n }): Promise<void> {\n // Hash the file name to avoid file name length issues\n const hashedFileName = this.getHashedFileName(file.fileName);\n\n const filePath = this.getPath(file.tableName, hashedFileName);\n\n await fs.mkdir(path.dirname(filePath), { recursive: true });\n\n await fs.writeFile(filePath, file.buffer);\n }\n\n /**\n * Get the file paths for a table.\n */\n async getTableFilePaths(tableName: string): Promise<string[]> {\n try {\n const files = await fs.readdir(this.getPath(tableName));\n\n return files.map((file) => this.getPath(tableName, file));\n } catch {\n return [];\n }\n }\n\n /**\n * Delete files from a table.\n */\n async deleteTableFiles(tableName: string, files: string[]): Promise<void> {\n await Promise.all(\n files.map(async (file) => {\n try {\n await fs.unlink(\n this.getPath(tableName, this.getHashedFileName(file))\n );\n } catch (err) {\n console.error(err);\n }\n })\n );\n }\n\n /**\n * Stream and register a file from a URL.\n */\n async streamAndRegisterFile({\n tableName,\n url,\n headers,\n fileName,\n }: {\n tableName: string;\n url: string;\n headers: Record<string, string>;\n fileName: string;\n }): Promise<void> {\n try {\n const response = await axios({\n headers: {\n ...headers,\n },\n method: 'get',\n responseType: 'stream',\n url,\n });\n\n const hashedFileName = hashString(fileName);\n\n const filePath = this.getPath(tableName, hashedFileName);\n await fs.mkdir(path.dirname(filePath), { recursive: true });\n\n const writer = createWriteStream(filePath);\n\n return new Promise((resolve, reject) => {\n response.data.pipe(writer);\n writer.on('finish', () => {\n writer.close();\n resolve();\n });\n writer.on('error', (err) => {\n writer.close();\n reject(err);\n });\n });\n } catch (error) {\n console.error('Error streaming file:', error);\n throw error;\n }\n 
}\n}\n"],"names":["FileManager","getPath","tableName","fileName","path","join","baseDir","getHashedFileName","hashString","writeFileBuffer","file","hashedFileName","filePath","fs","mkdir","dirname","recursive","writeFile","buffer","getTableFilePaths","files","readdir","map","deleteTableFiles","Promise","all","unlink","err","console","error","streamAndRegisterFile","url","headers","response","axios","method","responseType","writer","createWriteStream","resolve","reject","data","pipe","on","close","constructor","config","catch"],"mappings":";+BAMaA;;;eAAAA;;;;uBANK;oBACgC;sBAC5B;4BAEK;AAEpB,IAAA,AAAMA,cAAN,MAAMA;IAQJC,QAAQC,SAAiB,EAAEC,QAAiB,EAAU;QAC3D,OAAOA,WACHC,MAAKC,IAAI,CAAC,IAAI,CAACC,OAAO,EAAEJ,WAAWC,YACnCC,MAAKC,IAAI,CAAC,IAAI,CAACC,OAAO,EAAEJ;IAC9B;IAEQK,kBAAkBJ,QAAgB,EAAU;QAClD,OAAOK,IAAAA,sBAAU,EAACL;IACpB;IAEA;;GAEC,GACD,MAAMM,gBAAgBC,IAIrB,EAAiB;QAChB,sDAAsD;QACtD,MAAMC,iBAAiB,IAAI,CAACJ,iBAAiB,CAACG,KAAKP,QAAQ;QAE3D,MAAMS,WAAW,IAAI,CAACX,OAAO,CAACS,KAAKR,SAAS,EAAES;QAE9C,MAAME,YAAE,CAACC,KAAK,CAACV,MAAKW,OAAO,CAACH,WAAW;YAAEI,WAAW;QAAK;QAEzD,MAAMH,YAAE,CAACI,SAAS,CAACL,UAAUF,KAAKQ,MAAM;IAC1C;IAEA;;GAEC,GACD,MAAMC,kBAAkBjB,SAAiB,EAAqB;QAC5D,IAAI;YACF,MAAMkB,QAAQ,MAAMP,YAAE,CAACQ,OAAO,CAAC,IAAI,CAACpB,OAAO,CAACC;YAE5C,OAAOkB,MAAME,GAAG,CAAC,CAACZ,OAAS,IAAI,CAACT,OAAO,CAACC,WAAWQ;QACrD,EAAE,UAAM;YACN,OAAO,EAAE;QACX;IACF;IAEA;;GAEC,GACD,MAAMa,iBAAiBrB,SAAiB,EAAEkB,KAAe,EAAiB;QACxE,MAAMI,QAAQC,GAAG,CACfL,MAAME,GAAG,CAAC,OAAOZ;YACf,IAAI;gBACF,MAAMG,YAAE,CAACa,MAAM,CACb,IAAI,CAACzB,OAAO,CAACC,WAAW,IAAI,CAACK,iBAAiB,CAACG;YAEnD,EAAE,OAAOiB,KAAK;gBACZC,QAAQC,KAAK,CAACF;YAChB;QACF;IAEJ;IAEA;;GAEC,GACD,MAAMG,sBAAsB,EAC1B5B,SAAS,EACT6B,GAAG,EACHC,OAAO,EACP7B,QAAQ,EAMT,EAAiB;QAChB,IAAI;YACF,MAAM8B,WAAW,MAAMC,IAAAA,cAAK,EAAC;gBAC3BF,SAAS,eACJA;gBAELG,QAAQ;gBACRC,cAAc;gBACdL;YACF;YAEA,MAAMpB,iBAAiBH,IAAAA,sBAAU,EAACL;YAElC,MAAMS,WAAW,IAAI,CAACX,OAAO,CAACC,WAAWS;YACzC,MAAME,YAAE,CAACC,KAAK,CAACV,MAAKW,OAAO,CAACH,WAAW;gBAAEI,WAAW;YAAK;YAEzD,MAAMqB,SAASC,IAAAA,qBAAiB,EAAC1B;YAEjC,OAAO,IAAIY,QAAQ,CAACe,SAASC;gBAC3BP,SAASQ,IAAI,CAACC,IAAI,CAACL;gBACnBA,OAAOM,EAAE,CAAC,UAAU;oBAClBN,OAAOO,KAAK;oBACZL;gBACF;gBACAF,OAAOM,EAAE,CAAC,SAAS,CAAChB;oBAClBU,OAAOO,KAAK;oBACZJ,OAAOb;gBACT;YACF;QACF,EAAE,OAAOE,OAAO;YACdD,QAAQC,KAAK,CAAC,yBAAyBA;YACvC,MAAMA;QACR;IACF;IA7GAgB,YAAYC,MAA4B,CAAE;YACzBA;QAAf,IAAI,CAACxC,OAAO,GAAGwC,CAAAA,kBAAAA,OAAOxC,OAAO,YAAdwC,kBAAkB;QACjCjC,YAAE,CAACC,KAAK,CAAC,IAAI,CAACR,OAAO,EAAE;YAAEU,WAAW;QAAK,GAAG+B,KAAK,CAACnB,QAAQC,KAAK;IACjE;AA2GF"}

package/src/node-sql-to-serialization.js.map
CHANGED

@@ -1 +1 @@
-
{"version":3,"sources":["../../../meerkat-node/src/node-sql-to-serialization.ts"],"sourcesContent":["import { SelectStatement } from '@devrev/meerkat-core';\nimport { duckdbExec } from './duckdb-exec';\n\nexport interface ParsedSerialization {\n [key: string]: {\n error: boolean;\n statements: SelectStatement[];\n };\n}\n\nexport const nodeSQLToSerialization = async (\n sql: string\n): Promise<ParsedSerialization> => {\n const queryOutput = await duckdbExec(sql);\n\n const parsedOutput: ParsedSerialization = {};\n\n for (const key in queryOutput[0]) {\n if (Object.prototype.hasOwnProperty.call(queryOutput[0], key)) {\n parsedOutput[key] = JSON.parse(queryOutput[0][key] as string) as {\n error: boolean;\n statements: SelectStatement[];\n };\n }\n }\n return parsedOutput;\n};\n"],"names":["nodeSQLToSerialization","sql","queryOutput","duckdbExec","parsedOutput","key","Object","prototype","hasOwnProperty","call","JSON","parse"],"mappings":";+BAUaA;;;eAAAA;;;4BATc;AASpB,MAAMA,yBAAyB,OACpCC;IAEA,MAAMC,cAAc,MAAMC,IAAAA,sBAAU,
+
{"version":3,"sources":["../../../meerkat-node/src/node-sql-to-serialization.ts"],"sourcesContent":["import { SelectStatement } from '@devrev/meerkat-core';\nimport { duckdbExec } from './duckdb-exec';\n\nexport interface ParsedSerialization {\n [key: string]: {\n error: boolean;\n statements: SelectStatement[];\n };\n}\n\nexport const nodeSQLToSerialization = async (\n sql: string\n): Promise<ParsedSerialization> => {\n const queryOutput = await duckdbExec<\n {\n [key: string]: string;\n }[]\n >(sql);\n\n const parsedOutput: ParsedSerialization = {};\n\n for (const key in queryOutput[0]) {\n if (Object.prototype.hasOwnProperty.call(queryOutput[0], key)) {\n parsedOutput[key] = JSON.parse(queryOutput[0][key] as string) as {\n error: boolean;\n statements: SelectStatement[];\n };\n }\n }\n return parsedOutput;\n};\n"],"names":["nodeSQLToSerialization","sql","queryOutput","duckdbExec","parsedOutput","key","Object","prototype","hasOwnProperty","call","JSON","parse"],"mappings":";+BAUaA;;;eAAAA;;;4BATc;AASpB,MAAMA,yBAAyB,OACpCC;IAEA,MAAMC,cAAc,MAAMC,IAAAA,sBAAU,EAIlCF;IAEF,MAAMG,eAAoC,CAAC;IAE3C,IAAK,MAAMC,OAAOH,WAAW,CAAC,EAAE,CAAE;QAChC,IAAII,OAAOC,SAAS,CAACC,cAAc,CAACC,IAAI,CAACP,WAAW,CAAC,EAAE,EAAEG,MAAM;YAC7DD,YAAY,CAACC,IAAI,GAAGK,KAAKC,KAAK,CAACT,WAAW,CAAC,EAAE,CAACG,IAAI;QAIpD;IACF;IACA,OAAOD;AACT"}

package/src/utils/duckdb-type-convertor.d.ts
CHANGED

@@ -1,6 +1,3 @@
-import {
-export declare const convertDuckDBValueToJS: (field:
-export declare const
-    name: string;
-    type: DuckDBType;
-}[]) => Record<string, unknown>[];
+import { ColumnInfo, TableData, TypeInfo } from 'duckdb';
+export declare const convertDuckDBValueToJS: (field: TypeInfo, value: unknown) => unknown;
+export declare const convertTableDataToJSON: (data: TableData, columns: ColumnInfo[]) => Record<string, unknown>[];

package/src/utils/duckdb-type-convertor.js
CHANGED

@@ -9,47 +9,47 @@ _export(exports, {
     convertDuckDBValueToJS: function() {
         return convertDuckDBValueToJS;
     },
-
-        return
+    convertTableDataToJSON: function() {
+        return convertTableDataToJSON;
     }
 });
-const _nodeapi = require("@duckdb/node-api");
 const _lodash = require("lodash");
 const convertDuckDBValueToJS = (field, value)=>{
-    if ((0, _lodash.isNil)(value)) return
-    switch(field.
-        case
+    if ((0, _lodash.isNil)(value)) return null;
+    switch(field.id){
+        case 'SQL_NULL':
             return null;
-        case
-        case
-        case
+        case 'DATE':
+        case 'TIME':
+        case 'TIMESTAMP':
             return new Date(value).toISOString();
-        case
-        case
+        case 'FLOAT':
+        case 'DOUBLE':
             return value;
-        case
-        case
-        case
-        case
-        case
-        case
-        case
-        case
-
-        case
+        case 'INTEGER':
+        case 'TINYINT':
+        case 'SMALLINT':
+        case 'BIGINT':
+        case 'UTINYINT':
+        case 'USMALLINT':
+        case 'UINTEGER':
+        case 'UBIGINT':
+        case 'HUGEINT':
+        case 'UHUGEINT':
+            return parseInt(value.toString(), 10);
+        case 'DECIMAL':
             return parseFloat(value.toString());
-        case
+        case 'LIST':
             {
                 if (!value) return [];
                 const listValue = value;
-                return listValue.
+                return listValue.map((item)=>convertDuckDBValueToJS(field.child, item));
             }
         default:
             return value;
     }
 };
-const
-    console.log(data);
+const convertTableDataToJSON = (data, columns)=>{
     return data.map((row)=>{
         return columns.reduce((acc, column)=>{
             acc[column.name] = convertDuckDBValueToJS(column.type, row[column.name]);

package/src/utils/duckdb-type-convertor.js.map
CHANGED

@@ -1 +1 @@
-{"version":3,"sources":["../../../../meerkat-node/src/utils/duckdb-type-convertor.ts"],"sourcesContent":["import {
+
{"version":3,"sources":["../../../../meerkat-node/src/utils/duckdb-type-convertor.ts"],"sourcesContent":["import { ColumnInfo, ListTypeInfo, TableData, TypeInfo } from 'duckdb';\nimport { isNil } from 'lodash';\n\nexport const convertDuckDBValueToJS = (\n field: TypeInfo,\n value: unknown\n): unknown => {\n if (isNil(value)) return null;\n\n switch (field.id) {\n case 'SQL_NULL':\n return null;\n case 'DATE':\n case 'TIME':\n case 'TIMESTAMP':\n return new Date(value as string).toISOString();\n case 'FLOAT':\n case 'DOUBLE':\n return value;\n case 'INTEGER':\n case 'TINYINT':\n case 'SMALLINT':\n case 'BIGINT':\n case 'UTINYINT':\n case 'USMALLINT':\n case 'UINTEGER':\n case 'UBIGINT':\n case 'HUGEINT':\n case 'UHUGEINT':\n return parseInt((value as object).toString(), 10);\n case 'DECIMAL':\n return parseFloat((value as object).toString());\n case 'LIST': {\n if (!value) return [];\n const listValue = value as [];\n return listValue.map((item) =>\n convertDuckDBValueToJS((field as ListTypeInfo).child, item)\n );\n }\n default:\n return value;\n }\n};\n\nexport const convertTableDataToJSON = (\n data: TableData,\n columns: ColumnInfo[]\n): Record<string, unknown>[] => {\n return data.map((row: Record<string, unknown>) => {\n return columns.reduce((acc, column) => {\n acc[column.name] = convertDuckDBValueToJS(column.type, row[column.name]);\n return acc;\n }, {} as Record<string, unknown>);\n });\n};\n"],"names":["convertDuckDBValueToJS","convertTableDataToJSON","field","value","isNil","id","Date","toISOString","parseInt","toString","parseFloat","listValue","map","item","child","data","columns","row","reduce","acc","column","name","type"],"mappings":";;;;;;;;IAGaA,sBAAsB;eAAtBA;;IAyCAC,sBAAsB;eAAtBA;;;wBA3CS;AAEf,MAAMD,yBAAyB,CACpCE,OACAC;IAEA,IAAIC,IAAAA,aAAK,EAACD,QAAQ,OAAO;IAEzB,OAAQD,MAAMG,EAAE;QACd,KAAK;YACH,OAAO;QACT,KAAK;QACL,KAAK;QACL,KAAK;YACH,OAAO,IAAIC,KAAKH,OAAiBI,WAAW;QAC9C,KAAK;QACL,KAAK;YACH,OAAOJ;QACT,KAAK;QACL,KAAK;QACL,KAAK;QACL,KAAK;QACL,KAAK;QACL,KAAK;QACL,KAAK;QACL,KAAK;QACL,KAAK;QACL,KAAK;YACH,OAAOK,SAAS,AAACL,MAAiBM,QAAQ,IAAI;QAChD,KAAK;YACH,OAAOC,WAAW,AAACP,MAAiBM,QAAQ;QAC9C,KAAK;YAAQ;gBACX,IAAI,CAACN,OAAO,OAAO,EAAE;gBACrB,MAAMQ,YAAYR;gBAClB,OAAOQ,UAAUC,GAAG,CAAC,CAACC,OACpBb,uBAAuB,AAACE,MAAuBY,KAAK,EAAED;YAE1D;QACA;YACE,OAAOV;IACX;AACF;AAEO,MAAMF,yBAAyB,CACpCc,MACAC;IAEA,OAAOD,KAAKH,GAAG,CAAC,CAACK;QACf,OAAOD,QAAQE,MAAM,CAAC,CAACC,KAAKC;YAC1BD,GAAG,CAACC,OAAOC,IAAI,CAAC,GAAGrB,uBAAuBoB,OAAOE,IAAI,EAAEL,GAAG,CAACG,OAAOC,IAAI,CAAC;YACvE,OAAOF;QACT,GAAG,CAAC;IACN;AACF"}

package/src/utils/hash-string.js
ADDED

@@ -0,0 +1,13 @@
+"use strict";
+Object.defineProperty(exports, "hashString", {
+    enumerable: true,
+    get: function() {
+        return hashString;
+    }
+});
+const _crypto = require("crypto");
+const hashString = (text)=>{
+    return _crypto.createHash('md5').update(text).digest('hex');
+};
+
+//# sourceMappingURL=hash-string.js.map

package/src/utils/hash-string.js.map
ADDED

@@ -0,0 +1 @@
+
{"version":3,"sources":["../../../../meerkat-node/src/utils/hash-string.ts"],"sourcesContent":["import * as crypto from 'crypto';\n\n/**\n * Hash a string using MD5.\n */\nexport const hashString = (text: string): string => {\n return crypto.createHash('md5').update(text).digest('hex');\n};\n"],"names":["hashString","text","crypto","createHash","update","digest"],"mappings":";+BAKaA;;;eAAAA;;;wBALW;AAKjB,MAAMA,aAAa,CAACC;IACzB,OAAOC,QAAOC,UAAU,CAAC,OAAOC,MAAM,CAACH,MAAMI,MAAM,CAAC;AACtD"}

package/src/utils/convert-rows-to-records.js
DELETED

@@ -1,17 +0,0 @@
-"use strict";
-Object.defineProperty(exports, "convertRowsToRecords", {
-    enumerable: true,
-    get: function() {
-        return convertRowsToRecords;
-    }
-});
-const convertRowsToRecords = (rows, columnNames)=>{
-    return rows.map((row)=>{
-        return columnNames.reduce((obj, columnName, index)=>{
-            obj[columnName] = row[index];
-            return obj;
-        }, {});
-    });
-};
-
-//# sourceMappingURL=convert-rows-to-records.js.map

package/src/utils/convert-rows-to-records.js.map
DELETED

@@ -1 +0,0 @@
-
{"version":3,"sources":["../../../../meerkat-node/src/utils/convert-rows-to-records.ts"],"sourcesContent":["import { DuckDBValue } from '@duckdb/node-api';\n\nexport const convertRowsToRecords = (\n rows: DuckDBValue[][],\n columnNames: string[]\n) => {\n return rows.map((row) => {\n return columnNames.reduce((obj, columnName, index) => {\n obj[columnName] = row[index];\n return obj;\n }, {} as Record<string, DuckDBValue>);\n });\n};\n"],"names":["convertRowsToRecords","rows","columnNames","map","row","reduce","obj","columnName","index"],"mappings":";+BAEaA;;;eAAAA;;;AAAN,MAAMA,uBAAuB,CAClCC,MACAC;IAEA,OAAOD,KAAKE,GAAG,CAAC,CAACC;QACf,OAAOF,YAAYG,MAAM,CAAC,CAACC,KAAKC,YAAYC;YAC1CF,GAAG,CAACC,WAAW,GAAGH,GAAG,CAACI,MAAM;YAC5B,OAAOF;QACT,GAAG,CAAC;IACN;AACF"}

package/src/utils/encrypt-string.js
DELETED

@@ -1,39 +0,0 @@
-"use strict";
-function _export(target, all) {
-    for(var name in all)Object.defineProperty(target, name, {
-        enumerable: true,
-        get: all[name]
-    });
-}
-_export(exports, {
-    encryptString: function() {
-        return encryptString;
-    },
-    decryptString: function() {
-        return decryptString;
-    }
-});
-const _crypto = require("crypto");
-const ENCRYPTION_KEY = _crypto.randomBytes(32);
-const ALGORITHM = 'aes-256-cbc';
-const encryptString = (text)=>{
-    const iv = _crypto.randomBytes(16);
-    const cipher = _crypto.createCipheriv(ALGORITHM, ENCRYPTION_KEY, iv);
-    const encrypted = Buffer.concat([
-        cipher.update(text),
-        cipher.final()
-    ]);
-    return iv.toString('hex') + encrypted.toString('hex');
-};
-const decryptString = (encryptedText)=>{
-    // First 32 chars are IV (16 bytes in hex)
-    const iv = encryptedText.slice(0, 32);
-    const encrypted = Buffer.from(encryptedText.slice(32), 'hex');
-    const decipher = _crypto.createDecipheriv(ALGORITHM, ENCRYPTION_KEY, iv);
-    return Buffer.concat([
-        decipher.update(encrypted),
-        decipher.final()
-    ]).toString();
-};
-
-//# sourceMappingURL=encrypt-string.js.map

package/src/utils/encrypt-string.js.map
DELETED

@@ -1 +0,0 @@
-
{"version":3,"sources":["../../../../meerkat-node/src/utils/encrypt-string.ts"],"sourcesContent":["import * as crypto from 'crypto';\n\nconst ENCRYPTION_KEY = crypto.randomBytes(32);\nconst ALGORITHM = 'aes-256-cbc';\n\n/**\n * Encrypts a given string\n */\nexport const encryptString = (text: string): string => {\n const iv = crypto.randomBytes(16);\n\n const cipher = crypto.createCipheriv(ALGORITHM, ENCRYPTION_KEY, iv);\n\n const encrypted = Buffer.concat([cipher.update(text), cipher.final()]);\n\n return iv.toString('hex') + encrypted.toString('hex');\n};\n\n/**\n * Decrypts an encrypted string\n */\nexport const decryptString = (encryptedText: string): string => {\n // First 32 chars are IV (16 bytes in hex)\n const iv = encryptedText.slice(0, 32);\n\n const encrypted = Buffer.from(encryptedText.slice(32), 'hex');\n\n const decipher = crypto.createDecipheriv(ALGORITHM, ENCRYPTION_KEY, iv);\n\n return Buffer.concat([\n decipher.update(encrypted),\n decipher.final(),\n ]).toString();\n};\n"],"names":["encryptString","decryptString","ENCRYPTION_KEY","crypto","randomBytes","ALGORITHM","text","iv","cipher","createCipheriv","encrypted","Buffer","concat","update","final","toString","encryptedText","slice","from","decipher","createDecipheriv"],"mappings":";;;;;;;;IAQaA,aAAa;eAAbA;;IAaAC,aAAa;eAAbA;;;wBArBW;AAExB,MAAMC,iBAAiBC,QAAOC,WAAW,CAAC;AAC1C,MAAMC,YAAY;AAKX,MAAML,gBAAgB,CAACM;IAC5B,MAAMC,KAAKJ,QAAOC,WAAW,CAAC;IAE9B,MAAMI,SAASL,QAAOM,cAAc,CAACJ,WAAWH,gBAAgBK;IAEhE,MAAMG,YAAYC,OAAOC,MAAM,CAAC;QAACJ,OAAOK,MAAM,CAACP;QAAOE,OAAOM,KAAK;KAAG;IAErE,OAAOP,GAAGQ,QAAQ,CAAC,SAASL,UAAUK,QAAQ,CAAC;AACjD;AAKO,MAAMd,gBAAgB,CAACe;IAC5B,0CAA0C;IAC1C,MAAMT,KAAKS,cAAcC,KAAK,CAAC,GAAG;IAElC,MAAMP,YAAYC,OAAOO,IAAI,CAACF,cAAcC,KAAK,CAAC,KAAK;IAEvD,MAAME,WAAWhB,QAAOiB,gBAAgB,CAACf,WAAWH,gBAAgBK;IAEpE,OAAOI,OAAOC,MAAM,CAAC;QACnBO,SAASN,MAAM,CAACH;QAChBS,SAASL,KAAK;KACf,EAAEC,QAAQ;AACb"}

package/src/utils/transform-duckdb-result.d.ts
DELETED

@@ -1,13 +0,0 @@
-import { DuckDBResult, DuckDBType } from '@duckdb/node-api';
-export interface ColumnMetadata {
-    name: string;
-    type: DuckDBType;
-}
-export interface QueryResult {
-    data: Record<string, unknown>[];
-    schema: ColumnMetadata[];
-}
-/**
- * Converts raw DuckDB query results into a structured format with named objects
- */
-export declare const transformDuckDBQueryResult: (result: DuckDBResult) => Promise<QueryResult>;

package/src/utils/transform-duckdb-result.js
DELETED

@@ -1,26 +0,0 @@
-"use strict";
-Object.defineProperty(exports, "transformDuckDBQueryResult", {
-    enumerable: true,
-    get: function() {
-        return transformDuckDBQueryResult;
-    }
-});
-const _convertrowstorecords = require("./convert-rows-to-records");
-const _duckdbtypeconvertor = require("./duckdb-type-convertor");
-const transformDuckDBQueryResult = async (result)=>{
-    const columnNames = result.columnNames();
-    const columnTypes = result.columnTypes();
-    const columns = columnNames.map((name, index)=>({
-            name,
-            type: columnTypes[index]
-        }));
-    const rows = await result.getRows();
-    const records = (0, _convertrowstorecords.convertRowsToRecords)(rows, result.columnNames());
-    const data = (0, _duckdbtypeconvertor.convertRecordDuckDBValueToJSON)(records, columns);
-    return {
-        data,
-        schema: columns
-    };
-};
-
-//# sourceMappingURL=transform-duckdb-result.js.map

package/src/utils/transform-duckdb-result.js.map
DELETED

@@ -1 +0,0 @@
-
{"version":3,"sources":["../../../../meerkat-node/src/utils/transform-duckdb-result.ts"],"sourcesContent":["import { DuckDBResult, DuckDBType } from '@duckdb/node-api';\nimport { convertRowsToRecords } from './convert-rows-to-records';\nimport { convertRecordDuckDBValueToJSON } from './duckdb-type-convertor';\n\nexport interface ColumnMetadata {\n name: string;\n type: DuckDBType;\n}\n\nexport interface QueryResult {\n data: Record<string, unknown>[];\n schema: ColumnMetadata[];\n}\n\n/**\n * Converts raw DuckDB query results into a structured format with named objects\n */\nexport const transformDuckDBQueryResult = async (\n result: DuckDBResult\n): Promise<QueryResult> => {\n const columnNames = result.columnNames();\n const columnTypes = result.columnTypes();\n\n const columns = columnNames.map((name, index) => ({\n name,\n type: columnTypes[index],\n }));\n\n const rows = await result.getRows();\n\n const records = convertRowsToRecords(rows, result.columnNames());\n\n const data = convertRecordDuckDBValueToJSON(records, columns);\n\n return { data, schema: columns };\n};\n"],"names":["transformDuckDBQueryResult","result","columnNames","columnTypes","columns","map","name","index","type","rows","getRows","records","convertRowsToRecords","data","convertRecordDuckDBValueToJSON","schema"],"mappings":";+BAiBaA;;;eAAAA;;;sCAhBwB;qCACU;AAexC,MAAMA,6BAA6B,OACxCC;IAEA,MAAMC,cAAcD,OAAOC,WAAW;IACtC,MAAMC,cAAcF,OAAOE,WAAW;IAEtC,MAAMC,UAAUF,YAAYG,GAAG,CAAC,CAACC,MAAMC,QAAW,CAAA;YAChDD;YACAE,MAAML,WAAW,CAACI,MAAM;QAC1B,CAAA;IAEA,MAAME,OAAO,MAAMR,OAAOS,OAAO;IAEjC,MAAMC,UAAUC,IAAAA,0CAAoB,EAACH,MAAMR,OAAOC,WAAW;IAE7D,MAAMW,OAAOC,IAAAA,mDAA8B,EAACH,SAASP;IAErD,OAAO;QAAES;QAAME,QAAQX;IAAQ;AACjC"}