@devrev/meerkat-node 0.0.88 → 0.0.89

This diff compares the contents of publicly available package versions as they appear in their respective public registries. It is provided for informational purposes only.
package/package.json CHANGED
@@ -1,10 +1,12 @@
  {
  "name": "@devrev/meerkat-node",
- "version": "0.0.88",
+ "version": "0.0.89",
  "dependencies": {
  "@swc/helpers": "~0.5.0",
  "@devrev/meerkat-core": "*",
- "duckdb": "^0.10.2"
+ "@duckdb/node-api": "1.1.3-alpha.7",
+ "axios": "^1.6.0",
+ "lodash": "^4.17.21"
  },
  "scripts": {
  "release": "semantic-release"
@@ -1 +1 @@
- {"version":3,"sources":["../../../../meerkat-node/src/cube-to-sql/cube-to-sql.ts"],"sourcesContent":["import {\n BASE_TABLE_NAME,\n ContextParams,\n Query,\n TableSchema,\n applyFilterParamsToBaseSQL,\n applyProjectionToSQLQuery,\n astDeserializerQuery,\n cubeToDuckdbAST,\n deserializeQuery,\n detectApplyContextParamsToBaseSQL,\n getCombinedTableSchema,\n getFilterParamsSQL,\n getFinalBaseSQL\n} from '@devrev/meerkat-core';\nimport { duckdbExec } from '../duckdb-exec';\n\n\n\ninterface CubeQueryToSQLParams {\n query: Query,\n tableSchemas: TableSchema[],\n contextParams?: ContextParams\n}\n\nexport const cubeQueryToSQL = async ({\n query,\n tableSchemas,\n contextParams,\n}: CubeQueryToSQLParams) => {\n const updatedTableSchemas: TableSchema[] = await Promise.all(\n tableSchemas.map(async (schema: TableSchema) => {\n const baseFilterParamsSQL = await getFinalBaseSQL({query, tableSchema: schema, getQueryOutput: duckdbExec });\n return {\n ...schema,\n sql: baseFilterParamsSQL,\n };\n })\n );\n\n const updatedTableSchema = await getCombinedTableSchema(\n updatedTableSchemas,\n query\n );\n\n const ast = cubeToDuckdbAST(query, updatedTableSchema);\n if (!ast) {\n throw new Error('Could not generate AST');\n }\n\n const queryTemp = astDeserializerQuery(ast);\n\n const queryOutput = await duckdbExec<\n {\n [key: string]: string;\n }[]\n >(queryTemp);\n const preBaseQuery = deserializeQuery(queryOutput);\n\n const filterParamsSQL = await getFilterParamsSQL({\n query,\n tableSchema: updatedTableSchema,\n getQueryOutput: duckdbExec,\n });\n\n const filterParamQuery = applyFilterParamsToBaseSQL(\n updatedTableSchema.sql,\n filterParamsSQL\n );\n\n /**\n * Replace CONTEXT_PARAMS with context params\n */\n const baseQuery = detectApplyContextParamsToBaseSQL(\n filterParamQuery,\n contextParams || {}\n );\n\n /**\n * Replace BASE_TABLE_NAME with cube query\n */\n const replaceBaseTableName = preBaseQuery.replace(\n BASE_TABLE_NAME,\n `(${baseQuery}) AS ${updatedTableSchema.name}`\n );\n\n /**\n * Add measures to the query\n */\n const measures = query.measures;\n const dimensions = query.dimensions || [];\n const finalQuery = applyProjectionToSQLQuery(\n dimensions,\n measures,\n updatedTableSchema,\n replaceBaseTableName\n );\n\n return 
finalQuery;\n};\n"],"names":["cubeQueryToSQL","query","tableSchemas","contextParams","updatedTableSchemas","Promise","all","map","schema","baseFilterParamsSQL","getFinalBaseSQL","tableSchema","getQueryOutput","duckdbExec","sql","updatedTableSchema","getCombinedTableSchema","ast","cubeToDuckdbAST","Error","queryTemp","astDeserializerQuery","queryOutput","preBaseQuery","deserializeQuery","filterParamsSQL","getFilterParamsSQL","filterParamQuery","applyFilterParamsToBaseSQL","baseQuery","detectApplyContextParamsToBaseSQL","replaceBaseTableName","replace","BASE_TABLE_NAME","name","measures","dimensions","finalQuery","applyProjectionToSQLQuery"],"mappings":";+BAyBaA;;;eAAAA;;;;6BAXN;4BACoB;AAUpB,MAAMA,iBAAiB,OAAO,EACnCC,KAAK,EACLC,YAAY,EACZC,aAAa,EACQ;IACrB,MAAMC,sBAAqC,MAAMC,QAAQC,GAAG,CAC1DJ,aAAaK,GAAG,CAAC,OAAOC;QACtB,MAAMC,sBAAsB,MAAMC,IAAAA,4BAAe,EAAC;YAACT;YAAOU,aAAaH;YAAQI,gBAAgBC,sBAAU;QAAC;QAC1G,OAAO,eACFL;YACHM,KAAKL;;IAET;IAGF,MAAMM,qBAAqB,MAAMC,IAAAA,mCAAsB,EACrDZ,qBACAH;IAGF,MAAMgB,MAAMC,IAAAA,4BAAe,EAACjB,OAAOc;IACnC,IAAI,CAACE,KAAK;QACR,MAAM,IAAIE,MAAM;IAClB;IAEA,MAAMC,YAAYC,IAAAA,iCAAoB,EAACJ;IAEvC,MAAMK,cAAc,MAAMT,IAAAA,sBAAU,EAIlCO;IACF,MAAMG,eAAeC,IAAAA,6BAAgB,EAACF;IAEtC,MAAMG,kBAAkB,MAAMC,IAAAA,+BAAkB,EAAC;QAC/CzB;QACAU,aAAaI;QACbH,gBAAgBC,sBAAU;IAC5B;IAEA,MAAMc,mBAAmBC,IAAAA,uCAA0B,EACjDb,mBAAmBD,GAAG,EACtBW;IAGF;;GAEC,GACD,MAAMI,YAAYC,IAAAA,8CAAiC,EACjDH,kBACAxB,iBAAiB,CAAC;IAGpB;;GAEC,GACD,MAAM4B,uBAAuBR,aAAaS,OAAO,CAC/CC,4BAAe,EACf,CAAC,CAAC,EAAEJ,UAAU,KAAK,EAAEd,mBAAmBmB,IAAI,CAAC,CAAC;IAGhD;;GAEC,GACD,MAAMC,WAAWlC,MAAMkC,QAAQ;IAC/B,MAAMC,aAAanC,MAAMmC,UAAU,IAAI,EAAE;IACzC,MAAMC,aAAaC,IAAAA,sCAAyB,EAC1CF,YACAD,UACApB,oBACAgB;IAGF,OAAOM;AACT"}
+ {"version":3,"sources":["../../../../meerkat-node/src/cube-to-sql/cube-to-sql.ts"],"sourcesContent":["import {\n BASE_TABLE_NAME,\n ContextParams,\n Query,\n TableSchema,\n applyFilterParamsToBaseSQL,\n applyProjectionToSQLQuery,\n astDeserializerQuery,\n cubeToDuckdbAST,\n deserializeQuery,\n detectApplyContextParamsToBaseSQL,\n getCombinedTableSchema,\n getFilterParamsSQL,\n getFinalBaseSQL,\n} from '@devrev/meerkat-core';\nimport { duckdbExec } from '../duckdb-exec';\n\ninterface CubeQueryToSQLParams {\n query: Query;\n tableSchemas: TableSchema[];\n contextParams?: ContextParams;\n}\n\nexport const cubeQueryToSQL = async ({\n query,\n tableSchemas,\n contextParams,\n}: CubeQueryToSQLParams) => {\n const updatedTableSchemas: TableSchema[] = await Promise.all(\n tableSchemas.map(async (schema: TableSchema) => {\n const baseFilterParamsSQL = await getFinalBaseSQL({\n query,\n tableSchema: schema,\n getQueryOutput: duckdbExec,\n });\n return {\n ...schema,\n sql: baseFilterParamsSQL,\n };\n })\n );\n\n const updatedTableSchema = await getCombinedTableSchema(\n updatedTableSchemas,\n query\n );\n\n const ast = cubeToDuckdbAST(query, updatedTableSchema);\n if (!ast) {\n throw new Error('Could not generate AST');\n }\n\n const queryTemp = astDeserializerQuery(ast);\n\n const queryOutput = (await duckdbExec(queryTemp)) as Record<string, string>[];\n const preBaseQuery = deserializeQuery(queryOutput);\n\n const filterParamsSQL = await getFilterParamsSQL({\n query,\n tableSchema: updatedTableSchema,\n getQueryOutput: duckdbExec,\n });\n\n const filterParamQuery = applyFilterParamsToBaseSQL(\n updatedTableSchema.sql,\n filterParamsSQL\n );\n\n /**\n * Replace CONTEXT_PARAMS with context params\n */\n const baseQuery = detectApplyContextParamsToBaseSQL(\n filterParamQuery,\n contextParams || {}\n );\n\n /**\n * Replace BASE_TABLE_NAME with cube query\n */\n const replaceBaseTableName = preBaseQuery.replace(\n BASE_TABLE_NAME,\n `(${baseQuery}) AS ${updatedTableSchema.name}`\n );\n\n /**\n * Add measures to the query\n */\n const measures = query.measures;\n const dimensions = query.dimensions || [];\n const finalQuery = applyProjectionToSQLQuery(\n dimensions,\n measures,\n updatedTableSchema,\n replaceBaseTableName\n );\n\n return 
finalQuery;\n};\n"],"names":["cubeQueryToSQL","query","tableSchemas","contextParams","updatedTableSchemas","Promise","all","map","schema","baseFilterParamsSQL","getFinalBaseSQL","tableSchema","getQueryOutput","duckdbExec","sql","updatedTableSchema","getCombinedTableSchema","ast","cubeToDuckdbAST","Error","queryTemp","astDeserializerQuery","queryOutput","preBaseQuery","deserializeQuery","filterParamsSQL","getFilterParamsSQL","filterParamQuery","applyFilterParamsToBaseSQL","baseQuery","detectApplyContextParamsToBaseSQL","replaceBaseTableName","replace","BASE_TABLE_NAME","name","measures","dimensions","finalQuery","applyProjectionToSQLQuery"],"mappings":";+BAuBaA;;;eAAAA;;;;6BATN;4BACoB;AAQpB,MAAMA,iBAAiB,OAAO,EACnCC,KAAK,EACLC,YAAY,EACZC,aAAa,EACQ;IACrB,MAAMC,sBAAqC,MAAMC,QAAQC,GAAG,CAC1DJ,aAAaK,GAAG,CAAC,OAAOC;QACtB,MAAMC,sBAAsB,MAAMC,IAAAA,4BAAe,EAAC;YAChDT;YACAU,aAAaH;YACbI,gBAAgBC,sBAAU;QAC5B;QACA,OAAO,eACFL;YACHM,KAAKL;;IAET;IAGF,MAAMM,qBAAqB,MAAMC,IAAAA,mCAAsB,EACrDZ,qBACAH;IAGF,MAAMgB,MAAMC,IAAAA,4BAAe,EAACjB,OAAOc;IACnC,IAAI,CAACE,KAAK;QACR,MAAM,IAAIE,MAAM;IAClB;IAEA,MAAMC,YAAYC,IAAAA,iCAAoB,EAACJ;IAEvC,MAAMK,cAAe,MAAMT,IAAAA,sBAAU,EAACO;IACtC,MAAMG,eAAeC,IAAAA,6BAAgB,EAACF;IAEtC,MAAMG,kBAAkB,MAAMC,IAAAA,+BAAkB,EAAC;QAC/CzB;QACAU,aAAaI;QACbH,gBAAgBC,sBAAU;IAC5B;IAEA,MAAMc,mBAAmBC,IAAAA,uCAA0B,EACjDb,mBAAmBD,GAAG,EACtBW;IAGF;;GAEC,GACD,MAAMI,YAAYC,IAAAA,8CAAiC,EACjDH,kBACAxB,iBAAiB,CAAC;IAGpB;;GAEC,GACD,MAAM4B,uBAAuBR,aAAaS,OAAO,CAC/CC,4BAAe,EACf,CAAC,CAAC,EAAEJ,UAAU,KAAK,EAAEd,mBAAmBmB,IAAI,CAAC,CAAC;IAGhD;;GAEC,GACD,MAAMC,WAAWlC,MAAMkC,QAAQ;IAC/B,MAAMC,aAAanC,MAAMmC,UAAU,IAAI,EAAE;IACzC,MAAMC,aAAaC,IAAAA,sCAAyB,EAC1CF,YACAD,UACApB,oBACAgB;IAGF,OAAOM;AACT"}
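Note on the package.json change at the top of this diff: the package moves from the callback-based duckdb driver to the promise-based @duckdb/node-api client, and adds axios (used for file streaming) and lodash (used for value checks). A minimal sketch of the driver calls this release relies on, assuming an async context and an in-memory database:

    import { DuckDBInstance } from '@duckdb/node-api';

    // Replaces `new Database(':memory:')` from the old duckdb package.
    const db = await DuckDBInstance.create(':memory:');
    const connection = await db.connect();
    const result = await connection.run('SELECT 1 AS one'); // hypothetical query
    console.log(result.columnNames(), result.columnTypes(), await result.getRows());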
@@ -1 +1 @@
- export declare const duckdbExec: <T = unknown>(query: string) => Promise<T>;
+ export declare const duckdbExec: (query: string) => Promise<Record<string, unknown>[]>;
@@ -6,16 +6,13 @@ Object.defineProperty(exports, "duckdbExec", {
  }
  });
  const _duckdbsingleton = require("./duckdb-singleton");
- const duckdbExec = (query)=>{
- const db = _duckdbsingleton.DuckDBSingleton.getInstance();
- return new Promise((resolve, reject)=>{
- db.all(query, (err, res)=>{
- if (err) {
- reject(err);
- }
- resolve(res);
- });
- });
+ const _transformduckdbresult = require("./utils/transform-duckdb-result");
+ const duckdbExec = async (query)=>{
+ const db = await _duckdbsingleton.DuckDBSingleton.getInstance();
+ const connection = await db.connect();
+ const result = await connection.run(query);
+ const { data } = await (0, _transformduckdbresult.transformDuckDBQueryResult)(result);
+ return data;
  };

  //# sourceMappingURL=duckdb-exec.js.map
@@ -1 +1 @@
- {"version":3,"sources":["../../../meerkat-node/src/duckdb-exec.ts"],"sourcesContent":["import { DuckDBSingleton } from './duckdb-singleton';\n\nexport const duckdbExec = <T = unknown>(query: string): Promise<T> => {\n const db = DuckDBSingleton.getInstance();\n return new Promise((resolve, reject) => {\n db.all(query, (err, res) => {\n if (err) {\n reject(err);\n }\n resolve(res as T);\n });\n });\n};\n"],"names":["duckdbExec","query","db","DuckDBSingleton","getInstance","Promise","resolve","reject","all","err","res"],"mappings":";+BAEaA;;;eAAAA;;;iCAFmB;AAEzB,MAAMA,aAAa,CAAcC;IACtC,MAAMC,KAAKC,gCAAe,CAACC,WAAW;IACtC,OAAO,IAAIC,QAAQ,CAACC,SAASC;QAC3BL,GAAGM,GAAG,CAACP,OAAO,CAACQ,KAAKC;YAClB,IAAID,KAAK;gBACPF,OAAOE;YACT;YACAH,QAAQI;QACV;IACF;AACF"}
+ {"version":3,"sources":["../../../meerkat-node/src/duckdb-exec.ts"],"sourcesContent":["import { DuckDBSingleton } from './duckdb-singleton';\nimport { transformDuckDBQueryResult } from './utils/transform-duckdb-result';\n\nexport const duckdbExec = async (\n query: string\n): Promise<Record<string, unknown>[]> => {\n const db = await DuckDBSingleton.getInstance();\n const connection = await db.connect();\n\n const result = await connection.run(query);\n\n const { data } = await transformDuckDBQueryResult(result);\n\n return data;\n};\n"],"names":["duckdbExec","query","db","DuckDBSingleton","getInstance","connection","connect","result","run","data","transformDuckDBQueryResult"],"mappings":";+BAGaA;;;eAAAA;;;iCAHmB;uCACW;AAEpC,MAAMA,aAAa,OACxBC;IAEA,MAAMC,KAAK,MAAMC,gCAAe,CAACC,WAAW;IAC5C,MAAMC,aAAa,MAAMH,GAAGI,OAAO;IAEnC,MAAMC,SAAS,MAAMF,WAAWG,GAAG,CAACP;IAEpC,MAAM,EAAEQ,IAAI,EAAE,GAAG,MAAMC,IAAAA,iDAA0B,EAACH;IAElD,OAAOE;AACT"}
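The duckdbExec helper above is now async end to end: it resolves to plain records instead of wrapping the callback-based driver, and its generic type parameter is gone. A hedged sketch of a call site (duckdbExec is an internal module, not re-exported from the package index; the query is hypothetical):

    // 0.0.88: const rows = await duckdbExec<{ answer: string }[]>('SELECT ...');
    // 0.0.89: the return type is fixed to Record<string, unknown>[].
    const rows = await duckdbExec('SELECT 42 AS answer');
    // e.g. rows -> [{ answer: 42 }]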
@@ -0,0 +1,19 @@
+ import { DuckDBInstance } from '@duckdb/node-api';
+ import { QueryResult } from '../utils/transform-duckdb-result';
+ export declare class DuckDBManager {
+ private db;
+ private connection;
+ private initPromise;
+ constructor({ initializeDatabase, }: {
+ initializeDatabase?: (db: DuckDBInstance) => Promise<void>;
+ });
+ /**
+ * Initialize the DuckDB instance
+ */
+ private initialize;
+ private getConnection;
+ /**
+ * Execute a query on the DuckDB connection.
+ */
+ query(query: string): Promise<QueryResult>;
+ }
@@ -0,0 +1,44 @@
+ "use strict";
+ Object.defineProperty(exports, "DuckDBManager", {
+ enumerable: true,
+ get: function() {
+ return DuckDBManager;
+ }
+ });
+ const _duckdbsingleton = require("../duckdb-singleton");
+ const _transformduckdbresult = require("../utils/transform-duckdb-result");
+ let DuckDBManager = class DuckDBManager {
+ /**
+ * Initialize the DuckDB instance
+ */ async initialize({ initializeDatabase }) {
+ this.db = await _duckdbsingleton.DuckDBSingleton.getInstance();
+ await (initializeDatabase == null ? void 0 : initializeDatabase(this.db));
+ }
+ async getConnection() {
+ await this.initPromise;
+ if (!this.connection) {
+ var _this_db;
+ var _ref;
+ this.connection = (_ref = await ((_this_db = this.db) == null ? void 0 : _this_db.connect())) != null ? _ref : null;
+ }
+ return this.connection;
+ }
+ /**
+ * Execute a query on the DuckDB connection.
+ */ async query(query) {
+ const connection = await this.getConnection();
+ if (!connection) throw new Error('DuckDB connection not initialized');
+ const result = await connection.run(query);
+ const data = await (0, _transformduckdbresult.transformDuckDBQueryResult)(result);
+ return data;
+ }
+ constructor({ initializeDatabase }){
+ this.db = null;
+ this.connection = null;
+ this.initPromise = this.initialize({
+ initializeDatabase
+ });
+ }
+ };
+
+ //# sourceMappingURL=duckdb-manager.js.map
@@ -0,0 +1 @@
+ {"version":3,"sources":["../../../../meerkat-node/src/duckdb-manager/duckdb-manager.ts"],"sourcesContent":["import { DuckDBConnection, DuckDBInstance } from '@duckdb/node-api';\nimport { DuckDBSingleton } from '../duckdb-singleton';\nimport {\n QueryResult,\n transformDuckDBQueryResult,\n} from '../utils/transform-duckdb-result';\n\nexport class DuckDBManager {\n private db: DuckDBInstance | null = null;\n private connection: DuckDBConnection | null = null;\n\n private initPromise: Promise<void>;\n\n constructor({\n initializeDatabase,\n }: {\n initializeDatabase?: (db: DuckDBInstance) => Promise<void>;\n }) {\n this.initPromise = this.initialize({ initializeDatabase });\n }\n\n /**\n * Initialize the DuckDB instance\n */\n private async initialize({\n initializeDatabase,\n }: {\n initializeDatabase?: (db: DuckDBInstance) => Promise<void>;\n }): Promise<void> {\n this.db = await DuckDBSingleton.getInstance();\n\n await initializeDatabase?.(this.db);\n }\n\n private async getConnection(): Promise<DuckDBConnection | null> {\n await this.initPromise;\n\n if (!this.connection) {\n this.connection = (await this.db?.connect()) ?? null;\n }\n\n return this.connection;\n }\n\n /**\n * Execute a query on the DuckDB connection.\n */\n async query(query: string): Promise<QueryResult> {\n const connection = await this.getConnection();\n\n if (!connection) throw new Error('DuckDB connection not initialized');\n\n const result = await connection.run(query);\n\n const data = await transformDuckDBQueryResult(result);\n\n return data;\n }\n}\n"],"names":["DuckDBManager","initialize","initializeDatabase","db","DuckDBSingleton","getInstance","getConnection","initPromise","connection","connect","query","Error","result","run","data","transformDuckDBQueryResult","constructor"],"mappings":";+BAOaA;;;eAAAA;;;iCANmB;uCAIzB;AAEA,IAAA,AAAMA,gBAAN,MAAMA;IAcX;;GAEC,GACD,MAAcC,WAAW,EACvBC,kBAAkB,EAGnB,EAAiB;QAChB,IAAI,CAACC,EAAE,GAAG,MAAMC,gCAAe,CAACC,WAAW;QAE3C,OAAMH,sCAAAA,mBAAqB,IAAI,CAACC,EAAE;IACpC;IAEA,MAAcG,gBAAkD;QAC9D,MAAM,IAAI,CAACC,WAAW;QAEtB,IAAI,CAAC,IAAI,CAACC,UAAU,EAAE;gBACK;gBAAN;YAAnB,IAAI,CAACA,UAAU,GAAG,CAAC,OAAA,QAAM,WAAA,IAAI,CAACL,EAAE,qBAAP,SAASM,OAAO,eAAtB,OAA6B;QAClD;QAEA,OAAO,IAAI,CAACD,UAAU;IACxB;IAEA;;GAEC,GACD,MAAME,MAAMA,KAAa,EAAwB;QAC/C,MAAMF,aAAa,MAAM,IAAI,CAACF,aAAa;QAE3C,IAAI,CAACE,YAAY,MAAM,IAAIG,MAAM;QAEjC,MAAMC,SAAS,MAAMJ,WAAWK,GAAG,CAACH;QAEpC,MAAMI,OAAO,MAAMC,IAAAA,iDAA0B,EAACH;QAE9C,OAAOE;IACT;IA5CAE,YAAY,EACVd,kBAAkB,EAGnB,CAAE;aATKC,KAA4B;aAC5BK,aAAsC;QAS5C,IAAI,CAACD,WAAW,GAAG,IAAI,CAACN,UAAU,CAAC;YAAEC;QAAmB;IAC1D;AAuCF"}
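DuckDBManager is a new export (see the index changes further down). A hedged usage sketch, with a hypothetical bootstrap statement and query:

    import { DuckDBManager } from '@devrev/meerkat-node';

    const manager = new DuckDBManager({
      // Optional hook; it is awaited before the first query's connection is used.
      initializeDatabase: async (db) => {
        const conn = await db.connect();
        await conn.run('CREATE TABLE items (id INTEGER, name VARCHAR)'); // assumption: example DDL only
      },
    });

    const { data, schema } = await manager.query('SELECT COUNT(*) AS n FROM items');
    // data is Record<string, unknown>[]; schema pairs column names with DuckDBType values.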
@@ -1,4 +1,4 @@
- import { Database } from 'duckdb';
+ import { DuckDBInstance } from '@duckdb/node-api';
  /**
  * DuckDBSingleton is designed as a Singleton class, which ensures that only one Database connection exists across the entire application.
  * This reduces the overhead involved in establishing new connections for each database request.
@@ -14,5 +14,5 @@ import { Database } from 'duckdb';
  export declare class DuckDBSingleton {
  private static instance;
  private constructor();
- static getInstance(): Database;
+ static getInstance(): Promise<DuckDBInstance>;
  }
@@ -5,11 +5,11 @@ Object.defineProperty(exports, "DuckDBSingleton", {
  return DuckDBSingleton;
  }
  });
- const _duckdb = require("duckdb");
+ const _nodeapi = require("@duckdb/node-api");
  let DuckDBSingleton = class DuckDBSingleton {
- static getInstance() {
+ static async getInstance() {
  if (!DuckDBSingleton.instance) {
- DuckDBSingleton.instance = new _duckdb.Database(':memory:');
+ DuckDBSingleton.instance = await _nodeapi.DuckDBInstance.create(':memory:');
  }
  return DuckDBSingleton.instance;
  }
@@ -1 +1 @@
- {"version":3,"sources":["../../../meerkat-node/src/duckdb-singleton.ts"],"sourcesContent":["import { Database } from 'duckdb';\n\n/**\n * DuckDBSingleton is designed as a Singleton class, which ensures that only one Database connection exists across the entire application.\n * This reduces the overhead involved in establishing new connections for each database request.\n *\n * The `getInstance` method returns a DuckDB Database instance. If an instance doesn't already exist, it creates one.\n * Subsequent calls to `getInstance` will return the pre-existing instance, ensuring there is only a single connection to the DuckDB instance throughout the use of the application.\n *\n * Usage: let duckDbConnection = DuckDBSingleton.getInstance();\n *\n * Note: In case of in-memory database, `new Database(':memory:')` in getInstance method. In-memory databases are faster for read/write operations\n * but are not persistent; they lose data as soon as the program ends or the machine is turned off, which is okay for our use-case.\n */\nexport class DuckDBSingleton {\n private static instance: Database;\n\n private constructor() {\n // private to prevent direct instantiation.\n }\n\n static getInstance(): Database {\n if (!DuckDBSingleton.instance) {\n DuckDBSingleton.instance = new Database(':memory:');\n }\n return DuckDBSingleton.instance;\n }\n}\n"],"names":["DuckDBSingleton","getInstance","instance","Database"],"mappings":";+BAcaA;;;eAAAA;;;wBAdY;AAclB,IAAA,AAAMA,kBAAN,MAAMA;IAOX,OAAOC,cAAwB;QAC7B,IAAI,CAACD,gBAAgBE,QAAQ,EAAE;YAC7BF,gBAAgBE,QAAQ,GAAG,IAAIC,gBAAQ,CAAC;QAC1C;QACA,OAAOH,gBAAgBE,QAAQ;IACjC;IATA,aAAsB;IACpB,2CAA2C;IAC7C;AAQF"}
+ {"version":3,"sources":["../../../meerkat-node/src/duckdb-singleton.ts"],"sourcesContent":["import { DuckDBInstance } from '@duckdb/node-api';\n\n/**\n * DuckDBSingleton is designed as a Singleton class, which ensures that only one Database connection exists across the entire application.\n * This reduces the overhead involved in establishing new connections for each database request.\n *\n * The `getInstance` method returns a DuckDB Database instance. If an instance doesn't already exist, it creates one.\n * Subsequent calls to `getInstance` will return the pre-existing instance, ensuring there is only a single connection to the DuckDB instance throughout the use of the application.\n *\n * Usage: let duckDbConnection = DuckDBSingleton.getInstance();\n *\n * Note: In case of in-memory database, `new Database(':memory:')` in getInstance method. In-memory databases are faster for read/write operations\n * but are not persistent; they lose data as soon as the program ends or the machine is turned off, which is okay for our use-case.\n */\nexport class DuckDBSingleton {\n private static instance: DuckDBInstance;\n\n private constructor() {\n // private to prevent direct instantiation.\n }\n\n static async getInstance(): Promise<DuckDBInstance> {\n if (!DuckDBSingleton.instance) {\n DuckDBSingleton.instance = await DuckDBInstance.create(':memory:');\n }\n return DuckDBSingleton.instance;\n }\n}\n"],"names":["DuckDBSingleton","getInstance","instance","DuckDBInstance","create"],"mappings":";+BAcaA;;;eAAAA;;;yBAdkB;AAcxB,IAAA,AAAMA,kBAAN,MAAMA;IAOX,aAAaC,cAAuC;QAClD,IAAI,CAACD,gBAAgBE,QAAQ,EAAE;YAC7BF,gBAAgBE,QAAQ,GAAG,MAAMC,uBAAc,CAACC,MAAM,CAAC;QACzD;QACA,OAAOJ,gBAAgBE,QAAQ;IACjC;IATA,aAAsB;IACpB,2CAA2C;IAC7C;AAQF"}
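DuckDBSingleton.getInstance() is now async and hands back a DuckDBInstance rather than a duckdb Database, so existing callers need an await and an explicit connect step. A minimal sketch, assuming an async context:

    import { DuckDBSingleton } from '@devrev/meerkat-node';

    // 0.0.88: const db = DuckDBSingleton.getInstance(); db.all(sql, callback);
    // 0.0.89:
    const db = await DuckDBSingleton.getInstance();
    const connection = await db.connect();
    const result = await connection.run('SELECT 1'); // hypothetical query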
@@ -0,0 +1,32 @@
+ export declare class FileManager {
+ private readonly baseDir;
+ constructor(config: {
+ baseDir?: string;
+ });
+ getPath(tableName: string, fileName?: string): string;
+ /**
+ * Write a file buffer to the file system.
+ */
+ writeFileBuffer(file: {
+ tableName: string;
+ fileName: string;
+ buffer: Uint8Array;
+ }): Promise<void>;
+ /**
+ * Get the file paths for a table.
+ */
+ getTableFilePaths(tableName: string): Promise<string[]>;
+ /**
+ * Delete files from a table.
+ */
+ deleteTableFiles(tableName: string, files: string[]): Promise<void>;
+ /**
+ * Stream and register a file from a URL.
+ */
+ streamAndRegisterFile({ tableName, url, headers, fileName, }: {
+ tableName: string;
+ url: string;
+ headers: Record<string, string>;
+ fileName: string;
+ }): Promise<void>;
+ }
@@ -0,0 +1,90 @@
+ "use strict";
+ Object.defineProperty(exports, "FileManager", {
+ enumerable: true,
+ get: function() {
+ return FileManager;
+ }
+ });
+ const _extends = require("@swc/helpers/_/_extends");
+ const _axios = require("axios");
+ const _fs = require("fs");
+ const _path = require("path");
+ const _encryptstring = require("../utils/encrypt-string");
+ let FileManager = class FileManager {
+ getPath(tableName, fileName) {
+ return fileName ? _path.join(this.baseDir, tableName, fileName) : _path.join(this.baseDir, tableName);
+ }
+ /**
+ * Write a file buffer to the file system.
+ */ async writeFileBuffer(file) {
+ // Hash the file name to avoid file name length issues
+ const hashedFileName = (0, _encryptstring.encryptString)(file.fileName);
+ const filePath = this.getPath(file.tableName, hashedFileName);
+ await _fs.promises.mkdir(_path.dirname(filePath), {
+ recursive: true
+ });
+ await _fs.promises.writeFile(filePath, file.buffer);
+ }
+ /**
+ * Get the file paths for a table.
+ */ async getTableFilePaths(tableName) {
+ try {
+ const files = await _fs.promises.readdir(this.getPath(tableName));
+ return files.map((file)=>this.getPath(tableName, file));
+ } catch (e) {
+ return [];
+ }
+ }
+ /**
+ * Delete files from a table.
+ */ async deleteTableFiles(tableName, files) {
+ await Promise.all(files.map(async (file)=>{
+ try {
+ await _fs.promises.unlink(this.getPath(tableName, file));
+ } catch (err) {
+ console.error(err);
+ }
+ }));
+ }
+ /**
+ * Stream and register a file from a URL.
+ */ async streamAndRegisterFile({ tableName, url, headers, fileName }) {
+ try {
+ const response = await (0, _axios.default)({
+ headers: _extends._({}, headers),
+ method: 'get',
+ responseType: 'stream',
+ url
+ });
+ const hashedFileName = (0, _encryptstring.encryptString)(fileName);
+ const filePath = this.getPath(tableName, hashedFileName);
+ await _fs.promises.mkdir(_path.dirname(filePath), {
+ recursive: true
+ });
+ const writer = (0, _fs.createWriteStream)(filePath);
+ return new Promise((resolve, reject)=>{
+ response.data.pipe(writer);
+ writer.on('finish', ()=>{
+ writer.close();
+ resolve();
+ });
+ writer.on('error', (err)=>{
+ writer.close();
+ reject(err);
+ });
+ });
+ } catch (error) {
+ console.error('Error streaming file:', error);
+ throw error;
+ }
+ }
+ constructor(config){
+ var _config_baseDir;
+ this.baseDir = (_config_baseDir = config.baseDir) != null ? _config_baseDir : './data';
+ _fs.promises.mkdir(this.baseDir, {
+ recursive: true
+ }).catch(console.error);
+ }
+ };
+
+ //# sourceMappingURL=file-manager.js.map
@@ -0,0 +1 @@
+ {"version":3,"sources":["../../../../meerkat-node/src/file-manager/file-manager.ts"],"sourcesContent":["import axios from 'axios';\nimport { createWriteStream, promises as fs } from 'fs';\nimport * as path from 'path';\n\nimport { encryptString } from '../utils/encrypt-string';\n\nexport class FileManager {\n private readonly baseDir: string;\n\n constructor(config: { baseDir?: string }) {\n this.baseDir = config.baseDir ?? './data';\n fs.mkdir(this.baseDir, { recursive: true }).catch(console.error);\n }\n\n public getPath(tableName: string, fileName?: string): string {\n return fileName\n ? path.join(this.baseDir, tableName, fileName)\n : path.join(this.baseDir, tableName);\n }\n\n /**\n * Write a file buffer to the file system.\n */\n async writeFileBuffer(file: {\n tableName: string;\n fileName: string;\n buffer: Uint8Array;\n }): Promise<void> {\n // Hash the file name to avoid file name length issues\n const hashedFileName = encryptString(file.fileName);\n\n const filePath = this.getPath(file.tableName, hashedFileName);\n\n await fs.mkdir(path.dirname(filePath), { recursive: true });\n\n await fs.writeFile(filePath, file.buffer);\n }\n\n /**\n * Get the file paths for a table.\n */\n async getTableFilePaths(tableName: string): Promise<string[]> {\n try {\n const files = await fs.readdir(this.getPath(tableName));\n\n return files.map((file) => this.getPath(tableName, file));\n } catch {\n return [];\n }\n }\n\n /**\n * Delete files from a table.\n */\n async deleteTableFiles(tableName: string, files: string[]): Promise<void> {\n await Promise.all(\n files.map(async (file) => {\n try {\n await fs.unlink(this.getPath(tableName, file));\n } catch (err) {\n console.error(err);\n }\n })\n );\n }\n\n /**\n * Stream and register a file from a URL.\n */\n async streamAndRegisterFile({\n tableName,\n url,\n headers,\n fileName,\n }: {\n tableName: string;\n url: string;\n headers: Record<string, string>;\n fileName: string;\n }): Promise<void> {\n try {\n const response = await axios({\n headers: {\n ...headers,\n },\n method: 'get',\n responseType: 'stream',\n url,\n });\n\n const hashedFileName = encryptString(fileName);\n\n const filePath = this.getPath(tableName, hashedFileName);\n await fs.mkdir(path.dirname(filePath), { recursive: true });\n\n const writer = createWriteStream(filePath);\n\n return new Promise((resolve, reject) => {\n response.data.pipe(writer);\n writer.on('finish', () => {\n writer.close();\n resolve();\n });\n writer.on('error', (err) => {\n writer.close();\n reject(err);\n });\n });\n } catch (error) {\n console.error('Error streaming file:', error);\n throw error;\n }\n 
}\n}\n"],"names":["FileManager","getPath","tableName","fileName","path","join","baseDir","writeFileBuffer","file","hashedFileName","encryptString","filePath","fs","mkdir","dirname","recursive","writeFile","buffer","getTableFilePaths","files","readdir","map","deleteTableFiles","Promise","all","unlink","err","console","error","streamAndRegisterFile","url","headers","response","axios","method","responseType","writer","createWriteStream","resolve","reject","data","pipe","on","close","constructor","config","catch"],"mappings":";+BAMaA;;;eAAAA;;;;uBANK;oBACgC;sBAC5B;+BAEQ;AAEvB,IAAA,AAAMA,cAAN,MAAMA;IAQJC,QAAQC,SAAiB,EAAEC,QAAiB,EAAU;QAC3D,OAAOA,WACHC,MAAKC,IAAI,CAAC,IAAI,CAACC,OAAO,EAAEJ,WAAWC,YACnCC,MAAKC,IAAI,CAAC,IAAI,CAACC,OAAO,EAAEJ;IAC9B;IAEA;;GAEC,GACD,MAAMK,gBAAgBC,IAIrB,EAAiB;QAChB,sDAAsD;QACtD,MAAMC,iBAAiBC,IAAAA,4BAAa,EAACF,KAAKL,QAAQ;QAElD,MAAMQ,WAAW,IAAI,CAACV,OAAO,CAACO,KAAKN,SAAS,EAAEO;QAE9C,MAAMG,YAAE,CAACC,KAAK,CAACT,MAAKU,OAAO,CAACH,WAAW;YAAEI,WAAW;QAAK;QAEzD,MAAMH,YAAE,CAACI,SAAS,CAACL,UAAUH,KAAKS,MAAM;IAC1C;IAEA;;GAEC,GACD,MAAMC,kBAAkBhB,SAAiB,EAAqB;QAC5D,IAAI;YACF,MAAMiB,QAAQ,MAAMP,YAAE,CAACQ,OAAO,CAAC,IAAI,CAACnB,OAAO,CAACC;YAE5C,OAAOiB,MAAME,GAAG,CAAC,CAACb,OAAS,IAAI,CAACP,OAAO,CAACC,WAAWM;QACrD,EAAE,UAAM;YACN,OAAO,EAAE;QACX;IACF;IAEA;;GAEC,GACD,MAAMc,iBAAiBpB,SAAiB,EAAEiB,KAAe,EAAiB;QACxE,MAAMI,QAAQC,GAAG,CACfL,MAAME,GAAG,CAAC,OAAOb;YACf,IAAI;gBACF,MAAMI,YAAE,CAACa,MAAM,CAAC,IAAI,CAACxB,OAAO,CAACC,WAAWM;YAC1C,EAAE,OAAOkB,KAAK;gBACZC,QAAQC,KAAK,CAACF;YAChB;QACF;IAEJ;IAEA;;GAEC,GACD,MAAMG,sBAAsB,EAC1B3B,SAAS,EACT4B,GAAG,EACHC,OAAO,EACP5B,QAAQ,EAMT,EAAiB;QAChB,IAAI;YACF,MAAM6B,WAAW,MAAMC,IAAAA,cAAK,EAAC;gBAC3BF,SAAS,eACJA;gBAELG,QAAQ;gBACRC,cAAc;gBACdL;YACF;YAEA,MAAMrB,iBAAiBC,IAAAA,4BAAa,EAACP;YAErC,MAAMQ,WAAW,IAAI,CAACV,OAAO,CAACC,WAAWO;YACzC,MAAMG,YAAE,CAACC,KAAK,CAACT,MAAKU,OAAO,CAACH,WAAW;gBAAEI,WAAW;YAAK;YAEzD,MAAMqB,SAASC,IAAAA,qBAAiB,EAAC1B;YAEjC,OAAO,IAAIY,QAAQ,CAACe,SAASC;gBAC3BP,SAASQ,IAAI,CAACC,IAAI,CAACL;gBACnBA,OAAOM,EAAE,CAAC,UAAU;oBAClBN,OAAOO,KAAK;oBACZL;gBACF;gBACAF,OAAOM,EAAE,CAAC,SAAS,CAAChB;oBAClBU,OAAOO,KAAK;oBACZJ,OAAOb;gBACT;YACF;QACF,EAAE,OAAOE,OAAO;YACdD,QAAQC,KAAK,CAAC,yBAAyBA;YACvC,MAAMA;QACR;IACF;IAvGAgB,YAAYC,MAA4B,CAAE;YACzBA;QAAf,IAAI,CAACvC,OAAO,GAAGuC,CAAAA,kBAAAA,OAAOvC,OAAO,YAAduC,kBAAkB;QACjCjC,YAAE,CAACC,KAAK,CAAC,IAAI,CAACP,OAAO,EAAE;YAAES,WAAW;QAAK,GAAG+B,KAAK,CAACnB,QAAQC,KAAK;IACjE;AAqGF"}
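FileManager is the other new export: it hashes file names via encryptString, writes buffers or streamed HTTP responses under baseDir/tableName/, and can list or delete a table's files. A hedged sketch with placeholder URL, headers, and names:

    import { FileManager } from '@devrev/meerkat-node';

    const files = new FileManager({ baseDir: './data' }); // baseDir defaults to './data' when omitted

    await files.streamAndRegisterFile({
      tableName: 'orders',
      fileName: 'orders-2024-01.parquet',           // stored under an encrypted file name
      url: 'https://example.com/orders.parquet',    // placeholder URL
      headers: { Authorization: 'Bearer <token>' }, // placeholder header
    });

    const paths = await files.getTableFilePaths('orders'); // full paths under ./data/orders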
package/src/index.d.ts CHANGED
@@ -3,3 +3,5 @@ export * from './duckdb-singleton';
  export * from './node-sql-to-serialization';
  export { convertCubeStringToTableSchema };
  import { convertCubeStringToTableSchema } from '@devrev/meerkat-core';
+ export * from './duckdb-manager/duckdb-manager';
+ export * from './file-manager/file-manager';
package/src/index.js CHANGED
@@ -10,5 +10,7 @@ _export_star._(require("./cube-to-sql/cube-to-sql"), exports);
  _export_star._(require("./duckdb-singleton"), exports);
  _export_star._(require("./node-sql-to-serialization"), exports);
  const _meerkatcore = require("@devrev/meerkat-core");
+ _export_star._(require("./duckdb-manager/duckdb-manager"), exports);
+ _export_star._(require("./file-manager/file-manager"), exports);

  //# sourceMappingURL=index.js.map
package/src/index.js.map CHANGED
@@ -1 +1 @@
- {"version":3,"sources":["../../../meerkat-node/src/index.ts"],"sourcesContent":["export * from './cube-to-sql/cube-to-sql';\nexport * from './duckdb-singleton';\nexport * from './node-sql-to-serialization';\nexport { convertCubeStringToTableSchema };\nimport { convertCubeStringToTableSchema } from '@devrev/meerkat-core';\n"],"names":["convertCubeStringToTableSchema"],"mappings":";+BAGSA;;;eAAAA,2CAA8B;;;;uBAHzB;uBACA;uBACA;6BAEiC"}
+ {"version":3,"sources":["../../../meerkat-node/src/index.ts"],"sourcesContent":["export * from './cube-to-sql/cube-to-sql';\nexport * from './duckdb-singleton';\nexport * from './node-sql-to-serialization';\nexport { convertCubeStringToTableSchema };\nimport { convertCubeStringToTableSchema } from '@devrev/meerkat-core';\nexport * from './duckdb-manager/duckdb-manager';\nexport * from './file-manager/file-manager';\n"],"names":["convertCubeStringToTableSchema"],"mappings":";+BAGSA;;;eAAAA,2CAA8B;;;;uBAHzB;uBACA;uBACA;6BAEiC;uBACjC;uBACA"}
@@ -12,7 +12,6 @@ const nodeSQLToSerialization = async (sql)=>{
  for(const key in queryOutput[0]){
  if (Object.prototype.hasOwnProperty.call(queryOutput[0], key)) {
  parsedOutput[key] = JSON.parse(queryOutput[0][key]);
- break;
  }
  }
  return parsedOutput;
@@ -1 +1 @@
- {"version":3,"sources":["../../../meerkat-node/src/node-sql-to-serialization.ts"],"sourcesContent":["import { SelectStatement } from '@devrev/meerkat-core';\nimport { duckdbExec } from './duckdb-exec';\n\nexport interface ParsedSerialization {\n [key: string]: {\n error: boolean;\n statements: SelectStatement[];\n };\n}\n\nexport const nodeSQLToSerialization = async (\n sql: string\n): Promise<ParsedSerialization> => {\n const queryOutput = await duckdbExec<\n {\n [key: string]: string;\n }[]\n >(sql);\n\n const parsedOutput: ParsedSerialization = {};\n\n for (const key in queryOutput[0]) {\n if (Object.prototype.hasOwnProperty.call(queryOutput[0], key)) {\n parsedOutput[key] = JSON.parse(queryOutput[0][key]) as {\n error: boolean;\n statements: SelectStatement[];\n };\n break;\n }\n }\n return parsedOutput;\n};\n"],"names":["nodeSQLToSerialization","sql","queryOutput","duckdbExec","parsedOutput","key","Object","prototype","hasOwnProperty","call","JSON","parse"],"mappings":";+BAUaA;;;eAAAA;;;4BATc;AASpB,MAAMA,yBAAyB,OACpCC;IAEA,MAAMC,cAAc,MAAMC,IAAAA,sBAAU,EAIlCF;IAEF,MAAMG,eAAoC,CAAC;IAE3C,IAAK,MAAMC,OAAOH,WAAW,CAAC,EAAE,CAAE;QAChC,IAAII,OAAOC,SAAS,CAACC,cAAc,CAACC,IAAI,CAACP,WAAW,CAAC,EAAE,EAAEG,MAAM;YAC7DD,YAAY,CAACC,IAAI,GAAGK,KAAKC,KAAK,CAACT,WAAW,CAAC,EAAE,CAACG,IAAI;YAIlD;QACF;IACF;IACA,OAAOD;AACT"}
+ {"version":3,"sources":["../../../meerkat-node/src/node-sql-to-serialization.ts"],"sourcesContent":["import { SelectStatement } from '@devrev/meerkat-core';\nimport { duckdbExec } from './duckdb-exec';\n\nexport interface ParsedSerialization {\n [key: string]: {\n error: boolean;\n statements: SelectStatement[];\n };\n}\n\nexport const nodeSQLToSerialization = async (\n sql: string\n): Promise<ParsedSerialization> => {\n const queryOutput = await duckdbExec(sql);\n\n const parsedOutput: ParsedSerialization = {};\n\n for (const key in queryOutput[0]) {\n if (Object.prototype.hasOwnProperty.call(queryOutput[0], key)) {\n parsedOutput[key] = JSON.parse(queryOutput[0][key] as string) as {\n error: boolean;\n statements: SelectStatement[];\n };\n }\n }\n return parsedOutput;\n};\n"],"names":["nodeSQLToSerialization","sql","queryOutput","duckdbExec","parsedOutput","key","Object","prototype","hasOwnProperty","call","JSON","parse"],"mappings":";+BAUaA;;;eAAAA;;;4BATc;AASpB,MAAMA,yBAAyB,OACpCC;IAEA,MAAMC,cAAc,MAAMC,IAAAA,sBAAU,EAACF;IAErC,MAAMG,eAAoC,CAAC;IAE3C,IAAK,MAAMC,OAAOH,WAAW,CAAC,EAAE,CAAE;QAChC,IAAII,OAAOC,SAAS,CAACC,cAAc,CAACC,IAAI,CAACP,WAAW,CAAC,EAAE,EAAEG,MAAM;YAC7DD,YAAY,CAACC,IAAI,GAAGK,KAAKC,KAAK,CAACT,WAAW,CAAC,EAAE,CAACG,IAAI;QAIpD;IACF;IACA,OAAOD;AACT"}
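The only behavioral change in nodeSQLToSerialization is the removed break: every column of the first result row is now JSON.parsed into the output map, where 0.0.88 stopped after the first key. A hedged illustration, assuming DuckDB's json_serialize_sql is what feeds it:

    import { nodeSQLToSerialization } from '@devrev/meerkat-node';

    const parsed = await nodeSQLToSerialization(
      `SELECT json_serialize_sql('SELECT 1') AS a, json_serialize_sql('SELECT 2') AS b`
    );
    // 0.0.88 populated only the first key encountered; 0.0.89 parses both parsed.a and parsed.b.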
@@ -0,0 +1,2 @@
+ import { DuckDBValue } from '@duckdb/node-api';
+ export declare const convertRowsToRecords: (rows: DuckDBValue[][], columnNames: string[]) => Record<string, DuckDBValue>[];
@@ -0,0 +1,17 @@
+ "use strict";
+ Object.defineProperty(exports, "convertRowsToRecords", {
+ enumerable: true,
+ get: function() {
+ return convertRowsToRecords;
+ }
+ });
+ const convertRowsToRecords = (rows, columnNames)=>{
+ return rows.map((row)=>{
+ return columnNames.reduce((obj, columnName, index)=>{
+ obj[columnName] = row[index];
+ return obj;
+ }, {});
+ });
+ };
+
+ //# sourceMappingURL=convert-rows-to-records.js.map
@@ -0,0 +1 @@
+ {"version":3,"sources":["../../../../meerkat-node/src/utils/convert-rows-to-records.ts"],"sourcesContent":["import { DuckDBValue } from '@duckdb/node-api';\n\nexport const convertRowsToRecords = (\n rows: DuckDBValue[][],\n columnNames: string[]\n) => {\n return rows.map((row) => {\n return columnNames.reduce((obj, columnName, index) => {\n obj[columnName] = row[index];\n return obj;\n }, {} as Record<string, DuckDBValue>);\n });\n};\n"],"names":["convertRowsToRecords","rows","columnNames","map","row","reduce","obj","columnName","index"],"mappings":";+BAEaA;;;eAAAA;;;AAAN,MAAMA,uBAAuB,CAClCC,MACAC;IAEA,OAAOD,KAAKE,GAAG,CAAC,CAACC;QACf,OAAOF,YAAYG,MAAM,CAAC,CAACC,KAAKC,YAAYC;YAC1CF,GAAG,CAACC,WAAW,GAAGH,GAAG,CAACI,MAAM;YAC5B,OAAOF;QACT,GAAG,CAAC;IACN;AACF"}
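convertRowsToRecords simply zips positional driver rows with their column names; it is an internal utility and is not re-exported from the package index. An illustrative call, assuming the function above is in scope:

    convertRowsToRecords(
      [
        [1, 'widget'],
        [2, 'gadget'],
      ],
      ['id', 'name']
    );
    // -> [{ id: 1, name: 'widget' }, { id: 2, name: 'gadget' }]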
@@ -0,0 +1,6 @@
+ import { DuckDBType, DuckDBValue } from '@duckdb/node-api';
+ export declare const convertDuckDBValueToJS: (field: DuckDBType, value: DuckDBValue) => unknown;
+ export declare const convertRecordDuckDBValueToJSON: (data: Record<string, DuckDBValue>[], columns: {
+ name: string;
+ type: DuckDBType;
+ }[]) => Record<string, unknown>[];
@@ -0,0 +1,61 @@
+ "use strict";
+ function _export(target, all) {
+ for(var name in all)Object.defineProperty(target, name, {
+ enumerable: true,
+ get: all[name]
+ });
+ }
+ _export(exports, {
+ convertDuckDBValueToJS: function() {
+ return convertDuckDBValueToJS;
+ },
+ convertRecordDuckDBValueToJSON: function() {
+ return convertRecordDuckDBValueToJSON;
+ }
+ });
+ const _nodeapi = require("@duckdb/node-api");
+ const _lodash = require("lodash");
+ const convertDuckDBValueToJS = (field, value)=>{
+ if ((0, _lodash.isNil)(value)) return value;
+ switch(field.typeId){
+ case _nodeapi.DuckDBTypeId.SQLNULL:
+ return null;
+ case _nodeapi.DuckDBTypeId.DATE:
+ case _nodeapi.DuckDBTypeId.TIMESTAMP:
+ case _nodeapi.DuckDBTypeId.TIME:
+ return new Date(value).toISOString();
+ case _nodeapi.DuckDBTypeId.FLOAT:
+ case _nodeapi.DuckDBTypeId.DOUBLE:
+ return value;
+ case _nodeapi.DuckDBTypeId.INTEGER:
+ case _nodeapi.DuckDBTypeId.TINYINT:
+ case _nodeapi.DuckDBTypeId.SMALLINT:
+ case _nodeapi.DuckDBTypeId.BIGINT:
+ case _nodeapi.DuckDBTypeId.UTINYINT:
+ case _nodeapi.DuckDBTypeId.USMALLINT:
+ case _nodeapi.DuckDBTypeId.UINTEGER:
+ case _nodeapi.DuckDBTypeId.UBIGINT:
+ return parseInt(value.toString());
+ case _nodeapi.DuckDBTypeId.DECIMAL:
+ return parseFloat(value.toString());
+ case _nodeapi.DuckDBTypeId.LIST:
+ {
+ if (!value) return [];
+ const listValue = value;
+ return listValue.items.map((item)=>convertDuckDBValueToJS(field.valueType, item));
+ }
+ default:
+ return value;
+ }
+ };
+ const convertRecordDuckDBValueToJSON = (data, columns)=>{
+ console.log(data);
+ return data.map((row)=>{
+ return columns.reduce((acc, column)=>{
+ acc[column.name] = convertDuckDBValueToJS(column.type, row[column.name]);
+ return acc;
+ }, {});
+ });
+ };
+
+ //# sourceMappingURL=duckdb-type-convertor.js.map
@@ -0,0 +1 @@
+ {"version":3,"sources":["../../../../meerkat-node/src/utils/duckdb-type-convertor.ts"],"sourcesContent":["import {\n DuckDBListValue,\n DuckDBType,\n DuckDBTypeId,\n DuckDBValue,\n} from '@duckdb/node-api';\nimport { isNil } from 'lodash';\n\nexport const convertDuckDBValueToJS = (\n field: DuckDBType,\n value: DuckDBValue\n): unknown => {\n if (isNil(value)) return value;\n\n switch (field.typeId) {\n case DuckDBTypeId.SQLNULL:\n return null;\n case DuckDBTypeId.DATE:\n case DuckDBTypeId.TIMESTAMP:\n case DuckDBTypeId.TIME:\n return new Date(value as number).toISOString();\n case DuckDBTypeId.FLOAT:\n case DuckDBTypeId.DOUBLE:\n return value;\n case DuckDBTypeId.INTEGER:\n case DuckDBTypeId.TINYINT:\n case DuckDBTypeId.SMALLINT:\n case DuckDBTypeId.BIGINT:\n case DuckDBTypeId.UTINYINT:\n case DuckDBTypeId.USMALLINT:\n case DuckDBTypeId.UINTEGER:\n case DuckDBTypeId.UBIGINT:\n return parseInt((value as object).toString());\n case DuckDBTypeId.DECIMAL:\n return parseFloat((value as object).toString());\n case DuckDBTypeId.LIST: {\n if (!value) return [];\n const listValue = value as DuckDBListValue;\n return listValue.items.map((item) =>\n convertDuckDBValueToJS(field.valueType, item)\n );\n }\n default:\n return value;\n }\n};\n\nexport const convertRecordDuckDBValueToJSON = (\n data: Record<string, DuckDBValue>[],\n columns: { name: string; type: DuckDBType }[]\n): Record<string, unknown>[] => {\n console.log(data);\n return data.map((row: Record<string, DuckDBValue>) => {\n return columns.reduce((acc, column) => {\n acc[column.name] = convertDuckDBValueToJS(column.type, row[column.name]);\n return acc;\n }, {} as Record<string, unknown>);\n });\n};\n"],"names":["convertDuckDBValueToJS","convertRecordDuckDBValueToJSON","field","value","isNil","typeId","DuckDBTypeId","SQLNULL","DATE","TIMESTAMP","TIME","Date","toISOString","FLOAT","DOUBLE","INTEGER","TINYINT","SMALLINT","BIGINT","UTINYINT","USMALLINT","UINTEGER","UBIGINT","parseInt","toString","DECIMAL","parseFloat","LIST","listValue","items","map","item","valueType","data","columns","console","log","row","reduce","acc","column","name","type"],"mappings":";;;;;;;;IAQaA,sBAAsB;eAAtBA;;IAuCAC,8BAA8B;eAA9BA;;;yBA1CN;wBACe;AAEf,MAAMD,yBAAyB,CACpCE,OACAC;IAEA,IAAIC,IAAAA,aAAK,EAACD,QAAQ,OAAOA;IAEzB,OAAQD,MAAMG,MAAM;QAClB,KAAKC,qBAAY,CAACC,OAAO;YACvB,OAAO;QACT,KAAKD,qBAAY,CAACE,IAAI;QACtB,KAAKF,qBAAY,CAACG,SAAS;QAC3B,KAAKH,qBAAY,CAACI,IAAI;YACpB,OAAO,IAAIC,KAAKR,OAAiBS,WAAW;QAC9C,KAAKN,qBAAY,CAACO,KAAK;QACvB,KAAKP,qBAAY,CAACQ,MAAM;YACtB,OAAOX;QACT,KAAKG,qBAAY,CAACS,OAAO;QACzB,KAAKT,qBAAY,CAACU,OAAO;QACzB,KAAKV,qBAAY,CAACW,QAAQ;QAC1B,KAAKX,qBAAY,CAACY,MAAM;QACxB,KAAKZ,qBAAY,CAACa,QAAQ;QAC1B,KAAKb,qBAAY,CAACc,SAAS;QAC3B,KAAKd,qBAAY,CAACe,QAAQ;QAC1B,KAAKf,qBAAY,CAACgB,OAAO;YACvB,OAAOC,SAAS,AAACpB,MAAiBqB,QAAQ;QAC5C,KAAKlB,qBAAY,CAACmB,OAAO;YACvB,OAAOC,WAAW,AAACvB,MAAiBqB,QAAQ;QAC9C,KAAKlB,qBAAY,CAACqB,IAAI;YAAE;gBACtB,IAAI,CAACxB,OAAO,OAAO,EAAE;gBACrB,MAAMyB,YAAYzB;gBAClB,OAAOyB,UAAUC,KAAK,CAACC,GAAG,CAAC,CAACC,OAC1B/B,uBAAuBE,MAAM8B,SAAS,EAAED;YAE5C;QACA;YACE,OAAO5B;IACX;AACF;AAEO,MAAMF,iCAAiC,CAC5CgC,MACAC;IAEAC,QAAQC,GAAG,CAACH;IACZ,OAAOA,KAAKH,GAAG,CAAC,CAACO;QACf,OAAOH,QAAQI,MAAM,CAAC,CAACC,KAAKC;YAC1BD,GAAG,CAACC,OAAOC,IAAI,CAAC,GAAGzC,uBAAuBwC,OAAOE,IAAI,EAAEL,GAAG,CAACG,OAAOC,IAAI,CAAC;YACvE,OAAOF;QACT,GAAG,CAAC;IACN;AACF"}
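convertDuckDBValueToJS switches purely on field.typeId (and on field.valueType for lists), so its rules can be illustrated with hand-built field stubs; this is illustrative only, real DuckDBType values come from result.columnTypes():

    import { DuckDBTypeId } from '@duckdb/node-api';

    // Integer-family values are stringified and re-parsed, so BIGINTs become numbers.
    convertDuckDBValueToJS({ typeId: DuckDBTypeId.BIGINT } as any, 42n);       // -> 42
    // DECIMAL goes through parseFloat of the value's string form.
    convertDuckDBValueToJS({ typeId: DuckDBTypeId.DECIMAL } as any, '1.5');    // -> 1.5
    // Types without a matching case fall through and are returned untouched.
    convertDuckDBValueToJS({ typeId: DuckDBTypeId.VARCHAR } as any, 'ok');     // -> 'ok'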
@@ -0,0 +1,8 @@
+ /**
+ * Encrypts a given string
+ */
+ export declare const encryptString: (text: string) => string;
+ /**
+ * Decrypts an encrypted string
+ */
+ export declare const decryptString: (encryptedText: string) => string;
@@ -0,0 +1,39 @@
+ "use strict";
+ function _export(target, all) {
+ for(var name in all)Object.defineProperty(target, name, {
+ enumerable: true,
+ get: all[name]
+ });
+ }
+ _export(exports, {
+ encryptString: function() {
+ return encryptString;
+ },
+ decryptString: function() {
+ return decryptString;
+ }
+ });
+ const _crypto = require("crypto");
+ const ENCRYPTION_KEY = _crypto.randomBytes(32);
+ const ALGORITHM = 'aes-256-cbc';
+ const encryptString = (text)=>{
+ const iv = _crypto.randomBytes(16);
+ const cipher = _crypto.createCipheriv(ALGORITHM, ENCRYPTION_KEY, iv);
+ const encrypted = Buffer.concat([
+ cipher.update(text),
+ cipher.final()
+ ]);
+ return iv.toString('hex') + encrypted.toString('hex');
+ };
+ const decryptString = (encryptedText)=>{
+ // First 32 chars are IV (16 bytes in hex)
+ const iv = encryptedText.slice(0, 32);
+ const encrypted = Buffer.from(encryptedText.slice(32), 'hex');
+ const decipher = _crypto.createDecipheriv(ALGORITHM, ENCRYPTION_KEY, iv);
+ return Buffer.concat([
+ decipher.update(encrypted),
+ decipher.final()
+ ]).toString();
+ };
+
+ //# sourceMappingURL=encrypt-string.js.map
@@ -0,0 +1 @@
+ {"version":3,"sources":["../../../../meerkat-node/src/utils/encrypt-string.ts"],"sourcesContent":["import * as crypto from 'crypto';\n\nconst ENCRYPTION_KEY = crypto.randomBytes(32);\nconst ALGORITHM = 'aes-256-cbc';\n\n/**\n * Encrypts a given string\n */\nexport const encryptString = (text: string): string => {\n const iv = crypto.randomBytes(16);\n\n const cipher = crypto.createCipheriv(ALGORITHM, ENCRYPTION_KEY, iv);\n\n const encrypted = Buffer.concat([cipher.update(text), cipher.final()]);\n\n return iv.toString('hex') + encrypted.toString('hex');\n};\n\n/**\n * Decrypts an encrypted string\n */\nexport const decryptString = (encryptedText: string): string => {\n // First 32 chars are IV (16 bytes in hex)\n const iv = encryptedText.slice(0, 32);\n\n const encrypted = Buffer.from(encryptedText.slice(32), 'hex');\n\n const decipher = crypto.createDecipheriv(ALGORITHM, ENCRYPTION_KEY, iv);\n\n return Buffer.concat([\n decipher.update(encrypted),\n decipher.final(),\n ]).toString();\n};\n"],"names":["encryptString","decryptString","ENCRYPTION_KEY","crypto","randomBytes","ALGORITHM","text","iv","cipher","createCipheriv","encrypted","Buffer","concat","update","final","toString","encryptedText","slice","from","decipher","createDecipheriv"],"mappings":";;;;;;;;IAQaA,aAAa;eAAbA;;IAaAC,aAAa;eAAbA;;;wBArBW;AAExB,MAAMC,iBAAiBC,QAAOC,WAAW,CAAC;AAC1C,MAAMC,YAAY;AAKX,MAAML,gBAAgB,CAACM;IAC5B,MAAMC,KAAKJ,QAAOC,WAAW,CAAC;IAE9B,MAAMI,SAASL,QAAOM,cAAc,CAACJ,WAAWH,gBAAgBK;IAEhE,MAAMG,YAAYC,OAAOC,MAAM,CAAC;QAACJ,OAAOK,MAAM,CAACP;QAAOE,OAAOM,KAAK;KAAG;IAErE,OAAOP,GAAGQ,QAAQ,CAAC,SAASL,UAAUK,QAAQ,CAAC;AACjD;AAKO,MAAMd,gBAAgB,CAACe;IAC5B,0CAA0C;IAC1C,MAAMT,KAAKS,cAAcC,KAAK,CAAC,GAAG;IAElC,MAAMP,YAAYC,OAAOO,IAAI,CAACF,cAAcC,KAAK,CAAC,KAAK;IAEvD,MAAME,WAAWhB,QAAOiB,gBAAgB,CAACf,WAAWH,gBAAgBK;IAEpE,OAAOI,OAAOC,MAAM,CAAC;QACnBO,SAASN,MAAM,CAACH;QAChBS,SAASL,KAAK;KACf,EAAEC,QAAQ;AACb"}
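encryptString derives its AES-256-CBC key from crypto.randomBytes(32) at module load, so the encrypted file names FileManager produces are only stable within a single process lifetime; nothing in this release persists the key. A tiny sketch with a hypothetical file name, assuming the function above is in scope:

    // Result is the hex IV (32 chars) followed by the hex ciphertext of the input.
    const hashed = encryptString('orders/2024-01-01.parquet');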
@@ -0,0 +1,13 @@
+ import { DuckDBResult, DuckDBType } from '@duckdb/node-api';
+ export interface ColumnMetadata {
+ name: string;
+ type: DuckDBType;
+ }
+ export interface QueryResult {
+ data: Record<string, unknown>[];
+ schema: ColumnMetadata[];
+ }
+ /**
+ * Converts raw DuckDB query results into a structured format with named objects
+ */
+ export declare const transformDuckDBQueryResult: (result: DuckDBResult) => Promise<QueryResult>;
@@ -0,0 +1,26 @@
+ "use strict";
+ Object.defineProperty(exports, "transformDuckDBQueryResult", {
+ enumerable: true,
+ get: function() {
+ return transformDuckDBQueryResult;
+ }
+ });
+ const _convertrowstorecords = require("./convert-rows-to-records");
+ const _duckdbtypeconvertor = require("./duckdb-type-convertor");
+ const transformDuckDBQueryResult = async (result)=>{
+ const columnNames = result.columnNames();
+ const columnTypes = result.columnTypes();
+ const columns = columnNames.map((name, index)=>({
+ name,
+ type: columnTypes[index]
+ }));
+ const rows = await result.getRows();
+ const records = (0, _convertrowstorecords.convertRowsToRecords)(rows, result.columnNames());
+ const data = (0, _duckdbtypeconvertor.convertRecordDuckDBValueToJSON)(records, columns);
+ return {
+ data,
+ schema: columns
+ };
+ };
+
+ //# sourceMappingURL=transform-duckdb-result.js.map
@@ -0,0 +1 @@
+ {"version":3,"sources":["../../../../meerkat-node/src/utils/transform-duckdb-result.ts"],"sourcesContent":["import { DuckDBResult, DuckDBType } from '@duckdb/node-api';\nimport { convertRowsToRecords } from './convert-rows-to-records';\nimport { convertRecordDuckDBValueToJSON } from './duckdb-type-convertor';\n\nexport interface ColumnMetadata {\n name: string;\n type: DuckDBType;\n}\n\nexport interface QueryResult {\n data: Record<string, unknown>[];\n schema: ColumnMetadata[];\n}\n\n/**\n * Converts raw DuckDB query results into a structured format with named objects\n */\nexport const transformDuckDBQueryResult = async (\n result: DuckDBResult\n): Promise<QueryResult> => {\n const columnNames = result.columnNames();\n const columnTypes = result.columnTypes();\n\n const columns = columnNames.map((name, index) => ({\n name,\n type: columnTypes[index],\n }));\n\n const rows = await result.getRows();\n\n const records = convertRowsToRecords(rows, result.columnNames());\n\n const data = convertRecordDuckDBValueToJSON(records, columns);\n\n return { data, schema: columns };\n};\n"],"names":["transformDuckDBQueryResult","result","columnNames","columnTypes","columns","map","name","index","type","rows","getRows","records","convertRowsToRecords","data","convertRecordDuckDBValueToJSON","schema"],"mappings":";+BAiBaA;;;eAAAA;;;sCAhBwB;qCACU;AAexC,MAAMA,6BAA6B,OACxCC;IAEA,MAAMC,cAAcD,OAAOC,WAAW;IACtC,MAAMC,cAAcF,OAAOE,WAAW;IAEtC,MAAMC,UAAUF,YAAYG,GAAG,CAAC,CAACC,MAAMC,QAAW,CAAA;YAChDD;YACAE,MAAML,WAAW,CAACI,MAAM;QAC1B,CAAA;IAEA,MAAME,OAAO,MAAMR,OAAOS,OAAO;IAEjC,MAAMC,UAAUC,IAAAA,0CAAoB,EAACH,MAAMR,OAAOC,WAAW;IAE7D,MAAMW,OAAOC,IAAAA,mDAA8B,EAACH,SAASP;IAErD,OAAO;QAAES;QAAME,QAAQX;IAAQ;AACjC"}
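transformDuckDBQueryResult ties the new driver to the rest of the package: duckdbExec returns its data array and DuckDBManager.query returns the full { data, schema } pair. A hedged end-to-end sketch (internal utility, not re-exported from the package index; the query is hypothetical):

    import { DuckDBInstance } from '@duckdb/node-api';

    const db = await DuckDBInstance.create(':memory:');
    const connection = await db.connect();
    const result = await connection.run('SELECT 42 AS answer');

    // transformDuckDBQueryResult as defined above.
    const { data, schema } = await transformDuckDBQueryResult(result);
    // data   -> [{ answer: 42 }]
    // schema -> [{ name: 'answer', type: /* DuckDBType with an INTEGER typeId */ }]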