@devrev/meerkat-node 0.0.88 → 0.0.90
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/package.json +4 -2
- package/src/cube-to-sql/cube-to-sql.js.map +1 -1
- package/src/duckdb-manager/duckdb-manager.d.ts +28 -0
- package/src/duckdb-manager/duckdb-manager.js +71 -0
- package/src/duckdb-manager/duckdb-manager.js.map +1 -0
- package/src/file-manager/file-manager.d.ts +33 -0
- package/src/file-manager/file-manager.js +93 -0
- package/src/file-manager/file-manager.js.map +1 -0
- package/src/index.d.ts +2 -0
- package/src/index.js +2 -0
- package/src/index.js.map +1 -1
- package/src/node-sql-to-serialization.js +0 -1
- package/src/node-sql-to-serialization.js.map +1 -1
- package/src/utils/duckdb-type-convertor.d.ts +3 -0
- package/src/utils/duckdb-type-convertor.js +61 -0
- package/src/utils/duckdb-type-convertor.js.map +1 -0
- package/src/utils/hash-string.d.ts +4 -0
- package/src/utils/hash-string.js +13 -0
- package/src/utils/hash-string.js.map +1 -0
package/package.json
CHANGED
@@ -1,10 +1,12 @@
 {
   "name": "@devrev/meerkat-node",
-  "version": "0.0.88",
+  "version": "0.0.90",
   "dependencies": {
     "@swc/helpers": "~0.5.0",
     "@devrev/meerkat-core": "*",
-    "
+    "axios": "^1.6.0",
+    "lodash": "^4.17.21",
+    "duckdb": "^1.0.0"
   },
   "scripts": {
     "release": "semantic-release"
package/src/cube-to-sql/cube-to-sql.js.map
CHANGED
@@ -1 +1 @@
-{"version":3,"sources":["../../../../meerkat-node/src/cube-to-sql/cube-to-sql.ts"],"sourcesContent":["import {\n  BASE_TABLE_NAME,\n  ContextParams,\n  Query,\n  TableSchema,\n  applyFilterParamsToBaseSQL,\n  applyProjectionToSQLQuery,\n  astDeserializerQuery,\n  cubeToDuckdbAST,\n  deserializeQuery,\n  detectApplyContextParamsToBaseSQL,\n  getCombinedTableSchema,\n  getFilterParamsSQL,\n  getFinalBaseSQL
+{"version":3,"sources":["../../../../meerkat-node/src/cube-to-sql/cube-to-sql.ts"],"sourcesContent":["import {\n  BASE_TABLE_NAME,\n  ContextParams,\n  Query,\n  TableSchema,\n  applyFilterParamsToBaseSQL,\n  applyProjectionToSQLQuery,\n  astDeserializerQuery,\n  cubeToDuckdbAST,\n  deserializeQuery,\n  detectApplyContextParamsToBaseSQL,\n  getCombinedTableSchema,\n  getFilterParamsSQL,\n  getFinalBaseSQL,\n} from '@devrev/meerkat-core';\nimport { duckdbExec } from '../duckdb-exec';\n\ninterface CubeQueryToSQLParams {\n  query: Query;\n  tableSchemas: TableSchema[];\n  contextParams?: ContextParams;\n}\n\nexport const cubeQueryToSQL = async ({\n  query,\n  tableSchemas,\n  contextParams,\n}: CubeQueryToSQLParams) => {\n  const updatedTableSchemas: TableSchema[] = await Promise.all(\n    tableSchemas.map(async (schema: TableSchema) => {\n      const baseFilterParamsSQL = await getFinalBaseSQL({\n        query,\n        tableSchema: schema,\n        getQueryOutput: duckdbExec,\n      });\n      return {\n        ...schema,\n        sql: baseFilterParamsSQL,\n      };\n    })\n  );\n\n  const updatedTableSchema = await getCombinedTableSchema(\n    updatedTableSchemas,\n    query\n  );\n\n  const ast = cubeToDuckdbAST(query, updatedTableSchema);\n  if (!ast) {\n    throw new Error('Could not generate AST');\n  }\n\n  const queryTemp = astDeserializerQuery(ast);\n\n  const queryOutput = (await duckdbExec(queryTemp)) as Record<string, string>[];\n  const preBaseQuery = deserializeQuery(queryOutput);\n\n  const filterParamsSQL = await getFilterParamsSQL({\n    query,\n    tableSchema: updatedTableSchema,\n    getQueryOutput: duckdbExec,\n  });\n\n  const filterParamQuery = applyFilterParamsToBaseSQL(\n    updatedTableSchema.sql,\n    filterParamsSQL\n  );\n\n  /**\n   * Replace CONTEXT_PARAMS with context params\n   */\n  const baseQuery = detectApplyContextParamsToBaseSQL(\n    filterParamQuery,\n    contextParams || {}\n  );\n\n  /**\n   * Replace BASE_TABLE_NAME with cube query\n   */\n  const replaceBaseTableName = preBaseQuery.replace(\n    BASE_TABLE_NAME,\n    `(${baseQuery}) AS ${updatedTableSchema.name}`\n  );\n\n  /**\n   * Add measures to the query\n   */\n  const measures = query.measures;\n  const dimensions = query.dimensions || [];\n  const finalQuery = applyProjectionToSQLQuery(\n    dimensions,\n    measures,\n    updatedTableSchema,\n    replaceBaseTableName\n  );\n\n  return finalQuery;\n};\n"],"names":["cubeQueryToSQL","query","tableSchemas","contextParams","updatedTableSchemas","Promise","all","map","schema","baseFilterParamsSQL","getFinalBaseSQL","tableSchema","getQueryOutput","duckdbExec","sql","updatedTableSchema","getCombinedTableSchema","ast","cubeToDuckdbAST","Error","queryTemp","astDeserializerQuery","queryOutput","preBaseQuery","deserializeQuery","filterParamsSQL","getFilterParamsSQL","filterParamQuery","applyFilterParamsToBaseSQL","baseQuery","detectApplyContextParamsToBaseSQL","replaceBaseTableName","replace","BASE_TABLE_NAME","name","measures","dimensions","finalQuery","applyProjectionToSQLQuery"],"mappings":";+BAuBaA;;;eAAAA;;;;6BATN;4BACoB;AAQpB,MAAMA,iBAAiB,OAAO,EACnCC,KAAK,EACLC,YAAY,EACZC,aAAa,EACQ;IACrB,MAAMC,sBAAqC,MAAMC,QAAQC,GAAG,CAC1DJ,aAAaK,GAAG,CAAC,OAAOC;QACtB,MAAMC,sBAAsB,MAAMC,IAAAA,4BAAe,EAAC;YAChDT;YACAU,aAAaH;YACbI,gBAAgBC,sBAAU;QAC5B;QACA,OAAO,eACFL;YACHM,KAAKL;;IAET;IAGF,MAAMM,qBAAqB,MAAMC,IAAAA,mCAAsB,EACrDZ,qBACAH;IAGF,MAAMgB,MAAMC,IAAAA,4BAAe,EAACjB,OAAOc;IACnC,IAAI,CAACE,KAAK;QACR,MAAM,IAAIE,MAAM;IAClB;IAEA,MAAMC,YAAYC,IAAAA,iCAAoB,EAACJ;IAEvC,MAAMK,cAAe,MAAMT,IAAAA,sBAAU,EAACO;IACtC,MAAMG,eAAeC,IAAAA,6BAAgB,EAACF;IAEtC,MAAMG,kBAAkB,MAAMC,IAAAA,+BAAkB,EAAC;QAC/CzB;QACAU,aAAaI;QACbH,gBAAgBC,sBAAU;IAC5B;IAEA,MAAMc,mBAAmBC,IAAAA,uCAA0B,EACjDb,mBAAmBD,GAAG,EACtBW;IAGF;;GAEC,GACD,MAAMI,YAAYC,IAAAA,8CAAiC,EACjDH,kBACAxB,iBAAiB,CAAC;IAGpB;;GAEC,GACD,MAAM4B,uBAAuBR,aAAaS,OAAO,CAC/CC,4BAAe,EACf,CAAC,CAAC,EAAEJ,UAAU,KAAK,EAAEd,mBAAmBmB,IAAI,CAAC,CAAC;IAGhD;;GAEC,GACD,MAAMC,WAAWlC,MAAMkC,QAAQ;IAC/B,MAAMC,aAAanC,MAAMmC,UAAU,IAAI,EAAE;IACzC,MAAMC,aAAaC,IAAAA,sCAAyB,EAC1CF,YACAD,UACApB,oBACAgB;IAGF,OAAOM;AACT"}
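The source embedded in the map above shows the updated cube-to-sql.ts: each table schema's sql is now pre-resolved through getFinalBaseSQL before the AST is built. As a rough usage sketch of the exported cubeQueryToSQL, with a hypothetical query and schema (the field names follow the Query and TableSchema types re-exported from @devrev/meerkat-core, not this diff):

import { cubeQueryToSQL } from '@devrev/meerkat-node';

async function main() {
  // Illustrative schema and query; not taken from this package.
  const sql = await cubeQueryToSQL({
    query: {
      measures: ['orders.count'],
      dimensions: ['orders.status'],
    },
    tableSchemas: [
      {
        name: 'orders',
        sql: 'SELECT * FROM orders',
        measures: [{ name: 'count', sql: 'COUNT(*)', type: 'number' }],
        dimensions: [{ name: 'status', sql: 'status', type: 'string' }],
      },
    ],
  });
  console.log(sql); // the fully-resolved DuckDB SQL string
}

main();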
package/src/duckdb-manager/duckdb-manager.d.ts
ADDED
@@ -0,0 +1,28 @@
+import { ColumnInfo, Connection, Database, TableData } from 'duckdb';
+export declare class DuckDBManager {
+    private db;
+    private connection;
+    private initPromise;
+    constructor({ onInitialize, }: {
+        onInitialize?: (db: Database) => Promise<void>;
+    });
+    /**
+     * Initialize the DuckDB instance
+     */
+    private initialize;
+    /**
+     * Get a DuckDB connection instance.
+     */
+    getConnection(): Promise<Connection | null>;
+    /**
+     * Execute a query on the DuckDB connection.
+     */
+    query(query: string): Promise<{
+        columns: ColumnInfo[];
+        data: TableData;
+    }>;
+    /**
+     * Close the DuckDB connection and cleanup resources.
+     */
+    close(): Promise<void>;
+}
package/src/duckdb-manager/duckdb-manager.js
ADDED
@@ -0,0 +1,71 @@
+"use strict";
+Object.defineProperty(exports, "DuckDBManager", {
+    enumerable: true,
+    get: function() {
+        return DuckDBManager;
+    }
+});
+const _duckdbsingleton = require("../duckdb-singleton");
+const _duckdbtypeconvertor = require("../utils/duckdb-type-convertor");
+let DuckDBManager = class DuckDBManager {
+    /**
+     * Initialize the DuckDB instance
+     */ async initialize({ onInitialize }) {
+        this.db = _duckdbsingleton.DuckDBSingleton.getInstance();
+        await (onInitialize == null ? void 0 : onInitialize(this.db));
+    }
+    /**
+     * Get a DuckDB connection instance.
+     */ async getConnection() {
+        // Ensure database is initialized before returning the connection
+        await this.initPromise;
+        if (!this.connection) {
+            var _this_db;
+            var _this_db_connect;
+            this.connection = (_this_db_connect = (_this_db = this.db) == null ? void 0 : _this_db.connect()) != null ? _this_db_connect : null;
+        }
+        return this.connection;
+    }
+    /**
+     * Execute a query on the DuckDB connection.
+     */ async query(query) {
+        const connection = await this.getConnection();
+        return new Promise((resolve, reject)=>{
+            connection == null ? void 0 : connection.prepare(query, (err, statement)=>{
+                if (err) {
+                    reject(new Error(`Query preparation failed: ${err.message}`));
+                    return;
+                }
+                const columns = statement.columns();
+                statement.all((err, data)=>{
+                    if (err) {
+                        reject(new Error(`Query execution failed: ${err.message}`));
+                        return;
+                    }
+                    const result = (0, _duckdbtypeconvertor.convertTableDataToJSON)(data, columns);
+                    resolve({
+                        columns,
+                        data: result
+                    });
+                });
+            });
+        });
+    }
+    /**
+     * Close the DuckDB connection and cleanup resources.
+     */ async close() {
+        if (this.connection) {
+            this.connection.close();
+            this.connection = null;
+        }
+    }
+    constructor({ onInitialize }){
+        this.db = null;
+        this.connection = null;
+        this.initPromise = this.initialize({
+            onInitialize
+        });
+    }
+};
+
+//# sourceMappingURL=duckdb-manager.js.map
package/src/duckdb-manager/duckdb-manager.js.map
ADDED
@@ -0,0 +1 @@
+{"version":3,"sources":["../../../../meerkat-node/src/duckdb-manager/duckdb-manager.ts"],"sourcesContent":["import { ColumnInfo, Connection, Database, TableData } from 'duckdb';\n\nimport { DuckDBSingleton } from '../duckdb-singleton';\nimport { convertTableDataToJSON } from '../utils/duckdb-type-convertor';\n\nexport class DuckDBManager {\n  private db: Database | null = null;\n  private connection: Connection | null = null;\n\n  private initPromise: Promise<void>;\n\n  constructor({\n    onInitialize,\n  }: {\n    onInitialize?: (db: Database) => Promise<void>;\n  }) {\n    this.initPromise = this.initialize({ onInitialize });\n  }\n\n  /**\n   * Initialize the DuckDB instance\n   */\n  private async initialize({\n    onInitialize,\n  }: {\n    onInitialize?: (db: Database) => Promise<void>;\n  }) {\n    this.db = DuckDBSingleton.getInstance();\n\n    await onInitialize?.(this.db);\n  }\n\n  /**\n   * Get a DuckDB connection instance.\n   */\n  async getConnection() {\n    // Ensure database is initialized before returning the connection\n    await this.initPromise;\n\n    if (!this.connection) {\n      this.connection = this.db?.connect() ?? null;\n    }\n\n    return this.connection;\n  }\n\n  /**\n   * Execute a query on the DuckDB connection.\n   */\n  async query(\n    query: string\n  ): Promise<{ columns: ColumnInfo[]; data: TableData }> {\n    const connection = await this.getConnection();\n\n    return new Promise((resolve, reject) => {\n      connection?.prepare(query, (err, statement) => {\n        if (err) {\n          reject(new Error(`Query preparation failed: ${err.message}`));\n          return;\n        }\n\n        const columns = statement.columns();\n\n        statement.all((err, data) => {\n          if (err) {\n            reject(new Error(`Query execution failed: ${err.message}`));\n            return;\n          }\n\n          const result = convertTableDataToJSON(data, columns);\n\n          resolve({ columns, data: result });\n        });\n      });\n    });\n  }\n\n  /**\n   * Close the DuckDB connection and cleanup resources.\n   */\n  async close(): Promise<void> {\n    if (this.connection) {\n      this.connection.close();\n      this.connection = null;\n    }\n  }\n}\n"],"names":["DuckDBManager","initialize","onInitialize","db","DuckDBSingleton","getInstance","getConnection","initPromise","connection","connect","query","Promise","resolve","reject","prepare","err","statement","Error","message","columns","all","data","result","convertTableDataToJSON","close","constructor"],"mappings":";+BAKaA;;;eAAAA;;;iCAHmB;qCACO;AAEhC,IAAA,AAAMA,gBAAN,MAAMA;IAcX;;GAEC,GACD,MAAcC,WAAW,EACvBC,YAAY,EAGb,EAAE;QACD,IAAI,CAACC,EAAE,GAAGC,gCAAe,CAACC,WAAW;QAErC,OAAMH,gCAAAA,aAAe,IAAI,CAACC,EAAE;IAC9B;IAEA;;GAEC,GACD,MAAMG,gBAAgB;QACpB,iEAAiE;QACjE,MAAM,IAAI,CAACC,WAAW;QAEtB,IAAI,CAAC,IAAI,CAACC,UAAU,EAAE;gBACF;gBAAA;YAAlB,IAAI,CAACA,UAAU,GAAG,CAAA,oBAAA,WAAA,IAAI,CAACL,EAAE,qBAAP,SAASM,OAAO,cAAhB,mBAAsB;QAC1C;QAEA,OAAO,IAAI,CAACD,UAAU;IACxB;IAEA;;GAEC,GACD,MAAME,MACJA,KAAa,EACwC;QACrD,MAAMF,aAAa,MAAM,IAAI,CAACF,aAAa;QAE3C,OAAO,IAAIK,QAAQ,CAACC,SAASC;YAC3BL,8BAAAA,WAAYM,OAAO,CAACJ,OAAO,CAACK,KAAKC;gBAC/B,IAAID,KAAK;oBACPF,OAAO,IAAII,MAAM,CAAC,0BAA0B,EAAEF,IAAIG,OAAO,CAAC,CAAC;oBAC3D;gBACF;gBAEA,MAAMC,UAAUH,UAAUG,OAAO;gBAEjCH,UAAUI,GAAG,CAAC,CAACL,KAAKM;oBAClB,IAAIN,KAAK;wBACPF,OAAO,IAAII,MAAM,CAAC,wBAAwB,EAAEF,IAAIG,OAAO,CAAC,CAAC;wBACzD;oBACF;oBAEA,MAAMI,SAASC,IAAAA,2CAAsB,EAACF,MAAMF;oBAE5CP,QAAQ;wBAAEO;wBAASE,MAAMC;oBAAO;gBAClC;YACF;QACF;IACF;IAEA;;GAEC,GACD,MAAME,QAAuB;QAC3B,IAAI,IAAI,CAAChB,UAAU,EAAE;YACnB,IAAI,CAACA,UAAU,CAACgB,KAAK;YACrB,IAAI,CAAChB,UAAU,GAAG;QACpB;IACF;IA1EAiB,YAAY,EACVvB,YAAY,EAGb,CAAE;aATKC,KAAsB;aACtBK,aAAgC;QAStC,IAAI,CAACD,WAAW,GAAG,IAAI,CAACN,UAAU,CAAC;YAAEC;QAAa;IACpD;AAqEF"}
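Taken together with the declaration file above, DuckDBManager wraps the shared DuckDBSingleton behind a lazily-initialized connection: the constructor kicks off initialize, and getConnection awaits that promise before handing out a connection. A minimal usage sketch; the onInitialize body (loading an extension via duckdb's callback-based db.exec) is an assumption, not part of this package:

import { DuckDBManager } from '@devrev/meerkat-node';

async function main() {
  const manager = new DuckDBManager({
    // Runs once against the shared Database before the first query;
    // the extension install/load here is only an example.
    onInitialize: (db) =>
      new Promise<void>((resolve, reject) =>
        db.exec('INSTALL httpfs; LOAD httpfs;', (err) =>
          err ? reject(err) : resolve()
        )
      ),
  });

  const { columns, data } = await manager.query('SELECT 42 AS answer;');
  console.log(columns.map((c) => c.name), data); // [ 'answer' ] [ { answer: 42 } ]

  await manager.close();
}

main();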
package/src/file-manager/file-manager.d.ts
ADDED
@@ -0,0 +1,33 @@
+export declare class FileManager {
+    private readonly baseDir;
+    constructor(config: {
+        baseDir?: string;
+    });
+    getPath(tableName: string, fileName?: string): string;
+    private getHashedFileName;
+    /**
+     * Write a file buffer to the file system.
+     */
+    writeFileBuffer(file: {
+        tableName: string;
+        fileName: string;
+        buffer: Uint8Array;
+    }): Promise<void>;
+    /**
+     * Get the file paths for a table.
+     */
+    getTableFilePaths(tableName: string): Promise<string[]>;
+    /**
+     * Delete files from a table.
+     */
+    deleteTableFiles(tableName: string, files: string[]): Promise<void>;
+    /**
+     * Stream and register a file from a URL.
+     */
+    streamAndRegisterFile({ tableName, url, headers, fileName, }: {
+        tableName: string;
+        url: string;
+        headers: Record<string, string>;
+        fileName: string;
+    }): Promise<void>;
+}
package/src/file-manager/file-manager.js
ADDED
@@ -0,0 +1,93 @@
+"use strict";
+Object.defineProperty(exports, "FileManager", {
+    enumerable: true,
+    get: function() {
+        return FileManager;
+    }
+});
+const _extends = require("@swc/helpers/_/_extends");
+const _axios = require("axios");
+const _fs = require("fs");
+const _path = require("path");
+const _hashstring = require("../utils/hash-string");
+let FileManager = class FileManager {
+    getPath(tableName, fileName) {
+        return fileName ? _path.join(this.baseDir, tableName, fileName) : _path.join(this.baseDir, tableName);
+    }
+    getHashedFileName(fileName) {
+        return (0, _hashstring.hashString)(fileName);
+    }
+    /**
+     * Write a file buffer to the file system.
+     */ async writeFileBuffer(file) {
+        // Hash the file name to avoid file name length issues
+        const hashedFileName = this.getHashedFileName(file.fileName);
+        const filePath = this.getPath(file.tableName, hashedFileName);
+        await _fs.promises.mkdir(_path.dirname(filePath), {
+            recursive: true
+        });
+        await _fs.promises.writeFile(filePath, file.buffer);
+    }
+    /**
+     * Get the file paths for a table.
+     */ async getTableFilePaths(tableName) {
+        try {
+            const files = await _fs.promises.readdir(this.getPath(tableName));
+            return files.map((file)=>this.getPath(tableName, file));
+        } catch (e) {
+            return [];
+        }
+    }
+    /**
+     * Delete files from a table.
+     */ async deleteTableFiles(tableName, files) {
+        await Promise.all(files.map(async (file)=>{
+            try {
+                await _fs.promises.unlink(this.getPath(tableName, this.getHashedFileName(file)));
+            } catch (err) {
+                console.error(err);
+            }
+        }));
+    }
+    /**
+     * Stream and register a file from a URL.
+     */ async streamAndRegisterFile({ tableName, url, headers, fileName }) {
+        try {
+            const response = await (0, _axios.default)({
+                headers: _extends._({}, headers),
+                method: 'get',
+                responseType: 'stream',
+                url
+            });
+            const hashedFileName = (0, _hashstring.hashString)(fileName);
+            const filePath = this.getPath(tableName, hashedFileName);
+            await _fs.promises.mkdir(_path.dirname(filePath), {
+                recursive: true
+            });
+            const writer = (0, _fs.createWriteStream)(filePath);
+            return new Promise((resolve, reject)=>{
+                response.data.pipe(writer);
+                writer.on('finish', ()=>{
+                    writer.close();
+                    resolve();
+                });
+                writer.on('error', (err)=>{
+                    writer.close();
+                    reject(err);
+                });
+            });
+        } catch (error) {
+            console.error('Error streaming file:', error);
+            throw error;
+        }
+    }
+    constructor(config){
+        var _config_baseDir;
+        this.baseDir = (_config_baseDir = config.baseDir) != null ? _config_baseDir : './data';
+        _fs.promises.mkdir(this.baseDir, {
+            recursive: true
+        }).catch(console.error);
+    }
+};
+
+//# sourceMappingURL=file-manager.js.map
package/src/file-manager/file-manager.js.map
ADDED
@@ -0,0 +1 @@
+{"version":3,"sources":["../../../../meerkat-node/src/file-manager/file-manager.ts"],"sourcesContent":["import axios from 'axios';\nimport { createWriteStream, promises as fs } from 'fs';\nimport * as path from 'path';\n\nimport { hashString } from '../utils/hash-string';\n\nexport class FileManager {\n  private readonly baseDir: string;\n\n  constructor(config: { baseDir?: string }) {\n    this.baseDir = config.baseDir ?? './data';\n    fs.mkdir(this.baseDir, { recursive: true }).catch(console.error);\n  }\n\n  public getPath(tableName: string, fileName?: string): string {\n    return fileName\n      ? path.join(this.baseDir, tableName, fileName)\n      : path.join(this.baseDir, tableName);\n  }\n\n  private getHashedFileName(fileName: string): string {\n    return hashString(fileName);\n  }\n\n  /**\n   * Write a file buffer to the file system.\n   */\n  async writeFileBuffer(file: {\n    tableName: string;\n    fileName: string;\n    buffer: Uint8Array;\n  }): Promise<void> {\n    // Hash the file name to avoid file name length issues\n    const hashedFileName = this.getHashedFileName(file.fileName);\n\n    const filePath = this.getPath(file.tableName, hashedFileName);\n\n    await fs.mkdir(path.dirname(filePath), { recursive: true });\n\n    await fs.writeFile(filePath, file.buffer);\n  }\n\n  /**\n   * Get the file paths for a table.\n   */\n  async getTableFilePaths(tableName: string): Promise<string[]> {\n    try {\n      const files = await fs.readdir(this.getPath(tableName));\n\n      return files.map((file) => this.getPath(tableName, file));\n    } catch {\n      return [];\n    }\n  }\n\n  /**\n   * Delete files from a table.\n   */\n  async deleteTableFiles(tableName: string, files: string[]): Promise<void> {\n    await Promise.all(\n      files.map(async (file) => {\n        try {\n          await fs.unlink(\n            this.getPath(tableName, this.getHashedFileName(file))\n          );\n        } catch (err) {\n          console.error(err);\n        }\n      })\n    );\n  }\n\n  /**\n   * Stream and register a file from a URL.\n   */\n  async streamAndRegisterFile({\n    tableName,\n    url,\n    headers,\n    fileName,\n  }: {\n    tableName: string;\n    url: string;\n    headers: Record<string, string>;\n    fileName: string;\n  }): Promise<void> {\n    try {\n      const response = await axios({\n        headers: {\n          ...headers,\n        },\n        method: 'get',\n        responseType: 'stream',\n        url,\n      });\n\n      const hashedFileName = hashString(fileName);\n\n      const filePath = this.getPath(tableName, hashedFileName);\n      await fs.mkdir(path.dirname(filePath), { recursive: true });\n\n      const writer = createWriteStream(filePath);\n\n      return new Promise((resolve, reject) => {\n        response.data.pipe(writer);\n        writer.on('finish', () => {\n          writer.close();\n          resolve();\n        });\n        writer.on('error', (err) => {\n          writer.close();\n          reject(err);\n        });\n      });\n    } catch (error) {\n      console.error('Error streaming file:', error);\n      throw error;\n    }\n  }\n}\n"],"names":["FileManager","getPath","tableName","fileName","path","join","baseDir","getHashedFileName","hashString","writeFileBuffer","file","hashedFileName","filePath","fs","mkdir","dirname","recursive","writeFile","buffer","getTableFilePaths","files","readdir","map","deleteTableFiles","Promise","all","unlink","err","console","error","streamAndRegisterFile","url","headers","response","axios","method","responseType","writer","createWriteStream","resolve","reject","data","pipe","on","close","constructor","config","catch"],"mappings":";+BAMaA;;;eAAAA;;;;uBANK;oBACgC;sBAC5B;4BAEK;AAEpB,IAAA,AAAMA,cAAN,MAAMA;IAQJC,QAAQC,SAAiB,EAAEC,QAAiB,EAAU;QAC3D,OAAOA,WACHC,MAAKC,IAAI,CAAC,IAAI,CAACC,OAAO,EAAEJ,WAAWC,YACnCC,MAAKC,IAAI,CAAC,IAAI,CAACC,OAAO,EAAEJ;IAC9B;IAEQK,kBAAkBJ,QAAgB,EAAU;QAClD,OAAOK,IAAAA,sBAAU,EAACL;IACpB;IAEA;;GAEC,GACD,MAAMM,gBAAgBC,IAIrB,EAAiB;QAChB,sDAAsD;QACtD,MAAMC,iBAAiB,IAAI,CAACJ,iBAAiB,CAACG,KAAKP,QAAQ;QAE3D,MAAMS,WAAW,IAAI,CAACX,OAAO,CAACS,KAAKR,SAAS,EAAES;QAE9C,MAAME,YAAE,CAACC,KAAK,CAACV,MAAKW,OAAO,CAACH,WAAW;YAAEI,WAAW;QAAK;QAEzD,MAAMH,YAAE,CAACI,SAAS,CAACL,UAAUF,KAAKQ,MAAM;IAC1C;IAEA;;GAEC,GACD,MAAMC,kBAAkBjB,SAAiB,EAAqB;QAC5D,IAAI;YACF,MAAMkB,QAAQ,MAAMP,YAAE,CAACQ,OAAO,CAAC,IAAI,CAACpB,OAAO,CAACC;YAE5C,OAAOkB,MAAME,GAAG,CAAC,CAACZ,OAAS,IAAI,CAACT,OAAO,CAACC,WAAWQ;QACrD,EAAE,UAAM;YACN,OAAO,EAAE;QACX;IACF;IAEA;;GAEC,GACD,MAAMa,iBAAiBrB,SAAiB,EAAEkB,KAAe,EAAiB;QACxE,MAAMI,QAAQC,GAAG,CACfL,MAAME,GAAG,CAAC,OAAOZ;YACf,IAAI;gBACF,MAAMG,YAAE,CAACa,MAAM,CACb,IAAI,CAACzB,OAAO,CAACC,WAAW,IAAI,CAACK,iBAAiB,CAACG;YAEnD,EAAE,OAAOiB,KAAK;gBACZC,QAAQC,KAAK,CAACF;YAChB;QACF;IAEJ;IAEA;;GAEC,GACD,MAAMG,sBAAsB,EAC1B5B,SAAS,EACT6B,GAAG,EACHC,OAAO,EACP7B,QAAQ,EAMT,EAAiB;QAChB,IAAI;YACF,MAAM8B,WAAW,MAAMC,IAAAA,cAAK,EAAC;gBAC3BF,SAAS,eACJA;gBAELG,QAAQ;gBACRC,cAAc;gBACdL;YACF;YAEA,MAAMpB,iBAAiBH,IAAAA,sBAAU,EAACL;YAElC,MAAMS,WAAW,IAAI,CAACX,OAAO,CAACC,WAAWS;YACzC,MAAME,YAAE,CAACC,KAAK,CAACV,MAAKW,OAAO,CAACH,WAAW;gBAAEI,WAAW;YAAK;YAEzD,MAAMqB,SAASC,IAAAA,qBAAiB,EAAC1B;YAEjC,OAAO,IAAIY,QAAQ,CAACe,SAASC;gBAC3BP,SAASQ,IAAI,CAACC,IAAI,CAACL;gBACnBA,OAAOM,EAAE,CAAC,UAAU;oBAClBN,OAAOO,KAAK;oBACZL;gBACF;gBACAF,OAAOM,EAAE,CAAC,SAAS,CAAChB;oBAClBU,OAAOO,KAAK;oBACZJ,OAAOb;gBACT;YACF;QACF,EAAE,OAAOE,OAAO;YACdD,QAAQC,KAAK,CAAC,yBAAyBA;YACvC,MAAMA;QACR;IACF;IA7GAgB,YAAYC,MAA4B,CAAE;YACzBA;QAAf,IAAI,CAACxC,OAAO,GAAGwC,CAAAA,kBAAAA,OAAOxC,OAAO,YAAdwC,kBAAkB;QACjCjC,YAAE,CAACC,KAAK,CAAC,IAAI,CAACR,OAAO,EAAE;YAAEU,WAAW;QAAK,GAAG+B,KAAK,CAACnB,QAAQC,KAAK;IACjE;AA2GF"}
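FileManager stores every file under <baseDir>/<tableName>/<md5(fileName)> (see hashString below), whether the bytes arrive as an in-memory buffer or are streamed from a URL via axios. A usage sketch with made-up table, file, and URL names:

import { FileManager } from '@devrev/meerkat-node';

async function main() {
  // Hypothetical baseDir and names; './data' is also the package default.
  const files = new FileManager({ baseDir: './data' });

  // Buffers land at <baseDir>/<tableName>/<md5(fileName)>.
  await files.writeFileBuffer({
    tableName: 'orders',
    fileName: 'orders-2024-01.parquet',
    buffer: new Uint8Array([/* file bytes */]),
  });

  // Or stream a remote file into the same layout.
  await files.streamAndRegisterFile({
    tableName: 'orders',
    url: 'https://example.com/orders-2024-02.parquet',
    headers: { Authorization: 'Bearer <token>' },
    fileName: 'orders-2024-02.parquet',
  });

  console.log(await files.getTableFilePaths('orders')); // hashed paths for both files
}

main();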
package/src/index.d.ts
CHANGED
@@ -3,3 +3,5 @@ export * from './duckdb-singleton';
 export * from './node-sql-to-serialization';
 export { convertCubeStringToTableSchema };
 import { convertCubeStringToTableSchema } from '@devrev/meerkat-core';
+export * from './duckdb-manager/duckdb-manager';
+export * from './file-manager/file-manager';
package/src/index.js
CHANGED
@@ -10,5 +10,7 @@ _export_star._(require("./cube-to-sql/cube-to-sql"), exports);
 _export_star._(require("./duckdb-singleton"), exports);
 _export_star._(require("./node-sql-to-serialization"), exports);
 const _meerkatcore = require("@devrev/meerkat-core");
+_export_star._(require("./duckdb-manager/duckdb-manager"), exports);
+_export_star._(require("./file-manager/file-manager"), exports);
 
 //# sourceMappingURL=index.js.map
package/src/index.js.map
CHANGED
@@ -1 +1 @@
-{"version":3,"sources":["../../../meerkat-node/src/index.ts"],"sourcesContent":["export * from './cube-to-sql/cube-to-sql';\nexport * from './duckdb-singleton';\nexport * from './node-sql-to-serialization';\nexport { convertCubeStringToTableSchema };\nimport { convertCubeStringToTableSchema } from '@devrev/meerkat-core';\n"],"names":["convertCubeStringToTableSchema"],"mappings":";+BAGSA;;;eAAAA,2CAA8B;;;;uBAHzB;uBACA;uBACA;6BAEiC"}
+{"version":3,"sources":["../../../meerkat-node/src/index.ts"],"sourcesContent":["export * from './cube-to-sql/cube-to-sql';\nexport * from './duckdb-singleton';\nexport * from './node-sql-to-serialization';\nexport { convertCubeStringToTableSchema };\nimport { convertCubeStringToTableSchema } from '@devrev/meerkat-core';\nexport * from './duckdb-manager/duckdb-manager';\nexport * from './file-manager/file-manager';\n"],"names":["convertCubeStringToTableSchema"],"mappings":";+BAGSA;;;eAAAA,2CAA8B;;;;uBAHzB;uBACA;uBACA;6BAEiC;uBACjC;uBACA"}
package/src/node-sql-to-serialization.js.map
CHANGED
@@ -1 +1 @@
-{"version":3,"sources":["../../../meerkat-node/src/node-sql-to-serialization.ts"],"sourcesContent":["import { SelectStatement } from '@devrev/meerkat-core';\nimport { duckdbExec } from './duckdb-exec';\n\nexport interface ParsedSerialization {\n  [key: string]: {\n    error: boolean;\n    statements: SelectStatement[];\n  };\n}\n\nexport const nodeSQLToSerialization = async (\n  sql: string\n): Promise<ParsedSerialization> => {\n  const queryOutput = await duckdbExec<\n    {\n      [key: string]: string;\n    }[]\n  >(sql);\n\n  const parsedOutput: ParsedSerialization = {};\n\n  for (const key in queryOutput[0]) {\n    if (Object.prototype.hasOwnProperty.call(queryOutput[0], key)) {\n      parsedOutput[key] = JSON.parse(queryOutput[0][key]) as {\n        error: boolean;\n        statements: SelectStatement[];\n      };\n
+{"version":3,"sources":["../../../meerkat-node/src/node-sql-to-serialization.ts"],"sourcesContent":["import { SelectStatement } from '@devrev/meerkat-core';\nimport { duckdbExec } from './duckdb-exec';\n\nexport interface ParsedSerialization {\n  [key: string]: {\n    error: boolean;\n    statements: SelectStatement[];\n  };\n}\n\nexport const nodeSQLToSerialization = async (\n  sql: string\n): Promise<ParsedSerialization> => {\n  const queryOutput = await duckdbExec<\n    {\n      [key: string]: string;\n    }[]\n  >(sql);\n\n  const parsedOutput: ParsedSerialization = {};\n\n  for (const key in queryOutput[0]) {\n    if (Object.prototype.hasOwnProperty.call(queryOutput[0], key)) {\n      parsedOutput[key] = JSON.parse(queryOutput[0][key] as string) as {\n        error: boolean;\n        statements: SelectStatement[];\n      };\n    }\n  }\n  return parsedOutput;\n};\n"],"names":["nodeSQLToSerialization","sql","queryOutput","duckdbExec","parsedOutput","key","Object","prototype","hasOwnProperty","call","JSON","parse"],"mappings":";+BAUaA;;;eAAAA;;;4BATc;AASpB,MAAMA,yBAAyB,OACpCC;IAEA,MAAMC,cAAc,MAAMC,IAAAA,sBAAU,EAIlCF;IAEF,MAAMG,eAAoC,CAAC;IAE3C,IAAK,MAAMC,OAAOH,WAAW,CAAC,EAAE,CAAE;QAChC,IAAII,OAAOC,SAAS,CAACC,cAAc,CAACC,IAAI,CAACP,WAAW,CAAC,EAAE,EAAEG,MAAM;YAC7DD,YAAY,CAACC,IAAI,GAAGK,KAAKC,KAAK,CAACT,WAAW,CAAC,EAAE,CAACG,IAAI;QAIpD;IACF;IACA,OAAOD;AACT"}
package/src/utils/duckdb-type-convertor.js
ADDED
@@ -0,0 +1,61 @@
+"use strict";
+function _export(target, all) {
+    for(var name in all)Object.defineProperty(target, name, {
+        enumerable: true,
+        get: all[name]
+    });
+}
+_export(exports, {
+    convertDuckDBValueToJS: function() {
+        return convertDuckDBValueToJS;
+    },
+    convertTableDataToJSON: function() {
+        return convertTableDataToJSON;
+    }
+});
+const _lodash = require("lodash");
+const convertDuckDBValueToJS = (field, value)=>{
+    if ((0, _lodash.isNil)(value)) return null;
+    switch(field.id){
+        case 'SQL_NULL':
+            return null;
+        case 'DATE':
+        case 'TIME':
+        case 'TIMESTAMP':
+            return new Date(value).toISOString();
+        case 'FLOAT':
+        case 'DOUBLE':
+            return value;
+        case 'INTEGER':
+        case 'TINYINT':
+        case 'SMALLINT':
+        case 'BIGINT':
+        case 'UTINYINT':
+        case 'USMALLINT':
+        case 'UINTEGER':
+        case 'UBIGINT':
+        case 'HUGEINT':
+        case 'UHUGEINT':
+            return parseInt(value.toString(), 10);
+        case 'DECIMAL':
+            return parseFloat(value.toString());
+        case 'LIST':
+            {
+                if (!value) return [];
+                const listValue = value;
+                return listValue.map((item)=>convertDuckDBValueToJS(field.child, item));
+            }
+        default:
+            return value;
+    }
+};
+const convertTableDataToJSON = (data, columns)=>{
+    return data.map((row)=>{
+        return columns.reduce((acc, column)=>{
+            acc[column.name] = convertDuckDBValueToJS(column.type, row[column.name]);
+            return acc;
+        }, {});
+    });
+};
+
+//# sourceMappingURL=duckdb-type-convertor.js.map
package/src/utils/duckdb-type-convertor.js.map
ADDED
@@ -0,0 +1 @@
+{"version":3,"sources":["../../../../meerkat-node/src/utils/duckdb-type-convertor.ts"],"sourcesContent":["import { ColumnInfo, ListTypeInfo, TableData, TypeInfo } from 'duckdb';\nimport { isNil } from 'lodash';\n\nexport const convertDuckDBValueToJS = (\n  field: TypeInfo,\n  value: unknown\n): unknown => {\n  if (isNil(value)) return null;\n\n  switch (field.id) {\n    case 'SQL_NULL':\n      return null;\n    case 'DATE':\n    case 'TIME':\n    case 'TIMESTAMP':\n      return new Date(value as string).toISOString();\n    case 'FLOAT':\n    case 'DOUBLE':\n      return value;\n    case 'INTEGER':\n    case 'TINYINT':\n    case 'SMALLINT':\n    case 'BIGINT':\n    case 'UTINYINT':\n    case 'USMALLINT':\n    case 'UINTEGER':\n    case 'UBIGINT':\n    case 'HUGEINT':\n    case 'UHUGEINT':\n      return parseInt((value as object).toString(), 10);\n    case 'DECIMAL':\n      return parseFloat((value as object).toString());\n    case 'LIST': {\n      if (!value) return [];\n      const listValue = value as [];\n      return listValue.map((item) =>\n        convertDuckDBValueToJS((field as ListTypeInfo).child, item)\n      );\n    }\n    default:\n      return value;\n  }\n};\n\nexport const convertTableDataToJSON = (\n  data: TableData,\n  columns: ColumnInfo[]\n): Record<string, unknown>[] => {\n  return data.map((row: Record<string, unknown>) => {\n    return columns.reduce((acc, column) => {\n      acc[column.name] = convertDuckDBValueToJS(column.type, row[column.name]);\n      return acc;\n    }, {} as Record<string, unknown>);\n  });\n};\n"],"names":["convertDuckDBValueToJS","convertTableDataToJSON","field","value","isNil","id","Date","toISOString","parseInt","toString","parseFloat","listValue","map","item","child","data","columns","row","reduce","acc","column","name","type"],"mappings":";;;;;;;;IAGaA,sBAAsB;eAAtBA;;IAyCAC,sBAAsB;eAAtBA;;;wBA3CS;AAEf,MAAMD,yBAAyB,CACpCE,OACAC;IAEA,IAAIC,IAAAA,aAAK,EAACD,QAAQ,OAAO;IAEzB,OAAQD,MAAMG,EAAE;QACd,KAAK;YACH,OAAO;QACT,KAAK;QACL,KAAK;QACL,KAAK;YACH,OAAO,IAAIC,KAAKH,OAAiBI,WAAW;QAC9C,KAAK;QACL,KAAK;YACH,OAAOJ;QACT,KAAK;QACL,KAAK;QACL,KAAK;QACL,KAAK;QACL,KAAK;QACL,KAAK;QACL,KAAK;QACL,KAAK;QACL,KAAK;QACL,KAAK;YACH,OAAOK,SAAS,AAACL,MAAiBM,QAAQ,IAAI;QAChD,KAAK;YACH,OAAOC,WAAW,AAACP,MAAiBM,QAAQ;QAC9C,KAAK;YAAQ;gBACX,IAAI,CAACN,OAAO,OAAO,EAAE;gBACrB,MAAMQ,YAAYR;gBAClB,OAAOQ,UAAUC,GAAG,CAAC,CAACC,OACpBb,uBAAuB,AAACE,MAAuBY,KAAK,EAAED;YAE1D;QACA;YACE,OAAOV;IACX;AACF;AAEO,MAAMF,yBAAyB,CACpCc,MACAC;IAEA,OAAOD,KAAKH,GAAG,CAAC,CAACK;QACf,OAAOD,QAAQE,MAAM,CAAC,CAACC,KAAKC;YAC1BD,GAAG,CAACC,OAAOC,IAAI,CAAC,GAAGrB,uBAAuBoB,OAAOE,IAAI,EAAEL,GAAG,CAACG,OAAOC,IAAI,CAAC;YACvE,OAAOF;QACT,GAAG,CAAC;IACN;AACF"}
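The convertor normalizes DuckDB values for JSON: DATE/TIME/TIMESTAMP become ISO-8601 strings, the integer family (including BIGINT and HUGEINT) goes through parseInt and so loses precision past Number.MAX_SAFE_INTEGER, DECIMAL goes through parseFloat, and LIST recurses on the child type. A sketch with hand-built column metadata; real values come from a duckdb statement's columns() and all() callbacks, and the casts are needed because only the type id is populated here:

import { ColumnInfo, TableData } from 'duckdb';
import { convertTableDataToJSON } from '@devrev/meerkat-node';

// Partial TypeInfo objects for illustration; only id (and child, for
// LIST) are read by the convertor.
const columns = [
  { name: 'id', type: { id: 'BIGINT' } },
  { name: 'created_at', type: { id: 'TIMESTAMP' } },
] as unknown as ColumnInfo[];

const rows = [{ id: 123n, created_at: 1704067200000 }] as unknown as TableData;

console.log(convertTableDataToJSON(rows, columns));
// [ { id: 123, created_at: '2024-01-01T00:00:00.000Z' } ]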
package/src/utils/hash-string.js
ADDED
@@ -0,0 +1,13 @@
+"use strict";
+Object.defineProperty(exports, "hashString", {
+    enumerable: true,
+    get: function() {
+        return hashString;
+    }
+});
+const _crypto = require("crypto");
+const hashString = (text)=>{
+    return _crypto.createHash('md5').update(text).digest('hex');
+};
+
+//# sourceMappingURL=hash-string.js.map
package/src/utils/hash-string.js.map
ADDED
@@ -0,0 +1 @@
+{"version":3,"sources":["../../../../meerkat-node/src/utils/hash-string.ts"],"sourcesContent":["import * as crypto from 'crypto';\n\n/**\n * Hash a string using MD5.\n */\nexport const hashString = (text: string): string => {\n  return crypto.createHash('md5').update(text).digest('hex');\n};\n"],"names":["hashString","text","crypto","createHash","update","digest"],"mappings":";+BAKaA;;;eAAAA;;;wBALW;AAKjB,MAAMA,aAAa,CAACC;IACzB,OAAOC,QAAOC,UAAU,CAAC,OAAOC,MAAM,CAACH,MAAMI,MAAM,CAAC;AACtD"}
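hashString is what FileManager uses to shorten arbitrary file names into fixed-length path components; MD5 here is for stable naming, not for security. A minimal sketch with a made-up file name:

import { hashString } from '@devrev/meerkat-node';

// Always a 32-character lowercase hex digest, and deterministic for
// the same input, so the same logical file maps to the same path.
console.log(hashString('orders-2024-01.parquet'));
console.log(hashString('orders-2024-01.parquet') === hashString('orders-2024-01.parquet')); // true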