@visactor/vquery 0.1.48 → 0.1.50

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
@@ -0,0 +1,16 @@
+ import { DatasetSourceType, RawDatasetSource } from '../types';
+ export declare class DatasetSourceBuilder {
+ private type;
+ private value;
+ constructor(raw: RawDatasetSource);
+ static from(raw: RawDatasetSource): DatasetSourceBuilder;
+ build(): Promise<{
+ type: DatasetSourceType;
+ blob: Blob;
+ }>;
+ /**
+ * Convert data of different types into a Blob
+ */
+ private static convertToBlob;
+ private static fetchBlob;
+ }
@@ -0,0 +1 @@
+ export { DatasetSourceBuilder } from './dataSourceBuilder';
@@ -6,8 +6,9 @@ export declare class Dataset {
  private indexedDB;
  private _datasetId;
  constructor(duckDB: DuckDB, indexedDB: IndexedDB, datasetId: string);
- init(temporaryStructs?: DatasetColumn[]): Promise<void>;
- queryBySQL(sql: string): Promise<{
+ init(temporaryColumns?: DatasetColumn[]): Promise<void>;
+ createOrReplaceView(columns?: DatasetColumn[]): Promise<void>;
+ query<T extends Record<string, number | string>>(queryDSL: QueryDSL<T>): Promise<{
  performance: {
  startAt: string;
  endAt: string;
@@ -16,8 +17,7 @@ export declare class Dataset {
  dataset: any[];
  table: any;
  }>;
- convertDSLToSQL<T extends Record<string, number | string>>(queryDSL: QueryDSL<T>): string;
- query<T extends Record<string, number | string>>(queryDSL: QueryDSL<T>): Promise<{
+ queryBySQL(sql: string): Promise<{
  performance: {
  startAt: string;
  endAt: string;
@@ -1 +1,2 @@
  export { Dataset } from './dataset';
+ export { convertDSLToSQL } from '../sql-builder';
@@ -1,4 +1,4 @@
- import { DataSource } from '../types';
+ import { DatasetSource } from '../types';
  import { DatasetSchema } from '../types/DataSet';
  export declare class IndexedDB {
  private db;
@@ -7,15 +7,15 @@ export declare class IndexedDB {
  constructor(dbName: string);
  open: () => Promise<void>;
  close: () => void;
- writeDataset: (datasetId: string, dataSource: DataSource, datasetSchema: DatasetSchema) => Promise<void>;
+ writeDataset: (datasetId: string, datasetSchema: DatasetSchema, datasetSource?: DatasetSource) => Promise<void>;
  readDataset: (datasetId: string) => Promise<{
- dataSource: DataSource;
+ datasetSource?: DatasetSource;
  datasetSchema: DatasetSchema;
  } | null>;
  deleteDataset: (datasetId: string) => Promise<void>;
  listDatasets: () => Promise<{
  datasetId: string;
- dataSource: DataSource;
+ dataSource?: DatasetSource;
  datasetSchema: DatasetSchema;
  }[]>;
  }
package/dist/index.cjs CHANGED
@@ -27,70 +27,154 @@ var __webpack_require__ = {};
  var __webpack_exports__ = {};
  __webpack_require__.r(__webpack_exports__);
  __webpack_require__.d(__webpack_exports__, {
- DataSourceBuilder: ()=>DataSourceBuilder,
+ DatasetSourceBuilder: ()=>DatasetSourceBuilder,
  isHttpUrl: ()=>isHttpUrl,
+ convertDSLToSQL: ()=>convertDSLToSQL,
  isBase64Url: ()=>isBase64Url,
  isUrl: ()=>isUrl,
  VQuery: ()=>VQuery
  });
- const isSelectItem = (item)=>'object' == typeof item && 'field' in item;
- const isWhereLeaf = (where)=>'field' in where && 'op' in where && 'value' in where;
- const isWhereGroup = (where)=>'op' in where && 'conditions' in where;
- const isStringOrNumber = (value)=>'string' == typeof value || 'number' == typeof value;
- const applyWhere = (where)=>{
- const escape = (str)=>{
- if ('string' == typeof str) return `'${str.replace(/'/g, "''")}'`;
- return str;
- };
- if (isWhereGroup(where)) {
- const logicalOp = where.op.toUpperCase();
- return `(${where.conditions.map((c)=>applyWhere(c)).join(` ${logicalOp} `)})`;
+ const external_kysely_namespaceObject = require("kysely");
+ class PostgresDialect {
+ createDriver() {
+ return new external_kysely_namespaceObject.DummyDriver();
  }
- if (isWhereLeaf(where)) {
- const { field, op, value } = where;
- if ('is null' === op || 'is not null' === op) return `${field} ${op}`;
- if ('in' === op || 'not in' === op) {
- if (Array.isArray(value)) return `${field} ${op} (${value.map((v)=>escape(v)).join(', ')})`;
- }
- if ('between' === op || 'not between' === op) {
- if (Array.isArray(value) && 2 === value.length && isStringOrNumber(value[0]) && isStringOrNumber(value[1])) {
- const value0 = value[0];
- const value1 = value[1];
- return `${field} ${op} ${escape(value0)} and ${escape(value1)}`;
+ createQueryCompiler() {
+ return new external_kysely_namespaceObject.PostgresQueryCompiler();
+ }
+ createAdapter() {
+ return new external_kysely_namespaceObject.PostgresAdapter();
+ }
+ createIntrospector(db) {
+ class NullIntrospector {
+ async getSchemas() {
+ return [];
  }
+ async getTables(options) {
+ options?.withInternalKyselyTables;
+ return [];
+ }
+ async getMetadata(options) {
+ options?.withInternalKyselyTables;
+ return {
+ tables: []
+ };
+ }
+ }
+ return new NullIntrospector();
+ }
+ }
+ const isSelectItem = (item)=>'object' == typeof item && 'field' in item;
+ const escapeValue = (value)=>{
+ if (null === value) return 'null';
+ if ('string' == typeof value) return `'${value.replace(/'/g, "''")}'`;
+ if ('number' == typeof value) return `${value}`;
+ if ('boolean' == typeof value) return value ? 'TRUE' : 'FALSE';
+ return `'${String(value).replace(/'/g, "''")}'`;
+ };
+ const inlineParameters = (sql, params)=>{
+ if (0 === params.length) return sql;
+ if (sql.includes('?')) {
+ let out = sql;
+ for (const p of params)out = out.replace(/\?/, escapeValue(p));
+ return out;
+ }
+ if (/\$\d+/.test(sql)) return sql.replace(/\$(\d+)/g, (_, idx)=>{
+ const i = Number(idx) - 1;
+ const v = params[i];
+ return escapeValue(v);
+ });
+ return sql;
+ };
+ const applyWhere = (where)=>{
+ const toRaw = (w)=>{
+ if ('op' in w && 'conditions' in w) {
+ const parts = w.conditions.map((c)=>toRaw(c));
+ const sep = (0, external_kysely_namespaceObject.sql)` ${external_kysely_namespaceObject.sql.raw(w.op)} `;
+ return (0, external_kysely_namespaceObject.sql)`(${external_kysely_namespaceObject.sql.join(parts, sep)})`;
  }
- if (isStringOrNumber(value)) {
- const value0 = value;
- return `${field} ${op} ${escape(value0)}`;
+ const leaf = w;
+ const field = leaf.field;
+ const value = leaf.value;
+ switch(leaf.op){
+ case 'is null':
+ return (0, external_kysely_namespaceObject.sql)`${external_kysely_namespaceObject.sql.ref(field)} is null`;
+ case 'is not null':
+ return (0, external_kysely_namespaceObject.sql)`${external_kysely_namespaceObject.sql.ref(field)} is not null`;
+ case 'in':
+ {
+ const items = Array.isArray(value) ? value : [
+ value
+ ];
+ return (0, external_kysely_namespaceObject.sql)`${external_kysely_namespaceObject.sql.ref(field)} in (${external_kysely_namespaceObject.sql.join(items.map((v)=>external_kysely_namespaceObject.sql.val(v)))})`;
+ }
+ case 'not in':
+ {
+ const items = Array.isArray(value) ? value : [
+ value
+ ];
+ return (0, external_kysely_namespaceObject.sql)`not ${external_kysely_namespaceObject.sql.ref(field)} in (${external_kysely_namespaceObject.sql.join(items.map((v)=>external_kysely_namespaceObject.sql.val(v)))})`;
+ }
+ case 'between':
+ {
+ const [a, b] = value;
+ return (0, external_kysely_namespaceObject.sql)`${external_kysely_namespaceObject.sql.ref(field)} between (${external_kysely_namespaceObject.sql.val(a)}, ${external_kysely_namespaceObject.sql.val(b)})`;
+ }
+ case 'not between':
+ {
+ const [a, b] = value;
+ return (0, external_kysely_namespaceObject.sql)`not ${external_kysely_namespaceObject.sql.ref(field)} between (${external_kysely_namespaceObject.sql.val(a)}, ${external_kysely_namespaceObject.sql.val(b)})`;
+ }
+ default:
+ return (0, external_kysely_namespaceObject.sql)`${external_kysely_namespaceObject.sql.ref(field)} ${external_kysely_namespaceObject.sql.raw(leaf.op)} ${external_kysely_namespaceObject.sql.val(value)}`;
  }
+ };
+ return toRaw(where);
+ };
+ const applyGroupBy = (qb, fields)=>{
+ if (fields && fields.length > 0) {
+ const exprs = fields.map((f)=>external_kysely_namespaceObject.sql.id(f));
+ qb = qb.groupBy(exprs);
  }
- return '';
+ return qb;
+ };
+ const applyLimit = (qb, limit)=>{
+ if (limit && 'number' == typeof limit) qb = qb.limit(limit);
+ return qb;
  };
  const convertDSLToSQL = (dsl, tableName)=>{
- let sql = 'SELECT';
- if (dsl.select && dsl.select.length > 0) {
- const selectFields = dsl.select.map((item)=>{
- if ('string' == typeof item) return item;
+ const db = new external_kysely_namespaceObject.Kysely({
+ dialect: new PostgresDialect()
+ });
+ let qb = db.selectFrom(tableName);
+ qb = dsl.select && dsl.select.length > 0 ? qb.select((eb)=>dsl.select.map((item)=>{
  if (isSelectItem(item)) {
- if (item.func) return `${item.func}(${item.field})` + (item.alias ? ` AS "${item.alias}"` : '');
- if (item.alias) return `${item.field} AS "${item.alias}"`;
- return item.field;
+ const field = item.field;
+ if (item.func) {
+ const alias = item.alias ?? field;
+ switch(item.func){
+ case 'avg':
+ return eb.fn.avg(field).as(alias);
+ case 'sum':
+ return eb.fn.sum(field).as(alias);
+ case 'min':
+ return eb.fn.min(field).as(alias);
+ case 'max':
+ return eb.fn.max(field).as(alias);
+ case 'count':
+ return eb.fn.count(field).as(alias);
+ }
+ }
+ return item.alias ? eb.ref(field).as(item.alias) : field;
  }
- });
- sql += ` ${selectFields.join(', ')}`;
- } else sql += ' *';
- sql += ` FROM ${tableName}`;
- if (dsl.where) {
- const whereClause = applyWhere(dsl.where);
- if (whereClause) sql += ` WHERE ${whereClause}`;
- }
- if (dsl.groupBy && dsl.groupBy.length > 0) sql += ` GROUP BY ${dsl.groupBy.join(', ')}`;
- if (dsl.orderBy && dsl.orderBy.length > 0) {
- const orderByFields = dsl.orderBy.map((item)=>`${item.field}${item.order ? ` ${item.order.toUpperCase()}` : ''}`);
- sql += ` ORDER BY ${orderByFields.join(', ')}`;
- }
- if (dsl.limit) sql += ` LIMIT ${dsl.limit}`;
- return sql;
+ return item;
+ })) : qb.selectAll();
+ if (dsl.where) qb = qb.where(applyWhere(dsl.where));
+ qb = applyGroupBy(qb, dsl.groupBy);
+ if (dsl.orderBy && dsl.orderBy.length > 0) for (const o of dsl.orderBy)qb = qb.orderBy(o.field, o.order ?? 'asc');
+ qb = applyLimit(qb, dsl.limit);
+ const compiled = qb.compile();
+ return inlineParameters(compiled.sql, compiled.parameters);
  };
  class Dataset {
  duckDB;
@@ -101,7 +185,13 @@ class Dataset
  this.indexedDB = indexedDB1;
  this._datasetId = datasetId;
  }
- async init(temporaryStructs) {
+ async init(temporaryColumns = []) {
+ const datasetInfo = await this.indexedDB.readDataset(this._datasetId);
+ if (!datasetInfo) throw new Error(`Dataset ${this._datasetId} not found`);
+ const columns = temporaryColumns.length > 0 ? temporaryColumns : datasetInfo.datasetSchema.columns;
+ if (columns.length > 0) await this.createOrReplaceView(columns);
+ }
+ async createOrReplaceView(columns = []) {
  const readFunctionMap = {
  csv: 'read_csv_auto',
  json: 'read_json_auto',
@@ -117,16 +207,20 @@ class Dataset
  };
  const datasetInfo = await this.indexedDB.readDataset(this._datasetId);
  if (!datasetInfo) throw new Error(`Dataset ${this._datasetId} not found`);
- const { dataSource } = datasetInfo;
- const datasetSchema = datasetInfo.datasetSchema;
- const columns = temporaryStructs || datasetSchema.columns;
- const readFunction = readFunctionMap[dataSource.type];
- if (!readFunction) throw new Error(`Unsupported dataSource type: ${dataSource.type}`);
- await this.duckDB.writeFile(this._datasetId, dataSource.blob);
- const columnsStruct = `{${columns.map((c)=>`'${c.name}': '${dataTypeMap[c.type] || 'VARCHAR'}'`).join(', ')}}`;
- const columnNames = columns.map((c)=>`"${c.name}"`).join(', ');
- const createViewSql = `CREATE OR REPLACE VIEW "${this._datasetId}" AS SELECT ${columnNames} FROM ${readFunction}('${this._datasetId}', columns=${columnsStruct})`;
- await this.duckDB.query(createViewSql);
+ const { datasetSource: dataSource } = datasetInfo;
+ if (dataSource) {
+ const readFunction = readFunctionMap[dataSource.type];
+ if (!readFunction) throw new Error(`Unsupported dataSource type: ${dataSource.type}`);
+ await this.duckDB.writeFile(this._datasetId, dataSource.blob);
+ const columnsStruct = `{${columns.map((c)=>`'${c.name}': '${dataTypeMap[c.type] || 'VARCHAR'}'`).join(', ')}}`;
+ const columnNames = columns.map((c)=>`"${c.name}"`).join(', ');
+ const createViewSql = `CREATE OR REPLACE VIEW "${this._datasetId}" AS SELECT ${columnNames} FROM ${readFunction}('${this._datasetId}', columns=${columnsStruct})`;
+ await this.duckDB.query(createViewSql);
+ }
+ }
+ async query(queryDSL) {
+ const sql = convertDSLToSQL(queryDSL, this.datasetId);
+ return this.queryBySQL(sql);
  }
  async queryBySQL(sql) {
  const start = performance?.now?.()?.toFixed(3) ?? Date.now().toFixed(3);
@@ -141,14 +235,6 @@ class Dataset
  }
  };
  }
- convertDSLToSQL(queryDSL) {
- return convertDSLToSQL(queryDSL, this.datasetId);
- }
- async query(queryDSL) {
- const sql = this.convertDSLToSQL(queryDSL);
- console.log(sql);
- return this.queryBySQL(sql);
- }
  async disconnect() {
  await this.duckDB.query(`DROP VIEW IF EXISTS "${this._datasetId}"`);
  }
@@ -248,7 +334,7 @@ class IndexedDB
  this.db = null;
  }
  };
- writeDataset = (datasetId, dataSource, datasetSchema)=>new Promise((resolve, reject)=>{
+ writeDataset = (datasetId, datasetSchema, datasetSource)=>new Promise((resolve, reject)=>{
  if (!this.db) return reject('DB is not open');
  const transaction = this.db.transaction([
  this.datasetStoreName
@@ -256,8 +342,8 @@ class IndexedDB
  const store = transaction.objectStore(this.datasetStoreName);
  const request = store.put({
  datasetId,
- dataSource,
- datasetSchema
+ datasetSchema,
+ datasetSource
  });
  request.onsuccess = ()=>{
  resolve();
@@ -314,18 +400,18 @@ class IndexedDB
  const isUrl = (url)=>isHttpUrl(url) || isBase64Url(url);
  const isHttpUrl = (url)=>url.startsWith('http://') || url.startsWith('https://');
  const isBase64Url = (url)=>url.startsWith('data:');
- class DataSourceBuilder {
+ class DatasetSourceBuilder {
  type;
  value;
- constructor(type, value){
- this.type = type;
- this.value = value;
+ constructor(raw){
+ this.type = raw.type;
+ this.value = raw.rawDataset;
  }
- static from(type, value) {
- return new DataSourceBuilder(type, value);
+ static from(raw) {
+ return new DatasetSourceBuilder(raw);
  }
  async build() {
- const blob = await DataSourceBuilder.convertToBlob(this.type, this.value);
+ const blob = await DatasetSourceBuilder.convertToBlob(this.type, this.value);
  return {
  type: this.type,
  blob: blob
@@ -339,7 +425,7 @@ class DataSourceBuilder
  ], {
  type: 'text/csv'
  });
- if ('string' == typeof csvSource && isUrl(csvSource)) return DataSourceBuilder.fetchBlob(csvSource);
+ if ('string' == typeof csvSource && isUrl(csvSource)) return DatasetSourceBuilder.fetchBlob(csvSource);
  return new Blob([
  JSON.stringify(csvSource)
  ], {
@@ -352,7 +438,7 @@ class DataSourceBuilder
  ], {
  type: 'application/json'
  });
- if ('string' == typeof jsonSource && isUrl(jsonSource)) return DataSourceBuilder.fetchBlob(jsonSource);
+ if ('string' == typeof jsonSource && isUrl(jsonSource)) return DatasetSourceBuilder.fetchBlob(jsonSource);
  return new Blob([
  JSON.stringify(jsonSource)
  ], {
@@ -365,7 +451,7 @@ class DataSourceBuilder
  ], {
  type: 'application/parquet'
  });
- if ('string' == typeof parquetSource && isUrl(parquetSource)) return DataSourceBuilder.fetchBlob(parquetSource);
+ if ('string' == typeof parquetSource && isUrl(parquetSource)) return DatasetSourceBuilder.fetchBlob(parquetSource);
  return new Blob([
  parquetSource
  ], {
@@ -378,7 +464,7 @@ class DataSourceBuilder
  ], {
  type: 'application/vnd.openxmlformats-officedocument.spreadsheetml.sheet'
  });
- if ('string' == typeof xlsxSource && isUrl(xlsxSource)) return DataSourceBuilder.fetchBlob(xlsxSource);
+ if ('string' == typeof xlsxSource && isUrl(xlsxSource)) return DatasetSourceBuilder.fetchBlob(xlsxSource);
  return new Blob([
  xlsxSource
  ], {
@@ -413,56 +499,74 @@ class VQuery
  this.duckDB = new DuckDB();
  this.indexedDB = new IndexedDB(dbName);
  }
- async ensureInitialized() {
+ async checkInitialized() {
  if (!this.isInitialized) {
  await this.duckDB.init();
  await this.indexedDB.open();
  this.isInitialized = true;
  }
  }
- async createDataset(datasetId, data, type, datasetSchema) {
- await this.ensureInitialized();
- const dataSource = await DataSourceBuilder.from(type, data).build();
- await this.indexedDB.writeDataset(datasetId, dataSource, datasetSchema);
+ async checkDatasetExists(datasetId) {
+ if (!await this.hasDataset(datasetId)) throw new Error(`dataset ${datasetId} not exists, please create it first`);
  }
- async updateDataset(datasetId, data, type, datasetSchema) {
- await this.ensureInitialized();
- const dataSource = await DataSourceBuilder.from(type, data).build();
- await this.indexedDB.writeDataset(datasetId, dataSource, datasetSchema);
+ async createDataset(datasetId, columns = [], rawDatasetSource) {
+ await this.checkInitialized();
+ const datasetSource = rawDatasetSource ? await DatasetSourceBuilder.from(rawDatasetSource).build() : void 0;
+ if (await this.hasDataset(datasetId)) throw new Error(`dataset ${datasetId} already exists`);
+ const datasetSchema = {
+ datasetId,
+ datasetAlias: datasetId,
+ columns: columns
+ };
+ await this.indexedDB.writeDataset(datasetId, datasetSchema, datasetSource);
  }
- async deleteDataset(datasetId) {
- await this.ensureInitialized();
- await this.indexedDB.deleteDataset(datasetId);
+ async updateDatasetSource(datasetId, columns = [], rawDatasetSource) {
+ await this.checkInitialized();
+ await this.checkDatasetExists(datasetId);
+ const datasetSource = rawDatasetSource ? await DatasetSourceBuilder.from(rawDatasetSource).build() : void 0;
+ const datasetSchema = {
+ datasetId,
+ datasetAlias: datasetId,
+ columns: columns
+ };
+ await this.indexedDB.writeDataset(datasetId, datasetSchema, datasetSource);
  }
- async listDatasets() {
- await this.ensureInitialized();
- return this.indexedDB.listDatasets();
+ async dropDataset(datasetId) {
+ await this.checkInitialized();
+ await this.checkDatasetExists(datasetId);
+ await this.indexedDB.deleteDataset(datasetId);
  }
- async connectDataset(datasetId) {
- await this.ensureInitialized();
+ async connectDataset(datasetId, temporaryColumns = []) {
+ await this.checkInitialized();
+ await this.checkDatasetExists(datasetId);
  const dataset = new Dataset(this.duckDB, this.indexedDB, datasetId);
- await dataset.init();
+ await dataset.init(temporaryColumns);
  return dataset;
  }
- async connectTemporaryDataset(datasetId, temporaryDatasetSchema) {
- await this.ensureInitialized();
- const dataset = new Dataset(this.duckDB, this.indexedDB, datasetId);
- await dataset.init(temporaryDatasetSchema);
- return dataset;
+ async hasDataset(datasetId) {
+ await this.checkInitialized();
+ const datasets = await this.indexedDB.listDatasets();
+ return datasets.some((item)=>item.datasetId === datasetId);
+ }
+ async listDatasets() {
+ await this.checkInitialized();
+ return this.indexedDB.listDatasets();
  }
  async close() {
- await this.ensureInitialized();
+ await this.checkInitialized();
  await this.duckDB.close();
  }
  }
- exports.DataSourceBuilder = __webpack_exports__.DataSourceBuilder;
+ exports.DatasetSourceBuilder = __webpack_exports__.DatasetSourceBuilder;
  exports.VQuery = __webpack_exports__.VQuery;
+ exports.convertDSLToSQL = __webpack_exports__.convertDSLToSQL;
  exports.isBase64Url = __webpack_exports__.isBase64Url;
  exports.isHttpUrl = __webpack_exports__.isHttpUrl;
  exports.isUrl = __webpack_exports__.isUrl;
  for(var __webpack_i__ in __webpack_exports__)if (-1 === [
- "DataSourceBuilder",
+ "DatasetSourceBuilder",
  "VQuery",
+ "convertDSLToSQL",
  "isBase64Url",
  "isHttpUrl",
  "isUrl"
package/dist/index.d.ts CHANGED
@@ -1,3 +1,4 @@
  export { VQuery } from './vquery';
- export { DataSourceBuilder } from './dataSourceBuilder/dataSourceBuilder';
+ export { convertDSLToSQL } from './dataset';
+ export { DatasetSourceBuilder } from './data-source-builder/dataSourceBuilder';
  export * from './utils';
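
As the reworked entry points above show, DatasetSourceBuilder is now built from a single RawDatasetSource object ({ type, rawDataset }) rather than separate (type, value) arguments. A minimal TypeScript sketch of the new call shape, using an illustrative inline CSV string that is not taken from this diff:

    import { DatasetSourceBuilder } from '@visactor/vquery';

    // rawDataset may be a string, ArrayBuffer, Blob or TidyDatum[]; URL strings are fetched.
    const source = await DatasetSourceBuilder.from({
      type: 'csv',
      rawDataset: 'region,sales\nnorth,10\nsouth,20'
    }).build();
    // source is { type: 'csv', blob: Blob }, ready to be stored alongside the dataset schema.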
package/dist/index.js CHANGED
@@ -1,62 +1,145 @@
+ import { DummyDriver, Kysely, PostgresAdapter, PostgresQueryCompiler, sql as external_kysely_sql } from "kysely";
  import { AsyncDuckDB, ConsoleLogger, selectBundle } from "@duckdb/duckdb-wasm";
- const isSelectItem = (item)=>'object' == typeof item && 'field' in item;
- const isWhereLeaf = (where)=>'field' in where && 'op' in where && 'value' in where;
- const isWhereGroup = (where)=>'op' in where && 'conditions' in where;
- const isStringOrNumber = (value)=>'string' == typeof value || 'number' == typeof value;
- const applyWhere = (where)=>{
- const escape = (str)=>{
- if ('string' == typeof str) return `'${str.replace(/'/g, "''")}'`;
- return str;
- };
- if (isWhereGroup(where)) {
- const logicalOp = where.op.toUpperCase();
- return `(${where.conditions.map((c)=>applyWhere(c)).join(` ${logicalOp} `)})`;
+ class PostgresDialect {
+ createDriver() {
+ return new DummyDriver();
  }
- if (isWhereLeaf(where)) {
- const { field, op, value } = where;
- if ('is null' === op || 'is not null' === op) return `${field} ${op}`;
- if ('in' === op || 'not in' === op) {
- if (Array.isArray(value)) return `${field} ${op} (${value.map((v)=>escape(v)).join(', ')})`;
- }
- if ('between' === op || 'not between' === op) {
- if (Array.isArray(value) && 2 === value.length && isStringOrNumber(value[0]) && isStringOrNumber(value[1])) {
- const value0 = value[0];
- const value1 = value[1];
- return `${field} ${op} ${escape(value0)} and ${escape(value1)}`;
+ createQueryCompiler() {
+ return new PostgresQueryCompiler();
+ }
+ createAdapter() {
+ return new PostgresAdapter();
+ }
+ createIntrospector(db) {
+ class NullIntrospector {
+ async getSchemas() {
+ return [];
  }
+ async getTables(options) {
+ options?.withInternalKyselyTables;
+ return [];
+ }
+ async getMetadata(options) {
+ options?.withInternalKyselyTables;
+ return {
+ tables: []
+ };
+ }
+ }
+ return new NullIntrospector();
+ }
+ }
+ const isSelectItem = (item)=>'object' == typeof item && 'field' in item;
+ const escapeValue = (value)=>{
+ if (null === value) return 'null';
+ if ('string' == typeof value) return `'${value.replace(/'/g, "''")}'`;
+ if ('number' == typeof value) return `${value}`;
+ if ('boolean' == typeof value) return value ? 'TRUE' : 'FALSE';
+ return `'${String(value).replace(/'/g, "''")}'`;
+ };
+ const inlineParameters = (sql, params)=>{
+ if (0 === params.length) return sql;
+ if (sql.includes('?')) {
+ let out = sql;
+ for (const p of params)out = out.replace(/\?/, escapeValue(p));
+ return out;
+ }
+ if (/\$\d+/.test(sql)) return sql.replace(/\$(\d+)/g, (_, idx)=>{
+ const i = Number(idx) - 1;
+ const v = params[i];
+ return escapeValue(v);
+ });
+ return sql;
+ };
+ const applyWhere = (where)=>{
+ const toRaw = (w)=>{
+ if ('op' in w && 'conditions' in w) {
+ const parts = w.conditions.map((c)=>toRaw(c));
+ const sep = external_kysely_sql` ${external_kysely_sql.raw(w.op)} `;
+ return external_kysely_sql`(${external_kysely_sql.join(parts, sep)})`;
  }
- if (isStringOrNumber(value)) {
- const value0 = value;
- return `${field} ${op} ${escape(value0)}`;
+ const leaf = w;
+ const field = leaf.field;
+ const value = leaf.value;
+ switch(leaf.op){
+ case 'is null':
+ return external_kysely_sql`${external_kysely_sql.ref(field)} is null`;
+ case 'is not null':
+ return external_kysely_sql`${external_kysely_sql.ref(field)} is not null`;
+ case 'in':
+ {
+ const items = Array.isArray(value) ? value : [
+ value
+ ];
+ return external_kysely_sql`${external_kysely_sql.ref(field)} in (${external_kysely_sql.join(items.map((v)=>external_kysely_sql.val(v)))})`;
+ }
+ case 'not in':
+ {
+ const items = Array.isArray(value) ? value : [
+ value
+ ];
+ return external_kysely_sql`not ${external_kysely_sql.ref(field)} in (${external_kysely_sql.join(items.map((v)=>external_kysely_sql.val(v)))})`;
+ }
+ case 'between':
+ {
+ const [a, b] = value;
+ return external_kysely_sql`${external_kysely_sql.ref(field)} between (${external_kysely_sql.val(a)}, ${external_kysely_sql.val(b)})`;
+ }
+ case 'not between':
+ {
+ const [a, b] = value;
+ return external_kysely_sql`not ${external_kysely_sql.ref(field)} between (${external_kysely_sql.val(a)}, ${external_kysely_sql.val(b)})`;
+ }
+ default:
+ return external_kysely_sql`${external_kysely_sql.ref(field)} ${external_kysely_sql.raw(leaf.op)} ${external_kysely_sql.val(value)}`;
  }
+ };
+ return toRaw(where);
+ };
+ const applyGroupBy = (qb, fields)=>{
+ if (fields && fields.length > 0) {
+ const exprs = fields.map((f)=>external_kysely_sql.id(f));
+ qb = qb.groupBy(exprs);
  }
- return '';
+ return qb;
+ };
+ const applyLimit = (qb, limit)=>{
+ if (limit && 'number' == typeof limit) qb = qb.limit(limit);
+ return qb;
  };
  const convertDSLToSQL = (dsl, tableName)=>{
- let sql = 'SELECT';
- if (dsl.select && dsl.select.length > 0) {
- const selectFields = dsl.select.map((item)=>{
- if ('string' == typeof item) return item;
+ const db = new Kysely({
+ dialect: new PostgresDialect()
+ });
+ let qb = db.selectFrom(tableName);
+ qb = dsl.select && dsl.select.length > 0 ? qb.select((eb)=>dsl.select.map((item)=>{
  if (isSelectItem(item)) {
- if (item.func) return `${item.func}(${item.field})` + (item.alias ? ` AS "${item.alias}"` : '');
- if (item.alias) return `${item.field} AS "${item.alias}"`;
- return item.field;
+ const field = item.field;
+ if (item.func) {
+ const alias = item.alias ?? field;
+ switch(item.func){
+ case 'avg':
+ return eb.fn.avg(field).as(alias);
+ case 'sum':
+ return eb.fn.sum(field).as(alias);
+ case 'min':
+ return eb.fn.min(field).as(alias);
+ case 'max':
+ return eb.fn.max(field).as(alias);
+ case 'count':
+ return eb.fn.count(field).as(alias);
+ }
+ }
+ return item.alias ? eb.ref(field).as(item.alias) : field;
  }
- });
- sql += ` ${selectFields.join(', ')}`;
- } else sql += ' *';
- sql += ` FROM ${tableName}`;
- if (dsl.where) {
- const whereClause = applyWhere(dsl.where);
- if (whereClause) sql += ` WHERE ${whereClause}`;
- }
- if (dsl.groupBy && dsl.groupBy.length > 0) sql += ` GROUP BY ${dsl.groupBy.join(', ')}`;
- if (dsl.orderBy && dsl.orderBy.length > 0) {
- const orderByFields = dsl.orderBy.map((item)=>`${item.field}${item.order ? ` ${item.order.toUpperCase()}` : ''}`);
- sql += ` ORDER BY ${orderByFields.join(', ')}`;
- }
- if (dsl.limit) sql += ` LIMIT ${dsl.limit}`;
- return sql;
+ return item;
+ })) : qb.selectAll();
+ if (dsl.where) qb = qb.where(applyWhere(dsl.where));
+ qb = applyGroupBy(qb, dsl.groupBy);
+ if (dsl.orderBy && dsl.orderBy.length > 0) for (const o of dsl.orderBy)qb = qb.orderBy(o.field, o.order ?? 'asc');
+ qb = applyLimit(qb, dsl.limit);
+ const compiled = qb.compile();
+ return inlineParameters(compiled.sql, compiled.parameters);
  };
  class Dataset {
  duckDB;
@@ -67,7 +150,13 @@ class Dataset
  this.indexedDB = indexedDB1;
  this._datasetId = datasetId;
  }
- async init(temporaryStructs) {
+ async init(temporaryColumns = []) {
+ const datasetInfo = await this.indexedDB.readDataset(this._datasetId);
+ if (!datasetInfo) throw new Error(`Dataset ${this._datasetId} not found`);
+ const columns = temporaryColumns.length > 0 ? temporaryColumns : datasetInfo.datasetSchema.columns;
+ if (columns.length > 0) await this.createOrReplaceView(columns);
+ }
+ async createOrReplaceView(columns = []) {
  const readFunctionMap = {
  csv: 'read_csv_auto',
  json: 'read_json_auto',
@@ -83,16 +172,20 @@ class Dataset
  };
  const datasetInfo = await this.indexedDB.readDataset(this._datasetId);
  if (!datasetInfo) throw new Error(`Dataset ${this._datasetId} not found`);
- const { dataSource } = datasetInfo;
- const datasetSchema = datasetInfo.datasetSchema;
- const columns = temporaryStructs || datasetSchema.columns;
- const readFunction = readFunctionMap[dataSource.type];
- if (!readFunction) throw new Error(`Unsupported dataSource type: ${dataSource.type}`);
- await this.duckDB.writeFile(this._datasetId, dataSource.blob);
- const columnsStruct = `{${columns.map((c)=>`'${c.name}': '${dataTypeMap[c.type] || 'VARCHAR'}'`).join(', ')}}`;
- const columnNames = columns.map((c)=>`"${c.name}"`).join(', ');
- const createViewSql = `CREATE OR REPLACE VIEW "${this._datasetId}" AS SELECT ${columnNames} FROM ${readFunction}('${this._datasetId}', columns=${columnsStruct})`;
- await this.duckDB.query(createViewSql);
+ const { datasetSource: dataSource } = datasetInfo;
+ if (dataSource) {
+ const readFunction = readFunctionMap[dataSource.type];
+ if (!readFunction) throw new Error(`Unsupported dataSource type: ${dataSource.type}`);
+ await this.duckDB.writeFile(this._datasetId, dataSource.blob);
+ const columnsStruct = `{${columns.map((c)=>`'${c.name}': '${dataTypeMap[c.type] || 'VARCHAR'}'`).join(', ')}}`;
+ const columnNames = columns.map((c)=>`"${c.name}"`).join(', ');
+ const createViewSql = `CREATE OR REPLACE VIEW "${this._datasetId}" AS SELECT ${columnNames} FROM ${readFunction}('${this._datasetId}', columns=${columnsStruct})`;
+ await this.duckDB.query(createViewSql);
+ }
+ }
+ async query(queryDSL) {
+ const sql = convertDSLToSQL(queryDSL, this.datasetId);
+ return this.queryBySQL(sql);
  }
  async queryBySQL(sql) {
  const start = performance?.now?.()?.toFixed(3) ?? Date.now().toFixed(3);
@@ -107,14 +200,6 @@ class Dataset
  }
  };
  }
- convertDSLToSQL(queryDSL) {
- return convertDSLToSQL(queryDSL, this.datasetId);
- }
- async query(queryDSL) {
- const sql = this.convertDSLToSQL(queryDSL);
- console.log(sql);
- return this.queryBySQL(sql);
- }
  async disconnect() {
  await this.duckDB.query(`DROP VIEW IF EXISTS "${this._datasetId}"`);
  }
@@ -213,7 +298,7 @@ class IndexedDB
  this.db = null;
  }
  };
- writeDataset = (datasetId, dataSource, datasetSchema)=>new Promise((resolve, reject)=>{
+ writeDataset = (datasetId, datasetSchema, datasetSource)=>new Promise((resolve, reject)=>{
  if (!this.db) return reject('DB is not open');
  const transaction = this.db.transaction([
  this.datasetStoreName
@@ -221,8 +306,8 @@ class IndexedDB
  const store = transaction.objectStore(this.datasetStoreName);
  const request = store.put({
  datasetId,
- dataSource,
- datasetSchema
+ datasetSchema,
+ datasetSource
  });
  request.onsuccess = ()=>{
  resolve();
@@ -279,18 +364,18 @@ class IndexedDB
  const isUrl = (url)=>isHttpUrl(url) || isBase64Url(url);
  const isHttpUrl = (url)=>url.startsWith('http://') || url.startsWith('https://');
  const isBase64Url = (url)=>url.startsWith('data:');
- class DataSourceBuilder {
+ class DatasetSourceBuilder {
  type;
  value;
- constructor(type, value){
- this.type = type;
- this.value = value;
+ constructor(raw){
+ this.type = raw.type;
+ this.value = raw.rawDataset;
  }
- static from(type, value) {
- return new DataSourceBuilder(type, value);
+ static from(raw) {
+ return new DatasetSourceBuilder(raw);
  }
  async build() {
- const blob = await DataSourceBuilder.convertToBlob(this.type, this.value);
+ const blob = await DatasetSourceBuilder.convertToBlob(this.type, this.value);
  return {
  type: this.type,
  blob: blob
@@ -304,7 +389,7 @@ class DataSourceBuilder
  ], {
  type: 'text/csv'
  });
- if ('string' == typeof csvSource && isUrl(csvSource)) return DataSourceBuilder.fetchBlob(csvSource);
+ if ('string' == typeof csvSource && isUrl(csvSource)) return DatasetSourceBuilder.fetchBlob(csvSource);
  return new Blob([
  JSON.stringify(csvSource)
  ], {
@@ -317,7 +402,7 @@ class DataSourceBuilder
  ], {
  type: 'application/json'
  });
- if ('string' == typeof jsonSource && isUrl(jsonSource)) return DataSourceBuilder.fetchBlob(jsonSource);
+ if ('string' == typeof jsonSource && isUrl(jsonSource)) return DatasetSourceBuilder.fetchBlob(jsonSource);
  return new Blob([
  JSON.stringify(jsonSource)
  ], {
@@ -330,7 +415,7 @@ class DataSourceBuilder
  ], {
  type: 'application/parquet'
  });
- if ('string' == typeof parquetSource && isUrl(parquetSource)) return DataSourceBuilder.fetchBlob(parquetSource);
+ if ('string' == typeof parquetSource && isUrl(parquetSource)) return DatasetSourceBuilder.fetchBlob(parquetSource);
  return new Blob([
  parquetSource
  ], {
@@ -343,7 +428,7 @@ class DataSourceBuilder
  ], {
  type: 'application/vnd.openxmlformats-officedocument.spreadsheetml.sheet'
  });
- if ('string' == typeof xlsxSource && isUrl(xlsxSource)) return DataSourceBuilder.fetchBlob(xlsxSource);
+ if ('string' == typeof xlsxSource && isUrl(xlsxSource)) return DatasetSourceBuilder.fetchBlob(xlsxSource);
  return new Blob([
  xlsxSource
  ], {
@@ -378,46 +463,62 @@ class VQuery
  this.duckDB = new DuckDB();
  this.indexedDB = new IndexedDB(dbName);
  }
- async ensureInitialized() {
+ async checkInitialized() {
  if (!this.isInitialized) {
  await this.duckDB.init();
  await this.indexedDB.open();
  this.isInitialized = true;
  }
  }
- async createDataset(datasetId, data, type, datasetSchema) {
- await this.ensureInitialized();
- const dataSource = await DataSourceBuilder.from(type, data).build();
- await this.indexedDB.writeDataset(datasetId, dataSource, datasetSchema);
+ async checkDatasetExists(datasetId) {
+ if (!await this.hasDataset(datasetId)) throw new Error(`dataset ${datasetId} not exists, please create it first`);
  }
- async updateDataset(datasetId, data, type, datasetSchema) {
- await this.ensureInitialized();
- const dataSource = await DataSourceBuilder.from(type, data).build();
- await this.indexedDB.writeDataset(datasetId, dataSource, datasetSchema);
+ async createDataset(datasetId, columns = [], rawDatasetSource) {
+ await this.checkInitialized();
+ const datasetSource = rawDatasetSource ? await DatasetSourceBuilder.from(rawDatasetSource).build() : void 0;
+ if (await this.hasDataset(datasetId)) throw new Error(`dataset ${datasetId} already exists`);
+ const datasetSchema = {
+ datasetId,
+ datasetAlias: datasetId,
+ columns: columns
+ };
+ await this.indexedDB.writeDataset(datasetId, datasetSchema, datasetSource);
  }
- async deleteDataset(datasetId) {
- await this.ensureInitialized();
- await this.indexedDB.deleteDataset(datasetId);
+ async updateDatasetSource(datasetId, columns = [], rawDatasetSource) {
+ await this.checkInitialized();
+ await this.checkDatasetExists(datasetId);
+ const datasetSource = rawDatasetSource ? await DatasetSourceBuilder.from(rawDatasetSource).build() : void 0;
+ const datasetSchema = {
+ datasetId,
+ datasetAlias: datasetId,
+ columns: columns
+ };
+ await this.indexedDB.writeDataset(datasetId, datasetSchema, datasetSource);
  }
- async listDatasets() {
- await this.ensureInitialized();
- return this.indexedDB.listDatasets();
+ async dropDataset(datasetId) {
+ await this.checkInitialized();
+ await this.checkDatasetExists(datasetId);
+ await this.indexedDB.deleteDataset(datasetId);
  }
- async connectDataset(datasetId) {
- await this.ensureInitialized();
+ async connectDataset(datasetId, temporaryColumns = []) {
+ await this.checkInitialized();
+ await this.checkDatasetExists(datasetId);
  const dataset = new Dataset(this.duckDB, this.indexedDB, datasetId);
- await dataset.init();
+ await dataset.init(temporaryColumns);
  return dataset;
  }
- async connectTemporaryDataset(datasetId, temporaryDatasetSchema) {
- await this.ensureInitialized();
- const dataset = new Dataset(this.duckDB, this.indexedDB, datasetId);
- await dataset.init(temporaryDatasetSchema);
- return dataset;
+ async hasDataset(datasetId) {
+ await this.checkInitialized();
+ const datasets = await this.indexedDB.listDatasets();
+ return datasets.some((item)=>item.datasetId === datasetId);
+ }
+ async listDatasets() {
+ await this.checkInitialized();
+ return this.indexedDB.listDatasets();
  }
  async close() {
- await this.ensureInitialized();
+ await this.checkInitialized();
  await this.duckDB.close();
  }
  }
- export { DataSourceBuilder, VQuery, isBase64Url, isHttpUrl, isUrl };
+ export { DatasetSourceBuilder, VQuery, convertDSLToSQL, isBase64Url, isHttpUrl, isUrl };
@@ -0,0 +1,2 @@
1
+ import type { SelectQueryBuilder } from 'kysely';
2
+ export declare const applyGroupBy: <DB, TB extends keyof DB & string, O>(qb: SelectQueryBuilder<DB, TB, O>, fields?: Array<string>) => SelectQueryBuilder<DB, TB, O>;
@@ -0,0 +1,3 @@
1
+ export { applyWhere } from './where';
2
+ export { applyGroupBy } from './groupBy';
3
+ export { applyLimit } from './limit';
@@ -0,0 +1,2 @@
1
+ import type { SelectQueryBuilder } from 'kysely';
2
+ export declare const applyLimit: <DB, TB extends keyof DB & string, O>(qb: SelectQueryBuilder<DB, TB, O>, limit?: number) => SelectQueryBuilder<DB, TB, O>;
@@ -1,2 +1,3 @@
1
1
  import { Where, WhereClause } from '../../types';
2
- export declare const applyWhere: <T>(where: Where<T> | WhereClause<T>) => string;
2
+ import type { RawBuilder } from 'kysely';
3
+ export declare const applyWhere: <T>(where: Where<T> | WhereClause<T>) => RawBuilder<boolean>;
@@ -0,0 +1 @@
1
+ export { inlineParameters } from './inlineParameters';
@@ -0,0 +1 @@
1
+ export declare const inlineParameters: (sql: string, params: readonly unknown[]) => string;
@@ -0,0 +1 @@
1
+ export { PostgresDialect } from './postgresDialect';
@@ -0,0 +1,11 @@
1
+ import { Dialect, DummyDriver } from 'kysely';
2
+ import { PostgresQueryCompiler } from 'kysely';
3
+ import { PostgresAdapter } from 'kysely';
4
+ import { Kysely } from 'kysely';
5
+ import type { DatabaseIntrospector } from 'kysely';
6
+ export declare class PostgresDialect implements Dialect {
7
+ createDriver(): DummyDriver;
8
+ createQueryCompiler(): PostgresQueryCompiler;
9
+ createAdapter(): PostgresAdapter;
10
+ createIntrospector<DB = unknown>(db: Kysely<DB>): DatabaseIntrospector;
11
+ }
@@ -0,0 +1,2 @@
1
+ import { QueryDSL } from '../types';
2
+ export declare const convertDSLToSQL: <T, TableName extends string>(dsl: QueryDSL<T>, tableName: TableName) => string;
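
The QueryDSL accepted by the new convertDSLToSQL export can be read off the bundled implementation above: plain field names or { field, func, alias } select items, nested where groups and leaves, groupBy, orderBy and limit. A minimal TypeScript sketch with illustrative column names; the commented SQL is only an approximation, since the exact quoting comes from Kysely's PostgresQueryCompiler followed by inlineParameters:

    import { convertDSLToSQL } from '@visactor/vquery';

    // 'region' and 'sales' are hypothetical columns used only for illustration.
    const sql = convertDSLToSQL({
      select: [{ field: 'region' }, { field: 'sales', func: 'sum', alias: 'total' }],
      where: { field: 'region', op: 'in', value: ['north', 'south'] },
      groupBy: ['region'],
      orderBy: [{ field: 'region', order: 'asc' }],
      limit: 10
    }, 'my_dataset');
    // Roughly: select "region", sum("sales") as "total" from "my_dataset"
    //          where "region" in ('north', 'south') group by "region"
    //          order by "region" asc limit 10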
@@ -0,0 +1 @@
+ export { convertDSLToSQL } from './dslToSQL';
@@ -1,6 +1,8 @@
- import { Where, WhereClause, WhereGroup, WhereLeaf } from '../../types';
- import { SelectItem } from '../../types/QueryDSL/Select';
+ import { Where, WhereClause, WhereGroup, WhereLeaf } from '../types';
+ import { SelectItem } from '../types/QueryDSL/Select';
  export declare const isSelectItem: <T>(item: keyof T | SelectItem<T>) => item is SelectItem<T>;
  export declare const isWhereLeaf: <T>(where: Where<T> | WhereClause<T>) => where is WhereLeaf<T>;
  export declare const isWhereGroup: <T>(where: Where<T> | WhereClause<T>) => where is WhereGroup<T>;
  export declare const isStringOrNumber: (value: unknown) => value is string | number;
+ export declare const escapeLiteral: <T>(value: T[keyof T]) => T[keyof T];
+ export declare const escapeValue: (value: unknown) => string;
@@ -1,7 +1,11 @@
  export type TidyDatum = Record<string, number | string | null | boolean | undefined>;
- export type DataSourceType = 'csv' | 'json' | 'xlsx' | 'parquet';
- export type DataSourceValue = string | ArrayBuffer | Blob | TidyDatum[];
- export interface DataSource {
- type: DataSourceType;
+ export type DatasetSourceType = 'csv' | 'json' | 'xlsx' | 'parquet';
+ export type DatasetSourceValue = string | ArrayBuffer | Blob | TidyDatum[];
+ export interface DatasetSource {
+ type: DatasetSourceType;
  blob: Blob;
  }
+ export interface RawDatasetSource {
+ type: DatasetSourceType;
+ rawDataset: DatasetSourceValue;
+ }
@@ -12,7 +12,7 @@ export type WhereLeaf<T> = {
  } & (O extends 'is null' | 'is not null' ? {
  value?: never;
  } : O extends 'in' | 'not in' ? {
- value: T[K][];
+ value: T[K] | T[K][];
  } : O extends 'between' | 'not between' ? {
  value: [T[K], T[K]];
  } : {
package/dist/vquery.d.ts CHANGED
@@ -1,43 +1,21 @@
  import { Dataset } from './dataset/dataset';
- import { DatasetSchema, TidyDatum, DataSourceType, DatasetColumn } from './types';
+ import { RawDatasetSource, DatasetColumn } from './types';
  export declare class VQuery {
  private duckDB;
  private indexedDB;
  private isInitialized;
  constructor(dbName?: string);
- private ensureInitialized;
- /**
- * Create a dataset: store its schema and data in IndexedDB
- */
- createDataset(datasetId: string, data: string | ArrayBuffer | Blob | TidyDatum[], type: DataSourceType, datasetSchema: DatasetSchema): Promise<void>;
- /**
- * Update a dataset: write the new information to IndexedDB
- */
- updateDataset(datasetId: string, data: string | ArrayBuffer | Blob | TidyDatum[], type: DataSourceType, datasetSchema: DatasetSchema): Promise<void>;
- /**
- * Delete a dataset: remove it from IndexedDB
- */
- deleteDataset(datasetId: string): Promise<void>;
- /**
- * List all available datasets
- */
+ private checkInitialized;
+ private checkDatasetExists;
+ createDataset(datasetId: string, columns?: DatasetColumn[], rawDatasetSource?: RawDatasetSource): Promise<void>;
+ updateDatasetSource(datasetId: string, columns?: DatasetColumn[], rawDatasetSource?: RawDatasetSource): Promise<void>;
+ dropDataset(datasetId: string): Promise<void>;
+ connectDataset(datasetId: string, temporaryColumns?: DatasetColumn[]): Promise<Dataset>;
+ hasDataset(datasetId: string): Promise<boolean>;
  listDatasets(): Promise<{
  datasetId: string;
- dataSource: import("./types").DataSource;
- datasetSchema: DatasetSchema;
+ dataSource?: import("./types").DatasetSource;
+ datasetSchema: import("./types").DatasetSchema;
  }[]>;
- /**
- * Connect to a dataset: read the schema from IndexedDB and build the in-memory table with DuckDB, then return the dataset
- */
- connectDataset(datasetId: string): Promise<Dataset>;
- /**
- * Connect to a temporary dataset: read the schema from IndexedDB and build the in-memory table with DuckDB, then return the dataset
- * @param datasetId
- * @returns
- */
- connectTemporaryDataset(datasetId: string, temporaryDatasetSchema?: DatasetColumn[]): Promise<Dataset>;
- /**
- * Close all dataset connections and release DuckDB resources
- */
  close(): Promise<void>;
  }
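
Read together, the new vquery.d.ts surface above implies a create → connect → query flow. A minimal TypeScript sketch under assumptions: the dataset id, column names and the 'string'/'number' column type strings are illustrative and not taken from this diff:

    import { VQuery } from '@visactor/vquery';

    const vquery = new VQuery();

    // Schema is passed as columns; the raw source is optional.
    await vquery.createDataset(
      'sales_2024',
      [{ name: 'region', type: 'string' }, { name: 'sales', type: 'number' }],
      { type: 'csv', rawDataset: 'region,sales\nnorth,10\nsouth,20' }
    );

    const dataset = await vquery.connectDataset('sales_2024');
    const { dataset: rows, performance: perf } = await dataset.query({
      select: [{ field: 'region' }, { field: 'sales', func: 'sum', alias: 'total' }],
      groupBy: ['region']
    });

    await dataset.disconnect();
    await vquery.close();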
package/package.json CHANGED
@@ -1,6 +1,6 @@
  {
  "name": "@visactor/vquery",
- "version": "0.1.48",
+ "version": "0.1.50",
  "type": "module",
  "exports": {
  ".": {
@@ -15,14 +15,15 @@
  "dist"
  ],
  "dependencies": {
- "@duckdb/duckdb-wasm": "1.30.0"
+ "@duckdb/duckdb-wasm": "1.30.0",
+ "kysely": "^0.27.2"
  },
  "devDependencies": {
  "@eslint/js": "^9.35.0",
  "@rslib/core": "^0.15.1",
- "@rstest/core": "^0.5.1",
+ "@rstest/core": "0.6.5",
  "@types/node": "^22.18.10",
- "@rstest/coverage-istanbul": "^0.0.2",
+ "@rstest/coverage-istanbul": "0.0.5",
  "eslint": "^9.35.0",
  "globals": "^16.3.0",
  "typescript": "^5.9.3",
@@ -33,6 +34,8 @@
  "dev": "rslib build --watch",
  "lint": "eslint .",
  "test": "rstest",
+ "test:update": "rstest --update",
+ "test:coverage": "rstest --coverage && open ./coverage/index.html",
  "type-check": "tsc --noEmit"
  }
  }
@@ -1,16 +0,0 @@
- import { DataSourceType, DataSourceValue } from '../types';
- export declare class DataSourceBuilder {
- private type;
- private value;
- constructor(type: DataSourceType, value: DataSourceValue);
- static from(type: DataSourceType, value: DataSourceValue): DataSourceBuilder;
- build(): Promise<{
- type: DataSourceType;
- blob: Blob;
- }>;
- /**
- * Convert data of different types into a Blob
- */
- private static convertToBlob;
- private static fetchBlob;
- }
@@ -1 +0,0 @@
- export { DataSourceBuilder } from './dataSourceBuilder';
@@ -1,2 +0,0 @@
- import { QueryDSL } from '../../types';
- export declare const convertDSLToSQL: <T>(dsl: QueryDSL<T>, tableName: string) => string;