@visactor/vquery 0.3.12 → 0.3.14

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (46)
  1. package/dist/browser/esm/VQuery.d.ts +21 -0
  2. package/dist/browser/esm/browser.d.ts +3 -1
  3. package/dist/browser/esm/browser.js +121 -129
  4. package/dist/browser/esm/data-source-builder/dataSourceBuilder.d.ts +2 -3
  5. package/dist/browser/esm/dataset/dataset.d.ts +21 -3
  6. package/dist/browser/esm/node.d.ts +3 -1
  7. package/dist/browser/esm/sql-builder/builders/index.d.ts +1 -0
  8. package/dist/browser/esm/sql-builder/builders/select.d.ts +3 -0
  9. package/dist/browser/esm/sql-builder/dslToSQL.d.ts +2 -2
  10. package/dist/browser/esm/types/DataSource.d.ts +1 -1
  11. package/dist/browser/esm/types/dsl/GroupBy.d.ts +1 -1
  12. package/dist/browser/esm/types/dsl/Select.d.ts +7 -2
  13. package/dist/browser/esm/vquery-browser.d.ts +2 -19
  14. package/dist/browser/esm/vquery-node.d.ts +2 -19
  15. package/dist/node/cjs/VQuery.d.ts +21 -0
  16. package/dist/node/cjs/browser.d.ts +3 -1
  17. package/dist/node/cjs/data-source-builder/dataSourceBuilder.d.ts +2 -3
  18. package/dist/node/cjs/dataset/dataset.d.ts +21 -3
  19. package/dist/node/cjs/node.cjs +149 -164
  20. package/dist/node/cjs/node.d.ts +3 -1
  21. package/dist/node/cjs/sql-builder/builders/index.d.ts +1 -0
  22. package/dist/node/cjs/sql-builder/builders/select.d.ts +3 -0
  23. package/dist/node/cjs/sql-builder/dslToSQL.d.ts +2 -2
  24. package/dist/node/cjs/types/DataSource.d.ts +1 -1
  25. package/dist/node/cjs/types/dsl/GroupBy.d.ts +1 -1
  26. package/dist/node/cjs/types/dsl/Select.d.ts +7 -2
  27. package/dist/node/cjs/vquery-browser.d.ts +2 -19
  28. package/dist/node/cjs/vquery-node.d.ts +2 -19
  29. package/dist/node/esm/VQuery.d.ts +21 -0
  30. package/dist/node/esm/browser.d.ts +3 -1
  31. package/dist/node/esm/data-source-builder/dataSourceBuilder.d.ts +2 -3
  32. package/dist/node/esm/dataset/dataset.d.ts +21 -3
  33. package/dist/node/esm/node.d.ts +3 -1
  34. package/dist/node/esm/node.js +138 -162
  35. package/dist/node/esm/sql-builder/builders/index.d.ts +1 -0
  36. package/dist/node/esm/sql-builder/builders/select.d.ts +3 -0
  37. package/dist/node/esm/sql-builder/dslToSQL.d.ts +2 -2
  38. package/dist/node/esm/types/DataSource.d.ts +1 -1
  39. package/dist/node/esm/types/dsl/GroupBy.d.ts +1 -1
  40. package/dist/node/esm/types/dsl/Select.d.ts +7 -2
  41. package/dist/node/esm/vquery-browser.d.ts +2 -19
  42. package/dist/node/esm/vquery-node.d.ts +2 -19
  43. package/package.json +5 -2
  44. package/dist/browser/esm/types/dsl/demo.d.ts +0 -1
  45. package/dist/node/cjs/types/dsl/demo.d.ts +0 -1
  46. package/dist/node/esm/types/dsl/demo.d.ts +0 -1
@@ -84,12 +84,12 @@ const applyWhere = (where)=>{
84
84
  case 'between':
85
85
  {
86
86
  const [a, b] = value;
87
- return external_kysely_sql`${external_kysely_sql.ref(field)} between (${external_kysely_sql.val(a)}, ${external_kysely_sql.val(b)})`;
87
+ return external_kysely_sql`${external_kysely_sql.ref(field)} between ${external_kysely_sql.val(a)} and ${external_kysely_sql.val(b)}`;
88
88
  }
89
89
  case 'not between':
90
90
  {
91
91
  const [a, b] = value;
92
- return external_kysely_sql`not ${external_kysely_sql.ref(field)} between (${external_kysely_sql.val(a)}, ${external_kysely_sql.val(b)})`;
92
+ return external_kysely_sql`${external_kysely_sql.ref(field)} not between ${external_kysely_sql.val(a)} and ${external_kysely_sql.val(b)}`;
93
93
  }
94
94
  default:
95
95
  return external_kysely_sql`${external_kysely_sql.ref(field)} ${external_kysely_sql.raw(leaf.op)} ${external_kysely_sql.val(value)}`;
@@ -108,33 +108,62 @@ const applyLimit = (qb, limit)=>{
108
108
  if (limit && 'number' == typeof limit) qb = qb.limit(limit);
109
109
  return qb;
110
110
  };
111
- const convertDSLToSQL = (dsl, tableName)=>{
112
- const db = new Kysely({
113
- dialect: new PostgresDialect()
114
- });
115
- let qb = db.selectFrom(tableName);
116
- qb = dsl.select && dsl.select.length > 0 ? qb.select((eb)=>dsl.select.map((item)=>{
111
+ const DATE_FORMAT_MAP = {
112
+ year: '%Y',
113
+ month: '%Y-%m',
114
+ day: '%Y-%m-%d',
115
+ week: '%Y-W%W',
116
+ hour: '%Y-%m-%d %H',
117
+ minute: '%Y-%m-%d %H:%M',
118
+ second: '%Y-%m-%d %H:%M:%S'
119
+ };
120
+ const applySelect = (qb, select)=>{
121
+ if (select && select.length > 0) return qb.select((eb)=>select.map((item)=>{
117
122
  if (isSelectItem(item)) {
118
123
  const field = item.field;
119
- if (item.func) {
124
+ const expression = eb.ref(field);
125
+ if (item.aggr) {
126
+ const { func } = item.aggr;
120
127
  const alias = item.alias ?? field;
121
- switch(item.func){
122
- case 'avg':
123
- return eb.fn.avg(field).as(alias);
124
- case 'sum':
125
- return eb.fn.sum(field).as(alias);
126
- case 'min':
127
- return eb.fn.min(field).as(alias);
128
- case 'max':
129
- return eb.fn.max(field).as(alias);
130
- case 'count':
131
- return eb.fn.count(field).as(alias);
128
+ if ([
129
+ 'avg',
130
+ 'sum',
131
+ 'min',
132
+ 'max',
133
+ 'variance',
134
+ 'variancePop',
135
+ 'stddev',
136
+ 'median'
137
+ ].includes(func)) {
138
+ if ('variance' === func) return external_kysely_sql`var_samp(${expression})`.as(alias);
139
+ if ('variancePop' === func) return external_kysely_sql`var_pop(${expression})`.as(alias);
140
+ return external_kysely_sql`${external_kysely_sql.raw(func)}(${expression})`.as(alias);
141
+ }
142
+ if ('count' === func) return external_kysely_sql`CAST(count(${expression}) AS INTEGER)`.as(alias);
143
+ if ('quantile' === func) {
144
+ const q = item.aggr.quantile ?? 0.5;
145
+ return external_kysely_sql`quantile(${expression}, ${q})`.as(alias);
146
+ } else if ('count_distinct' === func) return external_kysely_sql`CAST(count(distinct ${expression}) AS INTEGER)`.as(alias);
147
+ else if (func.startsWith('to_')) {
148
+ const dateTrunc = func.replace('to_', '');
149
+ const format = DATE_FORMAT_MAP[dateTrunc];
150
+ if (format) return external_kysely_sql`strftime(CAST(${expression} AS TIMESTAMP), ${format})`.as(alias);
151
+ if ('quarter' === dateTrunc) return external_kysely_sql`strftime(CAST(${expression} AS TIMESTAMP), '%Y') || '-Q' || date_part('quarter', CAST(${expression} AS TIMESTAMP))`.as(alias);
132
152
  }
133
153
  }
134
- return item.alias ? eb.ref(field).as(item.alias) : field;
154
+ const alias = item.alias ?? field;
155
+ return expression.as(alias);
135
156
  }
136
157
  return item;
137
- })) : qb.selectAll();
158
+ }));
159
+ return qb.selectAll();
160
+ };
161
+ const convertDSLToSQL = (dsl, tableName)=>{
162
+ const db = new Kysely({
163
+ dialect: new PostgresDialect()
164
+ });
165
+ let qb = db.selectFrom(tableName);
166
+ qb = applySelect(qb, dsl.select);
138
167
  if (dsl.where) qb = qb.where(applyWhere(dsl.where));
139
168
  qb = applyGroupBy(qb, dsl.groupBy);
140
169
  if (dsl.orderBy && dsl.orderBy.length > 0) for (const o of dsl.orderBy)qb = qb.orderBy(o.field, o.order ?? 'asc');
@@ -142,60 +171,60 @@ const convertDSLToSQL = (dsl, tableName)=>{
142
171
  const compiled = qb.compile();
143
172
  return inlineParameters(compiled.sql, compiled.parameters);
144
173
  };
174
+ const READ_FUNCTION_MAP = {
175
+ csv: 'read_csv_auto',
176
+ json: 'read_json_auto',
177
+ parquet: 'read_parquet'
178
+ };
179
+ const DATA_TYPE_MAP = {
180
+ number: 'DOUBLE',
181
+ string: 'VARCHAR',
182
+ date: 'DATE',
183
+ datetime: 'TIMESTAMP',
184
+ timestamp: 'TIMESTAMP'
185
+ };
145
186
  class Dataset {
146
187
  queryAdapter;
147
188
  storageAdapter;
148
189
  _datasetId;
149
- constructor(duckDB, indexedDB, datasetId){
150
- this.queryAdapter = duckDB;
151
- this.storageAdapter = indexedDB;
190
+ constructor(queryAdapter, storageAdapter, datasetId){
191
+ this.queryAdapter = queryAdapter;
192
+ this.storageAdapter = storageAdapter;
152
193
  this._datasetId = datasetId;
153
194
  }
154
195
  async init(temporaryColumns, temporaryDatasetSource) {
155
196
  const datasetInfo = await this.storageAdapter.readDataset(this._datasetId);
156
197
  if (!datasetInfo) throw new Error(`Dataset ${this._datasetId} not found`);
157
- const columns = temporaryColumns ? temporaryColumns : datasetInfo.datasetSchema.columns;
158
- const datasetSource = temporaryDatasetSource || datasetInfo.datasetSource;
198
+ const columns = temporaryColumns ?? datasetInfo.datasetSchema.columns;
199
+ const datasetSource = temporaryDatasetSource ?? datasetInfo.datasetSource;
159
200
  if (columns.length > 0 && datasetSource) await this.createOrReplaceView(columns, datasetSource);
160
201
  }
161
202
  async createOrReplaceView(columns, datasetSource) {
162
- const readFunctionMap = {
163
- csv: 'read_csv_auto',
164
- json: 'read_json_auto',
165
- xlsx: 'read_excel',
166
- parquet: 'read_parquet'
167
- };
168
- const dataTypeMap = {
169
- number: 'DOUBLE',
170
- string: 'VARCHAR',
171
- date: 'DATE',
172
- datetime: 'TIMESTAMP',
173
- timestamp: 'TIMESTAMP'
174
- };
175
- if (datasetSource) {
176
- const readFunction = readFunctionMap[datasetSource.type];
177
- if (!readFunction) throw new Error(`Unsupported dataSource type: ${datasetSource.type}`);
178
- await this.queryAdapter.writeFile(this._datasetId, datasetSource.blob);
179
- const columnsStruct = `{${columns.map((c)=>`'${c.name}': '${dataTypeMap[c.type] || 'VARCHAR'}'`).join(', ')}}`;
180
- const columnNames = columns.map((c)=>`"${c.name}"`).join(', ');
181
- const createViewSql = `CREATE OR REPLACE VIEW "${this._datasetId}" AS SELECT ${columnNames} FROM ${readFunction}('${this._datasetId}', columns=${columnsStruct})`;
182
- await this.queryAdapter.query(createViewSql);
183
- }
203
+ if (!datasetSource) return;
204
+ const readFunction = READ_FUNCTION_MAP[datasetSource.type];
205
+ if (!readFunction) throw new Error(`Unsupported dataSource type: ${datasetSource.type}`);
206
+ await this.queryAdapter.writeFile(this._datasetId, datasetSource.blob);
207
+ const columnsStruct = this.buildColumnsStruct(columns);
208
+ const columnNames = columns.map((c)=>`"${c.name}"`).join(', ');
209
+ let readSql = `${readFunction}('${this._datasetId}')`;
210
+ if ('csv' === datasetSource.type || 'json' === datasetSource.type) readSql = `${readFunction}('${this._datasetId}', columns=${columnsStruct})`;
211
+ const createViewSql = `CREATE OR REPLACE VIEW "${this._datasetId}" AS SELECT ${columnNames} FROM ${readSql}`;
212
+ await this.queryAdapter.query(createViewSql);
184
213
  }
185
214
  async query(queryDSL) {
186
215
  const sql = convertDSLToSQL(queryDSL, this.datasetId);
187
216
  return this.queryBySQL(sql);
188
217
  }
189
218
  async queryBySQL(sql) {
190
- const start = performance?.now?.()?.toFixed(3) ?? Date.now().toFixed(3);
219
+ const start = performance?.now?.() ?? Date.now();
191
220
  const result = await this.queryAdapter.query(sql);
192
- const end = performance?.now?.()?.toFixed(3) ?? Date.now().toFixed(3);
221
+ const end = performance?.now?.() ?? Date.now();
193
222
  return {
194
223
  ...result,
195
224
  performance: {
196
- startAt: start,
197
- endAt: end,
198
- duration: Number(end) - Number(start)
225
+ startAt: start.toFixed(3),
226
+ endAt: end.toFixed(3),
227
+ duration: end - start
199
228
  }
200
229
  };
201
230
  }
@@ -205,10 +234,22 @@ class Dataset {
205
234
  get datasetId() {
206
235
  return this._datasetId;
207
236
  }
237
+ buildColumnsStruct(columns) {
238
+ const columnDefs = columns.map((c)=>{
239
+ const duckDBType = DATA_TYPE_MAP[c.type] || 'VARCHAR';
240
+ return `'${c.name}': '${duckDBType}'`;
241
+ });
242
+ return `{${columnDefs.join(', ')}}`;
243
+ }
208
244
  }
209
245
  const isUrl = (url)=>isHttpUrl(url) || isBase64Url(url);
210
246
  const isHttpUrl = (url)=>url.startsWith('http://') || url.startsWith('https://');
211
247
  const isBase64Url = (url)=>url.startsWith('data:');
248
+ const MIME_TYPES = {
249
+ csv: 'text/csv',
250
+ json: 'application/json',
251
+ parquet: 'application/parquet'
252
+ };
212
253
  class DatasetSourceBuilder {
213
254
  type;
214
255
  value;
@@ -220,84 +261,30 @@ class DatasetSourceBuilder {
220
261
  return new DatasetSourceBuilder(raw);
221
262
  }
222
263
  async build() {
223
- const blob = await DatasetSourceBuilder.convertToBlob(this.type, this.value);
264
+ const blob = await this.convertToBlob(this.type, this.value);
224
265
  return {
225
266
  type: this.type,
226
267
  blob: blob
227
268
  };
228
269
  }
229
- static async convertToBlob(type, value) {
270
+ async convertToBlob(type, value) {
230
271
  if (value instanceof Blob) return value;
231
- const convertCsvToBlob = (csvSource)=>{
232
- if (csvSource instanceof ArrayBuffer) return new Blob([
233
- csvSource
234
- ], {
235
- type: 'text/csv'
236
- });
237
- if ('string' == typeof csvSource && isUrl(csvSource)) return DatasetSourceBuilder.fetchBlob(csvSource);
238
- return new Blob([
239
- JSON.stringify(csvSource)
240
- ], {
241
- type: 'text/csv'
242
- });
243
- };
244
- const convertJsonToBlob = (jsonSource)=>{
245
- if (jsonSource instanceof ArrayBuffer) return new Blob([
246
- jsonSource
247
- ], {
248
- type: 'application/json'
249
- });
250
- if ('string' == typeof jsonSource && isUrl(jsonSource)) return DatasetSourceBuilder.fetchBlob(jsonSource);
251
- return new Blob([
252
- JSON.stringify(jsonSource)
253
- ], {
254
- type: 'application/json'
255
- });
256
- };
257
- const convertParquetToBlob = (parquetSource)=>{
258
- if (parquetSource instanceof ArrayBuffer) return new Blob([
259
- parquetSource
260
- ], {
261
- type: 'application/parquet'
262
- });
263
- if ('string' == typeof parquetSource && isUrl(parquetSource)) return DatasetSourceBuilder.fetchBlob(parquetSource);
264
- return new Blob([
265
- parquetSource
266
- ], {
267
- type: 'application/parquet'
268
- });
269
- };
270
- const convertXlsxToBlob = (xlsxSource)=>{
271
- if (xlsxSource instanceof ArrayBuffer) return new Blob([
272
- xlsxSource
273
- ], {
274
- type: 'application/vnd.openxmlformats-officedocument.spreadsheetml.sheet'
275
- });
276
- if ('string' == typeof xlsxSource && isUrl(xlsxSource)) return DatasetSourceBuilder.fetchBlob(xlsxSource);
277
- return new Blob([
278
- xlsxSource
279
- ], {
280
- type: 'application/vnd.openxmlformats-officedocument.spreadsheetml.sheet'
281
- });
282
- };
283
- switch(type){
284
- case 'csv':
285
- return convertCsvToBlob(value);
286
- case 'json':
287
- return convertJsonToBlob(value);
288
- case 'xlsx':
289
- return convertXlsxToBlob(value);
290
- case 'parquet':
291
- return convertParquetToBlob(value);
292
- default:
293
- return new Blob([
294
- value
295
- ]);
272
+ if ('string' == typeof value && isUrl(value)) {
273
+ const response = await fetch(value);
274
+ return await response.blob();
296
275
  }
297
- }
298
- static async fetchBlob(url) {
299
- const response = await fetch(url);
300
- return await response.blob();
276
+ const mimeType = MIME_TYPES[type] || 'text/plain';
277
+ if (value instanceof ArrayBuffer) return new Blob([
278
+ value
279
+ ], {
280
+ type: mimeType
281
+ });
282
+ const content = 'object' == typeof value ? JSON.stringify(value) : String(value);
283
+ return new Blob([
284
+ content
285
+ ], {
286
+ type: mimeType
287
+ });
301
288
  }
302
289
  }
303
290
  class InMemoryAdapter {
@@ -306,47 +293,31 @@ class InMemoryAdapter {
306
293
  constructor(){}
307
294
  open = async ()=>{
308
295
  this.isOpen = true;
309
- if (!this.isOpen) this.datasets = new Map();
296
+ this.datasets = new Map();
310
297
  };
311
298
  close = async ()=>{
312
299
  this.isOpen = false;
313
300
  this.datasets.clear();
314
301
  };
315
- writeDataset = (datasetId, datasetSchema, datasetSource)=>new Promise((resolve, reject)=>{
316
- try {
317
- const record = {
318
- datasetId,
319
- datasetSchema,
320
- datasetSource
321
- };
322
- this.datasets.set(datasetId, record);
323
- resolve();
324
- } catch (error) {
325
- reject(error);
326
- }
302
+ writeDataset = (datasetId, datasetSchema, datasetSource)=>new Promise((resolve)=>{
303
+ const record = {
304
+ datasetId,
305
+ datasetSchema,
306
+ datasetSource
307
+ };
308
+ this.datasets.set(datasetId, record);
309
+ resolve();
327
310
  });
328
- readDataset = (datasetId)=>new Promise((resolve, reject)=>{
329
- try {
330
- const record = this.datasets.get(datasetId);
331
- resolve(record ? record : null);
332
- } catch (error) {
333
- reject(error);
334
- }
311
+ readDataset = (datasetId)=>new Promise((resolve)=>{
312
+ const record = this.datasets.get(datasetId);
313
+ resolve(record ? record : null);
335
314
  });
336
- deleteDataset = (datasetId)=>new Promise((resolve, reject)=>{
337
- try {
338
- this.datasets.delete(datasetId);
339
- resolve();
340
- } catch (error) {
341
- reject(error);
342
- }
315
+ deleteDataset = (datasetId)=>new Promise((resolve)=>{
316
+ this.datasets.delete(datasetId);
317
+ resolve();
343
318
  });
344
- listDatasets = ()=>new Promise((resolve, reject)=>{
345
- try {
346
- resolve(Array.from(this.datasets.values()));
347
- } catch (error) {
348
- reject(error);
349
- }
319
+ listDatasets = ()=>new Promise((resolve)=>{
320
+ resolve(Array.from(this.datasets.values()));
350
321
  });
351
322
  }
352
323
  class DuckDBNodeQueryAdapter {
@@ -409,9 +380,9 @@ class VQuery {
409
380
  queryAdapter;
410
381
  storageAdapter;
411
382
  isInitialized = false;
412
- constructor(){
413
- this.queryAdapter = new DuckDBNodeQueryAdapter();
414
- this.storageAdapter = new InMemoryAdapter();
383
+ constructor(queryAdapter, storageAdapter){
384
+ this.queryAdapter = queryAdapter;
385
+ this.storageAdapter = storageAdapter;
415
386
  }
416
387
  async checkInitialized() {
417
388
  if (!this.isInitialized) {
@@ -473,4 +444,9 @@ class VQuery {
473
444
  await this.storageAdapter.close();
474
445
  }
475
446
  }
476
- export { DatasetSourceBuilder, VQuery, convertDSLToSQL, isBase64Url, isHttpUrl, isUrl };
447
+ class vquery_node_VQuery extends VQuery {
448
+ constructor(){
449
+ super(new DuckDBNodeQueryAdapter(), new InMemoryAdapter());
450
+ }
451
+ }
452
+ export { Dataset, DatasetSourceBuilder, DuckDBNodeQueryAdapter, InMemoryAdapter, vquery_node_VQuery as VQuery, convertDSLToSQL, isBase64Url, isHttpUrl, isUrl };
@@ -1,3 +1,4 @@
1
1
  export { applyWhere } from './where';
2
2
  export { applyGroupBy } from './groupBy';
3
3
  export { applyLimit } from './limit';
4
+ export { applySelect } from './select';
@@ -0,0 +1,3 @@
1
+ import { SelectItem } from '../../types/dsl/Select';
2
+ import type { SelectQueryBuilder } from 'kysely';
3
+ export declare const applySelect: <DB, TB extends keyof DB & string, O, T>(qb: SelectQueryBuilder<DB, TB, O>, select?: Array<keyof T | SelectItem<T>>) => SelectQueryBuilder<DB, TB, O & import("kysely").Selection<DB, TB, any>>;
@@ -1,2 +1,2 @@
1
- import { QueryDSL } from '../types';
2
- export declare const convertDSLToSQL: <T, TableName extends string>(dsl: QueryDSL<T>, tableName: TableName) => string;
1
+ import { QueryDSL, VQueryDSL } from '../types';
2
+ export declare const convertDSLToSQL: <T, TableName extends string>(dsl: QueryDSL<T> | VQueryDSL<T>, tableName: TableName) => string;
@@ -1,5 +1,5 @@
1
1
  export type TidyDatum = Record<string, number | string | null | boolean | undefined>;
2
- export type DatasetSourceType = 'csv' | 'json' | 'xlsx' | 'parquet';
2
+ export type DatasetSourceType = 'csv' | 'json' | 'parquet';
3
3
  export type DatasetSourceValue = string | ArrayBuffer | Blob | TidyDatum[];
4
4
  export interface DatasetSource {
5
5
  type: DatasetSourceType;
@@ -1 +1 @@
1
- export type GroupBy<T> = Array<keyof T>;
1
+ export type GroupBy<T> = Array<keyof T | (string & {})>;
@@ -1,7 +1,12 @@
1
- export type AggregateFunction = 'count' | 'sum' | 'avg' | 'min' | 'max' | 'quantile';
1
+ export type BaseAggregateFunction = 'count' | 'count_distinct' | 'sum' | 'avg' | 'min' | 'max' | 'variance' | 'variancePop' | 'stddev' | 'median' | 'quantile';
2
+ export type DateAggregateFunction = 'to_year' | 'to_quarter' | 'to_month' | 'to_week' | 'to_day' | 'to_hour' | 'to_minute' | 'to_second';
3
+ export type AggregateFunction = BaseAggregateFunction | DateAggregateFunction;
2
4
  export type SelectItem<T> = {
3
5
  field: keyof T;
4
6
  alias?: string;
5
- func?: AggregateFunction;
7
+ aggr?: {
8
+ func: AggregateFunction;
9
+ quantile?: number;
10
+ };
6
11
  };
7
12
  export type Select<T> = Array<keyof T | SelectItem<T>>;
@@ -1,21 +1,4 @@
1
- import { Dataset } from './dataset/dataset';
2
- import { RawDatasetSource, DatasetColumn } from './types';
3
- export declare class VQuery {
4
- private queryAdapter;
5
- private storageAdapter;
6
- private isInitialized;
1
+ import { VQuery as VQueryCore } from './VQuery';
2
+ export declare class VQuery extends VQueryCore {
7
3
  constructor();
8
- private checkInitialized;
9
- private checkDatasetExists;
10
- createDataset(datasetId: string, columns?: DatasetColumn[], rawDatasetSource?: RawDatasetSource): Promise<void>;
11
- updateDatasetSource(datasetId: string, columns?: DatasetColumn[], rawDatasetSource?: RawDatasetSource): Promise<void>;
12
- dropDataset(datasetId: string): Promise<void>;
13
- connectDataset(datasetId: string, temporaryColumns?: DatasetColumn[], temporaryRawDatasetSource?: RawDatasetSource): Promise<Dataset>;
14
- hasDataset(datasetId: string): Promise<boolean>;
15
- listDatasets(): Promise<{
16
- datasetId: string;
17
- datasetSchema: import("./types").DatasetSchema;
18
- datasetSource?: import("./types").DatasetSource;
19
- }[]>;
20
- close(): Promise<void>;
21
4
  }
@@ -1,21 +1,4 @@
1
- import { Dataset } from './dataset/dataset';
2
- import { RawDatasetSource, DatasetColumn } from './types';
3
- export declare class VQuery {
4
- private queryAdapter;
5
- private storageAdapter;
6
- private isInitialized;
1
+ import { VQuery as VQueryCore } from './VQuery';
2
+ export declare class VQuery extends VQueryCore {
7
3
  constructor();
8
- private checkInitialized;
9
- private checkDatasetExists;
10
- createDataset(datasetId: string, columns?: DatasetColumn[], rawDatasetSource?: RawDatasetSource): Promise<void>;
11
- updateDatasetSource(datasetId: string, columns?: DatasetColumn[], rawDatasetSource?: RawDatasetSource): Promise<void>;
12
- dropDataset(datasetId: string): Promise<void>;
13
- connectDataset(datasetId: string, temporaryColumns?: DatasetColumn[], temporaryRawDatasetSource?: RawDatasetSource): Promise<Dataset>;
14
- hasDataset(datasetId: string): Promise<boolean>;
15
- listDatasets(): Promise<{
16
- datasetId: string;
17
- datasetSchema: import("./types").DatasetSchema;
18
- datasetSource?: import("./types").DatasetSource;
19
- }[]>;
20
- close(): Promise<void>;
21
4
  }
package/package.json CHANGED
@@ -1,6 +1,6 @@
1
1
  {
2
2
  "name": "@visactor/vquery",
3
- "version": "0.3.12",
3
+ "version": "0.3.14",
4
4
  "type": "module",
5
5
  "exports": {
6
6
  ".": {
@@ -57,11 +57,14 @@
57
57
  },
58
58
  "scripts": {
59
59
  "build": "rslib build",
60
+ "build:test": "node scripts/build-tests.js",
61
+ "build:docs": "node scripts/build-docs.js",
62
+ "g": "pnpm run build:test && pnpm run build:docs",
60
63
  "dev": "rslib build --watch --no-clean",
61
64
  "lint": "eslint .",
62
65
  "test": "rstest",
63
66
  "test:update": "rstest --update",
64
- "test:coverage": "rstest --coverage; open ./coverage/index.html",
67
+ "test:coverage": "rstest --coverage && node scripts/build-coverage-badge.mjs && open ./coverage/index.html",
65
68
  "typecheck": "tsc --noEmit"
66
69
  }
67
70
  }
@@ -1 +0,0 @@
1
- export {};
@@ -1 +0,0 @@
1
- export {};
@@ -1 +0,0 @@
1
- export {};