@visactor/vquery 0.3.13 → 0.3.14
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/dist/browser/esm/VQuery.d.ts +21 -0
- package/dist/browser/esm/browser.d.ts +3 -1
- package/dist/browser/esm/browser.js +121 -129
- package/dist/browser/esm/data-source-builder/dataSourceBuilder.d.ts +2 -3
- package/dist/browser/esm/dataset/dataset.d.ts +21 -3
- package/dist/browser/esm/node.d.ts +3 -1
- package/dist/browser/esm/sql-builder/builders/index.d.ts +1 -0
- package/dist/browser/esm/sql-builder/builders/select.d.ts +3 -0
- package/dist/browser/esm/sql-builder/dslToSQL.d.ts +2 -2
- package/dist/browser/esm/types/DataSource.d.ts +1 -1
- package/dist/browser/esm/types/dsl/GroupBy.d.ts +1 -1
- package/dist/browser/esm/types/dsl/Select.d.ts +7 -2
- package/dist/browser/esm/vquery-browser.d.ts +2 -19
- package/dist/browser/esm/vquery-node.d.ts +2 -19
- package/dist/node/cjs/VQuery.d.ts +21 -0
- package/dist/node/cjs/browser.d.ts +3 -1
- package/dist/node/cjs/data-source-builder/dataSourceBuilder.d.ts +2 -3
- package/dist/node/cjs/dataset/dataset.d.ts +21 -3
- package/dist/node/cjs/node.cjs +149 -164
- package/dist/node/cjs/node.d.ts +3 -1
- package/dist/node/cjs/sql-builder/builders/index.d.ts +1 -0
- package/dist/node/cjs/sql-builder/builders/select.d.ts +3 -0
- package/dist/node/cjs/sql-builder/dslToSQL.d.ts +2 -2
- package/dist/node/cjs/types/DataSource.d.ts +1 -1
- package/dist/node/cjs/types/dsl/GroupBy.d.ts +1 -1
- package/dist/node/cjs/types/dsl/Select.d.ts +7 -2
- package/dist/node/cjs/vquery-browser.d.ts +2 -19
- package/dist/node/cjs/vquery-node.d.ts +2 -19
- package/dist/node/esm/VQuery.d.ts +21 -0
- package/dist/node/esm/browser.d.ts +3 -1
- package/dist/node/esm/data-source-builder/dataSourceBuilder.d.ts +2 -3
- package/dist/node/esm/dataset/dataset.d.ts +21 -3
- package/dist/node/esm/node.d.ts +3 -1
- package/dist/node/esm/node.js +138 -162
- package/dist/node/esm/sql-builder/builders/index.d.ts +1 -0
- package/dist/node/esm/sql-builder/builders/select.d.ts +3 -0
- package/dist/node/esm/sql-builder/dslToSQL.d.ts +2 -2
- package/dist/node/esm/types/DataSource.d.ts +1 -1
- package/dist/node/esm/types/dsl/GroupBy.d.ts +1 -1
- package/dist/node/esm/types/dsl/Select.d.ts +7 -2
- package/dist/node/esm/vquery-browser.d.ts +2 -19
- package/dist/node/esm/vquery-node.d.ts +2 -19
- package/package.json +5 -2
- package/dist/browser/esm/types/dsl/demo.d.ts +0 -1
- package/dist/node/cjs/types/dsl/demo.d.ts +0 -1
- package/dist/node/esm/types/dsl/demo.d.ts +0 -1
@@ -0,0 +1,21 @@
+import { Dataset } from './dataset/dataset';
+import { RawDatasetSource, DatasetColumn, QueryAdapter, StorageAdapter } from './types';
+export declare class VQuery {
+    protected queryAdapter: QueryAdapter;
+    protected storageAdapter: StorageAdapter;
+    private isInitialized;
+    constructor(queryAdapter: QueryAdapter, storageAdapter: StorageAdapter);
+    private checkInitialized;
+    private checkDatasetExists;
+    createDataset(datasetId: string, columns?: DatasetColumn[], rawDatasetSource?: RawDatasetSource): Promise<void>;
+    updateDatasetSource(datasetId: string, columns?: DatasetColumn[], rawDatasetSource?: RawDatasetSource): Promise<void>;
+    dropDataset(datasetId: string): Promise<void>;
+    connectDataset(datasetId: string, temporaryColumns?: DatasetColumn[], temporaryRawDatasetSource?: RawDatasetSource): Promise<Dataset>;
+    hasDataset(datasetId: string): Promise<boolean>;
+    listDatasets(): Promise<{
+        datasetId: string;
+        datasetSchema: import("./types").DatasetSchema;
+        datasetSource?: import("./types").DatasetSource;
+    }[]>;
+    close(): Promise<void>;
+}
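The new shared VQuery base class receives its adapters through the constructor, so each environment entry can be a thin subclass. A minimal sketch of that pattern (the adapter instances below are hypothetical placeholders, not part of the package):

    import { VQuery } from './VQuery';
    import { QueryAdapter, StorageAdapter } from './types';

    // Hypothetical adapter instances, for illustration only.
    declare const customQueryAdapter: QueryAdapter;
    declare const customStorageAdapter: StorageAdapter;

    class CustomVQuery extends VQuery {
      constructor() {
        // Inject whichever query/storage adapters the target environment needs.
        super(customQueryAdapter, customStorageAdapter);
      }
    }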
@@ -1,5 +1,7 @@
-export { convertDSLToSQL } from './dataset';
+export { convertDSLToSQL, Dataset } from './dataset';
 export { DatasetSourceBuilder } from './data-source-builder/dataSourceBuilder';
 export * from './utils';
 export * from './types';
 export { VQuery } from './vquery-browser';
+export { DuckDBWebQueryAdapter } from './adapters/query-adapter/duckdbWebAdapter';
+export { IndexedDBAdapter } from './adapters/storage-adapter/indexeddbAdapter';
@@ -83,12 +83,12 @@ const applyWhere = (where)=>{
         case 'between':
             {
                 const [a, b] = value;
-                return external_kysely_sql`${external_kysely_sql.ref(field)} between
+                return external_kysely_sql`${external_kysely_sql.ref(field)} between ${external_kysely_sql.val(a)} and ${external_kysely_sql.val(b)}`;
             }
         case 'not between':
             {
                 const [a, b] = value;
-                return external_kysely_sql
+                return external_kysely_sql`${external_kysely_sql.ref(field)} not between ${external_kysely_sql.val(a)} and ${external_kysely_sql.val(b)}`;
             }
         default:
             return external_kysely_sql`${external_kysely_sql.ref(field)} ${external_kysely_sql.raw(leaf.op)} ${external_kysely_sql.val(value)}`;
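For a where leaf such as { field: 'amount', op: 'between', value: [10, 20] } (illustrative names), these branches now render roughly `"amount" between ... and ...`, with both bounds passed through sql.val and inlined afterwards by inlineParameters.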
@@ -107,33 +107,62 @@ const applyLimit = (qb, limit)=>{
     if (limit && 'number' == typeof limit) qb = qb.limit(limit);
     return qb;
 };
-const
-
-
-
-
-
+const DATE_FORMAT_MAP = {
+    year: '%Y',
+    month: '%Y-%m',
+    day: '%Y-%m-%d',
+    week: '%Y-W%W',
+    hour: '%Y-%m-%d %H',
+    minute: '%Y-%m-%d %H:%M',
+    second: '%Y-%m-%d %H:%M:%S'
+};
+const applySelect = (qb, select)=>{
+    if (select && select.length > 0) return qb.select((eb)=>select.map((item)=>{
         if (isSelectItem(item)) {
             const field = item.field;
-
+            const expression = eb.ref(field);
+            if (item.aggr) {
+                const { func } = item.aggr;
                 const alias = item.alias ?? field;
-
-
-
-
-
-
-
-
-
-
-
+                if ([
+                    'avg',
+                    'sum',
+                    'min',
+                    'max',
+                    'variance',
+                    'variancePop',
+                    'stddev',
+                    'median'
+                ].includes(func)) {
+                    if ('variance' === func) return external_kysely_sql`var_samp(${expression})`.as(alias);
+                    if ('variancePop' === func) return external_kysely_sql`var_pop(${expression})`.as(alias);
+                    return external_kysely_sql`${external_kysely_sql.raw(func)}(${expression})`.as(alias);
+                }
+                if ('count' === func) return external_kysely_sql`CAST(count(${expression}) AS INTEGER)`.as(alias);
+                if ('quantile' === func) {
+                    const q = item.aggr.quantile ?? 0.5;
+                    return external_kysely_sql`quantile(${expression}, ${q})`.as(alias);
+                } else if ('count_distinct' === func) return external_kysely_sql`CAST(count(distinct ${expression}) AS INTEGER)`.as(alias);
+                else if (func.startsWith('to_')) {
+                    const dateTrunc = func.replace('to_', '');
+                    const format = DATE_FORMAT_MAP[dateTrunc];
+                    if (format) return external_kysely_sql`strftime(CAST(${expression} AS TIMESTAMP), ${format})`.as(alias);
+                    if ('quarter' === dateTrunc) return external_kysely_sql`strftime(CAST(${expression} AS TIMESTAMP), '%Y') || '-Q' || date_part('quarter', CAST(${expression} AS TIMESTAMP))`.as(alias);
+                }
             }
             }
-
+            const alias = item.alias ?? field;
+            return expression.as(alias);
         }
         return item;
-    }))
+    }));
+    return qb.selectAll();
+};
+const convertDSLToSQL = (dsl, tableName)=>{
+    const db = new Kysely({
+        dialect: new PostgresDialect()
+    });
+    let qb = db.selectFrom(tableName);
+    qb = applySelect(qb, dsl.select);
     if (dsl.where) qb = qb.where(applyWhere(dsl.where));
     qb = applyGroupBy(qb, dsl.groupBy);
     if (dsl.orderBy && dsl.orderBy.length > 0) for (const o of dsl.orderBy)qb = qb.orderBy(o.field, o.order ?? 'asc');
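The new applySelect builder handles plain field names, aggregate items, and the to_* date bucketing added in this release. A hedged sketch of the DSL it accepts and roughly the SQL convertDSLToSQL should produce (table and field names are illustrative):

    const dsl = {
      select: [
        'region',
        { field: 'amount', aggr: { func: 'sum' }, alias: 'total' },
        { field: 'amount', aggr: { func: 'quantile', quantile: 0.9 }, alias: 'p90' }
      ],
      groupBy: ['region']
    };
    const sql = convertDSLToSQL(dsl, 'sales');
    // ≈ select "region", sum("amount") as "total", quantile("amount", 0.9) as "p90"
    //   from "sales" group by "region"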
@@ -141,60 +170,60 @@ const convertDSLToSQL = (dsl, tableName)=>{
     const compiled = qb.compile();
     return inlineParameters(compiled.sql, compiled.parameters);
 };
+const READ_FUNCTION_MAP = {
+    csv: 'read_csv_auto',
+    json: 'read_json_auto',
+    parquet: 'read_parquet'
+};
+const DATA_TYPE_MAP = {
+    number: 'DOUBLE',
+    string: 'VARCHAR',
+    date: 'DATE',
+    datetime: 'TIMESTAMP',
+    timestamp: 'TIMESTAMP'
+};
 class Dataset {
     queryAdapter;
     storageAdapter;
     _datasetId;
-    constructor(
-    this.queryAdapter =
-    this.storageAdapter =
+    constructor(queryAdapter, storageAdapter, datasetId){
+        this.queryAdapter = queryAdapter;
+        this.storageAdapter = storageAdapter;
         this._datasetId = datasetId;
     }
     async init(temporaryColumns, temporaryDatasetSource) {
         const datasetInfo = await this.storageAdapter.readDataset(this._datasetId);
         if (!datasetInfo) throw new Error(`Dataset ${this._datasetId} not found`);
-        const columns = temporaryColumns
-        const datasetSource = temporaryDatasetSource
+        const columns = temporaryColumns ?? datasetInfo.datasetSchema.columns;
+        const datasetSource = temporaryDatasetSource ?? datasetInfo.datasetSource;
         if (columns.length > 0 && datasetSource) await this.createOrReplaceView(columns, datasetSource);
     }
     async createOrReplaceView(columns, datasetSource) {
-
-
-
-
-
-        };
-
-
-
-
-            datetime: 'TIMESTAMP',
-            timestamp: 'TIMESTAMP'
-        };
-        if (datasetSource) {
-            const readFunction = readFunctionMap[datasetSource.type];
-            if (!readFunction) throw new Error(`Unsupported dataSource type: ${datasetSource.type}`);
-            await this.queryAdapter.writeFile(this._datasetId, datasetSource.blob);
-            const columnsStruct = `{${columns.map((c)=>`'${c.name}': '${dataTypeMap[c.type] || 'VARCHAR'}'`).join(', ')}}`;
-            const columnNames = columns.map((c)=>`"${c.name}"`).join(', ');
-            const createViewSql = `CREATE OR REPLACE VIEW "${this._datasetId}" AS SELECT ${columnNames} FROM ${readFunction}('${this._datasetId}', columns=${columnsStruct})`;
-            await this.queryAdapter.query(createViewSql);
-        }
+        if (!datasetSource) return;
+        const readFunction = READ_FUNCTION_MAP[datasetSource.type];
+        if (!readFunction) throw new Error(`Unsupported dataSource type: ${datasetSource.type}`);
+        await this.queryAdapter.writeFile(this._datasetId, datasetSource.blob);
+        const columnsStruct = this.buildColumnsStruct(columns);
+        const columnNames = columns.map((c)=>`"${c.name}"`).join(', ');
+        let readSql = `${readFunction}('${this._datasetId}')`;
+        if ('csv' === datasetSource.type || 'json' === datasetSource.type) readSql = `${readFunction}('${this._datasetId}', columns=${columnsStruct})`;
+        const createViewSql = `CREATE OR REPLACE VIEW "${this._datasetId}" AS SELECT ${columnNames} FROM ${readSql}`;
+        await this.queryAdapter.query(createViewSql);
     }
     async query(queryDSL) {
         const sql = convertDSLToSQL(queryDSL, this.datasetId);
         return this.queryBySQL(sql);
     }
     async queryBySQL(sql) {
-        const start = performance?.now?.()
+        const start = performance?.now?.() ?? Date.now();
         const result = await this.queryAdapter.query(sql);
-        const end = performance?.now?.()
+        const end = performance?.now?.() ?? Date.now();
         return {
             ...result,
             performance: {
-                startAt: start,
-                endAt: end,
-                duration:
+                startAt: start.toFixed(3),
+                endAt: end.toFixed(3),
+                duration: end - start
            }
        };
    }
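In 0.3.14 the column-to-DuckDB-type mapping moves into the module-level DATA_TYPE_MAP and the buildColumnsStruct helper (next hunk), and the columns= struct is only attached for csv and json sources. For an illustrative dataset 'sales' with columns region (string) and amount (number), the generated statement is roughly CREATE OR REPLACE VIEW "sales" AS SELECT "region", "amount" FROM read_csv_auto('sales', columns={'region': 'VARCHAR', 'amount': 'DOUBLE'}), while a parquet source reads via read_parquet('sales') with no columns argument.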
@@ -204,10 +233,22 @@ class Dataset {
     get datasetId() {
         return this._datasetId;
     }
+    buildColumnsStruct(columns) {
+        const columnDefs = columns.map((c)=>{
+            const duckDBType = DATA_TYPE_MAP[c.type] || 'VARCHAR';
+            return `'${c.name}': '${duckDBType}'`;
+        });
+        return `{${columnDefs.join(', ')}}`;
+    }
 }
 const isUrl = (url)=>isHttpUrl(url) || isBase64Url(url);
 const isHttpUrl = (url)=>url.startsWith('http://') || url.startsWith('https://');
 const isBase64Url = (url)=>url.startsWith('data:');
+const MIME_TYPES = {
+    csv: 'text/csv',
+    json: 'application/json',
+    parquet: 'application/parquet'
+};
 class DatasetSourceBuilder {
     type;
     value;
@@ -219,84 +260,30 @@ class DatasetSourceBuilder {
         return new DatasetSourceBuilder(raw);
     }
     async build() {
-        const blob = await
+        const blob = await this.convertToBlob(this.type, this.value);
         return {
             type: this.type,
             blob: blob
         };
     }
-
+    async convertToBlob(type, value) {
         if (value instanceof Blob) return value;
-
-
-
-            ], {
-                type: 'text/csv'
-            });
-            if ('string' == typeof csvSource && isUrl(csvSource)) return DatasetSourceBuilder.fetchBlob(csvSource);
-            return new Blob([
-                JSON.stringify(csvSource)
-            ], {
-                type: 'text/csv'
-            });
-        };
-        const convertJsonToBlob = (jsonSource)=>{
-            if (jsonSource instanceof ArrayBuffer) return new Blob([
-                jsonSource
-            ], {
-                type: 'application/json'
-            });
-            if ('string' == typeof jsonSource && isUrl(jsonSource)) return DatasetSourceBuilder.fetchBlob(jsonSource);
-            return new Blob([
-                JSON.stringify(jsonSource)
-            ], {
-                type: 'application/json'
-            });
-        };
-        const convertParquetToBlob = (parquetSource)=>{
-            if (parquetSource instanceof ArrayBuffer) return new Blob([
-                parquetSource
-            ], {
-                type: 'application/parquet'
-            });
-            if ('string' == typeof parquetSource && isUrl(parquetSource)) return DatasetSourceBuilder.fetchBlob(parquetSource);
-            return new Blob([
-                parquetSource
-            ], {
-                type: 'application/parquet'
-            });
-        };
-        const convertXlsxToBlob = (xlsxSource)=>{
-            if (xlsxSource instanceof ArrayBuffer) return new Blob([
-                xlsxSource
-            ], {
-                type: 'application/vnd.openxmlformats-officedocument.spreadsheetml.sheet'
-            });
-            if ('string' == typeof xlsxSource && isUrl(xlsxSource)) return DatasetSourceBuilder.fetchBlob(xlsxSource);
-            return new Blob([
-                xlsxSource
-            ], {
-                type: 'application/vnd.openxmlformats-officedocument.spreadsheetml.sheet'
-            });
-        };
-        switch(type){
-            case 'csv':
-                return convertCsvToBlob(value);
-            case 'json':
-                return convertJsonToBlob(value);
-            case 'xlsx':
-                return convertXlsxToBlob(value);
-            case 'parquet':
-                return convertParquetToBlob(value);
-            default:
-                return new Blob([
-                    value
-                ]);
+        if ('string' == typeof value && isUrl(value)) {
+            const response = await fetch(value);
+            return await response.blob();
         }
-
-
-
-
+        const mimeType = MIME_TYPES[type] || 'text/plain';
+        if (value instanceof ArrayBuffer) return new Blob([
+            value
+        ], {
+            type: mimeType
+        });
+        const content = 'object' == typeof value ? JSON.stringify(value) : String(value);
+        return new Blob([
+            content
+        ], {
+            type: mimeType
+        });
     }
 }
 class IndexedDBAdapter {
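The four per-format converters collapse into a single convertToBlob path keyed by MIME_TYPES. A hedged usage sketch (the builder is constructed directly here because its static factory's name is not shown in this diff, and the { type, value } raw-source shape is inferred from the fields used above):

    const builder = new DatasetSourceBuilder({ type: 'csv', value: 'https://example.com/sales.csv' });
    const source = await builder.build();
    // source ≈ { type: 'csv', blob: Blob } — URL strings are fetched, ArrayBuffers are wrapped
    // with the matching MIME type, and other values are stringified before wrapping.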
@@ -457,9 +444,9 @@ class VQuery {
     queryAdapter;
     storageAdapter;
     isInitialized = false;
-    constructor(){
-    this.queryAdapter =
-    this.storageAdapter =
+    constructor(queryAdapter, storageAdapter){
+        this.queryAdapter = queryAdapter;
+        this.storageAdapter = storageAdapter;
     }
     async checkInitialized() {
         if (!this.isInitialized) {
@@ -521,4 +508,9 @@ class VQuery {
         await this.storageAdapter.close();
     }
 }
-
+class vquery_browser_VQuery extends VQuery {
+    constructor(){
+        super(new DuckDBWebQueryAdapter(), new IndexedDBAdapter());
+    }
+}
+export { Dataset, DatasetSourceBuilder, DuckDBWebQueryAdapter, IndexedDBAdapter, vquery_browser_VQuery as VQuery, convertDSLToSQL, isBase64Url, isHttpUrl, isUrl };
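Taken together, the browser build now exports the adapters alongside a no-argument VQuery that wires them up itself. A minimal end-to-end sketch (the dataset id, columns, CSV URL, package-root import path, and the { type, value } raw-source shape are illustrative assumptions):

    import { VQuery } from '@visactor/vquery';

    const vq = new VQuery(); // DuckDBWebQueryAdapter + IndexedDBAdapter injected internally
    await vq.createDataset(
      'sales',
      [{ name: 'region', type: 'string' }, { name: 'amount', type: 'number' }],
      { type: 'csv', value: 'https://example.com/sales.csv' }
    );
    const dataset = await vq.connectDataset('sales');
    const result = await dataset.query({
      select: ['region', { field: 'amount', aggr: { func: 'sum' }, alias: 'total' }],
      groupBy: ['region']
    });
    // result.dataset holds the rows; result.performance reports startAt/endAt/duration.
    await vq.close();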
@@ -1,13 +1,24 @@
-import { DatasetColumn, DatasetSource, QueryDSL } from '../types';
+import { DatasetColumn, DatasetSource, QueryDSL, VQueryDSL } from '../types';
 import { QueryAdapter, StorageAdapter } from '../types';
 export declare class Dataset {
     private queryAdapter;
     private storageAdapter;
     private _datasetId;
-    constructor(
+    constructor(queryAdapter: QueryAdapter, storageAdapter: StorageAdapter, datasetId: string);
+    /**
+     * Initialize the dataset by loading it into the query engine
+     * @param temporaryColumns Optional temporary columns to override storage schema
+     * @param temporaryDatasetSource Optional temporary data source to override storage source
+     */
     init(temporaryColumns?: DatasetColumn[], temporaryDatasetSource?: DatasetSource): Promise<void>;
+    /**
+     * Register file and create a view in DuckDB
+     */
     createOrReplaceView(columns: DatasetColumn[], datasetSource: DatasetSource): Promise<void>;
-
+    /**
+     * Execute query using VQuery DSL
+     */
+    query<T extends Record<string, number | string>>(queryDSL: QueryDSL<T> | VQueryDSL<T>): Promise<{
         performance: {
             startAt: string;
             endAt: string;
@@ -16,6 +27,9 @@ export declare class Dataset {
         dataset: any[];
         table: any;
     }>;
+    /**
+     * Execute raw SQL query
+     */
     queryBySQL(sql: string): Promise<{
         performance: {
             startAt: string;
@@ -25,6 +39,10 @@ export declare class Dataset {
         dataset: any[];
         table: any;
     }>;
+    /**
+     * Clean up resources
+     */
     disconnect(): Promise<void>;
     get datasetId(): string;
+    private buildColumnsStruct;
 }
@@ -1,5 +1,7 @@
-export { convertDSLToSQL } from './dataset';
+export { convertDSLToSQL, Dataset } from './dataset';
 export { DatasetSourceBuilder } from './data-source-builder/dataSourceBuilder';
 export * from './utils';
 export * from './types';
 export { VQuery } from './vquery-node';
+export { DuckDBNodeQueryAdapter } from './adapters/query-adapter/duckdbNodeAdapter';
+export { InMemoryAdapter } from './adapters/storage-adapter/inmemoryAdapter';
@@ -0,0 +1,3 @@
+import { SelectItem } from '../../types/dsl/Select';
+import type { SelectQueryBuilder } from 'kysely';
+export declare const applySelect: <DB, TB extends keyof DB & string, O, T>(qb: SelectQueryBuilder<DB, TB, O>, select?: Array<keyof T | SelectItem<T>>) => SelectQueryBuilder<DB, TB, O & import("kysely").Selection<DB, TB, any>>;
@@ -1,2 +1,2 @@
-import { QueryDSL } from '../types';
-export declare const convertDSLToSQL: <T, TableName extends string>(dsl: QueryDSL<T>, tableName: TableName) => string;
+import { QueryDSL, VQueryDSL } from '../types';
+export declare const convertDSLToSQL: <T, TableName extends string>(dsl: QueryDSL<T> | VQueryDSL<T>, tableName: TableName) => string;
@@ -1,5 +1,5 @@
 export type TidyDatum = Record<string, number | string | null | boolean | undefined>;
-export type DatasetSourceType = 'csv' | 'json' | '
+export type DatasetSourceType = 'csv' | 'json' | 'parquet';
 export type DatasetSourceValue = string | ArrayBuffer | Blob | TidyDatum[];
 export interface DatasetSource {
     type: DatasetSourceType;
@@ -1 +1 @@
-export type GroupBy<T> = Array<keyof T>;
+export type GroupBy<T> = Array<keyof T | (string & {})>;
@@ -1,7 +1,12 @@
-export type
+export type BaseAggregateFunction = 'count' | 'count_distinct' | 'sum' | 'avg' | 'min' | 'max' | 'variance' | 'variancePop' | 'stddev' | 'median' | 'quantile';
+export type DateAggregateFunction = 'to_year' | 'to_quarter' | 'to_month' | 'to_week' | 'to_day' | 'to_hour' | 'to_minute' | 'to_second';
+export type AggregateFunction = BaseAggregateFunction | DateAggregateFunction;
 export type SelectItem<T> = {
     field: keyof T;
     alias?: string;
-
+    aggr?: {
+        func: AggregateFunction;
+        quantile?: number;
+    };
 };
 export type Select<T> = Array<keyof T | SelectItem<T>>;
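A typed sketch of a select clause using the new aggregate union (the row type is illustrative, and the import assumes these DSL types are re-exported through the package root via its export * from './types'):

    import { Select } from '@visactor/vquery';

    type SalesRow = { region: string; amount: number; orderDate: string };
    const select: Select<SalesRow> = [
      'region',
      { field: 'amount', aggr: { func: 'avg' }, alias: 'avgAmount' },
      { field: 'amount', aggr: { func: 'quantile', quantile: 0.95 }, alias: 'p95' },
      { field: 'orderDate', aggr: { func: 'to_month' }, alias: 'month' }
    ];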
@@ -1,21 +1,4 @@
-import {
-
-export declare class VQuery {
-    private queryAdapter;
-    private storageAdapter;
-    private isInitialized;
+import { VQuery as VQueryCore } from './VQuery';
+export declare class VQuery extends VQueryCore {
     constructor();
-    private checkInitialized;
-    private checkDatasetExists;
-    createDataset(datasetId: string, columns?: DatasetColumn[], rawDatasetSource?: RawDatasetSource): Promise<void>;
-    updateDatasetSource(datasetId: string, columns?: DatasetColumn[], rawDatasetSource?: RawDatasetSource): Promise<void>;
-    dropDataset(datasetId: string): Promise<void>;
-    connectDataset(datasetId: string, temporaryColumns?: DatasetColumn[], temporaryRawDatasetSource?: RawDatasetSource): Promise<Dataset>;
-    hasDataset(datasetId: string): Promise<boolean>;
-    listDatasets(): Promise<{
-        datasetId: string;
-        datasetSchema: import("./types").DatasetSchema;
-        datasetSource?: import("./types").DatasetSource;
-    }[]>;
-    close(): Promise<void>;
 }
@@ -1,21 +1,4 @@
-import {
-
-export declare class VQuery {
-    private queryAdapter;
-    private storageAdapter;
-    private isInitialized;
+import { VQuery as VQueryCore } from './VQuery';
+export declare class VQuery extends VQueryCore {
     constructor();
-    private checkInitialized;
-    private checkDatasetExists;
-    createDataset(datasetId: string, columns?: DatasetColumn[], rawDatasetSource?: RawDatasetSource): Promise<void>;
-    updateDatasetSource(datasetId: string, columns?: DatasetColumn[], rawDatasetSource?: RawDatasetSource): Promise<void>;
-    dropDataset(datasetId: string): Promise<void>;
-    connectDataset(datasetId: string, temporaryColumns?: DatasetColumn[], temporaryRawDatasetSource?: RawDatasetSource): Promise<Dataset>;
-    hasDataset(datasetId: string): Promise<boolean>;
-    listDatasets(): Promise<{
-        datasetId: string;
-        datasetSchema: import("./types").DatasetSchema;
-        datasetSource?: import("./types").DatasetSource;
-    }[]>;
-    close(): Promise<void>;
 }
@@ -0,0 +1,21 @@
+import { Dataset } from './dataset/dataset';
+import { RawDatasetSource, DatasetColumn, QueryAdapter, StorageAdapter } from './types';
+export declare class VQuery {
+    protected queryAdapter: QueryAdapter;
+    protected storageAdapter: StorageAdapter;
+    private isInitialized;
+    constructor(queryAdapter: QueryAdapter, storageAdapter: StorageAdapter);
+    private checkInitialized;
+    private checkDatasetExists;
+    createDataset(datasetId: string, columns?: DatasetColumn[], rawDatasetSource?: RawDatasetSource): Promise<void>;
+    updateDatasetSource(datasetId: string, columns?: DatasetColumn[], rawDatasetSource?: RawDatasetSource): Promise<void>;
+    dropDataset(datasetId: string): Promise<void>;
+    connectDataset(datasetId: string, temporaryColumns?: DatasetColumn[], temporaryRawDatasetSource?: RawDatasetSource): Promise<Dataset>;
+    hasDataset(datasetId: string): Promise<boolean>;
+    listDatasets(): Promise<{
+        datasetId: string;
+        datasetSchema: import("./types").DatasetSchema;
+        datasetSource?: import("./types").DatasetSource;
+    }[]>;
+    close(): Promise<void>;
+}
@@ -1,5 +1,7 @@
-export { convertDSLToSQL } from './dataset';
+export { convertDSLToSQL, Dataset } from './dataset';
 export { DatasetSourceBuilder } from './data-source-builder/dataSourceBuilder';
 export * from './utils';
 export * from './types';
 export { VQuery } from './vquery-browser';
+export { DuckDBWebQueryAdapter } from './adapters/query-adapter/duckdbWebAdapter';
+export { IndexedDBAdapter } from './adapters/storage-adapter/indexeddbAdapter';
@@ -1,13 +1,24 @@
-import { DatasetColumn, DatasetSource, QueryDSL } from '../types';
+import { DatasetColumn, DatasetSource, QueryDSL, VQueryDSL } from '../types';
 import { QueryAdapter, StorageAdapter } from '../types';
 export declare class Dataset {
     private queryAdapter;
     private storageAdapter;
     private _datasetId;
-    constructor(
+    constructor(queryAdapter: QueryAdapter, storageAdapter: StorageAdapter, datasetId: string);
+    /**
+     * Initialize the dataset by loading it into the query engine
+     * @param temporaryColumns Optional temporary columns to override storage schema
+     * @param temporaryDatasetSource Optional temporary data source to override storage source
+     */
     init(temporaryColumns?: DatasetColumn[], temporaryDatasetSource?: DatasetSource): Promise<void>;
+    /**
+     * Register file and create a view in DuckDB
+     */
     createOrReplaceView(columns: DatasetColumn[], datasetSource: DatasetSource): Promise<void>;
-
+    /**
+     * Execute query using VQuery DSL
+     */
+    query<T extends Record<string, number | string>>(queryDSL: QueryDSL<T> | VQueryDSL<T>): Promise<{
         performance: {
             startAt: string;
             endAt: string;
@@ -16,6 +27,9 @@ export declare class Dataset {
         dataset: any[];
         table: any;
     }>;
+    /**
+     * Execute raw SQL query
+     */
     queryBySQL(sql: string): Promise<{
         performance: {
             startAt: string;
@@ -25,6 +39,10 @@ export declare class Dataset {
         dataset: any[];
         table: any;
     }>;
+    /**
+     * Clean up resources
+     */
     disconnect(): Promise<void>;
     get datasetId(): string;
+    private buildColumnsStruct;
 }