@uwdata/mosaic-core 0.1.0 → 0.3.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/dist/mosaic-core.js +1895 -11407
- package/dist/mosaic-core.min.js +4 -13
- package/package.json +4 -4
- package/src/Catalog.js +37 -19
- package/src/Coordinator.js +72 -51
- package/src/DataTileIndexer.js +49 -50
- package/src/FilterGroup.js +3 -6
- package/src/MosaicClient.js +5 -2
- package/src/Param.js +67 -21
- package/src/QueryConsolidator.js +238 -0
- package/src/QueryManager.js +133 -0
- package/src/Selection.js +236 -33
- package/src/{clients → connectors}/rest.js +1 -1
- package/src/{clients → connectors}/socket.js +1 -1
- package/src/{clients → connectors}/wasm.js +1 -1
- package/src/index.js +7 -4
- package/src/util/AsyncDispatch.js +180 -0
- package/src/util/cache.js +58 -0
- package/src/util/priority-queue.js +85 -0
- package/src/util/query-result.js +8 -0
- package/src/util/synchronizer.js +47 -0
- package/src/QueryCache.js +0 -65
- package/src/util/skip-client.js +0 -3
- package/src/util/sql-from.js +0 -22
package/src/Param.js
CHANGED
|
@@ -1,46 +1,92 @@
|
|
|
1
|
+
import { AsyncDispatch } from './util/AsyncDispatch.js';
|
|
1
2
|
import { distinct } from './util/distinct.js';
|
|
2
3
|
|
|
4
|
/**
 * Type predicate: determine whether a value is a {@link Param}.
 * @param {*} x The value to test.
 * @returns {boolean} True if the input is a Param, false otherwise.
 */
export function isParam(x) {
  return x instanceof Param;
}
|
|
6
12
|
|
|
7
|
-
|
|
13
|
/**
 * A dynamic parameter that dispatches updates to listeners
 * whenever its value changes.
 */
export class Param extends AsyncDispatch {

  /**
   * Create a new Param instance.
   * @param {*} value The initial value of the Param.
   */
  constructor(value) {
    super();
    this._value = value;
  }

  /**
   * Create a new Param instance with the given initial value.
   * @param {*} value The initial value of the Param.
   * @returns {Param} The new Param instance.
   */
  static value(value) {
    return new Param(value);
  }

  /**
   * Create a new Param instance over an array of initial values,
   * which may contain nested Params.
   * @param {*} values The initial values of the Param.
   * @returns {Param} The new Param instance.
   */
  static array(values) {
    // plain values only: wrap the array directly
    if (!values.some(v => isParam(v))) {
      return new Param(values);
    }
    // nested Params: derive a Param that tracks their current values
    const derived = new Param();
    const refresh = () => derived.update(
      values.map(v => isParam(v) ? v.value : v)
    );
    refresh();
    // re-derive whenever any nested Param emits a new value
    values.filter(v => isParam(v))
      .forEach(v => v.addEventListener('value', refresh));
    return derived;
  }

  /**
   * The current value of the Param.
   */
  get value() {
    return this._value;
  }

  /**
   * Update the Param value.
   * @param {*} value The new value of the Param.
   * @param {object} [options] The update options.
   * @param {boolean} [options.force] A boolean flag indicating if the Param
   *  should emit a 'value' event even if the internal value is unchanged.
   * @returns {this} This Param instance.
   */
  update(value, { force } = {}) {
    if (distinct(this._value, value) || force) {
      this.emit('value', value);
    } else {
      // unchanged and not forced: cancel any queued 'value' dispatch
      this.cancel('value');
    }
    return this;
  }

  /**
   * Upon value-typed updates, sets the current value to the input value
   * immediately prior to the event value being emitted to listeners.
   * @param {string} type The event type.
   * @param {*} value The input event value.
   * @returns {*} The input event value.
   */
  willEmit(type, value) {
    if (type === 'value') this._value = value;
    return value;
  }
}
|
|
@@ -0,0 +1,238 @@
|
|
|
1
|
+
import { Query, Ref } from '@uwdata/mosaic-sql';
|
|
2
|
+
import { queryResult } from './util/query-result.js';
|
|
3
|
+
|
|
4
|
+
/**
 * Create a consolidator to combine structurally compatible queries.
 * @param {*} enqueue Query manager enqueue method
 * @param {*} cache Client-side query cache (sql -> data)
 * @param {*} record Query recorder function
 * @returns A consolidator object
 */
export function consolidator(enqueue, cache, record) {
  let pending = [];
  let id = 0;

  // Schedule a consolidation pass on the next animation frame.
  // Fall back to a zero-delay timeout in environments without
  // requestAnimationFrame (e.g., Node.js), where calling it
  // unconditionally would throw a ReferenceError.
  const schedule = typeof requestAnimationFrame === 'function'
    ? callback => requestAnimationFrame(callback)
    : callback => setTimeout(callback, 0);

  function run() {
    // group queries into bundles that can be consolidated
    const groups = entryGroups(pending, cache);
    pending = [];
    id = 0;

    // build and issue consolidated queries
    for (const group of groups) {
      consolidate(group, enqueue, record);
      processResults(group, cache);
    }
  }

  return {
    add(entry, priority) {
      if (entry.request.type === 'arrow') {
        // wait one frame, gather an ordered list of queries
        // only Apache Arrow is supported, so we can project efficiently
        id = id || schedule(() => run());
        pending.push({ entry, priority, index: pending.length });
      } else {
        // non-Arrow requests bypass consolidation entirely
        enqueue(entry, priority);
      }
    }
  }
}
|
|
41
|
+
|
|
42
|
+
/**
 * Segment query requests into consolidation-compatible groups.
 * @param {*} entries Query request entries ({ entry, priority, index } records)
 * @param {*} cache Client-side query cache (sql -> data)
 * @returns An array of grouped entry arrays
 */
function entryGroups(entries, cache) {
  const groups = [];
  const groupMap = new Map;

  for (const item of entries) {
    // compatible queries share the same consolidation key
    const key = consolidationKey(item.entry.request.query, cache);
    let list = groupMap.get(key);
    if (list === undefined) {
      list = [];
      groups.push(list);
      groupMap.set(key, list);
    }
    list.push(item);
  }

  return groups;
}
|
|
64
|
+
|
|
65
|
+
/**
 * Generate a key string for query consolidation.
 * Queries with matching keys are consolidation-compatible.
 * If a query is found in the cache, it is exempted from consolidation,
 * which is indicated by returning the precise query SQL as the key.
 * @param {*} query The input query.
 * @param {*} cache The query cache (sql -> data).
 * @returns a key string
 */
function consolidationKey(query, cache) {
  const sql = String(query);

  // can not analyze non-Query values; cached queries are exempt
  if (!(query instanceof Query) || cache.get(sql)) {
    return sql;
  }

  // do not try to analyze if query includes clauses
  // that may refer to *derived* columns we can't resolve
  if (
    query.orderby().length || query.where().length ||
    query.qualify().length || query.having().length
  ) {
    return sql;
  }

  // create a derived query stripped of selections
  const q = query.clone().$select('*');

  // check group by criteria for compatibility
  // queries may refer to *derived* columns as group by criteria
  // we resolve these against the true grouping expressions
  const groupby = query.groupby();
  if (groupby.length) {
    const map = {}; // expression map (as -> expr)
    query.select().forEach(({ as, expr }) => map[as] = expr);
    q.$groupby(groupby.map(e => (e instanceof Ref && map[e.column]) || e));
  }

  // key is just the transformed query as SQL
  return String(q);
}
|
|
106
|
+
|
|
107
|
+
/**
 * Issue queries, consolidating where possible.
 * @param {*} group Array of bundled query entries
 * @param {*} enqueue Add entry to query queue
 * @param {*} record Query recorder function
 */
function consolidate(group, enqueue, record) {
  if (!shouldConsolidate(group)) {
    // single distinct query: issue entries directly
    for (const { entry, priority } of group) {
      enqueue(entry, priority);
    }
    return;
  }

  // issue a single consolidated query on behalf of the group;
  // the shared result is stashed on the group for later projection
  enqueue({
    request: {
      type: 'arrow',
      cache: false,
      record: false,
      query: consolidatedQuery(group, record)
    },
    result: (group.result = queryResult())
  });
}
|
|
132
|
+
|
|
133
|
+
/**
 * Check if a group contains multiple distinct queries.
 * @param {*} group Array of bundled query entries
 * @returns false if group contains a single (possibly repeated) query,
 *  otherwise true
 */
function shouldConsolidate(group) {
  if (group.length < 2) return false;
  const first = String(group[0].entry.request.query);
  // consolidate only if some later query differs from the first
  return group.some(
    (item, i) => i > 0 && String(item.entry.request.query) !== first
  );
}
|
|
150
|
+
|
|
151
|
+
/**
 * Create a consolidated query for a group.
 * As a side effect, attaches per-entry column name maps to group.maps.
 * @param {*} group Array of bundled query entries
 * @param {*} record Query recorder function
 * @returns A consolidated Query instance
 */
function consolidatedQuery(group, record) {
  const maps = group.maps = [];
  const fields = new Map;

  // gather select fields across all queries in the group,
  // de-duplicating identical expressions under shared column names
  for (const item of group) {
    const { query } = item.entry.request;
    const fieldMap = [];
    maps.push(fieldMap);
    for (const { as, expr } of query.select()) {
      const key = String(expr);
      if (!fields.has(key)) {
        // assign a fresh positional name (col0, col1, ...) per expression
        fields.set(key, [`col${fields.size}`, expr]);
      }
      fieldMap.push([fields.get(key)[0], as]);
    }
    record(String(query));
  }

  // use a cloned query as a starting point
  const query = group[0].entry.request.query.clone();

  // update group by statement as needed:
  // resolve derived column references to consolidated names
  const groupby = query.groupby();
  if (groupby.length) {
    const map = {};
    maps[0].forEach(([name, as]) => map[as] = name);
    query.$groupby(groupby.map(e => (e instanceof Ref && map[e.column]) || e));
  }

  // update select statement and return
  return query.$select(Array.from(fields.values()));
}
|
|
191
|
+
|
|
192
|
+
/**
 * Process query results, dispatch results to original requests.
 * @param {*} group Array of query requests
 * @param {*} cache Client-side query cache (sql -> data)
 */
async function processResults(group, cache) {
  const { maps, result } = group;
  if (!maps) return; // no consolidation performed

  let data;
  try {
    data = await result;
  } catch (err) {
    // pass error to all consolidated queries
    group.forEach(({ entry }) => entry.result.reject(err));
    return;
  }

  // project the shared result back to each original request
  group.forEach(({ entry }, index) => {
    const { request, result } = entry;
    const projected = projectResult(data, maps[index]);
    if (request.cache) {
      cache.set(String(request.query), projected);
    }
    result.fulfill(projected);
  });
}
|
|
221
|
+
|
|
222
|
+
/**
 * Project a consolidated result to a client result.
 * @param {*} data Consolidated query result, as an Apache Arrow Table
 * @param {*} map Column name map as [source, target] pairs
 * @returns the projected Apache Arrow table
 */
function projectResult(data, map) {
  if (!map) {
    // no mapping: pass the result through unchanged
    return data;
  }
  // rename the mapped columns and rebuild a table of the same type
  const cols = {};
  map.forEach(([name, as]) => cols[as] = data.getChild(name));
  return new data.constructor(cols);
}
|
|
@@ -0,0 +1,133 @@
|
|
|
1
|
+
import { consolidator } from './QueryConsolidator.js';
|
|
2
|
+
import { lruCache, voidCache } from './util/cache.js';
|
|
3
|
+
import { priorityQueue } from './util/priority-queue.js';
|
|
4
|
+
import { queryResult } from './util/query-result.js';
|
|
5
|
+
|
|
6
|
export const Priority = { High: 0, Normal: 1, Low: 2 };

/**
 * Create a query manager that maintains a priority queue of pending
 * query requests, a client-side result cache, optional query
 * consolidation, and query recording facilities.
 * @returns A query manager object.
 */
export function QueryManager() {
  const queue = priorityQueue(3); // one bucket per Priority level
  let db;
  let clientCache;
  let logger;
  let recorders = [];
  let pending = null; // promise for the in-flight query, if any
  let consolidate;    // query consolidator, when enabled

  // Cache facade that always delegates to the *current* clientCache.
  // BUG FIX: the consolidator was previously handed the clientCache
  // binding by value, so a later cache() reconfiguration left the
  // consolidator reading/writing a stale (replaced) cache instance.
  const cacheFacade = {
    get: sql => clientCache.get(sql),
    set: (sql, data) => clientCache.set(sql, data)
  };

  // Submit the next queued request, if idle and work remains.
  function next() {
    if (pending || queue.isEmpty()) return;
    const { request, result } = queue.next();
    pending = submit(request, result);
    // submit never rejects (errors flow to result), so finally suffices
    pending.finally(() => { pending = null; next(); });
  }

  // Add an entry to the queue and trigger processing.
  function enqueue(entry, priority = Priority.Normal) {
    queue.insert(entry, priority);
    next();
  }

  // Pass query SQL text to all active recorders.
  function recordQuery(sql) {
    if (recorders.length && sql) {
      recorders.forEach(rec => rec.add(sql));
    }
  }

  // Issue a request against the database connector, consulting and
  // updating the client cache as directed by the request flags.
  async function submit(request, result) {
    try {
      const { query, type, cache = false, record = true, options } = request;
      const sql = query ? `${query}` : null;

      // update recorders
      if (record) {
        recordQuery(sql);
      }

      // check query cache
      if (cache) {
        const cached = clientCache.get(sql);
        if (cached) {
          logger.debug('Cache');
          result.fulfill(cached);
          return;
        }
      }

      // issue query, potentially cache result
      const t0 = performance.now();
      const data = await db.query({ type, sql, ...options });
      if (cache) clientCache.set(sql, data);
      logger.debug(`Request: ${(performance.now() - t0).toFixed(1)}`);
      result.fulfill(data);
    } catch (err) {
      result.reject(err);
    }
  }

  return {
    /** Get or set the client cache. true selects an LRU cache; a falsy
     *  value installs a no-op cache; otherwise use the given cache. */
    cache(value) {
      return value !== undefined
        ? (clientCache = value === true ? lruCache() : (value || voidCache()))
        : clientCache;
    },

    /** Get or set the logger. */
    logger(value) {
      return value ? (logger = value) : logger;
    },

    /** Get or set the database connector. */
    connector(connector) {
      return connector ? (db = connector) : db;
    },

    /** Enable or disable query consolidation. */
    consolidate(flag) {
      if (flag && !consolidate) {
        // hand the consolidator the facade, not the raw cache binding,
        // so subsequent cache() calls take effect for consolidation too
        consolidate = consolidator(enqueue, cacheFacade, recordQuery);
      } else if (!flag && consolidate) {
        consolidate = null;
      }
    },

    /** Submit a query request at the given priority.
     *  @returns a query result promise */
    request(request, priority = Priority.Normal) {
      const result = queryResult();
      const entry = { request, result };
      if (consolidate) {
        consolidate.add(entry, priority);
      } else {
        enqueue(entry, priority);
      }
      return result;
    },

    /** Remove the given query results from the pending queue.
     *  NOTE(review): an already in-flight query is not aborted here. */
    cancel(requests) {
      const set = new Set(requests);
      queue.remove(({ result }) => set.has(result));
    },

    /** Reject and remove all queued (not yet submitted) requests. */
    clear() {
      queue.remove(({ result }) => {
        result.reject('Cleared');
        return true;
      });
    },

    /** Start recording query SQL text.
     *  @returns a recorder with add/reset/snapshot/stop methods */
    record() {
      let state = [];
      const recorder = {
        add(query) {
          state.push(query);
        },
        reset() {
          state = [];
        },
        snapshot() {
          return state.slice();
        },
        stop() {
          recorders = recorders.filter(x => x !== recorder);
          return state;
        }
      };
      recorders.push(recorder);
      return recorder;
    }
  };
}
|