@alleninstitute/vis-core 0.0.6
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/LICENSE.md +11 -0
- package/README.md +8 -0
- package/dist/main.js +1426 -0
- package/dist/main.js.map +1 -0
- package/dist/module.js +1271 -0
- package/dist/module.js.map +1 -0
- package/dist/types.d.ts +435 -0
- package/dist/types.d.ts.map +1 -0
- package/package.json +63 -0
package/dist/module.js
ADDED
|
@@ -0,0 +1,1271 @@
|
|
|
1
|
+
import $tmQ84$lodashpartial from "lodash/partial";
|
|
2
|
+
import {Box2D as $tmQ84$Box2D, Vec2 as $tmQ84$Vec2} from "@alleninstitute/vis-geometry";
|
|
3
|
+
import $tmQ84$regl from "regl";
|
|
4
|
+
import $tmQ84$lodashuniqueId from "lodash/uniqueId";
|
|
5
|
+
|
|
6
|
+
|
|
7
|
+
function $parcel$export(e, n, v, s) {
    // Define a live, enumerable, re-configurable accessor named `n` on the
    // module namespace object `e`, backed by getter `v` and optional setter `s`.
    const descriptor = {
        get: v,
        set: s,
        enumerable: true,
        configurable: true,
    };
    Object.defineProperty(e, n, descriptor);
}
|
|
10
|
+
|
|
11
|
+
/**
 * Begin a long-running "frame": render every item in `items`, fetching any data each
 * item needs through `mutableCache`. Items whose data is already fully cached are
 * rendered immediately (inside a soft time budget); the rest are queued and drained
 * by an interval timer, keeping at most `maximumInflightAsyncTasks` cache tasks
 * pending at a time.
 *
 * @param maximumInflightAsyncTasks cap on concurrent pending cache tasks (clamped to >= 1)
 * @param queueProcessingIntervalMS period (ms) of the timer that drains the queue
 * @param items the renderable items that make up this frame
 * @param mutableCache shared async cache used to fetch and hold per-item data
 * @param settings frame-constant settings, passed through to the user callbacks
 * @param requestsForItem (item, settings, signal) => named fetch-functions for one item
 * @param render (item, settings, data) => draws one item once its data is ready
 * @param lifecycleCallback invoked with status events: 'begun', 'progress', 'finished',
 *        'finished_synchronously', 'cancelled', or 'error' (with the error attached)
 * @param cacheKeyForRequest maps a request name (plus item & settings) to a cache key;
 *        defaults to using the request name itself
 * @param queueTimeBudgetMS soft limit (ms) on synchronous work per tick; defaults to a
 *        third of the processing interval
 * @returns an object with `cancelFrame(reason?)` - cancels outstanding tasks, aborts
 *          fetches, stops the timer, and reports 'cancelled'
 */
function $e03391db1cf02961$export$8fbec945df051b82(maximumInflightAsyncTasks, queueProcessingIntervalMS, items, mutableCache, settings, requestsForItem, render, lifecycleCallback, cacheKeyForRequest = (key)=>key, queueTimeBudgetMS = queueProcessingIntervalMS / 3) {
    const abort = new AbortController();
    const queue = [];
    const taskCancelCallbacks = [];
    const reportNormalStatus = (status)=>{
        // we want to report our status, however the flow of events can be confusing -
        // our callers anticipate an asynchronous (long running) frame to be started,
        // but there are scenarios in which the whole thing is completely synchronous
        // callers who are scheduling things may be surprised that their frame finished
        // before the code that handles it appears to start. thus, we make the entire lifecycle callback
        // system async, to prevent surprises.
        Promise.resolve().then(()=>lifecycleCallback({
            status: status
        }));
    };
    // when starting a frame, we greedily attempt to render any tasks that are already in the cache
    // however, if there is too much overhead (or too many tasks) we would risk hogging the main thread
    // thus - obey the limit (its a soft limit)
    const startTime = performance.now();
    for(let i = 0; i < items.length; i += 1){
        const itemToRender = items[i];
        const requestFns = requestsForItem(itemToRender, settings, abort.signal);
        const cacheKey = (rq)=>cacheKeyForRequest(rq, itemToRender, settings);
        const cacheKeys = Object.keys(requestFns).map(cacheKey);
        if (mutableCache.areKeysAllCached(cacheKeys)) {
            // fully cached: cacheAndUse should invoke render synchronously and return undefined
            const result = mutableCache.cacheAndUse(requestFns, (0, $tmQ84$lodashpartial)(render, itemToRender, settings), cacheKey);
            if (result !== undefined) // this is a problem - the cache reported that all the keys are in the cache, however this result is a cancellation callback,
            // which indicates that the item could not be rendered right away, which should be impossible...
            // TODO
            taskCancelCallbacks.push(result);
        } else // areKeysAllCached returned false - enqueue for later
        queue.push(itemToRender);
        if (performance.now() - startTime > queueTimeBudgetMS) {
            // we've used up all our time - enqueue all remaining tasks
            if (i < items.length - 1) queue.push(...items.slice(i + 1));
            break;
        }
    }
    if (queue.length === 0) {
        // we did all the work - it was already cached
        reportNormalStatus('finished_synchronously');
        return {
            cancelFrame: ()=>{}
        };
    }
    // TODO: Re-examine lifecycle reporting, potentially unify all statuses into a single type
    reportNormalStatus('begun');
    if (queue.length !== items.length) // We did some work, but there's some left
    reportNormalStatus('progress');
    const doWorkOnQueue = (intervalId)=>{
        // try our best to cleanup if something goes awry
        const startWorkTime = performance.now();
        const cleanupOnError = (err)=>{
            // clear the queue and the staging area (inFlight)
            for (const cancelMe of taskCancelCallbacks)cancelMe();
            queue.splice(0, queue.length);
            // stop fetching
            abort.abort(err);
            clearInterval(intervalId);
            // pass the error somewhere better:
            lifecycleCallback({
                status: 'error',
                error: err
            });
        };
        // keep starting tasks until we hit the concurrency cap (or run out of work/time)
        while(mutableCache.getNumPendingTasks() < Math.max(maximumInflightAsyncTasks, 1)){
            if (queue.length < 1) {
                // we cant add anything to the in-flight staging area, the final task
                // is already in flight
                if (mutableCache.getNumPendingTasks() < 1) {
                    // we do want to wait for that last in-flight task to actually finish though:
                    clearInterval(intervalId);
                    reportNormalStatus('finished');
                }
                return;
            }
            const itemToRender = queue.shift();
            if (itemToRender === undefined) {
                // This should logically never happen, but if it does something is wrong so we emit an error
                cleanupOnError(new Error('Internal error: itemToRender was undefined'));
                return;
            }
            const toCacheKey = (rq)=>cacheKeyForRequest(rq, itemToRender, settings);
            try {
                const result = mutableCache.cacheAndUse(requestsForItem(itemToRender, settings, abort.signal), (0, $tmQ84$lodashpartial)(render, itemToRender, settings), toCacheKey, ()=>reportNormalStatus('progress'));
                if (result !== undefined) // put this cancel callback in a list where we can invoke if something goes wrong
                // note that it is harmless to cancel a task that was completed
                taskCancelCallbacks.push(result);
            } catch (err) {
                cleanupOnError(err);
            }
            // obey the per-tick time budget so the interval handler doesn't hog the main thread
            if (performance.now() - startWorkTime > queueTimeBudgetMS) break;
        }
    };
    const interval = setInterval(()=>doWorkOnQueue(interval), queueProcessingIntervalMS);
    // return a function to allow our caller to cancel the frame - guaranteed that no settings/data will be
    // touched/referenced after cancellation, unless the author of render() did some super weird bad things
    return {
        cancelFrame: (reason)=>{
            for (const cancelMe of taskCancelCallbacks)cancelMe();
            abort.abort(new DOMException(reason, 'AbortError'));
            clearInterval(interval);
            reportNormalStatus('cancelled');
        }
    };
}
|
|
117
|
+
|
|
118
|
+
|
|
119
|
+
// Record that the resource identified by `cacheKey` has arrived, marking the
// semantic `key` as ready on the pending request `req`. Returns true once the
// request is completely satisfied (nothing left awaited), false while it is
// still awaiting more entries.
function $9fd71f854f4df31b$var$updatePendingRequest(req, key, cacheKey, item) {
    if (req.awaiting.has(cacheKey)) {
        const waiters = req.awaiting.get(cacheKey);
        // one semantic key just got fulfilled - stop awaiting it
        waiters?.delete(key);
        if ((waiters?.size ?? 0) === 0) {
            req.awaiting.delete(cacheKey);
        }
        req.blocking.add(cacheKey);
        req.ready[key] = item;
    }
    return req.awaiting.size === 0;
}
|
|
131
|
+
// Generic async cache for render resources. Each entry holds either resolved data
// or an in-flight Promise; "pending requests" group several entries and fire a
// runner callback exactly once, when every entry in the group has resolved.
class $9fd71f854f4df31b$export$6893cac74ecce081 {
    /**
     * the intended use of this cache is to store resources used for rendering. Because the specific contents are generic, a simple interface must be provided
     * to support LRU cache eviction
     * occasionally, it can be necessary to manage these resources more explicitly (see https://stackoverflow.com/a/31250301 for a great example)
     * @param destroy a function which safely releases the resources owned by an entry in this cache - for normal garbage-collected objects, a no-op function will suffice.
     * @param size a function which returns the size of a resource - this is used only in relation to the cacheLimit
     * @param cacheLimit a limit (in whatever units are returned by the size() parameter) to place on cache contents
     * note that this limit is not a hard limit - old entries are evicted when new data is fetched, but the limit may be exceeded occasionally
     * a reasonable implementation may simply return 1 for size, and a desired occupancy count for the limit
     */ constructor(destroy, size, cacheLimit){
        this.size = size;
        this.destroyer = destroy;
        this.limit = cacheLimit;
        this.entries = new Map();
        this.pendingRequests = new Set();
    }
    /** Total size of all resolved entries; in-flight Promises count as zero. */
    usedSpace() {
        // Map uses iterators, so we're in for-loop territory here
        let sum = 0;
        for (const entry of this.entries.values())sum += entry.data instanceof Promise ? 0 : this.size(entry.data);
        return sum;
    }
    /** Count, per cache key, how many pending requests reference it (blocking or awaited). */
    countRequests() {
        const reqCounts = {};
        for (const req of this.pendingRequests)for (const key of [
            ...req.blocking,
            ...req.awaiting.keys()
        ]){
            if (!reqCounts[key]) reqCounts[key] = 0;
            reqCounts[key] += 1;
        }
        return reqCounts;
    }
    // if the cache is full, sort candidates which are not currently requested by their last-used timestamps
    // evict those items until the cache is no longer full
    evictIfFull() {
        // find entries which have 0 pending requests, and are not themselves promises...
        let used = this.usedSpace();
        const candidates = [];
        if (used > this.limit) {
            // its potentially a bit slow to do this:
            const counts = this.countRequests();
            this.entries.forEach((entry, key)=>{
                if (!(entry.data instanceof Promise) && (counts[key] ?? 0) < 1) candidates.push({
                    key: key,
                    data: entry.data,
                    lastRequestedTimestamp: entry.lastRequestedTimestamp
                });
            });
            // least-recently-requested first
            const priority = candidates.sort((a, b)=>a.lastRequestedTimestamp - b.lastRequestedTimestamp);
            for (const evictMe of priority){
                used -= this.size(evictMe.data);
                this.destroyer(evictMe.data);
                this.entries.delete(evictMe.key);
                if (used < this.limit) return;
            }
        }
    }
    /**
     * `isCached` checks if the entry is in the cache with a resolved promise.
     *
     * @param key The entry key to check for in the cache
     * @returns True if the entry in the cache has been resolved, false if there is no entry with that key or the promise is still pending
     */ isCached(key) {
        // the key exists, and the value associated is not a promise
        return this.entries.has(key) && !(this.entries.get(key)?.data instanceof Promise);
    }
    /**
     * `areKeysAllCached` checks if all the keys provided are in the cache with resolved promises.
     *
     * Useful for checking if all the data needed for a particular operation is already in the cache.
     *
     * @param cacheKeys A list of keys to check for in the cache
     * @returns True if all keys are cached, false if any are not in the cache
     */ areKeysAllCached(cacheKeys) {
        return cacheKeys.every((key)=>this.isCached(key));
    }
    /**
     * @deprecated to alert (external) users to avoid calling this!
     * `getCachedUNSAFE` gets an entry from the cache for the given key (if the promise is resolved).
     * because of how eviction works - this method should be considered unsafe! consider the following
     * @example
     * const entry = cache.getCachedUnsafe('whatever')
     * const otherStuff = await fetch('....')
     * ... more code
     * doSomethingCool(entry, otherStuff)
     *
     * by the time the caller gets to the doSomethingCool call, the resources bound to the cache entry
     * may have been disposed!
     * do note that if you use a cache-entry synchronously (no awaits!) after requesting it, you're likely to not
     * encounter any issues, however its a much more robust practice to simply refactor like so:
     *
     * const otherStuff = await fetch('...')
     * cache.cacheAndUse({...}, (...args)=>doSomethingCool(otherStuff, ..args), ...)
     *
     * @param key Entry key to look up in the cache
     * @returns The entry (D) if it is present, or undefined if it is not
     */ getCachedUNSAFE(key) {
        const entry = this.entries.get(key);
        if (!entry) return undefined;
        // touching an entry refreshes its LRU timestamp
        entry.lastRequestedTimestamp = performance.now();
        return entry.data instanceof Promise ? undefined : entry?.data;
    }
    /** Number of grouped requests whose data has not yet fully arrived. */
    getNumPendingTasks() {
        return this.pendingRequests.size;
    }
    /**
     * Record that data for `cacheKey` resolved: store it on the entry, advance every
     * pending request that awaited it, and run + remove any request that is now complete.
     */
    dataArrived(key, cacheKey, data) {
        this.evictIfFull(); // we just got some data - is there room in the cache?
        const mutableEntry = this.entries.get(cacheKey);
        if (mutableEntry) mutableEntry.data = data;
        const removeUs = [];
        for (const req of this.pendingRequests)if ($9fd71f854f4df31b$var$updatePendingRequest(req, key, cacheKey, data)) {
            req.runner(req.ready);
            removeUs.push(req);
        }
        // delete outside the iteration to avoid mutating the Set mid-loop
        for (const finished of removeUs)this.pendingRequests.delete(finished);
    }
    /**
     * Ensure an entry exists for `cacheKey`. Returns the resolved data when already
     * cached, otherwise a Promise (freshly started via `getter`, or the pre-existing
     * in-flight one) wired to route its result through dataArrived.
     */
    prepareCache(semanticKey, cacheKey, getter) {
        let promise;
        const entry = this.entries.get(cacheKey);
        const data = entry?.data;
        // we either return early (data is cached)
        // or we initialize promise (either getter() or a pre-existing request for the same cachekey)
        // if we dont return early, we hook up a call to dataArrived to promise
        if (data) {
            if (data instanceof Promise) promise = data;
            else {
                // we could simply "return data"
                // however getCachedUnsafe updates its "last-requested timestamp"
                const resolvedCacheData = this.getCachedUNSAFE(cacheKey);
                if (!resolvedCacheData) throw new Error('unexpected undefined data');
                return resolvedCacheData;
            }
        } else {
            const abort = new AbortController();
            promise = getter(abort.signal);
            this.entries.set(cacheKey, {
                data: promise,
                lastRequestedTimestamp: performance.now(),
                abort: abort
            });
        }
        return promise.then((data)=>{
            this.dataArrived(semanticKey, cacheKey, data);
        });
    }
    /**
     * Fetch (or reuse) everything named in `workingSet`, then invoke `use` exactly once
     * with the complete set of resolved data - synchronously when everything was cached.
     * @param workingSet map of semantic key -> fetch function (receives an AbortSignal)
     * @param use runner invoked with the ready data once all entries are resolved
     * @param toCacheKey maps a semantic key to its (possibly shared) cache key
     * @param taskFinished optional per-entry progress callback
     *        TODO: consider removing taskFinished - it would be more simple to let the caller handle this in their use() function
     * @returns undefined when `use` already ran synchronously; otherwise a callback that
     *          cancels the pending request
     */
    cacheAndUse(workingSet, use, toCacheKey, taskFinished) {
        const keys = Object.keys(workingSet);
        const req = {
            awaiting: new Map(),
            ready: {},
            runner: use,
            blocking: new Set()
        };
        // group semantic keys by their cache key (several may share one)
        for (const k of keys)if (req.awaiting.has(toCacheKey(k))) req.awaiting.get(toCacheKey(k))?.add(k);
        else req.awaiting.set(toCacheKey(k), new Set([
            k
        ]));
        for (const semanticKey of keys){
            const result = this.prepareCache(semanticKey, toCacheKey(semanticKey), workingSet[semanticKey]);
            if (result instanceof Promise) {
                const prom = taskFinished !== undefined ? result.then(taskFinished) : result;
                prom.catch((_reason)=>{
                    // delete the failed entry from the cache
                    // also remove the entire request it belongs to
                    this.entries.delete(toCacheKey(semanticKey));
                    this.pendingRequests.delete(req);
                    // note that catches get chained - so any catch handlers that came in with this promise
                    // still get called
                });
            } else if ($9fd71f854f4df31b$var$updatePendingRequest(req, semanticKey, toCacheKey(semanticKey), result)) {
                use(req.ready);
                if (taskFinished !== undefined) Promise.resolve().then(taskFinished); // we did the task synchronously...
                // early return in the case that everything was cached!
                // the only thing this short-circuits is pendingRequests.add(req)
                // (because of course it isn't pending, because we just did it!)
                return undefined;
            }
        }
        this.pendingRequests.add(req);
        return ()=>{
            this.pendingRequests.delete(req);
        };
    }
}
|
|
318
|
+
|
|
319
|
+
|
|
320
|
+
|
|
321
|
+
// Parcel module namespace for the double-buffer helpers; swapBuffers is exposed
// through a live getter so late bindings still resolve.
var $201a17180a2597e3$exports = {};

$parcel$export($201a17180a2597e3$exports, "swapBuffers", () => $201a17180a2597e3$export$e3caf6d91837252d);
|
|
324
|
+
// Exchange the roles of the two halves of a double buffer: the buffer that was
// just written becomes readable, and the old read target becomes the next
// write target. Returns a new pair object; the inputs are not mutated.
function $201a17180a2597e3$export$e3caf6d91837252d(doubleBuffer) {
    return {
        readFrom: doubleBuffer.writeTo,
        writeTo: doubleBuffer.readFrom,
    };
}
|
|
331
|
+
|
|
332
|
+
|
|
333
|
+
// A cacheable render layer backed by two off-screen framebuffers: `writeTo`
// accumulates the frame currently being rendered, while `readFrom` holds the
// last completed frame. When a frame finishes the two are swapped.
class $3901da79d5cf59d6$export$4fff234e4eb6b2b6 {
    /**
     * @param regl the regl context used to allocate and clear framebuffers
     * @param imgRenderer command that copies a textured quad between framebuffers
     *        (used to carry forward overlapping content from a cancelled frame)
     * @param renderFn starts a (possibly async) frame into a target framebuffer;
     *        returns a handle exposing cancelFrame
     * @param resolution [width, height] used for both framebuffers
     */
    constructor(regl, imgRenderer, renderFn, resolution){
        this.buffers = {
            readFrom: {
                resolution: resolution,
                texture: regl.framebuffer(...resolution),
                bounds: undefined
            },
            writeTo: {
                resolution: resolution,
                texture: regl.framebuffer(...resolution),
                bounds: undefined
            }
        };
        this.renderImg = imgRenderer;
        this.regl = regl;
        this.runningFrame = null;
        this.renderFn = renderFn;
    }
    /** Cancel any in-progress frame and release both GPU framebuffers. */
    destroy() {
        this.runningFrame?.cancelFrame('destroy this layer');
        this.buffers.readFrom.texture.destroy();
        this.buffers.writeTo.texture.destroy();
    }
    /** True while a started frame has not yet finished (or been cancelled). */
    renderingInProgress() {
        return this.runningFrame !== null;
    }
    /** 'cur' selects the in-progress buffer; anything else the last completed one. */
    getRenderResults(stage) {
        return stage === 'cur' ? this.buffers.writeTo : this.buffers.readFrom;
    }
    /**
     * React to new props: optionally cancel the running frame (salvaging any
     * overlapping pixels into the read buffer), then start rendering the new
     * data into the write buffer.
     * @param props \{ data, settings \} - settings carries camera and an optional callback
     * @param cancel when true (default), an in-progress frame is cancelled and the
     *        write buffer is cleared before the new frame starts
     */
    onChange(props, cancel = true) {
        if (cancel && this.runningFrame) {
            this.runningFrame.cancelFrame();
            this.runningFrame = null;
            const { readFrom: readFrom, writeTo: writeTo } = this.buffers;
            // copy our work to the prev-buffer...
            // (only worthwhile when the partial frame overlaps what we already show)
            if (readFrom.bounds && writeTo.bounds && (0, $tmQ84$Box2D).intersection(readFrom.bounds, writeTo.bounds)) {
                const [width, height] = writeTo.resolution;
                this.renderImg({
                    box: (0, $tmQ84$Box2D).toFlatArray(writeTo.bounds),
                    img: writeTo.texture,
                    target: readFrom.texture,
                    viewport: {
                        x: 0,
                        y: 0,
                        width: width,
                        height: height
                    },
                    view: (0, $tmQ84$Box2D).toFlatArray(readFrom.bounds)
                });
            }
            // wipe the write buffer so the new frame starts from transparent black
            this.regl.clear({
                framebuffer: this.buffers.writeTo.texture,
                color: [
                    0,
                    0,
                    0,
                    0
                ],
                depth: 1
            });
        }
        const { data: data, settings: settings } = props;
        const { camera: camera, callback: callback } = settings;
        // NOTE(review): camera.view is assumed to be a Box2D in the same space as
        // the buffers' bounds - confirm against callers.
        this.buffers.writeTo.bounds = camera.view;
        const wrapCallback = {
            ...settings,
            callback: (ev)=>{
                const { status: status } = ev;
                switch(status){
                    case 'finished':
                    case 'finished_synchronously':
                        // frame complete: promote writeTo to readFrom
                        this.buffers = (0, $201a17180a2597e3$export$e3caf6d91837252d)(this.buffers);
                        // only erase... if we would have cancelled...
                        if (cancel) this.regl.clear({
                            framebuffer: this.buffers.writeTo.texture,
                            color: [
                                0,
                                0,
                                0,
                                0
                            ],
                            depth: 1
                        });
                        this.runningFrame = null;
                        break;
                }
                // forward every event to the user-supplied callback, if any
                callback?.(ev);
            }
        };
        this.runningFrame = this.renderFn(this.buffers.writeTo.texture, data, wrapCallback);
    }
}
|
|
426
|
+
|
|
427
|
+
|
|
428
|
+
|
|
429
|
+
// Parcel module namespace for the web-resource helpers; each public name is
// exposed through a live getter so late bindings still resolve.
var $57582d9b80d114ba$exports = {};

$parcel$export($57582d9b80d114ba$exports, "createHttpsResource", () => $57582d9b80d114ba$export$81e4895a4d9a9b60);
$parcel$export($57582d9b80d114ba$exports, "createS3Resource", () => $57582d9b80d114ba$export$497a706b08a344d6);
$parcel$export($57582d9b80d114ba$exports, "isS3Resource", () => $57582d9b80d114ba$export$ba8acc09153a66ca);
$parcel$export($57582d9b80d114ba$exports, "isHttpsResource", () => $57582d9b80d114ba$export$5eb78fd524c7545f);
$parcel$export($57582d9b80d114ba$exports, "getResourceUrl", () => $57582d9b80d114ba$export$d2c227b690e4eb28);
|
|
436
|
+
// Parcel module namespace for the error module; VisError is exposed via a live getter.
var $3d0d2b554afda09c$exports = {};

$parcel$export($3d0d2b554afda09c$exports, "VisError", () => $3d0d2b554afda09c$export$f6bd031016c938ca);
// Library-specific error type (published as VisError) so callers can
// distinguish vis-core failures from other Errors via instanceof.
class $3d0d2b554afda09c$export$f6bd031016c938ca extends Error {
}
|
|
441
|
+
|
|
442
|
+
|
|
443
|
+
// Discriminator tags for the two supported WebResource flavors.
const $57582d9b80d114ba$var$RESOURCE_TYPE_S3 = 's3';
const $57582d9b80d114ba$var$RESOURCE_TYPE_HTTPS = 'https';
|
|
445
|
+
// Wrap a plain https URL as a typed WebResource record (published as createHttpsResource).
function $57582d9b80d114ba$export$81e4895a4d9a9b60(url) {
    const resource = {
        type: $57582d9b80d114ba$var$RESOURCE_TYPE_HTTPS,
        url,
    };
    return resource;
}
|
|
451
|
+
// Wrap an s3:// URL plus its AWS region as a typed WebResource record
// (published as createS3Resource).
function $57582d9b80d114ba$export$497a706b08a344d6(url, region) {
    const resource = {
        type: $57582d9b80d114ba$var$RESOURCE_TYPE_S3,
        region,
        url,
    };
    return resource;
}
|
|
458
|
+
// Type guard for S3 resources (published as isS3Resource). Falsy inputs pass
// through unchanged, matching the original short-circuit `res && ...` result.
function $57582d9b80d114ba$export$ba8acc09153a66ca(res) {
    return res ? res.type === $57582d9b80d114ba$var$RESOURCE_TYPE_S3 : res;
}
|
|
461
|
+
// Type guard for https resources (published as isHttpsResource). Falsy inputs
// pass through unchanged, matching the original short-circuit `res && ...` result.
function $57582d9b80d114ba$export$5eb78fd524c7545f(res) {
    return res ? res.type === $57582d9b80d114ba$var$RESOURCE_TYPE_HTTPS : res;
}
|
|
464
|
+
// Loose sanity check for AWS region identifiers such as "us-west-2"
// (intentionally not exhaustive - see isValidS3Region).
const $57582d9b80d114ba$var$S3_REGION_BASIC_REGEX = /[a-z]+-[a-z]+-[1-9]/;
|
|
465
|
+
// Convert "s3://bucket/path..." into its virtual-hosted-style https equivalent
// for the given region: https://bucket.s3.region.amazonaws.com/path
function $57582d9b80d114ba$var$httpsFromS3Bucket(url, region) {
    // maybe region is controlled via the url or another arg in the future, so lets make it a variable
    // the bucket name ends at the first '/' after the 5-character "s3://" scheme prefix
    const schemeLength = 's3://'.length;
    const slashAfterBucket = url.indexOf('/', schemeLength);
    const bucket = url.slice(schemeLength, slashAfterBucket);
    const objectPath = url.slice(slashAfterBucket + 1);
    return `https://${bucket}.s3.${region}.amazonaws.com/${objectPath}`;
}
|
|
472
|
+
// A minimally-valid S3 URL: non-empty, starts with "s3://", and has at least
// one character after the scheme.
function $57582d9b80d114ba$var$isValidS3URL(url) {
    return !!url && url.length > 5 && url.startsWith('s3://');
}
|
|
475
|
+
// A plausible AWS region string: non-empty and matching the loose
// word-word-digit pattern (e.g. "us-east-1").
function $57582d9b80d114ba$var$isValidS3Region(region) {
    return Boolean(region) && $57582d9b80d114ba$var$S3_REGION_BASIC_REGEX.test(region);
}
|
|
478
|
+
// Resolve a WebResource to a fetchable URL (published as getResourceUrl).
// https resources pass through; s3 resources are validated and rewritten to
// their https form. Throws VisError on invalid s3 input.
function $57582d9b80d114ba$export$d2c227b690e4eb28(res) {
    if (res.type !== 's3') {
        return res.url;
    }
    if (!$57582d9b80d114ba$var$isValidS3URL(res.url)) {
        throw new $3d0d2b554afda09c$export$f6bd031016c938ca('cannot get WebResource URL: invalid S3 URL');
    }
    if (!$57582d9b80d114ba$var$isValidS3Region(res.region)) {
        throw new $3d0d2b554afda09c$export$f6bd031016c938ca('cannot get WebResource URL: invalid S3 Region');
    }
    return $57582d9b80d114ba$var$httpsFromS3Bucket(res.url, res.region);
}
|
|
486
|
+
|
|
487
|
+
|
|
488
|
+
|
|
489
|
+
// Parcel module namespace for the color helpers; both parsers are exposed
// through live getters.
var $d9c300243b26048c$exports = {};

$parcel$export($d9c300243b26048c$exports, "makeRGBColorVector", () => $d9c300243b26048c$export$c34911d131211e50);
$parcel$export($d9c300243b26048c$exports, "makeRGBAColorVector", () => $d9c300243b26048c$export$417d2e62b20fc31a);
|
|
493
|
+
// Minimal severity-filtered console logger.
class $45f5cb22d200196a$export$efa9a398d6368992 {
    /**
     * @param name tag included in every formatted message
     * @param level minimum severity that will be emitted
     *        ('debug' | 'info' | 'warn' | 'error' | 'none'); defaults to 'warn'
     */
    constructor(name, level = 'warn'){
        this.name = name;
        this.level = level;
    }
    /** Change the minimum severity at runtime. */
    setLevel(level) {
        this.level = level;
    }
    /** True when `level` is at or above this logger's configured threshold. */
    shouldLog(level) {
        const ordered = [
            'debug',
            'info',
            'warn',
            'error',
            'none',
        ];
        return ordered.indexOf(level) >= ordered.indexOf(this.level);
    }
    /** Prefix `message` with an ISO timestamp, the logger name, and the severity. */
    formatMessage(level, message) {
        const stamp = new Date().toISOString();
        return `[${stamp}] [${this.name}] [${level.toUpperCase()}] ${message}`;
    }
    debug(message, ...rest) {
        if (!this.shouldLog('debug')) return;
        // biome-ignore lint/suspicious/noConsole: This is a logger
        console.debug(this.formatMessage('debug', message), ...rest);
    }
    /** Dump an object via console.dir at debug severity. */
    dir(obj, ...rest) {
        if (!this.shouldLog('debug')) return;
        // biome-ignore lint/suspicious/noConsole: This is a logger
        console.log(this.formatMessage('debug', 'See object below'), ...rest);
        // biome-ignore lint/suspicious/noConsole: This is a logger
        console.dir(obj);
    }
    info(message, ...rest) {
        if (!this.shouldLog('info')) return;
        // biome-ignore lint/suspicious/noConsole: This is a logger
        console.info(this.formatMessage('info', message), ...rest);
    }
    warn(message, ...rest) {
        if (!this.shouldLog('warn')) return;
        // biome-ignore lint/suspicious/noConsole: This is a logger
        console.warn(this.formatMessage('warn', message), ...rest);
    }
    error(message, ...rest) {
        if (!this.shouldLog('error')) return;
        // biome-ignore lint/suspicious/noConsole: This is a logger
        console.error(this.formatMessage('error', message), ...rest);
    }
}
|
|
540
|
+
// Shared default logger instance used by the helpers in this bundle.
const $45f5cb22d200196a$export$af88d00dbe7f521 = new $45f5cb22d200196a$export$efa9a398d6368992('default');

// Tests for optional #, then 3 or 6 hex digits
const $d9c300243b26048c$var$RGB_COLOR_REGEX = /^#?([0-9a-fA-F]{3}|[0-9a-fA-F]{6})$/;
// Tests for optional #, then 4 or 8 hex digits
const $d9c300243b26048c$var$RGBA_COLOR_REGEX = /^#?([0-9a-fA-F]{4}|[0-9a-fA-F]{8})$/;
|
|
547
|
+
// Parse a '#rgb' / '#rrggbb' hex color (leading '#' optional) into an [r, g, b]
// vector (published as makeRGBColorVector). When `normalized` each channel is
// divided by 255 to land in [0, 1]; otherwise raw 0-255 values are returned.
// Invalid input logs a warning and yields black.
function $d9c300243b26048c$export$c34911d131211e50(colorHashStr, normalized = true) {
    if (!colorHashStr || !$d9c300243b26048c$var$RGB_COLOR_REGEX.test(colorHashStr)) {
        (0, $45f5cb22d200196a$export$af88d00dbe7f521).warn('invalid color hash string; returning black color vector (0, 0, 0)');
        return [0, 0, 0];
    }
    // normalize to a '#'-prefixed form so component offsets are fixed
    const hex = colorHashStr.charAt(0) === '#' ? colorHashStr : `#${colorHashStr}`;
    // '#rgb' shorthand doubles each digit; '#rrggbb' reads byte pairs
    const short = hex.length === 4;
    const channel = (i)=>short ? hex.charAt(i + 1).repeat(2) : hex.slice(1 + i * 2, 3 + i * 2);
    const divisor = normalized ? 255 : 1;
    return [
        Number.parseInt(channel(0), 16) / divisor,
        Number.parseInt(channel(1), 16) / divisor,
        Number.parseInt(channel(2), 16) / divisor,
    ];
}
|
|
568
|
+
// Parse a '#rgba' / '#rrggbbaa' hex color (leading '#' optional) into an
// [r, g, b, a] vector (published as makeRGBAColorVector). A 3/6-digit RGB
// string is accepted too and gets a fully-opaque alpha appended. When
// `normalized` each channel is divided by 255; otherwise raw 0-255 values are
// returned. Invalid input logs a warning and yields transparent black.
function $d9c300243b26048c$export$417d2e62b20fc31a(colorHashStr, normalized = true) {
    if (!colorHashStr) {
        (0, $45f5cb22d200196a$export$af88d00dbe7f521).warn('invalid color hash string; returning transparent black color vector (0, 0, 0, 0)');
        return [0, 0, 0, 0];
    }
    if ($d9c300243b26048c$var$RGBA_COLOR_REGEX.test(colorHashStr)) {
        const hex = colorHashStr.charAt(0) === '#' ? colorHashStr : `#${colorHashStr}`;
        // '#rgba' shorthand doubles each digit; '#rrggbbaa' reads byte pairs
        const short = hex.length === 5;
        const channel = (i)=>short ? hex.charAt(i + 1).repeat(2) : hex.slice(1 + i * 2, 3 + i * 2);
        const divisor = normalized ? 255 : 1;
        return [
            Number.parseInt(channel(0), 16) / divisor,
            Number.parseInt(channel(1), 16) / divisor,
            Number.parseInt(channel(2), 16) / divisor,
            Number.parseInt(channel(3), 16) / divisor,
        ];
    }
    if ($d9c300243b26048c$var$RGB_COLOR_REGEX.test(colorHashStr)) {
        // opaque fallback: reuse the RGB parser and append a full alpha channel
        const rgb = $d9c300243b26048c$export$c34911d131211e50(colorHashStr, normalized);
        return [...rgb, normalized ? 1.0 : 255.0];
    }
    (0, $45f5cb22d200196a$export$af88d00dbe7f521).warn('invalid color hash string; returning transparent black color vector (0, 0, 0, 0)');
    return [0, 0, 0, 0];
}
|
|
608
|
+
|
|
609
|
+
|
|
610
|
+
|
|
611
|
+
// Begin a long-running "frame": drain a queue of visible items, fetch each item's
// data through the shared cache, and render items as their data arrives.
// Progress/outcome is reported via lifecycleCallback ('begin' | 'progress' | 'finished' | 'error' | 'cancelled').
// Returns a handle: { cancelFrame(reason) } which aborts all in-flight work.
function $b44d3a2c3b719253$export$67b5d71f9b7f0efa(config) {
    const { maximumInflightAsyncTasks: maximumInflightAsyncTasks, queueTimeBudgetMS: queueTimeBudgetMS, queueProcessingIntervalMS: queueProcessingIntervalMS, cacheKeyForRequest: cacheKeyForRequest, settings: settings, items: items, mutableCache: mutableCache, lifecycleCallback: lifecycleCallback, renderItem: renderItem, requestsForItem: requestsForItem, isPrepared: isPrepared, dataset: dataset } = config;
    const abort = new AbortController();
    // copy the items: the queue is consumed (shift) / cleared (splice) as work proceeds
    const queue = [
        ...items
    ];
    // one cancel callback per task handed to the cache; invoked on error or cancelFrame
    const taskCancelCallbacks = [];
    // only render once the fetched data passes isPrepared, and only while the frame is live
    const renderItemWrapper = (itemToRender, maybe)=>{
        if (isPrepared(maybe) && !abort.signal.aborted) renderItem(itemToRender, dataset, settings, maybe);
    };
    const reportStatus = (event, synchronous)=>{
        // after an abort, only the 'cancelled' event may still be delivered
        if (event.status !== 'cancelled' && abort.signal.aborted) return;
        // we want to report our status, however the flow of events can be confusing -
        // our callers anticipate an asynchronous (long running) frame to be started,
        // but there are scenarios in which the whole thing is completely synchronous
        // callers who are scheduling things may be surprised that their frame finished
        // before the code that handles it appears to start. thus, we make the entire lifecycle callback
        // system async, to prevent surprises.
        if (synchronous) lifecycleCallback(event);
        else Promise.resolve().then(()=>lifecycleCallback(event));
    };
    const doWorkOnQueue = (intervalId, synchronous = false)=>{
        // try our best to cleanup if something goes awry
        const startWorkTime = performance.now();
        const cleanupOnError = (err)=>{
            // clear the queue and the staging area (inFlight)
            for (const cancelMe of taskCancelCallbacks)cancelMe();
            queue.splice(0, queue.length);
            // stop fetching
            abort.abort(err);
            clearInterval(intervalId);
            // pass the error somewhere better:
            reportStatus({
                status: 'error',
                error: err
            }, synchronous);
        };
        // admit work while the cache has capacity (at least one task is always allowed)
        while(mutableCache.getNumPendingTasks() < Math.max(maximumInflightAsyncTasks, 1)){
            // We know there are items in the queue because of the check above, so we assert the type exist
            const itemToRender = queue.shift();
            if (!itemToRender) break;
            const toCacheKey = (rq)=>cacheKeyForRequest(itemToRender, rq, dataset, settings);
            try {
                // cacheAndUse returns a cancel callback, or undefined if the data was already cached
                const result = mutableCache.cacheAndUse(requestsForItem(itemToRender, dataset, settings, abort.signal), (0, $tmQ84$lodashpartial)(renderItemWrapper, itemToRender), toCacheKey, ()=>reportStatus({
                    status: 'progress',
                    dataset: dataset,
                    renderedItems: [
                        itemToRender
                    ]
                }, synchronous));
                if (result !== undefined) // put this cancel callback in a list where we can invoke if something goes wrong
                // note that it is harmless to cancel a task that was completed
                taskCancelCallbacks.push(result);
            } catch (err) {
                cleanupOnError(err);
            }
            // stay within our per-tick time budget so we don't starve the main thread
            if (performance.now() - startWorkTime > queueTimeBudgetMS) break;
        }
        if (queue.length < 1) {
            // we cant add anything to the in-flight staging area, the final task
            // is already in flight
            if (mutableCache.getNumPendingTasks() < 1) {
                // we do want to wait for that last in-flight task to actually finish though:
                clearInterval(intervalId);
                reportStatus({
                    status: 'finished'
                }, synchronous);
            }
            return;
        }
    };
    reportStatus({
        status: 'begin'
    }, true);
    // keep pumping the queue on an interval; the first pump happens immediately below
    const interval = setInterval(()=>doWorkOnQueue(interval), queueProcessingIntervalMS);
    if (queue.length > 0) doWorkOnQueue(interval, false);
    return {
        cancelFrame: (reason)=>{
            abort.abort(new DOMException(reason, 'AbortError'));
            for (const cancelMe of taskCancelCallbacks)cancelMe();
            clearInterval(interval);
            reportStatus({
                status: 'cancelled'
            }, true);
        }
    };
}
|
|
698
|
+
// Fallback frame-scheduling knobs, applied when the caller supplies no overrides.
// All durations are in milliseconds; queueTimeBudgetMS bounds work done per tick.
const $b44d3a2c3b719253$var$defaultQueueOptions = {
    queueProcessingIntervalMS: 33,
    maximumInflightAsyncTasks: 5,
    queueTimeBudgetMS: 16
};
|
|
703
|
+
// Adapt a declarative renderer description into a callable that kicks off an
// async frame: (data, settings, callback, target, cache) => frame handle.
// Queue options fall back to the module defaults when not provided.
function $b44d3a2c3b719253$export$c1ba6e228d66b8cd(renderer, queueOptions) {
    const { renderItem, isPrepared, cacheKey, fetchItemContent, getVisibleItems } = renderer;
    return (data, settings, callback, target, cache) => {
        // assemble the frame configuration: defaults first, then caller overrides
        const frameConfig = {
            ...$b44d3a2c3b719253$var$defaultQueueOptions,
            ...queueOptions,
            cacheKeyForRequest: cacheKey,
            dataset: data,
            isPrepared,
            items: getVisibleItems(data, settings),
            lifecycleCallback: callback,
            mutableCache: cache,
            // bind the render target up-front so the frame runner needn't know about it
            renderItem: (0, $tmQ84$lodashpartial)(renderItem, target),
            requestsForItem: fetchItemContent,
            settings,
        };
        return $b44d3a2c3b719253$export$67b5d71f9b7f0efa(frameConfig);
    };
}
|
|
722
|
+
|
|
723
|
+
|
|
724
|
+
|
|
725
|
+
|
|
726
|
+
|
|
727
|
+
|
|
728
|
+
// Release the GPU resource owned by a cache entry.
// Entries tagged 'texture' or 'buffer' have their resource destroyed;
// any other tag is silently ignored (same as the original switch).
function $813009929e026506$var$destroyer(item) {
    if (item.type === 'texture') {
        item.texture.destroy();
    } else if (item.type === 'buffer') {
        item.buffer.destroy();
    }
}
|
|
738
|
+
// return the size, in bytes, of some cached entity!
|
|
739
|
+
// Report the size (in bytes) of a cached entity.
// Entries without a byte count (or reporting 0) are charged a minimum of 1 byte,
// so every entry contributes to the cache's usage accounting.
function $813009929e026506$var$sizeOf(item) {
    const reported = item.bytes ?? 0;
    return Math.max(1, reported);
}
|
|
742
|
+
const $813009929e026506$var$oneMB = 1048576; // 1 MiB = 2^20 bytes
|
|
743
|
+
// RenderServer: owns a single offscreen WebGL (regl) context and a byte-budgeted
// resource cache, and renders on behalf of multiple "client" canvases.
// Rendering goes to a private framebuffer per client; finished pixels are read
// back and composited into the client's 2d canvas on the next animation frame.
class $813009929e026506$export$b349b3e66cf46ed5 {
    // maxSize: [w, h] cap on per-client framebuffer resolution.
    // extensions: WebGL extension names forwarded to regl.
    // cacheByteLimit: resource-cache budget (default 2000 MiB).
    // Throws if a WebGL context cannot be created.
    constructor(maxSize, extensions, cacheByteLimit = 2000 * $813009929e026506$var$oneMB){
        this.canvas = new OffscreenCanvas(10, 10); // we always render to private buffers, so we dont need a real resolution here...
        this.clients = new Map();
        this.maxSize = maxSize;
        this.refreshRequested = false;
        const gl = this.canvas.getContext('webgl', {
            alpha: true,
            preserveDrawingBuffer: false,
            antialias: true,
            premultipliedAlpha: true
        });
        if (!gl) throw new Error('WebGL not supported!');
        const regl = (0, $tmQ84$regl)({
            gl: gl,
            extensions: extensions
        });
        this.regl = regl;
        // cache of GPU resources; destroyer frees them, sizeOf charges their bytes
        this.cache = new (0, $9fd71f854f4df31b$export$6893cac74ecce081)($813009929e026506$var$destroyer, $813009929e026506$var$sizeOf, cacheByteLimit);
    }
    // Read pixels out of the client's private framebuffer and hand them, as
    // ImageData, to the client's composition callback (frameInfo.updateRequested).
    copyToClient(frameInfo, client) {
        // note: compared transferImageFromBitmap(transferImageToBitmap()), drawImage(canvas) and a few other variations
        // this method seems to have the most consistent performance across various browsers
        const { resolution: resolution, copyBuffer: copyBuffer, image: image, updateRequested: updateRequested } = frameInfo;
        const [width, height] = resolution;
        if (updateRequested) try {
            // read directly from the framebuffer to which we render:
            this.regl?.read({
                framebuffer: image,
                x: 0,
                y: 0,
                width: width,
                height: height,
                data: new Uint8Array(copyBuffer)
            });
            // then put those bytes in the client canvas:
            const ctx = client.getContext('2d');
            if (!ctx) {
                (0, $45f5cb22d200196a$export$af88d00dbe7f521).error('Could not get 2d context');
                throw new Error('Could not get 2d context');
            }
            const img = new ImageData(new Uint8ClampedArray(copyBuffer), width, height);
            updateRequested(ctx, img);
        } catch {
            // best-effort: the client canvas may have been unmounted between frames
            (0, $45f5cb22d200196a$export$af88d00dbe7f521).error('error - we tried to copy to a client buffer, but maybe it got unmounted? that can happen, its ok');
        }
    }
    // rAF handler: flush all pending client compositions, then clear the flag.
    onAnimationFrame() {
        if (this.refreshRequested) {
            for (const [client, entry] of this.clients)if (entry.updateRequested) {
                this.copyToClient(entry, client);
                // mark our progress:
                entry.updateRequested = null;
            }
            this.refreshRequested = false;
        }
    }
    // Record a composition request for a client; at most one composition is
    // pending per client, and at most one rAF is scheduled at a time.
    requestComposition(client, composite) {
        const c = this.clients.get(client);
        if (c) {
            if (!c.updateRequested) {
                c.updateRequested = composite;
                if (!this.refreshRequested) {
                    this.refreshRequested = true;
                    // as of 2023, requestAnimationFrame should be generally available globally in both workers* and a window
                    // if this becomes an issue, we can have our caller pass requestAnimationFrame in to the constructor
                    requestAnimationFrame(()=>this.onAnimationFrame());
                }
            }
        }
    }
    // Mark a client's frame as complete (its handle is dropped, not cancelled).
    clientFrameFinished(client) {
        const C = this.clients.get(client);
        if (C) C.frame = null;
    }
    // Cancel any in-progress frame for the client and forget its resources entry.
    // NOTE(review): the entry's framebuffer is not destroyed here - presumably it
    // is reclaimed elsewhere or on resize; confirm before relying on this.
    destroyClient(client) {
        const C = this.clients.get(client);
        if (C) C.frame?.cancelFrame();
        this.clients.delete(client);
    }
    // Return {resolution, copyBuffer, image} for the client, reusing the previous
    // allocation when the client canvas has not been resized.
    prepareToRenderToClient(client) {
        const previousEntry = this.clients.get(client);
        if (previousEntry) {
            previousEntry.updateRequested = null;
            // the client is mutable - so every time we get a request, we have to check to see if it got resized
            if (client.width !== previousEntry.resolution[0] || client.height !== previousEntry.resolution[1]) // handle resizing by deleting previously allocated resources:
            previousEntry.image.destroy();
            else // use the existing resources!
            return previousEntry;
        }
        // clamp the framebuffer to the server's configured maximum resolution
        const resolution = (0, $tmQ84$Vec2).min(this.maxSize, [
            client.width,
            client.height
        ]);
        // 4 bytes per pixel (RGBA8) readback staging buffer
        const copyBuffer = new ArrayBuffer(resolution[0] * resolution[1] * 4);
        const image = this.regl.framebuffer(...resolution);
        return {
            resolution: resolution,
            copyBuffer: copyBuffer,
            image: image
        };
    }
    // Start rendering for a client: cancel any prior frame, (re)allocate its
    // private framebuffer, and invoke renderFn(image, cache, lifecycleCallback).
    // The supplied callback is wrapped ("hijacked") so events carry the render
    // target and a server handle for requesting composition to the client canvas.
    beginRendering(renderFn, callback, client) {
        if (this.regl) {
            const clientFrame = this.clients.get(client);
            if (clientFrame?.frame) {
                // a frame is already running for this client: cancel it and wipe its pixels
                clientFrame.frame.cancelFrame();
                this.regl.clear({
                    framebuffer: clientFrame.image,
                    color: [
                        0,
                        0,
                        0,
                        0
                    ],
                    depth: 1
                });
                clientFrame.updateRequested = null;
            }
            const { image: image, resolution: resolution, copyBuffer: copyBuffer } = this.prepareToRenderToClient(client);
            const hijack = (e)=>{
                callback({
                    ...e,
                    target: image,
                    server: {
                        copyToClient: (compose)=>{
                            this.requestComposition(client, compose);
                        }
                    }
                });
                if (e.status === 'finished' || e.status === 'cancelled') this.clientFrameFinished(client);
            };
            this.clients.set(client, {
                frame: null,
                image: image,
                copyBuffer: copyBuffer,
                resolution: resolution,
                updateRequested: null
            });
            // this is worded rather awkwardly, because sometimes the frameLifecycle object returned by renderFn() represents
            // a frame that is already finished!
            // this is a good thing for performance, but potentially confusing - so we do our book-keeping before we actually start rendering:
            const aboutToStart = this.clients.get(client); // this is the record we just put into the clients map - TS just wants to be sure it really exists:
            if (aboutToStart) {
                const frame = renderFn(image, this.cache, hijack);
                if (frame) aboutToStart.frame = {
                    cancelFrame: (reason)=>{
                        frame.cancelFrame(reason);
                        aboutToStart.updateRequested = null;
                    }
                };
            }
        }
    }
}
|
|
898
|
+
|
|
899
|
+
|
|
900
|
+
|
|
901
|
+
// TS note: T extends {} is saying "T may not be null"
|
|
902
|
+
// TS note: T extends {} is saying "T may not be null"
// A fixed-capacity, array-backed binary MIN-heap. Scores are computed on demand
// by the supplied scoreSystem function (they are NOT stored), so the heap's
// owner must call rebuild() whenever scores change out from under it.
class $21d10294d81a9556$export$457b0b5893773add {
    // size: backing-array capacity. scoreSystem: item -> number (lower pops first).
    constructor(size, scoreSystem){
        this.score = scoreSystem;
        this.entries = new Array(size);
        this.curSize = 0;
    }
    // index arithmetic for an implicit binary tree stored in a flat array
    parent(i) {
        return Math.floor((i - 1) / 2);
    }
    left(i) {
        return 2 * i + 1; // always an integer; no floor needed
    }
    right(i) {
        return 2 * i + 2;
    }
    swap(i, j) {
        const tmp = this.entries[i];
        this.entries[i] = this.entries[j];
        this.entries[j] = tmp;
    }
    // Insert t, sifting it up to restore the heap property.
    // An optional score overrides scoreSystem(t) for this insertion.
    // FIX: the override score is now used at EVERY level of the sift-up.
    // Previously it was only honored for the first comparison; after one swap the
    // loop recomputed this.score(t), so a passed-in score was silently ignored.
    addItem(t, score) {
        this.entries[this.curSize] = t;
        this.curSize += 1;
        let i = this.curSize - 1;
        const myScore = score ?? this.score(t);
        // the element being sifted is always t, so myScore is its score throughout
        while(i !== 0 && this.score(this.entries[this.parent(i)]) > myScore){
            this.swap(i, this.parent(i));
            i = this.parent(i);
        }
    }
    // Sift down from index i (classic min-heapify), recursing into the subtree
    // that received the displaced element.
    heapify(i) {
        const l = this.left(i);
        const r = this.right(i);
        let smallest = i;
        if (l < this.curSize && this.score(this.entries[l]) < this.score(this.entries[i])) smallest = l;
        if (r < this.curSize && this.score(this.entries[r]) < this.score(this.entries[smallest])) smallest = r;
        if (smallest !== i) {
            this.swap(i, smallest);
            this.heapify(smallest);
        }
    }
    // Re-establish the heap property after scores change; optionally swap in a
    // new scoring function.
    rebuild(score) {
        this.score = score ?? this.score;
        for(let i = this.curSize - 1; i >= 0; i -= 1)this.heapify(i);
    }
    // Remove and return the element currently at index i: bubble it to the root
    // (ignoring scores - this is a deletion, not an ordering operation), then pop.
    popItem(i) {
        let cursor = i;
        while(cursor !== 0){
            this.swap(cursor, this.parent(cursor));
            cursor = this.parent(cursor);
        }
        return this.popMinItem();
    }
    // Return (without removing) the lowest-scored element, or null when empty.
    peekMinItem() {
        if (this.curSize < 1) return null;
        return this.entries[0];
    }
    // Remove and return the lowest-scored element, or null when empty.
    popMinItem() {
        if (this.curSize < 1) return null;
        const item = this.entries[0];
        this.curSize -= 1;
        if (this.curSize > 0) {
            // move the last element to the root and sift it down
            this.entries[0] = this.entries[this.curSize];
            this.heapify(0);
        }
        return item;
    }
    // Like popMinItem, but also reports the popped element's (current) score.
    popMinItemWithScore() {
        const t = this.popMinItem();
        return t === null ? null : {
            item: t,
            score: this.score(t)
        };
    }
}
|
|
977
|
+
|
|
978
|
+
|
|
979
|
+
|
|
980
|
+
// A keyed min-priority queue: items are deduplicated by keyFn(item), keys live
// in a min-heap ordered by scoreSystem(key), and the full items are held in a
// side Map so popping a key yields its original item.
class $8256f9d6cfc7946b$export$a953a8a2f1eb4f9a {
    // size: heap capacity. scoreSystem: key -> number. keyFn: item -> key.
    constructor(size, scoreSystem, keyFn){
        this.heap = new (0, $21d10294d81a9556$export$457b0b5893773add)(size, scoreSystem);
        this.keyFn = keyFn;
        this.items = new Map();
    }
    // Insert an item; a second item with the same key is silently ignored.
    addItem(item) {
        const key = this.keyFn(item);
        if (this.items.has(key)) return;
        this.heap.addItem(key);
        this.items.set(key, item);
    }
    // Remove and return the lowest-scored item, or null when empty.
    popMinItem() {
        const minKey = this.heap.popMinItem();
        if (minKey === null) return null;
        const found = this.items.get(minKey) ?? null;
        this.items.delete(minKey);
        return found;
    }
    // Like popMinItem, but also report the popped key's score.
    popMinItemWithScore() {
        const top = this.heap.popMinItemWithScore();
        if (top === null) return null;
        const key = top.item;
        const stored = this.items.get(key);
        this.items.delete(key);
        // falsy stored values are reported as null (matches original truthiness check)
        return stored ? { item: stored, score: top.score } : null;
    }
    // Membership by derived key.
    has(item) {
        return this.items.has(this.keyFn(item));
    }
    // Re-heapify after scores change; optionally install a new scoring function.
    rebuild(score) {
        this.heap.rebuild(score);
    }
    // Membership by raw key (no keyFn applied).
    hasItemWithKey(k) {
        return this.items.has(k);
    }
}
|
|
1025
|
+
|
|
1026
|
+
|
|
1027
|
+
// Flip the sign of a scoring function, turning min-heap ordering into
// max-heap ordering (and vice versa).
function $a5339d2187c48f0b$var$negate(fn) {
    return (k) => {
        const value = fn(k);
        return -value;
    };
}
|
|
1030
|
+
// PriorityCache: a byte-budgeted key/value cache. Each entry's importance is
// given by a (mutable, stateful) score function; when the byte budget would be
// exceeded, the LOWEST-scoring entries are evicted first.
class $a5339d2187c48f0b$export$e473ef38ab8a510a {
    // items with lower scores will be evicted before items with high scores
    // store: Map-like backing store (has/get/set/delete). score: key -> number.
    // limitInBytes: total byte budget for all entries.
    constructor(store, score, limitInBytes){
        this.store = store;
        // min-heap of keys: popMinItem() yields the next eviction candidate
        this.evictPriority = new (0, $21d10294d81a9556$export$457b0b5893773add)(5000, score);
        this.limit = limitInBytes;
        this.used = 0;
    }
    // add {key:item} to the cache - return false (and fail) if the key is already present
    // may evict items to make room
    // return true on success
    put(key, item) {
        if (this.store.has(key)) return false;
        const size = this.sanitizedSize(item);
        // make room first, so the new entry itself is not an eviction candidate
        if (this.used + size > this.limit) this.evictUntil(Math.max(0, this.limit - size));
        this.evictPriority.addItem(key);
        this.store.set(key, item);
        this.used += size;
        return true;
    }
    // Clamp an entry's self-reported size to a finite, non-negative byte count;
    // entries without sizeInBytes() are charged 0 bytes.
    sanitizedSize(item) {
        const givenSize = item.sizeInBytes?.() ?? 0;
        const size = Number.isFinite(givenSize) ? Math.max(0, givenSize) : 0;
        return size;
    }
    // it is expected that the score function is not "pure" -
    // it has a closure over data that changes over time, representing changing priorities
    // thus - the owner of this cache has a responsibility to notify the cache when significant
    // changes in priority occur!
    reprioritize(score) {
        this.evictPriority.rebuild(score);
    }
    get(key) {
        return this.store.get(key);
    }
    has(key) {
        return this.store.has(key);
    }
    // same as has(); NOTE(review): presumably kept as a separate name for
    // interface compatibility with subclasses - confirm before removing
    cached(key) {
        return this.store.has(key);
    }
    isFull() {
        return this.used >= this.limit;
    }
    // Evict the single lowest-priority entry, invoking its optional destroy()
    // hook and releasing its bytes. Returns false only when nothing is left.
    evictLowestPriority() {
        const evictMe = this.evictPriority.popMinItem();
        if (evictMe === null) return false;
        const data = this.store.get(evictMe);
        if (data) {
            data.destroy?.();
            this.store.delete(evictMe);
            const size = this.sanitizedSize(data);
            this.used -= size;
        }
        return true;
    }
    // Evict lowest-priority entries until usage drops to targetUsedBytes (or the
    // cache is empty).
    evictUntil(targetUsedBytes) {
        while(this.used > targetUsedBytes){
            if (!this.evictLowestPriority()) // note: evictLowestPriority mutates this.used
            return; // all items evicted...
        }
    }
}
|
|
1093
|
+
// AsyncPriorityCache: a PriorityCache that also owns the fetching of entries.
// Pending fetch requests wait in a keyed priority queue; at most maxFetches run
// concurrently, highest-importance first. Arriving data is put() into the cache
// and announced via the onDataArrived callback.
class $a5339d2187c48f0b$export$1506b2eb9d10c110 extends $a5339d2187c48f0b$export$e473ef38ab8a510a {
    // items with lower scores will be evicted before items with high scores
    constructor(store, score, limitInBytes, maxFetches, onDataArrived){
        super(store, score, limitInBytes);
        // the fetch queue pops its MINIMUM - negate(score) makes the
        // highest-importance request pop first
        this.fetchPriority = new (0, $8256f9d6cfc7946b$export$a953a8a2f1eb4f9a)(5000, $a5339d2187c48f0b$var$negate(score), (pr)=>pr.key);
        this.pendingFetches = new Map();
        this.MAX_INFLIGHT_FETCHES = maxFetches;
        this.notify = onDataArrived;
    }
    // Queue a fetch for key; returns true if newly queued, false if the key is
    // already cached, pending, or queued.
    enqueue(key, fetcher) {
        // enqueue the item, if we dont already have it, or are not already asking
        if (!this.has(key) && !this.pendingFetches.has(key) && !this.fetchPriority.hasItemWithKey(key)) {
            this.fetchPriority.addItem({
                key: key,
                fetch: fetcher
            });
            this.fetchToLimit();
            return true;
        }
        return false;
    }
    // Start a single fetch: track its AbortController, cache the result on
    // success, notify success/failure, and top the queue back up when it settles.
    beginFetch({ key: key, fetch: fetch }) {
        const abort = new AbortController();
        this.pendingFetches.set(key, abort);
        return fetch(abort.signal).then((resource)=>{
            this.put(key, resource);
            this.notify?.(key, {
                status: 'success'
            });
        }).catch((reason)=>{
            // failures (including aborts) are reported to the owner, not rethrown
            this.notify?.(key, {
                status: 'failure',
                reason: reason
            });
        }).finally(()=>{
            this.pendingFetches.delete(key);
            this.fetchToLimit();
        });
    }
    // Start queued fetches until MAX_INFLIGHT_FETCHES are pending.
    // Requests whose (negated) score is 0 - i.e. nobody wants them anymore -
    // are dropped and do not consume a concurrency slot.
    fetchToLimit() {
        let toFetch = Math.max(0, this.MAX_INFLIGHT_FETCHES - this.pendingFetches.size);
        for(let i = 0; i < toFetch; i++){
            const fetchMe = this.fetchPriority.popMinItemWithScore();
            if (fetchMe !== null) {
                if (fetchMe.score !== 0) this.beginFetch(fetchMe.item);
                else toFetch += 1; // increasing the loop limit inside the loop... a bit sketchy
            } else break;
        }
    }
    // it is expected that the score function is not "pure" -
    // it has a closure over data that changes over time, representing changing priorities
    // thus - the owner of this cache has a responsibility to notify the cache when significant
    // changes in priority occur!
    reprioritize(score) {
        super.reprioritize(score);
        this.fetchPriority.rebuild($a5339d2187c48f0b$var$negate(score));
        // abort in-flight fetches that nobody cares about anymore (importance 0)
        for (const [key, abort] of this.pendingFetches)if (score(key) === 0) {
            abort.abort();
            this.pendingFetches.delete(key);
        }
    }
    // True when the key is cached, queued for fetching, or currently in flight.
    cachedOrPending(key) {
        return this.cached(key) || this.fetchPriority.hasItemWithKey(key) || this.pendingFetches.has(key);
    }
}
|
|
1158
|
+
|
|
1159
|
+
|
|
1160
|
+
|
|
1161
|
+
// Build a priority record for a set of items: every cache key produced by
// spec.cacheKeys(item) is credited `priority`; keys shared by multiple items
// accumulate (priority is added once per referencing item).
function $7005d0cc41e13b37$export$4fc7dd386ddfb8ee(spec, items, priority) {
    const tally = {};
    for (const item of items) {
        Object.values(spec.cacheKeys(item)).forEach((cacheKey) => {
            tally[cacheKey] = (tally[cacheKey] ?? 0) + priority;
        });
    }
    return tally;
}
|
|
1169
|
+
// Merge two priority records by summing per-key values; keys present in only
// one record keep their value unchanged.
function $7005d0cc41e13b37$export$9f8c9675958f3ef5(A, B) {
    const merged = {};
    for (const key of Object.keys(A)) {
        merged[key] = A[key] + (B[key] ?? 0);
    }
    // keys unique to B were not covered by the pass above
    for (const key of Object.keys(B)) {
        if (!(key in A)) {
            merged[key] = B[key];
        }
    }
    return merged;
}
|
|
1176
|
+
// Walk the difference between two priority records, calling update(key, delta)
// for every key whose value changed (keys absent from `current` count as 0, so
// they yield a negative delta; keys new in `current` yield their full value).
function $7005d0cc41e13b37$export$69e02f681d6313ec(old, current, update) {
    for (const key of Object.keys(old)) {
        const now = current[key] ?? 0;
        update(key, now - old[key]);
    }
    for (const key of Object.keys(current)) {
        // keys shared with `old` were already diffed above
        if (!(key in old)) update(key, current[key]);
    }
}
|
|
1185
|
+
|
|
1186
|
+
|
|
1187
|
+
|
|
1188
|
+
// Thin wrapper over Object.entries (the stronger typing it carried in the
// TypeScript source is erased in this JS build).
function $e8f9d79d878cff91$var$entries(t) {
    const pairs = Object.entries(t);
    return pairs;
}
|
|
1191
|
+
// Return a new record with the same keys as r and every value mapped through fn.
// Rewritten with Object.fromEntries: the previous reduce spread a growing
// accumulator object on every step (accidentally O(n^2) in the number of keys).
function $e8f9d79d878cff91$var$mapFields(r, fn) {
    return Object.fromEntries(Object.entries(r).map(([key, value]) => [key, fn(value)]));
}
|
|
1197
|
+
// SharedPriorityCache: one AsyncPriorityCache shared by multiple registered
// clients. Each client declares low/high-priority item sets; per-cache-key
// importance is the reference-counted sum of all clients' declarations
// (low = +1, high = +2), and drives both fetch ordering and eviction.
class $e8f9d79d878cff91$export$14c2e5c3395e07aa {
    // store: Map-like backing store. limitInBytes: cache byte budget.
    // max_concurrent_fetches: in-flight fetch cap (default 10).
    constructor(store, limitInBytes, max_concurrent_fetches = 10){
        // importance: cacheKey -> summed priority across all clients
        this.importance = {};
        this.clients = {};
        this.cache = new (0, $a5339d2187c48f0b$export$1506b2eb9d10c110)(store, (ck)=>this.importance[ck] ?? 0, limitInBytes, max_concurrent_fetches, (ck, result)=>this.onCacheEntryArrived(ck, result));
    }
    // Register a client described by spec {cacheKeys, fetch, isValue, onDataArrived}.
    // Returns the client's handle: {get, has, setPriorities, unsubscribeFromCache}.
    registerClient(spec) {
        const id = (0, $tmQ84$lodashuniqueId)('client');
        this.clients[id] = {
            priorities: {},
            notify: spec.onDataArrived
        };
        // queue fetches for every cache key of every given item
        const enqueuePriorities = (spec, items)=>{
            for (const item of items){
                const keys = spec.cacheKeys(item);
                Object.entries(spec.fetch(item)).forEach(([sk, fetcher])=>{
                    const ck = keys[sk];
                    if (ck !== undefined) this.cache.enqueue(ck, fetcher);
                });
            }
        };
        // replace this client's declared priorities: diff against its previous
        // declaration, adjust the shared importance counts, then re-enqueue fetches
        const setPriorities = (low, high)=>{
            const client = this.clients[id];
            if (!client) return; // the client can hold onto a reference to this interface, even after they call unregister - this prevents a crash in that scenario
            const updated = (0, $7005d0cc41e13b37$export$9f8c9675958f3ef5)((0, $7005d0cc41e13b37$export$4fc7dd386ddfb8ee)(spec, low, 1), (0, $7005d0cc41e13b37$export$4fc7dd386ddfb8ee)(spec, high, 2));
            let changed = 0;
            (0, $7005d0cc41e13b37$export$69e02f681d6313ec)(client.priorities, updated, (cacheKey, delta)=>{
                changed += delta !== 0 ? 1 : 0;
                this.updateImportance(cacheKey, delta);
            });
            if (changed === 0) // nothing changed at all - no need to reprioritize, nor enqueue requests
            return;
            this.cache.reprioritize((ck)=>this.importance[ck] ?? 0);
            client.priorities = updated;
            // note: many keys may already be cached, or requested - its harmless to re-request them.
            // there is obviously some overhead, but in testing it seems fine
            enqueuePriorities(spec, high);
            enqueuePriorities(spec, low);
        };
        return {
            // assemble the full value for k from its per-field cache entries;
            // undefined until every field is present and spec.isValue accepts it
            get: (k)=>{
                const keys = spec.cacheKeys(k);
                const v = $e8f9d79d878cff91$var$mapFields(keys, (k)=>this.cache.get(k));
                return spec.isValue(v) ? v : undefined;
            },
            // true only when EVERY cache key of k is present
            has: (k)=>{
                const atLeastOneMissing = Object.values(spec.cacheKeys(k)).some((ck)=>!this.cache.has(ck));
                return !atLeastOneMissing;
            },
            unsubscribeFromCache: ()=>{
                setPriorities([], []); // mark that this client has no priorities, which will decrement the counts for all
                // priorities it used to have
                delete this.clients[id];
            },
            setPriorities: setPriorities
        };
    }
    // Fan a fetch result out to every client that currently wants this key.
    onCacheEntryArrived(key, result) {
        // find any clients that want this...
        // and notify them
        for (const cid of Object.keys(this.clients)){
            const client = this.clients[cid];
            if ((client.priorities[key] ?? 0) > 0) client.notify?.(key, result);
        }
    }
    // Adjust a key's reference-counted importance, clamping at zero.
    updateImportance(key, delta) {
        this.importance[key] = Math.max(0, (this.importance[key] ?? 0) + delta);
    }
}
|
|
1266
|
+
|
|
1267
|
+
|
|
1268
|
+
|
|
1269
|
+
|
|
1270
|
+
export {$e03391db1cf02961$export$8fbec945df051b82 as beginLongRunningFrame, $9fd71f854f4df31b$export$6893cac74ecce081 as AsyncDataCache, $3901da79d5cf59d6$export$4fff234e4eb6b2b6 as ReglLayer2D, $b44d3a2c3b719253$export$67b5d71f9b7f0efa as beginFrame, $b44d3a2c3b719253$export$c1ba6e228d66b8cd as buildAsyncRenderer, $813009929e026506$export$b349b3e66cf46ed5 as RenderServer, $45f5cb22d200196a$export$efa9a398d6368992 as Logger, $45f5cb22d200196a$export$af88d00dbe7f521 as logger, $a5339d2187c48f0b$export$e473ef38ab8a510a as PriorityCache, $a5339d2187c48f0b$export$1506b2eb9d10c110 as AsyncPriorityCache, $e8f9d79d878cff91$export$14c2e5c3395e07aa as SharedPriorityCache, $201a17180a2597e3$export$e3caf6d91837252d as swapBuffers, $57582d9b80d114ba$export$81e4895a4d9a9b60 as createHttpsResource, $57582d9b80d114ba$export$497a706b08a344d6 as createS3Resource, $57582d9b80d114ba$export$ba8acc09153a66ca as isS3Resource, $57582d9b80d114ba$export$5eb78fd524c7545f as isHttpsResource, $57582d9b80d114ba$export$d2c227b690e4eb28 as getResourceUrl, $3d0d2b554afda09c$export$f6bd031016c938ca as VisError, $d9c300243b26048c$export$c34911d131211e50 as makeRGBColorVector, $d9c300243b26048c$export$417d2e62b20fc31a as makeRGBAColorVector};
|
|
1271
|
+
//# sourceMappingURL=module.js.map
|