@wovin/core 0.0.14 → 0.0.16
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/dist/applog/applog-helpers.d.ts +13 -13
- package/dist/applog/applog-helpers.d.ts.map +1 -1
- package/dist/applog/applog-utils.d.ts +5 -5
- package/dist/applog/applog-utils.d.ts.map +1 -1
- package/dist/applog/datom-types.d.ts +4 -3
- package/dist/applog/datom-types.d.ts.map +1 -1
- package/dist/applog.min.js +4 -4
- package/dist/{chunk-3HSO7QDN.min.js → chunk-FIOA3FZW.min.js} +5 -5
- package/dist/{chunk-6EBQRFQC.min.js → chunk-HYMC7W6S.min.js} +4 -6
- package/dist/{chunk-6EBQRFQC.min.js.map → chunk-HYMC7W6S.min.js.map} +1 -1
- package/dist/{chunk-R3MVKMV2.min.js → chunk-JEOQUHTK.min.js} +206 -1010
- package/dist/chunk-JEOQUHTK.min.js.map +1 -0
- package/dist/{chunk-DF3UOPRV.min.js → chunk-KEHU7HGZ.min.js} +388 -257
- package/dist/chunk-KEHU7HGZ.min.js.map +1 -0
- package/dist/{chunk-JEYJIYNF.min.js → chunk-NPCVLBCM.min.js} +6450 -6341
- package/dist/chunk-NPCVLBCM.min.js.map +1 -0
- package/dist/{chunk-DPU6MD7E.min.js → chunk-OBMVNVJR.min.js} +53 -37
- package/dist/chunk-OBMVNVJR.min.js.map +1 -0
- package/dist/{chunk-KRQZ6V4Y.min.js → chunk-PHITDXZT.min.js} +1 -1
- package/dist/{chunk-CDGK7IKH.min.js → chunk-TEQ4SIKN.min.js} +6 -7
- package/dist/{chunk-CDGK7IKH.min.js.map → chunk-TEQ4SIKN.min.js.map} +1 -1
- package/dist/index.min.js +16 -8
- package/dist/ipfs/car.d.ts +13 -13
- package/dist/ipfs/car.d.ts.map +1 -1
- package/dist/ipfs/ipfs-utils.d.ts +2 -2
- package/dist/ipfs/ipfs-utils.d.ts.map +1 -1
- package/dist/ipfs.min.js +6 -6
- package/dist/mobx/mobx-utils.d.ts +4 -3
- package/dist/mobx/mobx-utils.d.ts.map +1 -1
- package/dist/mobx.min.js +2 -2
- package/dist/pubsub/pub-pull.d.ts +1 -1
- package/dist/pubsub/pub-pull.d.ts.map +1 -1
- package/dist/pubsub/pub-push.d.ts +9 -9
- package/dist/pubsub/pubsub-types.d.ts.map +1 -1
- package/dist/pubsub.min.js +7 -7
- package/dist/query/basic.d.ts +9 -35
- package/dist/query/basic.d.ts.map +1 -1
- package/dist/query/matchers.d.ts.map +1 -1
- package/dist/query/situations.d.ts +2 -0
- package/dist/query/situations.d.ts.map +1 -0
- package/dist/query/types.d.ts +3 -2
- package/dist/query/types.d.ts.map +1 -1
- package/dist/query.min.js +12 -6
- package/dist/thread/basic.d.ts.map +1 -1
- package/dist/thread/filters.d.ts +2 -2
- package/dist/thread/filters.d.ts.map +1 -1
- package/dist/thread/utils.d.ts +1 -0
- package/dist/thread/utils.d.ts.map +1 -1
- package/dist/thread/writeable.d.ts +5 -0
- package/dist/thread/writeable.d.ts.map +1 -1
- package/dist/thread.min.js +6 -4
- package/dist/types/typescript-utils.d.ts +2 -2
- package/dist/types/typescript-utils.d.ts.map +1 -1
- package/dist/types.min.js +2 -2
- package/dist/utils.d.ts.map +1 -1
- package/dist/utils.min.js +1 -1
- package/package.json +13 -12
- package/dist/chunk-DF3UOPRV.min.js.map +0 -1
- package/dist/chunk-DPU6MD7E.min.js.map +0 -1
- package/dist/chunk-JEYJIYNF.min.js.map +0 -1
- package/dist/chunk-R3MVKMV2.min.js.map +0 -1
- /package/dist/{chunk-3HSO7QDN.min.js.map → chunk-FIOA3FZW.min.js.map} +0 -0
- /package/dist/{chunk-KRQZ6V4Y.min.js.map → chunk-PHITDXZT.min.js.map} +0 -0
@@ -1,11 +1,11 @@
 import {
-  E,
   StaticThread,
   ThreadInMemory,
   applogThreadComparer,
   computedFnDeepCompare,
   computedStructuralComparer,
   createDebugName,
+  g,
   isInitEvent,
   joinThreads,
   observableArrayMap,
@@ -16,7 +16,7 @@ import {
   rollingMapper,
   sortApplogsByTs,
   wrapper_default
-} from "./chunk-
+} from "./chunk-NPCVLBCM.min.js";
 import {
   action,
   autorun,
@@ -27,7 +27,7 @@ import {
   onBecomeObserved,
   toJS,
   untracked
-} from "./chunk-
+} from "./chunk-KEHU7HGZ.min.js";

 // src/query/types.ts
 var QueryNode = class {
@@ -44,8 +44,7 @@ var QueryNode = class {
     return this.variables;
   }
   get threadOfTrail() {
-    if (!this.prevNode)
-      return this.logsOfThisNode;
+    if (!this.prevNode) return this.logsOfThisNode;
     return joinThreads([
       this.logsOfThisNode,
       this.prevNode.threadOfTrail
@@ -83,8 +82,13 @@ var QueryResult = class {
       observableArrayMap(() => this.nodes.map(({ logsOfThisNode: thread }) => thread), { name: "QueryResult.leafNodeThread" })
     );
   }
+  get leafNodeLogSet() {
+    return observableArrayMap(() => this.nodes.map(({ logsOfThisNode: thread }) => thread.applogs), { name: "QueryResult.leafNodeLogSet" });
+  }
   get leafNodeLogs() {
-    return observableArrayMap(() => this.nodes.
+    return observableArrayMap(() => this.nodes.flatMap(({ logsOfThisNode: thread }) => thread.applogs), {
+      name: "QueryResult.leafNodeLogs"
+    });
   }
   // get trailThreads() {
   //   return observableArrayMap(() => this.nodes.map(({ trailThread }) => trailThread))
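Aside (illustration, not part of the diff): the new leafNodeLogSet getter keeps one array of applogs per result node via map, while the reworked leafNodeLogs flattens every node's applogs into a single list via flatMap. A minimal TypeScript sketch of that distinction, using a hypothetical Applog shape:

type Applog = { en: string; at: string; vl: unknown };
const nodes: { applogs: Applog[] }[] = [
  { applogs: [{ en: "e1", at: "name", vl: "a" }] },
  { applogs: [{ en: "e2", at: "name", vl: "b" }, { en: "e2", at: "done", vl: true }] },
];
// leafNodeLogSet-style: grouped, one array per node
const logSet: Applog[][] = nodes.map(({ applogs }) => applogs);
// leafNodeLogs-style: one flat array of all leaf logs
const logs: Applog[] = nodes.flatMap(({ applogs }) => applogs);
console.log(logSet.length, logs.length); // 2 3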
@@ -101,7 +105,8 @@ var QueryResult = class {
 };

 // src/query/basic.ts
-var { WARN, LOG, DEBUG, VERBOSE, ERROR } =
+var { WARN, LOG, DEBUG, VERBOSE, ERROR } = g.setup(g.INFO, { prefix: "[q]" });
+var globalQueryTimeoutTime = null;
 var lastWriteWins = computedFnDeepCompare("lastWriteWins", function lastWriteWins2(thread, { inverseToOnlyReturnFirstLogs, tolerateAlreadyFiltered } = {}) {
   VERBOSE(`lastWriteWins${inverseToOnlyReturnFirstLogs ? ".inversed" : ""} < ${thread.nameAndSizeUntracked} > initializing`);
   if (thread.filters.includes("lastWriteWins")) {
@@ -138,8 +143,7 @@ var lastWriteWins = computedFnDeepCompare("lastWriteWins", function lastWriteWin
       tsCheck = log.ts;
       const existing = rollingMap.get(key);
       if (!existing || (inverseToOnlyReturnFirstLogs ? existing.ts > log.ts : existing.ts < log.ts)) {
-        if (existing && !isInitial)
-          toRemove.push(existing);
+        if (existing && !isInitial) toRemove.push(existing);
         toAdd.push(log);
         rollingMap.set(key, log);
       }
@@ -157,8 +161,7 @@ var lastWriteWins = computedFnDeepCompare("lastWriteWins", function lastWriteWin
   return mappedThread;
 }, { equals: applogThreadComparer, argsDebugName: (thread) => createDebugName({ caller: "lastWriteWins", thread }) });
 var withoutDeleted = computedFnDeepCompare("withoutDeleted", function withoutDeleted2(thread) {
-  if (VERBOSE.isEnabled)
-    VERBOSE(`withoutDeleted<${thread.nameAndSizeUntracked}>`);
+  if (VERBOSE.isEnabled) VERBOSE(`withoutDeleted<${thread.nameAndSizeUntracked}>`);
   if (thread.filters.includes("withoutDeleted")) {
     throw ERROR(`thread already filtered withoutDeleted:`, thread.filters, { name: thread.name });
   }
@@ -186,6 +189,7 @@ var withoutDeleted = computedFnDeepCompare("withoutDeleted", function withoutDel
   return rollingFilter(thread, { "!en": deleted }, { name: `withoutDeleted`, extraFilterName: "withoutDeleted" });
 }, { equals: applogThreadComparer });
 var query = computedFnDeepCompare("query", function query2(threadOrLogs, patternOrPatterns, startVariables = {}, opts = {}) {
+  throwOnTimeout();
   const thread = threadFromMaybeArray(threadOrLogs);
   DEBUG(`query<${thread.nameAndSizeUntracked}>:`, patternOrPatterns);
   const patterns = Array.isArray(patternOrPatterns) ? patternOrPatterns : [patternOrPatterns];
@@ -206,8 +210,7 @@ var query = computedFnDeepCompare("query", function query2(threadOrLogs, pattern
 });
 var queryStep = computedFnDeepCompare("queryStep", function queryStep2(thread, nodeSet, pattern, opts = {}) {
   DEBUG(`queryStep<${thread.nameAndSizeUntracked}> with`, nodeSet?.untrackedSize ?? "all", "nodes, pattern:", pattern);
-  if (!Object.entries(pattern).length)
-    throw new Error(`Pattern is empty`);
+  if (!Object.entries(pattern).length) throw new Error(`Pattern is empty`);
   function doQuery(node) {
     const [patternWithResolvedVars, variablesToFill] = resolveOrRemoveVariables(pattern, node?.variables ?? {});
     VERBOSE(`[queryStep.doQuery] patternWithoutVars: `, patternWithResolvedVars);
@@ -244,8 +247,7 @@ var queryStep = computedFnDeepCompare("queryStep", function queryStep2(thread, n
        );
      });
    }, { name: createDebugName({ caller: "doQuery.mapNodes", thread: applogsMatchingStatic, pattern }) });
-    if (VERBOSE.isEnabled)
-      autorun(() => VERBOSE(`[queryStep.doQuery] resultNodes:`, [...resultNodes]));
+    if (VERBOSE.isEnabled) autorun(() => VERBOSE(`[queryStep.doQuery] resultNodes:`, [...resultNodes]));
    if (opts.debug) {
      LOG(
        `[queryStep] step result:`,
@@ -271,8 +273,7 @@ var queryStep = computedFnDeepCompare("queryStep", function queryStep2(thread, n
    },
    { name: createDebugName({ caller: "queryStep", thread, pattern }) }
  );
-  if (VERBOSE.isEnabled)
-    autorun(() => VERBOSE(`[queryStep] observableResultNodes:`, [...observableResultNodes]));
+  if (VERBOSE.isEnabled) autorun(() => VERBOSE(`[queryStep] observableResultNodes:`, [...observableResultNodes]));
   return new QueryResult(observableResultNodes);
 }, { equals: queryNodesComparer, argsDebugName: (thread, _nodes, pattern) => createDebugName({ caller: "queryStep", thread, pattern }) });
 var queryNot = computedFnDeepCompare("queryNot", function queryNot2(thread, startNodes, patternOrPatterns, opts = {}) {
@@ -280,8 +281,7 @@ var queryNot = computedFnDeepCompare("queryNot", function queryNot2(thread, star
   DEBUG(`queryNot<${thread.nameAndSizeUntracked}> from: ${nodes.length} nodes`);
   const patterns = Array.isArray(patternOrPatterns) ? patternOrPatterns : [patternOrPatterns];
   for (const pattern of patterns) {
-    if (!Object.entries(patternOrPatterns).length)
-      throw new Error(`Pattern is empty`);
+    if (!Object.entries(patternOrPatterns).length) throw new Error(`Pattern is empty`);
     nodes = nodes.filter(function innerNodeFilter({
       /* applogs, */
       variables
@@ -291,8 +291,7 @@ var queryNot = computedFnDeepCompare("queryNot", function queryNot2(thread, star
       const newApplogs = rollingFilter(thread, patternWithResolvedVars);
       VERBOSE(`[queryNot] step node:`, variables, " =>", newApplogs.size, "applogs");
       VERBOSE.isDisabled || VERBOSE(`[queryNot] step node:`, variables, " => empty?", untracked(() => newApplogs.applogs));
-      if (opts.debug)
-        LOG(`[queryNot] node result:`, variables, "=>", newApplogs.applogs);
+      if (opts.debug) LOG(`[queryNot] node result:`, variables, "=>", newApplogs.applogs);
       return newApplogs.isEmpty;
     });
   }
@@ -301,8 +300,10 @@ var queryNot = computedFnDeepCompare("queryNot", function queryNot2(thread, star
 var filterAndMap = computedFnDeepCompare("filterAndMap", function filterAndMap2(thread, pattern, mapper) {
   DEBUG(`filterAndMap<${thread.nameAndSizeUntracked}>`, pattern);
   const filtered = rollingFilter(thread, pattern);
-  VERBOSE
-
+  if (VERBOSE.isEnabled) {
+    VERBOSE(`[filterAndMap] filtered:`, filtered.untrackedSize);
+    autorun(() => VERBOSE(`[filterAndMap] filtered:`, filtered.applogs));
+  }
   const name = createDebugName({ thread, pattern, caller: "filterAndMap" });
   const mapped = observableArrayMap(() => mapThreadWith(filtered, mapper), { name });
   VERBOSE.isDisabled || autorun(() => VERBOSE(`[filterAndMap] mapped:`, mapped));
@@ -344,8 +345,7 @@ var agentsOfThread = computedFnDeepCompare("agentsOfThread", function agentsOfTh
     }
     for (const log of !isInitEvent(event) && event.removed || []) {
       const prev = mapped.get(log.ag);
-      if (!prev || prev < 1)
-        throw ERROR(`[agentsOfThread] number is now negative`, { log, event, mapped, prev });
+      if (!prev || prev < 1) throw ERROR(`[agentsOfThread] number is now negative`, { log, event, mapped, prev });
       mapped.set(log.ag, prev - 1);
     }
     LOG(`agentsOfThread<${thread.nameAndSizeUntracked}> processed event`, { event, mapped });
@@ -381,13 +381,10 @@ var entityOverlapCount2 = computedFnDeepCompare(
 var querySingle = computedFnDeepCompare("querySingle", function querySingle2(threadOrLogs, patternOrPatterns, variables = {}) {
   const result = query(threadOrLogs, patternOrPatterns, variables);
   return computed(() => {
-    if (result.isEmpty)
-
-    if (result.size > 1)
-      throw ERROR(`[querySingle] got`, result.size, `results:`, result);
+    if (result.isEmpty) return null;
+    if (result.size > 1) throw ERROR(`[querySingle] got`, result.size, `results:`, result);
     const logsOfThisNode = result.nodes[0].logsOfThisNode;
-    if (logsOfThisNode.size != 1)
-      throw ERROR(`[querySingle] single result, but got`, logsOfThisNode.size, `logs:`, logsOfThisNode.applogs);
+    if (logsOfThisNode.size != 1) throw ERROR(`[querySingle] single result, but got`, logsOfThisNode.size, `logs:`, logsOfThisNode.applogs);
     return logsOfThisNode.applogs[0];
   });
 }, {
@@ -400,8 +397,7 @@ var querySingleAndMap = computedFnDeepCompare(
     const resultBox = querySingle(threadOrLogs, patternOrPatterns, variables);
     return computed(() => {
       const log = resultBox.get();
-      if (!log)
-        return void 0;
+      if (!log) return void 0;
       if (typeof mapDef === "string") {
         return log[mapDef];
       } else {
@@ -459,12 +455,30 @@ function prefixAttrs(prefix, attrs) {
 function prefixAt(prefix, attr) {
   return `${prefix}/${attr}`;
 }
-function threadFromMaybeArray(threadOrLogs) {
+function threadFromMaybeArray(threadOrLogs, name) {
   if (!Array.isArray(threadOrLogs)) {
     return threadOrLogs;
   }
-  return ThreadInMemory.fromArray(threadOrLogs, `threadFromArray[${threadOrLogs.length}]`, true);
+  return ThreadInMemory.fromArray(threadOrLogs, name || `threadFromArray[${threadOrLogs.length}]`, true);
+}
+function withTimeout(timeoutMilliseconds, func) {
+  if (globalQueryTimeoutTime) throw ERROR(`Nested timeout not supported`);
+  globalQueryTimeoutTime = performance.now() + timeoutMilliseconds;
+  const result = func();
+  globalQueryTimeoutTime = null;
+  return result;
 }
+function throwOnTimeout() {
+  if (globalQueryTimeoutTime == null) return;
+  if (performance.now() >= globalQueryTimeoutTime) {
+    throw new QueryTimeoutError(globalQueryTimeoutTime);
+  }
+}
+var QueryTimeoutError = class extends Error {
+  constructor(message) {
+    super(message);
+  }
+};

 export {
   QueryNode,
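The hunk above introduces a cooperative query deadline: withTimeout sets a module-level deadline before running a function, and throwOnTimeout (now called at the top of query) aborts with a QueryTimeoutError once that deadline has passed. The following is a self-contained TypeScript sketch of the same mechanism, mirroring the names from the diff but not copying the package's source; the try/finally reset is an addition in this sketch:

class QueryTimeoutError extends Error {}

let globalQueryTimeoutTime: number | null = null;

// Guard to place at the top of hot functions (the diffed query() calls this on entry).
function throwOnTimeout(): void {
  if (globalQueryTimeoutTime == null) return;
  if (performance.now() >= globalQueryTimeoutTime) {
    throw new QueryTimeoutError(`deadline ${globalQueryTimeoutTime} exceeded`);
  }
}

// Runs func() under a deadline; nested deadlines are rejected, as in the diff.
function withTimeout<R>(timeoutMilliseconds: number, func: () => R): R {
  if (globalQueryTimeoutTime) throw new Error("Nested timeout not supported");
  globalQueryTimeoutTime = performance.now() + timeoutMilliseconds;
  try {
    return func();
  } finally {
    globalQueryTimeoutTime = null;
  }
}

// Example: a long computation that checks the deadline on every step.
function expensiveWork(steps: number): number {
  let acc = 0;
  for (let i = 0; i < steps; i++) {
    throwOnTimeout();
    acc += i;
  }
  return acc;
}

try {
  console.log(withTimeout(50, () => expensiveWork(1e9)));
} catch (e) {
  console.log(e instanceof QueryTimeoutError ? "query timed out" : e);
}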
@@ -489,6 +503,8 @@ export {
   startsWith,
   prefixAttrs,
   prefixAt,
-  threadFromMaybeArray
+  threadFromMaybeArray,
+  withTimeout,
+  throwOnTimeout
 };
-//# sourceMappingURL=chunk-
+//# sourceMappingURL=chunk-OBMVNVJR.min.js.map
@@ -0,0 +1 @@
+
{"version":3,"sources":["../src/query/types.ts","../src/query/basic.ts"],"sourcesContent":["import { computed, makeObservable, untracked } from 'mobx'\nimport { joinThreads } from '../applog/applog-helpers'\nimport { SearchContext } from '../applog/datom-types'\nimport { observableArrayMap } from '../mobx/mobx-utils'\nimport type { Thread } from '../thread/basic'\nimport { ReadonlyObservableArray } from '../types/typescript-utils'\n\nexport class QueryNode {\n\tconstructor(\n\t\treadonly logsOfThisNode: Thread,\n\t\treadonly variables: SearchContext,\n\t\treadonly prevNode: QueryNode | null = null,\n\t) {\n\t\tmakeObservable(this, {\n\t\t\tthreadOfTrail: computed, // ? intuitively only put the ones here that felt expensive to compute (join)\n\t\t})\n\t}\n\tget record() {\n\t\treturn this.variables // alias for end-user consumption\n\t}\n\n\tget threadOfTrail() {\n\t\tif (!this.prevNode) return this.logsOfThisNode\n\t\treturn joinThreads([\n\t\t\tthis.logsOfThisNode,\n\t\t\tthis.prevNode.threadOfTrail,\n\t\t])\n\t}\n\tget trailLogs() {\n\t\treturn this.threadOfTrail.applogs\n\t}\n}\n/**\n * The result of a query (-step)\n */\nexport class QueryResult {\n\tconstructor(\n\t\tpublic nodes: ReadonlyObservableArray<QueryNode>,\n\t) {\n\t\tmakeObservable(this, {\n\t\t\tthreadOfAllTrails: computed, // ? intuitively only put the ones here that felt expensive to compute (join)\n\t\t\tsize: computed, // ... or cheap to cache\n\t\t\tisEmpty: computed,\n\t\t})\n\t}\n\n\tget size() {\n\t\treturn this.records.length\n\t}\n\tget isEmpty() {\n\t\treturn this.records.length === 0\n\t}\n\tget untrackedSize() {\n\t\treturn untracked(() => this.records.length)\n\t}\n\n\tget records() {\n\t\treturn observableArrayMap(() => this.nodes.map(({ variables }) => variables), { name: 'QueryResult.records' })\n\t}\n\tget leafNodeThread() {\n\t\treturn joinThreads(\n\t\t\tobservableArrayMap(() => this.nodes.map(({ logsOfThisNode: thread }) => thread), { name: 'QueryResult.leafNodeThread' }),\n\t\t)\n\t}\n\tget leafNodeLogSet() {\n\t\treturn observableArrayMap(() => this.nodes.map(({ logsOfThisNode: thread }) => thread.applogs), { name: 'QueryResult.leafNodeLogSet' })\n\t}\n\tget leafNodeLogs() {\n\t\treturn observableArrayMap(() => this.nodes.flatMap(({ logsOfThisNode: thread }) => thread.applogs), {\n\t\t\tname: 'QueryResult.leafNodeLogs',\n\t\t})\n\t}\n\t// get trailThreads() {\n\t// \treturn observableArrayMap(() => this.nodes.map(({ trailThread }) => trailThread))\n\t// }\n\tget threadOfAllTrails() {\n\t\treturn joinThreads(observableArrayMap(() => this.nodes.map(node => node.threadOfTrail), { name: 'QueryResult.threadOfAllTrails' }))\n\t}\n\tget thread() {\n\t\treturn this.threadOfAllTrails // alias\n\t}\n\tget allApplogs() {\n\t\treturn this.threadOfAllTrails.applogs // mostly for easy logging\n\t}\n}\n","import { AgentHash, Applog, ApplogValue, DatalogQueryPattern, EntityID, SearchContext, ValueOrMatcher } from '../applog/datom-types'\n\nimport { Logger } from 'besonders-logger'\nimport { action, autorun, comparer, computed, makeObservable, observable, onBecomeObserved, toJS, untracked } from 'mobx'\n\nimport { isEmpty } from 'lodash-es'\nimport stringify from 'safe-stable-stringify'\nimport { resolveOrRemoveVariables, sortApplogsByTs } from '../applog/applog-utils'\nimport {\n\tapplogThreadComparer,\n\tcomputedFnDeepCompare,\n\tcomputedStructuralComparer,\n\tcreateDebugName,\n\tobservableArrayMap,\n\tobservableSetMap,\n\tqueryNodesComparer,\n} from '../mobx/mobx-utils'\nimport { isInitEvent, StaticThread, Thread, 
ThreadEvent } from '../thread/basic'\nimport { rollingFilter, rollingMapper, ThreadOnlyCurrent } from '../thread/filters'\nimport { MappedThread } from '../thread/mapped'\nimport { ThreadInMemory } from '../thread/writeable'\nimport { QueryNode, QueryResult } from './types'\n\nconst { WARN, LOG, DEBUG, VERBOSE, ERROR } = Logger.setup(Logger.INFO, { prefix: '[q]' }) // eslint-disable-line no-unused-vars\n\nlet globalQueryTimeoutTime = null\n\n// util.inspect.defaultOptions.depth = 5;\n\n// export interface QueryExecutorArguments {\n// db: Thread\n// // applogs: AppLog[]\n// nodes: SearchContextWithLog[]\n// }\n// export interface QueryExecutorResult {\n// // applogs: AppLog[]\n// nodes: SearchContextWithLog[]\n// }\n// export type QueryExecutor = (args: QueryExecutorArguments) => QueryExecutorResult\n\n/////////////\n// QUERIES //\n/////////////\n\n/**\n * Keep only the latest logs for each en&at (= last write wins)\n */\nexport const lastWriteWins = computedFnDeepCompare('lastWriteWins', function lastWriteWins(\n\tthread: Thread,\n\t{ inverseToOnlyReturnFirstLogs, tolerateAlreadyFiltered }: {\n\t\tinverseToOnlyReturnFirstLogs?: boolean\n\t\ttolerateAlreadyFiltered?: boolean\n\t} = {},\n): ThreadOnlyCurrent {\n\tVERBOSE(`lastWriteWins${inverseToOnlyReturnFirstLogs ? '.inversed' : ''} < ${thread.nameAndSizeUntracked} > initializing`)\n\t// if (thread.name.includes('lastWriteWins')) WARN(`thread already contains lastWriteWins:`, thread.name)\n\tif (thread.filters.includes('lastWriteWins')) {\n\t\tif (tolerateAlreadyFiltered) {\n\t\t\tDEBUG(`[lastWriteWins] already filtered, but tolerateAlreadyFiltered=true, so returning`)\n\t\t\treturn thread as ThreadOnlyCurrent\n\t\t}\n\t\tthrow ERROR(`thread already filtered lastWriteWins:`, thread.filters, { name: thread.name })\n\t}\n\n\tlet rollingMap: Map<string, Applog>\n\tconst mappedThread = rollingMapper(thread, function lastWriteWinsMapper(event, sourceThread) {\n\t\tconst isInitial = isInitEvent(event)\n\n\t\tlet newLogs: readonly Applog[]\n\t\tconst toAdd = [] as Applog[]\n\t\tconst toRemove = isInitial ? null : [] as Applog[]\n\t\tif (isInitial) {\n\t\t\trollingMap = new Map()\n\t\t\tnewLogs = event.init\n\t\t} else {\n\t\t\tnewLogs = event.added\n\t\t}\n\n\t\tlet tsCheck: string\n\t\tfor (\n\t\t\tlet i = inverseToOnlyReturnFirstLogs ? 0 : newLogs.length - 1;\n\t\t\tinverseToOnlyReturnFirstLogs ? i < newLogs.length : i >= 0;\n\t\t\tinverseToOnlyReturnFirstLogs ? i++ : i--\n\t\t) {\n\t\t\tconst log = newLogs[i]\n\t\t\tconst key = log.en + '|' + log.at // stringify([log.en, log.at]) - less efficient\n\n\t\t\t// TODO: use isoDateStrCompare ?\n\t\t\tif (tsCheck && (inverseToOnlyReturnFirstLogs ? tsCheck > log.ts : tsCheck < log.ts)) {\n\t\t\t\tthrow ERROR(`lastWriteWins.mapper logs not sorted:`, tsCheck, inverseToOnlyReturnFirstLogs ? '>' : '<', log.ts, {\n\t\t\t\t\tlog,\n\t\t\t\t\ti,\n\t\t\t\t\tnewLogs,\n\t\t\t\t\tinverseToOnlyReturnFirstLogs,\n\t\t\t\t})\n\t\t\t}\n\t\t\ttsCheck = log.ts\n\n\t\t\tconst existing = rollingMap.get(key)\n\t\t\tif (!existing || (inverseToOnlyReturnFirstLogs ? (existing.ts > log.ts) : (existing.ts < log.ts))) {\n\t\t\t\tif (existing && !isInitial) toRemove.push(existing)\n\t\t\t\ttoAdd.push(log)\n\t\t\t\trollingMap.set(key, log)\n\t\t\t}\n\t\t}\n\t\tsortApplogsByTs(toAdd) // HACK: find logical solution\n\t\tVERBOSE.isDisabled ||\n\t\t\tVERBOSE(\n\t\t\t\t`lastWriteWins${inverseToOnlyReturnFirstLogs ? 
'.inversed' : ''}<${thread.nameAndSizeUntracked}> mapped event`,\n\t\t\t\tisInitial ?\n\t\t\t\t\t{ ...Object.fromEntries(Object.entries(event).map(([k, v]) => [k, v?.length])), toAdd: toAdd.length, toRemove } :\n\t\t\t\t\t{ ...event, toAdd, toRemove },\n\t\t\t)\n\t\treturn isInitial ?\n\t\t\t{ init: toAdd }\n\t\t\t: { added: toAdd, removed: toRemove }\n\t}, { name: `lastWriteWins${inverseToOnlyReturnFirstLogs ? '.inversed' : ''}`, extraFilterName: 'lastWriteWins' })\n\tVERBOSE.isDisabled || autorun(() => {\n\t\tVERBOSE(`lastWriteWins<${thread.nameAndSizeUntracked}> filtered down to`, mappedThread.applogs.length) // using applogs.length, as size might not change, but we still want a log\n\t})\n\treturn mappedThread as ThreadOnlyCurrent\n\t// const filtered = observableArrayMap(() => {\n\t// VERBOSE(`lastWriteWins thread deps:`, getDependencyTree(thread.applogs), thread)\n\t// thread.applogs.forEach(applog => {\n\t// const key = stringify([applog.en, applog.at])\n\t// const existing = mapped.get(key)\n\t// if (!existing || existing.ts < applog.ts)\n\t// mapped.set(key, applog)\n\t// })\n\t// VERBOSE(`[lastWriteWins] mapped:`, mapped.size)\n\t// return Array.from(mapped.values())\n\t// }, { name: obsArrMapName })\n\t// VERBOSE(`lastWriteWins deps of filteredArr:`, getDependencyTree(filtered))\n\t// return new MappedThread(thread, filtered, `${thread.name} | lastWriteWins`)\n}, { equals: applogThreadComparer, argsDebugName: (thread) => createDebugName({ caller: 'lastWriteWins', thread }) })\n\n/**\n * Remove all applogs for entities that have an applog: { at: `isDeleted`, val: true }\n * ! WARNING: If not based on lastWriteWins, it will not respect un-deletions yet (isDeleted: false)\n */\nexport const withoutDeleted = computedFnDeepCompare('withoutDeleted', function withoutDeleted(\n\tthread: Thread,\n) {\n\tif (VERBOSE.isEnabled) VERBOSE(`withoutDeleted<${thread.nameAndSizeUntracked}>`)\n\t// if (thread.name.includes('withoutDeleted')) WARN(`thread already contains withoutDeleted:`, withoutDeleted)\n\tif (thread.filters.includes('withoutDeleted')) {\n\t\tthrow ERROR(`thread already filtered withoutDeleted:`, thread.filters, { name: thread.name })\n\t}\n\n\tconst deletionLogs = rollingFilter(\n\t\tthread, // TODO: handle un-delection\n\t\t{ at: ['isDeleted', 'relation/isDeleted', 'block/isDeleted'], vl: true },\n\t\t{ name: 'isDeleted' },\n\t)\n\tVERBOSE.isEnabled &&\n\t\tVERBOSE(\n\t\t\t`withoutDeleted<${thread.nameAndSizeUntracked}> deletionLogs:`,\n\t\t\tuntracked(function expensiveAssUntrackedVerboseFx() {\n\t\t\t\treturn [...deletionLogs.applogs]\n\t\t\t}),\n\t\t)\n\tconst obsArrMapName = createDebugName({ caller: 'allDeletedEntities', thread })\n\tconst deleted = observableSetMap(function observableSetMapForDeleted() {\n\t\treturn deletionLogs.map(log => log.en)\n\t}, { name: obsArrMapName })\n\t// if (VERBOSE.isEnabled) VERBOSE(`withoutDeleted<${db.nameAndSize}> deleted:`, untracked(() => [...deleted]))\n\tif (VERBOSE.isEnabled) {\n\t\tautorun(() => {\n\t\t\tVERBOSE(`withoutDeleted<${thread.nameAndSizeUntracked}> deleted:`, [...deleted])\n\t\t})\n\t}\n\n\treturn rollingFilter(thread, { '!en': deleted }, { name: `withoutDeleted`, extraFilterName: 'withoutDeleted' })\n}, { equals: applogThreadComparer })\n\n// export const filterStatic = computedFnDeepCompare('filterStatic', function filterStatic(\n// thread: Thread,\n// pattern: DatalogQueryPattern,\n// opts: { name?: string } = {},\n// ) {\n// VERBOSE(`filterStatic<${thread.nameAndSizeUntracked}>:`, pattern)\n// if 
(!Object.entries(pattern).length) throw new Error(`Pattern is empty`)\n// //TODO: deprecaate in favor of rollingFilter ?\n// return new Thread(thread, thread.applogs.filter(applog => {\n// for (const [field, patternValue] of Object.entries(pattern)) {\n// const applogValue = applog[field.startsWith('!') ? field.slice(1) : field]\n// if (!matchPartStatic(field, patternValue, applogValue))\n// return false\n// }\n// return true\n// }), `${thread.name} | ${opts.name || `filterStatic{${stringify(pattern)}}`}`)\n// }, { equals: applogThreadComparer })\n\nexport const query = computedFnDeepCompare('query', function query(\n\tthreadOrLogs: Thread | Applog[],\n\tpatternOrPatterns: DatalogQueryPattern | DatalogQueryPattern[],\n\tstartVariables: SearchContext = {},\n\topts: { debug?: boolean } = {},\n) {\n\tthrowOnTimeout()\n\tconst thread = threadFromMaybeArray(threadOrLogs)\n\tDEBUG(`query<${thread.nameAndSizeUntracked}>:`, patternOrPatterns)\n\tconst patterns = (Array.isArray(patternOrPatterns) ? patternOrPatterns : [patternOrPatterns]) as DatalogQueryPattern[]\n\n\tlet nodes: QueryResult | null\n\tif (patterns.length === 1) {\n\t\t// We are the first step, so start from scratch\n\t\tnodes = null\n\t} else {\n\t\t// Run the previous step(s) first (= recursion)\n\t\tconst pattersExceptLast = patterns.slice(0, -1)\n\t\t// recursively call this function to have partial queries cacheable\n\t\tnodes = query(thread, pattersExceptLast, startVariables, opts)\n\t}\n\tconst lastPattern = patterns[patterns.length - 1]\n\tconst stepResult = queryStep(thread, nodes, lastPattern, opts)\n\tVERBOSE.isDisabled || autorun(() => VERBOSE(`query result:`, toJS(stepResult)))\n\treturn stepResult\n}, {\n\tequals: queryNodesComparer,\n\targsDebugName: (thread, pattern, startVars) =>\n\t\tcreateDebugName({ caller: 'query', thread, args: startVars ? { pattern, startVars } : pattern }),\n})\n\nexport const queryStep = computedFnDeepCompare('queryStep', function queryStep(\n\tthread: Thread,\n\tnodeSet: QueryResult | null,\n\tpattern: DatalogQueryPattern,\n\t// variables: SearchContext = {},\n\topts: { debug?: boolean } = {},\n) {\n\tDEBUG(`queryStep<${thread.nameAndSizeUntracked}> with`, nodeSet?.untrackedSize ?? 'all', 'nodes, pattern:', pattern)\n\tif (!Object.entries(pattern).length) throw new Error(`Pattern is empty`)\n\n\tfunction doQuery(node: QueryNode | null) {\n\t\tconst [patternWithResolvedVars, variablesToFill] = resolveOrRemoveVariables(pattern, node?.variables ?? {})\n\t\tVERBOSE(`[queryStep.doQuery] patternWithoutVars: `, patternWithResolvedVars)\n\t\tconst applogsMatchingStatic = rollingFilter(thread, patternWithResolvedVars)\n\t\tconst varMapper = createObjMapper(variablesToFill)\n\t\tconst resultNodes = observableArrayMap(function queryStepDoStep() {\n\t\t\tconst newVarsAndTheirLog = applogsMatchingStatic.map(log => ({ log, vars: varMapper(log) }))\n\t\t\tif (VERBOSE.isEnabled) {\n\t\t\t\tVERBOSE(\n\t\t\t\t\t`[queryStep.doQuery] step node:`,\n\t\t\t\t\tnode?.variables,\n\t\t\t\t\t' =>',\n\t\t\t\t\tnewVarsAndTheirLog,\n\t\t\t\t\t'from:',\n\t\t\t\t\tuntracked(() => applogsMatchingStatic.applogs),\n\t\t\t\t)\n\t\t\t}\n\n\t\t\treturn newVarsAndTheirLog.map(({ log, vars }) => {\n\t\t\t\tconst nodeVars = Object.assign({}, node?.variables, vars)\n\t\t\t\treturn new QueryNode(\n\t\t\t\t\t// TODO: ? 
Make single result nodes reactive using MappedThread - only really relevant if a result had multiple logs (or different paths would lead to the same result?)\n\t\t\t\t\t// ThreadInMemory.fromArray(\n\t\t\t\t\tStaticThread.fromArray(\n\t\t\t\t\t\t[log],\n\t\t\t\t\t\tcreateDebugName({\n\t\t\t\t\t\t\tcaller: 'QueryNode',\n\t\t\t\t\t\t\tthread: applogsMatchingStatic,\n\t\t\t\t\t\t\tpattern: `${stringify(nodeVars)}@${stringify(patternWithResolvedVars)}`,\n\t\t\t\t\t\t}),\n\t\t\t\t\t\t// true,\n\t\t\t\t\t),\n\t\t\t\t\tnodeVars,\n\t\t\t\t\tnode,\n\t\t\t\t)\n\t\t\t})\n\t\t}, { name: createDebugName({ caller: 'doQuery.mapNodes', thread: applogsMatchingStatic, pattern }) })\n\t\tif (VERBOSE.isEnabled) autorun(() => VERBOSE(`[queryStep.doQuery] resultNodes:`, [...resultNodes]))\n\t\tif (opts.debug) {\n\t\t\tLOG(\n\t\t\t\t`[queryStep] step result:`,\n\t\t\t\tuntracked(() =>\n\t\t\t\t\tresultNodes.map(({ variables, logsOfThisNode: thread }) => ({\n\t\t\t\t\t\tvariables,\n\t\t\t\t\t\tthread, // : /* util.inspect( */ thread.applogs, /* , { showHidden: false, depth: null }) */\n\t\t\t\t\t}))\n\t\t\t\t),\n\t\t\t)\n\t\t}\n\t\treturn resultNodes\n\t}\n\tconst observableResultNodes = observableArrayMap(\n\t\tfunction queryStepGetResults() {\n\t\t\tDEBUG(`[queryStep] Running with ${nodeSet?.nodes?.length} input nodes:`)\n\t\t\tif (!nodeSet) {\n\t\t\t\t// first query step\n\t\t\t\treturn [...doQuery(null)] // HACK copy array bc otherwise the observableArrayMap doesn't seem to depend on the contents somehow\n\t\t\t} else {\n\t\t\t\t// subsequent query steps\n\t\t\t\treturn [...nodeSet.nodes.flatMap(doQuery)]\n\t\t\t}\n\t\t},\n\t\t{ name: createDebugName({ caller: 'queryStep', thread, pattern }) },\n\t)\n\n\tif (VERBOSE.isEnabled) autorun(() => VERBOSE(`[queryStep] observableResultNodes:`, [...observableResultNodes]))\n\treturn new QueryResult(observableResultNodes)\n}, { equals: queryNodesComparer, argsDebugName: (thread, _nodes, pattern) => createDebugName({ caller: 'queryStep', thread, pattern }) })\n\nexport const queryNot = computedFnDeepCompare('queryNot', function queryNot( // TODO: update old-style query\n\tthread: Thread,\n\tstartNodes: QueryResult,\n\tpatternOrPatterns: DatalogQueryPattern | DatalogQueryPattern[],\n\topts: { debug?: boolean } = {},\n) {\n\tlet nodes = startNodes.nodes\n\tDEBUG(`queryNot<${thread.nameAndSizeUntracked}> from: ${nodes.length} nodes`)\n\tconst patterns = (Array.isArray(patternOrPatterns) ? patternOrPatterns : [patternOrPatterns]) as DatalogQueryPattern[]\n\n\tfor (const pattern of patterns) {\n\t\tif (!Object.entries(patternOrPatterns).length) throw new Error(`Pattern is empty`)\n\t\tnodes = nodes.filter(function innerNodeFilter({ /* applogs, */ variables }) {\n\t\t\tconst [patternWithResolvedVars, _variablesToFill] = resolveOrRemoveVariables(pattern, variables ?? 
{})\n\t\t\tVERBOSE(`[queryNot] patternWithoutVars: `, patternWithResolvedVars)\n\t\t\tconst newApplogs = rollingFilter(thread, patternWithResolvedVars)\n\t\t\tVERBOSE(`[queryNot] step node:`, variables, ' =>', newApplogs.size, 'applogs')\n\t\t\tVERBOSE.isDisabled || VERBOSE(`[queryNot] step node:`, variables, ' => empty?', untracked(() => newApplogs.applogs))\n\n\t\t\tif (opts.debug) LOG(`[queryNot] node result:`, variables, '=>', newApplogs.applogs)\n\t\t\treturn newApplogs.isEmpty\n\t\t})\n\t}\n\treturn new QueryResult(nodes)\n}, { equals: queryNodesComparer, argsDebugName: (thread, nodes, pattern) => createDebugName({ caller: 'queryNot', thread, pattern }) })\n\n// export function or(queries: QueryExecutor[]) {\n// return tagged(\n// `or{${stringify(queries)} } `,\n// function orExecutor(args: QueryExecutorArguments) {\n// const { db, nodes: contexts } = args\n// VERBOSE('[or]', { queries, contexts })\n// let results = []\n// for (const query of queries) {\n// const res = query(args)\n// VERBOSE('[or] query', query, 'result =>', res)\n// results.push(...res.nodes)\n// }\n// return { contexts: results }\n// }\n// )\n// }\n\n// export type Tagged<T> = T & { tag: string }\n// export function tagged<T>(tag: string, thing: T): Tagged<T> {\n// const e = thing as (T & { tag: string })\n// e.tag = tag\n// return e\n// }\n\n//////////////////////\n// COMPOSED QUERIES //\n//////////////////////\n// createDebugName({ caller: 'useKidRelations' }, true)\nexport const filterAndMap = computedFnDeepCompare('filterAndMap', function filterAndMap<R>(\n\tthread: Thread,\n\tpattern: DatalogQueryPattern,\n\tmapper: (keyof Applog) | (Partial<{ [key in keyof Applog]: string }>) | ((applog: Applog) => R),\n) {\n\tDEBUG(`filterAndMap<${thread.nameAndSizeUntracked}>`, pattern)\n\n\tconst filtered = rollingFilter(thread, pattern)\n\tif (VERBOSE.isEnabled) {\n\t\tVERBOSE(`[filterAndMap] filtered:`, filtered.untrackedSize)\n\t\tautorun(() => VERBOSE(`[filterAndMap] filtered:`, filtered.applogs))\n\t}\n\n\tconst name = createDebugName({ thread, pattern, caller: 'filterAndMap' })\n\tconst mapped = observableArrayMap<ApplogValue | any>(() => mapThreadWith(filtered, mapper), { name }) // TODO typing:? 
Record<string, ApplogValue> ?\n\tVERBOSE.isDisabled || autorun(() => VERBOSE(`[filterAndMap] mapped:`, mapped))\n\treturn mapped\n}, { equals: comparer.structural, argsDebugName: (thread, pattern) => createDebugName({ caller: 'filterAndMap', thread, pattern }) })\n\nexport const queryAndMap = computedFnDeepCompare('queryAndMap', function queryAndMap<R>(\n\tthreadOrLogs: Thread | Applog[],\n\tpatternOrPatterns: Parameters<typeof query>[1],\n\tmapDef: string | (Partial<{ [key in keyof SearchContext]: string }>) | ((record: SearchContext) => R),\n\tvariables: SearchContext = {},\n) {\n\tconst thread = threadFromMaybeArray(threadOrLogs)\n\tDEBUG(`queryAndMap<${thread.nameAndSizeUntracked}>`, { patternOrPatterns, variables, map: mapDef })\n\tconst debugName = createDebugName({ thread, caller: 'queryAndMap' })\n\n\tconst queryResult = query(thread, patternOrPatterns)\n\tVERBOSE(`[queryAndMap] filtered count:`, queryResult.untrackedSize)\n\tconst mapped = observableArrayMap<ApplogValue | any>(\n\t\t() => mapQueryResultWith(queryResult, mapDef),\n\t\t{ name: debugName },\n\t)\n\tVERBOSE.isDisabled || autorun(() => VERBOSE(`[queryAndMap] result:`, toJS(mapped)))\n\treturn mapped\n}, { equals: comparer.structural, argsDebugName: (thread, pattern) => createDebugName({ caller: 'queryAndMap', thread, pattern }) })\n\nexport const queryEntity = computedFnDeepCompare('queryEntity', function queryEntity(\n\tthread: Thread,\n\tname: string,\n\tentityID: EntityID,\n\tattributes: readonly string[],\n) {\n\tDEBUG(`queryEntity<${thread.nameAndSizeUntracked}>`, entityID, name)\n\n\tconst filtered = rollingFilter(thread, { en: entityID, at: prefixAttrs(name, attributes) })\n\tVERBOSE(`queryEntity applogs:`, filtered.applogs)\n\treturn computed(() =>\n\t\tfiltered.isEmpty ? null : Object.fromEntries(\n\t\t\tfiltered.map(({ at, vl }) => [at.slice(name.length + 1), vl]),\n\t\t)\n\t)\n}, {\n\tequals: computedStructuralComparer,\n\targsDebugName: (thread, name, entityID) => createDebugName({ caller: 'queryEntity', thread, args: { name, entityID } }),\n})\n\nexport const agentsOfThread = computedFnDeepCompare('agentsOfThread', function agentsOfThread(\n\tthread: Thread,\n) {\n\tDEBUG(`agentsOfThread<${thread.nameAndSizeUntracked}>`)\n\n\tconst mapped = observable.map<string, number>()\n\tconst onEvent = action((event: ThreadEvent) => {\n\t\tfor (const log of (isInitEvent(event) ? event.init : event.added)) {\n\t\t\tconst prev = mapped.get(log.ag) ?? 
0\n\t\t\tmapped.set(log.ag, prev + 1)\n\t\t}\n\t\tfor (const log of (!isInitEvent(event) && event.removed || [])) {\n\t\t\tconst prev = mapped.get(log.ag)\n\t\t\tif (!prev || prev < 1) throw ERROR(`[agentsOfThread] number is now negative`, { log, event, mapped, prev })\n\t\t\tmapped.set(log.ag, prev - 1)\n\t\t}\n\t\tLOG(`agentsOfThread<${thread.nameAndSizeUntracked}> processed event`, { event, mapped })\n\t})\n\n\tonEvent({ init: thread.applogs })\n\tconst unsubscribe = thread.subscribe(onEvent)\n\tonBecomeObserved(mapped, unsubscribe)\n\n\treturn mapped\n})\n\nexport const entityOverlap = computedFnDeepCompare('entityOverlap', function entityOverlapCount(\n\tthreadA: Thread,\n\tthreadB: Thread,\n) {\n\tLOG(`entityOverlap<${threadA.nameAndSizeUntracked}, ${threadB.nameAndSizeUntracked}>`)\n\n\treturn computed(() => {\n\t\tconst entitiesA = new Set(threadA.map(log => log.en))\n\t\tconst entitiesB = new Set(threadB.map(log => log.en))\n\t\treturn [...entitiesA].filter(en => entitiesB.has(en))\n\t})\n})\n\nexport const entityOverlapMap = function entityOverlapMap(\n\tthreadA: Thread,\n\tthreadB: Thread,\n\tthreadAName = 'incoming',\n\tthreadBName = 'current',\n) {\n\tconst useInferredVM = (en, thread: Thread) => en\n\tconst overlapping = entityOverlap(threadA, threadB).get()\n\tconst mapped = new Map()\n\toverlapping.forEach(eachEntityID => (\n\t\tmapped.set(eachEntityID, {\n\t\t\t[threadAName]: useInferredVM(eachEntityID, threadA),\n\t\t\t[threadBName]: useInferredVM(eachEntityID, threadB),\n\t\t})\n\t))\n}\n\nexport const entityOverlapCount = computedFnDeepCompare(\n\t'entityOverlapCount',\n\tfunction entityOverlapCount(threadA: Thread, threadB: Thread) {\n\t\treturn computed(() => entityOverlap(threadA, threadB).get().length)\n\t},\n)\nexport const querySingle = computedFnDeepCompare('querySingle', function querySingle(\n\tthreadOrLogs: Thread | Applog[],\n\tpatternOrPatterns: Parameters<typeof query>[1],\n\tvariables: SearchContext = {},\n) {\n\tconst result = query(threadOrLogs, patternOrPatterns, variables)\n\treturn computed(() => {\n\t\tif (result.isEmpty) return null\n\t\tif (result.size > 1) throw ERROR(`[querySingle] got`, result.size, `results:`, result)\n\t\tconst logsOfThisNode = result.nodes[0].logsOfThisNode\n\t\tif (logsOfThisNode.size != 1) throw ERROR(`[querySingle] single result, but got`, logsOfThisNode.size, `logs:`, logsOfThisNode.applogs)\n\t\treturn logsOfThisNode.applogs[0]\n\t})\n}, {\n\tequals: comparer.structural,\n\targsDebugName: (thread, pattern) => createDebugName({ caller: 'querySingle', thread, pattern }),\n})\n\nexport const querySingleAndMap = computedFnDeepCompare(\n\t'querySingleAndMap',\n\tfunction querySingleAndMap<MAP extends (keyof Applog | (Partial<{ [key in keyof Applog]: string }>))>(\n\t\tthreadOrLogs: Thread | Applog[],\n\t\tpatternOrPatterns: Parameters<typeof query>[1],\n\t\tmapDef: MAP,\n\t\tvariables: SearchContext = {},\n\t) {\n\t\tconst resultBox = querySingle(threadOrLogs, patternOrPatterns, variables)\n\t\treturn computed<ApplogValue | { [key in keyof MAP]: ApplogValue } | null>(() => {\n\t\t\tconst log = resultBox.get()\n\t\t\tif (!log) return undefined\n\t\t\tif (typeof mapDef === 'string') {\n\t\t\t\treturn log[mapDef as string]\n\t\t\t} else {\n\t\t\t\treturn createObjMapper(mapDef)(log)\n\t\t\t}\n\t\t})\n\t},\n\t{\n\t\tequals: comparer.structural,\n\t\targsDebugName: (thread, pattern) => createDebugName({ caller: 'querySingleAndMap', thread, pattern }),\n\t},\n)\n/////////////\n// HELPERS //\n/////////////\n\nexport const 
mapThreadWith = function filterAndMapGetterFx<R>(\n\tthread: Thread,\n\tmapDef: (keyof Applog) | (Partial<{ [key in keyof Applog]: string }>) | ((applog: Applog) => R),\n) {\n\tif (typeof mapDef === 'function') {\n\t\treturn thread.map(mapDef)\n\t} else if (typeof mapDef === 'string') {\n\t\treturn thread.map(log => log[mapDef])\n\t} else {\n\t\treturn thread.map(createObjMapper(mapDef))\n\t}\n}\nexport const mapQueryResultWith = function filterAndMapGetterFx<R>(\n\tqueryResult: QueryResult,\n\tmapDef: string | (Partial<{ [key in keyof SearchContext]: string }>) | ((record: SearchContext) => R),\n) {\n\tif (typeof mapDef === 'function') {\n\t\treturn queryResult.records.map(mapDef)\n\t} else if (typeof mapDef === 'string') {\n\t\treturn queryResult.nodes.map((node) => {\n\t\t\tif (!Object.hasOwn(node.record, mapDef)) {\n\t\t\t\tif (node.logsOfThisNode.size !== 1) {\n\t\t\t\t\tthrow ERROR(`not sure what to map (it's not a var and a result node log count of ${node.logsOfThisNode.size})`)\n\t\t\t\t}\n\t\t\t\treturn node.logsOfThisNode.firstLog[mapDef]\n\t\t\t}\n\t\t\treturn node.record[mapDef]\n\t\t})\n\t} else {\n\t\treturn queryResult.nodes.map((node) => {\n\t\t\treturn createObjMapper(mapDef)(queryResult)\n\t\t})\n\t}\n}\n/**\n * Map Applog to custom named record, e.g.:\n * { en: 'movieID', vl: 'movieName' }\n * will map the applog to { movieID: .., movieName: .. }\n */\nexport function createObjMapper<FROM extends string, TO extends string>(applogFieldMap: Partial<{ [key in FROM]: TO }>) {\n\treturn (applog: { [key in FROM]: any }) => {\n\t\treturn Object.entries(applogFieldMap).reduce((acc, [key, value]) => {\n\t\t\tacc[value as TO] = applog[key]\n\t\t\treturn acc\n\t\t}, {} as Partial<{ [key in TO]: ApplogValue }>)\n\t}\n}\n\nexport function startsWith(str: string) {\n\treturn (value) => value.startsWith(str)\n}\n\nexport function prefixAttrs(prefix: string, attrs: readonly string[]) {\n\treturn attrs.map(at => prefixAt(prefix, at))\n}\nexport function prefixAt(prefix: string, attr: string) {\n\treturn `${prefix}/${attr}`\n}\nexport function threadFromMaybeArray(threadOrLogs: Thread | Applog[], name?: string) {\n\tif (!Array.isArray(threadOrLogs)) {\n\t\treturn threadOrLogs\n\t}\n\treturn ThreadInMemory.fromArray(threadOrLogs, name || `threadFromArray[${threadOrLogs.length}]`, true)\n}\nexport function withTimeout<R>(timeoutMilliseconds: number, func: () => R) {\n\tif (globalQueryTimeoutTime) throw ERROR(`Nested timeout not supported`)\n\tglobalQueryTimeoutTime = performance.now() + timeoutMilliseconds\n\tconst result = func()\n\n\tglobalQueryTimeoutTime = null\n\treturn result\n}\nexport function throwOnTimeout() {\n\tif (globalQueryTimeoutTime == null) return\n\tif (performance.now() >= globalQueryTimeoutTime) {\n\t\tthrow new QueryTimeoutError(globalQueryTimeoutTime)\n\t}\n}\nclass QueryTimeoutError extends Error {\n\tconstructor(message: string) 
{\n\t\tsuper(message)\n\t}\n}\n"],"mappings":";;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;AAOO,IAAM,YAAN,MAAgB;AAAA,EACtB,YACU,gBACA,WACA,WAA6B,MACrC;AAHQ;AACA;AACA;AAET,mBAAe,MAAM;AAAA,MACpB,eAAe;AAAA;AAAA,IAChB,CAAC;AAAA,EACF;AAAA,EACA,IAAI,SAAS;AACZ,WAAO,KAAK;AAAA,EACb;AAAA,EAEA,IAAI,gBAAgB;AACnB,QAAI,CAAC,KAAK,SAAU,QAAO,KAAK;AAChC,WAAO,YAAY;AAAA,MAClB,KAAK;AAAA,MACL,KAAK,SAAS;AAAA,IACf,CAAC;AAAA,EACF;AAAA,EACA,IAAI,YAAY;AACf,WAAO,KAAK,cAAc;AAAA,EAC3B;AACD;AAIO,IAAM,cAAN,MAAkB;AAAA,EACxB,YACQ,OACN;AADM;AAEP,mBAAe,MAAM;AAAA,MACpB,mBAAmB;AAAA;AAAA,MACnB,MAAM;AAAA;AAAA,MACN,SAAS;AAAA,IACV,CAAC;AAAA,EACF;AAAA,EAEA,IAAI,OAAO;AACV,WAAO,KAAK,QAAQ;AAAA,EACrB;AAAA,EACA,IAAI,UAAU;AACb,WAAO,KAAK,QAAQ,WAAW;AAAA,EAChC;AAAA,EACA,IAAI,gBAAgB;AACnB,WAAO,UAAU,MAAM,KAAK,QAAQ,MAAM;AAAA,EAC3C;AAAA,EAEA,IAAI,UAAU;AACb,WAAO,mBAAmB,MAAM,KAAK,MAAM,IAAI,CAAC,EAAE,UAAU,MAAM,SAAS,GAAG,EAAE,MAAM,sBAAsB,CAAC;AAAA,EAC9G;AAAA,EACA,IAAI,iBAAiB;AACpB,WAAO;AAAA,MACN,mBAAmB,MAAM,KAAK,MAAM,IAAI,CAAC,EAAE,gBAAgB,OAAO,MAAM,MAAM,GAAG,EAAE,MAAM,6BAA6B,CAAC;AAAA,IACxH;AAAA,EACD;AAAA,EACA,IAAI,iBAAiB;AACpB,WAAO,mBAAmB,MAAM,KAAK,MAAM,IAAI,CAAC,EAAE,gBAAgB,OAAO,MAAM,OAAO,OAAO,GAAG,EAAE,MAAM,6BAA6B,CAAC;AAAA,EACvI;AAAA,EACA,IAAI,eAAe;AAClB,WAAO,mBAAmB,MAAM,KAAK,MAAM,QAAQ,CAAC,EAAE,gBAAgB,OAAO,MAAM,OAAO,OAAO,GAAG;AAAA,MACnG,MAAM;AAAA,IACP,CAAC;AAAA,EACF;AAAA;AAAA;AAAA;AAAA,EAIA,IAAI,oBAAoB;AACvB,WAAO,YAAY,mBAAmB,MAAM,KAAK,MAAM,IAAI,UAAQ,KAAK,aAAa,GAAG,EAAE,MAAM,gCAAgC,CAAC,CAAC;AAAA,EACnI;AAAA,EACA,IAAI,SAAS;AACZ,WAAO,KAAK;AAAA,EACb;AAAA,EACA,IAAI,aAAa;AAChB,WAAO,KAAK,kBAAkB;AAAA,EAC/B;AACD;;;AC7DA,IAAM,EAAE,MAAM,KAAK,OAAO,SAAS,MAAM,IAAI,EAAO,MAAM,EAAO,MAAM,EAAE,QAAQ,MAAM,CAAC;AAExF,IAAI,yBAAyB;AAsBtB,IAAM,gBAAgB,sBAAsB,iBAAiB,SAASA,eAC5E,QACA,EAAE,8BAA8B,wBAAwB,IAGpD,CAAC,GACe;AACpB,UAAQ,gBAAgB,+BAA+B,cAAc,EAAE,MAAM,OAAO,oBAAoB,iBAAiB;AAEzH,MAAI,OAAO,QAAQ,SAAS,eAAe,GAAG;AAC7C,QAAI,yBAAyB;AAC5B,YAAM,kFAAkF;AACxF,aAAO;AAAA,IACR;AACA,UAAM,MAAM,0CAA0C,OAAO,SAAS,EAAE,MAAM,OAAO,KAAK,CAAC;AAAA,EAC5F;AAEA,MAAI;AACJ,QAAM,eAAe,cAAc,QAAQ,SAAS,oBAAoB,OAAO,cAAc;AAC5F,UAAM,YAAY,YAAY,KAAK;AAEnC,QAAI;AACJ,UAAM,QAAQ,CAAC;AACf,UAAM,WAAW,YAAY,OAAO,CAAC;AACrC,QAAI,WAAW;AACd,mBAAa,oBAAI,IAAI;AACrB,gBAAU,MAAM;AAAA,IACjB,OAAO;AACN,gBAAU,MAAM;AAAA,IACjB;AAEA,QAAI;AACJ,aACK,IAAI,+BAA+B,IAAI,QAAQ,SAAS,GAC5D,+BAA+B,IAAI,QAAQ,SAAS,KAAK,GACzD,+BAA+B,MAAM,KACpC;AACD,YAAM,MAAM,QAAQ,CAAC;AACrB,YAAM,MAAM,IAAI,KAAK,MAAM,IAAI;AAG/B,UAAI,YAAY,+BAA+B,UAAU,IAAI,KAAK,UAAU,IAAI,KAAK;AACpF,cAAM,MAAM,yCAAyC,SAAS,+BAA+B,MAAM,KAAK,IAAI,IAAI;AAAA,UAC/G;AAAA,UACA;AAAA,UACA;AAAA,UACA;AAAA,QACD,CAAC;AAAA,MACF;AACA,gBAAU,IAAI;AAEd,YAAM,WAAW,WAAW,IAAI,GAAG;AACnC,UAAI,CAAC,aAAa,+BAAgC,SAAS,KAAK,IAAI,KAAO,SAAS,KAAK,IAAI,KAAM;AAClG,YAAI,YAAY,CAAC,UAAW,UAAS,KAAK,QAAQ;AAClD,cAAM,KAAK,GAAG;AACd,mBAAW,IAAI,KAAK,GAAG;AAAA,MACxB;AAAA,IACD;AACA,oBAAgB,KAAK;AACrB,YAAQ,cACP;AAAA,MACC,gBAAgB,+BAA+B,cAAc,EAAE,IAAI,OAAO,oBAAoB;AAAA,MAC9F,YACC,EAAE,GAAG,OAAO,YAAY,OAAO,QAAQ,KAAK,EAAE,IAAI,CAAC,CAAC,GAAG,CAAC,MAAM,CAAC,GAAG,GAAG,MAAM,CAAC,CAAC,GAAG,OAAO,MAAM,QAAQ,SAAS,IAC9G,EAAE,GAAG,OAAO,OAAO,SAAS;AAAA,IAC9B;AACD,WAAO,YACN,EAAE,MAAM,MAAM,IACZ,EAAE,OAAO,OAAO,SAAS,SAAS;AAAA,EACtC,GAAG,EAAE,MAAM,gBAAgB,+BAA+B,cAAc,EAAE,IAAI,iBAAiB,gBAAgB,CAAC;AAChH,UAAQ,cAAc,QAAQ,MAAM;AACnC,YAAQ,iBAAiB,OAAO,oBAAoB,sBAAsB,aAAa,QAAQ,MAAM;AAAA,EACtG,CAAC;AACD,SAAO;AAcR,GAAG,EAAE,QAAQ,sBAAsB,eAAe,CAAC,WAAW,gBAAgB,EAAE,QAAQ,iBAAiB,OAAO,CAAC,EAAE,CAAC;AAM7G,IAAM,iBAAiB,sBAAsB,kBAAkB,SAASC,gBAC9E,QACC;AACD,MAAI,QAAQ,UAAW,SAAQ,kBAAkB,OAAO,oBAAoB,GAAG;AAE/E,MAAI,OAAO,QAAQ,SAAS,gBAAgB,GAAG;AAC9C,UAAM,MAAM,2CAA2C,OAAO,SAAS,EAAE,MAAM,OAAO,K
AAK,CAAC;AAAA,EAC7F;AAEA,QAAM,eAAe;AAAA,IACpB;AAAA;AAAA,IACA,EAAE,IAAI,CAAC,aAAa,sBAAsB,iBAAiB,GAAG,IAAI,KAAK;AAAA,IACvE,EAAE,MAAM,YAAY;AAAA,EACrB;AACA,UAAQ,aACP;AAAA,IACC,kBAAkB,OAAO,oBAAoB;AAAA,IAC7C,UAAU,SAAS,iCAAiC;AACnD,aAAO,CAAC,GAAG,aAAa,OAAO;AAAA,IAChC,CAAC;AAAA,EACF;AACD,QAAM,gBAAgB,gBAAgB,EAAE,QAAQ,sBAAsB,OAAO,CAAC;AAC9E,QAAM,UAAU,iBAAiB,SAAS,6BAA6B;AACtE,WAAO,aAAa,IAAI,SAAO,IAAI,EAAE;AAAA,EACtC,GAAG,EAAE,MAAM,cAAc,CAAC;AAE1B,MAAI,QAAQ,WAAW;AACtB,YAAQ,MAAM;AACb,cAAQ,kBAAkB,OAAO,oBAAoB,cAAc,CAAC,GAAG,OAAO,CAAC;AAAA,IAChF,CAAC;AAAA,EACF;AAEA,SAAO,cAAc,QAAQ,EAAE,OAAO,QAAQ,GAAG,EAAE,MAAM,kBAAkB,iBAAiB,iBAAiB,CAAC;AAC/G,GAAG,EAAE,QAAQ,qBAAqB,CAAC;AAoB5B,IAAM,QAAQ,sBAAsB,SAAS,SAASC,OAC5D,cACA,mBACA,iBAAgC,CAAC,GACjC,OAA4B,CAAC,GAC5B;AACD,iBAAe;AACf,QAAM,SAAS,qBAAqB,YAAY;AAChD,QAAM,SAAS,OAAO,oBAAoB,MAAM,iBAAiB;AACjE,QAAM,WAAY,MAAM,QAAQ,iBAAiB,IAAI,oBAAoB,CAAC,iBAAiB;AAE3F,MAAI;AACJ,MAAI,SAAS,WAAW,GAAG;AAE1B,YAAQ;AAAA,EACT,OAAO;AAEN,UAAM,oBAAoB,SAAS,MAAM,GAAG,EAAE;AAE9C,YAAQA,OAAM,QAAQ,mBAAmB,gBAAgB,IAAI;AAAA,EAC9D;AACA,QAAM,cAAc,SAAS,SAAS,SAAS,CAAC;AAChD,QAAM,aAAa,UAAU,QAAQ,OAAO,aAAa,IAAI;AAC7D,UAAQ,cAAc,QAAQ,MAAM,QAAQ,iBAAiB,KAAK,UAAU,CAAC,CAAC;AAC9E,SAAO;AACR,GAAG;AAAA,EACF,QAAQ;AAAA,EACR,eAAe,CAAC,QAAQ,SAAS,cAChC,gBAAgB,EAAE,QAAQ,SAAS,QAAQ,MAAM,YAAY,EAAE,SAAS,UAAU,IAAI,QAAQ,CAAC;AACjG,CAAC;AAEM,IAAM,YAAY,sBAAsB,aAAa,SAASC,WACpE,QACA,SACA,SAEA,OAA4B,CAAC,GAC5B;AACD,QAAM,aAAa,OAAO,oBAAoB,UAAU,SAAS,iBAAiB,OAAO,mBAAmB,OAAO;AACnH,MAAI,CAAC,OAAO,QAAQ,OAAO,EAAE,OAAQ,OAAM,IAAI,MAAM,kBAAkB;AAEvE,WAAS,QAAQ,MAAwB;AACxC,UAAM,CAAC,yBAAyB,eAAe,IAAI,yBAAyB,SAAS,MAAM,aAAa,CAAC,CAAC;AAC1G,YAAQ,4CAA4C,uBAAuB;AAC3E,UAAM,wBAAwB,cAAc,QAAQ,uBAAuB;AAC3E,UAAM,YAAY,gBAAgB,eAAe;AACjD,UAAM,cAAc,mBAAmB,SAAS,kBAAkB;AACjE,YAAM,qBAAqB,sBAAsB,IAAI,UAAQ,EAAE,KAAK,MAAM,UAAU,GAAG,EAAE,EAAE;AAC3F,UAAI,QAAQ,WAAW;AACtB;AAAA,UACC;AAAA,UACA,MAAM;AAAA,UACN;AAAA,UACA;AAAA,UACA;AAAA,UACA,UAAU,MAAM,sBAAsB,OAAO;AAAA,QAC9C;AAAA,MACD;AAEA,aAAO,mBAAmB,IAAI,CAAC,EAAE,KAAK,KAAK,MAAM;AAChD,cAAM,WAAW,OAAO,OAAO,CAAC,GAAG,MAAM,WAAW,IAAI;AACxD,eAAO,IAAI;AAAA;AAAA;AAAA,UAGV,aAAa;AAAA,YACZ,CAAC,GAAG;AAAA,YACJ,gBAAgB;AAAA,cACf,QAAQ;AAAA,cACR,QAAQ;AAAA,cACR,SAAS,GAAG,gBAAU,QAAQ,CAAC,IAAI,gBAAU,uBAAuB,CAAC;AAAA,YACtE,CAAC;AAAA;AAAA,UAEF;AAAA,UACA;AAAA,UACA;AAAA,QACD;AAAA,MACD,CAAC;AAAA,IACF,GAAG,EAAE,MAAM,gBAAgB,EAAE,QAAQ,oBAAoB,QAAQ,uBAAuB,QAAQ,CAAC,EAAE,CAAC;AACpG,QAAI,QAAQ,UAAW,SAAQ,MAAM,QAAQ,oCAAoC,CAAC,GAAG,WAAW,CAAC,CAAC;AAClG,QAAI,KAAK,OAAO;AACf;AAAA,QACC;AAAA,QACA;AAAA,UAAU,MACT,YAAY,IAAI,CAAC,EAAE,WAAW,gBAAgBC,QAAO,OAAO;AAAA,YAC3D;AAAA,YACA,QAAAA;AAAA;AAAA,UACD,EAAE;AAAA,QACH;AAAA,MACD;AAAA,IACD;AACA,WAAO;AAAA,EACR;AACA,QAAM,wBAAwB;AAAA,IAC7B,SAAS,sBAAsB;AAC9B,YAAM,4BAA4B,SAAS,OAAO,MAAM,eAAe;AACvE,UAAI,CAAC,SAAS;AAEb,eAAO,CAAC,GAAG,QAAQ,IAAI,CAAC;AAAA,MACzB,OAAO;AAEN,eAAO,CAAC,GAAG,QAAQ,MAAM,QAAQ,OAAO,CAAC;AAAA,MAC1C;AAAA,IACD;AAAA,IACA,EAAE,MAAM,gBAAgB,EAAE,QAAQ,aAAa,QAAQ,QAAQ,CAAC,EAAE;AAAA,EACnE;AAEA,MAAI,QAAQ,UAAW,SAAQ,MAAM,QAAQ,sCAAsC,CAAC,GAAG,qBAAqB,CAAC,CAAC;AAC9G,SAAO,IAAI,YAAY,qBAAqB;AAC7C,GAAG,EAAE,QAAQ,oBAAoB,eAAe,CAAC,QAAQ,QAAQ,YAAY,gBAAgB,EAAE,QAAQ,aAAa,QAAQ,QAAQ,CAAC,EAAE,CAAC;AAEjI,IAAM,WAAW,sBAAsB,YAAY,SAASC,UAClE,QACA,YACA,mBACA,OAA4B,CAAC,GAC5B;AACD,MAAI,QAAQ,WAAW;AACvB,QAAM,YAAY,OAAO,oBAAoB,WAAW,MAAM,MAAM,QAAQ;AAC5E,QAAM,WAAY,MAAM,QAAQ,iBAAiB,IAAI,oBAAoB,CAAC,iBAAiB;AAE3F,aAAW,WAAW,UAAU;AAC/B,QAAI,CAAC,OAAO,QAAQ,iBAAiB,EAAE,OAAQ,OAAM,IAAI,MAAM,kBAAkB;AACjF,YAAQ,MAAM,OAAO,SAAS,gBAAgB;AAAA;AAAA,MAAiB;AAAA,IAAU,GAAG;AAC3E,YAAM,CAAC,yBAAyB,gBAAgB,IAAI,yBAAyB,SAAS,aAAa,CAAC,CAAC;AACrG,cAAQ,mCAAmC,uBAAuB;AAClE,YAAM,aAAa,
cAAc,QAAQ,uBAAuB;AAChE,cAAQ,yBAAyB,WAAW,OAAO,WAAW,MAAM,SAAS;AAC7E,cAAQ,cAAc,QAAQ,yBAAyB,WAAW,cAAc,UAAU,MAAM,WAAW,OAAO,CAAC;AAEnH,UAAI,KAAK,MAAO,KAAI,2BAA2B,WAAW,MAAM,WAAW,OAAO;AAClF,aAAO,WAAW;AAAA,IACnB,CAAC;AAAA,EACF;AACA,SAAO,IAAI,YAAY,KAAK;AAC7B,GAAG,EAAE,QAAQ,oBAAoB,eAAe,CAAC,QAAQ,OAAO,YAAY,gBAAgB,EAAE,QAAQ,YAAY,QAAQ,QAAQ,CAAC,EAAE,CAAC;AA8B/H,IAAM,eAAe,sBAAsB,gBAAgB,SAASC,cAC1E,QACA,SACA,QACC;AACD,QAAM,gBAAgB,OAAO,oBAAoB,KAAK,OAAO;AAE7D,QAAM,WAAW,cAAc,QAAQ,OAAO;AAC9C,MAAI,QAAQ,WAAW;AACtB,YAAQ,4BAA4B,SAAS,aAAa;AAC1D,YAAQ,MAAM,QAAQ,4BAA4B,SAAS,OAAO,CAAC;AAAA,EACpE;AAEA,QAAM,OAAO,gBAAgB,EAAE,QAAQ,SAAS,QAAQ,eAAe,CAAC;AACxE,QAAM,SAAS,mBAAsC,MAAM,cAAc,UAAU,MAAM,GAAG,EAAE,KAAK,CAAC;AACpG,UAAQ,cAAc,QAAQ,MAAM,QAAQ,0BAA0B,MAAM,CAAC;AAC7E,SAAO;AACR,GAAG,EAAE,QAAQ,SAAS,YAAY,eAAe,CAAC,QAAQ,YAAY,gBAAgB,EAAE,QAAQ,gBAAgB,QAAQ,QAAQ,CAAC,EAAE,CAAC;AAE7H,IAAM,cAAc,sBAAsB,eAAe,SAASC,aACxE,cACA,mBACA,QACA,YAA2B,CAAC,GAC3B;AACD,QAAM,SAAS,qBAAqB,YAAY;AAChD,QAAM,eAAe,OAAO,oBAAoB,KAAK,EAAE,mBAAmB,WAAW,KAAK,OAAO,CAAC;AAClG,QAAM,YAAY,gBAAgB,EAAE,QAAQ,QAAQ,cAAc,CAAC;AAEnE,QAAM,cAAc,MAAM,QAAQ,iBAAiB;AACnD,UAAQ,iCAAiC,YAAY,aAAa;AAClE,QAAM,SAAS;AAAA,IACd,MAAM,mBAAmB,aAAa,MAAM;AAAA,IAC5C,EAAE,MAAM,UAAU;AAAA,EACnB;AACA,UAAQ,cAAc,QAAQ,MAAM,QAAQ,yBAAyB,KAAK,MAAM,CAAC,CAAC;AAClF,SAAO;AACR,GAAG,EAAE,QAAQ,SAAS,YAAY,eAAe,CAAC,QAAQ,YAAY,gBAAgB,EAAE,QAAQ,eAAe,QAAQ,QAAQ,CAAC,EAAE,CAAC;AAE5H,IAAM,cAAc,sBAAsB,eAAe,SAASC,aACxE,QACA,MACA,UACA,YACC;AACD,QAAM,eAAe,OAAO,oBAAoB,KAAK,UAAU,IAAI;AAEnE,QAAM,WAAW,cAAc,QAAQ,EAAE,IAAI,UAAU,IAAI,YAAY,MAAM,UAAU,EAAE,CAAC;AAC1F,UAAQ,wBAAwB,SAAS,OAAO;AAChD,SAAO;AAAA,IAAS,MACf,SAAS,UAAU,OAAO,OAAO;AAAA,MAChC,SAAS,IAAI,CAAC,EAAE,IAAI,GAAG,MAAM,CAAC,GAAG,MAAM,KAAK,SAAS,CAAC,GAAG,EAAE,CAAC;AAAA,IAC7D;AAAA,EACD;AACD,GAAG;AAAA,EACF,QAAQ;AAAA,EACR,eAAe,CAAC,QAAQ,MAAM,aAAa,gBAAgB,EAAE,QAAQ,eAAe,QAAQ,MAAM,EAAE,MAAM,SAAS,EAAE,CAAC;AACvH,CAAC;AAEM,IAAM,iBAAiB,sBAAsB,kBAAkB,SAASC,gBAC9E,QACC;AACD,QAAM,kBAAkB,OAAO,oBAAoB,GAAG;AAEtD,QAAM,SAAS,WAAW,IAAoB;AAC9C,QAAM,UAAU,OAAO,CAAC,UAAuB;AAC9C,eAAW,OAAQ,YAAY,KAAK,IAAI,MAAM,OAAO,MAAM,OAAQ;AAClE,YAAM,OAAO,OAAO,IAAI,IAAI,EAAE,KAAK;AACnC,aAAO,IAAI,IAAI,IAAI,OAAO,CAAC;AAAA,IAC5B;AACA,eAAW,OAAQ,CAAC,YAAY,KAAK,KAAK,MAAM,WAAW,CAAC,GAAI;AAC/D,YAAM,OAAO,OAAO,IAAI,IAAI,EAAE;AAC9B,UAAI,CAAC,QAAQ,OAAO,EAAG,OAAM,MAAM,2CAA2C,EAAE,KAAK,OAAO,QAAQ,KAAK,CAAC;AAC1G,aAAO,IAAI,IAAI,IAAI,OAAO,CAAC;AAAA,IAC5B;AACA,QAAI,kBAAkB,OAAO,oBAAoB,qBAAqB,EAAE,OAAO,OAAO,CAAC;AAAA,EACxF,CAAC;AAED,UAAQ,EAAE,MAAM,OAAO,QAAQ,CAAC;AAChC,QAAM,cAAc,OAAO,UAAU,OAAO;AAC5C,mBAAiB,QAAQ,WAAW;AAEpC,SAAO;AACR,CAAC;AAEM,IAAM,gBAAgB,sBAAsB,iBAAiB,SAAS,mBAC5E,SACA,SACC;AACD,MAAI,iBAAiB,QAAQ,oBAAoB,KAAK,QAAQ,oBAAoB,GAAG;AAErF,SAAO,SAAS,MAAM;AACrB,UAAM,YAAY,IAAI,IAAI,QAAQ,IAAI,SAAO,IAAI,EAAE,CAAC;AACpD,UAAM,YAAY,IAAI,IAAI,QAAQ,IAAI,SAAO,IAAI,EAAE,CAAC;AACpD,WAAO,CAAC,GAAG,SAAS,EAAE,OAAO,QAAM,UAAU,IAAI,EAAE,CAAC;AAAA,EACrD,CAAC;AACF,CAAC;AAEM,IAAM,mBAAmB,SAASC,kBACxC,SACA,SACA,cAAc,YACd,cAAc,WACb;AACD,QAAM,gBAAgB,CAAC,IAAI,WAAmB;AAC9C,QAAM,cAAc,cAAc,SAAS,OAAO,EAAE,IAAI;AACxD,QAAM,SAAS,oBAAI,IAAI;AACvB,cAAY,QAAQ,kBACnB,OAAO,IAAI,cAAc;AAAA,IACxB,CAAC,WAAW,GAAG,cAAc,cAAc,OAAO;AAAA,IAClD,CAAC,WAAW,GAAG,cAAc,cAAc,OAAO;AAAA,EACnD,CAAC,CACD;AACF;AAEO,IAAMC,sBAAqB;AAAA,EACjC;AAAA,EACA,SAASA,oBAAmB,SAAiB,SAAiB;AAC7D,WAAO,SAAS,MAAM,cAAc,SAAS,OAAO,EAAE,IAAI,EAAE,MAAM;AAAA,EACnE;AACD;AACO,IAAM,cAAc,sBAAsB,eAAe,SAASC,aACxE,cACA,mBACA,YAA2B,CAAC,GAC3B;AACD,QAAM,SAAS,MAAM,cAAc,mBAAmB,SAAS;AAC/D,SAAO,SAAS,MAAM;AACrB,QAAI,OAAO,QAAS,QAAO;AAC3B,QAAI,OAAO,OAAO,EAAG,OAAM,MAAM,qBAAqB,OAAO,MAAM,YAAY,MAAM;AACrF,UAAM,iBAAiB,OAAO,MAAM,CAAC,EAAE
;AACvC,QAAI,eAAe,QAAQ,EAAG,OAAM,MAAM,wCAAwC,eAAe,MAAM,SAAS,eAAe,OAAO;AACtI,WAAO,eAAe,QAAQ,CAAC;AAAA,EAChC,CAAC;AACF,GAAG;AAAA,EACF,QAAQ,SAAS;AAAA,EACjB,eAAe,CAAC,QAAQ,YAAY,gBAAgB,EAAE,QAAQ,eAAe,QAAQ,QAAQ,CAAC;AAC/F,CAAC;AAEM,IAAM,oBAAoB;AAAA,EAChC;AAAA,EACA,SAASC,mBACR,cACA,mBACA,QACA,YAA2B,CAAC,GAC3B;AACD,UAAM,YAAY,YAAY,cAAc,mBAAmB,SAAS;AACxE,WAAO,SAAmE,MAAM;AAC/E,YAAM,MAAM,UAAU,IAAI;AAC1B,UAAI,CAAC,IAAK,QAAO;AACjB,UAAI,OAAO,WAAW,UAAU;AAC/B,eAAO,IAAI,MAAgB;AAAA,MAC5B,OAAO;AACN,eAAO,gBAAgB,MAAM,EAAE,GAAG;AAAA,MACnC;AAAA,IACD,CAAC;AAAA,EACF;AAAA,EACA;AAAA,IACC,QAAQ,SAAS;AAAA,IACjB,eAAe,CAAC,QAAQ,YAAY,gBAAgB,EAAE,QAAQ,qBAAqB,QAAQ,QAAQ,CAAC;AAAA,EACrG;AACD;AAKO,IAAM,gBAAgB,SAAS,qBACrC,QACA,QACC;AACD,MAAI,OAAO,WAAW,YAAY;AACjC,WAAO,OAAO,IAAI,MAAM;AAAA,EACzB,WAAW,OAAO,WAAW,UAAU;AACtC,WAAO,OAAO,IAAI,SAAO,IAAI,MAAM,CAAC;AAAA,EACrC,OAAO;AACN,WAAO,OAAO,IAAI,gBAAgB,MAAM,CAAC;AAAA,EAC1C;AACD;AACO,IAAM,qBAAqB,SAASC,sBAC1C,aACA,QACC;AACD,MAAI,OAAO,WAAW,YAAY;AACjC,WAAO,YAAY,QAAQ,IAAI,MAAM;AAAA,EACtC,WAAW,OAAO,WAAW,UAAU;AACtC,WAAO,YAAY,MAAM,IAAI,CAAC,SAAS;AACtC,UAAI,CAAC,OAAO,OAAO,KAAK,QAAQ,MAAM,GAAG;AACxC,YAAI,KAAK,eAAe,SAAS,GAAG;AACnC,gBAAM,MAAM,uEAAuE,KAAK,eAAe,IAAI,GAAG;AAAA,QAC/G;AACA,eAAO,KAAK,eAAe,SAAS,MAAM;AAAA,MAC3C;AACA,aAAO,KAAK,OAAO,MAAM;AAAA,IAC1B,CAAC;AAAA,EACF,OAAO;AACN,WAAO,YAAY,MAAM,IAAI,CAAC,SAAS;AACtC,aAAO,gBAAgB,MAAM,EAAE,WAAW;AAAA,IAC3C,CAAC;AAAA,EACF;AACD;AAMO,SAAS,gBAAwD,gBAAgD;AACvH,SAAO,CAAC,WAAmC;AAC1C,WAAO,OAAO,QAAQ,cAAc,EAAE,OAAO,CAAC,KAAK,CAAC,KAAK,KAAK,MAAM;AACnE,UAAI,KAAW,IAAI,OAAO,GAAG;AAC7B,aAAO;AAAA,IACR,GAAG,CAAC,CAA0C;AAAA,EAC/C;AACD;AAEO,SAAS,WAAW,KAAa;AACvC,SAAO,CAAC,UAAU,MAAM,WAAW,GAAG;AACvC;AAEO,SAAS,YAAY,QAAgB,OAA0B;AACrE,SAAO,MAAM,IAAI,QAAM,SAAS,QAAQ,EAAE,CAAC;AAC5C;AACO,SAAS,SAAS,QAAgB,MAAc;AACtD,SAAO,GAAG,MAAM,IAAI,IAAI;AACzB;AACO,SAAS,qBAAqB,cAAiC,MAAe;AACpF,MAAI,CAAC,MAAM,QAAQ,YAAY,GAAG;AACjC,WAAO;AAAA,EACR;AACA,SAAO,eAAe,UAAU,cAAc,QAAQ,mBAAmB,aAAa,MAAM,KAAK,IAAI;AACtG;AACO,SAAS,YAAe,qBAA6B,MAAe;AAC1E,MAAI,uBAAwB,OAAM,MAAM,8BAA8B;AACtE,2BAAyB,YAAY,IAAI,IAAI;AAC7C,QAAM,SAAS,KAAK;AAEpB,2BAAyB;AACzB,SAAO;AACR;AACO,SAAS,iBAAiB;AAChC,MAAI,0BAA0B,KAAM;AACpC,MAAI,YAAY,IAAI,KAAK,wBAAwB;AAChD,UAAM,IAAI,kBAAkB,sBAAsB;AAAA,EACnD;AACD;AACA,IAAM,oBAAN,cAAgC,MAAM;AAAA,EACrC,YAAY,SAAiB;AAC5B,UAAM,OAAO;AAAA,EACd;AACD;","names":["lastWriteWins","withoutDeleted","query","queryStep","thread","queryNot","filterAndMap","queryAndMap","queryEntity","agentsOfThread","entityOverlapMap","entityOverlapCount","querySingle","querySingleAndMap","filterAndMapGetterFx"]}
package/dist/{chunk-CDGK7IKH.min.js → chunk-TEQ4SIKN.min.js}
CHANGED
@@ -1,22 +1,21 @@
 import {
-E,
 ThreadInMemory,
 computedFnDeepCompare,
 createDebugName,
+g,
 observableArrayMap
-} from "./chunk-
+} from "./chunk-NPCVLBCM.min.js";
 import {
 autorun,
 comparer,
 toJS
-} from "./chunk-
+} from "./chunk-KEHU7HGZ.min.js";
 
 // src/query/divergences.ts
-var { WARN, LOG, DEBUG, VERBOSE, ERROR } =
+var { WARN, LOG, DEBUG, VERBOSE, ERROR } = g.setup(g.INFO);
 var queryDivergencesByPrev = computedFnDeepCompare("queryDivergencesByPrev", function queryConflictingByPrev(sourceThread) {
 DEBUG(`queryDivergencesByPrev<${sourceThread.nameAndSizeUntracked}>`);
-if (sourceThread.filters.includes("lastWriteWins"))
-WARN(`queryDivergencesByPrev on thread lastWriteWins`, sourceThread);
+if (sourceThread.filters.includes("lastWriteWins")) WARN(`queryDivergencesByPrev on thread lastWriteWins`, sourceThread);
 const divergences = observableArrayMap(() => {
 const logsForNode = /* @__PURE__ */ new Map();
 const leafs = /* @__PURE__ */ new Set();
@@ -63,4 +62,4 @@ export {
 includes,
 includedIn
 };
-//# sourceMappingURL=chunk-
+//# sourceMappingURL=chunk-TEQ4SIKN.min.js.map
package/dist/{chunk-CDGK7IKH.min.js.map → chunk-TEQ4SIKN.min.js.map}
CHANGED
@@ -1 +1 @@
-
{"version":3,"sources":["../src/query/divergences.ts","../src/query/matchers.ts"],"sourcesContent":["import { Logger } from 'besonders-logger'\nimport { autorun, comparer, toJS } from 'mobx'\nimport stringify from 'safe-stable-stringify'\nimport { Applog, CidString } from '../applog/datom-types'\nimport { computedFnDeepCompare, createDebugName, observableArrayMap } from '../mobx/mobx-utils'\nimport { Thread } from '../thread/basic'\nimport { ThreadInMemory } from '../thread/writeable'\n\nconst { WARN, LOG, DEBUG, VERBOSE, ERROR } = Logger.setup(Logger.INFO) // eslint-disable-line no-unused-vars\n\nexport interface DivergenceLeaf {\n\tlog: Applog\n\tthread: Thread\n}\n\nexport const queryDivergencesByPrev = computedFnDeepCompare('queryDivergencesByPrev', function queryConflictingByPrev(\n\tsourceThread: Thread,\n) {\n\tDEBUG(`queryDivergencesByPrev<${sourceThread.nameAndSizeUntracked}>`)\n\tif (sourceThread.filters.includes('lastWriteWins')) WARN(`queryDivergencesByPrev on thread lastWriteWins`, sourceThread)\n\n\tconst divergences = observableArrayMap(() => {\n\t\tconst logsForNode = new Map<CidString, Applog[]>()\n\t\tconst leafs = new Set<CidString>()\n\t\tVERBOSE('all applogs:', sourceThread.applogs)\n\t\tfor (const log of sourceThread.applogs) {\n\t\t\tlet prevLogs\n\t\t\tif (log.pv) {\n\t\t\t\tprevLogs = log.pv && logsForNode.get(log.pv.toString())\n\t\t\t\tleafs.delete(log.pv.toString())\n\t\t\t}\n\t\t\tVERBOSE('traversing log', { log, prevLogs, leafs: Array.from(leafs) })\n\t\t\tlogsForNode.set(log.cid, prevLogs ? [...prevLogs, log] : [log])\n\t\t\tleafs.add(log.cid)\n\t\t}\n\t\treturn Array.from(leafs).map(leafID => {\n\t\t\t// TODO use MappedThread?\n\t\t\tconst thread = new ThreadInMemory(\n\t\t\t\tcreateDebugName({\n\t\t\t\t\tcaller: 'DivergenceLeaf',\n\t\t\t\t\tthread: sourceThread,\n\t\t\t\t\tpattern: `leaf: ${leafID}`,\n\t\t\t\t}),\n\t\t\t\tlogsForNode.get(leafID),\n\t\t\t\tsourceThread.filters,\n\t\t\t\ttrue,\n\t\t\t\t// TODO: sourceThread,\n\t\t\t)\n\t\t\treturn ({ log: thread.latestLog, thread })\n\t\t})\n\t}, { name: createDebugName({ caller: 'queryDivergencesByPrev', thread: sourceThread }) })\n\tVERBOSE.isDisabled || autorun(() => VERBOSE(`[queryDivergencesByPrev] result:`, toJS(divergences)))\n\treturn divergences\n}, { equals: comparer.structural })\n\n// export const queryDivergencesOfEnAtByPrev = computedFnDeepCompare('queryDivergencesOfEnAtByPrev', function queryDivergencesOfEnAtByPrev(\n// \tsourceThread: Thread,\n// ) {\n// \tDEBUG(`queryDivergencesOfEnAtByPrev<${sourceThread.nameAndSizeUntracked}>`)\n// \tif (sourceThread.filters.includes('lastWriteWins')) WARN(`queryDivergencesOfEnAtByPrevFon thread lastWriteWins`, sourceThread)\n\n// \tconst divergences = observableArrayMap(() => {\n// \t\tconst logsForNode = new Map<CidString, Applog[]>()\n// \t\tconst leafs = new Map<string, Set<CidString>>() // [en,at]\n// \t\tVERBOSE('all applogs:', sourceThread.applogs)\n// \t\tfor (const log of sourceThread.applogs) {\n// \t\t\tconst key = stringify([log.en, log.at])\n// \t\t\tif (!leafs.has(key)) leafs.set(key, new Set())\n// \t\t\tlet prevLogs\n// \t\t\tif (log.pv) {\n// \t\t\t\tprevLogs = log.pv && logsForNode.get(log.pv.toString())\n// \t\t\t\tleafs.get(key).delete(log.pv.toString())\n// \t\t\t}\n// \t\t\tVERBOSE('traversing log', { key, log, prevLogs, leafs: Array.from(leafs) })\n// \t\t\tlogsForNode.set(log.cid, prevLogs ? 
[...prevLogs, log] : [log])\n// \t\t\tleafs.get(key).add(log.cid)\n// \t\t}\n// \t\treturn Array.from(leafs.entries()).map(([_enAt, leafs]) => {\n// \t\t\t// TODO use MappedThread?\n// \t\t\tconst thread = new ThreadInMemory(\n// \t\t\t\tcreateDebugName({\n// \t\t\t\t\tcaller: 'DivergenceLeaf',\n// \t\t\t\t\tthread: sourceThread,\n// \t\t\t\t\tpattern: `leaf: ${leafID}`,\n// \t\t\t\t}),\n// \t\t\t\tlogsForNode.get(leafID),\n// \t\t\t\tsourceThread.filters,\n// \t\t\t\ttrue,\n// \t\t\t\t// TODO: sourceThread,\n// \t\t\t)\n// \t\t\treturn ({ log: thread.latestLog, thread })\n// \t\t})\n// \t}, { name: createDebugName({ caller: 'queryDivergencesOfEnAtByPrev', thread: sourceThread }) })\n// \tVERBOSE.isDisabled || autorun(() => VERBOSE(`[queryDivergencesOfEnAtByPrev] result:`, toJS(divergences)))\n// \treturn divergences\n// }, { equals: comparer.structural })\n","import { DatomPart } from '../applog/datom-types'\n\nexport function includes(str: string) {\n\treturn (vl: DatomPart) => vl?.includes?.(str)\n}\nexport function includedIn(arr: string[]) {\n\treturn (vl: DatomPart) => arr?.includes?.(vl)\n}\n"],"mappings":";;;;;;;;;;;;;;AAQA,IAAM,EAAE,MAAM,KAAK,OAAO,SAAS,MAAM,IAAI,EAAO,MAAM,EAAO,IAAI;AAO9D,IAAM,yBAAyB,sBAAsB,0BAA0B,SAAS,uBAC9F,cACC;AACD,QAAM,0BAA0B,aAAa,oBAAoB,GAAG;AACpE,MAAI,aAAa,QAAQ,SAAS,eAAe
+
{"version":3,"sources":["../src/query/divergences.ts","../src/query/matchers.ts"],"sourcesContent":["import { Logger } from 'besonders-logger'\nimport { autorun, comparer, toJS } from 'mobx'\nimport stringify from 'safe-stable-stringify'\nimport { Applog, CidString } from '../applog/datom-types'\nimport { computedFnDeepCompare, createDebugName, observableArrayMap } from '../mobx/mobx-utils'\nimport { Thread } from '../thread/basic'\nimport { ThreadInMemory } from '../thread/writeable'\n\nconst { WARN, LOG, DEBUG, VERBOSE, ERROR } = Logger.setup(Logger.INFO) // eslint-disable-line no-unused-vars\n\nexport interface DivergenceLeaf {\n\tlog: Applog\n\tthread: Thread\n}\n\nexport const queryDivergencesByPrev = computedFnDeepCompare('queryDivergencesByPrev', function queryConflictingByPrev(\n\tsourceThread: Thread,\n) {\n\tDEBUG(`queryDivergencesByPrev<${sourceThread.nameAndSizeUntracked}>`)\n\tif (sourceThread.filters.includes('lastWriteWins')) WARN(`queryDivergencesByPrev on thread lastWriteWins`, sourceThread)\n\n\tconst divergences = observableArrayMap(() => {\n\t\tconst logsForNode = new Map<CidString, Applog[]>()\n\t\tconst leafs = new Set<CidString>()\n\t\tVERBOSE('all applogs:', sourceThread.applogs)\n\t\tfor (const log of sourceThread.applogs) {\n\t\t\tlet prevLogs\n\t\t\tif (log.pv) {\n\t\t\t\tprevLogs = log.pv && logsForNode.get(log.pv.toString())\n\t\t\t\tleafs.delete(log.pv.toString())\n\t\t\t}\n\t\t\tVERBOSE('traversing log', { log, prevLogs, leafs: Array.from(leafs) })\n\t\t\tlogsForNode.set(log.cid, prevLogs ? [...prevLogs, log] : [log])\n\t\t\tleafs.add(log.cid)\n\t\t}\n\t\treturn Array.from(leafs).map(leafID => {\n\t\t\t// TODO use MappedThread?\n\t\t\tconst thread = new ThreadInMemory(\n\t\t\t\tcreateDebugName({\n\t\t\t\t\tcaller: 'DivergenceLeaf',\n\t\t\t\t\tthread: sourceThread,\n\t\t\t\t\tpattern: `leaf: ${leafID}`,\n\t\t\t\t}),\n\t\t\t\tlogsForNode.get(leafID),\n\t\t\t\tsourceThread.filters,\n\t\t\t\ttrue,\n\t\t\t\t// TODO: sourceThread,\n\t\t\t)\n\t\t\treturn ({ log: thread.latestLog, thread })\n\t\t})\n\t}, { name: createDebugName({ caller: 'queryDivergencesByPrev', thread: sourceThread }) })\n\tVERBOSE.isDisabled || autorun(() => VERBOSE(`[queryDivergencesByPrev] result:`, toJS(divergences)))\n\treturn divergences\n}, { equals: comparer.structural })\n\n// export const queryDivergencesOfEnAtByPrev = computedFnDeepCompare('queryDivergencesOfEnAtByPrev', function queryDivergencesOfEnAtByPrev(\n// \tsourceThread: Thread,\n// ) {\n// \tDEBUG(`queryDivergencesOfEnAtByPrev<${sourceThread.nameAndSizeUntracked}>`)\n// \tif (sourceThread.filters.includes('lastWriteWins')) WARN(`queryDivergencesOfEnAtByPrevFon thread lastWriteWins`, sourceThread)\n\n// \tconst divergences = observableArrayMap(() => {\n// \t\tconst logsForNode = new Map<CidString, Applog[]>()\n// \t\tconst leafs = new Map<string, Set<CidString>>() // [en,at]\n// \t\tVERBOSE('all applogs:', sourceThread.applogs)\n// \t\tfor (const log of sourceThread.applogs) {\n// \t\t\tconst key = stringify([log.en, log.at])\n// \t\t\tif (!leafs.has(key)) leafs.set(key, new Set())\n// \t\t\tlet prevLogs\n// \t\t\tif (log.pv) {\n// \t\t\t\tprevLogs = log.pv && logsForNode.get(log.pv.toString())\n// \t\t\t\tleafs.get(key).delete(log.pv.toString())\n// \t\t\t}\n// \t\t\tVERBOSE('traversing log', { key, log, prevLogs, leafs: Array.from(leafs) })\n// \t\t\tlogsForNode.set(log.cid, prevLogs ? 
[...prevLogs, log] : [log])\n// \t\t\tleafs.get(key).add(log.cid)\n// \t\t}\n// \t\treturn Array.from(leafs.entries()).map(([_enAt, leafs]) => {\n// \t\t\t// TODO use MappedThread?\n// \t\t\tconst thread = new ThreadInMemory(\n// \t\t\t\tcreateDebugName({\n// \t\t\t\t\tcaller: 'DivergenceLeaf',\n// \t\t\t\t\tthread: sourceThread,\n// \t\t\t\t\tpattern: `leaf: ${leafID}`,\n// \t\t\t\t}),\n// \t\t\t\tlogsForNode.get(leafID),\n// \t\t\t\tsourceThread.filters,\n// \t\t\t\ttrue,\n// \t\t\t\t// TODO: sourceThread,\n// \t\t\t)\n// \t\t\treturn ({ log: thread.latestLog, thread })\n// \t\t})\n// \t}, { name: createDebugName({ caller: 'queryDivergencesOfEnAtByPrev', thread: sourceThread }) })\n// \tVERBOSE.isDisabled || autorun(() => VERBOSE(`[queryDivergencesOfEnAtByPrev] result:`, toJS(divergences)))\n// \treturn divergences\n// }, { equals: comparer.structural })\n","import { DatomPart } from '../applog/datom-types'\n\nexport function includes(str: string) {\n\treturn (vl: DatomPart) => vl?.includes?.(str)\n}\nexport function includedIn(arr: string[]) {\n\treturn (vl: DatomPart) => arr?.includes?.(vl)\n}\n"],"mappings":";;;;;;;;;;;;;;AAQA,IAAM,EAAE,MAAM,KAAK,OAAO,SAAS,MAAM,IAAI,EAAO,MAAM,EAAO,IAAI;AAO9D,IAAM,yBAAyB,sBAAsB,0BAA0B,SAAS,uBAC9F,cACC;AACD,QAAM,0BAA0B,aAAa,oBAAoB,GAAG;AACpE,MAAI,aAAa,QAAQ,SAAS,eAAe,EAAG,MAAK,kDAAkD,YAAY;AAEvH,QAAM,cAAc,mBAAmB,MAAM;AAC5C,UAAM,cAAc,oBAAI,IAAyB;AACjD,UAAM,QAAQ,oBAAI,IAAe;AACjC,YAAQ,gBAAgB,aAAa,OAAO;AAC5C,eAAW,OAAO,aAAa,SAAS;AACvC,UAAI;AACJ,UAAI,IAAI,IAAI;AACX,mBAAW,IAAI,MAAM,YAAY,IAAI,IAAI,GAAG,SAAS,CAAC;AACtD,cAAM,OAAO,IAAI,GAAG,SAAS,CAAC;AAAA,MAC/B;AACA,cAAQ,kBAAkB,EAAE,KAAK,UAAU,OAAO,MAAM,KAAK,KAAK,EAAE,CAAC;AACrE,kBAAY,IAAI,IAAI,KAAK,WAAW,CAAC,GAAG,UAAU,GAAG,IAAI,CAAC,GAAG,CAAC;AAC9D,YAAM,IAAI,IAAI,GAAG;AAAA,IAClB;AACA,WAAO,MAAM,KAAK,KAAK,EAAE,IAAI,YAAU;AAEtC,YAAM,SAAS,IAAI;AAAA,QAClB,gBAAgB;AAAA,UACf,QAAQ;AAAA,UACR,QAAQ;AAAA,UACR,SAAS,SAAS,MAAM;AAAA,QACzB,CAAC;AAAA,QACD,YAAY,IAAI,MAAM;AAAA,QACtB,aAAa;AAAA,QACb;AAAA;AAAA,MAED;AACA,aAAQ,EAAE,KAAK,OAAO,WAAW,OAAO;AAAA,IACzC,CAAC;AAAA,EACF,GAAG,EAAE,MAAM,gBAAgB,EAAE,QAAQ,0BAA0B,QAAQ,aAAa,CAAC,EAAE,CAAC;AACxF,UAAQ,cAAc,QAAQ,MAAM,QAAQ,oCAAoC,KAAK,WAAW,CAAC,CAAC;AAClG,SAAO;AACR,GAAG,EAAE,QAAQ,SAAS,WAAW,CAAC;;;ACnD3B,SAAS,SAAS,KAAa;AACrC,SAAO,CAAC,OAAkB,IAAI,WAAW,GAAG;AAC7C;AACO,SAAS,WAAW,KAAe;AACzC,SAAO,CAAC,OAAkB,KAAK,WAAW,EAAE;AAC7C;","names":[]}
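The new chunk-TEQ4SIKN.min.js.map above embeds the TypeScript source of src/query/divergences.ts, which is the clearest statement of what queryDivergencesByPrev does: it walks a thread's applogs once, follows each log's pv (previous-cid) pointer, and keeps every cid that no later log points back to as a divergence leaf. A minimal sketch of that traversal follows; the MiniLog and divergenceLeaves names are hypothetical, and the real code operates on Applog/Thread, wraps the computation in observableArrayMap, and turns each leaf into a ThreadInMemory:

interface MiniLog {
  cid: string        // content id of this applog
  pv: string | null  // cid of the applog this one extends, if any
}

function divergenceLeaves(applogs: MiniLog[]): MiniLog[][] {
  const chainForNode = new Map<string, MiniLog[]>() // cid -> chain of logs ending at that cid
  const leafs = new Set<string>()                   // cids that nothing points back to (yet)
  for (const log of applogs) {
    let prevChain: MiniLog[] | undefined
    if (log.pv) {
      prevChain = chainForNode.get(log.pv)
      leafs.delete(log.pv) // its predecessor is no longer a leaf
    }
    chainForNode.set(log.cid, prevChain ? [...prevChain, log] : [log])
    leafs.add(log.cid)
  }
  // more than one remaining leaf means the history has diverged into parallel branches
  return Array.from(leafs).map(cid => chainForNode.get(cid)!)
}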
package/dist/index.min.js
CHANGED
@@ -5,7 +5,7 @@ import {
 integratePub,
 isPublication,
 isSubscription
-} from "./chunk-
+} from "./chunk-FIOA3FZW.min.js";
 import {
 carFromBlob,
 chunkApplogs,
@@ -21,13 +21,13 @@ import {
 preparePubForPush,
 streamReaderToIterable,
 unchunkApplogsBlock
-} from "./chunk-
+} from "./chunk-JEOQUHTK.min.js";
 import "./chunk-QPGEBDMJ.min.js";
 import {
 includedIn,
 includes,
 queryDivergencesByPrev
-} from "./chunk-
+} from "./chunk-TEQ4SIKN.min.js";
 import {
 QueryNode,
 QueryResult,
@@ -51,8 +51,10 @@ import {
 queryStep,
 startsWith,
 threadFromMaybeArray,
+throwOnTimeout,
+withTimeout,
 withoutDeleted
-} from "./chunk-
+} from "./chunk-OBMVNVJR.min.js";
 import {
 AppLogNoCidTB,
 AppLogNoCidTBC,
@@ -82,6 +84,7 @@ import {
 cidToString,
 compareApplogsByEnAt,
 compareApplogsByTs,
+compareStructuralButThreadsOnIdentity,
 computedFnDeepCompare,
 computedStructuralComparer,
 containsCid,
@@ -99,6 +102,7 @@ import {
 ensureValidCIDinstance,
 entityCount,
 finalizeApplogForInsert,
+getAgents,
 getApplogNoCidTypeErrors,
 getApplogTypeErrors,
 getCidSync,
@@ -148,8 +152,8 @@ import {
 variableNameWithoutQuestionmark,
 withPvFrom,
 withTs
-} from "./chunk-
-import "./chunk-
+} from "./chunk-NPCVLBCM.min.js";
+import "./chunk-KEHU7HGZ.min.js";
 import "./chunk-5MMGBK2U.min.js";
 import {
 BOOL,
@@ -162,8 +166,8 @@ import {
 Str,
 arrayIfSingle,
 checkParityTB
-} from "./chunk-
-import "./chunk-
+} from "./chunk-HYMC7W6S.min.js";
+import "./chunk-PHITDXZT.min.js";
 export {
 AppLogNoCidTB,
 AppLogNoCidTBC,
@@ -209,6 +213,7 @@ export {
 cidToString,
 compareApplogsByEnAt,
 compareApplogsByTs,
+compareStructuralButThreadsOnIdentity,
 computedFnDeepCompare,
 computedStructuralComparer,
 containsCid,
@@ -235,6 +240,7 @@ export {
 entityOverlapMap,
 filterAndMap,
 finalizeApplogForInsert,
+getAgents,
 getApplogNoCidTypeErrors,
 getApplogTypeErrors,
 getBlocksOfCar,
@@ -304,6 +310,7 @@ export {
 startsWith,
 streamReaderToIterable,
 threadFromMaybeArray,
+throwOnTimeout,
 toIpnsString,
 tryParseCID,
 tsNearlySame,
@@ -311,6 +318,7 @@ export {
 uniqueEnFromAppLogs,
 variableNameWithoutQuestionmark,
 withPvFrom,
+withTimeout,
 withTs,
 withoutDeleted
 };
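Most of the index.min.js hunk above just re-points imports at the renamed chunks; the substantive change is four additions to the public export list: compareStructuralButThreadsOnIdentity, getAgents, throwOnTimeout and withTimeout (the latter two arrive via chunk-OBMVNVJR.min.js alongside the query helpers). Only the names are visible in this diff, not their signatures, so a consumer-side sketch can do no more than import them; the '@wovin/core' specifier is an assumption:

import {
  compareStructuralButThreadsOnIdentity, // signatures are not shown in this diff, only the export names
  getAgents,
  throwOnTimeout,
  withTimeout,
} from '@wovin/core'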
package/dist/ipfs/car.d.ts
CHANGED
@@ -16,12 +16,12 @@ export declare function decodePubFromCar(car: CarReader): Promise<{
 info: {
 logs: {
 cid: string;
-pv: CidString;
-ts:
-ag: import("
-en:
-at:
-vl: import("
+pv: CidString | null;
+ts: import("..").Timestamp;
+ag: import("..").AgentHash;
+en: import("..").EntityID;
+at: import("..").Attribute;
+vl: import("..").ApplogValue;
 }[];
 };
 applogsCID: CID<unknown, number, number, import("multiformats").Version>;
@@ -32,12 +32,12 @@ export declare function decodePubFromBlocks({ rootCID, blockStore }: DecodedCar,
 info: {
 logs: {
 cid: string;
-pv: CidString;
-ts:
-ag: import("
-en:
-at:
-vl: import("
+pv: CidString | null;
+ts: import("..").Timestamp;
+ag: import("..").AgentHash;
+en: import("..").EntityID;
+at: import("..").Attribute;
+vl: import("..").ApplogValue;
 }[];
 };
 applogsCID: CID<unknown, number, number, import("multiformats").Version>;
@@ -50,7 +50,7 @@ export declare function getBlocksOfCar(car: CarReader): Promise<{
 };
 }>;
 export declare function getDecodedBlock(blockStore: BlockStoreish, cid: CID): Promise<unknown>;
-export declare function makeCarOut(roots: CIDForCar, blocks: BlockForCar[]): Promise<AsyncIterable<Uint8Array
+export declare function makeCarOut(roots: CIDForCar, blocks: BlockForCar[]): Promise<AsyncIterable<Uint8Array<ArrayBufferLike>>>; /** create a new CarWriter, with the encoded block as the root */
 export declare function makeCarBlob(roots: CIDForCar, blocks: BlockForCar[]): Promise<Blob>;
 export declare function carFromBlob(blob: Blob | File): Promise<CarReader>;
 export declare function streamReaderToIterable(bodyReader: ReadableStreamDefaultReader<Uint8Array>): AsyncIterable<Uint8Array>;
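The car.d.ts declarations above tighten the decoded log shape (pv is now typed CidString | null, and ts/ag/en/at/vl resolve to named types imported from "..") and make makeCarOut's return type concrete. A small round-trip sketch based only on the declarations shown; the '@wovin/core' import specifier, and treating CIDForCar, BlockForCar and decodePubFromCar as re-exported from the package root, are assumptions not confirmed by this diff:

import { carFromBlob, decodePubFromCar, makeCarBlob } from '@wovin/core'
import type { BlockForCar, CIDForCar } from '@wovin/core'

async function roundTripPub(roots: CIDForCar, blocks: BlockForCar[]) {
  const blob = await makeCarBlob(roots, blocks)  // encode the blocks into a CAR file, returned as a Blob
  const reader = await carFromBlob(blob)         // reopen the Blob as a CarReader
  const { info, applogsCID } = await decodePubFromCar(reader)
  for (const log of info.logs) {
    // since 0.0.16 the declaration types pv as CidString | null, so guard before using it
    if (log.pv !== null) console.log(log.cid, 'extends', log.pv)
  }
  return applogsCID
}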
package/dist/ipfs/car.d.ts.map
CHANGED
@@ -1 +1 @@
-
{"version":3,"file":"car.d.ts","sourceRoot":"","sources":["../../src/ipfs/car.ts"],"names":[],"mappings":"AAAA,OAAO,EAAE,SAAS,EAAE,SAAS,EAAE,MAAM,WAAW,CAAA;AAGhD,OAAO,EAAa,GAAG,EAAE,MAAM,cAAc,CAAA;AAE7C,OAAO,EAAU,yBAAyB,EAAE,SAAS,EAAE,MAAM,uBAAuB,CAAA;AAOpF,MAAM,MAAM,SAAS,GAAG,GAAG,CAAA;AAC3B,MAAM,MAAM,WAAW,GAAG,UAAU,CAAC,SAAS,CAAC,KAAK,CAAC,CAAC,CAAC,CAAC,CAAC,CAAA;AAEzD,MAAM,WAAW,aAAa;IAC7B,GAAG,CAAC,GAAG,EAAE,GAAG,GAAG,WAAW,CAAC,UAAU,CAAC,CAAA;CACtC;AAED,MAAM,WAAW,UAAU;IAC1B,OAAO,EAAE,GAAG,CAAA;IAEZ,UAAU,EAAE,aAAa,CAAA;CACzB;AAED,0CAA0C;AAC1C,wBAAsB,gBAAgB,CAAC,GAAG,EAAE,SAAS;;;;;;;;;;;;;;;GAGpD;AAED,wBAAsB,mBAAmB,CAAC,EAAE,OAAO,EAAE,UAAU,EAAE,EAAE,UAAU,EAAE,cAAc,GAAE,GAAG,EAAO;;;;;;;;;;;;;;;GA6DxG;AAED,wBAAsB,cAAc,CAAC,GAAG,EAAE,SAAS;;;;;GAmBlD;AACD,wBAAsB,eAAe,CAAC,UAAU,EAAE,aAAa,EAAE,GAAG,EAAE,GAAG,oBAYxE;AAGD,wBAAsB,UAAU,CAAC,KAAK,EAAE,SAAS,EAAE,MAAM,EAAE,WAAW,EAAE,
+
{"version":3,"file":"car.d.ts","sourceRoot":"","sources":["../../src/ipfs/car.ts"],"names":[],"mappings":"AAAA,OAAO,EAAE,SAAS,EAAE,SAAS,EAAE,MAAM,WAAW,CAAA;AAGhD,OAAO,EAAa,GAAG,EAAE,MAAM,cAAc,CAAA;AAE7C,OAAO,EAAU,yBAAyB,EAAE,SAAS,EAAE,MAAM,uBAAuB,CAAA;AAOpF,MAAM,MAAM,SAAS,GAAG,GAAG,CAAA;AAC3B,MAAM,MAAM,WAAW,GAAG,UAAU,CAAC,SAAS,CAAC,KAAK,CAAC,CAAC,CAAC,CAAC,CAAC,CAAA;AAEzD,MAAM,WAAW,aAAa;IAC7B,GAAG,CAAC,GAAG,EAAE,GAAG,GAAG,WAAW,CAAC,UAAU,CAAC,CAAA;CACtC;AAED,MAAM,WAAW,UAAU;IAC1B,OAAO,EAAE,GAAG,CAAA;IAEZ,UAAU,EAAE,aAAa,CAAA;CACzB;AAED,0CAA0C;AAC1C,wBAAsB,gBAAgB,CAAC,GAAG,EAAE,SAAS;;;;;;;;;;;;;;;GAGpD;AAED,wBAAsB,mBAAmB,CAAC,EAAE,OAAO,EAAE,UAAU,EAAE,EAAE,UAAU,EAAE,cAAc,GAAE,GAAG,EAAO;;;;;;;;;;;;;;;GA6DxG;AAED,wBAAsB,cAAc,CAAC,GAAG,EAAE,SAAS;;;;;GAmBlD;AACD,wBAAsB,eAAe,CAAC,UAAU,EAAE,aAAa,EAAE,GAAG,EAAE,GAAG,oBAYxE;AAGD,wBAAsB,UAAU,CAAC,KAAK,EAAE,SAAS,EAAE,MAAM,EAAE,WAAW,EAAE,uDASvE,CAAC,iEAAiE;AAWnE,wBAAsB,WAAW,CAAC,KAAK,EAAE,SAAS,EAAE,MAAM,EAAE,WAAW,EAAE,iBAQxE;AACD,wBAAsB,WAAW,CAAC,IAAI,EAAE,IAAI,GAAG,IAAI,GAAG,OAAO,CAAC,SAAS,CAAC,CAEvE;AAED,wBAAgB,sBAAsB,CAAC,UAAU,EAAE,2BAA2B,CAAC,UAAU,CAAC,GAAG,aAAa,CAAC,UAAU,CAAC,CAWrH"}
package/dist/ipfs/ipfs-utils.d.ts
CHANGED
@@ -3,7 +3,7 @@ import { CID } from 'multiformats';
 import { Applog, ApplogEncNoCid, ApplogNoCid, ApplogOfSomeSort, CidString, IpnsString } from '../applog/datom-types';
 export declare const MULTICODEC_IPNS_KEY = 114;
 export declare function prepareForPub(log: ApplogOfSomeSort, without?: string[]): {
-log: import("
+log: import("..").ApplogEnc;
 cid: CidString;
 } | {
 log: Applog;
@@ -20,7 +20,7 @@ export declare function encodeBlock(jsonObject: any): {
 bytes: dagJson.ByteView<any>;
 cid: CID;
 };
-export declare function encodeBlockOriginal(jsonObject: any): Promise<import("multiformats").BlockView<
+export declare function encodeBlockOriginal(jsonObject: any): Promise<import("multiformats").BlockView<any, 297, 18, 1>>;
 export declare function tryParseCID(cidString: CidString): {
 cid: CID<unknown, number, number, import("multiformats").Version>;
 errors: any[];
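The two changed ipfs-utils.d.ts declarations pin down return types that were previously truncated: prepareForPub's first branch now names its log as ApplogEnc, and encodeBlockOriginal's BlockView is fully parameterised as <any, 297, 18, 1> (multicodec 297 is dag-json, multihash 18 is sha2-256, CID version 1). A sketch exercising the synchronous encoder, the async variant and tryParseCID; the '@wovin/core' import path, and encodeBlock/encodeBlockOriginal being re-exported from the package root at all, are assumptions:

import { encodeBlock, encodeBlockOriginal, tryParseCID } from '@wovin/core'

async function encodeDemo() {
  const { bytes, cid } = encodeBlock({ hello: 'world' })  // synchronous dag-json encode: raw bytes plus their CID
  const parsed = tryParseCID(cid.toString())
  if (parsed.errors.length) console.warn('unexpected: encodeBlock produced an invalid CID', parsed.errors)

  // async variant, now declared as Promise<BlockView<any, 297, 18, 1>>
  const block = await encodeBlockOriginal({ hello: 'world' })
  return { bytes, block }
}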
package/dist/ipfs/ipfs-utils.d.ts.map
CHANGED
@@ -1 +1 @@
-
{"version":3,"file":"ipfs-utils.d.ts","sourceRoot":"","sources":["../../src/ipfs/ipfs-utils.ts"],"names":[],"mappings":"AAAA,OAAO,KAAK,OAAO,MAAM,gBAAgB,CAAA;AAGzC,OAAO,EAAE,GAAG,EAAoB,MAAM,cAAc,CAAA;AAGpD,OAAO,EAAE,MAAM,EAAE,cAAc,EAAE,WAAW,EAAE,gBAAgB,EAAE,SAAS,EAAE,UAAU,EAAqB,MAAM,uBAAuB,CAAA;AASvI,eAAO,MAAM,mBAAmB,MAAO,CAAA;AAEvC,wBAAgB,aAAa,CAAC,GAAG,EAAE,gBAAgB,EAAE,OAAO,GAAE,MAAM,EAAY
+
{"version":3,"file":"ipfs-utils.d.ts","sourceRoot":"","sources":["../../src/ipfs/ipfs-utils.ts"],"names":[],"mappings":"AAAA,OAAO,KAAK,OAAO,MAAM,gBAAgB,CAAA;AAGzC,OAAO,EAAE,GAAG,EAAoB,MAAM,cAAc,CAAA;AAGpD,OAAO,EAAE,MAAM,EAAE,cAAc,EAAE,WAAW,EAAE,gBAAgB,EAAE,SAAS,EAAE,UAAU,EAAqB,MAAM,uBAAuB,CAAA;AASvI,eAAO,MAAM,mBAAmB,MAAO,CAAA;AAEvC,wBAAgB,aAAa,CAAC,GAAG,EAAE,gBAAgB,EAAE,OAAO,GAAE,MAAM,EAAY;;;;SAoBnD,MAAM;;EAClC;AAED,wBAAgB,qBAAqB,CAAC,GAAG,EAAE,WAAW,4BAErD;AACD,wBAAgB,YAAY,CAAC,GAAG,EAAE,WAAW,GAAG,cAAc,GAAG;IAAE,KAAK,EAAE,OAAO,CAAC,QAAQ,CAAC,GAAG,CAAC,CAAC;IAAC,GAAG,EAAE,GAAG,CAAA;CAAE,CAE1G;AAED,wBAAgB,UAAU,CAAC,KAAK,EAAE,OAAO,CAAC,QAAQ,CAAC,GAAG,CAAC,4BAOtD;AACD,gDAAgD;AAChD,wBAAgB,WAAW,CAAC,UAAU,EAAE,GAAG,GAAG;IAAE,KAAK,EAAE,OAAO,CAAC,QAAQ,CAAC,GAAG,CAAC,CAAC;IAAC,GAAG,EAAE,GAAG,CAAA;CAAE,CAIvF;AAED,wBAAsB,mBAAmB,CAAC,UAAU,EAAE,GAAG,8DAQxD;AAED,wBAAgB,WAAW,CAAC,SAAS,EAAE,SAAS;;;;EAsB/C;AACD,wBAAgB,YAAY,CAAC,GAAG,EAAE,GAAG,WAEpC;AAED,wBAAgB,WAAW,CAAC,GAAG,EAAE,GAAG,UAMnC;AACD,wBAAgB,YAAY,CAAC,GAAG,EAAE,GAAG,GAEL,UAAU,CACzC;AACD,wBAAgB,sBAAsB,CAAC,YAAY,EAAE,GAAG,GAAG,SAAS,gEAMnE;AACD,wBAAgB,YAAY,CAAC,YAAY,EAAE,GAAG,GAAG,SAAS,EAAE,YAAY,EAAE,GAAG,GAAG,SAAS,WAMxF;AACD,wBAAgB,WAAW,CAAC,IAAI,EAAE,CAAC,GAAG,GAAG,SAAS,CAAC,EAAE,GAAG,GAAG,CAAC,SAAS,CAAC,EAAE,MAAM,EAAE,GAAG,GAAG,SAAS,WAG9F"}
package/dist/ipfs.min.js
CHANGED
@@ -8,9 +8,9 @@ import {
 makeCarBlob,
 makeCarOut,
 streamReaderToIterable
-} from "./chunk-
+} from "./chunk-JEOQUHTK.min.js";
 import "./chunk-QPGEBDMJ.min.js";
-import "./chunk-
+import "./chunk-OBMVNVJR.min.js";
 import {
 MULTICODEC_IPNS_KEY,
 areCidsEqual,
@@ -26,10 +26,10 @@ import {
 prepareForPub,
 toIpnsString,
 tryParseCID
-} from "./chunk-
-import "./chunk-
-import "./chunk-
-import "./chunk-
+} from "./chunk-NPCVLBCM.min.js";
+import "./chunk-KEHU7HGZ.min.js";
+import "./chunk-HYMC7W6S.min.js";
+import "./chunk-PHITDXZT.min.js";
 export {
 MULTICODEC_IPNS_KEY,
 areCidsEqual,