@wovin/core 0.0.0-ciao-mobx-955482e8
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/LICENSE +661 -0
- package/README.md +3 -0
- package/dist/applog/applog-helpers.d.ts +47 -0
- package/dist/applog/applog-helpers.d.ts.map +1 -0
- package/dist/applog/applog-utils.d.ts +57 -0
- package/dist/applog/applog-utils.d.ts.map +1 -0
- package/dist/applog/datom-types.d.ts +128 -0
- package/dist/applog/datom-types.d.ts.map +1 -0
- package/dist/applog.d.ts +4 -0
- package/dist/applog.d.ts.map +1 -0
- package/dist/applog.js +101 -0
- package/dist/applog.js.map +1 -0
- package/dist/blockstore/index.d.ts +21 -0
- package/dist/blockstore/index.d.ts.map +1 -0
- package/dist/blockstore.d.ts +2 -0
- package/dist/blockstore.d.ts.map +1 -0
- package/dist/blockstore.js +24 -0
- package/dist/blockstore.js.map +1 -0
- package/dist/chunk-6MQKRL6W.js +86 -0
- package/dist/chunk-6MQKRL6W.js.map +1 -0
- package/dist/chunk-7MW34UEO.js +40 -0
- package/dist/chunk-7MW34UEO.js.map +1 -0
- package/dist/chunk-7Z5YDQKK.js +1 -0
- package/dist/chunk-7Z5YDQKK.js.map +1 -0
- package/dist/chunk-CY4NLISM.js +144 -0
- package/dist/chunk-CY4NLISM.js.map +1 -0
- package/dist/chunk-E46VTKTZ.js +1 -0
- package/dist/chunk-E46VTKTZ.js.map +1 -0
- package/dist/chunk-O43W7UW6.js +434 -0
- package/dist/chunk-O43W7UW6.js.map +1 -0
- package/dist/chunk-XIQSYEV3.js +1604 -0
- package/dist/chunk-XIQSYEV3.js.map +1 -0
- package/dist/chunk-XVGW4QC3.js +55 -0
- package/dist/chunk-XVGW4QC3.js.map +1 -0
- package/dist/chunk-YDAKBU6Q.js +9 -0
- package/dist/chunk-YDAKBU6Q.js.map +1 -0
- package/dist/chunk-ZAADLBSB.js +36 -0
- package/dist/chunk-ZAADLBSB.js.map +1 -0
- package/dist/chunk-ZXCJRYD7.js +883 -0
- package/dist/chunk-ZXCJRYD7.js.map +1 -0
- package/dist/index.d.ts +8 -0
- package/dist/index.d.ts.map +1 -0
- package/dist/index.js +354 -0
- package/dist/index.js.map +1 -0
- package/dist/ipfs/car.d.ts +59 -0
- package/dist/ipfs/car.d.ts.map +1 -0
- package/dist/ipfs/fetch-snapshot-chain.d.ts +32 -0
- package/dist/ipfs/fetch-snapshot-chain.d.ts.map +1 -0
- package/dist/ipfs/ipfs-utils.d.ts +35 -0
- package/dist/ipfs/ipfs-utils.d.ts.map +1 -0
- package/dist/ipfs.d.ts +4 -0
- package/dist/ipfs.d.ts.map +1 -0
- package/dist/ipfs.js +60 -0
- package/dist/ipfs.js.map +1 -0
- package/dist/ipns/ipns-record.d.ts +34 -0
- package/dist/ipns/ipns-record.d.ts.map +1 -0
- package/dist/ipns.d.ts +2 -0
- package/dist/ipns.d.ts.map +1 -0
- package/dist/ipns.js +64 -0
- package/dist/ipns.js.map +1 -0
- package/dist/pubsub/connector.d.ts +9 -0
- package/dist/pubsub/connector.d.ts.map +1 -0
- package/dist/pubsub/pub-pull.d.ts +14 -0
- package/dist/pubsub/pub-pull.d.ts.map +1 -0
- package/dist/pubsub/pubsub-types.d.ts +72 -0
- package/dist/pubsub/pubsub-types.d.ts.map +1 -0
- package/dist/pubsub/snap-push.d.ts +41 -0
- package/dist/pubsub/snap-push.d.ts.map +1 -0
- package/dist/pubsub/ucan-example.d.ts +3 -0
- package/dist/pubsub/ucan-example.d.ts.map +1 -0
- package/dist/pubsub/ucan.d.ts +16 -0
- package/dist/pubsub/ucan.d.ts.map +1 -0
- package/dist/pubsub.d.ts +5 -0
- package/dist/pubsub.d.ts.map +1 -0
- package/dist/pubsub.js +31 -0
- package/dist/pubsub.js.map +1 -0
- package/dist/query/basic.d.ts +105 -0
- package/dist/query/basic.d.ts.map +1 -0
- package/dist/query/divergences.d.ts +12 -0
- package/dist/query/divergences.d.ts.map +1 -0
- package/dist/query/matchers.d.ts +4 -0
- package/dist/query/matchers.d.ts.map +1 -0
- package/dist/query/memoized.d.ts +66 -0
- package/dist/query/memoized.d.ts.map +1 -0
- package/dist/query/query-steps.d.ts +4 -0
- package/dist/query/query-steps.d.ts.map +1 -0
- package/dist/query/situations.d.ts +80 -0
- package/dist/query/situations.d.ts.map +1 -0
- package/dist/query/subscribable.d.ts +102 -0
- package/dist/query/subscribable.d.ts.map +1 -0
- package/dist/query/types.d.ts +70 -0
- package/dist/query/types.d.ts.map +1 -0
- package/dist/query.d.ts +8 -0
- package/dist/query.d.ts.map +1 -0
- package/dist/query.js +108 -0
- package/dist/query.js.map +1 -0
- package/dist/retrieve/index.d.ts +2 -0
- package/dist/retrieve/index.d.ts.map +1 -0
- package/dist/retrieve/update-thread.d.ts +64 -0
- package/dist/retrieve/update-thread.d.ts.map +1 -0
- package/dist/retrieve.d.ts +2 -0
- package/dist/retrieve.d.ts.map +1 -0
- package/dist/retrieve.js +14 -0
- package/dist/retrieve.js.map +1 -0
- package/dist/thread/basic.d.ts +60 -0
- package/dist/thread/basic.d.ts.map +1 -0
- package/dist/thread/filters.d.ts +47 -0
- package/dist/thread/filters.d.ts.map +1 -0
- package/dist/thread/mapped.d.ts +31 -0
- package/dist/thread/mapped.d.ts.map +1 -0
- package/dist/thread/utils.d.ts +23 -0
- package/dist/thread/utils.d.ts.map +1 -0
- package/dist/thread/writeable.d.ts +41 -0
- package/dist/thread/writeable.d.ts.map +1 -0
- package/dist/thread.d.ts +6 -0
- package/dist/thread.d.ts.map +1 -0
- package/dist/thread.js +54 -0
- package/dist/thread.js.map +1 -0
- package/dist/types/typescript-utils.d.ts +34 -0
- package/dist/types/typescript-utils.d.ts.map +1 -0
- package/dist/types.d.ts +2 -0
- package/dist/types.d.ts.map +1 -0
- package/dist/types.js +26 -0
- package/dist/types.js.map +1 -0
- package/dist/utils/debug-name.d.ts +13 -0
- package/dist/utils/debug-name.d.ts.map +1 -0
- package/dist/utils.d.ts +4 -0
- package/dist/utils.d.ts.map +1 -0
- package/dist/utils.js +9 -0
- package/dist/utils.js.map +1 -0
- package/package.json +110 -0
- package/src/applog/applog-helpers.ts +150 -0
- package/src/applog/applog-utils.ts +398 -0
- package/src/applog/datom-types.ts +148 -0
- package/src/applog.ts +3 -0
- package/src/blockstore/index.ts +36 -0
- package/src/blockstore.ts +1 -0
- package/src/index.ts +8 -0
- package/src/ipfs/car.ts +291 -0
- package/src/ipfs/fetch-snapshot-chain.ts +135 -0
- package/src/ipfs/ipfs-utils.ts +132 -0
- package/src/ipfs.ts +3 -0
- package/src/ipns/ipns-record.ts +115 -0
- package/src/ipns.ts +1 -0
- package/src/pubsub/UCAN Specs Overview.md +217 -0
- package/src/pubsub/connector.ts +9 -0
- package/src/pubsub/pub-pull.ts +31 -0
- package/src/pubsub/pubsub-types.ts +90 -0
- package/src/pubsub/snap-push.ts +277 -0
- package/src/pubsub/ucan-example.ts +61 -0
- package/src/pubsub/ucan.ts +56 -0
- package/src/pubsub.ts +4 -0
- package/src/query/basic.ts +1061 -0
- package/src/query/divergences.ts +50 -0
- package/src/query/matchers.ts +8 -0
- package/src/query/memoized.test.ts +151 -0
- package/src/query/memoized.ts +180 -0
- package/src/query/query-steps.ts +4 -0
- package/src/query/query.test.ts +536 -0
- package/src/query/situations.ts +261 -0
- package/src/query/subscribable.test.ts +245 -0
- package/src/query/subscribable.ts +225 -0
- package/src/query/types.ts +155 -0
- package/src/query.ts +7 -0
- package/src/retrieve/index.ts +1 -0
- package/src/retrieve/update-thread.ts +248 -0
- package/src/retrieve.ts +1 -0
- package/src/test/perf/query.1m.perf.test.ts +94 -0
- package/src/test/perf/query.perf.test.ts +389 -0
- package/src/test/perf/query.realdata.perf.test.ts +175 -0
- package/src/thread/basic.ts +209 -0
- package/src/thread/filters.ts +234 -0
- package/src/thread/mapped.ts +166 -0
- package/src/thread/utils.ts +146 -0
- package/src/thread/writeable.ts +163 -0
- package/src/thread.ts +5 -0
- package/src/types/typescript-utils.ts +64 -0
- package/src/types.ts +1 -0
- package/src/utils/debug-name.ts +54 -0
- package/src/utils.ts +4 -0
|
@@ -0,0 +1,144 @@
|
|
|
1
|
+
import {
|
|
2
|
+
unchunkApplogsBlock
|
|
3
|
+
} from "./chunk-O43W7UW6.js";
|
|
4
|
+
import {
|
|
5
|
+
areCidsEqual,
|
|
6
|
+
removeDuplicateAppLogs
|
|
7
|
+
} from "./chunk-XIQSYEV3.js";
|
|
8
|
+
|
|
9
|
+
// src/retrieve/update-thread.ts
|
|
10
|
+
import * as dagJson from "@ipld/dag-json";
|
|
11
|
+
import { Logger } from "besonders-logger";
|
|
12
|
+
import { CID } from "multiformats/cid";
|
|
13
|
+
var { WARN, LOG, DEBUG, VERBOSE, ERROR } = Logger.setup(Logger.INFO);
|
|
14
|
+
/**
 * Wrap a BlockRetriever so fetched blocks flow through a BlockStore:
 * single-block `get` delegates to the store, while `getDag` streams from
 * the source and persists every block into the store as it passes through.
 */
function withBlockCache(source, store) {
  const cachingGetDag = async function* (cid) {
    for await (const fetched of source.getDag(cid)) {
      // Persist before yielding so consumers can rely on the store.
      await store.put(fetched.cid, fetched.bytes);
      yield fetched;
    }
  };
  return {
    get: (cid) => store.get(cid),
    getDag: cachingGetDag
  };
}
|
|
25
|
+
/**
 * Volatile in-memory block store backed by a Map.
 * Blocks are keyed by their CIDv1 string form so v0/v1 CIDs of the
 * same block resolve to the same entry.
 */
function createMemoryBlockStore() {
  const byKey = new Map();
  const keyOf = (cid) => cid.toV1().toString();
  return {
    get: (cid) => byKey.get(keyOf(cid)),
    put: (cid, bytes) => {
      byKey.set(keyOf(cid), bytes);
    }
  };
}
|
|
36
|
+
/**
 * Fetch a block's bytes from the store and dag-json-decode them.
 * Returns null when the store has no bytes for the CID.
 */
async function getDecodedBlock(blockStore, cid) {
  const raw = blockStore.get(cid);
  return raw ? dagJson.decode(raw) : null;
}
|
|
41
|
+
/**
 * Fetch a snapshot chain starting at `cid` via the given retriever, decode
 * the applogs from every snapshot, and insert the missing ones into `thread`.
 * Walks backwards through `root.prev` links; stops at `excludeSnapshotCID`,
 * at `stopAtCounter`, or after `maxDepth` snapshots (default 100).
 * Returns { cid, applogs, insertedCount, snapshotCount, counterRange }.
 * Throws (via ERROR) on a loop in the chain or a missing applogs block.
 */
async function updateThreadFromSnapshot(thread, cid, retriever, options) {
  const { excludeSnapshotCID, stopAtCounter, maxDepth = 100 } = options ?? {};
  DEBUG("[updateThreadFromSnapshot] starting from", cid.toString(), {
    excludeSnapshotCID: excludeSnapshotCID?.toString(),
    stopAtCounter,
    maxDepth
  });
  // Local scratch store for all blocks fetched during this walk.
  const blockStore = createMemoryBlockStore();
  const visited = /* @__PURE__ */ new Set();
  let currentCID = cid;
  let snapshotCount = 0;
  const allApplogs = [];
  // Counter range seen while walking, plus the previous counter for gap checks.
  let minCounter = Infinity;
  let maxCounter = -Infinity;
  let lastCounter;
  while (currentCID && snapshotCount < maxDepth) {
    const cidStr = currentCID.toString();
    // Loop detection: a revisited CID means the prev-chain is cyclic.
    if (visited.has(cidStr)) {
      throw ERROR("[updateThreadFromSnapshot] snapshot chain has a loop", {
        currentCID: cidStr,
        visited: [...visited]
      });
    }
    visited.add(cidStr);
    // Check the exclusion stop condition BEFORE fetching any content.
    if (excludeSnapshotCID && areCidsEqual(currentCID, excludeSnapshotCID)) {
      DEBUG("[updateThreadFromSnapshot] reached excludeSnapshotCID, stopping", excludeSnapshotCID.toString());
      break;
    }
    // 1. Fetch and decode the root block (holds applogs/info/prev links).
    DEBUG("[updateThreadFromSnapshot] fetching root block", cidStr);
    const rootBytes = await retriever.get(currentCID);
    blockStore.put(currentCID, rootBytes);
    const root = dagJson.decode(rootBytes);
    // Track counter range and validate sequentiality (walking backwards,
    // each prevCounter should be exactly one less than the last).
    if (typeof root.prevCounter === "number") {
      minCounter = Math.min(minCounter, root.prevCounter);
      maxCounter = Math.max(maxCounter, root.prevCounter);
      if (lastCounter !== void 0 && root.prevCounter !== lastCounter - 1) {
        WARN("[updateThreadFromSnapshot] counter gap detected", {
          expected: lastCounter - 1,
          got: root.prevCounter
        });
      }
      lastCounter = root.prevCounter;
    }
    // Counter-based stop condition (we already went far enough back).
    if (stopAtCounter !== void 0 && typeof root.prevCounter === "number" && root.prevCounter <= stopAtCounter) {
      DEBUG("[updateThreadFromSnapshot] reached stopAtCounter", { stopAtCounter, prevCounter: root.prevCounter });
      break;
    }
    // 2. Fetch the applogs sub-DAG into the local store.
    DEBUG("[updateThreadFromSnapshot] fetching applogs", root.applogs.toString());
    for await (const { cid: blockCid, bytes } of retriever.getDag(root.applogs)) {
      blockStore.put(blockCid, bytes);
    }
    // 3. Fetch the info sub-DAG into the local store.
    DEBUG("[updateThreadFromSnapshot] fetching info", root.info.toString());
    for await (const { cid: blockCid, bytes } of retriever.getDag(root.info)) {
      blockStore.put(blockCid, bytes);
    }
    // Decode this snapshot's applogs block; its absence is fatal.
    const applogsBlock = await getDecodedBlock(blockStore, root.applogs);
    if (!applogsBlock) {
      throw ERROR("[updateThreadFromSnapshot] applogs block not found", { cid: root.applogs.toString() });
    }
    // unchunkApplogsBlock handles both chunked and non-chunked formats.
    const applogCIDs = await unchunkApplogsBlock(applogsBlock, {
      get: async (cid2) => blockStore.get(cid2)
    });
    // Resolve each applog CID to the actual applog data.
    for (const applogCID of applogCIDs) {
      const applog = await getDecodedBlock(blockStore, applogCID);
      if (!applog) {
        // Missing individual applogs are tolerated (warn and skip).
        WARN("[updateThreadFromSnapshot] applog not found:", applogCID.toString());
        continue;
      }
      // Normalize the pv field to a CIDv1 string if it's a CID instance.
      if (applog.pv instanceof CID) {
        applog.pv = applog.pv.toV1().toString();
      }
      allApplogs.push({
        ...applog,
        cid: applogCID.toV1().toString()
      });
    }
    snapshotCount++;
    currentCID = root.prev; // Move to the previous snapshot in the chain.
  }
  DEBUG("[updateThreadFromSnapshot] fetched", {
    snapshotCount,
    applogCount: allApplogs.length,
    rootCID: cid.toString()
  });
  // Deduplicate applogs (snapshots in the chain may overlap).
  const deduplicated = removeDuplicateAppLogs(allApplogs, "cleanup");
  // Insert into the thread; insertMissing returns only the newly added logs.
  const inserted = thread.insertMissing(deduplicated, false);
  DEBUG("[updateThreadFromSnapshot] inserted", {
    insertedCount: inserted.length,
    duplicateCount: deduplicated.length - inserted.length
  });
  return {
    cid,
    applogs: deduplicated,
    insertedCount: inserted.length,
    snapshotCount,
    // counterRange stays undefined when no snapshot carried a prevCounter.
    counterRange: minCounter !== Infinity ? { minCounter, maxCounter } : void 0
  };
}
|
|
139
|
+
|
|
140
|
+
export {
|
|
141
|
+
withBlockCache,
|
|
142
|
+
updateThreadFromSnapshot
|
|
143
|
+
};
|
|
144
|
+
//# sourceMappingURL=chunk-CY4NLISM.js.map
|
|
@@ -0,0 +1 @@
|
|
|
1
|
+
{"version":3,"sources":["../src/retrieve/update-thread.ts"],"sourcesContent":["import { CarReader } from '@ipld/car'\nimport * as dagJson from '@ipld/dag-json'\nimport { Logger } from 'besonders-logger'\nimport { CID } from 'multiformats/cid'\nimport type { Applog } from '../applog/datom-types.ts'\nimport { removeDuplicateAppLogs } from '../applog/applog-utils.ts'\nimport type { SnapRootBlock, SnapBlockLogsOrChunks } from '../pubsub/pubsub-types.ts'\nimport { unchunkApplogsBlock } from '../pubsub/snap-push.ts'\nimport { areCidsEqual } from '../ipfs/ipfs-utils.ts'\nimport type { WriteableThread } from '../thread/writeable.ts'\nimport type { BlockStore } from '../blockstore/index.ts'\n\nconst { WARN, LOG, DEBUG, VERBOSE, ERROR } = Logger.setup(Logger.INFO) // eslint-disable-line no-unused-vars\n\n/**\n * Block retrieval abstraction - fetch or get blocks.\n * Implemented by gateway retriever or local blockstore.\n */\nexport interface BlockRetriever {\n\t/** Get single block by CID */\n\tget(cid: CID): Promise<Uint8Array>\n\t/** Get all blocks in DAG rooted at CID (for applogs/info sub-DAGs) */\n\tgetDag(cid: CID): AsyncIterable<{ cid: CID; bytes: Uint8Array }>\n}\n\n/**\n * Wrap a BlockRetriever so fetched blocks flow through a BlockStore.\n * - get: delegates to store.get() (which handles local-first / remote fallback)\n * - getDag: streams from source, puts each block into store\n */\nexport function withBlockCache(\n\tsource: BlockRetriever,\n\tstore: BlockStore,\n): BlockRetriever {\n\treturn {\n\t\tget: (cid) => store.get(cid),\n\t\tasync *getDag(cid) {\n\t\t\tfor await (const block of source.getDag(cid)) {\n\t\t\t\tawait store.put(block.cid, block.bytes)\n\t\t\t\tyield block\n\t\t\t}\n\t\t},\n\t}\n}\n\n/**\n * Options for updateThreadFromSnapshot\n */\nexport interface UpdateOptions {\n\t/** CID of last included snapshot - exclude this and older snapshots */\n\texcludeSnapshotCID?: CID\n\t/** Stop when we reach this counter (walking backwards) 
*/\n\tstopAtCounter?: number\n\t/** Maximum number of snapshots to traverse (default: 100) */\n\tmaxDepth?: number\n}\n\n/**\n * Result from updateThreadFromSnapshot\n */\nexport interface UpdateResult {\n\t/** Root CID that was fetched */\n\tcid: CID\n\t/** All applogs decoded from the chain */\n\tapplogs: Applog[]\n\t/** Count of applogs actually inserted (not duplicates) */\n\tinsertedCount: number\n\t/** Number of snapshots traversed */\n\tsnapshotCount: number\n\t/** Counter range encountered (min/max) */\n\tcounterRange?: { minCounter: number; maxCounter: number }\n}\n\n/**\n * Simple in-memory block store used during snapshot chain fetch.\n */\ninterface MemoryBlockStore {\n\tget(cid: CID): Uint8Array | undefined\n\tput(cid: CID, bytes: Uint8Array): void\n}\n\nfunction createMemoryBlockStore(): MemoryBlockStore {\n\tconst blocks = new Map<string, Uint8Array>()\n\treturn {\n\t\tget(cid: CID) {\n\t\t\treturn blocks.get(cid.toV1().toString())\n\t\t},\n\t\tput(cid: CID, bytes: Uint8Array) {\n\t\t\tblocks.set(cid.toV1().toString(), bytes)\n\t\t},\n\t}\n}\n\nasync function getDecodedBlock<T>(blockStore: MemoryBlockStore, cid: CID): Promise<T | null> {\n\tconst bytes = blockStore.get(cid)\n\tif (!bytes) return null\n\treturn dagJson.decode(bytes) as T\n}\n\n/**\n * Fetch snapshot chain from CID using a BlockRetriever, decode applogs, insert into thread.\n * Stops before excludeSnapshotCID if provided (incremental update).\n *\n * @param thread - WriteableThread to insert applogs into\n * @param cid - Root CID of the snapshot to start from\n * @param retriever - BlockRetriever for fetching blocks\n * @param options - Optional configuration\n * @returns UpdateResult with applogs and counts\n */\nexport async function updateThreadFromSnapshot(\n\tthread: WriteableThread,\n\tcid: CID,\n\tretriever: BlockRetriever,\n\toptions?: UpdateOptions\n): Promise<UpdateResult> {\n\tconst { excludeSnapshotCID, stopAtCounter, maxDepth = 100 } = options ?? 
{}\n\n\tDEBUG('[updateThreadFromSnapshot] starting from', cid.toString(), {\n\t\texcludeSnapshotCID: excludeSnapshotCID?.toString(),\n\t\tstopAtCounter,\n\t\tmaxDepth,\n\t})\n\n\tconst blockStore = createMemoryBlockStore()\n\tconst visited = new Set<string>()\n\tlet currentCID: CID | undefined = cid\n\tlet snapshotCount = 0\n\tconst allApplogs: Applog[] = []\n\tlet minCounter = Infinity\n\tlet maxCounter = -Infinity\n\tlet lastCounter: number | undefined\n\n\twhile (currentCID && snapshotCount < maxDepth) {\n\t\tconst cidStr = currentCID.toString()\n\n\t\t// Loop detection\n\t\tif (visited.has(cidStr)) {\n\t\t\tthrow ERROR('[updateThreadFromSnapshot] snapshot chain has a loop', {\n\t\t\t\tcurrentCID: cidStr,\n\t\t\t\tvisited: [...visited],\n\t\t\t})\n\t\t}\n\t\tvisited.add(cidStr)\n\n\t\t// Check stop condition BEFORE fetching content\n\t\tif (excludeSnapshotCID && areCidsEqual(currentCID, excludeSnapshotCID)) {\n\t\t\tDEBUG('[updateThreadFromSnapshot] reached excludeSnapshotCID, stopping', excludeSnapshotCID.toString())\n\t\t\tbreak\n\t\t}\n\n\t\t// 1. 
Fetch root block\n\t\tDEBUG('[updateThreadFromSnapshot] fetching root block', cidStr)\n\t\tconst rootBytes = await retriever.get(currentCID)\n\t\tblockStore.put(currentCID, rootBytes)\n\n\t\t// Parse root to get applogs, info, prev CIDs\n\t\tconst root = dagJson.decode(rootBytes) as SnapRootBlock\n\n\t\t// Track counter range and validate sequentiality\n\t\tif (typeof root.prevCounter === 'number') {\n\t\t\tminCounter = Math.min(minCounter, root.prevCounter)\n\t\t\tmaxCounter = Math.max(maxCounter, root.prevCounter)\n\n\t\t\t// Validate sequentiality (walking backwards, counter should decrease)\n\t\t\tif (lastCounter !== undefined && root.prevCounter !== lastCounter - 1) {\n\t\t\t\tWARN('[updateThreadFromSnapshot] counter gap detected', {\n\t\t\t\t\texpected: lastCounter - 1,\n\t\t\t\t\tgot: root.prevCounter,\n\t\t\t\t})\n\t\t\t}\n\t\t\tlastCounter = root.prevCounter\n\t\t}\n\n\t\t// Stop condition based on counter\n\t\tif (stopAtCounter !== undefined && typeof root.prevCounter === 'number' && root.prevCounter <= stopAtCounter) {\n\t\t\tDEBUG('[updateThreadFromSnapshot] reached stopAtCounter', { stopAtCounter, prevCounter: root.prevCounter })\n\t\t\tbreak\n\t\t}\n\n\t\t// 2. Fetch applogs DAG\n\t\tDEBUG('[updateThreadFromSnapshot] fetching applogs', root.applogs.toString())\n\t\tfor await (const { cid: blockCid, bytes } of retriever.getDag(root.applogs)) {\n\t\t\tblockStore.put(blockCid, bytes)\n\t\t}\n\n\t\t// 3. 
Fetch info DAG\n\t\tDEBUG('[updateThreadFromSnapshot] fetching info', root.info.toString())\n\t\tfor await (const { cid: blockCid, bytes } of retriever.getDag(root.info)) {\n\t\t\tblockStore.put(blockCid, bytes)\n\t\t}\n\n\t\t// Decode applogs from this snapshot\n\t\tconst applogsBlock = await getDecodedBlock<SnapBlockLogsOrChunks>(blockStore, root.applogs)\n\t\tif (!applogsBlock) {\n\t\t\tthrow ERROR('[updateThreadFromSnapshot] applogs block not found', { cid: root.applogs.toString() })\n\t\t}\n\n\t\t// Use the unchunk helper which handles both chunked and non-chunked formats\n\t\tconst applogCIDs = await unchunkApplogsBlock(applogsBlock, {\n\t\t\tget: async (cid: CID) => blockStore.get(cid)!,\n\t\t})\n\n\t\t// Resolve each applog CID to actual applog data\n\t\tfor (const applogCID of applogCIDs) {\n\t\t\tconst applog = await getDecodedBlock<Applog>(blockStore, applogCID)\n\t\t\tif (!applog) {\n\t\t\t\tWARN('[updateThreadFromSnapshot] applog not found:', applogCID.toString())\n\t\t\t\tcontinue\n\t\t\t}\n\t\t\t// Normalize pv field if it's a CID instance\n\t\t\tif ((applog.pv as any) instanceof CID) {\n\t\t\t\tapplog.pv = (applog.pv as any as CID).toV1().toString()\n\t\t\t}\n\t\t\tallApplogs.push({\n\t\t\t\t...applog,\n\t\t\t\tcid: applogCID.toV1().toString(),\n\t\t\t})\n\t\t}\n\n\t\tsnapshotCount++\n\t\tcurrentCID = root.prev // Move to previous snapshot\n\t}\n\n\tDEBUG('[updateThreadFromSnapshot] fetched', {\n\t\tsnapshotCount,\n\t\tapplogCount: allApplogs.length,\n\t\trootCID: cid.toString(),\n\t})\n\n\t// Deduplicate applogs (in case of overlapping snapshots)\n\tconst deduplicated = removeDuplicateAppLogs(allApplogs, 'cleanup')\n\n\t// Insert into thread\n\tconst inserted = thread.insertMissing(deduplicated, false)\n\n\tDEBUG('[updateThreadFromSnapshot] inserted', {\n\t\tinsertedCount: inserted.length,\n\t\tduplicateCount: deduplicated.length - inserted.length,\n\t})\n\n\treturn {\n\t\tcid,\n\t\tapplogs: deduplicated,\n\t\tinsertedCount: 
inserted.length,\n\t\tsnapshotCount,\n\t\tcounterRange: minCounter !== Infinity ? { minCounter, maxCounter } : undefined,\n\t}\n}\n"],"mappings":";;;;;;;;;AACA,YAAY,aAAa;AACzB,SAAS,cAAc;AACvB,SAAS,WAAW;AASpB,IAAM,EAAE,MAAM,KAAK,OAAO,SAAS,MAAM,IAAI,OAAO,MAAM,OAAO,IAAI;AAkB9D,SAAS,eACf,QACA,OACiB;AACjB,SAAO;AAAA,IACN,KAAK,CAAC,QAAQ,MAAM,IAAI,GAAG;AAAA,IAC3B,OAAO,OAAO,KAAK;AAClB,uBAAiB,SAAS,OAAO,OAAO,GAAG,GAAG;AAC7C,cAAM,MAAM,IAAI,MAAM,KAAK,MAAM,KAAK;AACtC,cAAM;AAAA,MACP;AAAA,IACD;AAAA,EACD;AACD;AAsCA,SAAS,yBAA2C;AACnD,QAAM,SAAS,oBAAI,IAAwB;AAC3C,SAAO;AAAA,IACN,IAAI,KAAU;AACb,aAAO,OAAO,IAAI,IAAI,KAAK,EAAE,SAAS,CAAC;AAAA,IACxC;AAAA,IACA,IAAI,KAAU,OAAmB;AAChC,aAAO,IAAI,IAAI,KAAK,EAAE,SAAS,GAAG,KAAK;AAAA,IACxC;AAAA,EACD;AACD;AAEA,eAAe,gBAAmB,YAA8B,KAA6B;AAC5F,QAAM,QAAQ,WAAW,IAAI,GAAG;AAChC,MAAI,CAAC,MAAO,QAAO;AACnB,SAAe,eAAO,KAAK;AAC5B;AAYA,eAAsB,yBACrB,QACA,KACA,WACA,SACwB;AACxB,QAAM,EAAE,oBAAoB,eAAe,WAAW,IAAI,IAAI,WAAW,CAAC;AAE1E,QAAM,4CAA4C,IAAI,SAAS,GAAG;AAAA,IACjE,oBAAoB,oBAAoB,SAAS;AAAA,IACjD;AAAA,IACA;AAAA,EACD,CAAC;AAED,QAAM,aAAa,uBAAuB;AAC1C,QAAM,UAAU,oBAAI,IAAY;AAChC,MAAI,aAA8B;AAClC,MAAI,gBAAgB;AACpB,QAAM,aAAuB,CAAC;AAC9B,MAAI,aAAa;AACjB,MAAI,aAAa;AACjB,MAAI;AAEJ,SAAO,cAAc,gBAAgB,UAAU;AAC9C,UAAM,SAAS,WAAW,SAAS;AAGnC,QAAI,QAAQ,IAAI,MAAM,GAAG;AACxB,YAAM,MAAM,wDAAwD;AAAA,QACnE,YAAY;AAAA,QACZ,SAAS,CAAC,GAAG,OAAO;AAAA,MACrB,CAAC;AAAA,IACF;AACA,YAAQ,IAAI,MAAM;AAGlB,QAAI,sBAAsB,aAAa,YAAY,kBAAkB,GAAG;AACvE,YAAM,mEAAmE,mBAAmB,SAAS,CAAC;AACtG;AAAA,IACD;AAGA,UAAM,kDAAkD,MAAM;AAC9D,UAAM,YAAY,MAAM,UAAU,IAAI,UAAU;AAChD,eAAW,IAAI,YAAY,SAAS;AAGpC,UAAM,OAAe,eAAO,SAAS;AAGrC,QAAI,OAAO,KAAK,gBAAgB,UAAU;AACzC,mBAAa,KAAK,IAAI,YAAY,KAAK,WAAW;AAClD,mBAAa,KAAK,IAAI,YAAY,KAAK,WAAW;AAGlD,UAAI,gBAAgB,UAAa,KAAK,gBAAgB,cAAc,GAAG;AACtE,aAAK,mDAAmD;AAAA,UACvD,UAAU,cAAc;AAAA,UACxB,KAAK,KAAK;AAAA,QACX,CAAC;AAAA,MACF;AACA,oBAAc,KAAK;AAAA,IACpB;AAGA,QAAI,kBAAkB,UAAa,OAAO,KAAK,gBAAgB,YAAY,KAAK,eAAe,eAAe;AAC7G,YAAM,oDAAoD,EAAE,eAAe,aAAa,KAAK,YAAY,CAAC;AAC1G;AAAA,IACD;AAGA,UAAM,+CAA+C,KAAK,QAAQ,SAAS
,CAAC;AAC5E,qBAAiB,EAAE,KAAK,UAAU,MAAM,KAAK,UAAU,OAAO,KAAK,OAAO,GAAG;AAC5E,iBAAW,IAAI,UAAU,KAAK;AAAA,IAC/B;AAGA,UAAM,4CAA4C,KAAK,KAAK,SAAS,CAAC;AACtE,qBAAiB,EAAE,KAAK,UAAU,MAAM,KAAK,UAAU,OAAO,KAAK,IAAI,GAAG;AACzE,iBAAW,IAAI,UAAU,KAAK;AAAA,IAC/B;AAGA,UAAM,eAAe,MAAM,gBAAuC,YAAY,KAAK,OAAO;AAC1F,QAAI,CAAC,cAAc;AAClB,YAAM,MAAM,sDAAsD,EAAE,KAAK,KAAK,QAAQ,SAAS,EAAE,CAAC;AAAA,IACnG;AAGA,UAAM,aAAa,MAAM,oBAAoB,cAAc;AAAA,MAC1D,KAAK,OAAOA,SAAa,WAAW,IAAIA,IAAG;AAAA,IAC5C,CAAC;AAGD,eAAW,aAAa,YAAY;AACnC,YAAM,SAAS,MAAM,gBAAwB,YAAY,SAAS;AAClE,UAAI,CAAC,QAAQ;AACZ,aAAK,gDAAgD,UAAU,SAAS,CAAC;AACzE;AAAA,MACD;AAEA,UAAK,OAAO,cAAsB,KAAK;AACtC,eAAO,KAAM,OAAO,GAAkB,KAAK,EAAE,SAAS;AAAA,MACvD;AACA,iBAAW,KAAK;AAAA,QACf,GAAG;AAAA,QACH,KAAK,UAAU,KAAK,EAAE,SAAS;AAAA,MAChC,CAAC;AAAA,IACF;AAEA;AACA,iBAAa,KAAK;AAAA,EACnB;AAEA,QAAM,sCAAsC;AAAA,IAC3C;AAAA,IACA,aAAa,WAAW;AAAA,IACxB,SAAS,IAAI,SAAS;AAAA,EACvB,CAAC;AAGD,QAAM,eAAe,uBAAuB,YAAY,SAAS;AAGjE,QAAM,WAAW,OAAO,cAAc,cAAc,KAAK;AAEzD,QAAM,uCAAuC;AAAA,IAC5C,eAAe,SAAS;AAAA,IACxB,gBAAgB,aAAa,SAAS,SAAS;AAAA,EAChD,CAAC;AAED,SAAO;AAAA,IACN;AAAA,IACA,SAAS;AAAA,IACT,eAAe,SAAS;AAAA,IACxB;AAAA,IACA,cAAc,eAAe,WAAW,EAAE,YAAY,WAAW,IAAI;AAAA,EACtE;AACD;","names":["cid"]}
|
|
@@ -0,0 +1 @@
|
|
|
1
|
+
//# sourceMappingURL=chunk-E46VTKTZ.js.map
|
|
@@ -0,0 +1 @@
|
|
|
1
|
+
{"version":3,"sources":[],"sourcesContent":[],"mappings":"","names":[]}
|
|
@@ -0,0 +1,434 @@
|
|
|
1
|
+
import {
|
|
2
|
+
keepTruthy
|
|
3
|
+
} from "./chunk-YDAKBU6Q.js";
|
|
4
|
+
import {
|
|
5
|
+
lastWriteWins
|
|
6
|
+
} from "./chunk-ZXCJRYD7.js";
|
|
7
|
+
import {
|
|
8
|
+
areCidsEqual,
|
|
9
|
+
encodeBlockOriginal,
|
|
10
|
+
ensureTsPvAndFinalizeApplog,
|
|
11
|
+
getLogsFromThread,
|
|
12
|
+
prepareForPub,
|
|
13
|
+
rollingFilter
|
|
14
|
+
} from "./chunk-XIQSYEV3.js";
|
|
15
|
+
|
|
16
|
+
// src/pubsub/snap-push.ts
|
|
17
|
+
import * as dagJson2 from "@ipld/dag-json";
|
|
18
|
+
import { Logger as Logger2 } from "besonders-logger";
|
|
19
|
+
import stringify from "safe-stable-stringify";
|
|
20
|
+
|
|
21
|
+
// src/ipfs/car.ts
|
|
22
|
+
import { CarReader, CarWriter } from "@ipld/car";
|
|
23
|
+
import * as dagJson from "@ipld/dag-json";
|
|
24
|
+
import { Logger } from "besonders-logger";
|
|
25
|
+
import { CID } from "multiformats";
|
|
26
|
+
var { WARN, LOG, DEBUG, VERBOSE, ERROR } = Logger.setup(Logger.INFO);
|
|
27
|
+
/**
 * Decode a pub from a CAR reader: extract its root and blocks,
 * then decode the pub chain from those blocks.
 */
async function decodePubFromCar(car) {
  const extracted = await getBlocksOfCar(car);
  return decodePubFromBlocks(extracted);
}
|
|
31
|
+
/**
 * Decode a pub (snapshot chain) from a root CID plus a block store,
 * walking backwards through `root.prev` links and collecting all applogs.
 * Optionally stops when the next prev equals `stopAtCID`.
 * Returns { cid, info, applogsCID, applogs }.
 * NOTE(review): `_recursionTrace` is accepted but never read in this body.
 */
async function decodePubFromBlocks({ rootCID, blockStore }, _recursionTrace = [], stopAtCID) {
  if (!rootCID || !blockStore) {
    throw ERROR("Empty roots/blocks", { rootCID, blockStore });
  }
  let allApplogs = [];
  // info block of the newest (first-visited) snapshot only.
  let firstInfo = null;
  let currentCID = rootCID;
  const visited = /* @__PURE__ */ new Set();
  // applogs CID of the newest snapshot (only captured for the info-bearing format).
  let applogsCID = null;
  while (currentCID) {
    const cidStr = currentCID.toString();
    // Loop detection: a revisited CID means the prev-chain is cyclic.
    if (visited.has(cidStr)) {
      throw ERROR("[decodePubFromBlocks] pub chain has a loop", {
        currentCID: cidStr,
        visited: [...visited]
      });
    }
    visited.add(cidStr);
    const root = await getDecodedBlock(blockStore, currentCID);
    VERBOSE(`[decodePubFromBlocks] root:`, cidStr, root, { blockStore });
    if (!root) {
      throw ERROR("[decodePubFromBlocks] root not found in blockStore", { blockStore, currentCID: cidStr });
    }
    let pubLogsArray;
    if (root?.info) {
      // Newer format: root carries separate applogs/info links; the applogs
      // block may itself be chunked, so unchunk it to a flat CID list.
      if (!applogsCID) applogsCID = root.applogs;
      const applogsBlock = await getDecodedBlock(blockStore, root.applogs);
      pubLogsArray = await unchunkApplogsBlock(applogsBlock, blockStore);
      if (!firstInfo) {
        firstInfo = await getDecodedBlock(blockStore, root.info);
        DEBUG(`new format - infoLogs`, firstInfo.logs.map((l) => ({ [l.toString()]: l })));
      }
    } else {
      // Older format (presumably): applog CID links are stored inline on the root.
      pubLogsArray = root.applogs;
    }
    // Resolve one applog CID link to its decoded applog, normalizing
    // the pv field and attaching the applog's own CIDv1 string.
    const resolveLogFromCidLink = async (cidOrLink) => {
      const cid = cidOrLink;
      const applog = await getDecodedBlock(blockStore, cid);
      if (!applog) {
        ERROR(`Could not find applog CID in pub blocks:`, cid.toString(), { cid, root, blockStore });
        throw new Error(`Could not find applog CID in pub blocks: ${cid.toString()}`);
      }
      if (applog.pv instanceof CID) applog.pv = applog.pv.toV1().toString();
      return {
        ...applog,
        cid: cid.toV1().toString()
      };
    };
    const snapshotApplogs = await Promise.all(pubLogsArray.map(resolveLogFromCidLink));
    allApplogs = allApplogs.concat(snapshotApplogs);
    // End of chain.
    if (!root.prev) {
      break;
    }
    // Incremental stop: caller already has stopAtCID and everything older.
    if (stopAtCID && areCidsEqual(root.prev, stopAtCID)) {
      DEBUG("[decodePubFromBlocks] stopping at stopAtCID:", stopAtCID.toString());
      break;
    }
    // Verify the prev snapshot is actually present before recursing into it.
    const prevBytes = await blockStore.get(root.prev);
    if (!prevBytes) {
      throw ERROR("[decodePubFromBlocks] prev snapshot missing from blockStore", {
        currentCID: cidStr,
        prev: root.prev.toString(),
        stopAtCID: stopAtCID?.toString(),
        visited: [...visited]
      });
    }
    currentCID = root.prev;
  }
  const result = {
    cid: rootCID,
    // Resolve the newest snapshot's info logs the same way as applogs.
    info: firstInfo ? {
      ...firstInfo,
      logs: await Promise.all(firstInfo.logs.map(async (cidOrLink) => {
        const cid = cidOrLink;
        const applog = await getDecodedBlock(blockStore, cid);
        if (!applog) {
          ERROR(`Could not find info log CID in pub blocks:`, cid.toString(), { cid, blockStore });
          throw new Error(`Could not find info log CID in pub blocks: ${cid.toString()}`);
        }
        if (applog.pv instanceof CID) applog.pv = applog.pv.toV1().toString();
        return {
          ...applog,
          cid: cid.toV1().toString()
        };
      }))
    } : null,
    applogsCID,
    applogs: allApplogs
  };
  DEBUG("[decodePubFromBlocks] result:", result, { rootCID: rootCID.toString(), blockStore, applogs: allApplogs });
  return result;
}
|
|
123
|
+
/**
 * Extract the root CID and all blocks from a CAR reader.
 * Returns { rootCID, blockStore } where blockStore.get looks blocks up
 * by their CIDv1 string form.
 * Throws when the CAR contains no roots (previously this fell through to
 * `CID.parse(undefined)` and crashed with a confusing TypeError).
 */
async function getBlocksOfCar(car) {
  const rootsFromCar = await car.getRoots();
  // Roots may arrive as CID instances or raw CID-like objects; normalize
  // both to CIDv1 strings.
  const roots = rootsFromCar.map((c) => (typeof c.toV1 === "function" ? c : CID.decode(c.bytes)).toV1().toString());
  const blocks = /* @__PURE__ */ new Map();
  for await (const { cid: cidFromCarblocks, bytes } of car.blocks()) {
    // Same normalization as for roots: accept CID instances or raw bytes.
    const cid = typeof cidFromCarblocks.toV1 === "function" ? cidFromCarblocks : CID.decode(cidFromCarblocks.bytes);
    VERBOSE({ cidFromCarblocks, cid });
    blocks.set(cid.toV1().toString(), bytes);
  }
  // A CAR without roots has no entry point - fail loudly instead of
  // letting CID.parse(undefined) throw an opaque error below.
  if (roots.length === 0) {
    throw ERROR("[getBlocksOfCar] CAR has no roots", { roots });
  }
  if (roots.length !== 1) {
    WARN("Unexpected roots count:", roots);
  }
  return {
    rootCID: CID.parse(roots[0]),
    blockStore: {
      get: (cid) => blocks.get(cid.toV1().toString())
    }
  };
}
|
|
142
|
+
/**
 * Fetch a block from the store and dag-json-decode it.
 * Returns null when the block is absent - either the store returns a
 * falsy value, or it throws an Error with message "Not Found".
 * Any other store error is rethrown.
 */
async function getDecodedBlock(blockStore, cid) {
  let blob; // `let` instead of the original `var` - same scope, modern idiom
  try {
    blob = await blockStore.get(cid);
  } catch (err) {
    // "Not Found" from the store is an expected absence, not a failure.
    if (err.message === "Not Found") return null;
    throw err;
  }
  if (!blob) {
    // Include the CID so the log line identifies which block was missing
    // (the previous message was a context-free "returning null").
    WARN("[getDecodedBlock] block not found, returning null:", cid.toString());
    return null;
  }
  return dagJson.decode(blob);
}
|
|
155
|
+
// Create a CarWriter over the given root(s), queue every block, close the
// writer, and hand back the async output stream for the caller to consume.
async function makeCarOut(roots, blocks) {
  const rootList = Array.isArray(roots) ? roots : [roots];
  const { writer, out } = CarWriter.create(rootList);
  VERBOSE(`Writing ${blocks.length} blocks to CAR`, { roots, blocks });
  for (const block of blocks) {
    writer.put(block);
  }
  writer.close();
  return out;
}
|
|
162
|
+
// Build a CAR for `roots`/`blocks` and collect its output stream into a Blob.
async function makeCarBlob(roots, blocks) {
  const carStream = await makeCarOut(roots, blocks);
  const chunks = [];
  for await (const chunk of carStream) {
    chunks.push(chunk);
  }
  return new Blob(chunks);
}
|
|
171
|
+
// Materialize a Blob's bytes and open them as a CarReader.
async function carFromBlob(blob) {
  const buffer = await blob.arrayBuffer();
  return CarReader.fromBytes(new Uint8Array(buffer));
}
|
|
174
|
+
// Recursively collect every CID instance reachable inside `value`
// (arrays and plain objects are walked; everything else yields nothing).
function extractCids(value) {
  if (value instanceof CID) {
    return [value];
  }
  if (Array.isArray(value)) {
    return value.flatMap(extractCids);
  }
  const isObject = value !== null && typeof value === "object";
  return isObject ? Object.values(value).flatMap(extractCids) : [];
}
|
|
180
|
+
// Safety cap on the number of blocks a single DAG walk may collect.
var MAX_COLLECT_BLOCKS = 1e6;
// Breadth-first walk of the DAG rooted at `startCID`, fetching each block from
// `blockStore` and following any CID links found in its dag-json payload.
// Returns the collected { cid, bytes } pairs; missing blocks end their branch
// with a warning, and the walk stops early at MAX_COLLECT_BLOCKS.
// Perf note: uses a head index instead of Array.prototype.shift(), which is
// O(n) per call and made large walks accidentally quadratic.
async function collectDagBlocks(startCID, blockStore) {
  const visited = /* @__PURE__ */ new Set();
  const blocks = [];
  const queue = [startCID];
  let head = 0;
  while (head < queue.length) {
    if (blocks.length >= MAX_COLLECT_BLOCKS) {
      WARN(`[collectDagBlocks] hit ${MAX_COLLECT_BLOCKS} block limit, returning partial result`);
      break;
    }
    const cid = queue[head++];
    const cidStr = cid.toString();
    if (visited.has(cidStr)) continue;
    visited.add(cidStr);
    let bytes;
    try {
      bytes = await blockStore.get(cid);
    } catch {
      // Treat a throwing store the same as an absent block.
      bytes = void 0;
    }
    if (!bytes) {
      WARN(`[collectDagBlocks] block not found: ${cidStr}, stopping this branch`);
      continue;
    }
    blocks.push({ cid, bytes });
    if (blocks.length % 1e3 === 0) {
      LOG(`[collectDagBlocks] collected ${blocks.length} blocks...`);
    }
    try {
      const decoded = dagJson.decode(bytes);
      for (const child of extractCids(decoded)) {
        if (!visited.has(child.toString())) {
          queue.push(child);
        }
      }
    } catch {
      // Best-effort link discovery: blocks that are not dag-json simply
      // contribute no children.
    }
  }
  DEBUG(`[collectDagBlocks] collected ${blocks.length} blocks from ${startCID.toString()}`);
  return blocks;
}
|
|
223
|
+
// Adapt a ReadableStream reader (e.g. a fetch body reader) into an async
// iterable of its chunks, ending when the reader reports done.
function streamReaderToIterable(bodyReader) {
  async function* iterate() {
    for (;;) {
      const { done, value } = await bodyReader.read();
      VERBOSE(`[car] chunk`, { done, value });
      if (done) {
        break;
      }
      yield value;
    }
  }
  return iterate();
}
|
|
235
|
+
|
|
236
|
+
// src/pubsub/snap-push.ts
// Module-scoped logger bindings for the snap-push helpers below, aliased with
// a "2" suffix to avoid clashing with the logger bindings earlier in this
// bundled chunk; default level is INFO.
var { WARN: WARN2, LOG: LOG2, DEBUG: DEBUG2, VERBOSE: VERBOSE2, ERROR: ERROR2 } = Logger2.setup(Logger2.INFO);
|
|
238
|
+
// Assemble a publishable snapshot: the applogs of `threadToPublish`
// (encrypted per-log with the shared key when the share has sharedAgents),
// plus the info logs (agent identity atoms from `appThread`, share name,
// share counter, and per-agent encrypted shared keys), then encode it all as
// a CAR via encodeSnapshotAsCar.
// Params:
//   agent           - local agent; provides .ag, .did, .crypto (AES), and signing
//   appThread       - thread holding agent info and share metadata logs
//   threadToPublish - thread whose applogs are being published
//   share           - share descriptor; NOTE(review): destructured with
//                     `share ?? {}` but `share.name`/`share.id` are read
//                     unconditionally below, so a null share would throw —
//                     confirm callers always pass one
//   prevSnapCID     - CID of the previous snapshot, if any
//   prevCounter     - previous snapshot counter; null means "no counter chain"
// Returns the encodeSnapshotAsCar result ({ cid, blob, blocks, ... }).
async function prepareSnapshotForPush(agent, appThread, threadToPublish, share, prevSnapCID, prevCounter) {
  // A counter without the snapshot it belongs to is an inconsistent call.
  if (prevCounter !== null && !prevSnapCID) {
    throw ERROR2("[prepareSnapshotForPush] prevCounter provided without prevSnapCID");
  }
  let logsToPublish = getLogsFromThread(threadToPublish);
  DEBUG2(`[preparePubForPush] Collected ${logsToPublish.length} logs :`, {
    logsToPublish,
    threadOrLogsCount: threadToPublish.nameAndSizeUntracked || `[${threadToPublish.length}]`
  });
  const { sharedAgents, sharedKeyMap, sharedKey, pubCounter } = share ?? {};
  // Reuse the latest (en=share.id, at) log from `thread` if one exists;
  // otherwise finalize a brand-new applog carrying `vl`.
  const getExistingOrNewLog = (thread, share2, ag, at, vl) => {
    let logInQuestion = rollingFilter(lastWriteWins(thread), { en: share2.id, at }).latestLog;
    if (!logInQuestion && vl !== void 0) {
      logInQuestion = ensureTsPvAndFinalizeApplog({ ag, en: share2.id, at, vl }, thread);
    }
    return logInQuestion;
  };
  const shareNameLog = getExistingOrNewLog(appThread, share, agent.ag, "share/name", share.name);
  // Counter value is namespaced by the publishing agent's DID.
  const shareCounterLog = getExistingOrNewLog(appThread, share, agent.ag, "share/counter", `${agent.did}<::>${pubCounter}`);
  // Encrypt one applog: prep it for publication, stringify, UTF-8 encode,
  // then AES-GCM encrypt with `keyToUse`.
  // NOTE(review): the destructured `cid` is unused here.
  const encryptApplog = async (applog, keyToUse) => {
    const { log: eachLog, cid } = prepareForPub(applog);
    const enc = new TextEncoder();
    const stringified = stringify(eachLog);
    const stringifiedEncodedAppLogPayload = enc.encode(stringified);
    VERBOSE2("[odd]", { eachLog, stringified, stringifiedEncodedAppLogPayload });
    try {
      const encPayload = await agent.crypto?.aes.encrypt(stringifiedEncodedAppLogPayload, keyToUse, "AES-GCM");
      VERBOSE2("[odd] encrypted length:", stringifiedEncodedAppLogPayload.length, { encPayload });
      return encPayload;
    } catch (err) {
      throw ERROR2("FAILED TO ENC payload length:", stringifiedEncodedAppLogPayload.length, { err });
    }
  };
  let maybeEncryptedApplogs;
  const encryptedApplogs = [];
  const agentSharedKeyLogs = [];
  if (sharedAgents) {
    // Shared publication requires both the symmetric key and the per-agent
    // wrapped-key map.
    if (!sharedKey || !sharedKeyMap) {
      throw ERROR2("sharedAgents but no Keys/Map", { sharedAgents, sharedKeyMap, sharedKey });
    }
    VERBOSE2("encrypting", { sharedAgents, sharedKeyMap });
    // One share/sharedKey log per recipient agent.
    for (const [eachAgent, eachEncKey] of Array.from(sharedKeyMap.entries())) {
      VERBOSE2("adding key", { eachAgent, eachEncKey });
      agentSharedKeyLogs.push({
        ag: agent.ag,
        en: eachAgent,
        at: "share/sharedKey",
        vl: eachEncKey
        // these are encrypted with the derived key from the local agent private and remote agent public keys
      });
    }
    // NOTE(review): CIDlist and pubCIDmap are declared but never used.
    const CIDlist = [];
    const pubCIDmap = {};
    // Encrypt every outgoing applog with the share's symmetric key.
    for (const eachLog of logsToPublish) {
      VERBOSE2("[crypto] encrypting ", { eachLog, sharedKey });
      const encPayload = await encryptApplog(eachLog, sharedKey);
      DEBUG2("[crypto] encrypted ", { eachLog, encPayload, sharedKey });
      encryptedApplogs.push({ enc: encPayload });
    }
    maybeEncryptedApplogs = encryptedApplogs;
  } else {
    // No shared agents: publish the applogs in the clear.
    maybeEncryptedApplogs = logsToPublish;
  }
  DEBUG2("adding all agent info and shareAtoms", {
    share,
    agent,
    logsToPublish,
    // threadToPublish, - very verbose
    agentSharedKeyLogs
  });
  // Info logs: the agent's identity atoms plus share name/counter (when
  // present) plus the wrapped shared keys.
  const infoLogs = [
    ...rollingFilter(lastWriteWins(appThread), {
      // TODO: use static filter for performance
      en: agent.ag,
      at: ["agent/ecdh", "agent/jwkd", "agent/appAgent"]
    }).applogs,
    ...shareNameLog ? [shareNameLog] : [],
    ...shareCounterLog ? [shareCounterLog] : [],
    ...agentSharedKeyLogs
  ];
  DEBUG2(`[prepareSnapshotForPush] info logs:`, infoLogs);
  // An agent/appAgent log is mandatory — consumers need it to identify the
  // publishing agent.
  if (!infoLogs.find(({ at }) => at === "agent/appAgent")) throw ERROR2(`[prepareSnapshotForPush] appThread missing agent/appAgent log`);
  const applogsToEncode = keepTruthy(maybeEncryptedApplogs);
  const infologsToEncode = keepTruthy(infoLogs);
  if (!applogsToEncode.length) {
    throw ERROR2("no valid applogs", { agent, maybeEncryptedApplogs, infoLogs, applogsToEncode, infologsToEncode, prevSnapCID });
  }
  if (!infologsToEncode.length) {
    throw ERROR2("no valid infologs", { agent, maybeEncryptedApplogs, infoLogs, applogsToEncode, infologsToEncode, prevSnapCID });
  }
  const encodedSnapshot = await encodeSnapshotAsCar(agent, applogsToEncode, infologsToEncode, prevSnapCID, prevCounter);
  DEBUG2("inPrepareSnapshotForPush", { encodedSnapshot });
  return encodedSnapshot;
}
|
|
332
|
+
// Encode a snapshot (applogs + info logs + signed root) as a CAR blob.
// The root block links the info-log wrapper and the (possibly chunked) applog
// list, carries the agent's signatures over both CIDs, and chains to the
// previous snapshot via prev/prevCounter.
// Returns { cid, blob, blocks, infoLogCids, applogCids }.
async function encodeSnapshotAsCar(agent, applogs, infoLogs, prevSnapCID, prevCounter) {
  DEBUG2(`[encodeSnapshotAsCar] encoding`, { agent, applogs, infoLogs });
  // The two log sets are independent, so encode them in parallel
  // (the original awaited them serially).
  const [
    { cids: infoLogCids, encodedApplogs: encodedInfoLogs },
    { cids: applogCids, encodedApplogs }
  ] = await Promise.all([encodeApplogsAsIPLD(infoLogs), encodeApplogsAsIPLD(applogs)]);
  let blocks = encodedApplogs.concat(encodedInfoLogs);
  const infoLogsWrap = await encodeBlockOriginal({ logs: infoLogCids });
  blocks.push(infoLogsWrap);
  const { rootCID: chunkRootCID, blocks: chunkBlocks } = await chunkApplogs(applogCids);
  blocks = blocks.concat(chunkBlocks);
  // Sign the wrapper CID and the chunk-root CID so consumers can verify
  // provenance of both log sets.
  const infoSignature = await agent.sign(infoLogsWrap.cid.bytes);
  const applogsSignature = await agent.sign(chunkRootCID.bytes);
  const root = {
    info: infoLogsWrap.cid,
    applogs: chunkRootCID,
    infoSignature,
    applogsSignature,
    prev: prevSnapCID,
    // First snapshot starts the counter at 0; afterwards the counter
    // increments only when the caller is tracking one (prevCounter !== null).
    prevCounter: !prevSnapCID ? 0 : prevCounter !== null ? prevCounter + 1 : null
  };
  DEBUG2("[encodeSnapshotAsCar] encoding root", { root, logCids: applogCids, infoLogCids });
  const encodedRoot = await encodeBlockOriginal(root);
  blocks.push(encodedRoot);
  DEBUG2("[encodeSnapshotAsCar] => root", { encodedRoot });
  return {
    cid: encodedRoot.cid,
    blob: await makeCarBlob(encodedRoot.cid, blocks),
    // TODO: create CarBuilder (incl .encodeAndAdd({...}))
    blocks,
    infoLogCids,
    applogCids
  };
}
|
|
364
|
+
// Split a list of applog CIDs into encoded chunk blocks of at most `size`
// CIDs each. A single chunk is returned as-is (its CID is the root); multiple
// chunks get an extra root block listing the chunk CIDs.
// Returns { rootCID, blocks[, chunks] }; throws on an empty input list.
async function chunkApplogs(applogCids, size = 1e4) {
  if (!applogCids.length) throw ERROR2(`[chunkApplogs] called with empty array`);
  const chunks = [];
  for (let i = 0; i < applogCids.length; i += size) {
    // slice() already clamps the end index to the array length, so the
    // original's Math.min(i + applogCids.length, i + size) was a no-op.
    const chunk = await encodeBlockOriginal({ logs: applogCids.slice(i, i + size) });
    chunks.push(chunk);
  }
  if (chunks.length === 1) return { rootCID: chunks[0].cid, blocks: chunks };
  const root = await encodeBlockOriginal({ chunks: chunks.map((chunk) => chunk.cid) });
  const blocks = [root, ...chunks];
  DEBUG2(`[chunkApplogs] ${applogCids.length} logs chunked into ${chunks.length}`, { applogCids, root, blocks, chunks, dagJson: dagJson2 });
  return { rootCID: root.cid, blocks, chunks };
}
|
|
377
|
+
// Flatten a snapshot applogs block back into a single list of log CIDs.
// A chunked block ({ chunks: [...] }) has each chunk fetched from the
// blockStore and its logs concatenated; a plain block returns its logs.
// Throws a descriptive error when a chunk is missing or malformed.
async function unchunkApplogsBlock(block, blockStore) {
  if (isSnapBlockChunks(block)) {
    const logLists = await Promise.all(
      block.chunks.map(async (chunkCid) => {
        const chunkBlock = await getDecodedBlock(blockStore, chunkCid);
        // getDecodedBlock returns null for missing blocks; check for that too
        // so we raise a clear error instead of a bare TypeError on `.logs`.
        if (!chunkBlock || !chunkBlock.logs) throw ERROR2(`Weird chunk`, chunkBlock);
        return chunkBlock.logs;
      })
    );
    return logLists.flat();
  }
  return block.logs;
}
|
|
390
|
+
// Type guard: does this snapshot block carry chunk links ({ chunks: [...] })
// rather than an inline log list? Coerced to a real boolean so the
// predicate's name matches its return type; truthiness is unchanged for the
// existing `if` call sites.
function isSnapBlockChunks(block) {
  return Boolean(block.chunks);
}
|
|
393
|
+
// Encode a list of applogs as IPLD blocks, wrap their CIDs in a root block,
// and return the whole thing packed as a CAR blob.
async function encodeSnapshotApplogsAsCar(applogs) {
  const encoded = await encodeApplogsAsIPLD(applogs);
  if (!encoded) throw ERROR2("invalid applogs cannot continue", { applogs, encoded });
  const { cids, encodedApplogs } = encoded;
  const encodedRoot = await encodeBlockOriginal({ applogs: cids });
  DEBUG2("[encodeSnapshotApplogsAsCar] encoded root", { cids, encodedRoot });
  const carBlocks = [encodedRoot, ...encodedApplogs];
  return await makeCarBlob(encodedRoot.cid, carBlocks);
}
|
|
402
|
+
// Encode each (truthy) applog as an IPLD block after prepping it for
// publication. Returns { cids, encodedApplogs } in input order; throws when
// no truthy applogs remain or an encoded block lacks a CID.
async function encodeApplogsAsIPLD(applogs) {
  DEBUG2({ applogs });
  const validApplogs = applogs.filter(Boolean);
  DEBUG2({ validApplogs });
  if (!validApplogs.length) throw ERROR2("no valid applogs");
  const preppedLogs = validApplogs.map((log) => prepareForPub(log).log);
  // Wrap in an explicit lambda: .map(encodeBlockOriginal) would also pass the
  // (index, array) arguments through to the encoder.
  const encodedApplogs = await Promise.all(preppedLogs.map((log) => encodeBlockOriginal(log)));
  DEBUG2("[encodeApplogsAsIpld] encoded applogs", { preppedLogs, encodedApplogs });
  const cids = encodedApplogs.map((b) => {
    if (!b.cid) throw ERROR2(`[publish] no cid for encoded log:`, b);
    return b.cid;
  });
  return { cids, encodedApplogs };
}
|
|
416
|
+
|
|
417
|
+
export {
  // snapshot push pipeline (src/pubsub/snap-push.ts)
  prepareSnapshotForPush,
  encodeSnapshotAsCar,
  chunkApplogs,
  unchunkApplogsBlock,
  isSnapBlockChunks,
  encodeSnapshotApplogsAsCar,
  // pub decoding
  decodePubFromCar,
  decodePubFromBlocks,
  // CAR / block-store helpers
  getBlocksOfCar,
  getDecodedBlock,
  makeCarOut,
  makeCarBlob,
  carFromBlob,
  collectDagBlocks,
  streamReaderToIterable
};
//# sourceMappingURL=chunk-O43W7UW6.js.map
|