@wovin/core 0.0.16 → 0.0.18
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/dist/applog.min.js +1 -1
- package/dist/{chunk-TEQ4SIKN.min.js → chunk-E4BAQ5JJ.min.js} +2 -2
- package/dist/chunk-IHGAJTEQ.min.js +70 -0
- package/dist/chunk-IHGAJTEQ.min.js.map +1 -0
- package/dist/{chunk-OBMVNVJR.min.js → chunk-KVPEUWWR.min.js} +2 -2
- package/dist/chunk-SIIQEX77.min.js +4122 -0
- package/dist/chunk-SIIQEX77.min.js.map +1 -0
- package/dist/{chunk-NPCVLBCM.min.js → chunk-UZBCM3UI.min.js} +5 -75
- package/dist/{chunk-NPCVLBCM.min.js.map → chunk-UZBCM3UI.min.js.map} +1 -1
- package/dist/{chunk-FIOA3FZW.min.js → chunk-VDYG5VHX.min.js} +4 -4
- package/dist/{chunk-FIOA3FZW.min.js.map → chunk-VDYG5VHX.min.js.map} +1 -1
- package/dist/index.min.js +19 -16
- package/dist/ipfs/car.d.ts +2 -1
- package/dist/ipfs/car.d.ts.map +1 -1
- package/dist/ipfs/fetch-snapshot-chain.d.ts +27 -0
- package/dist/ipfs/fetch-snapshot-chain.d.ts.map +1 -0
- package/dist/ipfs.d.ts +1 -0
- package/dist/ipfs.d.ts.map +1 -1
- package/dist/ipfs.min.js +7 -4
- package/dist/pubsub/pubsub-types.d.ts +7 -7
- package/dist/pubsub/pubsub-types.d.ts.map +1 -1
- package/dist/pubsub/{pub-push.d.ts → snap-push.d.ts} +7 -7
- package/dist/pubsub/snap-push.d.ts.map +1 -0
- package/dist/pubsub.d.ts +1 -1
- package/dist/pubsub.d.ts.map +1 -1
- package/dist/pubsub.min.js +14 -14
- package/dist/query/situations.d.ts +77 -0
- package/dist/query/situations.d.ts.map +1 -1
- package/dist/query.min.js +3 -3
- package/dist/thread.min.js +1 -1
- package/package.json +1 -1
- package/dist/chunk-JEOQUHTK.min.js +0 -1515
- package/dist/chunk-JEOQUHTK.min.js.map +0 -1
- package/dist/chunk-RPPZKO5L.min.js +0 -1
- package/dist/chunk-RPPZKO5L.min.js.map +0 -1
- package/dist/pubsub/pub-push.d.ts.map +0 -1
- /package/dist/{chunk-TEQ4SIKN.min.js.map → chunk-E4BAQ5JJ.min.js.map} +0 -0
- /package/dist/{chunk-OBMVNVJR.min.js.map → chunk-KVPEUWWR.min.js.map} +0 -0
@@ -1,1515 +0,0 @@
-import {
-  keepTruthy
-} from "./chunk-QPGEBDMJ.min.js";
-import {
-  lastWriteWins
-} from "./chunk-OBMVNVJR.min.js";
-import {
-  CID,
-  Token,
-  Type,
-  areCidsEqual,
-  containsCid,
-  decode,
-  decode2,
-  decode3,
-  encode,
-  encodeBlockOriginal,
-  ensureTsPvAndFinalizeApplog,
-  g,
-  getLogsFromThread,
-  makeCborEncoders,
-  prepareForPub,
-  rollingFilter,
-  src_exports,
-  wrapper_default
-} from "./chunk-NPCVLBCM.min.js";
-import {
-  __commonJS,
-  __toESM
-} from "./chunk-PHITDXZT.min.js";
-
-// ../../../node_modules/.pnpm/varint@6.0.0/node_modules/varint/encode.js
-var require_encode = __commonJS({
-  "../../../node_modules/.pnpm/varint@6.0.0/node_modules/varint/encode.js"(exports, module) {
-    "use strict";
-    module.exports = encode3;
-    var MSB = 128;
-    var REST = 127;
-    var MSBALL = ~REST;
-    var INT = Math.pow(2, 31);
-    function encode3(num, out, offset) {
-      if (Number.MAX_SAFE_INTEGER && num > Number.MAX_SAFE_INTEGER) {
-        encode3.bytes = 0;
-        throw new RangeError("Could not encode varint");
-      }
-      out = out || [];
-      offset = offset || 0;
-      var oldOffset = offset;
-      while (num >= INT) {
-        out[offset++] = num & 255 | MSB;
-        num /= 128;
-      }
-      while (num & MSBALL) {
-        out[offset++] = num & 255 | MSB;
-        num >>>= 7;
-      }
-      out[offset] = num | 0;
-      encode3.bytes = offset - oldOffset + 1;
-      return out;
-    }
-  }
-});
-
-// ../../../node_modules/.pnpm/varint@6.0.0/node_modules/varint/decode.js
-var require_decode = __commonJS({
-  "../../../node_modules/.pnpm/varint@6.0.0/node_modules/varint/decode.js"(exports, module) {
-    "use strict";
-    module.exports = read;
-    var MSB = 128;
-    var REST = 127;
-    function read(buf, offset) {
-      var res = 0, offset = offset || 0, shift = 0, counter = offset, b, l = buf.length;
-      do {
-        if (counter >= l || shift > 49) {
-          read.bytes = 0;
-          throw new RangeError("Could not decode varint");
-        }
-        b = buf[counter++];
-        res += shift < 28 ? (b & REST) << shift : (b & REST) * Math.pow(2, shift);
-        shift += 7;
-      } while (b >= MSB);
-      read.bytes = counter - offset;
-      return res;
-    }
-  }
-});
-
-// ../../../node_modules/.pnpm/varint@6.0.0/node_modules/varint/length.js
-var require_length = __commonJS({
-  "../../../node_modules/.pnpm/varint@6.0.0/node_modules/varint/length.js"(exports, module) {
-    "use strict";
-    var N1 = Math.pow(2, 7);
-    var N2 = Math.pow(2, 14);
-    var N3 = Math.pow(2, 21);
-    var N4 = Math.pow(2, 28);
-    var N5 = Math.pow(2, 35);
-    var N6 = Math.pow(2, 42);
-    var N7 = Math.pow(2, 49);
-    var N8 = Math.pow(2, 56);
-    var N9 = Math.pow(2, 63);
-    module.exports = function(value) {
-      return value < N1 ? 1 : value < N2 ? 2 : value < N3 ? 3 : value < N4 ? 4 : value < N5 ? 5 : value < N6 ? 6 : value < N7 ? 7 : value < N8 ? 8 : value < N9 ? 9 : 10;
-    };
-  }
-});
-
-// ../../../node_modules/.pnpm/varint@6.0.0/node_modules/varint/index.js
-var require_varint = __commonJS({
-  "../../../node_modules/.pnpm/varint@6.0.0/node_modules/varint/index.js"(exports, module) {
-    "use strict";
-    module.exports = {
-      encode: require_encode(),
-      decode: require_decode(),
-      encodingLength: require_length()
-    };
-  }
-});
-
-// ../../../node_modules/.pnpm/@ipld+dag-cbor@9.2.5/node_modules/@ipld/dag-cbor/src/index.js
-var CID_CBOR_TAG = 42;
-function toByteView(buf) {
-  if (buf instanceof ArrayBuffer) {
-    return new Uint8Array(buf, 0, buf.byteLength);
-  }
-  return buf;
-}
-function cidEncoder(obj) {
-  if (obj.asCID !== obj && obj["/"] !== obj.bytes) {
-    return null;
-  }
-  const cid = CID.asCID(obj);
-  if (!cid) {
-    return null;
-  }
-  const bytes = new Uint8Array(cid.bytes.byteLength + 1);
-  bytes.set(cid.bytes, 1);
-  return [
-    new Token(Type.tag, CID_CBOR_TAG),
-    new Token(Type.bytes, bytes)
-  ];
-}
-function undefinedEncoder() {
-  throw new Error("`undefined` is not supported by the IPLD Data Model and cannot be encoded");
-}
-function numberEncoder(num) {
-  if (Number.isNaN(num)) {
-    throw new Error("`NaN` is not supported by the IPLD Data Model and cannot be encoded");
-  }
-  if (num === Infinity || num === -Infinity) {
-    throw new Error("`Infinity` and `-Infinity` is not supported by the IPLD Data Model and cannot be encoded");
-  }
-  return null;
-}
-function mapEncoder(map) {
-  for (const key of map.keys()) {
-    if (typeof key !== "string" || key.length === 0) {
-      throw new Error("Non-string Map keys are not supported by the IPLD Data Model and cannot be encoded");
-    }
-  }
-  return null;
-}
-var _encodeOptions = {
-  float64: true,
-  typeEncoders: {
-    Map: mapEncoder,
-    Object: cidEncoder,
-    undefined: undefinedEncoder,
-    number: numberEncoder
-  }
-};
-var encodeOptions = {
-  ..._encodeOptions,
-  typeEncoders: {
-    ..._encodeOptions.typeEncoders
-  }
-};
-function cidDecoder(bytes) {
-  if (bytes[0] !== 0) {
-    throw new Error("Invalid CID for CBOR tag 42; expected leading 0x00");
-  }
-  return CID.decode(bytes.subarray(1));
-}
-var _decodeOptions = {
-  allowIndefinite: false,
-  coerceUndefinedToNull: true,
-  allowNaN: false,
-  allowInfinity: false,
-  allowBigInt: true,
-  // this will lead to BigInt for ints outside of
-  // safe-integer range, which may surprise users
-  strict: true,
-  useMaps: false,
-  rejectDuplicateMapKeys: true,
-  /** @type {import('cborg').TagDecoder[]} */
-  tags: []
-};
-_decodeOptions.tags[CID_CBOR_TAG] = cidDecoder;
-var decodeOptions = {
-  ..._decodeOptions,
-  tags: _decodeOptions.tags.slice()
-};
-var encode2 = (node) => encode(node, _encodeOptions);
-var decode4 = (data) => decode(toByteView(data), _decodeOptions);
-
-// ../../../node_modules/.pnpm/@ipld+car@5.4.2/node_modules/@ipld/car/src/decoder-common.js
-var import_varint = __toESM(require_varint(), 1);
-var CIDV0_BYTES = {
-  SHA2_256: 18,
-  LENGTH: 32,
-  DAG_PB: 112
-};
-var V2_HEADER_LENGTH = (
-  /* characteristics */
-  16 + 8 + 8 + 8
-);
-function decodeVarint(bytes, seeker) {
-  if (!bytes.length) {
-    throw new Error("Unexpected end of data");
-  }
-  const i = import_varint.default.decode(bytes);
-  seeker.seek(
-    /** @type {number} */
-    import_varint.default.decode.bytes
-  );
-  return i;
-}
-function decodeV2Header(bytes) {
-  const dv = new DataView(bytes.buffer, bytes.byteOffset, bytes.byteLength);
-  let offset = 0;
-  const header = {
-    version: 2,
-    /** @type {[bigint, bigint]} */
-    characteristics: [
-      dv.getBigUint64(offset, true),
-      dv.getBigUint64(offset += 8, true)
-    ],
-    dataOffset: Number(dv.getBigUint64(offset += 8, true)),
-    dataSize: Number(dv.getBigUint64(offset += 8, true)),
-    indexOffset: Number(dv.getBigUint64(offset += 8, true))
-  };
-  return header;
-}
-function getMultihashLength(bytes) {
-  import_varint.default.decode(bytes);
-  const codeLength = (
-    /** @type {number} */
-    import_varint.default.decode.bytes
-  );
-  const length = import_varint.default.decode(bytes.subarray(import_varint.default.decode.bytes));
-  const lengthLength = (
-    /** @type {number} */
-    import_varint.default.decode.bytes
-  );
-  const mhLength = codeLength + lengthLength + length;
-  return mhLength;
-}
-
-// ../../../node_modules/.pnpm/@ipld+car@5.4.2/node_modules/@ipld/car/src/header-validator.js
-var Kinds = {
-  Null: (
-    /**
-     * @param obj
-     * @returns {undefined|null}
-     */
-    (obj) => obj === null ? obj : void 0
-  ),
-  Int: (
-    /**
-     * @param obj
-     * @returns {undefined|number}
-     */
-    (obj) => Number.isInteger(obj) ? obj : void 0
-  ),
-  Float: (
-    /**
-     * @param obj
-     * @returns {undefined|number}
-     */
-    (obj) => typeof obj === "number" && Number.isFinite(obj) ? obj : void 0
-  ),
-  String: (
-    /**
-     * @param obj
-     * @returns {undefined|string}
-     */
-    (obj) => typeof obj === "string" ? obj : void 0
-  ),
-  Bool: (
-    /**
-     * @param obj
-     * @returns {undefined|boolean}
-     */
-    (obj) => typeof obj === "boolean" ? obj : void 0
-  ),
-  Bytes: (
-    /**
-     * @param obj
-     * @returns {undefined|Uint8Array}
-     */
-    (obj) => obj instanceof Uint8Array ? obj : void 0
-  ),
-  Link: (
-    /**
-     * @param obj
-     * @returns {undefined|object}
-     */
-    (obj) => obj !== null && typeof obj === "object" && obj.asCID === obj ? obj : void 0
-  ),
-  List: (
-    /**
-     * @param obj
-     * @returns {undefined|Array<any>}
-     */
-    (obj) => Array.isArray(obj) ? obj : void 0
-  ),
-  Map: (
-    /**
-     * @param obj
-     * @returns {undefined|object}
-     */
-    (obj) => obj !== null && typeof obj === "object" && obj.asCID !== obj && !Array.isArray(obj) && !(obj instanceof Uint8Array) ? obj : void 0
-  )
-};
-var Types = {
-  "CarV1HeaderOrV2Pragma > roots (anon) > valueType (anon)": Kinds.Link,
-  "CarV1HeaderOrV2Pragma > roots (anon)": (
-    /**
-     * @param obj
-     * @returns {undefined|any}
-     */
-    (obj) => {
-      if (Kinds.List(obj) === void 0) {
-        return void 0;
-      }
-      for (let i = 0; i < obj.length; i++) {
-        let v = obj[i];
-        v = Types["CarV1HeaderOrV2Pragma > roots (anon) > valueType (anon)"](v);
-        if (v === void 0) {
-          return void 0;
-        }
-        if (v !== obj[i]) {
-          const ret = obj.slice(0, i);
-          for (let j = i; j < obj.length; j++) {
-            let v2 = obj[j];
-            v2 = Types["CarV1HeaderOrV2Pragma > roots (anon) > valueType (anon)"](v2);
-            if (v2 === void 0) {
-              return void 0;
-            }
-            ret.push(v2);
-          }
-          return ret;
-        }
-      }
-      return obj;
-    }
-  ),
-  Int: Kinds.Int,
-  CarV1HeaderOrV2Pragma: (
-    /**
-     * @param obj
-     * @returns {undefined|any}
-     */
-    (obj) => {
-      if (Kinds.Map(obj) === void 0) {
-        return void 0;
-      }
-      const entries = Object.entries(obj);
-      let ret = obj;
-      let requiredCount = 1;
-      for (let i = 0; i < entries.length; i++) {
-        const [key, value] = entries[i];
-        switch (key) {
-          case "roots":
-            {
-              const v = Types["CarV1HeaderOrV2Pragma > roots (anon)"](obj[key]);
-              if (v === void 0) {
-                return void 0;
-              }
-              if (v !== value || ret !== obj) {
-                if (ret === obj) {
-                  ret = {};
-                  for (let j = 0; j < i; j++) {
-                    ret[entries[j][0]] = entries[j][1];
-                  }
-                }
-                ret.roots = v;
-              }
-            }
-            break;
-          case "version":
-            {
-              requiredCount--;
-              const v = Types.Int(obj[key]);
-              if (v === void 0) {
-                return void 0;
-              }
-              if (v !== value || ret !== obj) {
-                if (ret === obj) {
-                  ret = {};
-                  for (let j = 0; j < i; j++) {
-                    ret[entries[j][0]] = entries[j][1];
-                  }
-                }
-                ret.version = v;
-              }
-            }
-            break;
-          default:
-            return void 0;
-        }
-      }
-      if (requiredCount > 0) {
-        return void 0;
-      }
-      return ret;
-    }
-  )
-};
-var Reprs = {
-  "CarV1HeaderOrV2Pragma > roots (anon) > valueType (anon)": Kinds.Link,
-  "CarV1HeaderOrV2Pragma > roots (anon)": (
-    /**
-     * @param obj
-     * @returns {undefined|any}
-     */
-    (obj) => {
-      if (Kinds.List(obj) === void 0) {
-        return void 0;
-      }
-      for (let i = 0; i < obj.length; i++) {
-        let v = obj[i];
-        v = Reprs["CarV1HeaderOrV2Pragma > roots (anon) > valueType (anon)"](v);
-        if (v === void 0) {
-          return void 0;
-        }
-        if (v !== obj[i]) {
-          const ret = obj.slice(0, i);
-          for (let j = i; j < obj.length; j++) {
-            let v2 = obj[j];
-            v2 = Reprs["CarV1HeaderOrV2Pragma > roots (anon) > valueType (anon)"](v2);
-            if (v2 === void 0) {
-              return void 0;
-            }
-            ret.push(v2);
-          }
-          return ret;
-        }
-      }
-      return obj;
-    }
-  ),
-  Int: Kinds.Int,
-  CarV1HeaderOrV2Pragma: (
-    /**
-     * @param obj
-     * @returns {undefined|any}
-     */
-    (obj) => {
-      if (Kinds.Map(obj) === void 0) {
-        return void 0;
-      }
-      const entries = Object.entries(obj);
-      let ret = obj;
-      let requiredCount = 1;
-      for (let i = 0; i < entries.length; i++) {
-        const [key, value] = entries[i];
-        switch (key) {
-          case "roots":
-            {
-              const v = Reprs["CarV1HeaderOrV2Pragma > roots (anon)"](value);
-              if (v === void 0) {
-                return void 0;
-              }
-              if (v !== value || ret !== obj) {
-                if (ret === obj) {
-                  ret = {};
-                  for (let j = 0; j < i; j++) {
-                    ret[entries[j][0]] = entries[j][1];
-                  }
-                }
-                ret.roots = v;
-              }
-            }
-            break;
-          case "version":
-            {
-              requiredCount--;
-              const v = Reprs.Int(value);
-              if (v === void 0) {
-                return void 0;
-              }
-              if (v !== value || ret !== obj) {
-                if (ret === obj) {
-                  ret = {};
-                  for (let j = 0; j < i; j++) {
-                    ret[entries[j][0]] = entries[j][1];
-                  }
-                }
-                ret.version = v;
-              }
-            }
-            break;
-          default:
-            return void 0;
-        }
-      }
-      if (requiredCount > 0) {
-        return void 0;
-      }
-      return ret;
-    }
-  )
-};
-var CarV1HeaderOrV2Pragma = {
-  toTyped: Types.CarV1HeaderOrV2Pragma,
-  toRepresentation: Reprs.CarV1HeaderOrV2Pragma
-};
-
-// ../../../node_modules/.pnpm/cborg@4.2.15/node_modules/cborg/lib/length.js
-var cborEncoders = makeCborEncoders();
-
-// ../../../node_modules/.pnpm/@ipld+car@5.4.2/node_modules/@ipld/car/src/buffer-writer.js
-var import_varint2 = __toESM(require_varint(), 1);
-var headerPreludeTokens = [
-  new Token(Type.map, 2),
-  new Token(Type.string, "version"),
-  new Token(Type.uint, 1),
-  new Token(Type.string, "roots")
-];
-var CID_TAG = new Token(Type.tag, 42);
-
-// ../../../node_modules/.pnpm/@ipld+car@5.4.2/node_modules/@ipld/car/src/decoder.js
-async function readHeader(reader, strictVersion) {
-  const length = decodeVarint(await reader.upTo(8), reader);
-  if (length === 0) {
-    throw new Error("Invalid CAR header (zero length)");
-  }
-  const header = await reader.exactly(length, true);
-  const block = decode4(header);
-  if (CarV1HeaderOrV2Pragma.toTyped(block) === void 0) {
-    throw new Error("Invalid CAR header format");
-  }
-  if (block.version !== 1 && block.version !== 2 || strictVersion !== void 0 && block.version !== strictVersion) {
-    throw new Error(`Invalid CAR version: ${block.version}${strictVersion !== void 0 ? ` (expected ${strictVersion})` : ""}`);
-  }
-  if (block.version === 1) {
-    if (!Array.isArray(block.roots)) {
-      throw new Error("Invalid CAR header format");
-    }
-    return block;
-  }
-  if (block.roots !== void 0) {
-    throw new Error("Invalid CAR header format");
-  }
-  const v2Header = decodeV2Header(await reader.exactly(V2_HEADER_LENGTH, true));
-  reader.seek(v2Header.dataOffset - reader.pos);
-  const v1Header = await readHeader(reader, 1);
-  return Object.assign(v1Header, v2Header);
-}
-async function readCid(reader) {
-  const first = await reader.exactly(2, false);
-  if (first[0] === CIDV0_BYTES.SHA2_256 && first[1] === CIDV0_BYTES.LENGTH) {
-    const bytes2 = await reader.exactly(34, true);
-    const multihash2 = decode2(bytes2);
-    return CID.create(0, CIDV0_BYTES.DAG_PB, multihash2);
-  }
-  const version = decodeVarint(await reader.upTo(8), reader);
-  if (version !== 1) {
-    throw new Error(`Unexpected CID version (${version})`);
-  }
-  const codec = decodeVarint(await reader.upTo(8), reader);
-  const bytes = await reader.exactly(getMultihashLength(await reader.upTo(8)), true);
-  const multihash = decode2(bytes);
-  return CID.create(version, codec, multihash);
-}
-async function readBlockHead(reader) {
-  const start = reader.pos;
-  let length = decodeVarint(await reader.upTo(8), reader);
-  if (length === 0) {
-    throw new Error("Invalid CAR section (zero length)");
-  }
-  length += reader.pos - start;
-  const cid = await readCid(reader);
-  const blockLength = length - Number(reader.pos - start);
-  return { cid, length, blockLength };
-}
-async function readBlock(reader) {
-  const { cid, blockLength } = await readBlockHead(reader);
-  const bytes = await reader.exactly(blockLength, true);
-  return { bytes, cid };
-}
-async function readBlockIndex(reader) {
-  const offset = reader.pos;
-  const { cid, length, blockLength } = await readBlockHead(reader);
-  const index = { cid, length, blockLength, offset, blockOffset: reader.pos };
-  reader.seek(index.blockLength);
-  return index;
-}
-function createDecoder(reader) {
-  const headerPromise = (async () => {
-    const header = await readHeader(reader);
-    if (header.version === 2) {
-      const v1length = reader.pos - header.dataOffset;
-      reader = limitReader(reader, header.dataSize - v1length);
-    }
-    return header;
-  })();
-  return {
-    header: () => headerPromise,
-    async *blocks() {
-      await headerPromise;
-      while ((await reader.upTo(8)).length > 0) {
-        yield await readBlock(reader);
-      }
-    },
-    async *blocksIndex() {
-      await headerPromise;
-      while ((await reader.upTo(8)).length > 0) {
-        yield await readBlockIndex(reader);
-      }
-    }
-  };
-}
-function bytesReader(bytes) {
-  let pos = 0;
-  return {
-    async upTo(length) {
-      const out = bytes.subarray(pos, pos + Math.min(length, bytes.length - pos));
-      return out;
-    },
-    async exactly(length, seek = false) {
-      if (length > bytes.length - pos) {
-        throw new Error("Unexpected end of data");
-      }
-      const out = bytes.subarray(pos, pos + length);
-      if (seek) {
-        pos += length;
-      }
-      return out;
-    },
-    seek(length) {
-      pos += length;
-    },
-    get pos() {
-      return pos;
-    }
-  };
-}
-function chunkReader(readChunk) {
-  let pos = 0;
-  let have = 0;
-  let offset = 0;
-  let currentChunk = new Uint8Array(0);
-  const read = async (length) => {
-    have = currentChunk.length - offset;
-    const bufa = (
-      /** @type {Uint8Array<ArrayBufferLike>[]} */
-      [currentChunk.subarray(offset)]
-    );
-    while (have < length) {
-      const chunk = await readChunk();
-      if (chunk == null) {
-        break;
-      }
-      if (have < 0) {
-        if (chunk.length > have) {
-          bufa.push(chunk.subarray(-have));
-        }
-      } else {
-        bufa.push(chunk);
-      }
-      have += chunk.length;
-    }
-    currentChunk = new Uint8Array(bufa.reduce((p, c) => p + c.length, 0));
-    let off = 0;
-    for (const b of bufa) {
-      currentChunk.set(b, off);
-      off += b.length;
-    }
-    offset = 0;
-  };
-  return {
-    async upTo(length) {
-      if (currentChunk.length - offset < length) {
-        await read(length);
-      }
-      return currentChunk.subarray(offset, offset + Math.min(currentChunk.length - offset, length));
-    },
-    async exactly(length, seek = false) {
-      if (currentChunk.length - offset < length) {
-        await read(length);
-      }
-      if (currentChunk.length - offset < length) {
-        throw new Error("Unexpected end of data");
-      }
-      const out = currentChunk.subarray(offset, offset + length);
-      if (seek) {
-        pos += length;
-        offset += length;
-      }
-      return out;
-    },
-    seek(length) {
-      pos += length;
-      offset += length;
-    },
-    get pos() {
-      return pos;
-    }
-  };
-}
-function asyncIterableReader(asyncIterable) {
-  const iterator = asyncIterable[Symbol.asyncIterator]();
-  async function readChunk() {
-    const next = await iterator.next();
-    if (next.done) {
-      return null;
-    }
-    return next.value;
-  }
-  return chunkReader(readChunk);
-}
-function limitReader(reader, byteLimit) {
-  let bytesRead = 0;
-  return {
-    async upTo(length) {
-      let bytes = await reader.upTo(length);
-      if (bytes.length + bytesRead > byteLimit) {
-        bytes = bytes.subarray(0, byteLimit - bytesRead);
-      }
-      return bytes;
-    },
-    async exactly(length, seek = false) {
-      const bytes = await reader.exactly(length, seek);
-      if (bytes.length + bytesRead > byteLimit) {
-        throw new Error("Unexpected end of data");
-      }
-      if (seek) {
-        bytesRead += length;
-      }
-      return bytes;
-    },
-    seek(length) {
-      bytesRead += length;
-      reader.seek(length);
-    },
-    get pos() {
-      return reader.pos;
-    }
-  };
-}
-
-// ../../../node_modules/.pnpm/@ipld+car@5.4.2/node_modules/@ipld/car/src/reader-browser.js
-var CarReader = class {
-  /**
-   * @constructs CarReader
-   * @param {CarHeader|CarV2Header} header
-   * @param {Block[]} blocks
-   */
-  constructor(header, blocks) {
-    this._header = header;
-    this._blocks = blocks;
-    this._keys = blocks.map((b) => b.cid.toString());
-  }
-  /**
-   * @property
-   * @memberof CarReader
-   * @instance
-   */
-  get version() {
-    return this._header.version;
-  }
-  /**
-   * Get the list of roots defined by the CAR referenced by this reader. May be
-   * zero or more `CID`s.
-   *
-   * @function
-   * @memberof CarReader
-   * @instance
-   * @async
-   * @returns {Promise<CID[]>}
-   */
-  async getRoots() {
-    return this._header.roots;
-  }
-  /**
-   * Check whether a given `CID` exists within the CAR referenced by this
-   * reader.
-   *
-   * @function
-   * @memberof CarReader
-   * @instance
-   * @async
-   * @param {CID} key
-   * @returns {Promise<boolean>}
-   */
-  async has(key) {
-    return this._keys.indexOf(key.toString()) > -1;
-  }
-  /**
-   * Fetch a `Block` (a `{ cid:CID, bytes:Uint8Array }` pair) from the CAR
-   * referenced by this reader matching the provided `CID`. In the case where
-   * the provided `CID` doesn't exist within the CAR, `undefined` will be
-   * returned.
-   *
-   * @function
-   * @memberof CarReader
-   * @instance
-   * @async
-   * @param {CID} key
-   * @returns {Promise<Block | undefined>}
-   */
-  async get(key) {
-    const index = this._keys.indexOf(key.toString());
-    return index > -1 ? this._blocks[index] : void 0;
-  }
-  /**
-   * Returns a `BlockIterator` (`AsyncIterable<Block>`) that iterates over all
-   * of the `Block`s (`{ cid:CID, bytes:Uint8Array }` pairs) contained within
-   * the CAR referenced by this reader.
-   *
-   * @function
-   * @memberof CarReader
-   * @instance
-   * @async
-   * @generator
-   * @returns {AsyncGenerator<Block>}
-   */
-  async *blocks() {
-    for (const block of this._blocks) {
-      yield block;
-    }
-  }
-  /**
-   * Returns a `CIDIterator` (`AsyncIterable<CID>`) that iterates over all of
-   * the `CID`s contained within the CAR referenced by this reader.
-   *
-   * @function
-   * @memberof CarReader
-   * @instance
-   * @async
-   * @generator
-   * @returns {AsyncGenerator<CID>}
-   */
-  async *cids() {
-    for (const block of this._blocks) {
-      yield block.cid;
-    }
-  }
-  /**
-   * Instantiate a {@link CarReader} from a `Uint8Array` blob. This performs a
-   * decode fully in memory and maintains the decoded state in memory for full
-   * access to the data via the `CarReader` API.
-   *
-   * @async
-   * @static
-   * @memberof CarReader
-   * @param {Uint8Array} bytes
-   * @returns {Promise<CarReader>}
-   */
-  static async fromBytes(bytes) {
-    if (!(bytes instanceof Uint8Array)) {
-      throw new TypeError("fromBytes() requires a Uint8Array");
-    }
-    return decodeReaderComplete(bytesReader(bytes));
-  }
-  /**
-   * Instantiate a {@link CarReader} from a `AsyncIterable<Uint8Array>`, such as
-   * a [modern Node.js stream](https://nodejs.org/api/stream.html#stream_streams_compatibility_with_async_generators_and_async_iterators).
-   * This performs a decode fully in memory and maintains the decoded state in
-   * memory for full access to the data via the `CarReader` API.
-   *
-   * Care should be taken for large archives; this API may not be appropriate
-   * where memory is a concern or the archive is potentially larger than the
-   * amount of memory that the runtime can handle.
-   *
-   * @async
-   * @static
-   * @memberof CarReader
-   * @param {AsyncIterable<Uint8Array>} asyncIterable
-   * @returns {Promise<CarReader>}
-   */
-  static async fromIterable(asyncIterable) {
-    if (!asyncIterable || !(typeof asyncIterable[Symbol.asyncIterator] === "function")) {
-      throw new TypeError("fromIterable() requires an async iterable");
-    }
-    return decodeReaderComplete(asyncIterableReader(asyncIterable));
-  }
-};
-async function decodeReaderComplete(reader) {
-  const decoder = createDecoder(reader);
-  const header = await decoder.header();
-  const blocks = [];
-  for await (const block of decoder.blocks()) {
-    blocks.push(block);
-  }
-  return new CarReader(header, blocks);
-}
-
-// ../../../node_modules/.pnpm/@ipld+car@5.4.2/node_modules/@ipld/car/src/encoder.js
-var import_varint3 = __toESM(require_varint(), 1);
-var CAR_V1_VERSION = 1;
-function createHeader(roots) {
-  const headerBytes = encode2({ version: CAR_V1_VERSION, roots });
-  const varintBytes = import_varint3.default.encode(headerBytes.length);
-  const header = new Uint8Array(varintBytes.length + headerBytes.length);
-  header.set(varintBytes, 0);
-  header.set(headerBytes, varintBytes.length);
-  return header;
-}
-function createEncoder(writer) {
-  return {
-    /**
-     * @param {CID[]} roots
-     * @returns {Promise<void>}
-     */
-    async setRoots(roots) {
-      const bytes = createHeader(roots);
-      await writer.write(bytes);
-    },
-    /**
-     * @param {Block} block
-     * @returns {Promise<void>}
-     */
-    async writeBlock(block) {
-      const { cid, bytes } = block;
-      await writer.write(new Uint8Array(import_varint3.default.encode(cid.bytes.length + bytes.length)));
-      await writer.write(cid.bytes);
-      if (bytes.length) {
-        await writer.write(bytes);
-      }
-    },
-    /**
-     * @returns {Promise<void>}
-     */
-    async close() {
-      await writer.end();
-    },
-    /**
-     * @returns {number}
-     */
-    version() {
-      return CAR_V1_VERSION;
-    }
-  };
-}
-
-// ../../../node_modules/.pnpm/@ipld+car@5.4.2/node_modules/@ipld/car/src/iterator-channel.js
-function noop() {
-}
-function create() {
-  const chunkQueue = [];
-  let drainer = null;
-  let drainerResolver = noop;
-  let ended = false;
-  let outWait = null;
-  let outWaitResolver = noop;
-  const makeDrainer = () => {
-    if (!drainer) {
-      drainer = new Promise((resolve) => {
-        drainerResolver = () => {
-          drainer = null;
-          drainerResolver = noop;
-          resolve();
-        };
-      });
-    }
-    return drainer;
-  };
-  const writer = {
-    /**
-     * @param {T} chunk
-     * @returns {Promise<void>}
-     */
-    write(chunk) {
-      chunkQueue.push(chunk);
-      const drainer2 = makeDrainer();
-      outWaitResolver();
-      return drainer2;
-    },
-    async end() {
-      ended = true;
-      const drainer2 = makeDrainer();
-      outWaitResolver();
-      await drainer2;
-    }
-  };
-  const iterator = {
-    /** @returns {Promise<IteratorResult<T>>} */
-    async next() {
-      const chunk = chunkQueue.shift();
-      if (chunk) {
-        if (chunkQueue.length === 0) {
-          drainerResolver();
-        }
-        return { done: false, value: chunk };
-      }
-      if (ended) {
-        drainerResolver();
-        return { done: true, value: void 0 };
-      }
-      if (!outWait) {
-        outWait = new Promise((resolve) => {
-          outWaitResolver = () => {
-            outWait = null;
-            outWaitResolver = noop;
-            return resolve(iterator.next());
-          };
-        });
-      }
-      return outWait;
-    }
-  };
-  return { writer, iterator };
-}
-
-// ../../../node_modules/.pnpm/@ipld+car@5.4.2/node_modules/@ipld/car/src/writer-browser.js
-var CarWriter = class _CarWriter {
-  /**
-   * @param {CID[]} roots
-   * @param {CarEncoder} encoder
-   */
-  constructor(roots, encoder) {
-    this._encoder = encoder;
-    this._mutex = encoder.setRoots(roots);
-    this._ended = false;
-  }
-  /**
-   * Write a `Block` (a `{ cid:CID, bytes:Uint8Array }` pair) to the archive.
-   *
-   * @function
-   * @memberof CarWriter
-   * @instance
-   * @async
-   * @param {Block} block - A `{ cid:CID, bytes:Uint8Array }` pair.
-   * @returns {Promise<void>} The returned promise will only resolve once the
-   * bytes this block generates are written to the `out` iterable.
-   */
-  async put(block) {
-    if (!(block.bytes instanceof Uint8Array) || !block.cid) {
-      throw new TypeError("Can only write {cid, bytes} objects");
-    }
-    if (this._ended) {
-      throw new Error("Already closed");
-    }
-    const cid = CID.asCID(block.cid);
-    if (!cid) {
-      throw new TypeError("Can only write {cid, bytes} objects");
-    }
-    this._mutex = this._mutex.then(() => this._encoder.writeBlock({ cid, bytes: block.bytes }));
-    return this._mutex;
-  }
-  /**
-   * Finalise the CAR archive and signal that the `out` iterable should end once
-   * any remaining bytes are written.
-   *
-   * @function
-   * @memberof CarWriter
-   * @instance
-   * @async
-   * @returns {Promise<void>}
-   */
-  async close() {
-    if (this._ended) {
-      throw new Error("Already closed");
-    }
-    await this._mutex;
-    this._ended = true;
-    return this._encoder.close();
-  }
-  /**
-   * Returns the version number of the CAR file being written
-   *
-   * @returns {number}
-   */
-  version() {
-    return this._encoder.version();
-  }
-  /**
-   * Create a new CAR writer "channel" which consists of a
-   * `{ writer:CarWriter, out:AsyncIterable<Uint8Array> }` pair.
-   *
-   * @async
-   * @static
-   * @memberof CarWriter
-   * @param {CID[] | CID | void} roots
-   * @returns {WriterChannel} The channel takes the form of
-   * `{ writer:CarWriter, out:AsyncIterable<Uint8Array> }`.
-   */
-  static create(roots) {
-    roots = toRoots(roots);
-    const { encoder, iterator } = encodeWriter();
-    const writer = new _CarWriter(roots, encoder);
-    const out = new CarWriterOut(iterator);
-    return { writer, out };
-  }
-  /**
-   * Create a new CAR appender "channel" which consists of a
-   * `{ writer:CarWriter, out:AsyncIterable<Uint8Array> }` pair.
-   * This appender does not consider roots and does not produce a CAR header.
-   * It is designed to append blocks to an _existing_ CAR archive. It is
-   * expected that `out` will be concatenated onto the end of an existing
-   * archive that already has a properly formatted header.
-   *
-   * @async
-   * @static
-   * @memberof CarWriter
-   * @returns {WriterChannel} The channel takes the form of
-   * `{ writer:CarWriter, out:AsyncIterable<Uint8Array> }`.
-   */
-  static createAppender() {
-    const { encoder, iterator } = encodeWriter();
-    encoder.setRoots = () => Promise.resolve();
-    const writer = new _CarWriter([], encoder);
-    const out = new CarWriterOut(iterator);
-    return { writer, out };
-  }
-  /**
-   * Update the list of roots in the header of an existing CAR as represented
-   * in a Uint8Array.
-   *
-   * This operation is an _overwrite_, the total length of the CAR will not be
-   * modified. A rejection will occur if the new header will not be the same
-   * length as the existing header, in which case the CAR will not be modified.
-   * It is the responsibility of the user to ensure that the roots being
-   * replaced encode as the same length as the new roots.
-   *
-   * The byte array passed in an argument will be modified and also returned
-   * upon successful modification.
-   *
-   * @async
-   * @static
-   * @memberof CarWriter
-   * @param {Uint8Array} bytes
-   * @param {CID[]} roots - A new list of roots to replace the existing list in
-   * the CAR header. The new header must take up the same number of bytes as the
-   * existing header, so the roots should collectively be the same byte length
-   * as the existing roots.
-   * @returns {Promise<Uint8Array>}
-   */
-  static async updateRootsInBytes(bytes, roots) {
-    const reader = bytesReader(bytes);
-    await readHeader(reader);
-    const newHeader = createHeader(roots);
-    if (Number(reader.pos) !== newHeader.length) {
-      throw new Error(`updateRoots() can only overwrite a header of the same length (old header is ${reader.pos} bytes, new header is ${newHeader.length} bytes)`);
-    }
-    bytes.set(newHeader, 0);
-    return bytes;
-  }
-};
-var CarWriterOut = class {
-  /**
-   * @param {AsyncIterator<Uint8Array>} iterator
-   */
-  constructor(iterator) {
-    this._iterator = iterator;
-  }
-  [Symbol.asyncIterator]() {
-    if (this._iterating) {
-      throw new Error("Multiple iterator not supported");
-    }
-    this._iterating = true;
-    return this._iterator;
-  }
-};
-function encodeWriter() {
-  const iw = create();
-  const { writer, iterator } = iw;
-  const encoder = createEncoder(writer);
-  return { encoder, iterator };
-}
-function toRoots(roots) {
-  if (roots === void 0) {
-    return [];
-  }
-  if (!Array.isArray(roots)) {
-    const cid = CID.asCID(roots);
-    if (!cid) {
-      throw new TypeError("roots must be a single CID or an array of CIDs");
-    }
-    return [cid];
-  }
-  const _roots = [];
-  for (const root of roots) {
-    const _root = CID.asCID(root);
-    if (!_root) {
-      throw new TypeError("roots must be a single CID or an array of CIDs");
-    }
-    _roots.push(_root);
-  }
-  return _roots;
-}
-
-// src/ipfs/car.ts
-var { WARN, LOG, DEBUG, VERBOSE, ERROR } = g.setup(g.INFO);
-async function decodePubFromCar(car) {
-  const decoded = await getBlocksOfCar(car);
-  return await decodePubFromBlocks(decoded);
-}
-async function decodePubFromBlocks({ rootCID, blockStore }, recursionTrace = []) {
-  if (!rootCID || !blockStore) {
-    throw ERROR("Empty roots/blocks", { rootCID, blockStore });
-  }
-  const root = await getDecodedBlock(blockStore, rootCID);
-  let pubLogsArray;
-  let applogsCID = null;
-  let info = null;
-  VERBOSE(`[decodePubFromBlocks] root:`, rootCID.toString(), root, { blockStore });
-  if (!root) throw ERROR("root not found in blockStore", { blockStore, rootCID });
-  if (root?.info) {
-    applogsCID = root.applogs;
-    const applogsBlock = await getDecodedBlock(blockStore, applogsCID);
-    pubLogsArray = await unchunkApplogsBlock(applogsBlock, blockStore);
-    info = await getDecodedBlock(blockStore, root.info);
-    DEBUG(`new format - infoLogs`, info.logs.map((l) => ({ [l.toString()]: l })));
-  } else {
-    pubLogsArray = root.applogs;
-  }
-  const resolveLogFromCidLink = async (cidOrLink) => {
-    const cid = cidOrLink;
-    const applog = await getDecodedBlock(blockStore, cid);
-    if (!applog) {
-      ERROR(`Could not find applog CID in pub blocks:`, cid.toString(), { cid, root, blockStore });
-      throw new Error(`Could not find applog CID in pub blocks: ${cid.toString()}`);
-    }
-    if (applog.pv instanceof CID) applog.pv = applog.pv.toV1().toString();
-    return {
-      ...applog,
-      cid: cid.toV1().toString()
-    };
-  };
-  let applogs;
-  applogs = await Promise.all(pubLogsArray.map(resolveLogFromCidLink));
-  if (root.prev) {
-    if (areCidsEqual(root.prev, rootCID) || containsCid(recursionTrace, root.prev)) {
-      throw ERROR(`[pubFromBlocks] pub chain has a loop`, { rootCID, prev: root.prev, recursionTrace });
-    }
-    applogs = applogs.concat(
-      (await decodePubFromBlocks(
-        { rootCID: root.prev, blockStore },
-        [...recursionTrace, rootCID]
-      )).applogs
-    );
-  }
-  const result = {
-    cid: rootCID,
-    info: {
-      ...info,
-      logs: await Promise.all(info.logs.map(resolveLogFromCidLink))
-    },
-    applogsCID,
-    applogs
-  };
-  DEBUG("[decodePubFromBlocks] result:", result, { rootCID: rootCID.toString(), root, blockStore, applogs, info });
-  return result;
-}
-async function getBlocksOfCar(car) {
-  const rootsFromCar = await car.getRoots();
-  const roots = rootsFromCar.map((c) => (typeof c.toV1 === "function" ? c : CID.decode(c.bytes)).toV1().toString());
-  const blocks = /* @__PURE__ */ new Map();
-  for await (const { cid: cidFromCarblocks, bytes } of car.blocks()) {
-    const cid = typeof cidFromCarblocks.toV1 === "function" ? cidFromCarblocks : CID.decode(cidFromCarblocks.bytes);
-    VERBOSE({ cidFromCarblocks, cid });
-    blocks.set(cid.toV1().toString(), bytes);
-  }
-  if (roots.length !== 1) {
-    WARN("Unexpected roots count:", roots);
-  }
-  return {
-    rootCID: CID.parse(roots[0]),
-    blockStore: {
-      get: (cid) => blocks.get(cid.toV1().toString())
-    }
-  };
-}
-async function getDecodedBlock(blockStore, cid) {
-  try {
-    var blob = await blockStore.get(cid);
-    if (!blob) {
-      WARN("returning null");
-      return null;
-    }
-  } catch (err) {
-    if (err.message === "Not Found") return null;
-    throw err;
-  }
-  return decode3(blob);
-}
-async function makeCarOut(roots, blocks) {
-  const { writer, out } = CarWriter.create(Array.isArray(roots) ? roots : [roots]);
-  VERBOSE(`Writing ${blocks.length} blocks to CAR`, { roots, blocks });
-  blocks.forEach((b) => writer.put(b));
-  writer.close();
-  return out;
-}
-async function makeCarBlob(roots, blocks) {
-  const carOut = await makeCarOut(roots, blocks);
-  const chunks = [];
-  for await (const chunk of carOut) {
-    chunks.push(chunk);
-  }
-  const blob = new Blob(chunks);
-  return blob;
-}
-async function carFromBlob(blob) {
-  return CarReader.fromBytes(new Uint8Array(await blob.arrayBuffer()));
-}
-function streamReaderToIterable(bodyReader) {
-  return (async function* () {
-    while (true) {
-      const { done, value } = await bodyReader.read();
-      VERBOSE(`[car] chunk`, { done, value });
-      if (done) {
-        break;
-      }
-      yield value;
-    }
-  })();
-}
-
-// src/pubsub/pub-push.ts
-var { WARN: WARN2, LOG: LOG2, DEBUG: DEBUG2, VERBOSE: VERBOSE2, ERROR: ERROR2 } = g.setup(g.INFO);
-async function preparePubForPush(agent, appThread, threadToPublish, publication, prevPubCID) {
-  let logsToPublish = getLogsFromThread(threadToPublish);
-  DEBUG2(`[preparePubForPush] Collected ${logsToPublish.length} logs :`, {
-    logsToPublish,
-    threadOrLogsCount: threadToPublish.nameAndSizeUntracked || `[${threadToPublish.length}]`
-  });
-  const { sharedAgents, sharedKeyMap, sharedKey, pubCounter } = publication ?? {};
-  const getExistingOrNewLog = (thread, publication2, ag, at, vl) => {
-    let logInQuestion = rollingFilter(lastWriteWins(thread), { en: publication2.id, at }).latestLog;
-    if (!logInQuestion && vl !== void 0) {
-      logInQuestion = ensureTsPvAndFinalizeApplog({ ag, en: publication2.id, at, vl }, thread);
-    }
-    return logInQuestion;
-  };
-  const pubNameLog = getExistingOrNewLog(appThread, publication, agent.ag, "pub/name", publication.name);
-  const pubCounterLog = getExistingOrNewLog(appThread, publication, agent.ag, "pub/counter", `${agent.did}<::>${pubCounter}`);
-  const encryptApplog = async (applog, keyToUse) => {
-    const { log: eachLog, cid } = prepareForPub(applog);
-    const enc = new TextEncoder();
-    const stringified = wrapper_default(eachLog);
-    const stringifiedEncodedAppLogPayload = enc.encode(stringified);
-    VERBOSE2("[odd]", { eachLog, stringified, stringifiedEncodedAppLogPayload });
-    try {
-      const encPayload = await agent.crypto?.aes.encrypt(stringifiedEncodedAppLogPayload, keyToUse, "AES-GCM");
-      VERBOSE2("[odd] encrypted length:", stringifiedEncodedAppLogPayload.length, { encPayload });
-      return encPayload;
-    } catch (err) {
-      throw ERROR2("FAILED TO ENC payload length:", stringifiedEncodedAppLogPayload.length, { err });
-    }
-  };
-  let maybeEncryptedApplogs;
-  const encryptedApplogs = [];
-  const agentSharedKeyLogs = [];
-  if (sharedAgents) {
-    if (!sharedKey || !sharedKeyMap) {
-      throw ERROR2("sharedAgents but no Keys/Map", { sharedAgents, sharedKeyMap, sharedKey });
-    }
-    VERBOSE2("encrypting", { sharedAgents, sharedKeyMap });
-    for (const [eachAgent, eachEncKey] of Array.from(sharedKeyMap.entries())) {
-      VERBOSE2("adding key", { eachAgent, eachEncKey });
-      agentSharedKeyLogs.push({
-        ag: agent.ag,
-        en: eachAgent,
-        at: "pub/sharedKey",
-        vl: eachEncKey
-        // these are encrypted with the derived key from the local agent private and remote agent public keys
-      });
-    }
-    const CIDlist = [];
-    const pubCIDmap = {};
-    for (const eachLog of logsToPublish) {
-      VERBOSE2("[crypto] encrypting ", { eachLog, sharedKey });
-      const encPayload = await encryptApplog(eachLog, sharedKey);
-      DEBUG2("[crypto] encrypted ", { eachLog, encPayload, sharedKey });
-      encryptedApplogs.push({ enc: encPayload });
-    }
-    maybeEncryptedApplogs = encryptedApplogs;
-  } else {
-    maybeEncryptedApplogs = logsToPublish;
-  }
-  DEBUG2("adding all agent info and pubAtoms", {
-    publication,
-    agent,
-    logsToPublish,
-    // threadToPublish, - very verbose
-    agentSharedKeyLogs
-  });
-  const infoLogs = [
-    ...rollingFilter(lastWriteWins(appThread), {
-      // TODO: use static filter for performance
-      en: agent.ag,
-      at: ["agent/ecdh", "agent/jwkd", "agent/appAgent"]
-    }).applogs,
-    ...pubNameLog ? [pubNameLog] : [],
-    ...pubCounterLog ? [pubCounterLog] : [],
-    ...agentSharedKeyLogs
-  ];
-  DEBUG2(`[preparePubForPush] info logs:`, infoLogs);
-  if (!infoLogs.find(({ at }) => at === "agent/appAgent")) throw ERROR2(`[preparePubForPush] appThread missing agent/appAgent log`);
-  const applogsToEncode = keepTruthy(maybeEncryptedApplogs);
-  const infologsToEncode = keepTruthy(infoLogs);
-  if (!applogsToEncode.length) {
-    throw ERROR2("no valid applogs", { agent, maybeEncryptedApplogs, infoLogs, applogsToEncode, infologsToEncode, prevPubCID });
-  }
-  if (!infologsToEncode.length) {
-    throw ERROR2("no valid infologs", { agent, maybeEncryptedApplogs, infoLogs, applogsToEncode, infologsToEncode, prevPubCID });
-  }
-  const encodedPub = await encodePubAsCar(agent, applogsToEncode, infologsToEncode, prevPubCID);
-  DEBUG2("inPreparePubForPush", { encodedPub });
-  return encodedPub;
-}
-async function encodePubAsCar(agent, applogs, infoLogs, prevPubCID) {
-  DEBUG2(`[encodePubAsCar] encoding`, { agent, applogs, infoLogs });
-  const { cids: infoLogCids, encodedApplogs: encodedInfoLogs } = await encodeApplogsAsIPLD(infoLogs);
-  const { cids: applogCids, encodedApplogs } = await encodeApplogsAsIPLD(applogs);
-  let blocks = encodedApplogs.concat(encodedInfoLogs);
-  const infoLogsWrap = await encodeBlockOriginal({ logs: infoLogCids });
-  blocks.push(infoLogsWrap);
-  const { rootCID: chunkRootCID, blocks: chunkBlocks } = await chunkApplogs(applogCids);
-  blocks = blocks.concat(chunkBlocks);
-  const infoSignature = await agent.sign(infoLogsWrap.cid.bytes);
-  const applogsSignature = await agent.sign(chunkRootCID.bytes);
-  const root = {
-    info: infoLogsWrap.cid,
-    applogs: chunkRootCID,
-    infoSignature,
-    applogsSignature,
-    prev: prevPubCID
-  };
-  DEBUG2("[encodePubAsCar] encoding root", { root, logCids: applogCids, infoLogCids });
-  const encodedRoot = await encodeBlockOriginal(root);
-  blocks.push(encodedRoot);
-  DEBUG2("[encodePubAsCar] => root", { encodedRoot });
-  return {
-    cid: encodedRoot.cid,
-    blob: await makeCarBlob(encodedRoot.cid, blocks),
-    // TODO: create CarBuilder (incl .encodeAndAdd({...}))
-    blocks,
-    infoLogCids,
-    applogCids
-  };
-}
-async function chunkApplogs(applogCids, size = 1e4) {
-  if (!applogCids.length) throw ERROR2(`[chunkApplogs] called with empty array`);
-  const chunks = [];
-  for (let i = 0; i < applogCids.length; i += size) {
-    const chunk = await encodeBlockOriginal({ logs: applogCids.slice(i, Math.min(i + applogCids.length, i + size)) });
-    chunks.push(chunk);
-  }
-  if (chunks.length === 1) return { rootCID: chunks[0].cid, blocks: chunks };
-  const root = await encodeBlockOriginal({ chunks: chunks.map((chunk) => chunk.cid) });
-  const blocks = [root, ...chunks];
-  DEBUG2(`[chunkApplogs] ${applogCids.length} logs chunked into ${chunks.length}`, { applogCids, root, blocks, chunks, dagJson: src_exports });
-  return { rootCID: root.cid, blocks, chunks };
-}
-async function unchunkApplogsBlock(block, blockStore) {
-  if (isPubBlockChunks(block)) {
-    return (await Promise.all(
-      block.chunks.map(async (chunkCid) => {
-        const block2 = await getDecodedBlock(blockStore, chunkCid);
-        if (!block2.logs) throw ERROR2(`Weird chunk`, block2);
-        return block2.logs;
-      })
-    )).flat();
-  } else {
-    return block.logs;
-  }
-}
-function isPubBlockChunks(block) {
-  return block.chunks;
-}
-async function encodeApplogsAsCar(applogs) {
-  const encoded = await encodeApplogsAsIPLD(applogs);
-  if (!encoded) throw ERROR2("invalid applogs cannot continue", { applogs, encoded });
-  const { cids, encodedApplogs } = encoded;
-  const root = { applogs: cids };
-  const encodedRoot = await encodeBlockOriginal(root);
-  DEBUG2("[encodeApplogsAsCar] encoded root", { cids, encodedRoot });
-  return await makeCarBlob(encodedRoot.cid, [encodedRoot, ...encodedApplogs]);
-}
-async function encodeApplogsAsIPLD(applogs) {
-  DEBUG2({ applogs });
-  const validApplogs = applogs.filter((eachLog) => !!eachLog);
-  DEBUG2({ validApplogs });
-  if (!validApplogs.length) throw ERROR2("no valid applogs");
-  const preppedLogs = validApplogs.map((log) => prepareForPub(log).log);
-  const encodedApplogs = await Promise.all(preppedLogs.map(encodeBlockOriginal));
-  DEBUG2("[encodeApplogsAsIpld] encoded applogs", { preppedLogs, encodedApplogs });
-  const cids = encodedApplogs.map((b) => {
-    if (!b.cid) throw ERROR2(`[publish] no cid for encoded log:`, b);
-    return b.cid;
-  });
-  return { cids, encodedApplogs };
-}
-
-export {
-  preparePubForPush,
-  encodePubAsCar,
-  chunkApplogs,
-  unchunkApplogsBlock,
-  isPubBlockChunks,
-  encodeApplogsAsCar,
-  decodePubFromCar,
-  decodePubFromBlocks,
-  getBlocksOfCar,
-  getDecodedBlock,
-  makeCarOut,
-  makeCarBlob,
-  carFromBlob,
-  streamReaderToIterable
-};
-//# sourceMappingURL=chunk-JEOQUHTK.min.js.map