@dtudury/streamo 0.1.0

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (49) hide show
  1. package/.claude/settings.local.json +10 -0
  2. package/LICENSE +661 -0
  3. package/README.md +194 -0
  4. package/ROADMAP.md +111 -0
  5. package/bin/streamo.js +238 -0
  6. package/jsconfig.json +9 -0
  7. package/package.json +26 -0
  8. package/public/apps/chat/index.html +61 -0
  9. package/public/apps/chat/main.js +144 -0
  10. package/public/apps/styles/proto.css +71 -0
  11. package/public/index.html +109 -0
  12. package/public/streamo/Addressifier.js +212 -0
  13. package/public/streamo/CodecRegistry.js +195 -0
  14. package/public/streamo/ContentMap.js +79 -0
  15. package/public/streamo/DESIGN.md +61 -0
  16. package/public/streamo/Repo.js +176 -0
  17. package/public/streamo/Repo.test.js +82 -0
  18. package/public/streamo/RepoRegistry.js +91 -0
  19. package/public/streamo/RepoRegistry.test.js +87 -0
  20. package/public/streamo/Signature.js +15 -0
  21. package/public/streamo/Signer.js +91 -0
  22. package/public/streamo/Streamo.js +392 -0
  23. package/public/streamo/Streamo.test.js +205 -0
  24. package/public/streamo/archiveSync.js +62 -0
  25. package/public/streamo/chat-cli.js +122 -0
  26. package/public/streamo/chat-server.js +60 -0
  27. package/public/streamo/codecs.js +400 -0
  28. package/public/streamo/fileSync.js +238 -0
  29. package/public/streamo/h.js +202 -0
  30. package/public/streamo/h.mount.test.js +67 -0
  31. package/public/streamo/h.test.js +121 -0
  32. package/public/streamo/mount.js +248 -0
  33. package/public/streamo/originSync.js +60 -0
  34. package/public/streamo/outletSync.js +105 -0
  35. package/public/streamo/registrySync.js +333 -0
  36. package/public/streamo/registrySync.test.js +373 -0
  37. package/public/streamo/s3Sync.js +99 -0
  38. package/public/streamo/stateFileSync.js +17 -0
  39. package/public/streamo/sync.test.js +98 -0
  40. package/public/streamo/utils/NestedSet.js +41 -0
  41. package/public/streamo/utils/Recaller.js +77 -0
  42. package/public/streamo/utils/mockDOM.js +113 -0
  43. package/public/streamo/utils/nextTick.js +22 -0
  44. package/public/streamo/utils/noble-secp256k1.js +602 -0
  45. package/public/streamo/utils/testing.js +90 -0
  46. package/public/streamo/utils.js +57 -0
  47. package/public/streamo/webSync.js +118 -0
  48. package/scripts/serve.js +15 -0
  49. package/smoke.test.js +132 -0
@@ -0,0 +1,400 @@
1
+ import { numberToVar, varToNumber, range } from './utils.js'
2
+ import { Signature } from './Signature.js'
3
+
4
/**
 * Balanced binary tree node — internal to this file.
 * Arrays and objects are encoded as trees of these nodes.
 * Not exported; callers always work with plain arrays and objects.
 */
class Duple {
  /**
   * @param {Array} items at least 2 values; longer arrays are split into a
   *   balanced tree whose left side is the largest power-of-two prefix
   * @throws {Error} if fewer than 2 items are given
   */
  constructor (items) {
    if (items.length === 2) {
      this.v = items
    } else if (items.length > 2) {
      // Largest power of two strictly less than items.length.
      const split = 2 ** (31 - Math.clz32(items.length - 1))
      const right = items.length - split === 1 ? items[items.length - 1] : new Duple(items.slice(split))
      this.v = [new Duple(items.slice(0, split)), right]
    } else {
      throw new Error('Duple requires at least 2 items')
    }
  }

  /**
   * In-order list of leaf values.
   * Only recursion results are spread; a leaf value is pushed as-is, so a
   * leaf that happens to be a plain Array (e.g. a decoded nested array) is
   * kept intact instead of being merged into its parent — the previous
   * `[l, r].flat()` form flattened such leaves one level.
   * @returns {Array}
   */
  flat () {
    const out = []
    for (const x of this.v) {
      if (x instanceof Duple) out.push(...x.flat())
      else out.push(x)
    }
    return out
  }

  /**
   * In-order list of leaf Duples (used for object entry pairs).
   * @returns {Duple[]}
   * @throws {Error} if one child is a Duple and the other is not
   */
  flatDuples () {
    const bothDuple = this.v.every(v => v instanceof Duple)
    const noDuple = this.v.every(v => !(v instanceof Duple))
    if (bothDuple) return [...this.v[0].flatDuples(), ...this.v[1].flatDuples()]
    if (noDuple) return [this]
    throw new Error('mixed Duple tree')
  }
}
37
+
38
/**
 * Build all codecs for a CodecRegistry.
 *
 * Each codec is an object with:
 *   type — string name
 *   baseFooter — set by the registry after registration
 *   partReaders — optional array of option-arrays (see below)
 *   getWidth — optional override; defaults to sum of part widths + 1
 *   encode(v) — returns Uint8Array or falsy
 *   decode(code, asRefs) — returns the JS value
 *
 * The registry is passed as `r` so codecs can call r.encode / r.decode /
 * r.append / r.resolve for sub-values.
 *
 * @param {object} r registry interface: { encode, decode, append, resolve, addressOf, footerToCodec }
 * @returns {object} codec definitions keyed by name, in registration order
 */
export function makeCodecs (r) {
  // ── Part reader factories ────────────────────────────────────────────────
  // A partReader is a function (code: Uint8Array) → { type, width, address?, getCode?, getDecoded }
  // Each reader is handed `code` truncated to end just before its part and
  // reads its bytes from the tail (negative offsets from the end).

  // Option 0 of inlineOrAddress: the part is a complete sub-code stored
  // inline; its width is determined by the sub-code's own codec.
  const inlineReader = [code => {
    const codec = r.footerToCodec[code.at(-1)]
    const width = codec.getWidth(code)
    return {
      type: `inline(${width})`,
      width,
      getCode: () => code.slice(-width),
      getDecoded: asRefs => r.decode(code.slice(-width), asRefs)
    }
  }]

  // Options 1-4 of inlineOrAddress: the part is a 1..4-byte address pointing
  // at bytes stored elsewhere in the registry.
  const addressReaders = range(4).map(i => code => {
    const width = i + 1
    const address = varToNumber(code.slice(-width))
    return {
      type: `addr(${width})`,
      width,
      address,
      getCode: () => r.resolve(address),
      getDecoded: asRefs => r.decode(r.resolve(address), asRefs)
    }
  })

  const inlineOrAddress = [...inlineReader, ...addressReaders] // 5 options: option 0 = inline, 1-4 = 1..4-byte address

  // Raw 1..4-byte tail reads; no sub-codec involved.
  const wordReaders = range(4).map(i => code => {
    const width = i + 1
    return { type: `word(${width})`, width, getDecoded: () => code.slice(-width) }
  })

  // Raw 0..4-byte tail reads; width 0 yields an empty slice.
  const literalReaders = range(5).map(width => code => ({
    type: `literal(${width})`,
    width,
    getDecoded: () => code.slice(-width)
  }))

  // Fixed 64-byte compact signature bytes.
  const signatureReader = [code => ({
    type: 'sig(64)',
    width: 64,
    getDecoded: () => code.slice(-64)
  })]

  // 128 zero-width options: the value n is carried entirely by the footer.
  const uint7Readers = range(128).map(n => () => ({
    type: `uint7(${n})`,
    width: 0,
    getDecoded: () => new Uint8Array([n])
  }))

  // ── Shared helper ────────────────────────────────────────────────────────

  /**
   * Ensure `code` is stored and return [partBytes, optionIndex].
   * Option 0: inline (just the raw bytes).
   * Options 1-4: 1-4-byte little-endian address.
   */
  function inlineOrAddressPart (code) {
    const existingAddr = r.addressOf(code)
    // Worst-case address this code would occupy if appended now; when the raw
    // bytes are no longer than that address's encoding, inlining is cheaper.
    const nextAddr = Math.max(0, r.byteLength + code.length - 1)
    if (existingAddr === undefined && code.length <= numberToVar(nextAddr).length) {
      return [code, 0]
    }
    const addr = existingAddr ?? r.append(code)
    const addrBytes = numberToVar(addr)
    return [addrBytes, addrBytes.length] // option = 1..4
  }

  // Encode each value as a part (inline or by address) and pack the chosen
  // option indices into the footer byte in mixed radix — last part is the
  // least-significant digit — on top of codec.baseFooter.
  function encodeMultipart (values, codec, asRefs) {
    if (values.length !== codec.partReaders.length) throw new Error('part count mismatch')
    const parts = []
    let base = 1
    let footer = codec.baseFooter
    for (let i = values.length - 1; i >= 0; i--) {
      const [part, option] = inlineOrAddressPart(r.encode(values[i], asRefs))
      footer += base * option
      base *= codec.partReaders[i].length
      parts.unshift(part)
    }
    const out = new Uint8Array(parts.reduce((n, p) => n + p.length, 0) + 1)
    let pos = 0
    for (const p of parts) { out.set(p, pos); pos += p.length }
    out[pos] = footer
    return out
  }

  // Inverse of encodeMultipart: unpack option digits from the footer and walk
  // parts right-to-left. `end` is a negative offset from the end of `code`
  // (-1 skips the footer byte); each part's width moves it further left.
  function decodeParts (code) {
    const footer = code.at(-1)
    const codec = r.footerToCodec[footer]
    if (!codec?.partReaders?.length) return []
    const parts = []
    let option = footer - codec.baseFooter
    let end = -1
    for (let i = codec.partReaders.length - 1; i >= 0; i--) {
      const opts = codec.partReaders[i]
      const reader = opts[option % opts.length]
      option = Math.floor(option / opts.length)
      const part = reader(code.subarray(0, end))
      end -= part.width
      parts.unshift(part)
    }
    return parts
  }

  // Stable address of a single-part value (needed for DUPLE decode with asRefs)
  function getPartAddress (part) {
    if (part.address !== undefined) return part.address
    const code = part.getCode()
    if (code.length === 1) return -(code[0] + 1) // negative address for single-byte primitives
    return r.addressOf(code) ?? r.append(code)
  }

  // ── Codec definitions ────────────────────────────────────────────────────
  // Footer-only codecs: encode returns a falsy value when `v` does not match,
  // letting the registry try the next codec.

  const UNDEFINED = {
    encode: v => v === undefined && new Uint8Array([UNDEFINED.baseFooter]),
    decode: () => undefined
  }

  const NULL = {
    encode: v => v === null && new Uint8Array([NULL.baseFooter]),
    decode: () => null
  }

  const FALSE = {
    encode: v => v === false && new Uint8Array([FALSE.baseFooter]),
    decode: () => false
  }

  const TRUE = {
    encode: v => v === true && new Uint8Array([TRUE.baseFooter]),
    decode: () => true
  }

  /** ≤4-byte Uint8Array stored literally; the footer offset encodes the byte length */
  const WORD = {
    partReaders: [literalReaders],
    encode (v) {
      if (v instanceof Uint8Array && v.length >= 1 && v.length <= 4) {
        const out = new Uint8Array(v.length + 1)
        out.set(v)
        out[v.length] = WORD.baseFooter + v.length
        return out
      }
    },
    decode (code) { return decodeParts(code)[0].getDecoded() }
  }

  /** Arbitrary-length Uint8Array (>4 bytes), stored via Duple tree of WORDs */
  const UINT8ARRAY = {
    partReaders: [inlineOrAddress],
    encode (v) {
      if (v instanceof Uint8Array && v.length > 4) {
        const words = []
        // Split into 4-byte words; the last word may be shorter.
        for (let i = 0; i < v.length; i += 4) words.push(v.slice(i, Math.min(i + 4, v.length)))
        return encodeMultipart([new Duple(words)], UINT8ARRAY)
      }
    },
    decode (code) {
      const parts = decodeParts(code)
      const duple = parts[0].getDecoded(false)
      const words = duple.flat()
      const total = words.reduce((n, w) => n + w.length, 0)
      const out = new Uint8Array(total)
      let pos = 0
      for (const w of words) { out.set(w, pos); pos += w.length }
      return out
    }
  }

  const EMPTY_STRING = {
    encode: v => v === '' && new Uint8Array([EMPTY_STRING.baseFooter]),
    decode: () => ''
  }

  /** Non-empty string, stored as its UTF-8 bytes */
  const STRING = {
    partReaders: [inlineOrAddress],
    encode (v) {
      if (typeof v === 'string' && v !== '') {
        const bytes = new TextEncoder().encode(v)
        return encodeMultipart([bytes], STRING)
      }
    },
    decode (code) {
      return new TextDecoder().decode(decodeParts(code)[0].getDecoded(false))
    }
  }

  /** Non-negative integer 0..127, carried entirely by the footer byte */
  const UINT7 = {
    partReaders: [uint7Readers],
    encode (v) {
      if (Number.isInteger(v) && v >= 0 && v < 128) return new Uint8Array([UINT7.baseFooter + v])
    },
    decode (code) { return decodeParts(code)[0].getDecoded()[0] }
  }

  /** Any JS number, stored as its 8 IEEE-754 float64 bytes */
  const FLOAT64 = {
    partReaders: [inlineOrAddress],
    encode (v) {
      if (typeof v === 'number') {
        return encodeMultipart([new Uint8Array(new Float64Array([v]).buffer)], FLOAT64)
      }
    },
    decode (code) {
      const bytes = decodeParts(code)[0].getDecoded(false)
      // NOTE(review): Float64Array views require bytes.byteOffset to be
      // 8-byte aligned — confirm getDecoded(false) always yields fresh
      // (offset-0) buffers here.
      return new Float64Array(bytes.buffer, bytes.byteOffset, 1)[0]
    }
  }

  /** Date, stored as its epoch-milliseconds value in float64 bytes */
  const DATE = {
    partReaders: [inlineOrAddress],
    encode (v) {
      if (v instanceof Date) {
        return encodeMultipart([new Uint8Array(new Float64Array([v.getTime()]).buffer)], DATE)
      }
    },
    decode (code) {
      const bytes = decodeParts(code)[0].getDecoded(false)
      return new Date(new Float64Array(bytes.buffer, bytes.byteOffset, 1)[0])
    }
  }

  /** Signature: a var-width signer address plus 64 compact signature bytes */
  const SIGNATURE = {
    partReaders: [wordReaders, signatureReader],
    encode (v) {
      if (v instanceof Signature) {
        const addrBytes = numberToVar(v.address)
        const out = new Uint8Array(addrBytes.length + 64 + 1)
        out.set(addrBytes)
        out.set(v.compactRawBytes, addrBytes.length)
        // Footer packed by hand, mirroring encodeMultipart's mixed-radix
        // scheme: the single signatureReader option contributes 0, so only
        // the address-width option (0..3) remains.
        out[addrBytes.length + 64] = SIGNATURE.baseFooter + addrBytes.length - 1
        return out
      }
    },
    decode (code) {
      const parts = decodeParts(code)
      return new Signature(varToNumber(parts[0].getDecoded()), parts[1].getDecoded())
    }
  }

  /** Internal balanced binary tree node. Never exposed to callers. */
  const DUPLE = {
    partReaders: [inlineOrAddress, inlineOrAddress],
    encode (v, asRefs) {
      if (v instanceof Duple) return encodeMultipart(v.v, DUPLE, asRefs)
    },
    decode (code, asRefs) {
      const parts = decodeParts(code)
      const leftCode = parts[0].getCode()
      const rightCode = parts[1].getCode()
      const leftIsDuple = r.footerToCodec[leftCode.at(-1)]?.type === 'DUPLE'
      const rightIsDuple = r.footerToCodec[rightCode.at(-1)]?.type === 'DUPLE'
      if (!leftIsDuple && !rightIsDuple) {
        // 'all' means return addresses for both slots (used by array asRefs)
        const nameIsRef = asRefs === 'all' || (Array.isArray(asRefs) && asRefs[1])
        const valueIsRef = asRefs === 'all' || asRefs === true || (Array.isArray(asRefs) && asRefs[0])
        return new Duple([
          nameIsRef ? getPartAddress(parts[0]) : parts[0].getDecoded(false),
          valueIsRef ? getPartAddress(parts[1]) : parts[1].getDecoded(false)
        ])
      }
      // Non-leaf: at least one child is itself a Duple subtree.
      // With 'all', recurse into sub-duples and take the address of any leaf.
      if (asRefs === 'all') {
        return new Duple([
          leftIsDuple ? parts[0].getDecoded('all') : getPartAddress(parts[0]),
          rightIsDuple ? parts[1].getDecoded('all') : getPartAddress(parts[1])
        ])
      }
      return new Duple([parts[0].getDecoded(asRefs), parts[1].getDecoded(asRefs)])
    }
  }

  const EMPTY_ARRAY = {
    encode: v => Array.isArray(v) && v.length === 0 && new Uint8Array([EMPTY_ARRAY.baseFooter]),
    decode: () => []
  }

  /** Non-empty array: dense multi-element arrays become a Duple tree;
   *  sparse or single-element arrays are stored as an object with a length key */
  const ARRAY = {
    partReaders: [inlineOrAddress],
    encode (v, asRefs) {
      if (!Array.isArray(v) || v.length === 0) return
      if (v.length > 1 && Object.keys(v).length === v.length) {
        return encodeMultipart([new Duple(v)], ARRAY, asRefs)
      }
      // sparse or single-element array: encode as object with length key
      const obj = Object.assign({}, v, { length: v.length })
      return encodeMultipart([obj], ARRAY, asRefs)
    },
    decode (code, asRefs) {
      // 'all' mode: return an address for every element rather than decoded values
      const inner = decodeParts(code)[0].getDecoded(asRefs === true ? 'all' : asRefs)
      if (inner instanceof Duple) return inner.flat()
      // Object form: Object.assign onto [] restores indices and length.
      return Object.assign([], inner)
    }
  }

  /** Empty plain object (prototype must be Object.prototype or null) */
  const EMPTY_OBJECT = {
    encode (v) {
      if (!v || typeof v !== 'object' || Array.isArray(v)) return
      const proto = Object.getPrototypeOf(v)
      if (proto !== Object.prototype && proto !== null) return
      if (Object.keys(v).length === 0) return new Uint8Array([EMPTY_OBJECT.baseFooter])
    },
    decode: () => ({})
  }

  /** Object with ≥1 key, stored as a Duple tree of [key, value] leaf pairs */
  const OBJECT = {
    partReaders: [inlineOrAddress],
    encode (v, asRefs) {
      if (!v || typeof v !== 'object' || Array.isArray(v) || Object.keys(v).length === 0) return
      const duples = Object.entries(v).map(([k, val]) => new Duple([k, val]))
      const tree = duples.length === 1 ? duples[0] : new Duple(duples)
      return encodeMultipart([tree], OBJECT, asRefs)
    },
    decode (code, asRefs) {
      const tree = decodeParts(code)[0].getDecoded(asRefs)
      return Object.fromEntries(tree.flatDuples().map(d => [d.v[0], d.v[1]]))
    }
  }

  /**
   * A boxed value — wraps any encoded value so it can be stored as a
   * first-class address rather than inline. Used by Stream.set() to
   * store a changing top-level value.
   */
  const VARIABLE = {
    partReaders: [inlineOrAddress],
    encode: () => undefined, // not directly encodable; use _encode
    _encode (encodedValue) {
      const [part, option] = inlineOrAddressPart(encodedValue)
      const out = new Uint8Array(part.length + 1)
      out.set(part)
      out[part.length] = VARIABLE.baseFooter + option
      return out
    },
    decode (code, asRefs) {
      return decodeParts(code)[0].getDecoded(asRefs)
    }
  }

  return { UNDEFINED, NULL, FALSE, TRUE, WORD, UINT8ARRAY, EMPTY_STRING, STRING, UINT7, FLOAT64, DATE, SIGNATURE, DUPLE, EMPTY_ARRAY, ARRAY, EMPTY_OBJECT, OBJECT, VARIABLE }
}
@@ -0,0 +1,238 @@
1
+ import { subscribe } from '@parcel/watcher'
2
+ import { mkdir, readFile, readdir, stat, unlink, writeFile } from 'fs/promises'
3
+ import { existsSync, readFileSync } from 'fs'
4
+ import { dirname, join, relative } from 'path'
5
+ import { compile } from '@gerhobbelt/gitignore-parser'
6
+
7
// Paths excluded from sync regardless of the folder's .gitignore contents.
const ALWAYS_IGNORE = '.env\n.DS_Store\n.git\nnode_modules'
8
+
9
/**
 * Build a filter function from the folder's .gitignore plus hard-coded ignores.
 * The archive dir itself (and everything under it) is always rejected.
 * @param {string} folder
 * @param {string} dataDir the archive dir, always excluded
 * @returns {(rel: string) => boolean} true when the relative path should sync
 */
function buildFilter (folder, dataDir) {
  let patterns = ALWAYS_IGNORE
  try {
    const gitignoreText = readFileSync(join(folder, '.gitignore'), 'utf8')
    patterns = `${gitignoreText}\n${patterns}`
  } catch {
    // no readable .gitignore: fall back to the hard-coded ignores alone
  }
  const matcher = compile(patterns)
  const dataDirRel = relative(folder, dataDir)
  return rel => {
    if (rel === dataDirRel) return false
    if (rel.startsWith(`${dataDirRel}/`)) return false
    return matcher.accepts(rel)
  }
}
22
+
23
/**
 * Decode file bytes: UTF-8 text → string, binary → Uint8Array.
 * Anything containing a NUL byte, or that is not valid UTF-8, is treated
 * as binary.
 * @param {Buffer} bytes
 * @returns {string|Uint8Array}
 */
function decodeBytes (bytes) {
  const asBinary = () => new Uint8Array(bytes)
  if (bytes.includes(0)) return asBinary()
  try {
    return new TextDecoder('utf-8', { fatal: true }).decode(bytes)
  } catch {
    // strict decode failed: not UTF-8, keep the raw bytes
    return asBinary()
  }
}
32
+
33
/**
 * Decode a file's value for storage: JSON files become parsed objects (or
 * strings if the JSON is invalid), everything else stays as-is.
 * @param {string} rel relative path
 * @param {string|Uint8Array} value
 * @returns {object|string|Uint8Array}
 */
function decodeFile (rel, value) {
  if (typeof value !== 'string' || !rel.endsWith('.json')) return value
  try {
    return JSON.parse(value)
  } catch {
    // invalid JSON under a .json path: keep the raw text
    return value
  }
}
46
+
47
/**
 * Encode a file value for writing to disk: objects stored under a .json path
 * are serialized back to pretty-printed JSON.
 * @param {string} rel
 * @param {any} value
 * @returns {string|Uint8Array|null} null means skip
 */
function encodeFile (rel, value) {
  const isPlainData = value != null && typeof value === 'object' && !(value instanceof Uint8Array)
  if (isPlainData && rel.endsWith('.json')) {
    return `${JSON.stringify(value, null, 2)}\n`
  }
  if (typeof value === 'string') return value
  if (value instanceof Uint8Array) return value
  // numbers, booleans, null, etc. have no on-disk representation
  return null
}
61
+
62
/**
 * Recursively read all accepted files in folder.
 * Unreadable directories are silently skipped; the newest mtime seen across
 * all read files is reported alongside the file map.
 * @param {string} folder
 * @param {(rel: string) => boolean} accepts
 * @returns {Promise<{ files: Object, maxMtime: number }>}
 */
async function readFolder (folder, accepts) {
  const files = {}
  let maxMtime = 0
  async function walk (dir) {
    let entries
    try {
      entries = await readdir(dir, { withFileTypes: true })
    } catch {
      // best-effort: directory vanished or is unreadable
      return
    }
    for (const entry of entries) {
      const abs = join(dir, entry.name)
      const rel = relative(folder, abs)
      if (!accepts(rel)) continue
      if (entry.isDirectory()) {
        await walk(abs)
        continue
      }
      if (!entry.isFile()) continue
      const [bytes, info] = await Promise.all([readFile(abs), stat(abs)])
      files[rel] = decodeFile(rel, decodeBytes(bytes))
      maxMtime = Math.max(maxMtime, info.mtimeMs)
    }
  }
  await walk(folder)
  return { files, maxMtime }
}
89
+
90
/**
 * Write a files object to folder, creating directories as needed.
 * Values that encodeFile cannot represent (null return) are skipped
 * entirely — including directory creation, so no empty parent directories
 * are left behind for unstorable values.
 * @param {string} folder
 * @param {Object} files
 */
async function writeToFolder (folder, files) {
  for (const [rel, content] of Object.entries(files)) {
    const encoded = encodeFile(rel, content)
    // Skip BEFORE touching the filesystem (previously mkdir ran first and
    // created stray empty directories for skipped entries).
    if (encoded === null) continue
    const abs = join(folder, rel)
    await mkdir(dirname(abs), { recursive: true })
    const bytes = typeof encoded === 'string' ? new TextEncoder().encode(encoded) : encoded
    await writeFile(abs, bytes)
  }
}
105
+
106
/**
 * Delete files from folder. Best-effort: paths that no longer exist (or
 * cannot be unlinked) are ignored.
 * @param {string} folder
 * @param {string[]} rels
 */
async function deleteFromFolder (folder, rels) {
  for (const rel of rels) {
    const abs = join(folder, rel)
    try {
      await unlink(abs)
    } catch {
      // already gone or not removable — nothing to do
    }
  }
}
116
+
117
/**
 * Rough equality check for a files object (handles Uint8Array values).
 * Non-binary object values are compared by JSON serialization, so key order
 * matters for those — "rough" by design.
 * @param {Object} a
 * @param {Object} b
 * @returns {boolean}
 */
function filesEqual (a, b) {
  if (!a || !b) return a === b
  const aKeys = Object.keys(a).sort()
  const bKeys = Object.keys(b).sort()
  if (aKeys.length !== bKeys.length) return false
  if (aKeys.some((k, i) => k !== bKeys[i])) return false
  const sameValue = (av, bv) => {
    if (av instanceof Uint8Array && bv instanceof Uint8Array) {
      if (av.length !== bv.length) return false
      return av.every((byte, i) => byte === bv[i])
    }
    if (av === bv) return true
    if (av == null || bv == null) return false
    if (typeof av === 'object' && typeof bv === 'object') {
      return JSON.stringify(av) === JSON.stringify(bv)
    }
    return false
  }
  return aKeys.every(k => sameValue(a[k], b[k]))
}
145
+
146
/**
 * Two-way sync between a folder and a Repo.
 *
 * Initial state (startup authority via timestamps):
 *   - repo has commits and no disk file is newer than the last commit → repo wins
 *   - repo is empty or any disk file is newer than the last commit → disk wins
 *
 * Ongoing:
 *   - Repo changes (new commit from peer/archive) → write changed files to disk
 *   - Disk changes → checkout, update files, commit to repo
 *
 * @param {import('./Repo.js').Repo} repo
 * @param {string} [folder='.']
 * @param {string} [dataDir='.stream']
 * @returns {Promise<import('@parcel/watcher').AsyncSubscription>}
 */
export async function fileSync (repo, folder = '.', dataDir = '.stream') {
  const accepts = buildFilter(folder, dataDir)

  // One full scan up front: both the file map and the newest mtime, which
  // decides startup authority below.
  const { files: diskFiles, maxMtime: diskMtime } = await readFolder(folder, accepts)
  const lastCommit = repo.lastCommit
  const commitTime = lastCommit ? lastCommit.date.getTime() : 0

  if (lastCommit && diskMtime <= commitTime) {
    // Repo wins: write committed files to disk
    const repoFiles = repo.files
    // Disk files absent from the repo are removed so disk mirrors the commit.
    const toDelete = Object.keys(diskFiles).filter(k => !(k in repoFiles))
    await writeToFolder(folder, repoFiles)
    await deleteFromFolder(folder, toDelete)
  } else if (Object.keys(diskFiles).length > 0) {
    // Disk wins: commit current disk state as the initial commit
    const working = repo.checkout()
    working.set(diskFiles)
    repo.commit(working, 'initial')
  }

  // Repo → disk: retries if a write is in progress so no commit is ever dropped
  let writingToDisk = false
  let pendingDiskFlush = false

  // Coalescing single-flight flush: a request arriving mid-write sets
  // pendingDiskFlush, and the finally block runs one more flush afterwards.
  async function flushToDisk () {
    if (writingToDisk) { pendingDiskFlush = true; return }
    writingToDisk = true
    pendingDiskFlush = false
    try {
      const files = repo.files
      if (!files) return
      // Re-read disk so only a real difference triggers writes/deletes.
      const { files: current } = await readFolder(folder, accepts)
      if (filesEqual(current, files)) return
      const toDelete = Object.keys(current).filter(k => !(k in files))
      await writeToFolder(folder, files)
      await deleteFromFolder(folder, toDelete)
    } finally {
      writingToDisk = false
      // NOTE(review): rerun is intentionally not awaited; a rejection here
      // would be an unhandled promise rejection — confirm intended.
      if (pendingDiskFlush) flushToDisk()
    }
  }

  // Disk → repo: single-flight; filesystem events that arrive mid-commit are
  // naturally re-triggered by the repo watch that follows the commit
  let committingFromDisk = false

  // Repo → disk: fires when a new commit lands (from peer, archive, or local commit)
  repo.watch('fileSync:repo→disk', () => {
    // Skip echoes of our own disk→repo commits.
    if (committingFromDisk) return
    const commit = repo.lastCommit
    if (!commit) return
    flushToDisk()
  })

  // Disk → repo: fires when the filesystem changes
  const subscription = await subscribe(folder, (err, events) => {
    if (err) { console.error('fileSync watcher error:', err); return }
    const relevant = events.filter(e => accepts(relative(folder, e.path)))
    if (!relevant.length) return
    if (committingFromDisk) return
    committingFromDisk = true
    // Fire-and-forget async block guarded by committingFromDisk above.
    ;(async () => {
      try {
        const { files: newFiles } = await readFolder(folder, accepts)
        const current = repo.files ?? {}
        // No-op events (e.g. our own writes) are filtered out here.
        if (filesEqual(current, newFiles)) return
        const working = repo.checkout()
        working.set(newFiles)
        repo.commit(working, 'file change')
      } finally {
        committingFromDisk = false
      }
    })()
  })

  return subscription
}