braid-text 0.2.67 → 0.2.69

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (3)
  1. package/index.js +507 -292
  2. package/package.json +2 -2
  3. package/test/test.html +492 -149
package/index.js CHANGED
@@ -16,6 +16,170 @@ function create_braid_text() {
 
  let max_encoded_key_size = 240
 
+ braid_text.sync = async (a, b, options = {}) => {
+ var unsync_cbs = []
+ options.my_unsync = () => unsync_cbs.forEach(cb => cb())
+
+ if (!options.merge_type) options.merge_type = 'dt'
+
+ if ((a instanceof URL) === (b instanceof URL)) {
+ var a_ops = {
+ subscribe: update => braid_text.put(b, update),
+ merge_type: options.merge_type,
+ }
+ braid_text.get(a, a_ops)
+ unsync_cbs.push(() => braid_text.forget(a, a_ops))
+
+ var b_ops = {
+ subscribe: update => braid_text.put(a, update),
+ merge_type: options.merge_type,
+ }
+ braid_text.get(b, b_ops)
+ unsync_cbs.push(() => braid_text.forget(b, b_ops))
+ } else {
+ // make a=local and b=remote (swap if not)
+ if (a instanceof URL) { let swap = a; a = b; b = swap }
+
+ var resource = (typeof a == 'string') ? await get_resource(a) : a
+
+ function extend_frontier(frontier, version, parents) {
+ // special case:
+ // if current frontier has all parents,
+ // then we can just remove those
+ // and add version
+ var frontier_set = new Set(frontier)
+ if (parents.length &&
+ parents.every(p => frontier_set.has(p))) {
+ parents.forEach(p => frontier_set.delete(p))
+ for (var event of version) frontier_set.add(event)
+ frontier = [...frontier_set.values()]
+ } else {
+ // full-proof approach..
+ var looking_for = frontier_set
+ for (var event of version) looking_for.add(event)
+
+ frontier = []
+ var shadow = new Set()
+
+ var bytes = resource.doc.toBytes()
+ var [_, events, parentss] = braid_text.dt_parse([...bytes])
+ for (var i = events.length - 1; i >= 0 && looking_for.size; i--) {
+ var e = events[i].join('-')
+ if (looking_for.has(e)) {
+ looking_for.delete(e)
+ if (!shadow.has(e)) frontier.push(e)
+ shadow.add(e)
+ }
+ if (shadow.has(e))
+ parentss[i].forEach(p => shadow.add(p.join('-')))
+ }
+ }
+ return frontier.sort()
+ }
+
+ var closed
+ var disconnect
+ unsync_cbs.push(() => {
+ closed = true
+ disconnect()
+ })
+
+ connect()
+ async function connect() {
+ if (options.on_connect) options.on_connect()
+
+ if (closed) return
+
+ var ac = new AbortController()
+ var disconnect_cbs = [() => ac.abort()]
+
+ disconnect = () => disconnect_cbs.forEach(cb => cb())
+
+ try {
+ // fork-point
+ async function check_version(version) {
+ var r = await braid_fetch(b.href, {
+ signal: ac.signal,
+ method: "HEAD",
+ version
+ })
+ if (!r.ok && r.status !== 309 && r.status !== 500)
+ throw new Error(`unexpected HEAD status: ${r.status}`)
+ return r.ok
+ }
+
+ function extend_fork_point(update) {
+ resource.meta.fork_point =
+ extend_frontier(resource.meta.fork_point,
+ update.version, update.parents)
+ resource.change_meta()
+ }
+
+ // see if remote has the fork point
+ if (resource.meta.fork_point &&
+ !(await check_version(resource.meta.fork_point))) {
+ resource.meta.fork_point = null
+ resource.change_meta()
+ }
+
+ // otherwise let's binary search for new fork point..
+ if (!resource.meta.fork_point) {
+ var bytes = resource.doc.toBytes()
+ var [_, events, __] = braid_text.dt_parse([...bytes])
+ events = events.map(x => x.join('-'))
+
+ var min = -1
+ var max = events.length
+ while (min + 1 < max) {
+ var i = Math.floor((min + max)/2)
+ var version = [events[i]]
+ if (await check_version(version)) {
+ min = i
+ resource.meta.fork_point = version
+ } else max = i
+ }
+ }
+
+ // local -> remote
+ var a_ops = {
+ subscribe: update => {
+ update.signal = ac.signal
+ braid_text.put(b, update).then((x) => {
+ extend_fork_point(update)
+ }).catch(e => {
+ if (e.name === 'AbortError') {
+ // ignore
+ } else throw e
+ })
+ }
+ }
+ if (resource.meta.fork_point)
+ a_ops.parents = resource.meta.fork_point
+ disconnect_cbs.push(() => braid_text.forget(a, a_ops))
+ braid_text.get(a, a_ops)
+
+ // remote -> local
+ var b_ops = {
+ dont_retry: true,
+ subscribe: async update => {
+ await braid_text.put(a, update)
+ extend_fork_point(update)
+ },
+ }
+ disconnect_cbs.push(() => braid_text.forget(b, b_ops))
+ // NOTE: this should not return, but it might throw
+ await braid_text.get(b, b_ops)
+ } catch (e) {
+ if (closed) return
+
+ disconnect()
+ console.log(`disconnected, retrying in 1 second`)
+ setTimeout(connect, 1000)
+ }
+ }
+ }
+ }
+
  braid_text.serve = async (req, res, options = {}) => {
  options = {
  key: req.url.split('?')[0], // Default key
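Usage note on the new braid_text.sync API above: a minimal sketch, assuming a hypothetical local key 'my-doc' and remote URL. Based on the added code, sync() pairs get/put subscriptions between the two resources and installs an options.my_unsync callback for tearing the link down.

    var options = {}
    // one argument may be a string key (local resource), the other a URL (remote peer);
    // two URLs or two local resources are handled by the first branch of sync()
    braid_text.sync('my-doc', new URL('https://example.com/my-doc'), options)
    // ...later, stop syncing via the callback that sync() installed on options
    options.my_unsync()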
@@ -36,6 +200,10 @@ function create_braid_text() {
 
  braidify(req, res)
  if (res.is_multiplexer) return
+
+ // Sort version arrays from external sources
+ if (req.version) req.version.sort()
+ if (req.parents) req.parents.sort()
  } catch (e) {
  return my_end(500, "The server failed to process this request. The error generated was: " + e)
  }
@@ -237,7 +405,7 @@ function create_braid_text() {
  req.parents,
  resource.actor_seqs,
  // approximation of memory usage for this update
- body ? body.length :
+ body != null ? body.length :
  patches.reduce((a, b) => a + b.range.length + b.content.length, 0),
  options.recv_buffer_max_time,
  options.recv_buffer_max_space)
@@ -270,7 +438,8 @@ function create_braid_text() {
  return done_my_turn(550, "repr-digest mismatch!")
  }
 
- if (req.version) got_event(options.key, req.version[0], change_count)
+ if (req.version?.length)
+ got_event(options.key, req.version[0], change_count)
 
  res.setHeader("Version", get_current_version())
 
@@ -291,6 +460,15 @@ function create_braid_text() {
  }
 
  braid_text.get = async (key, options) => {
+ if (options && options.version) {
+ validate_version_array(options.version)
+ options.version.sort()
+ }
+ if (options && options.parents) {
+ validate_version_array(options.parents)
+ options.parents.sort()
+ }
+
  if (key instanceof URL) {
  if (!options) options = {}
 
@@ -298,17 +476,29 @@ function create_braid_text() {
 
  var params = {
  signal: options.my_abort.signal,
- retry: () => true,
  subscribe: !!options.subscribe,
  heartbeats: 120,
  }
+ if (!options.dont_retry) params.retry = () => true
  for (var x of ['headers', 'parents', 'version', 'peer'])
  if (options[x] != null) params[x] = options[x]
 
  var res = await braid_fetch(key.href, params)
 
  if (options.subscribe) {
- res.subscribe(options.subscribe)
+ if (options.dont_retry) {
+ var error_happened
+ var error_promise = new Promise((_, fail) => error_happened = fail)
+ }
+
+ res.subscribe(async update => {
+ update.body = update.body_text
+ if (update.patches)
+ for (var p of update.patches) p.content = p.content_text
+ await options.subscribe(update)
+ }, e => options.dont_retry && error_happened(e))
+
+ if (options.dont_retry) return await error_promise
  return res
  } else return await res.text()
  }
@@ -319,9 +509,6 @@ function create_braid_text() {
  return (await get_resource(key)).val
  }
 
- if (options.version) validate_version_array(options.version)
- if (options.parents) validate_version_array(options.parents)
-
  let resource = (typeof key == 'string') ? await get_resource(key) : key
  var version = resource.version
 
@@ -359,13 +546,19 @@ function create_braid_text() {
  body: resource.doc.get()
  }
  } else {
+ options.my_subscribe_chain = Promise.resolve()
+ options.my_subscribe = (x) =>
+ options.my_subscribe_chain =
+ options.my_subscribe_chain.then(() =>
+ options.subscribe(x))
+
  if (options.merge_type != "dt") {
  let x = { version }
 
  if (!options.parents && !options.version) {
  x.parents = []
  x.body = resource.doc.get()
- options.subscribe(x)
+ options.my_subscribe(x)
  } else {
  x.parents = options.version ? options.version : options.parents
  options.my_last_seen_version = x.parents
@@ -374,7 +567,7 @@ function create_braid_text() {
  let local_version = OpLog_remote_to_local(resource.doc, x.parents)
  if (local_version) {
  x.patches = get_xf_patches(resource.doc, local_version)
- options.subscribe(x)
+ options.my_subscribe(x)
  }
  }
 
@@ -386,7 +579,7 @@ function create_braid_text() {
  // optimization: if client wants past current version,
  // send empty dt
  if (options.parents && v_eq(options.parents, version)) {
- options.subscribe({ encoding: 'dt', body: new Doc().toBytes() })
+ options.my_subscribe({ encoding: 'dt', body: new Doc().toBytes() })
  } else {
  var bytes = resource.doc.toBytes()
  if (options.parents) {
@@ -395,12 +588,12 @@ function create_braid_text() {
  dt_get_local_version(bytes, options.parents))
  doc.free()
  }
- options.subscribe({ encoding: 'dt', body: bytes })
+ options.my_subscribe({ encoding: 'dt', body: bytes })
  }
  } else {
  var updates = null
  if (!options.parents && !options.version) {
- options.subscribe({
+ options.my_subscribe({
  version: [],
  parents: [],
  body: "",
@@ -413,7 +606,7 @@ function create_braid_text() {
  }
 
  for (let u of updates)
- options.subscribe({
+ options.my_subscribe({
  version: [u.version],
  parents: u.parents,
  patches: [{ unit: u.unit, range: u.range, content: u.content }],
@@ -444,323 +637,337 @@ function create_braid_text() {
  }
 
  braid_text.put = async (key, options) => {
+ if (options.version) {
+ validate_version_array(options.version)
+ options.version.sort()
+ }
+ if (options.parents) {
+ validate_version_array(options.parents)
+ options.parents.sort()
+ }
+
  if (key instanceof URL) {
  options.my_abort = new AbortController()
+ if (options.signal)
+ options.signal.addEventListener('abort', () =>
+ options.my_abort.abort())
 
  var params = {
  method: 'PUT',
  signal: options.my_abort.signal,
  retry: () => true,
  }
- for (var x of ['headers', 'parents', 'version', 'peer', 'body'])
+ for (var x of ['headers', 'parents', 'version', 'peer', 'body', 'patches'])
  if (options[x] != null) params[x] = options[x]
 
  return await braid_fetch(key.href, params)
  }
 
- let { version, patches, body, peer } = options
-
- // support for json patch puts..
- if (patches?.length && patches.every(x => x.unit === 'json')) {
+ return await within_fiber('put:' + key, async () => {
  let resource = (typeof key == 'string') ? await get_resource(key) : key
-
- let x = JSON.parse(resource.doc.get())
- for (let p of patches)
- apply_patch(x, p.range, p.content === '' ? undefined : JSON.parse(p.content))
 
- return await braid_text.put(key, {
- body: JSON.stringify(x, null, 4)
- })
- }
+ // support for json patch puts..
+ if (options.patches && options.patches.length &&
+ options.patches.every(x => x.unit === 'json')) {
+ let x = JSON.parse(resource.doc.get())
+ for (let p of options.patches)
+ apply_patch(x, p.range, p.content === '' ? undefined : JSON.parse(p.content))
+ options = { body: JSON.stringify(x, null, 4) }
+ }
 
- let resource = (typeof key == 'string') ? await get_resource(key) : key
+ let { version, patches, body, peer } = options
 
- if (options.transfer_encoding === 'dt') {
- var start_i = 1 + resource.doc.getLocalVersion().reduce((a, b) => Math.max(a, b), -1)
-
- resource.doc.mergeBytes(body)
+ if (options.transfer_encoding === 'dt') {
+ var start_i = 1 + resource.doc.getLocalVersion().reduce((a, b) => Math.max(a, b), -1)
+
+ resource.doc.mergeBytes(body)
+
+ var end_i = resource.doc.getLocalVersion().reduce((a, b) => Math.max(a, b), -1)
+ for (var i = start_i; i <= end_i; i++) {
+ let v = resource.doc.localToRemoteVersion([i])[0]
+ if (!resource.actor_seqs[v[0]]) resource.actor_seqs[v[0]] = new braid_text.RangeSet()
+ resource.actor_seqs[v[0]].add_range(v[1], v[1])
+ }
+ resource.val = resource.doc.get()
+ resource.version = resource.doc.getRemoteVersion().map(x => x.join("-")).sort()
 
- var end_i = resource.doc.getLocalVersion().reduce((a, b) => Math.max(a, b), -1)
- for (var i = start_i; i <= end_i; i++) {
- let v = resource.doc.localToRemoteVersion([i])[0]
- if (!resource.actor_seqs[v[0]]) resource.actor_seqs[v[0]] = new braid_text.RangeSet()
- resource.actor_seqs[v[0]].add_range(v[1], v[1])
+ await resource.db_delta(body)
+ return { change_count: end_i - start_i + 1 }
  }
- resource.val = resource.doc.get()
- resource.version = resource.doc.getRemoteVersion().map(x => x.join("-")).sort()
 
- await resource.db_delta(body)
- return { change_count: end_i - start_i + 1 }
- }
+ if (version && !version.length) {
+ console.log(`warning: ignoring put with empty version`)
+ return { change_count: 0 }
+ }
+ if (version && version.length > 1)
+ throw new Error(`cannot put a version with multiple ids`)
+
+ // translate a single parent of "root" to the empty array (same meaning)
+ let options_parents = options.parents
+ if (options_parents?.length === 1 && options_parents[0] === 'root')
+ options_parents = []
+
+ if (body != null && patches) throw new Error(`cannot have a body and patches`)
+ if (body != null && (typeof body !== 'string')) throw new Error(`body must be a string`)
+ if (patches) validate_patches(patches)
+
+ if (options_parents) {
+ // make sure we have all these parents
+ for (let p of options_parents) {
+ let P = decode_version(p)
+ if (!resource.actor_seqs[P[0]]?.has(P[1]))
+ throw new Error(`missing parent version: ${p}`)
+ }
+ }
 
- if (version) validate_version_array(version)
+ let parents = resource.version
+ let og_parents = options_parents || parents
 
- // translate a single parent of "root" to the empty array (same meaning)
- let options_parents = options.parents
- if (options_parents?.length === 1 && options_parents[0] === 'root')
- options_parents = []
+ let max_pos = resource.length_cache.get('' + og_parents) ??
+ (v_eq(parents, og_parents) ? resource.doc.len() : dt_len(resource.doc, og_parents))
+
+ if (body != null) {
+ patches = [{
+ unit: 'text',
+ range: `[0:${max_pos}]`,
+ content: body
+ }]
+ }
 
- if (options_parents) validate_version_array(options_parents)
- if (body != null && patches) throw new Error(`cannot have a body and patches`)
- if (body != null && (typeof body !== 'string')) throw new Error(`body must be a string`)
- if (patches) validate_patches(patches)
+ let og_patches = patches
+ patches = patches.map((p) => ({
+ ...p,
+ range: p.range.match(/\d+/g).map((x) => parseInt(x)),
+ content_codepoints: [...p.content],
+ })).sort((a, b) => a.range[0] - b.range[0])
 
- if (options_parents) {
- // make sure we have all these parents
- for (let p of options_parents) {
- let P = decode_version(p)
- if (!resource.actor_seqs[P[0]]?.has(P[1]))
- throw new Error(`missing parent version: ${p}`)
+ // validate patch positions
+ let must_be_at_least = 0
+ for (let p of patches) {
+ if (p.range[0] < must_be_at_least || p.range[0] > max_pos) throw new Error(`invalid patch range position: ${p.range[0]}`)
+ if (p.range[1] < p.range[0] || p.range[1] > max_pos) throw new Error(`invalid patch range position: ${p.range[1]}`)
+ must_be_at_least = p.range[1]
  }
- }
 
- let parents = resource.version
- let og_parents = options_parents || parents
+ let change_count = patches.reduce((a, b) => a + b.content_codepoints.length + (b.range[1] - b.range[0]), 0)
 
- let max_pos = resource.length_cache.get('' + og_parents) ??
- (v_eq(parents, og_parents) ? resource.doc.len() : dt_len(resource.doc, og_parents))
-
- if (body != null) {
- patches = [{
- unit: 'text',
- range: `[0:${max_pos}]`,
- content: body
- }]
- }
+ let og_v = version?.[0] || `${(is_valid_actor(peer) && peer) || Math.random().toString(36).slice(2, 7)}-${change_count - 1}`
 
- let og_patches = patches
- patches = patches.map((p) => ({
- ...p,
- range: p.range.match(/\d+/g).map((x) => parseInt(x)),
- content_codepoints: [...p.content],
- })).sort((a, b) => a.range[0] - b.range[0])
+ let v = decode_version(og_v)
 
- // validate patch positions
- let must_be_at_least = 0
- for (let p of patches) {
- if (p.range[0] < must_be_at_least || p.range[0] > max_pos) throw new Error(`invalid patch range position: ${p.range[0]}`)
- if (p.range[1] < p.range[0] || p.range[1] > max_pos) throw new Error(`invalid patch range position: ${p.range[1]}`)
- must_be_at_least = p.range[1]
- }
+ resource.length_cache.put(`${v[0]}-${v[1]}`, patches.reduce((a, b) =>
+ a + (b.content_codepoints?.length ?? 0) - (b.range[1] - b.range[0]),
+ max_pos))
 
- let change_count = patches.reduce((a, b) => a + b.content_codepoints.length + (b.range[1] - b.range[0]), 0)
+ // validate version: make sure we haven't seen it already
+ if (resource.actor_seqs[v[0]]?.has(v[1])) {
 
- let og_v = version?.[0] || `${(is_valid_actor(peer) && peer) || Math.random().toString(36).slice(2, 7)}-${change_count - 1}`
+ if (!options.validate_already_seen_versions) return { change_count }
 
- let v = decode_version(og_v)
+ // if we have seen it already, make sure it's the same as before
+ let updates = dt_get_patches(resource.doc, og_parents)
 
- resource.length_cache.put(`${v[0]}-${v[1]}`, patches.reduce((a, b) =>
- a + (b.content_codepoints?.length ?? 0) - (b.range[1] - b.range[0]),
- max_pos))
+ let seen = {}
+ for (let u of updates) {
+ u.version = decode_version(u.version)
 
- // validate version: make sure we haven't seen it already
- if (resource.actor_seqs[v[0]]?.has(v[1])) {
-
- if (!options.validate_already_seen_versions) return { change_count }
+ if (!u.content) {
+ // delete
+ let v = u.version
+ for (let i = 0; i < u.end - u.start; i++) {
+ let ps = (i < u.end - u.start - 1) ? [`${v[0]}-${v[1] - i - 1}`] : u.parents
+ seen[JSON.stringify([v[0], v[1] - i, ps, u.start + i])] = true
+ }
+ } else {
+ // insert
+ let v = u.version
+ let content = [...u.content]
+ for (let i = 0; i < content.length; i++) {
+ let ps = (i > 0) ? [`${v[0]}-${v[1] - content.length + i}`] : u.parents
+ seen[JSON.stringify([v[0], v[1] + 1 - content.length + i, ps, u.start + i, content[i]])] = true
+ }
+ }
+ }
 
- // if we have seen it already, make sure it's the same as before
- let updates = dt_get_patches(resource.doc, og_parents)
+ v = `${v[0]}-${v[1] + 1 - change_count}`
+ let ps = og_parents
+ let offset = 0
+ for (let p of patches) {
+ // delete
+ for (let i = p.range[0]; i < p.range[1]; i++) {
+ let vv = decode_version(v)
 
- let seen = {}
- for (let u of updates) {
- u.version = decode_version(u.version)
+ if (!seen[JSON.stringify([vv[0], vv[1], ps, p.range[1] - 1 + offset])]) throw new Error('invalid update: different from previous update with same version')
 
- if (!u.content) {
- // delete
- let v = u.version
- for (let i = 0; i < u.end - u.start; i++) {
- let ps = (i < u.end - u.start - 1) ? [`${v[0]}-${v[1] - i - 1}`] : u.parents
- seen[JSON.stringify([v[0], v[1] - i, ps, u.start + i])] = true
+ offset--
+ ps = [v]
+ v = vv
+ v = `${v[0]}-${v[1] + 1}`
  }
- } else {
  // insert
- let v = u.version
- let content = [...u.content]
- for (let i = 0; i < content.length; i++) {
- let ps = (i > 0) ? [`${v[0]}-${v[1] - content.length + i}`] : u.parents
- seen[JSON.stringify([v[0], v[1] + 1 - content.length + i, ps, u.start + i, content[i]])] = true
+ for (let i = 0; i < p.content_codepoints?.length ?? 0; i++) {
+ let vv = decode_version(v)
+ let c = p.content_codepoints[i]
+
+ if (!seen[JSON.stringify([vv[0], vv[1], ps, p.range[1] + offset, c])]) throw new Error('invalid update: different from previous update with same version')
+
+ offset++
+ ps = [v]
+ v = vv
+ v = `${v[0]}-${v[1] + 1}`
  }
  }
+
+ // we already have this version, so nothing left to do
+ return { change_count: change_count }
  }
+ if (!resource.actor_seqs[v[0]]) resource.actor_seqs[v[0]] = new RangeSet()
+ resource.actor_seqs[v[0]].add_range(v[1] + 1 - change_count, v[1])
 
+ // reduce the version sequence by the number of char-edits
  v = `${v[0]}-${v[1] + 1 - change_count}`
+
  let ps = og_parents
+
+ let v_before = resource.doc.getLocalVersion()
+
+ let bytes = []
+
  let offset = 0
  for (let p of patches) {
  // delete
- for (let i = p.range[0]; i < p.range[1]; i++) {
- let vv = decode_version(v)
-
- if (!seen[JSON.stringify([vv[0], vv[1], ps, p.range[1] - 1 + offset])]) throw new Error('invalid update: different from previous update with same version')
-
- offset--
- ps = [v]
- v = vv
- v = `${v[0]}-${v[1] + 1}`
+ let del = p.range[1] - p.range[0]
+ if (del) {
+ bytes.push(dt_create_bytes(v, ps, p.range[0] + offset, del, null))
+ offset -= del
+ v = decode_version(v)
+ ps = [`${v[0]}-${v[1] + (del - 1)}`]
+ v = `${v[0]}-${v[1] + del}`
  }
  // insert
- for (let i = 0; i < p.content_codepoints?.length ?? 0; i++) {
- let vv = decode_version(v)
- let c = p.content_codepoints[i]
-
- if (!seen[JSON.stringify([vv[0], vv[1], ps, p.range[1] + offset, c])]) throw new Error('invalid update: different from previous update with same version')
-
- offset++
- ps = [v]
- v = vv
- v = `${v[0]}-${v[1] + 1}`
+ if (p.content?.length) {
+ bytes.push(dt_create_bytes(v, ps, p.range[1] + offset, 0, p.content))
+ offset += p.content_codepoints.length
+ v = decode_version(v)
+ ps = [`${v[0]}-${v[1] + (p.content_codepoints.length - 1)}`]
+ v = `${v[0]}-${v[1] + p.content_codepoints.length}`
  }
  }
 
- // we already have this version, so nothing left to do
- return { change_count: change_count }
- }
- if (!resource.actor_seqs[v[0]]) resource.actor_seqs[v[0]] = new RangeSet()
- resource.actor_seqs[v[0]].add_range(v[1] + 1 - change_count, v[1])
-
- // reduce the version sequence by the number of char-edits
- v = `${v[0]}-${v[1] + 1 - change_count}`
-
- let ps = og_parents
-
- let v_before = resource.doc.getLocalVersion()
-
- let bytes = []
-
- let offset = 0
- for (let p of patches) {
- // delete
- let del = p.range[1] - p.range[0]
- if (del) {
- bytes.push(dt_create_bytes(v, ps, p.range[0] + offset, del, null))
- offset -= del
- v = decode_version(v)
- ps = [`${v[0]}-${v[1] + (del - 1)}`]
- v = `${v[0]}-${v[1] + del}`
- }
- // insert
- if (p.content?.length) {
- bytes.push(dt_create_bytes(v, ps, p.range[1] + offset, 0, p.content))
- offset += p.content_codepoints.length
- v = decode_version(v)
- ps = [`${v[0]}-${v[1] + (p.content_codepoints.length - 1)}`]
- v = `${v[0]}-${v[1] + p.content_codepoints.length}`
- }
- }
+ for (let b of bytes) resource.doc.mergeBytes(b)
+ resource.val = resource.doc.get()
+ resource.version = resource.doc.getRemoteVersion().map(x => x.join("-")).sort()
 
- for (let b of bytes) resource.doc.mergeBytes(b)
- resource.val = resource.doc.get()
- resource.version = resource.doc.getRemoteVersion().map(x => x.join("-")).sort()
+ var post_commit_updates = []
 
- var post_commit_updates = []
+ if (options.merge_type != "dt") {
+ patches = get_xf_patches(resource.doc, v_before)
+ if (braid_text.verbose) console.log(JSON.stringify({ patches }))
 
- if (options.merge_type != "dt") {
- patches = get_xf_patches(resource.doc, v_before)
- if (braid_text.verbose) console.log(JSON.stringify({ patches }))
+ let version = resource.version
 
- let version = resource.version
+ for (let client of resource.simpleton_clients) {
+ if (peer && client.peer === peer) {
+ client.my_last_seen_version = [og_v]
+ }
 
- for (let client of resource.simpleton_clients) {
- if (peer && client.peer === peer) {
- client.my_last_seen_version = [og_v]
- }
+ function set_timeout(time_override) {
+ if (client.my_timeout) clearTimeout(client.my_timeout)
+ client.my_timeout = setTimeout(() => {
+ // if the doc has been freed, exit early
+ if (resource.doc.__wbg_ptr === 0) return
 
- function set_timeout(time_override) {
- if (client.my_timeout) clearTimeout(client.my_timeout)
- client.my_timeout = setTimeout(() => {
- // if the doc has been freed, exit early
- if (resource.doc.__wbg_ptr === 0) return
+ let version = resource.version
+ let x = { version }
+ x.parents = client.my_last_seen_version
 
- let version = resource.version
- let x = { version }
- x.parents = client.my_last_seen_version
+ if (braid_text.verbose) console.log("rebasing after timeout.. ")
+ if (braid_text.verbose) console.log(" client.my_unused_version_count = " + client.my_unused_version_count)
+ x.patches = get_xf_patches(resource.doc, OpLog_remote_to_local(resource.doc, client.my_last_seen_version))
 
- if (braid_text.verbose) console.log("rebasing after timeout.. ")
- if (braid_text.verbose) console.log(" client.my_unused_version_count = " + client.my_unused_version_count)
- x.patches = get_xf_patches(resource.doc, OpLog_remote_to_local(resource.doc, client.my_last_seen_version))
+ if (braid_text.verbose) console.log(`sending from rebase: ${JSON.stringify(x)}`)
+ client.my_subscribe(x)
+ client.my_last_sent_version = x.version
 
- if (braid_text.verbose) console.log(`sending from rebase: ${JSON.stringify(x)}`)
- client.subscribe(x)
- client.my_last_sent_version = x.version
+ delete client.my_timeout
+ }, time_override ?? Math.min(3000, 23 * Math.pow(1.5, client.my_unused_version_count - 1)))
+ }
 
- delete client.my_timeout
- }, time_override ?? Math.min(3000, 23 * Math.pow(1.5, client.my_unused_version_count - 1)))
- }
+ if (client.my_timeout) {
+ if (peer && client.peer === peer) {
+ if (!v_eq(client.my_last_sent_version, og_parents)) {
+ // note: we don't add to client.my_unused_version_count,
+ // because we're already in a timeout;
+ // we'll just extend it here..
+ set_timeout()
+ } else {
+ // hm.. it appears we got a correctly parented version,
+ // which suggests that maybe we can stop the timeout early
+ set_timeout(0)
+ }
+ }
+ continue
+ }
 
- if (client.my_timeout) {
+ let x = { version }
  if (peer && client.peer === peer) {
  if (!v_eq(client.my_last_sent_version, og_parents)) {
- // note: we don't add to client.my_unused_version_count,
- // because we're already in a timeout;
- // we'll just extend it here..
+ client.my_unused_version_count = (client.my_unused_version_count ?? 0) + 1
  set_timeout()
+ continue
  } else {
- // hm.. it appears we got a correctly parented version,
- // which suggests that maybe we can stop the timeout early
- set_timeout(0)
+ delete client.my_unused_version_count
  }
- }
- continue
- }
 
- let x = { version }
- if (peer && client.peer === peer) {
- if (!v_eq(client.my_last_sent_version, og_parents)) {
- client.my_unused_version_count = (client.my_unused_version_count ?? 0) + 1
- set_timeout()
- continue
+ x.parents = options.version
+ if (!v_eq(version, options.version)) {
+ if (braid_text.verbose) console.log("rebasing..")
+ x.patches = get_xf_patches(resource.doc, OpLog_remote_to_local(resource.doc, [og_v]))
+ } else {
+ // this client already has this version,
+ // so let's pretend to send it back, but not
+ if (braid_text.verbose) console.log(`not reflecting back to simpleton`)
+ client.my_last_sent_version = x.version
+ continue
+ }
  } else {
- delete client.my_unused_version_count
+ x.parents = parents
+ x.patches = patches
  }
-
- x.parents = options.version
- if (!v_eq(version, options.version)) {
- if (braid_text.verbose) console.log("rebasing..")
- x.patches = get_xf_patches(resource.doc, OpLog_remote_to_local(resource.doc, [og_v]))
- } else {
- // this client already has this version,
- // so let's pretend to send it back, but not
- if (braid_text.verbose) console.log(`not reflecting back to simpleton`)
+ if (braid_text.verbose) console.log(`sending: ${JSON.stringify(x)}`)
+ post_commit_updates.push([client, x])
+ client.my_last_sent_version = x.version
+ }
+ } else {
+ if (resource.simpleton_clients.size) {
+ let version = resource.version
+ patches = get_xf_patches(resource.doc, v_before)
+ let x = { version, parents, patches }
+ if (braid_text.verbose) console.log(`sending: ${JSON.stringify(x)}`)
+ for (let client of resource.simpleton_clients) {
+ if (client.my_timeout) continue
+ post_commit_updates.push([client, x])
  client.my_last_sent_version = x.version
- continue
  }
- } else {
- x.parents = parents
- x.patches = patches
  }
- if (braid_text.verbose) console.log(`sending: ${JSON.stringify(x)}`)
- post_commit_updates.push([client, x])
- client.my_last_sent_version = x.version
  }
- } else {
- if (resource.simpleton_clients.size) {
- let version = resource.version
- patches = get_xf_patches(resource.doc, v_before)
- let x = { version, parents, patches }
- if (braid_text.verbose) console.log(`sending: ${JSON.stringify(x)}`)
- for (let client of resource.simpleton_clients) {
- if (client.my_timeout) continue
+
+ var x = {
+ version: [og_v],
+ parents: og_parents,
+ patches: og_patches,
+ }
+ for (let client of resource.clients) {
+ if (!peer || client.peer !== peer)
  post_commit_updates.push([client, x])
- client.my_last_sent_version = x.version
- }
  }
- }
 
- var x = {
- version: [og_v],
- parents: og_parents,
- patches: og_patches,
- }
- for (let client of resource.clients) {
- if (!peer || client.peer !== peer)
- post_commit_updates.push([client, x])
- }
+ await resource.db_delta(resource.doc.getPatchSince(v_before))
 
- await resource.db_delta(resource.doc.getPatchSince(v_before))
+ for (var [client, x] of post_commit_updates) client.my_subscribe(x)
 
- for (var [client, x] of post_commit_updates) client.subscribe(x)
-
- return { change_count }
+ return { change_count }
+ })
  }
 
  braid_text.list = async () => {
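A rough sketch of the reworked braid_text.put path above (the URL, key, versions, and patch below are hypothetical): remote puts now forward patches alongside body, and local puts run inside a per-key fiber so concurrent writes to the same key are applied one at a time.

    // remote put: 'patches' is now included in the params forwarded to braid_fetch
    await braid_text.put(new URL('https://example.com/my-doc'), {
        parents: ['alice-3'],
        version: ['alice-4'],
        patches: [{ unit: 'text', range: '[0:0]', content: 'hi ' }]
    })
    // local put: executes inside the 'put:' + key chain, serialized with other puts to 'my-doc'
    await braid_text.put('my-doc', { body: 'hi world' })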
@@ -800,10 +1007,7 @@ function create_braid_text() {
  : { change: () => { }, change_meta: () => {} }
 
  resource.db_delta = change
- resource.update_meta = (meta) => {
- Object.assign(resource.meta, meta)
- change_meta()
- }
+ resource.change_meta = change_meta
 
  resource.actor_seqs = {}
 
@@ -938,48 +1142,45 @@ function create_braid_text() {
  } catch (e) {}
  set_meta(JSON.parse(meta_file_content))
 
- let chain = Promise.resolve()
  return {
- change: async (bytes) => {
- await (chain = chain.then(async () => {
- if (!bytes) currentSize = threshold
- else currentSize += bytes.length + 4 // we account for the extra 4 bytes for uint32
- const filename = `${braid_text.db_folder}/${encoded}.${currentNumber}`
- if (currentSize < threshold) {
- if (braid_text.verbose) console.log(`appending to db..`)
-
- let buffer = Buffer.allocUnsafe(4)
- buffer.writeUInt32LE(bytes.length, 0)
- await fs.promises.appendFile(filename, buffer)
- await fs.promises.appendFile(filename, bytes)
-
- if (braid_text.verbose) console.log("wrote to : " + filename)
- } else {
+ change: (bytes) => within_fiber('file:' + key, async () => {
+ if (!bytes) currentSize = threshold
+ else currentSize += bytes.length + 4 // we account for the extra 4 bytes for uint32
+ const filename = `${braid_text.db_folder}/${encoded}.${currentNumber}`
+ if (currentSize < threshold) {
+ if (braid_text.verbose) console.log(`appending to db..`)
+
+ let buffer = Buffer.allocUnsafe(4)
+ buffer.writeUInt32LE(bytes.length, 0)
+ await fs.promises.appendFile(filename, buffer)
+ await fs.promises.appendFile(filename, bytes)
+
+ if (braid_text.verbose) console.log("wrote to : " + filename)
+ } else {
+ try {
+ if (braid_text.verbose) console.log(`starting new db..`)
+
+ currentNumber++
+ const init = get_init()
+ const buffer = Buffer.allocUnsafe(4)
+ buffer.writeUInt32LE(init.length, 0)
+
+ const newFilename = `${braid_text.db_folder}/${encoded}.${currentNumber}`
+ await fs.promises.writeFile(newFilename, buffer)
+ await fs.promises.appendFile(newFilename, init)
+
+ if (braid_text.verbose) console.log("wrote to : " + newFilename)
+
+ currentSize = 4 + init.length
+ threshold = currentSize * 10
  try {
- if (braid_text.verbose) console.log(`starting new db..`)
-
- currentNumber++
- const init = get_init()
- const buffer = Buffer.allocUnsafe(4)
- buffer.writeUInt32LE(init.length, 0)
-
- const newFilename = `${braid_text.db_folder}/${encoded}.${currentNumber}`
- await fs.promises.writeFile(newFilename, buffer)
- await fs.promises.appendFile(newFilename, init)
-
- if (braid_text.verbose) console.log("wrote to : " + newFilename)
-
- currentSize = 4 + init.length
- threshold = currentSize * 10
- try {
- await fs.promises.unlink(filename)
- } catch (e) { }
- } catch (e) {
- if (braid_text.verbose) console.log(`e = ${e.stack}`)
- }
+ await fs.promises.unlink(filename)
+ } catch (e) { }
+ } catch (e) {
+ if (braid_text.verbose) console.log(`e = ${e.stack}`)
  }
- }))
- },
+ }
+ }),
  change_meta: async () => {
  meta_dirty = true
  if (meta_saving) return
@@ -2250,6 +2451,20 @@ function create_braid_text() {
  return `sha-256=:${require('crypto').createHash('sha256').update(s).digest('base64')}:`
  }
 
+ function within_fiber(id, func) {
+ if (!within_fiber.chains) within_fiber.chains = {}
+ var prev = within_fiber.chains[id] || Promise.resolve()
+ var curr = prev.then(async () => {
+ try {
+ return await func()
+ } finally {
+ if (within_fiber.chains[id] === curr)
+ delete within_fiber.chains[id]
+ }
+ })
+ return within_fiber.chains[id] = curr
+ }
+
  braid_text.get_resource = get_resource
 
  braid_text.encode_filename = encode_filename
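For context on the within_fiber helper introduced above: it keeps one promise chain per id, so calls that share an id run strictly one after another. A small illustration (the ids and bodies are hypothetical):

    // both calls share an id, so the second body starts only after the first resolves
    within_fiber('put:my-doc', async () => { /* first write */ })
    within_fiber('put:my-doc', async () => { /* second write, queued behind the first */ })
    // a different id gets its own independent chain and may run concurrently
    within_fiber('file:my-doc', async () => { /* unrelated file append */ })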