braid-text 0.0.17 → 0.0.19

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (2)
  1. package/index.js +98 -94
  2. package/package.json +1 -1
package/index.js CHANGED
@@ -194,10 +194,8 @@ braid_text.serve = async (req, res, options = {}) => {
194
194
 
195
195
  braid_text.get = async (key, options) => {
196
196
  if (!options) {
197
- let x = get_resource.cache?.[key]?.doc.get()
198
- if (x !== undefined) return x
199
- // if it doesn't exist on disk, don't create it in this case
200
- if (!(await get_files_for_key(key)).length) return
197
+ // if it doesn't exist already, don't create it in this case
198
+ if (!get_resource.cache?.[key]) return
201
199
  return (await get_resource(key)).doc.get()
202
200
  }
203
201
 
@@ -207,67 +205,8 @@ braid_text.get = async (key, options) => {
207
205
  let resource = (typeof key == 'string') ? await get_resource(key) : key
208
206
 
209
207
  if (!options.subscribe) {
210
- let doc = null
211
- if (options.version || options.parents) {
212
- let frontier = {}
213
- options.version?.forEach((x) => (frontier[x] = true))
214
- options.parents?.forEach((x) => (frontier[x] = true))
215
-
216
- let local_version = []
217
- let [agents, versions, parentss] = parseDT([...resource.doc.toBytes()])
218
- for (let i = 0; i < versions.length; i++) {
219
- if (frontier[versions[i].join("-")]) {
220
- local_version.push(i)
221
- }
222
- }
223
- local_version = new Uint32Array(local_version)
224
-
225
- let after_versions = {}
226
- let [_, after_versions_array, __] = parseDT([...resource.doc.getPatchSince(local_version)])
227
- for (let v of after_versions_array) after_versions[v.join("-")] = true
228
-
229
- let new_doc = new Doc()
230
- let op_runs = resource.doc.getOpsSince([])
231
- let i = 0
232
- op_runs.forEach((op_run) => {
233
- let parents = parentss[i].map((x) => x.join("-"))
234
- let start = op_run.start
235
- let end = start + 1
236
- let content = op_run.content?.[0]
237
-
238
- let len = op_run.end - op_run.start
239
- let base_i = i
240
- for (let j = 1; j <= len; j++) {
241
- let I = base_i + j
242
- if (
243
- j == len ||
244
- parentss[I].length != 1 ||
245
- parentss[I][0][0] != versions[I - 1][0] ||
246
- parentss[I][0][1] != versions[I - 1][1] ||
247
- versions[I][0] != versions[I - 1][0] ||
248
- versions[I][1] != versions[I - 1][1] + 1
249
- ) {
250
- for (; i < I; i++) {
251
- let version = versions[i].join("-")
252
- if (!after_versions[version]) {
253
- new_doc.mergeBytes(
254
- OpLog_create_bytes(
255
- version,
256
- parentss[i].map((x) => x.join("-")),
257
- content ? start + (i - base_i) : start,
258
- content?.[0]
259
- )
260
- )
261
- }
262
- if (op_run.content) content = content.slice(1)
263
- }
264
- content = ""
265
- }
266
- if (op_run.content) content += op_run.content[j]
267
- }
268
- })
269
- doc = new_doc
270
- } else doc = resource.doc
208
+ let doc = resource.doc
209
+ if (options.version || options.parents) doc = dt_get(doc, options.version || options.parents)
271
210
 
272
211
  return {
273
212
  version: doc.getRemoteVersion().map((x) => x.join("-")),
@@ -361,10 +300,17 @@ braid_text.put = async (key, options) => {
361
300
 
362
301
  let resource = (typeof key == 'string') ? await get_resource(key) : key
363
302
 
303
+ let parents = resource.doc.getRemoteVersion().map((x) => x.join("-"))
304
+ let og_parents = options.parents || parents
305
+
306
+ let max_pos = count_code_points(v_eq(parents, og_parents) ?
307
+ resource.doc.get() :
308
+ dt_get(resource.doc, og_parents).get())
309
+
364
310
  if (body != null) {
365
311
  patches = [{
366
312
  unit: 'text',
367
- range: `[0:${count_code_points(resource.doc.get())}]`,
313
+ range: `[0:${max_pos}]`,
368
314
  content: body
369
315
  }]
370
316
  }
@@ -377,7 +323,6 @@ braid_text.put = async (key, options) => {
377
323
  })).sort((a, b) => a.range[0] - b.range[0])
378
324
 
379
325
  // validate patch positions
380
- let max_pos = resource.doc.get().length
381
326
  let must_be_at_least = 0
382
327
  for (let p of patches) {
383
328
  if (p.range[0] < must_be_at_least || p.range[0] > max_pos) throw new Error(`invalid patch range position: ${p.range[0]}`)
@@ -398,8 +343,6 @@ braid_text.put = async (key, options) => {
398
343
 
399
344
  v = `${v[0]}-${v[1] + 1 - change_count}`
400
345
 
401
- let parents = resource.doc.getRemoteVersion().map((x) => x.join("-"))
402
- let og_parents = options.parents || parents
403
346
  let ps = og_parents
404
347
 
405
348
  let v_before = resource.doc.getLocalVersion()
@@ -551,38 +494,39 @@ braid_text.list = async () => {
551
494
 
552
495
  async function get_resource(key) {
553
496
  let cache = get_resource.cache || (get_resource.cache = {})
554
- if (cache[key]) return cache[key]
555
-
556
- let resource = {}
557
- resource.clients = new Set()
558
- resource.simpleton_clients = new Set()
497
+ if (!cache[key]) cache[key] = new Promise(async done => {
498
+ let resource = {}
499
+ resource.clients = new Set()
500
+ resource.simpleton_clients = new Set()
559
501
 
560
- resource.doc = new Doc("server")
502
+ resource.doc = new Doc("server")
561
503
 
562
- let { change, delete_me } = braid_text.db_folder
563
- ? await file_sync(key,
564
- (bytes) => resource.doc.mergeBytes(bytes),
565
- () => resource.doc.toBytes())
566
- : { change: () => { }, delete_me: () => { } }
504
+ let { change, delete_me } = braid_text.db_folder
505
+ ? await file_sync(key,
506
+ (bytes) => resource.doc.mergeBytes(bytes),
507
+ () => resource.doc.toBytes())
508
+ : { change: () => { }, delete_me: () => { } }
567
509
 
568
- resource.db_delta = change
510
+ resource.db_delta = change
569
511
 
570
- resource.doc = defrag_dt(resource.doc)
571
- resource.need_defrag = false
512
+ resource.doc = defrag_dt(resource.doc)
513
+ resource.need_defrag = false
572
514
 
573
- resource.actor_seqs = {}
574
- let max_version = resource.doc.getLocalVersion()[0] ?? -1
575
- for (let i = 0; i <= max_version; i++) {
576
- let v = resource.doc.localToRemoteVersion([i])[0]
577
- resource.actor_seqs[v[0]] = Math.max(v[1], resource.actor_seqs[v[0]] ?? -1)
578
- }
515
+ resource.actor_seqs = {}
516
+ let max_version = resource.doc.getLocalVersion()[0] ?? -1
517
+ for (let i = 0; i <= max_version; i++) {
518
+ let v = resource.doc.localToRemoteVersion([i])[0]
519
+ resource.actor_seqs[v[0]] = Math.max(v[1], resource.actor_seqs[v[0]] ?? -1)
520
+ }
579
521
 
580
- resource.delete_me = () => {
581
- delete_me()
582
- delete cache[key]
583
- }
522
+ resource.delete_me = () => {
523
+ delete_me()
524
+ delete cache[key]
525
+ }
584
526
 
585
- return (cache[key] = resource)
527
+ done(resource)
528
+ })
529
+ return await cache[key]
586
530
  }
587
531
 
588
532
  async function db_folder_init() {
@@ -755,6 +699,66 @@ async function file_sync(key, process_delta, get_init) {
755
699
  //////////////////////////////////////////////////////////////////
756
700
  //////////////////////////////////////////////////////////////////
757
701
 
702
+ function dt_get(doc, version) {
703
+ let frontier = {}
704
+ version.forEach((x) => (frontier[x] = true))
705
+
706
+ let local_version = []
707
+ let [agents, versions, parentss] = parseDT([...doc.toBytes()])
708
+ for (let i = 0; i < versions.length; i++) {
709
+ if (frontier[versions[i].join("-")]) {
710
+ local_version.push(i)
711
+ }
712
+ }
713
+ local_version = new Uint32Array(local_version)
714
+
715
+ let after_versions = {}
716
+ let [_, after_versions_array, __] = parseDT([...doc.getPatchSince(local_version)])
717
+ for (let v of after_versions_array) after_versions[v.join("-")] = true
718
+
719
+ let new_doc = new Doc()
720
+ let op_runs = doc.getOpsSince([])
721
+ let i = 0
722
+ op_runs.forEach((op_run) => {
723
+ let parents = parentss[i].map((x) => x.join("-"))
724
+ let start = op_run.start
725
+ let end = start + 1
726
+ let content = op_run.content?.[0]
727
+
728
+ let len = op_run.end - op_run.start
729
+ let base_i = i
730
+ for (let j = 1; j <= len; j++) {
731
+ let I = base_i + j
732
+ if (
733
+ j == len ||
734
+ parentss[I].length != 1 ||
735
+ parentss[I][0][0] != versions[I - 1][0] ||
736
+ parentss[I][0][1] != versions[I - 1][1] ||
737
+ versions[I][0] != versions[I - 1][0] ||
738
+ versions[I][1] != versions[I - 1][1] + 1
739
+ ) {
740
+ for (; i < I; i++) {
741
+ let version = versions[i].join("-")
742
+ if (!after_versions[version]) {
743
+ new_doc.mergeBytes(
744
+ OpLog_create_bytes(
745
+ version,
746
+ parentss[i].map((x) => x.join("-")),
747
+ content ? start + (i - base_i) : start,
748
+ content?.[0]
749
+ )
750
+ )
751
+ }
752
+ if (op_run.content) content = content.slice(1)
753
+ }
754
+ content = ""
755
+ }
756
+ if (op_run.content) content += op_run.content[j]
757
+ }
758
+ })
759
+ return new_doc
760
+ }
761
+
758
762
  function defrag_dt(doc) {
759
763
  let fresh_doc = new Doc("server")
760
764
  fresh_doc.mergeBytes(doc.toBytes())
package/package.json CHANGED
@@ -1,6 +1,6 @@
1
1
  {
2
2
  "name": "braid-text",
3
- "version": "0.0.17",
3
+ "version": "0.0.19",
4
4
  "description": "Library for collaborative text over http using braid.",
5
5
  "author": "Braid Working Group",
6
6
  "repository": "braid-org/braidjs",