braid-text 0.0.13 → 0.0.15

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (2)
  1. package/index.js +197 -66
  2. package/package.json +1 -1
package/index.js CHANGED
@@ -10,6 +10,8 @@ let braid_text = {
10
10
  let waiting_puts = 0
11
11
  let prev_put_p = null
12
12
 
13
+ let max_encoded_key_size = 240
14
+
13
15
  braid_text.serve = async (req, res, options = {}) => {
14
16
  options = {
15
17
  key: req.url.split('?')[0], // Default key
@@ -17,12 +19,32 @@ braid_text.serve = async (req, res, options = {}) => {
17
19
  ...options // Override with all options passed in
18
20
  }
19
21
 
20
- let resource = await get_resource(options.key)
22
+ // free CORS
23
+ res.setHeader("Access-Control-Allow-Origin", "*")
24
+ res.setHeader("Access-Control-Allow-Methods", "*")
25
+ res.setHeader("Access-Control-Allow-Headers", "*")
26
+ res.setHeader("Access-Control-Expose-Headers", "*")
27
+
28
+ function my_end(statusCode, x) {
29
+ res.statusCode = statusCode
30
+ res.end(x ?? '')
31
+ }
32
+
33
+ let resource = null
34
+ try {
35
+ resource = await get_resource(options.key)
21
36
 
22
- braidify(req, res)
37
+ braidify(req, res)
38
+ } catch (e) {
39
+ return my_end(400, "The server failed to process this request. The error generated was: " + e)
40
+ }
23
41
 
24
42
  let peer = req.headers["peer"]
25
43
 
44
+ let merge_type = req.headers["merge-type"]
45
+ if (!merge_type) merge_type = 'simpleton'
46
+ if (merge_type !== 'simpleton' && merge_type !== 'dt') return my_end(400, `Unknown merge type: ${merge_type}`)
47
+
26
48
  // set default content type of text/plain
27
49
  if (!res.getHeader('content-type')) res.setHeader('Content-Type', 'text/plain')
28
50
 
@@ -41,17 +63,6 @@ braid_text.serve = async (req, res, options = {}) => {
41
63
  res.setHeader('Content-Type', updatedContentType);
42
64
  }
43
65
 
44
- // free CORS
45
- res.setHeader("Access-Control-Allow-Origin", "*")
46
- res.setHeader("Access-Control-Allow-Methods", "*")
47
- res.setHeader("Access-Control-Allow-Headers", "*")
48
- res.setHeader("Access-Control-Expose-Headers", "*")
49
-
50
- function my_end(statusCode, x) {
51
- res.statusCode = statusCode
52
- res.end(x ?? '')
53
- }
54
-
55
66
  if (req.method == "OPTIONS") return my_end(200)
56
67
 
57
68
  if (req.method == "DELETE") {
@@ -63,7 +74,12 @@ braid_text.serve = async (req, res, options = {}) => {
63
74
  if (!req.subscribe) {
64
75
  res.setHeader("Accept-Subscribe", "true")
65
76
 
66
- let x = await braid_text.get(resource, { version: req.version, parents: req.parents })
77
+ let x = null
78
+ try {
79
+ x = await braid_text.get(resource, { version: req.version, parents: req.parents })
80
+ } catch (e) {
81
+ return my_end(400, "The server failed to get something. The error generated was: " + e)
82
+ }
67
83
 
68
84
  res.setHeader("Version", x.version.map((x) => JSON.stringify(x)).join(", "))
69
85
 
@@ -75,26 +91,30 @@ braid_text.serve = async (req, res, options = {}) => {
75
91
  return my_end(200, buffer)
76
92
  } else {
77
93
  res.setHeader("Editable", "true")
78
- res.setHeader("Merge-Type", req.headers["merge-type"] === "dt" ? "dt" : "simpleton")
94
+ res.setHeader("Merge-Type", merge_type)
79
95
  if (req.method == "HEAD") return my_end(200)
80
96
 
81
97
  let options = {
82
98
  peer,
83
99
  version: req.version,
84
100
  parents: req.parents,
85
- merge_type: req.headers["merge-type"],
101
+ merge_type,
86
102
  subscribe: x => res.sendVersion(x),
87
103
  write: (x) => res.write(x)
88
104
  }
89
105
 
90
106
  res.startSubscription({
91
107
  onClose: () => {
92
- if (req.headers["merge-type"] === "dt") resource.clients.delete(options)
108
+ if (merge_type === "dt") resource.clients.delete(options)
93
109
  else resource.simpleton_clients.delete(options)
94
110
  }
95
111
  })
96
112
 
97
- return braid_text.get(resource, options)
113
+ try {
114
+ return await braid_text.get(resource, options)
115
+ } catch (e) {
116
+ return my_end(400, "The server failed to get something. The error generated was: " + e)
117
+ }
98
118
  }
99
119
  }
100
120
 
@@ -134,7 +154,7 @@ braid_text.serve = async (req, res, options = {}) => {
134
154
  patches = null
135
155
  }
136
156
 
137
- await braid_text.put(resource, { peer, version: req.version, parents: req.parents, patches, body, merge_type: req.headers["merge-type"] })
157
+ await braid_text.put(resource, { peer, version: req.version, parents: req.parents, patches, body, merge_type })
138
158
 
139
159
  options.put_cb(options.key, resource.doc.get())
140
160
  } catch (e) {
@@ -163,7 +183,7 @@ braid_text.serve = async (req, res, options = {}) => {
163
183
  // - 428 Precondition Required
164
184
  // - pros: the name sounds right
165
185
  // - cons: typically implies that the request was missing an http conditional field like If-Match. that is to say, it implies that the request is missing a precondition, not that the server is missing a precondition
166
- return done_my_turn(425, "The server failed to apply this version.")
186
+ return done_my_turn(425, "The server failed to apply this version. The error generated was: " + e)
167
187
  }
168
188
 
169
189
  return done_my_turn(200)
@@ -181,6 +201,9 @@ braid_text.get = async (key, options) => {
181
201
  return (await get_resource(key)).doc.get()
182
202
  }
183
203
 
204
+ if (options.version) validate_version_array(options.version)
205
+ if (options.parents) validate_version_array(options.parents)
206
+
184
207
  let resource = (typeof key == 'string') ? await get_resource(key) : key
185
208
 
186
209
  if (!options.subscribe) {
@@ -247,12 +270,12 @@ braid_text.get = async (key, options) => {
247
270
  } else doc = resource.doc
248
271
 
249
272
  return {
250
- version: doc.getRemoteVersion().map((x) => encode_version(...x)),
273
+ version: doc.getRemoteVersion().map((x) => x.join("-")),
251
274
  body: doc.get()
252
275
  }
253
276
  } else {
254
277
  if (options.merge_type != "dt") {
255
- let version = resource.doc.getRemoteVersion().map((x) => encode_version(...x))
278
+ let version = resource.doc.getRemoteVersion().map((x) => x.join("-"))
256
279
  let x = { version }
257
280
 
258
281
  if (!options.parents && !options.version) {
@@ -284,7 +307,7 @@ braid_text.get = async (key, options) => {
284
307
 
285
308
  if (!options.parents && !options.version) {
286
309
  options.subscribe({
287
- version: ["root"],
310
+ version: [],
288
311
  parents: [],
289
312
  body: "",
290
313
  })
@@ -328,7 +351,13 @@ braid_text.get = async (key, options) => {
328
351
  }
329
352
 
330
353
  braid_text.put = async (key, options) => {
331
- let { version, patches, body } = options
354
+ let { version, patches, body, peer } = options
355
+
356
+ if (version) validate_version_array(version)
357
+ if (options.parents) validate_version_array(options.parents)
358
+ if (body != null && patches) throw new Error(`cannot have a body and patches`)
359
+ if (body != null && (typeof body !== 'string')) throw new Error(`body must be a string`)
360
+ if (patches) validate_patches(patches)
332
361
 
333
362
  let resource = (typeof key == 'string') ? await get_resource(key) : key
334
363
 
@@ -344,21 +373,34 @@ braid_text.put = async (key, options) => {
344
373
  patches = patches.map((p) => ({
345
374
  ...p,
346
375
  range: p.range.match(/\d+/g).map((x) => parseInt(x)),
347
- ...(p.content ? { content: [...p.content] } : {}),
348
- }))
376
+ content: [...p.content],
377
+ })).sort((a, b) => a.range[0] - b.range[0])
378
+
379
+ // validate patch positions
380
+ let max_pos = resource.doc.get().length
381
+ let must_be_at_least = 0
382
+ for (let p of patches) {
383
+ if (p.range[0] < must_be_at_least || p.range[0] > max_pos) throw new Error(`invalid patch range position: ${p.range[0]}`)
384
+ if (p.range[1] < p.range[0] || p.range[1] > max_pos) throw new Error(`invalid patch range position: ${p.range[1]}`)
385
+ must_be_at_least = p.range[1]
386
+ }
349
387
 
350
388
  let change_count = patches.reduce((a, b) => a + b.content.length + (b.range[1] - b.range[0]), 0)
351
389
 
352
- let og_v = version?.[0] || `${Math.random().toString(36).slice(2, 7)}-${change_count - 1}`
390
+ let og_v = version?.[0] || `${(is_valid_actor(peer) && peer) || Math.random().toString(36).slice(2, 7)}-${change_count - 1}`
353
391
 
354
392
  // reduce the version sequence by the number of char-edits
355
393
  let v = decode_version(og_v)
356
- v = encode_version(v[0], v[1] + 1 - change_count)
357
394
 
358
- let parents = resource.doc.getRemoteVersion().map((x) => encode_version(...x))
395
+ // validate version: make sure we haven't seen it already
396
+ if (v[1] <= (resource.actor_seqs[v[0]] ?? -1)) throw new Error(`invalid version: already processed`)
397
+ resource.actor_seqs[v[0]] = v[1]
398
+
399
+ v = `${v[0]}-${v[1] + 1 - change_count}`
400
+
401
+ let parents = resource.doc.getRemoteVersion().map((x) => x.join("-"))
359
402
  let og_parents = options.parents || parents
360
403
  let ps = og_parents
361
- if (!ps.length) ps = ["root"]
362
404
 
363
405
  let v_before = resource.doc.getLocalVersion()
364
406
 
@@ -372,7 +414,7 @@ braid_text.put = async (key, options) => {
372
414
  offset--
373
415
  ps = [v]
374
416
  v = decode_version(v)
375
- v = encode_version(v[0], v[1] + 1)
417
+ v = `${v[0]}-${v[1] + 1}`
376
418
  }
377
419
  // insert
378
420
  for (let i = 0; i < p.content?.length ?? 0; i++) {
@@ -381,7 +423,7 @@ braid_text.put = async (key, options) => {
381
423
  offset++
382
424
  ps = [v]
383
425
  v = decode_version(v)
384
- v = encode_version(v[0], v[1] + 1)
426
+ v = `${v[0]}-${v[1] + 1}`
385
427
  }
386
428
  }
387
429
 
@@ -399,17 +441,17 @@ braid_text.put = async (key, options) => {
399
441
  patches = get_xf_patches(resource.doc, v_before)
400
442
  console.log(JSON.stringify({ patches }))
401
443
 
402
- let version = resource.doc.getRemoteVersion().map((x) => encode_version(...x))
444
+ let version = resource.doc.getRemoteVersion().map((x) => x.join("-"))
403
445
 
404
446
  for (let client of resource.simpleton_clients) {
405
- if (client.peer == options.peer) {
447
+ if (client.peer == peer) {
406
448
  client.my_last_seen_version = [og_v]
407
449
  }
408
450
 
409
451
  function set_timeout(time_override) {
410
452
  if (client.my_timeout) clearTimeout(client.my_timeout)
411
453
  client.my_timeout = setTimeout(() => {
412
- let version = resource.doc.getRemoteVersion().map((x) => encode_version(...x))
454
+ let version = resource.doc.getRemoteVersion().map((x) => x.join("-"))
413
455
  let x = { version }
414
456
  x.parents = client.my_last_seen_version
415
457
 
@@ -426,7 +468,7 @@ braid_text.put = async (key, options) => {
426
468
  }
427
469
 
428
470
  if (client.my_timeout) {
429
- if (client.peer == options.peer) {
471
+ if (client.peer == peer) {
430
472
  if (!v_eq(client.my_last_sent_version, og_parents)) {
431
473
  // note: we don't add to client.my_unused_version_count,
432
474
  // because we're already in a timeout;
@@ -442,7 +484,7 @@ braid_text.put = async (key, options) => {
442
484
  }
443
485
 
444
486
  let x = { version }
445
- if (client.peer == options.peer) {
487
+ if (client.peer == peer) {
446
488
  if (!v_eq(client.my_last_sent_version, og_parents)) {
447
489
  client.my_unused_version_count = (client.my_unused_version_count ?? 0) + 1
448
490
  set_timeout()
@@ -489,7 +531,7 @@ braid_text.put = async (key, options) => {
489
531
  patches: og_patches,
490
532
  }
491
533
  for (let client of resource.clients) {
492
- if (client.peer != options.peer) client.subscribe(x)
534
+ if (client.peer != peer) client.subscribe(x)
493
535
  }
494
536
 
495
537
  await resource.db_delta(resource.doc.getPatchSince(v_before))
@@ -497,12 +539,12 @@ braid_text.put = async (key, options) => {
497
539
 
498
540
  braid_text.list = async () => {
499
541
  try {
500
- var pages = new Set()
501
- for (let x of await require('fs').promises.readdir(braid_text.db_folder)) {
502
- let m = x.match(/^(.*)\.\d+$/)
503
- if (m) pages.add(decode_filename(m[1]))
504
- }
505
- return [...pages.keys()]
542
+ if (braid_text.db_folder) {
543
+ await db_folder_init()
544
+ var pages = new Set()
545
+ for (let x of await require('fs').promises.readdir(braid_text.db_folder)) pages.add(decode_filename(x.replace(/\.\w+$/, '')))
546
+ return [...pages.keys()]
547
+ } else return Object.keys(get_resource.cache)
506
548
  } catch (e) { return [] }
507
549
  }
508
550
 
@@ -527,6 +569,13 @@ async function get_resource(key) {
527
569
  resource.doc = defrag_dt(resource.doc)
528
570
  resource.need_defrag = false
529
571
 
572
+ resource.actor_seqs = {}
573
+ let max_version = resource.doc.getLocalVersion()[0] ?? -1
574
+ for (let i = 0; i <= max_version; i++) {
575
+ let v = resource.doc.localToRemoteVersion([i])[0]
576
+ resource.actor_seqs[v[0]] = Math.max(v[1], resource.actor_seqs[v[0]] ?? -1)
577
+ }
578
+
530
579
  resource.delete_me = () => {
531
580
  delete_me()
532
581
  delete cache[key]
@@ -535,9 +584,56 @@ async function get_resource(key) {
535
584
  return (cache[key] = resource)
536
585
  }
537
586
 
587
+ async function db_folder_init() {
588
+ console.log('__!')
589
+ if (!db_folder_init.p) db_folder_init.p = new Promise(async done => {
590
+ await fs.promises.mkdir(braid_text.db_folder, { recursive: true });
591
+
592
+ // 0.0.13 -> 0.0.14
593
+ // look for files with key-encodings over max_encoded_key_size,
594
+ // and convert them using the new method
595
+ // for (let x of await fs.promises.readdir(braid_text.db_folder)) {
596
+ // let k = x.replace(/(_[0-9a-f]{64})?\.\w+$/, '')
597
+ // if (k.length > max_encoded_key_size) {
598
+ // k = decode_filename(k)
599
+
600
+ // await fs.promises.rename(`${braid_text.db_folder}/${x}`, `${braid_text.db_folder}/${encode_filename(k)}${x.match(/\.\w+$/)[0]}`)
601
+ // await fs.promises.writeFile(`${braid_text.db_folder}/${encode_filename(k)}.name`, k)
602
+ // }
603
+ // }
604
+
605
+ // 0.0.14 -> 0.0.15
606
+ // basically convert the 0.0.14 files back
607
+ let convert_us = {}
608
+ for (let x of await fs.promises.readdir(braid_text.db_folder)) {
609
+ if (x.endsWith('.name')) {
610
+ let encoded = convert_us[x.slice(0, -'.name'.length)] = encode_filename(await fs.promises.readFile(`${braid_text.db_folder}/${x}`, { encoding: 'utf8' }))
611
+ if (encoded.length > max_encoded_key_size) {
612
+ console.log(`trying to convert file to new format, but the key is too big: ${braid_text.db_folder}/${x}`)
613
+ process.exit()
614
+ }
615
+ console.log(`deleting: ${braid_text.db_folder}/${x}`)
616
+ await fs.promises.unlink(`${braid_text.db_folder}/${x}`)
617
+ }
618
+ }
619
+ if (Object.keys(convert_us).length) {
620
+ for (let x of await fs.promises.readdir(braid_text.db_folder)) {
621
+ let [_, k, num] = x.match(/^(.*)\.(\d+)$/s)
622
+ if (!convert_us[k]) continue
623
+ console.log(`renaming: ${braid_text.db_folder}/${x} -> ${braid_text.db_folder}/${convert_us[k]}.${num}`)
624
+ if (convert_us[k]) await fs.promises.rename(`${braid_text.db_folder}/${x}`, `${braid_text.db_folder}/${convert_us[k]}.${num}`)
625
+ }
626
+ }
627
+
628
+ done()
629
+ })
630
+ await db_folder_init.p
631
+ }
632
+
538
633
  async function get_files_for_key(key) {
634
+ await db_folder_init()
539
635
  try {
540
- let re = new RegExp("^" + encode_filename(key).replace(/[^a-zA-Z0-9]/g, "\\$&") + "\\.\\d+$")
636
+ let re = new RegExp("^" + encode_filename(key).replace(/[^a-zA-Z0-9]/g, "\\$&") + "\\.\\w+$")
541
637
  return (await fs.promises.readdir(braid_text.db_folder))
542
638
  .filter((a) => re.test(a))
543
639
  .map((a) => `${braid_text.db_folder}/${a}`)
@@ -545,15 +641,17 @@ async function get_files_for_key(key) {
545
641
  }
546
642
 
547
643
  async function file_sync(key, process_delta, get_init) {
644
+ let encoded = encode_filename(key)
645
+
646
+ if (encoded.length > max_encoded_key_size) throw new Error(`invalid key: too long (max ${max_encoded_key_size})`)
647
+
548
648
  let currentNumber = 0
549
649
  let currentSize = 0
550
650
  let threshold = 0
551
651
 
552
- // Ensure the existence of db_folder
553
- await fs.promises.mkdir(braid_text.db_folder, { recursive: true });
554
-
555
652
  // Read existing files and sort by numbers.
556
653
  const files = (await get_files_for_key(key))
654
+ .filter(x => x.match(/\.\d+$/))
557
655
  .sort((a, b) => parseInt(a.match(/\d+$/)[0]) - parseInt(b.match(/\d+$/)[0]))
558
656
 
559
657
  // Try to process files starting from the highest number.
@@ -595,7 +693,7 @@ async function file_sync(key, process_delta, get_init) {
595
693
  return {
596
694
  change: async (bytes) => {
597
695
  currentSize += bytes.length + 4 // we account for the extra 4 bytes for uint32
598
- const filename = `${braid_text.db_folder}/${encode_filename(key)}.${currentNumber}`
696
+ const filename = `${braid_text.db_folder}/${encoded}.${currentNumber}`
599
697
  if (currentSize < threshold) {
600
698
  console.log(`appending to db..`)
601
699
 
@@ -614,7 +712,7 @@ async function file_sync(key, process_delta, get_init) {
614
712
  const buffer = Buffer.allocUnsafe(4)
615
713
  buffer.writeUInt32LE(init.length, 0)
616
714
 
617
- const newFilename = `${braid_text.db_folder}/${encode_filename(key)}.${currentNumber}`
715
+ const newFilename = `${braid_text.db_folder}/${encoded}.${currentNumber}`
618
716
  await fs.promises.writeFile(newFilename, buffer)
619
717
  await fs.promises.appendFile(newFilename, init)
620
718
 
@@ -767,7 +865,7 @@ function parseDT(byte_array) {
767
865
  let num = x >> 2
768
866
 
769
867
  if (x == 1) {
770
- parents.push(["root"])
868
+ // no parents (e.g. parent is "root")
771
869
  } else if (!is_foreign) {
772
870
  parents.push(versions[count - num])
773
871
  } else {
@@ -836,7 +934,7 @@ function OpLog_create_bytes(version, parents, pos, ins) {
836
934
 
837
935
  let agents = new Set()
838
936
  agents.add(version[0])
839
- for (let p of parents) if (p.length > 1) agents.add(p[0])
937
+ for (let p of parents) agents.add(p[0])
840
938
  agents = [...agents]
841
939
 
842
940
  // console.log(JSON.stringify({ agents, parents }, null, 4));
@@ -857,7 +955,7 @@ function OpLog_create_bytes(version, parents, pos, ins) {
857
955
 
858
956
  let branch = []
859
957
 
860
- if (parents[0].length > 1) {
958
+ if (parents.length) {
861
959
  let frontier = []
862
960
 
863
961
  for (let [i, [agent, seq]] of parents.entries()) {
@@ -932,7 +1030,7 @@ function OpLog_create_bytes(version, parents, pos, ins) {
932
1030
 
933
1031
  write_varint(parents_bytes, 1)
934
1032
 
935
- if (parents[0].length > 1) {
1033
+ if (parents.length) {
936
1034
  for (let [i, [agent, seq]] of parents.entries()) {
937
1035
  let has_more = i < parents.length - 1
938
1036
  let agent_i = agent_to_i[agent]
@@ -958,8 +1056,9 @@ function OpLog_remote_to_local(doc, frontier) {
958
1056
  let map = Object.fromEntries(frontier.map((x) => [x, true]))
959
1057
 
960
1058
  let local_version = []
961
- let [agents, versions, parentss] = parseDT([...doc.toBytes()])
962
- for (let i = 0; i < versions.length; i++) {
1059
+
1060
+ let max_version = doc.getLocalVersion()[0] ?? -1
1061
+ for (let i = 0; i <= max_version; i++) {
963
1062
  if (map[doc.localToRemoteVersion([i])[0].join("-")]) {
964
1063
  local_version.push(i)
965
1064
  }
@@ -968,16 +1067,6 @@ function OpLog_remote_to_local(doc, frontier) {
968
1067
  return frontier.length == local_version.length && new Uint32Array(local_version)
969
1068
  }
970
1069
 
971
- function encode_version(agent, seq) {
972
- return agent + "-" + seq
973
- }
974
-
975
- function decode_version(v) {
976
- let a = v.split("-")
977
- if (a.length > 1) a[1] = parseInt(a[1])
978
- return a
979
- }
980
-
981
1070
  function v_eq(v1, v2) {
982
1071
  return v1.length == v2.length && v1.every((x, i) => x == v2[i])
983
1072
  }
@@ -1294,4 +1383,46 @@ function decode_filename(encodedFilename) {
1294
1383
  return decoded
1295
1384
  }
1296
1385
 
1386
+ function validate_version_array(x) {
1387
+ if (!Array.isArray(x)) throw new Error(`invalid version array: not an array`)
1388
+ for (xx of x) validate_actor_seq(xx)
1389
+ }
1390
+
1391
+ function validate_actor_seq(x) {
1392
+ if (typeof x !== 'string') throw new Error(`invalid actor-seq: not a string`)
1393
+ let [actor, seq] = decode_version(x)
1394
+ validate_actor(actor)
1395
+ }
1396
+
1397
+ function validate_actor(x) {
1398
+ if (typeof x !== 'string') throw new Error(`invalid actor: not a string`)
1399
+ if (Buffer.byteLength(x, 'utf8') >= 50) throw new Error(`actor value too long (max 49): ${x}`) // restriction coming from dt
1400
+ }
1401
+
1402
+ function is_valid_actor(x) {
1403
+ try {
1404
+ validate_actor(x)
1405
+ return true
1406
+ } catch (e) {}
1407
+ }
1408
+
1409
+ function decode_version(v) {
1410
+ let m = v.match(/^(.*)-(\d+)$/s)
1411
+ if (!m) throw new Error(`invalid actor-seq version: ${v}`)
1412
+ return [m[1], parseInt(m[2])]
1413
+ }
1414
+
1415
+ function validate_patches(patches) {
1416
+ if (!Array.isArray(patches)) throw new Error(`invalid patches: not an array`)
1417
+ for (let p of patches) validate_patch(p)
1418
+ }
1419
+
1420
+ function validate_patch(x) {
1421
+ if (typeof x != 'object') throw new Error(`invalid patch: not an object`)
1422
+ if (x.unit && x.unit !== 'text') throw new Error(`invalid patch unit '${x.unit}': only 'text' supported`)
1423
+ if (typeof x.range !== 'string') throw new Error(`invalid patch range: must be a string`)
1424
+ if (!x.range.match(/^\s*\[\s*\d+\s*:\s*\d+\s*\]\s*$/)) throw new Error(`invalid patch range: ${x.range}`)
1425
+ if (typeof x.content !== 'string') throw new Error(`invalid patch content: must be a string`)
1426
+ }
1427
+
1297
1428
  module.exports = braid_text
package/package.json CHANGED
@@ -1,6 +1,6 @@
1
1
  {
2
2
  "name": "braid-text",
3
- "version": "0.0.13",
3
+ "version": "0.0.15",
4
4
  "description": "Library for collaborative text over http using braid.",
5
5
  "author": "Braid Working Group",
6
6
  "repository": "braid-org/braidjs",