braidfs 0.0.82 → 0.0.84

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (2)
  1. package/index.js +186 -159
  2. package/package.json +2 -2
package/index.js CHANGED
@@ -48,14 +48,17 @@ require('fs').mkdirSync(proxy_base_meta, { recursive: true })
48
48
  require('fs').mkdirSync(trash, { recursive: true })
49
49
  require('fs').mkdirSync(temp_folder, { recursive: true })
50
50
 
51
- // process command line args
52
- let to_run_in_background = process.platform === 'darwin' ? `
51
+ // Add instructions for how to run in the background on this OS
52
+ var to_run_in_background = process.platform === 'darwin' ? `
53
53
  To run daemon in background:
54
54
  launchctl submit -l org.braid.braidfs -- braidfs run` : ''
55
- let argv = process.argv.slice(2)
55
+ // ...except this doesn't work yet. So disable.
56
+ to_run_in_background = ''
56
57
 
57
58
  console.log(`braidfs version: ${require(`${__dirname}/package.json`).version}`)
58
59
 
60
+ // process command line args
61
+ var argv = process.argv.slice(2)
59
62
  if (argv.length === 1 && argv[0].match(/^(run|serve)$/)) {
60
63
  return main()
61
64
  } else if (argv.length && argv.length % 2 == 0 && argv.every((x, i) => i % 2 != 0 || x.match(/^(sync|unsync)$/))) {
@@ -255,7 +258,6 @@ function skip_file(path) {
255
258
  async function trash_file(fullpath, path) {
256
259
  // throw this unrecognized file into the trash,
257
260
  let dest = `${trash}/${braid_text.encode_filename(path)}_${Math.random().toString(36).slice(2)}`
258
- console.log(`moving untracked file ${fullpath} to ${dest}`)
259
261
  await require('fs').promises.rename(fullpath, dest)
260
262
 
261
263
  // and log an error
@@ -263,6 +265,8 @@ async function trash_file(fullpath, path) {
263
265
  }
264
266
 
265
267
  async function log_error(text) {
268
+ console.log(`LOGGING ERROR: ${text}`)
269
+
266
270
  var x = await braid_text.get('.braidfs/errors', {}),
267
271
  len = [...x.body].length
268
272
  await braid_text.put('.braidfs/errors', {
@@ -426,6 +430,7 @@ async function proxy_url(url) {
426
430
 
427
431
  self.peer = Math.random().toString(36).slice(2)
428
432
  self.local_edit_counter = 0
433
+ self.fork_point = null
429
434
  var file_last_version = null,
430
435
  file_last_digest = null
431
436
  self.file_last_text = null
@@ -468,47 +473,60 @@ async function proxy_url(url) {
468
473
  file_loop_pump()
469
474
  }
470
475
 
471
- self.signal_file_needs_writing = () => {
476
+ self.signal_file_needs_writing = (just_meta_file) => {
472
477
  if (freed) return
473
- file_needs_writing = true
478
+
479
+ if (!just_meta_file) file_needs_writing = true
480
+ else if (just_meta_file && !file_needs_writing)
481
+ file_needs_writing = 'just_meta_file'
482
+
474
483
  file_loop_pump()
475
484
  }
476
485
 
477
- async function send_out(stuff) {
486
+ async function my_fetch(params) {
478
487
  if (!start_something()) return
479
- if (is_external_link) {
480
- try {
481
- console.log(`send_out ${url} ${JSON.stringify(stuff, null, 4).slice(0, 1000)}`)
482
-
483
- let a = new AbortController()
484
- aborts.add(a)
485
- var r = await braid_fetch(url, {
486
- signal: a.signal,
487
- headers: {
488
- "Merge-Type": "dt",
489
- "Content-Type": 'text/plain',
490
- ...(x => x && {Cookie: x})(config.cookies?.[new URL(url).hostname])
491
- },
492
- method: "PUT",
493
- retry: true,
494
- ...stuff
495
- })
496
- aborts.delete(a)
497
- // if we're not authorized,
498
- if (r.status == 401 || r.status == 403) {
499
- // and it's one of our versions (a local edit),
500
- if (self.peer === braid_text.decode_version(stuff.version[0])[0]) {
501
- // then revert it
502
- console.log(`access denied: reverting local edits`)
503
- unproxy_url(url)
504
- proxy_url(url)
505
- }
506
- }
507
- } catch (e) {
508
- if (e?.name !== "AbortError") console.log(e)
488
+ try {
489
+ var a = new AbortController()
490
+ aborts.add(a)
491
+ return await braid_fetch(url, {
492
+ signal: a.signal,
493
+ headers: {
494
+ "Merge-Type": "dt",
495
+ "Content-Type": 'text/plain',
496
+ ...(x => x && {Cookie: x})(config.cookies?.[new URL(url).hostname])
497
+ },
498
+ retry: true,
499
+ ...params
500
+ })
501
+ } catch (e) {
502
+ if (e?.name !== "AbortError") console.log(e)
503
+ } finally {
504
+ aborts.delete(a)
505
+ finish_something()
506
+ }
507
+ }
508
+
509
+ async function send_out(stuff) {
510
+ if (!is_external_link) return
511
+
512
+ console.log(`send_out ${url} ${JSON.stringify(stuff, null, 4).slice(0, 1000)}`)
513
+
514
+ var r = await my_fetch({ method: "PUT", ...stuff })
515
+
516
+ // the server has acknowledged this version,
517
+ // so add it to the fork point
518
+ if (r.ok) await self.update_fork_point(stuff.version[0], stuff.parents)
519
+
520
+ // if we're not authorized,
521
+ if (r.status == 401 || r.status == 403) {
522
+ // and it's one of our versions (a local edit),
523
+ if (self.peer === braid_text.decode_version(stuff.version[0])[0]) {
524
+ // then revert it
525
+ console.log(`access denied: reverting local edits`)
526
+ unproxy_url(url)
527
+ proxy_url(url)
509
528
  }
510
529
  }
511
- finish_something()
512
530
  }
513
531
 
514
532
  if (!start_something()) return
@@ -523,7 +541,8 @@ async function proxy_url(url) {
523
541
  version: file_last_version,
524
542
  digest: file_last_digest,
525
543
  peer: self.peer,
526
- local_edit_counter: self.local_edit_counter
544
+ local_edit_counter: self.local_edit_counter,
545
+ fork_point: self.fork_point
527
546
  } = Array.isArray(meta) ? { version: meta } : meta)
528
547
 
529
548
  if (!self.peer) self.peer = Math.random().toString(36).slice(2)
@@ -569,6 +588,16 @@ async function proxy_url(url) {
569
588
  var fullpath = await get_fullpath()
570
589
 
571
590
  while (file_needs_reading || file_needs_writing) {
591
+ async function write_meta_file() {
592
+ await require('fs').promises.writeFile(meta_path, JSON.stringify({
593
+ version: file_last_version,
594
+ digest: sha256(self.file_last_text),
595
+ peer: self.peer,
596
+ local_edit_counter: self.local_edit_counter,
597
+ fork_point: self.fork_point
598
+ }))
599
+ }
600
+
572
601
  if (file_needs_reading) {
573
602
  console.log(`reading file: ${fullpath}`)
574
603
 
@@ -611,12 +640,7 @@ async function proxy_url(url) {
611
640
 
612
641
  await braid_text.put(url, { version, parents, patches, peer: self.peer, merge_type: 'dt' })
613
642
 
614
- await require('fs').promises.writeFile(meta_path, JSON.stringify({
615
- version: file_last_version,
616
- digest: sha256(self.file_last_text),
617
- peer: self.peer,
618
- local_edit_counter: self.local_edit_counter
619
- }))
643
+ await write_meta_file()
620
644
  } else {
621
645
  add_to_version_cache(text, file_last_version)
622
646
 
@@ -630,7 +654,10 @@ async function proxy_url(url) {
630
654
  self.file_last_stat = stat
631
655
  self.file_ignore_until = Date.now() + 1000
632
656
  }
633
- if (file_needs_writing) {
657
+ if (file_needs_writing === 'just_meta_file') {
658
+ file_needs_writing = false
659
+ await write_meta_file()
660
+ } else if (file_needs_writing) {
634
661
  file_needs_writing = false
635
662
  let { version, body } = await braid_text.get(url, {})
636
663
  if (!v_eq(version, file_last_version)) {
@@ -654,16 +681,10 @@ async function proxy_url(url) {
654
681
  self.file_last_text = body
655
682
  self.file_ignore_until = Date.now() + 1000
656
683
  await require('fs').promises.writeFile(fullpath, self.file_last_text)
657
-
658
-
659
- await require('fs').promises.writeFile(meta_path, JSON.stringify({
660
- version: file_last_version,
661
- digest: sha256(self.file_last_text),
662
- peer: self.peer,
663
- local_edit_counter: self.local_edit_counter
664
- }))
665
684
  }
666
685
 
686
+ await write_meta_file()
687
+
667
688
  if (await is_read_only(fullpath) !== self.file_read_only) {
668
689
  self.file_ignore_until = Date.now() + 1000
669
690
  await set_read_only(fullpath, self.file_read_only)
@@ -682,8 +703,90 @@ async function proxy_url(url) {
682
703
  file_loop_pump_lock--
683
704
  }
684
705
 
685
- if (is_external_link) connect()
686
- function connect() {
706
+ self.update_fork_point = async (event, parents) => {
707
+ var resource = await braid_text.get_resource(url)
708
+
709
+ // special case:
710
+ // if current fork point has all parents,
711
+ // then we can just remove those
712
+ // and add event
713
+ var fork_set = new Set(self.fork_point)
714
+ if (parents.every(p => fork_set.has(p))) {
715
+ parents.forEach(p => fork_set.delete(p))
716
+ fork_set.add(event)
717
+ self.fork_point = [...fork_set.values()]
718
+ } else {
719
+ // full-proof approach..
720
+ var looking_for = fork_set
721
+ looking_for.add(event)
722
+
723
+ self.fork_point = []
724
+ var shadow = new Set()
725
+
726
+ var bytes = resource.doc.toBytes()
727
+ var [_, events, parentss] = braid_text.dt_parse([...bytes])
728
+ for (var i = events.length - 1; i >= 0 && looking_for.size; i--) {
729
+ var e = events[i].join('-')
730
+ if (looking_for.has(e)) {
731
+ looking_for.delete(e)
732
+ if (!shadow.has(e)) self.fork_point.push(e)
733
+ shadow.add(e)
734
+ }
735
+ if (shadow.has(e))
736
+ parentss[i].forEach(p => shadow.add(p.join('-')))
737
+ }
738
+ }
739
+ self.fork_point.sort()
740
+ self.signal_file_needs_writing(true)
741
+ }
742
+
743
+ async function find_fork_point() {
744
+ console.log(`[find_fork_point] url: ${url}`)
745
+
746
+ // see if they have the fork point
747
+ if (self.fork_point) {
748
+ var r = await my_fetch({ method: "HEAD", version: self.fork_point })
749
+ if (r.ok) {
750
+ console.log(`[find_fork_point] they have our latest fork point, horray!`)
751
+ return self.fork_point
752
+ }
753
+ }
754
+
755
+ // otherwise let's binary search for new fork point..
756
+ var resource = await braid_text.get_resource(url)
757
+ var bytes = resource.doc.toBytes()
758
+ var [_, events, __] = braid_text.dt_parse([...bytes])
759
+ events = events.map(x => x.join('-'))
760
+
761
+ var min = -1
762
+ var max = events.length
763
+ self.fork_point = []
764
+ while (min + 1 < max) {
765
+ var i = Math.floor((min + max)/2)
766
+ var version = [events[i]]
767
+
768
+ console.log(`min=${min}, max=${max}, i=${i}, version=${version}`)
769
+
770
+ var st = Date.now()
771
+ var r = await my_fetch({ method: "HEAD", version })
772
+ console.log(`fetched in ${Date.now() - st}`)
773
+
774
+ if (r.ok) {
775
+ min = i
776
+ self.fork_point = version
777
+ } else max = i
778
+ }
779
+ console.log(`[find_fork_point] settled on: ${JSON.stringify(self.fork_point)}`)
780
+ self.signal_file_needs_writing(true)
781
+ return self.fork_point
782
+ }
783
+
784
+ if (is_external_link) find_fork_point().then(fork_point => {
785
+ connect(fork_point)
786
+ send_new_stuff(fork_point)
787
+ })
788
+
789
+ function connect(fork_point) {
687
790
  let a = new AbortController()
688
791
  aborts.add(a)
689
792
  self.reconnect = () => {
@@ -691,7 +794,7 @@ async function proxy_url(url) {
691
794
 
692
795
  aborts.delete(a)
693
796
  a.abort()
694
- connect()
797
+ connect(fork_point)
695
798
  }
696
799
 
697
800
  console.log(`connecting to ${url}`)
@@ -705,12 +808,8 @@ async function proxy_url(url) {
705
808
  subscribe: true,
706
809
  retry: {
707
810
  onRes: (res) => {
708
- if (res.status !== 209) {
709
- log_error(`Can't sync ${url} -- got bad response ${res.status} from server (expected 209)`)
710
- return console.log(
711
- `FAILED TO CONNECT TO: ${url}\n` +
712
- `GOT STATUS CODE: ${res.status}, expected 209.`)
713
- }
811
+ if (res.status !== 209)
812
+ return log_error(`Can't sync ${url} -- got bad response ${res.status} from server (expected 209)`)
714
813
 
715
814
  console.log(`connected to ${url}`)
716
815
  console.log(` editable = ${res.headers.get('editable')}`)
@@ -721,8 +820,9 @@ async function proxy_url(url) {
721
820
  },
722
821
  heartbeats: 120,
723
822
  parents: async () => {
724
- let cur = await braid_text.get(url, {})
725
- if (cur.version.length) return cur.version
823
+ var x = fork_point || await find_fork_point()
824
+ fork_point = null
825
+ return x
726
826
  },
727
827
  peer: self.peer
728
828
  }).then(x => {
@@ -740,6 +840,10 @@ async function proxy_url(url) {
740
840
 
741
841
  await braid_text.put(url, { ...update, peer: self.peer, merge_type: 'dt' })
742
842
 
843
+ // the server is giving us this version,
844
+ // so they must have it,
845
+ // so let's add it to our fork point
846
+ await self.update_fork_point(update.version[0], update.parents)
743
847
 
744
848
  self.signal_file_needs_writing()
745
849
  finish_something()
@@ -748,107 +852,30 @@ async function proxy_url(url) {
748
852
  }
749
853
 
750
854
  // send them stuff we have but they don't
751
- if (is_external_link) send_new_stuff()
752
- async function send_new_stuff() {
753
- if (!start_something()) return
754
- try {
755
- var a = new AbortController()
756
- aborts.add(a)
757
- var r = await braid_fetch(url, {
758
- signal: a.signal,
759
- method: "HEAD",
760
- headers: {
761
- Accept: 'text/plain',
762
- ...(x => x && {Cookie: x})(config.cookies?.[new URL(url).hostname]),
763
- },
764
- retry: true
765
- })
766
- aborts.delete(a)
767
-
768
- if (r.headers.get('editable') === 'false') {
769
- console.log('do not send updates for read-only file: ' + url)
770
- return
771
- }
772
-
773
- if (r.headers.get('version') == null) {
774
- log_error(`Can't sync ${url} -- got no version from server`)
775
- return console.log(`GOT NO VERSION FROM: ${url}`)
776
- }
777
- var parents = JSON.parse(`[${r.headers.get('version')}]`)
778
-
779
- var bytes = (await braid_text.get_resource(url)).doc.toBytes()
780
- var [_, versions, __] = braid_text.dt_parse([...bytes])
781
- var agents = {}
782
- for (var v of versions) agents[v[0]] = v[1]
783
-
784
- function we_have_it(version) {
785
- var m = version.match(/^(.*)-(\d+)$/s)
786
- var agent = m[1]
787
- var seq = 1 * m[2]
788
- return (agents[agent] ?? -1) >= seq
789
- }
790
-
791
- if (parents.length && !parents.some(we_have_it)) {
792
- var min = 0
793
- var max = versions.length
794
- var last_good_parents = []
795
- while (min < max) {
796
- var i = Math.ceil((min + max)/2)
797
- parents = i ? [versions[i - 1].join('-')] : []
798
-
799
- console.log(`min=${min}, max=${max}, i=${i}, parents=${parents}`)
800
-
801
- var a = new AbortController()
802
- aborts.add(a)
803
- var st = Date.now()
804
- var r = await braid_fetch(url, {
805
- signal: a.signal,
806
- method: "HEAD",
807
- parents,
808
- headers: {
809
- Accept: 'text/plain',
810
- ...(x => x && {Cookie: x})(config.cookies?.[new URL(url).hostname]),
811
- },
812
- retry: true
813
- })
814
- console.log(`fetched in ${Date.now() - st}`)
815
- aborts.delete(a)
816
-
817
- if (r.ok) {
818
- min = i
819
- last_good_parents = parents
820
- } else {
821
- max = i - 1
822
- }
855
+ async function send_new_stuff(fork_point) {
856
+ var r = await my_fetch({ method: "HEAD" })
857
+ if (r.headers.get('editable') === 'false')
858
+ return console.log('do not send updates for read-only file: ' + url)
859
+
860
+ var in_parallel = create_parallel_promises(10)
861
+ braid_text.get(url, braid_text_get_options = {
862
+ parents: fork_point,
863
+ merge_type: 'dt',
864
+ peer: self.peer,
865
+ subscribe: async (u) => {
866
+ if (u.version.length) {
867
+ self.signal_file_needs_writing()
868
+ in_parallel(() => send_out({...u, peer: self.peer}))
823
869
  }
824
- parents = last_good_parents
825
-
826
- console.log(`found good parents: ${parents}: ${url}`)
827
- }
828
-
829
- var in_parallel = create_parallel_promises(10)
830
- braid_text.get(url, braid_text_get_options = {
831
- parents,
832
- merge_type: 'dt',
833
- peer: self.peer,
834
- subscribe: async (u) => {
835
- if (u.version.length) {
836
- self.signal_file_needs_writing()
837
- in_parallel(() => send_out({...u, peer: self.peer}))
838
- }
839
- },
840
- })
841
- } catch (e) {
842
- if (e?.name !== "AbortError") console.log(e)
843
- }
844
- finish_something()
870
+ },
871
+ })
845
872
  }
846
873
 
847
874
  // for config and errors file, listen for web changes
848
875
  if (!is_external_link) braid_text.get(url, braid_text_get_options = {
849
876
  merge_type: 'dt',
850
877
  peer: self.peer,
851
- subscribe: self.signal_file_needs_writing,
878
+ subscribe: () => self.signal_file_needs_writing(),
852
879
  })
853
880
 
854
881
  return self
package/package.json CHANGED
@@ -1,13 +1,13 @@
1
1
  {
2
2
  "name": "braidfs",
3
- "version": "0.0.82",
3
+ "version": "0.0.84",
4
4
  "description": "braid technology synchronizing files and webpages",
5
5
  "author": "Braid Working Group",
6
6
  "repository": "braid-org/braidfs",
7
7
  "homepage": "https://braid.org",
8
8
  "dependencies": {
9
9
  "braid-http": "^1.3.73",
10
- "braid-text": "^0.2.22",
10
+ "braid-text": "^0.2.23",
11
11
  "chokidar": "^3.6.0"
12
12
  },
13
13
  "bin": {