braidfs 0.0.95 → 0.0.97

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (2)
  1. package/index.js +300 -207
  2. package/package.json +1 -1
package/index.js CHANGED
@@ -13,7 +13,8 @@ var trash = `${braidfs_config_dir}/trash`
  var temp_folder = `${braidfs_config_dir}/temp`

  var config = null,
- watcher_misses = 0
+ watcher_misses = 0,
+ reconnect_rate_limiter = new ReconnectRateLimiter()

  if (require('fs').existsSync(sync_base)) {
  try {
@@ -378,7 +379,8 @@ async function sync_url(url) {
  if (!sync_url.cache) sync_url.cache = {}
  if (!sync_url.chain) sync_url.chain = Promise.resolve()
  if (!sync_url.cache[path]) {
- var freed = false,
+ var self = {url},
+ freed = false,
  aborts = new Set(),
  braid_text_get_options = null
  var wait_promise = Promise.resolve()
@@ -398,9 +400,8 @@ async function sync_url(url) {
  if (!unsync_url.cache) unsync_url.cache = {}
  unsync_url.cache[path] = async () => {
  freed = true
- for (let a of aborts) a.abort()
+ await self.disconnect?.()
  await wait_promise
- if (braid_text_get_options) await braid_text.forget(url, braid_text_get_options)

  delete braid_text.cache[url]
  for (let f of await braid_text.get_files_for_key(url)) {
@@ -416,8 +417,6 @@ async function sync_url(url) {
  async function init() {
  if (freed) return

- var self = {url}
-
  console.log(`sync_url: ${url}`)

  var resource = await braid_text.get_resource(url)
@@ -727,158 +726,172 @@ async function sync_url(url) {
  return frontier.sort()
  }

- async function my_fetch(params) {
+ var waitTime = 1
+ var last_connect_timer = null
+
+ if (is_external_link) connect()
+ async function connect() {
  if (freed) return
- try {
- var a = new AbortController()
- aborts.add(a)
- return await braid_fetch(url, {
- signal: a.signal,
- headers: {
- "Merge-Type": "dt",
- "Content-Type": 'text/plain',
- ...(x => x && {Cookie: x})(config.cookies?.[new URL(url).hostname])
- },
- retry: { retryRes: r => r.status !== 401 && r.status !== 403 },
- ...params
- })
- } catch (e) {
- if (freed) return
- if (e?.name !== "AbortError") console.log(e)
- } finally {
- if (freed) return
- aborts.delete(a)
+ if (last_connect_timer) return
+ console.log(`connecting to ${url}`)
+
+ var closed = false
+ var prev_disconnect = self.disconnect
+ self.disconnect = async () => {
+ if (closed) return
+ closed = true
+ reconnect_rate_limiter.on_diss(url)
+ for (var a of aborts) a.abort()
+ aborts.clear()
+ if (braid_text_get_options) await braid_text.forget(url, braid_text_get_options)
+ braid_text_get_options = null
+ }
+ self.reconnect = connect
+
+ await prev_disconnect?.()
+ if (freed || closed) return
+
+ async function retry(e) {
+ if (freed || closed) return
+ var p = self.disconnect()
+
+ console.log(`reconnecting in ${waitTime}s: ${url} after error: ${e}`)
+ last_connect_timer = setTimeout(async () => {
+ await p
+ await reconnect_rate_limiter.get_turn(url)
+ last_connect_timer = null
+ connect()
+ }, waitTime * 1000)
+ waitTime = Math.min(waitTime + 1, 3)
  }
- }
-
- async function send_out(stuff) {
- if (!is_external_link) return
- if (freed) return

- console.log(`send_out ${url} ${JSON.stringify(stuff, null, 4).slice(0, 1000)}`)
+ try {
+ var initial_connect_done
+ var initial_connect_promise = new Promise(done => initial_connect_done = done)

- var r = await my_fetch({ method: "PUT", ...stuff })
- if (freed) return
+ async function my_fetch(params) {
+ if (freed || closed) return
+ try {
+ var a = new AbortController()
+ aborts.add(a)
+ return await braid_fetch(url, {
+ signal: a.signal,
+ headers: {
+ "Merge-Type": "dt",
+ "Content-Type": 'text/plain',
+ ...(x => x && {Cookie: x})(config.cookies?.[new URL(url).hostname])
+ },
+ ...params
+ })
+ } catch (e) {
+ if (freed || closed) return
+ aborts.delete(a)
+ throw e
+ }
+ }
+
+ async function send_out(stuff) {
+ if (freed || closed) return
+
+ console.log(`send_out ${url} ${JSON.stringify(stuff, null, 4).slice(0, 1000)}`)
+
+ var r = await my_fetch({ method: "PUT", ...stuff,
+ retry: { retryRes: r => r.status !== 401 && r.status !== 403 }})
+ if (freed || closed) return

- // the server has acknowledged this version,
- // so add it to the fork point
- if (r.ok) self.update_fork_point(stuff.version[0], stuff.parents)
-
- // if we're not authorized,
- if (r.status == 401 || r.status == 403) {
- // and it's one of our versions (a local edit),
- if (self.peer === braid_text.decode_version(stuff.version[0])[0]) {
- // then revert it
- console.log(`access denied: reverting local edits`)
- unsync_url(url)
- sync_url(url)
+ // the server has acknowledged this version,
+ // so add it to the fork point
+ if (r.ok) self.update_fork_point(stuff.version[0], stuff.parents)
+
+ // if we're not authorized,
+ if (r.status == 401 || r.status == 403) {
+ // and it's one of our versions (a local edit),
+ if (self.peer === braid_text.decode_version(stuff.version[0])[0]) {
+ // then revert it
+ console.log(`access denied: reverting local edits`)
+ unsync_url(url)
+ sync_url(url)
+ }
+ }
  }
- }
- }

- async function find_fork_point() {
- if (freed) return
- console.log(`[find_fork_point] url: ${url}`)
+ async function find_fork_point() {
+ if (freed || closed) return
+ console.log(`[find_fork_point] url: ${url}`)

- // see if remote has the fork point
- if (self.fork_point) {
- var r = await my_fetch({ method: "HEAD", version: self.fork_point })
- if (freed) return
- if (r.ok) {
- console.log(`[find_fork_point] it has our latest fork point, hooray!`)
- return self.fork_point
+ // see if remote has the fork point
+ if (self.fork_point) {
+ var r = await my_fetch({ method: "HEAD", version: self.fork_point })
+ if (freed || closed) return
+ if (r.ok) return console.log(`[find_fork_point] it has our latest fork point, hooray!`)
+ }
+
+ // otherwise let's binary search for new fork point..
+ var bytes = resource.doc.toBytes()
+ var [_, events, __] = braid_text.dt_parse([...bytes])
+ events = events.map(x => x.join('-'))
+
+ var min = -1
+ var max = events.length
+ self.fork_point = []
+ while (min + 1 < max) {
+ var i = Math.floor((min + max)/2)
+ var version = [events[i]]
+
+ console.log(`min=${min}, max=${max}, i=${i}, version=${version}`)
+
+ var st = Date.now()
+ var r = await my_fetch({ method: "HEAD", version })
+ if (freed || closed) return
+ console.log(`fetched in ${Date.now() - st}`)
+
+ if (r.ok) {
+ min = i
+ self.fork_point = version
+ } else max = i
+ }
+ console.log(`[find_fork_point] settled on: ${JSON.stringify(self.fork_point)}`)
+ self.signal_file_needs_writing(true)
  }
- }

- // otherwise let's binary search for new fork point..
- var bytes = resource.doc.toBytes()
- var [_, events, __] = braid_text.dt_parse([...bytes])
- events = events.map(x => x.join('-'))
-
- var min = -1
- var max = events.length
- self.fork_point = []
- while (min + 1 < max) {
- var i = Math.floor((min + max)/2)
- var version = [events[i]]
-
- console.log(`min=${min}, max=${max}, i=${i}, version=${version}`)
-
- var st = Date.now()
- var r = await my_fetch({ method: "HEAD", version })
- if (freed) return
- console.log(`fetched in ${Date.now() - st}`)
-
- if (r.ok) {
- min = i
- self.fork_point = version
- } else max = i
- }
- console.log(`[find_fork_point] settled on: ${JSON.stringify(self.fork_point)}`)
- self.signal_file_needs_writing(true)
- return self.fork_point
- }
+ await find_fork_point()
+ if (freed || closed) return

- var initial_connect_done
- var initial_connect_promise = new Promise(done => initial_connect_done = done)
+ await send_new_stuff()
+ if (freed || closed) return

- if (is_external_link) find_fork_point().then(async fork_point => {
- if (freed) return
- await send_new_stuff(fork_point)
- if (freed) return
- connect(fork_point)
- })
-
- function connect(fork_point) {
- if (freed) return
- let a = new AbortController()
- aborts.add(a)
- self.reconnect = () => {
- if (freed) return
- console.log(`reconnecting ${url}`)
+ let a = new AbortController()
+ aborts.add(a)
+ var res = await braid_fetch(url, {
+ signal: a.signal,
+ headers: {
+ "Merge-Type": "dt",
+ Accept: 'text/plain',
+ ...(x => x && {Cookie: x})(config.cookies?.[new URL(url).hostname]),
+ },
+ subscribe: true,
+ heartbeats: 120,
+ parents: self.fork_point,
+ peer: self.peer
+ })
+ if (freed || closed) return

- aborts.delete(a)
- a.abort()
- connect(fork_point)
- }
+ if (res.status < 200 || res.status >= 300) return retry(new Error(`unexpected status: ${res.status}`))

- console.log(`connecting to ${url}`)
- braid_fetch(url, {
- signal: a.signal,
- headers: {
- "Merge-Type": "dt",
- Accept: 'text/plain',
- ...(x => x && {Cookie: x})(config.cookies?.[new URL(url).hostname]),
- },
- subscribe: true,
- retry: {
- retryRes: () => true,
- onRes: (res) => {
- if (res.status !== 209)
- return log_error(`Can't sync ${url} -- got bad response ${res.status} from server (expected 209)`)
-
- console.log(`connected to ${url}`)
- console.log(`  editable = ${res.headers.get('editable')}`)
-
- self.file_read_only = res.headers.get('editable') === 'false'
- self.signal_file_needs_writing()
- }
- },
- heartbeats: 120,
- parents: async () => {
- if (freed) return
- var x = fork_point || await find_fork_point()
- if (freed) return
- fork_point = null
- return x
- },
- peer: self.peer
- }).then(x => {
- if (freed) return
- if (x.status !== 209) throw new Error(`unexpected status: ${x.status}`)
+ if (res.status !== 209)
+ return log_error(`Can't sync ${url} -- got bad response ${res.status} from server (expected 209)`)
+
+ console.log(`connected to ${url}`)
+ console.log(`  editable = ${res.headers.get('editable')}`)
+
+ reconnect_rate_limiter.on_conn(url)
+
+ self.file_read_only = res.headers.get('editable') === 'false'
+ self.signal_file_needs_writing()
+
  initial_connect_done()
- x.subscribe(async update => {
- if (freed) return
+ res.subscribe(async update => {
+ if (freed || closed) return
  console.log(`got external update about ${url}`)

  if (update.body) update.body = update.body_text
@@ -889,7 +902,7 @@ async function sync_url(url) {
  if (update.version.length !== 1) throw 'unexpected'

  await wait_on(braid_text.put(url, { ...update, peer: self.peer, merge_type: 'dt' }))
- if (freed) return
+ if (freed || closed) return

  // the server is giving us this version,
  // so it must have it,
@@ -897,72 +910,72 @@ async function sync_url(url) {
  self.update_fork_point(update.version[0], update.parents)

  self.signal_file_needs_writing()
- }, e => (e?.name !== "AbortError") && console.log(e))
- }).catch(e => (e?.name !== "AbortError") && console.log(e))
- }
-
- // send it stuff we have but it doesn't't
- async function send_new_stuff(fork_point) {
- if (freed) return
- var q = []
- var in_flight = new Map()
- var max_in_flight = 10
- var send_pump_lock = 0
-
- async function send_pump() {
- send_pump_lock++
- if (send_pump_lock > 1) return
- try {
- if (freed) return
- if (in_flight.size >= max_in_flight) return
- if (!q.length) {
- var frontier = self.fork_point
- for (var u of in_flight.values())
- frontier = extend_frontier(frontier, u.version[0], u.parents)
-
- var options = {
- parents: frontier,
- merge_type: 'dt',
- peer: self.peer,
- subscribe: u => u.version.length && q.push(u)
+ }, retry)
+
+ // send it stuff we have but it doesn't't
+ async function send_new_stuff() {
+ if (freed || closed) return
+ var q = []
+ var in_flight = new Map()
+ var max_in_flight = 10
+ var send_pump_lock = 0
+
+ async function send_pump() {
+ send_pump_lock++
+ if (send_pump_lock > 1) return
+ try {
+ if (freed || closed) return
+ if (in_flight.size >= max_in_flight) return
+ if (!q.length) {
+ var frontier = self.fork_point
+ for (var u of in_flight.values())
+ frontier = extend_frontier(frontier, u.version[0], u.parents)
+
+ var options = {
+ parents: frontier,
+ merge_type: 'dt',
+ peer: self.peer,
+ subscribe: u => u.version.length && q.push(u)
+ }
+ await braid_text.get(url, options)
+ await braid_text.forget(url, options)
+ }
+ while (q.length && in_flight.size < max_in_flight) {
+ let u = q.shift()
+ in_flight.set(u.version[0], u);
+ (async () => {
+ await initial_connect_promise
+ if (freed || closed) return
+ await send_out({...u, peer: self.peer})
+ if (freed || closed) return
+ in_flight.delete(u.version[0])
+ setTimeout(send_pump, 0)
+ })()
+ }
+ } finally {
+ var retry = send_pump_lock > 1
+ send_pump_lock = 0
+ if (retry) setTimeout(send_pump, 0)
  }
- await braid_text.get(url, options)
- await braid_text.forget(url, options)
- }
- while (q.length && in_flight.size < max_in_flight) {
- let u = q.shift()
- in_flight.set(u.version[0], u);
- (async () => {
- await initial_connect_promise
- if (freed) return
- await send_out({...u, peer: self.peer})
- if (freed) return
- in_flight.delete(u.version[0])
- setTimeout(send_pump, 0)
- })()
  }
- } finally {
- var retry = send_pump_lock > 1
- send_pump_lock = 0
- if (retry) setTimeout(send_pump, 0)
- }
- }

- var initial_stuff = true
- await wait_on(braid_text.get(url, braid_text_get_options = {
- parents: fork_point,
- merge_type: 'dt',
- peer: self.peer,
- subscribe: async (u) => {
- if (freed) return
- if (u.version.length) {
- self.signal_file_needs_writing()
- if (initial_stuff || in_flight.size < max_in_flight) q.push(u)
- send_pump()
- }
- },
- }))
- initial_stuff = false
+ var initial_stuff = true
+ await wait_on(braid_text.get(url, braid_text_get_options = {
+ parents: self.fork_point,
+ merge_type: 'dt',
+ peer: self.peer,
+ subscribe: async (u) => {
+ if (freed || closed) return
+ if (u.version.length) {
+ self.signal_file_needs_writing()
+ if (initial_stuff || in_flight.size < max_in_flight) q.push(u)
+ send_pump()
+ }
+ },
+ }))
+ initial_stuff = false
+ }
+ } catch (e) { return retry(e) }
  }

  // for config and errors file, listen for web changes
@@ -997,6 +1010,86 @@ async function ensure_path(path) {
  }
  }

+ function ReconnectRateLimiter(wait_time = 1000) {
+ var self = {}
+
+ self.conns = new Map() // Map<host, Set<url>>
+ self.host_to_q = new Map() // Map<host, Array<resolve>>
+ self.qs = [] // Array<{host, turns: Array<resolve>, last_turn}>
+ self.last_turn = 0
+ self.timer = null
+
+ function process() {
+ if (self.timer) clearTimeout(self.timer)
+ self.timer = null
+
+ if (!self.qs.length) return
+ var now = Date.now()
+ var my_last_turn = () => self.conns.size === 0 ? self.last_turn : self.qs[0].last_turn
+
+ while (self.qs.length && now >= my_last_turn() + wait_time) {
+ var x = self.qs.shift()
+ if (!x.turns.length) {
+ self.host_to_q.delete(x.host)
+ continue
+ }
+
+ x.turns.shift()()
+ x.last_turn = self.last_turn = now
+ self.qs.push(x)
+ }
+
+ if (self.qs.length)
+ self.timer = setTimeout(process, Math.max(0,
+ my_last_turn() + wait_time - now))
+ }
+
+ self.get_turn = async (url) => {
+ var host = new URL(url).host
+
+ // If host has connections, give turn immediately
+ if (self.conns.has(host)) return
+
+ console.log(`throttling reconn to ${url} (no conns yet to ${self.conns.size ? host : 'anything'})`)
+
+ if (!self.host_to_q.has(host)) {
+ var turns = []
+ self.host_to_q.set(host, turns)
+ self.qs.unshift({host, turns, last_turn: 0})
+ }
+ var p = new Promise(resolve => self.host_to_q.get(host).push(resolve))
+ process()
+ await p
+ }
+
+ self.on_conn = url => {
+ var host = new URL(url).host
+ if (!self.conns.has(host))
+ self.conns.set(host, new Set())
+ self.conns.get(host).add(url)
+
+ // If there are turns waiting for this host, resolve them all immediately
+ var turns = self.host_to_q.get(host)
+ if (turns) {
+ for (var turn of turns) turn()
+ turns.splice(0, turns.length)
+ }
+
+ process()
+ }
+
+ self.on_diss = url => {
+ var host = new URL(url).host
+ var urls = self.conns.get(host)
+ if (urls) {
+ urls.delete(url)
+ if (urls.size === 0) self.conns.delete(host)
+ }
+ }
+
+ return self
+ }
+
  ////////////////////////////////

  function normalize_url(url) {
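
Note on the new ReconnectRateLimiter added above: judging from the call sites in connect(), the intended flow appears to be to await get_turn(url) before dialing a host, call on_conn(url) once a 209 subscription is established, and call on_diss(url) when that connection is torn down. The sketch below exercises that flow in isolation; it assumes the ReconnectRateLimiter function from the hunk above is in scope, and the dial()/try_connect() driver is purely illustrative, not braidfs code.

    // Minimal sketch, not part of braidfs: per-host reconnect throttling.
    // Assumes ReconnectRateLimiter (from the diff above) is defined in scope.
    var limiter = new ReconnectRateLimiter()   // default wait_time of 1000 ms

    // Hypothetical driver; try_connect(url) stands in for the real braid_fetch subscribe call
    // and is assumed to resolve true on success, false on failure.
    async function dial(url, try_connect) {
        while (true) {
            // Resolves immediately if this host already has a live connection;
            // otherwise the request is queued and turns are handed out ~wait_time apart.
            await limiter.get_turn(url)
            if (await try_connect(url)) {
                // Success: later get_turn() calls for this host resolve immediately.
                limiter.on_conn(url)
                return
            }
            // On failure, just loop: the next get_turn() waits for another turn.
        }
    }

    // When an established subscription later drops:
    //   limiter.on_diss(url)   // forget the connection, so reconnects to this host are throttled again
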
package/package.json CHANGED
@@ -1,6 +1,6 @@
  {
  "name": "braidfs",
- "version": "0.0.95",
+ "version": "0.0.97",
  "description": "braid technology synchronizing files and webpages",
  "author": "Braid Working Group",
  "repository": "braid-org/braidfs",