braidfs 0.0.98 → 0.0.101

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (2)
  1. package/index.js +158 -15
  2. package/package.json +3 -3
package/index.js CHANGED
@@ -4,6 +4,8 @@ var { diff_main } = require(`${__dirname}/diff.js`),
  braid_text = require("braid-text"),
  braid_fetch = require('braid-http').fetch

+ braid_fetch.set_fetch(fetch_http2)
+
  var sync_base = `${require('os').homedir()}/http`,
  braidfs_config_dir = `${sync_base}/.braidfs`,
  braidfs_config_file = `${braidfs_config_dir}/config`,
@@ -324,7 +326,10 @@ async function scan_files() {
  scan_files.running = true
  while (scan_files.do_again) {
  scan_files.do_again = false
- console.log(`scan files..`)
+ var timestamp = new Date().toLocaleTimeString(
+ 'en-US', {minute: '2-digit', second: '2-digit', hour: '2-digit'}
+ )
+ console.log(`scan files.. `, timestamp)
  if (await f(sync_base))
  on_watcher_miss(`scanner picked up a change that the watcher should have gotten`, false)
  }
@@ -688,26 +693,26 @@ async function sync_url(url) {
  file_loop_pump_lock--
  }

- self.update_fork_point = (event, parents) => {
- self.fork_point = extend_frontier(self.fork_point, event, parents)
+ self.update_fork_point = (version, parents) => {
+ self.fork_point = extend_frontier(self.fork_point, version, parents)
  self.signal_file_needs_writing(true)
  }

- function extend_frontier(frontier, event, parents) {
+ function extend_frontier(frontier, version, parents) {
  // special case:
  // if current frontier has all parents,
  // then we can just remove those
- // and add event
+ // and add version
  var frontier_set = new Set(frontier)
  if (parents.length &&
  parents.every(p => frontier_set.has(p))) {
  parents.forEach(p => frontier_set.delete(p))
- frontier_set.add(event)
+ for (var event of version) frontier_set.add(event)
  frontier = [...frontier_set.values()]
  } else {
  // full-proof approach..
  var looking_for = frontier_set
- looking_for.add(event)
+ for (var event of version) looking_for.add(event)

  frontier = []
  var shadow = new Set()
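
This hunk widens update_fork_point and extend_frontier from taking a single event to taking a whole version, i.e. an array of event ids, since a Braid update's version can name more than one event. A minimal standalone sketch of the fast path above, using made-up ids (not taken from braidfs):

    // Sketch of the fast path: if the current frontier contains every
    // parent, drop the parents and add each event in the new version.
    // The ids below are hypothetical, for illustration only.
    function extend_frontier_sketch(frontier, version, parents) {
        var s = new Set(frontier)
        if (parents.length && parents.every(p => s.has(p))) {
            parents.forEach(p => s.delete(p))
            for (var event of version) s.add(event)
            return [...s.values()]
        }
        return null // the real code falls back to a graph walk here
    }

    // a two-event version that replaces both of its parents:
    console.log(extend_frontier_sketch(['a-1', 'b-1'], ['c-2', 'c-3'], ['a-1', 'b-1']))
    // -> ['c-2', 'c-3']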
@@ -777,13 +782,14 @@ async function sync_url(url) {
  var a = new AbortController()
  aborts.add(a)
  return await braid_fetch(url, {
+ ...params,
  signal: a.signal,
  headers: {
+ ...params.headers,
  "Merge-Type": "dt",
  "Content-Type": 'text/plain',
  ...(x => x && {Cookie: x})(config.cookies?.[new URL(url).hostname])
  },
- ...params
  })
  } catch (e) {
  if (freed || closed) return
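
The reordering in this hunk matters because object spread resolves duplicate keys by letting later properties win. With ...params moved to the front and ...params.headers spread before the fixed header fields, a caller's params can add options and extra headers but can no longer clobber the abort signal or the whole headers object, while Merge-Type and Content-Type still take precedence. A small illustration with made-up values:

    // Later keys overwrite earlier ones in object spread.
    var params = { signal: 'caller-signal',
                   headers: { 'Merge-Type': 'simpleton', 'X-Extra': '1' } }

    // old shape: fixed fields first, params last -- params wins
    var old_shape = { signal: 'our-signal', headers: { 'Merge-Type': 'dt' }, ...params }
    console.log(old_shape.signal)                 // 'caller-signal'
    console.log(old_shape.headers['Merge-Type'])  // 'simpleton' (headers replaced wholesale)

    // new shape: params first, fixed fields last -- fixed fields win,
    // and the caller's extra headers are merged rather than dropped
    var new_shape = { ...params, signal: 'our-signal',
                      headers: { ...params.headers, 'Merge-Type': 'dt' } }
    console.log(new_shape.signal)                 // 'our-signal'
    console.log(new_shape.headers['Merge-Type'])  // 'dt'
    console.log(new_shape.headers['X-Extra'])     // '1'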
@@ -803,7 +809,7 @@ async function sync_url(url) {

  // the server has acknowledged this version,
  // so add it to the fork point
- if (r.ok) self.update_fork_point(stuff.version[0], stuff.parents)
+ if (r.ok) self.update_fork_point(stuff.version, stuff.parents)

  // if we're not authorized,
  if (r.status == 401 || r.status == 403) {
@@ -825,7 +831,7 @@ async function sync_url(url) {
  if (self.fork_point) {
  var r = await my_fetch({ method: "HEAD", version: self.fork_point })
  if (freed || closed) return
- if (r.ok) return console.log(`[find_fork_point] it has our latest fork point, hooray!`)
+ if (r.ok) return console.log(`[find_fork_point] "${url.split('/').pop()}" has our latest fork point, hooray!`)
  }

  // otherwise let's binary search for new fork point..
@@ -862,7 +868,52 @@ async function sync_url(url) {
  await send_new_stuff()
  if (freed || closed) return

- let a = new AbortController()
+ // attempt to download the initial stuff in one go,
+ // using transfer-encoding dt
+ //
+ // first check for support..
+ //
+ var res = await my_fetch({
+ method: 'HEAD',
+ headers: { 'accept-transfer-encoding': 'dt' },
+ })
+ if (freed || closed) return
+
+ if (res.ok && res.headers.get('x-transfer-encoding') === 'dt') {
+ var res = await my_fetch({
+ headers: { 'accept-transfer-encoding': 'dt' },
+ parents: self.fork_point,
+ })
+ if (freed || closed) return
+ console.log(`got external updates about ${url}`)
+
+ // manually apply the dt bytes..
+ // ..code bits taken from braid-text put..
+ var bytes = new Uint8Array(await res.arrayBuffer())
+ if (freed || closed) return
+
+ var start_i = 1 + resource.doc.getLocalVersion().reduce((a, b) => Math.max(a, b), -1)
+ resource.doc.mergeBytes(bytes)
+
+ // update resource.actor_seqs
+ var end_i = resource.doc.getLocalVersion().reduce((a, b) => Math.max(a, b), -1)
+ for (var i = start_i; i <= end_i; i++) {
+ var v = resource.doc.localToRemoteVersion([i])[0]
+ if (!resource.actor_seqs[v[0]]) resource.actor_seqs[v[0]] = new braid_text.RangeSet()
+ resource.actor_seqs[v[0]].add_range(v[1], v[1])
+ }
+
+ resource.val = resource.doc.get()
+ resource.need_defrag = true
+ await resource.db_delta(bytes)
+ if (freed || closed) return
+
+ // ..do the things we do when getting subscribe updates..
+ self.update_fork_point(JSON.parse(`[${res.headers.get('current-version')}]`), self.fork_point)
+ self.signal_file_needs_writing()
+ }
+
+ var a = new AbortController()
  aborts.add(a)
  var res = await braid_fetch(url, {
  signal: a.signal,
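
The new block in this hunk is a one-shot catch-up path: a HEAD request advertising accept-transfer-encoding: dt probes whether the server can send its history as raw dt bytes, and if the response echoes x-transfer-encoding: dt, the client fetches everything newer than its fork point in a single body, merges it into the local doc, and advances the fork point to the server's current-version header, rather than replaying each update over the subscription. A compressed sketch of that shape, where merge_dt_bytes is a hypothetical stand-in for the mergeBytes / actor_seqs / db_delta bookkeeping done above:

    // Sketch: capability probe, then one-shot dt download.
    // merge_dt_bytes() stands in for the local merge logic in the real code.
    async function catch_up_via_dt(my_fetch, fork_point, merge_dt_bytes) {
        var probe = await my_fetch({
            method: 'HEAD',
            headers: { 'accept-transfer-encoding': 'dt' },
        })
        if (!(probe.ok && probe.headers.get('x-transfer-encoding') === 'dt'))
            return null // no dt snapshot support; fall back to the subscription

        var res = await my_fetch({
            headers: { 'accept-transfer-encoding': 'dt' },
            parents: fork_point, // only ask for history past our fork point
        })
        await merge_dt_bytes(new Uint8Array(await res.arrayBuffer()))

        // the server's current-version header becomes the new fork point
        return JSON.parse(`[${res.headers.get('current-version')}]`)
    }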
@@ -883,8 +934,8 @@ async function sync_url(url) {
  if (res.status !== 209)
  return log_error(`Can't sync ${url} -- got bad response ${res.status} from server (expected 209)`)

- console.log(`connected to ${url}`)
- console.log(` editable = ${res.headers.get('editable')}`)
+ console.log(`connected to ${url}`.padEnd(70, ' ')
+ + `(editable: ${res.headers.get('editable')})`)

  reconnect_rate_limiter.on_conn(url)

@@ -909,7 +960,7 @@ async function sync_url(url) {
  // the server is giving us this version,
  // so it must have it,
  // so let's add it to our fork point
- self.update_fork_point(update.version[0], update.parents)
+ self.update_fork_point(update.version, update.parents)

  self.signal_file_needs_writing()
  }, retry)
@@ -931,7 +982,7 @@ async function sync_url(url) {
  if (!q.length) {
  var frontier = self.fork_point
  for (var u of in_flight.values())
- frontier = extend_frontier(frontier, u.version[0], u.parents)
+ frontier = extend_frontier(frontier, u.version, u.parents)

  var options = {
  parents: frontier,
@@ -1092,6 +1143,98 @@ function ReconnectRateLimiter(get_wait_time) {
  return self
  }

+ async function fetch_http2(url, options = {}) {
+ if (!fetch_http2.sessions) {
+ fetch_http2.sessions = new Map()
+ process.on("exit", () => fetch_http2.sessions.forEach(s => s.close()))
+ }
+
+ var u = new URL(url)
+ if (u.protocol !== "https:") return fetch(url, options)
+
+ try {
+ var session = fetch_http2.sessions.get(u.origin)
+ if (!session || session.closed) {
+ session = require("http2").connect(u.origin, {
+ rejectUnauthorized: options.rejectUnauthorized !== false,
+ })
+ session.on("error", () => fetch_http2.sessions.delete(u.origin))
+ session.on("close", () => fetch_http2.sessions.delete(u.origin))
+ fetch_http2.sessions.set(u.origin, session)
+ }
+
+ return await new Promise((resolve, reject) => {
+ var stream = session.request({
+ ":method": options.method || "GET",
+ ":path": u.pathname + u.search,
+ ":scheme": "https",
+ ":authority": u.host,
+ ...Object.fromEntries(options.headers || []),
+ })
+
+ options.signal?.addEventListener("abort",
+ () => stream.destroy(new Error("Request aborted")),
+ { once: true })
+
+ stream.on("response", headers => {
+ var status = +headers[":status"]
+ resolve({
+ ok: status >= 200 && status < 300,
+ status,
+ statusText: "",
+ headers: new Headers(Object.fromEntries(
+ Object.entries(headers).filter(([k]) =>
+ typeof k === "string" && !k.startsWith(":")))),
+ body: new ReadableStream({
+ start(ctrl) {
+ stream.on("data", x => ctrl.enqueue(new Uint8Array(x)))
+ stream.on("end", () => ctrl.close())
+ stream.on("error", err => ctrl.error(err))
+ },
+ cancel() { stream.destroy() },
+ }),
+ bodyUsed: false,
+ async _consumeBody() {
+ this.bodyUsed = true
+ var chunks = []
+ var reader = this.body.getReader()
+
+ while (true) {
+ var { done, value } = await reader.read()
+ if (done) break
+ chunks.push(value)
+ }
+ return Buffer.concat(chunks.map((c) => (Buffer.isBuffer(c) ? c : Buffer.from(c))))
+ },
+ async text() { return (await this._consumeBody()).toString() },
+ async json() { return JSON.parse(await this.text()) },
+ async arrayBuffer() {
+ var b = await this._consumeBody()
+ return b.buffer.slice(b.byteOffset, b.byteOffset + b.byteLength)
+ },
+ })
+ })
+
+ stream.on("error", reject)
+
+ var body = options.body
+ if (!body) return stream.end()
+
+ if (body instanceof Uint8Array || Buffer.isBuffer(body)) stream.end(body)
+ else if (body instanceof Blob) body.arrayBuffer()
+ .then((b) => stream.end(Buffer.from(b)))
+ .catch(reject)
+ else stream.end(typeof body === "string" ? body : JSON.stringify(body))
+ })
+ } catch (err) {
+ if (err.code?.includes("HTTP2") || err.message?.includes("HTTP/2")) {
+ console.log("HTTP/2 failed, falling back to HTTP/1.1:", err.message)
+ return fetch(url, options)
+ }
+ throw err
+ }
+ }
+
  ////////////////////////////////

  function normalize_url(url) {
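
The added fetch_http2 keeps one HTTP/2 session per origin in a Map and returns a minimal fetch-compatible response (ok, status, headers, a ReadableStream body, plus text/json/arrayBuffer helpers), which lets concurrent requests to the same host share a single multiplexed connection. Plain fetch is still used for non-https URLs and when the HTTP/2 attempt fails with an HTTP/2-specific error. A hedged usage sketch (the URL is made up; note that request headers are passed through Object.fromEntries, so they should be supplied as a Headers object or an array of pairs, not a plain object):

    // Both requests below would share one pooled HTTP/2 session to the origin.
    async function demo() {
        var r1 = await fetch_http2('https://example.com/notes.txt')   // hypothetical URL
        console.log(r1.status, await r1.text())

        var r2 = await fetch_http2('https://example.com/notes.txt', {
            method: 'HEAD',
            headers: new Headers({ 'accept-transfer-encoding': 'dt' }),
        })
        console.log(r2.headers.get('x-transfer-encoding'))
    }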
package/package.json CHANGED
@@ -1,13 +1,13 @@
  {
  "name": "braidfs",
- "version": "0.0.98",
+ "version": "0.0.101",
  "description": "braid technology synchronizing files and webpages",
  "author": "Braid Working Group",
  "repository": "braid-org/braidfs",
  "homepage": "https://braid.org",
  "dependencies": {
- "braid-http": "^1.3.75",
- "braid-text": "^0.2.30",
+ "braid-http": "^1.3.76",
+ "braid-text": "^0.2.35",
  "chokidar": "^3.6.0"
  },
  "bin": {