braidfs 0.0.97 → 0.0.100

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (2)
  1. package/index.js +107 -8
  2. package/package.json +2 -2
package/index.js CHANGED
@@ -4,6 +4,8 @@ var { diff_main } = require(`${__dirname}/diff.js`),
     braid_text = require("braid-text"),
     braid_fetch = require('braid-http').fetch
 
+braid_fetch.set_fetch(fetch_http2)
+
 var sync_base = `${require('os').homedir()}/http`,
     braidfs_config_dir = `${sync_base}/.braidfs`,
     braidfs_config_file = `${braidfs_config_dir}/config`,
@@ -14,7 +16,8 @@ var temp_folder = `${braidfs_config_dir}/temp`
 
 var config = null,
     watcher_misses = 0,
-    reconnect_rate_limiter = new ReconnectRateLimiter()
+    reconnect_rate_limiter = new ReconnectRateLimiter(() =>
+        config?.reconnect_delay_ms ?? 3000)
 
 if (require('fs').existsSync(sync_base)) {
     try {
@@ -40,6 +43,7 @@ if (require('fs').existsSync(sync_base)) {
         cookies: { 'example.com': 'secret_pass' },
         port: 45678,
         scan_interval_ms: 1000 * 20,
+        reconnect_delay_ms: 1000 * 3
     }
     require('fs').mkdirSync(braidfs_config_dir, { recursive: true })
     require('fs').writeFileSync(braidfs_config_file, JSON.stringify(config, null, 4))
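
For reference, a minimal sketch (not part of the package) of reading the new setting back from the generated config file, assuming the default paths defined earlier in index.js (~/http/.braidfs/config) and the defaults written above with JSON.stringify(config, null, 4):

    // Sketch: load the braidfs config and read the new reconnect delay.
    // The path mirrors sync_base / braidfs_config_dir / braidfs_config_file above.
    var config_file = `${require('os').homedir()}/http/.braidfs/config`
    var config = JSON.parse(require('fs').readFileSync(config_file, 'utf8'))
    console.log(config.reconnect_delay_ms)  // 3000 with the defaults above (1000 * 3)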
@@ -322,7 +326,10 @@ async function scan_files() {
     scan_files.running = true
     while (scan_files.do_again) {
         scan_files.do_again = false
-        console.log(`scan files..`)
+        var timestamp = new Date().toLocaleTimeString(
+            'en-US', {minute: '2-digit', second: '2-digit', hour: '2-digit'}
+        )
+        console.log(`scan files.. `, timestamp)
         if (await f(sync_base))
             on_watcher_miss(`scanner picked up a change that the watcher should have gotten`, false)
     }
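
A note on the new timestamp: toLocaleTimeString with 2-digit hour, minute, and second on the en-US locale yields a 12-hour clock string, so the scan log line now looks roughly like this (illustrative output, not taken from the package):

    // Sketch: the timestamp format used by the scan log above.
    var timestamp = new Date().toLocaleTimeString(
        'en-US', {minute: '2-digit', second: '2-digit', hour: '2-digit'})
    console.log(`scan files.. `, timestamp)  // e.g. "scan files..  03:07:42 PM"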
@@ -823,7 +830,7 @@ async function sync_url(url) {
     if (self.fork_point) {
         var r = await my_fetch({ method: "HEAD", version: self.fork_point })
         if (freed || closed) return
-        if (r.ok) return console.log(`[find_fork_point] it has our latest fork point, hooray!`)
+        if (r.ok) return console.log(`[find_fork_point] "${url.split('/').pop()}" has our latest fork point, hooray!`)
     }
 
     // otherwise let's binary search for new fork point..
@@ -881,8 +888,8 @@ async function sync_url(url) {
     if (res.status !== 209)
         return log_error(`Can't sync ${url} -- got bad response ${res.status} from server (expected 209)`)
 
-    console.log(`connected to ${url}`)
-    console.log(` editable = ${res.headers.get('editable')}`)
+    console.log(`connected to ${url}`.padEnd(70, ' ')
+        + `(editable: ${res.headers.get('editable')})`)
 
     reconnect_rate_limiter.on_conn(url)
 
@@ -1010,7 +1017,7 @@ async function ensure_path(path) {
     }
 }
 
-function ReconnectRateLimiter(wait_time = 1000) {
+function ReconnectRateLimiter(get_wait_time) {
     var self = {}
 
     self.conns = new Map() // Map<host, Set<url>>
@@ -1027,7 +1034,7 @@ function ReconnectRateLimiter(wait_time = 1000) {
         var now = Date.now()
         var my_last_turn = () => self.conns.size === 0 ? self.last_turn : self.qs[0].last_turn
 
-        while (self.qs.length && now >= my_last_turn() + wait_time) {
+        while (self.qs.length && now >= my_last_turn() + get_wait_time()) {
            var x = self.qs.shift()
            if (!x.turns.length) {
                self.host_to_q.delete(x.host)
@@ -1041,7 +1048,7 @@ function ReconnectRateLimiter(wait_time = 1000) {
 
         if (self.qs.length)
             self.timer = setTimeout(process, Math.max(0,
-                my_last_turn() + wait_time - now))
+                my_last_turn() + get_wait_time() - now))
    }
 
    self.get_turn = async (url) => {
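
The two hunks above, together with the constructor change, replace the fixed wait_time with a get_wait_time callback, so the limiter re-reads its delay on every scheduling decision instead of capturing it at construction. A minimal sketch of that wiring (the settings object here is illustrative; in the package the callback reads config?.reconnect_delay_ms ?? 3000):

    // Sketch: the delay is read lazily, so changing settings.reconnect_delay_ms
    // affects the next reconnect attempt without rebuilding the limiter.
    var settings = { reconnect_delay_ms: 3000 }
    var limiter = new ReconnectRateLimiter(() => settings?.reconnect_delay_ms ?? 3000)
    settings.reconnect_delay_ms = 10000  // later calls to get_wait_time() now return 10000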
@@ -1090,6 +1097,98 @@ function ReconnectRateLimiter(wait_time = 1000) {
     return self
 }
 
+async function fetch_http2(url, options = {}) {
+    if (!fetch_http2.sessions) {
+        fetch_http2.sessions = new Map()
+        process.on("exit", () => fetch_http2.sessions.forEach(s => s.close()))
+    }
+
+    var u = new URL(url)
+    if (u.protocol !== "https:") return fetch(url, options)
+
+    try {
+        var session = fetch_http2.sessions.get(u.origin)
+        if (!session || session.closed) {
+            session = require("http2").connect(u.origin, {
+                rejectUnauthorized: options.rejectUnauthorized !== false,
+            })
+            session.on("error", () => fetch_http2.sessions.delete(u.origin))
+            session.on("close", () => fetch_http2.sessions.delete(u.origin))
+            fetch_http2.sessions.set(u.origin, session)
+        }
+
+        return await new Promise((resolve, reject) => {
+            var stream = session.request({
+                ":method": options.method || "GET",
+                ":path": u.pathname + u.search,
+                ":scheme": "https",
+                ":authority": u.host,
+                ...Object.fromEntries(options.headers || []),
+            })
+
+            options.signal?.addEventListener("abort",
+                () => stream.destroy(new Error("Request aborted")),
+                { once: true })
+
+            stream.on("response", headers => {
+                var status = +headers[":status"]
+                resolve({
+                    ok: status >= 200 && status < 300,
+                    status,
+                    statusText: "",
+                    headers: new Headers(Object.fromEntries(
+                        Object.entries(headers).filter(([k]) =>
+                            typeof k === "string" && !k.startsWith(":")))),
+                    body: new ReadableStream({
+                        start(ctrl) {
+                            stream.on("data", x => ctrl.enqueue(new Uint8Array(x)))
+                            stream.on("end", () => ctrl.close())
+                            stream.on("error", err => ctrl.error(err))
+                        },
+                        cancel() { stream.destroy() },
+                    }),
+                    bodyUsed: false,
+                    async _consumeBody() {
+                        this.bodyUsed = true
+                        var chunks = []
+                        var reader = this.body.getReader()
+
+                        while (true) {
+                            var { done, value } = await reader.read()
+                            if (done) break
+                            chunks.push(value)
+                        }
+                        return Buffer.concat(chunks.map((c) => (Buffer.isBuffer(c) ? c : Buffer.from(c))))
+                    },
+                    async text() { return (await this._consumeBody()).toString() },
+                    async json() { return JSON.parse(await this.text()) },
+                    async arrayBuffer() {
+                        var b = await this._consumeBody()
+                        return b.buffer.slice(b.byteOffset, b.byteOffset + b.byteLength)
+                    },
+                })
+            })
+
+            stream.on("error", reject)
+
+            var body = options.body
+            if (!body) return stream.end()
+
+            if (body instanceof Uint8Array || Buffer.isBuffer(body)) stream.end(body)
+            else if (body instanceof Blob) body.arrayBuffer()
+                .then((b) => stream.end(Buffer.from(b)))
+                .catch(reject)
+            else stream.end(typeof body === "string" ? body : JSON.stringify(body))
+        })
+    } catch (err) {
+        if (err.code?.includes("HTTP2") || err.message?.includes("HTTP/2")) {
+            console.log("HTTP/2 failed, falling back to HTTP/1.1:", err.message)
+            return fetch(url, options)
+        }
+        throw err
+    }
+}
+
 ////////////////////////////////
 
 function normalize_url(url) {
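
The new fetch_http2 above keeps one HTTP/2 session per origin, falls back to the global fetch for non-https URLs and for HTTP/2-specific connection errors, and resolves to a fetch-like response (status, headers, a ReadableStream body, and text/json/arrayBuffer helpers). braid-http is pointed at it via the braid_fetch.set_fetch call at the top of the file. A usage sketch, not part of the package, against a hypothetical URL:

    // Sketch: calling fetch_http2 directly. Headers are given as [name, value]
    // pairs because the implementation spreads Object.fromEntries(options.headers).
    async function demo() {
        var res = await fetch_http2('https://example.com/some-page', {
            method: 'GET',
            headers: [['accept', 'text/plain']],
        })
        console.log(res.status, res.headers.get('content-type'))
        console.log((await res.text()).slice(0, 80))
    }
    demo().catch(console.error)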
package/package.json CHANGED
@@ -1,12 +1,12 @@
 {
   "name": "braidfs",
-  "version": "0.0.97",
+  "version": "0.0.100",
   "description": "braid technology synchronizing files and webpages",
   "author": "Braid Working Group",
   "repository": "braid-org/braidfs",
   "homepage": "https://braid.org",
   "dependencies": {
-    "braid-http": "^1.3.75",
+    "braid-http": "^1.3.76",
     "braid-text": "^0.2.30",
     "chokidar": "^3.6.0"
   },