braidfs 0.0.125 → 0.0.126
This diff shows the changes between publicly released versions of the package as they appear in the supported public registries. It is provided for informational purposes only.
- package/index.js +47 -330
- package/package.json +2 -2
package/index.js
CHANGED
@@ -17,7 +17,7 @@ var sync_base = `${require('os').homedir()}/http`
 var argv = process.argv.slice(2)
 var sync_base_index = argv.indexOf('--sync-base')
 if (sync_base_index !== -1 && sync_base_index < argv.length - 1) {
-sync_base = argv[sync_base_index + 1]
+sync_base = require('path').resolve(argv[sync_base_index + 1])
 // Remove the --sync-base and its value from argv
 argv.splice(sync_base_index, 2)
 console.log(`[Testing mode] Using sync_base: ${sync_base}`)
@@ -410,8 +410,7 @@ async function sync_url(url) {
 if (!sync_url.cache[path]) {
 var self = {url},
 freed = false,
-aborts = new Set()
-braid_text_get_options = null
+aborts = new Set()
 var wait_promise = Promise.resolve()
 var wait_on = p => {
 wait_promise = wait_promise.then(() => p)
@@ -429,13 +428,7 @@ async function sync_url(url) {
 await self.disconnect?.()
 await wait_promise

-
-for (let f of await braid_text.get_files_for_key(url)) {
-try {
-console.log(`trying to delete: ${f}`)
-await require('fs').promises.unlink(f)
-} catch (e) {}
-}
+await braid_text.delete(url)

 try {
 console.log(`trying to delete: ${meta_path}`)
@@ -712,12 +705,8 @@ async function sync_url(url) {
 return await init_binary_sync()
 }

-var resource = await braid_text.get_resource(url)
-if (freed) return
-
 self.peer = Math.random().toString(36).slice(2)
 self.local_edit_counter = 0
-self.fork_point = null
 var file_last_version = null,
 file_last_digest = null
 self.file_last_text = null
@@ -787,8 +776,7 @@ async function sync_url(url) {
 version: file_last_version,
 digest: file_last_digest,
 peer: self.peer,
-local_edit_counter: self.local_edit_counter
-fork_point: self.fork_point
+local_edit_counter: self.local_edit_counter
 } = Array.isArray(meta) ? { version: meta } : meta)

 if (!self.peer) self.peer = Math.random().toString(36).slice(2)
@@ -845,8 +833,7 @@ async function sync_url(url) {
 version: file_last_version,
 digest: sha256(self.file_last_text),
 peer: self.peer,
-local_edit_counter: self.local_edit_counter
-fork_point: self.fork_point
+local_edit_counter: self.local_edit_counter
 })))
 }

@@ -968,333 +955,63 @@ async function sync_url(url) {
 file_loop_pump_lock--
 }

-
-self.fork_point = extend_frontier(self.fork_point, version, parents)
-self.signal_file_needs_writing(true)
-}
-
-function extend_frontier(frontier, version, parents) {
-// special case:
-// if current frontier has all parents,
-// then we can just remove those
-// and add version
-var frontier_set = new Set(frontier)
-if (parents.length &&
-parents.every(p => frontier_set.has(p))) {
-parents.forEach(p => frontier_set.delete(p))
-for (var event of version) frontier_set.add(event)
-frontier = [...frontier_set.values()]
-} else {
-// full-proof approach..
-var looking_for = frontier_set
-for (var event of version) looking_for.add(event)
-
-frontier = []
-var shadow = new Set()
-
-var bytes = resource.doc.toBytes()
-var [_, events, parentss] = braid_text.dt_parse([...bytes])
-for (var i = events.length - 1; i >= 0 && looking_for.size; i--) {
-var e = events[i].join('-')
-if (looking_for.has(e)) {
-looking_for.delete(e)
-if (!shadow.has(e)) frontier.push(e)
-shadow.add(e)
-}
-if (shadow.has(e))
-parentss[i].forEach(p => shadow.add(p.join('-')))
-}
-}
-return frontier.sort()
-}
-
-var waitTime = 1
-var last_connect_timer = null
-
-if (is_external_link) connect()
-async function connect() {
-if (freed) return
-if (last_connect_timer) return
-// console.log(`connecting to ${url}`)
-
+function start_sync() {
 var closed = false
-var
+var ac = new AbortController()
+aborts.add(ac)
+
 self.disconnect = async () => {
 if (closed) return
 closed = true
 reconnect_rate_limiter.on_diss(url)
 for (var a of aborts) a.abort()
 aborts.clear()
-if (braid_text_get_options) await braid_text.forget(url, braid_text_get_options)
-braid_text_get_options = null
-}
-self.reconnect = connect
-
-await prev_disconnect?.()
-if (freed || closed) return
-
-await reconnect_rate_limiter.get_turn(url)
-if (freed || closed) return
-
-function retry(e) {
-if (freed || closed) return
-var p = self.disconnect()
-
-var delay = waitTime * (config.retry_delay_ms ?? 1000)
-console.log(`reconnecting in ${(delay / 1000).toFixed(2)}s: ${url} after error: ${e}`)
-last_connect_timer = setTimeout(async () => {
-await p
-last_connect_timer = null
-connect()
-}, delay)
-waitTime = Math.min(waitTime + 1, 3)
 }

-
-
-
-
-
-
-
-var a = new AbortController()
-aborts.add(a)
-return await braid_fetch(url, {
-...params,
-signal: a.signal,
-headers: {
-...params.headers,
-"Merge-Type": "dt",
-"Content-Type": 'text/plain',
-...(x => x && {Cookie: x})(config.cookies?.[new URL(url).hostname])
-},
-})
-} catch (e) { retry(e) }
-}
-
-async function send_out(stuff) {
-if (freed || closed) return
-
-console.log(`sending to ${url}`)
-console.log(JSON.stringify(stuff, null, 4).slice(0, 1000))
-
-var r = await my_fetch({ method: "PUT", ...stuff })
-if (freed || closed) return
-
-// the server has acknowledged this version,
-// so add it to the fork point
-if (r.ok) self.update_fork_point(stuff.version, stuff.parents)
-
-// if we're not authorized,
-else if (r.status == 401 || r.status == 403) {
-// and it's one of our versions (a local edit),
-if (self.peer === braid_text.decode_version(stuff.version[0])[0]) {
-// then revert it
-console.log(`access denied: reverting local edits`)
-unsync_url(url)
-sync_url(url)
-}
-}
-
-// on other errors, restart the connection
-else retry(new Error(`unexpected PUT status: ${r.status}`))
-}
-
-async function find_fork_point() {
-if (freed || closed) return
-// console.log(`[find_fork_point] url: ${url}`)
-
-// see if remote has the fork point
-if (self.fork_point) {
-var r = await my_fetch({
-method: "HEAD",
-version: self.fork_point
-})
-if (freed || closed) return
-
-if (!r.ok && r.status !== 309 && r.status !== 500) return retry(new Error(`unexpected HEAD status: ${r.status}`))
-
-if (r.ok) {
-// console.log(`[find_fork_point] "${url.split('/').pop()}" has our latest fork point, hooray!`)
-return
-}
-}
-
-// otherwise let's binary search for new fork point..
-var bytes = resource.doc.toBytes()
-var [_, events, __] = braid_text.dt_parse([...bytes])
-events = events.map(x => x.join('-'))
-
-var min = -1
-var max = events.length
-self.fork_point = []
-while (min + 1 < max) {
-var i = Math.floor((min + max)/2)
-var version = [events[i]]
-
-// console.log(`min=${min}, max=${max}, i=${i}, version=${version}`)
-
-//var st = Date.now()
-var r = await my_fetch({ method: "HEAD", version })
-if (freed || closed) return
-//console.log(`fetched in ${Date.now() - st}`)
-
-if (!r.ok && r.status !== 309 && r.status !== 500) return retry(new Error(`unexpected HEAD status: ${r.status}`))
-
-if (r.ok) {
-min = i
-self.fork_point = version
-} else max = i
-}
-// console.log(`[find_fork_point] settled on: ${JSON.stringify(self.fork_point)}`)
-self.signal_file_needs_writing(true)
-}
-
-await find_fork_point()
-if (freed || closed) return
-
-await send_new_stuff()
-if (freed || closed) return
-
-var a = new AbortController()
-aborts.add(a)
-var res = await braid_fetch(url, {
-signal: a.signal,
-headers: {
-"Merge-Type": "dt",
-'accept-encoding': 'updates(dt)',
-Accept: 'text/plain',
-...(x => x && {Cookie: x})(config.cookies?.[new URL(url).hostname]),
-},
-subscribe: true,
-heartbeats: 120,
-parents: self.fork_point,
-peer: self.peer
-})
-if (freed || closed) return
-
-if (res.status < 200 || res.status >= 300) return retry(new Error(`unexpected status: ${res.status}`))
-
-if (res.status !== 209)
-return log_error(`Can't sync ${url} -- got bad response ${res.status} from server (expected 209)`)
-
-reconnect_rate_limiter.on_conn(url)
-
-self.file_read_only = res.headers.get('editable') === 'false'
-self.signal_file_needs_writing()
-
-console.log(`connected to ${url}${self.file_read_only ? ' (readonly)' : ''}`)
-
-initial_connect_done()
-res.subscribe(async update => {
-if (freed || closed) return
-
-if (!update.status) {
-// console.log(`got initial update about ${url}`)
-
-await braid_text.put(resource, {
-body: update.body,
-transfer_encoding: 'dt'
-})
-if (freed || closed) return
-
-self.update_fork_point(JSON.parse(`[${res.headers.get('current-version')}]`), self.fork_point)
-self.signal_file_needs_writing()
-return
-}
-
-console.log(`update from ${url}`)
-
-if (update.body) update.body = update.body_text
-if (update.patches) for (let p of update.patches) p.content = p.content_text
-
-// console.log(`update: ${JSON.stringify(update, null, 4)}`)
-if (update.version.length === 0) return
-if (update.version.length !== 1) throw 'unexpected'
-
-await wait_on(braid_text.put(url, { ...update, peer: self.peer, merge_type: 'dt' }))
-if (freed || closed) return
-
-// the server is giving us this version,
-// so it must have it,
-// so let's add it to our fork point
-self.update_fork_point(update.version, update.parents)
-
+// Subscribe to local changes to trigger file writes
+braid_text.get(url, {
+signal: ac.signal,
+peer: self.peer,
+merge_type: 'dt',
+subscribe: () => {
+if (freed) return
 self.signal_file_needs_writing()
-}
-
-// send it stuff we have but it doesn't't
-async function send_new_stuff() {
-if (freed || closed) return
-var q = []
-var in_flight = new Map()
-var max_in_flight = 10
-var send_pump_lock = 0
-
-async function send_pump() {
-send_pump_lock++
-if (send_pump_lock > 1) return
-try {
-if (freed || closed) return
-if (in_flight.size >= max_in_flight) return
-if (!q.length) {
-var frontier = self.fork_point
-for (var u of in_flight.values())
-frontier = extend_frontier(frontier, u.version, u.parents)
-
-var options = {
-parents: frontier,
-merge_type: 'dt',
-peer: self.peer,
-subscribe: u => u.version.length && q.push(u)
-}
-await braid_text.get(url, options)
-await braid_text.forget(url, options)
-}
-while (q.length && in_flight.size < max_in_flight) {
-let u = q.shift()
-in_flight.set(u.version[0], u);
-(async () => {
-await initial_connect_promise
-if (freed || closed) return
-await send_out({...u, peer: self.peer})
-if (freed || closed) return
-in_flight.delete(u.version[0])
-setTimeout(send_pump, 0)
-})()
-}
-} finally {
-var retry = send_pump_lock > 1
-send_pump_lock = 0
-if (retry) setTimeout(send_pump, 0)
-}
-}
+}
+})

-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
+// Use braid_text.sync for bidirectional sync with the remote URL
+if (is_external_link) braid_text.sync(url, new URL(url), {
+signal: ac.signal,
+headers: {
+'Content-Type': 'text/plain',
+...(x => x && { Cookie: x })(config.cookies?.[new URL(url).hostname])
+},
+on_pre_connect: () => reconnect_rate_limiter.get_turn(url),
+on_res: res => {
+if (freed) return
+reconnect_rate_limiter.on_conn(url)
+self.file_read_only = res.headers.get('editable') === 'false'
+console.log(`connected to ${url}${self.file_read_only ? ' (readonly)' : ''}`)
+},
+on_unauthorized: async () => {
+console.log(`access denied: reverting local edits`)
+unsync_url(url)
+//await sync_url.chain
+sync_url(url)
+},
+on_disconnect: () => {
+reconnect_rate_limiter.on_diss(url)
 }
-}
+})
 }

-
-
-
-
-}
+self.reconnect = () => {
+for (var a of aborts) a.abort()
+aborts.clear()
+start_sync()
+}

+start_sync()
 return self
 }
 return await sync_url.cache[url]
package/package.json
CHANGED
@@ -1,13 +1,13 @@
 {
 "name": "braidfs",
-"version": "0.0.125",
+"version": "0.0.126",
 "description": "braid technology synchronizing files and webpages",
 "author": "Braid Working Group",
 "repository": "braid-org/braidfs",
 "homepage": "https://braid.org",
 "dependencies": {
 "braid-http": "~1.3.83",
-"braid-text": "~0.2.
+"braid-text": "~0.2.93",
 "braid-blob": "~0.0.30",
 "chokidar": "^5.0.0"
 },