braidfs 0.0.116 → 0.0.118
This diff shows the changes between publicly released package versions as they appear in their respective public registries. It is provided for informational purposes only.
- package/index.js +38 -30
- package/package.json +1 -1
package/index.js
CHANGED
@@ -114,7 +114,7 @@ async function main() {
 process.on("uncaughtException", (x) => console.log(`uncaughtException: ${x.stack}`))
 require('http').createServer(async (req, res) => {
 try {
-console.log(`${req.method} ${req.url}`)
+// console.log(`${req.method} ${req.url}`)

 if (req.url === '/favicon.ico') return

@@ -159,6 +159,8 @@ async function main() {
 var patches = diff(parent_text, text)

 if (patches.length) {
+console.log(`plugin edited ${path}`)
+
 sync.local_edit_counter += patches_to_code_points(patches, parent_text)
 var version = [sync.peer + "-" + (sync.local_edit_counter - 1)]
 await braid_text.put(sync.url, { version, parents, patches, merge_type: 'dt' })
@@ -231,7 +233,7 @@ async function main() {
 })
 sync_url('.braidfs/errors')

-console.log({ sync: config.sync })
+// console.log({ sync: config.sync })
 for (let x of Object.entries(config.sync)) if (x[1]) sync_url(x[0])

 watch_files()
@@ -292,7 +294,7 @@ async function watch_files() {
 watch_files.watcher = 42
 await w?.close()

-console.log('watch files..')
+// console.log('watch files..')
 watch_files.watcher = require('chokidar').watch(sync_base, {
 useFsEvents: true,
 usePolling: false,
@@ -330,16 +332,19 @@ async function scan_files() {
 scan_files.running = true
 while (scan_files.do_again) {
 scan_files.do_again = false
-var timestamp = new Date().toLocaleTimeString(
-'en-US', {minute: '2-digit', second: '2-digit', hour: '2-digit'}
-)
-console.log(`scan files.. `, timestamp)

 if (watch_files?.watcher?.options?.usePolling)
 console.log('Warning: BAD PERFORMANCE!! Filesystem using polling!')

+var st = Date.now()
+
 if (await f(sync_base))
 on_watcher_miss(`scanner picked up a change that the watcher should have gotten`, false)
+
+var timestamp = new Date().toLocaleTimeString(
+'en-US', {minute: '2-digit', second: '2-digit', hour: '2-digit'}
+)
+console.log(`scan files.. ${timestamp}. ${Date.now() - st}ms`)
 }
 scan_files.running = false

@@ -438,7 +443,7 @@ async function sync_url(url) {
 async function init() {
 if (freed) return

-console.log(`sync_url: ${url}`)
+// console.log(`sync_url: ${url}`)

 var resource = await braid_text.get_resource(url)
 if (freed) return
@@ -590,7 +595,7 @@ async function sync_url(url) {
 }

 if (file_needs_reading) {
-console.log(`reading file: ${fullpath}`)
+// console.log(`reading file: ${fullpath}`)

 file_needs_reading = false

@@ -598,7 +603,7 @@ async function sync_url(url) {
 if (!(await wait_on(file_exists(fullpath)))) {
 if (freed) return

-console.log(`file not found, creating: ${fullpath}`)
+// console.log(`file not found, creating: ${fullpath}`)

 file_needs_writing = true
 file_last_version = []
@@ -620,7 +625,7 @@ async function sync_url(url) {

 var patches = diff(self.file_last_text, text)
 if (patches.length) {
-console.log(`
+console.log(`file change in ${path}`)

 // convert from js-indicies to code-points
 self.local_edit_counter += patches_to_code_points(patches, self.file_last_text)
@@ -641,12 +646,12 @@ async function sync_url(url) {
 } else {
 add_to_version_cache(text, file_last_version)

-console.log(`no changes found in: ${fullpath}`)
+// console.log(`no changes found in: ${fullpath}`)
 if (stat_eq(stat, self.file_last_stat)) {
 if (Date.now() > (self.file_ignore_until ?? 0))
 on_watcher_miss(`expected change to: ${fullpath}`)
-else console.log(`no changes expected`)
-} else console.log('found change in file stat')
+// else console.log(`no changes expected`)
+} // else console.log('found change in file stat')
 }
 self.file_last_stat = stat
 self.file_ignore_until = Date.now() + 1000
@@ -671,7 +676,7 @@ async function sync_url(url) {
 continue
 }

-console.log(`writing file ${fullpath}`)
+// console.log(`writing file ${fullpath}`)

 add_to_version_cache(body, version)

@@ -754,7 +759,7 @@ async function sync_url(url) {
 async function connect() {
 if (freed) return
 if (last_connect_timer) return
-console.log(`connecting to ${url}`)
+// console.log(`connecting to ${url}`)

 var closed = false
 var prev_disconnect = self.disconnect
@@ -813,7 +818,8 @@ async function sync_url(url) {
 async function send_out(stuff) {
 if (freed || closed) return

-console.log(`
+console.log(`sending to ${url}`)
+// console.log(JSON.stringify(stuff, null, 4).slice(0, 1000))

 var r = await my_fetch({ method: "PUT", ...stuff })
 if (freed || closed) return
@@ -839,7 +845,7 @@ async function sync_url(url) {

 async function find_fork_point() {
 if (freed || closed) return
-console.log(`[find_fork_point] url: ${url}`)
+// console.log(`[find_fork_point] url: ${url}`)

 // see if remote has the fork point
 if (self.fork_point) {
@@ -851,7 +857,10 @@ async function sync_url(url) {

 if (!r.ok && r.status !== 309 && r.status !== 500) return retry(new Error(`unexpected HEAD status: ${r.status}`))

-if (r.ok)
+if (r.ok) {
+// console.log(`[find_fork_point] "${url.split('/').pop()}" has our latest fork point, hooray!`)
+return
+}
 }

 // otherwise let's binary search for new fork point..
@@ -866,12 +875,12 @@ async function sync_url(url) {
 var i = Math.floor((min + max)/2)
 var version = [events[i]]

-console.log(`min=${min}, max=${max}, i=${i}, version=${version}`)
+// console.log(`min=${min}, max=${max}, i=${i}, version=${version}`)

-var st = Date.now()
+//var st = Date.now()
 var r = await my_fetch({ method: "HEAD", version })
 if (freed || closed) return
-console.log(`fetched in ${Date.now() - st}`)
+//console.log(`fetched in ${Date.now() - st}`)

 if (!r.ok && r.status !== 309 && r.status !== 500) return retry(new Error(`unexpected HEAD status: ${r.status}`))

@@ -880,7 +889,7 @@ async function sync_url(url) {
 self.fork_point = version
 } else max = i
 }
-console.log(`[find_fork_point] settled on: ${JSON.stringify(self.fork_point)}`)
+// console.log(`[find_fork_point] settled on: ${JSON.stringify(self.fork_point)}`)
 self.signal_file_needs_writing(true)
 }

@@ -912,8 +921,7 @@ async function sync_url(url) {
 if (res.status !== 209)
 return log_error(`Can't sync ${url} -- got bad response ${res.status} from server (expected 209)`)

-console.log(`connected to ${url}
-+ `(editable: ${res.headers.get('editable')})`)
+console.log(`connected to ${url}${res.headers.get('editable') !== 'true' ? ' (readonly)' : ''}`)

 reconnect_rate_limiter.on_conn(url)

@@ -925,7 +933,7 @@ async function sync_url(url) {
 if (freed || closed) return

 if (!update.status) {
-console.log(`got initial update about ${url}`)
+// console.log(`got initial update about ${url}`)

 // manually apply the dt bytes..
 // ..code bits taken from braid-text put..
@@ -952,7 +960,7 @@ async function sync_url(url) {
 return
 }

-console.log(`
+console.log(`update from ${url}`)

 if (update.body) update.body = update.body_text
 if (update.patches) for (let p of update.patches) p.content = p.content_text
@@ -1109,7 +1117,7 @@ function ReconnectRateLimiter(get_wait_time) {
 // If host has connections, give turn immediately
 if (self.conns.has(host)) return

-console.log(`throttling reconn to ${url} (no conns yet to ${self.conns.size ? host : 'anything'})`)
+// console.log(`throttling reconn to ${url} (no conns yet to ${self.conns.size ? host : 'anything'})`)

 if (!self.host_to_q.has(host)) {
 var turns = []
@@ -1234,7 +1242,7 @@ async function fetch_http2(url, options = {}) {
 })
 } catch (err) {
 if (err.code?.includes("HTTP2") || err.message?.includes("HTTP/2")) {
-console.log("HTTP/2 failed, falling back to HTTP/1.1:", err.message)
+// console.log("HTTP/2 failed, falling back to HTTP/1.1:", err.message)
 return fetch(url, options)
 }
 throw err
@@ -1348,7 +1356,7 @@ async function is_read_only(fullpath) {
 }

 async function set_read_only(fullpath, read_only) {
-console.log(`set_read_only(${fullpath}, ${read_only})`)
+// console.log(`set_read_only(${fullpath}, ${read_only})`)

 if (require('os').platform() === "win32") {
 await new Promise((resolve, reject) => {