braidfs 0.0.130 → 0.0.132
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/index.js +75 -18
- package/package.json +1 -1
package/index.js
CHANGED
@@ -186,6 +186,9 @@ async function main() {
             var version = [sync.peer + "-" + (sync.local_edit_counter - 1)]
             await braid_text.put(sync.url, { version, parents, patches, merge_type: 'dt' })
 
+            // DEBUGGING HACK ID: L04LPFHQ1M -- INVESTIGATING DISCONNECTS
+            require('fs').appendFileSync(investigating_disconnects_log, `${Date.now()}:${sync.url} -- plugin edited (${sync.investigating_disconnects_thinks_connected})\n`)
+
             // may be able to do this more efficiently.. we want to make sure we're capturing a file write that is after our version was written.. there may be a way we can avoid calling file_needs_writing here
             await new Promise(done => {
                 sync.file_written_cbs.push(done)
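This is the first of several temporary instrumentation points tagged L04LPFHQ1M in this release: every plugin-driven edit now appends a timestamped line to investigating_disconnects_log recording whether the sync currently believes it is connected. A hypothetical way to watch that log while reproducing a disconnect, assuming nothing beyond the line format written above (investigating_disconnects_log is whatever path braidfs assigns it):

    // Hypothetical watcher: print each new entry of the investigation log
    // as braidfs appends to it. Only the line format from the diff is assumed.
    var fs = require('fs')
    fs.watchFile(investigating_disconnects_log, { interval: 500 }, () => {
        var lines = fs.readFileSync(investigating_disconnects_log, 'utf8').trim().split('\n')
        console.log('latest:', lines[lines.length - 1])
    })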
@@ -426,6 +429,8 @@ async function sync_url(url) {
     if (!sync_url.cache) sync_url.cache = {}
     if (!sync_url.chain) sync_url.chain = Promise.resolve()
     if (!sync_url.cache[path]) {
+        // console.log(`sync_url: ${url}`)
+
         var self = {url},
             freed = false,
             aborts = new Set()
@@ -458,16 +463,62 @@ async function sync_url(url) {
                 await require('fs').promises.unlink(fp)
             } catch (e) {}
         }
-
+
+        sync_url.cache[path] = (async () => {
+            self.merge_type = await detect_merge_type()
+            if (self.merge_type === 'dt') {
+                return await (sync_url.chain = sync_url.chain.then(init))
+            } else if (self.merge_type === 'aww') {
+                return await (sync_url.chain = sync_url.chain.then(init_binary_sync))
+            } else throw new Error(`unknown merge-type: ${self.merge_type}`)
+        })()
+    }
+    return
+
+    async function detect_merge_type() {
+        // special case for .braidfs/config and .braidfs/error
+        if (url.startsWith('.braidfs/')) return 'dt'
+
+        try {
+            var meta = JSON.parse(await require('fs').promises.readFile(meta_path, 'utf8'))
+
+            // optimization for old braidfs versions
+            if (typeof meta.local_edit_counter === 'number') return 'dt'
+
+            if (meta.merge_type) return meta.merge_type
+        } catch (e) {}
+        if (freed) return
+
+        var res = await braid_fetch(url, {
+            method: 'HEAD',
+            retry: () => true,
+            // braid_fetch will await this function on each reconnect when retrying
+            parents: async () => reconnect_rate_limiter.get_turn(url),
+            // version needed to force Merge-Type return header
+            version: [],
+            headers: {
+                // needed for braid.org routing
+                Accept: 'text/plain',
+                // in case it supports dt, so it doesn't give us "simpleton"
+                'Merge-Type': 'dt',
+            }
+        })
+
+        var merge_type = res.headers.get('merge-type')
+        if (merge_type) return merge_type
+
+        throw `failed to get merge type for ${url}`
     }
 
     async function init_binary_sync() {
+        await ensure_path_stuff()
         if (freed) return
 
         console.log(`init_binary_sync: ${url}`)
 
         async function save_meta() {
             await require('fs').promises.writeFile(meta_path, JSON.stringify({
+                merge_type: self.merge_type,
                 peer: self.peer,
                 version: self.version,
                 file_mtimeNs_str: self.file_mtimeNs_str
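The new detect_merge_type tries three sources in order: a hard-coded 'dt' for files under .braidfs/, the per-file meta file on disk, and finally a HEAD request whose Merge-Type response header names the server's merge algorithm ('dt' routes to the existing text/CRDT init, 'aww' to the new init_binary_sync). A standalone sketch of that network probe, assuming the braid-http client that braidfs builds on, and leaving out the retry and reconnect-rate-limiter wiring shown above:

    // Minimal sketch, not braidfs's actual code path: ask a Braid server
    // which merge algorithm it speaks for a given resource.
    var { fetch: braid_fetch } = require('braid-http')

    async function probe_merge_type(url) {
        var res = await braid_fetch(url, {
            method: 'HEAD',
            // an explicit (empty) version forces the Merge-Type response header
            version: [],
            headers: { Accept: 'text/plain', 'Merge-Type': 'dt' }
        })
        var merge_type = res.headers.get('merge-type')
        if (!merge_type) throw new Error(`failed to get merge type for ${url}`)
        return merge_type   // expected: 'dt' or 'aww'
    }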
@@ -697,12 +748,10 @@ async function sync_url(url) {
 
         return self
     }
-
-    async function init() {
-        if (freed) return
-
-        // console.log(`sync_url: ${url}`)
 
+    async function ensure_path_stuff() {
+        if (freed) return
+
         // if we're accessing /blah/index, it will be normalized to /blah,
         // but we still want to create a directory out of blah in this case
         if (wasnt_normal && !(await is_dir(fullpath))) {
@@ -713,15 +762,11 @@ async function sync_url(url) {
 
         await ensure_path(require("path").dirname(fullpath))
         if (freed) return
-
-
-
-
-
-        // It notably does NOT handle `.braidfs/set_version/` and `.braidfs/get_version/` correctly!
-        // Search ` sync_url.cache[` to see how it's all handled.
-        return await init_binary_sync()
-    }
+    }
+
+    async function init() {
+        await ensure_path_stuff()
+        if (freed) return
 
         self.peer = Math.random().toString(36).slice(2)
         self.local_edit_counter = 0
@@ -738,6 +783,9 @@ async function sync_url(url) {
         // hack: remvoe in future
         var old_meta_fork_point = null
 
+        // DEBUGGING HACK ID: L04LPFHQ1M -- INVESTIGATING DISCONNECTS
+        self.investigating_disconnects_thinks_connected = false
+
         // store a recent mapping of content-hashes to their versions,
         // to support the command line: braidfs editing filename < file
         self.hash_to_version_cache = new Map()
@@ -799,8 +847,12 @@ async function sync_url(url) {
             !({
                 version: file_last_version,
                 digest: file_last_digest,
-
-
+
+                // DEBUGGING HACK ID: L04LPFHQ1M -- INVESTIGATING DISCONNECTS
+                // create new peer to eliminate this as a potential issue for now:
+                // peer: self.peer,
+                // local_edit_counter: self.local_edit_counter,
+
                 fork_point: old_meta_fork_point
             } = Array.isArray(meta) ? { version: meta } : meta)
 
@@ -858,6 +910,7 @@ async function sync_url(url) {
         async function write_meta_file() {
             if (freed) return
             await wait_on(require('fs').promises.writeFile(meta_path, JSON.stringify({
+                merge_type: self.merge_type,
                 version: file_last_version,
                 digest: sha256(self.file_last_text),
                 peer: self.peer,
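With merge_type now written by both save_meta (binary path, new line 521 above) and write_meta_file (text path), the merge type is persisted per file, and it is among the first things detect_merge_type checks, so the HEAD probe should only be needed for files whose meta predates this release. An illustrative sketch of the text-path meta shape implied by this writeFile call (values invented; fields past peer are not shown in this hunk):

    // Illustrative only -- field names come from the JSON.stringify call above:
    var example_meta = {
        merge_type: 'dt',               // new: lets detect_merge_type() skip the network probe
        version: ['k3j9x0a1b-41'],      // file_last_version
        digest: '<sha256 of file text>',
        peer: 'k3j9x0a1b',
        // ...remaining fields continue beyond this hunk
    }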
@@ -912,6 +965,9 @@ async function sync_url(url) {
             await wait_on(braid_text.put(url, { version, parents, patches, merge_type: 'dt' }))
             if (freed) return
 
+            // DEBUGGING HACK ID: L04LPFHQ1M -- INVESTIGATING DISCONNECTS
+            require('fs').appendFileSync(investigating_disconnects_log, `${Date.now()}:${url} -- file edited (${self.investigating_disconnects_thinks_connected})\n`)
+
             await write_meta_file()
             if (freed) return
         } else {
@@ -1037,6 +1093,7 @@ async function sync_url(url) {
             on_res: res => {
                 // DEBUGGING HACK ID: L04LPFHQ1M -- INVESTIGATING DISCONNECTS
                 do_investigating_disconnects_log(url, `sync.on_res status:${res?.status}`)
+                self.investigating_disconnects_thinks_connected = res?.status
 
                 if (freed) return
                 reconnect_rate_limiter.on_conn(url)
@@ -1055,6 +1112,7 @@ async function sync_url(url) {
             on_disconnect: () => {
                 // DEBUGGING HACK ID: L04LPFHQ1M -- INVESTIGATING DISCONNECTS
                 do_investigating_disconnects_log(url, `sync.on_disconnect`)
+                self.investigating_disconnects_thinks_connected = false
 
                 return reconnect_rate_limiter.on_diss(url)
             }
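These two hunks complete the lifecycle of self.investigating_disconnects_thinks_connected: initialized to false, set to the HTTP status on each subscription response, and reset to false on disconnect, so every appended log line captures the connection state the process believed it was in at edit time. A hypothetical helper for summarizing the resulting log offline, assuming only the timestamp:url -- event (state) format the appendFileSync calls write:

    // Hypothetical log summarizer: list edits that happened while the sync
    // believed it was disconnected (state logged as "false").
    var entries = require('fs').readFileSync(investigating_disconnects_log, 'utf8')
        .split('\n').filter(Boolean)
    for (var entry of entries) {
        var m = entry.match(/^(\d+):(.+) -- (.+) \((.+)\)$/)
        if (!m) continue
        var [, ts, url, event, state] = m
        if (state === 'false')
            console.log(new Date(+ts).toISOString(), url, event)
    }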
@@ -1073,7 +1131,6 @@ async function sync_url(url) {
         start_sync()
         return self
     }
-    return await sync_url.cache[url]
 }
 
 async function ensure_path(path) {