braidfs 0.0.80 → 0.0.82
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/index.js +83 -57
- package/package.json +2 -2
package/index.js
CHANGED
```diff
@@ -149,10 +149,9 @@ async function main() {
             var patches = diff(parent_text, text)

             if (patches.length) {
-
-                var
-
-                await braid_text.put(proxy.url, { version, parents, patches, peer, merge_type: 'dt' })
+                proxy.local_edit_counter += patches_to_code_points(patches, parent_text)
+                var version = [proxy.peer + "-" + (proxy.local_edit_counter - 1)]
+                await braid_text.put(proxy.url, { version, parents, patches, merge_type: 'dt' })

                 // may be able to do this more efficiently.. we want to make sure we're capturing a file write that is after our version was written.. there may be a way we can avoid calling file_needs_writing here
                 var stat = await new Promise(done => {
```
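The replacement lines above switch local edits to a peer-plus-counter version scheme: `proxy.local_edit_counter` advances by the number of code points a batch of patches touches, and the new version ID is `proxy.peer + "-" + (proxy.local_edit_counter - 1)`. The helper below is only a rough sketch of what a function like `patches_to_code_points` might compute, using a simplified `{ start, end, insert }` patch shape that is not the package's actual patch format.

```js
// Rough sketch (not braidfs's helper): advance an edit counter by the number
// of code points deleted plus inserted, using a simplified patch shape
// { start, end, insert } with js string indices into parentText.
function patchesToCodePointsSketch(patches, parentText) {
    let n = 0
    for (const p of patches) {
        n += [...parentText.slice(p.start, p.end)].length  // code points deleted
        n += [...p.insert].length                          // code points inserted
    }
    return n
}

console.log(patchesToCodePointsSketch(
    [{ start: 6, end: 11, insert: '🌎' }], 'hello world'))  // 5 deleted + 1 inserted = 6
```

With the counter advanced, the local version ID becomes `` `${peer}-${counter - 1}` ``, e.g. "k3f9x2-41", matching the `proxy.peer + "-" + ...` expression in the hunk above.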
```diff
@@ -181,7 +180,6 @@ async function main() {
     if (!config.allow_remote_access) console.log('!! only accessible from localhost !!')

     proxy_url('.braidfs/config').then(() => {
-        let peer = Math.random().toString(36).slice(2)
         braid_text.get('.braidfs/config', {
             subscribe: async update => {
                 let prev = config
```
```diff
@@ -218,8 +216,7 @@ async function main() {
                     if (x !== '') console.log(`warning: config file is currently invalid.`)
                     return
                 }
-            },
-            peer
+            }
         })
     })
     proxy_url('.braidfs/errors')
```
```diff
@@ -427,9 +424,9 @@ async function proxy_url(url) {
         return p
     }

-
-
-
+    self.peer = Math.random().toString(36).slice(2)
+    self.local_edit_counter = 0
+    var file_last_version = null,
         file_last_digest = null
     self.file_last_text = null
     self.file_last_stat = null
```
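The added lines give each proxied URL its own actor ID (`self.peer`) and a running `local_edit_counter` on the proxy object, replacing the single `peer` that was generated in `main()` (removed in the @@ -181 hunk above). A minimal sketch of that per-proxy state, with the rest of `proxy_url`'s setup omitted; these defaults get overwritten from the meta file when one exists (see the @@ -504 hunk below).

```js
// Minimal sketch of the per-proxy state introduced here (names taken from
// the diff; the surrounding proxy_url machinery is omitted).
function init_proxy_state(self) {
    self.peer = Math.random().toString(36).slice(2)  // random actor ID, e.g. "k3f9x2"
    self.local_edit_counter = 0                      // code points edited locally so far
    return self
}

console.log(init_proxy_state({}))  // => { peer: '...', local_edit_counter: 0 }
```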
```diff
@@ -485,7 +482,7 @@ async function proxy_url(url) {

             let a = new AbortController()
             aborts.add(a)
-            await braid_fetch(url, {
+            var r = await braid_fetch(url, {
                 signal: a.signal,
                 headers: {
                     "Merge-Type": "dt",
```
```diff
@@ -497,6 +494,16 @@ async function proxy_url(url) {
                 ...stuff
             })
             aborts.delete(a)
+            // if we're not authorized,
+            if (r.status == 401 || r.status == 403) {
+                // and it's one of our versions (a local edit),
+                if (self.peer === braid_text.decode_version(stuff.version[0])[0]) {
+                    // then revert it
+                    console.log(`access denied: reverting local edits`)
+                    unproxy_url(url)
+                    proxy_url(url)
+                }
+            }
         } catch (e) {
             if (e?.name !== "AbortError") console.log(e)
         }
```
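The block added above reacts to a 401/403 on the outgoing request: if the rejected version was authored by this proxy's own `self.peer`, the local edit is discarded by calling `unproxy_url(url)` and then `proxy_url(url)` to resubscribe from the server's state. The check below is a hedged sketch of that authorship test, assuming the `actor-seq` version strings used elsewhere in this diff; the package itself parses versions with `braid_text.decode_version`.

```js
// Hedged sketch of the authorship check, assuming "actor-seq" version strings
// such as "k3f9x2-41"; braidfs itself relies on braid_text.decode_version.
function authoredLocallySketch(version, peer) {
    const actor = version.slice(0, version.lastIndexOf('-'))  // "k3f9x2"
    return actor === peer                                     // true -> this proxy wrote it
}

console.log(authoredLocallySketch('k3f9x2-41', 'k3f9x2'))   // true: safe to revert locally
console.log(authoredLocallySketch('otherpeer-7', 'k3f9x2')) // false: leave it alone
```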
```diff
@@ -504,6 +511,53 @@ async function proxy_url(url) {
         finish_something()
     }

+    if (!start_something()) return
+    await within_file_lock(fullpath, async () => {
+        var fullpath = await get_fullpath()
+        if (await require('fs').promises.access(meta_path).then(
+            () => 1, () => 0)) {
+            // meta file exists
+            let meta = JSON.parse(await require('fs').promises.readFile(meta_path, { encoding: 'utf8' }))
+            // destructure stuff from the meta file
+            !({
+                version: file_last_version,
+                digest: file_last_digest,
+                peer: self.peer,
+                local_edit_counter: self.local_edit_counter
+            } = Array.isArray(meta) ? { version: meta } : meta)
+
+            if (!self.peer) self.peer = Math.random().toString(36).slice(2)
+            if (!self.local_edit_counter) self.local_edit_counter = 0
+
+            try {
+                self.file_last_text = (await braid_text.get(url, { version: file_last_version })).body
+            } catch (e) {
+                // the version from the meta file doesn't exist..
+                if (fullpath === braidfs_config_file) {
+                    // in the case of the config file,
+                    // we want to load the current file contents,
+                    // which we can acheive by setting file_last_version
+                    // to the latest
+                    console.log(`WARNING: there was an issue with the config file, and it is reverting to the contents at: ${braidfs_config_file}`)
+                    var x = await braid_text.get(url, {})
+                    file_last_version = x.version
+                    self.file_last_text = x.body
+                    file_last_digest = sha256(self.file_last_text)
+                } else throw new Error(`sync error: version not found: ${file_last_version}`)
+            }
+
+            file_needs_writing = !v_eq(file_last_version, (await braid_text.get(url, {})).version)
+
+            // sanity check
+            if (file_last_digest && sha256(self.file_last_text) != file_last_digest) throw new Error('file_last_text does not match file_last_digest')
+        } else if (await require('fs').promises.access(fullpath).then(() => 1, () => 0)) {
+            // file exists, but not meta file
+            file_last_version = []
+            self.file_last_text = ''
+        }
+    })
+    finish_something()
+
     file_loop_pump()
     async function file_loop_pump() {
         if (file_loop_pump_lock) return
```
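The startup block added above reads the per-file meta file once, outside the pump loop, and now restores `peer` and `local_edit_counter` alongside `version` and `digest`. The `Array.isArray(meta) ? { version: meta } : meta` guard keeps older meta files readable: the oldest format was a bare version array, the previous one an object with only `version` and `digest`. The shapes below are illustrative only; the field values are made up.

```js
// Illustrative meta-file shapes (values are made up, not from a real sync).
const meta_oldest = ["k3f9x2-17"]                                   // bare version array
const meta_prev   = { version: ["k3f9x2-17"], digest: "<sha256>" }  // version + digest
const meta_now    = {                                               // 0.0.82 adds two fields
    version: ["k3f9x2-41"],
    digest: "<sha256>",
    peer: "k3f9x2",
    local_edit_counter: 42
}

// Backward-compatible normalization, as in the diff:
const normalize = meta => Array.isArray(meta) ? { version: meta } : meta
console.log(normalize(meta_oldest))  // { version: ["k3f9x2-17"] }
console.log(normalize(meta_now))     // unchanged
```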
```diff
@@ -514,41 +568,6 @@ async function proxy_url(url) {
         await within_file_lock(fullpath, async () => {
             var fullpath = await get_fullpath()

-            if (file_last_version === null) {
-                if (await require('fs').promises.access(meta_path).then(
-                    () => 1, () => 0)) {
-                    // meta file exists
-                    let meta = JSON.parse(await require('fs').promises.readFile(meta_path, { encoding: 'utf8' }))
-                    void ({ version: file_last_version, digest: file_last_digest } = Array.isArray(meta) ? { version: meta } : meta)
-
-                    try {
-                        self.file_last_text = (await braid_text.get(url, { version: file_last_version })).body
-                    } catch (e) {
-                        // the version from the meta file doesn't exist..
-                        if (fullpath === braidfs_config_file) {
-                            // in the case of the config file,
-                            // we want to load the current file contents,
-                            // which we can acheive by setting file_last_version
-                            // to the latest
-                            console.log(`WARNING: there was an issue with the config file, and it is reverting to the contents at: ${braidfs_config_file}`)
-                            var x = await braid_text.get(url, {})
-                            file_last_version = x.version
-                            self.file_last_text = x.body
-                            file_last_digest = sha256(self.file_last_text)
-                        } else throw new Error(`sync error: version not found: ${file_last_version}`)
-                    }
-
-                    file_needs_writing = !v_eq(file_last_version, (await braid_text.get(url, {})).version)
-
-                    // sanity check
-                    if (file_last_digest && sha256(self.file_last_text) != file_last_digest) throw new Error('file_last_text does not match file_last_digest')
-                } else if (await require('fs').promises.access(fullpath).then(() => 1, () => 0)) {
-                    // file exists, but not meta file
-                    file_last_version = []
-                    self.file_last_text = ''
-                }
-            }
-
             while (file_needs_reading || file_needs_writing) {
                 if (file_needs_reading) {
                     console.log(`reading file: ${fullpath}`)
```
```diff
@@ -578,21 +597,26 @@ async function proxy_url(url) {
                     console.log(`found changes in: ${fullpath}`)

                     // convert from js-indicies to code-points
-
+                    self.local_edit_counter += patches_to_code_points(patches, self.file_last_text)

                     self.file_last_text = text

-                    var version = [peer + "-" +
+                    var version = [self.peer + "-" + (self.local_edit_counter - 1)]
                     var parents = file_last_version
                     file_last_version = version

                     add_to_version_cache(text, version)

-                    send_out({ version, parents, patches, peer })
+                    send_out({ version, parents, patches, peer: self.peer })

-                    await braid_text.put(url, { version, parents, patches, peer, merge_type: 'dt' })
+                    await braid_text.put(url, { version, parents, patches, peer: self.peer, merge_type: 'dt' })

-                    await require('fs').promises.writeFile(meta_path, JSON.stringify({
+                    await require('fs').promises.writeFile(meta_path, JSON.stringify({
+                        version: file_last_version,
+                        digest: sha256(self.file_last_text),
+                        peer: self.peer,
+                        local_edit_counter: self.local_edit_counter
+                    }))
                 } else {
                     add_to_version_cache(text, file_last_version)

```
```diff
@@ -634,7 +658,9 @@ async function proxy_url(url) {

                 await require('fs').promises.writeFile(meta_path, JSON.stringify({
                     version: file_last_version,
-                    digest: sha256(self.file_last_text)
+                    digest: sha256(self.file_last_text),
+                    peer: self.peer,
+                    local_edit_counter: self.local_edit_counter
                 }))
             }

```
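Both meta-file write paths (the local-edit path in the @@ -578 hunk and the write path here) now persist `peer` and `local_edit_counter`. Presumably this keeps locally minted version IDs unique across restarts: if the counter started over at zero under the same peer ID, a fresh edit could reuse an ID that already names older content. A hypothetical illustration, with made-up values:

```js
// Hypothetical values showing the collision that persisting the counter avoids.
// IDs follow the diff's formula: peer + "-" + (local_edit_counter - 1), after
// the counter has been advanced by the size of the edit.
function next_id(state, edit_size) {
    state.local_edit_counter += edit_size
    return `${state.peer}-${state.local_edit_counter - 1}`
}

const restored = { peer: "k3f9x2", local_edit_counter: 42 }  // counter restored from meta
console.log(next_id(restored, 1))                            // "k3f9x2-42" -- fresh ID

const reset = { peer: "k3f9x2", local_edit_counter: 0 }      // counter lost on restart
console.log(next_id(reset, 1))                               // "k3f9x2-0"  -- reuses an old ID
```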
```diff
@@ -698,7 +724,7 @@ async function proxy_url(url) {
             let cur = await braid_text.get(url, {})
             if (cur.version.length) return cur.version
         },
-        peer
+        peer: self.peer
     }).then(x => {
         if (x.status === 209) x.subscribe(async update => {
             console.log(`got external update about ${url}`)
```
```diff
@@ -712,7 +738,7 @@ async function proxy_url(url) {

             if (!start_something()) return

-            await braid_text.put(url, { ...update, peer, merge_type: 'dt' })
+            await braid_text.put(url, { ...update, peer: self.peer, merge_type: 'dt' })


             self.signal_file_needs_writing()
```
```diff
@@ -804,11 +830,11 @@ async function proxy_url(url) {
     braid_text.get(url, braid_text_get_options = {
         parents,
         merge_type: 'dt',
-        peer,
+        peer: self.peer,
         subscribe: async (u) => {
             if (u.version.length) {
                 self.signal_file_needs_writing()
-                in_parallel(() => send_out({...u, peer}))
+                in_parallel(() => send_out({...u, peer: self.peer}))
             }
         },
     })
```
```diff
@@ -821,7 +847,7 @@ async function proxy_url(url) {
     // for config and errors file, listen for web changes
     if (!is_external_link) braid_text.get(url, braid_text_get_options = {
         merge_type: 'dt',
-        peer,
+        peer: self.peer,
         subscribe: self.signal_file_needs_writing,
     })

```
package/package.json
CHANGED
```diff
@@ -1,13 +1,13 @@
 {
   "name": "braidfs",
-  "version": "0.0.80",
+  "version": "0.0.82",
   "description": "braid technology synchronizing files and webpages",
   "author": "Braid Working Group",
   "repository": "braid-org/braidfs",
   "homepage": "https://braid.org",
   "dependencies": {
     "braid-http": "^1.3.73",
-    "braid-text": "^0.2.",
+    "braid-text": "^0.2.22",
     "chokidar": "^3.6.0"
   },
   "bin": {
```