braidfs 0.0.119 → 0.0.120
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/index.js +227 -18
- package/package.json +2 -2
package/index.js
CHANGED
@@ -4,6 +4,12 @@ var { diff_main } = require(`${__dirname}/diff.js`),
     braid_text = require("braid-text"),
     braid_fetch = require('braid-http').fetch

+// Helper function to check if a file is binary based on its extension
+function is_binary(filename) {
+    const binaryExtensions = ['.jpg', '.jpeg', '.png', '.gif', '.mp4', '.mp3', '.zip', '.tar', '.rar', '.pdf', '.doc', '.docx', '.xls', '.xlsx', '.ppt', '.pptx', '.exe', '.dll', '.so', '.dylib', '.bin', '.iso', '.img', '.bmp', '.tiff', '.svg', '.webp', '.avi', '.mov', '.wmv', '.flv', '.mkv', '.wav', '.flac', '.aac', '.ogg', '.wma', '.7z', '.gz', '.bz2', '.xz'];
+    return binaryExtensions.includes(require('path').extname(filename).toLowerCase());
+}
+
 braid_fetch.set_fetch(fetch_http2)

 var sync_base = `${require('os').homedir()}/http`,
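For illustration only, a quick sketch of how the new helper classifies paths purely by extension (the filenames are hypothetical; is_binary is the function added in the hunk above):

is_binary('photo.JPG')      // true  -- extname() output is lowercased before the lookup
is_binary('notes.md')       // false -- text files continue through the braid-text sync path
is_binary('archive.tar.gz') // true  -- extname() only sees the trailing '.gz'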
@@ -440,11 +446,228 @@ async function sync_url(url) {
         }
         sync_url.cache[path] = sync_url.chain = sync_url.chain.then(init)
     }
+
+    async function init_binary_sync() {
+        if (freed) return
+
+        console.log(`init_binary_sync: ${url}`)
+
+        self.peer = Math.random().toString(36).slice(2)
+        self.file_last_stat = null
+        self.file_read_only = null
+        var file_needs_reading = true,
+            file_needs_writing = null,
+            file_loop_pump_lock = 0
+        // self.file_written_cbs = [] // not needed
+
+        self.signal_file_needs_reading = () => {
+            if (freed) return
+            file_needs_reading = true
+            file_loop_pump()
+        }
+
+        self.signal_file_needs_writing = () => {
+            if (freed) return
+            file_needs_writing = true
+            file_loop_pump()
+        }
+
+        await within_fiber(fullpath, async () => {
+            if (freed) return
+            var fullpath = await get_fullpath()
+            if (freed) return
+
+            // Determine if file exists locally
+            var local_exists = await wait_on(require('fs').promises.access(fullpath).then(() => 1, () => 0))
+
+            // If the file exists locally, subscribe first; this will also create
+            // an early-timestamp file on server if missing. Then compare mtimes
+            // to decide whether to upload or download.
+            try {
+                var subscribeRes = await braid_fetch(url, {
+                    method: 'GET',
+                    subscribe: true,
+                    headers: {
+                        'peer': self.peer, // Needed for not downloading the file just uploaded to the server
+                        'X-Local-File-Timestamp': local_mtime_ms > 0 ? String(local_mtime_ms) : '0'
+                    }
+                })
+
+                if (subscribeRes?.ok) {
+                    self.server_subscription = subscribeRes
+
+                    // Read local mtime (or 0 if missing)
+                    var local_mtime_ms = 0
+                    if (local_exists) {
+                        try {
+                            const statResult = await wait_on(require('fs').promises.stat(fullpath));
+                            local_mtime_ms = Math.round(Number(statResult.mtimeMs));
+                        } catch (e) {}
+                    }
+
+                    // Server advertises last-modified header
+                    var server_mtime_header = subscribeRes.headers.get('last-modified-ms')
+                    var server_mtime_ms = 0
+                    if (server_mtime_header) {
+                        server_mtime_ms = Math.round(Number(server_mtime_header))
+                    }
+
+                    // If local exists and is newer than server, upload it immediately
+                    if (local_exists && local_mtime_ms > server_mtime_ms) {
+                        // console.log(local_mtime_ms)
+                        // console.log(server_mtime_ms)
+                        try {
+                            var fileData = await wait_on(require('fs').promises.readFile(fullpath))
+                            var putRes = await braid_fetch(url, {
+                                method: 'PUT',
+                                body: fileData,
+                                headers: { 'Content-Type': 'application/octet-stream', 'peer': self.peer, 'X-Timestamp': local_mtime_ms }
+                            })
+                            if (putRes.ok) console.log(`Uploaded newer local file to server: ${url}`)
+                        } catch (e) { console.log(`Failed to upload newer local file: ${e}`) }
+                    }
+
+                    subscribeRes.subscribe(async (update) => {
+                        if (freed) return
+                        if (!update?.body) return
+                        // console.log(update)
+                        // ignore first responce if we already have the up-to-date file, although not needed.
+                        var update_timestamp = Math.round(Number(update.version[0]));
+                        // if (isNaN(update_timestamp)) update_timestamp = Date.now() // should throw error if can't read server timestamp
+                        if (local_exists && local_mtime_ms < update_timestamp) {
+                            try {
+                                // console.log(Math.round(local_mtime_ms))
+                                // console.log(Math.round(update_timestamp))
+                                var writePath = await get_fullpath()
+                                await wait_on(ensure_path(require("path").dirname(writePath)))
+                                await wait_on(require('fs').promises.writeFile(writePath, update.body))
+
+                                // Set the file timestamp to the update timestamp
+                                var mtime = update_timestamp / 1000
+                                await wait_on(require('fs').promises.utimes(writePath, mtime, mtime))
+                                // const statResult = await wait_on(require('fs').promises.stat(writePath));
+                                // console.log(Math.round(Number(statResult.mtimeMs))) // checking for writing vs written issues
+
+                                var st = await wait_on(require('fs').promises.stat(writePath, { bigint: true }))
+                                self.file_last_stat = st
+                                console.log(`Updated local binary file from server: ${writePath}`)
+                            } catch (e) { console.log(`Failed to update local file from server: ${e}`) }
+                        }
+                    })
+                }
+
+
+            } catch (e) {
+                console.log(`Failed to subscribe to server for binary file: ${e}`)
+            }
+
+            if (freed) return
+            try {
+                var stat = await wait_on(require('fs').promises.stat(fullpath, { bigint: true }))
+                if (freed) return
+                self.file_last_stat = stat
+            } catch (e) {
+                // file may not exist yet; will be handled by file_loop
+            }
+        })
+
+        file_loop_pump()
+
+        async function file_loop_pump() {
+            if (freed) return
+            if (file_loop_pump_lock) return
+            file_loop_pump_lock++
+
+            await within_fiber(fullpath, async () => {
+                var fullpath = await get_fullpath()
+                if (freed) return
+
+                while (file_needs_reading || file_needs_writing) {
+                    if (file_needs_reading) {
+                        file_needs_reading = false
+
+                        if (!(await wait_on(file_exists(fullpath)))) {
+                            if (freed) return
+                            // file_needs_writing = true
+                            await wait_on(require('fs').promises.writeFile(fullpath, ''))
+                        }
+                        if (freed) return
+
+                        if (self.file_read_only === null) try { self.file_read_only = await wait_on(is_read_only(fullpath)) } catch (e) { }
+                        if (freed) return
+
+                        var stat = await wait_on(require('fs').promises.stat(fullpath, { bigint: true }))
+                        if (freed) return
+
+                        if (!stat_eq(stat, self.file_last_stat)) {
+                            // Importent logs for debugging precision issues
+                            // console.log(`binary file change detected in ${path}`)
+                            // console.log(`stat.mtimeMs: ${stat.mtimeMs}`)
+                            // console.log(`self.file_last_stat.mtimeMs ${self.file_last_stat.mtimeMs}`)
+                            // console.log(Math.round(Number(stat.mtimeMs)))
+
+                            // The reason we need this is because mtime from bigint stats may have 1ms difference. Unable to use it.
+                            const lower_precision_mtimems = Math.round(Number(await wait_on((await require('fs').promises.stat(fullpath)).mtimeMs)));
+
+                            // Upload the changed file to server
+                            try {
+                                var fileData = await wait_on(require('fs').promises.readFile(fullpath))
+                                var response = await braid_fetch(url, {
+                                    method: 'PUT',
+                                    body: fileData,
+                                    headers: { 'Content-Type': 'application/octet-stream', 'peer': self.peer, 'X-Timestamp': lower_precision_mtimems }
+                                })
+                                if (response.ok) {
+                                    console.log(`Uploaded changed binary file to server: ${url}`)
+                                }
+                            } catch (e) {
+                                console.log(`Failed to upload changed file: ${e}`)
+                            }
+                        }
+                        self.file_last_stat = stat
+                    }

+                    if (file_needs_writing) {
+                        file_needs_writing = false
+                        // Binary files are handled by the file watcher and server subscription
+                        // No additional processing needed here
+                    }
+                }
+            })
+
+            file_loop_pump_lock--
+        }
+
+        // Add disconnect function for cleanup
+        self.disconnect = async () => {
+            if (freed) return
+            freed = true
+            // Clean up any active subscriptions
+            if (self.server_subscription) {
+                try {
+                    await self.server_subscription.close?.()
+                } catch (e) {}
+            }
+        }
+
+        return self
+    }
+
     async function init() {
         if (freed) return

         // console.log(`sync_url: ${url}`)

+        // Check if this is a binary file
+        var is_binary_file = is_binary(path)
+
+        if (is_binary_file) {
+            // Opts into the code for FS watcher (file_needs_reading, file_needs_writing) & unsyncing (disconnect)
+            // It notably does NOT handle `.braidfs/set_version/` and `.braidfs/get_version/` correctly!
+            // Search ` sync_url.cache[` to see how it's all handled.
+            return await init_binary_sync()
+        }
+
         var resource = await braid_text.get_resource(url)
         if (freed) return

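At its core, init_binary_sync above makes a timestamp decision: compare the local file's mtime against the server's last-modified-ms header, upload via PUT if the local copy is newer, and otherwise accept the body delivered by the subscription. A minimal standalone sketch of that decision, using only Node's fs API (choose_sync_direction and serverMtimeMs are illustrative names, not part of braidfs):

const fs = require('fs').promises

// Decide which side wins for a binary file, by modification time.
// serverMtimeMs would come from the server's last-modified-ms response header.
async function choose_sync_direction(fullpath, serverMtimeMs) {
    let localMtimeMs = 0
    try {
        localMtimeMs = Math.round(Number((await fs.stat(fullpath)).mtimeMs))
    } catch (e) { /* missing locally: treat as 0 so the server copy wins */ }

    if (localMtimeMs > serverMtimeMs) return 'upload'   // PUT the local bytes to the server
    if (localMtimeMs < serverMtimeMs) return 'download' // write the subscription update, then utimes() to its timestamp
    return 'in-sync'
}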
@@ -935,24 +1158,10 @@ async function sync_url(url) {
                 if (!update.status) {
                     // console.log(`got initial update about ${url}`)

-
-
-
-
-                    var start_i = 1 + resource.doc.getLocalVersion().reduce((a, b) => Math.max(a, b), -1)
-                    resource.doc.mergeBytes(bytes)
-
-                    // update resource.actor_seqs
-                    var end_i = resource.doc.getLocalVersion().reduce((a, b) => Math.max(a, b), -1)
-                    for (var i = start_i; i <= end_i; i++) {
-                        var v = resource.doc.localToRemoteVersion([i])[0]
-                        if (!resource.actor_seqs[v[0]]) resource.actor_seqs[v[0]] = new braid_text.RangeSet()
-                        resource.actor_seqs[v[0]].add_range(v[1], v[1])
-                    }
-
-                    resource.val = resource.doc.get()
-                    resource.need_defrag = true
-                    await resource.db_delta(bytes)
+                    await braid_text.put(resource, {
+                        body: update.body,
+                        transfer_encoding: 'dt'
+                    })
                     if (freed || closed) return

                     self.update_fork_point(JSON.parse(`[${res.headers.get('current-version')}]`), self.fork_point)
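This last hunk collapses the hand-rolled merge of DT-encoded bytes (mergeBytes plus the actor_seqs RangeSet bookkeeping) into a single braid_text.put call. A sketch of the new shape, assuming a resource obtained from braid_text.get_resource as elsewhere in this file (apply_initial_update is an illustrative wrapper, not braidfs code):

const braid_text = require('braid-text')

// Apply an initial snapshot that arrives as DT-encoded bytes in update.body,
// letting braid-text do the version bookkeeping that 0.0.119 did by hand.
async function apply_initial_update(url, update) {
    const resource = await braid_text.get_resource(url)
    await braid_text.put(resource, {
        body: update.body,
        transfer_encoding: 'dt'
    })
}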
package/package.json
CHANGED
@@ -1,13 +1,13 @@
 {
   "name": "braidfs",
-  "version": "0.0.119",
+  "version": "0.0.120",
   "description": "braid technology synchronizing files and webpages",
   "author": "Braid Working Group",
   "repository": "braid-org/braidfs",
   "homepage": "https://braid.org",
   "dependencies": {
     "braid-http": "~1.3.79",
-    "braid-text": "~0.2.
+    "braid-text": "~0.2.64",
     "chokidar": "^4.0.3"
   },
   "bin": {