braidfs 0.0.87 → 0.0.89

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (2)
  1. package/index.js +37 -37
  2. package/package.json +2 -2
package/index.js CHANGED
@@ -296,12 +296,14 @@ async function watch_files() {
296
296
  if (!fullpath.startsWith(sync_base))
297
297
  return on_watcher_miss(`path ${fullpath} outside ${sync_base}`)
298
298
 
299
+ // See if this is a file we should skip..
300
+ var path = require('path').relative(sync_base, fullpath)
301
+ if (skip_file(path)) return
302
+
299
303
  // Make sure the path is to a file, and not a directory
300
304
  if (event != 'unlink' && (await require('fs').promises.stat(fullpath)).isDirectory())
301
305
  return on_watcher_miss(`expected file, got: ${fullpath}`)
302
306
 
303
- var path = require('path').relative(sync_base, fullpath)
304
- if (skip_file(path)) return
305
307
  console.log(`file event: ${path}, event: ${event}`)
306
308
 
307
309
  var sync = await sync_url.cache[normalize_url(path)]
@@ -358,7 +360,8 @@ function unsync_url(url) {
358
360
  console.log(`unsync_url: ${url}`)
359
361
 
360
362
  delete sync_url.cache[url]
361
- unsync_url.cache[url] = unsync_url.cache[url]()
363
+ sync_url.chain = sync_url.chain.then(unsync_url.cache[url])
364
+ delete unsync_url.cache[url]
362
365
  }
363
366
 
364
367
  async function sync_url(url) {
@@ -374,7 +377,7 @@ async function sync_url(url) {
374
377
 
375
378
  if (!sync_url.cache) sync_url.cache = {}
376
379
  if (!sync_url.chain) sync_url.chain = Promise.resolve()
377
- if (!sync_url.cache[path]) sync_url.cache[path] = sync_url.chain = sync_url.chain.then(async () => {
380
+ if (!sync_url.cache[path]) {
378
381
  var freed = false,
379
382
  aborts = new Set(),
380
383
  braid_text_get_options = null,
@@ -390,7 +393,6 @@ async function sync_url(url) {
390
393
  if (!wait_count) wait_promise_done()
391
394
  }
392
395
  if (!unsync_url.cache) unsync_url.cache = {}
393
- var old_unsync = unsync_url.cache[path]
394
396
  unsync_url.cache[path] = async () => {
395
397
  freed = true
396
398
  for (let a of aborts) a.abort()
@@ -406,8 +408,9 @@ async function sync_url(url) {
406
408
  try { await require('fs').promises.unlink(meta_path) } catch (e) {}
407
409
  try { await require('fs').promises.unlink(await get_fullpath()) } catch (e) {}
408
410
  }
409
- await old_unsync
410
-
411
+ sync_url.cache[path] = sync_url.chain = sync_url.chain.then(init)
412
+ }
413
+ async function init() {
411
414
  var self = {url}
412
415
 
413
416
  console.log(`sync_url: ${url}`)
@@ -530,7 +533,7 @@ async function sync_url(url) {
530
533
  }
531
534
 
532
535
  if (!start_something()) return
533
- await within_file_lock(fullpath, async () => {
536
+ await within_fiber(fullpath, async () => {
534
537
  var fullpath = await get_fullpath()
535
538
  if (await require('fs').promises.access(meta_path).then(
536
539
  () => 1, () => 0)) {
@@ -584,7 +587,7 @@ async function sync_url(url) {
584
587
 
585
588
  if (!start_something()) return
586
589
 
587
- await within_file_lock(fullpath, async () => {
590
+ await within_fiber(fullpath, async () => {
588
591
  var fullpath = await get_fullpath()
589
592
 
590
593
  while (file_needs_reading || file_needs_writing) {
@@ -636,9 +639,7 @@ async function sync_url(url) {
636
639
 
637
640
  add_to_version_cache(text, version)
638
641
 
639
- send_out({ version, parents, patches, peer: self.peer })
640
-
641
- await braid_text.put(url, { version, parents, patches, peer: self.peer, merge_type: 'dt' })
642
+ await braid_text.put(url, { version, parents, patches, merge_type: 'dt' })
642
643
 
643
644
  await write_meta_file()
644
645
  } else {
@@ -782,9 +783,12 @@ async function sync_url(url) {
782
783
  return self.fork_point
783
784
  }
784
785
 
785
- if (is_external_link) find_fork_point().then(fork_point => {
786
+ var initial_connect_done
787
+ var initial_connect_promise = new Promise(done => initial_connect_done = done)
788
+
789
+ if (is_external_link) find_fork_point().then(async fork_point => {
790
+ await send_new_stuff(fork_point)
786
791
  connect(fork_point)
787
- send_new_stuff(fork_point)
788
792
  })
789
793
 
790
794
  function connect(fork_point) {
@@ -827,7 +831,9 @@ async function sync_url(url) {
827
831
  },
828
832
  peer: self.peer
829
833
  }).then(x => {
830
- if (x.status === 209) x.subscribe(async update => {
834
+ if (x.status !== 209) throw new Error(`unexpected status: ${x.status}`)
835
+ initial_connect_done()
836
+ x.subscribe(async update => {
831
837
  console.log(`got external update about ${url}`)
832
838
 
833
839
  if (update.body) update.body = update.body_text
@@ -854,18 +860,15 @@ async function sync_url(url) {
854
860
 
855
861
  // send them stuff we have but they don't
856
862
  async function send_new_stuff(fork_point) {
857
- var r = await my_fetch({ method: "HEAD" })
858
- if (r.headers.get('editable') === 'false')
859
- return console.log('do not send updates for read-only file: ' + url)
860
-
861
863
  var in_parallel = create_parallel_promises(10)
862
- braid_text.get(url, braid_text_get_options = {
864
+ await braid_text.get(url, braid_text_get_options = {
863
865
  parents: fork_point,
864
866
  merge_type: 'dt',
865
867
  peer: self.peer,
866
868
  subscribe: async (u) => {
867
869
  if (u.version.length) {
868
870
  self.signal_file_needs_writing()
871
+ await initial_connect_promise
869
872
  in_parallel(() => send_out({...u, peer: self.peer}))
870
873
  }
871
874
  },
@@ -880,7 +883,7 @@ async function sync_url(url) {
880
883
  })
881
884
 
882
885
  return self
883
- })
886
+ }
884
887
  return await sync_url.cache[url]
885
888
  }
886
889
 
@@ -888,7 +891,7 @@ async function ensure_path(path) {
888
891
  var parts = path.split('/').slice(1)
889
892
  for (var i = 1; i <= parts.length; i++) {
890
893
  var partial = '/' + parts.slice(0, i).join('/')
891
- await within_file_lock(partial, async () => {
894
+ await within_fiber(normalize_url(partial), async () => {
892
895
  try {
893
896
  let stat = await require("fs").promises.stat(partial)
894
897
  if (stat.isDirectory()) return // good
@@ -1025,21 +1028,18 @@ async function set_read_only(fullpath, read_only) {
1025
1028
  }
1026
1029
  }
1027
1030
 
1028
- async function get_file_lock(fullpath) {
1029
- if (!get_file_lock.locks) get_file_lock.locks = {}
1030
- if (!get_file_lock.locks[fullpath]) get_file_lock.locks[fullpath] = Promise.resolve()
1031
- return new Promise(done =>
1032
- get_file_lock.locks[fullpath] = get_file_lock.locks[fullpath].then(() =>
1033
- new Promise(done2 => done(done2))))
1034
- }
1035
-
1036
- async function within_file_lock(fullpath, func) {
1037
- var lock = await get_file_lock(fullpath)
1038
- try {
1039
- return await func()
1040
- } finally {
1041
- lock()
1042
- }
1031
+ function within_fiber(id, func) {
1032
+ if (!within_fiber.chains) within_fiber.chains = {}
1033
+ var prev = within_fiber.chains[id] || Promise.resolve()
1034
+ var curr = prev.then(async () => {
1035
+ try {
1036
+ return await func()
1037
+ } finally {
1038
+ if (within_fiber.chains[id] === curr)
1039
+ delete within_fiber.chains[id]
1040
+ }
1041
+ })
1042
+ return within_fiber.chains[id] = curr
1043
1043
  }
1044
1044
 
1045
1045
  async function file_exists(fullpath) {
package/package.json CHANGED
@@ -1,13 +1,13 @@
1
1
  {
2
2
  "name": "braidfs",
3
- "version": "0.0.87",
3
+ "version": "0.0.89",
4
4
  "description": "braid technology synchronizing files and webpages",
5
5
  "author": "Braid Working Group",
6
6
  "repository": "braid-org/braidfs",
7
7
  "homepage": "https://braid.org",
8
8
  "dependencies": {
9
9
  "braid-http": "^1.3.73",
10
- "braid-text": "^0.2.25",
10
+ "braid-text": "^0.2.28",
11
11
  "chokidar": "^3.6.0"
12
12
  },
13
13
  "bin": {