braidfs 0.0.39 → 0.0.40

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (2)
  1. package/index.js +262 -167
  2. package/package.json +1 -1
package/index.js CHANGED
@@ -15,6 +15,7 @@ var proxy_base = `${require('os').homedir()}/http`,
15
15
  proxy_base_meta = `${braidfs_config_dir}/proxy_base_meta`
16
16
  braid_text.db_folder = `${braidfs_config_dir}/braid-text-db`
17
17
  var trash = `${braidfs_config_dir}/trash`
18
+ var temp_folder = `${braidfs_config_dir}/temp`
18
19
 
19
20
  var config = null,
20
21
  path_to_func = {},
@@ -30,7 +31,8 @@ if (require('fs').existsSync(proxy_base)) {
30
31
  config = {
31
32
  port: 10000,
32
33
  sync: {},
33
- domains: { 'example.com': { auth_headers: { Cookie: "secret_pass" } } }
34
+ domains: { 'example.com': { auth_headers: { Cookie: "secret_pass" } } },
35
+ scan_interval_ms: 1000 * 20,
34
36
  }
35
37
  require('fs').mkdirSync(braidfs_config_dir, { recursive: true })
36
38
  require('fs').writeFileSync(braidfs_config_file, JSON.stringify(config, null, 4))
@@ -38,6 +40,7 @@ if (require('fs').existsSync(proxy_base)) {
38
40
 
39
41
  require('fs').mkdirSync(proxy_base_meta, { recursive: true })
40
42
  require('fs').mkdirSync(trash, { recursive: true })
43
+ require('fs').mkdirSync(temp_folder, { recursive: true })
41
44
 
42
45
  // process command line args
43
46
  let to_run_in_background = process.platform === 'darwin' ? `
@@ -140,9 +143,12 @@ async function main() {
140
143
  config = JSON.parse(x)
141
144
 
142
145
  // did anything get deleted?
143
- for (let url of Object.keys(prev.sync))
144
- if (!config.sync[url]) unproxy_url(url)
146
+ var old_syncs = new Set(Object.keys(prev.sync).map(url => normalize_url(url).replace(/^https?:\/\//, '')))
147
+ var new_syncs = new Set(Object.keys(config.sync).map(url => normalize_url(url).replace(/^https?:\/\//, '')))
148
+ for (let url of old_syncs.difference(new_syncs))
149
+ unproxy_url(url)
145
150
 
151
+ // proxy all the new stuff
146
152
  for (let url of Object.keys(config.sync)) proxy_url(url)
147
153
 
148
154
  // if any auth stuff has changed,
@@ -157,10 +163,9 @@ async function main() {
157
163
  || JSON.stringify(prev.domains[domain]) !== JSON.stringify(v))
158
164
  changed.add(domain)
159
165
  // ok, have every domain which has changed reconnect
160
- for (let [url, x] of Object.entries(proxy_url.cache))
161
- if (url.match(/^https?:\/\//)
162
- && changed.has(new URL(url).hostname))
163
- (await x).reconnect()
166
+ for (let [path, x] of Object.entries(proxy_url.cache))
167
+ if (changed.has(path.split(/\//)[0].split(/:/)[0]))
168
+ (await x).reconnect?.()
164
169
  } catch (e) {
165
170
  if (x !== '') console.log(`warning: config file is currently invalid.`)
166
171
  return
@@ -174,62 +179,131 @@ async function main() {
174
179
  console.log({ sync: config.sync })
175
180
  for (let url of Object.keys(config.sync)) proxy_url(url)
176
181
 
177
- require('chokidar').watch(proxy_base).on('change', x => chokidar_handler(x, false)).on('add', x => chokidar_handler(x, true))
178
- async function chokidar_handler(fullpath, added) {
179
-
180
- // Make sure the path is within proxy_base..
181
- if (!fullpath.startsWith(proxy_base))
182
- return on_watcher_misses(`path ${fullpath} outside ${proxy_base}`)
183
-
184
- // Make sure the path is to a file, and not a directory
185
- if ((await require('fs').promises.stat(fullpath)).isDirectory())
186
- return on_watcher_misses(`expected file, got: ${fullpath}`)
187
-
188
- var path = require('path').relative(proxy_base, fullpath)
189
-
190
- // Files to skip
191
- if (// Paths with a # in the name can't map to real URLs
192
- path.includes('#')
193
- // .DS_store
194
- || path.endsWith('.DS_store')
195
- // Skip stuff in .braidfs/ except for config and errors
196
- || (path.startsWith('.braidfs')
197
- && !path.match(/^\.braidfs\/(config|errors)$/)))
198
- return
199
-
200
- console.log(`file event: ${path}, added: ${added}`)
201
-
202
- var update_func = await path_to_func[normalize_url(path)]
203
-
204
- console.log(`has update_func: ${!!update_func}`)
205
-
206
- if (update_func && !added) update_func()
207
- if (!update_func) {
208
- // throw this unrecognized file into the trash,
209
- let dest = `${trash}/${braid_text.encode_filename(path)}_${Math.random().toString(36).slice(2)}`
210
- console.log(`moving untracked file ${fullpath} to ${dest}`)
211
- await require('fs').promises.rename(fullpath, dest)
212
-
213
- // and log an error
214
- var x = await braid_text.get('.braidfs/errors', {}),
215
- len = [...x.body].length
216
- await braid_text.put('.braidfs/errors', {
217
- parents: x.version,
218
- patches: [{
219
- unit: 'text',
220
- range: `[${len}:${len}]`,
221
- content: `error: unsynced file ${fullpath}; moved to ${dest}\n`
222
- }]
223
- })
224
- }
225
- }
182
+ watch_files()
183
+ setTimeout(scan_files, 1200)
226
184
  }).on('error', e => {
227
185
  if (e.code === 'EADDRINUSE') return console.log(`server already running on port ${config.port}`)
228
186
  throw e
229
187
  })
230
188
  }
231
189
 
190
+ function on_watcher_miss(message, scan = true) {
191
+ console.log(`watcher miss: ${message}`)
192
+ console.log(`\x1b[33;40m[${++watcher_misses}] watcher misfires\x1b[0m`)
193
+ watch_files()
194
+ if (scan) setTimeout(scan_files, 1200)
195
+ }
196
+
197
+ function skip_file(path) {
198
+ // Files to skip
199
+ return (
200
+ // Paths with a # in the name can't map to real URLs
201
+ path.includes('#')
202
+ // .DS_store
203
+ || path.endsWith('.DS_store')
204
+ // Skip stuff in .braidfs/ except for config and errors
205
+ || (path.startsWith('.braidfs')
206
+ && !path.match(/^\.braidfs\/(config|errors)$/))
207
+ )
208
+ }
209
+
210
+ async function trash_file(fullpath, path) {
211
+ // throw this unrecognized file into the trash,
212
+ let dest = `${trash}/${braid_text.encode_filename(path)}_${Math.random().toString(36).slice(2)}`
213
+ console.log(`moving untracked file ${fullpath} to ${dest}`)
214
+ await require('fs').promises.rename(fullpath, dest)
215
+
216
+ // and log an error
217
+ var x = await braid_text.get('.braidfs/errors', {}),
218
+ len = [...x.body].length
219
+ await braid_text.put('.braidfs/errors', {
220
+ parents: x.version,
221
+ patches: [{
222
+ unit: 'text',
223
+ range: `[${len}:${len}]`,
224
+ content: `error: unsynced file ${fullpath}; moved to ${dest}\n`
225
+ }]
226
+ })
227
+ }
228
+
229
+ async function watch_files() {
230
+ if (watch_files.watcher === 42) return
231
+ let w = watch_files.watcher
232
+ watch_files.watcher = 42
233
+ await w?.close()
234
+
235
+ console.log('watch files..')
236
+ watch_files.watcher = require('chokidar').watch(proxy_base).
237
+ on('change', x => chokidar_handler(x, false)).
238
+ on('add', x => chokidar_handler(x, true))
239
+
240
+ async function chokidar_handler(fullpath, added) {
241
+ // Make sure the path is within proxy_base..
242
+ if (!fullpath.startsWith(proxy_base))
243
+ return on_watcher_miss(`path ${fullpath} outside ${proxy_base}`)
244
+
245
+ // Make sure the path is to a file, and not a directory
246
+ if ((await require('fs').promises.stat(fullpath)).isDirectory())
247
+ return on_watcher_miss(`expected file, got: ${fullpath}`)
248
+
249
+ var path = require('path').relative(proxy_base, fullpath)
250
+ if (skip_file(path)) return
251
+ console.log(`file event: ${path}, added: ${added}`)
252
+
253
+ var proxy = await proxy_url.cache[normalize_url(path)]
254
+
255
+ if (proxy && !added) proxy.signal_file_needs_reading()
256
+ if (!proxy) await trash_file(fullpath, path)
257
+ }
258
+ }
259
+
260
+ async function scan_files() {
261
+ scan_files.do_again = true
262
+ if (scan_files.running) return
263
+ if (scan_files.timeout) clearTimeout(scan_files.timeout)
264
+
265
+ scan_files.running = true
266
+ while (scan_files.do_again) {
267
+ scan_files.do_again = false
268
+ console.log(`scan files..`)
269
+ if (await f(proxy_base))
270
+ on_watcher_miss(`scanner picked up a change that the watcher should have gotten`, false)
271
+ }
272
+ scan_files.running = false
273
+
274
+ scan_files.timeout = setTimeout(scan_files, config.scan_interval_ms ?? (20 * 1000))
275
+
276
+ async function f(fullpath) {
277
+ let stat = await require('fs').promises.stat(fullpath, { bigint: true })
278
+ if (stat.isDirectory()) {
279
+ let found
280
+ for (let file of await require('fs').promises.readdir(fullpath))
281
+ found ||= await f(`${fullpath}/${file}`)
282
+ return found
283
+ } else {
284
+ path = require('path').relative(proxy_base, fullpath)
285
+ if (skip_file(path)) return
286
+
287
+ var proxy = await proxy_url.cache[normalize_url(path)]
288
+ if (!proxy) return await trash_file(fullpath, path)
289
+
290
+ if (!stat_eq(stat, proxy.file_last_stat)) {
291
+ console.log(`scan thinks ${path} has changed`)
292
+
293
+
294
+ console.log(stat)
295
+ console.log(proxy.file_last_stat)
296
+
297
+
298
+ proxy.signal_file_needs_reading()
299
+ return true
300
+ }
301
+ }
302
+ }
303
+ }
304
+
232
305
  function unproxy_url(url) {
306
+ url = normalize_url(url).replace(/^https?:\/\//, '')
233
307
  if (!proxy_url.cache?.[url]) return
234
308
 
235
309
  console.log(`unproxy_url: ${url}`)
@@ -249,18 +323,15 @@ async function proxy_url(url) {
249
323
  fullpath = `${proxy_base}/${path}`,
250
324
  meta_path = `${proxy_base_meta}/${braid_text.encode_filename(url)}`
251
325
 
252
- let set_path_to_func
253
- if (!path_to_func[path]) path_to_func[path] = new Promise(done => set_path_to_func = done)
254
-
255
326
  if (!proxy_url.cache) proxy_url.cache = {}
256
327
  if (!proxy_url.chain) proxy_url.chain = Promise.resolve()
257
- if (!proxy_url.cache[url]) proxy_url.cache[url] = proxy_url.chain = proxy_url.chain.then(async () => {
328
+ if (!proxy_url.cache[path]) proxy_url.cache[path] = proxy_url.chain = proxy_url.chain.then(async () => {
258
329
  var freed = false,
259
330
  aborts = new Set(),
260
331
  braid_text_get_options = null,
261
332
  wait_count = 0
262
- var wait_promise, wait_promise_done
263
- var start_something = () => {
333
+ var wait_promise, wait_promise_done
334
+ var start_something = () => {
264
335
  if (freed) return
265
336
  if (!wait_count) wait_promise = new Promise(done => wait_promise_done = done)
266
337
  return ++wait_count
@@ -270,10 +341,9 @@ async function proxy_url(url) {
270
341
  if (!wait_count) wait_promise_done()
271
342
  }
272
343
  if (!unproxy_url.cache) unproxy_url.cache = {}
273
- var old_unproxy = unproxy_url.cache[url]
274
- unproxy_url.cache[url] = async () => {
344
+ var old_unproxy = unproxy_url.cache[path]
345
+ unproxy_url.cache[path] = async () => {
275
346
  freed = true
276
- delete path_to_func[path]
277
347
  for (let a of aborts) a.abort()
278
348
  await wait_promise
279
349
  if (braid_text_get_options) await braid_text.forget(url, braid_text_get_options)
@@ -306,15 +376,15 @@ async function proxy_url(url) {
306
376
  var peer = Math.random().toString(36).slice(2),
307
377
  char_counter = -1,
308
378
  file_last_version = null,
309
- file_last_digest = null,
310
- file_last_text = null,
311
- file_last_stat = null
379
+ file_last_digest = null
380
+ self.file_last_text = null
381
+ self.file_last_stat = null
312
382
  self.file_read_only = null
313
383
  var file_needs_reading = true,
314
384
  file_needs_writing = null,
315
385
  file_loop_pump_lock = 0
316
386
 
317
- function signal_file_needs_reading() {
387
+ self.signal_file_needs_reading = () => {
318
388
  if (freed) return
319
389
  file_needs_reading = true
320
390
  file_loop_pump()
@@ -353,8 +423,6 @@ async function proxy_url(url) {
353
423
  finish_something()
354
424
  }
355
425
 
356
- set_path_to_func(signal_file_needs_reading)
357
-
358
426
  file_loop_pump()
359
427
  async function file_loop_pump() {
360
428
  if (file_loop_pump_lock) return
@@ -362,98 +430,109 @@ async function proxy_url(url) {
362
430
 
363
431
  if (!start_something()) return
364
432
 
365
- if (file_last_version === null) {
366
- if (await require('fs').promises.access(meta_path).then(() => 1, () => 0)) {
367
- // meta file exists
368
- let meta = JSON.parse(await require('fs').promises.readFile(meta_path, { encoding: 'utf8' }))
369
- let _ = ({ version: file_last_version, digest: file_last_digest } = Array.isArray(meta) ? { version: meta } : meta)
370
-
371
- file_last_text = (await braid_text.get(url, { version: file_last_version })).body
372
- file_needs_writing = !v_eq(file_last_version, (await braid_text.get(url, {})).version)
373
-
374
- // sanity check
375
- if (file_last_digest && require('crypto').createHash('sha256').update(file_last_text).digest('base64') != file_last_digest) throw new Error('file_last_text does not match file_last_digest')
376
- } else if (await require('fs').promises.access(await get_fullpath()).then(() => 1, () => 0)) {
377
- // file exists, but not meta file
378
- file_last_version = []
379
- file_last_text = ''
380
- } else {
381
- // file doesn't exist, nor does meta file
382
- file_needs_writing = true
383
- file_last_version = []
384
- file_last_text = ''
385
- await require('fs').promises.writeFile(await get_fullpath(), file_last_text)
433
+ await within_file_lock(fullpath, async () => {
434
+ var fullpath = await get_fullpath()
435
+
436
+ if (file_last_version === null) {
437
+ if (await require('fs').promises.access(meta_path).then(
438
+ () => 1, () => 0)) {
439
+ // meta file exists
440
+ let meta = JSON.parse(await require('fs').promises.readFile(meta_path, { encoding: 'utf8' }))
441
+ let _ = ({ version: file_last_version, digest: file_last_digest } = Array.isArray(meta) ? { version: meta } : meta)
442
+
443
+ self.file_last_text = (await braid_text.get(url, { version: file_last_version })).body
444
+ file_needs_writing = !v_eq(file_last_version, (await braid_text.get(url, {})).version)
445
+
446
+ // sanity check
447
+ if (file_last_digest && require('crypto').createHash('sha256').update(self.file_last_text).digest('base64') != file_last_digest) throw new Error('file_last_text does not match file_last_digest')
448
+ } else if (await require('fs').promises.access(fullpath).then(() => 1, () => 0)) {
449
+ // file exists, but not meta file
450
+ file_last_version = []
451
+ self.file_last_text = ''
452
+ } else {
453
+ // file doesn't exist, nor does meta file
454
+ file_needs_writing = true
455
+ file_last_version = []
456
+ self.file_last_text = ''
457
+
458
+ await require('fs').promises.writeFile(fullpath, self.file_last_text)
459
+ }
386
460
  }
387
- }
388
461
 
389
- while (file_needs_reading || file_needs_writing) {
390
- if (file_needs_reading) {
391
- console.log(`reading file: ${await get_fullpath()}`)
462
+ while (file_needs_reading || file_needs_writing) {
463
+ if (file_needs_reading) {
464
+ console.log(`reading file: ${fullpath}`)
392
465
 
393
- file_needs_reading = false
466
+ file_needs_reading = false
394
467
 
395
- if (self.file_read_only === null) try { self.file_read_only = await is_read_only(await get_fullpath()) } catch (e) { }
468
+ if (self.file_read_only === null) try { self.file_read_only = await is_read_only(fullpath) } catch (e) { }
396
469
 
397
- let text = await require('fs').promises.readFile(
398
- await get_fullpath(), { encoding: 'utf8' })
470
+ let text = await require('fs').promises.readFile(
471
+ fullpath, { encoding: 'utf8' })
399
472
 
400
- var stat = await require('fs').promises.stat(await get_fullpath(), {bigint: true})
401
- var stat_ms = Number(stat.mtimeMs)
402
- stat = JSON.stringify(stat, (k, v) => typeof v === 'bigint' ? v.toString() : v)
473
+ var stat = await require('fs').promises.stat(fullpath, { bigint: true })
403
474
 
404
- var patches = diff(file_last_text, text)
405
- if (patches.length) {
406
- console.log(`found changes in: ${await get_fullpath()}`)
475
+ var patches = diff(self.file_last_text, text)
476
+ if (patches.length) {
477
+ console.log(`found changes in: ${fullpath}`)
407
478
 
408
- // convert from js-indicies to code-points
409
- char_counter += patches_to_code_points(patches, file_last_text)
479
+ // convert from js-indicies to code-points
480
+ char_counter += patches_to_code_points(patches, self.file_last_text)
410
481
 
411
- file_last_text = text
482
+ self.file_last_text = text
412
483
 
413
- var version = [peer + "-" + char_counter]
414
- var parents = file_last_version
415
- file_last_version = version
484
+ var version = [peer + "-" + char_counter]
485
+ var parents = file_last_version
486
+ file_last_version = version
416
487
 
417
- send_out({ version, parents, patches, peer })
488
+ send_out({ version, parents, patches, peer })
418
489
 
419
- await braid_text.put(url, { version, parents, patches, peer })
490
+ await braid_text.put(url, { version, parents, patches, peer })
420
491
 
421
- await require('fs').promises.writeFile(meta_path, JSON.stringify({ version: file_last_version, digest: require('crypto').createHash('sha256').update(file_last_text).digest('base64') }))
422
- } else {
423
- console.log(`no changes found in: ${await get_fullpath()}`)
424
- if (stat === file_last_stat && Date.now() > stat_ms + 300)
425
- on_watcher_miss(`expected change to: ${await get_fullpath()}`)
426
- }
427
- file_last_stat = stat
428
- }
429
- if (file_needs_writing) {
430
- file_needs_writing = false
431
- let { version, body } = await braid_text.get(url, {})
432
- if (!v_eq(version, file_last_version)) {
433
- console.log(`writing file ${await get_fullpath()}`)
434
-
435
- // make sure the file has what it had before
436
- let text = await require('fs').promises.readFile(await get_fullpath(), { encoding: 'utf8' })
437
- if (file_last_text != text) crash(new Error('File changed without us noticing.'))
438
-
439
- try { if (await is_read_only(await get_fullpath())) await set_read_only(await get_fullpath(), false) } catch (e) { }
440
-
441
- file_last_version = version
442
- file_last_text = body
443
- await require('fs').promises.writeFile(await get_fullpath(), file_last_text)
444
-
445
- file_last_stat = JSON.stringify(await require('fs').promises.stat(await get_fullpath(), {bigint: true}), (k, v) => typeof v === 'bigint' ? v.toString() : v)
446
-
447
- await require('fs').promises.writeFile(meta_path, JSON.stringify({
448
- version: file_last_version,
449
- digest: require('crypto').createHash('sha256')
450
- .update(file_last_text).digest('base64')
451
- }))
492
+ await require('fs').promises.writeFile(meta_path, JSON.stringify({ version: file_last_version, digest: require('crypto').createHash('sha256').update(self.file_last_text).digest('base64') }))
493
+ } else {
494
+ console.log(`no changes found in: ${fullpath}`)
495
+ if (stat_eq(stat, self.file_last_stat)
496
+ && !self.file_change_expected)
497
+ on_watcher_miss(`expected change to: ${fullpath}`)
498
+ }
499
+ self.file_last_stat = stat
500
+ self.file_change_expected = null
452
501
  }
502
+ if (file_needs_writing) {
503
+ file_needs_writing = false
504
+ let { version, body } = await braid_text.get(url, {})
505
+ if (!v_eq(version, file_last_version)) {
506
+ console.log(`writing file ${fullpath}`)
507
+
508
+ // make sure the file has what it had before
509
+ let text = await require('fs').promises.readFile(fullpath, { encoding: 'utf8' })
510
+ if (self.file_last_text != text) crash(new Error('File changed without us noticing.'))
511
+
512
+ try { if (await is_read_only(fullpath)) await set_read_only(fullpath, false) } catch (e) { }
513
+
514
+ file_last_version = version
515
+ self.file_last_text = body
516
+ self.file_change_expected = true
517
+ await require('fs').promises.writeFile(fullpath, self.file_last_text)
518
+
519
+
520
+ await require('fs').promises.writeFile(meta_path, JSON.stringify({
521
+ version: file_last_version,
522
+ digest: require('crypto').createHash('sha256')
523
+ .update(self.file_last_text).digest('base64')
524
+ }))
525
+ }
453
526
 
454
- if (await is_read_only(await get_fullpath()) !== self.file_read_only) await set_read_only(await get_fullpath(), self.file_read_only)
527
+ if (await is_read_only(fullpath) !== self.file_read_only) {
528
+ self.file_change_expected = true
529
+ await set_read_only(fullpath, self.file_read_only)
530
+ }
531
+
532
+ self.file_last_stat = await require('fs').promises.stat(fullpath, { bigint: true })
533
+ }
455
534
  }
456
- }
535
+ })
457
536
 
458
537
  finish_something()
459
538
 
@@ -497,7 +576,7 @@ async function proxy_url(url) {
497
576
 
498
577
  await new Promise(done => {
499
578
  if (!Object.keys(waiting_for_versions).length) {
500
- console.log('got everything we were waiting for..')
579
+ console.log('got everything we were waiting for: ' + url)
501
580
  done()
502
581
  done = null
503
582
  }
@@ -618,21 +697,17 @@ async function ensure_path(path) {
618
697
  try {
619
698
  await require("fs").promises.mkdir(path, { recursive: true })
620
699
  } catch (e) {
621
- let parts = path.split(require("path").sep).slice(1)
622
- for (let i = 1; i <= parts.length; i++) {
623
- let partial = require("path").sep + require("path").join(...parts.slice(0, i))
700
+ var parts = path.split('/').slice(1)
701
+ for (var i = 1; i <= parts.length; i++) {
702
+ var partial = '/' + parts.slice(0, i).join('/')
624
703
 
625
704
  if (!(await is_dir(partial))) {
626
- let save = await require("fs").promises.readFile(partial)
627
-
628
- await require("fs").promises.unlink(partial)
629
- await require("fs").promises.mkdir(path, { recursive: true })
630
-
631
- while (await is_dir(partial))
632
- partial = require("path").join(partial, 'index')
633
-
634
- await require("fs").promises.writeFile(partial, save)
635
- break
705
+ await within_file_lock(normalize_url(partial), async () => {
706
+ let temp = `${temp_folder}/${Math.random().toString(36).slice(2)}`
707
+ await require('fs').promises.rename(partial, temp)
708
+ await require("fs").promises.mkdir(partial)
709
+ await require('fs').promises.rename(temp, partial + '/index')
710
+ })
636
711
  }
637
712
  }
638
713
  }
@@ -729,11 +804,19 @@ function v_eq(v1, v2) {
729
804
  return v1.length === v2?.length && v1.every((x, i) => x == v2[i])
730
805
  }
731
806
 
807
+ function stat_eq(a, b) {
808
+ return (!a && !b) || (a && b &&
809
+ a.mode === b.mode &&
810
+ a.size === b.size &&
811
+ a.mtimeNs === b.mtimeNs &&
812
+ a.ctimeNs === b.ctimeNs)
813
+ }
814
+
732
815
  async function is_read_only(fullpath) {
733
- const stats = await require('fs').promises.stat(fullpath)
816
+ const stat = await require('fs').promises.stat(fullpath)
734
817
  return require('os').platform() === "win32" ?
735
- !!(stats.mode & 0x1) :
736
- !(stats.mode & 0o200)
818
+ !!(stat.mode & 0x1) :
819
+ !(stat.mode & 0o200)
737
820
  }
738
821
 
739
822
  async function set_read_only(fullpath, read_only) {
@@ -756,7 +839,19 @@ function crash(e) {
756
839
  process.exit(1)
757
840
  }
758
841
 
759
- function on_watcher_miss(message) {
760
- console.log(`watcher miss: ${message}`)
761
- console.log(`\x1b[33;40m[${++watcher_misses}] watcher misfires\x1b[0m`);
842
+ async function get_file_lock(fullpath) {
843
+ if (!get_file_lock.locks) get_file_lock.locks = {}
844
+ if (!get_file_lock.locks[fullpath]) get_file_lock.locks[fullpath] = Promise.resolve()
845
+ return new Promise(done =>
846
+ get_file_lock.locks[fullpath] = get_file_lock.locks[fullpath].then(() =>
847
+ new Promise(done2 => done(done2))))
848
+ }
849
+
850
+ async function within_file_lock(fullpath, func) {
851
+ var lock = await get_file_lock(fullpath)
852
+ try {
853
+ return await func()
854
+ } finally {
855
+ lock()
856
+ }
762
857
  }
package/package.json CHANGED
@@ -1,6 +1,6 @@
1
1
  {
2
2
  "name": "braidfs",
3
- "version": "0.0.39",
3
+ "version": "0.0.40",
4
4
  "description": "braid technology synchronizing files and webpages",
5
5
  "author": "Braid Working Group",
6
6
  "repository": "braid-org/braidfs",