braidfs 0.0.82 → 0.0.85

Files changed (2)
  1. package/index.js +241 -214
  2. package/package.json +2 -2
package/index.js CHANGED
@@ -4,10 +4,10 @@ var { diff_main } = require(`${__dirname}/diff.js`),
  braid_text = require("braid-text"),
  braid_fetch = require('braid-http').fetch

- var proxy_base = `${require('os').homedir()}/http`,
- braidfs_config_dir = `${proxy_base}/.braidfs`,
+ var sync_base = `${require('os').homedir()}/http`,
+ braidfs_config_dir = `${sync_base}/.braidfs`,
  braidfs_config_file = `${braidfs_config_dir}/config`,
- proxy_base_meta = `${braidfs_config_dir}/proxy_base_meta`
+ sync_base_meta = `${braidfs_config_dir}/proxy_base_meta`
  braid_text.db_folder = `${braidfs_config_dir}/braid-text-db`
  var trash = `${braidfs_config_dir}/trash`
  var temp_folder = `${braidfs_config_dir}/temp`
@@ -15,7 +15,7 @@ var temp_folder = `${braidfs_config_dir}/temp`
  var config = null,
  watcher_misses = 0

- if (require('fs').existsSync(proxy_base)) {
+ if (require('fs').existsSync(sync_base)) {
  try {
  config = require('fs').readFileSync(braidfs_config_file, 'utf8')
  } catch (e) { return console.log(`could not find config file: ${braidfs_config_file}`) }
@@ -44,18 +44,21 @@ if (require('fs').existsSync(proxy_base)) {
  require('fs').writeFileSync(braidfs_config_file, JSON.stringify(config, null, 4))
  }

- require('fs').mkdirSync(proxy_base_meta, { recursive: true })
+ require('fs').mkdirSync(sync_base_meta, { recursive: true })
  require('fs').mkdirSync(trash, { recursive: true })
  require('fs').mkdirSync(temp_folder, { recursive: true })

- // process command line args
- let to_run_in_background = process.platform === 'darwin' ? `
+ // Add instructions for how to run in the background on this OS
+ var to_run_in_background = process.platform === 'darwin' ? `
  To run daemon in background:
  launchctl submit -l org.braid.braidfs -- braidfs run` : ''
- let argv = process.argv.slice(2)
+ // ...except this doesn't work yet. So disable.
+ to_run_in_background = ''

  console.log(`braidfs version: ${require(`${__dirname}/package.json`).version}`)

+ // process command line args
+ var argv = process.argv.slice(2)
  if (argv.length === 1 && argv[0].match(/^(run|serve)$/)) {
  return main()
  } else if (argv.length && argv.length % 2 == 0 && argv.every((x, i) => i % 2 != 0 || x.match(/^(sync|unsync)$/))) {
@@ -64,7 +67,7 @@ if (argv.length === 1 && argv[0].match(/^(run|serve)$/)) {
  var sync = argv[i] === 'sync',
  url = argv[i + 1]
  if (!url.match(/^https?:\/\//)) {
- if (url.startsWith('/')) url = require('path').relative(proxy_base, url)
+ if (url.startsWith('/')) url = require('path').relative(sync_base, url)
  url = `https://${url}`
  }
  console.log(`${sync ? '' : 'un'}subscribing ${sync ? 'to' : 'from'} ${url}`)
@@ -124,9 +127,9 @@ async function main() {
  var fullpath = decodeURIComponent(m[1])
  var hash = decodeURIComponent(m[2])

- var path = require('path').relative(proxy_base, fullpath)
- var proxy = await proxy_url.cache[normalize_url(path)]
- var version = proxy?.hash_to_version_cache.get(hash)?.version
+ var path = require('path').relative(sync_base, fullpath)
+ var sync = await sync_url.cache[normalize_url(path)]
+ var version = sync?.hash_to_version_cache.get(hash)?.version
  if (!version) res.statusCode = 404
  return res.end(JSON.stringify(version))
  }
@@ -134,11 +137,11 @@ async function main() {
  var m = url.match(/^\.braidfs\/set_version\/([^\/]*)\/([^\/]*)/)
  if (m) {
  var fullpath = decodeURIComponent(m[1])
- var path = require('path').relative(proxy_base, fullpath)
- var proxy = await proxy_url.cache[normalize_url(path)]
+ var path = require('path').relative(sync_base, fullpath)
+ var sync = await sync_url.cache[normalize_url(path)]

  var parents = JSON.parse(decodeURIComponent(m[2]))
- var parent_text = proxy?.version_to_text_cache.get(JSON.stringify(parents)) ?? (await braid_text.get(proxy.url, { parents })).body
+ var parent_text = sync?.version_to_text_cache.get(JSON.stringify(parents)) ?? (await braid_text.get(sync.url, { parents })).body

  var text = await new Promise(done => {
  const chunks = []
@@ -149,14 +152,14 @@ async function main() {
  var patches = diff(parent_text, text)

  if (patches.length) {
- proxy.local_edit_counter += patches_to_code_points(patches, parent_text)
- var version = [proxy.peer + "-" + (proxy.local_edit_counter - 1)]
- await braid_text.put(proxy.url, { version, parents, patches, merge_type: 'dt' })
+ sync.local_edit_counter += patches_to_code_points(patches, parent_text)
+ var version = [sync.peer + "-" + (sync.local_edit_counter - 1)]
+ await braid_text.put(sync.url, { version, parents, patches, merge_type: 'dt' })

  // may be able to do this more efficiently.. we want to make sure we're capturing a file write that is after our version was written.. there may be a way we can avoid calling file_needs_writing here
  var stat = await new Promise(done => {
- proxy.file_written_cbs.push(done)
- proxy.signal_file_needs_writing()
+ sync.file_written_cbs.push(done)
+ sync.signal_file_needs_writing()
  })

  res.writeHead(200, { 'Content-Type': 'application/json' })
@@ -179,7 +182,7 @@ async function main() {
  console.log(`daemon started on port ${config.port}`)
  if (!config.allow_remote_access) console.log('!! only accessible from localhost !!')

- proxy_url('.braidfs/config').then(() => {
+ sync_url('.braidfs/config').then(() => {
  braid_text.get('.braidfs/config', {
  subscribe: async update => {
  let prev = config
@@ -192,10 +195,10 @@ async function main() {
  var old_syncs = Object.entries(prev.sync).filter(x => x[1]).map(x => normalize_url(x[0]).replace(/^https?:\/\//, ''))
  var new_syncs = new Set(Object.entries(config.sync).filter(x => x[1]).map(x => normalize_url(x[0]).replace(/^https?:\/\//, '')))
  for (let url of old_syncs.filter(x => !new_syncs.has(x)))
- unproxy_url(url)
+ unsync_url(url)

- // proxy all the new stuff
- for (let x of Object.entries(config.sync)) if (x[1]) proxy_url(x[0])
+ // sync all the new stuff
+ for (let x of Object.entries(config.sync)) if (x[1]) sync_url(x[0])

  // if any auth stuff has changed,
  // have the appropriate connections reconnect
@@ -209,7 +212,7 @@ async function main() {
  || JSON.stringify(prev.cookies[domain]) !== JSON.stringify(v))
  changed.add(domain)
  // ok, have every domain which has changed reconnect
- for (let [path, x] of Object.entries(proxy_url.cache))
+ for (let [path, x] of Object.entries(sync_url.cache))
  if (changed.has(path.split(/\//)[0].split(/:/)[0]))
  (await x).reconnect?.()
  } catch (e) {
@@ -219,10 +222,10 @@ async function main() {
  }
  })
  })
- proxy_url('.braidfs/errors')
+ sync_url('.braidfs/errors')

  console.log({ sync: config.sync })
- for (let x of Object.entries(config.sync)) if (x[1]) proxy_url(x[0])
+ for (let x of Object.entries(config.sync)) if (x[1]) sync_url(x[0])

  watch_files()
  setTimeout(scan_files, 1200)
@@ -255,7 +258,6 @@ function skip_file(path) {
  async function trash_file(fullpath, path) {
  // throw this unrecognized file into the trash,
  let dest = `${trash}/${braid_text.encode_filename(path)}_${Math.random().toString(36).slice(2)}`
- console.log(`moving untracked file ${fullpath} to ${dest}`)
  await require('fs').promises.rename(fullpath, dest)

  // and log an error
@@ -263,6 +265,8 @@ async function trash_file(fullpath, path) {
  }

  async function log_error(text) {
+ console.log(`LOGGING ERROR: ${text}`)
+
  var x = await braid_text.get('.braidfs/errors', {}),
  len = [...x.body].length
  await braid_text.put('.braidfs/errors', {
@@ -282,28 +286,28 @@ async function watch_files() {
  await w?.close()

  console.log('watch files..')
- watch_files.watcher = require('chokidar').watch(proxy_base).
+ watch_files.watcher = require('chokidar').watch(sync_base).
  on('add', x => chokidar_handler(x, 'add')).
  on('change', x => chokidar_handler(x, 'change')).
  on('unlink', x => chokidar_handler(x, 'unlink'))

  async function chokidar_handler(fullpath, event) {
- // Make sure the path is within proxy_base..
- if (!fullpath.startsWith(proxy_base))
- return on_watcher_miss(`path ${fullpath} outside ${proxy_base}`)
+ // Make sure the path is within sync_base..
+ if (!fullpath.startsWith(sync_base))
+ return on_watcher_miss(`path ${fullpath} outside ${sync_base}`)

  // Make sure the path is to a file, and not a directory
  if (event != 'unlink' && (await require('fs').promises.stat(fullpath)).isDirectory())
  return on_watcher_miss(`expected file, got: ${fullpath}`)

- var path = require('path').relative(proxy_base, fullpath)
+ var path = require('path').relative(sync_base, fullpath)
  if (skip_file(path)) return
  console.log(`file event: ${path}, event: ${event}`)

- var proxy = await proxy_url.cache[normalize_url(path)]
+ var sync = await sync_url.cache[normalize_url(path)]

- if (proxy && event != 'add') proxy.signal_file_needs_reading()
- if (!proxy && event != 'unlink') await trash_file(fullpath, path)
+ if (sync && event != 'add') sync.signal_file_needs_reading()
+ if (!sync && event != 'unlink') await trash_file(fullpath, path)
  }
  }

@@ -316,7 +320,7 @@ async function scan_files() {
  while (scan_files.do_again) {
  scan_files.do_again = false
  console.log(`scan files..`)
- if (await f(proxy_base))
+ if (await f(sync_base))
  on_watcher_miss(`scanner picked up a change that the watcher should have gotten`, false)
  }
  scan_files.running = false
@@ -324,7 +328,7 @@ async function scan_files() {
  scan_files.timeout = setTimeout(scan_files, config.scan_interval_ms ?? (20 * 1000))

  async function f(fullpath) {
- path = require('path').relative(proxy_base, fullpath)
+ path = require('path').relative(sync_base, fullpath)
  if (skip_file(path)) return

  let stat = await require('fs').promises.stat(fullpath, { bigint: true })
@@ -334,30 +338,30 @@ async function scan_files() {
  found ||= await f(`${fullpath}/${file}`)
  return found
  } else {
- var proxy = await proxy_url.cache[normalize_url(path)]
- if (!proxy) return await trash_file(fullpath, path)
+ var sync = await sync_url.cache[normalize_url(path)]
+ if (!sync) return await trash_file(fullpath, path)

  stat = await require('fs').promises.stat(fullpath, { bigint: true })
- if (!stat_eq(stat, proxy.file_last_stat)) {
+ if (!stat_eq(stat, sync.file_last_stat)) {
  console.log(`scan thinks ${path} has changed`)
- proxy.signal_file_needs_reading()
+ sync.signal_file_needs_reading()
  return true
  }
  }
  }
  }

- function unproxy_url(url) {
+ function unsync_url(url) {
  url = normalize_url(url).replace(/^https?:\/\//, '')
- if (!proxy_url.cache?.[url]) return
+ if (!sync_url.cache?.[url]) return

- console.log(`unproxy_url: ${url}`)
+ console.log(`unsync_url: ${url}`)

- delete proxy_url.cache[url]
- unproxy_url.cache[url] = unproxy_url.cache[url]()
+ delete sync_url.cache[url]
+ unsync_url.cache[url] = unsync_url.cache[url]()
  }

- async function proxy_url(url) {
+ async function sync_url(url) {
  // normalize url by removing any trailing /index/index/
  var normalized_url = normalize_url(url),
  wasnt_normal = normalized_url != url
@@ -365,12 +369,12 @@ async function proxy_url(url) {

  var is_external_link = url.match(/^https?:\/\//),
  path = is_external_link ? url.replace(/^https?:\/\//, '') : url,
- fullpath = `${proxy_base}/${path}`,
- meta_path = `${proxy_base_meta}/${braid_text.encode_filename(url)}`
+ fullpath = `${sync_base}/${path}`,
+ meta_path = `${sync_base_meta}/${braid_text.encode_filename(url)}`

- if (!proxy_url.cache) proxy_url.cache = {}
- if (!proxy_url.chain) proxy_url.chain = Promise.resolve()
- if (!proxy_url.cache[path]) proxy_url.cache[path] = proxy_url.chain = proxy_url.chain.then(async () => {
+ if (!sync_url.cache) sync_url.cache = {}
+ if (!sync_url.chain) sync_url.chain = Promise.resolve()
+ if (!sync_url.cache[path]) sync_url.cache[path] = sync_url.chain = sync_url.chain.then(async () => {
  var freed = false,
  aborts = new Set(),
  braid_text_get_options = null,
@@ -385,9 +389,9 @@ async function proxy_url(url) {
  wait_count--
  if (!wait_count) wait_promise_done()
  }
- if (!unproxy_url.cache) unproxy_url.cache = {}
- var old_unproxy = unproxy_url.cache[path]
- unproxy_url.cache[path] = async () => {
+ if (!unsync_url.cache) unsync_url.cache = {}
+ var old_unsync = unsync_url.cache[path]
+ unsync_url.cache[path] = async () => {
  freed = true
  for (let a of aborts) a.abort()
  await wait_promise
@@ -402,11 +406,11 @@ async function proxy_url(url) {
  try { await require('fs').promises.unlink(meta_path) } catch (e) {}
  try { await require('fs').promises.unlink(await get_fullpath()) } catch (e) {}
  }
- await old_unproxy
+ await old_unsync

  var self = {url}

- console.log(`proxy_url: ${url}`)
+ console.log(`sync_url: ${url}`)

  if (!start_something()) return

@@ -426,6 +430,7 @@ async function proxy_url(url) {

  self.peer = Math.random().toString(36).slice(2)
  self.local_edit_counter = 0
+ self.fork_point = null
  var file_last_version = null,
  file_last_digest = null
  self.file_last_text = null
@@ -468,47 +473,60 @@ async function proxy_url(url) {
  file_loop_pump()
  }

- self.signal_file_needs_writing = () => {
+ self.signal_file_needs_writing = (just_meta_file) => {
  if (freed) return
- file_needs_writing = true
+
+ if (!just_meta_file) file_needs_writing = true
+ else if (just_meta_file && !file_needs_writing)
+ file_needs_writing = 'just_meta_file'
+
  file_loop_pump()
  }

- async function send_out(stuff) {
+ async function my_fetch(params) {
  if (!start_something()) return
- if (is_external_link) {
- try {
- console.log(`send_out ${url} ${JSON.stringify(stuff, null, 4).slice(0, 1000)}`)
-
- let a = new AbortController()
- aborts.add(a)
- var r = await braid_fetch(url, {
- signal: a.signal,
- headers: {
- "Merge-Type": "dt",
- "Content-Type": 'text/plain',
- ...(x => x && {Cookie: x})(config.cookies?.[new URL(url).hostname])
- },
- method: "PUT",
- retry: true,
- ...stuff
- })
- aborts.delete(a)
- // if we're not authorized,
- if (r.status == 401 || r.status == 403) {
- // and it's one of our versions (a local edit),
- if (self.peer === braid_text.decode_version(stuff.version[0])[0]) {
- // then revert it
- console.log(`access denied: reverting local edits`)
- unproxy_url(url)
- proxy_url(url)
- }
- }
- } catch (e) {
- if (e?.name !== "AbortError") console.log(e)
+ try {
+ var a = new AbortController()
+ aborts.add(a)
+ return await braid_fetch(url, {
+ signal: a.signal,
+ headers: {
+ "Merge-Type": "dt",
+ "Content-Type": 'text/plain',
+ ...(x => x && {Cookie: x})(config.cookies?.[new URL(url).hostname])
+ },
+ retry: true,
+ ...params
+ })
+ } catch (e) {
+ if (e?.name !== "AbortError") console.log(e)
+ } finally {
+ aborts.delete(a)
+ finish_something()
+ }
+ }
+
+ async function send_out(stuff) {
+ if (!is_external_link) return
+
+ console.log(`send_out ${url} ${JSON.stringify(stuff, null, 4).slice(0, 1000)}`)
+
+ var r = await my_fetch({ method: "PUT", ...stuff })
+
+ // the server has acknowledged this version,
+ // so add it to the fork point
+ if (r.ok) await self.update_fork_point(stuff.version[0], stuff.parents)
+
+ // if we're not authorized,
+ if (r.status == 401 || r.status == 403) {
+ // and it's one of our versions (a local edit),
+ if (self.peer === braid_text.decode_version(stuff.version[0])[0]) {
+ // then revert it
+ console.log(`access denied: reverting local edits`)
+ unsync_url(url)
+ sync_url(url)
  }
  }
- finish_something()
  }

  if (!start_something()) return
@@ -523,7 +541,8 @@ async function proxy_url(url) {
  version: file_last_version,
  digest: file_last_digest,
  peer: self.peer,
- local_edit_counter: self.local_edit_counter
+ local_edit_counter: self.local_edit_counter,
+ fork_point: self.fork_point
  } = Array.isArray(meta) ? { version: meta } : meta)

  if (!self.peer) self.peer = Math.random().toString(36).slice(2)
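
The sync metadata that round-trips through these fields is stored as plain JSON at meta_path. For illustration only (every value below is hypothetical, not taken from the package), a 0.0.85 meta file carries the new fork_point field alongside the existing ones:

    {
        "version": ["x7x1pgk-41"],
        "digest": "<sha256 hex digest of the file text>",
        "peer": "x7x1pgk",
        "local_edit_counter": 42,
        "fork_point": ["x7x1pgk-41"]
    }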
@@ -569,6 +588,16 @@ async function proxy_url(url) {
  var fullpath = await get_fullpath()

  while (file_needs_reading || file_needs_writing) {
+ async function write_meta_file() {
+ await require('fs').promises.writeFile(meta_path, JSON.stringify({
+ version: file_last_version,
+ digest: sha256(self.file_last_text),
+ peer: self.peer,
+ local_edit_counter: self.local_edit_counter,
+ fork_point: self.fork_point
+ }))
+ }
+
  if (file_needs_reading) {
  console.log(`reading file: ${fullpath}`)

@@ -611,12 +640,7 @@ async function proxy_url(url) {

  await braid_text.put(url, { version, parents, patches, peer: self.peer, merge_type: 'dt' })

- await require('fs').promises.writeFile(meta_path, JSON.stringify({
- version: file_last_version,
- digest: sha256(self.file_last_text),
- peer: self.peer,
- local_edit_counter: self.local_edit_counter
- }))
+ await write_meta_file()
  } else {
  add_to_version_cache(text, file_last_version)

@@ -630,7 +654,10 @@ async function proxy_url(url) {
  self.file_last_stat = stat
  self.file_ignore_until = Date.now() + 1000
  }
- if (file_needs_writing) {
+ if (file_needs_writing === 'just_meta_file') {
+ file_needs_writing = false
+ await write_meta_file()
+ } else if (file_needs_writing) {
  file_needs_writing = false
  let { version, body } = await braid_text.get(url, {})
  if (!v_eq(version, file_last_version)) {
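
As of this release, file_needs_writing is three-valued: false (idle), true (write the file and its meta file), or the string 'just_meta_file' (only the metadata is stale, e.g. after a fork-point update). A minimal standalone sketch of that flag logic, with hypothetical names, not the package's API:

    // Sketch: a full write always wins over a meta-only write,
    // and a meta-only request never downgrades a pending full write.
    let file_needs_writing = false
    function signal_write(just_meta_file) {
        if (!just_meta_file) file_needs_writing = true
        else if (!file_needs_writing) file_needs_writing = 'just_meta_file'
    }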
@@ -654,16 +681,10 @@ async function proxy_url(url) {
  self.file_last_text = body
  self.file_ignore_until = Date.now() + 1000
  await require('fs').promises.writeFile(fullpath, self.file_last_text)
-
-
- await require('fs').promises.writeFile(meta_path, JSON.stringify({
- version: file_last_version,
- digest: sha256(self.file_last_text),
- peer: self.peer,
- local_edit_counter: self.local_edit_counter
- }))
  }

+ await write_meta_file()
+
  if (await is_read_only(fullpath) !== self.file_read_only) {
  self.file_ignore_until = Date.now() + 1000
  await set_read_only(fullpath, self.file_read_only)
@@ -682,8 +703,90 @@ async function proxy_url(url) {
  file_loop_pump_lock--
  }

- if (is_external_link) connect()
- function connect() {
+ self.update_fork_point = async (event, parents) => {
+ var resource = await braid_text.get_resource(url)
+
+ // special case:
+ // if current fork point has all parents,
+ // then we can just remove those
+ // and add event
+ var fork_set = new Set(self.fork_point)
+ if (parents.every(p => fork_set.has(p))) {
+ parents.forEach(p => fork_set.delete(p))
+ fork_set.add(event)
+ self.fork_point = [...fork_set.values()]
+ } else {
+ // full-proof approach..
+ var looking_for = fork_set
+ looking_for.add(event)
+
+ self.fork_point = []
+ var shadow = new Set()
+
+ var bytes = resource.doc.toBytes()
+ var [_, events, parentss] = braid_text.dt_parse([...bytes])
+ for (var i = events.length - 1; i >= 0 && looking_for.size; i--) {
+ var e = events[i].join('-')
+ if (looking_for.has(e)) {
+ looking_for.delete(e)
+ if (!shadow.has(e)) self.fork_point.push(e)
+ shadow.add(e)
+ }
+ if (shadow.has(e))
+ parentss[i].forEach(p => shadow.add(p.join('-')))
+ }
+ }
+ self.fork_point.sort()
+ self.signal_file_needs_writing(true)
+ }
+
+ async function find_fork_point() {
+ console.log(`[find_fork_point] url: ${url}`)
+
+ // see if they have the fork point
+ if (self.fork_point) {
+ var r = await my_fetch({ method: "HEAD", version: self.fork_point })
+ if (r.ok) {
+ console.log(`[find_fork_point] they have our latest fork point, horray!`)
+ return self.fork_point
+ }
+ }
+
+ // otherwise let's binary search for new fork point..
+ var resource = await braid_text.get_resource(url)
+ var bytes = resource.doc.toBytes()
+ var [_, events, __] = braid_text.dt_parse([...bytes])
+ events = events.map(x => x.join('-'))
+
+ var min = -1
+ var max = events.length
+ self.fork_point = []
+ while (min + 1 < max) {
+ var i = Math.floor((min + max)/2)
+ var version = [events[i]]
+
+ console.log(`min=${min}, max=${max}, i=${i}, version=${version}`)
+
+ var st = Date.now()
+ var r = await my_fetch({ method: "HEAD", version })
+ console.log(`fetched in ${Date.now() - st}`)
+
+ if (r.ok) {
+ min = i
+ self.fork_point = version
+ } else max = i
+ }
+ console.log(`[find_fork_point] settled on: ${JSON.stringify(self.fork_point)}`)
+ self.signal_file_needs_writing(true)
+ return self.fork_point
+ }
+
+ if (is_external_link) find_fork_point().then(fork_point => {
+ connect(fork_point)
+ send_new_stuff(fork_point)
+ })
+
+ function connect(fork_point) {
  let a = new AbortController()
  aborts.add(a)
  self.reconnect = () => {
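
The fork point introduced in this hunk is the newest version frontier that the local replica and the server are known to share: connect() subscribes from it, and send_new_stuff() replays everything after it. find_fork_point() recovers it by probing the server with HEAD requests and binary-searching the local event log, which works because dt_parse yields events in causal order, so "the server has event i" is monotone in i. A self-contained sketch of that search, assuming a hypothetical has_version(event) probe standing in for the HEAD request above:

    // Sketch: `events` is causally ordered oldest-to-newest, and
    // has_version(e) asks the server whether it knows event e.
    async function latest_shared_event(events, has_version) {
        var min = -1, max = events.length, shared = []
        while (min + 1 < max) {
            var i = Math.floor((min + max) / 2)
            if (await has_version(events[i])) { min = i; shared = [events[i]] }
            else max = i
        }
        return shared  // [] if the server has none of our events
    }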
@@ -691,7 +794,7 @@ async function proxy_url(url) {

  aborts.delete(a)
  a.abort()
- connect()
+ connect(fork_point)
  }

  console.log(`connecting to ${url}`)
@@ -705,12 +808,8 @@ async function proxy_url(url) {
  subscribe: true,
  retry: {
  onRes: (res) => {
- if (res.status !== 209) {
- log_error(`Can't sync ${url} -- got bad response ${res.status} from server (expected 209)`)
- return console.log(
- `FAILED TO CONNECT TO: ${url}\n` +
- `GOT STATUS CODE: ${res.status}, expected 209.`)
- }
+ if (res.status !== 209)
+ return log_error(`Can't sync ${url} -- got bad response ${res.status} from server (expected 209)`)

  console.log(`connected to ${url}`)
  console.log(`  editable = ${res.headers.get('editable')}`)
@@ -721,8 +820,9 @@ async function proxy_url(url) {
  },
  heartbeats: 120,
  parents: async () => {
- let cur = await braid_text.get(url, {})
- if (cur.version.length) return cur.version
+ var x = fork_point || await find_fork_point()
+ fork_point = null
+ return x
  },
  peer: self.peer
  }).then(x => {
@@ -740,6 +840,10 @@ async function proxy_url(url) {

  await braid_text.put(url, { ...update, peer: self.peer, merge_type: 'dt' })

+ // the server is giving us this version,
+ // so they must have it,
+ // so let's add it to our fork point
+ await self.update_fork_point(update.version[0], update.parents)

  self.signal_file_needs_writing()
  finish_something()
@@ -748,112 +852,35 @@ async function proxy_url(url) {
  }

  // send them stuff we have but they don't
- if (is_external_link) send_new_stuff()
- async function send_new_stuff() {
- if (!start_something()) return
- try {
- var a = new AbortController()
- aborts.add(a)
- var r = await braid_fetch(url, {
- signal: a.signal,
- method: "HEAD",
- headers: {
- Accept: 'text/plain',
- ...(x => x && {Cookie: x})(config.cookies?.[new URL(url).hostname]),
- },
- retry: true
- })
- aborts.delete(a)
-
- if (r.headers.get('editable') === 'false') {
- console.log('do not send updates for read-only file: ' + url)
- return
- }
-
- if (r.headers.get('version') == null) {
- log_error(`Can't sync ${url} -- got no version from server`)
- return console.log(`GOT NO VERSION FROM: ${url}`)
- }
- var parents = JSON.parse(`[${r.headers.get('version')}]`)
-
- var bytes = (await braid_text.get_resource(url)).doc.toBytes()
- var [_, versions, __] = braid_text.dt_parse([...bytes])
- var agents = {}
- for (var v of versions) agents[v[0]] = v[1]
-
- function we_have_it(version) {
- var m = version.match(/^(.*)-(\d+)$/s)
- var agent = m[1]
- var seq = 1 * m[2]
- return (agents[agent] ?? -1) >= seq
- }
-
- if (parents.length && !parents.some(we_have_it)) {
- var min = 0
- var max = versions.length
- var last_good_parents = []
- while (min < max) {
- var i = Math.ceil((min + max)/2)
- parents = i ? [versions[i - 1].join('-')] : []
-
- console.log(`min=${min}, max=${max}, i=${i}, parents=${parents}`)
-
- var a = new AbortController()
- aborts.add(a)
- var st = Date.now()
- var r = await braid_fetch(url, {
- signal: a.signal,
- method: "HEAD",
- parents,
- headers: {
- Accept: 'text/plain',
- ...(x => x && {Cookie: x})(config.cookies?.[new URL(url).hostname]),
- },
- retry: true
- })
- console.log(`fetched in ${Date.now() - st}`)
- aborts.delete(a)
-
- if (r.ok) {
- min = i
- last_good_parents = parents
- } else {
- max = i - 1
- }
+ async function send_new_stuff(fork_point) {
+ var r = await my_fetch({ method: "HEAD" })
+ if (r.headers.get('editable') === 'false')
+ return console.log('do not send updates for read-only file: ' + url)
+
+ var in_parallel = create_parallel_promises(10)
+ braid_text.get(url, braid_text_get_options = {
+ parents: fork_point,
+ merge_type: 'dt',
+ peer: self.peer,
+ subscribe: async (u) => {
+ if (u.version.length) {
+ self.signal_file_needs_writing()
+ in_parallel(() => send_out({...u, peer: self.peer}))
  }
- parents = last_good_parents
-
- console.log(`found good parents: ${parents}: ${url}`)
- }
-
- var in_parallel = create_parallel_promises(10)
- braid_text.get(url, braid_text_get_options = {
- parents,
- merge_type: 'dt',
- peer: self.peer,
- subscribe: async (u) => {
- if (u.version.length) {
- self.signal_file_needs_writing()
- in_parallel(() => send_out({...u, peer: self.peer}))
- }
- },
- })
- } catch (e) {
- if (e?.name !== "AbortError") console.log(e)
- }
- finish_something()
+ },
+ })
  }

  // for config and errors file, listen for web changes
  if (!is_external_link) braid_text.get(url, braid_text_get_options = {
  merge_type: 'dt',
  peer: self.peer,
- subscribe: self.signal_file_needs_writing,
+ subscribe: () => self.signal_file_needs_writing(),
  })

  return self
  })
- return await proxy_url.cache[url]
+ return await sync_url.cache[url]
  }

  async function ensure_path(path) {
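
One detail of the rewritten send_new_stuff(): outbound PUTs are funneled through create_parallel_promises(10), whose definition is not part of this diff. A minimal concurrency limiter consistent with how it is called here (a function that accepts promise-returning thunks) might look like the following sketch; the package's actual helper may differ:

    // Hypothetical sketch of an n-way concurrency limiter.
    function create_parallel_promises(limit) {
        var active = 0, queue = []
        async function run(thunk) {
            active++
            try { await thunk() }
            finally {
                active--
                if (queue.length) run(queue.shift())
            }
        }
        // Run immediately if under the limit, otherwise queue.
        return thunk => { active < limit ? run(thunk) : queue.push(thunk) }
    }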
package/package.json CHANGED
@@ -1,13 +1,13 @@
  {
  "name": "braidfs",
- "version": "0.0.82",
+ "version": "0.0.85",
  "description": "braid technology synchronizing files and webpages",
  "author": "Braid Working Group",
  "repository": "braid-org/braidfs",
  "homepage": "https://braid.org",
  "dependencies": {
  "braid-http": "^1.3.73",
- "braid-text": "^0.2.22",
+ "braid-text": "^0.2.25",
  "chokidar": "^3.6.0"
  },
  "bin": {