braidfs 0.0.25 → 0.0.27

This diff shows the changes between publicly released package versions as they appear in their public registry, and is provided for informational purposes only.
Files changed (2)
  1. package/index.js +226 -175
  2. package/package.json +2 -2
package/index.js CHANGED
@@ -28,28 +28,38 @@ if (!require('fs').existsSync(braidfs_config_file)) {
 let config = JSON.parse(require('fs').readFileSync(braidfs_config_file, 'utf8'))
 
 // process command line args (override config)
+console.log(`braidfs version: ${require('./package.json').version}`)
 let argv = process.argv.slice(2)
 let save_config = false
 while (argv.length) {
     let a = argv.shift()
     if (a.match(/^\d+$/)) {
         config.port = parseInt(a)
+        console.log(`setting port to ${config.port}`)
     } else if (a === 'sync') {
         let b = argv.shift()
         if (b === 'index') {
             config.sync_index_urls.push(argv.shift())
+            console.log(`syncing index url: ${config.sync_index_urls.slice(-1)[0]}`)
         } else {
             config.sync_urls.push(b)
+            console.log(`syncing url: ${config.sync_urls.slice(-1)[0]}`)
         }
     } else if (a === 'save') {
         save_config = true
+        console.log(`will save new config file`)
     } else if (a === 'expose') {
         config.allow_remote_access = true
+        console.log(`exposing server to the outside world`)
     } else if (a === 'unexpose') {
         config.allow_remote_access = false
+        console.log(`unexpose server from the outside world`)
     }
 }
-if (save_config) require('fs').writeFileSync(braidfs_config_file, JSON.stringify(config, null, 4))
+if (save_config) {
+    require('fs').writeFileSync(braidfs_config_file, JSON.stringify(config, null, 4))
+    console.log(`saved config file`)
+}
 
 braid_text.db_folder = config.braid_text_db
 
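The new logging lines make each argument's effect visible at startup. For readers unfamiliar with this shift-based style of argument parsing, here is a minimal standalone sketch of the same pattern, with hypothetical defaults and input (not braidfs code):

    // Consume tokens left to right; keywords pull their operands off the front.
    let opts = { port: 10000, sync_urls: [], save: false }      // assumed defaults
    let args = ['9000', 'sync', 'https://example.com/doc', 'save']
    while (args.length) {
        let a = args.shift()
        if (a.match(/^\d+$/)) opts.port = parseInt(a)           // bare number sets the port
        else if (a === 'sync') opts.sync_urls.push(args.shift())
        else if (a === 'save') opts.save = true
    }
    console.log(opts)  // { port: 9000, sync_urls: [ 'https://example.com/doc' ], save: true }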
@@ -77,6 +87,10 @@ braid_text.list().then(x => {
 require('chokidar').watch(config.proxy_base).
     on('change', (path) => {
         path = require('path').relative(config.proxy_base, path)
+
+        // Skip any temp files with a # in the name
+        if (path.includes('#')) return
+
         console.log(`path changed: ${path}`)
 
         path = normalize_url(path)
@@ -86,6 +100,10 @@ require('chokidar').watch(config.proxy_base).
     }).
     on('add', async (path) => {
         path = require('path').relative(config.proxy_base, path)
+
+        // Skip any temp files with a # in the name
+        if (path.includes('#')) return
+
         console.log(`path added: ${path}`)
 
         let url = null
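Both watcher callbacks now skip paths containing '#', which editors commonly use for temporary lock files (e.g. Emacs writes `#file#`). An alternative, sketched here as an illustration rather than taken from braidfs, is chokidar's `ignored` option, which filters such paths before any handler fires:

    const chokidar = require('chokidar')

    // Hypothetical watcher: suppress events for any path containing '#'.
    chokidar.watch('/some/dir', { ignored: p => p.includes('#') })
        .on('add', p => console.log(`added: ${p}`))
        .on('change', p => console.log(`changed: ${p}`))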
@@ -157,33 +175,28 @@ server.listen(config.port, () => {
 });
 
 async function proxy_url(url) {
-    let chain = proxy_url.chain || (proxy_url.chain = Promise.resolve())
-
     async function ensure_path(path) {
-        // ensure that the path leading to our file exists..
-        await (chain = chain.then(async () => {
-            try {
-                await require("fs").promises.mkdir(path, { recursive: true })
-            } catch (e) {
-                let parts = path.split(require("path").sep)
-                for (let i = 1; i <= parts.length; i++) {
-                    let partial = require("path").join(...parts.slice(0, i))
+        try {
+            await require("fs").promises.mkdir(path, { recursive: true })
+        } catch (e) {
+            let parts = path.split(require("path").sep).slice(1)
+            for (let i = 1; i <= parts.length; i++) {
+                let partial = require("path").sep + require("path").join(...parts.slice(0, i))
 
-                    if (!(await is_dir(partial))) {
-                        let save = await require("fs").promises.readFile(partial)
+                if (!(await is_dir(partial))) {
+                    let save = await require("fs").promises.readFile(partial)
 
-                        await require("fs").promises.unlink(partial)
-                        await require("fs").promises.mkdir(path, { recursive: true })
+                    await require("fs").promises.unlink(partial)
+                    await require("fs").promises.mkdir(path, { recursive: true })
 
-                        while (await is_dir(partial))
-                            partial = require("path").join(partial, 'index')
+                    while (await is_dir(partial))
+                        partial = require("path").join(partial, 'index')
 
-                        await require("fs").promises.writeFile(partial, save)
-                        break
-                    }
+                    await require("fs").promises.writeFile(partial, save)
+                    break
                 }
             }
-        }))
+        }
     }
 
     // normalize url by removing any trailing /index/index/
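Two things change in ensure_path. The per-call promise chain is gone (serialization now happens once per URL inside proxy_url, in the next hunk), and the rebuilt path prefixes are made absolute again: splitting an absolute path on the separator yields an empty first segment, so the new code drops it with .slice(1) and prepends the separator when re-joining. The recovery logic itself is unchanged: if mkdir fails because an ancestor exists as a plain file, that file is promoted to a directory and its contents move into an index file inside it. A standalone sketch of that promotion step, with made-up paths and no error handling:

    const fs = require('fs').promises
    const path = require('path')

    // `blocking` is a file standing where a directory is needed; `wanted` is
    // the directory tree we are trying to create beneath it.
    async function promote(blocking, wanted) {
        let saved = await fs.readFile(blocking)          // keep the old contents
        await fs.unlink(blocking)                        // remove the blocking file
        await fs.mkdir(wanted, { recursive: true })      // now the tree can exist
        await fs.writeFile(path.join(blocking, 'index'), saved)  // park the bytes
    }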
@@ -192,197 +205,235 @@ async function proxy_url(url) {
     url = normalized_url
 
     if (!proxy_url.cache) proxy_url.cache = {}
-    if (proxy_url.cache[url]) return proxy_url.cache[url]
-    let self = {}
-    proxy_url.cache[url] = self
+    if (!proxy_url.chain) proxy_url.chain = Promise.resolve()
+    if (!proxy_url.cache[url]) proxy_url.cache[url] = proxy_url.chain = proxy_url.chain.then(async () => {
+        let self = {}
 
-    console.log(`proxy_url: ${url}`)
+        console.log(`proxy_url: ${url}`)
 
-    let is_external_link = url.match(/^https?:\/\//)
-    let path = is_external_link ? url.replace(/^https?:\/\//, '') : `localhost/${url}`
-    let fullpath = require("path").join(config.proxy_base, path)
+        let is_external_link = url.match(/^https?:\/\//)
+        let path = is_external_link ? url.replace(/^https?:\/\//, '') : `localhost/${url}`
+        let fullpath = require("path").join(config.proxy_base, path)
 
-    if (is_external_link) {
-        let u = new URL(url)
-        host_to_protocol[u.host] = u.protocol
-    }
+        if (is_external_link) {
+            let u = new URL(url)
+            host_to_protocol[u.host] = u.protocol
+        }
 
-    // if we're accessing /blah/index, it will be normalized to /blah,
-    // but we still want to create a directory out of blah in this case
-    if (wasnt_normal && !(await is_dir(fullpath))) await ensure_path(fullpath)
+        // if we're accessing /blah/index, it will be normalized to /blah,
+        // but we still want to create a directory out of blah in this case
+        if (wasnt_normal && !(await is_dir(fullpath))) await ensure_path(fullpath)
 
-    await ensure_path(require("path").dirname(fullpath))
+        await ensure_path(require("path").dirname(fullpath))
 
-    async function get_fullpath() {
-        let p = fullpath
-        while (await is_dir(p)) p = require("path").join(p, 'index')
-        return p
-    }
+        async function get_fullpath() {
+            let p = fullpath
+            while (await is_dir(p)) p = require("path").join(p, 'index')
+            return p
+        }
 
-    let peer = Math.random().toString(36).slice(2)
-    var char_counter = -1
-    let file_last_version = null
-    let file_last_text = null
-    self.file_read_only = null
-    let file_needs_reading = true
-    let file_needs_writing = null
-    let file_loop_pump_lock = 0
-
-    function signal_file_needs_reading() {
-        file_needs_reading = true
-        file_loop_pump()
-    }
+        let peer = Math.random().toString(36).slice(2)
+        var char_counter = -1
+        let file_last_version = null
+        let file_last_text = null
+        self.file_read_only = null
+        let file_needs_reading = true
+        let file_needs_writing = null
+        let file_loop_pump_lock = 0
+
+        function signal_file_needs_reading() {
+            file_needs_reading = true
+            file_loop_pump()
+        }
 
-    function signal_file_needs_writing() {
-        file_needs_writing = true
-        file_loop_pump()
-    }
+        function signal_file_needs_writing() {
+            file_needs_writing = true
+            file_loop_pump()
+        }
 
-    async function send_out(stuff) {
-        if (is_external_link) await braid_fetch_wrapper(url, {
-            headers: {
-                "Merge-Type": "dt",
-                "Content-Type": 'text/plain',
-                ...config?.domains?.[(new URL(url)).hostname]?.auth_headers,
-            },
-            method: "PUT",
-            retry: true,
-            ...stuff
-        })
-    }
+        async function send_out(stuff) {
+            if (is_external_link) await braid_fetch_wrapper(url, {
+                headers: {
+                    "Merge-Type": "dt",
+                    "Content-Type": 'text/plain',
+                    ...config?.domains?.[(new URL(url)).hostname]?.auth_headers,
+                },
+                method: "PUT",
+                retry: true,
+                ...stuff
+            })
+        }
 
-    file_loop_pump()
-    async function file_loop_pump() {
-        if (file_loop_pump_lock) return
-        file_loop_pump_lock++
+        path_to_func[path] = signal_file_needs_reading
 
-        if (file_last_version === null) {
-            try {
-                file_last_version = JSON.parse(await require('fs').promises.readFile(require('path').join(config.proxy_base_last_versions, braid_text.encode_filename(url)), { encoding: 'utf8' }))
-                file_last_text = (await braid_text.get(url, { version: file_last_version })).body
-                file_needs_writing = !v_eq(file_last_version, (await braid_text.get(url, {})).version)
-            } catch (e) {
-                file_last_text = ''
-                file_needs_writing = true
+        file_loop_pump()
+        async function file_loop_pump() {
+            if (file_loop_pump_lock) return
+            file_loop_pump_lock++
+
+            if (file_last_version === null) {
+                try {
+                    file_last_version = JSON.parse(await require('fs').promises.readFile(require('path').join(config.proxy_base_last_versions, braid_text.encode_filename(url)), { encoding: 'utf8' }))
+                    file_last_text = (await braid_text.get(url, { version: file_last_version })).body
+                    file_needs_writing = !v_eq(file_last_version, (await braid_text.get(url, {})).version)
+                } catch (e) {
+                    file_last_text = ''
+                    file_needs_writing = true
+                }
             }
-        }
 
-        while (file_needs_reading || file_needs_writing) {
-            if (file_needs_reading) {
-                file_needs_reading = false
+            while (file_needs_reading || file_needs_writing) {
+                if (file_needs_reading) {
+                    file_needs_reading = false
 
-                if (self.file_read_only === null) try { self.file_read_only = await is_read_only(await get_fullpath()) } catch (e) { }
+                    if (self.file_read_only === null) try { self.file_read_only = await is_read_only(await get_fullpath()) } catch (e) { }
 
-                let text = ''
-                try { text = await require('fs').promises.readFile(await get_fullpath(), { encoding: 'utf8' }) } catch (e) { }
+                    let text = ''
+                    try { text = await require('fs').promises.readFile(await get_fullpath(), { encoding: 'utf8' }) } catch (e) { }
 
-                var patches = diff(file_last_text, text)
-                if (patches.length) {
-                    // convert from js-indicies to code-points
-                    char_counter += patches_to_code_points(patches, file_last_text)
+                    var patches = diff(file_last_text, text)
+                    if (patches.length) {
+                        // convert from js-indicies to code-points
+                        char_counter += patches_to_code_points(patches, file_last_text)
 
-                    file_last_text = text
+                        file_last_text = text
 
-                    var version = [peer + "-" + char_counter]
-                    var parents = file_last_version
-                    file_last_version = version
+                        var version = [peer + "-" + char_counter]
+                        var parents = file_last_version
+                        file_last_version = version
 
-                    send_out({ version, parents, patches, peer })
+                        send_out({ version, parents, patches, peer })
 
-                    await braid_text.put(url, { version, parents, patches, peer })
+                        await braid_text.put(url, { version, parents, patches, peer })
 
-                    await require('fs').promises.writeFile(require('path').join(config.proxy_base_last_versions, braid_text.encode_filename(url)), JSON.stringify(file_last_version))
+                        await require('fs').promises.writeFile(require('path').join(config.proxy_base_last_versions, braid_text.encode_filename(url)), JSON.stringify(file_last_version))
+                    }
                 }
-            }
-            if (file_needs_writing) {
-                file_needs_writing = false
-                let { version, body } = await braid_text.get(url, {})
-                if (!v_eq(version, file_last_version)) {
+                if (file_needs_writing) {
+                    file_needs_writing = false
+                    let { version, body } = await braid_text.get(url, {})
+                    if (!v_eq(version, file_last_version)) {
 
-                    console.log(`writing file ${await get_fullpath()}`)
+                        console.log(`writing file ${await get_fullpath()}`)
 
-                    try { if (await is_read_only(await get_fullpath())) await set_read_only(await get_fullpath(), false) } catch (e) { }
+                        try { if (await is_read_only(await get_fullpath())) await set_read_only(await get_fullpath(), false) } catch (e) { }
 
-                    file_last_version = version
-                    file_last_text = body
-                    await require('fs').promises.writeFile(await get_fullpath(), file_last_text)
-                    await require('fs').promises.writeFile(require('path').join(config.proxy_base_last_versions, braid_text.encode_filename(url)), JSON.stringify(file_last_version))
-                }
+                        file_last_version = version
+                        file_last_text = body
+                        await require('fs').promises.writeFile(await get_fullpath(), file_last_text)
+                        await require('fs').promises.writeFile(require('path').join(config.proxy_base_last_versions, braid_text.encode_filename(url)), JSON.stringify(file_last_version))
+                    }
 
-                if (await is_read_only(await get_fullpath()) !== self.file_read_only) await set_read_only(await get_fullpath(), self.file_read_only)
+                    if (await is_read_only(await get_fullpath()) !== self.file_read_only) await set_read_only(await get_fullpath(), self.file_read_only)
+                }
             }
+            file_loop_pump_lock--
         }
-        file_loop_pump_lock--
-    }
 
-    if (is_external_link) braid_fetch_wrapper(url, {
-        headers: {
-            "Merge-Type": "dt",
-            Accept: 'text/plain',
-            ...config?.domains?.[(new URL(url)).hostname]?.auth_headers,
-        },
-        subscribe: true,
-        retry: true,
-        parents: async () => {
-            let cur = await braid_text.get(url, {})
-            if (cur.version.length) return cur.version
-        },
-        peer
-    }, (res) => {
-        self.file_read_only = res.headers.get('editable') === 'false'
-        signal_file_needs_writing()
-    }).then(x => {
-        x.subscribe(async update => {
-            // console.log(`update: ${JSON.stringify(update, null, 4)}`)
-            if (update.version.length == 0) return;
-
-            await braid_text.put(url, { ...update, peer, merge_type: 'dt' })
-
-            signal_file_needs_writing()
-        })
-    })
-
-    path_to_func[path] = signal_file_needs_reading
-
-    // try a HEAD without subscribe to get the version
-    let parents = null
-    if (is_external_link) {
-        try {
-            let head_res = await braid_fetch_wrapper(url, {
-                method: 'HEAD',
-                headers: {
-                    Accept: 'text/plain',
-                    ...config?.domains?.[(new URL(url)).hostname]?.auth_headers,
-                },
-                retry: true,
-            })
-            parents = head_res.headers.get('version') ?
-                JSON.parse(`[${head_res.headers.get('version')}]`) :
-                null
-            self.file_read_only = head_res.headers.get('editable') === 'false'
-            signal_file_needs_writing()
-        } catch (e) {
-            console.log('HEAD failed: ', e)
-        }
-    }
+        // try a HEAD without subscribe to get the version
+        let parents = null
+        if (is_external_link) {
+            try {
+                let head_res = await braid_fetch_wrapper(url, {
+                    method: 'HEAD',
+                    headers: {
+                        Accept: 'text/plain',
+                        ...config?.domains?.[(new URL(url)).hostname]?.auth_headers,
+                    },
+                    retry: true,
+                })
+                parents = head_res.headers.get('version') ?
+                    JSON.parse(`[${head_res.headers.get('version')}]`) :
+                    null
+                self.file_read_only = head_res.headers.get('editable') === 'false'
+                signal_file_needs_writing()
+            } catch (e) {
+                console.log('HEAD failed: ', e)
+            }
+
+            // work here
+            console.log(`waiting_for_versions: ${parents}`)
+
+            let waiting_for_versions = Object.fromEntries(parents?.map(x => [x, true]) ?? [])
+            await new Promise(done => {
+                braid_fetch_wrapper(url, {
+                    headers: {
+                        "Merge-Type": "dt",
+                        Accept: 'text/plain',
+                        ...config?.domains?.[(new URL(url)).hostname]?.auth_headers,
+                    },
+                    subscribe: true,
+                    retry: true,
+                    parents: async () => {
+                        let cur = await braid_text.get(url, {})
+                        if (cur.version.length) {
+                            waiting_for_versions = Object.fromEntries(Object.keys(waiting_for_versions).map(x => {
+                                let [a, seq] = x.split('-')
+                                return [a, seq]
+                            }))
+                            for (let v of cur.version) {
+                                let [a, seq] = v.split('-')
+                                if (waiting_for_versions[a] <= seq) delete waiting_for_versions[a]
+                            }
+                            waiting_for_versions = Object.fromEntries(Object.entries(waiting_for_versions).map(x => [`${x[0]}-${x[1]}`, true]))
+
+                            if (done) {
+                                if (!Object.keys(waiting_for_versions).length) {
+                                    console.log('got everything we were waiting for..')
+                                    done()
+                                    done = null
+                                }
+                            }
+
+                            return cur.version
+                        }
+                    },
+                    peer
+                }, (res) => {
+                    self.file_read_only = res.headers.get('editable') === 'false'
+                    signal_file_needs_writing()
+                }).then(x => {
+                    x.subscribe(async update => {
+                        // console.log(`update: ${JSON.stringify(update, null, 4)}`)
+                        if (update.version.length == 0) return;
+                        if (update.version.length != 1) throw 'unexpected';
+
+                        await braid_text.put(url, { ...update, peer, merge_type: 'dt' })
+
+                        if (done) {
+                            delete waiting_for_versions[update.version[0]]
+                            if (!Object.keys(waiting_for_versions).length) {
+                                console.log('got everything we were waiting for..')
+                                done()
+                                done = null
+                            }
+                        }
+
+                        signal_file_needs_writing()
+                    })
+                })
+            })
+        }
 
-    // now get everything since then, and send it back..
-    braid_text.get(url, {
-        parents,
-        merge_type: 'dt',
-        peer,
-        subscribe: async ({ version, parents, body, patches }) => {
-            if (version.length == 0) return;
+        // now get everything since then, and send it back..
+        braid_text.get(url, {
+            parents,
+            merge_type: 'dt',
+            peer,
+            subscribe: async ({ version, parents, body, patches }) => {
+                if (version.length == 0) return;
 
-            // console.log(`local got: ${JSON.stringify({ version, parents, body, patches }, null, 4)}`)
+                // console.log(`local got: ${JSON.stringify({ version, parents, body, patches }, null, 4)}`)
 
-            signal_file_needs_writing()
+                signal_file_needs_writing()
 
-            send_out({ version, parents, body, patches, peer })
-        },
-    })
+                send_out({ version, parents, body, patches, peer })
+            },
+        })
 
-    return self
+        return self
+    })
+    return await proxy_url.cache[url]
 }
 
 ////////////////////////////////
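Two patterns in this last hunk deserve a note. First, proxy_url's cache used to store a plain object and return it immediately on a hit; it now stores the promise of the fully initialized object, and threads every initialization through one shared chain, so concurrent callers for the same URL share one setup and setups for different URLs run one at a time. A generic sketch of that memoize-and-serialize pattern, with a hypothetical init function standing in for the real work:

    // Cache the *promise*, not the result: callers racing on the same key
    // share one initialization, and the chain serializes work across keys.
    function get_resource(key) {
        let self = get_resource
        if (!self.cache) self.cache = {}
        if (!self.chain) self.chain = Promise.resolve()
        if (!self.cache[key])
            self.cache[key] = self.chain = self.chain.then(() => init(key))
        return self.cache[key]
    }
    async function init(key) { return { key } }  // stand-in for the real setup

Second, instead of subscribing blindly, the new code first issues a HEAD request to learn the server's current version, then holds the initialization promise open (the new Promise(done => ...)) until the subscription has delivered every version the HEAD reported. Version ids are peer-seq strings, so a waited-for version can be crossed off once any version from the same peer arrives with an equal or higher sequence number. A small sketch of that bookkeeping (hypothetical helper, numeric comparison assumed):

    // Drop every waited-for version whose peer shows up in `have`
    // with an equal or newer sequence number.
    function still_waiting(waiting, have) {
        let by_peer = Object.fromEntries(waiting.map(v => v.split('-')))
        for (let v of have) {
            let [peer, seq] = v.split('-')
            if (parseInt(by_peer[peer]) <= parseInt(seq)) delete by_peer[peer]
        }
        return Object.entries(by_peer).map(([p, s]) => `${p}-${s}`)
    }
    console.log(still_waiting(['alice-5', 'bob-2'], ['alice-7']))  // [ 'bob-2' ]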
package/package.json CHANGED
@@ -1,13 +1,13 @@
 {
     "name": "braidfs",
-    "version": "0.0.25",
+    "version": "0.0.27",
     "description": "braid technology synchronizing files and webpages",
     "author": "Braid Working Group",
     "repository": "braid-org/braidfs",
     "homepage": "https://braid.org",
     "dependencies": {
         "braid-http": "^0.3.20",
-        "braid-text": "^0.0.29",
+        "braid-text": "^0.0.30",
         "chokidar": "^3.6.0"
     },
     "bin": {