braidfs 0.0.25 → 0.0.26

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (2)
  1. package/index.js +207 -174
  2. package/package.json +2 -2
package/index.js CHANGED
@@ -157,33 +157,28 @@ server.listen(config.port, () => {
157
157
  });
158
158
 
159
159
  async function proxy_url(url) {
160
- let chain = proxy_url.chain || (proxy_url.chain = Promise.resolve())
161
-
162
160
  async function ensure_path(path) {
163
- // ensure that the path leading to our file exists..
164
- await (chain = chain.then(async () => {
165
- try {
166
- await require("fs").promises.mkdir(path, { recursive: true })
167
- } catch (e) {
168
- let parts = path.split(require("path").sep)
169
- for (let i = 1; i <= parts.length; i++) {
170
- let partial = require("path").join(...parts.slice(0, i))
161
+ try {
162
+ await require("fs").promises.mkdir(path, { recursive: true })
163
+ } catch (e) {
164
+ let parts = path.split(require("path").sep).slice(1)
165
+ for (let i = 1; i <= parts.length; i++) {
166
+ let partial = require("path").sep + require("path").join(...parts.slice(0, i))
171
167
 
172
- if (!(await is_dir(partial))) {
173
- let save = await require("fs").promises.readFile(partial)
168
+ if (!(await is_dir(partial))) {
169
+ let save = await require("fs").promises.readFile(partial)
174
170
 
175
- await require("fs").promises.unlink(partial)
176
- await require("fs").promises.mkdir(path, { recursive: true })
171
+ await require("fs").promises.unlink(partial)
172
+ await require("fs").promises.mkdir(path, { recursive: true })
177
173
 
178
- while (await is_dir(partial))
179
- partial = require("path").join(partial, 'index')
174
+ while (await is_dir(partial))
175
+ partial = require("path").join(partial, 'index')
180
176
 
181
- await require("fs").promises.writeFile(partial, save)
182
- break
183
- }
177
+ await require("fs").promises.writeFile(partial, save)
178
+ break
184
179
  }
185
180
  }
186
- }))
181
+ }
187
182
  }
188
183
 
189
184
  // normalize url by removing any trailing /index/index/
@@ -192,197 +187,235 @@ async function proxy_url(url) {
192
187
  url = normalized_url
193
188
 
194
189
  if (!proxy_url.cache) proxy_url.cache = {}
195
- if (proxy_url.cache[url]) return proxy_url.cache[url]
196
- let self = {}
197
- proxy_url.cache[url] = self
190
+ if (!proxy_url.chain) proxy_url.chain = Promise.resolve()
191
+ if (!proxy_url.cache[url]) proxy_url.cache[url] = proxy_url.chain = proxy_url.chain.then(async () => {
192
+ let self = {}
198
193
 
199
- console.log(`proxy_url: ${url}`)
194
+ console.log(`proxy_url: ${url}`)
200
195
 
201
- let is_external_link = url.match(/^https?:\/\//)
202
- let path = is_external_link ? url.replace(/^https?:\/\//, '') : `localhost/${url}`
203
- let fullpath = require("path").join(config.proxy_base, path)
196
+ let is_external_link = url.match(/^https?:\/\//)
197
+ let path = is_external_link ? url.replace(/^https?:\/\//, '') : `localhost/${url}`
198
+ let fullpath = require("path").join(config.proxy_base, path)
204
199
 
205
- if (is_external_link) {
206
- let u = new URL(url)
207
- host_to_protocol[u.host] = u.protocol
208
- }
200
+ if (is_external_link) {
201
+ let u = new URL(url)
202
+ host_to_protocol[u.host] = u.protocol
203
+ }
209
204
 
210
- // if we're accessing /blah/index, it will be normalized to /blah,
211
- // but we still want to create a directory out of blah in this case
212
- if (wasnt_normal && !(await is_dir(fullpath))) await ensure_path(fullpath)
205
+ // if we're accessing /blah/index, it will be normalized to /blah,
206
+ // but we still want to create a directory out of blah in this case
207
+ if (wasnt_normal && !(await is_dir(fullpath))) await ensure_path(fullpath)
213
208
 
214
- await ensure_path(require("path").dirname(fullpath))
209
+ await ensure_path(require("path").dirname(fullpath))
215
210
 
216
- async function get_fullpath() {
217
- let p = fullpath
218
- while (await is_dir(p)) p = require("path").join(p, 'index')
219
- return p
220
- }
211
+ async function get_fullpath() {
212
+ let p = fullpath
213
+ while (await is_dir(p)) p = require("path").join(p, 'index')
214
+ return p
215
+ }
221
216
 
222
- let peer = Math.random().toString(36).slice(2)
223
- var char_counter = -1
224
- let file_last_version = null
225
- let file_last_text = null
226
- self.file_read_only = null
227
- let file_needs_reading = true
228
- let file_needs_writing = null
229
- let file_loop_pump_lock = 0
230
-
231
- function signal_file_needs_reading() {
232
- file_needs_reading = true
233
- file_loop_pump()
234
- }
217
+ let peer = Math.random().toString(36).slice(2)
218
+ var char_counter = -1
219
+ let file_last_version = null
220
+ let file_last_text = null
221
+ self.file_read_only = null
222
+ let file_needs_reading = true
223
+ let file_needs_writing = null
224
+ let file_loop_pump_lock = 0
225
+
226
+ function signal_file_needs_reading() {
227
+ file_needs_reading = true
228
+ file_loop_pump()
229
+ }
235
230
 
236
- function signal_file_needs_writing() {
237
- file_needs_writing = true
238
- file_loop_pump()
239
- }
231
+ function signal_file_needs_writing() {
232
+ file_needs_writing = true
233
+ file_loop_pump()
234
+ }
240
235
 
241
- async function send_out(stuff) {
242
- if (is_external_link) await braid_fetch_wrapper(url, {
243
- headers: {
244
- "Merge-Type": "dt",
245
- "Content-Type": 'text/plain',
246
- ...config?.domains?.[(new URL(url)).hostname]?.auth_headers,
247
- },
248
- method: "PUT",
249
- retry: true,
250
- ...stuff
251
- })
252
- }
236
+ async function send_out(stuff) {
237
+ if (is_external_link) await braid_fetch_wrapper(url, {
238
+ headers: {
239
+ "Merge-Type": "dt",
240
+ "Content-Type": 'text/plain',
241
+ ...config?.domains?.[(new URL(url)).hostname]?.auth_headers,
242
+ },
243
+ method: "PUT",
244
+ retry: true,
245
+ ...stuff
246
+ })
247
+ }
253
248
 
254
- file_loop_pump()
255
- async function file_loop_pump() {
256
- if (file_loop_pump_lock) return
257
- file_loop_pump_lock++
249
+ path_to_func[path] = signal_file_needs_reading
258
250
 
259
- if (file_last_version === null) {
260
- try {
261
- file_last_version = JSON.parse(await require('fs').promises.readFile(require('path').join(config.proxy_base_last_versions, braid_text.encode_filename(url)), { encoding: 'utf8' }))
262
- file_last_text = (await braid_text.get(url, { version: file_last_version })).body
263
- file_needs_writing = !v_eq(file_last_version, (await braid_text.get(url, {})).version)
264
- } catch (e) {
265
- file_last_text = ''
266
- file_needs_writing = true
251
+ file_loop_pump()
252
+ async function file_loop_pump() {
253
+ if (file_loop_pump_lock) return
254
+ file_loop_pump_lock++
255
+
256
+ if (file_last_version === null) {
257
+ try {
258
+ file_last_version = JSON.parse(await require('fs').promises.readFile(require('path').join(config.proxy_base_last_versions, braid_text.encode_filename(url)), { encoding: 'utf8' }))
259
+ file_last_text = (await braid_text.get(url, { version: file_last_version })).body
260
+ file_needs_writing = !v_eq(file_last_version, (await braid_text.get(url, {})).version)
261
+ } catch (e) {
262
+ file_last_text = ''
263
+ file_needs_writing = true
264
+ }
267
265
  }
268
- }
269
266
 
270
- while (file_needs_reading || file_needs_writing) {
271
- if (file_needs_reading) {
272
- file_needs_reading = false
267
+ while (file_needs_reading || file_needs_writing) {
268
+ if (file_needs_reading) {
269
+ file_needs_reading = false
273
270
 
274
- if (self.file_read_only === null) try { self.file_read_only = await is_read_only(await get_fullpath()) } catch (e) { }
271
+ if (self.file_read_only === null) try { self.file_read_only = await is_read_only(await get_fullpath()) } catch (e) { }
275
272
 
276
- let text = ''
277
- try { text = await require('fs').promises.readFile(await get_fullpath(), { encoding: 'utf8' }) } catch (e) { }
273
+ let text = ''
274
+ try { text = await require('fs').promises.readFile(await get_fullpath(), { encoding: 'utf8' }) } catch (e) { }
278
275
 
279
- var patches = diff(file_last_text, text)
280
- if (patches.length) {
281
- // convert from js-indicies to code-points
282
- char_counter += patches_to_code_points(patches, file_last_text)
276
+ var patches = diff(file_last_text, text)
277
+ if (patches.length) {
278
+ // convert from js-indicies to code-points
279
+ char_counter += patches_to_code_points(patches, file_last_text)
283
280
 
284
- file_last_text = text
281
+ file_last_text = text
285
282
 
286
- var version = [peer + "-" + char_counter]
287
- var parents = file_last_version
288
- file_last_version = version
283
+ var version = [peer + "-" + char_counter]
284
+ var parents = file_last_version
285
+ file_last_version = version
289
286
 
290
- send_out({ version, parents, patches, peer })
287
+ send_out({ version, parents, patches, peer })
291
288
 
292
- await braid_text.put(url, { version, parents, patches, peer })
289
+ await braid_text.put(url, { version, parents, patches, peer })
293
290
 
294
- await require('fs').promises.writeFile(require('path').join(config.proxy_base_last_versions, braid_text.encode_filename(url)), JSON.stringify(file_last_version))
291
+ await require('fs').promises.writeFile(require('path').join(config.proxy_base_last_versions, braid_text.encode_filename(url)), JSON.stringify(file_last_version))
292
+ }
295
293
  }
296
- }
297
- if (file_needs_writing) {
298
- file_needs_writing = false
299
- let { version, body } = await braid_text.get(url, {})
300
- if (!v_eq(version, file_last_version)) {
294
+ if (file_needs_writing) {
295
+ file_needs_writing = false
296
+ let { version, body } = await braid_text.get(url, {})
297
+ if (!v_eq(version, file_last_version)) {
301
298
 
302
- console.log(`writing file ${await get_fullpath()}`)
299
+ console.log(`writing file ${await get_fullpath()}`)
303
300
 
304
- try { if (await is_read_only(await get_fullpath())) await set_read_only(await get_fullpath(), false) } catch (e) { }
301
+ try { if (await is_read_only(await get_fullpath())) await set_read_only(await get_fullpath(), false) } catch (e) { }
305
302
 
306
- file_last_version = version
307
- file_last_text = body
308
- await require('fs').promises.writeFile(await get_fullpath(), file_last_text)
309
- await require('fs').promises.writeFile(require('path').join(config.proxy_base_last_versions, braid_text.encode_filename(url)), JSON.stringify(file_last_version))
310
- }
303
+ file_last_version = version
304
+ file_last_text = body
305
+ await require('fs').promises.writeFile(await get_fullpath(), file_last_text)
306
+ await require('fs').promises.writeFile(require('path').join(config.proxy_base_last_versions, braid_text.encode_filename(url)), JSON.stringify(file_last_version))
307
+ }
311
308
 
312
- if (await is_read_only(await get_fullpath()) !== self.file_read_only) await set_read_only(await get_fullpath(), self.file_read_only)
309
+ if (await is_read_only(await get_fullpath()) !== self.file_read_only) await set_read_only(await get_fullpath(), self.file_read_only)
310
+ }
313
311
  }
312
+ file_loop_pump_lock--
314
313
  }
315
- file_loop_pump_lock--
316
- }
317
-
318
- if (is_external_link) braid_fetch_wrapper(url, {
319
- headers: {
320
- "Merge-Type": "dt",
321
- Accept: 'text/plain',
322
- ...config?.domains?.[(new URL(url)).hostname]?.auth_headers,
323
- },
324
- subscribe: true,
325
- retry: true,
326
- parents: async () => {
327
- let cur = await braid_text.get(url, {})
328
- if (cur.version.length) return cur.version
329
- },
330
- peer
331
- }, (res) => {
332
- self.file_read_only = res.headers.get('editable') === 'false'
333
- signal_file_needs_writing()
334
- }).then(x => {
335
- x.subscribe(async update => {
336
- // console.log(`update: ${JSON.stringify(update, null, 4)}`)
337
- if (update.version.length == 0) return;
338
-
339
- await braid_text.put(url, { ...update, peer, merge_type: 'dt' })
340
-
341
- signal_file_needs_writing()
342
- })
343
- })
344
314
 
345
- path_to_func[path] = signal_file_needs_reading
315
+ // try a HEAD without subscribe to get the version
316
+ let parents = null
317
+ if (is_external_link) {
318
+ try {
319
+ let head_res = await braid_fetch_wrapper(url, {
320
+ method: 'HEAD',
321
+ headers: {
322
+ Accept: 'text/plain',
323
+ ...config?.domains?.[(new URL(url)).hostname]?.auth_headers,
324
+ },
325
+ retry: true,
326
+ })
327
+ parents = head_res.headers.get('version') ?
328
+ JSON.parse(`[${head_res.headers.get('version')}]`) :
329
+ null
330
+ self.file_read_only = head_res.headers.get('editable') === 'false'
331
+ signal_file_needs_writing()
332
+ } catch (e) {
333
+ console.log('HEAD failed: ', e)
334
+ }
346
335
 
347
- // try a HEAD without subscribe to get the version
348
- let parents = null
349
- if (is_external_link) {
350
- try {
351
- let head_res = await braid_fetch_wrapper(url, {
352
- method: 'HEAD',
353
- headers: {
354
- Accept: 'text/plain',
355
- ...config?.domains?.[(new URL(url)).hostname]?.auth_headers,
356
- },
357
- retry: true,
336
+ // work here
337
+ console.log(`waiting_for_versions: ${parents}`)
338
+
339
+ let waiting_for_versions = Object.fromEntries(parents?.map(x => [x, true]) ?? [])
340
+ await new Promise(done => {
341
+ braid_fetch_wrapper(url, {
342
+ headers: {
343
+ "Merge-Type": "dt",
344
+ Accept: 'text/plain',
345
+ ...config?.domains?.[(new URL(url)).hostname]?.auth_headers,
346
+ },
347
+ subscribe: true,
348
+ retry: true,
349
+ parents: async () => {
350
+ let cur = await braid_text.get(url, {})
351
+ if (cur.version.length) {
352
+ waiting_for_versions = Object.fromEntries(Object.keys(waiting_for_versions).map(x => {
353
+ let [a, seq] = x.split('-')
354
+ return [a, seq]
355
+ }))
356
+ for (let v of cur.version) {
357
+ let [a, seq] = v.split('-')
358
+ if (waiting_for_versions[a] <= seq) delete waiting_for_versions[a]
359
+ }
360
+ waiting_for_versions = Object.fromEntries(Object.entries(waiting_for_versions).map(x => [`${x[0]}-${x[1]}`, true]))
361
+
362
+ if (done) {
363
+ if (!Object.keys(waiting_for_versions).length) {
364
+ console.log('got everything we were waiting for..')
365
+ done()
366
+ done = null
367
+ }
368
+ }
369
+
370
+ return cur.version
371
+ }
372
+ },
373
+ peer
374
+ }, (res) => {
375
+ self.file_read_only = res.headers.get('editable') === 'false'
376
+ signal_file_needs_writing()
377
+ }).then(x => {
378
+ x.subscribe(async update => {
379
+ // console.log(`update: ${JSON.stringify(update, null, 4)}`)
380
+ if (update.version.length == 0) return;
381
+ if (update.version.length != 1) throw 'unexpected';
382
+
383
+ await braid_text.put(url, { ...update, peer, merge_type: 'dt' })
384
+
385
+ if (done) {
386
+ delete waiting_for_versions[update.version[0]]
387
+ if (!Object.keys(waiting_for_versions).length) {
388
+ console.log('got everything we were waiting for..')
389
+ done()
390
+ done = null
391
+ }
392
+ }
393
+
394
+ signal_file_needs_writing()
395
+ })
396
+ })
358
397
  })
359
- parents = head_res.headers.get('version') ?
360
- JSON.parse(`[${head_res.headers.get('version')}]`) :
361
- null
362
- self.file_read_only = head_res.headers.get('editable') === 'false'
363
- signal_file_needs_writing()
364
- } catch (e) {
365
- console.log('HEAD failed: ', e)
366
398
  }
367
- }
368
399
 
369
- // now get everything since then, and send it back..
370
- braid_text.get(url, {
371
- parents,
372
- merge_type: 'dt',
373
- peer,
374
- subscribe: async ({ version, parents, body, patches }) => {
375
- if (version.length == 0) return;
400
+ // now get everything since then, and send it back..
401
+ braid_text.get(url, {
402
+ parents,
403
+ merge_type: 'dt',
404
+ peer,
405
+ subscribe: async ({ version, parents, body, patches }) => {
406
+ if (version.length == 0) return;
376
407
 
377
- // console.log(`local got: ${JSON.stringify({ version, parents, body, patches }, null, 4)}`)
408
+ // console.log(`local got: ${JSON.stringify({ version, parents, body, patches }, null, 4)}`)
378
409
 
379
- signal_file_needs_writing()
410
+ signal_file_needs_writing()
380
411
 
381
- send_out({ version, parents, body, patches, peer })
382
- },
383
- })
412
+ send_out({ version, parents, body, patches, peer })
413
+ },
414
+ })
384
415
 
385
- return self
416
+ return self
417
+ })
418
+ return await proxy_url.cache[url]
386
419
  }
387
420
 
388
421
  ////////////////////////////////
package/package.json CHANGED
@@ -1,13 +1,13 @@
1
1
  {
2
2
  "name": "braidfs",
3
- "version": "0.0.25",
3
+ "version": "0.0.26",
4
4
  "description": "braid technology synchronizing files and webpages",
5
5
  "author": "Braid Working Group",
6
6
  "repository": "braid-org/braidfs",
7
7
  "homepage": "https://braid.org",
8
8
  "dependencies": {
9
9
  "braid-http": "^0.3.20",
10
- "braid-text": "^0.0.29",
10
+ "braid-text": "^0.0.30",
11
11
  "chokidar": "^3.6.0"
12
12
  },
13
13
  "bin": {