braidfs 0.0.23 → 0.0.24

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (2)
  1. package/index.js       +61 -14
  2. package/package.json    +2  -2
package/index.js CHANGED
@@ -137,7 +137,15 @@ const server = http.createServer(async (req, res) => {
     // we don't want to let remote people access external links for now
     if (config.allow_remote_access && is_external_link) only_allow_local_host()
 
-    proxy_url(url)
+    let p = await proxy_url(url)
+
+    res.setHeader('Editable', !p.file_read_only)
+    if (req.method == "PUT" || req.method == "POST" || req.method == "PATCH") {
+        if (p.file_read_only) {
+            res.statusCode = 403 // Forbidden status code
+            return res.end('access denied')
+        }
+    }
 
     // Now serve the collaborative text!
     braid_text.serve(req, res, { key: normalize_url(url) })
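The handler change above makes the local proxy report writability via an Editable response header and return 403 for PUT/POST/PATCH on read-only files. A minimal client-side sketch of how a local tool might use this (not part of the package; the port and path below are placeholders, the real port comes from config.port):

// Hedged sketch, not from braidfs: probe the Editable header before writing.
// 'http://localhost:8888' and '/notes.md' are illustrative placeholders.
const PROXY = 'http://localhost:8888'

async function try_edit(path, body) {
    // Every response from the proxy now carries the Editable header
    const head = await fetch(PROXY + path, { method: 'HEAD' })
    if (head.headers.get('editable') === 'false')
        return console.log('read-only: a PUT/POST/PATCH would get 403 "access denied"')

    const put = await fetch(PROXY + path, { method: 'PUT', body })
    console.log(put.status) // 403 if the file went read-only in the meantime
}

try_edit('/notes.md', 'hello')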
@@ -148,12 +156,6 @@ server.listen(config.port, () => {
     if (!config.allow_remote_access) console.log('!! only accessible from localhost !!');
 });
 
-////////////////////////////////
-
-function normalize_url(url) {
-    return url.replace(/(\/index|\/)+$/, '')
-}
-
 async function proxy_url(url) {
     let chain = proxy_url.chain || (proxy_url.chain = Promise.resolve())
 
@@ -190,8 +192,9 @@ async function proxy_url(url) {
     url = normalized_url
 
     if (!proxy_url.cache) proxy_url.cache = {}
-    if (proxy_url.cache[url]) return
-    proxy_url.cache[url] = true
+    if (proxy_url.cache[url]) return proxy_url.cache[url]
+    let self = {}
+    proxy_url.cache[url] = self
 
     console.log(`proxy_url: ${url}`)
 
@@ -220,6 +223,7 @@ async function proxy_url(url) {
     var char_counter = -1
     let file_last_version = null
     let file_last_text = null
+    self.file_read_only = null
     let file_needs_reading = true
     let file_needs_writing = null
     let file_loop_pump_lock = 0
@@ -258,7 +262,6 @@ async function proxy_url(url) {
             file_last_text = (await braid_text.get(url, { version: file_last_version })).body
             file_needs_writing = !v_eq(file_last_version, (await braid_text.get(url, {})).version)
         } catch (e) {
-            file_last_version = []
             file_last_text = ''
             file_needs_writing = true
         }
@@ -268,6 +271,8 @@ async function proxy_url(url) {
         if (file_needs_reading) {
             file_needs_reading = false
 
+            if (self.file_read_only === null) try { self.file_read_only = await is_read_only(await get_fullpath()) } catch (e) { }
+
             let text = ''
             try { text = await require('fs').promises.readFile(await get_fullpath(), { encoding: 'utf8' }) } catch (e) { }
 
@@ -296,11 +301,15 @@ async function proxy_url(url) {
 
                 console.log(`writing file ${await get_fullpath()}`)
 
+                try { if (await is_read_only(await get_fullpath())) await set_read_only(await get_fullpath(), false) } catch (e) { }
+
                 file_last_version = version
                 file_last_text = body
                 await require('fs').promises.writeFile(await get_fullpath(), file_last_text)
                 await require('fs').promises.writeFile(require('path').join(config.proxy_base_last_versions, braid_text.encode_filename(url)), JSON.stringify(file_last_version))
             }
+
+            if (await is_read_only(await get_fullpath()) !== self.file_read_only) await set_read_only(await get_fullpath(), self.file_read_only)
         }
     }
     file_loop_pump_lock--
@@ -309,7 +318,8 @@ async function proxy_url(url) {
     if (is_external_link) braid_fetch_wrapper(url, {
         headers: {
             "Merge-Type": "dt",
-            Accept: 'text/plain'
+            Accept: 'text/plain',
+            ...config?.domains?.[(new URL(url)).hostname]?.auth_headers,
         },
         subscribe: true,
         retry: true,
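This subscription request (and the HEAD probe in a later hunk) now spreads per-domain auth_headers from the config into the outgoing headers. The config schema itself is not shown in this diff; a hypothetical domains entry, keyed by hostname, might look like this:

// Hypothetical config object inferred from config?.domains?.[(new URL(url)).hostname]?.auth_headers.
// All names and values below are illustrative; the actual braidfs config format is not part of this diff.
const config = {
    port: 8888,                  // referenced as config.port elsewhere in index.js
    allow_remote_access: false,  // referenced as config.allow_remote_access
    domains: {
        "example.org": {
            auth_headers: {
                Authorization: "Bearer <token>",
                Cookie: "session=<id>"
            }
        }
    }
}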
@@ -318,6 +328,9 @@ async function proxy_url(url) {
             if (cur.version.length) return cur.version
         },
         peer
+    }, (res) => {
+        self.file_read_only = res.headers.get('editable') === 'false'
+        signal_file_needs_writing()
     }).then(x => {
         x.subscribe(async update => {
             // console.log(`update: ${JSON.stringify(update, null, 4)}`)
@@ -337,12 +350,17 @@ async function proxy_url(url) {
             try {
                 let head_res = await braid_fetch_wrapper(url, {
                     method: 'HEAD',
-                    headers: { Accept: 'text/plain' },
+                    headers: {
+                        Accept: 'text/plain',
+                        ...config?.domains?.[(new URL(url)).hostname]?.auth_headers,
+                    },
                     retry: true,
                 })
                 parents = head_res.headers.get('version') ?
                     JSON.parse(`[${head_res.headers.get('version')}]`) :
                     null
+                self.file_read_only = head_res.headers.get('editable') === 'false'
+                signal_file_needs_writing()
             } catch (e) {
                 console.log('HEAD failed: ', e)
             }
@@ -363,6 +381,14 @@ async function proxy_url(url) {
             send_out({ version, parents, body, patches, peer })
         },
     })
+
+    return self
+}
+
+////////////////////////////////
+
+function normalize_url(url) {
+    return url.replace(/(\/index|\/)+$/, '')
 }
 
 async function is_dir(p) {
@@ -446,7 +472,7 @@ function count_code_points(str) {
     return code_points
 }
 
-async function braid_fetch_wrapper(url, params) {
+async function braid_fetch_wrapper(url, params, connection_cb) {
     if (!params.retry) throw "wtf"
     var waitTime = 10
     if (params.subscribe) {
@@ -456,6 +482,7 @@ async function braid_fetch_wrapper(url, params) {
         if (params.signal?.aborted) return
         try {
            var c = await braid_fetch(url, { ...params, parents: await params.parents?.() })
+           connection_cb(c)
            c.subscribe((...args) => subscribe_handler?.(...args), on_error)
            waitTime = 10
        } catch (e) {
@@ -486,5 +513,25 @@ async function braid_fetch_wrapper(url, params) {
 }
 
 function v_eq(v1, v2) {
-    return v1.length == v2.length && v1.every((x, i) => x == v2[i])
+    return v1.length === v2?.length && v1.every((x, i) => x == v2[i])
+}
+
+async function is_read_only(fullpath) {
+    const stats = await require('fs').promises.stat(fullpath)
+    return require('os').platform() === "win32" ?
+        !!(stats.mode & 0x1) :
+        !(stats.mode & 0o200)
+}
+
+async function set_read_only(fullpath, read_only) {
+    if (require('os').platform() === "win32") {
+        await new Promise((resolve, reject) => {
+            require("child_process").exec(`fsutil file setattr readonly "${fullpath}" ${!!read_only}`, (error) => error ? reject(error) : resolve())
+        })
+    } else {
+        let mode = (await require('fs').promises.stat(fullpath)).mode
+        if (read_only) mode &= ~0o222
+        else mode |= 0o200
+        await require('fs').promises.chmod(fullpath, mode)
+    }
 }
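The new is_read_only / set_read_only helpers mirror the remote editable state onto the local file: on POSIX they toggle the owner write bits (0o200 / 0o222), on Windows they shell out to `fsutil file setattr readonly`. A small round-trip sketch, assuming both helpers are in scope and a POSIX system (the scratch path is hypothetical):

// Hedged usage sketch, not part of braidfs: toggle and check a scratch file.
const fs = require('fs').promises

async function demo() {
    const path = '/tmp/braidfs-readonly-demo.txt'   // hypothetical scratch file
    await fs.writeFile(path, 'hello')

    await set_read_only(path, true)
    console.log(await is_read_only(path))   // true  -> owner write bit (0o200) cleared
    await set_read_only(path, false)
    console.log(await is_read_only(path))   // false -> owner write bit restored
}

demo()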
package/package.json CHANGED
@@ -1,13 +1,13 @@
 {
   "name": "braidfs",
-  "version": "0.0.23",
+  "version": "0.0.24",
   "description": "braid technology synchronizing files and webpages",
   "author": "Braid Working Group",
   "repository": "braid-org/braidfs",
   "homepage": "https://braid.org",
   "dependencies": {
     "braid-http": "^0.3.20",
-    "braid-text": "^0.0.26",
+    "braid-text": "^0.0.27",
     "chokidar": "^3.6.0"
   },
   "bin": {