braidfs 0.0.84 → 0.0.86
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/index.js +59 -58
- package/package.json +2 -2
package/index.js
CHANGED
@@ -4,10 +4,10 @@ var { diff_main } = require(`${__dirname}/diff.js`),
 braid_text = require("braid-text"),
 braid_fetch = require('braid-http').fetch

-var
-braidfs_config_dir = `${
+var sync_base = `${require('os').homedir()}/http`,
+braidfs_config_dir = `${sync_base}/.braidfs`,
 braidfs_config_file = `${braidfs_config_dir}/config`,
-
+sync_base_meta = `${braidfs_config_dir}/proxy_base_meta`
 braid_text.db_folder = `${braidfs_config_dir}/braid-text-db`
 var trash = `${braidfs_config_dir}/trash`
 var temp_folder = `${braidfs_config_dir}/temp`
@@ -15,7 +15,7 @@ var temp_folder = `${braidfs_config_dir}/temp`
 var config = null,
 watcher_misses = 0

-if (require('fs').existsSync(proxy_base)) {
+if (require('fs').existsSync(sync_base)) {
 try {
 config = require('fs').readFileSync(braidfs_config_file, 'utf8')
 } catch (e) { return console.log(`could not find config file: ${braidfs_config_file}`) }
@@ -44,7 +44,7 @@ if (require('fs').existsSync(proxy_base)) {
 require('fs').writeFileSync(braidfs_config_file, JSON.stringify(config, null, 4))
 }

-require('fs').mkdirSync(
+require('fs').mkdirSync(sync_base_meta, { recursive: true })
 require('fs').mkdirSync(trash, { recursive: true })
 require('fs').mkdirSync(temp_folder, { recursive: true })

@@ -67,7 +67,7 @@ if (argv.length === 1 && argv[0].match(/^(run|serve)$/)) {
 var sync = argv[i] === 'sync',
 url = argv[i + 1]
 if (!url.match(/^https?:\/\//)) {
-if (url.startsWith('/')) url = require('path').relative(
+if (url.startsWith('/')) url = require('path').relative(sync_base, url)
 url = `https://${url}`
 }
 console.log(`${sync ? '' : 'un'}subscribing ${sync ? 'to' : 'from'} ${url}`)
@@ -127,9 +127,9 @@ async function main() {
 var fullpath = decodeURIComponent(m[1])
 var hash = decodeURIComponent(m[2])

-var path = require('path').relative(
-var
-var version =
+var path = require('path').relative(sync_base, fullpath)
+var sync = await sync_url.cache[normalize_url(path)]
+var version = sync?.hash_to_version_cache.get(hash)?.version
 if (!version) res.statusCode = 404
 return res.end(JSON.stringify(version))
 }
@@ -137,11 +137,11 @@ async function main() {
 var m = url.match(/^\.braidfs\/set_version\/([^\/]*)\/([^\/]*)/)
 if (m) {
 var fullpath = decodeURIComponent(m[1])
-var path = require('path').relative(
-var
+var path = require('path').relative(sync_base, fullpath)
+var sync = await sync_url.cache[normalize_url(path)]

 var parents = JSON.parse(decodeURIComponent(m[2]))
-var parent_text =
+var parent_text = sync?.version_to_text_cache.get(JSON.stringify(parents)) ?? (await braid_text.get(sync.url, { parents })).body

 var text = await new Promise(done => {
 const chunks = []
@@ -152,14 +152,14 @@ async function main() {
 var patches = diff(parent_text, text)

 if (patches.length) {
-
-var version = [
-await braid_text.put(
+sync.local_edit_counter += patches_to_code_points(patches, parent_text)
+var version = [sync.peer + "-" + (sync.local_edit_counter - 1)]
+await braid_text.put(sync.url, { version, parents, patches, merge_type: 'dt' })

 // may be able to do this more efficiently.. we want to make sure we're capturing a file write that is after our version was written.. there may be a way we can avoid calling file_needs_writing here
 var stat = await new Promise(done => {
-
-
+sync.file_written_cbs.push(done)
+sync.signal_file_needs_writing()
 })

 res.writeHead(200, { 'Content-Type': 'application/json' })
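Note: the three added lines above derive a Braid version id from a per-peer edit counter that advances by the number of code points the patches touch. A minimal sketch of that pattern, not the shipped braidfs code, assuming a sync object with peer and local_edit_counter fields as in the diff (code_points_edited is a hypothetical stand-in for the result of patches_to_code_points):

// Sketch only: form a version id such as ["alice-41"] from a per-peer counter,
// mirroring the + lines in the hunk above.
function make_version(sync, code_points_edited) {
    sync.local_edit_counter += code_points_edited              // grow by the size of this edit
    return [sync.peer + "-" + (sync.local_edit_counter - 1)]   // dash-joined peer id and counter
}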
@@ -182,7 +182,7 @@ async function main() {
 console.log(`daemon started on port ${config.port}`)
 if (!config.allow_remote_access) console.log('!! only accessible from localhost !!')

-
+sync_url('.braidfs/config').then(() => {
 braid_text.get('.braidfs/config', {
 subscribe: async update => {
 let prev = config
@@ -195,10 +195,10 @@ async function main() {
 var old_syncs = Object.entries(prev.sync).filter(x => x[1]).map(x => normalize_url(x[0]).replace(/^https?:\/\//, ''))
 var new_syncs = new Set(Object.entries(config.sync).filter(x => x[1]).map(x => normalize_url(x[0]).replace(/^https?:\/\//, '')))
 for (let url of old_syncs.filter(x => !new_syncs.has(x)))
-
+unsync_url(url)

-//
-for (let x of Object.entries(config.sync)) if (x[1])
+// sync all the new stuff
+for (let x of Object.entries(config.sync)) if (x[1]) sync_url(x[0])

 // if any auth stuff has changed,
 // have the appropriate connections reconnect
@@ -212,7 +212,7 @@ async function main() {
 || JSON.stringify(prev.cookies[domain]) !== JSON.stringify(v))
 changed.add(domain)
 // ok, have every domain which has changed reconnect
-for (let [path, x] of Object.entries(
+for (let [path, x] of Object.entries(sync_url.cache))
 if (changed.has(path.split(/\//)[0].split(/:/)[0]))
 (await x).reconnect?.()
 } catch (e) {
@@ -222,10 +222,10 @@ async function main() {
 }
 })
 })
-
+sync_url('.braidfs/errors')

 console.log({ sync: config.sync })
-for (let x of Object.entries(config.sync)) if (x[1])
+for (let x of Object.entries(config.sync)) if (x[1]) sync_url(x[0])

 watch_files()
 setTimeout(scan_files, 1200)
@@ -286,28 +286,28 @@ async function watch_files() {
 await w?.close()

 console.log('watch files..')
-watch_files.watcher = require('chokidar').watch(
+watch_files.watcher = require('chokidar').watch(sync_base).
 on('add', x => chokidar_handler(x, 'add')).
 on('change', x => chokidar_handler(x, 'change')).
 on('unlink', x => chokidar_handler(x, 'unlink'))

 async function chokidar_handler(fullpath, event) {
-// Make sure the path is within
-if (!fullpath.startsWith(
-return on_watcher_miss(`path ${fullpath} outside ${
+// Make sure the path is within sync_base..
+if (!fullpath.startsWith(sync_base))
+return on_watcher_miss(`path ${fullpath} outside ${sync_base}`)

 // Make sure the path is to a file, and not a directory
 if (event != 'unlink' && (await require('fs').promises.stat(fullpath)).isDirectory())
 return on_watcher_miss(`expected file, got: ${fullpath}`)

-var path = require('path').relative(
+var path = require('path').relative(sync_base, fullpath)
 if (skip_file(path)) return
 console.log(`file event: ${path}, event: ${event}`)

-var
+var sync = await sync_url.cache[normalize_url(path)]

-if (
-if (!
+if (sync && event != 'add') sync.signal_file_needs_reading()
+if (!sync && event != 'unlink') await trash_file(fullpath, path)
 }
 }

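Note: the watcher hunk above points chokidar at sync_base and routes add/change/unlink events through chokidar_handler, which rejects paths outside the root and directories. A standalone sketch of that wiring (chokidar v3 API; root and handle are illustrative names, not braidfs's):

// Sketch only: watch a root directory and dispatch file events,
// mirroring the structure of the hunk above.
const chokidar = require('chokidar')
const root = `${require('os').homedir()}/http`   // plays the role of sync_base

chokidar.watch(root)
    .on('add',    p => handle(p, 'add'))
    .on('change', p => handle(p, 'change'))
    .on('unlink', p => handle(p, 'unlink'))

async function handle(fullpath, event) {
    if (!fullpath.startsWith(root)) return       // ignore anything outside the synced tree
    if (event !== 'unlink' &&
        (await require('fs').promises.stat(fullpath)).isDirectory()) return
    console.log(`file event: ${require('path').relative(root, fullpath)} (${event})`)
}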
@@ -320,7 +320,7 @@ async function scan_files() {
 while (scan_files.do_again) {
 scan_files.do_again = false
 console.log(`scan files..`)
-if (await f(
+if (await f(sync_base))
 on_watcher_miss(`scanner picked up a change that the watcher should have gotten`, false)
 }
 scan_files.running = false
@@ -328,7 +328,7 @@ async function scan_files() {
 scan_files.timeout = setTimeout(scan_files, config.scan_interval_ms ?? (20 * 1000))

 async function f(fullpath) {
-path = require('path').relative(
+path = require('path').relative(sync_base, fullpath)
 if (skip_file(path)) return

 let stat = await require('fs').promises.stat(fullpath, { bigint: true })
@@ -338,30 +338,30 @@ async function scan_files() {
 found ||= await f(`${fullpath}/${file}`)
 return found
 } else {
-var
-if (!
+var sync = await sync_url.cache[normalize_url(path)]
+if (!sync) return await trash_file(fullpath, path)

 stat = await require('fs').promises.stat(fullpath, { bigint: true })
-if (!stat_eq(stat,
+if (!stat_eq(stat, sync.file_last_stat)) {
 console.log(`scan thinks ${path} has changed`)
-
+sync.signal_file_needs_reading()
 return true
 }
 }
 }
 }

-function
+function unsync_url(url) {
 url = normalize_url(url).replace(/^https?:\/\//, '')
-if (!
+if (!sync_url.cache?.[url]) return

-console.log(`
+console.log(`unsync_url: ${url}`)

-delete
-
+delete sync_url.cache[url]
+unsync_url.cache[url] = unsync_url.cache[url]()
 }

-async function proxy_url(url) {
+async function sync_url(url) {
 // normalize url by removing any trailing /index/index/
 var normalized_url = normalize_url(url),
 wasnt_normal = normalized_url != url
@@ -369,12 +369,12 @@ async function proxy_url(url) {

 var is_external_link = url.match(/^https?:\/\//),
 path = is_external_link ? url.replace(/^https?:\/\//, '') : url,
-fullpath = `${
-meta_path = `${
+fullpath = `${sync_base}/${path}`,
+meta_path = `${sync_base_meta}/${braid_text.encode_filename(url)}`

-if (!
-if (!
-if (!
+if (!sync_url.cache) sync_url.cache = {}
+if (!sync_url.chain) sync_url.chain = Promise.resolve()
+if (!sync_url.cache[path]) sync_url.cache[path] = sync_url.chain = sync_url.chain.then(async () => {
 var freed = false,
 aborts = new Set(),
 braid_text_get_options = null,
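Note: the last three added lines above both memoize each sync by path and thread every new setup through a single promise chain, so initializations run one at a time while repeat lookups await the same cached promise. A minimal sketch of that pattern in isolation (get_sync and setup are hypothetical names, not braidfs's):

// Sketch only: memoize-plus-serialize, as in sync_url.cache / sync_url.chain above.
function get_sync(path) {
    if (!get_sync.cache) get_sync.cache = {}
    if (!get_sync.chain) get_sync.chain = Promise.resolve()
    if (!get_sync.cache[path])
        // cache the promise and append it to the chain, so setups run strictly one after another
        get_sync.cache[path] = get_sync.chain = get_sync.chain.then(() => setup(path))
    return get_sync.cache[path]
}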
@@ -389,9 +389,9 @@ async function proxy_url(url) {
 wait_count--
 if (!wait_count) wait_promise_done()
 }
-if (!
-var
-
+if (!unsync_url.cache) unsync_url.cache = {}
+var old_unsync = unsync_url.cache[path]
+unsync_url.cache[path] = async () => {
 freed = true
 for (let a of aborts) a.abort()
 await wait_promise
@@ -406,11 +406,11 @@ async function proxy_url(url) {
 try { await require('fs').promises.unlink(meta_path) } catch (e) {}
 try { await require('fs').promises.unlink(await get_fullpath()) } catch (e) {}
 }
-await
+await old_unsync

 var self = {url}

-console.log(`
+console.log(`sync_url: ${url}`)

 if (!start_something()) return

@@ -523,8 +523,8 @@ async function proxy_url(url) {
 if (self.peer === braid_text.decode_version(stuff.version[0])[0]) {
 // then revert it
 console.log(`access denied: reverting local edits`)
-
-
+unsync_url(url)
+sync_url(url)
 }
 }
 }
@@ -711,7 +711,8 @@ async function proxy_url(url) {
 // then we can just remove those
 // and add event
 var fork_set = new Set(self.fork_point)
-if (parents.
+if (parents.length &&
+parents.every(p => fork_set.has(p))) {
 parents.forEach(p => fork_set.delete(p))
 fork_set.add(event)
 self.fork_point = [...fork_set.values()]
@@ -880,7 +881,7 @@ async function proxy_url(url) {

 return self
 })
-return await
+return await sync_url.cache[url]
 }

 async function ensure_path(path) {
package/package.json
CHANGED
@@ -1,13 +1,13 @@
 {
 "name": "braidfs",
-"version": "0.0.84",
+"version": "0.0.86",
 "description": "braid technology synchronizing files and webpages",
 "author": "Braid Working Group",
 "repository": "braid-org/braidfs",
 "homepage": "https://braid.org",
 "dependencies": {
 "braid-http": "^1.3.73",
-"braid-text": "^0.2.
+"braid-text": "^0.2.25",
 "chokidar": "^3.6.0"
 },
 "bin": {