braidfs 0.0.13 → 0.0.14
This diff shows the content of publicly released package versions as they appear in their respective public registries, and is provided for informational purposes only.
- package/index.js +176 -195
- package/package.json +2 -2
package/index.js
CHANGED
@@ -1,15 +1,18 @@
 
 let http = require('http');
-
 let { diff_main } = require('./diff.js')
 let braid_text = require("braid-text");
 let braid_fetch = require('braid-http').fetch
 
+process.on("unhandledRejection", (x) => console.log(`unhandledRejection: ${x.stack}`))
+process.on("uncaughtException", (x) => console.log(`uncaughtException: ${x.stack}`))
+
 let port = 10000
 let cookie = null
 let pin_urls = []
 let pindex_urls = []
 let proxy_base = `./proxy_base`
+let proxy_base_support = `./proxy_base_support`
 
 let argv = process.argv.slice(2)
 while (argv.length) {
@@ -30,8 +33,19 @@ while (argv.length) {
 }
 console.log({ pin_urls, pindex_urls })
 
-
-
+for (let url of pin_urls) proxy_url(url)
+pindex_urls.forEach(async url => {
+    let prefix = new URL(url).origin
+    while (true) {
+        let urls = await (await fetch(url)).json()
+        for (let url of urls) proxy_url(prefix + url)
+        await new Promise(done => setTimeout(done, 1000 * 60 * 60))
+    }
+})
+
+braid_text.list().then(x => {
+    for (let xx of x) proxy_url(xx)
+})
 
 const server = http.createServer(async (req, res) => {
     console.log(`${req.method} ${req.url}`);
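This startup block is the same code that previously ran after server.listen (removed in the next hunk), now executed before the server is created. The hourly pindex poll expects each index URL to return a JSON array of paths, which it resolves against the index's origin. A minimal sketch of a compatible index endpoint, for illustration only (the port and paths are hypothetical):

    // Hypothetical "pindex" endpoint: serves a JSON array of paths that
    // the poll loop above resolves against this server's origin before
    // calling proxy_url() on each one.
    let http = require('http')
    http.createServer((req, res) => {
        res.writeHead(200, { 'Content-Type': 'application/json' })
        res.end(JSON.stringify(['/blog/post-1', '/blog/post-2']))
    }).listen(9000)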
@@ -79,20 +93,6 @@ server.listen(port, () => {
     console.log('This proxy is only accessible from localhost');
 });
 
-for (let url of pin_urls) proxy_url(url)
-pindex_urls.forEach(async url => {
-    let prefix = new URL(url).origin
-    while (true) {
-        let urls = await (await fetch(url)).json()
-        for (let url of urls) proxy_url(prefix + url)
-        await new Promise(done => setTimeout(done, 1000 * 60 * 60))
-    }
-})
-
-braid_text.list().then(x => {
-    for (let xx of x) proxy_url(xx)
-})
-
 ////////////////////////////////
 
 async function proxy_url(url) {
@@ -134,18 +134,114 @@ async function proxy_url(url) {
     if (proxy_url.cache[url]) return
     proxy_url.cache[url] = true
 
+    console.log(`proxy_url: ${url}`)
+
     let path = url.replace(/^https?:\/\//, '')
     let fullpath = require("path").join(proxy_base, path)
 
     // if we're accessing /blah/index.html, it will be normalized to /blah,
     // but we still want to create a directory out of blah in this case
-    if (wasnt_normal && !(await is_dir(fullpath))) ensure_path(fullpath)
+    if (wasnt_normal && !(await is_dir(fullpath))) await ensure_path(fullpath)
 
-
+    await ensure_path(require("path").dirname(fullpath))
 
-
+    await require("fs").promises.mkdir(proxy_base_support, { recursive: true })
+
+    async function get_fullpath() {
+        let p = fullpath
+        while (await is_dir(p)) p = require("path").join(p, 'index.html')
+        return p
+    }
 
     let peer = Math.random().toString(36).slice(2)
+    var char_counter = -1
+    let file_last_version = null
+    let file_last_text = null
+    let file_needs_reading = true
+    let file_needs_writing = null
+    let file_loop_pump_lock = 0
+
+    function signal_file_needs_reading() {
+        file_needs_reading = true
+        file_loop_pump()
+    }
+
+    function signal_file_needs_writing() {
+        file_needs_writing = true
+        file_loop_pump()
+    }
+
+    async function send_out(stuff) {
+        await braid_fetch_wrapper(url, {
+            headers: {
+                "Merge-Type": "dt",
+                "Content-Type": 'text/plain',
+                ...(cookie ? { "Cookie": cookie } : {}),
+            },
+            method: "PUT",
+            retry: true,
+            ...stuff
+        })
+    }
+
+    file_loop_pump()
+    async function file_loop_pump() {
+        if (file_loop_pump_lock) return
+        file_loop_pump_lock++
+
+        if (file_last_version === null) {
+            try {
+                file_last_version = JSON.parse(await require('fs').promises.readFile(require('path').join(proxy_base_support, braid_text.encode_filename(url)), { encoding: 'utf8' }))
+                file_last_text = (await braid_text.get(url, { version: file_last_version })).body
+                file_needs_writing = !v_eq(file_last_version, (await braid_text.get(url, {})).version)
+            } catch (e) {
+                file_last_version = []
+                file_last_text = ''
+                file_needs_writing = true
+            }
+        }
+
+        while (file_needs_reading || file_needs_writing) {
+            if (file_needs_reading) {
+                file_needs_reading = false
+
+                let text = ''
+                try { text = await require('fs').promises.readFile(await get_fullpath(), { encoding: 'utf8' }) } catch (e) { }
+
+                var patches = diff(file_last_text, text)
+                if (patches.length) {
+                    // convert from js-indicies to code-points
+                    char_counter += patches_to_code_points(patches, file_last_text)
+
+                    file_last_text = text
+
+                    var version = [peer + "-" + char_counter]
+                    var parents = file_last_version
+                    file_last_version = version
+
+                    send_out({ version, parents, patches, peer })
+
+                    await braid_text.put(url, { version, parents, patches, peer })
+
+                    await require('fs').promises.writeFile(require('path').join(proxy_base_support, braid_text.encode_filename(url)), JSON.stringify(file_last_version))
+                }
+            }
+            if (file_needs_writing) {
+                file_needs_writing = false
+
+                console.log(`writing file ${await get_fullpath()}`)
+
+                let { version, body } = await braid_text.get(url, {})
+                if (!v_eq(version, file_last_version)) {
+                    file_last_version = version
+                    file_last_text = body
+                    await require('fs').promises.writeFile(await get_fullpath(), file_last_text)
+                    await require('fs').promises.writeFile(require('path').join(proxy_base_support, braid_text.encode_filename(url)), JSON.stringify(file_last_version))
+                }
+            }
+        }
+        file_loop_pump_lock--
+    }
 
     braid_fetch_wrapper(url, {
         headers: {
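The heart of this release is the new file_loop_pump: a re-entrancy-guarded loop driven by two dirty flags (file_needs_reading, file_needs_writing), so signals that arrive while a pass is in flight coalesce into extra loop iterations rather than concurrent runs. A standalone sketch of the same pattern, independent of braidfs:

    // The flag-plus-lock "pump" pattern used by file_loop_pump above:
    // signal() marks work as pending and pokes the pump; at most one
    // pump runs at a time, and it keeps draining while the flag is set.
    let dirty = false
    let lock = 0

    function signal() { dirty = true; pump() }

    async function pump() {
        if (lock) return       // a pump is already draining
        lock++
        while (dirty) {
            dirty = false      // clear first: signals during work re-set it
            await do_work()
        }
        lock--
    }

    async function do_work() { await new Promise(r => setTimeout(r, 100)) }

    signal(); signal()         // second call doesn't start a second pump;
                               // the running loop picks the new work up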
@@ -160,81 +256,18 @@ async function proxy_url(url) {
         },
         peer
     }).then(x => {
-        x.subscribe(update => {
+        x.subscribe(async update => {
             // console.log(`update: ${JSON.stringify(update, null, 4)}`)
             if (update.version.length == 0) return;
 
-            braid_text.put(url, { ...update, peer })
-        })
-    })
-
-    // try a HEAD without subscribe to get the version
-    braid_fetch_wrapper(url, {
-        method: 'HEAD',
-        headers: { Accept: 'text/plain' },
-        retry: true,
-    }).then(async head_res => {
-        let parents = head_res.headers.get('version') ?
-            JSON.parse(`[${head_res.headers.get('version')}]`) :
-            null
+            await braid_text.put(url, { ...update, peer })
 
-
-        braid_text.get(url, {
-            parents,
-            merge_type: 'dt',
-            peer,
-            subscribe: async ({ version, parents, body, patches }) => {
-                if (version.length == 0) return;
-
-                // console.log(`local got: ${JSON.stringify({ version, parents, body, patches }, null, 4)}`)
-                // console.log(`cookie = ${cookie}`)
-
-                await braid_fetch_wrapper(url, {
-                    headers: {
-                        "Merge-Type": "dt",
-                        "Content-Type": 'text/plain',
-                        ...(cookie ? { "Cookie": cookie } : {}),
-                    },
-                    method: "PUT",
-                    retry: true,
-                    version, parents, body, patches,
-                    peer
-                })
-            },
+            signal_file_needs_writing()
         })
     })
 
-    await ensure_path(require("path").dirname(fullpath))
-
-    async function get_fullpath() {
-        let p = fullpath
-        while (await is_dir(p)) p = require("path").join(p, 'index.html')
-        return p
-    }
-
-    let simpleton = simpleton_client(url, {
-        apply_remote_update: async ({ state, patches }) => {
-            return await (chain = chain.then(async () => {
-                console.log(`writing file ${await get_fullpath()}`)
-
-                if (state !== undefined) last_text = state
-                else last_text = apply_patches(last_text, patches)
-                await require('fs').promises.writeFile(await get_fullpath(), last_text)
-                return last_text
-            }))
-        },
-        generate_local_diff_update: async (_) => {
-            return await (chain = chain.then(async () => {
-                let text = await require('fs').promises.readFile(await get_fullpath(), { encoding: 'utf8' })
-                var patches = diff(last_text, text)
-                last_text = text
-                return patches.length ? { patches, new_state: last_text } : null
-            }))
-        }
-    })
-
     if (!proxy_url.path_to_func) proxy_url.path_to_func = {}
-    proxy_url.path_to_func[path] =
+    proxy_url.path_to_func[path] = signal_file_needs_reading
 
     if (!proxy_url.chokidar) {
         proxy_url.chokidar = true
@@ -243,11 +276,43 @@ async function proxy_url(url) {
             console.log(`path changed: ${path}`)
 
             path = path.replace(/(\/index\.html|\/)+$/, '')
-            console.log(`normalized path: ${path}`)
+            // console.log(`normalized path: ${path}`)
 
             proxy_url.path_to_func[path]()
         });
     }
+
+    // try a HEAD without subscribe to get the version
+    let parents = null
+    try {
+        let head_res = await braid_fetch_wrapper(url, {
+            method: 'HEAD',
+            headers: { Accept: 'text/plain' },
+            retry: true,
+        })
+        parents = head_res.headers.get('version') ?
+            JSON.parse(`[${head_res.headers.get('version')}]`) :
+            null
+    } catch (e) {
+        console.log('HEAD failed: ', e)
+    }
+
+    // now get everything since then, and send it back..
+    braid_text.get(url, {
+        parents,
+        merge_type: 'dt',
+        peer,
+        subscribe: async ({ version, parents, body, patches }) => {
+            if (version.length == 0) return;
+
+            // console.log(`local got: ${JSON.stringify({ version, parents, body, patches }, null, 4)}`)
+            // console.log(`cookie = ${cookie}`)
+
+            signal_file_needs_writing()
+
+            send_out({ version, parents, body, patches, peer })
+        },
+    })
 }
 
 async function is_dir(p) {
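The subscription flow also changed shape: the HEAD probe is now awaited inside a try/catch instead of chained with .then, and outgoing updates go through the shared send_out helper. The `version` header read here is a comma-separated list of JSON-encoded strings, which is why wrapping it in brackets makes it parseable as a JSON array; a sketch of that parsing in isolation (helper name is hypothetical):

    // Parsing a Braid-style `version` header: the raw value is a
    // comma-separated list of JSON strings, so bracketing it yields
    // valid JSON, e.g. '"alice-3", "bob-7"' -> ["alice-3", "bob-7"].
    function parse_version_header(header) {
        return header ? JSON.parse(`[${header}]`) : null
    }

    console.log(parse_version_header('"alice-3", "bob-7"'))  // [ 'alice-3', 'bob-7' ]
    console.log(parse_version_header(null))                  // null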
@@ -291,118 +356,30 @@ function free_the_cors(req, res) {
     }
 }
 
-function
-let
-
-
-        p.range[1] += offset;
-        offset -= p.range[1] - p.range[0];
-        offset += p.content.length;
-    }
-
-    let result = originalString;
-
+function patches_to_code_points(patches, prev_state) {
+    let char_counter = 0
+    let c = 0
+    let i = 0
     for (let p of patches) {
-
-
-
-            p.content +
-            result.substring(range[1]);
-    }
-
-    return result;
-}
-
-function simpleton_client(url, { apply_remote_update, generate_local_diff_update, content_type }) {
-    var peer = Math.random().toString(36).slice(2)
-    var current_version = []
-    var prev_state = ""
-    var char_counter = -1
-    var chain = Promise.resolve()
-    var queued_changes = 0
-
-    braid_text.get(url, {
-        peer,
-        subscribe: (update) => {
-            chain = chain.then(async () => {
-                // Only accept the update if its parents == our current version
-                update.parents.sort()
-                if (current_version.length === update.parents.length
-                    && current_version.every((v, i) => v === update.parents[i])) {
-                    current_version = update.version.sort()
-                    update.state = update.body
-
-                    if (update.patches) {
-                        for (let p of update.patches) p.range = p.range.match(/\d+/g).map((x) => 1 * x)
-                        update.patches.sort((a, b) => a.range[0] - b.range[0])
-
-                        // convert from code-points to js-indicies
-                        let c = 0
-                        let i = 0
-                        for (let p of update.patches) {
-                            while (c < p.range[0]) {
-                                i += get_char_size(prev_state, i)
-                                c++
-                            }
-                            p.range[0] = i
-
-                            while (c < p.range[1]) {
-                                i += get_char_size(prev_state, i)
-                                c++
-                            }
-                            p.range[1] = i
-                        }
-                    }
-
-                    prev_state = await apply_remote_update(update)
-                }
-            })
+        while (i < p.range[0]) {
+            i += get_char_size(prev_state, i)
+            c++
         }
-
-
-    return {
-        changed: () => {
-            if (queued_changes) return
-            queued_changes++
-            chain = chain.then(async () => {
-                queued_changes--
-                var update = await generate_local_diff_update(prev_state)
-                if (!update) return // Stop if there wasn't a change!
-                var { patches, new_state } = update
-
-                // convert from js-indicies to code-points
-                let c = 0
-                let i = 0
-                for (let p of patches) {
-                    while (i < p.range[0]) {
-                        i += get_char_size(prev_state, i)
-                        c++
-                    }
-                    p.range[0] = c
-
-                    while (i < p.range[1]) {
-                        i += get_char_size(prev_state, i)
-                        c++
-                    }
-                    p.range[1] = c
+        p.range[0] = c
 
-
-
-
-
-
-                }
-
-                var version = [peer + "-" + char_counter]
+        while (i < p.range[1]) {
+            i += get_char_size(prev_state, i)
+            c++
+        }
+        p.range[1] = c
 
-
-
-                prev_state = new_state
+        char_counter += p.range[1] - p.range[0]
+        char_counter += count_code_points(p.content)
 
-
-
-            }
+        p.unit = "text"
+        p.range = `[${p.range[0]}:${p.range[1]}]`
     }
+    return char_counter
 }
 
 function get_char_size(s, i) {
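This hunk deletes the old apply_patches/simpleton_client machinery (some of its removed lines arrive truncated from the registry view) and replaces the inline conversion with patches_to_code_points, which walks the previous text once, rewrites each patch's UTF-16 ranges as code-point offsets in wire form (a unit plus a "[start:end]" range string), and returns how many code points were deleted or inserted so the caller can advance its sequence counter. The UTF-16 vs code-point distinction only matters once astral characters appear; a quick illustration:

    // Code points vs. JS (UTF-16) indices: '𝄞' (U+1D11E) is one code
    // point but occupies two UTF-16 units, so the two counts diverge.
    let s = 'a𝄞b'
    console.log(s.length)       // 4  (UTF-16 units)
    console.log([...s].length)  // 3  (code points)

    // A get_char_size in the spirit of the unchanged helper in index.js:
    // 2 if the unit at i starts a surrogate pair, else 1.
    function get_char_size(s, i) {
        let c = s.charCodeAt(i)
        return c >= 0xD800 && c <= 0xDBFF ? 2 : 1
    }
    console.log(get_char_size(s, 1))  // 2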
@@ -456,4 +433,8 @@ async function braid_fetch_wrapper(url, params) {
         }
     })
 }
-}
+}
+
+function v_eq(v1, v2) {
+    return v1.length == v2.length && v1.every((x, i) => x == v2[i])
+}
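The -}/+} pair at the end is likely just a newline-at-end-of-file fix; the substantive addition is the small v_eq helper used throughout the new pump logic. Note that the comparison is element-wise and therefore order-sensitive:

    // v_eq compares version arrays element-wise; callers must pass the
    // arrays in the same order for equal versions to compare equal.
    function v_eq(v1, v2) {
        return v1.length == v2.length && v1.every((x, i) => x == v2[i])
    }
    console.log(v_eq(['a-1', 'b-2'], ['a-1', 'b-2']))  // true
    console.log(v_eq(['a-1', 'b-2'], ['b-2', 'a-1']))  // false: order matters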
package/package.json
CHANGED
@@ -1,13 +1,13 @@
 {
   "name": "braidfs",
-  "version": "0.0.13",
+  "version": "0.0.14",
   "description": "braid technology synchronizing files and webpages",
   "author": "Braid Working Group",
   "repository": "braid-org/braidfs",
   "homepage": "https://braid.org",
   "dependencies": {
     "braid-http": "^0.3.20",
-    "braid-text": "^0.0.
+    "braid-text": "^0.0.25",
     "chokidar": "^3.6.0"
   }
 }