braidfs 0.0.14 → 0.0.16
- package/README.md +32 -39
- package/index.js +48 -34
- package/package.json +2 -2
- package/~/http/braid.org/braid-text/hi101 +1 -0
package/README.md
CHANGED
````diff
@@ -1,67 +1,60 @@
 # braidfs
-braid
+Proxy braid collaborative text pages as editable files on disk.
 
-## Features
-
-- Proxies web resources as collaborative text using the Braid protocol
-- Caches proxied content locally
-- Monitors local files for changes and syncs them back to the origin
-- Supports pinning specific URLs
 
 ## Installation
 
-
+Install braidfs globally using npm:
 
-```bash
-git clone https://github.com/braid-org/braidfs.git
-cd braidfs
 ```
-
-Install dependencies:
-
-```bash
-npm install
+npm install -g braidfs
 ```
 
 ## Usage
 
-
+To start braidfs, run the following command:
 
-```
-
+```
+braidfs [port] [-pin <url>] [-pin index <url>]
 ```
 
-
-
--
-- `-pin URL`: Pin a specific URL to be proxied
-- `-pin index URL`: Pin an index URL that contains a list of URLs to be proxied
-- `COOKIE`: Set a cookie to be used in requests (optional)
+- `[port]`: Optional. Specify the port number (default is 10000).
+- `-pin <url>`: Pin a specific URL for synchronization.
+- `-pin index <url>`: Pin an index URL that contains a list of URLs to synchronize.
 
 Example:
 
-```
-
+```
+braidfs 8080 -pin https://example.com/document.txt -pin index https://example.com/index.json
 ```
 
-
+## Configuration
 
-
+braidfs looks for a configuration file at `~/.braidfs/config.json`. You can set the following options:
 
-
+- `port`: The port number for the proxy server.
+- `pin_urls`: An array of URLs to pin for synchronization.
+- `pindex_urls`: An array of index URLs containing lists of URLs to synchronize.
+- `proxy_base`: The base directory for storing proxied files (default is `~/http`).
 
-
-- `/URL`: Proxies the specified URL as Braid text and creates a file in `proxy_base/URL`
+Example `config.json`:
 
-
+```json
+{
+    "port": 9000,
+    "pin_urls": ["https://example.com/document1.txt", "https://example.com/document2.txt"],
+    "pindex_urls": ["https://example.com/index.json"],
+    "proxy_base": "/path/to/custom/proxy/directory"
+}
+```
 
-
-- `proxy_base`: Stores proxied files, which are updated when resources change and monitored for local changes
+## Accessing the Proxy
 
-
+The proxy only allows connections from localhost for security reasons.
 
-
+- `/pages`: Shows all the proxied URLs
+- `/URL`: Proxies the specified URL and creates a file in `proxy_base/URL`
 
-##
+## Security
 
-
+braidfs is designed to run locally and only accepts connections from localhost (127.0.0.1 or ::1) for security reasons.
````
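As a point of reference, the command-line example and the configuration file documented above are interchangeable. Below is a minimal sketch (not part of the package) that writes the README's CLI example into `~/.braidfs/config.json` using only the documented keys; the URLs are the README's own placeholders, and `proxy_base` is left out so the `~/http` default applies.

```js
// Sketch: persist the README's CLI example as ~/.braidfs/config.json.
// Assumes only the documented keys (port, pin_urls, pindex_urls).
const fs = require('fs')
const path = require('path')
const os = require('os')

const dir = path.join(os.homedir(), '.braidfs')
const config = {
    port: 8080,                                      // same effect as `braidfs 8080`
    pin_urls: ['https://example.com/document.txt'],  // same as `-pin <url>`
    pindex_urls: ['https://example.com/index.json']  // same as `-pin index <url>`
}

fs.mkdirSync(dir, { recursive: true })
fs.writeFileSync(path.join(dir, 'config.json'), JSON.stringify(config, null, 4))
console.log('wrote', path.join(dir, 'config.json'))
```

Running `braidfs` with no arguments afterwards should pick these values up, with any command-line arguments still overriding them, as the `index.js` changes below show.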
package/index.js
CHANGED
```diff
@@ -7,34 +7,47 @@ let braid_fetch = require('braid-http').fetch
 process.on("unhandledRejection", (x) => console.log(`unhandledRejection: ${x.stack}`))
 process.on("uncaughtException", (x) => console.log(`uncaughtException: ${x.stack}`))
 
-let
-let
-
-
-let
-
+let braidfs_config_dir = require('path').join(require('os').homedir(), '.braidfs')
+let braidfs_config_path = require('path').join(braidfs_config_dir, 'config.json')
+braid_text.db_folder = require('path').join(braidfs_config_dir, 'braid-text-db')
+
+let config = {
+    port: 10000,
+    pin_urls: [],
+    pindex_urls: [],
+    proxy_base: require('path').join(require('os').homedir(), 'http'),
+    proxy_base_last_versions: require('path').join(braidfs_config_dir, 'proxy_base_last_versions')
+}
+
+// process config file
+try {
+    console.log(`loading config file at: ${braidfs_config_path}`)
+    Object.assign(config, JSON.parse(require('fs').readFileSync(braidfs_config_path, 'utf8')))
+} catch (e) { console.error(`Error loading config file:`, e.message) }
 
+// process command line args (override config)
 let argv = process.argv.slice(2)
 while (argv.length) {
     let a = argv.shift()
     if (a.match(/^\d+$/)) {
-        port = parseInt(a)
+        config.port = parseInt(a)
     } else if (a === '-pin') {
         let b = argv.shift()
         if (b === 'index') {
-            pindex_urls.push(argv.shift())
+            config.pindex_urls.push(argv.shift())
         } else {
-            pin_urls.push(b)
+            config.pin_urls.push(b)
         }
-    } else {
-        cookie = a
-        console.log(`cookie = ${cookie}`)
     }
 }
-console.log({ pin_urls, pindex_urls })
 
-
-
+// create directories
+require('fs').mkdirSync(config.proxy_base, { recursive: true })
+require('fs').mkdirSync(config.proxy_base_last_versions, { recursive: true })
+
+console.log({ pin_urls: config.pin_urls, pindex_urls: config.pindex_urls })
+for (let url of config.pin_urls) proxy_url(url)
+config.pindex_urls.forEach(async url => {
     let prefix = new URL(url).origin
     while (true) {
         let urls = await (await fetch(url)).json()
```
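To spell out the override order this hunk establishes: the hard-coded defaults are overwritten by whatever `~/.braidfs/config.json` supplies (via `Object.assign`), and command-line arguments are applied last. A small illustrative sketch with made-up stand-in values:

```js
// Illustrative stand-ins only: the same layering as the hunk above, with the
// config file and CLI hard-coded, and the '-pin index' case omitted.
let config = { port: 10000, pin_urls: [], pindex_urls: [] }    // built-in defaults

let file_config = { port: 9000, pin_urls: ['https://example.com/a.txt'] }
Object.assign(config, file_config)                             // config.json overrides defaults

let argv = ['8080', '-pin', 'https://example.com/b.txt']       // stand-in for process.argv.slice(2)
while (argv.length) {
    let a = argv.shift()
    if (a.match(/^\d+$/)) config.port = parseInt(a)            // CLI overrides the file
    else if (a === '-pin') config.pin_urls.push(argv.shift())  // CLI appends to pinned URLs
}

console.log(config.port)      // 8080
console.log(config.pin_urls)  // [ 'https://example.com/a.txt', 'https://example.com/b.txt' ]
```

Note that a `pin_urls` array in the config file replaces the default empty array, while `-pin` arguments append to it. The remaining `index.js` hunks continue below.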
```diff
@@ -85,16 +98,20 @@ const server = http.createServer(async (req, res) => {
     proxy_url(url)
 
     // Now serve the collaborative text!
-    braid_text.serve(req, res, { key: url })
+    braid_text.serve(req, res, { key: normalize_url(url) })
 });
 
-server.listen(port, () => {
-    console.log(`Proxy server started on port ${port}`);
+server.listen(config.port, () => {
+    console.log(`Proxy server started on port ${config.port}`);
     console.log('This proxy is only accessible from localhost');
 });
 
 ////////////////////////////////
 
+function normalize_url(url) {
+    return url.replace(/(\/index|\/)+$/, '')
+}
+
 async function proxy_url(url) {
     let chain = proxy_url.chain || (proxy_url.chain = Promise.resolve())
 
@@ -115,7 +132,7 @@ async function proxy_url(url) {
             await require("fs").promises.mkdir(path, { recursive: true })
 
             while (await is_dir(partial))
-                partial = require("path").join(partial, 'index
+                partial = require("path").join(partial, 'index')
 
             await require("fs").promises.writeFile(partial, save)
             break
@@ -125,8 +142,8 @@ async function proxy_url(url) {
         }))
     }
 
-    // normalize url by removing any trailing /index
-    let normalized_url = url
+    // normalize url by removing any trailing /index/index/
+    let normalized_url = normalize_url(url)
     let wasnt_normal = normalized_url != url
     url = normalized_url
 
@@ -137,19 +154,17 @@ async function proxy_url(url) {
     console.log(`proxy_url: ${url}`)
 
     let path = url.replace(/^https?:\/\//, '')
-    let fullpath = require("path").join(proxy_base, path)
+    let fullpath = require("path").join(config.proxy_base, path)
 
-    // if we're accessing /blah/index
+    // if we're accessing /blah/index, it will be normalized to /blah,
     // but we still want to create a directory out of blah in this case
     if (wasnt_normal && !(await is_dir(fullpath))) await ensure_path(fullpath)
 
     await ensure_path(require("path").dirname(fullpath))
 
-    await require("fs").promises.mkdir(proxy_base_support, { recursive: true })
-
     async function get_fullpath() {
         let p = fullpath
-        while (await is_dir(p)) p = require("path").join(p, 'index
+        while (await is_dir(p)) p = require("path").join(p, 'index')
         return p
     }
 
@@ -176,7 +191,7 @@ async function proxy_url(url) {
                 headers: {
                     "Merge-Type": "dt",
                     "Content-Type": 'text/plain',
-                    ...(
+                    ...config?.domains?.[(new URL(url)).hostname]?.auth_headers,
                 },
                 method: "PUT",
                 retry: true,
@@ -191,7 +206,7 @@ async function proxy_url(url) {
 
     if (file_last_version === null) {
         try {
-            file_last_version = JSON.parse(await require('fs').promises.readFile(require('path').join(
+            file_last_version = JSON.parse(await require('fs').promises.readFile(require('path').join(config.proxy_base_last_versions, braid_text.encode_filename(url)), { encoding: 'utf8' }))
             file_last_text = (await braid_text.get(url, { version: file_last_version })).body
             file_needs_writing = !v_eq(file_last_version, (await braid_text.get(url, {})).version)
         } catch (e) {
@@ -223,7 +238,7 @@ async function proxy_url(url) {
 
            await braid_text.put(url, { version, parents, patches, peer })
 
-            await require('fs').promises.writeFile(require('path').join(
+            await require('fs').promises.writeFile(require('path').join(config.proxy_base_last_versions, braid_text.encode_filename(url)), JSON.stringify(file_last_version))
         }
     }
     if (file_needs_writing) {
@@ -236,7 +251,7 @@ async function proxy_url(url) {
             file_last_version = version
             file_last_text = body
             await require('fs').promises.writeFile(await get_fullpath(), file_last_text)
-            await require('fs').promises.writeFile(require('path').join(
+            await require('fs').promises.writeFile(require('path').join(config.proxy_base_last_versions, braid_text.encode_filename(url)), JSON.stringify(file_last_version))
         }
     }
 }
@@ -271,11 +286,11 @@ async function proxy_url(url) {
 
     if (!proxy_url.chokidar) {
         proxy_url.chokidar = true
-        require('chokidar').watch(proxy_base).on('change', (path) => {
-            path = require('path').relative(proxy_base, path)
+        require('chokidar').watch(config.proxy_base).on('change', (path) => {
+            path = require('path').relative(config.proxy_base, path)
             console.log(`path changed: ${path}`)
 
-            path = path
+            path = normalize_url(path)
             // console.log(`normalized path: ${path}`)
 
             proxy_url.path_to_func[path]()
@@ -306,7 +321,6 @@ async function proxy_url(url) {
         if (version.length == 0) return;
 
         // console.log(`local got: ${JSON.stringify({ version, parents, body, patches }, null, 4)}`)
-        // console.log(`cookie = ${cookie}`)
 
         signal_file_needs_writing()
 
```
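The new `normalize_url` helper is what makes `/blah`, `/blah/`, and `/blah/index` resolve to the same braid-text key and the same file on disk. Reproduced standalone below (the function body is copied verbatim from the diff above; the example URLs are illustrative):

```js
// Copied from the new index.js above: strips any run of trailing '/' or
// '/index' segments from a URL or path.
function normalize_url(url) {
    return url.replace(/(\/index|\/)+$/, '')
}

console.log(normalize_url('https://example.com/blah'))        // https://example.com/blah
console.log(normalize_url('https://example.com/blah/'))       // https://example.com/blah
console.log(normalize_url('https://example.com/blah/index'))  // https://example.com/blah
console.log(normalize_url('https://example.com/blah/index/')) // https://example.com/blah
```

The same helper is applied to file paths reported by chokidar, so an edit to `blah/index` on disk is routed back to the `blah` resource.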
package/package.json
CHANGED
```diff
@@ -1,13 +1,13 @@
 {
     "name": "braidfs",
-    "version": "0.0.14",
+    "version": "0.0.16",
     "description": "braid technology synchronizing files and webpages",
     "author": "Braid Working Group",
     "repository": "braid-org/braidfs",
     "homepage": "https://braid.org",
     "dependencies": {
         "braid-http": "^0.3.20",
-        "braid-text": "^0.0.
+        "braid-text": "^0.0.26",
         "chokidar": "^3.6.0"
     }
 }
```
package/~/http/braid.org/braid-text/hi101
ADDED

```diff
@@ -0,0 +1 @@
+abcd
```