braid-http 0.0.1
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/braid-http-client.js +606 -0
- package/braid-http-server.js +258 -0
- package/index.js +12 -0
- package/index.mjs +15 -0
- package/package.json +33 -0
- package/readme.md +269 -0
|
@@ -0,0 +1,606 @@
|
|
|
1
|
+
// A random id identifying this client ("peer") to the server.
var peer = Math.random().toString(36).substr(2)

// ***************************
// http
// ***************************

// Wraps a node `http` (or `https`) module so that .get() adds the braid
// `peer`/`subscribe` headers and responses support .on('version', cb).
// Returns the same (mutated) module for chaining.
function braidify_http (http) {
    // Todo: Wrap .put to add `peer` header
    http.normal_get = http.get
    http.get = function braid_req (arg1, arg2, arg3) {
        var url, options, cb

        // http.get() supports two forms:
        //
        //  - http.get(url[, options][, callback])
        //  - http.get(options[, callback])
        //
        // We need to know which arguments are which, so let's detect which
        // form we are looking at.

        // Detect form #1: http.get(url[, options][, callback])
        if (typeof arg1 === 'string' || arg1 instanceof URL) {
            url = arg1
            if (typeof arg2 === 'function')
                cb = arg2
            else {
                options = arg2
                cb = arg3
            }
        }

        // Otherwise it's form #2: http.get(options[, callback])
        else {
            // Bug fix: in form #2 the options object is arg1 and the
            // callback is arg2 (the old code read arg2/arg3, which put the
            // callback where the options belonged).
            options = arg1
            cb = arg2
        }

        options = options || {}

        // Now we know where the `options` are specified, let's set headers.
        if (!options.headers)
            options.headers = {}

        // Add the subscribe header if this is a subscription
        if (options.subscribe)
            options.headers.subscribe = 'true'

        // Always add the `peer` header
        options.headers.peer = options.headers.peer || peer

        // Wrap the callback to provide our new .on('version', ...) feature
        var on_version,
            on_error,
            orig_cb = cb
        cb = (res) => {
            res.orig_on = res.on
            res.on = (key, f) => {

                // Define .on('version', cb)
                if (key === 'version') {

                    // If we have a 'version' handler, let's remember it
                    on_version = f

                    // And set up a subscription parser
                    var parser = subscription_parser((version, error) => {
                        if (!error)
                            on_version && on_version(version)
                        else
                            on_error && on_error(error)
                    })

                    // That will run each time we get new data
                    res.orig_on('data', (chunk) => {
                        parser.read(chunk.toString())
                    })
                }

                // Forward .on('error', cb) and remember the error function
                else if (key === 'error') {
                    on_error = f
                    res.orig_on(key, f)
                }

                // Forward all other .on(*, cb) calls
                else res.orig_on(key, f)
            }
            orig_cb && orig_cb(res)
        }

        // Now put the parameters back in their prior order and call the
        // underlying .get() function
        if (url) {
            arg1 = url
            if (options) {
                arg2 = options
                arg3 = cb
            } else {
                arg2 = cb
            }
        } else {
            arg1 = options
            arg2 = cb
        }

        return http.normal_get(arg1, arg2, arg3)
    }
    return http
}
|
|
110
|
+
|
|
111
|
+
|
|
112
|
+
|
|
113
|
+
// ***************************
// Fetch
// ***************************

// Environment setup: pick the fetch(), AbortController, and Headers
// implementations appropriate for Node.js vs. a web browser.
var normal_fetch,
    AbortController,
    Headers,
    is_nodejs = typeof window === 'undefined'

if (is_nodejs) {
    // Nodejs

    // Note that reconnect logic doesn't work in node-fetch, because it
    // doesn't call the .catch() handler when the stream fails.
    //
    // See https://github.com/node-fetch/node-fetch/issues/753

    normal_fetch = require('node-fetch')
    AbortController = require('abort-controller')
    Headers = normal_fetch.Headers
    // Converts Node readable streams into WHATWG ReadableStreams, so
    // handle_fetch_stream() can use a single getReader() code path.
    var to_whatwg_stream = require('node-web-streams').toWebReadableStream
} else {
    // Web Browser
    normal_fetch = window.fetch
    AbortController = window.AbortController
    Headers = window.Headers
    // Install the braid-aware fetch as the page's global fetch
    window.fetch = braid_fetch
}
|
|
141
|
+
|
|
142
|
+
/**
 * A braid-aware fetch().
 *
 * Accepts the standard fetch() arguments, plus braid shortcuts on params:
 *   - version:   sets the `version` header (JSON-encoded)
 *   - parents:   sets the `parents` header (comma-joined JSON values)
 *   - subscribe: sets the `subscribe: true` header
 *   - patches:   array of {unit, range, content} serialized as the body
 *
 * Returns the fetch Response augmented with:
 *   - res.subscribe(cb, error): parse streamed versions into callbacks
 *   - res.subscription: an async iterable of parsed versions
 *
 * NOTE(review): this function mutates the caller's `params` object
 * (headers, cache, signal, body) -- confirm callers do not reuse one
 * params object across requests.
 */
async function braid_fetch (url, params = {}) {
    // Initialize the headers object
    if (!params.headers)
        params.headers = new Headers()
    if (!(params.headers instanceof Headers))
        params.headers = new Headers(params.headers)

    // Always set the peer
    params.headers.set('peer', peer)

    // We provide some shortcuts for Braid params
    if (params.version)
        params.headers.set('version', JSON.stringify(params.version))
    if (params.parents)
        params.headers.set('parents', params.parents.map(JSON.stringify).join(', '))
    if (params.subscribe)
        params.headers.set('subscribe', 'true')

    // Prevent browsers from going to disk cache
    params.cache = 'no-cache'

    // Prepare patches: serialize each patch as pseudoheaders + content,
    // joined by blank lines, and declare the count in the Patches header.
    if (params.patches) {
        console.assert(Array.isArray(params.patches), 'Patches must be array')
        console.assert(!params.body, 'Cannot send both patches and body')

        params.patches = params.patches || []
        params.headers.set('patches', params.patches.length)
        params.body = (params.patches).map(patch => {
            var length = `content-length: ${patch.content.length}`
            var range = `content-range: ${patch.unit} ${patch.range}`
            return `${length}\r\n${range}\r\n\r\n${patch.content}\r\n`
        }).join('\r\n')
    }

    // Wrap the AbortController with a new one that we control.
    //
    // This is because we want to be able to abort the fetch that the user
    // passes in.  However, the fetch() command uses a silly "AbortController"
    // abstraction to abort fetches, which has both a `signal` and a
    // `controller`, and only passes the signal to fetch(), but we need the
    // `controller` to abort the fetch itself.
    var original_signal = params.signal
    var underlying_aborter = new AbortController()
    params.signal = underlying_aborter.signal
    if (original_signal)
        original_signal.addEventListener(
            'abort',
            () => underlying_aborter.abort()
        )

    // Now we run the original fetch....
    var res = await normal_fetch(url, params)

    // And customize the response with a couple methods for getting
    // the braid subscription data:
    res.subscribe = start_subscription
    res.subscription = {[Symbol.asyncIterator]: iterator}


    // Now we define the subscription function we just used:
    // cb(version) fires for each parsed version; error(err) fires on
    // stream failure or parse errors.
    function start_subscription (cb, error) {
        if (!res.ok)
            throw new Error('Request returned not ok', res)

        if (res.bodyUsed)
            // TODO: check if this needs a return
            throw new Error('This response\'s body has already been read', res)

        // Parse the streamed response
        handle_fetch_stream(
            res.body,

            // Each time something happens, we'll either get a new
            // version back, or an error.
            (result, err) => {
                if (!err)
                    // Yay!  We got a new version!  Tell the callback!
                    cb(result)
                else {
                    // This error handling code runs if the connection
                    // closes, or if there is unparseable stuff in the
                    // streamed response.

                    // In any case, we want to be sure to abort the
                    // underlying fetch.
                    underlying_aborter.abort()

                    // Then send the error upstream.
                    if (error)
                        error(err)
                    else
                        throw 'Unhandled network error in subscription'
                }
            }
        )
    }


    // And the iterator for use with "for await (...)"
    function iterator () {
        // We'll keep this state while our iterator runs
        var initialized = false,
            inbox = [],       // versions that arrived while nobody awaited
            resolve = null,   // settles the currently-awaited promise
            reject = null
        return {
            async next() {
                // If we've already received a version, return it
                if (inbox.length > 0)
                    return {done: false, value: inbox.shift()}

                // Otherwise, let's set up a promise to resolve when we get the next item
                var promise = new Promise((_resolve, _reject) => {
                    resolve = _resolve
                    reject = _reject
                })

                // Start the subscription, if we haven't already
                if (!initialized) {
                    initialized = true

                    // The subscription will call whichever resolve and
                    // reject functions the current promise is waiting for
                    start_subscription(x => resolve(x),
                                       x => reject(x) )
                }

                // Now wait for the subscription to resolve or reject the promise.
                var result = await promise

                // Anything we get from here out we should add to the inbox
                resolve = (new_version) => inbox.push(new_version)
                reject = (err) => {throw err}

                return { done: false, value: result }
            }
        }
    }

    return res
}
|
|
286
|
+
|
|
287
|
+
// Parse a stream of versions from the incoming bytes
|
|
288
|
+
// Parse a stream of versions from the incoming bytes.
//
// `stream` is the fetch Response body (normalized to a WHATWG stream in
// Node).  `cb(version)` fires for each version parsed; `cb(null, err)`
// fires once when the stream closes or fails, after which we return.
async function handle_fetch_stream (stream, cb) {
    // node-fetch hands us a Node stream; normalize to a WHATWG stream
    if (is_nodejs)
        stream = to_whatwg_stream(stream)

    // Set up a reader and a parser that reports versions through cb
    var reader = stream.getReader(),
        decoder = new TextDecoder('utf-8'),
        parser = subscription_parser(cb)

    while (true) {
        try {
            // Read the next chunk of stream!
            var {done, value} = await reader.read()

            // Check if this connection has been closed!
            if (done) {
                console.debug("Connection closed.")
                cb(null, 'Connection closed')
                return
            }

            // Tell the parser to process some more stream
            parser.read(decoder.decode(value))
        }

        catch (e) {
            // Stream failure (e.g. network error or abort): report upstream
            cb(null, e)
            return
        }
    }
}
|
|
321
|
+
|
|
322
|
+
|
|
323
|
+
|
|
324
|
+
// ****************************
|
|
325
|
+
// Braid-HTTP Subscription Parser
|
|
326
|
+
// ****************************
|
|
327
|
+
|
|
328
|
+
|
|
329
|
+
// ****************************
// Braid-HTTP Subscription Parser
// ****************************


// Creates an incremental parser for a stream of braid versions.
// Feed it text with .read(); it invokes cb(version) for each complete
// version, or cb(null, message) once on a parse error.
var subscription_parser = (cb) => ({
    // Parse state carried across read() calls
    state: {input: ''},

    // Reports new versions (or one error) upstream
    cb: cb,

    // Append freshly-received text and parse out as many versions as the
    // buffered input allows.
    read (input) {

        // Buffer the new text
        this.state.input += input

        // Parse repeatedly until we run out of usable input
        do {
            this.state = parse_version (this.state)

            var outcome = this.state.result

            // A full version was parsed -- deliver it
            if (outcome === 'success') {
                this.cb({
                    version: this.state.version,
                    parents: this.state.parents,
                    body:    this.state.body,
                    patches: this.state.patches
                })

                // Start fresh for the next version, keeping leftover input
                this.state = {input: this.state.input}
            }

            // A syntax error -- report it and stop parsing for good
            else if (outcome === 'error') {
                this.cb(null, this.state.message)
                return
            }

            // Loop until the parser asks for more input or input is spent
        } while (this.state.result !== 'waiting' && this.state.input.trim() !== '')
    }
})
|
|
370
|
+
|
|
371
|
+
|
|
372
|
+
// ****************************
|
|
373
|
+
// General parsing functions
|
|
374
|
+
// ****************************
|
|
375
|
+
//
|
|
376
|
+
// Each of these functions takes parsing state as input, mutates the state,
|
|
377
|
+
// and returns the new state.
|
|
378
|
+
//
|
|
379
|
+
// Depending on the parse result, each parse function returns:
|
|
380
|
+
//
|
|
381
|
+
// parse_<thing> (state)
|
|
382
|
+
// => {result: 'waiting', ...} If it parsed part of an item, but neeeds more input
|
|
383
|
+
// => {result: 'success', ...} If it parses an entire item
|
|
384
|
+
// => {result: 'error', ...} If there is a syntax error in the input
|
|
385
|
+
|
|
386
|
+
|
|
387
|
+
// Parse one version (headers + body/patches) from state.input.
// Mutates and returns `state`; state.result is 'success', 'waiting',
// or 'error' (error results come back as fresh objects from the
// sub-parsers).
function parse_version (state) {
    // Headers come first; parse them if we don't have them yet
    if (!state.headers) {
        var header_result = parse_headers(state.input)

        // Header syntax errors propagate straight up
        if (header_result.result === 'error')
            return header_result

        // Not enough buffered input to finish the headers yet
        if (header_result.result === 'waiting') {
            state.result = 'waiting'
            return state
        }

        // Record the headers and the braid metadata they carry
        state.headers = header_result.headers
        state.version = state.headers.version
        state.parents = state.headers.parents

        // Drop the header bytes we just consumed from the buffer
        state.input = header_result.input
    }

    // Headers are in hand -- continue with the body (or patches)
    return parse_body(state)
}
|
|
411
|
+
|
|
412
|
+
// Strip any run of leading blank lines (CRLF or LF) from the input.
function swallow_blank_lines (input) {
    return input.replace(/^(\r\n|\n)*/, '')
}
|
|
416
|
+
|
|
417
|
+
// Parsing helpers
|
|
418
|
+
// Parse one block of headers from `input`.
//
// Returns:
//   {result: 'waiting'}                      -- need more input
//   {result: 'error', message, ...}          -- syntax error
//   {result: 'success', headers, input}      -- headers object plus the
//                                               input left after them
function parse_headers (input) {
    input = swallow_blank_lines(input)

    // First, find the start & end block of the headers.  The headers start
    // when there are no longer newlines, and end at the first double-newline.

    // Look for the double-newline at the end of the headers
    var headers_end = input.match(/(\r?\n)\r?\n/)

    // ...if we found none, then we need to wait for more input to complete
    // the headers..
    if (!headers_end)
        return {result: 'waiting'}

    // We now know where the headers are to parse!
    // headers_length includes the final header line's newline but not the
    // blank line that terminates the block.
    var headers_length = headers_end.index + headers_end[1].length,
        headers_source = input.substring(0, headers_length)

    // Let's parse them!  First define some variables:
    var headers = {},
        header_regex = /([\w-_]+):\s?(.*)\r?\n/gy, // Parses one line a time
        match,
        found_last_match = false

    // And now loop through the block, matching one line at a time.
    // The sticky /y flag means each match must start exactly where the
    // previous one ended, so any malformed line stops the loop.
    while (match = header_regex.exec(headers_source)) {
        headers[match[1].toLowerCase()] = match[2]

        // This might be the last line of the headers block!
        if (header_regex.lastIndex === headers_length)
            found_last_match = true
    }

    // If the regex failed before we got to the end of the block, throw error:
    if (!found_last_match)
        return {
            result: 'error',
            message: 'Parse error in headers: "'
                     + JSON.stringify(headers_source.substr(header_regex.lastIndex)) + '"',
            headers_so_far: headers,
            last_index: header_regex.lastIndex, headers_length
        }

    // Success!  Let's parse special headers: version, parents, and patches
    // carry JSON-encoded values on the wire.
    if ('version' in headers)
        headers.version = JSON.parse(headers.version)
    if ('parents' in headers)
        headers.parents = JSON.parse('['+headers.parents+']')
    if ('patches' in headers)
        headers.patches = JSON.parse(headers.patches)

    // Update the input
    input = input.substring(headers_length)

    // Swallow the final blank line ending the headers
    if (input.substr(0, 2) === '\r\n')
        // Swallow \r\n
        input = input.substr(2)
    else
        // Swallow \n
        input = input.substr(1)

    // And return the parsed result
    return { result: 'success', headers, input }
}
|
|
484
|
+
|
|
485
|
+
// Parse a version's body out of state.input, guided by state.headers.
//
// A `content-length` header means a body snapshot; a `patches` header
// means a sequence of range patches.  Mutates and returns `state` with
// result 'success'/'waiting', or returns a fresh {result: 'error'} object.
function parse_body (state) {
    // Parse Body Snapshot

    var content_length = parseInt(state.headers['content-length'])

    // NaN means there was no (numeric) content-length header.  Note that a
    // direct `content_length !== NaN` comparison is always true, because
    // NaN never equals itself -- we must use Number.isNaN here.
    if (!Number.isNaN(content_length)) {
        // Wait until the input holds the whole body
        if (content_length > state.input.length) {
            state.result = 'waiting'
            return state
        }

        // Consume the body plus its trailing \r\n
        var consumed_length = content_length + 2
        state.result = 'success'
        state.body = state.input.substring(0, content_length)
        state.input = state.input.substring(consumed_length)
        return state
    }

    // Parse Patches

    else if (state.headers.patches) {
        state.patches = state.patches || []

        var last_patch = state.patches[state.patches.length-1]

        // Parse patches until the final patch has its content filled
        while (!(state.patches.length === state.headers.patches
                 && 'content' in last_patch)) {

            state.input = state.input.trimStart()

            // Are we starting a new patch?
            if (!last_patch || 'content' in last_patch) {
                last_patch = {}
                state.patches.push(last_patch)
            }

            // Parse patch headers
            if (!('headers' in last_patch)) {
                var parsed = parse_headers(state.input)

                // If header-parsing fails, send the error upstream
                if (parsed.result === 'error')
                    return parsed
                if (parsed.result === 'waiting') {
                    state.result = 'waiting'
                    return state
                }

                // We parsed patch headers!  Update state.
                last_patch.headers = parsed.headers
                state.input = parsed.input
            }

            // Todo: support arbitrary patches, not just range-patch

            // Parse Range Patch format
            {
                if (!('content-length' in last_patch.headers))
                    return {
                        result: 'error',
                        message: 'no content-length in patch',
                        patch: last_patch, input: state.input
                    }

                if (!('content-range' in last_patch.headers))
                    return {
                        result: 'error',
                        message: 'no content-range in patch',
                        patch: last_patch, input: state.input
                    }

                var content_length = parseInt(last_patch.headers['content-length'])

                // Does input have the entire patch contents yet?
                if (state.input.length < content_length) {
                    state.result = 'waiting'
                    return state
                }

                // Content-range is of the form '<unit> <range>' e.g. 'json .index'
                var match = last_patch.headers['content-range'].match(/(\S+) (.*)/)
                if (!match)
                    return {
                        result: 'error',
                        message: 'cannot parse content-range in patch',
                        patch: last_patch, input: state.input
                    }

                last_patch.unit = match[1]
                last_patch.range = match[2]
                last_patch.content = state.input.substr(0, content_length)

                // Consume the parsed input
                state.input = state.input.substring(content_length)
            }
        }

        state.result = 'success'
        return state
    }

    // Neither branch applied: the version has no parseable body
    return {
        result: 'error',
        message: 'cannot parse body without content-length or patches header'
    }
}
|
|
592
|
+
|
|
593
|
+
|
|
594
|
+
// ****************************
// Exports
// ****************************

// CommonJS exports.  (In browsers, braid_fetch was already installed as
// window.fetch in the environment setup above, so no export is needed.)
if (typeof module !== 'undefined' && module.exports)
    module.exports = {
        fetch: braid_fetch,
        http: braidify_http,
        subscription_parser,
        parse_version,
        parse_headers,
        parse_body
    }
|
|
@@ -0,0 +1,258 @@
|
|
|
1
|
+
var assert = require('assert')
|
|
2
|
+
|
|
3
|
+
// Write an array of patches into the pseudoheader format.
|
|
4
|
+
// Serialize an array of patches into the braid pseudoheader wire format:
//
//   Patches: n
//
//   content-length: 21
//   content-range: json .range
//
//   {"some": "json object"}
//
//   content-length: x
//   ...
//
// (`res` is accepted for interface compatibility; it is not used.)
function generate_patches(res, patches) {
    // Every patch must already be flattened into strings
    for (let patch of patches) {
        assert(typeof patch.unit === 'string')
        assert(typeof patch.range === 'string')
        assert(typeof patch.content === 'string')
    }

    // Build the output piecewise; CRLF line endings throughout
    var pieces = [`Patches: ${patches.length}\r\n`]
    for (let patch of patches) {
        pieces.push('\r\n')
        pieces.push(`content-length: ${patch.content.length}\r\n`)
        pieces.push(`content-range: ${patch.unit} ${patch.range}\r\n`)
        pieces.push('\r\n')
        pieces.push(`${patch.content}\r\n`)
    }
    return pieces.join('')
}
|
|
31
|
+
|
|
32
|
+
|
|
33
|
+
// This function reads num_patches in pseudoheader format from a
|
|
34
|
+
// ReadableStream and then fires a callback when they're finished.
|
|
35
|
+
// Read `Patches: n` patches in pseudoheader format from a request
// stream, firing cb(patches) once all n have been received.
function parse_patches (req, cb) {
    // Todo: make this work in the case where there is no Patches: header, but
    // Content-Range is still set, nonetheless.

    // The Patches header arrives as a string; coerce it to a number so the
    // `=== 0` short-circuit below can actually match (it never fired for
    // the string '0') and the length comparison is numeric.
    var num_patches = parseInt(req.headers.patches, 10),
        stream = req

    let patches = []
    let buffer = ""
    if (num_patches === 0)
        return cb(patches)

    stream.on('data', function parse (chunk) {
        // Merge the latest chunk into our buffer
        buffer = (buffer + chunk)

        // We might have an extra newline at the start. (mike: why?)
        buffer = buffer.trimStart()

        while (patches.length < num_patches) {
            // First parse the patch headers.  It ends with a double-newline.
            // Let's see where that is.
            var headers_end = buffer.match(/(\r?\n)(\r?\n)/)

            // Give up if we don't have a set of headers yet.
            if (!headers_end)
                return

            // Now we know where things end
            var first_newline = headers_end[1],
                headers_length = headers_end.index + first_newline.length,
                blank_line = headers_end[2]

            // Now let's parse those headers.
            var headers = require('parse-headers')(
                buffer.substring(0, headers_length)
            )

            // We require `content-length` to declare the length of the patch.
            if (!('content-length' in headers)) {
                // Print a nice error if it's missing
                console.error('No content-length in', JSON.stringify(headers))
                process.exit(1)
            }

            var body_length = parseInt(headers['content-length'], 10)

            // Give up if we don't have the full patch yet.
            if (buffer.length < headers_length + blank_line.length + body_length)
                return

            // XX Todo: support custom patch types beyond content-range.

            // Content-range is of the form '<unit> <range>' e.g. 'json .index'
            var [unit, range] = headers['content-range'].match(/(\S+) (.*)/).slice(1)
            var patch_content =
                buffer.substring(headers_length + blank_line.length,
                                 headers_length + blank_line.length + body_length)

            // We've got our patch!
            patches.push({unit, range, content: patch_content})

            // Consume the parsed headers + blank line + body
            buffer = buffer.substring(headers_length + blank_line.length + body_length)
        }

        // We got all the patches!  Pause the stream and tell the callback!
        stream.pause()
        cb(patches)
    })
    stream.on('end', () => {
        // If the stream ends before we get everything, then return what we
        // did receive
        console.error('Stream ended!')
        if (patches.length !== num_patches)
            console.error(`Got an incomplete PUT: ${patches.length}/${num_patches} patches were received`)
    })
}
|
|
112
|
+
|
|
113
|
+
// Express/connect-style middleware that augments (req, res) with braid
// helpers: req.version, req.parents, req.subscribe, req.patches(),
// req.patchesJSON(), res.sendVersion(), and req/res.startSubscription().
function braidify (req, res, next) {
    // First, declare that we support Patches and JSON ranges.
    res.setHeader('Range-Request-Allow-Methods', 'PATCH, PUT')
    res.setHeader('Range-Request-Allow-Units', 'json')
    res.setHeader("Patches", "OK")

    // Extract braid info from headers
    var version = req.headers.version && JSON.parse(req.headers.version),
        parents = req.headers.parents && JSON.parse('['+req.headers.parents+']'),
        peer = req.headers['peer'],
        url = req.url.substr(1)

    // Parse the subscribe header
    var subscribe = req.headers.subscribe
    if (subscribe === 'true')
        subscribe = true

    // Define convenience variables
    req.version = version
    req.parents = parents
    req.subscribe = subscribe

    // Add the braidly request/response helper methods
    res.sendVersion = (stuff) => send_version(res, stuff, req.url, peer)
    req.patches = () => new Promise(
        (done, err) => parse_patches(req, (patches) => done(patches))
    )
    req.patchesJSON = () => new Promise(
        (done, err) => parse_patches(
            req,
            (patches) => done(patches.map(
                p => ({...p, content: JSON.parse(p.content)})
            ))
        )
    )
    req.startSubscription = res.startSubscription =
        function startSubscription (args = {}) {
            res.isSubscription = true

            // Let's disable the timeouts
            req.socket.server.timeout = 0.0

            // We have a subscription!  209 is the braid "Subscription" status.
            res.statusCode = 209
            res.setHeader("subscribe", req.headers.subscribe)
            res.setHeader('cache-control', 'no-cache, no-transform')

            // Fire args.onClose exactly once, whichever close event lands first
            var connected = true
            function disconnected (x) {
                if (!connected) return
                connected = false

                // Now call the callback
                if (args.onClose)
                    args.onClose()
            }

            res.on('close', x => disconnected('close'))
            res.on('finish', x => disconnected('finish'))
            req.on('abort', x => disconnected('abort'))
        }

    // Check the Useragent to work around Firefox bugs.
    // Guard against a missing user-agent header -- the unguarded
    // .toLowerCase() used to throw a TypeError for agent-less requests.
    if ((req.headers['user-agent'] || '').toLowerCase().indexOf('firefox') > -1)
        res.is_firefox = true

    next && next()
}
|
|
189
|
+
|
|
190
|
+
// Write one version (metadata headers plus body or patches) to the
// response.  In subscription mode the headers are written into the body
// stream as "virtual headers"; otherwise they become real HTTP headers.
// `url` and `peer` are currently unused but kept for interface stability.
function send_version(res, data, url, peer) {
    var {version, parents, patches, body} = data

    // Real header vs. virtual in-stream header, depending on mode
    function set_header (key, val) {
        if (res.isSubscription)
            res.write(`${key}: ${val}\r\n`)
        else
            res.setHeader(key, val)
    }
    function write_body (body) {
        if (res.isSubscription)
            res.write('\r\n' + body + '\r\n')
        else
            res.write(body)
    }

    // Validate that the body and patches are strings
    if (body !== undefined)
        assert(typeof body === 'string')
    else {
        assert(patches !== undefined)
        patches.forEach(p => assert(typeof p.content === 'string'))
    }

    // Write the headers or virtual headers
    for (var [header, value] of Object.entries(data)) {
        // Version and Parents get output in the Structured Headers format
        if (header === 'version')
            value = JSON.stringify(value)
        else if (header === 'parents')
            value = parents.map(JSON.stringify).join(", ")

        // We don't output patches or body yet (fixed loose == to ===)
        else if (header === 'patches' || header === 'body')
            continue

        set_header(header, value)
    }

    // Write the patches or body
    if (Array.isArray(patches))
        res.write(generate_patches(res, patches)) // adds its own newline
    else if (typeof body === 'string') {
        // NOTE(review): body.length counts UTF-16 code units, not bytes, so
        // content-length is wrong for multibyte UTF-8 bodies -- confirm
        // whether Buffer.byteLength(body) is intended here.
        set_header('content-length', body.length)
        write_body(body)
    } else {
        console.trace("Missing body or patches")
        process.exit()
    }

    // Add a newline to prepare for the next version
    // See also https://github.com/braid-org/braid-spec/issues/73
    if (res.isSubscription) {
        var extra_newlines = 0
        if (res.is_firefox)
            // Work around Firefox network buffering bug
            // See https://github.com/braid-org/braidjs/issues/15
            extra_newlines = 240

        for (var i = 0; i < 1 + extra_newlines; i++)
            res.write("\r\n")
    }
}
|
|
256
|
+
|
|
257
|
+
|
|
258
|
+
module.exports = braidify
|
package/index.js
ADDED
|
@@ -0,0 +1,12 @@
|
|
|
1
|
+
// This is the root file for require('braid-http').
|
|
2
|
+
//
|
|
3
|
+
// It combines the client and server files into one file.
|
|
4
|
+
|
|
5
|
+
var client = require('./braid-http-client'),
|
|
6
|
+
server = require('./braid-http-server')
|
|
7
|
+
|
|
8
|
+
module.exports = {
|
|
9
|
+
fetch: client.fetch,
|
|
10
|
+
http: client.http,
|
|
11
|
+
http_server: server
|
|
12
|
+
}
|
package/index.mjs
ADDED
|
@@ -0,0 +1,15 @@
|
|
|
1
|
+
// Root entry point for ES modules:
//
//     import { fetch, http } from 'braid-http'
//
// Combines the braid client and server into a single module.

import braid_client from './braid-http-client.js'
import braid_server from './braid-http-server.js'

const fetch = braid_client.fetch
const http = braid_client.http
const http_server = braid_server

export { fetch, http, http_server }
export default { fetch, http, http_server }
|
package/package.json
ADDED
|
@@ -0,0 +1,33 @@
|
|
|
1
|
+
{
|
|
2
|
+
"name": "braid-http",
|
|
3
|
+
"version": "0.0.1",
|
|
4
|
+
"description": "An implementation of Braid-HTTP for Node.js and Browsers",
|
|
5
|
+
"scripts": {
|
|
6
|
+
"test": "node test/server.js"
|
|
7
|
+
},
|
|
8
|
+
"author": "Braid Working Group",
|
|
9
|
+
"repository": "braid-org/braidjs",
|
|
10
|
+
"homepage": "https://braid.org",
|
|
11
|
+
"files": [
|
|
12
|
+
"braid-http-client.js",
|
|
13
|
+
"braid-http-server.js",
|
|
14
|
+
"index.js",
|
|
15
|
+
"index.mjs"
|
|
16
|
+
],
|
|
17
|
+
"main": "./index.js",
|
|
18
|
+
"exports": {
|
|
19
|
+
"require": "./index.js",
|
|
20
|
+
"import": "./index.mjs"
|
|
21
|
+
},
|
|
22
|
+
"browser": {
|
|
23
|
+
"node-web-streams": false,
|
|
24
|
+
"node-fetch": false,
|
|
25
|
+
"abort-controller": false
|
|
26
|
+
},
|
|
27
|
+
"dependencies": {
|
|
28
|
+
"abort-controller": "^3.0.0",
|
|
29
|
+
"node-fetch": "^2.6.1",
|
|
30
|
+
"node-web-streams": "^0.2.2",
|
|
31
|
+
"parse-headers": "^2.0.3"
|
|
32
|
+
}
|
|
33
|
+
}
|
package/readme.md
ADDED
|
@@ -0,0 +1,269 @@
|
|
|
1
|
+
# Braid-HTTP
|
|
2
|
+
|
|
3
|
+
This polyfill library implements the [Braid-HTTP v03 protocol](https://github.com/braid-org/braid-spec/blob/master/draft-toomim-httpbis-braid-http-03.txt) in Javascript. It extends the existing browser `fetch()` API, and the nodejs `http` library, with the ability to speak Braid.
|
|
4
|
+
|
|
5
|
+
Developed in [braid.org](https://braid.org).
|
|
6
|
+
|
|
7
|
+
|
|
8
|
+
## Installing
|
|
9
|
+
|
|
10
|
+
Browsers:
|
|
11
|
+
|
|
12
|
+
```html
|
|
13
|
+
<script src="https://unpkg.com/braid-http/braid-http-client.js"></script>
|
|
14
|
+
```
|
|
15
|
+
|
|
16
|
+
Node.js:
|
|
17
|
+
|
|
18
|
+
```shell
|
|
19
|
+
npm install braid-http
|
|
20
|
+
```
|
|
21
|
+
|
|
22
|
+
Then in your node.js code:
|
|
23
|
+
|
|
24
|
+
```javascript
|
|
25
|
+
require('braid-http').fetch // A polyfill for require('node-fetch')
|
|
26
|
+
require('braid-http').http // A polyfill for require('http') clients
|
|
27
|
+
require('braid-http').http_server // A polyfill for require('http') servers
|
|
28
|
+
```
|
|
29
|
+
|
|
30
|
+
## Using it in Browsers
|
|
31
|
+
|
|
32
|
+
This library adds a `{subscribe: true}` option to `fetch()`, and lets you
|
|
33
|
+
access the result of a subscription with two new fields on the fetch response:
|
|
34
|
+
|
|
35
|
+
- `response.subscribe( new_version => ... )`
|
|
36
|
+
- `response.subscription`: an iterator that can be used with `for await`
|
|
37
|
+
|
|
38
|
+
### Example Subscription with Promises
|
|
39
|
+
|
|
40
|
+
Here is an example of subscribing to a Braid resource using promises:
|
|
41
|
+
|
|
42
|
+
```javascript
|
|
43
|
+
fetch('https://braid.org/chat', {subscribe: true}).then(
|
|
44
|
+
res => res.subscribe(
|
|
45
|
+
(new_version) => {
|
|
46
|
+
console.log('We got a new version!', new_version)
|
|
47
|
+
// {
|
|
48
|
+
// version: "me",
|
|
49
|
+
// parents: ["mom", "dad"],
|
|
50
|
+
// patches: [{unit: "json", range: ".foo", content: "3"}]
|
|
51
|
+
// body: "3"
|
|
52
|
+
// }
|
|
53
|
+
//
|
|
54
|
+
// Note that new_version will contain either patches *or* body
|
|
55
|
+
}
|
|
56
|
+
)
|
|
57
|
+
)
|
|
58
|
+
```
|
|
59
|
+
|
|
60
|
+
If you want automatic reconnections, add two error handlers like this:
|
|
61
|
+
|
|
62
|
+
```javascript
|
|
63
|
+
function connect() {
|
|
64
|
+
fetch('https://braid.org/chat', {subscribe: true}).then(
|
|
65
|
+
res => res.subscribe(
|
|
66
|
+
(new_version) => {
|
|
67
|
+
console.log('We got a new version!', new_version)
|
|
68
|
+
// Do something with the new_version
|
|
69
|
+
},
|
|
70
|
+
e => setTimeout(connect, 1000)
|
|
71
|
+
)
|
|
72
|
+
).catch(e => setTimeout(connect, 1000))
|
|
73
|
+
}
|
|
74
|
+
connect()
|
|
75
|
+
```
|
|
76
|
+
|
|
77
|
+
### Example Subscription with Async/Await
|
|
78
|
+
|
|
79
|
+
```javascript
|
|
80
|
+
async function connect () {
|
|
81
|
+
try {
|
|
82
|
+
(await fetch('/chat', {subscribe: true})).subscribe(
|
|
83
|
+
(new_version) => {
|
|
84
|
+
// We got a new version!
|
|
85
|
+
},
|
|
86
|
+
() => setTimeout(connect, 1000)
|
|
87
|
+
)
|
|
88
|
+
} catch (e) {
|
|
89
|
+
setTimeout(connect, 1000)
|
|
90
|
+
}
|
|
91
|
+
}
|
|
92
|
+
```
|
|
93
|
+
|
|
94
|
+
### Example Subscription with `for await`
|
|
95
|
+
|
|
96
|
+
```javascript
|
|
97
|
+
async function connect () {
|
|
98
|
+
try {
|
|
99
|
+
for await (var v of (await fetch('/chat', {subscribe: true})).subscription) {
|
|
100
|
+
// Updates might come in the form of patches:
|
|
101
|
+
if (v.patches)
|
|
102
|
+
chat = apply_patches(v.patches, chat)
|
|
103
|
+
|
|
104
|
+
// Or complete versions:
|
|
105
|
+
else
|
|
106
|
+
// Beware the server doesn't send these yet.
|
|
107
|
+
chat = JSON.parse(v.body)
|
|
108
|
+
|
|
109
|
+
render_stuff()
|
|
110
|
+
}
|
|
111
|
+
} catch (e) {
|
|
112
|
+
console.log('Reconnecting...')
|
|
113
|
+
setTimeout(connect, 4000)
|
|
114
|
+
}
|
|
115
|
+
}
|
|
116
|
+
```
|
|
117
|
+
|
|
118
|
+
## Using it in Nodejs
|
|
119
|
+
|
|
120
|
+
### Example Nodejs server with `require('http')`
|
|
121
|
+
|
|
122
|
+
Braidify adds these fields and methods to requests and responses:
|
|
123
|
+
- `req.subscribe`
|
|
124
|
+
- `res.startSubscription({onClose: cb})`
|
|
125
|
+
- `await req.patches()`
|
|
126
|
+
- `res.sendVersion()`
|
|
127
|
+
|
|
128
|
+
Use it like this:
|
|
129
|
+
|
|
130
|
+
```javascript
|
|
131
|
+
var braidify = require('braid-http').http_server
|
|
132
|
+
// or:
|
|
133
|
+
import {http_server as braidify} from 'braid-http'
|
|
134
|
+
|
|
135
|
+
require('http').createServer(
|
|
136
|
+
(req, res) => {
|
|
137
|
+
// Add braid stuff to req and res
|
|
138
|
+
braidify(req, res)
|
|
139
|
+
|
|
140
|
+
// Now use it
|
|
141
|
+
if (req.subscribe)
|
|
142
|
+
res.startSubscription({ onClose: _=> null })
|
|
143
|
+
// startSubscription automatically sets statusCode = 209
|
|
144
|
+
else
|
|
145
|
+
res.statusCode = 200
|
|
146
|
+
|
|
147
|
+
// Send the current version
|
|
148
|
+
res.sendVersion({
|
|
149
|
+
version: 'greg',
|
|
150
|
+
body: JSON.stringify({greg: 'greg'})
|
|
151
|
+
})
|
|
152
|
+
}
|
|
153
|
+
).listen(9935)
|
|
154
|
+
```
|
|
155
|
+
|
|
156
|
+
### Example Nodejs server with `require('express')`
|
|
157
|
+
|
|
158
|
+
With `express`, you can simply call `app.use(braidify)` to get braid features
|
|
159
|
+
added to every request and response.
|
|
160
|
+
|
|
161
|
+
```javascript
|
|
162
|
+
var braidify = require('braid-http').http_server
|
|
163
|
+
// or:
|
|
164
|
+
import {http_server as braidify} from 'braid-http'
|
|
165
|
+
|
|
166
|
+
var app = require('express')()
|
|
167
|
+
|
|
168
|
+
app.use(braidify) // Add braid stuff to req and res
|
|
169
|
+
|
|
170
|
+
app.get('/', (req, res) => {
|
|
171
|
+
// Now use it
|
|
172
|
+
if (req.subscribe)
|
|
173
|
+
res.startSubscription({ onClose: _=> null })
|
|
174
|
+
// startSubscription automatically sets statusCode = 209
|
|
175
|
+
else
|
|
176
|
+
res.statusCode = 200
|
|
177
|
+
|
|
178
|
+
// Send the current version
|
|
179
|
+
res.sendVersion({
|
|
180
|
+
version: 'greg',
|
|
181
|
+
parents: ['gr','eg'],
|
|
182
|
+
body: JSON.stringify({greg: 'greg'})
|
|
183
|
+
})
|
|
184
|
+
|
|
185
|
+
// Or you can send patches like this:
|
|
186
|
+
// res.sendVersion({
|
|
187
|
+
// version: 'greg',
|
|
188
|
+
// parents: ['gr','eg'],
|
|
189
|
+
// patches: [{range: '.greg', unit: 'json', content: '"greg"'}]
|
|
190
|
+
// })
|
|
191
|
+
})
|
|
192
|
+
|
|
193
|
+
require('http').createServer(app).listen(8583)
|
|
194
|
+
```
|
|
195
|
+
|
|
196
|
+
|
|
197
|
+
|
|
198
|
+
### Example Nodejs client with `require('http')`
|
|
199
|
+
|
|
200
|
+
```javascript
|
|
201
|
+
// Use this line if necessary for self-signed certs
|
|
202
|
+
// process.env["NODE_TLS_REJECT_UNAUTHORIZED"] = 0
|
|
203
|
+
|
|
204
|
+
var https = require('braid-http').http(require('https'))
|
|
205
|
+
// or:
|
|
206
|
+
// import braid_http from 'braid-http'
|
|
207
|
+
// https = braid_http.http(require('https'))
|
|
208
|
+
|
|
209
|
+
https.get(
|
|
210
|
+
'https://braid.org/chat',
|
|
211
|
+
{subscribe: true},
|
|
212
|
+
(res) => {
|
|
213
|
+
res.on('version', (version) => {
|
|
214
|
+
console.log('well we got one', version)
|
|
215
|
+
})
|
|
216
|
+
}
|
|
217
|
+
)
|
|
218
|
+
```
|
|
219
|
+
|
|
220
|
+
To get auto-reconnections use:
|
|
221
|
+
|
|
222
|
+
```javascript
|
|
223
|
+
function connect () {
|
|
224
|
+
https.get(
|
|
225
|
+
'https://braid.org/chat',
|
|
226
|
+
{subscribe: true},
|
|
227
|
+
(res) => {
|
|
228
|
+
res.on('version', (version) => {
|
|
229
|
+
// {
|
|
230
|
+
// version: "me",
|
|
231
|
+
// parents: ["mom", "dad"],
|
|
232
|
+
// patches: [{unit: "json", range: ".foo", content: "3"}]
|
|
233
|
+
// body: "3"
|
|
234
|
+
// }
|
|
235
|
+
// // Version will contain either patches *or* body, but not both
|
|
236
|
+
console.log('We got a new version!', version)
|
|
237
|
+
})
|
|
238
|
+
|
|
239
|
+
res.on('end', e => setTimeout(connect, 1000))
|
|
240
|
+
res.on('error', e => setTimeout(connect, 1000))
|
|
241
|
+
})
|
|
242
|
+
}
|
|
243
|
+
connect()
|
|
244
|
+
```
|
|
245
|
+
|
|
246
|
+
|
|
247
|
+
### Example Nodejs client with `fetch()`
|
|
248
|
+
|
|
249
|
+
```javascript
|
|
250
|
+
var fetch = require('braid-http').fetch
|
|
251
|
+
// or:
|
|
252
|
+
import {fetch} from 'braid-http'
|
|
253
|
+
|
|
254
|
+
// process.env["NODE_TLS_REJECT_UNAUTHORIZED"] = 0
|
|
255
|
+
|
|
256
|
+
fetch('https://localhost:3009/chat',
|
|
257
|
+
{subscribe: true}).then(
|
|
258
|
+
x => console.log('Got ', x)
|
|
259
|
+
)
|
|
260
|
+
```
|
|
261
|
+
|
|
262
|
+
Note: the current version of `node-fetch` doesn't properly throw errors when a
|
|
263
|
+
response connection dies, and thus you cannot attach a `.catch()` handler to
|
|
264
|
+
automatically reconnect. (See
|
|
265
|
+
[issue #980](https://github.com/node-fetch/node-fetch/issues/980) and
|
|
266
|
+
[#753](https://github.com/node-fetch/node-fetch/issues/753).) We recommend
|
|
267
|
+
using the `http` library (below) for requests on nodejs instead.
|
|
268
|
+
|
|
269
|
+
|