braid-text 0.2.65 → 0.2.67
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/index.js +1871 -1794
- package/package.json +1 -1
- package/test/test.html +285 -0
package/index.js
CHANGED
|
@@ -1,2196 +1,2273 @@
|
|
|
1
1
|
|
|
2
2
|
let { Doc, OpLog, Branch } = require("@braid.org/diamond-types-node")
|
|
3
|
-
let braidify = require("braid-http")
|
|
3
|
+
let {http_server: braidify, fetch: braid_fetch} = require("braid-http")
|
|
4
4
|
let fs = require("fs")
|
|
5
5
|
|
|
6
|
-
|
|
7
|
-
|
|
8
|
-
|
|
9
|
-
|
|
10
|
-
|
|
11
|
-
|
|
6
|
+
function create_braid_text() {
|
|
7
|
+
let braid_text = {
|
|
8
|
+
verbose: false,
|
|
9
|
+
db_folder: './braid-text-db',
|
|
10
|
+
length_cache_size: 10,
|
|
11
|
+
meta_file_save_period_ms: 1000,
|
|
12
|
+
cache: {}
|
|
13
|
+
}
|
|
12
14
|
|
|
13
|
-
let waiting_puts = 0
|
|
15
|
+
let waiting_puts = 0
|
|
14
16
|
|
|
15
|
-
let max_encoded_key_size = 240
|
|
17
|
+
let max_encoded_key_size = 240
|
|
16
18
|
|
|
17
|
-
braid_text.serve = async (req, res, options = {}) => {
|
|
18
|
-
|
|
19
|
-
|
|
20
|
-
|
|
21
|
-
|
|
22
|
-
|
|
23
|
-
|
|
24
|
-
braid_text.free_cors(res)
|
|
19
|
+
braid_text.serve = async (req, res, options = {}) => {
|
|
20
|
+
options = {
|
|
21
|
+
key: req.url.split('?')[0], // Default key
|
|
22
|
+
put_cb: (key, val) => { }, // Default callback when a PUT changes a key
|
|
23
|
+
...options // Override with all options passed in
|
|
24
|
+
}
|
|
25
25
|
|
|
26
|
-
|
|
27
|
-
res.writeHead(statusCode, statusText, headers)
|
|
28
|
-
res.end(x ?? '')
|
|
29
|
-
}
|
|
26
|
+
braid_text.free_cors(res)
|
|
30
27
|
|
|
31
|
-
|
|
32
|
-
|
|
33
|
-
|
|
28
|
+
function my_end(statusCode, x, statusText, headers) {
|
|
29
|
+
res.writeHead(statusCode, statusText, headers)
|
|
30
|
+
res.end(x ?? '')
|
|
31
|
+
}
|
|
34
32
|
|
|
35
|
-
|
|
36
|
-
|
|
37
|
-
|
|
38
|
-
return my_end(500, "The server failed to process this request. The error generated was: " + e)
|
|
39
|
-
}
|
|
33
|
+
let resource = null
|
|
34
|
+
try {
|
|
35
|
+
resource = await get_resource(options.key)
|
|
40
36
|
|
|
41
|
-
|
|
37
|
+
braidify(req, res)
|
|
38
|
+
if (res.is_multiplexer) return
|
|
39
|
+
} catch (e) {
|
|
40
|
+
return my_end(500, "The server failed to process this request. The error generated was: " + e)
|
|
41
|
+
}
|
|
42
42
|
|
|
43
|
-
|
|
44
|
-
if (req.headers['selection-sharing-prototype']) {
|
|
45
|
-
res.setHeader('Content-Type', 'application/json')
|
|
43
|
+
let peer = req.headers["peer"]
|
|
46
44
|
|
|
47
|
-
|
|
48
|
-
if (
|
|
45
|
+
// selection sharing prototype
|
|
46
|
+
if (req.headers['selection-sharing-prototype']) {
|
|
47
|
+
res.setHeader('Content-Type', 'application/json')
|
|
49
48
|
|
|
50
|
-
|
|
51
|
-
if (!
|
|
52
|
-
return my_end(200, JSON.stringify(resource.selections))
|
|
53
|
-
} else {
|
|
54
|
-
var client = {peer, res}
|
|
55
|
-
resource.selection_clients.add(client)
|
|
56
|
-
res.startSubscription({
|
|
57
|
-
onClose: () => resource.selection_clients.delete(client)
|
|
58
|
-
})
|
|
59
|
-
res.sendUpdate({ body: JSON.stringify(resource.selections) })
|
|
60
|
-
return
|
|
61
|
-
}
|
|
62
|
-
} else if (req.method == "PUT" || req.method == "POST" || req.method == "PATCH") {
|
|
63
|
-
var body = (await req.patches())[0].content_text
|
|
64
|
-
var json = JSON.parse(body)
|
|
65
|
-
|
|
66
|
-
// only keep new selections if they are newer
|
|
67
|
-
for (var [user, selection] of Object.entries(json)) {
|
|
68
|
-
if (resource.selections[user] && resource.selections[user].time > selection.time) delete json[user]
|
|
69
|
-
else resource.selections[user] = selection
|
|
70
|
-
}
|
|
49
|
+
if (!resource.selections) resource.selections = {}
|
|
50
|
+
if (!resource.selection_clients) resource.selection_clients = new Set()
|
|
71
51
|
|
|
72
|
-
|
|
73
|
-
|
|
74
|
-
|
|
75
|
-
|
|
76
|
-
|
|
77
|
-
|
|
52
|
+
if (req.method === "GET" || req.method === "HEAD") {
|
|
53
|
+
if (!req.subscribe) {
|
|
54
|
+
return my_end(200, JSON.stringify(resource.selections))
|
|
55
|
+
} else {
|
|
56
|
+
var client = {peer, res}
|
|
57
|
+
resource.selection_clients.add(client)
|
|
58
|
+
res.startSubscription({
|
|
59
|
+
onClose: () => resource.selection_clients.delete(client)
|
|
60
|
+
})
|
|
61
|
+
res.sendUpdate({ body: JSON.stringify(resource.selections) })
|
|
62
|
+
return
|
|
63
|
+
}
|
|
64
|
+
} else if (req.method == "PUT" || req.method == "POST" || req.method == "PATCH") {
|
|
65
|
+
var body = (await req.patches())[0].content_text
|
|
66
|
+
var json = JSON.parse(body)
|
|
67
|
+
|
|
68
|
+
// only keep new selections if they are newer
|
|
69
|
+
for (var [user, selection] of Object.entries(json)) {
|
|
70
|
+
if (resource.selections[user] && resource.selections[user].time > selection.time) delete json[user]
|
|
71
|
+
else resource.selections[user] = selection
|
|
78
72
|
}
|
|
79
73
|
|
|
80
|
-
|
|
81
|
-
|
|
82
|
-
for (
|
|
83
|
-
if (
|
|
84
|
-
|
|
85
|
-
|
|
74
|
+
// remove old selections that are too old
|
|
75
|
+
var long_ago = Date.now() - 1000 * 60 * 5
|
|
76
|
+
for (var [user, selection] of Object.entries(resource.selections))
|
|
77
|
+
if (selection.time < long_ago) {
|
|
78
|
+
delete resource.selections[user]
|
|
79
|
+
delete json[user]
|
|
80
|
+
}
|
|
81
|
+
|
|
82
|
+
body = JSON.stringify(json)
|
|
83
|
+
if (body.length > 2)
|
|
84
|
+
for (let client of resource.selection_clients)
|
|
85
|
+
if (client.peer !== peer) client.res.sendUpdate({ body })
|
|
86
|
+
|
|
87
|
+
return my_end(200)
|
|
88
|
+
}
|
|
86
89
|
}
|
|
87
|
-
}
|
|
88
90
|
|
|
89
|
-
|
|
90
|
-
|
|
91
|
-
|
|
92
|
-
|
|
93
|
-
|
|
94
|
-
|
|
95
|
-
|
|
96
|
-
|
|
97
|
-
|
|
98
|
-
|
|
99
|
-
|
|
100
|
-
|
|
101
|
-
|
|
102
|
-
|
|
103
|
-
|
|
104
|
-
|
|
105
|
-
|
|
106
|
-
|
|
107
|
-
|
|
108
|
-
|
|
109
|
-
|
|
91
|
+
let merge_type = req.headers["merge-type"]
|
|
92
|
+
if (!merge_type) merge_type = 'simpleton'
|
|
93
|
+
if (merge_type !== 'simpleton' && merge_type !== 'dt') return my_end(400, `Unknown merge type: ${merge_type}`)
|
|
94
|
+
|
|
95
|
+
// set default content type of text/plain
|
|
96
|
+
if (!res.getHeader('content-type')) res.setHeader('Content-Type', 'text/plain')
|
|
97
|
+
|
|
98
|
+
// no matter what the content type is,
|
|
99
|
+
// we want to set the charset to utf-8
|
|
100
|
+
const contentType = res.getHeader('Content-Type')
|
|
101
|
+
const parsedContentType = contentType.split(';').map(part => part.trim())
|
|
102
|
+
const charsetParam = parsedContentType.find(part => part.toLowerCase().startsWith('charset='))
|
|
103
|
+
if (!charsetParam)
|
|
104
|
+
res.setHeader('Content-Type', `${contentType}; charset=utf-8`)
|
|
105
|
+
else if (charsetParam.toLowerCase() !== 'charset=utf-8') {
|
|
106
|
+
// Replace the existing charset with utf-8
|
|
107
|
+
const updatedContentType = parsedContentType
|
|
108
|
+
.map(part => (part.toLowerCase().startsWith('charset=') ? 'charset=utf-8' : part))
|
|
109
|
+
.join('; ');
|
|
110
|
+
res.setHeader('Content-Type', updatedContentType);
|
|
111
|
+
}
|
|
110
112
|
|
|
111
|
-
|
|
113
|
+
if (req.method == "OPTIONS") return my_end(200)
|
|
112
114
|
|
|
113
|
-
|
|
114
|
-
|
|
115
|
-
|
|
116
|
-
|
|
115
|
+
if (req.method == "DELETE") {
|
|
116
|
+
await braid_text.delete(resource)
|
|
117
|
+
return my_end(200)
|
|
118
|
+
}
|
|
117
119
|
|
|
118
|
-
|
|
119
|
-
|
|
120
|
+
var get_current_version = () => ascii_ify(
|
|
121
|
+
resource.version.map(x => JSON.stringify(x)).join(", "))
|
|
120
122
|
|
|
121
|
-
|
|
122
|
-
|
|
123
|
-
|
|
124
|
-
|
|
125
|
-
|
|
126
|
-
|
|
127
|
-
|
|
128
|
-
|
|
129
|
-
|
|
130
|
-
|
|
131
|
-
|
|
132
|
-
|
|
123
|
+
if (req.method == "GET" || req.method == "HEAD") {
|
|
124
|
+
// make sure we have the necessary version and parents
|
|
125
|
+
var unknowns = []
|
|
126
|
+
for (var event of (req.version || []).concat(req.parents || [])) {
|
|
127
|
+
var [actor, seq] = decode_version(event)
|
|
128
|
+
if (!resource.actor_seqs[actor]?.has(seq))
|
|
129
|
+
unknowns.push(event)
|
|
130
|
+
}
|
|
131
|
+
if (unknowns.length)
|
|
132
|
+
return my_end(309, '', "Version Unknown Here", {
|
|
133
|
+
Version: ascii_ify(unknowns.map(e => JSON.stringify(e)).join(', '))
|
|
134
|
+
})
|
|
133
135
|
|
|
134
|
-
|
|
135
|
-
|
|
136
|
+
if (!req.subscribe) {
|
|
137
|
+
res.setHeader("Accept-Subscribe", "true")
|
|
138
|
+
|
|
139
|
+
// special case for HEAD asking for version/parents,
|
|
140
|
+
// to be faster by not reconstructing body
|
|
141
|
+
if (req.method === "HEAD" && (req.version || req.parents))
|
|
142
|
+
return my_end(200)
|
|
143
|
+
|
|
144
|
+
let x = null
|
|
145
|
+
try {
|
|
146
|
+
x = await braid_text.get(resource, {
|
|
147
|
+
version: req.version,
|
|
148
|
+
parents: req.parents,
|
|
149
|
+
transfer_encoding: req.headers['accept-transfer-encoding']
|
|
150
|
+
})
|
|
151
|
+
} catch (e) {
|
|
152
|
+
return my_end(500, "The server failed to get something. The error generated was: " + e)
|
|
153
|
+
}
|
|
136
154
|
|
|
137
|
-
|
|
138
|
-
|
|
139
|
-
|
|
140
|
-
|
|
155
|
+
if (req.headers['accept-transfer-encoding'] === 'dt') {
|
|
156
|
+
res.setHeader("Current-Version", get_current_version())
|
|
157
|
+
res.setHeader("X-Transfer-Encoding", 'dt')
|
|
158
|
+
res.setHeader("Content-Length", x.body.length)
|
|
159
|
+
return my_end(209, req.method === "HEAD" ? null : x.body, 'Multiresponse')
|
|
160
|
+
} else {
|
|
161
|
+
if (req.version || req.parents)
|
|
162
|
+
res.setHeader("Current-Version", get_current_version())
|
|
163
|
+
res.setHeader("Version", ascii_ify(x.version.map((x) => JSON.stringify(x)).join(", ")))
|
|
164
|
+
var buffer = Buffer.from(x.body, "utf8")
|
|
165
|
+
res.setHeader("Repr-Digest", get_digest(buffer))
|
|
166
|
+
res.setHeader("Content-Length", buffer.length)
|
|
167
|
+
return my_end(200, req.method === "HEAD" ? null : buffer)
|
|
168
|
+
}
|
|
169
|
+
} else {
|
|
170
|
+
if (!res.hasHeader("editable")) res.setHeader("Editable", "true")
|
|
171
|
+
res.setHeader("Merge-Type", merge_type)
|
|
172
|
+
res.setHeader("Current-Version", get_current_version())
|
|
173
|
+
if (req.method == "HEAD") return my_end(200)
|
|
141
174
|
|
|
142
|
-
|
|
143
|
-
|
|
144
|
-
x = await braid_text.get(resource, {
|
|
175
|
+
let options = {
|
|
176
|
+
peer,
|
|
145
177
|
version: req.version,
|
|
146
178
|
parents: req.parents,
|
|
147
|
-
|
|
179
|
+
merge_type,
|
|
180
|
+
accept_encoding:
|
|
181
|
+
req.headers['x-accept-encoding'] ??
|
|
182
|
+
req.headers['accept-encoding'],
|
|
183
|
+
subscribe: x => {
|
|
184
|
+
|
|
185
|
+
// this is a sanity/rhobustness check..
|
|
186
|
+
// ..this digest is checked on the client..
|
|
187
|
+
if (x.version && v_eq(x.version, resource.version))
|
|
188
|
+
x["Repr-Digest"] = get_digest(resource.val)
|
|
189
|
+
|
|
190
|
+
res.sendVersion(x)
|
|
191
|
+
},
|
|
192
|
+
write: (x) => res.write(x)
|
|
193
|
+
}
|
|
194
|
+
|
|
195
|
+
res.startSubscription({
|
|
196
|
+
onClose: () => {
|
|
197
|
+
if (merge_type === "dt") resource.clients.delete(options)
|
|
198
|
+
else resource.simpleton_clients.delete(options)
|
|
199
|
+
}
|
|
148
200
|
})
|
|
149
|
-
} catch (e) {
|
|
150
|
-
return my_end(500, "The server failed to get something. The error generated was: " + e)
|
|
151
|
-
}
|
|
152
201
|
|
|
153
|
-
|
|
154
|
-
|
|
155
|
-
|
|
156
|
-
|
|
157
|
-
|
|
158
|
-
} else {
|
|
159
|
-
if (req.version || req.parents)
|
|
160
|
-
res.setHeader("Current-Version", get_current_version())
|
|
161
|
-
res.setHeader("Version", ascii_ify(x.version.map((x) => JSON.stringify(x)).join(", ")))
|
|
162
|
-
var buffer = Buffer.from(x.body, "utf8")
|
|
163
|
-
res.setHeader("Repr-Digest", get_digest(buffer))
|
|
164
|
-
res.setHeader("Content-Length", buffer.length)
|
|
165
|
-
return my_end(200, req.method === "HEAD" ? null : buffer)
|
|
202
|
+
try {
|
|
203
|
+
return await braid_text.get(resource, options)
|
|
204
|
+
} catch (e) {
|
|
205
|
+
return my_end(500, "The server failed to get something. The error generated was: " + e)
|
|
206
|
+
}
|
|
166
207
|
}
|
|
167
|
-
}
|
|
168
|
-
|
|
169
|
-
|
|
170
|
-
|
|
171
|
-
|
|
172
|
-
|
|
173
|
-
let options = {
|
|
174
|
-
peer,
|
|
175
|
-
version: req.version,
|
|
176
|
-
parents: req.parents,
|
|
177
|
-
merge_type,
|
|
178
|
-
accept_encoding:
|
|
179
|
-
req.headers['x-accept-encoding'] ??
|
|
180
|
-
req.headers['accept-encoding'],
|
|
181
|
-
subscribe: x => {
|
|
182
|
-
|
|
183
|
-
// this is a sanity/rhobustness check..
|
|
184
|
-
// ..this digest is checked on the client..
|
|
185
|
-
if (x.version && v_eq(x.version, resource.version))
|
|
186
|
-
x["Repr-Digest"] = get_digest(resource.val)
|
|
187
|
-
|
|
188
|
-
res.sendVersion(x)
|
|
189
|
-
},
|
|
190
|
-
write: (x) => res.write(x)
|
|
208
|
+
}
|
|
209
|
+
|
|
210
|
+
if (req.method == "PUT" || req.method == "POST" || req.method == "PATCH") {
|
|
211
|
+
if (waiting_puts >= 100) {
|
|
212
|
+
console.log(`The server is busy.`)
|
|
213
|
+
return my_end(503, "The server is busy.")
|
|
191
214
|
}
|
|
192
215
|
|
|
193
|
-
|
|
194
|
-
|
|
195
|
-
|
|
196
|
-
|
|
197
|
-
}
|
|
198
|
-
|
|
216
|
+
waiting_puts++
|
|
217
|
+
if (braid_text.verbose) console.log(`waiting_puts(after++) = ${waiting_puts}`)
|
|
218
|
+
let done_my_turn = (statusCode, x, statusText, headers) => {
|
|
219
|
+
waiting_puts--
|
|
220
|
+
if (braid_text.verbose) console.log(`waiting_puts(after--) = ${waiting_puts}`)
|
|
221
|
+
my_end(statusCode, x, statusText, headers)
|
|
222
|
+
}
|
|
199
223
|
|
|
200
224
|
try {
|
|
201
|
-
|
|
202
|
-
|
|
203
|
-
return my_end(500, "The server failed to get something. The error generated was: " + e)
|
|
204
|
-
}
|
|
205
|
-
}
|
|
206
|
-
}
|
|
225
|
+
var patches = await req.patches()
|
|
226
|
+
for (let p of patches) p.content = p.content_text
|
|
207
227
|
|
|
208
|
-
|
|
209
|
-
|
|
210
|
-
|
|
211
|
-
|
|
212
|
-
|
|
228
|
+
let body = null
|
|
229
|
+
if (patches[0]?.unit === 'everything') {
|
|
230
|
+
body = patches[0].content
|
|
231
|
+
patches = null
|
|
232
|
+
}
|
|
213
233
|
|
|
214
|
-
|
|
215
|
-
|
|
216
|
-
|
|
217
|
-
|
|
218
|
-
|
|
219
|
-
|
|
220
|
-
|
|
234
|
+
if (req.parents) {
|
|
235
|
+
await wait_for_events(
|
|
236
|
+
options.key,
|
|
237
|
+
req.parents,
|
|
238
|
+
resource.actor_seqs,
|
|
239
|
+
// approximation of memory usage for this update
|
|
240
|
+
body ? body.length :
|
|
241
|
+
patches.reduce((a, b) => a + b.range.length + b.content.length, 0),
|
|
242
|
+
options.recv_buffer_max_time,
|
|
243
|
+
options.recv_buffer_max_space)
|
|
244
|
+
|
|
245
|
+
// make sure we have the necessary parents now
|
|
246
|
+
var unknowns = []
|
|
247
|
+
for (var event of req.parents) {
|
|
248
|
+
var [actor, seq] = decode_version(event)
|
|
249
|
+
if (!resource.actor_seqs[actor]?.has(seq)) unknowns.push(event)
|
|
250
|
+
}
|
|
251
|
+
if (unknowns.length)
|
|
252
|
+
return done_my_turn(309, '', "Version Unknown Here", {
|
|
253
|
+
Version: ascii_ify(unknowns.map(e => JSON.stringify(e)).join(', ')),
|
|
254
|
+
'Retry-After': '1'
|
|
255
|
+
})
|
|
256
|
+
}
|
|
221
257
|
|
|
222
|
-
|
|
223
|
-
var patches = await req.patches()
|
|
224
|
-
for (let p of patches) p.content = p.content_text
|
|
258
|
+
var {change_count} = await braid_text.put(resource, { peer, version: req.version, parents: req.parents, patches, body, merge_type })
|
|
225
259
|
|
|
226
|
-
|
|
227
|
-
|
|
228
|
-
|
|
229
|
-
|
|
230
|
-
|
|
260
|
+
// if Repr-Digest is set,
|
|
261
|
+
// and the request version is also our new current version,
|
|
262
|
+
// then verify the digest..
|
|
263
|
+
if (req.headers['repr-digest'] &&
|
|
264
|
+
v_eq(req.version, resource.version) &&
|
|
265
|
+
req.headers['repr-digest'] !== get_digest(resource.val)) {
|
|
266
|
+
console.log(`repr-digest mismatch!`)
|
|
231
267
|
|
|
232
|
-
|
|
233
|
-
|
|
234
|
-
|
|
235
|
-
req.parents,
|
|
236
|
-
resource.actor_seqs,
|
|
237
|
-
// approximation of memory usage for this update
|
|
238
|
-
body ? body.length :
|
|
239
|
-
patches.reduce((a, b) => a + b.range.length + b.content.length, 0),
|
|
240
|
-
options.recv_buffer_max_time,
|
|
241
|
-
options.recv_buffer_max_space)
|
|
242
|
-
|
|
243
|
-
// make sure we have the necessary parents now
|
|
244
|
-
var unknowns = []
|
|
245
|
-
for (var event of req.parents) {
|
|
246
|
-
var [actor, seq] = decode_version(event)
|
|
247
|
-
if (!resource.actor_seqs[actor]?.has(seq)) unknowns.push(event)
|
|
268
|
+
// we return a special 550 error code,
|
|
269
|
+
// which simpleton will pick up on to stop retrying
|
|
270
|
+
return done_my_turn(550, "repr-digest mismatch!")
|
|
248
271
|
}
|
|
249
|
-
if (unknowns.length)
|
|
250
|
-
return done_my_turn(309, '', "Version Unknown Here", {
|
|
251
|
-
Version: ascii_ify(unknowns.map(e => JSON.stringify(e)).join(', ')),
|
|
252
|
-
'Retry-After': '1'
|
|
253
|
-
})
|
|
254
|
-
}
|
|
255
272
|
|
|
256
|
-
|
|
257
|
-
|
|
258
|
-
|
|
259
|
-
// and the request version is also our new current version,
|
|
260
|
-
// then verify the digest..
|
|
261
|
-
if (req.headers['repr-digest'] &&
|
|
262
|
-
v_eq(req.version, resource.version) &&
|
|
263
|
-
req.headers['repr-digest'] !== get_digest(resource.val)) {
|
|
264
|
-
console.log(`repr-digest mismatch!`)
|
|
273
|
+
if (req.version) got_event(options.key, req.version[0], change_count)
|
|
274
|
+
|
|
275
|
+
res.setHeader("Version", get_current_version())
|
|
265
276
|
|
|
266
|
-
|
|
267
|
-
|
|
268
|
-
|
|
277
|
+
options.put_cb(options.key, resource.val)
|
|
278
|
+
} catch (e) {
|
|
279
|
+
console.log(`${req.method} ERROR: ${e.stack}`)
|
|
280
|
+
return done_my_turn(500, "The server failed to apply this version. The error generated was: " + e)
|
|
269
281
|
}
|
|
270
282
|
|
|
271
|
-
|
|
272
|
-
|
|
273
|
-
res.setHeader("Version", get_current_version())
|
|
274
|
-
|
|
275
|
-
options.put_cb(options.key, resource.val)
|
|
276
|
-
} catch (e) {
|
|
277
|
-
console.log(`${req.method} ERROR: ${e.stack}`)
|
|
278
|
-
return done_my_turn(500, "The server failed to apply this version. The error generated was: " + e)
|
|
283
|
+
return done_my_turn(200)
|
|
279
284
|
}
|
|
280
285
|
|
|
281
|
-
|
|
286
|
+
throw new Error("unknown")
|
|
282
287
|
}
|
|
283
288
|
|
|
284
|
-
|
|
285
|
-
}
|
|
289
|
+
braid_text.delete = async (key) => {
|
|
290
|
+
await braid_text.put(key, {body: ''})
|
|
291
|
+
}
|
|
286
292
|
|
|
287
|
-
braid_text.
|
|
288
|
-
|
|
289
|
-
}
|
|
293
|
+
braid_text.get = async (key, options) => {
|
|
294
|
+
if (key instanceof URL) {
|
|
295
|
+
if (!options) options = {}
|
|
290
296
|
|
|
291
|
-
|
|
292
|
-
if (!options) {
|
|
293
|
-
// if it doesn't exist already, don't create it in this case
|
|
294
|
-
if (!braid_text.cache[key]) return
|
|
295
|
-
return (await get_resource(key)).val
|
|
296
|
-
}
|
|
297
|
+
options.my_abort = new AbortController()
|
|
297
298
|
|
|
298
|
-
|
|
299
|
-
|
|
299
|
+
var params = {
|
|
300
|
+
signal: options.my_abort.signal,
|
|
301
|
+
retry: () => true,
|
|
302
|
+
subscribe: !!options.subscribe,
|
|
303
|
+
heartbeats: 120,
|
|
304
|
+
}
|
|
305
|
+
for (var x of ['headers', 'parents', 'version', 'peer'])
|
|
306
|
+
if (options[x] != null) params[x] = options[x]
|
|
300
307
|
|
|
301
|
-
|
|
302
|
-
var version = resource.version
|
|
308
|
+
var res = await braid_fetch(key.href, params)
|
|
303
309
|
|
|
304
|
-
|
|
305
|
-
|
|
306
|
-
|
|
307
|
-
|
|
308
|
-
// and let it be handled as the default
|
|
309
|
-
var op_v = options.version
|
|
310
|
-
if (op_v && v_eq(op_v, version)) op_v = null
|
|
311
|
-
|
|
312
|
-
var bytes = null
|
|
313
|
-
if (op_v || options.parents) {
|
|
314
|
-
if (op_v) {
|
|
315
|
-
var doc = dt_get(resource.doc, op_v)
|
|
316
|
-
bytes = doc.toBytes()
|
|
317
|
-
} else {
|
|
318
|
-
bytes = resource.doc.toBytes()
|
|
319
|
-
var doc = Doc.fromBytes(bytes)
|
|
320
|
-
}
|
|
321
|
-
if (options.parents) {
|
|
322
|
-
bytes = doc.getPatchSince(
|
|
323
|
-
dt_get_local_version(bytes, options.parents))
|
|
324
|
-
}
|
|
325
|
-
doc.free()
|
|
326
|
-
} else bytes = resource.doc.toBytes()
|
|
327
|
-
return { body: bytes }
|
|
310
|
+
if (options.subscribe) {
|
|
311
|
+
res.subscribe(options.subscribe)
|
|
312
|
+
return res
|
|
313
|
+
} else return await res.text()
|
|
328
314
|
}
|
|
329
315
|
|
|
330
|
-
|
|
331
|
-
|
|
332
|
-
|
|
333
|
-
|
|
334
|
-
|
|
335
|
-
body: resource.doc.get()
|
|
336
|
-
}
|
|
337
|
-
} else {
|
|
338
|
-
if (options.merge_type != "dt") {
|
|
339
|
-
let x = { version }
|
|
316
|
+
if (!options) {
|
|
317
|
+
// if it doesn't exist already, don't create it in this case
|
|
318
|
+
if (!braid_text.cache[key]) return
|
|
319
|
+
return (await get_resource(key)).val
|
|
320
|
+
}
|
|
340
321
|
|
|
341
|
-
|
|
342
|
-
|
|
343
|
-
x.body = resource.doc.get()
|
|
344
|
-
options.subscribe(x)
|
|
345
|
-
} else {
|
|
346
|
-
x.parents = options.version ? options.version : options.parents
|
|
347
|
-
options.my_last_seen_version = x.parents
|
|
322
|
+
if (options.version) validate_version_array(options.version)
|
|
323
|
+
if (options.parents) validate_version_array(options.parents)
|
|
348
324
|
|
|
349
|
-
|
|
350
|
-
|
|
351
|
-
|
|
352
|
-
|
|
353
|
-
|
|
354
|
-
|
|
325
|
+
let resource = (typeof key == 'string') ? await get_resource(key) : key
|
|
326
|
+
var version = resource.version
|
|
327
|
+
|
|
328
|
+
if (!options.subscribe) {
|
|
329
|
+
if (options.transfer_encoding === 'dt') {
|
|
330
|
+
// optimization: if requesting current version
|
|
331
|
+
// pretend as if they didn't set a version,
|
|
332
|
+
// and let it be handled as the default
|
|
333
|
+
var op_v = options.version
|
|
334
|
+
if (op_v && v_eq(op_v, version)) op_v = null
|
|
335
|
+
|
|
336
|
+
var bytes = null
|
|
337
|
+
if (op_v || options.parents) {
|
|
338
|
+
if (op_v) {
|
|
339
|
+
var doc = dt_get(resource.doc, op_v)
|
|
340
|
+
bytes = doc.toBytes()
|
|
341
|
+
} else {
|
|
342
|
+
bytes = resource.doc.toBytes()
|
|
343
|
+
var doc = Doc.fromBytes(bytes)
|
|
344
|
+
}
|
|
345
|
+
if (options.parents) {
|
|
346
|
+
bytes = doc.getPatchSince(
|
|
347
|
+
dt_get_local_version(bytes, options.parents))
|
|
348
|
+
}
|
|
349
|
+
doc.free()
|
|
350
|
+
} else bytes = resource.doc.toBytes()
|
|
351
|
+
return { body: bytes }
|
|
355
352
|
}
|
|
356
353
|
|
|
357
|
-
options.
|
|
358
|
-
|
|
354
|
+
return options.version || options.parents ? {
|
|
355
|
+
version: options.version || options.parents,
|
|
356
|
+
body: dt_get_string(resource.doc, options.version || options.parents)
|
|
357
|
+
} : {
|
|
358
|
+
version,
|
|
359
|
+
body: resource.doc.get()
|
|
360
|
+
}
|
|
359
361
|
} else {
|
|
362
|
+
if (options.merge_type != "dt") {
|
|
363
|
+
let x = { version }
|
|
360
364
|
|
|
361
|
-
|
|
362
|
-
|
|
363
|
-
|
|
364
|
-
|
|
365
|
-
options.subscribe({ encoding: 'dt', body: new Doc().toBytes() })
|
|
365
|
+
if (!options.parents && !options.version) {
|
|
366
|
+
x.parents = []
|
|
367
|
+
x.body = resource.doc.get()
|
|
368
|
+
options.subscribe(x)
|
|
366
369
|
} else {
|
|
367
|
-
|
|
368
|
-
|
|
369
|
-
|
|
370
|
-
|
|
371
|
-
|
|
372
|
-
|
|
370
|
+
x.parents = options.version ? options.version : options.parents
|
|
371
|
+
options.my_last_seen_version = x.parents
|
|
372
|
+
|
|
373
|
+
// only send them a version from these parents if we have these parents (otherwise we'll assume these parents are more recent, probably versions they created but haven't sent us yet, and we'll send them appropriate rebased updates when they send us these versions)
|
|
374
|
+
let local_version = OpLog_remote_to_local(resource.doc, x.parents)
|
|
375
|
+
if (local_version) {
|
|
376
|
+
x.patches = get_xf_patches(resource.doc, local_version)
|
|
377
|
+
options.subscribe(x)
|
|
373
378
|
}
|
|
374
|
-
options.subscribe({ encoding: 'dt', body: bytes })
|
|
375
379
|
}
|
|
380
|
+
|
|
381
|
+
options.my_last_sent_version = x.version
|
|
382
|
+
resource.simpleton_clients.add(options)
|
|
376
383
|
} else {
|
|
377
|
-
var updates = null
|
|
378
|
-
if (!options.parents && !options.version) {
|
|
379
|
-
options.subscribe({
|
|
380
|
-
version: [],
|
|
381
|
-
parents: [],
|
|
382
|
-
body: "",
|
|
383
|
-
})
|
|
384
384
|
|
|
385
|
-
|
|
385
|
+
if (options.accept_encoding?.match(/updates\s*\((.*)\)/)?.[1].split(',').map(x=>x.trim()).includes('dt')) {
|
|
386
|
+
// optimization: if client wants past current version,
|
|
387
|
+
// send empty dt
|
|
388
|
+
if (options.parents && v_eq(options.parents, version)) {
|
|
389
|
+
options.subscribe({ encoding: 'dt', body: new Doc().toBytes() })
|
|
390
|
+
} else {
|
|
391
|
+
var bytes = resource.doc.toBytes()
|
|
392
|
+
if (options.parents) {
|
|
393
|
+
var doc = Doc.fromBytes(bytes)
|
|
394
|
+
bytes = doc.getPatchSince(
|
|
395
|
+
dt_get_local_version(bytes, options.parents))
|
|
396
|
+
doc.free()
|
|
397
|
+
}
|
|
398
|
+
options.subscribe({ encoding: 'dt', body: bytes })
|
|
399
|
+
}
|
|
386
400
|
} else {
|
|
387
|
-
|
|
388
|
-
|
|
389
|
-
|
|
401
|
+
var updates = null
|
|
402
|
+
if (!options.parents && !options.version) {
|
|
403
|
+
options.subscribe({
|
|
404
|
+
version: [],
|
|
405
|
+
parents: [],
|
|
406
|
+
body: "",
|
|
407
|
+
})
|
|
408
|
+
|
|
409
|
+
updates = dt_get_patches(resource.doc)
|
|
410
|
+
} else {
|
|
411
|
+
// Then start the subscription from the parents in options
|
|
412
|
+
updates = dt_get_patches(resource.doc, options.parents || options.version)
|
|
413
|
+
}
|
|
390
414
|
|
|
391
|
-
|
|
392
|
-
|
|
393
|
-
|
|
394
|
-
|
|
395
|
-
|
|
396
|
-
|
|
415
|
+
for (let u of updates)
|
|
416
|
+
options.subscribe({
|
|
417
|
+
version: [u.version],
|
|
418
|
+
parents: u.parents,
|
|
419
|
+
patches: [{ unit: u.unit, range: u.range, content: u.content }],
|
|
420
|
+
})
|
|
421
|
+
|
|
422
|
+
// Output at least *some* data, or else chrome gets confused and
|
|
423
|
+
// thinks the connection failed. This isn't strictly necessary,
|
|
424
|
+
// but it makes fewer scary errors get printed out in the JS
|
|
425
|
+
// console.
|
|
426
|
+
if (updates.length === 0) options.write?.("\r\n")
|
|
427
|
+
}
|
|
397
428
|
|
|
398
|
-
|
|
399
|
-
// thinks the connection failed. This isn't strictly necessary,
|
|
400
|
-
// but it makes fewer scary errors get printed out in the JS
|
|
401
|
-
// console.
|
|
402
|
-
if (updates.length === 0) options.write?.("\r\n")
|
|
429
|
+
resource.clients.add(options)
|
|
403
430
|
}
|
|
404
|
-
|
|
405
|
-
resource.clients.add(options)
|
|
406
431
|
}
|
|
407
432
|
}
|
|
408
|
-
}
|
|
409
433
|
|
|
410
|
-
braid_text.forget = async (key, options) => {
|
|
411
|
-
|
|
434
|
+
braid_text.forget = async (key, options) => {
|
|
435
|
+
if (!options) throw new Error('options is required')
|
|
412
436
|
|
|
413
|
-
|
|
414
|
-
|
|
415
|
-
if (options.merge_type != "dt")
|
|
416
|
-
resource.simpleton_clients.delete(options)
|
|
417
|
-
else resource.clients.delete(options)
|
|
418
|
-
}
|
|
437
|
+
if (key instanceof URL) return options.my_abort.abort()
|
|
419
438
|
|
|
420
|
-
braid_text.put = async (key, options) => {
|
|
421
|
-
let { version, patches, body, peer } = options
|
|
422
|
-
|
|
423
|
-
// support for json patch puts..
|
|
424
|
-
if (patches?.length && patches.every(x => x.unit === 'json')) {
|
|
425
439
|
let resource = (typeof key == 'string') ? await get_resource(key) : key
|
|
426
|
-
|
|
427
|
-
let x = JSON.parse(resource.doc.get())
|
|
428
|
-
for (let p of patches)
|
|
429
|
-
apply_patch(x, p.range, p.content === '' ? undefined : JSON.parse(p.content))
|
|
430
440
|
|
|
431
|
-
|
|
432
|
-
|
|
433
|
-
|
|
441
|
+
if (options.merge_type != "dt")
|
|
442
|
+
resource.simpleton_clients.delete(options)
|
|
443
|
+
else resource.clients.delete(options)
|
|
434
444
|
}
|
|
435
445
|
|
|
436
|
-
|
|
446
|
+
braid_text.put = async (key, options) => {
|
|
447
|
+
if (key instanceof URL) {
|
|
448
|
+
options.my_abort = new AbortController()
|
|
437
449
|
|
|
438
|
-
|
|
439
|
-
|
|
440
|
-
|
|
441
|
-
|
|
450
|
+
var params = {
|
|
451
|
+
method: 'PUT',
|
|
452
|
+
signal: options.my_abort.signal,
|
|
453
|
+
retry: () => true,
|
|
454
|
+
}
|
|
455
|
+
for (var x of ['headers', 'parents', 'version', 'peer', 'body'])
|
|
456
|
+
if (options[x] != null) params[x] = options[x]
|
|
442
457
|
|
|
443
|
-
|
|
444
|
-
for (var i = start_i; i <= end_i; i++) {
|
|
445
|
-
let v = resource.doc.localToRemoteVersion([i])[0]
|
|
446
|
-
if (!resource.actor_seqs[v[0]]) resource.actor_seqs[v[0]] = new braid_text.RangeSet()
|
|
447
|
-
resource.actor_seqs[v[0]].add_range(v[1], v[1])
|
|
458
|
+
return await braid_fetch(key.href, params)
|
|
448
459
|
}
|
|
449
|
-
resource.val = resource.doc.get()
|
|
450
|
-
resource.version = resource.doc.getRemoteVersion().map(x => x.join("-")).sort()
|
|
451
460
|
|
|
452
|
-
|
|
453
|
-
return { change_count: end_i - start_i + 1 }
|
|
454
|
-
}
|
|
461
|
+
let { version, patches, body, peer } = options
|
|
455
462
|
|
|
456
|
-
|
|
463
|
+
// support for json patch puts..
|
|
464
|
+
if (patches?.length && patches.every(x => x.unit === 'json')) {
|
|
465
|
+
let resource = (typeof key == 'string') ? await get_resource(key) : key
|
|
466
|
+
|
|
467
|
+
let x = JSON.parse(resource.doc.get())
|
|
468
|
+
for (let p of patches)
|
|
469
|
+
apply_patch(x, p.range, p.content === '' ? undefined : JSON.parse(p.content))
|
|
457
470
|
|
|
458
|
-
|
|
459
|
-
|
|
460
|
-
|
|
461
|
-
|
|
471
|
+
return await braid_text.put(key, {
|
|
472
|
+
body: JSON.stringify(x, null, 4)
|
|
473
|
+
})
|
|
474
|
+
}
|
|
462
475
|
|
|
463
|
-
|
|
464
|
-
if (body != null && patches) throw new Error(`cannot have a body and patches`)
|
|
465
|
-
if (body != null && (typeof body !== 'string')) throw new Error(`body must be a string`)
|
|
466
|
-
if (patches) validate_patches(patches)
|
|
476
|
+
let resource = (typeof key == 'string') ? await get_resource(key) : key
|
|
467
477
|
|
|
468
|
-
|
|
469
|
-
|
|
470
|
-
|
|
471
|
-
|
|
472
|
-
|
|
473
|
-
|
|
478
|
+
if (options.transfer_encoding === 'dt') {
|
|
479
|
+
var start_i = 1 + resource.doc.getLocalVersion().reduce((a, b) => Math.max(a, b), -1)
|
|
480
|
+
|
|
481
|
+
resource.doc.mergeBytes(body)
|
|
482
|
+
|
|
483
|
+
var end_i = resource.doc.getLocalVersion().reduce((a, b) => Math.max(a, b), -1)
|
|
484
|
+
for (var i = start_i; i <= end_i; i++) {
|
|
485
|
+
let v = resource.doc.localToRemoteVersion([i])[0]
|
|
486
|
+
if (!resource.actor_seqs[v[0]]) resource.actor_seqs[v[0]] = new braid_text.RangeSet()
|
|
487
|
+
resource.actor_seqs[v[0]].add_range(v[1], v[1])
|
|
488
|
+
}
|
|
489
|
+
resource.val = resource.doc.get()
|
|
490
|
+
resource.version = resource.doc.getRemoteVersion().map(x => x.join("-")).sort()
|
|
491
|
+
|
|
492
|
+
await resource.db_delta(body)
|
|
493
|
+
return { change_count: end_i - start_i + 1 }
|
|
474
494
|
}
|
|
475
|
-
}
|
|
476
495
|
|
|
477
|
-
|
|
478
|
-
let og_parents = options_parents || parents
|
|
479
|
-
|
|
480
|
-
let max_pos = resource.length_cache.get('' + og_parents) ??
|
|
481
|
-
(v_eq(parents, og_parents) ? resource.doc.len() : dt_len(resource.doc, og_parents))
|
|
482
|
-
|
|
483
|
-
if (body != null) {
|
|
484
|
-
patches = [{
|
|
485
|
-
unit: 'text',
|
|
486
|
-
range: `[0:${max_pos}]`,
|
|
487
|
-
content: body
|
|
488
|
-
}]
|
|
489
|
-
}
|
|
496
|
+
if (version) validate_version_array(version)
|
|
490
497
|
|
|
491
|
-
|
|
492
|
-
|
|
493
|
-
|
|
494
|
-
|
|
495
|
-
|
|
496
|
-
|
|
497
|
-
|
|
498
|
-
|
|
499
|
-
|
|
500
|
-
|
|
501
|
-
if (
|
|
502
|
-
|
|
503
|
-
|
|
504
|
-
|
|
498
|
+
// translate a single parent of "root" to the empty array (same meaning)
|
|
499
|
+
let options_parents = options.parents
|
|
500
|
+
if (options_parents?.length === 1 && options_parents[0] === 'root')
|
|
501
|
+
options_parents = []
|
|
502
|
+
|
|
503
|
+
if (options_parents) validate_version_array(options_parents)
|
|
504
|
+
if (body != null && patches) throw new Error(`cannot have a body and patches`)
|
|
505
|
+
if (body != null && (typeof body !== 'string')) throw new Error(`body must be a string`)
|
|
506
|
+
if (patches) validate_patches(patches)
|
|
507
|
+
|
|
508
|
+
if (options_parents) {
|
|
509
|
+
// make sure we have all these parents
|
|
510
|
+
for (let p of options_parents) {
|
|
511
|
+
let P = decode_version(p)
|
|
512
|
+
if (!resource.actor_seqs[P[0]]?.has(P[1]))
|
|
513
|
+
throw new Error(`missing parent version: ${p}`)
|
|
514
|
+
}
|
|
515
|
+
}
|
|
516
|
+
|
|
517
|
+
let parents = resource.version
|
|
518
|
+
let og_parents = options_parents || parents
|
|
519
|
+
|
|
520
|
+
let max_pos = resource.length_cache.get('' + og_parents) ??
|
|
521
|
+
(v_eq(parents, og_parents) ? resource.doc.len() : dt_len(resource.doc, og_parents))
|
|
522
|
+
|
|
523
|
+
if (body != null) {
|
|
524
|
+
patches = [{
|
|
525
|
+
unit: 'text',
|
|
526
|
+
range: `[0:${max_pos}]`,
|
|
527
|
+
content: body
|
|
528
|
+
}]
|
|
529
|
+
}
|
|
505
530
|
|
|
506
|
-
|
|
531
|
+
let og_patches = patches
|
|
532
|
+
patches = patches.map((p) => ({
|
|
533
|
+
...p,
|
|
534
|
+
range: p.range.match(/\d+/g).map((x) => parseInt(x)),
|
|
535
|
+
content_codepoints: [...p.content],
|
|
536
|
+
})).sort((a, b) => a.range[0] - b.range[0])
|
|
507
537
|
|
|
508
|
-
|
|
538
|
+
// validate patch positions
|
|
539
|
+
let must_be_at_least = 0
|
|
540
|
+
for (let p of patches) {
|
|
541
|
+
if (p.range[0] < must_be_at_least || p.range[0] > max_pos) throw new Error(`invalid patch range position: ${p.range[0]}`)
|
|
542
|
+
if (p.range[1] < p.range[0] || p.range[1] > max_pos) throw new Error(`invalid patch range position: ${p.range[1]}`)
|
|
543
|
+
must_be_at_least = p.range[1]
|
|
544
|
+
}
|
|
545
|
+
|
|
546
|
+
let change_count = patches.reduce((a, b) => a + b.content_codepoints.length + (b.range[1] - b.range[0]), 0)
|
|
509
547
|
|
|
510
|
-
|
|
548
|
+
let og_v = version?.[0] || `${(is_valid_actor(peer) && peer) || Math.random().toString(36).slice(2, 7)}-${change_count - 1}`
|
|
511
549
|
|
|
512
|
-
|
|
513
|
-
a + (b.content_codepoints?.length ?? 0) - (b.range[1] - b.range[0]),
|
|
514
|
-
max_pos))
|
|
550
|
+
let v = decode_version(og_v)
|
|
515
551
|
|
|
516
|
-
|
|
517
|
-
|
|
552
|
+
resource.length_cache.put(`${v[0]}-${v[1]}`, patches.reduce((a, b) =>
|
|
553
|
+
a + (b.content_codepoints?.length ?? 0) - (b.range[1] - b.range[0]),
|
|
554
|
+
max_pos))
|
|
518
555
|
|
|
519
|
-
|
|
556
|
+
// validate version: make sure we haven't seen it already
|
|
557
|
+
if (resource.actor_seqs[v[0]]?.has(v[1])) {
|
|
520
558
|
|
|
521
|
-
|
|
522
|
-
let updates = dt_get_patches(resource.doc, og_parents)
|
|
559
|
+
if (!options.validate_already_seen_versions) return { change_count }
|
|
523
560
|
|
|
524
|
-
|
|
525
|
-
|
|
526
|
-
u.version = decode_version(u.version)
|
|
561
|
+
// if we have seen it already, make sure it's the same as before
|
|
562
|
+
let updates = dt_get_patches(resource.doc, og_parents)
|
|
527
563
|
|
|
528
|
-
|
|
564
|
+
let seen = {}
|
|
565
|
+
for (let u of updates) {
|
|
566
|
+
u.version = decode_version(u.version)
|
|
567
|
+
|
|
568
|
+
if (!u.content) {
|
|
569
|
+
// delete
|
|
570
|
+
let v = u.version
|
|
571
|
+
for (let i = 0; i < u.end - u.start; i++) {
|
|
572
|
+
let ps = (i < u.end - u.start - 1) ? [`${v[0]}-${v[1] - i - 1}`] : u.parents
|
|
573
|
+
seen[JSON.stringify([v[0], v[1] - i, ps, u.start + i])] = true
|
|
574
|
+
}
|
|
575
|
+
} else {
|
|
576
|
+
// insert
|
|
577
|
+
let v = u.version
|
|
578
|
+
let content = [...u.content]
|
|
579
|
+
for (let i = 0; i < content.length; i++) {
|
|
580
|
+
let ps = (i > 0) ? [`${v[0]}-${v[1] - content.length + i}`] : u.parents
|
|
581
|
+
seen[JSON.stringify([v[0], v[1] + 1 - content.length + i, ps, u.start + i, content[i]])] = true
|
|
582
|
+
}
|
|
583
|
+
}
|
|
584
|
+
}
|
|
585
|
+
|
|
586
|
+
v = `${v[0]}-${v[1] + 1 - change_count}`
|
|
587
|
+
let ps = og_parents
|
|
588
|
+
let offset = 0
|
|
589
|
+
for (let p of patches) {
|
|
529
590
|
// delete
|
|
530
|
-
let
|
|
531
|
-
|
|
532
|
-
|
|
533
|
-
seen[JSON.stringify([
|
|
591
|
+
for (let i = p.range[0]; i < p.range[1]; i++) {
|
|
592
|
+
let vv = decode_version(v)
|
|
593
|
+
|
|
594
|
+
if (!seen[JSON.stringify([vv[0], vv[1], ps, p.range[1] - 1 + offset])]) throw new Error('invalid update: different from previous update with same version')
|
|
595
|
+
|
|
596
|
+
offset--
|
|
597
|
+
ps = [v]
|
|
598
|
+
v = vv
|
|
599
|
+
v = `${v[0]}-${v[1] + 1}`
|
|
534
600
|
}
|
|
535
|
-
} else {
|
|
536
601
|
// insert
|
|
537
|
-
let
|
|
538
|
-
|
|
539
|
-
|
|
540
|
-
|
|
541
|
-
seen[JSON.stringify([
|
|
602
|
+
for (let i = 0; i < p.content_codepoints?.length ?? 0; i++) {
|
|
603
|
+
let vv = decode_version(v)
|
|
604
|
+
let c = p.content_codepoints[i]
|
|
605
|
+
|
|
606
|
+
if (!seen[JSON.stringify([vv[0], vv[1], ps, p.range[1] + offset, c])]) throw new Error('invalid update: different from previous update with same version')
|
|
607
|
+
|
|
608
|
+
offset++
|
|
609
|
+
ps = [v]
|
|
610
|
+
v = vv
|
|
611
|
+
v = `${v[0]}-${v[1] + 1}`
|
|
542
612
|
}
|
|
543
613
|
}
|
|
614
|
+
|
|
615
|
+
// we already have this version, so nothing left to do
|
|
616
|
+
return { change_count: change_count }
|
|
544
617
|
}
|
|
618
|
+
if (!resource.actor_seqs[v[0]]) resource.actor_seqs[v[0]] = new RangeSet()
|
|
619
|
+
resource.actor_seqs[v[0]].add_range(v[1] + 1 - change_count, v[1])
|
|
545
620
|
|
|
621
|
+
// reduce the version sequence by the number of char-edits
|
|
546
622
|
v = `${v[0]}-${v[1] + 1 - change_count}`
|
|
623
|
+
|
|
547
624
|
let ps = og_parents
|
|
625
|
+
|
|
626
|
+
let v_before = resource.doc.getLocalVersion()
|
|
627
|
+
|
|
628
|
+
let bytes = []
|
|
629
|
+
|
|
548
630
|
let offset = 0
|
|
549
631
|
for (let p of patches) {
|
|
550
632
|
// delete
|
|
551
|
-
|
|
552
|
-
|
|
553
|
-
|
|
554
|
-
|
|
555
|
-
|
|
556
|
-
|
|
557
|
-
|
|
558
|
-
v = vv
|
|
559
|
-
v = `${v[0]}-${v[1] + 1}`
|
|
633
|
+
let del = p.range[1] - p.range[0]
|
|
634
|
+
if (del) {
|
|
635
|
+
bytes.push(dt_create_bytes(v, ps, p.range[0] + offset, del, null))
|
|
636
|
+
offset -= del
|
|
637
|
+
v = decode_version(v)
|
|
638
|
+
ps = [`${v[0]}-${v[1] + (del - 1)}`]
|
|
639
|
+
v = `${v[0]}-${v[1] + del}`
|
|
560
640
|
}
|
|
561
641
|
// insert
|
|
562
|
-
|
|
563
|
-
|
|
564
|
-
|
|
565
|
-
|
|
566
|
-
|
|
567
|
-
|
|
568
|
-
offset++
|
|
569
|
-
ps = [v]
|
|
570
|
-
v = vv
|
|
571
|
-
v = `${v[0]}-${v[1] + 1}`
|
|
642
|
+
if (p.content?.length) {
|
|
643
|
+
bytes.push(dt_create_bytes(v, ps, p.range[1] + offset, 0, p.content))
|
|
644
|
+
offset += p.content_codepoints.length
|
|
645
|
+
v = decode_version(v)
|
|
646
|
+
ps = [`${v[0]}-${v[1] + (p.content_codepoints.length - 1)}`]
|
|
647
|
+
v = `${v[0]}-${v[1] + p.content_codepoints.length}`
|
|
572
648
|
}
|
|
573
649
|
}
|
|
574
650
|
|
|
575
|
-
|
|
576
|
-
|
|
577
|
-
|
|
578
|
-
if (!resource.actor_seqs[v[0]]) resource.actor_seqs[v[0]] = new RangeSet()
|
|
579
|
-
resource.actor_seqs[v[0]].add_range(v[1] + 1 - change_count, v[1])
|
|
580
|
-
|
|
581
|
-
// reduce the version sequence by the number of char-edits
|
|
582
|
-
v = `${v[0]}-${v[1] + 1 - change_count}`
|
|
583
|
-
|
|
584
|
-
let ps = og_parents
|
|
585
|
-
|
|
586
|
-
let v_before = resource.doc.getLocalVersion()
|
|
587
|
-
|
|
588
|
-
let bytes = []
|
|
589
|
-
|
|
590
|
-
let offset = 0
|
|
591
|
-
for (let p of patches) {
|
|
592
|
-
// delete
|
|
593
|
-
let del = p.range[1] - p.range[0]
|
|
594
|
-
if (del) {
|
|
595
|
-
bytes.push(dt_create_bytes(v, ps, p.range[0] + offset, del, null))
|
|
596
|
-
offset -= del
|
|
597
|
-
v = decode_version(v)
|
|
598
|
-
ps = [`${v[0]}-${v[1] + (del - 1)}`]
|
|
599
|
-
v = `${v[0]}-${v[1] + del}`
|
|
600
|
-
}
|
|
601
|
-
// insert
|
|
602
|
-
if (p.content?.length) {
|
|
603
|
-
bytes.push(dt_create_bytes(v, ps, p.range[1] + offset, 0, p.content))
|
|
604
|
-
offset += p.content_codepoints.length
|
|
605
|
-
v = decode_version(v)
|
|
606
|
-
ps = [`${v[0]}-${v[1] + (p.content_codepoints.length - 1)}`]
|
|
607
|
-
v = `${v[0]}-${v[1] + p.content_codepoints.length}`
|
|
608
|
-
}
|
|
609
|
-
}
|
|
651
|
+
for (let b of bytes) resource.doc.mergeBytes(b)
|
|
652
|
+
resource.val = resource.doc.get()
|
|
653
|
+
resource.version = resource.doc.getRemoteVersion().map(x => x.join("-")).sort()
|
|
610
654
|
|
|
611
|
-
|
|
612
|
-
resource.val = resource.doc.get()
|
|
613
|
-
resource.version = resource.doc.getRemoteVersion().map(x => x.join("-")).sort()
|
|
655
|
+
var post_commit_updates = []
|
|
614
656
|
|
|
615
|
-
|
|
657
|
+
if (options.merge_type != "dt") {
|
|
658
|
+
patches = get_xf_patches(resource.doc, v_before)
|
|
659
|
+
if (braid_text.verbose) console.log(JSON.stringify({ patches }))
|
|
616
660
|
|
|
617
|
-
|
|
618
|
-
patches = get_xf_patches(resource.doc, v_before)
|
|
619
|
-
if (braid_text.verbose) console.log(JSON.stringify({ patches }))
|
|
661
|
+
let version = resource.version
|
|
620
662
|
|
|
621
|
-
|
|
663
|
+
for (let client of resource.simpleton_clients) {
|
|
664
|
+
if (peer && client.peer === peer) {
|
|
665
|
+
client.my_last_seen_version = [og_v]
|
|
666
|
+
}
|
|
622
667
|
|
|
623
|
-
|
|
624
|
-
|
|
625
|
-
|
|
626
|
-
|
|
668
|
+
function set_timeout(time_override) {
|
|
669
|
+
if (client.my_timeout) clearTimeout(client.my_timeout)
|
|
670
|
+
client.my_timeout = setTimeout(() => {
|
|
671
|
+
// if the doc has been freed, exit early
|
|
672
|
+
if (resource.doc.__wbg_ptr === 0) return
|
|
627
673
|
|
|
628
|
-
|
|
629
|
-
|
|
630
|
-
|
|
631
|
-
// if the doc has been freed, exit early
|
|
632
|
-
if (resource.doc.__wbg_ptr === 0) return
|
|
674
|
+
let version = resource.version
|
|
675
|
+
let x = { version }
|
|
676
|
+
x.parents = client.my_last_seen_version
|
|
633
677
|
|
|
634
|
-
|
|
635
|
-
|
|
636
|
-
|
|
678
|
+
if (braid_text.verbose) console.log("rebasing after timeout.. ")
|
|
679
|
+
if (braid_text.verbose) console.log(" client.my_unused_version_count = " + client.my_unused_version_count)
|
|
680
|
+
x.patches = get_xf_patches(resource.doc, OpLog_remote_to_local(resource.doc, client.my_last_seen_version))
|
|
637
681
|
|
|
638
|
-
|
|
639
|
-
|
|
640
|
-
|
|
682
|
+
if (braid_text.verbose) console.log(`sending from rebase: ${JSON.stringify(x)}`)
|
|
683
|
+
client.subscribe(x)
|
|
684
|
+
client.my_last_sent_version = x.version
|
|
641
685
|
|
|
642
|
-
|
|
643
|
-
client.
|
|
644
|
-
|
|
686
|
+
delete client.my_timeout
|
|
687
|
+
}, time_override ?? Math.min(3000, 23 * Math.pow(1.5, client.my_unused_version_count - 1)))
|
|
688
|
+
}
|
|
645
689
|
|
|
646
|
-
|
|
647
|
-
|
|
648
|
-
|
|
690
|
+
if (client.my_timeout) {
|
|
691
|
+
if (peer && client.peer === peer) {
|
|
692
|
+
if (!v_eq(client.my_last_sent_version, og_parents)) {
|
|
693
|
+
// note: we don't add to client.my_unused_version_count,
|
|
694
|
+
// because we're already in a timeout;
|
|
695
|
+
// we'll just extend it here..
|
|
696
|
+
set_timeout()
|
|
697
|
+
} else {
|
|
698
|
+
// hm.. it appears we got a correctly parented version,
|
|
699
|
+
// which suggests that maybe we can stop the timeout early
|
|
700
|
+
set_timeout(0)
|
|
701
|
+
}
|
|
702
|
+
}
|
|
703
|
+
continue
|
|
704
|
+
}
|
|
649
705
|
|
|
650
|
-
|
|
706
|
+
let x = { version }
|
|
651
707
|
if (peer && client.peer === peer) {
|
|
652
708
|
if (!v_eq(client.my_last_sent_version, og_parents)) {
|
|
653
|
-
|
|
654
|
-
// because we're already in a timeout;
|
|
655
|
-
// we'll just extend it here..
|
|
709
|
+
client.my_unused_version_count = (client.my_unused_version_count ?? 0) + 1
|
|
656
710
|
set_timeout()
|
|
711
|
+
continue
|
|
657
712
|
} else {
|
|
658
|
-
|
|
659
|
-
// which suggests that maybe we can stop the timeout early
|
|
660
|
-
set_timeout(0)
|
|
713
|
+
delete client.my_unused_version_count
|
|
661
714
|
}
|
|
662
|
-
}
|
|
663
|
-
continue
|
|
664
|
-
}
|
|
665
715
|
|
|
666
|
-
|
|
667
|
-
|
|
668
|
-
|
|
669
|
-
|
|
670
|
-
|
|
671
|
-
|
|
716
|
+
x.parents = options.version
|
|
717
|
+
if (!v_eq(version, options.version)) {
|
|
718
|
+
if (braid_text.verbose) console.log("rebasing..")
|
|
719
|
+
x.patches = get_xf_patches(resource.doc, OpLog_remote_to_local(resource.doc, [og_v]))
|
|
720
|
+
} else {
|
|
721
|
+
// this client already has this version,
|
|
722
|
+
// so let's pretend to send it back, but not
|
|
723
|
+
if (braid_text.verbose) console.log(`not reflecting back to simpleton`)
|
|
724
|
+
client.my_last_sent_version = x.version
|
|
725
|
+
continue
|
|
726
|
+
}
|
|
672
727
|
} else {
|
|
673
|
-
|
|
728
|
+
x.parents = parents
|
|
729
|
+
x.patches = patches
|
|
674
730
|
}
|
|
675
|
-
|
|
676
|
-
|
|
677
|
-
|
|
678
|
-
|
|
679
|
-
|
|
680
|
-
|
|
681
|
-
|
|
682
|
-
|
|
683
|
-
|
|
731
|
+
if (braid_text.verbose) console.log(`sending: ${JSON.stringify(x)}`)
|
|
732
|
+
post_commit_updates.push([client, x])
|
|
733
|
+
client.my_last_sent_version = x.version
|
|
734
|
+
}
|
|
735
|
+
} else {
|
|
736
|
+
if (resource.simpleton_clients.size) {
|
|
737
|
+
let version = resource.version
|
|
738
|
+
patches = get_xf_patches(resource.doc, v_before)
|
|
739
|
+
let x = { version, parents, patches }
|
|
740
|
+
if (braid_text.verbose) console.log(`sending: ${JSON.stringify(x)}`)
|
|
741
|
+
for (let client of resource.simpleton_clients) {
|
|
742
|
+
if (client.my_timeout) continue
|
|
743
|
+
post_commit_updates.push([client, x])
|
|
684
744
|
client.my_last_sent_version = x.version
|
|
685
|
-
continue
|
|
686
745
|
}
|
|
687
|
-
} else {
|
|
688
|
-
x.parents = parents
|
|
689
|
-
x.patches = patches
|
|
690
746
|
}
|
|
691
|
-
if (braid_text.verbose) console.log(`sending: ${JSON.stringify(x)}`)
|
|
692
|
-
post_commit_updates.push([client, x])
|
|
693
|
-
client.my_last_sent_version = x.version
|
|
694
747
|
}
|
|
695
|
-
|
|
696
|
-
|
|
697
|
-
|
|
698
|
-
|
|
699
|
-
|
|
700
|
-
|
|
701
|
-
|
|
702
|
-
|
|
748
|
+
|
|
749
|
+
var x = {
|
|
750
|
+
version: [og_v],
|
|
751
|
+
parents: og_parents,
|
|
752
|
+
patches: og_patches,
|
|
753
|
+
}
|
|
754
|
+
for (let client of resource.clients) {
|
|
755
|
+
if (!peer || client.peer !== peer)
|
|
703
756
|
post_commit_updates.push([client, x])
|
|
704
|
-
client.my_last_sent_version = x.version
|
|
705
|
-
}
|
|
706
757
|
}
|
|
707
|
-
}
|
|
708
758
|
|
|
709
|
-
|
|
710
|
-
version: [og_v],
|
|
711
|
-
parents: og_parents,
|
|
712
|
-
patches: og_patches,
|
|
713
|
-
}
|
|
714
|
-
for (let client of resource.clients) {
|
|
715
|
-
if (!peer || client.peer !== peer)
|
|
716
|
-
post_commit_updates.push([client, x])
|
|
717
|
-
}
|
|
718
|
-
|
|
719
|
-
await resource.db_delta(resource.doc.getPatchSince(v_before))
|
|
720
|
-
|
|
721
|
-
for (var [client, x] of post_commit_updates) client.subscribe(x)
|
|
722
|
-
|
|
723
|
-
return { change_count }
|
|
724
|
-
}
|
|
759
|
+
await resource.db_delta(resource.doc.getPatchSince(v_before))
|
|
725
760
|
|
|
726
|
-
|
|
727
|
-
try {
|
|
728
|
-
if (braid_text.db_folder) {
|
|
729
|
-
await db_folder_init()
|
|
730
|
-
var pages = new Set()
|
|
731
|
-
for (let x of await require('fs').promises.readdir(braid_text.db_folder)) pages.add(decode_filename(x.replace(/\.\w+$/, '')))
|
|
732
|
-
return [...pages.keys()]
|
|
733
|
-
} else return Object.keys(braid_text.cache)
|
|
734
|
-
} catch (e) { return [] }
|
|
735
|
-
}
|
|
761
|
+
for (var [client, x] of post_commit_updates) client.subscribe(x)
|
|
736
762
|
|
|
737
|
-
|
|
738
|
-
|
|
739
|
-
res.setHeader("Access-Control-Allow-Methods", "*")
|
|
740
|
-
res.setHeader("Access-Control-Allow-Headers", "*")
|
|
741
|
-
res.setHeader("Access-Control-Expose-Headers", "*")
|
|
742
|
-
}
|
|
763
|
+
return { change_count }
|
|
764
|
+
}
|
|
743
765
|
|
|
744
|
-
async
|
|
745
|
-
|
|
746
|
-
|
|
747
|
-
|
|
748
|
-
|
|
749
|
-
|
|
766
|
+
braid_text.list = async () => {
|
|
767
|
+
try {
|
|
768
|
+
if (braid_text.db_folder) {
|
|
769
|
+
await db_folder_init()
|
|
770
|
+
var pages = new Set()
|
|
771
|
+
for (let x of await require('fs').promises.readdir(braid_text.db_folder)) pages.add(decode_filename(x.replace(/\.\w+$/, '')))
|
|
772
|
+
return [...pages.keys()]
|
|
773
|
+
} else return Object.keys(braid_text.cache)
|
|
774
|
+
} catch (e) { return [] }
|
|
775
|
+
}
|
|
776
|
+
|
|
777
|
+
braid_text.free_cors = res => {
|
|
778
|
+
res.setHeader("Access-Control-Allow-Origin", "*")
|
|
779
|
+
res.setHeader("Access-Control-Allow-Methods", "*")
|
|
780
|
+
res.setHeader("Access-Control-Allow-Headers", "*")
|
|
781
|
+
res.setHeader("Access-Control-Expose-Headers", "*")
|
|
782
|
+
}
|
|
783
|
+
|
|
784
|
+
async function get_resource(key) {
|
|
785
|
+
let cache = braid_text.cache
|
|
786
|
+
if (!cache[key]) cache[key] = new Promise(async done => {
|
|
787
|
+
let resource = {key}
|
|
788
|
+
resource.clients = new Set()
|
|
789
|
+
resource.simpleton_clients = new Set()
|
|
790
|
+
|
|
791
|
+
resource.doc = new Doc("server")
|
|
792
|
+
resource.meta = {}
|
|
793
|
+
|
|
794
|
+
let { change, change_meta } = braid_text.db_folder
|
|
795
|
+
? await file_sync(key,
|
|
796
|
+
(bytes) => resource.doc.mergeBytes(bytes),
|
|
797
|
+
() => resource.doc.toBytes(),
|
|
798
|
+
(meta) => resource.meta = meta,
|
|
799
|
+
() => resource.meta)
|
|
800
|
+
: { change: () => { }, change_meta: () => {} }
|
|
801
|
+
|
|
802
|
+
resource.db_delta = change
|
|
803
|
+
resource.update_meta = (meta) => {
|
|
804
|
+
Object.assign(resource.meta, meta)
|
|
805
|
+
change_meta()
|
|
806
|
+
}
|
|
750
807
|
|
|
751
|
-
|
|
808
|
+
resource.actor_seqs = {}
|
|
752
809
|
|
|
753
|
-
|
|
754
|
-
|
|
755
|
-
|
|
756
|
-
|
|
757
|
-
: { change: () => { } }
|
|
810
|
+
dt_get_actor_seq_runs([...resource.doc.toBytes()], (actor, base, len) => {
|
|
811
|
+
if (!resource.actor_seqs[actor]) resource.actor_seqs[actor] = new RangeSet()
|
|
812
|
+
resource.actor_seqs[actor].add_range(base, base + len - 1)
|
|
813
|
+
})
|
|
758
814
|
|
|
759
|
-
|
|
815
|
+
resource.val = resource.doc.get()
|
|
816
|
+
resource.version = resource.doc.getRemoteVersion().map(x => x.join("-")).sort()
|
|
760
817
|
|
|
761
|
-
|
|
818
|
+
resource.length_cache = createSimpleCache(braid_text.length_cache_size)
|
|
762
819
|
|
|
763
|
-
|
|
764
|
-
if (!resource.actor_seqs[actor]) resource.actor_seqs[actor] = new RangeSet()
|
|
765
|
-
resource.actor_seqs[actor].add_range(base, base + len - 1)
|
|
820
|
+
done(resource)
|
|
766
821
|
})
|
|
767
|
-
|
|
768
|
-
|
|
769
|
-
|
|
770
|
-
|
|
771
|
-
|
|
772
|
-
|
|
773
|
-
|
|
774
|
-
|
|
775
|
-
|
|
776
|
-
|
|
777
|
-
|
|
778
|
-
|
|
779
|
-
|
|
780
|
-
|
|
781
|
-
|
|
782
|
-
|
|
783
|
-
|
|
784
|
-
|
|
785
|
-
|
|
786
|
-
|
|
787
|
-
|
|
788
|
-
|
|
789
|
-
|
|
790
|
-
|
|
791
|
-
|
|
792
|
-
|
|
793
|
-
|
|
794
|
-
|
|
795
|
-
|
|
796
|
-
|
|
797
|
-
|
|
798
|
-
|
|
799
|
-
|
|
800
|
-
|
|
801
|
-
let encoded = convert_us[x.slice(0, -'.name'.length)] = encode_filename(await fs.promises.readFile(`${braid_text.db_folder}/${x}`, { encoding: 'utf8' }))
|
|
802
|
-
if (encoded.length > max_encoded_key_size) {
|
|
803
|
-
console.log(`trying to convert file to new format, but the key is too big: ${braid_text.db_folder}/${x}`)
|
|
804
|
-
process.exit()
|
|
822
|
+
return await cache[key]
|
|
823
|
+
}
|
|
824
|
+
|
|
825
|
+
async function db_folder_init() {
|
|
826
|
+
if (braid_text.verbose) console.log('__!')
|
|
827
|
+
if (!db_folder_init.p) db_folder_init.p = new Promise(async done => {
|
|
828
|
+
await fs.promises.mkdir(braid_text.db_folder, { recursive: true });
|
|
829
|
+
await fs.promises.mkdir(`${braid_text.db_folder}/.meta`, { recursive: true })
|
|
830
|
+
|
|
831
|
+
// 0.0.13 -> 0.0.14
|
|
832
|
+
// look for files with key-encodings over max_encoded_key_size,
|
|
833
|
+
// and convert them using the new method
|
|
834
|
+
// for (let x of await fs.promises.readdir(braid_text.db_folder)) {
|
|
835
|
+
// let k = x.replace(/(_[0-9a-f]{64})?\.\w+$/, '')
|
|
836
|
+
// if (k.length > max_encoded_key_size) {
|
|
837
|
+
// k = decode_filename(k)
|
|
838
|
+
|
|
839
|
+
// await fs.promises.rename(`${braid_text.db_folder}/${x}`, `${braid_text.db_folder}/${encode_filename(k)}${x.match(/\.\w+$/)[0]}`)
|
|
840
|
+
// await fs.promises.writeFile(`${braid_text.db_folder}/${encode_filename(k)}.name`, k)
|
|
841
|
+
// }
|
|
842
|
+
// }
|
|
843
|
+
|
|
844
|
+
// 0.0.14 -> 0.0.15
|
|
845
|
+
// basically convert the 0.0.14 files back
|
|
846
|
+
let convert_us = {}
|
|
847
|
+
for (let x of await fs.promises.readdir(braid_text.db_folder)) {
|
|
848
|
+
if (x.endsWith('.name')) {
|
|
849
|
+
let encoded = convert_us[x.slice(0, -'.name'.length)] = encode_filename(await fs.promises.readFile(`${braid_text.db_folder}/${x}`, { encoding: 'utf8' }))
|
|
850
|
+
if (encoded.length > max_encoded_key_size) {
|
|
851
|
+
console.log(`trying to convert file to new format, but the key is too big: ${braid_text.db_folder}/${x}`)
|
|
852
|
+
process.exit()
|
|
853
|
+
}
|
|
854
|
+
if (braid_text.verbose) console.log(`deleting: ${braid_text.db_folder}/${x}`)
|
|
855
|
+
await fs.promises.unlink(`${braid_text.db_folder}/${x}`)
|
|
805
856
|
}
|
|
806
|
-
if (braid_text.verbose) console.log(`deleting: ${braid_text.db_folder}/${x}`)
|
|
807
|
-
await fs.promises.unlink(`${braid_text.db_folder}/${x}`)
|
|
808
857
|
}
|
|
809
|
-
|
|
810
|
-
|
|
811
|
-
|
|
812
|
-
|
|
813
|
-
|
|
814
|
-
|
|
815
|
-
|
|
858
|
+
if (Object.keys(convert_us).length) {
|
|
859
|
+
for (let x of await fs.promises.readdir(braid_text.db_folder)) {
|
|
860
|
+
let [_, k, num] = x.match(/^(.*)\.(\d+)$/s)
|
|
861
|
+
if (!convert_us[k]) continue
|
|
862
|
+
if (braid_text.verbose) console.log(`renaming: ${braid_text.db_folder}/${x} -> ${braid_text.db_folder}/${convert_us[k]}.${num}`)
|
|
863
|
+
if (convert_us[k]) await fs.promises.rename(`${braid_text.db_folder}/${x}`, `${braid_text.db_folder}/${convert_us[k]}.${num}`)
|
|
864
|
+
}
|
|
816
865
|
}
|
|
817
|
-
}
|
|
818
866
|
|
|
819
|
-
|
|
820
|
-
|
|
821
|
-
|
|
822
|
-
}
|
|
867
|
+
done()
|
|
868
|
+
})
|
|
869
|
+
await db_folder_init.p
|
|
870
|
+
}
|
|
823
871
|
|
|
824
|
-
async function get_files_for_key(key) {
|
|
825
|
-
|
|
826
|
-
|
|
827
|
-
|
|
828
|
-
|
|
829
|
-
|
|
830
|
-
|
|
831
|
-
|
|
832
|
-
}
|
|
872
|
+
async function get_files_for_key(key) {
|
|
873
|
+
await db_folder_init()
|
|
874
|
+
try {
|
|
875
|
+
let re = new RegExp("^" + encode_filename(key).replace(/[^a-zA-Z0-9]/g, "\\$&") + "\\.\\w+$")
|
|
876
|
+
return (await fs.promises.readdir(braid_text.db_folder))
|
|
877
|
+
.filter((a) => re.test(a))
|
|
878
|
+
.map((a) => `${braid_text.db_folder}/${a}`)
|
|
879
|
+
} catch (e) { return [] }
|
|
880
|
+
}
|
|
833
881
|
|
|
-    async function file_sync(key, process_delta, get_init) {
+    async function file_sync(key, process_delta, get_init, set_meta, get_meta) {
+        let encoded = encode_filename(key)

+        if (encoded.length > max_encoded_key_size) throw new Error(`invalid key: too long (max ${max_encoded_key_size})`)

+        let currentNumber = 0
+        let currentSize = 0
+        let threshold = 0

+        // Read existing files and sort by numbers.
+        const files = (await get_files_for_key(key))
+            .filter(x => x.match(/\.\d+$/))
+            .sort((a, b) => parseInt(a.match(/\d+$/)[0]) - parseInt(b.match(/\d+$/)[0]))

+        // Try to process files starting from the highest number.
+        let done = false
+        for (let i = files.length - 1; i >= 0; i--) {
+            if (done) {
+                await fs.promises.unlink(files[i])
+                continue
+            }
+            try {
+                const filename = files[i]
+                if (braid_text.verbose) console.log(`trying to process file: ${filename}`)
+                const data = await fs.promises.readFile(filename)

+                let cursor = 0
+                let isFirstChunk = true
+                while (cursor < data.length) {
+                    const chunkSize = data.readUInt32LE(cursor)
+                    cursor += 4
+                    const chunk = data.slice(cursor, cursor + chunkSize)
+                    cursor += chunkSize

+                    if (isFirstChunk) {
+                        isFirstChunk = false
+                        threshold = chunkSize * 10
+                    }
+                    process_delta(chunk)
+                }

+                currentSize = data.length
+                currentNumber = parseInt(filename.match(/\d+$/)[0])
+                done = true
+            } catch (error) {
+                console.error(`Error processing file: ${files[i]}`)
+                await fs.promises.unlink(files[i])
+            }
        }

+        var meta_filename = `${braid_text.db_folder}/.meta/${encoded}`
+        var meta_dirty = null
+        var meta_saving = null
+        var meta_file_content = '{}'
        try {
+            var meta_file_content = await fs.promises.readFile(meta_filename)
+        } catch (e) {}
+        set_meta(JSON.parse(meta_file_content))

+        let chain = Promise.resolve()
+        return {
+            change: async (bytes) => {
+                await (chain = chain.then(async () => {
+                    if (!bytes) currentSize = threshold
+                    else currentSize += bytes.length + 4 // we account for the extra 4 bytes for uint32
+                    const filename = `${braid_text.db_folder}/${encoded}.${currentNumber}`
+                    if (currentSize < threshold) {
+                        if (braid_text.verbose) console.log(`appending to db..`)

+                        let buffer = Buffer.allocUnsafe(4)
+                        buffer.writeUInt32LE(bytes.length, 0)
+                        await fs.promises.appendFile(filename, buffer)
+                        await fs.promises.appendFile(filename, bytes)

+                        if (braid_text.verbose) console.log("wrote to : " + filename)
+                    } else {
+                        try {
+                            if (braid_text.verbose) console.log(`starting new db..`)

+                            currentNumber++
+                            const init = get_init()
+                            const buffer = Buffer.allocUnsafe(4)
+                            buffer.writeUInt32LE(init.length, 0)

+                            const newFilename = `${braid_text.db_folder}/${encoded}.${currentNumber}`
+                            await fs.promises.writeFile(newFilename, buffer)
+                            await fs.promises.appendFile(newFilename, init)

+                            if (braid_text.verbose) console.log("wrote to : " + newFilename)

+                            currentSize = 4 + init.length
+                            threshold = currentSize * 10
+                            try {
+                                await fs.promises.unlink(filename)
+                            } catch (e) { }
+                        } catch (e) {
+                            if (braid_text.verbose) console.log(`e = ${e.stack}`)
+                        }
+                    }
+                }))
+            },
+            change_meta: async () => {
+                meta_dirty = true
+                if (meta_saving) return
+                meta_saving = true

+                while (meta_dirty) {
+                    meta_dirty = false
+                    await fs.promises.writeFile(meta_filename,
+                        JSON.stringify(get_meta()))
+                    await new Promise(done => setTimeout(done,
+                        braid_text.meta_file_save_period_ms))
                }
+                meta_saving = false
+            }
        }
    }
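file_sync persists each delta as a length-prefixed record (a 4-byte little-endian size, then the payload) appended to the current <key>.<n> file, and rolls over to a fresh file seeded with a get_init() snapshot once the log grows past ten times the snapshot's size. A self-contained sketch of just the record framing (append_record and read_records are illustrative names, not part of the package):

    const fs = require('fs')

    // append one length-prefixed record
    function append_record(path, bytes) {
        const header = Buffer.allocUnsafe(4)
        header.writeUInt32LE(bytes.length, 0)
        fs.appendFileSync(path, Buffer.concat([header, Buffer.from(bytes)]))
    }

    // walk the file back into individual records
    function read_records(path) {
        const data = fs.readFileSync(path)
        const records = []
        let cursor = 0
        while (cursor < data.length) {
            const size = data.readUInt32LE(cursor)
            cursor += 4
            records.push(data.slice(cursor, cursor + size))
            cursor += size
        }
        return records
    }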
+    async function wait_for_events(
+        key,
+        events,
+        actor_seqs,
+        my_space,
+        max_time = 3000,
+        max_space = 5 * 1024 * 1024) {

+        if (!wait_for_events.namespaces) wait_for_events.namespaces = {}
+        if (!wait_for_events.namespaces[key]) wait_for_events.namespaces[key] = {}
+        var ns = wait_for_events.namespaces[key]

+        if (!wait_for_events.space_used) wait_for_events.space_used = 0
+        if (wait_for_events.space_used + my_space > max_space) return
+        wait_for_events.space_used += my_space

+        var p_done = null
+        var p = new Promise(done => p_done = done)

+        var missing = 0
+        var on_find = () => {
+            missing--
+            if (!missing) p_done()
        }

+        for (let event of events) {
+            var [actor, seq] = decode_version(event)
+            if (actor_seqs?.[actor]?.has(seq)) continue
+            missing++

+            if (!ns.actor_seqs) ns.actor_seqs = {}
+            if (!ns.actor_seqs[actor]) ns.actor_seqs[actor] = []
+            sorted_set_insert(ns.actor_seqs[actor], seq)

+            if (!ns.events) ns.events = {}
+            if (!ns.events[event]) ns.events[event] = new Set()
+            ns.events[event].add(on_find)
+        }

+        if (missing) {
+            var t = setTimeout(() => {
+                for (let event of events) {
+                    var [actor, seq] = decode_version(event)

+                    var cbs = ns.events[event]
+                    if (!cbs) continue

+                    cbs.delete(on_find)
+                    if (cbs.size) continue

+                    delete ns.events[event]

+                    var seqs = ns.actor_seqs[actor]
+                    if (!seqs) continue

+                    sorted_set_delete(seqs, seq)
+                    if (seqs.length) continue

+                    delete ns.actor_seqs[actor]
+                }
+                p_done()
+            }, max_time)

+            await p

+            clearTimeout(t)
+        }
+        wait_for_events.space_used -= my_space
    }

+    async function got_event(key, event, change_count) {
+        var ns = wait_for_events.namespaces?.[key]
+        if (!ns) return

+        var [actor, seq] = decode_version(event)
+        var base_seq = seq + 1 - change_count

+        var seqs = ns.actor_seqs?.[actor]
+        if (!seqs) return

+        // binary search to find the first i >= base_seq
+        var i = 0, end = seqs.length
+        while (i < end) {
+            var mid = (i + end) >> 1
+            seqs[mid] < base_seq ? i = mid + 1 : end = mid
+        }
+        var start = i

+        // iterate up through seq
+        while (i < seqs.length && seqs[i] <= seq) {
+            var e = actor + "-" + seqs[i]
+            ns.events?.[e]?.forEach(cb => cb())
+            delete ns.events?.[e]
+            i++
+        }

+        seqs.splice(start, i - start)
+        if (!seqs.length) delete ns.actor_seqs[actor]
+    }

+    //////////////////////////////////////////////////////////////////
+    //////////////////////////////////////////////////////////////////
+    //////////////////////////////////////////////////////////////////
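The search in got_event is a standard lower bound over the sorted seq array: it finds the first index whose value is >= base_seq. A standalone version of the same loop:

    // first index i with a[i] >= x, for a sorted array a
    function lower_bound(a, x) {
        let i = 0, end = a.length
        while (i < end) {
            let mid = (i + end) >> 1
            a[mid] < x ? i = mid + 1 : end = mid
        }
        return i
    }
    console.log(lower_bound([2, 4, 4, 7, 9], 4)) // 1
    console.log(lower_bound([2, 4, 4, 7, 9], 8)) // 4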
+    function dt_len(doc, version) {
+        return count_code_points(dt_get_string(doc, version))
+    }

+    function dt_get_string(doc, version) {
+        // optimization: if version is the latest,
+        // then return the current text..
+        if (v_eq(version, doc.getRemoteVersion().map((x) => x.join("-")).sort()))
+            return doc.get()

+        var bytes = doc.toBytes()
+        var oplog = OpLog.fromBytes(bytes)

+        var local_version = dt_get_local_version(bytes, version)

+        var b = new Branch()
+        b.merge(oplog, new Uint32Array(local_version))
+        var s = b.get()
+        b.free()

+        oplog.free()
+        return s
+    }
+    function dt_get(doc, version, agent = null, anti_version = null) {
+        if (dt_get.last_doc) dt_get.last_doc.free()

+        let bytes = doc.toBytes()
+        dt_get.last_doc = doc = Doc.fromBytes(bytes, agent)

+        let [_agents, versions, parentss] = dt_parse([...bytes])
+        if (anti_version) {
+            var include_versions = new Set()
+            var bad_versions = new Set(anti_version)

+            for (let i = 0; i < versions.length; i++) {
+                var v = versions[i].join("-")
+                var ps = parentss[i].map(x => x.join('-'))
+                if (bad_versions.has(v) || ps.some(x => bad_versions.has(x)))
+                    bad_versions.add(v)
+                else
+                    include_versions.add(v)
            }
+        } else {
+            var include_versions = new Set(version)
+            var looking_for = new Set(version)
+            var local_version = []

+            for (let i = versions.length - 1; i >= 0; i--) {
+                var v = versions[i].join("-")
+                var ps = parentss[i].map(x => x.join('-'))
+                if (looking_for.has(v)) {
+                    local_version.push(i)
+                    looking_for.delete(v)
+                }
+                if (include_versions.has(v))
+                    ps.forEach(x => include_versions.add(x))
+            }
+            local_version.reverse()

+            // NOTE: currently used by braid-chrome in dt.js at the bottom
+            dt_get.last_local_version = new Uint32Array(local_version)

+            if (looking_for.size) throw new Error(`version not found: ${version}`)
+        }

+        let new_doc = new Doc(agent)
+        let op_runs = doc.getOpsSince([])

+        let i = 0
+        op_runs.forEach((op_run) => {
+            if (op_run.content) op_run.content = [...op_run.content]

+            let len = op_run.end - op_run.start
+            let base_i = i
+            for (let j = 1; j <= len; j++) {
+                let I = base_i + j
+                if (
+                    j == len ||
+                    parentss[I].length != 1 ||
+                    parentss[I][0][0] != versions[I - 1][0] ||
+                    parentss[I][0][1] != versions[I - 1][1] ||
+                    versions[I][0] != versions[I - 1][0] ||
+                    versions[I][1] != versions[I - 1][1] + 1
+                ) {
+                    for (; i < I; i++) {
+                        let version = versions[i].join("-")
+                        if (!include_versions.has(version)) continue
+                        let og_i = i
+                        let content = []
+                        if (op_run.content?.[i - base_i]) content.push(op_run.content[i - base_i])
+                        if (!!op_run.content === op_run.fwd)
+                            while (i + 1 < I && include_versions.has(versions[i + 1].join("-"))) {
+                                i++
+                                if (op_run.content?.[i - base_i]) content.push(op_run.content[i - base_i])
+                            }
+                        content = content.length ? content.join("") : null

+                        new_doc.mergeBytes(
+                            dt_create_bytes(
+                                version,
+                                parentss[og_i].map((x) => x.join("-")),
+                                op_run.fwd ?
+                                    (op_run.content ?
+                                        op_run.start + (og_i - base_i) :
+                                        op_run.start) :
+                                    op_run.end - 1 - (i - base_i),
+                                op_run.content ? 0 : i - og_i + 1,
+                                content
+                            )
                        )
+                    }
                }
            }
+        })
+        return new_doc
    }
+    function dt_get_patches(doc, version = null) {
+        if (version && v_eq(version,
+            doc.getRemoteVersion().map((x) => x.join("-")).sort())) {
+            // they want everything past the end, which is nothing
+            return []
+        }

+        let bytes = doc.toBytes()
+        doc = Doc.fromBytes(bytes)

+        let [_agents, versions, parentss] = dt_parse([...bytes])

+        let op_runs = []
+        if (version?.length) {
+            let frontier = {}
+            version.forEach((x) => frontier[x] = true)
+            let local_version = []
+            for (let i = 0; i < versions.length; i++)
+                if (frontier[versions[i].join("-")]) local_version.push(i)

+            local_version = new Uint32Array(local_version)

+            let after_bytes = doc.getPatchSince(local_version)
+            ;[_agents, versions, parentss] = dt_parse([...after_bytes])
+            op_runs = doc.getOpsSince(local_version)
+        } else op_runs = doc.getOpsSince([])

+        doc.free()

+        let i = 0
+        let patches = []
+        op_runs.forEach((op_run) => {
+            let version = versions[i]
+            let parents = parentss[i].map((x) => x.join("-")).sort()
+            let start = op_run.start
+            let end = start + 1
+            if (op_run.content) op_run.content = [...op_run.content]
+            let len = op_run.end - op_run.start
+            for (let j = 1; j <= len; j++) {
+                let I = i + j
+                if (
+                    (!op_run.content && op_run.fwd) ||
+                    j == len ||
+                    parentss[I].length != 1 ||
+                    parentss[I][0][0] != versions[I - 1][0] ||
+                    parentss[I][0][1] != versions[I - 1][1] ||
+                    versions[I][0] != versions[I - 1][0] ||
+                    versions[I][1] != versions[I - 1][1] + 1
+                ) {
+                    let s = op_run.fwd ?
+                        (op_run.content ?
+                            start :
+                            op_run.start) :
+                        (op_run.start + (op_run.end - end))
+                    let e = op_run.fwd ?
+                        (op_run.content ?
+                            end :
+                            op_run.start + (end - start)) :
+                        (op_run.end - (start - op_run.start))
+                    patches.push({
+                        version: `${version[0]}-${version[1] + e - s - 1}`,
+                        parents,
+                        unit: "text",
+                        range: op_run.content ? `[${s}:${s}]` : `[${s}:${e}]`,
+                        content: op_run.content?.slice(start - op_run.start, end - op_run.start).join("") ?? "",
+                        start: s,
+                        end: e,
+                    })
+                    if (j == len) break
+                    version = versions[I]
+                    parents = parentss[I].map((x) => x.join("-")).sort()
+                    start = op_run.start + j
+                }
+                end++
            }
+            i += len
+        })
+        return patches
+    }
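The objects pushed above follow the Braid unit/range/content patch convention, with a zero-width range marking an insertion and version naming the last op in the run. A hypothetical example of one returned patch (agent name and numbers invented for illustration):

    // an insertion of "abc" at code-point position 5, authored as
    // ops alice-7..alice-9 on top of alice-6:
    // {
    //     version: "alice-9",   // first-op seq 7, plus 3 chars, minus 1
    //     parents: ["alice-6"],
    //     unit: "text",
    //     range: "[5:5]",       // zero-width => insertion at position 5
    //     content: "abc",
    //     start: 5,
    //     end: 8,               // start + number of inserted code points
    // }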
+    function dt_parse(byte_array) {
+        if (new TextDecoder().decode(new Uint8Array(byte_array.splice(0, 8))) !== "DMNDTYPS") throw new Error("dt parse error, expected DMNDTYPS")

+        if (byte_array.shift() != 0) throw new Error("dt parse error, expected version 0")

+        let agents = []
+        let versions = []
+        let parentss = []

+        while (byte_array.length) {
+            let id = byte_array.shift()
+            let len = dt_read_varint(byte_array)
+            if (id == 1) {
+            } else if (id == 3) {
+                let goal = byte_array.length - len
+                while (byte_array.length > goal) {
+                    agents.push(dt_read_string(byte_array))
                }
+            } else if (id == 20) {
+            } else if (id == 21) {
+                let seqs = {}
+                let goal = byte_array.length - len
+                while (byte_array.length > goal) {
+                    let part0 = dt_read_varint(byte_array)
+                    let has_jump = part0 & 1
+                    let agent_i = (part0 >> 1) - 1
+                    let run_length = dt_read_varint(byte_array)
+                    let jump = 0
+                    if (has_jump) {
+                        let part2 = dt_read_varint(byte_array)
+                        jump = part2 >> 1
+                        if (part2 & 1) jump *= -1
+                    }
+                    let base = (seqs[agent_i] || 0) + jump

+                    for (let i = 0; i < run_length; i++) {
+                        versions.push([agents[agent_i], base + i])
                    }
+                    seqs[agent_i] = base + run_length
                }
+            } else if (id == 23) {
+                let count = 0
+                let goal = byte_array.length - len
+                while (byte_array.length > goal) {
+                    let run_len = dt_read_varint(byte_array)

+                    let parents = []
+                    let has_more = 1
+                    while (has_more) {
+                        let x = dt_read_varint(byte_array)
+                        let is_foreign = 0x1 & x
+                        has_more = 0x2 & x
+                        let num = x >> 2

+                        if (x == 1) {
+                            // no parents (e.g. parent is "root")
+                        } else if (!is_foreign) {
+                            parents.push(versions[count - num])
+                        } else {
+                            parents.push([agents[num - 1], dt_read_varint(byte_array)])
+                        }
+                    }
+                    parentss.push(parents)
                    count++

+                    for (let i = 0; i < run_len - 1; i++) {
+                        parentss.push([versions[count - 1]])
+                        count++
+                    }
                }
+            } else {
+                byte_array.splice(0, len)
            }
        }

+        return [agents, versions, parentss]
+    }
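dt_parse walks a file of the form: the magic string "DMNDTYPS", a version byte, then (chunk id, varint length, payload) triples, skipping ids it doesn't know. A toy walk of just that framing, with hand-built bytes (varints under 128 are single bytes, so plain numbers suffice here):

    let chunks = [
        3, 6, /* agents chunk: */ 5, 97, 108, 105, 99, 101, // one string: len 5, "alice"
        99, 2, 0, 0,                                        // unknown chunk id 99 -> skipped
    ]
    let agents = []
    while (chunks.length) {
        let id = chunks.shift()
        let len = chunks.shift()          // dt_read_varint in the real code
        let payload = chunks.splice(0, len)
        if (id == 3) {                    // agent names: varint length + utf8 bytes
            while (payload.length) {
                let n = payload.shift()
                agents.push(Buffer.from(payload.splice(0, n)).toString('utf8'))
            }
        }                                 // other ids fall through, like the `else` above
    }
    console.log(agents) // [ 'alice' ]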
+    function dt_get_actor_seq_runs(byte_array, cb) {
+        if (new TextDecoder().decode(new Uint8Array(byte_array.splice(0, 8))) !== "DMNDTYPS") throw new Error("dt parse error, expected DMNDTYPS")

+        if (byte_array.shift() != 0) throw new Error("dt parse error, expected version 0")

+        let agents = []

+        while (byte_array.length) {
+            let id = byte_array.shift()
+            let len = dt_read_varint(byte_array)
+            if (id == 1) {
+            } else if (id == 3) {
+                let goal = byte_array.length - len
+                while (byte_array.length > goal) {
+                    agents.push(dt_read_string(byte_array))
                }
+            } else if (id == 20) {
+            } else if (id == 21) {
+                let seqs = {}
+                let goal = byte_array.length - len
+                while (byte_array.length > goal) {
+                    let part0 = dt_read_varint(byte_array)
+                    let has_jump = part0 & 1
+                    let agent_i = (part0 >> 1) - 1
+                    let run_length = dt_read_varint(byte_array)
+                    let jump = 0
+                    if (has_jump) {
+                        let part2 = dt_read_varint(byte_array)
+                        jump = part2 >> 1
+                        if (part2 & 1) jump *= -1
+                    }
+                    let base = (seqs[agent_i] || 0) + jump

+                    cb(agents[agent_i], base, run_length)
+                    seqs[agent_i] = base + run_length
+                }
+            } else {
+                byte_array.splice(0, len)
            }
        }
    }
+    function dt_get_local_version(bytes, version) {
+        var looking_for = new Map()
+        for (var event of version) {
+            var [agent, seq] = decode_version(event)
+            if (!looking_for.has(agent)) looking_for.set(agent, [])
+            looking_for.get(agent).push(seq)
+        }
+        for (var seqs of looking_for.values())
+            seqs.sort((a, b) => a - b)

+        var byte_array = [...bytes]
+        var local_version = []
+        var local_version_base = 0

+        if (new TextDecoder().decode(new Uint8Array(byte_array.splice(0, 8))) !== "DMNDTYPS") throw new Error("dt parse error, expected DMNDTYPS")

+        if (byte_array.shift() != 0) throw new Error("dt parse error, expected version 0")

+        let agents = []

+        while (byte_array.length && looking_for.size) {
+            let id = byte_array.shift()
+            let len = dt_read_varint(byte_array)
+            if (id == 1) {
+            } else if (id == 3) {
+                let goal = byte_array.length - len
+                while (byte_array.length > goal) {
+                    agents.push(dt_read_string(byte_array))
                }
+            } else if (id == 20) {
+            } else if (id == 21) {
+                let seqs = {}
+                let goal = byte_array.length - len
+                while (byte_array.length > goal && looking_for.size) {
+                    let part0 = dt_read_varint(byte_array)
+                    let has_jump = part0 & 1
+                    let agent_i = (part0 >> 1) - 1
+                    let run_length = dt_read_varint(byte_array)
+                    let jump = 0
+                    if (has_jump) {
+                        let part2 = dt_read_varint(byte_array)
+                        jump = part2 >> 1
+                        if (part2 & 1) jump *= -1
+                    }
+                    let base = (seqs[agent_i] || 0) + jump

+                    var agent = agents[agent_i]
+                    looking_for_seqs = looking_for.get(agent)
+                    if (looking_for_seqs) {
+                        for (var seq of splice_out_range(
+                            looking_for_seqs, base, base + run_length - 1))
+                            local_version.push(local_version_base + (seq - base))
+                        if (!looking_for_seqs.length) looking_for.delete(agent)
+                    }
+                    local_version_base += run_length

+                    seqs[agent_i] = base + run_length
+                }
+            } else {
+                byte_array.splice(0, len)
            }
        }

+        if (looking_for.size) throw new Error(`version not found: ${version}`)
+        return local_version

+        function splice_out_range(a, s, e) {
+            if (!a?.length) return [];
+            let l = 0, r = a.length;
+            while (l < r) {
+                const m = Math.floor((l + r) / 2);
+                if (a[m] < s) l = m + 1; else r = m;
+            }
+            const i = l;
+            l = i; r = a.length;
+            while (l < r) {
+                const m = Math.floor((l + r) / 2);
+                if (a[m] <= e) l = m + 1; else r = m;
+            }
+            return a.splice(i, l - i);
        }
    }
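splice_out_range is scoped inside dt_get_local_version; lifted to top level, it removes (and returns) every element of a sorted array that falls within [s, e], using two binary searches:

    let a = [1, 2, 5, 8, 9]
    console.log(splice_out_range(a, 2, 8)) // [ 2, 5, 8 ]
    console.log(a)                         // [ 1, 9 ]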
+    function dt_read_string(byte_array) {
+        return new TextDecoder().decode(new Uint8Array(byte_array.splice(0, dt_read_varint(byte_array))))
+    }

+    function dt_read_varint(byte_array) {
+        let result = 0
+        let shift = 0
+        while (true) {
+            if (byte_array.length === 0) throw new Error("byte array does not contain varint")

+            let byte_val = byte_array.shift()
+            result |= (byte_val & 0x7f) << shift
+            if ((byte_val & 0x80) == 0) return result
+            shift += 7
+        }
    }
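dt_read_varint is the usual LEB128-style decoder: seven payload bits per byte, low bits first, with the high bit as a continuation flag. Its inverse, write_varint, is defined inside dt_create_bytes below; a quick round trip (write_varint repeated here so the sketch is self-contained):

    function write_varint(bytes, value) {
        while (value >= 0x80) {
            bytes.push((value & 0x7f) | 0x80)
            value >>= 7
        }
        bytes.push(value)
    }
    let buf = []
    write_varint(buf, 300)
    console.log(buf)                 // [ 172, 2 ]  i.e. 0xAC 0x02
    console.log(dt_read_varint(buf)) // 300 (buf is consumed as it reads)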
+    function dt_create_bytes(version, parents, pos, del, ins) {
+        if (del) pos += del - 1

+        function write_varint(bytes, value) {
+            while (value >= 0x80) {
+                bytes.push((value & 0x7f) | 0x80)
+                value >>= 7
+            }
+            bytes.push(value)
        }

+        function write_string(byte_array, str) {
+            let str_bytes = new TextEncoder().encode(str)
+            write_varint(byte_array, str_bytes.length)
+            for (let x of str_bytes) byte_array.push(x)
+        }

+        version = decode_version(version)
+        parents = parents.map(decode_version)

+        let bytes = []
+        bytes = bytes.concat(Array.from(new TextEncoder().encode("DMNDTYPS")))
+        bytes.push(0)

+        let file_info = []
+        let agent_names = []

+        let agents = new Set()
+        agents.add(version[0])
+        for (let p of parents) agents.add(p[0])
+        agents = [...agents]

+        // console.log(JSON.stringify({ agents, parents }, null, 4));

+        let agent_to_i = {}
+        for (let [i, agent] of agents.entries()) {
+            agent_to_i[agent] = i
+            write_string(agent_names, agent)
+        }

+        file_info.push(3)
+        write_varint(file_info, agent_names.length)
+        for (let x of agent_names) file_info.push(x)

+        bytes.push(1)
+        write_varint(bytes, file_info.length)
+        for (let x of file_info) bytes.push(x)

+        let branch = []

+        if (parents.length) {
+            let frontier = []

+            for (let [i, [agent, seq]] of parents.entries()) {
+                let has_more = i < parents.length - 1
+                let mapped = agent_to_i[agent]
+                let n = ((mapped + 1) << 1) | (has_more ? 1 : 0)
+                write_varint(frontier, n)
+                write_varint(frontier, seq)
+            }

+            branch.push(12)
+            write_varint(branch, frontier.length)
+            for (let x of frontier) branch.push(x)
+        }

+        bytes.push(10)
+        write_varint(bytes, branch.length)
+        for (let x of branch) bytes.push(x)

+        let patches = []

+        let unicode_chars = ins ? [...ins] : []

+        if (ins) {
+            let inserted_content_bytes = []

+            inserted_content_bytes.push(0) // ins (not del, which is 1)

+            inserted_content_bytes.push(13) // "content" enum (rather than compressed)

+            let encoder = new TextEncoder()
+            let utf8Bytes = encoder.encode(ins)

+            write_varint(inserted_content_bytes, 1 + utf8Bytes.length)
+            // inserted_content_bytes.push(1 + utf8Bytes.length) // length of content chunk
+            inserted_content_bytes.push(4) // "plain text" enum

+            for (let b of utf8Bytes) inserted_content_bytes.push(b) // actual text

+            inserted_content_bytes.push(25) // "known" enum
+            let known_chunk = []
+            write_varint(known_chunk, unicode_chars.length * 2 + 1)
+            write_varint(inserted_content_bytes, known_chunk.length)
+            for (let x of known_chunk) inserted_content_bytes.push(x)

+            patches.push(24)
+            write_varint(patches, inserted_content_bytes.length)
+            for (let b of inserted_content_bytes) patches.push(b)
+        }

+        // write in the version
+        let version_bytes = []

+        let [agent, seq] = version
+        let agent_i = agent_to_i[agent]
+        let jump = seq

+        write_varint(version_bytes, ((agent_i + 1) << 1) | (jump != 0 ? 1 : 0))
+        write_varint(version_bytes, ins ? unicode_chars.length : del)
+        if (jump) write_varint(version_bytes, jump << 1)

+        patches.push(21)
+        write_varint(patches, version_bytes.length)
+        for (let b of version_bytes) patches.push(b)

+        // write in "op" bytes (some encoding of position)
+        let op_bytes = []

+        if (del) {
+            if (pos == 0) {
+                write_varint(op_bytes, 4)
+            } else if (del == 1) {
+                write_varint(op_bytes, pos * 16 + 6)
+            } else {
+                write_varint(op_bytes, del * 16 + 7)
+                write_varint(op_bytes, pos * 2 + 2)
+            }
+        } else if (unicode_chars.length == 1) {
+            if (pos == 0) write_varint(op_bytes, 0)
+            else write_varint(op_bytes, pos * 16 + 2)
+        } else if (pos == 0) {
+            write_varint(op_bytes, unicode_chars.length * 8 + 1)
        } else {
+            write_varint(op_bytes, unicode_chars.length * 8 + 3)
+            write_varint(op_bytes, pos * 2)
+        }

+        patches.push(22)
+        write_varint(patches, op_bytes.length)
+        for (let b of op_bytes) patches.push(b)

+        // write in parents
+        let parents_bytes = []

+        write_varint(parents_bytes, ins ? unicode_chars.length : del)

+        if (parents.length) {
+            for (let [i, [agent, seq]] of parents.entries()) {
+                let has_more = i < parents.length - 1
+                let agent_i = agent_to_i[agent]
+                write_varint(parents_bytes, ((agent_i + 1) << 2) | (has_more ? 2 : 0) | 1)
+                write_varint(parents_bytes, seq)
+            }
+        } else write_varint(parents_bytes, 1)

+        patches.push(23)
+        write_varint(patches, parents_bytes.length)
+        for (let x of parents_bytes) patches.push(x)

+        // write in patches
+        bytes.push(20)
+        write_varint(bytes, patches.length)
+        for (let b of patches) bytes.push(b)

+        // console.log(bytes);
+        return bytes
+    }
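dt_get above feeds the output of dt_create_bytes straight into Doc.mergeBytes. A rough usage sketch along the same lines; treating this as an exact diamond-types round trip is an assumption here, not something the diff demonstrates:

    // sketch: build one insert run and merge it into a fresh Doc
    let d = new Doc("alice")
    // "hi" at position 0, no deletes, no parents; first op is alice-0
    d.mergeBytes(dt_create_bytes("alice-0", [], 0, 0, "hi"))
    console.log(d.get()) // expected: "hi"
    d.free()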
+    function OpLog_remote_to_local(doc, frontier) {
+        let map = Object.fromEntries(frontier.map((x) => [x, true]))

+        let local_version = []

+        let max_version = doc.getLocalVersion().reduce((a, b) => Math.max(a, b), -1)
+        for (let i = 0; i <= max_version; i++) {
+            if (map[doc.localToRemoteVersion([i])[0].join("-")]) {
+                local_version.push(i)
+            }
        }

+        return frontier.length == local_version.length && new Uint32Array(local_version)
    }

+    function v_eq(v1, v2) {
+        return v1.length == v2.length && v1.every((x, i) => x == v2[i])
+    }

+    function get_xf_patches(doc, v) {
+        let patches = []
+        for (let xf of doc.xfSince(v)) {
+            patches.push(
+                xf.kind == "Ins"
+                    ? {
+                        unit: "text",
+                        range: `[${xf.start}:${xf.start}]`,
+                        content: xf.content,
+                    }
+                    : {
+                        unit: "text",
+                        range: `[${xf.start}:${xf.end}]`,
+                        content: "",
+                    }
+            )
        }
+        return relative_to_absolute_patches(patches)
    }
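v_eq compares two version frontiers positionally, which is why every call site sorts both sides first:

    v_eq(["alice-3", "bob-7"], ["alice-3", "bob-7"]) // true
    v_eq(["bob-7", "alice-3"], ["alice-3", "bob-7"]) // false -- hence the .sort() calls above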
+    function relative_to_absolute_patches(patches) {
+        let avl = create_avl_tree((node) => {
+            let parent = node.parent
+            if (parent.left == node) {
+                parent.left_size -= node.left_size + node.size
            } else {
+                node.left_size += parent.left_size + parent.size
+            }
+        })
+        avl.root.size = Infinity
+        avl.root.left_size = 0

+        function resize(node, new_size) {
+            if (node.size == new_size) return
+            let delta = new_size - node.size
+            node.size = new_size
+            while (node.parent) {
+                if (node.parent.left == node) node.parent.left_size += delta
+                node = node.parent
            }
        }

+        for (let p of patches) {
+            let [start, end] = p.range.match(/\d+/g).map((x) => 1 * x)
+            let del = end - start

+            let node = avl.root
+            while (true) {
+                if (start < node.left_size || (node.left && node.content == null && start == node.left_size)) {
+                    node = node.left
+                } else if (start > node.left_size + node.size || (node.content == null && start == node.left_size + node.size)) {
+                    start -= node.left_size + node.size
+                    node = node.right
+                } else {
+                    start -= node.left_size
+                    break
                }
            }

+            let remaining = start + del - node.size
+            if (remaining < 0) {
+                if (node.content == null) {
+                    if (start > 0) {
+                        let x = { size: 0, left_size: 0 }
+                        avl.add(node, "left", x)
+                        resize(x, start)
                    }
+                    let x = { size: 0, left_size: 0, content: p.content, del }
+                    avl.add(node, "left", x)
+                    resize(x, count_code_points(x.content))
+                    resize(node, node.size - (start + del))
                } else {
+                    node.content = node.content.slice(0, codePoints_to_index(node.content, start)) + p.content + node.content.slice(codePoints_to_index(node.content, start + del))
+                    resize(node, count_code_points(node.content))
                }
            } else {
+                let next
+                let middle_del = 0
+                while (remaining >= (next = avl.next(node)).size) {
+                    remaining -= next.size
+                    middle_del += next.del ?? next.size
                    resize(next, 0)
                    avl.del(next)
                }

+                if (node.content == null) {
+                    if (next.content == null) {
+                        if (start == 0) {
+                            node.content = p.content
+                            node.del = node.size + middle_del + remaining
+                            resize(node, count_code_points(node.content))
+                        } else {
+                            let x = {
+                                size: 0,
+                                left_size: 0,
+                                content: p.content,
+                                del: node.size - start + middle_del + remaining,
+                            }
+                            resize(node, start)
+                            avl.add(node, "right", x)
+                            resize(x, count_code_points(x.content))
+                        }
+                        resize(next, next.size - remaining)
+                    } else {
+                        next.del += node.size - start + middle_del
+                        next.content = p.content + next.content.slice(codePoints_to_index(next.content, remaining))
+                        resize(node, start)
+                        if (node.size == 0) avl.del(node)
+                        resize(next, count_code_points(next.content))
+                    }
+                } else {
+                    if (next.content == null) {
+                        node.del += middle_del + remaining
+                        node.content = node.content.slice(0, codePoints_to_index(node.content, start)) + p.content
+                        resize(node, count_code_points(node.content))
+                        resize(next, next.size - remaining)
+                    } else {
+                        node.del += middle_del + next.del
+                        node.content = node.content.slice(0, codePoints_to_index(node.content, start)) + p.content + next.content.slice(codePoints_to_index(next.content, remaining))
+                        resize(node, count_code_points(node.content))
+                        resize(next, 0)
+                        avl.del(next)
+                    }
+                }
            }
        }

+        let new_patches = []
+        let offset = 0
+        let node = avl.root
+        while (node.left) node = node.left
+        while (node) {
+            if (node.content == null) {
+                offset += node.size
+            } else {
+                new_patches.push({
+                    unit: patches[0].unit,
+                    range: `[${offset}:${offset + node.del}]`,
+                    content: node.content,
+                })
+                offset += node.del
+            }

+            node = avl.next(node)
+        }
+        return new_patches
    }
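relative_to_absolute_patches takes patches whose ranges are expressed against the text as it stands after each preceding patch, and rebases them all onto the original text. A worked example using the helpers above ("hello world" is a hypothetical starting document):

    // replace [0:5] ("hello") with "goodbye"; then, in the *resulting*
    // text "goodbye world", replace [8:13] ("world") with "moon":
    let abs = relative_to_absolute_patches([
        { unit: "text", range: "[0:5]",  content: "goodbye" },
        { unit: "text", range: "[8:13]", content: "moon" },
    ])
    // abs: [ { unit: "text", range: "[0:5]",  content: "goodbye" },
    //        { unit: "text", range: "[6:11]", content: "moon" } ]
    // -- both ranges now index into the original "hello world"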
1788
|
-
function create_avl_tree(on_rotate) {
|
|
1789
|
-
|
|
1860
|
+
function create_avl_tree(on_rotate) {
|
|
1861
|
+
let self = { root: { height: 1 } }
|
|
1790
1862
|
|
|
1791
|
-
|
|
1792
|
-
|
|
1793
|
-
|
|
1863
|
+
self.calc_height = (node) => {
|
|
1864
|
+
node.height = 1 + Math.max(node.left?.height ?? 0, node.right?.height ?? 0)
|
|
1865
|
+
}
|
|
1794
1866
|
|
|
1795
|
-
|
|
1796
|
-
|
|
1797
|
-
|
|
1798
|
-
|
|
1867
|
+
self.rechild = (child, new_child) => {
|
|
1868
|
+
if (child.parent) {
|
|
1869
|
+
if (child.parent.left == child) {
|
|
1870
|
+
child.parent.left = new_child
|
|
1871
|
+
} else {
|
|
1872
|
+
child.parent.right = new_child
|
|
1873
|
+
}
|
|
1799
1874
|
} else {
|
|
1800
|
-
|
|
1875
|
+
self.root = new_child
|
|
1801
1876
|
}
|
|
1802
|
-
|
|
1803
|
-
self.root = new_child
|
|
1877
|
+
if (new_child) new_child.parent = child.parent
|
|
1804
1878
|
}
|
|
1805
|
-
if (new_child) new_child.parent = child.parent
|
|
1806
|
-
}
|
|
1807
1879
|
|
|
1808
|
-
|
|
1809
|
-
|
|
1880
|
+
self.rotate = (node) => {
|
|
1881
|
+
on_rotate(node)
|
|
1810
1882
|
|
|
1811
|
-
|
|
1812
|
-
|
|
1813
|
-
|
|
1883
|
+
let parent = node.parent
|
|
1884
|
+
let left = parent.right == node ? "left" : "right"
|
|
1885
|
+
let right = parent.right == node ? "right" : "left"
|
|
1814
1886
|
|
|
1815
|
-
|
|
1816
|
-
|
|
1817
|
-
|
|
1887
|
+
parent[right] = node[left]
|
|
1888
|
+
if (parent[right]) parent[right].parent = parent
|
|
1889
|
+
self.calc_height(parent)
|
|
1818
1890
|
|
|
1819
|
-
|
|
1820
|
-
|
|
1891
|
+
self.rechild(parent, node)
|
|
1892
|
+
parent.parent = node
|
|
1821
1893
|
|
|
1822
|
-
|
|
1823
|
-
|
|
1894
|
+
node[left] = parent
|
|
1895
|
+
}
|
|
1896
|
+
|
|
1897
|
+
self.fix_avl = (node) => {
|
|
1898
|
+
self.calc_height(node)
|
|
1899
|
+
let diff = (node.right?.height ?? 0) - (node.left?.height ?? 0)
|
|
1900
|
+
if (Math.abs(diff) >= 2) {
|
|
1901
|
+
if (diff > 0) {
|
|
1902
|
+
if ((node.right.left?.height ?? 0) > (node.right.right?.height ?? 0)) self.rotate(node.right.left)
|
|
1903
|
+
self.rotate((node = node.right))
|
|
1904
|
+
} else {
|
|
1905
|
+
if ((node.left.right?.height ?? 0) > (node.left.left?.height ?? 0)) self.rotate(node.left.right)
|
|
1906
|
+
self.rotate((node = node.left))
|
|
1907
|
+
}
|
|
1908
|
+
self.fix_avl(node)
|
|
1909
|
+
} else if (node.parent) self.fix_avl(node.parent)
|
|
1910
|
+
}
|
|
1824
1911
|
|
|
-
-
-
-
-        if (
-
-
+        self.add = (node, side, add_me) => {
+            let other_side = side == "left" ? "right" : "left"
+            add_me.height = 1
+
+            if (node[side]) {
+                node = node[side]
+                while (node[other_side]) node = node[other_side]
+                node[other_side] = add_me
             } else {
-
-                self.rotate((node = node.left))
+                node[side] = add_me
             }
+            add_me.parent = node
             self.fix_avl(node)
-        } else if (node.parent) self.fix_avl(node.parent)
-    }
-
-    self.add = (node, side, add_me) => {
-        let other_side = side == "left" ? "right" : "left"
-        add_me.height = 1
-
-        if (node[side]) {
-            node = node[side]
-            while (node[other_side]) node = node[other_side]
-            node[other_side] = add_me
-        } else {
-            node[side] = add_me
         }
-        add_me.parent = node
-        self.fix_avl(node)
-    }

-
-
-
-
-
-
-
-
-
-
-
-
+        self.del = (node) => {
+            if (node.left && node.right) {
+                let cursor = node.right
+                while (cursor.left) cursor = cursor.left
+                cursor.left = node.left
+
+                // breaks abstraction
+                cursor.left_size = node.left_size
+                let y = cursor
+                while (y.parent != node) {
+                    y = y.parent
+                    y.left_size -= cursor.size
+                }
+
+                node.left.parent = cursor
+                if (cursor == node.right) {
+                    self.rechild(node, cursor)
+                    self.fix_avl(cursor)
+                } else {
+                    let x = cursor.parent
+                    self.rechild(cursor, cursor.right)
+                    cursor.right = node.right
+                    node.right.parent = cursor
+                    self.rechild(node, cursor)
+                    self.fix_avl(x)
+                }
+            } else {
+                self.rechild(node, node.left || node.right || null)
+                if (node.parent) self.fix_avl(node.parent)
             }
+        }

-
-        if (
-
-
+        self.next = (node) => {
+            if (node.right) {
+                node = node.right
+                while (node.left) node = node.left
+                return node
             } else {
-
-
-            cursor.right = node.right
-            node.right.parent = cursor
-            self.rechild(node, cursor)
-            self.fix_avl(x)
+                while (node.parent && node.parent.right == node) node = node.parent
+                return node.parent
             }
-    } else {
-        self.rechild(node, node.left || node.right || null)
-        if (node.parent) self.fix_avl(node.parent)
         }
+
+        return self
     }

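self.next above is the textbook in-order successor: step right then run all the way left, or else climb until the node is no longer a right child. A small self-contained sketch of the same logic on hand-built {left, right, parent} objects (the node shape here is ours, for illustration only):

    // Sketch: in-order successor, mirroring self.next.
    function next(node) {
        if (node.right) {
            node = node.right
            while (node.left) node = node.left
            return node
        }
        while (node.parent && node.parent.right == node) node = node.parent
        return node.parent
    }
    let root = { v: 2 }, a = { v: 1, parent: root }, b = { v: 3, parent: root }
    root.left = a; root.right = b
    console.log(next(a).v, next(root).v, next(b)) // 2 3 undefined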
-
-
-
-
-
-    } else {
-        while (node.parent && node.parent.right == node) node = node.parent
-        return node.parent
+    function count_code_points(str) {
+        let code_points = 0;
+        for (let i = 0; i < str.length; i++) {
+            if (str.charCodeAt(i) >= 0xD800 && str.charCodeAt(i) <= 0xDBFF) i++;
+            code_points++;
         }
+        return code_points;
     }

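count_code_points walks UTF-16 code units but skips the low surrogate of each pair, so astral characters are counted once. Illustrative use, assuming the function is in scope:

    // "😀" is one code point but two UTF-16 units (\uD83D \uDE00).
    console.log("abc".length, count_code_points("abc"))     // 3 3
    console.log("a😀b".length, count_code_points("a😀b"))   // 4 3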
-
-
-
-
-
-
-
-
+    function index_to_codePoints(str, index) {
+        let i = 0
+        let c = 0
+        while (i < index && i < str.length) {
+            const charCode = str.charCodeAt(i)
+            i += (charCode >= 0xd800 && charCode <= 0xdbff) ? 2 : 1
+            c++
+        }
+        return c
     }
-        return code_points;
-}

-function
-
-
-
-
-
-
+    function codePoints_to_index(str, codePoints) {
+        let i = 0
+        let c = 0
+        while (c < codePoints && i < str.length) {
+            const charCode = str.charCodeAt(i)
+            i += (charCode >= 0xd800 && charCode <= 0xdbff) ? 2 : 1
+            c++
+        }
+        return i
     }
-        return c
-}

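index_to_codePoints and codePoints_to_index convert between UTF-16 string indices (what JavaScript slicing uses) and code-point offsets (what the diamond-types CRDT counts). On code-point boundaries they are inverses; a quick sketch:

    let s = "a😀b"
    console.log(codePoints_to_index(s, 2))  // 3: "a" is 1 unit, "😀" is 2
    console.log(index_to_codePoints(s, 3))  // 2: back to code points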
-function
-
-
-    while (c < codePoints && i < str.length) {
-        const charCode = str.charCodeAt(i)
-        i += (charCode >= 0xd800 && charCode <= 0xdbff) ? 2 : 1
-        c++
-    }
-    return i
-}
+    function encode_filename(filename) {
+        // Swap all "!" and "/" characters
+        let swapped = filename.replace(/[!/]/g, (match) => (match === "!" ? "/" : "!"))

-
-
-    let swapped = filename.replace(/[!/]/g, (match) => (match === "!" ? "/" : "!"))
+        // Encode the filename using encodeURIComponent()
+        let encoded = encodeURIComponent(swapped)

-
-
+        return encoded
+    }

-
-
+    function decode_filename(encodedFilename) {
+        // Decode the filename using decodeURIComponent()
+        let decoded = decodeURIComponent(encodedFilename)

-
-
-    let decoded = decodeURIComponent(encodedFilename)
+        // Swap all "/" and "!" characters
+        decoded = decoded.replace(/[!/]/g, (match) => (match === "/" ? "!" : "/"))

-
-
+        return decoded
+    }

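encode_filename swaps ! and / before percent-encoding, so the slashes common in keys become filesystem-safe ! characters (which encodeURIComponent leaves alone), while round-tripping exactly through decode_filename. For instance:

    console.log(encode_filename("/docs/readme"))             // "!docs!readme"
    console.log(decode_filename(encode_filename("/a!b/c")))  // "/a!b/c"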
-
-
+    function validate_version_array(x) {
+        if (!Array.isArray(x)) throw new Error(`invalid version array: not an array`)
+        x.sort()
+        for (var xx of x) validate_actor_seq(xx)
+    }

-function
-
-
-
-}
+    function validate_actor_seq(x) {
+        if (typeof x !== 'string') throw new Error(`invalid actor-seq: not a string`)
+        let [actor, seq] = decode_version(x)
+        validate_actor(actor)
+    }

-function
-
-
-
-}
+    function validate_actor(x) {
+        if (typeof x !== 'string') throw new Error(`invalid actor: not a string`)
+        if (Buffer.byteLength(x, 'utf8') >= 50) throw new Error(`actor value too long (max 49): ${x}`) // restriction coming from dt
+    }

-function
-
-
-
+    function is_valid_actor(x) {
+        try {
+            validate_actor(x)
+            return true
+        } catch (e) { }
+    }

-function
-
-
-    return
-    }
-}
+    function decode_version(v) {
+        let m = v.match(/^(.*)-(\d+)$/s)
+        if (!m) throw new Error(`invalid actor-seq version: ${v}`)
+        return [m[1], parseInt(m[2])]
+    }

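decode_version splits an actor-seq string at its last hyphen; because (.*) is greedy, actor names may themselves contain hyphens. For example:

    console.log(decode_version("alice-42"))    // [ 'alice', 42 ]
    console.log(decode_version("bob-2024-7"))  // [ 'bob-2024', 7 ]
    // decode_version("alice") throws: invalid actor-seq version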
-function
-
-
-
-}
+    function validate_patches(patches) {
+        if (!Array.isArray(patches)) throw new Error(`invalid patches: not an array`)
+        for (let p of patches) validate_patch(p)
+    }

-function
-
-
-
+    function validate_patch(x) {
+        if (typeof x != 'object') throw new Error(`invalid patch: not an object`)
+        if (x.unit && x.unit !== 'text') throw new Error(`invalid patch unit '${x.unit}': only 'text' supported`)
+        if (typeof x.range !== 'string') throw new Error(`invalid patch range: must be a string`)
+        if (!x.range.match(/^\s*\[\s*\d+\s*:\s*\d+\s*\]\s*$/)) throw new Error(`invalid patch range: ${x.range}`)
+        if (typeof x.content !== 'string') throw new Error(`invalid patch content: must be a string`)
+    }

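validate_patch accepts the braid-text patch shape: an optional unit that must be 'text', a range such as [3:5], and string content. An illustrative use:

    validate_patch({ unit: "text", range: "[3:5]", content: "hi" })  // ok, no throw
    try { validate_patch({ range: "[3:5]" }) }
    catch (e) { console.log(e.message) }  // invalid patch content: must be a string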
-function
-
-
-    if (typeof x.range !== 'string') throw new Error(`invalid patch range: must be a string`)
-    if (!x.range.match(/^\s*\[\s*\d+\s*:\s*\d+\s*\]\s*$/)) throw new Error(`invalid patch range: ${x.range}`)
-    if (typeof x.content !== 'string') throw new Error(`invalid patch content: must be a string`)
-}
+    function createSimpleCache(size) {
+        const maxSize = size
+        const cache = new Map()

-
-
-
+        return {
+            put(key, value) {
+                if (cache.has(key)) {
+                    // If the key already exists, update its value and move it to the end
+                    cache.delete(key)
+                    cache.set(key, value)
+                } else {
+                    // If the cache is full, remove the oldest entry
+                    if (cache.size >= maxSize) {
+                        const oldestKey = cache.keys().next().value
+                        cache.delete(oldestKey)
+                    }
+                    // Add the new key-value pair
+                    cache.set(key, value)
+                }
+            },

-
-
-
-    // If the key already exists, update its value and move it to the end
-    cache.delete(key)
-    cache.set(key, value)
-} else {
-    // If the cache is full, remove the oldest entry
-    if (cache.size >= maxSize) {
-        const oldestKey = cache.keys().next().value
-        cache.delete(oldestKey)
+            get(key) {
+                if (!cache.has(key)) {
+                    return null
                 }
-    //
+                // Move the accessed item to the end (most recently used)
+                const value = cache.get(key)
+                cache.delete(key)
                 cache.set(key, value)
-
-
-
-    get(key) {
-        if (!cache.has(key)) {
-            return null
-        }
-        // Move the accessed item to the end (most recently used)
-        const value = cache.get(key)
-        cache.delete(key)
-        cache.set(key, value)
-        return value
-    },
+                return value
+            },
+        }
     }
-}

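createSimpleCache is a small LRU built on Map's insertion order: put and get both delete-and-reinsert a touched key to mark it most recently used, and put evicts cache.keys().next().value (the oldest entry) once maxSize is reached. Illustrative usage:

    let lru = createSimpleCache(2)
    lru.put("a", 1); lru.put("b", 2)
    lru.get("a")                             // "a" becomes most recently used
    lru.put("c", 3)                          // evicts "b", the oldest entry
    console.log(lru.get("b"), lru.get("a"))  // null 1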
-function apply_patch(obj, range, content) {
+    function apply_patch(obj, range, content) {

-
-
-
+        // Descend down a bunch of objects until we get to the final object
+        // The final object can be a slice
+        // Set the value in the final object

-
-
+        var path = range,
+            new_stuff = content

-
-
-
+        var path_segment = /^(\.?([^\.\[]+))|(\[((-?\d+):)?(-?\d+)\])|\[("(\\"|[^"])*")\]/
+        var curr_obj = obj,
+            last_obj = null

-
-
-
-
-
-
+        // Handle negative indices, like "[-9]" or "[-0]"
+        function de_neg (x) {
+            return x[0] === '-'
+                ? curr_obj.length - parseInt(x.substr(1), 10)
+                : parseInt(x, 10)
+        }

-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
+        // Now iterate through each segment of the range e.g. [3].a.b[3][9]
+        while (true) {
+            var match = path_segment.exec(path),
+                subpath = match ? match[0] : '',
+                field = match && match[2],
+                slice_start = match && match[5],
+                slice_end = match && match[6],
+                quoted_field = match && match[7]
+
+            // The field could be expressed as ["nnn"] instead of .nnn
+            if (quoted_field) field = JSON.parse(quoted_field)
+
+            slice_start = slice_start && de_neg(slice_start)
+            slice_end = slice_end && de_neg(slice_end)
+
+            // console.log('Descending', {curr_obj, path, subpath, field, slice_start, slice_end, last_obj})
+
+            // If it's the final item, set it
+            if (path.length === subpath.length) {
+                if (!subpath) return new_stuff
+                else if (field) { // Object
+                    if (new_stuff === undefined)
+                        delete curr_obj[field] // - Delete a field in object
+                    else
+                        curr_obj[field] = new_stuff // - Set a field in object
+                } else if (typeof curr_obj === 'string') { // String
+                    console.assert(typeof new_stuff === 'string')
+                    if (!slice_start) {slice_start = slice_end; slice_end = slice_end+1}
+                    if (last_obj) {
+                        var s = last_obj[last_field]
+                        last_obj[last_field] = (s.slice(0, slice_start)
+                                                + new_stuff
+                                                + s.slice(slice_end))
+                    } else
+                        return obj.slice(0, slice_start) + new_stuff + obj.slice(slice_end)
+                } else // Array
+                    if (slice_start) // - Array splice
+                        [].splice.apply(curr_obj, [slice_start, slice_end-slice_start]
+                                                  .concat(new_stuff))
+                    else { // - Array set
+                        console.assert(slice_end >= 0, 'Index '+subpath+' is too small')
+                        console.assert(slice_end <= curr_obj.length - 1,
+                                       'Index '+subpath+' is too big')
+                        curr_obj[slice_end] = new_stuff
+                    }
+
+                return obj
             }

-
+            // Otherwise, descend down the path
+            console.assert(!slice_start, 'No splices allowed in middle of path')
+            last_obj = curr_obj
+            last_field = field || slice_end
+            curr_obj = curr_obj[last_field]
+            path = path.substr(subpath.length)
         }
-
-    // Otherwise, descend down the path
-    console.assert(!slice_start, 'No splices allowed in middle of path')
-    last_obj = curr_obj
-    last_field = field || slice_end
-    curr_obj = curr_obj[last_field]
-    path = path.substr(subpath.length)
     }
-}

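apply_patch is a generic JSON-path setter: it consumes segments like .field, [3], [1:4], or ["key"], descending until the final segment, which it applies as a field set or delete, an array splice, or a string splice. A few illustrative calls:

    console.log(apply_patch({ a: { b: 1 } }, ".a.b", 2))          // { a: { b: 2 } }
    console.log(apply_patch({ xs: [1, 2, 3] }, ".xs[1:3]", [9]))  // { xs: [ 1, 9 ] }
    console.log(apply_patch({ s: "hello" }, ".s[1:4]", "j"))      // { s: 'hjo' }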
-class RangeSet {
-
-
-
+    class RangeSet {
+        constructor() {
+            this.ranges = []
+        }

-
-
+        add_range(low_inclusive, high_inclusive) {
+            if (low_inclusive > high_inclusive) return

-
-
+            const startIndex = this._bs(mid => this.ranges[mid][1] >= low_inclusive - 1, this.ranges.length, true)
+            const endIndex = this._bs(mid => this.ranges[mid][0] <= high_inclusive + 1, -1, false)

-
-
-
-
-
-
-
+            if (startIndex > endIndex) {
+                this.ranges.splice(startIndex, 0, [low_inclusive, high_inclusive])
+            } else {
+                const mergedLow = Math.min(low_inclusive, this.ranges[startIndex][0])
+                const mergedHigh = Math.max(high_inclusive, this.ranges[endIndex][1])
+                const removeCount = endIndex - startIndex + 1
+                this.ranges.splice(startIndex, removeCount, [mergedLow, mergedHigh])
+            }
+        }
+
+        has(x) {
+            var index = this._bs(mid => this.ranges[mid][0] <= x, -1, false)
+            return index !== -1 && x <= this.ranges[index][1]
+        }
+
+        _bs(condition, defaultR, moveLeft) {
+            let low = 0
+            let high = this.ranges.length - 1
+            let result = defaultR
+
+            while (low <= high) {
+                const mid = Math.floor((low + high) / 2)
+                if (condition(mid)) {
+                    result = mid
+                    if (moveLeft) high = mid - 1
+                    else low = mid + 1
+                } else {
+                    if (moveLeft) low = mid + 1
+                    else high = mid - 1
+                }
+            }
+            return result
         }
     }

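RangeSet keeps a sorted array of disjoint inclusive ranges. add_range binary-searches (via _bs) for every existing range within distance one of the new range (the -1/+1 in the probes make touching ranges count as overlapping) and merges the whole run with a single splice. Illustrative usage:

    let rs = new RangeSet()
    rs.add_range(1, 3)
    rs.add_range(4, 6)                 // touches [1,3], so they merge
    rs.add_range(10, 12)
    console.log(rs.ranges)             // [ [ 1, 6 ], [ 10, 12 ] ]
    console.log(rs.has(5), rs.has(8))  // true false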
-
-
-        return index !== -1 && x <= this.ranges[index][1]
+    function ascii_ify(s) {
+        return s.replace(/[^\x20-\x7E]/g, c => '\\u' + c.charCodeAt(0).toString(16).padStart(4, '0'))
     }

-
-
-
-
-
-    while (low <= high) {
-        const mid = Math.floor((low + high) / 2)
-        if (condition(mid)) {
-            result = mid
-            if (moveLeft) high = mid - 1
-            else low = mid + 1
-        } else {
-            if (moveLeft) low = mid + 1
-            else high = mid - 1
-        }
+    function sorted_set_find(arr, val) {
+        var left = 0, right = arr.length
+        while (left < right) {
+            var mid = (left + right) >> 1
+            arr[mid] < val ? left = mid + 1 : right = mid
         }
-    return
+        return left
     }
-}

-function
-
-
+    function sorted_set_insert(arr, val) {
+        var i = sorted_set_find(arr, val)
+        if (arr[i] !== val) arr.splice(i, 0, val)
+    }

-function
-
-
-    var mid = (left + right) >> 1
-    arr[mid] < val ? left = mid + 1 : right = mid
+    function sorted_set_delete(arr, val) {
+        var i = sorted_set_find(arr, val)
+        if (arr[i] === val) arr.splice(i, 1)
     }
-    return left
-}

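sorted_set_find is a lower-bound binary search; insert and delete use it to keep a plain array acting as a sorted set without duplicates. For example:

    let set = []
    sorted_set_insert(set, "b"); sorted_set_insert(set, "a"); sorted_set_insert(set, "b")
    console.log(set)                             // [ 'a', 'b' ]
    sorted_set_delete(set, "a")
    console.log(set, sorted_set_find(set, "b"))  // [ 'b' ] 0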
-function
-
-
-}
+    function get_digest(s) {
+        if (typeof s === 'string') s = Buffer.from(s, "utf8")
+        return `sha-256=:${require('crypto').createHash('sha256').update(s).digest('base64')}:`
+    }

-
-    var i = sorted_set_find(arr, val)
-    if (arr[i] === val) arr.splice(i, 1)
-}
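get_digest returns a SHA-256 digest in the colon-wrapped, base64 byte-sequence form used by HTTP Digest-style header fields. For example:

    console.log(get_digest(""))
    // sha-256=:47DEQpj8HBSa+/TImW+5JCeuQeRkm5NMpJWZG3hSuFU=: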
+    braid_text.get_resource = get_resource

-
-
-
-}
+    braid_text.encode_filename = encode_filename
+    braid_text.decode_filename = decode_filename
+    braid_text.get_files_for_key = get_files_for_key

-braid_text.
+    braid_text.dt_get = dt_get
+    braid_text.dt_get_patches = dt_get_patches
+    braid_text.dt_parse = dt_parse
+    braid_text.dt_get_local_version = dt_get_local_version
+    braid_text.dt_create_bytes = dt_create_bytes

-braid_text.
-braid_text.
-braid_text.get_files_for_key = get_files_for_key
+    braid_text.decode_version = decode_version
+    braid_text.RangeSet = RangeSet

-braid_text.
-braid_text.dt_get_patches = dt_get_patches
-braid_text.dt_parse = dt_parse
-braid_text.dt_get_local_version = dt_get_local_version
-braid_text.dt_create_bytes = dt_create_bytes
+    braid_text.create_braid_text = create_braid_text

-braid_text
-
+    return braid_text
+}

-module.exports =
+module.exports = create_braid_text()
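Taken together, the change wraps all of index.js's state and helpers in a create_braid_text() factory: module.exports is still a ready-to-use singleton, but the factory is now exposed on it, so callers can spin up additional, independent instances. A hedged usage sketch:

    const braid_text = require("braid-text")      // the default instance
    const other = braid_text.create_braid_text()  // a second, isolated instance
    // each instance carries its own configuration and cache state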