braid-text 0.2.64 → 0.2.66
This diff shows the published contents of two package versions as they appear in their respective public registries. It is provided for informational purposes only.
- package/editor.html +12 -70
- package/index.js +1836 -1797
- package/markdown-editor.html +76 -287
- package/package.json +1 -1
- package/server-demo.js +5 -10
- package/test/test.html +63 -0
- package/web-utils.js +75 -0
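The largest change in index.js is structural: module-level state (the defaults, cache, and waiting_puts counter) now lives inside a create_braid_text() factory, so each instance carries independent state, and a meta_file_save_period_ms default is added. Below is a minimal sketch of how the resulting object might be used; the require() form and the port are assumptions for illustration and are not shown in this diff:

    // Hypothetical usage; assumes the package exports the factory's product.
    let braid_text = require("braid-text")
    braid_text.db_folder = './braid-text-db'  // default set by the factory (see diff below)
    braid_text.verbose = true

    require('http').createServer((req, res) => {
        // options.key defaults to req.url.split('?')[0] in the new serve() code
        braid_text.serve(req, res)
    }).listen(8888)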
package/index.js
CHANGED
|
@@ -3,2194 +3,2233 @@ let { Doc, OpLog, Branch } = require("@braid.org/diamond-types-node")
|
|
|
3
3
|
let braidify = require("braid-http").http_server
|
|
4
4
|
let fs = require("fs")
|
|
5
5
|
|
|
6
|
-
|
|
7
|
-
|
|
8
|
-
|
|
9
|
-
|
|
10
|
-
|
|
11
|
-
|
|
6
|
+
function create_braid_text() {
|
|
7
|
+
let braid_text = {
|
|
8
|
+
verbose: false,
|
|
9
|
+
db_folder: './braid-text-db',
|
|
10
|
+
length_cache_size: 10,
|
|
11
|
+
meta_file_save_period_ms: 1000,
|
|
12
|
+
cache: {}
|
|
13
|
+
}
|
|
12
14
|
|
|
13
|
-
let waiting_puts = 0
|
|
15
|
+
let waiting_puts = 0
|
|
14
16
|
|
|
15
|
-
let max_encoded_key_size = 240
|
|
17
|
+
let max_encoded_key_size = 240
|
|
16
18
|
|
|
17
|
-
braid_text.serve = async (req, res, options = {}) => {
|
|
18
|
-
|
|
19
|
-
|
|
20
|
-
|
|
21
|
-
|
|
22
|
-
|
|
23
|
-
|
|
24
|
-
braid_text.free_cors(res)
|
|
19
|
+
braid_text.serve = async (req, res, options = {}) => {
|
|
20
|
+
options = {
|
|
21
|
+
key: req.url.split('?')[0], // Default key
|
|
22
|
+
put_cb: (key, val) => { }, // Default callback when a PUT changes a key
|
|
23
|
+
...options // Override with all options passed in
|
|
24
|
+
}
|
|
25
25
|
|
|
26
|
-
|
|
27
|
-
res.writeHead(statusCode, statusText, headers)
|
|
28
|
-
res.end(x ?? '')
|
|
29
|
-
}
|
|
26
|
+
braid_text.free_cors(res)
|
|
30
27
|
|
|
31
|
-
|
|
32
|
-
|
|
33
|
-
|
|
28
|
+
function my_end(statusCode, x, statusText, headers) {
|
|
29
|
+
res.writeHead(statusCode, statusText, headers)
|
|
30
|
+
res.end(x ?? '')
|
|
31
|
+
}
|
|
34
32
|
|
|
35
|
-
|
|
36
|
-
|
|
37
|
-
|
|
38
|
-
return my_end(500, "The server failed to process this request. The error generated was: " + e)
|
|
39
|
-
}
|
|
33
|
+
let resource = null
|
|
34
|
+
try {
|
|
35
|
+
resource = await get_resource(options.key)
|
|
40
36
|
|
|
41
|
-
|
|
37
|
+
braidify(req, res)
|
|
38
|
+
if (res.is_multiplexer) return
|
|
39
|
+
} catch (e) {
|
|
40
|
+
return my_end(500, "The server failed to process this request. The error generated was: " + e)
|
|
41
|
+
}
|
|
42
42
|
|
|
43
|
-
|
|
44
|
-
if (req.headers['selection-sharing-prototype']) {
|
|
45
|
-
res.setHeader('Content-Type', 'application/json')
|
|
43
|
+
let peer = req.headers["peer"]
|
|
46
44
|
|
|
47
|
-
|
|
48
|
-
if (
|
|
45
|
+
// selection sharing prototype
|
|
46
|
+
if (req.headers['selection-sharing-prototype']) {
|
|
47
|
+
res.setHeader('Content-Type', 'application/json')
|
|
49
48
|
|
|
50
|
-
|
|
51
|
-
if (!
|
|
52
|
-
return my_end(200, JSON.stringify(resource.selections))
|
|
53
|
-
} else {
|
|
54
|
-
var client = {peer, res}
|
|
55
|
-
resource.selection_clients.add(client)
|
|
56
|
-
res.startSubscription({
|
|
57
|
-
onClose: () => resource.selection_clients.delete(client)
|
|
58
|
-
})
|
|
59
|
-
res.sendUpdate({ body: JSON.stringify(resource.selections) })
|
|
60
|
-
return
|
|
61
|
-
}
|
|
62
|
-
} else if (req.method == "PUT" || req.method == "POST" || req.method == "PATCH") {
|
|
63
|
-
var body = (await req.patches())[0].content_text
|
|
64
|
-
var json = JSON.parse(body)
|
|
65
|
-
|
|
66
|
-
// only keep new selections if they are newer
|
|
67
|
-
for (var [user, selection] of Object.entries(json)) {
|
|
68
|
-
if (resource.selections[user] && resource.selections[user].time > selection.time) delete json[user]
|
|
69
|
-
else resource.selections[user] = selection
|
|
70
|
-
}
|
|
49
|
+
if (!resource.selections) resource.selections = {}
|
|
50
|
+
if (!resource.selection_clients) resource.selection_clients = new Set()
|
|
71
51
|
|
|
72
|
-
|
|
73
|
-
|
|
74
|
-
|
|
75
|
-
|
|
76
|
-
|
|
77
|
-
|
|
52
|
+
if (req.method === "GET" || req.method === "HEAD") {
|
|
53
|
+
if (!req.subscribe) {
|
|
54
|
+
return my_end(200, JSON.stringify(resource.selections))
|
|
55
|
+
} else {
|
|
56
|
+
var client = {peer, res}
|
|
57
|
+
resource.selection_clients.add(client)
|
|
58
|
+
res.startSubscription({
|
|
59
|
+
onClose: () => resource.selection_clients.delete(client)
|
|
60
|
+
})
|
|
61
|
+
res.sendUpdate({ body: JSON.stringify(resource.selections) })
|
|
62
|
+
return
|
|
63
|
+
}
|
|
64
|
+
} else if (req.method == "PUT" || req.method == "POST" || req.method == "PATCH") {
|
|
65
|
+
var body = (await req.patches())[0].content_text
|
|
66
|
+
var json = JSON.parse(body)
|
|
67
|
+
|
|
68
|
+
// only keep new selections if they are newer
|
|
69
|
+
for (var [user, selection] of Object.entries(json)) {
|
|
70
|
+
if (resource.selections[user] && resource.selections[user].time > selection.time) delete json[user]
|
|
71
|
+
else resource.selections[user] = selection
|
|
78
72
|
}
|
|
79
73
|
|
|
80
|
-
|
|
81
|
-
|
|
82
|
-
for (
|
|
83
|
-
if (
|
|
84
|
-
|
|
85
|
-
|
|
74
|
+
// remove old selections that are too old
|
|
75
|
+
var long_ago = Date.now() - 1000 * 60 * 5
|
|
76
|
+
for (var [user, selection] of Object.entries(resource.selections))
|
|
77
|
+
if (selection.time < long_ago) {
|
|
78
|
+
delete resource.selections[user]
|
|
79
|
+
delete json[user]
|
|
80
|
+
}
|
|
81
|
+
|
|
82
|
+
body = JSON.stringify(json)
|
|
83
|
+
if (body.length > 2)
|
|
84
|
+
for (let client of resource.selection_clients)
|
|
85
|
+
if (client.peer !== peer) client.res.sendUpdate({ body })
|
|
86
|
+
|
|
87
|
+
return my_end(200)
|
|
88
|
+
}
|
|
86
89
|
}
|
|
87
|
-
}
|
|
88
90
|
|
|
89
|
-
|
|
90
|
-
|
|
91
|
-
|
|
92
|
-
|
|
93
|
-
|
|
94
|
-
|
|
95
|
-
|
|
96
|
-
|
|
97
|
-
|
|
98
|
-
|
|
99
|
-
|
|
100
|
-
|
|
101
|
-
|
|
102
|
-
|
|
103
|
-
|
|
104
|
-
|
|
105
|
-
|
|
106
|
-
|
|
107
|
-
|
|
108
|
-
|
|
109
|
-
|
|
91
|
+
let merge_type = req.headers["merge-type"]
|
|
92
|
+
if (!merge_type) merge_type = 'simpleton'
|
|
93
|
+
if (merge_type !== 'simpleton' && merge_type !== 'dt') return my_end(400, `Unknown merge type: ${merge_type}`)
|
|
94
|
+
|
|
95
|
+
// set default content type of text/plain
|
|
96
|
+
if (!res.getHeader('content-type')) res.setHeader('Content-Type', 'text/plain')
|
|
97
|
+
|
|
98
|
+
// no matter what the content type is,
|
|
99
|
+
// we want to set the charset to utf-8
|
|
100
|
+
const contentType = res.getHeader('Content-Type')
|
|
101
|
+
const parsedContentType = contentType.split(';').map(part => part.trim())
|
|
102
|
+
const charsetParam = parsedContentType.find(part => part.toLowerCase().startsWith('charset='))
|
|
103
|
+
if (!charsetParam)
|
|
104
|
+
res.setHeader('Content-Type', `${contentType}; charset=utf-8`)
|
|
105
|
+
else if (charsetParam.toLowerCase() !== 'charset=utf-8') {
|
|
106
|
+
// Replace the existing charset with utf-8
|
|
107
|
+
const updatedContentType = parsedContentType
|
|
108
|
+
.map(part => (part.toLowerCase().startsWith('charset=') ? 'charset=utf-8' : part))
|
|
109
|
+
.join('; ');
|
|
110
|
+
res.setHeader('Content-Type', updatedContentType);
|
|
111
|
+
}
|
|
110
112
|
|
|
111
|
-
|
|
113
|
+
if (req.method == "OPTIONS") return my_end(200)
|
|
112
114
|
|
|
113
|
-
|
|
114
|
-
|
|
115
|
-
|
|
116
|
-
|
|
115
|
+
if (req.method == "DELETE") {
|
|
116
|
+
await braid_text.delete(resource)
|
|
117
|
+
return my_end(200)
|
|
118
|
+
}
|
|
117
119
|
|
|
118
|
-
|
|
119
|
-
|
|
120
|
+
var get_current_version = () => ascii_ify(
|
|
121
|
+
resource.version.map(x => JSON.stringify(x)).join(", "))
|
|
120
122
|
|
|
121
|
-
|
|
122
|
-
|
|
123
|
-
|
|
124
|
-
|
|
125
|
-
|
|
126
|
-
|
|
127
|
-
|
|
128
|
-
|
|
129
|
-
|
|
130
|
-
|
|
131
|
-
|
|
132
|
-
|
|
123
|
+
if (req.method == "GET" || req.method == "HEAD") {
|
|
124
|
+
// make sure we have the necessary version and parents
|
|
125
|
+
var unknowns = []
|
|
126
|
+
for (var event of (req.version || []).concat(req.parents || [])) {
|
|
127
|
+
var [actor, seq] = decode_version(event)
|
|
128
|
+
if (!resource.actor_seqs[actor]?.has(seq))
|
|
129
|
+
unknowns.push(event)
|
|
130
|
+
}
|
|
131
|
+
if (unknowns.length)
|
|
132
|
+
return my_end(309, '', "Version Unknown Here", {
|
|
133
|
+
Version: ascii_ify(unknowns.map(e => JSON.stringify(e)).join(', '))
|
|
134
|
+
})
|
|
133
135
|
|
|
134
|
-
|
|
135
|
-
|
|
136
|
+
if (!req.subscribe) {
|
|
137
|
+
res.setHeader("Accept-Subscribe", "true")
|
|
138
|
+
|
|
139
|
+
// special case for HEAD asking for version/parents,
|
|
140
|
+
// to be faster by not reconstructing body
|
|
141
|
+
if (req.method === "HEAD" && (req.version || req.parents))
|
|
142
|
+
return my_end(200)
|
|
143
|
+
|
|
144
|
+
let x = null
|
|
145
|
+
try {
|
|
146
|
+
x = await braid_text.get(resource, {
|
|
147
|
+
version: req.version,
|
|
148
|
+
parents: req.parents,
|
|
149
|
+
transfer_encoding: req.headers['accept-transfer-encoding']
|
|
150
|
+
})
|
|
151
|
+
} catch (e) {
|
|
152
|
+
return my_end(500, "The server failed to get something. The error generated was: " + e)
|
|
153
|
+
}
|
|
136
154
|
|
|
137
|
-
|
|
138
|
-
|
|
139
|
-
|
|
140
|
-
|
|
155
|
+
if (req.headers['accept-transfer-encoding'] === 'dt') {
|
|
156
|
+
res.setHeader("Current-Version", get_current_version())
|
|
157
|
+
res.setHeader("X-Transfer-Encoding", 'dt')
|
|
158
|
+
res.setHeader("Content-Length", x.body.length)
|
|
159
|
+
return my_end(209, req.method === "HEAD" ? null : x.body, 'Multiresponse')
|
|
160
|
+
} else {
|
|
161
|
+
if (req.version || req.parents)
|
|
162
|
+
res.setHeader("Current-Version", get_current_version())
|
|
163
|
+
res.setHeader("Version", ascii_ify(x.version.map((x) => JSON.stringify(x)).join(", ")))
|
|
164
|
+
var buffer = Buffer.from(x.body, "utf8")
|
|
165
|
+
res.setHeader("Repr-Digest", get_digest(buffer))
|
|
166
|
+
res.setHeader("Content-Length", buffer.length)
|
|
167
|
+
return my_end(200, req.method === "HEAD" ? null : buffer)
|
|
168
|
+
}
|
|
169
|
+
} else {
|
|
170
|
+
if (!res.hasHeader("editable")) res.setHeader("Editable", "true")
|
|
171
|
+
res.setHeader("Merge-Type", merge_type)
|
|
172
|
+
res.setHeader("Current-Version", get_current_version())
|
|
173
|
+
if (req.method == "HEAD") return my_end(200)
|
|
141
174
|
|
|
142
|
-
|
|
143
|
-
|
|
144
|
-
x = await braid_text.get(resource, {
|
|
175
|
+
let options = {
|
|
176
|
+
peer,
|
|
145
177
|
version: req.version,
|
|
146
178
|
parents: req.parents,
|
|
147
|
-
|
|
179
|
+
merge_type,
|
|
180
|
+
accept_encoding:
|
|
181
|
+
req.headers['x-accept-encoding'] ??
|
|
182
|
+
req.headers['accept-encoding'],
|
|
183
|
+
subscribe: x => {
|
|
184
|
+
|
|
185
|
+
// this is a sanity/rhobustness check..
|
|
186
|
+
// ..this digest is checked on the client..
|
|
187
|
+
if (x.version && v_eq(x.version, resource.version))
|
|
188
|
+
x["Repr-Digest"] = get_digest(resource.val)
|
|
189
|
+
|
|
190
|
+
res.sendVersion(x)
|
|
191
|
+
},
|
|
192
|
+
write: (x) => res.write(x)
|
|
193
|
+
}
|
|
194
|
+
|
|
195
|
+
res.startSubscription({
|
|
196
|
+
onClose: () => {
|
|
197
|
+
if (merge_type === "dt") resource.clients.delete(options)
|
|
198
|
+
else resource.simpleton_clients.delete(options)
|
|
199
|
+
}
|
|
148
200
|
})
|
|
149
|
-
} catch (e) {
|
|
150
|
-
return my_end(500, "The server failed to get something. The error generated was: " + e)
|
|
151
|
-
}
|
|
152
201
|
|
|
153
|
-
|
|
154
|
-
|
|
155
|
-
|
|
156
|
-
|
|
157
|
-
|
|
158
|
-
} else {
|
|
159
|
-
if (req.version || req.parents)
|
|
160
|
-
res.setHeader("Current-Version", get_current_version())
|
|
161
|
-
res.setHeader("Version", ascii_ify(x.version.map((x) => JSON.stringify(x)).join(", ")))
|
|
162
|
-
var buffer = Buffer.from(x.body, "utf8")
|
|
163
|
-
res.setHeader("Repr-Digest", get_digest(buffer))
|
|
164
|
-
res.setHeader("Content-Length", buffer.length)
|
|
165
|
-
return my_end(200, req.method === "HEAD" ? null : buffer)
|
|
202
|
+
try {
|
|
203
|
+
return await braid_text.get(resource, options)
|
|
204
|
+
} catch (e) {
|
|
205
|
+
return my_end(500, "The server failed to get something. The error generated was: " + e)
|
|
206
|
+
}
|
|
166
207
|
}
|
|
167
|
-
}
|
|
168
|
-
|
|
169
|
-
|
|
170
|
-
|
|
171
|
-
|
|
172
|
-
|
|
173
|
-
let options = {
|
|
174
|
-
peer,
|
|
175
|
-
version: req.version,
|
|
176
|
-
parents: req.parents,
|
|
177
|
-
merge_type,
|
|
178
|
-
accept_encoding:
|
|
179
|
-
req.headers['x-accept-encoding'] ??
|
|
180
|
-
req.headers['accept-encoding'],
|
|
181
|
-
subscribe: x => {
|
|
182
|
-
|
|
183
|
-
// this is a sanity/rhobustness check..
|
|
184
|
-
// ..this digest is checked on the client..
|
|
185
|
-
if (x.version && v_eq(x.version, resource.version))
|
|
186
|
-
x["Repr-Digest"] = get_digest(resource.val)
|
|
187
|
-
|
|
188
|
-
res.sendVersion(x)
|
|
189
|
-
},
|
|
190
|
-
write: (x) => res.write(x)
|
|
208
|
+
}
|
|
209
|
+
|
|
210
|
+
if (req.method == "PUT" || req.method == "POST" || req.method == "PATCH") {
|
|
211
|
+
if (waiting_puts >= 100) {
|
|
212
|
+
console.log(`The server is busy.`)
|
|
213
|
+
return my_end(503, "The server is busy.")
|
|
191
214
|
}
|
|
192
215
|
|
|
193
|
-
|
|
194
|
-
|
|
195
|
-
|
|
196
|
-
|
|
197
|
-
}
|
|
198
|
-
|
|
216
|
+
waiting_puts++
|
|
217
|
+
if (braid_text.verbose) console.log(`waiting_puts(after++) = ${waiting_puts}`)
|
|
218
|
+
let done_my_turn = (statusCode, x, statusText, headers) => {
|
|
219
|
+
waiting_puts--
|
|
220
|
+
if (braid_text.verbose) console.log(`waiting_puts(after--) = ${waiting_puts}`)
|
|
221
|
+
my_end(statusCode, x, statusText, headers)
|
|
222
|
+
}
|
|
199
223
|
|
|
200
224
|
try {
|
|
201
|
-
|
|
202
|
-
|
|
203
|
-
return my_end(500, "The server failed to get something. The error generated was: " + e)
|
|
204
|
-
}
|
|
205
|
-
}
|
|
206
|
-
}
|
|
225
|
+
var patches = await req.patches()
|
|
226
|
+
for (let p of patches) p.content = p.content_text
|
|
207
227
|
|
|
208
|
-
|
|
209
|
-
|
|
210
|
-
|
|
211
|
-
|
|
212
|
-
|
|
228
|
+
let body = null
|
|
229
|
+
if (patches[0]?.unit === 'everything') {
|
|
230
|
+
body = patches[0].content
|
|
231
|
+
patches = null
|
|
232
|
+
}
|
|
213
233
|
|
|
214
|
-
|
|
215
|
-
|
|
216
|
-
|
|
217
|
-
|
|
218
|
-
|
|
219
|
-
|
|
220
|
-
|
|
234
|
+
if (req.parents) {
|
|
235
|
+
await wait_for_events(
|
|
236
|
+
options.key,
|
|
237
|
+
req.parents,
|
|
238
|
+
resource.actor_seqs,
|
|
239
|
+
// approximation of memory usage for this update
|
|
240
|
+
body ? body.length :
|
|
241
|
+
patches.reduce((a, b) => a + b.range.length + b.content.length, 0),
|
|
242
|
+
options.recv_buffer_max_time,
|
|
243
|
+
options.recv_buffer_max_space)
|
|
244
|
+
|
|
245
|
+
// make sure we have the necessary parents now
|
|
246
|
+
var unknowns = []
|
|
247
|
+
for (var event of req.parents) {
|
|
248
|
+
var [actor, seq] = decode_version(event)
|
|
249
|
+
if (!resource.actor_seqs[actor]?.has(seq)) unknowns.push(event)
|
|
250
|
+
}
|
|
251
|
+
if (unknowns.length)
|
|
252
|
+
return done_my_turn(309, '', "Version Unknown Here", {
|
|
253
|
+
Version: ascii_ify(unknowns.map(e => JSON.stringify(e)).join(', ')),
|
|
254
|
+
'Retry-After': '1'
|
|
255
|
+
})
|
|
256
|
+
}
|
|
221
257
|
|
|
222
|
-
|
|
223
|
-
var patches = await req.patches()
|
|
224
|
-
for (let p of patches) p.content = p.content_text
|
|
258
|
+
var {change_count} = await braid_text.put(resource, { peer, version: req.version, parents: req.parents, patches, body, merge_type })
|
|
225
259
|
|
|
226
|
-
|
|
227
|
-
|
|
228
|
-
|
|
229
|
-
|
|
230
|
-
|
|
260
|
+
// if Repr-Digest is set,
|
|
261
|
+
// and the request version is also our new current version,
|
|
262
|
+
// then verify the digest..
|
|
263
|
+
if (req.headers['repr-digest'] &&
|
|
264
|
+
v_eq(req.version, resource.version) &&
|
|
265
|
+
req.headers['repr-digest'] !== get_digest(resource.val)) {
|
|
266
|
+
console.log(`repr-digest mismatch!`)
|
|
231
267
|
|
|
232
|
-
|
|
233
|
-
|
|
234
|
-
|
|
235
|
-
req.parents,
|
|
236
|
-
resource.actor_seqs,
|
|
237
|
-
// approximation of memory usage for this update
|
|
238
|
-
body ? body.length :
|
|
239
|
-
patches.reduce((a, b) => a + b.range.length + b.content.length, 0),
|
|
240
|
-
options.recv_buffer_max_time,
|
|
241
|
-
options.recv_buffer_max_space)
|
|
242
|
-
|
|
243
|
-
// make sure we have the necessary parents now
|
|
244
|
-
var unknowns = []
|
|
245
|
-
for (var event of req.parents) {
|
|
246
|
-
var [actor, seq] = decode_version(event)
|
|
247
|
-
if (!resource.actor_seqs[actor]?.has(seq)) unknowns.push(event)
|
|
268
|
+
// we return a special 550 error code,
|
|
269
|
+
// which simpleton will pick up on to stop retrying
|
|
270
|
+
return done_my_turn(550, "repr-digest mismatch!")
|
|
248
271
|
}
|
|
249
|
-
if (unknowns.length)
|
|
250
|
-
return done_my_turn(309, '', "Version Unknown Here", {
|
|
251
|
-
Version: ascii_ify(unknowns.map(e => JSON.stringify(e)).join(', ')),
|
|
252
|
-
'Retry-After': '1'
|
|
253
|
-
})
|
|
254
|
-
}
|
|
255
272
|
|
|
256
|
-
|
|
257
|
-
|
|
258
|
-
|
|
259
|
-
// and the request version is also our new current version,
|
|
260
|
-
// then verify the digest..
|
|
261
|
-
if (req.headers['repr-digest'] &&
|
|
262
|
-
v_eq(req.version, resource.version) &&
|
|
263
|
-
req.headers['repr-digest'] !== get_digest(resource.val)) {
|
|
264
|
-
console.log(`repr-digest mismatch!`)
|
|
273
|
+
if (req.version) got_event(options.key, req.version[0], change_count)
|
|
274
|
+
|
|
275
|
+
res.setHeader("Version", get_current_version())
|
|
265
276
|
|
|
266
|
-
|
|
267
|
-
|
|
268
|
-
|
|
277
|
+
options.put_cb(options.key, resource.val)
|
|
278
|
+
} catch (e) {
|
|
279
|
+
console.log(`${req.method} ERROR: ${e.stack}`)
|
|
280
|
+
return done_my_turn(500, "The server failed to apply this version. The error generated was: " + e)
|
|
269
281
|
}
|
|
270
282
|
|
|
271
|
-
|
|
272
|
-
|
|
273
|
-
res.setHeader("Version", get_current_version())
|
|
274
|
-
|
|
275
|
-
options.put_cb(options.key, resource.val)
|
|
276
|
-
} catch (e) {
|
|
277
|
-
console.log(`${req.method} ERROR: ${e.stack}`)
|
|
278
|
-
return done_my_turn(500, "The server failed to apply this version. The error generated was: " + e)
|
|
283
|
+
return done_my_turn(200)
|
|
279
284
|
}
|
|
280
285
|
|
|
281
|
-
|
|
286
|
+
throw new Error("unknown")
|
|
282
287
|
}
|
|
283
288
|
|
|
284
|
-
|
|
285
|
-
}
|
|
286
|
-
|
|
287
|
-
braid_text.delete = async (key) => {
|
|
288
|
-
await braid_text.put(key, {body: ''})
|
|
289
|
-
}
|
|
290
|
-
|
|
291
|
-
braid_text.get = async (key, options) => {
|
|
292
|
-
if (!options) {
|
|
293
|
-
// if it doesn't exist already, don't create it in this case
|
|
294
|
-
if (!braid_text.cache[key]) return
|
|
295
|
-
return (await get_resource(key)).val
|
|
289
|
+
braid_text.delete = async (key) => {
|
|
290
|
+
await braid_text.put(key, {body: ''})
|
|
296
291
|
}
|
|
297
292
|
|
|
298
|
-
|
|
299
|
-
|
|
300
|
-
|
|
301
|
-
|
|
302
|
-
|
|
303
|
-
|
|
304
|
-
if (!options.subscribe) {
|
|
305
|
-
if (options.transfer_encoding === 'dt') {
|
|
306
|
-
// optimization: if requesting current version
|
|
307
|
-
// pretend as if they didn't set a version,
|
|
308
|
-
// and let it be handled as the default
|
|
309
|
-
var op_v = options.version
|
|
310
|
-
if (op_v && v_eq(op_v, version)) op_v = null
|
|
311
|
-
|
|
312
|
-
var bytes = null
|
|
313
|
-
if (op_v || options.parents) {
|
|
314
|
-
if (op_v) {
|
|
315
|
-
var doc = dt_get(resource.doc, op_v)
|
|
316
|
-
bytes = doc.toBytes()
|
|
317
|
-
} else {
|
|
318
|
-
bytes = resource.doc.toBytes()
|
|
319
|
-
var doc = Doc.fromBytes(bytes)
|
|
320
|
-
}
|
|
321
|
-
if (options.parents) {
|
|
322
|
-
bytes = doc.getPatchSince(
|
|
323
|
-
dt_get_local_version(bytes, options.parents))
|
|
324
|
-
}
|
|
325
|
-
doc.free()
|
|
326
|
-
} else bytes = resource.doc.toBytes()
|
|
327
|
-
return { body: bytes }
|
|
293
|
+
braid_text.get = async (key, options) => {
|
|
294
|
+
if (!options) {
|
|
295
|
+
// if it doesn't exist already, don't create it in this case
|
|
296
|
+
if (!braid_text.cache[key]) return
|
|
297
|
+
return (await get_resource(key)).val
|
|
328
298
|
}
|
|
329
299
|
|
|
330
|
-
|
|
331
|
-
|
|
332
|
-
body: dt_get_string(resource.doc, options.version || options.parents)
|
|
333
|
-
} : {
|
|
334
|
-
version,
|
|
335
|
-
body: resource.doc.get()
|
|
336
|
-
}
|
|
337
|
-
} else {
|
|
338
|
-
if (options.merge_type != "dt") {
|
|
339
|
-
let x = { version }
|
|
340
|
-
|
|
341
|
-
if (!options.parents && !options.version) {
|
|
342
|
-
x.parents = []
|
|
343
|
-
x.body = resource.doc.get()
|
|
344
|
-
options.subscribe(x)
|
|
345
|
-
} else {
|
|
346
|
-
x.parents = options.version ? options.version : options.parents
|
|
347
|
-
options.my_last_seen_version = x.parents
|
|
300
|
+
if (options.version) validate_version_array(options.version)
|
|
301
|
+
if (options.parents) validate_version_array(options.parents)
|
|
348
302
|
|
|
349
|
-
|
|
350
|
-
|
|
351
|
-
|
|
352
|
-
|
|
353
|
-
|
|
354
|
-
|
|
303
|
+
let resource = (typeof key == 'string') ? await get_resource(key) : key
|
|
304
|
+
var version = resource.version
|
|
305
|
+
|
|
306
|
+
if (!options.subscribe) {
|
|
307
|
+
if (options.transfer_encoding === 'dt') {
|
|
308
|
+
// optimization: if requesting current version
|
|
309
|
+
// pretend as if they didn't set a version,
|
|
310
|
+
// and let it be handled as the default
|
|
311
|
+
var op_v = options.version
|
|
312
|
+
if (op_v && v_eq(op_v, version)) op_v = null
|
|
313
|
+
|
|
314
|
+
var bytes = null
|
|
315
|
+
if (op_v || options.parents) {
|
|
316
|
+
if (op_v) {
|
|
317
|
+
var doc = dt_get(resource.doc, op_v)
|
|
318
|
+
bytes = doc.toBytes()
|
|
319
|
+
} else {
|
|
320
|
+
bytes = resource.doc.toBytes()
|
|
321
|
+
var doc = Doc.fromBytes(bytes)
|
|
322
|
+
}
|
|
323
|
+
if (options.parents) {
|
|
324
|
+
bytes = doc.getPatchSince(
|
|
325
|
+
dt_get_local_version(bytes, options.parents))
|
|
326
|
+
}
|
|
327
|
+
doc.free()
|
|
328
|
+
} else bytes = resource.doc.toBytes()
|
|
329
|
+
return { body: bytes }
|
|
355
330
|
}
|
|
356
331
|
|
|
357
|
-
options.
|
|
358
|
-
|
|
332
|
+
return options.version || options.parents ? {
|
|
333
|
+
version: options.version || options.parents,
|
|
334
|
+
body: dt_get_string(resource.doc, options.version || options.parents)
|
|
335
|
+
} : {
|
|
336
|
+
version,
|
|
337
|
+
body: resource.doc.get()
|
|
338
|
+
}
|
|
359
339
|
} else {
|
|
340
|
+
if (options.merge_type != "dt") {
|
|
341
|
+
let x = { version }
|
|
360
342
|
|
|
361
|
-
|
|
362
|
-
|
|
363
|
-
|
|
364
|
-
|
|
365
|
-
options.subscribe({ encoding: 'dt', body: new Doc().toBytes() })
|
|
343
|
+
if (!options.parents && !options.version) {
|
|
344
|
+
x.parents = []
|
|
345
|
+
x.body = resource.doc.get()
|
|
346
|
+
options.subscribe(x)
|
|
366
347
|
} else {
|
|
367
|
-
|
|
368
|
-
|
|
369
|
-
|
|
370
|
-
|
|
371
|
-
|
|
372
|
-
|
|
348
|
+
x.parents = options.version ? options.version : options.parents
|
|
349
|
+
options.my_last_seen_version = x.parents
|
|
350
|
+
|
|
351
|
+
// only send them a version from these parents if we have these parents (otherwise we'll assume these parents are more recent, probably versions they created but haven't sent us yet, and we'll send them appropriate rebased updates when they send us these versions)
|
|
352
|
+
let local_version = OpLog_remote_to_local(resource.doc, x.parents)
|
|
353
|
+
if (local_version) {
|
|
354
|
+
x.patches = get_xf_patches(resource.doc, local_version)
|
|
355
|
+
options.subscribe(x)
|
|
373
356
|
}
|
|
374
|
-
options.subscribe({ encoding: 'dt', body: bytes })
|
|
375
357
|
}
|
|
358
|
+
|
|
359
|
+
options.my_last_sent_version = x.version
|
|
360
|
+
resource.simpleton_clients.add(options)
|
|
376
361
|
} else {
|
|
377
|
-
var updates = null
|
|
378
|
-
if (!options.parents && !options.version) {
|
|
379
|
-
options.subscribe({
|
|
380
|
-
version: [],
|
|
381
|
-
parents: [],
|
|
382
|
-
body: "",
|
|
383
|
-
})
|
|
384
362
|
|
|
385
|
-
|
|
363
|
+
if (options.accept_encoding?.match(/updates\s*\((.*)\)/)?.[1].split(',').map(x=>x.trim()).includes('dt')) {
|
|
364
|
+
// optimization: if client wants past current version,
|
|
365
|
+
// send empty dt
|
|
366
|
+
if (options.parents && v_eq(options.parents, version)) {
|
|
367
|
+
options.subscribe({ encoding: 'dt', body: new Doc().toBytes() })
|
|
368
|
+
} else {
|
|
369
|
+
var bytes = resource.doc.toBytes()
|
|
370
|
+
if (options.parents) {
|
|
371
|
+
var doc = Doc.fromBytes(bytes)
|
|
372
|
+
bytes = doc.getPatchSince(
|
|
373
|
+
dt_get_local_version(bytes, options.parents))
|
|
374
|
+
doc.free()
|
|
375
|
+
}
|
|
376
|
+
options.subscribe({ encoding: 'dt', body: bytes })
|
|
377
|
+
}
|
|
386
378
|
} else {
|
|
387
|
-
|
|
388
|
-
|
|
389
|
-
|
|
379
|
+
var updates = null
|
|
380
|
+
if (!options.parents && !options.version) {
|
|
381
|
+
options.subscribe({
|
|
382
|
+
version: [],
|
|
383
|
+
parents: [],
|
|
384
|
+
body: "",
|
|
385
|
+
})
|
|
386
|
+
|
|
387
|
+
updates = dt_get_patches(resource.doc)
|
|
388
|
+
} else {
|
|
389
|
+
// Then start the subscription from the parents in options
|
|
390
|
+
updates = dt_get_patches(resource.doc, options.parents || options.version)
|
|
391
|
+
}
|
|
390
392
|
|
|
391
|
-
|
|
392
|
-
|
|
393
|
-
|
|
394
|
-
|
|
395
|
-
|
|
396
|
-
|
|
393
|
+
for (let u of updates)
|
|
394
|
+
options.subscribe({
|
|
395
|
+
version: [u.version],
|
|
396
|
+
parents: u.parents,
|
|
397
|
+
patches: [{ unit: u.unit, range: u.range, content: u.content }],
|
|
398
|
+
})
|
|
399
|
+
|
|
400
|
+
// Output at least *some* data, or else chrome gets confused and
|
|
401
|
+
// thinks the connection failed. This isn't strictly necessary,
|
|
402
|
+
// but it makes fewer scary errors get printed out in the JS
|
|
403
|
+
// console.
|
|
404
|
+
if (updates.length === 0) options.write?.("\r\n")
|
|
405
|
+
}
|
|
397
406
|
|
|
398
|
-
|
|
399
|
-
// thinks the connection failed. This isn't strictly necessary,
|
|
400
|
-
// but it makes fewer scary errors get printed out in the JS
|
|
401
|
-
// console.
|
|
402
|
-
if (updates.length === 0) options.write?.("\r\n")
|
|
407
|
+
resource.clients.add(options)
|
|
403
408
|
}
|
|
404
|
-
|
|
405
|
-
resource.clients.add(options)
|
|
406
409
|
}
|
|
407
410
|
}
|
|
408
|
-
}
|
|
409
411
|
|
|
410
|
-
braid_text.forget = async (key, options) => {
|
|
411
|
-
|
|
412
|
+
braid_text.forget = async (key, options) => {
|
|
413
|
+
if (!options) throw new Error('options is required')
|
|
412
414
|
|
|
413
|
-
|
|
415
|
+
let resource = (typeof key == 'string') ? await get_resource(key) : key
|
|
414
416
|
|
|
415
|
-
|
|
416
|
-
|
|
417
|
-
|
|
418
|
-
}
|
|
417
|
+
if (options.merge_type != "dt")
|
|
418
|
+
resource.simpleton_clients.delete(options)
|
|
419
|
+
else resource.clients.delete(options)
|
|
420
|
+
}
|
|
419
421
|
|
|
420
|
-
braid_text.put = async (key, options) => {
|
|
421
|
-
|
|
422
|
+
braid_text.put = async (key, options) => {
|
|
423
|
+
let { version, patches, body, peer } = options
|
|
422
424
|
|
|
423
|
-
|
|
424
|
-
|
|
425
|
-
|
|
426
|
-
|
|
427
|
-
|
|
428
|
-
|
|
429
|
-
|
|
425
|
+
// support for json patch puts..
|
|
426
|
+
if (patches?.length && patches.every(x => x.unit === 'json')) {
|
|
427
|
+
let resource = (typeof key == 'string') ? await get_resource(key) : key
|
|
428
|
+
|
|
429
|
+
let x = JSON.parse(resource.doc.get())
|
|
430
|
+
for (let p of patches)
|
|
431
|
+
apply_patch(x, p.range, p.content === '' ? undefined : JSON.parse(p.content))
|
|
430
432
|
|
|
431
|
-
|
|
432
|
-
|
|
433
|
-
|
|
434
|
-
|
|
433
|
+
return await braid_text.put(key, {
|
|
434
|
+
body: JSON.stringify(x, null, 4)
|
|
435
|
+
})
|
|
436
|
+
}
|
|
435
437
|
|
|
436
|
-
|
|
438
|
+
let resource = (typeof key == 'string') ? await get_resource(key) : key
|
|
437
439
|
|
|
438
|
-
|
|
439
|
-
|
|
440
|
-
|
|
441
|
-
|
|
440
|
+
if (options.transfer_encoding === 'dt') {
|
|
441
|
+
var start_i = 1 + resource.doc.getLocalVersion().reduce((a, b) => Math.max(a, b), -1)
|
|
442
|
+
|
|
443
|
+
resource.doc.mergeBytes(body)
|
|
442
444
|
|
|
443
|
-
|
|
444
|
-
|
|
445
|
-
|
|
446
|
-
|
|
447
|
-
|
|
445
|
+
var end_i = resource.doc.getLocalVersion().reduce((a, b) => Math.max(a, b), -1)
|
|
446
|
+
for (var i = start_i; i <= end_i; i++) {
|
|
447
|
+
let v = resource.doc.localToRemoteVersion([i])[0]
|
|
448
|
+
if (!resource.actor_seqs[v[0]]) resource.actor_seqs[v[0]] = new braid_text.RangeSet()
|
|
449
|
+
resource.actor_seqs[v[0]].add_range(v[1], v[1])
|
|
450
|
+
}
|
|
451
|
+
resource.val = resource.doc.get()
|
|
452
|
+
resource.version = resource.doc.getRemoteVersion().map(x => x.join("-")).sort()
|
|
453
|
+
|
|
454
|
+
await resource.db_delta(body)
|
|
455
|
+
return { change_count: end_i - start_i + 1 }
|
|
448
456
|
}
|
|
449
|
-
resource.val = resource.doc.get()
|
|
450
|
-
resource.version = resource.doc.getRemoteVersion().map(x => x.join("-")).sort()
|
|
451
457
|
|
|
452
|
-
|
|
453
|
-
return { change_count: end_i - start_i + 1 }
|
|
454
|
-
}
|
|
458
|
+
if (version) validate_version_array(version)
|
|
455
459
|
|
|
456
|
-
|
|
460
|
+
// translate a single parent of "root" to the empty array (same meaning)
|
|
461
|
+
let options_parents = options.parents
|
|
462
|
+
if (options_parents?.length === 1 && options_parents[0] === 'root')
|
|
463
|
+
options_parents = []
|
|
457
464
|
|
|
458
|
-
|
|
459
|
-
|
|
460
|
-
|
|
461
|
-
|
|
465
|
+
if (options_parents) validate_version_array(options_parents)
|
|
466
|
+
if (body != null && patches) throw new Error(`cannot have a body and patches`)
|
|
467
|
+
if (body != null && (typeof body !== 'string')) throw new Error(`body must be a string`)
|
|
468
|
+
if (patches) validate_patches(patches)
|
|
462
469
|
|
|
463
|
-
|
|
464
|
-
|
|
465
|
-
|
|
466
|
-
|
|
470
|
+
if (options_parents) {
|
|
471
|
+
// make sure we have all these parents
|
|
472
|
+
for (let p of options_parents) {
|
|
473
|
+
let P = decode_version(p)
|
|
474
|
+
if (!resource.actor_seqs[P[0]]?.has(P[1]))
|
|
475
|
+
throw new Error(`missing parent version: ${p}`)
|
|
476
|
+
}
|
|
477
|
+
}
|
|
467
478
|
|
|
468
|
-
|
|
469
|
-
|
|
470
|
-
|
|
471
|
-
|
|
472
|
-
|
|
473
|
-
|
|
479
|
+
let parents = resource.version
|
|
480
|
+
let og_parents = options_parents || parents
|
|
481
|
+
|
|
482
|
+
let max_pos = resource.length_cache.get('' + og_parents) ??
|
|
483
|
+
(v_eq(parents, og_parents) ? resource.doc.len() : dt_len(resource.doc, og_parents))
|
|
484
|
+
|
|
485
|
+
if (body != null) {
|
|
486
|
+
patches = [{
|
|
487
|
+
unit: 'text',
|
|
488
|
+
range: `[0:${max_pos}]`,
|
|
489
|
+
content: body
|
|
490
|
+
}]
|
|
474
491
|
}
|
|
475
|
-
}
|
|
476
492
|
|
|
477
|
-
|
|
478
|
-
|
|
479
|
-
|
|
480
|
-
|
|
481
|
-
|
|
482
|
-
|
|
483
|
-
if (body != null) {
|
|
484
|
-
patches = [{
|
|
485
|
-
unit: 'text',
|
|
486
|
-
range: `[0:${max_pos}]`,
|
|
487
|
-
content: body
|
|
488
|
-
}]
|
|
489
|
-
}
|
|
493
|
+
let og_patches = patches
|
|
494
|
+
patches = patches.map((p) => ({
|
|
495
|
+
...p,
|
|
496
|
+
range: p.range.match(/\d+/g).map((x) => parseInt(x)),
|
|
497
|
+
content_codepoints: [...p.content],
|
|
498
|
+
})).sort((a, b) => a.range[0] - b.range[0])
|
|
490
499
|
|
|
491
|
-
|
|
492
|
-
|
|
493
|
-
|
|
494
|
-
|
|
495
|
-
|
|
496
|
-
|
|
497
|
-
|
|
498
|
-
// validate patch positions
|
|
499
|
-
let must_be_at_least = 0
|
|
500
|
-
for (let p of patches) {
|
|
501
|
-
if (p.range[0] < must_be_at_least || p.range[0] > max_pos) throw new Error(`invalid patch range position: ${p.range[0]}`)
|
|
502
|
-
if (p.range[1] < p.range[0] || p.range[1] > max_pos) throw new Error(`invalid patch range position: ${p.range[1]}`)
|
|
503
|
-
must_be_at_least = p.range[1]
|
|
504
|
-
}
|
|
500
|
+
// validate patch positions
|
|
501
|
+
let must_be_at_least = 0
|
|
502
|
+
for (let p of patches) {
|
|
503
|
+
if (p.range[0] < must_be_at_least || p.range[0] > max_pos) throw new Error(`invalid patch range position: ${p.range[0]}`)
|
|
504
|
+
if (p.range[1] < p.range[0] || p.range[1] > max_pos) throw new Error(`invalid patch range position: ${p.range[1]}`)
|
|
505
|
+
must_be_at_least = p.range[1]
|
|
506
|
+
}
|
|
505
507
|
|
|
506
|
-
|
|
508
|
+
let change_count = patches.reduce((a, b) => a + b.content_codepoints.length + (b.range[1] - b.range[0]), 0)
|
|
507
509
|
|
|
508
|
-
|
|
510
|
+
let og_v = version?.[0] || `${(is_valid_actor(peer) && peer) || Math.random().toString(36).slice(2, 7)}-${change_count - 1}`
|
|
509
511
|
|
|
510
|
-
|
|
512
|
+
let v = decode_version(og_v)
|
|
511
513
|
|
|
512
|
-
|
|
513
|
-
|
|
514
|
-
|
|
514
|
+
resource.length_cache.put(`${v[0]}-${v[1]}`, patches.reduce((a, b) =>
|
|
515
|
+
a + (b.content_codepoints?.length ?? 0) - (b.range[1] - b.range[0]),
|
|
516
|
+
max_pos))
|
|
515
517
|
|
|
516
|
-
|
|
517
|
-
|
|
518
|
+
// validate version: make sure we haven't seen it already
|
|
519
|
+
if (resource.actor_seqs[v[0]]?.has(v[1])) {
|
|
518
520
|
|
|
519
|
-
|
|
521
|
+
if (!options.validate_already_seen_versions) return { change_count }
|
|
520
522
|
|
|
521
|
-
|
|
522
|
-
|
|
523
|
+
// if we have seen it already, make sure it's the same as before
|
|
524
|
+
let updates = dt_get_patches(resource.doc, og_parents)
|
|
523
525
|
|
|
524
|
-
|
|
525
|
-
|
|
526
|
-
|
|
526
|
+
let seen = {}
|
|
527
|
+
for (let u of updates) {
|
|
528
|
+
u.version = decode_version(u.version)
|
|
527
529
|
|
|
528
|
-
|
|
530
|
+
if (!u.content) {
|
|
531
|
+
// delete
|
|
532
|
+
let v = u.version
|
|
533
|
+
for (let i = 0; i < u.end - u.start; i++) {
|
|
534
|
+
let ps = (i < u.end - u.start - 1) ? [`${v[0]}-${v[1] - i - 1}`] : u.parents
|
|
535
|
+
seen[JSON.stringify([v[0], v[1] - i, ps, u.start + i])] = true
|
|
536
|
+
}
|
|
537
|
+
} else {
|
|
538
|
+
// insert
|
|
539
|
+
let v = u.version
|
|
540
|
+
let content = [...u.content]
|
|
541
|
+
for (let i = 0; i < content.length; i++) {
|
|
542
|
+
let ps = (i > 0) ? [`${v[0]}-${v[1] - content.length + i}`] : u.parents
|
|
543
|
+
seen[JSON.stringify([v[0], v[1] + 1 - content.length + i, ps, u.start + i, content[i]])] = true
|
|
544
|
+
}
|
|
545
|
+
}
|
|
546
|
+
}
|
|
547
|
+
|
|
548
|
+
v = `${v[0]}-${v[1] + 1 - change_count}`
|
|
549
|
+
let ps = og_parents
|
|
550
|
+
let offset = 0
|
|
551
|
+
for (let p of patches) {
|
|
529
552
|
// delete
|
|
530
|
-
let
|
|
531
|
-
|
|
532
|
-
|
|
533
|
-
seen[JSON.stringify([
|
|
553
|
+
for (let i = p.range[0]; i < p.range[1]; i++) {
|
|
554
|
+
let vv = decode_version(v)
|
|
555
|
+
|
|
556
|
+
if (!seen[JSON.stringify([vv[0], vv[1], ps, p.range[1] - 1 + offset])]) throw new Error('invalid update: different from previous update with same version')
|
|
557
|
+
|
|
558
|
+
offset--
|
|
559
|
+
ps = [v]
|
|
560
|
+
v = vv
|
|
561
|
+
v = `${v[0]}-${v[1] + 1}`
|
|
534
562
|
}
|
|
535
|
-
} else {
|
|
536
563
|
// insert
|
|
537
|
-
let
|
|
538
|
-
|
|
539
|
-
|
|
540
|
-
|
|
541
|
-
seen[JSON.stringify([
|
|
564
|
+
for (let i = 0; i < p.content_codepoints?.length ?? 0; i++) {
|
|
565
|
+
let vv = decode_version(v)
|
|
566
|
+
let c = p.content_codepoints[i]
|
|
567
|
+
|
|
568
|
+
if (!seen[JSON.stringify([vv[0], vv[1], ps, p.range[1] + offset, c])]) throw new Error('invalid update: different from previous update with same version')
|
|
569
|
+
|
|
570
|
+
offset++
|
|
571
|
+
ps = [v]
|
|
572
|
+
v = vv
|
|
573
|
+
v = `${v[0]}-${v[1] + 1}`
|
|
542
574
|
}
|
|
543
575
|
}
|
|
576
|
+
|
|
577
|
+
// we already have this version, so nothing left to do
|
|
578
|
+
return { change_count: change_count }
|
|
544
579
|
}
|
|
580
|
+
if (!resource.actor_seqs[v[0]]) resource.actor_seqs[v[0]] = new RangeSet()
|
|
581
|
+
resource.actor_seqs[v[0]].add_range(v[1] + 1 - change_count, v[1])
|
|
545
582
|
|
|
583
|
+
// reduce the version sequence by the number of char-edits
|
|
546
584
|
v = `${v[0]}-${v[1] + 1 - change_count}`
|
|
585
|
+
|
|
547
586
|
let ps = og_parents
|
|
587
|
+
|
|
588
|
+
let v_before = resource.doc.getLocalVersion()
|
|
589
|
+
|
|
590
|
+
let bytes = []
|
|
591
|
+
|
|
548
592
|
let offset = 0
|
|
549
593
|
for (let p of patches) {
|
|
550
594
|
// delete
|
|
551
|
-
|
|
552
|
-
|
|
553
|
-
|
|
554
|
-
|
|
555
|
-
|
|
556
|
-
|
|
557
|
-
|
|
558
|
-
v = vv
|
|
559
|
-
v = `${v[0]}-${v[1] + 1}`
|
|
595
|
+
let del = p.range[1] - p.range[0]
|
|
596
|
+
if (del) {
|
|
597
|
+
bytes.push(dt_create_bytes(v, ps, p.range[0] + offset, del, null))
|
|
598
|
+
offset -= del
|
|
599
|
+
v = decode_version(v)
|
|
600
|
+
ps = [`${v[0]}-${v[1] + (del - 1)}`]
|
|
601
|
+
v = `${v[0]}-${v[1] + del}`
|
|
560
602
|
}
|
|
561
603
|
// insert
|
|
562
|
-
|
|
563
|
-
|
|
564
|
-
|
|
565
|
-
|
|
566
|
-
|
|
567
|
-
|
|
568
|
-
offset++
|
|
569
|
-
ps = [v]
|
|
570
|
-
v = vv
|
|
571
|
-
v = `${v[0]}-${v[1] + 1}`
|
|
604
|
+
if (p.content?.length) {
|
|
605
|
+
bytes.push(dt_create_bytes(v, ps, p.range[1] + offset, 0, p.content))
|
|
606
|
+
offset += p.content_codepoints.length
|
|
607
|
+
v = decode_version(v)
|
|
608
|
+
ps = [`${v[0]}-${v[1] + (p.content_codepoints.length - 1)}`]
|
|
609
|
+
v = `${v[0]}-${v[1] + p.content_codepoints.length}`
|
|
572
610
|
}
|
|
573
611
|
}
|
|
574
612
|
|
|
575
|
-
|
|
576
|
-
|
|
577
|
-
|
|
578
|
-
if (!resource.actor_seqs[v[0]]) resource.actor_seqs[v[0]] = new RangeSet()
|
|
579
|
-
resource.actor_seqs[v[0]].add_range(v[1] + 1 - change_count, v[1])
|
|
580
|
-
|
|
581
|
-
// reduce the version sequence by the number of char-edits
|
|
582
|
-
v = `${v[0]}-${v[1] + 1 - change_count}`
|
|
583
|
-
|
|
584
|
-
let ps = og_parents
|
|
585
|
-
|
|
586
|
-
let v_before = resource.doc.getLocalVersion()
|
|
587
|
-
|
|
588
|
-
let bytes = []
|
|
589
|
-
|
|
590
|
-
let offset = 0
|
|
591
|
-
for (let p of patches) {
|
|
592
|
-
// delete
|
|
593
|
-
let del = p.range[1] - p.range[0]
|
|
594
|
-
if (del) {
|
|
595
|
-
bytes.push(dt_create_bytes(v, ps, p.range[0] + offset, del, null))
|
|
596
|
-
offset -= del
|
|
597
|
-
v = decode_version(v)
|
|
598
|
-
ps = [`${v[0]}-${v[1] + (del - 1)}`]
|
|
599
|
-
v = `${v[0]}-${v[1] + del}`
|
|
600
|
-
}
|
|
601
|
-
// insert
|
|
602
|
-
if (p.content?.length) {
|
|
603
|
-
bytes.push(dt_create_bytes(v, ps, p.range[1] + offset, 0, p.content))
|
|
604
|
-
offset += p.content_codepoints.length
|
|
605
|
-
v = decode_version(v)
|
|
606
|
-
ps = [`${v[0]}-${v[1] + (p.content_codepoints.length - 1)}`]
|
|
607
|
-
v = `${v[0]}-${v[1] + p.content_codepoints.length}`
|
|
608
|
-
}
|
|
609
|
-
}
|
|
613
|
+
for (let b of bytes) resource.doc.mergeBytes(b)
|
|
614
|
+
resource.val = resource.doc.get()
|
|
615
|
+
resource.version = resource.doc.getRemoteVersion().map(x => x.join("-")).sort()
|
|
610
616
|
|
|
611
|
-
|
|
612
|
-
resource.val = resource.doc.get()
|
|
613
|
-
resource.version = resource.doc.getRemoteVersion().map(x => x.join("-")).sort()
|
|
617
|
+
var post_commit_updates = []
|
|
614
618
|
|
|
615
|
-
|
|
619
|
+
if (options.merge_type != "dt") {
|
|
620
|
+
patches = get_xf_patches(resource.doc, v_before)
|
|
621
|
+
if (braid_text.verbose) console.log(JSON.stringify({ patches }))
|
|
616
622
|
|
|
617
|
-
|
|
618
|
-
patches = get_xf_patches(resource.doc, v_before)
|
|
619
|
-
if (braid_text.verbose) console.log(JSON.stringify({ patches }))
|
|
623
|
+
let version = resource.version
|
|
620
624
|
|
|
621
|
-
|
|
625
|
+
for (let client of resource.simpleton_clients) {
|
|
626
|
+
if (peer && client.peer === peer) {
|
|
627
|
+
client.my_last_seen_version = [og_v]
|
|
628
|
+
}
|
|
622
629
|
|
|
623
|
-
|
|
624
|
-
|
|
625
|
-
|
|
626
|
-
|
|
630
|
+
function set_timeout(time_override) {
|
|
631
|
+
if (client.my_timeout) clearTimeout(client.my_timeout)
|
|
632
|
+
client.my_timeout = setTimeout(() => {
|
|
633
|
+
// if the doc has been freed, exit early
|
|
634
|
+
if (resource.doc.__wbg_ptr === 0) return
|
|
627
635
|
|
|
628
|
-
|
|
629
|
-
|
|
630
|
-
|
|
631
|
-
// if the doc has been freed, exit early
|
|
632
|
-
if (resource.doc.__wbg_ptr === 0) return
|
|
636
|
+
let version = resource.version
|
|
637
|
+
let x = { version }
|
|
638
|
+
x.parents = client.my_last_seen_version
|
|
633
639
|
|
|
634
|
-
|
|
635
|
-
|
|
636
|
-
|
|
640
|
+
if (braid_text.verbose) console.log("rebasing after timeout.. ")
|
|
641
|
+
if (braid_text.verbose) console.log(" client.my_unused_version_count = " + client.my_unused_version_count)
|
|
642
|
+
x.patches = get_xf_patches(resource.doc, OpLog_remote_to_local(resource.doc, client.my_last_seen_version))
|
|
637
643
|
|
|
638
|
-
|
|
639
|
-
|
|
640
|
-
|
|
644
|
+
if (braid_text.verbose) console.log(`sending from rebase: ${JSON.stringify(x)}`)
|
|
645
|
+
client.subscribe(x)
|
|
646
|
+
client.my_last_sent_version = x.version
|
|
641
647
|
|
|
642
|
-
|
|
643
|
-
client.
|
|
644
|
-
|
|
648
|
+
delete client.my_timeout
|
|
649
|
+
}, time_override ?? Math.min(3000, 23 * Math.pow(1.5, client.my_unused_version_count - 1)))
|
|
650
|
+
}
|
|
645
651
|
|
|
646
|
-
|
|
647
|
-
|
|
648
|
-
|
|
652
|
+
if (client.my_timeout) {
|
|
653
|
+
if (peer && client.peer === peer) {
|
|
654
|
+
if (!v_eq(client.my_last_sent_version, og_parents)) {
|
|
655
|
+
// note: we don't add to client.my_unused_version_count,
|
|
656
|
+
// because we're already in a timeout;
|
|
657
|
+
// we'll just extend it here..
|
|
658
|
+
set_timeout()
|
|
659
|
+
} else {
|
|
660
|
+
// hm.. it appears we got a correctly parented version,
|
|
661
|
+
// which suggests that maybe we can stop the timeout early
|
|
662
|
+
set_timeout(0)
|
|
663
|
+
}
|
|
664
|
+
}
|
|
665
|
+
continue
|
|
666
|
+
}
|
|
649
667
|
|
|
650
|
-
|
|
668
|
+
let x = { version }
|
|
651
669
|
if (peer && client.peer === peer) {
|
|
652
670
|
if (!v_eq(client.my_last_sent_version, og_parents)) {
|
|
653
|
-
|
|
654
|
-
// because we're already in a timeout;
|
|
655
|
-
// we'll just extend it here..
|
|
671
|
+
client.my_unused_version_count = (client.my_unused_version_count ?? 0) + 1
|
|
656
672
|
set_timeout()
|
|
673
|
+
continue
|
|
657
674
|
} else {
|
|
658
|
-
|
|
659
|
-
// which suggests that maybe we can stop the timeout early
|
|
660
|
-
set_timeout(0)
|
|
675
|
+
delete client.my_unused_version_count
|
|
661
676
|
}
|
|
662
|
-
}
|
|
663
|
-
continue
|
|
664
|
-
}
|
|
665
677
|
|
|
666
|
-
|
|
667
|
-
|
|
668
|
-
|
|
669
|
-
|
|
670
|
-
|
|
671
|
-
|
|
678
|
+
x.parents = options.version
|
|
679
|
+
if (!v_eq(version, options.version)) {
|
|
680
|
+
if (braid_text.verbose) console.log("rebasing..")
|
|
681
|
+
x.patches = get_xf_patches(resource.doc, OpLog_remote_to_local(resource.doc, [og_v]))
|
|
682
|
+
} else {
|
|
683
|
+
// this client already has this version,
|
|
684
|
+
// so let's pretend to send it back, but not
|
|
685
|
+
if (braid_text.verbose) console.log(`not reflecting back to simpleton`)
|
|
686
|
+
client.my_last_sent_version = x.version
|
|
687
|
+
continue
|
|
688
|
+
}
|
|
672
689
|
} else {
|
|
673
|
-
|
|
690
|
+
x.parents = parents
|
|
691
|
+
x.patches = patches
|
|
674
692
|
}
|
|
675
|
-
|
|
676
|
-
|
|
677
|
-
|
|
678
|
-
|
|
679
|
-
|
|
680
|
-
|
|
681
|
-
|
|
682
|
-
|
|
683
|
-
|
|
693
|
+
if (braid_text.verbose) console.log(`sending: ${JSON.stringify(x)}`)
|
|
694
|
+
post_commit_updates.push([client, x])
|
|
695
|
+
client.my_last_sent_version = x.version
|
|
696
|
+
}
|
|
697
|
+
} else {
|
|
698
|
+
if (resource.simpleton_clients.size) {
|
|
699
|
+
let version = resource.version
|
|
700
|
+
patches = get_xf_patches(resource.doc, v_before)
|
|
701
|
+
let x = { version, parents, patches }
|
|
702
|
+
if (braid_text.verbose) console.log(`sending: ${JSON.stringify(x)}`)
|
|
703
|
+
for (let client of resource.simpleton_clients) {
|
|
704
|
+
if (client.my_timeout) continue
|
|
705
|
+
post_commit_updates.push([client, x])
|
|
684
706
|
client.my_last_sent_version = x.version
|
|
685
|
-
continue
|
|
686
707
|
}
|
|
687
|
-
} else {
|
|
688
|
-
x.parents = parents
|
|
689
|
-
x.patches = patches
|
|
690
708
|
}
|
|
691
|
-
if (braid_text.verbose) console.log(`sending: ${JSON.stringify(x)}`)
|
|
692
|
-
post_commit_updates.push([client, x])
|
|
693
|
-
client.my_last_sent_version = x.version
|
|
694
709
|
}
|
|
695
|
-
|
|
696
|
-
|
|
697
|
-
|
|
698
|
-
|
|
699
|
-
|
|
700
|
-
|
|
701
|
-
|
|
702
|
-
|
|
710
|
+
|
|
711
|
+
var x = {
|
|
712
|
+
version: [og_v],
|
|
713
|
+
parents: og_parents,
|
|
714
|
+
patches: og_patches,
|
|
715
|
+
}
|
|
716
|
+
for (let client of resource.clients) {
|
|
717
|
+
if (!peer || client.peer !== peer)
|
|
703
718
|
post_commit_updates.push([client, x])
|
|
704
|
-
client.my_last_sent_version = x.version
|
|
705
|
-
}
|
|
706
719
|
}
|
|
707
|
-
}
|
|
708
720
|
|
|
709
|
-
|
|
710
|
-
version: [og_v],
|
|
711
|
-
parents: og_parents,
|
|
712
|
-
patches: og_patches,
|
|
713
|
-
}
|
|
714
|
-
for (let client of resource.clients) {
|
|
715
|
-
if (!peer || client.peer !== peer)
|
|
716
|
-
post_commit_updates.push([client, x])
|
|
717
|
-
}
|
|
721
|
+
await resource.db_delta(resource.doc.getPatchSince(v_before))
|
|
718
722
|
|
|
719
|
-
|
|
723
|
+
for (var [client, x] of post_commit_updates) client.subscribe(x)
|
|
720
724
|
|
|
721
|
-
|
|
722
|
-
|
|
723
|
-
return { change_count }
|
|
724
|
-
}
|
|
725
|
-
|
|
726
|
-
braid_text.list = async () => {
|
|
727
|
-
try {
|
|
728
|
-
if (braid_text.db_folder) {
|
|
729
|
-
await db_folder_init()
|
|
730
|
-
var pages = new Set()
|
|
731
|
-
for (let x of await require('fs').promises.readdir(braid_text.db_folder)) pages.add(decode_filename(x.replace(/\.\w+$/, '')))
|
|
732
|
-
return [...pages.keys()]
|
|
733
|
-
} else return Object.keys(braid_text.cache)
|
|
734
|
-
} catch (e) { return [] }
|
|
735
|
-
}
|
|
736
|
-
|
|
737
|
-
braid_text.free_cors = res => {
|
|
738
|
-
res.setHeader("Access-Control-Allow-Origin", "*")
|
|
739
|
-
res.setHeader("Access-Control-Allow-Methods", "*")
|
|
740
|
-
res.setHeader("Access-Control-Allow-Headers", "*")
|
|
741
|
-
res.setHeader("Access-Control-Expose-Headers", "*")
|
|
742
|
-
}
|
|
725
|
+
return { change_count }
|
|
726
|
+
}
|
|
743
727
|
|
|
744
|
-
async
|
|
745
|
-
|
|
746
|
-
|
|
747
|
-
|
|
748
|
-
|
|
749
|
-
|
|
728
|
+
braid_text.list = async () => {
|
|
729
|
+
try {
|
|
730
|
+
if (braid_text.db_folder) {
|
|
731
|
+
await db_folder_init()
|
|
732
|
+
var pages = new Set()
|
|
733
|
+
for (let x of await require('fs').promises.readdir(braid_text.db_folder)) pages.add(decode_filename(x.replace(/\.\w+$/, '')))
|
|
734
|
+
return [...pages.keys()]
|
|
735
|
+
} else return Object.keys(braid_text.cache)
|
|
736
|
+
} catch (e) { return [] }
|
|
737
|
+
}
|
|
738
|
+
|
|
739
|
+
braid_text.free_cors = res => {
|
|
740
|
+
res.setHeader("Access-Control-Allow-Origin", "*")
|
|
741
|
+
res.setHeader("Access-Control-Allow-Methods", "*")
|
|
742
|
+
res.setHeader("Access-Control-Allow-Headers", "*")
|
|
743
|
+
res.setHeader("Access-Control-Expose-Headers", "*")
|
|
744
|
+
}
|
|
745
|
+
|
|
746
|
+
async function get_resource(key) {
|
|
747
|
+
let cache = braid_text.cache
|
|
748
|
+
if (!cache[key]) cache[key] = new Promise(async done => {
|
|
749
|
+
let resource = {key}
|
|
750
|
+
resource.clients = new Set()
|
|
751
|
+
resource.simpleton_clients = new Set()
|
|
752
|
+
|
|
753
|
+
resource.doc = new Doc("server")
|
|
754
|
+
resource.meta = {}
|
|
755
|
+
|
|
756
|
+
let { change, change_meta } = braid_text.db_folder
|
|
757
|
+
? await file_sync(key,
|
|
758
|
+
(bytes) => resource.doc.mergeBytes(bytes),
|
|
759
|
+
() => resource.doc.toBytes(),
|
|
760
|
+
(meta) => resource.meta = meta,
|
|
761
|
+
() => resource.meta)
|
|
762
|
+
: { change: () => { }, change_meta: () => {} }
|
|
763
|
+
|
|
764
|
+
resource.db_delta = change
|
|
765
|
+
resource.update_meta = (meta) => {
|
|
766
|
+
Object.assign(resource.meta, meta)
|
|
767
|
+
change_meta()
|
|
768
|
+
}
|
|
750
769
|
|
|
751
|
-
|
|
770
|
+
resource.actor_seqs = {}
|
|
752
771
|
|
|
753
|
-
|
|
754
|
-
|
|
755
|
-
|
|
756
|
-
|
|
757
|
-
: { change: () => { } }
|
|
772
|
+
dt_get_actor_seq_runs([...resource.doc.toBytes()], (actor, base, len) => {
|
|
773
|
+
if (!resource.actor_seqs[actor]) resource.actor_seqs[actor] = new RangeSet()
|
|
774
|
+
resource.actor_seqs[actor].add_range(base, base + len - 1)
|
|
775
|
+
})
|
|
758
776
|
|
|
759
|
-
|
|
777
|
+
resource.val = resource.doc.get()
|
|
778
|
+
resource.version = resource.doc.getRemoteVersion().map(x => x.join("-")).sort()
|
|
760
779
|
|
|
761
|
-
|
|
780
|
+
resource.length_cache = createSimpleCache(braid_text.length_cache_size)
|
|
762
781
|
|
|
763
|
-
|
|
764
|
-
if (!resource.actor_seqs[actor]) resource.actor_seqs[actor] = new RangeSet()
|
|
765
|
-
resource.actor_seqs[actor].add_range(base, base + len - 1)
|
|
782
|
+
done(resource)
|
|
766
783
|
})
|
|
767
|
-
|
|
768
|
-
|
|
769
|
-
|
|
770
|
-
|
|
771
|
-
|
|
772
|
-
|
|
773
|
-
|
|
774
|
-
|
|
775
|
-
|
|
776
|
-
|
|
777
|
-
|
|
778
|
-
|
|
779
|
-
|
|
780
|
-
|
|
781
|
-
|
|
782
|
-
|
|
783
|
-
|
|
784
|
-
|
|
785
|
-
|
|
786
|
-
|
|
787
|
-
|
|
788
|
-
|
|
789
|
-
|
|
790
|
-
|
|
791
|
-
|
|
792
|
-
|
|
793
|
-
|
|
794
|
-
|
|
795
|
-
|
|
796
|
-
|
|
797
|
-
|
|
798
|
-
|
|
799
|
-
|
|
800
|
-
|
|
801
|
-
let encoded = convert_us[x.slice(0, -'.name'.length)] = encode_filename(await fs.promises.readFile(`${braid_text.db_folder}/${x}`, { encoding: 'utf8' }))
|
|
802
|
-
if (encoded.length > max_encoded_key_size) {
|
|
803
|
-
console.log(`trying to convert file to new format, but the key is too big: ${braid_text.db_folder}/${x}`)
|
|
804
|
-
process.exit()
|
|
784
|
+
return await cache[key]
|
|
785
|
+
}
|
|
786
|
+
|
|
787
|
+
async function db_folder_init() {
|
|
788
|
+
if (braid_text.verbose) console.log('__!')
|
|
789
|
+
if (!db_folder_init.p) db_folder_init.p = new Promise(async done => {
|
|
790
|
+
await fs.promises.mkdir(braid_text.db_folder, { recursive: true });
|
|
791
|
+
await fs.promises.mkdir(`${braid_text.db_folder}/.meta`, { recursive: true })
|
|
792
|
+
|
|
793
|
+
// 0.0.13 -> 0.0.14
|
|
794
|
+
// look for files with key-encodings over max_encoded_key_size,
|
|
795
|
+
// and convert them using the new method
|
|
796
|
+
// for (let x of await fs.promises.readdir(braid_text.db_folder)) {
|
|
797
|
+
// let k = x.replace(/(_[0-9a-f]{64})?\.\w+$/, '')
|
|
798
|
+
// if (k.length > max_encoded_key_size) {
|
|
799
|
+
// k = decode_filename(k)
|
|
800
|
+
|
|
801
|
+
// await fs.promises.rename(`${braid_text.db_folder}/${x}`, `${braid_text.db_folder}/${encode_filename(k)}${x.match(/\.\w+$/)[0]}`)
|
|
802
|
+
// await fs.promises.writeFile(`${braid_text.db_folder}/${encode_filename(k)}.name`, k)
|
|
803
|
+
// }
|
|
804
|
+
// }
|
|
805
|
+
|
|
806
|
+
// 0.0.14 -> 0.0.15
|
|
807
|
+
// basically convert the 0.0.14 files back
|
|
808
|
+
let convert_us = {}
|
|
809
|
+
for (let x of await fs.promises.readdir(braid_text.db_folder)) {
|
|
810
|
+
if (x.endsWith('.name')) {
|
|
811
|
+
let encoded = convert_us[x.slice(0, -'.name'.length)] = encode_filename(await fs.promises.readFile(`${braid_text.db_folder}/${x}`, { encoding: 'utf8' }))
|
|
812
|
+
if (encoded.length > max_encoded_key_size) {
|
|
813
|
+
console.log(`trying to convert file to new format, but the key is too big: ${braid_text.db_folder}/${x}`)
|
|
814
|
+
process.exit()
|
|
815
|
+
}
|
|
816
|
+
if (braid_text.verbose) console.log(`deleting: ${braid_text.db_folder}/${x}`)
|
|
817
|
+
await fs.promises.unlink(`${braid_text.db_folder}/${x}`)
|
|
805
818
|
}
|
|
806
|
-
if (braid_text.verbose) console.log(`deleting: ${braid_text.db_folder}/${x}`)
|
|
807
|
-
await fs.promises.unlink(`${braid_text.db_folder}/${x}`)
|
|
808
819
|
}
|
|
809
|
-
|
|
810
|
-
|
|
811
|
-
|
|
812
|
-
|
|
813
|
-
|
|
814
|
-
|
|
815
|
-
|
|
820
|
+
if (Object.keys(convert_us).length) {
|
|
821
|
+
for (let x of await fs.promises.readdir(braid_text.db_folder)) {
|
|
822
|
+
let [_, k, num] = x.match(/^(.*)\.(\d+)$/s)
|
|
823
|
+
if (!convert_us[k]) continue
|
|
824
|
+
if (braid_text.verbose) console.log(`renaming: ${braid_text.db_folder}/${x} -> ${braid_text.db_folder}/${convert_us[k]}.${num}`)
|
|
825
|
+
if (convert_us[k]) await fs.promises.rename(`${braid_text.db_folder}/${x}`, `${braid_text.db_folder}/${convert_us[k]}.${num}`)
|
|
826
|
+
}
|
|
816
827
|
}
|
|
817
|
-
}
|
|
818
828
|
|
|
819
|
-
|
|
820
|
-
|
|
821
|
-
|
|
822
|
-
}
|
|
829
|
+
done()
|
|
830
|
+
})
|
|
831
|
+
await db_folder_init.p
|
|
832
|
+
}
|
|
823
833
|
|
|
824
|
-
async function get_files_for_key(key) {
|
|
825
|
-
|
|
826
|
-
|
|
827
|
-
|
|
828
|
-
|
|
829
|
-
|
|
830
|
-
|
|
831
|
-
|
|
832
|
-
}
|
|
834
|
+
async function get_files_for_key(key) {
|
|
835
|
+
await db_folder_init()
|
|
836
|
+
try {
|
|
837
|
+
let re = new RegExp("^" + encode_filename(key).replace(/[^a-zA-Z0-9]/g, "\\$&") + "\\.\\w+$")
|
|
838
|
+
return (await fs.promises.readdir(braid_text.db_folder))
|
|
839
|
+
.filter((a) => re.test(a))
|
|
840
|
+
.map((a) => `${braid_text.db_folder}/${a}`)
|
|
841
|
+
} catch (e) { return [] }
|
|
842
|
+
}
|
|
833
843
|
|
|
834
|
-
async function file_sync(key, process_delta, get_init) {
|
|
835
|
-
|
|
844
|
+
async function file_sync(key, process_delta, get_init, set_meta, get_meta) {
|
|
845
|
+
        let encoded = encode_filename(key)

+       if (encoded.length > max_encoded_key_size) throw new Error(`invalid key: too long (max ${max_encoded_key_size})`)

+       let currentNumber = 0
+       let currentSize = 0
+       let threshold = 0

+       // Read existing files and sort by numbers.
+       const files = (await get_files_for_key(key))
+           .filter(x => x.match(/\.\d+$/))
+           .sort((a, b) => parseInt(a.match(/\d+$/)[0]) - parseInt(b.match(/\d+$/)[0]))

-   }
-   try {
-       const filename = files[i]
-       if (braid_text.verbose) console.log(`trying to process file: ${filename}`)
-       const data = await fs.promises.readFile(filename)
-
-       let cursor = 0
-       let isFirstChunk = true
-       while (cursor < data.length) {
-           const chunkSize = data.readUInt32LE(cursor)
-           cursor += 4
-           const chunk = data.slice(cursor, cursor + chunkSize)
-           cursor += chunkSize
-
-           if (isFirstChunk) {
-               isFirstChunk = false
-               threshold = chunkSize * 10
-           }
-           process_delta(chunk)
+       // Try to process files starting from the highest number.
+       let done = false
+       for (let i = files.length - 1; i >= 0; i--) {
+           if (done) {
+               await fs.promises.unlink(files[i])
+               continue
            }
+           try {
+               const filename = files[i]
+               if (braid_text.verbose) console.log(`trying to process file: ${filename}`)
+               const data = await fs.promises.readFile(filename)
+
+               let cursor = 0
+               let isFirstChunk = true
+               while (cursor < data.length) {
+                   const chunkSize = data.readUInt32LE(cursor)
+                   cursor += 4
+                   const chunk = data.slice(cursor, cursor + chunkSize)
+                   cursor += chunkSize
+
+                   if (isFirstChunk) {
+                       isFirstChunk = false
+                       threshold = chunkSize * 10
+                   }
+                   process_delta(chunk)
+               }

+               currentSize = data.length
+               currentNumber = parseInt(filename.match(/\d+$/)[0])
+               done = true
+           } catch (error) {
+               console.error(`Error processing file: ${files[i]}`)
+               await fs.promises.unlink(files[i])
+           }
        }

-   let chain = Promise.resolve()
-   return {
-       change: async (bytes) => {
-           await (chain = chain.then(async () => {
-               if (!bytes) currentSize = threshold
-               else currentSize += bytes.length + 4 // we account for the extra 4 bytes for uint32
-               const filename = `${braid_text.db_folder}/${encoded}.${currentNumber}`
-               if (currentSize < threshold) {
-                   if (braid_text.verbose) console.log(`appending to db..`)
-
-                   let buffer = Buffer.allocUnsafe(4)
-                   buffer.writeUInt32LE(bytes.length, 0)
-                   await fs.promises.appendFile(filename, buffer)
-                   await fs.promises.appendFile(filename, bytes)
-
-                   if (braid_text.verbose) console.log("wrote to : " + filename)
-               } else {
-                   try {
-                       if (braid_text.verbose) console.log(`starting new db..`)

-                       if (braid_text.verbose) console.log("wrote to : " + newFilename)
+       var meta_filename = `${braid_text.db_folder}/.meta/${encoded}`
+       var meta_dirty = null
+       var meta_saving = null
+       try {
+           set_meta(JSON.parse(await fs.promises.readFile(meta_filename)))
+       } catch (e) {
+           console.error(`Error processing meta file: ${meta_filename}`)
+       }

+       let chain = Promise.resolve()
+       return {
+           change: async (bytes) => {
+               await (chain = chain.then(async () => {
+                   if (!bytes) currentSize = threshold
+                   else currentSize += bytes.length + 4 // we account for the extra 4 bytes for uint32
+                   const filename = `${braid_text.db_folder}/${encoded}.${currentNumber}`
+                   if (currentSize < threshold) {
+                       if (braid_text.verbose) console.log(`appending to db..`)
+
+                       let buffer = Buffer.allocUnsafe(4)
+                       buffer.writeUInt32LE(bytes.length, 0)
+                       await fs.promises.appendFile(filename, buffer)
+                       await fs.promises.appendFile(filename, bytes)
+
+                       if (braid_text.verbose) console.log("wrote to : " + filename)
+                   } else {
                        try {
+                           if (braid_text.verbose) console.log(`starting new db..`)
+
+                           currentNumber++
+                           const init = get_init()
+                           const buffer = Buffer.allocUnsafe(4)
+                           buffer.writeUInt32LE(init.length, 0)
+
+                           const newFilename = `${braid_text.db_folder}/${encoded}.${currentNumber}`
+                           await fs.promises.writeFile(newFilename, buffer)
+                           await fs.promises.appendFile(newFilename, init)
+
+                           if (braid_text.verbose) console.log("wrote to : " + newFilename)
+
+                           currentSize = 4 + init.length
+                           threshold = currentSize * 10
+                           try {
+                               await fs.promises.unlink(filename)
+                           } catch (e) { }
+                       } catch (e) {
+                           if (braid_text.verbose) console.log(`e = ${e.stack}`)
+                       }
                    }
+               }))
+           },
+           change_meta: async () => {
+               meta_dirty = true
+               if (meta_saving) return
+               meta_saving = true
+
+               while (meta_dirty) {
+                   meta_dirty = false
+                   await fs.promises.writeFile(meta_filename,
+                       JSON.stringify(get_meta()))
+                   await new Promise(done => setTimeout(done,
+                       braid_text.meta_file_save_period_ms))
                }
+
+               meta_saving = false
+           }
        }
    }
-   }

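Note: the file handling above uses a simple append-only log of length-prefixed records: each record is a 4-byte little-endian length followed by that many payload bytes. A minimal reader sketch (hypothetical helper, not part of braid-text's API):

    function read_records(data) { // data: a Node.js Buffer
        let records = []
        let cursor = 0
        while (cursor < data.length) {
            const size = data.readUInt32LE(cursor)  // 4-byte length prefix
            cursor += 4
            records.push(data.slice(cursor, cursor + size))
            cursor += size
        }
        return records
    }
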
-async function wait_for_events(
-    if (!wait_for_events.namespaces) wait_for_events.namespaces = {}
-    if (!wait_for_events.namespaces[key]) wait_for_events.namespaces[key] = {}
-    var ns = wait_for_events.namespaces[key]
-
-    if (!wait_for_events.space_used) wait_for_events.space_used = 0
-    if (wait_for_events.space_used + my_space > max_space) return
-    wait_for_events.space_used += my_space
-
-    var p_done = null
-    var p = new Promise(done => p_done = done)
-
-    var missing = 0
-    var on_find = () => {
-        missing--
-        if (!missing) p_done()
-    }
-
-    for (let event of events) {
-        var [actor, seq] = decode_version(event)
-        if (actor_seqs?.[actor]?.has(seq)) continue
-        missing++
+    async function wait_for_events(
+        key,
+        events,
+        actor_seqs,
+        my_space,
+        max_time = 3000,
+        max_space = 5 * 1024 * 1024) {

-        if (!
-        if (!
+        if (!wait_for_events.namespaces) wait_for_events.namespaces = {}
+        if (!wait_for_events.namespaces[key]) wait_for_events.namespaces[key] = {}
+        var ns = wait_for_events.namespaces[key]

-        if (!
-        if (
-        }
+        if (!wait_for_events.space_used) wait_for_events.space_used = 0
+        if (wait_for_events.space_used + my_space > max_space) return
+        wait_for_events.space_used += my_space

-        var
-        for (let event of events) {
-            var [actor, seq] = decode_version(event)
-
-            var cbs = ns.events[event]
-            if (!cbs) continue
+        var p_done = null
+        var p = new Promise(done => p_done = done)

+        var missing = 0
+        var on_find = () => {
+            missing--
+            if (!missing) p_done()
+        }
+
+        for (let event of events) {
+            var [actor, seq] = decode_version(event)
+            if (actor_seqs?.[actor]?.has(seq)) continue
+            missing++

+            if (!ns.actor_seqs) ns.actor_seqs = {}
+            if (!ns.actor_seqs[actor]) ns.actor_seqs[actor] = []
+            sorted_set_insert(ns.actor_seqs[actor], seq)

+            if (!ns.events) ns.events = {}
+            if (!ns.events[event]) ns.events[event] = new Set()
+            ns.events[event].add(on_find)
+        }

+        if (missing) {
+            var t = setTimeout(() => {
+                for (let event of events) {
+                    var [actor, seq] = decode_version(event)
+
+                    var cbs = ns.events[event]
+                    if (!cbs) continue

+                    cbs.delete(on_find)
+                    if (cbs.size) continue

-            }
-    wait_for_events.space_used -= my_space
-}
+                    delete ns.events[event]

-    if (!ns) return
+                    var seqs = ns.actor_seqs[actor]
+                    if (!seqs) continue

+                    sorted_set_delete(seqs, seq)
+                    if (seqs.length) continue
+
+                    delete ns.actor_seqs[actor]
+                }
+                p_done()
+            }, max_time)

-    if (!seqs) return
+            await p

-            var mid = (i + end) >> 1
-            seqs[mid] < base_seq ? i = mid + 1 : end = mid
-        }
-        var start = i
-
-        // iterate up through seq
-        while (i < seqs.length && seqs[i] <= seq) {
-            var e = actor + "-" + seqs[i]
-            ns.events?.[e]?.forEach(cb => cb())
-            delete ns.events?.[e]
-            i++
+            clearTimeout(t)
+        }
+        wait_for_events.space_used -= my_space
    }

+    async function got_event(key, event, change_count) {
+        var ns = wait_for_events.namespaces?.[key]
+        if (!ns) return

-    //////////////////////////////////////////////////////////////////
+        var [actor, seq] = decode_version(event)
+        var base_seq = seq + 1 - change_count

+        var seqs = ns.actor_seqs?.[actor]
+        if (!seqs) return
+
+        // binary search to find the first i >= base_seq
+        var i = 0, end = seqs.length
+        while (i < end) {
+            var mid = (i + end) >> 1
+            seqs[mid] < base_seq ? i = mid + 1 : end = mid
+        }
+        var start = i
+
+        // iterate up through seq
+        while (i < seqs.length && seqs[i] <= seq) {
+            var e = actor + "-" + seqs[i]
+            ns.events?.[e]?.forEach(cb => cb())
+            delete ns.events?.[e]
+            i++
+        }

+        seqs.splice(start, i - start)
+        if (!seqs.length) delete ns.actor_seqs[actor]
+    }
+
+    //////////////////////////////////////////////////////////////////
+    //////////////////////////////////////////////////////////////////
+    //////////////////////////////////////////////////////////////////
+
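Note: wait_for_events() and got_event() pair up as a wait/notify mechanism keyed by "actor-seq" event ids. A hypothetical usage sketch (names illustrative):

    // Block (up to max_time) until these events have been applied:
    await wait_for_events(key, ["alice-5", "bob-2"], known_actor_seqs, body_size)

    // After merging a change covering alice's seqs 4..5, wake any waiters:
    got_event(key, "alice-5", 2) // base_seq = 5 + 1 - 2 = 4, so seqs 4 and 5 fire
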
+    function dt_len(doc, version) {
+        return count_code_points(dt_get_string(doc, version))
+    }

+    function dt_get_string(doc, version) {
+        // optimization: if version is the latest,
+        // then return the current text..
+        if (v_eq(version, doc.getRemoteVersion().map((x) => x.join("-")).sort()))
+            return doc.get()

+        var bytes = doc.toBytes()
+        var oplog = OpLog.fromBytes(bytes)

-    b.merge(oplog, new Uint32Array(local_version))
-    var s = b.get()
-    b.free()
-
-    oplog.free()
-    return s
-}
+        var local_version = dt_get_local_version(bytes, version)

+        var b = new Branch()
+        b.merge(oplog, new Uint32Array(local_version))
+        var s = b.get()
+        b.free()
+
+        oplog.free()
+        return s
+    }

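Note: a hypothetical call, assuming a diamond-types Doc and a version given as a sorted array of "agent-seq" strings:

    let latest = doc.getRemoteVersion().map(x => x.join("-")).sort()
    dt_get_string(doc, latest)        // fast path: returns doc.get()
    dt_get_string(doc, ["alice-41"])  // replays the oplog up to that version
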
+    function dt_get(doc, version, agent = null, anti_version = null) {
+        if (dt_get.last_doc) dt_get.last_doc.free()

-    var include_versions = new Set()
-    var bad_versions = new Set(anti_version)
+        let bytes = doc.toBytes()
+        dt_get.last_doc = doc = Doc.fromBytes(bytes, agent)

-        var
-        bad_versions.add(v)
-        else
-        include_versions.add(v)
-    }
-    } else {
-    var include_versions = new Set(version)
-    var looking_for = new Set(version)
-    var local_version = []
+        let [_agents, versions, parentss] = dt_parse([...bytes])
+        if (anti_version) {
+            var include_versions = new Set()
+            var bad_versions = new Set(anti_version)

+            for (let i = 0; i < versions.length; i++) {
+                var v = versions[i].join("-")
+                var ps = parentss[i].map(x => x.join('-'))
+                if (bad_versions.has(v) || ps.some(x => bad_versions.has(x)))
+                    bad_versions.add(v)
+                else
+                    include_versions.add(v)
            }
+        } else {
+            var include_versions = new Set(version)
+            var looking_for = new Set(version)
+            var local_version = []
+
+            for (let i = versions.length - 1; i >= 0; i--) {
+                var v = versions[i].join("-")
+                var ps = parentss[i].map(x => x.join('-'))
+                if (looking_for.has(v)) {
+                    local_version.push(i)
+                    looking_for.delete(v)
+                }
+                if (include_versions.has(v))
+                    ps.forEach(x => include_versions.add(x))
+            }
+            local_version.reverse()

+            // NOTE: currently used by braid-chrome in dt.js at the bottom
+            dt_get.last_local_version = new Uint32Array(local_version)

+            if (looking_for.size) throw new Error(`version not found: ${version}`)
+        }

+        let new_doc = new Doc(agent)
+        let op_runs = doc.getOpsSince([])
+
+        let i = 0
+        op_runs.forEach((op_run) => {
+            if (op_run.content) op_run.content = [...op_run.content]
+
+            let len = op_run.end - op_run.start
+            let base_i = i
+            for (let j = 1; j <= len; j++) {
+                let I = base_i + j
+                if (
+                    j == len ||
+                    parentss[I].length != 1 ||
+                    parentss[I][0][0] != versions[I - 1][0] ||
+                    parentss[I][0][1] != versions[I - 1][1] ||
+                    versions[I][0] != versions[I - 1][0] ||
+                    versions[I][1] != versions[I - 1][1] + 1
+                ) {
+                    for (; i < I; i++) {
+                        let version = versions[i].join("-")
+                        if (!include_versions.has(version)) continue
+                        let og_i = i
+                        let content = []
+                        if (op_run.content?.[i - base_i]) content.push(op_run.content[i - base_i])
+                        if (!!op_run.content === op_run.fwd)
+                            while (i + 1 < I && include_versions.has(versions[i + 1].join("-"))) {
+                                i++
+                                if (op_run.content?.[i - base_i]) content.push(op_run.content[i - base_i])
+                            }
+                        content = content.length ? content.join("") : null
+
+                        new_doc.mergeBytes(
+                            dt_create_bytes(
+                                version,
+                                parentss[og_i].map((x) => x.join("-")),
+                                op_run.fwd ?
+                                    (op_run.content ?
+                                        op_run.start + (og_i - base_i) :
+                                        op_run.start) :
+                                    op_run.end - 1 - (i - base_i),
+                                op_run.content ? 0 : i - og_i + 1,
+                                content
+                            )
                        )
+                    }
                }
            }
-}
-
-    return new_doc
-}
-
-function dt_get_patches(doc, version = null) {
-    if (version && v_eq(version,
-        doc.getRemoteVersion().map((x) => x.join("-")).sort())) {
-        // they want everything past the end, which is nothing
-        return []
+        })
+        return new_doc
    }

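Note: versions throughout this file are "agent-seq" strings such as "alice-5". decode_version (defined elsewhere in this file) must split them back apart; a minimal sketch of the expected behavior:

    function decode_version_sketch(v) { // e.g. "alice-5" -> ["alice", 5]
        let i = v.lastIndexOf("-")
        return [v.slice(0, i), parseInt(v.slice(i + 1))]
    }
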
+    function dt_get_patches(doc, version = null) {
+        if (version && v_eq(version,
+            doc.getRemoteVersion().map((x) => x.join("-")).sort())) {
+            // they want everything past the end, which is nothing
+            return []
+        }

-    let
-    let
-    let
+        let bytes = doc.toBytes()
+        doc = Doc.fromBytes(bytes)
+
+        let [_agents, versions, parentss] = dt_parse([...bytes])
+
+        let op_runs = []
+        if (version?.length) {
+            let frontier = {}
+            version.forEach((x) => frontier[x] = true)
+            let local_version = []
+            for (let i = 0; i < versions.length; i++)
+                if (frontier[versions[i].join("-")]) local_version.push(i)
+
+            local_version = new Uint32Array(local_version)
+
+            let after_bytes = doc.getPatchSince(local_version)
+            ;[_agents, versions, parentss] = dt_parse([...after_bytes])
+            op_runs = doc.getOpsSince(local_version)
+        } else op_runs = doc.getOpsSince([])
+
+        doc.free()
+
+        let i = 0
+        let patches = []
+        op_runs.forEach((op_run) => {
+            let version = versions[i]
+            let parents = parentss[i].map((x) => x.join("-")).sort()
+            let start = op_run.start
+            let end = start + 1
+            if (op_run.content) op_run.content = [...op_run.content]
+            let len = op_run.end - op_run.start
+            for (let j = 1; j <= len; j++) {
+                let I = i + j
+                if (
+                    (!op_run.content && op_run.fwd) ||
+                    j == len ||
+                    parentss[I].length != 1 ||
+                    parentss[I][0][0] != versions[I - 1][0] ||
+                    parentss[I][0][1] != versions[I - 1][1] ||
+                    versions[I][0] != versions[I - 1][0] ||
+                    versions[I][1] != versions[I - 1][1] + 1
+                ) {
+                    let s = op_run.fwd ?
+                        (op_run.content ?
+                            start :
+                            op_run.start) :
+                        (op_run.start + (op_run.end - end))
+                    let e = op_run.fwd ?
+                        (op_run.content ?
+                            end :
+                            op_run.start + (end - start)) :
+                        (op_run.end - (start - op_run.start))
+                    patches.push({
+                        version: `${version[0]}-${version[1] + e - s - 1}`,
+                        parents,
+                        unit: "text",
+                        range: op_run.content ? `[${s}:${s}]` : `[${s}:${e}]`,
+                        content: op_run.content?.slice(start - op_run.start, end - op_run.start).join("") ?? "",
+                        start: s,
+                        end: e,
+                    })
+                    if (j == len) break
+                    version = versions[I]
+                    parents = parentss[I].map((x) => x.join("-")).sort()
+                    start = op_run.start + j
+                }
+                end++
            }
-
-    }
-
-    }
-    return patches
-}
+            i += len
+        })
+        return patches
+    }

-function dt_parse(byte_array) {
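Note: an illustrative patch object as produced by dt_get_patches (values made up):

    {
        version: "alice-7",            // version of the last op in the run
        parents: ["alice-4", "bob-2"], // sorted parent versions
        unit: "text",
        range: "[3:3]",                // "[s:s]" for inserts, "[s:e]" for deletes
        content: "hi",                 // "" for deletes
        start: 3,
        end: 5,
    }
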
+    function dt_parse(byte_array) {
+        if (new TextDecoder().decode(new Uint8Array(byte_array.splice(0, 8))) !== "DMNDTYPS") throw new Error("dt parse error, expected DMNDTYPS")

+        if (byte_array.shift() != 0) throw new Error("dt parse error, expected version 0")

+        let agents = []
+        let versions = []
+        let parentss = []

-        }
-    } else if (id == 20) {
-    } else if (id == 21) {
-        let seqs = {}
-        let goal = byte_array.length - len
-        while (byte_array.length > goal) {
-            let part0 = dt_read_varint(byte_array)
-            let has_jump = part0 & 1
-            let agent_i = (part0 >> 1) - 1
-            let run_length = dt_read_varint(byte_array)
-            let jump = 0
-            if (has_jump) {
-                let part2 = dt_read_varint(byte_array)
-                jump = part2 >> 1
-                if (part2 & 1) jump *= -1
+        while (byte_array.length) {
+            let id = byte_array.shift()
+            let len = dt_read_varint(byte_array)
+            if (id == 1) {
+            } else if (id == 3) {
+                let goal = byte_array.length - len
+                while (byte_array.length > goal) {
+                    agents.push(dt_read_string(byte_array))
                }
+            } else if (id == 20) {
+            } else if (id == 21) {
+                let seqs = {}
+                let goal = byte_array.length - len
+                while (byte_array.length > goal) {
+                    let part0 = dt_read_varint(byte_array)
+                    let has_jump = part0 & 1
+                    let agent_i = (part0 >> 1) - 1
+                    let run_length = dt_read_varint(byte_array)
+                    let jump = 0
+                    if (has_jump) {
+                        let part2 = dt_read_varint(byte_array)
+                        jump = part2 >> 1
+                        if (part2 & 1) jump *= -1
+                    }
+                    let base = (seqs[agent_i] || 0) + jump

-        }
-        seqs[agent_i] = base + run_length
-    }
-    } else if (id == 23) {
-        let count = 0
-        let goal = byte_array.length - len
-        while (byte_array.length > goal) {
-            let run_len = dt_read_varint(byte_array)
-
-            let parents = []
-            let has_more = 1
-            while (has_more) {
-                let x = dt_read_varint(byte_array)
-                let is_foreign = 0x1 & x
-                has_more = 0x2 & x
-                let num = x >> 2
-
-                if (x == 1) {
-                    // no parents (e.g. parent is "root")
-                } else if (!is_foreign) {
-                    parents.push(versions[count - num])
-                } else {
-                    parents.push([agents[num - 1], dt_read_varint(byte_array)])
+                    for (let i = 0; i < run_length; i++) {
+                        versions.push([agents[agent_i], base + i])
                    }
+                    seqs[agent_i] = base + run_length
                }
-
-        count
+            } else if (id == 23) {
+                let count = 0
+                let goal = byte_array.length - len
+                while (byte_array.length > goal) {
+                    let run_len = dt_read_varint(byte_array)
+
+                    let parents = []
+                    let has_more = 1
+                    while (has_more) {
+                        let x = dt_read_varint(byte_array)
+                        let is_foreign = 0x1 & x
+                        has_more = 0x2 & x
+                        let num = x >> 2
+
+                        if (x == 1) {
+                            // no parents (e.g. parent is "root")
+                        } else if (!is_foreign) {
+                            parents.push(versions[count - num])
+                        } else {
+                            parents.push([agents[num - 1], dt_read_varint(byte_array)])
+                        }
+                    }
+                    parentss.push(parents)
                    count++
+
+                    for (let i = 0; i < run_len - 1; i++) {
+                        parentss.push([versions[count - 1]])
+                        count++
+                    }
                }
+            } else {
+                byte_array.splice(0, len)
            }
-    } else {
-        byte_array.splice(0, len)
        }
-    }

-}
+        return [agents, versions, parentss]
+    }

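Note: an illustrative dt_parse result for a tiny two-agent history (shapes only; values made up):

    // let [agents, versions, parentss] = dt_parse([...doc.toBytes()])
    // agents   == ["alice", "bob"]
    // versions == [["alice", 0], ["alice", 1], ["bob", 0]]
    // parentss == [[], [["alice", 0]], [["alice", 1]]]
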
+    function dt_get_actor_seq_runs(byte_array, cb) {
+        if (new TextDecoder().decode(new Uint8Array(byte_array.splice(0, 8))) !== "DMNDTYPS") throw new Error("dt parse error, expected DMNDTYPS")

+        if (byte_array.shift() != 0) throw new Error("dt parse error, expected version 0")

+        let agents = []

-        }
-    } else if (id == 20) {
-    } else if (id == 21) {
-        let seqs = {}
-        let goal = byte_array.length - len
-        while (byte_array.length > goal) {
-            let part0 = dt_read_varint(byte_array)
-            let has_jump = part0 & 1
-            let agent_i = (part0 >> 1) - 1
-            let run_length = dt_read_varint(byte_array)
-            let jump = 0
-            if (has_jump) {
-                let part2 = dt_read_varint(byte_array)
-                jump = part2 >> 1
-                if (part2 & 1) jump *= -1
+        while (byte_array.length) {
+            let id = byte_array.shift()
+            let len = dt_read_varint(byte_array)
+            if (id == 1) {
+            } else if (id == 3) {
+                let goal = byte_array.length - len
+                while (byte_array.length > goal) {
+                    agents.push(dt_read_string(byte_array))
                }
+            } else if (id == 20) {
+            } else if (id == 21) {
+                let seqs = {}
+                let goal = byte_array.length - len
+                while (byte_array.length > goal) {
+                    let part0 = dt_read_varint(byte_array)
+                    let has_jump = part0 & 1
+                    let agent_i = (part0 >> 1) - 1
+                    let run_length = dt_read_varint(byte_array)
+                    let jump = 0
+                    if (has_jump) {
+                        let part2 = dt_read_varint(byte_array)
+                        jump = part2 >> 1
+                        if (part2 & 1) jump *= -1
+                    }
+                    let base = (seqs[agent_i] || 0) + jump

+                    cb(agents[agent_i], base, run_length)
+                    seqs[agent_i] = base + run_length
+                }
+            } else {
+                byte_array.splice(0, len)
            }
-    } else {
-        byte_array.splice(0, len)
        }
    }
-}

-function dt_get_local_version(bytes, version) {
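Note: a hypothetical use of dt_get_actor_seq_runs, tallying how many ops each actor contributed from the id-21 chunk runs:

    let counts = {}
    dt_get_actor_seq_runs([...doc.toBytes()], (actor, base, run_length) => {
        counts[actor] = (counts[actor] || 0) + run_length
    })
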
+    function dt_get_local_version(bytes, version) {
+        var looking_for = new Map()
+        for (var event of version) {
+            var [agent, seq] = decode_version(event)
+            if (!looking_for.has(agent)) looking_for.set(agent, [])
+            looking_for.get(agent).push(seq)
+        }
+        for (var seqs of looking_for.values())
+            seqs.sort((a, b) => a - b)

+        var byte_array = [...bytes]
+        var local_version = []
+        var local_version_base = 0

+        if (new TextDecoder().decode(new Uint8Array(byte_array.splice(0, 8))) !== "DMNDTYPS") throw new Error("dt parse error, expected DMNDTYPS")

+        if (byte_array.shift() != 0) throw new Error("dt parse error, expected version 0")

+        let agents = []

-        }
-    } else if (id == 20) {
-    } else if (id == 21) {
-        let seqs = {}
-        let goal = byte_array.length - len
-        while (byte_array.length > goal && looking_for.size) {
-            let part0 = dt_read_varint(byte_array)
-            let has_jump = part0 & 1
-            let agent_i = (part0 >> 1) - 1
-            let run_length = dt_read_varint(byte_array)
-            let jump = 0
-            if (has_jump) {
-                let part2 = dt_read_varint(byte_array)
-                jump = part2 >> 1
-                if (part2 & 1) jump *= -1
-            }
-            let base = (seqs[agent_i] || 0) + jump
-
-            var agent = agents[agent_i]
-            looking_for_seqs = looking_for.get(agent)
-            if (looking_for_seqs) {
-                for (var seq of splice_out_range(
-                    looking_for_seqs, base, base + run_length - 1))
-                    local_version.push(local_version_base + (seq - base))
-                if (!looking_for_seqs.length) looking_for.delete(agent)
+        while (byte_array.length && looking_for.size) {
+            let id = byte_array.shift()
+            let len = dt_read_varint(byte_array)
+            if (id == 1) {
+            } else if (id == 3) {
+                let goal = byte_array.length - len
+                while (byte_array.length > goal) {
+                    agents.push(dt_read_string(byte_array))
                }
+            } else if (id == 20) {
+            } else if (id == 21) {
+                let seqs = {}
+                let goal = byte_array.length - len
+                while (byte_array.length > goal && looking_for.size) {
+                    let part0 = dt_read_varint(byte_array)
+                    let has_jump = part0 & 1
+                    let agent_i = (part0 >> 1) - 1
+                    let run_length = dt_read_varint(byte_array)
+                    let jump = 0
+                    if (has_jump) {
+                        let part2 = dt_read_varint(byte_array)
+                        jump = part2 >> 1
+                        if (part2 & 1) jump *= -1
+                    }
+                    let base = (seqs[agent_i] || 0) + jump
+
+                    var agent = agents[agent_i]
+                    looking_for_seqs = looking_for.get(agent)
+                    if (looking_for_seqs) {
+                        for (var seq of splice_out_range(
+                            looking_for_seqs, base, base + run_length - 1))
+                            local_version.push(local_version_base + (seq - base))
+                        if (!looking_for_seqs.length) looking_for.delete(agent)
+                    }
+                    local_version_base += run_length

-
+                    seqs[agent_i] = base + run_length
+                }
+            } else {
+                byte_array.splice(0, len)
            }
-    } else {
-        byte_array.splice(0, len)
        }
-    }

+        if (looking_for.size) throw new Error(`version not found: ${version}`)
+        return local_version
+
+        function splice_out_range(a, s, e) {
+            if (!a?.length) return [];
+            let l = 0, r = a.length;
+            while (l < r) {
+                const m = Math.floor((l + r) / 2);
+                if (a[m] < s) l = m + 1; else r = m;
+            }
+            const i = l;
+            l = i; r = a.length;
+            while (l < r) {
+                const m = Math.floor((l + r) / 2);
+                if (a[m] <= e) l = m + 1; else r = m;
+            }
+            return a.splice(i, l - i);
        }
-    return a.splice(i, l - i);
    }
-}

-function dt_read_string(byte_array) {
-}
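Note: splice_out_range removes (and returns) the elements of a sorted array that fall within [s, e], found by two binary searches, e.g.:

    let a = [1, 3, 5, 7]
    splice_out_range(a, 3, 6) // => [3, 5]; a is now [1, 7]
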
+    function dt_read_string(byte_array) {
+        return new TextDecoder().decode(new Uint8Array(byte_array.splice(0, dt_read_varint(byte_array))))
+    }

-function dt_read_varint(byte_array) {
+    function dt_read_varint(byte_array) {
+        let result = 0
+        let shift = 0
+        while (true) {
+            if (byte_array.length === 0) throw new Error("byte array does not contain varint")

+            let byte_val = byte_array.shift()
+            result |= (byte_val & 0x7f) << shift
+            if ((byte_val & 0x80) == 0) return result
+            shift += 7
+        }
    }
-}

-function dt_create_bytes(version, parents, pos, del, ins) {
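Note: this is LEB128-style varint decoding: 7 data bits per byte, with the high bit set on every byte except the last. For example, 300 encodes as [0xac, 0x02]:

    dt_read_varint([0xac, 0x02]) // => 300, i.e. 0x2c | (0x02 << 7)
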
+    function dt_create_bytes(version, parents, pos, del, ins) {
+        if (del) pos += del - 1

+        function write_varint(bytes, value) {
+            while (value >= 0x80) {
+                bytes.push((value & 0x7f) | 0x80)
+                value >>= 7
+            }
+            bytes.push(value)
        }
-    bytes.push(value)
-}

+        function write_string(byte_array, str) {
+            let str_bytes = new TextEncoder().encode(str)
+            write_varint(byte_array, str_bytes.length)
+            for (let x of str_bytes) byte_array.push(x)
+        }

+        version = decode_version(version)
+        parents = parents.map(decode_version)

+        let bytes = []
+        bytes = bytes.concat(Array.from(new TextEncoder().encode("DMNDTYPS")))
+        bytes.push(0)

+        let file_info = []
+        let agent_names = []

+        let agents = new Set()
+        agents.add(version[0])
+        for (let p of parents) agents.add(p[0])
+        agents = [...agents]

+        // console.log(JSON.stringify({ agents, parents }, null, 4));

+        let agent_to_i = {}
+        for (let [i, agent] of agents.entries()) {
+            agent_to_i[agent] = i
+            write_string(agent_names, agent)
+        }

+        file_info.push(3)
+        write_varint(file_info, agent_names.length)
+        for (let x of agent_names) file_info.push(x)

+        bytes.push(1)
+        write_varint(bytes, file_info.length)
+        for (let x of file_info) bytes.push(x)

+        let branch = []

+        if (parents.length) {
+            let frontier = []

+            for (let [i, [agent, seq]] of parents.entries()) {
+                let has_more = i < parents.length - 1
+                let mapped = agent_to_i[agent]
+                let n = ((mapped + 1) << 1) | (has_more ? 1 : 0)
+                write_varint(frontier, n)
+                write_varint(frontier, seq)
+            }

+            branch.push(12)
+            write_varint(branch, frontier.length)
+            for (let x of frontier) branch.push(x)
+        }

+        bytes.push(10)
+        write_varint(bytes, branch.length)
+        for (let x of branch) bytes.push(x)

+        let patches = []

+        let unicode_chars = ins ? [...ins] : []

+        if (ins) {
+            let inserted_content_bytes = []

+            inserted_content_bytes.push(0) // ins (not del, which is 1)

+            inserted_content_bytes.push(13) // "content" enum (rather than compressed)

+            let encoder = new TextEncoder()
+            let utf8Bytes = encoder.encode(ins)

+            write_varint(inserted_content_bytes, 1 + utf8Bytes.length)
+            // inserted_content_bytes.push(1 + utf8Bytes.length) // length of content chunk
+            inserted_content_bytes.push(4) // "plain text" enum

+            for (let b of utf8Bytes) inserted_content_bytes.push(b) // actual text

+            inserted_content_bytes.push(25) // "known" enum
+            let known_chunk = []
+            write_varint(known_chunk, unicode_chars.length * 2 + 1)
+            write_varint(inserted_content_bytes, known_chunk.length)
+            for (let x of known_chunk) inserted_content_bytes.push(x)

+            patches.push(24)
+            write_varint(patches, inserted_content_bytes.length)
+            for (let b of inserted_content_bytes) patches.push(b)
+        }

+        // write in the version
+        let version_bytes = []

+        let [agent, seq] = version
+        let agent_i = agent_to_i[agent]
+        let jump = seq

+        write_varint(version_bytes, ((agent_i + 1) << 1) | (jump != 0 ? 1 : 0))
+        write_varint(version_bytes, ins ? unicode_chars.length : del)
+        if (jump) write_varint(version_bytes, jump << 1)

+        patches.push(21)
+        write_varint(patches, version_bytes.length)
+        for (let b of version_bytes) patches.push(b)

+        // write in "op" bytes (some encoding of position)
+        let op_bytes = []

+        if (del) {
+            if (pos == 0) {
+                write_varint(op_bytes, 4)
+            } else if (del == 1) {
+                write_varint(op_bytes, pos * 16 + 6)
+            } else {
+                write_varint(op_bytes, del * 16 + 7)
+                write_varint(op_bytes, pos * 2 + 2)
+            }
+        } else if (unicode_chars.length == 1) {
+            if (pos == 0) write_varint(op_bytes, 0)
+            else write_varint(op_bytes, pos * 16 + 2)
+        } else if (pos == 0) {
+            write_varint(op_bytes, unicode_chars.length * 8 + 1)
        } else {
-    write_varint(op_bytes,
-    write_varint(op_bytes, pos * 2
-    }
-} else if (unicode_chars.length == 1) {
-    if (pos == 0) write_varint(op_bytes, 0)
-    else write_varint(op_bytes, pos * 16 + 2)
-} else if (pos == 0) {
-    write_varint(op_bytes, unicode_chars.length * 8 + 1)
-} else {
-    write_varint(op_bytes, unicode_chars.length * 8 + 3)
-    write_varint(op_bytes, pos * 2)
-}
+            write_varint(op_bytes, unicode_chars.length * 8 + 3)
+            write_varint(op_bytes, pos * 2)
+        }

+        patches.push(22)
+        write_varint(patches, op_bytes.length)
+        for (let b of op_bytes) patches.push(b)

+        // write in parents
+        let parents_bytes = []

+        write_varint(parents_bytes, ins ? unicode_chars.length : del)

+        if (parents.length) {
+            for (let [i, [agent, seq]] of parents.entries()) {
+                let has_more = i < parents.length - 1
+                let agent_i = agent_to_i[agent]
+                write_varint(parents_bytes, ((agent_i + 1) << 2) | (has_more ? 2 : 0) | 1)
+                write_varint(parents_bytes, seq)
+            }
+        } else write_varint(parents_bytes, 1)

+        patches.push(23)
+        write_varint(patches, parents_bytes.length)
+        for (let x of parents_bytes) patches.push(x)

+        // write in patches
+        bytes.push(20)
+        write_varint(bytes, patches.length)
+        for (let b of patches) bytes.push(b)

-}
+        // console.log(bytes);
+        return bytes
+    }


-function OpLog_remote_to_local(doc, frontier) {
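Note: a hypothetical call, encoding a single insertion of "hi" at position 0, authored as version "alice-0" with no parents, then merging it into a Doc (illustrative values):

    doc.mergeBytes(dt_create_bytes("alice-0", [], 0, 0, "hi"))
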
+    function OpLog_remote_to_local(doc, frontier) {
+        let map = Object.fromEntries(frontier.map((x) => [x, true]))

+        let local_version = []

+        let max_version = doc.getLocalVersion().reduce((a, b) => Math.max(a, b), -1)
+        for (let i = 0; i <= max_version; i++) {
+            if (map[doc.localToRemoteVersion([i])[0].join("-")]) {
+                local_version.push(i)
+            }
        }
-    }
-
-    return frontier.length == local_version.length && new Uint32Array(local_version)
-}

-    return v1.length == v2.length && v1.every((x, i) => x == v2[i])
-}
-
-function get_xf_patches(doc, v) {
-    let patches = []
-    for (let xf of doc.xfSince(v)) {
-        patches.push(
-            xf.kind == "Ins"
-                ? {
-                    unit: "text",
-                    range: `[${xf.start}:${xf.start}]`,
-                    content: xf.content,
-                }
-                : {
-                    unit: "text",
-                    range: `[${xf.start}:${xf.end}]`,
-                    content: "",
-                }
-        )
+        return frontier.length == local_version.length && new Uint32Array(local_version)
    }
-    return relative_to_absolute_patches(patches)
-}

-function
-    if (parent.left == node) {
-        parent.left_size -= node.left_size + node.size
-    } else {
-        node.left_size += parent.left_size + parent.size
-    }
-})
-avl.root.size = Infinity
-avl.root.left_size = 0

+    function v_eq(v1, v2) {
+        return v1.length == v2.length && v1.every((x, i) => x == v2[i])
+    }

-function
-    let

+    function get_xf_patches(doc, v) {
+        let patches = []
+        for (let xf of doc.xfSince(v)) {
+            patches.push(
+                xf.kind == "Ins"
+                    ? {
+                        unit: "text",
+                        range: `[${xf.start}:${xf.start}]`,
+                        content: xf.content,
+                    }
+                    : {
+                        unit: "text",
+                        range: `[${xf.start}:${xf.end}]`,
+                        content: "",
+                    }
+            )
        }
+        return relative_to_absolute_patches(patches)
    }

-    let
-    while (true) {
-        if (start < node.left_size || (node.left && node.content == null && start == node.left_size)) {
-            node = node.left
-        } else if (start > node.left_size + node.size || (node.content == null && start == node.left_size + node.size)) {
-            start -= node.left_size + node.size
-            node = node.right
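Note: illustrative get_xf_patches output, one insert and one delete, as Braid-style text patches (values made up):

    // [ { unit: "text", range: "[3:3]",   content: "hi" },
    //   { unit: "text", range: "[10:12]", content: "" } ]
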
+    function relative_to_absolute_patches(patches) {
+        let avl = create_avl_tree((node) => {
+            let parent = node.parent
+            if (parent.left == node) {
+                parent.left_size -= node.left_size + node.size
            } else {
+                node.left_size += parent.left_size + parent.size
+            }
+        })
+        avl.root.size = Infinity
+        avl.root.left_size = 0
+
+        function resize(node, new_size) {
+            if (node.size == new_size) return
+            let delta = new_size - node.size
+            node.size = new_size
+            while (node.parent) {
+                if (node.parent.left == node) node.parent.left_size += delta
+                node = node.parent
            }
        }

+        for (let p of patches) {
+            let [start, end] = p.range.match(/\d+/g).map((x) => 1 * x)
+            let del = end - start
+
+            let node = avl.root
+            while (true) {
+                if (start < node.left_size || (node.left && node.content == null && start == node.left_size)) {
+                    node = node.left
+                } else if (start > node.left_size + node.size || (node.content == null && start == node.left_size + node.size)) {
+                    start -= node.left_size + node.size
+                    node = node.right
+                } else {
+                    start -= node.left_size
+                    break
                }
            }

-            let x = { size: 0, left_size: 0, content: p.content, del }
-            avl.add(node, "left", x)
-            resize(x, count_code_points(x.content))
-            resize(node, node.size - (start + del))
-        } else {
-            node.content = node.content.slice(0, codePoints_to_index(node.content, start)) + p.content + node.content.slice(codePoints_to_index(node.content, start + del))
-            resize(node, count_code_points(node.content))
-        }
-    } else {
-        let next
-        let middle_del = 0
-        while (remaining >= (next = avl.next(node)).size) {
-            remaining -= next.size
-            middle_del += next.del ?? next.size
-            resize(next, 0)
-            avl.del(next)
+            let remaining = start + del - node.size
+            if (remaining < 0) {
+                if (node.content == null) {
+                    if (start > 0) {
+                        let x = { size: 0, left_size: 0 }
+                        avl.add(node, "left", x)
+                        resize(x, start)
                    }
+                    let x = { size: 0, left_size: 0, content: p.content, del }
+                    avl.add(node, "left", x)
+                    resize(x, count_code_points(x.content))
+                    resize(node, node.size - (start + del))
                } else {
+                    node.content = node.content.slice(0, codePoints_to_index(node.content, start)) + p.content + node.content.slice(codePoints_to_index(node.content, start + del))
+                    resize(node, count_code_points(node.content))
                }
            } else {
-                let x = {
-                    size: 0,
-                    left_size: 0,
-                    content: p.content,
-                    del: node.size - start + middle_del + remaining,
-                }
-                resize(node, start)
-                avl.add(node, "right", x)
-                resize(x, count_code_points(x.content))
+                let next
+                let middle_del = 0
+                while (remaining >= (next = avl.next(node)).size) {
+                    remaining -= next.size
+                    middle_del += next.del ?? next.size
                    resize(next, 0)
                    avl.del(next)
                }

-                resize(node, start)
-                if (node.size == 0) avl.del(node)
-                resize(next, count_code_points(next.content))
-            } else {
-                node.del += middle_del + next.del
-                node.content = node.content.slice(0, codePoints_to_index(node.content, start)) + p.content + next.content.slice(codePoints_to_index(next.content, remaining))
-                resize(node, count_code_points(node.content))
+                if (node.content == null) {
+                    if (next.content == null) {
+                        if (start == 0) {
+                            node.content = p.content
+                            node.del = node.size + middle_del + remaining
+                            resize(node, count_code_points(node.content))
+                        } else {
+                            let x = {
+                                size: 0,
+                                left_size: 0,
+                                content: p.content,
+                                del: node.size - start + middle_del + remaining,
+                            }
+                            resize(node, start)
+                            avl.add(node, "right", x)
+                            resize(x, count_code_points(x.content))
+                        }
+                        resize(next, next.size - remaining)
+                    } else {
+                        next.del += node.size - start + middle_del
+                        next.content = p.content + next.content.slice(codePoints_to_index(next.content, remaining))
+                        resize(node, start)
+                        if (node.size == 0) avl.del(node)
+                        resize(next, count_code_points(next.content))
+                    }
+                } else {
+                    if (next.content == null) {
+                        node.del += middle_del + remaining
+                        node.content = node.content.slice(0, codePoints_to_index(node.content, start)) + p.content
+                        resize(node, count_code_points(node.content))
+                        resize(next, next.size - remaining)
+                    } else {
+                        node.del += middle_del + next.del
+                        node.content = node.content.slice(0, codePoints_to_index(node.content, start)) + p.content + next.content.slice(codePoints_to_index(next.content, remaining))
+                        resize(node, count_code_points(node.content))
+                        resize(next, 0)
+                        avl.del(next)
+                    }
+                }
            }
        }
-    }

+        let new_patches = []
+        let offset = 0
+        let node = avl.root
+        while (node.left) node = node.left
+        while (node) {
+            if (node.content == null) {
+                offset += node.size
+            } else {
+                new_patches.push({
+                    unit: patches[0].unit,
+                    range: `[${offset}:${offset + node.del}]`,
+                    content: node.content,
+                })
+                offset += node.del
+            }

+            node = avl.next(node)
+        }
+        return new_patches
    }
-    return new_patches
-}

-function create_avl_tree(on_rotate) {
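Note: a worked example. Each input range is relative to the text after the previous patches were applied; the output uses positions in the original text:

    relative_to_absolute_patches([
        { unit: "text", range: "[0:0]", content: "AB" }, // insert "AB" at 0
        { unit: "text", range: "[1:1]", content: "C" },  // insert between A and B
    ]) // => [ { unit: "text", range: "[0:0]", content: "ACB" } ]
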
function create_avl_tree(on_rotate) {
|
|
1823
|
+
let self = { root: { height: 1 } }
|
|
1790
1824
|
|
|
1791
|
-
|
|
1792
|
-
|
|
1793
|
-
|
|
1825
|
+
self.calc_height = (node) => {
|
|
1826
|
+
node.height = 1 + Math.max(node.left?.height ?? 0, node.right?.height ?? 0)
|
|
1827
|
+
}
|
|
1794
1828
|
|
|
1795
|
-
|
|
1796
|
-
|
|
1797
|
-
|
|
1798
|
-
|
|
1829
|
+
self.rechild = (child, new_child) => {
|
|
1830
|
+
if (child.parent) {
|
|
1831
|
+
if (child.parent.left == child) {
|
|
1832
|
+
child.parent.left = new_child
|
|
1833
|
+
} else {
|
|
1834
|
+
child.parent.right = new_child
|
|
1835
|
+
}
|
|
1799
1836
|
} else {
|
|
1800
|
-
|
|
1837
|
+
self.root = new_child
|
|
1801
1838
|
}
|
|
1802
|
-
|
|
1803
|
-
self.root = new_child
|
|
1839
|
+
if (new_child) new_child.parent = child.parent
|
|
1804
1840
|
}
|
|
1805
|
-
if (new_child) new_child.parent = child.parent
|
|
1806
|
-
}
|
|
1807
1841
|
|
|
1808
|
-
|
|
1809
|
-
|
|
1842
|
+
self.rotate = (node) => {
|
|
1843
|
+
on_rotate(node)
|
|
1810
1844
|
|
|
1811
|
-
|
|
1812
|
-
|
|
1813
|
-
|
|
1845
|
+
let parent = node.parent
|
|
1846
|
+
let left = parent.right == node ? "left" : "right"
|
|
1847
|
+
let right = parent.right == node ? "right" : "left"
|
|
1814
1848
|
|
|
1815
|
-
|
|
1816
|
-
|
|
1817
|
-
|
|
1849
|
+
parent[right] = node[left]
|
|
1850
|
+
if (parent[right]) parent[right].parent = parent
|
|
1851
|
+
self.calc_height(parent)
|
|
1818
1852
|
|
|
1819
|
-
|
|
1820
|
-
|
|
1853
|
+
self.rechild(parent, node)
|
|
1854
|
+
parent.parent = node
|
|
1821
1855
|
|
|
1822
|
-
|
|
1823
|
-
|
|
1856
|
+
node[left] = parent
|
|
1857
|
+
}
|
|
1858
|
+
|
|
1859
|
+
self.fix_avl = (node) => {
|
|
1860
|
+
self.calc_height(node)
|
|
1861
|
+
let diff = (node.right?.height ?? 0) - (node.left?.height ?? 0)
|
|
1862
|
+
if (Math.abs(diff) >= 2) {
|
|
1863
|
+
if (diff > 0) {
|
|
1864
|
+
+            if ((node.right.left?.height ?? 0) > (node.right.right?.height ?? 0)) self.rotate(node.right.left)
+            self.rotate((node = node.right))
+        } else {
+            if ((node.left.right?.height ?? 0) > (node.left.left?.height ?? 0)) self.rotate(node.left.right)
+            self.rotate((node = node.left))
+        }
+        self.fix_avl(node)
+    } else if (node.parent) self.fix_avl(node.parent)
+}

+self.add = (node, side, add_me) => {
+    let other_side = side == "left" ? "right" : "left"
+    add_me.height = 1

+    if (node[side]) {
+        node = node[side]
+        while (node[other_side]) node = node[other_side]
+        node[other_side] = add_me
+    } else {
+        node[side] = add_me
+    }
+    add_me.parent = node
+    self.fix_avl(node)
+}

+self.del = (node) => {
+    if (node.left && node.right) {
+        let cursor = node.right
+        while (cursor.left) cursor = cursor.left
+        cursor.left = node.left

+        // breaks abstraction
+        cursor.left_size = node.left_size
+        let y = cursor
+        while (y.parent != node) {
+            y = y.parent
+            y.left_size -= cursor.size
+        }

+        node.left.parent = cursor
+        if (cursor == node.right) {
+            self.rechild(node, cursor)
+            self.fix_avl(cursor)
+        } else {
+            let x = cursor.parent
+            self.rechild(cursor, cursor.right)
+            cursor.right = node.right
+            node.right.parent = cursor
+            self.rechild(node, cursor)
+            self.fix_avl(x)
+        }
+    } else {
+        self.rechild(node, node.left || node.right || null)
+        if (node.parent) self.fix_avl(node.parent)
+    }
+}

+self.next = (node) => {
+    if (node.right) {
+        node = node.right
+        while (node.left) node = node.left
+        return node
+    } else {
+        while (node.parent && node.parent.right == node) node = node.parent
+        return node.parent
+    }
+}

+return self
+}
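The tree helpers above form an order-statistics AVL tree: `self.del` splices a node out while patching the `left_size` bookkeeping (hence the "breaks abstraction" comment), and `self.next` is the textbook in-order successor walk over parent pointers. As an illustration only (not part of the package), the same successor walk as a standalone sketch over hypothetical bare {left, right, parent} node objects:

    // Hypothetical sketch mirroring self.next above: in-order successor.
    function successor(node) {
        if (node.right) {                 // successor is the leftmost node of the right subtree
            node = node.right
            while (node.left) node = node.left
            return node
        }
        // Otherwise climb until we leave a right subtree.
        while (node.parent && node.parent.right == node) node = node.parent
        return node.parent                // undefined when node was the maximum
    }

    // Tiny tree: left <- root -> right, visited in order left, root, right.
    let root = {}, left = { parent: root }, right = { parent: root }
    root.left = left; root.right = right
    successor(left) === root    // => true
    successor(root) === right   // => true
    successor(right)            // => undefined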
+function count_code_points(str) {
+    let code_points = 0;
+    for (let i = 0; i < str.length; i++) {
+        if (str.charCodeAt(i) >= 0xD800 && str.charCodeAt(i) <= 0xDBFF) i++;
+        code_points++;
+    }
+    return code_points;
+}
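`count_code_points` counts Unicode code points rather than UTF-16 code units: a lead surrogate (0xD800-0xDBFF) signals a two-unit pair, so the loop skips the trailing unit. For example:

    // "𝄞" (U+1D11E) is one code point encoded as two UTF-16 code units.
    count_code_points("a𝄞b")   // => 3
    "a𝄞b".length               // => 4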
+function index_to_codePoints(str, index) {
+    let i = 0
+    let c = 0
+    while (i < index && i < str.length) {
+        const charCode = str.charCodeAt(i)
+        i += (charCode >= 0xd800 && charCode <= 0xdbff) ? 2 : 1
+        c++
+    }
+    return c
+}

+function codePoints_to_index(str, codePoints) {
+    let i = 0
+    let c = 0
+    while (c < codePoints && i < str.length) {
+        const charCode = str.charCodeAt(i)
+        i += (charCode >= 0xd800 && charCode <= 0xdbff) ? 2 : 1
+        c++
+    }
+    return i
+}
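These two helpers convert between UTF-16 string indices and code-point offsets, which keeps code-point-based patch positions aligned with JavaScript's code-unit string slicing. For instance:

    let s = "a𝄞b"                  // UTF-16 units: "a", high surrogate, low surrogate, "b"
    codePoints_to_index(s, 2)      // => 3: code point 2 ("b") starts at UTF-16 index 3
    index_to_codePoints(s, 3)      // => 2: UTF-16 index 3 is code point 2
    s.slice(codePoints_to_index(s, 1), codePoints_to_index(s, 2))   // => "𝄞"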
+function encode_filename(filename) {
+    // Swap all "!" and "/" characters
+    let swapped = filename.replace(/[!/]/g, (match) => (match === "!" ? "/" : "!"))

+    // Encode the filename using encodeURIComponent()
+    let encoded = encodeURIComponent(swapped)

+    return encoded
+}

+function decode_filename(encodedFilename) {
+    // Decode the filename using decodeURIComponent()
+    let decoded = decodeURIComponent(encodedFilename)

+    // Swap all "/" and "!" characters
+    decoded = decoded.replace(/[!/]/g, (match) => (match === "/" ? "!" : "/"))

+    return decoded
+}
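The swap makes the common case readable on disk: "/" (frequent in keys) becomes "!", which encodeURIComponent leaves unescaped, while a literal "!" becomes "/" and gets percent-encoded. A round trip:

    encode_filename("blog/post!1")    // => "blog!post%2F1"
    decode_filename("blog!post%2F1")  // => "blog/post!1"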
+function validate_version_array(x) {
+    if (!Array.isArray(x)) throw new Error(`invalid version array: not an array`)
+    x.sort()
+    for (var xx of x) validate_actor_seq(xx)
+}

+function validate_actor_seq(x) {
+    if (typeof x !== 'string') throw new Error(`invalid actor-seq: not a string`)
+    let [actor, seq] = decode_version(x)
+    validate_actor(actor)
+}

+function validate_actor(x) {
+    if (typeof x !== 'string') throw new Error(`invalid actor: not a string`)
+    if (Buffer.byteLength(x, 'utf8') >= 50) throw new Error(`actor value too long (max 49): ${x}`) // restriction coming from dt
+}

+function is_valid_actor(x) {
+    try {
+        validate_actor(x)
+        return true
+    } catch (e) { }
+}

+function decode_version(v) {
+    let m = v.match(/^(.*)-(\d+)$/s)
+    if (!m) throw new Error(`invalid actor-seq version: ${v}`)
+    return [m[1], parseInt(m[2])]
+}
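Versions are "actor-seq" strings. The greedy `(.*)` takes everything before the final "-digits" as the actor, so actor names may themselves contain dashes:

    decode_version("alice-42")    // => ["alice", 42]
    decode_version("bob-2024-7")  // => ["bob-2024", 7]
    decode_version("nodigits")    // throws: invalid actor-seq version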
+function validate_patches(patches) {
+    if (!Array.isArray(patches)) throw new Error(`invalid patches: not an array`)
+    for (let p of patches) validate_patch(p)
+}

+function validate_patch(x) {
+    if (typeof x != 'object') throw new Error(`invalid patch: not an object`)
+    if (x.unit && x.unit !== 'text') throw new Error(`invalid patch unit '${x.unit}': only 'text' supported`)
+    if (typeof x.range !== 'string') throw new Error(`invalid patch range: must be a string`)
+    if (!x.range.match(/^\s*\[\s*\d+\s*:\s*\d+\s*\]\s*$/)) throw new Error(`invalid patch range: ${x.range}`)
+    if (typeof x.content !== 'string') throw new Error(`invalid patch content: must be a string`)
+}
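A patch that passes this validation is an object with a "[start:end]" range string and string content; the `unit` field may be omitted:

    validate_patches([{ unit: "text", range: "[3:7]", content: "hello" }])  // ok
    validate_patches([{ range: "[0:0]", content: "abc" }])                  // ok
    validate_patches([{ range: "3:7", content: "x" }])                      // throws: invalid patch range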
+function createSimpleCache(size) {
+    const maxSize = size
+    const cache = new Map()

+    return {
+        put(key, value) {
+            if (cache.has(key)) {
+                // If the key already exists, update its value and move it to the end
+                cache.delete(key)
+                cache.set(key, value)
+            } else {
+                // If the cache is full, remove the oldest entry
+                if (cache.size >= maxSize) {
+                    const oldestKey = cache.keys().next().value
+                    cache.delete(oldestKey)
+                }
+                // Add the new key-value pair
+                cache.set(key, value)
+            }
+        },

+        get(key) {
+            if (!cache.has(key)) {
+                return null
+            }
+            // Move the accessed item to the end (most recently used)
+            const value = cache.get(key)
+            cache.delete(key)
+            cache.set(key, value)
+            return value
+        },
+    }
+}
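`createSimpleCache` is a small LRU cache built on `Map`'s insertion ordering: both `put` and `get` delete and re-insert the key to mark it most recently used, and eviction removes the first (oldest) key. Usage:

    const lru = createSimpleCache(2)
    lru.put("a", 1)
    lru.put("b", 2)
    lru.get("a")     // => 1, and "a" becomes most recently used
    lru.put("c", 3)  // cache full: evicts "b", the least recently used
    lru.get("b")     // => null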
+function apply_patch(obj, range, content) {

+    // Descend down a bunch of objects until we get to the final object
+    // The final object can be a slice
+    // Set the value in the final object

+    var path = range,
+        new_stuff = content

+    var path_segment = /^(\.?([^\.\[]+))|(\[((-?\d+):)?(-?\d+)\])|\[("(\\"|[^"])*")\]/
+    var curr_obj = obj,
+        last_obj = null

+    // Handle negative indices, like "[-9]" or "[-0]"
+    function de_neg (x) {
+        return x[0] === '-'
+            ? curr_obj.length - parseInt(x.substr(1), 10)
+            : parseInt(x, 10)
+    }

+    // Now iterate through each segment of the range e.g. [3].a.b[3][9]
+    while (true) {
+        var match = path_segment.exec(path),
+            subpath = match ? match[0] : '',
+            field = match && match[2],
+            slice_start = match && match[5],
+            slice_end = match && match[6],
+            quoted_field = match && match[7]

+        // The field could be expressed as ["nnn"] instead of .nnn
+        if (quoted_field) field = JSON.parse(quoted_field)

+        slice_start = slice_start && de_neg(slice_start)
+        slice_end = slice_end && de_neg(slice_end)

+        // console.log('Descending', {curr_obj, path, subpath, field, slice_start, slice_end, last_obj})

+        // If it's the final item, set it
+        if (path.length === subpath.length) {
+            if (!subpath) return new_stuff
+            else if (field) { // Object
+                if (new_stuff === undefined)
+                    delete curr_obj[field] // - Delete a field in object
+                else
+                    curr_obj[field] = new_stuff // - Set a field in object
+            } else if (typeof curr_obj === 'string') { // String
+                console.assert(typeof new_stuff === 'string')
+                if (!slice_start) {slice_start = slice_end; slice_end = slice_end+1}
+                if (last_obj) {
+                    var s = last_obj[last_field]
+                    last_obj[last_field] = (s.slice(0, slice_start)
+                                            + new_stuff
+                                            + s.slice(slice_end))
+                } else
+                    return obj.slice(0, slice_start) + new_stuff + obj.slice(slice_end)
+            } else // Array
+                if (slice_start) // - Array splice
+                    [].splice.apply(curr_obj, [slice_start, slice_end-slice_start]
+                                   .concat(new_stuff))
+                else { // - Array set
+                    console.assert(slice_end >= 0, 'Index '+subpath+' is too small')
+                    console.assert(slice_end <= curr_obj.length - 1,
+                                   'Index '+subpath+' is too big')
+                    curr_obj[slice_end] = new_stuff
+                }

+            return obj
+        }

+        // Otherwise, descend down the path
+        console.assert(!slice_start, 'No splices allowed in middle of path')
+        last_obj = curr_obj
+        last_field = field || slice_end
+        curr_obj = curr_obj[last_field]
+        path = path.substr(subpath.length)
+    }
+}
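`apply_patch` walks a JSON-path-style range (e.g. ".a.b[1:3]") segment by segment down to the final one, then sets a field, splices an array, or slices a string there. Strings are replaced inside their containing object, so the patched root can be returned:

    apply_patch({ a: { b: "hello" } }, ".a.b[1:3]", "XY")  // => { a: { b: "hXYlo" } }
    apply_patch("hello", "[1:3]", "XY")                    // => "hXYlo" (root-level string)
    apply_patch([1, 2, 3], "[1]", 9)                       // => [1, 9, 3] (array set)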
+class RangeSet {
+    constructor() {
+        this.ranges = []
+    }

+    add_range(low_inclusive, high_inclusive) {
+        if (low_inclusive > high_inclusive) return

+        const startIndex = this._bs(mid => this.ranges[mid][1] >= low_inclusive - 1, this.ranges.length, true)
+        const endIndex = this._bs(mid => this.ranges[mid][0] <= high_inclusive + 1, -1, false)

+        if (startIndex > endIndex) {
+            this.ranges.splice(startIndex, 0, [low_inclusive, high_inclusive])
+        } else {
+            const mergedLow = Math.min(low_inclusive, this.ranges[startIndex][0])
+            const mergedHigh = Math.max(high_inclusive, this.ranges[endIndex][1])
+            const removeCount = endIndex - startIndex + 1
+            this.ranges.splice(startIndex, removeCount, [mergedLow, mergedHigh])
+        }
+    }

+    has(x) {
+        var index = this._bs(mid => this.ranges[mid][0] <= x, -1, false)
+        return index !== -1 && x <= this.ranges[index][1]
+    }

+    _bs(condition, defaultR, moveLeft) {
+        let low = 0
+        let high = this.ranges.length - 1
+        let result = defaultR

+        while (low <= high) {
+            const mid = Math.floor((low + high) / 2)
+            if (condition(mid)) {
+                result = mid
+                if (moveLeft) high = mid - 1
+                else low = mid + 1
+            } else {
+                if (moveLeft) low = mid + 1
+                else high = mid - 1
+            }
+        }
+        return result
+    }
+}
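`RangeSet` keeps a sorted array of disjoint inclusive ranges. `add_range` binary-searches for every stored range that overlaps or is adjacent to the new one (the "- 1" / "+ 1" provide the adjacency slack) and collapses them into a single merged entry:

    const rs = new RangeSet()
    rs.add_range(1, 3)
    rs.add_range(5, 7)   // ranges: [[1,3], [5,7]]
    rs.add_range(4, 4)   // adjacent to both neighbors: merges to [[1,7]]
    rs.has(6)            // => true
    rs.has(8)            // => false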
+function ascii_ify(s) {
+    return s.replace(/[^\x20-\x7E]/g, c => '\\u' + c.charCodeAt(0).toString(16).padStart(4, '0'))
+}
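`ascii_ify` rewrites every character outside printable ASCII as a literal backslash-u escape, one escape per UTF-16 code unit, so the result is safe to place in an HTTP header:

    ascii_ify("héllo")  // => the 10-character string h\u00e9llo
    ascii_ify("𝄞")      // => \ud834\udd1e (two escapes: one per surrogate)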
+function sorted_set_find(arr, val) {
+    var left = 0, right = arr.length
+    while (left < right) {
+        var mid = (left + right) >> 1
+        arr[mid] < val ? left = mid + 1 : right = mid
+    }
+    return left
+}

+function sorted_set_insert(arr, val) {
+    var i = sorted_set_find(arr, val)
+    if (arr[i] !== val) arr.splice(i, 0, val)
+}

+function sorted_set_delete(arr, val) {
+    var i = sorted_set_find(arr, val)
+    if (arr[i] === val) arr.splice(i, 1)
+}
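`sorted_set_find` is a lower-bound binary search, so membership, insertion, and deletion all cost one O(log n) search plus a splice, and duplicates are never stored:

    var set = []
    sorted_set_insert(set, 5)
    sorted_set_insert(set, 2)
    sorted_set_insert(set, 5)  // duplicate: ignored
    sorted_set_delete(set, 9)  // absent: ignored
    set                        // => [2, 5]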
+function get_digest(s) {
+    if (typeof s === 'string') s = Buffer.from(s, "utf8")
+    return `sha-256=:${require('crypto').createHash('sha256').update(s).digest('base64')}:`
+}
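`get_digest` formats a SHA-256 hash the way HTTP digest fields (e.g. Repr-Digest) expect, as "sha-256=:&lt;base64&gt;:". For example:

    get_digest("hello")
    // => "sha-256=:LPJNul+wow4m6DsqxbninhsWHlwfp0JecwQzYpOLmCQ=:"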
+braid_text.get_resource = get_resource

+braid_text.encode_filename = encode_filename
+braid_text.decode_filename = decode_filename
+braid_text.get_files_for_key = get_files_for_key

+braid_text.dt_get = dt_get
+braid_text.dt_get_patches = dt_get_patches
+braid_text.dt_parse = dt_parse
+braid_text.dt_get_local_version = dt_get_local_version
+braid_text.dt_create_bytes = dt_create_bytes

+braid_text.decode_version = decode_version
+braid_text.RangeSet = RangeSet

+braid_text.create_braid_text = create_braid_text

+return braid_text
+}

-module.exports =
+module.exports = create_braid_text()
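With this release the module body is wrapped in a `create_braid_text()` factory: the package still exports a ready-made default instance, but the factory is exposed on it, so callers can create additional instances whose state is fully independent. A usage sketch, with `db_folder` as an example of a per-instance setting:

    const braid_text = require("braid-text")     // the default instance, as before

    // A second, isolated instance with its own storage folder:
    const other = braid_text.create_braid_text()
    other.db_folder = "./braid-text-db-2"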