braid-text 0.2.113 → 0.2.115
- package/index.js +255 -200
- package/package.json +4 -1
package/index.js  CHANGED
@@ -9,10 +9,7 @@ function create_braid_text() {
         db_folder: './braid-text-db',
         length_cache_size: 10,
         meta_file_save_period_ms: 1000,
-        cache: {},
-        backups: false,
-        backups_folder: './braid-text-backups',
-        backups_interval: 60 * 1000
+        cache: {}
     }

     let waiting_puts = 0
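The backups-related defaults above are gone in 0.2.115; the remaining settings can still be overridden on the exported object before first use. A minimal sketch, assuming the module export is the options object whose defaults appear in this hunk:

let braid_text = require('braid-text')
braid_text.db_folder = './my-braid-text-db'      // where per-key history files are stored
braid_text.length_cache_size = 10                // defaults shown above
braid_text.meta_file_save_period_ms = 1000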
@@ -853,8 +850,9 @@ function create_braid_text() {
             return await braid_fetch(key.href, params)
         }

-
-
+        let resource = (typeof key == 'string') ? await get_resource(key) : key
+
+        return await within_fiber('put:' + resource.key, async () => {

         // support for json patch puts..
         if (options.patches && options.patches.length &&
@@ -865,7 +863,7 @@ function create_braid_text() {
             options = { body: JSON.stringify(x, null, 4) }
         }

-        let { version, patches, body, peer } = options
+        let { version, parents, patches, body, peer } = options

         if (options.transfer_encoding === 'dt') {
             var start_i = 1 + resource.doc.getLocalVersion().reduce((a, b) => Math.max(a, b), -1)
@@ -899,30 +897,24 @@ function create_braid_text() {
         if (version && version.length > 1)
             throw new Error(`cannot put a version with multiple ids`)

-        // translate a single parent of "root" to the empty array (same meaning)
-        let options_parents = options.parents
-        if (options_parents?.length === 1 && options_parents[0] === 'root')
-            options_parents = []
-
         if (body != null && patches) throw new Error(`cannot have a body and patches`)
         if (body != null && (typeof body !== 'string')) throw new Error(`body must be a string`)
         if (patches) validate_patches(patches)

-        if (
+        if (parents) {
             // make sure we have all these parents
-            for (let p of
+            for (let p of parents) {
                 let P = decode_version(p)
                 if (!resource.actor_seqs[P[0]]?.has(P[1]))
                     throw new Error(`missing parent version: ${p}`)
             }
         }

-
-
+        if (!parents) parents = resource.version
+
+        let max_pos = resource.length_cache.get('' + parents) ??
+            (v_eq(resource.version, parents) ? resource.doc.len() : dt_len(resource.doc, parents))

-        let max_pos = resource.length_cache.get('' + og_parents) ??
-            (v_eq(parents, og_parents) ? resource.doc.len() : dt_len(resource.doc, og_parents))
-
         if (body != null) {
             patches = [{
                 unit: 'text',
@@ -931,7 +923,6 @@ function create_braid_text() {
             }]
         }

-        let og_patches = patches
         patches = patches.map((p) => ({
             ...p,
             range: p.range.match(/-?\d+/g).map((x) => {
@@ -943,94 +934,66 @@ function create_braid_text() {
             content_codepoints: [...p.content],
         })).sort((a, b) => a.range[0] - b.range[0])

-        // validate patch positions
-        let must_be_at_least = 0
-        for (let p of patches) {
-            if (p.range[0] < must_be_at_least || p.range[0] > max_pos) throw new Error(`invalid patch range position: ${p.range[0]}`)
-            if (p.range[1] < p.range[0] || p.range[1] > max_pos) throw new Error(`invalid patch range position: ${p.range[1]}`)
-            must_be_at_least = p.range[1]
-        }
-
         let change_count = patches.reduce((a, b) => a + b.content_codepoints.length + (b.range[1] - b.range[0]), 0)

-
-
-        let v = decode_version(og_v)
+        version = version?.[0] || `${(is_valid_actor(peer) && peer) || Math.random().toString(36).slice(2, 7)}-${change_count - 1}`

-
-
-            max_pos))
-
-        // validate version: make sure we haven't seen it already
-        if (resource.actor_seqs[v[0]]?.has(v[1])) {
-
-            if (!options.validate_already_seen_versions) return { change_count }
-
-            // if we have seen it already, make sure it's the same as before
-            let updates = dt_get_patches(resource.doc, og_parents)
+        let v = decode_version(version)
+        var low_seq = v[1] + 1 - change_count

-
-
-
+        // make sure we haven't seen this already
+        var intersects_range = resource.actor_seqs[v[0]]?.has(low_seq, v[1])
+        if (intersects_range) {
+            // if low_seq is below the range min,
+            // then the intersection has gaps,
+            // which is bad, meaning the prior versions must be different,
+            // because what we're inserting is contiguous
+            if (low_seq < intersects_range[0])
+                throw new Error('invalid update: different from previous update with same version')

-
-
-
-
-
-                        seen[JSON.stringify([v[0], v[1] - i, ps, u.start + i])] = true
-                    }
-                } else {
-                    // insert
-                    let v = u.version
-                    let content = [...u.content]
-                    for (let i = 0; i < content.length; i++) {
-                        let ps = (i > 0) ? [`${v[0]}-${v[1] - content.length + i}`] : u.parents
-                        seen[JSON.stringify([v[0], v[1] + 1 - content.length + i, ps, u.start + i, content[i]])] = true
-                    }
-                }
+            // see if we only have *some* of the versions
+            var new_count = v[1] - intersects_range[1]
+            if (new_count > 0) {
+                // divide the patches between old and new..
+                var new_patches = split_patches(patches, change_count - new_count)
            }

-
-
-            let offset = 0
-            for (let p of patches) {
-                // delete
-                for (let i = p.range[0]; i < p.range[1]; i++) {
-                    let vv = decode_version(v)
+            if (options.validate_already_seen_versions)
+                validate_old_patches(resource, `${v[0]}-${low_seq}`, parents, patches)

-
+            if (new_count <= 0) return { change_count }

-
-
-
-
-
-
-
-                    let vv = decode_version(v)
-                    let c = p.content_codepoints[i]
+            change_count = new_count
+            low_seq = v[1] + 1 - change_count
+            parents = [`${v[0]}-${low_seq - 1}`]
+            max_pos = resource.length_cache.get('' + parents) ??
+                (v_eq(resource.version, parents) ? resource.doc.len() : dt_len(resource.doc, parents))
+            patches = new_patches
+        }

-
+        // validate patch positions
+        let must_be_at_least = 0
+        for (let p of patches) {
+            if (p.range[0] < must_be_at_least || p.range[0] > max_pos)
+                throw new Error(`invalid patch range position: ${p.range[0]}`)
+            if (p.range[1] < p.range[0] || p.range[1] > max_pos)
+                throw new Error(`invalid patch range position: ${p.range[1]}`)
+            must_be_at_least = p.range[1]
+        }

-
-
-
-                    v = `${v[0]}-${v[1] + 1}`
-                }
-            }
+        resource.length_cache.put(`${v[0]}-${v[1]}`, patches.reduce((a, b) =>
+            a + (b.content_codepoints?.length ?? 0) - (b.range[1] - b.range[0]),
+            max_pos))

-            // we already have this version, so nothing left to do
-            return { change_count: change_count }
-        }
         if (!resource.actor_seqs[v[0]]) resource.actor_seqs[v[0]] = new RangeSet()
-        resource.actor_seqs[v[0]].add_range(
+        resource.actor_seqs[v[0]].add_range(low_seq, v[1])

-        //
-        v = `${v[0]}-${
+        // get the version of the first character-wise edit
+        v = `${v[0]}-${low_seq}`

-        let ps =
+        let ps = parents

+        let version_before = resource.version
         let v_before = resource.doc.getLocalVersion()

         let bytes = []
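For orientation, a small worked example of the version arithmetic introduced above, with hypothetical values. change_count counts deleted plus inserted codepoints, and low_seq is the first sequence number the update claims for its actor:

// Hypothetical PUT by actor "bob" that deletes 2 characters and inserts 3:
let change_count = 2 + 3                  // deleted + inserted codepoints = 5
let version = 'bob-12'                    // supplied by the client, or auto-generated
                                          // as `${actor}-${change_count - 1}` when absent
let [actor, seq] = ['bob', 12]            // ~ what decode_version(version) yields
let low_seq = seq + 1 - change_count      // 8: the update claims bob's seqs 8..12,
                                          // one per character-wise edit
console.log({ actor, version, low_seq })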
@@ -1063,14 +1026,12 @@ function create_braid_text() {
         var post_commit_updates = []

         if (options.merge_type != "dt") {
-            patches = get_xf_patches(resource.doc, v_before)
+            let patches = get_xf_patches(resource.doc, v_before)
             if (braid_text.verbose) console.log(JSON.stringify({ patches }))

-            let version = resource.version
-
             for (let client of resource.simpleton_clients) {
                 if (peer && client.peer === peer) {
-                    client.my_last_seen_version = [
+                    client.my_last_seen_version = [version]
                 }

                 function set_timeout(time_override) {
@@ -1079,10 +1040,10 @@ function create_braid_text() {
                     // if the doc has been freed, exit early
                     if (resource.doc.__wbg_ptr === 0) return

-                    let
-
-
-
+                    let x = {
+                        version: resource.version,
+                        parents: client.my_last_seen_version
+                    }
                     if (braid_text.verbose) console.log("rebasing after timeout.. ")
                     if (braid_text.verbose) console.log(" client.my_unused_version_count = " + client.my_unused_version_count)
                     x.patches = get_xf_patches(resource.doc, OpLog_remote_to_local(resource.doc, client.my_last_seen_version))
@@ -1097,7 +1058,7 @@ function create_braid_text() {

                 if (client.my_timeout) {
                     if (peer && client.peer === peer) {
-                        if (!v_eq(client.my_last_sent_version,
+                        if (!v_eq(client.my_last_sent_version, parents)) {
                             // note: we don't add to client.my_unused_version_count,
                             // because we're already in a timeout;
                             // we'll just extend it here..
@@ -1111,9 +1072,9 @@ function create_braid_text() {
                     continue
                 }

-                let x = { version }
+                let x = { version: resource.version }
                 if (peer && client.peer === peer) {
-                    if (!v_eq(client.my_last_sent_version,
+                    if (!v_eq(client.my_last_sent_version, parents)) {
                         client.my_unused_version_count = (client.my_unused_version_count ?? 0) + 1
                         set_timeout()
                         continue
@@ -1121,10 +1082,10 @@ function create_braid_text() {
                         delete client.my_unused_version_count
                     }

-                    x.parents =
-                    if (!v_eq(version,
+                    x.parents = [version]
+                    if (!v_eq(x.version, x.parents)) {
                         if (braid_text.verbose) console.log("rebasing..")
-                        x.patches = get_xf_patches(resource.doc, OpLog_remote_to_local(resource.doc,
+                        x.patches = get_xf_patches(resource.doc, OpLog_remote_to_local(resource.doc, x.parents))
                     } else {
                         // this client already has this version,
                         // so let's pretend to send it back, but not
@@ -1133,7 +1094,7 @@ function create_braid_text() {
                         continue
                     }
                 } else {
-                    x.parents =
+                    x.parents = version_before
                     x.patches = patches
                 }
                 if (braid_text.verbose) console.log(`sending: ${JSON.stringify(x)}`)
@@ -1142,9 +1103,11 @@ function create_braid_text() {
             }
         } else {
             if (resource.simpleton_clients.size) {
-                let
-
-
+                let x = {
+                    version: resource.version,
+                    parents: version_before,
+                    patches: get_xf_patches(resource.doc, v_before)
+                }
                 if (braid_text.verbose) console.log(`sending: ${JSON.stringify(x)}`)
                 for (let client of resource.simpleton_clients) {
                     if (client.my_timeout) continue
@@ -1155,9 +1118,13 @@ function create_braid_text() {
         }

         var x = {
-            version: [
-            parents
-            patches:
+            version: [version],
+            parents,
+            patches: patches.map(p => ({
+                unit: p.unit,
+                range: `[${p.range.join(':')}]`,
+                content: p.content
+            })),
         }
         for (let client of resource.clients) {
             if (!peer || client.peer !== peer)
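The hunk above serializes each patch's range in the Braid [start:end] form before broadcasting to subscribed clients. A sketch of the resulting update object, with hypothetical version and parent ids:

let x = {
    version: ['bob-12'],
    parents: ['alice-9'],
    patches: [
        { unit: 'text', range: '[2:4]', content: 'xyz' }   // replace characters 2..4 with "xyz"
    ],
}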
@@ -1302,92 +1269,10 @@ function create_braid_text() {
             var files = (await fs.promises.readdir(braid_text.db_folder))
                 .filter(x => /\.\d+$/.test(x))
             init_filename_mapping(files)
-
-            // Start backups if enabled
-            if (braid_text.backups) backup_init()
         })()
         await db_folder_init.p
     }

-    function backup_init() {
-        if (backup_init.started) return
-        backup_init.started = true
-
-        async function backup_braid_text_db() {
-            var path = require('path')
-            var src_dir = braid_text.db_folder
-            var backup_dir = braid_text.backups_folder
-
-            // Create backup dir if it doesn't exist
-            await fs.promises.mkdir(backup_dir, { recursive: true })
-
-            // Get current date string
-            var d = new Date()
-            var y = d.getYear() + 1900
-            var m = d.getMonth() + 1
-            if (m < 10) m = '0' + m
-            var day = d.getDate()
-            if (day < 10) day = '0' + day
-            var date = y + '-' + m + '-' + day
-
-            // Read files in src_dir (non-recursive)
-            var files
-            try {
-                files = await fs.promises.readdir(src_dir)
-            } catch (e) { return }
-
-            for (var file of files) {
-                // Only process files ending with .N where N is an integer
-                var match = file.match(/^(.+)\.(\d+)$/)
-                if (!match) continue
-
-                var base_name = match[1]
-                var src_path = path.join(src_dir, file)
-
-                // Get source file stats
-                var src_stat
-                try {
-                    src_stat = await fs.promises.stat(src_path)
-                } catch (e) { continue }
-
-                // Skip directories
-                if (!src_stat.isFile()) continue
-
-                // Create backup subdir for this base_name if needed
-                var file_backup_dir = path.join(backup_dir, base_name)
-                await fs.promises.mkdir(file_backup_dir, { recursive: true })
-
-                // Check if we need to backup (compare mtime with most recent backup)
-                var backup_path = path.join(file_backup_dir, date)
-                var needs_backup = true
-
-                try {
-                    // Find the latest backup file (sorted by date name)
-                    var backups = await fs.promises.readdir(file_backup_dir)
-                    if (backups.length > 0) {
-                        backups.sort()
-                        var latest_backup = backups[backups.length - 1]
-                        var latest_backup_path = path.join(file_backup_dir, latest_backup)
-                        var backup_stat = await fs.promises.stat(latest_backup_path)
-                        // Only backup if source is newer than latest backup
-                        if (src_stat.mtimeMs <= backup_stat.mtimeMs)
-                            needs_backup = false
-                    }
-                } catch (e) {
-                    // Backup dir empty or doesn't exist, so we need to create backup
-                }
-
-                if (needs_backup) {
-                    require('child_process').execFile(
-                        '/bin/cp', [src_path, backup_path])
-                }
-            }
-        }
-
-        backup_braid_text_db()
-        setInterval(backup_braid_text_db, braid_text.backups_interval)
-    }
-
     async function get_files_for_key(key) {
         await db_folder_init()
         try {
@@ -1625,6 +1510,62 @@ function create_braid_text() {
             if (!seqs.length) delete ns.actor_seqs[actor]
         }

+    function validate_old_patches(resource, base_v, parents, patches) {
+        // if we have seen it already, make sure it's the same as before
+        let updates = dt_get_patches(resource.doc, parents)
+
+        let seen = {}
+        for (let u of updates) {
+            u.version = decode_version(u.version)
+
+            if (!u.content) {
+                // delete
+                let v = u.version
+                for (let i = 0; i < u.end - u.start; i++) {
+                    let ps = (i < u.end - u.start - 1) ? [`${v[0]}-${v[1] - i - 1}`] : u.parents
+                    seen[JSON.stringify([v[0], v[1] - i, ps, u.start + i])] = true
+                }
+            } else {
+                // insert
+                let v = u.version
+                let content = [...u.content]
+                for (let i = 0; i < content.length; i++) {
+                    let ps = (i > 0) ? [`${v[0]}-${v[1] - content.length + i}`] : u.parents
+                    seen[JSON.stringify([v[0], v[1] + 1 - content.length + i, ps, u.start + i, content[i]])] = true
+                }
+            }
+        }
+
+        let v = base_v
+        let ps = parents
+        let offset = 0
+        for (let p of patches) {
+            // delete
+            for (let i = p.range[0]; i < p.range[1]; i++) {
+                let vv = decode_version(v)
+
+                if (!seen[JSON.stringify([vv[0], vv[1], ps, p.range[1] - 1 + offset])]) throw new Error('invalid update: different from previous update with same version')
+
+                offset--
+                ps = [v]
+                v = vv
+                v = `${v[0]}-${v[1] + 1}`
+            }
+            // insert
+            for (let i = 0; i < p.content_codepoints?.length ?? 0; i++) {
+                let vv = decode_version(v)
+                let c = p.content_codepoints[i]
+
+                if (!seen[JSON.stringify([vv[0], vv[1], ps, p.range[1] + offset, c])]) throw new Error('invalid update: different from previous update with same version')
+
+                offset++
+                ps = [v]
+                v = vv
+                v = `${v[0]}-${v[1] + 1}`
+            }
+        }
+    }
+
     //////////////////////////////////////////////////////////////////
     //////////////////////////////////////////////////////////////////
     //////////////////////////////////////////////////////////////////
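To make the seen-key bookkeeping in validate_old_patches() concrete: for one hypothetical prior dt patch in which actor "bob" inserted "hi" at position 3 with parents ["alice-9"] (so the patch's version is "bob-5", covering seqs 4..5), the insert branch above records:

let seen = {}
// i = 0: the first inserted character keeps the patch's real parents
seen[JSON.stringify(['bob', 4, ['alice-9'], 3, 'h'])] = true
// i = 1: later characters chain off the previous sequence number
seen[JSON.stringify(['bob', 5, ['bob-4'], 4, 'i'])] = true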
@@ -2628,6 +2569,120 @@ function create_braid_text() {
             if (typeof x.content !== 'string') throw new Error(`invalid patch content: must be a string`)
         }

+    // Splits an array of patches at a given character position within the
+    // combined delete+insert sequence.
+    //
+    // Patches are objects with:
+    //   - unit: string (e.g., 'text')
+    //   - range: [start, end] - character positions for deletion
+    //   - content: string - the content to insert
+    //   - content_codepoints: array of single characters
+    //
+    // Each patch represents a "replace" operation: delete then insert.
+    // The combined sequence for patches is:
+    //   del(patch1), ins(patch1), del(patch2), ins(patch2), ...
+    //
+    // The split_point is an index into this combined sequence.
+    //
+    // Example: patches with del(3),ins(4),del(2),ins(5)
+    //   - split_point 1 falls in first del(3)
+    //   - split_point 5 falls in first ins(4) (positions 3-6)
+    //   - split_point 7 falls in second del(2) (positions 7-8)
+    //
+    // First patches: operations up to split_point
+    // Second patches: operations from split_point onward (ranges adjusted)
+    function split_patches(patches, split_point) {
+        let second_patches = []
+
+        let position = 0 // current position in the combined sequence
+        let adjustment = 0 // how much to adjust second patches' ranges
+        let first_len = 0 // how many patches stay in first (modified in place)
+
+        for (let i = 0; i < patches.length; i++) {
+            let p = patches[i]
+            let delete_length = p.range[1] - p.range[0]
+            let insert_length = p.content_codepoints.length
+
+            let del_start = position
+            let del_end = position + delete_length
+            let ins_start = del_end
+            let ins_end = ins_start + insert_length
+
+            if (split_point >= ins_end) {
+                // Entire patch is before split point - stays in first (unchanged)
+                first_len++
+                // Adjustment: this patch removes delete_length and adds insert_length
+                adjustment += insert_length - delete_length
+            } else if (split_point <= del_start) {
+                // Entire patch is after split point - goes to second (adjusted)
+                second_patches.push({
+                    unit: p.unit,
+                    range: [p.range[0] + adjustment, p.range[1] + adjustment],
+                    content: p.content,
+                    content_codepoints: p.content_codepoints
+                })
+            } else if (split_point <= del_end) {
+                // Split point is within the delete portion
+                let del_chars_before = split_point - del_start
+
+                // Save original values before modifying
+                let original_range_end = p.range[1]
+                let original_content = p.content
+                let original_content_codepoints = p.content_codepoints
+
+                // First patches: partial delete, no insert (modify in place)
+                p.range[1] = p.range[0] + del_chars_before
+                p.content = ''
+                p.content_codepoints = []
+                first_len++
+
+                // Adjustment from partial delete
+                adjustment -= del_chars_before
+
+                // Second patches: remaining delete + full insert (adjusted)
+                second_patches.push({
+                    unit: p.unit,
+                    range: [p.range[1] + adjustment, original_range_end + adjustment],
+                    content: original_content,
+                    content_codepoints: original_content_codepoints
+                })
+            } else {
+                // Split point is within the insert portion (split_point > del_end && split_point < ins_end)
+                let ins_chars_before = split_point - ins_start
+                let original_content_codepoints = p.content_codepoints
+
+                // First patches: full delete + partial insert (modify in place)
+                p.content_codepoints = p.content_codepoints.slice(0, ins_chars_before)
+                p.content = p.content_codepoints.join('')
+                first_len++
+
+                // After first patches applied, the position for remaining insert is:
+                //   p.range[0] (original position)
+                //   + adjustment (net change from all prior first_patches)
+                //   + ins_chars_before (what this patch's first part inserted)
+                let adjusted_pos = p.range[0] + adjustment + ins_chars_before
+
+                let content_codepoints = original_content_codepoints.slice(ins_chars_before)
+                second_patches.push({
+                    unit: p.unit,
+                    range: [adjusted_pos, adjusted_pos],
+                    content: content_codepoints.join(''),
+                    content_codepoints
+                })
+
+                // Update adjustment: full delete removed, partial insert added
+                adjustment += ins_chars_before - delete_length
+            }
+
+            position = ins_end
+        }
+
+        // Truncate patches array to only contain first_patches
+        patches.length = first_len
+
+        return second_patches
+    }
+
     function createSimpleCache(size) {
         const maxSize = size
         const cache = new Map()
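A usage sketch of split_patches() following the comment's combined-sequence example (it is an internal helper inside create_braid_text(), so direct access to it here is assumed for illustration):

// Two patches: del(3)+ins(4) at positions 0..3, and del(2)+ins(5) at positions 10..12.
let patches = [
    { unit: 'text', range: [0, 3],   content: 'abcd',  content_codepoints: [...'abcd'] },
    { unit: 'text', range: [10, 12], content: 'vwxyz', content_codepoints: [...'vwxyz'] },
]

// Split at combined position 5, which falls inside the first insert (positions 3-6):
let second = split_patches(patches, 5)

// patches (truncated in place) now holds the first part: the full delete plus 'ab'
//   [{ unit: 'text', range: [0, 3], content: 'ab', content_codepoints: ['a', 'b'] }]
// second holds the remainder, with ranges re-based onto the text after the first part:
//   [{ unit: 'text', range: [2, 2],  content: 'cd',    content_codepoints: ['c', 'd'] },
//    { unit: 'text', range: [9, 11], content: 'vwxyz', content_codepoints: [...'vwxyz'] }]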
@@ -2746,7 +2801,7 @@ function create_braid_text() {
         }

         add_range(low_inclusive, high_inclusive) {
-            if (low_inclusive > high_inclusive)
+            if (low_inclusive > high_inclusive) throw new Error('invalid range')

             const startIndex = this._bs(mid => this.ranges[mid][1] >= low_inclusive - 1, this.ranges.length, true)
             const endIndex = this._bs(mid => this.ranges[mid][0] <= high_inclusive + 1, -1, false)
@@ -2761,9 +2816,10 @@ function create_braid_text() {
             }
         }

-        has(x) {
-
-
+        has(x, high) {
+            if (high === undefined) high = x
+            var index = this._bs(mid => this.ranges[mid][0] <= high, -1, false)
+            return index !== -1 && x <= this.ranges[index][1] && this.ranges[index]
         }

         _bs(condition, defaultR, moveLeft) {
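The two-argument has(low, high) above returns the stored [low, high] pair that the query range intersects, or false; the new put path relies on that via resource.actor_seqs[v[0]]?.has(low_seq, v[1]). A small sketch, assuming direct access to the internal RangeSet class:

let rs = new RangeSet()
rs.add_range(0, 9)            // e.g. an actor has used seqs 0..9

console.log(rs.has(4))        // -> [0, 9]   single-point query; now returns the matching range
console.log(rs.has(7, 12))    // -> [0, 9]   the query range 7..12 overlaps 0..9
console.log(rs.has(10, 12))   // -> false    no overlap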
@@ -2937,7 +2993,6 @@ function create_braid_text() {
     braid_text.get_resource = get_resource

     braid_text.db_folder_init = db_folder_init
-    braid_text.backup_init = backup_init
     braid_text.encode_filename = encode_filename
     braid_text.decode_filename = decode_filename

package/package.json  CHANGED
@@ -1,10 +1,13 @@
 {
   "name": "braid-text",
-  "version": "0.2.113",
+  "version": "0.2.115",
   "description": "Library for collaborative text over http using braid.",
   "author": "Braid Working Group",
   "repository": "braid-org/braid-text",
   "homepage": "https://braid.org",
+  "scripts": {
+    "test": "node test/test.js"
+  },
   "files": [
     "index.js",
     "simpleton-client.js",