@dabble/patches 0.5.2 → 0.5.3
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/dist/algorithms/client/makeChange.js +2 -2
- package/dist/algorithms/server/commitChanges.d.ts +4 -3
- package/dist/algorithms/server/commitChanges.js +25 -6
- package/dist/algorithms/server/handleOfflineSessionsAndBatches.d.ts +5 -2
- package/dist/algorithms/server/handleOfflineSessionsAndBatches.js +16 -8
- package/dist/algorithms/server/transformIncomingChanges.d.ts +2 -1
- package/dist/algorithms/server/transformIncomingChanges.js +11 -9
- package/dist/algorithms/shared/changeBatching.d.ts +19 -0
- package/dist/algorithms/{client/breakChange.js → shared/changeBatching.js} +56 -3
- package/dist/index.d.ts +1 -1
- package/dist/net/PatchesClient.d.ts +3 -2
- package/dist/net/PatchesClient.js +3 -2
- package/dist/net/PatchesSync.js +2 -2
- package/dist/net/index.d.ts +5 -5
- package/dist/net/protocol/types.d.ts +3 -3
- package/dist/net/websocket/RPCServer.d.ts +3 -1
- package/dist/net/websocket/RPCServer.js +3 -2
- package/dist/net/websocket/SignalingService.d.ts +21 -21
- package/dist/net/websocket/SignalingService.js +43 -39
- package/dist/server/PatchesBranchManager.d.ts +2 -1
- package/dist/server/PatchesBranchManager.js +31 -9
- package/dist/server/PatchesServer.d.ts +12 -4
- package/dist/server/PatchesServer.js +8 -3
- package/dist/server/index.d.ts +2 -2
- package/dist/server/types.d.ts +5 -0
- package/dist/types.d.ts +23 -4
- package/dist/utils/dates.js +2 -0
- package/package.json +1 -1
- package/dist/algorithms/client/batching.d.ts +0 -9
- package/dist/algorithms/client/batching.js +0 -42
- package/dist/algorithms/client/breakChange.d.ts +0 -15
- package/dist/algorithms/client/getJSONByteSize.d.ts +0 -4
- package/dist/algorithms/client/getJSONByteSize.js +0 -13
package/dist/algorithms/client/makeChange.js
CHANGED
@@ -1,7 +1,7 @@
 import "../../chunk-IZ2YBCUP.js";
 import { createChange } from "../../data/change.js";
 import { createJSONPatch } from "../../json-patch/createJSONPatch.js";
-import {
+import { breakChanges } from "../shared/changeBatching.js";
 import { createStateFromSnapshot } from "./createStateFromSnapshot.js";
 function makeChange(snapshot, mutator, changeMetadata, maxPayloadBytes) {
   const pendingChanges = snapshot.changes;
@@ -20,7 +20,7 @@ function makeChange(snapshot, mutator, changeMetadata, maxPayloadBytes) {
     throw new Error(`Failed to apply change to state during makeChange: ${error}`);
   }
   if (maxPayloadBytes) {
-    newChangesArray =
+    newChangesArray = breakChanges(newChangesArray, maxPayloadBytes);
   }
   return newChangesArray;
 }
package/dist/algorithms/server/commitChanges.d.ts
CHANGED
@@ -1,5 +1,5 @@
 import { PatchesStoreBackend } from '../../server/types.js';
-import { ChangeInput, Change } from '../../types.js';
+import { ChangeInput, CommitChangesOptions, Change } from '../../types.js';
 import '../../json-patch/JSONPatch.js';
 import '@dabble/delta';
 import '../../json-patch/types.js';
@@ -9,10 +9,11 @@ import '../../json-patch/types.js';
  * @param docId - The ID of the document.
  * @param changes - The changes to commit.
  * @param originClientId - The ID of the client that initiated the commit.
+ * @param options - Optional commit settings.
  * @returns A tuple of [committedChanges, transformedChanges] where:
  * - committedChanges: Changes that were already committed to the server after the client's base revision
  * - transformedChanges: The client's changes after being transformed against concurrent changes
  */
-declare function commitChanges(store: PatchesStoreBackend, docId: string, changes: ChangeInput[], sessionTimeoutMillis: number): Promise<[Change[], Change[]]>;
+declare function commitChanges(store: PatchesStoreBackend, docId: string, changes: ChangeInput[], sessionTimeoutMillis: number, options?: CommitChangesOptions, maxPayloadBytes?: number): Promise<[Change[], Change[]]>;
 
-export { commitChanges };
+export { CommitChangesOptions, commitChanges };
package/dist/algorithms/server/commitChanges.js
CHANGED
@@ -6,7 +6,7 @@ import { getSnapshotAtRevision } from "./getSnapshotAtRevision.js";
 import { getStateAtRevision } from "./getStateAtRevision.js";
 import { handleOfflineSessionsAndBatches } from "./handleOfflineSessionsAndBatches.js";
 import { transformIncomingChanges } from "./transformIncomingChanges.js";
-async function commitChanges(store, docId, changes, sessionTimeoutMillis) {
+async function commitChanges(store, docId, changes, sessionTimeoutMillis, options, maxPayloadBytes) {
   if (changes.length === 0) {
     return [[], []];
   }
@@ -24,8 +24,10 @@ async function commitChanges(store, docId, changes, sessionTimeoutMillis) {
     }
     if (c.rev == null) c.rev = rev++;
     else rev = c.rev + 1;
-    c.committedAt
-
+    if (!options?.historicalImport || !c.committedAt) {
+      c.committedAt = serverNow;
+    }
+    c.createdAt = c.createdAt ? clampTimestamp(c.createdAt, serverNow) : serverNow;
   });
   if (baseRev > currentRev) {
     throw new Error(
@@ -39,7 +41,8 @@ async function commitChanges(store, docId, changes, sessionTimeoutMillis) {
    );
   }
   const lastChange = currentChanges[currentChanges.length - 1];
-
+  const compareTime = options?.historicalImport ? changes[0].createdAt : serverNow;
+  if (lastChange && timestampDiff(compareTime, lastChange.createdAt) > sessionTimeoutMillis) {
     await createVersion(store, docId, currentState, currentChanges);
   }
   const committedChanges = await store.listChanges(docId, {
@@ -53,17 +56,33 @@ async function commitChanges(store, docId, changes, sessionTimeoutMillis) {
   }
   const isOfflineTimestamp = timestampDiff(serverNow, incomingChanges[0].createdAt) > sessionTimeoutMillis;
   if (isOfflineTimestamp || batchId) {
+    const canFastForward = committedChanges.length === 0;
+    const origin = options?.historicalImport ? "main" : canFastForward ? "main" : "offline-branch";
     incomingChanges = await handleOfflineSessionsAndBatches(
       store,
       sessionTimeoutMillis,
       docId,
       incomingChanges,
       baseRev,
-      batchId
+      batchId,
+      origin,
+      true,
+      // isOffline
+      maxPayloadBytes
     );
+    if (canFastForward) {
+      await store.saveChanges(docId, incomingChanges);
+      return [[], incomingChanges];
+    }
   }
   const stateAtBaseRev = (await getStateAtRevision(store, docId, baseRev)).state;
-  const transformedChanges = transformIncomingChanges(
+  const transformedChanges = transformIncomingChanges(
+    incomingChanges,
+    stateAtBaseRev,
+    committedChanges,
+    currentRev,
+    options?.forceCommit
+  );
   if (transformedChanges.length > 0) {
     await store.saveChanges(docId, transformedChanges);
   }
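For orientation, here is a sketch of how a migration script might call the updated server-side commitChanges algorithm. Only the signature and option semantics come from the declarations above; the import path, the `store` backend, and the byte limit are assumptions.

// Hypothetical migration sketch; only the commitChanges signature and the
// CommitChangesOptions semantics come from the diff above.
import { commitChanges } from '@dabble/patches/dist/algorithms/server/commitChanges.js'; // assumed path
import type { ChangeInput } from '@dabble/patches';

async function importHistory(store: any, docId: string, changes: ChangeInput[]) {
  const sessionTimeoutMillis = 30 * 60 * 1000;
  const maxPayloadBytes = 1024 * 1024; // assumed 1 MB row limit
  // historicalImport preserves provided committedAt values and keeps versions on 'main';
  // forceCommit keeps no-op changes so the imported history is preserved exactly.
  const [committed, transformed] = await commitChanges(
    store, docId, changes, sessionTimeoutMillis,
    { historicalImport: true, forceCommit: true },
    maxPayloadBytes
  );
  return [committed, transformed];
}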
package/dist/algorithms/server/handleOfflineSessionsAndBatches.d.ts
CHANGED
@@ -11,8 +11,11 @@ import '../../json-patch/types.js';
 * @param changes The incoming changes (all with the same batchId)
 * @param baseRev The base revision for the batch
 * @param batchId The batch identifier
-* @
+* @param origin The origin to use for created versions (default: 'offline-branch')
+* @param isOffline Whether these changes were created offline (metadata flag)
+* @param maxPayloadBytes If set, break collapsed changes that exceed this size
+* @returns The changes (collapsed into one if divergent, unchanged if fast-forward)
 */
-declare function handleOfflineSessionsAndBatches(store: PatchesStoreBackend, sessionTimeoutMillis: number, docId: string, changes: Change[], baseRev: number, batchId?: string): Promise<Change[]>;
+declare function handleOfflineSessionsAndBatches(store: PatchesStoreBackend, sessionTimeoutMillis: number, docId: string, changes: Change[], baseRev: number, batchId?: string, origin?: 'main' | 'offline-branch', isOffline?: boolean, maxPayloadBytes?: number): Promise<Change[]>;
 
 export { handleOfflineSessionsAndBatches };
package/dist/algorithms/server/handleOfflineSessionsAndBatches.js
CHANGED
@@ -3,8 +3,9 @@ import { createSortableId } from "crypto-id";
 import { createVersionMetadata } from "../../data/version.js";
 import { getISO, timestampDiff } from "../../utils/dates.js";
 import { applyChanges } from "../shared/applyChanges.js";
+import { breakChanges } from "../shared/changeBatching.js";
 import { getStateAtRevision } from "./getStateAtRevision.js";
-async function handleOfflineSessionsAndBatches(store, sessionTimeoutMillis, docId, changes, baseRev, batchId) {
+async function handleOfflineSessionsAndBatches(store, sessionTimeoutMillis, docId, changes, baseRev, batchId, origin = "offline-branch", isOffline = true, maxPayloadBytes) {
   const groupId = batchId ?? createSortableId();
   const [lastVersion] = await store.listVersions(docId, {
     groupId,
@@ -30,8 +31,9 @@ async function handleOfflineSessionsAndBatches(store, sessionTimeoutMillis, docI
     const isContinuation = !!lastVersion && timestampDiff(sessionChanges[0].createdAt, lastVersion.endedAt) <= sessionTimeoutMillis;
     if (isContinuation) {
       const mergedState = applyChanges(offlineBaseState, sessionChanges);
-
-
+      const newEndedAt = getISO(sessionChanges[sessionChanges.length - 1].createdAt);
+      const newRev = sessionChanges[sessionChanges.length - 1].rev;
+      await store.appendVersionChanges(docId, lastVersion.id, sessionChanges, newEndedAt, newRev, mergedState);
       offlineBaseState = mergedState;
       parentId = lastVersion.parentId;
     } else {
@@ -39,7 +41,8 @@ async function handleOfflineSessionsAndBatches(store, sessionTimeoutMillis, docI
       const sessionMetadata = createVersionMetadata({
         parentId,
         groupId,
-        origin
+        origin,
+        isOffline,
         // Convert client timestamps to UTC for version metadata (enables lexicographic sorting)
         startedAt: getISO(sessionChanges[0].createdAt),
         endedAt: getISO(sessionChanges[sessionChanges.length - 1].createdAt),
@@ -53,12 +56,17 @@ async function handleOfflineSessionsAndBatches(store, sessionTimeoutMillis, docI
       }
     }
   }
-
-  changes.reduce((firstChange, nextChange) => {
+  if (origin === "offline-branch") {
+    const collapsed = changes.reduce((firstChange, nextChange) => {
       firstChange.ops = [...firstChange.ops, ...nextChange.ops];
       return firstChange;
-  })
-
+    });
+    if (maxPayloadBytes) {
+      return breakChanges([collapsed], maxPayloadBytes);
+    }
+    return [collapsed];
+  }
+  return changes;
 }
 export {
   handleOfflineSessionsAndBatches
package/dist/algorithms/server/transformIncomingChanges.d.ts
CHANGED
@@ -10,8 +10,9 @@ import '../../json-patch/types.js';
 * @param stateAtBaseRev The server state *at the client's baseRev*.
 * @param committedChanges The committed changes that happened *after* the client's baseRev.
 * @param currentRev The current/latest revision number (these changes will have their `rev` set > `currentRev`).
+* @param forceCommit If true, skip filtering of no-op changes (useful for migrations).
 * @returns The transformed changes.
 */
-declare function transformIncomingChanges(changes: Change[], stateAtBaseRev: any, committedChanges: Change[], currentRev: number): Change[];
+declare function transformIncomingChanges(changes: Change[], stateAtBaseRev: any, committedChanges: Change[], currentRev: number, forceCommit?: boolean): Change[];
 
 export { transformIncomingChanges };
package/dist/algorithms/server/transformIncomingChanges.js
CHANGED
@@ -1,24 +1,26 @@
 import "../../chunk-IZ2YBCUP.js";
 import { applyPatch } from "../../json-patch/applyPatch.js";
 import { transformPatch } from "../../json-patch/transformPatch.js";
-function transformIncomingChanges(changes, stateAtBaseRev, committedChanges, currentRev) {
+function transformIncomingChanges(changes, stateAtBaseRev, committedChanges, currentRev, forceCommit = false) {
   const committedOps = committedChanges.flatMap((c) => c.ops);
   let state = stateAtBaseRev;
   let rev = currentRev + 1;
   return changes.map((change) => {
     const transformedOps = transformPatch(stateAtBaseRev, committedOps, change.ops);
-    if (transformedOps.length === 0) {
+    if (transformedOps.length === 0 && !forceCommit) {
       return null;
     }
-
-
-
-
+    if (transformedOps.length > 0) {
+      try {
+        const previous = state;
+        state = applyPatch(state, transformedOps, { strict: true });
+        if (previous === state && !forceCommit) {
+          return null;
+        }
+      } catch (error) {
+        console.error(`Error applying change ${change.id} to state:`, error);
         return null;
       }
-    } catch (error) {
-      console.error(`Error applying change ${change.id} to state:`, error);
-      return null;
     }
     return { ...change, rev: rev++, ops: transformedOps };
   }).filter(Boolean);
package/dist/algorithms/shared/changeBatching.d.ts
ADDED
@@ -0,0 +1,19 @@
+import { Change } from '../../types.js';
+import '../../json-patch/JSONPatch.js';
+import '@dabble/delta';
+import '../../json-patch/types.js';
+
+/** Estimate JSON string byte size. */
+declare function getJSONByteSize(data: unknown): number;
+/**
+ * Break changes into smaller changes so that each change's JSON string size never exceeds `maxBytes`.
+ *
+ * - Splits first by JSON-Patch *ops*
+ * - If an individual op is still too big and is a "@txt" op,
+ *   split its Delta payload into smaller Deltas
+ */
+declare function breakChanges(changes: Change[], maxBytes: number): Change[];
+/** Break changes into batches based on maxPayloadBytes. */
+declare function breakChangesIntoBatches(changes: Change[], maxPayloadBytes?: number): Change[][];
+
+export { breakChanges, breakChangesIntoBatches, getJSONByteSize };
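A usage sketch of the batching helpers declared above: pending changes are split into request-sized batches before flushing. The import path and the `pending` list are assumptions; the function signatures are taken from the declaration file.

// Sketch only; import path and sample data are assumptions.
import { breakChangesIntoBatches, getJSONByteSize } from '@dabble/patches/dist/algorithms/shared/changeBatching.js'; // assumed path
import type { Change } from '@dabble/patches';

declare const pending: Change[]; // pending local changes (hypothetical)
const maxPayloadBytes = 64 * 1024;

if (getJSONByteSize(pending) >= maxPayloadBytes) {
  // Each batch stays under maxPayloadBytes; oversized changes are split first,
  // and every member of a split batch shares a generated batchId.
  const batches = breakChangesIntoBatches(pending, maxPayloadBytes);
  for (const batch of batches) {
    // send one batch per request, e.g. client.commitChanges(docId, batch)
  }
}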
package/dist/algorithms/{client/breakChange.js → shared/changeBatching.js}
RENAMED
@@ -1,7 +1,58 @@
 import "../../chunk-IZ2YBCUP.js";
+import { createId } from "crypto-id";
 import { createChange } from "../../data/change.js";
-
-
+function getJSONByteSize(data) {
+  try {
+    const stringified = JSON.stringify(data);
+    return stringified ? new TextEncoder().encode(stringified).length : 0;
+  } catch (e) {
+    console.error("Error calculating JSON size:", e);
+    throw new Error("Error calculating JSON size: " + e);
+  }
+}
+function breakChanges(changes, maxBytes) {
+  const results = [];
+  for (const change of changes) {
+    results.push(...breakSingleChange(change, maxBytes));
+  }
+  return results;
+}
+function breakChangesIntoBatches(changes, maxPayloadBytes) {
+  if (!maxPayloadBytes || getJSONByteSize(changes) < maxPayloadBytes) {
+    return [changes];
+  }
+  const batchId = createId(12);
+  const batches = [];
+  let currentBatch = [];
+  let currentSize = 2;
+  for (const change of changes) {
+    const changeWithBatchId = { ...change, batchId };
+    const individualActualSize = getJSONByteSize(changeWithBatchId);
+    let itemsToProcess;
+    if (individualActualSize > maxPayloadBytes) {
+      itemsToProcess = breakSingleChange(changeWithBatchId, maxPayloadBytes).map((c) => ({ ...c, batchId }));
+    } else {
+      itemsToProcess = [changeWithBatchId];
+    }
+    for (const item of itemsToProcess) {
+      const itemActualSize = getJSONByteSize(item);
+      const itemSizeForBatching = itemActualSize + (currentBatch.length > 0 ? 1 : 0);
+      if (currentBatch.length > 0 && currentSize + itemSizeForBatching > maxPayloadBytes) {
+        batches.push(currentBatch);
+        currentBatch = [];
+        currentSize = 2;
+      }
+      const actualItemContribution = itemActualSize + (currentBatch.length > 0 ? 1 : 0);
+      currentBatch.push(item);
+      currentSize += actualItemContribution;
+    }
+  }
+  if (currentBatch.length > 0) {
+    batches.push(currentBatch);
+  }
+  return batches;
+}
+function breakSingleChange(orig, maxBytes) {
   if (getJSONByteSize(orig) <= maxBytes) return [orig];
   const byOps = [];
   let group = [];
@@ -205,5 +256,7 @@ function deriveNewChange(origChange, rev, ops) {
   return createChange(origChange.baseRev, rev, ops, metadata);
 }
 export {
-
+  breakChanges,
+  breakChangesIntoBatches,
+  getJSONByteSize
 };
package/dist/index.d.ts
CHANGED
@@ -17,7 +17,7 @@ export { createPathProxy, pathProxy } from './json-patch/pathProxy.js';
 export { transformPatch } from './json-patch/transformPatch.js';
 export { JSONPatch, PathLike, WriteOptions } from './json-patch/JSONPatch.js';
 export { ApplyJSONPatchOptions, JSONPatchOpHandlerMap as JSONPatchCustomTypes, JSONPatchOp } from './json-patch/types.js';
-export { Branch, BranchStatus, Change, ChangeInput, ChangeMutator, EditableBranchMetadata, EditableVersionMetadata, ListChangesOptions, ListVersionsOptions, PatchesSnapshot, PatchesState, PathProxy, SyncingState, VersionMetadata } from './types.js';
+export { Branch, BranchStatus, Change, ChangeInput, ChangeMutator, CommitChangesOptions, EditableBranchMetadata, EditableVersionMetadata, ListChangesOptions, ListVersionsOptions, PatchesSnapshot, PatchesState, PathProxy, SyncingState, VersionMetadata } from './types.js';
 export { clampTimestamp, extractTimezoneOffset, getISO, getLocalISO, getLocalTimezoneOffset, timestampDiff } from './utils/dates.js';
 export { add } from './json-patch/ops/add.js';
 export { copy } from './json-patch/ops/copy.js';
package/dist/net/PatchesClient.d.ts
CHANGED
@@ -1,5 +1,5 @@
 import { Signal } from '../event-signal.js';
-import { Change, PatchesState, ChangeInput, EditableVersionMetadata, ListVersionsOptions, VersionMetadata, PatchesSnapshot } from '../types.js';
+import { Change, PatchesState, ChangeInput, CommitChangesOptions, EditableVersionMetadata, ListVersionsOptions, VersionMetadata, PatchesSnapshot } from '../types.js';
 import { JSONRPCClient } from './protocol/JSONRPCClient.js';
 import { PatchesAPI, ClientTransport } from './protocol/types.js';
 import '../json-patch/JSONPatch.js';
@@ -52,9 +52,10 @@ declare class PatchesClient implements PatchesAPI {
     * Applies a set of client-generated changes to a document on the server.
     * @param docId - The ID of the document.
     * @param changes - An array of changes to apply.
+    * @param options - Optional commit settings (e.g., forceCommit for migrations).
     * @returns A promise resolving with the changes as committed by the server (potentially transformed).
     */
-    commitChanges(docId: string, changes: ChangeInput[]): Promise<Change[]>;
+    commitChanges(docId: string, changes: ChangeInput[], options?: CommitChangesOptions): Promise<Change[]>;
     /**
      * Deletes a document on the server.
      * @param docId - The ID of the document to delete.
package/dist/net/PatchesClient.js
CHANGED
@@ -60,10 +60,11 @@ class PatchesClient {
   * Applies a set of client-generated changes to a document on the server.
   * @param docId - The ID of the document.
   * @param changes - An array of changes to apply.
+  * @param options - Optional commit settings (e.g., forceCommit for migrations).
   * @returns A promise resolving with the changes as committed by the server (potentially transformed).
   */
-  async commitChanges(docId, changes) {
-    return this.rpc.call("commitChanges", { docId, changes });
+  async commitChanges(docId, changes, options) {
+    return this.rpc.call("commitChanges", { docId, changes, options });
   }
   /**
    * Deletes a document on the server.
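A client-side usage sketch for the new options argument, which now rides along in the JSON-RPC params as { docId, changes, options }. The import subpath and the pre-constructed client are assumptions; the method signature comes from the declaration above.

// Sketch only; subpath import and `client` construction are assumed.
import type { PatchesClient } from '@dabble/patches/net'; // assumed subpath
import type { ChangeInput } from '@dabble/patches';

async function migrateDoc(client: PatchesClient, docId: string, changes: ChangeInput[]) {
  // forceCommit keeps no-op changes in the committed history (e.g. during migrations).
  return client.commitChanges(docId, changes, { forceCommit: true });
}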
package/dist/net/PatchesSync.js
CHANGED
@@ -8,7 +8,7 @@ import {
 var __receiveCommittedChanges_dec, _syncDoc_dec, _init;
 import { isEqual } from "@dabble/delta";
 import { applyCommittedChanges } from "../algorithms/client/applyCommittedChanges.js";
-import {
+import { breakChangesIntoBatches } from "../algorithms/shared/changeBatching.js";
 import { Patches } from "../client/Patches.js";
 import { signal } from "../event-signal.js";
 import { blockable } from "../utils/concurrency.js";
@@ -185,7 +185,7 @@ class PatchesSync {
     if (!pending.length) {
       return;
     }
-    const batches =
+    const batches = breakChangesIntoBatches(pending, this.maxPayloadBytes);
     for (const batch of batches) {
       if (!this.state.connected) {
         throw new Error("Disconnected during flush");
package/dist/net/index.d.ts
CHANGED
@@ -10,14 +10,11 @@ export { Access, AuthContext, AuthorizationProvider, allowAll, denyAll } from '.
 export { onlineState } from './websocket/onlineState.js';
 export { PatchesWebSocket } from './websocket/PatchesWebSocket.js';
 export { RPCServer, RPCServerOptions } from './websocket/RPCServer.js';
-export { JsonRpcMessage,
+export { JsonRpcMessage, SignalingService } from './websocket/SignalingService.js';
 export { WebSocketServer } from './websocket/WebSocketServer.js';
 export { WebSocketOptions, WebSocketTransport } from './websocket/WebSocketTransport.js';
+export { CommitChangesOptions } from '../types.js';
 import '../event-signal.js';
-import '../types.js';
-import '../json-patch/JSONPatch.js';
-import '@dabble/delta';
-import '../json-patch/types.js';
 import '../client/Patches.js';
 import '../client/PatchesDoc.js';
 import '../client/PatchesStore.js';
@@ -25,3 +22,6 @@ import '../server/PatchesBranchManager.js';
 import '../server/PatchesServer.js';
 import '../server/types.js';
 import '../server/PatchesHistoryManager.js';
+import '../json-patch/JSONPatch.js';
+import '@dabble/delta';
+import '../json-patch/types.js';
package/dist/net/protocol/types.d.ts
CHANGED
@@ -1,5 +1,5 @@
 import { Unsubscriber } from '../../event-signal.js';
-import { PatchesState, Change, ChangeInput, EditableVersionMetadata, ListVersionsOptions, VersionMetadata } from '../../types.js';
+import { PatchesState, Change, ChangeInput, CommitChangesOptions, EditableVersionMetadata, ListVersionsOptions, VersionMetadata } from '../../types.js';
 import '../../json-patch/JSONPatch.js';
 import '@dabble/delta';
 import '../../json-patch/types.js';
@@ -123,7 +123,7 @@ interface PatchesAPI {
     /** Get changes that occurred after a specific revision. */
     getChangesSince(docId: string, rev: number): Promise<Change[]>;
     /** Apply a set of changes from the client to a document. Returns the committed changes. */
-    commitChanges(docId: string, changes: ChangeInput[]): Promise<Change[]>;
+    commitChanges(docId: string, changes: ChangeInput[], options?: CommitChangesOptions): Promise<Change[]>;
     /** Delete a document. */
    deleteDoc(docId: string): Promise<void>;
     /** Create a new named version snapshot of a document's current state. */
@@ -153,4 +153,4 @@ interface SignalNotificationParams {
     data: any;
 }
 
-export type
+export { type AwarenessUpdateNotificationParams, type ClientTransport, CommitChangesOptions, type ConnectionState, type JsonRpcNotification, type JsonRpcRequest, type JsonRpcResponse, type ListOptions, type Message, type PatchesAPI, type PatchesNotificationParams, type ServerTransport, type SignalNotificationParams };
package/dist/net/websocket/RPCServer.d.ts
CHANGED
@@ -1,4 +1,4 @@
-import { PatchesState, Change, ListVersionsOptions, VersionMetadata, EditableVersionMetadata, ListChangesOptions, Branch } from '../../types.js';
+import { PatchesState, Change, CommitChangesOptions, ListVersionsOptions, VersionMetadata, EditableVersionMetadata, ListChangesOptions, Branch } from '../../types.js';
 import { PatchesBranchManager } from '../../server/PatchesBranchManager.js';
 import { PatchesHistoryManager } from '../../server/PatchesHistoryManager.js';
 import { PatchesServer } from '../../server/PatchesServer.js';
@@ -64,10 +64,12 @@ declare class RPCServer {
     * @param params - The change parameters
     * @param params.docId - The ID of the document
     * @param params.changes - An array of changes to apply
+    * @param params.options - Optional commit settings (e.g., forceCommit for migrations)
     */
    commitChanges(params: {
        docId: string;
        changes: Change[];
+        options?: CommitChangesOptions;
    }, ctx?: AuthContext): Promise<Change[]>;
    /**
     * Deletes a document on the server.
package/dist/net/websocket/RPCServer.js
CHANGED
@@ -76,11 +76,12 @@ class RPCServer {
   * @param params - The change parameters
   * @param params.docId - The ID of the document
   * @param params.changes - An array of changes to apply
+  * @param params.options - Optional commit settings (e.g., forceCommit for migrations)
   */
  async commitChanges(params, ctx) {
-    const { docId, changes } = params;
+    const { docId, changes, options } = params;
    await this.assertWrite(ctx, docId, "commitChanges", params);
-    const [priorChanges, newChanges] = await this.patches.commitChanges(docId, changes, ctx?.clientId);
+    const [priorChanges, newChanges] = await this.patches.commitChanges(docId, changes, options, ctx?.clientId);
    return [...priorChanges, ...newChanges];
  }
  /**
package/dist/net/websocket/SignalingService.d.ts
CHANGED
@@ -7,30 +7,38 @@ import '../../json-patch/types.js';
 
 /** Union type for all possible JSON-RPC message types */
 type JsonRpcMessage = JsonRpcRequest | JsonRpcResponse;
-/** Function type for sending JSON-RPC messages */
-type SendFn = (message: JsonRpcMessage) => void;
 /**
  * Service that facilitates WebRTC connection establishment by relaying signaling messages.
  * Acts as a central hub for WebRTC peers to exchange connection information.
  */
-declare class SignalingService {
-
+declare abstract class SignalingService {
+    protected clients: Set<string>;
+    abstract send(id: string, message: JsonRpcMessage): void | Promise<void>;
+    /**
+     * Returns the list of all connected client IDs.
+     * @returns Array of client IDs
+     */
+    getClients(): Promise<Set<string>>;
+    /**
+     * Sets the list of all connected client IDs.
+     * @param clients - Set of client IDs
+     */
+    setClients(clients: Set<string>): Promise<void>;
    /**
     * Registers a new client connection with the signaling service.
     * Assigns a unique ID to the client and informs them of other connected peers.
     *
-    * @param send - Function to send messages to this client
     * @param id - Optional client ID (generated if not provided)
     * @returns The client's assigned ID
     */
-    onClientConnected(
+    onClientConnected(id?: string): Promise<string>;
    /**
     * Handles a client disconnection by removing them from the registry
     * and notifying all other connected clients.
     *
     * @param id - ID of the disconnected client
     */
-    onClientDisconnected(id: string): void
+    onClientDisconnected(id: string): Promise<void>;
@@ -39,33 +47,25 @@ declare class SignalingService {
     * @param message - The JSON-RPC message or its string representation
     * @returns True if the message was a valid signaling message and was handled, false otherwise
     */
-    handleClientMessage(fromId: string, message: string | JsonRpcRequest): boolean
+    handleClientMessage(fromId: string, message: string | JsonRpcRequest): Promise<boolean>;
    /**
     * Sends a successful JSON-RPC response to a client.
     *
-    * @
+    * @protected
     * @param toId - ID of the client to send the response to
     * @param id - Request ID to match in the response
     * @param result - Result data to include in the response
     */
-
+    protected respond(toId: string, id: number, result: any): Promise<void>;
    /**
     * Sends an error JSON-RPC response to a client.
     *
-    * @
+    * @protected
     * @param toId - ID of the client to send the error response to
     * @param id - Request ID to match in the response, or undefined for notifications
     * @param message - Error message to include
     */
-
-    /**
-     * Broadcasts a message to all connected clients, optionally excluding one.
-     *
-     * @private
-     * @param message - The message to broadcast
-     * @param excludeId - Optional ID of a client to exclude from the broadcast
-     */
-    private broadcast;
+    protected respondError(toId: string, id: number | undefined, message: string): Promise<void>;
 }
 
-export { type JsonRpcMessage,
+export { type JsonRpcMessage, SignalingService };
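Since SignalingService is now an abstract class with a single send(id, message) hook, each deployment supplies its own delivery mechanism. A minimal subclass sketch follows; the WebSocket registry is an assumption, and the overridden members come from the declaration above. getClients/setClients can likewise be overridden if the client registry should live outside process memory.

// Sketch of a concrete signaling service; the socket map is hypothetical.
import { SignalingService, type JsonRpcMessage } from '@dabble/patches/net'; // assumed subpath

class WebSocketSignaling extends SignalingService {
  private sockets = new Map<string, WebSocket>();

  register(id: string, ws: WebSocket): void {
    this.sockets.set(id, ws);
  }

  // Called by onClientConnected, onClientDisconnected, handleClientMessage,
  // respond and respondError in the base class.
  send(id: string, message: JsonRpcMessage): void {
    this.sockets.get(id)?.send(JSON.stringify(message));
  }
}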
package/dist/net/websocket/SignalingService.js
CHANGED
@@ -1,26 +1,41 @@
 import "../../chunk-IZ2YBCUP.js";
 import { createId } from "crypto-id";
 class SignalingService {
-  clients = /* @__PURE__ */ new
+  clients = /* @__PURE__ */ new Set();
+  /**
+   * Returns the list of all connected client IDs.
+   * @returns Array of client IDs
+   */
+  async getClients() {
+    return new Set(this.clients);
+  }
+  /**
+   * Sets the list of all connected client IDs.
+   * @param clients - Set of client IDs
+   */
+  async setClients(clients) {
+    this.clients = clients;
+  }
   /**
    * Registers a new client connection with the signaling service.
    * Assigns a unique ID to the client and informs them of other connected peers.
    *
-   * @param send - Function to send messages to this client
    * @param id - Optional client ID (generated if not provided)
    * @returns The client's assigned ID
    */
-  onClientConnected(
-
+  async onClientConnected(id = createId(14)) {
+    const clients = await this.getClients();
+    clients.add(id);
+    await this.setClients(clients);
    const welcome = {
      jsonrpc: "2.0",
      method: "peer-welcome",
      params: {
        id,
-        peers: Array.from(this.clients
+        peers: Array.from(this.clients).filter((pid) => pid !== id)
      }
    };
-    send(welcome);
+    this.send(id, welcome);
    return id;
  }
  /**
@@ -29,13 +44,16 @@ class SignalingService {
   *
   * @param id - ID of the disconnected client
   */
-  onClientDisconnected(id) {
-    this.
-
+  async onClientDisconnected(id) {
+    const clients = await this.getClients();
+    clients.delete(id);
+    await this.setClients(clients);
+    const message = {
      jsonrpc: "2.0",
      method: "peer-disconnected",
      params: { id }
-    }
+    };
+    await Promise.all(Array.from(clients).map((clientId) => this.send(clientId, message)));
  }
  /**
   * Handles a signaling message from a client, relaying WebRTC session data
@@ -45,7 +63,7 @@ class SignalingService {
   * @param message - The JSON-RPC message or its string representation
   * @returns True if the message was a valid signaling message and was handled, false otherwise
   */
-  handleClientMessage(fromId, message) {
+  async handleClientMessage(fromId, message) {
    let parsed;
    try {
      parsed = typeof message === "string" ? JSON.parse(message) : message;
@@ -55,8 +73,8 @@ class SignalingService {
    if (parsed.jsonrpc !== "2.0" || parsed.method !== "peer-signal" || !parsed.params?.to) return false;
    const { params, id } = parsed;
    const { to, data } = params;
-    const
-    if (!
+    const clients = await this.getClients();
+    if (!clients.has(to)) {
      this.respondError(fromId, id, "Target not connected");
      return true;
    }
@@ -68,62 +86,48 @@ class SignalingService {
        data
      }
    };
-
+    await this.send(to, outbound);
    if (id !== void 0) {
-      this.respond(fromId, id, "ok");
+      await this.respond(fromId, id, "ok");
    }
    return true;
  }
  /**
   * Sends a successful JSON-RPC response to a client.
   *
-   * @
+   * @protected
   * @param toId - ID of the client to send the response to
   * @param id - Request ID to match in the response
   * @param result - Result data to include in the response
   */
-  respond(toId, id, result) {
-    const
-    if (!
+  async respond(toId, id, result) {
+    const clients = await this.getClients();
+    if (!clients.has(toId)) return;
    const response = {
      jsonrpc: "2.0",
      result,
      id
    };
-
+    await this.send(toId, response);
  }
  /**
   * Sends an error JSON-RPC response to a client.
   *
-   * @
+   * @protected
   * @param toId - ID of the client to send the error response to
   * @param id - Request ID to match in the response, or undefined for notifications
   * @param message - Error message to include
   */
-  respondError(toId, id, message) {
+  async respondError(toId, id, message) {
    if (id === void 0) return;
-    const
-    if (!
+    const clients = await this.getClients();
+    if (!clients.has(toId)) return;
    const response = {
      jsonrpc: "2.0",
      error: { code: -32e3, message },
      id
    };
-
-  }
-  /**
-   * Broadcasts a message to all connected clients, optionally excluding one.
-   *
-   * @private
-   * @param message - The message to broadcast
-   * @param excludeId - Optional ID of a client to exclude from the broadcast
-   */
-  broadcast(message, excludeId) {
-    for (const [id, client] of this.clients.entries()) {
-      if (id !== excludeId) {
-        client.send(message);
-      }
-    }
+    await this.send(toId, response);
  }
 }
 export {
package/dist/server/PatchesBranchManager.d.ts
CHANGED
@@ -14,7 +14,8 @@ import '../event-signal.js';
 declare class PatchesBranchManager {
     private readonly store;
     private readonly patchesServer;
-
+    private readonly maxPayloadBytes?;
+    constructor(store: BranchingStoreBackend, patchesServer: PatchesServer, maxPayloadBytes?: number | undefined);
     /**
      * Lists all open branches for a document.
      * @param docId - The ID of the document.
package/dist/server/PatchesBranchManager.js
CHANGED
@@ -1,12 +1,14 @@
 import "../chunk-IZ2YBCUP.js";
 import { createId } from "crypto-id";
+import { breakChanges } from "../algorithms/shared/changeBatching.js";
 import { createChange } from "../data/change.js";
 import { createVersionMetadata } from "../data/version.js";
 import { getISO } from "../utils/dates.js";
 class PatchesBranchManager {
-  constructor(store, patchesServer) {
+  constructor(store, patchesServer, maxPayloadBytes) {
    this.store = store;
    this.patchesServer = patchesServer;
+    this.maxPayloadBytes = maxPayloadBytes;
  }
  /**
   * Lists all open branches for a document.
@@ -94,15 +96,21 @@ class PatchesBranchManager {
      await this.closeBranch(branchId, "merged");
      return [];
    }
+    const sourceChanges = await this.store.listChanges(sourceDocId, {
+      startAfter: branchStartRevOnSource
+    });
+    const canFastForward = sourceChanges.length === 0;
    const branchVersions = await this.store.listVersions(branchId, { origin: "main" });
+    const versionOrigin = canFastForward ? "main" : "branch";
    let lastVersionId;
    for (const v of branchVersions) {
      const newVersionMetadata = createVersionMetadata({
        ...v,
-        origin:
+        origin: versionOrigin,
        baseRev: branchStartRevOnSource,
        groupId: branchId,
        branchName: branch.name,
+        // Keep branchName for traceability
        parentId: lastVersionId
      });
      const state = await this.store.loadVersionState(branchId, v.id);
@@ -110,15 +118,29 @@ class PatchesBranchManager {
      await this.store.createVersion(sourceDocId, newVersionMetadata, state, changes);
      lastVersionId = newVersionMetadata.id;
    }
-    const rev = branchStartRevOnSource + branchChanges.length;
-    const flattenedChange = createChange(
-      branchStartRevOnSource,
-      rev,
-      branchChanges.flatMap((c) => c.ops)
-    );
    let committedMergeChanges = [];
    try {
-
+      if (canFastForward) {
+        const adjustedChanges = branchChanges.map((c) => ({
+          ...c,
+          baseRev: branchStartRevOnSource,
+          rev: void 0
+          // Let commitChanges assign sequential revs
+        }));
+        [, committedMergeChanges] = await this.patchesServer.commitChanges(sourceDocId, adjustedChanges);
+      } else {
+        const rev = branchStartRevOnSource + branchChanges.length;
+        const flattenedChange = createChange(
+          branchStartRevOnSource,
+          rev,
+          branchChanges.flatMap((c) => c.ops)
+        );
+        let changesToCommit = [flattenedChange];
+        if (this.maxPayloadBytes) {
+          changesToCommit = breakChanges(changesToCommit, this.maxPayloadBytes);
+        }
+        [, committedMergeChanges] = await this.patchesServer.commitChanges(sourceDocId, changesToCommit);
+      }
    } catch (error) {
      console.error(`Failed to merge branch ${branchId} into ${sourceDocId}:`, error);
      throw new Error(`Merge failed: ${error instanceof Error ? error.message : String(error)}`);
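The branch manager now takes the same byte limit as an optional third constructor argument; when a merge cannot fast-forward, the flattened merge change is passed through breakChanges so no single committed change exceeds it. A construction sketch, where the store, existing server instance, and import subpath are assumptions:

// Sketch only; `store` and `server` are pre-existing instances.
import { PatchesBranchManager } from '@dabble/patches/server'; // assumed subpath

declare const store: any;   // a BranchingStoreBackend implementation (hypothetical)
declare const server: any;  // an existing PatchesServer instance (hypothetical)
const branches = new PatchesBranchManager(store, server, 256 * 1024);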
package/dist/server/PatchesServer.d.ts
CHANGED
@@ -1,6 +1,6 @@
 import { Signal } from '../event-signal.js';
-import { Change, PatchesState, ChangeInput, ChangeMutator, EditableVersionMetadata } from '../types.js';
 import { PatchesStoreBackend } from './types.js';
+import { Change, PatchesState, ChangeInput, CommitChangesOptions, ChangeMutator, EditableVersionMetadata } from '../types.js';
 import '../json-patch/JSONPatch.js';
 import '@dabble/delta';
 import '../json-patch/types.js';
@@ -15,6 +15,12 @@ interface PatchesServerOptions {
     * Defaults to 30 minutes.
     */
    sessionTimeoutMinutes?: number;
+    /**
+     * Maximum size in bytes for a single change's JSON representation.
+     * If a flattened/collapsed change exceeds this, it will be split into multiple changes.
+     * Useful for databases with row size limits.
+     */
+    maxPayloadBytes?: number;
 }
 /**
  * Handles the server-side Operational Transformation (OT) logic,
@@ -24,6 +30,7 @@ interface PatchesServerOptions {
 declare class PatchesServer {
     readonly store: PatchesStoreBackend;
     private readonly sessionTimeoutMillis;
+    private readonly maxPayloadBytes?;
     /** Notifies listeners whenever a batch of changes is *successfully* committed. */
     readonly onChangesCommitted: Signal<(docId: string, changes: Change[], originClientId?: string) => void>;
     /** Notifies listeners when a document is deleted. */
@@ -54,12 +61,13 @@ declare class PatchesServer {
     * Commits a set of changes to a document, applying operational transformation as needed.
     * @param docId - The ID of the document.
     * @param changes - The changes to commit.
-    * @param
+    * @param options - Optional commit settings (e.g., forceCommit for migrations).
+    * @param originClientId - The ID of the client that initiated the commit (used by transport layer for broadcast filtering).
     * @returns A tuple of [committedChanges, transformedChanges] where:
     * - committedChanges: Changes that were already committed to the server after the client's base revision
     * - transformedChanges: The client's changes after being transformed against concurrent changes
     */
-    commitChanges(docId: string, changes: ChangeInput[], originClientId?: string): Promise<[Change[], Change[]]>;
+    commitChanges(docId: string, changes: ChangeInput[], options?: CommitChangesOptions, originClientId?: string): Promise<[Change[], Change[]]>;
    /**
     * Make a server-side change to a document.
     * @param mutator
@@ -82,4 +90,4 @@ declare class PatchesServer {
 }
 declare function assertVersionMetadata(metadata?: EditableVersionMetadata): void;
 
-export { PatchesServer, type PatchesServerOptions, assertVersionMetadata };
+export { CommitChangesOptions, PatchesServer, type PatchesServerOptions, assertVersionMetadata };
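A sketch of the new server option and the changed argument order, based on the declarations above; the store backend and the import subpath are assumptions.

// Sketch only; `store` and the subpath import are assumed.
import { PatchesServer } from '@dabble/patches/server'; // assumed subpath

declare const store: any; // a PatchesStoreBackend implementation (hypothetical)
const server = new PatchesServer(store, {
  sessionTimeoutMinutes: 30,
  maxPayloadBytes: 256 * 1024, // split oversized collapsed changes (e.g. for row-size limits)
});

// Note that options now come before originClientId:
// const [committed, transformed] =
//   await server.commitChanges(docId, changes, { historicalImport: true }, clientId);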
package/dist/server/PatchesServer.js
CHANGED
@@ -11,8 +11,10 @@ class PatchesServer {
  constructor(store, options = {}) {
    this.store = store;
    this.sessionTimeoutMillis = (options.sessionTimeoutMinutes ?? 30) * 60 * 1e3;
+    this.maxPayloadBytes = options.maxPayloadBytes;
  }
  sessionTimeoutMillis;
+  maxPayloadBytes;
  /** Notifies listeners whenever a batch of changes is *successfully* committed. */
  onChangesCommitted = signal();
  /** Notifies listeners when a document is deleted. */
@@ -48,17 +50,20 @@ class PatchesServer {
   * Commits a set of changes to a document, applying operational transformation as needed.
   * @param docId - The ID of the document.
   * @param changes - The changes to commit.
-   * @param
+   * @param options - Optional commit settings (e.g., forceCommit for migrations).
+   * @param originClientId - The ID of the client that initiated the commit (used by transport layer for broadcast filtering).
   * @returns A tuple of [committedChanges, transformedChanges] where:
   * - committedChanges: Changes that were already committed to the server after the client's base revision
   * - transformedChanges: The client's changes after being transformed against concurrent changes
   */
-  async commitChanges(docId, changes, originClientId) {
+  async commitChanges(docId, changes, options, originClientId) {
    const [committedChanges, transformedChanges] = await commitChanges(
      this.store,
      docId,
      changes,
-      this.sessionTimeoutMillis
+      this.sessionTimeoutMillis,
+      options,
+      this.maxPayloadBytes
    );
    if (transformedChanges.length > 0) {
      try {
package/dist/server/index.d.ts
CHANGED
@@ -2,8 +2,8 @@ export { PatchesBranchManager } from './PatchesBranchManager.js';
 export { PatchesHistoryManager } from './PatchesHistoryManager.js';
 export { PatchesServer, PatchesServerOptions } from './PatchesServer.js';
 export { BranchingStoreBackend, PatchesStoreBackend } from './types.js';
-
+export { CommitChangesOptions } from '../types.js';
+import '../event-signal.js';
 import '../json-patch/JSONPatch.js';
 import '@dabble/delta';
 import '../json-patch/types.js';
-import '../event-signal.js';
package/dist/server/types.d.ts
CHANGED
@@ -23,6 +23,11 @@ interface PatchesStoreBackend {
     createVersion(docId: string, metadata: VersionMetadata, state: any, changes: Change[]): Promise<void>;
     /** Update a version's metadata. */
     updateVersion(docId: string, versionId: string, metadata: EditableVersionMetadata): Promise<void>;
+    /**
+     * Appends changes to an existing version, updating its state snapshot, endedAt, and rev.
+     * Used when a session spans multiple batch submissions.
+     */
+    appendVersionChanges(docId: string, versionId: string, changes: Change[], newEndedAt: string, newRev: number, newState: any): Promise<void>;
     /** Lists version metadata based on filtering/sorting options. */
     listVersions(docId: string, options: ListVersionsOptions): Promise<VersionMetadata[]>;
     /** Loads the state snapshot for a specific version ID. */
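Custom store backends must now implement appendVersionChanges. A naive in-memory sketch based on the declared signature; the storage map and the shape of the stored version record beyond the touched fields are assumptions.

// Sketch only; `versions` and the stored record shape are hypothetical.
import type { Change } from '@dabble/patches';

declare const versions: Map<string, any>; // hypothetical in-memory version store

async function appendVersionChanges(
  docId: string, versionId: string, changes: Change[],
  newEndedAt: string, newRev: number, newState: any
): Promise<void> {
  const version = versions.get(`${docId}/${versionId}`);
  if (!version) throw new Error(`Unknown version ${versionId}`);
  version.changes.push(...changes); // extend the stored session changes
  version.endedAt = newEndedAt;     // move the session end forward
  version.rev = newRev;             // track the latest rev covered
  version.state = newState;         // replace the state snapshot
  versions.set(`${docId}/${versionId}`, version);
}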
package/dist/types.d.ts
CHANGED
@@ -92,8 +92,10 @@ interface VersionMetadata {
     parentId?: string;
     /** Identifier linking versions from the same offline batch or branch. */
     groupId?: string;
-    /** Indicates how the version was created ('main', 'offline', 'branch'). */
-    origin: 'main' | 'offline' | 'branch';
+    /** Indicates how the version was created ('main', 'offline-branch', 'branch'). */
+    origin: 'main' | 'offline-branch' | 'branch';
+    /** Was this version created while offline? Tracks creation context separately from timeline position. */
+    isOffline?: boolean;
     /** User-defined name if origin is 'branch'. */
     branchName?: string;
     /** Server-side ISO timestamp of version start (UTC with Z). */
@@ -111,6 +113,23 @@ type Disallowed<T, K extends keyof T> = Pick<T, Exclude<keyof T, K>> & {
     [P in K]?: never;
 };
 type EditableVersionMetadata = Disallowed<VersionMetadata, 'id' | 'parentId' | 'groupId' | 'origin' | 'branchName' | 'startedAt' | 'endedAt' | 'rev' | 'baseRev'>;
+/**
+ * Options for committing changes.
+ */
+interface CommitChangesOptions {
+    /**
+     * If true, save changes even if they result in no state modification.
+     * Useful for migrations where change history must be preserved exactly.
+     */
+    forceCommit?: boolean;
+    /**
+     * Enable historical import mode for migrations. When true:
+     * - Preserves `committedAt` if provided (otherwise sets to serverNow)
+     * - Uses first incoming change's timestamp for session gap detection (not serverNow)
+     * - Creates versions with origin: 'main' instead of 'offline'
+     */
+    historicalImport?: boolean;
+}
 /**
  * Options for listing committed server changes. *Always* ordered by revision number.
  */
@@ -141,7 +160,7 @@ interface ListVersionsOptions {
     /** Return versions in descending order. Defaults to false (ascending). When reversed, startAfter and endBefore apply to the *reversed* list. */
     reverse?: boolean;
     /** Filter by the origin type. */
-    origin?: 'main' | 'offline' | 'branch';
+    origin?: 'main' | 'offline-branch' | 'branch';
     /** Filter by the group ID (branch ID or offline batch ID). */
     groupId?: string;
 }
@@ -175,4 +194,4 @@ type PathProxy<T = any> = IsAny<T> extends true ? DeepPathProxy : {
  */
 type ChangeMutator<T> = (patch: JSONPatch, root: PathProxy<T>) => void;
 
-export type { Branch, BranchStatus, Change, ChangeInput, ChangeMutator, EditableBranchMetadata, EditableVersionMetadata, ListChangesOptions, ListVersionsOptions, PatchesSnapshot, PatchesState, PathProxy, SyncingState, VersionMetadata };
+export type { Branch, BranchStatus, Change, ChangeInput, ChangeMutator, CommitChangesOptions, EditableBranchMetadata, EditableVersionMetadata, ListChangesOptions, ListVersionsOptions, PatchesSnapshot, PatchesState, PathProxy, SyncingState, VersionMetadata };
package/dist/utils/dates.js
CHANGED
@@ -15,6 +15,7 @@ function timestampDiff(a, b) {
   return new Date(a).getTime() - new Date(b).getTime();
 }
 function clampTimestamp(timestamp, limit) {
+  if (!timestamp || !limit) throw new Error("Timestamp and limit are required");
   const timestampDate = new Date(timestamp);
   const limitDate = new Date(limit);
   if (timestampDate <= limitDate) {
@@ -24,6 +25,7 @@ function clampTimestamp(timestamp, limit) {
   return getLocalISO(limitDate, offset);
 }
 function extractTimezoneOffset(iso) {
+  if (!iso) return "Z";
   const match = iso.match(/([+-]\d{2}:\d{2}|Z)$/);
   return match ? match[1] : "Z";
 }
package/package.json
CHANGED
@@ -1,6 +1,6 @@
 {
   "name": "@dabble/patches",
-  "version": "0.5.2",
+  "version": "0.5.3",
   "description": "Immutable JSON Patch implementation based on RFC 6902 supporting operational transformation and last-writer-wins",
   "author": "Jacob Wright <jacwright@gmail.com>",
   "bugs": {
package/dist/algorithms/client/batching.d.ts
DELETED
@@ -1,9 +0,0 @@
-import { Change } from '../../types.js';
-import '../../json-patch/JSONPatch.js';
-import '@dabble/delta';
-import '../../json-patch/types.js';
-
-/** Break changes into batches based on maxPayloadBytes. */
-declare function breakIntoBatches(changes: Change[], maxPayloadBytes?: number): Change[][];
-
-export { breakIntoBatches };
package/dist/algorithms/client/batching.js
DELETED
@@ -1,42 +0,0 @@
-import "../../chunk-IZ2YBCUP.js";
-import { createId } from "crypto-id";
-import { breakChange } from "./breakChange.js";
-import { getJSONByteSize } from "./getJSONByteSize.js";
-function breakIntoBatches(changes, maxPayloadBytes) {
-  if (!maxPayloadBytes || getJSONByteSize(changes) < maxPayloadBytes) {
-    return [changes];
-  }
-  const batchId = createId(12);
-  const batches = [];
-  let currentBatch = [];
-  let currentSize = 2;
-  for (const change of changes) {
-    const changeWithBatchId = { ...change, batchId };
-    const individualActualSize = getJSONByteSize(changeWithBatchId);
-    let itemsToProcess;
-    if (individualActualSize > maxPayloadBytes) {
-      itemsToProcess = breakChange(changeWithBatchId, maxPayloadBytes);
-    } else {
-      itemsToProcess = [changeWithBatchId];
-    }
-    for (const item of itemsToProcess) {
-      const itemActualSize = getJSONByteSize(item);
-      const itemSizeForBatching = itemActualSize + (currentBatch.length > 0 ? 1 : 0);
-      if (currentBatch.length > 0 && currentSize + itemSizeForBatching > maxPayloadBytes) {
-        batches.push(currentBatch);
-        currentBatch = [];
-        currentSize = 2;
-      }
-      const actualItemContribution = itemActualSize + (currentBatch.length > 0 ? 1 : 0);
-      currentBatch.push(item);
-      currentSize += actualItemContribution;
-    }
-  }
-  if (currentBatch.length > 0) {
-    batches.push(currentBatch);
-  }
-  return batches;
-}
-export {
-  breakIntoBatches
-};
package/dist/algorithms/client/breakChange.d.ts
DELETED
@@ -1,15 +0,0 @@
-import { Change } from '../../types.js';
-import '../../json-patch/JSONPatch.js';
-import '@dabble/delta';
-import '../../json-patch/types.js';
-
-/**
- * Break a single Change into multiple Changes so that the JSON string size never exceeds `maxBytes`.
- *
- * - Splits first by JSON-Patch *ops*
- * - If an individual op is still too big and is a "@txt" op,
- *   split its Delta payload into smaller Deltas
- */
-declare function breakChange(orig: Change, maxBytes: number): Change[];
-
-export { breakChange };
package/dist/algorithms/client/getJSONByteSize.js
DELETED
@@ -1,13 +0,0 @@
-import "../../chunk-IZ2YBCUP.js";
-function getJSONByteSize(data) {
-  try {
-    const stringified = JSON.stringify(data);
-    return stringified ? new TextEncoder().encode(stringified).length : 0;
-  } catch (e) {
-    console.error("Error calculating JSON size:", e);
-    throw new Error("Error calculating JSON size: " + e);
-  }
-}
-export {
-  getJSONByteSize
-};