@dabble/patches 0.5.2 → 0.5.4
This diff compares publicly available package versions as released to a supported registry. It is provided for informational purposes only and reflects the package contents exactly as they appear in their public registries.
- package/dist/algorithms/client/makeChange.d.ts +2 -1
- package/dist/algorithms/client/makeChange.js +4 -4
- package/dist/algorithms/server/commitChanges.d.ts +4 -3
- package/dist/algorithms/server/commitChanges.js +25 -6
- package/dist/algorithms/server/handleOfflineSessionsAndBatches.d.ts +5 -2
- package/dist/algorithms/server/handleOfflineSessionsAndBatches.js +16 -8
- package/dist/algorithms/server/transformIncomingChanges.d.ts +2 -1
- package/dist/algorithms/server/transformIncomingChanges.js +11 -9
- package/dist/algorithms/shared/changeBatching.d.ts +57 -0
- package/dist/algorithms/{client/breakChange.js → shared/changeBatching.js} +86 -16
- package/dist/algorithms/shared/lz.d.ts +12 -0
- package/dist/algorithms/shared/lz.js +456 -0
- package/dist/client/Patches.d.ts +1 -0
- package/dist/client/PatchesDoc.d.ts +16 -4
- package/dist/client/PatchesDoc.js +5 -3
- package/dist/client/index.d.ts +1 -0
- package/dist/compression/index.d.ts +67 -0
- package/dist/compression/index.js +47 -0
- package/dist/index.d.ts +2 -1
- package/dist/net/PatchesClient.d.ts +3 -2
- package/dist/net/PatchesClient.js +3 -2
- package/dist/net/PatchesSync.d.ts +9 -0
- package/dist/net/PatchesSync.js +11 -3
- package/dist/net/index.d.ts +8 -5
- package/dist/net/protocol/types.d.ts +3 -3
- package/dist/net/websocket/RPCServer.d.ts +5 -1
- package/dist/net/websocket/RPCServer.js +3 -2
- package/dist/net/websocket/SignalingService.d.ts +21 -21
- package/dist/net/websocket/SignalingService.js +43 -39
- package/dist/net/websocket/WebSocketServer.d.ts +2 -0
- package/dist/server/CompressedStoreBackend.d.ts +44 -0
- package/dist/server/CompressedStoreBackend.js +79 -0
- package/dist/server/PatchesBranchManager.d.ts +4 -1
- package/dist/server/PatchesBranchManager.js +31 -9
- package/dist/server/PatchesHistoryManager.d.ts +2 -0
- package/dist/server/PatchesServer.d.ts +24 -5
- package/dist/server/PatchesServer.js +18 -7
- package/dist/server/index.d.ts +6 -3
- package/dist/server/index.js +2 -0
- package/dist/server/types.d.ts +5 -0
- package/dist/types.d.ts +23 -4
- package/dist/utils/dates.js +2 -0
- package/package.json +5 -1
- package/dist/algorithms/client/batching.d.ts +0 -9
- package/dist/algorithms/client/batching.js +0 -42
- package/dist/algorithms/client/breakChange.d.ts +0 -15
- package/dist/algorithms/client/getJSONByteSize.d.ts +0 -4
- package/dist/algorithms/client/getJSONByteSize.js +0 -13

package/dist/algorithms/client/makeChange.d.ts

@@ -1,8 +1,9 @@
 import { PatchesSnapshot, ChangeMutator, Change } from '../../types.js';
+import { SizeCalculator } from '../shared/changeBatching.js';
 import '../../json-patch/JSONPatch.js';
 import '@dabble/delta';
 import '../../json-patch/types.js';
 
-declare function makeChange<T = any>(snapshot: PatchesSnapshot<T>, mutator: ChangeMutator<T>, changeMetadata?: Record<string, any>, maxPayloadBytes?: number): Change[];
+declare function makeChange<T = any>(snapshot: PatchesSnapshot<T>, mutator: ChangeMutator<T>, changeMetadata?: Record<string, any>, maxStorageBytes?: number, sizeCalculator?: SizeCalculator): Change[];
 
 export { makeChange };
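
The client-side `makeChange` gains two optional parameters: `maxStorageBytes`, which splits an oversized change at creation time, and `sizeCalculator`, which lets the size check account for compression. A minimal sketch of how a caller might use them; the root import paths and document shape are assumptions, while `compressedSizeBase64` is one of the pre-built calculators documented in the new changeBatching.d.ts below:

import { makeChange } from '@dabble/patches'; // assumed re-export location
import { compressedSizeBase64 } from '@dabble/patches/compression';
import type { PatchesSnapshot } from '@dabble/patches';

declare const snapshot: PatchesSnapshot<{ text: string }>; // current doc snapshot
declare const pastedText: string;                          // a very large paste

// Any resulting change whose compressed size exceeds 256 KiB is split.
const changes = makeChange(
  snapshot,
  doc => { doc.text += pastedText; },
  { author: 'alice' },
  256 * 1024,          // maxStorageBytes
  compressedSizeBase64 // sizeCalculator: LZ compression + base64
);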

package/dist/algorithms/client/makeChange.js

@@ -1,9 +1,9 @@
 import "../../chunk-IZ2YBCUP.js";
 import { createChange } from "../../data/change.js";
 import { createJSONPatch } from "../../json-patch/createJSONPatch.js";
-import { breakChange } from "./breakChange.js";
+import { breakChanges } from "../shared/changeBatching.js";
 import { createStateFromSnapshot } from "./createStateFromSnapshot.js";
-function makeChange(snapshot, mutator, changeMetadata, maxPayloadBytes) {
+function makeChange(snapshot, mutator, changeMetadata, maxStorageBytes, sizeCalculator) {
   const pendingChanges = snapshot.changes;
   const pendingRev = pendingChanges[pendingChanges.length - 1]?.rev ?? snapshot.rev;
   const state = createStateFromSnapshot(snapshot);

@@ -19,8 +19,8 @@ function makeChange(snapshot, mutator, changeMetadata, maxPayloadBytes) {
     console.error("Failed to apply change to state during makeChange:", error);
     throw new Error(`Failed to apply change to state during makeChange: ${error}`);
   }
-  if (maxPayloadBytes) {
-    newChangesArray =
+  if (maxStorageBytes) {
+    newChangesArray = breakChanges(newChangesArray, maxStorageBytes, sizeCalculator);
   }
   return newChangesArray;
 }

package/dist/algorithms/server/commitChanges.d.ts

@@ -1,5 +1,5 @@
 import { PatchesStoreBackend } from '../../server/types.js';
-import { ChangeInput, Change } from '../../types.js';
+import { ChangeInput, CommitChangesOptions, Change } from '../../types.js';
 import '../../json-patch/JSONPatch.js';
 import '@dabble/delta';
 import '../../json-patch/types.js';

@@ -9,10 +9,11 @@ import '../../json-patch/types.js';
  * @param docId - The ID of the document.
  * @param changes - The changes to commit.
  * @param originClientId - The ID of the client that initiated the commit.
+ * @param options - Optional commit settings.
  * @returns A tuple of [committedChanges, transformedChanges] where:
  * - committedChanges: Changes that were already committed to the server after the client's base revision
  * - transformedChanges: The client's changes after being transformed against concurrent changes
  */
-declare function commitChanges(store: PatchesStoreBackend, docId: string, changes: ChangeInput[], sessionTimeoutMillis: number): Promise<[Change[], Change[]]>;
+declare function commitChanges(store: PatchesStoreBackend, docId: string, changes: ChangeInput[], sessionTimeoutMillis: number, options?: CommitChangesOptions, maxStorageBytes?: number): Promise<[Change[], Change[]]>;
 
-export { commitChanges };
+export { CommitChangesOptions, commitChanges };
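
Judging from the implementation below, `CommitChangesOptions` carries at least `historicalImport` (preserve incoming `committedAt` timestamps and commit straight to the `main` origin) and `forceCommit` (keep changes even when they transform to a no-op). A hedged sketch of a server-side import flow; the `@dabble/patches/server` import path and the declared inputs are assumptions:

import { commitChanges } from '@dabble/patches/server'; // assumed export location
import type { PatchesStoreBackend, ChangeInput } from '@dabble/patches/server';

declare const store: PatchesStoreBackend;
declare const importedChanges: ChangeInput[]; // changes migrated from another system

// Replay history into a document, keeping original commit timestamps and
// splitting anything larger than 512 KiB before storage.
const [committed, transformed] = await commitChanges(
  store,
  'doc-1',
  importedChanges,
  30 * 60 * 1000,             // sessionTimeoutMillis: 30-minute sessions
  { historicalImport: true }, // CommitChangesOptions (forceCommit also available)
  512 * 1024                  // maxStorageBytes
);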

package/dist/algorithms/server/commitChanges.js

@@ -6,7 +6,7 @@ import { getSnapshotAtRevision } from "./getSnapshotAtRevision.js";
 import { getStateAtRevision } from "./getStateAtRevision.js";
 import { handleOfflineSessionsAndBatches } from "./handleOfflineSessionsAndBatches.js";
 import { transformIncomingChanges } from "./transformIncomingChanges.js";
-async function commitChanges(store, docId, changes, sessionTimeoutMillis) {
+async function commitChanges(store, docId, changes, sessionTimeoutMillis, options, maxStorageBytes) {
   if (changes.length === 0) {
     return [[], []];
   }

@@ -24,8 +24,10 @@ async function commitChanges(store, docId, changes, sessionTimeoutMillis) {
     }
     if (c.rev == null) c.rev = rev++;
     else rev = c.rev + 1;
-    c.committedAt = serverNow;
-
+    if (!options?.historicalImport || !c.committedAt) {
+      c.committedAt = serverNow;
+    }
+    c.createdAt = c.createdAt ? clampTimestamp(c.createdAt, serverNow) : serverNow;
   });
   if (baseRev > currentRev) {
     throw new Error(

@@ -39,7 +41,8 @@ async function commitChanges(store, docId, changes, sessionTimeoutMillis) {
     );
   }
   const lastChange = currentChanges[currentChanges.length - 1];
-  if (lastChange && timestampDiff(serverNow, lastChange.createdAt) > sessionTimeoutMillis) {
+  const compareTime = options?.historicalImport ? changes[0].createdAt : serverNow;
+  if (lastChange && timestampDiff(compareTime, lastChange.createdAt) > sessionTimeoutMillis) {
     await createVersion(store, docId, currentState, currentChanges);
   }
   const committedChanges = await store.listChanges(docId, {

@@ -53,17 +56,33 @@ async function commitChanges(store, docId, changes, sessionTimeoutMillis) {
   }
   const isOfflineTimestamp = timestampDiff(serverNow, incomingChanges[0].createdAt) > sessionTimeoutMillis;
   if (isOfflineTimestamp || batchId) {
+    const canFastForward = committedChanges.length === 0;
+    const origin = options?.historicalImport ? "main" : canFastForward ? "main" : "offline-branch";
     incomingChanges = await handleOfflineSessionsAndBatches(
       store,
       sessionTimeoutMillis,
       docId,
       incomingChanges,
       baseRev,
-      batchId
+      batchId,
+      origin,
+      true,
+      // isOffline
+      maxStorageBytes
     );
+    if (canFastForward) {
+      await store.saveChanges(docId, incomingChanges);
+      return [[], incomingChanges];
+    }
   }
   const stateAtBaseRev = (await getStateAtRevision(store, docId, baseRev)).state;
-  const transformedChanges = transformIncomingChanges(incomingChanges, stateAtBaseRev, committedChanges, currentRev);
+  const transformedChanges = transformIncomingChanges(
+    incomingChanges,
+    stateAtBaseRev,
+    committedChanges,
+    currentRev,
+    options?.forceCommit
+  );
   if (transformedChanges.length > 0) {
     await store.saveChanges(docId, transformedChanges);
   }
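
Worth calling out in the hunk above: when nothing has been committed after the client's `baseRev` (`canFastForward`), offline or batched changes are now saved verbatim under the `main` origin and returned without rebasing; only truly divergent sessions go through `offline-branch` collapse and transformation. The decision, restated as a standalone sketch with the inputs declared for illustration:

declare const committedChanges: unknown[]; // changes landed after the client's baseRev
declare const historicalImport: boolean;   // from CommitChangesOptions

// Per offline/batched commit (mirrors the compiled logic above):
const canFastForward = committedChanges.length === 0;   // no concurrent server changes
const origin = historicalImport || canFastForward
  ? 'main'            // keep the changes on the main timeline
  : 'offline-branch'; // collapse into a branch version, then rebase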

package/dist/algorithms/server/handleOfflineSessionsAndBatches.d.ts

@@ -11,8 +11,11 @@ import '../../json-patch/types.js';
  * @param changes The incoming changes (all with the same batchId)
  * @param baseRev The base revision for the batch
  * @param batchId The batch identifier
- * @
+ * @param origin The origin to use for created versions (default: 'offline-branch')
+ * @param isOffline Whether these changes were created offline (metadata flag)
+ * @param maxStorageBytes If set, break collapsed changes that exceed this size
+ * @returns The changes (collapsed into one if divergent, unchanged if fast-forward)
  */
-declare function handleOfflineSessionsAndBatches(store: PatchesStoreBackend, sessionTimeoutMillis: number, docId: string, changes: Change[], baseRev: number, batchId?: string): Promise<Change[]>;
+declare function handleOfflineSessionsAndBatches(store: PatchesStoreBackend, sessionTimeoutMillis: number, docId: string, changes: Change[], baseRev: number, batchId?: string, origin?: 'main' | 'offline-branch', isOffline?: boolean, maxStorageBytes?: number): Promise<Change[]>;
 
 export { handleOfflineSessionsAndBatches };

package/dist/algorithms/server/handleOfflineSessionsAndBatches.js

@@ -3,8 +3,9 @@ import { createSortableId } from "crypto-id";
 import { createVersionMetadata } from "../../data/version.js";
 import { getISO, timestampDiff } from "../../utils/dates.js";
 import { applyChanges } from "../shared/applyChanges.js";
+import { breakChanges } from "../shared/changeBatching.js";
 import { getStateAtRevision } from "./getStateAtRevision.js";
-async function handleOfflineSessionsAndBatches(store, sessionTimeoutMillis, docId, changes, baseRev, batchId) {
+async function handleOfflineSessionsAndBatches(store, sessionTimeoutMillis, docId, changes, baseRev, batchId, origin = "offline-branch", isOffline = true, maxStorageBytes) {
   const groupId = batchId ?? createSortableId();
   const [lastVersion] = await store.listVersions(docId, {
     groupId,

@@ -30,8 +31,9 @@ async function handleOfflineSessionsAndBatches(store, sessionTimeoutMillis, docI
   const isContinuation = !!lastVersion && timestampDiff(sessionChanges[0].createdAt, lastVersion.endedAt) <= sessionTimeoutMillis;
   if (isContinuation) {
     const mergedState = applyChanges(offlineBaseState, sessionChanges);
-
-
+    const newEndedAt = getISO(sessionChanges[sessionChanges.length - 1].createdAt);
+    const newRev = sessionChanges[sessionChanges.length - 1].rev;
+    await store.appendVersionChanges(docId, lastVersion.id, sessionChanges, newEndedAt, newRev, mergedState);
     offlineBaseState = mergedState;
     parentId = lastVersion.parentId;
   } else {

@@ -39,7 +41,8 @@ async function handleOfflineSessionsAndBatches(store, sessionTimeoutMillis, docI
     const sessionMetadata = createVersionMetadata({
       parentId,
       groupId,
-      origin
+      origin,
+      isOffline,
       // Convert client timestamps to UTC for version metadata (enables lexicographic sorting)
       startedAt: getISO(sessionChanges[0].createdAt),
       endedAt: getISO(sessionChanges[sessionChanges.length - 1].createdAt),

@@ -53,12 +56,17 @@ async function handleOfflineSessionsAndBatches(store, sessionTimeoutMillis, docI
       }
     }
   }
-
-  changes.reduce((firstChange, nextChange) => {
+  if (origin === "offline-branch") {
+    const collapsed = changes.reduce((firstChange, nextChange) => {
       firstChange.ops = [...firstChange.ops, ...nextChange.ops];
       return firstChange;
-    })
-
+    });
+    if (maxStorageBytes) {
+      return breakChanges([collapsed], maxStorageBytes);
+    }
+    return [collapsed];
+  }
+  return changes;
 }
 export {
   handleOfflineSessionsAndBatches
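
The divergent path above collapses an entire offline session into a single change (ops concatenated in order, mutating the first change), then re-splits the result against the storage limit. A toy restatement of that step, with the inputs declared rather than real:

import type { Change } from '@dabble/patches'; // assumed type export

declare const changes: Change[];          // one offline session, oldest first
declare const maxStorageBytes: number | undefined;
declare function breakChanges(changes: Change[], maxBytes: number): Change[];

// Merge every op into the first change, preserving order...
const collapsed = changes.reduce((first, next) => {
  first.ops = [...first.ops, ...next.ops];
  return first;
});
// ...then split it back up if it blew past the per-change storage limit.
const result = maxStorageBytes ? breakChanges([collapsed], maxStorageBytes) : [collapsed];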

package/dist/algorithms/server/transformIncomingChanges.d.ts

@@ -10,8 +10,9 @@ import '../../json-patch/types.js';
  * @param stateAtBaseRev The server state *at the client's baseRev*.
  * @param committedChanges The committed changes that happened *after* the client's baseRev.
  * @param currentRev The current/latest revision number (these changes will have their `rev` set > `currentRev`).
+ * @param forceCommit If true, skip filtering of no-op changes (useful for migrations).
  * @returns The transformed changes.
  */
-declare function transformIncomingChanges(changes: Change[], stateAtBaseRev: any, committedChanges: Change[], currentRev: number): Change[];
+declare function transformIncomingChanges(changes: Change[], stateAtBaseRev: any, committedChanges: Change[], currentRev: number, forceCommit?: boolean): Change[];
 
 export { transformIncomingChanges };

package/dist/algorithms/server/transformIncomingChanges.js

@@ -1,24 +1,26 @@
 import "../../chunk-IZ2YBCUP.js";
 import { applyPatch } from "../../json-patch/applyPatch.js";
 import { transformPatch } from "../../json-patch/transformPatch.js";
-function transformIncomingChanges(changes, stateAtBaseRev, committedChanges, currentRev) {
+function transformIncomingChanges(changes, stateAtBaseRev, committedChanges, currentRev, forceCommit = false) {
   const committedOps = committedChanges.flatMap((c) => c.ops);
   let state = stateAtBaseRev;
   let rev = currentRev + 1;
   return changes.map((change) => {
     const transformedOps = transformPatch(stateAtBaseRev, committedOps, change.ops);
-    if (transformedOps.length === 0) {
+    if (transformedOps.length === 0 && !forceCommit) {
       return null;
     }
-    try {
-      const previous = state;
-      state = applyPatch(state, transformedOps, { strict: true });
-      if (previous === state) {
+    if (transformedOps.length > 0) {
+      try {
+        const previous = state;
+        state = applyPatch(state, transformedOps, { strict: true });
+        if (previous === state && !forceCommit) {
+          return null;
+        }
+      } catch (error) {
+        console.error(`Error applying change ${change.id} to state:`, error);
         return null;
       }
-    } catch (error) {
-      console.error(`Error applying change ${change.id} to state:`, error);
-      return null;
     }
     return { ...change, rev: rev++, ops: transformedOps };
   }).filter(Boolean);
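
`forceCommit` exists for cases like schema migrations, where a change that transforms to a no-op against concurrent edits must still be recorded. A sketch; the root import path is an assumption, and the inputs are declared for illustration:

import { transformIncomingChanges } from '@dabble/patches'; // assumed export location
import type { Change } from '@dabble/patches';

declare const incoming: Change[];        // client changes based on rev 10
declare const stateAtRev10: unknown;     // server state at the client's baseRev
declare const committedSince: Change[];  // server changes committed after rev 10

// With forceCommit, changes whose ops all transform away (or leave the state
// untouched) are still assigned revisions and committed instead of dropped.
const transformed = transformIncomingChanges(
  incoming, stateAtRev10, committedSince, /* currentRev */ 12, /* forceCommit */ true
);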

package/dist/algorithms/shared/changeBatching.d.ts (new file)

@@ -0,0 +1,57 @@
+import { Change } from '../../types.js';
+import '../../json-patch/JSONPatch.js';
+import '@dabble/delta';
+import '../../json-patch/types.js';
+
+/**
+ * Function that calculates the storage size of data.
+ * Used by change batching to determine if changes need to be split.
+ *
+ * Import pre-built calculators from '@dabble/patches/compression':
+ * - `compressedSizeBase64` - Uses actual LZ compression + base64
+ * - `compressedSizeUint8` - Uses actual LZ compression to binary
+ *
+ * Or provide your own (e.g., ratio estimate):
+ * ```typescript
+ * const ratioEstimate = (data) => getJSONByteSize(data) * 0.5;
+ * ```
+ */
+type SizeCalculator = (data: unknown) => number;
+/** Estimate JSON string byte size. */
+declare function getJSONByteSize(data: unknown): number;
+/**
+ * Break changes into smaller changes so that each change's storage size never exceeds `maxBytes`.
+ *
+ * - Splits first by JSON-Patch *ops*
+ * - If an individual op is still too big and is a "@txt" op,
+ *   split its Delta payload into smaller Deltas
+ *
+ * @param changes - The changes to break apart
+ * @param maxBytes - Maximum storage size in bytes per change
+ * @param sizeCalculator - Custom size calculator (e.g., for compressed size)
+ */
+declare function breakChanges(changes: Change[], maxBytes: number, sizeCalculator?: SizeCalculator): Change[];
+/**
+ * Options for breaking changes into batches.
+ */
+interface BreakChangesIntoBatchesOptions {
+    /** Batch limit for wire (uncompressed JSON). Defaults to 1MB. */
+    maxPayloadBytes?: number;
+    /** Per-change storage limit. If exceeded, individual changes are split. */
+    maxStorageBytes?: number;
+    /** Custom size calculator for storage limit (e.g., compressed size). */
+    sizeCalculator?: SizeCalculator;
+}
+/**
+ * Break changes into batches for network transmission.
+ *
+ * Two distinct limits:
+ * - `maxPayloadBytes`: Controls batch size for wire transmission (uses uncompressed JSON size)
+ * - `maxStorageBytes`: Controls per-change splitting for backend storage (uses sizeCalculator if provided)
+ *
+ * @param changes - The changes to batch
+ * @param options - Batching options (or just maxPayloadBytes for backward compatibility)
+ */
+declare function breakChangesIntoBatches(changes: Change[], options?: BreakChangesIntoBatchesOptions | number): Change[][];
+
+export { type BreakChangesIntoBatchesOptions, type SizeCalculator, breakChanges, breakChangesIntoBatches, getJSONByteSize };
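
Putting the two limits together: the sketch below batches a queue of pending changes for the wire while also pre-splitting anything too large for the backend's per-row limit. The root import path and the commented send call are assumptions; the option names and `compressedSizeBase64` come from the declarations above:

import { breakChangesIntoBatches } from '@dabble/patches'; // assumed export location
import { compressedSizeBase64 } from '@dabble/patches/compression';
import type { Change } from '@dabble/patches';

declare const pending: Change[]; // changes queued for the server

const batches = breakChangesIntoBatches(pending, {
  maxPayloadBytes: 1_000_000,           // wire limit per batch (uncompressed JSON)
  maxStorageBytes: 256 * 1024,          // per-change storage limit
  sizeCalculator: compressedSizeBase64, // storage size measured after compression
});

// Each batch is sent separately; changes produced by a split share a generated batchId.
for (const batch of batches) {
  // await client.commitChanges(docId, batch); // hypothetical send call
}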

package/dist/algorithms/{client/breakChange.js → shared/changeBatching.js}

@@ -1,8 +1,73 @@
 import "../../chunk-IZ2YBCUP.js";
+import { createId } from "crypto-id";
 import { createChange } from "../../data/change.js";
-
-
-
+function getJSONByteSize(data) {
+  try {
+    const stringified = JSON.stringify(data);
+    return stringified ? new TextEncoder().encode(stringified).length : 0;
+  } catch (e) {
+    console.error("Error calculating JSON size:", e);
+    throw new Error("Error calculating JSON size: " + e);
+  }
+}
+function breakChanges(changes, maxBytes, sizeCalculator) {
+  const results = [];
+  for (const change of changes) {
+    results.push(...breakSingleChange(change, maxBytes, sizeCalculator));
+  }
+  return results;
+}
+const DEFAULT_MAX_PAYLOAD_BYTES = 1e6;
+function breakChangesIntoBatches(changes, options) {
+  const opts = typeof options === "number" ? { maxPayloadBytes: options } : options ?? {};
+  const maxPayloadBytes = opts.maxPayloadBytes ?? DEFAULT_MAX_PAYLOAD_BYTES;
+  const { maxStorageBytes, sizeCalculator } = opts;
+  let processedChanges = changes;
+  if (maxStorageBytes) {
+    processedChanges = breakChanges(changes, maxStorageBytes, sizeCalculator);
+  }
+  if (getJSONByteSize(processedChanges) < maxPayloadBytes) {
+    return [processedChanges];
+  }
+  const batchId = createId(12);
+  const batches = [];
+  let currentBatch = [];
+  let currentSize = 2;
+  for (const change of processedChanges) {
+    const changeWithBatchId = { ...change, batchId };
+    const individualActualSize = getJSONByteSize(changeWithBatchId);
+    let itemsToProcess;
+    if (individualActualSize > maxPayloadBytes) {
+      itemsToProcess = breakSingleChange(changeWithBatchId, maxPayloadBytes).map((c) => ({ ...c, batchId }));
+    } else {
+      itemsToProcess = [changeWithBatchId];
+    }
+    for (const item of itemsToProcess) {
+      const itemActualSize = getJSONByteSize(item);
+      const itemSizeForBatching = itemActualSize + (currentBatch.length > 0 ? 1 : 0);
+      if (currentBatch.length > 0 && currentSize + itemSizeForBatching > maxPayloadBytes) {
+        batches.push(currentBatch);
+        currentBatch = [];
+        currentSize = 2;
+      }
+      const actualItemContribution = itemActualSize + (currentBatch.length > 0 ? 1 : 0);
+      currentBatch.push(item);
+      currentSize += actualItemContribution;
+    }
+  }
+  if (currentBatch.length > 0) {
+    batches.push(currentBatch);
+  }
+  return batches;
+}
+function getSizeForStorage(data, sizeCalculator) {
+  if (sizeCalculator) {
+    return sizeCalculator(data);
+  }
+  return getJSONByteSize(data);
+}
+function breakSingleChange(orig, maxBytes, sizeCalculator) {
+  if (getSizeForStorage(orig, sizeCalculator) <= maxBytes) return [orig];
   const byOps = [];
   let group = [];
   let rev = orig.rev;

@@ -13,17 +78,17 @@ function breakChange(orig, maxBytes) {
   };
   for (const op of orig.ops) {
     const tentative = group.concat(op);
-    if (getJSONByteSize({ ...orig, ops: tentative }) > maxBytes) flush();
-    if (group.length === 0 && getJSONByteSize({ ...orig, ops: [op] }) > maxBytes) {
+    if (getSizeForStorage({ ...orig, ops: tentative }, sizeCalculator) > maxBytes) flush();
+    if (group.length === 0 && getSizeForStorage({ ...orig, ops: [op] }, sizeCalculator) > maxBytes) {
       if (op.op === "@txt" && op.value) {
-        const pieces = breakTextOp(orig, op, maxBytes, rev);
+        const pieces = breakTextOp(orig, op, maxBytes, rev, sizeCalculator);
        byOps.push(...pieces);
         if (pieces.length > 0) {
           rev = pieces[pieces.length - 1].rev + 1;
         }
         continue;
       } else if (op.op === "replace" || op.op === "add") {
-        const pieces = breakLargeValueOp(orig, op, maxBytes, rev);
+        const pieces = breakLargeValueOp(orig, op, maxBytes, rev, sizeCalculator);
         byOps.push(...pieces);
         if (pieces.length > 0) {
           rev = pieces[pieces.length - 1].rev + 1;

@@ -40,10 +105,10 @@ function breakChange(orig, maxBytes) {
   flush();
   return byOps;
 }
-function breakTextOp(origChange, textOp, maxBytes, startRev) {
+function breakTextOp(origChange, textOp, maxBytes, startRev, sizeCalculator) {
   const results = [];
   let rev = startRev;
-  const baseSize = getJSONByteSize({ ...origChange, ops: [{ ...textOp, value: "" }] });
+  const baseSize = getSizeForStorage({ ...origChange, ops: [{ ...textOp, value: "" }] }, sizeCalculator);
   const budget = maxBytes - baseSize;
   const buffer = 20;
   const maxLength = Math.max(1, budget - buffer);

@@ -77,12 +142,15 @@ function breakTextOp(origChange, textOp, maxBytes, startRev) {
       testBatchOps.push({ retain: retainToPrefixCurrentPiece });
     }
     testBatchOps.push(op);
-    const testBatchSize = getJSONByteSize({ ...origChange, ops: [{ ...textOp, value: testBatchOps }] });
+    const testBatchSize = getSizeForStorage({ ...origChange, ops: [{ ...textOp, value: testBatchOps }] }, sizeCalculator);
     if (currentOpsForNextChangePiece.length > 0 && testBatchSize > maxBytes) {
       flushCurrentChangePiece();
     }
     const opStandaloneOps = retainToPrefixCurrentPiece > 0 ? [{ retain: retainToPrefixCurrentPiece }, op] : [op];
-    const opStandaloneSize = getJSONByteSize({ ...origChange, ops: [{ ...textOp, value: opStandaloneOps }] });
+    const opStandaloneSize = getSizeForStorage(
+      { ...origChange, ops: [{ ...textOp, value: opStandaloneOps }] },
+      sizeCalculator
+    );
     if (currentOpsForNextChangePiece.length === 0 && opStandaloneSize > maxBytes) {
       if (op.insert && typeof op.insert === "string") {
         const insertChunks = splitLargeInsertText(op.insert, maxLength, op.attributes);

@@ -127,11 +195,11 @@ function splitLargeInsertText(text, maxChunkLength, attributes) {
   }
   return results;
 }
-function breakLargeValueOp(origChange, op, maxBytes, startRev) {
+function breakLargeValueOp(origChange, op, maxBytes, startRev, sizeCalculator) {
   const results = [];
   let rev = startRev;
-  const baseOpSize = getJSONByteSize({ ...op, value: "" });
-  const baseChangeSize = getJSONByteSize({ ...origChange, ops: [{ ...op, value: "" }] }) - baseOpSize;
+  const baseOpSize = getSizeForStorage({ ...op, value: "" }, sizeCalculator);
+  const baseChangeSize = getSizeForStorage({ ...origChange, ops: [{ ...op, value: "" }] }, sizeCalculator) - baseOpSize;
   const valueBudget = maxBytes - baseChangeSize - 50;
   if (typeof op.value === "string" && op.value.length > 100) {
     const text = op.value;

@@ -161,7 +229,7 @@ function breakLargeValueOp(origChange, op, maxBytes, startRev) {
     const item = originalArray[i];
     const tentativeChunk = [...currentChunk, item];
     const tentativeOp = { ...op, value: tentativeChunk };
-    const tentativeChangeSize = getJSONByteSize({ ...origChange, ops: [tentativeOp] });
+    const tentativeChangeSize = getSizeForStorage({ ...origChange, ops: [tentativeOp] }, sizeCalculator);
     if (currentChunk.length > 0 && tentativeChangeSize > maxBytes) {
       const chunkOp = {};
       if (chunkStartIndex === 0) {

@@ -205,5 +273,7 @@ function deriveNewChange(origChange, rev, ops) {
   return createChange(origChange.baseRev, rev, ops, metadata);
 }
 export {
-  breakChange
+  breakChanges,
+  breakChangesIntoBatches,
+  getJSONByteSize
 };
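
Since every size probe in this file funnels through `getSizeForStorage`, a caller-supplied `SizeCalculator` swaps the metric everywhere at once. The d.ts above suggests a cheap ratio estimate as an alternative to compressing on every probe; spelled out below (the 0.5 ratio is the doc's own example figure, not a recommendation, and the root import path is an assumption):

import { getJSONByteSize, breakChanges } from '@dabble/patches'; // assumed export location
import type { Change, SizeCalculator } from '@dabble/patches';

declare const changes: Change[];

// Estimate compressed size as half the raw JSON size: far cheaper than
// real compression, at the cost of occasionally splitting early or late.
const ratioEstimate: SizeCalculator = data => getJSONByteSize(data) * 0.5;

const split = breakChanges(changes, 256 * 1024, ratioEstimate);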

package/dist/algorithms/shared/lz.d.ts (new file)

@@ -0,0 +1,12 @@
+declare function compress(uncompressed: string | null): string;
+declare function decompress(compressed: string | null): string | null;
+declare function compressToBase64(input: string | null): string;
+declare function decompressFromBase64(input: string | null): string | null;
+declare function compressToUTF16(input: string | null): string;
+declare function decompressFromUTF16(compressed: string | null): string | null;
+declare function compressToUint8Array(uncompressed: string | null): Uint8Array;
+declare function decompressFromUint8Array(compressed: Uint8Array | null): string | null;
+declare function compressToEncodedURIComponent(input: string | null): string;
+declare function decompressFromEncodedURIComponent(input: string | null): string | null;
+
+export { compress, compressToBase64, compressToEncodedURIComponent, compressToUTF16, compressToUint8Array, decompress, decompressFromBase64, decompressFromEncodedURIComponent, decompressFromUTF16, decompressFromUint8Array };
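
The bundled LZ module mirrors the familiar lz-string surface. A round-trip sketch, assuming these functions are re-exported through the new `@dabble/patches/compression` entry point (the diff for `compression/index.js` is not shown above, so the exact export path is an assumption):

import {
  compressToBase64,
  decompressFromBase64,
  compressToUint8Array,
  decompressFromUint8Array,
} from '@dabble/patches/compression'; // assumed re-export of the lz module

const json = JSON.stringify({ ops: [{ op: 'replace', path: '/title', value: 'Hello' }] });

// Base64 round-trip: safe to store in text columns.
const b64 = compressToBase64(json);
console.assert(decompressFromBase64(b64) === json);

// Binary round-trip: smaller, suited to blob/bytea storage.
const bytes: Uint8Array = compressToUint8Array(json);
console.assert(decompressFromUint8Array(bytes) === json);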