@dabble/patches 0.5.3 → 0.5.4

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
@@ -1,8 +1,9 @@
  import { PatchesSnapshot, ChangeMutator, Change } from '../../types.js';
+ import { SizeCalculator } from '../shared/changeBatching.js';
  import '../../json-patch/JSONPatch.js';
  import '@dabble/delta';
  import '../../json-patch/types.js';

- declare function makeChange<T = any>(snapshot: PatchesSnapshot<T>, mutator: ChangeMutator<T>, changeMetadata?: Record<string, any>, maxPayloadBytes?: number): Change[];
+ declare function makeChange<T = any>(snapshot: PatchesSnapshot<T>, mutator: ChangeMutator<T>, changeMetadata?: Record<string, any>, maxStorageBytes?: number, sizeCalculator?: SizeCalculator): Change[];

  export { makeChange };
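The client-facing change here: `makeChange`'s optional size limit is now a storage limit (`maxStorageBytes`) rather than a wire payload limit, with an optional `SizeCalculator` controlling how that size is measured. A minimal sketch of the new call shape, assuming the root entry point re-exports `makeChange` and that a `ChangeMutator` mutates a draft of the document (both assumptions; concrete values are placeholders):

```typescript
import { makeChange, type PatchesSnapshot } from '@dabble/patches'; // assumed entry point

declare const snapshot: PatchesSnapshot<{ title: string }>; // placeholder snapshot

const changes = makeChange(
  snapshot,
  (draft) => { draft.title = 'New title'; },   // assumed draft-mutation style for ChangeMutator
  { author: 'user-1' },                        // optional changeMetadata
  1_000_000,                                   // maxStorageBytes: split stored changes above ~1 MB
  (data) => JSON.stringify(data).length * 0.5  // assumed ratio-style SizeCalculator
);
```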
@@ -3,7 +3,7 @@ import { createChange } from "../../data/change.js";
  import { createJSONPatch } from "../../json-patch/createJSONPatch.js";
  import { breakChanges } from "../shared/changeBatching.js";
  import { createStateFromSnapshot } from "./createStateFromSnapshot.js";
- function makeChange(snapshot, mutator, changeMetadata, maxPayloadBytes) {
+ function makeChange(snapshot, mutator, changeMetadata, maxStorageBytes, sizeCalculator) {
  const pendingChanges = snapshot.changes;
  const pendingRev = pendingChanges[pendingChanges.length - 1]?.rev ?? snapshot.rev;
  const state = createStateFromSnapshot(snapshot);
@@ -19,8 +19,8 @@ function makeChange(snapshot, mutator, changeMetadata, maxPayloadBytes) {
  console.error("Failed to apply change to state during makeChange:", error);
  throw new Error(`Failed to apply change to state during makeChange: ${error}`);
  }
- if (maxPayloadBytes) {
- newChangesArray = breakChanges(newChangesArray, maxPayloadBytes);
+ if (maxStorageBytes) {
+ newChangesArray = breakChanges(newChangesArray, maxStorageBytes, sizeCalculator);
  }
  return newChangesArray;
  }
@@ -14,6 +14,6 @@ import '../../json-patch/types.js';
  * - committedChanges: Changes that were already committed to the server after the client's base revision
  * - transformedChanges: The client's changes after being transformed against concurrent changes
  */
- declare function commitChanges(store: PatchesStoreBackend, docId: string, changes: ChangeInput[], sessionTimeoutMillis: number, options?: CommitChangesOptions, maxPayloadBytes?: number): Promise<[Change[], Change[]]>;
+ declare function commitChanges(store: PatchesStoreBackend, docId: string, changes: ChangeInput[], sessionTimeoutMillis: number, options?: CommitChangesOptions, maxStorageBytes?: number): Promise<[Change[], Change[]]>;

  export { CommitChangesOptions, commitChanges };
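`commitChanges` takes the same rename in its final parameter; the resolved tuple of committed and transformed changes is unchanged. A hedged server-side sketch (the entry point and all concrete values are placeholders):

```typescript
import { commitChanges } from '@dabble/patches/server'; // assumed entry point
import type { PatchesStoreBackend, ChangeInput } from '@dabble/patches/server';

declare const store: PatchesStoreBackend; // your backend store implementation
declare const incoming: ChangeInput[];    // changes received from a client

const [committedChanges, transformedChanges] = await commitChanges(
  store,
  'doc-123',  // docId (placeholder)
  incoming,
  30_000,     // sessionTimeoutMillis
  undefined,  // CommitChangesOptions
  1_000_000   // maxStorageBytes (this slot was maxPayloadBytes in 0.5.3)
);
```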
@@ -6,7 +6,7 @@ import { getSnapshotAtRevision } from "./getSnapshotAtRevision.js";
  import { getStateAtRevision } from "./getStateAtRevision.js";
  import { handleOfflineSessionsAndBatches } from "./handleOfflineSessionsAndBatches.js";
  import { transformIncomingChanges } from "./transformIncomingChanges.js";
- async function commitChanges(store, docId, changes, sessionTimeoutMillis, options, maxPayloadBytes) {
+ async function commitChanges(store, docId, changes, sessionTimeoutMillis, options, maxStorageBytes) {
  if (changes.length === 0) {
  return [[], []];
  }
@@ -68,7 +68,7 @@ async function commitChanges(store, docId, changes, sessionTimeoutMillis, option
  origin,
  true,
  // isOffline
- maxPayloadBytes
+ maxStorageBytes
  );
  if (canFastForward) {
  await store.saveChanges(docId, incomingChanges);
@@ -13,9 +13,9 @@ import '../../json-patch/types.js';
  * @param batchId The batch identifier
  * @param origin The origin to use for created versions (default: 'offline-branch')
  * @param isOffline Whether these changes were created offline (metadata flag)
- * @param maxPayloadBytes If set, break collapsed changes that exceed this size
+ * @param maxStorageBytes If set, break collapsed changes that exceed this size
  * @returns The changes (collapsed into one if divergent, unchanged if fast-forward)
  */
- declare function handleOfflineSessionsAndBatches(store: PatchesStoreBackend, sessionTimeoutMillis: number, docId: string, changes: Change[], baseRev: number, batchId?: string, origin?: 'main' | 'offline-branch', isOffline?: boolean, maxPayloadBytes?: number): Promise<Change[]>;
+ declare function handleOfflineSessionsAndBatches(store: PatchesStoreBackend, sessionTimeoutMillis: number, docId: string, changes: Change[], baseRev: number, batchId?: string, origin?: 'main' | 'offline-branch', isOffline?: boolean, maxStorageBytes?: number): Promise<Change[]>;

  export { handleOfflineSessionsAndBatches };
@@ -5,7 +5,7 @@ import { getISO, timestampDiff } from "../../utils/dates.js";
  import { applyChanges } from "../shared/applyChanges.js";
  import { breakChanges } from "../shared/changeBatching.js";
  import { getStateAtRevision } from "./getStateAtRevision.js";
- async function handleOfflineSessionsAndBatches(store, sessionTimeoutMillis, docId, changes, baseRev, batchId, origin = "offline-branch", isOffline = true, maxPayloadBytes) {
+ async function handleOfflineSessionsAndBatches(store, sessionTimeoutMillis, docId, changes, baseRev, batchId, origin = "offline-branch", isOffline = true, maxStorageBytes) {
  const groupId = batchId ?? createSortableId();
  const [lastVersion] = await store.listVersions(docId, {
  groupId,
@@ -61,8 +61,8 @@ async function handleOfflineSessionsAndBatches(store, sessionTimeoutMillis, docI
  firstChange.ops = [...firstChange.ops, ...nextChange.ops];
  return firstChange;
  });
- if (maxPayloadBytes) {
- return breakChanges([collapsed], maxPayloadBytes);
+ if (maxStorageBytes) {
+ return breakChanges([collapsed], maxStorageBytes);
  }
  return [collapsed];
  }
@@ -3,17 +3,55 @@ import '../../json-patch/JSONPatch.js';
  import '@dabble/delta';
  import '../../json-patch/types.js';

+ /**
+ * Function that calculates the storage size of data.
+ * Used by change batching to determine if changes need to be split.
+ *
+ * Import pre-built calculators from '@dabble/patches/compression':
+ * - `compressedSizeBase64` - Uses actual LZ compression + base64
+ * - `compressedSizeUint8` - Uses actual LZ compression to binary
+ *
+ * Or provide your own (e.g., ratio estimate):
+ * ```typescript
+ * const ratioEstimate = (data) => getJSONByteSize(data) * 0.5;
+ * ```
+ */
+ type SizeCalculator = (data: unknown) => number;
  /** Estimate JSON string byte size. */
  declare function getJSONByteSize(data: unknown): number;
  /**
- * Break changes into smaller changes so that each change's JSON string size never exceeds `maxBytes`.
+ * Break changes into smaller changes so that each change's storage size never exceeds `maxBytes`.
  *
  * - Splits first by JSON-Patch *ops*
  * - If an individual op is still too big and is a "@txt" op,
  * split its Delta payload into smaller Deltas
+ *
+ * @param changes - The changes to break apart
+ * @param maxBytes - Maximum storage size in bytes per change
+ * @param sizeCalculator - Custom size calculator (e.g., for compressed size)
+ */
+ declare function breakChanges(changes: Change[], maxBytes: number, sizeCalculator?: SizeCalculator): Change[];
+ /**
+ * Options for breaking changes into batches.
+ */
+ interface BreakChangesIntoBatchesOptions {
+ /** Batch limit for wire (uncompressed JSON). Defaults to 1MB. */
+ maxPayloadBytes?: number;
+ /** Per-change storage limit. If exceeded, individual changes are split. */
+ maxStorageBytes?: number;
+ /** Custom size calculator for storage limit (e.g., compressed size). */
+ sizeCalculator?: SizeCalculator;
+ }
+ /**
+ * Break changes into batches for network transmission.
+ *
+ * Two distinct limits:
+ * - `maxPayloadBytes`: Controls batch size for wire transmission (uses uncompressed JSON size)
+ * - `maxStorageBytes`: Controls per-change splitting for backend storage (uses sizeCalculator if provided)
+ *
+ * @param changes - The changes to batch
+ * @param options - Batching options (or just maxPayloadBytes for backward compatibility)
  */
- declare function breakChanges(changes: Change[], maxBytes: number): Change[];
- /** Break changes into batches based on maxPayloadBytes. */
- declare function breakChangesIntoBatches(changes: Change[], maxPayloadBytes?: number): Change[][];
+ declare function breakChangesIntoBatches(changes: Change[], options?: BreakChangesIntoBatchesOptions | number): Change[][];

- export { breakChanges, breakChangesIntoBatches, getJSONByteSize };
+ export { type BreakChangesIntoBatchesOptions, type SizeCalculator, breakChanges, breakChangesIntoBatches, getJSONByteSize };
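Taken together, these declarations separate the wire limit from the storage limit. A usage sketch, assuming the batching helpers are re-exported from the root entry point (the 512 KB storage figure is illustrative; only the 1 MB payload default is documented):

```typescript
import {
  breakChangesIntoBatches,
  getJSONByteSize,
  type Change,
  type SizeCalculator,
} from '@dabble/patches'; // assumed entry point

declare const changes: Change[]; // placeholder changes to send

// The ratio estimate from the SizeCalculator doc comment above.
const ratioEstimate: SizeCalculator = (data) => getJSONByteSize(data) * 0.5;

// Options form: split individual changes at ~512 KB of estimated storage size,
// then batch for the wire against the default 1 MB payload limit.
const batches = breakChangesIntoBatches(changes, {
  maxStorageBytes: 512_000,
  sizeCalculator: ratioEstimate,
});

// Backward-compatible form: a bare number is still read as maxPayloadBytes.
const legacyBatches = breakChangesIntoBatches(changes, 1_000_000);
```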
@@ -10,22 +10,30 @@ function getJSONByteSize(data) {
  throw new Error("Error calculating JSON size: " + e);
  }
  }
- function breakChanges(changes, maxBytes) {
+ function breakChanges(changes, maxBytes, sizeCalculator) {
  const results = [];
  for (const change of changes) {
- results.push(...breakSingleChange(change, maxBytes));
+ results.push(...breakSingleChange(change, maxBytes, sizeCalculator));
  }
  return results;
  }
- function breakChangesIntoBatches(changes, maxPayloadBytes) {
- if (!maxPayloadBytes || getJSONByteSize(changes) < maxPayloadBytes) {
- return [changes];
+ const DEFAULT_MAX_PAYLOAD_BYTES = 1e6;
+ function breakChangesIntoBatches(changes, options) {
+ const opts = typeof options === "number" ? { maxPayloadBytes: options } : options ?? {};
+ const maxPayloadBytes = opts.maxPayloadBytes ?? DEFAULT_MAX_PAYLOAD_BYTES;
+ const { maxStorageBytes, sizeCalculator } = opts;
+ let processedChanges = changes;
+ if (maxStorageBytes) {
+ processedChanges = breakChanges(changes, maxStorageBytes, sizeCalculator);
+ }
+ if (getJSONByteSize(processedChanges) < maxPayloadBytes) {
+ return [processedChanges];
  }
  const batchId = createId(12);
  const batches = [];
  let currentBatch = [];
  let currentSize = 2;
- for (const change of changes) {
+ for (const change of processedChanges) {
  const changeWithBatchId = { ...change, batchId };
  const individualActualSize = getJSONByteSize(changeWithBatchId);
  let itemsToProcess;
@@ -52,8 +60,14 @@ function breakChangesIntoBatches(changes, maxPayloadBytes) {
  }
  return batches;
  }
- function breakSingleChange(orig, maxBytes) {
- if (getJSONByteSize(orig) <= maxBytes) return [orig];
+ function getSizeForStorage(data, sizeCalculator) {
+ if (sizeCalculator) {
+ return sizeCalculator(data);
+ }
+ return getJSONByteSize(data);
+ }
+ function breakSingleChange(orig, maxBytes, sizeCalculator) {
+ if (getSizeForStorage(orig, sizeCalculator) <= maxBytes) return [orig];
  const byOps = [];
  let group = [];
  let rev = orig.rev;
@@ -64,17 +78,17 @@ function breakSingleChange(orig, maxBytes) {
  };
  for (const op of orig.ops) {
  const tentative = group.concat(op);
- if (getJSONByteSize({ ...orig, ops: tentative }) > maxBytes) flush();
- if (group.length === 0 && getJSONByteSize({ ...orig, ops: [op] }) > maxBytes) {
+ if (getSizeForStorage({ ...orig, ops: tentative }, sizeCalculator) > maxBytes) flush();
+ if (group.length === 0 && getSizeForStorage({ ...orig, ops: [op] }, sizeCalculator) > maxBytes) {
  if (op.op === "@txt" && op.value) {
- const pieces = breakTextOp(orig, op, maxBytes, rev);
+ const pieces = breakTextOp(orig, op, maxBytes, rev, sizeCalculator);
  byOps.push(...pieces);
  if (pieces.length > 0) {
  rev = pieces[pieces.length - 1].rev + 1;
  }
  continue;
  } else if (op.op === "replace" || op.op === "add") {
- const pieces = breakLargeValueOp(orig, op, maxBytes, rev);
+ const pieces = breakLargeValueOp(orig, op, maxBytes, rev, sizeCalculator);
  byOps.push(...pieces);
  if (pieces.length > 0) {
  rev = pieces[pieces.length - 1].rev + 1;
@@ -91,10 +105,10 @@ function breakSingleChange(orig, maxBytes) {
  flush();
  return byOps;
  }
- function breakTextOp(origChange, textOp, maxBytes, startRev) {
+ function breakTextOp(origChange, textOp, maxBytes, startRev, sizeCalculator) {
  const results = [];
  let rev = startRev;
- const baseSize = getJSONByteSize({ ...origChange, ops: [{ ...textOp, value: "" }] });
+ const baseSize = getSizeForStorage({ ...origChange, ops: [{ ...textOp, value: "" }] }, sizeCalculator);
  const budget = maxBytes - baseSize;
  const buffer = 20;
  const maxLength = Math.max(1, budget - buffer);
@@ -128,12 +142,15 @@ function breakTextOp(origChange, textOp, maxBytes, startRev) {
  testBatchOps.push({ retain: retainToPrefixCurrentPiece });
  }
  testBatchOps.push(op);
- const testBatchSize = getJSONByteSize({ ...origChange, ops: [{ ...textOp, value: testBatchOps }] });
+ const testBatchSize = getSizeForStorage({ ...origChange, ops: [{ ...textOp, value: testBatchOps }] }, sizeCalculator);
  if (currentOpsForNextChangePiece.length > 0 && testBatchSize > maxBytes) {
  flushCurrentChangePiece();
  }
  const opStandaloneOps = retainToPrefixCurrentPiece > 0 ? [{ retain: retainToPrefixCurrentPiece }, op] : [op];
- const opStandaloneSize = getJSONByteSize({ ...origChange, ops: [{ ...textOp, value: opStandaloneOps }] });
+ const opStandaloneSize = getSizeForStorage(
+ { ...origChange, ops: [{ ...textOp, value: opStandaloneOps }] },
+ sizeCalculator
+ );
  if (currentOpsForNextChangePiece.length === 0 && opStandaloneSize > maxBytes) {
  if (op.insert && typeof op.insert === "string") {
  const insertChunks = splitLargeInsertText(op.insert, maxLength, op.attributes);
@@ -178,11 +195,11 @@ function splitLargeInsertText(text, maxChunkLength, attributes) {
  }
  return results;
  }
- function breakLargeValueOp(origChange, op, maxBytes, startRev) {
+ function breakLargeValueOp(origChange, op, maxBytes, startRev, sizeCalculator) {
  const results = [];
  let rev = startRev;
- const baseOpSize = getJSONByteSize({ ...op, value: "" });
- const baseChangeSize = getJSONByteSize({ ...origChange, ops: [{ ...op, value: "" }] }) - baseOpSize;
+ const baseOpSize = getSizeForStorage({ ...op, value: "" }, sizeCalculator);
+ const baseChangeSize = getSizeForStorage({ ...origChange, ops: [{ ...op, value: "" }] }, sizeCalculator) - baseOpSize;
  const valueBudget = maxBytes - baseChangeSize - 50;
  if (typeof op.value === "string" && op.value.length > 100) {
  const text = op.value;
@@ -212,7 +229,7 @@ function breakLargeValueOp(origChange, op, maxBytes, startRev) {
  const item = originalArray[i];
  const tentativeChunk = [...currentChunk, item];
  const tentativeOp = { ...op, value: tentativeChunk };
- const tentativeChangeSize = getJSONByteSize({ ...origChange, ops: [tentativeOp] });
+ const tentativeChangeSize = getSizeForStorage({ ...origChange, ops: [tentativeOp] }, sizeCalculator);
  if (currentChunk.length > 0 && tentativeChangeSize > maxBytes) {
  const chunkOp = {};
  if (chunkStartIndex === 0) {
@@ -0,0 +1,12 @@
+ declare function compress(uncompressed: string | null): string;
+ declare function decompress(compressed: string | null): string | null;
+ declare function compressToBase64(input: string | null): string;
+ declare function decompressFromBase64(input: string | null): string | null;
+ declare function compressToUTF16(input: string | null): string;
+ declare function decompressFromUTF16(compressed: string | null): string | null;
+ declare function compressToUint8Array(uncompressed: string | null): Uint8Array;
+ declare function decompressFromUint8Array(compressed: Uint8Array | null): string | null;
+ declare function compressToEncodedURIComponent(input: string | null): string;
+ declare function decompressFromEncodedURIComponent(input: string | null): string | null;
+
+ export { compress, compressToBase64, compressToEncodedURIComponent, compressToUTF16, compressToUint8Array, decompress, decompressFromBase64, decompressFromEncodedURIComponent, decompressFromUTF16, decompressFromUint8Array };
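This new module exposes an lz-string style API. The `SizeCalculator` doc comment earlier in the diff points to pre-built `compressedSizeBase64` and `compressedSizeUint8` calculators from '@dabble/patches/compression'; they are not among the declarations shown here, but a rough equivalent can be sketched from the raw functions (the `Sketch` names mark these as illustrative, not the package's own implementations):

```typescript
import { compressToBase64, compressToUint8Array } from '@dabble/patches/compression';

// Storage size measured as LZ-compressed base64 output (one byte per character).
const compressedSizeBase64Sketch = (data: unknown): number =>
  compressToBase64(JSON.stringify(data)).length;

// Storage size measured as raw LZ-compressed bytes.
const compressedSizeUint8Sketch = (data: unknown): number =>
  compressToUint8Array(JSON.stringify(data)).byteLength;
```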