@dabble/patches 0.2.32 → 0.3.0

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (79) hide show
  1. package/dist/algorithms/client/applyCommittedChanges.d.ts +8 -0
  2. package/dist/algorithms/client/applyCommittedChanges.js +40 -0
  3. package/dist/{utils → algorithms/client}/batching.d.ts +1 -1
  4. package/dist/{utils → algorithms/client}/batching.js +2 -2
  5. package/dist/{utils → algorithms/client}/breakChange.d.ts +2 -3
  6. package/dist/algorithms/client/breakChange.js +258 -0
  7. package/dist/algorithms/client/createStateFromSnapshot.d.ts +7 -0
  8. package/dist/algorithms/client/createStateFromSnapshot.js +9 -0
  9. package/dist/algorithms/client/getJSONByteSize.js +12 -0
  10. package/dist/algorithms/client/makeChange.d.ts +3 -0
  11. package/dist/algorithms/client/makeChange.js +37 -0
  12. package/dist/algorithms/server/commitChanges.d.ts +12 -0
  13. package/dist/algorithms/server/commitChanges.js +80 -0
  14. package/dist/algorithms/server/createVersion.d.ts +12 -0
  15. package/dist/algorithms/server/createVersion.js +28 -0
  16. package/dist/algorithms/server/getSnapshotAtRevision.d.ts +10 -0
  17. package/dist/algorithms/server/getSnapshotAtRevision.js +29 -0
  18. package/dist/algorithms/server/getStateAtRevision.d.ts +9 -0
  19. package/dist/algorithms/server/getStateAtRevision.js +18 -0
  20. package/dist/algorithms/server/handleOfflineSessionsAndBatches.d.ts +12 -0
  21. package/dist/algorithms/server/handleOfflineSessionsAndBatches.js +80 -0
  22. package/dist/algorithms/server/transformIncomingChanges.d.ts +11 -0
  23. package/dist/algorithms/server/transformIncomingChanges.js +40 -0
  24. package/dist/algorithms/shared/applyChanges.d.ts +10 -0
  25. package/dist/algorithms/shared/applyChanges.js +17 -0
  26. package/dist/{utils.d.ts → algorithms/shared/rebaseChanges.d.ts} +1 -11
  27. package/dist/{utils.js → algorithms/shared/rebaseChanges.js} +3 -43
  28. package/dist/client/InMemoryStore.d.ts +2 -1
  29. package/dist/client/InMemoryStore.js +9 -3
  30. package/dist/client/IndexedDBStore.d.ts +34 -2
  31. package/dist/client/IndexedDBStore.js +399 -282
  32. package/dist/client/Patches.d.ts +11 -41
  33. package/dist/client/Patches.js +197 -208
  34. package/dist/client/PatchesDoc.d.ts +24 -41
  35. package/dist/client/PatchesDoc.js +57 -214
  36. package/dist/client/PatchesHistoryClient.js +1 -1
  37. package/dist/client/PatchesStore.d.ts +186 -9
  38. package/dist/data/change.d.ts +3 -0
  39. package/dist/data/change.js +20 -0
  40. package/dist/data/version.d.ts +12 -0
  41. package/dist/data/version.js +17 -0
  42. package/dist/json-patch/ops/add.js +1 -1
  43. package/dist/json-patch/ops/move.js +1 -1
  44. package/dist/json-patch/ops/remove.js +1 -1
  45. package/dist/json-patch/ops/replace.js +1 -1
  46. package/dist/json-patch/utils/get.js +0 -1
  47. package/dist/json-patch/utils/log.d.ts +4 -1
  48. package/dist/json-patch/utils/log.js +2 -5
  49. package/dist/json-patch/utils/ops.d.ts +1 -1
  50. package/dist/json-patch/utils/ops.js +4 -1
  51. package/dist/json-patch/utils/paths.js +2 -2
  52. package/dist/json-patch/utils/toArrayIndex.js +1 -1
  53. package/dist/net/PatchesSync.d.ts +55 -24
  54. package/dist/net/PatchesSync.js +336 -258
  55. package/dist/net/websocket/AuthorizationProvider.d.ts +9 -2
  56. package/dist/net/websocket/AuthorizationProvider.js +14 -2
  57. package/dist/net/websocket/PatchesWebSocket.d.ts +2 -2
  58. package/dist/net/websocket/PatchesWebSocket.js +3 -2
  59. package/dist/net/websocket/RPCServer.d.ts +2 -2
  60. package/dist/net/websocket/RPCServer.js +3 -3
  61. package/dist/net/websocket/SignalingService.js +1 -1
  62. package/dist/net/websocket/WebSocketServer.d.ts +1 -1
  63. package/dist/net/websocket/WebSocketServer.js +2 -2
  64. package/dist/net/websocket/WebSocketTransport.js +1 -1
  65. package/dist/net/websocket/onlineState.d.ts +1 -1
  66. package/dist/net/websocket/onlineState.js +8 -2
  67. package/dist/server/PatchesBranchManager.js +9 -16
  68. package/dist/server/PatchesHistoryManager.js +1 -1
  69. package/dist/server/PatchesServer.d.ts +11 -38
  70. package/dist/server/PatchesServer.js +32 -255
  71. package/dist/types.d.ts +8 -6
  72. package/dist/utils/concurrency.d.ts +26 -0
  73. package/dist/utils/concurrency.js +60 -0
  74. package/dist/utils/deferred.d.ts +7 -0
  75. package/dist/utils/deferred.js +23 -0
  76. package/package.json +11 -5
  77. package/dist/utils/breakChange.js +0 -302
  78. package/dist/utils/getJSONByteSize.js +0 -12
  79. /package/dist/{utils → algorithms/client}/getJSONByteSize.d.ts +0 -0
package/dist/types.d.ts CHANGED
@@ -33,6 +33,14 @@ export interface PatchesState<T = any> {
33
33
  export interface PatchesSnapshot<T = any> extends PatchesState<T> {
34
34
  changes: Change[];
35
35
  }
36
+ /**
37
+ * Represents the syncing state of a document.
38
+ * @property initial - The document has not yet completed an initial sync.
39
+ * @property updating - A sync is currently in progress for the document.
40
+ * @property null - The document is fully synced (no sync in progress).
41
+ * @property Error - The last sync attempt failed with this error.
42
+ */
43
+ export type SyncingState = 'initial' | 'updating' | null | Error;
36
44
  /** Status options for a branch */
37
45
  export type BranchStatus = 'open' | 'closed' | 'merged' | 'archived' | 'abandoned';
38
46
  export interface Branch {
@@ -116,10 +124,4 @@ export interface ListVersionsOptions {
116
124
  /** Filter by the group ID (branch ID or offline batch ID). */
117
125
  groupId?: string;
118
126
  }
119
- export interface Deferred<T = void> {
120
- promise: Promise<T>;
121
- resolve: (value: T) => void;
122
- reject: (reason?: any) => void;
123
- status: 'pending' | 'fulfilled' | 'rejected';
124
- }
125
127
  export {};
@@ -0,0 +1,26 @@
1
+ /**
2
+ * Wrap a function which is blockable for a document.
3
+ * Also, a Typescript decorator for functions which are blockable.
4
+ */
5
+ export declare function blockable<T extends (docId: string, ...args: any[]) => Promise<any>>(target: T): T;
6
+ /**
7
+ * Wrap a function which blocks on a document.
8
+ * Also, a Typescript decorator for functions which block.
9
+ */
10
+ export declare function blocking<T extends (docId: string, ...args: any[]) => Promise<any>>(target: T): T;
11
+ /**
12
+ * Wrap a function which returns a response which is blockable for a document (e.g. fetch).
13
+ * Also, a Typescript decorator for functions whose response should be blocked when needed.
14
+ */
15
+ export declare function blockableResponse<T extends (docId: string, ...args: any[]) => Promise<any>>(target: T): T;
16
+ /**
17
+ * Wrap a function to only return the result of the first call.
18
+ *
19
+ * ### Examples:
20
+ * ```ts
21
+ * const getFromStorage = singleInvocation(async (key: string) => {
22
+ * ...
23
+ * });
25
+ * ```
+ */
25
+ export declare function singleInvocation<T extends (...args: any[]) => Promise<any>>(target: T): T;
26
+ export declare function singleInvocation<T extends (...args: any[]) => Promise<any>>(matchOnFirstArg: boolean): (target: T) => T;
@@ -0,0 +1,60 @@
1
+ import { simplifiedConcurrency } from 'simplified-concurrency';
2
+ const docIds = new Map();
3
+ /**
4
+ * Make the concurrency be per-path to allow multiple records to be loaded and updated at the same time, keeping only
5
+ * the record's operations sequential with respect to other operations on the same record.
6
+ */
7
+ function concurrency(docId) {
8
+ let concurrency = docIds.get(docId);
9
+ if (!concurrency) {
10
+ concurrency = simplifiedConcurrency();
11
+ docIds.set(docId, concurrency);
12
+ }
13
+ return concurrency;
14
+ }
15
+ /**
16
+ * Wrap a function which is blockable for a document.
17
+ * Also, a Typescript decorator for functions which are blockable.
18
+ */
19
+ export function blockable(target) {
20
+ return function (...args) {
21
+ return concurrency(args[0]).blockFunction(target, args, this);
22
+ };
23
+ }
24
+ /**
25
+ * Wrap a function which blocks on a document.
26
+ * Also, a Typescript decorator for functions which block.
27
+ */
28
+ export function blocking(target) {
29
+ return function (...args) {
30
+ return concurrency(args[0]).blockWhile(target.apply(this, args));
31
+ };
32
+ }
33
+ /**
34
+ * Wrap a function which returns a response which is blockable for a document (e.g. fetch).
35
+ * Also, a Typescript decorator for functions whose response should be blocked when needed.
36
+ */
37
+ export function blockableResponse(target) {
38
+ return function (...args) {
39
+ return concurrency(args[0]).blockResponse(target.apply(this, args));
40
+ };
41
+ }
42
+ export function singleInvocation(matchOnFirstArgOrTarget) {
43
+ if (typeof matchOnFirstArgOrTarget === 'function') {
44
+ return singleInvocation(false)(matchOnFirstArgOrTarget);
45
+ }
46
+ return function (target) {
47
+ const promises = new Map();
48
+ return function (...args) {
49
+ const key = matchOnFirstArgOrTarget ? args[0] : 1;
50
+ if (promises.has(key))
51
+ return promises.get(key);
52
+ const promise = target.apply(this, args);
53
+ promises.set(key, promise);
54
+ promise.finally(() => {
55
+ promises.delete(key);
56
+ });
57
+ return promise;
58
+ };
59
+ };
60
+ }
@@ -0,0 +1,7 @@
1
+ export interface Deferred<T = void> {
2
+ promise: Promise<T>;
3
+ resolve: (value: T) => void;
4
+ reject: (reason?: any) => void;
5
+ status: 'pending' | 'fulfilled' | 'rejected';
6
+ }
7
+ export declare function deferred<T = void>(): Deferred<T>;
@@ -0,0 +1,23 @@
1
+ export function deferred() {
2
+ let resolve;
3
+ let reject;
4
+ let _status = 'pending';
5
+ const promise = new Promise((_resolve, _reject) => {
6
+ resolve = (value) => {
7
+ _resolve(value);
8
+ _status = 'fulfilled';
9
+ };
10
+ reject = (reason) => {
11
+ _reject(reason);
12
+ _status = 'rejected';
13
+ };
14
+ });
15
+ return {
16
+ promise,
17
+ resolve,
18
+ reject,
19
+ get status() {
20
+ return _status;
21
+ },
22
+ };
23
+ }
package/package.json CHANGED
@@ -1,6 +1,6 @@
1
1
  {
2
2
  "name": "@dabble/patches",
3
- "version": "0.2.32",
3
+ "version": "0.3.0",
4
4
  "description": "Immutable JSON Patch implementation based on RFC 6902 supporting operational transformation and last-writer-wins",
5
5
  "author": "Jacob Wright <jacwright@gmail.com>",
6
6
  "bugs": {
@@ -55,17 +55,23 @@
55
55
  "build": "svelte-package -i src",
56
56
  "prepare": "npm run build",
57
57
  "test": "vitest run",
58
- "tdd": "vitest"
58
+ "tdd": "vitest",
59
+ "lint": "eslint src tests",
60
+ "lint:fix": "eslint src tests --fix"
59
61
  },
60
62
  "dependencies": {
61
63
  "@dabble/delta": "^1.2.4",
62
- "alphacounter": "^2.x",
63
- "crypto-id": "^0.2.3",
64
- "simple-peer": "^9.11.1"
64
+ "alphacounter": "^2.1.1",
65
+ "crypto-id": "^0.3.0",
66
+ "simple-peer": "^9.11.1",
67
+ "simplified-concurrency": "^0.2.0"
65
68
  },
66
69
  "devDependencies": {
67
70
  "@sveltejs/package": "^2.3.11",
68
71
  "@types/simple-peer": "^9.11.8",
72
+ "@typescript-eslint/eslint-plugin": "^8.33.1",
73
+ "@typescript-eslint/parser": "^8.33.1",
74
+ "eslint": "^9.28.0",
69
75
  "fake-indexeddb": "^6.0.0",
70
76
  "prettier": "^3.5.3",
71
77
  "typescript": "^5.8.3",
@@ -1,302 +0,0 @@
1
- import { Op } from '@dabble/delta';
2
- import { createId } from 'crypto-id';
3
- import { getJSONByteSize } from './getJSONByteSize.js'; // Import from new location
4
- /**
5
- * Break a single Change into multiple Changes so that
6
- * JSON.stringify(change).length never exceeds `maxBytes`.
7
- *
8
- * - Splits first by JSON-Patch *ops*
9
- * - If an individual op is still too big and is a "@txt" op,
10
- * split its Delta payload into smaller Deltas
11
- */
12
- export function breakChange(orig, maxBytes) {
13
- if (getJSONByteSize(orig) <= maxBytes)
14
- return [orig];
15
- // First pass: split by ops
16
- const byOps = [];
17
- let group = [];
18
- let rev = orig.rev;
19
- const flush = () => {
20
- if (!group.length)
21
- return;
22
- byOps.push({
23
- ...orig,
24
- id: createId(),
25
- rev: rev++,
26
- ops: group,
27
- created: Date.now(),
28
- });
29
- group = [];
30
- };
31
- for (const op of orig.ops) {
32
- const tentative = group.concat(op);
33
- if (getJSONByteSize({ ...orig, ops: tentative }) > maxBytes)
34
- flush();
35
- // Handle the case where a single op is too large
36
- if (group.length === 0 && getJSONByteSize({ ...orig, ops: [op] }) > maxBytes) {
37
- // We have a single op that's too big - can only be @txt op with large delta
38
- if (op.op === '@txt' && op.value) {
39
- const pieces = breakTextOp(orig, op, maxBytes, rev);
40
- byOps.push(...pieces);
41
- // Only update rev if we got results from breakTextOp
42
- if (pieces.length > 0) {
43
- rev = pieces[pieces.length - 1].rev + 1; // Update rev for next changes
44
- }
45
- continue;
46
- }
47
- else if (op.op === 'replace' || op.op === 'add') {
48
- // For replace/add operations with large value payloads, try to split the value if it's a string or array
49
- const pieces = breakLargeValueOp(orig, op, maxBytes, rev);
50
- byOps.push(...pieces);
51
- if (pieces.length > 0) {
52
- rev = pieces[pieces.length - 1].rev + 1;
53
- }
54
- continue;
55
- }
56
- else {
57
- // Non-splittable op that's too large, include it anyway with a warning
58
- console.warn(`Warning: Single operation of type ${op.op} exceeds maxBytes. Including it anyway.`);
59
- group.push(op);
60
- continue;
61
- }
62
- }
63
- group.push(op);
64
- }
65
- flush();
66
- return byOps;
67
- }
68
- /**
69
- * Break a large @txt operation into multiple smaller operations
70
- */
71
- function breakTextOp(origChange, textOp, maxBytes, startRev) {
72
- const results = [];
73
- let rev = startRev;
74
- // Calculate the budget for the delta content itself
75
- const baseSize = getJSONByteSize({ ...origChange, ops: [{ ...textOp, value: '' }] });
76
- const budget = maxBytes - baseSize;
77
- // Ensure maxLength for splitLargeInsert is at least 1, apply a smaller buffer
78
- const buffer = 20; // Reduced buffer
79
- const maxLength = Math.max(1, budget - buffer);
80
- // Ensure deltaOps is always an array, handle both Delta objects and raw arrays
81
- let deltaOps = [];
82
- if (textOp.value) {
83
- if (Array.isArray(textOp.value)) {
84
- // Direct array of ops
85
- deltaOps = textOp.value;
86
- }
87
- else if (textOp.value.ops && Array.isArray(textOp.value.ops)) {
88
- // Delta object with ops property
89
- deltaOps = textOp.value.ops;
90
- }
91
- else if (typeof textOp.value === 'object') {
92
- // Convert object to array with single op
93
- deltaOps = [textOp.value];
94
- }
95
- }
96
- let currentOps = [];
97
- let retain = 0;
98
- // Helper to create a Change with current accumulated delta ops
99
- const flushDelta = () => {
100
- if (!currentOps.length)
101
- return;
102
- const newOp = {
103
- ...textOp,
104
- value: currentOps,
105
- };
106
- results.push({
107
- ...origChange,
108
- id: createId(),
109
- rev: rev++,
110
- ops: [newOp],
111
- created: Date.now(),
112
- });
113
- currentOps = [];
114
- };
115
- for (const op of deltaOps) {
116
- // Check if adding this op would exceed the size limit
117
- const tentativeOps = [...currentOps, op];
118
- const tentativeChange = {
119
- ...origChange,
120
- ops: [{ ...textOp, value: tentativeOps }],
121
- };
122
- // Add an initial retain op if we're starting a new group of ops and there were prior ops
123
- if (currentOps.length === 0 && retain) {
124
- currentOps.push({ retain });
125
- }
126
- if (getJSONByteSize(tentativeChange) > maxBytes) {
127
- flushDelta();
128
- // Handle the case where a single delta op is too large (e.g., very large text insert)
129
- if (currentOps.length === 0 && getJSONByteSize({ ...origChange, ops: [op] }) > maxBytes) {
130
- // Split large insert into chunks
131
- const retainBeforeChunks = retain; // Capture retain position BEFORE these chunks
132
- const [newRetain, chunks] = splitLargeInsert(op, retain, maxLength);
133
- retain = newRetain; // Update overall retain state for ops *after* these chunks
134
- for (let i = 0; i < chunks.length; i++) {
135
- const chunk = chunks[i];
136
- // Only add retain before the *first* chunk from splitLargeInsert
137
- if (i === 0 && retainBeforeChunks > 0) {
138
- currentOps = [{ retain: retainBeforeChunks }, chunk];
139
- }
140
- else {
141
- currentOps = [chunk];
142
- }
143
- flushDelta(); // Flushes the chunk (potentially with retain on first)
144
- }
145
- continue;
146
- }
147
- }
148
- currentOps.push(op);
149
- if (!op.delete) {
150
- retain += Op.length(op);
151
- }
152
- }
153
- // Flush any remaining ops
154
- flushDelta();
155
- return results;
156
- }
157
- /**
158
- * Split a large insert operation into multiple smaller ones
159
- */
160
- function splitLargeInsert(insertOp, retain, maxChunkSize) {
161
- const results = [];
162
- if (!insertOp.insert || typeof insertOp.insert !== 'string') {
163
- throw new Error(`Single @txt operation exceeds maxBytes. Cannot split further.`);
164
- }
165
- const text = insertOp.insert;
166
- // const attrs = insertOp.attributes || {}; // attrs not used currently
167
- // Ensure maxChunkSize is positive
168
- if (maxChunkSize <= 0) {
169
- throw new Error(`Calculated maxChunkSize is <= 0, cannot split insert.`);
170
- }
171
- // Ensure chunkSize is at least 1 to prevent infinite loops
172
- const targetChunkSize = Math.max(1, maxChunkSize);
173
- const numChunks = Math.ceil(text.length / targetChunkSize);
174
- const chunkSize = Math.ceil(text.length / numChunks);
175
- for (let i = 0; i < text.length; i += chunkSize) {
176
- const chunkText = text.slice(i, i + chunkSize);
177
- const op = { ...insertOp, insert: chunkText }; // Keep original attrs
178
- // For the first chunk, no retain is needed
179
- // Retain calculation seems complex, let breakTextOp handle retains between chunks
180
- // if (i !== 0) {
181
- // results.push({ retain });
182
- // }
183
- results.push(op);
184
- // Retain is now managed by the caller (breakTextOp)
185
- // retain += Op.length(op);
186
- }
187
- // Return just the ops, retain calculation happens in breakTextOp
188
- return [retain, results]; // This return signature might need review based on usage
189
- }
190
- /**
191
- * Attempt to break a large value in a replace/add operation
192
- */
193
- function breakLargeValueOp(origChange, op, maxBytes, startRev) {
194
- const results = [];
195
- let rev = startRev;
196
- // Calculate base size without the value to estimate budget for value chunks
197
- const baseOpSize = getJSONByteSize({ ...op, value: '' });
198
- const baseChangeSize = getJSONByteSize({ ...origChange, ops: [{ ...op, value: '' }] }) - baseOpSize;
199
- const valueBudget = maxBytes - baseChangeSize - 50; // 50 bytes buffer for overhead
200
- // Special case: if value is a string, we can split it into chunks
201
- if (typeof op.value === 'string' && op.value.length > 100) {
202
- // Only split reasonably large strings
203
- const text = op.value;
204
- // Ensure chunkSize is at least 1
205
- const targetChunkSize = Math.max(1, valueBudget);
206
- const numChunks = Math.ceil(text.length / targetChunkSize);
207
- const chunkSize = Math.ceil(text.length / numChunks);
208
- for (let i = 0; i < text.length; i += chunkSize) {
209
- const chunk = text.slice(i, i + chunkSize);
210
- const newOp = { op: 'add' }; // Default to add?
211
- if (i === 0) {
212
- // First chunk: use original op type (add/replace) and path
213
- newOp.op = op.op;
214
- newOp.path = op.path;
215
- newOp.value = chunk;
216
- }
217
- else {
218
- // Subsequent chunks: use 'add' to append to the string (assuming target is container)
219
- // This assumes the path points to an array or object where subsequent adds make sense.
220
- // A more robust solution might need context or use a specific 'patch' op.
221
- // If path was `/foo/bar`, appending needs `/foo/bar/-` or similar if array?
222
- // For now, let's assume path allows adding / maybe this needs a custom 'append' op?
223
- // Reverting to a placeholder 'patch' op type needing server interpretation.
224
- newOp.op = 'patch';
225
- newOp.path = op.path; // Operate on the original path
226
- newOp.appendString = chunk;
227
- }
228
- results.push({
229
- ...origChange,
230
- id: createId(),
231
- rev: rev++,
232
- ops: [newOp],
233
- created: Date.now(),
234
- });
235
- }
236
- return results;
237
- }
238
- else if (Array.isArray(op.value) && op.value.length > 1) {
239
- // Special case: if value is an array, we can split it into smaller arrays
240
- // This requires careful size checking per chunk
241
- const originalArray = op.value;
242
- let currentChunk = [];
243
- let chunkStartIndex = 0;
244
- for (let i = 0; i < originalArray.length; i++) {
245
- const item = originalArray[i];
246
- const tentativeChunk = [...currentChunk, item];
247
- const tentativeOp = { ...op, value: tentativeChunk };
248
- const tentativeChangeSize = getJSONByteSize({ ...origChange, ops: [tentativeOp] });
249
- if (currentChunk.length > 0 && tentativeChangeSize > maxBytes) {
250
- // Flush current chunk
251
- const chunkOp = {};
252
- if (chunkStartIndex === 0) {
253
- chunkOp.op = op.op;
254
- chunkOp.path = op.path;
255
- chunkOp.value = currentChunk;
256
- }
257
- else {
258
- // Append subsequent chunks - needs server support for 'appendArray'
259
- chunkOp.op = 'patch';
260
- chunkOp.path = op.path;
261
- chunkOp.appendArray = currentChunk;
262
- }
263
- results.push({
264
- ...origChange,
265
- id: createId(),
266
- rev: rev++,
267
- ops: [chunkOp],
268
- created: Date.now(),
269
- });
270
- currentChunk = [item]; // Start new chunk with current item
271
- chunkStartIndex = i;
272
- }
273
- else {
274
- currentChunk.push(item);
275
- }
276
- }
277
- // Flush the last chunk
278
- if (currentChunk.length > 0) {
279
- const chunkOp = {};
280
- if (chunkStartIndex === 0) {
281
- chunkOp.op = op.op;
282
- chunkOp.path = op.path;
283
- chunkOp.value = currentChunk;
284
- }
285
- else {
286
- chunkOp.op = 'patch';
287
- chunkOp.path = op.path;
288
- chunkOp.appendArray = currentChunk;
289
- }
290
- results.push({
291
- ...origChange,
292
- id: createId(),
293
- rev: rev++,
294
- ops: [chunkOp],
295
- created: Date.now(),
296
- });
297
- }
298
- return results;
299
- }
300
- // If we can't split it, throw an error
301
- throw new Error(`Single operation of type ${op.op} (path: ${op.path}) exceeds maxBytes and can't be split further.`);
302
- }
@@ -1,12 +0,0 @@
1
- /** Estimate JSON string byte size. */
2
- export function getJSONByteSize(data) {
3
- // Basic estimation, might not be perfectly accurate due to encoding nuances
4
- try {
5
- return new TextEncoder().encode(JSON.stringify(data)).length;
6
- }
7
- catch (e) {
8
- // Handle circular structures or other stringify errors
9
- console.error('Error calculating JSON size:', e);
10
- return Infinity; // Treat errors as infinitely large
11
- }
12
- }