@dabble/patches 0.2.11 → 0.2.12

@@ -1,12 +1,14 @@
 import { type Unsubscriber } from '../event-signal.js';
 import type { PatchesStore } from '../persist/PatchesStore.js';
 import type { Change } from '../types.js';
-import { PatchesDoc } from './PatchesDoc.js';
+import { PatchesDoc, type PatchesDocOptions } from './PatchesDoc.js';
 export interface PatchesOptions {
     /** Persistence layer instance (e.g., new IndexedDBStore('my-db') or new InMemoryStore()). */
     store: PatchesStore;
     /** Initial metadata to attach to changes from this client (merged with per-doc metadata). */
     metadata?: Record<string, any>;
+    /** Document-level options to pass to each PatchesDoc instance */
+    docOptions?: PatchesDocOptions;
 }
 interface ManagedDoc<T extends object> {
     doc: PatchesDoc<T>;
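
For orientation, a minimal sketch of how the new `docOptions` field would be supplied; only `PatchesOptions` itself is shown in this diff, so the `InMemoryStore` import path and the byte cap are assumptions for illustration:

```ts
import { Patches } from '@dabble/patches';
import { InMemoryStore } from '@dabble/patches'; // hypothetical import path

const patches = new Patches({
  store: new InMemoryStore(),
  metadata: { user: 'alice' },
  // New in 0.2.12: forwarded to every PatchesDoc this client opens
  docOptions: { maxPayloadBytes: 256 * 1024 },
});
```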
@@ -20,6 +22,7 @@ interface ManagedDoc<T extends object> {
 export declare class Patches {
     protected options: PatchesOptions;
     protected docs: Map<string, ManagedDoc<any>>;
+    readonly docOptions: PatchesDocOptions;
     readonly store: PatchesStore;
     readonly trackedDocs: Set<string>;
     readonly onError: import("../event-signal.js").Signal<(error: Error, context?: {
@@ -99,6 +102,11 @@ export declare class Patches {
      * Should be called when shutting down the client.
      */
     close(): void;
+    /**
+     * Updates document options that will be applied to all new documents
+     * @param options - Options to merge with current docOptions
+     */
+    updateDocOptions(options: Partial<PatchesDocOptions>): void;
     /**
      * Sets up a listener for local changes on a PatchesDoc, saving pending changes to the store.
      * @param docId - The document ID being managed.
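
Usage sketch for the new method (the value is illustrative):

```ts
// Merges into the shared docOptions object via Object.assign (see the
// implementation hunk below). Docs opened AFTER this call pick up the new
// cap; already-open docs keep the options they were constructed with.
patches.updateDocOptions({ maxPayloadBytes: 128 * 1024 });
```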
@@ -36,6 +36,7 @@ export class Patches {
         };
         this.options = opts;
         this.store = opts.store;
+        this.docOptions = opts.docOptions ?? {};
         this.store.listDocs().then(docs => {
             this.trackDocs(docs.map(({ docId }) => docId));
         });
@@ -90,7 +91,7 @@ export class Patches {
         const snapshot = await this.store.getDoc(docId);
         const initialState = (snapshot?.state ?? {});
         const mergedMetadata = { ...this.options.metadata, ...opts.metadata };
-        const doc = new PatchesDoc(initialState, mergedMetadata);
+        const doc = new PatchesDoc(initialState, mergedMetadata, this.docOptions);
         doc.setId(docId);
         if (snapshot) {
             doc.import(snapshot);
@@ -191,6 +192,13 @@ export class Patches {
         // Close store connection
         void this.store.close();
     }
+    /**
+     * Updates document options that will be applied to all new documents
+     * @param options - Options to merge with current docOptions
+     */
+    updateDocOptions(options) {
+        Object.assign(this.docOptions, options);
+    }
     // --- Internal Handlers ---
     /**
      * Sets up a listener for local changes on a PatchesDoc, saving pending changes to the store.
@@ -1,6 +1,16 @@
 import { type Unsubscriber } from '../event-signal.js';
 import type { JSONPatch } from '../json-patch/JSONPatch.js';
 import type { Change, PatchesSnapshot } from '../types.js';
+/**
+ * Options for creating a PatchesDoc instance
+ */
+export interface PatchesDocOptions {
+    /**
+     * Maximum size in bytes for a single payload (network message).
+     * Changes exceeding this will be split into multiple smaller changes.
+     */
+    maxPayloadBytes?: number;
+}
 /**
  * Represents a document synchronized using JSON patches.
  * Manages committed state, pending (local-only) changes, and
@@ -14,6 +24,7 @@ export declare class PatchesDoc<T extends object = object> {
     protected _pendingChanges: Change[];
     protected _sendingChanges: Change[];
     protected _changeMetadata: Record<string, any>;
+    protected readonly _maxPayloadBytes?: number;
     /** Subscribe to be notified before local state changes. */
     readonly onBeforeChange: import("../event-signal.js").Signal<(change: Change) => void>;
     /** Subscribe to be notified after local state changes are applied. */
@@ -24,8 +35,9 @@ export declare class PatchesDoc<T extends object = object> {
      * Creates an instance of PatchesDoc.
      * @param initialState Optional initial state.
      * @param initialMetadata Optional metadata to add to generated changes.
+     * @param options Additional options for the document.
      */
-    constructor(initialState?: T, initialMetadata?: Record<string, any>);
+    constructor(initialState?: T, initialMetadata?: Record<string, any>, options?: PatchesDocOptions);
     /** The unique identifier for this document, once assigned. */
     get id(): string | null;
     /** Current local state (committed + sending + pending). */
@@ -2,6 +2,7 @@ import { createId } from 'crypto-id';
 import { signal } from '../event-signal.js';
 import { createJSONPatch } from '../json-patch/createJSONPatch.js';
 import { applyChanges, rebaseChanges } from '../utils.js';
+import { breakChange } from '../utils/breakChange.js';
 /**
  * Represents a document synchronized using JSON patches.
  * Manages committed state, pending (local-only) changes, and
@@ -12,8 +13,9 @@ export class PatchesDoc {
      * Creates an instance of PatchesDoc.
      * @param initialState Optional initial state.
      * @param initialMetadata Optional metadata to add to generated changes.
+     * @param options Additional options for the document.
      */
-    constructor(initialState = {}, initialMetadata = {}) {
+    constructor(initialState = {}, initialMetadata = {}, options = {}) {
         this._id = null;
         this._pendingChanges = [];
         this._sendingChanges = [];
@@ -28,6 +30,7 @@ export class PatchesDoc {
         this._state = structuredClone(initialState);
         this._committedRev = 0;
         this._changeMetadata = initialMetadata;
+        this._maxPayloadBytes = options.maxPayloadBytes;
     }
     /** The unique identifier for this document, once assigned. */
     get id() {
@@ -113,8 +116,19 @@ export class PatchesDoc {
         this.onBeforeChange.emit(change);
         // Apply to local state immediately
         this._state = patch.apply(this._state);
-        this._pendingChanges.push(change);
-        this.onChange.emit(change);
+        if (this._maxPayloadBytes) {
+            // Check if the change needs to be split due to size
+            const changes = breakChange(change, this._maxPayloadBytes);
+            // Emit events for each change piece
+            for (const piece of changes) {
+                this._pendingChanges.push(piece);
+                this.onChange.emit(piece);
+            }
+        }
+        else {
+            this._pendingChanges.push(change);
+            this.onChange.emit(change);
+        }
         this.onUpdate.emit(this._state);
         return change;
     }
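
Reviewer note: one oversized local edit can now emit `onChange` more than once. A hedged sketch of the observable behavior; the subscription style mirrors the Signal usage visible elsewhere in this diff, while the edit API itself is not shown here:

```ts
const doc = new PatchesDoc({}, {}, { maxPayloadBytes: 1024 });
doc.onChange(piece => {
  // An edit whose JSON form exceeds 1024 bytes arrives as several
  // pieces with fresh ids and consecutive revs (see breakChange below).
  console.log(piece.id, piece.rev);
});
```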
@@ -1,4 +1,4 @@
-import { JSONPatch } from './JSONPatch';
+import { JSONPatch } from './JSONPatch.js';
 /**
  * Creates a `JSONPatch` instance by tracking changes made to a proxy object within an updater function.
  *
@@ -1,5 +1,5 @@
-import { JSONPatch } from './JSONPatch';
-import { createPatchProxy } from './patchProxy';
+import { JSONPatch } from './JSONPatch.js';
+import { createPatchProxy } from './patchProxy.js';
 /**
  * Creates a `JSONPatch` instance by tracking changes made to a proxy object within an updater function.
  *
@@ -1,4 +1,4 @@
-import { JSONPatch } from './JSONPatch';
+import { JSONPatch } from './JSONPatch.js';
 /**
  * Creates a proxy object that can be used in two ways:
  *
@@ -1,3 +1,3 @@
 import type { JSONPatchOp, State } from '../types.js';
 export declare function getType(state: State, patch: JSONPatchOp): import("../types.js").JSONPatchOpHandler;
-export declare function getTypeLike(state: State, patch: JSONPatchOp): "replace" | "add" | "remove" | "move" | "copy" | "test";
+export declare function getTypeLike(state: State, patch: JSONPatchOp): "replace" | "add" | "remove" | "copy" | "move" | "test";
@@ -1,8 +1,13 @@
 import { Patches } from '../client/Patches.js';
 import type { WebSocketOptions } from './websocket/WebSocketTransport.js';
 export interface PatchesSyncOptions {
+    /** WebSocket connection options */
     wsOptions?: WebSocketOptions;
-    maxBatchSize?: number;
+    /**
+     * Maximum size in bytes for a single payload (network message).
+     * Changes exceeding this will be automatically split.
+     */
+    maxPayloadBytes?: number;
 }
 export interface PatchesSyncState {
     online: boolean;
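
Migration sketch for consumers. The constructor argument order is an assumption based on the implementation hunk below; the byte value is illustrative:

```ts
// 0.2.11: new PatchesSync(patches, url, { maxBatchSize: 262144 });
// 0.2.12: the option is renamed, and now also splits individual
// oversized changes rather than only grouping whole changes:
const sync = new PatchesSync(patches, url, {
  wsOptions: {},
  maxPayloadBytes: 262144,
});
```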
@@ -76,6 +76,10 @@ export class PatchesSync {
         this.ws = new PatchesWebSocket(url, options.wsOptions);
         this._state.online = onlineState.isOnline;
         this.trackedDocs = new Set(patches.trackedDocs);
+        // Set maxPayloadBytes on Patches docOptions if provided
+        if (options.maxPayloadBytes) {
+            patches.updateDocOptions({ maxPayloadBytes: options.maxPayloadBytes });
+        }
         // --- Event Listeners ---
         onlineState.onOnlineChange(this._handleOnlineChange);
         this.ws.onStateChange(this._handleConnectionChange);
@@ -233,7 +237,7 @@ export class PatchesSync {
                 return; // Nothing to flush
             }
         }
-        const batches = breakIntoBatches(pending, this.options.maxBatchSize);
+        const batches = breakIntoBatches(pending, this.options.maxPayloadBytes);
         for (const batch of batches) {
             if (!this.state.connected) {
                 throw new Error('Disconnected during flush');
@@ -1,6 +1,6 @@
 declare class OnlineState {
     onOnlineChange: import("../../event-signal").Signal<(isOnline: boolean) => void>;
-    _isOnline: any;
+    _isOnline: boolean;
     constructor();
     get isOnline(): boolean;
     get isOffline(): boolean;
package/dist/types.d.ts CHANGED
@@ -10,10 +10,10 @@ export interface Change {
     baseRev?: number;
     /** Client-side timestamp when the change was created. */
     created: number;
-    /** Optional arbitrary metadata associated with the change. */
-    metadata?: Record<string, any>;
     /** Optional batch identifier for grouping changes that belong to the same client batch (for multi-batch offline/large edits). */
     batchId?: string;
+    /** Optional arbitrary metadata associated with the change. */
+    [metadata: string]: any;
 }
 /**
  * Represents the state of a document in the OT protocol.
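
The `metadata` property is replaced by an index signature, so arbitrary keys now live directly on the change object. A sketch of the shape difference; the required fields are taken from how changes are constructed in `breakChange` below, and the op payload is illustrative:

```ts
// 0.2.11: { id, ops, rev, created, metadata: { user: 'alice' } }
// 0.2.12: custom keys sit at the top level instead
const change: Change = {
  id: 'c1',
  rev: 7,
  ops: [{ op: 'replace', path: '/title', value: 'Hi' }],
  created: Date.now(),
  user: 'alice', // permitted by [metadata: string]: any
};
```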
@@ -49,7 +49,7 @@ export interface Branch {
     /** Current status of the branch. */
     status: BranchStatus;
     /** Optional arbitrary metadata associated with the branch record. */
-    metadata?: Record<string, any>;
+    [metadata: string]: any;
 }
 /**
  * Metadata, state snapshot, and included changes for a specific version.
@@ -74,6 +74,8 @@ export interface VersionMetadata {
     rev: number;
     /** The revision number on the main timeline before the changes that created this version. If this is an offline/branch version, this is the revision number of the source document where the branch was created and not . */
     baseRev: number;
+    /** Optional arbitrary metadata associated with the version. */
+    [metadata: string]: any;
 }
 /**
  * Options for listing committed server changes. *Always* ordered by revision number.
@@ -1,5 +1,3 @@
 import type { Change } from '../types.js';
-/** Estimate JSON string byte size. */
-export declare function getJSONByteSize(data: any): number;
-/** Break changes into batches based on maxBatchSize. */
-export declare function breakIntoBatches(changes: Change[], maxSize?: number): Change[][];
+/** Break changes into batches based on maxPayloadBytes. */
+export declare function breakIntoBatches(changes: Change[], maxPayloadBytes?: number): Change[][];
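
Behavioral sketch of the renamed parameter, grounded in the implementation below (the `changes` array is hypothetical):

```ts
breakIntoBatches(changes);            // no cap → returns [changes] unchanged
breakIntoBatches(changes, 64 * 1024); // groups changes under 64 KiB per batch;
// when splitting occurs, every emitted change gains a shared batchId, and any
// single change larger than the cap is first split via breakChange (see below)
```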
@@ -1,12 +1,9 @@
 import { createId } from 'crypto-id';
-/** Estimate JSON string byte size. */
-export function getJSONByteSize(data) {
-    // Basic estimation, might not be perfectly accurate due to encoding nuances
-    return new TextEncoder().encode(JSON.stringify(data)).length;
-}
-/** Break changes into batches based on maxBatchSize. */
-export function breakIntoBatches(changes, maxSize) {
-    if (!maxSize || getJSONByteSize(changes) < maxSize) {
+import { breakChange } from './breakChange.js'; // Import from new file
+import { getJSONByteSize } from './getJSONByteSize.js'; // Import from new file
+/** Break changes into batches based on maxPayloadBytes. */
+export function breakIntoBatches(changes, maxPayloadBytes) {
+    if (!maxPayloadBytes || getJSONByteSize(changes) < maxPayloadBytes) {
         return [changes];
     }
     const batchId = createId(12);
@@ -16,20 +13,26 @@ export function breakIntoBatches(changes, maxSize) {
     for (const change of changes) {
         // Add batchId if breaking up
         const changeWithBatchId = { ...change, batchId };
-        const changeSize = getJSONByteSize(changeWithBatchId) + (currentBatch.length > 0 ? 1 : 0); // Add 1 for comma
-        // If a single change is too big, we have an issue (should be rare)
-        if (changeSize > maxSize && currentBatch.length === 0) {
-            console.error(`Single change ${change.id} (size ${changeSize}) exceeds maxBatchSize (${maxSize}). Sending as its own batch.`);
-            batches.push([changeWithBatchId]); // Send it anyway
-            continue;
+        const individualActualSize = getJSONByteSize(changeWithBatchId);
+        let itemsToProcess;
+        if (individualActualSize > maxPayloadBytes) {
+            itemsToProcess = breakChange(changeWithBatchId, maxPayloadBytes);
+        }
+        else {
+            itemsToProcess = [changeWithBatchId];
         }
-        if (currentSize + changeSize > maxSize) {
-            batches.push(currentBatch);
-            currentBatch = [];
-            currentSize = 2;
+        for (const item of itemsToProcess) {
+            const itemActualSize = getJSONByteSize(item);
+            const itemSizeForBatching = itemActualSize + (currentBatch.length > 0 ? 1 : 0);
+            if (currentBatch.length > 0 && currentSize + itemSizeForBatching > maxPayloadBytes) {
+                batches.push(currentBatch);
+                currentBatch = [];
+                currentSize = 2;
+            }
+            const actualItemContribution = itemActualSize + (currentBatch.length > 0 ? 1 : 0);
+            currentBatch.push(item);
+            currentSize += actualItemContribution;
         }
-        currentBatch.push(changeWithBatchId);
-        currentSize += changeSize;
     }
     if (currentBatch.length > 0) {
         batches.push(currentBatch);
@@ -0,0 +1,10 @@
+import type { Change } from '../types.js';
+/**
+ * Break a single Change into multiple Changes so that
+ * JSON.stringify(change).length never exceeds `maxBytes`.
+ *
+ * - Splits first by JSON-Patch *ops*
+ * - If an individual op is still too big and is a "@txt" op,
+ *   split its Delta payload into smaller Deltas
+ */
+export declare function breakChange(orig: Change, maxBytes: number): Change[];
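
Usage sketch (`bigChange` is a hypothetical oversized Change; the cap is illustrative):

```ts
const pieces = breakChange(bigChange, 16 * 1024);
// Each piece copies the original change's fields but receives a fresh id,
// a consecutive rev starting at bigChange.rev, and a new created timestamp,
// so downstream code can treat the pieces as ordinary changes.
```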
@@ -0,0 +1,303 @@
+import { Op } from '@dabble/delta';
+import { createId } from 'crypto-id';
+import { getJSONByteSize } from './getJSONByteSize.js'; // Import from new location
+/**
+ * Break a single Change into multiple Changes so that
+ * JSON.stringify(change).length never exceeds `maxBytes`.
+ *
+ * - Splits first by JSON-Patch *ops*
+ * - If an individual op is still too big and is a "@txt" op,
+ *   split its Delta payload into smaller Deltas
+ */
+export function breakChange(orig, maxBytes) {
+    if (getJSONByteSize(orig) <= maxBytes)
+        return [orig];
+    // First pass: split by ops
+    const byOps = [];
+    let group = [];
+    let rev = orig.rev;
+    const flush = () => {
+        if (!group.length)
+            return;
+        byOps.push({
+            ...orig,
+            id: createId(),
+            rev: rev++,
+            ops: group,
+            created: Date.now(),
+        });
+        group = [];
+    };
+    for (const op of orig.ops) {
+        const tentative = group.concat(op);
+        if (getJSONByteSize({ ...orig, ops: tentative }) > maxBytes)
+            flush();
+        // Handle the case where a single op is too large
+        if (group.length === 0 && getJSONByteSize({ ...orig, ops: [op] }) > maxBytes) {
+            // We have a single op that's too big - can only be @txt op with large delta
+            if (op.op === '@txt' && op.value) {
+                const pieces = breakTextOp(orig, op, maxBytes, rev);
+                byOps.push(...pieces);
+                // Only update rev if we got results from breakTextOp
+                if (pieces.length > 0) {
+                    rev = pieces[pieces.length - 1].rev + 1; // Update rev for next changes
+                }
+                continue;
+            }
+            else if (op.op === 'replace' || op.op === 'add') {
+                // For replace/add operations with large value payloads, try to split the value if it's a string or array
+                const pieces = breakLargeValueOp(orig, op, maxBytes, rev);
+                byOps.push(...pieces);
+                if (pieces.length > 0) {
+                    rev = pieces[pieces.length - 1].rev + 1;
+                }
+                continue;
+            }
+            else {
+                // Non-splittable op that's too large, include it anyway with a warning
+                console.warn(`Warning: Single operation of type ${op.op} exceeds maxBytes. Including it anyway.`);
+                group.push(op);
+                continue;
+            }
+        }
+        group.push(op);
+    }
+    flush();
+    return byOps;
+}
+/**
+ * Break a large @txt operation into multiple smaller operations
+ */
+function breakTextOp(origChange, textOp, maxBytes, startRev) {
+    const results = [];
+    let rev = startRev;
+    // Calculate the budget for the delta content itself
+    const baseSize = getJSONByteSize({ ...origChange, ops: [{ ...textOp, value: '' }] });
+    const budget = maxBytes - baseSize;
+    // Ensure maxLength for splitLargeInsert is at least 1, apply a smaller buffer
+    const buffer = 20; // Reduced buffer
+    const maxLength = Math.max(1, budget - buffer);
+    // Ensure deltaOps is always an array, handle both Delta objects and raw arrays
+    let deltaOps = [];
+    if (textOp.value) {
+        if (Array.isArray(textOp.value)) {
+            // Direct array of ops
+            deltaOps = textOp.value;
+        }
+        else if (textOp.value.ops && Array.isArray(textOp.value.ops)) {
+            // Delta object with ops property
+            deltaOps = textOp.value.ops;
+        }
+        else if (typeof textOp.value === 'object') {
+            // Convert object to array with single op
+            deltaOps = [textOp.value];
+        }
+    }
+    let currentOps = [];
+    let retain = 0;
+    // Helper to create a Change with current accumulated delta ops
+    const flushDelta = () => {
+        if (!currentOps.length)
+            return;
+        const newOp = {
+            ...textOp,
+            value: currentOps,
+        };
+        results.push({
+            ...origChange,
+            id: createId(),
+            rev: rev++,
+            ops: [newOp],
+            created: Date.now(),
+        });
+        currentOps = [];
+    };
+    for (const op of deltaOps) {
+        // Check if adding this op would exceed the size limit
+        const tentativeOps = [...currentOps, op];
+        const tentativeChange = {
+            ...origChange,
+            ops: [{ ...textOp, value: tentativeOps }],
+        };
+        // Add an initial retain op if we're starting a new group of ops and there were prior ops
+        if (currentOps.length === 0 && retain) {
+            currentOps.push({ retain });
+        }
+        if (getJSONByteSize(tentativeChange) > maxBytes) {
+            flushDelta();
+            // Handle the case where a single delta op is too large (e.g., very large text insert)
+            if (currentOps.length === 0 && getJSONByteSize({ ...origChange, ops: [op] }) > maxBytes) {
+                // Split large insert into chunks
+                const retainBeforeChunks = retain; // Capture retain position BEFORE these chunks
+                const [newRetain, chunks] = splitLargeInsert(op, retain, maxLength);
+                retain = newRetain; // Update overall retain state for ops *after* these chunks
+                for (let i = 0; i < chunks.length; i++) {
+                    const chunk = chunks[i];
+                    // Only add retain before the *first* chunk from splitLargeInsert
+                    if (i === 0 && retainBeforeChunks > 0) {
+                        currentOps = [{ retain: retainBeforeChunks }, chunk];
+                    }
+                    else {
+                        currentOps = [chunk];
+                    }
+                    flushDelta(); // Flushes the chunk (potentially with retain on first)
+                }
+                continue;
+            }
+        }
+        currentOps.push(op);
+        if (!op.delete) {
+            retain += Op.length(op);
+        }
+    }
+    // Flush any remaining ops
+    flushDelta();
+    return results;
+}
+/**
+ * Split a large insert operation into multiple smaller ones
+ */
+function splitLargeInsert(insertOp, retain, maxChunkSize) {
+    const results = [];
+    if (!insertOp.insert || typeof insertOp.insert !== 'string') {
+        throw new Error(`Single @txt operation exceeds maxBytes. Cannot split further.`);
+    }
+    const text = insertOp.insert;
+    // const attrs = insertOp.attributes || {}; // attrs not used currently
+    // Ensure maxChunkSize is positive
+    if (maxChunkSize <= 0) {
+        throw new Error(`Calculated maxChunkSize is <= 0, cannot split insert.`);
+    }
+    // Ensure chunkSize is at least 1 to prevent infinite loops
+    const targetChunkSize = Math.max(1, maxChunkSize);
+    const numChunks = Math.ceil(text.length / targetChunkSize);
+    const chunkSize = Math.ceil(text.length / numChunks);
+    for (let i = 0; i < text.length; i += chunkSize) {
+        const chunkText = text.slice(i, i + chunkSize);
+        const op = { ...insertOp, insert: chunkText }; // Keep original attrs
+        // For the first chunk, no retain is needed
+        // Retain calculation seems complex, let breakTextOp handle retains between chunks
+        // if (i !== 0) {
+        //     results.push({ retain });
+        // }
+        results.push(op);
+        // Retain is now managed by the caller (breakTextOp)
+        // retain += Op.length(op);
+    }
+    // Return just the ops, retain calculation happens in breakTextOp
+    return [retain, results]; // This return signature might need review based on usage
+}
+/**
+ * Attempt to break a large value in a replace/add operation
+ */
+function breakLargeValueOp(origChange, op, maxBytes, startRev) {
+    const results = [];
+    let rev = startRev;
+    // Calculate base size without the value to estimate budget for value chunks
+    const baseOpSize = getJSONByteSize({ ...op, value: '' });
+    const baseChangeSize = getJSONByteSize({ ...origChange, ops: [{ ...op, value: '' }] }) - baseOpSize;
+    const valueBudget = maxBytes - baseChangeSize - 50; // 50 bytes buffer for overhead
+    // Special case: if value is a string, we can split it into chunks
+    if (typeof op.value === 'string' && op.value.length > 100) {
+        // Only split reasonably large strings
+        const text = op.value;
+        // Ensure chunkSize is at least 1
+        const targetChunkSize = Math.max(1, valueBudget);
+        const numChunks = Math.ceil(text.length / targetChunkSize);
+        const chunkSize = Math.ceil(text.length / numChunks);
+        let currentPath = op.path;
+        for (let i = 0; i < text.length; i += chunkSize) {
+            const chunk = text.slice(i, i + chunkSize);
+            const newOp = { op: 'add' }; // Default to add?
+            if (i === 0) {
+                // First chunk: use original op type (add/replace) and path
+                newOp.op = op.op;
+                newOp.path = op.path;
+                newOp.value = chunk;
+            }
+            else {
+                // Subsequent chunks: use 'add' to append to the string (assuming target is container)
+                // This assumes the path points to an array or object where subsequent adds make sense.
+                // A more robust solution might need context or use a specific 'patch' op.
+                // If path was `/foo/bar`, appending needs `/foo/bar/-` or similar if array?
+                // For now, let's assume path allows adding / maybe this needs a custom 'append' op?
+                // Reverting to a placeholder 'patch' op type needing server interpretation.
+                newOp.op = 'patch';
+                newOp.path = op.path; // Operate on the original path
+                newOp.appendString = chunk;
+            }
+            results.push({
+                ...origChange,
+                id: createId(),
+                rev: rev++,
+                ops: [newOp],
+                created: Date.now(),
+            });
+        }
+        return results;
+    }
+    else if (Array.isArray(op.value) && op.value.length > 1) {
+        // Special case: if value is an array, we can split it into smaller arrays
+        // This requires careful size checking per chunk
+        const originalArray = op.value;
+        let currentChunk = [];
+        let chunkStartIndex = 0;
+        for (let i = 0; i < originalArray.length; i++) {
+            const item = originalArray[i];
+            const tentativeChunk = [...currentChunk, item];
+            const tentativeOp = { ...op, value: tentativeChunk };
+            const tentativeChangeSize = getJSONByteSize({ ...origChange, ops: [tentativeOp] });
+            if (currentChunk.length > 0 && tentativeChangeSize > maxBytes) {
+                // Flush current chunk
+                const chunkOp = {};
+                if (chunkStartIndex === 0) {
+                    chunkOp.op = op.op;
+                    chunkOp.path = op.path;
+                    chunkOp.value = currentChunk;
+                }
+                else {
+                    // Append subsequent chunks - needs server support for 'appendArray'
+                    chunkOp.op = 'patch';
+                    chunkOp.path = op.path;
+                    chunkOp.appendArray = currentChunk;
+                }
+                results.push({
+                    ...origChange,
+                    id: createId(),
+                    rev: rev++,
+                    ops: [chunkOp],
+                    created: Date.now(),
+                });
+                currentChunk = [item]; // Start new chunk with current item
+                chunkStartIndex = i;
+            }
+            else {
+                currentChunk.push(item);
+            }
+        }
+        // Flush the last chunk
+        if (currentChunk.length > 0) {
+            const chunkOp = {};
+            if (chunkStartIndex === 0) {
+                chunkOp.op = op.op;
+                chunkOp.path = op.path;
+                chunkOp.value = currentChunk;
+            }
+            else {
+                chunkOp.op = 'patch';
+                chunkOp.path = op.path;
+                chunkOp.appendArray = currentChunk;
+            }
+            results.push({
+                ...origChange,
+                id: createId(),
+                rev: rev++,
+                ops: [chunkOp],
+                created: Date.now(),
+            });
+        }
+        return results;
+    }
+    // If we can't split it, throw an error
+    throw new Error(`Single operation of type ${op.op} (path: ${op.path}) exceeds maxBytes and can't be split further.`);
+}
@@ -0,0 +1,2 @@
+/** Estimate JSON string byte size. */
+export declare function getJSONByteSize(data: any): number;
@@ -0,0 +1,13 @@
+import { TextEncoder } from 'util'; // Node.js TextEncoder
+/** Estimate JSON string byte size. */
+export function getJSONByteSize(data) {
+    // Basic estimation, might not be perfectly accurate due to encoding nuances
+    try {
+        return new TextEncoder().encode(JSON.stringify(data)).length;
+    }
+    catch (e) {
+        // Handle circular structures or other stringify errors
+        console.error('Error calculating JSON size:', e);
+        return Infinity; // Treat errors as infinitely large
+    }
+}
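
Reviewer note: `TextEncoder` is imported from Node's `util` here, so browser bundles need a shim (modern runtimes also expose it as a global). A quick example of the byte-vs-character distinction the encoder handles:

```ts
// JSON.stringify({ t: 'héllo' }) is the 13-character string {"t":"héllo"},
// but 'é' encodes to 2 bytes in UTF-8, so the reported size is 14.
getJSONByteSize({ t: 'héllo' }); // 14
```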
package/package.json CHANGED
@@ -1,6 +1,6 @@
 {
   "name": "@dabble/patches",
-  "version": "0.2.11",
+  "version": "0.2.12",
   "description": "Immutable JSON Patch implementation based on RFC 6902 supporting operational transformation and last-writer-wins",
   "author": "Jacob Wright <jacwright@gmail.com>",
   "bugs": {