@dabble/patches 0.2.11 → 0.2.13
- package/dist/client/Patches.d.ts +9 -1
- package/dist/client/Patches.js +9 -1
- package/dist/client/PatchesDoc.d.ts +13 -1
- package/dist/client/PatchesDoc.js +17 -3
- package/dist/client/PatchesHistoryClient.js +0 -19
- package/dist/index.d.ts +2 -2
- package/dist/index.js +1 -1
- package/dist/json-patch/JSONPatch.js +1 -1
- package/dist/json-patch/createJSONPatch.d.ts +1 -1
- package/dist/json-patch/createJSONPatch.js +2 -2
- package/dist/json-patch/index.d.ts +0 -1
- package/dist/json-patch/index.js +0 -1
- package/dist/json-patch/ops/index.d.ts +0 -1
- package/dist/json-patch/ops/index.js +1 -1
- package/dist/json-patch/ops/text.js +1 -1
- package/dist/json-patch/patchProxy.d.ts +1 -1
- package/dist/json-patch/patchProxy.js +1 -0
- package/dist/json-patch/utils/get.js +1 -1
- package/dist/json-patch/utils/getType.d.ts +1 -1
- package/dist/net/PatchesSync.d.ts +6 -1
- package/dist/net/PatchesSync.js +6 -1
- package/dist/net/websocket/onlineState.d.ts +1 -1
- package/dist/server/PatchesServer.js +1 -1
- package/dist/types.d.ts +6 -4
- package/dist/utils/batching.d.ts +2 -4
- package/dist/utils/batching.js +23 -20
- package/dist/utils/breakChange.d.ts +10 -0
- package/dist/utils/breakChange.js +302 -0
- package/dist/utils/getJSONByteSize.d.ts +2 -0
- package/dist/utils/getJSONByteSize.js +13 -0
- package/dist/utils.d.ts +1 -1
- package/package.json +4 -4
package/dist/client/Patches.d.ts
CHANGED
@@ -1,12 +1,14 @@
 import { type Unsubscriber } from '../event-signal.js';
 import type { PatchesStore } from '../persist/PatchesStore.js';
 import type { Change } from '../types.js';
-import { PatchesDoc } from './PatchesDoc.js';
+import { PatchesDoc, type PatchesDocOptions } from './PatchesDoc.js';
 export interface PatchesOptions {
     /** Persistence layer instance (e.g., new IndexedDBStore('my-db') or new InMemoryStore()). */
     store: PatchesStore;
     /** Initial metadata to attach to changes from this client (merged with per-doc metadata). */
     metadata?: Record<string, any>;
+    /** Document-level options to pass to each PatchesDoc instance */
+    docOptions?: PatchesDocOptions;
 }
 interface ManagedDoc<T extends object> {
     doc: PatchesDoc<T>;
@@ -20,6 +22,7 @@ interface ManagedDoc<T extends object> {
 export declare class Patches {
     protected options: PatchesOptions;
     protected docs: Map<string, ManagedDoc<any>>;
+    readonly docOptions: PatchesDocOptions;
     readonly store: PatchesStore;
     readonly trackedDocs: Set<string>;
     readonly onError: import("../event-signal.js").Signal<(error: Error, context?: {
@@ -99,6 +102,11 @@ export declare class Patches {
      * Should be called when shutting down the client.
      */
     close(): void;
+    /**
+     * Updates document options that will be applied to all new documents
+     * @param options - Options to merge with current docOptions
+     */
+    updateDocOptions(options: Partial<PatchesDocOptions>): void;
     /**
      * Sets up a listener for local changes on a PatchesDoc, saving pending changes to the store.
     * @param docId - The document ID being managed.

package/dist/client/Patches.js
CHANGED
@@ -36,6 +36,7 @@ export class Patches {
         };
         this.options = opts;
         this.store = opts.store;
+        this.docOptions = opts.docOptions ?? {};
         this.store.listDocs().then(docs => {
             this.trackDocs(docs.map(({ docId }) => docId));
         });
@@ -90,7 +91,7 @@ export class Patches {
         const snapshot = await this.store.getDoc(docId);
         const initialState = (snapshot?.state ?? {});
         const mergedMetadata = { ...this.options.metadata, ...opts.metadata };
-        const doc = new PatchesDoc(initialState, mergedMetadata);
+        const doc = new PatchesDoc(initialState, mergedMetadata, this.docOptions);
         doc.setId(docId);
         if (snapshot) {
             doc.import(snapshot);
@@ -191,6 +192,13 @@ export class Patches {
         // Close store connection
         void this.store.close();
     }
+    /**
+     * Updates document options that will be applied to all new documents
+     * @param options - Options to merge with current docOptions
+     */
+    updateDocOptions(options) {
+        Object.assign(this.docOptions, options);
+    }
     // --- Internal Handlers ---
     /**
      * Sets up a listener for local changes on a PatchesDoc, saving pending changes to the store.
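
Together these two hunks let callers set a shared options bag once and have it forwarded as the third constructor argument of every PatchesDoc the client opens. A minimal sketch of the new surface (the store import path is an assumption based on the doc comment in PatchesOptions):

```ts
import { Patches } from '@dabble/patches';
import { InMemoryStore } from '@dabble/patches'; // store export path assumed

const patches = new Patches({
  store: new InMemoryStore(),
  // Forwarded to each PatchesDoc created by the client
  docOptions: { maxPayloadBytes: 256 * 1024 },
});

// Merged into the existing docOptions object via Object.assign
patches.updateDocOptions({ maxPayloadBytes: 128 * 1024 });
```

Note that `updateDocOptions` mutates the shared object, but because PatchesDoc copies `maxPayloadBytes` into a readonly field in its constructor (see the PatchesDoc hunks below), the new value only affects documents opened afterwards, matching the "applied to all new documents" doc comment.
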
package/dist/client/PatchesDoc.d.ts
CHANGED
@@ -1,6 +1,16 @@
 import { type Unsubscriber } from '../event-signal.js';
 import type { JSONPatch } from '../json-patch/JSONPatch.js';
 import type { Change, PatchesSnapshot } from '../types.js';
+/**
+ * Options for creating a PatchesDoc instance
+ */
+export interface PatchesDocOptions {
+    /**
+     * Maximum size in bytes for a single payload (network message).
+     * Changes exceeding this will be split into multiple smaller changes.
+     */
+    maxPayloadBytes?: number;
+}
 /**
  * Represents a document synchronized using JSON patches.
  * Manages committed state, pending (local-only) changes, and
@@ -14,6 +24,7 @@ export declare class PatchesDoc<T extends object = object> {
     protected _pendingChanges: Change[];
     protected _sendingChanges: Change[];
     protected _changeMetadata: Record<string, any>;
+    protected readonly _maxPayloadBytes?: number;
     /** Subscribe to be notified before local state changes. */
     readonly onBeforeChange: import("../event-signal.js").Signal<(change: Change) => void>;
     /** Subscribe to be notified after local state changes are applied. */
@@ -24,8 +35,9 @@ export declare class PatchesDoc<T extends object = object> {
      * Creates an instance of PatchesDoc.
      * @param initialState Optional initial state.
     * @param initialMetadata Optional metadata to add to generated changes.
+     * @param options Additional options for the document.
     */
-    constructor(initialState?: T, initialMetadata?: Record<string, any>);
+    constructor(initialState?: T, initialMetadata?: Record<string, any>, options?: PatchesDocOptions);
     /** The unique identifier for this document, once assigned. */
     get id(): string | null;
     /** Current local state (committed + sending + pending). */

package/dist/client/PatchesDoc.js
CHANGED
@@ -2,6 +2,7 @@ import { createId } from 'crypto-id';
 import { signal } from '../event-signal.js';
 import { createJSONPatch } from '../json-patch/createJSONPatch.js';
 import { applyChanges, rebaseChanges } from '../utils.js';
+import { breakChange } from '../utils/breakChange.js';
 /**
  * Represents a document synchronized using JSON patches.
  * Manages committed state, pending (local-only) changes, and
@@ -12,8 +13,9 @@ export class PatchesDoc {
      * Creates an instance of PatchesDoc.
      * @param initialState Optional initial state.
     * @param initialMetadata Optional metadata to add to generated changes.
+     * @param options Additional options for the document.
     */
-    constructor(initialState = {}, initialMetadata = {}) {
+    constructor(initialState = {}, initialMetadata = {}, options = {}) {
         this._id = null;
         this._pendingChanges = [];
         this._sendingChanges = [];
@@ -28,6 +30,7 @@ export class PatchesDoc {
         this._state = structuredClone(initialState);
         this._committedRev = 0;
         this._changeMetadata = initialMetadata;
+        this._maxPayloadBytes = options.maxPayloadBytes;
     }
     /** The unique identifier for this document, once assigned. */
     get id() {
@@ -113,8 +116,19 @@ export class PatchesDoc {
         this.onBeforeChange.emit(change);
         // Apply to local state immediately
         this._state = patch.apply(this._state);
-        this._pendingChanges.push(change);
-        this.onChange.emit(change);
+        if (this._maxPayloadBytes) {
+            // Check if the change needs to be split due to size
+            const changes = breakChange(change, this._maxPayloadBytes);
+            // Emit events for each change piece
+            for (const piece of changes) {
+                this._pendingChanges.push(piece);
+                this.onChange.emit(piece);
+            }
+        }
+        else {
+            this._pendingChanges.push(change);
+            this.onChange.emit(change);
+        }
         this.onUpdate.emit(this._state);
         return change;
     }
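
With `_maxPayloadBytes` set, a single local edit can now surface as several pending changes, each under the byte cap, with `onChange` firing once per piece. A sketch of the observable behavior; the `change(mutator)` call shape is an assumption based on the `createJSONPatch` proxy import above, and subscribing to a signal by calling it is likewise an assumption about the event-signal module:

```ts
import { PatchesDoc } from '@dabble/patches';

const doc = new PatchesDoc({ text: '' }, {}, { maxPayloadBytes: 1024 });

let pieces = 0;
doc.onChange(() => pieces++); // subscription-by-call is an assumption

// A mutation whose serialized change exceeds 1024 bytes is run through
// breakChange() and queued as multiple smaller pending changes.
doc.change(draft => {
  draft.text = 'x'.repeat(10_000); // hypothetical proxy-based mutation
});
// pieces > 1 here; without maxPayloadBytes it would be exactly 1
```
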
package/dist/client/PatchesHistoryClient.js
CHANGED
@@ -31,25 +31,6 @@ class LRUCache {
         this.cache.clear();
     }
 }
-/**
- * Simple event signal for update notifications (like in PatchesDoc)
- */
-class Signal {
-    constructor() {
-        this.listeners = new Set();
-    }
-    subscribe(cb) {
-        this.listeners.add(cb);
-        return () => this.listeners.delete(cb);
-    }
-    emit() {
-        for (const cb of this.listeners)
-            cb();
-    }
-    clear() {
-        this.listeners.clear();
-    }
-}
 /**
  * Client-side history/scrubbing interface for a document.
  * Read-only: allows listing versions, loading states/changes, and scrubbing.

package/dist/index.d.ts
CHANGED
@@ -1,8 +1,8 @@
 export { Delta } from '@dabble/delta';
 export * from './client/Patches.js';
 export * from './client/PatchesDoc.js';
-export * from './event-signal';
+export * from './event-signal.js';
 export * from './json-patch/JSONPatch.js';
 export type { ApplyJSONPatchOptions } from './json-patch/types.js';
 export type * from './persist/PatchesStore.js';
-export type * from './types';
+export type * from './types.js';

package/dist/index.js
CHANGED

package/dist/json-patch/JSONPatch.js
CHANGED
@@ -139,7 +139,7 @@ export class JSONPatch {
     addObjectsInPath(obj, path) {
         path = checkPath(path);
         const parts = path.split('/');
-        for (
+        for (let i = 1; i < parts.length - 1; i++) {
             const prop = parts[i];
             if (!obj || !obj[prop]) {
                 this.add(parts.slice(0, i + 1).join('/'), {});

package/dist/json-patch/createJSONPatch.js
CHANGED
@@ -1,5 +1,5 @@
-import { JSONPatch } from './JSONPatch';
-import { createPatchProxy } from './patchProxy';
+import { JSONPatch } from './JSONPatch.js';
+import { createPatchProxy } from './patchProxy.js';
 /**
  * Creates a `JSONPatch` instance by tracking changes made to a proxy object within an updater function.
 *

package/dist/json-patch/index.d.ts
CHANGED
@@ -5,5 +5,4 @@ export { applyBitmask, bitmask, combineBitmasks } from './ops/bitmask.js';
 export * as defaultOps from './ops/index.js';
 export { transformPatch } from './transformPatch.js';
 export * from './ops/index.js';
-export * from './ops/index.js';
 export type { ApplyJSONPatchOptions, JSONPatchOpHandlerMap as JSONPatchCustomTypes, JSONPatchOp } from './types.js';

package/dist/json-patch/index.js
CHANGED
@@ -4,5 +4,4 @@ export { invertPatch } from './invertPatch.js';
 export { applyBitmask, bitmask, combineBitmasks } from './ops/bitmask.js';
 export * as defaultOps from './ops/index.js';
 export { transformPatch } from './transformPatch.js';
-export * from './ops/index.js'; // Exports all ops: add, remove, etc.
 export * from './ops/index.js';

package/dist/json-patch/ops/index.d.ts
CHANGED
@@ -7,7 +7,6 @@ import { move } from './move.js';
 import { remove } from './remove.js';
 import { replace } from './replace.js';
 import { test } from './test.js';
-export * from './bitmask.js';
 export { add, bit, copy, increment, move, remove, replace, test };
 export declare function getTypes(custom?: JSONPatchOpHandlerMap): {
     test: import("../types.js").JSONPatchOpHandler;

package/dist/json-patch/ops/index.js
CHANGED
@@ -7,7 +7,7 @@ import { remove } from './remove.js';
 import { replace } from './replace.js';
 import { test } from './test.js';
 import { text } from './text.js';
-
+// Export all patch operations
 export { add, bit, copy, increment, move, remove, replace, test };
 export function getTypes(custom) {
     return {

package/dist/json-patch/ops/text.js
CHANGED
@@ -10,7 +10,7 @@ export const text = {
         if (!delta || !Array.isArray(delta.ops)) {
             return 'Invalid delta';
         }
-
+        const existingData = get(state, path);
         let doc;
         if (Array.isArray(existingData)) {
             if (existingData.length && existingData[0].insert) {

package/dist/json-patch/utils/get.js
CHANGED
@@ -1,6 +1,6 @@
 import { getOpData } from './getOpData.js';
 export function get(state, path) {
     // eslint-disable-next-line no-unused-vars
-    const [
+    const [, lastKey, target] = getOpData(state, path);
     return target ? target[lastKey] : undefined;
 }

package/dist/json-patch/utils/getType.d.ts
CHANGED
@@ -1,3 +1,3 @@
 import type { JSONPatchOp, State } from '../types.js';
 export declare function getType(state: State, patch: JSONPatchOp): import("../types.js").JSONPatchOpHandler;
-export declare function getTypeLike(state: State, patch: JSONPatchOp): "replace" | "add" | "remove" | "
+export declare function getTypeLike(state: State, patch: JSONPatchOp): "replace" | "add" | "remove" | "copy" | "move" | "test";

package/dist/net/PatchesSync.d.ts
CHANGED
@@ -1,8 +1,13 @@
 import { Patches } from '../client/Patches.js';
 import type { WebSocketOptions } from './websocket/WebSocketTransport.js';
 export interface PatchesSyncOptions {
+    /** WebSocket connection options */
     wsOptions?: WebSocketOptions;
-
+    /**
+     * Maximum size in bytes for a single payload (network message).
+     * Changes exceeding this will be automatically split.
+     */
+    maxPayloadBytes?: number;
 }
 export interface PatchesSyncState {
     online: boolean;

package/dist/net/PatchesSync.js
CHANGED
@@ -1,3 +1,4 @@
+import { Patches } from '../client/Patches.js';
 import { signal } from '../event-signal.js';
 import { breakIntoBatches } from '../utils/batching.js';
 import { PatchesWebSocket } from './websocket/PatchesWebSocket.js';
@@ -76,6 +77,10 @@ export class PatchesSync {
         this.ws = new PatchesWebSocket(url, options.wsOptions);
         this._state.online = onlineState.isOnline;
         this.trackedDocs = new Set(patches.trackedDocs);
+        // Set maxPayloadBytes on Patches docOptions if provided
+        if (options.maxPayloadBytes) {
+            patches.updateDocOptions({ maxPayloadBytes: options.maxPayloadBytes });
+        }
         // --- Event Listeners ---
         onlineState.onOnlineChange(this._handleOnlineChange);
         this.ws.onStateChange(this._handleConnectionChange);
@@ -233,7 +238,7 @@ export class PatchesSync {
                 return; // Nothing to flush
             }
         }
-        const batches = breakIntoBatches(pending, this.options.
+        const batches = breakIntoBatches(pending, this.options.maxPayloadBytes);
         for (const batch of batches) {
             if (!this.state.connected) {
                 throw new Error('Disconnected during flush');
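
So one `maxPayloadBytes` setting now covers both halves of the pipeline: it is pushed into `Patches.docOptions` (splitting oversized changes as they are created) and reused by `breakIntoBatches` when flushing (splitting oversized batches on the wire). A sketch; the constructor argument order (patches, url, options) and the export path are assumptions inferred from the hunks above:

```ts
import { Patches } from '@dabble/patches';
import { PatchesSync } from '@dabble/patches'; // export path assumed

declare const patches: Patches; // constructed elsewhere with a store

const sync = new PatchesSync(patches, 'wss://sync.example.com', {
  wsOptions: {},               // WebSocket connection options
  maxPayloadBytes: 512 * 1024, // one cap for doc splitting and flush batching
});
```
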
package/dist/server/PatchesServer.js
CHANGED
@@ -94,7 +94,7 @@ export class PatchesServer {
             await this._createVersion(docId, currentState, currentChanges);
         }
         // 3. Load committed changes *after* the client's baseRev for transformation and idempotency checks
-
+        const committedChanges = await this.store.listChanges(docId, {
             startAfter: baseRev,
             withoutBatchId: batchId,
         });

package/dist/types.d.ts
CHANGED
@@ -1,4 +1,4 @@
-import type { JSONPatchOp } from './json-patch/types';
+import type { JSONPatchOp } from './json-patch/types.js';
 export interface Change {
     /** Unique identifier for the change, generated client-side. */
     id: string;
@@ -10,10 +10,10 @@ export interface Change {
     baseRev?: number;
     /** Client-side timestamp when the change was created. */
     created: number;
-    /** Optional arbitrary metadata associated with the change. */
-    metadata?: Record<string, any>;
     /** Optional batch identifier for grouping changes that belong to the same client batch (for multi-batch offline/large edits). */
     batchId?: string;
+    /** Optional arbitrary metadata associated with the change. */
+    [metadata: string]: any;
 }
 /**
  * Represents the state of a document in the OT protocol.
@@ -49,7 +49,7 @@ export interface Branch {
     /** Current status of the branch. */
     status: BranchStatus;
     /** Optional arbitrary metadata associated with the branch record. */
-    metadata?: Record<string, any>;
+    [metadata: string]: any;
 }
 /**
 * Metadata, state snapshot, and included changes for a specific version.
@@ -74,6 +74,8 @@ export interface VersionMetadata {
     rev: number;
     /** The revision number on the main timeline before the changes that created this version. If this is an offline/branch version, this is the revision number of the source document where the branch was created and not . */
     baseRev: number;
+    /** Optional arbitrary metadata associated with the version. */
+    [metadata: string]: any;
 }
 /**
 * Options for listing committed server changes. *Always* ordered by revision number.
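
The typing change replaces the nested `metadata` property with an index signature on `Change`, `Branch`, and `VersionMetadata`, so arbitrary extra keys now live directly on the object. Roughly (the literal below abridges the interface to the fields visible in these hunks):

```ts
import type { Change } from '@dabble/patches';

// 0.2.11 shape: extra data nested under a `metadata` property.
// 0.2.13 shape: extra keys allowed directly via `[metadata: string]: any`.
const change: Change = {
  id: 'chg_1', // generated client-side
  rev: 42,
  ops: [{ op: 'replace', path: '/title', value: 'Hello' }],
  created: Date.now(),
  author: 'user-abc', // extra key, permitted by the index signature
};
```
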
package/dist/utils/batching.d.ts
CHANGED
@@ -1,5 +1,3 @@
 import type { Change } from '../types.js';
-/**
-export declare function
-/** Break changes into batches based on maxBatchSize. */
-export declare function breakIntoBatches(changes: Change[], maxSize?: number): Change[][];
+/** Break changes into batches based on maxPayloadBytes. */
+export declare function breakIntoBatches(changes: Change[], maxPayloadBytes?: number): Change[][];

package/dist/utils/batching.js
CHANGED
@@ -1,12 +1,9 @@
 import { createId } from 'crypto-id';
-
-
-
-
-
-/** Break changes into batches based on maxBatchSize. */
-export function breakIntoBatches(changes, maxSize) {
-    if (!maxSize || getJSONByteSize(changes) < maxSize) {
+import { breakChange } from './breakChange.js'; // Import from new file
+import { getJSONByteSize } from './getJSONByteSize.js'; // Import from new file
+/** Break changes into batches based on maxPayloadBytes. */
+export function breakIntoBatches(changes, maxPayloadBytes) {
+    if (!maxPayloadBytes || getJSONByteSize(changes) < maxPayloadBytes) {
         return [changes];
     }
     const batchId = createId(12);
@@ -16,20 +13,26 @@ export function breakIntoBatches(changes, maxSize) {
     for (const change of changes) {
         // Add batchId if breaking up
         const changeWithBatchId = { ...change, batchId };
-        const
-
-        if (
-
-
-
+        const individualActualSize = getJSONByteSize(changeWithBatchId);
+        let itemsToProcess;
+        if (individualActualSize > maxPayloadBytes) {
+            itemsToProcess = breakChange(changeWithBatchId, maxPayloadBytes);
+        }
+        else {
+            itemsToProcess = [changeWithBatchId];
         }
-
-
-
-        currentSize
+        for (const item of itemsToProcess) {
+            const itemActualSize = getJSONByteSize(item);
+            const itemSizeForBatching = itemActualSize + (currentBatch.length > 0 ? 1 : 0);
+            if (currentBatch.length > 0 && currentSize + itemSizeForBatching > maxPayloadBytes) {
+                batches.push(currentBatch);
+                currentBatch = [];
+                currentSize = 2;
+            }
+            const actualItemContribution = itemActualSize + (currentBatch.length > 0 ? 1 : 0);
+            currentBatch.push(item);
+            currentSize += actualItemContribution;
         }
-        currentBatch.push(changeWithBatchId);
-        currentSize += changeSize;
     }
     if (currentBatch.length > 0) {
         batches.push(currentBatch);
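
The rewritten loop sizes each change individually, routes any single oversized change through `breakChange`, and keeps a running byte count; the `+ 1` terms account for the commas between JSON array elements, and the reset to `currentSize = 2` for the enclosing `[]` brackets. Usage keeps the same shape as before (a sketch; the export path is assumed from the dist layout):

```ts
import type { Change } from '@dabble/patches';
import { breakIntoBatches } from '@dabble/patches'; // export path assumed

declare const pending: Change[];

// No cap, or already small enough: one batch containing all changes.
const whole = breakIntoBatches(pending);

// With a cap: each batch serializes to under 64 KiB, and every change in a
// split batch carries the same generated batchId for server-side grouping.
for (const batch of breakIntoBatches(pending, 64 * 1024)) {
  // send each batch as a single network message
}
```
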
package/dist/utils/breakChange.d.ts
ADDED
@@ -0,0 +1,10 @@
+import type { Change } from '../types.js';
+/**
+ * Break a single Change into multiple Changes so that
+ * JSON.stringify(change).length never exceeds `maxBytes`.
+ *
+ * - Splits first by JSON-Patch *ops*
+ * - If an individual op is still too big and is a "@txt" op,
+ *   split its Delta payload into smaller Deltas
+ */
+export declare function breakChange(orig: Change, maxBytes: number): Change[];

package/dist/utils/breakChange.js
ADDED
@@ -0,0 +1,302 @@
+import { Op } from '@dabble/delta';
+import { createId } from 'crypto-id';
+import { getJSONByteSize } from './getJSONByteSize.js'; // Import from new location
+/**
+ * Break a single Change into multiple Changes so that
+ * JSON.stringify(change).length never exceeds `maxBytes`.
+ *
+ * - Splits first by JSON-Patch *ops*
+ * - If an individual op is still too big and is a "@txt" op,
+ *   split its Delta payload into smaller Deltas
+ */
+export function breakChange(orig, maxBytes) {
+    if (getJSONByteSize(orig) <= maxBytes)
+        return [orig];
+    // First pass: split by ops
+    const byOps = [];
+    let group = [];
+    let rev = orig.rev;
+    const flush = () => {
+        if (!group.length)
+            return;
+        byOps.push({
+            ...orig,
+            id: createId(),
+            rev: rev++,
+            ops: group,
+            created: Date.now(),
+        });
+        group = [];
+    };
+    for (const op of orig.ops) {
+        const tentative = group.concat(op);
+        if (getJSONByteSize({ ...orig, ops: tentative }) > maxBytes)
+            flush();
+        // Handle the case where a single op is too large
+        if (group.length === 0 && getJSONByteSize({ ...orig, ops: [op] }) > maxBytes) {
+            // We have a single op that's too big - can only be @txt op with large delta
+            if (op.op === '@txt' && op.value) {
+                const pieces = breakTextOp(orig, op, maxBytes, rev);
+                byOps.push(...pieces);
+                // Only update rev if we got results from breakTextOp
+                if (pieces.length > 0) {
+                    rev = pieces[pieces.length - 1].rev + 1; // Update rev for next changes
+                }
+                continue;
+            }
+            else if (op.op === 'replace' || op.op === 'add') {
+                // For replace/add operations with large value payloads, try to split the value if it's a string or array
+                const pieces = breakLargeValueOp(orig, op, maxBytes, rev);
+                byOps.push(...pieces);
+                if (pieces.length > 0) {
+                    rev = pieces[pieces.length - 1].rev + 1;
+                }
+                continue;
+            }
+            else {
+                // Non-splittable op that's too large, include it anyway with a warning
+                console.warn(`Warning: Single operation of type ${op.op} exceeds maxBytes. Including it anyway.`);
+                group.push(op);
+                continue;
+            }
+        }
+        group.push(op);
+    }
+    flush();
+    return byOps;
+}
+/**
+ * Break a large @txt operation into multiple smaller operations
+ */
+function breakTextOp(origChange, textOp, maxBytes, startRev) {
+    const results = [];
+    let rev = startRev;
+    // Calculate the budget for the delta content itself
+    const baseSize = getJSONByteSize({ ...origChange, ops: [{ ...textOp, value: '' }] });
+    const budget = maxBytes - baseSize;
+    // Ensure maxLength for splitLargeInsert is at least 1, apply a smaller buffer
+    const buffer = 20; // Reduced buffer
+    const maxLength = Math.max(1, budget - buffer);
+    // Ensure deltaOps is always an array, handle both Delta objects and raw arrays
+    let deltaOps = [];
+    if (textOp.value) {
+        if (Array.isArray(textOp.value)) {
+            // Direct array of ops
+            deltaOps = textOp.value;
+        }
+        else if (textOp.value.ops && Array.isArray(textOp.value.ops)) {
+            // Delta object with ops property
+            deltaOps = textOp.value.ops;
+        }
+        else if (typeof textOp.value === 'object') {
+            // Convert object to array with single op
+            deltaOps = [textOp.value];
+        }
+    }
+    let currentOps = [];
+    let retain = 0;
+    // Helper to create a Change with current accumulated delta ops
+    const flushDelta = () => {
+        if (!currentOps.length)
+            return;
+        const newOp = {
+            ...textOp,
+            value: currentOps,
+        };
+        results.push({
+            ...origChange,
+            id: createId(),
+            rev: rev++,
+            ops: [newOp],
+            created: Date.now(),
+        });
+        currentOps = [];
+    };
+    for (const op of deltaOps) {
+        // Check if adding this op would exceed the size limit
+        const tentativeOps = [...currentOps, op];
+        const tentativeChange = {
+            ...origChange,
+            ops: [{ ...textOp, value: tentativeOps }],
+        };
+        // Add an initial retain op if we're starting a new group of ops and there were prior ops
+        if (currentOps.length === 0 && retain) {
+            currentOps.push({ retain });
+        }
+        if (getJSONByteSize(tentativeChange) > maxBytes) {
+            flushDelta();
+            // Handle the case where a single delta op is too large (e.g., very large text insert)
+            if (currentOps.length === 0 && getJSONByteSize({ ...origChange, ops: [op] }) > maxBytes) {
+                // Split large insert into chunks
+                const retainBeforeChunks = retain; // Capture retain position BEFORE these chunks
+                const [newRetain, chunks] = splitLargeInsert(op, retain, maxLength);
+                retain = newRetain; // Update overall retain state for ops *after* these chunks
+                for (let i = 0; i < chunks.length; i++) {
+                    const chunk = chunks[i];
+                    // Only add retain before the *first* chunk from splitLargeInsert
+                    if (i === 0 && retainBeforeChunks > 0) {
+                        currentOps = [{ retain: retainBeforeChunks }, chunk];
+                    }
+                    else {
+                        currentOps = [chunk];
+                    }
+                    flushDelta(); // Flushes the chunk (potentially with retain on first)
+                }
+                continue;
+            }
+        }
+        currentOps.push(op);
+        if (!op.delete) {
+            retain += Op.length(op);
+        }
+    }
+    // Flush any remaining ops
+    flushDelta();
+    return results;
+}
+/**
+ * Split a large insert operation into multiple smaller ones
+ */
+function splitLargeInsert(insertOp, retain, maxChunkSize) {
+    const results = [];
+    if (!insertOp.insert || typeof insertOp.insert !== 'string') {
+        throw new Error(`Single @txt operation exceeds maxBytes. Cannot split further.`);
+    }
+    const text = insertOp.insert;
+    // const attrs = insertOp.attributes || {}; // attrs not used currently
+    // Ensure maxChunkSize is positive
+    if (maxChunkSize <= 0) {
+        throw new Error(`Calculated maxChunkSize is <= 0, cannot split insert.`);
+    }
+    // Ensure chunkSize is at least 1 to prevent infinite loops
+    const targetChunkSize = Math.max(1, maxChunkSize);
+    const numChunks = Math.ceil(text.length / targetChunkSize);
+    const chunkSize = Math.ceil(text.length / numChunks);
+    for (let i = 0; i < text.length; i += chunkSize) {
+        const chunkText = text.slice(i, i + chunkSize);
+        const op = { ...insertOp, insert: chunkText }; // Keep original attrs
+        // For the first chunk, no retain is needed
+        // Retain calculation seems complex, let breakTextOp handle retains between chunks
+        // if (i !== 0) {
+        //     results.push({ retain });
+        // }
+        results.push(op);
+        // Retain is now managed by the caller (breakTextOp)
+        // retain += Op.length(op);
+    }
+    // Return just the ops, retain calculation happens in breakTextOp
+    return [retain, results]; // This return signature might need review based on usage
+}
+/**
+ * Attempt to break a large value in a replace/add operation
+ */
+function breakLargeValueOp(origChange, op, maxBytes, startRev) {
+    const results = [];
+    let rev = startRev;
+    // Calculate base size without the value to estimate budget for value chunks
+    const baseOpSize = getJSONByteSize({ ...op, value: '' });
+    const baseChangeSize = getJSONByteSize({ ...origChange, ops: [{ ...op, value: '' }] }) - baseOpSize;
+    const valueBudget = maxBytes - baseChangeSize - 50; // 50 bytes buffer for overhead
+    // Special case: if value is a string, we can split it into chunks
+    if (typeof op.value === 'string' && op.value.length > 100) {
+        // Only split reasonably large strings
+        const text = op.value;
+        // Ensure chunkSize is at least 1
+        const targetChunkSize = Math.max(1, valueBudget);
+        const numChunks = Math.ceil(text.length / targetChunkSize);
+        const chunkSize = Math.ceil(text.length / numChunks);
+        for (let i = 0; i < text.length; i += chunkSize) {
+            const chunk = text.slice(i, i + chunkSize);
+            const newOp = { op: 'add' }; // Default to add?
+            if (i === 0) {
+                // First chunk: use original op type (add/replace) and path
+                newOp.op = op.op;
+                newOp.path = op.path;
+                newOp.value = chunk;
+            }
+            else {
+                // Subsequent chunks: use 'add' to append to the string (assuming target is container)
+                // This assumes the path points to an array or object where subsequent adds make sense.
+                // A more robust solution might need context or use a specific 'patch' op.
+                // If path was `/foo/bar`, appending needs `/foo/bar/-` or similar if array?
+                // For now, let's assume path allows adding / maybe this needs a custom 'append' op?
+                // Reverting to a placeholder 'patch' op type needing server interpretation.
+                newOp.op = 'patch';
+                newOp.path = op.path; // Operate on the original path
+                newOp.appendString = chunk;
+            }
+            results.push({
+                ...origChange,
+                id: createId(),
+                rev: rev++,
+                ops: [newOp],
+                created: Date.now(),
+            });
+        }
+        return results;
+    }
+    else if (Array.isArray(op.value) && op.value.length > 1) {
+        // Special case: if value is an array, we can split it into smaller arrays
+        // This requires careful size checking per chunk
+        const originalArray = op.value;
+        let currentChunk = [];
+        let chunkStartIndex = 0;
+        for (let i = 0; i < originalArray.length; i++) {
+            const item = originalArray[i];
+            const tentativeChunk = [...currentChunk, item];
+            const tentativeOp = { ...op, value: tentativeChunk };
+            const tentativeChangeSize = getJSONByteSize({ ...origChange, ops: [tentativeOp] });
+            if (currentChunk.length > 0 && tentativeChangeSize > maxBytes) {
+                // Flush current chunk
+                const chunkOp = {};
+                if (chunkStartIndex === 0) {
+                    chunkOp.op = op.op;
+                    chunkOp.path = op.path;
+                    chunkOp.value = currentChunk;
+                }
+                else {
+                    // Append subsequent chunks - needs server support for 'appendArray'
+                    chunkOp.op = 'patch';
+                    chunkOp.path = op.path;
+                    chunkOp.appendArray = currentChunk;
+                }
+                results.push({
+                    ...origChange,
+                    id: createId(),
+                    rev: rev++,
+                    ops: [chunkOp],
+                    created: Date.now(),
+                });
+                currentChunk = [item]; // Start new chunk with current item
+                chunkStartIndex = i;
+            }
+            else {
+                currentChunk.push(item);
+            }
+        }
+        // Flush the last chunk
+        if (currentChunk.length > 0) {
+            const chunkOp = {};
+            if (chunkStartIndex === 0) {
+                chunkOp.op = op.op;
+                chunkOp.path = op.path;
+                chunkOp.value = currentChunk;
+            }
+            else {
+                chunkOp.op = 'patch';
+                chunkOp.path = op.path;
+                chunkOp.appendArray = currentChunk;
+            }
+            results.push({
+                ...origChange,
+                id: createId(),
+                rev: rev++,
+                ops: [chunkOp],
+                created: Date.now(),
+            });
+        }
+        return results;
+    }
+    // If we can't split it, throw an error
+    throw new Error(`Single operation of type ${op.op} (path: ${op.path}) exceeds maxBytes and can't be split further.`);
+}
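
Calling the helper directly shows the contract the `flush` closure above establishes: each piece gets a fresh `id`, consecutive `rev` numbers starting from the original change's `rev`, and a new `created` timestamp. A sketch (the Change literal is illustrative):

```ts
import type { Change } from '@dabble/patches';
import { breakChange } from '@dabble/patches'; // export path assumed

const big: Change = {
  id: 'chg_big',
  rev: 7,
  created: Date.now(),
  ops: [{ op: '@txt', path: '/body', value: { ops: [{ insert: 'x'.repeat(50_000) }] } }],
};

const pieces = breakChange(big, 16 * 1024);
// pieces[0].rev === 7, pieces[1].rev === 8, ... and continuation pieces carry
// a leading { retain } op so the split deltas still apply at the right offset.
```
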
package/dist/utils/getJSONByteSize.js
ADDED
@@ -0,0 +1,13 @@
+import { TextEncoder } from 'util'; // Node.js TextEncoder
+/** Estimate JSON string byte size. */
+export function getJSONByteSize(data) {
+    // Basic estimation, might not be perfectly accurate due to encoding nuances
+    try {
+        return new TextEncoder().encode(JSON.stringify(data)).length;
+    }
+    catch (e) {
+        // Handle circular structures or other stringify errors
+        console.error('Error calculating JSON size:', e);
+        return Infinity; // Treat errors as infinitely large
+    }
+}
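
The helper measures the UTF-8 byte length of the serialized value, which can exceed `JSON.stringify(data).length` for non-ASCII text, and maps stringify failures to `Infinity` so callers treat unserializable values as oversized. For instance:

```ts
import { getJSONByteSize } from '@dabble/patches'; // export path assumed

getJSONByteSize({ a: 1 }); // 7  -> '{"a":1}'
getJSONByteSize('héllo');  // 8  -> '"héllo"' is 7 characters, but é is 2 bytes

const circular: Record<string, unknown> = {};
circular.self = circular;
getJSONByteSize(circular); // Infinity (JSON.stringify throws, caught above)
```

One design note: importing `TextEncoder` from 'util' ties this module to Node, as the code's own comment flags; browsers expose a global `TextEncoder` that would serve the same purpose.
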
package/dist/utils.d.ts
CHANGED
@@ -7,7 +7,7 @@ import type { Change, Deferred } from './types.js';
 * @param changes - Array of changes to split
 * @returns A tuple containing [changes before baseRev, changes with and after baseRev]
 */
-export declare function splitChanges
+export declare function splitChanges(changes: Change[]): [Change[], Change[]];
 /**
 * Applies a sequence of changes to a state object.
 * Each change is applied in sequence using the applyPatch function.

package/package.json
CHANGED
@@ -1,6 +1,6 @@
 {
   "name": "@dabble/patches",
-  "version": "0.2.11",
+  "version": "0.2.13",
   "description": "Immutable JSON Patch implementation based on RFC 6902 supporting operational transformation and last-writer-wins",
   "author": "Jacob Wright <jacwright@gmail.com>",
   "bugs": {
@@ -66,13 +66,13 @@
     "@dabble/delta": "^1.2.4"
   },
   "devDependencies": {
-    "@sveltejs/package": "^2.3.
+    "@sveltejs/package": "^2.3.11",
     "@types/simple-peer": "^9.11.8",
     "fake-indexeddb": "^6.0.0",
     "prettier": "^3.5.3",
     "typescript": "^5.8.3",
-    "vite": "^6.
+    "vite": "^6.3.5",
     "vite-plugin-dts": "^4.5.3",
-    "vitest": "^3.1.
+    "vitest": "^3.1.3"
   }
 }