@tanstack/db 0.5.11 → 0.5.12
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/dist/cjs/SortedMap.cjs +40 -26
- package/dist/cjs/SortedMap.cjs.map +1 -1
- package/dist/cjs/SortedMap.d.cts +10 -15
- package/dist/cjs/collection/change-events.cjs.map +1 -1
- package/dist/cjs/collection/changes.cjs.map +1 -1
- package/dist/cjs/collection/events.cjs.map +1 -1
- package/dist/cjs/collection/events.d.cts +12 -4
- package/dist/cjs/collection/index.cjs +2 -1
- package/dist/cjs/collection/index.cjs.map +1 -1
- package/dist/cjs/collection/indexes.cjs.map +1 -1
- package/dist/cjs/collection/lifecycle.cjs.map +1 -1
- package/dist/cjs/collection/mutations.cjs +5 -2
- package/dist/cjs/collection/mutations.cjs.map +1 -1
- package/dist/cjs/collection/state.cjs +6 -5
- package/dist/cjs/collection/state.cjs.map +1 -1
- package/dist/cjs/collection/state.d.cts +4 -1
- package/dist/cjs/collection/subscription.cjs +60 -53
- package/dist/cjs/collection/subscription.cjs.map +1 -1
- package/dist/cjs/collection/subscription.d.cts +18 -4
- package/dist/cjs/collection/sync.cjs.map +1 -1
- package/dist/cjs/errors.cjs +9 -0
- package/dist/cjs/errors.cjs.map +1 -1
- package/dist/cjs/errors.d.cts +3 -0
- package/dist/cjs/event-emitter.cjs.map +1 -1
- package/dist/cjs/index.cjs +2 -0
- package/dist/cjs/index.cjs.map +1 -1
- package/dist/cjs/index.d.cts +1 -1
- package/dist/cjs/indexes/auto-index.cjs.map +1 -1
- package/dist/cjs/indexes/base-index.cjs.map +1 -1
- package/dist/cjs/indexes/btree-index.cjs +8 -6
- package/dist/cjs/indexes/btree-index.cjs.map +1 -1
- package/dist/cjs/indexes/lazy-index.cjs.map +1 -1
- package/dist/cjs/indexes/reverse-index.cjs.map +1 -1
- package/dist/cjs/local-only.cjs.map +1 -1
- package/dist/cjs/local-storage.cjs.map +1 -1
- package/dist/cjs/optimistic-action.cjs.map +1 -1
- package/dist/cjs/paced-mutations.cjs.map +1 -1
- package/dist/cjs/proxy.cjs.map +1 -1
- package/dist/cjs/query/builder/functions.cjs.map +1 -1
- package/dist/cjs/query/builder/index.cjs.map +1 -1
- package/dist/cjs/query/builder/ref-proxy.cjs.map +1 -1
- package/dist/cjs/query/compiler/evaluators.cjs.map +1 -1
- package/dist/cjs/query/compiler/expressions.cjs.map +1 -1
- package/dist/cjs/query/compiler/group-by.cjs.map +1 -1
- package/dist/cjs/query/compiler/index.cjs.map +1 -1
- package/dist/cjs/query/compiler/joins.cjs.map +1 -1
- package/dist/cjs/query/compiler/order-by.cjs +91 -38
- package/dist/cjs/query/compiler/order-by.cjs.map +1 -1
- package/dist/cjs/query/compiler/order-by.d.cts +6 -2
- package/dist/cjs/query/compiler/select.cjs.map +1 -1
- package/dist/cjs/query/expression-helpers.cjs.map +1 -1
- package/dist/cjs/query/index.d.cts +1 -1
- package/dist/cjs/query/ir.cjs.map +1 -1
- package/dist/cjs/query/live/collection-config-builder.cjs.map +1 -1
- package/dist/cjs/query/live/collection-registry.cjs.map +1 -1
- package/dist/cjs/query/live/collection-subscriber.cjs +30 -15
- package/dist/cjs/query/live/collection-subscriber.cjs.map +1 -1
- package/dist/cjs/query/live/internal.cjs.map +1 -1
- package/dist/cjs/query/live-query-collection.cjs.map +1 -1
- package/dist/cjs/query/optimizer.cjs.map +1 -1
- package/dist/cjs/query/predicate-utils.cjs +19 -2
- package/dist/cjs/query/predicate-utils.cjs.map +1 -1
- package/dist/cjs/query/predicate-utils.d.cts +32 -1
- package/dist/cjs/query/subset-dedupe.cjs.map +1 -1
- package/dist/cjs/scheduler.cjs.map +1 -1
- package/dist/cjs/strategies/debounceStrategy.cjs.map +1 -1
- package/dist/cjs/strategies/queueStrategy.cjs.map +1 -1
- package/dist/cjs/strategies/throttleStrategy.cjs.map +1 -1
- package/dist/cjs/transactions.cjs.map +1 -1
- package/dist/cjs/types.d.cts +43 -5
- package/dist/cjs/utils/browser-polyfills.cjs.map +1 -1
- package/dist/cjs/utils/btree.cjs.map +1 -1
- package/dist/cjs/utils/comparison.cjs.map +1 -1
- package/dist/cjs/utils/cursor.cjs +39 -0
- package/dist/cjs/utils/cursor.cjs.map +1 -0
- package/dist/cjs/utils/cursor.d.cts +18 -0
- package/dist/cjs/utils/index-optimization.cjs.map +1 -1
- package/dist/cjs/utils.cjs.map +1 -1
- package/dist/esm/SortedMap.d.ts +10 -15
- package/dist/esm/SortedMap.js +40 -26
- package/dist/esm/SortedMap.js.map +1 -1
- package/dist/esm/collection/change-events.js.map +1 -1
- package/dist/esm/collection/changes.js.map +1 -1
- package/dist/esm/collection/events.d.ts +12 -4
- package/dist/esm/collection/events.js.map +1 -1
- package/dist/esm/collection/index.js +2 -1
- package/dist/esm/collection/index.js.map +1 -1
- package/dist/esm/collection/indexes.js.map +1 -1
- package/dist/esm/collection/lifecycle.js.map +1 -1
- package/dist/esm/collection/mutations.js +6 -3
- package/dist/esm/collection/mutations.js.map +1 -1
- package/dist/esm/collection/state.d.ts +4 -1
- package/dist/esm/collection/state.js +6 -5
- package/dist/esm/collection/state.js.map +1 -1
- package/dist/esm/collection/subscription.d.ts +18 -4
- package/dist/esm/collection/subscription.js +61 -54
- package/dist/esm/collection/subscription.js.map +1 -1
- package/dist/esm/collection/sync.js.map +1 -1
- package/dist/esm/errors.d.ts +3 -0
- package/dist/esm/errors.js +9 -0
- package/dist/esm/errors.js.map +1 -1
- package/dist/esm/event-emitter.js.map +1 -1
- package/dist/esm/index.d.ts +1 -1
- package/dist/esm/index.js +4 -2
- package/dist/esm/indexes/auto-index.js.map +1 -1
- package/dist/esm/indexes/base-index.js.map +1 -1
- package/dist/esm/indexes/btree-index.js +8 -6
- package/dist/esm/indexes/btree-index.js.map +1 -1
- package/dist/esm/indexes/lazy-index.js.map +1 -1
- package/dist/esm/indexes/reverse-index.js.map +1 -1
- package/dist/esm/local-only.js.map +1 -1
- package/dist/esm/local-storage.js.map +1 -1
- package/dist/esm/optimistic-action.js.map +1 -1
- package/dist/esm/paced-mutations.js.map +1 -1
- package/dist/esm/proxy.js.map +1 -1
- package/dist/esm/query/builder/functions.js.map +1 -1
- package/dist/esm/query/builder/index.js.map +1 -1
- package/dist/esm/query/builder/ref-proxy.js.map +1 -1
- package/dist/esm/query/compiler/evaluators.js.map +1 -1
- package/dist/esm/query/compiler/expressions.js.map +1 -1
- package/dist/esm/query/compiler/group-by.js.map +1 -1
- package/dist/esm/query/compiler/index.js.map +1 -1
- package/dist/esm/query/compiler/joins.js.map +1 -1
- package/dist/esm/query/compiler/order-by.d.ts +6 -2
- package/dist/esm/query/compiler/order-by.js +91 -38
- package/dist/esm/query/compiler/order-by.js.map +1 -1
- package/dist/esm/query/compiler/select.js.map +1 -1
- package/dist/esm/query/expression-helpers.js.map +1 -1
- package/dist/esm/query/index.d.ts +1 -1
- package/dist/esm/query/ir.js.map +1 -1
- package/dist/esm/query/live/collection-config-builder.js.map +1 -1
- package/dist/esm/query/live/collection-registry.js.map +1 -1
- package/dist/esm/query/live/collection-subscriber.js +30 -15
- package/dist/esm/query/live/collection-subscriber.js.map +1 -1
- package/dist/esm/query/live/internal.js.map +1 -1
- package/dist/esm/query/live-query-collection.js.map +1 -1
- package/dist/esm/query/optimizer.js.map +1 -1
- package/dist/esm/query/predicate-utils.d.ts +32 -1
- package/dist/esm/query/predicate-utils.js +19 -2
- package/dist/esm/query/predicate-utils.js.map +1 -1
- package/dist/esm/query/subset-dedupe.js.map +1 -1
- package/dist/esm/scheduler.js.map +1 -1
- package/dist/esm/strategies/debounceStrategy.js.map +1 -1
- package/dist/esm/strategies/queueStrategy.js.map +1 -1
- package/dist/esm/strategies/throttleStrategy.js.map +1 -1
- package/dist/esm/transactions.js.map +1 -1
- package/dist/esm/types.d.ts +43 -5
- package/dist/esm/utils/browser-polyfills.js.map +1 -1
- package/dist/esm/utils/btree.js.map +1 -1
- package/dist/esm/utils/comparison.js.map +1 -1
- package/dist/esm/utils/cursor.d.ts +18 -0
- package/dist/esm/utils/cursor.js +39 -0
- package/dist/esm/utils/cursor.js.map +1 -0
- package/dist/esm/utils/index-optimization.js.map +1 -1
- package/dist/esm/utils.js.map +1 -1
- package/package.json +30 -28
- package/src/SortedMap.ts +50 -31
- package/src/collection/change-events.ts +20 -20
- package/src/collection/changes.ts +12 -12
- package/src/collection/events.ts +20 -10
- package/src/collection/index.ts +47 -46
- package/src/collection/indexes.ts +14 -14
- package/src/collection/lifecycle.ts +16 -16
- package/src/collection/mutations.ts +25 -20
- package/src/collection/state.ts +43 -36
- package/src/collection/subscription.ts +114 -83
- package/src/collection/sync.ts +13 -13
- package/src/duplicate-instance-check.ts +1 -1
- package/src/errors.ts +49 -40
- package/src/event-emitter.ts +5 -5
- package/src/index.ts +21 -21
- package/src/indexes/auto-index.ts +11 -11
- package/src/indexes/base-index.ts +13 -13
- package/src/indexes/btree-index.ts +21 -17
- package/src/indexes/index-options.ts +3 -3
- package/src/indexes/lazy-index.ts +8 -8
- package/src/indexes/reverse-index.ts +5 -5
- package/src/local-only.ts +12 -12
- package/src/local-storage.ts +17 -17
- package/src/optimistic-action.ts +5 -5
- package/src/paced-mutations.ts +6 -6
- package/src/proxy.ts +43 -43
- package/src/query/builder/functions.ts +28 -28
- package/src/query/builder/index.ts +22 -22
- package/src/query/builder/ref-proxy.ts +4 -4
- package/src/query/builder/types.ts +8 -8
- package/src/query/compiler/evaluators.ts +9 -9
- package/src/query/compiler/expressions.ts +6 -6
- package/src/query/compiler/group-by.ts +24 -24
- package/src/query/compiler/index.ts +44 -44
- package/src/query/compiler/joins.ts +37 -37
- package/src/query/compiler/order-by.ts +170 -77
- package/src/query/compiler/select.ts +13 -13
- package/src/query/compiler/types.ts +2 -2
- package/src/query/expression-helpers.ts +16 -16
- package/src/query/index.ts +10 -9
- package/src/query/ir.ts +13 -13
- package/src/query/live/collection-config-builder.ts +53 -53
- package/src/query/live/collection-registry.ts +6 -6
- package/src/query/live/collection-subscriber.ts +87 -48
- package/src/query/live/internal.ts +1 -1
- package/src/query/live/types.ts +4 -4
- package/src/query/live-query-collection.ts +15 -15
- package/src/query/optimizer.ts +29 -29
- package/src/query/predicate-utils.ts +105 -50
- package/src/query/subset-dedupe.ts +6 -6
- package/src/scheduler.ts +3 -3
- package/src/strategies/debounceStrategy.ts +6 -6
- package/src/strategies/index.ts +4 -4
- package/src/strategies/queueStrategy.ts +5 -5
- package/src/strategies/throttleStrategy.ts +6 -6
- package/src/strategies/types.ts +2 -2
- package/src/transactions.ts +9 -9
- package/src/types.ts +51 -12
- package/src/utils/array-utils.ts +1 -1
- package/src/utils/browser-polyfills.ts +2 -2
- package/src/utils/btree.ts +22 -22
- package/src/utils/comparison.ts +3 -3
- package/src/utils/cursor.ts +78 -0
- package/src/utils/index-optimization.ts +14 -14
- package/src/utils.ts +4 -4
package/dist/cjs/errors.d.cts
CHANGED

@@ -54,6 +54,9 @@ export declare class CollectionOperationError extends TanStackDBError {
 export declare class UndefinedKeyError extends CollectionOperationError {
     constructor(item: any);
 }
+export declare class InvalidKeyError extends CollectionOperationError {
+    constructor(key: unknown, item: unknown);
+}
 export declare class DuplicateKeyError extends CollectionOperationError {
     constructor(key: string | number);
 }
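With this declaration, `InvalidKeyError` joins `UndefinedKeyError` and `DuplicateKeyError` under `CollectionOperationError`, so consumers can narrow failed writes with `instanceof`. Below is a minimal sketch of that narrowing; the `reportKeyProblem` wrapper is hypothetical, and it assumes the sibling error classes are exported from the package root the same way `InvalidKeyError` now is (see the index.cjs hunk below).

```ts
import {
  CollectionOperationError,
  DuplicateKeyError,
  InvalidKeyError,
  UndefinedKeyError,
} from '@tanstack/db'

// Hypothetical helper: run a collection write and translate key errors
// into a human-readable reason; rethrow anything unrelated to keys.
function reportKeyProblem(write: () => void): string | undefined {
  try {
    write()
  } catch (error) {
    // InvalidKeyError extends CollectionOperationError, so check the
    // specific subclasses before the broad base class.
    if (error instanceof InvalidKeyError) return `key is not a valid key value`
    if (error instanceof UndefinedKeyError) return `getKey returned undefined`
    if (error instanceof DuplicateKeyError) return `key already exists`
    if (error instanceof CollectionOperationError) return `collection operation failed`
    throw error
  }
  return undefined
}
```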
package/dist/cjs/event-emitter.cjs.map
CHANGED
(source map regenerated: the single-line JSON's embedded sourcesContent and mappings were rewritten)
package/dist/cjs/index.cjs
CHANGED

@@ -63,6 +63,7 @@ exports.InvalidJoinConditionLeftSourceError = errors.InvalidJoinConditionLeftSourceError;
 exports.InvalidJoinConditionRightSourceError = errors.InvalidJoinConditionRightSourceError;
 exports.InvalidJoinConditionSameSourceError = errors.InvalidJoinConditionSameSourceError;
 exports.InvalidJoinConditionSourceMismatchError = errors.InvalidJoinConditionSourceMismatchError;
+exports.InvalidKeyError = errors.InvalidKeyError;
 exports.InvalidSchemaError = errors.InvalidSchemaError;
 exports.InvalidSourceError = errors.InvalidSourceError;
 exports.InvalidSourceTypeError = errors.InvalidSourceTypeError;
@@ -165,6 +166,7 @@ exports.compileQuery = index$2.compileQuery;
 exports.createLiveQueryCollection = liveQueryCollection.createLiveQueryCollection;
 exports.liveQueryCollectionOptions = liveQueryCollection.liveQueryCollectionOptions;
 exports.isLimitSubset = predicateUtils.isLimitSubset;
+exports.isOffsetLimitSubset = predicateUtils.isOffsetLimitSubset;
 exports.isOrderBySubset = predicateUtils.isOrderBySubset;
 exports.isPredicateSubset = predicateUtils.isPredicateSubset;
 exports.isWhereSubset = predicateUtils.isWhereSubset;
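Net effect of these two hunks: `InvalidKeyError` (from `errors`) and `isOffsetLimitSubset` (from `predicate-utils`) become part of the package's public root exports, next to the existing predicate subset helpers. A consumer-side import sketch is below; the call signature of `isOffsetLimitSubset` is not shown in this diff, so only the imports appear here.

```ts
// New in 0.5.12: both symbols are importable from the package root,
// alongside the subset helpers already exported in 0.5.11.
import {
  InvalidKeyError,
  isLimitSubset,
  isOffsetLimitSubset,
  isOrderBySubset,
  isPredicateSubset,
  isWhereSubset,
} from '@tanstack/db'
```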
package/dist/cjs/index.cjs.map
CHANGED
(source map regenerated for index.cjs: mappings updated to cover the two added re-export lines)
package/dist/cjs/index.d.cts
CHANGED

package/dist/cjs/indexes/auto-index.cjs.map
CHANGED
(source map regenerated for auto-index.cjs: embedded sourcesContent and mappings updated)

package/dist/cjs/indexes/base-index.cjs.map
CHANGED
(source map regenerated for base-index.cjs: embedded sourcesContent and mappings updated)
package/dist/cjs/indexes/btree-index.cjs
CHANGED

@@ -1,5 +1,6 @@
 "use strict";
 Object.defineProperty(exports, Symbol.toStringTag, { value: "Module" });
+const dbIvm = require("@tanstack/db-ivm");
 const btree = require("../utils/btree.cjs");
 const comparison = require("../utils/comparison.cjs");
 const baseIndex = require("./base-index.cjs");
@@ -183,7 +184,7 @@ class BTreeIndex extends baseIndex.BaseIndex {
       toInclusive: fromInclusive
     });
   }
-  takeInternal(n, nextPair, from, filterFn) {
+  takeInternal(n, nextPair, from, filterFn, reversed = false) {
     const keysInResult = /* @__PURE__ */ new Set();
     const result = [];
     let pair;
@@ -191,10 +192,11 @@ class BTreeIndex extends baseIndex.BaseIndex {
     while ((pair = nextPair(key)) !== void 0 && result.length < n) {
       key = pair[0];
       const keys = this.valueMap.get(key);
-      if (keys) {
-        const it = keys.values();
-        let ks;
-        while (result.length < n && (ks = it.next().value)) {
+      if (keys && keys.size > 0) {
+        const sorted = Array.from(keys).sort(dbIvm.compareKeys);
+        if (reversed) sorted.reverse();
+        for (const ks of sorted) {
+          if (result.length >= n) break;
           if (!keysInResult.has(ks) && (filterFn?.(ks) ?? true)) {
             result.push(ks);
             keysInResult.add(ks);
@@ -222,7 +224,7 @@ class BTreeIndex extends baseIndex.BaseIndex {
    */
   takeReversed(n, from, filterFn) {
     const nextPair = (k) => this.orderedEntries.nextLowerPair(k);
-    return this.takeInternal(n, nextPair, from, filterFn);
+    return this.takeInternal(n, nextPair, from, filterFn, true);
   }
   /**
    * Performs an IN array lookup
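The behavioural change here: when several keys share the same indexed value, `takeInternal` now returns them in a deterministic order, sorted with `compareKeys` from `@tanstack/db-ivm` and reversed when called from `takeReversed`, rather than in raw `Set` iteration order. A standalone sketch of just that ordering step, with a simple stand-in comparator since the exact semantics of `compareKeys` are not part of this diff:

```ts
// Stand-in comparator for illustration; the compiled output above uses
// compareKeys from @tanstack/db-ivm, whose tie-breaking rules are not shown here.
const compareKeys = (a: string, b: string): number => (a < b ? -1 : a > b ? 1 : 0)

// Keys that all map to the same indexed value, in Set insertion order.
const keysForValue = new Set(['user:42', 'user:7', 'user:19'])

// 0.5.11 iterated the Set directly (insertion order); 0.5.12 sorts the keys
// first and reverses the sorted array for the descending (takeReversed) path.
function orderedKeys(keys: Set<string>, reversed = false): Array<string> {
  const sorted = Array.from(keys).sort(compareKeys)
  if (reversed) sorted.reverse()
  return sorted
}

console.log(orderedKeys(keysForValue))       // ["user:19", "user:42", "user:7"]
console.log(orderedKeys(keysForValue, true)) // ["user:7", "user:42", "user:19"]
```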
package/dist/cjs/indexes/btree-index.cjs.map
CHANGED
(source map regenerated for btree-index.cjs: the embedded sourcesContent now imports compareKeys from @tanstack/db-ivm and contains the updated takeInternal/takeReversed; mappings rewritten accordingly)
WAAO;AAAA,EACT;AAAA;AAAA;AAAA;AAAA,EAKA,IAAI,WAAmB;AACrB,WAAO,KAAK,YAAY;AAAA,EAC1B;AAAA;AAAA;AAAA;AAAA;AAAA,EAOA,eAAe,OAAuB;AACpC,UAAM,kBAAkBA,WAAAA,eAAe,KAAK;AAC5C,WAAO,IAAI,IAAI,KAAK,SAAS,IAAI,eAAe,KAAK,EAAE;AAAA,EACzD;AAAA;AAAA;AAAA;AAAA;AAAA,EAMA,WAAW,UAA6B,IAAe;AACrD,UAAM,EAAE,MAAM,IAAI,gBAAgB,MAAM,cAAc,SAAS;AAC/D,UAAM,6BAAa,IAAA;AAEnB,UAAM,iBAAiBA,WAAAA,eAAe,IAAI;AAC1C,UAAM,eAAeA,WAAAA,eAAe,EAAE;AACtC,UAAM,UAAU,kBAAkB,KAAK,eAAe,OAAA;AACtD,UAAM,QAAQ,gBAAgB,KAAK,eAAe,OAAA;AAElD,SAAK,eAAe;AAAA,MAClB;AAAA,MACA;AAAA,MACA;AAAA,MACA,CAAC,cAAc,MAAM;AACnB,YAAI,CAAC,iBAAiB,KAAK,UAAU,cAAc,IAAI,MAAM,GAAG;AAG9D;AAAA,QACF;AAEA,cAAM,OAAO,KAAK,SAAS,IAAI,YAAY;AAC3C,YAAI,MAAM;AACR,eAAK,QAAQ,CAAC,QAAQ,OAAO,IAAI,GAAG,CAAC;AAAA,QACvC;AAAA,MACF;AAAA,IAAA;AAGF,WAAO;AAAA,EACT;AAAA;AAAA;AAAA;AAAA,EAKA,mBAAmB,UAA6B,IAAe;AAC7D,UAAM,EAAE,MAAM,IAAI,gBAAgB,MAAM,cAAc,SAAS;AAC/D,WAAO,KAAK,WAAW;AAAA,MACrB,MAAM,MAAM,KAAK,eAAe,OAAA;AAAA,MAChC,IAAI,QAAQ,KAAK,eAAe,OAAA;AAAA,MAChC,eAAe;AAAA,MACf,aAAa;AAAA,IAAA,CACd;AAAA,EACH;AAAA,EAEQ,aACN,GACA,UACA,MACA,UACA,WAAoB,OACP;AACb,UAAM,mCAA8B,IAAA;AACpC,UAAM,SAAsB,CAAA;AAC5B,QAAI;AACJ,QAAI,MAAMA,WAAAA,eAAe,IAAI;AAE7B,YAAQ,OAAO,SAAS,GAAG,OAAO,UAAa,OAAO,SAAS,GAAG;AAChE,YAAM,KAAK,CAAC;AACZ,YAAM,OAAO,KAAK,SAAS,IAAI,GAAG;AAClC,UAAI,QAAQ,KAAK,OAAO,GAAG;AAEzB,cAAM,SAAS,MAAM,KAAK,IAAI,EAAE,KAAKC,iBAAW;AAChD,YAAI,iBAAiB,QAAA;AACrB,mBAAW,MAAM,QAAQ;AACvB,cAAI,OAAO,UAAU,EAAG;AACxB,cAAI,CAAC,aAAa,IAAI,EAAE,MAAM,WAAW,EAAE,KAAK,OAAO;AACrD,mBAAO,KAAK,EAAE;AACd,yBAAa,IAAI,EAAE;AAAA,UACrB;AAAA,QACF;AAAA,MACF;AAAA,IACF;AAEA,WAAO;AAAA,EACT;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,EAQA,KAAK,GAAW,MAAY,UAAgD;AAC1E,UAAM,WAAW,CAAC,MAAY,KAAK,eAAe,eAAe,CAAC;AAClE,WAAO,KAAK,aAAa,GAAG,UAAU,MAAM,QAAQ;AAAA,EACtD;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,EAQA,aACE,GACA,MACA,UACa;AACb,UAAM,WAAW,CAAC,MAAY,KAAK,eAAe,cAAc,CAAC;AACjE,WAAO,KAAK,aAAa,GAAG,UAAU,MAAM,UAAU,IAAI;AAAA,EAC5D;AAAA;AAAA;AAAA;AAAA,EAKA,cAAc,QAA+B;AAC3C,UAAM,6BAAa,IAAA;AAEnB,eAAW,SAAS,QAAQ;AAC1B,YAAM,kBAAkBD,WAAAA,eAAe,KAAK;AAC5C,YAAM,OAAO,KAAK,SAAS,IAAI,eAAe;AAC9C,UAAI,MAAM;AACR,aAAK,QAAQ,CAAC,QAAQ,OAAO,IAAI,GAAG,CAAC;AAAA,MACvC;AAAA,IACF;AAEA,WAAO;AAAA,EACT;AAAA;AAAA,EAGA,IAAI,iBAA4B;AAC9B,WAAO,KAAK;AAAA,EACd;AAAA,EAEA,IAAI,sBAA+C;AACjD,WAAO,KAAK,eACT,UAAA,EACA,IAAI,CAAC,QAAQ,CAAC,KAAK,KAAK,SAAS,IAAI,GAAG,KAAK,oBAAI,IAAA,CAAK,CAAC;AAAA,EAC5D;AAAA,EAEA,IAAI,8BAAuD;AACzD,WAAO,KAAK,aAAa,KAAK,eAAe,IAAI,EAAE,IAAI,CAAC,QAAQ;AAAA,MAC9D;AAAA,MACA,KAAK,SAAS,IAAI,GAAG,yBAAS,IAAA;AAAA,IAAI,CACnC;AAAA,EACH;AAAA,EAEA,IAAI,eAAoC;AACtC,WAAO,KAAK;AAAA,EACd;AACF;;"}
@@ -1 +1 @@
-
{"version":3,"file":"lazy-index.cjs","sources":["../../../src/indexes/lazy-index.ts"],"sourcesContent":["import type {\n BaseIndex,\n IndexConstructor,\n IndexResolver,\n} from
+
{"version":3,"file":"lazy-index.cjs","sources":["../../../src/indexes/lazy-index.ts"],"sourcesContent":["import type {\n BaseIndex,\n IndexConstructor,\n IndexResolver,\n} from './base-index.js'\nimport type { BasicExpression } from '../query/ir.js'\n\n/**\n * Utility to determine if a resolver is a constructor or async loader\n */\nfunction isConstructor<TKey extends string | number>(\n resolver: IndexResolver<TKey>,\n): resolver is IndexConstructor<TKey> {\n // Check if it's a function with a prototype (constructor)\n return (\n typeof resolver === `function` &&\n resolver.prototype !== undefined &&\n resolver.prototype.constructor === resolver\n )\n}\n\n/**\n * Resolve index constructor from resolver\n */\nasync function resolveIndexConstructor<TKey extends string | number>(\n resolver: IndexResolver<TKey>,\n): Promise<IndexConstructor<TKey>> {\n if (isConstructor(resolver)) {\n return resolver\n } else {\n // It's an async loader function\n return await resolver()\n }\n}\n\n/**\n * Wrapper that defers index creation until first sync\n */\nexport class LazyIndexWrapper<TKey extends string | number = string | number> {\n private indexPromise: Promise<BaseIndex<TKey>> | null = null\n private resolvedIndex: BaseIndex<TKey> | null = null\n\n constructor(\n private id: number,\n private expression: BasicExpression,\n private name: string | undefined,\n private resolver: IndexResolver<TKey>,\n private options: any,\n private collectionEntries?: Iterable<[TKey, any]>,\n ) {\n // For synchronous constructors, resolve immediately\n if (isConstructor(this.resolver)) {\n this.resolvedIndex = new this.resolver(\n this.id,\n this.expression,\n this.name,\n this.options,\n )\n // Build with initial data if provided\n if (this.collectionEntries) {\n this.resolvedIndex.build(this.collectionEntries)\n }\n }\n }\n\n /**\n * Resolve the actual index\n */\n async resolve(): Promise<BaseIndex<TKey>> {\n if (this.resolvedIndex) {\n return this.resolvedIndex\n }\n\n if (!this.indexPromise) {\n this.indexPromise = this.createIndex()\n }\n\n this.resolvedIndex = await this.indexPromise\n return this.resolvedIndex\n }\n\n /**\n * Check if already resolved\n */\n isResolved(): boolean {\n return this.resolvedIndex !== null\n }\n\n /**\n * Get resolved index (throws if not ready)\n */\n getResolved(): BaseIndex<TKey> {\n if (!this.resolvedIndex) {\n throw new Error(\n `Index ${this.id} has not been resolved yet. 
Ensure collection is synced.`,\n )\n }\n return this.resolvedIndex\n }\n\n /**\n * Get the index ID\n */\n getId(): number {\n return this.id\n }\n\n /**\n * Get the index name\n */\n getName(): string | undefined {\n return this.name\n }\n\n /**\n * Get the index expression\n */\n getExpression(): BasicExpression {\n return this.expression\n }\n\n private async createIndex(): Promise<BaseIndex<TKey>> {\n const IndexClass = await resolveIndexConstructor(this.resolver)\n return new IndexClass(this.id, this.expression, this.name, this.options)\n }\n}\n\n/**\n * Proxy that provides synchronous interface while index loads asynchronously\n */\nexport class IndexProxy<TKey extends string | number = string | number> {\n constructor(\n private indexId: number,\n private lazyIndex: LazyIndexWrapper<TKey>,\n ) {}\n\n /**\n * Get the resolved index (throws if not ready)\n */\n get index(): BaseIndex<TKey> {\n return this.lazyIndex.getResolved()\n }\n\n /**\n * Check if index is ready\n */\n get isReady(): boolean {\n return this.lazyIndex.isResolved()\n }\n\n /**\n * Wait for index to be ready\n */\n async whenReady(): Promise<BaseIndex<TKey>> {\n return await this.lazyIndex.resolve()\n }\n\n /**\n * Get the index ID\n */\n get id(): number {\n return this.indexId\n }\n\n /**\n * Get the index name (throws if not ready)\n */\n get name(): string | undefined {\n if (this.isReady) {\n return this.index.name\n }\n return this.lazyIndex.getName()\n }\n\n /**\n * Get the index expression (available immediately)\n */\n get expression(): BasicExpression {\n return this.lazyIndex.getExpression()\n }\n\n /**\n * Check if index supports an operation (throws if not ready)\n */\n supports(operation: any): boolean {\n return this.index.supports(operation)\n }\n\n /**\n * Get index statistics (throws if not ready)\n */\n getStats() {\n return this.index.getStats()\n }\n\n /**\n * Check if index matches a field path (available immediately)\n */\n matchesField(fieldPath: Array<string>): boolean {\n const expr = this.expression\n return (\n expr.type === `ref` &&\n expr.path.length === fieldPath.length &&\n expr.path.every((part, i) => part === fieldPath[i])\n )\n }\n\n /**\n * Get the key count (throws if not ready)\n */\n get keyCount(): number {\n return this.index.keyCount\n }\n\n // Test compatibility properties - delegate to resolved index\n get indexedKeysSet(): Set<TKey> {\n const resolved = this.index as any\n return resolved.indexedKeysSet\n }\n\n get orderedEntriesArray(): Array<[any, Set<TKey>]> {\n const resolved = this.index as any\n return resolved.orderedEntriesArray\n }\n\n get valueMapData(): Map<any, Set<TKey>> {\n const resolved = this.index as any\n return resolved.valueMapData\n }\n\n // BTreeIndex compatibility methods\n equalityLookup(value: any): Set<TKey> {\n const resolved = this.index as any\n return resolved.equalityLookup?.(value) ?? new Set()\n }\n\n rangeQuery(options: any): Set<TKey> {\n const resolved = this.index as any\n return resolved.rangeQuery?.(options) ?? new Set()\n }\n\n inArrayLookup(values: Array<any>): Set<TKey> {\n const resolved = this.index as any\n return resolved.inArrayLookup?.(values) ?? 
new Set()\n }\n\n // Internal method for the collection to get the lazy wrapper\n _getLazyWrapper(): LazyIndexWrapper<TKey> {\n return this.lazyIndex\n }\n}\n"],"names":[],"mappings":";;AAUA,SAAS,cACP,UACoC;AAEpC,SACE,OAAO,aAAa,cACpB,SAAS,cAAc,UACvB,SAAS,UAAU,gBAAgB;AAEvC;AAKA,eAAe,wBACb,UACiC;AACjC,MAAI,cAAc,QAAQ,GAAG;AAC3B,WAAO;AAAA,EACT,OAAO;AAEL,WAAO,MAAM,SAAA;AAAA,EACf;AACF;AAKO,MAAM,iBAAiE;AAAA,EAI5E,YACU,IACA,YACA,MACA,UACA,SACA,mBACR;AANQ,SAAA,KAAA;AACA,SAAA,aAAA;AACA,SAAA,OAAA;AACA,SAAA,WAAA;AACA,SAAA,UAAA;AACA,SAAA,oBAAA;AATV,SAAQ,eAAgD;AACxD,SAAQ,gBAAwC;AAW9C,QAAI,cAAc,KAAK,QAAQ,GAAG;AAChC,WAAK,gBAAgB,IAAI,KAAK;AAAA,QAC5B,KAAK;AAAA,QACL,KAAK;AAAA,QACL,KAAK;AAAA,QACL,KAAK;AAAA,MAAA;AAGP,UAAI,KAAK,mBAAmB;AAC1B,aAAK,cAAc,MAAM,KAAK,iBAAiB;AAAA,MACjD;AAAA,IACF;AAAA,EACF;AAAA;AAAA;AAAA;AAAA,EAKA,MAAM,UAAoC;AACxC,QAAI,KAAK,eAAe;AACtB,aAAO,KAAK;AAAA,IACd;AAEA,QAAI,CAAC,KAAK,cAAc;AACtB,WAAK,eAAe,KAAK,YAAA;AAAA,IAC3B;AAEA,SAAK,gBAAgB,MAAM,KAAK;AAChC,WAAO,KAAK;AAAA,EACd;AAAA;AAAA;AAAA;AAAA,EAKA,aAAsB;AACpB,WAAO,KAAK,kBAAkB;AAAA,EAChC;AAAA;AAAA;AAAA;AAAA,EAKA,cAA+B;AAC7B,QAAI,CAAC,KAAK,eAAe;AACvB,YAAM,IAAI;AAAA,QACR,SAAS,KAAK,EAAE;AAAA,MAAA;AAAA,IAEpB;AACA,WAAO,KAAK;AAAA,EACd;AAAA;AAAA;AAAA;AAAA,EAKA,QAAgB;AACd,WAAO,KAAK;AAAA,EACd;AAAA;AAAA;AAAA;AAAA,EAKA,UAA8B;AAC5B,WAAO,KAAK;AAAA,EACd;AAAA;AAAA;AAAA;AAAA,EAKA,gBAAiC;AAC/B,WAAO,KAAK;AAAA,EACd;AAAA,EAEA,MAAc,cAAwC;AACpD,UAAM,aAAa,MAAM,wBAAwB,KAAK,QAAQ;AAC9D,WAAO,IAAI,WAAW,KAAK,IAAI,KAAK,YAAY,KAAK,MAAM,KAAK,OAAO;AAAA,EACzE;AACF;AAKO,MAAM,WAA2D;AAAA,EACtE,YACU,SACA,WACR;AAFQ,SAAA,UAAA;AACA,SAAA,YAAA;AAAA,EACP;AAAA;AAAA;AAAA;AAAA,EAKH,IAAI,QAAyB;AAC3B,WAAO,KAAK,UAAU,YAAA;AAAA,EACxB;AAAA;AAAA;AAAA;AAAA,EAKA,IAAI,UAAmB;AACrB,WAAO,KAAK,UAAU,WAAA;AAAA,EACxB;AAAA;AAAA;AAAA;AAAA,EAKA,MAAM,YAAsC;AAC1C,WAAO,MAAM,KAAK,UAAU,QAAA;AAAA,EAC9B;AAAA;AAAA;AAAA;AAAA,EAKA,IAAI,KAAa;AACf,WAAO,KAAK;AAAA,EACd;AAAA;AAAA;AAAA;AAAA,EAKA,IAAI,OAA2B;AAC7B,QAAI,KAAK,SAAS;AAChB,aAAO,KAAK,MAAM;AAAA,IACpB;AACA,WAAO,KAAK,UAAU,QAAA;AAAA,EACxB;AAAA;AAAA;AAAA;AAAA,EAKA,IAAI,aAA8B;AAChC,WAAO,KAAK,UAAU,cAAA;AAAA,EACxB;AAAA;AAAA;AAAA;AAAA,EAKA,SAAS,WAAyB;AAChC,WAAO,KAAK,MAAM,SAAS,SAAS;AAAA,EACtC;AAAA;AAAA;AAAA;AAAA,EAKA,WAAW;AACT,WAAO,KAAK,MAAM,SAAA;AAAA,EACpB;AAAA;AAAA;AAAA;AAAA,EAKA,aAAa,WAAmC;AAC9C,UAAM,OAAO,KAAK;AAClB,WACE,KAAK,SAAS,SACd,KAAK,KAAK,WAAW,UAAU,UAC/B,KAAK,KAAK,MAAM,CAAC,MAAM,MAAM,SAAS,UAAU,CAAC,CAAC;AAAA,EAEtD;AAAA;AAAA;AAAA;AAAA,EAKA,IAAI,WAAmB;AACrB,WAAO,KAAK,MAAM;AAAA,EACpB;AAAA;AAAA,EAGA,IAAI,iBAA4B;AAC9B,UAAM,WAAW,KAAK;AACtB,WAAO,SAAS;AAAA,EAClB;AAAA,EAEA,IAAI,sBAA+C;AACjD,UAAM,WAAW,KAAK;AACtB,WAAO,SAAS;AAAA,EAClB;AAAA,EAEA,IAAI,eAAoC;AACtC,UAAM,WAAW,KAAK;AACtB,WAAO,SAAS;AAAA,EAClB;AAAA;AAAA,EAGA,eAAe,OAAuB;AACpC,UAAM,WAAW,KAAK;AACtB,WAAO,SAAS,iBAAiB,KAAK,yBAAS,IAAA;AAAA,EACjD;AAAA,EAEA,WAAW,SAAyB;AAClC,UAAM,WAAW,KAAK;AACtB,WAAO,SAAS,aAAa,OAAO,yBAAS,IAAA;AAAA,EAC/C;AAAA,EAEA,cAAc,QAA+B;AAC3C,UAAM,WAAW,KAAK;AACtB,WAAO,SAAS,gBAAgB,MAAM,yBAAS,IAAA;AAAA,EACjD;AAAA;AAAA,EAGA,kBAA0C;AACxC,WAAO,KAAK;AAAA,EACd;AACF;;;"}
@@ -1 +1 @@
-
{"version":3,"file":"reverse-index.cjs","sources":["../../../src/indexes/reverse-index.ts"],"sourcesContent":["import type { CompareOptions } from
+
{"version":3,"file":"reverse-index.cjs","sources":["../../../src/indexes/reverse-index.ts"],"sourcesContent":["import type { CompareOptions } from '../query/builder/types'\nimport type { OrderByDirection } from '../query/ir'\nimport type { IndexInterface, IndexOperation, IndexStats } from './base-index'\nimport type { RangeQueryOptions } from './btree-index'\n\nexport class ReverseIndex<\n TKey extends string | number,\n> implements IndexInterface<TKey> {\n private originalIndex: IndexInterface<TKey>\n\n constructor(index: IndexInterface<TKey>) {\n this.originalIndex = index\n }\n\n // Define the reversed operations\n\n lookup(operation: IndexOperation, value: any): Set<TKey> {\n const reverseOperation =\n operation === `gt`\n ? `lt`\n : operation === `gte`\n ? `lte`\n : operation === `lt`\n ? `gt`\n : operation === `lte`\n ? `gte`\n : operation\n return this.originalIndex.lookup(reverseOperation, value)\n }\n\n rangeQuery(options: RangeQueryOptions = {}): Set<TKey> {\n return this.originalIndex.rangeQueryReversed(options)\n }\n\n rangeQueryReversed(options: RangeQueryOptions = {}): Set<TKey> {\n return this.originalIndex.rangeQuery(options)\n }\n\n take(n: number, from?: any, filterFn?: (key: TKey) => boolean): Array<TKey> {\n return this.originalIndex.takeReversed(n, from, filterFn)\n }\n\n takeReversed(\n n: number,\n from?: any,\n filterFn?: (key: TKey) => boolean,\n ): Array<TKey> {\n return this.originalIndex.take(n, from, filterFn)\n }\n\n get orderedEntriesArray(): Array<[any, Set<TKey>]> {\n return this.originalIndex.orderedEntriesArrayReversed\n }\n\n get orderedEntriesArrayReversed(): Array<[any, Set<TKey>]> {\n return this.originalIndex.orderedEntriesArray\n }\n\n // All operations below delegate to the original index\n\n supports(operation: IndexOperation): boolean {\n return this.originalIndex.supports(operation)\n }\n\n matchesField(fieldPath: Array<string>): boolean {\n return this.originalIndex.matchesField(fieldPath)\n }\n\n matchesCompareOptions(compareOptions: CompareOptions): boolean {\n return this.originalIndex.matchesCompareOptions(compareOptions)\n }\n\n matchesDirection(direction: OrderByDirection): boolean {\n return this.originalIndex.matchesDirection(direction)\n }\n\n getStats(): IndexStats {\n return this.originalIndex.getStats()\n }\n\n add(key: TKey, item: any): void {\n this.originalIndex.add(key, item)\n }\n\n remove(key: TKey, item: any): void {\n this.originalIndex.remove(key, item)\n }\n\n update(key: TKey, oldItem: any, newItem: any): void {\n this.originalIndex.update(key, oldItem, newItem)\n }\n\n build(entries: Iterable<[TKey, any]>): void {\n this.originalIndex.build(entries)\n }\n\n clear(): void {\n this.originalIndex.clear()\n }\n\n get keyCount(): number {\n return this.originalIndex.keyCount\n }\n\n equalityLookup(value: any): Set<TKey> {\n return this.originalIndex.equalityLookup(value)\n }\n\n inArrayLookup(values: Array<any>): Set<TKey> {\n return this.originalIndex.inArrayLookup(values)\n }\n\n get indexedKeysSet(): Set<TKey> {\n return this.originalIndex.indexedKeysSet\n }\n\n get valueMapData(): Map<any, Set<TKey>> {\n return this.originalIndex.valueMapData\n 
}\n}\n"],"names":[],"mappings":";;AAKO,MAAM,aAEqB;AAAA,EAGhC,YAAY,OAA6B;AACvC,SAAK,gBAAgB;AAAA,EACvB;AAAA;AAAA,EAIA,OAAO,WAA2B,OAAuB;AACvD,UAAM,mBACJ,cAAc,OACV,OACA,cAAc,QACZ,QACA,cAAc,OACZ,OACA,cAAc,QACZ,QACA;AACZ,WAAO,KAAK,cAAc,OAAO,kBAAkB,KAAK;AAAA,EAC1D;AAAA,EAEA,WAAW,UAA6B,IAAe;AACrD,WAAO,KAAK,cAAc,mBAAmB,OAAO;AAAA,EACtD;AAAA,EAEA,mBAAmB,UAA6B,IAAe;AAC7D,WAAO,KAAK,cAAc,WAAW,OAAO;AAAA,EAC9C;AAAA,EAEA,KAAK,GAAW,MAAY,UAAgD;AAC1E,WAAO,KAAK,cAAc,aAAa,GAAG,MAAM,QAAQ;AAAA,EAC1D;AAAA,EAEA,aACE,GACA,MACA,UACa;AACb,WAAO,KAAK,cAAc,KAAK,GAAG,MAAM,QAAQ;AAAA,EAClD;AAAA,EAEA,IAAI,sBAA+C;AACjD,WAAO,KAAK,cAAc;AAAA,EAC5B;AAAA,EAEA,IAAI,8BAAuD;AACzD,WAAO,KAAK,cAAc;AAAA,EAC5B;AAAA;AAAA,EAIA,SAAS,WAAoC;AAC3C,WAAO,KAAK,cAAc,SAAS,SAAS;AAAA,EAC9C;AAAA,EAEA,aAAa,WAAmC;AAC9C,WAAO,KAAK,cAAc,aAAa,SAAS;AAAA,EAClD;AAAA,EAEA,sBAAsB,gBAAyC;AAC7D,WAAO,KAAK,cAAc,sBAAsB,cAAc;AAAA,EAChE;AAAA,EAEA,iBAAiB,WAAsC;AACrD,WAAO,KAAK,cAAc,iBAAiB,SAAS;AAAA,EACtD;AAAA,EAEA,WAAuB;AACrB,WAAO,KAAK,cAAc,SAAA;AAAA,EAC5B;AAAA,EAEA,IAAI,KAAW,MAAiB;AAC9B,SAAK,cAAc,IAAI,KAAK,IAAI;AAAA,EAClC;AAAA,EAEA,OAAO,KAAW,MAAiB;AACjC,SAAK,cAAc,OAAO,KAAK,IAAI;AAAA,EACrC;AAAA,EAEA,OAAO,KAAW,SAAc,SAAoB;AAClD,SAAK,cAAc,OAAO,KAAK,SAAS,OAAO;AAAA,EACjD;AAAA,EAEA,MAAM,SAAsC;AAC1C,SAAK,cAAc,MAAM,OAAO;AAAA,EAClC;AAAA,EAEA,QAAc;AACZ,SAAK,cAAc,MAAA;AAAA,EACrB;AAAA,EAEA,IAAI,WAAmB;AACrB,WAAO,KAAK,cAAc;AAAA,EAC5B;AAAA,EAEA,eAAe,OAAuB;AACpC,WAAO,KAAK,cAAc,eAAe,KAAK;AAAA,EAChD;AAAA,EAEA,cAAc,QAA+B;AAC3C,WAAO,KAAK,cAAc,cAAc,MAAM;AAAA,EAChD;AAAA,EAEA,IAAI,iBAA4B;AAC9B,WAAO,KAAK,cAAc;AAAA,EAC5B;AAAA,EAEA,IAAI,eAAoC;AACtC,WAAO,KAAK,cAAc;AAAA,EAC5B;AACF;;"}
@@ -1 +1 @@
-
{"version":3,"file":"local-only.cjs","sources":["../../src/local-only.ts"],"sourcesContent":["import type {\n BaseCollectionConfig,\n CollectionConfig,\n DeleteMutationFnParams,\n InferSchemaOutput,\n InsertMutationFnParams,\n OperationType,\n PendingMutation,\n SyncConfig,\n UpdateMutationFnParams,\n UtilsRecord,\n} from \"./types\"\nimport type { Collection } from \"./collection/index\"\nimport type { StandardSchemaV1 } from \"@standard-schema/spec\"\n\n/**\n * Configuration interface for Local-only collection options\n * @template T - The type of items in the collection\n * @template TSchema - The schema type for validation\n * @template TKey - The type of the key returned by `getKey`\n */\nexport interface LocalOnlyCollectionConfig<\n T extends object = object,\n TSchema extends StandardSchemaV1 = never,\n TKey extends string | number = string | number,\n> extends Omit<\n BaseCollectionConfig<T, TKey, TSchema, LocalOnlyCollectionUtils>,\n `gcTime` | `startSync`\n> {\n /**\n * Optional initial data to populate the collection with on creation\n * This data will be applied during the initial sync process\n */\n initialData?: Array<T>\n}\n\n/**\n * Local-only collection utilities type\n */\nexport interface LocalOnlyCollectionUtils extends UtilsRecord {\n /**\n * Accepts mutations from a transaction that belong to this collection and persists them.\n * This should be called in your transaction's mutationFn to persist local-only data.\n *\n * @param transaction - The transaction containing mutations to accept\n * @example\n * const localData = createCollection(localOnlyCollectionOptions({...}))\n *\n * const tx = createTransaction({\n * mutationFn: async ({ transaction }) => {\n * // Make API call first\n * await api.save(...)\n * // Then persist local-only mutations after success\n * localData.utils.acceptMutations(transaction)\n * }\n * })\n */\n acceptMutations: (transaction: {\n mutations: Array<PendingMutation<Record<string, unknown>>>\n }) => void\n}\n\ntype LocalOnlyCollectionOptionsResult<\n T extends object,\n TKey extends string | number,\n TSchema extends StandardSchemaV1 | never = never,\n> = CollectionConfig<T, TKey, TSchema> & {\n utils: LocalOnlyCollectionUtils\n}\n\n/**\n * Creates Local-only collection options for use with a standard Collection\n *\n * This is an in-memory collection that doesn't sync with external sources but uses a loopback sync config\n * that immediately \"syncs\" all optimistic changes to the collection, making them permanent.\n * Perfect for local-only data that doesn't need persistence or external synchronization.\n *\n * **Using with Manual Transactions:**\n *\n * For manual transactions, you must call `utils.acceptMutations()` in your transaction's `mutationFn`\n * to persist changes made during `tx.mutate()`. 
This is necessary because local-only collections\n * don't participate in the standard mutation handler flow for manual transactions.\n *\n * @template T - The schema type if a schema is provided, otherwise the type of items in the collection\n * @template TKey - The type of the key returned by getKey\n * @param config - Configuration options for the Local-only collection\n * @returns Collection options with utilities including acceptMutations\n *\n * @example\n * // Basic local-only collection\n * const collection = createCollection(\n * localOnlyCollectionOptions({\n * getKey: (item) => item.id,\n * })\n * )\n *\n * @example\n * // Local-only collection with initial data\n * const collection = createCollection(\n * localOnlyCollectionOptions({\n * getKey: (item) => item.id,\n * initialData: [\n * { id: 1, name: 'Item 1' },\n * { id: 2, name: 'Item 2' },\n * ],\n * })\n * )\n *\n * @example\n * // Local-only collection with mutation handlers\n * const collection = createCollection(\n * localOnlyCollectionOptions({\n * getKey: (item) => item.id,\n * onInsert: async ({ transaction }) => {\n * console.log('Item inserted:', transaction.mutations[0].modified)\n * // Custom logic after insert\n * },\n * })\n * )\n *\n * @example\n * // Using with manual transactions\n * const localData = createCollection(\n * localOnlyCollectionOptions({\n * getKey: (item) => item.id,\n * })\n * )\n *\n * const tx = createTransaction({\n * mutationFn: async ({ transaction }) => {\n * // Use local data in API call\n * const localMutations = transaction.mutations.filter(m => m.collection === localData)\n * await api.save({ metadata: localMutations[0]?.modified })\n *\n * // Persist local-only mutations after API success\n * localData.utils.acceptMutations(transaction)\n * }\n * })\n *\n * tx.mutate(() => {\n * localData.insert({ id: 1, data: 'metadata' })\n * apiCollection.insert({ id: 2, data: 'main data' })\n * })\n *\n * await tx.commit()\n */\n\n// Overload for when schema is provided\nexport function localOnlyCollectionOptions<\n T extends StandardSchemaV1,\n TKey extends string | number = string | number,\n>(\n config: LocalOnlyCollectionConfig<InferSchemaOutput<T>, T, TKey> & {\n schema: T\n }\n): LocalOnlyCollectionOptionsResult<InferSchemaOutput<T>, TKey, T> & {\n schema: T\n}\n\n// Overload for when no schema is provided\n// the type T needs to be passed explicitly unless it can be inferred from the getKey function in the config\nexport function localOnlyCollectionOptions<\n T extends object,\n TKey extends string | number = string | number,\n>(\n config: LocalOnlyCollectionConfig<T, never, TKey> & {\n schema?: never // prohibit schema\n }\n): LocalOnlyCollectionOptionsResult<T, TKey> & {\n schema?: never // no schema in the result\n}\n\nexport function localOnlyCollectionOptions<\n T extends object = object,\n TSchema extends StandardSchemaV1 = never,\n TKey extends string | number = string | number,\n>(\n config: LocalOnlyCollectionConfig<T, TSchema, TKey>\n): LocalOnlyCollectionOptionsResult<T, TKey, TSchema> & {\n schema?: StandardSchemaV1\n} {\n const { initialData, onInsert, onUpdate, onDelete, ...restConfig } = config\n\n // Create the sync configuration with transaction confirmation capability\n const syncResult = createLocalOnlySync<T, TKey>(initialData)\n\n /**\n * Create wrapper handlers that call user handlers first, then confirm transactions\n * Wraps the user's onInsert handler to also confirm the transaction immediately\n */\n const wrappedOnInsert = async (\n params: 
InsertMutationFnParams<T, TKey, LocalOnlyCollectionUtils>\n ) => {\n // Call user handler first if provided\n let handlerResult\n if (onInsert) {\n handlerResult = (await onInsert(params)) ?? {}\n }\n\n // Then synchronously confirm the transaction by looping through mutations\n syncResult.confirmOperationsSync(params.transaction.mutations)\n\n return handlerResult\n }\n\n /**\n * Wrapper for onUpdate handler that also confirms the transaction immediately\n */\n const wrappedOnUpdate = async (\n params: UpdateMutationFnParams<T, TKey, LocalOnlyCollectionUtils>\n ) => {\n // Call user handler first if provided\n let handlerResult\n if (onUpdate) {\n handlerResult = (await onUpdate(params)) ?? {}\n }\n\n // Then synchronously confirm the transaction by looping through mutations\n syncResult.confirmOperationsSync(params.transaction.mutations)\n\n return handlerResult\n }\n\n /**\n * Wrapper for onDelete handler that also confirms the transaction immediately\n */\n const wrappedOnDelete = async (\n params: DeleteMutationFnParams<T, TKey, LocalOnlyCollectionUtils>\n ) => {\n // Call user handler first if provided\n let handlerResult\n if (onDelete) {\n handlerResult = (await onDelete(params)) ?? {}\n }\n\n // Then synchronously confirm the transaction by looping through mutations\n syncResult.confirmOperationsSync(params.transaction.mutations)\n\n return handlerResult\n }\n\n /**\n * Accepts mutations from a transaction that belong to this collection and persists them\n */\n const acceptMutations = (transaction: {\n mutations: Array<PendingMutation<Record<string, unknown>>>\n }) => {\n // Filter mutations that belong to this collection\n const collectionMutations = transaction.mutations.filter(\n (m) =>\n // eslint-disable-next-line @typescript-eslint/no-unnecessary-condition\n m.collection === syncResult.collection\n )\n\n if (collectionMutations.length === 0) {\n return\n }\n\n // Persist the mutations through sync\n syncResult.confirmOperationsSync(\n collectionMutations as Array<PendingMutation<T>>\n )\n }\n\n return {\n ...restConfig,\n sync: syncResult.sync,\n onInsert: wrappedOnInsert,\n onUpdate: wrappedOnUpdate,\n onDelete: wrappedOnDelete,\n utils: {\n acceptMutations,\n },\n startSync: true,\n gcTime: 0,\n } as LocalOnlyCollectionOptionsResult<T, TKey, TSchema> & {\n schema?: StandardSchemaV1\n }\n}\n\n/**\n * Internal function to create Local-only sync configuration with transaction confirmation\n *\n * This captures the sync functions and provides synchronous confirmation of operations.\n * It creates a loopback sync that immediately confirms all optimistic operations,\n * making them permanent in the collection.\n *\n * @param initialData - Optional array of initial items to populate the collection\n * @returns Object with sync configuration and confirmOperationsSync function\n */\nfunction createLocalOnlySync<T extends object, TKey extends string | number>(\n initialData?: Array<T>\n) {\n // Capture sync functions and collection for transaction confirmation\n let syncBegin: (() => void) | null = null\n let syncWrite: ((message: { type: OperationType; value: T }) => void) | null =\n null\n let syncCommit: (() => void) | null = null\n let collection: Collection<T, TKey, LocalOnlyCollectionUtils> | null = null\n\n const sync: SyncConfig<T, TKey> = {\n /**\n * Sync function that captures sync parameters and applies initial data\n * @param params - Sync parameters containing begin, write, and commit functions\n * @returns Unsubscribe function (empty since no ongoing sync is needed)\n 
*/\n sync: (params) => {\n const { begin, write, commit, markReady } = params\n\n // Capture sync functions and collection for later use\n syncBegin = begin\n syncWrite = write\n syncCommit = commit\n collection = params.collection\n\n // Apply initial data if provided\n if (initialData && initialData.length > 0) {\n begin()\n initialData.forEach((item) => {\n write({\n type: `insert`,\n value: item,\n })\n })\n commit()\n }\n\n // Mark collection as ready since local-only collections are immediately ready\n markReady()\n\n // Return empty unsubscribe function - no ongoing sync needed\n return () => {}\n },\n /**\n * Get sync metadata - returns empty object for local-only collections\n * @returns Empty metadata object\n */\n getSyncMetadata: () => ({}),\n }\n\n /**\n * Synchronously confirms optimistic operations by immediately writing through sync\n *\n * This loops through transaction mutations and applies them to move from optimistic to synced state.\n * It's called after user handlers to make optimistic changes permanent.\n *\n * @param mutations - Array of mutation objects from the transaction\n */\n const confirmOperationsSync = (mutations: Array<PendingMutation<T>>) => {\n if (!syncBegin || !syncWrite || !syncCommit) {\n return // Sync not initialized yet, which is fine\n }\n\n // Immediately write back through sync interface\n syncBegin()\n mutations.forEach((mutation) => {\n if (syncWrite) {\n syncWrite({\n type: mutation.type,\n value: mutation.modified,\n })\n }\n })\n syncCommit()\n }\n\n return {\n sync,\n confirmOperationsSync,\n collection,\n }\n}\n"],"names":[],"mappings":";;AA4KO,SAAS,2BAKd,QAGA;AACA,QAAM,EAAE,aAAa,UAAU,UAAU,UAAU,GAAG,eAAe;AAGrE,QAAM,aAAa,oBAA6B,WAAW;AAM3D,QAAM,kBAAkB,OACtB,WACG;AAEH,QAAI;AACJ,QAAI,UAAU;AACZ,sBAAiB,MAAM,SAAS,MAAM,KAAM,CAAA;AAAA,IAC9C;AAGA,eAAW,sBAAsB,OAAO,YAAY,SAAS;AAE7D,WAAO;AAAA,EACT;AAKA,QAAM,kBAAkB,OACtB,WACG;AAEH,QAAI;AACJ,QAAI,UAAU;AACZ,sBAAiB,MAAM,SAAS,MAAM,KAAM,CAAA;AAAA,IAC9C;AAGA,eAAW,sBAAsB,OAAO,YAAY,SAAS;AAE7D,WAAO;AAAA,EACT;AAKA,QAAM,kBAAkB,OACtB,WACG;AAEH,QAAI;AACJ,QAAI,UAAU;AACZ,sBAAiB,MAAM,SAAS,MAAM,KAAM,CAAA;AAAA,IAC9C;AAGA,eAAW,sBAAsB,OAAO,YAAY,SAAS;AAE7D,WAAO;AAAA,EACT;AAKA,QAAM,kBAAkB,CAAC,gBAEnB;AAEJ,UAAM,sBAAsB,YAAY,UAAU;AAAA,MAChD,CAAC;AAAA;AAAA,QAEC,EAAE,eAAe,WAAW;AAAA;AAAA,IAAA;AAGhC,QAAI,oBAAoB,WAAW,GAAG;AACpC;AAAA,IACF;AAGA,eAAW;AAAA,MACT;AAAA,IAAA;AAAA,EAEJ;AAEA,SAAO;AAAA,IACL,GAAG;AAAA,IACH,MAAM,WAAW;AAAA,IACjB,UAAU;AAAA,IACV,UAAU;AAAA,IACV,UAAU;AAAA,IACV,OAAO;AAAA,MACL;AAAA,IAAA;AAAA,IAEF,WAAW;AAAA,IACX,QAAQ;AAAA,EAAA;AAIZ;AAYA,SAAS,oBACP,aACA;AAEA,MAAI,YAAiC;AACrC,MAAI,YACF;AACF,MAAI,aAAkC;AACtC,MAAI,aAAmE;AAEvE,QAAM,OAA4B;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,IAMhC,MAAM,CAAC,WAAW;AAChB,YAAM,EAAE,OAAO,OAAO,QAAQ,cAAc;AAG5C,kBAAY;AACZ,kBAAY;AACZ,mBAAa;AACb,mBAAa,OAAO;AAGpB,UAAI,eAAe,YAAY,SAAS,GAAG;AACzC,cAAA;AACA,oBAAY,QAAQ,CAAC,SAAS;AAC5B,gBAAM;AAAA,YACJ,MAAM;AAAA,YACN,OAAO;AAAA,UAAA,CACR;AAAA,QACH,CAAC;AACD,eAAA;AAAA,MACF;AAGA,gBAAA;AAGA,aAAO,MAAM;AAAA,MAAC;AAAA,IAChB;AAAA;AAAA;AAAA;AAAA;AAAA,IAKA,iBAAiB,OAAO,CAAA;AAAA,EAAC;AAW3B,QAAM,wBAAwB,CAAC,cAAyC;AACtE,QAAI,CAAC,aAAa,CAAC,aAAa,CAAC,YAAY;AAC3C;AAAA,IACF;AAGA,cAAA;AACA,cAAU,QAAQ,CAAC,aAAa;AAC9B,UAAI,WAAW;AACb,kBAAU;AAAA,UACR,MAAM,SAAS;AAAA,UACf,OAAO,SAAS;AAAA,QAAA,CACjB;AAAA,MACH;AAAA,IACF,CAAC;AACD,eAAA;AAAA,EACF;AAEA,SAAO;AAAA,IACL;AAAA,IACA;AAAA,IACA;AAAA,EAAA;AAEJ;;"}
+
{"version":3,"file":"local-only.cjs","sources":["../../src/local-only.ts"],"sourcesContent":["import type {\n BaseCollectionConfig,\n CollectionConfig,\n DeleteMutationFnParams,\n InferSchemaOutput,\n InsertMutationFnParams,\n OperationType,\n PendingMutation,\n SyncConfig,\n UpdateMutationFnParams,\n UtilsRecord,\n} from './types'\nimport type { Collection } from './collection/index'\nimport type { StandardSchemaV1 } from '@standard-schema/spec'\n\n/**\n * Configuration interface for Local-only collection options\n * @template T - The type of items in the collection\n * @template TSchema - The schema type for validation\n * @template TKey - The type of the key returned by `getKey`\n */\nexport interface LocalOnlyCollectionConfig<\n T extends object = object,\n TSchema extends StandardSchemaV1 = never,\n TKey extends string | number = string | number,\n> extends Omit<\n BaseCollectionConfig<T, TKey, TSchema, LocalOnlyCollectionUtils>,\n `gcTime` | `startSync`\n> {\n /**\n * Optional initial data to populate the collection with on creation\n * This data will be applied during the initial sync process\n */\n initialData?: Array<T>\n}\n\n/**\n * Local-only collection utilities type\n */\nexport interface LocalOnlyCollectionUtils extends UtilsRecord {\n /**\n * Accepts mutations from a transaction that belong to this collection and persists them.\n * This should be called in your transaction's mutationFn to persist local-only data.\n *\n * @param transaction - The transaction containing mutations to accept\n * @example\n * const localData = createCollection(localOnlyCollectionOptions({...}))\n *\n * const tx = createTransaction({\n * mutationFn: async ({ transaction }) => {\n * // Make API call first\n * await api.save(...)\n * // Then persist local-only mutations after success\n * localData.utils.acceptMutations(transaction)\n * }\n * })\n */\n acceptMutations: (transaction: {\n mutations: Array<PendingMutation<Record<string, unknown>>>\n }) => void\n}\n\ntype LocalOnlyCollectionOptionsResult<\n T extends object,\n TKey extends string | number,\n TSchema extends StandardSchemaV1 | never = never,\n> = CollectionConfig<T, TKey, TSchema> & {\n utils: LocalOnlyCollectionUtils\n}\n\n/**\n * Creates Local-only collection options for use with a standard Collection\n *\n * This is an in-memory collection that doesn't sync with external sources but uses a loopback sync config\n * that immediately \"syncs\" all optimistic changes to the collection, making them permanent.\n * Perfect for local-only data that doesn't need persistence or external synchronization.\n *\n * **Using with Manual Transactions:**\n *\n * For manual transactions, you must call `utils.acceptMutations()` in your transaction's `mutationFn`\n * to persist changes made during `tx.mutate()`. 
This is necessary because local-only collections\n * don't participate in the standard mutation handler flow for manual transactions.\n *\n * @template T - The schema type if a schema is provided, otherwise the type of items in the collection\n * @template TKey - The type of the key returned by getKey\n * @param config - Configuration options for the Local-only collection\n * @returns Collection options with utilities including acceptMutations\n *\n * @example\n * // Basic local-only collection\n * const collection = createCollection(\n * localOnlyCollectionOptions({\n * getKey: (item) => item.id,\n * })\n * )\n *\n * @example\n * // Local-only collection with initial data\n * const collection = createCollection(\n * localOnlyCollectionOptions({\n * getKey: (item) => item.id,\n * initialData: [\n * { id: 1, name: 'Item 1' },\n * { id: 2, name: 'Item 2' },\n * ],\n * })\n * )\n *\n * @example\n * // Local-only collection with mutation handlers\n * const collection = createCollection(\n * localOnlyCollectionOptions({\n * getKey: (item) => item.id,\n * onInsert: async ({ transaction }) => {\n * console.log('Item inserted:', transaction.mutations[0].modified)\n * // Custom logic after insert\n * },\n * })\n * )\n *\n * @example\n * // Using with manual transactions\n * const localData = createCollection(\n * localOnlyCollectionOptions({\n * getKey: (item) => item.id,\n * })\n * )\n *\n * const tx = createTransaction({\n * mutationFn: async ({ transaction }) => {\n * // Use local data in API call\n * const localMutations = transaction.mutations.filter(m => m.collection === localData)\n * await api.save({ metadata: localMutations[0]?.modified })\n *\n * // Persist local-only mutations after API success\n * localData.utils.acceptMutations(transaction)\n * }\n * })\n *\n * tx.mutate(() => {\n * localData.insert({ id: 1, data: 'metadata' })\n * apiCollection.insert({ id: 2, data: 'main data' })\n * })\n *\n * await tx.commit()\n */\n\n// Overload for when schema is provided\nexport function localOnlyCollectionOptions<\n T extends StandardSchemaV1,\n TKey extends string | number = string | number,\n>(\n config: LocalOnlyCollectionConfig<InferSchemaOutput<T>, T, TKey> & {\n schema: T\n },\n): LocalOnlyCollectionOptionsResult<InferSchemaOutput<T>, TKey, T> & {\n schema: T\n}\n\n// Overload for when no schema is provided\n// the type T needs to be passed explicitly unless it can be inferred from the getKey function in the config\nexport function localOnlyCollectionOptions<\n T extends object,\n TKey extends string | number = string | number,\n>(\n config: LocalOnlyCollectionConfig<T, never, TKey> & {\n schema?: never // prohibit schema\n },\n): LocalOnlyCollectionOptionsResult<T, TKey> & {\n schema?: never // no schema in the result\n}\n\nexport function localOnlyCollectionOptions<\n T extends object = object,\n TSchema extends StandardSchemaV1 = never,\n TKey extends string | number = string | number,\n>(\n config: LocalOnlyCollectionConfig<T, TSchema, TKey>,\n): LocalOnlyCollectionOptionsResult<T, TKey, TSchema> & {\n schema?: StandardSchemaV1\n} {\n const { initialData, onInsert, onUpdate, onDelete, ...restConfig } = config\n\n // Create the sync configuration with transaction confirmation capability\n const syncResult = createLocalOnlySync<T, TKey>(initialData)\n\n /**\n * Create wrapper handlers that call user handlers first, then confirm transactions\n * Wraps the user's onInsert handler to also confirm the transaction immediately\n */\n const wrappedOnInsert = async (\n params: 
InsertMutationFnParams<T, TKey, LocalOnlyCollectionUtils>,\n ) => {\n // Call user handler first if provided\n let handlerResult\n if (onInsert) {\n handlerResult = (await onInsert(params)) ?? {}\n }\n\n // Then synchronously confirm the transaction by looping through mutations\n syncResult.confirmOperationsSync(params.transaction.mutations)\n\n return handlerResult\n }\n\n /**\n * Wrapper for onUpdate handler that also confirms the transaction immediately\n */\n const wrappedOnUpdate = async (\n params: UpdateMutationFnParams<T, TKey, LocalOnlyCollectionUtils>,\n ) => {\n // Call user handler first if provided\n let handlerResult\n if (onUpdate) {\n handlerResult = (await onUpdate(params)) ?? {}\n }\n\n // Then synchronously confirm the transaction by looping through mutations\n syncResult.confirmOperationsSync(params.transaction.mutations)\n\n return handlerResult\n }\n\n /**\n * Wrapper for onDelete handler that also confirms the transaction immediately\n */\n const wrappedOnDelete = async (\n params: DeleteMutationFnParams<T, TKey, LocalOnlyCollectionUtils>,\n ) => {\n // Call user handler first if provided\n let handlerResult\n if (onDelete) {\n handlerResult = (await onDelete(params)) ?? {}\n }\n\n // Then synchronously confirm the transaction by looping through mutations\n syncResult.confirmOperationsSync(params.transaction.mutations)\n\n return handlerResult\n }\n\n /**\n * Accepts mutations from a transaction that belong to this collection and persists them\n */\n const acceptMutations = (transaction: {\n mutations: Array<PendingMutation<Record<string, unknown>>>\n }) => {\n // Filter mutations that belong to this collection\n const collectionMutations = transaction.mutations.filter(\n (m) =>\n // eslint-disable-next-line @typescript-eslint/no-unnecessary-condition\n m.collection === syncResult.collection,\n )\n\n if (collectionMutations.length === 0) {\n return\n }\n\n // Persist the mutations through sync\n syncResult.confirmOperationsSync(\n collectionMutations as Array<PendingMutation<T>>,\n )\n }\n\n return {\n ...restConfig,\n sync: syncResult.sync,\n onInsert: wrappedOnInsert,\n onUpdate: wrappedOnUpdate,\n onDelete: wrappedOnDelete,\n utils: {\n acceptMutations,\n },\n startSync: true,\n gcTime: 0,\n } as LocalOnlyCollectionOptionsResult<T, TKey, TSchema> & {\n schema?: StandardSchemaV1\n }\n}\n\n/**\n * Internal function to create Local-only sync configuration with transaction confirmation\n *\n * This captures the sync functions and provides synchronous confirmation of operations.\n * It creates a loopback sync that immediately confirms all optimistic operations,\n * making them permanent in the collection.\n *\n * @param initialData - Optional array of initial items to populate the collection\n * @returns Object with sync configuration and confirmOperationsSync function\n */\nfunction createLocalOnlySync<T extends object, TKey extends string | number>(\n initialData?: Array<T>,\n) {\n // Capture sync functions and collection for transaction confirmation\n let syncBegin: (() => void) | null = null\n let syncWrite: ((message: { type: OperationType; value: T }) => void) | null =\n null\n let syncCommit: (() => void) | null = null\n let collection: Collection<T, TKey, LocalOnlyCollectionUtils> | null = null\n\n const sync: SyncConfig<T, TKey> = {\n /**\n * Sync function that captures sync parameters and applies initial data\n * @param params - Sync parameters containing begin, write, and commit functions\n * @returns Unsubscribe function (empty since no ongoing sync is 
needed)\n */\n sync: (params) => {\n const { begin, write, commit, markReady } = params\n\n // Capture sync functions and collection for later use\n syncBegin = begin\n syncWrite = write\n syncCommit = commit\n collection = params.collection\n\n // Apply initial data if provided\n if (initialData && initialData.length > 0) {\n begin()\n initialData.forEach((item) => {\n write({\n type: `insert`,\n value: item,\n })\n })\n commit()\n }\n\n // Mark collection as ready since local-only collections are immediately ready\n markReady()\n\n // Return empty unsubscribe function - no ongoing sync needed\n return () => {}\n },\n /**\n * Get sync metadata - returns empty object for local-only collections\n * @returns Empty metadata object\n */\n getSyncMetadata: () => ({}),\n }\n\n /**\n * Synchronously confirms optimistic operations by immediately writing through sync\n *\n * This loops through transaction mutations and applies them to move from optimistic to synced state.\n * It's called after user handlers to make optimistic changes permanent.\n *\n * @param mutations - Array of mutation objects from the transaction\n */\n const confirmOperationsSync = (mutations: Array<PendingMutation<T>>) => {\n if (!syncBegin || !syncWrite || !syncCommit) {\n return // Sync not initialized yet, which is fine\n }\n\n // Immediately write back through sync interface\n syncBegin()\n mutations.forEach((mutation) => {\n if (syncWrite) {\n syncWrite({\n type: mutation.type,\n value: mutation.modified,\n })\n }\n })\n syncCommit()\n }\n\n return {\n sync,\n confirmOperationsSync,\n collection,\n }\n}\n"],"names":[],"mappings":";;AA4KO,SAAS,2BAKd,QAGA;AACA,QAAM,EAAE,aAAa,UAAU,UAAU,UAAU,GAAG,eAAe;AAGrE,QAAM,aAAa,oBAA6B,WAAW;AAM3D,QAAM,kBAAkB,OACtB,WACG;AAEH,QAAI;AACJ,QAAI,UAAU;AACZ,sBAAiB,MAAM,SAAS,MAAM,KAAM,CAAA;AAAA,IAC9C;AAGA,eAAW,sBAAsB,OAAO,YAAY,SAAS;AAE7D,WAAO;AAAA,EACT;AAKA,QAAM,kBAAkB,OACtB,WACG;AAEH,QAAI;AACJ,QAAI,UAAU;AACZ,sBAAiB,MAAM,SAAS,MAAM,KAAM,CAAA;AAAA,IAC9C;AAGA,eAAW,sBAAsB,OAAO,YAAY,SAAS;AAE7D,WAAO;AAAA,EACT;AAKA,QAAM,kBAAkB,OACtB,WACG;AAEH,QAAI;AACJ,QAAI,UAAU;AACZ,sBAAiB,MAAM,SAAS,MAAM,KAAM,CAAA;AAAA,IAC9C;AAGA,eAAW,sBAAsB,OAAO,YAAY,SAAS;AAE7D,WAAO;AAAA,EACT;AAKA,QAAM,kBAAkB,CAAC,gBAEnB;AAEJ,UAAM,sBAAsB,YAAY,UAAU;AAAA,MAChD,CAAC;AAAA;AAAA,QAEC,EAAE,eAAe,WAAW;AAAA;AAAA,IAAA;AAGhC,QAAI,oBAAoB,WAAW,GAAG;AACpC;AAAA,IACF;AAGA,eAAW;AAAA,MACT;AAAA,IAAA;AAAA,EAEJ;AAEA,SAAO;AAAA,IACL,GAAG;AAAA,IACH,MAAM,WAAW;AAAA,IACjB,UAAU;AAAA,IACV,UAAU;AAAA,IACV,UAAU;AAAA,IACV,OAAO;AAAA,MACL;AAAA,IAAA;AAAA,IAEF,WAAW;AAAA,IACX,QAAQ;AAAA,EAAA;AAIZ;AAYA,SAAS,oBACP,aACA;AAEA,MAAI,YAAiC;AACrC,MAAI,YACF;AACF,MAAI,aAAkC;AACtC,MAAI,aAAmE;AAEvE,QAAM,OAA4B;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,IAMhC,MAAM,CAAC,WAAW;AAChB,YAAM,EAAE,OAAO,OAAO,QAAQ,cAAc;AAG5C,kBAAY;AACZ,kBAAY;AACZ,mBAAa;AACb,mBAAa,OAAO;AAGpB,UAAI,eAAe,YAAY,SAAS,GAAG;AACzC,cAAA;AACA,oBAAY,QAAQ,CAAC,SAAS;AAC5B,gBAAM;AAAA,YACJ,MAAM;AAAA,YACN,OAAO;AAAA,UAAA,CACR;AAAA,QACH,CAAC;AACD,eAAA;AAAA,MACF;AAGA,gBAAA;AAGA,aAAO,MAAM;AAAA,MAAC;AAAA,IAChB;AAAA;AAAA;AAAA;AAAA;AAAA,IAKA,iBAAiB,OAAO,CAAA;AAAA,EAAC;AAW3B,QAAM,wBAAwB,CAAC,cAAyC;AACtE,QAAI,CAAC,aAAa,CAAC,aAAa,CAAC,YAAY;AAC3C;AAAA,IACF;AAGA,cAAA;AACA,cAAU,QAAQ,CAAC,aAAa;AAC9B,UAAI,WAAW;AACb,kBAAU;AAAA,UACR,MAAM,SAAS;AAAA,UACf,OAAO,SAAS;AAAA,QAAA,CACjB;AAAA,MACH;AAAA,IACF,CAAC;AACD,eAAA;AAAA,EACF;AAEA,SAAO;AAAA,IACL;AAAA,IACA;AAAA,IACA;AAAA,EAAA;AAEJ;;"}
@@ -1 +1 @@
-
{"version":3,"file":"local-storage.cjs","sources":["../../src/local-storage.ts"],"sourcesContent":["import {\n InvalidStorageDataFormatError,\n InvalidStorageObjectFormatError,\n SerializationError,\n StorageKeyRequiredError,\n} from \"./errors\"\nimport type {\n BaseCollectionConfig,\n CollectionConfig,\n DeleteMutationFnParams,\n InferSchemaOutput,\n InsertMutationFnParams,\n PendingMutation,\n SyncConfig,\n UpdateMutationFnParams,\n UtilsRecord,\n} from \"./types\"\nimport type { StandardSchemaV1 } from \"@standard-schema/spec\"\n\n/**\n * Storage API interface - subset of DOM Storage that we need\n */\nexport type StorageApi = Pick<Storage, `getItem` | `setItem` | `removeItem`>\n\n/**\n * Storage event API - subset of Window for 'storage' events only\n */\nexport type StorageEventApi = {\n addEventListener: (\n type: `storage`,\n listener: (event: StorageEvent) => void\n ) => void\n removeEventListener: (\n type: `storage`,\n listener: (event: StorageEvent) => void\n ) => void\n}\n\n/**\n * Internal storage format that includes version tracking\n */\ninterface StoredItem<T> {\n versionKey: string\n data: T\n}\n\nexport interface Parser {\n parse: (data: string) => unknown\n stringify: (data: unknown) => string\n}\n\n/**\n * Configuration interface for localStorage collection options\n * @template T - The type of items in the collection\n * @template TSchema - The schema type for validation\n * @template TKey - The type of the key returned by `getKey`\n */\nexport interface LocalStorageCollectionConfig<\n T extends object = object,\n TSchema extends StandardSchemaV1 = never,\n TKey extends string | number = string | number,\n> extends BaseCollectionConfig<T, TKey, TSchema> {\n /**\n * The key to use for storing the collection data in localStorage/sessionStorage\n */\n storageKey: string\n\n /**\n * Storage API to use (defaults to window.localStorage)\n * Can be any object that implements the Storage interface (e.g., sessionStorage)\n */\n storage?: StorageApi\n\n /**\n * Storage event API to use for cross-tab synchronization (defaults to window)\n * Can be any object that implements addEventListener/removeEventListener for storage events\n */\n storageEventApi?: StorageEventApi\n\n /**\n * Parser to use for serializing and deserializing data to and from storage\n * Defaults to JSON\n */\n parser?: Parser\n}\n\n/**\n * Type for the clear utility function\n */\nexport type ClearStorageFn = () => void\n\n/**\n * Type for the getStorageSize utility function\n */\nexport type GetStorageSizeFn = () => number\n\n/**\n * LocalStorage collection utilities type\n */\nexport interface LocalStorageCollectionUtils extends UtilsRecord {\n clearStorage: ClearStorageFn\n getStorageSize: GetStorageSizeFn\n /**\n * Accepts mutations from a transaction that belong to this collection and persists them to localStorage.\n * This should be called in your transaction's mutationFn to persist local-storage data.\n *\n * @param transaction - The transaction containing mutations to accept\n * @example\n * const localSettings = createCollection(localStorageCollectionOptions({...}))\n *\n * const tx = createTransaction({\n * mutationFn: async ({ transaction }) => {\n * // Make API call first\n * await api.save(...)\n * // Then persist local-storage mutations after success\n * localSettings.utils.acceptMutations(transaction)\n * }\n * })\n */\n acceptMutations: (transaction: {\n mutations: Array<PendingMutation<Record<string, unknown>>>\n }) => void\n}\n\n/**\n * Validates that a value can be JSON serialized\n * 
@param parser - The parser to use for serialization\n * @param value - The value to validate for JSON serialization\n * @param operation - The operation type being performed (for error messages)\n * @throws Error if the value cannot be JSON serialized\n */\nfunction validateJsonSerializable(\n parser: Parser,\n value: any,\n operation: string\n): void {\n try {\n parser.stringify(value)\n } catch (error) {\n throw new SerializationError(\n operation,\n error instanceof Error ? error.message : String(error)\n )\n }\n}\n\n/**\n * Generate a UUID for version tracking\n * @returns A unique identifier string for tracking data versions\n */\nfunction generateUuid(): string {\n return crypto.randomUUID()\n}\n\n/**\n * Encodes a key (string or number) into a storage-safe string format.\n * This prevents collisions between numeric and string keys by prefixing with type information.\n *\n * Examples:\n * - number 1 → \"n:1\"\n * - string \"1\" → \"s:1\"\n * - string \"n:1\" → \"s:n:1\"\n *\n * @param key - The key to encode (string or number)\n * @returns Type-prefixed string that is safe for storage\n */\nfunction encodeStorageKey(key: string | number): string {\n if (typeof key === `number`) {\n return `n:${key}`\n }\n return `s:${key}`\n}\n\n/**\n * Decodes a storage key back to its original form.\n * This is the inverse of encodeStorageKey.\n *\n * @param encodedKey - The encoded key from storage\n * @returns The original key (string or number)\n */\nfunction decodeStorageKey(encodedKey: string): string | number {\n if (encodedKey.startsWith(`n:`)) {\n return Number(encodedKey.slice(2))\n }\n if (encodedKey.startsWith(`s:`)) {\n return encodedKey.slice(2)\n }\n // Fallback for legacy data without encoding\n return encodedKey\n}\n\n/**\n * Creates an in-memory storage implementation that mimics the StorageApi interface\n * Used as a fallback when localStorage is not available (e.g., server-side rendering)\n * @returns An object implementing the StorageApi interface using an in-memory Map\n */\nfunction createInMemoryStorage(): StorageApi {\n const storage = new Map<string, string>()\n\n return {\n getItem(key: string): string | null {\n return storage.get(key) ?? 
null\n },\n setItem(key: string, value: string): void {\n storage.set(key, value)\n },\n removeItem(key: string): void {\n storage.delete(key)\n },\n }\n}\n\n/**\n * Creates a no-op storage event API for environments without window (e.g., server-side)\n * This provides the required interface but doesn't actually listen to any events\n * since cross-tab synchronization is not possible in server environments\n * @returns An object implementing the StorageEventApi interface with no-op methods\n */\nfunction createNoOpStorageEventApi(): StorageEventApi {\n return {\n addEventListener: () => {\n // No-op: cannot listen to storage events without window\n },\n removeEventListener: () => {\n // No-op: cannot remove listeners without window\n },\n }\n}\n\n/**\n * Creates localStorage collection options for use with a standard Collection\n *\n * This function creates a collection that persists data to localStorage/sessionStorage\n * and synchronizes changes across browser tabs using storage events.\n *\n * **Fallback Behavior:**\n *\n * When localStorage is not available (e.g., in server-side rendering environments),\n * this function automatically falls back to an in-memory storage implementation.\n * This prevents errors during module initialization and allows the collection to\n * work in any environment, though data will not persist across page reloads or\n * be shared across tabs when using the in-memory fallback.\n *\n * **Using with Manual Transactions:**\n *\n * For manual transactions, you must call `utils.acceptMutations()` in your transaction's `mutationFn`\n * to persist changes made during `tx.mutate()`. This is necessary because local-storage collections\n * don't participate in the standard mutation handler flow for manual transactions.\n *\n * @template TExplicit - The explicit type of items in the collection (highest priority)\n * @template TSchema - The schema type for validation and type inference (second priority)\n * @template TFallback - The fallback type if no explicit or schema type is provided\n * @param config - Configuration options for the localStorage collection\n * @returns Collection options with utilities including clearStorage, getStorageSize, and acceptMutations\n *\n * @example\n * // Basic localStorage collection\n * const collection = createCollection(\n * localStorageCollectionOptions({\n * storageKey: 'todos',\n * getKey: (item) => item.id,\n * })\n * )\n *\n * @example\n * // localStorage collection with custom storage\n * const collection = createCollection(\n * localStorageCollectionOptions({\n * storageKey: 'todos',\n * storage: window.sessionStorage, // Use sessionStorage instead\n * getKey: (item) => item.id,\n * })\n * )\n *\n * @example\n * // localStorage collection with mutation handlers\n * const collection = createCollection(\n * localStorageCollectionOptions({\n * storageKey: 'todos',\n * getKey: (item) => item.id,\n * onInsert: async ({ transaction }) => {\n * console.log('Item inserted:', transaction.mutations[0].modified)\n * },\n * })\n * )\n *\n * @example\n * // Using with manual transactions\n * const localSettings = createCollection(\n * localStorageCollectionOptions({\n * storageKey: 'user-settings',\n * getKey: (item) => item.id,\n * })\n * )\n *\n * const tx = createTransaction({\n * mutationFn: async ({ transaction }) => {\n * // Use settings data in API call\n * const settingsMutations = transaction.mutations.filter(m => m.collection === localSettings)\n * await api.updateUserProfile({ settings: settingsMutations[0]?.modified })\n *\n 
* // Persist local-storage mutations after API success\n * localSettings.utils.acceptMutations(transaction)\n * }\n * })\n *\n * tx.mutate(() => {\n * localSettings.insert({ id: 'theme', value: 'dark' })\n * apiCollection.insert({ id: 2, data: 'profile data' })\n * })\n *\n * await tx.commit()\n */\n\n// Overload for when schema is provided\nexport function localStorageCollectionOptions<\n T extends StandardSchemaV1,\n TKey extends string | number = string | number,\n>(\n config: LocalStorageCollectionConfig<InferSchemaOutput<T>, T, TKey> & {\n schema: T\n }\n): CollectionConfig<\n InferSchemaOutput<T>,\n TKey,\n T,\n LocalStorageCollectionUtils\n> & {\n id: string\n utils: LocalStorageCollectionUtils\n schema: T\n}\n\n// Overload for when no schema is provided\n// the type T needs to be passed explicitly unless it can be inferred from the getKey function in the config\nexport function localStorageCollectionOptions<\n T extends object,\n TKey extends string | number = string | number,\n>(\n config: LocalStorageCollectionConfig<T, never, TKey> & {\n schema?: never // prohibit schema\n }\n): CollectionConfig<T, TKey, never, LocalStorageCollectionUtils> & {\n id: string\n utils: LocalStorageCollectionUtils\n schema?: never // no schema in the result\n}\n\nexport function localStorageCollectionOptions(\n config: LocalStorageCollectionConfig<any, any, string | number>\n): Omit<\n CollectionConfig<any, string | number, any, LocalStorageCollectionUtils>,\n `id`\n> & {\n id: string\n utils: LocalStorageCollectionUtils\n schema?: StandardSchemaV1\n} {\n // Validate required parameters\n if (!config.storageKey) {\n throw new StorageKeyRequiredError()\n }\n\n // Default to window.localStorage if no storage is provided\n // Fall back to in-memory storage if localStorage is not available (e.g., server-side rendering)\n const storage =\n config.storage ||\n (typeof window !== `undefined` ? window.localStorage : null) ||\n createInMemoryStorage()\n\n // Default to window for storage events if not provided\n // Fall back to no-op storage event API if window is not available (e.g., server-side rendering)\n const storageEventApi =\n config.storageEventApi ||\n (typeof window !== `undefined` ? 
window : null) ||\n createNoOpStorageEventApi()\n\n // Default to JSON parser if no parser is provided\n const parser = config.parser || JSON\n\n // Track the last known state to detect changes\n const lastKnownData = new Map<string | number, StoredItem<any>>()\n\n // Create the sync configuration\n const sync = createLocalStorageSync<any>(\n config.storageKey,\n storage,\n storageEventApi,\n parser,\n config.getKey,\n lastKnownData\n )\n\n /**\n * Save data to storage\n * @param dataMap - Map of items with version tracking to save to storage\n */\n const saveToStorage = (\n dataMap: Map<string | number, StoredItem<any>>\n ): void => {\n try {\n // Convert Map to object format for storage\n const objectData: Record<string, StoredItem<any>> = {}\n dataMap.forEach((storedItem, key) => {\n objectData[encodeStorageKey(key)] = storedItem\n })\n const serialized = parser.stringify(objectData)\n storage.setItem(config.storageKey, serialized)\n } catch (error) {\n console.error(\n `[LocalStorageCollection] Error saving data to storage key \"${config.storageKey}\":`,\n error\n )\n throw error\n }\n }\n\n /**\n * Removes all collection data from the configured storage\n */\n const clearStorage: ClearStorageFn = (): void => {\n storage.removeItem(config.storageKey)\n }\n\n /**\n * Get the size of the stored data in bytes (approximate)\n * @returns The approximate size in bytes of the stored collection data\n */\n const getStorageSize: GetStorageSizeFn = (): number => {\n const data = storage.getItem(config.storageKey)\n return data ? new Blob([data]).size : 0\n }\n\n /*\n * Create wrapper handlers for direct persistence operations that perform actual storage operations\n * Wraps the user's onInsert handler to also save changes to localStorage\n */\n const wrappedOnInsert = async (params: InsertMutationFnParams<any>) => {\n // Validate that all values in the transaction can be JSON serialized\n params.transaction.mutations.forEach((mutation) => {\n validateJsonSerializable(parser, mutation.modified, `insert`)\n })\n\n // Call the user handler BEFORE persisting changes (if provided)\n let handlerResult: any = {}\n if (config.onInsert) {\n handlerResult = (await config.onInsert(params)) ?? {}\n }\n\n // Always persist to storage\n // Use lastKnownData (in-memory cache) instead of reading from storage\n // Add new items with version keys\n params.transaction.mutations.forEach((mutation) => {\n // Use the engine's pre-computed key for consistency\n const storedItem: StoredItem<any> = {\n versionKey: generateUuid(),\n data: mutation.modified,\n }\n lastKnownData.set(mutation.key, storedItem)\n })\n\n // Save to storage\n saveToStorage(lastKnownData)\n\n // Confirm mutations through sync interface (moves from optimistic to synced state)\n // without reloading from storage\n sync.confirmOperationsSync(params.transaction.mutations)\n\n return handlerResult\n }\n\n const wrappedOnUpdate = async (params: UpdateMutationFnParams<any>) => {\n // Validate that all values in the transaction can be JSON serialized\n params.transaction.mutations.forEach((mutation) => {\n validateJsonSerializable(parser, mutation.modified, `update`)\n })\n\n // Call the user handler BEFORE persisting changes (if provided)\n let handlerResult: any = {}\n if (config.onUpdate) {\n handlerResult = (await config.onUpdate(params)) ?? 
{}\n }\n\n // Always persist to storage\n // Use lastKnownData (in-memory cache) instead of reading from storage\n // Update items with new version keys\n params.transaction.mutations.forEach((mutation) => {\n // Use the engine's pre-computed key for consistency\n const storedItem: StoredItem<any> = {\n versionKey: generateUuid(),\n data: mutation.modified,\n }\n lastKnownData.set(mutation.key, storedItem)\n })\n\n // Save to storage\n saveToStorage(lastKnownData)\n\n // Confirm mutations through sync interface (moves from optimistic to synced state)\n // without reloading from storage\n sync.confirmOperationsSync(params.transaction.mutations)\n\n return handlerResult\n }\n\n const wrappedOnDelete = async (params: DeleteMutationFnParams<any>) => {\n // Call the user handler BEFORE persisting changes (if provided)\n let handlerResult: any = {}\n if (config.onDelete) {\n handlerResult = (await config.onDelete(params)) ?? {}\n }\n\n // Always persist to storage\n // Use lastKnownData (in-memory cache) instead of reading from storage\n // Remove items\n params.transaction.mutations.forEach((mutation) => {\n // Use the engine's pre-computed key for consistency\n lastKnownData.delete(mutation.key)\n })\n\n // Save to storage\n saveToStorage(lastKnownData)\n\n // Confirm mutations through sync interface (moves from optimistic to synced state)\n // without reloading from storage\n sync.confirmOperationsSync(params.transaction.mutations)\n\n return handlerResult\n }\n\n // Extract standard Collection config properties\n const {\n storageKey: _storageKey,\n storage: _storage,\n storageEventApi: _storageEventApi,\n onInsert: _onInsert,\n onUpdate: _onUpdate,\n onDelete: _onDelete,\n id,\n ...restConfig\n } = config\n\n // Default id to a pattern based on storage key if not provided\n const collectionId = id ?? 
`local-collection:${config.storageKey}`\n\n /**\n * Accepts mutations from a transaction that belong to this collection and persists them to storage\n */\n const acceptMutations = (transaction: {\n mutations: Array<PendingMutation<Record<string, unknown>>>\n }) => {\n // Filter mutations that belong to this collection\n // Use collection ID for filtering if collection reference isn't available yet\n const collectionMutations = transaction.mutations.filter((m) => {\n // Try to match by collection reference first\n if (sync.collection && m.collection === sync.collection) {\n return true\n }\n // Fall back to matching by collection ID\n return m.collection.id === collectionId\n })\n\n if (collectionMutations.length === 0) {\n return\n }\n\n // Validate all mutations can be serialized before modifying storage\n for (const mutation of collectionMutations) {\n switch (mutation.type) {\n case `insert`:\n case `update`:\n validateJsonSerializable(parser, mutation.modified, mutation.type)\n break\n case `delete`:\n validateJsonSerializable(parser, mutation.original, mutation.type)\n break\n }\n }\n\n // Use lastKnownData (in-memory cache) instead of reading from storage\n // Apply each mutation\n for (const mutation of collectionMutations) {\n // Use the engine's pre-computed key to avoid key derivation issues\n switch (mutation.type) {\n case `insert`:\n case `update`: {\n const storedItem: StoredItem<Record<string, unknown>> = {\n versionKey: generateUuid(),\n data: mutation.modified,\n }\n lastKnownData.set(mutation.key, storedItem)\n break\n }\n case `delete`: {\n lastKnownData.delete(mutation.key)\n break\n }\n }\n }\n\n // Save to storage\n saveToStorage(lastKnownData)\n\n // Confirm the mutations in the collection to move them from optimistic to synced state\n // This writes them through the sync interface to make them \"synced\" instead of \"optimistic\"\n sync.confirmOperationsSync(collectionMutations)\n }\n\n return {\n ...restConfig,\n id: collectionId,\n sync,\n onInsert: wrappedOnInsert,\n onUpdate: wrappedOnUpdate,\n onDelete: wrappedOnDelete,\n utils: {\n clearStorage,\n getStorageSize,\n acceptMutations,\n },\n }\n}\n\n/**\n * Load data from storage and return as a Map\n * @param parser - The parser to use for deserializing the data\n * @param storageKey - The key used to store data in the storage API\n * @param storage - The storage API to load from (localStorage, sessionStorage, etc.)\n * @returns Map of stored items with version tracking, or empty Map if loading fails\n */\nfunction loadFromStorage<T extends object>(\n storageKey: string,\n storage: StorageApi,\n parser: Parser\n): Map<string | number, StoredItem<T>> {\n try {\n const rawData = storage.getItem(storageKey)\n if (!rawData) {\n return new Map()\n }\n\n const parsed = parser.parse(rawData)\n const dataMap = new Map<string | number, StoredItem<T>>()\n\n // Handle object format where keys map to StoredItem values\n if (\n typeof parsed === `object` &&\n parsed !== null &&\n !Array.isArray(parsed)\n ) {\n Object.entries(parsed).forEach(([encodedKey, value]) => {\n // Runtime check to ensure the value has the expected StoredItem structure\n if (\n value &&\n typeof value === `object` &&\n `versionKey` in value &&\n `data` in value\n ) {\n const storedItem = value as StoredItem<T>\n const decodedKey = decodeStorageKey(encodedKey)\n dataMap.set(decodedKey, storedItem)\n } else {\n throw new InvalidStorageDataFormatError(storageKey, encodedKey)\n }\n })\n } else {\n throw new InvalidStorageObjectFormatError(storageKey)\n 
}\n\n return dataMap\n } catch (error) {\n console.warn(\n `[LocalStorageCollection] Error loading data from storage key \"${storageKey}\":`,\n error\n )\n return new Map()\n }\n}\n\n/**\n * Internal function to create localStorage sync configuration\n * Creates a sync configuration that handles localStorage persistence and cross-tab synchronization\n * @param storageKey - The key used for storing data in localStorage\n * @param storage - The storage API to use (localStorage, sessionStorage, etc.)\n * @param storageEventApi - The event API for listening to storage changes\n * @param getKey - Function to extract the key from an item\n * @param lastKnownData - Map tracking the last known state for change detection\n * @returns Sync configuration with manual trigger capability\n */\nfunction createLocalStorageSync<T extends object>(\n storageKey: string,\n storage: StorageApi,\n storageEventApi: StorageEventApi,\n parser: Parser,\n _getKey: (item: T) => string | number,\n lastKnownData: Map<string | number, StoredItem<T>>\n): SyncConfig<T> & {\n manualTrigger?: () => void\n collection: any\n confirmOperationsSync: (mutations: Array<any>) => void\n} {\n let syncParams: Parameters<SyncConfig<T>[`sync`]>[0] | null = null\n let collection: any = null\n\n /**\n * Compare two Maps to find differences using version keys\n * @param oldData - The previous state of stored items\n * @param newData - The current state of stored items\n * @returns Array of changes with type, key, and value information\n */\n const findChanges = (\n oldData: Map<string | number, StoredItem<T>>,\n newData: Map<string | number, StoredItem<T>>\n ): Array<{\n type: `insert` | `update` | `delete`\n key: string | number\n value?: T\n }> => {\n const changes: Array<{\n type: `insert` | `update` | `delete`\n key: string | number\n value?: T\n }> = []\n\n // Check for deletions and updates\n oldData.forEach((oldStoredItem, key) => {\n const newStoredItem = newData.get(key)\n if (!newStoredItem) {\n changes.push({ type: `delete`, key, value: oldStoredItem.data })\n } else if (oldStoredItem.versionKey !== newStoredItem.versionKey) {\n changes.push({ type: `update`, key, value: newStoredItem.data })\n }\n })\n\n // Check for insertions\n newData.forEach((newStoredItem, key) => {\n if (!oldData.has(key)) {\n changes.push({ type: `insert`, key, value: newStoredItem.data })\n }\n })\n\n return changes\n }\n\n /**\n * Process storage changes and update collection\n * Loads new data from storage, compares with last known state, and applies changes\n */\n const processStorageChanges = () => {\n if (!syncParams) return\n\n const { begin, write, commit } = syncParams\n\n // Load the new data\n const newData = loadFromStorage<T>(storageKey, storage, parser)\n\n // Find the specific changes\n const changes = findChanges(lastKnownData, newData)\n\n if (changes.length > 0) {\n begin()\n changes.forEach(({ type, value }) => {\n if (value) {\n validateJsonSerializable(parser, value, type)\n write({ type, value })\n }\n })\n commit()\n\n // Update lastKnownData\n lastKnownData.clear()\n newData.forEach((storedItem, key) => {\n lastKnownData.set(key, storedItem)\n })\n }\n }\n\n const syncConfig: SyncConfig<T> & {\n manualTrigger?: () => void\n collection: any\n } = {\n sync: (params: Parameters<SyncConfig<T>[`sync`]>[0]) => {\n const { begin, write, commit, markReady } = params\n\n // Store sync params and collection for later use\n syncParams = params\n collection = params.collection\n\n // Initial load\n const initialData = 
loadFromStorage<T>(storageKey, storage, parser)\n if (initialData.size > 0) {\n begin()\n initialData.forEach((storedItem) => {\n validateJsonSerializable(parser, storedItem.data, `load`)\n write({ type: `insert`, value: storedItem.data })\n })\n commit()\n }\n\n // Update lastKnownData\n lastKnownData.clear()\n initialData.forEach((storedItem, key) => {\n lastKnownData.set(key, storedItem)\n })\n\n // Mark collection as ready after initial load\n markReady()\n\n // Listen for storage events from other tabs\n const handleStorageEvent = (event: StorageEvent) => {\n // Only respond to changes to our specific key and from our storage\n if (event.key !== storageKey || event.storageArea !== storage) {\n return\n }\n\n processStorageChanges()\n }\n\n // Add storage event listener for cross-tab sync\n storageEventApi.addEventListener(`storage`, handleStorageEvent)\n\n // Note: Cleanup is handled automatically by the collection when it's disposed\n },\n\n /**\n * Get sync metadata - returns storage key information\n * @returns Object containing storage key and storage type metadata\n */\n getSyncMetadata: () => ({\n storageKey,\n storageType:\n storage === (typeof window !== `undefined` ? window.localStorage : null)\n ? `localStorage`\n : `custom`,\n }),\n\n // Manual trigger function for local updates\n manualTrigger: processStorageChanges,\n\n // Collection instance reference\n collection,\n }\n\n /**\n * Confirms mutations by writing them through the sync interface\n * This moves mutations from optimistic to synced state\n * @param mutations - Array of mutation objects to confirm\n */\n const confirmOperationsSync = (mutations: Array<any>) => {\n if (!syncParams) {\n // Sync not initialized yet, mutations will be handled on next sync\n return\n }\n\n const { begin, write, commit } = syncParams\n\n // Write the mutations through sync to confirm them\n begin()\n mutations.forEach((mutation: any) => {\n write({\n type: mutation.type,\n value:\n mutation.type === `delete` ? 
mutation.original : mutation.modified,\n })\n })\n commit()\n }\n\n return {\n ...syncConfig,\n confirmOperationsSync,\n }\n}\n"],"names":["SerializationError","StorageKeyRequiredError","InvalidStorageDataFormatError","InvalidStorageObjectFormatError"],"mappings":";;;AAmIA,SAAS,yBACP,QACA,OACA,WACM;AACN,MAAI;AACF,WAAO,UAAU,KAAK;AAAA,EACxB,SAAS,OAAO;AACd,UAAM,IAAIA,OAAAA;AAAAA,MACR;AAAA,MACA,iBAAiB,QAAQ,MAAM,UAAU,OAAO,KAAK;AAAA,IAAA;AAAA,EAEzD;AACF;AAMA,SAAS,eAAuB;AAC9B,SAAO,OAAO,WAAA;AAChB;AAcA,SAAS,iBAAiB,KAA8B;AACtD,MAAI,OAAO,QAAQ,UAAU;AAC3B,WAAO,KAAK,GAAG;AAAA,EACjB;AACA,SAAO,KAAK,GAAG;AACjB;AASA,SAAS,iBAAiB,YAAqC;AAC7D,MAAI,WAAW,WAAW,IAAI,GAAG;AAC/B,WAAO,OAAO,WAAW,MAAM,CAAC,CAAC;AAAA,EACnC;AACA,MAAI,WAAW,WAAW,IAAI,GAAG;AAC/B,WAAO,WAAW,MAAM,CAAC;AAAA,EAC3B;AAEA,SAAO;AACT;AAOA,SAAS,wBAAoC;AAC3C,QAAM,8BAAc,IAAA;AAEpB,SAAO;AAAA,IACL,QAAQ,KAA4B;AAClC,aAAO,QAAQ,IAAI,GAAG,KAAK;AAAA,IAC7B;AAAA,IACA,QAAQ,KAAa,OAAqB;AACxC,cAAQ,IAAI,KAAK,KAAK;AAAA,IACxB;AAAA,IACA,WAAW,KAAmB;AAC5B,cAAQ,OAAO,GAAG;AAAA,IACpB;AAAA,EAAA;AAEJ;AAQA,SAAS,4BAA6C;AACpD,SAAO;AAAA,IACL,kBAAkB,MAAM;AAAA,IAExB;AAAA,IACA,qBAAqB,MAAM;AAAA,IAE3B;AAAA,EAAA;AAEJ;AAyHO,SAAS,8BACd,QAQA;AAEA,MAAI,CAAC,OAAO,YAAY;AACtB,UAAM,IAAIC,OAAAA,wBAAA;AAAA,EACZ;AAIA,QAAM,UACJ,OAAO,YACN,OAAO,WAAW,cAAc,OAAO,eAAe,SACvD,sBAAA;AAIF,QAAM,kBACJ,OAAO,oBACN,OAAO,WAAW,cAAc,SAAS,SAC1C,0BAAA;AAGF,QAAM,SAAS,OAAO,UAAU;AAGhC,QAAM,oCAAoB,IAAA;AAG1B,QAAM,OAAO;AAAA,IACX,OAAO;AAAA,IACP;AAAA,IACA;AAAA,IACA;AAAA,IACA,OAAO;AAAA,IACP;AAAA,EAAA;AAOF,QAAM,gBAAgB,CACpB,YACS;AACT,QAAI;AAEF,YAAM,aAA8C,CAAA;AACpD,cAAQ,QAAQ,CAAC,YAAY,QAAQ;AACnC,mBAAW,iBAAiB,GAAG,CAAC,IAAI;AAAA,MACtC,CAAC;AACD,YAAM,aAAa,OAAO,UAAU,UAAU;AAC9C,cAAQ,QAAQ,OAAO,YAAY,UAAU;AAAA,IAC/C,SAAS,OAAO;AACd,cAAQ;AAAA,QACN,8DAA8D,OAAO,UAAU;AAAA,QAC/E;AAAA,MAAA;AAEF,YAAM;AAAA,IACR;AAAA,EACF;AAKA,QAAM,eAA+B,MAAY;AAC/C,YAAQ,WAAW,OAAO,UAAU;AAAA,EACtC;AAMA,QAAM,iBAAmC,MAAc;AACrD,UAAM,OAAO,QAAQ,QAAQ,OAAO,UAAU;AAC9C,WAAO,OAAO,IAAI,KAAK,CAAC,IAAI,CAAC,EAAE,OAAO;AAAA,EACxC;AAMA,QAAM,kBAAkB,OAAO,WAAwC;AAErE,WAAO,YAAY,UAAU,QAAQ,CAAC,aAAa;AACjD,+BAAyB,QAAQ,SAAS,UAAU,QAAQ;AAAA,IAC9D,CAAC;AAGD,QAAI,gBAAqB,CAAA;AACzB,QAAI,OAAO,UAAU;AACnB,sBAAiB,MAAM,OAAO,SAAS,MAAM,KAAM,CAAA;AAAA,IACrD;AAKA,WAAO,YAAY,UAAU,QAAQ,CAAC,aAAa;AAEjD,YAAM,aAA8B;AAAA,QAClC,YAAY,aAAA;AAAA,QACZ,MAAM,SAAS;AAAA,MAAA;AAEjB,oBAAc,IAAI,SAAS,KAAK,UAAU;AAAA,IAC5C,CAAC;AAGD,kBAAc,aAAa;AAI3B,SAAK,sBAAsB,OAAO,YAAY,SAAS;AAEvD,WAAO;AAAA,EACT;AAEA,QAAM,kBAAkB,OAAO,WAAwC;AAErE,WAAO,YAAY,UAAU,QAAQ,CAAC,aAAa;AACjD,+BAAyB,QAAQ,SAAS,UAAU,QAAQ;AAAA,IAC9D,CAAC;AAGD,QAAI,gBAAqB,CAAA;AACzB,QAAI,OAAO,UAAU;AACnB,sBAAiB,MAAM,OAAO,SAAS,MAAM,KAAM,CAAA;AAAA,IACrD;AAKA,WAAO,YAAY,UAAU,QAAQ,CAAC,aAAa;AAEjD,YAAM,aAA8B;AAAA,QAClC,YAAY,aAAA;AAAA,QACZ,MAAM,SAAS;AAAA,MAAA;AAEjB,oBAAc,IAAI,SAAS,KAAK,UAAU;AAAA,IAC5C,CAAC;AAGD,kBAAc,aAAa;AAI3B,SAAK,sBAAsB,OAAO,YAAY,SAAS;AAEvD,WAAO;AAAA,EACT;AAEA,QAAM,kBAAkB,OAAO,WAAwC;AAErE,QAAI,gBAAqB,CAAA;AACzB,QAAI,OAAO,UAAU;AACnB,sBAAiB,MAAM,OAAO,SAAS,MAAM,KAAM,CAAA;AAAA,IACrD;AAKA,WAAO,YAAY,UAAU,QAAQ,CAAC,aAAa;AAEjD,oBAAc,OAAO,SAAS,GAAG;AAAA,IACnC,CAAC;AAGD,kBAAc,aAAa;AAI3B,SAAK,sBAAsB,OAAO,YAAY,SAAS;AAEvD,WAAO;AAAA,EACT;AAGA,QAAM;AAAA,IACJ,YAAY;AAAA,IACZ,SAAS;AAAA,IACT,iBAAiB;AAAA,IACjB,UAAU;AAAA,IACV,UAAU;AAAA,IACV,UAAU;AAAA,IACV;AAAA,IACA,GAAG;AAAA,EAAA,IACD;AAGJ,QAAM,eAAe,MAAM,oBAAoB,OAAO,UAAU;AAKhE,QAAM,kBAAkB,CAAC,gBAEnB;AAGJ,UAAM,sBAAsB,YAAY,UAAU,OAAO,CAAC,MAAM;AAE9D,UAAI,KAAK,cAAc,EAAE,eAAe,KAAK,YAAY;AACvD,eAAO;AAAA,MACT;AAEA,aAAO,EAAE,WAAW,OAAO;AAAA,IAC7B,CAAC;AAED,QAAI,oBAAoB,WAAW,GAAG;AACpC;AAAA,IACF;AAGA,eAAW,YAAY,qBAAqB;AAC1C,cAAQ,SAAS,MAAA;A
AAA,QACf,KAAK;AAAA,QACL,KAAK;AACH,mCAAyB,QAAQ,SAAS,UAAU,SAAS,IAAI;AACjE;AAAA,QACF,KAAK;AACH,mCAAyB,QAAQ,SAAS,UAAU,SAAS,IAAI;AACjE;AAAA,MAAA;AAAA,IAEN;AAIA,eAAW,YAAY,qBAAqB;AAE1C,cAAQ,SAAS,MAAA;AAAA,QACf,KAAK;AAAA,QACL,KAAK,UAAU;AACb,gBAAM,aAAkD;AAAA,YACtD,YAAY,aAAA;AAAA,YACZ,MAAM,SAAS;AAAA,UAAA;AAEjB,wBAAc,IAAI,SAAS,KAAK,UAAU;AAC1C;AAAA,QACF;AAAA,QACA,KAAK,UAAU;AACb,wBAAc,OAAO,SAAS,GAAG;AACjC;AAAA,QACF;AAAA,MAAA;AAAA,IAEJ;AAGA,kBAAc,aAAa;AAI3B,SAAK,sBAAsB,mBAAmB;AAAA,EAChD;AAEA,SAAO;AAAA,IACL,GAAG;AAAA,IACH,IAAI;AAAA,IACJ;AAAA,IACA,UAAU;AAAA,IACV,UAAU;AAAA,IACV,UAAU;AAAA,IACV,OAAO;AAAA,MACL;AAAA,MACA;AAAA,MACA;AAAA,IAAA;AAAA,EACF;AAEJ;AASA,SAAS,gBACP,YACA,SACA,QACqC;AACrC,MAAI;AACF,UAAM,UAAU,QAAQ,QAAQ,UAAU;AAC1C,QAAI,CAAC,SAAS;AACZ,iCAAW,IAAA;AAAA,IACb;AAEA,UAAM,SAAS,OAAO,MAAM,OAAO;AACnC,UAAM,8BAAc,IAAA;AAGpB,QACE,OAAO,WAAW,YAClB,WAAW,QACX,CAAC,MAAM,QAAQ,MAAM,GACrB;AACA,aAAO,QAAQ,MAAM,EAAE,QAAQ,CAAC,CAAC,YAAY,KAAK,MAAM;AAEtD,YACE,SACA,OAAO,UAAU,YACjB,gBAAgB,SAChB,UAAU,OACV;AACA,gBAAM,aAAa;AACnB,gBAAM,aAAa,iBAAiB,UAAU;AAC9C,kBAAQ,IAAI,YAAY,UAAU;AAAA,QACpC,OAAO;AACL,gBAAM,IAAIC,OAAAA,8BAA8B,YAAY,UAAU;AAAA,QAChE;AAAA,MACF,CAAC;AAAA,IACH,OAAO;AACL,YAAM,IAAIC,OAAAA,gCAAgC,UAAU;AAAA,IACtD;AAEA,WAAO;AAAA,EACT,SAAS,OAAO;AACd,YAAQ;AAAA,MACN,iEAAiE,UAAU;AAAA,MAC3E;AAAA,IAAA;AAEF,+BAAW,IAAA;AAAA,EACb;AACF;AAYA,SAAS,uBACP,YACA,SACA,iBACA,QACA,SACA,eAKA;AACA,MAAI,aAA0D;AAC9D,MAAI,aAAkB;AAQtB,QAAM,cAAc,CAClB,SACA,YAKI;AACJ,UAAM,UAID,CAAA;AAGL,YAAQ,QAAQ,CAAC,eAAe,QAAQ;AACtC,YAAM,gBAAgB,QAAQ,IAAI,GAAG;AACrC,UAAI,CAAC,eAAe;AAClB,gBAAQ,KAAK,EAAE,MAAM,UAAU,KAAK,OAAO,cAAc,MAAM;AAAA,MACjE,WAAW,cAAc,eAAe,cAAc,YAAY;AAChE,gBAAQ,KAAK,EAAE,MAAM,UAAU,KAAK,OAAO,cAAc,MAAM;AAAA,MACjE;AAAA,IACF,CAAC;AAGD,YAAQ,QAAQ,CAAC,eAAe,QAAQ;AACtC,UAAI,CAAC,QAAQ,IAAI,GAAG,GAAG;AACrB,gBAAQ,KAAK,EAAE,MAAM,UAAU,KAAK,OAAO,cAAc,MAAM;AAAA,MACjE;AAAA,IACF,CAAC;AAED,WAAO;AAAA,EACT;AAMA,QAAM,wBAAwB,MAAM;AAClC,QAAI,CAAC,WAAY;AAEjB,UAAM,EAAE,OAAO,OAAO,OAAA,IAAW;AAGjC,UAAM,UAAU,gBAAmB,YAAY,SAAS,MAAM;AAG9D,UAAM,UAAU,YAAY,eAAe,OAAO;AAElD,QAAI,QAAQ,SAAS,GAAG;AACtB,YAAA;AACA,cAAQ,QAAQ,CAAC,EAAE,MAAM,YAAY;AACnC,YAAI,OAAO;AACT,mCAAyB,QAAQ,OAAO,IAAI;AAC5C,gBAAM,EAAE,MAAM,OAAO;AAAA,QACvB;AAAA,MACF,CAAC;AACD,aAAA;AAGA,oBAAc,MAAA;AACd,cAAQ,QAAQ,CAAC,YAAY,QAAQ;AACnC,sBAAc,IAAI,KAAK,UAAU;AAAA,MACnC,CAAC;AAAA,IACH;AAAA,EACF;AAEA,QAAM,aAGF;AAAA,IACF,MAAM,CAAC,WAAiD;AACtD,YAAM,EAAE,OAAO,OAAO,QAAQ,cAAc;AAG5C,mBAAa;AACb,mBAAa,OAAO;AAGpB,YAAM,cAAc,gBAAmB,YAAY,SAAS,MAAM;AAClE,UAAI,YAAY,OAAO,GAAG;AACxB,cAAA;AACA,oBAAY,QAAQ,CAAC,eAAe;AAClC,mCAAyB,QAAQ,WAAW,MAAM,MAAM;AACxD,gBAAM,EAAE,MAAM,UAAU,OAAO,WAAW,MAAM;AAAA,QAClD,CAAC;AACD,eAAA;AAAA,MACF;AAGA,oBAAc,MAAA;AACd,kBAAY,QAAQ,CAAC,YAAY,QAAQ;AACvC,sBAAc,IAAI,KAAK,UAAU;AAAA,MACnC,CAAC;AAGD,gBAAA;AAGA,YAAM,qBAAqB,CAAC,UAAwB;AAElD,YAAI,MAAM,QAAQ,cAAc,MAAM,gBAAgB,SAAS;AAC7D;AAAA,QACF;AAEA,8BAAA;AAAA,MACF;AAGA,sBAAgB,iBAAiB,WAAW,kBAAkB;AAAA,IAGhE;AAAA;AAAA;AAAA;AAAA;AAAA,IAMA,iBAAiB,OAAO;AAAA,MACtB;AAAA,MACA,aACE,aAAa,OAAO,WAAW,cAAc,OAAO,eAAe,QAC/D,iBACA;AAAA,IAAA;AAAA;AAAA,IAIR,eAAe;AAAA;AAAA,IAGf;AAAA,EAAA;AAQF,QAAM,wBAAwB,CAAC,cAA0B;AACvD,QAAI,CAAC,YAAY;AAEf;AAAA,IACF;AAEA,UAAM,EAAE,OAAO,OAAO,OAAA,IAAW;AAGjC,UAAA;AACA,cAAU,QAAQ,CAAC,aAAkB;AACnC,YAAM;AAAA,QACJ,MAAM,SAAS;AAAA,QACf,OACE,SAAS,SAAS,WAAW,SAAS,WAAW,SAAS;AAAA,MAAA,CAC7D;AAAA,IACH,CAAC;AACD,WAAA;AAAA,EACF;AAEA,SAAO;AAAA,IACL,GAAG;AAAA,IACH;AAAA,EAAA;AAEJ;;"}
+
{"version":3,"file":"local-storage.cjs","sources":["../../src/local-storage.ts"],"sourcesContent":["import {\n InvalidStorageDataFormatError,\n InvalidStorageObjectFormatError,\n SerializationError,\n StorageKeyRequiredError,\n} from './errors'\nimport type {\n BaseCollectionConfig,\n CollectionConfig,\n DeleteMutationFnParams,\n InferSchemaOutput,\n InsertMutationFnParams,\n PendingMutation,\n SyncConfig,\n UpdateMutationFnParams,\n UtilsRecord,\n} from './types'\nimport type { StandardSchemaV1 } from '@standard-schema/spec'\n\n/**\n * Storage API interface - subset of DOM Storage that we need\n */\nexport type StorageApi = Pick<Storage, `getItem` | `setItem` | `removeItem`>\n\n/**\n * Storage event API - subset of Window for 'storage' events only\n */\nexport type StorageEventApi = {\n addEventListener: (\n type: `storage`,\n listener: (event: StorageEvent) => void,\n ) => void\n removeEventListener: (\n type: `storage`,\n listener: (event: StorageEvent) => void,\n ) => void\n}\n\n/**\n * Internal storage format that includes version tracking\n */\ninterface StoredItem<T> {\n versionKey: string\n data: T\n}\n\nexport interface Parser {\n parse: (data: string) => unknown\n stringify: (data: unknown) => string\n}\n\n/**\n * Configuration interface for localStorage collection options\n * @template T - The type of items in the collection\n * @template TSchema - The schema type for validation\n * @template TKey - The type of the key returned by `getKey`\n */\nexport interface LocalStorageCollectionConfig<\n T extends object = object,\n TSchema extends StandardSchemaV1 = never,\n TKey extends string | number = string | number,\n> extends BaseCollectionConfig<T, TKey, TSchema> {\n /**\n * The key to use for storing the collection data in localStorage/sessionStorage\n */\n storageKey: string\n\n /**\n * Storage API to use (defaults to window.localStorage)\n * Can be any object that implements the Storage interface (e.g., sessionStorage)\n */\n storage?: StorageApi\n\n /**\n * Storage event API to use for cross-tab synchronization (defaults to window)\n * Can be any object that implements addEventListener/removeEventListener for storage events\n */\n storageEventApi?: StorageEventApi\n\n /**\n * Parser to use for serializing and deserializing data to and from storage\n * Defaults to JSON\n */\n parser?: Parser\n}\n\n/**\n * Type for the clear utility function\n */\nexport type ClearStorageFn = () => void\n\n/**\n * Type for the getStorageSize utility function\n */\nexport type GetStorageSizeFn = () => number\n\n/**\n * LocalStorage collection utilities type\n */\nexport interface LocalStorageCollectionUtils extends UtilsRecord {\n clearStorage: ClearStorageFn\n getStorageSize: GetStorageSizeFn\n /**\n * Accepts mutations from a transaction that belong to this collection and persists them to localStorage.\n * This should be called in your transaction's mutationFn to persist local-storage data.\n *\n * @param transaction - The transaction containing mutations to accept\n * @example\n * const localSettings = createCollection(localStorageCollectionOptions({...}))\n *\n * const tx = createTransaction({\n * mutationFn: async ({ transaction }) => {\n * // Make API call first\n * await api.save(...)\n * // Then persist local-storage mutations after success\n * localSettings.utils.acceptMutations(transaction)\n * }\n * })\n */\n acceptMutations: (transaction: {\n mutations: Array<PendingMutation<Record<string, unknown>>>\n }) => void\n}\n\n/**\n * Validates that a value can be JSON serialized\n * @param 
parser - The parser to use for serialization\n * @param value - The value to validate for JSON serialization\n * @param operation - The operation type being performed (for error messages)\n * @throws Error if the value cannot be JSON serialized\n */\nfunction validateJsonSerializable(\n parser: Parser,\n value: any,\n operation: string,\n): void {\n try {\n parser.stringify(value)\n } catch (error) {\n throw new SerializationError(\n operation,\n error instanceof Error ? error.message : String(error),\n )\n }\n}\n\n/**\n * Generate a UUID for version tracking\n * @returns A unique identifier string for tracking data versions\n */\nfunction generateUuid(): string {\n return crypto.randomUUID()\n}\n\n/**\n * Encodes a key (string or number) into a storage-safe string format.\n * This prevents collisions between numeric and string keys by prefixing with type information.\n *\n * Examples:\n * - number 1 → \"n:1\"\n * - string \"1\" → \"s:1\"\n * - string \"n:1\" → \"s:n:1\"\n *\n * @param key - The key to encode (string or number)\n * @returns Type-prefixed string that is safe for storage\n */\nfunction encodeStorageKey(key: string | number): string {\n if (typeof key === `number`) {\n return `n:${key}`\n }\n return `s:${key}`\n}\n\n/**\n * Decodes a storage key back to its original form.\n * This is the inverse of encodeStorageKey.\n *\n * @param encodedKey - The encoded key from storage\n * @returns The original key (string or number)\n */\nfunction decodeStorageKey(encodedKey: string): string | number {\n if (encodedKey.startsWith(`n:`)) {\n return Number(encodedKey.slice(2))\n }\n if (encodedKey.startsWith(`s:`)) {\n return encodedKey.slice(2)\n }\n // Fallback for legacy data without encoding\n return encodedKey\n}\n\n/**\n * Creates an in-memory storage implementation that mimics the StorageApi interface\n * Used as a fallback when localStorage is not available (e.g., server-side rendering)\n * @returns An object implementing the StorageApi interface using an in-memory Map\n */\nfunction createInMemoryStorage(): StorageApi {\n const storage = new Map<string, string>()\n\n return {\n getItem(key: string): string | null {\n return storage.get(key) ?? 
null\n },\n setItem(key: string, value: string): void {\n storage.set(key, value)\n },\n removeItem(key: string): void {\n storage.delete(key)\n },\n }\n}\n\n/**\n * Creates a no-op storage event API for environments without window (e.g., server-side)\n * This provides the required interface but doesn't actually listen to any events\n * since cross-tab synchronization is not possible in server environments\n * @returns An object implementing the StorageEventApi interface with no-op methods\n */\nfunction createNoOpStorageEventApi(): StorageEventApi {\n return {\n addEventListener: () => {\n // No-op: cannot listen to storage events without window\n },\n removeEventListener: () => {\n // No-op: cannot remove listeners without window\n },\n }\n}\n\n/**\n * Creates localStorage collection options for use with a standard Collection\n *\n * This function creates a collection that persists data to localStorage/sessionStorage\n * and synchronizes changes across browser tabs using storage events.\n *\n * **Fallback Behavior:**\n *\n * When localStorage is not available (e.g., in server-side rendering environments),\n * this function automatically falls back to an in-memory storage implementation.\n * This prevents errors during module initialization and allows the collection to\n * work in any environment, though data will not persist across page reloads or\n * be shared across tabs when using the in-memory fallback.\n *\n * **Using with Manual Transactions:**\n *\n * For manual transactions, you must call `utils.acceptMutations()` in your transaction's `mutationFn`\n * to persist changes made during `tx.mutate()`. This is necessary because local-storage collections\n * don't participate in the standard mutation handler flow for manual transactions.\n *\n * @template TExplicit - The explicit type of items in the collection (highest priority)\n * @template TSchema - The schema type for validation and type inference (second priority)\n * @template TFallback - The fallback type if no explicit or schema type is provided\n * @param config - Configuration options for the localStorage collection\n * @returns Collection options with utilities including clearStorage, getStorageSize, and acceptMutations\n *\n * @example\n * // Basic localStorage collection\n * const collection = createCollection(\n * localStorageCollectionOptions({\n * storageKey: 'todos',\n * getKey: (item) => item.id,\n * })\n * )\n *\n * @example\n * // localStorage collection with custom storage\n * const collection = createCollection(\n * localStorageCollectionOptions({\n * storageKey: 'todos',\n * storage: window.sessionStorage, // Use sessionStorage instead\n * getKey: (item) => item.id,\n * })\n * )\n *\n * @example\n * // localStorage collection with mutation handlers\n * const collection = createCollection(\n * localStorageCollectionOptions({\n * storageKey: 'todos',\n * getKey: (item) => item.id,\n * onInsert: async ({ transaction }) => {\n * console.log('Item inserted:', transaction.mutations[0].modified)\n * },\n * })\n * )\n *\n * @example\n * // Using with manual transactions\n * const localSettings = createCollection(\n * localStorageCollectionOptions({\n * storageKey: 'user-settings',\n * getKey: (item) => item.id,\n * })\n * )\n *\n * const tx = createTransaction({\n * mutationFn: async ({ transaction }) => {\n * // Use settings data in API call\n * const settingsMutations = transaction.mutations.filter(m => m.collection === localSettings)\n * await api.updateUserProfile({ settings: settingsMutations[0]?.modified })\n *\n 
* // Persist local-storage mutations after API success\n * localSettings.utils.acceptMutations(transaction)\n * }\n * })\n *\n * tx.mutate(() => {\n * localSettings.insert({ id: 'theme', value: 'dark' })\n * apiCollection.insert({ id: 2, data: 'profile data' })\n * })\n *\n * await tx.commit()\n */\n\n// Overload for when schema is provided\nexport function localStorageCollectionOptions<\n T extends StandardSchemaV1,\n TKey extends string | number = string | number,\n>(\n config: LocalStorageCollectionConfig<InferSchemaOutput<T>, T, TKey> & {\n schema: T\n },\n): CollectionConfig<\n InferSchemaOutput<T>,\n TKey,\n T,\n LocalStorageCollectionUtils\n> & {\n id: string\n utils: LocalStorageCollectionUtils\n schema: T\n}\n\n// Overload for when no schema is provided\n// the type T needs to be passed explicitly unless it can be inferred from the getKey function in the config\nexport function localStorageCollectionOptions<\n T extends object,\n TKey extends string | number = string | number,\n>(\n config: LocalStorageCollectionConfig<T, never, TKey> & {\n schema?: never // prohibit schema\n },\n): CollectionConfig<T, TKey, never, LocalStorageCollectionUtils> & {\n id: string\n utils: LocalStorageCollectionUtils\n schema?: never // no schema in the result\n}\n\nexport function localStorageCollectionOptions(\n config: LocalStorageCollectionConfig<any, any, string | number>,\n): Omit<\n CollectionConfig<any, string | number, any, LocalStorageCollectionUtils>,\n `id`\n> & {\n id: string\n utils: LocalStorageCollectionUtils\n schema?: StandardSchemaV1\n} {\n // Validate required parameters\n if (!config.storageKey) {\n throw new StorageKeyRequiredError()\n }\n\n // Default to window.localStorage if no storage is provided\n // Fall back to in-memory storage if localStorage is not available (e.g., server-side rendering)\n const storage =\n config.storage ||\n (typeof window !== `undefined` ? window.localStorage : null) ||\n createInMemoryStorage()\n\n // Default to window for storage events if not provided\n // Fall back to no-op storage event API if window is not available (e.g., server-side rendering)\n const storageEventApi =\n config.storageEventApi ||\n (typeof window !== `undefined` ? 
window : null) ||\n createNoOpStorageEventApi()\n\n // Default to JSON parser if no parser is provided\n const parser = config.parser || JSON\n\n // Track the last known state to detect changes\n const lastKnownData = new Map<string | number, StoredItem<any>>()\n\n // Create the sync configuration\n const sync = createLocalStorageSync<any>(\n config.storageKey,\n storage,\n storageEventApi,\n parser,\n config.getKey,\n lastKnownData,\n )\n\n /**\n * Save data to storage\n * @param dataMap - Map of items with version tracking to save to storage\n */\n const saveToStorage = (\n dataMap: Map<string | number, StoredItem<any>>,\n ): void => {\n try {\n // Convert Map to object format for storage\n const objectData: Record<string, StoredItem<any>> = {}\n dataMap.forEach((storedItem, key) => {\n objectData[encodeStorageKey(key)] = storedItem\n })\n const serialized = parser.stringify(objectData)\n storage.setItem(config.storageKey, serialized)\n } catch (error) {\n console.error(\n `[LocalStorageCollection] Error saving data to storage key \"${config.storageKey}\":`,\n error,\n )\n throw error\n }\n }\n\n /**\n * Removes all collection data from the configured storage\n */\n const clearStorage: ClearStorageFn = (): void => {\n storage.removeItem(config.storageKey)\n }\n\n /**\n * Get the size of the stored data in bytes (approximate)\n * @returns The approximate size in bytes of the stored collection data\n */\n const getStorageSize: GetStorageSizeFn = (): number => {\n const data = storage.getItem(config.storageKey)\n return data ? new Blob([data]).size : 0\n }\n\n /*\n * Create wrapper handlers for direct persistence operations that perform actual storage operations\n * Wraps the user's onInsert handler to also save changes to localStorage\n */\n const wrappedOnInsert = async (params: InsertMutationFnParams<any>) => {\n // Validate that all values in the transaction can be JSON serialized\n params.transaction.mutations.forEach((mutation) => {\n validateJsonSerializable(parser, mutation.modified, `insert`)\n })\n\n // Call the user handler BEFORE persisting changes (if provided)\n let handlerResult: any = {}\n if (config.onInsert) {\n handlerResult = (await config.onInsert(params)) ?? {}\n }\n\n // Always persist to storage\n // Use lastKnownData (in-memory cache) instead of reading from storage\n // Add new items with version keys\n params.transaction.mutations.forEach((mutation) => {\n // Use the engine's pre-computed key for consistency\n const storedItem: StoredItem<any> = {\n versionKey: generateUuid(),\n data: mutation.modified,\n }\n lastKnownData.set(mutation.key, storedItem)\n })\n\n // Save to storage\n saveToStorage(lastKnownData)\n\n // Confirm mutations through sync interface (moves from optimistic to synced state)\n // without reloading from storage\n sync.confirmOperationsSync(params.transaction.mutations)\n\n return handlerResult\n }\n\n const wrappedOnUpdate = async (params: UpdateMutationFnParams<any>) => {\n // Validate that all values in the transaction can be JSON serialized\n params.transaction.mutations.forEach((mutation) => {\n validateJsonSerializable(parser, mutation.modified, `update`)\n })\n\n // Call the user handler BEFORE persisting changes (if provided)\n let handlerResult: any = {}\n if (config.onUpdate) {\n handlerResult = (await config.onUpdate(params)) ?? 
{}\n }\n\n // Always persist to storage\n // Use lastKnownData (in-memory cache) instead of reading from storage\n // Update items with new version keys\n params.transaction.mutations.forEach((mutation) => {\n // Use the engine's pre-computed key for consistency\n const storedItem: StoredItem<any> = {\n versionKey: generateUuid(),\n data: mutation.modified,\n }\n lastKnownData.set(mutation.key, storedItem)\n })\n\n // Save to storage\n saveToStorage(lastKnownData)\n\n // Confirm mutations through sync interface (moves from optimistic to synced state)\n // without reloading from storage\n sync.confirmOperationsSync(params.transaction.mutations)\n\n return handlerResult\n }\n\n const wrappedOnDelete = async (params: DeleteMutationFnParams<any>) => {\n // Call the user handler BEFORE persisting changes (if provided)\n let handlerResult: any = {}\n if (config.onDelete) {\n handlerResult = (await config.onDelete(params)) ?? {}\n }\n\n // Always persist to storage\n // Use lastKnownData (in-memory cache) instead of reading from storage\n // Remove items\n params.transaction.mutations.forEach((mutation) => {\n // Use the engine's pre-computed key for consistency\n lastKnownData.delete(mutation.key)\n })\n\n // Save to storage\n saveToStorage(lastKnownData)\n\n // Confirm mutations through sync interface (moves from optimistic to synced state)\n // without reloading from storage\n sync.confirmOperationsSync(params.transaction.mutations)\n\n return handlerResult\n }\n\n // Extract standard Collection config properties\n const {\n storageKey: _storageKey,\n storage: _storage,\n storageEventApi: _storageEventApi,\n onInsert: _onInsert,\n onUpdate: _onUpdate,\n onDelete: _onDelete,\n id,\n ...restConfig\n } = config\n\n // Default id to a pattern based on storage key if not provided\n const collectionId = id ?? 
`local-collection:${config.storageKey}`\n\n /**\n * Accepts mutations from a transaction that belong to this collection and persists them to storage\n */\n const acceptMutations = (transaction: {\n mutations: Array<PendingMutation<Record<string, unknown>>>\n }) => {\n // Filter mutations that belong to this collection\n // Use collection ID for filtering if collection reference isn't available yet\n const collectionMutations = transaction.mutations.filter((m) => {\n // Try to match by collection reference first\n if (sync.collection && m.collection === sync.collection) {\n return true\n }\n // Fall back to matching by collection ID\n return m.collection.id === collectionId\n })\n\n if (collectionMutations.length === 0) {\n return\n }\n\n // Validate all mutations can be serialized before modifying storage\n for (const mutation of collectionMutations) {\n switch (mutation.type) {\n case `insert`:\n case `update`:\n validateJsonSerializable(parser, mutation.modified, mutation.type)\n break\n case `delete`:\n validateJsonSerializable(parser, mutation.original, mutation.type)\n break\n }\n }\n\n // Use lastKnownData (in-memory cache) instead of reading from storage\n // Apply each mutation\n for (const mutation of collectionMutations) {\n // Use the engine's pre-computed key to avoid key derivation issues\n switch (mutation.type) {\n case `insert`:\n case `update`: {\n const storedItem: StoredItem<Record<string, unknown>> = {\n versionKey: generateUuid(),\n data: mutation.modified,\n }\n lastKnownData.set(mutation.key, storedItem)\n break\n }\n case `delete`: {\n lastKnownData.delete(mutation.key)\n break\n }\n }\n }\n\n // Save to storage\n saveToStorage(lastKnownData)\n\n // Confirm the mutations in the collection to move them from optimistic to synced state\n // This writes them through the sync interface to make them \"synced\" instead of \"optimistic\"\n sync.confirmOperationsSync(collectionMutations)\n }\n\n return {\n ...restConfig,\n id: collectionId,\n sync,\n onInsert: wrappedOnInsert,\n onUpdate: wrappedOnUpdate,\n onDelete: wrappedOnDelete,\n utils: {\n clearStorage,\n getStorageSize,\n acceptMutations,\n },\n }\n}\n\n/**\n * Load data from storage and return as a Map\n * @param parser - The parser to use for deserializing the data\n * @param storageKey - The key used to store data in the storage API\n * @param storage - The storage API to load from (localStorage, sessionStorage, etc.)\n * @returns Map of stored items with version tracking, or empty Map if loading fails\n */\nfunction loadFromStorage<T extends object>(\n storageKey: string,\n storage: StorageApi,\n parser: Parser,\n): Map<string | number, StoredItem<T>> {\n try {\n const rawData = storage.getItem(storageKey)\n if (!rawData) {\n return new Map()\n }\n\n const parsed = parser.parse(rawData)\n const dataMap = new Map<string | number, StoredItem<T>>()\n\n // Handle object format where keys map to StoredItem values\n if (\n typeof parsed === `object` &&\n parsed !== null &&\n !Array.isArray(parsed)\n ) {\n Object.entries(parsed).forEach(([encodedKey, value]) => {\n // Runtime check to ensure the value has the expected StoredItem structure\n if (\n value &&\n typeof value === `object` &&\n `versionKey` in value &&\n `data` in value\n ) {\n const storedItem = value as StoredItem<T>\n const decodedKey = decodeStorageKey(encodedKey)\n dataMap.set(decodedKey, storedItem)\n } else {\n throw new InvalidStorageDataFormatError(storageKey, encodedKey)\n }\n })\n } else {\n throw new InvalidStorageObjectFormatError(storageKey)\n 
}\n\n return dataMap\n } catch (error) {\n console.warn(\n `[LocalStorageCollection] Error loading data from storage key \"${storageKey}\":`,\n error,\n )\n return new Map()\n }\n}\n\n/**\n * Internal function to create localStorage sync configuration\n * Creates a sync configuration that handles localStorage persistence and cross-tab synchronization\n * @param storageKey - The key used for storing data in localStorage\n * @param storage - The storage API to use (localStorage, sessionStorage, etc.)\n * @param storageEventApi - The event API for listening to storage changes\n * @param getKey - Function to extract the key from an item\n * @param lastKnownData - Map tracking the last known state for change detection\n * @returns Sync configuration with manual trigger capability\n */\nfunction createLocalStorageSync<T extends object>(\n storageKey: string,\n storage: StorageApi,\n storageEventApi: StorageEventApi,\n parser: Parser,\n _getKey: (item: T) => string | number,\n lastKnownData: Map<string | number, StoredItem<T>>,\n): SyncConfig<T> & {\n manualTrigger?: () => void\n collection: any\n confirmOperationsSync: (mutations: Array<any>) => void\n} {\n let syncParams: Parameters<SyncConfig<T>[`sync`]>[0] | null = null\n let collection: any = null\n\n /**\n * Compare two Maps to find differences using version keys\n * @param oldData - The previous state of stored items\n * @param newData - The current state of stored items\n * @returns Array of changes with type, key, and value information\n */\n const findChanges = (\n oldData: Map<string | number, StoredItem<T>>,\n newData: Map<string | number, StoredItem<T>>,\n ): Array<{\n type: `insert` | `update` | `delete`\n key: string | number\n value?: T\n }> => {\n const changes: Array<{\n type: `insert` | `update` | `delete`\n key: string | number\n value?: T\n }> = []\n\n // Check for deletions and updates\n oldData.forEach((oldStoredItem, key) => {\n const newStoredItem = newData.get(key)\n if (!newStoredItem) {\n changes.push({ type: `delete`, key, value: oldStoredItem.data })\n } else if (oldStoredItem.versionKey !== newStoredItem.versionKey) {\n changes.push({ type: `update`, key, value: newStoredItem.data })\n }\n })\n\n // Check for insertions\n newData.forEach((newStoredItem, key) => {\n if (!oldData.has(key)) {\n changes.push({ type: `insert`, key, value: newStoredItem.data })\n }\n })\n\n return changes\n }\n\n /**\n * Process storage changes and update collection\n * Loads new data from storage, compares with last known state, and applies changes\n */\n const processStorageChanges = () => {\n if (!syncParams) return\n\n const { begin, write, commit } = syncParams\n\n // Load the new data\n const newData = loadFromStorage<T>(storageKey, storage, parser)\n\n // Find the specific changes\n const changes = findChanges(lastKnownData, newData)\n\n if (changes.length > 0) {\n begin()\n changes.forEach(({ type, value }) => {\n if (value) {\n validateJsonSerializable(parser, value, type)\n write({ type, value })\n }\n })\n commit()\n\n // Update lastKnownData\n lastKnownData.clear()\n newData.forEach((storedItem, key) => {\n lastKnownData.set(key, storedItem)\n })\n }\n }\n\n const syncConfig: SyncConfig<T> & {\n manualTrigger?: () => void\n collection: any\n } = {\n sync: (params: Parameters<SyncConfig<T>[`sync`]>[0]) => {\n const { begin, write, commit, markReady } = params\n\n // Store sync params and collection for later use\n syncParams = params\n collection = params.collection\n\n // Initial load\n const initialData = 
loadFromStorage<T>(storageKey, storage, parser)\n if (initialData.size > 0) {\n begin()\n initialData.forEach((storedItem) => {\n validateJsonSerializable(parser, storedItem.data, `load`)\n write({ type: `insert`, value: storedItem.data })\n })\n commit()\n }\n\n // Update lastKnownData\n lastKnownData.clear()\n initialData.forEach((storedItem, key) => {\n lastKnownData.set(key, storedItem)\n })\n\n // Mark collection as ready after initial load\n markReady()\n\n // Listen for storage events from other tabs\n const handleStorageEvent = (event: StorageEvent) => {\n // Only respond to changes to our specific key and from our storage\n if (event.key !== storageKey || event.storageArea !== storage) {\n return\n }\n\n processStorageChanges()\n }\n\n // Add storage event listener for cross-tab sync\n storageEventApi.addEventListener(`storage`, handleStorageEvent)\n\n // Note: Cleanup is handled automatically by the collection when it's disposed\n },\n\n /**\n * Get sync metadata - returns storage key information\n * @returns Object containing storage key and storage type metadata\n */\n getSyncMetadata: () => ({\n storageKey,\n storageType:\n storage === (typeof window !== `undefined` ? window.localStorage : null)\n ? `localStorage`\n : `custom`,\n }),\n\n // Manual trigger function for local updates\n manualTrigger: processStorageChanges,\n\n // Collection instance reference\n collection,\n }\n\n /**\n * Confirms mutations by writing them through the sync interface\n * This moves mutations from optimistic to synced state\n * @param mutations - Array of mutation objects to confirm\n */\n const confirmOperationsSync = (mutations: Array<any>) => {\n if (!syncParams) {\n // Sync not initialized yet, mutations will be handled on next sync\n return\n }\n\n const { begin, write, commit } = syncParams\n\n // Write the mutations through sync to confirm them\n begin()\n mutations.forEach((mutation: any) => {\n write({\n type: mutation.type,\n value:\n mutation.type === `delete` ? 
mutation.original : mutation.modified,\n })\n })\n commit()\n }\n\n return {\n ...syncConfig,\n confirmOperationsSync,\n }\n}\n"],"names":["SerializationError","StorageKeyRequiredError","InvalidStorageDataFormatError","InvalidStorageObjectFormatError"],"mappings":";;;AAmIA,SAAS,yBACP,QACA,OACA,WACM;AACN,MAAI;AACF,WAAO,UAAU,KAAK;AAAA,EACxB,SAAS,OAAO;AACd,UAAM,IAAIA,OAAAA;AAAAA,MACR;AAAA,MACA,iBAAiB,QAAQ,MAAM,UAAU,OAAO,KAAK;AAAA,IAAA;AAAA,EAEzD;AACF;AAMA,SAAS,eAAuB;AAC9B,SAAO,OAAO,WAAA;AAChB;AAcA,SAAS,iBAAiB,KAA8B;AACtD,MAAI,OAAO,QAAQ,UAAU;AAC3B,WAAO,KAAK,GAAG;AAAA,EACjB;AACA,SAAO,KAAK,GAAG;AACjB;AASA,SAAS,iBAAiB,YAAqC;AAC7D,MAAI,WAAW,WAAW,IAAI,GAAG;AAC/B,WAAO,OAAO,WAAW,MAAM,CAAC,CAAC;AAAA,EACnC;AACA,MAAI,WAAW,WAAW,IAAI,GAAG;AAC/B,WAAO,WAAW,MAAM,CAAC;AAAA,EAC3B;AAEA,SAAO;AACT;AAOA,SAAS,wBAAoC;AAC3C,QAAM,8BAAc,IAAA;AAEpB,SAAO;AAAA,IACL,QAAQ,KAA4B;AAClC,aAAO,QAAQ,IAAI,GAAG,KAAK;AAAA,IAC7B;AAAA,IACA,QAAQ,KAAa,OAAqB;AACxC,cAAQ,IAAI,KAAK,KAAK;AAAA,IACxB;AAAA,IACA,WAAW,KAAmB;AAC5B,cAAQ,OAAO,GAAG;AAAA,IACpB;AAAA,EAAA;AAEJ;AAQA,SAAS,4BAA6C;AACpD,SAAO;AAAA,IACL,kBAAkB,MAAM;AAAA,IAExB;AAAA,IACA,qBAAqB,MAAM;AAAA,IAE3B;AAAA,EAAA;AAEJ;AAyHO,SAAS,8BACd,QAQA;AAEA,MAAI,CAAC,OAAO,YAAY;AACtB,UAAM,IAAIC,OAAAA,wBAAA;AAAA,EACZ;AAIA,QAAM,UACJ,OAAO,YACN,OAAO,WAAW,cAAc,OAAO,eAAe,SACvD,sBAAA;AAIF,QAAM,kBACJ,OAAO,oBACN,OAAO,WAAW,cAAc,SAAS,SAC1C,0BAAA;AAGF,QAAM,SAAS,OAAO,UAAU;AAGhC,QAAM,oCAAoB,IAAA;AAG1B,QAAM,OAAO;AAAA,IACX,OAAO;AAAA,IACP;AAAA,IACA;AAAA,IACA;AAAA,IACA,OAAO;AAAA,IACP;AAAA,EAAA;AAOF,QAAM,gBAAgB,CACpB,YACS;AACT,QAAI;AAEF,YAAM,aAA8C,CAAA;AACpD,cAAQ,QAAQ,CAAC,YAAY,QAAQ;AACnC,mBAAW,iBAAiB,GAAG,CAAC,IAAI;AAAA,MACtC,CAAC;AACD,YAAM,aAAa,OAAO,UAAU,UAAU;AAC9C,cAAQ,QAAQ,OAAO,YAAY,UAAU;AAAA,IAC/C,SAAS,OAAO;AACd,cAAQ;AAAA,QACN,8DAA8D,OAAO,UAAU;AAAA,QAC/E;AAAA,MAAA;AAEF,YAAM;AAAA,IACR;AAAA,EACF;AAKA,QAAM,eAA+B,MAAY;AAC/C,YAAQ,WAAW,OAAO,UAAU;AAAA,EACtC;AAMA,QAAM,iBAAmC,MAAc;AACrD,UAAM,OAAO,QAAQ,QAAQ,OAAO,UAAU;AAC9C,WAAO,OAAO,IAAI,KAAK,CAAC,IAAI,CAAC,EAAE,OAAO;AAAA,EACxC;AAMA,QAAM,kBAAkB,OAAO,WAAwC;AAErE,WAAO,YAAY,UAAU,QAAQ,CAAC,aAAa;AACjD,+BAAyB,QAAQ,SAAS,UAAU,QAAQ;AAAA,IAC9D,CAAC;AAGD,QAAI,gBAAqB,CAAA;AACzB,QAAI,OAAO,UAAU;AACnB,sBAAiB,MAAM,OAAO,SAAS,MAAM,KAAM,CAAA;AAAA,IACrD;AAKA,WAAO,YAAY,UAAU,QAAQ,CAAC,aAAa;AAEjD,YAAM,aAA8B;AAAA,QAClC,YAAY,aAAA;AAAA,QACZ,MAAM,SAAS;AAAA,MAAA;AAEjB,oBAAc,IAAI,SAAS,KAAK,UAAU;AAAA,IAC5C,CAAC;AAGD,kBAAc,aAAa;AAI3B,SAAK,sBAAsB,OAAO,YAAY,SAAS;AAEvD,WAAO;AAAA,EACT;AAEA,QAAM,kBAAkB,OAAO,WAAwC;AAErE,WAAO,YAAY,UAAU,QAAQ,CAAC,aAAa;AACjD,+BAAyB,QAAQ,SAAS,UAAU,QAAQ;AAAA,IAC9D,CAAC;AAGD,QAAI,gBAAqB,CAAA;AACzB,QAAI,OAAO,UAAU;AACnB,sBAAiB,MAAM,OAAO,SAAS,MAAM,KAAM,CAAA;AAAA,IACrD;AAKA,WAAO,YAAY,UAAU,QAAQ,CAAC,aAAa;AAEjD,YAAM,aAA8B;AAAA,QAClC,YAAY,aAAA;AAAA,QACZ,MAAM,SAAS;AAAA,MAAA;AAEjB,oBAAc,IAAI,SAAS,KAAK,UAAU;AAAA,IAC5C,CAAC;AAGD,kBAAc,aAAa;AAI3B,SAAK,sBAAsB,OAAO,YAAY,SAAS;AAEvD,WAAO;AAAA,EACT;AAEA,QAAM,kBAAkB,OAAO,WAAwC;AAErE,QAAI,gBAAqB,CAAA;AACzB,QAAI,OAAO,UAAU;AACnB,sBAAiB,MAAM,OAAO,SAAS,MAAM,KAAM,CAAA;AAAA,IACrD;AAKA,WAAO,YAAY,UAAU,QAAQ,CAAC,aAAa;AAEjD,oBAAc,OAAO,SAAS,GAAG;AAAA,IACnC,CAAC;AAGD,kBAAc,aAAa;AAI3B,SAAK,sBAAsB,OAAO,YAAY,SAAS;AAEvD,WAAO;AAAA,EACT;AAGA,QAAM;AAAA,IACJ,YAAY;AAAA,IACZ,SAAS;AAAA,IACT,iBAAiB;AAAA,IACjB,UAAU;AAAA,IACV,UAAU;AAAA,IACV,UAAU;AAAA,IACV;AAAA,IACA,GAAG;AAAA,EAAA,IACD;AAGJ,QAAM,eAAe,MAAM,oBAAoB,OAAO,UAAU;AAKhE,QAAM,kBAAkB,CAAC,gBAEnB;AAGJ,UAAM,sBAAsB,YAAY,UAAU,OAAO,CAAC,MAAM;AAE9D,UAAI,KAAK,cAAc,EAAE,eAAe,KAAK,YAAY;AACvD,eAAO;AAAA,MACT;AAEA,aAAO,EAAE,WAAW,OAAO;AAAA,IAC7B,CAAC;AAED,QAAI,oBAAoB,WAAW,GAAG;AACpC;AAAA,IACF;AAGA,eAAW,YAAY,qBAAqB;AAC1C,cAAQ,SAAS,MAAA;A
AAA,QACf,KAAK;AAAA,QACL,KAAK;AACH,mCAAyB,QAAQ,SAAS,UAAU,SAAS,IAAI;AACjE;AAAA,QACF,KAAK;AACH,mCAAyB,QAAQ,SAAS,UAAU,SAAS,IAAI;AACjE;AAAA,MAAA;AAAA,IAEN;AAIA,eAAW,YAAY,qBAAqB;AAE1C,cAAQ,SAAS,MAAA;AAAA,QACf,KAAK;AAAA,QACL,KAAK,UAAU;AACb,gBAAM,aAAkD;AAAA,YACtD,YAAY,aAAA;AAAA,YACZ,MAAM,SAAS;AAAA,UAAA;AAEjB,wBAAc,IAAI,SAAS,KAAK,UAAU;AAC1C;AAAA,QACF;AAAA,QACA,KAAK,UAAU;AACb,wBAAc,OAAO,SAAS,GAAG;AACjC;AAAA,QACF;AAAA,MAAA;AAAA,IAEJ;AAGA,kBAAc,aAAa;AAI3B,SAAK,sBAAsB,mBAAmB;AAAA,EAChD;AAEA,SAAO;AAAA,IACL,GAAG;AAAA,IACH,IAAI;AAAA,IACJ;AAAA,IACA,UAAU;AAAA,IACV,UAAU;AAAA,IACV,UAAU;AAAA,IACV,OAAO;AAAA,MACL;AAAA,MACA;AAAA,MACA;AAAA,IAAA;AAAA,EACF;AAEJ;AASA,SAAS,gBACP,YACA,SACA,QACqC;AACrC,MAAI;AACF,UAAM,UAAU,QAAQ,QAAQ,UAAU;AAC1C,QAAI,CAAC,SAAS;AACZ,iCAAW,IAAA;AAAA,IACb;AAEA,UAAM,SAAS,OAAO,MAAM,OAAO;AACnC,UAAM,8BAAc,IAAA;AAGpB,QACE,OAAO,WAAW,YAClB,WAAW,QACX,CAAC,MAAM,QAAQ,MAAM,GACrB;AACA,aAAO,QAAQ,MAAM,EAAE,QAAQ,CAAC,CAAC,YAAY,KAAK,MAAM;AAEtD,YACE,SACA,OAAO,UAAU,YACjB,gBAAgB,SAChB,UAAU,OACV;AACA,gBAAM,aAAa;AACnB,gBAAM,aAAa,iBAAiB,UAAU;AAC9C,kBAAQ,IAAI,YAAY,UAAU;AAAA,QACpC,OAAO;AACL,gBAAM,IAAIC,OAAAA,8BAA8B,YAAY,UAAU;AAAA,QAChE;AAAA,MACF,CAAC;AAAA,IACH,OAAO;AACL,YAAM,IAAIC,OAAAA,gCAAgC,UAAU;AAAA,IACtD;AAEA,WAAO;AAAA,EACT,SAAS,OAAO;AACd,YAAQ;AAAA,MACN,iEAAiE,UAAU;AAAA,MAC3E;AAAA,IAAA;AAEF,+BAAW,IAAA;AAAA,EACb;AACF;AAYA,SAAS,uBACP,YACA,SACA,iBACA,QACA,SACA,eAKA;AACA,MAAI,aAA0D;AAC9D,MAAI,aAAkB;AAQtB,QAAM,cAAc,CAClB,SACA,YAKI;AACJ,UAAM,UAID,CAAA;AAGL,YAAQ,QAAQ,CAAC,eAAe,QAAQ;AACtC,YAAM,gBAAgB,QAAQ,IAAI,GAAG;AACrC,UAAI,CAAC,eAAe;AAClB,gBAAQ,KAAK,EAAE,MAAM,UAAU,KAAK,OAAO,cAAc,MAAM;AAAA,MACjE,WAAW,cAAc,eAAe,cAAc,YAAY;AAChE,gBAAQ,KAAK,EAAE,MAAM,UAAU,KAAK,OAAO,cAAc,MAAM;AAAA,MACjE;AAAA,IACF,CAAC;AAGD,YAAQ,QAAQ,CAAC,eAAe,QAAQ;AACtC,UAAI,CAAC,QAAQ,IAAI,GAAG,GAAG;AACrB,gBAAQ,KAAK,EAAE,MAAM,UAAU,KAAK,OAAO,cAAc,MAAM;AAAA,MACjE;AAAA,IACF,CAAC;AAED,WAAO;AAAA,EACT;AAMA,QAAM,wBAAwB,MAAM;AAClC,QAAI,CAAC,WAAY;AAEjB,UAAM,EAAE,OAAO,OAAO,OAAA,IAAW;AAGjC,UAAM,UAAU,gBAAmB,YAAY,SAAS,MAAM;AAG9D,UAAM,UAAU,YAAY,eAAe,OAAO;AAElD,QAAI,QAAQ,SAAS,GAAG;AACtB,YAAA;AACA,cAAQ,QAAQ,CAAC,EAAE,MAAM,YAAY;AACnC,YAAI,OAAO;AACT,mCAAyB,QAAQ,OAAO,IAAI;AAC5C,gBAAM,EAAE,MAAM,OAAO;AAAA,QACvB;AAAA,MACF,CAAC;AACD,aAAA;AAGA,oBAAc,MAAA;AACd,cAAQ,QAAQ,CAAC,YAAY,QAAQ;AACnC,sBAAc,IAAI,KAAK,UAAU;AAAA,MACnC,CAAC;AAAA,IACH;AAAA,EACF;AAEA,QAAM,aAGF;AAAA,IACF,MAAM,CAAC,WAAiD;AACtD,YAAM,EAAE,OAAO,OAAO,QAAQ,cAAc;AAG5C,mBAAa;AACb,mBAAa,OAAO;AAGpB,YAAM,cAAc,gBAAmB,YAAY,SAAS,MAAM;AAClE,UAAI,YAAY,OAAO,GAAG;AACxB,cAAA;AACA,oBAAY,QAAQ,CAAC,eAAe;AAClC,mCAAyB,QAAQ,WAAW,MAAM,MAAM;AACxD,gBAAM,EAAE,MAAM,UAAU,OAAO,WAAW,MAAM;AAAA,QAClD,CAAC;AACD,eAAA;AAAA,MACF;AAGA,oBAAc,MAAA;AACd,kBAAY,QAAQ,CAAC,YAAY,QAAQ;AACvC,sBAAc,IAAI,KAAK,UAAU;AAAA,MACnC,CAAC;AAGD,gBAAA;AAGA,YAAM,qBAAqB,CAAC,UAAwB;AAElD,YAAI,MAAM,QAAQ,cAAc,MAAM,gBAAgB,SAAS;AAC7D;AAAA,QACF;AAEA,8BAAA;AAAA,MACF;AAGA,sBAAgB,iBAAiB,WAAW,kBAAkB;AAAA,IAGhE;AAAA;AAAA;AAAA;AAAA;AAAA,IAMA,iBAAiB,OAAO;AAAA,MACtB;AAAA,MACA,aACE,aAAa,OAAO,WAAW,cAAc,OAAO,eAAe,QAC/D,iBACA;AAAA,IAAA;AAAA;AAAA,IAIR,eAAe;AAAA;AAAA,IAGf;AAAA,EAAA;AAQF,QAAM,wBAAwB,CAAC,cAA0B;AACvD,QAAI,CAAC,YAAY;AAEf;AAAA,IACF;AAEA,UAAM,EAAE,OAAO,OAAO,OAAA,IAAW;AAGjC,UAAA;AACA,cAAU,QAAQ,CAAC,aAAkB;AACnC,YAAM;AAAA,QACJ,MAAM,SAAS;AAAA,QACf,OACE,SAAS,SAAS,WAAW,SAAS,WAAW,SAAS;AAAA,MAAA,CAC7D;AAAA,IACH,CAAC;AACD,WAAA;AAAA,EACF;AAEA,SAAO;AAAA,IACL,GAAG;AAAA,IACH;AAAA,EAAA;AAEJ;;"}