@tanstack/db 0.5.11 → 0.5.12
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/dist/cjs/SortedMap.cjs +40 -26
- package/dist/cjs/SortedMap.cjs.map +1 -1
- package/dist/cjs/SortedMap.d.cts +10 -15
- package/dist/cjs/collection/change-events.cjs.map +1 -1
- package/dist/cjs/collection/changes.cjs.map +1 -1
- package/dist/cjs/collection/events.cjs.map +1 -1
- package/dist/cjs/collection/events.d.cts +12 -4
- package/dist/cjs/collection/index.cjs +2 -1
- package/dist/cjs/collection/index.cjs.map +1 -1
- package/dist/cjs/collection/indexes.cjs.map +1 -1
- package/dist/cjs/collection/lifecycle.cjs.map +1 -1
- package/dist/cjs/collection/mutations.cjs +5 -2
- package/dist/cjs/collection/mutations.cjs.map +1 -1
- package/dist/cjs/collection/state.cjs +6 -5
- package/dist/cjs/collection/state.cjs.map +1 -1
- package/dist/cjs/collection/state.d.cts +4 -1
- package/dist/cjs/collection/subscription.cjs +60 -53
- package/dist/cjs/collection/subscription.cjs.map +1 -1
- package/dist/cjs/collection/subscription.d.cts +18 -4
- package/dist/cjs/collection/sync.cjs.map +1 -1
- package/dist/cjs/errors.cjs +9 -0
- package/dist/cjs/errors.cjs.map +1 -1
- package/dist/cjs/errors.d.cts +3 -0
- package/dist/cjs/event-emitter.cjs.map +1 -1
- package/dist/cjs/index.cjs +2 -0
- package/dist/cjs/index.cjs.map +1 -1
- package/dist/cjs/index.d.cts +1 -1
- package/dist/cjs/indexes/auto-index.cjs.map +1 -1
- package/dist/cjs/indexes/base-index.cjs.map +1 -1
- package/dist/cjs/indexes/btree-index.cjs +8 -6
- package/dist/cjs/indexes/btree-index.cjs.map +1 -1
- package/dist/cjs/indexes/lazy-index.cjs.map +1 -1
- package/dist/cjs/indexes/reverse-index.cjs.map +1 -1
- package/dist/cjs/local-only.cjs.map +1 -1
- package/dist/cjs/local-storage.cjs.map +1 -1
- package/dist/cjs/optimistic-action.cjs.map +1 -1
- package/dist/cjs/paced-mutations.cjs.map +1 -1
- package/dist/cjs/proxy.cjs.map +1 -1
- package/dist/cjs/query/builder/functions.cjs.map +1 -1
- package/dist/cjs/query/builder/index.cjs.map +1 -1
- package/dist/cjs/query/builder/ref-proxy.cjs.map +1 -1
- package/dist/cjs/query/compiler/evaluators.cjs.map +1 -1
- package/dist/cjs/query/compiler/expressions.cjs.map +1 -1
- package/dist/cjs/query/compiler/group-by.cjs.map +1 -1
- package/dist/cjs/query/compiler/index.cjs.map +1 -1
- package/dist/cjs/query/compiler/joins.cjs.map +1 -1
- package/dist/cjs/query/compiler/order-by.cjs +91 -38
- package/dist/cjs/query/compiler/order-by.cjs.map +1 -1
- package/dist/cjs/query/compiler/order-by.d.cts +6 -2
- package/dist/cjs/query/compiler/select.cjs.map +1 -1
- package/dist/cjs/query/expression-helpers.cjs.map +1 -1
- package/dist/cjs/query/index.d.cts +1 -1
- package/dist/cjs/query/ir.cjs.map +1 -1
- package/dist/cjs/query/live/collection-config-builder.cjs.map +1 -1
- package/dist/cjs/query/live/collection-registry.cjs.map +1 -1
- package/dist/cjs/query/live/collection-subscriber.cjs +30 -15
- package/dist/cjs/query/live/collection-subscriber.cjs.map +1 -1
- package/dist/cjs/query/live/internal.cjs.map +1 -1
- package/dist/cjs/query/live-query-collection.cjs.map +1 -1
- package/dist/cjs/query/optimizer.cjs.map +1 -1
- package/dist/cjs/query/predicate-utils.cjs +19 -2
- package/dist/cjs/query/predicate-utils.cjs.map +1 -1
- package/dist/cjs/query/predicate-utils.d.cts +32 -1
- package/dist/cjs/query/subset-dedupe.cjs.map +1 -1
- package/dist/cjs/scheduler.cjs.map +1 -1
- package/dist/cjs/strategies/debounceStrategy.cjs.map +1 -1
- package/dist/cjs/strategies/queueStrategy.cjs.map +1 -1
- package/dist/cjs/strategies/throttleStrategy.cjs.map +1 -1
- package/dist/cjs/transactions.cjs.map +1 -1
- package/dist/cjs/types.d.cts +43 -5
- package/dist/cjs/utils/browser-polyfills.cjs.map +1 -1
- package/dist/cjs/utils/btree.cjs.map +1 -1
- package/dist/cjs/utils/comparison.cjs.map +1 -1
- package/dist/cjs/utils/cursor.cjs +39 -0
- package/dist/cjs/utils/cursor.cjs.map +1 -0
- package/dist/cjs/utils/cursor.d.cts +18 -0
- package/dist/cjs/utils/index-optimization.cjs.map +1 -1
- package/dist/cjs/utils.cjs.map +1 -1
- package/dist/esm/SortedMap.d.ts +10 -15
- package/dist/esm/SortedMap.js +40 -26
- package/dist/esm/SortedMap.js.map +1 -1
- package/dist/esm/collection/change-events.js.map +1 -1
- package/dist/esm/collection/changes.js.map +1 -1
- package/dist/esm/collection/events.d.ts +12 -4
- package/dist/esm/collection/events.js.map +1 -1
- package/dist/esm/collection/index.js +2 -1
- package/dist/esm/collection/index.js.map +1 -1
- package/dist/esm/collection/indexes.js.map +1 -1
- package/dist/esm/collection/lifecycle.js.map +1 -1
- package/dist/esm/collection/mutations.js +6 -3
- package/dist/esm/collection/mutations.js.map +1 -1
- package/dist/esm/collection/state.d.ts +4 -1
- package/dist/esm/collection/state.js +6 -5
- package/dist/esm/collection/state.js.map +1 -1
- package/dist/esm/collection/subscription.d.ts +18 -4
- package/dist/esm/collection/subscription.js +61 -54
- package/dist/esm/collection/subscription.js.map +1 -1
- package/dist/esm/collection/sync.js.map +1 -1
- package/dist/esm/errors.d.ts +3 -0
- package/dist/esm/errors.js +9 -0
- package/dist/esm/errors.js.map +1 -1
- package/dist/esm/event-emitter.js.map +1 -1
- package/dist/esm/index.d.ts +1 -1
- package/dist/esm/index.js +4 -2
- package/dist/esm/indexes/auto-index.js.map +1 -1
- package/dist/esm/indexes/base-index.js.map +1 -1
- package/dist/esm/indexes/btree-index.js +8 -6
- package/dist/esm/indexes/btree-index.js.map +1 -1
- package/dist/esm/indexes/lazy-index.js.map +1 -1
- package/dist/esm/indexes/reverse-index.js.map +1 -1
- package/dist/esm/local-only.js.map +1 -1
- package/dist/esm/local-storage.js.map +1 -1
- package/dist/esm/optimistic-action.js.map +1 -1
- package/dist/esm/paced-mutations.js.map +1 -1
- package/dist/esm/proxy.js.map +1 -1
- package/dist/esm/query/builder/functions.js.map +1 -1
- package/dist/esm/query/builder/index.js.map +1 -1
- package/dist/esm/query/builder/ref-proxy.js.map +1 -1
- package/dist/esm/query/compiler/evaluators.js.map +1 -1
- package/dist/esm/query/compiler/expressions.js.map +1 -1
- package/dist/esm/query/compiler/group-by.js.map +1 -1
- package/dist/esm/query/compiler/index.js.map +1 -1
- package/dist/esm/query/compiler/joins.js.map +1 -1
- package/dist/esm/query/compiler/order-by.d.ts +6 -2
- package/dist/esm/query/compiler/order-by.js +91 -38
- package/dist/esm/query/compiler/order-by.js.map +1 -1
- package/dist/esm/query/compiler/select.js.map +1 -1
- package/dist/esm/query/expression-helpers.js.map +1 -1
- package/dist/esm/query/index.d.ts +1 -1
- package/dist/esm/query/ir.js.map +1 -1
- package/dist/esm/query/live/collection-config-builder.js.map +1 -1
- package/dist/esm/query/live/collection-registry.js.map +1 -1
- package/dist/esm/query/live/collection-subscriber.js +30 -15
- package/dist/esm/query/live/collection-subscriber.js.map +1 -1
- package/dist/esm/query/live/internal.js.map +1 -1
- package/dist/esm/query/live-query-collection.js.map +1 -1
- package/dist/esm/query/optimizer.js.map +1 -1
- package/dist/esm/query/predicate-utils.d.ts +32 -1
- package/dist/esm/query/predicate-utils.js +19 -2
- package/dist/esm/query/predicate-utils.js.map +1 -1
- package/dist/esm/query/subset-dedupe.js.map +1 -1
- package/dist/esm/scheduler.js.map +1 -1
- package/dist/esm/strategies/debounceStrategy.js.map +1 -1
- package/dist/esm/strategies/queueStrategy.js.map +1 -1
- package/dist/esm/strategies/throttleStrategy.js.map +1 -1
- package/dist/esm/transactions.js.map +1 -1
- package/dist/esm/types.d.ts +43 -5
- package/dist/esm/utils/browser-polyfills.js.map +1 -1
- package/dist/esm/utils/btree.js.map +1 -1
- package/dist/esm/utils/comparison.js.map +1 -1
- package/dist/esm/utils/cursor.d.ts +18 -0
- package/dist/esm/utils/cursor.js +39 -0
- package/dist/esm/utils/cursor.js.map +1 -0
- package/dist/esm/utils/index-optimization.js.map +1 -1
- package/dist/esm/utils.js.map +1 -1
- package/package.json +30 -28
- package/src/SortedMap.ts +50 -31
- package/src/collection/change-events.ts +20 -20
- package/src/collection/changes.ts +12 -12
- package/src/collection/events.ts +20 -10
- package/src/collection/index.ts +47 -46
- package/src/collection/indexes.ts +14 -14
- package/src/collection/lifecycle.ts +16 -16
- package/src/collection/mutations.ts +25 -20
- package/src/collection/state.ts +43 -36
- package/src/collection/subscription.ts +114 -83
- package/src/collection/sync.ts +13 -13
- package/src/duplicate-instance-check.ts +1 -1
- package/src/errors.ts +49 -40
- package/src/event-emitter.ts +5 -5
- package/src/index.ts +21 -21
- package/src/indexes/auto-index.ts +11 -11
- package/src/indexes/base-index.ts +13 -13
- package/src/indexes/btree-index.ts +21 -17
- package/src/indexes/index-options.ts +3 -3
- package/src/indexes/lazy-index.ts +8 -8
- package/src/indexes/reverse-index.ts +5 -5
- package/src/local-only.ts +12 -12
- package/src/local-storage.ts +17 -17
- package/src/optimistic-action.ts +5 -5
- package/src/paced-mutations.ts +6 -6
- package/src/proxy.ts +43 -43
- package/src/query/builder/functions.ts +28 -28
- package/src/query/builder/index.ts +22 -22
- package/src/query/builder/ref-proxy.ts +4 -4
- package/src/query/builder/types.ts +8 -8
- package/src/query/compiler/evaluators.ts +9 -9
- package/src/query/compiler/expressions.ts +6 -6
- package/src/query/compiler/group-by.ts +24 -24
- package/src/query/compiler/index.ts +44 -44
- package/src/query/compiler/joins.ts +37 -37
- package/src/query/compiler/order-by.ts +170 -77
- package/src/query/compiler/select.ts +13 -13
- package/src/query/compiler/types.ts +2 -2
- package/src/query/expression-helpers.ts +16 -16
- package/src/query/index.ts +10 -9
- package/src/query/ir.ts +13 -13
- package/src/query/live/collection-config-builder.ts +53 -53
- package/src/query/live/collection-registry.ts +6 -6
- package/src/query/live/collection-subscriber.ts +87 -48
- package/src/query/live/internal.ts +1 -1
- package/src/query/live/types.ts +4 -4
- package/src/query/live-query-collection.ts +15 -15
- package/src/query/optimizer.ts +29 -29
- package/src/query/predicate-utils.ts +105 -50
- package/src/query/subset-dedupe.ts +6 -6
- package/src/scheduler.ts +3 -3
- package/src/strategies/debounceStrategy.ts +6 -6
- package/src/strategies/index.ts +4 -4
- package/src/strategies/queueStrategy.ts +5 -5
- package/src/strategies/throttleStrategy.ts +6 -6
- package/src/strategies/types.ts +2 -2
- package/src/transactions.ts +9 -9
- package/src/types.ts +51 -12
- package/src/utils/array-utils.ts +1 -1
- package/src/utils/browser-polyfills.ts +2 -2
- package/src/utils/btree.ts +22 -22
- package/src/utils/comparison.ts +3 -3
- package/src/utils/cursor.ts +78 -0
- package/src/utils/index-optimization.ts +14 -14
- package/src/utils.ts +4 -4
@@ -89,6 +89,9 @@ export declare function isOrderBySubset(subset: OrderBy | undefined, superset: O
  * Check if one limit is a subset of another.
  * Returns true if the subset limit requirements are satisfied by the superset limit.
  *
+ * Note: This function does NOT consider offset. For offset-aware subset checking,
+ * use `isOffsetLimitSubset` instead.
+ *
  * @example
  * isLimitSubset(10, 20) // true (requesting 10 items when 20 are available)
  * isLimitSubset(20, 10) // false (requesting 20 items when only 10 are available)
@@ -100,7 +103,35 @@ export declare function isOrderBySubset(subset: OrderBy | undefined, superset: O
  */
 export declare function isLimitSubset(subset: number | undefined, superset: number | undefined): boolean;
 /**
- * Check if one
+ * Check if one offset+limit range is a subset of another.
+ * Returns true if the subset range is fully contained within the superset range.
+ *
+ * A query with `{limit: 10, offset: 0}` loads rows [0, 10).
+ * A query with `{limit: 10, offset: 20}` loads rows [20, 30).
+ *
+ * For subset to be satisfied by superset:
+ * - Superset must start at or before subset (superset.offset <= subset.offset)
+ * - Superset must end at or after subset (superset.offset + superset.limit >= subset.offset + subset.limit)
+ *
+ * @example
+ * isOffsetLimitSubset({ offset: 0, limit: 5 }, { offset: 0, limit: 10 }) // true
+ * isOffsetLimitSubset({ offset: 5, limit: 5 }, { offset: 0, limit: 10 }) // true (rows 5-9 within 0-9)
+ * isOffsetLimitSubset({ offset: 5, limit: 10 }, { offset: 0, limit: 10 }) // false (rows 5-14 exceed 0-9)
+ * isOffsetLimitSubset({ offset: 20, limit: 10 }, { offset: 0, limit: 10 }) // false (rows 20-29 outside 0-9)
+ *
+ * @param subset - The offset+limit requirements to check
+ * @param superset - The offset+limit that might satisfy the requirements
+ * @returns true if subset range is fully contained within superset range
+ */
+export declare function isOffsetLimitSubset(subset: {
+    offset?: number;
+    limit?: number;
+}, superset: {
+    offset?: number;
+    limit?: number;
+}): boolean;
+/**
+ * Check if one predicate (where + orderBy + limit + offset) is a subset of another.
  * Returns true if all aspects of the subset predicate are satisfied by the superset.
  *
  * @example
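The new `isOffsetLimitSubset` declaration above documents a pure containment rule for offset+limit ranges. As a rough TypeScript sketch of that rule (this is not the package's implementation; it assumes an undefined offset means 0 and an undefined limit means unbounded, which matches the documented examples):

// Sketch of the documented containment check, not the library source.
type Range = { offset?: number; limit?: number }

function offsetLimitContained(subset: Range, superset: Range): boolean {
  const subStart = subset.offset ?? 0
  const supStart = superset.offset ?? 0
  // Treat a missing limit as unbounded.
  const subEnd = subStart + (subset.limit ?? Infinity)
  const supEnd = supStart + (superset.limit ?? Infinity)
  // Superset must start at or before, and end at or after, the subset range.
  return supStart <= subStart && supEnd >= subEnd
}

offsetLimitContained({ offset: 5, limit: 5 }, { offset: 0, limit: 10 })  // true: rows 5-9 lie within 0-9
offsetLimitContained({ offset: 5, limit: 10 }, { offset: 0, limit: 10 }) // false: rows 5-14 exceed 0-9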
@@ -1 +1 @@
-
{"version":3,"file":"subset-dedupe.cjs","sources":["../../../src/query/subset-dedupe.ts"],"sourcesContent":["import {\n isPredicateSubset,\n isWhereSubset,\n minusWherePredicates,\n unionWherePredicates,\n} from \"./predicate-utils.js\"\nimport type { BasicExpression } from \"./ir.js\"\nimport type { LoadSubsetOptions } from \"../types.js\"\n\n/**\n * Deduplicated wrapper for a loadSubset function.\n * Tracks what data has been loaded and avoids redundant calls by applying\n * subset logic to predicates.\n *\n * @param opts - The options for the DeduplicatedLoadSubset\n * @param opts.loadSubset - The underlying loadSubset function to wrap\n * @param opts.onDeduplicate - An optional callback function that is invoked when a loadSubset call is deduplicated.\n * If the call is deduplicated because the requested data is being loaded by an inflight request,\n * then this callback is invoked when the inflight request completes successfully and the data is fully loaded.\n * This callback is useful if you need to track rows per query, in which case you can't ignore deduplicated calls\n * because you need to know which rows were loaded for each query.\n * @example\n * const dedupe = new DeduplicatedLoadSubset({ loadSubset: myLoadSubset, onDeduplicate: (opts) => console.log(`Call was deduplicated:`, opts) })\n *\n * // First call - fetches data\n * await dedupe.loadSubset({ where: gt(ref('age'), val(10)) })\n *\n * // Second call - subset of first, returns true immediately\n * await dedupe.loadSubset({ where: gt(ref('age'), val(20)) })\n *\n * // Clear state to start fresh\n * dedupe.reset()\n */\nexport class DeduplicatedLoadSubset {\n // The underlying loadSubset function to wrap\n private readonly _loadSubset: (\n options: LoadSubsetOptions\n ) => true | Promise<void>\n\n // An optional callback function that is invoked when a loadSubset call is deduplicated.\n private readonly onDeduplicate:\n | ((options: LoadSubsetOptions) => void)\n | undefined\n\n // Combined where predicate for all unlimited calls (no limit)\n private unlimitedWhere: BasicExpression<boolean> | undefined = undefined\n\n // Flag to track if we've loaded all data (unlimited call with no where clause)\n private hasLoadedAllData = false\n\n // List of all limited calls (with limit, possibly with orderBy)\n // We clone options before storing to prevent mutation of stored predicates\n private limitedCalls: Array<LoadSubsetOptions> = []\n\n // Track in-flight calls to prevent concurrent duplicate requests\n // We store both the options and the promise so we can apply subset logic\n private inflightCalls: Array<{\n options: LoadSubsetOptions\n promise: Promise<void>\n }> = []\n\n // Generation counter to invalidate in-flight requests after reset()\n // When reset() is called, this increments, and any in-flight completion handlers\n // check if their captured generation matches before updating tracking state\n private generation = 0\n\n constructor(opts: {\n loadSubset: (options: LoadSubsetOptions) => true | Promise<void>\n onDeduplicate?: (options: LoadSubsetOptions) => void\n }) {\n this._loadSubset = opts.loadSubset\n this.onDeduplicate = opts.onDeduplicate\n }\n\n /**\n * Load a subset of data, with automatic deduplication based on previously\n * loaded predicates and in-flight requests.\n *\n * This method is auto-bound, so it can be safely passed as a callback without\n * losing its `this` context (e.g., `loadSubset: dedupe.loadSubset` in a sync config).\n *\n * @param options - The predicate options (where, orderBy, limit)\n * 
@returns true if data is already loaded, or a Promise that resolves when data is loaded\n */\n loadSubset = (options: LoadSubsetOptions): true | Promise<void> => {\n // If we've loaded all data, everything is covered\n if (this.hasLoadedAllData) {\n this.onDeduplicate?.(options)\n return true\n }\n\n // Check against unlimited combined predicate\n // If we've loaded all data matching a where clause, we don't need to refetch subsets\n if (this.unlimitedWhere !== undefined && options.where !== undefined) {\n if (isWhereSubset(options.where, this.unlimitedWhere)) {\n this.onDeduplicate?.(options)\n return true // Data already loaded via unlimited call\n }\n }\n\n // Check against limited calls\n if (options.limit !== undefined) {\n const alreadyLoaded = this.limitedCalls.some((loaded) =>\n isPredicateSubset(options, loaded)\n )\n\n if (alreadyLoaded) {\n this.onDeduplicate?.(options)\n return true // Already loaded\n }\n }\n\n // Check against in-flight calls using the same subset logic as resolved calls\n // This prevents duplicate requests when concurrent calls have subset relationships\n const matchingInflight = this.inflightCalls.find((inflight) =>\n isPredicateSubset(options, inflight.options)\n )\n\n if (matchingInflight !== undefined) {\n // An in-flight call will load data that covers this request\n // Return the same promise so this caller waits for the data to load\n // The in-flight promise already handles tracking updates when it completes\n const prom = matchingInflight.promise\n // Call `onDeduplicate` when the inflight request has loaded the data\n prom.then(() => this.onDeduplicate?.(options)).catch() // ignore errors\n return prom\n }\n\n // Not fully covered by existing data\n // Compute the subset of data that is not covered by the existing data\n // such that we only have to load that subset of missing data\n const clonedOptions = cloneOptions(options)\n if (this.unlimitedWhere !== undefined && options.limit === undefined) {\n // Compute difference to get only the missing data\n // We can only do this for unlimited queries\n // and we can only remove data that was loaded from unlimited queries\n // because with limited queries we have no way to express that we already loaded part of the matching data\n clonedOptions.where =\n minusWherePredicates(clonedOptions.where, this.unlimitedWhere) ??\n clonedOptions.where\n }\n\n // Call underlying loadSubset to load the missing data\n const resultPromise = this._loadSubset(clonedOptions)\n\n // Handle both sync (true) and async (Promise<void>) return values\n if (resultPromise === true) {\n // Sync return - update tracking synchronously\n // Clone options before storing to protect against caller mutation\n this.updateTracking(clonedOptions)\n return true\n } else {\n // Async return - track the promise and update tracking after it resolves\n\n // Capture the current generation - this lets us detect if reset() was called\n // while this request was in-flight, so we can skip updating tracking state\n const capturedGeneration = this.generation\n\n // We need to create a reference to the in-flight entry so we can remove it later\n const inflightEntry = {\n options: clonedOptions, // Store cloned options for subset matching\n promise: resultPromise\n .then((result) => {\n // Only update tracking if this request is still from the current generation\n // If reset() was called, the generation will have incremented and we should\n // not repopulate the state that was just cleared\n if (capturedGeneration === this.generation) {\n // Use the 
cloned options that we captured before any caller mutations\n // This ensures we track exactly what was loaded, not what the caller changed\n this.updateTracking(clonedOptions)\n }\n return result\n })\n .finally(() => {\n // Always remove from in-flight array on completion OR rejection\n // This ensures failed requests can be retried instead of being cached forever\n const index = this.inflightCalls.indexOf(inflightEntry)\n if (index !== -1) {\n this.inflightCalls.splice(index, 1)\n }\n }),\n }\n\n // Store the in-flight entry so concurrent subset calls can wait for it\n this.inflightCalls.push(inflightEntry)\n return inflightEntry.promise\n }\n }\n\n /**\n * Reset all tracking state.\n * Clears the history of loaded predicates and in-flight calls.\n * Use this when you want to start fresh, for example after clearing the underlying data store.\n *\n * Note: Any in-flight requests will still complete, but they will not update the tracking\n * state after the reset. This prevents old requests from repopulating cleared state.\n */\n reset(): void {\n this.unlimitedWhere = undefined\n this.hasLoadedAllData = false\n this.limitedCalls = []\n this.inflightCalls = []\n // Increment generation to invalidate any in-flight completion handlers\n // This ensures requests that were started before reset() don't repopulate the state\n this.generation++\n }\n\n private updateTracking(options: LoadSubsetOptions): void {\n // Update tracking based on whether this was a limited or unlimited call\n if (options.limit === undefined) {\n // Unlimited call - update combined where predicate\n // We ignore orderBy for unlimited calls as mentioned in requirements\n if (options.where === undefined) {\n // No where clause = all data loaded\n this.hasLoadedAllData = true\n this.unlimitedWhere = undefined\n this.limitedCalls = []\n this.inflightCalls = []\n } else if (this.unlimitedWhere === undefined) {\n this.unlimitedWhere = options.where\n } else {\n this.unlimitedWhere = unionWherePredicates([\n this.unlimitedWhere,\n options.where,\n ])\n }\n } else {\n // Limited call - add to list for future subset checks\n // Options are already cloned by caller to prevent mutation issues\n this.limitedCalls.push(options)\n }\n }\n}\n\n/**\n * Clones a LoadSubsetOptions object to prevent mutation of stored predicates.\n * This is crucial because callers often reuse the same options object and mutate\n * properties like limit or where between calls. 
Without cloning, our stored history\n * would reflect the mutated values rather than what was actually loaded.\n */\nexport function cloneOptions(options: LoadSubsetOptions): LoadSubsetOptions {\n return { ...options }\n}\n"],"names":["isWhereSubset","isPredicateSubset","minusWherePredicates","unionWherePredicates"],"mappings":";;;AAiCO,MAAM,uBAAuB;AAAA,EAiClC,YAAY,MAGT;AAxBH,SAAQ,iBAAuD;AAG/D,SAAQ,mBAAmB;AAI3B,SAAQ,eAAyC,CAAA;AAIjD,SAAQ,gBAGH,CAAA;AAKL,SAAQ,aAAa;AAoBrB,SAAA,aAAa,CAAC,YAAqD;AAEjE,UAAI,KAAK,kBAAkB;AACzB,aAAK,gBAAgB,OAAO;AAC5B,eAAO;AAAA,MACT;AAIA,UAAI,KAAK,mBAAmB,UAAa,QAAQ,UAAU,QAAW;AACpE,YAAIA,eAAAA,cAAc,QAAQ,OAAO,KAAK,cAAc,GAAG;AACrD,eAAK,gBAAgB,OAAO;AAC5B,iBAAO;AAAA,QACT;AAAA,MACF;AAGA,UAAI,QAAQ,UAAU,QAAW;AAC/B,cAAM,gBAAgB,KAAK,aAAa;AAAA,UAAK,CAAC,WAC5CC,iCAAkB,SAAS,MAAM;AAAA,QAAA;AAGnC,YAAI,eAAe;AACjB,eAAK,gBAAgB,OAAO;AAC5B,iBAAO;AAAA,QACT;AAAA,MACF;AAIA,YAAM,mBAAmB,KAAK,cAAc;AAAA,QAAK,CAAC,aAChDA,eAAAA,kBAAkB,SAAS,SAAS,OAAO;AAAA,MAAA;AAG7C,UAAI,qBAAqB,QAAW;AAIlC,cAAM,OAAO,iBAAiB;AAE9B,aAAK,KAAK,MAAM,KAAK,gBAAgB,OAAO,CAAC,EAAE,MAAA;AAC/C,eAAO;AAAA,MACT;AAKA,YAAM,gBAAgB,aAAa,OAAO;AAC1C,UAAI,KAAK,mBAAmB,UAAa,QAAQ,UAAU,QAAW;AAKpE,sBAAc,QACZC,eAAAA,qBAAqB,cAAc,OAAO,KAAK,cAAc,KAC7D,cAAc;AAAA,MAClB;AAGA,YAAM,gBAAgB,KAAK,YAAY,aAAa;AAGpD,UAAI,kBAAkB,MAAM;AAG1B,aAAK,eAAe,aAAa;AACjC,eAAO;AAAA,MACT,OAAO;AAKL,cAAM,qBAAqB,KAAK;AAGhC,cAAM,gBAAgB;AAAA,UACpB,SAAS;AAAA;AAAA,UACT,SAAS,cACN,KAAK,CAAC,WAAW;AAIhB,gBAAI,uBAAuB,KAAK,YAAY;AAG1C,mBAAK,eAAe,aAAa;AAAA,YACnC;AACA,mBAAO;AAAA,UACT,CAAC,EACA,QAAQ,MAAM;AAGb,kBAAM,QAAQ,KAAK,cAAc,QAAQ,aAAa;AACtD,gBAAI,UAAU,IAAI;AAChB,mBAAK,cAAc,OAAO,OAAO,CAAC;AAAA,YACpC;AAAA,UACF,CAAC;AAAA,QAAA;AAIL,aAAK,cAAc,KAAK,aAAa;AACrC,eAAO,cAAc;AAAA,MACvB;AAAA,IACF;AArHE,SAAK,cAAc,KAAK;AACxB,SAAK,gBAAgB,KAAK;AAAA,EAC5B;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,EA6HA,QAAc;AACZ,SAAK,iBAAiB;AACtB,SAAK,mBAAmB;AACxB,SAAK,eAAe,CAAA;AACpB,SAAK,gBAAgB,CAAA;AAGrB,SAAK;AAAA,EACP;AAAA,EAEQ,eAAe,SAAkC;AAEvD,QAAI,QAAQ,UAAU,QAAW;AAG/B,UAAI,QAAQ,UAAU,QAAW;AAE/B,aAAK,mBAAmB;AACxB,aAAK,iBAAiB;AACtB,aAAK,eAAe,CAAA;AACpB,aAAK,gBAAgB,CAAA;AAAA,MACvB,WAAW,KAAK,mBAAmB,QAAW;AAC5C,aAAK,iBAAiB,QAAQ;AAAA,MAChC,OAAO;AACL,aAAK,iBAAiBC,oCAAqB;AAAA,UACzC,KAAK;AAAA,UACL,QAAQ;AAAA,QAAA,CACT;AAAA,MACH;AAAA,IACF,OAAO;AAGL,WAAK,aAAa,KAAK,OAAO;AAAA,IAChC;AAAA,EACF;AACF;AAQO,SAAS,aAAa,SAA+C;AAC1E,SAAO,EAAE,GAAG,QAAA;AACd;;;"}
+
{"version":3,"file":"subset-dedupe.cjs","sources":["../../../src/query/subset-dedupe.ts"],"sourcesContent":["import {\n isPredicateSubset,\n isWhereSubset,\n minusWherePredicates,\n unionWherePredicates,\n} from './predicate-utils.js'\nimport type { BasicExpression } from './ir.js'\nimport type { LoadSubsetOptions } from '../types.js'\n\n/**\n * Deduplicated wrapper for a loadSubset function.\n * Tracks what data has been loaded and avoids redundant calls by applying\n * subset logic to predicates.\n *\n * @param opts - The options for the DeduplicatedLoadSubset\n * @param opts.loadSubset - The underlying loadSubset function to wrap\n * @param opts.onDeduplicate - An optional callback function that is invoked when a loadSubset call is deduplicated.\n * If the call is deduplicated because the requested data is being loaded by an inflight request,\n * then this callback is invoked when the inflight request completes successfully and the data is fully loaded.\n * This callback is useful if you need to track rows per query, in which case you can't ignore deduplicated calls\n * because you need to know which rows were loaded for each query.\n * @example\n * const dedupe = new DeduplicatedLoadSubset({ loadSubset: myLoadSubset, onDeduplicate: (opts) => console.log(`Call was deduplicated:`, opts) })\n *\n * // First call - fetches data\n * await dedupe.loadSubset({ where: gt(ref('age'), val(10)) })\n *\n * // Second call - subset of first, returns true immediately\n * await dedupe.loadSubset({ where: gt(ref('age'), val(20)) })\n *\n * // Clear state to start fresh\n * dedupe.reset()\n */\nexport class DeduplicatedLoadSubset {\n // The underlying loadSubset function to wrap\n private readonly _loadSubset: (\n options: LoadSubsetOptions,\n ) => true | Promise<void>\n\n // An optional callback function that is invoked when a loadSubset call is deduplicated.\n private readonly onDeduplicate:\n | ((options: LoadSubsetOptions) => void)\n | undefined\n\n // Combined where predicate for all unlimited calls (no limit)\n private unlimitedWhere: BasicExpression<boolean> | undefined = undefined\n\n // Flag to track if we've loaded all data (unlimited call with no where clause)\n private hasLoadedAllData = false\n\n // List of all limited calls (with limit, possibly with orderBy)\n // We clone options before storing to prevent mutation of stored predicates\n private limitedCalls: Array<LoadSubsetOptions> = []\n\n // Track in-flight calls to prevent concurrent duplicate requests\n // We store both the options and the promise so we can apply subset logic\n private inflightCalls: Array<{\n options: LoadSubsetOptions\n promise: Promise<void>\n }> = []\n\n // Generation counter to invalidate in-flight requests after reset()\n // When reset() is called, this increments, and any in-flight completion handlers\n // check if their captured generation matches before updating tracking state\n private generation = 0\n\n constructor(opts: {\n loadSubset: (options: LoadSubsetOptions) => true | Promise<void>\n onDeduplicate?: (options: LoadSubsetOptions) => void\n }) {\n this._loadSubset = opts.loadSubset\n this.onDeduplicate = opts.onDeduplicate\n }\n\n /**\n * Load a subset of data, with automatic deduplication based on previously\n * loaded predicates and in-flight requests.\n *\n * This method is auto-bound, so it can be safely passed as a callback without\n * losing its `this` context (e.g., `loadSubset: dedupe.loadSubset` in a sync config).\n *\n * @param options - The predicate options (where, orderBy, limit)\n * 
@returns true if data is already loaded, or a Promise that resolves when data is loaded\n */\n loadSubset = (options: LoadSubsetOptions): true | Promise<void> => {\n // If we've loaded all data, everything is covered\n if (this.hasLoadedAllData) {\n this.onDeduplicate?.(options)\n return true\n }\n\n // Check against unlimited combined predicate\n // If we've loaded all data matching a where clause, we don't need to refetch subsets\n if (this.unlimitedWhere !== undefined && options.where !== undefined) {\n if (isWhereSubset(options.where, this.unlimitedWhere)) {\n this.onDeduplicate?.(options)\n return true // Data already loaded via unlimited call\n }\n }\n\n // Check against limited calls\n if (options.limit !== undefined) {\n const alreadyLoaded = this.limitedCalls.some((loaded) =>\n isPredicateSubset(options, loaded),\n )\n\n if (alreadyLoaded) {\n this.onDeduplicate?.(options)\n return true // Already loaded\n }\n }\n\n // Check against in-flight calls using the same subset logic as resolved calls\n // This prevents duplicate requests when concurrent calls have subset relationships\n const matchingInflight = this.inflightCalls.find((inflight) =>\n isPredicateSubset(options, inflight.options),\n )\n\n if (matchingInflight !== undefined) {\n // An in-flight call will load data that covers this request\n // Return the same promise so this caller waits for the data to load\n // The in-flight promise already handles tracking updates when it completes\n const prom = matchingInflight.promise\n // Call `onDeduplicate` when the inflight request has loaded the data\n prom.then(() => this.onDeduplicate?.(options)).catch() // ignore errors\n return prom\n }\n\n // Not fully covered by existing data\n // Compute the subset of data that is not covered by the existing data\n // such that we only have to load that subset of missing data\n const clonedOptions = cloneOptions(options)\n if (this.unlimitedWhere !== undefined && options.limit === undefined) {\n // Compute difference to get only the missing data\n // We can only do this for unlimited queries\n // and we can only remove data that was loaded from unlimited queries\n // because with limited queries we have no way to express that we already loaded part of the matching data\n clonedOptions.where =\n minusWherePredicates(clonedOptions.where, this.unlimitedWhere) ??\n clonedOptions.where\n }\n\n // Call underlying loadSubset to load the missing data\n const resultPromise = this._loadSubset(clonedOptions)\n\n // Handle both sync (true) and async (Promise<void>) return values\n if (resultPromise === true) {\n // Sync return - update tracking synchronously\n // Clone options before storing to protect against caller mutation\n this.updateTracking(clonedOptions)\n return true\n } else {\n // Async return - track the promise and update tracking after it resolves\n\n // Capture the current generation - this lets us detect if reset() was called\n // while this request was in-flight, so we can skip updating tracking state\n const capturedGeneration = this.generation\n\n // We need to create a reference to the in-flight entry so we can remove it later\n const inflightEntry = {\n options: clonedOptions, // Store cloned options for subset matching\n promise: resultPromise\n .then((result) => {\n // Only update tracking if this request is still from the current generation\n // If reset() was called, the generation will have incremented and we should\n // not repopulate the state that was just cleared\n if (capturedGeneration === this.generation) {\n // Use 
the cloned options that we captured before any caller mutations\n // This ensures we track exactly what was loaded, not what the caller changed\n this.updateTracking(clonedOptions)\n }\n return result\n })\n .finally(() => {\n // Always remove from in-flight array on completion OR rejection\n // This ensures failed requests can be retried instead of being cached forever\n const index = this.inflightCalls.indexOf(inflightEntry)\n if (index !== -1) {\n this.inflightCalls.splice(index, 1)\n }\n }),\n }\n\n // Store the in-flight entry so concurrent subset calls can wait for it\n this.inflightCalls.push(inflightEntry)\n return inflightEntry.promise\n }\n }\n\n /**\n * Reset all tracking state.\n * Clears the history of loaded predicates and in-flight calls.\n * Use this when you want to start fresh, for example after clearing the underlying data store.\n *\n * Note: Any in-flight requests will still complete, but they will not update the tracking\n * state after the reset. This prevents old requests from repopulating cleared state.\n */\n reset(): void {\n this.unlimitedWhere = undefined\n this.hasLoadedAllData = false\n this.limitedCalls = []\n this.inflightCalls = []\n // Increment generation to invalidate any in-flight completion handlers\n // This ensures requests that were started before reset() don't repopulate the state\n this.generation++\n }\n\n private updateTracking(options: LoadSubsetOptions): void {\n // Update tracking based on whether this was a limited or unlimited call\n if (options.limit === undefined) {\n // Unlimited call - update combined where predicate\n // We ignore orderBy for unlimited calls as mentioned in requirements\n if (options.where === undefined) {\n // No where clause = all data loaded\n this.hasLoadedAllData = true\n this.unlimitedWhere = undefined\n this.limitedCalls = []\n this.inflightCalls = []\n } else if (this.unlimitedWhere === undefined) {\n this.unlimitedWhere = options.where\n } else {\n this.unlimitedWhere = unionWherePredicates([\n this.unlimitedWhere,\n options.where,\n ])\n }\n } else {\n // Limited call - add to list for future subset checks\n // Options are already cloned by caller to prevent mutation issues\n this.limitedCalls.push(options)\n }\n }\n}\n\n/**\n * Clones a LoadSubsetOptions object to prevent mutation of stored predicates.\n * This is crucial because callers often reuse the same options object and mutate\n * properties like limit or where between calls. 
Without cloning, our stored history\n * would reflect the mutated values rather than what was actually loaded.\n */\nexport function cloneOptions(options: LoadSubsetOptions): LoadSubsetOptions {\n return { ...options }\n}\n"],"names":["isWhereSubset","isPredicateSubset","minusWherePredicates","unionWherePredicates"],"mappings":";;;AAiCO,MAAM,uBAAuB;AAAA,EAiClC,YAAY,MAGT;AAxBH,SAAQ,iBAAuD;AAG/D,SAAQ,mBAAmB;AAI3B,SAAQ,eAAyC,CAAA;AAIjD,SAAQ,gBAGH,CAAA;AAKL,SAAQ,aAAa;AAoBrB,SAAA,aAAa,CAAC,YAAqD;AAEjE,UAAI,KAAK,kBAAkB;AACzB,aAAK,gBAAgB,OAAO;AAC5B,eAAO;AAAA,MACT;AAIA,UAAI,KAAK,mBAAmB,UAAa,QAAQ,UAAU,QAAW;AACpE,YAAIA,eAAAA,cAAc,QAAQ,OAAO,KAAK,cAAc,GAAG;AACrD,eAAK,gBAAgB,OAAO;AAC5B,iBAAO;AAAA,QACT;AAAA,MACF;AAGA,UAAI,QAAQ,UAAU,QAAW;AAC/B,cAAM,gBAAgB,KAAK,aAAa;AAAA,UAAK,CAAC,WAC5CC,iCAAkB,SAAS,MAAM;AAAA,QAAA;AAGnC,YAAI,eAAe;AACjB,eAAK,gBAAgB,OAAO;AAC5B,iBAAO;AAAA,QACT;AAAA,MACF;AAIA,YAAM,mBAAmB,KAAK,cAAc;AAAA,QAAK,CAAC,aAChDA,eAAAA,kBAAkB,SAAS,SAAS,OAAO;AAAA,MAAA;AAG7C,UAAI,qBAAqB,QAAW;AAIlC,cAAM,OAAO,iBAAiB;AAE9B,aAAK,KAAK,MAAM,KAAK,gBAAgB,OAAO,CAAC,EAAE,MAAA;AAC/C,eAAO;AAAA,MACT;AAKA,YAAM,gBAAgB,aAAa,OAAO;AAC1C,UAAI,KAAK,mBAAmB,UAAa,QAAQ,UAAU,QAAW;AAKpE,sBAAc,QACZC,eAAAA,qBAAqB,cAAc,OAAO,KAAK,cAAc,KAC7D,cAAc;AAAA,MAClB;AAGA,YAAM,gBAAgB,KAAK,YAAY,aAAa;AAGpD,UAAI,kBAAkB,MAAM;AAG1B,aAAK,eAAe,aAAa;AACjC,eAAO;AAAA,MACT,OAAO;AAKL,cAAM,qBAAqB,KAAK;AAGhC,cAAM,gBAAgB;AAAA,UACpB,SAAS;AAAA;AAAA,UACT,SAAS,cACN,KAAK,CAAC,WAAW;AAIhB,gBAAI,uBAAuB,KAAK,YAAY;AAG1C,mBAAK,eAAe,aAAa;AAAA,YACnC;AACA,mBAAO;AAAA,UACT,CAAC,EACA,QAAQ,MAAM;AAGb,kBAAM,QAAQ,KAAK,cAAc,QAAQ,aAAa;AACtD,gBAAI,UAAU,IAAI;AAChB,mBAAK,cAAc,OAAO,OAAO,CAAC;AAAA,YACpC;AAAA,UACF,CAAC;AAAA,QAAA;AAIL,aAAK,cAAc,KAAK,aAAa;AACrC,eAAO,cAAc;AAAA,MACvB;AAAA,IACF;AArHE,SAAK,cAAc,KAAK;AACxB,SAAK,gBAAgB,KAAK;AAAA,EAC5B;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,EA6HA,QAAc;AACZ,SAAK,iBAAiB;AACtB,SAAK,mBAAmB;AACxB,SAAK,eAAe,CAAA;AACpB,SAAK,gBAAgB,CAAA;AAGrB,SAAK;AAAA,EACP;AAAA,EAEQ,eAAe,SAAkC;AAEvD,QAAI,QAAQ,UAAU,QAAW;AAG/B,UAAI,QAAQ,UAAU,QAAW;AAE/B,aAAK,mBAAmB;AACxB,aAAK,iBAAiB;AACtB,aAAK,eAAe,CAAA;AACpB,aAAK,gBAAgB,CAAA;AAAA,MACvB,WAAW,KAAK,mBAAmB,QAAW;AAC5C,aAAK,iBAAiB,QAAQ;AAAA,MAChC,OAAO;AACL,aAAK,iBAAiBC,oCAAqB;AAAA,UACzC,KAAK;AAAA,UACL,QAAQ;AAAA,QAAA,CACT;AAAA,MACH;AAAA,IACF,OAAO;AAGL,WAAK,aAAa,KAAK,OAAO;AAAA,IAChC;AAAA,EACF;AACF;AAQO,SAAS,aAAa,SAA+C;AAC1E,SAAO,EAAE,GAAG,QAAA;AACd;;;"}
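The subset-dedupe sourcemap above embeds the full `DeduplicatedLoadSubset` source, including its JSDoc. A minimal usage sketch based on that JSDoc (the import path, the exact shape of `LoadSubsetOptions`, and `fetchRowsFromBackend` are assumptions for illustration, not verified public API):

// Hypothetical wiring of DeduplicatedLoadSubset around a backend fetch.
const dedupe = new DeduplicatedLoadSubset({
  loadSubset: async (options) => {
    await fetchRowsFromBackend(options) // hypothetical fetch keyed by where/orderBy/limit/offset
  },
  onDeduplicate: (options) => {
    console.log('request already covered by previously loaded data', options)
  },
})

// First call loads rows and is tracked once it resolves.
await dedupe.loadSubset({ limit: 20 })
// A narrower request is deduplicated: it returns true instead of re-fetching.
dedupe.loadSubset({ limit: 10 })
// reset() clears the tracked predicates and in-flight bookkeeping.
dedupe.reset()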
@@ -1 +1 @@
-
{"version":3,"file":"scheduler.cjs","sources":["../../src/scheduler.ts"],"sourcesContent":["/**\n * Identifier used to scope scheduled work. Maps to a transaction id for live queries.\n */\nexport type SchedulerContextId = string | symbol\n\n/**\n * Options for {@link Scheduler.schedule}. Jobs are identified by `jobId` within a context\n * and may declare dependencies.\n */\ninterface ScheduleOptions {\n contextId?: SchedulerContextId\n jobId: unknown\n dependencies?: Iterable<unknown>\n run: () => void\n}\n\n/**\n * State per context. Queue preserves order, jobs hold run functions, dependencies track\n * prerequisites, and completed records which jobs have run during the current flush.\n */\ninterface SchedulerContextState {\n queue: Array<unknown>\n jobs: Map<unknown, () => void>\n dependencies: Map<unknown, Set<unknown>>\n completed: Set<unknown>\n}\n\ninterface PendingAwareJob {\n hasPendingGraphRun: (contextId: SchedulerContextId) => boolean\n}\n\nfunction isPendingAwareJob(dep: any): dep is PendingAwareJob {\n return (\n typeof dep === `object` &&\n dep !== null &&\n typeof dep.hasPendingGraphRun === `function`\n )\n}\n\n/**\n * Scoped scheduler that coalesces work by context and job.\n *\n * - **context** (e.g. transaction id) defines the batching boundary; work is queued until flushed.\n * - **job id** deduplicates work within a context; scheduling the same job replaces the previous run function.\n * - Without a context id, work executes immediately.\n *\n * Callers manage their own state; the scheduler only orchestrates execution order.\n */\nexport class Scheduler {\n private contexts = new Map<SchedulerContextId, SchedulerContextState>()\n private clearListeners = new Set<(contextId: SchedulerContextId) => void>()\n\n /**\n * Get or create the state bucket for a context.\n */\n private getOrCreateContext(\n contextId: SchedulerContextId
+
{"version":3,"file":"scheduler.cjs","sources":["../../src/scheduler.ts"],"sourcesContent":["/**\n * Identifier used to scope scheduled work. Maps to a transaction id for live queries.\n */\nexport type SchedulerContextId = string | symbol\n\n/**\n * Options for {@link Scheduler.schedule}. Jobs are identified by `jobId` within a context\n * and may declare dependencies.\n */\ninterface ScheduleOptions {\n contextId?: SchedulerContextId\n jobId: unknown\n dependencies?: Iterable<unknown>\n run: () => void\n}\n\n/**\n * State per context. Queue preserves order, jobs hold run functions, dependencies track\n * prerequisites, and completed records which jobs have run during the current flush.\n */\ninterface SchedulerContextState {\n queue: Array<unknown>\n jobs: Map<unknown, () => void>\n dependencies: Map<unknown, Set<unknown>>\n completed: Set<unknown>\n}\n\ninterface PendingAwareJob {\n hasPendingGraphRun: (contextId: SchedulerContextId) => boolean\n}\n\nfunction isPendingAwareJob(dep: any): dep is PendingAwareJob {\n return (\n typeof dep === `object` &&\n dep !== null &&\n typeof dep.hasPendingGraphRun === `function`\n )\n}\n\n/**\n * Scoped scheduler that coalesces work by context and job.\n *\n * - **context** (e.g. transaction id) defines the batching boundary; work is queued until flushed.\n * - **job id** deduplicates work within a context; scheduling the same job replaces the previous run function.\n * - Without a context id, work executes immediately.\n *\n * Callers manage their own state; the scheduler only orchestrates execution order.\n */\nexport class Scheduler {\n private contexts = new Map<SchedulerContextId, SchedulerContextState>()\n private clearListeners = new Set<(contextId: SchedulerContextId) => void>()\n\n /**\n * Get or create the state bucket for a context.\n */\n private getOrCreateContext(\n contextId: SchedulerContextId,\n ): SchedulerContextState {\n let context = this.contexts.get(contextId)\n if (!context) {\n context = {\n queue: [],\n jobs: new Map(),\n dependencies: new Map(),\n completed: new Set(),\n }\n this.contexts.set(contextId, context)\n }\n return context\n }\n\n /**\n * Schedule work. Without a context id, executes immediately.\n * Otherwise queues the job to be flushed once dependencies are satisfied.\n * Scheduling the same jobId again replaces the previous run function.\n */\n schedule({ contextId, jobId, dependencies, run }: ScheduleOptions): void {\n if (typeof contextId === `undefined`) {\n run()\n return\n }\n\n const context = this.getOrCreateContext(contextId)\n\n // If this is a new job, add it to the queue\n if (!context.jobs.has(jobId)) {\n context.queue.push(jobId)\n }\n\n // Store or replace the run function\n context.jobs.set(jobId, run)\n\n // Update dependencies\n if (dependencies) {\n const depSet = new Set<unknown>(dependencies)\n depSet.delete(jobId)\n context.dependencies.set(jobId, depSet)\n } else if (!context.dependencies.has(jobId)) {\n context.dependencies.set(jobId, new Set())\n }\n\n // Clear completion status since we're rescheduling\n context.completed.delete(jobId)\n }\n\n /**\n * Flush all queued work for a context. 
Jobs with unmet dependencies are retried.\n * Throws if a pass completes without running any job (dependency cycle).\n */\n flush(contextId: SchedulerContextId): void {\n const context = this.contexts.get(contextId)\n if (!context) return\n\n const { queue, jobs, dependencies, completed } = context\n\n while (queue.length > 0) {\n let ranThisPass = false\n const jobsThisPass = queue.length\n\n for (let i = 0; i < jobsThisPass; i++) {\n const jobId = queue.shift()!\n const run = jobs.get(jobId)\n if (!run) {\n dependencies.delete(jobId)\n completed.delete(jobId)\n continue\n }\n\n const deps = dependencies.get(jobId)\n let ready = !deps\n if (deps) {\n ready = true\n for (const dep of deps) {\n if (dep === jobId) continue\n\n const depHasPending =\n isPendingAwareJob(dep) && dep.hasPendingGraphRun(contextId)\n\n // Treat dependencies as blocking if the dep has a pending run in this\n // context or if it's enqueued and not yet complete. If the dep is\n // neither pending nor enqueued, consider it satisfied to avoid deadlocks\n // on lazy sources that never schedule work.\n if (\n (jobs.has(dep) && !completed.has(dep)) ||\n (!jobs.has(dep) && depHasPending)\n ) {\n ready = false\n break\n }\n }\n }\n\n if (ready) {\n jobs.delete(jobId)\n dependencies.delete(jobId)\n // Run the job. If it throws, we don't mark it complete, allowing the\n // error to propagate while maintaining scheduler state consistency.\n run()\n completed.add(jobId)\n ranThisPass = true\n } else {\n queue.push(jobId)\n }\n }\n\n if (!ranThisPass) {\n throw new Error(\n `Scheduler detected unresolved dependencies for context ${String(\n contextId,\n )}.`,\n )\n }\n }\n\n this.contexts.delete(contextId)\n }\n\n /**\n * Flush all contexts with pending work. Useful during tear-down.\n */\n flushAll(): void {\n for (const contextId of Array.from(this.contexts.keys())) {\n this.flush(contextId)\n }\n }\n\n /** Clear all scheduled jobs for a context. */\n clear(contextId: SchedulerContextId): void {\n this.contexts.delete(contextId)\n // Notify listeners that this context was cleared\n this.clearListeners.forEach((listener) => listener(contextId))\n }\n\n /** Register a listener to be notified when a context is cleared. */\n onClear(listener: (contextId: SchedulerContextId) => void): () => void {\n this.clearListeners.add(listener)\n return () => this.clearListeners.delete(listener)\n }\n\n /** Check if a context has pending jobs. */\n hasPendingJobs(contextId: SchedulerContextId): boolean {\n const context = this.contexts.get(contextId)\n return !!context && context.jobs.size > 0\n }\n\n /** Remove a single job from a context and clean up its dependencies. 
*/\n clearJob(contextId: SchedulerContextId, jobId: unknown): void {\n const context = this.contexts.get(contextId)\n if (!context) return\n\n context.jobs.delete(jobId)\n context.dependencies.delete(jobId)\n context.completed.delete(jobId)\n context.queue = context.queue.filter((id) => id !== jobId)\n\n if (context.jobs.size === 0) {\n this.contexts.delete(contextId)\n }\n }\n}\n\nexport const transactionScopedScheduler = new Scheduler()\n"],"names":[],"mappings":";;AA+BA,SAAS,kBAAkB,KAAkC;AAC3D,SACE,OAAO,QAAQ,YACf,QAAQ,QACR,OAAO,IAAI,uBAAuB;AAEtC;AAWO,MAAM,UAAU;AAAA,EAAhB,cAAA;AACL,SAAQ,+BAAe,IAAA;AACvB,SAAQ,qCAAqB,IAAA;AAAA,EAA6C;AAAA;AAAA;AAAA;AAAA,EAKlE,mBACN,WACuB;AACvB,QAAI,UAAU,KAAK,SAAS,IAAI,SAAS;AACzC,QAAI,CAAC,SAAS;AACZ,gBAAU;AAAA,QACR,OAAO,CAAA;AAAA,QACP,0BAAU,IAAA;AAAA,QACV,kCAAkB,IAAA;AAAA,QAClB,+BAAe,IAAA;AAAA,MAAI;AAErB,WAAK,SAAS,IAAI,WAAW,OAAO;AAAA,IACtC;AACA,WAAO;AAAA,EACT;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,EAOA,SAAS,EAAE,WAAW,OAAO,cAAc,OAA8B;AACvE,QAAI,OAAO,cAAc,aAAa;AACpC,UAAA;AACA;AAAA,IACF;AAEA,UAAM,UAAU,KAAK,mBAAmB,SAAS;AAGjD,QAAI,CAAC,QAAQ,KAAK,IAAI,KAAK,GAAG;AAC5B,cAAQ,MAAM,KAAK,KAAK;AAAA,IAC1B;AAGA,YAAQ,KAAK,IAAI,OAAO,GAAG;AAG3B,QAAI,cAAc;AAChB,YAAM,SAAS,IAAI,IAAa,YAAY;AAC5C,aAAO,OAAO,KAAK;AACnB,cAAQ,aAAa,IAAI,OAAO,MAAM;AAAA,IACxC,WAAW,CAAC,QAAQ,aAAa,IAAI,KAAK,GAAG;AAC3C,cAAQ,aAAa,IAAI,OAAO,oBAAI,KAAK;AAAA,IAC3C;AAGA,YAAQ,UAAU,OAAO,KAAK;AAAA,EAChC;AAAA;AAAA;AAAA;AAAA;AAAA,EAMA,MAAM,WAAqC;AACzC,UAAM,UAAU,KAAK,SAAS,IAAI,SAAS;AAC3C,QAAI,CAAC,QAAS;AAEd,UAAM,EAAE,OAAO,MAAM,cAAc,cAAc;AAEjD,WAAO,MAAM,SAAS,GAAG;AACvB,UAAI,cAAc;AAClB,YAAM,eAAe,MAAM;AAE3B,eAAS,IAAI,GAAG,IAAI,cAAc,KAAK;AACrC,cAAM,QAAQ,MAAM,MAAA;AACpB,cAAM,MAAM,KAAK,IAAI,KAAK;AAC1B,YAAI,CAAC,KAAK;AACR,uBAAa,OAAO,KAAK;AACzB,oBAAU,OAAO,KAAK;AACtB;AAAA,QACF;AAEA,cAAM,OAAO,aAAa,IAAI,KAAK;AACnC,YAAI,QAAQ,CAAC;AACb,YAAI,MAAM;AACR,kBAAQ;AACR,qBAAW,OAAO,MAAM;AACtB,gBAAI,QAAQ,MAAO;AAEnB,kBAAM,gBACJ,kBAAkB,GAAG,KAAK,IAAI,mBAAmB,SAAS;AAM5D,gBACG,KAAK,IAAI,GAAG,KAAK,CAAC,UAAU,IAAI,GAAG,KACnC,CAAC,KAAK,IAAI,GAAG,KAAK,eACnB;AACA,sBAAQ;AACR;AAAA,YACF;AAAA,UACF;AAAA,QACF;AAEA,YAAI,OAAO;AACT,eAAK,OAAO,KAAK;AACjB,uBAAa,OAAO,KAAK;AAGzB,cAAA;AACA,oBAAU,IAAI,KAAK;AACnB,wBAAc;AAAA,QAChB,OAAO;AACL,gBAAM,KAAK,KAAK;AAAA,QAClB;AAAA,MACF;AAEA,UAAI,CAAC,aAAa;AAChB,cAAM,IAAI;AAAA,UACR,0DAA0D;AAAA,YACxD;AAAA,UAAA,CACD;AAAA,QAAA;AAAA,MAEL;AAAA,IACF;AAEA,SAAK,SAAS,OAAO,SAAS;AAAA,EAChC;AAAA;AAAA;AAAA;AAAA,EAKA,WAAiB;AACf,eAAW,aAAa,MAAM,KAAK,KAAK,SAAS,KAAA,CAAM,GAAG;AACxD,WAAK,MAAM,SAAS;AAAA,IACtB;AAAA,EACF;AAAA;AAAA,EAGA,MAAM,WAAqC;AACzC,SAAK,SAAS,OAAO,SAAS;AAE9B,SAAK,eAAe,QAAQ,CAAC,aAAa,SAAS,SAAS,CAAC;AAAA,EAC/D;AAAA;AAAA,EAGA,QAAQ,UAA+D;AACrE,SAAK,eAAe,IAAI,QAAQ;AAChC,WAAO,MAAM,KAAK,eAAe,OAAO,QAAQ;AAAA,EAClD;AAAA;AAAA,EAGA,eAAe,WAAwC;AACrD,UAAM,UAAU,KAAK,SAAS,IAAI,SAAS;AAC3C,WAAO,CAAC,CAAC,WAAW,QAAQ,KAAK,OAAO;AAAA,EAC1C;AAAA;AAAA,EAGA,SAAS,WAA+B,OAAsB;AAC5D,UAAM,UAAU,KAAK,SAAS,IAAI,SAAS;AAC3C,QAAI,CAAC,QAAS;AAEd,YAAQ,KAAK,OAAO,KAAK;AACzB,YAAQ,aAAa,OAAO,KAAK;AACjC,YAAQ,UAAU,OAAO,KAAK;AAC9B,YAAQ,QAAQ,QAAQ,MAAM,OAAO,CAAC,OAAO,OAAO,KAAK;AAEzD,QAAI,QAAQ,KAAK,SAAS,GAAG;AAC3B,WAAK,SAAS,OAAO,SAAS;AAAA,IAChC;AAAA,EACF;AACF;AAEO,MAAM,6BAA6B,IAAI,UAAA;;;"}
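The scheduler sourcemap embeds the full `Scheduler` source. A small sketch of the scheduling semantics its comments describe (the class lives in an internal module; whether and how it is exported from the package root is not verified here):

// Jobs scheduled under a context id are queued, deduplicated by jobId,
// and run on flush once their declared dependencies have completed.
const scheduler = new Scheduler()
const order: Array<string> = []

scheduler.schedule({ contextId: 'tx-1', jobId: 'b', dependencies: ['a'], run: () => order.push('b') })
scheduler.schedule({ contextId: 'tx-1', jobId: 'a', run: () => order.push('a') })

// Without a contextId the work executes immediately.
scheduler.schedule({ jobId: 'now', run: () => order.push('now') })

scheduler.flush('tx-1')
// order is now ['now', 'a', 'b']: 'a' runs before 'b' because 'b' depends on it.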
@@ -1 +1 @@
-
{"version":3,"file":"debounceStrategy.cjs","sources":["../../../src/strategies/debounceStrategy.ts"],"sourcesContent":["import { LiteDebouncer } from
+
{"version":3,"file":"debounceStrategy.cjs","sources":["../../../src/strategies/debounceStrategy.ts"],"sourcesContent":["import { LiteDebouncer } from '@tanstack/pacer-lite/lite-debouncer'\nimport type { DebounceStrategy, DebounceStrategyOptions } from './types'\nimport type { Transaction } from '../transactions'\n\n/**\n * Creates a debounce strategy that delays transaction execution until after\n * a period of inactivity.\n *\n * Ideal for scenarios like search inputs or auto-save fields where you want\n * to wait for the user to stop typing before persisting changes.\n *\n * @param options - Configuration for the debounce behavior\n * @returns A debounce strategy instance\n *\n * @example\n * ```ts\n * const mutate = usePacedMutations({\n * onMutate: (value) => {\n * collection.update(id, draft => { draft.value = value })\n * },\n * mutationFn: async ({ transaction }) => {\n * await api.save(transaction.mutations)\n * },\n * strategy: debounceStrategy({ wait: 500 })\n * })\n * ```\n */\nexport function debounceStrategy(\n options: DebounceStrategyOptions,\n): DebounceStrategy {\n const debouncer = new LiteDebouncer(\n (callback: () => Transaction) => callback(),\n options,\n )\n\n return {\n _type: `debounce`,\n options,\n execute: <T extends object = Record<string, unknown>>(\n fn: () => Transaction<T>,\n ) => {\n debouncer.maybeExecute(fn as () => Transaction)\n },\n cleanup: () => {\n debouncer.cancel()\n },\n }\n}\n"],"names":["LiteDebouncer"],"mappings":";;;AA2BO,SAAS,iBACd,SACkB;AAClB,QAAM,YAAY,IAAIA,cAAAA;AAAAA,IACpB,CAAC,aAAgC,SAAA;AAAA,IACjC;AAAA,EAAA;AAGF,SAAO;AAAA,IACL,OAAO;AAAA,IACP;AAAA,IACA,SAAS,CACP,OACG;AACH,gBAAU,aAAa,EAAuB;AAAA,IAChD;AAAA,IACA,SAAS,MAAM;AACb,gBAAU,OAAA;AAAA,IACZ;AAAA,EAAA;AAEJ;;"}
@@ -1 +1 @@
-
{"version":3,"file":"queueStrategy.cjs","sources":["../../../src/strategies/queueStrategy.ts"],"sourcesContent":["import { LiteQueuer } from
+
{"version":3,"file":"queueStrategy.cjs","sources":["../../../src/strategies/queueStrategy.ts"],"sourcesContent":["import { LiteQueuer } from '@tanstack/pacer-lite/lite-queuer'\nimport type { QueueStrategy, QueueStrategyOptions } from './types'\nimport type { Transaction } from '../transactions'\n\n/**\n * Creates a queue strategy that processes all mutations in order with proper serialization.\n *\n * Unlike other strategies that may drop executions, queue ensures every\n * mutation is processed sequentially. Each transaction commit completes before\n * the next one starts. Useful when data consistency is critical and\n * every operation must complete in order.\n *\n * @param options - Configuration for queue behavior (FIFO/LIFO, timing, size limits)\n * @returns A queue strategy instance\n *\n * @example\n * ```ts\n * // FIFO queue - process in order received\n * const mutate = usePacedMutations({\n * mutationFn: async ({ transaction }) => {\n * await api.save(transaction.mutations)\n * },\n * strategy: queueStrategy({\n * wait: 200,\n * addItemsTo: 'back',\n * getItemsFrom: 'front'\n * })\n * })\n * ```\n *\n * @example\n * ```ts\n * // LIFO queue - process most recent first\n * const mutate = usePacedMutations({\n * mutationFn: async ({ transaction }) => {\n * await api.save(transaction.mutations)\n * },\n * strategy: queueStrategy({\n * wait: 200,\n * addItemsTo: 'back',\n * getItemsFrom: 'back'\n * })\n * })\n * ```\n */\nexport function queueStrategy(options?: QueueStrategyOptions): QueueStrategy {\n // Manual promise chaining to ensure async serialization\n // LiteQueuer (unlike AsyncQueuer from @tanstack/pacer) lacks built-in async queue\n // primitives and concurrency control. We compensate by manually chaining promises\n // to ensure each transaction completes before the next one starts.\n let processingChain = Promise.resolve()\n\n const queuer = new LiteQueuer<() => Transaction>(\n (fn) => {\n // Chain each transaction to the previous one's completion\n processingChain = processingChain\n .then(async () => {\n const transaction = fn()\n // Wait for the transaction to be persisted before processing next item\n await transaction.isPersisted.promise\n })\n .catch(() => {\n // Errors are handled via transaction.isPersisted.promise and surfaced there.\n // This catch prevents unhandled promise rejections from breaking the chain,\n // ensuring subsequent transactions can still execute even if one fails.\n })\n },\n {\n wait: options?.wait ?? 0,\n maxSize: options?.maxSize,\n addItemsTo: options?.addItemsTo ?? `back`, // Default FIFO: add to back\n getItemsFrom: options?.getItemsFrom ?? 
`front`, // Default FIFO: get from front\n started: true, // Start processing immediately\n },\n )\n\n return {\n _type: `queue`,\n options,\n execute: <T extends object = Record<string, unknown>>(\n fn: () => Transaction<T>,\n ) => {\n // Add the transaction-creating function to the queue\n queuer.addItem(fn as () => Transaction)\n },\n cleanup: () => {\n queuer.stop()\n queuer.clear()\n },\n }\n}\n"],"names":["LiteQueuer"],"mappings":";;;AA6CO,SAAS,cAAc,SAA+C;AAK3E,MAAI,kBAAkB,QAAQ,QAAA;AAE9B,QAAM,SAAS,IAAIA,WAAAA;AAAAA,IACjB,CAAC,OAAO;AAEN,wBAAkB,gBACf,KAAK,YAAY;AAChB,cAAM,cAAc,GAAA;AAEpB,cAAM,YAAY,YAAY;AAAA,MAChC,CAAC,EACA,MAAM,MAAM;AAAA,MAIb,CAAC;AAAA,IACL;AAAA,IACA;AAAA,MACE,MAAM,SAAS,QAAQ;AAAA,MACvB,SAAS,SAAS;AAAA,MAClB,YAAY,SAAS,cAAc;AAAA;AAAA,MACnC,cAAc,SAAS,gBAAgB;AAAA;AAAA,MACvC,SAAS;AAAA;AAAA,IAAA;AAAA,EACX;AAGF,SAAO;AAAA,IACL,OAAO;AAAA,IACP;AAAA,IACA,SAAS,CACP,OACG;AAEH,aAAO,QAAQ,EAAuB;AAAA,IACxC;AAAA,IACA,SAAS,MAAM;AACb,aAAO,KAAA;AACP,aAAO,MAAA;AAAA,IACT;AAAA,EAAA;AAEJ;;"}
@@ -1 +1 @@
-
{"version":3,"file":"throttleStrategy.cjs","sources":["../../../src/strategies/throttleStrategy.ts"],"sourcesContent":["import { LiteThrottler } from
+
{"version":3,"file":"throttleStrategy.cjs","sources":["../../../src/strategies/throttleStrategy.ts"],"sourcesContent":["import { LiteThrottler } from '@tanstack/pacer-lite/lite-throttler'\nimport type { ThrottleStrategy, ThrottleStrategyOptions } from './types'\nimport type { Transaction } from '../transactions'\n\n/**\n * Creates a throttle strategy that ensures transactions are evenly spaced\n * over time.\n *\n * Provides smooth, controlled execution patterns ideal for UI updates like\n * sliders, progress bars, or scroll handlers where you want consistent\n * execution timing.\n *\n * @param options - Configuration for throttle behavior\n * @returns A throttle strategy instance\n *\n * @example\n * ```ts\n * // Throttle slider updates to every 200ms\n * const mutate = usePacedMutations({\n * onMutate: (volume) => {\n * settingsCollection.update('volume', draft => { draft.value = volume })\n * },\n * mutationFn: async ({ transaction }) => {\n * await api.updateVolume(transaction.mutations)\n * },\n * strategy: throttleStrategy({ wait: 200 })\n * })\n * ```\n *\n * @example\n * ```ts\n * // Throttle with leading and trailing execution\n * const mutate = usePacedMutations({\n * onMutate: (data) => {\n * collection.update(id, draft => { Object.assign(draft, data) })\n * },\n * mutationFn: async ({ transaction }) => {\n * await api.save(transaction.mutations)\n * },\n * strategy: throttleStrategy({\n * wait: 500,\n * leading: true,\n * trailing: true\n * })\n * })\n * ```\n */\nexport function throttleStrategy(\n options: ThrottleStrategyOptions,\n): ThrottleStrategy {\n const throttler = new LiteThrottler(\n (callback: () => Transaction) => callback(),\n options,\n )\n\n return {\n _type: `throttle`,\n options,\n execute: <T extends object = Record<string, unknown>>(\n fn: () => Transaction<T>,\n ) => {\n throttler.maybeExecute(fn as () => Transaction)\n },\n cleanup: () => {\n throttler.cancel()\n },\n }\n}\n"],"names":["LiteThrottler"],"mappings":";;;AA+CO,SAAS,iBACd,SACkB;AAClB,QAAM,YAAY,IAAIA,cAAAA;AAAAA,IACpB,CAAC,aAAgC,SAAA;AAAA,IACjC;AAAA,EAAA;AAGF,SAAO;AAAA,IACL,OAAO;AAAA,IACP;AAAA,IACA,SAAS,CACP,OACG;AACH,gBAAU,aAAa,EAAuB;AAAA,IAChD;AAAA,IACA,SAAS,MAAM;AACb,gBAAU,OAAA;AAAA,IACZ;AAAA,EAAA;AAEJ;;"}
@@ -1 +1 @@
-
{"version":3,"file":"transactions.cjs","sources":["../../src/transactions.ts"],"sourcesContent":["import { createDeferred } from \"./deferred\"\nimport \"./duplicate-instance-check\"\nimport {\n MissingMutationFunctionError,\n TransactionAlreadyCompletedRollbackError,\n TransactionNotPendingCommitError,\n TransactionNotPendingMutateError,\n} from \"./errors\"\nimport { transactionScopedScheduler } from \"./scheduler.js\"\nimport type { Deferred } from \"./deferred\"\nimport type {\n MutationFn,\n PendingMutation,\n TransactionConfig,\n TransactionState,\n TransactionWithMutations,\n} from \"./types\"\n\nconst transactions: Array<Transaction<any>> = []\nlet transactionStack: Array<Transaction<any>> = []\n\nlet sequenceNumber = 0\n\n/**\n * Merges two pending mutations for the same item within a transaction\n *\n * Merge behavior truth table:\n * - (insert, update) → insert (merge changes, keep empty original)\n * - (insert, delete) → null (cancel both mutations)\n * - (update, delete) → delete (delete dominates)\n * - (update, update) → update (replace with latest, union changes)\n * - (delete, delete) → delete (replace with latest)\n * - (insert, insert) → insert (replace with latest)\n *\n * Note: (delete, update) and (delete, insert) should never occur as the collection\n * layer prevents operations on deleted items within the same transaction.\n *\n * @param existing - The existing mutation in the transaction\n * @param incoming - The new mutation being applied\n * @returns The merged mutation, or null if both should be removed\n */\nfunction mergePendingMutations<T extends object>(\n existing: PendingMutation<T>,\n incoming: PendingMutation<T>\n): PendingMutation<T> | null {\n // Truth table implementation\n switch (`${existing.type}-${incoming.type}` as const) {\n case `insert-update`: {\n // Update after insert: keep as insert but merge changes\n // For insert-update, the key should remain the same since collections don't allow key changes\n return {\n ...existing,\n type: `insert` as const,\n original: {},\n modified: incoming.modified,\n changes: { ...existing.changes, ...incoming.changes },\n // Keep existing keys (key changes not allowed in updates)\n key: existing.key,\n globalKey: existing.globalKey,\n // Merge metadata (last-write-wins)\n metadata: incoming.metadata ?? existing.metadata,\n syncMetadata: { ...existing.syncMetadata, ...incoming.syncMetadata },\n // Update tracking info\n mutationId: incoming.mutationId,\n updatedAt: incoming.updatedAt,\n }\n }\n\n case `insert-delete`:\n // Delete after insert: cancel both mutations\n return null\n\n case `update-delete`:\n // Delete after update: delete dominates\n return incoming\n\n case `update-update`: {\n // Update after update: replace with latest, union changes\n return {\n ...incoming,\n // Keep original from first update\n original: existing.original,\n // Union the changes from both updates\n changes: { ...existing.changes, ...incoming.changes },\n // Merge metadata\n metadata: incoming.metadata ?? 
existing.metadata,\n syncMetadata: { ...existing.syncMetadata, ...incoming.syncMetadata },\n }\n }\n\n case `delete-delete`:\n case `insert-insert`:\n // Same type: replace with latest\n return incoming\n\n default: {\n // Exhaustiveness check\n const _exhaustive: never = `${existing.type}-${incoming.type}` as never\n throw new Error(`Unhandled mutation combination: ${_exhaustive}`)\n }\n }\n}\n\n/**\n * Creates a new transaction for grouping multiple collection operations\n * @param config - Transaction configuration with mutation function\n * @returns A new Transaction instance\n * @example\n * // Basic transaction usage\n * const tx = createTransaction({\n * mutationFn: async ({ transaction }) => {\n * // Send all mutations to API\n * await api.saveChanges(transaction.mutations)\n * }\n * })\n *\n * tx.mutate(() => {\n * collection.insert({ id: \"1\", text: \"Buy milk\" })\n * collection.update(\"2\", draft => { draft.completed = true })\n * })\n *\n * await tx.isPersisted.promise\n *\n * @example\n * // Handle transaction errors\n * try {\n * const tx = createTransaction({\n * mutationFn: async () => { throw new Error(\"API failed\") }\n * })\n *\n * tx.mutate(() => {\n * collection.insert({ id: \"1\", text: \"New item\" })\n * })\n *\n * await tx.isPersisted.promise\n * } catch (error) {\n * console.log('Transaction failed:', error)\n * }\n *\n * @example\n * // Manual commit control\n * const tx = createTransaction({\n * autoCommit: false,\n * mutationFn: async () => {\n * // API call\n * }\n * })\n *\n * tx.mutate(() => {\n * collection.insert({ id: \"1\", text: \"Item\" })\n * })\n *\n * // Commit later\n * await tx.commit()\n */\nexport function createTransaction<T extends object = Record<string, unknown>>(\n config: TransactionConfig<T>\n): Transaction<T> {\n const newTransaction = new Transaction<T>(config)\n transactions.push(newTransaction)\n return newTransaction\n}\n\n/**\n * Gets the currently active ambient transaction, if any\n * Used internally by collection operations to join existing transactions\n * @returns The active transaction or undefined if none is active\n * @example\n * // Check if operations will join an ambient transaction\n * const ambientTx = getActiveTransaction()\n * if (ambientTx) {\n * console.log('Operations will join transaction:', ambientTx.id)\n * }\n */\nexport function getActiveTransaction(): Transaction | undefined {\n if (transactionStack.length > 0) {\n return transactionStack.slice(-1)[0]\n } else {\n return undefined\n }\n}\n\nfunction registerTransaction(tx: Transaction<any>) {\n // Clear any stale work that may have been left behind if a previous mutate\n // scope aborted before we could flush.\n transactionScopedScheduler.clear(tx.id)\n transactionStack.push(tx)\n}\n\nfunction unregisterTransaction(tx: Transaction<any>) {\n // Always flush pending work for this transaction before removing it from\n // the ambient stack – this runs even if the mutate callback throws.\n // If flush throws (e.g., due to a job error), we still clean up the stack.\n try {\n transactionScopedScheduler.flush(tx.id)\n } finally {\n transactionStack = transactionStack.filter((t) => t.id !== tx.id)\n }\n}\n\nfunction removeFromPendingList(tx: Transaction<any>) {\n const index = transactions.findIndex((t) => t.id === tx.id)\n if (index !== -1) {\n transactions.splice(index, 1)\n }\n}\n\nclass Transaction<T extends object = Record<string, unknown>> {\n public id: string\n public state: TransactionState\n public mutationFn: MutationFn<T>\n public mutations: 
Array<PendingMutation<T>>\n public isPersisted: Deferred<Transaction<T>>\n public autoCommit: boolean\n public createdAt: Date\n public sequenceNumber: number\n public metadata: Record<string, unknown>\n public error?: {\n message: string\n error: Error\n }\n\n constructor(config: TransactionConfig<T>) {\n if (typeof config.mutationFn === `undefined`) {\n throw new MissingMutationFunctionError()\n }\n this.id = config.id ?? crypto.randomUUID()\n this.mutationFn = config.mutationFn\n this.state = `pending`\n this.mutations = []\n this.isPersisted = createDeferred<Transaction<T>>()\n this.autoCommit = config.autoCommit ?? true\n this.createdAt = new Date()\n this.sequenceNumber = sequenceNumber++\n this.metadata = config.metadata ?? {}\n }\n\n setState(newState: TransactionState) {\n this.state = newState\n\n if (newState === `completed` || newState === `failed`) {\n removeFromPendingList(this)\n }\n }\n\n /**\n * Execute collection operations within this transaction\n * @param callback - Function containing collection operations to group together. If the\n * callback returns a Promise, the transaction context will remain active until the promise\n * settles, allowing optimistic writes after `await` boundaries.\n * @returns This transaction for chaining\n * @example\n * // Group multiple operations\n * const tx = createTransaction({ mutationFn: async () => {\n * // Send to API\n * }})\n *\n * tx.mutate(() => {\n * collection.insert({ id: \"1\", text: \"Buy milk\" })\n * collection.update(\"2\", draft => { draft.completed = true })\n * collection.delete(\"3\")\n * })\n *\n * await tx.isPersisted.promise\n *\n * @example\n * // Handle mutate errors\n * try {\n * tx.mutate(() => {\n * collection.insert({ id: \"invalid\" }) // This might throw\n * })\n * } catch (error) {\n * console.log('Mutation failed:', error)\n * }\n *\n * @example\n * // Manual commit control\n * const tx = createTransaction({ autoCommit: false, mutationFn: async () => {} })\n *\n * tx.mutate(() => {\n * collection.insert({ id: \"1\", text: \"Item\" })\n * })\n *\n * // Commit later when ready\n * await tx.commit()\n */\n mutate(callback: () => void): Transaction<T> {\n if (this.state !== `pending`) {\n throw new TransactionNotPendingMutateError()\n }\n\n registerTransaction(this)\n\n try {\n callback()\n } finally {\n unregisterTransaction(this)\n }\n\n if (this.autoCommit) {\n this.commit().catch(() => {\n // Errors from autoCommit are handled via isPersisted.promise\n // This catch prevents unhandled promise rejections\n })\n }\n\n return this\n }\n\n /**\n * Apply new mutations to this transaction, intelligently merging with existing mutations\n *\n * When mutations operate on the same item (same globalKey), they are merged according to\n * the following rules:\n *\n * - **insert + update** → insert (merge changes, keep empty original)\n * - **insert + delete** → removed (mutations cancel each other out)\n * - **update + delete** → delete (delete dominates)\n * - **update + update** → update (union changes, keep first original)\n * - **same type** → replace with latest\n *\n * This merging reduces over-the-wire churn and keeps the optimistic local view\n * aligned with user intent.\n *\n * @param mutations - Array of new mutations to apply\n */\n applyMutations(mutations: Array<PendingMutation<any>>): void {\n for (const newMutation of mutations) {\n const existingIndex = this.mutations.findIndex(\n (m) => m.globalKey === newMutation.globalKey\n )\n\n if (existingIndex >= 0) {\n const existingMutation = 
this.mutations[existingIndex]!\n const mergeResult = mergePendingMutations(existingMutation, newMutation)\n\n if (mergeResult === null) {\n // Remove the mutation (e.g., delete after insert cancels both)\n this.mutations.splice(existingIndex, 1)\n } else {\n // Replace with merged mutation\n this.mutations[existingIndex] = mergeResult\n }\n } else {\n // Insert new mutation\n this.mutations.push(newMutation)\n }\n }\n }\n\n /**\n * Rollback the transaction and any conflicting transactions\n * @param config - Configuration for rollback behavior\n * @returns This transaction for chaining\n * @example\n * // Manual rollback\n * const tx = createTransaction({ mutationFn: async () => {\n * // Send to API\n * }})\n *\n * tx.mutate(() => {\n * collection.insert({ id: \"1\", text: \"Buy milk\" })\n * })\n *\n * // Rollback if needed\n * if (shouldCancel) {\n * tx.rollback()\n * }\n *\n * @example\n * // Handle rollback cascade (automatic)\n * const tx1 = createTransaction({ mutationFn: async () => {} })\n * const tx2 = createTransaction({ mutationFn: async () => {} })\n *\n * tx1.mutate(() => collection.update(\"1\", draft => { draft.value = \"A\" }))\n * tx2.mutate(() => collection.update(\"1\", draft => { draft.value = \"B\" })) // Same item\n *\n * tx1.rollback() // This will also rollback tx2 due to conflict\n *\n * @example\n * // Handle rollback in error scenarios\n * try {\n * await tx.isPersisted.promise\n * } catch (error) {\n * console.log('Transaction was rolled back:', error)\n * // Transaction automatically rolled back on mutation function failure\n * }\n */\n rollback(config?: { isSecondaryRollback?: boolean }): Transaction<T> {\n const isSecondaryRollback = config?.isSecondaryRollback ?? false\n if (this.state === `completed`) {\n throw new TransactionAlreadyCompletedRollbackError()\n }\n\n this.setState(`failed`)\n\n // See if there's any other transactions w/ mutations on the same ids\n // and roll them back as well.\n if (!isSecondaryRollback) {\n const mutationIds = new Set()\n this.mutations.forEach((m) => mutationIds.add(m.globalKey))\n for (const t of transactions) {\n t.state === `pending` &&\n t.mutations.some((m) => mutationIds.has(m.globalKey)) &&\n t.rollback({ isSecondaryRollback: true })\n }\n }\n\n // Reject the promise\n this.isPersisted.reject(this.error?.error)\n this.touchCollection()\n\n return this\n }\n\n // Tell collection that something has changed with the transaction\n touchCollection(): void {\n const hasCalled = new Set()\n for (const mutation of this.mutations) {\n if (!hasCalled.has(mutation.collection.id)) {\n mutation.collection._state.onTransactionStateChange()\n\n // Only call commitPendingTransactions if there are pending sync transactions\n if (mutation.collection._state.pendingSyncedTransactions.length > 0) {\n mutation.collection._state.commitPendingTransactions()\n }\n\n hasCalled.add(mutation.collection.id)\n }\n }\n }\n\n /**\n * Commit the transaction and execute the mutation function\n * @returns Promise that resolves to this transaction when complete\n * @example\n * // Manual commit (when autoCommit is false)\n * const tx = createTransaction({\n * autoCommit: false,\n * mutationFn: async ({ transaction }) => {\n * await api.saveChanges(transaction.mutations)\n * }\n * })\n *\n * tx.mutate(() => {\n * collection.insert({ id: \"1\", text: \"Buy milk\" })\n * })\n *\n * await tx.commit() // Manually commit\n *\n * @example\n * // Handle commit errors\n * try {\n * const tx = createTransaction({\n * mutationFn: async () => { throw new 
Error(\"API failed\") }\n * })\n *\n * tx.mutate(() => {\n * collection.insert({ id: \"1\", text: \"Item\" })\n * })\n *\n * await tx.commit()\n * } catch (error) {\n * console.log('Commit failed, transaction rolled back:', error)\n * }\n *\n * @example\n * // Check transaction state after commit\n * await tx.commit()\n * console.log(tx.state) // \"completed\" or \"failed\"\n */\n async commit(): Promise<Transaction<T>> {\n if (this.state !== `pending`) {\n throw new TransactionNotPendingCommitError()\n }\n\n this.setState(`persisting`)\n\n if (this.mutations.length === 0) {\n this.setState(`completed`)\n this.isPersisted.resolve(this)\n\n return this\n }\n\n // Run mutationFn\n try {\n // At this point we know there's at least one mutation\n // We've already verified mutations is non-empty, so this cast is safe\n // Use a direct type assertion instead of object spreading to preserve the original type\n await this.mutationFn({\n transaction: this as unknown as TransactionWithMutations<T>,\n })\n\n this.setState(`completed`)\n this.touchCollection()\n\n this.isPersisted.resolve(this)\n } catch (error) {\n // Preserve the original error for rethrowing\n const originalError =\n error instanceof Error ? error : new Error(String(error))\n\n // Update transaction with error information\n this.error = {\n message: originalError.message,\n error: originalError,\n }\n\n // rollback the transaction\n this.rollback()\n\n // Re-throw the original error to preserve identity and stack\n throw originalError\n }\n\n return this\n }\n\n /**\n * Compare two transactions by their createdAt time and sequence number in order\n * to sort them in the order they were created.\n * @param other - The other transaction to compare to\n * @returns -1 if this transaction was created before the other, 1 if it was created after, 0 if they were created at the same time\n */\n compareCreatedAt(other: Transaction<any>): number {\n const createdAtComparison =\n this.createdAt.getTime() - other.createdAt.getTime()\n if (createdAtComparison !== 0) {\n return createdAtComparison\n }\n return this.sequenceNumber - other.sequenceNumber\n }\n}\n\nexport type { Transaction 
}\n"],"names":["transactionScopedScheduler","MissingMutationFunctionError","createDeferred","TransactionNotPendingMutateError","TransactionAlreadyCompletedRollbackError","TransactionNotPendingCommitError"],"mappings":";;;;;AAkBA,MAAM,eAAwC,CAAA;AAC9C,IAAI,mBAA4C,CAAA;AAEhD,IAAI,iBAAiB;AAoBrB,SAAS,sBACP,UACA,UAC2B;AAE3B,UAAQ,GAAG,SAAS,IAAI,IAAI,SAAS,IAAI,IAAA;AAAA,IACvC,KAAK,iBAAiB;AAGpB,aAAO;AAAA,QACL,GAAG;AAAA,QACH,MAAM;AAAA,QACN,UAAU,CAAA;AAAA,QACV,UAAU,SAAS;AAAA,QACnB,SAAS,EAAE,GAAG,SAAS,SAAS,GAAG,SAAS,QAAA;AAAA;AAAA,QAE5C,KAAK,SAAS;AAAA,QACd,WAAW,SAAS;AAAA;AAAA,QAEpB,UAAU,SAAS,YAAY,SAAS;AAAA,QACxC,cAAc,EAAE,GAAG,SAAS,cAAc,GAAG,SAAS,aAAA;AAAA;AAAA,QAEtD,YAAY,SAAS;AAAA,QACrB,WAAW,SAAS;AAAA,MAAA;AAAA,IAExB;AAAA,IAEA,KAAK;AAEH,aAAO;AAAA,IAET,KAAK;AAEH,aAAO;AAAA,IAET,KAAK,iBAAiB;AAEpB,aAAO;AAAA,QACL,GAAG;AAAA;AAAA,QAEH,UAAU,SAAS;AAAA;AAAA,QAEnB,SAAS,EAAE,GAAG,SAAS,SAAS,GAAG,SAAS,QAAA;AAAA;AAAA,QAE5C,UAAU,SAAS,YAAY,SAAS;AAAA,QACxC,cAAc,EAAE,GAAG,SAAS,cAAc,GAAG,SAAS,aAAA;AAAA,MAAa;AAAA,IAEvE;AAAA,IAEA,KAAK;AAAA,IACL,KAAK;AAEH,aAAO;AAAA,IAET,SAAS;AAEP,YAAM,cAAqB,GAAG,SAAS,IAAI,IAAI,SAAS,IAAI;AAC5D,YAAM,IAAI,MAAM,mCAAmC,WAAW,EAAE;AAAA,IAClE;AAAA,EAAA;AAEJ;AAsDO,SAAS,kBACd,QACgB;AAChB,QAAM,iBAAiB,IAAI,YAAe,MAAM;AAChD,eAAa,KAAK,cAAc;AAChC,SAAO;AACT;AAaO,SAAS,uBAAgD;AAC9D,MAAI,iBAAiB,SAAS,GAAG;AAC/B,WAAO,iBAAiB,MAAM,EAAE,EAAE,CAAC;AAAA,EACrC,OAAO;AACL,WAAO;AAAA,EACT;AACF;AAEA,SAAS,oBAAoB,IAAsB;AAGjDA,uCAA2B,MAAM,GAAG,EAAE;AACtC,mBAAiB,KAAK,EAAE;AAC1B;AAEA,SAAS,sBAAsB,IAAsB;AAInD,MAAI;AACFA,yCAA2B,MAAM,GAAG,EAAE;AAAA,EACxC,UAAA;AACE,uBAAmB,iBAAiB,OAAO,CAAC,MAAM,EAAE,OAAO,GAAG,EAAE;AAAA,EAClE;AACF;AAEA,SAAS,sBAAsB,IAAsB;AACnD,QAAM,QAAQ,aAAa,UAAU,CAAC,MAAM,EAAE,OAAO,GAAG,EAAE;AAC1D,MAAI,UAAU,IAAI;AAChB,iBAAa,OAAO,OAAO,CAAC;AAAA,EAC9B;AACF;AAEA,MAAM,YAAwD;AAAA,EAe5D,YAAY,QAA8B;AACxC,QAAI,OAAO,OAAO,eAAe,aAAa;AAC5C,YAAM,IAAIC,OAAAA,6BAAA;AAAA,IACZ;AACA,SAAK,KAAK,OAAO,MAAM,OAAO,WAAA;AAC9B,SAAK,aAAa,OAAO;AACzB,SAAK,QAAQ;AACb,SAAK,YAAY,CAAA;AACjB,SAAK,cAAcC,wBAAA;AACnB,SAAK,aAAa,OAAO,cAAc;AACvC,SAAK,gCAAgB,KAAA;AACrB,SAAK,iBAAiB;AACtB,SAAK,WAAW,OAAO,YAAY,CAAA;AAAA,EACrC;AAAA,EAEA,SAAS,UAA4B;AACnC,SAAK,QAAQ;AAEb,QAAI,aAAa,eAAe,aAAa,UAAU;AACrD,4BAAsB,IAAI;AAAA,IAC5B;AAAA,EACF;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,EA2CA,OAAO,UAAsC;AAC3C,QAAI,KAAK,UAAU,WAAW;AAC5B,YAAM,IAAIC,OAAAA,iCAAA;AAAA,IACZ;AAEA,wBAAoB,IAAI;AAExB,QAAI;AACF,eAAA;AAAA,IACF,UAAA;AACE,4BAAsB,IAAI;AAAA,IAC5B;AAEA,QAAI,KAAK,YAAY;AACnB,WAAK,SAAS,MAAM,MAAM;AAAA,MAG1B,CAAC;AAAA,IACH;AAEA,WAAO;AAAA,EACT;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,EAmBA,eAAe,WAA8C;AAC3D,eAAW,eAAe,WAAW;AACnC,YAAM,gBAAgB,KAAK,UAAU;AAAA,QACnC,CAAC,MAAM,EAAE,cAAc,YAAY;AAAA,MAAA;AAGrC,UAAI,iBAAiB,GAAG;AACtB,cAAM,mBAAmB,KAAK,UAAU,aAAa;AACrD,cAAM,cAAc,sBAAsB,kBAAkB,WAAW;AAEvE,YAAI,gBAAgB,MAAM;AAExB,eAAK,UAAU,OAAO,eAAe,CAAC;AAAA,QACxC,OAAO;AAEL,eAAK,UAAU,aAAa,IAAI;AAAA,QAClC;AAAA,MACF,OAAO;AAEL,aAAK,UAAU,KAAK,WAAW;AAAA,MACjC;AAAA,IACF;AAAA,EACF;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,EAwCA,SAAS,QAA4D;AACnE,UAAM,sBAAsB,QAAQ,uBAAuB;AAC3D,QAAI,KAAK,UAAU,aAAa;AAC9B,YAAM,IAAIC,OAAAA,yCAAA;AAAA,IACZ;AAEA,SAAK,SAAS,QAAQ;AAItB,QAAI,CAAC,qBAAqB;AACxB,YAAM,kCAAkB,IAAA;AACxB,WAAK,UAAU,QAAQ,CAAC,MAAM,YAAY,IAAI,EAAE,SAAS,
CAAC;AAC1D,iBAAW,KAAK,cAAc;AAC5B,UAAE,UAAU,aACV,EAAE,UAAU,KAAK,CAAC,MAAM,YAAY,IAAI,EAAE,SAAS,CAAC,KACpD,EAAE,SAAS,EAAE,qBAAqB,MAAM;AAAA,MAC5C;AAAA,IACF;AAGA,SAAK,YAAY,OAAO,KAAK,OAAO,KAAK;AACzC,SAAK,gBAAA;AAEL,WAAO;AAAA,EACT;AAAA;AAAA,EAGA,kBAAwB;AACtB,UAAM,gCAAgB,IAAA;AACtB,eAAW,YAAY,KAAK,WAAW;AACrC,UAAI,CAAC,UAAU,IAAI,SAAS,WAAW,EAAE,GAAG;AAC1C,iBAAS,WAAW,OAAO,yBAAA;AAG3B,YAAI,SAAS,WAAW,OAAO,0BAA0B,SAAS,GAAG;AACnE,mBAAS,WAAW,OAAO,0BAAA;AAAA,QAC7B;AAEA,kBAAU,IAAI,SAAS,WAAW,EAAE;AAAA,MACtC;AAAA,IACF;AAAA,EACF;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,EAyCA,MAAM,SAAkC;AACtC,QAAI,KAAK,UAAU,WAAW;AAC5B,YAAM,IAAIC,OAAAA,iCAAA;AAAA,IACZ;AAEA,SAAK,SAAS,YAAY;AAE1B,QAAI,KAAK,UAAU,WAAW,GAAG;AAC/B,WAAK,SAAS,WAAW;AACzB,WAAK,YAAY,QAAQ,IAAI;AAE7B,aAAO;AAAA,IACT;AAGA,QAAI;AAIF,YAAM,KAAK,WAAW;AAAA,QACpB,aAAa;AAAA,MAAA,CACd;AAED,WAAK,SAAS,WAAW;AACzB,WAAK,gBAAA;AAEL,WAAK,YAAY,QAAQ,IAAI;AAAA,IAC/B,SAAS,OAAO;AAEd,YAAM,gBACJ,iBAAiB,QAAQ,QAAQ,IAAI,MAAM,OAAO,KAAK,CAAC;AAG1D,WAAK,QAAQ;AAAA,QACX,SAAS,cAAc;AAAA,QACvB,OAAO;AAAA,MAAA;AAIT,WAAK,SAAA;AAGL,YAAM;AAAA,IACR;AAEA,WAAO;AAAA,EACT;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,EAQA,iBAAiB,OAAiC;AAChD,UAAM,sBACJ,KAAK,UAAU,YAAY,MAAM,UAAU,QAAA;AAC7C,QAAI,wBAAwB,GAAG;AAC7B,aAAO;AAAA,IACT;AACA,WAAO,KAAK,iBAAiB,MAAM;AAAA,EACrC;AACF;;;"}
+
{"version":3,"file":"transactions.cjs","sources":["../../src/transactions.ts"],"sourcesContent":["import { createDeferred } from './deferred'\nimport './duplicate-instance-check'\nimport {\n MissingMutationFunctionError,\n TransactionAlreadyCompletedRollbackError,\n TransactionNotPendingCommitError,\n TransactionNotPendingMutateError,\n} from './errors'\nimport { transactionScopedScheduler } from './scheduler.js'\nimport type { Deferred } from './deferred'\nimport type {\n MutationFn,\n PendingMutation,\n TransactionConfig,\n TransactionState,\n TransactionWithMutations,\n} from './types'\n\nconst transactions: Array<Transaction<any>> = []\nlet transactionStack: Array<Transaction<any>> = []\n\nlet sequenceNumber = 0\n\n/**\n * Merges two pending mutations for the same item within a transaction\n *\n * Merge behavior truth table:\n * - (insert, update) → insert (merge changes, keep empty original)\n * - (insert, delete) → null (cancel both mutations)\n * - (update, delete) → delete (delete dominates)\n * - (update, update) → update (replace with latest, union changes)\n * - (delete, delete) → delete (replace with latest)\n * - (insert, insert) → insert (replace with latest)\n *\n * Note: (delete, update) and (delete, insert) should never occur as the collection\n * layer prevents operations on deleted items within the same transaction.\n *\n * @param existing - The existing mutation in the transaction\n * @param incoming - The new mutation being applied\n * @returns The merged mutation, or null if both should be removed\n */\nfunction mergePendingMutations<T extends object>(\n existing: PendingMutation<T>,\n incoming: PendingMutation<T>,\n): PendingMutation<T> | null {\n // Truth table implementation\n switch (`${existing.type}-${incoming.type}` as const) {\n case `insert-update`: {\n // Update after insert: keep as insert but merge changes\n // For insert-update, the key should remain the same since collections don't allow key changes\n return {\n ...existing,\n type: `insert` as const,\n original: {},\n modified: incoming.modified,\n changes: { ...existing.changes, ...incoming.changes },\n // Keep existing keys (key changes not allowed in updates)\n key: existing.key,\n globalKey: existing.globalKey,\n // Merge metadata (last-write-wins)\n metadata: incoming.metadata ?? existing.metadata,\n syncMetadata: { ...existing.syncMetadata, ...incoming.syncMetadata },\n // Update tracking info\n mutationId: incoming.mutationId,\n updatedAt: incoming.updatedAt,\n }\n }\n\n case `insert-delete`:\n // Delete after insert: cancel both mutations\n return null\n\n case `update-delete`:\n // Delete after update: delete dominates\n return incoming\n\n case `update-update`: {\n // Update after update: replace with latest, union changes\n return {\n ...incoming,\n // Keep original from first update\n original: existing.original,\n // Union the changes from both updates\n changes: { ...existing.changes, ...incoming.changes },\n // Merge metadata\n metadata: incoming.metadata ?? 
existing.metadata,\n syncMetadata: { ...existing.syncMetadata, ...incoming.syncMetadata },\n }\n }\n\n case `delete-delete`:\n case `insert-insert`:\n // Same type: replace with latest\n return incoming\n\n default: {\n // Exhaustiveness check\n const _exhaustive: never = `${existing.type}-${incoming.type}` as never\n throw new Error(`Unhandled mutation combination: ${_exhaustive}`)\n }\n }\n}\n\n/**\n * Creates a new transaction for grouping multiple collection operations\n * @param config - Transaction configuration with mutation function\n * @returns A new Transaction instance\n * @example\n * // Basic transaction usage\n * const tx = createTransaction({\n * mutationFn: async ({ transaction }) => {\n * // Send all mutations to API\n * await api.saveChanges(transaction.mutations)\n * }\n * })\n *\n * tx.mutate(() => {\n * collection.insert({ id: \"1\", text: \"Buy milk\" })\n * collection.update(\"2\", draft => { draft.completed = true })\n * })\n *\n * await tx.isPersisted.promise\n *\n * @example\n * // Handle transaction errors\n * try {\n * const tx = createTransaction({\n * mutationFn: async () => { throw new Error(\"API failed\") }\n * })\n *\n * tx.mutate(() => {\n * collection.insert({ id: \"1\", text: \"New item\" })\n * })\n *\n * await tx.isPersisted.promise\n * } catch (error) {\n * console.log('Transaction failed:', error)\n * }\n *\n * @example\n * // Manual commit control\n * const tx = createTransaction({\n * autoCommit: false,\n * mutationFn: async () => {\n * // API call\n * }\n * })\n *\n * tx.mutate(() => {\n * collection.insert({ id: \"1\", text: \"Item\" })\n * })\n *\n * // Commit later\n * await tx.commit()\n */\nexport function createTransaction<T extends object = Record<string, unknown>>(\n config: TransactionConfig<T>,\n): Transaction<T> {\n const newTransaction = new Transaction<T>(config)\n transactions.push(newTransaction)\n return newTransaction\n}\n\n/**\n * Gets the currently active ambient transaction, if any\n * Used internally by collection operations to join existing transactions\n * @returns The active transaction or undefined if none is active\n * @example\n * // Check if operations will join an ambient transaction\n * const ambientTx = getActiveTransaction()\n * if (ambientTx) {\n * console.log('Operations will join transaction:', ambientTx.id)\n * }\n */\nexport function getActiveTransaction(): Transaction | undefined {\n if (transactionStack.length > 0) {\n return transactionStack.slice(-1)[0]\n } else {\n return undefined\n }\n}\n\nfunction registerTransaction(tx: Transaction<any>) {\n // Clear any stale work that may have been left behind if a previous mutate\n // scope aborted before we could flush.\n transactionScopedScheduler.clear(tx.id)\n transactionStack.push(tx)\n}\n\nfunction unregisterTransaction(tx: Transaction<any>) {\n // Always flush pending work for this transaction before removing it from\n // the ambient stack – this runs even if the mutate callback throws.\n // If flush throws (e.g., due to a job error), we still clean up the stack.\n try {\n transactionScopedScheduler.flush(tx.id)\n } finally {\n transactionStack = transactionStack.filter((t) => t.id !== tx.id)\n }\n}\n\nfunction removeFromPendingList(tx: Transaction<any>) {\n const index = transactions.findIndex((t) => t.id === tx.id)\n if (index !== -1) {\n transactions.splice(index, 1)\n }\n}\n\nclass Transaction<T extends object = Record<string, unknown>> {\n public id: string\n public state: TransactionState\n public mutationFn: MutationFn<T>\n public mutations: 
Array<PendingMutation<T>>\n public isPersisted: Deferred<Transaction<T>>\n public autoCommit: boolean\n public createdAt: Date\n public sequenceNumber: number\n public metadata: Record<string, unknown>\n public error?: {\n message: string\n error: Error\n }\n\n constructor(config: TransactionConfig<T>) {\n if (typeof config.mutationFn === `undefined`) {\n throw new MissingMutationFunctionError()\n }\n this.id = config.id ?? crypto.randomUUID()\n this.mutationFn = config.mutationFn\n this.state = `pending`\n this.mutations = []\n this.isPersisted = createDeferred<Transaction<T>>()\n this.autoCommit = config.autoCommit ?? true\n this.createdAt = new Date()\n this.sequenceNumber = sequenceNumber++\n this.metadata = config.metadata ?? {}\n }\n\n setState(newState: TransactionState) {\n this.state = newState\n\n if (newState === `completed` || newState === `failed`) {\n removeFromPendingList(this)\n }\n }\n\n /**\n * Execute collection operations within this transaction\n * @param callback - Function containing collection operations to group together. If the\n * callback returns a Promise, the transaction context will remain active until the promise\n * settles, allowing optimistic writes after `await` boundaries.\n * @returns This transaction for chaining\n * @example\n * // Group multiple operations\n * const tx = createTransaction({ mutationFn: async () => {\n * // Send to API\n * }})\n *\n * tx.mutate(() => {\n * collection.insert({ id: \"1\", text: \"Buy milk\" })\n * collection.update(\"2\", draft => { draft.completed = true })\n * collection.delete(\"3\")\n * })\n *\n * await tx.isPersisted.promise\n *\n * @example\n * // Handle mutate errors\n * try {\n * tx.mutate(() => {\n * collection.insert({ id: \"invalid\" }) // This might throw\n * })\n * } catch (error) {\n * console.log('Mutation failed:', error)\n * }\n *\n * @example\n * // Manual commit control\n * const tx = createTransaction({ autoCommit: false, mutationFn: async () => {} })\n *\n * tx.mutate(() => {\n * collection.insert({ id: \"1\", text: \"Item\" })\n * })\n *\n * // Commit later when ready\n * await tx.commit()\n */\n mutate(callback: () => void): Transaction<T> {\n if (this.state !== `pending`) {\n throw new TransactionNotPendingMutateError()\n }\n\n registerTransaction(this)\n\n try {\n callback()\n } finally {\n unregisterTransaction(this)\n }\n\n if (this.autoCommit) {\n this.commit().catch(() => {\n // Errors from autoCommit are handled via isPersisted.promise\n // This catch prevents unhandled promise rejections\n })\n }\n\n return this\n }\n\n /**\n * Apply new mutations to this transaction, intelligently merging with existing mutations\n *\n * When mutations operate on the same item (same globalKey), they are merged according to\n * the following rules:\n *\n * - **insert + update** → insert (merge changes, keep empty original)\n * - **insert + delete** → removed (mutations cancel each other out)\n * - **update + delete** → delete (delete dominates)\n * - **update + update** → update (union changes, keep first original)\n * - **same type** → replace with latest\n *\n * This merging reduces over-the-wire churn and keeps the optimistic local view\n * aligned with user intent.\n *\n * @param mutations - Array of new mutations to apply\n */\n applyMutations(mutations: Array<PendingMutation<any>>): void {\n for (const newMutation of mutations) {\n const existingIndex = this.mutations.findIndex(\n (m) => m.globalKey === newMutation.globalKey,\n )\n\n if (existingIndex >= 0) {\n const existingMutation = 
this.mutations[existingIndex]!\n const mergeResult = mergePendingMutations(existingMutation, newMutation)\n\n if (mergeResult === null) {\n // Remove the mutation (e.g., delete after insert cancels both)\n this.mutations.splice(existingIndex, 1)\n } else {\n // Replace with merged mutation\n this.mutations[existingIndex] = mergeResult\n }\n } else {\n // Insert new mutation\n this.mutations.push(newMutation)\n }\n }\n }\n\n /**\n * Rollback the transaction and any conflicting transactions\n * @param config - Configuration for rollback behavior\n * @returns This transaction for chaining\n * @example\n * // Manual rollback\n * const tx = createTransaction({ mutationFn: async () => {\n * // Send to API\n * }})\n *\n * tx.mutate(() => {\n * collection.insert({ id: \"1\", text: \"Buy milk\" })\n * })\n *\n * // Rollback if needed\n * if (shouldCancel) {\n * tx.rollback()\n * }\n *\n * @example\n * // Handle rollback cascade (automatic)\n * const tx1 = createTransaction({ mutationFn: async () => {} })\n * const tx2 = createTransaction({ mutationFn: async () => {} })\n *\n * tx1.mutate(() => collection.update(\"1\", draft => { draft.value = \"A\" }))\n * tx2.mutate(() => collection.update(\"1\", draft => { draft.value = \"B\" })) // Same item\n *\n * tx1.rollback() // This will also rollback tx2 due to conflict\n *\n * @example\n * // Handle rollback in error scenarios\n * try {\n * await tx.isPersisted.promise\n * } catch (error) {\n * console.log('Transaction was rolled back:', error)\n * // Transaction automatically rolled back on mutation function failure\n * }\n */\n rollback(config?: { isSecondaryRollback?: boolean }): Transaction<T> {\n const isSecondaryRollback = config?.isSecondaryRollback ?? false\n if (this.state === `completed`) {\n throw new TransactionAlreadyCompletedRollbackError()\n }\n\n this.setState(`failed`)\n\n // See if there's any other transactions w/ mutations on the same ids\n // and roll them back as well.\n if (!isSecondaryRollback) {\n const mutationIds = new Set()\n this.mutations.forEach((m) => mutationIds.add(m.globalKey))\n for (const t of transactions) {\n t.state === `pending` &&\n t.mutations.some((m) => mutationIds.has(m.globalKey)) &&\n t.rollback({ isSecondaryRollback: true })\n }\n }\n\n // Reject the promise\n this.isPersisted.reject(this.error?.error)\n this.touchCollection()\n\n return this\n }\n\n // Tell collection that something has changed with the transaction\n touchCollection(): void {\n const hasCalled = new Set()\n for (const mutation of this.mutations) {\n if (!hasCalled.has(mutation.collection.id)) {\n mutation.collection._state.onTransactionStateChange()\n\n // Only call commitPendingTransactions if there are pending sync transactions\n if (mutation.collection._state.pendingSyncedTransactions.length > 0) {\n mutation.collection._state.commitPendingTransactions()\n }\n\n hasCalled.add(mutation.collection.id)\n }\n }\n }\n\n /**\n * Commit the transaction and execute the mutation function\n * @returns Promise that resolves to this transaction when complete\n * @example\n * // Manual commit (when autoCommit is false)\n * const tx = createTransaction({\n * autoCommit: false,\n * mutationFn: async ({ transaction }) => {\n * await api.saveChanges(transaction.mutations)\n * }\n * })\n *\n * tx.mutate(() => {\n * collection.insert({ id: \"1\", text: \"Buy milk\" })\n * })\n *\n * await tx.commit() // Manually commit\n *\n * @example\n * // Handle commit errors\n * try {\n * const tx = createTransaction({\n * mutationFn: async () => { throw new 
Error(\"API failed\") }\n * })\n *\n * tx.mutate(() => {\n * collection.insert({ id: \"1\", text: \"Item\" })\n * })\n *\n * await tx.commit()\n * } catch (error) {\n * console.log('Commit failed, transaction rolled back:', error)\n * }\n *\n * @example\n * // Check transaction state after commit\n * await tx.commit()\n * console.log(tx.state) // \"completed\" or \"failed\"\n */\n async commit(): Promise<Transaction<T>> {\n if (this.state !== `pending`) {\n throw new TransactionNotPendingCommitError()\n }\n\n this.setState(`persisting`)\n\n if (this.mutations.length === 0) {\n this.setState(`completed`)\n this.isPersisted.resolve(this)\n\n return this\n }\n\n // Run mutationFn\n try {\n // At this point we know there's at least one mutation\n // We've already verified mutations is non-empty, so this cast is safe\n // Use a direct type assertion instead of object spreading to preserve the original type\n await this.mutationFn({\n transaction: this as unknown as TransactionWithMutations<T>,\n })\n\n this.setState(`completed`)\n this.touchCollection()\n\n this.isPersisted.resolve(this)\n } catch (error) {\n // Preserve the original error for rethrowing\n const originalError =\n error instanceof Error ? error : new Error(String(error))\n\n // Update transaction with error information\n this.error = {\n message: originalError.message,\n error: originalError,\n }\n\n // rollback the transaction\n this.rollback()\n\n // Re-throw the original error to preserve identity and stack\n throw originalError\n }\n\n return this\n }\n\n /**\n * Compare two transactions by their createdAt time and sequence number in order\n * to sort them in the order they were created.\n * @param other - The other transaction to compare to\n * @returns -1 if this transaction was created before the other, 1 if it was created after, 0 if they were created at the same time\n */\n compareCreatedAt(other: Transaction<any>): number {\n const createdAtComparison =\n this.createdAt.getTime() - other.createdAt.getTime()\n if (createdAtComparison !== 0) {\n return createdAtComparison\n }\n return this.sequenceNumber - other.sequenceNumber\n }\n}\n\nexport type { Transaction 
}\n"],"names":["transactionScopedScheduler","MissingMutationFunctionError","createDeferred","TransactionNotPendingMutateError","TransactionAlreadyCompletedRollbackError","TransactionNotPendingCommitError"],"mappings":";;;;;AAkBA,MAAM,eAAwC,CAAA;AAC9C,IAAI,mBAA4C,CAAA;AAEhD,IAAI,iBAAiB;AAoBrB,SAAS,sBACP,UACA,UAC2B;AAE3B,UAAQ,GAAG,SAAS,IAAI,IAAI,SAAS,IAAI,IAAA;AAAA,IACvC,KAAK,iBAAiB;AAGpB,aAAO;AAAA,QACL,GAAG;AAAA,QACH,MAAM;AAAA,QACN,UAAU,CAAA;AAAA,QACV,UAAU,SAAS;AAAA,QACnB,SAAS,EAAE,GAAG,SAAS,SAAS,GAAG,SAAS,QAAA;AAAA;AAAA,QAE5C,KAAK,SAAS;AAAA,QACd,WAAW,SAAS;AAAA;AAAA,QAEpB,UAAU,SAAS,YAAY,SAAS;AAAA,QACxC,cAAc,EAAE,GAAG,SAAS,cAAc,GAAG,SAAS,aAAA;AAAA;AAAA,QAEtD,YAAY,SAAS;AAAA,QACrB,WAAW,SAAS;AAAA,MAAA;AAAA,IAExB;AAAA,IAEA,KAAK;AAEH,aAAO;AAAA,IAET,KAAK;AAEH,aAAO;AAAA,IAET,KAAK,iBAAiB;AAEpB,aAAO;AAAA,QACL,GAAG;AAAA;AAAA,QAEH,UAAU,SAAS;AAAA;AAAA,QAEnB,SAAS,EAAE,GAAG,SAAS,SAAS,GAAG,SAAS,QAAA;AAAA;AAAA,QAE5C,UAAU,SAAS,YAAY,SAAS;AAAA,QACxC,cAAc,EAAE,GAAG,SAAS,cAAc,GAAG,SAAS,aAAA;AAAA,MAAa;AAAA,IAEvE;AAAA,IAEA,KAAK;AAAA,IACL,KAAK;AAEH,aAAO;AAAA,IAET,SAAS;AAEP,YAAM,cAAqB,GAAG,SAAS,IAAI,IAAI,SAAS,IAAI;AAC5D,YAAM,IAAI,MAAM,mCAAmC,WAAW,EAAE;AAAA,IAClE;AAAA,EAAA;AAEJ;AAsDO,SAAS,kBACd,QACgB;AAChB,QAAM,iBAAiB,IAAI,YAAe,MAAM;AAChD,eAAa,KAAK,cAAc;AAChC,SAAO;AACT;AAaO,SAAS,uBAAgD;AAC9D,MAAI,iBAAiB,SAAS,GAAG;AAC/B,WAAO,iBAAiB,MAAM,EAAE,EAAE,CAAC;AAAA,EACrC,OAAO;AACL,WAAO;AAAA,EACT;AACF;AAEA,SAAS,oBAAoB,IAAsB;AAGjDA,uCAA2B,MAAM,GAAG,EAAE;AACtC,mBAAiB,KAAK,EAAE;AAC1B;AAEA,SAAS,sBAAsB,IAAsB;AAInD,MAAI;AACFA,yCAA2B,MAAM,GAAG,EAAE;AAAA,EACxC,UAAA;AACE,uBAAmB,iBAAiB,OAAO,CAAC,MAAM,EAAE,OAAO,GAAG,EAAE;AAAA,EAClE;AACF;AAEA,SAAS,sBAAsB,IAAsB;AACnD,QAAM,QAAQ,aAAa,UAAU,CAAC,MAAM,EAAE,OAAO,GAAG,EAAE;AAC1D,MAAI,UAAU,IAAI;AAChB,iBAAa,OAAO,OAAO,CAAC;AAAA,EAC9B;AACF;AAEA,MAAM,YAAwD;AAAA,EAe5D,YAAY,QAA8B;AACxC,QAAI,OAAO,OAAO,eAAe,aAAa;AAC5C,YAAM,IAAIC,OAAAA,6BAAA;AAAA,IACZ;AACA,SAAK,KAAK,OAAO,MAAM,OAAO,WAAA;AAC9B,SAAK,aAAa,OAAO;AACzB,SAAK,QAAQ;AACb,SAAK,YAAY,CAAA;AACjB,SAAK,cAAcC,wBAAA;AACnB,SAAK,aAAa,OAAO,cAAc;AACvC,SAAK,gCAAgB,KAAA;AACrB,SAAK,iBAAiB;AACtB,SAAK,WAAW,OAAO,YAAY,CAAA;AAAA,EACrC;AAAA,EAEA,SAAS,UAA4B;AACnC,SAAK,QAAQ;AAEb,QAAI,aAAa,eAAe,aAAa,UAAU;AACrD,4BAAsB,IAAI;AAAA,IAC5B;AAAA,EACF;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,EA2CA,OAAO,UAAsC;AAC3C,QAAI,KAAK,UAAU,WAAW;AAC5B,YAAM,IAAIC,OAAAA,iCAAA;AAAA,IACZ;AAEA,wBAAoB,IAAI;AAExB,QAAI;AACF,eAAA;AAAA,IACF,UAAA;AACE,4BAAsB,IAAI;AAAA,IAC5B;AAEA,QAAI,KAAK,YAAY;AACnB,WAAK,SAAS,MAAM,MAAM;AAAA,MAG1B,CAAC;AAAA,IACH;AAEA,WAAO;AAAA,EACT;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,EAmBA,eAAe,WAA8C;AAC3D,eAAW,eAAe,WAAW;AACnC,YAAM,gBAAgB,KAAK,UAAU;AAAA,QACnC,CAAC,MAAM,EAAE,cAAc,YAAY;AAAA,MAAA;AAGrC,UAAI,iBAAiB,GAAG;AACtB,cAAM,mBAAmB,KAAK,UAAU,aAAa;AACrD,cAAM,cAAc,sBAAsB,kBAAkB,WAAW;AAEvE,YAAI,gBAAgB,MAAM;AAExB,eAAK,UAAU,OAAO,eAAe,CAAC;AAAA,QACxC,OAAO;AAEL,eAAK,UAAU,aAAa,IAAI;AAAA,QAClC;AAAA,MACF,OAAO;AAEL,aAAK,UAAU,KAAK,WAAW;AAAA,MACjC;AAAA,IACF;AAAA,EACF;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,EAwCA,SAAS,QAA4D;AACnE,UAAM,sBAAsB,QAAQ,uBAAuB;AAC3D,QAAI,KAAK,UAAU,aAAa;AAC9B,YAAM,IAAIC,OAAAA,yCAAA;AAAA,IACZ;AAEA,SAAK,SAAS,QAAQ;AAItB,QAAI,CAAC,qBAAqB;AACxB,YAAM,kCAAkB,IAAA;AACxB,WAAK,UAAU,QAAQ,CAAC,MAAM,YAAY,IAAI,EAAE,SAAS,
CAAC;AAC1D,iBAAW,KAAK,cAAc;AAC5B,UAAE,UAAU,aACV,EAAE,UAAU,KAAK,CAAC,MAAM,YAAY,IAAI,EAAE,SAAS,CAAC,KACpD,EAAE,SAAS,EAAE,qBAAqB,MAAM;AAAA,MAC5C;AAAA,IACF;AAGA,SAAK,YAAY,OAAO,KAAK,OAAO,KAAK;AACzC,SAAK,gBAAA;AAEL,WAAO;AAAA,EACT;AAAA;AAAA,EAGA,kBAAwB;AACtB,UAAM,gCAAgB,IAAA;AACtB,eAAW,YAAY,KAAK,WAAW;AACrC,UAAI,CAAC,UAAU,IAAI,SAAS,WAAW,EAAE,GAAG;AAC1C,iBAAS,WAAW,OAAO,yBAAA;AAG3B,YAAI,SAAS,WAAW,OAAO,0BAA0B,SAAS,GAAG;AACnE,mBAAS,WAAW,OAAO,0BAAA;AAAA,QAC7B;AAEA,kBAAU,IAAI,SAAS,WAAW,EAAE;AAAA,MACtC;AAAA,IACF;AAAA,EACF;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,EAyCA,MAAM,SAAkC;AACtC,QAAI,KAAK,UAAU,WAAW;AAC5B,YAAM,IAAIC,OAAAA,iCAAA;AAAA,IACZ;AAEA,SAAK,SAAS,YAAY;AAE1B,QAAI,KAAK,UAAU,WAAW,GAAG;AAC/B,WAAK,SAAS,WAAW;AACzB,WAAK,YAAY,QAAQ,IAAI;AAE7B,aAAO;AAAA,IACT;AAGA,QAAI;AAIF,YAAM,KAAK,WAAW;AAAA,QACpB,aAAa;AAAA,MAAA,CACd;AAED,WAAK,SAAS,WAAW;AACzB,WAAK,gBAAA;AAEL,WAAK,YAAY,QAAQ,IAAI;AAAA,IAC/B,SAAS,OAAO;AAEd,YAAM,gBACJ,iBAAiB,QAAQ,QAAQ,IAAI,MAAM,OAAO,KAAK,CAAC;AAG1D,WAAK,QAAQ;AAAA,QACX,SAAS,cAAc;AAAA,QACvB,OAAO;AAAA,MAAA;AAIT,WAAK,SAAA;AAGL,YAAM;AAAA,IACR;AAEA,WAAO;AAAA,EACT;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,EAQA,iBAAiB,OAAiC;AAChD,UAAM,sBACJ,KAAK,UAAU,YAAY,MAAM,UAAU,QAAA;AAC7C,QAAI,wBAAwB,GAAG;AAC7B,aAAO;AAAA,IACT;AACA,WAAO,KAAK,iBAAiB,MAAM;AAAA,EACrC;AACF;;;"}
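The transactions.cjs.map change above appears to be a formatting-only rebuild, but the embedded source documents how overlapping mutations on the same item are merged inside a transaction. A minimal sketch of the (insert, update) → insert rule from the caller's side; `todosCollection` and `api.saveChanges` are hypothetical, and importing `createTransaction` from the package root is an assumption:

import { createTransaction } from "@tanstack/db" // assumed root export

declare const todosCollection: {
  insert: (item: { id: string; text: string; completed: boolean }) => void
  update: (key: string, fn: (draft: { completed: boolean }) => void) => void
}
declare const api: { saveChanges: (mutations: unknown) => Promise<void> }

const tx = createTransaction({
  mutationFn: async ({ transaction }) => {
    // Per the (insert, update) → insert rule in the truth table above, the two calls
    // below should arrive here as a single insert mutation carrying the merged changes.
    await api.saveChanges(transaction.mutations)
  },
})

tx.mutate(() => {
  todosCollection.insert({ id: "1", text: "Buy milk", completed: false })
  todosCollection.update("1", (draft) => {
    draft.completed = true
  })
})

await tx.isPersisted.promise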
package/dist/cjs/types.d.cts
CHANGED
@@ -167,9 +167,9 @@ export interface SubscriptionUnsubscribedEvent {
  * All subscription events
  */
 export type SubscriptionEvents = {
-
-
-
+    'status:change': SubscriptionStatusChangeEvent;
+    'status:ready': SubscriptionStatusEvent<`ready`>;
+    'status:loadingSubset': SubscriptionStatusEvent<`loadingSubset`>;
     unsubscribed: SubscriptionUnsubscribedEvent;
 };
 /**
@@ -180,13 +180,51 @@ export interface Subscription extends EventEmitter<SubscriptionEvents> {
     /** Current status of the subscription */
     readonly status: SubscriptionStatus;
 }
+/**
+ * Cursor expressions for pagination, passed separately from the main `where` clause.
+ * The sync layer can choose to use cursor-based pagination (combining these with the where)
+ * or offset-based pagination (ignoring these and using the `offset` parameter).
+ *
+ * Neither expression includes the main `where` clause - they are cursor-specific only.
+ */
+export type CursorExpressions = {
+    /**
+     * Expression for rows greater than (after) the cursor value.
+     * For multi-column orderBy, this is a composite cursor using OR of conditions.
+     * Example for [col1 ASC, col2 DESC] with values [v1, v2]:
+     * or(gt(col1, v1), and(eq(col1, v1), lt(col2, v2)))
+     */
+    whereFrom: BasicExpression<boolean>;
+    /**
+     * Expression for rows equal to the current cursor value (first orderBy column only).
+     * Used to handle tie-breaking/duplicates at the boundary.
+     * Example: eq(col1, v1) or for Dates: and(gte(col1, v1), lt(col1, v1+1ms))
+     */
+    whereCurrent: BasicExpression<boolean>;
+    /**
+     * The key of the last item that was loaded.
+     * Can be used by sync layers for tracking or deduplication.
+     */
+    lastKey?: string | number;
+};
 export type LoadSubsetOptions = {
-    /** The where expression to filter the data */
+    /** The where expression to filter the data (does NOT include cursor expressions) */
     where?: BasicExpression<boolean>;
     /** The order by clause to sort the data */
     orderBy?: OrderBy;
     /** The limit of the data to load */
     limit?: number;
+    /**
+     * Cursor expressions for cursor-based pagination.
+     * These are separate from `where` - the sync layer should combine them if using cursor-based pagination.
+     * Neither expression includes the main `where` clause.
+     */
+    cursor?: CursorExpressions;
+    /**
+     * Row offset for offset-based pagination.
+     * The sync layer can use this instead of `cursor` if it prefers offset-based pagination.
+     */
+    offset?: number;
     /**
      * The subscription that triggered the load.
      * Advanced sync implementations can use this for:
@@ -243,7 +281,7 @@ export interface OptimisticChangeMessage<T extends object = Record<string, unkno
  * This follows the standard-schema specification: https://github.com/standard-schema/standard-schema
  */
 export type StandardSchema<T> = StandardSchemaV1 & {
-
+    '~standard': {
         types?: {
             input: T;
             output: T;
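The CursorExpressions and LoadSubsetOptions additions above leave the pagination mode up to the sync layer: it can combine the cursor expressions with the main where clause, or ignore them and use the row offset. A rough sketch of that branch, where Expr stands in for BasicExpression<boolean> and toSql / fetchRows are hypothetical backend helpers, not part of @tanstack/db:

type Expr = unknown

interface SubsetOptions {
  where?: Expr
  limit?: number
  offset?: number
  cursor?: { whereFrom: Expr; whereCurrent: Expr; lastKey?: string | number }
}

declare function toSql(expr: Expr): string
declare function fetchRows(
  whereSql: string,
  opts: { limit?: number; offset?: number },
): Promise<Array<unknown>>

async function loadSubset(options: SubsetOptions): Promise<Array<unknown>> {
  const base = options.where ? toSql(options.where) : `TRUE`

  if (options.cursor) {
    // Cursor-based pagination: neither cursor expression includes the main where
    // clause, so the sync layer combines them itself. whereCurrent (boundary
    // tie-breaking) is omitted here to keep the sketch short.
    const afterCursor = toSql(options.cursor.whereFrom)
    return fetchRows(`(${base}) AND (${afterCursor})`, { limit: options.limit })
  }

  // Offset-based pagination: ignore the cursor expressions and use the row offset.
  return fetchRows(base, { limit: options.limit, offset: options.offset })
}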
package/dist/cjs/utils/browser-polyfills.cjs.map
CHANGED

@@ -1 +1 @@
-
{"version":3,"file":"browser-polyfills.cjs","sources":["../../../src/utils/browser-polyfills.ts"],"sourcesContent":["// Type definitions for requestIdleCallback - compatible with existing browser types\nexport type IdleCallbackDeadline = {\n didTimeout: boolean\n timeRemaining: () => number\n}\n\nexport type IdleCallbackFunction = (deadline: IdleCallbackDeadline) => void\n\nconst requestIdleCallbackPolyfill = (\n callback: IdleCallbackFunction
+
{"version":3,"file":"browser-polyfills.cjs","sources":["../../../src/utils/browser-polyfills.ts"],"sourcesContent":["// Type definitions for requestIdleCallback - compatible with existing browser types\nexport type IdleCallbackDeadline = {\n didTimeout: boolean\n timeRemaining: () => number\n}\n\nexport type IdleCallbackFunction = (deadline: IdleCallbackDeadline) => void\n\nconst requestIdleCallbackPolyfill = (\n callback: IdleCallbackFunction,\n): number => {\n // Use a very small timeout for the polyfill to simulate idle time\n const timeout = 0\n const timeoutId = setTimeout(() => {\n callback({\n didTimeout: true, // Always indicate timeout for the polyfill\n timeRemaining: () => 50, // Return some time remaining for polyfill\n })\n }, timeout)\n return timeoutId as unknown as number\n}\n\nconst cancelIdleCallbackPolyfill = (id: number): void => {\n clearTimeout(id as unknown as ReturnType<typeof setTimeout>)\n}\n\nexport const safeRequestIdleCallback: (\n callback: IdleCallbackFunction,\n options?: { timeout?: number },\n) => number =\n typeof window !== `undefined` && `requestIdleCallback` in window\n ? (callback, options) =>\n (window as any).requestIdleCallback(callback, options)\n : (callback, _options) => requestIdleCallbackPolyfill(callback)\n\nexport const safeCancelIdleCallback: (id: number) => void =\n typeof window !== `undefined` && `cancelIdleCallback` in window\n ? (id) => (window as any).cancelIdleCallback(id)\n : cancelIdleCallbackPolyfill\n"],"names":[],"mappings":";;AAQA,MAAM,8BAA8B,CAClC,aACW;AAEX,QAAM,UAAU;AAChB,QAAM,YAAY,WAAW,MAAM;AACjC,aAAS;AAAA,MACP,YAAY;AAAA;AAAA,MACZ,eAAe,MAAM;AAAA;AAAA,IAAA,CACtB;AAAA,EACH,GAAG,OAAO;AACV,SAAO;AACT;AAEA,MAAM,6BAA6B,CAAC,OAAqB;AACvD,eAAa,EAA8C;AAC7D;AAEO,MAAM,0BAIX,OAAO,WAAW,eAAe,yBAAyB,SACtD,CAAC,UAAU,YACR,OAAe,oBAAoB,UAAU,OAAO,IACvD,CAAC,UAAU,aAAa,4BAA4B,QAAQ;AAE3D,MAAM,yBACX,OAAO,WAAW,eAAe,wBAAwB,SACrD,CAAC,OAAQ,OAAe,mBAAmB,EAAE,IAC7C;;;"}
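The browser-polyfills.cjs.map change above appears to be a similar formatting-only rebuild; the embedded source shows the requestIdleCallback fallback. A small usage sketch of those helpers, declared as stubs here because they are internal utilities rather than documented package exports, with flushPendingWork standing in for hypothetical application work:

type IdleCallbackDeadline = { didTimeout: boolean; timeRemaining: () => number }

declare function safeRequestIdleCallback(
  callback: (deadline: IdleCallbackDeadline) => void,
  options?: { timeout?: number },
): number
declare function safeCancelIdleCallback(id: number): void
declare function flushPendingWork(): void

const handle = safeRequestIdleCallback(
  (deadline) => {
    // The setTimeout-based fallback always reports didTimeout: true with ~50ms
    // remaining, so the work still runs promptly without native requestIdleCallback.
    if (deadline.didTimeout || deadline.timeRemaining() > 0) {
      flushPendingWork()
    }
  },
  { timeout: 200 },
)

// Cancel if the deferred work becomes unnecessary before the callback fires.
safeCancelIdleCallback(handle)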