@tanstack/db 0.4.5 → 0.4.7
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/dist/cjs/collection/change-events.cjs +1 -1
- package/dist/cjs/collection/change-events.cjs.map +1 -1
- package/dist/cjs/collection/change-events.d.cts +1 -1
- package/dist/cjs/collection/index.cjs +11 -0
- package/dist/cjs/collection/index.cjs.map +1 -1
- package/dist/cjs/collection/index.d.cts +8 -1
- package/dist/cjs/collection/lifecycle.cjs +4 -1
- package/dist/cjs/collection/lifecycle.cjs.map +1 -1
- package/dist/cjs/collection/mutations.cjs +4 -4
- package/dist/cjs/collection/mutations.cjs.map +1 -1
- package/dist/cjs/collection/subscription.cjs +21 -1
- package/dist/cjs/collection/subscription.cjs.map +1 -1
- package/dist/cjs/collection/subscription.d.cts +4 -3
- package/dist/cjs/collection/sync.cjs +94 -71
- package/dist/cjs/collection/sync.cjs.map +1 -1
- package/dist/cjs/collection/sync.d.cts +9 -1
- package/dist/cjs/index.cjs +2 -0
- package/dist/cjs/index.cjs.map +1 -1
- package/dist/cjs/index.d.cts +2 -0
- package/dist/cjs/indexes/auto-index.cjs +4 -1
- package/dist/cjs/indexes/auto-index.cjs.map +1 -1
- package/dist/cjs/local-only.cjs +21 -2
- package/dist/cjs/local-only.cjs.map +1 -1
- package/dist/cjs/local-only.d.cts +64 -7
- package/dist/cjs/local-storage.cjs +71 -3
- package/dist/cjs/local-storage.cjs.map +1 -1
- package/dist/cjs/local-storage.d.cts +55 -2
- package/dist/cjs/query/compiler/expressions.cjs +19 -0
- package/dist/cjs/query/compiler/expressions.cjs.map +1 -1
- package/dist/cjs/query/compiler/expressions.d.cts +2 -1
- package/dist/cjs/query/compiler/order-by.cjs +2 -1
- package/dist/cjs/query/compiler/order-by.cjs.map +1 -1
- package/dist/cjs/query/compiler/order-by.d.cts +2 -1
- package/dist/cjs/query/live/collection-subscriber.cjs +18 -8
- package/dist/cjs/query/live/collection-subscriber.cjs.map +1 -1
- package/dist/cjs/query/live/collection-subscriber.d.cts +1 -0
- package/dist/cjs/types.d.cts +11 -1
- package/dist/esm/collection/change-events.d.ts +1 -1
- package/dist/esm/collection/change-events.js +1 -1
- package/dist/esm/collection/change-events.js.map +1 -1
- package/dist/esm/collection/index.d.ts +8 -1
- package/dist/esm/collection/index.js +11 -0
- package/dist/esm/collection/index.js.map +1 -1
- package/dist/esm/collection/lifecycle.js +4 -1
- package/dist/esm/collection/lifecycle.js.map +1 -1
- package/dist/esm/collection/mutations.js +4 -4
- package/dist/esm/collection/mutations.js.map +1 -1
- package/dist/esm/collection/subscription.d.ts +4 -3
- package/dist/esm/collection/subscription.js +22 -2
- package/dist/esm/collection/subscription.js.map +1 -1
- package/dist/esm/collection/sync.d.ts +9 -1
- package/dist/esm/collection/sync.js +94 -71
- package/dist/esm/collection/sync.js.map +1 -1
- package/dist/esm/index.d.ts +2 -0
- package/dist/esm/index.js +2 -0
- package/dist/esm/index.js.map +1 -1
- package/dist/esm/indexes/auto-index.js +4 -1
- package/dist/esm/indexes/auto-index.js.map +1 -1
- package/dist/esm/local-only.d.ts +64 -7
- package/dist/esm/local-only.js +21 -2
- package/dist/esm/local-only.js.map +1 -1
- package/dist/esm/local-storage.d.ts +55 -2
- package/dist/esm/local-storage.js +72 -4
- package/dist/esm/local-storage.js.map +1 -1
- package/dist/esm/query/compiler/expressions.d.ts +2 -1
- package/dist/esm/query/compiler/expressions.js +19 -0
- package/dist/esm/query/compiler/expressions.js.map +1 -1
- package/dist/esm/query/compiler/order-by.d.ts +2 -1
- package/dist/esm/query/compiler/order-by.js +2 -1
- package/dist/esm/query/compiler/order-by.js.map +1 -1
- package/dist/esm/query/live/collection-subscriber.d.ts +1 -0
- package/dist/esm/query/live/collection-subscriber.js +19 -9
- package/dist/esm/query/live/collection-subscriber.js.map +1 -1
- package/dist/esm/types.d.ts +11 -1
- package/package.json +1 -1
- package/src/collection/change-events.ts +5 -2
- package/src/collection/index.ts +13 -0
- package/src/collection/lifecycle.ts +4 -1
- package/src/collection/mutations.ts +8 -4
- package/src/collection/subscription.ts +34 -4
- package/src/collection/sync.ts +147 -110
- package/src/index.ts +5 -0
- package/src/indexes/auto-index.ts +4 -1
- package/src/local-only.ts +119 -30
- package/src/local-storage.ts +170 -5
- package/src/query/compiler/expressions.ts +26 -1
- package/src/query/compiler/order-by.ts +3 -1
- package/src/query/live/collection-subscriber.ts +31 -10
- package/src/types.ts +13 -1
package/dist/cjs/local-only.d.cts

@@ -1,4 +1,4 @@
-import { BaseCollectionConfig, CollectionConfig, InferSchemaOutput, UtilsRecord } from './types.cjs';
+import { BaseCollectionConfig, CollectionConfig, DeleteMutationFn, InferSchemaOutput, InsertMutationFn, PendingMutation, UpdateMutationFn, UtilsRecord } from './types.cjs';
 import { StandardSchemaV1 } from '@standard-schema/spec';
 /**
  * Configuration interface for Local-only collection options
@@ -14,10 +14,36 @@ export interface LocalOnlyCollectionConfig<T extends object = object, TSchema ex
     initialData?: Array<T>;
 }
 /**
- * Local-only collection utilities type
+ * Local-only collection utilities type
  */
 export interface LocalOnlyCollectionUtils extends UtilsRecord {
+    /**
+     * Accepts mutations from a transaction that belong to this collection and persists them.
+     * This should be called in your transaction's mutationFn to persist local-only data.
+     *
+     * @param transaction - The transaction containing mutations to accept
+     * @example
+     * const localData = createCollection(localOnlyCollectionOptions({...}))
+     *
+     * const tx = createTransaction({
+     *   mutationFn: async ({ transaction }) => {
+     *     // Make API call first
+     *     await api.save(...)
+     *     // Then persist local-only mutations after success
+     *     localData.utils.acceptMutations(transaction)
+     *   }
+     * })
+     */
+    acceptMutations: (transaction: {
+        mutations: Array<PendingMutation<Record<string, unknown>>>;
+    }) => void;
 }
+type LocalOnlyCollectionOptionsResult<T extends object, TKey extends string | number, TSchema extends StandardSchemaV1 | never = never> = Omit<CollectionConfig<T, TKey, TSchema>, `onInsert` | `onUpdate` | `onDelete`> & {
+    onInsert?: InsertMutationFn<T, TKey, LocalOnlyCollectionUtils>;
+    onUpdate?: UpdateMutationFn<T, TKey, LocalOnlyCollectionUtils>;
+    onDelete?: DeleteMutationFn<T, TKey, LocalOnlyCollectionUtils>;
+    utils: LocalOnlyCollectionUtils;
+};
 /**
  * Creates Local-only collection options for use with a standard Collection
  *
@@ -25,10 +51,16 @@ export interface LocalOnlyCollectionUtils extends UtilsRecord {
  * that immediately "syncs" all optimistic changes to the collection, making them permanent.
  * Perfect for local-only data that doesn't need persistence or external synchronization.
  *
+ * **Using with Manual Transactions:**
+ *
+ * For manual transactions, you must call `utils.acceptMutations()` in your transaction's `mutationFn`
+ * to persist changes made during `tx.mutate()`. This is necessary because local-only collections
+ * don't participate in the standard mutation handler flow for manual transactions.
+ *
  * @template T - The schema type if a schema is provided, otherwise the type of items in the collection
  * @template TKey - The type of the key returned by getKey
  * @param config - Configuration options for the Local-only collection
- * @returns Collection options with utilities
+ * @returns Collection options with utilities including acceptMutations
  *
  * @example
  * // Basic local-only collection
@@ -61,16 +93,41 @@ export interface LocalOnlyCollectionUtils extends UtilsRecord {
  *   },
  *   })
  * )
+ *
+ * @example
+ * // Using with manual transactions
+ * const localData = createCollection(
+ *   localOnlyCollectionOptions({
+ *     getKey: (item) => item.id,
+ *   })
+ * )
+ *
+ * const tx = createTransaction({
+ *   mutationFn: async ({ transaction }) => {
+ *     // Use local data in API call
+ *     const localMutations = transaction.mutations.filter(m => m.collection === localData)
+ *     await api.save({ metadata: localMutations[0]?.modified })
+ *
+ *     // Persist local-only mutations after API success
+ *     localData.utils.acceptMutations(transaction)
+ *   }
+ * })
+ *
+ * tx.mutate(() => {
+ *   localData.insert({ id: 1, data: 'metadata' })
+ *   apiCollection.insert({ id: 2, data: 'main data' })
+ * })
+ *
+ * await tx.commit()
  */
 export declare function localOnlyCollectionOptions<T extends StandardSchemaV1, TKey extends string | number = string | number>(config: LocalOnlyCollectionConfig<InferSchemaOutput<T>, T, TKey> & {
     schema: T;
-}):
-    utils: LocalOnlyCollectionUtils;
+}): LocalOnlyCollectionOptionsResult<InferSchemaOutput<T>, TKey, T> & {
     schema: T;
 };
 export declare function localOnlyCollectionOptions<T extends object, TKey extends string | number = string | number>(config: LocalOnlyCollectionConfig<T, never, TKey> & {
     schema?: never;
-}):
-    utils: LocalOnlyCollectionUtils;
+}): LocalOnlyCollectionOptionsResult<T, TKey> & {
     schema?: never;
 };
+export {};
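Taken together, these declaration changes add an `acceptMutations` utility to local-only collections and retype the option factories around `LocalOnlyCollectionOptionsResult`. The sketch below is adapted from the JSDoc added in this release and shows the intended call pattern with a manual transaction; the `Draft` item type and the `api.save` client are illustrative placeholders, not part of the package.

```ts
import {
  createCollection,
  createTransaction,
  localOnlyCollectionOptions,
} from "@tanstack/db"

// Illustrative stand-ins, not part of @tanstack/db.
declare const api: { save: (payload: unknown) => Promise<void> }
type Draft = { id: number; data: string }

// Purely in-memory collection; nothing is synced externally.
const localData = createCollection(
  localOnlyCollectionOptions<Draft>({
    getKey: (item) => item.id,
  })
)

const tx = createTransaction({
  mutationFn: async ({ transaction }) => {
    // Pick out the mutations that targeted the local-only collection.
    const localMutations = transaction.mutations.filter(
      (m) => m.collection === localData
    )

    // Call the (hypothetical) API first...
    await api.save({ metadata: localMutations[0]?.modified })

    // ...then persist the local-only mutations once it has succeeded.
    localData.utils.acceptMutations(transaction)
  },
})

tx.mutate(() => {
  localData.insert({ id: 1, data: "metadata" })
})

await tx.commit()
```

Calling `acceptMutations(transaction)` only after the remote work succeeds keeps the local-only writes optimistic until the rest of the transaction is known to have persisted, which is the ordering the new JSDoc recommends.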
package/dist/cjs/local-storage.cjs

@@ -129,6 +129,52 @@ function localStorageCollectionOptions(config) {
     ...restConfig
   } = config;
   const collectionId = id ?? `local-collection:${config.storageKey}`;
+  const acceptMutations = (transaction) => {
+    const collectionMutations = transaction.mutations.filter((m) => {
+      if (sync.collection && m.collection === sync.collection) {
+        return true;
+      }
+      return m.collection.id === collectionId;
+    });
+    if (collectionMutations.length === 0) {
+      return;
+    }
+    for (const mutation of collectionMutations) {
+      switch (mutation.type) {
+        case `insert`:
+        case `update`:
+          validateJsonSerializable(mutation.modified, mutation.type);
+          break;
+        case `delete`:
+          validateJsonSerializable(mutation.original, mutation.type);
+          break;
+      }
+    }
+    const currentData = loadFromStorage(
+      config.storageKey,
+      storage
+    );
+    for (const mutation of collectionMutations) {
+      const key = mutation.key;
+      switch (mutation.type) {
+        case `insert`:
+        case `update`: {
+          const storedItem = {
+            versionKey: generateUuid(),
+            data: mutation.modified
+          };
+          currentData.set(key, storedItem);
+          break;
+        }
+        case `delete`: {
+          currentData.delete(key);
+          break;
+        }
+      }
+    }
+    saveToStorage(currentData);
+    sync.confirmOperationsSync(collectionMutations);
+  };
   return {
     ...restConfig,
     id: collectionId,
@@ -138,7 +184,8 @@ function localStorageCollectionOptions(config) {
     onDelete: wrappedOnDelete,
     utils: {
       clearStorage,
-      getStorageSize
+      getStorageSize,
+      acceptMutations
     }
   };
 }
@@ -173,6 +220,7 @@ function loadFromStorage(storageKey, storage) {
 }
 function createLocalStorageSync(storageKey, storage, storageEventApi, _getKey, lastKnownData) {
   let syncParams = null;
+  let collection = null;
   const findChanges = (oldData, newData) => {
     const changes = [];
     oldData.forEach((oldStoredItem, key) => {
@@ -214,6 +262,7 @@ function createLocalStorageSync(storageKey, storage, storageEventApi, _getKey, l
     sync: (params) => {
       const { begin, write, commit, markReady } = params;
       syncParams = params;
+      collection = params.collection;
       const initialData = loadFromStorage(storageKey, storage);
       if (initialData.size > 0) {
         begin();
@@ -245,9 +294,28 @@ function createLocalStorageSync(storageKey, storage, storageEventApi, _getKey, l
       storageType: storage === (typeof window !== `undefined` ? window.localStorage : null) ? `localStorage` : `custom`
     }),
     // Manual trigger function for local updates
-    manualTrigger: processStorageChanges
+    manualTrigger: processStorageChanges,
+    // Collection instance reference
+    collection
+  };
+  const confirmOperationsSync = (mutations) => {
+    if (!syncParams) {
+      return;
+    }
+    const { begin, write, commit } = syncParams;
+    begin();
+    mutations.forEach((mutation) => {
+      write({
+        type: mutation.type,
+        value: mutation.type === `delete` ? mutation.original : mutation.modified
+      });
+    });
+    commit();
+  };
+  return {
+    ...syncConfig,
+    confirmOperationsSync
   };
-  return syncConfig;
 }
 exports.localStorageCollectionOptions = localStorageCollectionOptions;
 //# sourceMappingURL=local-storage.cjs.map
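The compiled changes above show how the same utility is wired up for localStorage collections: `acceptMutations` filters the transaction's mutations down to this collection (by instance reference or by its `local-collection:<storageKey>` id), validates that they are JSON-serializable, writes them to the storage key with fresh `versionKey`s, and then replays them through the sync interface via `confirmOperationsSync` so they move from optimistic to synced state. A usage sketch for the localStorage variant, adapted from the JSDoc in the updated `src/local-storage.ts`, follows; the `Setting` type and `api.updateUserProfile` are illustrative placeholders.

```ts
import {
  createCollection,
  createTransaction,
  localStorageCollectionOptions,
} from "@tanstack/db"

// Illustrative stand-ins, not part of @tanstack/db.
declare const api: { updateUserProfile: (payload: unknown) => Promise<void> }
type Setting = { id: string; value: string }

// Persisted under the "user-settings" key and synced across tabs.
const localSettings = createCollection(
  localStorageCollectionOptions<Setting>({
    storageKey: "user-settings",
    getKey: (item) => item.id,
  })
)

const tx = createTransaction({
  mutationFn: async ({ transaction }) => {
    // Use the settings mutations in the (hypothetical) API call.
    const settingsMutations = transaction.mutations.filter(
      (m) => m.collection === localSettings
    )
    await api.updateUserProfile({ settings: settingsMutations[0]?.modified })

    // Persist and confirm the local-storage mutations after the API call succeeds.
    localSettings.utils.acceptMutations(transaction)
  },
})

tx.mutate(() => {
  localSettings.insert({ id: "theme", value: "dark" })
})

await tx.commit()
```

Because each collection only accepts its own mutations, a single manual transaction can mix writes to a localStorage collection and to a server-backed collection, with each side persisted by its own handler.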
package/dist/cjs/local-storage.cjs.map

@@ -1 +1 @@
(single-line source map regenerated: the mappings and the embedded `sourcesContent` for `src/local-storage.ts` now reflect the changes above, i.e. the `PendingMutation` type import, the `acceptMutations` utility and its manual-transaction documentation, and `confirmOperationsSync`)
AAA;AAAA,YACZ,MAAM,SAAS;AAAA,UAAA;AAEjB,sBAAY,IAAI,KAAK,UAAU;AAC/B;AAAA,QACF;AAAA,QACA,KAAK,UAAU;AACb,sBAAY,OAAO,GAAG;AACtB;AAAA,QACF;AAAA,MAAA;AAAA,IAEJ;AAGA,kBAAc,WAAW;AAIzB,SAAK,sBAAsB,mBAAmB;AAAA,EAChD;AAEA,SAAO;AAAA,IACL,GAAG;AAAA,IACH,IAAI;AAAA,IACJ;AAAA,IACA,UAAU;AAAA,IACV,UAAU;AAAA,IACV,UAAU;AAAA,IACV,OAAO;AAAA,MACL;AAAA,MACA;AAAA,MACA;AAAA,IAAA;AAAA,EACF;AAEJ;AAQA,SAAS,gBACP,YACA,SACqC;AACrC,MAAI;AACF,UAAM,UAAU,QAAQ,QAAQ,UAAU;AAC1C,QAAI,CAAC,SAAS;AACZ,iCAAW,IAAA;AAAA,IACb;AAEA,UAAM,SAAS,KAAK,MAAM,OAAO;AACjC,UAAM,8BAAc,IAAA;AAGpB,QACE,OAAO,WAAW,YAClB,WAAW,QACX,CAAC,MAAM,QAAQ,MAAM,GACrB;AACA,aAAO,QAAQ,MAAM,EAAE,QAAQ,CAAC,CAAC,KAAK,KAAK,MAAM;AAE/C,YACE,SACA,OAAO,UAAU,YACjB,gBAAgB,SAChB,UAAU,OACV;AACA,gBAAM,aAAa;AACnB,kBAAQ,IAAI,KAAK,UAAU;AAAA,QAC7B,OAAO;AACL,gBAAM,IAAIC,OAAAA,8BAA8B,YAAY,GAAG;AAAA,QACzD;AAAA,MACF,CAAC;AAAA,IACH,OAAO;AACL,YAAM,IAAIC,OAAAA,gCAAgC,UAAU;AAAA,IACtD;AAEA,WAAO;AAAA,EACT,SAAS,OAAO;AACd,YAAQ;AAAA,MACN,iEAAiE,UAAU;AAAA,MAC3E;AAAA,IAAA;AAEF,+BAAW,IAAA;AAAA,EACb;AACF;AAYA,SAAS,uBACP,YACA,SACA,iBACA,SACA,eAKA;AACA,MAAI,aAA0D;AAC9D,MAAI,aAAkB;AAQtB,QAAM,cAAc,CAClB,SACA,YAKI;AACJ,UAAM,UAID,CAAA;AAGL,YAAQ,QAAQ,CAAC,eAAe,QAAQ;AACtC,YAAM,gBAAgB,QAAQ,IAAI,GAAG;AACrC,UAAI,CAAC,eAAe;AAClB,gBAAQ,KAAK,EAAE,MAAM,UAAU,KAAK,OAAO,cAAc,MAAM;AAAA,MACjE,WAAW,cAAc,eAAe,cAAc,YAAY;AAChE,gBAAQ,KAAK,EAAE,MAAM,UAAU,KAAK,OAAO,cAAc,MAAM;AAAA,MACjE;AAAA,IACF,CAAC;AAGD,YAAQ,QAAQ,CAAC,eAAe,QAAQ;AACtC,UAAI,CAAC,QAAQ,IAAI,GAAG,GAAG;AACrB,gBAAQ,KAAK,EAAE,MAAM,UAAU,KAAK,OAAO,cAAc,MAAM;AAAA,MACjE;AAAA,IACF,CAAC;AAED,WAAO;AAAA,EACT;AAMA,QAAM,wBAAwB,MAAM;AAClC,QAAI,CAAC,WAAY;AAEjB,UAAM,EAAE,OAAO,OAAO,OAAA,IAAW;AAGjC,UAAM,UAAU,gBAAmB,YAAY,OAAO;AAGtD,UAAM,UAAU,YAAY,eAAe,OAAO;AAElD,QAAI,QAAQ,SAAS,GAAG;AACtB,YAAA;AACA,cAAQ,QAAQ,CAAC,EAAE,MAAM,YAAY;AACnC,YAAI,OAAO;AACT,mCAAyB,OAAO,IAAI;AACpC,gBAAM,EAAE,MAAM,OAAO;AAAA,QACvB;AAAA,MACF,CAAC;AACD,aAAA;AAGA,oBAAc,MAAA;AACd,cAAQ,QAAQ,CAAC,YAAY,QAAQ;AACnC,sBAAc,IAAI,KAAK,UAAU;AAAA,MACnC,CAAC;AAAA,IACH;AAAA,EACF;AAEA,QAAM,aAGF;AAAA,IACF,MAAM,CAAC,WAAiD;AACtD,YAAM,EAAE,OAAO,OAAO,QAAQ,cAAc;AAG5C,mBAAa;AACb,mBAAa,OAAO;AAGpB,YAAM,cAAc,gBAAmB,YAAY,OAAO;AAC1D,UAAI,YAAY,OAAO,GAAG;AACxB,cAAA;AACA,oBAAY,QAAQ,CAAC,eAAe;AAClC,mCAAyB,WAAW,MAAM,MAAM;AAChD,gBAAM,EAAE,MAAM,UAAU,OAAO,WAAW,MAAM;AAAA,QAClD,CAAC;AACD,eAAA;AAAA,MACF;AAGA,oBAAc,MAAA;AACd,kBAAY,QAAQ,CAAC,YAAY,QAAQ;AACvC,sBAAc,IAAI,KAAK,UAAU;AAAA,MACnC,CAAC;AAGD,gBAAA;AAGA,YAAM,qBAAqB,CAAC,UAAwB;AAElD,YAAI,MAAM,QAAQ,cAAc,MAAM,gBAAgB,SAAS;AAC7D;AAAA,QACF;AAEA,8BAAA;AAAA,MACF;AAGA,sBAAgB,iBAAiB,WAAW,kBAAkB;AAAA,IAGhE;AAAA;AAAA;AAAA;AAAA;AAAA,IAMA,iBAAiB,OAAO;AAAA,MACtB;AAAA,MACA,aACE,aAAa,OAAO,WAAW,cAAc,OAAO,eAAe,QAC/D,iBACA;AAAA,IAAA;AAAA;AAAA,IAIR,eAAe;AAAA;AAAA,IAGf;AAAA,EAAA;AAQF,QAAM,wBAAwB,CAAC,cAA0B;AACvD,QAAI,CAAC,YAAY;AAEf;AAAA,IACF;AAEA,UAAM,EAAE,OAAO,OAAO,OAAA,IAAW;AAGjC,UAAA;AACA,cAAU,QAAQ,CAAC,aAAkB;AACnC,YAAM;AAAA,QACJ,MAAM,SAAS;AAAA,QACf,OACE,SAAS,SAAS,WAAW,SAAS,WAAW,SAAS;AAAA,MAAA,CAC7D;AAAA,IACH,CAAC;AACD,WAAA;AAAA,EACF;AAEA,SAAO;AAAA,IACL,GAAG;AAAA,IACH;AAAA,EAAA;AAEJ;;"}
@@ -1,4 +1,4 @@
-import { BaseCollectionConfig, CollectionConfig, InferSchemaOutput, UtilsRecord } from './types.cjs';
+import { BaseCollectionConfig, CollectionConfig, InferSchemaOutput, PendingMutation, UtilsRecord } from './types.cjs';
 import { StandardSchemaV1 } from '@standard-schema/spec';
 /**
  * Storage API interface - subset of DOM Storage that we need
@@ -47,6 +47,26 @@ export type GetStorageSizeFn = () => number;
 export interface LocalStorageCollectionUtils extends UtilsRecord {
     clearStorage: ClearStorageFn;
     getStorageSize: GetStorageSizeFn;
+    /**
+     * Accepts mutations from a transaction that belong to this collection and persists them to localStorage.
+     * This should be called in your transaction's mutationFn to persist local-storage data.
+     *
+     * @param transaction - The transaction containing mutations to accept
+     * @example
+     * const localSettings = createCollection(localStorageCollectionOptions({...}))
+     *
+     * const tx = createTransaction({
+     *   mutationFn: async ({ transaction }) => {
+     *     // Make API call first
+     *     await api.save(...)
+     *     // Then persist local-storage mutations after success
+     *     localSettings.utils.acceptMutations(transaction)
+     *   }
+     * })
+     */
+    acceptMutations: (transaction: {
+        mutations: Array<PendingMutation<Record<string, unknown>>>;
+    }) => void;
 }
 /**
  * Creates localStorage collection options for use with a standard Collection
@@ -54,11 +74,17 @@ export interface LocalStorageCollectionUtils extends UtilsRecord {
  * This function creates a collection that persists data to localStorage/sessionStorage
  * and synchronizes changes across browser tabs using storage events.
  *
+ * **Using with Manual Transactions:**
+ *
+ * For manual transactions, you must call `utils.acceptMutations()` in your transaction's `mutationFn`
+ * to persist changes made during `tx.mutate()`. This is necessary because local-storage collections
+ * don't participate in the standard mutation handler flow for manual transactions.
+ *
  * @template TExplicit - The explicit type of items in the collection (highest priority)
  * @template TSchema - The schema type for validation and type inference (second priority)
  * @template TFallback - The fallback type if no explicit or schema type is provided
  * @param config - Configuration options for the localStorage collection
- * @returns Collection options with utilities including clearStorage and
+ * @returns Collection options with utilities including clearStorage, getStorageSize, and acceptMutations
  *
  * @example
  * // Basic localStorage collection
@@ -90,6 +116,33 @@ export interface LocalStorageCollectionUtils extends UtilsRecord {
  * },
  * })
  * )
+ *
+ * @example
+ * // Using with manual transactions
+ * const localSettings = createCollection(
+ *   localStorageCollectionOptions({
+ *     storageKey: 'user-settings',
+ *     getKey: (item) => item.id,
+ *   })
+ * )
+ *
+ * const tx = createTransaction({
+ *   mutationFn: async ({ transaction }) => {
+ *     // Use settings data in API call
+ *     const settingsMutations = transaction.mutations.filter(m => m.collection === localSettings)
+ *     await api.updateUserProfile({ settings: settingsMutations[0]?.modified })
+ *
+ *     // Persist local-storage mutations after API success
+ *     localSettings.utils.acceptMutations(transaction)
+ *   }
+ * })
+ *
+ * tx.mutate(() => {
+ *   localSettings.insert({ id: 'theme', value: 'dark' })
+ *   apiCollection.insert({ id: 2, data: 'profile data' })
+ * })
+ *
+ * await tx.commit()
  */
 export declare function localStorageCollectionOptions<T extends StandardSchemaV1, TKey extends string | number = string | number>(config: LocalStorageCollectionConfig<InferSchemaOutput<T>, T, TKey> & {
     schema: T;
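For reference, the new `acceptMutations` utility documented above can be exercised as follows. This is a minimal sketch based on the JSDoc in this release; the `api` stub, the `Setting` item shape, and the storage key are illustrative stand-ins, not part of the package.

import { createCollection, createTransaction, localStorageCollectionOptions } from '@tanstack/db'

type Setting = { id: string; value: string }

// Stand-in for a real backend call; not part of @tanstack/db.
const api = { save: async (_payload: unknown) => {} }

const localSettings = createCollection(
  localStorageCollectionOptions({
    storageKey: 'user-settings',
    getKey: (item: Setting) => item.id,
  })
)

const tx = createTransaction({
  mutationFn: async ({ transaction }) => {
    // Persist remotely first, then accept the local-storage mutations on success.
    await api.save(transaction.mutations)
    localSettings.utils.acceptMutations(transaction)
  },
})

tx.mutate(() => {
  localSettings.insert({ id: 'theme', value: 'dark' })
})

await tx.commit()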
@@ -55,7 +55,26 @@ function convertToBasicExpression(whereClause, collectionAlias) {
     return new ir.Func(whereClause.name, args);
   }
 }
+function convertOrderByToBasicExpression(orderBy, collectionAlias) {
+  const normalizedOrderBy = orderBy.map((clause) => {
+    const basicExp = convertToBasicExpression(
+      clause.expression,
+      collectionAlias
+    );
+    if (!basicExp) {
+      throw new Error(
+        `Failed to convert orderBy expression to a basic expression: ${clause.expression}`
+      );
+    }
+    return {
+      ...clause,
+      expression: basicExp
+    };
+  });
+  return normalizedOrderBy;
+}
 exports.SUPPORTED_COLLECTION_FUNCS = SUPPORTED_COLLECTION_FUNCS;
+exports.convertOrderByToBasicExpression = convertOrderByToBasicExpression;
 exports.convertToBasicExpression = convertToBasicExpression;
 exports.isConvertibleToCollectionFilter = isConvertibleToCollectionFilter;
 //# sourceMappingURL=expressions.cjs.map
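The new `convertOrderByToBasicExpression` export reuses `convertToBasicExpression` to rewrite each ORDER BY clause relative to the collection alias, throwing if a clause cannot be converted. A condensed re-implementation of the alias-stripping idea, for illustration only (the type and function names here are stand-ins, not the package's internals):

// Stand-in shape for a property reference expression.
type RefSketch = { type: 'ref'; path: Array<string> }

function stripCollectionAlias(ref: RefSketch, collectionAlias: string): RefSketch {
  // ['todos', 'createdAt'] with alias 'todos' becomes ['createdAt'], which is
  // the form the collection's indexes understand.
  return ref.path[0] === collectionAlias && ref.path.length > 1
    ? { type: 'ref', path: ref.path.slice(1) }
    : ref
}

// stripCollectionAlias({ type: 'ref', path: ['todos', 'createdAt'] }, 'todos')
// => { type: 'ref', path: ['createdAt'] }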
@@ -1 +1 @@
|
|
|
1
|
-
{"version":3,"file":"expressions.cjs","sources":["../../../../src/query/compiler/expressions.ts"],"sourcesContent":["import { Func, PropRef, Value } from \"../ir.js\"\nimport type { BasicExpression } from \"../ir.js\"\n\n/**\n * Functions supported by the collection index system.\n * These are the only functions that can be used in WHERE clauses\n * that are pushed down to collection subscriptions for index optimization.\n */\nexport const SUPPORTED_COLLECTION_FUNCS = new Set([\n `eq`,\n `gt`,\n `lt`,\n `gte`,\n `lte`,\n `and`,\n `or`,\n `in`,\n])\n\n/**\n * Determines if a WHERE clause can be converted to collection-compatible BasicExpression format.\n * This checks if the expression only uses functions supported by the collection index system.\n *\n * @param whereClause - The WHERE clause to check\n * @returns True if the clause can be converted for collection index optimization\n */\nexport function isConvertibleToCollectionFilter(\n whereClause: BasicExpression<boolean>\n): boolean {\n const tpe = whereClause.type\n if (tpe === `func`) {\n // Check if this function is supported\n if (!SUPPORTED_COLLECTION_FUNCS.has(whereClause.name)) {\n return false\n }\n // Recursively check all arguments\n return whereClause.args.every((arg) =>\n isConvertibleToCollectionFilter(arg as BasicExpression<boolean>)\n )\n }\n return [`val`, `ref`].includes(tpe)\n}\n\n/**\n * Converts a WHERE clause to BasicExpression format compatible with collection indexes.\n * This function creates proper BasicExpression class instances that the collection\n * index system can understand.\n *\n * @param whereClause - The WHERE clause to convert\n * @param collectionAlias - The alias of the collection being filtered\n * @returns The converted BasicExpression or null if conversion fails\n */\nexport function convertToBasicExpression(\n whereClause: BasicExpression<boolean>,\n collectionAlias: string\n): BasicExpression<boolean> | null {\n const tpe = whereClause.type\n if (tpe === `val`) {\n return new Value(whereClause.value)\n } else if (tpe === `ref`) {\n const path = whereClause.path\n if (Array.isArray(path)) {\n if (path[0] === collectionAlias && path.length > 1) {\n // Remove the table alias from the path for single-collection queries\n return new PropRef(path.slice(1))\n } else if (path.length === 1 && path[0] !== undefined) {\n // Single field reference\n return new PropRef([path[0]])\n }\n }\n // Fallback for non-array paths\n return new PropRef(Array.isArray(path) ? 
path : [String(path)])\n } else {\n // Check if this function is supported\n if (!SUPPORTED_COLLECTION_FUNCS.has(whereClause.name)) {\n return null\n }\n // Recursively convert all arguments\n const args: Array<BasicExpression> = []\n for (const arg of whereClause.args) {\n const convertedArg = convertToBasicExpression(\n arg as BasicExpression<boolean>,\n collectionAlias\n )\n if (convertedArg == null) {\n return null\n }\n args.push(convertedArg)\n }\n return new Func(whereClause.name, args)\n }\n}\n"],"names":["Value","PropRef","Func"],"mappings":";;;AAQO,MAAM,iDAAiC,IAAI;AAAA,EAChD;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AACF,CAAC;AASM,SAAS,gCACd,aACS;AACT,QAAM,MAAM,YAAY;AACxB,MAAI,QAAQ,QAAQ;AAElB,QAAI,CAAC,2BAA2B,IAAI,YAAY,IAAI,GAAG;AACrD,aAAO;AAAA,IACT;AAEA,WAAO,YAAY,KAAK;AAAA,MAAM,CAAC,QAC7B,gCAAgC,GAA+B;AAAA,IAAA;AAAA,EAEnE;AACA,SAAO,CAAC,OAAO,KAAK,EAAE,SAAS,GAAG;AACpC;AAWO,SAAS,yBACd,aACA,iBACiC;AACjC,QAAM,MAAM,YAAY;AACxB,MAAI,QAAQ,OAAO;AACjB,WAAO,IAAIA,GAAAA,MAAM,YAAY,KAAK;AAAA,EACpC,WAAW,QAAQ,OAAO;AACxB,UAAM,OAAO,YAAY;AACzB,QAAI,MAAM,QAAQ,IAAI,GAAG;AACvB,UAAI,KAAK,CAAC,MAAM,mBAAmB,KAAK,SAAS,GAAG;AAElD,eAAO,IAAIC,GAAAA,QAAQ,KAAK,MAAM,CAAC,CAAC;AAAA,MAClC,WAAW,KAAK,WAAW,KAAK,KAAK,CAAC,MAAM,QAAW;AAErD,eAAO,IAAIA,GAAAA,QAAQ,CAAC,KAAK,CAAC,CAAC,CAAC;AAAA,MAC9B;AAAA,IACF;AAEA,WAAO,IAAIA,GAAAA,QAAQ,MAAM,QAAQ,IAAI,IAAI,OAAO,CAAC,OAAO,IAAI,CAAC,CAAC;AAAA,EAChE,OAAO;AAEL,QAAI,CAAC,2BAA2B,IAAI,YAAY,IAAI,GAAG;AACrD,aAAO;AAAA,IACT;AAEA,UAAM,OAA+B,CAAA;AACrC,eAAW,OAAO,YAAY,MAAM;AAClC,YAAM,eAAe;AAAA,QACnB;AAAA,QACA;AAAA,MAAA;AAEF,UAAI,gBAAgB,MAAM;AACxB,eAAO;AAAA,MACT;AACA,WAAK,KAAK,YAAY;AAAA,IACxB;AACA,WAAO,IAAIC,GAAAA,KAAK,YAAY,MAAM,IAAI;AAAA,EACxC;AACF
|
|
1
|
+
{"version":3,"file":"expressions.cjs","sources":["../../../../src/query/compiler/expressions.ts"],"sourcesContent":["import { Func, PropRef, Value } from \"../ir.js\"\nimport type { BasicExpression, OrderBy } from \"../ir.js\"\n\n/**\n * Functions supported by the collection index system.\n * These are the only functions that can be used in WHERE clauses\n * that are pushed down to collection subscriptions for index optimization.\n */\nexport const SUPPORTED_COLLECTION_FUNCS = new Set([\n `eq`,\n `gt`,\n `lt`,\n `gte`,\n `lte`,\n `and`,\n `or`,\n `in`,\n])\n\n/**\n * Determines if a WHERE clause can be converted to collection-compatible BasicExpression format.\n * This checks if the expression only uses functions supported by the collection index system.\n *\n * @param whereClause - The WHERE clause to check\n * @returns True if the clause can be converted for collection index optimization\n */\nexport function isConvertibleToCollectionFilter(\n whereClause: BasicExpression<boolean>\n): boolean {\n const tpe = whereClause.type\n if (tpe === `func`) {\n // Check if this function is supported\n if (!SUPPORTED_COLLECTION_FUNCS.has(whereClause.name)) {\n return false\n }\n // Recursively check all arguments\n return whereClause.args.every((arg) =>\n isConvertibleToCollectionFilter(arg as BasicExpression<boolean>)\n )\n }\n return [`val`, `ref`].includes(tpe)\n}\n\n/**\n * Converts a WHERE clause to BasicExpression format compatible with collection indexes.\n * This function creates proper BasicExpression class instances that the collection\n * index system can understand.\n *\n * @param whereClause - The WHERE clause to convert\n * @param collectionAlias - The alias of the collection being filtered\n * @returns The converted BasicExpression or null if conversion fails\n */\nexport function convertToBasicExpression(\n whereClause: BasicExpression<boolean>,\n collectionAlias: string\n): BasicExpression<boolean> | null {\n const tpe = whereClause.type\n if (tpe === `val`) {\n return new Value(whereClause.value)\n } else if (tpe === `ref`) {\n const path = whereClause.path\n if (Array.isArray(path)) {\n if (path[0] === collectionAlias && path.length > 1) {\n // Remove the table alias from the path for single-collection queries\n return new PropRef(path.slice(1))\n } else if (path.length === 1 && path[0] !== undefined) {\n // Single field reference\n return new PropRef([path[0]])\n }\n }\n // Fallback for non-array paths\n return new PropRef(Array.isArray(path) ? 
path : [String(path)])\n } else {\n // Check if this function is supported\n if (!SUPPORTED_COLLECTION_FUNCS.has(whereClause.name)) {\n return null\n }\n // Recursively convert all arguments\n const args: Array<BasicExpression> = []\n for (const arg of whereClause.args) {\n const convertedArg = convertToBasicExpression(\n arg as BasicExpression<boolean>,\n collectionAlias\n )\n if (convertedArg == null) {\n return null\n }\n args.push(convertedArg)\n }\n return new Func(whereClause.name, args)\n }\n}\n\nexport function convertOrderByToBasicExpression(\n orderBy: OrderBy,\n collectionAlias: string\n): OrderBy {\n const normalizedOrderBy = orderBy.map((clause) => {\n const basicExp = convertToBasicExpression(\n clause.expression,\n collectionAlias\n )\n\n if (!basicExp) {\n throw new Error(\n `Failed to convert orderBy expression to a basic expression: ${clause.expression}`\n )\n }\n\n return {\n ...clause,\n expression: basicExp,\n }\n })\n\n return normalizedOrderBy\n}\n"],"names":["Value","PropRef","Func"],"mappings":";;;AAQO,MAAM,iDAAiC,IAAI;AAAA,EAChD;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AACF,CAAC;AASM,SAAS,gCACd,aACS;AACT,QAAM,MAAM,YAAY;AACxB,MAAI,QAAQ,QAAQ;AAElB,QAAI,CAAC,2BAA2B,IAAI,YAAY,IAAI,GAAG;AACrD,aAAO;AAAA,IACT;AAEA,WAAO,YAAY,KAAK;AAAA,MAAM,CAAC,QAC7B,gCAAgC,GAA+B;AAAA,IAAA;AAAA,EAEnE;AACA,SAAO,CAAC,OAAO,KAAK,EAAE,SAAS,GAAG;AACpC;AAWO,SAAS,yBACd,aACA,iBACiC;AACjC,QAAM,MAAM,YAAY;AACxB,MAAI,QAAQ,OAAO;AACjB,WAAO,IAAIA,GAAAA,MAAM,YAAY,KAAK;AAAA,EACpC,WAAW,QAAQ,OAAO;AACxB,UAAM,OAAO,YAAY;AACzB,QAAI,MAAM,QAAQ,IAAI,GAAG;AACvB,UAAI,KAAK,CAAC,MAAM,mBAAmB,KAAK,SAAS,GAAG;AAElD,eAAO,IAAIC,GAAAA,QAAQ,KAAK,MAAM,CAAC,CAAC;AAAA,MAClC,WAAW,KAAK,WAAW,KAAK,KAAK,CAAC,MAAM,QAAW;AAErD,eAAO,IAAIA,GAAAA,QAAQ,CAAC,KAAK,CAAC,CAAC,CAAC;AAAA,MAC9B;AAAA,IACF;AAEA,WAAO,IAAIA,GAAAA,QAAQ,MAAM,QAAQ,IAAI,IAAI,OAAO,CAAC,OAAO,IAAI,CAAC,CAAC;AAAA,EAChE,OAAO;AAEL,QAAI,CAAC,2BAA2B,IAAI,YAAY,IAAI,GAAG;AACrD,aAAO;AAAA,IACT;AAEA,UAAM,OAA+B,CAAA;AACrC,eAAW,OAAO,YAAY,MAAM;AAClC,YAAM,eAAe;AAAA,QACnB;AAAA,QACA;AAAA,MAAA;AAEF,UAAI,gBAAgB,MAAM;AACxB,eAAO;AAAA,MACT;AACA,WAAK,KAAK,YAAY;AAAA,IACxB;AACA,WAAO,IAAIC,GAAAA,KAAK,YAAY,MAAM,IAAI;AAAA,EACxC;AACF;AAEO,SAAS,gCACd,SACA,iBACS;AACT,QAAM,oBAAoB,QAAQ,IAAI,CAAC,WAAW;AAChD,UAAM,WAAW;AAAA,MACf,OAAO;AAAA,MACP;AAAA,IAAA;AAGF,QAAI,CAAC,UAAU;AACb,YAAM,IAAI;AAAA,QACR,+DAA+D,OAAO,UAAU;AAAA,MAAA;AAAA,IAEpF;AAEA,WAAO;AAAA,MACL,GAAG;AAAA,MACH,YAAY;AAAA,IAAA;AAAA,EAEhB,CAAC;AAED,SAAO;AACT;;;;;"}
|
|
@@ -1,4 +1,4 @@
-import { BasicExpression } from '../ir.js';
+import { BasicExpression, OrderBy } from '../ir.js';
 /**
  * Functions supported by the collection index system.
  * These are the only functions that can be used in WHERE clauses
@@ -23,3 +23,4 @@ export declare function isConvertibleToCollectionFilter(whereClause: BasicExpres
  * @returns The converted BasicExpression or null if conversion fails
  */
 export declare function convertToBasicExpression(whereClause: BasicExpression<boolean>, collectionAlias: string): BasicExpression<boolean> | null;
+export declare function convertOrderByToBasicExpression(orderBy: OrderBy, collectionAlias: string): OrderBy;
@@ -93,7 +93,8 @@ function processOrderBy(rawQuery, pipeline, orderByClause, selectClause, collect
         limit,
         comparator,
         valueExtractorForRawRow,
-        index
+        index,
+        orderBy: orderByClause
       };
       optimizableOrderByCollections[followRefCollection.id] = orderByOptimizationInfo;
       setSizeCallback = (getSize) => {
@@ -1 +1 @@
|
|
|
1
|
-
{"version":3,"file":"order-by.cjs","sources":["../../../../src/query/compiler/order-by.ts"],"sourcesContent":["import { orderByWithFractionalIndex } from \"@tanstack/db-ivm\"\nimport { defaultComparator, makeComparator } from \"../../utils/comparison.js\"\nimport { PropRef, followRef } from \"../ir.js\"\nimport { ensureIndexForField } from \"../../indexes/auto-index.js\"\nimport { findIndexForField } from \"../../utils/index-optimization.js\"\nimport { compileExpression } from \"./evaluators.js\"\nimport { replaceAggregatesByRefs } from \"./group-by.js\"\nimport type { CompiledSingleRowExpression } from \"./evaluators.js\"\nimport type { OrderByClause, QueryIR, Select } from \"../ir.js\"\nimport type { NamespacedAndKeyedStream, NamespacedRow } from \"../../types.js\"\nimport type { IStreamBuilder, KeyValue } from \"@tanstack/db-ivm\"\nimport type { IndexInterface } from \"../../indexes/base-index.js\"\nimport type { Collection } from \"../../collection/index.js\"\n\nexport type OrderByOptimizationInfo = {\n offset: number\n limit: number\n comparator: (\n a: Record<string, unknown> | null | undefined,\n b: Record<string, unknown> | null | undefined\n ) => number\n valueExtractorForRawRow: (row: Record<string, unknown>) => any\n index: IndexInterface<string | number>\n dataNeeded?: () => number\n}\n\n/**\n * Processes the ORDER BY clause\n * Works with the new structure that has both namespaced row data and __select_results\n * Always uses fractional indexing and adds the index as __ordering_index to the result\n */\nexport function processOrderBy(\n rawQuery: QueryIR,\n pipeline: NamespacedAndKeyedStream,\n orderByClause: Array<OrderByClause>,\n selectClause: Select,\n collection: Collection,\n optimizableOrderByCollections: Record<string, OrderByOptimizationInfo>,\n limit?: number,\n offset?: number\n): IStreamBuilder<KeyValue<unknown, [NamespacedRow, string]>> {\n // Pre-compile all order by expressions\n const compiledOrderBy = orderByClause.map((clause) => {\n const clauseWithoutAggregates = replaceAggregatesByRefs(\n clause.expression,\n selectClause,\n `__select_results`\n )\n return {\n compiledExpression: compileExpression(clauseWithoutAggregates),\n compareOptions: clause.compareOptions,\n }\n })\n\n // Create a value extractor function for the orderBy operator\n const valueExtractor = (row: NamespacedRow & { __select_results?: any }) => {\n // The namespaced row contains:\n // 1. Table aliases as top-level properties (e.g., row[\"tableName\"])\n // 2. 
SELECT results in __select_results (e.g., row.__select_results[\"aggregateAlias\"])\n // The replaceAggregatesByRefs function has already transformed any aggregate expressions\n // that match SELECT aggregates to use the __select_results namespace.\n const orderByContext = row\n\n if (orderByClause.length > 1) {\n // For multiple orderBy columns, create a composite key\n return compiledOrderBy.map((compiled) =>\n compiled.compiledExpression(orderByContext)\n )\n } else if (orderByClause.length === 1) {\n // For a single orderBy column, use the value directly\n const compiled = compiledOrderBy[0]!\n return compiled.compiledExpression(orderByContext)\n }\n\n // Default case - no ordering\n return null\n }\n\n // Create a multi-property comparator that respects the order and direction of each property\n const compare = (a: unknown, b: unknown) => {\n // If we're comparing arrays (multiple properties), compare each property in order\n if (orderByClause.length > 1) {\n const arrayA = a as Array<unknown>\n const arrayB = b as Array<unknown>\n for (let i = 0; i < orderByClause.length; i++) {\n const clause = orderByClause[i]!\n const compareFn = makeComparator(clause.compareOptions)\n const result = compareFn(arrayA[i], arrayB[i])\n if (result !== 0) {\n return result\n }\n }\n return arrayA.length - arrayB.length\n }\n\n // Single property comparison\n if (orderByClause.length === 1) {\n const clause = orderByClause[0]!\n const compareFn = makeComparator(clause.compareOptions)\n return compareFn(a, b)\n }\n\n return defaultComparator(a, b)\n }\n\n let setSizeCallback: ((getSize: () => number) => void) | undefined\n\n // Optimize the orderBy operator to lazily load elements\n // by using the range index of the collection.\n // Only for orderBy clause on a single column for now (no composite ordering)\n if (limit && orderByClause.length === 1) {\n const clause = orderByClause[0]!\n const orderByExpression = clause.expression\n\n if (orderByExpression.type === `ref`) {\n const followRefResult = followRef(\n rawQuery,\n orderByExpression,\n collection\n )!\n\n const followRefCollection = followRefResult.collection\n const fieldName = followRefResult.path[0]\n if (fieldName) {\n ensureIndexForField(\n fieldName,\n followRefResult.path,\n followRefCollection,\n clause.compareOptions,\n compare\n )\n }\n\n const valueExtractorForRawRow = compileExpression(\n new PropRef(followRefResult.path),\n true\n ) as CompiledSingleRowExpression\n\n const comparator = (\n a: Record<string, unknown> | null | undefined,\n b: Record<string, unknown> | null | undefined\n ) => {\n const extractedA = a ? valueExtractorForRawRow(a) : a\n const extractedB = b ? valueExtractorForRawRow(b) : b\n return compare(extractedA, extractedB)\n }\n\n const index: IndexInterface<string | number> | undefined =\n findIndexForField(\n followRefCollection.indexes,\n followRefResult.path,\n clause.compareOptions\n )\n\n if (index && index.supports(`gt`)) {\n // We found an index that we can use to lazily load ordered data\n const orderByOptimizationInfo = {\n offset: offset ?? 
0,\n limit,\n comparator,\n valueExtractorForRawRow,\n index,\n }\n\n optimizableOrderByCollections[followRefCollection.id] =\n orderByOptimizationInfo\n\n setSizeCallback = (getSize: () => number) => {\n optimizableOrderByCollections[followRefCollection.id] = {\n ...optimizableOrderByCollections[followRefCollection.id]!,\n dataNeeded: () => {\n const size = getSize()\n return Math.max(0, limit - size)\n },\n }\n }\n }\n }\n }\n\n // Use fractional indexing and return the tuple [value, index]\n return pipeline.pipe(\n orderByWithFractionalIndex(valueExtractor, {\n limit,\n offset,\n comparator: compare,\n setSizeCallback,\n })\n // orderByWithFractionalIndex returns [key, [value, index]] - we keep this format\n )\n}\n"],"names":["replaceAggregatesByRefs","compileExpression","makeComparator","defaultComparator","followRef","ensureIndexForField","PropRef","findIndexForField","orderByWithFractionalIndex"],"mappings":";;;;;;;;;
|
|
1
|
+
{"version":3,"file":"order-by.cjs","sources":["../../../../src/query/compiler/order-by.ts"],"sourcesContent":["import { orderByWithFractionalIndex } from \"@tanstack/db-ivm\"\nimport { defaultComparator, makeComparator } from \"../../utils/comparison.js\"\nimport { PropRef, followRef } from \"../ir.js\"\nimport { ensureIndexForField } from \"../../indexes/auto-index.js\"\nimport { findIndexForField } from \"../../utils/index-optimization.js\"\nimport { compileExpression } from \"./evaluators.js\"\nimport { replaceAggregatesByRefs } from \"./group-by.js\"\nimport type { CompiledSingleRowExpression } from \"./evaluators.js\"\nimport type { OrderBy, OrderByClause, QueryIR, Select } from \"../ir.js\"\nimport type { NamespacedAndKeyedStream, NamespacedRow } from \"../../types.js\"\nimport type { IStreamBuilder, KeyValue } from \"@tanstack/db-ivm\"\nimport type { IndexInterface } from \"../../indexes/base-index.js\"\nimport type { Collection } from \"../../collection/index.js\"\n\nexport type OrderByOptimizationInfo = {\n orderBy: OrderBy\n offset: number\n limit: number\n comparator: (\n a: Record<string, unknown> | null | undefined,\n b: Record<string, unknown> | null | undefined\n ) => number\n valueExtractorForRawRow: (row: Record<string, unknown>) => any\n index: IndexInterface<string | number>\n dataNeeded?: () => number\n}\n\n/**\n * Processes the ORDER BY clause\n * Works with the new structure that has both namespaced row data and __select_results\n * Always uses fractional indexing and adds the index as __ordering_index to the result\n */\nexport function processOrderBy(\n rawQuery: QueryIR,\n pipeline: NamespacedAndKeyedStream,\n orderByClause: Array<OrderByClause>,\n selectClause: Select,\n collection: Collection,\n optimizableOrderByCollections: Record<string, OrderByOptimizationInfo>,\n limit?: number,\n offset?: number\n): IStreamBuilder<KeyValue<unknown, [NamespacedRow, string]>> {\n // Pre-compile all order by expressions\n const compiledOrderBy = orderByClause.map((clause) => {\n const clauseWithoutAggregates = replaceAggregatesByRefs(\n clause.expression,\n selectClause,\n `__select_results`\n )\n return {\n compiledExpression: compileExpression(clauseWithoutAggregates),\n compareOptions: clause.compareOptions,\n }\n })\n\n // Create a value extractor function for the orderBy operator\n const valueExtractor = (row: NamespacedRow & { __select_results?: any }) => {\n // The namespaced row contains:\n // 1. Table aliases as top-level properties (e.g., row[\"tableName\"])\n // 2. 
SELECT results in __select_results (e.g., row.__select_results[\"aggregateAlias\"])\n // The replaceAggregatesByRefs function has already transformed any aggregate expressions\n // that match SELECT aggregates to use the __select_results namespace.\n const orderByContext = row\n\n if (orderByClause.length > 1) {\n // For multiple orderBy columns, create a composite key\n return compiledOrderBy.map((compiled) =>\n compiled.compiledExpression(orderByContext)\n )\n } else if (orderByClause.length === 1) {\n // For a single orderBy column, use the value directly\n const compiled = compiledOrderBy[0]!\n return compiled.compiledExpression(orderByContext)\n }\n\n // Default case - no ordering\n return null\n }\n\n // Create a multi-property comparator that respects the order and direction of each property\n const compare = (a: unknown, b: unknown) => {\n // If we're comparing arrays (multiple properties), compare each property in order\n if (orderByClause.length > 1) {\n const arrayA = a as Array<unknown>\n const arrayB = b as Array<unknown>\n for (let i = 0; i < orderByClause.length; i++) {\n const clause = orderByClause[i]!\n const compareFn = makeComparator(clause.compareOptions)\n const result = compareFn(arrayA[i], arrayB[i])\n if (result !== 0) {\n return result\n }\n }\n return arrayA.length - arrayB.length\n }\n\n // Single property comparison\n if (orderByClause.length === 1) {\n const clause = orderByClause[0]!\n const compareFn = makeComparator(clause.compareOptions)\n return compareFn(a, b)\n }\n\n return defaultComparator(a, b)\n }\n\n let setSizeCallback: ((getSize: () => number) => void) | undefined\n\n // Optimize the orderBy operator to lazily load elements\n // by using the range index of the collection.\n // Only for orderBy clause on a single column for now (no composite ordering)\n if (limit && orderByClause.length === 1) {\n const clause = orderByClause[0]!\n const orderByExpression = clause.expression\n\n if (orderByExpression.type === `ref`) {\n const followRefResult = followRef(\n rawQuery,\n orderByExpression,\n collection\n )!\n\n const followRefCollection = followRefResult.collection\n const fieldName = followRefResult.path[0]\n if (fieldName) {\n ensureIndexForField(\n fieldName,\n followRefResult.path,\n followRefCollection,\n clause.compareOptions,\n compare\n )\n }\n\n const valueExtractorForRawRow = compileExpression(\n new PropRef(followRefResult.path),\n true\n ) as CompiledSingleRowExpression\n\n const comparator = (\n a: Record<string, unknown> | null | undefined,\n b: Record<string, unknown> | null | undefined\n ) => {\n const extractedA = a ? valueExtractorForRawRow(a) : a\n const extractedB = b ? valueExtractorForRawRow(b) : b\n return compare(extractedA, extractedB)\n }\n\n const index: IndexInterface<string | number> | undefined =\n findIndexForField(\n followRefCollection.indexes,\n followRefResult.path,\n clause.compareOptions\n )\n\n if (index && index.supports(`gt`)) {\n // We found an index that we can use to lazily load ordered data\n const orderByOptimizationInfo = {\n offset: offset ?? 
0,\n limit,\n comparator,\n valueExtractorForRawRow,\n index,\n orderBy: orderByClause,\n }\n\n optimizableOrderByCollections[followRefCollection.id] =\n orderByOptimizationInfo\n\n setSizeCallback = (getSize: () => number) => {\n optimizableOrderByCollections[followRefCollection.id] = {\n ...optimizableOrderByCollections[followRefCollection.id]!,\n dataNeeded: () => {\n const size = getSize()\n return Math.max(0, limit - size)\n },\n }\n }\n }\n }\n }\n\n // Use fractional indexing and return the tuple [value, index]\n return pipeline.pipe(\n orderByWithFractionalIndex(valueExtractor, {\n limit,\n offset,\n comparator: compare,\n setSizeCallback,\n })\n // orderByWithFractionalIndex returns [key, [value, index]] - we keep this format\n )\n}\n"],"names":["replaceAggregatesByRefs","compileExpression","makeComparator","defaultComparator","followRef","ensureIndexForField","PropRef","findIndexForField","orderByWithFractionalIndex"],"mappings":";;;;;;;;;AAgCO,SAAS,eACd,UACA,UACA,eACA,cACA,YACA,+BACA,OACA,QAC4D;AAE5D,QAAM,kBAAkB,cAAc,IAAI,CAAC,WAAW;AACpD,UAAM,0BAA0BA,QAAAA;AAAAA,MAC9B,OAAO;AAAA,MACP;AAAA,MACA;AAAA,IAAA;AAEF,WAAO;AAAA,MACL,oBAAoBC,WAAAA,kBAAkB,uBAAuB;AAAA,MAC7D,gBAAgB,OAAO;AAAA,IAAA;AAAA,EAE3B,CAAC;AAGD,QAAM,iBAAiB,CAAC,QAAoD;AAM1E,UAAM,iBAAiB;AAEvB,QAAI,cAAc,SAAS,GAAG;AAE5B,aAAO,gBAAgB;AAAA,QAAI,CAAC,aAC1B,SAAS,mBAAmB,cAAc;AAAA,MAAA;AAAA,IAE9C,WAAW,cAAc,WAAW,GAAG;AAErC,YAAM,WAAW,gBAAgB,CAAC;AAClC,aAAO,SAAS,mBAAmB,cAAc;AAAA,IACnD;AAGA,WAAO;AAAA,EACT;AAGA,QAAM,UAAU,CAAC,GAAY,MAAe;AAE1C,QAAI,cAAc,SAAS,GAAG;AAC5B,YAAM,SAAS;AACf,YAAM,SAAS;AACf,eAAS,IAAI,GAAG,IAAI,cAAc,QAAQ,KAAK;AAC7C,cAAM,SAAS,cAAc,CAAC;AAC9B,cAAM,YAAYC,WAAAA,eAAe,OAAO,cAAc;AACtD,cAAM,SAAS,UAAU,OAAO,CAAC,GAAG,OAAO,CAAC,CAAC;AAC7C,YAAI,WAAW,GAAG;AAChB,iBAAO;AAAA,QACT;AAAA,MACF;AACA,aAAO,OAAO,SAAS,OAAO;AAAA,IAChC;AAGA,QAAI,cAAc,WAAW,GAAG;AAC9B,YAAM,SAAS,cAAc,CAAC;AAC9B,YAAM,YAAYA,WAAAA,eAAe,OAAO,cAAc;AACtD,aAAO,UAAU,GAAG,CAAC;AAAA,IACvB;AAEA,WAAOC,WAAAA,kBAAkB,GAAG,CAAC;AAAA,EAC/B;AAEA,MAAI;AAKJ,MAAI,SAAS,cAAc,WAAW,GAAG;AACvC,UAAM,SAAS,cAAc,CAAC;AAC9B,UAAM,oBAAoB,OAAO;AAEjC,QAAI,kBAAkB,SAAS,OAAO;AACpC,YAAM,kBAAkBC,GAAAA;AAAAA,QACtB;AAAA,QACA;AAAA,QACA;AAAA,MAAA;AAGF,YAAM,sBAAsB,gBAAgB;AAC5C,YAAM,YAAY,gBAAgB,KAAK,CAAC;AACxC,UAAI,WAAW;AACbC,kBAAAA;AAAAA,UACE;AAAA,UACA,gBAAgB;AAAA,UAChB;AAAA,UACA,OAAO;AAAA,UACP;AAAA,QAAA;AAAA,MAEJ;AAEA,YAAM,0BAA0BJ,WAAAA;AAAAA,QAC9B,IAAIK,GAAAA,QAAQ,gBAAgB,IAAI;AAAA,QAChC;AAAA,MAAA;AAGF,YAAM,aAAa,CACjB,GACA,MACG;AACH,cAAM,aAAa,IAAI,wBAAwB,CAAC,IAAI;AACpD,cAAM,aAAa,IAAI,wBAAwB,CAAC,IAAI;AACpD,eAAO,QAAQ,YAAY,UAAU;AAAA,MACvC;AAEA,YAAM,QACJC,kBAAAA;AAAAA,QACE,oBAAoB;AAAA,QACpB,gBAAgB;AAAA,QAChB,OAAO;AAAA,MAAA;AAGX,UAAI,SAAS,MAAM,SAAS,IAAI,GAAG;AAEjC,cAAM,0BAA0B;AAAA,UAC9B,QAAQ,UAAU;AAAA,UAClB;AAAA,UACA;AAAA,UACA;AAAA,UACA;AAAA,UACA,SAAS;AAAA,QAAA;AAGX,sCAA8B,oBAAoB,EAAE,IAClD;AAEF,0BAAkB,CAAC,YAA0B;AAC3C,wCAA8B,oBAAoB,EAAE,IAAI;AAAA,YACtD,GAAG,8BAA8B,oBAAoB,EAAE;AAAA,YACvD,YAAY,MAAM;AAChB,oBAAM,OAAO,QAAA;AACb,qBAAO,KAAK,IAAI,GAAG,QAAQ,IAAI;AAAA,YACjC;AAAA,UAAA;AAAA,QAEJ;AAAA,MACF;AAAA,IACF;AAAA,EACF;AAGA,SAAO,SAAS;AAAA,IACdC,MAAAA,2BAA2B,gBAAgB;AAAA,MACzC;AAAA,MACA;AAAA,MACA,YAAY;AAAA,MACZ;AAAA,IAAA,CACD;AAAA;AAAA,EAAA;AAGL;;"}
|
|
@@ -1,9 +1,10 @@
-import { OrderByClause, QueryIR, Select } from '../ir.js';
+import { OrderBy, OrderByClause, QueryIR, Select } from '../ir.js';
 import { NamespacedAndKeyedStream, NamespacedRow } from '../../types.js';
 import { IStreamBuilder, KeyValue } from '@tanstack/db-ivm';
 import { IndexInterface } from '../../indexes/base-index.js';
 import { Collection } from '../../collection/index.js';
 export type OrderByOptimizationInfo = {
+    orderBy: OrderBy;
     offset: number;
     limit: number;
     comparator: (a: Record<string, unknown> | null | undefined, b: Record<string, unknown> | null | undefined) => number;
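With this change, `OrderByOptimizationInfo` also records the original ORDER BY clauses, so live-query subscribers can normalize them per collection when requesting limited snapshots. A rough sketch of the recorded shape, using stand-in types rather than the package's internal ones from '../ir.js' and the index modules:

type OrderBySketch = Array<{ expression: unknown; compareOptions?: unknown }>
type RawRow = Record<string, unknown>

type OrderByOptimizationInfoSketch = {
  orderBy: OrderBySketch // new in this release: the ORDER BY clauses as written in the query
  offset: number
  limit: number
  comparator: (a: RawRow | null | undefined, b: RawRow | null | undefined) => number
  valueExtractorForRawRow: (row: RawRow) => unknown
  dataNeeded?: () => number
}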
@@ -10,17 +10,17 @@ class CollectionSubscriber {
     this.syncState = syncState;
     this.collectionConfigBuilder = collectionConfigBuilder;
     this.biggest = void 0;
-
-  subscribe() {
-    const collectionAlias = findCollectionAlias(
+    this.collectionAlias = findCollectionAlias(
       this.collectionId,
       this.collectionConfigBuilder.query
     );
-
+  }
+  subscribe() {
+    const whereClause = this.getWhereClauseFromAlias(this.collectionAlias);
     if (whereClause) {
       const whereExpression = expressions.convertToBasicExpression(
         whereClause,
-        collectionAlias
+        this.collectionAlias
       );
       if (whereExpression) {
         return this.subscribeToChanges(whereExpression);
@@ -78,7 +78,7 @@ class CollectionSubscriber {
     return subscription;
   }
   subscribeToOrderedChanges(whereExpression) {
-    const { offset, limit, comparator, dataNeeded, index } = this.collectionConfigBuilder.optimizableOrderByCollections[this.collectionId];
+    const { orderBy, offset, limit, comparator, dataNeeded, index } = this.collectionConfigBuilder.optimizableOrderByCollections[this.collectionId];
     const sendChangesInRange = (changes) => {
       const splittedChanges = splitUpdates(changes);
       let filteredChanges = splittedChanges;
@@ -95,8 +95,13 @@ class CollectionSubscriber {
       whereExpression
     });
     subscription.setOrderByIndex(index);
+    const normalizedOrderBy = expressions.convertOrderByToBasicExpression(
+      orderBy,
+      this.collectionAlias
+    );
     subscription.requestLimitedSnapshot({
-      limit: offset + limit
+      limit: offset + limit,
+      orderBy: normalizedOrderBy
     });
     return subscription;
   }
@@ -131,10 +136,15 @@ class CollectionSubscriber {
   // Loads the next `n` items from the collection
   // starting from the biggest item it has sent
   loadNextItems(n, subscription) {
-    const { valueExtractorForRawRow } = this.collectionConfigBuilder.optimizableOrderByCollections[this.collectionId];
+    const { orderBy, valueExtractorForRawRow } = this.collectionConfigBuilder.optimizableOrderByCollections[this.collectionId];
     const biggestSentRow = this.biggest;
     const biggestSentValue = biggestSentRow ? valueExtractorForRawRow(biggestSentRow) : biggestSentRow;
+    const normalizedOrderBy = expressions.convertOrderByToBasicExpression(
+      orderBy,
+      this.collectionAlias
+    );
     subscription.requestLimitedSnapshot({
+      orderBy: normalizedOrderBy,
       limit: n,
       minValue: biggestSentValue
     });
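Taken together, the subscriber now resolves the collection alias once in its constructor and forwards the normalized ORDER BY with every limited snapshot request, both for the initial ordered window and when paging in `loadNextItems`. A paraphrased sketch with stand-in types (the real subscription object is internal to the live-query machinery and not shown here):

type LimitedSnapshotRequest = { orderBy: unknown; limit: number; minValue?: unknown }
interface SubscriptionSketch {
  requestLimitedSnapshot(request: LimitedSnapshotRequest): void
}

function loadNextOrderedItems(
  subscription: SubscriptionSketch,
  normalizedOrderBy: unknown,
  n: number,
  biggestSentValue?: unknown
) {
  // The normalized ORDER BY travels with the request, so the collection can
  // serve the next n rows from its range index starting after the last row sent.
  subscription.requestLimitedSnapshot({
    orderBy: normalizedOrderBy,
    limit: n,
    minValue: biggestSentValue,
  })
}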