@tanstack/db 0.4.6 → 0.4.8

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (60)
  1. package/dist/cjs/collection/index.cjs.map +1 -1
  2. package/dist/cjs/collection/index.d.cts +2 -1
  3. package/dist/cjs/collection/lifecycle.cjs +2 -3
  4. package/dist/cjs/collection/lifecycle.cjs.map +1 -1
  5. package/dist/cjs/collection/mutations.cjs +4 -4
  6. package/dist/cjs/collection/mutations.cjs.map +1 -1
  7. package/dist/cjs/collection/state.cjs +22 -33
  8. package/dist/cjs/collection/state.cjs.map +1 -1
  9. package/dist/cjs/collection/state.d.cts +6 -2
  10. package/dist/cjs/collection/sync.cjs +4 -3
  11. package/dist/cjs/collection/sync.cjs.map +1 -1
  12. package/dist/cjs/indexes/auto-index.cjs +0 -3
  13. package/dist/cjs/indexes/auto-index.cjs.map +1 -1
  14. package/dist/cjs/local-only.cjs +21 -2
  15. package/dist/cjs/local-only.cjs.map +1 -1
  16. package/dist/cjs/local-only.d.cts +64 -7
  17. package/dist/cjs/local-storage.cjs +71 -3
  18. package/dist/cjs/local-storage.cjs.map +1 -1
  19. package/dist/cjs/local-storage.d.cts +55 -2
  20. package/dist/cjs/query/live/collection-config-builder.cjs +54 -12
  21. package/dist/cjs/query/live/collection-config-builder.cjs.map +1 -1
  22. package/dist/cjs/query/live/collection-config-builder.d.cts +17 -2
  23. package/dist/cjs/query/live/collection-subscriber.cjs.map +1 -1
  24. package/dist/cjs/types.d.cts +3 -5
  25. package/dist/esm/collection/index.d.ts +2 -1
  26. package/dist/esm/collection/index.js.map +1 -1
  27. package/dist/esm/collection/lifecycle.js +2 -3
  28. package/dist/esm/collection/lifecycle.js.map +1 -1
  29. package/dist/esm/collection/mutations.js +4 -4
  30. package/dist/esm/collection/mutations.js.map +1 -1
  31. package/dist/esm/collection/state.d.ts +6 -2
  32. package/dist/esm/collection/state.js +22 -33
  33. package/dist/esm/collection/state.js.map +1 -1
  34. package/dist/esm/collection/sync.js +4 -3
  35. package/dist/esm/collection/sync.js.map +1 -1
  36. package/dist/esm/indexes/auto-index.js +0 -3
  37. package/dist/esm/indexes/auto-index.js.map +1 -1
  38. package/dist/esm/local-only.d.ts +64 -7
  39. package/dist/esm/local-only.js +21 -2
  40. package/dist/esm/local-only.js.map +1 -1
  41. package/dist/esm/local-storage.d.ts +55 -2
  42. package/dist/esm/local-storage.js +72 -4
  43. package/dist/esm/local-storage.js.map +1 -1
  44. package/dist/esm/query/live/collection-config-builder.d.ts +17 -2
  45. package/dist/esm/query/live/collection-config-builder.js +54 -12
  46. package/dist/esm/query/live/collection-config-builder.js.map +1 -1
  47. package/dist/esm/query/live/collection-subscriber.js.map +1 -1
  48. package/dist/esm/types.d.ts +3 -5
  49. package/package.json +1 -1
  50. package/src/collection/index.ts +1 -1
  51. package/src/collection/lifecycle.ts +3 -4
  52. package/src/collection/mutations.ts +8 -4
  53. package/src/collection/state.ts +52 -48
  54. package/src/collection/sync.ts +7 -6
  55. package/src/indexes/auto-index.ts +0 -8
  56. package/src/local-only.ts +119 -30
  57. package/src/local-storage.ts +170 -5
  58. package/src/query/live/collection-config-builder.ts +103 -24
  59. package/src/query/live/collection-subscriber.ts +3 -3
  60. package/src/types.ts +3 -5
package/dist/cjs/local-storage.cjs
@@ -129,6 +129,52 @@ function localStorageCollectionOptions(config) {
     ...restConfig
   } = config;
   const collectionId = id ?? `local-collection:${config.storageKey}`;
+  const acceptMutations = (transaction) => {
+    const collectionMutations = transaction.mutations.filter((m) => {
+      if (sync.collection && m.collection === sync.collection) {
+        return true;
+      }
+      return m.collection.id === collectionId;
+    });
+    if (collectionMutations.length === 0) {
+      return;
+    }
+    for (const mutation of collectionMutations) {
+      switch (mutation.type) {
+        case `insert`:
+        case `update`:
+          validateJsonSerializable(mutation.modified, mutation.type);
+          break;
+        case `delete`:
+          validateJsonSerializable(mutation.original, mutation.type);
+          break;
+      }
+    }
+    const currentData = loadFromStorage(
+      config.storageKey,
+      storage
+    );
+    for (const mutation of collectionMutations) {
+      const key = mutation.key;
+      switch (mutation.type) {
+        case `insert`:
+        case `update`: {
+          const storedItem = {
+            versionKey: generateUuid(),
+            data: mutation.modified
+          };
+          currentData.set(key, storedItem);
+          break;
+        }
+        case `delete`: {
+          currentData.delete(key);
+          break;
+        }
+      }
+    }
+    saveToStorage(currentData);
+    sync.confirmOperationsSync(collectionMutations);
+  };
   return {
     ...restConfig,
     id: collectionId,
@@ -138,7 +184,8 @@ function localStorageCollectionOptions(config) {
     onDelete: wrappedOnDelete,
     utils: {
       clearStorage,
-      getStorageSize
+      getStorageSize,
+      acceptMutations
     }
   };
 }
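
The hunks above add an `acceptMutations` utility to the collection's `utils`, so a localStorage collection can take part in manually created transactions: the transaction's `mutationFn` calls it to persist and confirm the collection's mutations itself. A minimal usage sketch, following the JSDoc example added in this release (the `api.save` call, the item shape, and the `user-settings` key are illustrative placeholders):

```ts
import {
  createCollection,
  createTransaction,
  localStorageCollectionOptions,
} from "@tanstack/db"

// Illustrative item shape and backend call; only the @tanstack/db calls
// come from the diff above.
type Setting = { id: string; value: string }
declare const api: { save: (payload: unknown) => Promise<void> }

const localSettings = createCollection(
  localStorageCollectionOptions<Setting>({
    storageKey: "user-settings",
    getKey: (item) => item.id,
  })
)

const tx = createTransaction({
  mutationFn: async ({ transaction }) => {
    // Do any remote work first...
    await api.save(transaction.mutations.map((m) => m.modified))
    // ...then persist the local-storage mutations and confirm them as synced.
    localSettings.utils.acceptMutations(transaction)
  },
})

tx.mutate(() => {
  localSettings.insert({ id: "theme", value: "dark" })
})

await tx.commit()
```
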
@@ -173,6 +220,7 @@ function loadFromStorage(storageKey, storage) {
 }
 function createLocalStorageSync(storageKey, storage, storageEventApi, _getKey, lastKnownData) {
   let syncParams = null;
+  let collection = null;
   const findChanges = (oldData, newData) => {
     const changes = [];
     oldData.forEach((oldStoredItem, key) => {
@@ -214,6 +262,7 @@ function createLocalStorageSync(storageKey, storage, storageEventApi, _getKey, l
     sync: (params) => {
       const { begin, write, commit, markReady } = params;
       syncParams = params;
+      collection = params.collection;
       const initialData = loadFromStorage(storageKey, storage);
       if (initialData.size > 0) {
         begin();
@@ -245,9 +294,28 @@ function createLocalStorageSync(storageKey, storage, storageEventApi, _getKey, l
       storageType: storage === (typeof window !== `undefined` ? window.localStorage : null) ? `localStorage` : `custom`
     }),
     // Manual trigger function for local updates
-    manualTrigger: processStorageChanges
+    manualTrigger: processStorageChanges,
+    // Collection instance reference
+    collection
+  };
+  const confirmOperationsSync = (mutations) => {
+    if (!syncParams) {
+      return;
+    }
+    const { begin, write, commit } = syncParams;
+    begin();
+    mutations.forEach((mutation) => {
+      write({
+        type: mutation.type,
+        value: mutation.type === `delete` ? mutation.original : mutation.modified
+      });
+    });
+    commit();
+  };
+  return {
+    ...syncConfig,
+    confirmOperationsSync
   };
-  return syncConfig;
 }
 exports.localStorageCollectionOptions = localStorageCollectionOptions;
 //# sourceMappingURL=local-storage.cjs.map
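
The final hunk adds `confirmOperationsSync`, which `acceptMutations` uses to replay the accepted mutations through the collection's sync interface so they stop being held as optimistic state. The underlying pattern is the `begin()`/`write()`/`commit()` sequence a sync implementation receives; below is a simplified, self-contained sketch of that confirmation step, with types trimmed to the fields the diff actually reads rather than the package's full `SyncConfig`:

```ts
// Trimmed stand-ins for the real types; only the fields used by the
// confirmation flow in the diff above are modeled here.
type ConfirmableMutation = {
  type: "insert" | "update" | "delete"
  modified: Record<string, unknown>
  original: Record<string, unknown>
}

type SyncWriters = {
  begin: () => void
  write: (op: { type: ConfirmableMutation["type"]; value: Record<string, unknown> }) => void
  commit: () => void
}

// Mirrors confirmOperationsSync: wrap the writes in one begin/commit batch,
// sending the original value for deletes and the modified value otherwise.
function confirmOperations(
  sync: SyncWriters,
  mutations: Array<ConfirmableMutation>
): void {
  sync.begin()
  for (const mutation of mutations) {
    sync.write({
      type: mutation.type,
      value: mutation.type === "delete" ? mutation.original : mutation.modified,
    })
  }
  sync.commit()
}
```
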
package/dist/cjs/local-storage.cjs.map
@@ -1 +1 @@
- {"version":3,"file":"local-storage.cjs","sources":["../../src/local-storage.ts"],"sourcesContent":["import {\n InvalidStorageDataFormatError,\n InvalidStorageObjectFormatError,\n NoStorageAvailableError,\n NoStorageEventApiError,\n SerializationError,\n StorageKeyRequiredError,\n} from \"./errors\"\nimport type {\n BaseCollectionConfig,\n CollectionConfig,\n DeleteMutationFnParams,\n InferSchemaOutput,\n InsertMutationFnParams,\n SyncConfig,\n UpdateMutationFnParams,\n UtilsRecord,\n} from \"./types\"\nimport type { StandardSchemaV1 } from \"@standard-schema/spec\"\n\n/**\n * Storage API interface - subset of DOM Storage that we need\n */\nexport type StorageApi = Pick<Storage, `getItem` | `setItem` | `removeItem`>\n\n/**\n * Storage event API - subset of Window for 'storage' events only\n */\nexport type StorageEventApi = {\n addEventListener: (\n type: `storage`,\n listener: (event: StorageEvent) => void\n ) => void\n removeEventListener: (\n type: `storage`,\n listener: (event: StorageEvent) => void\n ) => void\n}\n\n/**\n * Internal storage format that includes version tracking\n */\ninterface StoredItem<T> {\n versionKey: string\n data: T\n}\n\n/**\n * Configuration interface for localStorage collection options\n * @template T - The type of items in the collection\n * @template TSchema - The schema type for validation\n * @template TKey - The type of the key returned by `getKey`\n */\nexport interface LocalStorageCollectionConfig<\n T extends object = object,\n TSchema extends StandardSchemaV1 = never,\n TKey extends string | number = string | number,\n> extends BaseCollectionConfig<T, TKey, TSchema> {\n /**\n * The key to use for storing the collection data in localStorage/sessionStorage\n */\n storageKey: string\n\n /**\n * Storage API to use (defaults to window.localStorage)\n * Can be any object that implements the Storage interface (e.g., sessionStorage)\n */\n storage?: StorageApi\n\n /**\n * Storage event API to use for cross-tab synchronization (defaults to window)\n * Can be any object that implements addEventListener/removeEventListener for storage events\n */\n storageEventApi?: StorageEventApi\n}\n\n/**\n * Type for the clear utility function\n */\nexport type ClearStorageFn = () => void\n\n/**\n * Type for the getStorageSize utility function\n */\nexport type GetStorageSizeFn = () => number\n\n/**\n * LocalStorage collection utilities type\n */\nexport interface LocalStorageCollectionUtils extends UtilsRecord {\n clearStorage: ClearStorageFn\n getStorageSize: GetStorageSizeFn\n}\n\n/**\n * Validates that a value can be JSON serialized\n * @param value - The value to validate for JSON serialization\n * @param operation - The operation type being performed (for error messages)\n * @throws Error if the value cannot be JSON serialized\n */\nfunction validateJsonSerializable(value: any, operation: string): void {\n try {\n JSON.stringify(value)\n } catch (error) {\n throw new SerializationError(\n operation,\n error instanceof Error ? 
error.message : String(error)\n )\n }\n}\n\n/**\n * Generate a UUID for version tracking\n * @returns A unique identifier string for tracking data versions\n */\nfunction generateUuid(): string {\n return crypto.randomUUID()\n}\n\n/**\n * Creates localStorage collection options for use with a standard Collection\n *\n * This function creates a collection that persists data to localStorage/sessionStorage\n * and synchronizes changes across browser tabs using storage events.\n *\n * @template TExplicit - The explicit type of items in the collection (highest priority)\n * @template TSchema - The schema type for validation and type inference (second priority)\n * @template TFallback - The fallback type if no explicit or schema type is provided\n * @param config - Configuration options for the localStorage collection\n * @returns Collection options with utilities including clearStorage and getStorageSize\n *\n * @example\n * // Basic localStorage collection\n * const collection = createCollection(\n * localStorageCollectionOptions({\n * storageKey: 'todos',\n * getKey: (item) => item.id,\n * })\n * )\n *\n * @example\n * // localStorage collection with custom storage\n * const collection = createCollection(\n * localStorageCollectionOptions({\n * storageKey: 'todos',\n * storage: window.sessionStorage, // Use sessionStorage instead\n * getKey: (item) => item.id,\n * })\n * )\n *\n * @example\n * // localStorage collection with mutation handlers\n * const collection = createCollection(\n * localStorageCollectionOptions({\n * storageKey: 'todos',\n * getKey: (item) => item.id,\n * onInsert: async ({ transaction }) => {\n * console.log('Item inserted:', transaction.mutations[0].modified)\n * },\n * })\n * )\n */\n\n// Overload for when schema is provided\nexport function localStorageCollectionOptions<\n T extends StandardSchemaV1,\n TKey extends string | number = string | number,\n>(\n config: LocalStorageCollectionConfig<InferSchemaOutput<T>, T, TKey> & {\n schema: T\n }\n): CollectionConfig<InferSchemaOutput<T>, TKey, T> & {\n id: string\n utils: LocalStorageCollectionUtils\n schema: T\n}\n\n// Overload for when no schema is provided\n// the type T needs to be passed explicitly unless it can be inferred from the getKey function in the config\nexport function localStorageCollectionOptions<\n T extends object,\n TKey extends string | number = string | number,\n>(\n config: LocalStorageCollectionConfig<T, never, TKey> & {\n schema?: never // prohibit schema\n }\n): CollectionConfig<T, TKey> & {\n id: string\n utils: LocalStorageCollectionUtils\n schema?: never // no schema in the result\n}\n\nexport function localStorageCollectionOptions(\n config: LocalStorageCollectionConfig<any, any, string | number>\n): Omit<CollectionConfig<any, string | number, any>, `id`> & {\n id: string\n utils: LocalStorageCollectionUtils\n schema?: StandardSchemaV1\n} {\n // Validate required parameters\n if (!config.storageKey) {\n throw new StorageKeyRequiredError()\n }\n\n // Default to window.localStorage if no storage is provided\n const storage =\n config.storage ||\n (typeof window !== `undefined` ? window.localStorage : null)\n\n if (!storage) {\n throw new NoStorageAvailableError()\n }\n\n // Default to window for storage events if not provided\n const storageEventApi =\n config.storageEventApi || (typeof window !== `undefined` ? 
window : null)\n\n if (!storageEventApi) {\n throw new NoStorageEventApiError()\n }\n\n // Track the last known state to detect changes\n const lastKnownData = new Map<string | number, StoredItem<any>>()\n\n // Create the sync configuration\n const sync = createLocalStorageSync<any>(\n config.storageKey,\n storage,\n storageEventApi,\n config.getKey,\n lastKnownData\n )\n\n /**\n * Manual trigger function for local sync updates\n * Forces a check for storage changes and updates the collection if needed\n */\n const triggerLocalSync = () => {\n if (sync.manualTrigger) {\n sync.manualTrigger()\n }\n }\n\n /**\n * Save data to storage\n * @param dataMap - Map of items with version tracking to save to storage\n */\n const saveToStorage = (\n dataMap: Map<string | number, StoredItem<any>>\n ): void => {\n try {\n // Convert Map to object format for storage\n const objectData: Record<string, StoredItem<any>> = {}\n dataMap.forEach((storedItem, key) => {\n objectData[String(key)] = storedItem\n })\n const serialized = JSON.stringify(objectData)\n storage.setItem(config.storageKey, serialized)\n } catch (error) {\n console.error(\n `[LocalStorageCollection] Error saving data to storage key \"${config.storageKey}\":`,\n error\n )\n throw error\n }\n }\n\n /**\n * Removes all collection data from the configured storage\n */\n const clearStorage: ClearStorageFn = (): void => {\n storage.removeItem(config.storageKey)\n }\n\n /**\n * Get the size of the stored data in bytes (approximate)\n * @returns The approximate size in bytes of the stored collection data\n */\n const getStorageSize: GetStorageSizeFn = (): number => {\n const data = storage.getItem(config.storageKey)\n return data ? new Blob([data]).size : 0\n }\n\n /*\n * Create wrapper handlers for direct persistence operations that perform actual storage operations\n * Wraps the user's onInsert handler to also save changes to localStorage\n */\n const wrappedOnInsert = async (params: InsertMutationFnParams<any>) => {\n // Validate that all values in the transaction can be JSON serialized\n params.transaction.mutations.forEach((mutation) => {\n validateJsonSerializable(mutation.modified, `insert`)\n })\n\n // Call the user handler BEFORE persisting changes (if provided)\n let handlerResult: any = {}\n if (config.onInsert) {\n handlerResult = (await config.onInsert(params)) ?? {}\n }\n\n // Always persist to storage\n // Load current data from storage\n const currentData = loadFromStorage<any>(config.storageKey, storage)\n\n // Add new items with version keys\n params.transaction.mutations.forEach((mutation) => {\n const key = config.getKey(mutation.modified)\n const storedItem: StoredItem<any> = {\n versionKey: generateUuid(),\n data: mutation.modified,\n }\n currentData.set(key, storedItem)\n })\n\n // Save to storage\n saveToStorage(currentData)\n\n // Manually trigger local sync since storage events don't fire for current tab\n triggerLocalSync()\n\n return handlerResult\n }\n\n const wrappedOnUpdate = async (params: UpdateMutationFnParams<any>) => {\n // Validate that all values in the transaction can be JSON serialized\n params.transaction.mutations.forEach((mutation) => {\n validateJsonSerializable(mutation.modified, `update`)\n })\n\n // Call the user handler BEFORE persisting changes (if provided)\n let handlerResult: any = {}\n if (config.onUpdate) {\n handlerResult = (await config.onUpdate(params)) ?? 
{}\n }\n\n // Always persist to storage\n // Load current data from storage\n const currentData = loadFromStorage<any>(config.storageKey, storage)\n\n // Update items with new version keys\n params.transaction.mutations.forEach((mutation) => {\n const key = config.getKey(mutation.modified)\n const storedItem: StoredItem<any> = {\n versionKey: generateUuid(),\n data: mutation.modified,\n }\n currentData.set(key, storedItem)\n })\n\n // Save to storage\n saveToStorage(currentData)\n\n // Manually trigger local sync since storage events don't fire for current tab\n triggerLocalSync()\n\n return handlerResult\n }\n\n const wrappedOnDelete = async (params: DeleteMutationFnParams<any>) => {\n // Call the user handler BEFORE persisting changes (if provided)\n let handlerResult: any = {}\n if (config.onDelete) {\n handlerResult = (await config.onDelete(params)) ?? {}\n }\n\n // Always persist to storage\n // Load current data from storage\n const currentData = loadFromStorage<any>(config.storageKey, storage)\n\n // Remove items\n params.transaction.mutations.forEach((mutation) => {\n // For delete operations, mutation.original contains the full object\n const key = config.getKey(mutation.original)\n currentData.delete(key)\n })\n\n // Save to storage\n saveToStorage(currentData)\n\n // Manually trigger local sync since storage events don't fire for current tab\n triggerLocalSync()\n\n return handlerResult\n }\n\n // Extract standard Collection config properties\n const {\n storageKey: _storageKey,\n storage: _storage,\n storageEventApi: _storageEventApi,\n onInsert: _onInsert,\n onUpdate: _onUpdate,\n onDelete: _onDelete,\n id,\n ...restConfig\n } = config\n\n // Default id to a pattern based on storage key if not provided\n const collectionId = id ?? `local-collection:${config.storageKey}`\n\n return {\n ...restConfig,\n id: collectionId,\n sync,\n onInsert: wrappedOnInsert,\n onUpdate: wrappedOnUpdate,\n onDelete: wrappedOnDelete,\n utils: {\n clearStorage,\n getStorageSize,\n },\n }\n}\n\n/**\n * Load data from storage and return as a Map\n * @param storageKey - The key used to store data in the storage API\n * @param storage - The storage API to load from (localStorage, sessionStorage, etc.)\n * @returns Map of stored items with version tracking, or empty Map if loading fails\n */\nfunction loadFromStorage<T extends object>(\n storageKey: string,\n storage: StorageApi\n): Map<string | number, StoredItem<T>> {\n try {\n const rawData = storage.getItem(storageKey)\n if (!rawData) {\n return new Map()\n }\n\n const parsed = JSON.parse(rawData)\n const dataMap = new Map<string | number, StoredItem<T>>()\n\n // Handle object format where keys map to StoredItem values\n if (\n typeof parsed === `object` &&\n parsed !== null &&\n !Array.isArray(parsed)\n ) {\n Object.entries(parsed).forEach(([key, value]) => {\n // Runtime check to ensure the value has the expected StoredItem structure\n if (\n value &&\n typeof value === `object` &&\n `versionKey` in value &&\n `data` in value\n ) {\n const storedItem = value as StoredItem<T>\n dataMap.set(key, storedItem)\n } else {\n throw new InvalidStorageDataFormatError(storageKey, key)\n }\n })\n } else {\n throw new InvalidStorageObjectFormatError(storageKey)\n }\n\n return dataMap\n } catch (error) {\n console.warn(\n `[LocalStorageCollection] Error loading data from storage key \"${storageKey}\":`,\n error\n )\n return new Map()\n }\n}\n\n/**\n * Internal function to create localStorage sync configuration\n * Creates a sync configuration that handles 
localStorage persistence and cross-tab synchronization\n * @param storageKey - The key used for storing data in localStorage\n * @param storage - The storage API to use (localStorage, sessionStorage, etc.)\n * @param storageEventApi - The event API for listening to storage changes\n * @param getKey - Function to extract the key from an item\n * @param lastKnownData - Map tracking the last known state for change detection\n * @returns Sync configuration with manual trigger capability\n */\nfunction createLocalStorageSync<T extends object>(\n storageKey: string,\n storage: StorageApi,\n storageEventApi: StorageEventApi,\n _getKey: (item: T) => string | number,\n lastKnownData: Map<string | number, StoredItem<T>>\n): SyncConfig<T> & { manualTrigger?: () => void } {\n let syncParams: Parameters<SyncConfig<T>[`sync`]>[0] | null = null\n\n /**\n * Compare two Maps to find differences using version keys\n * @param oldData - The previous state of stored items\n * @param newData - The current state of stored items\n * @returns Array of changes with type, key, and value information\n */\n const findChanges = (\n oldData: Map<string | number, StoredItem<T>>,\n newData: Map<string | number, StoredItem<T>>\n ): Array<{\n type: `insert` | `update` | `delete`\n key: string | number\n value?: T\n }> => {\n const changes: Array<{\n type: `insert` | `update` | `delete`\n key: string | number\n value?: T\n }> = []\n\n // Check for deletions and updates\n oldData.forEach((oldStoredItem, key) => {\n const newStoredItem = newData.get(key)\n if (!newStoredItem) {\n changes.push({ type: `delete`, key, value: oldStoredItem.data })\n } else if (oldStoredItem.versionKey !== newStoredItem.versionKey) {\n changes.push({ type: `update`, key, value: newStoredItem.data })\n }\n })\n\n // Check for insertions\n newData.forEach((newStoredItem, key) => {\n if (!oldData.has(key)) {\n changes.push({ type: `insert`, key, value: newStoredItem.data })\n }\n })\n\n return changes\n }\n\n /**\n * Process storage changes and update collection\n * Loads new data from storage, compares with last known state, and applies changes\n */\n const processStorageChanges = () => {\n if (!syncParams) return\n\n const { begin, write, commit } = syncParams\n\n // Load the new data\n const newData = loadFromStorage<T>(storageKey, storage)\n\n // Find the specific changes\n const changes = findChanges(lastKnownData, newData)\n\n if (changes.length > 0) {\n begin()\n changes.forEach(({ type, value }) => {\n if (value) {\n validateJsonSerializable(value, type)\n write({ type, value })\n }\n })\n commit()\n\n // Update lastKnownData\n lastKnownData.clear()\n newData.forEach((storedItem, key) => {\n lastKnownData.set(key, storedItem)\n })\n }\n }\n\n const syncConfig: SyncConfig<T> & { manualTrigger?: () => void } = {\n sync: (params: Parameters<SyncConfig<T>[`sync`]>[0]) => {\n const { begin, write, commit, markReady } = params\n\n // Store sync params for later use\n syncParams = params\n\n // Initial load\n const initialData = loadFromStorage<T>(storageKey, storage)\n if (initialData.size > 0) {\n begin()\n initialData.forEach((storedItem) => {\n validateJsonSerializable(storedItem.data, `load`)\n write({ type: `insert`, value: storedItem.data })\n })\n commit()\n }\n\n // Update lastKnownData\n lastKnownData.clear()\n initialData.forEach((storedItem, key) => {\n lastKnownData.set(key, storedItem)\n })\n\n // Mark collection as ready after initial load\n markReady()\n\n // Listen for storage events from other tabs\n const handleStorageEvent = 
(event: StorageEvent) => {\n // Only respond to changes to our specific key and from our storage\n if (event.key !== storageKey || event.storageArea !== storage) {\n return\n }\n\n processStorageChanges()\n }\n\n // Add storage event listener for cross-tab sync\n storageEventApi.addEventListener(`storage`, handleStorageEvent)\n\n // Note: Cleanup is handled automatically by the collection when it's disposed\n },\n\n /**\n * Get sync metadata - returns storage key information\n * @returns Object containing storage key and storage type metadata\n */\n getSyncMetadata: () => ({\n storageKey,\n storageType:\n storage === (typeof window !== `undefined` ? window.localStorage : null)\n ? `localStorage`\n : `custom`,\n }),\n\n // Manual trigger function for local updates\n manualTrigger: processStorageChanges,\n }\n\n return syncConfig\n}\n"],"names":["SerializationError","StorageKeyRequiredError","NoStorageAvailableError","NoStorageEventApiError","InvalidStorageDataFormatError","InvalidStorageObjectFormatError"],"mappings":";;;AAoGA,SAAS,yBAAyB,OAAY,WAAyB;AACrE,MAAI;AACF,SAAK,UAAU,KAAK;AAAA,EACtB,SAAS,OAAO;AACd,UAAM,IAAIA,OAAAA;AAAAA,MACR;AAAA,MACA,iBAAiB,QAAQ,MAAM,UAAU,OAAO,KAAK;AAAA,IAAA;AAAA,EAEzD;AACF;AAMA,SAAS,eAAuB;AAC9B,SAAO,OAAO,WAAA;AAChB;AA2EO,SAAS,8BACd,QAKA;AAEA,MAAI,CAAC,OAAO,YAAY;AACtB,UAAM,IAAIC,OAAAA,wBAAA;AAAA,EACZ;AAGA,QAAM,UACJ,OAAO,YACN,OAAO,WAAW,cAAc,OAAO,eAAe;AAEzD,MAAI,CAAC,SAAS;AACZ,UAAM,IAAIC,OAAAA,wBAAA;AAAA,EACZ;AAGA,QAAM,kBACJ,OAAO,oBAAoB,OAAO,WAAW,cAAc,SAAS;AAEtE,MAAI,CAAC,iBAAiB;AACpB,UAAM,IAAIC,OAAAA,uBAAA;AAAA,EACZ;AAGA,QAAM,oCAAoB,IAAA;AAG1B,QAAM,OAAO;AAAA,IACX,OAAO;AAAA,IACP;AAAA,IACA;AAAA,IACA,OAAO;AAAA,IACP;AAAA,EAAA;AAOF,QAAM,mBAAmB,MAAM;AAC7B,QAAI,KAAK,eAAe;AACtB,WAAK,cAAA;AAAA,IACP;AAAA,EACF;AAMA,QAAM,gBAAgB,CACpB,YACS;AACT,QAAI;AAEF,YAAM,aAA8C,CAAA;AACpD,cAAQ,QAAQ,CAAC,YAAY,QAAQ;AACnC,mBAAW,OAAO,GAAG,CAAC,IAAI;AAAA,MAC5B,CAAC;AACD,YAAM,aAAa,KAAK,UAAU,UAAU;AAC5C,cAAQ,QAAQ,OAAO,YAAY,UAAU;AAAA,IAC/C,SAAS,OAAO;AACd,cAAQ;AAAA,QACN,8DAA8D,OAAO,UAAU;AAAA,QAC/E;AAAA,MAAA;AAEF,YAAM;AAAA,IACR;AAAA,EACF;AAKA,QAAM,eAA+B,MAAY;AAC/C,YAAQ,WAAW,OAAO,UAAU;AAAA,EACtC;AAMA,QAAM,iBAAmC,MAAc;AACrD,UAAM,OAAO,QAAQ,QAAQ,OAAO,UAAU;AAC9C,WAAO,OAAO,IAAI,KAAK,CAAC,IAAI,CAAC,EAAE,OAAO;AAAA,EACxC;AAMA,QAAM,kBAAkB,OAAO,WAAwC;AAErE,WAAO,YAAY,UAAU,QAAQ,CAAC,aAAa;AACjD,+BAAyB,SAAS,UAAU,QAAQ;AAAA,IACtD,CAAC;AAGD,QAAI,gBAAqB,CAAA;AACzB,QAAI,OAAO,UAAU;AACnB,sBAAiB,MAAM,OAAO,SAAS,MAAM,KAAM,CAAA;AAAA,IACrD;AAIA,UAAM,cAAc,gBAAqB,OAAO,YAAY,OAAO;AAGnE,WAAO,YAAY,UAAU,QAAQ,CAAC,aAAa;AACjD,YAAM,MAAM,OAAO,OAAO,SAAS,QAAQ;AAC3C,YAAM,aAA8B;AAAA,QAClC,YAAY,aAAA;AAAA,QACZ,MAAM,SAAS;AAAA,MAAA;AAEjB,kBAAY,IAAI,KAAK,UAAU;AAAA,IACjC,CAAC;AAGD,kBAAc,WAAW;AAGzB,qBAAA;AAEA,WAAO;AAAA,EACT;AAEA,QAAM,kBAAkB,OAAO,WAAwC;AAErE,WAAO,YAAY,UAAU,QAAQ,CAAC,aAAa;AACjD,+BAAyB,SAAS,UAAU,QAAQ;AAAA,IACtD,CAAC;AAGD,QAAI,gBAAqB,CAAA;AACzB,QAAI,OAAO,UAAU;AACnB,sBAAiB,MAAM,OAAO,SAAS,MAAM,KAAM,CAAA;AAAA,IACrD;AAIA,UAAM,cAAc,gBAAqB,OAAO,YAAY,OAAO;AAGnE,WAAO,YAAY,UAAU,QAAQ,CAAC,aAAa;AACjD,YAAM,MAAM,OAAO,OAAO,SAAS,QAAQ;AAC3C,YAAM,aAA8B;AAAA,QAClC,YAAY,aAAA;AAAA,QACZ,MAAM,SAAS;AAAA,MAAA;AAEjB,kBAAY,IAAI,KAAK,UAAU;AAAA,IACjC,CAAC;AAGD,kBAAc,WAAW;AAGzB,qBAAA;AAEA,WAAO;AAAA,EACT;AAEA,QAAM,kBAAkB,OAAO,WAAwC;AAErE,QAAI,gBAAqB,CAAA;AACzB,QAAI,OAAO,UAAU;AACnB,sBAAiB,MAAM,OAAO,SAAS,MAAM,KAAM,CAAA;AAAA,IACrD;AAIA,UAAM,cAAc,gBAAqB,OAAO,YAAY,OAAO;AAGnE,WAAO,YAAY,UAAU,QAAQ,CAAC,aAAa;AAEjD,YAAM,MAAM,OAAO,OAAO,SAAS,QAAQ;AAC3C,kBAAY,OAAO,GAAG;AAAA,IACxB,CAAC;AAGD,kBAAc,WAAW;AAGzB,qBAAA;AAEA,WAAO;AAAA,EACT;AAGA,QAAM;AAAA,IACJ,YAAY;AAAA,IACZ,SAAS;AAAA,IACT
,iBAAiB;AAAA,IACjB,UAAU;AAAA,IACV,UAAU;AAAA,IACV,UAAU;AAAA,IACV;AAAA,IACA,GAAG;AAAA,EAAA,IACD;AAGJ,QAAM,eAAe,MAAM,oBAAoB,OAAO,UAAU;AAEhE,SAAO;AAAA,IACL,GAAG;AAAA,IACH,IAAI;AAAA,IACJ;AAAA,IACA,UAAU;AAAA,IACV,UAAU;AAAA,IACV,UAAU;AAAA,IACV,OAAO;AAAA,MACL;AAAA,MACA;AAAA,IAAA;AAAA,EACF;AAEJ;AAQA,SAAS,gBACP,YACA,SACqC;AACrC,MAAI;AACF,UAAM,UAAU,QAAQ,QAAQ,UAAU;AAC1C,QAAI,CAAC,SAAS;AACZ,iCAAW,IAAA;AAAA,IACb;AAEA,UAAM,SAAS,KAAK,MAAM,OAAO;AACjC,UAAM,8BAAc,IAAA;AAGpB,QACE,OAAO,WAAW,YAClB,WAAW,QACX,CAAC,MAAM,QAAQ,MAAM,GACrB;AACA,aAAO,QAAQ,MAAM,EAAE,QAAQ,CAAC,CAAC,KAAK,KAAK,MAAM;AAE/C,YACE,SACA,OAAO,UAAU,YACjB,gBAAgB,SAChB,UAAU,OACV;AACA,gBAAM,aAAa;AACnB,kBAAQ,IAAI,KAAK,UAAU;AAAA,QAC7B,OAAO;AACL,gBAAM,IAAIC,OAAAA,8BAA8B,YAAY,GAAG;AAAA,QACzD;AAAA,MACF,CAAC;AAAA,IACH,OAAO;AACL,YAAM,IAAIC,OAAAA,gCAAgC,UAAU;AAAA,IACtD;AAEA,WAAO;AAAA,EACT,SAAS,OAAO;AACd,YAAQ;AAAA,MACN,iEAAiE,UAAU;AAAA,MAC3E;AAAA,IAAA;AAEF,+BAAW,IAAA;AAAA,EACb;AACF;AAYA,SAAS,uBACP,YACA,SACA,iBACA,SACA,eACgD;AAChD,MAAI,aAA0D;AAQ9D,QAAM,cAAc,CAClB,SACA,YAKI;AACJ,UAAM,UAID,CAAA;AAGL,YAAQ,QAAQ,CAAC,eAAe,QAAQ;AACtC,YAAM,gBAAgB,QAAQ,IAAI,GAAG;AACrC,UAAI,CAAC,eAAe;AAClB,gBAAQ,KAAK,EAAE,MAAM,UAAU,KAAK,OAAO,cAAc,MAAM;AAAA,MACjE,WAAW,cAAc,eAAe,cAAc,YAAY;AAChE,gBAAQ,KAAK,EAAE,MAAM,UAAU,KAAK,OAAO,cAAc,MAAM;AAAA,MACjE;AAAA,IACF,CAAC;AAGD,YAAQ,QAAQ,CAAC,eAAe,QAAQ;AACtC,UAAI,CAAC,QAAQ,IAAI,GAAG,GAAG;AACrB,gBAAQ,KAAK,EAAE,MAAM,UAAU,KAAK,OAAO,cAAc,MAAM;AAAA,MACjE;AAAA,IACF,CAAC;AAED,WAAO;AAAA,EACT;AAMA,QAAM,wBAAwB,MAAM;AAClC,QAAI,CAAC,WAAY;AAEjB,UAAM,EAAE,OAAO,OAAO,OAAA,IAAW;AAGjC,UAAM,UAAU,gBAAmB,YAAY,OAAO;AAGtD,UAAM,UAAU,YAAY,eAAe,OAAO;AAElD,QAAI,QAAQ,SAAS,GAAG;AACtB,YAAA;AACA,cAAQ,QAAQ,CAAC,EAAE,MAAM,YAAY;AACnC,YAAI,OAAO;AACT,mCAAyB,OAAO,IAAI;AACpC,gBAAM,EAAE,MAAM,OAAO;AAAA,QACvB;AAAA,MACF,CAAC;AACD,aAAA;AAGA,oBAAc,MAAA;AACd,cAAQ,QAAQ,CAAC,YAAY,QAAQ;AACnC,sBAAc,IAAI,KAAK,UAAU;AAAA,MACnC,CAAC;AAAA,IACH;AAAA,EACF;AAEA,QAAM,aAA6D;AAAA,IACjE,MAAM,CAAC,WAAiD;AACtD,YAAM,EAAE,OAAO,OAAO,QAAQ,cAAc;AAG5C,mBAAa;AAGb,YAAM,cAAc,gBAAmB,YAAY,OAAO;AAC1D,UAAI,YAAY,OAAO,GAAG;AACxB,cAAA;AACA,oBAAY,QAAQ,CAAC,eAAe;AAClC,mCAAyB,WAAW,MAAM,MAAM;AAChD,gBAAM,EAAE,MAAM,UAAU,OAAO,WAAW,MAAM;AAAA,QAClD,CAAC;AACD,eAAA;AAAA,MACF;AAGA,oBAAc,MAAA;AACd,kBAAY,QAAQ,CAAC,YAAY,QAAQ;AACvC,sBAAc,IAAI,KAAK,UAAU;AAAA,MACnC,CAAC;AAGD,gBAAA;AAGA,YAAM,qBAAqB,CAAC,UAAwB;AAElD,YAAI,MAAM,QAAQ,cAAc,MAAM,gBAAgB,SAAS;AAC7D;AAAA,QACF;AAEA,8BAAA;AAAA,MACF;AAGA,sBAAgB,iBAAiB,WAAW,kBAAkB;AAAA,IAGhE;AAAA;AAAA;AAAA;AAAA;AAAA,IAMA,iBAAiB,OAAO;AAAA,MACtB;AAAA,MACA,aACE,aAAa,OAAO,WAAW,cAAc,OAAO,eAAe,QAC/D,iBACA;AAAA,IAAA;AAAA;AAAA,IAIR,eAAe;AAAA,EAAA;AAGjB,SAAO;AACT;;"}
+ {"version":3,"file":"local-storage.cjs","sources":["../../src/local-storage.ts"],"sourcesContent":["import {\n InvalidStorageDataFormatError,\n InvalidStorageObjectFormatError,\n NoStorageAvailableError,\n NoStorageEventApiError,\n SerializationError,\n StorageKeyRequiredError,\n} from \"./errors\"\nimport type {\n BaseCollectionConfig,\n CollectionConfig,\n DeleteMutationFnParams,\n InferSchemaOutput,\n InsertMutationFnParams,\n PendingMutation,\n SyncConfig,\n UpdateMutationFnParams,\n UtilsRecord,\n} from \"./types\"\nimport type { StandardSchemaV1 } from \"@standard-schema/spec\"\n\n/**\n * Storage API interface - subset of DOM Storage that we need\n */\nexport type StorageApi = Pick<Storage, `getItem` | `setItem` | `removeItem`>\n\n/**\n * Storage event API - subset of Window for 'storage' events only\n */\nexport type StorageEventApi = {\n addEventListener: (\n type: `storage`,\n listener: (event: StorageEvent) => void\n ) => void\n removeEventListener: (\n type: `storage`,\n listener: (event: StorageEvent) => void\n ) => void\n}\n\n/**\n * Internal storage format that includes version tracking\n */\ninterface StoredItem<T> {\n versionKey: string\n data: T\n}\n\n/**\n * Configuration interface for localStorage collection options\n * @template T - The type of items in the collection\n * @template TSchema - The schema type for validation\n * @template TKey - The type of the key returned by `getKey`\n */\nexport interface LocalStorageCollectionConfig<\n T extends object = object,\n TSchema extends StandardSchemaV1 = never,\n TKey extends string | number = string | number,\n> extends BaseCollectionConfig<T, TKey, TSchema> {\n /**\n * The key to use for storing the collection data in localStorage/sessionStorage\n */\n storageKey: string\n\n /**\n * Storage API to use (defaults to window.localStorage)\n * Can be any object that implements the Storage interface (e.g., sessionStorage)\n */\n storage?: StorageApi\n\n /**\n * Storage event API to use for cross-tab synchronization (defaults to window)\n * Can be any object that implements addEventListener/removeEventListener for storage events\n */\n storageEventApi?: StorageEventApi\n}\n\n/**\n * Type for the clear utility function\n */\nexport type ClearStorageFn = () => void\n\n/**\n * Type for the getStorageSize utility function\n */\nexport type GetStorageSizeFn = () => number\n\n/**\n * LocalStorage collection utilities type\n */\nexport interface LocalStorageCollectionUtils extends UtilsRecord {\n clearStorage: ClearStorageFn\n getStorageSize: GetStorageSizeFn\n /**\n * Accepts mutations from a transaction that belong to this collection and persists them to localStorage.\n * This should be called in your transaction's mutationFn to persist local-storage data.\n *\n * @param transaction - The transaction containing mutations to accept\n * @example\n * const localSettings = createCollection(localStorageCollectionOptions({...}))\n *\n * const tx = createTransaction({\n * mutationFn: async ({ transaction }) => {\n * // Make API call first\n * await api.save(...)\n * // Then persist local-storage mutations after success\n * localSettings.utils.acceptMutations(transaction)\n * }\n * })\n */\n acceptMutations: (transaction: {\n mutations: Array<PendingMutation<Record<string, unknown>>>\n }) => void\n}\n\n/**\n * Validates that a value can be JSON serialized\n * @param value - The value to validate for JSON serialization\n * @param operation - The operation type being performed (for error messages)\n * @throws Error if the value cannot be JSON 
serialized\n */\nfunction validateJsonSerializable(value: any, operation: string): void {\n try {\n JSON.stringify(value)\n } catch (error) {\n throw new SerializationError(\n operation,\n error instanceof Error ? error.message : String(error)\n )\n }\n}\n\n/**\n * Generate a UUID for version tracking\n * @returns A unique identifier string for tracking data versions\n */\nfunction generateUuid(): string {\n return crypto.randomUUID()\n}\n\n/**\n * Creates localStorage collection options for use with a standard Collection\n *\n * This function creates a collection that persists data to localStorage/sessionStorage\n * and synchronizes changes across browser tabs using storage events.\n *\n * **Using with Manual Transactions:**\n *\n * For manual transactions, you must call `utils.acceptMutations()` in your transaction's `mutationFn`\n * to persist changes made during `tx.mutate()`. This is necessary because local-storage collections\n * don't participate in the standard mutation handler flow for manual transactions.\n *\n * @template TExplicit - The explicit type of items in the collection (highest priority)\n * @template TSchema - The schema type for validation and type inference (second priority)\n * @template TFallback - The fallback type if no explicit or schema type is provided\n * @param config - Configuration options for the localStorage collection\n * @returns Collection options with utilities including clearStorage, getStorageSize, and acceptMutations\n *\n * @example\n * // Basic localStorage collection\n * const collection = createCollection(\n * localStorageCollectionOptions({\n * storageKey: 'todos',\n * getKey: (item) => item.id,\n * })\n * )\n *\n * @example\n * // localStorage collection with custom storage\n * const collection = createCollection(\n * localStorageCollectionOptions({\n * storageKey: 'todos',\n * storage: window.sessionStorage, // Use sessionStorage instead\n * getKey: (item) => item.id,\n * })\n * )\n *\n * @example\n * // localStorage collection with mutation handlers\n * const collection = createCollection(\n * localStorageCollectionOptions({\n * storageKey: 'todos',\n * getKey: (item) => item.id,\n * onInsert: async ({ transaction }) => {\n * console.log('Item inserted:', transaction.mutations[0].modified)\n * },\n * })\n * )\n *\n * @example\n * // Using with manual transactions\n * const localSettings = createCollection(\n * localStorageCollectionOptions({\n * storageKey: 'user-settings',\n * getKey: (item) => item.id,\n * })\n * )\n *\n * const tx = createTransaction({\n * mutationFn: async ({ transaction }) => {\n * // Use settings data in API call\n * const settingsMutations = transaction.mutations.filter(m => m.collection === localSettings)\n * await api.updateUserProfile({ settings: settingsMutations[0]?.modified })\n *\n * // Persist local-storage mutations after API success\n * localSettings.utils.acceptMutations(transaction)\n * }\n * })\n *\n * tx.mutate(() => {\n * localSettings.insert({ id: 'theme', value: 'dark' })\n * apiCollection.insert({ id: 2, data: 'profile data' })\n * })\n *\n * await tx.commit()\n */\n\n// Overload for when schema is provided\nexport function localStorageCollectionOptions<\n T extends StandardSchemaV1,\n TKey extends string | number = string | number,\n>(\n config: LocalStorageCollectionConfig<InferSchemaOutput<T>, T, TKey> & {\n schema: T\n }\n): CollectionConfig<InferSchemaOutput<T>, TKey, T> & {\n id: string\n utils: LocalStorageCollectionUtils\n schema: T\n}\n\n// Overload for when no schema is provided\n// the 
type T needs to be passed explicitly unless it can be inferred from the getKey function in the config\nexport function localStorageCollectionOptions<\n T extends object,\n TKey extends string | number = string | number,\n>(\n config: LocalStorageCollectionConfig<T, never, TKey> & {\n schema?: never // prohibit schema\n }\n): CollectionConfig<T, TKey> & {\n id: string\n utils: LocalStorageCollectionUtils\n schema?: never // no schema in the result\n}\n\nexport function localStorageCollectionOptions(\n config: LocalStorageCollectionConfig<any, any, string | number>\n): Omit<CollectionConfig<any, string | number, any>, `id`> & {\n id: string\n utils: LocalStorageCollectionUtils\n schema?: StandardSchemaV1\n} {\n // Validate required parameters\n if (!config.storageKey) {\n throw new StorageKeyRequiredError()\n }\n\n // Default to window.localStorage if no storage is provided\n const storage =\n config.storage ||\n (typeof window !== `undefined` ? window.localStorage : null)\n\n if (!storage) {\n throw new NoStorageAvailableError()\n }\n\n // Default to window for storage events if not provided\n const storageEventApi =\n config.storageEventApi || (typeof window !== `undefined` ? window : null)\n\n if (!storageEventApi) {\n throw new NoStorageEventApiError()\n }\n\n // Track the last known state to detect changes\n const lastKnownData = new Map<string | number, StoredItem<any>>()\n\n // Create the sync configuration\n const sync = createLocalStorageSync<any>(\n config.storageKey,\n storage,\n storageEventApi,\n config.getKey,\n lastKnownData\n )\n\n /**\n * Manual trigger function for local sync updates\n * Forces a check for storage changes and updates the collection if needed\n */\n const triggerLocalSync = () => {\n if (sync.manualTrigger) {\n sync.manualTrigger()\n }\n }\n\n /**\n * Save data to storage\n * @param dataMap - Map of items with version tracking to save to storage\n */\n const saveToStorage = (\n dataMap: Map<string | number, StoredItem<any>>\n ): void => {\n try {\n // Convert Map to object format for storage\n const objectData: Record<string, StoredItem<any>> = {}\n dataMap.forEach((storedItem, key) => {\n objectData[String(key)] = storedItem\n })\n const serialized = JSON.stringify(objectData)\n storage.setItem(config.storageKey, serialized)\n } catch (error) {\n console.error(\n `[LocalStorageCollection] Error saving data to storage key \"${config.storageKey}\":`,\n error\n )\n throw error\n }\n }\n\n /**\n * Removes all collection data from the configured storage\n */\n const clearStorage: ClearStorageFn = (): void => {\n storage.removeItem(config.storageKey)\n }\n\n /**\n * Get the size of the stored data in bytes (approximate)\n * @returns The approximate size in bytes of the stored collection data\n */\n const getStorageSize: GetStorageSizeFn = (): number => {\n const data = storage.getItem(config.storageKey)\n return data ? 
new Blob([data]).size : 0\n }\n\n /*\n * Create wrapper handlers for direct persistence operations that perform actual storage operations\n * Wraps the user's onInsert handler to also save changes to localStorage\n */\n const wrappedOnInsert = async (params: InsertMutationFnParams<any>) => {\n // Validate that all values in the transaction can be JSON serialized\n params.transaction.mutations.forEach((mutation) => {\n validateJsonSerializable(mutation.modified, `insert`)\n })\n\n // Call the user handler BEFORE persisting changes (if provided)\n let handlerResult: any = {}\n if (config.onInsert) {\n handlerResult = (await config.onInsert(params)) ?? {}\n }\n\n // Always persist to storage\n // Load current data from storage\n const currentData = loadFromStorage<any>(config.storageKey, storage)\n\n // Add new items with version keys\n params.transaction.mutations.forEach((mutation) => {\n const key = config.getKey(mutation.modified)\n const storedItem: StoredItem<any> = {\n versionKey: generateUuid(),\n data: mutation.modified,\n }\n currentData.set(key, storedItem)\n })\n\n // Save to storage\n saveToStorage(currentData)\n\n // Manually trigger local sync since storage events don't fire for current tab\n triggerLocalSync()\n\n return handlerResult\n }\n\n const wrappedOnUpdate = async (params: UpdateMutationFnParams<any>) => {\n // Validate that all values in the transaction can be JSON serialized\n params.transaction.mutations.forEach((mutation) => {\n validateJsonSerializable(mutation.modified, `update`)\n })\n\n // Call the user handler BEFORE persisting changes (if provided)\n let handlerResult: any = {}\n if (config.onUpdate) {\n handlerResult = (await config.onUpdate(params)) ?? {}\n }\n\n // Always persist to storage\n // Load current data from storage\n const currentData = loadFromStorage<any>(config.storageKey, storage)\n\n // Update items with new version keys\n params.transaction.mutations.forEach((mutation) => {\n const key = config.getKey(mutation.modified)\n const storedItem: StoredItem<any> = {\n versionKey: generateUuid(),\n data: mutation.modified,\n }\n currentData.set(key, storedItem)\n })\n\n // Save to storage\n saveToStorage(currentData)\n\n // Manually trigger local sync since storage events don't fire for current tab\n triggerLocalSync()\n\n return handlerResult\n }\n\n const wrappedOnDelete = async (params: DeleteMutationFnParams<any>) => {\n // Call the user handler BEFORE persisting changes (if provided)\n let handlerResult: any = {}\n if (config.onDelete) {\n handlerResult = (await config.onDelete(params)) ?? {}\n }\n\n // Always persist to storage\n // Load current data from storage\n const currentData = loadFromStorage<any>(config.storageKey, storage)\n\n // Remove items\n params.transaction.mutations.forEach((mutation) => {\n // For delete operations, mutation.original contains the full object\n const key = config.getKey(mutation.original)\n currentData.delete(key)\n })\n\n // Save to storage\n saveToStorage(currentData)\n\n // Manually trigger local sync since storage events don't fire for current tab\n triggerLocalSync()\n\n return handlerResult\n }\n\n // Extract standard Collection config properties\n const {\n storageKey: _storageKey,\n storage: _storage,\n storageEventApi: _storageEventApi,\n onInsert: _onInsert,\n onUpdate: _onUpdate,\n onDelete: _onDelete,\n id,\n ...restConfig\n } = config\n\n // Default id to a pattern based on storage key if not provided\n const collectionId = id ?? 
`local-collection:${config.storageKey}`\n\n /**\n * Accepts mutations from a transaction that belong to this collection and persists them to storage\n */\n const acceptMutations = (transaction: {\n mutations: Array<PendingMutation<Record<string, unknown>>>\n }) => {\n // Filter mutations that belong to this collection\n // Use collection ID for filtering if collection reference isn't available yet\n const collectionMutations = transaction.mutations.filter((m) => {\n // Try to match by collection reference first\n if (sync.collection && m.collection === sync.collection) {\n return true\n }\n // Fall back to matching by collection ID\n return m.collection.id === collectionId\n })\n\n if (collectionMutations.length === 0) {\n return\n }\n\n // Validate all mutations can be serialized before modifying storage\n for (const mutation of collectionMutations) {\n switch (mutation.type) {\n case `insert`:\n case `update`:\n validateJsonSerializable(mutation.modified, mutation.type)\n break\n case `delete`:\n validateJsonSerializable(mutation.original, mutation.type)\n break\n }\n }\n\n // Load current data from storage\n const currentData = loadFromStorage<Record<string, unknown>>(\n config.storageKey,\n storage\n )\n\n // Apply each mutation\n for (const mutation of collectionMutations) {\n // Use the engine's pre-computed key to avoid key derivation issues\n const key = mutation.key\n\n switch (mutation.type) {\n case `insert`:\n case `update`: {\n const storedItem: StoredItem<Record<string, unknown>> = {\n versionKey: generateUuid(),\n data: mutation.modified,\n }\n currentData.set(key, storedItem)\n break\n }\n case `delete`: {\n currentData.delete(key)\n break\n }\n }\n }\n\n // Save to storage\n saveToStorage(currentData)\n\n // Confirm the mutations in the collection to move them from optimistic to synced state\n // This writes them through the sync interface to make them \"synced\" instead of \"optimistic\"\n sync.confirmOperationsSync(collectionMutations)\n }\n\n return {\n ...restConfig,\n id: collectionId,\n sync,\n onInsert: wrappedOnInsert,\n onUpdate: wrappedOnUpdate,\n onDelete: wrappedOnDelete,\n utils: {\n clearStorage,\n getStorageSize,\n acceptMutations,\n },\n }\n}\n\n/**\n * Load data from storage and return as a Map\n * @param storageKey - The key used to store data in the storage API\n * @param storage - The storage API to load from (localStorage, sessionStorage, etc.)\n * @returns Map of stored items with version tracking, or empty Map if loading fails\n */\nfunction loadFromStorage<T extends object>(\n storageKey: string,\n storage: StorageApi\n): Map<string | number, StoredItem<T>> {\n try {\n const rawData = storage.getItem(storageKey)\n if (!rawData) {\n return new Map()\n }\n\n const parsed = JSON.parse(rawData)\n const dataMap = new Map<string | number, StoredItem<T>>()\n\n // Handle object format where keys map to StoredItem values\n if (\n typeof parsed === `object` &&\n parsed !== null &&\n !Array.isArray(parsed)\n ) {\n Object.entries(parsed).forEach(([key, value]) => {\n // Runtime check to ensure the value has the expected StoredItem structure\n if (\n value &&\n typeof value === `object` &&\n `versionKey` in value &&\n `data` in value\n ) {\n const storedItem = value as StoredItem<T>\n dataMap.set(key, storedItem)\n } else {\n throw new InvalidStorageDataFormatError(storageKey, key)\n }\n })\n } else {\n throw new InvalidStorageObjectFormatError(storageKey)\n }\n\n return dataMap\n } catch (error) {\n console.warn(\n `[LocalStorageCollection] Error loading data 
from storage key \"${storageKey}\":`,\n error\n )\n return new Map()\n }\n}\n\n/**\n * Internal function to create localStorage sync configuration\n * Creates a sync configuration that handles localStorage persistence and cross-tab synchronization\n * @param storageKey - The key used for storing data in localStorage\n * @param storage - The storage API to use (localStorage, sessionStorage, etc.)\n * @param storageEventApi - The event API for listening to storage changes\n * @param getKey - Function to extract the key from an item\n * @param lastKnownData - Map tracking the last known state for change detection\n * @returns Sync configuration with manual trigger capability\n */\nfunction createLocalStorageSync<T extends object>(\n storageKey: string,\n storage: StorageApi,\n storageEventApi: StorageEventApi,\n _getKey: (item: T) => string | number,\n lastKnownData: Map<string | number, StoredItem<T>>\n): SyncConfig<T> & {\n manualTrigger?: () => void\n collection: any\n confirmOperationsSync: (mutations: Array<any>) => void\n} {\n let syncParams: Parameters<SyncConfig<T>[`sync`]>[0] | null = null\n let collection: any = null\n\n /**\n * Compare two Maps to find differences using version keys\n * @param oldData - The previous state of stored items\n * @param newData - The current state of stored items\n * @returns Array of changes with type, key, and value information\n */\n const findChanges = (\n oldData: Map<string | number, StoredItem<T>>,\n newData: Map<string | number, StoredItem<T>>\n ): Array<{\n type: `insert` | `update` | `delete`\n key: string | number\n value?: T\n }> => {\n const changes: Array<{\n type: `insert` | `update` | `delete`\n key: string | number\n value?: T\n }> = []\n\n // Check for deletions and updates\n oldData.forEach((oldStoredItem, key) => {\n const newStoredItem = newData.get(key)\n if (!newStoredItem) {\n changes.push({ type: `delete`, key, value: oldStoredItem.data })\n } else if (oldStoredItem.versionKey !== newStoredItem.versionKey) {\n changes.push({ type: `update`, key, value: newStoredItem.data })\n }\n })\n\n // Check for insertions\n newData.forEach((newStoredItem, key) => {\n if (!oldData.has(key)) {\n changes.push({ type: `insert`, key, value: newStoredItem.data })\n }\n })\n\n return changes\n }\n\n /**\n * Process storage changes and update collection\n * Loads new data from storage, compares with last known state, and applies changes\n */\n const processStorageChanges = () => {\n if (!syncParams) return\n\n const { begin, write, commit } = syncParams\n\n // Load the new data\n const newData = loadFromStorage<T>(storageKey, storage)\n\n // Find the specific changes\n const changes = findChanges(lastKnownData, newData)\n\n if (changes.length > 0) {\n begin()\n changes.forEach(({ type, value }) => {\n if (value) {\n validateJsonSerializable(value, type)\n write({ type, value })\n }\n })\n commit()\n\n // Update lastKnownData\n lastKnownData.clear()\n newData.forEach((storedItem, key) => {\n lastKnownData.set(key, storedItem)\n })\n }\n }\n\n const syncConfig: SyncConfig<T> & {\n manualTrigger?: () => void\n collection: any\n } = {\n sync: (params: Parameters<SyncConfig<T>[`sync`]>[0]) => {\n const { begin, write, commit, markReady } = params\n\n // Store sync params and collection for later use\n syncParams = params\n collection = params.collection\n\n // Initial load\n const initialData = loadFromStorage<T>(storageKey, storage)\n if (initialData.size > 0) {\n begin()\n initialData.forEach((storedItem) => {\n 
validateJsonSerializable(storedItem.data, `load`)\n write({ type: `insert`, value: storedItem.data })\n })\n commit()\n }\n\n // Update lastKnownData\n lastKnownData.clear()\n initialData.forEach((storedItem, key) => {\n lastKnownData.set(key, storedItem)\n })\n\n // Mark collection as ready after initial load\n markReady()\n\n // Listen for storage events from other tabs\n const handleStorageEvent = (event: StorageEvent) => {\n // Only respond to changes to our specific key and from our storage\n if (event.key !== storageKey || event.storageArea !== storage) {\n return\n }\n\n processStorageChanges()\n }\n\n // Add storage event listener for cross-tab sync\n storageEventApi.addEventListener(`storage`, handleStorageEvent)\n\n // Note: Cleanup is handled automatically by the collection when it's disposed\n },\n\n /**\n * Get sync metadata - returns storage key information\n * @returns Object containing storage key and storage type metadata\n */\n getSyncMetadata: () => ({\n storageKey,\n storageType:\n storage === (typeof window !== `undefined` ? window.localStorage : null)\n ? `localStorage`\n : `custom`,\n }),\n\n // Manual trigger function for local updates\n manualTrigger: processStorageChanges,\n\n // Collection instance reference\n collection,\n }\n\n /**\n * Confirms mutations by writing them through the sync interface\n * This moves mutations from optimistic to synced state\n * @param mutations - Array of mutation objects to confirm\n */\n const confirmOperationsSync = (mutations: Array<any>) => {\n if (!syncParams) {\n // Sync not initialized yet, mutations will be handled on next sync\n return\n }\n\n const { begin, write, commit } = syncParams\n\n // Write the mutations through sync to confirm them\n begin()\n mutations.forEach((mutation: any) => {\n write({\n type: mutation.type,\n value:\n mutation.type === `delete` ? 
mutation.original : mutation.modified,\n })\n })\n commit()\n }\n\n return {\n ...syncConfig,\n confirmOperationsSync,\n }\n}\n"],"names":["SerializationError","StorageKeyRequiredError","NoStorageAvailableError","NoStorageEventApiError","InvalidStorageDataFormatError","InvalidStorageObjectFormatError"],"mappings":";;;AAyHA,SAAS,yBAAyB,OAAY,WAAyB;AACrE,MAAI;AACF,SAAK,UAAU,KAAK;AAAA,EACtB,SAAS,OAAO;AACd,UAAM,IAAIA,OAAAA;AAAAA,MACR;AAAA,MACA,iBAAiB,QAAQ,MAAM,UAAU,OAAO,KAAK;AAAA,IAAA;AAAA,EAEzD;AACF;AAMA,SAAS,eAAuB;AAC9B,SAAO,OAAO,WAAA;AAChB;AA4GO,SAAS,8BACd,QAKA;AAEA,MAAI,CAAC,OAAO,YAAY;AACtB,UAAM,IAAIC,OAAAA,wBAAA;AAAA,EACZ;AAGA,QAAM,UACJ,OAAO,YACN,OAAO,WAAW,cAAc,OAAO,eAAe;AAEzD,MAAI,CAAC,SAAS;AACZ,UAAM,IAAIC,OAAAA,wBAAA;AAAA,EACZ;AAGA,QAAM,kBACJ,OAAO,oBAAoB,OAAO,WAAW,cAAc,SAAS;AAEtE,MAAI,CAAC,iBAAiB;AACpB,UAAM,IAAIC,OAAAA,uBAAA;AAAA,EACZ;AAGA,QAAM,oCAAoB,IAAA;AAG1B,QAAM,OAAO;AAAA,IACX,OAAO;AAAA,IACP;AAAA,IACA;AAAA,IACA,OAAO;AAAA,IACP;AAAA,EAAA;AAOF,QAAM,mBAAmB,MAAM;AAC7B,QAAI,KAAK,eAAe;AACtB,WAAK,cAAA;AAAA,IACP;AAAA,EACF;AAMA,QAAM,gBAAgB,CACpB,YACS;AACT,QAAI;AAEF,YAAM,aAA8C,CAAA;AACpD,cAAQ,QAAQ,CAAC,YAAY,QAAQ;AACnC,mBAAW,OAAO,GAAG,CAAC,IAAI;AAAA,MAC5B,CAAC;AACD,YAAM,aAAa,KAAK,UAAU,UAAU;AAC5C,cAAQ,QAAQ,OAAO,YAAY,UAAU;AAAA,IAC/C,SAAS,OAAO;AACd,cAAQ;AAAA,QACN,8DAA8D,OAAO,UAAU;AAAA,QAC/E;AAAA,MAAA;AAEF,YAAM;AAAA,IACR;AAAA,EACF;AAKA,QAAM,eAA+B,MAAY;AAC/C,YAAQ,WAAW,OAAO,UAAU;AAAA,EACtC;AAMA,QAAM,iBAAmC,MAAc;AACrD,UAAM,OAAO,QAAQ,QAAQ,OAAO,UAAU;AAC9C,WAAO,OAAO,IAAI,KAAK,CAAC,IAAI,CAAC,EAAE,OAAO;AAAA,EACxC;AAMA,QAAM,kBAAkB,OAAO,WAAwC;AAErE,WAAO,YAAY,UAAU,QAAQ,CAAC,aAAa;AACjD,+BAAyB,SAAS,UAAU,QAAQ;AAAA,IACtD,CAAC;AAGD,QAAI,gBAAqB,CAAA;AACzB,QAAI,OAAO,UAAU;AACnB,sBAAiB,MAAM,OAAO,SAAS,MAAM,KAAM,CAAA;AAAA,IACrD;AAIA,UAAM,cAAc,gBAAqB,OAAO,YAAY,OAAO;AAGnE,WAAO,YAAY,UAAU,QAAQ,CAAC,aAAa;AACjD,YAAM,MAAM,OAAO,OAAO,SAAS,QAAQ;AAC3C,YAAM,aAA8B;AAAA,QAClC,YAAY,aAAA;AAAA,QACZ,MAAM,SAAS;AAAA,MAAA;AAEjB,kBAAY,IAAI,KAAK,UAAU;AAAA,IACjC,CAAC;AAGD,kBAAc,WAAW;AAGzB,qBAAA;AAEA,WAAO;AAAA,EACT;AAEA,QAAM,kBAAkB,OAAO,WAAwC;AAErE,WAAO,YAAY,UAAU,QAAQ,CAAC,aAAa;AACjD,+BAAyB,SAAS,UAAU,QAAQ;AAAA,IACtD,CAAC;AAGD,QAAI,gBAAqB,CAAA;AACzB,QAAI,OAAO,UAAU;AACnB,sBAAiB,MAAM,OAAO,SAAS,MAAM,KAAM,CAAA;AAAA,IACrD;AAIA,UAAM,cAAc,gBAAqB,OAAO,YAAY,OAAO;AAGnE,WAAO,YAAY,UAAU,QAAQ,CAAC,aAAa;AACjD,YAAM,MAAM,OAAO,OAAO,SAAS,QAAQ;AAC3C,YAAM,aAA8B;AAAA,QAClC,YAAY,aAAA;AAAA,QACZ,MAAM,SAAS;AAAA,MAAA;AAEjB,kBAAY,IAAI,KAAK,UAAU;AAAA,IACjC,CAAC;AAGD,kBAAc,WAAW;AAGzB,qBAAA;AAEA,WAAO;AAAA,EACT;AAEA,QAAM,kBAAkB,OAAO,WAAwC;AAErE,QAAI,gBAAqB,CAAA;AACzB,QAAI,OAAO,UAAU;AACnB,sBAAiB,MAAM,OAAO,SAAS,MAAM,KAAM,CAAA;AAAA,IACrD;AAIA,UAAM,cAAc,gBAAqB,OAAO,YAAY,OAAO;AAGnE,WAAO,YAAY,UAAU,QAAQ,CAAC,aAAa;AAEjD,YAAM,MAAM,OAAO,OAAO,SAAS,QAAQ;AAC3C,kBAAY,OAAO,GAAG;AAAA,IACxB,CAAC;AAGD,kBAAc,WAAW;AAGzB,qBAAA;AAEA,WAAO;AAAA,EACT;AAGA,QAAM;AAAA,IACJ,YAAY;AAAA,IACZ,SAAS;AAAA,IACT,iBAAiB;AAAA,IACjB,UAAU;AAAA,IACV,UAAU;AAAA,IACV,UAAU;AAAA,IACV;AAAA,IACA,GAAG;AAAA,EAAA,IACD;AAGJ,QAAM,eAAe,MAAM,oBAAoB,OAAO,UAAU;AAKhE,QAAM,kBAAkB,CAAC,gBAEnB;AAGJ,UAAM,sBAAsB,YAAY,UAAU,OAAO,CAAC,MAAM;AAE9D,UAAI,KAAK,cAAc,EAAE,eAAe,KAAK,YAAY;AACvD,eAAO;AAAA,MACT;AAEA,aAAO,EAAE,WAAW,OAAO;AAAA,IAC7B,CAAC;AAED,QAAI,oBAAoB,WAAW,GAAG;AACpC;AAAA,IACF;AAGA,eAAW,YAAY,qBAAqB;AAC1C,cAAQ,SAAS,MAAA;AAAA,QACf,KAAK;AAAA,QACL,KAAK;AACH,mCAAyB,SAAS,UAAU,SAAS,IAAI;AACzD;AAAA,QACF,KAAK;AACH,mCAAyB,SAAS,UAAU,SAAS,IAAI;AACzD;AAAA,MAAA;AAAA,IAEN;AAGA,UAAM,cAAc;AAAA,MAClB,OAAO;AAAA,MACP;AAAA,IAAA;AAIF,eAAW,YAAY,qBAAqB;AAE1C,YAAM,MAAM,SAAS;AAErB,cAAQ,SAAS,MAAA;AAAA,QACf,KAAK;AAAA,QACL,KAAK,UAAU;AACb,gBAAM,aAAkD;AAAA,YACtD,YAAY,a
AAA;AAAA,YACZ,MAAM,SAAS;AAAA,UAAA;AAEjB,sBAAY,IAAI,KAAK,UAAU;AAC/B;AAAA,QACF;AAAA,QACA,KAAK,UAAU;AACb,sBAAY,OAAO,GAAG;AACtB;AAAA,QACF;AAAA,MAAA;AAAA,IAEJ;AAGA,kBAAc,WAAW;AAIzB,SAAK,sBAAsB,mBAAmB;AAAA,EAChD;AAEA,SAAO;AAAA,IACL,GAAG;AAAA,IACH,IAAI;AAAA,IACJ;AAAA,IACA,UAAU;AAAA,IACV,UAAU;AAAA,IACV,UAAU;AAAA,IACV,OAAO;AAAA,MACL;AAAA,MACA;AAAA,MACA;AAAA,IAAA;AAAA,EACF;AAEJ;AAQA,SAAS,gBACP,YACA,SACqC;AACrC,MAAI;AACF,UAAM,UAAU,QAAQ,QAAQ,UAAU;AAC1C,QAAI,CAAC,SAAS;AACZ,iCAAW,IAAA;AAAA,IACb;AAEA,UAAM,SAAS,KAAK,MAAM,OAAO;AACjC,UAAM,8BAAc,IAAA;AAGpB,QACE,OAAO,WAAW,YAClB,WAAW,QACX,CAAC,MAAM,QAAQ,MAAM,GACrB;AACA,aAAO,QAAQ,MAAM,EAAE,QAAQ,CAAC,CAAC,KAAK,KAAK,MAAM;AAE/C,YACE,SACA,OAAO,UAAU,YACjB,gBAAgB,SAChB,UAAU,OACV;AACA,gBAAM,aAAa;AACnB,kBAAQ,IAAI,KAAK,UAAU;AAAA,QAC7B,OAAO;AACL,gBAAM,IAAIC,OAAAA,8BAA8B,YAAY,GAAG;AAAA,QACzD;AAAA,MACF,CAAC;AAAA,IACH,OAAO;AACL,YAAM,IAAIC,OAAAA,gCAAgC,UAAU;AAAA,IACtD;AAEA,WAAO;AAAA,EACT,SAAS,OAAO;AACd,YAAQ;AAAA,MACN,iEAAiE,UAAU;AAAA,MAC3E;AAAA,IAAA;AAEF,+BAAW,IAAA;AAAA,EACb;AACF;AAYA,SAAS,uBACP,YACA,SACA,iBACA,SACA,eAKA;AACA,MAAI,aAA0D;AAC9D,MAAI,aAAkB;AAQtB,QAAM,cAAc,CAClB,SACA,YAKI;AACJ,UAAM,UAID,CAAA;AAGL,YAAQ,QAAQ,CAAC,eAAe,QAAQ;AACtC,YAAM,gBAAgB,QAAQ,IAAI,GAAG;AACrC,UAAI,CAAC,eAAe;AAClB,gBAAQ,KAAK,EAAE,MAAM,UAAU,KAAK,OAAO,cAAc,MAAM;AAAA,MACjE,WAAW,cAAc,eAAe,cAAc,YAAY;AAChE,gBAAQ,KAAK,EAAE,MAAM,UAAU,KAAK,OAAO,cAAc,MAAM;AAAA,MACjE;AAAA,IACF,CAAC;AAGD,YAAQ,QAAQ,CAAC,eAAe,QAAQ;AACtC,UAAI,CAAC,QAAQ,IAAI,GAAG,GAAG;AACrB,gBAAQ,KAAK,EAAE,MAAM,UAAU,KAAK,OAAO,cAAc,MAAM;AAAA,MACjE;AAAA,IACF,CAAC;AAED,WAAO;AAAA,EACT;AAMA,QAAM,wBAAwB,MAAM;AAClC,QAAI,CAAC,WAAY;AAEjB,UAAM,EAAE,OAAO,OAAO,OAAA,IAAW;AAGjC,UAAM,UAAU,gBAAmB,YAAY,OAAO;AAGtD,UAAM,UAAU,YAAY,eAAe,OAAO;AAElD,QAAI,QAAQ,SAAS,GAAG;AACtB,YAAA;AACA,cAAQ,QAAQ,CAAC,EAAE,MAAM,YAAY;AACnC,YAAI,OAAO;AACT,mCAAyB,OAAO,IAAI;AACpC,gBAAM,EAAE,MAAM,OAAO;AAAA,QACvB;AAAA,MACF,CAAC;AACD,aAAA;AAGA,oBAAc,MAAA;AACd,cAAQ,QAAQ,CAAC,YAAY,QAAQ;AACnC,sBAAc,IAAI,KAAK,UAAU;AAAA,MACnC,CAAC;AAAA,IACH;AAAA,EACF;AAEA,QAAM,aAGF;AAAA,IACF,MAAM,CAAC,WAAiD;AACtD,YAAM,EAAE,OAAO,OAAO,QAAQ,cAAc;AAG5C,mBAAa;AACb,mBAAa,OAAO;AAGpB,YAAM,cAAc,gBAAmB,YAAY,OAAO;AAC1D,UAAI,YAAY,OAAO,GAAG;AACxB,cAAA;AACA,oBAAY,QAAQ,CAAC,eAAe;AAClC,mCAAyB,WAAW,MAAM,MAAM;AAChD,gBAAM,EAAE,MAAM,UAAU,OAAO,WAAW,MAAM;AAAA,QAClD,CAAC;AACD,eAAA;AAAA,MACF;AAGA,oBAAc,MAAA;AACd,kBAAY,QAAQ,CAAC,YAAY,QAAQ;AACvC,sBAAc,IAAI,KAAK,UAAU;AAAA,MACnC,CAAC;AAGD,gBAAA;AAGA,YAAM,qBAAqB,CAAC,UAAwB;AAElD,YAAI,MAAM,QAAQ,cAAc,MAAM,gBAAgB,SAAS;AAC7D;AAAA,QACF;AAEA,8BAAA;AAAA,MACF;AAGA,sBAAgB,iBAAiB,WAAW,kBAAkB;AAAA,IAGhE;AAAA;AAAA;AAAA;AAAA;AAAA,IAMA,iBAAiB,OAAO;AAAA,MACtB;AAAA,MACA,aACE,aAAa,OAAO,WAAW,cAAc,OAAO,eAAe,QAC/D,iBACA;AAAA,IAAA;AAAA;AAAA,IAIR,eAAe;AAAA;AAAA,IAGf;AAAA,EAAA;AAQF,QAAM,wBAAwB,CAAC,cAA0B;AACvD,QAAI,CAAC,YAAY;AAEf;AAAA,IACF;AAEA,UAAM,EAAE,OAAO,OAAO,OAAA,IAAW;AAGjC,UAAA;AACA,cAAU,QAAQ,CAAC,aAAkB;AACnC,YAAM;AAAA,QACJ,MAAM,SAAS;AAAA,QACf,OACE,SAAS,SAAS,WAAW,SAAS,WAAW,SAAS;AAAA,MAAA,CAC7D;AAAA,IACH,CAAC;AACD,WAAA;AAAA,EACF;AAEA,SAAO;AAAA,IACL,GAAG;AAAA,IACH;AAAA,EAAA;AAEJ;;"}
@@ -1,4 +1,4 @@
- import { BaseCollectionConfig, CollectionConfig, InferSchemaOutput, UtilsRecord } from './types.cjs';
+ import { BaseCollectionConfig, CollectionConfig, InferSchemaOutput, PendingMutation, UtilsRecord } from './types.cjs';
  import { StandardSchemaV1 } from '@standard-schema/spec';
  /**
  * Storage API interface - subset of DOM Storage that we need
@@ -47,6 +47,26 @@ export type GetStorageSizeFn = () => number;
  export interface LocalStorageCollectionUtils extends UtilsRecord {
  clearStorage: ClearStorageFn;
  getStorageSize: GetStorageSizeFn;
+ /**
+ * Accepts mutations from a transaction that belong to this collection and persists them to localStorage.
+ * This should be called in your transaction's mutationFn to persist local-storage data.
+ *
+ * @param transaction - The transaction containing mutations to accept
+ * @example
+ * const localSettings = createCollection(localStorageCollectionOptions({...}))
+ *
+ * const tx = createTransaction({
+ * mutationFn: async ({ transaction }) => {
+ * // Make API call first
+ * await api.save(...)
+ * // Then persist local-storage mutations after success
+ * localSettings.utils.acceptMutations(transaction)
+ * }
+ * })
+ */
+ acceptMutations: (transaction: {
+ mutations: Array<PendingMutation<Record<string, unknown>>>;
+ }) => void;
  }
  /**
  * Creates localStorage collection options for use with a standard Collection
@@ -54,11 +74,17 @@ export interface LocalStorageCollectionUtils extends UtilsRecord {
  * This function creates a collection that persists data to localStorage/sessionStorage
  * and synchronizes changes across browser tabs using storage events.
  *
+ * **Using with Manual Transactions:**
+ *
+ * For manual transactions, you must call `utils.acceptMutations()` in your transaction's `mutationFn`
+ * to persist changes made during `tx.mutate()`. This is necessary because local-storage collections
+ * don't participate in the standard mutation handler flow for manual transactions.
+ *
  * @template TExplicit - The explicit type of items in the collection (highest priority)
  * @template TSchema - The schema type for validation and type inference (second priority)
  * @template TFallback - The fallback type if no explicit or schema type is provided
  * @param config - Configuration options for the localStorage collection
- * @returns Collection options with utilities including clearStorage and getStorageSize
+ * @returns Collection options with utilities including clearStorage, getStorageSize, and acceptMutations
  *
  * @example
  * // Basic localStorage collection
@@ -90,6 +116,33 @@ export interface LocalStorageCollectionUtils extends UtilsRecord {
  * },
  * })
  * )
+ *
+ * @example
+ * // Using with manual transactions
+ * const localSettings = createCollection(
+ * localStorageCollectionOptions({
+ * storageKey: 'user-settings',
+ * getKey: (item) => item.id,
+ * })
+ * )
+ *
+ * const tx = createTransaction({
+ * mutationFn: async ({ transaction }) => {
+ * // Use settings data in API call
+ * const settingsMutations = transaction.mutations.filter(m => m.collection === localSettings)
+ * await api.updateUserProfile({ settings: settingsMutations[0]?.modified })
+ *
+ * // Persist local-storage mutations after API success
+ * localSettings.utils.acceptMutations(transaction)
+ * }
+ * })
+ *
+ * tx.mutate(() => {
+ * localSettings.insert({ id: 'theme', value: 'dark' })
+ * apiCollection.insert({ id: 2, data: 'profile data' })
+ * })
+ *
+ * await tx.commit()
  */
  export declare function localStorageCollectionOptions<T extends StandardSchemaV1, TKey extends string | number = string | number>(config: LocalStorageCollectionConfig<InferSchemaOutput<T>, T, TKey> & {
  schema: T;
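
The hunks above document the new `acceptMutations` utility on localStorage collections. As a quick orientation for readers of this diff, here is a minimal, self-contained sketch of the manual-transaction flow those JSDoc examples describe; the `api.saveSettings` helper and the settings item shape are illustrative assumptions, not part of the package:

```ts
// Sketch of the documented manual-transaction flow for localStorage collections.
// `api.saveSettings` and the item shape are illustrative assumptions.
import {
  createCollection,
  createTransaction,
  localStorageCollectionOptions,
} from "@tanstack/db"

declare const api: { saveSettings: (settings: unknown) => Promise<void> }

const localSettings = createCollection(
  localStorageCollectionOptions({
    storageKey: `user-settings`,
    getKey: (item: { id: string; value: string }) => item.id,
  })
)

const tx = createTransaction({
  mutationFn: async ({ transaction }) => {
    // Send only this collection's mutations to the (hypothetical) API first
    const settingsMutations = transaction.mutations.filter(
      (m) => m.collection === localSettings
    )
    await api.saveSettings(settingsMutations.map((m) => m.modified))

    // Persist the local-storage mutations only after the API call succeeds
    localSettings.utils.acceptMutations(transaction)
  },
})

tx.mutate(() => {
  localSettings.insert({ id: `theme`, value: `dark` })
})

await tx.commit()
```

As the JSDoc puts it, the point of the pattern is ordering: the localStorage write is deferred until the work in `mutationFn` succeeds.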
@@ -11,6 +11,7 @@ class CollectionConfigBuilder {
  this.resultKeys = /* @__PURE__ */ new WeakMap();
  this.orderByIndices = /* @__PURE__ */ new WeakMap();
  this.isGraphRunning = false;
+ this.isInErrorState = false;
  this.subscriptions = {};
  this.lazyCollectionsCallbacks = {};
  this.lazyCollections = /* @__PURE__ */ new Set();
@@ -43,18 +44,21 @@ class CollectionConfigBuilder {
  // This gives the callback a chance to load more data if needed,
  // that's used to optimize orderBy operators that set a limit,
  // in order to load some more data if we still don't have enough rows after the pipeline has run.
- // That can happend because even though we load N rows, the pipeline might filter some of these rows out
+ // That can happen because even though we load N rows, the pipeline might filter some of these rows out
  // causing the orderBy operator to receive less than N rows or even no rows at all.
  // So this callback would notice that it doesn't have enough rows and load some more.
- // The callback returns a boolean, when it's true it's done loading data and we can mark the collection as ready.
+ // The callback returns a boolean, when it's true it's done loading data.
  maybeRunGraph(config, syncState, callback) {
  if (this.isGraphRunning) {
  return;
  }
  this.isGraphRunning = true;
  try {
- const { begin, commit, markReady } = config;
- if (this.allCollectionsReadyOrInitialCommit() && syncState.subscribedToAllCollections) {
+ const { begin, commit } = config;
+ if (this.isInErrorState) {
+ return;
+ }
+ if (syncState.subscribedToAllCollections) {
  while (syncState.graph.pendingWork()) {
  syncState.graph.run();
  callback?.();
@@ -62,9 +66,7 @@
  if (syncState.messagesCount === 0) {
  begin();
  commit();
- }
- if (this.allCollectionsReady()) {
- markReady();
+ this.updateLiveQueryStatus(config);
  }
  }
  } finally {
@@ -78,6 +80,7 @@
  };
  }
  syncFn(config) {
+ this.liveQueryCollection = config.collection;
  const syncState = {
  messagesCount: 0,
  subscribedToAllCollections: false,
@@ -190,16 +193,50 @@
  );
  }
  }
+ /**
+ * Handle status changes from source collections
+ */
+ handleSourceStatusChange(config, collectionId, event) {
+ const { status } = event;
+ if (status === `error`) {
+ this.transitionToError(
+ `Source collection '${collectionId}' entered error state`
+ );
+ return;
+ }
+ if (status === `cleaned-up`) {
+ this.transitionToError(
+ `Source collection '${collectionId}' was manually cleaned up while live query '${this.id}' depends on it. Live queries prevent automatic GC, so this was likely a manual cleanup() call.`
+ );
+ return;
+ }
+ this.updateLiveQueryStatus(config);
+ }
+ /**
+ * Update the live query status based on source collection statuses
+ */
+ updateLiveQueryStatus(config) {
+ const { markReady } = config;
+ if (this.isInErrorState) {
+ return;
+ }
+ if (this.allCollectionsReady()) {
+ markReady();
+ }
+ }
+ /**
+ * Transition the live query to error state
+ */
+ transitionToError(message) {
+ this.isInErrorState = true;
+ console.error(`[Live Query Error] ${message}`);
+ this.liveQueryCollection?._lifecycle.setStatus(`error`);
+ }
  allCollectionsReady() {
  return Object.values(this.collections).every(
  (collection) => collection.isReady()
  );
  }
- allCollectionsReadyOrInitialCommit() {
- return Object.values(this.collections).every(
- (collection) => collection.status === `ready` || collection.status === `initialCommit`
- );
- }
  subscribeToAllCollections(config, syncState) {
  const loaders = Object.entries(this.collections).map(
  ([collectionId, collection]) => {
@@ -212,6 +249,10 @@
  );
  const subscription = collectionSubscriber$1.subscribe();
  this.subscriptions[collectionId] = subscription;
+ const statusUnsubscribe = collection.on(`status:change`, (event) => {
+ this.handleSourceStatusChange(config, collectionId, event);
+ });
+ syncState.unsubscribeCallbacks.add(statusUnsubscribe);
  const loadMore = collectionSubscriber$1.loadMoreIfNeeded.bind(
  collectionSubscriber$1,
  subscription
@@ -224,6 +265,7 @@
  return true;
  };
  syncState.subscribedToAllCollections = true;
+ this.updateLiveQueryStatus(config);
  return loadMoreDataCallback;
  }
  }
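
The compiled changes above wire each source collection's `status:change` events into the live query: a source that reports `error` or `cleaned-up` now transitions the live query collection itself to `error`, and readiness is funneled through `updateLiveQueryStatus` instead of being checked inside the graph run. A hedged sketch of what that looks like from application code, assuming the usual `createLiveQueryCollection` entry point and that the same `status:change` event API used by the compiled code is available on the live query collection (`todos` is a stand-in local-only source):

```ts
// Sketch of observing the new error propagation from application code.
import {
  createCollection,
  createLiveQueryCollection,
  localOnlyCollectionOptions,
} from "@tanstack/db"

// A stand-in source collection (local-only just to keep the sketch self-contained)
const todos = createCollection(
  localOnlyCollectionOptions({
    getKey: (todo: { id: number; text: string }) => todo.id,
  })
)

// A live query over that source
const liveTodos = createLiveQueryCollection((q) => q.from({ todo: todos }))

// With this release, the live query transitions to `error` when any of its
// source collections errors or is manually cleaned up.
const unsubscribe = liveTodos.on(`status:change`, ({ status }) => {
  if (status === `error`) {
    console.warn(`live query entered error state`)
  }
})
```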
@@ -1 +1 @@
- {"version":3,"file":"collection-config-builder.cjs","sources":["../../../../src/query/live/collection-config-builder.ts"],"sourcesContent":["import { D2, output } from \"@tanstack/db-ivm\"\nimport { compileQuery } from \"../compiler/index.js\"\nimport { buildQuery, getQueryIR } from \"../builder/index.js\"\nimport { CollectionSubscriber } from \"./collection-subscriber.js\"\nimport type { CollectionSubscription } from \"../../collection/subscription.js\"\nimport type { RootStreamBuilder } from \"@tanstack/db-ivm\"\nimport type { OrderByOptimizationInfo } from \"../compiler/order-by.js\"\nimport type { Collection } from \"../../collection/index.js\"\nimport type {\n CollectionConfigSingleRowOption,\n KeyedStream,\n ResultStream,\n SyncConfig,\n} from \"../../types.js\"\nimport type { Context, GetResult } from \"../builder/types.js\"\nimport type { BasicExpression, QueryIR } from \"../ir.js\"\nimport type { LazyCollectionCallbacks } from \"../compiler/joins.js\"\nimport type {\n Changes,\n FullSyncState,\n LiveQueryCollectionConfig,\n SyncState,\n} from \"./types.js\"\n\n// Global counter for auto-generated collection IDs\nlet liveQueryCollectionCounter = 0\n\nexport class CollectionConfigBuilder<\n TContext extends Context,\n TResult extends object = GetResult<TContext>,\n> {\n private readonly id: string\n readonly query: QueryIR\n private readonly collections: Record<string, Collection<any, any, any>>\n\n // WeakMap to store the keys of the results\n // so that we can retrieve them in the getKey function\n private readonly resultKeys = new WeakMap<object, unknown>()\n\n // WeakMap to store the orderBy index for each result\n private readonly orderByIndices = new WeakMap<object, string>()\n\n private readonly compare?: (val1: TResult, val2: TResult) => number\n\n private isGraphRunning = false\n\n private graphCache: D2 | undefined\n private inputsCache: Record<string, RootStreamBuilder<unknown>> | undefined\n private pipelineCache: ResultStream | undefined\n public collectionWhereClausesCache:\n | Map<string, BasicExpression<boolean>>\n | undefined\n\n // Map of collection ID to subscription\n readonly subscriptions: Record<string, CollectionSubscription> = {}\n // Map of collection IDs to functions that load keys for that lazy collection\n lazyCollectionsCallbacks: Record<string, LazyCollectionCallbacks> = {}\n // Set of collection IDs that are lazy collections\n readonly lazyCollections = new Set<string>()\n // Set of collection IDs that include an optimizable ORDER BY clause\n optimizableOrderByCollections: Record<string, OrderByOptimizationInfo> = {}\n\n constructor(\n private readonly config: LiveQueryCollectionConfig<TContext, TResult>\n ) {\n // Generate a unique ID if not provided\n this.id = config.id || `live-query-${++liveQueryCollectionCounter}`\n\n this.query = buildQueryFromConfig(config)\n this.collections = extractCollectionsFromQuery(this.query)\n\n // Create compare function for ordering if the query has orderBy\n if (this.query.orderBy && this.query.orderBy.length > 0) {\n this.compare = createOrderByComparator<TResult>(this.orderByIndices)\n }\n\n // Compile the base pipeline once initially\n // This is done to ensure that any errors are thrown immediately and synchronously\n this.compileBasePipeline()\n }\n\n getConfig(): CollectionConfigSingleRowOption<TResult> {\n return {\n id: this.id,\n getKey:\n this.config.getKey ||\n ((item) => this.resultKeys.get(item) as string | number),\n sync: this.getSyncConfig(),\n compare: this.compare,\n gcTime: this.config.gcTime 
|| 5000, // 5 seconds by default for live queries\n schema: this.config.schema,\n onInsert: this.config.onInsert,\n onUpdate: this.config.onUpdate,\n onDelete: this.config.onDelete,\n startSync: this.config.startSync,\n singleResult: this.query.singleResult,\n }\n }\n\n // The callback function is called after the graph has run.\n // This gives the callback a chance to load more data if needed,\n // that's used to optimize orderBy operators that set a limit,\n // in order to load some more data if we still don't have enough rows after the pipeline has run.\n // That can happend because even though we load N rows, the pipeline might filter some of these rows out\n // causing the orderBy operator to receive less than N rows or even no rows at all.\n // So this callback would notice that it doesn't have enough rows and load some more.\n // The callback returns a boolean, when it's true it's done loading data and we can mark the collection as ready.\n maybeRunGraph(\n config: Parameters<SyncConfig<TResult>[`sync`]>[0],\n syncState: FullSyncState,\n callback?: () => boolean\n ) {\n if (this.isGraphRunning) {\n // no nested runs of the graph\n // which is possible if the `callback`\n // would call `maybeRunGraph` e.g. after it has loaded some more data\n return\n }\n\n this.isGraphRunning = true\n\n try {\n const { begin, commit, markReady } = config\n\n // We only run the graph if all the collections are ready\n if (\n this.allCollectionsReadyOrInitialCommit() &&\n syncState.subscribedToAllCollections\n ) {\n while (syncState.graph.pendingWork()) {\n syncState.graph.run()\n callback?.()\n }\n\n // On the initial run, we may need to do an empty commit to ensure that\n // the collection is initialized\n if (syncState.messagesCount === 0) {\n begin()\n commit()\n }\n // Mark the collection as ready after the first successful run\n if (this.allCollectionsReady()) {\n markReady()\n }\n }\n } finally {\n this.isGraphRunning = false\n }\n }\n\n private getSyncConfig(): SyncConfig<TResult> {\n return {\n rowUpdateMode: `full`,\n sync: this.syncFn.bind(this),\n }\n }\n\n private syncFn(config: Parameters<SyncConfig<TResult>[`sync`]>[0]) {\n const syncState: SyncState = {\n messagesCount: 0,\n subscribedToAllCollections: false,\n unsubscribeCallbacks: new Set<() => void>(),\n }\n\n // Extend the pipeline such that it applies the incoming changes to the collection\n const fullSyncState = this.extendPipelineWithChangeProcessing(\n config,\n syncState\n )\n\n const loadMoreDataCallbacks = this.subscribeToAllCollections(\n config,\n fullSyncState\n )\n\n // Initial run with callback to load more data if needed\n this.maybeRunGraph(config, fullSyncState, loadMoreDataCallbacks)\n\n // Return the unsubscribe function\n return () => {\n syncState.unsubscribeCallbacks.forEach((unsubscribe) => unsubscribe())\n\n // Reset caches so a fresh graph/pipeline is compiled on next start\n // This avoids reusing a finalized D2 graph across GC restarts\n this.graphCache = undefined\n this.inputsCache = undefined\n this.pipelineCache = undefined\n this.collectionWhereClausesCache = undefined\n\n // Reset lazy collection state\n this.lazyCollections.clear()\n this.optimizableOrderByCollections = {}\n this.lazyCollectionsCallbacks = {}\n }\n }\n\n private compileBasePipeline() {\n this.graphCache = new D2()\n this.inputsCache = Object.fromEntries(\n Object.entries(this.collections).map(([key]) => [\n key,\n this.graphCache!.newInput<any>(),\n ])\n )\n\n // Compile the query and get both pipeline and collection WHERE clauses\n 
const {\n pipeline: pipelineCache,\n collectionWhereClauses: collectionWhereClausesCache,\n } = compileQuery(\n this.query,\n this.inputsCache as Record<string, KeyedStream>,\n this.collections,\n this.subscriptions,\n this.lazyCollectionsCallbacks,\n this.lazyCollections,\n this.optimizableOrderByCollections\n )\n\n this.pipelineCache = pipelineCache\n this.collectionWhereClausesCache = collectionWhereClausesCache\n }\n\n private maybeCompileBasePipeline() {\n if (!this.graphCache || !this.inputsCache || !this.pipelineCache) {\n this.compileBasePipeline()\n }\n return {\n graph: this.graphCache!,\n inputs: this.inputsCache!,\n pipeline: this.pipelineCache!,\n }\n }\n\n private extendPipelineWithChangeProcessing(\n config: Parameters<SyncConfig<TResult>[`sync`]>[0],\n syncState: SyncState\n ): FullSyncState {\n const { begin, commit } = config\n const { graph, inputs, pipeline } = this.maybeCompileBasePipeline()\n\n pipeline.pipe(\n output((data) => {\n const messages = data.getInner()\n syncState.messagesCount += messages.length\n\n begin()\n messages\n .reduce(\n accumulateChanges<TResult>,\n new Map<unknown, Changes<TResult>>()\n )\n .forEach(this.applyChanges.bind(this, config))\n commit()\n })\n )\n\n graph.finalize()\n\n // Extend the sync state with the graph, inputs, and pipeline\n syncState.graph = graph\n syncState.inputs = inputs\n syncState.pipeline = pipeline\n\n return syncState as FullSyncState\n }\n\n private applyChanges(\n config: Parameters<SyncConfig<TResult>[`sync`]>[0],\n changes: {\n deletes: number\n inserts: number\n value: TResult\n orderByIndex: string | undefined\n },\n key: unknown\n ) {\n const { write, collection } = config\n const { deletes, inserts, value, orderByIndex } = changes\n\n // Store the key of the result so that we can retrieve it in the\n // getKey function\n this.resultKeys.set(value, key)\n\n // Store the orderBy index if it exists\n if (orderByIndex !== undefined) {\n this.orderByIndices.set(value, orderByIndex)\n }\n\n // Simple singular insert.\n if (inserts && deletes === 0) {\n write({\n value,\n type: `insert`,\n })\n } else if (\n // Insert & update(s) (updates are a delete & insert)\n inserts > deletes ||\n // Just update(s) but the item is already in the collection (so\n // was inserted previously).\n (inserts === deletes && collection.has(collection.getKeyFromItem(value)))\n ) {\n write({\n value,\n type: `update`,\n })\n // Only delete is left as an option\n } else if (deletes > 0) {\n write({\n value,\n type: `delete`,\n })\n } else {\n throw new Error(\n `Could not apply changes: ${JSON.stringify(changes)}. 
This should never happen.`\n )\n }\n }\n\n private allCollectionsReady() {\n return Object.values(this.collections).every((collection) =>\n collection.isReady()\n )\n }\n\n private allCollectionsReadyOrInitialCommit() {\n return Object.values(this.collections).every(\n (collection) =>\n collection.status === `ready` || collection.status === `initialCommit`\n )\n }\n\n private subscribeToAllCollections(\n config: Parameters<SyncConfig<TResult>[`sync`]>[0],\n syncState: FullSyncState\n ) {\n const loaders = Object.entries(this.collections).map(\n ([collectionId, collection]) => {\n const collectionSubscriber = new CollectionSubscriber(\n collectionId,\n collection,\n config,\n syncState,\n this\n )\n\n const subscription = collectionSubscriber.subscribe()\n this.subscriptions[collectionId] = subscription\n\n const loadMore = collectionSubscriber.loadMoreIfNeeded.bind(\n collectionSubscriber,\n subscription\n )\n\n return loadMore\n }\n )\n\n const loadMoreDataCallback = () => {\n loaders.map((loader) => loader())\n return true\n }\n\n // Mark the collections as subscribed in the sync state\n syncState.subscribedToAllCollections = true\n\n return loadMoreDataCallback\n }\n}\n\nfunction buildQueryFromConfig<TContext extends Context>(\n config: LiveQueryCollectionConfig<any, any>\n) {\n // Build the query using the provided query builder function or instance\n if (typeof config.query === `function`) {\n return buildQuery<TContext>(config.query)\n }\n return getQueryIR(config.query)\n}\n\nfunction createOrderByComparator<T extends object>(\n orderByIndices: WeakMap<object, string>\n) {\n return (val1: T, val2: T): number => {\n // Use the orderBy index stored in the WeakMap\n const index1 = orderByIndices.get(val1)\n const index2 = orderByIndices.get(val2)\n\n // Compare fractional indices lexicographically\n if (index1 && index2) {\n if (index1 < index2) {\n return -1\n } else if (index1 > index2) {\n return 1\n } else {\n return 0\n }\n }\n\n // Fallback to no ordering if indices are missing\n return 0\n }\n}\n\n/**\n * Helper function to extract collections from a compiled query\n * Traverses the query IR to find all collection references\n * Maps collections by their ID (not alias) as expected by the compiler\n */\nfunction extractCollectionsFromQuery(\n query: any\n): Record<string, Collection<any, any, any>> {\n const collections: Record<string, any> = {}\n\n // Helper function to recursively extract collections from a query or source\n function extractFromSource(source: any) {\n if (source.type === `collectionRef`) {\n collections[source.collection.id] = source.collection\n } else if (source.type === `queryRef`) {\n // Recursively extract from subquery\n extractFromQuery(source.query)\n }\n }\n\n // Helper function to recursively extract collections from a query\n function extractFromQuery(q: any) {\n // Extract from FROM clause\n if (q.from) {\n extractFromSource(q.from)\n }\n\n // Extract from JOIN clauses\n if (q.join && Array.isArray(q.join)) {\n for (const joinClause of q.join) {\n if (joinClause.from) {\n extractFromSource(joinClause.from)\n }\n }\n }\n }\n\n // Start extraction from the root query\n extractFromQuery(query)\n\n return collections\n}\n\nfunction accumulateChanges<T>(\n acc: Map<unknown, Changes<T>>,\n [[key, tupleData], multiplicity]: [\n [unknown, [any, string | undefined]],\n number,\n ]\n) {\n // All queries now consistently return [value, orderByIndex] format\n // where orderByIndex is undefined for queries without ORDER BY\n const [value, orderByIndex] = 
tupleData as [T, string | undefined]\n\n const changes = acc.get(key) || {\n deletes: 0,\n inserts: 0,\n value,\n orderByIndex,\n }\n if (multiplicity < 0) {\n changes.deletes += Math.abs(multiplicity)\n } else if (multiplicity > 0) {\n changes.inserts += multiplicity\n changes.value = value\n changes.orderByIndex = orderByIndex\n }\n acc.set(key, changes)\n return acc\n}\n"],"names":["D2","compileQuery","output","collectionSubscriber","CollectionSubscriber","buildQuery","getQueryIR"],"mappings":";;;;;;AAyBA,IAAI,6BAA6B;AAE1B,MAAM,wBAGX;AAAA,EAgCA,YACmB,QACjB;AADiB,SAAA,SAAA;AA1BnB,SAAiB,iCAAiB,QAAA;AAGlC,SAAiB,qCAAqB,QAAA;AAItC,SAAQ,iBAAiB;AAUzB,SAAS,gBAAwD,CAAA;AAEjE,SAAA,2BAAoE,CAAA;AAEpE,SAAS,sCAAsB,IAAA;AAE/B,SAAA,gCAAyE,CAAA;AAMvE,SAAK,KAAK,OAAO,MAAM,cAAc,EAAE,0BAA0B;AAEjE,SAAK,QAAQ,qBAAqB,MAAM;AACxC,SAAK,cAAc,4BAA4B,KAAK,KAAK;AAGzD,QAAI,KAAK,MAAM,WAAW,KAAK,MAAM,QAAQ,SAAS,GAAG;AACvD,WAAK,UAAU,wBAAiC,KAAK,cAAc;AAAA,IACrE;AAIA,SAAK,oBAAA;AAAA,EACP;AAAA,EAEA,YAAsD;AACpD,WAAO;AAAA,MACL,IAAI,KAAK;AAAA,MACT,QACE,KAAK,OAAO,WACX,CAAC,SAAS,KAAK,WAAW,IAAI,IAAI;AAAA,MACrC,MAAM,KAAK,cAAA;AAAA,MACX,SAAS,KAAK;AAAA,MACd,QAAQ,KAAK,OAAO,UAAU;AAAA;AAAA,MAC9B,QAAQ,KAAK,OAAO;AAAA,MACpB,UAAU,KAAK,OAAO;AAAA,MACtB,UAAU,KAAK,OAAO;AAAA,MACtB,UAAU,KAAK,OAAO;AAAA,MACtB,WAAW,KAAK,OAAO;AAAA,MACvB,cAAc,KAAK,MAAM;AAAA,IAAA;AAAA,EAE7B;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,EAUA,cACE,QACA,WACA,UACA;AACA,QAAI,KAAK,gBAAgB;AAIvB;AAAA,IACF;AAEA,SAAK,iBAAiB;AAEtB,QAAI;AACF,YAAM,EAAE,OAAO,QAAQ,UAAA,IAAc;AAGrC,UACE,KAAK,wCACL,UAAU,4BACV;AACA,eAAO,UAAU,MAAM,eAAe;AACpC,oBAAU,MAAM,IAAA;AAChB,qBAAA;AAAA,QACF;AAIA,YAAI,UAAU,kBAAkB,GAAG;AACjC,gBAAA;AACA,iBAAA;AAAA,QACF;AAEA,YAAI,KAAK,uBAAuB;AAC9B,oBAAA;AAAA,QACF;AAAA,MACF;AAAA,IACF,UAAA;AACE,WAAK,iBAAiB;AAAA,IACxB;AAAA,EACF;AAAA,EAEQ,gBAAqC;AAC3C,WAAO;AAAA,MACL,eAAe;AAAA,MACf,MAAM,KAAK,OAAO,KAAK,IAAI;AAAA,IAAA;AAAA,EAE/B;AAAA,EAEQ,OAAO,QAAoD;AACjE,UAAM,YAAuB;AAAA,MAC3B,eAAe;AAAA,MACf,4BAA4B;AAAA,MAC5B,0CAA0B,IAAA;AAAA,IAAgB;AAI5C,UAAM,gBAAgB,KAAK;AAAA,MACzB;AAAA,MACA;AAAA,IAAA;AAGF,UAAM,wBAAwB,KAAK;AAAA,MACjC;AAAA,MACA;AAAA,IAAA;AAIF,SAAK,cAAc,QAAQ,eAAe,qBAAqB;AAG/D,WAAO,MAAM;AACX,gBAAU,qBAAqB,QAAQ,CAAC,gBAAgB,aAAa;AAIrE,WAAK,aAAa;AAClB,WAAK,cAAc;AACnB,WAAK,gBAAgB;AACrB,WAAK,8BAA8B;AAGnC,WAAK,gBAAgB,MAAA;AACrB,WAAK,gCAAgC,CAAA;AACrC,WAAK,2BAA2B,CAAA;AAAA,IAClC;AAAA,EACF;AAAA,EAEQ,sBAAsB;AAC5B,SAAK,aAAa,IAAIA,SAAA;AACtB,SAAK,cAAc,OAAO;AAAA,MACxB,OAAO,QAAQ,KAAK,WAAW,EAAE,IAAI,CAAC,CAAC,GAAG,MAAM;AAAA,QAC9C;AAAA,QACA,KAAK,WAAY,SAAA;AAAA,MAAc,CAChC;AAAA,IAAA;AAIH,UAAM;AAAA,MACJ,UAAU;AAAA,MACV,wBAAwB;AAAA,IAAA,IACtBC,MAAAA;AAAAA,MACF,KAAK;AAAA,MACL,KAAK;AAAA,MACL,KAAK;AAAA,MACL,KAAK;AAAA,MACL,KAAK;AAAA,MACL,KAAK;AAAA,MACL,KAAK;AAAA,IAAA;AAGP,SAAK,gBAAgB;AACrB,SAAK,8BAA8B;AAAA,EACrC;AAAA,EAEQ,2BAA2B;AACjC,QAAI,CAAC,KAAK,cAAc,CAAC,KAAK,eAAe,CAAC,KAAK,eAAe;AAChE,WAAK,oBAAA;AAAA,IACP;AACA,WAAO;AAAA,MACL,OAAO,KAAK;AAAA,MACZ,QAAQ,KAAK;AAAA,MACb,UAAU,KAAK;AAAA,IAAA;AAAA,EAEnB;AAAA,EAEQ,mCACN,QACA,WACe;AACf,UAAM,EAAE,OAAO,OAAA,IAAW;AAC1B,UAAM,EAAE,OAAO,QAAQ,SAAA,IAAa,KAAK,yBAAA;AAEzC,aAAS;AAAA,MACPC,MAAAA,OAAO,CAAC,SAAS;AACf,cAAM,WAAW,KAAK,SAAA;AACtB,kBAAU,iBAAiB,SAAS;AAEpC,cAAA;AACA,iBACG;AAAA,UACC;AAAA,8BACI,IAAA;AAAA,QAA+B,EAEpC,QAAQ,KAAK,aAAa,KAAK,MAAM,MAAM,CAAC;AAC/C,eAAA;AAAA,MACF,CAAC;AAAA,IAAA;AAGH,UAAM,SAAA;AAGN,cAAU,QAAQ;AAClB,cAAU,SAAS;AACnB,cAAU,WAAW;AAErB,WAAO;AAAA,EACT;AAAA,EAEQ,aACN,QACA,SAMA,KACA;AACA,UAAM,EAAE,OAAO,WAAA,IAAe;AAC9B,UAAM,EAAE,SAAS,SAAS,OAAO,iBAAiB;AAIlD,SAAK,WAAW,IAAI,OAAO,GAAG;AAG9B,QAAI,iBAAiB,QAAW;AAC9B,WAAK,eAAe,IAAI,OAAO,YAAY;AAAA,IAC7C;AAGA,QAAI,WAA
W,YAAY,GAAG;AAC5B,YAAM;AAAA,QACJ;AAAA,QACA,MAAM;AAAA,MAAA,CACP;AAAA,IACH;AAAA;AAAA,MAEE,UAAU;AAAA;AAAA,MAGT,YAAY,WAAW,WAAW,IAAI,WAAW,eAAe,KAAK,CAAC;AAAA,MACvE;AACA,YAAM;AAAA,QACJ;AAAA,QACA,MAAM;AAAA,MAAA,CACP;AAAA,IAEH,WAAW,UAAU,GAAG;AACtB,YAAM;AAAA,QACJ;AAAA,QACA,MAAM;AAAA,MAAA,CACP;AAAA,IACH,OAAO;AACL,YAAM,IAAI;AAAA,QACR,4BAA4B,KAAK,UAAU,OAAO,CAAC;AAAA,MAAA;AAAA,IAEvD;AAAA,EACF;AAAA,EAEQ,sBAAsB;AAC5B,WAAO,OAAO,OAAO,KAAK,WAAW,EAAE;AAAA,MAAM,CAAC,eAC5C,WAAW,QAAA;AAAA,IAAQ;AAAA,EAEvB;AAAA,EAEQ,qCAAqC;AAC3C,WAAO,OAAO,OAAO,KAAK,WAAW,EAAE;AAAA,MACrC,CAAC,eACC,WAAW,WAAW,WAAW,WAAW,WAAW;AAAA,IAAA;AAAA,EAE7D;AAAA,EAEQ,0BACN,QACA,WACA;AACA,UAAM,UAAU,OAAO,QAAQ,KAAK,WAAW,EAAE;AAAA,MAC/C,CAAC,CAAC,cAAc,UAAU,MAAM;AAC9B,cAAMC,yBAAuB,IAAIC,qBAAAA;AAAAA,UAC/B;AAAA,UACA;AAAA,UACA;AAAA,UACA;AAAA,UACA;AAAA,QAAA;AAGF,cAAM,eAAeD,uBAAqB,UAAA;AAC1C,aAAK,cAAc,YAAY,IAAI;AAEnC,cAAM,WAAWA,uBAAqB,iBAAiB;AAAA,UACrDA;AAAAA,UACA;AAAA,QAAA;AAGF,eAAO;AAAA,MACT;AAAA,IAAA;AAGF,UAAM,uBAAuB,MAAM;AACjC,cAAQ,IAAI,CAAC,WAAW,OAAA,CAAQ;AAChC,aAAO;AAAA,IACT;AAGA,cAAU,6BAA6B;AAEvC,WAAO;AAAA,EACT;AACF;AAEA,SAAS,qBACP,QACA;AAEA,MAAI,OAAO,OAAO,UAAU,YAAY;AACtC,WAAOE,QAAAA,WAAqB,OAAO,KAAK;AAAA,EAC1C;AACA,SAAOC,QAAAA,WAAW,OAAO,KAAK;AAChC;AAEA,SAAS,wBACP,gBACA;AACA,SAAO,CAAC,MAAS,SAAoB;AAEnC,UAAM,SAAS,eAAe,IAAI,IAAI;AACtC,UAAM,SAAS,eAAe,IAAI,IAAI;AAGtC,QAAI,UAAU,QAAQ;AACpB,UAAI,SAAS,QAAQ;AACnB,eAAO;AAAA,MACT,WAAW,SAAS,QAAQ;AAC1B,eAAO;AAAA,MACT,OAAO;AACL,eAAO;AAAA,MACT;AAAA,IACF;AAGA,WAAO;AAAA,EACT;AACF;AAOA,SAAS,4BACP,OAC2C;AAC3C,QAAM,cAAmC,CAAA;AAGzC,WAAS,kBAAkB,QAAa;AACtC,QAAI,OAAO,SAAS,iBAAiB;AACnC,kBAAY,OAAO,WAAW,EAAE,IAAI,OAAO;AAAA,IAC7C,WAAW,OAAO,SAAS,YAAY;AAErC,uBAAiB,OAAO,KAAK;AAAA,IAC/B;AAAA,EACF;AAGA,WAAS,iBAAiB,GAAQ;AAEhC,QAAI,EAAE,MAAM;AACV,wBAAkB,EAAE,IAAI;AAAA,IAC1B;AAGA,QAAI,EAAE,QAAQ,MAAM,QAAQ,EAAE,IAAI,GAAG;AACnC,iBAAW,cAAc,EAAE,MAAM;AAC/B,YAAI,WAAW,MAAM;AACnB,4BAAkB,WAAW,IAAI;AAAA,QACnC;AAAA,MACF;AAAA,IACF;AAAA,EACF;AAGA,mBAAiB,KAAK;AAEtB,SAAO;AACT;AAEA,SAAS,kBACP,KACA,CAAC,CAAC,KAAK,SAAS,GAAG,YAAY,GAI/B;AAGA,QAAM,CAAC,OAAO,YAAY,IAAI;AAE9B,QAAM,UAAU,IAAI,IAAI,GAAG,KAAK;AAAA,IAC9B,SAAS;AAAA,IACT,SAAS;AAAA,IACT;AAAA,IACA;AAAA,EAAA;AAEF,MAAI,eAAe,GAAG;AACpB,YAAQ,WAAW,KAAK,IAAI,YAAY;AAAA,EAC1C,WAAW,eAAe,GAAG;AAC3B,YAAQ,WAAW;AACnB,YAAQ,QAAQ;AAChB,YAAQ,eAAe;AAAA,EACzB;AACA,MAAI,IAAI,KAAK,OAAO;AACpB,SAAO;AACT;;"}
+ {"version":3,"file":"collection-config-builder.cjs","sources":["../../../../src/query/live/collection-config-builder.ts"],"sourcesContent":["import { D2, output } from \"@tanstack/db-ivm\"\nimport { compileQuery } from \"../compiler/index.js\"\nimport { buildQuery, getQueryIR } from \"../builder/index.js\"\nimport { CollectionSubscriber } from \"./collection-subscriber.js\"\nimport type { CollectionSubscription } from \"../../collection/subscription.js\"\nimport type { RootStreamBuilder } from \"@tanstack/db-ivm\"\nimport type { OrderByOptimizationInfo } from \"../compiler/order-by.js\"\nimport type { Collection } from \"../../collection/index.js\"\nimport type {\n CollectionConfigSingleRowOption,\n KeyedStream,\n ResultStream,\n SyncConfig,\n} from \"../../types.js\"\nimport type { Context, GetResult } from \"../builder/types.js\"\nimport type { BasicExpression, QueryIR } from \"../ir.js\"\nimport type { LazyCollectionCallbacks } from \"../compiler/joins.js\"\nimport type {\n Changes,\n FullSyncState,\n LiveQueryCollectionConfig,\n SyncState,\n} from \"./types.js\"\nimport type { AllCollectionEvents } from \"../../collection/events.js\"\n\n// Global counter for auto-generated collection IDs\nlet liveQueryCollectionCounter = 0\n\ntype SyncMethods<TResult extends object> = Parameters<\n SyncConfig<TResult>[`sync`]\n>[0]\n\nexport class CollectionConfigBuilder<\n TContext extends Context,\n TResult extends object = GetResult<TContext>,\n> {\n private readonly id: string\n readonly query: QueryIR\n private readonly collections: Record<string, Collection<any, any, any>>\n\n // WeakMap to store the keys of the results\n // so that we can retrieve them in the getKey function\n private readonly resultKeys = new WeakMap<object, unknown>()\n\n // WeakMap to store the orderBy index for each result\n private readonly orderByIndices = new WeakMap<object, string>()\n\n private readonly compare?: (val1: TResult, val2: TResult) => number\n\n private isGraphRunning = false\n\n // Error state tracking\n private isInErrorState = false\n\n // Reference to the live query collection for error state transitions\n private liveQueryCollection?: Collection<TResult, any, any>\n\n private graphCache: D2 | undefined\n private inputsCache: Record<string, RootStreamBuilder<unknown>> | undefined\n private pipelineCache: ResultStream | undefined\n public collectionWhereClausesCache:\n | Map<string, BasicExpression<boolean>>\n | undefined\n\n // Map of collection ID to subscription\n readonly subscriptions: Record<string, CollectionSubscription> = {}\n // Map of collection IDs to functions that load keys for that lazy collection\n lazyCollectionsCallbacks: Record<string, LazyCollectionCallbacks> = {}\n // Set of collection IDs that are lazy collections\n readonly lazyCollections = new Set<string>()\n // Set of collection IDs that include an optimizable ORDER BY clause\n optimizableOrderByCollections: Record<string, OrderByOptimizationInfo> = {}\n\n constructor(\n private readonly config: LiveQueryCollectionConfig<TContext, TResult>\n ) {\n // Generate a unique ID if not provided\n this.id = config.id || `live-query-${++liveQueryCollectionCounter}`\n\n this.query = buildQueryFromConfig(config)\n this.collections = extractCollectionsFromQuery(this.query)\n\n // Create compare function for ordering if the query has orderBy\n if (this.query.orderBy && this.query.orderBy.length > 0) {\n this.compare = createOrderByComparator<TResult>(this.orderByIndices)\n }\n\n // Compile the base pipeline once initially\n // This is done 
to ensure that any errors are thrown immediately and synchronously\n this.compileBasePipeline()\n }\n\n getConfig(): CollectionConfigSingleRowOption<TResult> {\n return {\n id: this.id,\n getKey:\n this.config.getKey ||\n ((item) => this.resultKeys.get(item) as string | number),\n sync: this.getSyncConfig(),\n compare: this.compare,\n gcTime: this.config.gcTime || 5000, // 5 seconds by default for live queries\n schema: this.config.schema,\n onInsert: this.config.onInsert,\n onUpdate: this.config.onUpdate,\n onDelete: this.config.onDelete,\n startSync: this.config.startSync,\n singleResult: this.query.singleResult,\n }\n }\n\n // The callback function is called after the graph has run.\n // This gives the callback a chance to load more data if needed,\n // that's used to optimize orderBy operators that set a limit,\n // in order to load some more data if we still don't have enough rows after the pipeline has run.\n // That can happen because even though we load N rows, the pipeline might filter some of these rows out\n // causing the orderBy operator to receive less than N rows or even no rows at all.\n // So this callback would notice that it doesn't have enough rows and load some more.\n // The callback returns a boolean, when it's true it's done loading data.\n maybeRunGraph(\n config: SyncMethods<TResult>,\n syncState: FullSyncState,\n callback?: () => boolean\n ) {\n if (this.isGraphRunning) {\n // no nested runs of the graph\n // which is possible if the `callback`\n // would call `maybeRunGraph` e.g. after it has loaded some more data\n return\n }\n\n this.isGraphRunning = true\n\n try {\n const { begin, commit } = config\n\n // Don't run if the live query is in an error state\n if (this.isInErrorState) {\n return\n }\n\n // Always run the graph if subscribed (eager execution)\n if (syncState.subscribedToAllCollections) {\n while (syncState.graph.pendingWork()) {\n syncState.graph.run()\n callback?.()\n }\n\n // On the initial run, we may need to do an empty commit to ensure that\n // the collection is initialized\n if (syncState.messagesCount === 0) {\n begin()\n commit()\n // After initial commit, check if we should mark ready\n // (in case all sources were already ready before we subscribed)\n this.updateLiveQueryStatus(config)\n }\n }\n } finally {\n this.isGraphRunning = false\n }\n }\n\n private getSyncConfig(): SyncConfig<TResult> {\n return {\n rowUpdateMode: `full`,\n sync: this.syncFn.bind(this),\n }\n }\n\n private syncFn(config: SyncMethods<TResult>) {\n // Store reference to the live query collection for error state transitions\n this.liveQueryCollection = config.collection\n\n const syncState: SyncState = {\n messagesCount: 0,\n subscribedToAllCollections: false,\n unsubscribeCallbacks: new Set<() => void>(),\n }\n\n // Extend the pipeline such that it applies the incoming changes to the collection\n const fullSyncState = this.extendPipelineWithChangeProcessing(\n config,\n syncState\n )\n\n const loadMoreDataCallbacks = this.subscribeToAllCollections(\n config,\n fullSyncState\n )\n\n // Initial run with callback to load more data if needed\n this.maybeRunGraph(config, fullSyncState, loadMoreDataCallbacks)\n\n // Return the unsubscribe function\n return () => {\n syncState.unsubscribeCallbacks.forEach((unsubscribe) => unsubscribe())\n\n // Reset caches so a fresh graph/pipeline is compiled on next start\n // This avoids reusing a finalized D2 graph across GC restarts\n this.graphCache = undefined\n this.inputsCache = undefined\n this.pipelineCache = undefined\n 
this.collectionWhereClausesCache = undefined\n\n // Reset lazy collection state\n this.lazyCollections.clear()\n this.optimizableOrderByCollections = {}\n this.lazyCollectionsCallbacks = {}\n }\n }\n\n private compileBasePipeline() {\n this.graphCache = new D2()\n this.inputsCache = Object.fromEntries(\n Object.entries(this.collections).map(([key]) => [\n key,\n this.graphCache!.newInput<any>(),\n ])\n )\n\n // Compile the query and get both pipeline and collection WHERE clauses\n const {\n pipeline: pipelineCache,\n collectionWhereClauses: collectionWhereClausesCache,\n } = compileQuery(\n this.query,\n this.inputsCache as Record<string, KeyedStream>,\n this.collections,\n this.subscriptions,\n this.lazyCollectionsCallbacks,\n this.lazyCollections,\n this.optimizableOrderByCollections\n )\n\n this.pipelineCache = pipelineCache\n this.collectionWhereClausesCache = collectionWhereClausesCache\n }\n\n private maybeCompileBasePipeline() {\n if (!this.graphCache || !this.inputsCache || !this.pipelineCache) {\n this.compileBasePipeline()\n }\n return {\n graph: this.graphCache!,\n inputs: this.inputsCache!,\n pipeline: this.pipelineCache!,\n }\n }\n\n private extendPipelineWithChangeProcessing(\n config: SyncMethods<TResult>,\n syncState: SyncState\n ): FullSyncState {\n const { begin, commit } = config\n const { graph, inputs, pipeline } = this.maybeCompileBasePipeline()\n\n pipeline.pipe(\n output((data) => {\n const messages = data.getInner()\n syncState.messagesCount += messages.length\n\n begin()\n messages\n .reduce(\n accumulateChanges<TResult>,\n new Map<unknown, Changes<TResult>>()\n )\n .forEach(this.applyChanges.bind(this, config))\n commit()\n })\n )\n\n graph.finalize()\n\n // Extend the sync state with the graph, inputs, and pipeline\n syncState.graph = graph\n syncState.inputs = inputs\n syncState.pipeline = pipeline\n\n return syncState as FullSyncState\n }\n\n private applyChanges(\n config: SyncMethods<TResult>,\n changes: {\n deletes: number\n inserts: number\n value: TResult\n orderByIndex: string | undefined\n },\n key: unknown\n ) {\n const { write, collection } = config\n const { deletes, inserts, value, orderByIndex } = changes\n\n // Store the key of the result so that we can retrieve it in the\n // getKey function\n this.resultKeys.set(value, key)\n\n // Store the orderBy index if it exists\n if (orderByIndex !== undefined) {\n this.orderByIndices.set(value, orderByIndex)\n }\n\n // Simple singular insert.\n if (inserts && deletes === 0) {\n write({\n value,\n type: `insert`,\n })\n } else if (\n // Insert & update(s) (updates are a delete & insert)\n inserts > deletes ||\n // Just update(s) but the item is already in the collection (so\n // was inserted previously).\n (inserts === deletes && collection.has(collection.getKeyFromItem(value)))\n ) {\n write({\n value,\n type: `update`,\n })\n // Only delete is left as an option\n } else if (deletes > 0) {\n write({\n value,\n type: `delete`,\n })\n } else {\n throw new Error(\n `Could not apply changes: ${JSON.stringify(changes)}. 
This should never happen.`\n )\n }\n }\n\n /**\n * Handle status changes from source collections\n */\n private handleSourceStatusChange(\n config: SyncMethods<TResult>,\n collectionId: string,\n event: AllCollectionEvents[`status:change`]\n ) {\n const { status } = event\n\n // Handle error state - any source collection in error puts live query in error\n if (status === `error`) {\n this.transitionToError(\n `Source collection '${collectionId}' entered error state`\n )\n return\n }\n\n // Handle manual cleanup - this should not happen due to GC prevention,\n // but could happen if user manually calls cleanup()\n if (status === `cleaned-up`) {\n this.transitionToError(\n `Source collection '${collectionId}' was manually cleaned up while live query '${this.id}' depends on it. ` +\n `Live queries prevent automatic GC, so this was likely a manual cleanup() call.`\n )\n return\n }\n\n // Update ready status based on all source collections\n this.updateLiveQueryStatus(config)\n }\n\n /**\n * Update the live query status based on source collection statuses\n */\n private updateLiveQueryStatus(config: SyncMethods<TResult>) {\n const { markReady } = config\n\n // Don't update status if already in error\n if (this.isInErrorState) {\n return\n }\n\n // Mark ready when all source collections are ready\n if (this.allCollectionsReady()) {\n markReady()\n }\n }\n\n /**\n * Transition the live query to error state\n */\n private transitionToError(message: string) {\n this.isInErrorState = true\n\n // Log error to console for debugging\n console.error(`[Live Query Error] ${message}`)\n\n // Transition live query collection to error state\n this.liveQueryCollection?._lifecycle.setStatus(`error`)\n }\n\n private allCollectionsReady() {\n return Object.values(this.collections).every((collection) =>\n collection.isReady()\n )\n }\n\n private subscribeToAllCollections(\n config: SyncMethods<TResult>,\n syncState: FullSyncState\n ) {\n const loaders = Object.entries(this.collections).map(\n ([collectionId, collection]) => {\n const collectionSubscriber = new CollectionSubscriber(\n collectionId,\n collection,\n config,\n syncState,\n this\n )\n\n const subscription = collectionSubscriber.subscribe()\n this.subscriptions[collectionId] = subscription\n\n // Subscribe to status changes for status flow\n const statusUnsubscribe = collection.on(`status:change`, (event) => {\n this.handleSourceStatusChange(config, collectionId, event)\n })\n syncState.unsubscribeCallbacks.add(statusUnsubscribe)\n\n const loadMore = collectionSubscriber.loadMoreIfNeeded.bind(\n collectionSubscriber,\n subscription\n )\n\n return loadMore\n }\n )\n\n const loadMoreDataCallback = () => {\n loaders.map((loader) => loader())\n return true\n }\n\n // Mark the collections as subscribed in the sync state\n syncState.subscribedToAllCollections = true\n\n // Initial status check after all subscriptions are set up\n this.updateLiveQueryStatus(config)\n\n return loadMoreDataCallback\n }\n}\n\nfunction buildQueryFromConfig<TContext extends Context>(\n config: LiveQueryCollectionConfig<any, any>\n) {\n // Build the query using the provided query builder function or instance\n if (typeof config.query === `function`) {\n return buildQuery<TContext>(config.query)\n }\n return getQueryIR(config.query)\n}\n\nfunction createOrderByComparator<T extends object>(\n orderByIndices: WeakMap<object, string>\n) {\n return (val1: T, val2: T): number => {\n // Use the orderBy index stored in the WeakMap\n const index1 = orderByIndices.get(val1)\n const index2 = 
orderByIndices.get(val2)\n\n // Compare fractional indices lexicographically\n if (index1 && index2) {\n if (index1 < index2) {\n return -1\n } else if (index1 > index2) {\n return 1\n } else {\n return 0\n }\n }\n\n // Fallback to no ordering if indices are missing\n return 0\n }\n}\n\n/**\n * Helper function to extract collections from a compiled query\n * Traverses the query IR to find all collection references\n * Maps collections by their ID (not alias) as expected by the compiler\n */\nfunction extractCollectionsFromQuery(\n query: any\n): Record<string, Collection<any, any, any>> {\n const collections: Record<string, any> = {}\n\n // Helper function to recursively extract collections from a query or source\n function extractFromSource(source: any) {\n if (source.type === `collectionRef`) {\n collections[source.collection.id] = source.collection\n } else if (source.type === `queryRef`) {\n // Recursively extract from subquery\n extractFromQuery(source.query)\n }\n }\n\n // Helper function to recursively extract collections from a query\n function extractFromQuery(q: any) {\n // Extract from FROM clause\n if (q.from) {\n extractFromSource(q.from)\n }\n\n // Extract from JOIN clauses\n if (q.join && Array.isArray(q.join)) {\n for (const joinClause of q.join) {\n if (joinClause.from) {\n extractFromSource(joinClause.from)\n }\n }\n }\n }\n\n // Start extraction from the root query\n extractFromQuery(query)\n\n return collections\n}\n\nfunction accumulateChanges<T>(\n acc: Map<unknown, Changes<T>>,\n [[key, tupleData], multiplicity]: [\n [unknown, [any, string | undefined]],\n number,\n ]\n) {\n // All queries now consistently return [value, orderByIndex] format\n // where orderByIndex is undefined for queries without ORDER BY\n const [value, orderByIndex] = tupleData as [T, string | undefined]\n\n const changes = acc.get(key) || {\n deletes: 0,\n inserts: 0,\n value,\n orderByIndex,\n }\n if (multiplicity < 0) {\n changes.deletes += Math.abs(multiplicity)\n } else if (multiplicity > 0) {\n changes.inserts += multiplicity\n changes.value = value\n changes.orderByIndex = orderByIndex\n }\n acc.set(key, changes)\n return 
acc\n}\n"],"names":["D2","compileQuery","output","collectionSubscriber","CollectionSubscriber","buildQuery","getQueryIR"],"mappings":";;;;;;AA0BA,IAAI,6BAA6B;AAM1B,MAAM,wBAGX;AAAA,EAsCA,YACmB,QACjB;AADiB,SAAA,SAAA;AAhCnB,SAAiB,iCAAiB,QAAA;AAGlC,SAAiB,qCAAqB,QAAA;AAItC,SAAQ,iBAAiB;AAGzB,SAAQ,iBAAiB;AAazB,SAAS,gBAAwD,CAAA;AAEjE,SAAA,2BAAoE,CAAA;AAEpE,SAAS,sCAAsB,IAAA;AAE/B,SAAA,gCAAyE,CAAA;AAMvE,SAAK,KAAK,OAAO,MAAM,cAAc,EAAE,0BAA0B;AAEjE,SAAK,QAAQ,qBAAqB,MAAM;AACxC,SAAK,cAAc,4BAA4B,KAAK,KAAK;AAGzD,QAAI,KAAK,MAAM,WAAW,KAAK,MAAM,QAAQ,SAAS,GAAG;AACvD,WAAK,UAAU,wBAAiC,KAAK,cAAc;AAAA,IACrE;AAIA,SAAK,oBAAA;AAAA,EACP;AAAA,EAEA,YAAsD;AACpD,WAAO;AAAA,MACL,IAAI,KAAK;AAAA,MACT,QACE,KAAK,OAAO,WACX,CAAC,SAAS,KAAK,WAAW,IAAI,IAAI;AAAA,MACrC,MAAM,KAAK,cAAA;AAAA,MACX,SAAS,KAAK;AAAA,MACd,QAAQ,KAAK,OAAO,UAAU;AAAA;AAAA,MAC9B,QAAQ,KAAK,OAAO;AAAA,MACpB,UAAU,KAAK,OAAO;AAAA,MACtB,UAAU,KAAK,OAAO;AAAA,MACtB,UAAU,KAAK,OAAO;AAAA,MACtB,WAAW,KAAK,OAAO;AAAA,MACvB,cAAc,KAAK,MAAM;AAAA,IAAA;AAAA,EAE7B;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,EAUA,cACE,QACA,WACA,UACA;AACA,QAAI,KAAK,gBAAgB;AAIvB;AAAA,IACF;AAEA,SAAK,iBAAiB;AAEtB,QAAI;AACF,YAAM,EAAE,OAAO,OAAA,IAAW;AAG1B,UAAI,KAAK,gBAAgB;AACvB;AAAA,MACF;AAGA,UAAI,UAAU,4BAA4B;AACxC,eAAO,UAAU,MAAM,eAAe;AACpC,oBAAU,MAAM,IAAA;AAChB,qBAAA;AAAA,QACF;AAIA,YAAI,UAAU,kBAAkB,GAAG;AACjC,gBAAA;AACA,iBAAA;AAGA,eAAK,sBAAsB,MAAM;AAAA,QACnC;AAAA,MACF;AAAA,IACF,UAAA;AACE,WAAK,iBAAiB;AAAA,IACxB;AAAA,EACF;AAAA,EAEQ,gBAAqC;AAC3C,WAAO;AAAA,MACL,eAAe;AAAA,MACf,MAAM,KAAK,OAAO,KAAK,IAAI;AAAA,IAAA;AAAA,EAE/B;AAAA,EAEQ,OAAO,QAA8B;AAE3C,SAAK,sBAAsB,OAAO;AAElC,UAAM,YAAuB;AAAA,MAC3B,eAAe;AAAA,MACf,4BAA4B;AAAA,MAC5B,0CAA0B,IAAA;AAAA,IAAgB;AAI5C,UAAM,gBAAgB,KAAK;AAAA,MACzB;AAAA,MACA;AAAA,IAAA;AAGF,UAAM,wBAAwB,KAAK;AAAA,MACjC;AAAA,MACA;AAAA,IAAA;AAIF,SAAK,cAAc,QAAQ,eAAe,qBAAqB;AAG/D,WAAO,MAAM;AACX,gBAAU,qBAAqB,QAAQ,CAAC,gBAAgB,aAAa;AAIrE,WAAK,aAAa;AAClB,WAAK,cAAc;AACnB,WAAK,gBAAgB;AACrB,WAAK,8BAA8B;AAGnC,WAAK,gBAAgB,MAAA;AACrB,WAAK,gCAAgC,CAAA;AACrC,WAAK,2BAA2B,CAAA;AAAA,IAClC;AAAA,EACF;AAAA,EAEQ,sBAAsB;AAC5B,SAAK,aAAa,IAAIA,SAAA;AACtB,SAAK,cAAc,OAAO;AAAA,MACxB,OAAO,QAAQ,KAAK,WAAW,EAAE,IAAI,CAAC,CAAC,GAAG,MAAM;AAAA,QAC9C;AAAA,QACA,KAAK,WAAY,SAAA;AAAA,MAAc,CAChC;AAAA,IAAA;AAIH,UAAM;AAAA,MACJ,UAAU;AAAA,MACV,wBAAwB;AAAA,IAAA,IACtBC,MAAAA;AAAAA,MACF,KAAK;AAAA,MACL,KAAK;AAAA,MACL,KAAK;AAAA,MACL,KAAK;AAAA,MACL,KAAK;AAAA,MACL,KAAK;AAAA,MACL,KAAK;AAAA,IAAA;AAGP,SAAK,gBAAgB;AACrB,SAAK,8BAA8B;AAAA,EACrC;AAAA,EAEQ,2BAA2B;AACjC,QAAI,CAAC,KAAK,cAAc,CAAC,KAAK,eAAe,CAAC,KAAK,eAAe;AAChE,WAAK,oBAAA;AAAA,IACP;AACA,WAAO;AAAA,MACL,OAAO,KAAK;AAAA,MACZ,QAAQ,KAAK;AAAA,MACb,UAAU,KAAK;AAAA,IAAA;AAAA,EAEnB;AAAA,EAEQ,mCACN,QACA,WACe;AACf,UAAM,EAAE,OAAO,OAAA,IAAW;AAC1B,UAAM,EAAE,OAAO,QAAQ,SAAA,IAAa,KAAK,yBAAA;AAEzC,aAAS;AAAA,MACPC,MAAAA,OAAO,CAAC,SAAS;AACf,cAAM,WAAW,KAAK,SAAA;AACtB,kBAAU,iBAAiB,SAAS;AAEpC,cAAA;AACA,iBACG;AAAA,UACC;AAAA,8BACI,IAAA;AAAA,QAA+B,EAEpC,QAAQ,KAAK,aAAa,KAAK,MAAM,MAAM,CAAC;AAC/C,eAAA;AAAA,MACF,CAAC;AAAA,IAAA;AAGH,UAAM,SAAA;AAGN,cAAU,QAAQ;AAClB,cAAU,SAAS;AACnB,cAAU,WAAW;AAErB,WAAO;AAAA,EACT;AAAA,EAEQ,aACN,QACA,SAMA,KACA;AACA,UAAM,EAAE,OAAO,WAAA,IAAe;AAC9B,UAAM,EAAE,SAAS,SAAS,OAAO,iBAAiB;AAIlD,SAAK,WAAW,IAAI,OAAO,GAAG;AAG9B,QAAI,iBAAiB,QAAW;AAC9B,WAAK,eAAe,IAAI,OAAO,YAAY;AAAA,IAC7C;AAGA,QAAI,WAAW,YAAY,GAAG;AAC5B,YAAM;AAAA,QACJ;AAAA,QACA,MAAM;AAAA,MAAA,CACP;AAAA,IACH;AAAA;AAAA,MAEE,UAAU;AAAA;AAAA,MAGT,YAAY,WAAW,WAAW,IAAI,WAAW,eAAe,KAAK,CAAC;AAAA,MACvE;AACA,YAAM;AAAA,QACJ;AAAA,QACA,MAAM;AAAA,MAAA,CACP;AAAA,IAEH,WAAW,UAAU,GAAG;AACtB,YAAM;AAAA,QACJ;AAAA,QACA,MAAM;AAAA,MAAA,CACP;AAAA,IACH,OAAO;AACL,YAAM,IAAI;AAAA,QACR
,4BAA4B,KAAK,UAAU,OAAO,CAAC;AAAA,MAAA;AAAA,IAEvD;AAAA,EACF;AAAA;AAAA;AAAA;AAAA,EAKQ,yBACN,QACA,cACA,OACA;AACA,UAAM,EAAE,WAAW;AAGnB,QAAI,WAAW,SAAS;AACtB,WAAK;AAAA,QACH,sBAAsB,YAAY;AAAA,MAAA;AAEpC;AAAA,IACF;AAIA,QAAI,WAAW,cAAc;AAC3B,WAAK;AAAA,QACH,sBAAsB,YAAY,+CAA+C,KAAK,EAAE;AAAA,MAAA;AAG1F;AAAA,IACF;AAGA,SAAK,sBAAsB,MAAM;AAAA,EACnC;AAAA;AAAA;AAAA;AAAA,EAKQ,sBAAsB,QAA8B;AAC1D,UAAM,EAAE,cAAc;AAGtB,QAAI,KAAK,gBAAgB;AACvB;AAAA,IACF;AAGA,QAAI,KAAK,uBAAuB;AAC9B,gBAAA;AAAA,IACF;AAAA,EACF;AAAA;AAAA;AAAA;AAAA,EAKQ,kBAAkB,SAAiB;AACzC,SAAK,iBAAiB;AAGtB,YAAQ,MAAM,sBAAsB,OAAO,EAAE;AAG7C,SAAK,qBAAqB,WAAW,UAAU,OAAO;AAAA,EACxD;AAAA,EAEQ,sBAAsB;AAC5B,WAAO,OAAO,OAAO,KAAK,WAAW,EAAE;AAAA,MAAM,CAAC,eAC5C,WAAW,QAAA;AAAA,IAAQ;AAAA,EAEvB;AAAA,EAEQ,0BACN,QACA,WACA;AACA,UAAM,UAAU,OAAO,QAAQ,KAAK,WAAW,EAAE;AAAA,MAC/C,CAAC,CAAC,cAAc,UAAU,MAAM;AAC9B,cAAMC,yBAAuB,IAAIC,qBAAAA;AAAAA,UAC/B;AAAA,UACA;AAAA,UACA;AAAA,UACA;AAAA,UACA;AAAA,QAAA;AAGF,cAAM,eAAeD,uBAAqB,UAAA;AAC1C,aAAK,cAAc,YAAY,IAAI;AAGnC,cAAM,oBAAoB,WAAW,GAAG,iBAAiB,CAAC,UAAU;AAClE,eAAK,yBAAyB,QAAQ,cAAc,KAAK;AAAA,QAC3D,CAAC;AACD,kBAAU,qBAAqB,IAAI,iBAAiB;AAEpD,cAAM,WAAWA,uBAAqB,iBAAiB;AAAA,UACrDA;AAAAA,UACA;AAAA,QAAA;AAGF,eAAO;AAAA,MACT;AAAA,IAAA;AAGF,UAAM,uBAAuB,MAAM;AACjC,cAAQ,IAAI,CAAC,WAAW,OAAA,CAAQ;AAChC,aAAO;AAAA,IACT;AAGA,cAAU,6BAA6B;AAGvC,SAAK,sBAAsB,MAAM;AAEjC,WAAO;AAAA,EACT;AACF;AAEA,SAAS,qBACP,QACA;AAEA,MAAI,OAAO,OAAO,UAAU,YAAY;AACtC,WAAOE,QAAAA,WAAqB,OAAO,KAAK;AAAA,EAC1C;AACA,SAAOC,QAAAA,WAAW,OAAO,KAAK;AAChC;AAEA,SAAS,wBACP,gBACA;AACA,SAAO,CAAC,MAAS,SAAoB;AAEnC,UAAM,SAAS,eAAe,IAAI,IAAI;AACtC,UAAM,SAAS,eAAe,IAAI,IAAI;AAGtC,QAAI,UAAU,QAAQ;AACpB,UAAI,SAAS,QAAQ;AACnB,eAAO;AAAA,MACT,WAAW,SAAS,QAAQ;AAC1B,eAAO;AAAA,MACT,OAAO;AACL,eAAO;AAAA,MACT;AAAA,IACF;AAGA,WAAO;AAAA,EACT;AACF;AAOA,SAAS,4BACP,OAC2C;AAC3C,QAAM,cAAmC,CAAA;AAGzC,WAAS,kBAAkB,QAAa;AACtC,QAAI,OAAO,SAAS,iBAAiB;AACnC,kBAAY,OAAO,WAAW,EAAE,IAAI,OAAO;AAAA,IAC7C,WAAW,OAAO,SAAS,YAAY;AAErC,uBAAiB,OAAO,KAAK;AAAA,IAC/B;AAAA,EACF;AAGA,WAAS,iBAAiB,GAAQ;AAEhC,QAAI,EAAE,MAAM;AACV,wBAAkB,EAAE,IAAI;AAAA,IAC1B;AAGA,QAAI,EAAE,QAAQ,MAAM,QAAQ,EAAE,IAAI,GAAG;AACnC,iBAAW,cAAc,EAAE,MAAM;AAC/B,YAAI,WAAW,MAAM;AACnB,4BAAkB,WAAW,IAAI;AAAA,QACnC;AAAA,MACF;AAAA,IACF;AAAA,EACF;AAGA,mBAAiB,KAAK;AAEtB,SAAO;AACT;AAEA,SAAS,kBACP,KACA,CAAC,CAAC,KAAK,SAAS,GAAG,YAAY,GAI/B;AAGA,QAAM,CAAC,OAAO,YAAY,IAAI;AAE9B,QAAM,UAAU,IAAI,IAAI,GAAG,KAAK;AAAA,IAC9B,SAAS;AAAA,IACT,SAAS;AAAA,IACT;AAAA,IACA;AAAA,EAAA;AAEF,MAAI,eAAe,GAAG;AACpB,YAAQ,WAAW,KAAK,IAAI,YAAY;AAAA,EAC1C,WAAW,eAAe,GAAG;AAC3B,YAAQ,WAAW;AACnB,YAAQ,QAAQ;AAChB,YAAQ,eAAe;AAAA,EACzB;AACA,MAAI,IAAI,KAAK,OAAO;AACpB,SAAO;AACT;;"}
@@ -5,6 +5,7 @@ import { Context, GetResult } from '../builder/types.js';
  import { BasicExpression, QueryIR } from '../ir.js';
  import { LazyCollectionCallbacks } from '../compiler/joins.js';
  import { FullSyncState, LiveQueryCollectionConfig } from './types.js';
+ type SyncMethods<TResult extends object> = Parameters<SyncConfig<TResult>[`sync`]>[0];
  export declare class CollectionConfigBuilder<TContext extends Context, TResult extends object = GetResult<TContext>> {
  private readonly config;
  private readonly id;
@@ -14,6 +15,8 @@ export declare class CollectionConfigBuilder<TContext extends Context, TResult e
  private readonly orderByIndices;
  private readonly compare?;
  private isGraphRunning;
+ private isInErrorState;
+ private liveQueryCollection?;
  private graphCache;
  private inputsCache;
  private pipelineCache;
@@ -24,14 +27,26 @@ export declare class CollectionConfigBuilder<TContext extends Context, TResult e
  optimizableOrderByCollections: Record<string, OrderByOptimizationInfo>;
  constructor(config: LiveQueryCollectionConfig<TContext, TResult>);
  getConfig(): CollectionConfigSingleRowOption<TResult>;
- maybeRunGraph(config: Parameters<SyncConfig<TResult>[`sync`]>[0], syncState: FullSyncState, callback?: () => boolean): void;
+ maybeRunGraph(config: SyncMethods<TResult>, syncState: FullSyncState, callback?: () => boolean): void;
  private getSyncConfig;
  private syncFn;
  private compileBasePipeline;
  private maybeCompileBasePipeline;
  private extendPipelineWithChangeProcessing;
  private applyChanges;
+ /**
+ * Handle status changes from source collections
+ */
+ private handleSourceStatusChange;
+ /**
+ * Update the live query status based on source collection statuses
+ */
+ private updateLiveQueryStatus;
+ /**
+ * Transition the live query to error state
+ */
+ private transitionToError;
  private allCollectionsReady;
- private allCollectionsReadyOrInitialCommit;
  private subscribeToAllCollections;
  }
+ export {};
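
The declarations above also introduce the `SyncMethods<TResult>` alias, which is just the first parameter type of a collection's `sync` function, extracted with TypeScript's `Parameters<...>[0]` helper; it is the object of sync callbacks (`begin`, `commit`, `markReady`, ...) seen throughout the compiled output. For readers unfamiliar with the pattern, a small stand-alone illustration using a stand-in type rather than the package's real `SyncConfig`:

```ts
// Stand-in type to illustrate the Parameters<...>[0] pattern; this is not the
// package's actual SyncConfig definition.
type ExampleSyncConfig = {
  sync: (params: {
    begin: () => void
    commit: () => void
    markReady: () => void
  }) => void
}

// Extracts the type of the first argument of `sync`:
// { begin: () => void; commit: () => void; markReady: () => void }
type ExampleSyncMethods = Parameters<ExampleSyncConfig[`sync`]>[0]
```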