@tanstack/db 0.4.17 → 0.4.19
This diff shows the contents of publicly available package versions as published to a supported registry. It is provided for informational purposes only and reflects the packages as they appear in their public registries.
- package/dist/cjs/collection/changes.cjs +1 -1
- package/dist/cjs/collection/changes.cjs.map +1 -1
- package/dist/cjs/collection/subscription.cjs +55 -5
- package/dist/cjs/collection/subscription.cjs.map +1 -1
- package/dist/cjs/collection/subscription.d.cts +5 -2
- package/dist/cjs/errors.cjs +8 -0
- package/dist/cjs/errors.cjs.map +1 -1
- package/dist/cjs/errors.d.cts +8 -0
- package/dist/cjs/index.cjs +1 -0
- package/dist/cjs/index.cjs.map +1 -1
- package/dist/cjs/local-storage.cjs +15 -28
- package/dist/cjs/local-storage.cjs.map +1 -1
- package/dist/cjs/query/compiler/index.cjs +40 -0
- package/dist/cjs/query/compiler/index.cjs.map +1 -1
- package/dist/cjs/query/compiler/order-by.cjs +3 -6
- package/dist/cjs/query/compiler/order-by.cjs.map +1 -1
- package/dist/cjs/query/live/collection-subscriber.cjs +20 -34
- package/dist/cjs/query/live/collection-subscriber.cjs.map +1 -1
- package/dist/esm/collection/changes.js +1 -1
- package/dist/esm/collection/changes.js.map +1 -1
- package/dist/esm/collection/subscription.d.ts +5 -2
- package/dist/esm/collection/subscription.js +56 -6
- package/dist/esm/collection/subscription.js.map +1 -1
- package/dist/esm/errors.d.ts +8 -0
- package/dist/esm/errors.js +8 -0
- package/dist/esm/errors.js.map +1 -1
- package/dist/esm/index.js +2 -1
- package/dist/esm/local-storage.js +15 -28
- package/dist/esm/local-storage.js.map +1 -1
- package/dist/esm/query/compiler/index.js +41 -1
- package/dist/esm/query/compiler/index.js.map +1 -1
- package/dist/esm/query/compiler/order-by.js +3 -6
- package/dist/esm/query/compiler/order-by.js.map +1 -1
- package/dist/esm/query/live/collection-subscriber.js +20 -34
- package/dist/esm/query/live/collection-subscriber.js.map +1 -1
- package/package.json +1 -1
- package/src/collection/changes.ts +1 -1
- package/src/collection/subscription.ts +82 -6
- package/src/errors.ts +16 -0
- package/src/local-storage.ts +28 -45
- package/src/query/compiler/index.ts +74 -0
- package/src/query/compiler/order-by.ts +3 -5
- package/src/query/live/collection-subscriber.ts +26 -72
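Most of the substantive churn below is in the local-storage persistence path (`package/src/local-storage.ts`, +28 −45); the cjs source-map diff that follows carries the full before/after TypeScript in its `sourcesContent`, summarized below. As context, the module's own JSDoc — unchanged between 0.4.17 and 0.4.19 — shows the manual-transaction flow this path serves. In this sketch, `api` and `apiCollection` are illustrative names taken from those docs, not real exports:

```ts
import {
  createCollection,
  createTransaction,
  localStorageCollectionOptions,
} from "@tanstack/db"

const localSettings = createCollection(
  localStorageCollectionOptions({
    storageKey: "user-settings",
    getKey: (item) => item.id,
  })
)

const tx = createTransaction({
  mutationFn: async ({ transaction }) => {
    // Use settings data in API call
    const settingsMutations = transaction.mutations.filter(
      (m) => m.collection === localSettings
    )
    await api.updateUserProfile({ settings: settingsMutations[0]?.modified })

    // Persist local-storage mutations after API success
    localSettings.utils.acceptMutations(transaction)
  },
})

tx.mutate(() => {
  localSettings.insert({ id: "theme", value: "dark" })
  apiCollection.insert({ id: 2, data: "profile data" })
})

await tx.commit()
```

The `acceptMutations` call is required for manual transactions because local-storage collections don't participate in the standard mutation handler flow there; this release reworks what that call (and the wrapped handlers) do internally.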
package/dist/cjs/local-storage.cjs.map
@@ -1 +1 @@
- {"version":3,"file":"local-storage.cjs","sources":["../../src/local-storage.ts"],"sourcesContent":[/* old src/local-storage.ts, embedded verbatim */],"names":[…],"mappings":"…"}

Removed: the previous source map, originally a single minified line. In its embedded source, the wrapped `onInsert`/`onUpdate`/`onDelete` handlers re-read the persisted state with `loadFromStorage()` on every mutation, re-derived keys with `config.getKey(mutation.modified)`, and finished by calling a `triggerLocalSync()` helper (a thin wrapper around `sync.manualTrigger`) to push their own writes back through sync, since storage events don't fire in the tab that wrote them. `acceptMutations` likewise reloaded from storage before applying mutations, though it already used the engine's pre-computed `mutation.key` and confirmed through `sync.confirmOperationsSync`.
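Unescaped from the removed map's `sourcesContent`, this is the persistence tail of the old `wrappedOnInsert` (`wrappedOnUpdate` is essentially identical apart from the operation name; `wrappedOnDelete` removes keys instead of setting them):

```ts
// Always persist to storage
// Load current data from storage
const currentData = loadFromStorage<any>(config.storageKey, storage, parser)

// Add new items with version keys
params.transaction.mutations.forEach((mutation) => {
  const key = config.getKey(mutation.modified)
  const storedItem: StoredItem<any> = {
    versionKey: generateUuid(),
    data: mutation.modified,
  }
  currentData.set(key, storedItem)
})

// Save to storage
saveToStorage(currentData)

// Manually trigger local sync since storage events don't fire for current tab
triggerLocalSync()
```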
+ {"version":3,"file":"local-storage.cjs","sources":["../../src/local-storage.ts"],"sourcesContent":[/* new src/local-storage.ts, embedded verbatim */],"names":[…],"mappings":"…"}

Added: the regenerated source map. In its embedded source, every persistence path — the wrapped handlers and `acceptMutations` alike — applies mutations directly to the in-memory `lastKnownData` cache, saves with `saveToStorage(lastKnownData)`, uses the engine's pre-computed `mutation.key` throughout, and confirms the writes via `sync.confirmOperationsSync()`, moving them from optimistic to synced without reloading from storage. The `triggerLocalSync` helper is gone; cross-tab sync via storage events and the version-key change detection in `findChanges` are unchanged.
UAAU;AACb,gBAAM,aAAkD;AAAA,YACtD,YAAY,aAAA;AAAA,YACZ,MAAM,SAAS;AAAA,UAAA;AAEjB,wBAAc,IAAI,KAAK,UAAU;AACjC;AAAA,QACF;AAAA,QACA,KAAK,UAAU;AACb,wBAAc,OAAO,GAAG;AACxB;AAAA,QACF;AAAA,MAAA;AAAA,IAEJ;AAGA,kBAAc,aAAa;AAI3B,SAAK,sBAAsB,mBAAmB;AAAA,EAChD;AAEA,SAAO;AAAA,IACL,GAAG;AAAA,IACH,IAAI;AAAA,IACJ;AAAA,IACA,UAAU;AAAA,IACV,UAAU;AAAA,IACV,UAAU;AAAA,IACV,OAAO;AAAA,MACL;AAAA,MACA;AAAA,MACA;AAAA,IAAA;AAAA,EACF;AAEJ;AASA,SAAS,gBACP,YACA,SACA,QACqC;AACrC,MAAI;AACF,UAAM,UAAU,QAAQ,QAAQ,UAAU;AAC1C,QAAI,CAAC,SAAS;AACZ,iCAAW,IAAA;AAAA,IACb;AAEA,UAAM,SAAS,OAAO,MAAM,OAAO;AACnC,UAAM,8BAAc,IAAA;AAGpB,QACE,OAAO,WAAW,YAClB,WAAW,QACX,CAAC,MAAM,QAAQ,MAAM,GACrB;AACA,aAAO,QAAQ,MAAM,EAAE,QAAQ,CAAC,CAAC,KAAK,KAAK,MAAM;AAE/C,YACE,SACA,OAAO,UAAU,YACjB,gBAAgB,SAChB,UAAU,OACV;AACA,gBAAM,aAAa;AACnB,kBAAQ,IAAI,KAAK,UAAU;AAAA,QAC7B,OAAO;AACL,gBAAM,IAAIC,OAAAA,8BAA8B,YAAY,GAAG;AAAA,QACzD;AAAA,MACF,CAAC;AAAA,IACH,OAAO;AACL,YAAM,IAAIC,OAAAA,gCAAgC,UAAU;AAAA,IACtD;AAEA,WAAO;AAAA,EACT,SAAS,OAAO;AACd,YAAQ;AAAA,MACN,iEAAiE,UAAU;AAAA,MAC3E;AAAA,IAAA;AAEF,+BAAW,IAAA;AAAA,EACb;AACF;AAYA,SAAS,uBACP,YACA,SACA,iBACA,QACA,SACA,eAKA;AACA,MAAI,aAA0D;AAC9D,MAAI,aAAkB;AAQtB,QAAM,cAAc,CAClB,SACA,YAKI;AACJ,UAAM,UAID,CAAA;AAGL,YAAQ,QAAQ,CAAC,eAAe,QAAQ;AACtC,YAAM,gBAAgB,QAAQ,IAAI,GAAG;AACrC,UAAI,CAAC,eAAe;AAClB,gBAAQ,KAAK,EAAE,MAAM,UAAU,KAAK,OAAO,cAAc,MAAM;AAAA,MACjE,WAAW,cAAc,eAAe,cAAc,YAAY;AAChE,gBAAQ,KAAK,EAAE,MAAM,UAAU,KAAK,OAAO,cAAc,MAAM;AAAA,MACjE;AAAA,IACF,CAAC;AAGD,YAAQ,QAAQ,CAAC,eAAe,QAAQ;AACtC,UAAI,CAAC,QAAQ,IAAI,GAAG,GAAG;AACrB,gBAAQ,KAAK,EAAE,MAAM,UAAU,KAAK,OAAO,cAAc,MAAM;AAAA,MACjE;AAAA,IACF,CAAC;AAED,WAAO;AAAA,EACT;AAMA,QAAM,wBAAwB,MAAM;AAClC,QAAI,CAAC,WAAY;AAEjB,UAAM,EAAE,OAAO,OAAO,OAAA,IAAW;AAGjC,UAAM,UAAU,gBAAmB,YAAY,SAAS,MAAM;AAG9D,UAAM,UAAU,YAAY,eAAe,OAAO;AAElD,QAAI,QAAQ,SAAS,GAAG;AACtB,YAAA;AACA,cAAQ,QAAQ,CAAC,EAAE,MAAM,YAAY;AACnC,YAAI,OAAO;AACT,mCAAyB,QAAQ,OAAO,IAAI;AAC5C,gBAAM,EAAE,MAAM,OAAO;AAAA,QACvB;AAAA,MACF,CAAC;AACD,aAAA;AAGA,oBAAc,MAAA;AACd,cAAQ,QAAQ,CAAC,YAAY,QAAQ;AACnC,sBAAc,IAAI,KAAK,UAAU;AAAA,MACnC,CAAC;AAAA,IACH;AAAA,EACF;AAEA,QAAM,aAGF;AAAA,IACF,MAAM,CAAC,WAAiD;AACtD,YAAM,EAAE,OAAO,OAAO,QAAQ,cAAc;AAG5C,mBAAa;AACb,mBAAa,OAAO;AAGpB,YAAM,cAAc,gBAAmB,YAAY,SAAS,MAAM;AAClE,UAAI,YAAY,OAAO,GAAG;AACxB,cAAA;AACA,oBAAY,QAAQ,CAAC,eAAe;AAClC,mCAAyB,QAAQ,WAAW,MAAM,MAAM;AACxD,gBAAM,EAAE,MAAM,UAAU,OAAO,WAAW,MAAM;AAAA,QAClD,CAAC;AACD,eAAA;AAAA,MACF;AAGA,oBAAc,MAAA;AACd,kBAAY,QAAQ,CAAC,YAAY,QAAQ;AACvC,sBAAc,IAAI,KAAK,UAAU;AAAA,MACnC,CAAC;AAGD,gBAAA;AAGA,YAAM,qBAAqB,CAAC,UAAwB;AAElD,YAAI,MAAM,QAAQ,cAAc,MAAM,gBAAgB,SAAS;AAC7D;AAAA,QACF;AAEA,8BAAA;AAAA,MACF;AAGA,sBAAgB,iBAAiB,WAAW,kBAAkB;AAAA,IAGhE;AAAA;AAAA;AAAA;AAAA;AAAA,IAMA,iBAAiB,OAAO;AAAA,MACtB;AAAA,MACA,aACE,aAAa,OAAO,WAAW,cAAc,OAAO,eAAe,QAC/D,iBACA;AAAA,IAAA;AAAA;AAAA,IAIR,eAAe;AAAA;AAAA,IAGf;AAAA,EAAA;AAQF,QAAM,wBAAwB,CAAC,cAA0B;AACvD,QAAI,CAAC,YAAY;AAEf;AAAA,IACF;AAEA,UAAM,EAAE,OAAO,OAAO,OAAA,IAAW;AAGjC,UAAA;AACA,cAAU,QAAQ,CAAC,aAAkB;AACnC,YAAM;AAAA,QACJ,MAAM,SAAS;AAAA,QACf,OACE,SAAS,SAAS,WAAW,SAAS,WAAW,SAAS;AAAA,MAAA,CAC7D;AAAA,IACH,CAAC;AACD,WAAA;AAAA,EACF;AAEA,SAAO;AAAA,IACL,GAAG;AAAA,IACH;AAAA,EAAA;AAEJ;;"}
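The regenerated local-storage source map above embeds, in its sourcesContent, the new confirmOperationsSync helper introduced in this release. Because the minified map is hard to read, here is a minimal TypeScript sketch of the write-through pattern that helper implements; the types below are local stand-ins for illustration, not the package's real exports:

type Op = `insert` | `update` | `delete`

interface Mutation<T> {
  type: Op
  original: T // pre-mutation snapshot
  modified: T // post-mutation value
}

interface SyncWriter<T> {
  begin: () => void
  write: (message: { type: Op; value: T }) => void
  commit: () => void
}

// Replays pending mutations through the sync interface so they move from
// the optimistic overlay into synced state.
function confirmThroughSync<T>(
  sync: SyncWriter<T>,
  mutations: Array<Mutation<T>>
): void {
  sync.begin()
  for (const mutation of mutations) {
    sync.write({
      type: mutation.type,
      // Deletes are confirmed with the pre-mutation row; inserts and
      // updates carry the modified value.
      value: mutation.type === `delete` ? mutation.original : mutation.modified,
    })
  }
  sync.commit()
}

The design point: replaying each mutation through begin/write/commit is what flips it from optimistic to synced state, and a delete is keyed off the original row because the modified value no longer exists.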
package/dist/cjs/query/compiler/index.cjs
@@ -14,6 +14,7 @@ function compileQuery(rawQuery, inputs, collections, subscriptions, callbacks, l
   if (cachedResult) {
     return cachedResult;
   }
+  validateQueryStructure(rawQuery);
   const { optimizedQuery: query, sourceWhereClauses } = optimizer.optimizeQuery(rawQuery);
   queryMapping.set(query, rawQuery);
   mapNestedQueries(query, rawQuery, queryMapping);
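The one-line addition above wires the new validation in ahead of optimizeQuery. Per the comment in the regenerated source map below, the ordering is deliberate: the optimizer may synthesize internal subqueries (e.g., for predicate pushdown) that legitimately reuse an alias. A tiny self-contained sketch of why validate-then-optimize is the safe order, using a hypothetical miniature IR rather than the package's real QueryIR:

type MiniIR = { alias: string; sub?: MiniIR }

// Throws if an alias reappears below the level that introduced it.
function validate(ir: MiniIR, seen: Set<string> = new Set()): void {
  if (seen.has(ir.alias)) throw new Error(`duplicate alias: ${ir.alias}`)
  if (ir.sub) validate(ir.sub, new Set([...seen, ir.alias]))
}

// A pushdown-style rewrite may wrap the query in an internal subquery that
// reuses the same alias — fine internally, but it would trip the validator.
function optimize(ir: MiniIR): MiniIR {
  return { alias: ir.alias, sub: ir }
}

const raw: MiniIR = { alias: `user` }
validate(raw) // passes: the user's raw query has no alias conflicts
const optimized = optimize(raw)
console.log(optimized.sub === raw) // true
// validate(optimized) would now throw `duplicate alias: user`,
// which is why the compiler validates the raw query first.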
@@ -211,6 +212,45 @@ function compileQuery(rawQuery, inputs, collections, subscriptions, callbacks, l
   cache.set(rawQuery, compilationResult);
   return compilationResult;
 }
+function collectDirectCollectionAliases(query) {
+  const aliases = /* @__PURE__ */ new Set();
+  if (query.from.type === `collectionRef`) {
+    aliases.add(query.from.alias);
+  }
+  if (query.join) {
+    for (const joinClause of query.join) {
+      if (joinClause.from.type === `collectionRef`) {
+        aliases.add(joinClause.from.alias);
+      }
+    }
+  }
+  return aliases;
+}
+function validateQueryStructure(query, parentCollectionAliases = /* @__PURE__ */ new Set()) {
+  const currentLevelAliases = collectDirectCollectionAliases(query);
+  for (const alias of currentLevelAliases) {
+    if (parentCollectionAliases.has(alias)) {
+      throw new errors.DuplicateAliasInSubqueryError(
+        alias,
+        Array.from(parentCollectionAliases)
+      );
+    }
+  }
+  const combinedAliases = /* @__PURE__ */ new Set([
+    ...parentCollectionAliases,
+    ...currentLevelAliases
+  ]);
+  if (query.from.type === `queryRef`) {
+    validateQueryStructure(query.from.query, combinedAliases);
+  }
+  if (query.join) {
+    for (const joinClause of query.join) {
+      if (joinClause.from.type === `queryRef`) {
+        validateQueryStructure(joinClause.from.query, combinedAliases);
+      }
+    }
+  }
+}
 function processFrom(from, allInputs, collections, subscriptions, callbacks, lazySources, optimizableOrderByCollections, setWindowFn, cache, queryMapping, aliasToCollectionId, aliasRemapping) {
   switch (from.type) {
     case `collectionRef`: {
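Since the hunk above is compiled output, here is a self-contained TypeScript miniature of the new duplicate-alias check. The Ref and Query types are stubs that mirror only the IR fields the hunk touches, and a plain Error stands in for the package's DuplicateAliasInSubqueryError:

type Ref =
  | { type: `collectionRef`; alias: string }
  | { type: `queryRef`; alias: string; query: Query }

interface Query {
  from: Ref
  join?: Array<{ from: Ref }>
}

// Only direct collection references contribute aliases at a given level.
function collectDirectCollectionAliases(query: Query): Set<string> {
  const aliases = new Set<string>()
  if (query.from.type === `collectionRef`) aliases.add(query.from.alias)
  for (const j of query.join ?? []) {
    if (j.from.type === `collectionRef`) aliases.add(j.from.alias)
  }
  return aliases
}

// Walks the query tree, accumulating parent aliases so a subquery cannot
// reuse a collection alias already claimed by any ancestor level.
function validateQueryStructure(query: Query, parentAliases = new Set<string>()): void {
  const current = collectDirectCollectionAliases(query)
  for (const alias of current) {
    if (parentAliases.has(alias)) {
      throw new Error(`alias "${alias}" already used by a parent query`)
    }
  }
  const combined = new Set([...parentAliases, ...current])
  if (query.from.type === `queryRef`) validateQueryStructure(query.from.query, combined)
  for (const j of query.join ?? []) {
    if (j.from.type === `queryRef`) validateQueryStructure(j.from.query, combined)
  }
}

// Parent query reads from `user`; its join subquery also reads from `user` → throws.
const conflicting: Query = {
  from: { type: `collectionRef`, alias: `user` },
  join: [{
    from: {
      type: `queryRef`,
      alias: `latest`,
      query: { from: { type: `collectionRef`, alias: `user` } },
    },
  }],
}
// validateQueryStructure(conflicting) // → alias "user" already used by a parent query

Note that only direct collectionRef aliases participate in the check; the alias naming a subquery (a queryRef, like `latest` above) never conflicts, and aliases at the same level are not compared against each other — only against ancestors.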
package/dist/cjs/query/compiler/index.cjs.map
@@ -1 +1 @@
-
{"version":3,"file":"index.cjs","sources":["../../../../src/query/compiler/index.ts"],"sourcesContent":["import { distinct, filter, map } from \"@tanstack/db-ivm\"\nimport { optimizeQuery } from \"../optimizer.js\"\nimport {\n CollectionInputNotFoundError,\n DistinctRequiresSelectError,\n HavingRequiresGroupByError,\n LimitOffsetRequireOrderByError,\n UnsupportedFromTypeError,\n} from \"../../errors.js\"\nimport { PropRef, Value as ValClass, getWhereExpression } from \"../ir.js\"\nimport { compileExpression } from \"./evaluators.js\"\nimport { processJoins } from \"./joins.js\"\nimport { processGroupBy } from \"./group-by.js\"\nimport { processOrderBy } from \"./order-by.js\"\nimport { processSelect } from \"./select.js\"\nimport type { CollectionSubscription } from \"../../collection/subscription.js\"\nimport type { OrderByOptimizationInfo } from \"./order-by.js\"\nimport type {\n BasicExpression,\n CollectionRef,\n QueryIR,\n QueryRef,\n} from \"../ir.js\"\nimport type { LazyCollectionCallbacks } from \"./joins.js\"\nimport type { Collection } from \"../../collection/index.js\"\nimport type {\n KeyedStream,\n NamespacedAndKeyedStream,\n ResultStream,\n} from \"../../types.js\"\nimport type { QueryCache, QueryMapping, WindowOptions } from \"./types.js\"\n\nexport type { WindowOptions } from \"./types.js\"\n\n/**\n * Result of query compilation including both the pipeline and source-specific WHERE clauses\n */\nexport interface CompilationResult {\n /** The ID of the main collection */\n collectionId: string\n\n /** The compiled query pipeline (D2 stream) */\n pipeline: ResultStream\n\n /** Map of source aliases to their WHERE clauses for index optimization */\n sourceWhereClauses: Map<string, BasicExpression<boolean>>\n\n /**\n * Maps each source alias to its collection ID. 
Enables per-alias subscriptions for self-joins.\n * Example: `{ employee: 'employees-col-id', manager: 'employees-col-id' }`\n */\n aliasToCollectionId: Record<string, string>\n\n /**\n * Flattened mapping from outer alias to innermost alias for subqueries.\n * Always provides one-hop lookups, never recursive chains.\n *\n * Example: `{ activeUser: 'user' }` when `.from({ activeUser: subquery })`\n * where the subquery uses `.from({ user: collection })`.\n *\n * For deeply nested subqueries, the mapping goes directly to the innermost alias:\n * `{ author: 'user' }` (not `{ author: 'activeUser' }`), so `aliasRemapping[alias]`\n * always resolves in a single lookup.\n *\n * Used to resolve subscriptions during lazy loading when join aliases differ from\n * the inner aliases where collection subscriptions were created.\n */\n aliasRemapping: Record<string, string>\n}\n\n/**\n * Compiles a query IR into a D2 pipeline\n * @param rawQuery The query IR to compile\n * @param inputs Mapping of source aliases to input streams (e.g., `{ employee: input1, manager: input2 }`)\n * @param collections Mapping of collection IDs to Collection instances\n * @param subscriptions Mapping of source aliases to CollectionSubscription instances\n * @param callbacks Mapping of source aliases to lazy loading callbacks\n * @param lazySources Set of source aliases that should load data lazily\n * @param optimizableOrderByCollections Map of collection IDs to order-by optimization info\n * @param cache Optional cache for compiled subqueries (used internally for recursion)\n * @param queryMapping Optional mapping from optimized queries to original queries\n * @returns A CompilationResult with the pipeline, source WHERE clauses, and alias metadata\n */\nexport function compileQuery(\n rawQuery: QueryIR,\n inputs: Record<string, KeyedStream>,\n collections: Record<string, Collection<any, any, any, any, any>>,\n subscriptions: Record<string, CollectionSubscription>,\n callbacks: Record<string, LazyCollectionCallbacks>,\n lazySources: Set<string>,\n optimizableOrderByCollections: Record<string, OrderByOptimizationInfo>,\n setWindowFn: (windowFn: (options: WindowOptions) => void) => void,\n cache: QueryCache = new WeakMap(),\n queryMapping: QueryMapping = new WeakMap()\n): CompilationResult {\n // Check if the original raw query has already been compiled\n const cachedResult = cache.get(rawQuery)\n if (cachedResult) {\n return cachedResult\n }\n\n // Optimize the query before compilation\n const { optimizedQuery: query, sourceWhereClauses } = optimizeQuery(rawQuery)\n\n // Create mapping from optimized query to original for caching\n queryMapping.set(query, rawQuery)\n mapNestedQueries(query, rawQuery, queryMapping)\n\n // Create a copy of the inputs map to avoid modifying the original\n const allInputs = { ...inputs }\n\n // Track alias to collection id relationships discovered during compilation.\n // This includes all user-declared aliases plus inner aliases from subqueries.\n const aliasToCollectionId: Record<string, string> = {}\n\n // Track alias remapping for subqueries (outer alias → inner alias)\n // e.g., when .join({ activeUser: subquery }) where subquery uses .from({ user: collection })\n // we store: aliasRemapping['activeUser'] = 'user'\n const aliasRemapping: Record<string, string> = {}\n\n // Create a map of source aliases to input streams.\n // Inputs MUST be keyed by alias (e.g., `{ employee: input1, manager: input2 }`),\n // not by collection ID. 
This enables per-alias subscriptions where different aliases\n // of the same collection (e.g., self-joins) maintain independent filtered streams.\n const sources: Record<string, KeyedStream> = {}\n\n // Process the FROM clause to get the main source\n const {\n alias: mainSource,\n input: mainInput,\n collectionId: mainCollectionId,\n } = processFrom(\n query.from,\n allInputs,\n collections,\n subscriptions,\n callbacks,\n lazySources,\n optimizableOrderByCollections,\n setWindowFn,\n cache,\n queryMapping,\n aliasToCollectionId,\n aliasRemapping\n )\n sources[mainSource] = mainInput\n\n // Prepare the initial pipeline with the main source wrapped in its alias\n let pipeline: NamespacedAndKeyedStream = mainInput.pipe(\n map(([key, row]) => {\n // Initialize the record with a nested structure\n const ret = [key, { [mainSource]: row }] as [\n string,\n Record<string, typeof row>,\n ]\n return ret\n })\n )\n\n // Process JOIN clauses if they exist\n if (query.join && query.join.length > 0) {\n pipeline = processJoins(\n pipeline,\n query.join,\n sources,\n mainCollectionId,\n mainSource,\n allInputs,\n cache,\n queryMapping,\n collections,\n subscriptions,\n callbacks,\n lazySources,\n optimizableOrderByCollections,\n setWindowFn,\n rawQuery,\n compileQuery,\n aliasToCollectionId,\n aliasRemapping\n )\n }\n\n // Process the WHERE clause if it exists\n if (query.where && query.where.length > 0) {\n // Apply each WHERE condition as a filter (they are ANDed together)\n for (const where of query.where) {\n const whereExpression = getWhereExpression(where)\n const compiledWhere = compileExpression(whereExpression)\n pipeline = pipeline.pipe(\n filter(([_key, namespacedRow]) => {\n return compiledWhere(namespacedRow)\n })\n )\n }\n }\n\n // Process functional WHERE clauses if they exist\n if (query.fnWhere && query.fnWhere.length > 0) {\n for (const fnWhere of query.fnWhere) {\n pipeline = pipeline.pipe(\n filter(([_key, namespacedRow]) => {\n return fnWhere(namespacedRow)\n })\n )\n }\n }\n\n if (query.distinct && !query.fnSelect && !query.select) {\n throw new DistinctRequiresSelectError()\n }\n\n // Process the SELECT clause early - always create __select_results\n // This eliminates duplication and allows for DISTINCT implementation\n if (query.fnSelect) {\n // Handle functional select - apply the function to transform the row\n pipeline = pipeline.pipe(\n map(([key, namespacedRow]) => {\n const selectResults = query.fnSelect!(namespacedRow)\n return [\n key,\n {\n ...namespacedRow,\n __select_results: selectResults,\n },\n ] as [string, typeof namespacedRow & { __select_results: any }]\n })\n )\n } else if (query.select) {\n pipeline = processSelect(pipeline, query.select, allInputs)\n } else {\n // If no SELECT clause, create __select_results with the main table data\n pipeline = pipeline.pipe(\n map(([key, namespacedRow]) => {\n const selectResults =\n !query.join && !query.groupBy\n ? 
namespacedRow[mainSource]\n : namespacedRow\n\n return [\n key,\n {\n ...namespacedRow,\n __select_results: selectResults,\n },\n ] as [string, typeof namespacedRow & { __select_results: any }]\n })\n )\n }\n\n // Process the GROUP BY clause if it exists\n if (query.groupBy && query.groupBy.length > 0) {\n pipeline = processGroupBy(\n pipeline,\n query.groupBy,\n query.having,\n query.select,\n query.fnHaving\n )\n } else if (query.select) {\n // Check if SELECT contains aggregates but no GROUP BY (implicit single-group aggregation)\n const hasAggregates = Object.values(query.select).some(\n (expr) => expr.type === `agg`\n )\n if (hasAggregates) {\n // Handle implicit single-group aggregation\n pipeline = processGroupBy(\n pipeline,\n [], // Empty group by means single group\n query.having,\n query.select,\n query.fnHaving\n )\n }\n }\n\n // Process the HAVING clause if it exists (only applies after GROUP BY)\n if (query.having && (!query.groupBy || query.groupBy.length === 0)) {\n // Check if we have aggregates in SELECT that would trigger implicit grouping\n const hasAggregates = query.select\n ? Object.values(query.select).some((expr) => expr.type === `agg`)\n : false\n\n if (!hasAggregates) {\n throw new HavingRequiresGroupByError()\n }\n }\n\n // Process functional HAVING clauses outside of GROUP BY (treat as additional WHERE filters)\n if (\n query.fnHaving &&\n query.fnHaving.length > 0 &&\n (!query.groupBy || query.groupBy.length === 0)\n ) {\n // If there's no GROUP BY but there are fnHaving clauses, apply them as filters\n for (const fnHaving of query.fnHaving) {\n pipeline = pipeline.pipe(\n filter(([_key, namespacedRow]) => {\n return fnHaving(namespacedRow)\n })\n )\n }\n }\n\n // Process the DISTINCT clause if it exists\n if (query.distinct) {\n pipeline = pipeline.pipe(distinct(([_key, row]) => row.__select_results))\n }\n\n // Process orderBy parameter if it exists\n if (query.orderBy && query.orderBy.length > 0) {\n const orderedPipeline = processOrderBy(\n rawQuery,\n pipeline,\n query.orderBy,\n query.select || {},\n collections[mainCollectionId]!,\n optimizableOrderByCollections,\n setWindowFn,\n query.limit,\n query.offset\n )\n\n // Final step: extract the __select_results and include orderBy index\n const resultPipeline = orderedPipeline.pipe(\n map(([key, [row, orderByIndex]]) => {\n // Extract the final results from __select_results and include orderBy index\n const raw = (row as any).__select_results\n const finalResults = unwrapValue(raw)\n return [key, [finalResults, orderByIndex]] as [unknown, [any, string]]\n })\n )\n\n const result = resultPipeline\n // Cache the result before returning (use original query as key)\n const compilationResult = {\n collectionId: mainCollectionId,\n pipeline: result,\n sourceWhereClauses,\n aliasToCollectionId,\n aliasRemapping,\n }\n cache.set(rawQuery, compilationResult)\n\n return compilationResult\n } else if (query.limit !== undefined || query.offset !== undefined) {\n // If there's a limit or offset without orderBy, throw an error\n throw new LimitOffsetRequireOrderByError()\n }\n\n // Final step: extract the __select_results and return tuple format (no orderBy)\n const resultPipeline: ResultStream = pipeline.pipe(\n map(([key, row]) => {\n // Extract the final results from __select_results and return [key, [results, undefined]]\n const raw = (row as any).__select_results\n const finalResults = unwrapValue(raw)\n return [key, [finalResults, undefined]] as [\n unknown,\n [any, string | undefined],\n ]\n })\n )\n\n const 
result = resultPipeline\n // Cache the result before returning (use original query as key)\n const compilationResult = {\n collectionId: mainCollectionId,\n pipeline: result,\n sourceWhereClauses,\n aliasToCollectionId,\n aliasRemapping,\n }\n cache.set(rawQuery, compilationResult)\n\n return compilationResult\n}\n\n/**\n * Processes the FROM clause, handling direct collection references and subqueries.\n * Populates `aliasToCollectionId` and `aliasRemapping` for per-alias subscription tracking.\n */\nfunction processFrom(\n from: CollectionRef | QueryRef,\n allInputs: Record<string, KeyedStream>,\n collections: Record<string, Collection>,\n subscriptions: Record<string, CollectionSubscription>,\n callbacks: Record<string, LazyCollectionCallbacks>,\n lazySources: Set<string>,\n optimizableOrderByCollections: Record<string, OrderByOptimizationInfo>,\n setWindowFn: (windowFn: (options: WindowOptions) => void) => void,\n cache: QueryCache,\n queryMapping: QueryMapping,\n aliasToCollectionId: Record<string, string>,\n aliasRemapping: Record<string, string>\n): { alias: string; input: KeyedStream; collectionId: string } {\n switch (from.type) {\n case `collectionRef`: {\n const input = allInputs[from.alias]\n if (!input) {\n throw new CollectionInputNotFoundError(\n from.alias,\n from.collection.id,\n Object.keys(allInputs)\n )\n }\n aliasToCollectionId[from.alias] = from.collection.id\n return { alias: from.alias, input, collectionId: from.collection.id }\n }\n case `queryRef`: {\n // Find the original query for caching purposes\n const originalQuery = queryMapping.get(from.query) || from.query\n\n // Recursively compile the sub-query with cache\n const subQueryResult = compileQuery(\n originalQuery,\n allInputs,\n collections,\n subscriptions,\n callbacks,\n lazySources,\n optimizableOrderByCollections,\n setWindowFn,\n cache,\n queryMapping\n )\n\n // Pull up alias mappings from subquery to parent scope.\n // This includes both the innermost alias-to-collection mappings AND\n // any existing remappings from nested subquery levels.\n Object.assign(aliasToCollectionId, subQueryResult.aliasToCollectionId)\n Object.assign(aliasRemapping, subQueryResult.aliasRemapping)\n\n // Create a FLATTENED remapping from outer alias to innermost alias.\n // For nested subqueries, this ensures one-hop lookups (not recursive chains).\n //\n // Example with 3-level nesting:\n // Inner: .from({ user: usersCollection })\n // Middle: .from({ activeUser: innerSubquery }) → creates: activeUser → user\n // Outer: .from({ author: middleSubquery }) → creates: author → user (not author → activeUser)\n //\n // The key insight: We search through the PULLED-UP aliasToCollectionId (which contains\n // the innermost 'user' alias), so we always map directly to the deepest level.\n // This means aliasRemapping[alias] is always a single lookup, never recursive.\n // Needed for subscription resolution during lazy loading.\n const innerAlias = Object.keys(subQueryResult.aliasToCollectionId).find(\n (alias) =>\n subQueryResult.aliasToCollectionId[alias] ===\n subQueryResult.collectionId\n )\n if (innerAlias && innerAlias !== from.alias) {\n aliasRemapping[from.alias] = innerAlias\n }\n\n // Extract the pipeline from the compilation result\n const subQueryInput = subQueryResult.pipeline\n\n // Subqueries may return [key, [value, orderByIndex]] (with ORDER BY) or [key, value] (without ORDER BY)\n // We need to extract just the value for use in parent queries\n const extractedInput = subQueryInput.pipe(\n map((data: any) => {\n const 
[key, [value, _orderByIndex]] = data\n // Unwrap Value expressions that might have leaked through as the entire row\n const unwrapped = unwrapValue(value)\n return [key, unwrapped] as [unknown, any]\n })\n )\n\n return {\n alias: from.alias,\n input: extractedInput,\n collectionId: subQueryResult.collectionId,\n }\n }\n default:\n throw new UnsupportedFromTypeError((from as any).type)\n }\n}\n\n// Helper to check if a value is a Value expression\nfunction isValue(raw: any): boolean {\n return (\n raw instanceof ValClass ||\n (raw && typeof raw === `object` && `type` in raw && raw.type === `val`)\n )\n}\n\n// Helper to unwrap a Value expression or return the value itself\nfunction unwrapValue(value: any): any {\n return isValue(value) ? value.value : value\n}\n\n/**\n * Recursively maps optimized subqueries to their original queries for proper caching.\n * This ensures that when we encounter the same QueryRef object in different contexts,\n * we can find the original query to check the cache.\n */\nfunction mapNestedQueries(\n optimizedQuery: QueryIR,\n originalQuery: QueryIR,\n queryMapping: QueryMapping\n): void {\n // Map the FROM clause if it's a QueryRef\n if (\n optimizedQuery.from.type === `queryRef` &&\n originalQuery.from.type === `queryRef`\n ) {\n queryMapping.set(optimizedQuery.from.query, originalQuery.from.query)\n // Recursively map nested queries\n mapNestedQueries(\n optimizedQuery.from.query,\n originalQuery.from.query,\n queryMapping\n )\n }\n\n // Map JOIN clauses if they exist\n if (optimizedQuery.join && originalQuery.join) {\n for (\n let i = 0;\n i < optimizedQuery.join.length && i < originalQuery.join.length;\n i++\n ) {\n const optimizedJoin = optimizedQuery.join[i]!\n const originalJoin = originalQuery.join[i]!\n\n if (\n optimizedJoin.from.type === `queryRef` &&\n originalJoin.from.type === `queryRef`\n ) {\n queryMapping.set(optimizedJoin.from.query, originalJoin.from.query)\n // Recursively map nested queries in joins\n mapNestedQueries(\n optimizedJoin.from.query,\n originalJoin.from.query,\n queryMapping\n )\n }\n }\n }\n}\n\nfunction getRefFromAlias(\n query: QueryIR,\n alias: string\n): CollectionRef | QueryRef | void {\n if (query.from.alias === alias) {\n return query.from\n }\n\n for (const join of query.join || []) {\n if (join.from.alias === alias) {\n return join.from\n }\n }\n}\n\n/**\n * Follows the given reference in a query\n * until its finds the root field the reference points to.\n * @returns The collection, its alias, and the path to the root field in this collection\n */\nexport function followRef(\n query: QueryIR,\n ref: PropRef<any>,\n collection: Collection\n): { collection: Collection; path: Array<string> } | void {\n if (ref.path.length === 0) {\n return\n }\n\n if (ref.path.length === 1) {\n // This field should be part of this collection\n const field = ref.path[0]!\n // is it part of the select clause?\n if (query.select) {\n const selectedField = query.select[field]\n if (selectedField && selectedField.type === `ref`) {\n return followRef(query, selectedField, collection)\n }\n }\n\n // Either this field is not part of the select clause\n // and thus it must be part of the collection itself\n // or it is part of the select but is not a reference\n // so we can stop here and don't have to follow it\n return { collection, path: [field] }\n }\n\n if (ref.path.length > 1) {\n // This is a nested field\n const [alias, ...rest] = ref.path\n const aliasRef = getRefFromAlias(query, alias!)\n if (!aliasRef) {\n return\n }\n\n if 
(aliasRef.type === `queryRef`) {\n return followRef(aliasRef.query, new PropRef(rest), collection)\n } else {\n // This is a reference to a collection\n // we can't follow it further\n // so the field must be on the collection itself\n return { collection: aliasRef.collection, path: rest }\n }\n }\n}\n\nexport type CompileQueryFn = typeof compileQuery\n"],"names":["optimizeQuery","map","processJoins","getWhereExpression","compileExpression","filter","DistinctRequiresSelectError","processSelect","processGroupBy","HavingRequiresGroupByError","distinct","processOrderBy","resultPipeline","result","compilationResult","LimitOffsetRequireOrderByError","CollectionInputNotFoundError","UnsupportedFromTypeError","ValClass"],"mappings":";;;;;;;;;;;AAmFO,SAAS,aACd,UACA,QACA,aACA,eACA,WACA,aACA,+BACA,aACA,4BAAwB,QAAA,GACxB,eAA6B,oBAAI,WACd;AAEnB,QAAM,eAAe,MAAM,IAAI,QAAQ;AACvC,MAAI,cAAc;AAChB,WAAO;AAAA,EACT;AAGA,QAAM,EAAE,gBAAgB,OAAO,mBAAA,IAAuBA,UAAAA,cAAc,QAAQ;AAG5E,eAAa,IAAI,OAAO,QAAQ;AAChC,mBAAiB,OAAO,UAAU,YAAY;AAG9C,QAAM,YAAY,EAAE,GAAG,OAAA;AAIvB,QAAM,sBAA8C,CAAA;AAKpD,QAAM,iBAAyC,CAAA;AAM/C,QAAM,UAAuC,CAAA;AAG7C,QAAM;AAAA,IACJ,OAAO;AAAA,IACP,OAAO;AAAA,IACP,cAAc;AAAA,EAAA,IACZ;AAAA,IACF,MAAM;AAAA,IACN;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,EAAA;AAEF,UAAQ,UAAU,IAAI;AAGtB,MAAI,WAAqC,UAAU;AAAA,IACjDC,MAAAA,IAAI,CAAC,CAAC,KAAK,GAAG,MAAM;AAElB,YAAM,MAAM,CAAC,KAAK,EAAE,CAAC,UAAU,GAAG,KAAK;AAIvC,aAAO;AAAA,IACT,CAAC;AAAA,EAAA;AAIH,MAAI,MAAM,QAAQ,MAAM,KAAK,SAAS,GAAG;AACvC,eAAWC,MAAAA;AAAAA,MACT;AAAA,MACA,MAAM;AAAA,MACN;AAAA,MACA;AAAA,MACA;AAAA,MACA;AAAA,MACA;AAAA,MACA;AAAA,MACA;AAAA,MACA;AAAA,MACA;AAAA,MACA;AAAA,MACA;AAAA,MACA;AAAA,MACA;AAAA,MACA;AAAA,MACA;AAAA,MACA;AAAA,IAAA;AAAA,EAEJ;AAGA,MAAI,MAAM,SAAS,MAAM,MAAM,SAAS,GAAG;AAEzC,eAAW,SAAS,MAAM,OAAO;AAC/B,YAAM,kBAAkBC,GAAAA,mBAAmB,KAAK;AAChD,YAAM,gBAAgBC,WAAAA,kBAAkB,eAAe;AACvD,iBAAW,SAAS;AAAA,QAClBC,MAAAA,OAAO,CAAC,CAAC,MAAM,aAAa,MAAM;AAChC,iBAAO,cAAc,aAAa;AAAA,QACpC,CAAC;AAAA,MAAA;AAAA,IAEL;AAAA,EACF;AAGA,MAAI,MAAM,WAAW,MAAM,QAAQ,SAAS,GAAG;AAC7C,eAAW,WAAW,MAAM,SAAS;AACnC,iBAAW,SAAS;AAAA,QAClBA,MAAAA,OAAO,CAAC,CAAC,MAAM,aAAa,MAAM;AAChC,iBAAO,QAAQ,aAAa;AAAA,QAC9B,CAAC;AAAA,MAAA;AAAA,IAEL;AAAA,EACF;AAEA,MAAI,MAAM,YAAY,CAAC,MAAM,YAAY,CAAC,MAAM,QAAQ;AACtD,UAAM,IAAIC,OAAAA,4BAAA;AAAA,EACZ;AAIA,MAAI,MAAM,UAAU;AAElB,eAAW,SAAS;AAAA,MAClBL,MAAAA,IAAI,CAAC,CAAC,KAAK,aAAa,MAAM;AAC5B,cAAM,gBAAgB,MAAM,SAAU,aAAa;AACnD,eAAO;AAAA,UACL;AAAA,UACA;AAAA,YACE,GAAG;AAAA,YACH,kBAAkB;AAAA,UAAA;AAAA,QACpB;AAAA,MAEJ,CAAC;AAAA,IAAA;AAAA,EAEL,WAAW,MAAM,QAAQ;AACvB,eAAWM,OAAAA,cAAc,UAAU,MAAM,MAAiB;AAAA,EAC5D,OAAO;AAEL,eAAW,SAAS;AAAA,MAClBN,MAAAA,IAAI,CAAC,CAAC,KAAK,aAAa,MAAM;AAC5B,cAAM,gBACJ,CAAC,MAAM,QAAQ,CAAC,MAAM,UAClB,cAAc,UAAU,IACxB;AAEN,eAAO;AAAA,UACL;AAAA,UACA;AAAA,YACE,GAAG;AAAA,YACH,kBAAkB;AAAA,UAAA;AAAA,QACpB;AAAA,MAEJ,CAAC;AAAA,IAAA;AAAA,EAEL;AAGA,MAAI,MAAM,WAAW,MAAM,QAAQ,SAAS,GAAG;AAC7C,eAAWO,QAAAA;AAAAA,MACT;AAAA,MACA,MAAM;AAAA,MACN,MAAM;AAAA,MACN,MAAM;AAAA,MACN,MAAM;AAAA,IAAA;AAAA,EAEV,WAAW,MAAM,QAAQ;AAEvB,UAAM,gBAAgB,OAAO,OAAO,MAAM,MAAM,EAAE;AAAA,MAChD,CAAC,SAAS,KAAK,SAAS;AAAA,IAAA;AAE1B,QAAI,eAAe;AAEjB,iBAAWA,QAAAA;AAAAA,QACT;AAAA,QACA,CAAA;AAAA;AAAA,QACA,MAAM;AAAA,QACN,MAAM;AAAA,QACN,MAAM;AAAA,MAAA;AAAA,IAEV;AAAA,EACF;AAGA,MAAI,MAAM,WAAW,CAAC,MAAM,WAAW,MAAM,QAAQ,WAAW,IAAI;AAElE,UAAM,gBAAgB,MAAM,SACxB,OAAO,OAAO,MAAM,MAAM,EAAE,KAAK,CAAC,SAAS,KAAK,SAAS,KAAK,IAC9D;AAEJ,QAAI,CAAC,eAAe;AAClB,YAAM,IAAIC,OAAAA,2BAAA;AAAA,IACZ;AAAA,EACF;AAGA,MACE,MAAM,YACN,MAAM,SAAS,SAAS,MACvB,CAAC,MAAM,WAAW,MAAM,QAAQ,WAAW,IAC5C;A
AEA,eAAW,YAAY,MAAM,UAAU;AACrC,iBAAW,SAAS;AAAA,QAClBJ,MAAAA,OAAO,CAAC,CAAC,MAAM,aAAa,MAAM;AAChC,iBAAO,SAAS,aAAa;AAAA,QAC/B,CAAC;AAAA,MAAA;AAAA,IAEL;AAAA,EACF;AAGA,MAAI,MAAM,UAAU;AAClB,eAAW,SAAS,KAAKK,eAAS,CAAC,CAAC,MAAM,GAAG,MAAM,IAAI,gBAAgB,CAAC;AAAA,EAC1E;AAGA,MAAI,MAAM,WAAW,MAAM,QAAQ,SAAS,GAAG;AAC7C,UAAM,kBAAkBC,QAAAA;AAAAA,MACtB;AAAA,MACA;AAAA,MACA,MAAM;AAAA,MACN,MAAM,UAAU,CAAA;AAAA,MAChB,YAAY,gBAAgB;AAAA,MAC5B;AAAA,MACA;AAAA,MACA,MAAM;AAAA,MACN,MAAM;AAAA,IAAA;AAIR,UAAMC,kBAAiB,gBAAgB;AAAA,MACrCX,MAAAA,IAAI,CAAC,CAAC,KAAK,CAAC,KAAK,YAAY,CAAC,MAAM;AAElC,cAAM,MAAO,IAAY;AACzB,cAAM,eAAe,YAAY,GAAG;AACpC,eAAO,CAAC,KAAK,CAAC,cAAc,YAAY,CAAC;AAAA,MAC3C,CAAC;AAAA,IAAA;AAGH,UAAMY,UAASD;AAEf,UAAME,qBAAoB;AAAA,MACxB,cAAc;AAAA,MACd,UAAUD;AAAAA,MACV;AAAA,MACA;AAAA,MACA;AAAA,IAAA;AAEF,UAAM,IAAI,UAAUC,kBAAiB;AAErC,WAAOA;AAAAA,EACT,WAAW,MAAM,UAAU,UAAa,MAAM,WAAW,QAAW;AAElE,UAAM,IAAIC,OAAAA,+BAAA;AAAA,EACZ;AAGA,QAAM,iBAA+B,SAAS;AAAA,IAC5Cd,MAAAA,IAAI,CAAC,CAAC,KAAK,GAAG,MAAM;AAElB,YAAM,MAAO,IAAY;AACzB,YAAM,eAAe,YAAY,GAAG;AACpC,aAAO,CAAC,KAAK,CAAC,cAAc,MAAS,CAAC;AAAA,IAIxC,CAAC;AAAA,EAAA;AAGH,QAAM,SAAS;AAEf,QAAM,oBAAoB;AAAA,IACxB,cAAc;AAAA,IACd,UAAU;AAAA,IACV;AAAA,IACA;AAAA,IACA;AAAA,EAAA;AAEF,QAAM,IAAI,UAAU,iBAAiB;AAErC,SAAO;AACT;AAMA,SAAS,YACP,MACA,WACA,aACA,eACA,WACA,aACA,+BACA,aACA,OACA,cACA,qBACA,gBAC6D;AAC7D,UAAQ,KAAK,MAAA;AAAA,IACX,KAAK,iBAAiB;AACpB,YAAM,QAAQ,UAAU,KAAK,KAAK;AAClC,UAAI,CAAC,OAAO;AACV,cAAM,IAAIe,OAAAA;AAAAA,UACR,KAAK;AAAA,UACL,KAAK,WAAW;AAAA,UAChB,OAAO,KAAK,SAAS;AAAA,QAAA;AAAA,MAEzB;AACA,0BAAoB,KAAK,KAAK,IAAI,KAAK,WAAW;AAClD,aAAO,EAAE,OAAO,KAAK,OAAO,OAAO,cAAc,KAAK,WAAW,GAAA;AAAA,IACnE;AAAA,IACA,KAAK,YAAY;AAEf,YAAM,gBAAgB,aAAa,IAAI,KAAK,KAAK,KAAK,KAAK;AAG3D,YAAM,iBAAiB;AAAA,QACrB;AAAA,QACA;AAAA,QACA;AAAA,QACA;AAAA,QACA;AAAA,QACA;AAAA,QACA;AAAA,QACA;AAAA,QACA;AAAA,QACA;AAAA,MAAA;AAMF,aAAO,OAAO,qBAAqB,eAAe,mBAAmB;AACrE,aAAO,OAAO,gBAAgB,eAAe,cAAc;AAc3D,YAAM,aAAa,OAAO,KAAK,eAAe,mBAAmB,EAAE;AAAA,QACjE,CAAC,UACC,eAAe,oBAAoB,KAAK,MACxC,eAAe;AAAA,MAAA;AAEnB,UAAI,cAAc,eAAe,KAAK,OAAO;AAC3C,uBAAe,KAAK,KAAK,IAAI;AAAA,MAC/B;AAGA,YAAM,gBAAgB,eAAe;AAIrC,YAAM,iBAAiB,cAAc;AAAA,QACnCf,MAAAA,IAAI,CAAC,SAAc;AACjB,gBAAM,CAAC,KAAK,CAAC,OAAO,aAAa,CAAC,IAAI;AAEtC,gBAAM,YAAY,YAAY,KAAK;AACnC,iBAAO,CAAC,KAAK,SAAS;AAAA,QACxB,CAAC;AAAA,MAAA;AAGH,aAAO;AAAA,QACL,OAAO,KAAK;AAAA,QACZ,OAAO;AAAA,QACP,cAAc,eAAe;AAAA,MAAA;AAAA,IAEjC;AAAA,IACA;AACE,YAAM,IAAIgB,OAAAA,yBAA0B,KAAa,IAAI;AAAA,EAAA;AAE3D;AAGA,SAAS,QAAQ,KAAmB;AAClC,SACE,eAAeC,GAAAA,SACd,OAAO,OAAO,QAAQ,YAAY,UAAU,OAAO,IAAI,SAAS;AAErE;AAGA,SAAS,YAAY,OAAiB;AACpC,SAAO,QAAQ,KAAK,IAAI,MAAM,QAAQ;AACxC;AAOA,SAAS,iBACP,gBACA,eACA,cACM;AAEN,MACE,eAAe,KAAK,SAAS,cAC7B,cAAc,KAAK,SAAS,YAC5B;AACA,iBAAa,IAAI,eAAe,KAAK,OAAO,cAAc,KAAK,KAAK;AAEpE;AAAA,MACE,eAAe,KAAK;AAAA,MACpB,cAAc,KAAK;AAAA,MACnB;AAAA,IAAA;AAAA,EAEJ;AAGA,MAAI,eAAe,QAAQ,cAAc,MAAM;AAC7C,aACM,IAAI,GACR,IAAI,eAAe,KAAK,UAAU,IAAI,cAAc,KAAK,QACzD,KACA;AACA,YAAM,gBAAgB,eAAe,KAAK,CAAC;AAC3C,YAAM,eAAe,cAAc,KAAK,CAAC;AAEzC,UACE,cAAc,KAAK,SAAS,cAC5B,aAAa,KAAK,SAAS,YAC3B;AACA,qBAAa,IAAI,cAAc,KAAK,OAAO,aAAa,KAAK,KAAK;AAElE;AAAA,UACE,cAAc,KAAK;AAAA,UACnB,aAAa,KAAK;AAAA,UAClB;AAAA,QAAA;AAAA,MAEJ;AAAA,IACF;AAAA,EACF;AACF;;"}
+
{"version":3,"file":"index.cjs","sources":["../../../../src/query/compiler/index.ts"],"sourcesContent":["import { distinct, filter, map } from \"@tanstack/db-ivm\"\nimport { optimizeQuery } from \"../optimizer.js\"\nimport {\n CollectionInputNotFoundError,\n DistinctRequiresSelectError,\n DuplicateAliasInSubqueryError,\n HavingRequiresGroupByError,\n LimitOffsetRequireOrderByError,\n UnsupportedFromTypeError,\n} from \"../../errors.js\"\nimport { PropRef, Value as ValClass, getWhereExpression } from \"../ir.js\"\nimport { compileExpression } from \"./evaluators.js\"\nimport { processJoins } from \"./joins.js\"\nimport { processGroupBy } from \"./group-by.js\"\nimport { processOrderBy } from \"./order-by.js\"\nimport { processSelect } from \"./select.js\"\nimport type { CollectionSubscription } from \"../../collection/subscription.js\"\nimport type { OrderByOptimizationInfo } from \"./order-by.js\"\nimport type {\n BasicExpression,\n CollectionRef,\n QueryIR,\n QueryRef,\n} from \"../ir.js\"\nimport type { LazyCollectionCallbacks } from \"./joins.js\"\nimport type { Collection } from \"../../collection/index.js\"\nimport type {\n KeyedStream,\n NamespacedAndKeyedStream,\n ResultStream,\n} from \"../../types.js\"\nimport type { QueryCache, QueryMapping, WindowOptions } from \"./types.js\"\n\nexport type { WindowOptions } from \"./types.js\"\n\n/**\n * Result of query compilation including both the pipeline and source-specific WHERE clauses\n */\nexport interface CompilationResult {\n /** The ID of the main collection */\n collectionId: string\n\n /** The compiled query pipeline (D2 stream) */\n pipeline: ResultStream\n\n /** Map of source aliases to their WHERE clauses for index optimization */\n sourceWhereClauses: Map<string, BasicExpression<boolean>>\n\n /**\n * Maps each source alias to its collection ID. 
Enables per-alias subscriptions for self-joins.\n * Example: `{ employee: 'employees-col-id', manager: 'employees-col-id' }`\n */\n aliasToCollectionId: Record<string, string>\n\n /**\n * Flattened mapping from outer alias to innermost alias for subqueries.\n * Always provides one-hop lookups, never recursive chains.\n *\n * Example: `{ activeUser: 'user' }` when `.from({ activeUser: subquery })`\n * where the subquery uses `.from({ user: collection })`.\n *\n * For deeply nested subqueries, the mapping goes directly to the innermost alias:\n * `{ author: 'user' }` (not `{ author: 'activeUser' }`), so `aliasRemapping[alias]`\n * always resolves in a single lookup.\n *\n * Used to resolve subscriptions during lazy loading when join aliases differ from\n * the inner aliases where collection subscriptions were created.\n */\n aliasRemapping: Record<string, string>\n}\n\n/**\n * Compiles a query IR into a D2 pipeline\n * @param rawQuery The query IR to compile\n * @param inputs Mapping of source aliases to input streams (e.g., `{ employee: input1, manager: input2 }`)\n * @param collections Mapping of collection IDs to Collection instances\n * @param subscriptions Mapping of source aliases to CollectionSubscription instances\n * @param callbacks Mapping of source aliases to lazy loading callbacks\n * @param lazySources Set of source aliases that should load data lazily\n * @param optimizableOrderByCollections Map of collection IDs to order-by optimization info\n * @param cache Optional cache for compiled subqueries (used internally for recursion)\n * @param queryMapping Optional mapping from optimized queries to original queries\n * @returns A CompilationResult with the pipeline, source WHERE clauses, and alias metadata\n */\nexport function compileQuery(\n rawQuery: QueryIR,\n inputs: Record<string, KeyedStream>,\n collections: Record<string, Collection<any, any, any, any, any>>,\n subscriptions: Record<string, CollectionSubscription>,\n callbacks: Record<string, LazyCollectionCallbacks>,\n lazySources: Set<string>,\n optimizableOrderByCollections: Record<string, OrderByOptimizationInfo>,\n setWindowFn: (windowFn: (options: WindowOptions) => void) => void,\n cache: QueryCache = new WeakMap(),\n queryMapping: QueryMapping = new WeakMap()\n): CompilationResult {\n // Check if the original raw query has already been compiled\n const cachedResult = cache.get(rawQuery)\n if (cachedResult) {\n return cachedResult\n }\n\n // Validate the raw query BEFORE optimization to check user's original structure.\n // This must happen before optimization because the optimizer may create internal\n // subqueries (e.g., for predicate pushdown) that reuse aliases, which is fine.\n validateQueryStructure(rawQuery)\n\n // Optimize the query before compilation\n const { optimizedQuery: query, sourceWhereClauses } = optimizeQuery(rawQuery)\n\n // Create mapping from optimized query to original for caching\n queryMapping.set(query, rawQuery)\n mapNestedQueries(query, rawQuery, queryMapping)\n\n // Create a copy of the inputs map to avoid modifying the original\n const allInputs = { ...inputs }\n\n // Track alias to collection id relationships discovered during compilation.\n // This includes all user-declared aliases plus inner aliases from subqueries.\n const aliasToCollectionId: Record<string, string> = {}\n\n // Track alias remapping for subqueries (outer alias → inner alias)\n // e.g., when .join({ activeUser: subquery }) where subquery uses .from({ user: collection })\n // we store: aliasRemapping['activeUser'] = 
'user'\n const aliasRemapping: Record<string, string> = {}\n\n // Create a map of source aliases to input streams.\n // Inputs MUST be keyed by alias (e.g., `{ employee: input1, manager: input2 }`),\n // not by collection ID. This enables per-alias subscriptions where different aliases\n // of the same collection (e.g., self-joins) maintain independent filtered streams.\n const sources: Record<string, KeyedStream> = {}\n\n // Process the FROM clause to get the main source\n const {\n alias: mainSource,\n input: mainInput,\n collectionId: mainCollectionId,\n } = processFrom(\n query.from,\n allInputs,\n collections,\n subscriptions,\n callbacks,\n lazySources,\n optimizableOrderByCollections,\n setWindowFn,\n cache,\n queryMapping,\n aliasToCollectionId,\n aliasRemapping\n )\n sources[mainSource] = mainInput\n\n // Prepare the initial pipeline with the main source wrapped in its alias\n let pipeline: NamespacedAndKeyedStream = mainInput.pipe(\n map(([key, row]) => {\n // Initialize the record with a nested structure\n const ret = [key, { [mainSource]: row }] as [\n string,\n Record<string, typeof row>,\n ]\n return ret\n })\n )\n\n // Process JOIN clauses if they exist\n if (query.join && query.join.length > 0) {\n pipeline = processJoins(\n pipeline,\n query.join,\n sources,\n mainCollectionId,\n mainSource,\n allInputs,\n cache,\n queryMapping,\n collections,\n subscriptions,\n callbacks,\n lazySources,\n optimizableOrderByCollections,\n setWindowFn,\n rawQuery,\n compileQuery,\n aliasToCollectionId,\n aliasRemapping\n )\n }\n\n // Process the WHERE clause if it exists\n if (query.where && query.where.length > 0) {\n // Apply each WHERE condition as a filter (they are ANDed together)\n for (const where of query.where) {\n const whereExpression = getWhereExpression(where)\n const compiledWhere = compileExpression(whereExpression)\n pipeline = pipeline.pipe(\n filter(([_key, namespacedRow]) => {\n return compiledWhere(namespacedRow)\n })\n )\n }\n }\n\n // Process functional WHERE clauses if they exist\n if (query.fnWhere && query.fnWhere.length > 0) {\n for (const fnWhere of query.fnWhere) {\n pipeline = pipeline.pipe(\n filter(([_key, namespacedRow]) => {\n return fnWhere(namespacedRow)\n })\n )\n }\n }\n\n if (query.distinct && !query.fnSelect && !query.select) {\n throw new DistinctRequiresSelectError()\n }\n\n // Process the SELECT clause early - always create __select_results\n // This eliminates duplication and allows for DISTINCT implementation\n if (query.fnSelect) {\n // Handle functional select - apply the function to transform the row\n pipeline = pipeline.pipe(\n map(([key, namespacedRow]) => {\n const selectResults = query.fnSelect!(namespacedRow)\n return [\n key,\n {\n ...namespacedRow,\n __select_results: selectResults,\n },\n ] as [string, typeof namespacedRow & { __select_results: any }]\n })\n )\n } else if (query.select) {\n pipeline = processSelect(pipeline, query.select, allInputs)\n } else {\n // If no SELECT clause, create __select_results with the main table data\n pipeline = pipeline.pipe(\n map(([key, namespacedRow]) => {\n const selectResults =\n !query.join && !query.groupBy\n ? 
namespacedRow[mainSource]\n : namespacedRow\n\n return [\n key,\n {\n ...namespacedRow,\n __select_results: selectResults,\n },\n ] as [string, typeof namespacedRow & { __select_results: any }]\n })\n )\n }\n\n // Process the GROUP BY clause if it exists\n if (query.groupBy && query.groupBy.length > 0) {\n pipeline = processGroupBy(\n pipeline,\n query.groupBy,\n query.having,\n query.select,\n query.fnHaving\n )\n } else if (query.select) {\n // Check if SELECT contains aggregates but no GROUP BY (implicit single-group aggregation)\n const hasAggregates = Object.values(query.select).some(\n (expr) => expr.type === `agg`\n )\n if (hasAggregates) {\n // Handle implicit single-group aggregation\n pipeline = processGroupBy(\n pipeline,\n [], // Empty group by means single group\n query.having,\n query.select,\n query.fnHaving\n )\n }\n }\n\n // Process the HAVING clause if it exists (only applies after GROUP BY)\n if (query.having && (!query.groupBy || query.groupBy.length === 0)) {\n // Check if we have aggregates in SELECT that would trigger implicit grouping\n const hasAggregates = query.select\n ? Object.values(query.select).some((expr) => expr.type === `agg`)\n : false\n\n if (!hasAggregates) {\n throw new HavingRequiresGroupByError()\n }\n }\n\n // Process functional HAVING clauses outside of GROUP BY (treat as additional WHERE filters)\n if (\n query.fnHaving &&\n query.fnHaving.length > 0 &&\n (!query.groupBy || query.groupBy.length === 0)\n ) {\n // If there's no GROUP BY but there are fnHaving clauses, apply them as filters\n for (const fnHaving of query.fnHaving) {\n pipeline = pipeline.pipe(\n filter(([_key, namespacedRow]) => {\n return fnHaving(namespacedRow)\n })\n )\n }\n }\n\n // Process the DISTINCT clause if it exists\n if (query.distinct) {\n pipeline = pipeline.pipe(distinct(([_key, row]) => row.__select_results))\n }\n\n // Process orderBy parameter if it exists\n if (query.orderBy && query.orderBy.length > 0) {\n const orderedPipeline = processOrderBy(\n rawQuery,\n pipeline,\n query.orderBy,\n query.select || {},\n collections[mainCollectionId]!,\n optimizableOrderByCollections,\n setWindowFn,\n query.limit,\n query.offset\n )\n\n // Final step: extract the __select_results and include orderBy index\n const resultPipeline = orderedPipeline.pipe(\n map(([key, [row, orderByIndex]]) => {\n // Extract the final results from __select_results and include orderBy index\n const raw = (row as any).__select_results\n const finalResults = unwrapValue(raw)\n return [key, [finalResults, orderByIndex]] as [unknown, [any, string]]\n })\n )\n\n const result = resultPipeline\n // Cache the result before returning (use original query as key)\n const compilationResult = {\n collectionId: mainCollectionId,\n pipeline: result,\n sourceWhereClauses,\n aliasToCollectionId,\n aliasRemapping,\n }\n cache.set(rawQuery, compilationResult)\n\n return compilationResult\n } else if (query.limit !== undefined || query.offset !== undefined) {\n // If there's a limit or offset without orderBy, throw an error\n throw new LimitOffsetRequireOrderByError()\n }\n\n // Final step: extract the __select_results and return tuple format (no orderBy)\n const resultPipeline: ResultStream = pipeline.pipe(\n map(([key, row]) => {\n // Extract the final results from __select_results and return [key, [results, undefined]]\n const raw = (row as any).__select_results\n const finalResults = unwrapValue(raw)\n return [key, [finalResults, undefined]] as [\n unknown,\n [any, string | undefined],\n ]\n })\n )\n\n const 
result = resultPipeline\n // Cache the result before returning (use original query as key)\n const compilationResult = {\n collectionId: mainCollectionId,\n pipeline: result,\n sourceWhereClauses,\n aliasToCollectionId,\n aliasRemapping,\n }\n cache.set(rawQuery, compilationResult)\n\n return compilationResult\n}\n\n/**\n * Collects aliases used for DIRECT collection references (not subqueries).\n * Used to validate that subqueries don't reuse parent query collection aliases.\n * Only direct CollectionRef aliases matter - QueryRef aliases don't cause conflicts.\n */\nfunction collectDirectCollectionAliases(query: QueryIR): Set<string> {\n const aliases = new Set<string>()\n\n // Collect FROM alias only if it's a direct collection reference\n if (query.from.type === `collectionRef`) {\n aliases.add(query.from.alias)\n }\n\n // Collect JOIN aliases only for direct collection references\n if (query.join) {\n for (const joinClause of query.join) {\n if (joinClause.from.type === `collectionRef`) {\n aliases.add(joinClause.from.alias)\n }\n }\n }\n\n return aliases\n}\n\n/**\n * Validates the structure of a query and its subqueries.\n * Checks that subqueries don't reuse collection aliases from parent queries.\n * This must be called on the RAW query before optimization.\n */\nfunction validateQueryStructure(\n query: QueryIR,\n parentCollectionAliases: Set<string> = new Set()\n): void {\n // Collect direct collection aliases from this query level\n const currentLevelAliases = collectDirectCollectionAliases(query)\n\n // Check if any current alias conflicts with parent aliases\n for (const alias of currentLevelAliases) {\n if (parentCollectionAliases.has(alias)) {\n throw new DuplicateAliasInSubqueryError(\n alias,\n Array.from(parentCollectionAliases)\n )\n }\n }\n\n // Combine parent and current aliases for checking nested subqueries\n const combinedAliases = new Set([\n ...parentCollectionAliases,\n ...currentLevelAliases,\n ])\n\n // Recursively validate FROM subquery\n if (query.from.type === `queryRef`) {\n validateQueryStructure(query.from.query, combinedAliases)\n }\n\n // Recursively validate JOIN subqueries\n if (query.join) {\n for (const joinClause of query.join) {\n if (joinClause.from.type === `queryRef`) {\n validateQueryStructure(joinClause.from.query, combinedAliases)\n }\n }\n }\n}\n\n/**\n * Processes the FROM clause, handling direct collection references and subqueries.\n * Populates `aliasToCollectionId` and `aliasRemapping` for per-alias subscription tracking.\n */\nfunction processFrom(\n from: CollectionRef | QueryRef,\n allInputs: Record<string, KeyedStream>,\n collections: Record<string, Collection>,\n subscriptions: Record<string, CollectionSubscription>,\n callbacks: Record<string, LazyCollectionCallbacks>,\n lazySources: Set<string>,\n optimizableOrderByCollections: Record<string, OrderByOptimizationInfo>,\n setWindowFn: (windowFn: (options: WindowOptions) => void) => void,\n cache: QueryCache,\n queryMapping: QueryMapping,\n aliasToCollectionId: Record<string, string>,\n aliasRemapping: Record<string, string>\n): { alias: string; input: KeyedStream; collectionId: string } {\n switch (from.type) {\n case `collectionRef`: {\n const input = allInputs[from.alias]\n if (!input) {\n throw new CollectionInputNotFoundError(\n from.alias,\n from.collection.id,\n Object.keys(allInputs)\n )\n }\n aliasToCollectionId[from.alias] = from.collection.id\n return { alias: from.alias, input, collectionId: from.collection.id }\n }\n case `queryRef`: {\n // Find the original query for 
caching purposes\n const originalQuery = queryMapping.get(from.query) || from.query\n\n // Recursively compile the sub-query with cache\n const subQueryResult = compileQuery(\n originalQuery,\n allInputs,\n collections,\n subscriptions,\n callbacks,\n lazySources,\n optimizableOrderByCollections,\n setWindowFn,\n cache,\n queryMapping\n )\n\n // Pull up alias mappings from subquery to parent scope.\n // This includes both the innermost alias-to-collection mappings AND\n // any existing remappings from nested subquery levels.\n Object.assign(aliasToCollectionId, subQueryResult.aliasToCollectionId)\n Object.assign(aliasRemapping, subQueryResult.aliasRemapping)\n\n // Create a FLATTENED remapping from outer alias to innermost alias.\n // For nested subqueries, this ensures one-hop lookups (not recursive chains).\n //\n // Example with 3-level nesting:\n // Inner: .from({ user: usersCollection })\n // Middle: .from({ activeUser: innerSubquery }) → creates: activeUser → user\n // Outer: .from({ author: middleSubquery }) → creates: author → user (not author → activeUser)\n //\n // The key insight: We search through the PULLED-UP aliasToCollectionId (which contains\n // the innermost 'user' alias), so we always map directly to the deepest level.\n // This means aliasRemapping[alias] is always a single lookup, never recursive.\n // Needed for subscription resolution during lazy loading.\n const innerAlias = Object.keys(subQueryResult.aliasToCollectionId).find(\n (alias) =>\n subQueryResult.aliasToCollectionId[alias] ===\n subQueryResult.collectionId\n )\n if (innerAlias && innerAlias !== from.alias) {\n aliasRemapping[from.alias] = innerAlias\n }\n\n // Extract the pipeline from the compilation result\n const subQueryInput = subQueryResult.pipeline\n\n // Subqueries may return [key, [value, orderByIndex]] (with ORDER BY) or [key, value] (without ORDER BY)\n // We need to extract just the value for use in parent queries\n const extractedInput = subQueryInput.pipe(\n map((data: any) => {\n const [key, [value, _orderByIndex]] = data\n // Unwrap Value expressions that might have leaked through as the entire row\n const unwrapped = unwrapValue(value)\n return [key, unwrapped] as [unknown, any]\n })\n )\n\n return {\n alias: from.alias,\n input: extractedInput,\n collectionId: subQueryResult.collectionId,\n }\n }\n default:\n throw new UnsupportedFromTypeError((from as any).type)\n }\n}\n\n// Helper to check if a value is a Value expression\nfunction isValue(raw: any): boolean {\n return (\n raw instanceof ValClass ||\n (raw && typeof raw === `object` && `type` in raw && raw.type === `val`)\n )\n}\n\n// Helper to unwrap a Value expression or return the value itself\nfunction unwrapValue(value: any): any {\n return isValue(value) ? 
value.value : value\n}\n\n/**\n * Recursively maps optimized subqueries to their original queries for proper caching.\n * This ensures that when we encounter the same QueryRef object in different contexts,\n * we can find the original query to check the cache.\n */\nfunction mapNestedQueries(\n optimizedQuery: QueryIR,\n originalQuery: QueryIR,\n queryMapping: QueryMapping\n): void {\n // Map the FROM clause if it's a QueryRef\n if (\n optimizedQuery.from.type === `queryRef` &&\n originalQuery.from.type === `queryRef`\n ) {\n queryMapping.set(optimizedQuery.from.query, originalQuery.from.query)\n // Recursively map nested queries\n mapNestedQueries(\n optimizedQuery.from.query,\n originalQuery.from.query,\n queryMapping\n )\n }\n\n // Map JOIN clauses if they exist\n if (optimizedQuery.join && originalQuery.join) {\n for (\n let i = 0;\n i < optimizedQuery.join.length && i < originalQuery.join.length;\n i++\n ) {\n const optimizedJoin = optimizedQuery.join[i]!\n const originalJoin = originalQuery.join[i]!\n\n if (\n optimizedJoin.from.type === `queryRef` &&\n originalJoin.from.type === `queryRef`\n ) {\n queryMapping.set(optimizedJoin.from.query, originalJoin.from.query)\n // Recursively map nested queries in joins\n mapNestedQueries(\n optimizedJoin.from.query,\n originalJoin.from.query,\n queryMapping\n )\n }\n }\n }\n}\n\nfunction getRefFromAlias(\n query: QueryIR,\n alias: string\n): CollectionRef | QueryRef | void {\n if (query.from.alias === alias) {\n return query.from\n }\n\n for (const join of query.join || []) {\n if (join.from.alias === alias) {\n return join.from\n }\n }\n}\n\n/**\n * Follows the given reference in a query\n * until its finds the root field the reference points to.\n * @returns The collection, its alias, and the path to the root field in this collection\n */\nexport function followRef(\n query: QueryIR,\n ref: PropRef<any>,\n collection: Collection\n): { collection: Collection; path: Array<string> } | void {\n if (ref.path.length === 0) {\n return\n }\n\n if (ref.path.length === 1) {\n // This field should be part of this collection\n const field = ref.path[0]!\n // is it part of the select clause?\n if (query.select) {\n const selectedField = query.select[field]\n if (selectedField && selectedField.type === `ref`) {\n return followRef(query, selectedField, collection)\n }\n }\n\n // Either this field is not part of the select clause\n // and thus it must be part of the collection itself\n // or it is part of the select but is not a reference\n // so we can stop here and don't have to follow it\n return { collection, path: [field] }\n }\n\n if (ref.path.length > 1) {\n // This is a nested field\n const [alias, ...rest] = ref.path\n const aliasRef = getRefFromAlias(query, alias!)\n if (!aliasRef) {\n return\n }\n\n if (aliasRef.type === `queryRef`) {\n return followRef(aliasRef.query, new PropRef(rest), collection)\n } else {\n // This is a reference to a collection\n // we can't follow it further\n // so the field must be on the collection itself\n return { collection: aliasRef.collection, path: rest }\n }\n }\n}\n\nexport type CompileQueryFn = typeof 
compileQuery\n"],"names":["optimizeQuery","map","processJoins","getWhereExpression","compileExpression","filter","DistinctRequiresSelectError","processSelect","processGroupBy","HavingRequiresGroupByError","distinct","processOrderBy","resultPipeline","result","compilationResult","LimitOffsetRequireOrderByError","DuplicateAliasInSubqueryError","CollectionInputNotFoundError","UnsupportedFromTypeError","ValClass"],"mappings":";;;;;;;;;;;AAoFO,SAAS,aACd,UACA,QACA,aACA,eACA,WACA,aACA,+BACA,aACA,4BAAwB,QAAA,GACxB,eAA6B,oBAAI,WACd;AAEnB,QAAM,eAAe,MAAM,IAAI,QAAQ;AACvC,MAAI,cAAc;AAChB,WAAO;AAAA,EACT;AAKA,yBAAuB,QAAQ;AAG/B,QAAM,EAAE,gBAAgB,OAAO,mBAAA,IAAuBA,UAAAA,cAAc,QAAQ;AAG5E,eAAa,IAAI,OAAO,QAAQ;AAChC,mBAAiB,OAAO,UAAU,YAAY;AAG9C,QAAM,YAAY,EAAE,GAAG,OAAA;AAIvB,QAAM,sBAA8C,CAAA;AAKpD,QAAM,iBAAyC,CAAA;AAM/C,QAAM,UAAuC,CAAA;AAG7C,QAAM;AAAA,IACJ,OAAO;AAAA,IACP,OAAO;AAAA,IACP,cAAc;AAAA,EAAA,IACZ;AAAA,IACF,MAAM;AAAA,IACN;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,EAAA;AAEF,UAAQ,UAAU,IAAI;AAGtB,MAAI,WAAqC,UAAU;AAAA,IACjDC,MAAAA,IAAI,CAAC,CAAC,KAAK,GAAG,MAAM;AAElB,YAAM,MAAM,CAAC,KAAK,EAAE,CAAC,UAAU,GAAG,KAAK;AAIvC,aAAO;AAAA,IACT,CAAC;AAAA,EAAA;AAIH,MAAI,MAAM,QAAQ,MAAM,KAAK,SAAS,GAAG;AACvC,eAAWC,MAAAA;AAAAA,MACT;AAAA,MACA,MAAM;AAAA,MACN;AAAA,MACA;AAAA,MACA;AAAA,MACA;AAAA,MACA;AAAA,MACA;AAAA,MACA;AAAA,MACA;AAAA,MACA;AAAA,MACA;AAAA,MACA;AAAA,MACA;AAAA,MACA;AAAA,MACA;AAAA,MACA;AAAA,MACA;AAAA,IAAA;AAAA,EAEJ;AAGA,MAAI,MAAM,SAAS,MAAM,MAAM,SAAS,GAAG;AAEzC,eAAW,SAAS,MAAM,OAAO;AAC/B,YAAM,kBAAkBC,GAAAA,mBAAmB,KAAK;AAChD,YAAM,gBAAgBC,WAAAA,kBAAkB,eAAe;AACvD,iBAAW,SAAS;AAAA,QAClBC,MAAAA,OAAO,CAAC,CAAC,MAAM,aAAa,MAAM;AAChC,iBAAO,cAAc,aAAa;AAAA,QACpC,CAAC;AAAA,MAAA;AAAA,IAEL;AAAA,EACF;AAGA,MAAI,MAAM,WAAW,MAAM,QAAQ,SAAS,GAAG;AAC7C,eAAW,WAAW,MAAM,SAAS;AACnC,iBAAW,SAAS;AAAA,QAClBA,MAAAA,OAAO,CAAC,CAAC,MAAM,aAAa,MAAM;AAChC,iBAAO,QAAQ,aAAa;AAAA,QAC9B,CAAC;AAAA,MAAA;AAAA,IAEL;AAAA,EACF;AAEA,MAAI,MAAM,YAAY,CAAC,MAAM,YAAY,CAAC,MAAM,QAAQ;AACtD,UAAM,IAAIC,OAAAA,4BAAA;AAAA,EACZ;AAIA,MAAI,MAAM,UAAU;AAElB,eAAW,SAAS;AAAA,MAClBL,MAAAA,IAAI,CAAC,CAAC,KAAK,aAAa,MAAM;AAC5B,cAAM,gBAAgB,MAAM,SAAU,aAAa;AACnD,eAAO;AAAA,UACL;AAAA,UACA;AAAA,YACE,GAAG;AAAA,YACH,kBAAkB;AAAA,UAAA;AAAA,QACpB;AAAA,MAEJ,CAAC;AAAA,IAAA;AAAA,EAEL,WAAW,MAAM,QAAQ;AACvB,eAAWM,OAAAA,cAAc,UAAU,MAAM,MAAiB;AAAA,EAC5D,OAAO;AAEL,eAAW,SAAS;AAAA,MAClBN,MAAAA,IAAI,CAAC,CAAC,KAAK,aAAa,MAAM;AAC5B,cAAM,gBACJ,CAAC,MAAM,QAAQ,CAAC,MAAM,UAClB,cAAc,UAAU,IACxB;AAEN,eAAO;AAAA,UACL;AAAA,UACA;AAAA,YACE,GAAG;AAAA,YACH,kBAAkB;AAAA,UAAA;AAAA,QACpB;AAAA,MAEJ,CAAC;AAAA,IAAA;AAAA,EAEL;AAGA,MAAI,MAAM,WAAW,MAAM,QAAQ,SAAS,GAAG;AAC7C,eAAWO,QAAAA;AAAAA,MACT;AAAA,MACA,MAAM;AAAA,MACN,MAAM;AAAA,MACN,MAAM;AAAA,MACN,MAAM;AAAA,IAAA;AAAA,EAEV,WAAW,MAAM,QAAQ;AAEvB,UAAM,gBAAgB,OAAO,OAAO,MAAM,MAAM,EAAE;AAAA,MAChD,CAAC,SAAS,KAAK,SAAS;AAAA,IAAA;AAE1B,QAAI,eAAe;AAEjB,iBAAWA,QAAAA;AAAAA,QACT;AAAA,QACA,CAAA;AAAA;AAAA,QACA,MAAM;AAAA,QACN,MAAM;AAAA,QACN,MAAM;AAAA,MAAA;AAAA,IAEV;AAAA,EACF;AAGA,MAAI,MAAM,WAAW,CAAC,MAAM,WAAW,MAAM,QAAQ,WAAW,IAAI;AAElE,UAAM,gBAAgB,MAAM,SACxB,OAAO,OAAO,MAAM,MAAM,EAAE,KAAK,CAAC,SAAS,KAAK,SAAS,KAAK,IAC9D;AAEJ,QAAI,CAAC,eAAe;AAClB,YAAM,IAAIC,OAAAA,2BAAA;AAAA,IACZ;AAAA,EACF;AAGA,MACE,MAAM,YACN,MAAM,SAAS,SAAS,MACvB,CAAC,MAAM,WAAW,MAAM,QAAQ,WAAW,IAC5C;AAEA,eAAW,YAAY,MAAM,UAAU;AACrC,iBAAW,SAAS;AAAA,QAClBJ,MAAAA,OAAO,CAAC,CAAC,MAAM,aAAa,MAAM;AAChC,iBAAO,SAAS,aAAa;AAAA,QAC/B,CAAC;AAAA,MAAA;AAAA,IAEL;AAAA,EACF;AAGA,MAAI,MAAM,UAAU;AAClB,eAAW,SAAS,KAAKK,eAAS,CAAC,CAAC,MAAM,GAAG,MAAM,IAAI,gBAAgB,CAAC;AAAA,EAC1E;AAGA,MAAI,MAAM,WAAW,MAAM,QAAQ,SAAS
,GAAG;AAC7C,UAAM,kBAAkBC,QAAAA;AAAAA,MACtB;AAAA,MACA;AAAA,MACA,MAAM;AAAA,MACN,MAAM,UAAU,CAAA;AAAA,MAChB,YAAY,gBAAgB;AAAA,MAC5B;AAAA,MACA;AAAA,MACA,MAAM;AAAA,MACN,MAAM;AAAA,IAAA;AAIR,UAAMC,kBAAiB,gBAAgB;AAAA,MACrCX,MAAAA,IAAI,CAAC,CAAC,KAAK,CAAC,KAAK,YAAY,CAAC,MAAM;AAElC,cAAM,MAAO,IAAY;AACzB,cAAM,eAAe,YAAY,GAAG;AACpC,eAAO,CAAC,KAAK,CAAC,cAAc,YAAY,CAAC;AAAA,MAC3C,CAAC;AAAA,IAAA;AAGH,UAAMY,UAASD;AAEf,UAAME,qBAAoB;AAAA,MACxB,cAAc;AAAA,MACd,UAAUD;AAAAA,MACV;AAAA,MACA;AAAA,MACA;AAAA,IAAA;AAEF,UAAM,IAAI,UAAUC,kBAAiB;AAErC,WAAOA;AAAAA,EACT,WAAW,MAAM,UAAU,UAAa,MAAM,WAAW,QAAW;AAElE,UAAM,IAAIC,OAAAA,+BAAA;AAAA,EACZ;AAGA,QAAM,iBAA+B,SAAS;AAAA,IAC5Cd,MAAAA,IAAI,CAAC,CAAC,KAAK,GAAG,MAAM;AAElB,YAAM,MAAO,IAAY;AACzB,YAAM,eAAe,YAAY,GAAG;AACpC,aAAO,CAAC,KAAK,CAAC,cAAc,MAAS,CAAC;AAAA,IAIxC,CAAC;AAAA,EAAA;AAGH,QAAM,SAAS;AAEf,QAAM,oBAAoB;AAAA,IACxB,cAAc;AAAA,IACd,UAAU;AAAA,IACV;AAAA,IACA;AAAA,IACA;AAAA,EAAA;AAEF,QAAM,IAAI,UAAU,iBAAiB;AAErC,SAAO;AACT;AAOA,SAAS,+BAA+B,OAA6B;AACnE,QAAM,8BAAc,IAAA;AAGpB,MAAI,MAAM,KAAK,SAAS,iBAAiB;AACvC,YAAQ,IAAI,MAAM,KAAK,KAAK;AAAA,EAC9B;AAGA,MAAI,MAAM,MAAM;AACd,eAAW,cAAc,MAAM,MAAM;AACnC,UAAI,WAAW,KAAK,SAAS,iBAAiB;AAC5C,gBAAQ,IAAI,WAAW,KAAK,KAAK;AAAA,MACnC;AAAA,IACF;AAAA,EACF;AAEA,SAAO;AACT;AAOA,SAAS,uBACP,OACA,0BAAuC,oBAAI,OACrC;AAEN,QAAM,sBAAsB,+BAA+B,KAAK;AAGhE,aAAW,SAAS,qBAAqB;AACvC,QAAI,wBAAwB,IAAI,KAAK,GAAG;AACtC,YAAM,IAAIe,OAAAA;AAAAA,QACR;AAAA,QACA,MAAM,KAAK,uBAAuB;AAAA,MAAA;AAAA,IAEtC;AAAA,EACF;AAGA,QAAM,sCAAsB,IAAI;AAAA,IAC9B,GAAG;AAAA,IACH,GAAG;AAAA,EAAA,CACJ;AAGD,MAAI,MAAM,KAAK,SAAS,YAAY;AAClC,2BAAuB,MAAM,KAAK,OAAO,eAAe;AAAA,EAC1D;AAGA,MAAI,MAAM,MAAM;AACd,eAAW,cAAc,MAAM,MAAM;AACnC,UAAI,WAAW,KAAK,SAAS,YAAY;AACvC,+BAAuB,WAAW,KAAK,OAAO,eAAe;AAAA,MAC/D;AAAA,IACF;AAAA,EACF;AACF;AAMA,SAAS,YACP,MACA,WACA,aACA,eACA,WACA,aACA,+BACA,aACA,OACA,cACA,qBACA,gBAC6D;AAC7D,UAAQ,KAAK,MAAA;AAAA,IACX,KAAK,iBAAiB;AACpB,YAAM,QAAQ,UAAU,KAAK,KAAK;AAClC,UAAI,CAAC,OAAO;AACV,cAAM,IAAIC,OAAAA;AAAAA,UACR,KAAK;AAAA,UACL,KAAK,WAAW;AAAA,UAChB,OAAO,KAAK,SAAS;AAAA,QAAA;AAAA,MAEzB;AACA,0BAAoB,KAAK,KAAK,IAAI,KAAK,WAAW;AAClD,aAAO,EAAE,OAAO,KAAK,OAAO,OAAO,cAAc,KAAK,WAAW,GAAA;AAAA,IACnE;AAAA,IACA,KAAK,YAAY;AAEf,YAAM,gBAAgB,aAAa,IAAI,KAAK,KAAK,KAAK,KAAK;AAG3D,YAAM,iBAAiB;AAAA,QACrB;AAAA,QACA;AAAA,QACA;AAAA,QACA;AAAA,QACA;AAAA,QACA;AAAA,QACA;AAAA,QACA;AAAA,QACA;AAAA,QACA;AAAA,MAAA;AAMF,aAAO,OAAO,qBAAqB,eAAe,mBAAmB;AACrE,aAAO,OAAO,gBAAgB,eAAe,cAAc;AAc3D,YAAM,aAAa,OAAO,KAAK,eAAe,mBAAmB,EAAE;AAAA,QACjE,CAAC,UACC,eAAe,oBAAoB,KAAK,MACxC,eAAe;AAAA,MAAA;AAEnB,UAAI,cAAc,eAAe,KAAK,OAAO;AAC3C,uBAAe,KAAK,KAAK,IAAI;AAAA,MAC/B;AAGA,YAAM,gBAAgB,eAAe;AAIrC,YAAM,iBAAiB,cAAc;AAAA,QACnChB,MAAAA,IAAI,CAAC,SAAc;AACjB,gBAAM,CAAC,KAAK,CAAC,OAAO,aAAa,CAAC,IAAI;AAEtC,gBAAM,YAAY,YAAY,KAAK;AACnC,iBAAO,CAAC,KAAK,SAAS;AAAA,QACxB,CAAC;AAAA,MAAA;AAGH,aAAO;AAAA,QACL,OAAO,KAAK;AAAA,QACZ,OAAO;AAAA,QACP,cAAc,eAAe;AAAA,MAAA;AAAA,IAEjC;AAAA,IACA;AACE,YAAM,IAAIiB,OAAAA,yBAA0B,KAAa,IAAI;AAAA,EAAA;AAE3D;AAGA,SAAS,QAAQ,KAAmB;AAClC,SACE,eAAeC,GAAAA,SACd,OAAO,OAAO,QAAQ,YAAY,UAAU,OAAO,IAAI,SAAS;AAErE;AAGA,SAAS,YAAY,OAAiB;AACpC,SAAO,QAAQ,KAAK,IAAI,MAAM,QAAQ;AACxC;AAOA,SAAS,iBACP,gBACA,eACA,cACM;AAEN,MACE,eAAe,KAAK,SAAS,cAC7B,cAAc,KAAK,SAAS,YAC5B;AACA,iBAAa,IAAI,eAAe,KAAK,OAAO,cAAc,KAAK,KAAK;AAEpE;AAAA,MACE,eAAe,KAAK;AAAA,MACpB,cAAc,KAAK;AAAA,MACnB;AAAA,IAAA;AAAA,EAEJ;AAGA,MAAI,eAAe,QAAQ,cAAc,MAAM;AAC7C,aACM,IAAI,GACR,IAAI,eAAe,KAAK,UAAU,IAAI,cAAc,KAAK,QACzD,KACA;AACA,YAAM,gBAAgB,eAAe,KAAK,CAAC;AAC3C,YAAM,eAAe,cAAc,KAAK,CAAC;AAEzC,UACE,cAAc,KAAK,SAAS,cAC5B,aAAa,KAAK,SAAS,YAC3B;AACA,qBAAa,IAAI,cAAc,KAAK,OAAO,aAAa,KAAK,KAAK;AAElE;AAAA,UACE,cAAc,KAAK;AAAA,UA
CnB,aAAa,KAAK;AAAA,UAClB;AAAA,QAAA;AAAA,MAEJ;AAAA,IACF;AAAA,EACF;AACF;;"}
@@ -101,12 +101,9 @@ function processOrderBy(rawQuery, pipeline, orderByClause, selectClause, collect
       };
       optimizableOrderByCollections[followRefCollection.id] = orderByOptimizationInfo;
       setSizeCallback = (getSize) => {
-        optimizableOrderByCollections[followRefCollection.id] = {
-          ...optimizableOrderByCollections[followRefCollection.id],
-          dataNeeded: () => {
-            const size = getSize();
-            return Math.max(0, orderByOptimizationInfo.limit - size);
-          }
+        optimizableOrderByCollections[followRefCollection.id][`dataNeeded`] = () => {
+          const size = getSize();
+          return Math.max(0, orderByOptimizationInfo.limit - size);
         };
       };
     }
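The behavioural point of this hunk is object identity. Previously, each `setSizeCallback` invocation replaced the registry entry with a spread copy, forking it from the `orderByOptimizationInfo` object that the window function keeps mutating with fresh `offset`/`limit` values; the new code assigns `dataNeeded` on the existing entry, so there is only ever one shared object. A minimal standalone sketch of that aliasing difference (hypothetical `registry`/`Info` names, not the library's API):

```ts
type Info = { limit: number; dataNeeded?: () => number }

const registry: Record<string, Info> = {}
const info: Info = { limit: 10 }
registry.users = info

// Old style: replacing the entry with a spread copy forks the object.
registry.users = { ...registry.users, dataNeeded: () => 0 }
info.limit = 20 // e.g. the window function moves the limit
console.log(registry.users.limit) // 10 — the copied entry went stale

// New style: mutate the shared entry in place; no fork.
registry.users = info
registry.users.dataNeeded = () => 0
info.limit = 30
console.log(registry.users.limit) // 30 — registry readers stay in sync
```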
@@ -1 +1 @@
-
{"version":3,"file":"order-by.cjs","sources":["../../../../src/query/compiler/order-by.ts"],"sourcesContent":["import { orderByWithFractionalIndex } from \"@tanstack/db-ivm\"\nimport { defaultComparator, makeComparator } from \"../../utils/comparison.js\"\nimport { PropRef, followRef } from \"../ir.js\"\nimport { ensureIndexForField } from \"../../indexes/auto-index.js\"\nimport { findIndexForField } from \"../../utils/index-optimization.js\"\nimport { compileExpression } from \"./evaluators.js\"\nimport { replaceAggregatesByRefs } from \"./group-by.js\"\nimport type { WindowOptions } from \"./types.js\"\nimport type { CompiledSingleRowExpression } from \"./evaluators.js\"\nimport type { OrderBy, OrderByClause, QueryIR, Select } from \"../ir.js\"\nimport type { NamespacedAndKeyedStream, NamespacedRow } from \"../../types.js\"\nimport type { IStreamBuilder, KeyValue } from \"@tanstack/db-ivm\"\nimport type { IndexInterface } from \"../../indexes/base-index.js\"\nimport type { Collection } from \"../../collection/index.js\"\n\nexport type OrderByOptimizationInfo = {\n alias: string\n orderBy: OrderBy\n offset: number\n limit: number\n comparator: (\n a: Record<string, unknown> | null | undefined,\n b: Record<string, unknown> | null | undefined\n ) => number\n valueExtractorForRawRow: (row: Record<string, unknown>) => any\n index: IndexInterface<string | number>\n dataNeeded?: () => number\n}\n\n/**\n * Processes the ORDER BY clause\n * Works with the new structure that has both namespaced row data and __select_results\n * Always uses fractional indexing and adds the index as __ordering_index to the result\n */\nexport function processOrderBy(\n rawQuery: QueryIR,\n pipeline: NamespacedAndKeyedStream,\n orderByClause: Array<OrderByClause>,\n selectClause: Select,\n collection: Collection,\n optimizableOrderByCollections: Record<string, OrderByOptimizationInfo>,\n setWindowFn: (windowFn: (options: WindowOptions) => void) => void,\n limit?: number,\n offset?: number\n): IStreamBuilder<KeyValue<unknown, [NamespacedRow, string]>> {\n // Pre-compile all order by expressions\n const compiledOrderBy = orderByClause.map((clause) => {\n const clauseWithoutAggregates = replaceAggregatesByRefs(\n clause.expression,\n selectClause,\n `__select_results`\n )\n return {\n compiledExpression: compileExpression(clauseWithoutAggregates),\n compareOptions: clause.compareOptions,\n }\n })\n\n // Create a value extractor function for the orderBy operator\n const valueExtractor = (row: NamespacedRow & { __select_results?: any }) => {\n // The namespaced row contains:\n // 1. Table aliases as top-level properties (e.g., row[\"tableName\"])\n // 2. 
SELECT results in __select_results (e.g., row.__select_results[\"aggregateAlias\"])\n // The replaceAggregatesByRefs function has already transformed any aggregate expressions\n // that match SELECT aggregates to use the __select_results namespace.\n const orderByContext = row\n\n if (orderByClause.length > 1) {\n // For multiple orderBy columns, create a composite key\n return compiledOrderBy.map((compiled) =>\n compiled.compiledExpression(orderByContext)\n )\n } else if (orderByClause.length === 1) {\n // For a single orderBy column, use the value directly\n const compiled = compiledOrderBy[0]!\n return compiled.compiledExpression(orderByContext)\n }\n\n // Default case - no ordering\n return null\n }\n\n // Create a multi-property comparator that respects the order and direction of each property\n const compare = (a: unknown, b: unknown) => {\n // If we're comparing arrays (multiple properties), compare each property in order\n if (orderByClause.length > 1) {\n const arrayA = a as Array<unknown>\n const arrayB = b as Array<unknown>\n for (let i = 0; i < orderByClause.length; i++) {\n const clause = orderByClause[i]!\n const compareFn = makeComparator(clause.compareOptions)\n const result = compareFn(arrayA[i], arrayB[i])\n if (result !== 0) {\n return result\n }\n }\n return arrayA.length - arrayB.length\n }\n\n // Single property comparison\n if (orderByClause.length === 1) {\n const clause = orderByClause[0]!\n const compareFn = makeComparator(clause.compareOptions)\n return compareFn(a, b)\n }\n\n return defaultComparator(a, b)\n }\n\n let setSizeCallback: ((getSize: () => number) => void) | undefined\n\n let orderByOptimizationInfo: OrderByOptimizationInfo | undefined\n\n // Optimize the orderBy operator to lazily load elements\n // by using the range index of the collection.\n // Only for orderBy clause on a single column for now (no composite ordering)\n if (limit && orderByClause.length === 1) {\n const clause = orderByClause[0]!\n const orderByExpression = clause.expression\n\n if (orderByExpression.type === `ref`) {\n const followRefResult = followRef(\n rawQuery,\n orderByExpression,\n collection\n )!\n\n const followRefCollection = followRefResult.collection\n const fieldName = followRefResult.path[0]\n if (fieldName) {\n ensureIndexForField(\n fieldName,\n followRefResult.path,\n followRefCollection,\n clause.compareOptions,\n compare\n )\n }\n\n const valueExtractorForRawRow = compileExpression(\n new PropRef(followRefResult.path),\n true\n ) as CompiledSingleRowExpression\n\n const comparator = (\n a: Record<string, unknown> | null | undefined,\n b: Record<string, unknown> | null | undefined\n ) => {\n const extractedA = a ? valueExtractorForRawRow(a) : a\n const extractedB = b ? valueExtractorForRawRow(b) : b\n return compare(extractedA, extractedB)\n }\n\n const index: IndexInterface<string | number> | undefined =\n findIndexForField(\n followRefCollection.indexes,\n followRefResult.path,\n clause.compareOptions\n )\n\n if (index && index.supports(`gt`)) {\n // We found an index that we can use to lazily load ordered data\n const orderByAlias =\n orderByExpression.path.length > 1\n ? String(orderByExpression.path[0])\n : rawQuery.from.alias\n\n orderByOptimizationInfo = {\n alias: orderByAlias,\n offset: offset ?? 
0,\n limit,\n comparator,\n valueExtractorForRawRow,\n index,\n orderBy: orderByClause,\n }\n\n optimizableOrderByCollections[followRefCollection.id] =\n orderByOptimizationInfo\n\n setSizeCallback = (getSize: () => number) => {\n optimizableOrderByCollections[followRefCollection.id] = {\n ...optimizableOrderByCollections[followRefCollection.id]!,\n dataNeeded: () => {\n const size = getSize()\n return Math.max(0, orderByOptimizationInfo!.limit - size)\n },\n }\n }\n }\n }\n }\n\n // Use fractional indexing and return the tuple [value, index]\n return pipeline.pipe(\n orderByWithFractionalIndex(valueExtractor, {\n limit,\n offset,\n comparator: compare,\n setSizeCallback,\n setWindowFn: (\n windowFn: (options: { offset?: number; limit?: number }) => void\n ) => {\n setWindowFn(\n // We wrap the move function such that we update the orderByOptimizationInfo\n // because that is used by the `dataNeeded` callback to determine if we need to load more data\n (options) => {\n windowFn(options)\n if (orderByOptimizationInfo) {\n orderByOptimizationInfo.offset =\n options.offset ?? orderByOptimizationInfo.offset\n orderByOptimizationInfo.limit =\n options.limit ?? orderByOptimizationInfo.limit\n }\n }\n )\n },\n })\n // orderByWithFractionalIndex returns [key, [value, index]] - we keep this format\n )\n}\n"],"names":["replaceAggregatesByRefs","compileExpression","makeComparator","defaultComparator","followRef","ensureIndexForField","PropRef","findIndexForField","orderByWithFractionalIndex"],"mappings":";;;;;;;;;AAkCO,SAAS,eACd,UACA,UACA,eACA,cACA,YACA,+BACA,aACA,OACA,QAC4D;AAE5D,QAAM,kBAAkB,cAAc,IAAI,CAAC,WAAW;AACpD,UAAM,0BAA0BA,QAAAA;AAAAA,MAC9B,OAAO;AAAA,MACP;AAAA,MACA;AAAA,IAAA;AAEF,WAAO;AAAA,MACL,oBAAoBC,WAAAA,kBAAkB,uBAAuB;AAAA,MAC7D,gBAAgB,OAAO;AAAA,IAAA;AAAA,EAE3B,CAAC;AAGD,QAAM,iBAAiB,CAAC,QAAoD;AAM1E,UAAM,iBAAiB;AAEvB,QAAI,cAAc,SAAS,GAAG;AAE5B,aAAO,gBAAgB;AAAA,QAAI,CAAC,aAC1B,SAAS,mBAAmB,cAAc;AAAA,MAAA;AAAA,IAE9C,WAAW,cAAc,WAAW,GAAG;AAErC,YAAM,WAAW,gBAAgB,CAAC;AAClC,aAAO,SAAS,mBAAmB,cAAc;AAAA,IACnD;AAGA,WAAO;AAAA,EACT;AAGA,QAAM,UAAU,CAAC,GAAY,MAAe;AAE1C,QAAI,cAAc,SAAS,GAAG;AAC5B,YAAM,SAAS;AACf,YAAM,SAAS;AACf,eAAS,IAAI,GAAG,IAAI,cAAc,QAAQ,KAAK;AAC7C,cAAM,SAAS,cAAc,CAAC;AAC9B,cAAM,YAAYC,WAAAA,eAAe,OAAO,cAAc;AACtD,cAAM,SAAS,UAAU,OAAO,CAAC,GAAG,OAAO,CAAC,CAAC;AAC7C,YAAI,WAAW,GAAG;AAChB,iBAAO;AAAA,QACT;AAAA,MACF;AACA,aAAO,OAAO,SAAS,OAAO;AAAA,IAChC;AAGA,QAAI,cAAc,WAAW,GAAG;AAC9B,YAAM,SAAS,cAAc,CAAC;AAC9B,YAAM,YAAYA,WAAAA,eAAe,OAAO,cAAc;AACtD,aAAO,UAAU,GAAG,CAAC;AAAA,IACvB;AAEA,WAAOC,WAAAA,kBAAkB,GAAG,CAAC;AAAA,EAC/B;AAEA,MAAI;AAEJ,MAAI;AAKJ,MAAI,SAAS,cAAc,WAAW,GAAG;AACvC,UAAM,SAAS,cAAc,CAAC;AAC9B,UAAM,oBAAoB,OAAO;AAEjC,QAAI,kBAAkB,SAAS,OAAO;AACpC,YAAM,kBAAkBC,GAAAA;AAAAA,QACtB;AAAA,QACA;AAAA,QACA;AAAA,MAAA;AAGF,YAAM,sBAAsB,gBAAgB;AAC5C,YAAM,YAAY,gBAAgB,KAAK,CAAC;AACxC,UAAI,WAAW;AACbC,kBAAAA;AAAAA,UACE;AAAA,UACA,gBAAgB;AAAA,UAChB;AAAA,UACA,OAAO;AAAA,UACP;AAAA,QAAA;AAAA,MAEJ;AAEA,YAAM,0BAA0BJ,WAAAA;AAAAA,QAC9B,IAAIK,GAAAA,QAAQ,gBAAgB,IAAI;AAAA,QAChC;AAAA,MAAA;AAGF,YAAM,aAAa,CACjB,GACA,MACG;AACH,cAAM,aAAa,IAAI,wBAAwB,CAAC,IAAI;AACpD,cAAM,aAAa,IAAI,wBAAwB,CAAC,IAAI;AACpD,eAAO,QAAQ,YAAY,UAAU;AAAA,MACvC;AAEA,YAAM,QACJC,kBAAAA;AAAAA,QACE,oBAAoB;AAAA,QACpB,gBAAgB;AAAA,QAChB,OAAO;AAAA,MAAA;AAGX,UAAI,SAAS,MAAM,SAAS,IAAI,GAAG;AAEjC,cAAM,eACJ,kBAAkB,KAAK,SAAS,IAC5B,OAAO,kBAAkB,KAAK,CAAC,CAAC,IAChC,SAAS,KAAK;AAEpB,kCAA0B;AAAA,UACxB,OAAO;AAAA,UACP,QAAQ,UAAU;AAAA,UAClB;AAAA,UACA;AAAA,UACA;AAAA,UACA;AAAA,UACA,SAAS;AAAA,QAAA;AAGX,sCAA8B,oBAAoB,EAAE,IAClD;AAEF,0BAAkB,CAAC,YAA0B;AAC3C,wCAA8B,oBAAoB,EAAE,IAAI;AAAA,Y
ACtD,GAAG,8BAA8B,oBAAoB,EAAE;AAAA,YACvD,YAAY,MAAM;AAChB,oBAAM,OAAO,QAAA;AACb,qBAAO,KAAK,IAAI,GAAG,wBAAyB,QAAQ,IAAI;AAAA,YAC1D;AAAA,UAAA;AAAA,QAEJ;AAAA,MACF;AAAA,IACF;AAAA,EACF;AAGA,SAAO,SAAS;AAAA,IACdC,MAAAA,2BAA2B,gBAAgB;AAAA,MACzC;AAAA,MACA;AAAA,MACA,YAAY;AAAA,MACZ;AAAA,MACA,aAAa,CACX,aACG;AACH;AAAA;AAAA;AAAA,UAGE,CAAC,YAAY;AACX,qBAAS,OAAO;AAChB,gBAAI,yBAAyB;AAC3B,sCAAwB,SACtB,QAAQ,UAAU,wBAAwB;AAC5C,sCAAwB,QACtB,QAAQ,SAAS,wBAAwB;AAAA,YAC7C;AAAA,UACF;AAAA,QAAA;AAAA,MAEJ;AAAA,IAAA,CACD;AAAA;AAAA,EAAA;AAGL;;"}
+
{"version":3,"file":"order-by.cjs","sources":["../../../../src/query/compiler/order-by.ts"],"sourcesContent":["import { orderByWithFractionalIndex } from \"@tanstack/db-ivm\"\nimport { defaultComparator, makeComparator } from \"../../utils/comparison.js\"\nimport { PropRef, followRef } from \"../ir.js\"\nimport { ensureIndexForField } from \"../../indexes/auto-index.js\"\nimport { findIndexForField } from \"../../utils/index-optimization.js\"\nimport { compileExpression } from \"./evaluators.js\"\nimport { replaceAggregatesByRefs } from \"./group-by.js\"\nimport type { WindowOptions } from \"./types.js\"\nimport type { CompiledSingleRowExpression } from \"./evaluators.js\"\nimport type { OrderBy, OrderByClause, QueryIR, Select } from \"../ir.js\"\nimport type { NamespacedAndKeyedStream, NamespacedRow } from \"../../types.js\"\nimport type { IStreamBuilder, KeyValue } from \"@tanstack/db-ivm\"\nimport type { IndexInterface } from \"../../indexes/base-index.js\"\nimport type { Collection } from \"../../collection/index.js\"\n\nexport type OrderByOptimizationInfo = {\n alias: string\n orderBy: OrderBy\n offset: number\n limit: number\n comparator: (\n a: Record<string, unknown> | null | undefined,\n b: Record<string, unknown> | null | undefined\n ) => number\n valueExtractorForRawRow: (row: Record<string, unknown>) => any\n index: IndexInterface<string | number>\n dataNeeded?: () => number\n}\n\n/**\n * Processes the ORDER BY clause\n * Works with the new structure that has both namespaced row data and __select_results\n * Always uses fractional indexing and adds the index as __ordering_index to the result\n */\nexport function processOrderBy(\n rawQuery: QueryIR,\n pipeline: NamespacedAndKeyedStream,\n orderByClause: Array<OrderByClause>,\n selectClause: Select,\n collection: Collection,\n optimizableOrderByCollections: Record<string, OrderByOptimizationInfo>,\n setWindowFn: (windowFn: (options: WindowOptions) => void) => void,\n limit?: number,\n offset?: number\n): IStreamBuilder<KeyValue<unknown, [NamespacedRow, string]>> {\n // Pre-compile all order by expressions\n const compiledOrderBy = orderByClause.map((clause) => {\n const clauseWithoutAggregates = replaceAggregatesByRefs(\n clause.expression,\n selectClause,\n `__select_results`\n )\n return {\n compiledExpression: compileExpression(clauseWithoutAggregates),\n compareOptions: clause.compareOptions,\n }\n })\n\n // Create a value extractor function for the orderBy operator\n const valueExtractor = (row: NamespacedRow & { __select_results?: any }) => {\n // The namespaced row contains:\n // 1. Table aliases as top-level properties (e.g., row[\"tableName\"])\n // 2. 
SELECT results in __select_results (e.g., row.__select_results[\"aggregateAlias\"])\n // The replaceAggregatesByRefs function has already transformed any aggregate expressions\n // that match SELECT aggregates to use the __select_results namespace.\n const orderByContext = row\n\n if (orderByClause.length > 1) {\n // For multiple orderBy columns, create a composite key\n return compiledOrderBy.map((compiled) =>\n compiled.compiledExpression(orderByContext)\n )\n } else if (orderByClause.length === 1) {\n // For a single orderBy column, use the value directly\n const compiled = compiledOrderBy[0]!\n return compiled.compiledExpression(orderByContext)\n }\n\n // Default case - no ordering\n return null\n }\n\n // Create a multi-property comparator that respects the order and direction of each property\n const compare = (a: unknown, b: unknown) => {\n // If we're comparing arrays (multiple properties), compare each property in order\n if (orderByClause.length > 1) {\n const arrayA = a as Array<unknown>\n const arrayB = b as Array<unknown>\n for (let i = 0; i < orderByClause.length; i++) {\n const clause = orderByClause[i]!\n const compareFn = makeComparator(clause.compareOptions)\n const result = compareFn(arrayA[i], arrayB[i])\n if (result !== 0) {\n return result\n }\n }\n return arrayA.length - arrayB.length\n }\n\n // Single property comparison\n if (orderByClause.length === 1) {\n const clause = orderByClause[0]!\n const compareFn = makeComparator(clause.compareOptions)\n return compareFn(a, b)\n }\n\n return defaultComparator(a, b)\n }\n\n let setSizeCallback: ((getSize: () => number) => void) | undefined\n\n let orderByOptimizationInfo: OrderByOptimizationInfo | undefined\n\n // Optimize the orderBy operator to lazily load elements\n // by using the range index of the collection.\n // Only for orderBy clause on a single column for now (no composite ordering)\n if (limit && orderByClause.length === 1) {\n const clause = orderByClause[0]!\n const orderByExpression = clause.expression\n\n if (orderByExpression.type === `ref`) {\n const followRefResult = followRef(\n rawQuery,\n orderByExpression,\n collection\n )!\n\n const followRefCollection = followRefResult.collection\n const fieldName = followRefResult.path[0]\n if (fieldName) {\n ensureIndexForField(\n fieldName,\n followRefResult.path,\n followRefCollection,\n clause.compareOptions,\n compare\n )\n }\n\n const valueExtractorForRawRow = compileExpression(\n new PropRef(followRefResult.path),\n true\n ) as CompiledSingleRowExpression\n\n const comparator = (\n a: Record<string, unknown> | null | undefined,\n b: Record<string, unknown> | null | undefined\n ) => {\n const extractedA = a ? valueExtractorForRawRow(a) : a\n const extractedB = b ? valueExtractorForRawRow(b) : b\n return compare(extractedA, extractedB)\n }\n\n const index: IndexInterface<string | number> | undefined =\n findIndexForField(\n followRefCollection.indexes,\n followRefResult.path,\n clause.compareOptions\n )\n\n if (index && index.supports(`gt`)) {\n // We found an index that we can use to lazily load ordered data\n const orderByAlias =\n orderByExpression.path.length > 1\n ? String(orderByExpression.path[0])\n : rawQuery.from.alias\n\n orderByOptimizationInfo = {\n alias: orderByAlias,\n offset: offset ?? 
0,\n limit,\n comparator,\n valueExtractorForRawRow,\n index,\n orderBy: orderByClause,\n }\n\n optimizableOrderByCollections[followRefCollection.id] =\n orderByOptimizationInfo\n\n setSizeCallback = (getSize: () => number) => {\n optimizableOrderByCollections[followRefCollection.id]![`dataNeeded`] =\n () => {\n const size = getSize()\n return Math.max(0, orderByOptimizationInfo!.limit - size)\n }\n }\n }\n }\n }\n\n // Use fractional indexing and return the tuple [value, index]\n return pipeline.pipe(\n orderByWithFractionalIndex(valueExtractor, {\n limit,\n offset,\n comparator: compare,\n setSizeCallback,\n setWindowFn: (\n windowFn: (options: { offset?: number; limit?: number }) => void\n ) => {\n setWindowFn(\n // We wrap the move function such that we update the orderByOptimizationInfo\n // because that is used by the `dataNeeded` callback to determine if we need to load more data\n (options) => {\n windowFn(options)\n if (orderByOptimizationInfo) {\n orderByOptimizationInfo.offset =\n options.offset ?? orderByOptimizationInfo.offset\n orderByOptimizationInfo.limit =\n options.limit ?? orderByOptimizationInfo.limit\n }\n }\n )\n },\n })\n // orderByWithFractionalIndex returns [key, [value, index]] - we keep this format\n )\n}\n"],"names":["replaceAggregatesByRefs","compileExpression","makeComparator","defaultComparator","followRef","ensureIndexForField","PropRef","findIndexForField","orderByWithFractionalIndex"],"mappings":";;;;;;;;;AAkCO,SAAS,eACd,UACA,UACA,eACA,cACA,YACA,+BACA,aACA,OACA,QAC4D;AAE5D,QAAM,kBAAkB,cAAc,IAAI,CAAC,WAAW;AACpD,UAAM,0BAA0BA,QAAAA;AAAAA,MAC9B,OAAO;AAAA,MACP;AAAA,MACA;AAAA,IAAA;AAEF,WAAO;AAAA,MACL,oBAAoBC,WAAAA,kBAAkB,uBAAuB;AAAA,MAC7D,gBAAgB,OAAO;AAAA,IAAA;AAAA,EAE3B,CAAC;AAGD,QAAM,iBAAiB,CAAC,QAAoD;AAM1E,UAAM,iBAAiB;AAEvB,QAAI,cAAc,SAAS,GAAG;AAE5B,aAAO,gBAAgB;AAAA,QAAI,CAAC,aAC1B,SAAS,mBAAmB,cAAc;AAAA,MAAA;AAAA,IAE9C,WAAW,cAAc,WAAW,GAAG;AAErC,YAAM,WAAW,gBAAgB,CAAC;AAClC,aAAO,SAAS,mBAAmB,cAAc;AAAA,IACnD;AAGA,WAAO;AAAA,EACT;AAGA,QAAM,UAAU,CAAC,GAAY,MAAe;AAE1C,QAAI,cAAc,SAAS,GAAG;AAC5B,YAAM,SAAS;AACf,YAAM,SAAS;AACf,eAAS,IAAI,GAAG,IAAI,cAAc,QAAQ,KAAK;AAC7C,cAAM,SAAS,cAAc,CAAC;AAC9B,cAAM,YAAYC,WAAAA,eAAe,OAAO,cAAc;AACtD,cAAM,SAAS,UAAU,OAAO,CAAC,GAAG,OAAO,CAAC,CAAC;AAC7C,YAAI,WAAW,GAAG;AAChB,iBAAO;AAAA,QACT;AAAA,MACF;AACA,aAAO,OAAO,SAAS,OAAO;AAAA,IAChC;AAGA,QAAI,cAAc,WAAW,GAAG;AAC9B,YAAM,SAAS,cAAc,CAAC;AAC9B,YAAM,YAAYA,WAAAA,eAAe,OAAO,cAAc;AACtD,aAAO,UAAU,GAAG,CAAC;AAAA,IACvB;AAEA,WAAOC,WAAAA,kBAAkB,GAAG,CAAC;AAAA,EAC/B;AAEA,MAAI;AAEJ,MAAI;AAKJ,MAAI,SAAS,cAAc,WAAW,GAAG;AACvC,UAAM,SAAS,cAAc,CAAC;AAC9B,UAAM,oBAAoB,OAAO;AAEjC,QAAI,kBAAkB,SAAS,OAAO;AACpC,YAAM,kBAAkBC,GAAAA;AAAAA,QACtB;AAAA,QACA;AAAA,QACA;AAAA,MAAA;AAGF,YAAM,sBAAsB,gBAAgB;AAC5C,YAAM,YAAY,gBAAgB,KAAK,CAAC;AACxC,UAAI,WAAW;AACbC,kBAAAA;AAAAA,UACE;AAAA,UACA,gBAAgB;AAAA,UAChB;AAAA,UACA,OAAO;AAAA,UACP;AAAA,QAAA;AAAA,MAEJ;AAEA,YAAM,0BAA0BJ,WAAAA;AAAAA,QAC9B,IAAIK,GAAAA,QAAQ,gBAAgB,IAAI;AAAA,QAChC;AAAA,MAAA;AAGF,YAAM,aAAa,CACjB,GACA,MACG;AACH,cAAM,aAAa,IAAI,wBAAwB,CAAC,IAAI;AACpD,cAAM,aAAa,IAAI,wBAAwB,CAAC,IAAI;AACpD,eAAO,QAAQ,YAAY,UAAU;AAAA,MACvC;AAEA,YAAM,QACJC,kBAAAA;AAAAA,QACE,oBAAoB;AAAA,QACpB,gBAAgB;AAAA,QAChB,OAAO;AAAA,MAAA;AAGX,UAAI,SAAS,MAAM,SAAS,IAAI,GAAG;AAEjC,cAAM,eACJ,kBAAkB,KAAK,SAAS,IAC5B,OAAO,kBAAkB,KAAK,CAAC,CAAC,IAChC,SAAS,KAAK;AAEpB,kCAA0B;AAAA,UACxB,OAAO;AAAA,UACP,QAAQ,UAAU;AAAA,UAClB;AAAA,UACA;AAAA,UACA;AAAA,UACA;AAAA,UACA,SAAS;AAAA,QAAA;AAGX,sCAA8B,oBAAoB,EAAE,IAClD;AAEF,0BAAkB,CAAC,YAA0B;AAC3C,wCAA8B,oBAAoB,EAAE,EAAG,YAAY,IACjE,MAAM;AACJ,kBAAM,OAAO,QAAA;AACb,mBAAO,KAAK,IAAI,GAAG,wBAAyB,Q
AAQ,IAAI;AAAA,UAC1D;AAAA,QACJ;AAAA,MACF;AAAA,IACF;AAAA,EACF;AAGA,SAAO,SAAS;AAAA,IACdC,MAAAA,2BAA2B,gBAAgB;AAAA,MACzC;AAAA,MACA;AAAA,MACA,YAAY;AAAA,MACZ;AAAA,MACA,aAAa,CACX,aACG;AACH;AAAA;AAAA;AAAA,UAGE,CAAC,YAAY;AACX,qBAAS,OAAO;AAChB,gBAAI,yBAAyB;AAC3B,sCAAwB,SACtB,QAAQ,UAAU,wBAAwB;AAC5C,sCAAwB,QACtB,QAAQ,SAAS,wBAAwB;AAAA,YAC7C;AAAA,UACF;AAAA,QAAA;AAAA,MAEJ;AAAA,IAAA,CACD;AAAA;AAAA,EAAA;AAGL;;"}
@@ -43,20 +43,26 @@ class CollectionSubscriber {
         includeInitialState
       );
     }
+    const trackLoadPromise = () => {
+      if (!this.subscriptionLoadingPromises.has(subscription)) {
+        let resolve;
+        const promise = new Promise((res) => {
+          resolve = res;
+        });
+        this.subscriptionLoadingPromises.set(subscription, {
+          resolve
+        });
+        this.collectionConfigBuilder.liveQueryCollection._sync.trackLoadPromise(
+          promise
+        );
+      }
+    };
+    if (subscription.status === `loadingSubset`) {
+      trackLoadPromise();
+    }
     const statusUnsubscribe = subscription.on(`status:change`, (event) => {
       if (event.status === `loadingSubset`) {
-        if (!this.subscriptionLoadingPromises.has(subscription)) {
-          let resolve;
-          const promise = new Promise((res) => {
-            resolve = res;
-          });
-          this.subscriptionLoadingPromises.set(subscription, {
-            resolve
-          });
-          this.collectionConfigBuilder.liveQueryCollection._sync.trackLoadPromise(
-            promise
-          );
-        }
+        trackLoadPromise();
       } else {
         const deferred = this.subscriptionLoadingPromises.get(subscription);
         if (deferred) {
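The hoisted `trackLoadPromise` is the classic externally-resolved deferred: capture `resolve` outside the `Promise` executor, keep it per subscription, and hand the pending promise to `_sync.trackLoadPromise`. The extra `subscription.status` check before the `status:change` listener is attached covers the case where the initial `loadSubset` has already fired, so no transition event will arrive. A self-contained sketch of the same pattern (hypothetical helper names):

```ts
// Minimal deferred: a promise whose resolution is controlled from outside.
function createDeferred() {
  let resolve!: () => void
  const promise = new Promise<void>((res) => {
    resolve = res
  })
  return { promise, resolve }
}

const pending = new Map<string, { resolve: () => void }>()

// Register at most one pending promise per key (guards duplicate transitions).
function trackLoading(key: string, track: (p: Promise<void>) => void) {
  if (!pending.has(key)) {
    const { promise, resolve } = createDeferred()
    pending.set(key, { resolve })
    track(promise)
  }
}

// Resolve and clear when the matching `ready` transition arrives.
function markReady(key: string) {
  const deferred = pending.get(key)
  if (deferred) {
    pending.delete(key) // clear the map entry first, then resolve
    deferred.resolve()
  }
}
```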
@@ -102,18 +108,10 @@ class CollectionSubscriber {
     return subscription;
   }
   subscribeToOrderedChanges(whereExpression, orderByInfo) {
-    const { orderBy, offset, limit, comparator, dataNeeded, index } = orderByInfo;
+    const { orderBy, offset, limit, index } = orderByInfo;
     const sendChangesInRange = (changes) => {
       const splittedChanges = splitUpdates(changes);
-      let filteredChanges = splittedChanges;
-      if (dataNeeded && dataNeeded() === 0) {
-        filteredChanges = filterChangesSmallerOrEqualToMax(
-          splittedChanges,
-          comparator,
-          this.biggest
-        );
-      }
-      this.sendChangesToPipelineWithTracking(filteredChanges, subscription);
+      this.sendChangesToPipelineWithTracking(splittedChanges, subscription);
     };
     const subscription = this.collection.subscribeChanges(sendChangesInRange, {
       whereExpression
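With the pre-filtering gone, `sendChangesInRange` forwards `splitUpdates(changes)` as-is; `comparator` and `dataNeeded` are no longer destructured here. `splitUpdates` (its source is embedded in the map diff below) rewrites every update as a delete of the previous value followed by an insert of the new one — the shape the multiset pipeline consumes. A small worked example with a simplified `ChangeMessage` type:

```ts
type Change =
  | { type: `insert` | `delete`; key: string; value: unknown }
  | { type: `update`; key: string; value: unknown; previousValue: unknown }

function* splitUpdates(changes: Iterable<Change>): Generator<Change> {
  for (const change of changes) {
    if (change.type === `update`) {
      yield { type: `delete`, key: change.key, value: change.previousValue }
      yield { type: `insert`, key: change.key, value: change.value }
    } else {
      yield change
    }
  }
}

// One update fans out into a delete/insert pair under the same key:
const out = [...splitUpdates([
  { type: `update`, key: `1`, value: { n: 2 }, previousValue: { n: 1 } },
])]
// [ { type: 'delete', key: '1', value: { n: 1 } },
//   { type: 'insert', key: '1', value: { n: 2 } } ]
```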
@@ -236,17 +234,5 @@ function* splitUpdates(changes) {
     }
   }
 }
-function* filterChanges(changes, f) {
-  for (const change of changes) {
-    if (f(change)) {
-      yield change;
-    }
-  }
-}
-function* filterChangesSmallerOrEqualToMax(changes, comparator, maxValue) {
-  yield* filterChanges(changes, (change) => {
-    return !maxValue || comparator(change.value, maxValue) <= 0;
-  });
-}
 exports.CollectionSubscriber = CollectionSubscriber;
 //# sourceMappingURL=collection-subscriber.cjs.map
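These two generators lost their only caller in the hunk above, so they are deleted as dead code. The invariant they enforced — once the topK window is full up to some maximum M, a change with value greater than M can never enter the window — reduces to a one-line predicate (illustrative rewrite, not the library's API):

```ts
// Keep a change only if it could still land in the topK window.
const couldAffectTopK = (
  value: number,
  maxSent: number | undefined,
  cmp: (a: number, b: number) => number
) => maxSent === undefined || cmp(value, maxSent) <= 0

const cmp = (a: number, b: number) => a - b
couldAffectTopK(5, 7, cmp) // true  — 5 ≤ 7, could displace an entry
couldAffectTopK(9, 7, cmp) // false — 9 > 7, cannot enter a full window
```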
@@ -1 +1 @@
-
{"version":3,"file":"collection-subscriber.cjs","sources":["../../../../src/query/live/collection-subscriber.ts"],"sourcesContent":["import { MultiSet } from \"@tanstack/db-ivm\"\nimport {\n convertOrderByToBasicExpression,\n convertToBasicExpression,\n} from \"../compiler/expressions.js\"\nimport { WhereClauseConversionError } from \"../../errors.js\"\nimport type { MultiSetArray, RootStreamBuilder } from \"@tanstack/db-ivm\"\nimport type { Collection } from \"../../collection/index.js\"\nimport type { ChangeMessage } from \"../../types.js\"\nimport type { Context, GetResult } from \"../builder/types.js\"\nimport type { BasicExpression } from \"../ir.js\"\nimport type { OrderByOptimizationInfo } from \"../compiler/order-by.js\"\nimport type { CollectionConfigBuilder } from \"./collection-config-builder.js\"\nimport type { CollectionSubscription } from \"../../collection/subscription.js\"\n\nconst loadMoreCallbackSymbol = Symbol.for(\n `@tanstack/db.collection-config-builder`\n)\n\nexport class CollectionSubscriber<\n TContext extends Context,\n TResult extends object = GetResult<TContext>,\n> {\n // Keep track of the biggest value we've sent so far (needed for orderBy optimization)\n private biggest: any = undefined\n\n // Track deferred promises for subscription loading states\n private subscriptionLoadingPromises = new Map<\n CollectionSubscription,\n { resolve: () => void }\n >()\n\n constructor(\n private alias: string,\n private collectionId: string,\n private collection: Collection,\n private collectionConfigBuilder: CollectionConfigBuilder<TContext, TResult>\n ) {}\n\n subscribe(): CollectionSubscription {\n const whereClause = this.getWhereClauseForAlias()\n\n if (whereClause) {\n const whereExpression = convertToBasicExpression(whereClause, this.alias)\n\n if (whereExpression) {\n return this.subscribeToChanges(whereExpression)\n }\n\n throw new WhereClauseConversionError(this.collectionId, this.alias)\n }\n\n return this.subscribeToChanges()\n }\n\n private subscribeToChanges(whereExpression?: BasicExpression<boolean>) {\n let subscription: CollectionSubscription\n const orderByInfo = this.getOrderByInfo()\n if (orderByInfo) {\n subscription = this.subscribeToOrderedChanges(\n whereExpression,\n orderByInfo\n )\n } else {\n // If the source alias is lazy then we should not include the initial state\n const includeInitialState = !this.collectionConfigBuilder.isLazyAlias(\n this.alias\n )\n\n subscription = this.subscribeToMatchingChanges(\n whereExpression,\n includeInitialState\n )\n }\n\n // Subscribe to subscription status changes to propagate loading state\n const statusUnsubscribe = subscription.on(`status:change`, (event) => {\n // TODO: For now we are setting this loading state whenever the subscription\n // status changes to 'loadingSubset'. But we have discussed it only happening\n // when the the live query has it's offset/limit changed, and that triggers the\n // subscription to request a snapshot. 
This will require more work to implement,\n // and builds on https://github.com/TanStack/db/pull/663 which this PR\n // does not yet depend on.\n if (event.status === `loadingSubset`) {\n // Guard against duplicate transitions\n if (!this.subscriptionLoadingPromises.has(subscription)) {\n let resolve: () => void\n const promise = new Promise<void>((res) => {\n resolve = res\n })\n\n this.subscriptionLoadingPromises.set(subscription, {\n resolve: resolve!,\n })\n this.collectionConfigBuilder.liveQueryCollection!._sync.trackLoadPromise(\n promise\n )\n }\n } else {\n // status is 'ready'\n const deferred = this.subscriptionLoadingPromises.get(subscription)\n if (deferred) {\n // Clear the map entry FIRST (before resolving)\n this.subscriptionLoadingPromises.delete(subscription)\n deferred.resolve()\n }\n }\n })\n\n const unsubscribe = () => {\n // If subscription has a pending promise, resolve it before unsubscribing\n const deferred = this.subscriptionLoadingPromises.get(subscription)\n if (deferred) {\n // Clear the map entry FIRST (before resolving)\n this.subscriptionLoadingPromises.delete(subscription)\n deferred.resolve()\n }\n\n statusUnsubscribe()\n subscription.unsubscribe()\n }\n // currentSyncState is always defined when subscribe() is called\n // (called during sync session setup)\n this.collectionConfigBuilder.currentSyncState!.unsubscribeCallbacks.add(\n unsubscribe\n )\n return subscription\n }\n\n private sendChangesToPipeline(\n changes: Iterable<ChangeMessage<any, string | number>>,\n callback?: () => boolean\n ) {\n // currentSyncState and input are always defined when this method is called\n // (only called from active subscriptions during a sync session)\n const input =\n this.collectionConfigBuilder.currentSyncState!.inputs[this.alias]!\n const sentChanges = sendChangesToInput(\n input,\n changes,\n this.collection.config.getKey\n )\n\n // Do not provide the callback that loads more data\n // if there's no more data to load\n // otherwise we end up in an infinite loop trying to load more data\n const dataLoader = sentChanges > 0 ? 
callback : undefined\n\n // We need to schedule a graph run even if there's no data to load\n // because we need to mark the collection as ready if it's not already\n // and that's only done in `scheduleGraphRun`\n this.collectionConfigBuilder.scheduleGraphRun(dataLoader, {\n alias: this.alias,\n })\n }\n\n private subscribeToMatchingChanges(\n whereExpression: BasicExpression<boolean> | undefined,\n includeInitialState: boolean = false\n ) {\n const sendChanges = (\n changes: Array<ChangeMessage<any, string | number>>\n ) => {\n this.sendChangesToPipeline(changes)\n }\n\n const subscription = this.collection.subscribeChanges(sendChanges, {\n includeInitialState,\n whereExpression,\n })\n\n return subscription\n }\n\n private subscribeToOrderedChanges(\n whereExpression: BasicExpression<boolean> | undefined,\n orderByInfo: OrderByOptimizationInfo\n ) {\n const { orderBy, offset, limit, comparator, dataNeeded, index } =\n orderByInfo\n\n const sendChangesInRange = (\n changes: Iterable<ChangeMessage<any, string | number>>\n ) => {\n // Split live updates into a delete of the old value and an insert of the new value\n // and filter out changes that are bigger than the biggest value we've sent so far\n // because they can't affect the topK (and if later we need more data, we will dynamically load more data)\n const splittedChanges = splitUpdates(changes)\n let filteredChanges = splittedChanges\n if (dataNeeded && dataNeeded() === 0) {\n // If the topK is full [..., maxSentValue] then we do not need to send changes > maxSentValue\n // because they can never make it into the topK.\n // However, if the topK isn't full yet, we need to also send changes > maxSentValue\n // because they will make it into the topK\n filteredChanges = filterChangesSmallerOrEqualToMax(\n splittedChanges,\n comparator,\n this.biggest\n )\n }\n\n this.sendChangesToPipelineWithTracking(filteredChanges, subscription)\n }\n\n // Subscribe to changes and only send changes that are smaller than the biggest value we've sent so far\n // values that are bigger don't need to be sent because they can't affect the topK\n const subscription = this.collection.subscribeChanges(sendChangesInRange, {\n whereExpression,\n })\n\n subscription.setOrderByIndex(index)\n\n // Normalize the orderBy clauses such that the references are relative to the collection\n const normalizedOrderBy = convertOrderByToBasicExpression(\n orderBy,\n this.alias\n )\n\n // Load the first `offset + limit` values from the index\n // i.e. 
the K items from the collection that fall into the requested range: [offset, offset + limit[\n subscription.requestLimitedSnapshot({\n limit: offset + limit,\n orderBy: normalizedOrderBy,\n })\n\n return subscription\n }\n\n // This function is called by maybeRunGraph\n // after each iteration of the query pipeline\n // to ensure that the orderBy operator has enough data to work with\n loadMoreIfNeeded(subscription: CollectionSubscription) {\n const orderByInfo = this.getOrderByInfo()\n\n if (!orderByInfo) {\n // This query has no orderBy operator\n // so there's no data to load\n return true\n }\n\n const { dataNeeded } = orderByInfo\n\n if (!dataNeeded) {\n // This should never happen because the topK operator should always set the size callback\n // which in turn should lead to the orderBy operator setting the dataNeeded callback\n throw new Error(\n `Missing dataNeeded callback for collection ${this.collectionId}`\n )\n }\n\n // `dataNeeded` probes the orderBy operator to see if it needs more data\n // if it needs more data, it returns the number of items it needs\n const n = dataNeeded()\n if (n > 0) {\n this.loadNextItems(n, subscription)\n }\n return true\n }\n\n private sendChangesToPipelineWithTracking(\n changes: Iterable<ChangeMessage<any, string | number>>,\n subscription: CollectionSubscription\n ) {\n const orderByInfo = this.getOrderByInfo()\n if (!orderByInfo) {\n this.sendChangesToPipeline(changes)\n return\n }\n\n const trackedChanges = this.trackSentValues(changes, orderByInfo.comparator)\n\n // Cache the loadMoreIfNeeded callback on the subscription using a symbol property.\n // This ensures we pass the same function instance to the scheduler each time,\n // allowing it to deduplicate callbacks when multiple changes arrive during a transaction.\n type SubscriptionWithLoader = CollectionSubscription & {\n [loadMoreCallbackSymbol]?: () => boolean\n }\n\n const subscriptionWithLoader = subscription as SubscriptionWithLoader\n\n subscriptionWithLoader[loadMoreCallbackSymbol] ??=\n this.loadMoreIfNeeded.bind(this, subscription)\n\n this.sendChangesToPipeline(\n trackedChanges,\n subscriptionWithLoader[loadMoreCallbackSymbol]\n )\n }\n\n // Loads the next `n` items from the collection\n // starting from the biggest item it has sent\n private loadNextItems(n: number, subscription: CollectionSubscription) {\n const orderByInfo = this.getOrderByInfo()\n if (!orderByInfo) {\n return\n }\n const { orderBy, valueExtractorForRawRow } = orderByInfo\n const biggestSentRow = this.biggest\n const biggestSentValue = biggestSentRow\n ? 
valueExtractorForRawRow(biggestSentRow)\n : biggestSentRow\n\n // Normalize the orderBy clauses such that the references are relative to the collection\n const normalizedOrderBy = convertOrderByToBasicExpression(\n orderBy,\n this.alias\n )\n\n // Take the `n` items after the biggest sent value\n subscription.requestLimitedSnapshot({\n orderBy: normalizedOrderBy,\n limit: n,\n minValue: biggestSentValue,\n })\n }\n\n private getWhereClauseForAlias(): BasicExpression<boolean> | undefined {\n const sourceWhereClausesCache =\n this.collectionConfigBuilder.sourceWhereClausesCache\n if (!sourceWhereClausesCache) {\n return undefined\n }\n return sourceWhereClausesCache.get(this.alias)\n }\n\n private getOrderByInfo(): OrderByOptimizationInfo | undefined {\n const info =\n this.collectionConfigBuilder.optimizableOrderByCollections[\n this.collectionId\n ]\n if (info && info.alias === this.alias) {\n return info\n }\n return undefined\n }\n\n private *trackSentValues(\n changes: Iterable<ChangeMessage<any, string | number>>,\n comparator: (a: any, b: any) => number\n ) {\n for (const change of changes) {\n if (!this.biggest) {\n this.biggest = change.value\n } else if (comparator(this.biggest, change.value) < 0) {\n this.biggest = change.value\n }\n\n yield change\n }\n }\n}\n\n/**\n * Helper function to send changes to a D2 input stream\n */\nfunction sendChangesToInput(\n input: RootStreamBuilder<unknown>,\n changes: Iterable<ChangeMessage>,\n getKey: (item: ChangeMessage[`value`]) => any\n): number {\n const multiSetArray: MultiSetArray<unknown> = []\n for (const change of changes) {\n const key = getKey(change.value)\n if (change.type === `insert`) {\n multiSetArray.push([[key, change.value], 1])\n } else if (change.type === `update`) {\n multiSetArray.push([[key, change.previousValue], -1])\n multiSetArray.push([[key, change.value], 1])\n } else {\n // change.type === `delete`\n multiSetArray.push([[key, change.value], -1])\n }\n }\n\n if (multiSetArray.length !== 0) {\n input.sendData(new MultiSet(multiSetArray))\n }\n\n return multiSetArray.length\n}\n\n/** Splits updates into a delete of the old value and an insert of the new value */\nfunction* splitUpdates<\n T extends object = Record<string, unknown>,\n TKey extends string | number = string | number,\n>(\n changes: Iterable<ChangeMessage<T, TKey>>\n): Generator<ChangeMessage<T, TKey>> {\n for (const change of changes) {\n if (change.type === `update`) {\n yield { type: `delete`, key: change.key, value: change.previousValue! 
}\n yield { type: `insert`, key: change.key, value: change.value }\n } else {\n yield change\n }\n }\n}\n\nfunction* filterChanges<\n T extends object = Record<string, unknown>,\n TKey extends string | number = string | number,\n>(\n changes: Iterable<ChangeMessage<T, TKey>>,\n f: (change: ChangeMessage<T, TKey>) => boolean\n): Generator<ChangeMessage<T, TKey>> {\n for (const change of changes) {\n if (f(change)) {\n yield change\n }\n }\n}\n\n/**\n * Filters changes to only include those that are smaller or equal to the provided max value\n * @param changes - Iterable of changes to filter\n * @param comparator - Comparator function to use for filtering\n * @param maxValue - Range to filter changes within (range boundaries are exclusive)\n * @returns Iterable of changes that fall within the range\n */\nfunction* filterChangesSmallerOrEqualToMax<\n T extends object = Record<string, unknown>,\n TKey extends string | number = string | number,\n>(\n changes: Iterable<ChangeMessage<T, TKey>>,\n comparator: (a: any, b: any) => number,\n maxValue: any\n): Generator<ChangeMessage<T, TKey>> {\n yield* filterChanges(changes, (change) => {\n return !maxValue || comparator(change.value, maxValue) <= 0\n })\n}\n"],"names":["convertToBasicExpression","WhereClauseConversionError","convertOrderByToBasicExpression","MultiSet"],"mappings":";;;;;AAeA,MAAM,yBAAyB,OAAO;AAAA,EACpC;AACF;AAEO,MAAM,qBAGX;AAAA,EAUA,YACU,OACA,cACA,YACA,yBACR;AAJQ,SAAA,QAAA;AACA,SAAA,eAAA;AACA,SAAA,aAAA;AACA,SAAA,0BAAA;AAZV,SAAQ,UAAe;AAGvB,SAAQ,kDAAkC,IAAA;AAAA,EAUvC;AAAA,EAEH,YAAoC;AAClC,UAAM,cAAc,KAAK,uBAAA;AAEzB,QAAI,aAAa;AACf,YAAM,kBAAkBA,YAAAA,yBAAyB,aAAa,KAAK,KAAK;AAExE,UAAI,iBAAiB;AACnB,eAAO,KAAK,mBAAmB,eAAe;AAAA,MAChD;AAEA,YAAM,IAAIC,OAAAA,2BAA2B,KAAK,cAAc,KAAK,KAAK;AAAA,IACpE;AAEA,WAAO,KAAK,mBAAA;AAAA,EACd;AAAA,EAEQ,mBAAmB,iBAA4C;AACrE,QAAI;AACJ,UAAM,cAAc,KAAK,eAAA;AACzB,QAAI,aAAa;AACf,qBAAe,KAAK;AAAA,QAClB;AAAA,QACA;AAAA,MAAA;AAAA,IAEJ,OAAO;AAEL,YAAM,sBAAsB,CAAC,KAAK,wBAAwB;AAAA,QACxD,KAAK;AAAA,MAAA;AAGP,qBAAe,KAAK;AAAA,QAClB;AAAA,QACA;AAAA,MAAA;AAAA,IAEJ;AAGA,UAAM,oBAAoB,aAAa,GAAG,iBAAiB,CAAC,UAAU;AAOpE,UAAI,MAAM,WAAW,iBAAiB;AAEpC,YAAI,CAAC,KAAK,4BAA4B,IAAI,YAAY,GAAG;AACvD,cAAI;AACJ,gBAAM,UAAU,IAAI,QAAc,CAAC,QAAQ;AACzC,sBAAU;AAAA,UACZ,CAAC;AAED,eAAK,4BAA4B,IAAI,cAAc;AAAA,YACjD;AAAA,UAAA,CACD;AACD,eAAK,wBAAwB,oBAAqB,MAAM;AAAA,YACtD;AAAA,UAAA;AAAA,QAEJ;AAAA,MACF,OAAO;AAEL,cAAM,WAAW,KAAK,4BAA4B,IAAI,YAAY;AAClE,YAAI,UAAU;AAEZ,eAAK,4BAA4B,OAAO,YAAY;AACpD,mBAAS,QAAA;AAAA,QACX;AAAA,MACF;AAAA,IACF,CAAC;AAED,UAAM,cAAc,MAAM;AAExB,YAAM,WAAW,KAAK,4BAA4B,IAAI,YAAY;AAClE,UAAI,UAAU;AAEZ,aAAK,4BAA4B,OAAO,YAAY;AACpD,iBAAS,QAAA;AAAA,MACX;AAEA,wBAAA;AACA,mBAAa,YAAA;AAAA,IACf;AAGA,SAAK,wBAAwB,iBAAkB,qBAAqB;AAAA,MAClE;AAAA,IAAA;AAEF,WAAO;AAAA,EACT;AAAA,EAEQ,sBACN,SACA,UACA;AAGA,UAAM,QACJ,KAAK,wBAAwB,iBAAkB,OAAO,KAAK,KAAK;AAClE,UAAM,cAAc;AAAA,MAClB;AAAA,MACA;AAAA,MACA,KAAK,WAAW,OAAO;AAAA,IAAA;AAMzB,UAAM,aAAa,cAAc,IAAI,WAAW;AAKhD,SAAK,wBAAwB,iBAAiB,YAAY;AAAA,MACxD,OAAO,KAAK;AAAA,IAAA,CACb;AAAA,EACH;AAAA,EAEQ,2BACN,iBACA,sBAA+B,OAC/B;AACA,UAAM,cAAc,CAClB,YACG;AACH,WAAK,sBAAsB,OAAO;AAAA,IACpC;AAEA,UAAM,eAAe,KAAK,WAAW,iBAAiB,aAAa;AAAA,MACjE;AAAA,MACA;AAAA,IAAA,CACD;AAED,WAAO;AAAA,EACT;AAAA,EAEQ,0BACN,iBACA,aACA;AACA,UAAM,EAAE,SAAS,QAAQ,OAAO,YAAY,YAAY,UACtD;AAEF,UAAM,qBAAqB,CACzB,YACG;AAIH,YAAM,kBAAkB,aAAa,OAAO;AAC5C,UAAI,kBAAkB;AACtB,UAAI,cAAc,WAAA,MAAiB,GAAG;AAKpC,0BAAkB;AAAA,UAChB;AAAA,UACA;AAAA,UACA,KAAK;AAAA,QAAA;AAAA,MAET;AAEA,WAAK,kCAAkC,iBAAiB,YAAY;AAAA,IACtE;AAIA,UAAM,eAAe,KAAK,WAAW,iBAAiB,oBAAoB;AAAA,MACxE;AAAA,IAAA,CACD;AAED,iBAAa,gBAAgB,K
AAK;AAGlC,UAAM,oBAAoBC,YAAAA;AAAAA,MACxB;AAAA,MACA,KAAK;AAAA,IAAA;AAKP,iBAAa,uBAAuB;AAAA,MAClC,OAAO,SAAS;AAAA,MAChB,SAAS;AAAA,IAAA,CACV;AAED,WAAO;AAAA,EACT;AAAA;AAAA;AAAA;AAAA,EAKA,iBAAiB,cAAsC;AACrD,UAAM,cAAc,KAAK,eAAA;AAEzB,QAAI,CAAC,aAAa;AAGhB,aAAO;AAAA,IACT;AAEA,UAAM,EAAE,eAAe;AAEvB,QAAI,CAAC,YAAY;AAGf,YAAM,IAAI;AAAA,QACR,8CAA8C,KAAK,YAAY;AAAA,MAAA;AAAA,IAEnE;AAIA,UAAM,IAAI,WAAA;AACV,QAAI,IAAI,GAAG;AACT,WAAK,cAAc,GAAG,YAAY;AAAA,IACpC;AACA,WAAO;AAAA,EACT;AAAA,EAEQ,kCACN,SACA,cACA;AACA,UAAM,cAAc,KAAK,eAAA;AACzB,QAAI,CAAC,aAAa;AAChB,WAAK,sBAAsB,OAAO;AAClC;AAAA,IACF;AAEA,UAAM,iBAAiB,KAAK,gBAAgB,SAAS,YAAY,UAAU;AAS3E,UAAM,yBAAyB;AAE/B,2BAAuB,sBAAsB,MAC3C,KAAK,iBAAiB,KAAK,MAAM,YAAY;AAE/C,SAAK;AAAA,MACH;AAAA,MACA,uBAAuB,sBAAsB;AAAA,IAAA;AAAA,EAEjD;AAAA;AAAA;AAAA,EAIQ,cAAc,GAAW,cAAsC;AACrE,UAAM,cAAc,KAAK,eAAA;AACzB,QAAI,CAAC,aAAa;AAChB;AAAA,IACF;AACA,UAAM,EAAE,SAAS,wBAAA,IAA4B;AAC7C,UAAM,iBAAiB,KAAK;AAC5B,UAAM,mBAAmB,iBACrB,wBAAwB,cAAc,IACtC;AAGJ,UAAM,oBAAoBA,YAAAA;AAAAA,MACxB;AAAA,MACA,KAAK;AAAA,IAAA;AAIP,iBAAa,uBAAuB;AAAA,MAClC,SAAS;AAAA,MACT,OAAO;AAAA,MACP,UAAU;AAAA,IAAA,CACX;AAAA,EACH;AAAA,EAEQ,yBAA+D;AACrE,UAAM,0BACJ,KAAK,wBAAwB;AAC/B,QAAI,CAAC,yBAAyB;AAC5B,aAAO;AAAA,IACT;AACA,WAAO,wBAAwB,IAAI,KAAK,KAAK;AAAA,EAC/C;AAAA,EAEQ,iBAAsD;AAC5D,UAAM,OACJ,KAAK,wBAAwB,8BAC3B,KAAK,YACP;AACF,QAAI,QAAQ,KAAK,UAAU,KAAK,OAAO;AACrC,aAAO;AAAA,IACT;AACA,WAAO;AAAA,EACT;AAAA,EAEA,CAAS,gBACP,SACA,YACA;AACA,eAAW,UAAU,SAAS;AAC5B,UAAI,CAAC,KAAK,SAAS;AACjB,aAAK,UAAU,OAAO;AAAA,MACxB,WAAW,WAAW,KAAK,SAAS,OAAO,KAAK,IAAI,GAAG;AACrD,aAAK,UAAU,OAAO;AAAA,MACxB;AAEA,YAAM;AAAA,IACR;AAAA,EACF;AACF;AAKA,SAAS,mBACP,OACA,SACA,QACQ;AACR,QAAM,gBAAwC,CAAA;AAC9C,aAAW,UAAU,SAAS;AAC5B,UAAM,MAAM,OAAO,OAAO,KAAK;AAC/B,QAAI,OAAO,SAAS,UAAU;AAC5B,oBAAc,KAAK,CAAC,CAAC,KAAK,OAAO,KAAK,GAAG,CAAC,CAAC;AAAA,IAC7C,WAAW,OAAO,SAAS,UAAU;AACnC,oBAAc,KAAK,CAAC,CAAC,KAAK,OAAO,aAAa,GAAG,EAAE,CAAC;AACpD,oBAAc,KAAK,CAAC,CAAC,KAAK,OAAO,KAAK,GAAG,CAAC,CAAC;AAAA,IAC7C,OAAO;AAEL,oBAAc,KAAK,CAAC,CAAC,KAAK,OAAO,KAAK,GAAG,EAAE,CAAC;AAAA,IAC9C;AAAA,EACF;AAEA,MAAI,cAAc,WAAW,GAAG;AAC9B,UAAM,SAAS,IAAIC,MAAAA,SAAS,aAAa,CAAC;AAAA,EAC5C;AAEA,SAAO,cAAc;AACvB;AAGA,UAAU,aAIR,SACmC;AACnC,aAAW,UAAU,SAAS;AAC5B,QAAI,OAAO,SAAS,UAAU;AAC5B,YAAM,EAAE,MAAM,UAAU,KAAK,OAAO,KAAK,OAAO,OAAO,cAAA;AACvD,YAAM,EAAE,MAAM,UAAU,KAAK,OAAO,KAAK,OAAO,OAAO,MAAA;AAAA,IACzD,OAAO;AACL,YAAM;AAAA,IACR;AAAA,EACF;AACF;AAEA,UAAU,cAIR,SACA,GACmC;AACnC,aAAW,UAAU,SAAS;AAC5B,QAAI,EAAE,MAAM,GAAG;AACb,YAAM;AAAA,IACR;AAAA,EACF;AACF;AASA,UAAU,iCAIR,SACA,YACA,UACmC;AACnC,SAAO,cAAc,SAAS,CAAC,WAAW;AACxC,WAAO,CAAC,YAAY,WAAW,OAAO,OAAO,QAAQ,KAAK;AAAA,EAC5D,CAAC;AACH;;"}
+
{"version":3,"file":"collection-subscriber.cjs","sources":["../../../../src/query/live/collection-subscriber.ts"],"sourcesContent":["import { MultiSet } from \"@tanstack/db-ivm\"\nimport {\n convertOrderByToBasicExpression,\n convertToBasicExpression,\n} from \"../compiler/expressions.js\"\nimport { WhereClauseConversionError } from \"../../errors.js\"\nimport type { MultiSetArray, RootStreamBuilder } from \"@tanstack/db-ivm\"\nimport type { Collection } from \"../../collection/index.js\"\nimport type { ChangeMessage } from \"../../types.js\"\nimport type { Context, GetResult } from \"../builder/types.js\"\nimport type { BasicExpression } from \"../ir.js\"\nimport type { OrderByOptimizationInfo } from \"../compiler/order-by.js\"\nimport type { CollectionConfigBuilder } from \"./collection-config-builder.js\"\nimport type { CollectionSubscription } from \"../../collection/subscription.js\"\n\nconst loadMoreCallbackSymbol = Symbol.for(\n `@tanstack/db.collection-config-builder`\n)\n\nexport class CollectionSubscriber<\n TContext extends Context,\n TResult extends object = GetResult<TContext>,\n> {\n // Keep track of the biggest value we've sent so far (needed for orderBy optimization)\n private biggest: any = undefined\n\n // Track deferred promises for subscription loading states\n private subscriptionLoadingPromises = new Map<\n CollectionSubscription,\n { resolve: () => void }\n >()\n\n constructor(\n private alias: string,\n private collectionId: string,\n private collection: Collection,\n private collectionConfigBuilder: CollectionConfigBuilder<TContext, TResult>\n ) {}\n\n subscribe(): CollectionSubscription {\n const whereClause = this.getWhereClauseForAlias()\n\n if (whereClause) {\n const whereExpression = convertToBasicExpression(whereClause, this.alias)\n\n if (whereExpression) {\n return this.subscribeToChanges(whereExpression)\n }\n\n throw new WhereClauseConversionError(this.collectionId, this.alias)\n }\n\n return this.subscribeToChanges()\n }\n\n private subscribeToChanges(whereExpression?: BasicExpression<boolean>) {\n let subscription: CollectionSubscription\n const orderByInfo = this.getOrderByInfo()\n if (orderByInfo) {\n subscription = this.subscribeToOrderedChanges(\n whereExpression,\n orderByInfo\n )\n } else {\n // If the source alias is lazy then we should not include the initial state\n const includeInitialState = !this.collectionConfigBuilder.isLazyAlias(\n this.alias\n )\n\n subscription = this.subscribeToMatchingChanges(\n whereExpression,\n includeInitialState\n )\n }\n\n const trackLoadPromise = () => {\n // Guard against duplicate transitions\n if (!this.subscriptionLoadingPromises.has(subscription)) {\n let resolve: () => void\n const promise = new Promise<void>((res) => {\n resolve = res\n })\n\n this.subscriptionLoadingPromises.set(subscription, {\n resolve: resolve!,\n })\n this.collectionConfigBuilder.liveQueryCollection!._sync.trackLoadPromise(\n promise\n )\n }\n }\n\n // It can be that we are not yet subscribed when the first `loadSubset` call happens (i.e. 
the initial query).\n // So we also check the status here and if it's `loadingSubset` then we track the load promise\n if (subscription.status === `loadingSubset`) {\n trackLoadPromise()\n }\n\n // Subscribe to subscription status changes to propagate loading state\n const statusUnsubscribe = subscription.on(`status:change`, (event) => {\n if (event.status === `loadingSubset`) {\n trackLoadPromise()\n } else {\n // status is 'ready'\n const deferred = this.subscriptionLoadingPromises.get(subscription)\n if (deferred) {\n // Clear the map entry FIRST (before resolving)\n this.subscriptionLoadingPromises.delete(subscription)\n deferred.resolve()\n }\n }\n })\n\n const unsubscribe = () => {\n // If subscription has a pending promise, resolve it before unsubscribing\n const deferred = this.subscriptionLoadingPromises.get(subscription)\n if (deferred) {\n // Clear the map entry FIRST (before resolving)\n this.subscriptionLoadingPromises.delete(subscription)\n deferred.resolve()\n }\n\n statusUnsubscribe()\n subscription.unsubscribe()\n }\n // currentSyncState is always defined when subscribe() is called\n // (called during sync session setup)\n this.collectionConfigBuilder.currentSyncState!.unsubscribeCallbacks.add(\n unsubscribe\n )\n return subscription\n }\n\n private sendChangesToPipeline(\n changes: Iterable<ChangeMessage<any, string | number>>,\n callback?: () => boolean\n ) {\n // currentSyncState and input are always defined when this method is called\n // (only called from active subscriptions during a sync session)\n const input =\n this.collectionConfigBuilder.currentSyncState!.inputs[this.alias]!\n const sentChanges = sendChangesToInput(\n input,\n changes,\n this.collection.config.getKey\n )\n\n // Do not provide the callback that loads more data\n // if there's no more data to load\n // otherwise we end up in an infinite loop trying to load more data\n const dataLoader = sentChanges > 0 ? 
callback : undefined\n\n // We need to schedule a graph run even if there's no data to load\n // because we need to mark the collection as ready if it's not already\n // and that's only done in `scheduleGraphRun`\n this.collectionConfigBuilder.scheduleGraphRun(dataLoader, {\n alias: this.alias,\n })\n }\n\n private subscribeToMatchingChanges(\n whereExpression: BasicExpression<boolean> | undefined,\n includeInitialState: boolean = false\n ) {\n const sendChanges = (\n changes: Array<ChangeMessage<any, string | number>>\n ) => {\n this.sendChangesToPipeline(changes)\n }\n\n const subscription = this.collection.subscribeChanges(sendChanges, {\n includeInitialState,\n whereExpression,\n })\n\n return subscription\n }\n\n private subscribeToOrderedChanges(\n whereExpression: BasicExpression<boolean> | undefined,\n orderByInfo: OrderByOptimizationInfo\n ) {\n const { orderBy, offset, limit, index } = orderByInfo\n\n const sendChangesInRange = (\n changes: Iterable<ChangeMessage<any, string | number>>\n ) => {\n // Split live updates into a delete of the old value and an insert of the new value\n const splittedChanges = splitUpdates(changes)\n this.sendChangesToPipelineWithTracking(splittedChanges, subscription)\n }\n\n // Subscribe to changes and only send changes that are smaller than the biggest value we've sent so far\n // values that are bigger don't need to be sent because they can't affect the topK\n const subscription = this.collection.subscribeChanges(sendChangesInRange, {\n whereExpression,\n })\n\n subscription.setOrderByIndex(index)\n\n // Normalize the orderBy clauses such that the references are relative to the collection\n const normalizedOrderBy = convertOrderByToBasicExpression(\n orderBy,\n this.alias\n )\n\n // Load the first `offset + limit` values from the index\n // i.e. 
the K items from the collection that fall into the requested range: [offset, offset + limit[\n subscription.requestLimitedSnapshot({\n limit: offset + limit,\n orderBy: normalizedOrderBy,\n })\n\n return subscription\n }\n\n // This function is called by maybeRunGraph\n // after each iteration of the query pipeline\n // to ensure that the orderBy operator has enough data to work with\n loadMoreIfNeeded(subscription: CollectionSubscription) {\n const orderByInfo = this.getOrderByInfo()\n\n if (!orderByInfo) {\n // This query has no orderBy operator\n // so there's no data to load\n return true\n }\n\n const { dataNeeded } = orderByInfo\n\n if (!dataNeeded) {\n // This should never happen because the topK operator should always set the size callback\n // which in turn should lead to the orderBy operator setting the dataNeeded callback\n throw new Error(\n `Missing dataNeeded callback for collection ${this.collectionId}`\n )\n }\n\n // `dataNeeded` probes the orderBy operator to see if it needs more data\n // if it needs more data, it returns the number of items it needs\n const n = dataNeeded()\n if (n > 0) {\n this.loadNextItems(n, subscription)\n }\n return true\n }\n\n private sendChangesToPipelineWithTracking(\n changes: Iterable<ChangeMessage<any, string | number>>,\n subscription: CollectionSubscription\n ) {\n const orderByInfo = this.getOrderByInfo()\n if (!orderByInfo) {\n this.sendChangesToPipeline(changes)\n return\n }\n\n const trackedChanges = this.trackSentValues(changes, orderByInfo.comparator)\n\n // Cache the loadMoreIfNeeded callback on the subscription using a symbol property.\n // This ensures we pass the same function instance to the scheduler each time,\n // allowing it to deduplicate callbacks when multiple changes arrive during a transaction.\n type SubscriptionWithLoader = CollectionSubscription & {\n [loadMoreCallbackSymbol]?: () => boolean\n }\n\n const subscriptionWithLoader = subscription as SubscriptionWithLoader\n\n subscriptionWithLoader[loadMoreCallbackSymbol] ??=\n this.loadMoreIfNeeded.bind(this, subscription)\n\n this.sendChangesToPipeline(\n trackedChanges,\n subscriptionWithLoader[loadMoreCallbackSymbol]\n )\n }\n\n // Loads the next `n` items from the collection\n // starting from the biggest item it has sent\n private loadNextItems(n: number, subscription: CollectionSubscription) {\n const orderByInfo = this.getOrderByInfo()\n if (!orderByInfo) {\n return\n }\n const { orderBy, valueExtractorForRawRow } = orderByInfo\n const biggestSentRow = this.biggest\n const biggestSentValue = biggestSentRow\n ? 
valueExtractorForRawRow(biggestSentRow)\n : biggestSentRow\n\n // Normalize the orderBy clauses such that the references are relative to the collection\n const normalizedOrderBy = convertOrderByToBasicExpression(\n orderBy,\n this.alias\n )\n\n // Take the `n` items after the biggest sent value\n subscription.requestLimitedSnapshot({\n orderBy: normalizedOrderBy,\n limit: n,\n minValue: biggestSentValue,\n })\n }\n\n private getWhereClauseForAlias(): BasicExpression<boolean> | undefined {\n const sourceWhereClausesCache =\n this.collectionConfigBuilder.sourceWhereClausesCache\n if (!sourceWhereClausesCache) {\n return undefined\n }\n return sourceWhereClausesCache.get(this.alias)\n }\n\n private getOrderByInfo(): OrderByOptimizationInfo | undefined {\n const info =\n this.collectionConfigBuilder.optimizableOrderByCollections[\n this.collectionId\n ]\n if (info && info.alias === this.alias) {\n return info\n }\n return undefined\n }\n\n private *trackSentValues(\n changes: Iterable<ChangeMessage<any, string | number>>,\n comparator: (a: any, b: any) => number\n ) {\n for (const change of changes) {\n if (!this.biggest) {\n this.biggest = change.value\n } else if (comparator(this.biggest, change.value) < 0) {\n this.biggest = change.value\n }\n\n yield change\n }\n }\n}\n\n/**\n * Helper function to send changes to a D2 input stream\n */\nfunction sendChangesToInput(\n input: RootStreamBuilder<unknown>,\n changes: Iterable<ChangeMessage>,\n getKey: (item: ChangeMessage[`value`]) => any\n): number {\n const multiSetArray: MultiSetArray<unknown> = []\n for (const change of changes) {\n const key = getKey(change.value)\n if (change.type === `insert`) {\n multiSetArray.push([[key, change.value], 1])\n } else if (change.type === `update`) {\n multiSetArray.push([[key, change.previousValue], -1])\n multiSetArray.push([[key, change.value], 1])\n } else {\n // change.type === `delete`\n multiSetArray.push([[key, change.value], -1])\n }\n }\n\n if (multiSetArray.length !== 0) {\n input.sendData(new MultiSet(multiSetArray))\n }\n\n return multiSetArray.length\n}\n\n/** Splits updates into a delete of the old value and an insert of the new value */\nfunction* splitUpdates<\n T extends object = Record<string, unknown>,\n TKey extends string | number = string | number,\n>(\n changes: Iterable<ChangeMessage<T, TKey>>\n): Generator<ChangeMessage<T, TKey>> {\n for (const change of changes) {\n if (change.type === `update`) {\n yield { type: `delete`, key: change.key, value: change.previousValue! 
}\n yield { type: `insert`, key: change.key, value: change.value }\n } else {\n yield change\n }\n }\n}\n"],"names":["convertToBasicExpression","WhereClauseConversionError","convertOrderByToBasicExpression","MultiSet"],"mappings":";;;;;AAeA,MAAM,yBAAyB,OAAO;AAAA,EACpC;AACF;AAEO,MAAM,qBAGX;AAAA,EAUA,YACU,OACA,cACA,YACA,yBACR;AAJQ,SAAA,QAAA;AACA,SAAA,eAAA;AACA,SAAA,aAAA;AACA,SAAA,0BAAA;AAZV,SAAQ,UAAe;AAGvB,SAAQ,kDAAkC,IAAA;AAAA,EAUvC;AAAA,EAEH,YAAoC;AAClC,UAAM,cAAc,KAAK,uBAAA;AAEzB,QAAI,aAAa;AACf,YAAM,kBAAkBA,YAAAA,yBAAyB,aAAa,KAAK,KAAK;AAExE,UAAI,iBAAiB;AACnB,eAAO,KAAK,mBAAmB,eAAe;AAAA,MAChD;AAEA,YAAM,IAAIC,OAAAA,2BAA2B,KAAK,cAAc,KAAK,KAAK;AAAA,IACpE;AAEA,WAAO,KAAK,mBAAA;AAAA,EACd;AAAA,EAEQ,mBAAmB,iBAA4C;AACrE,QAAI;AACJ,UAAM,cAAc,KAAK,eAAA;AACzB,QAAI,aAAa;AACf,qBAAe,KAAK;AAAA,QAClB;AAAA,QACA;AAAA,MAAA;AAAA,IAEJ,OAAO;AAEL,YAAM,sBAAsB,CAAC,KAAK,wBAAwB;AAAA,QACxD,KAAK;AAAA,MAAA;AAGP,qBAAe,KAAK;AAAA,QAClB;AAAA,QACA;AAAA,MAAA;AAAA,IAEJ;AAEA,UAAM,mBAAmB,MAAM;AAE7B,UAAI,CAAC,KAAK,4BAA4B,IAAI,YAAY,GAAG;AACvD,YAAI;AACJ,cAAM,UAAU,IAAI,QAAc,CAAC,QAAQ;AACzC,oBAAU;AAAA,QACZ,CAAC;AAED,aAAK,4BAA4B,IAAI,cAAc;AAAA,UACjD;AAAA,QAAA,CACD;AACD,aAAK,wBAAwB,oBAAqB,MAAM;AAAA,UACtD;AAAA,QAAA;AAAA,MAEJ;AAAA,IACF;AAIA,QAAI,aAAa,WAAW,iBAAiB;AAC3C,uBAAA;AAAA,IACF;AAGA,UAAM,oBAAoB,aAAa,GAAG,iBAAiB,CAAC,UAAU;AACpE,UAAI,MAAM,WAAW,iBAAiB;AACpC,yBAAA;AAAA,MACF,OAAO;AAEL,cAAM,WAAW,KAAK,4BAA4B,IAAI,YAAY;AAClE,YAAI,UAAU;AAEZ,eAAK,4BAA4B,OAAO,YAAY;AACpD,mBAAS,QAAA;AAAA,QACX;AAAA,MACF;AAAA,IACF,CAAC;AAED,UAAM,cAAc,MAAM;AAExB,YAAM,WAAW,KAAK,4BAA4B,IAAI,YAAY;AAClE,UAAI,UAAU;AAEZ,aAAK,4BAA4B,OAAO,YAAY;AACpD,iBAAS,QAAA;AAAA,MACX;AAEA,wBAAA;AACA,mBAAa,YAAA;AAAA,IACf;AAGA,SAAK,wBAAwB,iBAAkB,qBAAqB;AAAA,MAClE;AAAA,IAAA;AAEF,WAAO;AAAA,EACT;AAAA,EAEQ,sBACN,SACA,UACA;AAGA,UAAM,QACJ,KAAK,wBAAwB,iBAAkB,OAAO,KAAK,KAAK;AAClE,UAAM,cAAc;AAAA,MAClB;AAAA,MACA;AAAA,MACA,KAAK,WAAW,OAAO;AAAA,IAAA;AAMzB,UAAM,aAAa,cAAc,IAAI,WAAW;AAKhD,SAAK,wBAAwB,iBAAiB,YAAY;AAAA,MACxD,OAAO,KAAK;AAAA,IAAA,CACb;AAAA,EACH;AAAA,EAEQ,2BACN,iBACA,sBAA+B,OAC/B;AACA,UAAM,cAAc,CAClB,YACG;AACH,WAAK,sBAAsB,OAAO;AAAA,IACpC;AAEA,UAAM,eAAe,KAAK,WAAW,iBAAiB,aAAa;AAAA,MACjE;AAAA,MACA;AAAA,IAAA,CACD;AAED,WAAO;AAAA,EACT;AAAA,EAEQ,0BACN,iBACA,aACA;AACA,UAAM,EAAE,SAAS,QAAQ,OAAO,UAAU;AAE1C,UAAM,qBAAqB,CACzB,YACG;AAEH,YAAM,kBAAkB,aAAa,OAAO;AAC5C,WAAK,kCAAkC,iBAAiB,YAAY;AAAA,IACtE;AAIA,UAAM,eAAe,KAAK,WAAW,iBAAiB,oBAAoB;AAAA,MACxE;AAAA,IAAA,CACD;AAED,iBAAa,gBAAgB,KAAK;AAGlC,UAAM,oBAAoBC,YAAAA;AAAAA,MACxB;AAAA,MACA,KAAK;AAAA,IAAA;AAKP,iBAAa,uBAAuB;AAAA,MAClC,OAAO,SAAS;AAAA,MAChB,SAAS;AAAA,IAAA,CACV;AAED,WAAO;AAAA,EACT;AAAA;AAAA;AAAA;AAAA,EAKA,iBAAiB,cAAsC;AACrD,UAAM,cAAc,KAAK,eAAA;AAEzB,QAAI,CAAC,aAAa;AAGhB,aAAO;AAAA,IACT;AAEA,UAAM,EAAE,eAAe;AAEvB,QAAI,CAAC,YAAY;AAGf,YAAM,IAAI;AAAA,QACR,8CAA8C,KAAK,YAAY;AAAA,MAAA;AAAA,IAEnE;AAIA,UAAM,IAAI,WAAA;AACV,QAAI,IAAI,GAAG;AACT,WAAK,cAAc,GAAG,YAAY;AAAA,IACpC;AACA,WAAO;AAAA,EACT;AAAA,EAEQ,kCACN,SACA,cACA;AACA,UAAM,cAAc,KAAK,eAAA;AACzB,QAAI,CAAC,aAAa;AAChB,WAAK,sBAAsB,OAAO;AAClC;AAAA,IACF;AAEA,UAAM,iBAAiB,KAAK,gBAAgB,SAAS,YAAY,UAAU;AAS3E,UAAM,yBAAyB;AAE/B,2BAAuB,sBAAsB,MAC3C,KAAK,iBAAiB,KAAK,MAAM,YAAY;AAE/C,SAAK;AAAA,MACH;AAAA,MACA,uBAAuB,sBAAsB;AAAA,IAAA;AAAA,EAEjD;AAAA;AAAA;AAAA,EAIQ,cAAc,GAAW,cAAsC;AACrE,UAAM,cAAc,KAAK,eAAA;AACzB,QAAI,CAAC,aAAa;AAChB;AAAA,IACF;AACA,UAAM,EAAE,SAAS,wBAAA,IAA4B;AAC7C,UAAM,iBAAiB,KAAK;AAC5B,UAAM,mBAAmB,iBACrB,wBAAwB,cAAc,IACtC;AAGJ,UAAM,oBAAoBA,YAAAA;AAAAA,MACxB;AAAA,MACA,KAAK;AAAA,IAAA;AAIP,iBAAa,uBAAuB;AAAA,MAClC,SAAS;AAAA,MACT,OAAO;AAAA,MACP,UAAU;AAAA,IAAA,CACX;AAAA,EACH;AAAA,EAEQ,yBAA+D;AACrE,UAAM,0BACJ,KAAK,wBAAwB;AA
C/B,QAAI,CAAC,yBAAyB;AAC5B,aAAO;AAAA,IACT;AACA,WAAO,wBAAwB,IAAI,KAAK,KAAK;AAAA,EAC/C;AAAA,EAEQ,iBAAsD;AAC5D,UAAM,OACJ,KAAK,wBAAwB,8BAC3B,KAAK,YACP;AACF,QAAI,QAAQ,KAAK,UAAU,KAAK,OAAO;AACrC,aAAO;AAAA,IACT;AACA,WAAO;AAAA,EACT;AAAA,EAEA,CAAS,gBACP,SACA,YACA;AACA,eAAW,UAAU,SAAS;AAC5B,UAAI,CAAC,KAAK,SAAS;AACjB,aAAK,UAAU,OAAO;AAAA,MACxB,WAAW,WAAW,KAAK,SAAS,OAAO,KAAK,IAAI,GAAG;AACrD,aAAK,UAAU,OAAO;AAAA,MACxB;AAEA,YAAM;AAAA,IACR;AAAA,EACF;AACF;AAKA,SAAS,mBACP,OACA,SACA,QACQ;AACR,QAAM,gBAAwC,CAAA;AAC9C,aAAW,UAAU,SAAS;AAC5B,UAAM,MAAM,OAAO,OAAO,KAAK;AAC/B,QAAI,OAAO,SAAS,UAAU;AAC5B,oBAAc,KAAK,CAAC,CAAC,KAAK,OAAO,KAAK,GAAG,CAAC,CAAC;AAAA,IAC7C,WAAW,OAAO,SAAS,UAAU;AACnC,oBAAc,KAAK,CAAC,CAAC,KAAK,OAAO,aAAa,GAAG,EAAE,CAAC;AACpD,oBAAc,KAAK,CAAC,CAAC,KAAK,OAAO,KAAK,GAAG,CAAC,CAAC;AAAA,IAC7C,OAAO;AAEL,oBAAc,KAAK,CAAC,CAAC,KAAK,OAAO,KAAK,GAAG,EAAE,CAAC;AAAA,IAC9C;AAAA,EACF;AAEA,MAAI,cAAc,WAAW,GAAG;AAC9B,UAAM,SAAS,IAAIC,MAAAA,SAAS,aAAa,CAAC;AAAA,EAC5C;AAEA,SAAO,cAAc;AACvB;AAGA,UAAU,aAIR,SACmC;AACnC,aAAW,UAAU,SAAS;AAC5B,QAAI,OAAO,SAAS,UAAU;AAC5B,YAAM,EAAE,MAAM,UAAU,KAAK,OAAO,KAAK,OAAO,OAAO,cAAA;AACvD,YAAM,EAAE,MAAM,UAAU,KAAK,OAAO,KAAK,OAAO,OAAO,MAAA;AAAA,IACzD,OAAO;AACL,YAAM;AAAA,IACR;AAAA,EACF;AACF;;"}
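A note on the embedded collection-subscriber.ts source above: the orderBy optimization keeps a watermark. `trackSentValues` remembers the largest row sent so far under the orderBy comparator, and when the `dataNeeded()` probe reports a shortfall, `loadNextItems` requests the next `n` rows strictly after that watermark via `requestLimitedSnapshot({ orderBy, limit: n, minValue })`. A minimal sketch of just the watermark bookkeeping, assuming an illustrative `trackBiggest` helper and `state` record that are not part of the package:

type Comparator<T> = (a: T, b: T) => number

// Mirrors the trackSentValues generator in the source: pass rows through
// unchanged while remembering the biggest one seen so far, so a later
// top-up request can resume strictly after that watermark.
function* trackBiggest<T>(
  rows: Iterable<T>,
  comparator: Comparator<T>,
  state: { biggest?: T }
): Generator<T> {
  for (const row of rows) {
    if (state.biggest === undefined || comparator(state.biggest, row) < 0) {
      state.biggest = row
    }
    yield row
  }
}

// After draining a batch, state.biggest is the value to pass as minValue
// in the next requestLimitedSnapshot({ orderBy, limit: n, minValue }) call.
const state: { biggest?: number } = {}
for (const row of trackBiggest([3, 1, 2], (a, b) => a - b, state)) void row
console.log(state.biggest) // 3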
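The same source feeds changes into D2 as multiset deltas: `sendChangesToInput` maps an insert to multiplicity +1, a delete to -1, and an update (pre-split by `splitUpdates`) to a retraction of the previous value plus an assertion of the new one. A minimal standalone sketch of that encoding, assuming simplified `ChangeMessage` and `Delta` shapes rather than the package's exported types:

type ChangeMessage<T, K> =
  | { type: `insert`; key: K; value: T }
  | { type: `update`; key: K; value: T; previousValue: T }
  | { type: `delete`; key: K; value: T }

// A multiset delta mirrors one MultiSetArray entry: ([key, value], multiplicity).
type Delta<T, K> = [[K, T], number]

function toDeltas<T, K>(
  changes: Iterable<ChangeMessage<T, K>>
): Array<Delta<T, K>> {
  const deltas: Array<Delta<T, K>> = []
  for (const change of changes) {
    if (change.type === `insert`) {
      deltas.push([[change.key, change.value], 1])
    } else if (change.type === `update`) {
      // An update retracts the old row and asserts the new one,
      // exactly the delete/insert pair that splitUpdates produces.
      deltas.push([[change.key, change.previousValue], -1])
      deltas.push([[change.key, change.value], 1])
    } else {
      deltas.push([[change.key, change.value], -1])
    }
  }
  return deltas
}

// An update yields one retraction and one assertion:
console.log(
  toDeltas([{ type: `update`, key: 1, value: { n: 2 }, previousValue: { n: 1 } }])
)
// [[[1, { n: 1 }], -1], [[1, { n: 2 }], 1]]

Downstream operators never see an update as a primitive, only the -1/+1 pair, which is why the incremental pipeline can treat all three change types as plain multiset arithmetic.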