@tanstack/db 0.5.7 → 0.5.9

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
@@ -14,12 +14,14 @@ class CollectionMutationsManager {
   }
   const items = Array.isArray(data) ? data : [data];
   const mutations = [];
+  const keysInCurrentBatch = /* @__PURE__ */ new Set();
   items.forEach((item) => {
     const validatedData = this.validateData(item, `insert`);
     const key = this.config.getKey(validatedData);
-    if (this.state.has(key)) {
+    if (this.state.has(key) || keysInCurrentBatch.has(key)) {
       throw new errors.DuplicateKeyError(key);
     }
+    keysInCurrentBatch.add(key);
     const globalKey = this.generateGlobalKey(key, item);
     const mutation = {
       mutationId: crypto.randomUUID(),
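
The hunk above adds an in-batch duplicate-key guard to `insert`: each key is now checked against the keys of earlier items in the same `insert([...])` call as well as against the collection's existing state, so a batch that repeats a key throws `DuplicateKeyError` instead of producing two insert mutations. A minimal TypeScript sketch of the pattern; `buildInsertMutations`, `getKey`, and `existingKeys` are simplified stand-ins for illustration, while `keysInCurrentBatch` and `DuplicateKeyError` are the names used in the diff:

```ts
// Minimal sketch of the in-batch duplicate check added in 0.5.9. The names
// `keysInCurrentBatch` and `DuplicateKeyError` come from the diff; the helper
// itself and its arguments are simplified for illustration.
class DuplicateKeyError extends Error {
  constructor(key: string | number) {
    super(`Duplicate key: ${String(key)}`);
  }
}

function buildInsertMutations<T>(
  items: Array<T>,
  getKey: (item: T) => string | number,
  existingKeys: ReadonlySet<string | number>
): Array<{ key: string | number; modified: T }> {
  const mutations: Array<{ key: string | number; modified: T }> = [];
  // Keys already claimed by earlier items in this batch.
  const keysInCurrentBatch = new Set<string | number>();
  for (const item of items) {
    const key = getKey(item);
    // 0.5.7 only checked the collection's existing state, so two items sharing
    // a key within one call both passed; 0.5.9 also rejects in-batch duplicates.
    if (existingKeys.has(key) || keysInCurrentBatch.has(key)) {
      throw new DuplicateKeyError(key);
    }
    keysInCurrentBatch.add(key);
    mutations.push({ key, modified: item });
  }
  return mutations;
}

// The second item reuses id 1, so this call now throws DuplicateKeyError:
// buildInsertMutations([{ id: 1 }, { id: 1 }], (item) => item.id, new Set());
```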
@@ -1 +1 @@
- {"version":3,"file":"mutations.cjs","sources":["../../../src/collection/mutations.ts"],"sourcesContent":["import { withArrayChangeTracking, withChangeTracking } from \"../proxy\"\nimport { createTransaction, getActiveTransaction } from \"../transactions\"\nimport {\n DeleteKeyNotFoundError,\n DuplicateKeyError,\n InvalidSchemaError,\n KeyUpdateNotAllowedError,\n MissingDeleteHandlerError,\n MissingInsertHandlerError,\n MissingUpdateArgumentError,\n MissingUpdateHandlerError,\n NoKeysPassedToDeleteError,\n NoKeysPassedToUpdateError,\n SchemaMustBeSynchronousError,\n SchemaValidationError,\n UndefinedKeyError,\n UpdateKeyNotFoundError,\n} from \"../errors\"\nimport type { Collection, CollectionImpl } from \"./index.js\"\nimport type { StandardSchemaV1 } from \"@standard-schema/spec\"\nimport type {\n CollectionConfig,\n InsertConfig,\n OperationConfig,\n PendingMutation,\n StandardSchema,\n Transaction as TransactionType,\n TransactionWithMutations,\n UtilsRecord,\n WritableDeep,\n} from \"../types\"\nimport type { CollectionLifecycleManager } from \"./lifecycle\"\nimport type { CollectionStateManager } from \"./state\"\n\nexport class CollectionMutationsManager<\n TOutput extends object = Record<string, unknown>,\n TKey extends string | number = string | number,\n TUtils extends UtilsRecord = {},\n TSchema extends StandardSchemaV1 = StandardSchemaV1,\n TInput extends object = TOutput,\n> {\n private lifecycle!: CollectionLifecycleManager<TOutput, TKey, TSchema, TInput>\n private state!: CollectionStateManager<TOutput, TKey, TSchema, TInput>\n private collection!: CollectionImpl<TOutput, TKey, TUtils, TSchema, TInput>\n private config!: CollectionConfig<TOutput, TKey, TSchema>\n private id: string\n\n constructor(config: CollectionConfig<TOutput, TKey, TSchema>, id: string) {\n this.id = id\n this.config = config\n }\n\n setDeps(deps: {\n lifecycle: CollectionLifecycleManager<TOutput, TKey, TSchema, TInput>\n state: CollectionStateManager<TOutput, TKey, TSchema, TInput>\n collection: CollectionImpl<TOutput, TKey, TUtils, TSchema, TInput>\n }) {\n this.lifecycle = deps.lifecycle\n this.state = deps.state\n this.collection = deps.collection\n }\n\n private ensureStandardSchema(schema: unknown): StandardSchema<TOutput> {\n // If the schema already implements the standard-schema interface, return it\n if (schema && `~standard` in (schema as {})) {\n return schema as StandardSchema<TOutput>\n }\n\n throw new InvalidSchemaError()\n }\n\n public validateData(\n data: unknown,\n type: `insert` | `update`,\n key?: TKey\n ): TOutput | never {\n if (!this.config.schema) return data as TOutput\n\n const standardSchema = this.ensureStandardSchema(this.config.schema)\n\n // For updates, we need to merge with the existing data before validation\n if (type === `update` && key) {\n // Get the existing data for this key\n const existingData = this.state.get(key)\n\n if (\n existingData &&\n data &&\n typeof data === `object` &&\n typeof existingData === `object`\n ) {\n // Merge the update with the existing data\n const mergedData = Object.assign({}, existingData, data)\n\n // Validate the merged data\n const result = standardSchema[`~standard`].validate(mergedData)\n\n // Ensure validation is synchronous\n if (result instanceof Promise) {\n throw new SchemaMustBeSynchronousError()\n }\n\n // If validation fails, throw a SchemaValidationError with the issues\n if (`issues` in result && result.issues) {\n const typedIssues = result.issues.map((issue) => ({\n message: issue.message,\n path: 
issue.path?.map((p) => String(p)),\n }))\n throw new SchemaValidationError(type, typedIssues)\n }\n\n // Extract only the modified keys from the validated result\n const validatedMergedData = result.value as TOutput\n const modifiedKeys = Object.keys(data)\n const extractedChanges = Object.fromEntries(\n modifiedKeys.map((k) => [k, validatedMergedData[k as keyof TOutput]])\n ) as TOutput\n\n return extractedChanges\n }\n }\n\n // For inserts or updates without existing data, validate the data directly\n const result = standardSchema[`~standard`].validate(data)\n\n // Ensure validation is synchronous\n if (result instanceof Promise) {\n throw new SchemaMustBeSynchronousError()\n }\n\n // If validation fails, throw a SchemaValidationError with the issues\n if (`issues` in result && result.issues) {\n const typedIssues = result.issues.map((issue) => ({\n message: issue.message,\n path: issue.path?.map((p) => String(p)),\n }))\n throw new SchemaValidationError(type, typedIssues)\n }\n\n return result.value as TOutput\n }\n\n public generateGlobalKey(key: any, item: any): string {\n if (typeof key === `undefined`) {\n throw new UndefinedKeyError(item)\n }\n\n return `KEY::${this.id}/${key}`\n }\n\n /**\n * Inserts one or more items into the collection\n */\n insert = (data: TInput | Array<TInput>, config?: InsertConfig) => {\n this.lifecycle.validateCollectionUsable(`insert`)\n const state = this.state\n const ambientTransaction = getActiveTransaction()\n\n // If no ambient transaction exists, check for an onInsert handler early\n if (!ambientTransaction && !this.config.onInsert) {\n throw new MissingInsertHandlerError()\n }\n\n const items = Array.isArray(data) ? data : [data]\n const mutations: Array<PendingMutation<TOutput>> = []\n\n // Create mutations for each item\n items.forEach((item) => {\n // Validate the data against the schema if one exists\n const validatedData = this.validateData(item, `insert`)\n\n // Check if an item with this ID already exists in the collection\n const key = this.config.getKey(validatedData)\n if (this.state.has(key)) {\n throw new DuplicateKeyError(key)\n }\n const globalKey = this.generateGlobalKey(key, item)\n\n const mutation: PendingMutation<TOutput, `insert`> = {\n mutationId: crypto.randomUUID(),\n original: {},\n modified: validatedData,\n // Pick the values from validatedData based on what's passed in - this is for cases\n // where a schema has default values. The validated data has the extra default\n // values but for changes, we just want to show the data that was actually passed in.\n changes: Object.fromEntries(\n Object.keys(item).map((k) => [\n k,\n validatedData[k as keyof typeof validatedData],\n ])\n ) as TInput,\n globalKey,\n key,\n metadata: config?.metadata as unknown,\n syncMetadata: this.config.sync.getSyncMetadata?.() || {},\n optimistic: config?.optimistic ?? 
true,\n type: `insert`,\n createdAt: new Date(),\n updatedAt: new Date(),\n collection: this.collection,\n }\n\n mutations.push(mutation)\n })\n\n // If an ambient transaction exists, use it\n if (ambientTransaction) {\n ambientTransaction.applyMutations(mutations)\n\n state.transactions.set(ambientTransaction.id, ambientTransaction)\n state.scheduleTransactionCleanup(ambientTransaction)\n state.recomputeOptimisticState(true)\n\n return ambientTransaction\n } else {\n // Create a new transaction with a mutation function that calls the onInsert handler\n const directOpTransaction = createTransaction<TOutput>({\n mutationFn: async (params) => {\n // Call the onInsert handler with the transaction and collection\n return await this.config.onInsert!({\n transaction:\n params.transaction as unknown as TransactionWithMutations<\n TOutput,\n `insert`\n >,\n collection: this.collection as unknown as Collection<TOutput, TKey>,\n })\n },\n })\n\n // Apply mutations to the new transaction\n directOpTransaction.applyMutations(mutations)\n // Errors still reject tx.isPersisted.promise; this catch only prevents global unhandled rejections\n directOpTransaction.commit().catch(() => undefined)\n\n // Add the transaction to the collection's transactions store\n state.transactions.set(directOpTransaction.id, directOpTransaction)\n state.scheduleTransactionCleanup(directOpTransaction)\n state.recomputeOptimisticState(true)\n\n return directOpTransaction\n }\n }\n\n /**\n * Updates one or more items in the collection using a callback function\n */\n update(\n keys: (TKey | unknown) | Array<TKey | unknown>,\n configOrCallback:\n | ((draft: WritableDeep<TInput>) => void)\n | ((drafts: Array<WritableDeep<TInput>>) => void)\n | OperationConfig,\n maybeCallback?:\n | ((draft: WritableDeep<TInput>) => void)\n | ((drafts: Array<WritableDeep<TInput>>) => void)\n ) {\n if (typeof keys === `undefined`) {\n throw new MissingUpdateArgumentError()\n }\n\n const state = this.state\n this.lifecycle.validateCollectionUsable(`update`)\n\n const ambientTransaction = getActiveTransaction()\n\n // If no ambient transaction exists, check for an onUpdate handler early\n if (!ambientTransaction && !this.config.onUpdate) {\n throw new MissingUpdateHandlerError()\n }\n\n const isArray = Array.isArray(keys)\n const keysArray = isArray ? keys : [keys]\n\n if (isArray && keysArray.length === 0) {\n throw new NoKeysPassedToUpdateError()\n }\n\n const callback =\n typeof configOrCallback === `function` ? configOrCallback : maybeCallback!\n const config =\n typeof configOrCallback === `function` ? 
{} : configOrCallback\n\n // Get the current objects or empty objects if they don't exist\n const currentObjects = keysArray.map((key) => {\n const item = this.state.get(key)\n if (!item) {\n throw new UpdateKeyNotFoundError(key)\n }\n\n return item\n }) as unknown as Array<TInput>\n\n let changesArray\n if (isArray) {\n // Use the proxy to track changes for all objects\n changesArray = withArrayChangeTracking(\n currentObjects,\n callback as (draft: Array<TInput>) => void\n )\n } else {\n const result = withChangeTracking(\n currentObjects[0]!,\n callback as (draft: TInput) => void\n )\n changesArray = [result]\n }\n\n // Create mutations for each object that has changes\n const mutations: Array<\n PendingMutation<\n TOutput,\n `update`,\n CollectionImpl<TOutput, TKey, TUtils, TSchema, TInput>\n >\n > = keysArray\n .map((key, index) => {\n const itemChanges = changesArray[index] // User-provided changes for this specific item\n\n // Skip items with no changes\n if (!itemChanges || Object.keys(itemChanges).length === 0) {\n return null\n }\n\n const originalItem = currentObjects[index] as unknown as TOutput\n // Validate the user-provided changes for this item\n const validatedUpdatePayload = this.validateData(\n itemChanges,\n `update`,\n key\n )\n\n // Construct the full modified item by applying the validated update payload to the original item\n const modifiedItem = Object.assign(\n {},\n originalItem,\n validatedUpdatePayload\n )\n\n // Check if the ID of the item is being changed\n const originalItemId = this.config.getKey(originalItem)\n const modifiedItemId = this.config.getKey(modifiedItem)\n\n if (originalItemId !== modifiedItemId) {\n throw new KeyUpdateNotAllowedError(originalItemId, modifiedItemId)\n }\n\n const globalKey = this.generateGlobalKey(modifiedItemId, modifiedItem)\n\n return {\n mutationId: crypto.randomUUID(),\n original: originalItem,\n modified: modifiedItem,\n // Pick the values from modifiedItem based on what's passed in - this is for cases\n // where a schema has default values or transforms. The modified data has the extra\n // default or transformed values but for changes, we just want to show the data that\n // was actually passed in.\n changes: Object.fromEntries(\n Object.keys(itemChanges).map((k) => [\n k,\n modifiedItem[k as keyof typeof modifiedItem],\n ])\n ) as TInput,\n globalKey,\n key,\n metadata: config.metadata as unknown,\n syncMetadata: (state.syncedMetadata.get(key) || {}) as Record<\n string,\n unknown\n >,\n optimistic: config.optimistic ?? 
true,\n type: `update`,\n createdAt: new Date(),\n updatedAt: new Date(),\n collection: this.collection,\n }\n })\n .filter(Boolean) as Array<\n PendingMutation<\n TOutput,\n `update`,\n CollectionImpl<TOutput, TKey, TUtils, TSchema, TInput>\n >\n >\n\n // If no changes were made, return an empty transaction early\n if (mutations.length === 0) {\n const emptyTransaction = createTransaction({\n mutationFn: async () => {},\n })\n // Errors still propagate through tx.isPersisted.promise; suppress the background commit from warning\n emptyTransaction.commit().catch(() => undefined)\n // Schedule cleanup for empty transaction\n state.scheduleTransactionCleanup(emptyTransaction)\n return emptyTransaction\n }\n\n // If an ambient transaction exists, use it\n if (ambientTransaction) {\n ambientTransaction.applyMutations(mutations)\n\n state.transactions.set(ambientTransaction.id, ambientTransaction)\n state.scheduleTransactionCleanup(ambientTransaction)\n state.recomputeOptimisticState(true)\n\n return ambientTransaction\n }\n\n // No need to check for onUpdate handler here as we've already checked at the beginning\n\n // Create a new transaction with a mutation function that calls the onUpdate handler\n const directOpTransaction = createTransaction<TOutput>({\n mutationFn: async (params) => {\n // Call the onUpdate handler with the transaction and collection\n return this.config.onUpdate!({\n transaction:\n params.transaction as unknown as TransactionWithMutations<\n TOutput,\n `update`\n >,\n collection: this.collection as unknown as Collection<TOutput, TKey>,\n })\n },\n })\n\n // Apply mutations to the new transaction\n directOpTransaction.applyMutations(mutations)\n // Errors still hit tx.isPersisted.promise; avoid leaking an unhandled rejection from the fire-and-forget commit\n directOpTransaction.commit().catch(() => undefined)\n\n // Add the transaction to the collection's transactions store\n\n state.transactions.set(directOpTransaction.id, directOpTransaction)\n state.scheduleTransactionCleanup(directOpTransaction)\n state.recomputeOptimisticState(true)\n\n return directOpTransaction\n }\n\n /**\n * Deletes one or more items from the collection\n */\n delete = (\n keys: Array<TKey> | TKey,\n config?: OperationConfig\n ): TransactionType<any> => {\n const state = this.state\n this.lifecycle.validateCollectionUsable(`delete`)\n\n const ambientTransaction = getActiveTransaction()\n\n // If no ambient transaction exists, check for an onDelete handler early\n if (!ambientTransaction && !this.config.onDelete) {\n throw new MissingDeleteHandlerError()\n }\n\n if (Array.isArray(keys) && keys.length === 0) {\n throw new NoKeysPassedToDeleteError()\n }\n\n const keysArray = Array.isArray(keys) ? keys : [keys]\n const mutations: Array<\n PendingMutation<\n TOutput,\n `delete`,\n CollectionImpl<TOutput, TKey, TUtils, TSchema, TInput>\n >\n > = []\n\n for (const key of keysArray) {\n if (!this.state.has(key)) {\n throw new DeleteKeyNotFoundError(key)\n }\n const globalKey = this.generateGlobalKey(key, this.state.get(key)!)\n const mutation: PendingMutation<\n TOutput,\n `delete`,\n CollectionImpl<TOutput, TKey, TUtils, TSchema, TInput>\n > = {\n mutationId: crypto.randomUUID(),\n original: this.state.get(key)!,\n modified: this.state.get(key)!,\n changes: this.state.get(key)!,\n globalKey,\n key,\n metadata: config?.metadata as unknown,\n syncMetadata: (state.syncedMetadata.get(key) || {}) as Record<\n string,\n unknown\n >,\n optimistic: config?.optimistic ?? 
true,\n type: `delete`,\n createdAt: new Date(),\n updatedAt: new Date(),\n collection: this.collection,\n }\n\n mutations.push(mutation)\n }\n\n // If an ambient transaction exists, use it\n if (ambientTransaction) {\n ambientTransaction.applyMutations(mutations)\n\n state.transactions.set(ambientTransaction.id, ambientTransaction)\n state.scheduleTransactionCleanup(ambientTransaction)\n state.recomputeOptimisticState(true)\n\n return ambientTransaction\n }\n\n // Create a new transaction with a mutation function that calls the onDelete handler\n const directOpTransaction = createTransaction<TOutput>({\n autoCommit: true,\n mutationFn: async (params) => {\n // Call the onDelete handler with the transaction and collection\n return this.config.onDelete!({\n transaction:\n params.transaction as unknown as TransactionWithMutations<\n TOutput,\n `delete`\n >,\n collection: this.collection as unknown as Collection<TOutput, TKey>,\n })\n },\n })\n\n // Apply mutations to the new transaction\n directOpTransaction.applyMutations(mutations)\n // Errors still reject tx.isPersisted.promise; silence the internal commit promise to prevent test noise\n directOpTransaction.commit().catch(() => undefined)\n\n state.transactions.set(directOpTransaction.id, directOpTransaction)\n state.scheduleTransactionCleanup(directOpTransaction)\n state.recomputeOptimisticState(true)\n\n return directOpTransaction\n }\n}\n"],"names":["config","getActiveTransaction","MissingInsertHandlerError","DuplicateKeyError","createTransaction","MissingDeleteHandlerError","NoKeysPassedToDeleteError","DeleteKeyNotFoundError","InvalidSchemaError","result","SchemaMustBeSynchronousError","SchemaValidationError","UndefinedKeyError","MissingUpdateArgumentError","MissingUpdateHandlerError","NoKeysPassedToUpdateError","UpdateKeyNotFoundError","withArrayChangeTracking","withChangeTracking","KeyUpdateNotAllowedError"],"mappings":";;;;;AAkCO,MAAM,2BAMX;AAAA,EAOA,YAAY,QAAkD,IAAY;AA0G1E,SAAA,SAAS,CAAC,MAA8BA,YAA0B;AAChE,WAAK,UAAU,yBAAyB,QAAQ;AAChD,YAAM,QAAQ,KAAK;AACnB,YAAM,qBAAqBC,aAAAA,qBAAA;AAG3B,UAAI,CAAC,sBAAsB,CAAC,KAAK,OAAO,UAAU;AAChD,cAAM,IAAIC,OAAAA,0BAAA;AAAA,MACZ;AAEA,YAAM,QAAQ,MAAM,QAAQ,IAAI,IAAI,OAAO,CAAC,IAAI;AAChD,YAAM,YAA6C,CAAA;AAGnD,YAAM,QAAQ,CAAC,SAAS;AAEtB,cAAM,gBAAgB,KAAK,aAAa,MAAM,QAAQ;AAGtD,cAAM,MAAM,KAAK,OAAO,OAAO,aAAa;AAC5C,YAAI,KAAK,MAAM,IAAI,GAAG,GAAG;AACvB,gBAAM,IAAIC,OAAAA,kBAAkB,GAAG;AAAA,QACjC;AACA,cAAM,YAAY,KAAK,kBAAkB,KAAK,IAAI;AAElD,cAAM,WAA+C;AAAA,UACnD,YAAY,OAAO,WAAA;AAAA,UACnB,UAAU,CAAA;AAAA,UACV,UAAU;AAAA;AAAA;AAAA;AAAA,UAIV,SAAS,OAAO;AAAA,YACd,OAAO,KAAK,IAAI,EAAE,IAAI,CAAC,MAAM;AAAA,cAC3B;AAAA,cACA,cAAc,CAA+B;AAAA,YAAA,CAC9C;AAAA,UAAA;AAAA,UAEH;AAAA,UACA;AAAA,UACA,UAAUH,SAAQ;AAAA,UAClB,cAAc,KAAK,OAAO,KAAK,kBAAA,KAAuB,CAAA;AAAA,UACtD,YAAYA,SAAQ,cAAc;AAAA,UAClC,MAAM;AAAA,UACN,+BAAe,KAAA;AAAA,UACf,+BAAe,KAAA;AAAA,UACf,YAAY,KAAK;AAAA,QAAA;AAGnB,kBAAU,KAAK,QAAQ;AAAA,MACzB,CAAC;AAGD,UAAI,oBAAoB;AACtB,2BAAmB,eAAe,SAAS;AAE3C,cAAM,aAAa,IAAI,mBAAmB,IAAI,kBAAkB;AAChE,cAAM,2BAA2B,kBAAkB;AACnD,cAAM,yBAAyB,IAAI;AAEnC,eAAO;AAAA,MACT,OAAO;AAEL,cAAM,sBAAsBI,aAAAA,kBAA2B;AAAA,UACrD,YAAY,OAAO,WAAW;AAE5B,mBAAO,MAAM,KAAK,OAAO,SAAU;AAAA,cACjC,aACE,OAAO;AAAA,cAIT,YAAY,KAAK;AAAA,YAAA,CAClB;AAAA,UACH;AAAA,QAAA,CACD;AAGD,4BAAoB,eAAe,SAAS;AAE5C,4BAAoB,OAAA,EAAS,MAAM,MAAM,MAAS;AAGlD,cAAM,aAAa,IAAI,oBAAoB,IAAI,mBAAmB;AAClE,cAAM,2BAA2B,mBAAmB;AACpD,cAAM,yBAAyB,IAAI;AAEnC,eAAO;AAAA,MACT;AAAA,IACF;AAwMA,SAAA,SAAS,CACP,MACAJ,YACyB;AACzB,YAAM,QAAQ,KAAK;AACnB,WAAK,UAAU,yBAAyB,QAAQ;AAEhD,YAAM,qBAAqBC,aAAAA,qBAAA;AAG3B,UAAI,CAAC,sBAAsB,CAAC,KAAK
,OAAO,UAAU;AAChD,cAAM,IAAII,OAAAA,0BAAA;AAAA,MACZ;AAEA,UAAI,MAAM,QAAQ,IAAI,KAAK,KAAK,WAAW,GAAG;AAC5C,cAAM,IAAIC,OAAAA,0BAAA;AAAA,MACZ;AAEA,YAAM,YAAY,MAAM,QAAQ,IAAI,IAAI,OAAO,CAAC,IAAI;AACpD,YAAM,YAMF,CAAA;AAEJ,iBAAW,OAAO,WAAW;AAC3B,YAAI,CAAC,KAAK,MAAM,IAAI,GAAG,GAAG;AACxB,gBAAM,IAAIC,OAAAA,uBAAuB,GAAG;AAAA,QACtC;AACA,cAAM,YAAY,KAAK,kBAAkB,KAAK,KAAK,MAAM,IAAI,GAAG,CAAE;AAClE,cAAM,WAIF;AAAA,UACF,YAAY,OAAO,WAAA;AAAA,UACnB,UAAU,KAAK,MAAM,IAAI,GAAG;AAAA,UAC5B,UAAU,KAAK,MAAM,IAAI,GAAG;AAAA,UAC5B,SAAS,KAAK,MAAM,IAAI,GAAG;AAAA,UAC3B;AAAA,UACA;AAAA,UACA,UAAUP,SAAQ;AAAA,UAClB,cAAe,MAAM,eAAe,IAAI,GAAG,KAAK,CAAA;AAAA,UAIhD,YAAYA,SAAQ,cAAc;AAAA,UAClC,MAAM;AAAA,UACN,+BAAe,KAAA;AAAA,UACf,+BAAe,KAAA;AAAA,UACf,YAAY,KAAK;AAAA,QAAA;AAGnB,kBAAU,KAAK,QAAQ;AAAA,MACzB;AAGA,UAAI,oBAAoB;AACtB,2BAAmB,eAAe,SAAS;AAE3C,cAAM,aAAa,IAAI,mBAAmB,IAAI,kBAAkB;AAChE,cAAM,2BAA2B,kBAAkB;AACnD,cAAM,yBAAyB,IAAI;AAEnC,eAAO;AAAA,MACT;AAGA,YAAM,sBAAsBI,aAAAA,kBAA2B;AAAA,QACrD,YAAY;AAAA,QACZ,YAAY,OAAO,WAAW;AAE5B,iBAAO,KAAK,OAAO,SAAU;AAAA,YAC3B,aACE,OAAO;AAAA,YAIT,YAAY,KAAK;AAAA,UAAA,CAClB;AAAA,QACH;AAAA,MAAA,CACD;AAGD,0BAAoB,eAAe,SAAS;AAE5C,0BAAoB,OAAA,EAAS,MAAM,MAAM,MAAS;AAElD,YAAM,aAAa,IAAI,oBAAoB,IAAI,mBAAmB;AAClE,YAAM,2BAA2B,mBAAmB;AACpD,YAAM,yBAAyB,IAAI;AAEnC,aAAO;AAAA,IACT;AAzeE,SAAK,KAAK;AACV,SAAK,SAAS;AAAA,EAChB;AAAA,EAEA,QAAQ,MAIL;AACD,SAAK,YAAY,KAAK;AACtB,SAAK,QAAQ,KAAK;AAClB,SAAK,aAAa,KAAK;AAAA,EACzB;AAAA,EAEQ,qBAAqB,QAA0C;AAErE,QAAI,UAAU,eAAgB,QAAe;AAC3C,aAAO;AAAA,IACT;AAEA,UAAM,IAAII,OAAAA,mBAAA;AAAA,EACZ;AAAA,EAEO,aACL,MACA,MACA,KACiB;AACjB,QAAI,CAAC,KAAK,OAAO,OAAQ,QAAO;AAEhC,UAAM,iBAAiB,KAAK,qBAAqB,KAAK,OAAO,MAAM;AAGnE,QAAI,SAAS,YAAY,KAAK;AAE5B,YAAM,eAAe,KAAK,MAAM,IAAI,GAAG;AAEvC,UACE,gBACA,QACA,OAAO,SAAS,YAChB,OAAO,iBAAiB,UACxB;AAEA,cAAM,aAAa,OAAO,OAAO,CAAA,GAAI,cAAc,IAAI;AAGvD,cAAMC,UAAS,eAAe,WAAW,EAAE,SAAS,UAAU;AAG9D,YAAIA,mBAAkB,SAAS;AAC7B,gBAAM,IAAIC,OAAAA,6BAAA;AAAA,QACZ;AAGA,YAAI,YAAYD,WAAUA,QAAO,QAAQ;AACvC,gBAAM,cAAcA,QAAO,OAAO,IAAI,CAAC,WAAW;AAAA,YAChD,SAAS,MAAM;AAAA,YACf,MAAM,MAAM,MAAM,IAAI,CAAC,MAAM,OAAO,CAAC,CAAC;AAAA,UAAA,EACtC;AACF,gBAAM,IAAIE,OAAAA,sBAAsB,MAAM,WAAW;AAAA,QACnD;AAGA,cAAM,sBAAsBF,QAAO;AACnC,cAAM,eAAe,OAAO,KAAK,IAAI;AACrC,cAAM,mBAAmB,OAAO;AAAA,UAC9B,aAAa,IAAI,CAAC,MAAM,CAAC,GAAG,oBAAoB,CAAkB,CAAC,CAAC;AAAA,QAAA;AAGtE,eAAO;AAAA,MACT;AAAA,IACF;AAGA,UAAM,SAAS,eAAe,WAAW,EAAE,SAAS,IAAI;AAGxD,QAAI,kBAAkB,SAAS;AAC7B,YAAM,IAAIC,OAAAA,6BAAA;AAAA,IACZ;AAGA,QAAI,YAAY,UAAU,OAAO,QAAQ;AACvC,YAAM,cAAc,OAAO,OAAO,IAAI,CAAC,WAAW;AAAA,QAChD,SAAS,MAAM;AAAA,QACf,MAAM,MAAM,MAAM,IAAI,CAAC,MAAM,OAAO,CAAC,CAAC;AAAA,MAAA,EACtC;AACF,YAAM,IAAIC,OAAAA,sBAAsB,MAAM,WAAW;AAAA,IACnD;AAEA,WAAO,OAAO;AAAA,EAChB;AAAA,EAEO,kBAAkB,KAAU,MAAmB;AACpD,QAAI,OAAO,QAAQ,aAAa;AAC9B,YAAM,IAAIC,OAAAA,kBAAkB,IAAI;AAAA,IAClC;AAEA,WAAO,QAAQ,KAAK,EAAE,IAAI,GAAG;AAAA,EAC/B;AAAA;AAAA;AAAA;AAAA,EAmGA,OACE,MACA,kBAIA,eAGA;AACA,QAAI,OAAO,SAAS,aAAa;AAC/B,YAAM,IAAIC,OAAAA,2BAAA;AAAA,IACZ;AAEA,UAAM,QAAQ,KAAK;AACnB,SAAK,UAAU,yBAAyB,QAAQ;AAEhD,UAAM,qBAAqBZ,aAAAA,qBAAA;AAG3B,QAAI,CAAC,sBAAsB,CAAC,KAAK,OAAO,UAAU;AAChD,YAAM,IAAIa,OAAAA,0BAAA;AAAA,IACZ;AAEA,UAAM,UAAU,MAAM,QAAQ,IAAI;AAClC,UAAM,YAAY,UAAU,OAAO,CAAC,IAAI;AAExC,QAAI,WAAW,UAAU,WAAW,GAAG;AACrC,YAAM,IAAIC,OAAAA,0BAAA;AAAA,IACZ;AAEA,UAAM,WACJ,OAAO,qBAAqB,aAAa,mBAAmB;AAC9D,UAAM,SACJ,OAAO,qBAAqB,aAAa,CAAA,IAAK;AAGhD,UAAM,iBAAiB,UAAU,IAAI,CAAC,QAAQ;AAC5C,YAAM,OAAO,KAAK,MAAM,IAAI,GAAG;AAC/B,UAAI,CAAC,MAAM;AACT,cAAM,IAAIC,OAAAA,uBAAuB,GAAG;AAAA,MACtC;AAEA,aAAO;AAAA,IACT,CAAC;AAED,QAAI;AACJ,QAAI,SAAS;AAEX,qBAAeC,MAAAA;AAAAA,QACb;AAAA,QACA;AAAA,MAAA;AAAA,IAEJ,OAAO;AACL,YAAM,SAASC,MAAAA;AAAAA,QACb,eAAe,CAAC;A
AAA,QAChB;AAAA,MAAA;AAEF,qBAAe,CAAC,MAAM;AAAA,IACxB;AAGA,UAAM,YAMF,UACD,IAAI,CAAC,KAAK,UAAU;AACnB,YAAM,cAAc,aAAa,KAAK;AAGtC,UAAI,CAAC,eAAe,OAAO,KAAK,WAAW,EAAE,WAAW,GAAG;AACzD,eAAO;AAAA,MACT;AAEA,YAAM,eAAe,eAAe,KAAK;AAEzC,YAAM,yBAAyB,KAAK;AAAA,QAClC;AAAA,QACA;AAAA,QACA;AAAA,MAAA;AAIF,YAAM,eAAe,OAAO;AAAA,QAC1B,CAAA;AAAA,QACA;AAAA,QACA;AAAA,MAAA;AAIF,YAAM,iBAAiB,KAAK,OAAO,OAAO,YAAY;AACtD,YAAM,iBAAiB,KAAK,OAAO,OAAO,YAAY;AAEtD,UAAI,mBAAmB,gBAAgB;AACrC,cAAM,IAAIC,OAAAA,yBAAyB,gBAAgB,cAAc;AAAA,MACnE;AAEA,YAAM,YAAY,KAAK,kBAAkB,gBAAgB,YAAY;AAErE,aAAO;AAAA,QACL,YAAY,OAAO,WAAA;AAAA,QACnB,UAAU;AAAA,QACV,UAAU;AAAA;AAAA;AAAA;AAAA;AAAA,QAKV,SAAS,OAAO;AAAA,UACd,OAAO,KAAK,WAAW,EAAE,IAAI,CAAC,MAAM;AAAA,YAClC;AAAA,YACA,aAAa,CAA8B;AAAA,UAAA,CAC5C;AAAA,QAAA;AAAA,QAEH;AAAA,QACA;AAAA,QACA,UAAU,OAAO;AAAA,QACjB,cAAe,MAAM,eAAe,IAAI,GAAG,KAAK,CAAA;AAAA,QAIhD,YAAY,OAAO,cAAc;AAAA,QACjC,MAAM;AAAA,QACN,+BAAe,KAAA;AAAA,QACf,+BAAe,KAAA;AAAA,QACf,YAAY,KAAK;AAAA,MAAA;AAAA,IAErB,CAAC,EACA,OAAO,OAAO;AASjB,QAAI,UAAU,WAAW,GAAG;AAC1B,YAAM,mBAAmBf,aAAAA,kBAAkB;AAAA,QACzC,YAAY,YAAY;AAAA,QAAC;AAAA,MAAA,CAC1B;AAED,uBAAiB,OAAA,EAAS,MAAM,MAAM,MAAS;AAE/C,YAAM,2BAA2B,gBAAgB;AACjD,aAAO;AAAA,IACT;AAGA,QAAI,oBAAoB;AACtB,yBAAmB,eAAe,SAAS;AAE3C,YAAM,aAAa,IAAI,mBAAmB,IAAI,kBAAkB;AAChE,YAAM,2BAA2B,kBAAkB;AACnD,YAAM,yBAAyB,IAAI;AAEnC,aAAO;AAAA,IACT;AAKA,UAAM,sBAAsBA,aAAAA,kBAA2B;AAAA,MACrD,YAAY,OAAO,WAAW;AAE5B,eAAO,KAAK,OAAO,SAAU;AAAA,UAC3B,aACE,OAAO;AAAA,UAIT,YAAY,KAAK;AAAA,QAAA,CAClB;AAAA,MACH;AAAA,IAAA,CACD;AAGD,wBAAoB,eAAe,SAAS;AAE5C,wBAAoB,OAAA,EAAS,MAAM,MAAM,MAAS;AAIlD,UAAM,aAAa,IAAI,oBAAoB,IAAI,mBAAmB;AAClE,UAAM,2BAA2B,mBAAmB;AACpD,UAAM,yBAAyB,IAAI;AAEnC,WAAO;AAAA,EACT;AAqGF;;"}
+ {"version":3,"file":"mutations.cjs","sources":["../../../src/collection/mutations.ts"],"sourcesContent":["import { withArrayChangeTracking, withChangeTracking } from \"../proxy\"\nimport { createTransaction, getActiveTransaction } from \"../transactions\"\nimport {\n DeleteKeyNotFoundError,\n DuplicateKeyError,\n InvalidSchemaError,\n KeyUpdateNotAllowedError,\n MissingDeleteHandlerError,\n MissingInsertHandlerError,\n MissingUpdateArgumentError,\n MissingUpdateHandlerError,\n NoKeysPassedToDeleteError,\n NoKeysPassedToUpdateError,\n SchemaMustBeSynchronousError,\n SchemaValidationError,\n UndefinedKeyError,\n UpdateKeyNotFoundError,\n} from \"../errors\"\nimport type { Collection, CollectionImpl } from \"./index.js\"\nimport type { StandardSchemaV1 } from \"@standard-schema/spec\"\nimport type {\n CollectionConfig,\n InsertConfig,\n OperationConfig,\n PendingMutation,\n StandardSchema,\n Transaction as TransactionType,\n TransactionWithMutations,\n UtilsRecord,\n WritableDeep,\n} from \"../types\"\nimport type { CollectionLifecycleManager } from \"./lifecycle\"\nimport type { CollectionStateManager } from \"./state\"\n\nexport class CollectionMutationsManager<\n TOutput extends object = Record<string, unknown>,\n TKey extends string | number = string | number,\n TUtils extends UtilsRecord = {},\n TSchema extends StandardSchemaV1 = StandardSchemaV1,\n TInput extends object = TOutput,\n> {\n private lifecycle!: CollectionLifecycleManager<TOutput, TKey, TSchema, TInput>\n private state!: CollectionStateManager<TOutput, TKey, TSchema, TInput>\n private collection!: CollectionImpl<TOutput, TKey, TUtils, TSchema, TInput>\n private config!: CollectionConfig<TOutput, TKey, TSchema>\n private id: string\n\n constructor(config: CollectionConfig<TOutput, TKey, TSchema>, id: string) {\n this.id = id\n this.config = config\n }\n\n setDeps(deps: {\n lifecycle: CollectionLifecycleManager<TOutput, TKey, TSchema, TInput>\n state: CollectionStateManager<TOutput, TKey, TSchema, TInput>\n collection: CollectionImpl<TOutput, TKey, TUtils, TSchema, TInput>\n }) {\n this.lifecycle = deps.lifecycle\n this.state = deps.state\n this.collection = deps.collection\n }\n\n private ensureStandardSchema(schema: unknown): StandardSchema<TOutput> {\n // If the schema already implements the standard-schema interface, return it\n if (schema && `~standard` in (schema as {})) {\n return schema as StandardSchema<TOutput>\n }\n\n throw new InvalidSchemaError()\n }\n\n public validateData(\n data: unknown,\n type: `insert` | `update`,\n key?: TKey\n ): TOutput | never {\n if (!this.config.schema) return data as TOutput\n\n const standardSchema = this.ensureStandardSchema(this.config.schema)\n\n // For updates, we need to merge with the existing data before validation\n if (type === `update` && key) {\n // Get the existing data for this key\n const existingData = this.state.get(key)\n\n if (\n existingData &&\n data &&\n typeof data === `object` &&\n typeof existingData === `object`\n ) {\n // Merge the update with the existing data\n const mergedData = Object.assign({}, existingData, data)\n\n // Validate the merged data\n const result = standardSchema[`~standard`].validate(mergedData)\n\n // Ensure validation is synchronous\n if (result instanceof Promise) {\n throw new SchemaMustBeSynchronousError()\n }\n\n // If validation fails, throw a SchemaValidationError with the issues\n if (`issues` in result && result.issues) {\n const typedIssues = result.issues.map((issue) => ({\n message: issue.message,\n path: 
issue.path?.map((p) => String(p)),\n }))\n throw new SchemaValidationError(type, typedIssues)\n }\n\n // Extract only the modified keys from the validated result\n const validatedMergedData = result.value as TOutput\n const modifiedKeys = Object.keys(data)\n const extractedChanges = Object.fromEntries(\n modifiedKeys.map((k) => [k, validatedMergedData[k as keyof TOutput]])\n ) as TOutput\n\n return extractedChanges\n }\n }\n\n // For inserts or updates without existing data, validate the data directly\n const result = standardSchema[`~standard`].validate(data)\n\n // Ensure validation is synchronous\n if (result instanceof Promise) {\n throw new SchemaMustBeSynchronousError()\n }\n\n // If validation fails, throw a SchemaValidationError with the issues\n if (`issues` in result && result.issues) {\n const typedIssues = result.issues.map((issue) => ({\n message: issue.message,\n path: issue.path?.map((p) => String(p)),\n }))\n throw new SchemaValidationError(type, typedIssues)\n }\n\n return result.value as TOutput\n }\n\n public generateGlobalKey(key: any, item: any): string {\n if (typeof key === `undefined`) {\n throw new UndefinedKeyError(item)\n }\n\n return `KEY::${this.id}/${key}`\n }\n\n /**\n * Inserts one or more items into the collection\n */\n insert = (data: TInput | Array<TInput>, config?: InsertConfig) => {\n this.lifecycle.validateCollectionUsable(`insert`)\n const state = this.state\n const ambientTransaction = getActiveTransaction()\n\n // If no ambient transaction exists, check for an onInsert handler early\n if (!ambientTransaction && !this.config.onInsert) {\n throw new MissingInsertHandlerError()\n }\n\n const items = Array.isArray(data) ? data : [data]\n const mutations: Array<PendingMutation<TOutput>> = []\n const keysInCurrentBatch = new Set<TKey>()\n\n // Create mutations for each item\n items.forEach((item) => {\n // Validate the data against the schema if one exists\n const validatedData = this.validateData(item, `insert`)\n\n // Check if an item with this ID already exists in the collection or in the current batch\n const key = this.config.getKey(validatedData)\n if (this.state.has(key) || keysInCurrentBatch.has(key)) {\n throw new DuplicateKeyError(key)\n }\n keysInCurrentBatch.add(key)\n const globalKey = this.generateGlobalKey(key, item)\n\n const mutation: PendingMutation<TOutput, `insert`> = {\n mutationId: crypto.randomUUID(),\n original: {},\n modified: validatedData,\n // Pick the values from validatedData based on what's passed in - this is for cases\n // where a schema has default values. The validated data has the extra default\n // values but for changes, we just want to show the data that was actually passed in.\n changes: Object.fromEntries(\n Object.keys(item).map((k) => [\n k,\n validatedData[k as keyof typeof validatedData],\n ])\n ) as TInput,\n globalKey,\n key,\n metadata: config?.metadata as unknown,\n syncMetadata: this.config.sync.getSyncMetadata?.() || {},\n optimistic: config?.optimistic ?? 
true,\n type: `insert`,\n createdAt: new Date(),\n updatedAt: new Date(),\n collection: this.collection,\n }\n\n mutations.push(mutation)\n })\n\n // If an ambient transaction exists, use it\n if (ambientTransaction) {\n ambientTransaction.applyMutations(mutations)\n\n state.transactions.set(ambientTransaction.id, ambientTransaction)\n state.scheduleTransactionCleanup(ambientTransaction)\n state.recomputeOptimisticState(true)\n\n return ambientTransaction\n } else {\n // Create a new transaction with a mutation function that calls the onInsert handler\n const directOpTransaction = createTransaction<TOutput>({\n mutationFn: async (params) => {\n // Call the onInsert handler with the transaction and collection\n return await this.config.onInsert!({\n transaction:\n params.transaction as unknown as TransactionWithMutations<\n TOutput,\n `insert`\n >,\n collection: this.collection as unknown as Collection<TOutput, TKey>,\n })\n },\n })\n\n // Apply mutations to the new transaction\n directOpTransaction.applyMutations(mutations)\n // Errors still reject tx.isPersisted.promise; this catch only prevents global unhandled rejections\n directOpTransaction.commit().catch(() => undefined)\n\n // Add the transaction to the collection's transactions store\n state.transactions.set(directOpTransaction.id, directOpTransaction)\n state.scheduleTransactionCleanup(directOpTransaction)\n state.recomputeOptimisticState(true)\n\n return directOpTransaction\n }\n }\n\n /**\n * Updates one or more items in the collection using a callback function\n */\n update(\n keys: (TKey | unknown) | Array<TKey | unknown>,\n configOrCallback:\n | ((draft: WritableDeep<TInput>) => void)\n | ((drafts: Array<WritableDeep<TInput>>) => void)\n | OperationConfig,\n maybeCallback?:\n | ((draft: WritableDeep<TInput>) => void)\n | ((drafts: Array<WritableDeep<TInput>>) => void)\n ) {\n if (typeof keys === `undefined`) {\n throw new MissingUpdateArgumentError()\n }\n\n const state = this.state\n this.lifecycle.validateCollectionUsable(`update`)\n\n const ambientTransaction = getActiveTransaction()\n\n // If no ambient transaction exists, check for an onUpdate handler early\n if (!ambientTransaction && !this.config.onUpdate) {\n throw new MissingUpdateHandlerError()\n }\n\n const isArray = Array.isArray(keys)\n const keysArray = isArray ? keys : [keys]\n\n if (isArray && keysArray.length === 0) {\n throw new NoKeysPassedToUpdateError()\n }\n\n const callback =\n typeof configOrCallback === `function` ? configOrCallback : maybeCallback!\n const config =\n typeof configOrCallback === `function` ? 
{} : configOrCallback\n\n // Get the current objects or empty objects if they don't exist\n const currentObjects = keysArray.map((key) => {\n const item = this.state.get(key)\n if (!item) {\n throw new UpdateKeyNotFoundError(key)\n }\n\n return item\n }) as unknown as Array<TInput>\n\n let changesArray\n if (isArray) {\n // Use the proxy to track changes for all objects\n changesArray = withArrayChangeTracking(\n currentObjects,\n callback as (draft: Array<TInput>) => void\n )\n } else {\n const result = withChangeTracking(\n currentObjects[0]!,\n callback as (draft: TInput) => void\n )\n changesArray = [result]\n }\n\n // Create mutations for each object that has changes\n const mutations: Array<\n PendingMutation<\n TOutput,\n `update`,\n CollectionImpl<TOutput, TKey, TUtils, TSchema, TInput>\n >\n > = keysArray\n .map((key, index) => {\n const itemChanges = changesArray[index] // User-provided changes for this specific item\n\n // Skip items with no changes\n if (!itemChanges || Object.keys(itemChanges).length === 0) {\n return null\n }\n\n const originalItem = currentObjects[index] as unknown as TOutput\n // Validate the user-provided changes for this item\n const validatedUpdatePayload = this.validateData(\n itemChanges,\n `update`,\n key\n )\n\n // Construct the full modified item by applying the validated update payload to the original item\n const modifiedItem = Object.assign(\n {},\n originalItem,\n validatedUpdatePayload\n )\n\n // Check if the ID of the item is being changed\n const originalItemId = this.config.getKey(originalItem)\n const modifiedItemId = this.config.getKey(modifiedItem)\n\n if (originalItemId !== modifiedItemId) {\n throw new KeyUpdateNotAllowedError(originalItemId, modifiedItemId)\n }\n\n const globalKey = this.generateGlobalKey(modifiedItemId, modifiedItem)\n\n return {\n mutationId: crypto.randomUUID(),\n original: originalItem,\n modified: modifiedItem,\n // Pick the values from modifiedItem based on what's passed in - this is for cases\n // where a schema has default values or transforms. The modified data has the extra\n // default or transformed values but for changes, we just want to show the data that\n // was actually passed in.\n changes: Object.fromEntries(\n Object.keys(itemChanges).map((k) => [\n k,\n modifiedItem[k as keyof typeof modifiedItem],\n ])\n ) as TInput,\n globalKey,\n key,\n metadata: config.metadata as unknown,\n syncMetadata: (state.syncedMetadata.get(key) || {}) as Record<\n string,\n unknown\n >,\n optimistic: config.optimistic ?? 
true,\n type: `update`,\n createdAt: new Date(),\n updatedAt: new Date(),\n collection: this.collection,\n }\n })\n .filter(Boolean) as Array<\n PendingMutation<\n TOutput,\n `update`,\n CollectionImpl<TOutput, TKey, TUtils, TSchema, TInput>\n >\n >\n\n // If no changes were made, return an empty transaction early\n if (mutations.length === 0) {\n const emptyTransaction = createTransaction({\n mutationFn: async () => {},\n })\n // Errors still propagate through tx.isPersisted.promise; suppress the background commit from warning\n emptyTransaction.commit().catch(() => undefined)\n // Schedule cleanup for empty transaction\n state.scheduleTransactionCleanup(emptyTransaction)\n return emptyTransaction\n }\n\n // If an ambient transaction exists, use it\n if (ambientTransaction) {\n ambientTransaction.applyMutations(mutations)\n\n state.transactions.set(ambientTransaction.id, ambientTransaction)\n state.scheduleTransactionCleanup(ambientTransaction)\n state.recomputeOptimisticState(true)\n\n return ambientTransaction\n }\n\n // No need to check for onUpdate handler here as we've already checked at the beginning\n\n // Create a new transaction with a mutation function that calls the onUpdate handler\n const directOpTransaction = createTransaction<TOutput>({\n mutationFn: async (params) => {\n // Call the onUpdate handler with the transaction and collection\n return this.config.onUpdate!({\n transaction:\n params.transaction as unknown as TransactionWithMutations<\n TOutput,\n `update`\n >,\n collection: this.collection as unknown as Collection<TOutput, TKey>,\n })\n },\n })\n\n // Apply mutations to the new transaction\n directOpTransaction.applyMutations(mutations)\n // Errors still hit tx.isPersisted.promise; avoid leaking an unhandled rejection from the fire-and-forget commit\n directOpTransaction.commit().catch(() => undefined)\n\n // Add the transaction to the collection's transactions store\n\n state.transactions.set(directOpTransaction.id, directOpTransaction)\n state.scheduleTransactionCleanup(directOpTransaction)\n state.recomputeOptimisticState(true)\n\n return directOpTransaction\n }\n\n /**\n * Deletes one or more items from the collection\n */\n delete = (\n keys: Array<TKey> | TKey,\n config?: OperationConfig\n ): TransactionType<any> => {\n const state = this.state\n this.lifecycle.validateCollectionUsable(`delete`)\n\n const ambientTransaction = getActiveTransaction()\n\n // If no ambient transaction exists, check for an onDelete handler early\n if (!ambientTransaction && !this.config.onDelete) {\n throw new MissingDeleteHandlerError()\n }\n\n if (Array.isArray(keys) && keys.length === 0) {\n throw new NoKeysPassedToDeleteError()\n }\n\n const keysArray = Array.isArray(keys) ? keys : [keys]\n const mutations: Array<\n PendingMutation<\n TOutput,\n `delete`,\n CollectionImpl<TOutput, TKey, TUtils, TSchema, TInput>\n >\n > = []\n\n for (const key of keysArray) {\n if (!this.state.has(key)) {\n throw new DeleteKeyNotFoundError(key)\n }\n const globalKey = this.generateGlobalKey(key, this.state.get(key)!)\n const mutation: PendingMutation<\n TOutput,\n `delete`,\n CollectionImpl<TOutput, TKey, TUtils, TSchema, TInput>\n > = {\n mutationId: crypto.randomUUID(),\n original: this.state.get(key)!,\n modified: this.state.get(key)!,\n changes: this.state.get(key)!,\n globalKey,\n key,\n metadata: config?.metadata as unknown,\n syncMetadata: (state.syncedMetadata.get(key) || {}) as Record<\n string,\n unknown\n >,\n optimistic: config?.optimistic ?? 
true,\n type: `delete`,\n createdAt: new Date(),\n updatedAt: new Date(),\n collection: this.collection,\n }\n\n mutations.push(mutation)\n }\n\n // If an ambient transaction exists, use it\n if (ambientTransaction) {\n ambientTransaction.applyMutations(mutations)\n\n state.transactions.set(ambientTransaction.id, ambientTransaction)\n state.scheduleTransactionCleanup(ambientTransaction)\n state.recomputeOptimisticState(true)\n\n return ambientTransaction\n }\n\n // Create a new transaction with a mutation function that calls the onDelete handler\n const directOpTransaction = createTransaction<TOutput>({\n autoCommit: true,\n mutationFn: async (params) => {\n // Call the onDelete handler with the transaction and collection\n return this.config.onDelete!({\n transaction:\n params.transaction as unknown as TransactionWithMutations<\n TOutput,\n `delete`\n >,\n collection: this.collection as unknown as Collection<TOutput, TKey>,\n })\n },\n })\n\n // Apply mutations to the new transaction\n directOpTransaction.applyMutations(mutations)\n // Errors still reject tx.isPersisted.promise; silence the internal commit promise to prevent test noise\n directOpTransaction.commit().catch(() => undefined)\n\n state.transactions.set(directOpTransaction.id, directOpTransaction)\n state.scheduleTransactionCleanup(directOpTransaction)\n state.recomputeOptimisticState(true)\n\n return directOpTransaction\n }\n}\n"],"names":["config","getActiveTransaction","MissingInsertHandlerError","DuplicateKeyError","createTransaction","MissingDeleteHandlerError","NoKeysPassedToDeleteError","DeleteKeyNotFoundError","InvalidSchemaError","result","SchemaMustBeSynchronousError","SchemaValidationError","UndefinedKeyError","MissingUpdateArgumentError","MissingUpdateHandlerError","NoKeysPassedToUpdateError","UpdateKeyNotFoundError","withArrayChangeTracking","withChangeTracking","KeyUpdateNotAllowedError"],"mappings":";;;;;AAkCO,MAAM,2BAMX;AAAA,EAOA,YAAY,QAAkD,IAAY;AA0G1E,SAAA,SAAS,CAAC,MAA8BA,YAA0B;AAChE,WAAK,UAAU,yBAAyB,QAAQ;AAChD,YAAM,QAAQ,KAAK;AACnB,YAAM,qBAAqBC,aAAAA,qBAAA;AAG3B,UAAI,CAAC,sBAAsB,CAAC,KAAK,OAAO,UAAU;AAChD,cAAM,IAAIC,OAAAA,0BAAA;AAAA,MACZ;AAEA,YAAM,QAAQ,MAAM,QAAQ,IAAI,IAAI,OAAO,CAAC,IAAI;AAChD,YAAM,YAA6C,CAAA;AACnD,YAAM,yCAAyB,IAAA;AAG/B,YAAM,QAAQ,CAAC,SAAS;AAEtB,cAAM,gBAAgB,KAAK,aAAa,MAAM,QAAQ;AAGtD,cAAM,MAAM,KAAK,OAAO,OAAO,aAAa;AAC5C,YAAI,KAAK,MAAM,IAAI,GAAG,KAAK,mBAAmB,IAAI,GAAG,GAAG;AACtD,gBAAM,IAAIC,OAAAA,kBAAkB,GAAG;AAAA,QACjC;AACA,2BAAmB,IAAI,GAAG;AAC1B,cAAM,YAAY,KAAK,kBAAkB,KAAK,IAAI;AAElD,cAAM,WAA+C;AAAA,UACnD,YAAY,OAAO,WAAA;AAAA,UACnB,UAAU,CAAA;AAAA,UACV,UAAU;AAAA;AAAA;AAAA;AAAA,UAIV,SAAS,OAAO;AAAA,YACd,OAAO,KAAK,IAAI,EAAE,IAAI,CAAC,MAAM;AAAA,cAC3B;AAAA,cACA,cAAc,CAA+B;AAAA,YAAA,CAC9C;AAAA,UAAA;AAAA,UAEH;AAAA,UACA;AAAA,UACA,UAAUH,SAAQ;AAAA,UAClB,cAAc,KAAK,OAAO,KAAK,kBAAA,KAAuB,CAAA;AAAA,UACtD,YAAYA,SAAQ,cAAc;AAAA,UAClC,MAAM;AAAA,UACN,+BAAe,KAAA;AAAA,UACf,+BAAe,KAAA;AAAA,UACf,YAAY,KAAK;AAAA,QAAA;AAGnB,kBAAU,KAAK,QAAQ;AAAA,MACzB,CAAC;AAGD,UAAI,oBAAoB;AACtB,2BAAmB,eAAe,SAAS;AAE3C,cAAM,aAAa,IAAI,mBAAmB,IAAI,kBAAkB;AAChE,cAAM,2BAA2B,kBAAkB;AACnD,cAAM,yBAAyB,IAAI;AAEnC,eAAO;AAAA,MACT,OAAO;AAEL,cAAM,sBAAsBI,aAAAA,kBAA2B;AAAA,UACrD,YAAY,OAAO,WAAW;AAE5B,mBAAO,MAAM,KAAK,OAAO,SAAU;AAAA,cACjC,aACE,OAAO;AAAA,cAIT,YAAY,KAAK;AAAA,YAAA,CAClB;AAAA,UACH;AAAA,QAAA,CACD;AAGD,4BAAoB,eAAe,SAAS;AAE5C,4BAAoB,OAAA,EAAS,MAAM,MAAM,MAAS;AAGlD,cAAM,aAAa,IAAI,oBAAoB,IAAI,mBAAmB;AAClE,cAAM,2BAA2B,mBAAmB;AACpD,cAAM,yBAAyB,IAAI;AAEnC,eAAO;AAAA,MACT;AAAA,IACF;AAwMA,SAAA,SAAS,CACP,MACAJ,YACyB;AACzB,YAAM,QAAQ,KAAK;AACnB,WAAK,UAAU,yBAAyB,
QAAQ;AAEhD,YAAM,qBAAqBC,aAAAA,qBAAA;AAG3B,UAAI,CAAC,sBAAsB,CAAC,KAAK,OAAO,UAAU;AAChD,cAAM,IAAII,OAAAA,0BAAA;AAAA,MACZ;AAEA,UAAI,MAAM,QAAQ,IAAI,KAAK,KAAK,WAAW,GAAG;AAC5C,cAAM,IAAIC,OAAAA,0BAAA;AAAA,MACZ;AAEA,YAAM,YAAY,MAAM,QAAQ,IAAI,IAAI,OAAO,CAAC,IAAI;AACpD,YAAM,YAMF,CAAA;AAEJ,iBAAW,OAAO,WAAW;AAC3B,YAAI,CAAC,KAAK,MAAM,IAAI,GAAG,GAAG;AACxB,gBAAM,IAAIC,OAAAA,uBAAuB,GAAG;AAAA,QACtC;AACA,cAAM,YAAY,KAAK,kBAAkB,KAAK,KAAK,MAAM,IAAI,GAAG,CAAE;AAClE,cAAM,WAIF;AAAA,UACF,YAAY,OAAO,WAAA;AAAA,UACnB,UAAU,KAAK,MAAM,IAAI,GAAG;AAAA,UAC5B,UAAU,KAAK,MAAM,IAAI,GAAG;AAAA,UAC5B,SAAS,KAAK,MAAM,IAAI,GAAG;AAAA,UAC3B;AAAA,UACA;AAAA,UACA,UAAUP,SAAQ;AAAA,UAClB,cAAe,MAAM,eAAe,IAAI,GAAG,KAAK,CAAA;AAAA,UAIhD,YAAYA,SAAQ,cAAc;AAAA,UAClC,MAAM;AAAA,UACN,+BAAe,KAAA;AAAA,UACf,+BAAe,KAAA;AAAA,UACf,YAAY,KAAK;AAAA,QAAA;AAGnB,kBAAU,KAAK,QAAQ;AAAA,MACzB;AAGA,UAAI,oBAAoB;AACtB,2BAAmB,eAAe,SAAS;AAE3C,cAAM,aAAa,IAAI,mBAAmB,IAAI,kBAAkB;AAChE,cAAM,2BAA2B,kBAAkB;AACnD,cAAM,yBAAyB,IAAI;AAEnC,eAAO;AAAA,MACT;AAGA,YAAM,sBAAsBI,aAAAA,kBAA2B;AAAA,QACrD,YAAY;AAAA,QACZ,YAAY,OAAO,WAAW;AAE5B,iBAAO,KAAK,OAAO,SAAU;AAAA,YAC3B,aACE,OAAO;AAAA,YAIT,YAAY,KAAK;AAAA,UAAA,CAClB;AAAA,QACH;AAAA,MAAA,CACD;AAGD,0BAAoB,eAAe,SAAS;AAE5C,0BAAoB,OAAA,EAAS,MAAM,MAAM,MAAS;AAElD,YAAM,aAAa,IAAI,oBAAoB,IAAI,mBAAmB;AAClE,YAAM,2BAA2B,mBAAmB;AACpD,YAAM,yBAAyB,IAAI;AAEnC,aAAO;AAAA,IACT;AA3eE,SAAK,KAAK;AACV,SAAK,SAAS;AAAA,EAChB;AAAA,EAEA,QAAQ,MAIL;AACD,SAAK,YAAY,KAAK;AACtB,SAAK,QAAQ,KAAK;AAClB,SAAK,aAAa,KAAK;AAAA,EACzB;AAAA,EAEQ,qBAAqB,QAA0C;AAErE,QAAI,UAAU,eAAgB,QAAe;AAC3C,aAAO;AAAA,IACT;AAEA,UAAM,IAAII,OAAAA,mBAAA;AAAA,EACZ;AAAA,EAEO,aACL,MACA,MACA,KACiB;AACjB,QAAI,CAAC,KAAK,OAAO,OAAQ,QAAO;AAEhC,UAAM,iBAAiB,KAAK,qBAAqB,KAAK,OAAO,MAAM;AAGnE,QAAI,SAAS,YAAY,KAAK;AAE5B,YAAM,eAAe,KAAK,MAAM,IAAI,GAAG;AAEvC,UACE,gBACA,QACA,OAAO,SAAS,YAChB,OAAO,iBAAiB,UACxB;AAEA,cAAM,aAAa,OAAO,OAAO,CAAA,GAAI,cAAc,IAAI;AAGvD,cAAMC,UAAS,eAAe,WAAW,EAAE,SAAS,UAAU;AAG9D,YAAIA,mBAAkB,SAAS;AAC7B,gBAAM,IAAIC,OAAAA,6BAAA;AAAA,QACZ;AAGA,YAAI,YAAYD,WAAUA,QAAO,QAAQ;AACvC,gBAAM,cAAcA,QAAO,OAAO,IAAI,CAAC,WAAW;AAAA,YAChD,SAAS,MAAM;AAAA,YACf,MAAM,MAAM,MAAM,IAAI,CAAC,MAAM,OAAO,CAAC,CAAC;AAAA,UAAA,EACtC;AACF,gBAAM,IAAIE,OAAAA,sBAAsB,MAAM,WAAW;AAAA,QACnD;AAGA,cAAM,sBAAsBF,QAAO;AACnC,cAAM,eAAe,OAAO,KAAK,IAAI;AACrC,cAAM,mBAAmB,OAAO;AAAA,UAC9B,aAAa,IAAI,CAAC,MAAM,CAAC,GAAG,oBAAoB,CAAkB,CAAC,CAAC;AAAA,QAAA;AAGtE,eAAO;AAAA,MACT;AAAA,IACF;AAGA,UAAM,SAAS,eAAe,WAAW,EAAE,SAAS,IAAI;AAGxD,QAAI,kBAAkB,SAAS;AAC7B,YAAM,IAAIC,OAAAA,6BAAA;AAAA,IACZ;AAGA,QAAI,YAAY,UAAU,OAAO,QAAQ;AACvC,YAAM,cAAc,OAAO,OAAO,IAAI,CAAC,WAAW;AAAA,QAChD,SAAS,MAAM;AAAA,QACf,MAAM,MAAM,MAAM,IAAI,CAAC,MAAM,OAAO,CAAC,CAAC;AAAA,MAAA,EACtC;AACF,YAAM,IAAIC,OAAAA,sBAAsB,MAAM,WAAW;AAAA,IACnD;AAEA,WAAO,OAAO;AAAA,EAChB;AAAA,EAEO,kBAAkB,KAAU,MAAmB;AACpD,QAAI,OAAO,QAAQ,aAAa;AAC9B,YAAM,IAAIC,OAAAA,kBAAkB,IAAI;AAAA,IAClC;AAEA,WAAO,QAAQ,KAAK,EAAE,IAAI,GAAG;AAAA,EAC/B;AAAA;AAAA;AAAA;AAAA,EAqGA,OACE,MACA,kBAIA,eAGA;AACA,QAAI,OAAO,SAAS,aAAa;AAC/B,YAAM,IAAIC,OAAAA,2BAAA;AAAA,IACZ;AAEA,UAAM,QAAQ,KAAK;AACnB,SAAK,UAAU,yBAAyB,QAAQ;AAEhD,UAAM,qBAAqBZ,aAAAA,qBAAA;AAG3B,QAAI,CAAC,sBAAsB,CAAC,KAAK,OAAO,UAAU;AAChD,YAAM,IAAIa,OAAAA,0BAAA;AAAA,IACZ;AAEA,UAAM,UAAU,MAAM,QAAQ,IAAI;AAClC,UAAM,YAAY,UAAU,OAAO,CAAC,IAAI;AAExC,QAAI,WAAW,UAAU,WAAW,GAAG;AACrC,YAAM,IAAIC,OAAAA,0BAAA;AAAA,IACZ;AAEA,UAAM,WACJ,OAAO,qBAAqB,aAAa,mBAAmB;AAC9D,UAAM,SACJ,OAAO,qBAAqB,aAAa,CAAA,IAAK;AAGhD,UAAM,iBAAiB,UAAU,IAAI,CAAC,QAAQ;AAC5C,YAAM,OAAO,KAAK,MAAM,IAAI,GAAG;AAC/B,UAAI,CAAC,MAAM;AACT,cAAM,IAAIC,OAAAA,uBAAuB,GAAG;AAAA,MACtC;AAEA,aAAO;AAAA,IACT,CAAC;AAED,QAAI;AACJ,QAAI,SAAS;AAEX,qBAAeC,MAAAA;AAAAA,QACb;AAAA,QACA;A
AAA,MAAA;AAAA,IAEJ,OAAO;AACL,YAAM,SAASC,MAAAA;AAAAA,QACb,eAAe,CAAC;AAAA,QAChB;AAAA,MAAA;AAEF,qBAAe,CAAC,MAAM;AAAA,IACxB;AAGA,UAAM,YAMF,UACD,IAAI,CAAC,KAAK,UAAU;AACnB,YAAM,cAAc,aAAa,KAAK;AAGtC,UAAI,CAAC,eAAe,OAAO,KAAK,WAAW,EAAE,WAAW,GAAG;AACzD,eAAO;AAAA,MACT;AAEA,YAAM,eAAe,eAAe,KAAK;AAEzC,YAAM,yBAAyB,KAAK;AAAA,QAClC;AAAA,QACA;AAAA,QACA;AAAA,MAAA;AAIF,YAAM,eAAe,OAAO;AAAA,QAC1B,CAAA;AAAA,QACA;AAAA,QACA;AAAA,MAAA;AAIF,YAAM,iBAAiB,KAAK,OAAO,OAAO,YAAY;AACtD,YAAM,iBAAiB,KAAK,OAAO,OAAO,YAAY;AAEtD,UAAI,mBAAmB,gBAAgB;AACrC,cAAM,IAAIC,OAAAA,yBAAyB,gBAAgB,cAAc;AAAA,MACnE;AAEA,YAAM,YAAY,KAAK,kBAAkB,gBAAgB,YAAY;AAErE,aAAO;AAAA,QACL,YAAY,OAAO,WAAA;AAAA,QACnB,UAAU;AAAA,QACV,UAAU;AAAA;AAAA;AAAA;AAAA;AAAA,QAKV,SAAS,OAAO;AAAA,UACd,OAAO,KAAK,WAAW,EAAE,IAAI,CAAC,MAAM;AAAA,YAClC;AAAA,YACA,aAAa,CAA8B;AAAA,UAAA,CAC5C;AAAA,QAAA;AAAA,QAEH;AAAA,QACA;AAAA,QACA,UAAU,OAAO;AAAA,QACjB,cAAe,MAAM,eAAe,IAAI,GAAG,KAAK,CAAA;AAAA,QAIhD,YAAY,OAAO,cAAc;AAAA,QACjC,MAAM;AAAA,QACN,+BAAe,KAAA;AAAA,QACf,+BAAe,KAAA;AAAA,QACf,YAAY,KAAK;AAAA,MAAA;AAAA,IAErB,CAAC,EACA,OAAO,OAAO;AASjB,QAAI,UAAU,WAAW,GAAG;AAC1B,YAAM,mBAAmBf,aAAAA,kBAAkB;AAAA,QACzC,YAAY,YAAY;AAAA,QAAC;AAAA,MAAA,CAC1B;AAED,uBAAiB,OAAA,EAAS,MAAM,MAAM,MAAS;AAE/C,YAAM,2BAA2B,gBAAgB;AACjD,aAAO;AAAA,IACT;AAGA,QAAI,oBAAoB;AACtB,yBAAmB,eAAe,SAAS;AAE3C,YAAM,aAAa,IAAI,mBAAmB,IAAI,kBAAkB;AAChE,YAAM,2BAA2B,kBAAkB;AACnD,YAAM,yBAAyB,IAAI;AAEnC,aAAO;AAAA,IACT;AAKA,UAAM,sBAAsBA,aAAAA,kBAA2B;AAAA,MACrD,YAAY,OAAO,WAAW;AAE5B,eAAO,KAAK,OAAO,SAAU;AAAA,UAC3B,aACE,OAAO;AAAA,UAIT,YAAY,KAAK;AAAA,QAAA,CAClB;AAAA,MACH;AAAA,IAAA,CACD;AAGD,wBAAoB,eAAe,SAAS;AAE5C,wBAAoB,OAAA,EAAS,MAAM,MAAM,MAAS;AAIlD,UAAM,aAAa,IAAI,oBAAoB,IAAI,mBAAmB;AAClE,UAAM,2BAA2B,mBAAmB;AACpD,UAAM,yBAAyB,IAAI;AAEnC,WAAO;AAAA,EACT;AAqGF;;"}
@@ -224,7 +224,16 @@ class CollectionSubscription extends eventEmitter.EventEmitter {
   const promises = [];
   if (typeof minValue !== `undefined`) {
     const { expression } = orderBy[0];
-    const exactValueFilter = functions.eq(expression, new ir.Value(minValue));
+    let exactValueFilter;
+    if (minValue instanceof Date) {
+      const minValuePlus1ms = new Date(minValue.getTime() + 1);
+      exactValueFilter = functions.and(
+        functions.gte(expression, new ir.Value(minValue)),
+        functions.lt(expression, new ir.Value(minValuePlus1ms))
+      );
+    } else {
+      exactValueFilter = functions.eq(expression, new ir.Value(minValue));
+    }
     const loadOptions2 = {
       where: exactValueFilter,
       subscription: this
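
The hunk above changes how `requestLimitedSnapshot` asks the sync layer for rows equal to the current orderBy cursor (`minValue`): when the cursor is a `Date`, the exact-equality filter is replaced by the half-open range [minValue, minValue + 1 ms), presumably so that rows whose stored timestamps do not compare exactly equal to the in-memory `Date` (serialization or precision differences) are still loaded; non-Date cursors keep the `eq` filter. A small TypeScript sketch of the predicate this range expresses; `matchesMinValue` is a hypothetical helper, not part of the package:

```ts
// Hypothetical helper illustrating the semantics of the new filter: for Date
// cursors, "equal to minValue" means "falls in the same millisecond", i.e. the
// half-open range [minValue, minValue + 1 ms); other cursor types keep equality.
function matchesMinValue(rowValue: unknown, minValue: unknown): boolean {
  if (minValue instanceof Date) {
    const lower = minValue.getTime();
    const upper = lower + 1; // exclusive upper bound, one millisecond later
    const t = rowValue instanceof Date ? rowValue.getTime() : NaN;
    return t >= lower && t < upper;
  }
  return rowValue === minValue;
}

// Two distinct Date instances for the same millisecond match, even though
// `new Date(x) === new Date(x)` is false:
matchesMinValue(new Date(1700000000000), new Date(1700000000000)); // true
matchesMinValue(new Date(1700000000001), new Date(1700000000000)); // false
```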
@@ -1 +1 @@
- {"version":3,"file":"subscription.cjs","sources":["../../../src/collection/subscription.ts"],"sourcesContent":["import { ensureIndexForExpression } from \"../indexes/auto-index.js\"\nimport { and, eq, gt, lt } from \"../query/builder/functions.js\"\nimport { Value } from \"../query/ir.js\"\nimport { EventEmitter } from \"../event-emitter.js\"\nimport {\n createFilterFunctionFromExpression,\n createFilteredCallback,\n} from \"./change-events.js\"\nimport type { BasicExpression, OrderBy } from \"../query/ir.js\"\nimport type { IndexInterface } from \"../indexes/base-index.js\"\nimport type {\n ChangeMessage,\n LoadSubsetOptions,\n Subscription,\n SubscriptionEvents,\n SubscriptionStatus,\n SubscriptionUnsubscribedEvent,\n} from \"../types.js\"\nimport type { CollectionImpl } from \"./index.js\"\n\ntype RequestSnapshotOptions = {\n where?: BasicExpression<boolean>\n optimizedOnly?: boolean\n trackLoadSubsetPromise?: boolean\n}\n\ntype RequestLimitedSnapshotOptions = {\n orderBy: OrderBy\n limit: number\n minValue?: any\n}\n\ntype CollectionSubscriptionOptions = {\n includeInitialState?: boolean\n /** Pre-compiled expression for filtering changes */\n whereExpression?: BasicExpression<boolean>\n /** Callback to call when the subscription is unsubscribed */\n onUnsubscribe?: (event: SubscriptionUnsubscribedEvent) => void\n}\n\nexport class CollectionSubscription\n extends EventEmitter<SubscriptionEvents>\n implements Subscription\n{\n private loadedInitialState = false\n\n // Flag to indicate that we have sent at least 1 snapshot.\n // While `snapshotSent` is false we filter out all changes from subscription to the collection.\n private snapshotSent = false\n\n /**\n * Track all loadSubset calls made by this subscription so we can unload them on cleanup.\n * We store the exact LoadSubsetOptions we passed to loadSubset to ensure symmetric unload.\n */\n private loadedSubsets: Array<LoadSubsetOptions> = []\n\n // Keep track of the keys we've sent (needed for join and orderBy optimizations)\n private sentKeys = new Set<string | number>()\n\n private filteredCallback: (changes: Array<ChangeMessage<any, any>>) => void\n\n private orderByIndex: IndexInterface<string | number> | undefined\n\n // Status tracking\n private _status: SubscriptionStatus = `ready`\n private pendingLoadSubsetPromises: Set<Promise<void>> = new Set()\n\n public get status(): SubscriptionStatus {\n return this._status\n }\n\n constructor(\n private collection: CollectionImpl<any, any, any, any, any>,\n private callback: (changes: Array<ChangeMessage<any, any>>) => void,\n private options: CollectionSubscriptionOptions\n ) {\n super()\n if (options.onUnsubscribe) {\n this.on(`unsubscribed`, (event) => options.onUnsubscribe!(event))\n }\n\n // Auto-index for where expressions if enabled\n if (options.whereExpression) {\n ensureIndexForExpression(options.whereExpression, this.collection)\n }\n\n const callbackWithSentKeysTracking = (\n changes: Array<ChangeMessage<any, any>>\n ) => {\n callback(changes)\n this.trackSentKeys(changes)\n }\n\n this.callback = callbackWithSentKeysTracking\n\n // Create a filtered callback if where clause is provided\n this.filteredCallback = options.whereExpression\n ? 
createFilteredCallback(this.callback, options)\n : this.callback\n }\n\n setOrderByIndex(index: IndexInterface<any>) {\n this.orderByIndex = index\n }\n\n /**\n * Set subscription status and emit events if changed\n */\n private setStatus(newStatus: SubscriptionStatus) {\n if (this._status === newStatus) {\n return // No change\n }\n\n const previousStatus = this._status\n this._status = newStatus\n\n // Emit status:change event\n this.emitInner(`status:change`, {\n type: `status:change`,\n subscription: this,\n previousStatus,\n status: newStatus,\n })\n\n // Emit specific status event\n const eventKey: `status:${SubscriptionStatus}` = `status:${newStatus}`\n this.emitInner(eventKey, {\n type: eventKey,\n subscription: this,\n previousStatus,\n status: newStatus,\n } as SubscriptionEvents[typeof eventKey])\n }\n\n /**\n * Track a loadSubset promise and manage loading status\n */\n private trackLoadSubsetPromise(syncResult: Promise<void> | true) {\n // Track the promise if it's actually a promise (async work)\n if (syncResult instanceof Promise) {\n this.pendingLoadSubsetPromises.add(syncResult)\n this.setStatus(`loadingSubset`)\n\n syncResult.finally(() => {\n this.pendingLoadSubsetPromises.delete(syncResult)\n if (this.pendingLoadSubsetPromises.size === 0) {\n this.setStatus(`ready`)\n }\n })\n }\n }\n\n hasLoadedInitialState() {\n return this.loadedInitialState\n }\n\n hasSentAtLeastOneSnapshot() {\n return this.snapshotSent\n }\n\n emitEvents(changes: Array<ChangeMessage<any, any>>) {\n const newChanges = this.filterAndFlipChanges(changes)\n this.filteredCallback(newChanges)\n }\n\n /**\n * Sends the snapshot to the callback.\n * Returns a boolean indicating if it succeeded.\n * It can only fail if there is no index to fulfill the request\n * and the optimizedOnly option is set to true,\n * or, the entire state was already loaded.\n */\n requestSnapshot(opts?: RequestSnapshotOptions): boolean {\n if (this.loadedInitialState) {\n // Subscription was deoptimized so we already sent the entire initial state\n return false\n }\n\n const stateOpts: RequestSnapshotOptions = {\n where: this.options.whereExpression,\n optimizedOnly: opts?.optimizedOnly ?? false,\n }\n\n if (opts) {\n if (`where` in opts) {\n const snapshotWhereExp = opts.where\n if (stateOpts.where) {\n // Combine the two where expressions\n const subWhereExp = stateOpts.where\n const combinedWhereExp = and(subWhereExp, snapshotWhereExp)\n stateOpts.where = combinedWhereExp\n } else {\n stateOpts.where = snapshotWhereExp\n }\n }\n } else {\n // No options provided so it's loading the entire initial state\n this.loadedInitialState = true\n }\n\n // Request the sync layer to load more data\n // don't await it, we will load the data into the collection when it comes in\n const loadOptions: LoadSubsetOptions = {\n where: stateOpts.where,\n subscription: this,\n }\n const syncResult = this.collection._sync.loadSubset(loadOptions)\n\n // Track this loadSubset call so we can unload it later\n this.loadedSubsets.push(loadOptions)\n\n const trackLoadSubsetPromise = opts?.trackLoadSubsetPromise ?? 
true\n if (trackLoadSubsetPromise) {\n this.trackLoadSubsetPromise(syncResult)\n }\n\n // Also load data immediately from the collection\n const snapshot = this.collection.currentStateAsChanges(stateOpts)\n\n if (snapshot === undefined) {\n // Couldn't load from indexes\n return false\n }\n\n // Only send changes that have not been sent yet\n const filteredSnapshot = snapshot.filter(\n (change) => !this.sentKeys.has(change.key)\n )\n\n this.snapshotSent = true\n this.callback(filteredSnapshot)\n return true\n }\n\n /**\n * Sends a snapshot that fulfills the `where` clause and all rows are bigger or equal to `minValue`.\n * Requires a range index to be set with `setOrderByIndex` prior to calling this method.\n * It uses that range index to load the items in the order of the index.\n * Note 1: it may load more rows than the provided LIMIT because it loads all values equal to `minValue` + limit values greater than `minValue`.\n * This is needed to ensure that it does not accidentally skip duplicate values when the limit falls in the middle of some duplicated values.\n * Note 2: it does not send keys that have already been sent before.\n */\n requestLimitedSnapshot({\n orderBy,\n limit,\n minValue,\n }: RequestLimitedSnapshotOptions) {\n if (!limit) throw new Error(`limit is required`)\n\n if (!this.orderByIndex) {\n throw new Error(\n `Ordered snapshot was requested but no index was found. You have to call setOrderByIndex before requesting an ordered snapshot.`\n )\n }\n\n const index = this.orderByIndex\n const where = this.options.whereExpression\n const whereFilterFn = where\n ? createFilterFunctionFromExpression(where)\n : undefined\n\n const filterFn = (key: string | number): boolean => {\n if (this.sentKeys.has(key)) {\n return false\n }\n\n const value = this.collection.get(key)\n if (value === undefined) {\n return false\n }\n\n return whereFilterFn?.(value) ?? true\n }\n\n let biggestObservedValue = minValue\n const changes: Array<ChangeMessage<any, string | number>> = []\n\n // If we have a minValue we need to handle the case\n // where there might be duplicate values equal to minValue that we need to include\n // because we can have data like this: [1, 2, 3, 3, 3, 4, 5]\n // so if minValue is 3 then the previous snapshot may not have included all 3s\n // e.g. 
if it was offset 0 and limit 3 it would only have loaded the first 3\n // so we load all rows equal to minValue first, to be sure we don't skip any duplicate values\n let keys: Array<string | number> = []\n if (minValue !== undefined) {\n // First, get all items with the same value as minValue\n const { expression } = orderBy[0]!\n const allRowsWithMinValue = this.collection.currentStateAsChanges({\n where: eq(expression, new Value(minValue)),\n })\n\n if (allRowsWithMinValue) {\n const keysWithMinValue = allRowsWithMinValue\n .map((change) => change.key)\n .filter((key) => !this.sentKeys.has(key) && filterFn(key))\n\n // Add items with the minValue first\n keys.push(...keysWithMinValue)\n\n // Then get items greater than minValue\n const keysGreaterThanMin = index.take(\n limit - keys.length,\n minValue,\n filterFn\n )\n keys.push(...keysGreaterThanMin)\n } else {\n keys = index.take(limit, minValue, filterFn)\n }\n } else {\n keys = index.take(limit, minValue, filterFn)\n }\n\n const valuesNeeded = () => Math.max(limit - changes.length, 0)\n const collectionExhausted = () => keys.length === 0\n\n while (valuesNeeded() > 0 && !collectionExhausted()) {\n const insertedKeys = new Set<string | number>() // Track keys we add to `changes` in this iteration\n\n for (const key of keys) {\n const value = this.collection.get(key)!\n changes.push({\n type: `insert`,\n key,\n value,\n })\n biggestObservedValue = value\n insertedKeys.add(key) // Track this key\n }\n\n keys = index.take(valuesNeeded(), biggestObservedValue, filterFn)\n }\n\n this.callback(changes)\n\n let whereWithValueFilter = where\n if (typeof minValue !== `undefined`) {\n // Only request data that we haven't seen yet (i.e. is bigger than the minValue)\n const { expression, compareOptions } = orderBy[0]!\n const operator = compareOptions.direction === `asc` ? gt : lt\n const valueFilter = operator(expression, new Value(minValue))\n whereWithValueFilter = where ? 
and(where, valueFilter) : valueFilter\n }\n\n // Request the sync layer to load more data\n // don't await it, we will load the data into the collection when it comes in\n const loadOptions1: LoadSubsetOptions = {\n where: whereWithValueFilter,\n limit,\n orderBy,\n subscription: this,\n }\n const syncResult = this.collection._sync.loadSubset(loadOptions1)\n\n // Track this loadSubset call\n this.loadedSubsets.push(loadOptions1)\n\n // Make parallel loadSubset calls for values equal to minValue and values greater than minValue\n const promises: Array<Promise<void>> = []\n\n // First promise: load all values equal to minValue\n if (typeof minValue !== `undefined`) {\n const { expression } = orderBy[0]!\n const exactValueFilter = eq(expression, new Value(minValue))\n\n const loadOptions2: LoadSubsetOptions = {\n where: exactValueFilter,\n subscription: this,\n }\n const equalValueResult = this.collection._sync.loadSubset(loadOptions2)\n\n // Track this loadSubset call\n this.loadedSubsets.push(loadOptions2)\n\n if (equalValueResult instanceof Promise) {\n promises.push(equalValueResult)\n }\n }\n\n // Second promise: load values greater than minValue\n if (syncResult instanceof Promise) {\n promises.push(syncResult)\n }\n\n // Track the combined promise\n if (promises.length > 0) {\n const combinedPromise = Promise.all(promises).then(() => {})\n this.trackLoadSubsetPromise(combinedPromise)\n } else {\n this.trackLoadSubsetPromise(syncResult)\n }\n }\n\n // TODO: also add similar test but that checks that it can also load it from the collection's loadSubset function\n // and that that also works properly (i.e. does not skip duplicate values)\n\n /**\n * Filters and flips changes for keys that have not been sent yet.\n * Deletes are filtered out for keys that have not been sent yet.\n * Updates are flipped into inserts for keys that have not been sent yet.\n */\n private filterAndFlipChanges(changes: Array<ChangeMessage<any, any>>) {\n if (this.loadedInitialState) {\n // We loaded the entire initial state\n // so no need to filter or flip changes\n return changes\n }\n\n const newChanges = []\n for (const change of changes) {\n let newChange = change\n if (!this.sentKeys.has(change.key)) {\n if (change.type === `update`) {\n newChange = { ...change, type: `insert`, previousValue: undefined }\n } else if (change.type === `delete`) {\n // filter out deletes for keys that have not been sent\n continue\n }\n this.sentKeys.add(change.key)\n }\n newChanges.push(newChange)\n }\n return newChanges\n }\n\n private trackSentKeys(changes: Array<ChangeMessage<any, string | number>>) {\n if (this.loadedInitialState) {\n // No need to track sent keys if we loaded the entire state.\n // Since we sent everything, all keys must have been observed.\n return\n }\n\n for (const change of changes) {\n this.sentKeys.add(change.key)\n }\n }\n\n unsubscribe() {\n // Unload all subsets that this subscription loaded\n // We pass the exact same LoadSubsetOptions we used for loadSubset\n for (const options of this.loadedSubsets) {\n this.collection._sync.unloadSubset(options)\n }\n this.loadedSubsets = []\n\n this.emitInner(`unsubscribed`, {\n type: `unsubscribed`,\n subscription: this,\n })\n // Clear all event listeners to prevent memory leaks\n this.clearListeners()\n 
}\n}\n"],"names":["EventEmitter","ensureIndexForExpression","createFilteredCallback","and","createFilterFunctionFromExpression","eq","Value","gt","lt"],"mappings":";;;;;;;AAwCO,MAAM,+BACHA,aAAAA,aAEV;AAAA,EA4BE,YACU,YACA,UACA,SACR;AACA,UAAA;AAJQ,SAAA,aAAA;AACA,SAAA,WAAA;AACA,SAAA,UAAA;AA9BV,SAAQ,qBAAqB;AAI7B,SAAQ,eAAe;AAMvB,SAAQ,gBAA0C,CAAA;AAGlD,SAAQ,+BAAe,IAAA;AAOvB,SAAQ,UAA8B;AACtC,SAAQ,gDAAoD,IAAA;AAY1D,QAAI,QAAQ,eAAe;AACzB,WAAK,GAAG,gBAAgB,CAAC,UAAU,QAAQ,cAAe,KAAK,CAAC;AAAA,IAClE;AAGA,QAAI,QAAQ,iBAAiB;AAC3BC,gBAAAA,yBAAyB,QAAQ,iBAAiB,KAAK,UAAU;AAAA,IACnE;AAEA,UAAM,+BAA+B,CACnC,YACG;AACH,eAAS,OAAO;AAChB,WAAK,cAAc,OAAO;AAAA,IAC5B;AAEA,SAAK,WAAW;AAGhB,SAAK,mBAAmB,QAAQ,kBAC5BC,aAAAA,uBAAuB,KAAK,UAAU,OAAO,IAC7C,KAAK;AAAA,EACX;AAAA,EAhCA,IAAW,SAA6B;AACtC,WAAO,KAAK;AAAA,EACd;AAAA,EAgCA,gBAAgB,OAA4B;AAC1C,SAAK,eAAe;AAAA,EACtB;AAAA;AAAA;AAAA;AAAA,EAKQ,UAAU,WAA+B;AAC/C,QAAI,KAAK,YAAY,WAAW;AAC9B;AAAA,IACF;AAEA,UAAM,iBAAiB,KAAK;AAC5B,SAAK,UAAU;AAGf,SAAK,UAAU,iBAAiB;AAAA,MAC9B,MAAM;AAAA,MACN,cAAc;AAAA,MACd;AAAA,MACA,QAAQ;AAAA,IAAA,CACT;AAGD,UAAM,WAA2C,UAAU,SAAS;AACpE,SAAK,UAAU,UAAU;AAAA,MACvB,MAAM;AAAA,MACN,cAAc;AAAA,MACd;AAAA,MACA,QAAQ;AAAA,IAAA,CAC8B;AAAA,EAC1C;AAAA;AAAA;AAAA;AAAA,EAKQ,uBAAuB,YAAkC;AAE/D,QAAI,sBAAsB,SAAS;AACjC,WAAK,0BAA0B,IAAI,UAAU;AAC7C,WAAK,UAAU,eAAe;AAE9B,iBAAW,QAAQ,MAAM;AACvB,aAAK,0BAA0B,OAAO,UAAU;AAChD,YAAI,KAAK,0BAA0B,SAAS,GAAG;AAC7C,eAAK,UAAU,OAAO;AAAA,QACxB;AAAA,MACF,CAAC;AAAA,IACH;AAAA,EACF;AAAA,EAEA,wBAAwB;AACtB,WAAO,KAAK;AAAA,EACd;AAAA,EAEA,4BAA4B;AAC1B,WAAO,KAAK;AAAA,EACd;AAAA,EAEA,WAAW,SAAyC;AAClD,UAAM,aAAa,KAAK,qBAAqB,OAAO;AACpD,SAAK,iBAAiB,UAAU;AAAA,EAClC;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,EASA,gBAAgB,MAAwC;AACtD,QAAI,KAAK,oBAAoB;AAE3B,aAAO;AAAA,IACT;AAEA,UAAM,YAAoC;AAAA,MACxC,OAAO,KAAK,QAAQ;AAAA,MACpB,eAAe,MAAM,iBAAiB;AAAA,IAAA;AAGxC,QAAI,MAAM;AACR,UAAI,WAAW,MAAM;AACnB,cAAM,mBAAmB,KAAK;AAC9B,YAAI,UAAU,OAAO;AAEnB,gBAAM,cAAc,UAAU;AAC9B,gBAAM,mBAAmBC,UAAAA,IAAI,aAAa,gBAAgB;AAC1D,oBAAU,QAAQ;AAAA,QACpB,OAAO;AACL,oBAAU,QAAQ;AAAA,QACpB;AAAA,MACF;AAAA,IACF,OAAO;AAEL,WAAK,qBAAqB;AAAA,IAC5B;AAIA,UAAM,cAAiC;AAAA,MACrC,OAAO,UAAU;AAAA,MACjB,cAAc;AAAA,IAAA;AAEhB,UAAM,aAAa,KAAK,WAAW,MAAM,WAAW,WAAW;AAG/D,SAAK,cAAc,KAAK,WAAW;AAEnC,UAAM,yBAAyB,MAAM,0BAA0B;AAC/D,QAAI,wBAAwB;AAC1B,WAAK,uBAAuB,UAAU;AAAA,IACxC;AAGA,UAAM,WAAW,KAAK,WAAW,sBAAsB,SAAS;AAEhE,QAAI,aAAa,QAAW;AAE1B,aAAO;AAAA,IACT;AAGA,UAAM,mBAAmB,SAAS;AAAA,MAChC,CAAC,WAAW,CAAC,KAAK,SAAS,IAAI,OAAO,GAAG;AAAA,IAAA;AAG3C,SAAK,eAAe;AACpB,SAAK,SAAS,gBAAgB;AAC9B,WAAO;AAAA,EACT;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,EAUA,uBAAuB;AAAA,IACrB;AAAA,IACA;AAAA,IACA;AAAA,EAAA,GACgC;AAChC,QAAI,CAAC,MAAO,OAAM,IAAI,MAAM,mBAAmB;AAE/C,QAAI,CAAC,KAAK,cAAc;AACtB,YAAM,IAAI;AAAA,QACR;AAAA,MAAA;AAAA,IAEJ;AAEA,UAAM,QAAQ,KAAK;AACnB,UAAM,QAAQ,KAAK,QAAQ;AAC3B,UAAM,gBAAgB,QAClBC,gDAAmC,KAAK,IACxC;AAEJ,UAAM,WAAW,CAAC,QAAkC;AAClD,UAAI,KAAK,SAAS,IAAI,GAAG,GAAG;AAC1B,eAAO;AAAA,MACT;AAEA,YAAM,QAAQ,KAAK,WAAW,IAAI,GAAG;AACrC,UAAI,UAAU,QAAW;AACvB,eAAO;AAAA,MACT;AAEA,aAAO,gBAAgB,KAAK,KAAK;AAAA,IACnC;AAEA,QAAI,uBAAuB;AAC3B,UAAM,UAAsD,CAAA;AAQ5D,QAAI,OAA+B,CAAA;AACnC,QAAI,aAAa,QAAW;AAE1B,YAAM,EAAE,WAAA,IAAe,QAAQ,CAAC;AAChC,YAAM,sBAAsB,KAAK,WAAW,sBAAsB;AAAA,QAChE,OAAOC,UAAAA,GAAG,YAAY,IAAIC,GAAAA,MAAM,QAAQ,CAAC;AAAA,MAAA,CAC1C;AAED,UAAI,qBAAqB;AACvB,cAAM,mBAAmB,oBACtB,IAAI,CAAC,WAAW,OAAO,GAAG,EAC1B,OAAO,CAAC,QAAQ,CAAC,KAAK,SAAS,IAAI,GAAG,KAAK,SAAS,GAAG,CAAC;AAG3D,aAAK,KAAK,GAAG,gBAAgB;AAG7B,cAAM,qBAAqB,MAAM;AAAA,UAC/B,QAAQ,KAAK;AAAA,UACb;AAAA,UACA;AAAA,QAAA;AAEF,aAAK,KAAK,GAAG,kBAAkB;AAAA,MACjC,OAAO;AACL,eAAO,MAAM,KAAK,OAAO,UAAU,QAAQ;AAAA,MAC7C;AAAA,IACF,OAAO
;AACL,aAAO,MAAM,KAAK,OAAO,UAAU,QAAQ;AAAA,IAC7C;AAEA,UAAM,eAAe,MAAM,KAAK,IAAI,QAAQ,QAAQ,QAAQ,CAAC;AAC7D,UAAM,sBAAsB,MAAM,KAAK,WAAW;AAElD,WAAO,aAAA,IAAiB,KAAK,CAAC,uBAAuB;AACnD,YAAM,mCAAmB,IAAA;AAEzB,iBAAW,OAAO,MAAM;AACtB,cAAM,QAAQ,KAAK,WAAW,IAAI,GAAG;AACrC,gBAAQ,KAAK;AAAA,UACX,MAAM;AAAA,UACN;AAAA,UACA;AAAA,QAAA,CACD;AACD,+BAAuB;AACvB,qBAAa,IAAI,GAAG;AAAA,MACtB;AAEA,aAAO,MAAM,KAAK,aAAA,GAAgB,sBAAsB,QAAQ;AAAA,IAClE;AAEA,SAAK,SAAS,OAAO;AAErB,QAAI,uBAAuB;AAC3B,QAAI,OAAO,aAAa,aAAa;AAEnC,YAAM,EAAE,YAAY,mBAAmB,QAAQ,CAAC;AAChD,YAAM,WAAW,eAAe,cAAc,QAAQC,UAAAA,KAAKC,UAAAA;AAC3D,YAAM,cAAc,SAAS,YAAY,IAAIF,GAAAA,MAAM,QAAQ,CAAC;AAC5D,6BAAuB,QAAQH,UAAAA,IAAI,OAAO,WAAW,IAAI;AAAA,IAC3D;AAIA,UAAM,eAAkC;AAAA,MACtC,OAAO;AAAA,MACP;AAAA,MACA;AAAA,MACA,cAAc;AAAA,IAAA;AAEhB,UAAM,aAAa,KAAK,WAAW,MAAM,WAAW,YAAY;AAGhE,SAAK,cAAc,KAAK,YAAY;AAGpC,UAAM,WAAiC,CAAA;AAGvC,QAAI,OAAO,aAAa,aAAa;AACnC,YAAM,EAAE,WAAA,IAAe,QAAQ,CAAC;AAChC,YAAM,mBAAmBE,UAAAA,GAAG,YAAY,IAAIC,GAAAA,MAAM,QAAQ,CAAC;AAE3D,YAAM,eAAkC;AAAA,QACtC,OAAO;AAAA,QACP,cAAc;AAAA,MAAA;AAEhB,YAAM,mBAAmB,KAAK,WAAW,MAAM,WAAW,YAAY;AAGtE,WAAK,cAAc,KAAK,YAAY;AAEpC,UAAI,4BAA4B,SAAS;AACvC,iBAAS,KAAK,gBAAgB;AAAA,MAChC;AAAA,IACF;AAGA,QAAI,sBAAsB,SAAS;AACjC,eAAS,KAAK,UAAU;AAAA,IAC1B;AAGA,QAAI,SAAS,SAAS,GAAG;AACvB,YAAM,kBAAkB,QAAQ,IAAI,QAAQ,EAAE,KAAK,MAAM;AAAA,MAAC,CAAC;AAC3D,WAAK,uBAAuB,eAAe;AAAA,IAC7C,OAAO;AACL,WAAK,uBAAuB,UAAU;AAAA,IACxC;AAAA,EACF;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,EAUQ,qBAAqB,SAAyC;AACpE,QAAI,KAAK,oBAAoB;AAG3B,aAAO;AAAA,IACT;AAEA,UAAM,aAAa,CAAA;AACnB,eAAW,UAAU,SAAS;AAC5B,UAAI,YAAY;AAChB,UAAI,CAAC,KAAK,SAAS,IAAI,OAAO,GAAG,GAAG;AAClC,YAAI,OAAO,SAAS,UAAU;AAC5B,sBAAY,EAAE,GAAG,QAAQ,MAAM,UAAU,eAAe,OAAA;AAAA,QAC1D,WAAW,OAAO,SAAS,UAAU;AAEnC;AAAA,QACF;AACA,aAAK,SAAS,IAAI,OAAO,GAAG;AAAA,MAC9B;AACA,iBAAW,KAAK,SAAS;AAAA,IAC3B;AACA,WAAO;AAAA,EACT;AAAA,EAEQ,cAAc,SAAqD;AACzE,QAAI,KAAK,oBAAoB;AAG3B;AAAA,IACF;AAEA,eAAW,UAAU,SAAS;AAC5B,WAAK,SAAS,IAAI,OAAO,GAAG;AAAA,IAC9B;AAAA,EACF;AAAA,EAEA,cAAc;AAGZ,eAAW,WAAW,KAAK,eAAe;AACxC,WAAK,WAAW,MAAM,aAAa,OAAO;AAAA,IAC5C;AACA,SAAK,gBAAgB,CAAA;AAErB,SAAK,UAAU,gBAAgB;AAAA,MAC7B,MAAM;AAAA,MACN,cAAc;AAAA,IAAA,CACf;AAED,SAAK,eAAA;AAAA,EACP;AACF;;"}
1
+ {"version":3,"file":"subscription.cjs","sources":["../../../src/collection/subscription.ts"],"sourcesContent":["import { ensureIndexForExpression } from \"../indexes/auto-index.js\"\nimport { and, eq, gt, gte, lt } from \"../query/builder/functions.js\"\nimport { Value } from \"../query/ir.js\"\nimport { EventEmitter } from \"../event-emitter.js\"\nimport {\n createFilterFunctionFromExpression,\n createFilteredCallback,\n} from \"./change-events.js\"\nimport type { BasicExpression, OrderBy } from \"../query/ir.js\"\nimport type { IndexInterface } from \"../indexes/base-index.js\"\nimport type {\n ChangeMessage,\n LoadSubsetOptions,\n Subscription,\n SubscriptionEvents,\n SubscriptionStatus,\n SubscriptionUnsubscribedEvent,\n} from \"../types.js\"\nimport type { CollectionImpl } from \"./index.js\"\n\ntype RequestSnapshotOptions = {\n where?: BasicExpression<boolean>\n optimizedOnly?: boolean\n trackLoadSubsetPromise?: boolean\n}\n\ntype RequestLimitedSnapshotOptions = {\n orderBy: OrderBy\n limit: number\n minValue?: any\n}\n\ntype CollectionSubscriptionOptions = {\n includeInitialState?: boolean\n /** Pre-compiled expression for filtering changes */\n whereExpression?: BasicExpression<boolean>\n /** Callback to call when the subscription is unsubscribed */\n onUnsubscribe?: (event: SubscriptionUnsubscribedEvent) => void\n}\n\nexport class CollectionSubscription\n extends EventEmitter<SubscriptionEvents>\n implements Subscription\n{\n private loadedInitialState = false\n\n // Flag to indicate that we have sent at least 1 snapshot.\n // While `snapshotSent` is false we filter out all changes from subscription to the collection.\n private snapshotSent = false\n\n /**\n * Track all loadSubset calls made by this subscription so we can unload them on cleanup.\n * We store the exact LoadSubsetOptions we passed to loadSubset to ensure symmetric unload.\n */\n private loadedSubsets: Array<LoadSubsetOptions> = []\n\n // Keep track of the keys we've sent (needed for join and orderBy optimizations)\n private sentKeys = new Set<string | number>()\n\n private filteredCallback: (changes: Array<ChangeMessage<any, any>>) => void\n\n private orderByIndex: IndexInterface<string | number> | undefined\n\n // Status tracking\n private _status: SubscriptionStatus = `ready`\n private pendingLoadSubsetPromises: Set<Promise<void>> = new Set()\n\n public get status(): SubscriptionStatus {\n return this._status\n }\n\n constructor(\n private collection: CollectionImpl<any, any, any, any, any>,\n private callback: (changes: Array<ChangeMessage<any, any>>) => void,\n private options: CollectionSubscriptionOptions\n ) {\n super()\n if (options.onUnsubscribe) {\n this.on(`unsubscribed`, (event) => options.onUnsubscribe!(event))\n }\n\n // Auto-index for where expressions if enabled\n if (options.whereExpression) {\n ensureIndexForExpression(options.whereExpression, this.collection)\n }\n\n const callbackWithSentKeysTracking = (\n changes: Array<ChangeMessage<any, any>>\n ) => {\n callback(changes)\n this.trackSentKeys(changes)\n }\n\n this.callback = callbackWithSentKeysTracking\n\n // Create a filtered callback if where clause is provided\n this.filteredCallback = options.whereExpression\n ? 
createFilteredCallback(this.callback, options)\n : this.callback\n }\n\n setOrderByIndex(index: IndexInterface<any>) {\n this.orderByIndex = index\n }\n\n /**\n * Set subscription status and emit events if changed\n */\n private setStatus(newStatus: SubscriptionStatus) {\n if (this._status === newStatus) {\n return // No change\n }\n\n const previousStatus = this._status\n this._status = newStatus\n\n // Emit status:change event\n this.emitInner(`status:change`, {\n type: `status:change`,\n subscription: this,\n previousStatus,\n status: newStatus,\n })\n\n // Emit specific status event\n const eventKey: `status:${SubscriptionStatus}` = `status:${newStatus}`\n this.emitInner(eventKey, {\n type: eventKey,\n subscription: this,\n previousStatus,\n status: newStatus,\n } as SubscriptionEvents[typeof eventKey])\n }\n\n /**\n * Track a loadSubset promise and manage loading status\n */\n private trackLoadSubsetPromise(syncResult: Promise<void> | true) {\n // Track the promise if it's actually a promise (async work)\n if (syncResult instanceof Promise) {\n this.pendingLoadSubsetPromises.add(syncResult)\n this.setStatus(`loadingSubset`)\n\n syncResult.finally(() => {\n this.pendingLoadSubsetPromises.delete(syncResult)\n if (this.pendingLoadSubsetPromises.size === 0) {\n this.setStatus(`ready`)\n }\n })\n }\n }\n\n hasLoadedInitialState() {\n return this.loadedInitialState\n }\n\n hasSentAtLeastOneSnapshot() {\n return this.snapshotSent\n }\n\n emitEvents(changes: Array<ChangeMessage<any, any>>) {\n const newChanges = this.filterAndFlipChanges(changes)\n this.filteredCallback(newChanges)\n }\n\n /**\n * Sends the snapshot to the callback.\n * Returns a boolean indicating if it succeeded.\n * It can only fail if there is no index to fulfill the request\n * and the optimizedOnly option is set to true,\n * or, the entire state was already loaded.\n */\n requestSnapshot(opts?: RequestSnapshotOptions): boolean {\n if (this.loadedInitialState) {\n // Subscription was deoptimized so we already sent the entire initial state\n return false\n }\n\n const stateOpts: RequestSnapshotOptions = {\n where: this.options.whereExpression,\n optimizedOnly: opts?.optimizedOnly ?? false,\n }\n\n if (opts) {\n if (`where` in opts) {\n const snapshotWhereExp = opts.where\n if (stateOpts.where) {\n // Combine the two where expressions\n const subWhereExp = stateOpts.where\n const combinedWhereExp = and(subWhereExp, snapshotWhereExp)\n stateOpts.where = combinedWhereExp\n } else {\n stateOpts.where = snapshotWhereExp\n }\n }\n } else {\n // No options provided so it's loading the entire initial state\n this.loadedInitialState = true\n }\n\n // Request the sync layer to load more data\n // don't await it, we will load the data into the collection when it comes in\n const loadOptions: LoadSubsetOptions = {\n where: stateOpts.where,\n subscription: this,\n }\n const syncResult = this.collection._sync.loadSubset(loadOptions)\n\n // Track this loadSubset call so we can unload it later\n this.loadedSubsets.push(loadOptions)\n\n const trackLoadSubsetPromise = opts?.trackLoadSubsetPromise ?? 
true\n if (trackLoadSubsetPromise) {\n this.trackLoadSubsetPromise(syncResult)\n }\n\n // Also load data immediately from the collection\n const snapshot = this.collection.currentStateAsChanges(stateOpts)\n\n if (snapshot === undefined) {\n // Couldn't load from indexes\n return false\n }\n\n // Only send changes that have not been sent yet\n const filteredSnapshot = snapshot.filter(\n (change) => !this.sentKeys.has(change.key)\n )\n\n this.snapshotSent = true\n this.callback(filteredSnapshot)\n return true\n }\n\n /**\n * Sends a snapshot that fulfills the `where` clause and all rows are bigger or equal to `minValue`.\n * Requires a range index to be set with `setOrderByIndex` prior to calling this method.\n * It uses that range index to load the items in the order of the index.\n * Note 1: it may load more rows than the provided LIMIT because it loads all values equal to `minValue` + limit values greater than `minValue`.\n * This is needed to ensure that it does not accidentally skip duplicate values when the limit falls in the middle of some duplicated values.\n * Note 2: it does not send keys that have already been sent before.\n */\n requestLimitedSnapshot({\n orderBy,\n limit,\n minValue,\n }: RequestLimitedSnapshotOptions) {\n if (!limit) throw new Error(`limit is required`)\n\n if (!this.orderByIndex) {\n throw new Error(\n `Ordered snapshot was requested but no index was found. You have to call setOrderByIndex before requesting an ordered snapshot.`\n )\n }\n\n const index = this.orderByIndex\n const where = this.options.whereExpression\n const whereFilterFn = where\n ? createFilterFunctionFromExpression(where)\n : undefined\n\n const filterFn = (key: string | number): boolean => {\n if (this.sentKeys.has(key)) {\n return false\n }\n\n const value = this.collection.get(key)\n if (value === undefined) {\n return false\n }\n\n return whereFilterFn?.(value) ?? true\n }\n\n let biggestObservedValue = minValue\n const changes: Array<ChangeMessage<any, string | number>> = []\n\n // If we have a minValue we need to handle the case\n // where there might be duplicate values equal to minValue that we need to include\n // because we can have data like this: [1, 2, 3, 3, 3, 4, 5]\n // so if minValue is 3 then the previous snapshot may not have included all 3s\n // e.g. 
if it was offset 0 and limit 3 it would only have loaded the first 3\n // so we load all rows equal to minValue first, to be sure we don't skip any duplicate values\n let keys: Array<string | number> = []\n if (minValue !== undefined) {\n // First, get all items with the same value as minValue\n const { expression } = orderBy[0]!\n const allRowsWithMinValue = this.collection.currentStateAsChanges({\n where: eq(expression, new Value(minValue)),\n })\n\n if (allRowsWithMinValue) {\n const keysWithMinValue = allRowsWithMinValue\n .map((change) => change.key)\n .filter((key) => !this.sentKeys.has(key) && filterFn(key))\n\n // Add items with the minValue first\n keys.push(...keysWithMinValue)\n\n // Then get items greater than minValue\n const keysGreaterThanMin = index.take(\n limit - keys.length,\n minValue,\n filterFn\n )\n keys.push(...keysGreaterThanMin)\n } else {\n keys = index.take(limit, minValue, filterFn)\n }\n } else {\n keys = index.take(limit, minValue, filterFn)\n }\n\n const valuesNeeded = () => Math.max(limit - changes.length, 0)\n const collectionExhausted = () => keys.length === 0\n\n while (valuesNeeded() > 0 && !collectionExhausted()) {\n const insertedKeys = new Set<string | number>() // Track keys we add to `changes` in this iteration\n\n for (const key of keys) {\n const value = this.collection.get(key)!\n changes.push({\n type: `insert`,\n key,\n value,\n })\n biggestObservedValue = value\n insertedKeys.add(key) // Track this key\n }\n\n keys = index.take(valuesNeeded(), biggestObservedValue, filterFn)\n }\n\n this.callback(changes)\n\n let whereWithValueFilter = where\n if (typeof minValue !== `undefined`) {\n // Only request data that we haven't seen yet (i.e. is bigger than the minValue)\n const { expression, compareOptions } = orderBy[0]!\n const operator = compareOptions.direction === `asc` ? gt : lt\n const valueFilter = operator(expression, new Value(minValue))\n whereWithValueFilter = where ? 
and(where, valueFilter) : valueFilter\n }\n\n // Request the sync layer to load more data\n // don't await it, we will load the data into the collection when it comes in\n const loadOptions1: LoadSubsetOptions = {\n where: whereWithValueFilter,\n limit,\n orderBy,\n subscription: this,\n }\n const syncResult = this.collection._sync.loadSubset(loadOptions1)\n\n // Track this loadSubset call\n this.loadedSubsets.push(loadOptions1)\n\n // Make parallel loadSubset calls for values equal to minValue and values greater than minValue\n const promises: Array<Promise<void>> = []\n\n // First promise: load all values equal to minValue\n if (typeof minValue !== `undefined`) {\n const { expression } = orderBy[0]!\n\n // For Date values, we need to handle precision differences between JS (ms) and backends (μs)\n // A JS Date represents a 1ms range, so we query for all values within that range\n let exactValueFilter\n if (minValue instanceof Date) {\n const minValuePlus1ms = new Date(minValue.getTime() + 1)\n exactValueFilter = and(\n gte(expression, new Value(minValue)),\n lt(expression, new Value(minValuePlus1ms))\n )\n } else {\n exactValueFilter = eq(expression, new Value(minValue))\n }\n\n const loadOptions2: LoadSubsetOptions = {\n where: exactValueFilter,\n subscription: this,\n }\n const equalValueResult = this.collection._sync.loadSubset(loadOptions2)\n\n // Track this loadSubset call\n this.loadedSubsets.push(loadOptions2)\n\n if (equalValueResult instanceof Promise) {\n promises.push(equalValueResult)\n }\n }\n\n // Second promise: load values greater than minValue\n if (syncResult instanceof Promise) {\n promises.push(syncResult)\n }\n\n // Track the combined promise\n if (promises.length > 0) {\n const combinedPromise = Promise.all(promises).then(() => {})\n this.trackLoadSubsetPromise(combinedPromise)\n } else {\n this.trackLoadSubsetPromise(syncResult)\n }\n }\n\n // TODO: also add similar test but that checks that it can also load it from the collection's loadSubset function\n // and that that also works properly (i.e. 
does not skip duplicate values)\n\n /**\n * Filters and flips changes for keys that have not been sent yet.\n * Deletes are filtered out for keys that have not been sent yet.\n * Updates are flipped into inserts for keys that have not been sent yet.\n */\n private filterAndFlipChanges(changes: Array<ChangeMessage<any, any>>) {\n if (this.loadedInitialState) {\n // We loaded the entire initial state\n // so no need to filter or flip changes\n return changes\n }\n\n const newChanges = []\n for (const change of changes) {\n let newChange = change\n if (!this.sentKeys.has(change.key)) {\n if (change.type === `update`) {\n newChange = { ...change, type: `insert`, previousValue: undefined }\n } else if (change.type === `delete`) {\n // filter out deletes for keys that have not been sent\n continue\n }\n this.sentKeys.add(change.key)\n }\n newChanges.push(newChange)\n }\n return newChanges\n }\n\n private trackSentKeys(changes: Array<ChangeMessage<any, string | number>>) {\n if (this.loadedInitialState) {\n // No need to track sent keys if we loaded the entire state.\n // Since we sent everything, all keys must have been observed.\n return\n }\n\n for (const change of changes) {\n this.sentKeys.add(change.key)\n }\n }\n\n unsubscribe() {\n // Unload all subsets that this subscription loaded\n // We pass the exact same LoadSubsetOptions we used for loadSubset\n for (const options of this.loadedSubsets) {\n this.collection._sync.unloadSubset(options)\n }\n this.loadedSubsets = []\n\n this.emitInner(`unsubscribed`, {\n type: `unsubscribed`,\n subscription: this,\n })\n // Clear all event listeners to prevent memory leaks\n this.clearListeners()\n }\n}\n"],"names":["EventEmitter","ensureIndexForExpression","createFilteredCallback","and","createFilterFunctionFromExpression","eq","Value","gt","lt","gte"],"mappings":";;;;;;;AAwCO,MAAM,+BACHA,aAAAA,aAEV;AAAA,EA4BE,YACU,YACA,UACA,SACR;AACA,UAAA;AAJQ,SAAA,aAAA;AACA,SAAA,WAAA;AACA,SAAA,UAAA;AA9BV,SAAQ,qBAAqB;AAI7B,SAAQ,eAAe;AAMvB,SAAQ,gBAA0C,CAAA;AAGlD,SAAQ,+BAAe,IAAA;AAOvB,SAAQ,UAA8B;AACtC,SAAQ,gDAAoD,IAAA;AAY1D,QAAI,QAAQ,eAAe;AACzB,WAAK,GAAG,gBAAgB,CAAC,UAAU,QAAQ,cAAe,KAAK,CAAC;AAAA,IAClE;AAGA,QAAI,QAAQ,iBAAiB;AAC3BC,gBAAAA,yBAAyB,QAAQ,iBAAiB,KAAK,UAAU;AAAA,IACnE;AAEA,UAAM,+BAA+B,CACnC,YACG;AACH,eAAS,OAAO;AAChB,WAAK,cAAc,OAAO;AAAA,IAC5B;AAEA,SAAK,WAAW;AAGhB,SAAK,mBAAmB,QAAQ,kBAC5BC,aAAAA,uBAAuB,KAAK,UAAU,OAAO,IAC7C,KAAK;AAAA,EACX;AAAA,EAhCA,IAAW,SAA6B;AACtC,WAAO,KAAK;AAAA,EACd;AAAA,EAgCA,gBAAgB,OAA4B;AAC1C,SAAK,eAAe;AAAA,EACtB;AAAA;AAAA;AAAA;AAAA,EAKQ,UAAU,WAA+B;AAC/C,QAAI,KAAK,YAAY,WAAW;AAC9B;AAAA,IACF;AAEA,UAAM,iBAAiB,KAAK;AAC5B,SAAK,UAAU;AAGf,SAAK,UAAU,iBAAiB;AAAA,MAC9B,MAAM;AAAA,MACN,cAAc;AAAA,MACd;AAAA,MACA,QAAQ;AAAA,IAAA,CACT;AAGD,UAAM,WAA2C,UAAU,SAAS;AACpE,SAAK,UAAU,UAAU;AAAA,MACvB,MAAM;AAAA,MACN,cAAc;AAAA,MACd;AAAA,MACA,QAAQ;AAAA,IAAA,CAC8B;AAAA,EAC1C;AAAA;AAAA;AAAA;AAAA,EAKQ,uBAAuB,YAAkC;AAE/D,QAAI,sBAAsB,SAAS;AACjC,WAAK,0BAA0B,IAAI,UAAU;AAC7C,WAAK,UAAU,eAAe;AAE9B,iBAAW,QAAQ,MAAM;AACvB,aAAK,0BAA0B,OAAO,UAAU;AAChD,YAAI,KAAK,0BAA0B,SAAS,GAAG;AAC7C,eAAK,UAAU,OAAO;AAAA,QACxB;AAAA,MACF,CAAC;AAAA,IACH;AAAA,EACF;AAAA,EAEA,wBAAwB;AACtB,WAAO,KAAK;AAAA,EACd;AAAA,EAEA,4BAA4B;AAC1B,WAAO,KAAK;AAAA,EACd;AAAA,EAEA,WAAW,SAAyC;AAClD,UAAM,aAAa,KAAK,qBAAqB,OAAO;AACpD,SAAK,iBAAiB,UAAU;AAAA,EAClC;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,EASA,gBAAgB,MAAwC;AACtD,QAAI,KAAK,oBAAoB;AAE3B,aAAO;AAAA,IACT;AAEA,UAAM,YAAoC;AAAA,MACxC,OAAO,KAAK,QAAQ;AAAA,MACpB,eAAe,MAAM,iBAAiB;AAAA,IAAA;AAGxC,QAAI,MAAM;AACR,UAAI,WAAW,MAAM;AACnB,cAAM,mBAAmB,KAAK;AAC9B,YAAI,UAAU,OAAO;AAEnB,gBAAM,cA
Ac,UAAU;AAC9B,gBAAM,mBAAmBC,UAAAA,IAAI,aAAa,gBAAgB;AAC1D,oBAAU,QAAQ;AAAA,QACpB,OAAO;AACL,oBAAU,QAAQ;AAAA,QACpB;AAAA,MACF;AAAA,IACF,OAAO;AAEL,WAAK,qBAAqB;AAAA,IAC5B;AAIA,UAAM,cAAiC;AAAA,MACrC,OAAO,UAAU;AAAA,MACjB,cAAc;AAAA,IAAA;AAEhB,UAAM,aAAa,KAAK,WAAW,MAAM,WAAW,WAAW;AAG/D,SAAK,cAAc,KAAK,WAAW;AAEnC,UAAM,yBAAyB,MAAM,0BAA0B;AAC/D,QAAI,wBAAwB;AAC1B,WAAK,uBAAuB,UAAU;AAAA,IACxC;AAGA,UAAM,WAAW,KAAK,WAAW,sBAAsB,SAAS;AAEhE,QAAI,aAAa,QAAW;AAE1B,aAAO;AAAA,IACT;AAGA,UAAM,mBAAmB,SAAS;AAAA,MAChC,CAAC,WAAW,CAAC,KAAK,SAAS,IAAI,OAAO,GAAG;AAAA,IAAA;AAG3C,SAAK,eAAe;AACpB,SAAK,SAAS,gBAAgB;AAC9B,WAAO;AAAA,EACT;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,EAUA,uBAAuB;AAAA,IACrB;AAAA,IACA;AAAA,IACA;AAAA,EAAA,GACgC;AAChC,QAAI,CAAC,MAAO,OAAM,IAAI,MAAM,mBAAmB;AAE/C,QAAI,CAAC,KAAK,cAAc;AACtB,YAAM,IAAI;AAAA,QACR;AAAA,MAAA;AAAA,IAEJ;AAEA,UAAM,QAAQ,KAAK;AACnB,UAAM,QAAQ,KAAK,QAAQ;AAC3B,UAAM,gBAAgB,QAClBC,gDAAmC,KAAK,IACxC;AAEJ,UAAM,WAAW,CAAC,QAAkC;AAClD,UAAI,KAAK,SAAS,IAAI,GAAG,GAAG;AAC1B,eAAO;AAAA,MACT;AAEA,YAAM,QAAQ,KAAK,WAAW,IAAI,GAAG;AACrC,UAAI,UAAU,QAAW;AACvB,eAAO;AAAA,MACT;AAEA,aAAO,gBAAgB,KAAK,KAAK;AAAA,IACnC;AAEA,QAAI,uBAAuB;AAC3B,UAAM,UAAsD,CAAA;AAQ5D,QAAI,OAA+B,CAAA;AACnC,QAAI,aAAa,QAAW;AAE1B,YAAM,EAAE,WAAA,IAAe,QAAQ,CAAC;AAChC,YAAM,sBAAsB,KAAK,WAAW,sBAAsB;AAAA,QAChE,OAAOC,UAAAA,GAAG,YAAY,IAAIC,GAAAA,MAAM,QAAQ,CAAC;AAAA,MAAA,CAC1C;AAED,UAAI,qBAAqB;AACvB,cAAM,mBAAmB,oBACtB,IAAI,CAAC,WAAW,OAAO,GAAG,EAC1B,OAAO,CAAC,QAAQ,CAAC,KAAK,SAAS,IAAI,GAAG,KAAK,SAAS,GAAG,CAAC;AAG3D,aAAK,KAAK,GAAG,gBAAgB;AAG7B,cAAM,qBAAqB,MAAM;AAAA,UAC/B,QAAQ,KAAK;AAAA,UACb;AAAA,UACA;AAAA,QAAA;AAEF,aAAK,KAAK,GAAG,kBAAkB;AAAA,MACjC,OAAO;AACL,eAAO,MAAM,KAAK,OAAO,UAAU,QAAQ;AAAA,MAC7C;AAAA,IACF,OAAO;AACL,aAAO,MAAM,KAAK,OAAO,UAAU,QAAQ;AAAA,IAC7C;AAEA,UAAM,eAAe,MAAM,KAAK,IAAI,QAAQ,QAAQ,QAAQ,CAAC;AAC7D,UAAM,sBAAsB,MAAM,KAAK,WAAW;AAElD,WAAO,aAAA,IAAiB,KAAK,CAAC,uBAAuB;AACnD,YAAM,mCAAmB,IAAA;AAEzB,iBAAW,OAAO,MAAM;AACtB,cAAM,QAAQ,KAAK,WAAW,IAAI,GAAG;AACrC,gBAAQ,KAAK;AAAA,UACX,MAAM;AAAA,UACN;AAAA,UACA;AAAA,QAAA,CACD;AACD,+BAAuB;AACvB,qBAAa,IAAI,GAAG;AAAA,MACtB;AAEA,aAAO,MAAM,KAAK,aAAA,GAAgB,sBAAsB,QAAQ;AAAA,IAClE;AAEA,SAAK,SAAS,OAAO;AAErB,QAAI,uBAAuB;AAC3B,QAAI,OAAO,aAAa,aAAa;AAEnC,YAAM,EAAE,YAAY,mBAAmB,QAAQ,CAAC;AAChD,YAAM,WAAW,eAAe,cAAc,QAAQC,UAAAA,KAAKC,UAAAA;AAC3D,YAAM,cAAc,SAAS,YAAY,IAAIF,GAAAA,MAAM,QAAQ,CAAC;AAC5D,6BAAuB,QAAQH,UAAAA,IAAI,OAAO,WAAW,IAAI;AAAA,IAC3D;AAIA,UAAM,eAAkC;AAAA,MACtC,OAAO;AAAA,MACP;AAAA,MACA;AAAA,MACA,cAAc;AAAA,IAAA;AAEhB,UAAM,aAAa,KAAK,WAAW,MAAM,WAAW,YAAY;AAGhE,SAAK,cAAc,KAAK,YAAY;AAGpC,UAAM,WAAiC,CAAA;AAGvC,QAAI,OAAO,aAAa,aAAa;AACnC,YAAM,EAAE,WAAA,IAAe,QAAQ,CAAC;AAIhC,UAAI;AACJ,UAAI,oBAAoB,MAAM;AAC5B,cAAM,kBAAkB,IAAI,KAAK,SAAS,QAAA,IAAY,CAAC;AACvD,2BAAmBA,UAAAA;AAAAA,UACjBM,UAAAA,IAAI,YAAY,IAAIH,GAAAA,MAAM,QAAQ,CAAC;AAAA,UACnCE,UAAAA,GAAG,YAAY,IAAIF,GAAAA,MAAM,eAAe,CAAC;AAAA,QAAA;AAAA,MAE7C,OAAO;AACL,2BAAmBD,UAAAA,GAAG,YAAY,IAAIC,GAAAA,MAAM,QAAQ,CAAC;AAAA,MACvD;AAEA,YAAM,eAAkC;AAAA,QACtC,OAAO;AAAA,QACP,cAAc;AAAA,MAAA;AAEhB,YAAM,mBAAmB,KAAK,WAAW,MAAM,WAAW,YAAY;AAGtE,WAAK,cAAc,KAAK,YAAY;AAEpC,UAAI,4BAA4B,SAAS;AACvC,iBAAS,KAAK,gBAAgB;AAAA,MAChC;AAAA,IACF;AAGA,QAAI,sBAAsB,SAAS;AACjC,eAAS,KAAK,UAAU;AAAA,IAC1B;AAGA,QAAI,SAAS,SAAS,GAAG;AACvB,YAAM,kBAAkB,QAAQ,IAAI,QAAQ,EAAE,KAAK,MAAM;AAAA,MAAC,CAAC;AAC3D,WAAK,uBAAuB,eAAe;AAAA,IAC7C,OAAO;AACL,WAAK,uBAAuB,UAAU;AAAA,IACxC;AAAA,EACF;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,EAUQ,qBAAqB,SAAyC;AACpE,QAAI,KAAK,oBAAoB;AAG3B,aAAO;AAAA,IACT;AAEA,UAAM,aAAa,CAAA;AACnB,eAAW,UAAU,SAAS;AAC5B,UAAI,YAAY;AAChB,UAAI,CAAC,KAAK,SAAS,IAAI,OAAO,GAAG,GAAG;AAClC,YAAI,OAAO,SAAS,UAAU;AAC5B,sBAAY,EAAE,GAAG,QAA
Q,MAAM,UAAU,eAAe,OAAA;AAAA,QAC1D,WAAW,OAAO,SAAS,UAAU;AAEnC;AAAA,QACF;AACA,aAAK,SAAS,IAAI,OAAO,GAAG;AAAA,MAC9B;AACA,iBAAW,KAAK,SAAS;AAAA,IAC3B;AACA,WAAO;AAAA,EACT;AAAA,EAEQ,cAAc,SAAqD;AACzE,QAAI,KAAK,oBAAoB;AAG3B;AAAA,IACF;AAEA,eAAW,UAAU,SAAS;AAC5B,WAAK,SAAS,IAAI,OAAO,GAAG;AAAA,IAC9B;AAAA,EACF;AAAA,EAEA,cAAc;AAGZ,eAAW,WAAW,KAAK,eAAe;AACxC,WAAK,WAAW,MAAM,aAAa,OAAO;AAAA,IAC5C;AACA,SAAK,gBAAgB,CAAA;AAErB,SAAK,UAAU,gBAAgB;AAAA,MAC7B,MAAM;AAAA,MACN,cAAc;AAAA,IAAA,CACf;AAED,SAAK,eAAA;AAAA,EACP;AACF;;"}
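The subscription.cjs.map hunk above carries the only behavioral change to src/collection/subscription.ts in this range: the rebuilt source pulls in `gte` and, inside `requestLimitedSnapshot`, widens the "load every row equal to minValue" filter when `minValue` is a `Date`. A JS `Date` has millisecond precision while a backend column may store microseconds, so an exact `eq` match can silently drop rows; the new code requests the half-open 1 ms window instead. The snippet below is condensed from the embedded `sourcesContent` of the new map, not new API: every identifier (`and`, `gte`, `lt`, `eq`, `Value`, `expression`, `minValue`) comes from that source and runs inside `requestLimitedSnapshot`.

  // Filter for the extra loadSubset call that fetches all rows whose
  // ordering value equals minValue (needed so duplicates are not skipped).
  let exactValueFilter
  if (minValue instanceof Date) {
    // Dates: match the whole window [minValue, minValue + 1ms) so that
    // microsecond-precision backend values are not missed.
    const minValuePlus1ms = new Date(minValue.getTime() + 1)
    exactValueFilter = and(
      gte(expression, new Value(minValue)),
      lt(expression, new Value(minValuePlus1ms))
    )
  } else {
    // Non-Date values keep the exact-equality behavior of 0.5.7.
    exactValueFilter = eq(expression, new Value(minValue))
  }

The removed map above shows that 0.5.7 always took the `eq` branch; the rest of the subscription logic (snapshot requests, sent-key tracking, subset unloading) is unchanged between the two versions.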
@@ -1 +1 @@
1
- {"version":3,"file":"base-index.cjs","sources":["../../../src/indexes/base-index.ts"],"sourcesContent":["import { compileSingleRowExpression } from \"../query/compiler/evaluators.js\"\nimport { comparisonFunctions } from \"../query/builder/functions.js\"\nimport { DEFAULT_COMPARE_OPTIONS, deepEquals } from \"../utils.js\"\nimport type { RangeQueryOptions } from \"./btree-index.js\"\nimport type { CompareOptions } from \"../query/builder/types.js\"\nimport type { BasicExpression, OrderByDirection } from \"../query/ir.js\"\n\n/**\n * Operations that indexes can support, imported from available comparison functions\n */\nexport const IndexOperation = comparisonFunctions\n\n/**\n * Type for index operation values\n */\nexport type IndexOperation = (typeof comparisonFunctions)[number]\n\n/**\n * Statistics about index usage and performance\n */\nexport interface IndexStats {\n readonly entryCount: number\n readonly lookupCount: number\n readonly averageLookupTime: number\n readonly lastUpdated: Date\n}\n\nexport interface IndexInterface<\n TKey extends string | number = string | number,\n> {\n add: (key: TKey, item: any) => void\n remove: (key: TKey, item: any) => void\n update: (key: TKey, oldItem: any, newItem: any) => void\n\n build: (entries: Iterable<[TKey, any]>) => void\n clear: () => void\n\n lookup: (operation: IndexOperation, value: any) => Set<TKey>\n\n equalityLookup: (value: any) => Set<TKey>\n inArrayLookup: (values: Array<any>) => Set<TKey>\n\n rangeQuery: (options: RangeQueryOptions) => Set<TKey>\n rangeQueryReversed: (options: RangeQueryOptions) => Set<TKey>\n\n take: (\n n: number,\n from?: TKey,\n filterFn?: (key: TKey) => boolean\n ) => Array<TKey>\n takeReversed: (\n n: number,\n from?: TKey,\n filterFn?: (key: TKey) => boolean\n ) => Array<TKey>\n\n get keyCount(): number\n get orderedEntriesArray(): Array<[any, Set<TKey>]>\n get orderedEntriesArrayReversed(): Array<[any, Set<TKey>]>\n\n get indexedKeysSet(): Set<TKey>\n get valueMapData(): Map<any, Set<TKey>>\n\n supports: (operation: IndexOperation) => boolean\n\n matchesField: (fieldPath: Array<string>) => boolean\n matchesCompareOptions: (compareOptions: CompareOptions) => boolean\n matchesDirection: (direction: OrderByDirection) => boolean\n\n getStats: () => IndexStats\n}\n\n/**\n * Base abstract class that all index types extend\n */\nexport abstract class BaseIndex<TKey extends string | number = string | number>\n implements IndexInterface<TKey>\n{\n public readonly id: number\n public readonly name?: string\n public readonly expression: BasicExpression\n public abstract readonly supportedOperations: Set<IndexOperation>\n\n protected lookupCount = 0\n protected totalLookupTime = 0\n protected lastUpdated = new Date()\n protected compareOptions: CompareOptions\n\n constructor(\n id: number,\n expression: BasicExpression,\n name?: string,\n options?: any\n ) {\n this.id = id\n this.expression = expression\n this.compareOptions = DEFAULT_COMPARE_OPTIONS\n this.name = name\n this.initialize(options)\n }\n\n // Abstract methods that each index type must implement\n abstract add(key: TKey, item: any): void\n abstract remove(key: TKey, item: any): void\n abstract update(key: TKey, oldItem: any, newItem: any): void\n abstract build(entries: Iterable<[TKey, any]>): void\n abstract clear(): void\n abstract lookup(operation: IndexOperation, value: any): Set<TKey>\n abstract take(\n n: number,\n from?: TKey,\n filterFn?: (key: TKey) => boolean\n ): Array<TKey>\n abstract takeReversed(\n n: number,\n from?: TKey,\n filterFn?: 
(key: TKey) => boolean\n ): Array<TKey>\n abstract get keyCount(): number\n abstract equalityLookup(value: any): Set<TKey>\n abstract inArrayLookup(values: Array<any>): Set<TKey>\n abstract rangeQuery(options: RangeQueryOptions): Set<TKey>\n abstract rangeQueryReversed(options: RangeQueryOptions): Set<TKey>\n abstract get orderedEntriesArray(): Array<[any, Set<TKey>]>\n abstract get orderedEntriesArrayReversed(): Array<[any, Set<TKey>]>\n abstract get indexedKeysSet(): Set<TKey>\n abstract get valueMapData(): Map<any, Set<TKey>>\n\n // Common methods\n supports(operation: IndexOperation): boolean {\n return this.supportedOperations.has(operation)\n }\n\n matchesField(fieldPath: Array<string>): boolean {\n return (\n this.expression.type === `ref` &&\n this.expression.path.length === fieldPath.length &&\n this.expression.path.every((part, i) => part === fieldPath[i])\n )\n }\n\n /**\n * Checks if the compare options match the index's compare options.\n * The direction is ignored because the index can be reversed if the direction is different.\n */\n matchesCompareOptions(compareOptions: CompareOptions): boolean {\n const thisCompareOptionsWithoutDirection = {\n ...this.compareOptions,\n direction: undefined,\n }\n const compareOptionsWithoutDirection = {\n ...compareOptions,\n direction: undefined,\n }\n\n return deepEquals(\n thisCompareOptionsWithoutDirection,\n compareOptionsWithoutDirection\n )\n }\n\n /**\n * Checks if the index matches the provided direction.\n */\n matchesDirection(direction: OrderByDirection): boolean {\n return this.compareOptions.direction === direction\n }\n\n getStats(): IndexStats {\n return {\n entryCount: this.keyCount,\n lookupCount: this.lookupCount,\n averageLookupTime:\n this.lookupCount > 0 ? this.totalLookupTime / this.lookupCount : 0,\n lastUpdated: this.lastUpdated,\n }\n }\n\n // Protected methods for subclasses\n protected abstract initialize(options?: any): void\n\n protected evaluateIndexExpression(item: any): any {\n const evaluator = compileSingleRowExpression(this.expression)\n return evaluator(item as Record<string, unknown>)\n }\n\n protected trackLookup(startTime: number): void {\n const duration = performance.now() - startTime\n this.lookupCount++\n this.totalLookupTime += duration\n }\n\n protected updateTimestamp(): void {\n this.lastUpdated = new Date()\n }\n}\n\n/**\n * Type for index constructor\n */\nexport type IndexConstructor<TKey extends string | number = string | number> =\n new (\n id: number,\n expression: BasicExpression,\n name?: string,\n options?: any\n ) => BaseIndex<TKey>\n\n/**\n * Index resolver can be either a class constructor or async loader\n */\nexport type IndexResolver<TKey extends string | number = string | number> =\n | IndexConstructor<TKey>\n | (() => 
Promise<IndexConstructor<TKey>>)\n"],"names":["comparisonFunctions","DEFAULT_COMPARE_OPTIONS","deepEquals","compileSingleRowExpression"],"mappings":";;;;;AAUO,MAAM,iBAAiBA,UAAAA;AAiEvB,MAAe,UAEtB;AAAA,EAWE,YACE,IACA,YACA,MACA,SACA;AAVF,SAAU,cAAc;AACxB,SAAU,kBAAkB;AAC5B,SAAU,kCAAkB,KAAA;AAS1B,SAAK,KAAK;AACV,SAAK,aAAa;AAClB,SAAK,iBAAiBC,MAAAA;AACtB,SAAK,OAAO;AACZ,SAAK,WAAW,OAAO;AAAA,EACzB;AAAA;AAAA,EA8BA,SAAS,WAAoC;AAC3C,WAAO,KAAK,oBAAoB,IAAI,SAAS;AAAA,EAC/C;AAAA,EAEA,aAAa,WAAmC;AAC9C,WACE,KAAK,WAAW,SAAS,SACzB,KAAK,WAAW,KAAK,WAAW,UAAU,UAC1C,KAAK,WAAW,KAAK,MAAM,CAAC,MAAM,MAAM,SAAS,UAAU,CAAC,CAAC;AAAA,EAEjE;AAAA;AAAA;AAAA;AAAA;AAAA,EAMA,sBAAsB,gBAAyC;AAC7D,UAAM,qCAAqC;AAAA,MACzC,GAAG,KAAK;AAAA,MACR,WAAW;AAAA,IAAA;AAEb,UAAM,iCAAiC;AAAA,MACrC,GAAG;AAAA,MACH,WAAW;AAAA,IAAA;AAGb,WAAOC,MAAAA;AAAAA,MACL;AAAA,MACA;AAAA,IAAA;AAAA,EAEJ;AAAA;AAAA;AAAA;AAAA,EAKA,iBAAiB,WAAsC;AACrD,WAAO,KAAK,eAAe,cAAc;AAAA,EAC3C;AAAA,EAEA,WAAuB;AACrB,WAAO;AAAA,MACL,YAAY,KAAK;AAAA,MACjB,aAAa,KAAK;AAAA,MAClB,mBACE,KAAK,cAAc,IAAI,KAAK,kBAAkB,KAAK,cAAc;AAAA,MACnE,aAAa,KAAK;AAAA,IAAA;AAAA,EAEtB;AAAA,EAKU,wBAAwB,MAAgB;AAChD,UAAM,YAAYC,WAAAA,2BAA2B,KAAK,UAAU;AAC5D,WAAO,UAAU,IAA+B;AAAA,EAClD;AAAA,EAEU,YAAY,WAAyB;AAC7C,UAAM,WAAW,YAAY,IAAA,IAAQ;AACrC,SAAK;AACL,SAAK,mBAAmB;AAAA,EAC1B;AAAA,EAEU,kBAAwB;AAChC,SAAK,kCAAkB,KAAA;AAAA,EACzB;AACF;;;"}
1
+ {"version":3,"file":"base-index.cjs","sources":["../../../src/indexes/base-index.ts"],"sourcesContent":["import { compileSingleRowExpression } from \"../query/compiler/evaluators.js\"\nimport { comparisonFunctions } from \"../query/builder/functions.js\"\nimport { DEFAULT_COMPARE_OPTIONS, deepEquals } from \"../utils.js\"\nimport type { RangeQueryOptions } from \"./btree-index.js\"\nimport type { CompareOptions } from \"../query/builder/types.js\"\nimport type { BasicExpression, OrderByDirection } from \"../query/ir.js\"\n\n/**\n * Operations that indexes can support, imported from available comparison functions\n */\nexport const IndexOperation = comparisonFunctions\n\n/**\n * Type for index operation values\n */\nexport type IndexOperation = (typeof comparisonFunctions)[number]\n\n/**\n * Statistics about index usage and performance\n */\nexport interface IndexStats {\n readonly entryCount: number\n readonly lookupCount: number\n readonly averageLookupTime: number\n readonly lastUpdated: Date\n}\n\nexport interface IndexInterface<\n TKey extends string | number = string | number,\n> {\n add: (key: TKey, item: any) => void\n remove: (key: TKey, item: any) => void\n update: (key: TKey, oldItem: any, newItem: any) => void\n\n build: (entries: Iterable<[TKey, any]>) => void\n clear: () => void\n\n lookup: (operation: IndexOperation, value: any) => Set<TKey>\n\n equalityLookup: (value: any) => Set<TKey>\n inArrayLookup: (values: Array<any>) => Set<TKey>\n\n rangeQuery: (options: RangeQueryOptions) => Set<TKey>\n rangeQueryReversed: (options: RangeQueryOptions) => Set<TKey>\n\n take: (\n n: number,\n from?: TKey,\n filterFn?: (key: TKey) => boolean\n ) => Array<TKey>\n takeReversed: (\n n: number,\n from?: TKey,\n filterFn?: (key: TKey) => boolean\n ) => Array<TKey>\n\n get keyCount(): number\n get orderedEntriesArray(): Array<[any, Set<TKey>]>\n get orderedEntriesArrayReversed(): Array<[any, Set<TKey>]>\n\n get indexedKeysSet(): Set<TKey>\n get valueMapData(): Map<any, Set<TKey>>\n\n supports: (operation: IndexOperation) => boolean\n\n matchesField: (fieldPath: Array<string>) => boolean\n matchesCompareOptions: (compareOptions: CompareOptions) => boolean\n matchesDirection: (direction: OrderByDirection) => boolean\n\n getStats: () => IndexStats\n}\n\n/**\n * Base abstract class that all index types extend\n */\nexport abstract class BaseIndex<\n TKey extends string | number = string | number,\n> implements IndexInterface<TKey> {\n public readonly id: number\n public readonly name?: string\n public readonly expression: BasicExpression\n public abstract readonly supportedOperations: Set<IndexOperation>\n\n protected lookupCount = 0\n protected totalLookupTime = 0\n protected lastUpdated = new Date()\n protected compareOptions: CompareOptions\n\n constructor(\n id: number,\n expression: BasicExpression,\n name?: string,\n options?: any\n ) {\n this.id = id\n this.expression = expression\n this.compareOptions = DEFAULT_COMPARE_OPTIONS\n this.name = name\n this.initialize(options)\n }\n\n // Abstract methods that each index type must implement\n abstract add(key: TKey, item: any): void\n abstract remove(key: TKey, item: any): void\n abstract update(key: TKey, oldItem: any, newItem: any): void\n abstract build(entries: Iterable<[TKey, any]>): void\n abstract clear(): void\n abstract lookup(operation: IndexOperation, value: any): Set<TKey>\n abstract take(\n n: number,\n from?: TKey,\n filterFn?: (key: TKey) => boolean\n ): Array<TKey>\n abstract takeReversed(\n n: number,\n from?: TKey,\n 
filterFn?: (key: TKey) => boolean\n ): Array<TKey>\n abstract get keyCount(): number\n abstract equalityLookup(value: any): Set<TKey>\n abstract inArrayLookup(values: Array<any>): Set<TKey>\n abstract rangeQuery(options: RangeQueryOptions): Set<TKey>\n abstract rangeQueryReversed(options: RangeQueryOptions): Set<TKey>\n abstract get orderedEntriesArray(): Array<[any, Set<TKey>]>\n abstract get orderedEntriesArrayReversed(): Array<[any, Set<TKey>]>\n abstract get indexedKeysSet(): Set<TKey>\n abstract get valueMapData(): Map<any, Set<TKey>>\n\n // Common methods\n supports(operation: IndexOperation): boolean {\n return this.supportedOperations.has(operation)\n }\n\n matchesField(fieldPath: Array<string>): boolean {\n return (\n this.expression.type === `ref` &&\n this.expression.path.length === fieldPath.length &&\n this.expression.path.every((part, i) => part === fieldPath[i])\n )\n }\n\n /**\n * Checks if the compare options match the index's compare options.\n * The direction is ignored because the index can be reversed if the direction is different.\n */\n matchesCompareOptions(compareOptions: CompareOptions): boolean {\n const thisCompareOptionsWithoutDirection = {\n ...this.compareOptions,\n direction: undefined,\n }\n const compareOptionsWithoutDirection = {\n ...compareOptions,\n direction: undefined,\n }\n\n return deepEquals(\n thisCompareOptionsWithoutDirection,\n compareOptionsWithoutDirection\n )\n }\n\n /**\n * Checks if the index matches the provided direction.\n */\n matchesDirection(direction: OrderByDirection): boolean {\n return this.compareOptions.direction === direction\n }\n\n getStats(): IndexStats {\n return {\n entryCount: this.keyCount,\n lookupCount: this.lookupCount,\n averageLookupTime:\n this.lookupCount > 0 ? this.totalLookupTime / this.lookupCount : 0,\n lastUpdated: this.lastUpdated,\n }\n }\n\n // Protected methods for subclasses\n protected abstract initialize(options?: any): void\n\n protected evaluateIndexExpression(item: any): any {\n const evaluator = compileSingleRowExpression(this.expression)\n return evaluator(item as Record<string, unknown>)\n }\n\n protected trackLookup(startTime: number): void {\n const duration = performance.now() - startTime\n this.lookupCount++\n this.totalLookupTime += duration\n }\n\n protected updateTimestamp(): void {\n this.lastUpdated = new Date()\n }\n}\n\n/**\n * Type for index constructor\n */\nexport type IndexConstructor<TKey extends string | number = string | number> =\n new (\n id: number,\n expression: BasicExpression,\n name?: string,\n options?: any\n ) => BaseIndex<TKey>\n\n/**\n * Index resolver can be either a class constructor or async loader\n */\nexport type IndexResolver<TKey extends string | number = string | number> =\n | IndexConstructor<TKey>\n | (() => 
Promise<IndexConstructor<TKey>>)\n"],"names":["comparisonFunctions","DEFAULT_COMPARE_OPTIONS","deepEquals","compileSingleRowExpression"],"mappings":";;;;;AAUO,MAAM,iBAAiBA,UAAAA;AAiEvB,MAAe,UAEY;AAAA,EAWhC,YACE,IACA,YACA,MACA,SACA;AAVF,SAAU,cAAc;AACxB,SAAU,kBAAkB;AAC5B,SAAU,kCAAkB,KAAA;AAS1B,SAAK,KAAK;AACV,SAAK,aAAa;AAClB,SAAK,iBAAiBC,MAAAA;AACtB,SAAK,OAAO;AACZ,SAAK,WAAW,OAAO;AAAA,EACzB;AAAA;AAAA,EA8BA,SAAS,WAAoC;AAC3C,WAAO,KAAK,oBAAoB,IAAI,SAAS;AAAA,EAC/C;AAAA,EAEA,aAAa,WAAmC;AAC9C,WACE,KAAK,WAAW,SAAS,SACzB,KAAK,WAAW,KAAK,WAAW,UAAU,UAC1C,KAAK,WAAW,KAAK,MAAM,CAAC,MAAM,MAAM,SAAS,UAAU,CAAC,CAAC;AAAA,EAEjE;AAAA;AAAA;AAAA;AAAA;AAAA,EAMA,sBAAsB,gBAAyC;AAC7D,UAAM,qCAAqC;AAAA,MACzC,GAAG,KAAK;AAAA,MACR,WAAW;AAAA,IAAA;AAEb,UAAM,iCAAiC;AAAA,MACrC,GAAG;AAAA,MACH,WAAW;AAAA,IAAA;AAGb,WAAOC,MAAAA;AAAAA,MACL;AAAA,MACA;AAAA,IAAA;AAAA,EAEJ;AAAA;AAAA;AAAA;AAAA,EAKA,iBAAiB,WAAsC;AACrD,WAAO,KAAK,eAAe,cAAc;AAAA,EAC3C;AAAA,EAEA,WAAuB;AACrB,WAAO;AAAA,MACL,YAAY,KAAK;AAAA,MACjB,aAAa,KAAK;AAAA,MAClB,mBACE,KAAK,cAAc,IAAI,KAAK,kBAAkB,KAAK,cAAc;AAAA,MACnE,aAAa,KAAK;AAAA,IAAA;AAAA,EAEtB;AAAA,EAKU,wBAAwB,MAAgB;AAChD,UAAM,YAAYC,WAAAA,2BAA2B,KAAK,UAAU;AAC5D,WAAO,UAAU,IAA+B;AAAA,EAClD;AAAA,EAEU,YAAY,WAAyB;AAC7C,UAAM,WAAW,YAAY,IAAA,IAAQ;AACrC,SAAK;AACL,SAAK,mBAAmB;AAAA,EAC1B;AAAA,EAEU,kBAAwB;AAChC,SAAK,kCAAkB,KAAA;AAAA,EACzB;AACF;;;"}
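In base-index.cjs.map the diff touches only the embedded src/indexes/base-index.ts source and its mappings, and the source change is a reformat of the class declaration rather than a behavioral edit. Comparing the two `sourcesContent` strings above (both excerpts verbatim, nothing added):

  // 0.5.7
  export abstract class BaseIndex<TKey extends string | number = string | number>
    implements IndexInterface<TKey>
  {

  // 0.5.9
  export abstract class BaseIndex<
    TKey extends string | number = string | number,
  > implements IndexInterface<TKey> {

The IndexInterface contract, the abstract members, and the stats/lookup tracking are identical in both versions.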
@@ -1 +1 @@
1
- {"version":3,"file":"reverse-index.cjs","sources":["../../../src/indexes/reverse-index.ts"],"sourcesContent":["import type { CompareOptions } from \"../query/builder/types\"\nimport type { OrderByDirection } from \"../query/ir\"\nimport type { IndexInterface, IndexOperation, IndexStats } from \"./base-index\"\nimport type { RangeQueryOptions } from \"./btree-index\"\n\nexport class ReverseIndex<TKey extends string | number>\n implements IndexInterface<TKey>\n{\n private originalIndex: IndexInterface<TKey>\n\n constructor(index: IndexInterface<TKey>) {\n this.originalIndex = index\n }\n\n // Define the reversed operations\n\n lookup(operation: IndexOperation, value: any): Set<TKey> {\n const reverseOperation =\n operation === `gt`\n ? `lt`\n : operation === `gte`\n ? `lte`\n : operation === `lt`\n ? `gt`\n : operation === `lte`\n ? `gte`\n : operation\n return this.originalIndex.lookup(reverseOperation, value)\n }\n\n rangeQuery(options: RangeQueryOptions = {}): Set<TKey> {\n return this.originalIndex.rangeQueryReversed(options)\n }\n\n rangeQueryReversed(options: RangeQueryOptions = {}): Set<TKey> {\n return this.originalIndex.rangeQuery(options)\n }\n\n take(n: number, from?: any, filterFn?: (key: TKey) => boolean): Array<TKey> {\n return this.originalIndex.takeReversed(n, from, filterFn)\n }\n\n takeReversed(\n n: number,\n from?: any,\n filterFn?: (key: TKey) => boolean\n ): Array<TKey> {\n return this.originalIndex.take(n, from, filterFn)\n }\n\n get orderedEntriesArray(): Array<[any, Set<TKey>]> {\n return this.originalIndex.orderedEntriesArrayReversed\n }\n\n get orderedEntriesArrayReversed(): Array<[any, Set<TKey>]> {\n return this.originalIndex.orderedEntriesArray\n }\n\n // All operations below delegate to the original index\n\n supports(operation: IndexOperation): boolean {\n return this.originalIndex.supports(operation)\n }\n\n matchesField(fieldPath: Array<string>): boolean {\n return this.originalIndex.matchesField(fieldPath)\n }\n\n matchesCompareOptions(compareOptions: CompareOptions): boolean {\n return this.originalIndex.matchesCompareOptions(compareOptions)\n }\n\n matchesDirection(direction: OrderByDirection): boolean {\n return this.originalIndex.matchesDirection(direction)\n }\n\n getStats(): IndexStats {\n return this.originalIndex.getStats()\n }\n\n add(key: TKey, item: any): void {\n this.originalIndex.add(key, item)\n }\n\n remove(key: TKey, item: any): void {\n this.originalIndex.remove(key, item)\n }\n\n update(key: TKey, oldItem: any, newItem: any): void {\n this.originalIndex.update(key, oldItem, newItem)\n }\n\n build(entries: Iterable<[TKey, any]>): void {\n this.originalIndex.build(entries)\n }\n\n clear(): void {\n this.originalIndex.clear()\n }\n\n get keyCount(): number {\n return this.originalIndex.keyCount\n }\n\n equalityLookup(value: any): Set<TKey> {\n return this.originalIndex.equalityLookup(value)\n }\n\n inArrayLookup(values: Array<any>): Set<TKey> {\n return this.originalIndex.inArrayLookup(values)\n }\n\n get indexedKeysSet(): Set<TKey> {\n return this.originalIndex.indexedKeysSet\n }\n\n get valueMapData(): Map<any, Set<TKey>> {\n return this.originalIndex.valueMapData\n 
}\n}\n"],"names":[],"mappings":";;AAKO,MAAM,aAEb;AAAA,EAGE,YAAY,OAA6B;AACvC,SAAK,gBAAgB;AAAA,EACvB;AAAA;AAAA,EAIA,OAAO,WAA2B,OAAuB;AACvD,UAAM,mBACJ,cAAc,OACV,OACA,cAAc,QACZ,QACA,cAAc,OACZ,OACA,cAAc,QACZ,QACA;AACZ,WAAO,KAAK,cAAc,OAAO,kBAAkB,KAAK;AAAA,EAC1D;AAAA,EAEA,WAAW,UAA6B,IAAe;AACrD,WAAO,KAAK,cAAc,mBAAmB,OAAO;AAAA,EACtD;AAAA,EAEA,mBAAmB,UAA6B,IAAe;AAC7D,WAAO,KAAK,cAAc,WAAW,OAAO;AAAA,EAC9C;AAAA,EAEA,KAAK,GAAW,MAAY,UAAgD;AAC1E,WAAO,KAAK,cAAc,aAAa,GAAG,MAAM,QAAQ;AAAA,EAC1D;AAAA,EAEA,aACE,GACA,MACA,UACa;AACb,WAAO,KAAK,cAAc,KAAK,GAAG,MAAM,QAAQ;AAAA,EAClD;AAAA,EAEA,IAAI,sBAA+C;AACjD,WAAO,KAAK,cAAc;AAAA,EAC5B;AAAA,EAEA,IAAI,8BAAuD;AACzD,WAAO,KAAK,cAAc;AAAA,EAC5B;AAAA;AAAA,EAIA,SAAS,WAAoC;AAC3C,WAAO,KAAK,cAAc,SAAS,SAAS;AAAA,EAC9C;AAAA,EAEA,aAAa,WAAmC;AAC9C,WAAO,KAAK,cAAc,aAAa,SAAS;AAAA,EAClD;AAAA,EAEA,sBAAsB,gBAAyC;AAC7D,WAAO,KAAK,cAAc,sBAAsB,cAAc;AAAA,EAChE;AAAA,EAEA,iBAAiB,WAAsC;AACrD,WAAO,KAAK,cAAc,iBAAiB,SAAS;AAAA,EACtD;AAAA,EAEA,WAAuB;AACrB,WAAO,KAAK,cAAc,SAAA;AAAA,EAC5B;AAAA,EAEA,IAAI,KAAW,MAAiB;AAC9B,SAAK,cAAc,IAAI,KAAK,IAAI;AAAA,EAClC;AAAA,EAEA,OAAO,KAAW,MAAiB;AACjC,SAAK,cAAc,OAAO,KAAK,IAAI;AAAA,EACrC;AAAA,EAEA,OAAO,KAAW,SAAc,SAAoB;AAClD,SAAK,cAAc,OAAO,KAAK,SAAS,OAAO;AAAA,EACjD;AAAA,EAEA,MAAM,SAAsC;AAC1C,SAAK,cAAc,MAAM,OAAO;AAAA,EAClC;AAAA,EAEA,QAAc;AACZ,SAAK,cAAc,MAAA;AAAA,EACrB;AAAA,EAEA,IAAI,WAAmB;AACrB,WAAO,KAAK,cAAc;AAAA,EAC5B;AAAA,EAEA,eAAe,OAAuB;AACpC,WAAO,KAAK,cAAc,eAAe,KAAK;AAAA,EAChD;AAAA,EAEA,cAAc,QAA+B;AAC3C,WAAO,KAAK,cAAc,cAAc,MAAM;AAAA,EAChD;AAAA,EAEA,IAAI,iBAA4B;AAC9B,WAAO,KAAK,cAAc;AAAA,EAC5B;AAAA,EAEA,IAAI,eAAoC;AACtC,WAAO,KAAK,cAAc;AAAA,EAC5B;AACF;;"}
1
+ {"version":3,"file":"reverse-index.cjs","sources":["../../../src/indexes/reverse-index.ts"],"sourcesContent":["import type { CompareOptions } from \"../query/builder/types\"\nimport type { OrderByDirection } from \"../query/ir\"\nimport type { IndexInterface, IndexOperation, IndexStats } from \"./base-index\"\nimport type { RangeQueryOptions } from \"./btree-index\"\n\nexport class ReverseIndex<\n TKey extends string | number,\n> implements IndexInterface<TKey> {\n private originalIndex: IndexInterface<TKey>\n\n constructor(index: IndexInterface<TKey>) {\n this.originalIndex = index\n }\n\n // Define the reversed operations\n\n lookup(operation: IndexOperation, value: any): Set<TKey> {\n const reverseOperation =\n operation === `gt`\n ? `lt`\n : operation === `gte`\n ? `lte`\n : operation === `lt`\n ? `gt`\n : operation === `lte`\n ? `gte`\n : operation\n return this.originalIndex.lookup(reverseOperation, value)\n }\n\n rangeQuery(options: RangeQueryOptions = {}): Set<TKey> {\n return this.originalIndex.rangeQueryReversed(options)\n }\n\n rangeQueryReversed(options: RangeQueryOptions = {}): Set<TKey> {\n return this.originalIndex.rangeQuery(options)\n }\n\n take(n: number, from?: any, filterFn?: (key: TKey) => boolean): Array<TKey> {\n return this.originalIndex.takeReversed(n, from, filterFn)\n }\n\n takeReversed(\n n: number,\n from?: any,\n filterFn?: (key: TKey) => boolean\n ): Array<TKey> {\n return this.originalIndex.take(n, from, filterFn)\n }\n\n get orderedEntriesArray(): Array<[any, Set<TKey>]> {\n return this.originalIndex.orderedEntriesArrayReversed\n }\n\n get orderedEntriesArrayReversed(): Array<[any, Set<TKey>]> {\n return this.originalIndex.orderedEntriesArray\n }\n\n // All operations below delegate to the original index\n\n supports(operation: IndexOperation): boolean {\n return this.originalIndex.supports(operation)\n }\n\n matchesField(fieldPath: Array<string>): boolean {\n return this.originalIndex.matchesField(fieldPath)\n }\n\n matchesCompareOptions(compareOptions: CompareOptions): boolean {\n return this.originalIndex.matchesCompareOptions(compareOptions)\n }\n\n matchesDirection(direction: OrderByDirection): boolean {\n return this.originalIndex.matchesDirection(direction)\n }\n\n getStats(): IndexStats {\n return this.originalIndex.getStats()\n }\n\n add(key: TKey, item: any): void {\n this.originalIndex.add(key, item)\n }\n\n remove(key: TKey, item: any): void {\n this.originalIndex.remove(key, item)\n }\n\n update(key: TKey, oldItem: any, newItem: any): void {\n this.originalIndex.update(key, oldItem, newItem)\n }\n\n build(entries: Iterable<[TKey, any]>): void {\n this.originalIndex.build(entries)\n }\n\n clear(): void {\n this.originalIndex.clear()\n }\n\n get keyCount(): number {\n return this.originalIndex.keyCount\n }\n\n equalityLookup(value: any): Set<TKey> {\n return this.originalIndex.equalityLookup(value)\n }\n\n inArrayLookup(values: Array<any>): Set<TKey> {\n return this.originalIndex.inArrayLookup(values)\n }\n\n get indexedKeysSet(): Set<TKey> {\n return this.originalIndex.indexedKeysSet\n }\n\n get valueMapData(): Map<any, Set<TKey>> {\n return this.originalIndex.valueMapData\n 
}\n}\n"],"names":[],"mappings":";;AAKO,MAAM,aAEqB;AAAA,EAGhC,YAAY,OAA6B;AACvC,SAAK,gBAAgB;AAAA,EACvB;AAAA;AAAA,EAIA,OAAO,WAA2B,OAAuB;AACvD,UAAM,mBACJ,cAAc,OACV,OACA,cAAc,QACZ,QACA,cAAc,OACZ,OACA,cAAc,QACZ,QACA;AACZ,WAAO,KAAK,cAAc,OAAO,kBAAkB,KAAK;AAAA,EAC1D;AAAA,EAEA,WAAW,UAA6B,IAAe;AACrD,WAAO,KAAK,cAAc,mBAAmB,OAAO;AAAA,EACtD;AAAA,EAEA,mBAAmB,UAA6B,IAAe;AAC7D,WAAO,KAAK,cAAc,WAAW,OAAO;AAAA,EAC9C;AAAA,EAEA,KAAK,GAAW,MAAY,UAAgD;AAC1E,WAAO,KAAK,cAAc,aAAa,GAAG,MAAM,QAAQ;AAAA,EAC1D;AAAA,EAEA,aACE,GACA,MACA,UACa;AACb,WAAO,KAAK,cAAc,KAAK,GAAG,MAAM,QAAQ;AAAA,EAClD;AAAA,EAEA,IAAI,sBAA+C;AACjD,WAAO,KAAK,cAAc;AAAA,EAC5B;AAAA,EAEA,IAAI,8BAAuD;AACzD,WAAO,KAAK,cAAc;AAAA,EAC5B;AAAA;AAAA,EAIA,SAAS,WAAoC;AAC3C,WAAO,KAAK,cAAc,SAAS,SAAS;AAAA,EAC9C;AAAA,EAEA,aAAa,WAAmC;AAC9C,WAAO,KAAK,cAAc,aAAa,SAAS;AAAA,EAClD;AAAA,EAEA,sBAAsB,gBAAyC;AAC7D,WAAO,KAAK,cAAc,sBAAsB,cAAc;AAAA,EAChE;AAAA,EAEA,iBAAiB,WAAsC;AACrD,WAAO,KAAK,cAAc,iBAAiB,SAAS;AAAA,EACtD;AAAA,EAEA,WAAuB;AACrB,WAAO,KAAK,cAAc,SAAA;AAAA,EAC5B;AAAA,EAEA,IAAI,KAAW,MAAiB;AAC9B,SAAK,cAAc,IAAI,KAAK,IAAI;AAAA,EAClC;AAAA,EAEA,OAAO,KAAW,MAAiB;AACjC,SAAK,cAAc,OAAO,KAAK,IAAI;AAAA,EACrC;AAAA,EAEA,OAAO,KAAW,SAAc,SAAoB;AAClD,SAAK,cAAc,OAAO,KAAK,SAAS,OAAO;AAAA,EACjD;AAAA,EAEA,MAAM,SAAsC;AAC1C,SAAK,cAAc,MAAM,OAAO;AAAA,EAClC;AAAA,EAEA,QAAc;AACZ,SAAK,cAAc,MAAA;AAAA,EACrB;AAAA,EAEA,IAAI,WAAmB;AACrB,WAAO,KAAK,cAAc;AAAA,EAC5B;AAAA,EAEA,eAAe,OAAuB;AACpC,WAAO,KAAK,cAAc,eAAe,KAAK;AAAA,EAChD;AAAA,EAEA,cAAc,QAA+B;AAC3C,WAAO,KAAK,cAAc,cAAc,MAAM;AAAA,EAChD;AAAA,EAEA,IAAI,iBAA4B;AAC9B,WAAO,KAAK,cAAc;AAAA,EAC5B;AAAA,EAEA,IAAI,eAAoC;AACtC,WAAO,KAAK,cAAc;AAAA,EAC5B;AACF;;"}
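reverse-index.cjs.map follows the same pattern: src/indexes/reverse-index.ts still wraps another index and serves descending access by swapping gt/gte with lt/lte and delegating take/rangeQuery to takeReversed/rangeQueryReversed; the only textual change is how the class header is wrapped. Verbatim from the two `sourcesContent` strings above:

  // 0.5.7
  export class ReverseIndex<TKey extends string | number>
    implements IndexInterface<TKey>
  {

  // 0.5.9
  export class ReverseIndex<
    TKey extends string | number,
  > implements IndexInterface<TKey> {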
@@ -1 +1 @@
1
- {"version":3,"file":"local-only.cjs","sources":["../../src/local-only.ts"],"sourcesContent":["import type {\n BaseCollectionConfig,\n CollectionConfig,\n DeleteMutationFnParams,\n InferSchemaOutput,\n InsertMutationFnParams,\n OperationType,\n PendingMutation,\n SyncConfig,\n UpdateMutationFnParams,\n UtilsRecord,\n} from \"./types\"\nimport type { Collection } from \"./collection/index\"\nimport type { StandardSchemaV1 } from \"@standard-schema/spec\"\n\n/**\n * Configuration interface for Local-only collection options\n * @template T - The type of items in the collection\n * @template TSchema - The schema type for validation\n * @template TKey - The type of the key returned by `getKey`\n */\nexport interface LocalOnlyCollectionConfig<\n T extends object = object,\n TSchema extends StandardSchemaV1 = never,\n TKey extends string | number = string | number,\n> extends Omit<\n BaseCollectionConfig<T, TKey, TSchema, LocalOnlyCollectionUtils>,\n `gcTime` | `startSync`\n > {\n /**\n * Optional initial data to populate the collection with on creation\n * This data will be applied during the initial sync process\n */\n initialData?: Array<T>\n}\n\n/**\n * Local-only collection utilities type\n */\nexport interface LocalOnlyCollectionUtils extends UtilsRecord {\n /**\n * Accepts mutations from a transaction that belong to this collection and persists them.\n * This should be called in your transaction's mutationFn to persist local-only data.\n *\n * @param transaction - The transaction containing mutations to accept\n * @example\n * const localData = createCollection(localOnlyCollectionOptions({...}))\n *\n * const tx = createTransaction({\n * mutationFn: async ({ transaction }) => {\n * // Make API call first\n * await api.save(...)\n * // Then persist local-only mutations after success\n * localData.utils.acceptMutations(transaction)\n * }\n * })\n */\n acceptMutations: (transaction: {\n mutations: Array<PendingMutation<Record<string, unknown>>>\n }) => void\n}\n\ntype LocalOnlyCollectionOptionsResult<\n T extends object,\n TKey extends string | number,\n TSchema extends StandardSchemaV1 | never = never,\n> = CollectionConfig<T, TKey, TSchema> & {\n utils: LocalOnlyCollectionUtils\n}\n\n/**\n * Creates Local-only collection options for use with a standard Collection\n *\n * This is an in-memory collection that doesn't sync with external sources but uses a loopback sync config\n * that immediately \"syncs\" all optimistic changes to the collection, making them permanent.\n * Perfect for local-only data that doesn't need persistence or external synchronization.\n *\n * **Using with Manual Transactions:**\n *\n * For manual transactions, you must call `utils.acceptMutations()` in your transaction's `mutationFn`\n * to persist changes made during `tx.mutate()`. 
This is necessary because local-only collections\n * don't participate in the standard mutation handler flow for manual transactions.\n *\n * @template T - The schema type if a schema is provided, otherwise the type of items in the collection\n * @template TKey - The type of the key returned by getKey\n * @param config - Configuration options for the Local-only collection\n * @returns Collection options with utilities including acceptMutations\n *\n * @example\n * // Basic local-only collection\n * const collection = createCollection(\n * localOnlyCollectionOptions({\n * getKey: (item) => item.id,\n * })\n * )\n *\n * @example\n * // Local-only collection with initial data\n * const collection = createCollection(\n * localOnlyCollectionOptions({\n * getKey: (item) => item.id,\n * initialData: [\n * { id: 1, name: 'Item 1' },\n * { id: 2, name: 'Item 2' },\n * ],\n * })\n * )\n *\n * @example\n * // Local-only collection with mutation handlers\n * const collection = createCollection(\n * localOnlyCollectionOptions({\n * getKey: (item) => item.id,\n * onInsert: async ({ transaction }) => {\n * console.log('Item inserted:', transaction.mutations[0].modified)\n * // Custom logic after insert\n * },\n * })\n * )\n *\n * @example\n * // Using with manual transactions\n * const localData = createCollection(\n * localOnlyCollectionOptions({\n * getKey: (item) => item.id,\n * })\n * )\n *\n * const tx = createTransaction({\n * mutationFn: async ({ transaction }) => {\n * // Use local data in API call\n * const localMutations = transaction.mutations.filter(m => m.collection === localData)\n * await api.save({ metadata: localMutations[0]?.modified })\n *\n * // Persist local-only mutations after API success\n * localData.utils.acceptMutations(transaction)\n * }\n * })\n *\n * tx.mutate(() => {\n * localData.insert({ id: 1, data: 'metadata' })\n * apiCollection.insert({ id: 2, data: 'main data' })\n * })\n *\n * await tx.commit()\n */\n\n// Overload for when schema is provided\nexport function localOnlyCollectionOptions<\n T extends StandardSchemaV1,\n TKey extends string | number = string | number,\n>(\n config: LocalOnlyCollectionConfig<InferSchemaOutput<T>, T, TKey> & {\n schema: T\n }\n): LocalOnlyCollectionOptionsResult<InferSchemaOutput<T>, TKey, T> & {\n schema: T\n}\n\n// Overload for when no schema is provided\n// the type T needs to be passed explicitly unless it can be inferred from the getKey function in the config\nexport function localOnlyCollectionOptions<\n T extends object,\n TKey extends string | number = string | number,\n>(\n config: LocalOnlyCollectionConfig<T, never, TKey> & {\n schema?: never // prohibit schema\n }\n): LocalOnlyCollectionOptionsResult<T, TKey> & {\n schema?: never // no schema in the result\n}\n\nexport function localOnlyCollectionOptions<\n T extends object = object,\n TSchema extends StandardSchemaV1 = never,\n TKey extends string | number = string | number,\n>(\n config: LocalOnlyCollectionConfig<T, TSchema, TKey>\n): LocalOnlyCollectionOptionsResult<T, TKey, TSchema> & {\n schema?: StandardSchemaV1\n} {\n const { initialData, onInsert, onUpdate, onDelete, ...restConfig } = config\n\n // Create the sync configuration with transaction confirmation capability\n const syncResult = createLocalOnlySync<T, TKey>(initialData)\n\n /**\n * Create wrapper handlers that call user handlers first, then confirm transactions\n * Wraps the user's onInsert handler to also confirm the transaction immediately\n */\n const wrappedOnInsert = async (\n params: 
InsertMutationFnParams<T, TKey, LocalOnlyCollectionUtils>\n ) => {\n // Call user handler first if provided\n let handlerResult\n if (onInsert) {\n handlerResult = (await onInsert(params)) ?? {}\n }\n\n // Then synchronously confirm the transaction by looping through mutations\n syncResult.confirmOperationsSync(params.transaction.mutations)\n\n return handlerResult\n }\n\n /**\n * Wrapper for onUpdate handler that also confirms the transaction immediately\n */\n const wrappedOnUpdate = async (\n params: UpdateMutationFnParams<T, TKey, LocalOnlyCollectionUtils>\n ) => {\n // Call user handler first if provided\n let handlerResult\n if (onUpdate) {\n handlerResult = (await onUpdate(params)) ?? {}\n }\n\n // Then synchronously confirm the transaction by looping through mutations\n syncResult.confirmOperationsSync(params.transaction.mutations)\n\n return handlerResult\n }\n\n /**\n * Wrapper for onDelete handler that also confirms the transaction immediately\n */\n const wrappedOnDelete = async (\n params: DeleteMutationFnParams<T, TKey, LocalOnlyCollectionUtils>\n ) => {\n // Call user handler first if provided\n let handlerResult\n if (onDelete) {\n handlerResult = (await onDelete(params)) ?? {}\n }\n\n // Then synchronously confirm the transaction by looping through mutations\n syncResult.confirmOperationsSync(params.transaction.mutations)\n\n return handlerResult\n }\n\n /**\n * Accepts mutations from a transaction that belong to this collection and persists them\n */\n const acceptMutations = (transaction: {\n mutations: Array<PendingMutation<Record<string, unknown>>>\n }) => {\n // Filter mutations that belong to this collection\n const collectionMutations = transaction.mutations.filter(\n (m) =>\n // eslint-disable-next-line @typescript-eslint/no-unnecessary-condition\n m.collection === syncResult.collection\n )\n\n if (collectionMutations.length === 0) {\n return\n }\n\n // Persist the mutations through sync\n syncResult.confirmOperationsSync(\n collectionMutations as Array<PendingMutation<T>>\n )\n }\n\n return {\n ...restConfig,\n sync: syncResult.sync,\n onInsert: wrappedOnInsert,\n onUpdate: wrappedOnUpdate,\n onDelete: wrappedOnDelete,\n utils: {\n acceptMutations,\n },\n startSync: true,\n gcTime: 0,\n } as LocalOnlyCollectionOptionsResult<T, TKey, TSchema> & {\n schema?: StandardSchemaV1\n }\n}\n\n/**\n * Internal function to create Local-only sync configuration with transaction confirmation\n *\n * This captures the sync functions and provides synchronous confirmation of operations.\n * It creates a loopback sync that immediately confirms all optimistic operations,\n * making them permanent in the collection.\n *\n * @param initialData - Optional array of initial items to populate the collection\n * @returns Object with sync configuration and confirmOperationsSync function\n */\nfunction createLocalOnlySync<T extends object, TKey extends string | number>(\n initialData?: Array<T>\n) {\n // Capture sync functions and collection for transaction confirmation\n let syncBegin: (() => void) | null = null\n let syncWrite: ((message: { type: OperationType; value: T }) => void) | null =\n null\n let syncCommit: (() => void) | null = null\n let collection: Collection<T, TKey, LocalOnlyCollectionUtils> | null = null\n\n const sync: SyncConfig<T, TKey> = {\n /**\n * Sync function that captures sync parameters and applies initial data\n * @param params - Sync parameters containing begin, write, and commit functions\n * @returns Unsubscribe function (empty since no ongoing sync is needed)\n 
*/\n sync: (params) => {\n const { begin, write, commit, markReady } = params\n\n // Capture sync functions and collection for later use\n syncBegin = begin\n syncWrite = write\n syncCommit = commit\n collection = params.collection\n\n // Apply initial data if provided\n if (initialData && initialData.length > 0) {\n begin()\n initialData.forEach((item) => {\n write({\n type: `insert`,\n value: item,\n })\n })\n commit()\n }\n\n // Mark collection as ready since local-only collections are immediately ready\n markReady()\n\n // Return empty unsubscribe function - no ongoing sync needed\n return () => {}\n },\n /**\n * Get sync metadata - returns empty object for local-only collections\n * @returns Empty metadata object\n */\n getSyncMetadata: () => ({}),\n }\n\n /**\n * Synchronously confirms optimistic operations by immediately writing through sync\n *\n * This loops through transaction mutations and applies them to move from optimistic to synced state.\n * It's called after user handlers to make optimistic changes permanent.\n *\n * @param mutations - Array of mutation objects from the transaction\n */\n const confirmOperationsSync = (mutations: Array<PendingMutation<T>>) => {\n if (!syncBegin || !syncWrite || !syncCommit) {\n return // Sync not initialized yet, which is fine\n }\n\n // Immediately write back through sync interface\n syncBegin()\n mutations.forEach((mutation) => {\n if (syncWrite) {\n syncWrite({\n type: mutation.type,\n value: mutation.modified,\n })\n }\n })\n syncCommit()\n }\n\n return {\n sync,\n confirmOperationsSync,\n collection,\n }\n}\n"],"names":[],"mappings":";;AA4KO,SAAS,2BAKd,QAGA;AACA,QAAM,EAAE,aAAa,UAAU,UAAU,UAAU,GAAG,eAAe;AAGrE,QAAM,aAAa,oBAA6B,WAAW;AAM3D,QAAM,kBAAkB,OACtB,WACG;AAEH,QAAI;AACJ,QAAI,UAAU;AACZ,sBAAiB,MAAM,SAAS,MAAM,KAAM,CAAA;AAAA,IAC9C;AAGA,eAAW,sBAAsB,OAAO,YAAY,SAAS;AAE7D,WAAO;AAAA,EACT;AAKA,QAAM,kBAAkB,OACtB,WACG;AAEH,QAAI;AACJ,QAAI,UAAU;AACZ,sBAAiB,MAAM,SAAS,MAAM,KAAM,CAAA;AAAA,IAC9C;AAGA,eAAW,sBAAsB,OAAO,YAAY,SAAS;AAE7D,WAAO;AAAA,EACT;AAKA,QAAM,kBAAkB,OACtB,WACG;AAEH,QAAI;AACJ,QAAI,UAAU;AACZ,sBAAiB,MAAM,SAAS,MAAM,KAAM,CAAA;AAAA,IAC9C;AAGA,eAAW,sBAAsB,OAAO,YAAY,SAAS;AAE7D,WAAO;AAAA,EACT;AAKA,QAAM,kBAAkB,CAAC,gBAEnB;AAEJ,UAAM,sBAAsB,YAAY,UAAU;AAAA,MAChD,CAAC;AAAA;AAAA,QAEC,EAAE,eAAe,WAAW;AAAA;AAAA,IAAA;AAGhC,QAAI,oBAAoB,WAAW,GAAG;AACpC;AAAA,IACF;AAGA,eAAW;AAAA,MACT;AAAA,IAAA;AAAA,EAEJ;AAEA,SAAO;AAAA,IACL,GAAG;AAAA,IACH,MAAM,WAAW;AAAA,IACjB,UAAU;AAAA,IACV,UAAU;AAAA,IACV,UAAU;AAAA,IACV,OAAO;AAAA,MACL;AAAA,IAAA;AAAA,IAEF,WAAW;AAAA,IACX,QAAQ;AAAA,EAAA;AAIZ;AAYA,SAAS,oBACP,aACA;AAEA,MAAI,YAAiC;AACrC,MAAI,YACF;AACF,MAAI,aAAkC;AACtC,MAAI,aAAmE;AAEvE,QAAM,OAA4B;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,IAMhC,MAAM,CAAC,WAAW;AAChB,YAAM,EAAE,OAAO,OAAO,QAAQ,cAAc;AAG5C,kBAAY;AACZ,kBAAY;AACZ,mBAAa;AACb,mBAAa,OAAO;AAGpB,UAAI,eAAe,YAAY,SAAS,GAAG;AACzC,cAAA;AACA,oBAAY,QAAQ,CAAC,SAAS;AAC5B,gBAAM;AAAA,YACJ,MAAM;AAAA,YACN,OAAO;AAAA,UAAA,CACR;AAAA,QACH,CAAC;AACD,eAAA;AAAA,MACF;AAGA,gBAAA;AAGA,aAAO,MAAM;AAAA,MAAC;AAAA,IAChB;AAAA;AAAA;AAAA;AAAA;AAAA,IAKA,iBAAiB,OAAO,CAAA;AAAA,EAAC;AAW3B,QAAM,wBAAwB,CAAC,cAAyC;AACtE,QAAI,CAAC,aAAa,CAAC,aAAa,CAAC,YAAY;AAC3C;AAAA,IACF;AAGA,cAAA;AACA,cAAU,QAAQ,CAAC,aAAa;AAC9B,UAAI,WAAW;AACb,kBAAU;AAAA,UACR,MAAM,SAAS;AAAA,UACf,OAAO,SAAS;AAAA,QAAA,CACjB;AAAA,MACH;AAAA,IACF,CAAC;AACD,eAAA;AAAA,EACF;AAEA,SAAO;AAAA,IACL;AAAA,IACA;AAAA,IACA;AAAA,EAAA;AAEJ;;"}
1
+ {"version":3,"file":"local-only.cjs","sources":["../../src/local-only.ts"],"sourcesContent":["import type {\n BaseCollectionConfig,\n CollectionConfig,\n DeleteMutationFnParams,\n InferSchemaOutput,\n InsertMutationFnParams,\n OperationType,\n PendingMutation,\n SyncConfig,\n UpdateMutationFnParams,\n UtilsRecord,\n} from \"./types\"\nimport type { Collection } from \"./collection/index\"\nimport type { StandardSchemaV1 } from \"@standard-schema/spec\"\n\n/**\n * Configuration interface for Local-only collection options\n * @template T - The type of items in the collection\n * @template TSchema - The schema type for validation\n * @template TKey - The type of the key returned by `getKey`\n */\nexport interface LocalOnlyCollectionConfig<\n T extends object = object,\n TSchema extends StandardSchemaV1 = never,\n TKey extends string | number = string | number,\n> extends Omit<\n BaseCollectionConfig<T, TKey, TSchema, LocalOnlyCollectionUtils>,\n `gcTime` | `startSync`\n> {\n /**\n * Optional initial data to populate the collection with on creation\n * This data will be applied during the initial sync process\n */\n initialData?: Array<T>\n}\n\n/**\n * Local-only collection utilities type\n */\nexport interface LocalOnlyCollectionUtils extends UtilsRecord {\n /**\n * Accepts mutations from a transaction that belong to this collection and persists them.\n * This should be called in your transaction's mutationFn to persist local-only data.\n *\n * @param transaction - The transaction containing mutations to accept\n * @example\n * const localData = createCollection(localOnlyCollectionOptions({...}))\n *\n * const tx = createTransaction({\n * mutationFn: async ({ transaction }) => {\n * // Make API call first\n * await api.save(...)\n * // Then persist local-only mutations after success\n * localData.utils.acceptMutations(transaction)\n * }\n * })\n */\n acceptMutations: (transaction: {\n mutations: Array<PendingMutation<Record<string, unknown>>>\n }) => void\n}\n\ntype LocalOnlyCollectionOptionsResult<\n T extends object,\n TKey extends string | number,\n TSchema extends StandardSchemaV1 | never = never,\n> = CollectionConfig<T, TKey, TSchema> & {\n utils: LocalOnlyCollectionUtils\n}\n\n/**\n * Creates Local-only collection options for use with a standard Collection\n *\n * This is an in-memory collection that doesn't sync with external sources but uses a loopback sync config\n * that immediately \"syncs\" all optimistic changes to the collection, making them permanent.\n * Perfect for local-only data that doesn't need persistence or external synchronization.\n *\n * **Using with Manual Transactions:**\n *\n * For manual transactions, you must call `utils.acceptMutations()` in your transaction's `mutationFn`\n * to persist changes made during `tx.mutate()`. 
This is necessary because local-only collections\n * don't participate in the standard mutation handler flow for manual transactions.\n *\n * @template T - The schema type if a schema is provided, otherwise the type of items in the collection\n * @template TKey - The type of the key returned by getKey\n * @param config - Configuration options for the Local-only collection\n * @returns Collection options with utilities including acceptMutations\n *\n * @example\n * // Basic local-only collection\n * const collection = createCollection(\n * localOnlyCollectionOptions({\n * getKey: (item) => item.id,\n * })\n * )\n *\n * @example\n * // Local-only collection with initial data\n * const collection = createCollection(\n * localOnlyCollectionOptions({\n * getKey: (item) => item.id,\n * initialData: [\n * { id: 1, name: 'Item 1' },\n * { id: 2, name: 'Item 2' },\n * ],\n * })\n * )\n *\n * @example\n * // Local-only collection with mutation handlers\n * const collection = createCollection(\n * localOnlyCollectionOptions({\n * getKey: (item) => item.id,\n * onInsert: async ({ transaction }) => {\n * console.log('Item inserted:', transaction.mutations[0].modified)\n * // Custom logic after insert\n * },\n * })\n * )\n *\n * @example\n * // Using with manual transactions\n * const localData = createCollection(\n * localOnlyCollectionOptions({\n * getKey: (item) => item.id,\n * })\n * )\n *\n * const tx = createTransaction({\n * mutationFn: async ({ transaction }) => {\n * // Use local data in API call\n * const localMutations = transaction.mutations.filter(m => m.collection === localData)\n * await api.save({ metadata: localMutations[0]?.modified })\n *\n * // Persist local-only mutations after API success\n * localData.utils.acceptMutations(transaction)\n * }\n * })\n *\n * tx.mutate(() => {\n * localData.insert({ id: 1, data: 'metadata' })\n * apiCollection.insert({ id: 2, data: 'main data' })\n * })\n *\n * await tx.commit()\n */\n\n// Overload for when schema is provided\nexport function localOnlyCollectionOptions<\n T extends StandardSchemaV1,\n TKey extends string | number = string | number,\n>(\n config: LocalOnlyCollectionConfig<InferSchemaOutput<T>, T, TKey> & {\n schema: T\n }\n): LocalOnlyCollectionOptionsResult<InferSchemaOutput<T>, TKey, T> & {\n schema: T\n}\n\n// Overload for when no schema is provided\n// the type T needs to be passed explicitly unless it can be inferred from the getKey function in the config\nexport function localOnlyCollectionOptions<\n T extends object,\n TKey extends string | number = string | number,\n>(\n config: LocalOnlyCollectionConfig<T, never, TKey> & {\n schema?: never // prohibit schema\n }\n): LocalOnlyCollectionOptionsResult<T, TKey> & {\n schema?: never // no schema in the result\n}\n\nexport function localOnlyCollectionOptions<\n T extends object = object,\n TSchema extends StandardSchemaV1 = never,\n TKey extends string | number = string | number,\n>(\n config: LocalOnlyCollectionConfig<T, TSchema, TKey>\n): LocalOnlyCollectionOptionsResult<T, TKey, TSchema> & {\n schema?: StandardSchemaV1\n} {\n const { initialData, onInsert, onUpdate, onDelete, ...restConfig } = config\n\n // Create the sync configuration with transaction confirmation capability\n const syncResult = createLocalOnlySync<T, TKey>(initialData)\n\n /**\n * Create wrapper handlers that call user handlers first, then confirm transactions\n * Wraps the user's onInsert handler to also confirm the transaction immediately\n */\n const wrappedOnInsert = async (\n params: 
InsertMutationFnParams<T, TKey, LocalOnlyCollectionUtils>\n ) => {\n // Call user handler first if provided\n let handlerResult\n if (onInsert) {\n handlerResult = (await onInsert(params)) ?? {}\n }\n\n // Then synchronously confirm the transaction by looping through mutations\n syncResult.confirmOperationsSync(params.transaction.mutations)\n\n return handlerResult\n }\n\n /**\n * Wrapper for onUpdate handler that also confirms the transaction immediately\n */\n const wrappedOnUpdate = async (\n params: UpdateMutationFnParams<T, TKey, LocalOnlyCollectionUtils>\n ) => {\n // Call user handler first if provided\n let handlerResult\n if (onUpdate) {\n handlerResult = (await onUpdate(params)) ?? {}\n }\n\n // Then synchronously confirm the transaction by looping through mutations\n syncResult.confirmOperationsSync(params.transaction.mutations)\n\n return handlerResult\n }\n\n /**\n * Wrapper for onDelete handler that also confirms the transaction immediately\n */\n const wrappedOnDelete = async (\n params: DeleteMutationFnParams<T, TKey, LocalOnlyCollectionUtils>\n ) => {\n // Call user handler first if provided\n let handlerResult\n if (onDelete) {\n handlerResult = (await onDelete(params)) ?? {}\n }\n\n // Then synchronously confirm the transaction by looping through mutations\n syncResult.confirmOperationsSync(params.transaction.mutations)\n\n return handlerResult\n }\n\n /**\n * Accepts mutations from a transaction that belong to this collection and persists them\n */\n const acceptMutations = (transaction: {\n mutations: Array<PendingMutation<Record<string, unknown>>>\n }) => {\n // Filter mutations that belong to this collection\n const collectionMutations = transaction.mutations.filter(\n (m) =>\n // eslint-disable-next-line @typescript-eslint/no-unnecessary-condition\n m.collection === syncResult.collection\n )\n\n if (collectionMutations.length === 0) {\n return\n }\n\n // Persist the mutations through sync\n syncResult.confirmOperationsSync(\n collectionMutations as Array<PendingMutation<T>>\n )\n }\n\n return {\n ...restConfig,\n sync: syncResult.sync,\n onInsert: wrappedOnInsert,\n onUpdate: wrappedOnUpdate,\n onDelete: wrappedOnDelete,\n utils: {\n acceptMutations,\n },\n startSync: true,\n gcTime: 0,\n } as LocalOnlyCollectionOptionsResult<T, TKey, TSchema> & {\n schema?: StandardSchemaV1\n }\n}\n\n/**\n * Internal function to create Local-only sync configuration with transaction confirmation\n *\n * This captures the sync functions and provides synchronous confirmation of operations.\n * It creates a loopback sync that immediately confirms all optimistic operations,\n * making them permanent in the collection.\n *\n * @param initialData - Optional array of initial items to populate the collection\n * @returns Object with sync configuration and confirmOperationsSync function\n */\nfunction createLocalOnlySync<T extends object, TKey extends string | number>(\n initialData?: Array<T>\n) {\n // Capture sync functions and collection for transaction confirmation\n let syncBegin: (() => void) | null = null\n let syncWrite: ((message: { type: OperationType; value: T }) => void) | null =\n null\n let syncCommit: (() => void) | null = null\n let collection: Collection<T, TKey, LocalOnlyCollectionUtils> | null = null\n\n const sync: SyncConfig<T, TKey> = {\n /**\n * Sync function that captures sync parameters and applies initial data\n * @param params - Sync parameters containing begin, write, and commit functions\n * @returns Unsubscribe function (empty since no ongoing sync is needed)\n 
*/\n sync: (params) => {\n const { begin, write, commit, markReady } = params\n\n // Capture sync functions and collection for later use\n syncBegin = begin\n syncWrite = write\n syncCommit = commit\n collection = params.collection\n\n // Apply initial data if provided\n if (initialData && initialData.length > 0) {\n begin()\n initialData.forEach((item) => {\n write({\n type: `insert`,\n value: item,\n })\n })\n commit()\n }\n\n // Mark collection as ready since local-only collections are immediately ready\n markReady()\n\n // Return empty unsubscribe function - no ongoing sync needed\n return () => {}\n },\n /**\n * Get sync metadata - returns empty object for local-only collections\n * @returns Empty metadata object\n */\n getSyncMetadata: () => ({}),\n }\n\n /**\n * Synchronously confirms optimistic operations by immediately writing through sync\n *\n * This loops through transaction mutations and applies them to move from optimistic to synced state.\n * It's called after user handlers to make optimistic changes permanent.\n *\n * @param mutations - Array of mutation objects from the transaction\n */\n const confirmOperationsSync = (mutations: Array<PendingMutation<T>>) => {\n if (!syncBegin || !syncWrite || !syncCommit) {\n return // Sync not initialized yet, which is fine\n }\n\n // Immediately write back through sync interface\n syncBegin()\n mutations.forEach((mutation) => {\n if (syncWrite) {\n syncWrite({\n type: mutation.type,\n value: mutation.modified,\n })\n }\n })\n syncCommit()\n }\n\n return {\n sync,\n confirmOperationsSync,\n collection,\n }\n}\n"],"names":[],"mappings":";;AA4KO,SAAS,2BAKd,QAGA;AACA,QAAM,EAAE,aAAa,UAAU,UAAU,UAAU,GAAG,eAAe;AAGrE,QAAM,aAAa,oBAA6B,WAAW;AAM3D,QAAM,kBAAkB,OACtB,WACG;AAEH,QAAI;AACJ,QAAI,UAAU;AACZ,sBAAiB,MAAM,SAAS,MAAM,KAAM,CAAA;AAAA,IAC9C;AAGA,eAAW,sBAAsB,OAAO,YAAY,SAAS;AAE7D,WAAO;AAAA,EACT;AAKA,QAAM,kBAAkB,OACtB,WACG;AAEH,QAAI;AACJ,QAAI,UAAU;AACZ,sBAAiB,MAAM,SAAS,MAAM,KAAM,CAAA;AAAA,IAC9C;AAGA,eAAW,sBAAsB,OAAO,YAAY,SAAS;AAE7D,WAAO;AAAA,EACT;AAKA,QAAM,kBAAkB,OACtB,WACG;AAEH,QAAI;AACJ,QAAI,UAAU;AACZ,sBAAiB,MAAM,SAAS,MAAM,KAAM,CAAA;AAAA,IAC9C;AAGA,eAAW,sBAAsB,OAAO,YAAY,SAAS;AAE7D,WAAO;AAAA,EACT;AAKA,QAAM,kBAAkB,CAAC,gBAEnB;AAEJ,UAAM,sBAAsB,YAAY,UAAU;AAAA,MAChD,CAAC;AAAA;AAAA,QAEC,EAAE,eAAe,WAAW;AAAA;AAAA,IAAA;AAGhC,QAAI,oBAAoB,WAAW,GAAG;AACpC;AAAA,IACF;AAGA,eAAW;AAAA,MACT;AAAA,IAAA;AAAA,EAEJ;AAEA,SAAO;AAAA,IACL,GAAG;AAAA,IACH,MAAM,WAAW;AAAA,IACjB,UAAU;AAAA,IACV,UAAU;AAAA,IACV,UAAU;AAAA,IACV,OAAO;AAAA,MACL;AAAA,IAAA;AAAA,IAEF,WAAW;AAAA,IACX,QAAQ;AAAA,EAAA;AAIZ;AAYA,SAAS,oBACP,aACA;AAEA,MAAI,YAAiC;AACrC,MAAI,YACF;AACF,MAAI,aAAkC;AACtC,MAAI,aAAmE;AAEvE,QAAM,OAA4B;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,IAMhC,MAAM,CAAC,WAAW;AAChB,YAAM,EAAE,OAAO,OAAO,QAAQ,cAAc;AAG5C,kBAAY;AACZ,kBAAY;AACZ,mBAAa;AACb,mBAAa,OAAO;AAGpB,UAAI,eAAe,YAAY,SAAS,GAAG;AACzC,cAAA;AACA,oBAAY,QAAQ,CAAC,SAAS;AAC5B,gBAAM;AAAA,YACJ,MAAM;AAAA,YACN,OAAO;AAAA,UAAA,CACR;AAAA,QACH,CAAC;AACD,eAAA;AAAA,MACF;AAGA,gBAAA;AAGA,aAAO,MAAM;AAAA,MAAC;AAAA,IAChB;AAAA;AAAA;AAAA;AAAA;AAAA,IAKA,iBAAiB,OAAO,CAAA;AAAA,EAAC;AAW3B,QAAM,wBAAwB,CAAC,cAAyC;AACtE,QAAI,CAAC,aAAa,CAAC,aAAa,CAAC,YAAY;AAC3C;AAAA,IACF;AAGA,cAAA;AACA,cAAU,QAAQ,CAAC,aAAa;AAC9B,UAAI,WAAW;AACb,kBAAU;AAAA,UACR,MAAM,SAAS;AAAA,UACf,OAAO,SAAS;AAAA,QAAA,CACjB;AAAA,MACH;AAAA,IACF,CAAC;AACD,eAAA;AAAA,EACF;AAEA,SAAO;AAAA,IACL;AAAA,IACA;AAAA,IACA;AAAA,EAAA;AAEJ;;"}
@@ -420,8 +420,23 @@ function isLimitSubset(subset, superset) {
420
420
  return subset <= superset;
421
421
  }
422
422
  function isPredicateSubset(subset, superset) {
423
+ if (superset.limit !== void 0) {
424
+ if (!areWhereClausesEqual(subset.where, superset.where)) {
425
+ return false;
426
+ }
427
+ return isOrderBySubset(subset.orderBy, superset.orderBy) && isLimitSubset(subset.limit, superset.limit);
428
+ }
423
429
  return isWhereSubset(subset.where, superset.where) && isOrderBySubset(subset.orderBy, superset.orderBy) && isLimitSubset(subset.limit, superset.limit);
424
430
  }
431
+ function areWhereClausesEqual(a, b) {
432
+ if (a === void 0 && b === void 0) {
433
+ return true;
434
+ }
435
+ if (a === void 0 || b === void 0) {
436
+ return false;
437
+ }
438
+ return areExpressionsEqual(a, b);
439
+ }
425
440
  function findCommonConditions(predicate1, predicate2) {
426
441
  const conditions1 = extractAllConditions(predicate1);
427
442
  const conditions2 = extractAllConditions(predicate2);