@tanstack/electric-db-collection 0.1.29 → 0.1.31

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
@@ -27,14 +27,48 @@ function hasTxids(message) {
  function electricCollectionOptions(config) {
  const seenTxids = new store.Store(/* @__PURE__ */ new Set([]));
  const seenSnapshots = new store.Store([]);
+ const pendingMatches = new store.Store(/* @__PURE__ */ new Map());
+ const currentBatchMessages = new store.Store([]);
+ const removePendingMatches = (matchIds) => {
+ if (matchIds.length > 0) {
+ pendingMatches.setState((current) => {
+ const newMatches = new Map(current);
+ matchIds.forEach((id) => newMatches.delete(id));
+ return newMatches;
+ });
+ }
+ };
+ const resolveMatchedPendingMatches = () => {
+ const matchesToResolve = [];
+ pendingMatches.state.forEach((match, matchId) => {
+ if (match.matched) {
+ clearTimeout(match.timeoutId);
+ match.resolve(true);
+ matchesToResolve.push(matchId);
+ debug(
+ `${config.id ? `[${config.id}] ` : ``}awaitMatch resolved on up-to-date for match %s`,
+ matchId
+ );
+ }
+ });
+ removePendingMatches(matchesToResolve);
+ };
  const sync = createElectricSync(config.shapeOptions, {
  seenTxids,
- seenSnapshots
+ seenSnapshots,
+ pendingMatches,
+ currentBatchMessages,
+ removePendingMatches,
+ resolveMatchedPendingMatches,
+ collectionId: config.id
  });
- const awaitTxId = async (txId, timeout = 3e4) => {
- debug(`awaitTxId called with txid %d`, txId);
+ const awaitTxId = async (txId, timeout = 5e3) => {
+ debug(
+ `${config.id ? `[${config.id}] ` : ``}awaitTxId called with txid %d`,
+ txId
+ );
  if (typeof txId !== `number`) {
- throw new errors.ExpectedNumberInAwaitTxIdError(typeof txId);
+ throw new errors.ExpectedNumberInAwaitTxIdError(typeof txId, config.id);
  }
  const hasTxid = seenTxids.state.has(txId);
  if (hasTxid) return true;
@@ -46,11 +80,14 @@ function electricCollectionOptions(config) {
  const timeoutId = setTimeout(() => {
  unsubscribeSeenTxids();
  unsubscribeSeenSnapshots();
- reject(new errors.TimeoutWaitingForTxIdError(txId));
+ reject(new errors.TimeoutWaitingForTxIdError(txId, config.id));
  }, timeout);
  const unsubscribeSeenTxids = seenTxids.subscribe(() => {
  if (seenTxids.state.has(txId)) {
- debug(`awaitTxId found match for txid %o`, txId);
+ debug(
+ `${config.id ? `[${config.id}] ` : ``}awaitTxId found match for txid %o`,
+ txId
+ );
  clearTimeout(timeoutId);
  unsubscribeSeenTxids();
  unsubscribeSeenSnapshots();
@@ -63,7 +100,7 @@ function electricCollectionOptions(config) {
  );
  if (visibleSnapshot) {
  debug(
- `awaitTxId found match for txid %o in snapshot %o`,
+ `${config.id ? `[${config.id}] ` : ``}awaitTxId found match for txid %o in snapshot %o`,
  txId,
  visibleSnapshot
  );
@@ -75,42 +112,96 @@ function electricCollectionOptions(config) {
  });
  });
  };
- const wrappedOnInsert = config.onInsert ? async (params) => {
- const handlerResult = await config.onInsert(params) ?? {};
- const txid = handlerResult.txid;
- if (!txid) {
- throw new errors.ElectricInsertHandlerMustReturnTxIdError();
- }
- if (Array.isArray(txid)) {
- await Promise.all(txid.map((id) => awaitTxId(id)));
- } else {
- await awaitTxId(txid);
+ const awaitMatch = async (matchFn, timeout = 3e3) => {
+ debug(
+ `${config.id ? `[${config.id}] ` : ``}awaitMatch called with custom function`
+ );
+ return new Promise((resolve, reject) => {
+ const matchId = Math.random().toString(36);
+ const cleanupMatch = () => {
+ pendingMatches.setState((current) => {
+ const newMatches = new Map(current);
+ newMatches.delete(matchId);
+ return newMatches;
+ });
+ };
+ const onTimeout = () => {
+ cleanupMatch();
+ reject(new errors.TimeoutWaitingForMatchError(config.id));
+ };
+ const timeoutId = setTimeout(onTimeout, timeout);
+ const checkMatch = (message) => {
+ if (matchFn(message)) {
+ debug(
+ `${config.id ? `[${config.id}] ` : ``}awaitMatch found matching message, waiting for up-to-date`
+ );
+ pendingMatches.setState((current) => {
+ const newMatches = new Map(current);
+ const existing = newMatches.get(matchId);
+ if (existing) {
+ newMatches.set(matchId, { ...existing, matched: true });
+ }
+ return newMatches;
+ });
+ return true;
+ }
+ return false;
+ };
+ for (const message of currentBatchMessages.state) {
+ if (matchFn(message)) {
+ debug(
+ `${config.id ? `[${config.id}] ` : ``}awaitMatch found immediate match in current batch, waiting for up-to-date`
+ );
+ pendingMatches.setState((current) => {
+ const newMatches = new Map(current);
+ newMatches.set(matchId, {
+ matchFn: checkMatch,
+ resolve,
+ reject,
+ timeoutId,
+ matched: true
+ // Already matched
+ });
+ return newMatches;
+ });
+ return;
+ }
+ }
+ pendingMatches.setState((current) => {
+ const newMatches = new Map(current);
+ newMatches.set(matchId, {
+ matchFn: checkMatch,
+ resolve,
+ reject,
+ timeoutId,
+ matched: false
+ });
+ return newMatches;
+ });
+ });
+ };
+ const processMatchingStrategy = async (result) => {
+ if (result && `txid` in result) {
+ if (Array.isArray(result.txid)) {
+ await Promise.all(result.txid.map(awaitTxId));
+ } else {
+ await awaitTxId(result.txid);
+ }
  }
+ };
+ const wrappedOnInsert = config.onInsert ? async (params) => {
+ const handlerResult = await config.onInsert(params);
+ await processMatchingStrategy(handlerResult);
  return handlerResult;
  } : void 0;
  const wrappedOnUpdate = config.onUpdate ? async (params) => {
- const handlerResult = await config.onUpdate(params) ?? {};
- const txid = handlerResult.txid;
- if (!txid) {
- throw new errors.ElectricUpdateHandlerMustReturnTxIdError();
- }
- if (Array.isArray(txid)) {
- await Promise.all(txid.map((id) => awaitTxId(id)));
- } else {
- await awaitTxId(txid);
- }
+ const handlerResult = await config.onUpdate(params);
+ await processMatchingStrategy(handlerResult);
  return handlerResult;
  } : void 0;
  const wrappedOnDelete = config.onDelete ? async (params) => {
  const handlerResult = await config.onDelete(params);
- if (!handlerResult.txid) {
- throw new errors.ElectricDeleteHandlerMustReturnTxIdError();
- }
- if (Array.isArray(handlerResult.txid)) {
- await Promise.all(handlerResult.txid.map((id) => awaitTxId(id)));
- } else {
- await awaitTxId(handlerResult.txid);
- }
+ await processMatchingStrategy(handlerResult);
  return handlerResult;
  } : void 0;
  const {
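The rewritten wrappers above change the handler contract: onInsert/onUpdate/onDelete no longer have to return a txid. Returning `{ txid }` (a number or an array) still waits via `awaitTxId`, while returning nothing lets the mutation complete immediately or be matched manually with the new `collection.utils.awaitMatch`. A minimal sketch of both styles, adapted from the JSDoc examples shipped in this version's source map; the `api.todos` client and the `getKey`/`url` values are placeholders, not part of the package:

import { electricCollectionOptions, isChangeMessage } from "@tanstack/electric-db-collection"

// `api` stands in for your own backend client (placeholder).
const todoCollectionOptions = electricCollectionOptions({
  id: `todos`,
  getKey: (todo) => todo.id, // placeholder key extractor
  shapeOptions: { url: `https://example.com/v1/shape`, params: { table: `todos` } },

  // Txid strategy (recommended): the wrapper awaits awaitTxId for the returned txid(s).
  onInsert: async ({ transaction }) => {
    const newItem = transaction.mutations[0].modified
    const result = await api.todos.create({ data: newItem }) // placeholder backend call
    return { txid: result.txid } // an array of txids is also accepted
  },

  // Custom matching: return nothing and wait for the change to appear in the shape stream.
  onUpdate: async ({ transaction, collection }) => {
    const { original, changes } = transaction.mutations[0]
    await api.todos.update({ where: { id: original.id }, data: changes })
    await collection.utils.awaitMatch(
      (message) =>
        isChangeMessage(message) &&
        message.headers.operation === `update` &&
        message.value.id === original.id
    )
  },
})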
@@ -127,13 +218,22 @@ function electricCollectionOptions(config) {
  onUpdate: wrappedOnUpdate,
  onDelete: wrappedOnDelete,
  utils: {
- awaitTxId
+ awaitTxId,
+ awaitMatch
  }
  };
  }
  function createElectricSync(shapeOptions, options) {
- const { seenTxids } = options;
- const { seenSnapshots } = options;
+ const {
+ seenTxids,
+ seenSnapshots,
+ pendingMatches,
+ currentBatchMessages,
+ removePendingMatches,
+ resolveMatchedPendingMatches,
+ collectionId
+ } = options;
+ const MAX_BATCH_MESSAGES = 1e3;
  const relationSchema = new store.Store(void 0);
  const getSyncMetadata = () => {
  const schema = relationSchema.state || `public`;
@@ -160,6 +260,15 @@ function createElectricSync(shapeOptions, options) {
  abortController.abort();
  }
  }
+ abortController.signal.addEventListener(`abort`, () => {
+ pendingMatches.setState((current) => {
+ current.forEach((match) => {
+ clearTimeout(match.timeoutId);
+ match.reject(new errors.StreamAbortedError());
+ });
+ return /* @__PURE__ */ new Map();
+ });
+ });
  const stream = new client.ShapeStream({
  ...shapeOptions,
  signal: abortController.signal,
@@ -184,9 +293,34 @@ You can provide an 'onError' handler on the shapeOptions to handle this error, a
  unsubscribeStream = stream.subscribe((messages) => {
  let hasUpToDate = false;
  for (const message of messages) {
+ if (client.isChangeMessage(message)) {
+ currentBatchMessages.setState((currentBuffer) => {
+ const newBuffer = [...currentBuffer, message];
+ if (newBuffer.length > MAX_BATCH_MESSAGES) {
+ newBuffer.splice(0, newBuffer.length - MAX_BATCH_MESSAGES);
+ }
+ return newBuffer;
+ });
+ }
  if (hasTxids(message)) {
  message.headers.txids?.forEach((txid) => newTxids.add(txid));
  }
+ const matchesToRemove = [];
+ pendingMatches.state.forEach((match, matchId) => {
+ if (!match.matched) {
+ try {
+ match.matchFn(message);
+ } catch (err) {
+ clearTimeout(match.timeoutId);
+ match.reject(
+ err instanceof Error ? err : new Error(String(err))
+ );
+ matchesToRemove.push(matchId);
+ debug(`matchFn error: %o`, err);
+ }
+ }
+ });
+ removePendingMatches(matchesToRemove);
  if (client.isChangeMessage(message)) {
  const schema = message.headers.schema;
  if (schema && typeof schema === `string`) {
@@ -210,7 +344,7 @@ You can provide an 'onError' handler on the shapeOptions to handle this error, a
  hasUpToDate = true;
  } else if (isMustRefetchMessage(message)) {
  debug(
- `Received must-refetch message, starting transaction with truncate`
+ `${collectionId ? `[${collectionId}] ` : ``}Received must-refetch message, starting transaction with truncate`
  );
  if (!transactionStarted) {
  begin();
@@ -221,6 +355,7 @@ You can provide an 'onError' handler on the shapeOptions to handle this error, a
  }
  }
  if (hasUpToDate) {
+ currentBatchMessages.setState(() => []);
  if (transactionStarted) {
  commit();
  transactionStarted = false;
@@ -229,7 +364,10 @@ You can provide an 'onError' handler on the shapeOptions to handle this error, a
  seenTxids.setState((currentTxids) => {
  const clonedSeen = new Set(currentTxids);
  if (newTxids.size > 0) {
- debug(`new txids synced from pg %O`, Array.from(newTxids));
+ debug(
+ `${collectionId ? `[${collectionId}] ` : ``}new txids synced from pg %O`,
+ Array.from(newTxids)
+ );
  }
  newTxids.forEach((txid) => clonedSeen.add(txid));
  newTxids.clear();
@@ -238,11 +376,15 @@ You can provide an 'onError' handler on the shapeOptions to handle this error, a
  seenSnapshots.setState((currentSnapshots) => {
  const seen = [...currentSnapshots, ...newSnapshots];
  newSnapshots.forEach(
- (snapshot) => debug(`new snapshot synced from pg %o`, snapshot)
+ (snapshot) => debug(
+ `${collectionId ? `[${collectionId}] ` : ``}new snapshot synced from pg %o`,
+ snapshot
+ )
  );
  newSnapshots.length = 0;
  return seen;
  });
+ resolveMatchedPendingMatches();
  }
  });
  return () => {
@@ -254,5 +396,13 @@ You can provide an 'onError' handler on the shapeOptions to handle this error, a
  getSyncMetadata
  };
  }
+ Object.defineProperty(exports, "isChangeMessage", {
+ enumerable: true,
+ get: () => client.isChangeMessage
+ });
+ Object.defineProperty(exports, "isControlMessage", {
+ enumerable: true,
+ get: () => client.isControlMessage
+ });
  exports.electricCollectionOptions = electricCollectionOptions;
  //# sourceMappingURL=electric.cjs.map
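The two `Object.defineProperty` blocks added at the end re-export the `@electric-sql/client` type guards from the CommonJS build, so an `awaitMatch` predicate can be written without importing the client package directly. A small hedged sketch; `matchesInsertOf` is an illustrative helper name, not part of the package:

const {
  electricCollectionOptions,
  isChangeMessage,
  isControlMessage,
} = require("@tanstack/electric-db-collection")

// Illustrative helper: builds an awaitMatch predicate for a freshly inserted row.
const matchesInsertOf = (id) => (message) =>
  isChangeMessage(message) &&
  message.headers.operation === `insert` &&
  message.value.id === id

// Inside a handler: await collection.utils.awaitMatch(matchesInsertOf(newItem.id), 3000)
// The second argument overrides awaitMatch's default timeout (3e3 ms in this build).
// isControlMessage is re-exported as well, e.g. to skip control frames in custom logic.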
@@ -1 +1 @@
- {"version":3,"file":"electric.cjs","sources":["../../src/electric.ts"],"sourcesContent":["import {\n ShapeStream,\n isChangeMessage,\n isControlMessage,\n isVisibleInSnapshot,\n} from \"@electric-sql/client\"\nimport { Store } from \"@tanstack/store\"\nimport DebugModule from \"debug\"\nimport {\n ElectricDeleteHandlerMustReturnTxIdError,\n ElectricInsertHandlerMustReturnTxIdError,\n ElectricUpdateHandlerMustReturnTxIdError,\n ExpectedNumberInAwaitTxIdError,\n TimeoutWaitingForTxIdError,\n} from \"./errors\"\nimport type {\n BaseCollectionConfig,\n CollectionConfig,\n DeleteMutationFnParams,\n Fn,\n InsertMutationFnParams,\n SyncConfig,\n UpdateMutationFnParams,\n UtilsRecord,\n} from \"@tanstack/db\"\nimport type { StandardSchemaV1 } from \"@standard-schema/spec\"\nimport type {\n ControlMessage,\n GetExtensions,\n Message,\n PostgresSnapshot,\n Row,\n ShapeStreamOptions,\n} from \"@electric-sql/client\"\n\nconst debug = DebugModule.debug(`ts/db:electric`)\n\n/**\n * Type representing a transaction ID in ElectricSQL\n */\nexport type Txid = number\n\n/**\n * Type representing the result of an insert, update, or delete handler\n */\ntype MaybeTxId =\n | {\n txid?: Txid | Array<Txid>\n }\n | undefined\n | null\n\n/**\n * Type representing a snapshot end message\n */\ntype SnapshotEndMessage = ControlMessage & {\n headers: { control: `snapshot-end` }\n}\n\n// The `InferSchemaOutput` and `ResolveType` are copied from the `@tanstack/db` package\n// but we modified `InferSchemaOutput` slightly to restrict the schema output to `Row<unknown>`\n// This is needed in order for `GetExtensions` to be able to infer the parser extensions type from the schema\ntype InferSchemaOutput<T> = T extends StandardSchemaV1\n ? StandardSchemaV1.InferOutput<T> extends Row<unknown>\n ? 
StandardSchemaV1.InferOutput<T>\n : Record<string, unknown>\n : Record<string, unknown>\n\n/**\n * Configuration interface for Electric collection options\n * @template T - The type of items in the collection\n * @template TSchema - The schema type for validation\n */\nexport interface ElectricCollectionConfig<\n T extends Row<unknown> = Row<unknown>,\n TSchema extends StandardSchemaV1 = never,\n> extends BaseCollectionConfig<\n T,\n string | number,\n TSchema,\n Record<string, Fn>,\n { txid: Txid | Array<Txid> }\n > {\n /**\n * Configuration options for the ElectricSQL ShapeStream\n */\n shapeOptions: ShapeStreamOptions<GetExtensions<T>>\n}\n\nfunction isUpToDateMessage<T extends Row<unknown>>(\n message: Message<T>\n): message is ControlMessage & { up_to_date: true } {\n return isControlMessage(message) && message.headers.control === `up-to-date`\n}\n\nfunction isMustRefetchMessage<T extends Row<unknown>>(\n message: Message<T>\n): message is ControlMessage & { headers: { control: `must-refetch` } } {\n return isControlMessage(message) && message.headers.control === `must-refetch`\n}\n\nfunction isSnapshotEndMessage<T extends Row<unknown>>(\n message: Message<T>\n): message is SnapshotEndMessage {\n return isControlMessage(message) && message.headers.control === `snapshot-end`\n}\n\nfunction parseSnapshotMessage(message: SnapshotEndMessage): PostgresSnapshot {\n return {\n xmin: message.headers.xmin,\n xmax: message.headers.xmax,\n xip_list: message.headers.xip_list,\n }\n}\n\n// Check if a message contains txids in its headers\nfunction hasTxids<T extends Row<unknown>>(\n message: Message<T>\n): message is Message<T> & { headers: { txids?: Array<Txid> } } {\n return `txids` in message.headers && Array.isArray(message.headers.txids)\n}\n\n/**\n * Type for the awaitTxId utility function\n */\nexport type AwaitTxIdFn = (txId: Txid, timeout?: number) => Promise<boolean>\n\n/**\n * Electric collection utilities type\n */\nexport interface ElectricCollectionUtils extends UtilsRecord {\n awaitTxId: AwaitTxIdFn\n}\n\n/**\n * Creates Electric collection options for use with a standard Collection\n *\n * @template T - The explicit type of items in the collection (highest priority)\n * @template TSchema - The schema type for validation and type inference (second priority)\n * @template TFallback - The fallback type if no explicit or schema type is provided\n * @param config - Configuration options for the Electric collection\n * @returns Collection options with utilities\n */\n\n// Overload for when schema is provided\nexport function electricCollectionOptions<T extends StandardSchemaV1>(\n config: ElectricCollectionConfig<InferSchemaOutput<T>, T> & {\n schema: T\n }\n): CollectionConfig<InferSchemaOutput<T>, string | number, T> & {\n id?: string\n utils: ElectricCollectionUtils\n schema: T\n}\n\n// Overload for when no schema is provided\nexport function electricCollectionOptions<T extends Row<unknown>>(\n config: ElectricCollectionConfig<T> & {\n schema?: never // prohibit schema\n }\n): CollectionConfig<T, string | number> & {\n id?: string\n utils: ElectricCollectionUtils\n schema?: never // no schema in the result\n}\n\nexport function electricCollectionOptions(\n config: ElectricCollectionConfig<any, any>\n): CollectionConfig<any, string | number, any> & {\n id?: string\n utils: ElectricCollectionUtils\n schema?: any\n} {\n const seenTxids = new Store<Set<Txid>>(new Set([]))\n const seenSnapshots = new Store<Array<PostgresSnapshot>>([])\n const sync = 
createElectricSync<any>(config.shapeOptions, {\n seenTxids,\n seenSnapshots,\n })\n\n /**\n * Wait for a specific transaction ID to be synced\n * @param txId The transaction ID to wait for as a number\n * @param timeout Optional timeout in milliseconds (defaults to 30000ms)\n * @returns Promise that resolves when the txId is synced\n */\n const awaitTxId: AwaitTxIdFn = async (\n txId: Txid,\n timeout: number = 30000\n ): Promise<boolean> => {\n debug(`awaitTxId called with txid %d`, txId)\n if (typeof txId !== `number`) {\n throw new ExpectedNumberInAwaitTxIdError(typeof txId)\n }\n\n // First check if the txid is in the seenTxids store\n const hasTxid = seenTxids.state.has(txId)\n if (hasTxid) return true\n\n // Then check if the txid is in any of the seen snapshots\n const hasSnapshot = seenSnapshots.state.some((snapshot) =>\n isVisibleInSnapshot(txId, snapshot)\n )\n if (hasSnapshot) return true\n\n return new Promise((resolve, reject) => {\n const timeoutId = setTimeout(() => {\n unsubscribeSeenTxids()\n unsubscribeSeenSnapshots()\n reject(new TimeoutWaitingForTxIdError(txId))\n }, timeout)\n\n const unsubscribeSeenTxids = seenTxids.subscribe(() => {\n if (seenTxids.state.has(txId)) {\n debug(`awaitTxId found match for txid %o`, txId)\n clearTimeout(timeoutId)\n unsubscribeSeenTxids()\n unsubscribeSeenSnapshots()\n resolve(true)\n }\n })\n\n const unsubscribeSeenSnapshots = seenSnapshots.subscribe(() => {\n const visibleSnapshot = seenSnapshots.state.find((snapshot) =>\n isVisibleInSnapshot(txId, snapshot)\n )\n if (visibleSnapshot) {\n debug(\n `awaitTxId found match for txid %o in snapshot %o`,\n txId,\n visibleSnapshot\n )\n clearTimeout(timeoutId)\n unsubscribeSeenSnapshots()\n unsubscribeSeenTxids()\n resolve(true)\n }\n })\n })\n }\n\n // Create wrapper handlers for direct persistence operations that handle txid awaiting\n const wrappedOnInsert = config.onInsert\n ? async (params: InsertMutationFnParams<any>) => {\n // Runtime check (that doesn't follow type)\n\n const handlerResult =\n ((await config.onInsert!(params)) as MaybeTxId) ?? {}\n const txid = handlerResult.txid\n\n if (!txid) {\n throw new ElectricInsertHandlerMustReturnTxIdError()\n }\n\n // Handle both single txid and array of txids\n if (Array.isArray(txid)) {\n await Promise.all(txid.map((id) => awaitTxId(id)))\n } else {\n await awaitTxId(txid)\n }\n\n return handlerResult\n }\n : undefined\n\n const wrappedOnUpdate = config.onUpdate\n ? async (params: UpdateMutationFnParams<any>) => {\n // Runtime check (that doesn't follow type)\n\n const handlerResult =\n ((await config.onUpdate!(params)) as MaybeTxId) ?? {}\n const txid = handlerResult.txid\n\n if (!txid) {\n throw new ElectricUpdateHandlerMustReturnTxIdError()\n }\n\n // Handle both single txid and array of txids\n if (Array.isArray(txid)) {\n await Promise.all(txid.map((id) => awaitTxId(id)))\n } else {\n await awaitTxId(txid)\n }\n\n return handlerResult\n }\n : undefined\n\n const wrappedOnDelete = config.onDelete\n ? 
async (params: DeleteMutationFnParams<any>) => {\n const handlerResult = await config.onDelete!(params)\n if (!handlerResult.txid) {\n throw new ElectricDeleteHandlerMustReturnTxIdError()\n }\n\n // Handle both single txid and array of txids\n if (Array.isArray(handlerResult.txid)) {\n await Promise.all(handlerResult.txid.map((id) => awaitTxId(id)))\n } else {\n await awaitTxId(handlerResult.txid)\n }\n\n return handlerResult\n }\n : undefined\n\n // Extract standard Collection config properties\n const {\n shapeOptions: _shapeOptions,\n onInsert: _onInsert,\n onUpdate: _onUpdate,\n onDelete: _onDelete,\n ...restConfig\n } = config\n\n return {\n ...restConfig,\n sync,\n onInsert: wrappedOnInsert,\n onUpdate: wrappedOnUpdate,\n onDelete: wrappedOnDelete,\n utils: {\n awaitTxId,\n },\n }\n}\n\n/**\n * Internal function to create ElectricSQL sync configuration\n */\nfunction createElectricSync<T extends Row<unknown>>(\n shapeOptions: ShapeStreamOptions<GetExtensions<T>>,\n options: {\n seenTxids: Store<Set<Txid>>\n seenSnapshots: Store<Array<PostgresSnapshot>>\n }\n): SyncConfig<T> {\n const { seenTxids } = options\n const { seenSnapshots } = options\n\n // Store for the relation schema information\n const relationSchema = new Store<string | undefined>(undefined)\n\n /**\n * Get the sync metadata for insert operations\n * @returns Record containing relation information\n */\n const getSyncMetadata = (): Record<string, unknown> => {\n // Use the stored schema if available, otherwise default to 'public'\n const schema = relationSchema.state || `public`\n\n return {\n relation: shapeOptions.params?.table\n ? [schema, shapeOptions.params.table]\n : undefined,\n }\n }\n\n let unsubscribeStream: () => void\n\n return {\n sync: (params: Parameters<SyncConfig<T>[`sync`]>[0]) => {\n const { begin, write, commit, markReady, truncate, collection } = params\n\n // Abort controller for the stream - wraps the signal if provided\n const abortController = new AbortController()\n\n if (shapeOptions.signal) {\n shapeOptions.signal.addEventListener(\n `abort`,\n () => {\n abortController.abort()\n },\n {\n once: true,\n }\n )\n if (shapeOptions.signal.aborted) {\n abortController.abort()\n }\n }\n\n const stream = new ShapeStream({\n ...shapeOptions,\n signal: abortController.signal,\n onError: (errorParams) => {\n // Just immediately mark ready if there's an error to avoid blocking\n // apps waiting for `.preload()` to finish.\n // Note that Electric sends a 409 error on a `must-refetch` message, but the\n // ShapeStream handled this and it will not reach this handler, therefor\n // this markReady will not be triggers by a `must-refetch`.\n markReady()\n\n if (shapeOptions.onError) {\n return shapeOptions.onError(errorParams)\n } else {\n console.error(\n `An error occurred while syncing collection: ${collection.id}, \\n` +\n `it has been marked as ready to avoid blocking apps waiting for '.preload()' to finish. 
\\n` +\n `You can provide an 'onError' handler on the shapeOptions to handle this error, and this message will not be logged.`,\n errorParams\n )\n }\n\n return\n },\n })\n let transactionStarted = false\n const newTxids = new Set<Txid>()\n const newSnapshots: Array<PostgresSnapshot> = []\n\n unsubscribeStream = stream.subscribe((messages: Array<Message<T>>) => {\n let hasUpToDate = false\n\n for (const message of messages) {\n // Check for txids in the message and add them to our store\n if (hasTxids(message)) {\n message.headers.txids?.forEach((txid) => newTxids.add(txid))\n }\n\n if (isChangeMessage(message)) {\n // Check if the message contains schema information\n const schema = message.headers.schema\n if (schema && typeof schema === `string`) {\n // Store the schema for future use if it's a valid string\n relationSchema.setState(() => schema)\n }\n\n if (!transactionStarted) {\n begin()\n transactionStarted = true\n }\n\n write({\n type: message.headers.operation,\n value: message.value,\n // Include the primary key and relation info in the metadata\n metadata: {\n ...message.headers,\n },\n })\n } else if (isSnapshotEndMessage(message)) {\n newSnapshots.push(parseSnapshotMessage(message))\n } else if (isUpToDateMessage(message)) {\n hasUpToDate = true\n } else if (isMustRefetchMessage(message)) {\n debug(\n `Received must-refetch message, starting transaction with truncate`\n )\n\n // Start a transaction and truncate the collection\n if (!transactionStarted) {\n begin()\n transactionStarted = true\n }\n\n truncate()\n\n // Reset hasUpToDate so we continue accumulating changes until next up-to-date\n hasUpToDate = false\n }\n }\n\n if (hasUpToDate) {\n // Commit transaction if one was started\n if (transactionStarted) {\n commit()\n transactionStarted = false\n }\n\n // Mark the collection as ready now that sync is up to date\n markReady()\n\n // Always commit txids when we receive up-to-date, regardless of transaction state\n seenTxids.setState((currentTxids) => {\n const clonedSeen = new Set<Txid>(currentTxids)\n if (newTxids.size > 0) {\n debug(`new txids synced from pg %O`, Array.from(newTxids))\n }\n newTxids.forEach((txid) => clonedSeen.add(txid))\n newTxids.clear()\n return clonedSeen\n })\n\n // Always commit snapshots when we receive up-to-date, regardless of transaction state\n seenSnapshots.setState((currentSnapshots) => {\n const seen = [...currentSnapshots, ...newSnapshots]\n newSnapshots.forEach((snapshot) =>\n debug(`new snapshot synced from pg %o`, snapshot)\n )\n newSnapshots.length = 0\n return seen\n })\n }\n })\n\n // Return the unsubscribe function\n return () => {\n // Unsubscribe from the stream\n unsubscribeStream()\n // Abort the abort controller to stop the stream\n abortController.abort()\n }\n },\n // Expose the getSyncMetadata function\n getSyncMetadata,\n 
}\n}\n"],"names":["isControlMessage","Store","ExpectedNumberInAwaitTxIdError","isVisibleInSnapshot","TimeoutWaitingForTxIdError","ElectricInsertHandlerMustReturnTxIdError","ElectricUpdateHandlerMustReturnTxIdError","ElectricDeleteHandlerMustReturnTxIdError","ShapeStream","isChangeMessage"],"mappings":";;;;;;AAmCA,MAAM,QAAQ,YAAY,MAAM,gBAAgB;AAsDhD,SAAS,kBACP,SACkD;AAClD,SAAOA,OAAAA,iBAAiB,OAAO,KAAK,QAAQ,QAAQ,YAAY;AAClE;AAEA,SAAS,qBACP,SACsE;AACtE,SAAOA,OAAAA,iBAAiB,OAAO,KAAK,QAAQ,QAAQ,YAAY;AAClE;AAEA,SAAS,qBACP,SAC+B;AAC/B,SAAOA,OAAAA,iBAAiB,OAAO,KAAK,QAAQ,QAAQ,YAAY;AAClE;AAEA,SAAS,qBAAqB,SAA+C;AAC3E,SAAO;AAAA,IACL,MAAM,QAAQ,QAAQ;AAAA,IACtB,MAAM,QAAQ,QAAQ;AAAA,IACtB,UAAU,QAAQ,QAAQ;AAAA,EAAA;AAE9B;AAGA,SAAS,SACP,SAC8D;AAC9D,SAAO,WAAW,QAAQ,WAAW,MAAM,QAAQ,QAAQ,QAAQ,KAAK;AAC1E;AA8CO,SAAS,0BACd,QAKA;AACA,QAAM,YAAY,IAAIC,MAAAA,0BAAqB,IAAI,CAAA,CAAE,CAAC;AAClD,QAAM,gBAAgB,IAAIA,MAAAA,MAA+B,EAAE;AAC3D,QAAM,OAAO,mBAAwB,OAAO,cAAc;AAAA,IACxD;AAAA,IACA;AAAA,EAAA,CACD;AAQD,QAAM,YAAyB,OAC7B,MACA,UAAkB,QACG;AACrB,UAAM,iCAAiC,IAAI;AAC3C,QAAI,OAAO,SAAS,UAAU;AAC5B,YAAM,IAAIC,OAAAA,+BAA+B,OAAO,IAAI;AAAA,IACtD;AAGA,UAAM,UAAU,UAAU,MAAM,IAAI,IAAI;AACxC,QAAI,QAAS,QAAO;AAGpB,UAAM,cAAc,cAAc,MAAM;AAAA,MAAK,CAAC,aAC5CC,2BAAoB,MAAM,QAAQ;AAAA,IAAA;AAEpC,QAAI,YAAa,QAAO;AAExB,WAAO,IAAI,QAAQ,CAAC,SAAS,WAAW;AACtC,YAAM,YAAY,WAAW,MAAM;AACjC,6BAAA;AACA,iCAAA;AACA,eAAO,IAAIC,kCAA2B,IAAI,CAAC;AAAA,MAC7C,GAAG,OAAO;AAEV,YAAM,uBAAuB,UAAU,UAAU,MAAM;AACrD,YAAI,UAAU,MAAM,IAAI,IAAI,GAAG;AAC7B,gBAAM,qCAAqC,IAAI;AAC/C,uBAAa,SAAS;AACtB,+BAAA;AACA,mCAAA;AACA,kBAAQ,IAAI;AAAA,QACd;AAAA,MACF,CAAC;AAED,YAAM,2BAA2B,cAAc,UAAU,MAAM;AAC7D,cAAM,kBAAkB,cAAc,MAAM;AAAA,UAAK,CAAC,aAChDD,2BAAoB,MAAM,QAAQ;AAAA,QAAA;AAEpC,YAAI,iBAAiB;AACnB;AAAA,YACE;AAAA,YACA;AAAA,YACA;AAAA,UAAA;AAEF,uBAAa,SAAS;AACtB,mCAAA;AACA,+BAAA;AACA,kBAAQ,IAAI;AAAA,QACd;AAAA,MACF,CAAC;AAAA,IACH,CAAC;AAAA,EACH;AAGA,QAAM,kBAAkB,OAAO,WAC3B,OAAO,WAAwC;AAG7C,UAAM,gBACF,MAAM,OAAO,SAAU,MAAM,KAAoB,CAAA;AACrD,UAAM,OAAO,cAAc;AAE3B,QAAI,CAAC,MAAM;AACT,YAAM,IAAIE,OAAAA,yCAAA;AAAA,IACZ;AAGA,QAAI,MAAM,QAAQ,IAAI,GAAG;AACvB,YAAM,QAAQ,IAAI,KAAK,IAAI,CAAC,OAAO,UAAU,EAAE,CAAC,CAAC;AAAA,IACnD,OAAO;AACL,YAAM,UAAU,IAAI;AAAA,IACtB;AAEA,WAAO;AAAA,EACT,IACA;AAEJ,QAAM,kBAAkB,OAAO,WAC3B,OAAO,WAAwC;AAG7C,UAAM,gBACF,MAAM,OAAO,SAAU,MAAM,KAAoB,CAAA;AACrD,UAAM,OAAO,cAAc;AAE3B,QAAI,CAAC,MAAM;AACT,YAAM,IAAIC,OAAAA,yCAAA;AAAA,IACZ;AAGA,QAAI,MAAM,QAAQ,IAAI,GAAG;AACvB,YAAM,QAAQ,IAAI,KAAK,IAAI,CAAC,OAAO,UAAU,EAAE,CAAC,CAAC;AAAA,IACnD,OAAO;AACL,YAAM,UAAU,IAAI;AAAA,IACtB;AAEA,WAAO;AAAA,EACT,IACA;AAEJ,QAAM,kBAAkB,OAAO,WAC3B,OAAO,WAAwC;AAC7C,UAAM,gBAAgB,MAAM,OAAO,SAAU,MAAM;AACnD,QAAI,CAAC,cAAc,MAAM;AACvB,YAAM,IAAIC,OAAAA,yCAAA;AAAA,IACZ;AAGA,QAAI,MAAM,QAAQ,cAAc,IAAI,GAAG;AACrC,YAAM,QAAQ,IAAI,cAAc,KAAK,IAAI,CAAC,OAAO,UAAU,EAAE,CAAC,CAAC;AAAA,IACjE,OAAO;AACL,YAAM,UAAU,cAAc,IAAI;AAAA,IACpC;AAEA,WAAO;AAAA,EACT,IACA;AAGJ,QAAM;AAAA,IACJ,cAAc;AAAA,IACd,UAAU;AAAA,IACV,UAAU;AAAA,IACV,UAAU;AAAA,IACV,GAAG;AAAA,EAAA,IACD;AAEJ,SAAO;AAAA,IACL,GAAG;AAAA,IACH;AAAA,IACA,UAAU;AAAA,IACV,UAAU;AAAA,IACV,UAAU;AAAA,IACV,OAAO;AAAA,MACL;AAAA,IAAA;AAAA,EACF;AAEJ;AAKA,SAAS,mBACP,cACA,SAIe;AACf,QAAM,EAAE,cAAc;AACtB,QAAM,EAAE,kBAAkB;AAG1B,QAAM,iBAAiB,IAAIN,MAAAA,MAA0B,MAAS;AAM9D,QAAM,kBAAkB,MAA+B;AAErD,UAAM,SAAS,eAAe,SAAS;AAEvC,WAAO;AAAA,MACL,UAAU,aAAa,QAAQ,QAC3B,CAAC,QAAQ,aAAa,OAAO,KAAK,IAClC;AAAA,IAAA;AAAA,EAER;AAEA,MAAI;AAEJ,SAAO;AAAA,IACL,MAAM,CAAC,WAAiD;AACtD,YAAM,EAAE,OAAO,OAAO,QAAQ,WAAW,UAAU,eAAe;AAGlE,YAAM,kBAAkB,IAAI,gBAAA;AAE5B,UAAI,aAAa,QAAQ;AACvB,qBAAa,OAAO;AAAA,UAClB;AAAA,UACA,MAAM;AACJ,4BAAgB,MAAA;AAAA,UAClB;AAAA,UACA;AAAA,YACE,MAAM;AAAA,
UAAA;AAAA,QACR;AAEF,YAAI,aAAa,OAAO,SAAS;AAC/B,0BAAgB,MAAA;AAAA,QAClB;AAAA,MACF;AAEA,YAAM,SAAS,IAAIO,mBAAY;AAAA,QAC7B,GAAG;AAAA,QACH,QAAQ,gBAAgB;AAAA,QACxB,SAAS,CAAC,gBAAgB;AAMxB,oBAAA;AAEA,cAAI,aAAa,SAAS;AACxB,mBAAO,aAAa,QAAQ,WAAW;AAAA,UACzC,OAAO;AACL,oBAAQ;AAAA,cACN,+CAA+C,WAAW,EAAE;AAAA;AAAA;AAAA,cAG5D;AAAA,YAAA;AAAA,UAEJ;AAEA;AAAA,QACF;AAAA,MAAA,CACD;AACD,UAAI,qBAAqB;AACzB,YAAM,+BAAe,IAAA;AACrB,YAAM,eAAwC,CAAA;AAE9C,0BAAoB,OAAO,UAAU,CAAC,aAAgC;AACpE,YAAI,cAAc;AAElB,mBAAW,WAAW,UAAU;AAE9B,cAAI,SAAS,OAAO,GAAG;AACrB,oBAAQ,QAAQ,OAAO,QAAQ,CAAC,SAAS,SAAS,IAAI,IAAI,CAAC;AAAA,UAC7D;AAEA,cAAIC,OAAAA,gBAAgB,OAAO,GAAG;AAE5B,kBAAM,SAAS,QAAQ,QAAQ;AAC/B,gBAAI,UAAU,OAAO,WAAW,UAAU;AAExC,6BAAe,SAAS,MAAM,MAAM;AAAA,YACtC;AAEA,gBAAI,CAAC,oBAAoB;AACvB,oBAAA;AACA,mCAAqB;AAAA,YACvB;AAEA,kBAAM;AAAA,cACJ,MAAM,QAAQ,QAAQ;AAAA,cACtB,OAAO,QAAQ;AAAA;AAAA,cAEf,UAAU;AAAA,gBACR,GAAG,QAAQ;AAAA,cAAA;AAAA,YACb,CACD;AAAA,UACH,WAAW,qBAAqB,OAAO,GAAG;AACxC,yBAAa,KAAK,qBAAqB,OAAO,CAAC;AAAA,UACjD,WAAW,kBAAkB,OAAO,GAAG;AACrC,0BAAc;AAAA,UAChB,WAAW,qBAAqB,OAAO,GAAG;AACxC;AAAA,cACE;AAAA,YAAA;AAIF,gBAAI,CAAC,oBAAoB;AACvB,oBAAA;AACA,mCAAqB;AAAA,YACvB;AAEA,qBAAA;AAGA,0BAAc;AAAA,UAChB;AAAA,QACF;AAEA,YAAI,aAAa;AAEf,cAAI,oBAAoB;AACtB,mBAAA;AACA,iCAAqB;AAAA,UACvB;AAGA,oBAAA;AAGA,oBAAU,SAAS,CAAC,iBAAiB;AACnC,kBAAM,aAAa,IAAI,IAAU,YAAY;AAC7C,gBAAI,SAAS,OAAO,GAAG;AACrB,oBAAM,+BAA+B,MAAM,KAAK,QAAQ,CAAC;AAAA,YAC3D;AACA,qBAAS,QAAQ,CAAC,SAAS,WAAW,IAAI,IAAI,CAAC;AAC/C,qBAAS,MAAA;AACT,mBAAO;AAAA,UACT,CAAC;AAGD,wBAAc,SAAS,CAAC,qBAAqB;AAC3C,kBAAM,OAAO,CAAC,GAAG,kBAAkB,GAAG,YAAY;AAClD,yBAAa;AAAA,cAAQ,CAAC,aACpB,MAAM,kCAAkC,QAAQ;AAAA,YAAA;AAElD,yBAAa,SAAS;AACtB,mBAAO;AAAA,UACT,CAAC;AAAA,QACH;AAAA,MACF,CAAC;AAGD,aAAO,MAAM;AAEX,0BAAA;AAEA,wBAAgB,MAAA;AAAA,MAClB;AAAA,IACF;AAAA;AAAA,IAEA;AAAA,EAAA;AAEJ;;"}
+ {"version":3,"file":"electric.cjs","sources":["../../src/electric.ts"],"sourcesContent":["import {\n ShapeStream,\n isChangeMessage,\n isControlMessage,\n isVisibleInSnapshot,\n} from \"@electric-sql/client\"\nimport { Store } from \"@tanstack/store\"\nimport DebugModule from \"debug\"\nimport {\n ExpectedNumberInAwaitTxIdError,\n StreamAbortedError,\n TimeoutWaitingForMatchError,\n TimeoutWaitingForTxIdError,\n} from \"./errors\"\nimport type {\n BaseCollectionConfig,\n CollectionConfig,\n DeleteMutationFnParams,\n InsertMutationFnParams,\n SyncConfig,\n UpdateMutationFnParams,\n UtilsRecord,\n} from \"@tanstack/db\"\nimport type { StandardSchemaV1 } from \"@standard-schema/spec\"\nimport type {\n ControlMessage,\n GetExtensions,\n Message,\n PostgresSnapshot,\n Row,\n ShapeStreamOptions,\n} from \"@electric-sql/client\"\n\n// Re-export for user convenience in custom match functions\nexport { isChangeMessage, isControlMessage } from \"@electric-sql/client\"\n\nconst debug = DebugModule.debug(`ts/db:electric`)\n\n/**\n * Type representing a transaction ID in ElectricSQL\n */\nexport type Txid = number\n\n/**\n * Custom match function type - receives stream messages and returns boolean\n * indicating if the mutation has been synchronized\n */\nexport type MatchFunction<T extends Row<unknown>> = (\n message: Message<T>\n) => boolean\n\n/**\n * Matching strategies for Electric synchronization\n * Handlers can return:\n * - Txid strategy: { txid: number | number[] } (recommended)\n * - Void (no return value) - mutation completes without waiting\n */\nexport type MatchingStrategy = { txid: Txid | Array<Txid> } | void\n\n/**\n * Type representing a snapshot end message\n */\ntype SnapshotEndMessage = ControlMessage & {\n headers: { control: `snapshot-end` }\n}\n// The `InferSchemaOutput` and `ResolveType` are copied from the `@tanstack/db` package\n// but we modified `InferSchemaOutput` slightly to restrict the schema output to `Row<unknown>`\n// This is needed in order for `GetExtensions` to be able to infer the parser extensions type from the schema\ntype InferSchemaOutput<T> = T extends StandardSchemaV1\n ? StandardSchemaV1.InferOutput<T> extends Row<unknown>\n ? 
StandardSchemaV1.InferOutput<T>\n : Record<string, unknown>\n : Record<string, unknown>\n\n/**\n * Configuration interface for Electric collection options\n * @template T - The type of items in the collection\n * @template TSchema - The schema type for validation\n */\nexport interface ElectricCollectionConfig<\n T extends Row<unknown> = Row<unknown>,\n TSchema extends StandardSchemaV1 = never,\n> extends Omit<\n BaseCollectionConfig<T, string | number, TSchema, UtilsRecord, any>,\n `onInsert` | `onUpdate` | `onDelete`\n > {\n /**\n * Configuration options for the ElectricSQL ShapeStream\n */\n shapeOptions: ShapeStreamOptions<GetExtensions<T>>\n\n /**\n * Optional asynchronous handler function called before an insert operation\n * @param params Object containing transaction and collection information\n * @returns Promise resolving to { txid } or void\n * @example\n * // Basic Electric insert handler with txid (recommended)\n * onInsert: async ({ transaction }) => {\n * const newItem = transaction.mutations[0].modified\n * const result = await api.todos.create({\n * data: newItem\n * })\n * return { txid: result.txid }\n * }\n *\n * @example\n * // Insert handler with multiple items - return array of txids\n * onInsert: async ({ transaction }) => {\n * const items = transaction.mutations.map(m => m.modified)\n * const results = await Promise.all(\n * items.map(item => api.todos.create({ data: item }))\n * )\n * return { txid: results.map(r => r.txid) }\n * }\n *\n * @example\n * // Use awaitMatch utility for custom matching\n * onInsert: async ({ transaction, collection }) => {\n * const newItem = transaction.mutations[0].modified\n * await api.todos.create({ data: newItem })\n * await collection.utils.awaitMatch(\n * (message) => isChangeMessage(message) &&\n * message.headers.operation === 'insert' &&\n * message.value.name === newItem.name\n * )\n * }\n */\n onInsert?: (params: InsertMutationFnParams<T>) => Promise<MatchingStrategy>\n\n /**\n * Optional asynchronous handler function called before an update operation\n * @param params Object containing transaction and collection information\n * @returns Promise resolving to { txid } or void\n * @example\n * // Basic Electric update handler with txid (recommended)\n * onUpdate: async ({ transaction }) => {\n * const { original, changes } = transaction.mutations[0]\n * const result = await api.todos.update({\n * where: { id: original.id },\n * data: changes\n * })\n * return { txid: result.txid }\n * }\n *\n * @example\n * // Use awaitMatch utility for custom matching\n * onUpdate: async ({ transaction, collection }) => {\n * const { original, changes } = transaction.mutations[0]\n * await api.todos.update({ where: { id: original.id }, data: changes })\n * await collection.utils.awaitMatch(\n * (message) => isChangeMessage(message) &&\n * message.headers.operation === 'update' &&\n * message.value.id === original.id\n * )\n * }\n */\n onUpdate?: (params: UpdateMutationFnParams<T>) => Promise<MatchingStrategy>\n\n /**\n * Optional asynchronous handler function called before a delete operation\n * @param params Object containing transaction and collection information\n * @returns Promise resolving to { txid } or void\n * @example\n * // Basic Electric delete handler with txid (recommended)\n * onDelete: async ({ transaction }) => {\n * const mutation = transaction.mutations[0]\n * const result = await api.todos.delete({\n * id: mutation.original.id\n * })\n * return { txid: result.txid }\n * }\n *\n * @example\n * // Use awaitMatch utility 
for custom matching\n * onDelete: async ({ transaction, collection }) => {\n * const mutation = transaction.mutations[0]\n * await api.todos.delete({ id: mutation.original.id })\n * await collection.utils.awaitMatch(\n * (message) => isChangeMessage(message) &&\n * message.headers.operation === 'delete' &&\n * message.value.id === mutation.original.id\n * )\n * }\n */\n onDelete?: (params: DeleteMutationFnParams<T>) => Promise<MatchingStrategy>\n}\n\nfunction isUpToDateMessage<T extends Row<unknown>>(\n message: Message<T>\n): message is ControlMessage & { up_to_date: true } {\n return isControlMessage(message) && message.headers.control === `up-to-date`\n}\n\nfunction isMustRefetchMessage<T extends Row<unknown>>(\n message: Message<T>\n): message is ControlMessage & { headers: { control: `must-refetch` } } {\n return isControlMessage(message) && message.headers.control === `must-refetch`\n}\n\nfunction isSnapshotEndMessage<T extends Row<unknown>>(\n message: Message<T>\n): message is SnapshotEndMessage {\n return isControlMessage(message) && message.headers.control === `snapshot-end`\n}\n\nfunction parseSnapshotMessage(message: SnapshotEndMessage): PostgresSnapshot {\n return {\n xmin: message.headers.xmin,\n xmax: message.headers.xmax,\n xip_list: message.headers.xip_list,\n }\n}\n\n// Check if a message contains txids in its headers\nfunction hasTxids<T extends Row<unknown>>(\n message: Message<T>\n): message is Message<T> & { headers: { txids?: Array<Txid> } } {\n return `txids` in message.headers && Array.isArray(message.headers.txids)\n}\n\n/**\n * Type for the awaitTxId utility function\n */\nexport type AwaitTxIdFn = (txId: Txid, timeout?: number) => Promise<boolean>\n\n/**\n * Type for the awaitMatch utility function\n */\nexport type AwaitMatchFn<T extends Row<unknown>> = (\n matchFn: MatchFunction<T>,\n timeout?: number\n) => Promise<boolean>\n\n/**\n * Electric collection utilities type\n */\nexport interface ElectricCollectionUtils<T extends Row<unknown> = Row<unknown>>\n extends UtilsRecord {\n awaitTxId: AwaitTxIdFn\n awaitMatch: AwaitMatchFn<T>\n}\n\n/**\n * Creates Electric collection options for use with a standard Collection\n *\n * @template T - The explicit type of items in the collection (highest priority)\n * @template TSchema - The schema type for validation and type inference (second priority)\n * @template TFallback - The fallback type if no explicit or schema type is provided\n * @param config - Configuration options for the Electric collection\n * @returns Collection options with utilities\n */\n\n// Overload for when schema is provided\nexport function electricCollectionOptions<T extends StandardSchemaV1>(\n config: ElectricCollectionConfig<InferSchemaOutput<T>, T> & {\n schema: T\n }\n): CollectionConfig<InferSchemaOutput<T>, string | number, T> & {\n id?: string\n utils: ElectricCollectionUtils\n schema: T\n}\n\n// Overload for when no schema is provided\nexport function electricCollectionOptions<T extends Row<unknown>>(\n config: ElectricCollectionConfig<T> & {\n schema?: never // prohibit schema\n }\n): CollectionConfig<T, string | number> & {\n id?: string\n utils: ElectricCollectionUtils\n schema?: never // no schema in the result\n}\n\nexport function electricCollectionOptions(\n config: ElectricCollectionConfig<any, any>\n): CollectionConfig<any, string | number, any> & {\n id?: string\n utils: ElectricCollectionUtils\n schema?: any\n} {\n const seenTxids = new Store<Set<Txid>>(new Set([]))\n const seenSnapshots = new 
Store<Array<PostgresSnapshot>>([])\n const pendingMatches = new Store<\n Map<\n string,\n {\n matchFn: (message: Message<any>) => boolean\n resolve: (value: boolean) => void\n reject: (error: Error) => void\n timeoutId: ReturnType<typeof setTimeout>\n matched: boolean\n }\n >\n >(new Map())\n\n // Buffer messages since last up-to-date to handle race conditions\n const currentBatchMessages = new Store<Array<Message<any>>>([])\n\n /**\n * Helper function to remove multiple matches from the pendingMatches store\n */\n const removePendingMatches = (matchIds: Array<string>) => {\n if (matchIds.length > 0) {\n pendingMatches.setState((current) => {\n const newMatches = new Map(current)\n matchIds.forEach((id) => newMatches.delete(id))\n return newMatches\n })\n }\n }\n\n /**\n * Helper function to resolve and cleanup matched pending matches\n */\n const resolveMatchedPendingMatches = () => {\n const matchesToResolve: Array<string> = []\n pendingMatches.state.forEach((match, matchId) => {\n if (match.matched) {\n clearTimeout(match.timeoutId)\n match.resolve(true)\n matchesToResolve.push(matchId)\n debug(\n `${config.id ? `[${config.id}] ` : ``}awaitMatch resolved on up-to-date for match %s`,\n matchId\n )\n }\n })\n removePendingMatches(matchesToResolve)\n }\n const sync = createElectricSync<any>(config.shapeOptions, {\n seenTxids,\n seenSnapshots,\n pendingMatches,\n currentBatchMessages,\n removePendingMatches,\n resolveMatchedPendingMatches,\n collectionId: config.id,\n })\n\n /**\n * Wait for a specific transaction ID to be synced\n * @param txId The transaction ID to wait for as a number\n * @param timeout Optional timeout in milliseconds (defaults to 5000ms)\n * @returns Promise that resolves when the txId is synced\n */\n const awaitTxId: AwaitTxIdFn = async (\n txId: Txid,\n timeout: number = 5000\n ): Promise<boolean> => {\n debug(\n `${config.id ? `[${config.id}] ` : ``}awaitTxId called with txid %d`,\n txId\n )\n if (typeof txId !== `number`) {\n throw new ExpectedNumberInAwaitTxIdError(typeof txId, config.id)\n }\n\n // First check if the txid is in the seenTxids store\n const hasTxid = seenTxids.state.has(txId)\n if (hasTxid) return true\n\n // Then check if the txid is in any of the seen snapshots\n const hasSnapshot = seenSnapshots.state.some((snapshot) =>\n isVisibleInSnapshot(txId, snapshot)\n )\n if (hasSnapshot) return true\n\n return new Promise((resolve, reject) => {\n const timeoutId = setTimeout(() => {\n unsubscribeSeenTxids()\n unsubscribeSeenSnapshots()\n reject(new TimeoutWaitingForTxIdError(txId, config.id))\n }, timeout)\n\n const unsubscribeSeenTxids = seenTxids.subscribe(() => {\n if (seenTxids.state.has(txId)) {\n debug(\n `${config.id ? `[${config.id}] ` : ``}awaitTxId found match for txid %o`,\n txId\n )\n clearTimeout(timeoutId)\n unsubscribeSeenTxids()\n unsubscribeSeenSnapshots()\n resolve(true)\n }\n })\n\n const unsubscribeSeenSnapshots = seenSnapshots.subscribe(() => {\n const visibleSnapshot = seenSnapshots.state.find((snapshot) =>\n isVisibleInSnapshot(txId, snapshot)\n )\n if (visibleSnapshot) {\n debug(\n `${config.id ? 
`[${config.id}] ` : ``}awaitTxId found match for txid %o in snapshot %o`,\n txId,\n visibleSnapshot\n )\n clearTimeout(timeoutId)\n unsubscribeSeenSnapshots()\n unsubscribeSeenTxids()\n resolve(true)\n }\n })\n })\n }\n\n /**\n * Wait for a custom match function to find a matching message\n * @param matchFn Function that returns true when a message matches\n * @param timeout Optional timeout in milliseconds (defaults to 5000ms)\n * @returns Promise that resolves when a matching message is found\n */\n const awaitMatch: AwaitMatchFn<any> = async (\n matchFn: MatchFunction<any>,\n timeout: number = 3000\n ): Promise<boolean> => {\n debug(\n `${config.id ? `[${config.id}] ` : ``}awaitMatch called with custom function`\n )\n\n return new Promise((resolve, reject) => {\n const matchId = Math.random().toString(36)\n\n const cleanupMatch = () => {\n pendingMatches.setState((current) => {\n const newMatches = new Map(current)\n newMatches.delete(matchId)\n return newMatches\n })\n }\n\n const onTimeout = () => {\n cleanupMatch()\n reject(new TimeoutWaitingForMatchError(config.id))\n }\n\n const timeoutId = setTimeout(onTimeout, timeout)\n\n // We need access to the stream messages to check against the match function\n // This will be handled by the sync configuration\n const checkMatch = (message: Message<any>) => {\n if (matchFn(message)) {\n debug(\n `${config.id ? `[${config.id}] ` : ``}awaitMatch found matching message, waiting for up-to-date`\n )\n // Mark as matched but don't resolve yet - wait for up-to-date\n pendingMatches.setState((current) => {\n const newMatches = new Map(current)\n const existing = newMatches.get(matchId)\n if (existing) {\n newMatches.set(matchId, { ...existing, matched: true })\n }\n return newMatches\n })\n return true\n }\n return false\n }\n\n // Check against current batch messages first to handle race conditions\n for (const message of currentBatchMessages.state) {\n if (matchFn(message)) {\n debug(\n `${config.id ? `[${config.id}] ` : ``}awaitMatch found immediate match in current batch, waiting for up-to-date`\n )\n // Register match as already matched\n pendingMatches.setState((current) => {\n const newMatches = new Map(current)\n newMatches.set(matchId, {\n matchFn: checkMatch,\n resolve,\n reject,\n timeoutId,\n matched: true, // Already matched\n })\n return newMatches\n })\n return\n }\n }\n\n // Store the match function for the sync process to use\n // We'll add this to a pending matches store\n pendingMatches.setState((current) => {\n const newMatches = new Map(current)\n newMatches.set(matchId, {\n matchFn: checkMatch,\n resolve,\n reject,\n timeoutId,\n matched: false,\n })\n return newMatches\n })\n })\n }\n\n /**\n * Process matching strategy and wait for synchronization\n */\n const processMatchingStrategy = async (\n result: MatchingStrategy\n ): Promise<void> => {\n // Only wait if result contains txid\n if (result && `txid` in result) {\n // Handle both single txid and array of txids\n if (Array.isArray(result.txid)) {\n await Promise.all(result.txid.map(awaitTxId))\n } else {\n await awaitTxId(result.txid)\n }\n }\n // If result is void/undefined, don't wait - mutation completes immediately\n }\n\n // Create wrapper handlers for direct persistence operations that handle different matching strategies\n const wrappedOnInsert = config.onInsert\n ? 
async (params: InsertMutationFnParams<any>) => {\n const handlerResult = await config.onInsert!(params)\n await processMatchingStrategy(handlerResult)\n return handlerResult\n }\n : undefined\n\n const wrappedOnUpdate = config.onUpdate\n ? async (params: UpdateMutationFnParams<any>) => {\n const handlerResult = await config.onUpdate!(params)\n await processMatchingStrategy(handlerResult)\n return handlerResult\n }\n : undefined\n\n const wrappedOnDelete = config.onDelete\n ? async (params: DeleteMutationFnParams<any>) => {\n const handlerResult = await config.onDelete!(params)\n await processMatchingStrategy(handlerResult)\n return handlerResult\n }\n : undefined\n\n // Extract standard Collection config properties\n const {\n shapeOptions: _shapeOptions,\n onInsert: _onInsert,\n onUpdate: _onUpdate,\n onDelete: _onDelete,\n ...restConfig\n } = config\n\n return {\n ...restConfig,\n sync,\n onInsert: wrappedOnInsert,\n onUpdate: wrappedOnUpdate,\n onDelete: wrappedOnDelete,\n utils: {\n awaitTxId,\n awaitMatch,\n } as ElectricCollectionUtils<any>,\n }\n}\n\n/**\n * Internal function to create ElectricSQL sync configuration\n */\nfunction createElectricSync<T extends Row<unknown>>(\n shapeOptions: ShapeStreamOptions<GetExtensions<T>>,\n options: {\n seenTxids: Store<Set<Txid>>\n seenSnapshots: Store<Array<PostgresSnapshot>>\n pendingMatches: Store<\n Map<\n string,\n {\n matchFn: (message: Message<T>) => boolean\n resolve: (value: boolean) => void\n reject: (error: Error) => void\n timeoutId: ReturnType<typeof setTimeout>\n matched: boolean\n }\n >\n >\n currentBatchMessages: Store<Array<Message<T>>>\n removePendingMatches: (matchIds: Array<string>) => void\n resolveMatchedPendingMatches: () => void\n collectionId?: string\n }\n): SyncConfig<T> {\n const {\n seenTxids,\n seenSnapshots,\n pendingMatches,\n currentBatchMessages,\n removePendingMatches,\n resolveMatchedPendingMatches,\n collectionId,\n } = options\n const MAX_BATCH_MESSAGES = 1000 // Safety limit for message buffer\n\n // Store for the relation schema information\n const relationSchema = new Store<string | undefined>(undefined)\n\n /**\n * Get the sync metadata for insert operations\n * @returns Record containing relation information\n */\n const getSyncMetadata = (): Record<string, unknown> => {\n // Use the stored schema if available, otherwise default to 'public'\n const schema = relationSchema.state || `public`\n\n return {\n relation: shapeOptions.params?.table\n ? 
[schema, shapeOptions.params.table]\n : undefined,\n }\n }\n\n let unsubscribeStream: () => void\n\n return {\n sync: (params: Parameters<SyncConfig<T>[`sync`]>[0]) => {\n const { begin, write, commit, markReady, truncate, collection } = params\n\n // Abort controller for the stream - wraps the signal if provided\n const abortController = new AbortController()\n\n if (shapeOptions.signal) {\n shapeOptions.signal.addEventListener(\n `abort`,\n () => {\n abortController.abort()\n },\n {\n once: true,\n }\n )\n if (shapeOptions.signal.aborted) {\n abortController.abort()\n }\n }\n\n // Cleanup pending matches on abort\n abortController.signal.addEventListener(`abort`, () => {\n pendingMatches.setState((current) => {\n current.forEach((match) => {\n clearTimeout(match.timeoutId)\n match.reject(new StreamAbortedError())\n })\n return new Map() // Clear all pending matches\n })\n })\n\n const stream = new ShapeStream({\n ...shapeOptions,\n signal: abortController.signal,\n onError: (errorParams) => {\n // Just immediately mark ready if there's an error to avoid blocking\n // apps waiting for `.preload()` to finish.\n // Note that Electric sends a 409 error on a `must-refetch` message, but the\n // ShapeStream handled this and it will not reach this handler, therefor\n // this markReady will not be triggers by a `must-refetch`.\n markReady()\n\n if (shapeOptions.onError) {\n return shapeOptions.onError(errorParams)\n } else {\n console.error(\n `An error occurred while syncing collection: ${collection.id}, \\n` +\n `it has been marked as ready to avoid blocking apps waiting for '.preload()' to finish. \\n` +\n `You can provide an 'onError' handler on the shapeOptions to handle this error, and this message will not be logged.`,\n errorParams\n )\n }\n\n return\n },\n })\n let transactionStarted = false\n const newTxids = new Set<Txid>()\n const newSnapshots: Array<PostgresSnapshot> = []\n\n unsubscribeStream = stream.subscribe((messages: Array<Message<T>>) => {\n let hasUpToDate = false\n\n for (const message of messages) {\n // Add message to current batch buffer (for race condition handling)\n if (isChangeMessage(message)) {\n currentBatchMessages.setState((currentBuffer) => {\n const newBuffer = [...currentBuffer, message]\n // Limit buffer size for safety\n if (newBuffer.length > MAX_BATCH_MESSAGES) {\n newBuffer.splice(0, newBuffer.length - MAX_BATCH_MESSAGES)\n }\n return newBuffer\n })\n }\n\n // Check for txids in the message and add them to our store\n if (hasTxids(message)) {\n message.headers.txids?.forEach((txid) => newTxids.add(txid))\n }\n\n // Check pending matches against this message\n // Note: matchFn will mark matches internally, we don't resolve here\n const matchesToRemove: Array<string> = []\n pendingMatches.state.forEach((match, matchId) => {\n if (!match.matched) {\n try {\n match.matchFn(message)\n } catch (err) {\n // If matchFn throws, clean up and reject the promise\n clearTimeout(match.timeoutId)\n match.reject(\n err instanceof Error ? 
err : new Error(String(err))\n )\n matchesToRemove.push(matchId)\n debug(`matchFn error: %o`, err)\n }\n }\n })\n\n // Remove matches that errored\n removePendingMatches(matchesToRemove)\n\n if (isChangeMessage(message)) {\n // Check if the message contains schema information\n const schema = message.headers.schema\n if (schema && typeof schema === `string`) {\n // Store the schema for future use if it's a valid string\n relationSchema.setState(() => schema)\n }\n\n if (!transactionStarted) {\n begin()\n transactionStarted = true\n }\n\n write({\n type: message.headers.operation,\n value: message.value,\n // Include the primary key and relation info in the metadata\n metadata: {\n ...message.headers,\n },\n })\n } else if (isSnapshotEndMessage(message)) {\n newSnapshots.push(parseSnapshotMessage(message))\n } else if (isUpToDateMessage(message)) {\n hasUpToDate = true\n } else if (isMustRefetchMessage(message)) {\n debug(\n `${collectionId ? `[${collectionId}] ` : ``}Received must-refetch message, starting transaction with truncate`\n )\n\n // Start a transaction and truncate the collection\n if (!transactionStarted) {\n begin()\n transactionStarted = true\n }\n\n truncate()\n\n // Reset hasUpToDate so we continue accumulating changes until next up-to-date\n hasUpToDate = false\n }\n }\n\n if (hasUpToDate) {\n // Clear the current batch buffer since we're now up-to-date\n currentBatchMessages.setState(() => [])\n\n // Commit transaction if one was started\n if (transactionStarted) {\n commit()\n transactionStarted = false\n }\n\n // Mark the collection as ready now that sync is up to date\n markReady()\n\n // Always commit txids when we receive up-to-date, regardless of transaction state\n seenTxids.setState((currentTxids) => {\n const clonedSeen = new Set<Txid>(currentTxids)\n if (newTxids.size > 0) {\n debug(\n `${collectionId ? `[${collectionId}] ` : ``}new txids synced from pg %O`,\n Array.from(newTxids)\n )\n }\n newTxids.forEach((txid) => clonedSeen.add(txid))\n newTxids.clear()\n return clonedSeen\n })\n\n // Always commit snapshots when we receive up-to-date, regardless of transaction state\n seenSnapshots.setState((currentSnapshots) => {\n const seen = [...currentSnapshots, ...newSnapshots]\n newSnapshots.forEach((snapshot) =>\n debug(\n `${collectionId ? 
[Source map for the compiled bundle: the tail of the embedded `sourcesContent`, the `names` array (isControlMessage, Store, ExpectedNumberInAwaitTxIdError, isVisibleInSnapshot, TimeoutWaitingForTxIdError, TimeoutWaitingForMatchError, StreamAbortedError, ShapeStream, isChangeMessage), and the base64-VLQ `mappings` data.]
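The declaration-file diff below formalizes the two matching strategies this release adds to the write-path handlers: returning a txid (or array of txids) from `onInsert`/`onUpdate`/`onDelete`, or returning nothing and waiting on a custom predicate with `awaitMatch`. As an orientation aid, here is a minimal, hedged sketch of both strategies side by side. It assumes `createCollection` from `@tanstack/db`, a hypothetical `api.todos` backend client, and an illustrative shape URL; treat anything not shown in the `.d.ts` diff as a placeholder rather than the package's prescribed setup.

```ts
import { createCollection } from '@tanstack/db'
import {
  electricCollectionOptions,
  isChangeMessage,
} from '@tanstack/electric-db-collection'

// Hypothetical backend client, declared only so the sketch type-checks.
declare const api: {
  todos: {
    create: (args: { data: unknown }) => Promise<{ txid: number }>
    delete: (args: { id: string }) => Promise<void>
  }
}

type Todo = { id: string; title: string }

const todos = createCollection(
  electricCollectionOptions<Todo>({
    id: 'todos',
    getKey: (todo) => todo.id,
    shapeOptions: {
      url: 'https://example.com/v1/shape', // illustrative endpoint
      params: { table: 'todos' },
    },

    // Strategy 1 (recommended): return the txid(s) reported by the server.
    // The mutation is confirmed once that txid is observed on the shape stream.
    onInsert: async ({ transaction }) => {
      const newItem = transaction.mutations[0].modified
      const result = await api.todos.create({ data: newItem })
      return { txid: result.txid }
    },

    // Strategy 2: return nothing and instead wait for a matching stream
    // message via the awaitMatch utility.
    onDelete: async ({ transaction, collection }) => {
      const mutation = transaction.mutations[0]
      await api.todos.delete({ id: mutation.original.id })
      await collection.utils.awaitMatch(
        (message) =>
          isChangeMessage(message) &&
          message.headers.operation === 'delete' &&
          message.value.id === mutation.original.id
      )
    },
  })
)
```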
@@ -1,33 +1,143 @@
1
- import { BaseCollectionConfig, CollectionConfig, Fn, UtilsRecord } from '@tanstack/db';
1
+ import { BaseCollectionConfig, CollectionConfig, DeleteMutationFnParams, InsertMutationFnParams, UpdateMutationFnParams, UtilsRecord } from '@tanstack/db';
2
2
  import { StandardSchemaV1 } from '@standard-schema/spec';
3
- import { GetExtensions, Row, ShapeStreamOptions } from '@electric-sql/client';
3
+ import { GetExtensions, Message, Row, ShapeStreamOptions } from '@electric-sql/client';
4
+ export { isChangeMessage, isControlMessage } from '@electric-sql/client';
4
5
  /**
5
6
  * Type representing a transaction ID in ElectricSQL
6
7
  */
7
8
  export type Txid = number;
9
+ /**
10
+ * Custom match function type - receives stream messages and returns boolean
11
+ * indicating if the mutation has been synchronized
12
+ */
13
+ export type MatchFunction<T extends Row<unknown>> = (message: Message<T>) => boolean;
14
+ /**
15
+ * Matching strategies for Electric synchronization
16
+ * Handlers can return:
17
+ * - Txid strategy: { txid: number | number[] } (recommended)
18
+ * - Void (no return value) - mutation completes without waiting
19
+ */
20
+ export type MatchingStrategy = {
21
+ txid: Txid | Array<Txid>;
22
+ } | void;
8
23
  type InferSchemaOutput<T> = T extends StandardSchemaV1 ? StandardSchemaV1.InferOutput<T> extends Row<unknown> ? StandardSchemaV1.InferOutput<T> : Record<string, unknown> : Record<string, unknown>;
9
24
  /**
10
25
  * Configuration interface for Electric collection options
11
26
  * @template T - The type of items in the collection
12
27
  * @template TSchema - The schema type for validation
13
28
  */
14
- export interface ElectricCollectionConfig<T extends Row<unknown> = Row<unknown>, TSchema extends StandardSchemaV1 = never> extends BaseCollectionConfig<T, string | number, TSchema, Record<string, Fn>, {
15
- txid: Txid | Array<Txid>;
16
- }> {
29
+ export interface ElectricCollectionConfig<T extends Row<unknown> = Row<unknown>, TSchema extends StandardSchemaV1 = never> extends Omit<BaseCollectionConfig<T, string | number, TSchema, UtilsRecord, any>, `onInsert` | `onUpdate` | `onDelete`> {
17
30
  /**
18
31
  * Configuration options for the ElectricSQL ShapeStream
19
32
  */
20
33
  shapeOptions: ShapeStreamOptions<GetExtensions<T>>;
34
+ /**
35
+ * Optional asynchronous handler function called before an insert operation
36
+ * @param params Object containing transaction and collection information
37
+ * @returns Promise resolving to { txid } or void
38
+ * @example
39
+ * // Basic Electric insert handler with txid (recommended)
40
+ * onInsert: async ({ transaction }) => {
41
+ * const newItem = transaction.mutations[0].modified
42
+ * const result = await api.todos.create({
43
+ * data: newItem
44
+ * })
45
+ * return { txid: result.txid }
46
+ * }
47
+ *
48
+ * @example
49
+ * // Insert handler with multiple items - return array of txids
50
+ * onInsert: async ({ transaction }) => {
51
+ * const items = transaction.mutations.map(m => m.modified)
52
+ * const results = await Promise.all(
53
+ * items.map(item => api.todos.create({ data: item }))
54
+ * )
55
+ * return { txid: results.map(r => r.txid) }
56
+ * }
57
+ *
58
+ * @example
59
+ * // Use awaitMatch utility for custom matching
60
+ * onInsert: async ({ transaction, collection }) => {
61
+ * const newItem = transaction.mutations[0].modified
62
+ * await api.todos.create({ data: newItem })
63
+ * await collection.utils.awaitMatch(
64
+ * (message) => isChangeMessage(message) &&
65
+ * message.headers.operation === 'insert' &&
66
+ * message.value.name === newItem.name
67
+ * )
68
+ * }
69
+ */
70
+ onInsert?: (params: InsertMutationFnParams<T>) => Promise<MatchingStrategy>;
71
+ /**
72
+ * Optional asynchronous handler function called before an update operation
73
+ * @param params Object containing transaction and collection information
74
+ * @returns Promise resolving to { txid } or void
75
+ * @example
76
+ * // Basic Electric update handler with txid (recommended)
77
+ * onUpdate: async ({ transaction }) => {
78
+ * const { original, changes } = transaction.mutations[0]
79
+ * const result = await api.todos.update({
80
+ * where: { id: original.id },
81
+ * data: changes
82
+ * })
83
+ * return { txid: result.txid }
84
+ * }
85
+ *
86
+ * @example
87
+ * // Use awaitMatch utility for custom matching
88
+ * onUpdate: async ({ transaction, collection }) => {
89
+ * const { original, changes } = transaction.mutations[0]
90
+ * await api.todos.update({ where: { id: original.id }, data: changes })
91
+ * await collection.utils.awaitMatch(
92
+ * (message) => isChangeMessage(message) &&
93
+ * message.headers.operation === 'update' &&
94
+ * message.value.id === original.id
95
+ * )
96
+ * }
97
+ */
98
+ onUpdate?: (params: UpdateMutationFnParams<T>) => Promise<MatchingStrategy>;
99
+ /**
100
+ * Optional asynchronous handler function called before a delete operation
101
+ * @param params Object containing transaction and collection information
102
+ * @returns Promise resolving to { txid } or void
103
+ * @example
104
+ * // Basic Electric delete handler with txid (recommended)
105
+ * onDelete: async ({ transaction }) => {
106
+ * const mutation = transaction.mutations[0]
107
+ * const result = await api.todos.delete({
108
+ * id: mutation.original.id
109
+ * })
110
+ * return { txid: result.txid }
111
+ * }
112
+ *
113
+ * @example
114
+ * // Use awaitMatch utility for custom matching
115
+ * onDelete: async ({ transaction, collection }) => {
116
+ * const mutation = transaction.mutations[0]
117
+ * await api.todos.delete({ id: mutation.original.id })
118
+ * await collection.utils.awaitMatch(
119
+ * (message) => isChangeMessage(message) &&
120
+ * message.headers.operation === 'delete' &&
121
+ * message.value.id === mutation.original.id
122
+ * )
123
+ * }
124
+ */
125
+ onDelete?: (params: DeleteMutationFnParams<T>) => Promise<MatchingStrategy>;
21
126
  }
22
127
  /**
23
128
  * Type for the awaitTxId utility function
24
129
  */
25
130
  export type AwaitTxIdFn = (txId: Txid, timeout?: number) => Promise<boolean>;
131
+ /**
132
+ * Type for the awaitMatch utility function
133
+ */
134
+ export type AwaitMatchFn<T extends Row<unknown>> = (matchFn: MatchFunction<T>, timeout?: number) => Promise<boolean>;
26
135
  /**
27
136
  * Electric collection utilities type
28
137
  */
29
- export interface ElectricCollectionUtils extends UtilsRecord {
138
+ export interface ElectricCollectionUtils<T extends Row<unknown> = Row<unknown>> extends UtilsRecord {
30
139
  awaitTxId: AwaitTxIdFn;
140
+ awaitMatch: AwaitMatchFn<T>;
31
141
  }
32
142
  /**
33
143
  * Creates Electric collection options for use with a standard Collection
@@ -52,4 +162,3 @@ export declare function electricCollectionOptions<T extends Row<unknown>>(config
52
162
  utils: ElectricCollectionUtils;
53
163
  schema?: never;
54
164
  };
55
- export {};
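For completeness, a small hedged sketch of calling the two utilities directly, based only on the `AwaitTxIdFn`, `AwaitMatchFn`, and now-generic `ElectricCollectionUtils<T>` declarations above. The `utils` value and the `Todo` row shape are stand-ins for whatever collection you actually create; the timeout value is an arbitrary override, not a documented default.

```ts
import {
  isChangeMessage,
  type ElectricCollectionUtils,
} from '@tanstack/electric-db-collection'

type Todo = { id: string; title: string }

// Stand-in for the `utils` exposed by a real Electric collection.
declare const utils: ElectricCollectionUtils<Todo>

// awaitTxId: resolves true once the sync layer has observed the given txid;
// otherwise rejects after the optional timeout (milliseconds).
async function confirmWrite(txid: number): Promise<void> {
  await utils.awaitTxId(txid, 10_000)
}

// awaitMatch: resolves once the predicate matches an incoming stream message;
// otherwise rejects after the optional timeout.
async function confirmInsertByTitle(title: string): Promise<void> {
  await utils.awaitMatch(
    (message) =>
      isChangeMessage(message) &&
      message.headers.operation === 'insert' &&
      message.value.title === title
  )
}
```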