@tanstack/electric-db-collection 0.1.44 → 0.2.1

This diff shows the content of publicly released package versions as they appear in their respective public registries, and is provided for informational purposes only.
@@ -0,0 +1,151 @@
+ import { serialize } from "./pg-serializer.js";
+ function compileSQL(options) {
+   const { where, orderBy, limit } = options;
+   const params = [];
+   const compiledSQL = { params };
+   if (where) {
+     compiledSQL.where = compileBasicExpression(where, params);
+   }
+   if (orderBy) {
+     compiledSQL.orderBy = compileOrderBy(orderBy, params);
+   }
+   if (limit) {
+     compiledSQL.limit = limit;
+   }
+   if (!where) {
+     compiledSQL.where = `true = true`;
+   }
+   const paramsRecord = params.reduce(
+     (acc, param, index) => {
+       const serialized = serialize(param);
+       if (serialized !== ``) {
+         acc[`${index + 1}`] = serialized;
+       }
+       return acc;
+     },
+     {}
+   );
+   return {
+     ...compiledSQL,
+     params: paramsRecord
+   };
+ }
+ function quoteIdentifier(name) {
+   return `"${name}"`;
+ }
+ function compileBasicExpression(exp, params) {
+   switch (exp.type) {
+     case `val`:
+       params.push(exp.value);
+       return `$${params.length}`;
+     case `ref`:
+       if (exp.path.length !== 1) {
+         throw new Error(
+           `Compiler can't handle nested properties: ${exp.path.join(`.`)}`
+         );
+       }
+       return quoteIdentifier(exp.path[0]);
+     case `func`:
+       return compileFunction(exp, params);
+     default:
+       throw new Error(`Unknown expression type`);
+   }
+ }
+ function compileOrderBy(orderBy, params) {
+   const compiledOrderByClauses = orderBy.map(
+     (clause) => compileOrderByClause(clause, params)
+   );
+   return compiledOrderByClauses.join(`,`);
+ }
+ function compileOrderByClause(clause, params) {
+   const { expression, compareOptions } = clause;
+   let sql = compileBasicExpression(expression, params);
+   if (compareOptions.direction === `desc`) {
+     sql = `${sql} DESC`;
+   }
+   if (compareOptions.nulls === `first`) {
+     sql = `${sql} NULLS FIRST`;
+   }
+   if (compareOptions.nulls === `last`) {
+     sql = `${sql} NULLS LAST`;
+   }
+   return sql;
+ }
+ function compileFunction(exp, params = []) {
+   const { name, args } = exp;
+   const opName = getOpName(name);
+   const compiledArgs = args.map(
+     (arg) => compileBasicExpression(arg, params)
+   );
+   if (name === `isNull` || name === `isUndefined`) {
+     if (compiledArgs.length !== 1) {
+       throw new Error(`${name} expects 1 argument`);
+     }
+     return `${compiledArgs[0]} ${opName}`;
+   }
+   if (name === `not`) {
+     if (compiledArgs.length !== 1) {
+       throw new Error(`NOT expects 1 argument`);
+     }
+     const arg = args[0];
+     if (arg && arg.type === `func`) {
+       const funcArg = arg;
+       if (funcArg.name === `isNull` || funcArg.name === `isUndefined`) {
+         const innerArg = compileBasicExpression(funcArg.args[0], params);
+         return `${innerArg} IS NOT NULL`;
+       }
+     }
+     return `${opName} (${compiledArgs[0]})`;
+   }
+   if (isBinaryOp(name)) {
+     if ((name === `and` || name === `or`) && compiledArgs.length > 2) {
+       return compiledArgs.map((arg) => `(${arg})`).join(` ${opName} `);
+     }
+     if (compiledArgs.length !== 2) {
+       throw new Error(`Binary operator ${name} expects 2 arguments`);
+     }
+     const [lhs, rhs] = compiledArgs;
+     if (name === `in`) {
+       return `${lhs} ${opName}(${rhs})`;
+     }
+     return `${lhs} ${opName} ${rhs}`;
+   }
+   return `${opName}(${compiledArgs.join(`,`)})`;
+ }
+ function isBinaryOp(name) {
+   const binaryOps = [`eq`, `gt`, `gte`, `lt`, `lte`, `and`, `or`, `in`];
+   return binaryOps.includes(name);
+ }
+ function getOpName(name) {
+   const opNames = {
+     eq: `=`,
+     gt: `>`,
+     gte: `>=`,
+     lt: `<`,
+     lte: `<=`,
+     add: `+`,
+     and: `AND`,
+     or: `OR`,
+     not: `NOT`,
+     isUndefined: `IS NULL`,
+     isNull: `IS NULL`,
+     in: `= ANY`, // Use = ANY syntax for array parameters
+     like: `LIKE`,
+     ilike: `ILIKE`,
+     upper: `UPPER`,
+     lower: `LOWER`,
+     length: `LENGTH`,
+     concat: `CONCAT`,
+     coalesce: `COALESCE`
+   };
+   const opName = opNames[name];
+   if (!opName) {
+     throw new Error(`Unknown operator/function: ${name}`);
+   }
+   return opName;
+ }
+ export {
+   compileSQL
+ };
+ //# sourceMappingURL=sql-compiler.js.map
@@ -0,0 +1 @@
+ {"version":3,"file":"sql-compiler.js","sources":["../../src/sql-compiler.ts"],"sourcesContent":["…"],"names":[],"mappings":"…"}
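For orientation, here is a minimal sketch of what the new compiler produces for a subset request. The expression literals are hand-built to mirror the `val`/`ref`/`func` IR shapes visible in the compiled source above, the relative import path is illustrative (the module ships in the package's dist output, not as a documented entry point), and the expected results follow directly from compileBasicExpression, getOpName, and serialize.

// Hypothetical usage; IR objects, cast, and import path are assumptions for illustration only.
import { compileSQL } from "./sql-compiler.js"

const where = {
  type: `func`,
  name: `eq`,
  args: [
    { type: `ref`, path: [`status`] }, // compiles to "status"
    { type: `val`, value: `active` },  // compiles to $1, value collected into params
  ],
}

const subset = compileSQL({ where, limit: 10 } as any)
// subset.where  === `"status" = $1`
// subset.limit  === 10
// subset.params === { "1": "active" }  (values serialized by pg-serializer)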
package/package.json CHANGED
@@ -1,17 +1,19 @@
  {
    "name": "@tanstack/electric-db-collection",
    "description": "ElectricSQL collection for TanStack DB",
-   "version": "0.1.44",
+   "version": "0.2.1",
    "dependencies": {
-     "@electric-sql/client": "^1.1.0",
+     "@electric-sql/client": "^1.1.4",
      "@standard-schema/spec": "^1.0.0",
      "@tanstack/store": "^0.8.0",
      "debug": "^4.4.3",
-     "@tanstack/db": "0.4.20"
+     "@tanstack/db": "0.5.1"
    },
    "devDependencies": {
      "@types/debug": "^4.1.12",
-     "@vitest/coverage-istanbul": "^3.2.4"
+     "@types/pg": "^8.15.6",
+     "@vitest/coverage-istanbul": "^3.2.4",
+     "pg": "^8.16.3"
    },
    "exports": {
      ".": {
@@ -53,6 +55,7 @@
      "build": "vite build",
      "dev": "vite build --watch",
      "lint": "eslint . --fix",
-     "test": "npx vitest --run"
+     "test": "npx vitest run",
+     "test:e2e": "npx vitest run --config vitest.e2e.config.ts"
    }
  }
package/src/electric.ts CHANGED
@@ -6,18 +6,22 @@ import {
  } from "@electric-sql/client"
  import { Store } from "@tanstack/store"
  import DebugModule from "debug"
+ import { DeduplicatedLoadSubset } from "@tanstack/db"
  import {
    ExpectedNumberInAwaitTxIdError,
    StreamAbortedError,
    TimeoutWaitingForMatchError,
    TimeoutWaitingForTxIdError,
  } from "./errors"
+ import { compileSQL } from "./sql-compiler"
  import type {
    BaseCollectionConfig,
    CollectionConfig,
    DeleteMutationFnParams,
    InsertMutationFnParams,
+   LoadSubsetOptions,
    SyncConfig,
+   SyncMode,
    UpdateMutationFnParams,
    UtilsRecord,
  } from "@tanstack/db"
@@ -52,10 +56,16 @@ export type MatchFunction<T extends Row<unknown>> = (
  /**
   * Matching strategies for Electric synchronization
   * Handlers can return:
-  * - Txid strategy: { txid: number | number[] } (recommended)
+  * - Txid strategy: { txid: number | number[], timeout?: number } (recommended)
   * - Void (no return value) - mutation completes without waiting
+  *
+  * The optional timeout property specifies how long to wait for the txid(s) in milliseconds.
+  * If not specified, defaults to 5000ms.
   */
- export type MatchingStrategy = { txid: Txid | Array<Txid> } | void
+ export type MatchingStrategy = {
+   txid: Txid | Array<Txid>
+   timeout?: number
+ } | void

  /**
   * Type representing a snapshot end message
@@ -72,6 +82,24 @@ type InferSchemaOutput<T> = T extends StandardSchemaV1
      : Record<string, unknown>
    : Record<string, unknown>

+ /**
+  * The mode of sync to use for the collection.
+  * @default `eager`
+  * @description
+  * - `eager`:
+  *   - syncs all data immediately on preload
+  *   - collection will be marked as ready once the sync is complete
+  *   - there is no incremental sync
+  * - `on-demand`:
+  *   - syncs data in incremental snapshots when the collection is queried
+  *   - collection will be marked as ready immediately after the first snapshot is synced
+  * - `progressive`:
+  *   - syncs all data for the collection in the background
+  *   - uses incremental snapshots during the initial sync to provide a fast path to the data required for queries
+  *   - collection will be marked as ready once the full sync is complete
+  */
+ export type ElectricSyncMode = SyncMode | `progressive`
+
  /**
   * Configuration interface for Electric collection options
   * @template T - The type of items in the collection
@@ -82,17 +110,18 @@ export interface ElectricCollectionConfig<
    TSchema extends StandardSchemaV1 = never,
  > extends Omit<
      BaseCollectionConfig<T, string | number, TSchema, UtilsRecord, any>,
-     `onInsert` | `onUpdate` | `onDelete`
+     `onInsert` | `onUpdate` | `onDelete` | `syncMode`
    > {
    /**
     * Configuration options for the ElectricSQL ShapeStream
     */
    shapeOptions: ShapeStreamOptions<GetExtensions<T>>
+   syncMode?: ElectricSyncMode

    /**
     * Optional asynchronous handler function called before an insert operation
     * @param params Object containing transaction and collection information
-    * @returns Promise resolving to { txid } or void
+    * @returns Promise resolving to { txid, timeout? } or void
     * @example
     * // Basic Electric insert handler with txid (recommended)
     * onInsert: async ({ transaction }) => {
@@ -104,6 +133,16 @@ export interface ElectricCollectionConfig<
     * }
     *
     * @example
+    * // Insert handler with custom timeout
+    * onInsert: async ({ transaction }) => {
+    *   const newItem = transaction.mutations[0].modified
+    *   const result = await api.todos.create({
+    *     data: newItem
+    *   })
+    *   return { txid: result.txid, timeout: 10000 } // Wait up to 10 seconds
+    * }
+    *
+    * @example
     * // Insert handler with multiple items - return array of txids
     * onInsert: async ({ transaction }) => {
     *   const items = transaction.mutations.map(m => m.modified)
@@ -130,7 +169,7 @@ export interface ElectricCollectionConfig<
    /**
     * Optional asynchronous handler function called before an update operation
     * @param params Object containing transaction and collection information
-    * @returns Promise resolving to { txid } or void
+    * @returns Promise resolving to { txid, timeout? } or void
     * @example
     * // Basic Electric update handler with txid (recommended)
     * onUpdate: async ({ transaction }) => {
@@ -159,7 +198,7 @@ export interface ElectricCollectionConfig<
    /**
     * Optional asynchronous handler function called before a delete operation
     * @param params Object containing transaction and collection information
-    * @returns Promise resolving to { txid } or void
+    * @returns Promise resolving to { txid, timeout? } or void
     * @example
     * // Basic Electric delete handler with txid (recommended)
     * onDelete: async ({ transaction }) => {
@@ -281,6 +320,9 @@ export function electricCollectionOptions(
  } {
    const seenTxids = new Store<Set<Txid>>(new Set([]))
    const seenSnapshots = new Store<Array<PostgresSnapshot>>([])
+   const internalSyncMode = config.syncMode ?? `eager`
+   const finalSyncMode =
+     internalSyncMode === `progressive` ? `on-demand` : internalSyncMode
    const pendingMatches = new Store<
      Map<
        string,
@@ -331,6 +373,7 @@ export function electricCollectionOptions(
    const sync = createElectricSync<any>(config.shapeOptions, {
      seenTxids,
      seenSnapshots,
+     syncMode: internalSyncMode,
      pendingMatches,
      currentBatchMessages,
      removePendingMatches,
@@ -504,11 +547,12 @@ export function electricCollectionOptions(
    ): Promise<void> => {
      // Only wait if result contains txid
      if (result && `txid` in result) {
+       const timeout = result.timeout
        // Handle both single txid and array of txids
        if (Array.isArray(result.txid)) {
-         await Promise.all(result.txid.map(awaitTxId))
+         await Promise.all(result.txid.map((txid) => awaitTxId(txid, timeout)))
        } else {
-         await awaitTxId(result.txid)
+         await awaitTxId(result.txid, timeout)
        }
      }
      // If result is void/undefined, don't wait - mutation completes immediately
@@ -550,6 +594,7 @@ export function electricCollectionOptions(

    return {
      ...restConfig,
+     syncMode: finalSyncMode,
      sync,
      onInsert: wrappedOnInsert,
      onUpdate: wrappedOnUpdate,
@@ -567,6 +612,7 @@
  function createElectricSync<T extends Row<unknown>>(
    shapeOptions: ShapeStreamOptions<GetExtensions<T>>,
    options: {
+     syncMode: ElectricSyncMode
      seenTxids: Store<Set<Txid>>
      seenSnapshots: Store<Array<PostgresSnapshot>>
      pendingMatches: Store<
@@ -590,6 +636,7 @@ function createElectricSync<T extends Row<unknown>>(
    const {
      seenTxids,
      seenSnapshots,
+     syncMode,
      pendingMatches,
      currentBatchMessages,
      removePendingMatches,
@@ -653,6 +700,12 @@ function createElectricSync<T extends Row<unknown>>(

      const stream = new ShapeStream({
        ...shapeOptions,
+       // In on-demand mode, we only want to sync changes, so we set the log to `changes_only`
+       log: syncMode === `on-demand` ? `changes_only` : undefined,
+       // In on-demand mode, we only need the changes from the point of time the collection was created
+       // so we default to `now` when there is no saved offset.
+       offset:
+         shapeOptions.offset ?? (syncMode === `on-demand` ? `now` : undefined),
        signal: abortController.signal,
        onError: (errorParams) => {
          // Just immediately mark ready if there's an error to avoid blocking
@@ -679,9 +732,28 @@ function createElectricSync<T extends Row<unknown>>(
      let transactionStarted = false
      const newTxids = new Set<Txid>()
      const newSnapshots: Array<PostgresSnapshot> = []
+     let hasReceivedUpToDate = false // Track if we've completed initial sync in progressive mode
+
+     // Create deduplicated loadSubset wrapper for non-eager modes
+     // This prevents redundant snapshot requests when multiple concurrent
+     // live queries request overlapping or subset predicates
+     const loadSubsetDedupe =
+       syncMode === `eager`
+         ? null
+         : new DeduplicatedLoadSubset({
+             loadSubset: async (opts: LoadSubsetOptions) => {
+               // In progressive mode, stop requesting snapshots once full sync is complete
+               if (syncMode === `progressive` && hasReceivedUpToDate) {
+                 return
+               }
+               const snapshotParams = compileSQL<T>(opts)
+               await stream.requestSnapshot(snapshotParams)
+             },
+           })

      unsubscribeStream = stream.subscribe((messages: Array<Message<T>>) => {
        let hasUpToDate = false
+       let hasSnapshotEnd = false

        for (const message of messages) {
          // Add message to current batch buffer (for race condition handling)
@@ -746,6 +818,7 @@ function createElectricSync<T extends Row<unknown>>(
            })
          } else if (isSnapshotEndMessage(message)) {
            newSnapshots.push(parseSnapshotMessage(message))
+           hasSnapshotEnd = true
          } else if (isUpToDateMessage(message)) {
            hasUpToDate = true
          } else if (isMustRefetchMessage(message)) {
@@ -761,12 +834,18 @@ function createElectricSync<T extends Row<unknown>>(

            truncate()

-           // Reset hasUpToDate so we continue accumulating changes until next up-to-date
+           // Reset the loadSubset deduplication state since we're starting fresh
+           // This ensures that previously loaded predicates don't prevent refetching after truncate
+           loadSubsetDedupe?.reset()
+
+           // Reset flags so we continue accumulating changes until next up-to-date
            hasUpToDate = false
+           hasSnapshotEnd = false
+           hasReceivedUpToDate = false // Reset for progressive mode - we're starting a new sync
          }
        }

-       if (hasUpToDate) {
+       if (hasUpToDate || hasSnapshotEnd) {
          // Clear the current batch buffer since we're now up-to-date
          currentBatchMessages.setState(() => [])

@@ -776,8 +855,15 @@ function createElectricSync<T extends Row<unknown>>(
            transactionStarted = false
          }

-         // Mark the collection as ready now that sync is up to date
-         markReady()
+         if (hasUpToDate || (hasSnapshotEnd && syncMode === `on-demand`)) {
+           // Mark the collection as ready now that sync is up to date
+           markReady()
+         }
+
+         // Track that we've received the first up-to-date for progressive mode
+         if (hasUpToDate) {
+           hasReceivedUpToDate = true
+         }

          // Always commit txids when we receive up-to-date, regardless of transaction state
          seenTxids.setState((currentTxids) => {
@@ -811,12 +897,18 @@ function createElectricSync<T extends Row<unknown>>(
        }
      })

-     // Return the unsubscribe function
-     return () => {
-       // Unsubscribe from the stream
-       unsubscribeStream()
-       // Abort the abort controller to stop the stream
-       abortController.abort()
+     // Return the deduplicated loadSubset if available (on-demand or progressive mode)
+     // The loadSubset method is auto-bound, so it can be safely returned directly
+     return {
+       loadSubset: loadSubsetDedupe?.loadSubset,
+       cleanup: () => {
+         // Unsubscribe from the stream
+         unsubscribeStream()
+         // Abort the abort controller to stop the stream
+         abortController.abort()
+         // Reset deduplication tracking so collection can load fresh data if restarted
+         loadSubsetDedupe?.reset()
+       },
      }
    },
    // Expose the getSyncMetadata function
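Taken together, the electric.ts changes surface two user-facing additions: a `syncMode` option (`eager`, `on-demand`, or `progressive`) and an optional `timeout` alongside the returned `txid`. A minimal sketch of how a consumer might use them, assuming the usual createCollection setup from @tanstack/db; the shape URL, table name, row type, and `api` backend are placeholders, and types are simplified.

import { createCollection } from "@tanstack/db"
import { electricCollectionOptions } from "@tanstack/electric-db-collection"

// Placeholder backend client (not part of either package).
declare const api: {
  todos: { create: (data: unknown) => Promise<{ txid: number }> }
}

const todos = createCollection(
  electricCollectionOptions({
    shapeOptions: {
      url: `https://example.com/v1/shape`, // placeholder Electric endpoint
      params: { table: `todos` },
    },
    getKey: (todo: { id: string }) => todo.id,
    // `progressive` (new) backfills the full shape in the background while
    // serving live queries from incremental snapshots during the initial sync.
    syncMode: `progressive`,
    onInsert: async ({ transaction }) => {
      const result = await api.todos.create(transaction.mutations[0].modified)
      // Optional per-mutation timeout (ms) for awaiting the txid; defaults to 5000.
      return { txid: result.txid, timeout: 10000 }
    },
  })
)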
@@ -0,0 +1,58 @@
+ /**
+  * Serialize values for Electric SQL subset parameters.
+  *
+  * IMPORTANT: Electric expects RAW values, NOT SQL-formatted literals.
+  * Electric handles all type casting and escaping on the server side.
+  * The params Record<string, string> contains the actual values as strings,
+  * and Electric will parse/cast them based on the column type in the WHERE clause.
+  *
+  * @param value - The value to serialize
+  * @returns The raw value as a string (no SQL formatting/quoting)
+  */
+ export function serialize(value: unknown): string {
+   // Handle null/undefined - return empty string
+   // Electric interprets empty string as NULL in typed column context
+   if (value === null || value === undefined) {
+     return ``
+   }
+
+   // Handle strings - return as-is (NO quotes, Electric handles escaping)
+   if (typeof value === `string`) {
+     return value
+   }
+
+   // Handle numbers - convert to string
+   if (typeof value === `number`) {
+     return value.toString()
+   }
+
+   // Handle booleans - return as lowercase string
+   if (typeof value === `boolean`) {
+     return value ? `true` : `false`
+   }
+
+   // Handle dates - return ISO format (NO quotes)
+   if (value instanceof Date) {
+     return value.toISOString()
+   }
+
+   // Handle arrays - for = ANY() operator, serialize as Postgres array literal
+   // Format: {val1,val2,val3} with proper escaping
+   if (Array.isArray(value)) {
+     // Postgres array literal format uses curly braces
+     const elements = value.map((item) => {
+       if (item === null || item === undefined) {
+         return `NULL`
+       }
+       if (typeof item === `string`) {
+         // Escape quotes and backslashes for Postgres array literals
+         const escaped = item.replace(/\\/g, `\\\\`).replace(/"/g, `\\"`)
+         return `"${escaped}"`
+       }
+       return serialize(item)
+     })
+     return `{${elements.join(`,`)}}`
+   }
+
+   throw new Error(`Cannot serialize value: ${JSON.stringify(value)}`)
+ }
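For quick reference, the raw-value contract described in the header comment works out as follows; expected outputs are read directly from the implementation above, and the relative import path is illustrative.

// Illustrative only; results annotated from the serialize() branches above.
import { serialize } from "./pg-serializer.js"

serialize(null)              // ""   (empty, so compileSQL omits it from the params record)
serialize(`O'Brien`)         // "O'Brien"   (no SQL quoting or escaping)
serialize(42)                // "42"
serialize(true)              // "true"
serialize(new Date(0))       // "1970-01-01T00:00:00.000Z"
serialize([`a`, null, 3])    // `{"a",NULL,3}`   (Postgres array literal for = ANY)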