@sync-subscribe/server 0.3.2

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
package/LICENSE ADDED
@@ -0,0 +1,21 @@
1
+ MIT License
2
+
3
+ Copyright (c) 2024 fromkeith
4
+
5
+ Permission is hereby granted, free of charge, to any person obtaining a copy
6
+ of this software and associated documentation files (the "Software"), to deal
7
+ in the Software without restriction, including without limitation the rights
8
+ to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
9
+ copies of the Software, and to permit persons to whom the Software is
10
+ furnished to do so, subject to the following conditions:
11
+
12
+ The above copyright notice and this permission notice shall be included in all
13
+ copies or substantial portions of the Software.
14
+
15
+ THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
16
+ IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
17
+ FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
18
+ AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
19
+ LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
20
+ OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
21
+ SOFTWARE.
package/README.md ADDED
@@ -0,0 +1,249 @@
1
+ # @sync-subscribe/server
2
+
3
+ Framework-agnostic sync server for `sync-subscribe`. Wire `SyncHandler` into any HTTP framework (Express, Hono, Fastify, …) to expose pull, push, and SSE streaming endpoints.
4
+
5
+ ## Design
6
+
7
+ The server is **stateless with respect to subscriptions**. Clients send their filters and sync tokens directly in every pull/stream request — the server never stores subscription state. This means:
8
+
9
+ - No subscription table in your database
10
+ - No risk of client/server subscription desync
11
+ - The server simply applies its own mandatory filter additions (e.g. `userId`) at request time
12
+
13
+ ## Concepts
14
+
15
+ | Term | Description |
16
+ |---|---|
17
+ | `SyncRecord` | Every synced record must have `recordId`, `createdAt`, `updatedAt`, `revisionCount` |
18
+ | `SyncStore` | Your storage adapter — implement `getRecordsSince`, `upsert`, `getById` |
19
+ | `SyncHandler` | Orchestrates pull / push logic; decoupled from HTTP |
20
+ | `SyncSubscriptionRequest` | One entry in a pull/stream request: `{ key, filter, syncToken }` |
21
+ | Server filter additions | Fields the server merges into every client filter (e.g. `userId`) — invisible to the client |
22
+
23
+ ## Installation
24
+
25
+ ```bash
26
+ npm install @sync-subscribe/server @sync-subscribe/core
27
+ ```
28
+
29
+ ## Quick start
30
+
31
+ ### 1. Define your record type
32
+
33
+ ```ts
34
+ import type { SyncRecord } from "@sync-subscribe/core";
35
+
36
+ interface NoteRecord extends SyncRecord {
37
+ userId: string;
38
+ title: string;
39
+ contents: string;
40
+ isDeleted: boolean;
41
+ }
42
+ ```
43
+
44
+ ### 2. Implement `SyncStore`
45
+
46
+ ```ts
47
+ import type { SyncStore } from "@sync-subscribe/server";
48
+ import type { SyncPatch, SyncToken, SubscriptionFilter } from "@sync-subscribe/core";
49
+ import { decodeSyncToken } from "@sync-subscribe/core";
50
+
51
+ class NotesStore implements SyncStore<NoteRecord> {
52
+ async getRecordsSince(
53
+ subscriptions: { filter: SubscriptionFilter; since: SyncToken }[]
54
+ ): Promise<SyncPatch<NoteRecord>[]> {
55
+ // Query your DB using a union of all subscription filters,
56
+ // each scoped to its own since-token.
57
+ // Results must be ordered by (updatedAt ASC, revisionCount ASC, recordId ASC).
58
+ // SyncHandler deduplicates records that match multiple subscriptions.
59
+ }
60
+
61
+ async upsert(record: NoteRecord): Promise<NoteRecord> {
62
+ // INSERT OR REPLACE / ON CONFLICT DO UPDATE
63
+ return record;
64
+ }
65
+
66
+ async getById(recordId: string): Promise<NoteRecord | null> {
67
+ // Return record or null
68
+ }
69
+ }
70
+ ```
71
+
72
+ ### 3. Wire up routes
73
+
74
+ ```ts
75
+ import { SyncHandler } from "@sync-subscribe/server";
76
+ import type { SyncSubscriptionRequest } from "@sync-subscribe/server";
77
+ import type { SubscriptionFilter } from "@sync-subscribe/core";
78
+
79
+ const store = new NotesStore();
80
+ const handler = new SyncHandler<NoteRecord>(store, {
81
+ readonlyFields: ["createdAt"], // clients cannot overwrite these
82
+ onRecordsChanged: (records) => { /* notify SSE clients */ },
83
+ });
84
+
85
+ // POST /sync/pull — pull patches for all requested subscriptions
86
+ app.post("/sync/pull", async (req, res) => {
87
+ const { subscriptions } = req.body as { subscriptions: SyncSubscriptionRequest[] };
88
+
89
+ // Merge server-enforced fields into each subscription's filter.
90
+ // The client never sees these additions.
91
+ const merged = subscriptions.map((s) => ({
92
+ ...s,
93
+ filter: { ...s.filter, userId: req.user.id } as SubscriptionFilter,
94
+ }));
95
+
96
+ const result = await handler.pull(merged);
97
+ res.json(result); // { patches, syncTokens }
98
+ });
99
+
100
+ // POST /sync/push — push records from client
101
+ app.post("/sync/push", async (req, res) => {
102
+ const { records } = req.body;
103
+ // Inject server-authoritative fields before processing
104
+ const sanitized = records.map((r) => ({ ...r, userId: req.user.id }));
105
+ const result = await handler.push({ records: sanitized });
106
+ res.json(result); // { ok: true, serverUpdatedAt } or { conflict: true, serverRecord }
107
+ });
108
+ ```
109
+
110
+ ## SSE streaming
111
+
112
+ The client opens a persistent SSE stream by POSTing its subscriptions (same shape as pull). The server sends an initial batch, then fans out future changes via `onRecordsChanged`.
113
+
114
+ ```ts
115
+ import { matchesFilter, encodeSyncToken } from "@sync-subscribe/core";
116
+ import type { SyncToken, SubscriptionFilter, StreamEvent } from "@sync-subscribe/core";
117
+
118
+ interface SseConnection {
119
+ subscriptions: { key: string; filter: SubscriptionFilter }[];
120
+ res: Response;
121
+ }
122
+ const sseConnections = new Set<SseConnection>();
123
+
124
+ const handler = new SyncHandler<NoteRecord>(store, {
125
+ onRecordsChanged: (records) => {
126
+ for (const conn of sseConnections) {
127
+ const patches = [];
128
+ const syncTokens: Record<string, SyncToken> = {};
129
+
130
+ for (const sub of conn.subscriptions) {
131
+ const matching = records.filter((r) =>
132
+ matchesFilter(r as Record<string, unknown>, sub.filter)
133
+ );
134
+ if (matching.length === 0) continue;
135
+
136
+ const last = matching[matching.length - 1]!;
137
+ syncTokens[sub.key] = encodeSyncToken({
138
+ updatedAt: last.updatedAt,
139
+ revisionCount: last.revisionCount,
140
+ recordId: last.recordId,
141
+ });
142
+ patches.push(...matching.map((r) => ({ op: "upsert" as const, record: r })));
143
+ }
144
+
145
+ if (patches.length > 0) {
146
+ conn.res.write(`data: ${JSON.stringify({ patches, syncTokens })}\n\n`);
147
+ }
148
+ }
149
+ },
150
+ });
151
+
152
+ // POST /sync/stream — POST-based SSE (body carries subscriptions)
153
+ app.post("/sync/stream", async (req, res) => {
154
+ const { subscriptions } = req.body as { subscriptions: SyncSubscriptionRequest[] };
155
+
156
+ // Merge server filter additions
157
+ const merged = subscriptions.map((s) => ({
158
+ ...s,
159
+ filter: { ...s.filter, userId: req.user.id } as SubscriptionFilter,
160
+ }));
161
+
162
+ res.setHeader("Content-Type", "text/event-stream");
163
+ res.setHeader("Cache-Control", "no-cache");
164
+ res.setHeader("Connection", "keep-alive");
165
+ res.flushHeaders();
166
+
167
+ // Send initial batch
168
+ const initial = await handler.pull(merged);
169
+ res.write(`data: ${JSON.stringify(initial)}\n\n`);
170
+
171
+ // Register for future push notifications
172
+ const conn: SseConnection = {
173
+ subscriptions: merged.map((s) => ({ key: s.key, filter: s.filter })),
174
+ res,
175
+ };
176
+ sseConnections.add(conn);
177
+
178
+ const heartbeat = setInterval(() => res.write(": heartbeat\n\n"), 30_000);
179
+ req.on("close", () => {
180
+ clearInterval(heartbeat);
181
+ sseConnections.delete(conn);
182
+ });
183
+ });
184
+ ```
185
+
186
+ ## Server-initiated writes
187
+
188
+ Use `serverUpsert` for background jobs, webhooks, or inter-service writes. Unlike `push`, there is no conflict resolution — the server's intent always wins.
189
+
190
+ ```ts
191
+ await handler.serverUpsert({
192
+ recordId: "note-abc",
193
+ userId: "system",
194
+ title: "Auto-generated",
195
+ contents: "...",
196
+ isDeleted: false,
197
+ createdAt: 0, // overwritten by serverUpsert
198
+ updatedAt: 0, // overwritten by serverUpsert
199
+ revisionCount: 0, // incremented by serverUpsert
200
+ });
201
+ ```
202
+
203
+ `onRecordsChanged` fires after every `serverUpsert`, so SSE clients are notified automatically.
204
+
205
+ ## Readonly fields
206
+
207
+ Fields listed in `readonlyFields` are copied from the existing server record before conflict resolution. Clients cannot overwrite them even if they try.
208
+
209
+ ```ts
210
+ new SyncHandler(store, {
211
+ readonlyFields: ["createdAt", "userId"],
212
+ });
213
+ ```
214
+
215
+ ## Conflict resolution
216
+
217
+ On `push`, if the server's `revisionCount` is higher than the incoming record (or equal with an older `updatedAt`), the push returns a conflict:
218
+
219
+ ```ts
220
+ // { conflict: true, serverRecord: NoteRecord }
221
+ ```
222
+
223
+ The client should apply the server record locally and retry if needed. `revisionCount` acts as a "work done" counter — it doesn't depend on clocks.
224
+
225
+ ## API reference
226
+
227
+ ### `SyncHandler<T>`
228
+
229
+ | Method | Description |
230
+ |---|---|
231
+ | `pull(subscriptions)` | Return deduplicated patches and per-key `syncTokens` for all requested subscriptions |
232
+ | `push(req)` | Accept client records, resolve conflicts, persist, fire `onRecordsChanged` |
233
+ | `serverUpsert(record)` | Write a record as the server; no conflict resolution; fires `onRecordsChanged` |
234
+
235
+ ### `SyncHandlerOptions<T>`
236
+
237
+ | Option | Description |
238
+ |---|---|
239
+ | `readonlyFields` | Fields clients cannot modify |
240
+ | `onRecordsChanged` | Called after every successful `push` or `serverUpsert` with the stored records |
241
+
242
+ ### `SyncStore<T>`
243
+
244
+ | Method | Description |
245
+ |---|---|
246
+ | `getRecordsSince(subscriptions)` | Fetch records matching a union of `{ filter, since }` entries; ordered by `(updatedAt, revisionCount, recordId) ASC` |
247
+ | `upsert(record)` | Write a record; return the stored record |
248
+ | `getById(recordId)` | Return the current record for a given id, or `null` |
249
+ | `computePartialSyncToken?(oldFilter, newFilter, token)` | Optional: compute a smarter token when a subscription filter changes |
@@ -0,0 +1,3 @@
1
+ export { SyncHandler } from "./syncHandler.js";
2
+ export type { SyncHandlerOptions, SyncStore, SyncSubscriptionRequest, } from "./types.js";
3
+ //# sourceMappingURL=index.d.ts.map
@@ -0,0 +1 @@
1
+ {"version":3,"file":"index.d.ts","sourceRoot":"","sources":["../src/index.ts"],"names":[],"mappings":"AAAA,OAAO,EAAE,WAAW,EAAE,MAAM,kBAAkB,CAAC;AAC/C,YAAY,EACV,kBAAkB,EAClB,SAAS,EACT,uBAAuB,GACxB,MAAM,YAAY,CAAC"}
package/dist/index.js ADDED
@@ -0,0 +1,2 @@
1
+ export { SyncHandler } from "./syncHandler.js";
2
+ //# sourceMappingURL=index.js.map
@@ -0,0 +1 @@
1
+ {"version":3,"file":"index.js","sourceRoot":"","sources":["../src/index.ts"],"names":[],"mappings":"AAAA,OAAO,EAAE,WAAW,EAAE,MAAM,kBAAkB,CAAC"}
@@ -0,0 +1,41 @@
1
+ import type { SyncRecord, SyncToken, SyncPatch, ConflictResult } from "@sync-subscribe/core";
2
+ import type { SyncHandlerOptions, SyncStore, SyncSubscriptionRequest } from "./types.js";
3
+ /**
4
+ * Core sync logic, decoupled from any HTTP framework.
5
+ * Wire this up to your route handlers (Express, Hono, Fastify, …).
6
+ *
7
+ * The server has no concept of stored subscriptions. The client sends its
8
+ * filters and sync tokens directly in every pull/stream request. The route
9
+ * handler is responsible for merging any server-side filter additions
10
+ * (e.g. userId from auth context) before calling pull().
11
+ */
12
+ export declare class SyncHandler<T extends SyncRecord> {
13
+ private readonly store;
14
+ private readonly options;
15
+ constructor(store: SyncStore<T>, options?: SyncHandlerOptions<T>);
16
+ /**
17
+ * Pull patches for one or more subscriptions.
18
+ *
19
+ * Each entry in `subscriptions` carries an opaque `key` (echoed back in
20
+ * the response), a fully-merged filter (client filter + server additions),
21
+ * and the client's last-known sync token.
22
+ *
23
+ * Returns deduplicated patches and one sync token per key.
24
+ */
25
+ pull(subscriptions: SyncSubscriptionRequest[]): Promise<{
26
+ patches: SyncPatch<T>[];
27
+ syncTokens: Record<string, SyncToken>;
28
+ }>;
29
+ push(req: {
30
+ records: T[];
31
+ }): Promise<{
32
+ ok: true;
33
+ serverUpdatedAt: number;
34
+ } | ConflictResult<T>>;
35
+ /**
36
+ * Upserts a record from the server itself (background job, webhook, etc.).
37
+ * The server's intent always wins — no conflict resolution.
38
+ */
39
+ serverUpsert(record: T): Promise<T>;
40
+ }
41
+ //# sourceMappingURL=syncHandler.d.ts.map
@@ -0,0 +1 @@
1
+ {"version":3,"file":"syncHandler.d.ts","sourceRoot":"","sources":["../src/syncHandler.ts"],"names":[],"mappings":"AAMA,OAAO,KAAK,EACV,UAAU,EACV,SAAS,EACT,SAAS,EACT,cAAc,EACf,MAAM,sBAAsB,CAAC;AAC9B,OAAO,KAAK,EACV,kBAAkB,EAClB,SAAS,EACT,uBAAuB,EACxB,MAAM,YAAY,CAAC;AAEpB;;;;;;;;GAQG;AACH,qBAAa,WAAW,CAAC,CAAC,SAAS,UAAU;IAEzC,OAAO,CAAC,QAAQ,CAAC,KAAK;IACtB,OAAO,CAAC,QAAQ,CAAC,OAAO;gBADP,KAAK,EAAE,SAAS,CAAC,CAAC,CAAC,EACnB,OAAO,GAAE,kBAAkB,CAAC,CAAC,CAAM;IAGtD;;;;;;;;OAQG;IACG,IAAI,CAAC,aAAa,EAAE,uBAAuB,EAAE,GAAG,OAAO,CAAC;QAC5D,OAAO,EAAE,SAAS,CAAC,CAAC,CAAC,EAAE,CAAC;QACxB,UAAU,EAAE,MAAM,CAAC,MAAM,EAAE,SAAS,CAAC,CAAC;KACvC,CAAC;IAoCI,IAAI,CAAC,GAAG,EAAE;QAAE,OAAO,EAAE,CAAC,EAAE,CAAA;KAAE,GAAG,OAAO,CAAC;QAAE,EAAE,EAAE,IAAI,CAAC;QAAC,eAAe,EAAE,MAAM,CAAA;KAAE,GAAG,cAAc,CAAC,CAAC,CAAC,CAAC;IAyCrG;;;OAGG;IACG,YAAY,CAAC,MAAM,EAAE,CAAC,GAAG,OAAO,CAAC,CAAC,CAAC;CAwB1C"}
@@ -0,0 +1,114 @@
1
+ import { resolveConflict, matchesFilter, encodeSyncToken, } from "@sync-subscribe/core";
2
+ /**
3
+ * Core sync logic, decoupled from any HTTP framework.
4
+ * Wire this up to your route handlers (Express, Hono, Fastify, …).
5
+ *
6
+ * The server has no concept of stored subscriptions. The client sends its
7
+ * filters and sync tokens directly in every pull/stream request. The route
8
+ * handler is responsible for merging any server-side filter additions
9
+ * (e.g. userId from auth context) before calling pull().
10
+ */
11
export class SyncHandler {
    store;
    options;
    /**
     * @param store   Storage adapter implementing SyncStore (getRecordsSince / upsert / getById).
     * @param options Optional behavior hooks: `readonlyFields` (server-authoritative
     *                fields clients cannot overwrite) and `onRecordsChanged`
     *                (fired with every record successfully written to the store).
     */
    constructor(store, options = {}) {
        this.store = store;
        this.options = options;
    }
    /**
     * Pull patches for one or more subscriptions.
     *
     * Each entry in `subscriptions` carries an opaque `key` (echoed back in
     * the response), a fully-merged filter (client filter + server additions),
     * and the client's last-known sync token.
     *
     * Returns deduplicated patches and one sync token per key.
     */
    async pull(subscriptions) {
        // One batched store query: a { filter, since } pair per subscription.
        const allPatches = await this.store.getRecordsSince(subscriptions.map((s) => ({ filter: s.filter, since: s.syncToken })));
        // Compute the latest sync token per subscription key. The store contract
        // orders patches ascending by (updatedAt, revisionCount, recordId), so the
        // last matching upsert in the list carries the newest token.
        const syncTokens = {};
        for (const sub of subscriptions) {
            let lastMatch;
            for (const p of allPatches) {
                if (p.op === "upsert" &&
                    matchesFilter(p.record, sub.filter)) {
                    lastMatch = p.record;
                }
            }
            syncTokens[sub.key] = lastMatch
                ? encodeSyncToken({
                    updatedAt: lastMatch.updatedAt,
                    revisionCount: lastMatch.revisionCount,
                    recordId: lastMatch.recordId,
                })
                // No new data for this subscription — echo the client's token back.
                : sub.syncToken;
        }
        // Deduplicate patches — last write per recordId wins across subscriptions.
        const patchMap = new Map();
        for (const p of allPatches) {
            const k = p.op === "upsert" ? p.record.recordId : p.recordId;
            patchMap.set(k, p);
        }
        return { patches: [...patchMap.values()], syncTokens };
    }
    /**
     * Accept records pushed by a client, resolve conflicts against the server
     * copy, persist the winners, and notify `onRecordsChanged`.
     *
     * Returns `{ ok: true, serverUpdatedAt }` on full success, or
     * `{ conflict: true, serverRecord }` when the server copy wins.
     *
     * BUGFIX: previously a conflict part-way through a batch returned without
     * notifying `onRecordsChanged` about records already persisted earlier in
     * the same batch, breaking the documented contract ("called with every
     * record successfully written to the store after a push") and leaving SSE
     * subscribers unaware of real writes. Listeners are now notified of all
     * persisted records before the conflict is reported.
     */
    async push(req) {
        const { readonlyFields, onRecordsChanged } = this.options;
        const stored = [];
        const now = Date.now();
        // Fire the change hook for everything persisted so far (if anything).
        const notifyStored = () => {
            if (stored.length > 0) {
                onRecordsChanged?.(stored);
            }
        };
        for (const incoming of req.records) {
            const existing = await this.store.getById(incoming.recordId);
            let record = incoming;
            // Server-authoritative fields: restore the existing record's values so
            // client-supplied values are silently ignored.
            if (readonlyFields && readonlyFields.length > 0 && existing) {
                const patched = { ...record };
                for (const field of readonlyFields) {
                    patched[field] = existing[field];
                }
                record = patched;
            }
            if (existing) {
                const winner = resolveConflict(record, existing);
                if (winner === "b") {
                    // Server copy wins. Records stored earlier in this batch were
                    // genuinely written, so listeners must still hear about them.
                    notifyStored();
                    return { conflict: true, serverRecord: existing };
                }
            }
            const toStore = {
                ...record,
                updatedAt: now, // server clock is authoritative for updatedAt
                createdAt: existing ? existing.createdAt : now, // never client-settable
            };
            await this.store.upsert(toStore);
            stored.push(toStore);
        }
        notifyStored();
        return { ok: true, serverUpdatedAt: now };
    }
    /**
     * Upserts a record from the server itself (background job, webhook, etc.).
     * The server's intent always wins — no conflict resolution.
     * Fires `onRecordsChanged` with the stored record.
     */
    async serverUpsert(record) {
        const { readonlyFields, onRecordsChanged } = this.options;
        const existing = await this.store.getById(record.recordId);
        let incoming = record;
        // Even server-initiated writes preserve readonly fields of an existing
        // record, keeping them stable across all write paths.
        if (readonlyFields && readonlyFields.length > 0 && existing) {
            const patched = { ...incoming };
            for (const field of readonlyFields) {
                patched[field] = existing[field];
            }
            incoming = patched;
        }
        const now = Date.now();
        const toStore = {
            ...incoming,
            updatedAt: now,
            createdAt: existing ? existing.createdAt : now,
        };
        const stored = await this.store.upsert(toStore);
        onRecordsChanged?.([stored]);
        return stored;
    }
}
114
+ //# sourceMappingURL=syncHandler.js.map
@@ -0,0 +1 @@
1
+ {"version":3,"file":"syncHandler.js","sourceRoot":"","sources":["../src/syncHandler.ts"],"names":[],"mappings":"AAAA,OAAO,EACL,eAAe,EACf,aAAa,EACb,eAAe,GAEhB,MAAM,sBAAsB,CAAC;AAa9B;;;;;;;;GAQG;AACH,MAAM,OAAO,WAAW;IAEH;IACA;IAFnB,YACmB,KAAmB,EACnB,UAAiC,EAAE;QADnC,UAAK,GAAL,KAAK,CAAc;QACnB,YAAO,GAAP,OAAO,CAA4B;IACnD,CAAC;IAEJ;;;;;;;;OAQG;IACH,KAAK,CAAC,IAAI,CAAC,aAAwC;QAIjD,MAAM,UAAU,GAAG,MAAM,IAAI,CAAC,KAAK,CAAC,eAAe,CACjD,aAAa,CAAC,GAAG,CAAC,CAAC,CAAC,EAAE,EAAE,CAAC,CAAC,EAAE,MAAM,EAAE,CAAC,CAAC,MAAM,EAAE,KAAK,EAAE,CAAC,CAAC,SAAS,EAAE,CAAC,CAAC,CACrE,CAAC;QAEF,sDAAsD;QACtD,MAAM,UAAU,GAA8B,EAAE,CAAC;QACjD,KAAK,MAAM,GAAG,IAAI,aAAa,EAAE,CAAC;YAChC,IAAI,SAAwB,CAAC;YAC7B,KAAK,MAAM,CAAC,IAAI,UAAU,EAAE,CAAC;gBAC3B,IACE,CAAC,CAAC,EAAE,KAAK,QAAQ;oBACjB,aAAa,CAAC,CAAC,CAAC,MAAiC,EAAE,GAAG,CAAC,MAAM,CAAC,EAC9D,CAAC;oBACD,SAAS,GAAG,CAAC,CAAC,MAAW,CAAC;gBAC5B,CAAC;YACH,CAAC;YACD,UAAU,CAAC,GAAG,CAAC,GAAG,CAAC,GAAG,SAAS;gBAC7B,CAAC,CAAC,eAAe,CAAC;oBACd,SAAS,EAAE,SAAS,CAAC,SAAS;oBAC9B,aAAa,EAAE,SAAS,CAAC,aAAa;oBACtC,QAAQ,EAAE,SAAS,CAAC,QAAQ;iBAC7B,CAAC;gBACJ,CAAC,CAAC,GAAG,CAAC,SAAS,CAAC;QACpB,CAAC;QAED,2EAA2E;QAC3E,MAAM,QAAQ,GAAG,IAAI,GAAG,EAAwB,CAAC;QACjD,KAAK,MAAM,CAAC,IAAI,UAAU,EAAE,CAAC;YAC3B,MAAM,CAAC,GAAG,CAAC,CAAC,EAAE,KAAK,QAAQ,CAAC,CAAC,CAAC,CAAC,CAAC,MAAM,CAAC,QAAQ,CAAC,CAAC,CAAC,CAAC,CAAC,QAAQ,CAAC;YAC7D,QAAQ,CAAC,GAAG,CAAC,CAAC,EAAE,CAAC,CAAC,CAAC;QACrB,CAAC;QAED,OAAO,EAAE,OAAO,EAAE,CAAC,GAAG,QAAQ,CAAC,MAAM,EAAE,CAAC,EAAE,UAAU,EAAE,CAAC;IACzD,CAAC;IAED,KAAK,CAAC,IAAI,CAAC,GAAqB;QAC9B,MAAM,EAAE,cAAc,EAAE,gBAAgB,EAAE,GAAG,IAAI,CAAC,OAAO,CAAC;QAC1D,MAAM,MAAM,GAAQ,EAAE,CAAC;QACvB,MAAM,GAAG,GAAG,IAAI,CAAC,GAAG,EAAE,CAAC;QAEvB,KAAK,MAAM,QAAQ,IAAI,GAAG,CAAC,OAAO,EAAE,CAAC;YACnC,MAAM,QAAQ,GAAG,MAAM,IAAI,CAAC,KAAK,CAAC,OAAO,CAAC,QAAQ,CAAC,QAAQ,CAAC,CAAC;YAE7D,IAAI,MAAM,GAAG,QAAQ,CAAC;YACtB,IAAI,cAAc,IAAI,cAAc,CAAC,MAAM,GAAG,CAAC,IAAI,QAAQ,EAAE,CAAC;gBAC5D,MAAM,OAAO,GAAG,EAAE,GAAG,MAAM,EAA6B,CAAC;gBACzD,KAAK,MAAM,KAAK,IAAI,cAAc,EAAE,CAAC;oBACnC,OAAO,CAAC,KAAK,CAAC,GAAI,QAAoC,
CAAC,KAAK,CAAC,CAAC;gBAChE,CAAC;gBACD,MAAM,GAAG,OAAY,CAAC;YACxB,CAAC;YAED,IAAI,QAAQ,EAAE,CAAC;gBACb,MAAM,MAAM,GAAG,eAAe,CAAC,MAAM,EAAE,QAAQ,CAAC,CAAC;gBACjD,IAAI,MAAM,KAAK,GAAG,EAAE,CAAC;oBACnB,OAAO,EAAE,QAAQ,EAAE,IAAI,EAAE,YAAY,EAAE,QAAQ,EAAE,CAAC;gBACpD,CAAC;YACH,CAAC;YAED,MAAM,OAAO,GAAM;gBACjB,GAAG,MAAM;gBACT,SAAS,EAAE,GAAG;gBACd,SAAS,EAAE,QAAQ,CAAC,CAAC,CAAC,QAAQ,CAAC,SAAS,CAAC,CAAC,CAAC,GAAG;aAC/C,CAAC;YAEF,MAAM,IAAI,CAAC,KAAK,CAAC,MAAM,CAAC,OAAO,CAAC,CAAC;YACjC,MAAM,CAAC,IAAI,CAAC,OAAO,CAAC,CAAC;QACvB,CAAC;QAED,IAAI,MAAM,CAAC,MAAM,GAAG,CAAC,EAAE,CAAC;YACtB,gBAAgB,EAAE,CAAC,MAAM,CAAC,CAAC;QAC7B,CAAC;QAED,OAAO,EAAE,EAAE,EAAE,IAAI,EAAE,eAAe,EAAE,GAAG,EAAE,CAAC;IAC5C,CAAC;IAED;;;OAGG;IACH,KAAK,CAAC,YAAY,CAAC,MAAS;QAC1B,MAAM,EAAE,cAAc,EAAE,gBAAgB,EAAE,GAAG,IAAI,CAAC,OAAO,CAAC;QAC1D,MAAM,QAAQ,GAAG,MAAM,IAAI,CAAC,KAAK,CAAC,OAAO,CAAC,MAAM,CAAC,QAAQ,CAAC,CAAC;QAE3D,IAAI,QAAQ,GAAG,MAAM,CAAC;QACtB,IAAI,cAAc,IAAI,cAAc,CAAC,MAAM,GAAG,CAAC,IAAI,QAAQ,EAAE,CAAC;YAC5D,MAAM,OAAO,GAAG,EAAE,GAAG,QAAQ,EAA6B,CAAC;YAC3D,KAAK,MAAM,KAAK,IAAI,cAAc,EAAE,CAAC;gBACnC,OAAO,CAAC,KAAK,CAAC,GAAI,QAAoC,CAAC,KAAK,CAAC,CAAC;YAChE,CAAC;YACD,QAAQ,GAAG,OAAY,CAAC;QAC1B,CAAC;QAED,MAAM,GAAG,GAAG,IAAI,CAAC,GAAG,EAAE,CAAC;QACvB,MAAM,OAAO,GAAM;YACjB,GAAG,QAAQ;YACX,SAAS,EAAE,GAAG;YACd,SAAS,EAAE,QAAQ,CAAC,CAAC,CAAC,QAAQ,CAAC,SAAS,CAAC,CAAC,CAAC,GAAG;SAC/C,CAAC;QAEF,MAAM,MAAM,GAAG,MAAM,IAAI,CAAC,KAAK,CAAC,MAAM,CAAC,OAAO,CAAC,CAAC;QAChD,gBAAgB,EAAE,CAAC,CAAC,MAAM,CAAC,CAAC,CAAC;QAC7B,OAAO,MAAM,CAAC;IAChB,CAAC;CACF"}
@@ -0,0 +1,52 @@
1
+ import type { ConflictResult, SyncPatch, SyncRecord, SyncToken, SubscriptionFilter } from "@sync-subscribe/core";
2
+ export type { PullRequest, PullResponse, PushRequest, PushResponse, StreamEvent, StreamRequest, } from "@sync-subscribe/core";
3
+ export interface SyncHandlerOptions<T extends SyncRecord> {
4
+ /**
5
+ * Fields that clients cannot modify. For existing records, these values
6
+ * are copied from the server record before conflict resolution and storage,
7
+ * so client-supplied values are silently ignored.
8
+ */
9
+ readonlyFields?: readonly string[];
10
+ /**
11
+ * Called with every record successfully written to the store after a push.
12
+ * Use this to notify SSE subscribers, invalidate caches, etc.
13
+ */
14
+ onRecordsChanged?: (records: T[]) => void;
15
+ }
16
+ /**
17
+ * One entry in a pull or stream request.
18
+ * key is an opaque client-assigned identifier echoed back in the syncTokens response.
19
+ * filter has already had server-side additions merged in by the route handler.
20
+ */
21
+ export interface SyncSubscriptionRequest {
22
+ key: string;
23
+ filter: SubscriptionFilter;
24
+ syncToken: SyncToken;
25
+ }
26
+ /**
27
+ * Interface that a server adapter must implement to persist and query records.
28
+ * Framework-agnostic; implementors plug in their own storage layer.
29
+ */
30
+ export interface SyncStore<T extends SyncRecord> {
31
+ /**
32
+ * Fetch records matching one or more subscription requests, each with its own
33
+ * since-token. Implementations should query using a union of all filters
34
+ * and return patches ordered by (updatedAt, revisionCount, recordId) ascending.
35
+ * Deduplication across subscriptions is handled by SyncHandler.
36
+ */
37
+ getRecordsSince(subscriptions: {
38
+ filter: SubscriptionFilter;
39
+ since: SyncToken;
40
+ }[]): Promise<SyncPatch<T>[]>;
41
+ /** Write a record. Returns the stored record. */
42
+ upsert(record: T): Promise<T>;
43
+ /** Returns the current server record for a given id, or null. */
44
+ getById(recordId: string): Promise<T | null>;
45
+ /**
46
+ * Optional: compute a smarter sync token when a subscription filter changes,
47
+ * avoiding a full re-sync when only a subset of the data is new to the client.
48
+ */
49
+ computePartialSyncToken?(oldFilter: SubscriptionFilter, newFilter: SubscriptionFilter, existingToken: SyncToken): Promise<SyncToken>;
50
+ }
51
+ export type { ConflictResult };
52
+ //# sourceMappingURL=types.d.ts.map
@@ -0,0 +1 @@
1
+ {"version":3,"file":"types.d.ts","sourceRoot":"","sources":["../src/types.ts"],"names":[],"mappings":"AAAA,OAAO,KAAK,EACV,cAAc,EACd,SAAS,EACT,UAAU,EACV,SAAS,EACT,kBAAkB,EACnB,MAAM,sBAAsB,CAAC;AAG9B,YAAY,EACV,WAAW,EACX,YAAY,EACZ,WAAW,EACX,YAAY,EACZ,WAAW,EACX,aAAa,GACd,MAAM,sBAAsB,CAAC;AAE9B,MAAM,WAAW,kBAAkB,CAAC,CAAC,SAAS,UAAU;IACtD;;;;OAIG;IACH,cAAc,CAAC,EAAE,SAAS,MAAM,EAAE,CAAC;IACnC;;;OAGG;IACH,gBAAgB,CAAC,EAAE,CAAC,OAAO,EAAE,CAAC,EAAE,KAAK,IAAI,CAAC;CAC3C;AAED;;;;GAIG;AACH,MAAM,WAAW,uBAAuB;IACtC,GAAG,EAAE,MAAM,CAAC;IACZ,MAAM,EAAE,kBAAkB,CAAC;IAC3B,SAAS,EAAE,SAAS,CAAC;CACtB;AAED;;;GAGG;AACH,MAAM,WAAW,SAAS,CAAC,CAAC,SAAS,UAAU;IAC7C;;;;;OAKG;IACH,eAAe,CACb,aAAa,EAAE;QAAE,MAAM,EAAE,kBAAkB,CAAC;QAAC,KAAK,EAAE,SAAS,CAAA;KAAE,EAAE,GAChE,OAAO,CAAC,SAAS,CAAC,CAAC,CAAC,EAAE,CAAC,CAAC;IAE3B,iDAAiD;IACjD,MAAM,CAAC,MAAM,EAAE,CAAC,GAAG,OAAO,CAAC,CAAC,CAAC,CAAC;IAE9B,iEAAiE;IACjE,OAAO,CAAC,QAAQ,EAAE,MAAM,GAAG,OAAO,CAAC,CAAC,GAAG,IAAI,CAAC,CAAC;IAE7C;;;OAGG;IACH,uBAAuB,CAAC,CACtB,SAAS,EAAE,kBAAkB,EAC7B,SAAS,EAAE,kBAAkB,EAC7B,aAAa,EAAE,SAAS,GACvB,OAAO,CAAC,SAAS,CAAC,CAAC;CACvB;AAGD,YAAY,EAAE,cAAc,EAAE,CAAC"}
package/dist/types.js ADDED
@@ -0,0 +1,2 @@
1
+ export {};
2
+ //# sourceMappingURL=types.js.map
@@ -0,0 +1 @@
1
+ {"version":3,"file":"types.js","sourceRoot":"","sources":["../src/types.ts"],"names":[],"mappings":""}
package/package.json ADDED
@@ -0,0 +1,37 @@
1
+ {
2
+ "name": "@sync-subscribe/server",
3
+ "version": "0.3.2",
4
+ "type": "module",
5
+ "publishConfig": {
6
+ "access": "public"
7
+ },
8
+ "license": "MIT",
9
+ "files": [
10
+ "dist",
11
+ "README.md",
12
+ "LICENSE"
13
+ ],
14
+ "main": "./dist/index.js",
15
+ "types": "./dist/index.d.ts",
16
+ "exports": {
17
+ ".": {
18
+ "import": "./dist/index.js",
19
+ "types": "./dist/index.d.ts"
20
+ }
21
+ },
22
+ "dependencies": {
23
+ "@sync-subscribe/core": "0.3.2"
24
+ },
25
+ "devDependencies": {
26
+ "@types/node": "^22.0.0",
27
+ "typescript": "*",
28
+ "vitest": "*"
29
+ },
30
+ "scripts": {
31
+ "build": "tsc --build",
32
+ "dev": "tsc --build --watch",
33
+ "typecheck": "tsc --noEmit",
34
+ "test": "vitest run",
35
+ "clean": "rm -rf dist *.tsbuildinfo"
36
+ }
37
+ }