@fatagnus/dink-convex 1.0.0

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (49)
  1. package/LICENSE +190 -0
  2. package/README.md +282 -0
  3. package/convex/convex.config.ts +23 -0
  4. package/convex/crons.ts +37 -0
  5. package/convex/http.ts +421 -0
  6. package/convex/index.ts +20 -0
  7. package/convex/install.ts +172 -0
  8. package/convex/outbox.ts +198 -0
  9. package/convex/outboxProcessor.ts +240 -0
  10. package/convex/schema.ts +97 -0
  11. package/convex/sync.ts +327 -0
  12. package/dist/component.d.ts +34 -0
  13. package/dist/component.d.ts.map +1 -0
  14. package/dist/component.js +35 -0
  15. package/dist/component.js.map +1 -0
  16. package/dist/crdt.d.ts +82 -0
  17. package/dist/crdt.d.ts.map +1 -0
  18. package/dist/crdt.js +134 -0
  19. package/dist/crdt.js.map +1 -0
  20. package/dist/factories.d.ts +80 -0
  21. package/dist/factories.d.ts.map +1 -0
  22. package/dist/factories.js +159 -0
  23. package/dist/factories.js.map +1 -0
  24. package/dist/http.d.ts +238 -0
  25. package/dist/http.d.ts.map +1 -0
  26. package/dist/http.js +222 -0
  27. package/dist/http.js.map +1 -0
  28. package/dist/httpFactory.d.ts +39 -0
  29. package/dist/httpFactory.d.ts.map +1 -0
  30. package/dist/httpFactory.js +128 -0
  31. package/dist/httpFactory.js.map +1 -0
  32. package/dist/index.d.ts +68 -0
  33. package/dist/index.d.ts.map +1 -0
  34. package/dist/index.js +73 -0
  35. package/dist/index.js.map +1 -0
  36. package/dist/schema.d.ts +217 -0
  37. package/dist/schema.d.ts.map +1 -0
  38. package/dist/schema.js +195 -0
  39. package/dist/schema.js.map +1 -0
  40. package/dist/syncFactories.d.ts +240 -0
  41. package/dist/syncFactories.d.ts.map +1 -0
  42. package/dist/syncFactories.js +623 -0
  43. package/dist/syncFactories.js.map +1 -0
  44. package/dist/triggers.d.ts +442 -0
  45. package/dist/triggers.d.ts.map +1 -0
  46. package/dist/triggers.js +705 -0
  47. package/dist/triggers.js.map +1 -0
  48. package/package.json +108 -0
  49. package/scripts/check-peer-deps.cjs +132 -0
@@ -0,0 +1,198 @@
1
+ /**
2
+ * Outbox processing utilities for @fatagnus/dink-convex component.
3
+ *
4
+ * Provides helper functions for processing the _sync_outbox table
5
+ * and pushing deltas to dinkd.
6
+ *
7
+ * @module convex/outbox
8
+ */
9
+
10
+ /**
11
+ * Status constants for outbox items.
12
+ */
13
+ export const OUTBOX_STATUS = {
14
+ PENDING: "pending",
15
+ SENT: "sent",
16
+ FAILED: "failed",
17
+ } as const;
18
+
19
+ /**
20
+ * Maximum number of retry attempts before marking as failed.
21
+ */
22
+ export const MAX_RETRIES = 5;
23
+
24
+ /**
25
+ * Base delay for exponential backoff (1 second).
26
+ */
27
+ const BASE_DELAY_MS = 1000;
28
+
29
+ /**
30
+ * Maximum delay cap for backoff (5 minutes).
31
+ */
32
+ const MAX_DELAY_MS = 5 * 60 * 1000;
33
+
34
/**
 * Outbox item structure read from the sync_outbox table.
 */
export interface OutboxItem {
  /** Convex document id of the outbox row itself. */
  _id: string;
  /** Name of the user collection the delta belongs to. */
  collection: string;
  /** Sync id of the document the delta applies to. */
  docId: string;
  /** Raw CRDT delta bytes to push to dinkd. */
  delta: ArrayBuffer;
  /** Sequence number used for ordering and (docId, seq) deduplication. */
  seq: number;
  /** One of the OUTBOX_STATUS values: "pending" | "sent" | "failed". */
  status: string;
  /** Number of send attempts that have failed so far. */
  retries: number;
  /** Creation timestamp (epoch millis). */
  createdAt: number;
  /** Timestamp of the most recent send attempt, if any (epoch millis). */
  lastAttempt?: number;
}

/**
 * Payload structure for POSTing to dinkd /api/sync/convex-push.
 */
export interface PushPayload {
  /** Collection the delta belongs to. */
  collection: string;
  /** Sync id of the target document. */
  docId: string;
  /** Sequence number for deduplication on the receiving side. */
  seq: number;
  /** Delta bytes expanded to a plain number array so the payload is JSON-serializable. */
  bytes: number[];
}

/**
 * Configuration for outbox processing, read from environment variables.
 * Either field may be undefined when the deployment is not configured.
 */
export interface OutboxConfig {
  /** Base URL of the dinkd HTTP endpoint (DINK_HTTP_URL). */
  dinkHttpUrl: string | undefined;
  /** Bearer token used to authenticate pushes (DINK_APP_SYNC_KEY). */
  appSyncKey: string | undefined;
}

/**
 * Minimal (docId, seq) pair used to deduplicate pushes within a batch.
 */
export interface ItemIdentifier {
  /** Sync id of the document. */
  docId: string;
  /** Sequence number of the delta. */
  seq: number;
}
74
+
75
+ /**
76
+ * Calculate exponential backoff delay based on retry count.
77
+ *
78
+ * Uses the formula: min(BASE_DELAY * 2^retries, MAX_DELAY)
79
+ *
80
+ * @param retries - Number of previous retry attempts
81
+ * @returns Delay in milliseconds before next retry
82
+ */
83
+ export function calculateBackoffDelay(retries: number): number {
84
+ const delay = BASE_DELAY_MS * Math.pow(2, retries);
85
+ return Math.min(delay, MAX_DELAY_MS);
86
+ }
87
+
88
/**
 * Check if an item should be retried based on retry count.
 *
 * Note: callers pass the item's current retry counter; the item is
 * eligible for another attempt only while that counter is strictly
 * below MAX_RETRIES.
 *
 * @param retries - Current number of retry attempts
 * @returns True if retries < MAX_RETRIES
 */
export function shouldRetry(retries: number): boolean {
  return retries < MAX_RETRIES;
}
97
+
98
+ /**
99
+ * Check if an item is a duplicate based on docId and seq.
100
+ *
101
+ * Used to prevent duplicate pushes when the same delta
102
+ * might be processed multiple times.
103
+ *
104
+ * @param item - Item to check
105
+ * @param sentItems - List of already sent items
106
+ * @returns True if the item is already in the sent list
107
+ */
108
+ export function isDuplicate(
109
+ item: ItemIdentifier,
110
+ sentItems: ItemIdentifier[]
111
+ ): boolean {
112
+ return sentItems.some(
113
+ (sent) => sent.docId === item.docId && sent.seq === item.seq
114
+ );
115
+ }
116
+
117
+ /**
118
+ * Create the payload structure for POSTing to dinkd.
119
+ *
120
+ * Converts the delta bytes from Uint8Array to number array
121
+ * for JSON serialization.
122
+ *
123
+ * @param item - Outbox item with delta bytes
124
+ * @returns Payload ready for JSON serialization
125
+ */
126
+ export function createPushPayload(item: {
127
+ collection: string;
128
+ docId: string;
129
+ delta: Uint8Array | ArrayBuffer;
130
+ seq: number;
131
+ }): PushPayload {
132
+ // Convert delta to number array for JSON
133
+ const deltaArray =
134
+ item.delta instanceof Uint8Array
135
+ ? item.delta
136
+ : new Uint8Array(item.delta);
137
+
138
+ return {
139
+ collection: item.collection,
140
+ docId: item.docId,
141
+ seq: item.seq,
142
+ bytes: Array.from(deltaArray),
143
+ };
144
+ }
145
+
146
+ /**
147
+ * Get the outbox configuration from environment variables.
148
+ *
149
+ * Reads DINK_HTTP_URL and DINK_APP_SYNC_KEY from the environment.
150
+ * In Convex, these are set via the dashboard environment variables.
151
+ *
152
+ * @returns Configuration object with dinkHttpUrl and appSyncKey
153
+ */
154
+ export function getOutboxConfig(): OutboxConfig {
155
+ return {
156
+ dinkHttpUrl: process.env.DINK_HTTP_URL,
157
+ appSyncKey: process.env.DINK_APP_SYNC_KEY,
158
+ };
159
+ }
160
+
161
+ /**
162
+ * Determine the new status for an outbox item based on push result.
163
+ *
164
+ * - Success: "sent"
165
+ * - Failure with retries remaining: "pending" (will be retried)
166
+ * - Failure at max retries: "failed" (permanent failure)
167
+ *
168
+ * @param success - Whether the push succeeded
169
+ * @param currentRetries - Current retry count
170
+ * @returns New status string
171
+ */
172
+ export function getNewStatus(
173
+ success: boolean,
174
+ currentRetries: number
175
+ ): string {
176
+ if (success) {
177
+ return OUTBOX_STATUS.SENT;
178
+ }
179
+
180
+ // If we've hit max retries, mark as failed
181
+ if (currentRetries >= MAX_RETRIES) {
182
+ return OUTBOX_STATUS.FAILED;
183
+ }
184
+
185
+ // Otherwise, keep as pending for retry
186
+ return OUTBOX_STATUS.PENDING;
187
+ }
188
+
189
+ /**
190
+ * Build the full URL for the convex-push endpoint.
191
+ *
192
+ * @param baseUrl - Base dinkd HTTP URL
193
+ * @returns Full URL for the convex-push endpoint
194
+ */
195
+ export function buildPushUrl(baseUrl: string): string {
196
+ const url = new URL("/api/sync/convex-push", baseUrl);
197
+ return url.toString();
198
+ }
@@ -0,0 +1,240 @@
1
+ /**
2
+ * Outbox processor for @fatagnus/dink-convex component.
3
+ *
4
+ * Processes pending items from sync_outbox and pushes them to dinkd.
5
+ * Called by the scheduled cron job and immediately after mutations.
6
+ *
7
+ * Re-export these in your app's convex/outboxProcessor.ts:
8
+ * ```typescript
9
+ * export {
10
+ * processOutboxBatch,
11
+ * queryPendingItems,
12
+ * updateItemStatus,
13
+ * scheduleOutboxProcessing,
14
+ * } from "@fatagnus/dink-convex/outbox";
15
+ * ```
16
+ *
17
+ * @module convex/outboxProcessor
18
+ */
19
+
20
+ import { v } from "convex/values";
21
+ import { internalAction, internalMutation, internalQuery } from "./_generated/server";
22
+ import { internal } from "./_generated/api";
23
+
24
+ import {
25
+ OUTBOX_STATUS,
26
+ calculateBackoffDelay,
27
+ isDuplicate,
28
+ createPushPayload,
29
+ getNewStatus,
30
+ buildPushUrl,
31
+ } from "./outbox";
32
+
33
/**
 * Maximum number of pending items fetched and pushed in a single
 * invocation of processOutboxBatch; anything beyond this waits for
 * the next scheduled run.
 */
const BATCH_SIZE = 50;
37
+
38
/**
 * Process a batch of pending outbox items.
 *
 * This action:
 * 1. Queries pending items from the outbox (up to BATCH_SIZE)
 * 2. POSTs each to dinkd /api/sync/convex-push with appSyncKey
 * 3. Updates status to 'sent' on success, handles retries on error
 * 4. Deduplicates by (docId, seq) to prevent duplicate pushes
 *
 * Notes:
 * - Items are sent sequentially, preserving the order returned by
 *   queryPendingItems.
 * - `processed` counts every fetched item, including skipped ones.
 * - Items still inside their backoff window are counted as skipped
 *   and remain pending for a later run.
 */
export const processOutboxBatch = internalAction({
  args: {},
  returns: v.object({
    processed: v.number(),
    sent: v.number(),
    failed: v.number(),
    skipped: v.number(),
  }),
  handler: async (ctx) => {
    // Get configuration from environment (set via the Convex dashboard).
    const dinkHttpUrl = process.env.DINK_HTTP_URL;
    const appSyncKey = process.env.DINK_APP_SYNC_KEY;

    // If not configured, skip processing entirely (no-op, not an error).
    if (!dinkHttpUrl || !appSyncKey) {
      console.log(
        "Outbox processor: DINK_HTTP_URL or DINK_APP_SYNC_KEY not configured, skipping"
      );
      return { processed: 0, sent: 0, failed: 0, skipped: 0 };
    }

    // Fetch up to BATCH_SIZE pending items via the internal query.
    const pendingItems = await ctx.runQuery(
      internal.outboxProcessor.queryPendingItems,
      { limit: BATCH_SIZE }
    );

    if (pendingItems.length === 0) {
      return { processed: 0, sent: 0, failed: 0, skipped: 0 };
    }

    const pushUrl = buildPushUrl(dinkHttpUrl);
    // (docId, seq) pairs sent so far in this batch, for deduplication.
    const sentItems: Array<{ docId: string; seq: number }> = [];
    let sent = 0;
    let failed = 0;
    let skipped = 0;

    for (const item of pendingItems) {
      // Skip items whose (docId, seq) was already sent in this batch.
      if (isDuplicate({ docId: item.docId, seq: item.seq }, sentItems)) {
        skipped++;
        continue;
      }

      // Respect exponential backoff: skip items attempted too recently.
      // retries - 1 is the count at the time of the last attempt.
      if (item.lastAttempt && item.retries > 0) {
        const backoffDelay = calculateBackoffDelay(item.retries - 1);
        const timeSinceLastAttempt = Date.now() - item.lastAttempt;
        if (timeSinceLastAttempt < backoffDelay) {
          skipped++;
          continue;
        }
      }

      // Build the JSON payload (delta bytes become a number array).
      const payload = createPushPayload({
        collection: item.collection,
        docId: item.docId,
        delta: item.delta,
        seq: item.seq,
      });

      let success = false;
      try {
        // POST to dinkd with the app sync key as a bearer token.
        const response = await fetch(pushUrl, {
          method: "POST",
          headers: {
            "Content-Type": "application/json",
            Authorization: `Bearer ${appSyncKey}`,
          },
          body: JSON.stringify(payload),
        });

        // Any non-2xx response counts as a failed attempt.
        success = response.ok;

        if (!success) {
          console.error(
            `Outbox push failed for ${item.docId}:${item.seq}: ${response.status} ${response.statusText}`
          );
        }
      } catch (error) {
        // Network-level errors are treated the same as HTTP failures.
        console.error(
          `Outbox push error for ${item.docId}:${item.seq}:`,
          error
        );
        success = false;
      }

      // On failure the retry counter advances; getNewStatus sees the
      // incremented value, so an item fails permanently after
      // MAX_RETRIES attempts.
      const newRetries = success ? item.retries : item.retries + 1;
      const newStatus = getNewStatus(success, newRetries);

      // Persist the outcome so the next run sees updated state.
      await ctx.runMutation(
        internal.outboxProcessor.updateItemStatus,
        {
          itemId: item._id,
          status: newStatus,
          retries: newRetries,
          lastAttempt: Date.now(),
        }
      );

      if (success) {
        sent++;
        sentItems.push({ docId: item.docId, seq: item.seq });
      } else if (newStatus === OUTBOX_STATUS.FAILED) {
        // Only terminal failures are counted; retryable ones stay pending.
        failed++;
      }
    }

    return {
      processed: pendingItems.length,
      sent,
      failed,
      skipped,
    };
  },
});
167
+
168
/**
 * Query pending items from the sync_outbox table.
 * Internal query used by the processor action.
 *
 * Uses the by_status index to fetch only rows whose status is
 * "pending", limited to `limit` rows.
 * NOTE(review): no explicit ordering beyond the index is applied —
 * presumably rows come back in creation order; confirm against
 * Convex index-ordering semantics if strict FIFO matters.
 */
export const queryPendingItems = internalQuery({
  args: {
    // Maximum number of rows to return.
    limit: v.number(),
  },
  returns: v.array(
    v.object({
      _id: v.id("sync_outbox"),
      collection: v.string(),
      docId: v.string(),
      delta: v.bytes(),
      seq: v.number(),
      status: v.string(),
      retries: v.number(),
      createdAt: v.number(),
      lastAttempt: v.optional(v.number()),
    })
  ),
  handler: async (ctx, args) => {
    // Range over the by_status index at exactly "pending".
    const items = await ctx.db
      .query("sync_outbox")
      .withIndex("by_status", (q) => q.eq("status", OUTBOX_STATUS.PENDING))
      .take(args.limit);

    return items;
  },
});
198
+
199
+ /**
200
+ * Update the status of an outbox item.
201
+ * Internal mutation used by the processor action.
202
+ */
203
+ export const updateItemStatus = internalMutation({
204
+ args: {
205
+ itemId: v.id("sync_outbox"),
206
+ status: v.string(),
207
+ retries: v.number(),
208
+ lastAttempt: v.number(),
209
+ },
210
+ returns: v.null(),
211
+ handler: async (ctx, args) => {
212
+ await ctx.db.patch(args.itemId, {
213
+ status: args.status,
214
+ retries: args.retries,
215
+ lastAttempt: args.lastAttempt,
216
+ });
217
+ return null;
218
+ },
219
+ });
220
+
221
/**
 * Schedule immediate outbox processing.
 *
 * This mutation uses ctx.scheduler.runAfter(0, ...) to trigger
 * the processOutboxBatch action immediately after the current
 * transaction commits. This provides near real-time sync instead
 * of waiting for the cron job fallback.
 *
 * Called by trigger handlers after inserting to the outbox.
 */
export const scheduleOutboxProcessing = internalMutation({
  args: {},
  returns: v.null(),
  handler: async (ctx) => {
    // Zero delay = run as soon as this transaction commits; the action
    // itself re-reads pending items, so double-scheduling is harmless.
    await ctx.scheduler.runAfter(0, internal.outboxProcessor.processOutboxBatch, {});
    return null;
  },
});
@@ -0,0 +1,97 @@
1
+ /**
2
+ * Internal sync tables schema for @fatagnus/dink-convex component.
3
+ *
4
+ * These tables are component-private and not visible to developers.
5
+ * They manage sync state, deltas, and configuration for bidirectional sync.
6
+ *
7
+ * @module convex/schema
8
+ */
9
+
10
+ import { defineSchema, defineTable } from "convex/server";
11
+ import { v } from "convex/values";
12
+
13
export default defineSchema({
  /**
   * sync_deltas - Stores CRDT delta updates for sync operations.
   *
   * Each delta represents a change to a document that needs to be
   * synchronized with edge clients via dinkd.
   */
  sync_deltas: defineTable({
    /** Collection name (user table) */
    collection: v.string(),
    /** Document sync ID (syncId from user table) */
    docId: v.string(),
    /** CRDT delta bytes (Yjs update) */
    bytes: v.bytes(),
    /** Sequence number for ordering */
    seq: v.number(),
    /** Timestamp when delta was created */
    timestamp: v.number(),
    /** Edge ID that originated this delta (optional) */
    edgeId: v.optional(v.string()),
  })
    .index("by_collection", ["collection"])
    .index("by_docId", ["collection", "docId"])
    .index("by_seq", ["collection", "seq"]),

  /**
   * sync_outbox - Queue of deltas pending push to dinkd.
   *
   * Deltas are queued here when triggers fire, then processed by
   * the scheduled outbox processor which pushes them to dinkd.
   */
  sync_outbox: defineTable({
    /** Collection name (user table) */
    collection: v.string(),
    /** Document sync ID (syncId from user table) */
    docId: v.string(),
    /** CRDT delta bytes to send */
    delta: v.bytes(),
    /** Sequence number for deduplication */
    seq: v.number(),
    /** Status: "pending" | "sent" | "failed" (see OUTBOX_STATUS) */
    status: v.string(),
    /** Number of retry attempts */
    retries: v.number(),
    /** Timestamp when item was created */
    createdAt: v.number(),
    /** Timestamp of last send attempt (optional) */
    lastAttempt: v.optional(v.number()),
  })
    .index("by_status", ["status"])
    .index("by_collection", ["collection"])
    .index("by_docId_seq", ["docId", "seq"]),

  /**
   * sync_sessions - Tracks connected sessions and their sync state.
   *
   * Each session represents a connected client that is syncing
   * documents from a specific collection.
   */
  sync_sessions: defineTable({
    /** Unique session identifier */
    sessionId: v.string(),
    /** Collection being synced */
    collection: v.string(),
    /** Last sequence number acknowledged by this session */
    lastSeq: v.number(),
    /** Timestamp when session was last active */
    lastSeen: v.number(),
  })
    .index("by_sessionId", ["sessionId"])
    .index("by_collection", ["collection"]),

  /**
   * sync_config - Configuration for which tables have sync enabled.
   *
   * Triggers check this table to determine whether to process
   * writes to a given collection.
   */
  sync_config: defineTable({
    /** Collection name (user table) */
    collection: v.string(),
    /** Whether sync is enabled for this collection */
    enabled: v.boolean(),
  }).index("by_collection", ["collection"]),
});