@spooky-sync/core 0.0.0-canary.1
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/README.md +21 -0
- package/dist/index.d.ts +590 -0
- package/dist/index.js +3082 -0
- package/package.json +46 -0
- package/src/events/events.test.ts +242 -0
- package/src/events/index.ts +261 -0
- package/src/index.ts +3 -0
- package/src/modules/auth/events/index.ts +18 -0
- package/src/modules/auth/index.ts +267 -0
- package/src/modules/cache/index.ts +241 -0
- package/src/modules/cache/types.ts +19 -0
- package/src/modules/data/data.test.ts +58 -0
- package/src/modules/data/index.ts +777 -0
- package/src/modules/devtools/index.ts +364 -0
- package/src/modules/sync/engine.ts +163 -0
- package/src/modules/sync/events/index.ts +77 -0
- package/src/modules/sync/index.ts +3 -0
- package/src/modules/sync/queue/index.ts +2 -0
- package/src/modules/sync/queue/queue-down.ts +89 -0
- package/src/modules/sync/queue/queue-up.ts +223 -0
- package/src/modules/sync/scheduler.ts +84 -0
- package/src/modules/sync/sync.ts +407 -0
- package/src/modules/sync/utils.test.ts +311 -0
- package/src/modules/sync/utils.ts +171 -0
- package/src/services/database/database.ts +108 -0
- package/src/services/database/events/index.ts +32 -0
- package/src/services/database/index.ts +5 -0
- package/src/services/database/local-migrator.ts +203 -0
- package/src/services/database/local.ts +99 -0
- package/src/services/database/remote.ts +110 -0
- package/src/services/logger/index.ts +118 -0
- package/src/services/persistence/localstorage.ts +26 -0
- package/src/services/persistence/surrealdb.ts +62 -0
- package/src/services/stream-processor/index.ts +364 -0
- package/src/services/stream-processor/stream-processor.test.ts +140 -0
- package/src/services/stream-processor/wasm-types.ts +31 -0
- package/src/spooky.ts +346 -0
- package/src/types.ts +237 -0
- package/src/utils/error-classification.ts +28 -0
- package/src/utils/index.ts +172 -0
- package/src/utils/parser.test.ts +125 -0
- package/src/utils/parser.ts +46 -0
- package/src/utils/surql.ts +182 -0
- package/src/utils/utils.test.ts +152 -0
- package/src/utils/withRetry.test.ts +153 -0
- package/tsconfig.json +14 -0
- package/tsdown.config.ts +9 -0
|
@@ -0,0 +1,364 @@
|
|
|
1
|
+
import { LocalDatabaseService, RemoteDatabaseService } from '../../services/database/index';
|
|
2
|
+
import { Logger } from '../../services/logger/index';
|
|
3
|
+
import { SchemaStructure } from '@spooky-sync/query-builder';
|
|
4
|
+
import { RecordId } from 'surrealdb';
|
|
5
|
+
import { StreamUpdate, StreamUpdateReceiver } from '../../services/stream-processor/index';
|
|
6
|
+
import { encodeRecordId } from '../../utils/index';
|
|
7
|
+
|
|
8
|
+
// DevTools interfaces (matching extension expectations)
|
|
9
|
+
export interface DevToolsEvent {
  // Monotonic sequence number assigned by DevToolsService as events are recorded.
  id: number;
  // Epoch milliseconds at which the event was recorded.
  timestamp: number;
  // Event tag, e.g. 'QUERY_UPDATED', 'STREAM_UPDATE', 'MUTATION_REQUEST_EXECUTION'.
  eventType: string;
  // Event-specific data; passed through serializeForDevTools before storage so it
  // is safe to post across the window boundary.
  payload: any;
}
|
|
15
|
+
|
|
16
|
+
import { DataModule } from '../data/index';
|
|
17
|
+
import { AuthService } from '../auth/index';
|
|
18
|
+
import { AuthEventTypes } from '../auth/events/index';
|
|
19
|
+
|
|
20
|
+
export class DevToolsService implements StreamUpdateReceiver {
|
|
21
|
+
private eventsHistory: DevToolsEvent[] = [];
|
|
22
|
+
private eventIdCounter = 0;
|
|
23
|
+
private version = '1.0.0';
|
|
24
|
+
|
|
25
|
+
constructor(
|
|
26
|
+
private databaseService: LocalDatabaseService,
|
|
27
|
+
private remoteDatabaseService: RemoteDatabaseService,
|
|
28
|
+
private logger: Logger,
|
|
29
|
+
private schema: SchemaStructure,
|
|
30
|
+
private authService: AuthService<SchemaStructure>,
|
|
31
|
+
private dataManager?: DataModule<SchemaStructure>
|
|
32
|
+
) {
|
|
33
|
+
this.exposeToWindow();
|
|
34
|
+
|
|
35
|
+
// Subscribe to auth events
|
|
36
|
+
this.authService.eventSystem.subscribe(AuthEventTypes.AuthStateChanged, () => {
|
|
37
|
+
this.notifyDevTools();
|
|
38
|
+
});
|
|
39
|
+
|
|
40
|
+
this.logger.debug({ Category: 'spooky-client::DevToolsService::init' }, 'Service initialized');
|
|
41
|
+
}
|
|
42
|
+
|
|
43
|
+
// Get active queries directly from DataManager (single source of truth)
|
|
44
|
+
private getActiveQueries(): Map<number, any> {
|
|
45
|
+
const result = new Map<number, any>();
|
|
46
|
+
if (!this.dataManager) return result;
|
|
47
|
+
|
|
48
|
+
const queries = this.dataManager.getActiveQueries();
|
|
49
|
+
queries.forEach((q) => {
|
|
50
|
+
const queryHash = this.hashString(encodeRecordId(q.config.id));
|
|
51
|
+
result.set(queryHash, {
|
|
52
|
+
queryHash,
|
|
53
|
+
status: 'active',
|
|
54
|
+
createdAt:
|
|
55
|
+
q.config.lastActiveAt instanceof Date
|
|
56
|
+
? q.config.lastActiveAt.getTime()
|
|
57
|
+
: new Date(q.config.lastActiveAt || Date.now()).getTime(),
|
|
58
|
+
lastUpdate: Date.now(),
|
|
59
|
+
updateCount: q.updateCount,
|
|
60
|
+
query: q.config.surql,
|
|
61
|
+
variables: q.config.params || {},
|
|
62
|
+
dataSize: q.records?.length || 0,
|
|
63
|
+
data: q.records,
|
|
64
|
+
localArray: q.config.localArray,
|
|
65
|
+
remoteArray: q.config.remoteArray,
|
|
66
|
+
});
|
|
67
|
+
});
|
|
68
|
+
return result;
|
|
69
|
+
}
|
|
70
|
+
|
|
71
|
+
public onQueryInitialized(payload: any) {
|
|
72
|
+
this.logger.debug(
|
|
73
|
+
{ payload, Category: 'spooky-client::DevToolsService::onQueryInitialized' },
|
|
74
|
+
'QueryInitialized'
|
|
75
|
+
);
|
|
76
|
+
const queryHash = this.hashString(payload.queryId.toString());
|
|
77
|
+
|
|
78
|
+
this.addEvent('QUERY_REQUEST_INIT', {
|
|
79
|
+
queryHash,
|
|
80
|
+
query: payload.sql,
|
|
81
|
+
variables: {},
|
|
82
|
+
});
|
|
83
|
+
this.notifyDevTools();
|
|
84
|
+
}
|
|
85
|
+
|
|
86
|
+
public onQueryUpdated(payload: any) {
|
|
87
|
+
this.logger.debug(
|
|
88
|
+
{
|
|
89
|
+
id: payload.queryId?.toString(),
|
|
90
|
+
Category: 'spooky-client::DevToolsService::onQueryUpdated',
|
|
91
|
+
},
|
|
92
|
+
'QueryUpdated'
|
|
93
|
+
);
|
|
94
|
+
const queryHash = this.hashString(payload.queryId.toString());
|
|
95
|
+
|
|
96
|
+
this.addEvent('QUERY_UPDATED', {
|
|
97
|
+
queryHash,
|
|
98
|
+
data: payload.records,
|
|
99
|
+
});
|
|
100
|
+
this.notifyDevTools();
|
|
101
|
+
}
|
|
102
|
+
|
|
103
|
+
public onStreamUpdate(update: StreamUpdate) {
|
|
104
|
+
this.logger.debug(
|
|
105
|
+
{ update, Category: 'spooky-client::DevToolsService::onStreamUpdate' },
|
|
106
|
+
'StreamUpdate'
|
|
107
|
+
);
|
|
108
|
+
this.addEvent('STREAM_UPDATE', {
|
|
109
|
+
updates: [update],
|
|
110
|
+
});
|
|
111
|
+
this.notifyDevTools();
|
|
112
|
+
}
|
|
113
|
+
|
|
114
|
+
public onMutation(payload: any[]) {
|
|
115
|
+
const payloads = payload;
|
|
116
|
+
payloads.forEach((p) => {
|
|
117
|
+
this.addEvent('MUTATION_REQUEST_EXECUTION', {
|
|
118
|
+
mutation: {
|
|
119
|
+
type: 'create', // simplifying
|
|
120
|
+
data: 'data' in p ? p.data : undefined,
|
|
121
|
+
selector: encodeRecordId(p.record_id),
|
|
122
|
+
},
|
|
123
|
+
});
|
|
124
|
+
});
|
|
125
|
+
this.notifyDevTools();
|
|
126
|
+
}
|
|
127
|
+
|
|
128
|
+
private hashString(str: string): number {
|
|
129
|
+
let hash = 0;
|
|
130
|
+
if (str.length === 0) return hash;
|
|
131
|
+
for (let i = 0; i < str.length; i++) {
|
|
132
|
+
const char = str.charCodeAt(i);
|
|
133
|
+
hash = (hash << 5) - hash + char;
|
|
134
|
+
hash = hash & hash; // Convert to 32bit integer
|
|
135
|
+
}
|
|
136
|
+
return hash;
|
|
137
|
+
}
|
|
138
|
+
|
|
139
|
+
public logEvent(eventType: string, payload: any) {
|
|
140
|
+
this.addEvent(eventType, payload);
|
|
141
|
+
this.notifyDevTools();
|
|
142
|
+
}
|
|
143
|
+
|
|
144
|
+
private addEvent(eventType: string, payload: any) {
|
|
145
|
+
this.eventsHistory.push({
|
|
146
|
+
id: this.eventIdCounter++,
|
|
147
|
+
timestamp: Date.now(),
|
|
148
|
+
eventType,
|
|
149
|
+
payload: this.serializeForDevTools(payload),
|
|
150
|
+
});
|
|
151
|
+
if (this.eventsHistory.length > 100) this.eventsHistory.shift();
|
|
152
|
+
}
|
|
153
|
+
|
|
154
|
+
private getState() {
|
|
155
|
+
return this.serializeForDevTools({
|
|
156
|
+
eventsHistory: [...this.eventsHistory],
|
|
157
|
+
activeQueries: Object.fromEntries(this.getActiveQueries()),
|
|
158
|
+
auth: {
|
|
159
|
+
authenticated: this.authService.isAuthenticated,
|
|
160
|
+
userId: this.authService.currentUser?.id,
|
|
161
|
+
},
|
|
162
|
+
version: this.version,
|
|
163
|
+
database: {
|
|
164
|
+
tables: this.schema.tables.map((t) => t.name),
|
|
165
|
+
tableData: {},
|
|
166
|
+
},
|
|
167
|
+
});
|
|
168
|
+
}
|
|
169
|
+
|
|
170
|
+
private notifyDevTools() {
|
|
171
|
+
if (typeof window !== 'undefined') {
|
|
172
|
+
window.postMessage(
|
|
173
|
+
{
|
|
174
|
+
type: 'SPOOKY_STATE_CHANGED',
|
|
175
|
+
source: 'spooky-devtools-page',
|
|
176
|
+
state: this.getState(),
|
|
177
|
+
},
|
|
178
|
+
'*'
|
|
179
|
+
);
|
|
180
|
+
}
|
|
181
|
+
}
|
|
182
|
+
|
|
183
|
+
private serializeForDevTools(data: any, seen = new WeakSet<object>()): any {
|
|
184
|
+
if (data === undefined) {
|
|
185
|
+
return 'undefined';
|
|
186
|
+
}
|
|
187
|
+
|
|
188
|
+
if (data === null) {
|
|
189
|
+
return null;
|
|
190
|
+
}
|
|
191
|
+
|
|
192
|
+
if (data instanceof RecordId) {
|
|
193
|
+
return data.toString();
|
|
194
|
+
}
|
|
195
|
+
|
|
196
|
+
if (Array.isArray(data)) {
|
|
197
|
+
if (seen.has(data)) {
|
|
198
|
+
return '[Circular Array]';
|
|
199
|
+
}
|
|
200
|
+
seen.add(data);
|
|
201
|
+
return data.map((item) => this.serializeForDevTools(item, seen));
|
|
202
|
+
}
|
|
203
|
+
|
|
204
|
+
if (typeof data === 'bigint') {
|
|
205
|
+
return data.toString();
|
|
206
|
+
}
|
|
207
|
+
|
|
208
|
+
if (data instanceof Date) {
|
|
209
|
+
return data.toISOString();
|
|
210
|
+
}
|
|
211
|
+
|
|
212
|
+
if (typeof data === 'object') {
|
|
213
|
+
if (seen.has(data)) {
|
|
214
|
+
return '[Circular Object]';
|
|
215
|
+
}
|
|
216
|
+
seen.add(data);
|
|
217
|
+
|
|
218
|
+
const result: Record<string, any> = {};
|
|
219
|
+
for (const key in data) {
|
|
220
|
+
if (Object.prototype.hasOwnProperty.call(data, key)) {
|
|
221
|
+
result[key] = this.serializeForDevTools(data[key], seen);
|
|
222
|
+
}
|
|
223
|
+
}
|
|
224
|
+
return result;
|
|
225
|
+
}
|
|
226
|
+
|
|
227
|
+
return data;
|
|
228
|
+
}
|
|
229
|
+
|
|
230
|
+
private exposeToWindow() {
|
|
231
|
+
if (typeof window !== 'undefined') {
|
|
232
|
+
(window as any).__SPOOKY__ = {
|
|
233
|
+
version: this.version,
|
|
234
|
+
getState: () => this.getState(),
|
|
235
|
+
clearHistory: () => {
|
|
236
|
+
this.eventsHistory = [];
|
|
237
|
+
this.notifyDevTools();
|
|
238
|
+
},
|
|
239
|
+
getTableData: async (tableName: string) => {
|
|
240
|
+
try {
|
|
241
|
+
// Returns the first statement result as T.
|
|
242
|
+
// SurrealDB query returns [Result1, Result2...].
|
|
243
|
+
// We want the records from the first result.
|
|
244
|
+
const result = await this.databaseService.query<any>(`SELECT * FROM ${tableName}`);
|
|
245
|
+
|
|
246
|
+
let records: any[] = [];
|
|
247
|
+
|
|
248
|
+
if (Array.isArray(result) && result.length > 0) {
|
|
249
|
+
const first = result[0];
|
|
250
|
+
if (Array.isArray(first)) {
|
|
251
|
+
// Legacy or flattened format: [[records]]
|
|
252
|
+
records = first;
|
|
253
|
+
} else if (
|
|
254
|
+
first &&
|
|
255
|
+
typeof first === 'object' &&
|
|
256
|
+
'result' in first &&
|
|
257
|
+
'status' in first
|
|
258
|
+
) {
|
|
259
|
+
// SurrealDB 2.0 format: [{ result: [...records], status: 'OK', ... }]
|
|
260
|
+
records = Array.isArray(first.result) ? first.result : [];
|
|
261
|
+
} else {
|
|
262
|
+
// Fallback: assume result is the array of records itself
|
|
263
|
+
records = result;
|
|
264
|
+
}
|
|
265
|
+
} else if (Array.isArray(result)) {
|
|
266
|
+
// Empty array
|
|
267
|
+
records = [];
|
|
268
|
+
}
|
|
269
|
+
|
|
270
|
+
return this.serializeForDevTools(records) || [];
|
|
271
|
+
} catch (e) {
|
|
272
|
+
this.logger.error(
|
|
273
|
+
{ err: e, Category: 'spooky-client::DevToolsService::exposeToWindow' },
|
|
274
|
+
'Failed to get table data'
|
|
275
|
+
);
|
|
276
|
+
return [];
|
|
277
|
+
}
|
|
278
|
+
},
|
|
279
|
+
updateTableRow: async (
|
|
280
|
+
tableName: string,
|
|
281
|
+
recordId: string,
|
|
282
|
+
updates: Record<string, unknown>
|
|
283
|
+
) => {
|
|
284
|
+
try {
|
|
285
|
+
await this.databaseService.query(`UPDATE ${recordId} MERGE $updates`, { updates });
|
|
286
|
+
return { success: true };
|
|
287
|
+
} catch (e: any) {
|
|
288
|
+
return { success: false, error: e.message };
|
|
289
|
+
}
|
|
290
|
+
},
|
|
291
|
+
deleteTableRow: async (tableName: string, recordId: string) => {
|
|
292
|
+
try {
|
|
293
|
+
await this.databaseService.query(`DELETE ${recordId}`);
|
|
294
|
+
return { success: true };
|
|
295
|
+
} catch (e: any) {
|
|
296
|
+
return { success: false, error: e.message };
|
|
297
|
+
}
|
|
298
|
+
},
|
|
299
|
+
runQuery: async (query: string, target: 'local' | 'remote' = 'local') => {
|
|
300
|
+
try {
|
|
301
|
+
this.logger.debug(
|
|
302
|
+
{ query, target, Category: 'spooky-client::DevToolsService::runQuery' },
|
|
303
|
+
'Running query (START)'
|
|
304
|
+
);
|
|
305
|
+
const service = target === 'remote' ? this.remoteDatabaseService : this.databaseService;
|
|
306
|
+
|
|
307
|
+
const startTime = Date.now();
|
|
308
|
+
const result = await service.query<any>(query);
|
|
309
|
+
const queryTime = Date.now() - startTime;
|
|
310
|
+
|
|
311
|
+
this.logger.debug(
|
|
312
|
+
{
|
|
313
|
+
query,
|
|
314
|
+
time: queryTime,
|
|
315
|
+
resultType: typeof result,
|
|
316
|
+
isArray: Array.isArray(result),
|
|
317
|
+
Category: 'spooky-client::DevToolsService::runQuery',
|
|
318
|
+
},
|
|
319
|
+
'Database returned result'
|
|
320
|
+
);
|
|
321
|
+
|
|
322
|
+
// Serialize the result for DevTools
|
|
323
|
+
const serializeStart = Date.now();
|
|
324
|
+
const serialized = this.serializeForDevTools(result);
|
|
325
|
+
const serializeTime = Date.now() - serializeStart;
|
|
326
|
+
|
|
327
|
+
this.logger.debug(
|
|
328
|
+
{
|
|
329
|
+
serializeTime,
|
|
330
|
+
serializedLength: JSON.stringify(serialized).length,
|
|
331
|
+
Category: 'spooky-client::DevToolsService::runQuery',
|
|
332
|
+
},
|
|
333
|
+
'Serialization complete'
|
|
334
|
+
);
|
|
335
|
+
|
|
336
|
+
return {
|
|
337
|
+
success: true,
|
|
338
|
+
data: serialized,
|
|
339
|
+
target,
|
|
340
|
+
};
|
|
341
|
+
} catch (e: any) {
|
|
342
|
+
this.logger.error(
|
|
343
|
+
{ err: e, query, target, Category: 'spooky-client::DevToolsService::runQuery' },
|
|
344
|
+
'Query execution failed'
|
|
345
|
+
);
|
|
346
|
+
// Ensure we always return a string for error
|
|
347
|
+
const errorMessage =
|
|
348
|
+
e instanceof Error ? e.message : typeof e === 'string' ? e : JSON.stringify(e);
|
|
349
|
+
return { success: false, error: errorMessage || 'Unknown occurred' };
|
|
350
|
+
}
|
|
351
|
+
},
|
|
352
|
+
};
|
|
353
|
+
|
|
354
|
+
window.postMessage(
|
|
355
|
+
{
|
|
356
|
+
type: 'SPOOKY_DETECTED',
|
|
357
|
+
source: 'spooky-devtools-page',
|
|
358
|
+
data: { version: this.version, detected: true },
|
|
359
|
+
},
|
|
360
|
+
'*'
|
|
361
|
+
);
|
|
362
|
+
}
|
|
363
|
+
}
|
|
364
|
+
}
|
|
@@ -0,0 +1,163 @@
|
|
|
1
|
+
import { RecordId } from 'surrealdb';
|
|
2
|
+
import { SchemaStructure } from '@spooky-sync/query-builder';
|
|
3
|
+
import { RemoteDatabaseService } from '../../services/database/index';
|
|
4
|
+
import { CacheModule, CacheRecord, RecordWithId } from '../cache/index';
|
|
5
|
+
import { RecordVersionDiff } from '../../types';
|
|
6
|
+
import { Logger } from '../../services/logger/index';
|
|
7
|
+
import { SyncEventTypes, createSyncEventSystem } from './events/index';
|
|
8
|
+
import { encodeRecordId } from '../../utils/index';
|
|
9
|
+
import { cleanRecord } from '../../utils/parser';
|
|
10
|
+
|
|
11
|
+
/**
|
|
12
|
+
* SyncEngine handles the core sync operations: fetching remote records,
|
|
13
|
+
* caching them locally, and ingesting into DBSP.
|
|
14
|
+
*
|
|
15
|
+
* This is extracted from SpookySync to separate "how to sync" from "when to sync".
|
|
16
|
+
*/
|
|
17
|
+
export class SyncEngine {
|
|
18
|
+
private logger: Logger;
|
|
19
|
+
public events = createSyncEventSystem();
|
|
20
|
+
|
|
21
|
+
constructor(
|
|
22
|
+
private remote: RemoteDatabaseService,
|
|
23
|
+
private cache: CacheModule,
|
|
24
|
+
private schema: SchemaStructure,
|
|
25
|
+
logger: Logger
|
|
26
|
+
) {
|
|
27
|
+
this.logger = logger.child({ service: 'SpookySync:SyncEngine' });
|
|
28
|
+
}
|
|
29
|
+
|
|
30
|
+
/**
|
|
31
|
+
* Sync missing/updated/removed records between local and remote.
|
|
32
|
+
* Main entry point for sync operations.
|
|
33
|
+
* Uses batch processing to minimize events emitted.
|
|
34
|
+
*/
|
|
35
|
+
async syncRecords(diff: RecordVersionDiff): Promise<void> {
|
|
36
|
+
const { added, updated, removed } = diff;
|
|
37
|
+
|
|
38
|
+
this.logger.debug(
|
|
39
|
+
{
|
|
40
|
+
added,
|
|
41
|
+
updated,
|
|
42
|
+
removed,
|
|
43
|
+
Category: 'spooky-client::SyncEngine::syncRecords',
|
|
44
|
+
},
|
|
45
|
+
'SyncEngine.syncRecords diff'
|
|
46
|
+
);
|
|
47
|
+
|
|
48
|
+
// Handle removed records: verify they don't exist remotely before deleting locally
|
|
49
|
+
if (removed.length > 0) {
|
|
50
|
+
await this.handleRemovedRecords(removed);
|
|
51
|
+
}
|
|
52
|
+
|
|
53
|
+
// Fetch added/updated records from remote
|
|
54
|
+
const toFetch = [...added, ...updated];
|
|
55
|
+
const idsToFetch = toFetch.map((x) => x.id);
|
|
56
|
+
if (idsToFetch.length === 0) {
|
|
57
|
+
return;
|
|
58
|
+
}
|
|
59
|
+
|
|
60
|
+
const [remoteResults] = await this.remote.query<[RecordWithId[]]>(
|
|
61
|
+
"SELECT (SELECT * FROM ONLY <record>$parent.id) AS record, (SELECT version FROM ONLY _spooky_version WHERE record_id = <record>$parent.id)['version'] as spooky_rv FROM $idsToFetch",
|
|
62
|
+
{ idsToFetch }
|
|
63
|
+
);
|
|
64
|
+
console.log('remoteResults>', remoteResults);
|
|
65
|
+
// Prepare batch for cache (which handles both DB and DBSP)
|
|
66
|
+
const cacheBatch: CacheRecord[] = [];
|
|
67
|
+
|
|
68
|
+
for (const { spooky_rv, record } of remoteResults) {
|
|
69
|
+
if (!record?.id) {
|
|
70
|
+
this.logger.warn(
|
|
71
|
+
{
|
|
72
|
+
record,
|
|
73
|
+
idsToFetch,
|
|
74
|
+
Category: 'spooky-client::SyncEngine::syncRecords',
|
|
75
|
+
},
|
|
76
|
+
'Remote record has no id. Skipping record'
|
|
77
|
+
);
|
|
78
|
+
continue;
|
|
79
|
+
}
|
|
80
|
+
const fullId = encodeRecordId(record.id);
|
|
81
|
+
const table = record.id.table.toString();
|
|
82
|
+
const isAdded = added.some((item) => encodeRecordId(item.id) === fullId);
|
|
83
|
+
|
|
84
|
+
const localVersion = this.cache.lookup(fullId);
|
|
85
|
+
if (localVersion && spooky_rv <= localVersion) {
|
|
86
|
+
this.logger.info(
|
|
87
|
+
{
|
|
88
|
+
recordId: fullId,
|
|
89
|
+
version: spooky_rv,
|
|
90
|
+
localVersion,
|
|
91
|
+
Category: 'spooky-client::SyncEngine::syncRecords',
|
|
92
|
+
},
|
|
93
|
+
'Local version is higher than remote version. Skipping record'
|
|
94
|
+
);
|
|
95
|
+
continue;
|
|
96
|
+
}
|
|
97
|
+
const tableSchema = this.schema.tables.find((t) => t.name === table);
|
|
98
|
+
const cleanedRecord = tableSchema
|
|
99
|
+
? cleanRecord(tableSchema.columns, record)
|
|
100
|
+
: record;
|
|
101
|
+
|
|
102
|
+
cacheBatch.push({
|
|
103
|
+
table,
|
|
104
|
+
op: isAdded ? 'CREATE' : 'UPDATE',
|
|
105
|
+
record: cleanedRecord as RecordWithId,
|
|
106
|
+
version: spooky_rv,
|
|
107
|
+
});
|
|
108
|
+
}
|
|
109
|
+
|
|
110
|
+
// Use CacheModule to handle both local DB and DBSP ingestion
|
|
111
|
+
if (cacheBatch.length > 0) {
|
|
112
|
+
await this.cache.saveBatch(cacheBatch);
|
|
113
|
+
}
|
|
114
|
+
|
|
115
|
+
this.events.emit(SyncEventTypes.RemoteDataIngested, {
|
|
116
|
+
records: remoteResults,
|
|
117
|
+
});
|
|
118
|
+
}
|
|
119
|
+
|
|
120
|
+
/**
|
|
121
|
+
* Handle records that exist locally but not in remote array.
|
|
122
|
+
*/
|
|
123
|
+
private async handleRemovedRecords(removed: RecordId[]): Promise<void> {
|
|
124
|
+
this.logger.debug(
|
|
125
|
+
{
|
|
126
|
+
removed: removed.map((r) => r.toString()),
|
|
127
|
+
Category: 'spooky-client::SyncEngine::handleRemovedRecords',
|
|
128
|
+
},
|
|
129
|
+
'Checking removed records'
|
|
130
|
+
);
|
|
131
|
+
|
|
132
|
+
let existingRemoteIds = new Set<string>();
|
|
133
|
+
try {
|
|
134
|
+
const [existingRemote] = await this.remote.query<[{ id: RecordId }[]]>('SELECT id FROM $ids', {
|
|
135
|
+
ids: removed,
|
|
136
|
+
});
|
|
137
|
+
existingRemoteIds = new Set(existingRemote.map((r) => encodeRecordId(r.id)));
|
|
138
|
+
} catch {
|
|
139
|
+
// If remote check fails (e.g., SurrealDB parameter serialization issue),
|
|
140
|
+
// proceed with deletion — the caller has already determined these should be removed
|
|
141
|
+
this.logger.debug(
|
|
142
|
+
{ Category: 'spooky-client::SyncEngine::handleRemovedRecords' },
|
|
143
|
+
'Remote existence check failed, proceeding with deletion'
|
|
144
|
+
);
|
|
145
|
+
}
|
|
146
|
+
|
|
147
|
+
for (const recordId of removed) {
|
|
148
|
+
const recordIdStr = encodeRecordId(recordId);
|
|
149
|
+
if (!existingRemoteIds.has(recordIdStr)) {
|
|
150
|
+
this.logger.debug(
|
|
151
|
+
{
|
|
152
|
+
recordId: recordIdStr,
|
|
153
|
+
Category: 'spooky-client::SyncEngine::handleRemovedRecords',
|
|
154
|
+
},
|
|
155
|
+
'Deleting confirmed removed record'
|
|
156
|
+
);
|
|
157
|
+
|
|
158
|
+
// Use CacheModule to handle both local DB and DBSP deletion
|
|
159
|
+
await this.cache.delete(recordId.table.name, recordIdStr);
|
|
160
|
+
}
|
|
161
|
+
}
|
|
162
|
+
}
|
|
163
|
+
}
|
|
@@ -0,0 +1,77 @@
|
|
|
1
|
+
import { createEventSystem, EventDefinition, EventSystem } from '../../../events/index';
|
|
2
|
+
import { RecordVersionArray } from '../../../types';
|
|
3
|
+
|
|
4
|
+
// Event tags emitted by the up/down sync queues whenever their size changes.
export const SyncQueueEventTypes = {
  MutationEnqueued: 'MUTATION_ENQUEUED',
  MutationDequeued: 'MUTATION_DEQUEUED',
  QueryItemEnqueued: 'QUERY_ITEM_ENQUEUED',
} as const;

// Maps each queue event tag to its payload shape; all three carry the
// queue length observed immediately after the enqueue/dequeue.
export type SyncQueueEventTypeMap = {
  [SyncQueueEventTypes.MutationEnqueued]: EventDefinition<
    typeof SyncQueueEventTypes.MutationEnqueued,
    { queueSize: number }
  >;
  [SyncQueueEventTypes.MutationDequeued]: EventDefinition<
    typeof SyncQueueEventTypes.MutationDequeued,
    { queueSize: number }
  >;
  [SyncQueueEventTypes.QueryItemEnqueued]: EventDefinition<
    typeof SyncQueueEventTypes.QueryItemEnqueued,
    { queueSize: number }
  >;
};

// Concrete event-system type for queue events.
export type SyncQueueEventSystem = EventSystem<SyncQueueEventTypeMap>;

// Builds an event system pre-registered with every queue event type.
export function createSyncQueueEventSystem(): SyncQueueEventSystem {
  return createEventSystem([
    SyncQueueEventTypes.QueryItemEnqueued,
    SyncQueueEventTypes.MutationEnqueued,
    SyncQueueEventTypes.MutationDequeued,
  ]);
}
|
|
34
|
+
|
|
35
|
+
// Event tags emitted by the SyncEngine during sync passes.
export const SyncEventTypes = {
  QueryUpdated: 'SYNC_QUERY_UPDATED',
  RemoteDataIngested: 'SYNC_REMOTE_DATA_INGESTED',
  MutationRolledBack: 'SYNC_MUTATION_ROLLED_BACK',
} as const;

// Payload shapes for each sync event.
export type SyncEventTypeMap = {
  // A tracked query's result set or version arrays changed.
  [SyncEventTypes.QueryUpdated]: EventDefinition<
    typeof SyncEventTypes.QueryUpdated,
    {
      queryId: any; // RecordId<string> but imported
      localHash?: string;
      localArray?: RecordVersionArray;
      remoteHash?: string;
      remoteArray?: RecordVersionArray;
      records: Record<string, any>[];
    }
  >;
  // A batch of remote records was fetched and ingested into the local cache.
  [SyncEventTypes.RemoteDataIngested]: EventDefinition<
    typeof SyncEventTypes.RemoteDataIngested,
    {
      records: Record<string, any>[];
    }
  >;
  // An optimistic local mutation was reverted after a remote failure.
  [SyncEventTypes.MutationRolledBack]: EventDefinition<
    typeof SyncEventTypes.MutationRolledBack,
    {
      eventType: string;
      recordId: string;
      error: string;
    }
  >;
};

// Concrete event-system type for sync events.
export type SyncEventSystem = EventSystem<SyncEventTypeMap>;

// Builds an event system pre-registered with every sync event type.
export function createSyncEventSystem(): SyncEventSystem {
  return createEventSystem([
    SyncEventTypes.QueryUpdated,
    SyncEventTypes.RemoteDataIngested,
    SyncEventTypes.MutationRolledBack,
  ]);
}
|
|
@@ -0,0 +1,89 @@
|
|
|
1
|
+
import { LocalDatabaseService } from '../../../services/database/index';
|
|
2
|
+
import {
|
|
3
|
+
createSyncQueueEventSystem,
|
|
4
|
+
SyncQueueEventSystem,
|
|
5
|
+
SyncQueueEventTypes,
|
|
6
|
+
} from '../events/index';
|
|
7
|
+
import { Logger } from '../../../services/logger/index';
|
|
8
|
+
|
|
9
|
+
export type RegisterEvent = {
|
|
10
|
+
type: 'register';
|
|
11
|
+
payload: {
|
|
12
|
+
hash: string;
|
|
13
|
+
};
|
|
14
|
+
};
|
|
15
|
+
|
|
16
|
+
export type SyncEvent = {
|
|
17
|
+
type: 'sync';
|
|
18
|
+
payload: {
|
|
19
|
+
hash: string;
|
|
20
|
+
};
|
|
21
|
+
};
|
|
22
|
+
|
|
23
|
+
export type HeartbeatEvent = {
|
|
24
|
+
type: 'heartbeat';
|
|
25
|
+
payload: {
|
|
26
|
+
hash: string;
|
|
27
|
+
};
|
|
28
|
+
};
|
|
29
|
+
|
|
30
|
+
export type CleanupEvent = {
|
|
31
|
+
type: 'cleanup';
|
|
32
|
+
payload: {
|
|
33
|
+
hash: string;
|
|
34
|
+
};
|
|
35
|
+
};
|
|
36
|
+
|
|
37
|
+
export type DownEvent = RegisterEvent | SyncEvent | HeartbeatEvent | CleanupEvent;
|
|
38
|
+
|
|
39
|
+
export class DownQueue {
|
|
40
|
+
private queue: DownEvent[] = [];
|
|
41
|
+
private _events: SyncQueueEventSystem;
|
|
42
|
+
private logger: Logger;
|
|
43
|
+
|
|
44
|
+
get events(): SyncQueueEventSystem {
|
|
45
|
+
return this._events;
|
|
46
|
+
}
|
|
47
|
+
|
|
48
|
+
constructor(
|
|
49
|
+
private local: LocalDatabaseService,
|
|
50
|
+
logger: Logger
|
|
51
|
+
) {
|
|
52
|
+
this._events = createSyncQueueEventSystem();
|
|
53
|
+
this.logger = logger.child({ service: 'DownQueue' });
|
|
54
|
+
}
|
|
55
|
+
|
|
56
|
+
get size(): number {
|
|
57
|
+
return this.queue.length;
|
|
58
|
+
}
|
|
59
|
+
|
|
60
|
+
push(event: DownEvent) {
|
|
61
|
+
this.queue.push(event);
|
|
62
|
+
this.emitPushEvent();
|
|
63
|
+
}
|
|
64
|
+
|
|
65
|
+
private emitPushEvent() {
|
|
66
|
+
this._events.addEvent({
|
|
67
|
+
type: SyncQueueEventTypes.QueryItemEnqueued,
|
|
68
|
+
payload: {
|
|
69
|
+
queueSize: this.queue.length,
|
|
70
|
+
},
|
|
71
|
+
});
|
|
72
|
+
}
|
|
73
|
+
|
|
74
|
+
async next(fn: (event: DownEvent) => Promise<void>): Promise<void> {
|
|
75
|
+
const event = this.queue.shift();
|
|
76
|
+
if (event) {
|
|
77
|
+
try {
|
|
78
|
+
await fn(event);
|
|
79
|
+
} catch (error) {
|
|
80
|
+
this.logger.error(
|
|
81
|
+
{ error, event, Category: 'spooky-client::DownQueue::next' },
|
|
82
|
+
'Failed to process query'
|
|
83
|
+
);
|
|
84
|
+
this.queue.unshift(event);
|
|
85
|
+
throw error;
|
|
86
|
+
}
|
|
87
|
+
}
|
|
88
|
+
}
|
|
89
|
+
}
|