@spooky-sync/core 0.0.0-canary.1
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/README.md +21 -0
- package/dist/index.d.ts +590 -0
- package/dist/index.js +3082 -0
- package/package.json +46 -0
- package/src/events/events.test.ts +242 -0
- package/src/events/index.ts +261 -0
- package/src/index.ts +3 -0
- package/src/modules/auth/events/index.ts +18 -0
- package/src/modules/auth/index.ts +267 -0
- package/src/modules/cache/index.ts +241 -0
- package/src/modules/cache/types.ts +19 -0
- package/src/modules/data/data.test.ts +58 -0
- package/src/modules/data/index.ts +777 -0
- package/src/modules/devtools/index.ts +364 -0
- package/src/modules/sync/engine.ts +163 -0
- package/src/modules/sync/events/index.ts +77 -0
- package/src/modules/sync/index.ts +3 -0
- package/src/modules/sync/queue/index.ts +2 -0
- package/src/modules/sync/queue/queue-down.ts +89 -0
- package/src/modules/sync/queue/queue-up.ts +223 -0
- package/src/modules/sync/scheduler.ts +84 -0
- package/src/modules/sync/sync.ts +407 -0
- package/src/modules/sync/utils.test.ts +311 -0
- package/src/modules/sync/utils.ts +171 -0
- package/src/services/database/database.ts +108 -0
- package/src/services/database/events/index.ts +32 -0
- package/src/services/database/index.ts +5 -0
- package/src/services/database/local-migrator.ts +203 -0
- package/src/services/database/local.ts +99 -0
- package/src/services/database/remote.ts +110 -0
- package/src/services/logger/index.ts +118 -0
- package/src/services/persistence/localstorage.ts +26 -0
- package/src/services/persistence/surrealdb.ts +62 -0
- package/src/services/stream-processor/index.ts +364 -0
- package/src/services/stream-processor/stream-processor.test.ts +140 -0
- package/src/services/stream-processor/wasm-types.ts +31 -0
- package/src/spooky.ts +346 -0
- package/src/types.ts +237 -0
- package/src/utils/error-classification.ts +28 -0
- package/src/utils/index.ts +172 -0
- package/src/utils/parser.test.ts +125 -0
- package/src/utils/parser.ts +46 -0
- package/src/utils/surql.ts +182 -0
- package/src/utils/utils.test.ts +152 -0
- package/src/utils/withRetry.test.ts +153 -0
- package/tsconfig.json +14 -0
- package/tsdown.config.ts +9 -0
package/dist/index.js
ADDED
|
@@ -0,0 +1,3082 @@
|
|
|
1
|
+
import { DateTime, Duration, RecordId, Surreal, Uuid, applyDiagnostics, createRemoteEngines } from "surrealdb";
|
|
2
|
+
import { QueryBuilder, RecordId as RecordId$1 } from "@spooky-sync/query-builder";
|
|
3
|
+
import { createWasmWorkerEngines } from "@surrealdb/wasm";
|
|
4
|
+
import pino from "pino";
|
|
5
|
+
import { BatchLogRecordProcessor, LoggerProvider } from "@opentelemetry/sdk-logs";
|
|
6
|
+
import { OTLPLogExporter } from "@opentelemetry/exporter-logs-otlp-proto";
|
|
7
|
+
import { resourceFromAttributes } from "@opentelemetry/resources";
|
|
8
|
+
import { ATTR_SERVICE_NAME } from "@opentelemetry/semantic-conventions";
|
|
9
|
+
import { createContextKey } from "@opentelemetry/api";
|
|
10
|
+
import init, { SpookyProcessor } from "@spooky-sync/ssp-wasm";
|
|
11
|
+
|
|
12
|
+
//#region src/utils/surql.ts
|
|
13
|
+
/**
 * Helpers for composing SurrealQL statement strings. Every method returns a
 * plain string fragment with `$variable` placeholders; `tx` wraps statements
 * in a transaction and `seal` terminates a query (and, for transactions,
 * yields an extractor for the interesting result slot).
 */
const surql = {
	/**
	 * Terminate a query with `;`. For a TxQuery, also return an `extract`
	 * helper that pulls the result at `resultIndex` (defaults to the last
	 * statement); the index is shifted by one to skip the BEGIN result.
	 */
	seal(query, options) {
		if (typeof query === "string") return `${query};`;
		const resultIndex = options?.resultIndex ?? query.statementCount - 1;
		return {
			sql: `${query.sql};`,
			extract: (results) => results[resultIndex + 1]
		};
	},
	/** Wrap statements in BEGIN/COMMIT and remember how many there are. */
	tx(queries) {
		const sql = `BEGIN TRANSACTION;\n${queries.join(";")};\nCOMMIT TRANSACTION`;
		return {
			__brand: "TxQuery",
			sql,
			statementCount: queries.length
		};
	},
	/** SELECT the given fields from the single record bound to `$idVar`. */
	selectById(idVar, returnValues) {
		const projection = returnValues.join(",");
		return `SELECT ${projection} FROM ONLY $${idVar}`;
	},
	/** SELECT with an AND-joined equality predicate; fields may be aliased. */
	selectByFieldsAnd(table, whereVar, returnValues) {
		const projection = returnValues
			.map((item) => typeof item === "string" ? item : `${item.field} as ${item.alias}`)
			.join(",");
		const predicate = whereVar
			.map((cond) => typeof cond === "string" ? `${cond} = $${cond}` : `${cond.field} = $${cond.variable}`)
			.join(" AND ");
		return `SELECT ${projection} FROM ${table} WHERE ${predicate}`;
	},
	/** CREATE a single record from a whole content object variable. */
	create(idVar, dataVar) {
		return `CREATE ONLY $${idVar} CONTENT $${dataVar}`;
	},
	/** CREATE a single record via SET assignments (string, raw statement, or key/variable pair). */
	createSet(idVar, keyDataVars) {
		const assignments = keyDataVars
			.map((entry) => {
				if (typeof entry === "string") return `${entry} = $${entry}`;
				if ("statement" in entry) return entry.statement;
				return `${entry.key} = $${entry.variable}`;
			})
			.join(", ");
		return `CREATE ONLY $${idVar} SET ${assignments}`;
	},
	/** UPSERT a single record, replacing its content wholesale. */
	upsert(idVar, dataVar) {
		return `UPSERT ONLY $${idVar} REPLACE $${dataVar}`;
	},
	/** UPDATE a single record by merging partial content into it. */
	updateMerge(idVar, dataVar) {
		return `UPDATE ONLY $${idVar} MERGE $${dataVar}`;
	},
	/** UPDATE via SET assignments (same entry shapes as createSet). */
	updateSet(idVar, keyDataVar) {
		const assignments = keyDataVar
			.map((entry) => {
				if (typeof entry === "string") return `${entry} = $${entry}`;
				if ("statement" in entry) return entry.statement;
				return `${entry.key} = $${entry.variable}`;
			})
			.join(", ");
		return `UPDATE $${idVar} SET ${assignments}`;
	},
	/** DELETE whatever record(s) `$idVar` resolves to. */
	delete(idVar) {
		return `DELETE $${idVar}`;
	},
	/** Bind the result of a sub-query to a `$name` variable. */
	let(name, query) {
		return `LET $${name} = (${query})`;
	},
	/**
	 * Build the statement recording a pending mutation of type `t`.
	 * Only "update" mutations carry a data payload (and optionally the
	 * pre-image record); unknown types yield undefined, as before.
	 */
	createMutation(t, mutationIdVar, recordIdVar, dataVar, beforeRecordVar) {
		switch (t) {
			case "create":
				return `CREATE ONLY $${mutationIdVar} SET mutationType = 'create', recordId = $${recordIdVar}`;
			case "delete":
				return `CREATE ONLY $${mutationIdVar} SET mutationType = 'delete', recordId = $${recordIdVar}`;
			case "update": {
				const base = `CREATE ONLY $${mutationIdVar} SET mutationType = 'update', recordId = $${recordIdVar}, data = $${dataVar}`;
				return beforeRecordVar ? `${base}, beforeRecord = $${beforeRecordVar}` : base;
			}
		}
	},
	/** RETURN an object literal mapping each key to a bound variable. */
	returnObject(entries) {
		const fields = entries.map(({ key, variable }) => `${key}: $${variable}`).join(",");
		return `RETURN {${fields}}`;
	}
};
|
|
74
|
+
|
|
75
|
+
//#endregion
|
|
76
|
+
//#region src/utils/parser.ts
|
|
77
|
+
/**
 * Strip a record down to its `id` plus the keys declared in the table schema,
 * dropping any extra properties. Preserves the record's key order.
 */
function cleanRecord(tableSchema, record) {
	const keptEntries = Object.entries(record).filter(
		([key]) => key === "id" || key in tableSchema
	);
	return Object.fromEntries(keptEntries);
}
|
|
82
|
+
/**
 * Coerce a params object against a table schema: each key with a known column
 * and a defined value is run through parseValue; unknown keys and undefined
 * values are dropped.
 */
function parseParams(tableSchema, params) {
	const parsed = {};
	for (const [key, value] of Object.entries(params)) {
		const column = tableSchema[key];
		if (!column || value === void 0) continue;
		parsed[key] = parseValue(key, column, value);
	}
	return parsed;
}
|
|
90
|
+
/**
 * Coerce one column value based on the column's declared kind.
 * - recordId columns accept a RecordId instance or a "table:id" string;
 * - dateTime columns accept Date, DateTime, number, or string;
 * - anything else passes through untouched.
 * Throws for values that cannot be coerced to the declared kind.
 */
function parseValue(name, column, value) {
	const invalid = () => new Error(`Invalid value for ${name}: ${value}`);
	if (column.recordId) {
		if (value instanceof RecordId$1) return value;
		if (typeof value === "string") return parseRecordIdString(value);
		throw invalid();
	}
	if (column.dateTime) {
		if (value instanceof Date) return value;
		if (value instanceof DateTime) return value.toDate();
		const kind = typeof value;
		if (kind === "number" || kind === "string") return new Date(value);
		throw invalid();
	}
	return value;
}
|
|
104
|
+
|
|
105
|
+
//#endregion
|
|
106
|
+
//#region src/utils/error-classification.ts
|
|
107
|
+
/**
 * Lower-cased substrings that mark an error as a transport/network failure
 * rather than an application-level one.
 */
const NETWORK_ERROR_PATTERNS = [
	"connection",
	"timeout",
	"timed out",
	"websocket",
	"fetch failed",
	"disconnected",
	"socket",
	"network",
	"econnrefused",
	"econnreset",
	"enotfound",
	"epipe",
	"abort"
];
/**
 * Classify a sync error as "network" (transient transport failure) or
 * "application" (everything else) by substring-matching its message.
 */
function classifySyncError(error) {
	const text = error instanceof Error ? error.message : String(error);
	const normalized = text.toLowerCase();
	const isNetwork = NETWORK_ERROR_PATTERNS.some((pattern) => normalized.includes(pattern));
	return isNetwork ? "network" : "application";
}
|
|
127
|
+
|
|
128
|
+
//#endregion
|
|
129
|
+
//#region src/utils/index.ts
|
|
130
|
+
/**
 * Serialize a RecordId-like value ({ table, id }) into its "table:id"
 * string form.
 */
const encodeRecordId = (recordId) => {
	const table = recordId.table.toString();
	return `${table}:${recordId.id}`;
};
|
|
133
|
+
/**
 * Return the id portion of a record identifier. For "table:id" strings this
 * is everything after the first ":" (the id may itself contain colons); for
 * RecordId-like objects it is the `.id` value, stringified if needed.
 */
const extractIdPart = (id) => {
	if (typeof id === "string") {
		const [, ...rest] = id.split(":");
		return rest.join(":");
	}
	const inner = id.id;
	return typeof inner === "string" ? inner : String(inner);
};
|
|
139
|
+
/**
 * Return the table portion of a record identifier: the prefix before the
 * first ":" for strings, or the stringified `.table` for RecordId-like
 * objects.
 */
const extractTablePart = (id) =>
	typeof id === "string" ? id.split(":")[0] : id.table.toString();
|
|
143
|
+
/**
 * Parse a "table:id" string into a RecordId. Only the first ":" separates
 * table from id, so ids containing colons survive intact; a string with no
 * ":" yields a RecordId with an empty id part, as before.
 */
const parseRecordIdString = (id) => {
	const segments = id.split(":");
	const table = segments.shift();
	return new RecordId(table, segments.join(":"));
};
|
|
147
|
+
/**
 * Generate a fresh 32-hex-character identifier: a v4 UUID with its dashes
 * removed.
 */
function generateId() {
	const raw = Uuid.v4().toString();
	return raw.split("-").join("");
}
|
|
150
|
+
/**
 * Parse a duration (SurrealDB Duration object, bigint, or a "Ns"/"Nm"/"Nh"
 * string) into milliseconds. Any value that cannot be interpreted falls back
 * to 600000 ms (10 minutes).
 */
function parseDuration(duration) {
	const FALLBACK_MS = 6e5;
	if (duration instanceof Duration) {
		// Prefer the public accessor; fall back to the private field, then to
		// re-parsing the string rendering when neither yields a value.
		const ms = duration.milliseconds || duration._milliseconds;
		if (ms) return Number(ms);
		const rendered = duration.toString();
		return rendered === "[object Object]" ? FALLBACK_MS : parseDuration(rendered);
	}
	if (typeof duration === "bigint") return Number(duration);
	if (typeof duration !== "string") return FALLBACK_MS;
	const match = duration.match(/^(\d+)([smh])$/);
	if (!match) return FALLBACK_MS;
	const amount = parseInt(match[1], 10);
	if (match[2] === "s") return amount * 1e3;
	if (match[2] === "h") return amount * 36e5;
	// The regex only admits s/m/h, so the remaining unit is minutes.
	return amount * 6e4;
}
|
|
172
|
+
/**
 * Retry a DB operation on transient local-database contention.
 *
 * Retryable errors are those whose message mentions "transaction" (which
 * covers "Can not open transaction") or "Database is busy"; anything else is
 * rethrown immediately. The backoff between attempts is linear
 * (delayMs × attempt number), and no delay is wasted after the final
 * failed attempt.
 *
 * @param logger    pino-style logger (only `.warn` is used here)
 * @param operation async thunk to execute
 * @param retries   maximum number of attempts (default 3)
 * @param delayMs   base delay between attempts in ms (default 100)
 * @returns the operation's result
 * @throws the last error once all attempts are exhausted, or the original
 *         error immediately when it is not retryable
 */
async function withRetry(logger, operation, retries = 3, delayMs = 100) {
	let lastError;
	for (let attempt = 1; attempt <= retries; attempt++) {
		try {
			return await operation();
		} catch (err) {
			lastError = err;
			// Decide retryability from the error's message, matching the original
			// behavior: non-Error throws (no .message) are never retried.
			const reason = typeof err?.message === "string" ? err.message : "";
			const retryable = reason.includes("transaction") || reason.includes("Database is busy");
			if (!retryable) throw err;
			logger.warn({
				attempt,
				retries,
				error: err instanceof Error ? err.message : String(err),
				Category: "spooky-client::utils::withRetry"
			}, "Retrying DB operation");
			// Sleeping after the last attempt would only delay the failure.
			if (attempt < retries) await new Promise((res) => setTimeout(res, delayMs * attempt));
		}
	}
	throw lastError;
}
|
|
196
|
+
|
|
197
|
+
//#endregion
|
|
198
|
+
//#region src/modules/data/index.ts
|
|
199
|
+
/**
|
|
200
|
+
* DataModule - Unified query and mutation management
|
|
201
|
+
*
|
|
202
|
+
* Merges the functionality of QueryManager and MutationManager.
|
|
203
|
+
* Uses CacheModule for all storage operations.
|
|
204
|
+
*/
|
|
205
|
+
var DataModule = class {
	// Registered queries keyed by their SHA-256 query hash.
	activeQueries = /* @__PURE__ */ new Map();
	// Per-query-hash sets of subscriber callbacks invoked with fresh records.
	subscriptions = /* @__PURE__ */ new Map();
	// Listeners notified with batches of local mutation events (used by sync).
	mutationCallbacks = /* @__PURE__ */ new Set();
	// Per-query-hash pending debounce timers for stream UPDATEs.
	debounceTimers = /* @__PURE__ */ new Map();
	logger;
	/**
	 * @param cache  CacheModule (registerQuery / save / delete are used here)
	 * @param local  local database service (query / execute are used here)
	 * @param schema app schema (tables with columns; optional backends config)
	 * @param logger pino-style logger; a child scoped to "DataModule" is kept
	 * @param streamDebounceTime debounce window in ms for stream UPDATEs
	 */
	constructor(cache, local, schema, logger, streamDebounceTime = 100) {
		this.cache = cache;
		this.local = local;
		this.schema = schema;
		this.streamDebounceTime = streamDebounceTime;
		this.logger = logger.child({ service: "DataModule" });
	}
	// No setup work is needed currently; kept async for interface symmetry.
	async init() {
		this.logger.info({ Category: "spooky-client::DataModule::init" }, "DataModule initialized");
	}
	/**
	 * Register a query and return its hash for subscriptions.
	 * The hash is the SHA-256 of { surql, params }; repeated registration of
	 * the same query is a no-op that returns the existing hash. New queries
	 * are persisted under `_spooky_query:<hash>`, registered with the cache,
	 * and given a TTL heartbeat timer.
	 */
	async query(tableName, surqlString, params, ttl) {
		const hash = await this.calculateHash({
			surql: surqlString,
			params
		});
		this.logger.debug({
			hash,
			Category: "spooky-client::DataModule::query"
		}, "Query Initialization: started");
		const recordId = new RecordId("_spooky_query", hash);
		if (this.activeQueries.has(hash)) {
			this.logger.debug({
				hash,
				Category: "spooky-client::DataModule::query"
			}, "Query Initialization: exists, returning");
			return hash;
		}
		this.logger.debug({
			hash,
			Category: "spooky-client::DataModule::query"
		}, "Query Initialization: not found, creating new query");
		const queryState = await this.createNewQuery({
			recordId,
			surql: surqlString,
			params,
			ttl,
			tableName
		});
		// The cache decides the initial local record-id array for this query.
		const { localArray } = this.cache.registerQuery({
			queryHash: hash,
			surql: surqlString,
			params,
			ttl: new Duration(ttl),
			lastActiveAt: /* @__PURE__ */ new Date()
		});
		// Persist the cache-provided localArray onto the query config record.
		await withRetry(this.logger, () => this.local.query(surql.seal(surql.updateSet("id", ["localArray"])), {
			id: recordId,
			localArray
		}));
		this.activeQueries.set(hash, queryState);
		this.startTTLHeartbeat(queryState);
		this.logger.debug({
			hash,
			tableName,
			recordCount: queryState.records.length,
			Category: "spooky-client::DataModule::query"
		}, "Query registered");
		return hash;
	}
	/**
	 * Subscribe to query updates. With `options.immediate`, the callback is
	 * invoked synchronously with the current records (if the query is known).
	 * Returns an unsubscribe function.
	 */
	subscribe(queryHash, callback, options = {}) {
		if (!this.subscriptions.has(queryHash)) this.subscriptions.set(queryHash, /* @__PURE__ */ new Set());
		this.subscriptions.get(queryHash)?.add(callback);
		if (options.immediate) {
			const query = this.activeQueries.get(queryHash);
			if (query) callback(query.records);
		}
		return () => {
			const subs = this.subscriptions.get(queryHash);
			if (subs) {
				subs.delete(callback);
				// Drop the empty set so the map does not accumulate dead keys.
				if (subs.size === 0) this.subscriptions.delete(queryHash);
			}
		};
	}
	/**
	 * Subscribe to mutations (for sync). Returns an unsubscribe function.
	 */
	onMutation(callback) {
		this.mutationCallbacks.add(callback);
		return () => {
			this.mutationCallbacks.delete(callback);
		};
	}
	/**
	 * Handle stream updates from DBSP (via CacheModule). UPDATE ops are
	 * debounced per query hash (trailing edge, streamDebounceTime ms); other
	 * ops are processed immediately.
	 */
	async onStreamUpdate(update) {
		const { queryHash, op } = update;
		if (op === "UPDATE") {
			if (this.debounceTimers.has(queryHash)) clearTimeout(this.debounceTimers.get(queryHash));
			const timer = setTimeout(async () => {
				this.debounceTimers.delete(queryHash);
				await this.processStreamUpdate(update);
			}, this.streamDebounceTime);
			this.debounceTimers.set(queryHash, timer);
		} else await this.processStreamUpdate(update);
	}
	/**
	 * Re-run a query against the local DB after a stream update, persist its
	 * new localArray, and notify subscribers. Failures are logged, not thrown.
	 */
	async processStreamUpdate(update) {
		const { queryHash, localArray } = update;
		const queryState = this.activeQueries.get(queryHash);
		if (!queryState) {
			this.logger.warn({
				queryHash,
				Category: "spooky-client::DataModule::onStreamUpdate"
			}, "Received update for unknown query. Skipping...");
			return;
		}
		try {
			const [records] = await this.local.query(queryState.config.surql, queryState.config.params);
			queryState.records = records || [];
			queryState.config.localArray = localArray;
			queryState.updateCount++;
			await this.local.query(surql.seal(surql.updateSet("id", ["localArray"])), {
				id: queryState.config.id,
				localArray
			});
			const subscribers = this.subscriptions.get(queryHash);
			if (subscribers) for (const callback of subscribers) callback(queryState.records);
			this.logger.debug({
				queryHash,
				recordCount: records?.length,
				Category: "spooky-client::DataModule::onStreamUpdate"
			}, "Query updated from stream");
		} catch (err) {
			this.logger.error({
				err,
				queryHash,
				Category: "spooky-client::DataModule::onStreamUpdate"
			}, "Failed to fetch records for stream update");
		}
	}
	/**
	 * Get query state (for sync and devtools).
	 */
	getQueryByHash(hash) {
		return this.activeQueries.get(hash);
	}
	/**
	 * Get query state by id (for sync and devtools). Accepts a
	 * "_spooky_query:<hash>" id (string or RecordId); the hash part is the
	 * map key.
	 */
	getQueryById(id) {
		return this.activeQueries.get(extractIdPart(id));
	}
	/**
	 * Get all active queries (for devtools).
	 */
	getActiveQueries() {
		return Array.from(this.activeQueries.values());
	}
	// Update a query's localArray in memory and persist it to the local DB.
	// `id` here is the query hash (the activeQueries key).
	async updateQueryLocalArray(id, localArray) {
		const queryState = this.activeQueries.get(id);
		if (!queryState) {
			this.logger.warn({
				id,
				Category: "spooky-client::DataModule::updateQueryLocalArray"
			}, "Query to update local array not found");
			return;
		}
		queryState.config.localArray = localArray;
		await this.local.query(surql.seal(surql.updateSet("id", ["localArray"])), {
			id: queryState.config.id,
			localArray
		});
	}
	// Update a query's remoteArray in memory and persist it to the local DB.
	async updateQueryRemoteArray(hash, remoteArray) {
		const queryState = this.getQueryByHash(hash);
		if (!queryState) {
			this.logger.warn({
				hash,
				Category: "spooky-client::DataModule::updateQueryRemoteArray"
			}, "Query to update remote array not found");
			return;
		}
		queryState.config.remoteArray = remoteArray;
		await this.local.query(surql.seal(surql.updateSet("id", ["remoteArray"])), {
			id: queryState.config.id,
			remoteArray
		});
	}
	/**
	 * Enqueue a backend route invocation by creating a record in the
	 * backend's outbox table. Validates required route arguments and
	 * serializes the payload to JSON; delivery/retry semantics are handled
	 * downstream by whatever consumes the outbox table.
	 */
	async run(backend, path, data, options) {
		const route = this.schema.backends?.[backend]?.routes?.[path];
		if (!route) throw new Error(`Route ${backend}.${path} not found`);
		const tableName = this.schema.backends?.[backend]?.outboxTable;
		if (!tableName) throw new Error(`Outbox table for backend ${backend} not found`);
		const payload = {};
		for (const argName of Object.keys(route.args)) {
			const arg = route.args[argName];
			// Only an explicit `optional: false` makes an argument required.
			if (data[argName] === void 0 && arg.optional === false) throw new Error(`Missing required argument ${argName}`);
			payload[argName] = data[argName];
		}
		const record = {
			path,
			payload: JSON.stringify(payload),
			max_retries: options?.max_retries ?? 3,
			retry_strategy: options?.retry_strategy ?? "linear"
		};
		if (options?.assignedTo) record.assigned_to = options.assignedTo;
		const recordId = `${tableName}:${generateId()}`;
		await this.create(recordId, record);
	}
	/**
	 * Create a new record. Performs the create plus a pending-mutation insert
	 * in one local transaction, updates the cache optimistically, and fires
	 * mutation callbacks. Returns the created record.
	 */
	async create(id, data) {
		const tableName = extractTablePart(id);
		const tableSchema = this.schema.tables.find((t) => t.name === tableName);
		if (!tableSchema) throw new Error(`Table ${tableName} not found`);
		const rid = parseRecordIdString(id);
		const params = parseParams(tableSchema.columns, data);
		// NOTE(review): Date.now() as the mutation id could collide for two
		// mutations in the same millisecond — confirm acceptable.
		const mutationId = parseRecordIdString(`_spooky_pending_mutations:${Date.now()}`);
		// Bind each data field as its own `$data_<key>` variable for SET.
		const dataKeys = Object.keys(params).map((key) => ({
			key,
			variable: `data_${key}`
		}));
		const prefixedParams = Object.fromEntries(dataKeys.map(({ key, variable }) => [variable, params[key]]));
		// resultIndex 0: extract the created record, not the mutation row.
		const query = surql.seal(surql.tx([surql.createSet("id", dataKeys), surql.createMutation("create", "mid", "id", "data")]), { resultIndex: 0 });
		const target = await withRetry(this.logger, () => this.local.execute(query, {
			id: rid,
			mid: mutationId,
			...prefixedParams
		}));
		const parsedRecord = parseParams(tableSchema.columns, target);
		await this.cache.save({
			table: tableName,
			op: "CREATE",
			record: parsedRecord,
			version: 1
		}, true);
		const mutationEvent = {
			type: "create",
			mutation_id: mutationId,
			record_id: rid,
			data: params,
			record: target,
			tableName
		};
		for (const callback of this.mutationCallbacks) callback([mutationEvent]);
		this.logger.debug({
			id,
			Category: "spooky-client::DataModule::create"
		}, "Record created");
		return target;
	}
	/**
	 * Update an existing record. Captures the pre-image, bumps spooky_rv and
	 * merges the new data in one transaction, updates the cache, and fires a
	 * mutation event (with optional debounce push options). Returns the
	 * updated record.
	 * NOTE(review): tableName is derived from `id` but the separate `table`
	 * parameter is what gets passed to cache.save — confirm they are always
	 * the same.
	 */
	async update(table, id, data, options) {
		const tableName = extractTablePart(id);
		const tableSchema = this.schema.tables.find((t) => t.name === tableName);
		if (!tableSchema) throw new Error(`Table ${tableName} not found`);
		const rid = parseRecordIdString(id);
		const params = parseParams(tableSchema.columns, data);
		const mutationId = parseRecordIdString(`_spooky_pending_mutations:${Date.now()}`);
		// Pre-image for potential rollback / sync conflict handling.
		const [beforeRecord] = await withRetry(this.logger, () => this.local.query("SELECT * FROM ONLY $id", { id: rid }));
		const query = surql.seal(surql.tx([
			surql.updateSet("id", [{ statement: "spooky_rv += 1" }]),
			surql.let("updated", surql.updateMerge("id", "data")),
			surql.createMutation("update", "mid", "id", "data"),
			surql.returnObject([{
				key: "target",
				variable: "updated"
			}])
		]));
		const { target } = await withRetry(this.logger, () => this.local.execute(query, {
			id: rid,
			mid: mutationId,
			data: params
		}));
		// NOTE(review): fire-and-forget — replaceRecordInQueries is async but
		// not awaited here (it is awaited in rollbackUpdate); confirm intended.
		this.replaceRecordInQueries(target);
		const parsedRecord = parseParams(tableSchema.columns, target);
		await this.cache.save({
			table,
			op: "UPDATE",
			record: parsedRecord,
			version: target.spooky_rv
		}, true);
		const pushEventOptions = parseUpdateOptions(id, data, options);
		const mutationEvent = {
			type: "update",
			mutation_id: mutationId,
			record_id: rid,
			data: params,
			record: target,
			beforeRecord: beforeRecord || void 0,
			options: pushEventOptions
		};
		for (const callback of this.mutationCallbacks) callback([mutationEvent]);
		this.logger.debug({
			id,
			Category: "spooky-client::DataModule::update"
		}, "Record updated");
		return target;
	}
	/**
	 * Delete a record: removes it and records a pending delete mutation in
	 * one transaction, evicts it from the cache, and fires a mutation event.
	 */
	async delete(table, id) {
		const tableName = extractTablePart(id);
		if (!this.schema.tables.find((t) => t.name === tableName)) throw new Error(`Table ${tableName} not found`);
		const rid = parseRecordIdString(id);
		const mutationId = parseRecordIdString(`_spooky_pending_mutations:${Date.now()}`);
		const query = surql.seal(surql.tx([surql.delete("id"), surql.createMutation("delete", "mid", "id")]));
		await withRetry(this.logger, () => this.local.execute(query, {
			id: rid,
			mid: mutationId
		}));
		// NOTE(review): cache.delete receives the `table` parameter, while the
		// schema check above used tableName from the id — confirm consistency.
		await this.cache.delete(table, id, true);
		const mutationEvent = {
			type: "delete",
			mutation_id: mutationId,
			record_id: rid
		};
		for (const callback of this.mutationCallbacks) callback([mutationEvent]);
		this.logger.debug({
			id,
			Category: "spooky-client::DataModule::delete"
		}, "Record deleted");
	}
	/**
	 * Rollback a failed optimistic create by deleting the record locally,
	 * evicting it from the cache, and removing it from active query results.
	 * Errors are logged, never thrown (best-effort rollback).
	 */
	async rollbackCreate(recordId, tableName) {
		const id = encodeRecordId(recordId);
		try {
			await withRetry(this.logger, () => this.local.query("DELETE $id", { id: recordId }));
			await this.cache.delete(tableName, id, true);
			this.removeRecordFromQueries(recordId);
			this.logger.info({
				id,
				tableName,
				Category: "spooky-client::DataModule::rollbackCreate"
			}, "Rolled back optimistic create");
		} catch (err) {
			this.logger.error({
				err,
				id,
				tableName,
				Category: "spooky-client::DataModule::rollbackCreate"
			}, "Failed to rollback create");
		}
	}
	/**
	 * Rollback a failed optimistic update by restoring the previous record
	 * state (upsert of the pre-image minus its id), refreshing the cache,
	 * and swapping the record back into active query results. Errors are
	 * logged, never thrown (best-effort rollback).
	 */
	async rollbackUpdate(recordId, tableName, beforeRecord) {
		const id = encodeRecordId(recordId);
		try {
			const { id: _recordId, ...content } = beforeRecord;
			await withRetry(this.logger, () => this.local.query(surql.seal(surql.upsert("id", "content")), {
				id: recordId,
				content
			}));
			const tableSchema = this.schema.tables.find((t) => t.name === tableName);
			const parsedRecord = tableSchema ? parseParams(tableSchema.columns, beforeRecord) : beforeRecord;
			await this.cache.save({
				table: tableName,
				op: "UPDATE",
				record: parsedRecord,
				version: beforeRecord.spooky_rv || 1
			}, true);
			await this.replaceRecordInQueries(beforeRecord);
			this.logger.info({
				id,
				tableName,
				Category: "spooky-client::DataModule::rollbackUpdate"
			}, "Rolled back optimistic update");
		} catch (err) {
			this.logger.error({
				err,
				id,
				tableName,
				Category: "spooky-client::DataModule::rollbackUpdate"
			}, "Failed to rollback update");
		}
	}
	/**
	 * Remove a record from all active query states and notify subscribers.
	 * Matches by comparing the encoded "table:id" form against each cached
	 * record's id (RecordId instances are encoded; other ids stringified).
	 */
	removeRecordFromQueries(recordId) {
		const encodedId = encodeRecordId(recordId);
		for (const [queryHash, queryState] of this.activeQueries.entries()) {
			const index = queryState.records.findIndex((r) => {
				return (r.id instanceof RecordId ? encodeRecordId(r.id) : String(r.id)) === encodedId;
			});
			if (index !== -1) {
				queryState.records.splice(index, 1);
				const subscribers = this.subscriptions.get(queryHash);
				if (subscribers) for (const callback of subscribers) callback(queryState.records);
			}
		}
	}
	// Load or create the persisted query-config record, run the query once
	// against the local DB for initial records, and build the in-memory state.
	async createNewQuery({ recordId, surql: surqlString, params, ttl, tableName }) {
		const tableSchema = this.schema.tables.find((t) => t.name === tableName);
		if (!tableSchema) throw new Error(`Table ${tableName} not found`);
		let [configRecord] = await withRetry(this.logger, () => this.local.query("SELECT * FROM ONLY $id", { id: recordId }));
		if (!configRecord) {
			const [createdRecord] = await withRetry(this.logger, () => this.local.query(surql.seal(surql.create("id", "data")), {
				id: recordId,
				data: {
					surql: surqlString,
					params,
					localArray: [],
					remoteArray: [],
					lastActiveAt: /* @__PURE__ */ new Date(),
					ttl,
					tableName
				}
			}));
			configRecord = createdRecord;
		}
		const config = {
			...configRecord,
			id: recordId,
			// Re-coerce persisted params against the table schema (e.g. revive
			// record-id / datetime values).
			params: parseParams(tableSchema.columns, configRecord.params)
		};
		let records = [];
		try {
			const [result] = await this.local.query(surqlString, params);
			records = result || [];
		} catch (err) {
			// Non-fatal: the query starts with empty results until a stream
			// update repopulates it.
			this.logger.warn({
				err,
				Category: "spooky-client::DataModule::createNewQuery"
			}, "Failed to load initial cached records");
		}
		return {
			config,
			records,
			ttlTimer: null,
			ttlDurationMs: parseDuration(ttl),
			updateCount: 0
		};
	}
	// SHA-256 of the JSON-serialized data, rendered as lowercase hex.
	async calculateHash(data) {
		const content = JSON.stringify(data);
		const msgBuffer = new TextEncoder().encode(content);
		const hashBuffer = await crypto.subtle.digest("SHA-256", msgBuffer);
		return Array.from(new Uint8Array(hashBuffer)).map((b) => b.toString(16).padStart(2, "0")).join("");
	}
	// Schedule a heartbeat log at 90% of the query's TTL.
	// NOTE(review): the timer callback calls startTTLHeartbeat again without
	// clearing queryState.ttlTimer first, so the `if (queryState.ttlTimer)
	// return;` guard makes the re-arm a no-op — the heartbeat fires only
	// once. Confirm whether a repeating heartbeat was intended.
	startTTLHeartbeat(queryState) {
		if (queryState.ttlTimer) return;
		const heartbeatTime = Math.floor(queryState.ttlDurationMs * .9);
		queryState.ttlTimer = setTimeout(() => {
			this.logger.debug({
				id: encodeRecordId(queryState.config.id),
				Category: "spooky-client::DataModule::startTTLHeartbeat"
			}, "TTL heartbeat");
			this.startTTLHeartbeat(queryState);
		}, heartbeatTime);
	}
	// Cancel a pending heartbeat timer, if any.
	stopTTLHeartbeat(queryState) {
		if (queryState.ttlTimer) {
			clearTimeout(queryState.ttlTimer);
			queryState.ttlTimer = null;
		}
	}
	// Swap an updated record into every active query's result set (no
	// subscriber notification here, unlike removeRecordFromQueries).
	async replaceRecordInQueries(record) {
		for (const queryState of this.activeQueries.values()) this.replaceRecordInQuery(queryState, record);
	}
	// NOTE(review): this matches by `r.id === record.id` — strict reference
	// equality. Two RecordId instances with the same table/id will NOT match;
	// confirm ids here are shared object references or primitives.
	replaceRecordInQuery(queryState, record) {
		const index = queryState.records.findIndex((r) => r.id === record.id);
		if (index !== -1) queryState.records[index] = record;
	}
};
|
|
681
|
+
/**
 * Parse update options to generate push event options.
 *
 * When `options.debounced` is set (either `true` or a config object), returns
 * `{ debounced: { delay, key } }`; otherwise returns `{}`. The delay defaults
 * to 200 ms and the key defaults to the record id. The sentinel key
 * "recordId_x_fields" expands to `<id>::<sorted-updated-field-names>` so that
 * updates touching different field sets debounce independently.
 *
 * Fix: a caller-supplied custom key other than the sentinel was previously
 * discarded (the non-sentinel branch returned `id` instead of the resolved
 * key); it is now honored.
 *
 * @param id      "table:id" string of the record being updated
 * @param data    the partial update payload (its keys feed the sentinel key)
 * @param options optional `{ debounced: true | { delay?, key? } }`
 * @returns push event options for the mutation event
 */
function parseUpdateOptions(id, data, options) {
	if (!options?.debounced) return {};
	const custom = options.debounced === true ? void 0 : options.debounced;
	const delay = custom?.delay ?? 200;
	const resolvedKey = custom?.key ?? id;
	return { debounced: {
		delay,
		key: resolvedKey === "recordId_x_fields" ? `${id}::${Object.keys(data).sort().join("#")}` : resolvedKey
	} };
}
|
|
692
|
+
|
|
693
|
+
//#endregion
|
|
694
|
+
//#region src/services/database/database.ts
|
|
695
|
+
/**
 * Shared base class for the local and remote SurrealDB-backed database
 * services. Wraps a client with structured logging, per-query timing and
 * diagnostic events, and strictly serialized query execution.
 *
 * Subclasses set `eventType` (see LocalDatabaseService / RemoteDatabaseService),
 * which names the diagnostic event emitted for each query.
 */
var AbstractDatabaseService = class {
	// Underlying database client (Surreal instance supplied by the subclass).
	client;
	// Child logger tagged with { service: "Database" }.
	logger;
	// Event system used to publish query diagnostics.
	events;
	constructor(client, logger, events) {
		this.client = client;
		this.logger = logger.child({ service: "Database" });
		this.events = events;
	}
	getClient() {
		return this.client;
	}
	getEvents() {
		return this.events;
	}
	// Begins a transaction on the underlying client.
	tx() {
		return this.client.beginTransaction();
	}
	// Tail of the promise chain used to serialize queries; each new query is
	// appended here so at most one query runs at a time.
	queryQueue = Promise.resolve();
	/**
	* Execute a query with serialized execution to prevent WASM transaction issues.
	*/
	async query(query, vars) {
		// The outer Promise is resolved/rejected from inside the queue chain so
		// callers observe their own query's outcome while execution stays serial.
		return new Promise((resolve, reject) => {
			this.queryQueue = this.queryQueue.then(async () => {
				const startTime = performance.now();
				try {
					this.logger.debug({
						query,
						vars,
						Category: "spooky-client::Database::query"
					}, "Executing query");
					const result = await this.client.query(query, vars);
					const duration = performance.now() - startTime;
					// Publish a success diagnostic event (duration in ms).
					this.events.emit(this.eventType, {
						query,
						vars,
						duration,
						success: true,
						timestamp: Date.now()
					});
					resolve(result);
					this.logger.trace({
						query,
						result,
						Category: "spooky-client::Database::query"
					}, "Query executed successfully");
				} catch (err) {
					const duration = performance.now() - startTime;
					// Publish a failure diagnostic event with the error message.
					this.events.emit(this.eventType, {
						query,
						vars,
						duration,
						success: false,
						error: err instanceof Error ? err.message : String(err),
						timestamp: Date.now()
					});
					this.logger.error({
						query,
						vars,
						err,
						Category: "spooky-client::Database::query"
					}, "Query execution failed");
					reject(err);
				}
			// The empty catch keeps the serialization chain alive after a failed
			// query; the error itself is already surfaced to the caller via reject.
			}).catch(() => {});
		});
	}
	// Runs a prepared query object ({ sql, extract }) and post-processes the
	// raw result through its extract function.
	async execute(query, vars) {
		const raw = await this.query(query.sql, vars);
		return query.extract(raw);
	}
	// Closes the underlying client connection.
	async close() {
		this.logger.info({ Category: "spooky-client::Database::close" }, "Closing database connection");
		await this.client.close();
	}
};
|
|
772
|
+
|
|
773
|
+
//#endregion
|
|
774
|
+
//#region src/events/index.ts
|
|
775
|
+
/**
|
|
776
|
+
* A type-safe event system that handles subscription, emission (including debouncing), and buffering of events.
|
|
777
|
+
* @template E The EventTypeMap defining all supported events.
|
|
778
|
+
*/
|
|
779
|
+
/**
* A type-safe event system that handles subscription, emission (including debouncing), and buffering of events.
* @template E The EventTypeMap defining all supported events.
*/
var EventSystem = class {
	// Monotonically increasing id handed out to each new subscription.
	subscriberId = 0;
	// True while processEvents() is draining the buffer (re-entrancy guard).
	isProcessing = false;
	// FIFO buffer of events awaiting delivery.
	buffer;
	// Per-type Map of subscription id -> { id, handler, once }.
	subscribers;
	// Reverse lookup: subscription id -> event type (used by unsubscribe).
	subscribersTypeMap;
	// Last delivered event per type (used for { immediately } subscriptions).
	lastEvents;
	// Pending debounce timers keyed by debounce key.
	debouncedEvents;
	constructor(_eventTypes) {
		this._eventTypes = _eventTypes;
		this.buffer = [];
		// One subscriber Map per declared event type.
		this.subscribers = this._eventTypes.reduce((acc, key) => {
			return Object.assign(acc, { [key]: /* @__PURE__ */ new Map() });
		}, {});
		this.lastEvents = {};
		this.subscribersTypeMap = /* @__PURE__ */ new Map();
		this.debouncedEvents = /* @__PURE__ */ new Map();
	}
	get eventTypes() {
		return this._eventTypes;
	}
	/**
	* Subscribes a handler to a specific event type.
	* @param type The event type to subscribe to.
	* @param handler The function to call when the event occurs.
	* @param options Subscription options (once, immediately).
	* @returns A subscription ID that can be used to unsubscribe.
	*/
	subscribe(type, handler, options) {
		const id = this.subscriberId++;
		this.subscribers[type].set(id, {
			id,
			handler,
			once: options?.once ?? false
		});
		this.subscribersTypeMap.set(id, type);
		// With { immediately }, replay the last event of this type (if any)
		// synchronously to the new handler.
		if (options?.immediately) {
			const lastEvent = this.lastEvents[type];
			if (lastEvent) handler(lastEvent);
		}
		return id;
	}
	/**
	* Subscribes a handler to multiple event types.
	* @param types An array of event types to subscribe to.
	* @param handler The function to call when any of the events occur.
	* @param options Subscription options.
	* @returns An array of subscription IDs.
	*/
	subscribeMany(types, handler, options) {
		return types.map((type) => this.subscribe(type, handler, options));
	}
	/**
	* Unsubscribes a specific subscription by ID.
	* @param id The subscription ID returned by subscribe().
	* @returns True if the subscription was found and removed, false otherwise.
	*/
	unsubscribe(id) {
		const type = this.subscribersTypeMap.get(id);
		if (type) {
			this.subscribers[type].delete(id);
			this.subscribersTypeMap.delete(id);
			return true;
		}
		return false;
	}
	/**
	* Emits an event with the given type and payload.
	* @param type The type of event to emit.
	* @param payload The data associated with the event.
	*/
	emit(type, payload) {
		const event = {
			type,
			payload
		};
		this.addEvent(event);
	}
	/**
	* Adds a fully constructed event object to the system.
	* Similar to emit, but takes the full event object directly.
	* Supports debouncing if options are provided.
	* @param event The event object.
	* @param options Options for the event push (e.g., debouncing).
	*/
	addEvent(event, options) {
		if (options?.debounced) {
			this.handleDebouncedEvent(event, options.debounced.key, options.debounced.delay);
			return;
		}
		this.buffer.push(event);
		this.scheduleProcessing();
	}
	// Restarts the debounce window for `key`; only the latest event for a given
	// key is buffered once the delay elapses without another event.
	handleDebouncedEvent(event, key, delay) {
		if (this.debouncedEvents.has(key)) clearTimeout(this.debouncedEvents.get(key)?.timer);
		const timer = setTimeout(() => {
			this.debouncedEvents.delete(key);
			this.buffer.push(event);
			this.scheduleProcessing();
		}, delay);
		this.debouncedEvents.set(key, {
			timer,
			// Placeholder; nothing currently awaits debounced delivery.
			resolve: () => {}
		});
	}
	// Defers draining to a microtask so multiple synchronous emits are batched.
	scheduleProcessing() {
		if (!this.isProcessing) queueMicrotask(() => this.processEvents());
	}
	// Drains the buffer; the guard plus finally ensures only one drain runs and
	// the flag is reset even if a handler throws.
	async processEvents() {
		if (this.isProcessing) return;
		this.isProcessing = true;
		try {
			while (this.dequeue());
		} finally {
			this.isProcessing = false;
		}
	}
	// Delivers one buffered event; returns false when the buffer is empty.
	dequeue() {
		const event = this.buffer.shift();
		if (!event) return false;
		this.setLastEvent(event.type, event);
		this.broadcastEvent(event.type, event);
		return true;
	}
	setLastEvent(type, event) {
		this.lastEvents[type] = event;
	}
	// Invokes every subscriber for `type`; once-subscribers are removed after
	// their first delivery (Map deletion during iteration is safe in JS).
	broadcastEvent(type, event) {
		const subscribers = this.subscribers[type].values();
		for (const subscriber of subscribers) {
			subscriber.handler(event);
			if (subscriber.once) this.unsubscribe(subscriber.id);
		}
	}
};
|
|
914
|
+
/** Factory for an EventSystem pre-registered with the given event type names. */
function createEventSystem(eventTypes) {
	return new EventSystem(eventTypes);
}
|
|
917
|
+
|
|
918
|
+
//#endregion
|
|
919
|
+
//#region src/services/database/events/index.ts
|
|
920
|
+
// Event type names emitted by the database services for query diagnostics.
const DatabaseEventTypes = {
	LocalQuery: "DATABASE_LOCAL_QUERY",
	RemoteQuery: "DATABASE_REMOTE_QUERY"
};
// Creates an event system registered with both database query event types.
function createDatabaseEventSystem() {
	return createEventSystem([DatabaseEventTypes.LocalQuery, DatabaseEventTypes.RemoteQuery]);
}
|
|
927
|
+
|
|
928
|
+
//#endregion
|
|
929
|
+
//#region src/services/database/local.ts
|
|
930
|
+
/**
 * Database service backed by an in-browser SurrealDB engine (WASM worker).
 * Connects either to an in-memory store ("mem://") or IndexedDB
 * ("indxdb://spooky") depending on config.store.
 */
var LocalDatabaseService = class extends AbstractDatabaseService {
	config;
	// Diagnostic event name used by AbstractDatabaseService.query().
	eventType = DatabaseEventTypes.LocalQuery;
	constructor(config, logger) {
		const events = createDatabaseEventSystem();
		super(new Surreal({
			// Normalize decoded values: RecordId -> encoded string, DateTime -> JS Date.
			codecOptions: { valueDecodeVisitor(value) {
				if (value instanceof RecordId) return encodeRecordId(value);
				if (value instanceof DateTime) return value.toDate();
				return value;
			} },
			// Wrap the WASM worker engines so engine diagnostics are trace-logged.
			engines: applyDiagnostics(createWasmWorkerEngines(), ({ key, type, phase, ...other }) => {
				if (phase === "progress" || phase === "after") logger.trace({
					...other,
					key,
					type,
					phase,
					service: "surrealdb:local",
					Category: "spooky-client::LocalDatabaseService::diagnostics"
				}, `Local SurrealDB diagnostics captured ${type}:${phase}`);
			})
		}), logger, events);
		this.config = config;
	}
	getConfig() {
		return this.config;
	}
	/**
	 * Opens the local database and selects the configured namespace/database.
	 * Throws (after logging) if the connection fails.
	 */
	async connect() {
		const { namespace, database } = this.getConfig();
		this.logger.info({
			namespace,
			database,
			Category: "spooky-client::LocalDatabaseService::connect"
		}, "Connecting to local database");
		try {
			// Defaults to the in-memory engine; any other store value selects
			// IndexedDB persistence under the fixed "spooky" name.
			const storeUrl = (this.getConfig().store ?? "memory") === "memory" ? "mem://" : "indxdb://spooky";
			this.logger.debug({
				storeUrl,
				Category: "spooky-client::LocalDatabaseService::connect"
			}, "[LocalDatabaseService] Calling client.connect");
			await this.client.connect(storeUrl, {});
			this.logger.debug({
				namespace,
				database,
				Category: "spooky-client::LocalDatabaseService::connect"
			}, "[LocalDatabaseService] client.connect returned. Calling client.use");
			await this.client.use({
				namespace,
				database
			});
			this.logger.debug({ Category: "spooky-client::LocalDatabaseService::connect" }, "[LocalDatabaseService] client.use returned");
			this.logger.info({ Category: "spooky-client::LocalDatabaseService::connect" }, "Connected to local database");
		} catch (err) {
			this.logger.error({
				err,
				Category: "spooky-client::LocalDatabaseService::connect"
			}, "Failed to connect to local database");
			throw err;
		}
	}
};
|
|
991
|
+
|
|
992
|
+
//#endregion
|
|
993
|
+
//#region src/services/database/remote.ts
|
|
994
|
+
/**
 * Database service backed by a remote SurrealDB endpoint. Adds authentication
 * helpers (signin/signup/authenticate/invalidate) on top of the shared base.
 */
var RemoteDatabaseService = class extends AbstractDatabaseService {
	config;
	// Diagnostic event name used by AbstractDatabaseService.query().
	eventType = DatabaseEventTypes.RemoteQuery;
	constructor(config, logger) {
		const events = createDatabaseEventSystem();
		// Wrap the remote engines so engine diagnostics are trace-logged.
		super(new Surreal({ engines: applyDiagnostics(createRemoteEngines(), ({ key, type, phase, ...other }) => {
			if (phase === "progress" || phase === "after") logger.trace({
				...other,
				key,
				type,
				phase,
				service: "surrealdb:remote",
				Category: "spooky-client::RemoteDatabaseService::diagnostics"
			}, `Remote SurrealDB diagnostics captured ${type}:${phase}`);
		}) }), logger, events);
		this.config = config;
	}
	getConfig() {
		return this.config;
	}
	/**
	 * Connects to the configured endpoint, selects namespace/database, and
	 * authenticates with the configured token if present. Logs a warning and
	 * does nothing when no endpoint is configured; rethrows connection errors.
	 */
	async connect() {
		const { endpoint, token, namespace, database } = this.getConfig();
		if (endpoint) {
			this.logger.info({
				endpoint,
				namespace,
				database,
				Category: "spooky-client::RemoteDatabaseService::connect"
			}, "Connecting to remote database");
			try {
				await this.client.connect(endpoint);
				await this.client.use({
					namespace,
					database
				});
				if (token) {
					this.logger.debug({ Category: "spooky-client::RemoteDatabaseService::connect" }, "Authenticating with token");
					await this.client.authenticate(token);
				}
				this.logger.info({ Category: "spooky-client::RemoteDatabaseService::connect" }, "Connected to remote database");
			} catch (err) {
				this.logger.error({
					err,
					Category: "spooky-client::RemoteDatabaseService::connect"
				}, "Failed to connect to remote database");
				throw err;
			}
		} else this.logger.warn({ Category: "spooky-client::RemoteDatabaseService::connect" }, "No endpoint configured for remote database");
	}
	// Thin pass-throughs to the underlying client's auth API.
	async signin(params) {
		return this.client.signin(params);
	}
	async signup(params) {
		return this.client.signup(params);
	}
	async authenticate(token) {
		return this.client.authenticate(token);
	}
	async invalidate() {
		return this.client.invalidate();
	}
};
|
|
1056
|
+
|
|
1057
|
+
//#endregion
|
|
1058
|
+
//#region src/services/logger/index.ts
|
|
1059
|
+
// NOTE(review): return value is discarded — presumably createContextKey
// registers the "Category" logging context key as a side effect; confirm,
// otherwise this call is dead code left by the bundler.
createContextKey("Category");
|
|
1060
|
+
/**
 * Maps a pino level label to an OpenTelemetry severity number.
 * Unknown labels fall back to 9 (INFO).
 */
function mapLevelToSeverityNumber(level) {
	const severityByLevel = new Map([
		["trace", 1],
		["debug", 5],
		["info", 9],
		["warn", 13],
		["error", 17],
		["fatal", 21]
	]);
	return severityByLevel.get(level) ?? 9;
}
|
|
1071
|
+
/**
 * Creates the client's pino logger. Logs are written to the console as JSON;
 * when `otelEndpoint` is given, logs are additionally transmitted to an
 * OpenTelemetry OTLP collector via pino's browser `transmit` hook.
 *
 * @param level Minimum log level (default "info").
 * @param otelEndpoint Optional OTLP/HTTP logs endpoint URL.
 */
function createLogger(level = "info", otelEndpoint) {
	const browserConfig = {
		asObject: true,
		// Console sink: one JSON object per line.
		write: (o) => {
			console.log(JSON.stringify(o));
		}
	};
	if (otelEndpoint) {
		const loggerProvider = new LoggerProvider({
			resource: resourceFromAttributes({ [ATTR_SERVICE_NAME]: "spooky-client" }),
			processors: [new BatchLogRecordProcessor(new OTLPLogExporter({ url: otelEndpoint }))]
		});
		// Cache of per-category OTel loggers, created lazily.
		const otelLogger = {};
		const getOtelLogger = (category) => {
			if (!otelLogger[category]) otelLogger[category] = loggerProvider.getLogger(category);
			return otelLogger[category];
		};
		browserConfig.transmit = {
			level,
			send: (levelLabel, logEvent) => {
				try {
					const messages = [...logEvent.messages];
					const severityNumber = mapLevelToSeverityNumber(levelLabel);
					let body = "";
					// pino places the message string last; earlier entries are
					// structured merge objects.
					const msg = messages.pop();
					if (typeof msg === "string") body = msg;
					else if (msg) body = JSON.stringify(msg);
					let category = "spooky-client::unknown";
					const attributes = {};
					// Fold all merge objects into attributes, pulling out the
					// Category field to pick the OTel logger.
					for (const msg of messages) if (typeof msg === "object") {
						if (msg.Category) {
							category = msg.Category;
							delete msg.Category;
						}
						Object.assign(attributes, msg);
					}
					getOtelLogger(category).emit({
						severityNumber,
						severityText: levelLabel.toUpperCase(),
						body,
						attributes: {
							...logEvent.bindings[0],
							...attributes
						},
						timestamp: new Date(logEvent.ts)
					});
				} catch (e) {
					// Transmission failures must never break the application's logging.
					console.warn("Failed to transmit log to OTEL endpoint", e);
				}
			}
		};
	}
	return pino({
		level,
		browser: browserConfig
	});
}
|
|
1128
|
+
|
|
1129
|
+
//#endregion
|
|
1130
|
+
//#region src/services/database/local-migrator.ts
|
|
1131
|
+
const sha1 = async (str) => {
|
|
1132
|
+
const enc = new TextEncoder();
|
|
1133
|
+
const hash = await crypto.subtle.digest("SHA-1", enc.encode(str));
|
|
1134
|
+
return Array.from(new Uint8Array(hash)).map((v) => v.toString(16).padStart(2, "0")).join("");
|
|
1135
|
+
};
|
|
1136
|
+
/**
 * Provisions the local SurrealDB schema: drops and recreates the database
 * when the schema text changes (tracked via a SHA-1 hash stored in the
 * `_spooky_schema` table), then replays the schema statement by statement.
 */
var LocalMigrator = class {
	logger;
	constructor(localDb, logger) {
		this.localDb = localDb;
		// Bug fix: the `?? createLogger(...)` fallback used to be evaluated on
		// its own line and discarded; it is now wired into the assignment as
		// intended, so a missing logger falls back to a default one instead of
		// crashing on `logger.child`.
		this.logger = logger?.child({ service: "LocalMigrator" }) ?? createLogger("info").child({ service: "LocalMigrator" });
	}
	/**
	 * Applies `schemaSurql` to the local database if its hash differs from the
	 * last provisioned schema. Recreates the database, executes each statement
	 * (skipping index definitions), then records the new schema hash.
	 * Rethrows the first statement execution error.
	 */
	async provision(schemaSurql) {
		const hash = await sha1(schemaSurql);
		const { database } = this.localDb.getConfig();
		if (await this.isSchemaUpToDate(hash)) {
			this.logger.info({ Category: "spooky-client::LocalMigrator::provision" }, "[Provisioning] Schema is up to date, skipping migration");
			return;
		}
		await this.recreateDatabase(database);
		// Internal bookkeeping tables are appended to the user schema.
		const fullSchema = schemaSurql + "\n\n DEFINE TABLE IF NOT EXISTS _spooky_stream_processor_state SCHEMALESS PERMISSIONS FOR select, create, update, delete WHERE true;\n DEFINE TABLE IF NOT EXISTS _spooky_query SCHEMALESS PERMISSIONS FOR select, create, update, delete WHERE true;\n DEFINE TABLE IF NOT EXISTS _spooky_schema SCHEMALESS PERMISSIONS FOR select, create, update, delete WHERE true;\n DEFINE TABLE IF NOT EXISTS _spooky_pending_mutations SCHEMALESS PERMISSIONS FOR select, create, update, delete WHERE true;\n ";
		const statements = this.splitStatements(fullSchema);
		for (let i = 0; i < statements.length; i++) {
			const statement = statements[i];
			// Strip line comments before inspecting the statement keyword.
			const cleanStatement = statement.replace(/--.*/g, "").trim();
			if (cleanStatement.toUpperCase().startsWith("DEFINE INDEX")) {
				// Index creation hangs the WASM engine, so indexes are skipped locally.
				this.logger.warn({ Category: "spooky-client::LocalMigrator::provision" }, `[Provisioning] Skipping index definition (WASM hang avoidance): ${cleanStatement.substring(0, 50)}...`);
				continue;
			}
			try {
				this.logger.info({ Category: "spooky-client::LocalMigrator::provision" }, `[Provisioning] (${i + 1}/${statements.length}) Executing: ${statement.substring(0, 50)}...`);
				await this.localDb.query(statement);
				this.logger.info({ Category: "spooky-client::LocalMigrator::provision" }, `[Provisioning] (${i + 1}/${statements.length}) Done`);
			} catch (e) {
				this.logger.error({ Category: "spooky-client::LocalMigrator::provision" }, `[Provisioning] (${i + 1}/${statements.length}) Error executing statement: ${statement}`);
				throw e;
			}
		}
		await this.createHashRecord(hash);
	}
	/**
	 * True if the newest `_spooky_schema` record carries the same hash.
	 * Any query error (e.g. table does not exist yet) counts as "not up to date".
	 */
	async isSchemaUpToDate(hash) {
		try {
			const [lastSchemaRecord] = await this.localDb.query(`SELECT hash, created_at FROM ONLY _spooky_schema ORDER BY created_at DESC LIMIT 1;`);
			return lastSchemaRecord?.hash === hash;
		} catch (error) {
			return false;
		}
	}
	/**
	 * Drops and recreates the target database. A temporary database is used as
	 * the active DB while removing the target (a DB cannot remove itself);
	 * failures of the best-effort steps are deliberately ignored.
	 * `database` comes from local config, not user input.
	 */
	async recreateDatabase(database) {
		try {
			await this.localDb.query(`DEFINE DATABASE _spooky_temp;`);
		} catch (e) {}
		try {
			await this.localDb.query(`
			USE DB _spooky_temp;
			REMOVE DATABASE ${database};
			`);
		} catch (e) {}
		await this.localDb.query(`
		DEFINE DATABASE ${database};
		USE DB ${database};
		`);
	}
	/**
	 * Splits a SurrealQL schema into individual statements on top-level `;`,
	 * respecting quoted strings, `--` line comments, and `{ ... }` nesting.
	 */
	splitStatements(schema) {
		const statements = [];
		let current = "";
		let depth = 0;
		let inQuote = false;
		let quoteChar = "";
		let inComment = false;
		for (let i = 0; i < schema.length; i++) {
			const char = schema[i];
			const nextChar = schema[i + 1];
			// Inside a line comment: copy through to end of line.
			if (inComment) {
				current += char;
				if (char === "\n") inComment = false;
				continue;
			}
			if (!inQuote && char === "-" && nextChar === "-") {
				inComment = true;
				current += char;
				continue;
			}
			// Inside a string: copy, honoring backslash escapes of the quote.
			if (inQuote) {
				current += char;
				if (char === quoteChar && schema[i - 1] !== "\\") inQuote = false;
				continue;
			}
			if (char === "\"" || char === "'") {
				inQuote = true;
				quoteChar = char;
				current += char;
				continue;
			}
			if (char === "{") {
				depth++;
				current += char;
				continue;
			}
			if (char === "}") {
				depth--;
				current += char;
				continue;
			}
			// Only a top-level semicolon terminates a statement.
			if (char === ";" && depth === 0) {
				if (current.trim().length > 0) statements.push(current.trim());
				current = "";
				continue;
			}
			current += char;
		}
		if (current.trim().length > 0) statements.push(current.trim());
		return statements;
	}
	/**
	 * Records the provisioned schema hash.
	 * NOTE(review): `UPSERT ... WHERE` looks unusual for SurrealQL — confirm
	 * this actually upserts by hash rather than creating duplicates.
	 */
	async createHashRecord(hash) {
		await this.localDb.query(`UPSERT _spooky_schema SET hash = $hash, created_at = time::now() WHERE hash = $hash;`, { hash });
	}
};
|
|
1249
|
+
|
|
1250
|
+
//#endregion
|
|
1251
|
+
//#region src/modules/sync/events/index.ts
|
|
1252
|
+
// Event type names emitted by the up/down sync queues.
const SyncQueueEventTypes = {
	MutationEnqueued: "MUTATION_ENQUEUED",
	MutationDequeued: "MUTATION_DEQUEUED",
	QueryItemEnqueued: "QUERY_ITEM_ENQUEUED"
};
// Creates an event system registered with all sync-queue event types.
function createSyncQueueEventSystem() {
	return createEventSystem([
		SyncQueueEventTypes.QueryItemEnqueued,
		SyncQueueEventTypes.MutationEnqueued,
		SyncQueueEventTypes.MutationDequeued
	]);
}
|
|
1264
|
+
// Event type names emitted by the sync engine itself.
const SyncEventTypes = {
	QueryUpdated: "SYNC_QUERY_UPDATED",
	RemoteDataIngested: "SYNC_REMOTE_DATA_INGESTED",
	MutationRolledBack: "SYNC_MUTATION_ROLLED_BACK"
};
// Creates an event system registered with all sync event types.
function createSyncEventSystem() {
	return createEventSystem([
		SyncEventTypes.QueryUpdated,
		SyncEventTypes.RemoteDataIngested,
		SyncEventTypes.MutationRolledBack
	]);
}
|
|
1276
|
+
|
|
1277
|
+
//#endregion
|
|
1278
|
+
//#region src/modules/sync/queue/queue-up.ts
|
|
1279
|
+
/**
 * In-memory queue of outgoing (client -> server) mutations, persisted in the
 * `_spooky_pending_mutations` table. Supports per-key debouncing of mutations
 * and distinguishes network errors (re-queue and retry) from application
 * errors (drop the mutation and roll it back).
 */
var UpQueue = class {
	// Pending mutation events, oldest first.
	queue = [];
	_events;
	logger;
	// Pending debounce timers keyed by debounce key.
	debouncedMutations;
	get events() {
		return this._events;
	}
	constructor(local, logger) {
		this.local = local;
		this._events = createSyncQueueEventSystem();
		this.logger = logger.child({ service: "UpQueue" });
		this.debouncedMutations = /* @__PURE__ */ new Map();
	}
	get size() {
		return this.queue.length;
	}
	// Enqueues a mutation, routing it through the debouncer when requested.
	push(event) {
		if (event.options?.debounced) {
			const { key, delay } = event.options.debounced;
			this.handleDebouncedMutation(event, key, delay);
			return;
		}
		this.addToQueue(event);
	}
	// Appends to the queue and broadcasts the new queue size.
	addToQueue(event) {
		this.queue.push(event);
		this._events.addEvent({
			type: SyncQueueEventTypes.MutationEnqueued,
			payload: { queueSize: this.queue.length }
		});
	}
	// Debounces mutations per key: only the most recent event survives the
	// delay window, but for updates the beforeRecord of the FIRST event in the
	// window is preserved so a rollback restores the true pre-burst state.
	// NOTE(review): data of earlier events in the window is discarded, not
	// merged — assumes each update event carries the full field set; confirm.
	handleDebouncedMutation(event, key, delay) {
		const existing = this.debouncedMutations.get(key);
		let firstBeforeRecord;
		if (existing) {
			clearTimeout(existing.timer);
			firstBeforeRecord = existing.firstBeforeRecord;
		} else if (event.type === "update") firstBeforeRecord = event.beforeRecord;
		const timer = setTimeout(() => {
			this.debouncedMutations.delete(key);
			if (firstBeforeRecord && event.type === "update") event.beforeRecord = firstBeforeRecord;
			this.addToQueue(event);
		}, delay);
		this.debouncedMutations.set(key, {
			timer,
			firstBeforeRecord
		});
	}
	/**
	 * Processes the next queued mutation with `fn`.
	 * - Network errors: the event is put back at the head and the error rethrown.
	 * - Application errors: the persisted mutation is deleted, `onRollback` is
	 *   invoked (its own failures only logged), and a dequeue event is emitted.
	 * - Success: the persisted mutation is deleted (failure only logged) and a
	 *   dequeue event is emitted.
	 */
	async next(fn, onRollback) {
		const event = this.queue.shift();
		if (event) {
			try {
				await fn(event);
			} catch (error) {
				if (classifySyncError(error) === "network") {
					this.logger.error({
						error,
						event,
						Category: "spooky-client::UpQueue::next"
					}, "Network error processing mutation, re-queuing");
					this.queue.unshift(event);
					throw error;
				}
				this.logger.error({
					error,
					event,
					Category: "spooky-client::UpQueue::next"
				}, "Application error processing mutation, rolling back");
				try {
					await this.removeEventFromDatabase(event.mutation_id);
				} catch (removeError) {
					this.logger.error({
						error: removeError,
						event,
						Category: "spooky-client::UpQueue::next"
					}, "Failed to remove rolled-back mutation from database");
				}
				if (onRollback) try {
					await onRollback(event, error instanceof Error ? error : new Error(String(error)));
				} catch (rollbackError) {
					this.logger.error({
						error: rollbackError,
						event,
						Category: "spooky-client::UpQueue::next"
					}, "Rollback handler failed");
				}
				this._events.addEvent({
					type: SyncQueueEventTypes.MutationDequeued,
					payload: { queueSize: this.queue.length }
				});
				return;
			}
			// Success path: drop the persisted copy of the processed mutation.
			try {
				await this.removeEventFromDatabase(event.mutation_id);
			} catch (error) {
				this.logger.error({
					error,
					event,
					Category: "spooky-client::UpQueue::next"
				}, "Failed to remove mutation from database after successful processing");
			}
			this._events.addEvent({
				type: SyncQueueEventTypes.MutationDequeued,
				payload: { queueSize: this.queue.length }
			});
		}
	}
	// Deletes a persisted mutation record by its record id (parameterized).
	async removeEventFromDatabase(mutation_id) {
		return this.local.query(`DELETE $mutation_id`, { mutation_id });
	}
	/**
	 * Rebuilds the in-memory queue from the `_spooky_pending_mutations` table,
	 * oldest first. Rows with an unknown mutation type are logged and skipped;
	 * a query failure leaves the current queue untouched.
	 */
	async loadFromDatabase() {
		try {
			const [records] = await this.local.query(`SELECT * FROM _spooky_pending_mutations ORDER BY created_at ASC`);
			this.queue = records.map((r) => {
				switch (r.mutationType) {
					case "create": return {
						type: "create",
						mutation_id: parseRecordIdString(r.id),
						record_id: parseRecordIdString(r.recordId),
						data: r.data,
						tableName: extractTablePart(r.recordId)
					};
					case "update": return {
						type: "update",
						mutation_id: parseRecordIdString(r.id),
						record_id: parseRecordIdString(r.recordId),
						data: r.data,
						beforeRecord: r.beforeRecord
					};
					case "delete": return {
						type: "delete",
						mutation_id: parseRecordIdString(r.id),
						record_id: parseRecordIdString(r.recordId)
					};
					default:
						this.logger.warn({
							mutationType: r.mutationType,
							record: r,
							Category: "spooky-client::UpQueue::loadFromDatabase"
						}, "Unknown mutation type");
						return null;
				}
			}).filter((e) => e !== null);
		} catch (error) {
			this.logger.error({
				error,
				Category: "spooky-client::UpQueue::loadFromDatabase"
			}, "Failed to load pending mutations from database");
		}
	}
};
|
|
1431
|
+
|
|
1432
|
+
//#endregion
|
|
1433
|
+
//#region src/modules/sync/queue/queue-down.ts
|
|
1434
|
+
/**
 * FIFO queue for incoming (server -> client) sync work items. Failed items
 * are returned to the head of the queue and the error is propagated so the
 * caller can retry.
 */
var DownQueue = class {
	// Pending work items, oldest first.
	queue = [];
	_events;
	logger;
	get events() {
		return this._events;
	}
	constructor(local, logger) {
		this.local = local;
		this._events = createSyncQueueEventSystem();
		this.logger = logger.child({ service: "DownQueue" });
	}
	get size() {
		return this.queue.length;
	}
	/** Appends a work item and notifies listeners of the new queue size. */
	push(event) {
		this.queue.push(event);
		this.emitPushEvent();
	}
	/** Broadcasts a QueryItemEnqueued event carrying the current queue size. */
	emitPushEvent() {
		this._events.addEvent({
			type: SyncQueueEventTypes.QueryItemEnqueued,
			payload: { queueSize: this.queue.length }
		});
	}
	/**
	 * Pops the next item and hands it to `fn`. On failure the item is put back
	 * at the head of the queue and the error is rethrown. No-op when empty.
	 */
	async next(fn) {
		const current = this.queue.shift();
		if (!current) return;
		try {
			await fn(current);
		} catch (error) {
			this.logger.error({
				error,
				event: current,
				Category: "spooky-client::DownQueue::next"
			}, "Failed to process query");
			this.queue.unshift(current);
			throw error;
		}
	}
};
|
|
1474
|
+
|
|
1475
|
+
//#endregion
|
|
1476
|
+
//#region src/modules/sync/utils.ts
|
|
1477
|
+
/**
 * Maintains a local [recordId, version] array alongside a remote one and
 * computes the diff between them.
 *
 * NOTE: the constructor sorts both input arrays IN PLACE (callers share
 * these arrays with query state), keeping them ordered by record id.
 *
 * Fix: removed a leftover debug `console.log("xxxx555", ...)` from nextSet().
 */
var ArraySyncer = class {
	localArray;
	remoteArray;
	// Set when a mutation may have disturbed localArray's sort order.
	needsSort = false;
	constructor(localArray, remoteArray) {
		this.remoteArray = remoteArray.sort((a, b) => a[0].localeCompare(b[0]));
		this.localArray = localArray.sort((a, b) => a[0].localeCompare(b[0]));
	}
	/**
	 * Inserts an item into the local array
	 */
	insert(recordId, version) {
		this.localArray.push([recordId, version]);
		this.needsSort = true;
	}
	/**
	 * Updates the current local RecordVersionArray state.
	 */
	update(recordId, version) {
		this.localArray = this.localArray.map((record) => {
			if (record[0] === recordId) {
				// Note: only the version changes, so the id-based sort order is
				// actually preserved; the flag is kept for safety.
				this.needsSort = true;
				return [recordId, version];
			}
			return record;
		});
	}
	/**
	 * Deletes an item from the local array
	 */
	delete(recordId) {
		this.localArray = this.localArray.filter((record) => record[0] !== recordId);
	}
	/**
	 * Returns the difference between the local and remote arrays.
	 * Includes sets of added, updated, and removed records.
	 */
	nextSet() {
		if (this.needsSort) {
			this.localArray.sort((a, b) => a[0].localeCompare(b[0]));
			this.needsSort = false;
		}
		return diffRecordVersionArray(this.localArray, this.remoteArray);
	}
};
|
|
1523
|
+
function diffRecordVersionArray(local, remote) {
|
|
1524
|
+
const localArray = local || [];
|
|
1525
|
+
const remoteArray = remote || [];
|
|
1526
|
+
const localMap = new Map(localArray);
|
|
1527
|
+
const remoteMap = new Map(remoteArray);
|
|
1528
|
+
const added = [];
|
|
1529
|
+
const updated = [];
|
|
1530
|
+
const removed = [];
|
|
1531
|
+
for (const [recordId, remoteVersion] of remoteMap) {
|
|
1532
|
+
const localVersion = localMap.get(recordId);
|
|
1533
|
+
if (localVersion === void 0) added.push(recordId);
|
|
1534
|
+
else if (localVersion < remoteVersion) updated.push(recordId);
|
|
1535
|
+
}
|
|
1536
|
+
for (const [recordId] of localMap) if (!remoteMap.has(recordId)) removed.push(recordId);
|
|
1537
|
+
return {
|
|
1538
|
+
added: added.map((id) => ({
|
|
1539
|
+
id: parseRecordIdString(id),
|
|
1540
|
+
version: remoteMap.get(id)
|
|
1541
|
+
})),
|
|
1542
|
+
updated: updated.map((id) => ({
|
|
1543
|
+
id: parseRecordIdString(id),
|
|
1544
|
+
version: remoteMap.get(id)
|
|
1545
|
+
})),
|
|
1546
|
+
removed: removed.map(parseRecordIdString)
|
|
1547
|
+
};
|
|
1548
|
+
}
|
|
1549
|
+
/**
 * Builds a record diff from a single database operation.
 * For CREATE/UPDATE, if a known local version for the record is already at or
 * above the incoming version, an empty diff is returned (the change is stale).
 * CREATE maps to `added`, UPDATE to `updated`, anything else (DELETE) to
 * `removed`.
 */
function createDiffFromDbOp(op, recordId, version, versions) {
	if (op !== "DELETE") {
		const previous = versions?.find((entry) => entry[0] === encodeRecordId(recordId));
		// Stale change: the local copy is already at this version or newer.
		if (previous && previous[1] >= version) {
			return {
				added: [],
				updated: [],
				removed: []
			};
		}
	}
	switch (op) {
		case "CREATE":
			return {
				added: [{ id: recordId, version }],
				updated: [],
				removed: []
			};
		case "UPDATE":
			return {
				added: [],
				updated: [{ id: recordId, version }],
				removed: []
			};
		default:
			return {
				added: [],
				updated: [],
				removed: [recordId]
			};
	}
}
|
|
1580
|
+
|
|
1581
|
+
//#endregion
|
|
1582
|
+
//#region src/modules/sync/engine.ts
|
|
1583
|
+
/**
|
|
1584
|
+
* SyncEngine handles the core sync operations: fetching remote records,
|
|
1585
|
+
* caching them locally, and ingesting into DBSP.
|
|
1586
|
+
*
|
|
1587
|
+
* This is extracted from SpookySync to separate "how to sync" from "when to sync".
|
|
1588
|
+
*/
|
|
1589
|
+
/**
 * SyncEngine handles the core sync operations: fetching remote records,
 * caching them locally, and ingesting into DBSP.
 *
 * This is extracted from SpookySync to separate "how to sync" from "when to sync".
 *
 * Fixes: removed a leftover debug `console.log`; corrected a misleading log
 * message (the check is `remote <= local`, not strictly "higher"); guarded
 * against a missing remote result set.
 */
var SyncEngine = class {
	logger;
	events = createSyncEventSystem();
	constructor(remote, cache, schema, logger) {
		this.remote = remote;
		this.cache = cache;
		this.schema = schema;
		this.logger = logger.child({ service: "SpookySync:SyncEngine" });
	}
	/**
	 * Sync missing/updated/removed records between local and remote.
	 * Main entry point for sync operations.
	 * Uses batch processing to minimize events emitted.
	 * @param diff Object with `added`, `updated` ({id, version}[]) and `removed` (id[]) sets.
	 */
	async syncRecords(diff) {
		const { added, updated, removed } = diff;
		this.logger.debug({
			added,
			updated,
			removed,
			Category: "spooky-client::SyncEngine::syncRecords"
		}, "SyncEngine.syncRecords diff");
		if (removed.length > 0) await this.handleRemovedRecords(removed);
		const idsToFetch = [...added, ...updated].map((x) => x.id);
		if (idsToFetch.length === 0) return;
		// Fetch each record together with its _spooky_version row in one query.
		const [remoteResults] = await this.remote.query("SELECT (SELECT * FROM ONLY <record>$parent.id) AS record, (SELECT version FROM ONLY _spooky_version WHERE record_id = <record>$parent.id)['version'] as spooky_rv FROM $idsToFetch", { idsToFetch });
		const cacheBatch = [];
		// Defensive: treat a missing result set as empty rather than throwing.
		for (const { spooky_rv, record } of remoteResults ?? []) {
			if (!record?.id) {
				this.logger.warn({
					record,
					idsToFetch,
					Category: "spooky-client::SyncEngine::syncRecords"
				}, "Remote record has no id. Skipping record");
				continue;
			}
			const fullId = encodeRecordId(record.id);
			const table = record.id.table.toString();
			const isAdded = added.some((item) => encodeRecordId(item.id) === fullId);
			const localVersion = this.cache.lookup(fullId);
			// Skip records we already hold at the same or a newer version.
			if (localVersion && spooky_rv <= localVersion) {
				this.logger.info({
					recordId: fullId,
					version: spooky_rv,
					localVersion,
					Category: "spooky-client::SyncEngine::syncRecords"
				}, "Local version is equal to or higher than remote version. Skipping record");
				continue;
			}
			// Strip columns not declared in the schema, when the table is known.
			const tableSchema = this.schema.tables.find((t) => t.name === table);
			const cleanedRecord = tableSchema ? cleanRecord(tableSchema.columns, record) : record;
			cacheBatch.push({
				table,
				op: isAdded ? "CREATE" : "UPDATE",
				record: cleanedRecord,
				version: spooky_rv
			});
		}
		if (cacheBatch.length > 0) await this.cache.saveBatch(cacheBatch);
		this.events.emit(SyncEventTypes.RemoteDataIngested, { records: remoteResults });
	}
	/**
	 * Handle records that exist locally but not in remote array.
	 * Confirms each record is truly gone from the remote before deleting it
	 * from the local cache; if the existence check fails, all are deleted.
	 */
	async handleRemovedRecords(removed) {
		this.logger.debug({
			removed: removed.map((r) => r.toString()),
			Category: "spooky-client::SyncEngine::handleRemovedRecords"
		}, "Checking removed records");
		let existingRemoteIds = /* @__PURE__ */ new Set();
		try {
			const [existingRemote] = await this.remote.query("SELECT id FROM $ids", { ids: removed });
			existingRemoteIds = new Set(existingRemote.map((r) => encodeRecordId(r.id)));
		} catch {
			// Best-effort: an unreachable remote means we fall back to deleting everything.
			this.logger.debug({ Category: "spooky-client::SyncEngine::handleRemovedRecords" }, "Remote existence check failed, proceeding with deletion");
		}
		for (const recordId of removed) {
			const recordIdStr = encodeRecordId(recordId);
			if (!existingRemoteIds.has(recordIdStr)) {
				this.logger.debug({
					recordId: recordIdStr,
					Category: "spooky-client::SyncEngine::handleRemovedRecords"
				}, "Deleting confirmed removed record");
				await this.cache.delete(recordId.table.name, recordIdStr);
			}
		}
	}
};
|
|
1678
|
+
|
|
1679
|
+
//#endregion
|
|
1680
|
+
//#region src/modules/sync/scheduler.ts
|
|
1681
|
+
/**
|
|
1682
|
+
* SyncScheduler manages when to sync: queue management and orchestration.
|
|
1683
|
+
* Decides the order and timing of sync operations.
|
|
1684
|
+
*/
|
|
1685
|
+
/**
 * SyncScheduler manages when to sync: queue management and orchestration.
 * Decides the order and timing of sync operations.
 * Up-sync (local -> remote) always takes priority over down-sync.
 *
 * Fix: the `syncDown()` chained from `syncUp`'s finally block was a floating
 * promise; a rejection there (DownQueue.next rethrows) would surface as an
 * unhandled rejection. It is now caught and logged.
 */
var SyncScheduler = class {
	isSyncingUp = false;
	isSyncingDown = false;
	constructor(upQueue, downQueue, onProcessUp, onProcessDown, logger, onRollback) {
		this.upQueue = upQueue;
		this.downQueue = downQueue;
		this.onProcessUp = onProcessUp;
		this.onProcessDown = onProcessDown;
		this.logger = logger;
		this.onRollback = onRollback;
	}
	/** Restores persisted mutations and wires queue events to the sync loops. */
	async init() {
		await this.upQueue.loadFromDatabase();
		this.upQueue.events.subscribe(SyncQueueEventTypes.MutationEnqueued, this.syncUp.bind(this));
		this.downQueue.events.subscribe(SyncQueueEventTypes.QueryItemEnqueued, this.syncDown.bind(this));
	}
	/**
	 * Add mutations to the upload queue
	 */
	enqueueMutation(mutations) {
		for (const mutation of mutations) this.upQueue.push(mutation);
	}
	/**
	 * Add query events to the download queue
	 */
	enqueueDownEvent(event) {
		this.downQueue.push(event);
	}
	/**
	 * Process upload queue
	 */
	async syncUp() {
		if (this.isSyncingUp) return;
		this.isSyncingUp = true;
		try {
			while (this.upQueue.size > 0) await this.upQueue.next(this.onProcessUp, this.onRollback);
		} finally {
			this.isSyncingUp = false;
			// Chain a down-sync pass; deliberately not awaited, but a rejection
			// must not escape as an unhandled promise rejection.
			this.syncDown().catch((error) => {
				this.logger.error({
					error,
					Category: "spooky-client::SyncScheduler::syncUp"
				}, "Down-sync chained after up-sync failed");
			});
		}
	}
	/**
	 * Process download queue
	 */
	async syncDown() {
		if (this.isSyncingDown) return;
		// Up-sync has priority: never start (or continue) while mutations are pending.
		if (this.upQueue.size > 0) return;
		this.isSyncingDown = true;
		try {
			while (this.downQueue.size > 0) {
				if (this.upQueue.size > 0) break;
				await this.downQueue.next(this.onProcessDown);
			}
		} finally {
			this.isSyncingDown = false;
		}
	}
	/** True while either direction is actively draining its queue. */
	get isSyncing() {
		return this.isSyncingUp || this.isSyncingDown;
	}
};
|
|
1746
|
+
|
|
1747
|
+
//#endregion
|
|
1748
|
+
//#region src/modules/sync/sync.ts
|
|
1749
|
+
/**
|
|
1750
|
+
* The main synchronization engine for Spooky.
|
|
1751
|
+
* Handles the bidirectional synchronization between the local database and the remote backend.
|
|
1752
|
+
* Uses a queue-based architecture with 'up' (local to remote) and 'down' (remote to local) queues.
|
|
1753
|
+
* @template S The schema structure type.
|
|
1754
|
+
*/
|
|
1755
|
+
/**
 * The main synchronization engine for Spooky.
 * Handles the bidirectional synchronization between the local database and the remote backend.
 * Uses a queue-based architecture with 'up' (local to remote) and 'down' (remote to local) queues.
 * @template S The schema structure type.
 *
 * Fixes: removed a duplicate `scheduler.syncUp()` call in init(); removed a
 * leftover debug `console.log`; braced the `create` switch case so its lexical
 * declarations are scoped; the floating `startRefLiveQueries()` promise is now
 * caught so a LIVE-query failure cannot surface as an unhandled rejection.
 */
var SpookySync = class {
	clientId = "";
	upQueue;
	downQueue;
	isInit = false;
	logger;
	syncEngine;
	scheduler;
	events = createSyncEventSystem();
	/** True while either sync direction is actively running. */
	get isSyncing() {
		return this.scheduler.isSyncing;
	}
	/** Number of local mutations waiting to be pushed to the remote. */
	get pendingMutationCount() {
		return this.upQueue.size;
	}
	/**
	 * Subscribes to up-queue size changes (both enqueue and dequeue).
	 * @param cb Called with the current queue size on every change.
	 * @returns Unsubscribe function removing both listeners.
	 */
	subscribeToPendingMutations(cb) {
		const id1 = this.upQueue.events.subscribe(SyncQueueEventTypes.MutationEnqueued, (event) => cb(event.payload.queueSize));
		const id2 = this.upQueue.events.subscribe(SyncQueueEventTypes.MutationDequeued, (event) => cb(event.payload.queueSize));
		return () => {
			this.upQueue.events.unsubscribe(id1);
			this.upQueue.events.unsubscribe(id2);
		};
	}
	constructor(local, remote, cache, dataModule, schema, logger) {
		this.local = local;
		this.remote = remote;
		this.cache = cache;
		this.dataModule = dataModule;
		this.schema = schema;
		this.logger = logger.child({ service: "SpookySync" });
		this.upQueue = new UpQueue(this.local, this.logger);
		this.downQueue = new DownQueue(this.local, this.logger);
		this.syncEngine = new SyncEngine(this.remote, this.cache, this.schema, this.logger);
		this.scheduler = new SyncScheduler(this.upQueue, this.downQueue, this.processUpEvent.bind(this), this.processDownEvent.bind(this), this.logger, this.handleRollback.bind(this));
	}
	/**
	 * Initializes the synchronization system.
	 * Starts the scheduler and initiates the initial sync cycles.
	 * @param clientId The unique identifier for this client instance.
	 * @throws Error if already initialized.
	 */
	async init(clientId) {
		if (this.isInit) throw new Error("SpookySync is already initialized");
		this.clientId = clientId;
		this.isInit = true;
		await this.scheduler.init();
		// Kick off initial sync passes (previously syncUp was called twice).
		this.scheduler.syncUp();
		this.scheduler.syncDown();
		this.startRefLiveQueries().catch((err) => {
			this.logger.error({
				err,
				Category: "spooky-client::SpookySync::init"
			}, "Failed to start ref live queries");
		});
	}
	/**
	 * Opens a LIVE query on _spooky_list_ref and routes every change
	 * notification through handleRemoteListRefChange.
	 */
	async startRefLiveQueries() {
		this.logger.debug({
			clientId: this.clientId,
			Category: "spooky-client::SpookySync::startRefLiveQueries"
		}, "Starting ref live queries");
		const [queryUuid] = await this.remote.query("LIVE SELECT * FROM _spooky_list_ref");
		(await this.remote.getClient().liveOf(queryUuid)).subscribe((message) => {
			this.logger.debug({
				message,
				Category: "spooky-client::SpookySync::startRefLiveQueries"
			}, "Live update received");
			if (message.action === "KILLED") return;
			this.handleRemoteListRefChange(message.action, message.value.in, message.value.out, message.value.version).catch((err) => {
				this.logger.error({
					err,
					Category: "spooky-client::SpookySync::startRefLiveQueries"
				}, "Error handling remote list ref change");
			});
		});
	}
	/**
	 * Applies a remote _spooky_list_ref change to the matching local query,
	 * translating the db operation into a diff and syncing the records.
	 */
	async handleRemoteListRefChange(action, queryId, recordId, version) {
		const existing = this.dataModule.getQueryById(queryId);
		if (!existing) {
			this.logger.warn({
				queryId: queryId.toString(),
				Category: "spooky-client::SpookySync::handleRemoteListRefChange"
			}, "Received remote update for unknown local query");
			return;
		}
		const { localArray } = existing.config;
		this.logger.debug({
			action,
			queryId,
			recordId,
			version,
			localArray,
			Category: "spooky-client::SpookySync::handleRemoteListRefChange"
		}, "Live update is being processed");
		const diff = createDiffFromDbOp(action, recordId, version, localArray);
		await this.syncEngine.syncRecords(diff);
	}
	/**
	 * Enqueues a 'down' event (from remote to local) for processing.
	 * @param event The DownEvent to enqueue.
	 */
	enqueueDownEvent(event) {
		this.scheduler.enqueueDownEvent(event);
	}
	/**
	 * Pushes a single up event (create/update/delete) to the remote.
	 * Unknown event types are logged and ignored.
	 */
	async processUpEvent(event) {
		this.logger.debug({
			event,
			Category: "spooky-client::SpookySync::processUpEvent"
		}, "Processing up event");
		switch (event.type) {
			case "create": {
				// Bind each data field to a prefixed query variable (data_<key>).
				const dataKeys = Object.keys(event.data).map((key) => ({
					key,
					variable: `data_${key}`
				}));
				const prefixedParams = Object.fromEntries(dataKeys.map(({ key, variable }) => [variable, event.data[key]]));
				const query = surql.seal(surql.createSet("id", dataKeys));
				await this.remote.query(query, {
					id: event.record_id,
					...prefixedParams
				});
				break;
			}
			case "update":
				await this.remote.query(`UPDATE $id MERGE $data`, {
					id: event.record_id,
					data: event.data
				});
				break;
			case "delete":
				await this.remote.query(`DELETE $id`, { id: event.record_id });
				break;
			default:
				this.logger.error({
					event,
					Category: "spooky-client::SpookySync::processUpEvent"
				}, "processUpEvent unknown event type");
				return;
		}
	}
	/**
	 * Reverts a failed up mutation locally (best effort) and emits a
	 * MutationRolledBack event. Updates without a beforeRecord and deletes
	 * are left for down-sync reconciliation.
	 */
	async handleRollback(event, error) {
		const recordId = encodeRecordId(event.record_id);
		const tableName = event.type === "create" && event.tableName ? event.tableName : extractTablePart(recordId);
		this.logger.warn({
			type: event.type,
			recordId,
			tableName,
			error: error.message,
			Category: "spooky-client::SpookySync::handleRollback"
		}, "Rolling back failed mutation");
		switch (event.type) {
			case "create":
				await this.dataModule.rollbackCreate(event.record_id, tableName);
				break;
			case "update":
				if (event.beforeRecord) await this.dataModule.rollbackUpdate(event.record_id, tableName, event.beforeRecord);
				else this.logger.warn({
					recordId,
					Category: "spooky-client::SpookySync::handleRollback"
				}, "Cannot rollback update: no beforeRecord available. Down-sync will reconcile.");
				break;
			case "delete":
				this.logger.warn({
					recordId,
					Category: "spooky-client::SpookySync::handleRollback"
				}, "Delete rollback not implemented. Down-sync will reconcile.");
				break;
		}
		this.events.emit(SyncEventTypes.MutationRolledBack, {
			eventType: event.type,
			recordId,
			error: error.message
		});
	}
	/** Dispatches a down event to the matching query-lifecycle handler. */
	async processDownEvent(event) {
		this.logger.debug({
			event,
			Category: "spooky-client::SpookySync::processDownEvent"
		}, "Processing down event");
		switch (event.type) {
			case "register": return this.registerQuery(event.payload.hash);
			case "sync": return this.syncQuery(event.payload.hash);
			case "heartbeat": return this.heartbeatQuery(event.payload.hash);
			case "cleanup": return this.cleanupQuery(event.payload.hash);
		}
	}
	/**
	 * Synchronizes a specific query by hash.
	 * Compares local and remote version arrays and fetches differences.
	 * @param hash The hash of the query to sync.
	 */
	async syncQuery(hash) {
		const queryState = this.dataModule.getQueryByHash(hash);
		if (!queryState) {
			this.logger.warn({
				hash,
				Category: "spooky-client::SpookySync::syncQuery"
			}, "Query not found");
			return;
		}
		const diff = new ArraySyncer(queryState.config.localArray, queryState.config.remoteArray).nextSet();
		if (!diff) return;
		return this.syncEngine.syncRecords(diff);
	}
	/**
	 * Enqueues a list of mutations (up events) to be sent to the remote.
	 * @param mutations Array of UpEvents (create/update/delete) to enqueue.
	 */
	async enqueueMutation(mutations) {
		this.scheduler.enqueueMutation(mutations);
	}
	/** Registers a query remotely, then runs an initial sync for it. */
	async registerQuery(queryHash) {
		try {
			this.logger.debug({
				queryHash,
				Category: "spooky-client::SpookySync::registerQuery"
			}, "Register Query state");
			await this.createRemoteQuery(queryHash);
			await this.syncQuery(queryHash);
		} catch (e) {
			this.logger.error({
				err: e,
				Category: "spooky-client::SpookySync::registerQuery"
			}, "registerQuery error");
			throw e;
		}
	}
	/**
	 * Registers the query on the remote (fn::query::register) and pulls its
	 * current [recordId, version] array into local query state.
	 * @throws Error when the query hash is unknown locally.
	 */
	async createRemoteQuery(queryHash) {
		const queryState = this.dataModule.getQueryByHash(queryHash);
		if (!queryState) {
			this.logger.warn({
				queryHash,
				Category: "spooky-client::SpookySync::createRemoteQuery"
			}, "Query to register not found");
			throw new Error("Query to register not found");
		}
		await this.remote.query("fn::query::register($config)", { config: {
			clientId: this.clientId,
			id: queryState.config.id,
			surql: queryState.config.surql,
			params: queryState.config.params,
			ttl: queryState.config.ttl
		} });
		const [items] = await this.remote.query(surql.selectByFieldsAnd("_spooky_list_ref", ["in"], ["out", "version"]), { in: queryState.config.id });
		this.logger.trace({
			queryId: encodeRecordId(queryState.config.id),
			items,
			Category: "spooky-client::SpookySync::createRemoteQuery"
		}, "Got query record version array from remote");
		const array = items.map((item) => [encodeRecordId(item.out), item.version]);
		this.logger.debug({
			queryId: encodeRecordId(queryState.config.id),
			array,
			Category: "spooky-client::SpookySync::createRemoteQuery"
		}, "createdRemoteQuery");
		// `array` is always an array here; the guard is kept for safety.
		if (array) await this.dataModule.updateQueryRemoteArray(queryHash, array);
	}
	/**
	 * Refreshes the remote TTL for a registered query.
	 * @throws Error when the query hash is unknown locally.
	 */
	async heartbeatQuery(queryHash) {
		const queryState = this.dataModule.getQueryByHash(queryHash);
		if (!queryState) {
			this.logger.warn({
				queryHash,
				Category: "spooky-client::SpookySync::heartbeatQuery"
			}, "Query to register not found");
			throw new Error("Query to register not found");
		}
		await this.remote.query("fn::query::heartbeat($id)", { id: queryState.config.id });
	}
	/**
	 * Deletes the remote registration record for a query.
	 * @throws Error when the query hash is unknown locally.
	 */
	async cleanupQuery(queryHash) {
		const queryState = this.dataModule.getQueryByHash(queryHash);
		if (!queryState) {
			this.logger.warn({
				queryHash,
				Category: "spooky-client::SpookySync::cleanupQuery"
			}, "Query to register not found");
			throw new Error("Query to register not found");
		}
		await this.remote.query(`DELETE $id`, { id: queryState.config.id });
	}
};
|
|
2030
|
+
|
|
2031
|
+
//#endregion
|
|
2032
|
+
//#region src/modules/auth/events/index.ts
|
|
2033
|
+
/** Event type identifiers emitted by the auth module. */
const AuthEventTypes = { AuthStateChanged: "AUTH_STATE_CHANGED" };
/**
 * Builds the auth module's event system, pre-registered with every
 * auth event type.
 */
function createAuthEventSystem() {
	const supportedEvents = [AuthEventTypes.AuthStateChanged];
	return createEventSystem(supportedEvents);
}
|
|
2037
|
+
|
|
2038
|
+
//#endregion
|
|
2039
|
+
//#region src/modules/devtools/index.ts
|
|
2040
|
+
/**
 * Bridges client state to the Spooky browser devtools extension.
 * Keeps a bounded event history, serializes state into postMessage-safe
 * values, and exposes an inspection API on `window.__SPOOKY__`.
 *
 * Fix: corrected the malformed fallback error message "Unknown occurred"
 * in runQuery to "Unknown error occurred".
 */
var DevToolsService = class {
	// Rolling history of devtools events (capped at 100 entries).
	eventsHistory = [];
	eventIdCounter = 0;
	version = "1.0.0";
	constructor(databaseService, remoteDatabaseService, logger, schema, authService, dataManager) {
		this.databaseService = databaseService;
		this.remoteDatabaseService = remoteDatabaseService;
		this.logger = logger;
		this.schema = schema;
		this.authService = authService;
		this.dataManager = dataManager;
		this.exposeToWindow();
		// Push fresh state to the devtools panel whenever auth state changes.
		this.authService.eventSystem.subscribe(AuthEventTypes.AuthStateChanged, () => {
			this.notifyDevTools();
		});
		this.logger.debug({ Category: "spooky-client::DevToolsService::init" }, "Service initialized");
	}
	/** Snapshot of all active queries keyed by hashed query id. */
	getActiveQueries() {
		const result = /* @__PURE__ */ new Map();
		if (!this.dataManager) return result;
		this.dataManager.getActiveQueries().forEach((q) => {
			const queryHash = this.hashString(encodeRecordId(q.config.id));
			result.set(queryHash, {
				queryHash,
				status: "active",
				createdAt: q.config.lastActiveAt instanceof Date ? q.config.lastActiveAt.getTime() : new Date(q.config.lastActiveAt || Date.now()).getTime(),
				lastUpdate: Date.now(),
				updateCount: q.updateCount,
				query: q.config.surql,
				variables: q.config.params || {},
				dataSize: q.records?.length || 0,
				data: q.records,
				localArray: q.config.localArray,
				remoteArray: q.config.remoteArray
			});
		});
		return result;
	}
	/** Records a QUERY_REQUEST_INIT event and notifies the devtools panel. */
	onQueryInitialized(payload) {
		this.logger.debug({
			payload,
			Category: "spooky-client::DevToolsService::onQueryInitialized"
		}, "QueryInitialized");
		const queryHash = this.hashString(payload.queryId.toString());
		this.addEvent("QUERY_REQUEST_INIT", {
			queryHash,
			query: payload.sql,
			variables: {}
		});
		this.notifyDevTools();
	}
	/** Records a QUERY_UPDATED event and notifies the devtools panel. */
	onQueryUpdated(payload) {
		this.logger.debug({
			id: payload.queryId?.toString(),
			Category: "spooky-client::DevToolsService::onQueryUpdated"
		}, "QueryUpdated");
		const queryHash = this.hashString(payload.queryId.toString());
		this.addEvent("QUERY_UPDATED", {
			queryHash,
			data: payload.records
		});
		this.notifyDevTools();
	}
	/** Records a STREAM_UPDATE event and notifies the devtools panel. */
	onStreamUpdate(update) {
		this.logger.debug({
			update,
			Category: "spooky-client::DevToolsService::onStreamUpdate"
		}, "StreamUpdate");
		this.addEvent("STREAM_UPDATE", { updates: [update] });
		this.notifyDevTools();
	}
	/** Records one MUTATION_REQUEST_EXECUTION event per mutation payload. */
	onMutation(payload) {
		payload.forEach((p) => {
			// NOTE(review): type is hardcoded to "create" for every mutation,
			// including updates/deletes — presumably a simplification; confirm.
			this.addEvent("MUTATION_REQUEST_EXECUTION", { mutation: {
				type: "create",
				data: "data" in p ? p.data : void 0,
				selector: encodeRecordId(p.record_id)
			} });
		});
		this.notifyDevTools();
	}
	/** 32-bit string hash (Java-style, may be negative); 0 for empty input. */
	hashString(str) {
		let hash = 0;
		if (str.length === 0) return hash;
		for (let i = 0; i < str.length; i++) {
			const char = str.charCodeAt(i);
			hash = (hash << 5) - hash + char;
			hash = hash & hash;
		}
		return hash;
	}
	/** Records an arbitrary event and notifies the devtools panel. */
	logEvent(eventType, payload) {
		this.addEvent(eventType, payload);
		this.notifyDevTools();
	}
	/** Appends a serialized event to the history, evicting the oldest past 100. */
	addEvent(eventType, payload) {
		this.eventsHistory.push({
			id: this.eventIdCounter++,
			timestamp: Date.now(),
			eventType,
			payload: this.serializeForDevTools(payload)
		});
		if (this.eventsHistory.length > 100) this.eventsHistory.shift();
	}
	/** Full postMessage-safe snapshot of client state for the devtools panel. */
	getState() {
		return this.serializeForDevTools({
			eventsHistory: [...this.eventsHistory],
			activeQueries: Object.fromEntries(this.getActiveQueries()),
			auth: {
				authenticated: this.authService.isAuthenticated,
				userId: this.authService.currentUser?.id
			},
			version: this.version,
			database: {
				tables: this.schema.tables.map((t) => t.name),
				tableData: {}
			}
		});
	}
	/** Broadcasts the current state to the devtools extension (browser only). */
	notifyDevTools() {
		if (typeof window !== "undefined") window.postMessage({
			type: "SPOOKY_STATE_CHANGED",
			source: "spooky-devtools-page",
			state: this.getState()
		}, "*");
	}
	/**
	 * Recursively converts a value into postMessage-safe data:
	 * RecordIds/BigInts become strings, Dates become ISO strings, and
	 * circular references are replaced with marker strings.
	 */
	serializeForDevTools(data, seen = /* @__PURE__ */ new WeakSet()) {
		if (data === void 0) return "undefined";
		if (data === null) return null;
		if (data instanceof RecordId) return data.toString();
		if (Array.isArray(data)) {
			if (seen.has(data)) return "[Circular Array]";
			seen.add(data);
			return data.map((item) => this.serializeForDevTools(item, seen));
		}
		if (typeof data === "bigint") return data.toString();
		if (data instanceof Date) return data.toISOString();
		if (typeof data === "object") {
			if (seen.has(data)) return "[Circular Object]";
			seen.add(data);
			const result = {};
			for (const key in data) if (Object.prototype.hasOwnProperty.call(data, key)) result[key] = this.serializeForDevTools(data[key], seen);
			return result;
		}
		return data;
	}
	/** Installs the `window.__SPOOKY__` devtools API and announces presence. */
	exposeToWindow() {
		if (typeof window !== "undefined") {
			window.__SPOOKY__ = {
				version: this.version,
				getState: () => this.getState(),
				clearHistory: () => {
					this.eventsHistory = [];
					this.notifyDevTools();
				},
				getTableData: async (tableName) => {
					try {
						// SECURITY NOTE: tableName is interpolated into the query;
						// acceptable only because this is a local devtools hook.
						const result = await this.databaseService.query(`SELECT * FROM ${tableName}`);
						let records = [];
						if (Array.isArray(result) && result.length > 0) {
							const first = result[0];
							if (Array.isArray(first)) records = first;
							else if (first && typeof first === "object" && "result" in first && "status" in first) records = Array.isArray(first.result) ? first.result : [];
							else records = result;
						} else if (Array.isArray(result)) records = [];
						return this.serializeForDevTools(records) || [];
					} catch (e) {
						this.logger.error({
							err: e,
							Category: "spooky-client::DevToolsService::exposeToWindow"
						}, "Failed to get table data");
						return [];
					}
				},
				updateTableRow: async (tableName, recordId, updates) => {
					try {
						await this.databaseService.query(`UPDATE ${recordId} MERGE $updates`, { updates });
						return { success: true };
					} catch (e) {
						return {
							success: false,
							error: e.message
						};
					}
				},
				deleteTableRow: async (tableName, recordId) => {
					try {
						await this.databaseService.query(`DELETE ${recordId}`);
						return { success: true };
					} catch (e) {
						return {
							success: false,
							error: e.message
						};
					}
				},
				runQuery: async (query, target = "local") => {
					try {
						this.logger.debug({
							query,
							target,
							Category: "spooky-client::DevToolsService::runQuery"
						}, "Running query (START)");
						const service = target === "remote" ? this.remoteDatabaseService : this.databaseService;
						const startTime = Date.now();
						const result = await service.query(query);
						const queryTime = Date.now() - startTime;
						this.logger.debug({
							query,
							time: queryTime,
							resultType: typeof result,
							isArray: Array.isArray(result),
							Category: "spooky-client::DevToolsService::runQuery"
						}, "Database returned result");
						const serializeStart = Date.now();
						const serialized = this.serializeForDevTools(result);
						const serializeTime = Date.now() - serializeStart;
						this.logger.debug({
							serializeTime,
							serializedLength: JSON.stringify(serialized).length,
							Category: "spooky-client::DevToolsService::runQuery"
						}, "Serialization complete");
						return {
							success: true,
							data: serialized,
							target
						};
					} catch (e) {
						this.logger.error({
							err: e,
							query,
							target,
							Category: "spooky-client::DevToolsService::runQuery"
						}, "Query execution failed");
						return {
							success: false,
							error: (e instanceof Error ? e.message : typeof e === "string" ? e : JSON.stringify(e)) || "Unknown error occurred"
						};
					}
				}
			};
			window.postMessage({
				type: "SPOOKY_DETECTED",
				source: "spooky-devtools-page",
				data: {
					version: this.version,
					detected: true
				}
			}, "*");
		}
	}
};
|
|
2292
|
+
|
|
2293
|
+
//#endregion
|
|
2294
|
+
//#region src/modules/auth/index.ts
|
|
2295
|
+
var AuthService = class {
	// Current raw access token (null when signed out).
	token = null;
	// User record fetched from the remote DB (null when signed out).
	currentUser = null;
	isAuthenticated = false;
	// true while a session check is in flight; starts true until init() completes.
	isLoading = true;
	events = createAuthEventSystem();
	/** Read-only access to the underlying auth event system. */
	get eventSystem() {
		return this.events;
	}
	/**
	 * @param schema - app schema; `schema.access` holds the signup/signin access definitions
	 * @param remote - remote database service (provides the SurrealDB client)
	 * @param persistenceClient - KV store used to persist the auth token
	 * @param logger - structured logger
	 */
	constructor(schema, remote, persistenceClient, logger) {
		this.schema = schema;
		this.remote = remote;
		this.persistenceClient = persistenceClient;
		this.logger = logger;
	}
	/** Validate any persisted session on startup. */
	async init() {
		await this.check();
	}
	/** Look up a named access definition from the schema (undefined if not declared). */
	getAccessDefinition(name) {
		return this.schema.access?.[name];
	}
	/**
	 * Subscribe to auth state changes.
	 * callback is called immediately with current value and whenever validation status changes.
	 * @returns an unsubscribe function.
	 */
	subscribe(cb) {
		cb(this.currentUser?.id || null);
		const id = this.events.subscribe(AuthEventTypes.AuthStateChanged, (event) => {
			cb(event.payload);
		});
		return () => {
			this.events.unsubscribe(id);
		};
	}
	/** Broadcast the current user id (or null) to all subscribers. */
	notifyListeners() {
		const userId = this.currentUser?.id || null;
		this.events.emit(AuthEventTypes.AuthStateChanged, userId);
	}
	/**
	 * Check for existing session and validate.
	 * @param accessToken - optional token to validate; falls back to the persisted one.
	 * Clears the session via signOut() on any validation failure.
	 */
	async check(accessToken) {
		this.isLoading = true;
		try {
			const token = accessToken || await this.persistenceClient.get("spooky_auth_token");
			if (!token) {
				this.logger.debug({ Category: "spooky-client::AuthService::check" }, "No token found in storage or arguments");
				this.isLoading = false;
				this.isAuthenticated = false;
				this.notifyListeners();
				return;
			}
			await this.remote.getClient().authenticate(token);
			const result = await this.remote.query("SELECT * FROM ONLY $auth.id");
			// Query responses may be nested one level depending on the driver; unwrap both shapes.
			const items = Array.isArray(result) && Array.isArray(result[0]) ? result[0] : result;
			const user = Array.isArray(items) ? items[0] : items;
			if (user && user.id) {
				this.logger.info({
					user,
					Category: "spooky-client::AuthService::check"
				}, "Auth check complete (via $auth.id)");
				await this.setSession(token, user);
			} else {
				// `ONLY $auth.id` can come back empty on some setups; retry with an explicit lookup.
				this.logger.warn({ Category: "spooky-client::AuthService::check" }, "$auth.id empty, attempting manual user fetch");
				const manualResult = await this.remote.query("SELECT * FROM user WHERE id = $auth.id LIMIT 1");
				const manualItems = Array.isArray(manualResult) && Array.isArray(manualResult[0]) ? manualResult[0] : manualResult;
				const manualUser = Array.isArray(manualItems) ? manualItems[0] : manualItems;
				if (manualUser && manualUser.id) {
					this.logger.info({
						user: manualUser,
						Category: "spooky-client::AuthService::check"
					}, "Auth check complete (via manual fetch)");
					await this.setSession(token, manualUser);
				} else {
					this.logger.warn({ Category: "spooky-client::AuthService::check" }, "Token valid but user not found via fallback");
					await this.signOut();
				}
			}
		} catch (error) {
			this.logger.error({
				error,
				// `error` is not guaranteed to be an Error instance (string/null rejections);
				// guard the stack access so logging never masks the original failure.
				stack: error instanceof Error ? error.stack : void 0,
				Category: "spooky-client::AuthService::check"
			}, "Auth check failed");
			await this.signOut();
		} finally {
			this.isLoading = false;
		}
	}
	/**
	 * Sign out and clear session
	 */
	async signOut() {
		this.token = null;
		this.currentUser = null;
		this.isAuthenticated = false;
		await this.persistenceClient.remove("spooky_auth_token");
		try {
			await this.remote.getClient().invalidate();
		} catch (e) {
			// Best-effort: remote invalidation may fail while offline; local state is already cleared.
		}
		this.notifyListeners();
	}
	/** Persist the token, mark the session authenticated, and notify subscribers. */
	async setSession(token, user) {
		this.token = token;
		this.currentUser = user;
		this.isAuthenticated = true;
		await this.persistenceClient.set("spooky_auth_token", token);
		this.notifyListeners();
	}
	/**
	 * Sign up through the named access definition, then validate the returned token.
	 * @throws when the access definition is missing or required params are absent.
	 */
	async signUp(accessName, params) {
		const def = this.getAccessDefinition(accessName);
		if (!def) throw new Error(`Access definition '${accessName}' not found`);
		const runtimeParams = params;
		const missingParams = Object.entries(def.signup.params).filter(([name, schema]) => !schema.optional && !(name in runtimeParams)).map(([name]) => name);
		if (missingParams.length > 0) throw new Error(`Missing required signup params for '${accessName}': ${missingParams.join(", ")}`);
		this.logger.info({
			accessName,
			runtimeParams,
			Category: "spooky-client::AuthService::signUp"
		}, "Attempting signup");
		const { access } = await this.remote.getClient().signup({
			access: accessName,
			variables: runtimeParams
		});
		this.logger.info({ Category: "spooky-client::AuthService::signUp" }, "Signup successful, token received");
		await this.check(access);
	}
	/**
	 * Sign in through the named access definition, then validate the returned token.
	 * @throws when the access definition is missing or required params are absent.
	 */
	async signIn(accessName, params) {
		const def = this.getAccessDefinition(accessName);
		if (!def) throw new Error(`Access definition '${accessName}' not found`);
		const runtimeParams = params;
		const missingParams = Object.entries(def.signIn.params).filter(([name, schema]) => !schema.optional && !(name in runtimeParams)).map(([name]) => name);
		if (missingParams.length > 0) throw new Error(`Missing required signin params for '${accessName}': ${missingParams.join(", ")}`);
		this.logger.info({
			accessName,
			Category: "spooky-client::AuthService::signIn"
		}, "Attempting signin");
		const { access } = await this.remote.getClient().signin({
			access: accessName,
			variables: runtimeParams
		});
		await this.check(access);
	}
};
|
|
2439
|
+
|
|
2440
|
+
//#endregion
|
|
2441
|
+
//#region src/services/stream-processor/index.ts
|
|
2442
|
+
var StreamProcessorService = class {
	logger;
	// WASM SpookyProcessor instance; undefined until init() succeeds.
	processor;
	isInitialized = false;
	// Registered StreamUpdateReceiver instances (DataManager, DevTools, ...).
	receivers = [];
	constructor(events, db, persistenceClient, logger) {
		this.events = events;
		this.db = db;
		this.persistenceClient = persistenceClient;
		this.logger = logger.child({ name: "StreamProcessorService" });
	}
	/**
	 * Add a receiver for stream updates.
	 * Multiple receivers can be registered (DataManager, DevTools, etc.)
	 */
	addReceiver(receiver) {
		this.receivers.push(receiver);
	}
	/** Fan each update out to every registered receiver, in registration order. */
	notifyUpdates(updates) {
		for (const update of updates) for (const receiver of this.receivers) receiver.onStreamUpdate(update);
	}
	/**
	 * Initialize the WASM module and processor.
	 * This must be called before using other methods.
	 * @throws rethrows any WASM/init failure after logging it.
	 */
	async init() {
		if (this.isInitialized) return;
		this.logger.info({ Category: "spooky-client::StreamProcessorService::init" }, "Initializing WASM...");
		try {
			await init();
			this.processor = new SpookyProcessor();
			await this.loadState();
			this.isInitialized = true;
			this.logger.info({ Category: "spooky-client::StreamProcessorService::init" }, "Initialized successfully");
		} catch (e) {
			this.logger.error({
				error: e,
				Category: "spooky-client::StreamProcessorService::init"
			}, "Failed to initialize");
			throw e;
		}
	}
	/** Restore previously-saved processor state from the persistence layer (best-effort; errors are logged, not thrown). */
	async loadState() {
		if (!this.processor) return;
		try {
			const result = await this.persistenceClient.get("_spooky_stream_processor_state");
			// Expected shape: [[{ state }]] — validate every level before dereferencing.
			if (Array.isArray(result) && result.length > 0 && Array.isArray(result[0]) && result[0].length > 0 && result[0][0]?.state) {
				const state = result[0][0].state;
				this.logger.info({
					stateLength: state.length,
					Category: "spooky-client::StreamProcessorService::loadState"
				}, "Loading state from DB");
				if (typeof this.processor.load_state === "function") this.processor.load_state(state);
				else this.logger.warn({ Category: "spooky-client::StreamProcessorService::loadState" }, "load_state method not found on processor");
			} else this.logger.info({ Category: "spooky-client::StreamProcessorService::loadState" }, "No saved state found");
		} catch (e) {
			this.logger.error({
				error: e,
				Category: "spooky-client::StreamProcessorService::loadState"
			}, "Failed to load state");
		}
	}
	/** Persist the processor state (best-effort; errors are logged, not thrown). */
	async saveState() {
		if (!this.processor) return;
		try {
			if (typeof this.processor.save_state === "function") {
				const state = this.processor.save_state();
				if (state) {
					await this.persistenceClient.set("_spooky_stream_processor_state", state);
					this.logger.trace({ Category: "spooky-client::StreamProcessorService::saveState" }, "State saved");
				}
			}
		} catch (e) {
			this.logger.error({
				error: e,
				Category: "spooky-client::StreamProcessorService::saveState"
			}, "Failed to save state");
		}
	}
	/**
	 * Ingest a record change into the processor.
	 * Emits 'stream_update' event if materialized views are affected.
	 * @param isOptimistic true = local mutation (increment versions), false = remote sync (keep versions)
	 * @returns the raw view updates produced by the processor (always an array).
	 */
	ingest(table, op, id, record) {
		this.logger.debug({
			table,
			op,
			id,
			Category: "spooky-client::StreamProcessorService::ingest"
		}, "Ingesting into ssp");
		if (!this.processor) {
			this.logger.warn({ Category: "spooky-client::StreamProcessorService::ingest" }, "Not initialized, skipping ingest");
			return [];
		}
		try {
			const normalizedRecord = this.normalizeValue(record);
			const rawUpdates = this.processor.ingest(table, op, id, normalizedRecord);
			this.logger.debug({
				table,
				op,
				id,
				// `rawUpdates` may be undefined when the WASM call produces no updates —
				// use optional chaining so the log line itself cannot throw.
				rawUpdates: rawUpdates?.length,
				Category: "spooky-client::StreamProcessorService::ingest"
			}, "Ingesting into ssp done");
			if (rawUpdates && Array.isArray(rawUpdates) && rawUpdates.length > 0) {
				const updates = rawUpdates.map((u) => ({
					queryHash: u.query_id,
					localArray: u.result_data,
					op
				}));
				this.notifyUpdates(updates);
			}
			// Fire-and-forget: saveState() catches and logs its own errors.
			this.saveState();
			// Normalize the return so callers always receive an array (matches the error path).
			return Array.isArray(rawUpdates) ? rawUpdates : [];
		} catch (e) {
			this.logger.error({
				error: e,
				Category: "spooky-client::StreamProcessorService::ingest"
			}, "Ingesting into ssp failed");
		}
		return [];
	}
	/**
	 * Register a new query plan.
	 * Emits 'stream_update' with the initial result.
	 * @returns the initial update { queryHash, localArray }, or undefined when not initialized.
	 * @throws when the processor rejects the registration.
	 */
	registerQueryPlan(queryPlan) {
		if (!this.processor) {
			this.logger.warn({ Category: "spooky-client::StreamProcessorService::registerQueryPlan" }, "Not initialized, skipping registration");
			return;
		}
		this.logger.debug({
			queryHash: queryPlan.queryHash,
			surql: queryPlan.surql,
			params: queryPlan.params,
			Category: "spooky-client::StreamProcessorService::registerQueryPlan"
		}, "Registering query plan");
		try {
			const normalizedParams = this.normalizeValue(queryPlan.params);
			const initialUpdate = this.processor.register_view({
				id: queryPlan.queryHash,
				surql: queryPlan.surql,
				params: normalizedParams,
				clientId: "local",
				ttl: queryPlan.ttl.toString(),
				lastActiveAt: (/* @__PURE__ */ new Date()).toISOString()
			});
			this.logger.debug({
				initialUpdate,
				Category: "spooky-client::StreamProcessorService::registerQueryPlan"
			}, "register_view result");
			if (!initialUpdate) throw new Error("Failed to register query plan");
			const update = {
				queryHash: initialUpdate.query_id,
				localArray: initialUpdate.result_data
			};
			this.saveState();
			this.logger.debug({
				queryHash: queryPlan.queryHash,
				surql: queryPlan.surql,
				params: queryPlan.params,
				Category: "spooky-client::StreamProcessorService::registerQueryPlan"
			}, "Registered query plan");
			return update;
		} catch (e) {
			this.logger.error({
				error: e,
				Category: "spooky-client::StreamProcessorService::registerQueryPlan"
			}, "Error registering query plan");
			throw e;
		}
	}
	/**
	 * Unregister a query plan by ID.
	 */
	unregisterQueryPlan(queryHash) {
		if (!this.processor) return;
		try {
			this.processor.unregister_view(queryHash);
			this.saveState();
		} catch (e) {
			this.logger.error({
				error: e,
				Category: "spooky-client::StreamProcessorService::unregisterQueryPlan"
			}, "Error unregistering query plan");
		}
	}
	/**
	 * Recursively convert driver-specific values into plain JSON-safe data for the WASM layer.
	 * RecordId-like objects (table + id + custom toString) become their string form.
	 */
	normalizeValue(value) {
		if (value === null || value === void 0) return value;
		if (typeof value === "object") {
			// Duck-type the SDK's RecordId class: has table/id, a custom toString, and is not a plain object.
			const hasTable = "table" in value && typeof value.table?.toString === "function";
			const hasId = "id" in value;
			const hasToString = typeof value.toString === "function";
			const isNotPlainObject = value.constructor !== Object;
			if (hasTable && hasId && hasToString && isNotPlainObject) {
				const result = value.toString();
				this.logger.trace({
					result,
					Category: "spooky-client::StreamProcessorService::normalizeValue"
				}, "RecordId detected");
				return result;
			}
			// Legacy { tb, id } record-id shape.
			if ("tb" in value && "id" in value && !("table" in value)) return `${value.tb}:${value.id}`;
			if (Array.isArray(value)) return value.map((v) => this.normalizeValue(v));
			if (value.constructor === Object) {
				const out = {};
				for (const k in value) out[k] = this.normalizeValue(value[k]);
				return out;
			}
		}
		return value;
	}
};
|
|
2656
|
+
|
|
2657
|
+
//#endregion
|
|
2658
|
+
//#region src/modules/cache/index.ts
|
|
2659
|
+
/**
|
|
2660
|
+
* CacheModule - Centralized storage and DBSP ingestion
|
|
2661
|
+
*
|
|
2662
|
+
* Single responsibility: Handle all local storage operations and DBSP ingestion.
|
|
2663
|
+
* This module acts as the bridge between data operations and persistence.
|
|
2664
|
+
*/
|
|
2665
|
+
/**
 * CacheModule - Centralized storage and DBSP ingestion
 *
 * Single responsibility: Handle all local storage operations and DBSP ingestion.
 * This module acts as the bridge between data operations and persistence.
 */
var CacheModule = class {
	logger;
	streamUpdateCallback;
	// Map of encoded record id -> last seen version, used for conflict/version lookups.
	versionLookups = {};
	constructor(local, streamProcessor, streamUpdateCallback, logger) {
		this.local = local;
		this.streamProcessor = streamProcessor;
		this.logger = logger.child({ service: "CacheModule" });
		this.streamUpdateCallback = streamUpdateCallback;
		// Register ourselves so onStreamUpdate() is invoked on view changes.
		this.streamProcessor.addReceiver(this);
	}
	/**
	 * Implements StreamUpdateReceiver interface
	 * Called directly by StreamProcessor when views change
	 */
	onStreamUpdate(update) {
		this.logger.debug({
			queryHash: update.queryHash,
			arrayLength: update.localArray?.length,
			Category: "spooky-client::CacheModule::onStreamUpdate"
		}, "Stream update received");
		this.streamUpdateCallback(update);
	}
	/** Return the last known version for a record id (0 if never seen). */
	lookup(recordId) {
		return this.versionLookups[recordId] ?? 0;
	}
	/**
	 * Save a single record to local DB and ingest into DBSP
	 * Used by mutations (create/update)
	 */
	async save(cacheRecord, skipDbInsert = false) {
		return this.saveBatch([cacheRecord], skipDbInsert);
	}
	/**
	 * Save multiple records in a batch
	 * More efficient than calling save() multiple times
	 * Used by sync operations
	 * @throws rethrows DB/ingest errors after logging; requires each record to carry a version.
	 */
	async saveBatch(records, skipDbInsert = false) {
		if (records.length === 0) return;
		this.logger.debug({
			count: records.length,
			Category: "spooky-client::CacheModule::saveBatch"
		}, "Saving record batch");
		try {
			// Stamp each record's payload with its version (spooky_rv) before persisting.
			const populatedRecords = records.map((record) => {
				if (!record.version) throw new Error("Record version is required");
				return {
					...record,
					record: {
						...record.record,
						spooky_rv: record.version
					}
				};
			});
			if (!skipDbInsert) {
				const query = surql.seal(surql.tx(populatedRecords.map((_, i) => {
					return surql.upsert(`id${i}`, `content${i}`);
				})));
				// Build bind params in a single pass (avoids the O(n^2) spread-accumulator pattern).
				const params = {};
				populatedRecords.forEach((record, i) => {
					const { id, ...content } = record.record;
					params[`id${i}`] = id;
					params[`content${i}`] = content;
				});
				await this.local.execute(query, params);
			}
			// Always ingest into DBSP and refresh the version lookup, even when the DB write was skipped.
			for (const record of populatedRecords) {
				const recordId = encodeRecordId(record.record.id);
				this.versionLookups[recordId] = record.version;
				this.streamProcessor.ingest(record.table, record.op, recordId, record.record);
			}
			this.logger.debug({
				count: records.length,
				Category: "spooky-client::CacheModule::saveBatch"
			}, "Batch saved successfully");
		} catch (err) {
			this.logger.error({
				err,
				count: records.length,
				Category: "spooky-client::CacheModule::saveBatch"
			}, "Failed to save batch");
			throw err;
		}
	}
	/**
	 * Delete a record from local DB and ingest deletion into DBSP
	 * @throws rethrows DB/ingest errors after logging.
	 */
	async delete(table, id, skipDbDelete = false) {
		this.logger.debug({
			table,
			id,
			Category: "spooky-client::CacheModule::delete"
		}, "Deleting record");
		try {
			if (!skipDbDelete) await this.local.query("DELETE $id", { id: parseRecordIdString(id) });
			delete this.versionLookups[id];
			await this.streamProcessor.ingest(table, "DELETE", id, {});
			this.logger.debug({
				table,
				id,
				Category: "spooky-client::CacheModule::delete"
			}, "Record deleted successfully");
		} catch (err) {
			this.logger.error({
				err,
				table,
				id,
				Category: "spooky-client::CacheModule::delete"
			}, "Failed to delete record");
			throw err;
		}
	}
	/**
	 * Register a query with DBSP to create a materialized view
	 * Returns the initial result array
	 * @throws when DBSP registration fails or yields no update.
	 */
	registerQuery(config) {
		this.logger.debug({
			queryHash: config.queryHash,
			surql: config.surql,
			Category: "spooky-client::CacheModule::registerQuery"
		}, "Registering query");
		try {
			const update = this.streamProcessor.registerQueryPlan({
				queryHash: config.queryHash,
				surql: config.surql,
				params: config.params,
				ttl: config.ttl,
				lastActiveAt: config.lastActiveAt,
				localArray: [],
				remoteArray: [],
				meta: { tableName: "" }
			});
			if (!update) throw new Error("Failed to register query with DBSP");
			this.logger.debug({
				queryHash: config.queryHash,
				arrayLength: update.localArray?.length,
				Category: "spooky-client::CacheModule::registerQuery"
			}, "Query registered successfully");
			return { localArray: update.localArray };
		} catch (err) {
			this.logger.error({
				err,
				queryHash: config.queryHash,
				Category: "spooky-client::CacheModule::registerQuery"
			}, "Failed to register query");
			throw err;
		}
	}
	/**
	 * Unregister a query from DBSP
	 * Best-effort: failures are logged but not rethrown.
	 */
	unregisterQuery(queryHash) {
		this.logger.debug({
			queryHash,
			Category: "spooky-client::CacheModule::unregisterQuery"
		}, "Unregistering query");
		try {
			this.streamProcessor.unregisterQueryPlan(queryHash);
			this.logger.debug({
				queryHash,
				Category: "spooky-client::CacheModule::unregisterQuery"
			}, "Query unregistered successfully");
		} catch (err) {
			this.logger.error({
				err,
				queryHash,
				Category: "spooky-client::CacheModule::unregisterQuery"
			}, "Failed to unregister query");
		}
	}
};
|
|
2840
|
+
|
|
2841
|
+
//#endregion
|
|
2842
|
+
//#region src/services/persistence/localstorage.ts
|
|
2843
|
+
/** Promise-based KV persistence backed by the browser's localStorage. */
var LocalStoragePersistenceClient = class {
	logger;
	constructor(logger) {
		this.logger = logger.child({ service: "PersistenceClient:LocalStorage" });
	}
	/** Store a JSON-serializable value under `key`. */
	set(key, value) {
		localStorage.setItem(key, JSON.stringify(value));
		return Promise.resolve();
	}
	/**
	 * Read the value stored under `key`.
	 * @returns the parsed value, or null when absent or unparsable.
	 */
	get(key) {
		const value = localStorage.getItem(key);
		if (!value) return Promise.resolve(null);
		try {
			return Promise.resolve(JSON.parse(value));
		} catch (err) {
			// A corrupted entry (e.g. written by another version or truncated) would
			// otherwise make every read of this key throw forever; treat it as absent.
			this.logger.warn({
				err,
				key,
				Category: "spooky-client::LocalStoragePersistenceClient::get"
			}, "Failed to parse persisted value");
			return Promise.resolve(null);
		}
	}
	/** Remove the value stored under `key` (no-op if absent). */
	remove(key) {
		localStorage.removeItem(key);
		return Promise.resolve();
	}
};
|
|
2862
|
+
|
|
2863
|
+
//#endregion
|
|
2864
|
+
//#region src/services/persistence/surrealdb.ts
|
|
2865
|
+
/** Promise-based KV persistence backed by the local SurrealDB `_spooky_kv` table. */
var SurrealDBPersistenceClient = class {
	logger;
	constructor(db, logger) {
		this.db = db;
		this.logger = logger.child({ service: "PersistenceClient:SurrealDb" });
	}
	/** Build the record id for a KV entry. */
	#kvId(key) {
		return parseRecordIdString(`_spooky_kv:${key}`);
	}
	/**
	 * Upsert `{ val }` under the KV record for `key`.
	 * @throws rethrows after logging when the write fails.
	 */
	async set(key, val) {
		try {
			await this.db.query(surql.seal(surql.upsert("id", "data")), {
				id: this.#kvId(key),
				data: { val }
			});
		} catch (error) {
			this.logger.error({
				error,
				Category: "spooky-client::SurrealDBPersistenceClient::set"
			}, "Failed to set KV");
			throw error;
		}
	}
	/**
	 * Read the value stored under `key`.
	 * @returns the stored value, or null when absent, falsy, or on query failure.
	 */
	async get(key) {
		try {
			const rows = await this.db.query(surql.seal(surql.selectById("id", ["val"])), { id: this.#kvId(key) });
			const stored = rows[0]?.val;
			// Falsy stored values are reported as "absent" (matches caller expectations).
			return stored || null;
		} catch (error) {
			this.logger.warn({
				error,
				Category: "spooky-client::SurrealDBPersistenceClient::get"
			}, "Failed to get KV");
			return null;
		}
	}
	/** Delete the KV record for `key`; failures are logged and swallowed. */
	async remove(key) {
		try {
			await this.db.query(surql.seal(surql.delete("id")), { id: this.#kvId(key) });
		} catch (err) {
			this.logger.info({
				err,
				Category: "spooky-client::SurrealDBPersistenceClient::remove"
			}, "Failed to delete KV");
		}
	}
};
|
|
2912
|
+
|
|
2913
|
+
//#endregion
|
|
2914
|
+
//#region src/spooky.ts
|
|
2915
|
+
var SpookyClient = class {
|
|
2916
|
+
local;
|
|
2917
|
+
remote;
|
|
2918
|
+
persistenceClient;
|
|
2919
|
+
migrator;
|
|
2920
|
+
cache;
|
|
2921
|
+
dataModule;
|
|
2922
|
+
sync;
|
|
2923
|
+
devTools;
|
|
2924
|
+
logger;
|
|
2925
|
+
auth;
|
|
2926
|
+
streamProcessor;
|
|
2927
|
+
get remoteClient() {
|
|
2928
|
+
return this.remote.getClient();
|
|
2929
|
+
}
|
|
2930
|
+
get localClient() {
|
|
2931
|
+
return this.local.getClient();
|
|
2932
|
+
}
|
|
2933
|
+
get pendingMutationCount() {
|
|
2934
|
+
return this.sync.pendingMutationCount;
|
|
2935
|
+
}
|
|
2936
|
+
subscribeToPendingMutations(cb) {
|
|
2937
|
+
return this.sync.subscribeToPendingMutations(cb);
|
|
2938
|
+
}
|
|
2939
|
+
constructor(config) {
|
|
2940
|
+
this.config = config;
|
|
2941
|
+
const logger = createLogger(config.logLevel ?? "info", config.otelEndpoint);
|
|
2942
|
+
this.logger = logger.child({ service: "SpookyClient" });
|
|
2943
|
+
this.logger.info({
|
|
2944
|
+
config: {
|
|
2945
|
+
...config,
|
|
2946
|
+
schema: "[SchemaStructure]"
|
|
2947
|
+
},
|
|
2948
|
+
Category: "spooky-client::SpookyClient::constructor"
|
|
2949
|
+
}, "SpookyClient initialized");
|
|
2950
|
+
this.local = new LocalDatabaseService(this.config.database, logger);
|
|
2951
|
+
this.remote = new RemoteDatabaseService(this.config.database, logger);
|
|
2952
|
+
if (config.persistenceClient === "surrealdb") this.persistenceClient = new SurrealDBPersistenceClient(this.local, logger);
|
|
2953
|
+
else if (config.persistenceClient === "localstorage" || !config.persistenceClient) this.persistenceClient = new LocalStoragePersistenceClient(logger);
|
|
2954
|
+
else this.persistenceClient = config.persistenceClient;
|
|
2955
|
+
this.streamProcessor = new StreamProcessorService(new EventSystem(["stream_update"]), this.local, this.persistenceClient, logger);
|
|
2956
|
+
this.migrator = new LocalMigrator(this.local, logger);
|
|
2957
|
+
this.cache = new CacheModule(this.local, this.streamProcessor, (update) => {
|
|
2958
|
+
this.dataModule.onStreamUpdate(update);
|
|
2959
|
+
}, logger);
|
|
2960
|
+
this.dataModule = new DataModule(this.cache, this.local, this.config.schema, logger, this.config.streamDebounceTime);
|
|
2961
|
+
this.auth = new AuthService(this.config.schema, this.remote, this.persistenceClient, logger);
|
|
2962
|
+
this.sync = new SpookySync(this.local, this.remote, this.cache, this.dataModule, this.config.schema, this.logger);
|
|
2963
|
+
this.devTools = new DevToolsService(this.local, this.remote, logger, this.config.schema, this.auth, this.dataModule);
|
|
2964
|
+
this.streamProcessor.addReceiver(this.devTools);
|
|
2965
|
+
this.setupCallbacks();
|
|
2966
|
+
}
|
|
2967
|
+
/**
|
|
2968
|
+
* Setup direct callbacks instead of event subscriptions
|
|
2969
|
+
*/
|
|
2970
|
+
setupCallbacks() {
|
|
2971
|
+
this.dataModule.onMutation((mutations) => {
|
|
2972
|
+
this.devTools.onMutation(mutations);
|
|
2973
|
+
if (mutations.length > 0) this.sync.enqueueMutation(mutations);
|
|
2974
|
+
});
|
|
2975
|
+
this.sync.events.subscribe("SYNC_QUERY_UPDATED", (event) => {
|
|
2976
|
+
this.devTools.logEvent("SYNC_QUERY_UPDATED", event.payload);
|
|
2977
|
+
});
|
|
2978
|
+
this.local.getEvents().subscribe("DATABASE_LOCAL_QUERY", (event) => {
|
|
2979
|
+
this.devTools.logEvent("LOCAL_QUERY", event.payload);
|
|
2980
|
+
});
|
|
2981
|
+
this.remote.getEvents().subscribe("DATABASE_REMOTE_QUERY", (event) => {
|
|
2982
|
+
this.devTools.logEvent("REMOTE_QUERY", event.payload);
|
|
2983
|
+
});
|
|
2984
|
+
}
|
|
2985
|
+
async init() {
|
|
2986
|
+
this.logger.info({ Category: "spooky-client::SpookyClient::init" }, "SpookyClient initialization started");
|
|
2987
|
+
try {
|
|
2988
|
+
const clientId = this.config.clientId ?? await this.loadOrGenerateClientId();
|
|
2989
|
+
this.persistClientId(clientId);
|
|
2990
|
+
this.logger.debug({
|
|
2991
|
+
clientId,
|
|
2992
|
+
Category: "spooky-client::SpookyClient::init"
|
|
2993
|
+
}, "Client ID loaded");
|
|
2994
|
+
await this.local.connect();
|
|
2995
|
+
this.logger.debug({ Category: "spooky-client::SpookyClient::init" }, "Local database connected");
|
|
2996
|
+
await this.migrator.provision(this.config.schemaSurql);
|
|
2997
|
+
this.logger.debug({ Category: "spooky-client::SpookyClient::init" }, "Schema provisioned");
|
|
2998
|
+
await this.remote.connect();
|
|
2999
|
+
this.logger.debug({ Category: "spooky-client::SpookyClient::init" }, "Remote database connected");
|
|
3000
|
+
await this.streamProcessor.init();
|
|
3001
|
+
this.logger.debug({ Category: "spooky-client::SpookyClient::init" }, "StreamProcessor initialized");
|
|
3002
|
+
await this.auth.init();
|
|
3003
|
+
this.logger.debug({ Category: "spooky-client::SpookyClient::init" }, "Auth initialized");
|
|
3004
|
+
await this.dataModule.init();
|
|
3005
|
+
this.logger.debug({ Category: "spooky-client::SpookyClient::init" }, "DataModule initialized");
|
|
3006
|
+
await this.sync.init(clientId);
|
|
3007
|
+
this.logger.debug({ Category: "spooky-client::SpookyClient::init" }, "Sync initialized");
|
|
3008
|
+
this.logger.info({ Category: "spooky-client::SpookyClient::init" }, "SpookyClient initialization completed successfully");
|
|
3009
|
+
} catch (e) {
|
|
3010
|
+
this.logger.error({
|
|
3011
|
+
error: e,
|
|
3012
|
+
Category: "spooky-client::SpookyClient::init"
|
|
3013
|
+
}, "SpookyClient initialization failed");
|
|
3014
|
+
throw e;
|
|
3015
|
+
}
|
|
3016
|
+
}
|
|
3017
|
+
async close() {
|
|
3018
|
+
await this.local.close();
|
|
3019
|
+
await this.remote.close();
|
|
3020
|
+
}
|
|
3021
|
+
authenticate(token) {
|
|
3022
|
+
return this.remote.getClient().authenticate(token);
|
|
3023
|
+
}
|
|
3024
|
+
deauthenticate() {
|
|
3025
|
+
return this.remote.getClient().invalidate();
|
|
3026
|
+
}
|
|
3027
|
+
query(table, options, ttl = "10m") {
|
|
3028
|
+
return new QueryBuilder(this.config.schema, table, async (q) => ({ hash: await this.initQuery(table, q, ttl) }), options);
|
|
3029
|
+
}
|
|
3030
|
+
async initQuery(table, q, ttl) {
|
|
3031
|
+
const tableSchema = this.config.schema.tables.find((t) => t.name === table);
|
|
3032
|
+
if (!tableSchema) throw new Error(`Table ${table} not found`);
|
|
3033
|
+
const hash = await this.dataModule.query(table, q.selectQuery.query, parseParams(tableSchema.columns, q.selectQuery.vars ?? {}), ttl);
|
|
3034
|
+
await this.sync.enqueueDownEvent({
|
|
3035
|
+
type: "register",
|
|
3036
|
+
payload: { hash }
|
|
3037
|
+
});
|
|
3038
|
+
return hash;
|
|
3039
|
+
}
|
|
3040
|
+
async queryRaw(sql, params, ttl) {
|
|
3041
|
+
const tableName = sql.split("FROM ")[1].split(" ")[0];
|
|
3042
|
+
return this.dataModule.query(tableName, sql, params, ttl);
|
|
3043
|
+
}
|
|
3044
|
+
async subscribe(queryHash, callback, options) {
|
|
3045
|
+
return this.dataModule.subscribe(queryHash, callback, options);
|
|
3046
|
+
}
|
|
3047
|
+
run(backend, path, payload, options) {
|
|
3048
|
+
return this.dataModule.run(backend, path, payload, options);
|
|
3049
|
+
}
|
|
3050
|
+
create(id, data) {
|
|
3051
|
+
return this.dataModule.create(id, data);
|
|
3052
|
+
}
|
|
3053
|
+
update(table, id, data, options) {
|
|
3054
|
+
return this.dataModule.update(table, id, data, options);
|
|
3055
|
+
}
|
|
3056
|
+
delete(table, id) {
|
|
3057
|
+
return this.dataModule.delete(table, id);
|
|
3058
|
+
}
|
|
3059
|
+
async useRemote(fn) {
|
|
3060
|
+
return fn(this.remote.getClient());
|
|
3061
|
+
}
|
|
3062
|
+
persistClientId(id) {
|
|
3063
|
+
try {
|
|
3064
|
+
this.persistenceClient.set("spooky_client_id", id);
|
|
3065
|
+
} catch (e) {
|
|
3066
|
+
this.logger.warn({
|
|
3067
|
+
error: e,
|
|
3068
|
+
Category: "spooky-client::SpookyClient::persistClientId"
|
|
3069
|
+
}, "Failed to persist client ID");
|
|
3070
|
+
}
|
|
3071
|
+
}
|
|
3072
|
+
async loadOrGenerateClientId() {
|
|
3073
|
+
const clientId = await this.persistenceClient.get("spooky_client_id");
|
|
3074
|
+
if (clientId) return clientId;
|
|
3075
|
+
const newId = generateId();
|
|
3076
|
+
await this.persistClientId(newId);
|
|
3077
|
+
return newId;
|
|
3078
|
+
}
|
|
3079
|
+
};
|
|
3080
|
+
|
|
3081
|
+
//#endregion
|
|
3082
|
+
export { AuthEventTypes, AuthService, SpookyClient, createAuthEventSystem };
|