@concavejs/runtime-cf-base 0.0.1-alpha.6 → 0.0.1-alpha.8
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/dist/durable-objects/blobstore-rpc.d.ts +13 -0
- package/dist/durable-objects/blobstore-rpc.js +27 -0
- package/dist/durable-objects/concave-do-base.d.ts +6 -1
- package/dist/durable-objects/concave-do-base.js +111 -92
- package/dist/durable-objects/docstore-rpc.d.ts +46 -0
- package/dist/durable-objects/docstore-rpc.js +63 -0
- package/dist/durable-objects/scheduler-manager.d.ts +19 -0
- package/dist/durable-objects/scheduler-manager.js +52 -0
- package/dist/durable-objects/sync-notifier.d.ts +16 -0
- package/dist/durable-objects/sync-notifier.js +38 -0
- package/dist/http/http-api.js +42 -8
- package/dist/rpc/docstore-proxy.js +13 -12
- package/dist/udf/executor/do-client-executor.d.ts +1 -1
- package/dist/udf/executor/do-client-executor.js +19 -2
- package/dist/udf/executor/isolated-executor.d.ts +1 -1
- package/dist/udf/executor/isolated-executor.js +2 -2
- package/dist/worker/udf-worker.d.ts +1 -1
- package/dist/worker/udf-worker.js +2 -2
- package/package.json +7 -7
|
@@ -0,0 +1,13 @@
|
|
|
1
|
+
import type { BlobStore, StorageOptions, StorageMetadata } from "@concavejs/core/abstractions";
|
|
2
|
+
/**
|
|
3
|
+
* BlobStore RPC surface for service-binding isolation.
|
|
4
|
+
* Provides async delegation methods for blob storage operations.
|
|
5
|
+
*/
|
|
6
|
+
export declare class BlobStoreRpc {
|
|
7
|
+
private readonly blobstore;
|
|
8
|
+
constructor(blobstore: BlobStore);
|
|
9
|
+
store(buffer: ArrayBuffer, options?: StorageOptions): Promise<StorageMetadata>;
|
|
10
|
+
get(storageId: string): Promise<ArrayBuffer | null>;
|
|
11
|
+
delete(storageId: string): Promise<void>;
|
|
12
|
+
getUrl(storageId: string): Promise<string | null>;
|
|
13
|
+
}
|
|
@@ -0,0 +1,27 @@
|
|
|
1
|
+
/**
|
|
2
|
+
* BlobStore RPC surface for service-binding isolation.
|
|
3
|
+
* Provides async delegation methods for blob storage operations.
|
|
4
|
+
*/
|
|
5
|
+
export class BlobStoreRpc {
|
|
6
|
+
blobstore;
|
|
7
|
+
constructor(blobstore) {
|
|
8
|
+
this.blobstore = blobstore;
|
|
9
|
+
}
|
|
10
|
+
async store(buffer, options) {
|
|
11
|
+
return this.blobstore.store(buffer, options);
|
|
12
|
+
}
|
|
13
|
+
async get(storageId) {
|
|
14
|
+
const result = await this.blobstore.get(storageId);
|
|
15
|
+
if (result === null)
|
|
16
|
+
return null;
|
|
17
|
+
if (result instanceof Blob)
|
|
18
|
+
return result.arrayBuffer();
|
|
19
|
+
return result;
|
|
20
|
+
}
|
|
21
|
+
async delete(storageId) {
|
|
22
|
+
return this.blobstore.delete(storageId);
|
|
23
|
+
}
|
|
24
|
+
async getUrl(storageId) {
|
|
25
|
+
return this.blobstore.getUrl(storageId);
|
|
26
|
+
}
|
|
27
|
+
}
|
|
@@ -63,6 +63,10 @@ export declare class ConcaveDOBase extends DurableObject {
|
|
|
63
63
|
env: any;
|
|
64
64
|
protected scheduler: ScheduledFunctionExecutor;
|
|
65
65
|
protected cronExecutor: CronExecutor;
|
|
66
|
+
private readonly docStoreRpc;
|
|
67
|
+
private readonly blobStoreRpc;
|
|
68
|
+
private readonly syncNotifier;
|
|
69
|
+
private schedulerManager;
|
|
66
70
|
constructor(state: DurableObjectState, env: any, config: ConcaveDOConfig);
|
|
67
71
|
/**
|
|
68
72
|
* Initialize scheduler and cron executor
|
|
@@ -74,6 +78,7 @@ export declare class ConcaveDOBase extends DurableObject {
|
|
|
74
78
|
* runtime auto-discovery from the global module registry.
|
|
75
79
|
*/
|
|
76
80
|
private initializeCronSpecs;
|
|
81
|
+
private currentSnapshotTimestamp;
|
|
77
82
|
/**
|
|
78
83
|
* Main request handler
|
|
79
84
|
*/
|
|
@@ -89,7 +94,7 @@ export declare class ConcaveDOBase extends DurableObject {
|
|
|
89
94
|
/**
|
|
90
95
|
* Execute a UDF
|
|
91
96
|
*/
|
|
92
|
-
protected execute(path: string, args: Record<string, any>, type: "query" | "mutation" | "action", auth?: any, componentPath?: string, requestId?: string): Promise<UdfResult>;
|
|
97
|
+
protected execute(path: string, args: Record<string, any>, type: "query" | "mutation" | "action", auth?: any, componentPath?: string, requestId?: string, snapshotTimestamp?: bigint): Promise<UdfResult>;
|
|
93
98
|
/**
|
|
94
99
|
* Handle scheduled function alarms
|
|
95
100
|
*/
|
|
@@ -12,7 +12,12 @@ import { getSearchIndexesFromSchema, getVectorIndexesFromSchema } from "@concave
|
|
|
12
12
|
import { SchemaService } from "@concavejs/core/kernel";
|
|
13
13
|
import { runAsClientCall, runAsServerCall } from "@concavejs/core/udf";
|
|
14
14
|
import { ScheduledFunctionExecutor, CronExecutor } from "@concavejs/core";
|
|
15
|
-
import {
|
|
15
|
+
import { resolveAuthContext } from "@concavejs/core/http";
|
|
16
|
+
import { AdminAuthError, identityFromToken, isAdminToken, isSystemToken, JWTValidationError, resolveAdminAuthConfigFromEnv, resolveJwtValidationConfigFromEnv, resolveSystemAuthConfigFromEnv, setAdminAuthConfig, setJwtValidationConfig, setSystemAuthConfig, SystemAuthError, } from "@concavejs/core/auth";
|
|
17
|
+
import { DocStoreRpc } from "./docstore-rpc";
|
|
18
|
+
import { BlobStoreRpc } from "./blobstore-rpc";
|
|
19
|
+
import { SyncNotifier } from "./sync-notifier";
|
|
20
|
+
import { SchedulerManager } from "./scheduler-manager";
|
|
16
21
|
const VERSIONED_API_PREFIX = /^\/api\/\d+\.\d+(?:\.\d+)?(?=\/|$)/;
|
|
17
22
|
function stripApiVersionPrefix(pathname) {
|
|
18
23
|
return pathname.replace(VERSIONED_API_PREFIX, "/api");
|
|
@@ -23,6 +28,8 @@ function isReservedApiPath(pathname) {
|
|
|
23
28
|
normalizedPath === "/api/sync" ||
|
|
24
29
|
normalizedPath === "/api/reset-test-state" ||
|
|
25
30
|
normalizedPath === "/api/query" ||
|
|
31
|
+
normalizedPath === "/api/query_ts" ||
|
|
32
|
+
normalizedPath === "/api/query_at_ts" ||
|
|
26
33
|
normalizedPath === "/api/mutation" ||
|
|
27
34
|
normalizedPath === "/api/action") {
|
|
28
35
|
return true;
|
|
@@ -41,6 +48,28 @@ function shouldHandleAsHttpRoute(pathname) {
|
|
|
41
48
|
}
|
|
42
49
|
return !isReservedApiPath(pathname);
|
|
43
50
|
}
|
|
51
|
+
function parseSnapshotTimestamp(value) {
|
|
52
|
+
if (value === undefined || value === null) {
|
|
53
|
+
return undefined;
|
|
54
|
+
}
|
|
55
|
+
if (typeof value === "bigint") {
|
|
56
|
+
return value;
|
|
57
|
+
}
|
|
58
|
+
if (typeof value === "number") {
|
|
59
|
+
if (!Number.isFinite(value) || !Number.isInteger(value) || value < 0) {
|
|
60
|
+
throw new Error("Invalid snapshotTimestamp");
|
|
61
|
+
}
|
|
62
|
+
return BigInt(value);
|
|
63
|
+
}
|
|
64
|
+
if (typeof value === "string") {
|
|
65
|
+
const trimmed = value.trim();
|
|
66
|
+
if (!/^\d+$/.test(trimmed)) {
|
|
67
|
+
throw new Error("Invalid snapshotTimestamp");
|
|
68
|
+
}
|
|
69
|
+
return BigInt(trimmed);
|
|
70
|
+
}
|
|
71
|
+
throw new Error("Invalid snapshotTimestamp");
|
|
72
|
+
}
|
|
44
73
|
/**
|
|
45
74
|
* Base class for Concave Durable Objects
|
|
46
75
|
*
|
|
@@ -61,6 +90,10 @@ export class ConcaveDOBase extends DurableObject {
|
|
|
61
90
|
env;
|
|
62
91
|
scheduler;
|
|
63
92
|
cronExecutor;
|
|
93
|
+
docStoreRpc;
|
|
94
|
+
blobStoreRpc;
|
|
95
|
+
syncNotifier;
|
|
96
|
+
schedulerManager;
|
|
64
97
|
constructor(state, env, config) {
|
|
65
98
|
super(state, env);
|
|
66
99
|
this.doState = state;
|
|
@@ -74,7 +107,6 @@ export class ConcaveDOBase extends DurableObject {
|
|
|
74
107
|
setAdminAuthConfig(adminConfig);
|
|
75
108
|
setSystemAuthConfig(systemConfig);
|
|
76
109
|
const instanceId = state.id.name ?? state.id.toString();
|
|
77
|
-
console.log(`[ConcaveDO.constructor] instanceId=${instanceId}`);
|
|
78
110
|
const adapterContext = {
|
|
79
111
|
state,
|
|
80
112
|
env,
|
|
@@ -84,6 +116,9 @@ export class ConcaveDOBase extends DurableObject {
|
|
|
84
116
|
this._docstore = config.createDocstore ? config.createDocstore(adapterContext) : new DODocStore(state);
|
|
85
117
|
// Create BlobStore (allow override for testing or alternative implementations)
|
|
86
118
|
this._blobstore = config.createBlobstore?.(adapterContext);
|
|
119
|
+
this.docStoreRpc = new DocStoreRpc(this._docstore);
|
|
120
|
+
this.blobStoreRpc = this._blobstore ? new BlobStoreRpc(this._blobstore) : null;
|
|
121
|
+
this.syncNotifier = new SyncNotifier(state, env);
|
|
87
122
|
// Create UDF executor from resolved runtime services
|
|
88
123
|
this.udfExecutor = config.createUdfExecutor({
|
|
89
124
|
...adapterContext,
|
|
@@ -128,6 +163,7 @@ export class ConcaveDOBase extends DurableObject {
|
|
|
128
163
|
notifyWrites,
|
|
129
164
|
allocateTimestamp,
|
|
130
165
|
});
|
|
166
|
+
this.schedulerManager = new SchedulerManager(this.scheduler, this.cronExecutor, this.doState);
|
|
131
167
|
}
|
|
132
168
|
/**
|
|
133
169
|
* Discover and sync cron specs during DO initialization.
|
|
@@ -155,11 +191,30 @@ export class ConcaveDOBase extends DurableObject {
|
|
|
155
191
|
console.warn("[ConcaveDO] Failed to initialize cron specs:", error?.message ?? error);
|
|
156
192
|
}
|
|
157
193
|
}
|
|
194
|
+
currentSnapshotTimestamp() {
|
|
195
|
+
const oracle = this._docstore?.timestampOracle;
|
|
196
|
+
const oracleTimestamp = typeof oracle?.beginSnapshot === "function"
|
|
197
|
+
? oracle.beginSnapshot()
|
|
198
|
+
: typeof oracle?.getCurrentTimestamp === "function"
|
|
199
|
+
? oracle.getCurrentTimestamp()
|
|
200
|
+
: undefined;
|
|
201
|
+
const wallClock = BigInt(Date.now());
|
|
202
|
+
if (typeof oracleTimestamp === "bigint" && oracleTimestamp > wallClock) {
|
|
203
|
+
return oracleTimestamp;
|
|
204
|
+
}
|
|
205
|
+
return wallClock;
|
|
206
|
+
}
|
|
158
207
|
/**
|
|
159
208
|
* Main request handler
|
|
160
209
|
*/
|
|
161
210
|
async fetch(request) {
|
|
162
211
|
const url = new URL(request.url);
|
|
212
|
+
if (url.pathname === "/query_ts") {
|
|
213
|
+
if (request.method !== "POST") {
|
|
214
|
+
return new Response("Method not allowed", { status: 405 });
|
|
215
|
+
}
|
|
216
|
+
return Response.json({ ts: this.currentSnapshotTimestamp().toString() }, { headers: this.corsHeaders(request) });
|
|
217
|
+
}
|
|
163
218
|
if (shouldHandleAsHttpRoute(url.pathname)) {
|
|
164
219
|
return this.handleHttp(request);
|
|
165
220
|
}
|
|
@@ -173,13 +228,16 @@ export class ConcaveDOBase extends DurableObject {
|
|
|
173
228
|
*/
|
|
174
229
|
async handleUdfRequest(request) {
|
|
175
230
|
try {
|
|
176
|
-
const { path, args, type, auth, componentPath, caller } = await request.json();
|
|
231
|
+
const { path, args, type, auth, componentPath, caller, snapshotTimestamp } = await request.json();
|
|
177
232
|
const convexArgs = jsonToConvex(args);
|
|
233
|
+
const parsedSnapshotTimestamp = parseSnapshotTimestamp(snapshotTimestamp);
|
|
178
234
|
const requestId = crypto.randomUUID();
|
|
179
|
-
const exec = () => this.execute(path, convexArgs, type, auth, componentPath, requestId);
|
|
235
|
+
const exec = () => this.execute(path, convexArgs, type, auth, componentPath, requestId, parsedSnapshotTimestamp);
|
|
180
236
|
const result = caller === "server" ? await runAsServerCall(exec, path) : await runAsClientCall(exec);
|
|
181
237
|
if (type === "mutation" || type === "action") {
|
|
182
|
-
this.doState.waitUntil(this.reschedule())
|
|
238
|
+
this.doState.waitUntil(this.reschedule().catch((error) => {
|
|
239
|
+
console.error("[ConcaveDO] Failed to reschedule alarm", error?.message ?? error);
|
|
240
|
+
}));
|
|
183
241
|
}
|
|
184
242
|
const writtenTables = writtenTablesFromRanges(result.writtenRanges) ?? [];
|
|
185
243
|
const responseBody = {
|
|
@@ -196,8 +254,9 @@ export class ConcaveDOBase extends DurableObject {
|
|
|
196
254
|
}
|
|
197
255
|
catch (e) {
|
|
198
256
|
console.error(e);
|
|
199
|
-
|
|
200
|
-
|
|
257
|
+
const errorMessage = e?.message ?? "Internal Server Error";
|
|
258
|
+
return new Response(JSON.stringify({ error: errorMessage }), {
|
|
259
|
+
headers: { "Content-Type": "application/json", ...this.corsHeaders(request) },
|
|
201
260
|
status: 500,
|
|
202
261
|
});
|
|
203
262
|
}
|
|
@@ -210,177 +269,137 @@ export class ConcaveDOBase extends DurableObject {
|
|
|
210
269
|
const url = new URL(request.url);
|
|
211
270
|
url.pathname = url.pathname.replace(/^\/api\/http/, "");
|
|
212
271
|
const req = new Request(url.toString(), request);
|
|
213
|
-
const
|
|
272
|
+
const authHeader = req.headers.get("Authorization");
|
|
273
|
+
const headerToken = authHeader?.replace(/^Bearer\s+/i, "").trim() || undefined;
|
|
274
|
+
let headerIdentity;
|
|
275
|
+
try {
|
|
276
|
+
headerIdentity =
|
|
277
|
+
headerToken && !isAdminToken(headerToken) && !isSystemToken(headerToken)
|
|
278
|
+
? await identityFromToken(headerToken)
|
|
279
|
+
: undefined;
|
|
280
|
+
}
|
|
281
|
+
catch (error) {
|
|
282
|
+
if (error instanceof JWTValidationError || error instanceof AdminAuthError || error instanceof SystemAuthError) {
|
|
283
|
+
return Response.json({ error: "Unauthorized" }, { status: 401, headers: this.corsHeaders(request) });
|
|
284
|
+
}
|
|
285
|
+
throw error;
|
|
286
|
+
}
|
|
287
|
+
const auth = (await resolveAuthContext(undefined, headerToken, headerIdentity));
|
|
214
288
|
const requestId = crypto.randomUUID();
|
|
215
289
|
return this.udfExecutor.executeHttp(req, auth, requestId);
|
|
216
290
|
}
|
|
217
291
|
catch (e) {
|
|
218
292
|
console.error(e);
|
|
219
|
-
return new Response(
|
|
293
|
+
return new Response("Internal Server Error", { status: 500, headers: this.corsHeaders(request) });
|
|
220
294
|
}
|
|
221
295
|
}
|
|
222
296
|
/**
|
|
223
297
|
* Execute a UDF
|
|
224
298
|
*/
|
|
225
|
-
async execute(path, args, type, auth, componentPath, requestId) {
|
|
226
|
-
return this.udfExecutor.execute(path, args, type, auth, componentPath, requestId);
|
|
299
|
+
async execute(path, args, type, auth, componentPath, requestId, snapshotTimestamp) {
|
|
300
|
+
return this.udfExecutor.execute(path, args, type, auth, componentPath, requestId, snapshotTimestamp);
|
|
227
301
|
}
|
|
228
302
|
/**
|
|
229
303
|
* Handle scheduled function alarms
|
|
230
304
|
*/
|
|
231
305
|
async alarm() {
|
|
232
|
-
|
|
233
|
-
const cronResult = await this.cronExecutor.runDueJobs();
|
|
234
|
-
const nextTimes = [scheduledResult.nextScheduledTime, cronResult.nextScheduledTime].filter((t) => t !== null);
|
|
235
|
-
if (nextTimes.length === 0) {
|
|
236
|
-
await this.doState.storage.deleteAlarm();
|
|
237
|
-
}
|
|
238
|
-
else {
|
|
239
|
-
await this.doState.storage.setAlarm(Math.min(...nextTimes));
|
|
240
|
-
}
|
|
306
|
+
return this.schedulerManager.handleAlarm();
|
|
241
307
|
}
|
|
242
308
|
/**
|
|
243
309
|
* Reschedule alarms
|
|
244
310
|
*/
|
|
245
311
|
async reschedule() {
|
|
246
|
-
|
|
247
|
-
const cronTime = await this.cronExecutor.getNextScheduledTime();
|
|
248
|
-
const nextTimes = [scheduledTime, cronTime].filter((t) => t !== null);
|
|
249
|
-
if (nextTimes.length === 0) {
|
|
250
|
-
await this.doState.storage.deleteAlarm();
|
|
251
|
-
}
|
|
252
|
-
else {
|
|
253
|
-
await this.doState.storage.setAlarm(Math.min(...nextTimes));
|
|
254
|
-
}
|
|
312
|
+
return this.schedulerManager.reschedule();
|
|
255
313
|
}
|
|
256
314
|
/**
|
|
257
315
|
* Sync cron specs
|
|
258
316
|
*/
|
|
259
317
|
async syncCronSpecs(cronSpecs) {
|
|
260
|
-
|
|
261
|
-
await this.reschedule();
|
|
318
|
+
return this.schedulerManager.syncCronSpecs(cronSpecs);
|
|
262
319
|
}
|
|
263
320
|
// =============================================================================
|
|
264
321
|
// DocStore RPC Methods - Direct delegation to _docstore
|
|
265
322
|
// =============================================================================
|
|
266
323
|
async setupSchema(options) {
|
|
267
|
-
return this.
|
|
324
|
+
return this.docStoreRpc.setupSchema(options);
|
|
268
325
|
}
|
|
269
326
|
async write(documents, indexes, conflictStrategy) {
|
|
270
|
-
return this.
|
|
327
|
+
return this.docStoreRpc.write(documents, indexes, conflictStrategy);
|
|
271
328
|
}
|
|
272
329
|
async get(id, readTimestamp) {
|
|
273
|
-
return this.
|
|
330
|
+
return this.docStoreRpc.get(id, readTimestamp);
|
|
274
331
|
}
|
|
275
332
|
async scan(table, readTimestamp) {
|
|
276
|
-
return this.
|
|
333
|
+
return this.docStoreRpc.scan(table, readTimestamp);
|
|
277
334
|
}
|
|
278
335
|
async scanPaginated(table, cursor, limit, order, readTimestamp) {
|
|
279
|
-
return this.
|
|
336
|
+
return this.docStoreRpc.scanPaginated(table, cursor, limit, order, readTimestamp);
|
|
280
337
|
}
|
|
281
338
|
/**
|
|
282
339
|
* Generators return arrays over RPC (cannot stream async generators)
|
|
283
340
|
*/
|
|
284
341
|
async index_scan(indexId, tabletId, readTimestamp, interval, order) {
|
|
285
|
-
|
|
286
|
-
for await (const item of this._docstore.index_scan(indexId, tabletId, readTimestamp, interval, order)) {
|
|
287
|
-
results.push(item);
|
|
288
|
-
}
|
|
289
|
-
return results;
|
|
342
|
+
return this.docStoreRpc.index_scan(indexId, tabletId, readTimestamp, interval, order);
|
|
290
343
|
}
|
|
291
344
|
/**
|
|
292
345
|
* Generators return arrays over RPC (cannot stream async generators)
|
|
293
346
|
*/
|
|
294
347
|
async load_documents(range, order) {
|
|
295
|
-
|
|
296
|
-
for await (const item of this._docstore.load_documents(range, order)) {
|
|
297
|
-
results.push(item);
|
|
298
|
-
}
|
|
299
|
-
return results;
|
|
348
|
+
return this.docStoreRpc.load_documents(range, order);
|
|
300
349
|
}
|
|
301
350
|
async count(table) {
|
|
302
|
-
return this.
|
|
351
|
+
return this.docStoreRpc.count(table);
|
|
303
352
|
}
|
|
304
353
|
async search(indexId, searchQuery, filters, options) {
|
|
305
|
-
return this.
|
|
354
|
+
return this.docStoreRpc.search(indexId, searchQuery, filters, options);
|
|
306
355
|
}
|
|
307
356
|
async vectorSearch(indexId, vector, limit, filters) {
|
|
308
|
-
return this.
|
|
357
|
+
return this.docStoreRpc.vectorSearch(indexId, vector, limit, filters);
|
|
309
358
|
}
|
|
310
359
|
async getGlobal(key) {
|
|
311
|
-
return this.
|
|
360
|
+
return this.docStoreRpc.getGlobal(key);
|
|
312
361
|
}
|
|
313
362
|
async writeGlobal(key, value) {
|
|
314
|
-
return this.
|
|
363
|
+
return this.docStoreRpc.writeGlobal(key, value);
|
|
315
364
|
}
|
|
316
365
|
async previous_revisions(queries) {
|
|
317
|
-
|
|
318
|
-
return Array.from(result.entries());
|
|
366
|
+
return this.docStoreRpc.previous_revisions(queries);
|
|
319
367
|
}
|
|
320
368
|
async previous_revisions_of_documents(queries) {
|
|
321
|
-
|
|
322
|
-
return Array.from(result.entries());
|
|
369
|
+
return this.docStoreRpc.previous_revisions_of_documents(queries);
|
|
323
370
|
}
|
|
324
371
|
// =============================================================================
|
|
325
372
|
// Blobstore RPC Methods - Prefixed to avoid collision with other methods
|
|
326
373
|
// =============================================================================
|
|
327
374
|
async blobstoreStore(buffer, options) {
|
|
328
|
-
if (!this.
|
|
375
|
+
if (!this.blobStoreRpc) {
|
|
329
376
|
throw new Error("Blobstore not configured");
|
|
330
377
|
}
|
|
331
|
-
return this.
|
|
378
|
+
return this.blobStoreRpc.store(buffer, options);
|
|
332
379
|
}
|
|
333
380
|
async blobstoreGet(storageId) {
|
|
334
|
-
if (!this.
|
|
381
|
+
if (!this.blobStoreRpc) {
|
|
335
382
|
throw new Error("Blobstore not configured");
|
|
336
383
|
}
|
|
337
|
-
|
|
338
|
-
if (result === null)
|
|
339
|
-
return null;
|
|
340
|
-
if (result instanceof Blob)
|
|
341
|
-
return result.arrayBuffer();
|
|
342
|
-
return result;
|
|
384
|
+
return this.blobStoreRpc.get(storageId);
|
|
343
385
|
}
|
|
344
386
|
async blobstoreDelete(storageId) {
|
|
345
|
-
if (!this.
|
|
387
|
+
if (!this.blobStoreRpc) {
|
|
346
388
|
throw new Error("Blobstore not configured");
|
|
347
389
|
}
|
|
348
|
-
return this.
|
|
390
|
+
return this.blobStoreRpc.delete(storageId);
|
|
349
391
|
}
|
|
350
392
|
async blobstoreGetUrl(storageId) {
|
|
351
|
-
if (!this.
|
|
393
|
+
if (!this.blobStoreRpc) {
|
|
352
394
|
throw new Error("Blobstore not configured");
|
|
353
395
|
}
|
|
354
|
-
return this.
|
|
396
|
+
return this.blobStoreRpc.getUrl(storageId);
|
|
355
397
|
}
|
|
356
398
|
/**
|
|
357
399
|
* Notify SyncDO of writes for subscription invalidation
|
|
358
400
|
*/
|
|
359
401
|
async notifySyncDo(writtenRanges, writtenTables, commitTimestamp) {
|
|
360
|
-
|
|
361
|
-
return;
|
|
362
|
-
}
|
|
363
|
-
try {
|
|
364
|
-
const instanceName = this.doState.id.name ?? "singleton";
|
|
365
|
-
const syncNamespace = this.env?.SYNC_DO;
|
|
366
|
-
if (!syncNamespace) {
|
|
367
|
-
return;
|
|
368
|
-
}
|
|
369
|
-
const syncId = syncNamespace.idFromName(instanceName);
|
|
370
|
-
const syncStub = syncNamespace.get(syncId);
|
|
371
|
-
await syncStub.fetch("http://do/notify", {
|
|
372
|
-
method: "POST",
|
|
373
|
-
headers: { "Content-Type": "application/json" },
|
|
374
|
-
body: JSON.stringify({
|
|
375
|
-
writtenRanges,
|
|
376
|
-
writtenTables,
|
|
377
|
-
commitTimestamp: commitTimestamp ? commitTimestamp.toString() : undefined,
|
|
378
|
-
}),
|
|
379
|
-
});
|
|
380
|
-
}
|
|
381
|
-
catch (error) {
|
|
382
|
-
console.warn("Failed to notify SyncDO", error?.message ?? error);
|
|
383
|
-
}
|
|
402
|
+
return this.syncNotifier.notify(writtenRanges, writtenTables, commitTimestamp);
|
|
384
403
|
}
|
|
385
404
|
/**
|
|
386
405
|
* Get CORS headers for responses
|
|
@@ -0,0 +1,46 @@
|
|
|
1
|
+
import type { DocStore, DocumentLogEntry, DatabaseIndexUpdate, Interval, Order, IndexKeyBytes, LatestDocument, TimestampRange, InternalDocumentId, GlobalKey, DocumentPrevTsQuery, SearchIndexDefinition, VectorIndexDefinition } from "@concavejs/core/docstore";
|
|
2
|
+
import type { JSONValue } from "convex/values";
|
|
3
|
+
/**
|
|
4
|
+
* DocStore RPC surface for service-binding isolation.
|
|
5
|
+
* Provides async delegation methods that flatten generators into arrays
|
|
6
|
+
* for RPC transport (async generators cannot be streamed over service bindings).
|
|
7
|
+
*/
|
|
8
|
+
export declare class DocStoreRpc {
|
|
9
|
+
private readonly docstore;
|
|
10
|
+
constructor(docstore: DocStore);
|
|
11
|
+
setupSchema(options?: {
|
|
12
|
+
searchIndexes?: SearchIndexDefinition[];
|
|
13
|
+
vectorIndexes?: VectorIndexDefinition[];
|
|
14
|
+
}): Promise<void>;
|
|
15
|
+
write(documents: DocumentLogEntry[], indexes: Set<{
|
|
16
|
+
ts: bigint;
|
|
17
|
+
update: DatabaseIndexUpdate;
|
|
18
|
+
}>, conflictStrategy: "Error" | "Overwrite"): Promise<void>;
|
|
19
|
+
get(id: InternalDocumentId, readTimestamp?: bigint): Promise<LatestDocument | null>;
|
|
20
|
+
scan(table: string, readTimestamp?: bigint): Promise<LatestDocument[]>;
|
|
21
|
+
scanPaginated(table: string, cursor: string | null, limit: number, order: Order, readTimestamp?: bigint): Promise<{
|
|
22
|
+
documents: LatestDocument[];
|
|
23
|
+
nextCursor: string | null;
|
|
24
|
+
hasMore: boolean;
|
|
25
|
+
}>;
|
|
26
|
+
index_scan(indexId: string, tabletId: string, readTimestamp: bigint, interval: Interval, order: Order): Promise<[IndexKeyBytes, LatestDocument][]>;
|
|
27
|
+
load_documents(range: TimestampRange, order: Order): Promise<DocumentLogEntry[]>;
|
|
28
|
+
count(table: string): Promise<number>;
|
|
29
|
+
search(indexId: string, searchQuery: string, filters: Map<string, unknown>, options?: {
|
|
30
|
+
limit?: number;
|
|
31
|
+
}): Promise<{
|
|
32
|
+
doc: LatestDocument;
|
|
33
|
+
score: number;
|
|
34
|
+
}[]>;
|
|
35
|
+
vectorSearch(indexId: string, vector: number[], limit: number, filters: Map<string, string>): Promise<{
|
|
36
|
+
doc: LatestDocument;
|
|
37
|
+
score: number;
|
|
38
|
+
}[]>;
|
|
39
|
+
getGlobal(key: GlobalKey): Promise<JSONValue | null>;
|
|
40
|
+
writeGlobal(key: GlobalKey, value: JSONValue): Promise<void>;
|
|
41
|
+
previous_revisions(queries: Set<{
|
|
42
|
+
id: InternalDocumentId;
|
|
43
|
+
ts: bigint;
|
|
44
|
+
}>): Promise<[string, DocumentLogEntry][]>;
|
|
45
|
+
previous_revisions_of_documents(queries: Set<DocumentPrevTsQuery>): Promise<[string, DocumentLogEntry][]>;
|
|
46
|
+
}
|
|
@@ -0,0 +1,63 @@
|
|
|
1
|
+
/**
|
|
2
|
+
* DocStore RPC surface for service-binding isolation.
|
|
3
|
+
* Provides async delegation methods that flatten generators into arrays
|
|
4
|
+
* for RPC transport (async generators cannot be streamed over service bindings).
|
|
5
|
+
*/
|
|
6
|
+
export class DocStoreRpc {
|
|
7
|
+
docstore;
|
|
8
|
+
constructor(docstore) {
|
|
9
|
+
this.docstore = docstore;
|
|
10
|
+
}
|
|
11
|
+
async setupSchema(options) {
|
|
12
|
+
return this.docstore.setupSchema(options);
|
|
13
|
+
}
|
|
14
|
+
async write(documents, indexes, conflictStrategy) {
|
|
15
|
+
return this.docstore.write(documents, indexes, conflictStrategy);
|
|
16
|
+
}
|
|
17
|
+
async get(id, readTimestamp) {
|
|
18
|
+
return this.docstore.get(id, readTimestamp);
|
|
19
|
+
}
|
|
20
|
+
async scan(table, readTimestamp) {
|
|
21
|
+
return this.docstore.scan(table, readTimestamp);
|
|
22
|
+
}
|
|
23
|
+
async scanPaginated(table, cursor, limit, order, readTimestamp) {
|
|
24
|
+
return this.docstore.scanPaginated(table, cursor, limit, order, readTimestamp);
|
|
25
|
+
}
|
|
26
|
+
async index_scan(indexId, tabletId, readTimestamp, interval, order) {
|
|
27
|
+
const results = [];
|
|
28
|
+
for await (const item of this.docstore.index_scan(indexId, tabletId, readTimestamp, interval, order)) {
|
|
29
|
+
results.push(item);
|
|
30
|
+
}
|
|
31
|
+
return results;
|
|
32
|
+
}
|
|
33
|
+
async load_documents(range, order) {
|
|
34
|
+
const results = [];
|
|
35
|
+
for await (const item of this.docstore.load_documents(range, order)) {
|
|
36
|
+
results.push(item);
|
|
37
|
+
}
|
|
38
|
+
return results;
|
|
39
|
+
}
|
|
40
|
+
async count(table) {
|
|
41
|
+
return this.docstore.count(table);
|
|
42
|
+
}
|
|
43
|
+
async search(indexId, searchQuery, filters, options) {
|
|
44
|
+
return this.docstore.search(indexId, searchQuery, filters, options);
|
|
45
|
+
}
|
|
46
|
+
async vectorSearch(indexId, vector, limit, filters) {
|
|
47
|
+
return this.docstore.vectorSearch(indexId, vector, limit, filters);
|
|
48
|
+
}
|
|
49
|
+
async getGlobal(key) {
|
|
50
|
+
return this.docstore.getGlobal(key);
|
|
51
|
+
}
|
|
52
|
+
async writeGlobal(key, value) {
|
|
53
|
+
return this.docstore.writeGlobal(key, value);
|
|
54
|
+
}
|
|
55
|
+
async previous_revisions(queries) {
|
|
56
|
+
const result = await this.docstore.previous_revisions(queries);
|
|
57
|
+
return Array.from(result.entries());
|
|
58
|
+
}
|
|
59
|
+
async previous_revisions_of_documents(queries) {
|
|
60
|
+
const result = await this.docstore.previous_revisions_of_documents(queries);
|
|
61
|
+
return Array.from(result.entries());
|
|
62
|
+
}
|
|
63
|
+
}
|
|
@@ -0,0 +1,19 @@
|
|
|
1
|
+
import type { ScheduledFunctionExecutor, CronExecutor } from "@concavejs/core";
|
|
2
|
+
/**
|
|
3
|
+
* Manages scheduled function and cron job execution.
|
|
4
|
+
* Extracted from ConcaveDOBase for single-responsibility.
|
|
5
|
+
*/
|
|
6
|
+
export declare class SchedulerManager {
|
|
7
|
+
private readonly scheduler;
|
|
8
|
+
private readonly cronExecutor;
|
|
9
|
+
private readonly doState;
|
|
10
|
+
constructor(scheduler: ScheduledFunctionExecutor, cronExecutor: CronExecutor, doState: {
|
|
11
|
+
storage: {
|
|
12
|
+
setAlarm(time: number): Promise<void>;
|
|
13
|
+
deleteAlarm(): Promise<void>;
|
|
14
|
+
};
|
|
15
|
+
});
|
|
16
|
+
handleAlarm(): Promise<void>;
|
|
17
|
+
reschedule(): Promise<void>;
|
|
18
|
+
syncCronSpecs(cronSpecs: Record<string, any>): Promise<void>;
|
|
19
|
+
}
|
|
@@ -0,0 +1,52 @@
|
|
|
1
|
+
/**
|
|
2
|
+
* Manages scheduled function and cron job execution.
|
|
3
|
+
* Extracted from ConcaveDOBase for single-responsibility.
|
|
4
|
+
*/
|
|
5
|
+
export class SchedulerManager {
|
|
6
|
+
scheduler;
|
|
7
|
+
cronExecutor;
|
|
8
|
+
doState;
|
|
9
|
+
constructor(scheduler, cronExecutor, doState) {
|
|
10
|
+
this.scheduler = scheduler;
|
|
11
|
+
this.cronExecutor = cronExecutor;
|
|
12
|
+
this.doState = doState;
|
|
13
|
+
}
|
|
14
|
+
async handleAlarm() {
|
|
15
|
+
try {
|
|
16
|
+
const scheduledResult = await this.scheduler.runDueJobs();
|
|
17
|
+
const cronResult = await this.cronExecutor.runDueJobs();
|
|
18
|
+
const nextTimes = [scheduledResult.nextScheduledTime, cronResult.nextScheduledTime].filter((t) => t !== null);
|
|
19
|
+
if (nextTimes.length === 0) {
|
|
20
|
+
await this.doState.storage.deleteAlarm();
|
|
21
|
+
}
|
|
22
|
+
else {
|
|
23
|
+
await this.doState.storage.setAlarm(Math.min(...nextTimes));
|
|
24
|
+
}
|
|
25
|
+
}
|
|
26
|
+
catch (error) {
|
|
27
|
+
console.error("[ConcaveDO] Alarm handler failed:", error?.message ?? error);
|
|
28
|
+
try {
|
|
29
|
+
await this.reschedule();
|
|
30
|
+
}
|
|
31
|
+
catch (rescheduleError) {
|
|
32
|
+
console.error("[ConcaveDO] Reschedule after alarm failure also failed:", rescheduleError?.message);
|
|
33
|
+
}
|
|
34
|
+
}
|
|
35
|
+
}
|
|
36
|
+
async reschedule() {
|
|
37
|
+
const scheduledTime = await this.scheduler.getNextScheduledTime();
|
|
38
|
+
const cronTime = await this.cronExecutor.getNextScheduledTime();
|
|
39
|
+
const nextTimes = [scheduledTime, cronTime].filter((t) => t !== null);
|
|
40
|
+
if (nextTimes.length === 0) {
|
|
41
|
+
await this.doState.storage.deleteAlarm();
|
|
42
|
+
}
|
|
43
|
+
else {
|
|
44
|
+
const alarmTime = Math.min(...nextTimes);
|
|
45
|
+
await this.doState.storage.setAlarm(alarmTime);
|
|
46
|
+
}
|
|
47
|
+
}
|
|
48
|
+
async syncCronSpecs(cronSpecs) {
|
|
49
|
+
await this.cronExecutor.syncCronSpecs(cronSpecs);
|
|
50
|
+
await this.reschedule();
|
|
51
|
+
}
|
|
52
|
+
}
|
|
@@ -0,0 +1,16 @@
|
|
|
1
|
+
import type { SerializedKeyRange } from "@concavejs/core/queryengine";
|
|
2
|
+
/**
|
|
3
|
+
* Notifies the SyncDO of writes for subscription invalidation.
|
|
4
|
+
* Extracted from ConcaveDOBase for single-responsibility.
|
|
5
|
+
*/
|
|
6
|
+
export declare class SyncNotifier {
|
|
7
|
+
private readonly doState;
|
|
8
|
+
private readonly env;
|
|
9
|
+
constructor(doState: {
|
|
10
|
+
id: {
|
|
11
|
+
name: string | null;
|
|
12
|
+
toString(): string;
|
|
13
|
+
};
|
|
14
|
+
}, env: any);
|
|
15
|
+
notify(writtenRanges?: SerializedKeyRange[], writtenTables?: string[], commitTimestamp?: bigint): Promise<void>;
|
|
16
|
+
}
|
|
@@ -0,0 +1,38 @@
|
|
|
1
|
+
/**
|
|
2
|
+
* Notifies the SyncDO of writes for subscription invalidation.
|
|
3
|
+
* Extracted from ConcaveDOBase for single-responsibility.
|
|
4
|
+
*/
|
|
5
|
+
export class SyncNotifier {
|
|
6
|
+
doState;
|
|
7
|
+
env;
|
|
8
|
+
constructor(doState, env) {
|
|
9
|
+
this.doState = doState;
|
|
10
|
+
this.env = env;
|
|
11
|
+
}
|
|
12
|
+
async notify(writtenRanges, writtenTables, commitTimestamp) {
|
|
13
|
+
if (!writtenRanges?.length && !writtenTables?.length) {
|
|
14
|
+
return;
|
|
15
|
+
}
|
|
16
|
+
try {
|
|
17
|
+
const instanceName = this.doState.id.name ?? "singleton";
|
|
18
|
+
const syncNamespace = this.env?.SYNC_DO;
|
|
19
|
+
if (!syncNamespace) {
|
|
20
|
+
return;
|
|
21
|
+
}
|
|
22
|
+
const syncId = syncNamespace.idFromName(instanceName);
|
|
23
|
+
const syncStub = syncNamespace.get(syncId);
|
|
24
|
+
await syncStub.fetch("http://do/notify", {
|
|
25
|
+
method: "POST",
|
|
26
|
+
headers: { "Content-Type": "application/json" },
|
|
27
|
+
body: JSON.stringify({
|
|
28
|
+
writtenRanges,
|
|
29
|
+
writtenTables,
|
|
30
|
+
commitTimestamp: commitTimestamp ? commitTimestamp.toString() : undefined,
|
|
31
|
+
}),
|
|
32
|
+
});
|
|
33
|
+
}
|
|
34
|
+
catch (error) {
|
|
35
|
+
console.warn("Failed to notify SyncDO", error?.message ?? error);
|
|
36
|
+
}
|
|
37
|
+
}
|
|
38
|
+
}
|
package/dist/http/http-api.js
CHANGED
|
@@ -15,6 +15,8 @@ function isReservedApiPath(pathname) {
|
|
|
15
15
|
normalizedPath === "/api/sync" ||
|
|
16
16
|
normalizedPath === "/api/reset-test-state" ||
|
|
17
17
|
normalizedPath === "/api/query" ||
|
|
18
|
+
normalizedPath === "/api/query_ts" ||
|
|
19
|
+
normalizedPath === "/api/query_at_ts" ||
|
|
18
20
|
normalizedPath === "/api/mutation" ||
|
|
19
21
|
normalizedPath === "/api/action") {
|
|
20
22
|
return true;
|
|
@@ -36,6 +38,24 @@ function shouldForwardApiPath(pathname) {
|
|
|
36
38
|
}
|
|
37
39
|
return !isReservedApiPath(pathname);
|
|
38
40
|
}
|
|
41
|
+
/**
 * Encode an ArrayBuffer as a base64 string.
 *
 * Processes the bytes in 32 KiB slices so String.fromCharCode never
 * receives an argument list long enough to overflow the call stack.
 */
function arrayBufferToBase64(buffer) {
    const view = new Uint8Array(buffer);
    const STEP = 0x8000;
    const pieces = [];
    for (let start = 0; start < view.length; start += STEP) {
        // subarray clamps past-the-end, so no explicit Math.min is needed.
        pieces.push(String.fromCharCode.apply(null, view.subarray(start, start + STEP)));
    }
    return btoa(pieces.join(""));
}
|
|
51
|
+
/**
 * Decode a base64 string into a freshly allocated ArrayBuffer.
 *
 * atob yields a binary string whose chars are all < 0x100, so mapping
 * each character to its char code recovers the original bytes exactly.
 */
function base64ToArrayBuffer(base64) {
    const decoded = atob(base64);
    const out = Uint8Array.from(decoded, (ch) => ch.charCodeAt(0));
    return out.buffer;
}
|
|
39
59
|
/**
|
|
40
60
|
* Create a storage adapter that routes through the ConcaveDO's storage syscall handler.
|
|
41
61
|
* This ensures storage operations are properly isolated within the DO.
|
|
@@ -44,7 +64,7 @@ function createStorageAdapter(concaveDO, _instance) {
|
|
|
44
64
|
return {
|
|
45
65
|
store: async (blob) => {
|
|
46
66
|
const buffer = await blob.arrayBuffer();
|
|
47
|
-
const base64 =
|
|
67
|
+
const base64 = arrayBufferToBase64(buffer);
|
|
48
68
|
const response = await concaveDO.fetch("http://do/storage", {
|
|
49
69
|
method: "POST",
|
|
50
70
|
headers: { "Content-Type": "application/json" },
|
|
@@ -81,12 +101,8 @@ function createStorageAdapter(concaveDO, _instance) {
|
|
|
81
101
|
if (!result.result || !result.result.__arrayBuffer) {
|
|
82
102
|
return { blob: null };
|
|
83
103
|
}
|
|
84
|
-
const
|
|
85
|
-
|
|
86
|
-
for (let i = 0; i < binary.length; i++) {
|
|
87
|
-
bytes[i] = binary.charCodeAt(i);
|
|
88
|
-
}
|
|
89
|
-
return { blob: new Blob([bytes.buffer]) };
|
|
104
|
+
const buffer = base64ToArrayBuffer(result.result.__arrayBuffer);
|
|
105
|
+
return { blob: new Blob([buffer]) };
|
|
90
106
|
},
|
|
91
107
|
};
|
|
92
108
|
}
|
|
@@ -142,10 +158,28 @@ export async function handleHttpApiRequest(request, env, ctx, instance = "single
|
|
|
142
158
|
}
|
|
143
159
|
// Note: Internal function access control is now handled by core executor (fail-closed)
|
|
144
160
|
const coreResult = await handleCoreHttpApiRequest(request, {
|
|
145
|
-
executeFunction: async ({ type, path, args, auth, componentPath }) => adapter.executeUdf(path, args, type, auth, componentPath),
|
|
161
|
+
executeFunction: async ({ type, path, args, auth, componentPath, snapshotTimestamp }) => adapter.executeUdf(path, args, type, auth, componentPath, undefined, snapshotTimestamp),
|
|
146
162
|
notifyWrites,
|
|
147
163
|
storage: storageAdapter,
|
|
148
164
|
corsHeaders,
|
|
165
|
+
getSnapshotTimestamp: async () => {
|
|
166
|
+
try {
|
|
167
|
+
const response = await concave.fetch("http://do/query_ts", {
|
|
168
|
+
method: "POST",
|
|
169
|
+
});
|
|
170
|
+
if (!response.ok) {
|
|
171
|
+
throw new Error(`query_ts failed with status ${response.status}`);
|
|
172
|
+
}
|
|
173
|
+
const body = (await response.json());
|
|
174
|
+
if (typeof body.ts !== "string" || !/^\d+$/.test(body.ts)) {
|
|
175
|
+
throw new Error("Invalid query_ts response");
|
|
176
|
+
}
|
|
177
|
+
return BigInt(body.ts);
|
|
178
|
+
}
|
|
179
|
+
catch {
|
|
180
|
+
return BigInt(Date.now());
|
|
181
|
+
}
|
|
182
|
+
},
|
|
149
183
|
});
|
|
150
184
|
if (coreResult?.handled) {
|
|
151
185
|
return coreResult.response;
|
|
@@ -1,3 +1,4 @@
|
|
|
1
|
+
import { TimestampOracle } from "@concavejs/core/utils";
|
|
1
2
|
/**
|
|
2
3
|
* Wraps a DO stub as a DocStore.
|
|
3
4
|
* Generator methods are converted from arrays back to async generators.
|
|
@@ -16,6 +17,9 @@ export function createGatewayDocStoreProxy(gateway, projectId, instance) {
|
|
|
16
17
|
* Internal helper that creates a DocStore proxy with configurable argument transformation.
|
|
17
18
|
*/
|
|
18
19
|
function createDocStoreProxyInternal(target, transformArgs) {
|
|
20
|
+
// Use a real TimestampOracle to guarantee monotonic, unique timestamps.
|
|
21
|
+
// The DO-side oracle cannot be accessed via RPC, so each proxy gets its own.
|
|
22
|
+
const oracle = new TimestampOracle();
|
|
19
23
|
return new Proxy({}, {
|
|
20
24
|
get(_, prop) {
|
|
21
25
|
// Convert array results back to async generators
|
|
@@ -48,25 +52,22 @@ function createDocStoreProxyInternal(target, transformArgs) {
|
|
|
48
52
|
return new Map(result);
|
|
49
53
|
};
|
|
50
54
|
}
|
|
51
|
-
// Provide a local
|
|
55
|
+
// Provide a local TimestampOracle (DO-side oracle cannot be accessed via RPC)
|
|
52
56
|
if (prop === "timestampOracle") {
|
|
53
|
-
return
|
|
54
|
-
observeTimestamp: () => { },
|
|
55
|
-
allocateTimestamp: () => BigInt(Date.now()),
|
|
56
|
-
getCurrentTimestamp: () => BigInt(Date.now()),
|
|
57
|
-
beginSnapshot: () => BigInt(Date.now()),
|
|
58
|
-
};
|
|
57
|
+
return oracle;
|
|
59
58
|
}
|
|
60
59
|
// No-op close
|
|
61
60
|
if (prop === "close") {
|
|
62
61
|
return async () => { };
|
|
63
62
|
}
|
|
64
|
-
// Bind methods to target with transformed args
|
|
65
|
-
|
|
66
|
-
|
|
67
|
-
|
|
63
|
+
// Bind methods to target with transformed args.
|
|
64
|
+
// Use direct invocation (target[prop](...)) instead of .call() because
|
|
65
|
+
// Cloudflare RPC stubs intercept property access - .call() would be
|
|
66
|
+
// interpreted as an RPC method name rather than Function.prototype.call.
|
|
67
|
+
if (typeof target[prop] === "function") {
|
|
68
|
+
return (...args) => target[prop](...transformArgs(...args));
|
|
68
69
|
}
|
|
69
|
-
return
|
|
70
|
+
return target[prop];
|
|
70
71
|
},
|
|
71
72
|
});
|
|
72
73
|
}
|
|
@@ -9,6 +9,6 @@ import type { AuthContext } from "@concavejs/core/sync/protocol-handler";
|
|
|
9
9
|
export declare class ConcaveStubExecutor implements UdfExec {
|
|
10
10
|
private readonly stub;
|
|
11
11
|
constructor(stub: DurableObjectStub);
|
|
12
|
-
execute(path: string, args: Record<string, any>, type: "query" | "mutation" | "action", auth?: AuthContext | UserIdentityAttributes, componentPath?: string, requestId?: string): Promise<UdfResult>;
|
|
12
|
+
execute(path: string, args: Record<string, any>, type: "query" | "mutation" | "action", auth?: AuthContext | UserIdentityAttributes, componentPath?: string, requestId?: string, snapshotTimestamp?: bigint): Promise<UdfResult>;
|
|
13
13
|
executeHttp(request: Request): Promise<Response>;
|
|
14
14
|
}
|
|
@@ -8,7 +8,7 @@ export class ConcaveStubExecutor {
|
|
|
8
8
|
constructor(stub) {
|
|
9
9
|
this.stub = stub;
|
|
10
10
|
}
|
|
11
|
-
async execute(path, args, type, auth, componentPath, requestId) {
|
|
11
|
+
async execute(path, args, type, auth, componentPath, requestId, snapshotTimestamp) {
|
|
12
12
|
const payload = {
|
|
13
13
|
path,
|
|
14
14
|
args: convexToJson(args),
|
|
@@ -17,6 +17,7 @@ export class ConcaveStubExecutor {
|
|
|
17
17
|
componentPath,
|
|
18
18
|
caller: "client",
|
|
19
19
|
requestId,
|
|
20
|
+
snapshotTimestamp: snapshotTimestamp?.toString(),
|
|
20
21
|
};
|
|
21
22
|
const response = await this.stub.fetch("http://do/execute", {
|
|
22
23
|
method: "POST",
|
|
@@ -25,7 +26,23 @@ export class ConcaveStubExecutor {
|
|
|
25
26
|
});
|
|
26
27
|
if (!response.ok) {
|
|
27
28
|
const errorText = await response.text();
|
|
28
|
-
|
|
29
|
+
// Try to parse JSON error from ConcaveDO (e.g., { error: "message" })
|
|
30
|
+
let errorMessage = errorText;
|
|
31
|
+
try {
|
|
32
|
+
const parsed = JSON.parse(errorText);
|
|
33
|
+
if (parsed?.error) {
|
|
34
|
+
errorMessage = parsed.error;
|
|
35
|
+
}
|
|
36
|
+
}
|
|
37
|
+
catch {
|
|
38
|
+
// Not JSON, use raw text
|
|
39
|
+
}
|
|
40
|
+
// Strip "UDF execution failed: " prefix from shim errors to expose original message
|
|
41
|
+
const udfPrefix = "UDF execution failed: ";
|
|
42
|
+
if (errorMessage.startsWith(udfPrefix)) {
|
|
43
|
+
errorMessage = errorMessage.slice(udfPrefix.length);
|
|
44
|
+
}
|
|
45
|
+
throw new Error(errorMessage);
|
|
29
46
|
}
|
|
30
47
|
const result = (await response.json());
|
|
31
48
|
if (result.commitTimestamp) {
|
|
@@ -19,6 +19,6 @@ export declare class UdfExecIsolated implements UdfExec {
|
|
|
19
19
|
private instance;
|
|
20
20
|
private projectId;
|
|
21
21
|
constructor(stubOrOptions: UdfExecutorRpc | UdfExecIsolatedOptions);
|
|
22
|
-
execute(path: string, args: Record<string, any>, type: "query" | "mutation" | "action", auth?: any, componentPath?: string, requestId?: string): Promise<UdfResult>;
|
|
22
|
+
execute(path: string, args: Record<string, any>, type: "query" | "mutation" | "action", auth?: any, componentPath?: string, requestId?: string, snapshotTimestamp?: bigint): Promise<UdfResult>;
|
|
23
23
|
executeHttp(request: Request, auth?: any, requestId?: string): Promise<Response>;
|
|
24
24
|
}
|
|
@@ -21,9 +21,9 @@ export class UdfExecIsolated {
|
|
|
21
21
|
this.projectId = "default";
|
|
22
22
|
}
|
|
23
23
|
}
|
|
24
|
-
async execute(path, args, type, auth, componentPath, requestId) {
|
|
24
|
+
async execute(path, args, type, auth, componentPath, requestId, snapshotTimestamp) {
|
|
25
25
|
// Pass instance context for syscall routing
|
|
26
|
-
return this.rpc.execute(path, args, type, auth, componentPath, requestId, this.instance, this.projectId);
|
|
26
|
+
return this.rpc.execute(path, args, type, auth, componentPath, requestId, snapshotTimestamp, this.instance, this.projectId);
|
|
27
27
|
}
|
|
28
28
|
async executeHttp(request, auth, requestId) {
|
|
29
29
|
return this.rpc.executeHttp(request, auth, requestId, this.instance, this.projectId);
|
|
@@ -19,7 +19,7 @@ interface Env {
|
|
|
19
19
|
export declare class UdfExecutorRpc extends WorkerEntrypoint {
|
|
20
20
|
private udfExecutor;
|
|
21
21
|
constructor(ctx: ExecutionContext, env: Env);
|
|
22
|
-
execute(path: string, args: Record<string, any>, type: "query" | "mutation" | "action", auth?: any, componentPath?: string, requestId?: string, _instance?: string, _projectId?: string): Promise<import("@concavejs/core").UdfResult>;
|
|
22
|
+
execute(path: string, args: Record<string, any>, type: "query" | "mutation" | "action", auth?: any, componentPath?: string, requestId?: string, snapshotTimestamp?: bigint, _instance?: string, _projectId?: string): Promise<import("@concavejs/core").UdfResult>;
|
|
23
23
|
executeHttp(request: Request, auth?: any, requestId?: string, _instance?: string, _projectId?: string): Promise<Response>;
|
|
24
24
|
}
|
|
25
25
|
export {};
|
|
@@ -51,11 +51,11 @@ export class UdfExecutorRpc extends WorkerEntrypoint {
|
|
|
51
51
|
// Pass blobstore if available, otherwise fall back to R2Bucket for direct mode
|
|
52
52
|
this.udfExecutor = new UdfExecInline(docstore, blobstore ?? env.STORAGE_BUCKET, env.R2_PUBLIC_URL);
|
|
53
53
|
}
|
|
54
|
-
async execute(path, args, type, auth, componentPath, requestId, _instance, _projectId) {
|
|
54
|
+
async execute(path, args, type, auth, componentPath, requestId, snapshotTimestamp, _instance, _projectId) {
|
|
55
55
|
// Note: instance and projectId are passed for per-request context but
|
|
56
56
|
// currently we rely on the environment settings for syscall routing.
|
|
57
57
|
// This can be enhanced to create per-request syscall clients if needed.
|
|
58
|
-
return this.udfExecutor.execute(path, args, type, auth, componentPath, requestId);
|
|
58
|
+
return this.udfExecutor.execute(path, args, type, auth, componentPath, requestId, snapshotTimestamp);
|
|
59
59
|
}
|
|
60
60
|
async executeHttp(request, auth, requestId, _instance, _projectId) {
|
|
61
61
|
return this.udfExecutor.executeHttp(request, auth, requestId);
|
package/package.json
CHANGED
|
@@ -1,6 +1,6 @@
|
|
|
1
1
|
{
|
|
2
2
|
"name": "@concavejs/runtime-cf-base",
|
|
3
|
-
"version": "0.0.1-alpha.
|
|
3
|
+
"version": "0.0.1-alpha.8",
|
|
4
4
|
"license": "FSL-1.1-Apache-2.0",
|
|
5
5
|
"publishConfig": {
|
|
6
6
|
"access": "public"
|
|
@@ -81,12 +81,12 @@
|
|
|
81
81
|
"test": "bun test --run --passWithNoTests || true"
|
|
82
82
|
},
|
|
83
83
|
"dependencies": {
|
|
84
|
-
"@concavejs/core": "0.0.1-alpha.
|
|
85
|
-
"@concavejs/runtime-base": "0.0.1-alpha.
|
|
86
|
-
"@concavejs/docstore-cf-do": "0.0.1-alpha.
|
|
87
|
-
"@concavejs/docstore-cf-d1": "0.0.1-alpha.
|
|
88
|
-
"@concavejs/docstore-cf-hyperdrive": "0.0.1-alpha.
|
|
89
|
-
"@concavejs/blobstore-cf-r2": "0.0.1-alpha.
|
|
84
|
+
"@concavejs/core": "0.0.1-alpha.8",
|
|
85
|
+
"@concavejs/runtime-base": "0.0.1-alpha.8",
|
|
86
|
+
"@concavejs/docstore-cf-do": "0.0.1-alpha.8",
|
|
87
|
+
"@concavejs/docstore-cf-d1": "0.0.1-alpha.8",
|
|
88
|
+
"@concavejs/docstore-cf-hyperdrive": "0.0.1-alpha.8",
|
|
89
|
+
"@concavejs/blobstore-cf-r2": "0.0.1-alpha.8",
|
|
90
90
|
"convex": "^1.27.3"
|
|
91
91
|
},
|
|
92
92
|
"devDependencies": {
|