@concavejs/runtime-cf-base 0.0.1-alpha.4
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Potentially problematic release.
This version of @concavejs/runtime-cf-base might be problematic. Click here for more details.
- package/dist/adapters/cf-websocket-adapter.d.ts +38 -0
- package/dist/adapters/cf-websocket-adapter.js +83 -0
- package/dist/durable-objects/concave-do-base.d.ts +158 -0
- package/dist/durable-objects/concave-do-base.js +412 -0
- package/dist/http/dx-http.d.ts +28 -0
- package/dist/http/dx-http.js +306 -0
- package/dist/http/http-api.d.ts +1 -0
- package/dist/http/http-api.js +262 -0
- package/dist/http/index.d.ts +7 -0
- package/dist/http/index.js +7 -0
- package/dist/index.d.ts +15 -0
- package/dist/index.js +25 -0
- package/dist/internal.d.ts +4 -0
- package/dist/internal.js +4 -0
- package/dist/routing/instance.d.ts +25 -0
- package/dist/routing/instance.js +101 -0
- package/dist/rpc/blobstore-proxy.d.ts +11 -0
- package/dist/rpc/blobstore-proxy.js +28 -0
- package/dist/rpc/docstore-proxy.d.ts +11 -0
- package/dist/rpc/docstore-proxy.js +72 -0
- package/dist/rpc/index.d.ts +2 -0
- package/dist/rpc/index.js +2 -0
- package/dist/sync/cf-websocket-adapter.d.ts +15 -0
- package/dist/sync/cf-websocket-adapter.js +22 -0
- package/dist/sync/concave-do-udf-executor.d.ts +37 -0
- package/dist/sync/concave-do-udf-executor.js +67 -0
- package/dist/sync/index.d.ts +2 -0
- package/dist/sync/index.js +2 -0
- package/dist/udf/executor/do-client-executor.d.ts +14 -0
- package/dist/udf/executor/do-client-executor.js +42 -0
- package/dist/udf/executor/index.d.ts +9 -0
- package/dist/udf/executor/index.js +9 -0
- package/dist/udf/executor/inline-executor.d.ts +13 -0
- package/dist/udf/executor/inline-executor.js +25 -0
- package/dist/udf/executor/isolated-executor.d.ts +24 -0
- package/dist/udf/executor/isolated-executor.js +31 -0
- package/dist/udf/executor/shim-content.d.ts +1 -0
- package/dist/udf/executor/shim-content.js +3 -0
- package/dist/worker/create-concave-worker.d.ts +34 -0
- package/dist/worker/create-concave-worker.js +162 -0
- package/dist/worker/index.d.ts +6 -0
- package/dist/worker/index.js +6 -0
- package/dist/worker/udf-worker.d.ts +14 -0
- package/dist/worker/udf-worker.js +63 -0
- package/package.json +45 -0
|
@@ -0,0 +1,28 @@
|
|
|
1
|
+
import type { DocStore, DocumentLogEntry, DatabaseIndexUpdate, InternalDocumentId, Interval, Order, TimestampRange, GlobalKey, DocumentPrevTsQuery, LatestDocument, IndexKeyBytes, type SearchIndexDefinition, type VectorIndexDefinition } from "@concavejs/core/docstore";
|
|
2
|
+
import type { JSONValue as JsonValue } from "convex/values";
|
|
3
|
+
export declare class HttpDocStore implements DocStore {
|
|
4
|
+
private readonly url;
|
|
5
|
+
constructor(url: string);
|
|
6
|
+
private post;
|
|
7
|
+
setupSchema(options?: {
|
|
8
|
+
searchIndexes?: SearchIndexDefinition[];
|
|
9
|
+
vectorIndexes?: VectorIndexDefinition[];
|
|
10
|
+
}): Promise<void>;
|
|
11
|
+
write(documents: DocumentLogEntry[], indexes: Set<{
|
|
12
|
+
ts: bigint;
|
|
13
|
+
update: DatabaseIndexUpdate;
|
|
14
|
+
}>, conflictStrategy: "Error" | "Overwrite"): Promise<void>;
|
|
15
|
+
index_scan(indexId: string, tabletId: string, readTimestamp: bigint, interval: Interval, order: Order): AsyncGenerator<[IndexKeyBytes, LatestDocument]>;
|
|
16
|
+
load_documents(range: TimestampRange, order: Order): AsyncGenerator<DocumentLogEntry>;
|
|
17
|
+
getGlobal(key: GlobalKey): Promise<JsonValue | null>;
|
|
18
|
+
writeGlobal(key: GlobalKey, value: JsonValue): Promise<void>;
|
|
19
|
+
previous_revisions(queries: Set<{
|
|
20
|
+
id: InternalDocumentId;
|
|
21
|
+
ts: bigint;
|
|
22
|
+
}>): Promise<Map<string, DocumentLogEntry>>;
|
|
23
|
+
previous_revisions_of_documents(queries: Set<DocumentPrevTsQuery>): Promise<Map<string, DocumentLogEntry>>;
|
|
24
|
+
get(id: InternalDocumentId, readTimestamp?: bigint): Promise<LatestDocument | null>;
|
|
25
|
+
count(table: string): Promise<number>;
|
|
26
|
+
scan(table: string, readTimestamp?: bigint): Promise<LatestDocument[]>;
|
|
27
|
+
}
|
|
28
|
+
export declare function handleRequest(docstore: DocStore, request: Request): Promise<Response>;
|
|
@@ -0,0 +1,306 @@
|
|
|
1
|
+
// Serialization helpers
// Render an ArrayBuffer as a lowercase hex string, two digits per byte.
function ab_to_hex(buffer) {
    const bytes = new Uint8Array(buffer);
    let hex = "";
    for (const byte of bytes) {
        hex += byte.toString(16).padStart(2, "0");
    }
    return hex;
}
|
|
5
|
+
// Decode a hex string (as produced by ab_to_hex) back into an ArrayBuffer.
function hex_to_ab(hex) {
    const byteCount = hex.length / 2;
    const bytes = new Uint8Array(byteCount);
    for (let byteIndex = 0; byteIndex < byteCount; byteIndex++) {
        const offset = byteIndex * 2;
        bytes[byteIndex] = parseInt(hex.slice(offset, offset + 2), 16);
    }
    return bytes.buffer;
}
|
|
12
|
+
// Custom JSON serialization: BigInt, ArrayBuffer, Map, and Set are wrapped in
// { __type, value } marker objects that deserialize() knows how to revive.
function serialize(obj) {
    const replacer = (_key, value) => {
        if (typeof value === "bigint") {
            return { __type: "bigint", value: value.toString() };
        }
        if (value instanceof ArrayBuffer) {
            return { __type: "ArrayBuffer", value: ab_to_hex(value) };
        }
        if (value instanceof Map) {
            return { __type: "Map", value: [...value.entries()] };
        }
        if (value instanceof Set) {
            return { __type: "Set", value: [...value] };
        }
        return value;
    };
    return JSON.stringify(obj, replacer);
}
|
|
30
|
+
// Inverse of serialize(): revives { __type, value } marker objects back into
// BigInt, ArrayBuffer, Map, and Set instances while parsing.
function deserialize(json) {
    const revive = (_key, value) => {
        // Only plain (non-array, non-null) objects can carry a __type marker.
        if (value === null || typeof value !== "object" || Array.isArray(value)) {
            return value;
        }
        switch (value.__type) {
            case "bigint":
                return BigInt(value.value);
            case "ArrayBuffer":
                return hex_to_ab(value.value);
            case "Map":
                // Entries were stored as [key, value] pairs; revive both sides.
                return new Map(value.value.map(([entryKey, entryValue]) => [revive("", entryKey), revive("", entryValue)]));
            case "Set":
                return new Set(value.value.map((member) => revive("", member)));
            default:
                return value;
        }
    };
    return JSON.parse(json, revive);
}
|
|
50
|
+
// Client-side implementation of DocStore over HTTP.
// Each method POSTs its arguments (encoded with serialize()) to
// `${url}/<method>` and decodes replies with deserialize(); the two scan
// methods consume newline-delimited JSON (NDJSON) streaming responses.
export class HttpDocStore {
    url;
    constructor(url) {
        this.url = url;
    }
    // POST a serialize()d body to `${this.url}/${path}`; returns the raw Response.
    async post(path, body) {
        const serializedBody = serialize(body);
        return await fetch(`${this.url}/${path}`, {
            method: "POST",
            headers: { "Content-Type": "application/json" },
            body: serializedBody,
        });
    }
    // Shared NDJSON consumer for index_scan/load_documents: yields one
    // deserialize()d value per non-empty line of the response body.
    // (Previously this loop was duplicated verbatim in both methods and the
    // decoder was never flushed, dropping a trailing partial UTF-8 sequence.)
    async *streamNdjson(response) {
        if (!response.body) {
            return;
        }
        const reader = response.body.getReader();
        const decoder = new TextDecoder();
        let buffer = "";
        while (true) {
            const { done, value } = await reader.read();
            if (done) {
                // Flush any bytes the decoder buffered for an incomplete
                // multi-byte sequence at the end of the stream.
                buffer += decoder.decode();
                break;
            }
            buffer += decoder.decode(value, { stream: true });
            const lines = buffer.split("\n");
            // Keep the last (possibly partial) line for the next chunk.
            buffer = lines.pop() ?? "";
            for (const line of lines) {
                if (line.trim() === "")
                    continue;
                yield deserialize(line);
            }
        }
        if (buffer.trim() !== "") {
            yield deserialize(buffer);
        }
    }
    async setupSchema(options) {
        const response = await this.post("setupSchema", options ?? {});
        if (!response.ok) {
            throw new Error(`setupSchema failed: ${await response.text()}`);
        }
    }
    async write(documents, indexes, conflictStrategy) {
        const response = await this.post("write", {
            documents,
            indexes,
            conflictStrategy,
        });
        if (!response.ok) {
            throw new Error(`write failed: ${await response.text()}`);
        }
    }
    async *index_scan(indexId, tabletId, readTimestamp, interval, order) {
        const response = await this.post("index_scan", {
            indexId,
            tabletId,
            readTimestamp,
            interval,
            order,
        });
        if (!response.ok) {
            throw new Error(`index_scan failed: ${await response.text()}`);
        }
        yield* this.streamNdjson(response);
    }
    async *load_documents(range, order) {
        const response = await this.post("load_documents", { range, order });
        if (!response.ok) {
            throw new Error(`load_documents failed: ${await response.text()}`);
        }
        yield* this.streamNdjson(response);
    }
    async getGlobal(key) {
        const response = await this.post("getGlobal", { key });
        if (!response.ok) {
            throw new Error(`getGlobal failed: ${await response.text()}`);
        }
        return deserialize(await response.text());
    }
    async writeGlobal(key, value) {
        const response = await this.post("writeGlobal", { key, value });
        if (!response.ok) {
            throw new Error(`writeGlobal failed: ${await response.text()}`);
        }
    }
    async previous_revisions(queries) {
        const response = await this.post("previous_revisions", { queries });
        if (!response.ok) {
            throw new Error(`previous_revisions failed: ${await response.text()}`);
        }
        return deserialize(await response.text());
    }
    async previous_revisions_of_documents(queries) {
        const response = await this.post("previous_revisions_of_documents", {
            queries,
        });
        if (!response.ok) {
            throw new Error(`previous_revisions_of_documents failed: ${await response.text()}`);
        }
        return deserialize(await response.text());
    }
    async get(id, readTimestamp) {
        const response = await this.post("get", { id, readTimestamp });
        if (!response.ok) {
            throw new Error(`get failed: ${await response.text()}`);
        }
        return deserialize(await response.text());
    }
    async count(table) {
        const response = await this.post("count", { table });
        if (!response.ok) {
            throw new Error(`count failed: ${await response.text()}`);
        }
        return deserialize(await response.text());
    }
    async scan(table, readTimestamp) {
        const response = await this.post("scan", { table, readTimestamp });
        if (!response.ok) {
            throw new Error(`scan failed: ${await response.text()}`);
        }
        return deserialize(await response.text());
    }
}
|
|
195
|
+
// Server-side request handler: dispatches POST /<method> requests to the
// corresponding DocStore method, mirroring the HttpDocStore client.
// Streaming methods answer with NDJSON; value-returning methods answer with
// a single serialize()d JSON body.
export async function handleRequest(docstore, request) {
    const url = new URL(request.url);
    const path = url.pathname.split("/").pop();
    if (request.method !== "POST") {
        return new Response("Method not allowed", { status: 405 });
    }
    // Wrap an async iterable of rows as an NDJSON streaming Response, one
    // serialize()d row per line. Shared by index_scan and load_documents
    // (previously this plumbing was duplicated, with an extra TransformStream
    // pass just to encode strings to bytes).
    const ndjsonResponse = (rows) => {
        const encoder = new TextEncoder();
        const stream = new ReadableStream({
            async start(controller) {
                try {
                    for await (const row of rows) {
                        controller.enqueue(encoder.encode(serialize(row) + "\n"));
                    }
                    controller.close();
                }
                catch (e) {
                    // Surface iteration failures to the consumer of the stream.
                    controller.error(e);
                }
            },
        });
        return new Response(stream, {
            headers: { "Content-Type": "application/x-ndjson" },
        });
    };
    // Single-value responses share one serialization path.
    const jsonResponse = (result) => new Response(serialize(result), {
        headers: { "Content-Type": "application/json" },
    });
    try {
        const body = await request.text();
        const args = body ? deserialize(body) : {};
        switch (path) {
            case "setupSchema":
                await docstore.setupSchema(args);
                return new Response(null, { status: 200 });
            case "write":
                await docstore.write(args.documents, args.indexes, args.conflictStrategy);
                return new Response(null, { status: 200 });
            case "index_scan":
                return ndjsonResponse(docstore.index_scan(args.indexId, args.tabletId, args.readTimestamp, args.interval, args.order));
            case "load_documents":
                return ndjsonResponse(docstore.load_documents(args.range, args.order));
            case "getGlobal":
                return jsonResponse(await docstore.getGlobal(args.key));
            case "writeGlobal":
                await docstore.writeGlobal(args.key, args.value);
                return new Response(null, { status: 200 });
            case "previous_revisions":
                return jsonResponse(await docstore.previous_revisions(args.queries));
            case "previous_revisions_of_documents":
                return jsonResponse(await docstore.previous_revisions_of_documents(args.queries));
            case "get":
                return jsonResponse(await docstore.get(args.id, args.readTimestamp));
            case "count":
                return jsonResponse(await docstore.count(args.table));
            case "scan":
                return jsonResponse(await docstore.scan(args.table, args.readTimestamp));
            default:
                return new Response("Not Found", { status: 404 });
        }
    }
    catch (e) {
        // NOTE(review): returning e.stack to the caller leaks implementation
        // detail; fine for an internal service boundary, but confirm before
        // exposing this endpoint publicly.
        return new Response(e.stack || e.toString(), { status: 500 });
    }
}
|
|
@@ -0,0 +1 @@
|
|
|
1
|
+
/**
 * Entry point for the Convex-compatible `/api/*` HTTP surface on Cloudflare
 * Workers. Routes the request to the core handler, user HTTP actions
 * (`/api/http/*`), ad-hoc function execution (`/api/run/*`), or forwards
 * other non-reserved `/api/*` paths to the ConcaveDO Durable Object selected
 * by `instance` (the implementation defaults it to "singleton").
 */
export declare function handleHttpApiRequest(request: Request, env: Env, ctx: ExecutionContext, instance?: string): Promise<Response>;
|
|
@@ -0,0 +1,262 @@
|
|
|
1
|
+
import { ConcaveStubExecutor } from "../udf/executor/do-client-executor";
|
|
2
|
+
import { createClientAdapter } from "@concavejs/core/udf/execution-adapter";
|
|
3
|
+
import { loadConvexModule } from "@concavejs/core/udf";
|
|
4
|
+
import { writtenTablesFromRanges } from "@concavejs/core/utils";
|
|
5
|
+
import { applyCors, computeCorsHeaders, handleCoreHttpApiRequest, resolveAuthContext } from "@concavejs/core/http";
|
|
6
|
+
import { AdminAuthError, identityFromToken, isAdminToken, isSystemToken, JWTValidationError, SystemAuthError, } from "@concavejs/core/auth";
|
|
7
|
+
import { InternalFunctionAccessError } from "@concavejs/core/errors";
|
|
8
|
+
// Matches a version segment directly after /api, e.g. "/api/1.24" or
// "/api/1.24.3", but only when followed by "/" or end of path.
const VERSIONED_API_PREFIX = /^\/api\/\d+\.\d+(?:\.\d+)?(?=\/|$)/;
// Normalize "/api/<major>.<minor>[.<patch>]/..." to plain "/api/...".
function stripApiVersionPrefix(pathname) {
    return pathname.replace(VERSIONED_API_PREFIX, "/api");
}
// Paths the worker handles itself and must therefore never forward verbatim.
function isReservedApiPath(pathname) {
    const normalizedPath = stripApiVersionPrefix(pathname);
    const exactReserved = [
        "/api/execute",
        "/api/sync",
        "/api/reset-test-state",
        "/api/query",
        "/api/mutation",
        "/api/action",
    ];
    if (exactReserved.includes(normalizedPath)) {
        return true;
    }
    // These namespaces are reserved both as exact paths and as prefixes.
    const prefixReserved = ["/api/storage", "/api/http", "/api/run"];
    return prefixReserved.some((reserved) => normalizedPath === reserved || normalizedPath.startsWith(reserved + "/"));
}
// An /api/* path that is not reserved gets forwarded to the Durable Object.
function shouldForwardApiPath(pathname) {
    return pathname.startsWith("/api/") && !isReservedApiPath(pathname);
}
|
|
39
|
+
/**
 * Create a storage adapter that routes through the ConcaveDO's storage syscall handler.
 * This ensures storage operations are properly isolated within the DO.
 *
 * Blob payloads cross the DO boundary as base64 inside JSON
 * (`{ __arrayBuffer: <base64> }`).
 */
function createStorageAdapter(concaveDO, _instance) {
    // Encode an ArrayBuffer as base64 in bounded chunks. The previous
    // single-shot `String.fromCharCode(...bytes)` spreads every byte as a
    // call argument and throws RangeError for large blobs.
    const bufferToBase64 = (buffer) => {
        const bytes = new Uint8Array(buffer);
        const chunkSize = 0x8000;
        let binary = "";
        for (let offset = 0; offset < bytes.length; offset += chunkSize) {
            binary += String.fromCharCode(...bytes.subarray(offset, offset + chunkSize));
        }
        return btoa(binary);
    };
    // Decode base64 back into raw bytes.
    const base64ToBytes = (base64) => {
        const binary = atob(base64);
        const bytes = new Uint8Array(binary.length);
        for (let i = 0; i < binary.length; i++) {
            bytes[i] = binary.charCodeAt(i);
        }
        return bytes;
    };
    // POST one storage syscall to the DO; throws using the DO-provided error
    // message when available, else the given fallback.
    const callStorage = async (method, args, failureMessage) => {
        const response = await concaveDO.fetch("http://do/storage", {
            method: "POST",
            headers: { "Content-Type": "application/json" },
            body: JSON.stringify({ method, args }),
        });
        if (!response.ok) {
            const error = await response.json();
            throw new Error(error?.error?.message ?? failureMessage);
        }
        return await response.json();
    };
    return {
        store: async (blob) => {
            const base64 = bufferToBase64(await blob.arrayBuffer());
            const result = await callStorage("store", [{ __arrayBuffer: base64 }, { contentType: blob.type }], "Storage store failed");
            return {
                storageId: result.result.storageId,
                writtenRanges: [],
                writtenTables: ["_storage"],
            };
        },
        get: async (storageId) => {
            const result = await callStorage("get", [storageId], "Storage get failed");
            if (!result.result || !result.result.__arrayBuffer) {
                return { blob: null };
            }
            return { blob: new Blob([base64ToBytes(result.result.__arrayBuffer).buffer]) };
        },
    };
}
|
|
93
|
+
// Build a callback that fans write notifications out to the SYNC_DO Durable
// Object for this instance. Delivery happens in the background via
// ctx.waitUntil so the caller's response is not delayed.
function createNotifyWrites(env, instance, ctx) {
    return async (writtenRanges, writtenTables, commitTimestamp) => {
        const hasRanges = Boolean(writtenRanges?.length);
        const hasTables = Boolean(writtenTables?.length);
        // Nothing was written — nothing to notify.
        if (!hasRanges && !hasTables) {
            return;
        }
        const syncDo = env.SYNC_DO.get(env.SYNC_DO.idFromName(instance));
        const payload = {
            writtenRanges,
            // Derive the table list from the ranges when not given explicitly.
            writtenTables: writtenTables ?? writtenTablesFromRanges(writtenRanges),
            // bigint is not JSON-serializable; send it as a decimal string.
            commitTimestamp: commitTimestamp ? commitTimestamp.toString() : undefined,
        };
        const delivery = syncDo.fetch("http://do/notify", {
            method: "POST",
            headers: { "Content-Type": "application/json" },
            body: JSON.stringify(payload),
        });
        ctx.waitUntil(delivery);
    };
}
|
|
112
|
+
/**
 * Entry point for the Convex-compatible `/api/*` HTTP surface.
 *
 * Resolution order:
 *   1. Core handler (`handleCoreHttpApiRequest`) — may fully handle the request.
 *   2. `/api/http/*` — forwarded verbatim to the ConcaveDO (user HTTP actions).
 *   3. `/api/run/{function}` — loads the module, infers the UDF type, executes.
 *   4. Any other non-reserved `/api/*` path — forwarded to the DO with the
 *      version prefix stripped.
 *
 * @param request  Incoming worker request.
 * @param env      Worker bindings (CONCAVE_DO, SYNC_DO, ...).
 * @param ctx      Execution context used for background write notifications.
 * @param instance Logical instance name; selects the Durable Objects.
 */
export async function handleHttpApiRequest(request, env, ctx, instance = "singleton") {
    const corsHeaders = computeCorsHeaders(request);
    // Every response is routed through apply() so CORS headers are always attached.
    const apply = (response) => applyCors(response, corsHeaders);
    const url = new URL(request.url);
    const pathParts = url.pathname.slice(1).split("/");
    if (pathParts[0] !== "api") {
        return apply(new Response("Not found", { status: 404 }));
    }
    console.log(`[handleHttpApiRequest] instance=${instance}`);
    // One ConcaveDO per logical instance; all execution and storage goes there.
    const concaveId = env.CONCAVE_DO.idFromName(instance);
    const concave = env.CONCAVE_DO.get(concaveId);
    const executor = new ConcaveStubExecutor(concave);
    const adapter = createClientAdapter(executor);
    const notifyWrites = createNotifyWrites(env, instance, ctx);
    // Route storage operations through the DO's storage syscall handler
    const storageAdapter = createStorageAdapter(concave, instance);
    const authHeader = request.headers.get("Authorization");
    // Strip the "Bearer " prefix; an empty remainder collapses to undefined.
    const headerToken = authHeader?.replace(/^Bearer\s+/i, "").trim() || undefined;
    let headerIdentity;
    try {
        // Validate a user JWT up front; admin/system tokens are skipped here
        // and handled later via resolveAuthContext.
        headerIdentity =
            headerToken && !isAdminToken(headerToken) && !isSystemToken(headerToken)
                ? await identityFromToken(headerToken)
                : undefined;
    }
    catch (error) {
        if (error instanceof JWTValidationError || error instanceof AdminAuthError || error instanceof SystemAuthError) {
            return apply(Response.json({ error: "Unauthorized" }, { status: 401 }));
        }
        throw error;
    }
    // Note: Internal function access control is now handled by core executor (fail-closed)
    const coreResult = await handleCoreHttpApiRequest(request, {
        executeFunction: async ({ type, path, args, auth, componentPath }) => adapter.executeUdf(path, args, type, auth, componentPath),
        notifyWrites,
        storage: storageAdapter,
        corsHeaders,
    });
    if (coreResult?.handled) {
        return coreResult.response;
    }
    // Handle /api/http/*
    if (pathParts.length >= 2 && pathParts[1] === "http") {
        const forwardUrl = new URL(request.url);
        const forwardedRequest = new Request(forwardUrl.toString(), request);
        const response = await concave.fetch(forwardedRequest);
        return apply(response);
    }
    // Handle /api/run/{functionIdentifier}
    if (pathParts.length > 2 && pathParts[1] === "run") {
        // "a/b/c" in the URL addresses function "a/b:c"-style path "a/b/c" -> "a/b:c".
        const functionIdentifier = pathParts.slice(2).join("/");
        const udfPath = functionIdentifier.replace(/\//g, ":");
        let bodyArgs = {};
        if (request.headers.get("Content-Type")?.includes("application/json") && request.body) {
            try {
                // clone() so the original body remains readable downstream.
                const body = await request.clone().json();
                if (body.args && typeof body.args === "object") {
                    bodyArgs = body.args;
                }
            }
            catch (_error) {
                // Ignore parse errors
            }
        }
        // Query-string args: JSON-decode each value when possible, otherwise
        // keep the raw string.
        const queryArgs = {};
        for (const [key, value] of url.searchParams.entries()) {
            try {
                queryArgs[key] = JSON.parse(value);
            }
            catch {
                queryArgs[key] = value;
            }
        }
        // Body args win over query-string args on key collisions.
        const mergedArgs = { ...queryArgs, ...bodyArgs };
        let authForExecution;
        try {
            authForExecution = await resolveAuthContext(undefined, headerToken, headerIdentity);
        }
        catch (error) {
            if (error instanceof JWTValidationError || error instanceof AdminAuthError) {
                return apply(Response.json({ error: "Unauthorized" }, { status: 401 }));
            }
            throw error;
        }
        try {
            // "module:function"; a bare module name runs its default export.
            const modulePath = udfPath.split(":")[0];
            const functionName = udfPath.split(":")[1] ?? "default";
            const module = await loadConvexModule(modulePath, { hint: "udf" });
            const func = module[functionName];
            if (!func) {
                return apply(new Response(`Function ${udfPath} not found`, { status: 404 }));
            }
            // Note: Internal function access control is now handled by core executor (fail-closed)
            // Infer the UDF type from the markers Convex puts on the function object.
            let udfType = null;
            if (func.isQuery)
                udfType = "query";
            else if (func.isMutation)
                udfType = "mutation";
            else if (func.isAction)
                udfType = "action";
            if (!udfType) {
                console.error(`Function ${udfPath} is not a valid query, mutation, or action.`);
                return apply(new Response(`Function ${udfPath} is not a valid query, mutation, or action.`, {
                    status: 400,
                }));
            }
            const result = await adapter.executeUdf(udfPath, mergedArgs, udfType, authForExecution);
            // Only writes need to wake subscribers; queries never notify.
            if ((udfType === "mutation" || udfType === "action") && result.writtenRanges?.length) {
                await notifyWrites(result.writtenRanges, result.writtenTables);
            }
            return apply(Response.json({
                status: "success",
                value: result.result,
                logLines: [],
            }));
        }
        catch (error) {
            // Handle internal function access errors with 403
            if (error instanceof InternalFunctionAccessError) {
                return apply(Response.json({
                    status: "error",
                    errorMessage: error.message,
                }, { status: 403 }));
            }
            // Map module-resolution failures (matched by message text) to 404.
            if (typeof error?.message === "string") {
                const message = error.message.toLowerCase();
                if (message.includes("module not found") ||
                    message.includes("failed to load convex module") ||
                    message.includes("unable to resolve module")) {
                    return apply(new Response(`Module for function ${udfPath} not found.`, {
                        status: 404,
                    }));
                }
            }
            // Thrown Response objects pass through as-is (with CORS applied).
            if (error instanceof Response) {
                return apply(error);
            }
            return apply(new Response(`Error determining function type: ${error?.message ?? String(error)}`, {
                status: 500,
            }));
        }
    }
    // Any remaining non-reserved /api/* path is forwarded to the DO with the
    // version prefix normalized away.
    if (shouldForwardApiPath(url.pathname)) {
        const forwardUrl = new URL(request.url);
        forwardUrl.pathname = stripApiVersionPrefix(forwardUrl.pathname);
        const forwardedRequest = new Request(forwardUrl.toString(), request);
        const response = await concave.fetch(forwardedRequest);
        return apply(response);
    }
    return apply(new Response("Not found", { status: 404 }));
}
|
package/dist/index.d.ts
ADDED
|
@@ -0,0 +1,15 @@
|
|
|
1
|
+
// Re-export core functionality
export * from "@concavejs/core";
// Cloudflare Workers utilities
export { UdfExecutorRpc } from "./worker/udf-worker";
export { createConcaveWorker, resolveNamespaceBinding, createScopedNamespace, type ConcaveWorkerOptions, type ConcaveWorkerBindings, type ConcaveWorker, } from "./worker/create-concave-worker";
// Instance routing helpers (cookie/request based instance selection)
export { DEFAULT_INSTANCE_KEY, DEFAULT_INSTANCE_VALUE, DEFAULT_INSTANCE_COOKIE_PATH, resolveInstanceFromRequest, maybeAttachInstanceCookie, readCookieValue, buildInstanceCookie, type InstanceResolution, type InstanceResolutionOptions, type InstanceCookieOptions, } from "./routing/instance";
// UDF executors
export { UdfExecInline } from "./udf/executor/inline-executor";
export { UdfExecIsolated } from "./udf/executor/isolated-executor";
// Durable Object base class and its configuration/context types
export { ConcaveDOBase, type ConcaveDOConfig, type ConcaveDOAdapterContext, type ConcaveDOExecutorContext, } from "./durable-objects/concave-do-base";
// RPC proxy utilities
export { createDocStoreProxy, createBlobStoreProxy, createGatewayDocStoreProxy, createGatewayBlobStoreProxy, } from "./rpc";
// CF-specific WebSocket adapters (generic and sync-protocol variants)
export { CFWebSocketAdapter } from "./adapters/cf-websocket-adapter";
export { CFWebSocketAdapter as SyncWebSocketAdapter } from "./sync/cf-websocket-adapter";
export { ConcaveDOUdfExecutor } from "./sync/concave-do-udf-executor";
// Embedded shim source for isolated UDF execution
export { SHIM_SOURCE } from "./udf/executor/shim-content";
// CF-specific DocStore/BlobStore implementations
export { DODocStore } from "@concavejs/docstore-cf-do";
export { D1DocStore } from "@concavejs/docstore-cf-d1";
export { R2BlobStore } from "@concavejs/blobstore-cf-r2";
|
package/dist/index.js
ADDED
|
@@ -0,0 +1,25 @@
|
|
|
1
|
+
// Public entry point for @concavejs/runtime-cf-base: aggregates the core
// runtime re-exports with the Cloudflare-specific implementations below.
// Re-export core functionality
export * from "@concavejs/core";
// Export Cloudflare Workers utilities
export { UdfExecutorRpc } from "./worker/udf-worker";
export { createConcaveWorker, resolveNamespaceBinding, createScopedNamespace, } from "./worker/create-concave-worker";
// Export instance routing helpers
export { DEFAULT_INSTANCE_KEY, DEFAULT_INSTANCE_VALUE, DEFAULT_INSTANCE_COOKIE_PATH, resolveInstanceFromRequest, maybeAttachInstanceCookie, readCookieValue, buildInstanceCookie, } from "./routing/instance";
// Export UDF executors
export { UdfExecInline } from "./udf/executor/inline-executor";
export { UdfExecIsolated } from "./udf/executor/isolated-executor";
// Export Durable Object base classes
export { ConcaveDOBase, } from "./durable-objects/concave-do-base";
// Export RPC proxy utilities
export { createDocStoreProxy, createBlobStoreProxy, createGatewayDocStoreProxy, createGatewayBlobStoreProxy, } from "./rpc";
// Export CF-specific adapters
export { CFWebSocketAdapter } from "./adapters/cf-websocket-adapter";
// Export sync protocol adapters
export { CFWebSocketAdapter as SyncWebSocketAdapter } from "./sync/cf-websocket-adapter";
export { ConcaveDOUdfExecutor } from "./sync/concave-do-udf-executor";
// Export embedded shim source
export { SHIM_SOURCE } from "./udf/executor/shim-content";
// Export CF-specific DocStore implementations
export { DODocStore } from "@concavejs/docstore-cf-do";
export { D1DocStore } from "@concavejs/docstore-cf-d1";
export { R2BlobStore } from "@concavejs/blobstore-cf-r2";
|
package/dist/internal.js
ADDED