@concavejs/runtime-cf-base 0.0.1-alpha.10
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/dist/adapters/cf-websocket-adapter.d.ts +38 -0
- package/dist/adapters/cf-websocket-adapter.js +83 -0
- package/dist/durable-objects/blobstore-rpc.d.ts +13 -0
- package/dist/durable-objects/blobstore-rpc.js +27 -0
- package/dist/durable-objects/concave-do-base.d.ts +169 -0
- package/dist/durable-objects/concave-do-base.js +466 -0
- package/dist/durable-objects/docstore-rpc.d.ts +46 -0
- package/dist/durable-objects/docstore-rpc.js +63 -0
- package/dist/durable-objects/scheduler-manager.d.ts +19 -0
- package/dist/durable-objects/scheduler-manager.js +53 -0
- package/dist/durable-objects/sync-notifier.d.ts +16 -0
- package/dist/durable-objects/sync-notifier.js +38 -0
- package/dist/env.d.ts +19 -0
- package/dist/env.js +6 -0
- package/dist/http/dx-http.d.ts +43 -0
- package/dist/http/dx-http.js +327 -0
- package/dist/http/http-api.d.ts +38 -0
- package/dist/http/http-api.js +399 -0
- package/dist/http/index.d.ts +7 -0
- package/dist/http/index.js +7 -0
- package/dist/index.d.ts +18 -0
- package/dist/index.js +27 -0
- package/dist/internal.d.ts +4 -0
- package/dist/internal.js +4 -0
- package/dist/routing/instance.d.ts +25 -0
- package/dist/routing/instance.js +101 -0
- package/dist/routing/sync-topology.d.ts +40 -0
- package/dist/routing/sync-topology.js +669 -0
- package/dist/rpc/blobstore-proxy.d.ts +11 -0
- package/dist/rpc/blobstore-proxy.js +28 -0
- package/dist/rpc/docstore-proxy.d.ts +11 -0
- package/dist/rpc/docstore-proxy.js +73 -0
- package/dist/rpc/index.d.ts +2 -0
- package/dist/rpc/index.js +2 -0
- package/dist/sync/cf-websocket-adapter.d.ts +15 -0
- package/dist/sync/cf-websocket-adapter.js +22 -0
- package/dist/sync/concave-do-udf-executor.d.ts +46 -0
- package/dist/sync/concave-do-udf-executor.js +75 -0
- package/dist/sync/index.d.ts +2 -0
- package/dist/sync/index.js +2 -0
- package/dist/udf/executor/do-client-executor.d.ts +14 -0
- package/dist/udf/executor/do-client-executor.js +58 -0
- package/dist/udf/executor/index.d.ts +8 -0
- package/dist/udf/executor/index.js +8 -0
- package/dist/udf/executor/inline-executor.d.ts +13 -0
- package/dist/udf/executor/inline-executor.js +25 -0
- package/dist/udf/executor/isolated-executor.d.ts +24 -0
- package/dist/udf/executor/isolated-executor.js +31 -0
- package/dist/udf/executor/shim-content.d.ts +1 -0
- package/dist/udf/executor/shim-content.js +3 -0
- package/dist/worker/create-concave-worker.d.ts +79 -0
- package/dist/worker/create-concave-worker.js +196 -0
- package/dist/worker/index.d.ts +6 -0
- package/dist/worker/index.js +6 -0
- package/dist/worker/udf-worker.d.ts +25 -0
- package/dist/worker/udf-worker.js +63 -0
- package/package.json +99 -0
|
@@ -0,0 +1,38 @@
|
|
|
1
|
+
/**
|
|
2
|
+
* Notifies the SyncDO of writes for subscription invalidation.
|
|
3
|
+
* Extracted from ConcaveDOBase for single-responsibility.
|
|
4
|
+
*/
|
|
5
|
+
export class SyncNotifier {
|
|
6
|
+
doState;
|
|
7
|
+
env;
|
|
8
|
+
constructor(doState, env) {
|
|
9
|
+
this.doState = doState;
|
|
10
|
+
this.env = env;
|
|
11
|
+
}
|
|
12
|
+
async notify(writtenRanges, writtenTables, commitTimestamp) {
|
|
13
|
+
if (!writtenRanges?.length && !writtenTables?.length) {
|
|
14
|
+
return;
|
|
15
|
+
}
|
|
16
|
+
try {
|
|
17
|
+
const instanceName = this.doState.id.name ?? "singleton";
|
|
18
|
+
const syncNamespace = this.env?.SYNC_DO;
|
|
19
|
+
if (!syncNamespace) {
|
|
20
|
+
return;
|
|
21
|
+
}
|
|
22
|
+
const syncId = syncNamespace.idFromName(instanceName);
|
|
23
|
+
const syncStub = syncNamespace.get(syncId);
|
|
24
|
+
await syncStub.fetch("http://do/notify", {
|
|
25
|
+
method: "POST",
|
|
26
|
+
headers: { "Content-Type": "application/json" },
|
|
27
|
+
body: JSON.stringify({
|
|
28
|
+
writtenRanges,
|
|
29
|
+
writtenTables,
|
|
30
|
+
commitTimestamp: commitTimestamp ? commitTimestamp.toString() : undefined,
|
|
31
|
+
}),
|
|
32
|
+
});
|
|
33
|
+
}
|
|
34
|
+
catch (error) {
|
|
35
|
+
console.warn("Failed to notify SyncDO", error?.message ?? error);
|
|
36
|
+
}
|
|
37
|
+
}
|
|
38
|
+
}
|
package/dist/env.d.ts
ADDED
|
@@ -0,0 +1,19 @@
|
|
|
1
|
+
/**
 * Canonical environment type definitions for Concave Cloudflare Workers runtimes.
 *
 * Users can extend these interfaces via intersection types to add their own bindings.
 */
/** Core bindings required by every Concave Cloudflare worker. */
export interface ConcaveEnv {
    /** Durable Object namespace hosting the primary Concave runtime objects. */
    CONCAVE_DO: DurableObjectNamespace;
    /** Durable Object namespace that receives sync/invalidation notifications. */
    SYNC_DO: DurableObjectNamespace;
}
/** Bindings for R2-backed blob storage. */
export interface ConcaveStorageEnv {
    /** R2 bucket that holds blob payloads. */
    STORAGE_BUCKET: R2Bucket;
    /** Optional public base URL for serving blobs directly from R2. */
    R2_PUBLIC_URL?: string;
}
/** Bindings for D1-backed docstore. */
export interface ConcaveD1Env {
    /** D1 database backing the document store. */
    DB: D1Database;
}
|
package/dist/env.js
ADDED
|
@@ -0,0 +1,43 @@
|
|
|
1
|
+
import type { DocStore, DocumentLogEntry, DatabaseIndexUpdate, InternalDocumentId, Interval, Order, TimestampRange, GlobalKey, DocumentPrevTsQuery, LatestDocument, IndexKeyBytes, SearchIndexDefinition, VectorIndexDefinition } from "@concavejs/core/docstore";
import type { JSONValue as JsonValue } from "convex/values";
/**
 * DocStore implementation that proxies every operation to a remote peer
 * over HTTP. The remote side is expected to be served by handleRequest.
 */
export declare class HttpDocStore implements DocStore {
    /** Base URL of the remote docstore endpoint. */
    private readonly url;
    constructor(url: string);
    /** Internal helper that POSTs a serialized body to the remote endpoint. */
    private post;
    /** Sets up the remote schema, including optional search and vector indexes. */
    setupSchema(options?: {
        searchIndexes?: SearchIndexDefinition[];
        vectorIndexes?: VectorIndexDefinition[];
    }): Promise<void>;
    /** Writes document log entries plus their index updates in one request. */
    write(documents: DocumentLogEntry[], indexes: Set<{
        ts: bigint;
        update: DatabaseIndexUpdate;
    }>, conflictStrategy: "Error" | "Overwrite"): Promise<void>;
    /** Streams index rows; results arrive as NDJSON and are yielded incrementally. */
    index_scan(indexId: string, tabletId: string, readTimestamp: bigint, interval: Interval, order: Order): AsyncGenerator<[IndexKeyBytes, LatestDocument]>;
    /** Streams the document log for a timestamp range (NDJSON under the hood). */
    load_documents(range: TimestampRange, order: Order): AsyncGenerator<DocumentLogEntry>;
    getGlobal(key: GlobalKey): Promise<JsonValue | null>;
    writeGlobal(key: GlobalKey, value: JsonValue): Promise<void>;
    previous_revisions(queries: Set<{
        id: InternalDocumentId;
        ts: bigint;
    }>): Promise<Map<string, DocumentLogEntry>>;
    previous_revisions_of_documents(queries: Set<DocumentPrevTsQuery>): Promise<Map<string, DocumentLogEntry>>;
    get(id: InternalDocumentId, readTimestamp?: bigint): Promise<LatestDocument | null>;
    count(table: string): Promise<number>;
    scan(table: string, readTimestamp?: bigint): Promise<LatestDocument[]>;
    /** Cursor-based pagination over a table; `nextCursor` is null on the last page. */
    scanPaginated(tableId: string, cursor: string | null, limit: number, order: Order, readTimestamp?: bigint): Promise<{
        documents: LatestDocument[];
        nextCursor: string | null;
        hasMore: boolean;
    }>;
    /** Full-text search against a search index. NOTE(review): higher score presumably means a better match — confirm against the server implementation. */
    search(indexId: string, searchQuery: string, filters: Map<string, unknown>, options?: {
        limit?: number;
    }): Promise<{
        doc: LatestDocument;
        score: number;
    }[]>;
    /** Nearest-neighbor search against a vector index. */
    vectorSearch(indexId: string, vector: number[], limit: number, filters: Map<string, string>): Promise<{
        doc: LatestDocument;
        score: number;
    }[]>;
}
/** Server-side dispatcher that routes POST /<op> requests to `docstore` methods. */
export declare function handleRequest(docstore: DocStore, request: Request): Promise<Response>;
|
|
@@ -0,0 +1,327 @@
|
|
|
1
|
+
// Serialization helpers
/** Encodes an ArrayBuffer as a lowercase hex string, two digits per byte. */
function ab_to_hex(buffer) {
    let hex = "";
    for (const byte of new Uint8Array(buffer)) {
        hex += byte.toString(16).padStart(2, "0");
    }
    return hex;
}
|
|
5
|
+
/**
 * Decodes a hex string (as produced by ab_to_hex) back into an ArrayBuffer.
 * Expects an even-length string of hex digit pairs.
 */
function hex_to_ab(hex) {
    const byteCount = hex.length / 2;
    const bytes = new Uint8Array(byteCount);
    for (let index = 0; index < byteCount; index++) {
        bytes[index] = parseInt(hex.substr(index * 2, 2), 16);
    }
    return bytes.buffer;
}
|
|
12
|
+
// Custom JSON serialization to handle BigInt, ArrayBuffer, Map, and Set.
/**
 * JSON.stringify with tagged encodings for values JSON cannot represent:
 * bigint, ArrayBuffer, Map, and Set become { __type, value } wrappers.
 * JSON.stringify applies the replacer recursively, so nested special
 * values (e.g. a bigint inside a Map entry) are handled too.
 */
function serialize(obj) {
    const replacer = (_key, value) => {
        if (typeof value === "bigint") {
            return { __type: "bigint", value: value.toString() };
        }
        if (value instanceof ArrayBuffer) {
            return { __type: "ArrayBuffer", value: ab_to_hex(value) };
        }
        if (value instanceof Map) {
            return { __type: "Map", value: [...value.entries()] };
        }
        if (value instanceof Set) {
            return { __type: "Set", value: [...value] };
        }
        return value;
    };
    return JSON.stringify(obj, replacer);
}
|
|
30
|
+
function deserialize(json) {
|
|
31
|
+
const reviver = (_key, value) => {
|
|
32
|
+
if (value && typeof value === "object" && !Array.isArray(value)) {
|
|
33
|
+
if (value.__type === "bigint") {
|
|
34
|
+
return BigInt(value.value);
|
|
35
|
+
}
|
|
36
|
+
if (value.__type === "ArrayBuffer") {
|
|
37
|
+
return hex_to_ab(value.value);
|
|
38
|
+
}
|
|
39
|
+
if (value.__type === "Map") {
|
|
40
|
+
return new Map(value.value.map(([k, v]) => [reviver("", k), reviver("", v)]));
|
|
41
|
+
}
|
|
42
|
+
if (value.__type === "Set") {
|
|
43
|
+
return new Set(value.value.map((v) => reviver("", v)));
|
|
44
|
+
}
|
|
45
|
+
}
|
|
46
|
+
return value;
|
|
47
|
+
};
|
|
48
|
+
return JSON.parse(json, reviver);
|
|
49
|
+
}
|
|
50
|
+
// Client-side implementation of DocStore over HTTP
/**
 * DocStore client that forwards every operation to an HTTP peer served by
 * handleRequest. Request/response bodies use serialize()/deserialize() so
 * bigint, ArrayBuffer, Map, and Set survive the JSON round trip; streaming
 * endpoints (index_scan, load_documents) consume NDJSON responses.
 */
export class HttpDocStore {
    url;
    constructor(url) {
        this.url = url;
    }
    /** POSTs a serialized body to `${url}/${path}` and returns the raw Response. */
    async post(path, body) {
        const serializedBody = serialize(body);
        return await fetch(`${this.url}/${path}`, {
            method: "POST",
            headers: { "Content-Type": "application/json" },
            body: serializedBody,
        });
    }
    /** POSTs to `op` and throws `"<op> failed: ..."` on a non-2xx response. */
    async _call(op, body) {
        const response = await this.post(op, body);
        if (!response.ok) {
            throw new Error(`${op} failed: ${await response.text()}`);
        }
        return response;
    }
    /** Like _call, but deserializes the response body as the result. */
    async _callJson(op, body) {
        const response = await this._call(op, body);
        return deserialize(await response.text());
    }
    /**
     * Yields one deserialized value per line of an NDJSON streaming response.
     * Shared by index_scan and load_documents (previously duplicated inline).
     */
    async *_ndjsonRows(response) {
        if (!response.body) {
            return;
        }
        const reader = response.body.getReader();
        const decoder = new TextDecoder();
        let buffer = "";
        while (true) {
            const { done, value } = await reader.read();
            if (done) {
                break;
            }
            buffer += decoder.decode(value, { stream: true });
            const lines = buffer.split("\n");
            // Keep the trailing partial line buffered for the next chunk.
            buffer = lines.pop();
            for (const line of lines) {
                if (line.trim() === "")
                    continue;
                yield deserialize(line);
            }
        }
        // Flush any bytes the decoder buffered for an incomplete UTF-8
        // sequence at end of stream (previously dropped silently).
        buffer += decoder.decode();
        if (buffer.trim() !== "") {
            yield deserialize(buffer);
        }
    }
    /** Sets up the remote schema, including optional search and vector indexes. */
    async setupSchema(options) {
        await this._call("setupSchema", options ?? {});
    }
    /** Writes document log entries plus their index updates in one request. */
    async write(documents, indexes, conflictStrategy) {
        await this._call("write", {
            documents,
            indexes,
            conflictStrategy,
        });
    }
    /** Streams index rows for the given interval; yields [keyBytes, document] pairs. */
    async *index_scan(indexId, tabletId, readTimestamp, interval, order) {
        const response = await this._call("index_scan", {
            indexId,
            tabletId,
            readTimestamp,
            interval,
            order,
        });
        yield* this._ndjsonRows(response);
    }
    /** Streams the document log for a timestamp range. */
    async *load_documents(range, order) {
        const response = await this._call("load_documents", { range, order });
        yield* this._ndjsonRows(response);
    }
    async getGlobal(key) {
        return await this._callJson("getGlobal", { key });
    }
    async writeGlobal(key, value) {
        await this._call("writeGlobal", { key, value });
    }
    async previous_revisions(queries) {
        return await this._callJson("previous_revisions", { queries });
    }
    async previous_revisions_of_documents(queries) {
        return await this._callJson("previous_revisions_of_documents", {
            queries,
        });
    }
    async get(id, readTimestamp) {
        return await this._callJson("get", { id, readTimestamp });
    }
    async count(table) {
        return await this._callJson("count", { table });
    }
    async scan(table, readTimestamp) {
        return await this._callJson("scan", { table, readTimestamp });
    }
    /** Cursor-based pagination; the server reports nextCursor/hasMore. */
    async scanPaginated(tableId, cursor, limit, order, readTimestamp) {
        return await this._callJson("scanPaginated", { tableId, cursor, limit, order, readTimestamp });
    }
    async search(indexId, searchQuery, filters, options) {
        return await this._callJson("search", { indexId, searchQuery, filters, options });
    }
    async vectorSearch(indexId, vector, limit, filters) {
        return await this._callJson("vectorSearch", { indexId, vector, limit, filters });
    }
}
|
|
216
|
+
// Server-side request handler
|
|
217
|
+
export async function handleRequest(docstore, request) {
|
|
218
|
+
const url = new URL(request.url);
|
|
219
|
+
const path = url.pathname.split("/").pop();
|
|
220
|
+
if (request.method !== "POST") {
|
|
221
|
+
return new Response("Method not allowed", { status: 405 });
|
|
222
|
+
}
|
|
223
|
+
try {
|
|
224
|
+
const body = await request.text();
|
|
225
|
+
const args = body ? deserialize(body) : {};
|
|
226
|
+
switch (path) {
|
|
227
|
+
case "setupSchema":
|
|
228
|
+
await docstore.setupSchema(args);
|
|
229
|
+
return new Response(null, { status: 200 });
|
|
230
|
+
case "write":
|
|
231
|
+
await docstore.write(args.documents, args.indexes, args.conflictStrategy);
|
|
232
|
+
return new Response(null, { status: 200 });
|
|
233
|
+
case "index_scan": {
|
|
234
|
+
const stream = new ReadableStream({
|
|
235
|
+
async start(controller) {
|
|
236
|
+
try {
|
|
237
|
+
for await (const row of docstore.index_scan(args.indexId, args.tabletId, args.readTimestamp, args.interval, args.order)) {
|
|
238
|
+
controller.enqueue(serialize(row) + "\n");
|
|
239
|
+
}
|
|
240
|
+
controller.close();
|
|
241
|
+
}
|
|
242
|
+
catch (e) {
|
|
243
|
+
controller.error(e);
|
|
244
|
+
}
|
|
245
|
+
},
|
|
246
|
+
});
|
|
247
|
+
const encoder = new TextEncoder();
|
|
248
|
+
const encodedStream = stream.pipeThrough(new TransformStream({
|
|
249
|
+
transform(chunk, controller) {
|
|
250
|
+
controller.enqueue(encoder.encode(chunk));
|
|
251
|
+
},
|
|
252
|
+
}));
|
|
253
|
+
return new Response(encodedStream, {
|
|
254
|
+
headers: { "Content-Type": "application/x-ndjson" },
|
|
255
|
+
});
|
|
256
|
+
}
|
|
257
|
+
case "load_documents": {
|
|
258
|
+
const stream = new ReadableStream({
|
|
259
|
+
async start(controller) {
|
|
260
|
+
try {
|
|
261
|
+
for await (const row of docstore.load_documents(args.range, args.order)) {
|
|
262
|
+
controller.enqueue(serialize(row) + "\n");
|
|
263
|
+
}
|
|
264
|
+
controller.close();
|
|
265
|
+
}
|
|
266
|
+
catch (e) {
|
|
267
|
+
controller.error(e);
|
|
268
|
+
}
|
|
269
|
+
},
|
|
270
|
+
});
|
|
271
|
+
const encoder = new TextEncoder();
|
|
272
|
+
const encodedStream = stream.pipeThrough(new TransformStream({
|
|
273
|
+
transform(chunk, controller) {
|
|
274
|
+
controller.enqueue(encoder.encode(chunk));
|
|
275
|
+
},
|
|
276
|
+
}));
|
|
277
|
+
return new Response(encodedStream, {
|
|
278
|
+
headers: { "Content-Type": "application/x-ndjson" },
|
|
279
|
+
});
|
|
280
|
+
}
|
|
281
|
+
case "getGlobal": {
|
|
282
|
+
const result = await docstore.getGlobal(args.key);
|
|
283
|
+
return new Response(serialize(result), {
|
|
284
|
+
headers: { "Content-Type": "application/json" },
|
|
285
|
+
});
|
|
286
|
+
}
|
|
287
|
+
case "writeGlobal":
|
|
288
|
+
await docstore.writeGlobal(args.key, args.value);
|
|
289
|
+
return new Response(null, { status: 200 });
|
|
290
|
+
case "previous_revisions": {
|
|
291
|
+
const result = await docstore.previous_revisions(args.queries);
|
|
292
|
+
return new Response(serialize(result), {
|
|
293
|
+
headers: { "Content-Type": "application/json" },
|
|
294
|
+
});
|
|
295
|
+
}
|
|
296
|
+
case "previous_revisions_of_documents": {
|
|
297
|
+
const result = await docstore.previous_revisions_of_documents(args.queries);
|
|
298
|
+
return new Response(serialize(result), {
|
|
299
|
+
headers: { "Content-Type": "application/json" },
|
|
300
|
+
});
|
|
301
|
+
}
|
|
302
|
+
case "get": {
|
|
303
|
+
const result = await docstore.get(args.id, args.readTimestamp);
|
|
304
|
+
return new Response(serialize(result), {
|
|
305
|
+
headers: { "Content-Type": "application/json" },
|
|
306
|
+
});
|
|
307
|
+
}
|
|
308
|
+
case "count": {
|
|
309
|
+
const result = await docstore.count(args.table);
|
|
310
|
+
return new Response(serialize(result), {
|
|
311
|
+
headers: { "Content-Type": "application/json" },
|
|
312
|
+
});
|
|
313
|
+
}
|
|
314
|
+
case "scan": {
|
|
315
|
+
const result = await docstore.scan(args.table, args.readTimestamp);
|
|
316
|
+
return new Response(serialize(result), {
|
|
317
|
+
headers: { "Content-Type": "application/json" },
|
|
318
|
+
});
|
|
319
|
+
}
|
|
320
|
+
default:
|
|
321
|
+
return new Response("Not Found", { status: 404 });
|
|
322
|
+
}
|
|
323
|
+
}
|
|
324
|
+
catch (e) {
|
|
325
|
+
return new Response(e.stack || e.toString(), { status: 500 });
|
|
326
|
+
}
|
|
327
|
+
}
|
|
@@ -0,0 +1,38 @@
|
|
|
1
|
+
import type { SerializedKeyRange } from "@concavejs/core/queryengine";
/** Minimal Env shape needed by the HTTP API handler */
interface Env {
    CONCAVE_DO: DurableObjectNamespace;
    SYNC_DO: DurableObjectNamespace;
    /** Optional queue binding for dispatching sync notifications out-of-band. */
    SYNC_NOTIFY_QUEUE?: QueueLike;
}
/** Structural stand-in for a queue producer binding (only `send` is required). */
type QueueLike = {
    send(message: unknown): Promise<void>;
};
/** Overrides for how a logical instance maps to physical DO/sync targets. */
export interface HttpApiRoutingTargets {
    /** Physical ConcaveDO name for execution traffic. Defaults to the logical `instance`. */
    concaveDoName?: string;
    /** Alias for non-DO runtimes to describe the write execution target identifier. */
    concaveTargetName?: string;
    /**
     * Physical SyncDO names to notify after writes.
     * Defaults to `[instance]`.
     */
    syncDoNames?: string[];
    /** Alias for non-DO runtimes to describe sync invalidation targets. */
    syncTargetNames?: string[];
    /**
     * Optional custom write dispatcher.
     * When set, it is responsible for fanning out invalidations (queue, stream, etc.).
     */
    dispatchSyncWrites?: (payload: SyncWriteDispatchPayload) => Promise<void>;
}
/** Payload handed to `dispatchSyncWrites` after a write completes. */
export interface SyncWriteDispatchPayload {
    /** Logical instance name the write was executed against. */
    logicalInstance: string;
    projectId?: string;
    /** Physical sync targets whose subscriptions should be invalidated. */
    syncTargets: string[];
    writtenRanges?: SerializedKeyRange[];
    writtenTables?: string[];
    /** Commit timestamp as a decimal string (bigint is not JSON-serializable). */
    commitTimestamp?: string;
}
/** Entry point: handles one HTTP API request for `instance`, honoring optional routing overrides. */
export declare function handleHttpApiRequest(request: Request, env: Env, ctx: ExecutionContext, instance?: string, routingTargets?: HttpApiRoutingTargets): Promise<Response>;
export {};
|