@echofiles/echo-pdf 0.4.1 → 0.4.3

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (72)
  1. package/README.md +302 -11
  2. package/bin/echo-pdf.js +176 -8
  3. package/bin/lib/http.js +26 -1
  4. package/dist/agent-defaults.d.ts +3 -0
  5. package/dist/agent-defaults.js +18 -0
  6. package/dist/auth.d.ts +18 -0
  7. package/dist/auth.js +36 -0
  8. package/dist/core/index.d.ts +50 -0
  9. package/dist/core/index.js +7 -0
  10. package/dist/file-ops.d.ts +11 -0
  11. package/dist/file-ops.js +36 -0
  12. package/dist/file-store-do.d.ts +36 -0
  13. package/dist/file-store-do.js +298 -0
  14. package/dist/file-utils.d.ts +6 -0
  15. package/dist/file-utils.js +36 -0
  16. package/dist/http-error.d.ts +9 -0
  17. package/dist/http-error.js +14 -0
  18. package/dist/index.d.ts +1 -0
  19. package/dist/index.js +1 -0
  20. package/dist/local/index.d.ts +135 -0
  21. package/dist/local/index.js +555 -0
  22. package/dist/mcp-server.d.ts +3 -0
  23. package/dist/mcp-server.js +124 -0
  24. package/dist/node/pdfium-local.d.ts +8 -0
  25. package/dist/node/pdfium-local.js +147 -0
  26. package/dist/node/semantic-local.d.ts +16 -0
  27. package/dist/node/semantic-local.js +113 -0
  28. package/dist/pdf-agent.d.ts +18 -0
  29. package/dist/pdf-agent.js +217 -0
  30. package/dist/pdf-config.d.ts +4 -0
  31. package/dist/pdf-config.js +140 -0
  32. package/dist/pdf-storage.d.ts +8 -0
  33. package/dist/pdf-storage.js +86 -0
  34. package/dist/pdf-types.d.ts +83 -0
  35. package/dist/pdf-types.js +1 -0
  36. package/dist/pdfium-engine.d.ts +9 -0
  37. package/dist/pdfium-engine.js +180 -0
  38. package/dist/provider-client.d.ts +20 -0
  39. package/dist/provider-client.js +173 -0
  40. package/dist/provider-keys.d.ts +10 -0
  41. package/dist/provider-keys.js +27 -0
  42. package/dist/r2-file-store.d.ts +20 -0
  43. package/dist/r2-file-store.js +176 -0
  44. package/dist/response-schema.d.ts +15 -0
  45. package/dist/response-schema.js +159 -0
  46. package/dist/tool-registry.d.ts +16 -0
  47. package/dist/tool-registry.js +175 -0
  48. package/dist/types.d.ts +91 -0
  49. package/dist/types.js +1 -0
  50. package/dist/worker.d.ts +7 -0
  51. package/dist/worker.js +386 -0
  52. package/package.json +34 -5
  53. package/wrangler.toml +1 -1
  54. package/src/agent-defaults.ts +0 -25
  55. package/src/file-ops.ts +0 -50
  56. package/src/file-store-do.ts +0 -349
  57. package/src/file-utils.ts +0 -43
  58. package/src/http-error.ts +0 -21
  59. package/src/index.ts +0 -415
  60. package/src/mcp-server.ts +0 -171
  61. package/src/pdf-agent.ts +0 -252
  62. package/src/pdf-config.ts +0 -143
  63. package/src/pdf-storage.ts +0 -109
  64. package/src/pdf-types.ts +0 -85
  65. package/src/pdfium-engine.ts +0 -207
  66. package/src/provider-client.ts +0 -176
  67. package/src/provider-keys.ts +0 -44
  68. package/src/r2-file-store.ts +0 -195
  69. package/src/response-schema.ts +0 -182
  70. package/src/tool-registry.ts +0 -203
  71. package/src/types.ts +0 -40
  72. package/src/wasm.d.ts +0 -4
@@ -0,0 +1,175 @@
1
+ import { normalizeReturnMode } from "./file-utils.js";
2
+ import { runFileOp } from "./file-ops.js";
3
+ import { runPdfAgent } from "./pdf-agent.js";
4
// Coerce an unknown value into an array of positive integers; non-arrays yield [].
const asNumberArray = (value) => {
    if (!Array.isArray(value)) {
        return [];
    }
    const result = [];
    for (const entry of value) {
        const num = Number(entry);
        if (Number.isInteger(num) && num > 0) {
            result.push(num);
        }
    }
    return result;
};
// Narrow an unknown value to a plain (non-array, non-null) object, defaulting to {}.
const asObject = (value) => {
    const isPlainObject = typeof value === "object" && value !== null && !Array.isArray(value);
    return isPlainObject ? value : {};
};
// Read a property as a non-empty trimmed string, or undefined when absent/blank.
const readString = (obj, key) => {
    const raw = obj[key];
    if (typeof raw !== "string") {
        return undefined;
    }
    const trimmed = raw.trim();
    return trimmed.length > 0 ? trimmed : undefined;
};
12
// JSON-schema fragment shared by every tool that accepts a PDF source.
const pdfSourceSchemaProperties = {
    fileId: { type: "string" },
    url: { type: "string" },
    base64: { type: "string" },
    filename: { type: "string" },
    pages: { type: "array", items: { type: "integer" } },
    renderScale: { type: "number" },
};
// Extract the PDF-source fields common to every pdf_* tool call.
const readPdfSourceArgs = (args) => ({
    fileId: readString(args, "fileId"),
    url: readString(args, "url"),
    base64: readString(args, "base64"),
    filename: readString(args, "filename"),
    pages: asNumberArray(args.pages),
    renderScale: typeof args.renderScale === "number" ? args.renderScale : undefined,
});
// Run a PDF request through the shared agent with the caller's store/trace context.
const runPdfTool = (ctx, req) => runPdfAgent(ctx.config, ctx.env, req, {
    fileStore: ctx.fileStore,
    trace: ctx.trace,
});
// Factory for the model-backed tools (OCR and table recognition): apart from
// name/operation they have identical schemas and request shapes.
const makeModelBackedPdfTool = ({ name, description, operation, toolName }) => ({
    schema: {
        name,
        description,
        inputSchema: {
            type: "object",
            properties: {
                ...pdfSourceSchemaProperties,
                provider: { type: "string" },
                model: { type: "string" },
                prompt: { type: "string" },
            },
            required: ["pages"],
        },
        source: { kind: "local", toolName },
    },
    run: async (ctx, args) => runPdfTool(ctx, {
        operation,
        ...readPdfSourceArgs(args),
        provider: readString(args, "provider"),
        model: readString(args, "model") ?? "",
        prompt: readString(args, "prompt"),
        providerApiKeys: ctx.providerApiKeys,
        returnMode: "inline",
    }),
});
// Registry of all locally-served tools: three PDF operations plus file-store ops.
const toolDefinitions = [
    {
        schema: {
            name: "pdf_extract_pages",
            description: "Render specific PDF pages to image and return inline/file_id/url mode.",
            inputSchema: {
                type: "object",
                properties: {
                    ...pdfSourceSchemaProperties,
                    returnMode: { type: "string", enum: ["inline", "file_id", "url"] },
                },
                required: ["pages"],
            },
            source: { kind: "local", toolName: "pdf.extract_pages" },
        },
        // Pure rendering needs no LLM; provider/model are placeholder values.
        run: async (ctx, args) => runPdfTool(ctx, {
            operation: "extract_pages",
            ...readPdfSourceArgs(args),
            provider: undefined,
            model: "not-required",
            providerApiKeys: ctx.providerApiKeys,
            returnMode: normalizeReturnMode(args.returnMode),
        }),
    },
    makeModelBackedPdfTool({
        name: "pdf_ocr_pages",
        description: "OCR specific PDF pages using configured multimodal model.",
        operation: "ocr_pages",
        toolName: "pdf.ocr_pages",
    }),
    makeModelBackedPdfTool({
        name: "pdf_tables_to_latex",
        description: "Recognize tables from pages and return LaTeX tabular output.",
        operation: "tables_to_latex",
        toolName: "pdf.tables_to_latex",
    }),
    {
        schema: {
            name: "file_ops",
            description: "Basic file operations: list/read/delete/put for runtime file store.",
            inputSchema: {
                type: "object",
                properties: {
                    op: { type: "string", enum: ["list", "read", "delete", "put"] },
                    fileId: { type: "string" },
                    includeBase64: { type: "boolean" },
                    text: { type: "string" },
                    filename: { type: "string" },
                    mimeType: { type: "string" },
                    base64: { type: "string" },
                    returnMode: { type: "string", enum: ["inline", "file_id", "url"] },
                },
                required: ["op"],
            },
            source: { kind: "local", toolName: "file.ops" },
        },
        run: async (ctx, args) => runFileOp(ctx.fileStore, {
            op: readString(args, "op") ?? "list",
            fileId: readString(args, "fileId"),
            includeBase64: Boolean(args.includeBase64),
            text: readString(args, "text"),
            filename: readString(args, "filename"),
            mimeType: readString(args, "mimeType"),
            base64: readString(args, "base64"),
            returnMode: normalizeReturnMode(args.returnMode),
        }),
    },
];
168
/** Expose the JSON schema of every registered tool. */
export const listToolSchemas = () => toolDefinitions.map(({ schema }) => schema);
/**
 * Look up a tool by schema name and invoke it with normalized arguments.
 * Throws when no tool with the given name is registered.
 */
export const callTool = async (name, args, ctx) => {
    for (const definition of toolDefinitions) {
        if (definition.schema.name === name) {
            return definition.run(ctx, asObject(args));
        }
    }
    throw new Error(`Unknown tool: ${name}`);
};
@@ -0,0 +1,91 @@
1
/** Primitive JSON leaf values. */
export type JsonPrimitive = string | number | boolean | null;
/** Any JSON-serializable value. */
export type JsonValue = JsonPrimitive | JsonObject | JsonArray;
export type JsonArray = JsonValue[];
export interface JsonObject {
    [key: string]: JsonValue;
}
/** Upstream model providers supported by the service. */
export type ProviderType = "openai" | "openrouter" | "vercel-ai-gateway";
/** How tool output payloads are delivered to the caller. */
export type ReturnMode = "inline" | "file_id" | "url";
/** Minimal fetch-capable binding (e.g. a static-assets or service binding). */
export interface Fetcher {
    fetch(input: RequestInfo | URL, init?: RequestInit): Promise<Response>;
}
// NOTE(review): the DurableObject*/R2* interfaces below appear to be a local
// structural subset mirroring the Cloudflare Workers runtime types, declared
// here so the package compiles standalone — confirm against workers-types.
export interface DurableObjectStub {
    fetch(input: RequestInfo | URL, init?: RequestInit): Promise<Response>;
}
/** Opaque Durable Object identifier. */
export interface DurableObjectId {
}
export interface DurableObjectNamespace {
    idFromName(name: string): DurableObjectId;
    get(id: DurableObjectId): DurableObjectStub;
}
/** Key/value storage exposed to a Durable Object instance. */
export interface DurableObjectStorage {
    get<T>(key: string): Promise<T | undefined>;
    put<T>(key: string, value: T): Promise<void>;
    list<T>(options?: {
        prefix?: string;
    }): Promise<Map<string, T>>;
    delete(key: string): Promise<boolean>;
}
export interface DurableObjectState {
    storage: DurableObjectStorage;
}
/** A stored R2 object plus its body accessor. */
export interface R2ObjectBody {
    key: string;
    size: number;
    uploaded: Date;
    httpMetadata?: {
        contentType?: string;
    };
    customMetadata?: Record<string, string>;
    arrayBuffer(): Promise<ArrayBuffer>;
}
/** Subset of the R2 bucket API used by the file store. */
export interface R2Bucket {
    put(key: string, value: ArrayBuffer | ArrayBufferView, options?: {
        httpMetadata?: {
            contentType?: string;
        };
        customMetadata?: Record<string, string>;
    }): Promise<unknown>;
    get(key: string): Promise<R2ObjectBody | null>;
    delete(key: string | string[]): Promise<void>;
    list(options?: {
        prefix?: string;
        limit?: number;
        cursor?: string;
    }): Promise<{
        objects: R2ObjectBody[];
        truncated: boolean;
        cursor?: string;
    }>;
}
/** Worker environment bindings; unknown keys are treated as string vars or bindings. */
export interface Env {
    readonly ECHO_PDF_CONFIG_JSON?: string;
    readonly ASSETS?: Fetcher;
    readonly FILE_STORE_BUCKET?: R2Bucket;
    readonly FILE_STORE_DO?: DurableObjectNamespace;
    readonly [key: string]: string | Fetcher | DurableObjectNamespace | R2Bucket | undefined;
}
/** Execution context passed to the fetch handler (background work, exception passthrough). */
export interface WorkerExecutionContext {
    waitUntil(promise: Promise<unknown>): void;
    passThroughOnException?(): void;
}
/** Metadata describing a stored file (without its bytes). */
export interface StoredFileMeta {
    readonly id: string;
    readonly filename: string;
    readonly mimeType: string;
    readonly sizeBytes: number;
    readonly createdAt: string;
}
/** A stored file including its raw contents. */
export interface StoredFileRecord extends StoredFileMeta {
    readonly bytes: Uint8Array;
}
/** Abstract file store backing the runtime (in-memory, DO, or R2 implementations). */
export interface FileStore {
    put(input: {
        readonly filename: string;
        readonly mimeType: string;
        readonly bytes: Uint8Array;
    }): Promise<StoredFileMeta>;
    get(fileId: string): Promise<StoredFileRecord | null>;
    list(): Promise<ReadonlyArray<StoredFileMeta>>;
    delete(fileId: string): Promise<boolean>;
}
package/dist/types.js ADDED
@@ -0,0 +1 @@
1
// Runtime stub for the type-only module; keeps the compiled file a valid ES module.
export {};
@@ -0,0 +1,7 @@
1
import { FileStoreDO } from "./file-store-do.js";
import type { Env, WorkerExecutionContext } from "./types.js";
/** Worker entry point: handles all HTTP routing for the echo-pdf service. */
declare const _default: {
    fetch(request: Request, env: Env, ctx: WorkerExecutionContext): Promise<Response>;
};
export default _default;
// Durable Object class re-exported so the runtime can bind it.
export { FileStoreDO };
package/dist/worker.js ADDED
@@ -0,0 +1,386 @@
1
+ import { normalizeReturnMode } from "./file-utils.js";
2
+ import { FileStoreDO } from "./file-store-do.js";
3
+ import { resolveModelForProvider, resolveProviderAlias } from "./agent-defaults.js";
4
+ import { checkHeaderAuth } from "./auth.js";
5
+ import { handleMcpRequest } from "./mcp-server.js";
6
+ import { loadEchoPdfConfig } from "./pdf-config.js";
7
+ import { getRuntimeFileStore } from "./pdf-storage.js";
8
+ import { listProviderModels } from "./provider-client.js";
9
+ import { buildToolOutputEnvelope } from "./response-schema.js";
10
+ import { callTool, listToolSchemas } from "./tool-registry.js";
11
// Build a JSON response; all API responses are marked no-store.
const json = (data, status = 200) => {
    const headers = {
        "Content-Type": "application/json; charset=utf-8",
        "Cache-Control": "no-store",
    };
    return new Response(JSON.stringify(data), { status, headers });
};
// Extract a human-readable message from any thrown value.
const toError = (error) => (error instanceof Error ? error.message : String(error));
// Numeric HTTP status carried on an error-like object, or null.
const errorStatus = (error) => {
    const candidate = error?.status;
    if (typeof candidate === "number" && Number.isFinite(candidate)) {
        return candidate;
    }
    return null;
};
// Non-empty string error code carried on an error-like object, or null.
const errorCode = (error) => {
    const candidate = error?.code;
    return typeof candidate === "string" && candidate.length > 0 ? candidate : null;
};
// Structured details attached to an error, if any.
const errorDetails = (error) => error?.details;
// Convert an arbitrary thrown value into a JSON error response, honoring
// any status the error carries and falling back otherwise.
const jsonError = (error, fallbackStatus = 500) => {
    const status = errorStatus(error) ?? fallbackStatus;
    return json({ error: toError(error), code: errorCode(error), details: errorDetails(error) }, status);
};
34
// Best-effort parse of a request body as a JSON object; any failure or
// non-object payload (array, primitive) yields an empty object.
const readJson = async (request) => {
    try {
        const parsed = await request.json();
        const isPlainObject = typeof parsed === "object" && parsed !== null && !Array.isArray(parsed);
        return isPlainObject ? parsed : {};
    }
    catch {
        return {};
    }
};
// Narrow an unknown value to a plain (non-array, non-null) object, defaulting to {}.
const asObj = (value) => {
    if (typeof value !== "object" || value === null || Array.isArray(value)) {
        return {};
    }
    return value;
};
49
// Prefer the configured public base URL; otherwise fall back to the request URL.
const resolvePublicBaseUrl = (request, configured) => {
    if (typeof configured === "string" && configured.length > 0) {
        return configured;
    }
    return request.url;
};
// Strip header-breaking (CR/LF/quote) and non-printable-ASCII characters from
// a Content-Disposition filename; empty results fall back to "download.bin".
const sanitizeDownloadFilename = (filename) => {
    let cleaned = filename.replace(/[\r\n"]/g, "");
    cleaned = cleaned.replace(/[^\x20-\x7E]+/g, "").trim();
    return cleaned.length > 0 ? cleaned : "download.bin";
};
57
// Single shared encoder: encodeSse runs once per streamed event, so avoid
// allocating a fresh TextEncoder on every call (the encoder is stateless).
const sseEncoder = new TextEncoder();
// Wrap a readable stream in a Server-Sent-Events response (no caching, keep-alive).
const sseResponse = (stream) => new Response(stream, {
    headers: {
        "Content-Type": "text/event-stream; charset=utf-8",
        "Cache-Control": "no-store",
        Connection: "keep-alive",
    },
});
// Encode one SSE frame: "event: <name>\ndata: <json>\n\n".
const encodeSse = (event, data) => sseEncoder.encode(`event: ${event}\ndata: ${JSON.stringify(data)}\n\n`);
68
// Whitelist of PDF operations accepted by the agent endpoints.
const isValidOperation = (value) => ["extract_pages", "ocr_pages", "tables_to_latex"].includes(value);
// Normalize a raw request body into a fully-populated PDF operation request;
// unknown operations default to extract_pages, missing provider falls back
// to the configured default.
const toPdfOperation = (input, defaultProvider) => {
    const str = (value) => (typeof value === "string" ? value : undefined);
    const hasRuntimeKeys = typeof input.providerApiKeys === "object" && input.providerApiKeys !== null;
    return {
        operation: isValidOperation(input.operation) ? input.operation : "extract_pages",
        fileId: str(input.fileId),
        url: str(input.url),
        base64: str(input.base64),
        filename: str(input.filename),
        pages: Array.isArray(input.pages) ? input.pages.map((v) => Number(v)) : [],
        renderScale: typeof input.renderScale === "number" ? input.renderScale : undefined,
        provider: typeof input.provider === "string" ? input.provider : defaultProvider,
        model: typeof input.model === "string" ? input.model : "",
        providerApiKeys: hasRuntimeKeys ? input.providerApiKeys : undefined,
        returnMode: normalizeReturnMode(input.returnMode),
        prompt: str(input.prompt),
    };
};
// Maps agent operations onto their registered tool names.
const toolNameByOperation = {
    extract_pages: "pdf_extract_pages",
    ocr_pages: "pdf_ocr_pages",
    tables_to_latex: "pdf_tables_to_latex",
};
90
// Project a normalized PDF request back into tool-call arguments, keeping
// only the fields the caller actually supplied (truthy strings / numbers).
const operationArgsFromRequest = (request) => {
    const args = { pages: request.pages };
    for (const key of ["fileId", "url", "base64", "filename"]) {
        if (request[key]) {
            args[key] = request[key];
        }
    }
    if (typeof request.renderScale === "number") {
        args.renderScale = request.renderScale;
    }
    if (request.returnMode) {
        args.returnMode = request.returnMode;
    }
    for (const key of ["provider", "model", "prompt"]) {
        if (request[key]) {
            args[key] = request[key];
        }
    }
    return args;
};
114
// Guard compute endpoints with the shared header-auth check; a configured
// secret is mandatory here (allowMissingSecret: false).
const checkComputeAuth = (request, env, config) => {
    const computeAuth = config.service.computeAuth;
    return checkHeaderAuth(request, env, {
        authHeader: computeAuth?.authHeader,
        authEnv: computeAuth?.authEnv,
        allowMissingSecret: false,
        misconfiguredCode: "COMPUTE_AUTH_MISCONFIGURED",
        unauthorizedCode: "UNAUTHORIZED",
        contextName: "compute endpoint",
    });
};
122
export default {
    /**
     * Worker entry point. Routes every request by method + pathname:
     * health/config introspection, tool catalog/calls, provider model listing,
     * agent run (JSON) and stream (SSE), file-store CRUD, MCP, static assets.
     * Unmatched requests get a 404 with a route map.
     */
    async fetch(request, env, ctx) {
        const url = new URL(request.url);
        // Config and file store are resolved per-request from the environment.
        const config = loadEchoPdfConfig(env);
        const runtimeStore = getRuntimeFileStore(env, config);
        const fileStore = runtimeStore.store;
        // Liveness probe — unauthenticated.
        if (request.method === "GET" && url.pathname === "/health") {
            return json({ ok: true, service: config.service.name, now: new Date().toISOString() });
        }
        // Public service/agent/provider introspection (provider API keys are
        // not exposed — only aliases and types).
        if (request.method === "GET" && url.pathname === "/config") {
            return json({
                service: config.service,
                agent: config.agent,
                providers: Object.entries(config.providers).map(([alias, provider]) => ({ alias, type: provider.type })),
                capabilities: {
                    toolCatalogEndpoint: "/tools/catalog",
                    toolCallEndpoint: "/tools/call",
                    fileOpsEndpoint: "/api/files/op",
                    fileUploadEndpoint: "/api/files/upload",
                    fileStatsEndpoint: "/api/files/stats",
                    fileCleanupEndpoint: "/api/files/cleanup",
                    supportedReturnModes: ["inline", "file_id", "url"],
                },
                mcp: {
                    serverName: config.mcp.serverName,
                    version: config.mcp.version,
                    authHeader: config.mcp.authHeader ?? null,
                },
                fileGet: {
                    authHeader: config.service.fileGet?.authHeader ?? null,
                    cacheTtlSeconds: config.service.fileGet?.cacheTtlSeconds ?? 300,
                },
            });
        }
        // Tool schema catalog — unauthenticated.
        if (request.method === "GET" && url.pathname === "/tools/catalog") {
            return json({ tools: listToolSchemas() });
        }
        // Direct tool invocation — requires compute auth.
        if (request.method === "POST" && url.pathname === "/tools/call") {
            const auth = checkComputeAuth(request, env, config);
            if (!auth.ok)
                return json({ error: auth.message, code: auth.code }, auth.status);
            const body = await readJson(request);
            const name = typeof body.name === "string" ? body.name : "";
            if (!name)
                return json({ error: "Missing required field: name" }, 400);
            try {
                const args = asObj(body.arguments);
                // Resolve provider/model defaults from body-level hints, then
                // inject them into model-backed tools only when the caller did
                // not set them in the tool arguments themselves.
                const preferredProvider = resolveProviderAlias(config, typeof body.provider === "string" ? body.provider : undefined);
                const preferredModel = resolveModelForProvider(config, preferredProvider, typeof body.model === "string" ? body.model : undefined);
                if (name === "pdf_ocr_pages" || name === "pdf_tables_to_latex") {
                    if (typeof args.provider !== "string" || args.provider.length === 0) {
                        args.provider = preferredProvider;
                    }
                    if (typeof args.model !== "string" || args.model.length === 0) {
                        args.model = preferredModel;
                    }
                }
                const result = await callTool(name, args, {
                    config,
                    env,
                    fileStore,
                    providerApiKeys: typeof body.providerApiKeys === "object" && body.providerApiKeys !== null
                        ? body.providerApiKeys
                        : undefined,
                });
                // Wrap the raw tool result in the standard output envelope,
                // resolving file URLs against the public base URL.
                return json(buildToolOutputEnvelope(result, resolvePublicBaseUrl(request, config.service.publicBaseUrl)));
            }
            catch (error) {
                return jsonError(error, 500);
            }
        }
        // List models available from a provider — requires compute auth.
        if (request.method === "POST" && url.pathname === "/providers/models") {
            const auth = checkComputeAuth(request, env, config);
            if (!auth.ok)
                return json({ error: auth.message, code: auth.code }, auth.status);
            const body = await readJson(request);
            const provider = resolveProviderAlias(config, typeof body.provider === "string" ? body.provider : undefined);
            const runtimeKeys = typeof body.providerApiKeys === "object" && body.providerApiKeys !== null
                ? body.providerApiKeys
                : undefined;
            try {
                const models = await listProviderModels(config, env, provider, runtimeKeys);
                return json({ provider, models });
            }
            catch (error) {
                return jsonError(error, 500);
            }
        }
        // Run a PDF agent operation synchronously — requires compute auth.
        if (request.method === "POST" && url.pathname === "/api/agent/run") {
            const auth = checkComputeAuth(request, env, config);
            if (!auth.ok)
                return json({ error: auth.message, code: auth.code }, auth.status);
            const body = await readJson(request);
            // Reject an explicitly-supplied invalid operation up front
            // (an absent operation falls back to extract_pages below).
            if (Object.hasOwn(body, "operation") && !isValidOperation(body.operation)) {
                return json({ error: "Invalid operation. Must be one of: extract_pages, ocr_pages, tables_to_latex" }, 400);
            }
            const requestPayload = toPdfOperation(body, config.agent.defaultProvider);
            try {
                const result = await callTool(toolNameByOperation[requestPayload.operation], operationArgsFromRequest(requestPayload), {
                    config,
                    env,
                    fileStore,
                    providerApiKeys: requestPayload.providerApiKeys,
                });
                return json(result);
            }
            catch (error) {
                return jsonError(error, 500);
            }
        }
        // Run a PDF agent operation with progress streamed as SSE — requires compute auth.
        if (request.method === "POST" && url.pathname === "/api/agent/stream") {
            const auth = checkComputeAuth(request, env, config);
            if (!auth.ok)
                return json({ error: auth.message, code: auth.code }, auth.status);
            const body = await readJson(request);
            if (Object.hasOwn(body, "operation") && !isValidOperation(body.operation)) {
                return json({ error: "Invalid operation. Must be one of: extract_pages, ocr_pages, tables_to_latex" }, 400);
            }
            const requestPayload = toPdfOperation(body, config.agent.defaultProvider);
            const stream = new TransformStream();
            const writer = stream.writable.getWriter();
            // Serialize writes through a promise chain so events are emitted
            // in order; write failures (client disconnect) are swallowed.
            let queue = Promise.resolve();
            const send = (event, data) => {
                queue = queue.then(() => writer.write(encodeSse(event, data))).catch(() => undefined);
            };
            const run = async () => {
                try {
                    send("meta", { kind: "meta", startedAt: new Date().toISOString(), streaming: true });
                    send("io", { kind: "io", direction: "input", content: requestPayload });
                    const result = await callTool(toolNameByOperation[requestPayload.operation], operationArgsFromRequest(requestPayload), {
                        config,
                        env,
                        fileStore,
                        providerApiKeys: requestPayload.providerApiKeys,
                        // Intermediate trace events are forwarded as "step" frames.
                        trace: (event) => send("step", event),
                    });
                    send("io", { kind: "io", direction: "output", content: "operation completed" });
                    send("result", { kind: "result", output: result });
                    send("done", { ok: true });
                }
                catch (error) {
                    send("error", { kind: "error", message: toError(error) });
                    send("done", { ok: false });
                }
                finally {
                    // Flush queued writes before closing the stream.
                    await queue;
                    await writer.close();
                }
            };
            // Keep the worker alive past the response return while streaming.
            ctx.waitUntil(run());
            return sseResponse(stream.readable);
        }
        // File-store operations (list/read/delete/put) via the file_ops tool.
        if (request.method === "POST" && url.pathname === "/api/files/op") {
            const body = await readJson(request);
            try {
                const result = await callTool("file_ops", asObj(body), {
                    config,
                    env,
                    fileStore,
                });
                return json(result);
            }
            catch (error) {
                return jsonError(error, 500);
            }
        }
        // Multipart upload into the file store; field name must be "file".
        if (request.method === "POST" && url.pathname === "/api/files/upload") {
            try {
                const formData = await request.formData();
                const file = formData.get("file");
                // Duck-type check: a File/Blob entry exposes arrayBuffer().
                if (!file || typeof file.arrayBuffer !== "function") {
                    return json({ error: "Missing file field: file" }, 400);
                }
                const bytes = new Uint8Array(await file.arrayBuffer());
                const stored = await fileStore.put({
                    filename: file.name || `upload-${Date.now()}.pdf`,
                    mimeType: file.type || "application/pdf",
                    bytes,
                });
                return json({ file: stored }, 200);
            }
            catch (error) {
                return jsonError(error, 500);
            }
        }
        // Download a stored file by id; auth may be disabled via
        // ECHO_PDF_ALLOW_MISSING_AUTH_SECRET=1 (allowMissingSecret).
        if (request.method === "GET" && url.pathname === "/api/files/get") {
            const fileGetConfig = config.service.fileGet ?? {};
            const auth = checkHeaderAuth(request, env, {
                authHeader: fileGetConfig.authHeader,
                authEnv: fileGetConfig.authEnv,
                allowMissingSecret: env.ECHO_PDF_ALLOW_MISSING_AUTH_SECRET === "1",
                misconfiguredCode: "AUTH_MISCONFIGURED",
                unauthorizedCode: "UNAUTHORIZED",
                contextName: "file get",
            });
            if (!auth.ok) {
                return json({ error: auth.message, code: auth.code }, auth.status);
            }
            const fileId = url.searchParams.get("fileId") || "";
            if (!fileId)
                return json({ error: "Missing fileId" }, 400);
            const file = await fileStore.get(fileId);
            if (!file)
                return json({ error: "File not found" }, 404);
            // ?download=1 forces an attachment Content-Disposition.
            const download = url.searchParams.get("download") === "1";
            const headers = new Headers();
            headers.set("Content-Type", file.mimeType);
            // Positive TTL enables shared caching; otherwise no-store.
            const cacheTtl = Number(fileGetConfig.cacheTtlSeconds ?? 300);
            const cacheControl = cacheTtl > 0
                ? `public, max-age=${Math.floor(cacheTtl)}, s-maxage=${Math.floor(cacheTtl)}`
                : "no-store";
            headers.set("Cache-Control", cacheControl);
            if (download) {
                headers.set("Content-Disposition", `attachment; filename=\"${sanitizeDownloadFilename(file.filename)}\"`);
            }
            return new Response(file.bytes, { status: 200, headers });
        }
        // File-store statistics — delegated to the runtime store implementation.
        if (request.method === "GET" && url.pathname === "/api/files/stats") {
            try {
                return json(await runtimeStore.stats(), 200);
            }
            catch (error) {
                return json({ error: toError(error) }, 500);
            }
        }
        // Trigger file-store cleanup — delegated to the runtime store implementation.
        if (request.method === "POST" && url.pathname === "/api/files/cleanup") {
            try {
                return json(await runtimeStore.cleanup(), 200);
            }
            catch (error) {
                return json({ error: toError(error) }, 500);
            }
        }
        // Model Context Protocol endpoint.
        if (request.method === "POST" && url.pathname === "/mcp") {
            return await handleMcpRequest(request, env, config, fileStore);
        }
        // Static assets, when an ASSETS binding exists; "/" maps to index.html.
        // A 404 from assets falls through to the JSON 404 below.
        if (request.method === "GET" && env.ASSETS) {
            const assetReq = url.pathname === "/"
                ? new Request(new URL("/index.html", url), request)
                : request;
            const asset = await env.ASSETS.fetch(assetReq);
            if (asset.status !== 404)
                return asset;
        }
        // Fallback: 404 with a map of every supported route.
        return json({
            error: "Not found",
            routes: {
                health: "GET /health",
                config: "GET /config",
                toolsCatalog: "GET /tools/catalog",
                toolCall: "POST /tools/call",
                models: "POST /providers/models",
                run: "POST /api/agent/run",
                stream: "POST /api/agent/stream",
                files: "POST /api/files/op",
                fileUpload: "POST /api/files/upload",
                fileGet: "GET /api/files/get?fileId=<id>",
                fileStats: "GET /api/files/stats",
                fileCleanup: "POST /api/files/cleanup",
                mcp: "POST /mcp",
            },
        }, 404);
    },
};
// Durable Object class re-exported so the Workers runtime can bind it.
export { FileStoreDO };