@echofiles/echo-pdf 0.4.3 → 0.6.0

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (55)
  1. package/LICENSE +201 -0
  2. package/README.md +85 -562
  3. package/bin/echo-pdf.js +130 -525
  4. package/dist/file-utils.d.ts +0 -3
  5. package/dist/file-utils.js +0 -18
  6. package/dist/local/document.d.ts +10 -0
  7. package/dist/local/document.js +133 -0
  8. package/dist/local/index.d.ts +3 -135
  9. package/dist/local/index.js +2 -555
  10. package/dist/local/semantic.d.ts +2 -0
  11. package/dist/local/semantic.js +231 -0
  12. package/dist/local/shared.d.ts +50 -0
  13. package/dist/local/shared.js +173 -0
  14. package/dist/local/types.d.ts +183 -0
  15. package/dist/local/types.js +2 -0
  16. package/dist/node/pdfium-local.js +30 -6
  17. package/dist/pdf-config.js +2 -65
  18. package/dist/pdf-types.d.ts +1 -58
  19. package/dist/types.d.ts +1 -87
  20. package/echo-pdf.config.json +1 -21
  21. package/package.json +25 -22
  22. package/bin/lib/http.js +0 -97
  23. package/bin/lib/mcp-stdio.js +0 -99
  24. package/dist/auth.d.ts +0 -18
  25. package/dist/auth.js +0 -36
  26. package/dist/core/index.d.ts +0 -50
  27. package/dist/core/index.js +0 -7
  28. package/dist/file-ops.d.ts +0 -11
  29. package/dist/file-ops.js +0 -36
  30. package/dist/file-store-do.d.ts +0 -36
  31. package/dist/file-store-do.js +0 -298
  32. package/dist/http-error.d.ts +0 -9
  33. package/dist/http-error.js +0 -14
  34. package/dist/index.d.ts +0 -1
  35. package/dist/index.js +0 -1
  36. package/dist/mcp-server.d.ts +0 -3
  37. package/dist/mcp-server.js +0 -124
  38. package/dist/node/semantic-local.d.ts +0 -16
  39. package/dist/node/semantic-local.js +0 -113
  40. package/dist/pdf-agent.d.ts +0 -18
  41. package/dist/pdf-agent.js +0 -217
  42. package/dist/pdf-storage.d.ts +0 -8
  43. package/dist/pdf-storage.js +0 -86
  44. package/dist/pdfium-engine.d.ts +0 -9
  45. package/dist/pdfium-engine.js +0 -180
  46. package/dist/r2-file-store.d.ts +0 -20
  47. package/dist/r2-file-store.js +0 -176
  48. package/dist/response-schema.d.ts +0 -15
  49. package/dist/response-schema.js +0 -159
  50. package/dist/tool-registry.d.ts +0 -16
  51. package/dist/tool-registry.js +0 -175
  52. package/dist/worker.d.ts +0 -7
  53. package/dist/worker.js +0 -386
  54. package/scripts/export-fixtures.sh +0 -204
  55. package/wrangler.toml +0 -19
@@ -1,176 +0,0 @@
1
- const PREFIX = "file/";
2
- const toId = (key) => key.startsWith(PREFIX) ? key.slice(PREFIX.length) : key;
3
- const toKey = (id) => `${PREFIX}${id}`;
4
- const parseCreatedAt = (value, fallback) => {
5
- if (typeof value === "string" && value.trim().length > 0) {
6
- const ms = Date.parse(value);
7
- if (Number.isFinite(ms))
8
- return new Date(ms).toISOString();
9
- }
10
- return fallback.toISOString();
11
- };
12
- const isExpired = (createdAtIso, ttlHours) => {
13
- const ms = Date.parse(createdAtIso);
14
- if (!Number.isFinite(ms))
15
- return false;
16
- return Date.now() - ms > ttlHours * 60 * 60 * 1000;
17
- };
18
- export class R2FileStore {
19
- bucket;
20
- policy;
21
- constructor(bucket, policy) {
22
- this.bucket = bucket;
23
- this.policy = policy;
24
- }
25
- async put(input) {
26
- const sizeBytes = input.bytes.byteLength;
27
- if (sizeBytes > this.policy.maxFileBytes) {
28
- const err = new Error(`file too large: ${sizeBytes} bytes exceeds maxFileBytes ${this.policy.maxFileBytes}`);
29
- err.status = 413;
30
- err.code = "FILE_TOO_LARGE";
31
- err.details = { policy: this.policy, sizeBytes };
32
- throw err;
33
- }
34
- await this.cleanupInternal(sizeBytes);
35
- const id = crypto.randomUUID();
36
- const createdAt = new Date().toISOString();
37
- await this.bucket.put(toKey(id), input.bytes, {
38
- httpMetadata: {
39
- contentType: input.mimeType,
40
- },
41
- customMetadata: {
42
- filename: input.filename,
43
- mimeType: input.mimeType,
44
- createdAt,
45
- },
46
- });
47
- return { id, filename: input.filename, mimeType: input.mimeType, sizeBytes, createdAt };
48
- }
49
- async get(fileId) {
50
- const obj = await this.bucket.get(toKey(fileId));
51
- if (!obj)
52
- return null;
53
- const meta = (obj.customMetadata ?? {});
54
- const createdAt = parseCreatedAt(meta.createdAt, obj.uploaded);
55
- const filename = meta.filename ?? fileId;
56
- const mimeType = meta.mimeType ?? obj.httpMetadata?.contentType ?? "application/octet-stream";
57
- const bytes = new Uint8Array(await obj.arrayBuffer());
58
- return {
59
- id: fileId,
60
- filename,
61
- mimeType,
62
- sizeBytes: bytes.byteLength,
63
- createdAt,
64
- bytes,
65
- };
66
- }
67
- async list() {
68
- return await this.listAllFiles();
69
- }
70
- async delete(fileId) {
71
- await this.bucket.delete(toKey(fileId));
72
- return true;
73
- }
74
- async stats() {
75
- const files = await this.listAllFiles();
76
- const totalBytes = files.reduce((sum, file) => sum + file.sizeBytes, 0);
77
- return {
78
- backend: "r2",
79
- policy: this.policy,
80
- stats: {
81
- fileCount: files.length,
82
- totalBytes,
83
- },
84
- };
85
- }
86
- async cleanup() {
87
- const files = await this.listAllFiles();
88
- const expired = files.filter((f) => isExpired(f.createdAt, this.policy.ttlHours));
89
- const active = files.filter((f) => !isExpired(f.createdAt, this.policy.ttlHours));
90
- if (expired.length > 0) {
91
- await this.bucket.delete(expired.map((f) => toKey(f.id)));
92
- }
93
- const evict = this.pickEvictions(active, 0);
94
- if (evict.length > 0) {
95
- await this.bucket.delete(evict.map((f) => toKey(f.id)));
96
- }
97
- const evictIds = new Set(evict.map((f) => f.id));
98
- const after = active.filter((f) => !evictIds.has(f.id));
99
- const totalBytes = after.reduce((sum, file) => sum + file.sizeBytes, 0);
100
- return {
101
- backend: "r2",
102
- policy: this.policy,
103
- deletedExpired: expired.length,
104
- deletedEvicted: evict.length,
105
- stats: {
106
- fileCount: after.length,
107
- totalBytes,
108
- },
109
- };
110
- }
111
- async cleanupInternal(incomingBytes) {
112
- const files = await this.listAllFiles();
113
- const expired = files.filter((f) => isExpired(f.createdAt, this.policy.ttlHours));
114
- const active = files.filter((f) => !isExpired(f.createdAt, this.policy.ttlHours));
115
- if (expired.length > 0) {
116
- await this.bucket.delete(expired.map((f) => toKey(f.id)));
117
- }
118
- const evict = this.pickEvictions(active, incomingBytes);
119
- if (evict.length > 0) {
120
- await this.bucket.delete(evict.map((f) => toKey(f.id)));
121
- }
122
- const evictIds = new Set(evict.map((f) => f.id));
123
- const remaining = active.filter((f) => !evictIds.has(f.id));
124
- const finalTotal = remaining.reduce((sum, file) => sum + file.sizeBytes, 0);
125
- if (finalTotal + incomingBytes > this.policy.maxTotalBytes) {
126
- const err = new Error(`storage quota exceeded: total ${finalTotal} + incoming ${incomingBytes} > maxTotalBytes ${this.policy.maxTotalBytes}`);
127
- err.status = 507;
128
- err.code = "STORAGE_QUOTA_EXCEEDED";
129
- err.details = { policy: this.policy, totalBytes: finalTotal, incomingBytes };
130
- throw err;
131
- }
132
- }
133
- pickEvictions(files, incomingBytes) {
134
- const totalBytes = files.reduce((sum, f) => sum + f.sizeBytes, 0);
135
- const projected = totalBytes + incomingBytes;
136
- if (projected <= this.policy.maxTotalBytes)
137
- return [];
138
- const needFree = projected - this.policy.maxTotalBytes;
139
- const candidates = [...files].sort((a, b) => Date.parse(a.createdAt) - Date.parse(b.createdAt));
140
- const evict = [];
141
- let freed = 0;
142
- for (const file of candidates) {
143
- evict.push(file);
144
- freed += file.sizeBytes;
145
- if (freed >= needFree)
146
- break;
147
- if (evict.length >= this.policy.cleanupBatchSize)
148
- break;
149
- }
150
- return evict;
151
- }
152
- async listAllFiles() {
153
- const files = [];
154
- let cursor;
155
- while (true) {
156
- const listed = await this.bucket.list({ prefix: PREFIX, limit: 1000, cursor });
157
- for (const obj of listed.objects) {
158
- const meta = (obj.customMetadata ?? {});
159
- const createdAt = parseCreatedAt(meta.createdAt, obj.uploaded);
160
- const filename = meta.filename ?? toId(obj.key);
161
- const mimeType = meta.mimeType ?? obj.httpMetadata?.contentType ?? "application/octet-stream";
162
- files.push({
163
- id: toId(obj.key),
164
- filename,
165
- mimeType,
166
- sizeBytes: obj.size,
167
- createdAt,
168
- });
169
- }
170
- if (listed.truncated !== true || !listed.cursor)
171
- break;
172
- cursor = listed.cursor;
173
- }
174
- return files;
175
- }
176
- }
@@ -1,15 +0,0 @@
1
- export interface ToolArtifact {
2
- readonly id?: string;
3
- readonly kind: "image" | "pdf" | "file" | "json" | "text";
4
- readonly mimeType?: string;
5
- readonly filename?: string;
6
- readonly sizeBytes?: number;
7
- readonly url?: string;
8
- }
9
- export interface ToolOutputEnvelope {
10
- readonly ok: true;
11
- readonly data: unknown;
12
- readonly artifacts: ToolArtifact[];
13
- }
14
- export declare const buildToolOutputEnvelope: (result: unknown, baseUrl: string) => ToolOutputEnvelope;
15
- export declare const buildMcpContent: (envelope: ToolOutputEnvelope) => Array<Record<string, unknown>>;
@@ -1,159 +0,0 @@
1
- const MAX_TEXT_STRING = 1200;
2
- const MAX_TEXT_ARRAY = 40;
3
- const MAX_TEXT_DEPTH = 8;
4
- const asObj = (value) => typeof value === "object" && value !== null && !Array.isArray(value)
5
- ? value
6
- : {};
7
- const inferKind = (mimeType) => {
8
- const mime = (mimeType || "").toLowerCase();
9
- if (mime.startsWith("image/"))
10
- return "image";
11
- if (mime === "application/pdf")
12
- return "pdf";
13
- if (mime.includes("json"))
14
- return "json";
15
- if (mime.startsWith("text/"))
16
- return "text";
17
- return "file";
18
- };
19
- const toAbsoluteUrl = (value, baseUrl) => {
20
- try {
21
- return new URL(value, baseUrl).toString();
22
- }
23
- catch {
24
- return value;
25
- }
26
- };
27
- const addArtifact = (artifacts, artifact) => {
28
- if (!artifact.id && !artifact.url && !artifact.filename)
29
- return;
30
- artifacts.push(artifact);
31
- };
32
- export const buildToolOutputEnvelope = (result, baseUrl) => {
33
- const root = asObj(result);
34
- const artifacts = [];
35
- const fileMeta = asObj(root.file);
36
- if (typeof fileMeta.id === "string") {
37
- addArtifact(artifacts, {
38
- id: fileMeta.id,
39
- kind: inferKind(typeof fileMeta.mimeType === "string" ? fileMeta.mimeType : undefined),
40
- mimeType: typeof fileMeta.mimeType === "string" ? fileMeta.mimeType : undefined,
41
- filename: typeof fileMeta.filename === "string" ? fileMeta.filename : undefined,
42
- sizeBytes: typeof fileMeta.sizeBytes === "number" ? fileMeta.sizeBytes : undefined,
43
- url: typeof root.url === "string" ? toAbsoluteUrl(root.url, baseUrl) : undefined,
44
- });
45
- }
46
- const images = Array.isArray(root.images) ? root.images : [];
47
- for (const item of images) {
48
- const image = asObj(item);
49
- const fileId = typeof image.fileId === "string" ? image.fileId : undefined;
50
- const rawUrl = typeof image.url === "string" ? image.url : undefined;
51
- if (!fileId && !rawUrl)
52
- continue;
53
- addArtifact(artifacts, {
54
- id: fileId,
55
- kind: "image",
56
- mimeType: typeof image.mimeType === "string" ? image.mimeType : "image/png",
57
- filename: fileId ? `artifact-${fileId}.png` : undefined,
58
- url: rawUrl ? toAbsoluteUrl(rawUrl, baseUrl) : undefined,
59
- });
60
- }
61
- const files = Array.isArray(root.files) ? root.files : [];
62
- for (const item of files) {
63
- const meta = asObj(item);
64
- if (typeof meta.id !== "string")
65
- continue;
66
- addArtifact(artifacts, {
67
- id: meta.id,
68
- kind: inferKind(typeof meta.mimeType === "string" ? meta.mimeType : undefined),
69
- mimeType: typeof meta.mimeType === "string" ? meta.mimeType : undefined,
70
- filename: typeof meta.filename === "string" ? meta.filename : undefined,
71
- sizeBytes: typeof meta.sizeBytes === "number" ? meta.sizeBytes : undefined,
72
- });
73
- }
74
- return {
75
- ok: true,
76
- data: result,
77
- artifacts,
78
- };
79
- };
80
- const summarizeData = (data) => {
81
- const root = asObj(data);
82
- if (typeof root.returnMode === "string" && Array.isArray(root.images)) {
83
- return `Extracted ${root.images.length} page image(s) in returnMode=${root.returnMode}.`;
84
- }
85
- if (Array.isArray(root.pages)) {
86
- return `Processed ${root.pages.length} page(s).`;
87
- }
88
- if (Array.isArray(root.files)) {
89
- return `Listed ${root.files.length} file(s).`;
90
- }
91
- if (typeof root.deleted === "boolean") {
92
- return root.deleted ? "File deleted." : "File not found.";
93
- }
94
- return "Tool executed successfully.";
95
- };
96
- const sanitizeString = (value) => {
97
- if (value.startsWith("data:")) {
98
- const [head] = value.split(",", 1);
99
- return `${head},<omitted>`;
100
- }
101
- if (/^[A-Za-z0-9+/=]{300,}$/.test(value)) {
102
- return `<base64 omitted len=${value.length}>`;
103
- }
104
- if (value.length > MAX_TEXT_STRING) {
105
- return `${value.slice(0, MAX_TEXT_STRING)}...(truncated ${value.length - MAX_TEXT_STRING} chars)`;
106
- }
107
- return value;
108
- };
109
- const sanitizeForText = (value, depth = 0) => {
110
- if (depth >= MAX_TEXT_DEPTH)
111
- return "<max-depth>";
112
- if (typeof value === "string")
113
- return sanitizeString(value);
114
- if (typeof value !== "object" || value === null)
115
- return value;
116
- if (Array.isArray(value)) {
117
- const items = value.slice(0, MAX_TEXT_ARRAY).map((item) => sanitizeForText(item, depth + 1));
118
- if (value.length > MAX_TEXT_ARRAY) {
119
- items.push(`<truncated ${value.length - MAX_TEXT_ARRAY} items>`);
120
- }
121
- return items;
122
- }
123
- const out = {};
124
- for (const [key, nested] of Object.entries(value)) {
125
- out[key] = sanitizeForText(nested, depth + 1);
126
- }
127
- return out;
128
- };
129
- export const buildMcpContent = (envelope) => {
130
- const lines = [summarizeData(envelope.data)];
131
- if (envelope.artifacts.length > 0) {
132
- lines.push("Artifacts:");
133
- for (const artifact of envelope.artifacts) {
134
- const descriptor = [
135
- artifact.kind,
136
- artifact.filename ?? artifact.id ?? "artifact",
137
- artifact.mimeType ?? "",
138
- artifact.url ?? "",
139
- ]
140
- .filter((v) => v.length > 0)
141
- .join(" | ");
142
- lines.push(`- ${descriptor}`);
143
- }
144
- }
145
- lines.push("");
146
- lines.push(JSON.stringify(sanitizeForText(envelope), null, 2));
147
- const content = [{ type: "text", text: lines.join("\n") }];
148
- for (const artifact of envelope.artifacts) {
149
- if (!artifact.url)
150
- continue;
151
- content.push({
152
- type: "resource_link",
153
- name: artifact.filename ?? artifact.id ?? "artifact",
154
- uri: artifact.url,
155
- mimeType: artifact.mimeType ?? "application/octet-stream",
156
- });
157
- }
158
- return content;
159
- };
@@ -1,16 +0,0 @@
1
- import type { EchoPdfConfig, ToolSchema } from "./pdf-types.js";
2
- import type { Env, FileStore } from "./types.js";
3
- export interface ToolRuntimeContext {
4
- readonly config: EchoPdfConfig;
5
- readonly env: Env;
6
- readonly fileStore: FileStore;
7
- readonly providerApiKeys?: Record<string, string>;
8
- readonly trace?: (event: {
9
- kind: "step";
10
- phase: "start" | "end" | "log";
11
- name: string;
12
- payload?: unknown;
13
- }) => void;
14
- }
15
- export declare const listToolSchemas: () => ReadonlyArray<ToolSchema>;
16
- export declare const callTool: (name: string, args: unknown, ctx: ToolRuntimeContext) => Promise<unknown>;
@@ -1,175 +0,0 @@
1
- import { normalizeReturnMode } from "./file-utils.js";
2
- import { runFileOp } from "./file-ops.js";
3
- import { runPdfAgent } from "./pdf-agent.js";
4
- const asNumberArray = (value) => Array.isArray(value) ? value.map((item) => Number(item)).filter((item) => Number.isInteger(item) && item > 0) : [];
5
- const asObject = (value) => typeof value === "object" && value !== null && !Array.isArray(value)
6
- ? value
7
- : {};
8
- const readString = (obj, key) => {
9
- const value = obj[key];
10
- return typeof value === "string" && value.trim().length > 0 ? value.trim() : undefined;
11
- };
12
- const toolDefinitions = [
13
- {
14
- schema: {
15
- name: "pdf_extract_pages",
16
- description: "Render specific PDF pages to image and return inline/file_id/url mode.",
17
- inputSchema: {
18
- type: "object",
19
- properties: {
20
- fileId: { type: "string" },
21
- url: { type: "string" },
22
- base64: { type: "string" },
23
- filename: { type: "string" },
24
- pages: { type: "array", items: { type: "integer" } },
25
- renderScale: { type: "number" },
26
- returnMode: { type: "string", enum: ["inline", "file_id", "url"] },
27
- },
28
- required: ["pages"],
29
- },
30
- source: { kind: "local", toolName: "pdf.extract_pages" },
31
- },
32
- run: async (ctx, args) => {
33
- const req = {
34
- operation: "extract_pages",
35
- fileId: readString(args, "fileId"),
36
- url: readString(args, "url"),
37
- base64: readString(args, "base64"),
38
- filename: readString(args, "filename"),
39
- pages: asNumberArray(args.pages),
40
- renderScale: typeof args.renderScale === "number" ? args.renderScale : undefined,
41
- provider: undefined,
42
- model: "not-required",
43
- providerApiKeys: ctx.providerApiKeys,
44
- returnMode: normalizeReturnMode(args.returnMode),
45
- };
46
- return runPdfAgent(ctx.config, ctx.env, req, {
47
- fileStore: ctx.fileStore,
48
- trace: ctx.trace,
49
- });
50
- },
51
- },
52
- {
53
- schema: {
54
- name: "pdf_ocr_pages",
55
- description: "OCR specific PDF pages using configured multimodal model.",
56
- inputSchema: {
57
- type: "object",
58
- properties: {
59
- fileId: { type: "string" },
60
- url: { type: "string" },
61
- base64: { type: "string" },
62
- filename: { type: "string" },
63
- pages: { type: "array", items: { type: "integer" } },
64
- renderScale: { type: "number" },
65
- provider: { type: "string" },
66
- model: { type: "string" },
67
- prompt: { type: "string" },
68
- },
69
- required: ["pages"],
70
- },
71
- source: { kind: "local", toolName: "pdf.ocr_pages" },
72
- },
73
- run: async (ctx, args) => {
74
- const req = {
75
- operation: "ocr_pages",
76
- fileId: readString(args, "fileId"),
77
- url: readString(args, "url"),
78
- base64: readString(args, "base64"),
79
- filename: readString(args, "filename"),
80
- pages: asNumberArray(args.pages),
81
- renderScale: typeof args.renderScale === "number" ? args.renderScale : undefined,
82
- provider: readString(args, "provider"),
83
- model: readString(args, "model") ?? "",
84
- prompt: readString(args, "prompt"),
85
- providerApiKeys: ctx.providerApiKeys,
86
- returnMode: "inline",
87
- };
88
- return runPdfAgent(ctx.config, ctx.env, req, {
89
- fileStore: ctx.fileStore,
90
- trace: ctx.trace,
91
- });
92
- },
93
- },
94
- {
95
- schema: {
96
- name: "pdf_tables_to_latex",
97
- description: "Recognize tables from pages and return LaTeX tabular output.",
98
- inputSchema: {
99
- type: "object",
100
- properties: {
101
- fileId: { type: "string" },
102
- url: { type: "string" },
103
- base64: { type: "string" },
104
- filename: { type: "string" },
105
- pages: { type: "array", items: { type: "integer" } },
106
- renderScale: { type: "number" },
107
- provider: { type: "string" },
108
- model: { type: "string" },
109
- prompt: { type: "string" },
110
- },
111
- required: ["pages"],
112
- },
113
- source: { kind: "local", toolName: "pdf.tables_to_latex" },
114
- },
115
- run: async (ctx, args) => {
116
- const req = {
117
- operation: "tables_to_latex",
118
- fileId: readString(args, "fileId"),
119
- url: readString(args, "url"),
120
- base64: readString(args, "base64"),
121
- filename: readString(args, "filename"),
122
- pages: asNumberArray(args.pages),
123
- renderScale: typeof args.renderScale === "number" ? args.renderScale : undefined,
124
- provider: readString(args, "provider"),
125
- model: readString(args, "model") ?? "",
126
- prompt: readString(args, "prompt"),
127
- providerApiKeys: ctx.providerApiKeys,
128
- returnMode: "inline",
129
- };
130
- return runPdfAgent(ctx.config, ctx.env, req, {
131
- fileStore: ctx.fileStore,
132
- trace: ctx.trace,
133
- });
134
- },
135
- },
136
- {
137
- schema: {
138
- name: "file_ops",
139
- description: "Basic file operations: list/read/delete/put for runtime file store.",
140
- inputSchema: {
141
- type: "object",
142
- properties: {
143
- op: { type: "string", enum: ["list", "read", "delete", "put"] },
144
- fileId: { type: "string" },
145
- includeBase64: { type: "boolean" },
146
- text: { type: "string" },
147
- filename: { type: "string" },
148
- mimeType: { type: "string" },
149
- base64: { type: "string" },
150
- returnMode: { type: "string", enum: ["inline", "file_id", "url"] },
151
- },
152
- required: ["op"],
153
- },
154
- source: { kind: "local", toolName: "file.ops" },
155
- },
156
- run: async (ctx, args) => runFileOp(ctx.fileStore, {
157
- op: readString(args, "op") ?? "list",
158
- fileId: readString(args, "fileId"),
159
- includeBase64: Boolean(args.includeBase64),
160
- text: readString(args, "text"),
161
- filename: readString(args, "filename"),
162
- mimeType: readString(args, "mimeType"),
163
- base64: readString(args, "base64"),
164
- returnMode: normalizeReturnMode(args.returnMode),
165
- }),
166
- },
167
- ];
168
- export const listToolSchemas = () => toolDefinitions.map((item) => item.schema);
169
- export const callTool = async (name, args, ctx) => {
170
- const definition = toolDefinitions.find((item) => item.schema.name === name);
171
- if (!definition) {
172
- throw new Error(`Unknown tool: ${name}`);
173
- }
174
- return definition.run(ctx, asObject(args));
175
- };
package/dist/worker.d.ts DELETED
@@ -1,7 +0,0 @@
1
- import { FileStoreDO } from "./file-store-do.js";
2
- import type { Env, WorkerExecutionContext } from "./types.js";
3
- declare const _default: {
4
- fetch(request: Request, env: Env, ctx: WorkerExecutionContext): Promise<Response>;
5
- };
6
- export default _default;
7
- export { FileStoreDO };