@avasis-ai/synthcode 1.0.0

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (77)
  1. package/LICENSE +21 -0
  2. package/README.md +292 -0
  3. package/dist/chunk-53ZOIXM4.js +624 -0
  4. package/dist/chunk-53ZOIXM4.js.map +1 -0
  5. package/dist/chunk-BWXHO6UJ.js +115 -0
  6. package/dist/chunk-BWXHO6UJ.js.map +1 -0
  7. package/dist/chunk-CARUMOML.js +123 -0
  8. package/dist/chunk-CARUMOML.js.map +1 -0
  9. package/dist/chunk-DGUM43GV.js +11 -0
  10. package/dist/chunk-DGUM43GV.js.map +1 -0
  11. package/dist/chunk-F34HO4RA.js +487 -0
  12. package/dist/chunk-F34HO4RA.js.map +1 -0
  13. package/dist/chunk-FK7S2S7V.js +132 -0
  14. package/dist/chunk-FK7S2S7V.js.map +1 -0
  15. package/dist/chunk-MQ7XP6VT.js +174 -0
  16. package/dist/chunk-MQ7XP6VT.js.map +1 -0
  17. package/dist/chunk-TLPOO6C3.js +176 -0
  18. package/dist/chunk-TLPOO6C3.js.map +1 -0
  19. package/dist/chunk-W6OLZ2OI.js +56 -0
  20. package/dist/chunk-W6OLZ2OI.js.map +1 -0
  21. package/dist/cli/index.cjs +151 -0
  22. package/dist/cli/index.cjs.map +1 -0
  23. package/dist/cli/index.d.cts +1 -0
  24. package/dist/cli/index.d.ts +1 -0
  25. package/dist/cli/index.js +8 -0
  26. package/dist/cli/index.js.map +1 -0
  27. package/dist/cli/run.cjs +128 -0
  28. package/dist/cli/run.cjs.map +1 -0
  29. package/dist/cli/run.d.cts +1 -0
  30. package/dist/cli/run.d.ts +1 -0
  31. package/dist/cli/run.js +126 -0
  32. package/dist/cli/run.js.map +1 -0
  33. package/dist/index-D-K6sx8s.d.cts +8 -0
  34. package/dist/index-D-K6sx8s.d.ts +8 -0
  35. package/dist/index.cjs +2909 -0
  36. package/dist/index.cjs.map +1 -0
  37. package/dist/index.d.cts +274 -0
  38. package/dist/index.d.ts +274 -0
  39. package/dist/index.js +1048 -0
  40. package/dist/index.js.map +1 -0
  41. package/dist/llm/index.cjs +531 -0
  42. package/dist/llm/index.cjs.map +1 -0
  43. package/dist/llm/index.d.cts +70 -0
  44. package/dist/llm/index.d.ts +70 -0
  45. package/dist/llm/index.js +24 -0
  46. package/dist/llm/index.js.map +1 -0
  47. package/dist/mcp/index.cjs +323 -0
  48. package/dist/mcp/index.cjs.map +1 -0
  49. package/dist/mcp/index.d.cts +39 -0
  50. package/dist/mcp/index.d.ts +39 -0
  51. package/dist/mcp/index.js +11 -0
  52. package/dist/mcp/index.js.map +1 -0
  53. package/dist/memory/index.cjs +146 -0
  54. package/dist/memory/index.cjs.map +1 -0
  55. package/dist/memory/index.d.cts +51 -0
  56. package/dist/memory/index.d.ts +51 -0
  57. package/dist/memory/index.js +10 -0
  58. package/dist/memory/index.js.map +1 -0
  59. package/dist/tools/fuzzy-edit.cjs +200 -0
  60. package/dist/tools/fuzzy-edit.cjs.map +1 -0
  61. package/dist/tools/fuzzy-edit.d.cts +9 -0
  62. package/dist/tools/fuzzy-edit.d.ts +9 -0
  63. package/dist/tools/fuzzy-edit.js +12 -0
  64. package/dist/tools/fuzzy-edit.js.map +1 -0
  65. package/dist/tools/index.cjs +1032 -0
  66. package/dist/tools/index.cjs.map +1 -0
  67. package/dist/tools/index.d.cts +4 -0
  68. package/dist/tools/index.d.ts +4 -0
  69. package/dist/tools/index.js +39 -0
  70. package/dist/tools/index.js.map +1 -0
  71. package/dist/types-C11cw5ZD.d.cts +177 -0
  72. package/dist/types-C11cw5ZD.d.ts +177 -0
  73. package/dist/utils-TF4TBXQJ.js +10 -0
  74. package/dist/utils-TF4TBXQJ.js.map +1 -0
  75. package/dist/web-fetch-B42QzYD2.d.cts +85 -0
  76. package/dist/web-fetch-EDdhxmEf.d.ts +85 -0
  77. package/package.json +134 -0
@@ -0,0 +1,487 @@
// src/llm/provider.ts

/**
 * Error subclass signalling that the failed operation may safely be
 * retried (rate limits, transient network failures). The underlying
 * error, when known, is carried in `cause`.
 */
var RetryableError = class extends Error {
  cause;
  constructor(message, cause) {
    super(message);
    Object.assign(this, { name: "RetryableError", cause });
  }
};
/**
 * Shared base for LLM providers: stores the common configuration
 * (model id, output-token cap, temperature) and normalizes the
 * provider-specific stop/finish-reason strings into this package's
 * canonical set.
 */
var BaseProvider = class {
  model;
  maxOutputTokens;
  temperature;
  constructor(config) {
    const { model, maxOutputTokens, temperature } = config;
    this.model = model;
    this.maxOutputTokens = maxOutputTokens;
    this.temperature = temperature;
  }
  /**
   * Map a provider-native stop reason ("stop", "length", "tool_calls",
   * ...) onto the canonical values used across this package. Anything
   * unrecognized falls back to "end_turn".
   */
  mapStopReason(reason) {
    const canonical = new Map([
      ["end_turn", "end_turn"],
      ["stop", "end_turn"],
      ["tool_use", "tool_use"],
      ["tool_calls", "tool_use"],
      ["max_tokens", "max_tokens"],
      ["length", "max_tokens"],
      ["stop_sequence", "stop_sequence"]
    ]);
    return canonical.get(reason) ?? "end_turn";
  }
};
// src/llm/anthropic.ts

/**
 * Provider backed by @anthropic-ai/sdk (imported lazily on the first
 * chat() call). Translates this package's ChatRequest / content-block
 * shapes to and from the Anthropic Messages API.
 */
var AnthropicProvider = class extends BaseProvider {
  apiKey;
  baseURL;
  dangerouslySkipAuth;
  enableCaching;
  // SDK client, created on first chat() and reused for later calls.
  client = null;
  constructor(config) {
    super(config);
    this.apiKey = config.apiKey;
    this.baseURL = config.baseURL;
    this.dangerouslySkipAuth = config.dangerouslySkipAuth;
    this.enableCaching = config.enableCaching ?? false;
  }
  /**
   * Send one chat turn. HTTP 429/529 and low-level network failures
   * (TypeError or ECONNRESET/ECONNREFUSED/ENOTFOUND/ETIMEDOUT codes)
   * are rethrown as RetryableError; any other failure is wrapped in a
   * plain Error with the original attached as `cause`.
   */
  async chat(request) {
    const AnthropicSDK = (await import("@anthropic-ai/sdk")).default;
    if (!this.client) {
      const opts = { apiKey: this.apiKey };
      if (this.baseURL) opts.baseURL = this.baseURL;
      if (this.dangerouslySkipAuth) opts.dangerouslySkipAuth = true;
      this.client = new AnthropicSDK(opts);
    }
    const messages = this.mapMessages(request.messages);
    const tools = this.mapTools(request.tools);
    // Per-request settings win over constructor config; 4096 is the
    // hard fallback when neither specifies an output-token cap.
    const maxTokens = request.maxOutputTokens ?? this.maxOutputTokens ?? 4096;
    const temperature = request.temperature ?? this.temperature;
    const body = {
      model: this.model,
      max_tokens: maxTokens,
      messages
    };
    if (request.systemPrompt) {
      if (this.enableCaching) {
        // Send the system prompt as a block with an ephemeral
        // cache_control marker (Anthropic prompt caching).
        body.system = [
          { type: "text", text: request.systemPrompt, cache_control: { type: "ephemeral" } }
        ];
      } else {
        body.system = request.systemPrompt;
      }
    }
    if (tools) body.tools = tools;
    if (temperature !== void 0) body.temperature = temperature;
    const reqOptions = {};
    if (request.abortSignal) reqOptions.signal = request.abortSignal;
    let response;
    try {
      response = await this.client.messages.create(body, reqOptions);
    } catch (err) {
      const status = err?.status;
      const code = err?.code;
      const isNetwork = err instanceof TypeError || typeof code === "string" && ["ECONNRESET", "ECONNREFUSED", "ENOTFOUND", "ETIMEDOUT"].includes(code);
      if (status === 429 || status === 529 || isNetwork) {
        throw new RetryableError(
          err instanceof Error ? err.message : String(err),
          err instanceof Error ? err : void 0
        );
      }
      throw new Error(
        `Anthropic API error: ${err instanceof Error ? err.message : String(err)}`,
        err instanceof Error ? { cause: err } : void 0
      );
    }
    // Normalize the SDK's content blocks; block types other than
    // text/tool_use/thinking are silently dropped.
    const content = [];
    for (const block of response.content) {
      if (block.type === "text") {
        content.push({ type: "text", text: block.text });
      } else if (block.type === "tool_use") {
        content.push({ type: "tool_use", id: block.id, name: block.name, input: block.input });
      } else if (block.type === "thinking") {
        content.push({ type: "thinking", thinking: block.thinking });
      }
    }
    return {
      content,
      usage: this.mapUsage(response.usage),
      stopReason: this.mapStopReason(response.stop_reason)
    };
  }
  // Convert package ChatMessages to Anthropic wire format. Tool results
  // become user-role tool_result blocks (non-text blocks in a tool
  // message are discarded); assistant/user content is mapped block by
  // block via mapOutgoingBlock.
  mapMessages(messages) {
    return messages.map((msg) => {
      if (msg.role === "tool") {
        const textContent = typeof msg.content === "string" ? msg.content : msg.content.filter((b) => b.type === "text").map((b) => b.text).join("\n");
        return {
          role: "user",
          content: [
            {
              type: "tool_result",
              // Missing tool_use_id degrades to "" rather than omitting
              // the field.
              tool_use_id: msg.tool_use_id ?? "",
              content: textContent,
              ...msg.is_error ? { is_error: true } : {}
            }
          ]
        };
      }
      if (msg.role === "assistant") {
        const content = typeof msg.content === "string" ? [{ type: "text", text: msg.content }] : msg.content.map((block) => this.mapOutgoingBlock(block));
        return { role: "assistant", content };
      }
      if (typeof msg.content === "string") {
        return { role: "user", content: msg.content };
      }
      return {
        role: "user",
        content: msg.content.map((block) => this.mapOutgoingBlock(block))
      };
    });
  }
  // Map tool definitions to the Anthropic tools payload; undefined when
  // no tools were supplied.
  // NOTE(review): `type: "tool"` looks suspect — the Anthropic tools API
  // does not document "tool" as a tool type value; verify against the
  // Messages API reference whether this field should be omitted.
  mapTools(tools) {
    if (!tools || tools.length === 0) return void 0;
    return tools.map((tool) => ({
      name: tool.name,
      description: tool.description,
      input_schema: tool.input_schema,
      type: "tool"
    }));
  }
  // Translate Anthropic usage counters (snake_case) into the package's
  // TokenUsage shape; cache counters may be undefined.
  mapUsage(providerUsage) {
    const u = providerUsage;
    return {
      inputTokens: u.input_tokens,
      outputTokens: u.output_tokens,
      cacheReadTokens: u.cache_read_input_tokens,
      cacheWriteTokens: u.cache_creation_input_tokens
    };
  }
  // Map one outgoing ContentBlock to Anthropic wire format; unknown
  // block types become an empty text block rather than failing.
  mapOutgoingBlock(block) {
    switch (block.type) {
      case "text":
        return { type: "text", text: block.text };
      case "tool_use":
        return { type: "tool_use", id: block.id, name: block.name, input: block.input };
      case "thinking":
        return { type: "thinking", thinking: block.thinking };
      default:
        return { type: "text", text: "" };
    }
  }
};
// src/llm/openai.ts

/**
 * Provider backed by the official `openai` SDK (imported lazily on the
 * first chat() call). Translates this package's ChatRequest /
 * content-block shapes to and from the Chat Completions API.
 */
var OpenAIProvider = class extends BaseProvider {
  apiKey;
  baseURL;
  organization;
  // SDK client, created on first chat() and reused for later calls.
  client = null;
  constructor(config) {
    super(config);
    this.apiKey = config.apiKey;
    this.baseURL = config.baseURL;
    this.organization = config.organization;
  }
  /**
   * Send one chat turn. HTTP 429 and low-level network failures are
   * rethrown as RetryableError; any other failure is wrapped in a plain
   * Error with the original attached as `cause`. (Unlike the Anthropic
   * provider, 529 is not treated as retryable here.)
   */
  async chat(request) {
    const OpenAI = (await import("openai")).default;
    if (!this.client) {
      const opts = { apiKey: this.apiKey };
      if (this.baseURL) opts.baseURL = this.baseURL;
      if (this.organization) opts.organization = this.organization;
      this.client = new OpenAI(opts);
    }
    const mappedMessages = this.mapMessages(request.messages);
    // The system prompt travels as a leading system-role message.
    const messages = [];
    if (request.systemPrompt) {
      messages.push({ role: "system", content: request.systemPrompt });
    }
    messages.push(...mappedMessages);
    const tools = this.mapTools(request.tools);
    // No hard-coded fallback here: max_tokens is omitted entirely when
    // neither the request nor the constructor config sets it.
    const maxTokens = request.maxOutputTokens ?? this.maxOutputTokens;
    const temperature = request.temperature ?? this.temperature;
    const body = {
      model: this.model,
      messages,
      stream: false
    };
    if (tools) body.tools = tools;
    if (maxTokens !== void 0) body.max_tokens = maxTokens;
    if (temperature !== void 0) body.temperature = temperature;
    const reqOptions = {};
    if (request.abortSignal) reqOptions.signal = request.abortSignal;
    let response;
    try {
      response = await this.client.chat.completions.create(body, reqOptions);
    } catch (err) {
      const status = err?.status;
      const code = err?.code;
      const isNetwork = err instanceof TypeError || typeof code === "string" && ["ECONNRESET", "ECONNREFUSED", "ENOTFOUND", "ETIMEDOUT"].includes(code);
      if (status === 429 || isNetwork) {
        throw new RetryableError(
          err instanceof Error ? err.message : String(err),
          err instanceof Error ? err : void 0
        );
      }
      throw new Error(
        `OpenAI API error: ${err instanceof Error ? err.message : String(err)}`,
        err instanceof Error ? { cause: err } : void 0
      );
    }
    // Only the first choice is consumed.
    const choice = response.choices[0];
    const content = [];
    if (choice.message.content) {
      content.push({ type: "text", text: choice.message.content });
    }
    if (choice.message.tool_calls) {
      for (const tc of choice.message.tool_calls) {
        // Malformed tool-call argument JSON degrades to an empty input
        // object instead of failing the whole turn.
        let input = {};
        try {
          input = JSON.parse(tc.function.arguments);
        } catch {
        }
        content.push({
          type: "tool_use",
          id: tc.id,
          name: tc.function.name,
          input
        });
      }
    }
    return {
      content,
      usage: this.mapUsage(response.usage),
      stopReason: this.mapStopReason(choice.finish_reason)
    };
  }
  // Convert package ChatMessages to Chat Completions wire format. Tool
  // results become tool-role messages keyed by tool_call_id; assistant
  // tool_use blocks become tool_calls entries; non-text blocks in
  // user/tool content are dropped.
  mapMessages(messages) {
    return messages.map((msg) => {
      if (msg.role === "tool") {
        return {
          role: "tool",
          tool_call_id: msg.tool_use_id ?? "",
          content: typeof msg.content === "string" ? msg.content : msg.content.filter((b) => b.type === "text").map((b) => b.text).join("\n")
        };
      }
      if (msg.role === "assistant") {
        if (typeof msg.content === "string") {
          return { role: "assistant", content: msg.content };
        }
        const textParts = msg.content.filter((b) => b.type === "text").map((b) => b.text).join("");
        const toolCalls = msg.content.filter((b) => b.type === "tool_use").map((b) => ({
          id: b.id,
          type: "function",
          function: { name: b.name, arguments: JSON.stringify(b.input) }
        }));
        return {
          role: "assistant",
          // Empty text becomes null content (required when only
          // tool_calls are present); tool_calls is omitted when empty.
          content: textParts || null,
          ...toolCalls.length > 0 ? { tool_calls: toolCalls } : {}
        };
      }
      return {
        role: "user",
        content: typeof msg.content === "string" ? msg.content : msg.content.filter((b) => b.type === "text").map((b) => b.text).join("\n")
      };
    });
  }
  // Map tool definitions to Chat Completions "function" tools;
  // undefined when no tools were supplied.
  mapTools(tools) {
    if (!tools || tools.length === 0) return void 0;
    return tools.map((tool) => ({
      type: "function",
      function: {
        name: tool.name,
        description: tool.description,
        parameters: tool.input_schema
      }
    }));
  }
  // Translate OpenAI usage counters into the package's TokenUsage
  // shape; cacheWriteTokens has no OpenAI equivalent and is omitted.
  mapUsage(providerUsage) {
    const u = providerUsage;
    return {
      inputTokens: u.prompt_tokens,
      outputTokens: u.completion_tokens,
      cacheReadTokens: u.prompt_tokens_details?.cached_tokens
    };
  }
};
311
+
312
+ // src/llm/ollama.ts
313
+ var OllamaProvider = class {
314
+ model;
315
+ baseURL;
316
+ constructor(config) {
317
+ this.model = config.model;
318
+ this.baseURL = config.baseURL ?? "http://localhost:11434/v1";
319
+ }
320
+ async chat(request) {
321
+ const messages = [];
322
+ if (request.systemPrompt) {
323
+ messages.push({ role: "system", content: request.systemPrompt });
324
+ }
325
+ for (const m of request.messages) {
326
+ if (m.role === "tool") {
327
+ messages.push({
328
+ role: "tool",
329
+ tool_call_id: m.tool_use_id,
330
+ content: m.content
331
+ });
332
+ continue;
333
+ }
334
+ if (m.role === "assistant" && Array.isArray(m.content)) {
335
+ const textParts = m.content.filter((b) => b.type === "text");
336
+ const toolParts = m.content.filter((b) => b.type === "tool_use");
337
+ const msg = {};
338
+ if (textParts.length > 0) {
339
+ msg.content = textParts.map((p) => p.text).join("");
340
+ }
341
+ if (toolParts.length > 0) {
342
+ msg.tool_calls = toolParts.map((b) => {
343
+ const tb = b;
344
+ return {
345
+ id: tb.id,
346
+ type: "function",
347
+ function: { name: tb.name, arguments: JSON.stringify(tb.input) }
348
+ };
349
+ });
350
+ }
351
+ msg.role = "assistant";
352
+ messages.push(msg);
353
+ continue;
354
+ }
355
+ messages.push({ role: m.role, content: m.content });
356
+ }
357
+ const body = {
358
+ model: this.model,
359
+ messages,
360
+ stream: false
361
+ };
362
+ if (request.maxOutputTokens) {
363
+ body.max_tokens = request.maxOutputTokens;
364
+ }
365
+ if (request.tools?.length) {
366
+ body.tools = request.tools.map((t) => ({
367
+ type: "function",
368
+ function: {
369
+ name: t.name,
370
+ description: t.description,
371
+ parameters: t.input_schema
372
+ }
373
+ }));
374
+ }
375
+ let response;
376
+ try {
377
+ response = await fetch(`${this.baseURL}/chat/completions`, {
378
+ method: "POST",
379
+ headers: { "Content-Type": "application/json" },
380
+ body: JSON.stringify(body),
381
+ signal: request.abortSignal
382
+ });
383
+ } catch (err) {
384
+ if (err instanceof RetryableError) throw err;
385
+ throw new RetryableError(
386
+ `Ollama connection failed: ${err instanceof Error ? err.message : String(err)}`
387
+ );
388
+ }
389
+ if (!response.ok) {
390
+ const text = await response.text();
391
+ if (response.status === 429 || response.status === 503 || response.status === 529) {
392
+ throw new RetryableError(`Ollama API error ${response.status}: ${text.slice(0, 200)}`);
393
+ }
394
+ throw new Error(`Ollama API error ${response.status}: ${text.slice(0, 200)}`);
395
+ }
396
+ const data = await response.json();
397
+ const choice = data.choices?.[0];
398
+ if (!choice) {
399
+ throw new Error("Ollama returned no choices");
400
+ }
401
+ const content = [];
402
+ let stopReason = "end_turn";
403
+ if (choice.message?.content) {
404
+ let text = choice.message.content;
405
+ text = text.replace(/<think[^>]*>[\s\S]*?<\/think>/gi, "").trim();
406
+ text = text.replace(/<thinking>[\s\S]*?<\/thinking>/gi, "").trim();
407
+ text = text.replace(/\[Thinking[^\]]*\]/gi, "").trim();
408
+ if (text.length > 0) {
409
+ content.push({ type: "text", text });
410
+ }
411
+ }
412
+ if (choice.message?.tool_calls?.length) {
413
+ stopReason = "tool_use";
414
+ for (const tc of choice.message.tool_calls) {
415
+ let input;
416
+ try {
417
+ input = JSON.parse(tc.function.arguments);
418
+ } catch {
419
+ input = {};
420
+ }
421
+ content.push({
422
+ type: "tool_use",
423
+ id: tc.id,
424
+ name: tc.function.name,
425
+ input
426
+ });
427
+ }
428
+ }
429
+ return {
430
+ content,
431
+ stopReason,
432
+ usage: {
433
+ inputTokens: data.usage?.prompt_tokens ?? 0,
434
+ outputTokens: data.usage?.completion_tokens ?? 0,
435
+ cacheReadTokens: 0,
436
+ cacheWriteTokens: 0
437
+ }
438
+ };
439
+ }
440
+ };
// src/llm/index.ts

/** Factory: build an AnthropicProvider from its config. */
function anthropic(config) {
  const provider = new AnthropicProvider(config);
  return provider;
}
/** Factory: build an OpenAIProvider from its config. */
function openai(config) {
  const provider = new OpenAIProvider(config);
  return provider;
}
/** Factory: build an OllamaProvider from its config. */
function ollama(config) {
  const provider = new OllamaProvider(config);
  return provider;
}
452
+ var CustomProvider = class {
453
+ model;
454
+ chatFn;
455
+ constructor(config) {
456
+ this.model = config.model;
457
+ this.chatFn = config.chat;
458
+ }
459
+ chat(request) {
460
+ return this.chatFn(request);
461
+ }
462
+ };
/**
 * Build a provider from a tagged config. Dispatches on
 * `config.provider`; an unrecognized tag returns undefined (matching
 * the original switch with no default).
 */
function createProvider(config) {
  const kind = config.provider;
  if (kind === "anthropic") return new AnthropicProvider(config);
  if (kind === "openai") return new OpenAIProvider(config);
  if (kind === "ollama") return new OllamaProvider(config);
  if (kind === "custom") return new CustomProvider(config);
}
// Public surface of this chunk; re-exported by the package's llm entry
// points (dist/llm/index.*).
export {
  RetryableError,
  BaseProvider,
  AnthropicProvider,
  OpenAIProvider,
  OllamaProvider,
  anthropic,
  openai,
  ollama,
  createProvider
};
//# sourceMappingURL=chunk-F34HO4RA.js.map
@@ -0,0 +1 @@
1
+ {"version":3,"sources":["../src/llm/provider.ts","../src/llm/anthropic.ts","../src/llm/openai.ts","../src/llm/ollama.ts","../src/llm/index.ts"],"sourcesContent":["import type { ModelResponse, ContentBlock, TokenUsage, ProviderConfig } from '../types.js';\nimport type { APIToolDefinition } from '../tools/tool.js';\n\nexport interface Provider {\n readonly model: string;\n chat(request: ChatRequest): Promise<ModelResponse>;\n}\n\nexport interface ChatRequest {\n messages: ChatMessage[];\n tools?: APIToolDefinition[];\n systemPrompt?: string;\n maxOutputTokens?: number;\n temperature?: number;\n abortSignal?: AbortSignal;\n}\n\nexport interface ChatMessage {\n role: \"user\" | \"assistant\" | \"tool\";\n content: string | ContentBlock[];\n tool_use_id?: string;\n is_error?: boolean;\n}\n\nexport type { APIToolDefinition } from '../tools/tool.js';\n\nexport class RetryableError extends Error {\n readonly cause?: Error;\n\n constructor(message: string, cause?: Error) {\n super(message);\n this.name = 'RetryableError';\n this.cause = cause;\n }\n}\n\nexport abstract class BaseProvider implements Provider {\n readonly model: string;\n readonly maxOutputTokens?: number;\n readonly temperature?: number;\n\n constructor(config: ProviderConfig) {\n this.model = config.model;\n this.maxOutputTokens = config.maxOutputTokens;\n this.temperature = config.temperature;\n }\n\n abstract chat(request: ChatRequest): Promise<ModelResponse>;\n\n protected abstract mapMessages(messages: ChatMessage[]): unknown[];\n\n protected abstract mapTools(tools?: APIToolDefinition[]): unknown[] | undefined;\n\n protected abstract mapUsage(providerUsage: unknown): TokenUsage;\n\n protected mapStopReason(reason: string): ModelResponse[\"stopReason\"] {\n switch (reason) {\n case 'end_turn':\n case 'stop':\n return 'end_turn';\n case 'tool_use':\n case 'tool_calls':\n return 'tool_use';\n case 'max_tokens':\n case 'length':\n return 'max_tokens';\n case 'stop_sequence':\n return 'stop_sequence';\n 
default:\n return 'end_turn';\n }\n }\n}\n","import type { ModelResponse, ContentBlock, TextBlock, ToolUseBlock, ThinkingBlock, TokenUsage, ProviderConfig } from '../types.js';\nimport type { ChatRequest, ChatMessage, APIToolDefinition } from './provider.js';\nimport { BaseProvider, RetryableError } from './provider.js';\n\nexport interface AnthropicProviderConfig extends ProviderConfig {\n apiKey: string;\n baseURL?: string;\n dangerouslySkipAuth?: boolean;\n enableCaching?: boolean;\n}\n\nexport class AnthropicProvider extends BaseProvider {\n private readonly apiKey: string;\n private readonly baseURL?: string;\n private readonly dangerouslySkipAuth?: boolean;\n private readonly enableCaching: boolean;\n private client: any = null;\n\n constructor(config: AnthropicProviderConfig) {\n super(config);\n this.apiKey = config.apiKey;\n this.baseURL = config.baseURL;\n this.dangerouslySkipAuth = config.dangerouslySkipAuth;\n this.enableCaching = config.enableCaching ?? false;\n }\n\n async chat(request: ChatRequest): Promise<ModelResponse> {\n const AnthropicSDK = (await import('@anthropic-ai/sdk')).default;\n\n if (!this.client) {\n const opts: Record<string, unknown> = { apiKey: this.apiKey };\n if (this.baseURL) opts.baseURL = this.baseURL;\n if (this.dangerouslySkipAuth) opts.dangerouslySkipAuth = true;\n this.client = new AnthropicSDK(opts);\n }\n\n const messages = this.mapMessages(request.messages);\n const tools = this.mapTools(request.tools);\n const maxTokens = request.maxOutputTokens ?? this.maxOutputTokens ?? 4096;\n const temperature = request.temperature ?? 
this.temperature;\n\n const body: Record<string, unknown> = {\n model: this.model,\n max_tokens: maxTokens,\n messages,\n };\n\n if (request.systemPrompt) {\n if (this.enableCaching) {\n body.system = [\n { type: \"text\", text: request.systemPrompt, cache_control: { type: \"ephemeral\" } },\n ];\n } else {\n body.system = request.systemPrompt;\n }\n }\n if (tools) body.tools = tools;\n if (temperature !== undefined) body.temperature = temperature;\n\n const reqOptions: Record<string, unknown> = {};\n if (request.abortSignal) reqOptions.signal = request.abortSignal;\n\n let response: any;\n try {\n response = await this.client.messages.create(body, reqOptions);\n } catch (err: unknown) {\n const status = (err as any)?.status;\n const code = (err as any)?.code;\n const isNetwork =\n err instanceof TypeError ||\n (typeof code === 'string' &&\n ['ECONNRESET', 'ECONNREFUSED', 'ENOTFOUND', 'ETIMEDOUT'].includes(code));\n\n if (status === 429 || status === 529 || isNetwork) {\n throw new RetryableError(\n err instanceof Error ? err.message : String(err),\n err instanceof Error ? err : undefined,\n );\n }\n throw new Error(\n `Anthropic API error: ${err instanceof Error ? err.message : String(err)}`,\n err instanceof Error ? 
{ cause: err } : undefined,\n );\n }\n\n const content: ContentBlock[] = [];\n for (const block of response.content) {\n if (block.type === 'text') {\n content.push({ type: 'text', text: block.text } as TextBlock);\n } else if (block.type === 'tool_use') {\n content.push({ type: 'tool_use', id: block.id, name: block.name, input: block.input } as ToolUseBlock);\n } else if (block.type === 'thinking') {\n content.push({ type: 'thinking', thinking: block.thinking } as ThinkingBlock);\n }\n }\n\n return {\n content,\n usage: this.mapUsage(response.usage),\n stopReason: this.mapStopReason(response.stop_reason),\n };\n }\n\n protected mapMessages(messages: ChatMessage[]): unknown[] {\n return messages.map((msg) => {\n if (msg.role === 'tool') {\n const textContent =\n typeof msg.content === 'string'\n ? msg.content\n : msg.content\n .filter((b): b is TextBlock => b.type === 'text')\n .map((b) => b.text)\n .join('\\n');\n\n return {\n role: 'user',\n content: [\n {\n type: 'tool_result',\n tool_use_id: msg.tool_use_id ?? '',\n content: textContent,\n ...(msg.is_error ? { is_error: true } : {}),\n },\n ],\n };\n }\n\n if (msg.role === 'assistant') {\n const content =\n typeof msg.content === 'string'\n ? 
[{ type: 'text', text: msg.content }]\n : msg.content.map((block) => this.mapOutgoingBlock(block));\n return { role: 'assistant', content };\n }\n\n if (typeof msg.content === 'string') {\n return { role: 'user', content: msg.content };\n }\n\n return {\n role: 'user',\n content: msg.content.map((block) => this.mapOutgoingBlock(block)),\n };\n });\n }\n\n protected mapTools(tools?: APIToolDefinition[]): unknown[] | undefined {\n if (!tools || tools.length === 0) return undefined;\n return tools.map((tool) => ({\n name: tool.name,\n description: tool.description,\n input_schema: tool.input_schema,\n type: 'tool',\n }));\n }\n\n protected mapUsage(providerUsage: unknown): TokenUsage {\n const u = providerUsage as {\n input_tokens: number;\n output_tokens: number;\n cache_read_input_tokens?: number;\n cache_creation_input_tokens?: number;\n };\n return {\n inputTokens: u.input_tokens,\n outputTokens: u.output_tokens,\n cacheReadTokens: u.cache_read_input_tokens,\n cacheWriteTokens: u.cache_creation_input_tokens,\n };\n }\n\n private mapOutgoingBlock(block: ContentBlock): Record<string, unknown> {\n switch (block.type) {\n case 'text':\n return { type: 'text', text: block.text };\n case 'tool_use':\n return { type: 'tool_use', id: block.id, name: block.name, input: block.input };\n case 'thinking':\n return { type: 'thinking', thinking: block.thinking };\n default:\n return { type: 'text', text: '' };\n }\n }\n}\n","import type { ModelResponse, ContentBlock, TextBlock, ToolUseBlock, TokenUsage, ProviderConfig } from '../types.js';\nimport type { ChatRequest, ChatMessage, APIToolDefinition } from './provider.js';\nimport { BaseProvider, RetryableError } from './provider.js';\n\nexport interface OpenAIProviderConfig extends ProviderConfig {\n apiKey: string;\n baseURL?: string;\n organization?: string;\n}\n\nexport class OpenAIProvider extends BaseProvider {\n private readonly apiKey: string;\n private readonly baseURL?: string;\n private readonly organization?: 
string;\n private client: any = null;\n\n constructor(config: OpenAIProviderConfig) {\n super(config);\n this.apiKey = config.apiKey;\n this.baseURL = config.baseURL;\n this.organization = config.organization;\n }\n\n async chat(request: ChatRequest): Promise<ModelResponse> {\n const OpenAI = (await import('openai')).default;\n\n if (!this.client) {\n const opts: Record<string, unknown> = { apiKey: this.apiKey };\n if (this.baseURL) opts.baseURL = this.baseURL;\n if (this.organization) opts.organization = this.organization;\n this.client = new OpenAI(opts);\n }\n\n const mappedMessages = this.mapMessages(request.messages);\n const messages: unknown[] = [];\n if (request.systemPrompt) {\n messages.push({ role: 'system', content: request.systemPrompt });\n }\n messages.push(...mappedMessages);\n\n const tools = this.mapTools(request.tools);\n const maxTokens = request.maxOutputTokens ?? this.maxOutputTokens;\n const temperature = request.temperature ?? this.temperature;\n\n const body: Record<string, unknown> = {\n model: this.model,\n messages,\n stream: false,\n };\n\n if (tools) body.tools = tools;\n if (maxTokens !== undefined) body.max_tokens = maxTokens;\n if (temperature !== undefined) body.temperature = temperature;\n\n const reqOptions: Record<string, unknown> = {};\n if (request.abortSignal) reqOptions.signal = request.abortSignal;\n\n let response: any;\n try {\n response = await this.client.chat.completions.create(body, reqOptions);\n } catch (err: unknown) {\n const status = (err as any)?.status;\n const code = (err as any)?.code;\n const isNetwork =\n err instanceof TypeError ||\n (typeof code === 'string' &&\n ['ECONNRESET', 'ECONNREFUSED', 'ENOTFOUND', 'ETIMEDOUT'].includes(code));\n\n if (status === 429 || isNetwork) {\n throw new RetryableError(\n err instanceof Error ? err.message : String(err),\n err instanceof Error ? err : undefined,\n );\n }\n throw new Error(\n `OpenAI API error: ${err instanceof Error ? 
err.message : String(err)}`,\n err instanceof Error ? { cause: err } : undefined,\n );\n }\n\n const choice = response.choices[0];\n const content: ContentBlock[] = [];\n\n if (choice.message.content) {\n content.push({ type: 'text', text: choice.message.content } as TextBlock);\n }\n\n if (choice.message.tool_calls) {\n for (const tc of choice.message.tool_calls) {\n let input: Record<string, unknown> = {};\n try {\n input = JSON.parse(tc.function.arguments);\n } catch {}\n content.push({\n type: 'tool_use',\n id: tc.id,\n name: tc.function.name,\n input,\n } as ToolUseBlock);\n }\n }\n\n return {\n content,\n usage: this.mapUsage(response.usage),\n stopReason: this.mapStopReason(choice.finish_reason),\n };\n }\n\n protected mapMessages(messages: ChatMessage[]): unknown[] {\n return messages.map((msg) => {\n if (msg.role === 'tool') {\n return {\n role: 'tool',\n tool_call_id: msg.tool_use_id ?? '',\n content:\n typeof msg.content === 'string'\n ? msg.content\n : msg.content\n .filter((b): b is TextBlock => b.type === 'text')\n .map((b) => b.text)\n .join('\\n'),\n };\n }\n\n if (msg.role === 'assistant') {\n if (typeof msg.content === 'string') {\n return { role: 'assistant', content: msg.content };\n }\n\n const textParts = msg.content\n .filter((b): b is TextBlock => b.type === 'text')\n .map((b) => b.text)\n .join('');\n const toolCalls = msg.content\n .filter((b): b is ToolUseBlock => b.type === 'tool_use')\n .map((b) => ({\n id: b.id,\n type: 'function' as const,\n function: { name: b.name, arguments: JSON.stringify(b.input) },\n }));\n\n return {\n role: 'assistant',\n content: textParts || null,\n ...(toolCalls.length > 0 ? { tool_calls: toolCalls } : {}),\n };\n }\n\n return {\n role: 'user',\n content:\n typeof msg.content === 'string'\n ? 
msg.content\n : msg.content\n .filter((b): b is TextBlock => b.type === 'text')\n .map((b) => b.text)\n .join('\\n'),\n };\n });\n }\n\n protected mapTools(tools?: APIToolDefinition[]): unknown[] | undefined {\n if (!tools || tools.length === 0) return undefined;\n return tools.map((tool) => ({\n type: 'function',\n function: {\n name: tool.name,\n description: tool.description,\n parameters: tool.input_schema,\n },\n }));\n }\n\n protected mapUsage(providerUsage: unknown): TokenUsage {\n const u = providerUsage as {\n prompt_tokens: number;\n completion_tokens: number;\n prompt_tokens_details?: { cached_tokens?: number };\n };\n return {\n inputTokens: u.prompt_tokens,\n outputTokens: u.completion_tokens,\n cacheReadTokens: u.prompt_tokens_details?.cached_tokens,\n };\n }\n}\n","import type { Provider, ChatRequest } from \"./provider.js\";\nimport { RetryableError } from \"./provider.js\";\nimport type { ModelResponse, ContentBlock } from \"../types.js\";\nimport type { APIToolDefinition } from \"../tools/tool.js\";\n\nexport interface OllamaProviderConfig {\n model: string;\n baseURL?: string;\n}\n\nexport class OllamaProvider implements Provider {\n readonly model: string;\n private readonly baseURL: string;\n\n constructor(config: OllamaProviderConfig) {\n this.model = config.model;\n this.baseURL = config.baseURL ?? 
\"http://localhost:11434/v1\";\n }\n\n async chat(request: ChatRequest): Promise<ModelResponse> {\n const messages: any[] = [];\n\n if (request.systemPrompt) {\n messages.push({ role: \"system\", content: request.systemPrompt });\n }\n\n for (const m of request.messages) {\n if (m.role === \"tool\") {\n messages.push({\n role: \"tool\" as const,\n tool_call_id: m.tool_use_id,\n content: m.content,\n });\n continue;\n }\n if (m.role === \"assistant\" && Array.isArray(m.content)) {\n const textParts = (m.content as ContentBlock[]).filter(b => b.type === \"text\");\n const toolParts = (m.content as ContentBlock[]).filter(b => b.type === \"tool_use\");\n\n const msg: any = {};\n if (textParts.length > 0) {\n msg.content = textParts.map(p => (p as { text: string }).text).join(\"\");\n }\n if (toolParts.length > 0) {\n msg.tool_calls = toolParts.map(b => {\n const tb = b as { type: \"tool_use\"; id: string; name: string; input: Record<string, unknown> };\n return {\n id: tb.id,\n type: \"function\",\n function: { name: tb.name, arguments: JSON.stringify(tb.input) },\n };\n });\n }\n msg.role = \"assistant\";\n messages.push(msg);\n continue;\n }\n messages.push({ role: m.role, content: m.content });\n }\n\n const body: any = {\n model: this.model,\n messages,\n stream: false,\n };\n\n if (request.maxOutputTokens) {\n body.max_tokens = request.maxOutputTokens;\n }\n\n if (request.tools?.length) {\n body.tools = request.tools.map(t => ({\n type: \"function\",\n function: {\n name: t.name,\n description: t.description,\n parameters: t.input_schema,\n },\n }));\n }\n\n let response;\n try {\n response = await fetch(`${this.baseURL}/chat/completions`, {\n method: \"POST\",\n headers: { \"Content-Type\": \"application/json\" },\n body: JSON.stringify(body),\n signal: request.abortSignal,\n });\n } catch (err) {\n if (err instanceof RetryableError) throw err;\n throw new RetryableError(\n `Ollama connection failed: ${err instanceof Error ? 
err.message : String(err)}`,\n );\n }\n\n if (!response.ok) {\n const text = await response.text();\n if (response.status === 429 || response.status === 503 || response.status === 529) {\n throw new RetryableError(`Ollama API error ${response.status}: ${text.slice(0, 200)}`);\n }\n throw new Error(`Ollama API error ${response.status}: ${text.slice(0, 200)}`);\n }\n\n const data = await response.json();\n const choice = data.choices?.[0];\n if (!choice) {\n throw new Error(\"Ollama returned no choices\");\n }\n\n const content: any[] = [];\n let stopReason: ModelResponse[\"stopReason\"] = \"end_turn\";\n\n if (choice.message?.content) {\n let text = choice.message.content;\n text = text.replace(/<think[^>]*>[\\s\\S]*?<\\/think>/gi, \"\").trim();\n text = text.replace(/<thinking>[\\s\\S]*?<\\/thinking>/gi, \"\").trim();\n text = text.replace(/\\[Thinking[^\\]]*\\]/gi, \"\").trim();\n if (text.length > 0) {\n content.push({ type: \"text\" as const, text });\n }\n }\n\n if (choice.message?.tool_calls?.length) {\n stopReason = \"tool_use\";\n for (const tc of choice.message.tool_calls) {\n let input: Record<string, unknown>;\n try {\n input = JSON.parse(tc.function.arguments);\n } catch {\n input = {};\n }\n content.push({\n type: \"tool_use\" as const,\n id: tc.id,\n name: tc.function.name,\n input,\n });\n }\n }\n\n return {\n content,\n stopReason,\n usage: {\n inputTokens: data.usage?.prompt_tokens ?? 0,\n outputTokens: data.usage?.completion_tokens ?? 
0,\n cacheReadTokens: 0,\n cacheWriteTokens: 0,\n },\n };\n }\n}\n","import type { ModelResponse } from '../types.js';\nimport { BaseProvider, RetryableError } from './provider.js';\nimport type { Provider, ChatRequest, ChatMessage, APIToolDefinition } from './provider.js';\nimport { AnthropicProvider } from './anthropic.js';\nimport type { AnthropicProviderConfig } from './anthropic.js';\nimport { OpenAIProvider } from './openai.js';\nimport type { OpenAIProviderConfig } from './openai.js';\nimport { OllamaProvider } from './ollama.js';\nimport type { OllamaProviderConfig } from './ollama.js';\n\nexport { BaseProvider, RetryableError } from './provider.js';\nexport type { Provider, ChatRequest, ChatMessage, APIToolDefinition } from './provider.js';\nexport { AnthropicProvider } from './anthropic.js';\nexport type { AnthropicProviderConfig } from './anthropic.js';\nexport { OpenAIProvider } from './openai.js';\nexport type { OpenAIProviderConfig } from './openai.js';\nexport { OllamaProvider } from './ollama.js';\nexport type { OllamaProviderConfig } from './ollama.js';\n\nexport function anthropic(config: AnthropicProviderConfig): AnthropicProvider {\n return new AnthropicProvider(config);\n}\n\nexport function openai(config: OpenAIProviderConfig): OpenAIProvider {\n return new OpenAIProvider(config);\n}\n\nexport function ollama(config: OllamaProviderConfig): OllamaProvider {\n return new OllamaProvider(config);\n}\n\nexport interface CustomProviderConfig {\n provider: 'custom';\n model: string;\n chat: (request: ChatRequest) => Promise<ModelResponse>;\n}\n\nclass CustomProvider implements Provider {\n readonly model: string;\n private readonly chatFn: (request: ChatRequest) => Promise<ModelResponse>;\n\n constructor(config: CustomProviderConfig) {\n this.model = config.model;\n this.chatFn = config.chat;\n }\n\n chat(request: ChatRequest): Promise<ModelResponse> {\n return this.chatFn(request);\n }\n}\n\nexport type OllamaProviderConfigWithProvider = 
OllamaProviderConfig & { provider: 'ollama' };\n\nexport function createProvider(\n config:\n | (AnthropicProviderConfig & { provider: 'anthropic' })\n | (OpenAIProviderConfig & { provider: 'openai' })\n | OllamaProviderConfigWithProvider\n | CustomProviderConfig,\n): Provider {\n switch (config.provider) {\n case 'anthropic':\n return new AnthropicProvider(config);\n case 'openai':\n return new OpenAIProvider(config);\n case 'ollama':\n return new OllamaProvider(config);\n case 'custom':\n return new CustomProvider(config);\n }\n}\n"],"mappings":";AA0BO,IAAM,iBAAN,cAA6B,MAAM;AAAA,EAC/B;AAAA,EAET,YAAY,SAAiB,OAAe;AAC1C,UAAM,OAAO;AACb,SAAK,OAAO;AACZ,SAAK,QAAQ;AAAA,EACf;AACF;AAEO,IAAe,eAAf,MAAgD;AAAA,EAC5C;AAAA,EACA;AAAA,EACA;AAAA,EAET,YAAY,QAAwB;AAClC,SAAK,QAAQ,OAAO;AACpB,SAAK,kBAAkB,OAAO;AAC9B,SAAK,cAAc,OAAO;AAAA,EAC5B;AAAA,EAUU,cAAc,QAA6C;AACnE,YAAQ,QAAQ;AAAA,MACd,KAAK;AAAA,MACL,KAAK;AACH,eAAO;AAAA,MACT,KAAK;AAAA,MACL,KAAK;AACH,eAAO;AAAA,MACT,KAAK;AAAA,MACL,KAAK;AACH,eAAO;AAAA,MACT,KAAK;AACH,eAAO;AAAA,MACT;AACE,eAAO;AAAA,IACX;AAAA,EACF;AACF;;;AC7DO,IAAM,oBAAN,cAAgC,aAAa;AAAA,EACjC;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACT,SAAc;AAAA,EAEtB,YAAY,QAAiC;AAC3C,UAAM,MAAM;AACZ,SAAK,SAAS,OAAO;AACrB,SAAK,UAAU,OAAO;AACtB,SAAK,sBAAsB,OAAO;AAClC,SAAK,gBAAgB,OAAO,iBAAiB;AAAA,EAC/C;AAAA,EAEA,MAAM,KAAK,SAA8C;AACvD,UAAM,gBAAgB,MAAM,OAAO,mBAAmB,GAAG;AAEzD,QAAI,CAAC,KAAK,QAAQ;AAChB,YAAM,OAAgC,EAAE,QAAQ,KAAK,OAAO;AAC5D,UAAI,KAAK,QAAS,MAAK,UAAU,KAAK;AACtC,UAAI,KAAK,oBAAqB,MAAK,sBAAsB;AACzD,WAAK,SAAS,IAAI,aAAa,IAAI;AAAA,IACrC;AAEA,UAAM,WAAW,KAAK,YAAY,QAAQ,QAAQ;AAClD,UAAM,QAAQ,KAAK,SAAS,QAAQ,KAAK;AACzC,UAAM,YAAY,QAAQ,mBAAmB,KAAK,mBAAmB;AACrE,UAAM,cAAc,QAAQ,eAAe,KAAK;AAEhD,UAAM,OAAgC;AAAA,MACpC,OAAO,KAAK;AAAA,MACZ,YAAY;AAAA,MACZ;AAAA,IACF;AAEA,QAAI,QAAQ,cAAc;AACxB,UAAI,KAAK,eAAe;AACtB,aAAK,SAAS;AAAA,UACZ,EAAE,MAAM,QAAQ,MAAM,QAAQ,cAAc,eAAe,EAAE,MAAM,YAAY,EAAE;AAAA,QACnF;AAAA,MACF,OAAO;AACL,aAAK,SAAS,QAAQ;AAAA,MACxB;AAAA,IACF;AACA,QAAI,MAAO,MAAK,QAAQ;AACxB,QAAI,gBAAgB,OAAW,MAAK,cAAc;AAElD,UAAM
,aAAsC,CAAC;AAC7C,QAAI,QAAQ,YAAa,YAAW,SAAS,QAAQ;AAErD,QAAI;AACJ,QAAI;AACF,iBAAW,MAAM,KAAK,OAAO,SAAS,OAAO,MAAM,UAAU;AAAA,IAC/D,SAAS,KAAc;AACrB,YAAM,SAAU,KAAa;AAC7B,YAAM,OAAQ,KAAa;AAC3B,YAAM,YACJ,eAAe,aACd,OAAO,SAAS,YACf,CAAC,cAAc,gBAAgB,aAAa,WAAW,EAAE,SAAS,IAAI;AAE1E,UAAI,WAAW,OAAO,WAAW,OAAO,WAAW;AACjD,cAAM,IAAI;AAAA,UACR,eAAe,QAAQ,IAAI,UAAU,OAAO,GAAG;AAAA,UAC/C,eAAe,QAAQ,MAAM;AAAA,QAC/B;AAAA,MACF;AACA,YAAM,IAAI;AAAA,QACR,wBAAwB,eAAe,QAAQ,IAAI,UAAU,OAAO,GAAG,CAAC;AAAA,QACxE,eAAe,QAAQ,EAAE,OAAO,IAAI,IAAI;AAAA,MAC1C;AAAA,IACF;AAEA,UAAM,UAA0B,CAAC;AACjC,eAAW,SAAS,SAAS,SAAS;AACpC,UAAI,MAAM,SAAS,QAAQ;AACzB,gBAAQ,KAAK,EAAE,MAAM,QAAQ,MAAM,MAAM,KAAK,CAAc;AAAA,MAC9D,WAAW,MAAM,SAAS,YAAY;AACpC,gBAAQ,KAAK,EAAE,MAAM,YAAY,IAAI,MAAM,IAAI,MAAM,MAAM,MAAM,OAAO,MAAM,MAAM,CAAiB;AAAA,MACvG,WAAW,MAAM,SAAS,YAAY;AACpC,gBAAQ,KAAK,EAAE,MAAM,YAAY,UAAU,MAAM,SAAS,CAAkB;AAAA,MAC9E;AAAA,IACF;AAEA,WAAO;AAAA,MACL;AAAA,MACA,OAAO,KAAK,SAAS,SAAS,KAAK;AAAA,MACnC,YAAY,KAAK,cAAc,SAAS,WAAW;AAAA,IACrD;AAAA,EACF;AAAA,EAEU,YAAY,UAAoC;AACxD,WAAO,SAAS,IAAI,CAAC,QAAQ;AAC3B,UAAI,IAAI,SAAS,QAAQ;AACvB,cAAM,cACJ,OAAO,IAAI,YAAY,WACnB,IAAI,UACJ,IAAI,QACD,OAAO,CAAC,MAAsB,EAAE,SAAS,MAAM,EAC/C,IAAI,CAAC,MAAM,EAAE,IAAI,EACjB,KAAK,IAAI;AAElB,eAAO;AAAA,UACL,MAAM;AAAA,UACN,SAAS;AAAA,YACP;AAAA,cACE,MAAM;AAAA,cACN,aAAa,IAAI,eAAe;AAAA,cAChC,SAAS;AAAA,cACT,GAAI,IAAI,WAAW,EAAE,UAAU,KAAK,IAAI,CAAC;AAAA,YAC3C;AAAA,UACF;AAAA,QACF;AAAA,MACF;AAEA,UAAI,IAAI,SAAS,aAAa;AAC5B,cAAM,UACJ,OAAO,IAAI,YAAY,WACnB,CAAC,EAAE,MAAM,QAAQ,MAAM,IAAI,QAAQ,CAAC,IACpC,IAAI,QAAQ,IAAI,CAAC,UAAU,KAAK,iBAAiB,KAAK,CAAC;AAC7D,eAAO,EAAE,MAAM,aAAa,QAAQ;AAAA,MACtC;AAEA,UAAI,OAAO,IAAI,YAAY,UAAU;AACnC,eAAO,EAAE,MAAM,QAAQ,SAAS,IAAI,QAAQ;AAAA,MAC9C;AAEA,aAAO;AAAA,QACL,MAAM;AAAA,QACN,SAAS,IAAI,QAAQ,IAAI,CAAC,UAAU,KAAK,iBAAiB,KAAK,CAAC;AAAA,MAClE;AAAA,IACF,CAAC;AAAA,EACH;AAAA,EAEU,SAAS,OAAoD;AACrE,QAAI,CAAC,SAAS,MAAM,WAAW,EAAG,QAAO;AACzC,WAAO,MAAM,IAAI,CAAC,UAAU;AAAA,MAC1B,MAAM,KAAK;AAAA,MACX,aAAa,KAAK;AAAA,MAClB,cAAc,KAAK;AAAA,MACnB,MAAM;AAAA,IACR,EAAE;AAAA,EACJ;AAAA,E
AEU,SAAS,eAAoC;AACrD,UAAM,IAAI;AAMV,WAAO;AAAA,MACL,aAAa,EAAE;AAAA,MACf,cAAc,EAAE;AAAA,MAChB,iBAAiB,EAAE;AAAA,MACnB,kBAAkB,EAAE;AAAA,IACtB;AAAA,EACF;AAAA,EAEQ,iBAAiB,OAA8C;AACrE,YAAQ,MAAM,MAAM;AAAA,MAClB,KAAK;AACH,eAAO,EAAE,MAAM,QAAQ,MAAM,MAAM,KAAK;AAAA,MAC1C,KAAK;AACH,eAAO,EAAE,MAAM,YAAY,IAAI,MAAM,IAAI,MAAM,MAAM,MAAM,OAAO,MAAM,MAAM;AAAA,MAChF,KAAK;AACH,eAAO,EAAE,MAAM,YAAY,UAAU,MAAM,SAAS;AAAA,MACtD;AACE,eAAO,EAAE,MAAM,QAAQ,MAAM,GAAG;AAAA,IACpC;AAAA,EACF;AACF;;;AC7KO,IAAM,iBAAN,cAA6B,aAAa;AAAA,EAC9B;AAAA,EACA;AAAA,EACA;AAAA,EACT,SAAc;AAAA,EAEtB,YAAY,QAA8B;AACxC,UAAM,MAAM;AACZ,SAAK,SAAS,OAAO;AACrB,SAAK,UAAU,OAAO;AACtB,SAAK,eAAe,OAAO;AAAA,EAC7B;AAAA,EAEA,MAAM,KAAK,SAA8C;AACvD,UAAM,UAAU,MAAM,OAAO,QAAQ,GAAG;AAExC,QAAI,CAAC,KAAK,QAAQ;AAChB,YAAM,OAAgC,EAAE,QAAQ,KAAK,OAAO;AAC5D,UAAI,KAAK,QAAS,MAAK,UAAU,KAAK;AACtC,UAAI,KAAK,aAAc,MAAK,eAAe,KAAK;AAChD,WAAK,SAAS,IAAI,OAAO,IAAI;AAAA,IAC/B;AAEA,UAAM,iBAAiB,KAAK,YAAY,QAAQ,QAAQ;AACxD,UAAM,WAAsB,CAAC;AAC7B,QAAI,QAAQ,cAAc;AACxB,eAAS,KAAK,EAAE,MAAM,UAAU,SAAS,QAAQ,aAAa,CAAC;AAAA,IACjE;AACA,aAAS,KAAK,GAAG,cAAc;AAE/B,UAAM,QAAQ,KAAK,SAAS,QAAQ,KAAK;AACzC,UAAM,YAAY,QAAQ,mBAAmB,KAAK;AAClD,UAAM,cAAc,QAAQ,eAAe,KAAK;AAEhD,UAAM,OAAgC;AAAA,MACpC,OAAO,KAAK;AAAA,MACZ;AAAA,MACA,QAAQ;AAAA,IACV;AAEA,QAAI,MAAO,MAAK,QAAQ;AACxB,QAAI,cAAc,OAAW,MAAK,aAAa;AAC/C,QAAI,gBAAgB,OAAW,MAAK,cAAc;AAElD,UAAM,aAAsC,CAAC;AAC7C,QAAI,QAAQ,YAAa,YAAW,SAAS,QAAQ;AAErD,QAAI;AACJ,QAAI;AACF,iBAAW,MAAM,KAAK,OAAO,KAAK,YAAY,OAAO,MAAM,UAAU;AAAA,IACvE,SAAS,KAAc;AACrB,YAAM,SAAU,KAAa;AAC7B,YAAM,OAAQ,KAAa;AAC3B,YAAM,YACJ,eAAe,aACd,OAAO,SAAS,YACf,CAAC,cAAc,gBAAgB,aAAa,WAAW,EAAE,SAAS,IAAI;AAE1E,UAAI,WAAW,OAAO,WAAW;AAC/B,cAAM,IAAI;AAAA,UACR,eAAe,QAAQ,IAAI,UAAU,OAAO,GAAG;AAAA,UAC/C,eAAe,QAAQ,MAAM;AAAA,QAC/B;AAAA,MACF;AACA,YAAM,IAAI;AAAA,QACR,qBAAqB,eAAe,QAAQ,IAAI,UAAU,OAAO,GAAG,CAAC;AAAA,QACrE,eAAe,QAAQ,EAAE,OAAO,IAAI,IAAI;AAAA,MAC1C;AAAA,IACF;AAEA,UAAM,SAAS,SAAS,QAAQ,CAAC;AACjC,UAAM,UAA0B,CAAC;AAEjC,QAAI,OAAO,QAAQ,SAAS;AAC1B,cAAQ,KAAK,EAAE,MAAM,QAAQ,MAAM,OAAO,QAAQ,QAAQ,CAAc;AAAA,IAC1E;AAEA
,QAAI,OAAO,QAAQ,YAAY;AAC7B,iBAAW,MAAM,OAAO,QAAQ,YAAY;AAC1C,YAAI,QAAiC,CAAC;AACtC,YAAI;AACF,kBAAQ,KAAK,MAAM,GAAG,SAAS,SAAS;AAAA,QAC1C,QAAQ;AAAA,QAAC;AACT,gBAAQ,KAAK;AAAA,UACX,MAAM;AAAA,UACN,IAAI,GAAG;AAAA,UACP,MAAM,GAAG,SAAS;AAAA,UAClB;AAAA,QACF,CAAiB;AAAA,MACnB;AAAA,IACF;AAEA,WAAO;AAAA,MACL;AAAA,MACA,OAAO,KAAK,SAAS,SAAS,KAAK;AAAA,MACnC,YAAY,KAAK,cAAc,OAAO,aAAa;AAAA,IACrD;AAAA,EACF;AAAA,EAEU,YAAY,UAAoC;AACxD,WAAO,SAAS,IAAI,CAAC,QAAQ;AAC3B,UAAI,IAAI,SAAS,QAAQ;AACvB,eAAO;AAAA,UACL,MAAM;AAAA,UACN,cAAc,IAAI,eAAe;AAAA,UACjC,SACE,OAAO,IAAI,YAAY,WACnB,IAAI,UACJ,IAAI,QACD,OAAO,CAAC,MAAsB,EAAE,SAAS,MAAM,EAC/C,IAAI,CAAC,MAAM,EAAE,IAAI,EACjB,KAAK,IAAI;AAAA,QACpB;AAAA,MACF;AAEA,UAAI,IAAI,SAAS,aAAa;AAC5B,YAAI,OAAO,IAAI,YAAY,UAAU;AACnC,iBAAO,EAAE,MAAM,aAAa,SAAS,IAAI,QAAQ;AAAA,QACnD;AAEA,cAAM,YAAY,IAAI,QACnB,OAAO,CAAC,MAAsB,EAAE,SAAS,MAAM,EAC/C,IAAI,CAAC,MAAM,EAAE,IAAI,EACjB,KAAK,EAAE;AACV,cAAM,YAAY,IAAI,QACnB,OAAO,CAAC,MAAyB,EAAE,SAAS,UAAU,EACtD,IAAI,CAAC,OAAO;AAAA,UACX,IAAI,EAAE;AAAA,UACN,MAAM;AAAA,UACN,UAAU,EAAE,MAAM,EAAE,MAAM,WAAW,KAAK,UAAU,EAAE,KAAK,EAAE;AAAA,QAC/D,EAAE;AAEJ,eAAO;AAAA,UACL,MAAM;AAAA,UACN,SAAS,aAAa;AAAA,UACtB,GAAI,UAAU,SAAS,IAAI,EAAE,YAAY,UAAU,IAAI,CAAC;AAAA,QAC1D;AAAA,MACF;AAEA,aAAO;AAAA,QACL,MAAM;AAAA,QACN,SACE,OAAO,IAAI,YAAY,WACnB,IAAI,UACJ,IAAI,QACD,OAAO,CAAC,MAAsB,EAAE,SAAS,MAAM,EAC/C,IAAI,CAAC,MAAM,EAAE,IAAI,EACjB,KAAK,IAAI;AAAA,MACpB;AAAA,IACF,CAAC;AAAA,EACH;AAAA,EAEU,SAAS,OAAoD;AACrE,QAAI,CAAC,SAAS,MAAM,WAAW,EAAG,QAAO;AACzC,WAAO,MAAM,IAAI,CAAC,UAAU;AAAA,MAC1B,MAAM;AAAA,MACN,UAAU;AAAA,QACR,MAAM,KAAK;AAAA,QACX,aAAa,KAAK;AAAA,QAClB,YAAY,KAAK;AAAA,MACnB;AAAA,IACF,EAAE;AAAA,EACJ;AAAA,EAEU,SAAS,eAAoC;AACrD,UAAM,IAAI;AAKV,WAAO;AAAA,MACL,aAAa,EAAE;AAAA,MACf,cAAc,EAAE;AAAA,MAChB,iBAAiB,EAAE,uBAAuB;AAAA,IAC5C;AAAA,EACF;AACF;;;AChLO,IAAM,iBAAN,MAAyC;AAAA,EACrC;AAAA,EACQ;AAAA,EAEjB,YAAY,QAA8B;AACxC,SAAK,QAAQ,OAAO;AACpB,SAAK,UAAU,OAAO,WAAW;AAAA,EACnC;AAAA,EAEA,MAAM,KAAK,SAA8C;AACvD,UAAM,WAAkB,CAAC;AAEzB,QAAI,QAAQ,cAAc;AACxB,eAAS,KAAK,EAAE,MAAM,UAAU,SAAS,QAAQ,aAAa,CA
AC;AAAA,IACjE;AAEA,eAAW,KAAK,QAAQ,UAAU;AAChC,UAAI,EAAE,SAAS,QAAQ;AACrB,iBAAS,KAAK;AAAA,UACZ,MAAM;AAAA,UACN,cAAc,EAAE;AAAA,UAChB,SAAS,EAAE;AAAA,QACb,CAAC;AACD;AAAA,MACF;AACA,UAAI,EAAE,SAAS,eAAe,MAAM,QAAQ,EAAE,OAAO,GAAG;AACtD,cAAM,YAAa,EAAE,QAA2B,OAAO,OAAK,EAAE,SAAS,MAAM;AAC7E,cAAM,YAAa,EAAE,QAA2B,OAAO,OAAK,EAAE,SAAS,UAAU;AAEjF,cAAM,MAAW,CAAC;AAClB,YAAI,UAAU,SAAS,GAAG;AACxB,cAAI,UAAU,UAAU,IAAI,OAAM,EAAuB,IAAI,EAAE,KAAK,EAAE;AAAA,QACxE;AACA,YAAI,UAAU,SAAS,GAAG;AACxB,cAAI,aAAa,UAAU,IAAI,OAAK;AAClC,kBAAM,KAAK;AACX,mBAAO;AAAA,cACL,IAAI,GAAG;AAAA,cACP,MAAM;AAAA,cACN,UAAU,EAAE,MAAM,GAAG,MAAM,WAAW,KAAK,UAAU,GAAG,KAAK,EAAE;AAAA,YACjE;AAAA,UACF,CAAC;AAAA,QACH;AACA,YAAI,OAAO;AACX,iBAAS,KAAK,GAAG;AACjB;AAAA,MACF;AACA,eAAS,KAAK,EAAE,MAAM,EAAE,MAAM,SAAS,EAAE,QAAQ,CAAC;AAAA,IACpD;AAEA,UAAM,OAAY;AAAA,MAChB,OAAO,KAAK;AAAA,MACZ;AAAA,MACA,QAAQ;AAAA,IACV;AAEA,QAAI,QAAQ,iBAAiB;AAC3B,WAAK,aAAa,QAAQ;AAAA,IAC5B;AAEA,QAAI,QAAQ,OAAO,QAAQ;AACzB,WAAK,QAAQ,QAAQ,MAAM,IAAI,QAAM;AAAA,QACnC,MAAM;AAAA,QACN,UAAU;AAAA,UACR,MAAM,EAAE;AAAA,UACR,aAAa,EAAE;AAAA,UACf,YAAY,EAAE;AAAA,QAChB;AAAA,MACF,EAAE;AAAA,IACJ;AAEA,QAAI;AACJ,QAAI;AACF,iBAAW,MAAM,MAAM,GAAG,KAAK,OAAO,qBAAqB;AAAA,QACzD,QAAQ;AAAA,QACR,SAAS,EAAE,gBAAgB,mBAAmB;AAAA,QAC9C,MAAM,KAAK,UAAU,IAAI;AAAA,QACzB,QAAQ,QAAQ;AAAA,MAClB,CAAC;AAAA,IACH,SAAS,KAAK;AACZ,UAAI,eAAe,eAAgB,OAAM;AACzC,YAAM,IAAI;AAAA,QACR,6BAA6B,eAAe,QAAQ,IAAI,UAAU,OAAO,GAAG,CAAC;AAAA,MAC/E;AAAA,IACF;AAEA,QAAI,CAAC,SAAS,IAAI;AAChB,YAAM,OAAO,MAAM,SAAS,KAAK;AACjC,UAAI,SAAS,WAAW,OAAO,SAAS,WAAW,OAAO,SAAS,WAAW,KAAK;AACjF,cAAM,IAAI,eAAe,oBAAoB,SAAS,MAAM,KAAK,KAAK,MAAM,GAAG,GAAG,CAAC,EAAE;AAAA,MACvF;AACA,YAAM,IAAI,MAAM,oBAAoB,SAAS,MAAM,KAAK,KAAK,MAAM,GAAG,GAAG,CAAC,EAAE;AAAA,IAC9E;AAEA,UAAM,OAAO,MAAM,SAAS,KAAK;AACjC,UAAM,SAAS,KAAK,UAAU,CAAC;AAC/B,QAAI,CAAC,QAAQ;AACX,YAAM,IAAI,MAAM,4BAA4B;AAAA,IAC9C;AAEA,UAAM,UAAiB,CAAC;AACxB,QAAI,aAA0C;AAE9C,QAAI,OAAO,SAAS,SAAS;AAC3B,UAAI,OAAO,OAAO,QAAQ;AAC1B,aAAO,KAAK,QAAQ,mCAAmC,EAAE,EAAE,KAAK;AAChE,aAAO,KAAK,QAAQ,oCAAoC,EAAE,EAAE,KAAK;AACjE,aAAO,KAAK,QAAQ,
wBAAwB,EAAE,EAAE,KAAK;AACrD,UAAI,KAAK,SAAS,GAAG;AACnB,gBAAQ,KAAK,EAAE,MAAM,QAAiB,KAAK,CAAC;AAAA,MAC9C;AAAA,IACF;AAEA,QAAI,OAAO,SAAS,YAAY,QAAQ;AACtC,mBAAa;AACb,iBAAW,MAAM,OAAO,QAAQ,YAAY;AAC1C,YAAI;AACJ,YAAI;AACF,kBAAQ,KAAK,MAAM,GAAG,SAAS,SAAS;AAAA,QAC1C,QAAQ;AACN,kBAAQ,CAAC;AAAA,QACX;AACA,gBAAQ,KAAK;AAAA,UACX,MAAM;AAAA,UACN,IAAI,GAAG;AAAA,UACP,MAAM,GAAG,SAAS;AAAA,UAClB;AAAA,QACF,CAAC;AAAA,MACH;AAAA,IACF;AAEA,WAAO;AAAA,MACL;AAAA,MACA;AAAA,MACA,OAAO;AAAA,QACL,aAAa,KAAK,OAAO,iBAAiB;AAAA,QAC1C,cAAc,KAAK,OAAO,qBAAqB;AAAA,QAC/C,iBAAiB;AAAA,QACjB,kBAAkB;AAAA,MACpB;AAAA,IACF;AAAA,EACF;AACF;;;ACrIO,SAAS,UAAU,QAAoD;AAC5E,SAAO,IAAI,kBAAkB,MAAM;AACrC;AAEO,SAAS,OAAO,QAA8C;AACnE,SAAO,IAAI,eAAe,MAAM;AAClC;AAEO,SAAS,OAAO,QAA8C;AACnE,SAAO,IAAI,eAAe,MAAM;AAClC;AAQA,IAAM,iBAAN,MAAyC;AAAA,EAC9B;AAAA,EACQ;AAAA,EAEjB,YAAY,QAA8B;AACxC,SAAK,QAAQ,OAAO;AACpB,SAAK,SAAS,OAAO;AAAA,EACvB;AAAA,EAEA,KAAK,SAA8C;AACjD,WAAO,KAAK,OAAO,OAAO;AAAA,EAC5B;AACF;AAIO,SAAS,eACd,QAKU;AACV,UAAQ,OAAO,UAAU;AAAA,IACvB,KAAK;AACH,aAAO,IAAI,kBAAkB,MAAM;AAAA,IACrC,KAAK;AACH,aAAO,IAAI,eAAe,MAAM;AAAA,IAClC,KAAK;AACH,aAAO,IAAI,eAAe,MAAM;AAAA,IAClC,KAAK;AACH,aAAO,IAAI,eAAe,MAAM;AAAA,EACpC;AACF;","names":[]}
@@ -0,0 +1,132 @@
1
+ // src/tools/tool.ts
2
+ function zodToJsonSchema(schema) {
3
+ const s = schema;
4
+ const def = s._def;
5
+ const typeName = def.typeName;
6
+ const description = def.description;
7
+ const base = {};
8
+ if (description) base.description = description;
9
+ switch (typeName) {
10
+ case "ZodString":
11
+ return { ...base, type: "string" };
12
+ case "ZodNumber":
13
+ return { ...base, type: "number" };
14
+ case "ZodBoolean":
15
+ return { ...base, type: "boolean" };
16
+ case "ZodNull":
17
+ return { ...base, type: "null" };
18
+ case "ZodArray": {
19
+ const items = zodToJsonSchema(def.element);
20
+ return { ...base, type: "array", items };
21
+ }
22
+ case "ZodObject": {
23
+ const shapeFn = def.shape;
24
+ const shape = shapeFn();
25
+ const properties = {};
26
+ const required = [];
27
+ for (const [key, value] of Object.entries(shape)) {
28
+ properties[key] = zodToJsonSchema(value);
29
+ const propDef = value._def;
30
+ if (propDef.typeName !== "ZodOptional" && propDef.typeName !== "ZodNullish" && propDef.typeName !== "ZodDefault") {
31
+ required.push(key);
32
+ }
33
+ }
34
+ const result = { ...base, type: "object", properties };
35
+ if (required.length > 0) result.required = required;
36
+ return result;
37
+ }
38
+ case "ZodEnum":
39
+ return { ...base, enum: def.values };
40
+ case "ZodLiteral":
41
+ return { ...base, const: def.value };
42
+ case "ZodUnion": {
43
+ const options = def.options.map((o) => zodToJsonSchema(o));
44
+ return { ...base, anyOf: options };
45
+ }
46
+ case "ZodDiscriminatedUnion": {
47
+ const options = def.options.map((o) => zodToJsonSchema(o));
48
+ return { ...base, anyOf: options };
49
+ }
50
+ case "ZodOptional":
51
+ return zodToJsonSchema(def.innerType);
52
+ case "ZodNullable": {
53
+ const inner = zodToJsonSchema(def.innerType);
54
+ inner.nullable = true;
55
+ return inner;
56
+ }
57
+ case "ZodNullish": {
58
+ const inner = zodToJsonSchema(def.innerType);
59
+ inner.nullable = true;
60
+ return inner;
61
+ }
62
+ case "ZodDefault":
63
+ return zodToJsonSchema(def.innerType);
64
+ case "ZodRecord": {
65
+ const valueSchema = zodToJsonSchema(def.valueType);
66
+ return { ...base, type: "object", additionalProperties: valueSchema };
67
+ }
68
+ case "ZodTuple": {
69
+ const items = def.items.map((o) => zodToJsonSchema(o));
70
+ return { ...base, type: "array", items, minItems: items.length, maxItems: items.length };
71
+ }
72
+ case "ZodEffects": {
73
+ return zodToJsonSchema(def.innerType);
74
+ }
75
+ case "ZodAny":
76
+ return {};
77
+ case "ZodUnknown":
78
+ return {};
79
+ case "ZodVoid":
80
+ return { ...base, type: "null" };
81
+ case "ZodNever":
82
+ return { ...base, not: {} };
83
+ default:
84
+ return { ...base, type: "string" };
85
+ }
86
+ }
87
/**
 * Build a Tool object from a plain config.
 *
 * The returned tool exposes the config's name/description/schema/execute,
 * plus two derived helpers:
 *  - `toAPI()`  — provider-facing definition with the Zod schema converted
 *                 to JSON Schema (`input_schema`);
 *  - `toString(input)` — human-readable rendering of a call, e.g.
 *                 `grep({ q: "abc", n: 3 })`, for logs/traces.
 *
 * `isReadOnly` and `isConcurrencySafe` default to `false` (the conservative
 * assumption: the tool mutates and must not run concurrently).
 */
function defineTool(config) {
  // Render one argument value for toString(). Objects/arrays are JSON so we
  // never print "[object Object]"; circular structures fall back to String().
  const renderValue = (v) => {
    if (typeof v === "string") return `"${v}"`;
    if (v === void 0) return "undefined";
    if (v === null) return "null";
    if (typeof v === "object") {
      try {
        return JSON.stringify(v);
      } catch {
        return String(v);
      }
    }
    return String(v);
  };
  const tool = {
    name: config.name,
    description: config.description,
    inputSchema: config.inputSchema,
    isReadOnly: config.isReadOnly ?? false,
    isConcurrencySafe: config.isConcurrencySafe ?? false,
    execute: config.execute,
    toAPI() {
      return {
        name: config.name,
        description: config.description,
        input_schema: zodToJsonSchema(config.inputSchema)
      };
    },
    toString(input) {
      const entries = Object.entries(input)
        .map(([k, v]) => `${k}: ${renderValue(v)}`)
        .join(", ");
      return `${config.name}({ ${entries} })`;
    }
  };
  return tool;
}
114
/**
 * Adapt a class-based tool implementation to the plain Tool object shape.
 *
 * Instantiates the class once (no constructor arguments) and returns a tool
 * whose data fields are copied from the instance and whose methods delegate
 * to it, so `this` binding inside the class implementation stays intact.
 */
function defineToolFromClass(ctor) {
  const impl = new ctor();
  const { name, description, inputSchema, isReadOnly, isConcurrencySafe } = impl;
  return {
    name,
    description,
    inputSchema,
    isReadOnly,
    isConcurrencySafe,
    execute(input, context) {
      return impl.execute(input, context);
    },
    toAPI() {
      return impl.toAPI();
    },
    toString(input) {
      return impl.toString(input);
    },
  };
}
127
+
128
+ export {
129
+ defineTool,
130
+ defineToolFromClass
131
+ };
132
+ //# sourceMappingURL=chunk-FK7S2S7V.js.map