@avasis-ai/synthcode 1.0.0

This diff shows the content of publicly available package versions released to one of the supported registries. It is provided for informational purposes only and reflects the package versions as they appear in their respective public registries.
Files changed (77)
  1. package/LICENSE +21 -0
  2. package/README.md +292 -0
  3. package/dist/chunk-53ZOIXM4.js +624 -0
  4. package/dist/chunk-53ZOIXM4.js.map +1 -0
  5. package/dist/chunk-BWXHO6UJ.js +115 -0
  6. package/dist/chunk-BWXHO6UJ.js.map +1 -0
  7. package/dist/chunk-CARUMOML.js +123 -0
  8. package/dist/chunk-CARUMOML.js.map +1 -0
  9. package/dist/chunk-DGUM43GV.js +11 -0
  10. package/dist/chunk-DGUM43GV.js.map +1 -0
  11. package/dist/chunk-F34HO4RA.js +487 -0
  12. package/dist/chunk-F34HO4RA.js.map +1 -0
  13. package/dist/chunk-FK7S2S7V.js +132 -0
  14. package/dist/chunk-FK7S2S7V.js.map +1 -0
  15. package/dist/chunk-MQ7XP6VT.js +174 -0
  16. package/dist/chunk-MQ7XP6VT.js.map +1 -0
  17. package/dist/chunk-TLPOO6C3.js +176 -0
  18. package/dist/chunk-TLPOO6C3.js.map +1 -0
  19. package/dist/chunk-W6OLZ2OI.js +56 -0
  20. package/dist/chunk-W6OLZ2OI.js.map +1 -0
  21. package/dist/cli/index.cjs +151 -0
  22. package/dist/cli/index.cjs.map +1 -0
  23. package/dist/cli/index.d.cts +1 -0
  24. package/dist/cli/index.d.ts +1 -0
  25. package/dist/cli/index.js +8 -0
  26. package/dist/cli/index.js.map +1 -0
  27. package/dist/cli/run.cjs +128 -0
  28. package/dist/cli/run.cjs.map +1 -0
  29. package/dist/cli/run.d.cts +1 -0
  30. package/dist/cli/run.d.ts +1 -0
  31. package/dist/cli/run.js +126 -0
  32. package/dist/cli/run.js.map +1 -0
  33. package/dist/index-D-K6sx8s.d.cts +8 -0
  34. package/dist/index-D-K6sx8s.d.ts +8 -0
  35. package/dist/index.cjs +2909 -0
  36. package/dist/index.cjs.map +1 -0
  37. package/dist/index.d.cts +274 -0
  38. package/dist/index.d.ts +274 -0
  39. package/dist/index.js +1048 -0
  40. package/dist/index.js.map +1 -0
  41. package/dist/llm/index.cjs +531 -0
  42. package/dist/llm/index.cjs.map +1 -0
  43. package/dist/llm/index.d.cts +70 -0
  44. package/dist/llm/index.d.ts +70 -0
  45. package/dist/llm/index.js +24 -0
  46. package/dist/llm/index.js.map +1 -0
  47. package/dist/mcp/index.cjs +323 -0
  48. package/dist/mcp/index.cjs.map +1 -0
  49. package/dist/mcp/index.d.cts +39 -0
  50. package/dist/mcp/index.d.ts +39 -0
  51. package/dist/mcp/index.js +11 -0
  52. package/dist/mcp/index.js.map +1 -0
  53. package/dist/memory/index.cjs +146 -0
  54. package/dist/memory/index.cjs.map +1 -0
  55. package/dist/memory/index.d.cts +51 -0
  56. package/dist/memory/index.d.ts +51 -0
  57. package/dist/memory/index.js +10 -0
  58. package/dist/memory/index.js.map +1 -0
  59. package/dist/tools/fuzzy-edit.cjs +200 -0
  60. package/dist/tools/fuzzy-edit.cjs.map +1 -0
  61. package/dist/tools/fuzzy-edit.d.cts +9 -0
  62. package/dist/tools/fuzzy-edit.d.ts +9 -0
  63. package/dist/tools/fuzzy-edit.js +12 -0
  64. package/dist/tools/fuzzy-edit.js.map +1 -0
  65. package/dist/tools/index.cjs +1032 -0
  66. package/dist/tools/index.cjs.map +1 -0
  67. package/dist/tools/index.d.cts +4 -0
  68. package/dist/tools/index.d.ts +4 -0
  69. package/dist/tools/index.js +39 -0
  70. package/dist/tools/index.js.map +1 -0
  71. package/dist/types-C11cw5ZD.d.cts +177 -0
  72. package/dist/types-C11cw5ZD.d.ts +177 -0
  73. package/dist/utils-TF4TBXQJ.js +10 -0
  74. package/dist/utils-TF4TBXQJ.js.map +1 -0
  75. package/dist/web-fetch-B42QzYD2.d.cts +85 -0
  76. package/dist/web-fetch-EDdhxmEf.d.ts +85 -0
  77. package/package.json +134 -0
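
The hunks reproduced below cover three of these files: package/dist/llm/index.cjs (entry 41), its source map (entry 42), and the package/dist/llm/index.d.cts declarations (entry 43). The separate llm/, mcp/, memory/, and tools/ entry points suggest the package exposes subpath exports; assuming package.json (entry 77, whose contents are not shown in this section) wires "./llm" to dist/llm, the provider layer would be imported like so (hypothetical specifier, verify against the actual exports map):

    // Hypothetical: assumes a "./llm" subpath export in package.json (entry 77).
    import { createProvider, RetryableError } from "@avasis-ai/synthcode/llm";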
@@ -0,0 +1,531 @@
+ "use strict";
+ var __create = Object.create;
+ var __defProp = Object.defineProperty;
+ var __getOwnPropDesc = Object.getOwnPropertyDescriptor;
+ var __getOwnPropNames = Object.getOwnPropertyNames;
+ var __getProtoOf = Object.getPrototypeOf;
+ var __hasOwnProp = Object.prototype.hasOwnProperty;
+ var __export = (target, all) => {
+   for (var name in all)
+     __defProp(target, name, { get: all[name], enumerable: true });
+ };
+ var __copyProps = (to, from, except, desc) => {
+   if (from && typeof from === "object" || typeof from === "function") {
+     for (let key of __getOwnPropNames(from))
+       if (!__hasOwnProp.call(to, key) && key !== except)
+         __defProp(to, key, { get: () => from[key], enumerable: !(desc = __getOwnPropDesc(from, key)) || desc.enumerable });
+   }
+   return to;
+ };
+ var __toESM = (mod, isNodeMode, target) => (target = mod != null ? __create(__getProtoOf(mod)) : {}, __copyProps(
+   // If the importer is in node compatibility mode or this is not an ESM
+   // file that has been converted to a CommonJS file using a Babel-
+   // compatible transform (i.e. "__esModule" has not been set), then set
+   // "default" to the CommonJS "module.exports" for node compatibility.
+   isNodeMode || !mod || !mod.__esModule ? __defProp(target, "default", { value: mod, enumerable: true }) : target,
+   mod
+ ));
+ var __toCommonJS = (mod) => __copyProps(__defProp({}, "__esModule", { value: true }), mod);
+
+ // src/llm/index.ts
+ var llm_exports = {};
+ __export(llm_exports, {
+   AnthropicProvider: () => AnthropicProvider,
+   BaseProvider: () => BaseProvider,
+   OllamaProvider: () => OllamaProvider,
+   OpenAIProvider: () => OpenAIProvider,
+   RetryableError: () => RetryableError,
+   anthropic: () => anthropic,
+   createProvider: () => createProvider,
+   ollama: () => ollama,
+   openai: () => openai
+ });
+ module.exports = __toCommonJS(llm_exports);
+
+ // src/llm/provider.ts
+ var RetryableError = class extends Error {
+   cause;
+   constructor(message, cause) {
+     super(message);
+     this.name = "RetryableError";
+     this.cause = cause;
+   }
+ };
+ var BaseProvider = class {
+   model;
+   maxOutputTokens;
+   temperature;
+   constructor(config) {
+     this.model = config.model;
+     this.maxOutputTokens = config.maxOutputTokens;
+     this.temperature = config.temperature;
+   }
+   mapStopReason(reason) {
+     switch (reason) {
+       case "end_turn":
+       case "stop":
+         return "end_turn";
+       case "tool_use":
+       case "tool_calls":
+         return "tool_use";
+       case "max_tokens":
+       case "length":
+         return "max_tokens";
+       case "stop_sequence":
+         return "stop_sequence";
+       default:
+         return "end_turn";
+     }
+   }
+ };
+
+ // src/llm/anthropic.ts
+ var AnthropicProvider = class extends BaseProvider {
+   apiKey;
+   baseURL;
+   dangerouslySkipAuth;
+   enableCaching;
+   client = null;
+   constructor(config) {
+     super(config);
+     this.apiKey = config.apiKey;
+     this.baseURL = config.baseURL;
+     this.dangerouslySkipAuth = config.dangerouslySkipAuth;
+     this.enableCaching = config.enableCaching ?? false;
+   }
+   async chat(request) {
+     const AnthropicSDK = (await import("@anthropic-ai/sdk")).default;
+     if (!this.client) {
+       const opts = { apiKey: this.apiKey };
+       if (this.baseURL) opts.baseURL = this.baseURL;
+       if (this.dangerouslySkipAuth) opts.dangerouslySkipAuth = true;
+       this.client = new AnthropicSDK(opts);
+     }
+     const messages = this.mapMessages(request.messages);
+     const tools = this.mapTools(request.tools);
+     const maxTokens = request.maxOutputTokens ?? this.maxOutputTokens ?? 4096;
+     const temperature = request.temperature ?? this.temperature;
+     const body = {
+       model: this.model,
+       max_tokens: maxTokens,
+       messages
+     };
+     if (request.systemPrompt) {
+       if (this.enableCaching) {
+         body.system = [
+           { type: "text", text: request.systemPrompt, cache_control: { type: "ephemeral" } }
+         ];
+       } else {
+         body.system = request.systemPrompt;
+       }
+     }
+     if (tools) body.tools = tools;
+     if (temperature !== void 0) body.temperature = temperature;
+     const reqOptions = {};
+     if (request.abortSignal) reqOptions.signal = request.abortSignal;
+     let response;
+     try {
+       response = await this.client.messages.create(body, reqOptions);
+     } catch (err) {
+       const status = err?.status;
+       const code = err?.code;
+       const isNetwork = err instanceof TypeError || typeof code === "string" && ["ECONNRESET", "ECONNREFUSED", "ENOTFOUND", "ETIMEDOUT"].includes(code);
+       if (status === 429 || status === 529 || isNetwork) {
+         throw new RetryableError(
+           err instanceof Error ? err.message : String(err),
+           err instanceof Error ? err : void 0
+         );
+       }
+       throw new Error(
+         `Anthropic API error: ${err instanceof Error ? err.message : String(err)}`,
+         err instanceof Error ? { cause: err } : void 0
+       );
+     }
+     const content = [];
+     for (const block of response.content) {
+       if (block.type === "text") {
+         content.push({ type: "text", text: block.text });
+       } else if (block.type === "tool_use") {
+         content.push({ type: "tool_use", id: block.id, name: block.name, input: block.input });
+       } else if (block.type === "thinking") {
+         content.push({ type: "thinking", thinking: block.thinking });
+       }
+     }
+     return {
+       content,
+       usage: this.mapUsage(response.usage),
+       stopReason: this.mapStopReason(response.stop_reason)
+     };
+   }
+   mapMessages(messages) {
+     return messages.map((msg) => {
+       if (msg.role === "tool") {
+         const textContent = typeof msg.content === "string" ? msg.content : msg.content.filter((b) => b.type === "text").map((b) => b.text).join("\n");
+         return {
+           role: "user",
+           content: [
+             {
+               type: "tool_result",
+               tool_use_id: msg.tool_use_id ?? "",
+               content: textContent,
+               ...msg.is_error ? { is_error: true } : {}
+             }
+           ]
+         };
+       }
+       if (msg.role === "assistant") {
+         const content = typeof msg.content === "string" ? [{ type: "text", text: msg.content }] : msg.content.map((block) => this.mapOutgoingBlock(block));
+         return { role: "assistant", content };
+       }
+       if (typeof msg.content === "string") {
+         return { role: "user", content: msg.content };
+       }
+       return {
+         role: "user",
+         content: msg.content.map((block) => this.mapOutgoingBlock(block))
+       };
+     });
+   }
+   mapTools(tools) {
+     if (!tools || tools.length === 0) return void 0;
+     return tools.map((tool) => ({
+       name: tool.name,
+       description: tool.description,
+       input_schema: tool.input_schema,
+       type: "tool"
+     }));
+   }
+   mapUsage(providerUsage) {
+     const u = providerUsage;
+     return {
+       inputTokens: u.input_tokens,
+       outputTokens: u.output_tokens,
+       cacheReadTokens: u.cache_read_input_tokens,
+       cacheWriteTokens: u.cache_creation_input_tokens
+     };
+   }
+   mapOutgoingBlock(block) {
+     switch (block.type) {
+       case "text":
+         return { type: "text", text: block.text };
+       case "tool_use":
+         return { type: "tool_use", id: block.id, name: block.name, input: block.input };
+       case "thinking":
+         return { type: "thinking", thinking: block.thinking };
+       default:
+         return { type: "text", text: "" };
+     }
+   }
+ };
+
+ // src/llm/openai.ts
+ var OpenAIProvider = class extends BaseProvider {
+   apiKey;
+   baseURL;
+   organization;
+   client = null;
+   constructor(config) {
+     super(config);
+     this.apiKey = config.apiKey;
+     this.baseURL = config.baseURL;
+     this.organization = config.organization;
+   }
+   async chat(request) {
+     const OpenAI = (await import("openai")).default;
+     if (!this.client) {
+       const opts = { apiKey: this.apiKey };
+       if (this.baseURL) opts.baseURL = this.baseURL;
+       if (this.organization) opts.organization = this.organization;
+       this.client = new OpenAI(opts);
+     }
+     const mappedMessages = this.mapMessages(request.messages);
+     const messages = [];
+     if (request.systemPrompt) {
+       messages.push({ role: "system", content: request.systemPrompt });
+     }
+     messages.push(...mappedMessages);
+     const tools = this.mapTools(request.tools);
+     const maxTokens = request.maxOutputTokens ?? this.maxOutputTokens;
+     const temperature = request.temperature ?? this.temperature;
+     const body = {
+       model: this.model,
+       messages,
+       stream: false
+     };
+     if (tools) body.tools = tools;
+     if (maxTokens !== void 0) body.max_tokens = maxTokens;
+     if (temperature !== void 0) body.temperature = temperature;
+     const reqOptions = {};
+     if (request.abortSignal) reqOptions.signal = request.abortSignal;
+     let response;
+     try {
+       response = await this.client.chat.completions.create(body, reqOptions);
+     } catch (err) {
+       const status = err?.status;
+       const code = err?.code;
+       const isNetwork = err instanceof TypeError || typeof code === "string" && ["ECONNRESET", "ECONNREFUSED", "ENOTFOUND", "ETIMEDOUT"].includes(code);
+       if (status === 429 || isNetwork) {
+         throw new RetryableError(
+           err instanceof Error ? err.message : String(err),
+           err instanceof Error ? err : void 0
+         );
+       }
+       throw new Error(
+         `OpenAI API error: ${err instanceof Error ? err.message : String(err)}`,
+         err instanceof Error ? { cause: err } : void 0
+       );
+     }
+     const choice = response.choices[0];
+     const content = [];
+     if (choice.message.content) {
+       content.push({ type: "text", text: choice.message.content });
+     }
+     if (choice.message.tool_calls) {
+       for (const tc of choice.message.tool_calls) {
+         let input = {};
+         try {
+           input = JSON.parse(tc.function.arguments);
+         } catch {
+         }
+         content.push({
+           type: "tool_use",
+           id: tc.id,
+           name: tc.function.name,
+           input
+         });
+       }
+     }
+     return {
+       content,
+       usage: this.mapUsage(response.usage),
+       stopReason: this.mapStopReason(choice.finish_reason)
+     };
+   }
+   mapMessages(messages) {
+     return messages.map((msg) => {
+       if (msg.role === "tool") {
+         return {
+           role: "tool",
+           tool_call_id: msg.tool_use_id ?? "",
+           content: typeof msg.content === "string" ? msg.content : msg.content.filter((b) => b.type === "text").map((b) => b.text).join("\n")
+         };
+       }
+       if (msg.role === "assistant") {
+         if (typeof msg.content === "string") {
+           return { role: "assistant", content: msg.content };
+         }
+         const textParts = msg.content.filter((b) => b.type === "text").map((b) => b.text).join("");
+         const toolCalls = msg.content.filter((b) => b.type === "tool_use").map((b) => ({
+           id: b.id,
+           type: "function",
+           function: { name: b.name, arguments: JSON.stringify(b.input) }
+         }));
+         return {
+           role: "assistant",
+           content: textParts || null,
+           ...toolCalls.length > 0 ? { tool_calls: toolCalls } : {}
+         };
+       }
+       return {
+         role: "user",
+         content: typeof msg.content === "string" ? msg.content : msg.content.filter((b) => b.type === "text").map((b) => b.text).join("\n")
+       };
+     });
+   }
+   mapTools(tools) {
+     if (!tools || tools.length === 0) return void 0;
+     return tools.map((tool) => ({
+       type: "function",
+       function: {
+         name: tool.name,
+         description: tool.description,
+         parameters: tool.input_schema
+       }
+     }));
+   }
+   mapUsage(providerUsage) {
+     const u = providerUsage;
+     return {
+       inputTokens: u.prompt_tokens,
+       outputTokens: u.completion_tokens,
+       cacheReadTokens: u.prompt_tokens_details?.cached_tokens
+     };
+   }
+ };
+
+ // src/llm/ollama.ts
+ var OllamaProvider = class {
+   model;
+   baseURL;
+   constructor(config) {
+     this.model = config.model;
+     this.baseURL = config.baseURL ?? "http://localhost:11434/v1";
+   }
+   async chat(request) {
+     const messages = [];
+     if (request.systemPrompt) {
+       messages.push({ role: "system", content: request.systemPrompt });
+     }
+     for (const m of request.messages) {
+       if (m.role === "tool") {
+         messages.push({
+           role: "tool",
+           tool_call_id: m.tool_use_id,
+           content: m.content
+         });
+         continue;
+       }
+       if (m.role === "assistant" && Array.isArray(m.content)) {
+         const textParts = m.content.filter((b) => b.type === "text");
+         const toolParts = m.content.filter((b) => b.type === "tool_use");
+         const msg = {};
+         if (textParts.length > 0) {
+           msg.content = textParts.map((p) => p.text).join("");
+         }
+         if (toolParts.length > 0) {
+           msg.tool_calls = toolParts.map((b) => {
+             const tb = b;
+             return {
+               id: tb.id,
+               type: "function",
+               function: { name: tb.name, arguments: JSON.stringify(tb.input) }
+             };
+           });
+         }
+         msg.role = "assistant";
+         messages.push(msg);
+         continue;
+       }
+       messages.push({ role: m.role, content: m.content });
+     }
+     const body = {
+       model: this.model,
+       messages,
+       stream: false
+     };
+     if (request.maxOutputTokens) {
+       body.max_tokens = request.maxOutputTokens;
+     }
+     if (request.tools?.length) {
+       body.tools = request.tools.map((t) => ({
+         type: "function",
+         function: {
+           name: t.name,
+           description: t.description,
+           parameters: t.input_schema
+         }
+       }));
+     }
+     let response;
+     try {
+       response = await fetch(`${this.baseURL}/chat/completions`, {
+         method: "POST",
+         headers: { "Content-Type": "application/json" },
+         body: JSON.stringify(body),
+         signal: request.abortSignal
+       });
+     } catch (err) {
+       if (err instanceof RetryableError) throw err;
+       throw new RetryableError(
+         `Ollama connection failed: ${err instanceof Error ? err.message : String(err)}`
+       );
+     }
+     if (!response.ok) {
+       const text = await response.text();
+       if (response.status === 429 || response.status === 503 || response.status === 529) {
+         throw new RetryableError(`Ollama API error ${response.status}: ${text.slice(0, 200)}`);
+       }
+       throw new Error(`Ollama API error ${response.status}: ${text.slice(0, 200)}`);
+     }
+     const data = await response.json();
+     const choice = data.choices?.[0];
+     if (!choice) {
+       throw new Error("Ollama returned no choices");
+     }
+     const content = [];
+     let stopReason = "end_turn";
+     if (choice.message?.content) {
+       let text = choice.message.content;
+       text = text.replace(/<think[^>]*>[\s\S]*?<\/think>/gi, "").trim();
+       text = text.replace(/<thinking>[\s\S]*?<\/thinking>/gi, "").trim();
+       text = text.replace(/\[Thinking[^\]]*\]/gi, "").trim();
+       if (text.length > 0) {
+         content.push({ type: "text", text });
+       }
+     }
+     if (choice.message?.tool_calls?.length) {
+       stopReason = "tool_use";
+       for (const tc of choice.message.tool_calls) {
+         let input;
+         try {
+           input = JSON.parse(tc.function.arguments);
+         } catch {
+           input = {};
+         }
+         content.push({
+           type: "tool_use",
+           id: tc.id,
+           name: tc.function.name,
+           input
+         });
+       }
+     }
+     return {
+       content,
+       stopReason,
+       usage: {
+         inputTokens: data.usage?.prompt_tokens ?? 0,
+         outputTokens: data.usage?.completion_tokens ?? 0,
+         cacheReadTokens: 0,
+         cacheWriteTokens: 0
+       }
+     };
+   }
+ };
+
+ // src/llm/index.ts
+ function anthropic(config) {
+   return new AnthropicProvider(config);
+ }
+ function openai(config) {
+   return new OpenAIProvider(config);
+ }
+ function ollama(config) {
+   return new OllamaProvider(config);
+ }
+ var CustomProvider = class {
+   model;
+   chatFn;
+   constructor(config) {
+     this.model = config.model;
+     this.chatFn = config.chat;
+   }
+   chat(request) {
+     return this.chatFn(request);
+   }
+ };
+ function createProvider(config) {
+   switch (config.provider) {
+     case "anthropic":
+       return new AnthropicProvider(config);
+     case "openai":
+       return new OpenAIProvider(config);
+     case "ollama":
+       return new OllamaProvider(config);
+     case "custom":
+       return new CustomProvider(config);
+   }
+ }
+ // Annotate the CommonJS export names for ESM import in node:
+ 0 && (module.exports = {
+   AnthropicProvider,
+   BaseProvider,
+   OllamaProvider,
+   OpenAIProvider,
+   RetryableError,
+   anthropic,
+   createProvider,
+   ollama,
+   openai
+ });
+ //# sourceMappingURL=index.cjs.map
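
Taken together, the bundle gives every provider one normalized chat contract: each chat() resolves to { content, usage, stopReason }, and transient failures (HTTP 429, Anthropic's 529 overload status, and common socket errors) are converted into RetryableError, so a caller can apply a single retry policy across all backends. A minimal consumer sketch follows; the model tag, backoff policy, and "./llm" import specifier are illustrative assumptions, while createProvider, RetryableError, and the ChatRequest fields come from the bundle above:

    // Sketch only: model tag and retry policy are illustrative, not part of
    // the package; the import specifier assumes a "./llm" subpath export.
    import { createProvider, RetryableError } from "@avasis-ai/synthcode/llm";

    const provider = createProvider({ provider: "ollama", model: "llama3.1" });

    async function chatWithRetry(prompt: string, maxAttempts = 3) {
      for (let attempt = 1; ; attempt++) {
        try {
          return await provider.chat({
            messages: [{ role: "user", content: prompt }],
            systemPrompt: "You are a concise assistant."
          });
        } catch (err) {
          // Only RetryableError (rate limits, overload, network drops) is
          // worth retrying; anything else propagates immediately.
          if (!(err instanceof RetryableError) || attempt >= maxAttempts) throw err;
          await new Promise((r) => setTimeout(r, 500 * 2 ** attempt)); // exponential backoff
        }
      }
    }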
@@ -0,0 +1 @@
+ {"version":3,"sources":["../../src/llm/index.ts","../../src/llm/provider.ts","../../src/llm/anthropic.ts","../../src/llm/openai.ts","../../src/llm/ollama.ts"],"sourcesContent":["import type { ModelResponse } from '../types.js';\nimport { BaseProvider, RetryableError } from './provider.js';\nimport type { Provider, ChatRequest, ChatMessage, APIToolDefinition } from './provider.js';\nimport { AnthropicProvider } from './anthropic.js';\nimport type { AnthropicProviderConfig } from './anthropic.js';\nimport { OpenAIProvider } from './openai.js';\nimport type { OpenAIProviderConfig } from './openai.js';\nimport { OllamaProvider } from './ollama.js';\nimport type { OllamaProviderConfig } from './ollama.js';\n\nexport { BaseProvider, RetryableError } from './provider.js';\nexport type { Provider, ChatRequest, ChatMessage, APIToolDefinition } from './provider.js';\nexport { AnthropicProvider } from './anthropic.js';\nexport type { AnthropicProviderConfig } from './anthropic.js';\nexport { OpenAIProvider } from './openai.js';\nexport type { OpenAIProviderConfig } from './openai.js';\nexport { OllamaProvider } from './ollama.js';\nexport type { OllamaProviderConfig } from './ollama.js';\n\nexport function anthropic(config: AnthropicProviderConfig): AnthropicProvider {\n return new AnthropicProvider(config);\n}\n\nexport function openai(config: OpenAIProviderConfig): OpenAIProvider {\n return new OpenAIProvider(config);\n}\n\nexport function ollama(config: OllamaProviderConfig): OllamaProvider {\n return new OllamaProvider(config);\n}\n\nexport interface CustomProviderConfig {\n provider: 'custom';\n model: string;\n chat: (request: ChatRequest) => Promise<ModelResponse>;\n}\n\nclass CustomProvider implements Provider {\n readonly model: string;\n private readonly chatFn: (request: ChatRequest) => Promise<ModelResponse>;\n\n constructor(config: CustomProviderConfig) {\n this.model = config.model;\n this.chatFn = config.chat;\n }\n\n chat(request: ChatRequest): Promise<ModelResponse> {\n return this.chatFn(request);\n }\n}\n\nexport type OllamaProviderConfigWithProvider = OllamaProviderConfig & { provider: 'ollama' };\n\nexport function createProvider(\n config:\n | (AnthropicProviderConfig & { provider: 'anthropic' })\n | (OpenAIProviderConfig & { provider: 'openai' })\n | OllamaProviderConfigWithProvider\n | CustomProviderConfig,\n): Provider {\n switch (config.provider) {\n case 'anthropic':\n return new AnthropicProvider(config);\n case 'openai':\n return new OpenAIProvider(config);\n case 'ollama':\n return new OllamaProvider(config);\n case 'custom':\n return new CustomProvider(config);\n }\n}\n","import type { ModelResponse, ContentBlock, TokenUsage, ProviderConfig } from '../types.js';\nimport type { APIToolDefinition } from '../tools/tool.js';\n\nexport interface Provider {\n readonly model: string;\n chat(request: ChatRequest): Promise<ModelResponse>;\n}\n\nexport interface ChatRequest {\n messages: ChatMessage[];\n tools?: APIToolDefinition[];\n systemPrompt?: string;\n maxOutputTokens?: number;\n temperature?: number;\n abortSignal?: AbortSignal;\n}\n\nexport interface ChatMessage {\n role: \"user\" | \"assistant\" | \"tool\";\n content: string | ContentBlock[];\n tool_use_id?: string;\n is_error?: boolean;\n}\n\nexport type { APIToolDefinition } from '../tools/tool.js';\n\nexport class RetryableError extends Error {\n readonly cause?: Error;\n\n constructor(message: string, cause?: Error) {\n super(message);\n this.name = 'RetryableError';\n this.cause = cause;\n }\n}\n\nexport abstract 
class BaseProvider implements Provider {\n readonly model: string;\n readonly maxOutputTokens?: number;\n readonly temperature?: number;\n\n constructor(config: ProviderConfig) {\n this.model = config.model;\n this.maxOutputTokens = config.maxOutputTokens;\n this.temperature = config.temperature;\n }\n\n abstract chat(request: ChatRequest): Promise<ModelResponse>;\n\n protected abstract mapMessages(messages: ChatMessage[]): unknown[];\n\n protected abstract mapTools(tools?: APIToolDefinition[]): unknown[] | undefined;\n\n protected abstract mapUsage(providerUsage: unknown): TokenUsage;\n\n protected mapStopReason(reason: string): ModelResponse[\"stopReason\"] {\n switch (reason) {\n case 'end_turn':\n case 'stop':\n return 'end_turn';\n case 'tool_use':\n case 'tool_calls':\n return 'tool_use';\n case 'max_tokens':\n case 'length':\n return 'max_tokens';\n case 'stop_sequence':\n return 'stop_sequence';\n default:\n return 'end_turn';\n }\n }\n}\n","import type { ModelResponse, ContentBlock, TextBlock, ToolUseBlock, ThinkingBlock, TokenUsage, ProviderConfig } from '../types.js';\nimport type { ChatRequest, ChatMessage, APIToolDefinition } from './provider.js';\nimport { BaseProvider, RetryableError } from './provider.js';\n\nexport interface AnthropicProviderConfig extends ProviderConfig {\n apiKey: string;\n baseURL?: string;\n dangerouslySkipAuth?: boolean;\n enableCaching?: boolean;\n}\n\nexport class AnthropicProvider extends BaseProvider {\n private readonly apiKey: string;\n private readonly baseURL?: string;\n private readonly dangerouslySkipAuth?: boolean;\n private readonly enableCaching: boolean;\n private client: any = null;\n\n constructor(config: AnthropicProviderConfig) {\n super(config);\n this.apiKey = config.apiKey;\n this.baseURL = config.baseURL;\n this.dangerouslySkipAuth = config.dangerouslySkipAuth;\n this.enableCaching = config.enableCaching ?? false;\n }\n\n async chat(request: ChatRequest): Promise<ModelResponse> {\n const AnthropicSDK = (await import('@anthropic-ai/sdk')).default;\n\n if (!this.client) {\n const opts: Record<string, unknown> = { apiKey: this.apiKey };\n if (this.baseURL) opts.baseURL = this.baseURL;\n if (this.dangerouslySkipAuth) opts.dangerouslySkipAuth = true;\n this.client = new AnthropicSDK(opts);\n }\n\n const messages = this.mapMessages(request.messages);\n const tools = this.mapTools(request.tools);\n const maxTokens = request.maxOutputTokens ?? this.maxOutputTokens ?? 4096;\n const temperature = request.temperature ?? this.temperature;\n\n const body: Record<string, unknown> = {\n model: this.model,\n max_tokens: maxTokens,\n messages,\n };\n\n if (request.systemPrompt) {\n if (this.enableCaching) {\n body.system = [\n { type: \"text\", text: request.systemPrompt, cache_control: { type: \"ephemeral\" } },\n ];\n } else {\n body.system = request.systemPrompt;\n }\n }\n if (tools) body.tools = tools;\n if (temperature !== undefined) body.temperature = temperature;\n\n const reqOptions: Record<string, unknown> = {};\n if (request.abortSignal) reqOptions.signal = request.abortSignal;\n\n let response: any;\n try {\n response = await this.client.messages.create(body, reqOptions);\n } catch (err: unknown) {\n const status = (err as any)?.status;\n const code = (err as any)?.code;\n const isNetwork =\n err instanceof TypeError ||\n (typeof code === 'string' &&\n ['ECONNRESET', 'ECONNREFUSED', 'ENOTFOUND', 'ETIMEDOUT'].includes(code));\n\n if (status === 429 || status === 529 || isNetwork) {\n throw new RetryableError(\n err instanceof Error ? 
err.message : String(err),\n err instanceof Error ? err : undefined,\n );\n }\n throw new Error(\n `Anthropic API error: ${err instanceof Error ? err.message : String(err)}`,\n err instanceof Error ? { cause: err } : undefined,\n );\n }\n\n const content: ContentBlock[] = [];\n for (const block of response.content) {\n if (block.type === 'text') {\n content.push({ type: 'text', text: block.text } as TextBlock);\n } else if (block.type === 'tool_use') {\n content.push({ type: 'tool_use', id: block.id, name: block.name, input: block.input } as ToolUseBlock);\n } else if (block.type === 'thinking') {\n content.push({ type: 'thinking', thinking: block.thinking } as ThinkingBlock);\n }\n }\n\n return {\n content,\n usage: this.mapUsage(response.usage),\n stopReason: this.mapStopReason(response.stop_reason),\n };\n }\n\n protected mapMessages(messages: ChatMessage[]): unknown[] {\n return messages.map((msg) => {\n if (msg.role === 'tool') {\n const textContent =\n typeof msg.content === 'string'\n ? msg.content\n : msg.content\n .filter((b): b is TextBlock => b.type === 'text')\n .map((b) => b.text)\n .join('\\n');\n\n return {\n role: 'user',\n content: [\n {\n type: 'tool_result',\n tool_use_id: msg.tool_use_id ?? '',\n content: textContent,\n ...(msg.is_error ? { is_error: true } : {}),\n },\n ],\n };\n }\n\n if (msg.role === 'assistant') {\n const content =\n typeof msg.content === 'string'\n ? [{ type: 'text', text: msg.content }]\n : msg.content.map((block) => this.mapOutgoingBlock(block));\n return { role: 'assistant', content };\n }\n\n if (typeof msg.content === 'string') {\n return { role: 'user', content: msg.content };\n }\n\n return {\n role: 'user',\n content: msg.content.map((block) => this.mapOutgoingBlock(block)),\n };\n });\n }\n\n protected mapTools(tools?: APIToolDefinition[]): unknown[] | undefined {\n if (!tools || tools.length === 0) return undefined;\n return tools.map((tool) => ({\n name: tool.name,\n description: tool.description,\n input_schema: tool.input_schema,\n type: 'tool',\n }));\n }\n\n protected mapUsage(providerUsage: unknown): TokenUsage {\n const u = providerUsage as {\n input_tokens: number;\n output_tokens: number;\n cache_read_input_tokens?: number;\n cache_creation_input_tokens?: number;\n };\n return {\n inputTokens: u.input_tokens,\n outputTokens: u.output_tokens,\n cacheReadTokens: u.cache_read_input_tokens,\n cacheWriteTokens: u.cache_creation_input_tokens,\n };\n }\n\n private mapOutgoingBlock(block: ContentBlock): Record<string, unknown> {\n switch (block.type) {\n case 'text':\n return { type: 'text', text: block.text };\n case 'tool_use':\n return { type: 'tool_use', id: block.id, name: block.name, input: block.input };\n case 'thinking':\n return { type: 'thinking', thinking: block.thinking };\n default:\n return { type: 'text', text: '' };\n }\n }\n}\n","import type { ModelResponse, ContentBlock, TextBlock, ToolUseBlock, TokenUsage, ProviderConfig } from '../types.js';\nimport type { ChatRequest, ChatMessage, APIToolDefinition } from './provider.js';\nimport { BaseProvider, RetryableError } from './provider.js';\n\nexport interface OpenAIProviderConfig extends ProviderConfig {\n apiKey: string;\n baseURL?: string;\n organization?: string;\n}\n\nexport class OpenAIProvider extends BaseProvider {\n private readonly apiKey: string;\n private readonly baseURL?: string;\n private readonly organization?: string;\n private client: any = null;\n\n constructor(config: OpenAIProviderConfig) {\n super(config);\n this.apiKey = config.apiKey;\n 
this.baseURL = config.baseURL;\n this.organization = config.organization;\n }\n\n async chat(request: ChatRequest): Promise<ModelResponse> {\n const OpenAI = (await import('openai')).default;\n\n if (!this.client) {\n const opts: Record<string, unknown> = { apiKey: this.apiKey };\n if (this.baseURL) opts.baseURL = this.baseURL;\n if (this.organization) opts.organization = this.organization;\n this.client = new OpenAI(opts);\n }\n\n const mappedMessages = this.mapMessages(request.messages);\n const messages: unknown[] = [];\n if (request.systemPrompt) {\n messages.push({ role: 'system', content: request.systemPrompt });\n }\n messages.push(...mappedMessages);\n\n const tools = this.mapTools(request.tools);\n const maxTokens = request.maxOutputTokens ?? this.maxOutputTokens;\n const temperature = request.temperature ?? this.temperature;\n\n const body: Record<string, unknown> = {\n model: this.model,\n messages,\n stream: false,\n };\n\n if (tools) body.tools = tools;\n if (maxTokens !== undefined) body.max_tokens = maxTokens;\n if (temperature !== undefined) body.temperature = temperature;\n\n const reqOptions: Record<string, unknown> = {};\n if (request.abortSignal) reqOptions.signal = request.abortSignal;\n\n let response: any;\n try {\n response = await this.client.chat.completions.create(body, reqOptions);\n } catch (err: unknown) {\n const status = (err as any)?.status;\n const code = (err as any)?.code;\n const isNetwork =\n err instanceof TypeError ||\n (typeof code === 'string' &&\n ['ECONNRESET', 'ECONNREFUSED', 'ENOTFOUND', 'ETIMEDOUT'].includes(code));\n\n if (status === 429 || isNetwork) {\n throw new RetryableError(\n err instanceof Error ? err.message : String(err),\n err instanceof Error ? err : undefined,\n );\n }\n throw new Error(\n `OpenAI API error: ${err instanceof Error ? err.message : String(err)}`,\n err instanceof Error ? { cause: err } : undefined,\n );\n }\n\n const choice = response.choices[0];\n const content: ContentBlock[] = [];\n\n if (choice.message.content) {\n content.push({ type: 'text', text: choice.message.content } as TextBlock);\n }\n\n if (choice.message.tool_calls) {\n for (const tc of choice.message.tool_calls) {\n let input: Record<string, unknown> = {};\n try {\n input = JSON.parse(tc.function.arguments);\n } catch {}\n content.push({\n type: 'tool_use',\n id: tc.id,\n name: tc.function.name,\n input,\n } as ToolUseBlock);\n }\n }\n\n return {\n content,\n usage: this.mapUsage(response.usage),\n stopReason: this.mapStopReason(choice.finish_reason),\n };\n }\n\n protected mapMessages(messages: ChatMessage[]): unknown[] {\n return messages.map((msg) => {\n if (msg.role === 'tool') {\n return {\n role: 'tool',\n tool_call_id: msg.tool_use_id ?? '',\n content:\n typeof msg.content === 'string'\n ? msg.content\n : msg.content\n .filter((b): b is TextBlock => b.type === 'text')\n .map((b) => b.text)\n .join('\\n'),\n };\n }\n\n if (msg.role === 'assistant') {\n if (typeof msg.content === 'string') {\n return { role: 'assistant', content: msg.content };\n }\n\n const textParts = msg.content\n .filter((b): b is TextBlock => b.type === 'text')\n .map((b) => b.text)\n .join('');\n const toolCalls = msg.content\n .filter((b): b is ToolUseBlock => b.type === 'tool_use')\n .map((b) => ({\n id: b.id,\n type: 'function' as const,\n function: { name: b.name, arguments: JSON.stringify(b.input) },\n }));\n\n return {\n role: 'assistant',\n content: textParts || null,\n ...(toolCalls.length > 0 ? 
{ tool_calls: toolCalls } : {}),\n };\n }\n\n return {\n role: 'user',\n content:\n typeof msg.content === 'string'\n ? msg.content\n : msg.content\n .filter((b): b is TextBlock => b.type === 'text')\n .map((b) => b.text)\n .join('\\n'),\n };\n });\n }\n\n protected mapTools(tools?: APIToolDefinition[]): unknown[] | undefined {\n if (!tools || tools.length === 0) return undefined;\n return tools.map((tool) => ({\n type: 'function',\n function: {\n name: tool.name,\n description: tool.description,\n parameters: tool.input_schema,\n },\n }));\n }\n\n protected mapUsage(providerUsage: unknown): TokenUsage {\n const u = providerUsage as {\n prompt_tokens: number;\n completion_tokens: number;\n prompt_tokens_details?: { cached_tokens?: number };\n };\n return {\n inputTokens: u.prompt_tokens,\n outputTokens: u.completion_tokens,\n cacheReadTokens: u.prompt_tokens_details?.cached_tokens,\n };\n }\n}\n","import type { Provider, ChatRequest } from \"./provider.js\";\nimport { RetryableError } from \"./provider.js\";\nimport type { ModelResponse, ContentBlock } from \"../types.js\";\nimport type { APIToolDefinition } from \"../tools/tool.js\";\n\nexport interface OllamaProviderConfig {\n model: string;\n baseURL?: string;\n}\n\nexport class OllamaProvider implements Provider {\n readonly model: string;\n private readonly baseURL: string;\n\n constructor(config: OllamaProviderConfig) {\n this.model = config.model;\n this.baseURL = config.baseURL ?? \"http://localhost:11434/v1\";\n }\n\n async chat(request: ChatRequest): Promise<ModelResponse> {\n const messages: any[] = [];\n\n if (request.systemPrompt) {\n messages.push({ role: \"system\", content: request.systemPrompt });\n }\n\n for (const m of request.messages) {\n if (m.role === \"tool\") {\n messages.push({\n role: \"tool\" as const,\n tool_call_id: m.tool_use_id,\n content: m.content,\n });\n continue;\n }\n if (m.role === \"assistant\" && Array.isArray(m.content)) {\n const textParts = (m.content as ContentBlock[]).filter(b => b.type === \"text\");\n const toolParts = (m.content as ContentBlock[]).filter(b => b.type === \"tool_use\");\n\n const msg: any = {};\n if (textParts.length > 0) {\n msg.content = textParts.map(p => (p as { text: string }).text).join(\"\");\n }\n if (toolParts.length > 0) {\n msg.tool_calls = toolParts.map(b => {\n const tb = b as { type: \"tool_use\"; id: string; name: string; input: Record<string, unknown> };\n return {\n id: tb.id,\n type: \"function\",\n function: { name: tb.name, arguments: JSON.stringify(tb.input) },\n };\n });\n }\n msg.role = \"assistant\";\n messages.push(msg);\n continue;\n }\n messages.push({ role: m.role, content: m.content });\n }\n\n const body: any = {\n model: this.model,\n messages,\n stream: false,\n };\n\n if (request.maxOutputTokens) {\n body.max_tokens = request.maxOutputTokens;\n }\n\n if (request.tools?.length) {\n body.tools = request.tools.map(t => ({\n type: \"function\",\n function: {\n name: t.name,\n description: t.description,\n parameters: t.input_schema,\n },\n }));\n }\n\n let response;\n try {\n response = await fetch(`${this.baseURL}/chat/completions`, {\n method: \"POST\",\n headers: { \"Content-Type\": \"application/json\" },\n body: JSON.stringify(body),\n signal: request.abortSignal,\n });\n } catch (err) {\n if (err instanceof RetryableError) throw err;\n throw new RetryableError(\n `Ollama connection failed: ${err instanceof Error ? 
err.message : String(err)}`,\n );\n }\n\n if (!response.ok) {\n const text = await response.text();\n if (response.status === 429 || response.status === 503 || response.status === 529) {\n throw new RetryableError(`Ollama API error ${response.status}: ${text.slice(0, 200)}`);\n }\n throw new Error(`Ollama API error ${response.status}: ${text.slice(0, 200)}`);\n }\n\n const data = await response.json();\n const choice = data.choices?.[0];\n if (!choice) {\n throw new Error(\"Ollama returned no choices\");\n }\n\n const content: any[] = [];\n let stopReason: ModelResponse[\"stopReason\"] = \"end_turn\";\n\n if (choice.message?.content) {\n let text = choice.message.content;\n text = text.replace(/<think[^>]*>[\\s\\S]*?<\\/think>/gi, \"\").trim();\n text = text.replace(/<thinking>[\\s\\S]*?<\\/thinking>/gi, \"\").trim();\n text = text.replace(/\\[Thinking[^\\]]*\\]/gi, \"\").trim();\n if (text.length > 0) {\n content.push({ type: \"text\" as const, text });\n }\n }\n\n if (choice.message?.tool_calls?.length) {\n stopReason = \"tool_use\";\n for (const tc of choice.message.tool_calls) {\n let input: Record<string, unknown>;\n try {\n input = JSON.parse(tc.function.arguments);\n } catch {\n input = {};\n }\n content.push({\n type: \"tool_use\" as const,\n id: tc.id,\n name: tc.function.name,\n input,\n });\n }\n }\n\n return {\n content,\n stopReason,\n usage: {\n inputTokens: data.usage?.prompt_tokens ?? 0,\n outputTokens: data.usage?.completion_tokens ?? 0,\n cacheReadTokens: 0,\n cacheWriteTokens: 0,\n },\n };\n }\n}\n"],"mappings":";;;;;;;;;;;;;;;;;;;;;;;;;;;;;;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;;;AC0BO,IAAM,iBAAN,cAA6B,MAAM;AAAA,EAC/B;AAAA,EAET,YAAY,SAAiB,OAAe;AAC1C,UAAM,OAAO;AACb,SAAK,OAAO;AACZ,SAAK,QAAQ;AAAA,EACf;AACF;AAEO,IAAe,eAAf,MAAgD;AAAA,EAC5C;AAAA,EACA;AAAA,EACA;AAAA,EAET,YAAY,QAAwB;AAClC,SAAK,QAAQ,OAAO;AACpB,SAAK,kBAAkB,OAAO;AAC9B,SAAK,cAAc,OAAO;AAAA,EAC5B;AAAA,EAUU,cAAc,QAA6C;AACnE,YAAQ,QAAQ;AAAA,MACd,KAAK;AAAA,MACL,KAAK;AACH,eAAO;AAAA,MACT,KAAK;AAAA,MACL,KAAK;AACH,eAAO;AAAA,MACT,KAAK;AAAA,MACL,KAAK;AACH,eAAO;AAAA,MACT,KAAK;AACH,eAAO;AAAA,MACT;AACE,eAAO;AAAA,IACX;AAAA,EACF;AACF;;;AC7DO,IAAM,oBAAN,cAAgC,aAAa;AAAA,EACjC;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACT,SAAc;AAAA,EAEtB,YAAY,QAAiC;AAC3C,UAAM,MAAM;AACZ,SAAK,SAAS,OAAO;AACrB,SAAK,UAAU,OAAO;AACtB,SAAK,sBAAsB,OAAO;AAClC,SAAK,gBAAgB,OAAO,iBAAiB;AAAA,EAC/C;AAAA,EAEA,MAAM,KAAK,SAA8C;AACvD,UAAM,gBAAgB,MAAM,OAAO,mBAAmB,GAAG;AAEzD,QAAI,CAAC,KAAK,QAAQ;AAChB,YAAM,OAAgC,EAAE,QAAQ,KAAK,OAAO;AAC5D,UAAI,KAAK,QAAS,MAAK,UAAU,KAAK;AACtC,UAAI,KAAK,oBAAqB,MAAK,sBAAsB;AACzD,WAAK,SAAS,IAAI,aAAa,IAAI;AAAA,IACrC;AAEA,UAAM,WAAW,KAAK,YAAY,QAAQ,QAAQ;AAClD,UAAM,QAAQ,KAAK,SAAS,QAAQ,KAAK;AACzC,UAAM,YAAY,QAAQ,mBAAmB,KAAK,mBAAmB;AACrE,UAAM,cAAc,QAAQ,eAAe,KAAK;AAEhD,UAAM,OAAgC;AAAA,MACpC,OAAO,KAAK;AAAA,MACZ,YAAY;AAAA,MACZ;AAAA,IACF;AAEA,QAAI,QAAQ,cAAc;AACxB,UAAI,KAAK,eAAe;AACtB,aAAK,SAAS;AAAA,UACZ,EAAE,MAAM,QAAQ,MAAM,QAAQ,cAAc,eAAe,EAAE,MAAM,YAAY,EAAE;AAAA,QACnF;AAAA,MACF,OAAO;AACL,aAAK,SAAS,QAAQ;AAAA,MACxB;AAAA,IACF;AACA,QAAI,MAAO,MAAK,QAAQ;AACxB,QAAI,gBAAgB,OAAW,MAAK,cAAc;AAElD,UAAM,aAAsC,CAAC;AAC7C,QAAI,QAAQ,YAAa,YAAW,SAAS,QAAQ;AAErD,QAAI;AACJ,QAAI;AACF,iBAAW,MAAM,KAAK,OAAO,SAAS,OAAO,MAAM,UAAU;AAAA,IAC/D,SAAS,KAAc;AACrB,YAAM,SAAU,KAAa;AAC7B,YAAM,OAAQ,KAAa;AAC3B,YAAM,YACJ,eAAe,aACd,OAAO,SAAS,YACf,CAAC,cAAc,gBAAgB,aAAa,WAAW,EAAE,SAAS,IAAI;AAE1E,UAAI,WAAW,OAAO,WAAW,OAAO,WAAW;AACjD,cAAM,IAAI;AAAA,UACR,eAAe,QAAQ,IAAI,UAAU,OAAO,GAAG;AAAA,UAC/C,eAAe,QAAQ,MAAM;AAAA,QAC/B;AAAA,MACF;AACA,YAAM,IAAI;AAAA,QACR,wBAAwB,eAAe,QAAQ,IAAI,UAAU,OAAO
,GAAG,CAAC;AAAA,QACxE,eAAe,QAAQ,EAAE,OAAO,IAAI,IAAI;AAAA,MAC1C;AAAA,IACF;AAEA,UAAM,UAA0B,CAAC;AACjC,eAAW,SAAS,SAAS,SAAS;AACpC,UAAI,MAAM,SAAS,QAAQ;AACzB,gBAAQ,KAAK,EAAE,MAAM,QAAQ,MAAM,MAAM,KAAK,CAAc;AAAA,MAC9D,WAAW,MAAM,SAAS,YAAY;AACpC,gBAAQ,KAAK,EAAE,MAAM,YAAY,IAAI,MAAM,IAAI,MAAM,MAAM,MAAM,OAAO,MAAM,MAAM,CAAiB;AAAA,MACvG,WAAW,MAAM,SAAS,YAAY;AACpC,gBAAQ,KAAK,EAAE,MAAM,YAAY,UAAU,MAAM,SAAS,CAAkB;AAAA,MAC9E;AAAA,IACF;AAEA,WAAO;AAAA,MACL;AAAA,MACA,OAAO,KAAK,SAAS,SAAS,KAAK;AAAA,MACnC,YAAY,KAAK,cAAc,SAAS,WAAW;AAAA,IACrD;AAAA,EACF;AAAA,EAEU,YAAY,UAAoC;AACxD,WAAO,SAAS,IAAI,CAAC,QAAQ;AAC3B,UAAI,IAAI,SAAS,QAAQ;AACvB,cAAM,cACJ,OAAO,IAAI,YAAY,WACnB,IAAI,UACJ,IAAI,QACD,OAAO,CAAC,MAAsB,EAAE,SAAS,MAAM,EAC/C,IAAI,CAAC,MAAM,EAAE,IAAI,EACjB,KAAK,IAAI;AAElB,eAAO;AAAA,UACL,MAAM;AAAA,UACN,SAAS;AAAA,YACP;AAAA,cACE,MAAM;AAAA,cACN,aAAa,IAAI,eAAe;AAAA,cAChC,SAAS;AAAA,cACT,GAAI,IAAI,WAAW,EAAE,UAAU,KAAK,IAAI,CAAC;AAAA,YAC3C;AAAA,UACF;AAAA,QACF;AAAA,MACF;AAEA,UAAI,IAAI,SAAS,aAAa;AAC5B,cAAM,UACJ,OAAO,IAAI,YAAY,WACnB,CAAC,EAAE,MAAM,QAAQ,MAAM,IAAI,QAAQ,CAAC,IACpC,IAAI,QAAQ,IAAI,CAAC,UAAU,KAAK,iBAAiB,KAAK,CAAC;AAC7D,eAAO,EAAE,MAAM,aAAa,QAAQ;AAAA,MACtC;AAEA,UAAI,OAAO,IAAI,YAAY,UAAU;AACnC,eAAO,EAAE,MAAM,QAAQ,SAAS,IAAI,QAAQ;AAAA,MAC9C;AAEA,aAAO;AAAA,QACL,MAAM;AAAA,QACN,SAAS,IAAI,QAAQ,IAAI,CAAC,UAAU,KAAK,iBAAiB,KAAK,CAAC;AAAA,MAClE;AAAA,IACF,CAAC;AAAA,EACH;AAAA,EAEU,SAAS,OAAoD;AACrE,QAAI,CAAC,SAAS,MAAM,WAAW,EAAG,QAAO;AACzC,WAAO,MAAM,IAAI,CAAC,UAAU;AAAA,MAC1B,MAAM,KAAK;AAAA,MACX,aAAa,KAAK;AAAA,MAClB,cAAc,KAAK;AAAA,MACnB,MAAM;AAAA,IACR,EAAE;AAAA,EACJ;AAAA,EAEU,SAAS,eAAoC;AACrD,UAAM,IAAI;AAMV,WAAO;AAAA,MACL,aAAa,EAAE;AAAA,MACf,cAAc,EAAE;AAAA,MAChB,iBAAiB,EAAE;AAAA,MACnB,kBAAkB,EAAE;AAAA,IACtB;AAAA,EACF;AAAA,EAEQ,iBAAiB,OAA8C;AACrE,YAAQ,MAAM,MAAM;AAAA,MAClB,KAAK;AACH,eAAO,EAAE,MAAM,QAAQ,MAAM,MAAM,KAAK;AAAA,MAC1C,KAAK;AACH,eAAO,EAAE,MAAM,YAAY,IAAI,MAAM,IAAI,MAAM,MAAM,MAAM,OAAO,MAAM,MAAM;AAAA,MAChF,KAAK;AACH,eAAO,EAAE,MAAM,YAAY,UAAU,MAAM,SAAS;AAAA,MACtD;AACE,eAAO,EAAE,MAAM,QAAQ,MAAM,GAAG;AAAA,IACpC;AAAA,EACF;AACF;;;AC7KO,IAAM,iBAAN,cAA6B,aAAa;AAAA,EAC9B;AAAA,EACA;AAAA,EACA;AAAA,EACT,SAAc;AAAA,EAEtB,YAAY,QAA8B;AACxC,UAAM,MAAM;AACZ,SAAK,SAAS,OAAO;AACrB,SAAK,UAAU,OAAO;AACtB,SAAK,eAAe,OAAO;AAAA,EAC7B;AAAA,EAEA,MAAM,KAAK,SAA8C;AACvD,UAAM,UAAU,MAAM,OAAO,QAAQ,GAAG;AAExC,QAAI,CAAC,KAAK,QAAQ;AAChB,YAAM,OAAgC,EAAE,QAAQ,KAAK,OAAO;AAC5D,UAAI,KAAK,QAAS,MAAK,UAAU,KAAK;AACtC,UAAI,KAAK,aAAc,MAAK,eAAe,KAAK;AAChD,WAAK,SAAS,IAAI,OAAO,IAAI;AAAA,IAC/B;AAEA,UAAM,iBAAiB,KAAK,YAAY,QAAQ,QAAQ;AACxD,UAAM,WAAsB,CAAC;AAC7B,QAAI,QAAQ,cAAc;AACxB,eAAS,KAAK,EAAE,MAAM,UAAU,SAAS,QAAQ,aAAa,CAAC;AAAA,IACjE;AACA,aAAS,KAAK,GAAG,cAAc;AAE/B,UAAM,QAAQ,KAAK,SAAS,QAAQ,KAAK;AACzC,UAAM,YAAY,QAAQ,mBAAmB,KAAK;AAClD,UAAM,cAAc,QAAQ,eAAe,KAAK;AAEhD,UAAM,OAAgC;AAAA,MACpC,OAAO,KAAK;AAAA,MACZ;AAAA,MACA,QAAQ;AAAA,IACV;AAEA,QAAI,MAAO,MAAK,QAAQ;AACxB,QAAI,cAAc,OAAW,MAAK,aAAa;AAC/C,QAAI,gBAAgB,OAAW,MAAK,cAAc;AAElD,UAAM,aAAsC,CAAC;AAC7C,QAAI,QAAQ,YAAa,YAAW,SAAS,QAAQ;AAErD,QAAI;AACJ,QAAI;AACF,iBAAW,MAAM,KAAK,OAAO,KAAK,YAAY,OAAO,MAAM,UAAU;AAAA,IACvE,SAAS,KAAc;AACrB,YAAM,SAAU,KAAa;AAC7B,YAAM,OAAQ,KAAa;AAC3B,YAAM,YACJ,eAAe,aACd,OAAO,SAAS,YACf,CAAC,cAAc,gBAAgB,aAAa,WAAW,EAAE,SAAS,IAAI;AAE1E,UAAI,WAAW,OAAO,WAAW;AAC/B,cAAM,IAAI;AAAA,UACR,eAAe,QAAQ,IAAI,UAAU,OAAO,GAAG;AAAA,UAC/C,eAAe,QAAQ,MAAM;AAAA,QAC/B;AAAA,MACF;AACA,YAAM,IAAI;AAAA,QACR,qBAAqB,eAAe,QAAQ,IAAI,UAAU,OAAO,GAAG,CAAC;AAAA,QACrE,eAAe,QAAQ,EAAE,OAAO,IAAI,IAAI;AAAA,MAC1C;AAAA,IACF;AAEA,UAAM,SAAS,SAAS,QAAQ,CAAC;AACjC,UAAM,UAA0B,CAAC;AAEjC,QAAI,OAAO,QAAQ,SAAS;AAC1B,cAAQ,KAAK,EAAE,MAAM,QAAQ,MAAM,OAAO,QAAQ,QAAQ,CAAc;AAAA,IAC1E;AAEA,QAAI,OAAO,
QAAQ,YAAY;AAC7B,iBAAW,MAAM,OAAO,QAAQ,YAAY;AAC1C,YAAI,QAAiC,CAAC;AACtC,YAAI;AACF,kBAAQ,KAAK,MAAM,GAAG,SAAS,SAAS;AAAA,QAC1C,QAAQ;AAAA,QAAC;AACT,gBAAQ,KAAK;AAAA,UACX,MAAM;AAAA,UACN,IAAI,GAAG;AAAA,UACP,MAAM,GAAG,SAAS;AAAA,UAClB;AAAA,QACF,CAAiB;AAAA,MACnB;AAAA,IACF;AAEA,WAAO;AAAA,MACL;AAAA,MACA,OAAO,KAAK,SAAS,SAAS,KAAK;AAAA,MACnC,YAAY,KAAK,cAAc,OAAO,aAAa;AAAA,IACrD;AAAA,EACF;AAAA,EAEU,YAAY,UAAoC;AACxD,WAAO,SAAS,IAAI,CAAC,QAAQ;AAC3B,UAAI,IAAI,SAAS,QAAQ;AACvB,eAAO;AAAA,UACL,MAAM;AAAA,UACN,cAAc,IAAI,eAAe;AAAA,UACjC,SACE,OAAO,IAAI,YAAY,WACnB,IAAI,UACJ,IAAI,QACD,OAAO,CAAC,MAAsB,EAAE,SAAS,MAAM,EAC/C,IAAI,CAAC,MAAM,EAAE,IAAI,EACjB,KAAK,IAAI;AAAA,QACpB;AAAA,MACF;AAEA,UAAI,IAAI,SAAS,aAAa;AAC5B,YAAI,OAAO,IAAI,YAAY,UAAU;AACnC,iBAAO,EAAE,MAAM,aAAa,SAAS,IAAI,QAAQ;AAAA,QACnD;AAEA,cAAM,YAAY,IAAI,QACnB,OAAO,CAAC,MAAsB,EAAE,SAAS,MAAM,EAC/C,IAAI,CAAC,MAAM,EAAE,IAAI,EACjB,KAAK,EAAE;AACV,cAAM,YAAY,IAAI,QACnB,OAAO,CAAC,MAAyB,EAAE,SAAS,UAAU,EACtD,IAAI,CAAC,OAAO;AAAA,UACX,IAAI,EAAE;AAAA,UACN,MAAM;AAAA,UACN,UAAU,EAAE,MAAM,EAAE,MAAM,WAAW,KAAK,UAAU,EAAE,KAAK,EAAE;AAAA,QAC/D,EAAE;AAEJ,eAAO;AAAA,UACL,MAAM;AAAA,UACN,SAAS,aAAa;AAAA,UACtB,GAAI,UAAU,SAAS,IAAI,EAAE,YAAY,UAAU,IAAI,CAAC;AAAA,QAC1D;AAAA,MACF;AAEA,aAAO;AAAA,QACL,MAAM;AAAA,QACN,SACE,OAAO,IAAI,YAAY,WACnB,IAAI,UACJ,IAAI,QACD,OAAO,CAAC,MAAsB,EAAE,SAAS,MAAM,EAC/C,IAAI,CAAC,MAAM,EAAE,IAAI,EACjB,KAAK,IAAI;AAAA,MACpB;AAAA,IACF,CAAC;AAAA,EACH;AAAA,EAEU,SAAS,OAAoD;AACrE,QAAI,CAAC,SAAS,MAAM,WAAW,EAAG,QAAO;AACzC,WAAO,MAAM,IAAI,CAAC,UAAU;AAAA,MAC1B,MAAM;AAAA,MACN,UAAU;AAAA,QACR,MAAM,KAAK;AAAA,QACX,aAAa,KAAK;AAAA,QAClB,YAAY,KAAK;AAAA,MACnB;AAAA,IACF,EAAE;AAAA,EACJ;AAAA,EAEU,SAAS,eAAoC;AACrD,UAAM,IAAI;AAKV,WAAO;AAAA,MACL,aAAa,EAAE;AAAA,MACf,cAAc,EAAE;AAAA,MAChB,iBAAiB,EAAE,uBAAuB;AAAA,IAC5C;AAAA,EACF;AACF;;;AChLO,IAAM,iBAAN,MAAyC;AAAA,EACrC;AAAA,EACQ;AAAA,EAEjB,YAAY,QAA8B;AACxC,SAAK,QAAQ,OAAO;AACpB,SAAK,UAAU,OAAO,WAAW;AAAA,EACnC;AAAA,EAEA,MAAM,KAAK,SAA8C;AACvD,UAAM,WAAkB,CAAC;AAEzB,QAAI,QAAQ,cAAc;AACxB,eAAS,KAAK,EAAE,MAAM,UAAU,SAAS,QAAQ,aAAa,CAAC;AAAA,IACjE;AAEA,eAAW,KAAK,QAAQ,UAAU;AAChC,UAAI,EAAE,SAAS,QAAQ;AACrB,iBAAS,KAAK;AAAA,UACZ,MAAM;AAAA,UACN,cAAc,EAAE;AAAA,UAChB,SAAS,EAAE;AAAA,QACb,CAAC;AACD;AAAA,MACF;AACA,UAAI,EAAE,SAAS,eAAe,MAAM,QAAQ,EAAE,OAAO,GAAG;AACtD,cAAM,YAAa,EAAE,QAA2B,OAAO,OAAK,EAAE,SAAS,MAAM;AAC7E,cAAM,YAAa,EAAE,QAA2B,OAAO,OAAK,EAAE,SAAS,UAAU;AAEjF,cAAM,MAAW,CAAC;AAClB,YAAI,UAAU,SAAS,GAAG;AACxB,cAAI,UAAU,UAAU,IAAI,OAAM,EAAuB,IAAI,EAAE,KAAK,EAAE;AAAA,QACxE;AACA,YAAI,UAAU,SAAS,GAAG;AACxB,cAAI,aAAa,UAAU,IAAI,OAAK;AAClC,kBAAM,KAAK;AACX,mBAAO;AAAA,cACL,IAAI,GAAG;AAAA,cACP,MAAM;AAAA,cACN,UAAU,EAAE,MAAM,GAAG,MAAM,WAAW,KAAK,UAAU,GAAG,KAAK,EAAE;AAAA,YACjE;AAAA,UACF,CAAC;AAAA,QACH;AACA,YAAI,OAAO;AACX,iBAAS,KAAK,GAAG;AACjB;AAAA,MACF;AACA,eAAS,KAAK,EAAE,MAAM,EAAE,MAAM,SAAS,EAAE,QAAQ,CAAC;AAAA,IACpD;AAEA,UAAM,OAAY;AAAA,MAChB,OAAO,KAAK;AAAA,MACZ;AAAA,MACA,QAAQ;AAAA,IACV;AAEA,QAAI,QAAQ,iBAAiB;AAC3B,WAAK,aAAa,QAAQ;AAAA,IAC5B;AAEA,QAAI,QAAQ,OAAO,QAAQ;AACzB,WAAK,QAAQ,QAAQ,MAAM,IAAI,QAAM;AAAA,QACnC,MAAM;AAAA,QACN,UAAU;AAAA,UACR,MAAM,EAAE;AAAA,UACR,aAAa,EAAE;AAAA,UACf,YAAY,EAAE;AAAA,QAChB;AAAA,MACF,EAAE;AAAA,IACJ;AAEA,QAAI;AACJ,QAAI;AACF,iBAAW,MAAM,MAAM,GAAG,KAAK,OAAO,qBAAqB;AAAA,QACzD,QAAQ;AAAA,QACR,SAAS,EAAE,gBAAgB,mBAAmB;AAAA,QAC9C,MAAM,KAAK,UAAU,IAAI;AAAA,QACzB,QAAQ,QAAQ;AAAA,MAClB,CAAC;AAAA,IACH,SAAS,KAAK;AACZ,UAAI,eAAe,eAAgB,OAAM;AACzC,YAAM,IAAI;AAAA,QACR,6BAA6B,eAAe,QAAQ,IAAI,UAAU,OAAO,GAAG,CAAC;AAAA,MAC/E;AAAA,IACF;AAEA,QAAI,CAAC,SAAS,IAAI;AAChB,YAAM,OAAO,MAAM,SAAS,KAAK;AACjC,UAAI,SAAS,WAAW,OAAO,SAAS,WAAW,OAAO,SAAS,WAAW,KAAK;AACjF,cAAM,IAAI,eAAe,oBAAoB,SAAS,MAAM,KAAK,KAAK,MA
AM,GAAG,GAAG,CAAC,EAAE;AAAA,MACvF;AACA,YAAM,IAAI,MAAM,oBAAoB,SAAS,MAAM,KAAK,KAAK,MAAM,GAAG,GAAG,CAAC,EAAE;AAAA,IAC9E;AAEA,UAAM,OAAO,MAAM,SAAS,KAAK;AACjC,UAAM,SAAS,KAAK,UAAU,CAAC;AAC/B,QAAI,CAAC,QAAQ;AACX,YAAM,IAAI,MAAM,4BAA4B;AAAA,IAC9C;AAEA,UAAM,UAAiB,CAAC;AACxB,QAAI,aAA0C;AAE9C,QAAI,OAAO,SAAS,SAAS;AAC3B,UAAI,OAAO,OAAO,QAAQ;AAC1B,aAAO,KAAK,QAAQ,mCAAmC,EAAE,EAAE,KAAK;AAChE,aAAO,KAAK,QAAQ,oCAAoC,EAAE,EAAE,KAAK;AACjE,aAAO,KAAK,QAAQ,wBAAwB,EAAE,EAAE,KAAK;AACrD,UAAI,KAAK,SAAS,GAAG;AACnB,gBAAQ,KAAK,EAAE,MAAM,QAAiB,KAAK,CAAC;AAAA,MAC9C;AAAA,IACF;AAEA,QAAI,OAAO,SAAS,YAAY,QAAQ;AACtC,mBAAa;AACb,iBAAW,MAAM,OAAO,QAAQ,YAAY;AAC1C,YAAI;AACJ,YAAI;AACF,kBAAQ,KAAK,MAAM,GAAG,SAAS,SAAS;AAAA,QAC1C,QAAQ;AACN,kBAAQ,CAAC;AAAA,QACX;AACA,gBAAQ,KAAK;AAAA,UACX,MAAM;AAAA,UACN,IAAI,GAAG;AAAA,UACP,MAAM,GAAG,SAAS;AAAA,UAClB;AAAA,QACF,CAAC;AAAA,MACH;AAAA,IACF;AAEA,WAAO;AAAA,MACL;AAAA,MACA;AAAA,MACA,OAAO;AAAA,QACL,aAAa,KAAK,OAAO,iBAAiB;AAAA,QAC1C,cAAc,KAAK,OAAO,qBAAqB;AAAA,QAC/C,iBAAiB;AAAA,QACjB,kBAAkB;AAAA,MACpB;AAAA,IACF;AAAA,EACF;AACF;;;AJrIO,SAAS,UAAU,QAAoD;AAC5E,SAAO,IAAI,kBAAkB,MAAM;AACrC;AAEO,SAAS,OAAO,QAA8C;AACnE,SAAO,IAAI,eAAe,MAAM;AAClC;AAEO,SAAS,OAAO,QAA8C;AACnE,SAAO,IAAI,eAAe,MAAM;AAClC;AAQA,IAAM,iBAAN,MAAyC;AAAA,EAC9B;AAAA,EACQ;AAAA,EAEjB,YAAY,QAA8B;AACxC,SAAK,QAAQ,OAAO;AACpB,SAAK,SAAS,OAAO;AAAA,EACvB;AAAA,EAEA,KAAK,SAA8C;AACjD,WAAO,KAAK,OAAO,OAAO;AAAA,EAC5B;AACF;AAIO,SAAS,eACd,QAKU;AACV,UAAQ,OAAO,UAAU;AAAA,IACvB,KAAK;AACH,aAAO,IAAI,kBAAkB,MAAM;AAAA,IACrC,KAAK;AACH,aAAO,IAAI,eAAe,MAAM;AAAA,IAClC,KAAK;AACH,aAAO,IAAI,eAAe,MAAM;AAAA,IAClC,KAAK;AACH,aAAO,IAAI,eAAe,MAAM;AAAA,EACpC;AACF;","names":[]}
@@ -0,0 +1,70 @@
+ import { B as BaseProvider, P as ProviderConfig, C as ChatRequest, M as ModelResponse, c as ChatMessage, A as APIToolDefinition, e as TokenUsage, f as Provider } from '../types-C11cw5ZD.cjs';
+ export { R as RetryableError } from '../types-C11cw5ZD.cjs';
+ import 'zod';
+
+ interface AnthropicProviderConfig extends ProviderConfig {
+   apiKey: string;
+   baseURL?: string;
+   dangerouslySkipAuth?: boolean;
+   enableCaching?: boolean;
+ }
+ declare class AnthropicProvider extends BaseProvider {
+   private readonly apiKey;
+   private readonly baseURL?;
+   private readonly dangerouslySkipAuth?;
+   private readonly enableCaching;
+   private client;
+   constructor(config: AnthropicProviderConfig);
+   chat(request: ChatRequest): Promise<ModelResponse>;
+   protected mapMessages(messages: ChatMessage[]): unknown[];
+   protected mapTools(tools?: APIToolDefinition[]): unknown[] | undefined;
+   protected mapUsage(providerUsage: unknown): TokenUsage;
+   private mapOutgoingBlock;
+ }
+
+ interface OpenAIProviderConfig extends ProviderConfig {
+   apiKey: string;
+   baseURL?: string;
+   organization?: string;
+ }
+ declare class OpenAIProvider extends BaseProvider {
+   private readonly apiKey;
+   private readonly baseURL?;
+   private readonly organization?;
+   private client;
+   constructor(config: OpenAIProviderConfig);
+   chat(request: ChatRequest): Promise<ModelResponse>;
+   protected mapMessages(messages: ChatMessage[]): unknown[];
+   protected mapTools(tools?: APIToolDefinition[]): unknown[] | undefined;
+   protected mapUsage(providerUsage: unknown): TokenUsage;
+ }
+
+ interface OllamaProviderConfig {
+   model: string;
+   baseURL?: string;
+ }
+ declare class OllamaProvider implements Provider {
+   readonly model: string;
+   private readonly baseURL;
+   constructor(config: OllamaProviderConfig);
+   chat(request: ChatRequest): Promise<ModelResponse>;
+ }
+
+ declare function anthropic(config: AnthropicProviderConfig): AnthropicProvider;
+ declare function openai(config: OpenAIProviderConfig): OpenAIProvider;
+ declare function ollama(config: OllamaProviderConfig): OllamaProvider;
+ interface CustomProviderConfig {
+   provider: 'custom';
+   model: string;
+   chat: (request: ChatRequest) => Promise<ModelResponse>;
+ }
+ type OllamaProviderConfigWithProvider = OllamaProviderConfig & {
+   provider: 'ollama';
+ };
+ declare function createProvider(config: (AnthropicProviderConfig & {
+   provider: 'anthropic';
+ }) | (OpenAIProviderConfig & {
+   provider: 'openai';
+ }) | OllamaProviderConfigWithProvider | CustomProviderConfig): Provider;
+
+ export { APIToolDefinition, AnthropicProvider, type AnthropicProviderConfig, BaseProvider, ChatMessage, ChatRequest, type CustomProviderConfig, OllamaProvider, type OllamaProviderConfig, type OllamaProviderConfigWithProvider, OpenAIProvider, type OpenAIProviderConfig, Provider, anthropic, createProvider, ollama, openai };
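
Beyond the three built-in providers, the 'custom' branch of createProvider accepts any { provider: 'custom', model, chat } object, so an arbitrary backend can be adapted without subclassing BaseProvider. A sketch under the same assumptions as above; the echo backend is invented, and the usage/stopReason shape is inferred from what the bundled providers return:

    // Sketch: adapts a made-up backend via CustomProviderConfig.
    import { createProvider } from "@avasis-ai/synthcode/llm";

    const echo = createProvider({
      provider: "custom",
      model: "echo-1", // hypothetical model name
      chat: async (request) => {
        const last = request.messages[request.messages.length - 1];
        const text = last && typeof last.content === "string" ? last.content : "";
        // Echo the last user message back as a single text block.
        return {
          content: [{ type: "text", text }],
          stopReason: "end_turn",
          usage: { inputTokens: 0, outputTokens: 0, cacheReadTokens: 0, cacheWriteTokens: 0 }
        };
      }
    });

    // echo.chat({ messages: [{ role: "user", content: "hi" }] }) resolves to
    // a response echoing "hi" back as its only content block.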