@tepa/provider-anthropic 0.1.0

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
package/README.md ADDED
@@ -0,0 +1,106 @@
1
+ # @tepa/provider-anthropic
2
+
3
+ Anthropic Claude LLM provider for the Tepa agent pipeline.
4
+
5
+ ## Install
6
+
7
+ ```bash
8
+ npm install @tepa/provider-anthropic
9
+ ```
10
+
11
+ ## Setup
12
+
13
+ Set the `ANTHROPIC_API_KEY` environment variable. You can either export it directly:
14
+
15
+ ```bash
16
+ export ANTHROPIC_API_KEY=sk-ant-...
17
+ ```
18
+
19
+ Or use a `.env` file with [dotenv](https://www.npmjs.com/package/dotenv):
20
+
21
+ ```bash
22
+ # .env
23
+ ANTHROPIC_API_KEY=sk-ant-...
24
+ ```
25
+
26
+ ```typescript
27
+ import "dotenv/config";
28
+ ```
29
+
30
+ ## Usage
31
+
32
+ ```typescript
33
+ import { Tepa } from "tepa";
34
+ import { AnthropicProvider } from "@tepa/provider-anthropic";
35
+
36
+ const tepa = new Tepa({
37
+ tools: [/* ... */],
38
+ provider: new AnthropicProvider(),
39
+ });
40
+ ```
41
+
42
+ ### Provider Options
43
+
44
+ ```typescript
45
+ const provider = new AnthropicProvider({
46
+ apiKey: "sk-ant-...", // defaults to ANTHROPIC_API_KEY env var
47
+ maxRetries: 3, // retry attempts on transient failures (default: 3)
48
+ retryBaseDelayMs: 1000, // base delay for exponential backoff (default: 1000)
49
+ });
50
+ ```
51
+
52
+ ### Factory Function
53
+
54
+ Use `createProvider` to create providers from a string identifier:
55
+
56
+ ```typescript
57
+ import { createProvider } from "@tepa/provider-anthropic";
58
+
59
+ const provider = createProvider("anthropic");
60
+ ```
61
+
62
+ ## Logging
63
+
64
+ Every LLM call is automatically logged to a JSONL file in `.tepa/logs/`. You can disable the default file logger, add custom log listeners, or send logs to external services like Prometheus, New Relic, or Datadog using the `onLog()` method:
65
+
66
+ ```typescript
67
+ const provider = new AnthropicProvider({ defaultLog: false });
68
+
69
+ provider.onLog((entry) => {
70
+ externalLogger.send(entry);
71
+ });
72
+ ```
73
+
74
+ See [`@tepa/provider-core`](../provider-core) for full logging documentation.
75
+
76
+ ## Other Providers
77
+
78
+ Tepa ships with multiple LLM providers — all following the same `LLMProvider` interface:
79
+
80
+ - [`@tepa/provider-gemini`](../provider-gemini) — Google Gemini
81
+ - [`@tepa/provider-openai`](../provider-openai) — OpenAI
82
+
83
+ ## Implementing Custom Providers
84
+
85
+ To support a different LLM, implement the `LLMProvider` interface from `@tepa/types`:
86
+
87
+ ```typescript
88
+ import type { LLMProvider, LLMMessage, LLMRequestOptions, LLMResponse } from "@tepa/types";
89
+
90
+ export class MyProvider implements LLMProvider {
91
+ async complete(messages: LLMMessage[], options: LLMRequestOptions): Promise<LLMResponse> {
92
+ // Call your LLM API, passing options.tools if provided
93
+ return {
94
+ text: "response text",
95
+ tokensUsed: { input: 100, output: 50 },
96
+ finishReason: "end_turn",
97
+ };
98
+ }
99
+ }
100
+ ```
101
+
102
+ The provider interface is intentionally minimal — one method, clear input/output types.
103
+
104
+ ## Native Tool Use
105
+
106
+ All built-in providers support **native tool use**. When the executor passes tool schemas via `options.tools`, the provider forwards them to the LLM's native function-calling API. The LLM returns structured `toolUse` blocks with pre-parsed parameters — no manual JSON parsing needed. This eliminates escaping errors that occur when LLMs produce tool parameters as free-form text, especially with large content like file writes.
package/dist/index.cjs ADDED
@@ -0,0 +1,195 @@
1
"use strict";
// ---- esbuild CommonJS interop preamble (machine-generated; do not edit) ----
var __create = Object.create;
var __defProp = Object.defineProperty;
var __getOwnPropDesc = Object.getOwnPropertyDescriptor;
var __getOwnPropNames = Object.getOwnPropertyNames;
var __getProtoOf = Object.getPrototypeOf;
var __hasOwnProp = Object.prototype.hasOwnProperty;
// Install a live getter on `target` for every key of `all`.
var __export = (target, all) => {
  for (var name in all)
    __defProp(target, name, { get: all[name], enumerable: true });
};
// Copy own properties from `from` to `to` as getters, skipping `except`
// and anything `to` already defines.
var __copyProps = (to, from, except, desc) => {
  if (from && typeof from === "object" || typeof from === "function") {
    for (let key of __getOwnPropNames(from))
      if (!__hasOwnProp.call(to, key) && key !== except)
        __defProp(to, key, { get: () => from[key], enumerable: !(desc = __getOwnPropDesc(from, key)) || desc.enumerable });
  }
  return to;
};
// Wrap a CommonJS module so it can be consumed like an ES module.
var __toESM = (mod, isNodeMode, target) => (target = mod != null ? __create(__getProtoOf(mod)) : {}, __copyProps(
  // If the importer is in node compatibility mode or this is not an ESM
  // file that has been converted to a CommonJS file using a Babel-
  // compatible transform (i.e. "__esModule" has not been set), then set
  // "default" to the CommonJS "module.exports" for node compatibility.
  isNodeMode || !mod || !mod.__esModule ? __defProp(target, "default", { value: mod, enumerable: true }) : target,
  mod
));
// Mark the export namespace as an ES module and copy exports onto it.
var __toCommonJS = (mod) => __copyProps(__defProp({}, "__esModule", { value: true }), mod);

// src/index.ts
// Public export surface of this bundle (mirrors src/index.ts).
var index_exports = {};
__export(index_exports, {
  AnthropicProvider: () => AnthropicProvider,
  createProvider: () => createProvider,
  extractText: () => extractText,
  extractToolUse: () => extractToolUse,
  toAnthropicMessages: () => toAnthropicMessages,
  toAnthropicTools: () => toAnthropicTools,
  toFinishReason: () => toFinishReason
});
module.exports = __toCommonJS(index_exports);
42
+
43
+ // src/anthropic.ts
44
+ var import_sdk = __toESM(require("@anthropic-ai/sdk"), 1);
45
+ var import_provider_core = require("@tepa/provider-core");
46
+
47
+ // src/formatting.ts
48
// Convert Tepa LLMMessage objects into the Anthropic SDK message shape.
// Only `role` and `content` are carried over; any extra fields are dropped.
function toAnthropicMessages(messages) {
  const converted = [];
  for (const message of messages) {
    converted.push({ role: message.role, content: message.content });
  }
  return converted;
}
54
// Map an Anthropic stop_reason onto Tepa's finishReason vocabulary.
// The three known non-default reasons pass through unchanged; anything
// else (including null) collapses to "end_turn".
function toFinishReason(stopReason) {
  const passthrough = ["max_tokens", "stop_sequence", "tool_use"];
  return passthrough.includes(stopReason) ? stopReason : "end_turn";
}
66
// Concatenate the text of every "text" content block, in response order.
function extractText(content) {
  let text = "";
  for (const block of content) {
    if (block.type === "text") {
      text += block.text;
    }
  }
  return text;
}
69
// Pull the "tool_use" blocks out of a response, keeping only the
// id / name / input triple that Tepa consumes.
function extractToolUse(content) {
  const calls = [];
  for (const block of content) {
    if (block.type !== "tool_use") continue;
    calls.push({ id: block.id, name: block.name, input: block.input });
  }
  return calls;
}
76
// Translate Tepa ToolSchema objects into Anthropic's tool format.
// Every parameter becomes a JSON-schema property; a parameter counts as
// required unless its schema explicitly sets `required: false`.
function toAnthropicTools(tools) {
  return tools.map((tool) => {
    const entries = Object.entries(tool.parameters);
    const properties = Object.fromEntries(
      entries.map(([paramName, param]) => [
        paramName,
        { type: param.type, description: param.description }
      ])
    );
    const required = entries
      .filter(([, param]) => param.required !== false)
      .map(([paramName]) => paramName);
    return {
      name: tool.name,
      description: tool.description,
      input_schema: { type: "object", properties, required }
    };
  });
}
100
+
101
// src/anthropic.ts
var DEFAULT_MODEL = "claude-haiku-4-5";
var DEFAULT_MAX_TOKENS = 64e3;
// LLM provider for Anthropic Claude models, layered on BaseLLMProvider's
// retry/logging machinery.
var AnthropicProvider = class extends import_provider_core.BaseLLMProvider {
  providerName = "anthropic";
  client;
  // `options.apiKey` is handed to the SDK; when absent the SDK falls back
  // to its own environment-based resolution.
  constructor(options = {}) {
    super(options);
    this.client = new import_sdk.default({
      apiKey: options.apiKey,
      // 15 minutes – pipeline calls can be long
      timeout: 15 * 60 * 1e3
    });
  }
  // Issue one non-streaming Messages API call and normalize the result.
  // Optional request fields are only included when the caller supplied them.
  async doComplete(messages, options) {
    const request = {
      model: options.model ?? DEFAULT_MODEL,
      max_tokens: options.maxTokens ?? DEFAULT_MAX_TOKENS,
      messages: toAnthropicMessages(messages),
      ...options.temperature !== void 0 ? { temperature: options.temperature } : {},
      ...options.systemPrompt ? { system: options.systemPrompt } : {},
      ...options.tools && options.tools.length > 0 ? { tools: toAnthropicTools(options.tools) } : {}
    };
    const response = await this.client.messages.create(request);
    const toolUse = extractToolUse(response.content);
    return {
      text: extractText(response.content),
      tokensUsed: {
        input: response.usage.input_tokens,
        output: response.usage.output_tokens
      },
      finishReason: toFinishReason(response.stop_reason),
      // Only attach toolUse when the model actually requested tool calls.
      ...toolUse.length > 0 ? { toolUse } : {}
    };
  }
  // Transient failures worth retrying: rate limits, server 5xx errors,
  // connection drops, and Anthropic's 529 "overloaded" status.
  isRetryable(error) {
    return error instanceof import_sdk.default.RateLimitError || error instanceof import_sdk.default.InternalServerError || error instanceof import_sdk.default.APIConnectionError || error instanceof import_sdk.default.APIError && error.status === 529;
  }
  // Honor the server's retry-after header (seconds) when present and positive.
  getRetryAfterMs(error) {
    if (!(error instanceof import_sdk.default.APIError)) {
      return null;
    }
    const header = error.headers?.["retry-after"];
    if (!header) {
      return null;
    }
    const seconds = Number(header);
    return !Number.isNaN(seconds) && seconds > 0 ? seconds * 1e3 : null;
  }
  isRateLimitError(error) {
    return error instanceof import_sdk.default.RateLimitError;
  }
};
173
+
174
// src/factory.ts
// Build a provider instance from its registry name. Exhaustive over the
// known names; anything else throws with the offending name.
function createProvider(name, options) {
  if (name === "anthropic") {
    return new AnthropicProvider(options);
  }
  throw new Error(`Unknown provider: ${name}`);
}
185
// Annotate the CommonJS export names for ESM import in node:
// (dead `0 && (...)` expression — never executed; it exists so Node's
// static CJS named-export detection can list these names)
0 && (module.exports = {
  AnthropicProvider,
  createProvider,
  extractText,
  extractToolUse,
  toAnthropicMessages,
  toAnthropicTools,
  toFinishReason
});
//# sourceMappingURL=index.cjs.map
@@ -0,0 +1 @@
1
+ {"version":3,"sources":["../src/index.ts","../src/anthropic.ts","../src/formatting.ts","../src/factory.ts"],"sourcesContent":["export { AnthropicProvider, type AnthropicProviderOptions } from \"./anthropic.js\";\nexport { createProvider, type ProviderName } from \"./factory.js\";\nexport { toAnthropicMessages, toAnthropicTools, toFinishReason, extractText, extractToolUse } from \"./formatting.js\";\n","import Anthropic from \"@anthropic-ai/sdk\";\nimport type { LLMMessage, LLMRequestOptions, LLMResponse } from \"@tepa/types\";\nimport { BaseLLMProvider, type BaseLLMProviderOptions } from \"@tepa/provider-core\";\nimport { toAnthropicMessages, toAnthropicTools, toFinishReason, extractText, extractToolUse } from \"./formatting.js\";\n\nconst DEFAULT_MODEL = \"claude-haiku-4-5\";\nconst DEFAULT_MAX_TOKENS = 64_000;\n\nexport interface AnthropicProviderOptions extends BaseLLMProviderOptions {\n /** Anthropic API key. Falls back to ANTHROPIC_API_KEY env variable. */\n apiKey?: string;\n}\n\n/** LLM provider implementation for Anthropic Claude models. */\nexport class AnthropicProvider extends BaseLLMProvider {\n protected readonly providerName = \"anthropic\";\n private readonly client: Anthropic;\n\n constructor(options: AnthropicProviderOptions = {}) {\n super(options);\n this.client = new Anthropic({\n apiKey: options.apiKey,\n timeout: 15 * 60 * 1000, // 15 minutes – pipeline calls can be long\n });\n }\n\n protected async doComplete(\n messages: LLMMessage[],\n options: LLMRequestOptions,\n ): Promise<LLMResponse> {\n const params: Anthropic.MessageCreateParamsNonStreaming = {\n model: options.model ?? DEFAULT_MODEL,\n max_tokens: options.maxTokens ?? 
DEFAULT_MAX_TOKENS,\n messages: toAnthropicMessages(messages),\n };\n\n if (options.temperature !== undefined) {\n params.temperature = options.temperature;\n }\n\n if (options.systemPrompt) {\n params.system = options.systemPrompt;\n }\n\n if (options.tools && options.tools.length > 0) {\n params.tools = toAnthropicTools(options.tools);\n }\n\n const response = await this.client.messages.create(params);\n\n const toolUse = extractToolUse(response.content);\n\n return {\n text: extractText(response.content),\n tokensUsed: {\n input: response.usage.input_tokens,\n output: response.usage.output_tokens,\n },\n finishReason: toFinishReason(response.stop_reason),\n ...(toolUse.length > 0 && { toolUse }),\n };\n }\n\n protected isRetryable(error: unknown): boolean {\n if (error instanceof Anthropic.RateLimitError) {\n return true;\n }\n if (error instanceof Anthropic.InternalServerError) {\n return true;\n }\n if (error instanceof Anthropic.APIConnectionError) {\n return true;\n }\n if (error instanceof Anthropic.APIError && error.status === 529) {\n return true;\n }\n return false;\n }\n\n protected getRetryAfterMs(error: unknown): number | null {\n if (error instanceof Anthropic.APIError) {\n const retryAfter = error.headers?.[\"retry-after\"];\n if (retryAfter) {\n const seconds = Number(retryAfter);\n if (!Number.isNaN(seconds) && seconds > 0) {\n return seconds * 1000;\n }\n }\n }\n return null;\n }\n\n protected isRateLimitError(error: unknown): boolean {\n return error instanceof Anthropic.RateLimitError;\n }\n}\n","import type { LLMMessage, LLMToolUseBlock, ToolSchema } from \"@tepa/types\";\nimport type Anthropic from \"@anthropic-ai/sdk\";\n\n/**\n * Convert Tepa LLMMessage array to Anthropic message format.\n */\nexport function toAnthropicMessages(\n messages: LLMMessage[],\n): Anthropic.MessageParam[] {\n return messages.map((msg) => ({\n role: msg.role,\n content: msg.content,\n }));\n}\n\n/**\n * Map Anthropic stop_reason to Tepa finishReason.\n */\nexport 
function toFinishReason(\n stopReason: string | null,\n): \"end_turn\" | \"max_tokens\" | \"stop_sequence\" | \"tool_use\" {\n switch (stopReason) {\n case \"max_tokens\":\n return \"max_tokens\";\n case \"stop_sequence\":\n return \"stop_sequence\";\n case \"tool_use\":\n return \"tool_use\";\n default:\n return \"end_turn\";\n }\n}\n\n/**\n * Extract text content from Anthropic response content blocks.\n */\nexport function extractText(content: Anthropic.ContentBlock[]): string {\n return content\n .filter((block): block is Anthropic.TextBlock => block.type === \"text\")\n .map((block) => block.text)\n .join(\"\");\n}\n\n/**\n * Extract tool use blocks from Anthropic response content blocks.\n */\nexport function extractToolUse(content: Anthropic.ContentBlock[]): LLMToolUseBlock[] {\n return content\n .filter((block): block is Anthropic.ToolUseBlock => block.type === \"tool_use\")\n .map((block) => ({\n id: block.id,\n name: block.name,\n input: block.input as Record<string, unknown>,\n }));\n}\n\n/**\n * Convert Tepa ToolSchema to Anthropic tool format.\n */\nexport function toAnthropicTools(tools: ToolSchema[]): Anthropic.Tool[] {\n return tools.map((tool) => {\n const properties: Record<string, unknown> = {};\n const required: string[] = [];\n\n for (const [name, param] of Object.entries(tool.parameters)) {\n properties[name] = {\n type: param.type,\n description: param.description,\n };\n if (param.required !== false) {\n required.push(name);\n }\n }\n\n return {\n name: tool.name,\n description: tool.description,\n input_schema: {\n type: \"object\" as const,\n properties,\n required,\n },\n };\n });\n}\n","import type { LLMProvider } from \"@tepa/types\";\nimport { AnthropicProvider, type AnthropicProviderOptions } from \"./anthropic.js\";\n\nexport type ProviderName = \"anthropic\";\n\n/**\n * Create an LLM provider from a name string.\n * Currently supports \"anthropic\". 
Future providers (openai, gemini, etc.)\n * would be separate packages following the same pattern.\n */\nexport function createProvider(\n name: ProviderName,\n options?: AnthropicProviderOptions,\n): LLMProvider {\n switch (name) {\n case \"anthropic\":\n return new AnthropicProvider(options);\n default: {\n const _exhaustive: never = name;\n throw new Error(`Unknown provider: ${_exhaustive}`);\n }\n }\n}\n"],"mappings":";;;;;;;;;;;;;;;;;;;;;;;;;;;;;;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;;;ACAA,iBAAsB;AAEtB,2BAA6D;;;ACItD,SAAS,oBACd,UAC0B;AAC1B,SAAO,SAAS,IAAI,CAAC,SAAS;AAAA,IAC5B,MAAM,IAAI;AAAA,IACV,SAAS,IAAI;AAAA,EACf,EAAE;AACJ;AAKO,SAAS,eACd,YAC0D;AAC1D,UAAQ,YAAY;AAAA,IAClB,KAAK;AACH,aAAO;AAAA,IACT,KAAK;AACH,aAAO;AAAA,IACT,KAAK;AACH,aAAO;AAAA,IACT;AACE,aAAO;AAAA,EACX;AACF;AAKO,SAAS,YAAY,SAA2C;AACrE,SAAO,QACJ,OAAO,CAAC,UAAwC,MAAM,SAAS,MAAM,EACrE,IAAI,CAAC,UAAU,MAAM,IAAI,EACzB,KAAK,EAAE;AACZ;AAKO,SAAS,eAAe,SAAsD;AACnF,SAAO,QACJ,OAAO,CAAC,UAA2C,MAAM,SAAS,UAAU,EAC5E,IAAI,CAAC,WAAW;AAAA,IACf,IAAI,MAAM;AAAA,IACV,MAAM,MAAM;AAAA,IACZ,OAAO,MAAM;AAAA,EACf,EAAE;AACN;AAKO,SAAS,iBAAiB,OAAuC;AACtE,SAAO,MAAM,IAAI,CAAC,SAAS;AACzB,UAAM,aAAsC,CAAC;AAC7C,UAAM,WAAqB,CAAC;AAE5B,eAAW,CAAC,MAAM,KAAK,KAAK,OAAO,QAAQ,KAAK,UAAU,GAAG;AAC3D,iBAAW,IAAI,IAAI;AAAA,QACjB,MAAM,MAAM;AAAA,QACZ,aAAa,MAAM;AAAA,MACrB;AACA,UAAI,MAAM,aAAa,OAAO;AAC5B,iBAAS,KAAK,IAAI;AAAA,MACpB;AAAA,IACF;AAEA,WAAO;AAAA,MACL,MAAM,KAAK;AAAA,MACX,aAAa,KAAK;AAAA,MAClB,cAAc;AAAA,QACZ,MAAM;AAAA,QACN;AAAA,QACA;AAAA,MACF;AAAA,IACF;AAAA,EACF,CAAC;AACH;;;AD/EA,IAAM,gBAAgB;AACtB,IAAM,qBAAqB;AAQpB,IAAM,oBAAN,cAAgC,qCAAgB;AAAA,EAClC,eAAe;AAAA,EACjB;AAAA,EAEjB,YAAY,UAAoC,CAAC,GAAG;AAClD,UAAM,OAAO;AACb,SAAK,SAAS,IAAI,WAAAA,QAAU;AAAA,MAC1B,QAAQ,QAAQ;AAAA,MAChB,SAAS,KAAK,KAAK;AAAA;AAAA,IACrB,CAAC;AAAA,EACH;AAAA,EAEA,MAAgB,WACd,UACA,SACsB;AACtB,UAAM,SAAoD;AAAA,MACxD,OAAO,QAAQ,SAAS;AAAA,MACxB,YAAY,QAAQ,aAAa;AAAA,MACjC,UAAU,oBAAoB,QAAQ;AAAA,IACxC;AAEA,QAAI,QAAQ,gBAAgB,QAAW;AACrC,aAAO,cAAc,QAAQ;AAAA,IAC/B;AAEA,QAAI,QAAQ,cAAc;
AACxB,aAAO,SAAS,QAAQ;AAAA,IAC1B;AAEA,QAAI,QAAQ,SAAS,QAAQ,MAAM,SAAS,GAAG;AAC7C,aAAO,QAAQ,iBAAiB,QAAQ,KAAK;AAAA,IAC/C;AAEA,UAAM,WAAW,MAAM,KAAK,OAAO,SAAS,OAAO,MAAM;AAEzD,UAAM,UAAU,eAAe,SAAS,OAAO;AAE/C,WAAO;AAAA,MACL,MAAM,YAAY,SAAS,OAAO;AAAA,MAClC,YAAY;AAAA,QACV,OAAO,SAAS,MAAM;AAAA,QACtB,QAAQ,SAAS,MAAM;AAAA,MACzB;AAAA,MACA,cAAc,eAAe,SAAS,WAAW;AAAA,MACjD,GAAI,QAAQ,SAAS,KAAK,EAAE,QAAQ;AAAA,IACtC;AAAA,EACF;AAAA,EAEU,YAAY,OAAyB;AAC7C,QAAI,iBAAiB,WAAAA,QAAU,gBAAgB;AAC7C,aAAO;AAAA,IACT;AACA,QAAI,iBAAiB,WAAAA,QAAU,qBAAqB;AAClD,aAAO;AAAA,IACT;AACA,QAAI,iBAAiB,WAAAA,QAAU,oBAAoB;AACjD,aAAO;AAAA,IACT;AACA,QAAI,iBAAiB,WAAAA,QAAU,YAAY,MAAM,WAAW,KAAK;AAC/D,aAAO;AAAA,IACT;AACA,WAAO;AAAA,EACT;AAAA,EAEU,gBAAgB,OAA+B;AACvD,QAAI,iBAAiB,WAAAA,QAAU,UAAU;AACvC,YAAM,aAAa,MAAM,UAAU,aAAa;AAChD,UAAI,YAAY;AACd,cAAM,UAAU,OAAO,UAAU;AACjC,YAAI,CAAC,OAAO,MAAM,OAAO,KAAK,UAAU,GAAG;AACzC,iBAAO,UAAU;AAAA,QACnB;AAAA,MACF;AAAA,IACF;AACA,WAAO;AAAA,EACT;AAAA,EAEU,iBAAiB,OAAyB;AAClD,WAAO,iBAAiB,WAAAA,QAAU;AAAA,EACpC;AACF;;;AErFO,SAAS,eACd,MACA,SACa;AACb,UAAQ,MAAM;AAAA,IACZ,KAAK;AACH,aAAO,IAAI,kBAAkB,OAAO;AAAA,IACtC,SAAS;AACP,YAAM,cAAqB;AAC3B,YAAM,IAAI,MAAM,qBAAqB,WAAW,EAAE;AAAA,IACpD;AAAA,EACF;AACF;","names":["Anthropic"]}
@@ -0,0 +1,49 @@
1
import { LLMMessage, LLMRequestOptions, LLMResponse, LLMProvider, LLMToolUseBlock, ToolSchema } from '@tepa/types';
import { BaseLLMProvider, BaseLLMProviderOptions } from '@tepa/provider-core';
import Anthropic from '@anthropic-ai/sdk';

interface AnthropicProviderOptions extends BaseLLMProviderOptions {
    /** Anthropic API key. Falls back to ANTHROPIC_API_KEY env variable. */
    apiKey?: string;
}
/** LLM provider implementation for Anthropic Claude models. */
declare class AnthropicProvider extends BaseLLMProvider {
    protected readonly providerName = "anthropic";
    /** Pre-configured Anthropic SDK client (constructed with a 15-minute timeout). */
    private readonly client;
    constructor(options?: AnthropicProviderOptions);
    /** Perform one non-streaming completion against the Messages API. */
    protected doComplete(messages: LLMMessage[], options: LLMRequestOptions): Promise<LLMResponse>;
    /** True for rate-limit, internal-server, connection, and HTTP 529 errors. */
    protected isRetryable(error: unknown): boolean;
    /** Server-suggested backoff from the retry-after header in ms, or null. */
    protected getRetryAfterMs(error: unknown): number | null;
    /** True when the error is the SDK's RateLimitError. */
    protected isRateLimitError(error: unknown): boolean;
}

type ProviderName = "anthropic";
/**
 * Create an LLM provider from a name string.
 * Currently supports "anthropic". Future providers (openai, gemini, etc.)
 * would be separate packages following the same pattern.
 */
declare function createProvider(name: ProviderName, options?: AnthropicProviderOptions): LLMProvider;

/**
 * Convert Tepa LLMMessage array to Anthropic message format.
 */
declare function toAnthropicMessages(messages: LLMMessage[]): Anthropic.MessageParam[];
/**
 * Map Anthropic stop_reason to Tepa finishReason.
 */
declare function toFinishReason(stopReason: string | null): "end_turn" | "max_tokens" | "stop_sequence" | "tool_use";
/**
 * Extract text content from Anthropic response content blocks.
 */
declare function extractText(content: Anthropic.ContentBlock[]): string;
/**
 * Extract tool use blocks from Anthropic response content blocks.
 */
declare function extractToolUse(content: Anthropic.ContentBlock[]): LLMToolUseBlock[];
/**
 * Convert Tepa ToolSchema to Anthropic tool format.
 */
declare function toAnthropicTools(tools: ToolSchema[]): Anthropic.Tool[];

export { AnthropicProvider, type AnthropicProviderOptions, type ProviderName, createProvider, extractText, extractToolUse, toAnthropicMessages, toAnthropicTools, toFinishReason };
@@ -0,0 +1,49 @@
1
import { LLMMessage, LLMRequestOptions, LLMResponse, LLMProvider, LLMToolUseBlock, ToolSchema } from '@tepa/types';
import { BaseLLMProvider, BaseLLMProviderOptions } from '@tepa/provider-core';
import Anthropic from '@anthropic-ai/sdk';

interface AnthropicProviderOptions extends BaseLLMProviderOptions {
    /** Anthropic API key. Falls back to ANTHROPIC_API_KEY env variable. */
    apiKey?: string;
}
/** LLM provider implementation for Anthropic Claude models. */
declare class AnthropicProvider extends BaseLLMProvider {
    protected readonly providerName = "anthropic";
    /** Pre-configured Anthropic SDK client (constructed with a 15-minute timeout). */
    private readonly client;
    constructor(options?: AnthropicProviderOptions);
    /** Perform one non-streaming completion against the Messages API. */
    protected doComplete(messages: LLMMessage[], options: LLMRequestOptions): Promise<LLMResponse>;
    /** True for rate-limit, internal-server, connection, and HTTP 529 errors. */
    protected isRetryable(error: unknown): boolean;
    /** Server-suggested backoff from the retry-after header in ms, or null. */
    protected getRetryAfterMs(error: unknown): number | null;
    /** True when the error is the SDK's RateLimitError. */
    protected isRateLimitError(error: unknown): boolean;
}

type ProviderName = "anthropic";
/**
 * Create an LLM provider from a name string.
 * Currently supports "anthropic". Future providers (openai, gemini, etc.)
 * would be separate packages following the same pattern.
 */
declare function createProvider(name: ProviderName, options?: AnthropicProviderOptions): LLMProvider;

/**
 * Convert Tepa LLMMessage array to Anthropic message format.
 */
declare function toAnthropicMessages(messages: LLMMessage[]): Anthropic.MessageParam[];
/**
 * Map Anthropic stop_reason to Tepa finishReason.
 */
declare function toFinishReason(stopReason: string | null): "end_turn" | "max_tokens" | "stop_sequence" | "tool_use";
/**
 * Extract text content from Anthropic response content blocks.
 */
declare function extractText(content: Anthropic.ContentBlock[]): string;
/**
 * Extract tool use blocks from Anthropic response content blocks.
 */
declare function extractToolUse(content: Anthropic.ContentBlock[]): LLMToolUseBlock[];
/**
 * Convert Tepa ToolSchema to Anthropic tool format.
 */
declare function toAnthropicTools(tools: ToolSchema[]): Anthropic.Tool[];

export { AnthropicProvider, type AnthropicProviderOptions, type ProviderName, createProvider, extractText, extractToolUse, toAnthropicMessages, toAnthropicTools, toFinishReason };
package/dist/index.js ADDED
@@ -0,0 +1,152 @@
1
+ // src/anthropic.ts
2
+ import Anthropic from "@anthropic-ai/sdk";
3
+ import { BaseLLMProvider } from "@tepa/provider-core";
4
+
5
+ // src/formatting.ts
6
// Convert Tepa LLMMessage objects into the Anthropic SDK message shape.
// Only `role` and `content` are carried over; any extra fields are dropped.
function toAnthropicMessages(messages) {
  const converted = [];
  for (const message of messages) {
    converted.push({ role: message.role, content: message.content });
  }
  return converted;
}
12
// Map an Anthropic stop_reason onto Tepa's finishReason vocabulary.
// The three known non-default reasons pass through unchanged; anything
// else (including null) collapses to "end_turn".
function toFinishReason(stopReason) {
  const passthrough = ["max_tokens", "stop_sequence", "tool_use"];
  return passthrough.includes(stopReason) ? stopReason : "end_turn";
}
24
// Concatenate the text of every "text" content block, in response order.
function extractText(content) {
  let text = "";
  for (const block of content) {
    if (block.type === "text") {
      text += block.text;
    }
  }
  return text;
}
27
// Pull the "tool_use" blocks out of a response, keeping only the
// id / name / input triple that Tepa consumes.
function extractToolUse(content) {
  const calls = [];
  for (const block of content) {
    if (block.type !== "tool_use") continue;
    calls.push({ id: block.id, name: block.name, input: block.input });
  }
  return calls;
}
34
// Translate Tepa ToolSchema objects into Anthropic's tool format.
// Every parameter becomes a JSON-schema property; a parameter counts as
// required unless its schema explicitly sets `required: false`.
function toAnthropicTools(tools) {
  return tools.map((tool) => {
    const entries = Object.entries(tool.parameters);
    const properties = Object.fromEntries(
      entries.map(([paramName, param]) => [
        paramName,
        { type: param.type, description: param.description }
      ])
    );
    const required = entries
      .filter(([, param]) => param.required !== false)
      .map(([paramName]) => paramName);
    return {
      name: tool.name,
      description: tool.description,
      input_schema: { type: "object", properties, required }
    };
  });
}
58
+
59
// src/anthropic.ts
var DEFAULT_MODEL = "claude-haiku-4-5";
var DEFAULT_MAX_TOKENS = 64e3;
// LLM provider for Anthropic Claude models, layered on BaseLLMProvider's
// retry/logging machinery.
var AnthropicProvider = class extends BaseLLMProvider {
  providerName = "anthropic";
  client;
  // `options.apiKey` is handed to the SDK; when absent the SDK falls back
  // to its own environment-based resolution.
  constructor(options = {}) {
    super(options);
    this.client = new Anthropic({
      apiKey: options.apiKey,
      // 15 minutes – pipeline calls can be long
      timeout: 15 * 60 * 1e3
    });
  }
  // Issue one non-streaming Messages API call and normalize the result.
  // Optional request fields are only included when the caller supplied them.
  async doComplete(messages, options) {
    const request = {
      model: options.model ?? DEFAULT_MODEL,
      max_tokens: options.maxTokens ?? DEFAULT_MAX_TOKENS,
      messages: toAnthropicMessages(messages),
      ...options.temperature !== void 0 ? { temperature: options.temperature } : {},
      ...options.systemPrompt ? { system: options.systemPrompt } : {},
      ...options.tools && options.tools.length > 0 ? { tools: toAnthropicTools(options.tools) } : {}
    };
    const response = await this.client.messages.create(request);
    const toolUse = extractToolUse(response.content);
    return {
      text: extractText(response.content),
      tokensUsed: {
        input: response.usage.input_tokens,
        output: response.usage.output_tokens
      },
      finishReason: toFinishReason(response.stop_reason),
      // Only attach toolUse when the model actually requested tool calls.
      ...toolUse.length > 0 ? { toolUse } : {}
    };
  }
  // Transient failures worth retrying: rate limits, server 5xx errors,
  // connection drops, and Anthropic's 529 "overloaded" status.
  isRetryable(error) {
    return error instanceof Anthropic.RateLimitError || error instanceof Anthropic.InternalServerError || error instanceof Anthropic.APIConnectionError || error instanceof Anthropic.APIError && error.status === 529;
  }
  // Honor the server's retry-after header (seconds) when present and positive.
  getRetryAfterMs(error) {
    if (!(error instanceof Anthropic.APIError)) {
      return null;
    }
    const header = error.headers?.["retry-after"];
    if (!header) {
      return null;
    }
    const seconds = Number(header);
    return !Number.isNaN(seconds) && seconds > 0 ? seconds * 1e3 : null;
  }
  isRateLimitError(error) {
    return error instanceof Anthropic.RateLimitError;
  }
};
131
+
132
// src/factory.ts
// Build a provider instance from its registry name. Exhaustive over the
// known names; anything else throws with the offending name.
function createProvider(name, options) {
  if (name === "anthropic") {
    return new AnthropicProvider(options);
  }
  throw new Error(`Unknown provider: ${name}`);
}
143
// Named exports of the ESM bundle (mirrors src/index.ts).
export {
  AnthropicProvider,
  createProvider,
  extractText,
  extractToolUse,
  toAnthropicMessages,
  toAnthropicTools,
  toFinishReason
};
//# sourceMappingURL=index.js.map
@@ -0,0 +1 @@
1
+ {"version":3,"sources":["../src/anthropic.ts","../src/formatting.ts","../src/factory.ts"],"sourcesContent":["import Anthropic from \"@anthropic-ai/sdk\";\nimport type { LLMMessage, LLMRequestOptions, LLMResponse } from \"@tepa/types\";\nimport { BaseLLMProvider, type BaseLLMProviderOptions } from \"@tepa/provider-core\";\nimport { toAnthropicMessages, toAnthropicTools, toFinishReason, extractText, extractToolUse } from \"./formatting.js\";\n\nconst DEFAULT_MODEL = \"claude-haiku-4-5\";\nconst DEFAULT_MAX_TOKENS = 64_000;\n\nexport interface AnthropicProviderOptions extends BaseLLMProviderOptions {\n /** Anthropic API key. Falls back to ANTHROPIC_API_KEY env variable. */\n apiKey?: string;\n}\n\n/** LLM provider implementation for Anthropic Claude models. */\nexport class AnthropicProvider extends BaseLLMProvider {\n protected readonly providerName = \"anthropic\";\n private readonly client: Anthropic;\n\n constructor(options: AnthropicProviderOptions = {}) {\n super(options);\n this.client = new Anthropic({\n apiKey: options.apiKey,\n timeout: 15 * 60 * 1000, // 15 minutes – pipeline calls can be long\n });\n }\n\n protected async doComplete(\n messages: LLMMessage[],\n options: LLMRequestOptions,\n ): Promise<LLMResponse> {\n const params: Anthropic.MessageCreateParamsNonStreaming = {\n model: options.model ?? DEFAULT_MODEL,\n max_tokens: options.maxTokens ?? 
DEFAULT_MAX_TOKENS,\n messages: toAnthropicMessages(messages),\n };\n\n if (options.temperature !== undefined) {\n params.temperature = options.temperature;\n }\n\n if (options.systemPrompt) {\n params.system = options.systemPrompt;\n }\n\n if (options.tools && options.tools.length > 0) {\n params.tools = toAnthropicTools(options.tools);\n }\n\n const response = await this.client.messages.create(params);\n\n const toolUse = extractToolUse(response.content);\n\n return {\n text: extractText(response.content),\n tokensUsed: {\n input: response.usage.input_tokens,\n output: response.usage.output_tokens,\n },\n finishReason: toFinishReason(response.stop_reason),\n ...(toolUse.length > 0 && { toolUse }),\n };\n }\n\n protected isRetryable(error: unknown): boolean {\n if (error instanceof Anthropic.RateLimitError) {\n return true;\n }\n if (error instanceof Anthropic.InternalServerError) {\n return true;\n }\n if (error instanceof Anthropic.APIConnectionError) {\n return true;\n }\n if (error instanceof Anthropic.APIError && error.status === 529) {\n return true;\n }\n return false;\n }\n\n protected getRetryAfterMs(error: unknown): number | null {\n if (error instanceof Anthropic.APIError) {\n const retryAfter = error.headers?.[\"retry-after\"];\n if (retryAfter) {\n const seconds = Number(retryAfter);\n if (!Number.isNaN(seconds) && seconds > 0) {\n return seconds * 1000;\n }\n }\n }\n return null;\n }\n\n protected isRateLimitError(error: unknown): boolean {\n return error instanceof Anthropic.RateLimitError;\n }\n}\n","import type { LLMMessage, LLMToolUseBlock, ToolSchema } from \"@tepa/types\";\nimport type Anthropic from \"@anthropic-ai/sdk\";\n\n/**\n * Convert Tepa LLMMessage array to Anthropic message format.\n */\nexport function toAnthropicMessages(\n messages: LLMMessage[],\n): Anthropic.MessageParam[] {\n return messages.map((msg) => ({\n role: msg.role,\n content: msg.content,\n }));\n}\n\n/**\n * Map Anthropic stop_reason to Tepa finishReason.\n */\nexport 
function toFinishReason(\n stopReason: string | null,\n): \"end_turn\" | \"max_tokens\" | \"stop_sequence\" | \"tool_use\" {\n switch (stopReason) {\n case \"max_tokens\":\n return \"max_tokens\";\n case \"stop_sequence\":\n return \"stop_sequence\";\n case \"tool_use\":\n return \"tool_use\";\n default:\n return \"end_turn\";\n }\n}\n\n/**\n * Extract text content from Anthropic response content blocks.\n */\nexport function extractText(content: Anthropic.ContentBlock[]): string {\n return content\n .filter((block): block is Anthropic.TextBlock => block.type === \"text\")\n .map((block) => block.text)\n .join(\"\");\n}\n\n/**\n * Extract tool use blocks from Anthropic response content blocks.\n */\nexport function extractToolUse(content: Anthropic.ContentBlock[]): LLMToolUseBlock[] {\n return content\n .filter((block): block is Anthropic.ToolUseBlock => block.type === \"tool_use\")\n .map((block) => ({\n id: block.id,\n name: block.name,\n input: block.input as Record<string, unknown>,\n }));\n}\n\n/**\n * Convert Tepa ToolSchema to Anthropic tool format.\n */\nexport function toAnthropicTools(tools: ToolSchema[]): Anthropic.Tool[] {\n return tools.map((tool) => {\n const properties: Record<string, unknown> = {};\n const required: string[] = [];\n\n for (const [name, param] of Object.entries(tool.parameters)) {\n properties[name] = {\n type: param.type,\n description: param.description,\n };\n if (param.required !== false) {\n required.push(name);\n }\n }\n\n return {\n name: tool.name,\n description: tool.description,\n input_schema: {\n type: \"object\" as const,\n properties,\n required,\n },\n };\n });\n}\n","import type { LLMProvider } from \"@tepa/types\";\nimport { AnthropicProvider, type AnthropicProviderOptions } from \"./anthropic.js\";\n\nexport type ProviderName = \"anthropic\";\n\n/**\n * Create an LLM provider from a name string.\n * Currently supports \"anthropic\". 
Future providers (openai, gemini, etc.)\n * would be separate packages following the same pattern.\n */\nexport function createProvider(\n name: ProviderName,\n options?: AnthropicProviderOptions,\n): LLMProvider {\n switch (name) {\n case \"anthropic\":\n return new AnthropicProvider(options);\n default: {\n const _exhaustive: never = name;\n throw new Error(`Unknown provider: ${_exhaustive}`);\n }\n }\n}\n"],"mappings":";AAAA,OAAO,eAAe;AAEtB,SAAS,uBAAoD;;;ACItD,SAAS,oBACd,UAC0B;AAC1B,SAAO,SAAS,IAAI,CAAC,SAAS;AAAA,IAC5B,MAAM,IAAI;AAAA,IACV,SAAS,IAAI;AAAA,EACf,EAAE;AACJ;AAKO,SAAS,eACd,YAC0D;AAC1D,UAAQ,YAAY;AAAA,IAClB,KAAK;AACH,aAAO;AAAA,IACT,KAAK;AACH,aAAO;AAAA,IACT,KAAK;AACH,aAAO;AAAA,IACT;AACE,aAAO;AAAA,EACX;AACF;AAKO,SAAS,YAAY,SAA2C;AACrE,SAAO,QACJ,OAAO,CAAC,UAAwC,MAAM,SAAS,MAAM,EACrE,IAAI,CAAC,UAAU,MAAM,IAAI,EACzB,KAAK,EAAE;AACZ;AAKO,SAAS,eAAe,SAAsD;AACnF,SAAO,QACJ,OAAO,CAAC,UAA2C,MAAM,SAAS,UAAU,EAC5E,IAAI,CAAC,WAAW;AAAA,IACf,IAAI,MAAM;AAAA,IACV,MAAM,MAAM;AAAA,IACZ,OAAO,MAAM;AAAA,EACf,EAAE;AACN;AAKO,SAAS,iBAAiB,OAAuC;AACtE,SAAO,MAAM,IAAI,CAAC,SAAS;AACzB,UAAM,aAAsC,CAAC;AAC7C,UAAM,WAAqB,CAAC;AAE5B,eAAW,CAAC,MAAM,KAAK,KAAK,OAAO,QAAQ,KAAK,UAAU,GAAG;AAC3D,iBAAW,IAAI,IAAI;AAAA,QACjB,MAAM,MAAM;AAAA,QACZ,aAAa,MAAM;AAAA,MACrB;AACA,UAAI,MAAM,aAAa,OAAO;AAC5B,iBAAS,KAAK,IAAI;AAAA,MACpB;AAAA,IACF;AAEA,WAAO;AAAA,MACL,MAAM,KAAK;AAAA,MACX,aAAa,KAAK;AAAA,MAClB,cAAc;AAAA,QACZ,MAAM;AAAA,QACN;AAAA,QACA;AAAA,MACF;AAAA,IACF;AAAA,EACF,CAAC;AACH;;;AD/EA,IAAM,gBAAgB;AACtB,IAAM,qBAAqB;AAQpB,IAAM,oBAAN,cAAgC,gBAAgB;AAAA,EAClC,eAAe;AAAA,EACjB;AAAA,EAEjB,YAAY,UAAoC,CAAC,GAAG;AAClD,UAAM,OAAO;AACb,SAAK,SAAS,IAAI,UAAU;AAAA,MAC1B,QAAQ,QAAQ;AAAA,MAChB,SAAS,KAAK,KAAK;AAAA;AAAA,IACrB,CAAC;AAAA,EACH;AAAA,EAEA,MAAgB,WACd,UACA,SACsB;AACtB,UAAM,SAAoD;AAAA,MACxD,OAAO,QAAQ,SAAS;AAAA,MACxB,YAAY,QAAQ,aAAa;AAAA,MACjC,UAAU,oBAAoB,QAAQ;AAAA,IACxC;AAEA,QAAI,QAAQ,gBAAgB,QAAW;AACrC,aAAO,cAAc,QAAQ;AAAA,IAC/B;AAEA,QAAI,QAAQ,cAAc;AACxB,aAAO,SAAS,QAAQ;AAAA,IAC1B;AAEA,QAAI,QAAQ,SAAS,QAAQ,MAAM,SAAS,GAAG;AAC7C,aAAO,Q
AAQ,iBAAiB,QAAQ,KAAK;AAAA,IAC/C;AAEA,UAAM,WAAW,MAAM,KAAK,OAAO,SAAS,OAAO,MAAM;AAEzD,UAAM,UAAU,eAAe,SAAS,OAAO;AAE/C,WAAO;AAAA,MACL,MAAM,YAAY,SAAS,OAAO;AAAA,MAClC,YAAY;AAAA,QACV,OAAO,SAAS,MAAM;AAAA,QACtB,QAAQ,SAAS,MAAM;AAAA,MACzB;AAAA,MACA,cAAc,eAAe,SAAS,WAAW;AAAA,MACjD,GAAI,QAAQ,SAAS,KAAK,EAAE,QAAQ;AAAA,IACtC;AAAA,EACF;AAAA,EAEU,YAAY,OAAyB;AAC7C,QAAI,iBAAiB,UAAU,gBAAgB;AAC7C,aAAO;AAAA,IACT;AACA,QAAI,iBAAiB,UAAU,qBAAqB;AAClD,aAAO;AAAA,IACT;AACA,QAAI,iBAAiB,UAAU,oBAAoB;AACjD,aAAO;AAAA,IACT;AACA,QAAI,iBAAiB,UAAU,YAAY,MAAM,WAAW,KAAK;AAC/D,aAAO;AAAA,IACT;AACA,WAAO;AAAA,EACT;AAAA,EAEU,gBAAgB,OAA+B;AACvD,QAAI,iBAAiB,UAAU,UAAU;AACvC,YAAM,aAAa,MAAM,UAAU,aAAa;AAChD,UAAI,YAAY;AACd,cAAM,UAAU,OAAO,UAAU;AACjC,YAAI,CAAC,OAAO,MAAM,OAAO,KAAK,UAAU,GAAG;AACzC,iBAAO,UAAU;AAAA,QACnB;AAAA,MACF;AAAA,IACF;AACA,WAAO;AAAA,EACT;AAAA,EAEU,iBAAiB,OAAyB;AAClD,WAAO,iBAAiB,UAAU;AAAA,EACpC;AACF;;;AErFO,SAAS,eACd,MACA,SACa;AACb,UAAQ,MAAM;AAAA,IACZ,KAAK;AACH,aAAO,IAAI,kBAAkB,OAAO;AAAA,IACtC,SAAS;AACP,YAAM,cAAqB;AAC3B,YAAM,IAAI,MAAM,qBAAqB,WAAW,EAAE;AAAA,IACpD;AAAA,EACF;AACF;","names":[]}
package/package.json ADDED
@@ -0,0 +1,57 @@
1
+ {
2
+ "name": "@tepa/provider-anthropic",
3
+ "version": "0.1.0",
4
+ "description": "Anthropic Claude LLM provider for the Tepa autonomous agent",
5
+ "license": "MIT",
6
+ "author": "Frandi <frandi.tech@gmail.com>",
7
+ "repository": {
8
+ "type": "git",
9
+ "url": "https://github.com/frandi/tepa-ai.git",
10
+ "directory": "packages/provider-anthropic"
11
+ },
12
+ "homepage": "https://github.com/frandi/tepa-ai/tree/main/packages/provider-anthropic#readme",
13
+ "bugs": {
14
+ "url": "https://github.com/frandi/tepa-ai/issues"
15
+ },
16
+ "keywords": ["tepa", "ai", "llm", "agent", "anthropic", "claude", "provider"],
17
+ "type": "module",
18
+ "main": "./dist/index.cjs",
19
+ "module": "./dist/index.js",
20
+ "types": "./dist/index.d.ts",
21
+ "exports": {
22
+ ".": {
23
+ "import": {
24
+ "types": "./dist/index.d.ts",
25
+ "default": "./dist/index.js"
26
+ },
27
+ "require": {
28
+ "types": "./dist/index.d.cts",
29
+ "default": "./dist/index.cjs"
30
+ }
31
+ }
32
+ },
33
+ "files": [
34
+ "dist"
35
+ ],
36
+ "sideEffects": false,
37
+ "engines": {
38
+ "node": ">=18"
39
+ },
40
+ "publishConfig": {
41
+ "access": "public"
42
+ },
43
+ "scripts": {
44
+ "build": "tsup",
45
+ "dev": "tsup --watch",
46
+ "prepublishOnly": "npm run build"
47
+ },
48
+ "dependencies": {
49
+ "@tepa/types": "^0.1.0",
50
+ "@tepa/provider-core": "^0.1.0",
51
+ "@anthropic-ai/sdk": "^0.39.0"
52
+ },
53
+ "devDependencies": {
54
+ "tsup": "^8.0.0",
55
+ "typescript": "^5.5.0"
56
+ }
57
+ }