@riotprompt/execution-anthropic 0.0.2
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/.kodrdriv-test-cache.json +6 -0
- package/LICENSE +18 -0
- package/README.md +81 -0
- package/dist/index.cjs +83 -0
- package/dist/index.cjs.map +1 -0
- package/dist/index.d.ts +85 -0
- package/dist/index.js +83 -0
- package/dist/index.js.map +1 -0
- package/package.json +46 -0
package/LICENSE
ADDED
@@ -0,0 +1,18 @@
Apache License
Version 2.0, January 2004
http://www.apache.org/licenses/

Copyright 2025 Tim O'Brien

Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at

    http://www.apache.org/licenses/LICENSE-2.0

Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
package/README.md
ADDED
@@ -0,0 +1,81 @@
# execution-anthropic

Anthropic Claude provider implementation for LLM execution. Implements the `Provider` interface from the `execution` package.

## Installation

```bash
npm install execution-anthropic @anthropic-ai/sdk
```

## Usage

```typescript
import { AnthropicProvider, createAnthropicProvider } from 'execution-anthropic';

// Create provider
const provider = createAnthropicProvider();

// Execute a request
const response = await provider.execute(
  {
    model: 'claude-3-opus-20240229',
    messages: [
      { role: 'system', content: 'You are helpful.' },
      { role: 'user', content: 'Hello!' }
    ],
    addMessage: () => {},
  },
  {
    apiKey: process.env.ANTHROPIC_API_KEY,
    temperature: 0.7,
    maxTokens: 4096,
  }
);

console.log(response.content);
console.log(response.usage); // { inputTokens: X, outputTokens: Y }
```

## Supported Models

The provider supports all Claude models:
- Claude 3 Opus
- Claude 3 Sonnet
- Claude 3 Haiku
- Claude 3.5 family
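Support can also be checked at runtime with `supportsModel`, which in this release simply tests whether the model id begins with `claude`. A minimal sketch, reusing the `provider` from the Usage example above:

```typescript
// supportsModel takes a model id string and returns a boolean.
provider.supportsModel('claude-3-5-sonnet-20240620'); // true  (id starts with "claude")
provider.supportsModel('gpt-4o');                     // false (not a Claude model)
```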
## API Key

Set via:
1. `options.apiKey` parameter
2. `ANTHROPIC_API_KEY` environment variable

## Features

- Automatic system prompt extraction (Anthropic separates system from messages)
- Structured output via tool use (JSON schema support)
- Full token usage tracking
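Structured output is driven by `request.responseFormat`: when its `type` is `'json_schema'`, the provider converts the schema into an Anthropic tool, forces a tool call, and returns the tool input as pretty-printed JSON in `response.content`. A minimal sketch; the schema and field names below are illustrative only:

```typescript
const structured = await provider.execute(
  {
    model: 'claude-3-opus-20240229',
    messages: [{ role: 'user', content: 'Summarize this text: ...' }],
    addMessage: () => {},
    responseFormat: {
      type: 'json_schema',
      json_schema: {
        name: 'summary',                            // illustrative schema name
        description: 'A short structured summary',  // optional
        schema: {
          type: 'object',
          properties: { summary: { type: 'string' } },
          required: ['summary'],
        },
      },
    },
  },
  { apiKey: process.env.ANTHROPIC_API_KEY }
);

// The tool input comes back serialized as JSON text.
const data = JSON.parse(structured.content);
```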
## Response Format

```typescript
interface ProviderResponse {
  content: string;
  model: string;
  usage?: {
    inputTokens: number;
    outputTokens: number;
  };
}
```

## Related Packages

- `execution` - Core interfaces (no SDK dependencies)
- `execution-openai` - OpenAI provider
- `execution-gemini` - Google Gemini provider

## License

Apache-2.0
package/dist/index.cjs
ADDED
@@ -0,0 +1,83 @@
"use strict";
Object.defineProperties(exports, { __esModule: { value: true }, [Symbol.toStringTag]: { value: "Module" } });
const Anthropic = require("@anthropic-ai/sdk");
class AnthropicProvider {
  name = "anthropic";
  /**
   * Check if this provider supports a given model
   */
  supportsModel(model) {
    if (!model) return false;
    return model.startsWith("claude");
  }
  /**
   * Execute a request against Anthropic
   */
  async execute(request, options = {}) {
    const apiKey = options.apiKey || process.env.ANTHROPIC_API_KEY;
    if (!apiKey) throw new Error("Anthropic API key is required");
    const client = new Anthropic({ apiKey });
    const model = options.model || request.model || "claude-3-opus-20240229";
    let systemPrompt = "";
    const messages = [];
    for (const msg of request.messages) {
      if (msg.role === "system" || msg.role === "developer") {
        systemPrompt += (typeof msg.content === "string" ? msg.content : JSON.stringify(msg.content)) + "\n\n";
      } else {
        messages.push({
          role: msg.role,
          content: typeof msg.content === "string" ? msg.content : JSON.stringify(msg.content)
        });
      }
    }
    const response = await client.messages.create({
      model,
      system: systemPrompt.trim() || void 0,
      messages,
      max_tokens: options.maxTokens || 4096,
      temperature: options.temperature,
      ...request.responseFormat?.type === "json_schema" ? {
        tools: [
          {
            name: request.responseFormat.json_schema.name,
            description: request.responseFormat.json_schema.description || "Output data in this structured format",
            input_schema: request.responseFormat.json_schema.schema
          }
        ],
        tool_choice: {
          type: "tool",
          name: request.responseFormat.json_schema.name
        }
      } : {}
    });
    let text = "";
    if (request.responseFormat?.type === "json_schema") {
      const toolUseBlock = response.content.find(
        (block) => block.type === "tool_use"
      );
      if (toolUseBlock && toolUseBlock.type === "tool_use") {
        text = JSON.stringify(toolUseBlock.input, null, 2);
      }
    } else {
      const contentBlock = response.content[0];
      text = contentBlock.type === "text" ? contentBlock.text : "";
    }
    return {
      content: text,
      model: response.model,
      usage: {
        inputTokens: response.usage.input_tokens,
        outputTokens: response.usage.output_tokens
      }
    };
  }
}
function createAnthropicProvider() {
  return new AnthropicProvider();
}
const VERSION = "0.0.1";
exports.AnthropicProvider = AnthropicProvider;
exports.VERSION = VERSION;
exports.createAnthropicProvider = createAnthropicProvider;
exports.default = AnthropicProvider;
//# sourceMappingURL=index.cjs.map
package/dist/index.cjs.map
ADDED
@@ -0,0 +1 @@
{"version":3,"file":"index.cjs","sources":["../src/index.ts"],"sourcesContent":["/**\n * Execution Anthropic Package\n *\n * Anthropic provider implementation for LLM execution.\n *\n * @packageDocumentation\n */\n\nimport Anthropic from '@anthropic-ai/sdk';\n\n// ===== INLINE TYPES (from 'execution' package) =====\n\nexport type Model = string;\n\nexport interface Message {\n role: 'user' | 'assistant' | 'system' | 'developer' | 'tool';\n content: string | string[] | null;\n name?: string;\n}\n\nexport interface Request {\n messages: Message[];\n model: Model;\n responseFormat?: any;\n validator?: any;\n addMessage(message: Message): void;\n}\n\nexport interface ProviderResponse {\n content: string;\n model: string;\n usage?: {\n inputTokens: number;\n outputTokens: number;\n };\n toolCalls?: Array<{\n id: string;\n type: 'function';\n function: {\n name: string;\n arguments: string;\n };\n }>;\n}\n\nexport interface ExecutionOptions {\n apiKey?: string;\n model?: string;\n temperature?: number;\n maxTokens?: number;\n timeout?: number;\n retries?: number;\n}\n\nexport interface Provider {\n readonly name: string;\n execute(request: Request, options?: ExecutionOptions): Promise<ProviderResponse>;\n supportsModel?(model: Model): boolean;\n}\n\n/**\n * Anthropic Provider implementation\n */\nexport class AnthropicProvider implements Provider {\n readonly name = 'anthropic';\n\n /**\n * Check if this provider supports a given model\n */\n supportsModel(model: Model): boolean {\n if (!model) return false;\n return model.startsWith('claude');\n }\n\n /**\n * Execute a request against Anthropic\n */\n async execute(\n request: Request,\n options: ExecutionOptions = {}\n ): Promise<ProviderResponse> {\n const apiKey = options.apiKey || process.env.ANTHROPIC_API_KEY;\n if (!apiKey) throw new Error('Anthropic API key is required');\n\n const client = new Anthropic({ apiKey });\n\n const model = options.model || request.model || 'claude-3-opus-20240229';\n\n // Anthropic separates system prompt from messages\n let systemPrompt = '';\n const messages: Anthropic.MessageParam[] = [];\n\n for (const msg of request.messages) {\n if (msg.role === 'system' || msg.role === 'developer') {\n systemPrompt +=\n (typeof msg.content === 'string'\n ? msg.content\n : JSON.stringify(msg.content)) + '\\n\\n';\n } else {\n messages.push({\n role: msg.role as 'user' | 'assistant',\n content:\n typeof msg.content === 'string'\n ? msg.content\n : JSON.stringify(msg.content),\n });\n }\n }\n\n const response = await client.messages.create({\n model: model,\n system: systemPrompt.trim() || undefined,\n messages: messages,\n max_tokens: options.maxTokens || 4096,\n temperature: options.temperature,\n ...(request.responseFormat?.type === 'json_schema'\n ? {\n tools: [\n {\n name: request.responseFormat.json_schema.name,\n description:\n request.responseFormat.json_schema.description ||\n 'Output data in this structured format',\n input_schema:\n request.responseFormat.json_schema.schema,\n },\n ],\n tool_choice: {\n type: 'tool' as const,\n name: request.responseFormat.json_schema.name,\n },\n }\n : {}),\n });\n\n // Handle ContentBlock\n let text = '';\n\n if (request.responseFormat?.type === 'json_schema') {\n const toolUseBlock = response.content.find(\n (block) => block.type === 'tool_use'\n );\n if (toolUseBlock && toolUseBlock.type === 'tool_use') {\n text = JSON.stringify(toolUseBlock.input, null, 2);\n }\n } else {\n const contentBlock = response.content[0];\n text = contentBlock.type === 'text' ? 
contentBlock.text : '';\n }\n\n return {\n content: text,\n model: response.model,\n usage: {\n inputTokens: response.usage.input_tokens,\n outputTokens: response.usage.output_tokens,\n },\n };\n }\n}\n\n/**\n * Create a new Anthropic provider instance\n */\nexport function createAnthropicProvider(): AnthropicProvider {\n return new AnthropicProvider();\n}\n\n/**\n * Package version\n */\nexport const VERSION = '0.0.1';\n\nexport default AnthropicProvider;\n"],"names":[],"mappings":";;;AA+DO,MAAM,kBAAsC;AAAA,EACtC,OAAO;AAAA;AAAA;AAAA;AAAA,EAKhB,cAAc,OAAuB;AACjC,QAAI,CAAC,MAAO,QAAO;AACnB,WAAO,MAAM,WAAW,QAAQ;AAAA,EACpC;AAAA;AAAA;AAAA;AAAA,EAKA,MAAM,QACF,SACA,UAA4B,IACH;AACzB,UAAM,SAAS,QAAQ,UAAU,QAAQ,IAAI;AAC7C,QAAI,CAAC,OAAQ,OAAM,IAAI,MAAM,+BAA+B;AAE5D,UAAM,SAAS,IAAI,UAAU,EAAE,QAAQ;AAEvC,UAAM,QAAQ,QAAQ,SAAS,QAAQ,SAAS;AAGhD,QAAI,eAAe;AACnB,UAAM,WAAqC,CAAA;AAE3C,eAAW,OAAO,QAAQ,UAAU;AAChC,UAAI,IAAI,SAAS,YAAY,IAAI,SAAS,aAAa;AACnD,yBACK,OAAO,IAAI,YAAY,WAClB,IAAI,UACJ,KAAK,UAAU,IAAI,OAAO,KAAK;AAAA,MAC7C,OAAO;AACH,iBAAS,KAAK;AAAA,UACV,MAAM,IAAI;AAAA,UACV,SACI,OAAO,IAAI,YAAY,WACjB,IAAI,UACJ,KAAK,UAAU,IAAI,OAAO;AAAA,QAAA,CACvC;AAAA,MACL;AAAA,IACJ;AAEA,UAAM,WAAW,MAAM,OAAO,SAAS,OAAO;AAAA,MAC1C;AAAA,MACA,QAAQ,aAAa,KAAA,KAAU;AAAA,MAC/B;AAAA,MACA,YAAY,QAAQ,aAAa;AAAA,MACjC,aAAa,QAAQ;AAAA,MACrB,GAAI,QAAQ,gBAAgB,SAAS,gBAC/B;AAAA,QACE,OAAO;AAAA,UACH;AAAA,YACI,MAAM,QAAQ,eAAe,YAAY;AAAA,YACzC,aACM,QAAQ,eAAe,YAAY,eACnC;AAAA,YACN,cACM,QAAQ,eAAe,YAAY;AAAA,UAAA;AAAA,QAC7C;AAAA,QAEJ,aAAa;AAAA,UACT,MAAM;AAAA,UACN,MAAM,QAAQ,eAAe,YAAY;AAAA,QAAA;AAAA,MAC7C,IAEF,CAAA;AAAA,IAAC,CACV;AAGD,QAAI,OAAO;AAEX,QAAI,QAAQ,gBAAgB,SAAS,eAAe;AAChD,YAAM,eAAe,SAAS,QAAQ;AAAA,QAClC,CAAC,UAAU,MAAM,SAAS;AAAA,MAAA;AAE9B,UAAI,gBAAgB,aAAa,SAAS,YAAY;AAClD,eAAO,KAAK,UAAU,aAAa,OAAO,MAAM,CAAC;AAAA,MACrD;AAAA,IACJ,OAAO;AACH,YAAM,eAAe,SAAS,QAAQ,CAAC;AACvC,aAAO,aAAa,SAAS,SAAS,aAAa,OAAO;AAAA,IAC9D;AAEA,WAAO;AAAA,MACH,SAAS;AAAA,MACT,OAAO,SAAS;AAAA,MAChB,OAAO;AAAA,QACH,aAAa,SAAS,MAAM;AAAA,QAC5B,cAAc,SAAS,MAAM;AAAA,MAAA;AAAA,IACjC;AAAA,EAER;AACJ;AAKO,SAAS,0BAA6C;AACzD,SAAO,IAAI,kBAAA;AACf;AAKO,MAAM,UAAU;;;;;"}
package/dist/index.d.ts
ADDED
@@ -0,0 +1,85 @@
/**
 * Execution Anthropic Package
 *
 * Anthropic provider implementation for LLM execution.
 *
 * @packageDocumentation
 */

/**
 * Anthropic Provider implementation
 */
declare class AnthropicProvider implements Provider {
    readonly name = "anthropic";
    /**
     * Check if this provider supports a given model
     */
    supportsModel(model: Model): boolean;
    /**
     * Execute a request against Anthropic
     */
    execute(request: Request_2, options?: ExecutionOptions): Promise<ProviderResponse>;
}
export { AnthropicProvider }
export default AnthropicProvider;

/**
 * Create a new Anthropic provider instance
 */
export declare function createAnthropicProvider(): AnthropicProvider;

export declare interface ExecutionOptions {
    apiKey?: string;
    model?: string;
    temperature?: number;
    maxTokens?: number;
    timeout?: number;
    retries?: number;
}

export declare interface Message {
    role: 'user' | 'assistant' | 'system' | 'developer' | 'tool';
    content: string | string[] | null;
    name?: string;
}

export declare type Model = string;

export declare interface Provider {
    readonly name: string;
    execute(request: Request_2, options?: ExecutionOptions): Promise<ProviderResponse>;
    supportsModel?(model: Model): boolean;
}

export declare interface ProviderResponse {
    content: string;
    model: string;
    usage?: {
        inputTokens: number;
        outputTokens: number;
    };
    toolCalls?: Array<{
        id: string;
        type: 'function';
        function: {
            name: string;
            arguments: string;
        };
    }>;
}

declare interface Request_2 {
    messages: Message[];
    model: Model;
    responseFormat?: any;
    validator?: any;
    addMessage(message: Message): void;
}
export { Request_2 as Request }

/**
 * Package version
 */
export declare const VERSION = "0.0.1";

export { }
package/dist/index.js
ADDED
@@ -0,0 +1,83 @@
import Anthropic from "@anthropic-ai/sdk";
class AnthropicProvider {
  name = "anthropic";
  /**
   * Check if this provider supports a given model
   */
  supportsModel(model) {
    if (!model) return false;
    return model.startsWith("claude");
  }
  /**
   * Execute a request against Anthropic
   */
  async execute(request, options = {}) {
    const apiKey = options.apiKey || process.env.ANTHROPIC_API_KEY;
    if (!apiKey) throw new Error("Anthropic API key is required");
    const client = new Anthropic({ apiKey });
    const model = options.model || request.model || "claude-3-opus-20240229";
    let systemPrompt = "";
    const messages = [];
    for (const msg of request.messages) {
      if (msg.role === "system" || msg.role === "developer") {
        systemPrompt += (typeof msg.content === "string" ? msg.content : JSON.stringify(msg.content)) + "\n\n";
      } else {
        messages.push({
          role: msg.role,
          content: typeof msg.content === "string" ? msg.content : JSON.stringify(msg.content)
        });
      }
    }
    const response = await client.messages.create({
      model,
      system: systemPrompt.trim() || void 0,
      messages,
      max_tokens: options.maxTokens || 4096,
      temperature: options.temperature,
      ...request.responseFormat?.type === "json_schema" ? {
        tools: [
          {
            name: request.responseFormat.json_schema.name,
            description: request.responseFormat.json_schema.description || "Output data in this structured format",
            input_schema: request.responseFormat.json_schema.schema
          }
        ],
        tool_choice: {
          type: "tool",
          name: request.responseFormat.json_schema.name
        }
      } : {}
    });
    let text = "";
    if (request.responseFormat?.type === "json_schema") {
      const toolUseBlock = response.content.find(
        (block) => block.type === "tool_use"
      );
      if (toolUseBlock && toolUseBlock.type === "tool_use") {
        text = JSON.stringify(toolUseBlock.input, null, 2);
      }
    } else {
      const contentBlock = response.content[0];
      text = contentBlock.type === "text" ? contentBlock.text : "";
    }
    return {
      content: text,
      model: response.model,
      usage: {
        inputTokens: response.usage.input_tokens,
        outputTokens: response.usage.output_tokens
      }
    };
  }
}
function createAnthropicProvider() {
  return new AnthropicProvider();
}
const VERSION = "0.0.1";
export {
  AnthropicProvider,
  VERSION,
  createAnthropicProvider,
  AnthropicProvider as default
};
//# sourceMappingURL=index.js.map
package/dist/index.js.map
ADDED
@@ -0,0 +1 @@
{"version":3,"file":"index.js","sources":["../src/index.ts"],"sourcesContent":["/**\n * Execution Anthropic Package\n *\n * Anthropic provider implementation for LLM execution.\n *\n * @packageDocumentation\n */\n\nimport Anthropic from '@anthropic-ai/sdk';\n\n// ===== INLINE TYPES (from 'execution' package) =====\n\nexport type Model = string;\n\nexport interface Message {\n role: 'user' | 'assistant' | 'system' | 'developer' | 'tool';\n content: string | string[] | null;\n name?: string;\n}\n\nexport interface Request {\n messages: Message[];\n model: Model;\n responseFormat?: any;\n validator?: any;\n addMessage(message: Message): void;\n}\n\nexport interface ProviderResponse {\n content: string;\n model: string;\n usage?: {\n inputTokens: number;\n outputTokens: number;\n };\n toolCalls?: Array<{\n id: string;\n type: 'function';\n function: {\n name: string;\n arguments: string;\n };\n }>;\n}\n\nexport interface ExecutionOptions {\n apiKey?: string;\n model?: string;\n temperature?: number;\n maxTokens?: number;\n timeout?: number;\n retries?: number;\n}\n\nexport interface Provider {\n readonly name: string;\n execute(request: Request, options?: ExecutionOptions): Promise<ProviderResponse>;\n supportsModel?(model: Model): boolean;\n}\n\n/**\n * Anthropic Provider implementation\n */\nexport class AnthropicProvider implements Provider {\n readonly name = 'anthropic';\n\n /**\n * Check if this provider supports a given model\n */\n supportsModel(model: Model): boolean {\n if (!model) return false;\n return model.startsWith('claude');\n }\n\n /**\n * Execute a request against Anthropic\n */\n async execute(\n request: Request,\n options: ExecutionOptions = {}\n ): Promise<ProviderResponse> {\n const apiKey = options.apiKey || process.env.ANTHROPIC_API_KEY;\n if (!apiKey) throw new Error('Anthropic API key is required');\n\n const client = new Anthropic({ apiKey });\n\n const model = options.model || request.model || 'claude-3-opus-20240229';\n\n // Anthropic separates system prompt from messages\n let systemPrompt = '';\n const messages: Anthropic.MessageParam[] = [];\n\n for (const msg of request.messages) {\n if (msg.role === 'system' || msg.role === 'developer') {\n systemPrompt +=\n (typeof msg.content === 'string'\n ? msg.content\n : JSON.stringify(msg.content)) + '\\n\\n';\n } else {\n messages.push({\n role: msg.role as 'user' | 'assistant',\n content:\n typeof msg.content === 'string'\n ? msg.content\n : JSON.stringify(msg.content),\n });\n }\n }\n\n const response = await client.messages.create({\n model: model,\n system: systemPrompt.trim() || undefined,\n messages: messages,\n max_tokens: options.maxTokens || 4096,\n temperature: options.temperature,\n ...(request.responseFormat?.type === 'json_schema'\n ? {\n tools: [\n {\n name: request.responseFormat.json_schema.name,\n description:\n request.responseFormat.json_schema.description ||\n 'Output data in this structured format',\n input_schema:\n request.responseFormat.json_schema.schema,\n },\n ],\n tool_choice: {\n type: 'tool' as const,\n name: request.responseFormat.json_schema.name,\n },\n }\n : {}),\n });\n\n // Handle ContentBlock\n let text = '';\n\n if (request.responseFormat?.type === 'json_schema') {\n const toolUseBlock = response.content.find(\n (block) => block.type === 'tool_use'\n );\n if (toolUseBlock && toolUseBlock.type === 'tool_use') {\n text = JSON.stringify(toolUseBlock.input, null, 2);\n }\n } else {\n const contentBlock = response.content[0];\n text = contentBlock.type === 'text' ? 
contentBlock.text : '';\n }\n\n return {\n content: text,\n model: response.model,\n usage: {\n inputTokens: response.usage.input_tokens,\n outputTokens: response.usage.output_tokens,\n },\n };\n }\n}\n\n/**\n * Create a new Anthropic provider instance\n */\nexport function createAnthropicProvider(): AnthropicProvider {\n return new AnthropicProvider();\n}\n\n/**\n * Package version\n */\nexport const VERSION = '0.0.1';\n\nexport default AnthropicProvider;\n"],"names":[],"mappings":";AA+DO,MAAM,kBAAsC;AAAA,EACtC,OAAO;AAAA;AAAA;AAAA;AAAA,EAKhB,cAAc,OAAuB;AACjC,QAAI,CAAC,MAAO,QAAO;AACnB,WAAO,MAAM,WAAW,QAAQ;AAAA,EACpC;AAAA;AAAA;AAAA;AAAA,EAKA,MAAM,QACF,SACA,UAA4B,IACH;AACzB,UAAM,SAAS,QAAQ,UAAU,QAAQ,IAAI;AAC7C,QAAI,CAAC,OAAQ,OAAM,IAAI,MAAM,+BAA+B;AAE5D,UAAM,SAAS,IAAI,UAAU,EAAE,QAAQ;AAEvC,UAAM,QAAQ,QAAQ,SAAS,QAAQ,SAAS;AAGhD,QAAI,eAAe;AACnB,UAAM,WAAqC,CAAA;AAE3C,eAAW,OAAO,QAAQ,UAAU;AAChC,UAAI,IAAI,SAAS,YAAY,IAAI,SAAS,aAAa;AACnD,yBACK,OAAO,IAAI,YAAY,WAClB,IAAI,UACJ,KAAK,UAAU,IAAI,OAAO,KAAK;AAAA,MAC7C,OAAO;AACH,iBAAS,KAAK;AAAA,UACV,MAAM,IAAI;AAAA,UACV,SACI,OAAO,IAAI,YAAY,WACjB,IAAI,UACJ,KAAK,UAAU,IAAI,OAAO;AAAA,QAAA,CACvC;AAAA,MACL;AAAA,IACJ;AAEA,UAAM,WAAW,MAAM,OAAO,SAAS,OAAO;AAAA,MAC1C;AAAA,MACA,QAAQ,aAAa,KAAA,KAAU;AAAA,MAC/B;AAAA,MACA,YAAY,QAAQ,aAAa;AAAA,MACjC,aAAa,QAAQ;AAAA,MACrB,GAAI,QAAQ,gBAAgB,SAAS,gBAC/B;AAAA,QACE,OAAO;AAAA,UACH;AAAA,YACI,MAAM,QAAQ,eAAe,YAAY;AAAA,YACzC,aACM,QAAQ,eAAe,YAAY,eACnC;AAAA,YACN,cACM,QAAQ,eAAe,YAAY;AAAA,UAAA;AAAA,QAC7C;AAAA,QAEJ,aAAa;AAAA,UACT,MAAM;AAAA,UACN,MAAM,QAAQ,eAAe,YAAY;AAAA,QAAA;AAAA,MAC7C,IAEF,CAAA;AAAA,IAAC,CACV;AAGD,QAAI,OAAO;AAEX,QAAI,QAAQ,gBAAgB,SAAS,eAAe;AAChD,YAAM,eAAe,SAAS,QAAQ;AAAA,QAClC,CAAC,UAAU,MAAM,SAAS;AAAA,MAAA;AAE9B,UAAI,gBAAgB,aAAa,SAAS,YAAY;AAClD,eAAO,KAAK,UAAU,aAAa,OAAO,MAAM,CAAC;AAAA,MACrD;AAAA,IACJ,OAAO;AACH,YAAM,eAAe,SAAS,QAAQ,CAAC;AACvC,aAAO,aAAa,SAAS,SAAS,aAAa,OAAO;AAAA,IAC9D;AAEA,WAAO;AAAA,MACH,SAAS;AAAA,MACT,OAAO,SAAS;AAAA,MAChB,OAAO;AAAA,QACH,aAAa,SAAS,MAAM;AAAA,QAC5B,cAAc,SAAS,MAAM;AAAA,MAAA;AAAA,IACjC;AAAA,EAER;AACJ;AAKO,SAAS,0BAA6C;AACzD,SAAO,IAAI,kBAAA;AACf;AAKO,MAAM,UAAU;"}
package/package.json
ADDED
@@ -0,0 +1,46 @@
{
  "name": "@riotprompt/execution-anthropic",
  "version": "0.0.2",
  "description": "Anthropic Claude provider for execution interface",
  "type": "module",
  "main": "./dist/index.cjs",
  "module": "./dist/index.js",
  "types": "./dist/index.d.ts",
  "exports": {
    ".": {
      "types": "./dist/index.d.ts",
      "import": "./dist/index.js",
      "require": "./dist/index.cjs"
    }
  },
  "scripts": {
    "clean": "rm -rf dist",
    "build": "vite build",
    "test": "vitest run",
    "lint": "eslint src",
    "prepublishOnly": "npm run clean && npm run build"
  },
  "keywords": [
    "llm",
    "anthropic",
    "claude",
    "execution",
    "provider"
  ],
  "author": "Tim O'Brien <tobrien@discursive.com>",
  "license": "Apache-2.0",
  "dependencies": {
    "@anthropic-ai/sdk": "^0.52.0"
  },
  "devDependencies": {
    "@types/node": "^25.0.6",
    "@typescript-eslint/eslint-plugin": "^8.33.1",
    "@typescript-eslint/parser": "^8.33.1",
    "eslint": "^9.28.0",
    "globals": "^16.2.0",
    "typescript": "^5.8.3",
    "vite": "^7.0.4",
    "vite-plugin-dts": "^4.5.4",
    "vitest": "^3.2.4"
  }
}
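The exports map above serves the ESM build to `import` consumers and the CJS build to `require` consumers, with a single type entry point. A hedged sketch of both consumption styles, assuming the package is installed under its published name:

```typescript
// ESM / TypeScript: resolves the "import" condition to dist/index.js
import { createAnthropicProvider, VERSION } from '@riotprompt/execution-anthropic';

console.log(VERSION); // "0.0.1" in this build

// CommonJS: resolves the "require" condition to dist/index.cjs
// const { createAnthropicProvider } = require('@riotprompt/execution-anthropic');
```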