@providerprotocol/ai 0.0.1
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/LICENSE +21 -0
- package/README.md +84 -0
- package/dist/anthropic/index.d.ts +41 -0
- package/dist/anthropic/index.js +500 -0
- package/dist/anthropic/index.js.map +1 -0
- package/dist/chunk-CUCRF5W6.js +136 -0
- package/dist/chunk-CUCRF5W6.js.map +1 -0
- package/dist/chunk-FTFX2VET.js +424 -0
- package/dist/chunk-FTFX2VET.js.map +1 -0
- package/dist/chunk-QUUX4G7U.js +117 -0
- package/dist/chunk-QUUX4G7U.js.map +1 -0
- package/dist/chunk-Y6Q7JCNP.js +39 -0
- package/dist/chunk-Y6Q7JCNP.js.map +1 -0
- package/dist/google/index.d.ts +69 -0
- package/dist/google/index.js +517 -0
- package/dist/google/index.js.map +1 -0
- package/dist/http/index.d.ts +61 -0
- package/dist/http/index.js +43 -0
- package/dist/http/index.js.map +1 -0
- package/dist/index.d.ts +792 -0
- package/dist/index.js +898 -0
- package/dist/index.js.map +1 -0
- package/dist/openai/index.d.ts +204 -0
- package/dist/openai/index.js +1340 -0
- package/dist/openai/index.js.map +1 -0
- package/dist/provider-CUJWjgNl.d.ts +192 -0
- package/dist/retry-I2661_rv.d.ts +118 -0
- package/package.json +88 -0
- package/src/anthropic/index.ts +3 -0
- package/src/core/image.ts +188 -0
- package/src/core/llm.ts +619 -0
- package/src/core/provider.ts +92 -0
- package/src/google/index.ts +3 -0
- package/src/http/errors.ts +112 -0
- package/src/http/fetch.ts +210 -0
- package/src/http/index.ts +31 -0
- package/src/http/keys.ts +136 -0
- package/src/http/retry.ts +205 -0
- package/src/http/sse.ts +136 -0
- package/src/index.ts +32 -0
- package/src/openai/index.ts +9 -0
- package/src/providers/anthropic/index.ts +17 -0
- package/src/providers/anthropic/llm.ts +196 -0
- package/src/providers/anthropic/transform.ts +452 -0
- package/src/providers/anthropic/types.ts +213 -0
- package/src/providers/google/index.ts +17 -0
- package/src/providers/google/llm.ts +203 -0
- package/src/providers/google/transform.ts +487 -0
- package/src/providers/google/types.ts +214 -0
- package/src/providers/openai/index.ts +151 -0
- package/src/providers/openai/llm.completions.ts +201 -0
- package/src/providers/openai/llm.responses.ts +211 -0
- package/src/providers/openai/transform.completions.ts +628 -0
- package/src/providers/openai/transform.responses.ts +718 -0
- package/src/providers/openai/types.ts +711 -0
- package/src/types/content.ts +133 -0
- package/src/types/errors.ts +85 -0
- package/src/types/index.ts +105 -0
- package/src/types/llm.ts +211 -0
- package/src/types/messages.ts +182 -0
- package/src/types/provider.ts +195 -0
- package/src/types/schema.ts +58 -0
- package/src/types/stream.ts +146 -0
- package/src/types/thread.ts +226 -0
- package/src/types/tool.ts +88 -0
- package/src/types/turn.ts +118 -0
- package/src/utils/id.ts +28 -0
package/LICENSE
ADDED
|
@@ -0,0 +1,21 @@
|
|
|
1
|
+
MIT License
|
|
2
|
+
|
|
3
|
+
Copyright (c) 2025, 2026 ProviderProtocol
|
|
4
|
+
|
|
5
|
+
Permission is hereby granted, free of charge, to any person obtaining a copy
|
|
6
|
+
of this software and associated documentation files (the "Software"), to deal
|
|
7
|
+
in the Software without restriction, including without limitation the rights
|
|
8
|
+
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
|
|
9
|
+
copies of the Software, and to permit persons to whom the Software is
|
|
10
|
+
furnished to do so, subject to the following conditions:
|
|
11
|
+
|
|
12
|
+
The above copyright notice and this permission notice shall be included in all
|
|
13
|
+
copies or substantial portions of the Software.
|
|
14
|
+
|
|
15
|
+
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
|
|
16
|
+
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
|
|
17
|
+
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
|
|
18
|
+
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
|
|
19
|
+
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
|
|
20
|
+
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
|
|
21
|
+
SOFTWARE.
|
package/README.md
ADDED
|
@@ -0,0 +1,84 @@
|
|
|
1
|
+
# @providerprotocol/ai
|
|
2
|
+
|
|
3
|
+
Unified Provider Protocol (UPP-1.1) implementation for AI inference across multiple providers.
|
|
4
|
+
|
|
5
|
+
## Install
|
|
6
|
+
|
|
7
|
+
```bash
|
|
8
|
+
bun add @providerprotocol/ai
|
|
9
|
+
```
|
|
10
|
+
|
|
11
|
+
## Usage
|
|
12
|
+
|
|
13
|
+
```typescript
|
|
14
|
+
import { llm } from '@providerprotocol/ai';
|
|
15
|
+
import { anthropic } from '@providerprotocol/ai/anthropic';
|
|
16
|
+
import { openai } from '@providerprotocol/ai/openai';
|
|
17
|
+
import { google } from '@providerprotocol/ai/google';
|
|
18
|
+
|
|
19
|
+
// Simple generation
|
|
20
|
+
const claude = llm({ model: anthropic('claude-sonnet-4-20250514') });
|
|
21
|
+
const turn = await claude.generate('Hello!');
|
|
22
|
+
console.log(turn.response.text);
|
|
23
|
+
|
|
24
|
+
// Streaming
|
|
25
|
+
const stream = claude.stream('Count to 5');
|
|
26
|
+
for await (const event of stream) {
|
|
27
|
+
if (event.type === 'text_delta') process.stdout.write(event.delta.text);
|
|
28
|
+
}
|
|
29
|
+
|
|
30
|
+
// Multi-turn
|
|
31
|
+
const history = [];
|
|
32
|
+
const t1 = await claude.generate(history, 'My name is Alice');
|
|
33
|
+
history.push(...t1.messages);
|
|
34
|
+
const t2 = await claude.generate(history, 'What is my name?');
|
|
35
|
+
|
|
36
|
+
// Tools
|
|
37
|
+
const toolTurn = await claude.generate({
|
|
38
|
+
tools: [{
|
|
39
|
+
name: 'getWeather',
|
|
40
|
+
description: 'Get weather for a location',
|
|
41
|
+
parameters: { type: 'object', properties: { location: { type: 'string' } } },
|
|
42
|
+
run: async ({ location }) => `Sunny in ${location}`,
|
|
43
|
+
}],
|
|
44
|
+
}, 'Weather in Tokyo?');
|
|
45
|
+
|
|
46
|
+
// Structured output
|
|
47
|
+
const turn = await llm({
|
|
48
|
+
model: openai('gpt-4o'),
|
|
49
|
+
structure: {
|
|
50
|
+
type: 'object',
|
|
51
|
+
properties: { name: { type: 'string' }, age: { type: 'number' } },
|
|
52
|
+
},
|
|
53
|
+
}).generate('Extract: John is 30 years old');
|
|
54
|
+
console.log(turn.data); // { name: 'John', age: 30 }
|
|
55
|
+
```
|
|
56
|
+
|
|
57
|
+
## Providers
|
|
58
|
+
|
|
59
|
+
| Provider | Import |
|
|
60
|
+
|----------|--------|
|
|
61
|
+
| Anthropic | `@providerprotocol/ai/anthropic` |
|
|
62
|
+
| OpenAI | `@providerprotocol/ai/openai` |
|
|
63
|
+
| Google | `@providerprotocol/ai/google` |
|
|
64
|
+
|
|
65
|
+
## Configuration
|
|
66
|
+
|
|
67
|
+
```typescript
|
|
68
|
+
import { ExponentialBackoff, RoundRobinKeys } from '@providerprotocol/ai/http';
|
|
69
|
+
|
|
70
|
+
const instance = llm({
|
|
71
|
+
model: openai('gpt-4o'),
|
|
72
|
+
config: {
|
|
73
|
+
apiKey: 'sk-...',
|
|
74
|
+
timeout: 30000,
|
|
75
|
+
retryStrategy: new ExponentialBackoff({ maxAttempts: 3 }),
|
|
76
|
+
},
|
|
77
|
+
params: { temperature: 0.7, max_tokens: 1000 },
|
|
78
|
+
system: 'You are helpful.',
|
|
79
|
+
});
|
|
80
|
+
```
|
|
81
|
+
|
|
82
|
+
## License
|
|
83
|
+
|
|
84
|
+
MIT
|
|
@@ -0,0 +1,41 @@
|
|
|
1
|
+
import { b as Provider } from '../provider-CUJWjgNl.js';

/**
 * Anthropic-specific LLM parameters.
 * These are passed through to the Anthropic Messages API.
 */
interface AnthropicLLMParams {
  /** Maximum number of tokens to generate (required by Anthropic; the bundled runtime falls back to 4096 when params are omitted) */
  max_tokens: number;
  /** Temperature for randomness (0.0 - 1.0) */
  temperature?: number;
  /** Top-p (nucleus) sampling */
  top_p?: number;
  /** Top-k sampling */
  top_k?: number;
  /** Custom stop sequences */
  stop_sequences?: string[];
  /** Metadata for the request */
  metadata?: {
    /** Opaque end-user identifier forwarded to Anthropic */
    user_id?: string;
  };
  /** Extended thinking configuration */
  thinking?: {
    type: 'enabled';
    budget_tokens: number;
  };
  /**
   * Service tier for priority/standard capacity
   * - "auto": Automatically select based on availability (default)
   * - "standard_only": Only use standard capacity
   */
  service_tier?: 'auto' | 'standard_only';
}

/**
 * Anthropic provider
 * Supports LLM modality with Claude models
 */
declare const anthropic: Provider<unknown>;

export { type AnthropicLLMParams, anthropic };
|
|
@@ -0,0 +1,500 @@
|
|
|
1
|
+
import {
|
|
2
|
+
createProvider
|
|
3
|
+
} from "../chunk-Y6Q7JCNP.js";
|
|
4
|
+
import {
|
|
5
|
+
AssistantMessage,
|
|
6
|
+
isAssistantMessage,
|
|
7
|
+
isToolResultMessage,
|
|
8
|
+
isUserMessage
|
|
9
|
+
} from "../chunk-QUUX4G7U.js";
|
|
10
|
+
import {
|
|
11
|
+
UPPError,
|
|
12
|
+
doFetch,
|
|
13
|
+
doStreamFetch,
|
|
14
|
+
normalizeHttpError,
|
|
15
|
+
parseSSEStream,
|
|
16
|
+
resolveApiKey
|
|
17
|
+
} from "../chunk-FTFX2VET.js";
|
|
18
|
+
|
|
19
|
+
// src/providers/anthropic/transform.ts
|
|
20
|
+
/**
 * Build the Anthropic Messages API request body from a UPP LLM request.
 *
 * Optional sampling/config parameters are copied onto the payload only when
 * present, so the serialized request stays minimal. A `structure` schema is
 * emulated by appending a forced "json_response" tool call.
 */
function transformRequest(request, modelId) {
  const params = request.params ?? { max_tokens: 4096 };
  const payload = {
    model: modelId,
    max_tokens: params.max_tokens ?? 4096,
    messages: request.messages.map(transformMessage)
  };
  if (request.system) {
    payload.system = request.system;
  }
  // Numeric/enum params are gated on explicit definition (0 is a valid value).
  if (params.temperature !== undefined) payload.temperature = params.temperature;
  if (params.top_p !== undefined) payload.top_p = params.top_p;
  if (params.top_k !== undefined) payload.top_k = params.top_k;
  // Object/array params are gated on truthiness, matching the original contract.
  if (params.stop_sequences) payload.stop_sequences = params.stop_sequences;
  if (params.metadata) payload.metadata = params.metadata;
  if (params.thinking) payload.thinking = params.thinking;
  if (params.service_tier !== undefined) payload.service_tier = params.service_tier;
  if (request.tools && request.tools.length > 0) {
    payload.tools = request.tools.map(transformTool);
    payload.tool_choice = { type: "auto" };
  }
  if (request.structure) {
    // Structured output is implemented as a mandatory synthetic tool.
    const structuredTool = {
      name: "json_response",
      description: "Return the response in the specified JSON format. You MUST use this tool to provide your response.",
      input_schema: {
        type: "object",
        properties: request.structure.properties,
        required: request.structure.required
      }
    };
    payload.tools = [...payload.tools ?? [], structuredTool];
    payload.tool_choice = { type: "tool", name: "json_response" };
  }
  return payload;
}
|
|
70
|
+
/**
 * Drop malformed entries from a content array, keeping only blocks that are
 * non-null objects with a string `type` discriminator.
 */
function filterValidContent(content) {
  const isWellFormed = (block) => Boolean(block) && typeof block.type === "string";
  return content.filter(isWellFormed);
}
|
|
73
|
+
/**
 * Convert a UPP message into an Anthropic Messages API message.
 *
 * - user messages: valid content blocks, translated one-to-one
 * - assistant messages: content blocks plus any tool calls appended as
 *   `tool_use` blocks
 * - tool-result messages: sent back as a "user" turn of `tool_result` blocks,
 *   which is the shape the Anthropic API expects
 *
 * Throws for any message type not covered above.
 */
function transformMessage(message) {
  if (isUserMessage(message)) {
    return {
      role: "user",
      content: filterValidContent(message.content).map(transformContentBlock)
    };
  }
  if (isAssistantMessage(message)) {
    const blocks = filterValidContent(message.content).map(transformContentBlock);
    for (const call of message.toolCalls ?? []) {
      blocks.push({
        type: "tool_use",
        id: call.toolCallId,
        name: call.toolName,
        input: call.arguments
      });
    }
    return { role: "assistant", content: blocks };
  }
  if (isToolResultMessage(message)) {
    const toResultBlock = (result) => ({
      type: "tool_result",
      tool_use_id: result.toolCallId,
      // Non-string results are serialized so the wire format is always text.
      content: typeof result.result === "string" ? result.result : JSON.stringify(result.result),
      is_error: result.isError
    });
    return { role: "user", content: message.results.map(toResultBlock) };
  }
  throw new Error(`Unknown message type: ${message.type}`);
}
|
|
112
|
+
/**
 * Translate a single UPP content block into its Anthropic wire format.
 *
 * Text passes through; images are normalized from three source kinds
 * (base64, url, raw bytes — bytes are base64-encoded via btoa). Anything
 * else throws, since this adapter only supports text and image input.
 */
function transformContentBlock(block) {
  if (block.type === "text") {
    return { type: "text", text: block.text };
  }
  if (block.type === "image") {
    const { source, mimeType } = block;
    if (source.type === "base64") {
      return {
        type: "image",
        source: { type: "base64", media_type: mimeType, data: source.data }
      };
    }
    if (source.type === "url") {
      return {
        type: "image",
        source: { type: "url", url: source.url }
      };
    }
    if (source.type === "bytes") {
      // Build a binary string byte-by-byte, then base64-encode it.
      let binary = "";
      for (const byte of source.data) {
        binary += String.fromCharCode(byte);
      }
      return {
        type: "image",
        source: { type: "base64", media_type: mimeType, data: btoa(binary) }
      };
    }
    throw new Error(`Unknown image source type`);
  }
  throw new Error(`Unsupported content type: ${block.type}`);
}
|
|
156
|
+
/**
 * Map a UPP tool definition onto the Anthropic tool schema
 * (JSON-schema parameters become `input_schema`).
 */
function transformTool(tool) {
  const { name, description, parameters } = tool;
  return {
    name,
    description,
    input_schema: {
      type: "object",
      properties: parameters.properties,
      required: parameters.required
    }
  };
}
|
|
167
|
+
/**
 * Convert a non-streaming Anthropic Messages API response into the UPP
 * completion shape: { message, usage, stopReason, data }.
 *
 * A tool_use block named "json_response" is the emulated structured-output
 * channel: its input becomes `data` while still being recorded as a tool call,
 * matching the request-side emulation in transformRequest.
 */
function transformResponse(data) {
  const textContent = [];
  const toolCalls = [];
  let structuredData;
  for (const block of data.content) {
    if (block.type === "text") {
      textContent.push({ type: "text", text: block.text });
      continue;
    }
    if (block.type !== "tool_use") continue;
    if (block.name === "json_response") {
      structuredData = block.input;
    }
    toolCalls.push({
      toolCallId: block.id,
      toolName: block.name,
      arguments: block.input
    });
  }
  const { input_tokens: inputTokens, output_tokens: outputTokens } = data.usage;
  const message = new AssistantMessage(
    textContent,
    toolCalls.length > 0 ? toolCalls : undefined,
    {
      id: data.id,
      metadata: {
        anthropic: {
          stop_reason: data.stop_reason,
          stop_sequence: data.stop_sequence,
          model: data.model
        }
      }
    }
  );
  return {
    message,
    usage: { inputTokens, outputTokens, totalTokens: inputTokens + outputTokens },
    stopReason: data.stop_reason ?? "end_turn",
    data: structuredData
  };
}
|
|
211
|
+
/**
 * Create a fresh accumulator for one streamed Anthropic message.
 * Every call returns an independent object (including its own `content`
 * array) that transformStreamEvent mutates as SSE events arrive.
 */
function createStreamState() {
  const initial = {
    messageId: "",
    model: "",
    content: [],
    stopReason: null,
    inputTokens: 0,
    outputTokens: 0
  };
  return initial;
}
|
|
221
|
+
/**
 * Map a raw Anthropic SSE event onto a UPP stream event, accumulating
 * message id/model, content text, tool-call JSON, and usage counters into
 * `state` as a side effect.
 *
 * Returns null for events with no UPP equivalent: ping, error (handled by
 * the caller), message_delta (bookkeeping only), and unrecognized kinds.
 */
function transformStreamEvent(event, state) {
  const kind = event.type;
  if (kind === "message_start") {
    state.messageId = event.message.id;
    state.model = event.message.model;
    state.inputTokens = event.message.usage.input_tokens;
    return { type: "message_start", index: 0, delta: {} };
  }
  if (kind === "content_block_start") {
    const started = event.content_block;
    // Only text and tool_use blocks are tracked; other kinds leave no slot.
    if (started.type === "text") {
      state.content[event.index] = { type: "text", text: "" };
    } else if (started.type === "tool_use") {
      state.content[event.index] = {
        type: "tool_use",
        id: started.id,
        name: started.name,
        input: ""
      };
    }
    return { type: "content_block_start", index: event.index, delta: {} };
  }
  if (kind === "content_block_delta") {
    const delta = event.delta;
    const tracked = state.content[event.index];
    if (delta.type === "text_delta") {
      if (tracked) {
        tracked.text = (tracked.text ?? "") + delta.text;
      }
      return { type: "text_delta", index: event.index, delta: { text: delta.text } };
    }
    if (delta.type === "input_json_delta") {
      if (tracked) {
        tracked.input = (tracked.input ?? "") + delta.partial_json;
      }
      return {
        type: "tool_call_delta",
        index: event.index,
        delta: {
          argumentsJson: delta.partial_json,
          toolCallId: tracked?.id,
          toolName: tracked?.name
        }
      };
    }
    if (delta.type === "thinking_delta") {
      // Reasoning text is surfaced but not accumulated into the final message.
      return { type: "reasoning_delta", index: event.index, delta: { text: delta.thinking } };
    }
    return null;
  }
  if (kind === "content_block_stop") {
    return { type: "content_block_stop", index: event.index, delta: {} };
  }
  if (kind === "message_delta") {
    state.stopReason = event.delta.stop_reason;
    state.outputTokens = event.usage.output_tokens;
    return null;
  }
  if (kind === "message_stop") {
    return { type: "message_stop", index: 0, delta: {} };
  }
  // ping, error, and anything unrecognized produce no UPP event.
  return null;
}
|
|
290
|
+
/**
 * Assemble the final UPP completion from the accumulated stream state once
 * the SSE stream has ended. Mirrors transformResponse, except tool-call
 * arguments arrive as concatenated JSON text and must be parsed here.
 */
function buildResponseFromState(state) {
  const textContent = [];
  const toolCalls = [];
  let structuredData;
  for (const block of state.content) {
    if (block.type === "text" && block.text) {
      textContent.push({ type: "text", text: block.text });
      continue;
    }
    if (block.type !== "tool_use" || !block.id || !block.name) continue;
    let parsedArgs = {};
    if (block.input) {
      try {
        parsedArgs = JSON.parse(block.input);
      } catch {
        // Tolerate truncated/invalid JSON from an interrupted stream; keep {}.
      }
    }
    if (block.name === "json_response") {
      // Emulated structured-output channel (see transformRequest).
      structuredData = parsedArgs;
    }
    toolCalls.push({
      toolCallId: block.id,
      toolName: block.name,
      arguments: parsedArgs
    });
  }
  const message = new AssistantMessage(
    textContent,
    toolCalls.length > 0 ? toolCalls : undefined,
    {
      id: state.messageId,
      metadata: {
        anthropic: {
          stop_reason: state.stopReason,
          model: state.model
        }
      }
    }
  );
  const usage = {
    inputTokens: state.inputTokens,
    outputTokens: state.outputTokens,
    totalTokens: state.inputTokens + state.outputTokens
  };
  return {
    message,
    usage,
    stopReason: state.stopReason ?? "end_turn",
    data: structuredData
  };
}
|
|
340
|
+
|
|
341
|
+
// src/providers/anthropic/llm.ts
// Default Messages API endpoint; overridable per request via config.baseUrl.
var ANTHROPIC_API_URL = "https://api.anthropic.com/v1/messages";
// Sent in the "anthropic-version" header unless config.apiVersion is set.
var ANTHROPIC_VERSION = "2023-06-01";
// Static capability flags advertised by every model bound through this handler.
var ANTHROPIC_CAPABILITIES = {
  streaming: true,
  tools: true,
  structuredOutput: true,
  imageInput: true,
  videoInput: false,
  audioInput: false
};
|
|
352
|
+
// Create the LLM modality handler for the Anthropic provider.
// The returned object is wired up by createProvider(), which injects the
// provider reference via _setProvider before bind() may be used.
function createLLMHandler() {
  let providerRef = null;
  return {
    // Called by createProvider() to give bound models a back-reference.
    _setProvider(provider) {
      providerRef = provider;
    },
    // Bind a concrete model id, returning an object with complete()/stream().
    bind(modelId) {
      if (!providerRef) {
        throw new UPPError(
          "Provider reference not set. Handler must be used with createProvider().",
          "INVALID_REQUEST",
          "anthropic",
          "llm"
        );
      }
      const model = {
        modelId,
        capabilities: ANTHROPIC_CAPABILITIES,
        get provider() {
          return providerRef;
        },
        // One-shot (non-streaming) completion against the Messages API.
        async complete(request) {
          const apiKey = await resolveApiKey(
            request.config,
            "ANTHROPIC_API_KEY",
            "anthropic",
            "llm"
          );
          const baseUrl = request.config.baseUrl ?? ANTHROPIC_API_URL;
          const body = transformRequest(request, modelId);
          const response = await doFetch(
            baseUrl,
            {
              method: "POST",
              headers: {
                "Content-Type": "application/json",
                "x-api-key": apiKey,
                "anthropic-version": request.config.apiVersion ?? ANTHROPIC_VERSION
              },
              body: JSON.stringify(body),
              signal: request.signal
            },
            request.config,
            "anthropic",
            "llm"
          );
          const data = await response.json();
          return transformResponse(data);
        },
        // Streaming completion: returns an async-iterable of UPP events plus
        // a `response` promise that settles with the assembled final result.
        stream(request) {
          const state = createStreamState();
          let responseResolve;
          let responseReject;
          // Deferred promise: settled from inside the generator below.
          const responsePromise = new Promise((resolve, reject) => {
            responseResolve = resolve;
            responseReject = reject;
          });
          async function* generateEvents() {
            try {
              const apiKey = await resolveApiKey(
                request.config,
                "ANTHROPIC_API_KEY",
                "anthropic",
                "llm"
              );
              const baseUrl = request.config.baseUrl ?? ANTHROPIC_API_URL;
              const body = transformRequest(request, modelId);
              body.stream = true;
              const response = await doStreamFetch(
                baseUrl,
                {
                  method: "POST",
                  headers: {
                    "Content-Type": "application/json",
                    "x-api-key": apiKey,
                    "anthropic-version": request.config.apiVersion ?? ANTHROPIC_VERSION
                  },
                  body: JSON.stringify(body),
                  signal: request.signal
                },
                request.config,
                "anthropic",
                "llm"
              );
              if (!response.ok) {
                const error = await normalizeHttpError(response, "anthropic", "llm");
                responseReject(error);
                throw error;
              }
              if (!response.body) {
                const error = new UPPError(
                  "No response body for streaming request",
                  "PROVIDER_ERROR",
                  "anthropic",
                  "llm"
                );
                responseReject(error);
                throw error;
              }
              // Each SSE payload is an Anthropic event; "error" aborts the
              // stream, everything else is folded into `state` and (when it
              // maps to a UPP event) yielded to the consumer.
              for await (const data of parseSSEStream(response.body)) {
                if (typeof data === "object" && data !== null && "type" in data) {
                  const event = data;
                  if (event.type === "error") {
                    const error = new UPPError(
                      event.error.message,
                      "PROVIDER_ERROR",
                      "anthropic",
                      "llm"
                    );
                    responseReject(error);
                    throw error;
                  }
                  const uppEvent = transformStreamEvent(event, state);
                  if (uppEvent) {
                    yield uppEvent;
                  }
                }
              }
              responseResolve(buildResponseFromState(state));
            } catch (error) {
              // Re-reject is a no-op when already rejected above; this covers
              // failures thrown before the promise was settled.
              responseReject(error);
              throw error;
            }
          }
          return {
            [Symbol.asyncIterator]() {
              return generateEvents();
            },
            response: responsePromise
          };
        }
      };
      return model;
    }
  };
}
|
|
488
|
+
|
|
489
|
+
// src/providers/anthropic/index.ts
|
|
490
|
+
// Register the Anthropic provider with its single (LLM) modality handler.
var anthropic = createProvider({
  name: "anthropic",
  version: "1.0.0",
  modalities: {
    llm: createLLMHandler()
  }
});
export {
  anthropic
};
//# sourceMappingURL=index.js.map
|