@providerprotocol/ai 0.0.11 → 0.0.13

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (104)
  1. package/dist/anthropic/index.d.ts +51 -15
  2. package/dist/anthropic/index.js +54 -19
  3. package/dist/anthropic/index.js.map +1 -1
  4. package/dist/{chunk-SUNYWHTH.js → chunk-MOU4U3PO.js} +55 -3
  5. package/dist/chunk-MOU4U3PO.js.map +1 -0
  6. package/dist/{chunk-Y6Q7JCNP.js → chunk-MSR5P65T.js} +1 -1
  7. package/dist/chunk-MSR5P65T.js.map +1 -0
  8. package/dist/{chunk-W4BB4BG2.js → chunk-SVYROCLD.js} +31 -11
  9. package/dist/chunk-SVYROCLD.js.map +1 -0
  10. package/dist/chunk-U4JJC2YX.js +234 -0
  11. package/dist/chunk-U4JJC2YX.js.map +1 -0
  12. package/dist/{chunk-X5G4EHL7.js → chunk-Z7RBRCRN.js} +1 -1
  13. package/dist/chunk-Z7RBRCRN.js.map +1 -0
  14. package/dist/google/index.d.ts +376 -7
  15. package/dist/google/index.js +127 -15
  16. package/dist/google/index.js.map +1 -1
  17. package/dist/http/index.d.ts +222 -25
  18. package/dist/http/index.js +3 -3
  19. package/dist/index.d.ts +1482 -198
  20. package/dist/index.js +233 -49
  21. package/dist/index.js.map +1 -1
  22. package/dist/ollama/index.d.ts +92 -20
  23. package/dist/ollama/index.js +17 -7
  24. package/dist/ollama/index.js.map +1 -1
  25. package/dist/openai/index.d.ts +340 -61
  26. package/dist/openai/index.js +57 -15
  27. package/dist/openai/index.js.map +1 -1
  28. package/dist/openrouter/index.d.ts +107 -51
  29. package/dist/openrouter/index.js +36 -8
  30. package/dist/openrouter/index.js.map +1 -1
  31. package/dist/provider-mKkz7Q9U.d.ts +488 -0
  32. package/dist/retry-Dh70lgr0.d.ts +508 -0
  33. package/dist/xai/index.d.ts +97 -22
  34. package/dist/xai/index.js +55 -19
  35. package/dist/xai/index.js.map +1 -1
  36. package/package.json +8 -12
  37. package/dist/chunk-CUCRF5W6.js +0 -136
  38. package/dist/chunk-CUCRF5W6.js.map +0 -1
  39. package/dist/chunk-SUNYWHTH.js.map +0 -1
  40. package/dist/chunk-W4BB4BG2.js.map +0 -1
  41. package/dist/chunk-X5G4EHL7.js.map +0 -1
  42. package/dist/chunk-Y6Q7JCNP.js.map +0 -1
  43. package/dist/provider-CUJWjgNl.d.ts +0 -192
  44. package/dist/retry-I2661_rv.d.ts +0 -118
  45. package/src/anthropic/index.ts +0 -3
  46. package/src/core/image.ts +0 -188
  47. package/src/core/llm.ts +0 -650
  48. package/src/core/provider.ts +0 -92
  49. package/src/google/index.ts +0 -3
  50. package/src/http/errors.ts +0 -112
  51. package/src/http/fetch.ts +0 -210
  52. package/src/http/index.ts +0 -31
  53. package/src/http/keys.ts +0 -136
  54. package/src/http/retry.ts +0 -205
  55. package/src/http/sse.ts +0 -136
  56. package/src/index.ts +0 -32
  57. package/src/ollama/index.ts +0 -3
  58. package/src/openai/index.ts +0 -39
  59. package/src/openrouter/index.ts +0 -11
  60. package/src/providers/anthropic/index.ts +0 -17
  61. package/src/providers/anthropic/llm.ts +0 -196
  62. package/src/providers/anthropic/transform.ts +0 -434
  63. package/src/providers/anthropic/types.ts +0 -213
  64. package/src/providers/google/index.ts +0 -17
  65. package/src/providers/google/llm.ts +0 -203
  66. package/src/providers/google/transform.ts +0 -447
  67. package/src/providers/google/types.ts +0 -214
  68. package/src/providers/ollama/index.ts +0 -43
  69. package/src/providers/ollama/llm.ts +0 -272
  70. package/src/providers/ollama/transform.ts +0 -434
  71. package/src/providers/ollama/types.ts +0 -260
  72. package/src/providers/openai/index.ts +0 -186
  73. package/src/providers/openai/llm.completions.ts +0 -201
  74. package/src/providers/openai/llm.responses.ts +0 -211
  75. package/src/providers/openai/transform.completions.ts +0 -561
  76. package/src/providers/openai/transform.responses.ts +0 -708
  77. package/src/providers/openai/types.ts +0 -1249
  78. package/src/providers/openrouter/index.ts +0 -177
  79. package/src/providers/openrouter/llm.completions.ts +0 -201
  80. package/src/providers/openrouter/llm.responses.ts +0 -211
  81. package/src/providers/openrouter/transform.completions.ts +0 -538
  82. package/src/providers/openrouter/transform.responses.ts +0 -742
  83. package/src/providers/openrouter/types.ts +0 -717
  84. package/src/providers/xai/index.ts +0 -223
  85. package/src/providers/xai/llm.completions.ts +0 -201
  86. package/src/providers/xai/llm.messages.ts +0 -195
  87. package/src/providers/xai/llm.responses.ts +0 -211
  88. package/src/providers/xai/transform.completions.ts +0 -565
  89. package/src/providers/xai/transform.messages.ts +0 -448
  90. package/src/providers/xai/transform.responses.ts +0 -678
  91. package/src/providers/xai/types.ts +0 -938
  92. package/src/types/content.ts +0 -133
  93. package/src/types/errors.ts +0 -85
  94. package/src/types/index.ts +0 -105
  95. package/src/types/llm.ts +0 -211
  96. package/src/types/messages.ts +0 -205
  97. package/src/types/provider.ts +0 -195
  98. package/src/types/schema.ts +0 -58
  99. package/src/types/stream.ts +0 -188
  100. package/src/types/thread.ts +0 -226
  101. package/src/types/tool.ts +0 -88
  102. package/src/types/turn.ts +0 -118
  103. package/src/utils/id.ts +0 -28
  104. package/src/xai/index.ts +0 -41
package/src/providers/anthropic/transform.ts
@@ -1,434 +0,0 @@
- import type { LLMRequest, LLMResponse } from '../../types/llm.ts';
- import type { Message } from '../../types/messages.ts';
- import type { StreamEvent } from '../../types/stream.ts';
- import type { Tool, ToolCall } from '../../types/tool.ts';
- import type { TokenUsage } from '../../types/turn.ts';
- import type { ContentBlock, TextBlock, ImageBlock } from '../../types/content.ts';
- import {
-   AssistantMessage,
-   UserMessage,
-   ToolResultMessage,
-   isUserMessage,
-   isAssistantMessage,
-   isToolResultMessage,
- } from '../../types/messages.ts';
- import type {
-   AnthropicLLMParams,
-   AnthropicRequest,
-   AnthropicMessage,
-   AnthropicContent,
-   AnthropicTool,
-   AnthropicResponse,
-   AnthropicStreamEvent,
-   AnthropicContentBlockDeltaEvent,
- } from './types.ts';
-
- /**
-  * Transform UPP request to Anthropic format
-  *
-  * Params are spread directly to allow pass-through of any Anthropic API fields,
-  * even those not explicitly defined in our type. This enables developers to
-  * use new API features without waiting for library updates.
-  */
- export function transformRequest<TParams extends AnthropicLLMParams>(
-   request: LLMRequest<TParams>,
-   modelId: string
- ): AnthropicRequest {
-   const params = (request.params ?? {}) as AnthropicLLMParams;
-
-   // Spread params to pass through all fields, then set required fields
-   const anthropicRequest: AnthropicRequest = {
-     ...params,
-     model: modelId,
-     messages: request.messages.map(transformMessage),
-   };
-
-   // System prompt (top-level in Anthropic)
-   if (request.system) {
-     anthropicRequest.system = request.system;
-   }
-
-   // Tools come from request, not params
-   if (request.tools && request.tools.length > 0) {
-     anthropicRequest.tools = request.tools.map(transformTool);
-     anthropicRequest.tool_choice = { type: 'auto' };
-   }
-
-   // Structured output via tool-based approach
-   // Anthropic doesn't have native structured output, so we use a tool to enforce the schema
-   if (request.structure) {
-     const structuredTool: AnthropicTool = {
-       name: 'json_response',
-       description: 'Return the response in the specified JSON format. You MUST use this tool to provide your response.',
-       input_schema: {
-         type: 'object',
-         properties: request.structure.properties,
-         required: request.structure.required,
-       },
-     };
-
-     // Add the structured output tool (may coexist with user tools)
-     anthropicRequest.tools = [...(anthropicRequest.tools ?? []), structuredTool];
-     // Force the model to use the json_response tool
-     anthropicRequest.tool_choice = { type: 'tool', name: 'json_response' };
-   }
-
-   return anthropicRequest;
- }
-
- /**
-  * Filter to only valid content blocks with a type property
-  */
- function filterValidContent<T extends { type?: string }>(content: T[]): T[] {
-   return content.filter((c) => c && typeof c.type === 'string');
- }
-
- /**
-  * Transform a UPP Message to Anthropic format
-  */
- function transformMessage(message: Message): AnthropicMessage {
-   if (isUserMessage(message)) {
-     const validContent = filterValidContent(message.content);
-     return {
-       role: 'user',
-       content: validContent.map(transformContentBlock),
-     };
-   }
-
-   if (isAssistantMessage(message)) {
-     const validContent = filterValidContent(message.content);
-     const content: AnthropicContent[] = validContent.map(transformContentBlock);
-
-     // Add tool calls as tool_use content blocks
-     if (message.toolCalls) {
-       for (const call of message.toolCalls) {
-         content.push({
-           type: 'tool_use',
-           id: call.toolCallId,
-           name: call.toolName,
-           input: call.arguments,
-         });
-       }
-     }
-
-     return {
-       role: 'assistant',
-       content,
-     };
-   }
-
-   if (isToolResultMessage(message)) {
-     // Tool results are sent as user messages with tool_result content
-     return {
-       role: 'user',
-       content: message.results.map((result) => ({
-         type: 'tool_result' as const,
-         tool_use_id: result.toolCallId,
-         content:
-           typeof result.result === 'string'
-             ? result.result
-             : JSON.stringify(result.result),
-         is_error: result.isError,
-       })),
-     };
-   }
-
-   throw new Error(`Unknown message type: ${message.type}`);
- }
-
- /**
-  * Transform a content block to Anthropic format
-  */
- function transformContentBlock(block: ContentBlock): AnthropicContent {
-   switch (block.type) {
-     case 'text':
-       return { type: 'text', text: block.text };
-
-     case 'image': {
-       const imageBlock = block as ImageBlock;
-       if (imageBlock.source.type === 'base64') {
-         return {
-           type: 'image',
-           source: {
-             type: 'base64',
-             media_type: imageBlock.mimeType,
-             data: imageBlock.source.data,
-           },
-         };
-       }
-       if (imageBlock.source.type === 'url') {
-         return {
-           type: 'image',
-           source: {
-             type: 'url',
-             url: imageBlock.source.url,
-           },
-         };
-       }
-       if (imageBlock.source.type === 'bytes') {
-         // Convert bytes to base64
-         const base64 = btoa(
-           Array.from(imageBlock.source.data)
-             .map((b) => String.fromCharCode(b))
-             .join('')
-         );
-         return {
-           type: 'image',
-           source: {
-             type: 'base64',
-             media_type: imageBlock.mimeType,
-             data: base64,
-           },
-         };
-       }
-       throw new Error(`Unknown image source type`);
-     }
-
-     default:
-       throw new Error(`Unsupported content type: ${block.type}`);
-   }
- }
-
- /**
-  * Transform a UPP Tool to Anthropic format
-  */
- function transformTool(tool: Tool): AnthropicTool {
-   return {
-     name: tool.name,
-     description: tool.description,
-     input_schema: {
-       type: 'object',
-       properties: tool.parameters.properties,
-       required: tool.parameters.required,
-     },
-   };
- }
-
- /**
-  * Transform Anthropic response to UPP LLMResponse
-  */
- export function transformResponse(data: AnthropicResponse): LLMResponse {
-   // Extract text content
-   const textContent: TextBlock[] = [];
-   const toolCalls: ToolCall[] = [];
-   let structuredData: unknown;
-
-   for (const block of data.content) {
-     if (block.type === 'text') {
-       textContent.push({ type: 'text', text: block.text });
-     } else if (block.type === 'tool_use') {
-       // Check if this is the json_response tool (structured output)
-       if (block.name === 'json_response') {
-         // Extract structured data from tool arguments
-         structuredData = block.input;
-       }
-       toolCalls.push({
-         toolCallId: block.id,
-         toolName: block.name,
-         arguments: block.input,
-       });
-     }
-     // Skip thinking blocks for now
-   }
-
-   const message = new AssistantMessage(
-     textContent,
-     toolCalls.length > 0 ? toolCalls : undefined,
-     {
-       id: data.id,
-       metadata: {
-         anthropic: {
-           stop_reason: data.stop_reason,
-           stop_sequence: data.stop_sequence,
-           model: data.model,
-         },
-       },
-     }
-   );
-
-   const usage: TokenUsage = {
-     inputTokens: data.usage.input_tokens,
-     outputTokens: data.usage.output_tokens,
-     totalTokens: data.usage.input_tokens + data.usage.output_tokens,
-   };
-
-   return {
-     message,
-     usage,
-     stopReason: data.stop_reason ?? 'end_turn',
-     data: structuredData,
-   };
- }
-
- /**
-  * State for accumulating streaming response
-  */
- export interface StreamState {
-   messageId: string;
-   model: string;
-   content: Array<{ type: string; text?: string; id?: string; name?: string; input?: string }>;
-   stopReason: string | null;
-   inputTokens: number;
-   outputTokens: number;
- }
-
- /**
-  * Create initial stream state
-  */
- export function createStreamState(): StreamState {
-   return {
-     messageId: '',
-     model: '',
-     content: [],
-     stopReason: null,
-     inputTokens: 0,
-     outputTokens: 0,
-   };
- }
-
- /**
-  * Transform Anthropic stream event to UPP StreamEvent
-  * Returns null for events that don't produce UPP events
-  */
- export function transformStreamEvent(
-   event: AnthropicStreamEvent,
-   state: StreamState
- ): StreamEvent | null {
-   switch (event.type) {
-     case 'message_start':
-       state.messageId = event.message.id;
-       state.model = event.message.model;
-       state.inputTokens = event.message.usage.input_tokens;
-       return { type: 'message_start', index: 0, delta: {} };
-
-     case 'content_block_start':
-       // Initialize content block
-       if (event.content_block.type === 'text') {
-         state.content[event.index] = { type: 'text', text: '' };
-       } else if (event.content_block.type === 'tool_use') {
-         state.content[event.index] = {
-           type: 'tool_use',
-           id: event.content_block.id,
-           name: event.content_block.name,
-           input: '',
-         };
-       }
-       return { type: 'content_block_start', index: event.index, delta: {} };
-
-     case 'content_block_delta': {
-       const delta = event.delta;
-       if (delta.type === 'text_delta') {
-         if (state.content[event.index]) {
-           state.content[event.index]!.text =
-             (state.content[event.index]!.text ?? '') + delta.text;
-         }
-         return {
-           type: 'text_delta',
-           index: event.index,
-           delta: { text: delta.text },
-         };
-       }
-       if (delta.type === 'input_json_delta') {
-         if (state.content[event.index]) {
-           state.content[event.index]!.input =
-             (state.content[event.index]!.input ?? '') + delta.partial_json;
-         }
-         return {
-           type: 'tool_call_delta',
-           index: event.index,
-           delta: {
-             argumentsJson: delta.partial_json,
-             toolCallId: state.content[event.index]?.id,
-             toolName: state.content[event.index]?.name,
-           },
-         };
-       }
-       if (delta.type === 'thinking_delta') {
-         return {
-           type: 'reasoning_delta',
-           index: event.index,
-           delta: { text: delta.thinking },
-         };
-       }
-       return null;
-     }
-
-     case 'content_block_stop':
-       return { type: 'content_block_stop', index: event.index, delta: {} };
-
-     case 'message_delta':
-       state.stopReason = event.delta.stop_reason;
-       state.outputTokens = event.usage.output_tokens;
-       return null;
-
-     case 'message_stop':
-       return { type: 'message_stop', index: 0, delta: {} };
-
-     case 'ping':
-     case 'error':
-       return null;
-
-     default:
-       return null;
-   }
- }
-
- /**
-  * Build LLMResponse from accumulated stream state
-  */
- export function buildResponseFromState(state: StreamState): LLMResponse {
-   const textContent: TextBlock[] = [];
-   const toolCalls: ToolCall[] = [];
-   let structuredData: unknown;
-
-   for (const block of state.content) {
-     if (block.type === 'text' && block.text) {
-       textContent.push({ type: 'text', text: block.text });
-     } else if (block.type === 'tool_use' && block.id && block.name) {
-       let args: Record<string, unknown> = {};
-       if (block.input) {
-         try {
-           args = JSON.parse(block.input);
-         } catch {
-           // Invalid JSON - use empty object
-         }
-       }
-       // Check if this is the json_response tool (structured output)
-       if (block.name === 'json_response') {
-         structuredData = args;
-       }
-       toolCalls.push({
-         toolCallId: block.id,
-         toolName: block.name,
-         arguments: args,
-       });
-     }
-   }
-
-   const message = new AssistantMessage(
-     textContent,
-     toolCalls.length > 0 ? toolCalls : undefined,
-     {
-       id: state.messageId,
-       metadata: {
-         anthropic: {
-           stop_reason: state.stopReason,
-           model: state.model,
-         },
-       },
-     }
-   );
-
-   const usage: TokenUsage = {
-     inputTokens: state.inputTokens,
-     outputTokens: state.outputTokens,
-     totalTokens: state.inputTokens + state.outputTokens,
-   };
-
-   return {
-     message,
-     usage,
-     stopReason: state.stopReason ?? 'end_turn',
-     data: structuredData,
-   };
- }
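
The structured-output path in transformRequest above injects a forced json_response tool because the Anthropic API has no native structured-output mode. Below is a minimal, hedged sketch of that path: the function and field names come from the removed source above, but the import paths, the model id, and the request literal (cast because the full LLMRequest shape lives in the removed src/types/llm.ts, which is not reproduced here) are illustrative assumptions, not the published 0.0.13 API.

import { transformRequest } from './providers/anthropic/transform.ts'; // hypothetical local copy of the removed module
import type { AnthropicLLMParams } from './providers/anthropic/types.ts';
import type { LLMRequest } from './types/llm.ts';

// Request with a JSON schema under `structure`; params are spread straight through.
const request = {
  messages: [],
  structure: {
    type: 'object',
    properties: { answer: { type: 'string' } },
    required: ['answer'],
  },
  params: { max_tokens: 256 },
} as unknown as LLMRequest<AnthropicLLMParams>;

const body = transformRequest(request, 'claude-example-model');
// body.max_tokens  -> 256 (spread from params)
// body.tools       -> [{ name: 'json_response', ... }]
// body.tool_choice -> { type: 'tool', name: 'json_response' }
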
package/src/providers/anthropic/types.ts
@@ -1,213 +0,0 @@
- /**
-  * Anthropic-specific LLM parameters
-  * These are passed through to the Anthropic Messages API
-  */
- export interface AnthropicLLMParams {
-   /** Maximum number of tokens to generate (required by Anthropic API) */
-   max_tokens?: number;
-
-   /** Temperature for randomness (0.0 - 1.0) */
-   temperature?: number;
-
-   /** Top-p (nucleus) sampling */
-   top_p?: number;
-
-   /** Top-k sampling */
-   top_k?: number;
-
-   /** Custom stop sequences */
-   stop_sequences?: string[];
-
-   /** Metadata for the request */
-   metadata?: {
-     user_id?: string;
-   };
-
-   /** Extended thinking configuration */
-   thinking?: {
-     type: 'enabled';
-     budget_tokens: number;
-   };
-
-   /**
-    * Service tier for priority/standard capacity
-    * - "auto": Automatically select based on availability (default)
-    * - "standard_only": Only use standard capacity
-    */
-   service_tier?: 'auto' | 'standard_only';
- }
-
- /**
-  * Anthropic API request body
-  */
- export interface AnthropicRequest {
-   model: string;
-   max_tokens?: number;
-   messages: AnthropicMessage[];
-   system?: string;
-   temperature?: number;
-   top_p?: number;
-   top_k?: number;
-   stop_sequences?: string[];
-   stream?: boolean;
-   tools?: AnthropicTool[];
-   tool_choice?: { type: 'auto' | 'any' | 'tool'; name?: string };
-   metadata?: { user_id?: string };
-   thinking?: { type: 'enabled'; budget_tokens: number };
-   service_tier?: 'auto' | 'standard_only';
- }
-
- /**
-  * Anthropic message format
-  */
- export interface AnthropicMessage {
-   role: 'user' | 'assistant';
-   content: AnthropicContent[] | string;
- }
-
- /**
-  * Anthropic content types
-  */
- export type AnthropicContent =
-   | AnthropicTextContent
-   | AnthropicImageContent
-   | AnthropicToolUseContent
-   | AnthropicToolResultContent;
-
- export interface AnthropicTextContent {
-   type: 'text';
-   text: string;
- }
-
- export interface AnthropicImageContent {
-   type: 'image';
-   source: {
-     type: 'base64' | 'url';
-     media_type?: string;
-     data?: string;
-     url?: string;
-   };
- }
-
- export interface AnthropicToolUseContent {
-   type: 'tool_use';
-   id: string;
-   name: string;
-   input: Record<string, unknown>;
- }
-
- export interface AnthropicToolResultContent {
-   type: 'tool_result';
-   tool_use_id: string;
-   content: string | AnthropicContent[];
-   is_error?: boolean;
- }
-
- /**
-  * Anthropic tool format
-  */
- export interface AnthropicTool {
-   name: string;
-   description: string;
-   input_schema: {
-     type: 'object';
-     properties: Record<string, unknown>;
-     required?: string[];
-   };
- }
-
- /**
-  * Anthropic response format
-  */
- export interface AnthropicResponse {
-   id: string;
-   type: 'message';
-   role: 'assistant';
-   content: AnthropicResponseContent[];
-   model: string;
-   stop_reason: 'end_turn' | 'max_tokens' | 'stop_sequence' | 'tool_use' | 'pause_turn' | 'refusal' | null;
-   stop_sequence: string | null;
-   usage: {
-     input_tokens: number;
-     output_tokens: number;
-     cache_creation_input_tokens?: number;
-     cache_read_input_tokens?: number;
-   };
- }
-
- export type AnthropicResponseContent =
-   | AnthropicTextContent
-   | AnthropicToolUseContent
-   | AnthropicThinkingContent;
-
- export interface AnthropicThinkingContent {
-   type: 'thinking';
-   thinking: string;
-   signature?: string;
- }
-
- /**
-  * Anthropic streaming event types
-  */
- export type AnthropicStreamEvent =
-   | AnthropicMessageStartEvent
-   | AnthropicContentBlockStartEvent
-   | AnthropicContentBlockDeltaEvent
-   | AnthropicContentBlockStopEvent
-   | AnthropicMessageDeltaEvent
-   | AnthropicMessageStopEvent
-   | AnthropicPingEvent
-   | AnthropicErrorEvent;
-
- export interface AnthropicMessageStartEvent {
-   type: 'message_start';
-   message: AnthropicResponse;
- }
-
- export interface AnthropicContentBlockStartEvent {
-   type: 'content_block_start';
-   index: number;
-   content_block: AnthropicResponseContent;
- }
-
- export interface AnthropicContentBlockDeltaEvent {
-   type: 'content_block_delta';
-   index: number;
-   delta:
-     | { type: 'text_delta'; text: string }
-     | { type: 'thinking_delta'; thinking: string }
-     | { type: 'signature_delta'; signature: string }
-     | { type: 'input_json_delta'; partial_json: string };
- }
-
- export interface AnthropicContentBlockStopEvent {
-   type: 'content_block_stop';
-   index: number;
- }
-
- export interface AnthropicMessageDeltaEvent {
-   type: 'message_delta';
-   delta: {
-     stop_reason: string | null;
-     stop_sequence: string | null;
-   };
-   usage: {
-     output_tokens: number;
-   };
- }
-
- export interface AnthropicMessageStopEvent {
-   type: 'message_stop';
- }
-
- export interface AnthropicPingEvent {
-   type: 'ping';
- }
-
- export interface AnthropicErrorEvent {
-   type: 'error';
-   error: {
-     type: string;
-     message: string;
-   };
- }
package/src/providers/google/index.ts
@@ -1,17 +0,0 @@
- import { createProvider } from '../../core/provider.ts';
- import { createLLMHandler } from './llm.ts';
-
- /**
-  * Google Gemini provider
-  * Supports LLM modality with Gemini models
-  */
- export const google = createProvider({
-   name: 'google',
-   version: '1.0.0',
-   modalities: {
-     llm: createLLMHandler(),
-   },
- });
-
- // Re-export types
- export type { GoogleLLMParams } from './types.ts';
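
For the streaming side, the sketch below replays a short Anthropic SSE event sequence through the removed stream helpers (createStreamState, transformStreamEvent, buildResponseFromState from src/providers/anthropic/transform.ts above). Event shapes follow the AnthropicStreamEvent types shown above; the import paths and example values are assumptions for illustration only, not the published 0.0.13 API.

import {
  createStreamState,
  transformStreamEvent,
  buildResponseFromState,
} from './providers/anthropic/transform.ts'; // hypothetical local copy of the removed module
import type { AnthropicStreamEvent } from './providers/anthropic/types.ts';

// One streamed text block followed by the closing usage/stop events.
const events: AnthropicStreamEvent[] = [
  {
    type: 'message_start',
    message: {
      id: 'msg_example', type: 'message', role: 'assistant', content: [],
      model: 'claude-example-model', stop_reason: null, stop_sequence: null,
      usage: { input_tokens: 12, output_tokens: 0 },
    },
  },
  { type: 'content_block_start', index: 0, content_block: { type: 'text', text: '' } },
  { type: 'content_block_delta', index: 0, delta: { type: 'text_delta', text: 'Hello' } },
  { type: 'content_block_stop', index: 0 },
  {
    type: 'message_delta',
    delta: { stop_reason: 'end_turn', stop_sequence: null },
    usage: { output_tokens: 3 },
  },
  { type: 'message_stop' },
];

const state = createStreamState();
for (const event of events) {
  // Returns a UPP StreamEvent, or null for events with no UPP equivalent (ping, message_delta, ...).
  const uppEvent = transformStreamEvent(event, state);
  if (uppEvent) console.log(uppEvent.type, uppEvent.delta);
}

// Accumulated state -> LLMResponse; usage here is { inputTokens: 12, outputTokens: 3, totalTokens: 15 }.
const response = buildResponseFromState(state);
console.log(response.stopReason, response.usage);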