@zhin.js/ai 0.0.1

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
@@ -0,0 +1,375 @@
1
/**
 * @zhin.js/ai - Anthropic Provider
 * Supports the Claude model family
 */
5
+
6
+ import { BaseProvider } from './base.js';
7
+ import type {
8
+ ProviderConfig,
9
+ ChatCompletionRequest,
10
+ ChatCompletionResponse,
11
+ ChatCompletionChunk,
12
+ ChatMessage,
13
+ ToolDefinition,
14
+ ContentPart,
15
+ } from '../types.js';
16
+
17
/** Configuration options specific to the Anthropic provider. */
export interface AnthropicConfig extends ProviderConfig {
  // Value sent in the `anthropic-version` header; defaults to '2023-06-01'.
  anthropicVersion?: string;
}
20
+
21
+ /**
22
+ * Anthropic API 格式转换
23
+ */
24
+ function toAnthropicMessages(messages: ChatMessage[]): {
25
+ system?: string;
26
+ messages: any[];
27
+ } {
28
+ let system: string | undefined;
29
+ const anthropicMessages: any[] = [];
30
+
31
+ for (const msg of messages) {
32
+ if (msg.role === 'system') {
33
+ system = typeof msg.content === 'string'
34
+ ? msg.content
35
+ : msg.content.map(p => p.type === 'text' ? p.text : '').join('');
36
+ continue;
37
+ }
38
+
39
+ if (msg.role === 'tool') {
40
+ anthropicMessages.push({
41
+ role: 'user',
42
+ content: [{
43
+ type: 'tool_result',
44
+ tool_use_id: msg.tool_call_id,
45
+ content: typeof msg.content === 'string' ? msg.content : JSON.stringify(msg.content),
46
+ }],
47
+ });
48
+ continue;
49
+ }
50
+
51
+ let content: any;
52
+ if (typeof msg.content === 'string') {
53
+ content = msg.content;
54
+ } else {
55
+ content = msg.content.map((part: ContentPart) => {
56
+ if (part.type === 'text') {
57
+ return { type: 'text', text: part.text };
58
+ }
59
+ if (part.type === 'image_url') {
60
+ // Anthropic 需要 base64 格式
61
+ const url = part.image_url.url;
62
+ if (url.startsWith('data:')) {
63
+ const [meta, data] = url.split(',');
64
+ // 使用更安全的正则表达式,避免 ReDoS
65
+ const mediaType = meta.match(/^data:([^;]+);/)?.[1] || 'image/png';
66
+ return {
67
+ type: 'image',
68
+ source: { type: 'base64', media_type: mediaType, data },
69
+ };
70
+ }
71
+ return {
72
+ type: 'image',
73
+ source: { type: 'url', url },
74
+ };
75
+ }
76
+ return { type: 'text', text: '' };
77
+ });
78
+ }
79
+
80
+ // 处理 tool_calls
81
+ if (msg.tool_calls?.length) {
82
+ const toolUseContent = msg.tool_calls.map(tc => ({
83
+ type: 'tool_use',
84
+ id: tc.id,
85
+ name: tc.function.name,
86
+ input: JSON.parse(tc.function.arguments),
87
+ }));
88
+
89
+ if (typeof content === 'string' && content) {
90
+ content = [{ type: 'text', text: content }, ...toolUseContent];
91
+ } else if (Array.isArray(content)) {
92
+ content = [...content, ...toolUseContent];
93
+ } else {
94
+ content = toolUseContent;
95
+ }
96
+ }
97
+
98
+ anthropicMessages.push({
99
+ role: msg.role === 'assistant' ? 'assistant' : 'user',
100
+ content,
101
+ });
102
+ }
103
+
104
+ return { system, messages: anthropicMessages };
105
+ }
106
+
107
+ /**
108
+ * 转换工具定义
109
+ */
110
+ function toAnthropicTools(tools?: ToolDefinition[]): any[] | undefined {
111
+ if (!tools?.length) return undefined;
112
+
113
+ return tools.map(tool => ({
114
+ name: tool.function.name,
115
+ description: tool.function.description,
116
+ input_schema: tool.function.parameters,
117
+ }));
118
+ }
119
+
120
+ /**
121
+ * 转换 Anthropic 响应为 OpenAI 格式
122
+ */
123
+ function fromAnthropicResponse(response: any): ChatCompletionResponse {
124
+ const content: ContentPart[] = [];
125
+ const toolCalls: any[] = [];
126
+
127
+ for (const block of response.content || []) {
128
+ if (block.type === 'text') {
129
+ content.push({ type: 'text', text: block.text });
130
+ } else if (block.type === 'tool_use') {
131
+ toolCalls.push({
132
+ id: block.id,
133
+ type: 'function',
134
+ function: {
135
+ name: block.name,
136
+ arguments: JSON.stringify(block.input),
137
+ },
138
+ });
139
+ }
140
+ }
141
+
142
+ const textContent = content
143
+ .filter(c => c.type === 'text')
144
+ .map(c => (c as { type: 'text'; text: string }).text)
145
+ .join('');
146
+
147
+ return {
148
+ id: response.id,
149
+ object: 'chat.completion',
150
+ created: Date.now(),
151
+ model: response.model,
152
+ choices: [{
153
+ index: 0,
154
+ message: {
155
+ role: 'assistant',
156
+ content: textContent,
157
+ tool_calls: toolCalls.length ? toolCalls : undefined,
158
+ },
159
+ finish_reason: response.stop_reason === 'tool_use' ? 'tool_calls' : 'stop',
160
+ }],
161
+ usage: {
162
+ prompt_tokens: response.usage?.input_tokens || 0,
163
+ completion_tokens: response.usage?.output_tokens || 0,
164
+ total_tokens: (response.usage?.input_tokens || 0) + (response.usage?.output_tokens || 0),
165
+ },
166
+ };
167
+ }
168
+
169
+ export class AnthropicProvider extends BaseProvider {
170
+ name = 'anthropic';
171
+ models = [
172
+ 'claude-opus-4-20250514',
173
+ 'claude-sonnet-4-20250514',
174
+ 'claude-3-7-sonnet-20250219',
175
+ 'claude-3-5-sonnet-20241022',
176
+ 'claude-3-5-haiku-20241022',
177
+ 'claude-3-opus-20240229',
178
+ 'claude-3-sonnet-20240229',
179
+ 'claude-3-haiku-20240307',
180
+ ];
181
+
182
+ private baseUrl: string;
183
+ private anthropicVersion: string;
184
+
185
+ constructor(config: AnthropicConfig = {}) {
186
+ super(config);
187
+ this.baseUrl = config.baseUrl || 'https://api.anthropic.com';
188
+ this.anthropicVersion = config.anthropicVersion || '2023-06-01';
189
+ }
190
+
191
+ protected async fetch<T>(url: string, options: RequestInit & { json?: any } = {}): Promise<T> {
192
+ const { json, ...fetchOptions } = options;
193
+
194
+ const headers: Record<string, string> = {
195
+ 'Content-Type': 'application/json',
196
+ 'x-api-key': this.config.apiKey || '',
197
+ 'anthropic-version': this.anthropicVersion,
198
+ ...this.config.headers,
199
+ ...(options.headers as Record<string, string>),
200
+ };
201
+
202
+ const controller = new AbortController();
203
+ const timeoutId = setTimeout(() => controller.abort(), this.config.timeout);
204
+
205
+ try {
206
+ const response = await globalThis.fetch(url, {
207
+ ...fetchOptions,
208
+ headers,
209
+ body: json ? JSON.stringify(json) : fetchOptions.body,
210
+ signal: controller.signal,
211
+ });
212
+
213
+ if (!response.ok) {
214
+ const error = await response.text();
215
+ throw new Error(`Anthropic API Error (${response.status}): ${error}`);
216
+ }
217
+
218
+ return response.json() as Promise<T>;
219
+ } finally {
220
+ clearTimeout(timeoutId);
221
+ }
222
+ }
223
+
224
+ async chat(request: ChatCompletionRequest): Promise<ChatCompletionResponse> {
225
+ const { system, messages } = toAnthropicMessages(request.messages);
226
+
227
+ const anthropicRequest: any = {
228
+ model: request.model,
229
+ messages,
230
+ max_tokens: request.max_tokens || 4096,
231
+ };
232
+
233
+ if (system) {
234
+ anthropicRequest.system = system;
235
+ }
236
+
237
+ if (request.temperature !== undefined) {
238
+ anthropicRequest.temperature = request.temperature;
239
+ }
240
+
241
+ if (request.top_p !== undefined) {
242
+ anthropicRequest.top_p = request.top_p;
243
+ }
244
+
245
+ if (request.stop) {
246
+ anthropicRequest.stop_sequences = Array.isArray(request.stop) ? request.stop : [request.stop];
247
+ }
248
+
249
+ const tools = toAnthropicTools(request.tools);
250
+ if (tools) {
251
+ anthropicRequest.tools = tools;
252
+ }
253
+
254
+ const response = await this.fetch<any>(`${this.baseUrl}/v1/messages`, {
255
+ method: 'POST',
256
+ json: anthropicRequest,
257
+ });
258
+
259
+ return fromAnthropicResponse(response);
260
+ }
261
+
262
+ async *chatStream(request: ChatCompletionRequest): AsyncIterable<ChatCompletionChunk> {
263
+ const { system, messages } = toAnthropicMessages(request.messages);
264
+
265
+ const anthropicRequest: any = {
266
+ model: request.model,
267
+ messages,
268
+ max_tokens: request.max_tokens || 4096,
269
+ stream: true,
270
+ };
271
+
272
+ if (system) {
273
+ anthropicRequest.system = system;
274
+ }
275
+
276
+ if (request.temperature !== undefined) {
277
+ anthropicRequest.temperature = request.temperature;
278
+ }
279
+
280
+ const tools = toAnthropicTools(request.tools);
281
+ if (tools) {
282
+ anthropicRequest.tools = tools;
283
+ }
284
+
285
+ const headers: Record<string, string> = {
286
+ 'Content-Type': 'application/json',
287
+ 'x-api-key': this.config.apiKey || '',
288
+ 'anthropic-version': this.anthropicVersion,
289
+ };
290
+
291
+ const response = await globalThis.fetch(`${this.baseUrl}/v1/messages`, {
292
+ method: 'POST',
293
+ headers,
294
+ body: JSON.stringify(anthropicRequest),
295
+ });
296
+
297
+ if (!response.ok) {
298
+ const error = await response.text();
299
+ throw new Error(`Anthropic API Error (${response.status}): ${error}`);
300
+ }
301
+
302
+ if (!response.body) {
303
+ throw new Error('Response body is empty');
304
+ }
305
+
306
+ const reader = response.body.getReader();
307
+ const decoder = new TextDecoder();
308
+ let buffer = '';
309
+ let messageId = '';
310
+ let model = request.model;
311
+
312
+ while (true) {
313
+ const { done, value } = await reader.read();
314
+ if (done) break;
315
+
316
+ buffer += decoder.decode(value, { stream: true });
317
+ const lines = buffer.split('\n');
318
+ buffer = lines.pop() || '';
319
+
320
+ for (const line of lines) {
321
+ const trimmed = line.trim();
322
+ if (trimmed.startsWith('data: ')) {
323
+ const data = trimmed.slice(6);
324
+ if (data === '[DONE]') continue;
325
+
326
+ try {
327
+ const event = JSON.parse(data);
328
+
329
+ if (event.type === 'message_start') {
330
+ messageId = event.message?.id || '';
331
+ model = event.message?.model || model;
332
+ } else if (event.type === 'content_block_delta') {
333
+ if (event.delta?.type === 'text_delta') {
334
+ yield {
335
+ id: messageId,
336
+ object: 'chat.completion.chunk',
337
+ created: Date.now(),
338
+ model,
339
+ choices: [{
340
+ index: 0,
341
+ delta: { content: event.delta.text },
342
+ finish_reason: null,
343
+ }],
344
+ };
345
+ }
346
+ } else if (event.type === 'message_delta') {
347
+ yield {
348
+ id: messageId,
349
+ object: 'chat.completion.chunk',
350
+ created: Date.now(),
351
+ model,
352
+ choices: [{
353
+ index: 0,
354
+ delta: {},
355
+ finish_reason: event.delta?.stop_reason === 'tool_use' ? 'tool_calls' : 'stop',
356
+ }],
357
+ usage: event.usage ? {
358
+ prompt_tokens: event.usage.input_tokens || 0,
359
+ completion_tokens: event.usage.output_tokens || 0,
360
+ total_tokens: (event.usage.input_tokens || 0) + (event.usage.output_tokens || 0),
361
+ } : undefined,
362
+ };
363
+ }
364
+ } catch {
365
+ // 忽略解析错误
366
+ }
367
+ }
368
+ }
369
+ }
370
+ }
371
+
372
+ async listModels(): Promise<string[]> {
373
+ return this.models;
374
+ }
375
+ }
@@ -0,0 +1,173 @@
1
/**
 * @zhin.js/ai - Base Provider
 * Abstract base class for AI providers
 */
5
+
6
+ import type {
7
+ AIProvider,
8
+ ProviderConfig,
9
+ ChatCompletionRequest,
10
+ ChatCompletionResponse,
11
+ ChatCompletionChunk,
12
+ } from '../types.js';
13
+
14
+ /**
15
+ * Provider 基类
16
+ * 提供通用的 HTTP 请求和流式解析能力
17
+ */
18
+ export abstract class BaseProvider implements AIProvider {
19
+ abstract name: string;
20
+ abstract models: string[];
21
+
22
+ protected config: ProviderConfig;
23
+ protected abortControllers: Map<string, AbortController> = new Map();
24
+
25
+ constructor(config: ProviderConfig = {}) {
26
+ this.config = {
27
+ timeout: 60000,
28
+ maxRetries: 3,
29
+ ...config,
30
+ };
31
+ }
32
+
33
+ abstract chat(request: ChatCompletionRequest): Promise<ChatCompletionResponse>;
34
+ abstract chatStream(request: ChatCompletionRequest): AsyncIterable<ChatCompletionChunk>;
35
+
36
+ /**
37
+ * 发送 HTTP 请求
38
+ */
39
+ protected async fetch<T>(
40
+ url: string,
41
+ options: RequestInit & { json?: any } = {}
42
+ ): Promise<T> {
43
+ const { json, ...fetchOptions } = options;
44
+
45
+ const headers: Record<string, string> = {
46
+ 'Content-Type': 'application/json',
47
+ ...this.config.headers,
48
+ ...(options.headers as Record<string, string>),
49
+ };
50
+
51
+ if (this.config.apiKey) {
52
+ headers['Authorization'] = `Bearer ${this.config.apiKey}`;
53
+ }
54
+
55
+ const controller = new AbortController();
56
+ const timeoutId = setTimeout(() => controller.abort(), this.config.timeout);
57
+
58
+ try {
59
+ const response = await fetch(url, {
60
+ ...fetchOptions,
61
+ headers,
62
+ body: json ? JSON.stringify(json) : fetchOptions.body,
63
+ signal: controller.signal,
64
+ });
65
+
66
+ if (!response.ok) {
67
+ const error = await response.text();
68
+ throw new Error(`API Error (${response.status}): ${error}`);
69
+ }
70
+
71
+ return response.json() as Promise<T>;
72
+ } finally {
73
+ clearTimeout(timeoutId);
74
+ }
75
+ }
76
+
77
+ /**
78
+ * 发送流式请求
79
+ */
80
+ protected async *fetchStream(
81
+ url: string,
82
+ options: RequestInit & { json?: any } = {}
83
+ ): AsyncIterable<string> {
84
+ const { json, ...fetchOptions } = options;
85
+
86
+ const headers: Record<string, string> = {
87
+ 'Content-Type': 'application/json',
88
+ Accept: 'text/event-stream',
89
+ ...this.config.headers,
90
+ ...(options.headers as Record<string, string>),
91
+ };
92
+
93
+ if (this.config.apiKey) {
94
+ headers['Authorization'] = `Bearer ${this.config.apiKey}`;
95
+ }
96
+
97
+ const controller = new AbortController();
98
+ const requestId = Math.random().toString(36).slice(2);
99
+ this.abortControllers.set(requestId, controller);
100
+
101
+ try {
102
+ const response = await fetch(url, {
103
+ ...fetchOptions,
104
+ headers,
105
+ body: json ? JSON.stringify(json) : fetchOptions.body,
106
+ signal: controller.signal,
107
+ });
108
+
109
+ if (!response.ok) {
110
+ const error = await response.text();
111
+ throw new Error(`API Error (${response.status}): ${error}`);
112
+ }
113
+
114
+ if (!response.body) {
115
+ throw new Error('Response body is empty');
116
+ }
117
+
118
+ const reader = response.body.getReader();
119
+ const decoder = new TextDecoder();
120
+ let buffer = '';
121
+
122
+ while (true) {
123
+ const { done, value } = await reader.read();
124
+ if (done) break;
125
+
126
+ buffer += decoder.decode(value, { stream: true });
127
+ const lines = buffer.split('\n');
128
+ buffer = lines.pop() || '';
129
+
130
+ for (const line of lines) {
131
+ const trimmed = line.trim();
132
+ if (trimmed.startsWith('data: ')) {
133
+ const data = trimmed.slice(6);
134
+ if (data !== '[DONE]') {
135
+ yield data;
136
+ }
137
+ }
138
+ }
139
+ }
140
+ } finally {
141
+ this.abortControllers.delete(requestId);
142
+ }
143
+ }
144
+
145
+ /**
146
+ * 取消所有进行中的请求
147
+ */
148
+ cancelAll(): void {
149
+ for (const controller of this.abortControllers.values()) {
150
+ controller.abort();
151
+ }
152
+ this.abortControllers.clear();
153
+ }
154
+
155
+ /**
156
+ * 健康检查
157
+ */
158
+ async healthCheck(): Promise<boolean> {
159
+ try {
160
+ await this.listModels?.();
161
+ return true;
162
+ } catch {
163
+ return false;
164
+ }
165
+ }
166
+
167
+ /**
168
+ * 列出可用模型(子类可覆盖)
169
+ */
170
+ async listModels(): Promise<string[]> {
171
+ return this.models;
172
+ }
173
+ }
@@ -0,0 +1,13 @@
1
/**
 * @zhin.js/ai - Providers Index
 * Re-exports every provider implementation and its configuration type.
 */

export { BaseProvider } from './base.js';
export { OpenAIProvider, DeepSeekProvider, MoonshotProvider, ZhipuProvider } from './openai.js';
export { AnthropicProvider } from './anthropic.js';
export { OllamaProvider } from './ollama.js';

export type { OpenAIConfig } from './openai.js';
export type { AnthropicConfig } from './anthropic.js';
export type { OllamaConfig } from './ollama.js';