@yh-ui/ai-sdk 0.1.21

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
@@ -0,0 +1,1023 @@
1
+ "use strict";
2
+
3
+ Object.defineProperty(exports, "__esModule", {
4
+ value: true
5
+ });
6
+ exports.PROVIDER_PRESETS = exports.AIContextKey = void 0;
7
+ exports.XRequest = XRequest;
8
+ exports.clearCache = clearCache;
9
+ exports.createAIContext = createAIContext;
10
+ exports.createProviderAdapter = createProviderAdapter;
11
+ exports.createStreamableValue = createStreamableValue;
12
+ exports.createVercelAIProvider = createVercelAIProvider;
13
+ exports.createXRequest = createXRequest;
14
+ exports.createYHFunctionTool = createYHFunctionTool;
15
+ exports.getProviderPreset = getProviderPreset;
16
+ exports.registerMiddleware = registerMiddleware;
17
+ exports.useAIChat = useAIChat;
18
+ exports.useAIStream = useAIStream;
19
+ exports.useConversation = useConversation;
20
+ exports.useConversations = useConversations;
21
+ exports.useStreamableValue = useStreamableValue;
22
+ var _vue = require("vue");
23
/**
 * Create a streamable-value container: a shallow ref holding the data,
 * a loading flag (starts true, i.e. "stream pending"), and an error slot.
 *
 * @param {*} initialValue - Value exposed before the stream produces data.
 * @returns {{ value: import('vue').ShallowRef, loading: import('vue').Ref, error: import('vue').ShallowRef }}
 */
function createStreamableValue(initialValue) {
  const error = (0, _vue.shallowRef)(null);
  const loading = (0, _vue.ref)(true);
  const value = (0, _vue.shallowRef)(initialValue);
  return { value, loading, error };
}
/**
 * Expose a streamable container as read-only computed refs, suitable for
 * direct use in templates without unwrapping the nested refs by hand.
 *
 * @param {{ value: object, loading: object, error: object }} streamable
 *   A container created by createStreamableValue.
 * @returns {{ value: object, loading: object, error: object }} Computed views.
 */
function useStreamableValue(streamable) {
  return {
    value: (0, _vue.computed)(() => streamable.value.value),
    loading: (0, _vue.computed)(() => streamable.loading.value),
    error: (0, _vue.computed)(() => streamable.error.value)
  };
}
// Middlewares applied to every XRequest call, in registration order.
const globalMiddlewares = [];

/**
 * Register a global request middleware.
 *
 * @param {object} middleware - Hooks such as onRequest/onResponse/onChunk/onError.
 * @returns {() => void} Unregister function; calling it more than once is a no-op.
 */
function registerMiddleware(middleware) {
  globalMiddlewares.push(middleware);
  return function unregister() {
    const at = globalMiddlewares.indexOf(middleware);
    if (at !== -1) {
      globalMiddlewares.splice(at, 1);
    }
  };
}
// In-memory response cache: key -> { data, expiry (epoch ms) }.
const cacheStorage = /* @__PURE__ */ new Map();

/**
 * Build a deterministic cache key from the request-identifying parts of a
 * config (url, method, body, params). Method defaults to "POST" so keys
 * match XRequest's default.
 *
 * @param {object} config - Request config.
 * @returns {string} Key of the form `yh-ai-cache-<json>`.
 */
function generateCacheKey(config) {
  const { url, method = "POST", body, params } = config;
  return `yh-ai-cache-${JSON.stringify({ url, method, body, params })}`;
}
/**
 * Read a cached response, lazily evicting it when expired.
 *
 * @param {object} config - Request config (used to derive the key).
 * @param {object} cacheConfig - { enabled, key?, ttl? }.
 * @returns {*} Cached data, or null on miss / expiry / disabled cache.
 */
function getCache(config, cacheConfig) {
  if (!cacheConfig.enabled) return null;
  const key = cacheConfig.key || generateCacheKey(config);
  const entry = cacheStorage.get(key);
  if (!entry) return null;
  if (entry.expiry > Date.now()) {
    return entry.data;
  }
  // Stale entry: drop it so the map does not grow unbounded.
  cacheStorage.delete(key);
  return null;
}
/**
 * Store a response in the cache under the config-derived (or explicit) key.
 *
 * @param {object} config - Request config (used to derive the key).
 * @param {object} cacheConfig - { enabled, key?, ttl? }; ttl defaults to 5 minutes.
 * @param {*} data - Payload to cache.
 */
function setCache(config, cacheConfig, data) {
  if (!cacheConfig.enabled) return;
  const key = cacheConfig.key || generateCacheKey(config);
  const ttlMs = cacheConfig.ttl || 5 * 60 * 1e3;
  cacheStorage.set(key, { data, expiry: Date.now() + ttlMs });
}
/**
 * Evict cache entries.
 *
 * NOTE: historically this function, despite its name, only pruned entries
 * whose TTL had elapsed. That default is preserved for backward
 * compatibility; pass `false` to wipe the whole cache.
 *
 * @param {boolean} [expiredOnly=true] - When true (default), remove only
 *   expired entries; when false, clear every entry.
 */
function clearCache(expiredOnly = true) {
  if (!expiredOnly) {
    cacheStorage.clear();
    return;
  }
  const now = Date.now();
  for (const [key, entry] of cacheStorage.entries()) {
    if (entry.expiry < now) {
      cacheStorage.delete(key);
    }
  }
}
/**
 * Low-level AI request helper with middleware, caching and retry support.
 *
 * @param {object} config - url, method (default "POST"), headers, body,
 *   params (query string), timeout (ms), stream (boolean).
 * @param {object} [callbacks] - Lifecycle hooks: onStart, onResponse,
 *   onChunk, onFinish, onError, onFinally.
 * @param {object} [options] - { middlewares, cache, retry }.
 * @returns {Promise<*>} Parsed JSON body, or the accumulated text when
 *   `config.stream` is set.
 * @throws {Error} On network failure, timeout, or non-2xx HTTP status,
 *   after exhausting any configured retries.
 */
async function XRequest(config, callbacks, options) {
  const { middlewares = [], cache = {}, retry = {} } = options || {};
  const allMiddlewares = [...globalMiddlewares, ...middlewares];
  const mergedCallbacks = {
    onStart: callbacks?.onStart,
    onResponse: callbacks?.onResponse,
    onChunk: callbacks?.onChunk,
    onFinish: callbacks?.onFinish,
    onError: callbacks?.onError,
    onFinally: callbacks?.onFinally
  };

  // Request middlewares may rewrite the config before anything else runs.
  let finalConfig = { ...config };
  for (const mw of allMiddlewares) {
    if (mw.onRequest) {
      finalConfig = await mw.onRequest(finalConfig);
    }
  }
  mergedCallbacks.onStart?.(finalConfig);

  // Serve non-streaming requests from cache when possible. `!= null`
  // (not truthiness) so cached falsy payloads such as "" still hit.
  if (!finalConfig.stream && cache.enabled) {
    const cachedData = getCache(finalConfig, cache);
    if (cachedData != null) {
      mergedCallbacks.onFinish?.(cachedData, cachedData);
      mergedCallbacks.onFinally?.();
      return cachedData;
    }
  }

  let url = finalConfig.url;
  if (finalConfig.params) {
    const searchParams = new URLSearchParams(finalConfig.params);
    url += `?${searchParams.toString()}`;
  }

  const maxRetries = retry.enabled === true ? retry.maxRetries || 3 : 0;
  const retryDelay = retry.retryDelay || 1e3;
  // Default policy: retry only transport-level failures, not HTTP/application errors.
  const retryCondition = retry.retryCondition || (error => {
    const msg = error.message.toLowerCase();
    return msg.includes("fetch") || msg.includes("network");
  });

  let lastError = null;
  let attempt = 0;
  while (true) {
    try {
      attempt++;
      const response = await fetch(url, {
        method: finalConfig.method || "POST",
        headers: {
          "Content-Type": "application/json",
          ...finalConfig.headers
        },
        body: finalConfig.body ? JSON.stringify(finalConfig.body) : void 0,
        signal: finalConfig.timeout ? AbortSignal.timeout(finalConfig.timeout) : void 0
      });
      mergedCallbacks.onResponse?.(response);
      // fetch() resolves on HTTP errors, so fail explicitly — otherwise an
      // error body would be parsed/streamed as success data. This matches
      // the status check already performed by useAIChat/useAIStream.
      if (!response.ok) {
        throw new Error(`API Error: ${response.status} ${response.statusText}`);
      }
      if (finalConfig.stream && response.body) {
        const reader = response.body.getReader();
        const decoder = new TextDecoder();
        let fullContent = "";
        while (true) {
          const { done, value } = await reader.read();
          if (done) break;
          const chunk = decoder.decode(value, { stream: true });
          let processedChunk = chunk;
          for (const mw of allMiddlewares) {
            if (mw.onChunk) {
              processedChunk = mw.onChunk(processedChunk, finalConfig);
            }
          }
          fullContent += processedChunk;
          mergedCallbacks.onChunk?.(processedChunk, { done: false });
        }
        // Flush any multi-byte sequence still buffered in the decoder
        // (appended raw; chunk middlewares have already run).
        fullContent += decoder.decode();
        mergedCallbacks.onFinish?.(fullContent, { done: true });
        if (cache.enabled) {
          setCache(finalConfig, cache, fullContent);
        }
        mergedCallbacks.onFinally?.();
        return fullContent;
      }
      const data = await response.json();
      let processedData = data;
      for (const mw of allMiddlewares) {
        if (mw.onResponse) {
          processedData = mw.onResponse(processedData, finalConfig);
        }
      }
      if (cache.enabled) {
        setCache(finalConfig, cache, processedData);
      }
      mergedCallbacks.onFinish?.(processedData, processedData);
      mergedCallbacks.onFinally?.();
      return processedData;
    } catch (error) {
      lastError = error instanceof Error ? error : new Error(String(error));
      if (attempt <= maxRetries && retryCondition(lastError)) {
        // Linear backoff: the delay grows with the attempt number.
        await new Promise(resolve => setTimeout(resolve, retryDelay * attempt));
        continue;
      }
      // Error middlewares may translate/wrap the error before callers see it.
      for (const mw of allMiddlewares) {
        if (mw.onError) {
          lastError = mw.onError(lastError, finalConfig);
        }
      }
      mergedCallbacks.onError?.(lastError);
      mergedCallbacks.onFinally?.();
      throw lastError;
    }
  }
}
/**
 * Build a preconfigured request function: per-call config is shallow-merged
 * over `defaultConfig` (call-site keys win), and `defaultOptions` is used
 * for every invocation.
 *
 * @param {object} [defaultConfig={}] - Base request config.
 * @param {object} [defaultOptions] - Options forwarded to XRequest.
 * @returns {(config: object, callbacks?: object) => Promise<*>}
 */
function createXRequest(defaultConfig = {}, defaultOptions) {
  return function boundRequest(config, callbacks) {
    const merged = { ...defaultConfig, ...config };
    return XRequest(merged, callbacks, defaultOptions);
  };
}
/**
 * Single-conversation history composable: keeps an ordered message list,
 * caps it at `maxHistory`, and optionally persists it to localStorage.
 *
 * @param {object} [config]
 * @param {number} [config.maxHistory=50] - Max messages retained.
 * @param {boolean} [config.persist=false] - Mirror history to localStorage.
 * @param {string} [config.storageKey="yh-ai-conversation"] - Storage key.
 */
function useConversation(config = {}) {
  const {
    maxHistory = 50,
    persist = false,
    storageKey = "yh-ai-conversation"
  } = config;
  const messages = (0, _vue.ref)([]);
  // localStorage may be absent (SSR / workers) or throw (privacy mode,
  // quota). Persistence is best-effort and must never break messaging.
  const canPersist = () => persist && typeof localStorage !== "undefined";
  const loadHistory = () => {
    if (!canPersist()) return;
    try {
      const stored = localStorage.getItem(storageKey);
      if (stored) {
        messages.value = JSON.parse(stored);
      }
    } catch {
      // Corrupt or unreadable history: start fresh.
      messages.value = [];
    }
  };
  const saveHistory = () => {
    if (!canPersist()) return;
    try {
      localStorage.setItem(storageKey, JSON.stringify(messages.value));
    } catch {
      // Best-effort: a full or blocked store must not break addMessage.
    }
  };
  const addMessage = message => {
    const newMessage = {
      ...message,
      id: `msg-${Date.now()}-${Math.random().toString(36).slice(2)}`,
      createdAt: /* @__PURE__ */ new Date()
    };
    // Keep only the most recent `maxHistory` messages.
    messages.value = [...messages.value, newMessage].slice(-maxHistory);
    saveHistory();
    return newMessage;
  };
  const clearHistory = () => {
    messages.value = [];
    if (canPersist()) {
      try {
        localStorage.removeItem(storageKey);
      } catch {
        // Ignore: the in-memory history is already cleared.
      }
    }
  };
  loadHistory();
  return {
    messages,
    addMessage,
    clearHistory,
    loadHistory,
    saveHistory
  };
}
/**
 * Generate a unique-enough conversation id: millisecond timestamp plus a
 * short base-36 random suffix.
 *
 * @returns {string} Id of the form `conv-<timestamp>-<suffix>`.
 */
function generateId() {
  const suffix = Math.random().toString(36).slice(2, 9);
  return `conv-${Date.now()}-${suffix}`;
}
/**
 * Derive a conversation title from the first user message: content is
 * truncated to 30 characters (plus "...") when longer; falls back to
 * "\u65B0\u4F1A\u8BDD" ("new conversation") when no user message exists.
 *
 * @param {Array<{role: string, content: string}>} messages
 * @returns {string} Title text.
 */
function generateTitle(messages) {
  const userMessage = messages.find(m => m.role === "user");
  if (!userMessage) {
    return "\u65B0\u4F1A\u8BDD";
  }
  const { content } = userMessage;
  if (content.length > 30) {
    return `${content.slice(0, 30)}...`;
  }
  return content;
}
/**
 * Multi-conversation manager: a capped list of conversations (newest
 * first), a current-selection pointer, and optional localStorage
 * persistence of both. Guarantees at least one conversation exists.
 *
 * @param {object} [options]
 * @param {number} [options.maxConversations=50]
 * @param {boolean} [options.persist=false]
 * @param {string} [options.storageKey="yh-ai-conversations"]
 * @param {boolean} [options.autoTitle=true] - Derive titles from the first exchange.
 */
function useConversations(options = {}) {
  const {
    maxConversations = 50,
    persist = false,
    storageKey = "yh-ai-conversations",
    autoTitle = true
  } = options;

  const conversations = (0, _vue.ref)([]);
  const currentId = (0, _vue.ref)(null);

  const currentConversation = (0, _vue.computed)(
    () => conversations.value.find(c => c.id === currentId.value) || null
  );
  const currentMessages = (0, _vue.computed)(
    () => currentConversation.value?.messages || []
  );

  // Shared lookup used by every mutation below.
  const findById = id => conversations.value.find(c => c.id === id);

  const loadConversations = () => {
    if (!persist) return;
    try {
      const raw = localStorage.getItem(storageKey);
      if (!raw) return;
      const parsed = JSON.parse(raw);
      conversations.value = parsed.conversations || [];
      currentId.value = parsed.currentId || null;
      // Repair a pointer to a conversation that no longer exists.
      if (currentId.value && !findById(currentId.value)) {
        currentId.value = conversations.value[0]?.id || null;
      }
    } catch {
      conversations.value = [];
      currentId.value = null;
    }
  };

  const saveConversations = () => {
    if (!persist) return;
    localStorage.setItem(storageKey, JSON.stringify({
      conversations: conversations.value,
      currentId: currentId.value
    }));
  };

  const create = (initialMessages = []) => {
    const now = /* @__PURE__ */ new Date();
    const conversation = {
      id: generateId(),
      title: autoTitle ? generateTitle(initialMessages) : "\u65B0\u4F1A\u8BDD",
      messages: initialMessages,
      createdAt: now,
      updatedAt: now
    };
    // Newest first; drop the oldest beyond the cap.
    conversations.value = [conversation, ...conversations.value].slice(0, maxConversations);
    currentId.value = conversation.id;
    saveConversations();
    return conversation.id;
  };

  const remove = id => {
    if (!findById(id)) return;
    conversations.value = conversations.value.filter(c => c.id !== id);
    // Deleting the selected conversation falls back to the newest one.
    if (currentId.value === id) {
      currentId.value = conversations.value[0]?.id || null;
    }
    saveConversations();
  };

  const select = id => {
    if (!findById(id)) return;
    currentId.value = id;
    saveConversations();
  };

  const updateTitle = (id, title) => {
    const conversation = findById(id);
    if (!conversation) return;
    conversation.title = title;
    conversation.updatedAt = /* @__PURE__ */ new Date();
    saveConversations();
  };

  const addMessage = message => {
    // Lazily create a conversation the first time a message arrives.
    if (!currentId.value) {
      create();
    }
    const conversation = findById(currentId.value);
    if (!conversation) {
      return null;
    }
    const newMessage = {
      ...message,
      id: `msg-${Date.now()}-${Math.random().toString(36).slice(2)}`,
      createdAt: /* @__PURE__ */ new Date()
    };
    conversation.messages.push(newMessage);
    // Auto-title only from the opening exchange (first two messages).
    if (autoTitle && conversation.messages.length <= 2) {
      conversation.title = generateTitle(conversation.messages);
    }
    conversation.updatedAt = /* @__PURE__ */ new Date();
    saveConversations();
    return newMessage;
  };

  const clearCurrent = () => {
    const conversation = findById(currentId.value);
    if (!conversation) return;
    conversation.messages = [];
    conversation.updatedAt = /* @__PURE__ */ new Date();
    saveConversations();
  };

  loadConversations();
  // Always expose at least one conversation.
  if (conversations.value.length === 0) {
    create();
  }

  return {
    conversations,
    currentId,
    currentConversation,
    currentMessages,
    create,
    remove,
    select,
    updateTitle,
    addMessage,
    clearCurrent
  };
}
/**
 * Chat composable: manages a message list, POSTs it to a chat-completion
 * endpoint (`api`), and optionally streams OpenAI-style SSE responses and
 * executes client-side function tools returned by the server.
 *
 * Options:
 * - api: endpoint URL (POST, JSON body { messages, tools?, stream? }).
 * - initialMessages: seed history (copied into a new array, not aliased).
 * - headers / body: merged into every request.
 * - stream: when true, sendMessage delegates to sendMessageStream.
 * - streamInterval: delay (ms) awaited after each network chunk while
 *   streaming (throttles UI updates; 0 disables).
 * - tools: tool definitions; matched by name against server tool calls.
 * - Hooks: onRequest, onResponse, onChunk, onToolCall, onToolResult,
 *   onFinish, onError.
 */
function useAIChat(options) {
  const {
    api,
    initialMessages = [],
    headers = {},
    body = {},
    stream = false,
    streamInterval = 20,
    tools = [],
    onRequest,
    onResponse,
    onChunk,
    onToolCall,
    onToolResult,
    onFinish,
    onError
  } = options;
  const messages = (0, _vue.ref)([...initialMessages]);
  const input = (0, _vue.ref)("");
  const isLoading = (0, _vue.ref)(false);
  const isStreaming = (0, _vue.ref)(false);
  const error = (0, _vue.ref)(null);
  // The assistant message currently being streamed (snapshot copies).
  const currentMessage = (0, _vue.ref)(null);
  let abortController = null;
  // Append a message (default role "user") with a generated id/timestamp.
  const append = (content, role = "user") => {
    const newMessage = {
      id: `msg-${Date.now()}-${Math.random().toString(36).slice(2)}`,
      role,
      content,
      createdAt: /* @__PURE__ */new Date()
    };
    messages.value = [...messages.value, newMessage];
    return newMessage;
  };
  // Replace fields on the last message, immutably (new array + new object)
  // so Vue change tracking sees a fresh reference.
  const updateLastMessage = updates => {
    if (messages.value.length > 0) {
      const lastIndex = messages.value.length - 1;
      messages.value = messages.value.map((msg, i) => i === lastIndex ? {
        ...msg,
        ...updates
      } : msg);
    }
  };
  // Reset to the initial history and abort any in-flight request.
  const reload = () => {
    messages.value = [...initialMessages];
    input.value = "";
    error.value = null;
    currentMessage.value = null;
    if (abortController) {
      abortController.abort();
      abortController = null;
    }
  };
  // Abort the in-flight request (the AbortError is swallowed by the
  // send functions' catch blocks).
  const stop = () => {
    if (abortController) {
      abortController.abort();
      abortController = null;
    }
    isStreaming.value = false;
  };
  // Run server-requested tool calls sequentially against the local `tools`
  // list; unknown tool names are silently skipped. Failures become
  // role:"tool" error messages rather than throwing.
  const executeTools = async toolCalls => {
    const results = [];
    for (const toolCall of toolCalls) {
      const tool = tools.find(t => t.name === toolCall.name);
      if (tool) {
        try {
          onToolCall?.({
            name: toolCall.name,
            args: toolCall.arguments
          });
          const result = await tool.execute(toolCall.arguments);
          onToolResult?.(toolCall.name, result);
          results.push({
            id: `tool-${Date.now()}-${Math.random().toString(36).slice(2)}`,
            role: "tool",
            content: typeof result === "string" ? result : JSON.stringify(result),
            name: toolCall.name,
            toolCallId: toolCall.id,
            createdAt: /* @__PURE__ */new Date()
          });
        } catch (err) {
          results.push({
            id: `tool-error-${Date.now()}`,
            role: "tool",
            content: `Error: ${err instanceof Error ? err.message : String(err)}`,
            name: toolCall.name,
            toolCallId: toolCall.id,
            createdAt: /* @__PURE__ */new Date()
          });
        }
      }
    }
    return results;
  };
  // Streaming send path: parses OpenAI-style SSE ("data: {...}" lines),
  // accumulates delta content into the placeholder assistant message, and
  // runs a follow-up (non-streaming) request after tool calls complete.
  const sendMessageStream = async content => {
    if (!content.trim() || isStreaming.value) return;
    error.value = null;
    isStreaming.value = true;
    isLoading.value = true;
    append(content, "user");
    input.value = "";
    // Empty assistant placeholder that delta chunks will fill in.
    const assistantMessage = append("", "assistant");
    currentMessage.value = assistantMessage;
    abortController = new AbortController();
    try {
      onRequest?.(content);
      // Project history into the wire format (role/content plus optional
      // tool fields).
      const allMessages = messages.value.map(m => ({
        role: m.role,
        content: m.content,
        ...(m.name && {
          name: m.name
        }),
        ...(m.toolCallId && {
          tool_call_id: m.toolCallId
        })
      }));
      const response = await fetch(api, {
        method: "POST",
        headers: {
          "Content-Type": "application/json",
          ...headers
        },
        body: JSON.stringify({
          messages: allMessages,
          stream: true,
          tools: tools.length > 0 ? tools.map(t => ({
            type: "function",
            function: {
              name: t.name,
              description: t.description || "",
              parameters: t.parameters || {}
            }
          })) : void 0,
          ...body
        }),
        signal: abortController.signal
      });
      onResponse?.(response);
      if (!response.ok || !response.body) {
        throw new Error(`API Error: ${response.status} ${response.statusText}`);
      }
      const reader = response.body.getReader();
      const decoder = new TextDecoder();
      let fullContent = "";
      let currentToolCalls = [];
      while (true) {
        const {
          done,
          value
        } = await reader.read();
        if (done) break;
        const chunk = decoder.decode(value, {
          stream: true
        });
        // NOTE(review): assumes each SSE event arrives whole within one
        // network chunk; an event split across reads would be dropped by
        // the JSON.parse catch below — confirm against the server framing.
        const lines = chunk.split("\n").filter(line => line.trim() !== "");
        for (const line of lines) {
          if (line.startsWith("data: ")) {
            const data = line.slice(6);
            if (data === "[DONE]") {
              continue;
            }
            try {
              const parsed = JSON.parse(data);
              // Text delta: grow the assistant message content.
              if (parsed.choices?.[0]?.delta?.content) {
                const delta = parsed.choices[0].delta.content;
                fullContent += delta;
                currentMessage.value = {
                  ...assistantMessage,
                  content: fullContent
                };
                updateLastMessage({
                  content: fullContent
                });
                onChunk?.(delta, currentMessage.value);
              }
              // Tool-call delta: merge fragments by id into currentToolCalls.
              if (parsed.choices?.[0]?.delta?.tool_calls) {
                const toolCalls = parsed.choices[0].delta.tool_calls;
                for (const tc of toolCalls) {
                  const existingIndex = currentToolCalls.findIndex(t => t.id === tc.id);
                  if (existingIndex >= 0) {
                    // NOTE(review): JSON.parse on tc.function.arguments
                    // assumes each fragment is complete JSON; providers
                    // typically stream *partial* JSON strings here, which
                    // would throw and be swallowed by the catch below —
                    // verify, or accumulate the raw string and parse once
                    // at end of stream.
                    currentToolCalls[existingIndex] = {
                      ...currentToolCalls[existingIndex],
                      arguments: {
                        ...currentToolCalls[existingIndex].arguments,
                        ...(tc.function?.arguments && JSON.parse(tc.function.arguments))
                      }
                    };
                  } else if (tc.id && tc.function?.name) {
                    currentToolCalls.push({
                      id: tc.id,
                      type: "function",
                      name: tc.function.name,
                      arguments: tc.function.arguments ? JSON.parse(tc.function.arguments) : {}
                    });
                  }
                }
                updateLastMessage({
                  toolCalls: [...currentToolCalls]
                });
              }
            } catch {}
            // ^ Malformed / partial SSE JSON is intentionally ignored;
            // see the NOTE(review) comments above.
          }
        }
        // Optional pacing delay between network chunks.
        if (streamInterval > 0) {
          await new Promise(resolve => setTimeout(resolve, streamInterval));
        }
      }
      // Tool phase: execute requested tools, append their results, then ask
      // the server for the final answer (non-streaming follow-up request).
      if (currentToolCalls.length > 0) {
        updateLastMessage({
          toolCalls: currentToolCalls
        });
        const toolResults = await executeTools(currentToolCalls);
        for (const result of toolResults) {
          // Direct push (not immutable update) — still reactive via ref.
          messages.value.push(result);
        }
        const finalMessages = messages.value.map(m => ({
          role: m.role,
          content: m.content,
          ...(m.name && {
            name: m.name
          }),
          ...(m.toolCallId && {
            tool_call_id: m.toolCallId
          })
        }));
        const finalResponse = await fetch(api, {
          method: "POST",
          headers: {
            "Content-Type": "application/json",
            ...headers
          },
          body: JSON.stringify({
            messages: finalMessages,
            ...body
          })
        });
        if (finalResponse.ok) {
          const finalData = await finalResponse.json();
          const finalContent = finalData.content || finalData.message?.content || "";
          updateLastMessage({
            content: finalContent
          });
          currentMessage.value = {
            ...assistantMessage,
            content: finalContent
          };
        }
      }
      const finalMessage = messages.value[messages.value.length - 1];
      onFinish?.(finalMessage);
    } catch (err) {
      // AbortError comes from stop()/reload(); it is not reported.
      if (err.name !== "AbortError") {
        const errorObj = err instanceof Error ? err : new Error(String(err));
        error.value = errorObj;
        onError?.(errorObj);
      }
    } finally {
      isLoading.value = false;
      isStreaming.value = false;
      abortController = null;
    }
  };
  // Non-streaming send path; delegates to sendMessageStream when the
  // composable was created with `stream: true`.
  const sendMessage = async content => {
    if (stream) {
      return sendMessageStream(content);
    }
    if (!content.trim() || isLoading.value) return;
    error.value = null;
    isLoading.value = true;
    append(content, "user");
    input.value = "";
    try {
      onRequest?.(content);
      const allMessages = messages.value.map(m => ({
        role: m.role,
        content: m.content,
        ...(m.name && {
          name: m.name
        }),
        ...(m.toolCallId && {
          tool_call_id: m.toolCallId
        })
      }));
      const response = await fetch(api, {
        method: "POST",
        headers: {
          "Content-Type": "application/json",
          ...headers
        },
        body: JSON.stringify({
          messages: allMessages,
          tools: tools.length > 0 ? tools.map(t => ({
            type: "function",
            function: {
              name: t.name,
              description: t.description || "",
              parameters: t.parameters || {}
            }
          })) : void 0,
          ...body
        })
      });
      onResponse?.(response);
      if (!response.ok) {
        throw new Error(`API Error: ${response.status} ${response.statusText}`);
      }
      const data = await response.json();
      let toolCalls = data.tool_calls || [];
      let finalContent = data.content || data.message?.content || "";
      // Tool round-trip: execute tools, append results, re-query for the
      // final answer.
      if (toolCalls.length > 0) {
        const toolResults = await executeTools(toolCalls);
        for (const result of toolResults) {
          messages.value.push(result);
        }
        const finalMessages = messages.value.map(m => ({
          role: m.role,
          content: m.content,
          ...(m.name && {
            name: m.name
          }),
          ...(m.toolCallId && {
            tool_call_id: m.toolCallId
          })
        }));
        const finalResponse = await fetch(api, {
          method: "POST",
          headers: {
            "Content-Type": "application/json",
            ...headers
          },
          body: JSON.stringify({
            messages: finalMessages,
            ...body
          })
        });
        if (finalResponse.ok) {
          const finalData = await finalResponse.json();
          finalContent = finalData.content || finalData.message?.content || "";
        }
      }
      const assistantMessage = append(finalContent, "assistant");
      if (toolCalls.length > 0) {
        updateLastMessage({
          toolCalls
        });
      }
      onFinish?.(assistantMessage);
    } catch (err) {
      const errorObj = err instanceof Error ? err : new Error(String(err));
      error.value = errorObj;
      onError?.(errorObj);
    } finally {
      isLoading.value = false;
    }
  };
  return {
    messages,
    input,
    isLoading,
    isStreaming,
    error,
    currentMessage,
    sendMessage,
    sendMessageStream,
    stop,
    append,
    updateLastMessage,
    reload
  };
}
/**
 * Minimal text-streaming composable: POSTs { prompt } to `api` and appends
 * the decoded response bytes to `content` as they arrive. Starting a new
 * stream aborts any stream already in flight; the stream is also stopped
 * automatically when the owning component unmounts.
 *
 * @param {object} options - { api, initialContent?, onChunk?, onFinish?, onError? }
 */
function useAIStream(options) {
  const { api, initialContent = "", onChunk, onFinish, onError } = options;
  const content = (0, _vue.shallowRef)(initialContent);
  const isStreaming = (0, _vue.ref)(false);
  const error = (0, _vue.ref)(null);
  let abortController = null;

  const stop = () => {
    if (abortController) {
      abortController.abort();
      isStreaming.value = false;
    }
  };

  const start = async prompt => {
    // Only one stream at a time: cancel the previous one first.
    if (isStreaming.value) {
      stop();
    }
    error.value = null;
    isStreaming.value = true;
    content.value = "";
    abortController = new AbortController();
    try {
      const response = await fetch(api, {
        method: "POST",
        headers: { "Content-Type": "application/json" },
        body: JSON.stringify({ prompt }),
        signal: abortController.signal
      });
      if (!response.ok || !response.body) {
        throw new Error(`Stream Error: ${response.status}`);
      }
      const reader = response.body.getReader();
      const decoder = new TextDecoder();
      for (;;) {
        const { done, value } = await reader.read();
        if (done) break;
        const chunk = decoder.decode(value, { stream: true });
        content.value += chunk;
        onChunk?.(chunk);
      }
      onFinish?.(content.value);
    } catch (err) {
      // AbortError means the caller stopped the stream — not an error.
      if (err.name !== "AbortError") {
        const errorObj = err instanceof Error ? err : new Error(String(err));
        error.value = errorObj;
        onError?.(errorObj);
      }
    } finally {
      isStreaming.value = false;
      abortController = null;
    }
  };

  (0, _vue.onUnmounted)(() => {
    stop();
  });

  return { content, isStreaming, error, start, stop };
}
/**
 * Normalize a tool definition into the function-tool shape consumed by
 * useAIChat: description defaults to "" and parameters to {} when falsy.
 *
 * @param {{ name: string, description?: string, parameters?: object, execute: Function }} tool
 * @returns {{ type: "function", name: string, description: string, parameters: object, execute: Function }}
 */
function createYHFunctionTool(tool) {
  const { name, execute } = tool;
  return {
    type: "function",
    name,
    description: tool.description || "",
    parameters: tool.parameters || {},
    execute
  };
}
// Built-in provider presets: base URL, default model, and capability flags
// for each AI provider the SDK knows about. Keys are lowercase provider
// names (looked up via getProviderPreset / createProviderAdapter).
const PROVIDER_PRESETS = exports.PROVIDER_PRESETS = {
  openai: {
    name: "openai",
    baseUrl: "https://api.openai.com/v1",
    defaultModel: "gpt-4",
    supportsStreaming: true,
    supportsFunctionCalling: true
  },
  anthropic: {
    name: "anthropic",
    baseUrl: "https://api.anthropic.com/v1",
    defaultModel: "claude-3-5-sonnet-20241022",
    supportsStreaming: true,
    supportsFunctionCalling: true
  },
  google: {
    name: "google",
    baseUrl: "https://generativelanguage.googleapis.com/v1",
    defaultModel: "gemini-1.5-pro",
    supportsStreaming: true,
    supportsFunctionCalling: true
  },
  deepseek: {
    name: "deepseek",
    baseUrl: "https://api.deepseek.com/v1",
    defaultModel: "deepseek-chat",
    supportsStreaming: true,
    supportsFunctionCalling: true
  },
  // Local Ollama instance (OpenAI-compatible endpoint).
  ollama: {
    name: "ollama",
    baseUrl: "http://localhost:11434/v1",
    defaultModel: "llama2",
    supportsStreaming: true,
    supportsFunctionCalling: false
  },
  azure: {
    name: "azure",
    baseUrl: "",
    // Must be configured per deployment (resource-specific endpoint/model).
    defaultModel: "",
    supportsStreaming: true,
    supportsFunctionCalling: true,
    needsProjectId: true
  },
  moonshot: {
    name: "moonshot",
    baseUrl: "https://api.moonshot.cn/v1",
    defaultModel: "moonshot-v1-8k",
    supportsStreaming: true,
    supportsFunctionCalling: true
  },
  minimax: {
    name: "minimax",
    baseUrl: "https://api.minimax.chat/v1",
    defaultModel: "abab6.5s-chat",
    supportsStreaming: true,
    supportsFunctionCalling: true
  },
  zhipu: {
    name: "zhipu",
    baseUrl: "https://open.bigmodel.cn/api/paas/v4",
    defaultModel: "glm-4",
    supportsStreaming: true,
    supportsFunctionCalling: true
  },
  siliconflow: {
    name: "siliconflow",
    baseUrl: "https://api.siliconflow.cn/v1",
    defaultModel: "Qwen/Qwen2-7B-Instruct",
    supportsStreaming: true,
    supportsFunctionCalling: true
  },
  together: {
    name: "together",
    baseUrl: "https://api.together.ai/v1",
    defaultModel: "meta-llama/Llama-3-70b-chat-hf",
    supportsStreaming: true,
    supportsFunctionCalling: true
  },
  novita: {
    name: "novita",
    baseUrl: "https://api.novita.ai/v3",
    defaultModel: "meta-llama/llama-3.1-70b-instruct",
    supportsStreaming: true,
    supportsFunctionCalling: true
  }
};
/**
 * Look up a provider preset by case-insensitive name.
 *
 * @param {string} name - Provider name, e.g. "OpenAI".
 * @returns {object|undefined} The preset, or undefined when unknown.
 */
function getProviderPreset(name) {
  const key = name.toLowerCase();
  return PROVIDER_PRESETS[key];
}
/**
 * Build a provider adapter either from a preset name ({ provider, apiKey })
 * or from a fully custom config (anything carrying its own `baseUrl`).
 *
 * @param {object} config - Preset form: { provider, apiKey }; custom form:
 *   { name, baseUrl, defaultModel?, apiKey? }.
 * @returns {object} The resolved config plus a `createChat(model?)` factory.
 * @throws {Error} When a preset name is given but not known.
 */
function createProviderAdapter(config) {
  let finalConfig;
  // A config with `provider` but no `baseUrl` refers to a built-in preset.
  const usesPreset = "provider" in config && !("baseUrl" in config);
  if (usesPreset) {
    const preset = PROVIDER_PRESETS[config.provider.toLowerCase()];
    if (!preset) {
      throw new Error(`Unknown provider: ${config.provider}`);
    }
    finalConfig = {
      name: preset.name,
      baseUrl: preset.baseUrl,
      defaultModel: preset.defaultModel,
      apiKey: config.apiKey
    };
  } else {
    finalConfig = config;
  }
  return {
    ...finalConfig,
    // `model` may be a model-name string, a config object, or omitted.
    createChat: model => {
      const modelConfig = typeof model === "string" ? { model } : model || {};
      return {
        provider: finalConfig.name,
        baseUrl: finalConfig.baseUrl,
        apiKey: finalConfig.apiKey,
        ...modelConfig
      };
    }
  };
}
/**
 * Wrap a provider name and config into a Vercel-AI-style provider object
 * exposing a `languageModel(modelId)` factory.
 *
 * @param {string} provider - Provider name.
 * @param {object} config - Provider configuration, attached verbatim.
 * @returns {{ languageModel: (modelId: string) => object }}
 */
function createVercelAIProvider(provider, config) {
  const languageModel = modelId => ({ provider, modelId, config });
  return { languageModel };
}
// Injection key for providing an AI context through the Vue component tree.
const AIContextKey = exports.AIContextKey = Symbol("yh-ai-context");

/**
 * Create a shared AI context holding the current session id, provider and
 * model config, plus setters for each.
 *
 * @param {object} [initialProvider] - Initial provider (null when omitted).
 * @param {string} [initialModel] - Initial model name; defaults to "gpt-4".
 */
function createAIContext(initialProvider, initialModel) {
  const sessionId = (0, _vue.ref)(null);
  const provider = (0, _vue.ref)(initialProvider || null);
  const modelConfig = (0, _vue.ref)({ model: initialModel || "gpt-4" });

  const setSession = id => {
    sessionId.value = id;
  };
  const setProvider = p => {
    provider.value = p;
  };
  const setModel = config => {
    modelConfig.value = config;
  };

  return {
    sessionId,
    provider,
    modelConfig,
    setSession,
    setProvider,
    setModel
  };
}