@yh-ui/ai-sdk 0.1.21
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/LICENSE +21 -0
- package/README.md +309 -0
- package/dist/agent-enhanced.cjs +292 -0
- package/dist/agent-enhanced.d.ts +143 -0
- package/dist/agent-enhanced.mjs +267 -0
- package/dist/cache-adapter.cjs +99 -0
- package/dist/cache-adapter.d.ts +42 -0
- package/dist/cache-adapter.mjs +95 -0
- package/dist/future.cjs +882 -0
- package/dist/future.d.ts +519 -0
- package/dist/future.mjs +765 -0
- package/dist/index.cjs +913 -0
- package/dist/index.d.ts +55 -0
- package/dist/index.mjs +217 -0
- package/dist/langchain.cjs +363 -0
- package/dist/langchain.d.ts +232 -0
- package/dist/langchain.mjs +319 -0
- package/dist/loaders.cjs +110 -0
- package/dist/loaders.d.ts +58 -0
- package/dist/loaders.mjs +76 -0
- package/dist/mcp-server.cjs +265 -0
- package/dist/mcp-server.d.ts +186 -0
- package/dist/mcp-server.mjs +234 -0
- package/dist/mcp.cjs +370 -0
- package/dist/mcp.d.ts +206 -0
- package/dist/mcp.mjs +354 -0
- package/dist/observability.cjs +150 -0
- package/dist/observability.d.ts +112 -0
- package/dist/observability.mjs +117 -0
- package/dist/rag-production.cjs +95 -0
- package/dist/rag-production.d.ts +43 -0
- package/dist/rag-production.mjs +85 -0
- package/dist/rate-limit.cjs +73 -0
- package/dist/rate-limit.d.ts +55 -0
- package/dist/rate-limit.mjs +51 -0
- package/dist/vector-store.cjs +63 -0
- package/dist/vector-store.d.ts +74 -0
- package/dist/vector-store.mjs +55 -0
- package/dist/vue/index.cjs +1023 -0
- package/dist/vue/index.d.ts +627 -0
- package/dist/vue/index.mjs +913 -0
- package/package.json +87 -0
|
@@ -0,0 +1,913 @@
|
|
|
1
|
+
import { ref, shallowRef, computed, onUnmounted } from "vue";
|
|
2
|
+
/**
 * Creates a streamable-value container: a shallow ref holding the current
 * value, a loading flag that starts as true, and an error slot.
 * @param {*} initialValue - Starting value for the `value` ref.
 * @returns {{ value: import('vue').ShallowRef, loading: import('vue').Ref<boolean>, error: import('vue').ShallowRef }}
 */
export function createStreamableValue(initialValue) {
  return {
    value: shallowRef(initialValue),
    loading: ref(true),
    error: shallowRef(null)
  };
}
|
|
8
|
+
/**
 * Wraps a streamable (from createStreamableValue) in read-only computed
 * views so consumers observe its value/loading/error reactively.
 * @param {{ value: import('vue').Ref, loading: import('vue').Ref, error: import('vue').Ref }} streamable
 * @returns {{ value, loading, error }} Computed refs mirroring the streamable.
 */
export function useStreamableValue(streamable) {
  return {
    value: computed(() => streamable.value.value),
    loading: computed(() => streamable.loading.value),
    error: computed(() => streamable.error.value)
  };
}
|
|
18
|
+
// Middlewares registered via registerMiddleware(); prepended to the
// per-call middlewares on every XRequest invocation.
const globalMiddlewares = [];
|
|
19
|
+
/**
 * Adds a middleware to the global pipeline applied by every XRequest call.
 * @param {object} middleware - Hook object (e.g. onRequest/onResponse/onChunk/onError).
 * @returns {() => void} Unsubscribe function that removes this middleware.
 */
export function registerMiddleware(middleware) {
  globalMiddlewares.push(middleware);
  return function unregister() {
    const position = globalMiddlewares.indexOf(middleware);
    if (position !== -1) {
      globalMiddlewares.splice(position, 1);
    }
  };
}
|
|
28
|
+
// In-memory response cache keyed by request signature (see generateCacheKey);
// each entry is { data, expiry } with expiry as an epoch-ms timestamp.
const cacheStorage = /* @__PURE__ */ new Map();
|
|
29
|
+
/**
 * Derives a deterministic cache key from the request's url/method/body/params.
 * Undefined fields are dropped by JSON.stringify, so equivalent requests map
 * to the same key.
 * @param {{url: string, method?: string, body?: *, params?: *}} config
 * @returns {string} Key of the form `yh-ai-cache-<json>`.
 */
function generateCacheKey(config) {
  const { url, method = "POST", body, params } = config;
  return `yh-ai-cache-${JSON.stringify({ url, method, body, params })}`;
}
|
|
34
|
+
/**
 * Looks up a cached response for this request.
 * Returns null when caching is disabled, the entry is missing, or the entry
 * has expired (expired entries are evicted on the way out).
 * @param {object} config - Request config used to derive the key.
 * @param {{enabled?: boolean, key?: string}} cacheConfig
 * @returns {*} Cached data or null.
 */
function getCache(config, cacheConfig) {
  if (!cacheConfig.enabled) return null;
  const key = cacheConfig.key || generateCacheKey(config);
  const entry = cacheStorage.get(key);
  if (!entry) return null;
  if (entry.expiry > Date.now()) return entry.data;
  // Stale entry: evict lazily on read.
  cacheStorage.delete(key);
  return null;
}
|
|
46
|
+
/**
 * Stores a response in the in-memory cache under the request's key with a
 * TTL (default 5 minutes). No-op when caching is disabled.
 * @param {object} config - Request config used to derive the key.
 * @param {{enabled?: boolean, key?: string, ttl?: number}} cacheConfig
 * @param {*} data - Value to cache.
 */
function setCache(config, cacheConfig, data) {
  if (!cacheConfig.enabled) return;
  const key = cacheConfig.key || generateCacheKey(config);
  const ttl = cacheConfig.ttl || 5 * 60 * 1e3;
  cacheStorage.set(key, { data, expiry: Date.now() + ttl });
}
|
|
55
|
+
/**
 * Evicts expired entries from the in-memory cache.
 *
 * NOTE(review): despite the name, this does NOT clear the whole cache —
 * entries whose expiry is still in the future are kept. Confirm whether a
 * full clear was intended before relying on this for cache invalidation.
 */
export function clearCache() {
  const now = Date.now();
  for (const [key, value] of cacheStorage.entries()) {
    if (value.expiry < now) {
      cacheStorage.delete(key);
    }
  }
}
|
|
63
|
+
/**
 * Core request helper: runs the middleware pipeline (global + per-call),
 * optionally serves/stores a cached result, then performs a fetch with
 * optional streaming, timeout, and retry with linear backoff.
 *
 * @param {object} config - { url, method?, headers?, body?, params?, stream?, timeout? }.
 * @param {object} [callbacks] - Lifecycle hooks: onStart, onResponse, onChunk,
 *   onFinish, onError, onFinally.
 * @param {object} [options] - { middlewares?, cache?: {enabled, key?, ttl?},
 *   retry?: {enabled, maxRetries?, retryDelay?, retryCondition?} }.
 * @returns {Promise<*>} Accumulated text (stream mode) or the parsed,
 *   middleware-transformed JSON body.
 * @throws The last error (possibly transformed by middleware onError hooks).
 */
export async function XRequest(config, callbacks, options) {
  const { middlewares = [], cache = {}, retry = {} } = options || {};
  // Global middlewares run before per-call middlewares, in registration order.
  const allMiddlewares = [...globalMiddlewares, ...middlewares];
  const mergedCallbacks = {
    onStart: callbacks?.onStart,
    onResponse: callbacks?.onResponse,
    onChunk: callbacks?.onChunk,
    onFinish: callbacks?.onFinish,
    onError: callbacks?.onError,
    onFinally: callbacks?.onFinally
  };
  // Each onRequest hook may replace the config wholesale (awaited, sequential).
  let finalConfig = { ...config };
  for (const mw of allMiddlewares) {
    if (mw.onRequest) {
      finalConfig = await mw.onRequest(finalConfig);
    }
  }
  mergedCallbacks.onStart?.(finalConfig);
  // Cache is only consulted for non-streaming requests. NOTE(review): a falsy
  // cached value (0, "", false) is treated as a miss here.
  if (!finalConfig.stream && cache.enabled) {
    const cachedData = getCache(finalConfig, cache);
    if (cachedData) {
      mergedCallbacks.onFinish?.(cachedData, cachedData);
      mergedCallbacks.onFinally?.();
      return cachedData;
    }
  }
  let url = finalConfig.url;
  if (finalConfig.params) {
    const searchParams = new URLSearchParams(finalConfig.params);
    url += `?${searchParams.toString()}`;
  }
  // Retry is opt-in (retry.enabled must be exactly true); default 3 retries.
  const maxRetries = retry.enabled === true ? retry.maxRetries || 3 : 0;
  const retryDelay = retry.retryDelay || 1e3;
  // Default condition retries only errors that look like network failures.
  const retryCondition = retry.retryCondition || ((error) => {
    const msg = error.message.toLowerCase();
    return msg.includes("fetch") || msg.includes("network");
  });
  let lastError = null;
  let attempt = 0;
  while (true) {
    try {
      attempt++;
      const response = await fetch(url, {
        method: finalConfig.method || "POST",
        headers: {
          "Content-Type": "application/json",
          ...finalConfig.headers
        },
        body: finalConfig.body ? JSON.stringify(finalConfig.body) : void 0,
        // Per-attempt timeout via AbortSignal.timeout (throws TimeoutError).
        signal: finalConfig.timeout ? AbortSignal.timeout(finalConfig.timeout) : void 0
      });
      // Callback onResponse receives the raw Response object; middleware
      // onResponse (below) receives the parsed JSON instead.
      mergedCallbacks.onResponse?.(response);
      if (finalConfig.stream && response.body) {
        // Streaming path: read raw chunks, let middleware transform each one,
        // accumulate, and report via onChunk.
        const reader = response.body.getReader();
        const decoder = new TextDecoder();
        let fullContent = "";
        while (true) {
          const { done, value } = await reader.read();
          if (done) break;
          const chunk = decoder.decode(value, { stream: true });
          let processedChunk = chunk;
          for (const mw of allMiddlewares) {
            if (mw.onChunk) {
              processedChunk = mw.onChunk(processedChunk, finalConfig);
            }
          }
          fullContent += processedChunk;
          mergedCallbacks.onChunk?.(processedChunk, { done: false });
        }
        mergedCallbacks.onFinish?.(fullContent, { done: true });
        if (cache.enabled) {
          setCache(finalConfig, cache, fullContent);
        }
        mergedCallbacks.onFinally?.();
        return fullContent;
      }
      // Non-streaming path: parse JSON, run middleware onResponse transforms.
      const data = await response.json();
      let processedData = data;
      for (const mw of allMiddlewares) {
        if (mw.onResponse) {
          processedData = mw.onResponse(processedData, finalConfig);
        }
      }
      if (cache.enabled) {
        setCache(finalConfig, cache, processedData);
      }
      mergedCallbacks.onFinish?.(processedData, processedData);
      mergedCallbacks.onFinally?.();
      return processedData;
    } catch (error) {
      lastError = error instanceof Error ? error : new Error(String(error));
      // Linear backoff: delay grows with the attempt number.
      if (attempt <= maxRetries && retryCondition(lastError)) {
        await new Promise((resolve) => setTimeout(resolve, retryDelay * attempt));
        continue;
      }
      // Retries exhausted (or non-retryable): let middleware transform the
      // error, then notify and rethrow.
      for (const mw of allMiddlewares) {
        if (mw.onError) {
          lastError = mw.onError(lastError, finalConfig);
        }
      }
      mergedCallbacks.onError?.(lastError);
      mergedCallbacks.onFinally?.();
      throw lastError;
    }
  }
  // NOTE(review): unreachable — the while(true) loop always returns,
  // continues, or throws. Kept for fidelity with the published build.
  mergedCallbacks.onFinally?.();
  throw lastError;
}
|
|
171
|
+
/**
 * Builds a preconfigured request function. Per-call config is shallow-merged
 * over defaultConfig (per-call keys win); defaultOptions is forwarded as-is.
 * @param {object} [defaultConfig] - Base request config.
 * @param {object} [defaultOptions] - Options (middlewares/cache/retry) for every call.
 * @returns {(config: object, callbacks?: object) => Promise<*>}
 */
export function createXRequest(defaultConfig = {}, defaultOptions) {
  return (config, callbacks) =>
    XRequest({ ...defaultConfig, ...config }, callbacks, defaultOptions);
}
|
|
176
|
+
/**
 * Composable managing a single flat message history with optional
 * localStorage persistence.
 *
 * Fix: localStorage access is now wrapped in try/catch. Previously
 * `saveHistory` (called from `addMessage`) and `clearHistory` would throw on
 * storage failures (quota exceeded, privacy mode, storage disabled), crashing
 * the mutation that triggered them.
 *
 * @param {object} [config]
 * @param {number} [config.maxHistory=50] - Max messages kept; oldest are dropped.
 * @param {boolean} [config.persist=false] - Mirror history to localStorage.
 * @param {string} [config.storageKey="yh-ai-conversation"] - Storage key.
 * @returns {{ messages, addMessage, clearHistory, loadHistory, saveHistory }}
 */
export function useConversation(config = {}) {
  const { maxHistory = 50, persist = false, storageKey = "yh-ai-conversation" } = config;
  const messages = ref([]);
  // Restores persisted history; corrupt or unreadable storage resets to [].
  const loadHistory = () => {
    if (!persist) return;
    try {
      const stored = localStorage.getItem(storageKey);
      if (stored) {
        messages.value = JSON.parse(stored);
      }
    } catch {
      // Corrupt JSON or unavailable storage: start fresh rather than crash.
      messages.value = [];
    }
  };
  // Persisting is best-effort: a failed setItem must not break addMessage.
  const saveHistory = () => {
    if (!persist) return;
    try {
      localStorage.setItem(storageKey, JSON.stringify(messages.value));
    } catch {
      // Ignore storage failures; in-memory state stays authoritative.
    }
  };
  // Appends a message (auto id + createdAt), trims to the newest maxHistory
  // entries, persists, and returns the stored message.
  const addMessage = (message) => {
    const newMessage = {
      ...message,
      id: `msg-${Date.now()}-${Math.random().toString(36).slice(2)}`,
      createdAt: /* @__PURE__ */ new Date()
    };
    messages.value = [...messages.value, newMessage].slice(-maxHistory);
    saveHistory();
    return newMessage;
  };
  // Empties the in-memory history and removes the persisted copy.
  const clearHistory = () => {
    messages.value = [];
    if (persist) {
      try {
        localStorage.removeItem(storageKey);
      } catch {
        // Best-effort removal.
      }
    }
  };
  // Eagerly hydrate from storage on composable creation.
  loadHistory();
  return {
    messages,
    addMessage,
    clearHistory,
    loadHistory,
    saveHistory
  };
}
|
|
221
|
+
/**
 * Produces a conversation id of the form `conv-<epoch-ms>-<random base36>`.
 * @returns {string} Practically-unique id (not cryptographically secure).
 */
function generateId() {
  const timestamp = Date.now();
  const suffix = Math.random().toString(36).slice(2, 9);
  return `conv-${timestamp}-${suffix}`;
}
|
|
224
|
+
/**
 * Derives a conversation title from its first user message, truncated to 30
 * characters with an ellipsis. Falls back to "新会话" ("new conversation")
 * when no user message exists.
 * @param {Array<{role: string, content: string}>} messages
 * @returns {string} Display title.
 */
function generateTitle(messages) {
  const firstUserMessage = messages.find((m) => m.role === "user");
  if (!firstUserMessage) {
    return "\u65B0\u4F1A\u8BDD";
  }
  const { content } = firstUserMessage;
  return content.length > 30 ? `${content.slice(0, 30)}...` : content;
}
|
|
232
|
+
/**
 * Composable managing multiple conversations (create/select/remove/rename)
 * with a current-conversation pointer and optional localStorage persistence.
 *
 * Fix: `saveConversations` now wraps `localStorage.setItem` in try/catch.
 * Previously a storage failure (quota exceeded, privacy mode) would throw out
 * of every mutating method (create/remove/select/updateTitle/addMessage/
 * clearCurrent), leaving callers to crash on an otherwise-successful
 * in-memory update.
 *
 * @param {object} [options]
 * @param {number} [options.maxConversations=50] - Max conversations kept (newest first).
 * @param {boolean} [options.persist=false] - Mirror state to localStorage.
 * @param {string} [options.storageKey="yh-ai-conversations"] - Storage key.
 * @param {boolean} [options.autoTitle=true] - Auto-derive titles from the first user message.
 */
export function useConversations(options = {}) {
  const {
    maxConversations = 50,
    persist = false,
    storageKey = "yh-ai-conversations",
    autoTitle = true
  } = options;
  const conversations = ref([]);
  const currentId = ref(null);
  // Currently-selected conversation object, or null when none matches.
  const currentConversation = computed(() => {
    return conversations.value.find((c) => c.id === currentId.value) || null;
  });
  const currentMessages = computed(() => {
    return currentConversation.value?.messages || [];
  });
  // Hydrates from storage; corrupt data resets to an empty state. Also repairs
  // a dangling currentId that points at a conversation no longer present.
  const loadConversations = () => {
    if (persist) {
      try {
        const stored = localStorage.getItem(storageKey);
        if (stored) {
          const parsed = JSON.parse(stored);
          conversations.value = parsed.conversations || [];
          currentId.value = parsed.currentId || null;
          if (currentId.value && !conversations.value.find((c) => c.id === currentId.value)) {
            currentId.value = conversations.value[0]?.id || null;
          }
        }
      } catch {
        conversations.value = [];
        currentId.value = null;
      }
    }
  };
  // Best-effort persistence: a failed write must not break the mutation that
  // triggered it; in-memory state stays authoritative.
  const saveConversations = () => {
    if (!persist) return;
    try {
      localStorage.setItem(
        storageKey,
        JSON.stringify({
          conversations: conversations.value,
          currentId: currentId.value
        })
      );
    } catch {
      // Storage unavailable or full — ignore.
    }
  };
  // Creates a conversation, makes it current, and returns its id. Newest
  // conversations come first; the list is capped at maxConversations.
  const create = (initialMessages = []) => {
    const now = /* @__PURE__ */ new Date();
    const newConversation = {
      id: generateId(),
      title: autoTitle ? generateTitle(initialMessages) : "\u65B0\u4F1A\u8BDD",
      messages: initialMessages,
      createdAt: now,
      updatedAt: now
    };
    conversations.value = [newConversation, ...conversations.value].slice(0, maxConversations);
    currentId.value = newConversation.id;
    saveConversations();
    return newConversation.id;
  };
  // Removes a conversation; if it was current, selection falls back to the
  // first remaining conversation (or null).
  const remove = (id) => {
    const index = conversations.value.findIndex((c) => c.id === id);
    if (index === -1) return;
    conversations.value = conversations.value.filter((c) => c.id !== id);
    if (currentId.value === id) {
      currentId.value = conversations.value[0]?.id || null;
    }
    saveConversations();
  };
  // Selects an existing conversation by id; unknown ids are ignored.
  const select = (id) => {
    if (conversations.value.find((c) => c.id === id)) {
      currentId.value = id;
      saveConversations();
    }
  };
  const updateTitle = (id, title) => {
    const conversation = conversations.value.find((c) => c.id === id);
    if (conversation) {
      conversation.title = title;
      conversation.updatedAt = /* @__PURE__ */ new Date();
      saveConversations();
    }
  };
  // Appends a message (auto id + createdAt) to the current conversation,
  // creating one first if none is selected. Returns the stored message, or
  // null if the current conversation could not be resolved.
  const addMessage = (message) => {
    if (!currentId.value) {
      create();
    }
    const conversation = conversations.value.find((c) => c.id === currentId.value);
    if (conversation) {
      const newMessage = {
        ...message,
        id: `msg-${Date.now()}-${Math.random().toString(36).slice(2)}`,
        createdAt: /* @__PURE__ */ new Date()
      };
      conversation.messages.push(newMessage);
      // Re-derive the title while the conversation is still young (<= 2 msgs).
      if (autoTitle && conversation.messages.length <= 2) {
        conversation.title = generateTitle(conversation.messages);
      }
      conversation.updatedAt = /* @__PURE__ */ new Date();
      saveConversations();
      return newMessage;
    }
    return null;
  };
  // Empties the current conversation's messages without deleting it.
  const clearCurrent = () => {
    const conversation = conversations.value.find((c) => c.id === currentId.value);
    if (conversation) {
      conversation.messages = [];
      conversation.updatedAt = /* @__PURE__ */ new Date();
      saveConversations();
    }
  };
  // Hydrate on creation and guarantee at least one conversation exists.
  loadConversations();
  if (conversations.value.length === 0) {
    create();
  }
  return {
    conversations,
    currentId,
    currentConversation,
    currentMessages,
    create,
    remove,
    select,
    updateTitle,
    addMessage,
    clearCurrent
  };
}
|
|
359
|
+
/**
 * Chat composable for a JSON chat-completions style endpoint, with optional
 * SSE streaming and client-side execution of function tools.
 *
 * Flow: append the user message -> POST the full history (plus tool schemas)
 * to `api` -> either stream SSE deltas into an assistant message or parse a
 * single JSON response -> if the model requested tool calls, execute them
 * locally and POST the augmented history once more for the final answer.
 *
 * @param {object} options
 * @param {string} options.api - Endpoint URL (POSTed JSON).
 * @param {Array} [options.initialMessages] - Seed history.
 * @param {object} [options.headers] - Extra request headers.
 * @param {object} [options.body] - Extra request-body fields (spread last, so they can override).
 * @param {boolean} [options.stream=false] - Route sendMessage through the SSE path.
 * @param {number} [options.streamInterval=20] - Delay (ms) inserted after each read chunk.
 * @param {Array} [options.tools] - Tools with { name, description?, parameters?, execute }.
 * @param {Function} [options.onRequest|onResponse|onChunk|onToolCall|onToolResult|onFinish|onError]
 */
export function useAIChat(options) {
  const {
    api,
    initialMessages = [],
    headers = {},
    body = {},
    stream = false,
    streamInterval = 20,
    tools = [],
    onRequest,
    onResponse,
    onChunk,
    onToolCall,
    onToolResult,
    onFinish,
    onError
  } = options;
  const messages = ref([...initialMessages]);
  const input = ref("");
  const isLoading = ref(false);
  const isStreaming = ref(false);
  const error = ref(null);
  // Snapshot of the assistant message currently being streamed.
  const currentMessage = ref(null);
  // Controller for the in-flight streaming request; null when idle.
  let abortController = null;
  // Appends a message (auto id + createdAt) immutably and returns it.
  const append = (content, role = "user") => {
    const newMessage = {
      id: `msg-${Date.now()}-${Math.random().toString(36).slice(2)}`,
      role,
      content,
      createdAt: /* @__PURE__ */ new Date()
    };
    messages.value = [...messages.value, newMessage];
    return newMessage;
  };
  // Shallow-merges `updates` into the last message (used to grow the
  // streaming assistant message in place, immutably).
  const updateLastMessage = (updates) => {
    if (messages.value.length > 0) {
      const lastIndex = messages.value.length - 1;
      messages.value = messages.value.map(
        (msg, i) => i === lastIndex ? { ...msg, ...updates } : msg
      );
    }
  };
  // Resets the chat to its initial state and aborts any in-flight request.
  const reload = () => {
    messages.value = [...initialMessages];
    input.value = "";
    error.value = null;
    currentMessage.value = null;
    if (abortController) {
      abortController.abort();
      abortController = null;
    }
  };
  // Aborts the in-flight streaming request, if any.
  const stop = () => {
    if (abortController) {
      abortController.abort();
      abortController = null;
    }
    isStreaming.value = false;
  };
  // Runs each requested tool sequentially; every outcome (success or error)
  // becomes a role:"tool" message linked back via toolCallId. Unknown tool
  // names are silently skipped.
  const executeTools = async (toolCalls) => {
    const results = [];
    for (const toolCall of toolCalls) {
      const tool = tools.find((t) => t.name === toolCall.name);
      if (tool) {
        try {
          onToolCall?.({ name: toolCall.name, args: toolCall.arguments });
          const result = await tool.execute(toolCall.arguments);
          onToolResult?.(toolCall.name, result);
          results.push({
            id: `tool-${Date.now()}-${Math.random().toString(36).slice(2)}`,
            role: "tool",
            content: typeof result === "string" ? result : JSON.stringify(result),
            name: toolCall.name,
            toolCallId: toolCall.id,
            createdAt: /* @__PURE__ */ new Date()
          });
        } catch (err) {
          // Tool failures are reported to the model as content, not thrown.
          results.push({
            id: `tool-error-${Date.now()}`,
            role: "tool",
            content: `Error: ${err instanceof Error ? err.message : String(err)}`,
            name: toolCall.name,
            toolCallId: toolCall.id,
            createdAt: /* @__PURE__ */ new Date()
          });
        }
      }
    }
    return results;
  };
  // Streaming send: parses "data: ..." SSE lines, accumulating content deltas
  // and incrementally-built tool calls.
  const sendMessageStream = async (content) => {
    if (!content.trim() || isStreaming.value) return;
    error.value = null;
    isStreaming.value = true;
    isLoading.value = true;
    append(content, "user");
    input.value = "";
    // Placeholder assistant message, grown chunk-by-chunk below.
    const assistantMessage = append("", "assistant");
    currentMessage.value = assistantMessage;
    abortController = new AbortController();
    try {
      onRequest?.(content);
      // Project the history to the wire format (role/content plus optional
      // name and tool_call_id for tool messages).
      const allMessages = messages.value.map((m) => ({
        role: m.role,
        content: m.content,
        ...m.name && { name: m.name },
        ...m.toolCallId && { tool_call_id: m.toolCallId }
      }));
      const response = await fetch(api, {
        method: "POST",
        headers: {
          "Content-Type": "application/json",
          ...headers
        },
        body: JSON.stringify({
          messages: allMessages,
          stream: true,
          tools: tools.length > 0 ? tools.map((t) => ({
            type: "function",
            function: {
              name: t.name,
              description: t.description || "",
              parameters: t.parameters || {}
            }
          })) : void 0,
          ...body
        }),
        signal: abortController.signal
      });
      onResponse?.(response);
      if (!response.ok || !response.body) {
        throw new Error(`API Error: ${response.status} ${response.statusText}`);
      }
      const reader = response.body.getReader();
      const decoder = new TextDecoder();
      let fullContent = "";
      let currentToolCalls = [];
      while (true) {
        const { done, value } = await reader.read();
        if (done) break;
        const chunk = decoder.decode(value, { stream: true });
        // NOTE(review): assumes each SSE event arrives within one read; an
        // event split across chunk boundaries would fail JSON.parse and be
        // dropped by the empty catch below.
        const lines = chunk.split("\n").filter((line) => line.trim() !== "");
        for (const line of lines) {
          if (line.startsWith("data: ")) {
            const data = line.slice(6);
            if (data === "[DONE]") {
              continue;
            }
            try {
              const parsed = JSON.parse(data);
              if (parsed.choices?.[0]?.delta?.content) {
                const delta = parsed.choices[0].delta.content;
                fullContent += delta;
                currentMessage.value = { ...assistantMessage, content: fullContent };
                updateLastMessage({ content: fullContent });
                onChunk?.(delta, currentMessage.value);
              }
              if (parsed.choices?.[0]?.delta?.tool_calls) {
                // Tool calls arrive fragmented: merge argument deltas into the
                // matching id, or start a new entry when id + name appear.
                const toolCalls = parsed.choices[0].delta.tool_calls;
                for (const tc of toolCalls) {
                  const existingIndex = currentToolCalls.findIndex((t) => t.id === tc.id);
                  if (existingIndex >= 0) {
                    currentToolCalls[existingIndex] = {
                      ...currentToolCalls[existingIndex],
                      arguments: {
                        ...currentToolCalls[existingIndex].arguments,
                        ...tc.function?.arguments && JSON.parse(tc.function.arguments)
                      }
                    };
                  } else if (tc.id && tc.function?.name) {
                    currentToolCalls.push({
                      id: tc.id,
                      type: "function",
                      name: tc.function.name,
                      arguments: tc.function.arguments ? JSON.parse(tc.function.arguments) : {}
                    });
                  }
                }
                updateLastMessage({ toolCalls: [...currentToolCalls] });
              }
            } catch {
            }
          }
        }
        // Artificial pacing between chunks (UI smoothing).
        if (streamInterval > 0) {
          await new Promise((resolve) => setTimeout(resolve, streamInterval));
        }
      }
      // If the model requested tools: run them, then make a second
      // (non-streaming) call with the augmented history for the final answer.
      // NOTE(review): this follow-up fetch is not tied to abortController.
      if (currentToolCalls.length > 0) {
        updateLastMessage({ toolCalls: currentToolCalls });
        const toolResults = await executeTools(currentToolCalls);
        for (const result of toolResults) {
          messages.value.push(result);
        }
        const finalMessages = messages.value.map((m) => ({
          role: m.role,
          content: m.content,
          ...m.name && { name: m.name },
          ...m.toolCallId && { tool_call_id: m.toolCallId }
        }));
        const finalResponse = await fetch(api, {
          method: "POST",
          headers: {
            "Content-Type": "application/json",
            ...headers
          },
          body: JSON.stringify({
            messages: finalMessages,
            ...body
          })
        });
        if (finalResponse.ok) {
          const finalData = await finalResponse.json();
          const finalContent = finalData.content || finalData.message?.content || "";
          updateLastMessage({ content: finalContent });
          currentMessage.value = { ...assistantMessage, content: finalContent };
        }
      }
      const finalMessage = messages.value[messages.value.length - 1];
      onFinish?.(finalMessage);
    } catch (err) {
      // User-initiated aborts are not reported as errors.
      if (err.name !== "AbortError") {
        const errorObj = err instanceof Error ? err : new Error(String(err));
        error.value = errorObj;
        onError?.(errorObj);
      }
    } finally {
      isLoading.value = false;
      isStreaming.value = false;
      abortController = null;
    }
  };
  // Non-streaming send: single JSON round-trip, with the same tool-execution
  // follow-up call as the streaming path. Delegates to sendMessageStream when
  // the composable was created with stream: true.
  const sendMessage = async (content) => {
    if (stream) {
      return sendMessageStream(content);
    }
    if (!content.trim() || isLoading.value) return;
    error.value = null;
    isLoading.value = true;
    append(content, "user");
    input.value = "";
    try {
      onRequest?.(content);
      const allMessages = messages.value.map((m) => ({
        role: m.role,
        content: m.content,
        ...m.name && { name: m.name },
        ...m.toolCallId && { tool_call_id: m.toolCallId }
      }));
      const response = await fetch(api, {
        method: "POST",
        headers: {
          "Content-Type": "application/json",
          ...headers
        },
        body: JSON.stringify({
          messages: allMessages,
          tools: tools.length > 0 ? tools.map((t) => ({
            type: "function",
            function: {
              name: t.name,
              description: t.description || "",
              parameters: t.parameters || {}
            }
          })) : void 0,
          ...body
        })
      });
      onResponse?.(response);
      if (!response.ok) {
        throw new Error(`API Error: ${response.status} ${response.statusText}`);
      }
      const data = await response.json();
      let toolCalls = data.tool_calls || [];
      // Accepts either a top-level `content` or a nested `message.content`.
      let finalContent = data.content || data.message?.content || "";
      if (toolCalls.length > 0) {
        const toolResults = await executeTools(toolCalls);
        for (const result of toolResults) {
          messages.value.push(result);
        }
        const finalMessages = messages.value.map((m) => ({
          role: m.role,
          content: m.content,
          ...m.name && { name: m.name },
          ...m.toolCallId && { tool_call_id: m.toolCallId }
        }));
        const finalResponse = await fetch(api, {
          method: "POST",
          headers: {
            "Content-Type": "application/json",
            ...headers
          },
          body: JSON.stringify({
            messages: finalMessages,
            ...body
          })
        });
        if (finalResponse.ok) {
          const finalData = await finalResponse.json();
          finalContent = finalData.content || finalData.message?.content || "";
        }
      }
      const assistantMessage = append(finalContent, "assistant");
      if (toolCalls.length > 0) {
        updateLastMessage({ toolCalls });
      }
      onFinish?.(assistantMessage);
    } catch (err) {
      const errorObj = err instanceof Error ? err : new Error(String(err));
      error.value = errorObj;
      onError?.(errorObj);
    } finally {
      isLoading.value = false;
    }
  };
  return {
    messages,
    input,
    isLoading,
    isStreaming,
    error,
    currentMessage,
    sendMessage,
    sendMessageStream,
    stop,
    append,
    updateLastMessage,
    reload
  };
}
|
|
689
|
+
/**
 * Minimal text-streaming composable: POSTs { prompt } to `api` and appends
 * decoded response chunks to a reactive `content` ref. Aborts any in-flight
 * stream on restart and on component unmount.
 *
 * @param {object} options
 * @param {string} options.api - Endpoint URL.
 * @param {string} [options.initialContent=""] - Initial value for `content`.
 * @param {Function} [options.onChunk] - Called with each decoded chunk.
 * @param {Function} [options.onFinish] - Called with the full accumulated text.
 * @param {Function} [options.onError] - Called with non-abort errors.
 * @returns {{ content, isStreaming, error, start, stop }}
 */
export function useAIStream(options) {
  const { api, initialContent = "", onChunk, onFinish, onError } = options;
  const content = shallowRef(initialContent);
  const isStreaming = ref(false);
  const error = ref(null);
  // Controller for the in-flight request; null when idle.
  let abortController = null;
  // Starts (or restarts) a stream; content is reset before each run.
  const start = async (prompt) => {
    if (isStreaming.value) {
      stop();
    }
    error.value = null;
    isStreaming.value = true;
    content.value = "";
    abortController = new AbortController();
    try {
      const response = await fetch(api, {
        method: "POST",
        headers: {
          "Content-Type": "application/json"
        },
        body: JSON.stringify({ prompt }),
        signal: abortController.signal
      });
      if (!response.ok || !response.body) {
        throw new Error(`Stream Error: ${response.status}`);
      }
      // Read raw body chunks and append them as they arrive.
      const reader = response.body.getReader();
      const decoder = new TextDecoder();
      while (true) {
        const { done, value } = await reader.read();
        if (done) break;
        const chunk = decoder.decode(value, { stream: true });
        content.value += chunk;
        onChunk?.(chunk);
      }
      onFinish?.(content.value);
    } catch (err) {
      // User-initiated aborts are not surfaced as errors.
      if (err.name !== "AbortError") {
        const errorObj = err instanceof Error ? err : new Error(String(err));
        error.value = errorObj;
        onError?.(errorObj);
      }
    } finally {
      isStreaming.value = false;
      abortController = null;
    }
  };
  // Aborts the in-flight stream. NOTE(review): abortController is not nulled
  // here; start()'s finally block clears it once the aborted fetch settles.
  const stop = () => {
    if (abortController) {
      abortController.abort();
      isStreaming.value = false;
    }
  };
  // Ensure the stream is torn down with the component.
  onUnmounted(() => {
    stop();
  });
  return {
    content,
    isStreaming,
    error,
    start,
    stop
  };
}
|
|
753
|
+
/**
 * Normalizes a tool definition into the function-tool shape used by
 * useAIChat: always carries type "function", with description defaulting to
 * "" and parameters to {} (any falsy value is replaced, matching `||`).
 * @param {{name: string, description?: string, parameters?: object, execute: Function}} tool
 * @returns {{type: "function", name, description, parameters, execute}}
 */
export function createYHFunctionTool(tool) {
  const description = tool.description || "";
  const parameters = tool.parameters || {};
  return {
    type: "function",
    name: tool.name,
    description,
    parameters,
    execute: tool.execute
  };
}
|
|
762
|
+
/**
 * Built-in provider presets keyed by lowercase provider name.
 * Each entry records the API base URL, a default model id, and capability
 * flags; consumed by getProviderPreset and createProviderAdapter.
 */
export const PROVIDER_PRESETS = {
  openai: {
    name: "openai",
    baseUrl: "https://api.openai.com/v1",
    defaultModel: "gpt-4",
    supportsStreaming: true,
    supportsFunctionCalling: true
  },
  anthropic: {
    name: "anthropic",
    baseUrl: "https://api.anthropic.com/v1",
    defaultModel: "claude-3-5-sonnet-20241022",
    supportsStreaming: true,
    supportsFunctionCalling: true
  },
  google: {
    name: "google",
    baseUrl: "https://generativelanguage.googleapis.com/v1",
    defaultModel: "gemini-1.5-pro",
    supportsStreaming: true,
    supportsFunctionCalling: true
  },
  deepseek: {
    name: "deepseek",
    baseUrl: "https://api.deepseek.com/v1",
    defaultModel: "deepseek-chat",
    supportsStreaming: true,
    supportsFunctionCalling: true
  },
  // Local Ollama daemon at its default port.
  ollama: {
    name: "ollama",
    baseUrl: "http://localhost:11434/v1",
    defaultModel: "llama2",
    supportsStreaming: true,
    supportsFunctionCalling: false
  },
  // Azure endpoints are deployment-specific, so no usable defaults exist.
  azure: {
    name: "azure",
    baseUrl: "",
    // requires configuration
    defaultModel: "",
    supportsStreaming: true,
    supportsFunctionCalling: true,
    needsProjectId: true
  },
  moonshot: {
    name: "moonshot",
    baseUrl: "https://api.moonshot.cn/v1",
    defaultModel: "moonshot-v1-8k",
    supportsStreaming: true,
    supportsFunctionCalling: true
  },
  minimax: {
    name: "minimax",
    baseUrl: "https://api.minimax.chat/v1",
    defaultModel: "abab6.5s-chat",
    supportsStreaming: true,
    supportsFunctionCalling: true
  },
  zhipu: {
    name: "zhipu",
    baseUrl: "https://open.bigmodel.cn/api/paas/v4",
    defaultModel: "glm-4",
    supportsStreaming: true,
    supportsFunctionCalling: true
  },
  siliconflow: {
    name: "siliconflow",
    baseUrl: "https://api.siliconflow.cn/v1",
    defaultModel: "Qwen/Qwen2-7B-Instruct",
    supportsStreaming: true,
    supportsFunctionCalling: true
  },
  together: {
    name: "together",
    baseUrl: "https://api.together.ai/v1",
    defaultModel: "meta-llama/Llama-3-70b-chat-hf",
    supportsStreaming: true,
    supportsFunctionCalling: true
  },
  novita: {
    name: "novita",
    baseUrl: "https://api.novita.ai/v3",
    defaultModel: "meta-llama/llama-3.1-70b-instruct",
    supportsStreaming: true,
    supportsFunctionCalling: true
  }
};
|
|
850
|
+
/**
 * Case-insensitive lookup into PROVIDER_PRESETS.
 * @param {string} name - Provider name (any casing).
 * @returns {object|undefined} The preset, or undefined when unknown.
 */
export function getProviderPreset(name) {
  const key = name.toLowerCase();
  return PROVIDER_PRESETS[key];
}
|
|
853
|
+
/**
 * Builds a provider adapter. Accepts either a preset-style config
 * ({ provider, apiKey } — resolved against PROVIDER_PRESETS) or an explicit
 * config carrying its own baseUrl, which is used verbatim.
 * @param {object} config - Preset reference or full provider config.
 * @returns {object} The resolved config plus a createChat(model) factory.
 * @throws {Error} When a preset-style config names an unknown provider.
 */
export function createProviderAdapter(config) {
  const usesPreset = "provider" in config && !("baseUrl" in config);
  let resolved;
  if (usesPreset) {
    const preset = PROVIDER_PRESETS[config.provider.toLowerCase()];
    if (!preset) {
      throw new Error(`Unknown provider: ${config.provider}`);
    }
    resolved = {
      name: preset.name,
      baseUrl: preset.baseUrl,
      defaultModel: preset.defaultModel,
      apiKey: config.apiKey
    };
  } else {
    resolved = config;
  }
  // createChat accepts a model-id string or a partial model config object;
  // its fields are spread last so they can override the adapter defaults.
  const createChat = (model) => {
    const modelConfig = typeof model === "string" ? { model } : model || {};
    return {
      provider: resolved.name,
      baseUrl: resolved.baseUrl,
      apiKey: resolved.apiKey,
      ...modelConfig
    };
  };
  return { ...resolved, createChat };
}
|
|
882
|
+
/**
 * Vercel-AI-style provider shim: exposes a languageModel(modelId) factory
 * that bundles the provider name, model id, and config into one descriptor.
 * @param {string} provider - Provider name.
 * @param {object} config - Provider configuration, passed through unchanged.
 * @returns {{ languageModel: (modelId: string) => {provider, modelId, config} }}
 */
export function createVercelAIProvider(provider, config) {
  const languageModel = (modelId) => ({ provider, modelId, config });
  return { languageModel };
}
|
|
893
|
+
// Injection key for sharing the AI context (see createAIContext) via
// Vue provide/inject.
const AIContextKey = Symbol("yh-ai-context");
|
|
894
|
+
/**
 * Creates a reactive AI context holding the active session id, provider, and
 * model config, plus setters for each. Intended to be provided under
 * AIContextKey.
 * @param {object} [initialProvider] - Starting provider (defaults to null).
 * @param {string} [initialModel] - Starting model id (defaults to "gpt-4").
 */
export function createAIContext(initialProvider, initialModel) {
  const sessionId = ref(null);
  const provider = ref(initialProvider || null);
  const modelConfig = ref({ model: initialModel || "gpt-4" });
  const setSession = (id) => {
    sessionId.value = id;
  };
  const setProvider = (p) => {
    provider.value = p;
  };
  const setModel = (config) => {
    modelConfig.value = config;
  };
  return {
    sessionId,
    provider,
    modelConfig,
    setSession,
    setProvider,
    setModel
  };
}
|
|
913
|
+
export { AIContextKey };
|