js-agent-core 1.0.1
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/LICENSE +15 -0
- package/README.md +29 -0
- package/dist/core/AgentDashboard.d.ts +33 -0
- package/dist/core/AgentDashboard.d.ts.map +1 -0
- package/dist/core/AgentDashboard.js +477 -0
- package/dist/core/AgentDashboard.js.map +1 -0
- package/dist/core/AutoVectorStore.d.ts +23 -0
- package/dist/core/AutoVectorStore.d.ts.map +1 -0
- package/dist/core/AutoVectorStore.js +55 -0
- package/dist/core/AutoVectorStore.js.map +1 -0
- package/dist/core/BaseAgent.d.ts +70 -0
- package/dist/core/BaseAgent.d.ts.map +1 -0
- package/dist/core/BaseAgent.js +583 -0
- package/dist/core/BaseAgent.js.map +1 -0
- package/dist/core/EventEmitter.d.ts +8 -0
- package/dist/core/EventEmitter.d.ts.map +1 -0
- package/dist/core/EventEmitter.js +32 -0
- package/dist/core/EventEmitter.js.map +1 -0
- package/dist/core/LocalEmbedder.d.ts +25 -0
- package/dist/core/LocalEmbedder.d.ts.map +1 -0
- package/dist/core/LocalEmbedder.js +62 -0
- package/dist/core/LocalEmbedder.js.map +1 -0
- package/dist/core/LongTermMemory.d.ts +30 -0
- package/dist/core/LongTermMemory.d.ts.map +1 -0
- package/dist/core/LongTermMemory.js +123 -0
- package/dist/core/LongTermMemory.js.map +1 -0
- package/dist/core/MemoryVectorStore.d.ts +17 -0
- package/dist/core/MemoryVectorStore.d.ts.map +1 -0
- package/dist/core/MemoryVectorStore.js +44 -0
- package/dist/core/MemoryVectorStore.js.map +1 -0
- package/dist/core/OpenAIProvider.d.ts +21 -0
- package/dist/core/OpenAIProvider.d.ts.map +1 -0
- package/dist/core/OpenAIProvider.js +254 -0
- package/dist/core/OpenAIProvider.js.map +1 -0
- package/dist/core/SimpleMemory.d.ts +12 -0
- package/dist/core/SimpleMemory.d.ts.map +1 -0
- package/dist/core/SimpleMemory.js +27 -0
- package/dist/core/SimpleMemory.js.map +1 -0
- package/dist/core/StructuredPlanner.d.ts +13 -0
- package/dist/core/StructuredPlanner.d.ts.map +1 -0
- package/dist/core/StructuredPlanner.js +156 -0
- package/dist/core/StructuredPlanner.js.map +1 -0
- package/dist/core/ToolRegistry.d.ts +18 -0
- package/dist/core/ToolRegistry.d.ts.map +1 -0
- package/dist/core/ToolRegistry.js +74 -0
- package/dist/core/ToolRegistry.js.map +1 -0
- package/dist/core/index.d.ts +14 -0
- package/dist/core/index.d.ts.map +1 -0
- package/dist/core/index.js +14 -0
- package/dist/core/index.js.map +1 -0
- package/dist/core/logging/CompositeLogger.d.ts +8 -0
- package/dist/core/logging/CompositeLogger.d.ts.map +1 -0
- package/dist/core/logging/CompositeLogger.js +23 -0
- package/dist/core/logging/CompositeLogger.js.map +1 -0
- package/dist/core/logging/NodeFsLogger.d.ts +12 -0
- package/dist/core/logging/NodeFsLogger.d.ts.map +1 -0
- package/dist/core/logging/NodeFsLogger.js +46 -0
- package/dist/core/logging/NodeFsLogger.js.map +1 -0
- package/dist/core/logging/WebIndexedDbLogger.d.ts +15 -0
- package/dist/core/logging/WebIndexedDbLogger.d.ts.map +1 -0
- package/dist/core/logging/WebIndexedDbLogger.js +65 -0
- package/dist/core/logging/WebIndexedDbLogger.js.map +1 -0
- package/dist/core/logging/index.d.ts +11 -0
- package/dist/core/logging/index.d.ts.map +1 -0
- package/dist/core/logging/index.js +33 -0
- package/dist/core/logging/index.js.map +1 -0
- package/dist/core/persistence/NodeFsSkillStore.d.ts +17 -0
- package/dist/core/persistence/NodeFsSkillStore.d.ts.map +1 -0
- package/dist/core/persistence/NodeFsSkillStore.js +124 -0
- package/dist/core/persistence/NodeFsSkillStore.js.map +1 -0
- package/dist/core/persistence/NodeFsVectorStore.d.ts +25 -0
- package/dist/core/persistence/NodeFsVectorStore.d.ts.map +1 -0
- package/dist/core/persistence/NodeFsVectorStore.js +74 -0
- package/dist/core/persistence/NodeFsVectorStore.js.map +1 -0
- package/dist/core/persistence/SkillLoader.d.ts +26 -0
- package/dist/core/persistence/SkillLoader.d.ts.map +1 -0
- package/dist/core/persistence/SkillLoader.js +144 -0
- package/dist/core/persistence/SkillLoader.js.map +1 -0
- package/dist/core/persistence/WebIndexedDbSkillStore.d.ts +21 -0
- package/dist/core/persistence/WebIndexedDbSkillStore.d.ts.map +1 -0
- package/dist/core/persistence/WebIndexedDbSkillStore.js +119 -0
- package/dist/core/persistence/WebIndexedDbSkillStore.js.map +1 -0
- package/dist/core/persistence/WebIndexedDbVectorStore.d.ts +30 -0
- package/dist/core/persistence/WebIndexedDbVectorStore.d.ts.map +1 -0
- package/dist/core/persistence/WebIndexedDbVectorStore.js +87 -0
- package/dist/core/persistence/WebIndexedDbVectorStore.js.map +1 -0
- package/dist/core/persistence/index.d.ts +6 -0
- package/dist/core/persistence/index.d.ts.map +1 -0
- package/dist/core/persistence/index.js +21 -0
- package/dist/core/persistence/index.js.map +1 -0
- package/dist/index.d.ts +3 -0
- package/dist/index.d.ts.map +1 -0
- package/dist/index.js +3 -0
- package/dist/index.js.map +1 -0
- package/dist/interfaces/IAgentEvent.d.ts +27 -0
- package/dist/interfaces/IAgentEvent.d.ts.map +1 -0
- package/dist/interfaces/IAgentEvent.js +2 -0
- package/dist/interfaces/IAgentEvent.js.map +1 -0
- package/dist/interfaces/IAgentState.d.ts +22 -0
- package/dist/interfaces/IAgentState.d.ts.map +1 -0
- package/dist/interfaces/IAgentState.js +2 -0
- package/dist/interfaces/IAgentState.js.map +1 -0
- package/dist/interfaces/ILLMProvider.d.ts +43 -0
- package/dist/interfaces/ILLMProvider.d.ts.map +1 -0
- package/dist/interfaces/ILLMProvider.js +2 -0
- package/dist/interfaces/ILLMProvider.js.map +1 -0
- package/dist/interfaces/ILogger.d.ts +15 -0
- package/dist/interfaces/ILogger.d.ts.map +1 -0
- package/dist/interfaces/ILogger.js +2 -0
- package/dist/interfaces/ILogger.js.map +1 -0
- package/dist/interfaces/IMemory.d.ts +32 -0
- package/dist/interfaces/IMemory.d.ts.map +1 -0
- package/dist/interfaces/IMemory.js +2 -0
- package/dist/interfaces/IMemory.js.map +1 -0
- package/dist/interfaces/IPlanner.d.ts +20 -0
- package/dist/interfaces/IPlanner.d.ts.map +1 -0
- package/dist/interfaces/IPlanner.js +2 -0
- package/dist/interfaces/IPlanner.js.map +1 -0
- package/dist/interfaces/ISkill.d.ts +9 -0
- package/dist/interfaces/ISkill.d.ts.map +1 -0
- package/dist/interfaces/ISkill.js +2 -0
- package/dist/interfaces/ISkill.js.map +1 -0
- package/dist/interfaces/ISkillStore.d.ts +53 -0
- package/dist/interfaces/ISkillStore.d.ts.map +1 -0
- package/dist/interfaces/ISkillStore.js +2 -0
- package/dist/interfaces/ISkillStore.js.map +1 -0
- package/dist/interfaces/ITool.d.ts +32 -0
- package/dist/interfaces/ITool.d.ts.map +1 -0
- package/dist/interfaces/ITool.js +2 -0
- package/dist/interfaces/ITool.js.map +1 -0
- package/dist/interfaces/index.d.ts +10 -0
- package/dist/interfaces/index.d.ts.map +1 -0
- package/dist/interfaces/index.js +10 -0
- package/dist/interfaces/index.js.map +1 -0
- package/package.json +47 -0
- package/src/core/AgentDashboard.ts +533 -0
- package/src/core/AutoVectorStore.ts +60 -0
- package/src/core/BaseAgent.ts +676 -0
- package/src/core/EventEmitter.ts +35 -0
- package/src/core/LocalEmbedder.ts +68 -0
- package/src/core/LongTermMemory.ts +146 -0
- package/src/core/MemoryVectorStore.ts +54 -0
- package/src/core/OpenAIProvider.ts +274 -0
- package/src/core/SimpleMemory.ts +31 -0
- package/src/core/StructuredPlanner.ts +165 -0
- package/src/core/ToolRegistry.ts +89 -0
- package/src/core/index.ts +16 -0
- package/src/core/logging/CompositeLogger.ts +26 -0
- package/src/core/logging/NodeFsLogger.ts +53 -0
- package/src/core/logging/WebIndexedDbLogger.ts +76 -0
- package/src/core/logging/index.ts +35 -0
- package/src/core/persistence/NodeFsSkillStore.ts +138 -0
- package/src/core/persistence/NodeFsVectorStore.ts +86 -0
- package/src/core/persistence/SkillLoader.ts +153 -0
- package/src/core/persistence/WebIndexedDbSkillStore.ts +139 -0
- package/src/core/persistence/WebIndexedDbVectorStore.ts +106 -0
- package/src/core/persistence/index.ts +22 -0
- package/src/index.ts +2 -0
- package/src/interfaces/IAgentEvent.ts +46 -0
- package/src/interfaces/IAgentState.ts +22 -0
- package/src/interfaces/ILLMProvider.ts +47 -0
- package/src/interfaces/ILogger.ts +16 -0
- package/src/interfaces/IMemory.ts +29 -0
- package/src/interfaces/IPlanner.ts +22 -0
- package/src/interfaces/ISkill.ts +9 -0
- package/src/interfaces/ISkillStore.ts +60 -0
- package/src/interfaces/ITool.ts +38 -0
- package/src/interfaces/index.ts +10 -0
|
@@ -0,0 +1,35 @@
|
|
|
1
|
+
import { IEventEmitter, AgentEventType } from '../interfaces/IAgentEvent.js';
|
|
2
|
+
|
|
3
|
+
export class SimpleEventEmitter implements IEventEmitter {
|
|
4
|
+
private listeners: Map<AgentEventType, Set<(payload: any) => void>> = new Map();
|
|
5
|
+
|
|
6
|
+
on<T = any>(event: AgentEventType, listener: (payload: T) => void): void {
|
|
7
|
+
if (!this.listeners.has(event)) {
|
|
8
|
+
this.listeners.set(event, new Set());
|
|
9
|
+
}
|
|
10
|
+
this.listeners.get(event)!.add(listener);
|
|
11
|
+
}
|
|
12
|
+
|
|
13
|
+
off<T = any>(event: AgentEventType, listener: (payload: T) => void): void {
|
|
14
|
+
const callbacks = this.listeners.get(event);
|
|
15
|
+
if (callbacks) {
|
|
16
|
+
callbacks.delete(listener);
|
|
17
|
+
if (callbacks.size === 0) {
|
|
18
|
+
this.listeners.delete(event);
|
|
19
|
+
}
|
|
20
|
+
}
|
|
21
|
+
}
|
|
22
|
+
|
|
23
|
+
emit<T = any>(event: AgentEventType, payload: T): void {
|
|
24
|
+
const callbacks = this.listeners.get(event);
|
|
25
|
+
if (callbacks) {
|
|
26
|
+
callbacks.forEach(listener => {
|
|
27
|
+
try {
|
|
28
|
+
listener(payload);
|
|
29
|
+
} catch (error) {
|
|
30
|
+
console.error(`Error in event listener for ${event}:`, error);
|
|
31
|
+
}
|
|
32
|
+
});
|
|
33
|
+
}
|
|
34
|
+
}
|
|
35
|
+
}
|
|
@@ -0,0 +1,68 @@
|
|
|
1
|
+
import { IEmbedder } from '../interfaces/index.js';
|
|
2
|
+
|
|
3
|
+
/**
|
|
4
|
+
* A lightweight, local embedder that uses @huggingface/transformers (formerly transformers.js).
|
|
5
|
+
* It supports dynamic loading to keep the core SDK lightweight.
|
|
6
|
+
*/
|
|
7
|
+
export class LocalEmbedder implements IEmbedder {
|
|
8
|
+
private modelName: string;
|
|
9
|
+
private extractor: any = null;
|
|
10
|
+
private isLoading: boolean = false;
|
|
11
|
+
private loadPromise: Promise<void> | null = null;
|
|
12
|
+
|
|
13
|
+
constructor(modelName: string = 'Xenova/all-MiniLM-L6-v2') {
|
|
14
|
+
this.modelName = modelName;
|
|
15
|
+
}
|
|
16
|
+
|
|
17
|
+
/**
|
|
18
|
+
* Lazy-loads the transformers library and the model.
|
|
19
|
+
*/
|
|
20
|
+
private async loadModel(): Promise<void> {
|
|
21
|
+
if (this.extractor) return;
|
|
22
|
+
if (this.isLoading) return this.loadPromise!;
|
|
23
|
+
|
|
24
|
+
this.isLoading = true;
|
|
25
|
+
this.loadPromise = (async () => {
|
|
26
|
+
try {
|
|
27
|
+
// Dynamic import to avoid bundling @huggingface/transformers by default.
|
|
28
|
+
// @ts-ignore - optional dependency that might not be present at compile time
|
|
29
|
+
const { pipeline } = await import('@huggingface/transformers');
|
|
30
|
+
|
|
31
|
+
this.extractor = await pipeline('feature-extraction', this.modelName, {
|
|
32
|
+
// Optimization for browser/Node environment if needed
|
|
33
|
+
// e.g., device: 'webgpu' or 'wasm'
|
|
34
|
+
});
|
|
35
|
+
} catch (error) {
|
|
36
|
+
this.isLoading = false;
|
|
37
|
+
throw new Error(`Failed to load local embedding model (${this.modelName}): ${error instanceof Error ? error.message : String(error)}. Please ensure '@huggingface/transformers' is installed.`);
|
|
38
|
+
} finally {
|
|
39
|
+
this.isLoading = false;
|
|
40
|
+
}
|
|
41
|
+
})();
|
|
42
|
+
|
|
43
|
+
return this.loadPromise;
|
|
44
|
+
}
|
|
45
|
+
|
|
46
|
+
/**
|
|
47
|
+
* Generates an embedding for the given text.
|
|
48
|
+
*/
|
|
49
|
+
async embed(text: string): Promise<number[]> {
|
|
50
|
+
await this.loadModel();
|
|
51
|
+
|
|
52
|
+
const output = await this.extractor(text, {
|
|
53
|
+
pooling: 'mean',
|
|
54
|
+
normalize: true,
|
|
55
|
+
});
|
|
56
|
+
|
|
57
|
+
// transformers.js output is a Tensor, we convert it to a regular array
|
|
58
|
+
return Array.from(output.data) as number[];
|
|
59
|
+
}
|
|
60
|
+
|
|
61
|
+
/**
|
|
62
|
+
* Clear the cached model from memory.
|
|
63
|
+
*/
|
|
64
|
+
async dispose(): Promise<void> {
|
|
65
|
+
this.extractor = null;
|
|
66
|
+
this.loadPromise = null;
|
|
67
|
+
}
|
|
68
|
+
}
|
|
@@ -0,0 +1,146 @@
|
|
|
1
|
+
import { ILongTermMemory, IVectorStore, ILLMProvider, LLMMessage, IEmbedder } from '../interfaces/index.js';
|
|
2
|
+
import { AutoVectorStore } from './AutoVectorStore.js';
|
|
3
|
+
|
|
4
|
+
/** Configuration for the LongTermMemory conversation memory. */
export interface LongTermMemoryOptions {
  /** LLM provider; its optional `embed()` is used when no dedicated embedder is given. */
  provider: ILLMProvider;
  /** Vector store for long-term memories; one is created automatically when omitted. */
  vectorStore?: IVectorStore;
  /** Dedicated embedder; takes precedence over `provider.embed` when present. */
  embedder?: IEmbedder;
  /** Number of vector-search results to retrieve per recall (default 3). */
  topK?: number;
  /** Minimum similarity score for a vector result to be recalled (default 0.7). */
  minScore?: number;
  /** Sliding-window cap on the in-memory chat history (default 20 messages). */
  maxWindowSize?: number;
}
|
|
12
|
+
|
|
13
|
+
export class LongTermMemory implements ILongTermMemory {
|
|
14
|
+
private provider: ILLMProvider;
|
|
15
|
+
private vectorStore: IVectorStore;
|
|
16
|
+
private embedder?: IEmbedder;
|
|
17
|
+
private topK: number;
|
|
18
|
+
private minScore: number;
|
|
19
|
+
private maxWindowSize: number;
|
|
20
|
+
private history: LLMMessage[] = [];
|
|
21
|
+
|
|
22
|
+
constructor(options: LongTermMemoryOptions) {
|
|
23
|
+
this.provider = options.provider;
|
|
24
|
+
this.vectorStore = options.vectorStore || new AutoVectorStore();
|
|
25
|
+
this.embedder = options.embedder;
|
|
26
|
+
this.topK = options.topK || 3;
|
|
27
|
+
this.minScore = options.minScore || 0.7;
|
|
28
|
+
this.maxWindowSize = options.maxWindowSize || 20; // 默认保留最近 20 条消息
|
|
29
|
+
}
|
|
30
|
+
|
|
31
|
+
async add(message: LLMMessage): Promise<void> {
|
|
32
|
+
this.history.push(message);
|
|
33
|
+
|
|
34
|
+
// 实现滑动窗口,保留最近的消息,但始终保留第一条系统提示词(如果有)
|
|
35
|
+
if (this.history.length > this.maxWindowSize) {
|
|
36
|
+
const systemMessage = this.history[0].role === 'system' ? this.history[0] : null;
|
|
37
|
+
this.history = this.history.slice(-this.maxWindowSize);
|
|
38
|
+
if (systemMessage && this.history[0] !== systemMessage) {
|
|
39
|
+
this.history[0] = systemMessage;
|
|
40
|
+
}
|
|
41
|
+
}
|
|
42
|
+
|
|
43
|
+
// Auto-remember high-value user/assistant interactions
|
|
44
|
+
if (message.role === 'user' || message.role === 'assistant') {
|
|
45
|
+
if (this.isHighValueContent(message.content)) {
|
|
46
|
+
await this.remember(message.content);
|
|
47
|
+
}
|
|
48
|
+
}
|
|
49
|
+
}
|
|
50
|
+
|
|
51
|
+
/**
|
|
52
|
+
* 简单的启发式判断内容是否具有长期保存价值
|
|
53
|
+
*/
|
|
54
|
+
private isHighValueContent(text: string): boolean {
|
|
55
|
+
if (!text) return false;
|
|
56
|
+
const trimmed = text.trim();
|
|
57
|
+
// 过滤掉太短的消息 (如 "ok", "yes", "thanks")
|
|
58
|
+
if (trimmed.length < 5) return false;
|
|
59
|
+
// 过滤掉纯语气词或无意义短语
|
|
60
|
+
const lowValueWords = [
|
|
61
|
+
'ok', 'yes', 'no', 'thanks', 'thank you', 'thanks a lot',
|
|
62
|
+
'got it', 'understand', 'fine', 'good', 'nice', 'cool'
|
|
63
|
+
];
|
|
64
|
+
if (lowValueWords.includes(trimmed.toLowerCase().replace(/[.!?,]/g, ''))) return false;
|
|
65
|
+
|
|
66
|
+
return true;
|
|
67
|
+
}
|
|
68
|
+
|
|
69
|
+
async getHistory(): Promise<LLMMessage[]> {
|
|
70
|
+
return [...this.history];
|
|
71
|
+
}
|
|
72
|
+
|
|
73
|
+
async clear(): Promise<void> {
|
|
74
|
+
this.history = [];
|
|
75
|
+
}
|
|
76
|
+
|
|
77
|
+
async remember(text: string): Promise<void> {
|
|
78
|
+
let embeddings: number[][] | undefined;
|
|
79
|
+
|
|
80
|
+
if (this.embedder) {
|
|
81
|
+
const singleEmbedding = await this.embedder.embed(text);
|
|
82
|
+
embeddings = [singleEmbedding];
|
|
83
|
+
} else if (this.provider.embed) {
|
|
84
|
+
embeddings = await this.provider.embed(text);
|
|
85
|
+
} else {
|
|
86
|
+
// If no embedder or provider doesn't support embeddings, we can't do vector storage
|
|
87
|
+
return;
|
|
88
|
+
}
|
|
89
|
+
|
|
90
|
+
if (embeddings && embeddings.length > 0) {
|
|
91
|
+
await this.vectorStore.add(text, embeddings[0], { timestamp: Date.now() });
|
|
92
|
+
}
|
|
93
|
+
}
|
|
94
|
+
|
|
95
|
+
async recall(query: string): Promise<string[]> {
|
|
96
|
+
// 1. Vector Search (If embedder or provider supports embeddings)
|
|
97
|
+
let vectorResults: string[] = [];
|
|
98
|
+
let embeddings: number[][] | undefined;
|
|
99
|
+
|
|
100
|
+
if (this.embedder) {
|
|
101
|
+
const singleEmbedding = await this.embedder.embed(query);
|
|
102
|
+
embeddings = [singleEmbedding];
|
|
103
|
+
} else if (this.provider.embed) {
|
|
104
|
+
embeddings = await this.provider.embed(query);
|
|
105
|
+
}
|
|
106
|
+
|
|
107
|
+
if (embeddings && embeddings.length > 0) {
|
|
108
|
+
const results = await this.vectorStore.search(embeddings[0], this.topK);
|
|
109
|
+
vectorResults = results
|
|
110
|
+
.filter(r => r.score >= this.minScore)
|
|
111
|
+
.map(r => r.text);
|
|
112
|
+
}
|
|
113
|
+
|
|
114
|
+
// 2. Keyword Search (Hybrid) - Always available if vectorStore supports it
|
|
115
|
+
let keywordResults: string[] = [];
|
|
116
|
+
if (this.vectorStore.searchByKeyword) {
|
|
117
|
+
// 提取可能的关键字 (例如: 超过 3 个字符的单词,排除标点符号)
|
|
118
|
+
const keywords = query
|
|
119
|
+
.replace(/[.,\/#!$%\^&\*;:{}=\-_`~()]/g, "")
|
|
120
|
+
.split(/\s+/)
|
|
121
|
+
.filter(w => w.length > 3);
|
|
122
|
+
|
|
123
|
+
for (const kw of keywords) {
|
|
124
|
+
const results = await this.vectorStore.searchByKeyword(kw, 2);
|
|
125
|
+
keywordResults.push(...results.map(r => r.text));
|
|
126
|
+
}
|
|
127
|
+
|
|
128
|
+
// 也尝试全文匹配
|
|
129
|
+
if (query.length > 5) {
|
|
130
|
+
const results = await this.vectorStore.searchByKeyword(query, 2);
|
|
131
|
+
keywordResults.push(...results.map(r => r.text));
|
|
132
|
+
}
|
|
133
|
+
}
|
|
134
|
+
|
|
135
|
+
// 3. Fusion & Deduplication
|
|
136
|
+
const combined = [...new Set([...keywordResults, ...vectorResults])];
|
|
137
|
+
return combined.slice(0, this.topK * 2);
|
|
138
|
+
}
|
|
139
|
+
|
|
140
|
+
async getContext(input: string): Promise<string> {
|
|
141
|
+
const memories = await this.recall(input);
|
|
142
|
+
if (memories.length === 0) return '';
|
|
143
|
+
|
|
144
|
+
return `Relevant context from memory (Hybrid Search):\n${memories.join('\n---\n')}`;
|
|
145
|
+
}
|
|
146
|
+
}
|
|
@@ -0,0 +1,54 @@
|
|
|
1
|
+
import { IVectorStore } from '../interfaces/index.js';
|
|
2
|
+
|
|
3
|
+
export class MemoryVectorStore implements IVectorStore {
|
|
4
|
+
private items: Array<{ text: string; embedding: number[]; metadata?: Record<string, any> }> = [];
|
|
5
|
+
|
|
6
|
+
async add(text: string, embedding: number[], metadata?: Record<string, any>): Promise<void> {
|
|
7
|
+
this.items.push({ text, embedding, metadata });
|
|
8
|
+
}
|
|
9
|
+
|
|
10
|
+
async search(queryEmbedding: number[], topK: number = 3): Promise<Array<{ text: string; score: number; metadata?: Record<string, any> }>> {
|
|
11
|
+
const scoredItems = this.items.map(item => ({
|
|
12
|
+
...item,
|
|
13
|
+
score: this.cosineSimilarity(queryEmbedding, item.embedding)
|
|
14
|
+
}));
|
|
15
|
+
|
|
16
|
+
// Sort by score descending and take topK
|
|
17
|
+
return scoredItems
|
|
18
|
+
.sort((a, b) => b.score - a.score)
|
|
19
|
+
.slice(0, topK)
|
|
20
|
+
.map(({ text, score, metadata }) => ({ text, score, metadata }));
|
|
21
|
+
}
|
|
22
|
+
|
|
23
|
+
async searchByKeyword(keyword: string, topK: number = 3): Promise<Array<{ text: string; score: number; metadata?: Record<string, any> }>> {
|
|
24
|
+
const lowKeyword = keyword.toLowerCase();
|
|
25
|
+
|
|
26
|
+
const results = this.items
|
|
27
|
+
.filter(item => item.text.toLowerCase().includes(lowKeyword))
|
|
28
|
+
.map(item => ({
|
|
29
|
+
text: item.text,
|
|
30
|
+
score: 1.0, // keyword matches are high priority
|
|
31
|
+
metadata: item.metadata
|
|
32
|
+
}))
|
|
33
|
+
.slice(0, topK);
|
|
34
|
+
|
|
35
|
+
return results;
|
|
36
|
+
}
|
|
37
|
+
|
|
38
|
+
private cosineSimilarity(vecA: number[], vecB: number[]): number {
|
|
39
|
+
if (vecA.length !== vecB.length) return 0;
|
|
40
|
+
|
|
41
|
+
let dotProduct = 0;
|
|
42
|
+
let normA = 0;
|
|
43
|
+
let normB = 0;
|
|
44
|
+
|
|
45
|
+
for (let i = 0; i < vecA.length; i++) {
|
|
46
|
+
dotProduct += vecA[i] * vecB[i];
|
|
47
|
+
normA += vecA[i] * vecA[i];
|
|
48
|
+
normB += vecB[i] * vecB[i];
|
|
49
|
+
}
|
|
50
|
+
|
|
51
|
+
const magnitude = Math.sqrt(normA) * Math.sqrt(normB);
|
|
52
|
+
return magnitude === 0 ? 0 : dotProduct / magnitude;
|
|
53
|
+
}
|
|
54
|
+
}
|
|
@@ -0,0 +1,274 @@
|
|
|
1
|
+
import { ILLMProvider, LLMMessage, LLMOptions, LLMResponse } from '../interfaces/index.js';
|
|
2
|
+
|
|
3
|
+
export class OpenAIProvider implements ILLMProvider {
|
|
4
|
+
private baseUrl: string;
|
|
5
|
+
private apiKey: string;
|
|
6
|
+
|
|
7
|
+
constructor(options: { baseUrl?: string; apiKey?: string } = {}) {
|
|
8
|
+
this.baseUrl = options.baseUrl || 'https://api.openai.com/v1';
|
|
9
|
+
this.apiKey = options.apiKey || '';
|
|
10
|
+
}
|
|
11
|
+
|
|
12
|
+
private buildUrl(options: LLMOptions): string {
|
|
13
|
+
const url = new URL(`${this.baseUrl}/chat/completions`);
|
|
14
|
+
if (options.intent) {
|
|
15
|
+
url.searchParams.append('intent', options.intent);
|
|
16
|
+
}
|
|
17
|
+
return url.toString();
|
|
18
|
+
}
|
|
19
|
+
|
|
20
|
+
async generate(messages: LLMMessage[], options: LLMOptions): Promise<LLMResponse> {
|
|
21
|
+
const response = await fetch(this.buildUrl(options), {
|
|
22
|
+
method: 'POST',
|
|
23
|
+
headers: {
|
|
24
|
+
'Content-Type': 'application/json',
|
|
25
|
+
'Authorization': `Bearer ${this.apiKey}`,
|
|
26
|
+
},
|
|
27
|
+
body: JSON.stringify({
|
|
28
|
+
model: options.model,
|
|
29
|
+
messages: messages.map(m => ({
|
|
30
|
+
role: m.role,
|
|
31
|
+
content: m.content,
|
|
32
|
+
name: m.name,
|
|
33
|
+
tool_call_id: m.tool_call_id,
|
|
34
|
+
})),
|
|
35
|
+
temperature: options.temperature ?? 0.7,
|
|
36
|
+
max_tokens: options.max_tokens,
|
|
37
|
+
tools: options.tools,
|
|
38
|
+
tool_choice: options.tools ? 'auto' : undefined,
|
|
39
|
+
stream: options.stream ?? false,
|
|
40
|
+
}),
|
|
41
|
+
});
|
|
42
|
+
|
|
43
|
+
if (!response.ok) {
|
|
44
|
+
const errorText = await response.text();
|
|
45
|
+
throw new Error(`OpenAI API error: ${response.status} ${errorText}`);
|
|
46
|
+
}
|
|
47
|
+
|
|
48
|
+
const data = await response.json();
|
|
49
|
+
const choice = data.choices[0];
|
|
50
|
+
const message = choice.message;
|
|
51
|
+
|
|
52
|
+
let toolCalls = message.tool_calls;
|
|
53
|
+
|
|
54
|
+
// Fallback: Parse tool calls from text content if standard tool_calls are missing
|
|
55
|
+
if ((!toolCalls || toolCalls.length === 0) && message.content) {
|
|
56
|
+
const toolCallMatch = message.content.match(/<tool_call>([\s\S]*?)<\/tool_call>/);
|
|
57
|
+
if (toolCallMatch) {
|
|
58
|
+
try {
|
|
59
|
+
const toolCallData = JSON.parse(toolCallMatch[1].trim());
|
|
60
|
+
toolCalls = [{
|
|
61
|
+
id: `call_${Date.now()}`,
|
|
62
|
+
type: 'function',
|
|
63
|
+
function: {
|
|
64
|
+
name: toolCallData.name,
|
|
65
|
+
arguments: JSON.stringify(toolCallData.arguments),
|
|
66
|
+
}
|
|
67
|
+
}];
|
|
68
|
+
} catch (e) {
|
|
69
|
+
// Silent catch or use agent.error event
|
|
70
|
+
}
|
|
71
|
+
}
|
|
72
|
+
}
|
|
73
|
+
|
|
74
|
+
return {
|
|
75
|
+
content: message.content || '',
|
|
76
|
+
toolCalls: toolCalls && toolCalls.length > 0 ? toolCalls : undefined,
|
|
77
|
+
};
|
|
78
|
+
}
|
|
79
|
+
|
|
80
|
+
async stream(
|
|
81
|
+
messages: LLMMessage[],
|
|
82
|
+
options: LLMOptions,
|
|
83
|
+
onChunk: (chunk: { content?: string; toolCalls?: any[] }) => void
|
|
84
|
+
): Promise<LLMResponse> {
|
|
85
|
+
const response = await fetch(this.buildUrl(options), {
|
|
86
|
+
method: 'POST',
|
|
87
|
+
headers: {
|
|
88
|
+
'Content-Type': 'application/json',
|
|
89
|
+
'Authorization': `Bearer ${this.apiKey}`,
|
|
90
|
+
},
|
|
91
|
+
body: JSON.stringify({
|
|
92
|
+
model: options.model,
|
|
93
|
+
messages: messages.map(m => ({
|
|
94
|
+
role: m.role,
|
|
95
|
+
content: m.content,
|
|
96
|
+
name: m.name,
|
|
97
|
+
tool_call_id: m.tool_call_id,
|
|
98
|
+
})),
|
|
99
|
+
temperature: options.temperature ?? 0.7,
|
|
100
|
+
max_tokens: options.max_tokens,
|
|
101
|
+
tools: options.tools,
|
|
102
|
+
tool_choice: options.tools ? 'auto' : undefined,
|
|
103
|
+
stream: true,
|
|
104
|
+
}),
|
|
105
|
+
});
|
|
106
|
+
|
|
107
|
+
if (!response.ok) {
|
|
108
|
+
const errorText = await response.text();
|
|
109
|
+
throw new Error(`OpenAI API error: ${response.status} ${errorText}`);
|
|
110
|
+
}
|
|
111
|
+
|
|
112
|
+
const reader = response.body?.getReader();
|
|
113
|
+
if (!reader) {
|
|
114
|
+
throw new Error('Response body is not readable');
|
|
115
|
+
}
|
|
116
|
+
|
|
117
|
+
const decoder = new TextDecoder();
|
|
118
|
+
let content = '';
|
|
119
|
+
let toolCalls: any[] = [];
|
|
120
|
+
let unfinishedLine = '';
|
|
121
|
+
let currentToolCallId = `call_${Date.now()}`;
|
|
122
|
+
|
|
123
|
+
while (true) {
|
|
124
|
+
const { done, value } = await reader.read();
|
|
125
|
+
if (done) break;
|
|
126
|
+
|
|
127
|
+
const chunk = unfinishedLine + decoder.decode(value, { stream: true });
|
|
128
|
+
const lines = chunk.split('\n');
|
|
129
|
+
unfinishedLine = lines.pop() || '';
|
|
130
|
+
|
|
131
|
+
for (const line of lines) {
|
|
132
|
+
const trimmedLine = line.trim();
|
|
133
|
+
if (!trimmedLine || trimmedLine === 'data: [DONE]') continue;
|
|
134
|
+
|
|
135
|
+
if (trimmedLine.startsWith('data: ')) {
|
|
136
|
+
try {
|
|
137
|
+
const data = JSON.parse(trimmedLine.slice(6));
|
|
138
|
+
const delta = data.choices[0]?.delta;
|
|
139
|
+
|
|
140
|
+
if (delta?.content) {
|
|
141
|
+
content += delta.content;
|
|
142
|
+
onChunk({ content: delta.content });
|
|
143
|
+
|
|
144
|
+
// 增量解析 fallback 模式下的 <tool_call>
|
|
145
|
+
if (content.includes('<tool_call>')) {
|
|
146
|
+
const toolCallMatch = content.match(/<tool_call>([\s\S]*?)<\/tool_call>/);
|
|
147
|
+
if (toolCallMatch) {
|
|
148
|
+
// 如果匹配到了完整的标签,说明已经结束,不需要在这里额外处理
|
|
149
|
+
// 这里的逻辑主要是为了能在流式过程中识别出工具调用
|
|
150
|
+
} else {
|
|
151
|
+
// 如果只匹配到了开标签但没有闭标签,说明正在流式输出工具调用内容
|
|
152
|
+
const startTag = content.indexOf('<tool_call>');
|
|
153
|
+
const partialJson = content.slice(startTag + 11).trim();
|
|
154
|
+
if (partialJson) {
|
|
155
|
+
try {
|
|
156
|
+
// 尝试解析部分 JSON,获取工具名
|
|
157
|
+
const partialData = this.parsePartialJson(partialJson);
|
|
158
|
+
if (partialData?.name) {
|
|
159
|
+
onChunk({
|
|
160
|
+
toolCalls: [{
|
|
161
|
+
index: 0,
|
|
162
|
+
id: currentToolCallId,
|
|
163
|
+
function: {
|
|
164
|
+
name: partialData.name,
|
|
165
|
+
arguments: JSON.stringify(partialData.arguments || {})
|
|
166
|
+
}
|
|
167
|
+
}]
|
|
168
|
+
});
|
|
169
|
+
}
|
|
170
|
+
} catch (e) {
|
|
171
|
+
// 忽略部分解析错误
|
|
172
|
+
}
|
|
173
|
+
}
|
|
174
|
+
}
|
|
175
|
+
}
|
|
176
|
+
}
|
|
177
|
+
|
|
178
|
+
if (delta?.tool_calls) {
|
|
179
|
+
onChunk({ toolCalls: delta.tool_calls });
|
|
180
|
+
for (const tc of delta.tool_calls) {
|
|
181
|
+
if (tc.index === undefined) continue;
|
|
182
|
+
if (!toolCalls[tc.index]) {
|
|
183
|
+
toolCalls[tc.index] = {
|
|
184
|
+
id: tc.id,
|
|
185
|
+
type: 'function',
|
|
186
|
+
function: { name: '', arguments: '' }
|
|
187
|
+
};
|
|
188
|
+
}
|
|
189
|
+
if (tc.id) toolCalls[tc.index].id = tc.id;
|
|
190
|
+
if (tc.function?.name) toolCalls[tc.index].function.name += tc.function.name;
|
|
191
|
+
if (tc.function?.arguments) toolCalls[tc.index].function.arguments += tc.function.arguments;
|
|
192
|
+
}
|
|
193
|
+
}
|
|
194
|
+
} catch (e) {
|
|
195
|
+
console.error('Error parsing SSE chunk:', e);
|
|
196
|
+
}
|
|
197
|
+
}
|
|
198
|
+
}
|
|
199
|
+
}
|
|
200
|
+
|
|
201
|
+
// Fallback: Parse tool calls from text content if standard tool_calls are missing
|
|
202
|
+
if ((toolCalls.length === 0) && content) {
|
|
203
|
+
const toolCallMatch = content.match(/<tool_call>([\s\S]*?)<\/tool_call>/);
|
|
204
|
+
if (toolCallMatch) {
|
|
205
|
+
try {
|
|
206
|
+
const toolCallData = JSON.parse(toolCallMatch[1].trim());
|
|
207
|
+
toolCalls = [{
|
|
208
|
+
id: currentToolCallId,
|
|
209
|
+
type: 'function',
|
|
210
|
+
function: {
|
|
211
|
+
name: toolCallData.name,
|
|
212
|
+
arguments: JSON.stringify(toolCallData.arguments),
|
|
213
|
+
}
|
|
214
|
+
}];
|
|
215
|
+
} catch (e) {
|
|
216
|
+
// Silent catch
|
|
217
|
+
}
|
|
218
|
+
}
|
|
219
|
+
}
|
|
220
|
+
|
|
221
|
+
return {
|
|
222
|
+
content,
|
|
223
|
+
toolCalls: toolCalls.length > 0 ? toolCalls.filter(Boolean) : undefined,
|
|
224
|
+
};
|
|
225
|
+
}
|
|
226
|
+
|
|
227
|
+
/**
|
|
228
|
+
* 简单解析部分 JSON,目前只支持提取 name 字段
|
|
229
|
+
*/
|
|
230
|
+
private parsePartialJson(partialJson: string): any {
|
|
231
|
+
try {
|
|
232
|
+
// 如果 partialJson 看起来像完整的 JSON,直接解析
|
|
233
|
+
if (partialJson.endsWith('}')) {
|
|
234
|
+
return JSON.parse(partialJson);
|
|
235
|
+
}
|
|
236
|
+
|
|
237
|
+
// 否则尝试用正则提取 "name": "..."
|
|
238
|
+
const nameMatch = partialJson.match(/"name"\s*:\s*"([^"]*)"/);
|
|
239
|
+
const argsMatch = partialJson.match(/"arguments"\s*:\s*(\{[\s\S]*)/);
|
|
240
|
+
|
|
241
|
+
return {
|
|
242
|
+
name: nameMatch ? nameMatch[1] : undefined,
|
|
243
|
+
arguments: argsMatch ? argsMatch[1] : undefined
|
|
244
|
+
};
|
|
245
|
+
} catch (e) {
|
|
246
|
+
return null;
|
|
247
|
+
}
|
|
248
|
+
}
|
|
249
|
+
|
|
250
|
+
async embed(text: string | string[]): Promise<number[][]> {
|
|
251
|
+
const input = Array.isArray(text) ? text : [text];
|
|
252
|
+
const response = await fetch(`${this.baseUrl}/embeddings`, {
|
|
253
|
+
method: 'POST',
|
|
254
|
+
headers: {
|
|
255
|
+
'Content-Type': 'application/json',
|
|
256
|
+
'Authorization': `Bearer ${this.apiKey}`,
|
|
257
|
+
},
|
|
258
|
+
body: JSON.stringify({
|
|
259
|
+
model: 'text-embedding-3-small', // Default for OpenAI, might need config
|
|
260
|
+
input,
|
|
261
|
+
}),
|
|
262
|
+
});
|
|
263
|
+
|
|
264
|
+
if (!response.ok) {
|
|
265
|
+
throw new Error(`OpenAI Embedding error: ${response.statusText}`);
|
|
266
|
+
}
|
|
267
|
+
|
|
268
|
+
const data = await response.json();
|
|
269
|
+
if (!data.data || !Array.isArray(data.data)) {
|
|
270
|
+
return [];
|
|
271
|
+
}
|
|
272
|
+
return data.data.map((item: any) => item.embedding);
|
|
273
|
+
}
|
|
274
|
+
}
|
|
@@ -0,0 +1,31 @@
|
|
|
1
|
+
import { IMemory, LLMMessage } from '../interfaces/index.js';
|
|
2
|
+
|
|
3
|
+
export class SimpleMemory implements IMemory {
|
|
4
|
+
private messages: LLMMessage[] = [];
|
|
5
|
+
private maxMessages: number;
|
|
6
|
+
|
|
7
|
+
constructor(options: { maxMessages?: number } = {}) {
|
|
8
|
+
this.maxMessages = options.maxMessages || 50;
|
|
9
|
+
}
|
|
10
|
+
|
|
11
|
+
async add(message: LLMMessage): Promise<void> {
|
|
12
|
+
this.messages.push(message);
|
|
13
|
+
if (this.messages.length > this.maxMessages) {
|
|
14
|
+
// Keep system prompt if it exists at index 0
|
|
15
|
+
const systemPrompt = this.messages[0].role === 'system' ? this.messages[0] : null;
|
|
16
|
+
if (systemPrompt) {
|
|
17
|
+
this.messages = [systemPrompt, ...this.messages.slice(-(this.maxMessages - 1))];
|
|
18
|
+
} else {
|
|
19
|
+
this.messages = this.messages.slice(-this.maxMessages);
|
|
20
|
+
}
|
|
21
|
+
}
|
|
22
|
+
}
|
|
23
|
+
|
|
24
|
+
async getHistory(): Promise<LLMMessage[]> {
|
|
25
|
+
return [...this.messages];
|
|
26
|
+
}
|
|
27
|
+
|
|
28
|
+
async clear(): Promise<void> {
|
|
29
|
+
this.messages = [];
|
|
30
|
+
}
|
|
31
|
+
}
|