family-ai-agent 1.0.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/.env.example +49 -0
- package/README.md +161 -0
- package/dist/cli/index.d.ts +3 -0
- package/dist/cli/index.d.ts.map +1 -0
- package/dist/cli/index.js +336 -0
- package/dist/cli/index.js.map +1 -0
- package/dist/config/index.d.ts +37 -0
- package/dist/config/index.d.ts.map +1 -0
- package/dist/config/index.js +68 -0
- package/dist/config/index.js.map +1 -0
- package/dist/config/models.d.ts +17 -0
- package/dist/config/models.d.ts.map +1 -0
- package/dist/config/models.js +128 -0
- package/dist/config/models.js.map +1 -0
- package/dist/core/agents/agent-factory.d.ts +31 -0
- package/dist/core/agents/agent-factory.d.ts.map +1 -0
- package/dist/core/agents/agent-factory.js +151 -0
- package/dist/core/agents/agent-factory.js.map +1 -0
- package/dist/core/agents/base-agent.d.ts +51 -0
- package/dist/core/agents/base-agent.d.ts.map +1 -0
- package/dist/core/agents/base-agent.js +245 -0
- package/dist/core/agents/base-agent.js.map +1 -0
- package/dist/core/agents/index.d.ts +8 -0
- package/dist/core/agents/index.d.ts.map +1 -0
- package/dist/core/agents/index.js +9 -0
- package/dist/core/agents/index.js.map +1 -0
- package/dist/core/agents/personalities/automation.d.ts +14 -0
- package/dist/core/agents/personalities/automation.d.ts.map +1 -0
- package/dist/core/agents/personalities/automation.js +146 -0
- package/dist/core/agents/personalities/automation.js.map +1 -0
- package/dist/core/agents/personalities/chat.d.ts +10 -0
- package/dist/core/agents/personalities/chat.d.ts.map +1 -0
- package/dist/core/agents/personalities/chat.js +132 -0
- package/dist/core/agents/personalities/chat.js.map +1 -0
- package/dist/core/agents/personalities/coding.d.ts +16 -0
- package/dist/core/agents/personalities/coding.d.ts.map +1 -0
- package/dist/core/agents/personalities/coding.js +166 -0
- package/dist/core/agents/personalities/coding.js.map +1 -0
- package/dist/core/agents/personalities/research.d.ts +13 -0
- package/dist/core/agents/personalities/research.d.ts.map +1 -0
- package/dist/core/agents/personalities/research.js +133 -0
- package/dist/core/agents/personalities/research.js.map +1 -0
- package/dist/core/agents/types.d.ts +102 -0
- package/dist/core/agents/types.d.ts.map +1 -0
- package/dist/core/agents/types.js +2 -0
- package/dist/core/agents/types.js.map +1 -0
- package/dist/core/orchestrator/graph.d.ts +118 -0
- package/dist/core/orchestrator/graph.d.ts.map +1 -0
- package/dist/core/orchestrator/graph.js +233 -0
- package/dist/core/orchestrator/graph.js.map +1 -0
- package/dist/database/client.d.ts +19 -0
- package/dist/database/client.d.ts.map +1 -0
- package/dist/database/client.js +95 -0
- package/dist/database/client.js.map +1 -0
- package/dist/index.d.ts +41 -0
- package/dist/index.d.ts.map +1 -0
- package/dist/index.js +67 -0
- package/dist/index.js.map +1 -0
- package/dist/llm/openrouter-client.d.ts +45 -0
- package/dist/llm/openrouter-client.d.ts.map +1 -0
- package/dist/llm/openrouter-client.js +155 -0
- package/dist/llm/openrouter-client.js.map +1 -0
- package/dist/memory/conversation/index.d.ts +37 -0
- package/dist/memory/conversation/index.d.ts.map +1 -0
- package/dist/memory/conversation/index.js +196 -0
- package/dist/memory/conversation/index.js.map +1 -0
- package/dist/memory/index.d.ts +4 -0
- package/dist/memory/index.d.ts.map +1 -0
- package/dist/memory/index.js +5 -0
- package/dist/memory/index.js.map +1 -0
- package/dist/memory/knowledge-base/index.d.ts +51 -0
- package/dist/memory/knowledge-base/index.d.ts.map +1 -0
- package/dist/memory/knowledge-base/index.js +222 -0
- package/dist/memory/knowledge-base/index.js.map +1 -0
- package/dist/memory/longterm/vector-store.d.ts +44 -0
- package/dist/memory/longterm/vector-store.d.ts.map +1 -0
- package/dist/memory/longterm/vector-store.js +229 -0
- package/dist/memory/longterm/vector-store.js.map +1 -0
- package/dist/safety/audit-logger.d.ts +68 -0
- package/dist/safety/audit-logger.d.ts.map +1 -0
- package/dist/safety/audit-logger.js +215 -0
- package/dist/safety/audit-logger.js.map +1 -0
- package/dist/safety/guardrails/input-guardrail.d.ts +21 -0
- package/dist/safety/guardrails/input-guardrail.d.ts.map +1 -0
- package/dist/safety/guardrails/input-guardrail.js +145 -0
- package/dist/safety/guardrails/input-guardrail.js.map +1 -0
- package/dist/safety/guardrails/output-guardrail.d.ts +18 -0
- package/dist/safety/guardrails/output-guardrail.d.ts.map +1 -0
- package/dist/safety/guardrails/output-guardrail.js +125 -0
- package/dist/safety/guardrails/output-guardrail.js.map +1 -0
- package/dist/safety/index.d.ts +4 -0
- package/dist/safety/index.d.ts.map +1 -0
- package/dist/safety/index.js +5 -0
- package/dist/safety/index.js.map +1 -0
- package/dist/utils/errors.d.ts +36 -0
- package/dist/utils/errors.d.ts.map +1 -0
- package/dist/utils/errors.js +94 -0
- package/dist/utils/errors.js.map +1 -0
- package/dist/utils/logger.d.ts +8 -0
- package/dist/utils/logger.d.ts.map +1 -0
- package/dist/utils/logger.js +47 -0
- package/dist/utils/logger.js.map +1 -0
- package/docker/init-db.sql +149 -0
- package/docker/sandbox/Dockerfile.sandbox +29 -0
- package/docker-compose.yml +61 -0
- package/package.json +80 -0
- package/src/cli/index.ts +392 -0
- package/src/config/index.ts +85 -0
- package/src/config/models.ts +156 -0
- package/src/core/agents/agent-factory.ts +192 -0
- package/src/core/agents/base-agent.ts +333 -0
- package/src/core/agents/index.ts +27 -0
- package/src/core/agents/personalities/automation.ts +202 -0
- package/src/core/agents/personalities/chat.ts +159 -0
- package/src/core/agents/personalities/coding.ts +227 -0
- package/src/core/agents/personalities/research.ts +177 -0
- package/src/core/agents/types.ts +124 -0
- package/src/core/orchestrator/graph.ts +305 -0
- package/src/database/client.ts +109 -0
- package/src/index.ts +104 -0
- package/src/llm/openrouter-client.ts +218 -0
- package/src/memory/conversation/index.ts +313 -0
- package/src/memory/index.ts +23 -0
- package/src/memory/knowledge-base/index.ts +357 -0
- package/src/memory/longterm/vector-store.ts +364 -0
- package/src/safety/audit-logger.ts +357 -0
- package/src/safety/guardrails/input-guardrail.ts +191 -0
- package/src/safety/guardrails/output-guardrail.ts +160 -0
- package/src/safety/index.ts +21 -0
- package/src/utils/errors.ts +120 -0
- package/src/utils/logger.ts +74 -0
- package/tsconfig.json +37 -0
package/src/index.ts
ADDED
|
@@ -0,0 +1,104 @@
|
|
|
1
|
+
/**
 * Family AI Agent - Multi-Agent AI System
 *
 * A production-ready multi-agent AI system with:
 * - Supervisor architecture for task routing
 * - Specialized agents (Research, Coding, Automation, Chat)
 * - Long-term memory with vector search
 * - Knowledge base with RAG
 * - Safety guardrails
 *
 * @module family-ai-agent
 */

// Core exports: agent implementations, their factory helpers, and shared agent types.
export { BaseAgent } from './core/agents/base-agent.js';
export { ChatAgent, createChatAgent } from './core/agents/personalities/chat.js';
export { ResearchAgent, createResearchAgent } from './core/agents/personalities/research.js';
export { CodingAgent, createCodingAgent } from './core/agents/personalities/coding.js';
export { AutomationAgent, createAutomationAgent } from './core/agents/personalities/automation.js';
export { AgentFactory, getAgentFactory } from './core/agents/agent-factory.js';
export type {
  AgentRole,
  AgentStatus,
  AgentIdentity,
  AgentConfig,
  AgentState,
  TaskDefinition,
  TaskResult,
  RetrievedMemory,
} from './core/agents/types.js';

// Orchestrator exports (supervisor graph entry points).
export {
  createOrchestratorGraph,
  getOrchestratorGraph,
  runOrchestrator,
  streamOrchestrator,
} from './core/orchestrator/graph.js';

// Memory exports: vector store, conversation history, and RAG knowledge base.
export { VectorStore, getVectorStore } from './memory/longterm/vector-store.js';
export { ConversationMemory, getConversationMemory } from './memory/conversation/index.js';
export { KnowledgeBase, getKnowledgeBase } from './memory/knowledge-base/index.js';

// LLM exports
export { OpenRouterClient, getOpenRouterClient } from './llm/openrouter-client.js';

// Safety exports: input/output guardrails and the audit trail.
export { InputGuardrail, getInputGuardrail, validateInput } from './safety/guardrails/input-guardrail.js';
export { OutputGuardrail, getOutputGuardrail, validateOutput } from './safety/guardrails/output-guardrail.js';
export { AuditLogger, getAuditLogger } from './safety/audit-logger.js';

// Database exports
export { initDatabase, closePool, query, transaction } from './database/client.js';

// Config exports
export { config, getDatabaseUrl, getRedisUrl, isProduction, isDevelopment } from './config/index.js';
export { AVAILABLE_MODELS, getModelConfig, getRecommendedModel } from './config/models.js';

// Utility exports
export { logger, createLogger } from './utils/logger.js';
export * from './utils/errors.js';

// Version
export const VERSION = '1.0.0';

// Quick start function.
// These imports bind names locally for quickStart below — the `export { … } from`
// re-exports above forward symbols without bringing them into this module's scope.
// NOTE(review): the RetrievedMemory type used by quickStart is imported at the
// very bottom of this file; consider hoisting it into this import group.
import { initDatabase } from './database/client.js';
import { runOrchestrator } from './core/orchestrator/graph.js';
import { getVectorStore } from './memory/longterm/vector-store.js';
|
|
71
|
+
|
|
72
|
+
export async function quickStart(): Promise<{
|
|
73
|
+
ask: (input: string) => Promise<string>;
|
|
74
|
+
remember: (content: string) => Promise<string>;
|
|
75
|
+
search: (query: string) => Promise<RetrievedMemory[]>;
|
|
76
|
+
shutdown: () => Promise<void>;
|
|
77
|
+
}> {
|
|
78
|
+
await initDatabase();
|
|
79
|
+
|
|
80
|
+
const vectorStore = getVectorStore();
|
|
81
|
+
|
|
82
|
+
return {
|
|
83
|
+
async ask(input: string): Promise<string> {
|
|
84
|
+
const memories = await vectorStore.search(input, { limit: 3 });
|
|
85
|
+
const result = await runOrchestrator(input, {}, memories);
|
|
86
|
+
return result.response;
|
|
87
|
+
},
|
|
88
|
+
|
|
89
|
+
async remember(content: string): Promise<string> {
|
|
90
|
+
return vectorStore.store(content, 'semantic');
|
|
91
|
+
},
|
|
92
|
+
|
|
93
|
+
async search(query: string): Promise<RetrievedMemory[]> {
|
|
94
|
+
return vectorStore.search(query);
|
|
95
|
+
},
|
|
96
|
+
|
|
97
|
+
async shutdown(): Promise<void> {
|
|
98
|
+
const { closePool } = await import('./database/client.js');
|
|
99
|
+
await closePool();
|
|
100
|
+
},
|
|
101
|
+
};
|
|
102
|
+
}
|
|
103
|
+
|
|
104
|
+
import type { RetrievedMemory } from './core/agents/types.js';
|
|
@@ -0,0 +1,218 @@
|
|
|
1
|
+
import { ChatOpenAI } from '@langchain/openai';
|
|
2
|
+
import { OpenAIEmbeddings } from '@langchain/openai';
|
|
3
|
+
import { config } from '../config/index.js';
|
|
4
|
+
import { getModelConfig, type ModelConfig } from '../config/models.js';
|
|
5
|
+
import { LLMError } from '../utils/errors.js';
|
|
6
|
+
import { createLogger } from '../utils/logger.js';
|
|
7
|
+
import type { BaseMessage } from '@langchain/core/messages';
|
|
8
|
+
|
|
9
|
+
const logger = createLogger('OpenRouterClient');

/** Per-request generation options; unset fields fall back to model/client defaults. */
export interface ChatOptions {
  model?: string;            // OpenRouter model id; defaults to config.DEFAULT_MODEL
  temperature?: number;      // sampling temperature; defaults to 0.7 in getChatModel
  maxTokens?: number;        // falls back to the model's maxOutput, then 4096
  topP?: number;
  frequencyPenalty?: number;
  presencePenalty?: number;
  stop?: string[];           // stop sequences
  streaming?: boolean;       // enable token streaming; defaults to false
}

/** Normalized result of a non-streaming completion call. */
export interface CompletionResult {
  content: string;           // text content; non-string payloads are JSON-stringified
  model: string;             // model id that served the request
  usage: {
    promptTokens: number;    // 0 when the provider omits usage metadata
    completionTokens: number;
    totalTokens: number;
  };
  finishReason: string;      // provider finish reason; defaults to 'stop'
}
|
|
32
|
+
|
|
33
|
+
export class OpenRouterClient {
|
|
34
|
+
private defaultModel: string;
|
|
35
|
+
private fastModel: string;
|
|
36
|
+
private embeddingModel: string;
|
|
37
|
+
private chatModels: Map<string, ChatOpenAI> = new Map();
|
|
38
|
+
private embeddings: OpenAIEmbeddings | null = null;
|
|
39
|
+
|
|
40
|
+
constructor() {
|
|
41
|
+
this.defaultModel = config.DEFAULT_MODEL;
|
|
42
|
+
this.fastModel = config.FAST_MODEL;
|
|
43
|
+
this.embeddingModel = config.EMBEDDING_MODEL;
|
|
44
|
+
}
|
|
45
|
+
|
|
46
|
+
private getChatModel(modelId: string, options: ChatOptions = {}): ChatOpenAI {
|
|
47
|
+
const cacheKey = `${modelId}-${JSON.stringify(options)}`;
|
|
48
|
+
|
|
49
|
+
if (!this.chatModels.has(cacheKey)) {
|
|
50
|
+
const modelConfig = getModelConfig(modelId);
|
|
51
|
+
|
|
52
|
+
const chatModel = new ChatOpenAI({
|
|
53
|
+
modelName: modelId,
|
|
54
|
+
openAIApiKey: config.OPENROUTER_API_KEY,
|
|
55
|
+
temperature: options.temperature ?? 0.7,
|
|
56
|
+
maxTokens: options.maxTokens ?? modelConfig?.maxOutput ?? 4096,
|
|
57
|
+
topP: options.topP,
|
|
58
|
+
frequencyPenalty: options.frequencyPenalty,
|
|
59
|
+
presencePenalty: options.presencePenalty,
|
|
60
|
+
stop: options.stop,
|
|
61
|
+
streaming: options.streaming ?? false,
|
|
62
|
+
configuration: {
|
|
63
|
+
baseURL: config.OPENROUTER_BASE_URL,
|
|
64
|
+
defaultHeaders: {
|
|
65
|
+
'HTTP-Referer': 'https://family-ai-agent.local',
|
|
66
|
+
'X-Title': 'Family AI Agent',
|
|
67
|
+
},
|
|
68
|
+
},
|
|
69
|
+
});
|
|
70
|
+
|
|
71
|
+
this.chatModels.set(cacheKey, chatModel);
|
|
72
|
+
}
|
|
73
|
+
|
|
74
|
+
return this.chatModels.get(cacheKey)!;
|
|
75
|
+
}
|
|
76
|
+
|
|
77
|
+
getEmbeddings(): OpenAIEmbeddings {
|
|
78
|
+
if (!this.embeddings) {
|
|
79
|
+
this.embeddings = new OpenAIEmbeddings({
|
|
80
|
+
modelName: this.embeddingModel,
|
|
81
|
+
openAIApiKey: config.OPENROUTER_API_KEY,
|
|
82
|
+
configuration: {
|
|
83
|
+
baseURL: config.OPENROUTER_BASE_URL,
|
|
84
|
+
},
|
|
85
|
+
});
|
|
86
|
+
}
|
|
87
|
+
return this.embeddings;
|
|
88
|
+
}
|
|
89
|
+
|
|
90
|
+
// Get a LangChain ChatModel for use with agents
|
|
91
|
+
getChatModelForAgent(options: ChatOptions = {}): ChatOpenAI {
|
|
92
|
+
const modelId = options.model ?? this.defaultModel;
|
|
93
|
+
return this.getChatModel(modelId, {
|
|
94
|
+
...options,
|
|
95
|
+
streaming: true, // Enable streaming for agents
|
|
96
|
+
});
|
|
97
|
+
}
|
|
98
|
+
|
|
99
|
+
// Get a fast model for quick operations
|
|
100
|
+
getFastModel(options: ChatOptions = {}): ChatOpenAI {
|
|
101
|
+
return this.getChatModel(this.fastModel, options);
|
|
102
|
+
}
|
|
103
|
+
|
|
104
|
+
// Simple completion method
|
|
105
|
+
async complete(
|
|
106
|
+
messages: BaseMessage[],
|
|
107
|
+
options: ChatOptions = {}
|
|
108
|
+
): Promise<CompletionResult> {
|
|
109
|
+
const modelId = options.model ?? this.defaultModel;
|
|
110
|
+
const model = this.getChatModel(modelId, options);
|
|
111
|
+
|
|
112
|
+
try {
|
|
113
|
+
logger.debug('Sending completion request', { model: modelId });
|
|
114
|
+
const startTime = Date.now();
|
|
115
|
+
|
|
116
|
+
const response = await model.invoke(messages);
|
|
117
|
+
|
|
118
|
+
const duration = Date.now() - startTime;
|
|
119
|
+
logger.debug('Completion received', { model: modelId, duration });
|
|
120
|
+
|
|
121
|
+
return {
|
|
122
|
+
content: typeof response.content === 'string'
|
|
123
|
+
? response.content
|
|
124
|
+
: JSON.stringify(response.content),
|
|
125
|
+
model: modelId,
|
|
126
|
+
usage: {
|
|
127
|
+
promptTokens: response.usage_metadata?.input_tokens ?? 0,
|
|
128
|
+
completionTokens: response.usage_metadata?.output_tokens ?? 0,
|
|
129
|
+
totalTokens: response.usage_metadata?.total_tokens ?? 0,
|
|
130
|
+
},
|
|
131
|
+
finishReason: response.response_metadata?.finish_reason ?? 'stop',
|
|
132
|
+
};
|
|
133
|
+
} catch (error) {
|
|
134
|
+
logger.error('Completion failed', { model: modelId, error });
|
|
135
|
+
throw new LLMError(
|
|
136
|
+
`Failed to get completion from ${modelId}: ${error instanceof Error ? error.message : 'Unknown error'}`,
|
|
137
|
+
{ model: modelId, originalError: String(error) }
|
|
138
|
+
);
|
|
139
|
+
}
|
|
140
|
+
}
|
|
141
|
+
|
|
142
|
+
// Stream completion
|
|
143
|
+
async *streamComplete(
|
|
144
|
+
messages: BaseMessage[],
|
|
145
|
+
options: ChatOptions = {}
|
|
146
|
+
): AsyncGenerator<string, void, unknown> {
|
|
147
|
+
const modelId = options.model ?? this.defaultModel;
|
|
148
|
+
const model = this.getChatModel(modelId, { ...options, streaming: true });
|
|
149
|
+
|
|
150
|
+
try {
|
|
151
|
+
logger.debug('Starting stream completion', { model: modelId });
|
|
152
|
+
|
|
153
|
+
const stream = await model.stream(messages);
|
|
154
|
+
|
|
155
|
+
for await (const chunk of stream) {
|
|
156
|
+
if (typeof chunk.content === 'string') {
|
|
157
|
+
yield chunk.content;
|
|
158
|
+
}
|
|
159
|
+
}
|
|
160
|
+
} catch (error) {
|
|
161
|
+
logger.error('Stream completion failed', { model: modelId, error });
|
|
162
|
+
throw new LLMError(
|
|
163
|
+
`Failed to stream completion from ${modelId}: ${error instanceof Error ? error.message : 'Unknown error'}`,
|
|
164
|
+
{ model: modelId, originalError: String(error) }
|
|
165
|
+
);
|
|
166
|
+
}
|
|
167
|
+
}
|
|
168
|
+
|
|
169
|
+
// Generate embeddings for text
|
|
170
|
+
async embed(texts: string[]): Promise<number[][]> {
|
|
171
|
+
try {
|
|
172
|
+
const embeddings = this.getEmbeddings();
|
|
173
|
+
return await embeddings.embedDocuments(texts);
|
|
174
|
+
} catch (error) {
|
|
175
|
+
logger.error('Embedding generation failed', { error });
|
|
176
|
+
throw new LLMError(
|
|
177
|
+
`Failed to generate embeddings: ${error instanceof Error ? error.message : 'Unknown error'}`,
|
|
178
|
+
{ originalError: String(error) }
|
|
179
|
+
);
|
|
180
|
+
}
|
|
181
|
+
}
|
|
182
|
+
|
|
183
|
+
// Generate single embedding
|
|
184
|
+
async embedQuery(text: string): Promise<number[]> {
|
|
185
|
+
try {
|
|
186
|
+
const embeddings = this.getEmbeddings();
|
|
187
|
+
return await embeddings.embedQuery(text);
|
|
188
|
+
} catch (error) {
|
|
189
|
+
logger.error('Query embedding failed', { error });
|
|
190
|
+
throw new LLMError(
|
|
191
|
+
`Failed to generate query embedding: ${error instanceof Error ? error.message : 'Unknown error'}`,
|
|
192
|
+
{ originalError: String(error) }
|
|
193
|
+
);
|
|
194
|
+
}
|
|
195
|
+
}
|
|
196
|
+
|
|
197
|
+
// Get model info
|
|
198
|
+
getModelInfo(modelId?: string): ModelConfig | undefined {
|
|
199
|
+
return getModelConfig(modelId ?? this.defaultModel);
|
|
200
|
+
}
|
|
201
|
+
|
|
202
|
+
// List available models
|
|
203
|
+
getAvailableModels(): string[] {
|
|
204
|
+
return [this.defaultModel, this.fastModel];
|
|
205
|
+
}
|
|
206
|
+
}
|
|
207
|
+
|
|
208
|
+
// Singleton instance
|
|
209
|
+
let clientInstance: OpenRouterClient | null = null;
|
|
210
|
+
|
|
211
|
+
export function getOpenRouterClient(): OpenRouterClient {
|
|
212
|
+
if (!clientInstance) {
|
|
213
|
+
clientInstance = new OpenRouterClient();
|
|
214
|
+
}
|
|
215
|
+
return clientInstance;
|
|
216
|
+
}
|
|
217
|
+
|
|
218
|
+
export default OpenRouterClient;
|
|
@@ -0,0 +1,313 @@
|
|
|
1
|
+
import { query } from '../../database/client.js';
|
|
2
|
+
import { createLogger, logMemoryOperation } from '../../utils/logger.js';
|
|
3
|
+
import { MemoryError } from '../../utils/errors.js';
|
|
4
|
+
import type { BaseMessage } from '@langchain/core/messages';
|
|
5
|
+
import { HumanMessage, AIMessage, SystemMessage } from '@langchain/core/messages';
|
|
6
|
+
|
|
7
|
+
const logger = createLogger('ConversationMemory');

/** A conversation record; `threadId` is the caller-facing lookup key. */
export interface Conversation {
  id: string;          // row id returned by the database
  threadId: string;
  userId?: string;     // absent when the conversation is anonymous
  createdAt: Date;
  updatedAt: Date;     // bumped on every addMessage
}

/** A single chat turn stored under a conversation. */
export interface Message {
  id: string;
  conversationId: string;
  role: 'user' | 'assistant' | 'system';
  content: string;
  metadata: Record<string, unknown>;   // free-form per-message metadata (stored as JSON)
  createdAt: Date;
}
|
|
25
|
+
|
|
26
|
+
export class ConversationMemory {
|
|
27
|
+
// Create a new conversation
|
|
28
|
+
async createConversation(threadId: string, userId?: string): Promise<string> {
|
|
29
|
+
try {
|
|
30
|
+
const result = await query<{ id: string }>(
|
|
31
|
+
`INSERT INTO conversations (thread_id, user_id)
|
|
32
|
+
VALUES ($1, $2)
|
|
33
|
+
RETURNING id`,
|
|
34
|
+
[threadId, userId ?? null]
|
|
35
|
+
);
|
|
36
|
+
|
|
37
|
+
const conversationId = result.rows[0]?.id;
|
|
38
|
+
if (!conversationId) {
|
|
39
|
+
throw new MemoryError('Failed to create conversation');
|
|
40
|
+
}
|
|
41
|
+
|
|
42
|
+
logMemoryOperation('write', 'conversation', {
|
|
43
|
+
conversationId,
|
|
44
|
+
threadId,
|
|
45
|
+
});
|
|
46
|
+
|
|
47
|
+
return conversationId;
|
|
48
|
+
} catch (error) {
|
|
49
|
+
logger.error('Failed to create conversation', { error });
|
|
50
|
+
throw new MemoryError(
|
|
51
|
+
`Failed to create conversation: ${error instanceof Error ? error.message : 'Unknown error'}`
|
|
52
|
+
);
|
|
53
|
+
}
|
|
54
|
+
}
|
|
55
|
+
|
|
56
|
+
// Get conversation by thread ID
|
|
57
|
+
async getConversation(threadId: string): Promise<Conversation | null> {
|
|
58
|
+
try {
|
|
59
|
+
const result = await query<{
|
|
60
|
+
id: string;
|
|
61
|
+
thread_id: string;
|
|
62
|
+
user_id: string | null;
|
|
63
|
+
created_at: Date;
|
|
64
|
+
updated_at: Date;
|
|
65
|
+
}>(
|
|
66
|
+
`SELECT id, thread_id, user_id, created_at, updated_at
|
|
67
|
+
FROM conversations WHERE thread_id = $1`,
|
|
68
|
+
[threadId]
|
|
69
|
+
);
|
|
70
|
+
|
|
71
|
+
if (result.rows.length === 0) {
|
|
72
|
+
return null;
|
|
73
|
+
}
|
|
74
|
+
|
|
75
|
+
const row = result.rows[0]!;
|
|
76
|
+
return {
|
|
77
|
+
id: row.id,
|
|
78
|
+
threadId: row.thread_id,
|
|
79
|
+
userId: row.user_id ?? undefined,
|
|
80
|
+
createdAt: row.created_at,
|
|
81
|
+
updatedAt: row.updated_at,
|
|
82
|
+
};
|
|
83
|
+
} catch (error) {
|
|
84
|
+
logger.error('Failed to get conversation', { error });
|
|
85
|
+
throw new MemoryError(
|
|
86
|
+
`Failed to get conversation: ${error instanceof Error ? error.message : 'Unknown error'}`
|
|
87
|
+
);
|
|
88
|
+
}
|
|
89
|
+
}
|
|
90
|
+
|
|
91
|
+
// Get or create conversation
|
|
92
|
+
async getOrCreateConversation(threadId: string, userId?: string): Promise<string> {
|
|
93
|
+
const existing = await this.getConversation(threadId);
|
|
94
|
+
if (existing) {
|
|
95
|
+
return existing.id;
|
|
96
|
+
}
|
|
97
|
+
return this.createConversation(threadId, userId);
|
|
98
|
+
}
|
|
99
|
+
|
|
100
|
+
// Add message to conversation
|
|
101
|
+
async addMessage(
|
|
102
|
+
conversationId: string,
|
|
103
|
+
role: 'user' | 'assistant' | 'system',
|
|
104
|
+
content: string,
|
|
105
|
+
metadata: Record<string, unknown> = {}
|
|
106
|
+
): Promise<string> {
|
|
107
|
+
try {
|
|
108
|
+
const result = await query<{ id: string }>(
|
|
109
|
+
`INSERT INTO messages (conversation_id, role, content, metadata)
|
|
110
|
+
VALUES ($1, $2, $3, $4)
|
|
111
|
+
RETURNING id`,
|
|
112
|
+
[conversationId, role, content, JSON.stringify(metadata)]
|
|
113
|
+
);
|
|
114
|
+
|
|
115
|
+
const messageId = result.rows[0]?.id;
|
|
116
|
+
if (!messageId) {
|
|
117
|
+
throw new MemoryError('Failed to add message');
|
|
118
|
+
}
|
|
119
|
+
|
|
120
|
+
// Update conversation timestamp
|
|
121
|
+
await query(
|
|
122
|
+
'UPDATE conversations SET updated_at = NOW() WHERE id = $1',
|
|
123
|
+
[conversationId]
|
|
124
|
+
);
|
|
125
|
+
|
|
126
|
+
return messageId;
|
|
127
|
+
} catch (error) {
|
|
128
|
+
logger.error('Failed to add message', { error });
|
|
129
|
+
throw new MemoryError(
|
|
130
|
+
`Failed to add message: ${error instanceof Error ? error.message : 'Unknown error'}`
|
|
131
|
+
);
|
|
132
|
+
}
|
|
133
|
+
}
|
|
134
|
+
|
|
135
|
+
// Get messages for a conversation
|
|
136
|
+
async getMessages(
|
|
137
|
+
conversationId: string,
|
|
138
|
+
options: { limit?: number; offset?: number } = {}
|
|
139
|
+
): Promise<Message[]> {
|
|
140
|
+
const { limit = 50, offset = 0 } = options;
|
|
141
|
+
|
|
142
|
+
try {
|
|
143
|
+
const result = await query<{
|
|
144
|
+
id: string;
|
|
145
|
+
conversation_id: string;
|
|
146
|
+
role: 'user' | 'assistant' | 'system';
|
|
147
|
+
content: string;
|
|
148
|
+
metadata: Record<string, unknown>;
|
|
149
|
+
created_at: Date;
|
|
150
|
+
}>(
|
|
151
|
+
`SELECT id, conversation_id, role, content, metadata, created_at
|
|
152
|
+
FROM messages
|
|
153
|
+
WHERE conversation_id = $1
|
|
154
|
+
ORDER BY created_at ASC
|
|
155
|
+
LIMIT $2 OFFSET $3`,
|
|
156
|
+
[conversationId, limit, offset]
|
|
157
|
+
);
|
|
158
|
+
|
|
159
|
+
return result.rows.map((row) => ({
|
|
160
|
+
id: row.id,
|
|
161
|
+
conversationId: row.conversation_id,
|
|
162
|
+
role: row.role,
|
|
163
|
+
content: row.content,
|
|
164
|
+
metadata: row.metadata,
|
|
165
|
+
createdAt: row.created_at,
|
|
166
|
+
}));
|
|
167
|
+
} catch (error) {
|
|
168
|
+
logger.error('Failed to get messages', { error });
|
|
169
|
+
throw new MemoryError(
|
|
170
|
+
`Failed to get messages: ${error instanceof Error ? error.message : 'Unknown error'}`
|
|
171
|
+
);
|
|
172
|
+
}
|
|
173
|
+
}
|
|
174
|
+
|
|
175
|
+
// Convert messages to LangChain format
|
|
176
|
+
async getMessagesAsLangChain(conversationId: string): Promise<BaseMessage[]> {
|
|
177
|
+
const messages = await this.getMessages(conversationId);
|
|
178
|
+
|
|
179
|
+
return messages.map((msg) => {
|
|
180
|
+
switch (msg.role) {
|
|
181
|
+
case 'user':
|
|
182
|
+
return new HumanMessage({ content: msg.content });
|
|
183
|
+
case 'assistant':
|
|
184
|
+
return new AIMessage({ content: msg.content });
|
|
185
|
+
case 'system':
|
|
186
|
+
return new SystemMessage({ content: msg.content });
|
|
187
|
+
default:
|
|
188
|
+
return new HumanMessage({ content: msg.content });
|
|
189
|
+
}
|
|
190
|
+
});
|
|
191
|
+
}
|
|
192
|
+
|
|
193
|
+
// Get recent messages (for context window)
|
|
194
|
+
async getRecentMessages(
|
|
195
|
+
conversationId: string,
|
|
196
|
+
maxMessages: number = 10
|
|
197
|
+
): Promise<Message[]> {
|
|
198
|
+
try {
|
|
199
|
+
const result = await query<{
|
|
200
|
+
id: string;
|
|
201
|
+
conversation_id: string;
|
|
202
|
+
role: 'user' | 'assistant' | 'system';
|
|
203
|
+
content: string;
|
|
204
|
+
metadata: Record<string, unknown>;
|
|
205
|
+
created_at: Date;
|
|
206
|
+
}>(
|
|
207
|
+
`SELECT id, conversation_id, role, content, metadata, created_at
|
|
208
|
+
FROM messages
|
|
209
|
+
WHERE conversation_id = $1
|
|
210
|
+
ORDER BY created_at DESC
|
|
211
|
+
LIMIT $2`,
|
|
212
|
+
[conversationId, maxMessages]
|
|
213
|
+
);
|
|
214
|
+
|
|
215
|
+
// Reverse to get chronological order
|
|
216
|
+
return result.rows.reverse().map((row) => ({
|
|
217
|
+
id: row.id,
|
|
218
|
+
conversationId: row.conversation_id,
|
|
219
|
+
role: row.role,
|
|
220
|
+
content: row.content,
|
|
221
|
+
metadata: row.metadata,
|
|
222
|
+
createdAt: row.created_at,
|
|
223
|
+
}));
|
|
224
|
+
} catch (error) {
|
|
225
|
+
logger.error('Failed to get recent messages', { error });
|
|
226
|
+
throw new MemoryError(
|
|
227
|
+
`Failed to get recent messages: ${error instanceof Error ? error.message : 'Unknown error'}`
|
|
228
|
+
);
|
|
229
|
+
}
|
|
230
|
+
}
|
|
231
|
+
|
|
232
|
+
// Delete conversation and all messages
|
|
233
|
+
async deleteConversation(conversationId: string): Promise<void> {
|
|
234
|
+
try {
|
|
235
|
+
await query('DELETE FROM conversations WHERE id = $1', [conversationId]);
|
|
236
|
+
logMemoryOperation('write', 'conversation', {
|
|
237
|
+
conversationId,
|
|
238
|
+
action: 'delete',
|
|
239
|
+
});
|
|
240
|
+
} catch (error) {
|
|
241
|
+
logger.error('Failed to delete conversation', { error });
|
|
242
|
+
throw new MemoryError(
|
|
243
|
+
`Failed to delete conversation: ${error instanceof Error ? error.message : 'Unknown error'}`
|
|
244
|
+
);
|
|
245
|
+
}
|
|
246
|
+
}
|
|
247
|
+
|
|
248
|
+
// Get conversation count for user
|
|
249
|
+
async getConversationCount(userId: string): Promise<number> {
|
|
250
|
+
try {
|
|
251
|
+
const result = await query<{ count: string }>(
|
|
252
|
+
'SELECT COUNT(*) as count FROM conversations WHERE user_id = $1',
|
|
253
|
+
[userId]
|
|
254
|
+
);
|
|
255
|
+
return parseInt(result.rows[0]?.count ?? '0', 10);
|
|
256
|
+
} catch (error) {
|
|
257
|
+
logger.error('Failed to get conversation count', { error });
|
|
258
|
+
throw new MemoryError(
|
|
259
|
+
`Failed to get conversation count: ${error instanceof Error ? error.message : 'Unknown error'}`
|
|
260
|
+
);
|
|
261
|
+
}
|
|
262
|
+
}
|
|
263
|
+
|
|
264
|
+
// List user's conversations
|
|
265
|
+
async listConversations(
|
|
266
|
+
userId: string,
|
|
267
|
+
options: { limit?: number; offset?: number } = {}
|
|
268
|
+
): Promise<Conversation[]> {
|
|
269
|
+
const { limit = 20, offset = 0 } = options;
|
|
270
|
+
|
|
271
|
+
try {
|
|
272
|
+
const result = await query<{
|
|
273
|
+
id: string;
|
|
274
|
+
thread_id: string;
|
|
275
|
+
user_id: string | null;
|
|
276
|
+
created_at: Date;
|
|
277
|
+
updated_at: Date;
|
|
278
|
+
}>(
|
|
279
|
+
`SELECT id, thread_id, user_id, created_at, updated_at
|
|
280
|
+
FROM conversations
|
|
281
|
+
WHERE user_id = $1
|
|
282
|
+
ORDER BY updated_at DESC
|
|
283
|
+
LIMIT $2 OFFSET $3`,
|
|
284
|
+
[userId, limit, offset]
|
|
285
|
+
);
|
|
286
|
+
|
|
287
|
+
return result.rows.map((row) => ({
|
|
288
|
+
id: row.id,
|
|
289
|
+
threadId: row.thread_id,
|
|
290
|
+
userId: row.user_id ?? undefined,
|
|
291
|
+
createdAt: row.created_at,
|
|
292
|
+
updatedAt: row.updated_at,
|
|
293
|
+
}));
|
|
294
|
+
} catch (error) {
|
|
295
|
+
logger.error('Failed to list conversations', { error });
|
|
296
|
+
throw new MemoryError(
|
|
297
|
+
`Failed to list conversations: ${error instanceof Error ? error.message : 'Unknown error'}`
|
|
298
|
+
);
|
|
299
|
+
}
|
|
300
|
+
}
|
|
301
|
+
}
|
|
302
|
+
|
|
303
|
+
// Singleton instance
|
|
304
|
+
let memoryInstance: ConversationMemory | null = null;
|
|
305
|
+
|
|
306
|
+
export function getConversationMemory(): ConversationMemory {
|
|
307
|
+
if (!memoryInstance) {
|
|
308
|
+
memoryInstance = new ConversationMemory();
|
|
309
|
+
}
|
|
310
|
+
return memoryInstance;
|
|
311
|
+
}
|
|
312
|
+
|
|
313
|
+
export default ConversationMemory;
|
|
@@ -0,0 +1,23 @@
|
|
|
1
|
+
// Memory module exports
|
|
2
|
+
export {
|
|
3
|
+
VectorStore,
|
|
4
|
+
getVectorStore,
|
|
5
|
+
type MemoryType,
|
|
6
|
+
type MemoryEntry,
|
|
7
|
+
type SearchOptions,
|
|
8
|
+
} from './longterm/vector-store.js';
|
|
9
|
+
|
|
10
|
+
export {
|
|
11
|
+
ConversationMemory,
|
|
12
|
+
getConversationMemory,
|
|
13
|
+
type Conversation,
|
|
14
|
+
type Message,
|
|
15
|
+
} from './conversation/index.js';
|
|
16
|
+
|
|
17
|
+
export {
|
|
18
|
+
KnowledgeBase,
|
|
19
|
+
getKnowledgeBase,
|
|
20
|
+
type Document,
|
|
21
|
+
type DocumentChunk,
|
|
22
|
+
type SearchResult,
|
|
23
|
+
} from './knowledge-base/index.js';
|