@nextsparkjs/plugin-langchain 0.1.0-beta.1
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/.env.example +41 -0
- package/api/observability/metrics/route.ts +110 -0
- package/api/observability/traces/[traceId]/route.ts +398 -0
- package/api/observability/traces/route.ts +205 -0
- package/api/sessions/route.ts +332 -0
- package/components/observability/CollapsibleJson.tsx +71 -0
- package/components/observability/CompactTimeline.tsx +75 -0
- package/components/observability/ConversationFlow.tsx +271 -0
- package/components/observability/DisabledMessage.tsx +21 -0
- package/components/observability/FiltersPanel.tsx +82 -0
- package/components/observability/ObservabilityDashboard.tsx +230 -0
- package/components/observability/SpansList.tsx +210 -0
- package/components/observability/TraceDetail.tsx +335 -0
- package/components/observability/TraceStatusBadge.tsx +39 -0
- package/components/observability/TracesTable.tsx +97 -0
- package/components/observability/index.ts +7 -0
- package/docs/01-getting-started/01-overview.md +196 -0
- package/docs/01-getting-started/02-installation.md +368 -0
- package/docs/01-getting-started/03-configuration.md +794 -0
- package/docs/02-core-concepts/01-architecture.md +566 -0
- package/docs/02-core-concepts/02-agents.md +597 -0
- package/docs/02-core-concepts/03-tools.md +689 -0
- package/docs/03-orchestration/01-graph-orchestrator.md +809 -0
- package/docs/03-orchestration/02-legacy-react.md +650 -0
- package/docs/04-advanced/01-observability.md +645 -0
- package/docs/04-advanced/02-token-tracking.md +469 -0
- package/docs/04-advanced/03-streaming.md +476 -0
- package/docs/04-advanced/04-guardrails.md +597 -0
- package/docs/05-reference/01-api-reference.md +1403 -0
- package/docs/05-reference/02-customization.md +646 -0
- package/docs/05-reference/03-examples.md +881 -0
- package/docs/index.md +85 -0
- package/hooks/observability/useMetrics.ts +31 -0
- package/hooks/observability/useTraceDetail.ts +48 -0
- package/hooks/observability/useTraces.ts +59 -0
- package/lib/agent-factory.ts +354 -0
- package/lib/agent-helpers.ts +201 -0
- package/lib/db-memory-store.ts +417 -0
- package/lib/graph/index.ts +58 -0
- package/lib/graph/nodes/combiner.ts +399 -0
- package/lib/graph/nodes/router.ts +440 -0
- package/lib/graph/orchestrator-graph.ts +386 -0
- package/lib/graph/prompts/combiner.md +131 -0
- package/lib/graph/prompts/router.md +193 -0
- package/lib/graph/types.ts +365 -0
- package/lib/guardrails.ts +230 -0
- package/lib/index.ts +44 -0
- package/lib/logger.ts +70 -0
- package/lib/memory-store.ts +168 -0
- package/lib/message-serializer.ts +110 -0
- package/lib/prompt-renderer.ts +94 -0
- package/lib/providers.ts +226 -0
- package/lib/streaming.ts +232 -0
- package/lib/token-tracker.ts +298 -0
- package/lib/tools-builder.ts +192 -0
- package/lib/tracer-callbacks.ts +342 -0
- package/lib/tracer.ts +350 -0
- package/migrations/001_langchain_memory.sql +83 -0
- package/migrations/002_token_usage.sql +127 -0
- package/migrations/003_observability.sql +257 -0
- package/package.json +28 -0
- package/plugin.config.ts +170 -0
- package/presets/lib/langchain.config.ts.preset +142 -0
- package/presets/templates/sector7/ai-observability/[traceId]/page.tsx +91 -0
- package/presets/templates/sector7/ai-observability/page.tsx +54 -0
- package/types/langchain.types.ts +274 -0
- package/types/observability.types.ts +270 -0

package/docs/05-reference/01-api-reference.md
@@ -0,0 +1,1403 @@
# API Reference

Complete API documentation for the LangChain plugin.

## Import Paths

The plugin does not have a central entry point. Import from specific modules:

```typescript
// Agent Factory
import { createAgent } from '@/contents/plugins/langchain/lib/agent-factory'

// Tool System
import { createTool, buildTools, type ToolDefinition } from '@/contents/plugins/langchain/lib/tools-builder'

// Memory Store
import {
  memoryStore,
  dbMemoryStore,
  generateSessionId,
  CONVERSATION_LIMITS,
  type DbMemoryStoreContext,
  type SessionConfig,
  type ConversationInfo,
} from '@/contents/plugins/langchain/lib/memory-store'

// Message Serialization
import {
  serializeMessages,
  deserializeMessages,
  type SerializedMessage,
} from '@/contents/plugins/langchain/lib/message-serializer'

// Providers
import {
  createOllamaModel,
  createOpenAIModel,
  createAnthropicModel,
  getModel,
} from '@/contents/plugins/langchain/lib/providers'

// Provider Availability
import {
  isProviderAvailable,
  getAvailableProviders,
} from '@/contents/plugins/langchain/plugin.config'

// Types
import type {
  ModelConfig,
  LLMProvider,
  ThemeLangChainConfig,
  AgentConfig,
  AgentResponse,
  AgentDefinition,
  ToolContext,
  ChatMessage,
} from '@/contents/plugins/langchain/types/langchain.types'

// Theme Helper Factory
import { createAgentHelpers } from '@/contents/plugins/langchain/lib/agent-helpers'
```

> **Note:** There is no central `index.ts` for this plugin. Import directly from the module that provides the functionality you need.

---

## createAgent

Creates a new AI agent instance.

### Signature

```typescript
function createAgent(config: AgentConfig): Promise<Agent>
```

### Parameters

```typescript
interface AgentConfig {
  // Required: Unique session identifier
  sessionId: string

  // Optional: System prompt for agent behavior
  systemPrompt?: string

  // Optional: Tools the agent can use
  tools?: ToolDefinition<any>[]

  // Optional: Model configuration
  modelConfig?: Partial<ModelConfig>

  // Optional: Context for memory persistence (RECOMMENDED)
  // Without context, conversation history won't be persisted
  context?: {
    userId: string
    teamId: string
  }
}

interface ModelConfig {
  provider: 'openai' | 'anthropic' | 'ollama'
  model?: string
  temperature?: number
  maxTokens?: number
  options?: {
    apiKey?: string
    baseUrl?: string
  }
}
```

### Returns

```typescript
interface Agent {
  // Send a message and get a response
  chat(message: string): Promise<AgentResponse>

  // Get conversation history (requires context)
  getHistory(): Promise<ChatMessage[]>
}

interface AgentResponse {
  content: string
  sessionId: string
  messages: BaseMessage[] // Full LangChain message array from the turn
}
```

> **Note:** The `messages` field contains the complete LangChain message array including tool calls and responses. This is always present and is essential for extracting tool results in orchestration patterns.

### Example

```typescript
const agent = await createAgent({
  sessionId: `user-${userId}-${Date.now()}`,
  systemPrompt: 'You are a helpful assistant.',
  tools: [
    {
      name: 'get_time',
      description: 'Get current time',
      schema: z.object({}),
      func: async () => new Date().toISOString(),
    },
  ],
  modelConfig: {
    provider: 'ollama',
    temperature: 0.3,
  },
  // IMPORTANT: Always provide context for conversation persistence
  context: { userId, teamId },
})

const response = await agent.chat('What time is it?')
console.log(response.content)

// Access full message history including tool calls
console.log(response.messages)
```

> **Warning:** If you omit `context`, the agent will still work but conversations won't be persisted to the database. A warning will be logged in development mode.
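
Because `getHistory()` reads from the persisted store, it only returns previous turns when `context` was supplied at creation time. A minimal sketch, assuming the agent above was created with `context`:

```typescript
// Hypothetical follow-up call: load what was persisted for this session.
// getHistory() returns ChatMessage objects (id, role, content, timestamp),
// not LangChain BaseMessage instances.
const history = await agent.getHistory()
for (const msg of history) {
  console.log(`[${msg.role}] ${msg.content}`)
}
```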

---

## buildTools

Converts tool definitions to LangChain DynamicStructuredTool instances.

### Signature

```typescript
function buildTools(definitions: ToolDefinition<any>[]): DynamicStructuredTool[]
```

### Parameters

```typescript
interface ToolDefinition<T extends z.ZodObject<any>> {
  // Unique tool identifier
  name: string

  // Description for the LLM (IMPORTANT: this is what the model reads)
  description: string

  // Zod schema for input validation
  schema: T

  // Async function that executes the tool
  // MUST return a string (JSON.stringify for objects)
  func: (input: z.infer<T>) => Promise<string>
}
```

### Example

```typescript
const toolDefs: ToolDefinition<any>[] = [
  {
    name: 'add_numbers',
    description: 'Add two numbers together',
    schema: z.object({
      a: z.number().describe('First number'),
      b: z.number().describe('Second number'),
    }),
    func: async ({ a, b }) => JSON.stringify({ result: a + b }),
  },
]

const langChainTools = buildTools(toolDefs)
```

---

## createTool

Creates a single LangChain DynamicStructuredTool from a tool definition. Use this when you need to create individual tools rather than batches.

### Signature

```typescript
function createTool<T extends z.ZodObject<any>>(def: ToolDefinition<T>): DynamicStructuredTool
```

### Example

```typescript
import { createTool } from '@/contents/plugins/langchain/lib/tools-builder'

const weatherTool = createTool({
  name: 'get_weather',
  description: 'Get current weather for a city',
  schema: z.object({
    city: z.string().describe('City name'),
  }),
  func: async ({ city }) => {
    const weather = await fetchWeather(city)
    return JSON.stringify(weather)
  },
})

// Use directly with LangChain
const agent = createReactAgent({
  llm: model,
  tools: [weatherTool],
})
```

> **Note:** Most theme code should use `buildTools()` for convenience. Use `createTool()` when integrating with raw LangChain APIs or when you need individual tool instances.

---

## memoryStore

Interface for conversation persistence. All methods are async and require context for multi-tenancy.

### Type: DbMemoryStoreContext

```typescript
interface DbMemoryStoreContext {
  userId: string
  teamId: string
}
```

### Type: SessionConfig

Optional configuration for session behavior.

```typescript
interface SessionConfig {
  /** Maximum messages to keep (sliding window). Default: 50 */
  maxMessages?: number
  /** TTL in hours. null = no expiration (default) */
  ttlHours?: number | null
}
```

**Usage:**
```typescript
// Use custom message limit
await memoryStore.addMessages(sessionId, messages, context, {
  maxMessages: 100, // Keep more messages
})

// Use expiring sessions (e.g., for temporary support chats)
await memoryStore.addMessages(sessionId, messages, context, {
  ttlHours: 24, // Expire after 24 hours
})
```

> **Note:** By default, sessions never expire and use a 50-message sliding window.

### Methods

#### getMessages

Get messages for a session. Returns LangChain BaseMessage instances.

```typescript
memoryStore.getMessages(
  sessionId: string,
  context: DbMemoryStoreContext
): Promise<BaseMessage[]>
```

#### addMessages

Add messages to a session. Automatically creates the session if it doesn't exist.

```typescript
memoryStore.addMessages(
  sessionId: string,
  messages: BaseMessage[],
  context: DbMemoryStoreContext,
  config?: SessionConfig
): Promise<void>
```

> **Note:** Messages are stored using a sliding window. When the limit (default: 50) is reached, older messages are automatically removed.

#### createSession

Create a new empty session. Returns an auto-generated sessionId.

```typescript
memoryStore.createSession(
  context: DbMemoryStoreContext,
  name?: string
): Promise<{ sessionId: string; createdAt: Date }>
```

**Example:**
```typescript
const context = { userId: 'user-123', teamId: 'team-456' }

// Create session (sessionId is auto-generated using userId + timestamp)
const { sessionId, createdAt } = await memoryStore.createSession(context, 'My Chat')
console.log(sessionId) // e.g., 'user-123-1704067200000'
```

#### listSessions

List all sessions for a user in a team.

```typescript
memoryStore.listSessions(
  context: DbMemoryStoreContext
): Promise<ConversationInfo[]>

interface ConversationInfo {
  sessionId: string
  name: string | null
  messageCount: number
  firstMessage: string | null
  isPinned: boolean
  createdAt: Date // Note: Date object, not string
  updatedAt: Date // Note: Date object, not string
}
```

#### getSession

Get full session info. Returns null if not found.

```typescript
memoryStore.getSession(
  sessionId: string,
  context: DbMemoryStoreContext
): Promise<ConversationInfo | null>
```

#### renameSession

Rename a session.

```typescript
memoryStore.renameSession(
  sessionId: string,
  name: string, // Note: name comes BEFORE context
  context: DbMemoryStoreContext
): Promise<void>
```

#### togglePinSession

Toggle pin status of a session.

```typescript
memoryStore.togglePinSession(
  sessionId: string,
  isPinned: boolean, // Note: isPinned comes BEFORE context
  context: DbMemoryStoreContext
): Promise<void>
```

#### clearSession

Delete a session and all its messages.

```typescript
memoryStore.clearSession(
  sessionId: string,
  context: DbMemoryStoreContext
): Promise<void>
```

#### countSessions

Count sessions for limit enforcement.

```typescript
memoryStore.countSessions(
  context: DbMemoryStoreContext
): Promise<number>
```

#### cleanup

Remove expired sessions (if TTL is set).

```typescript
memoryStore.cleanup(): Promise<number>
// Returns: number of sessions cleaned up
```

### Complete Example

```typescript
const context = { userId: 'user-123', teamId: 'team-456' }

// Create a new session (sessionId auto-generated)
const { sessionId } = await memoryStore.createSession(context, 'My Chat')

// List all sessions
const sessions = await memoryStore.listSessions(context)
console.log(`You have ${sessions.length} conversations`)

// Get messages from a session
const messages = await memoryStore.getMessages(sessionId, context)

// Rename the session (note parameter order!)
await memoryStore.renameSession(sessionId, 'Renamed Chat', context)

// Pin the session (note parameter order!)
await memoryStore.togglePinSession(sessionId, true, context)

// Check session count before creating new ones
const count = await memoryStore.countSessions(context)
if (count >= 50) {
  console.log('Maximum conversations reached!')
}

// Clean up
await memoryStore.clearSession(sessionId, context)
```

---

## Message Serialization

### serializeMessages

Convert LangChain messages to JSON-serializable format.

```typescript
function serializeMessages(messages: BaseMessage[]): SerializedMessage[]

interface SerializedMessage {
  type: 'human' | 'ai' | 'system' | 'tool'
  content: string
  name?: string
  additional_kwargs?: Record<string, unknown>
  response_metadata?: Record<string, unknown>
  tool_call_id?: string
}
```

### deserializeMessages

Convert serialized messages back to LangChain format.

```typescript
function deserializeMessages(messages: SerializedMessage[]): BaseMessage[]
```

### Example

```typescript
// Serialize for custom storage
const serialized = serializeMessages(langchainMessages)
const json = JSON.stringify(serialized)

// Deserialize when loading
const parsed = JSON.parse(json)
const messages = deserializeMessages(parsed)
```

> **Note:** The memoryStore handles serialization automatically. You only need these functions if implementing custom storage.

---

## Provider Functions

### getModel

Get an LLM instance based on configuration.

```typescript
function getModel(config: Partial<ModelConfig>): BaseChatModel
```

### isProviderAvailable

Check if a provider is configured and available.

```typescript
function isProviderAvailable(provider: LLMProvider): boolean
```

### getAvailableProviders

List all configured providers.

```typescript
function getAvailableProviders(): LLMProvider[]
```

### createOllamaModel

```typescript
function createOllamaModel(config?: {
  model?: string
  temperature?: number
  baseUrl?: string
}): ChatOllama
```

### createOpenAIModel

```typescript
function createOpenAIModel(config?: {
  model?: string
  temperature?: number
  apiKey?: string
  baseUrl?: string // For OpenAI-compatible servers like LM Studio
}): ChatOpenAI
```

### createAnthropicModel

```typescript
function createAnthropicModel(config?: {
  model?: string
  temperature?: number
  apiKey?: string
}): ChatAnthropic
```

### Example

```typescript
// Check available providers
const providers = getAvailableProviders()
// ['ollama', 'openai'] (depends on env config)

// Check specific provider
if (isProviderAvailable('anthropic')) {
  const model = createAnthropicModel({
    model: 'claude-3-5-sonnet-20241022',
    temperature: 0.3,
  })
}

// Get model from config (uses env defaults)
const model = getModel({
  provider: 'ollama',
  temperature: 0.3,
})
```

---

## generateSessionId

Utility to generate unique session IDs. Must be imported from memory-store.

```typescript
import { generateSessionId } from '@/contents/plugins/langchain/lib/memory-store'

function generateSessionId(userId: string): string
// Returns: '{userId}-{timestamp}'
// Example: 'user-123-1704067200000'
```

> **Note:** The generated ID combines the userId with a timestamp for uniqueness and traceability.

### Example

```typescript
import { generateSessionId } from '@/contents/plugins/langchain/lib/memory-store'

const sessionId = generateSessionId('user-123')
// 'user-123-1704067200000'
```

---

## Theme Helper Factory

Must be imported directly from the lib folder.

```typescript
import { createAgentHelpers } from '@/contents/plugins/langchain/lib/agent-helpers'

function createAgentHelpers(
  agents: Record<string, AgentDefinition>,
  defaults: {
    provider?: LLMProvider
    model?: string
    temperature?: number
  }
): AgentHelpers

interface AgentHelpers {
  getAgentConfig(name: string): AgentDefinition | undefined
  getAgentModelConfig(name: string): Partial<ModelConfig> | undefined
  getAgentTools(name: string, context: ToolContext): ToolDefinition<any>[]
  getAgentPromptName(name: string): string | undefined
  hasAgent(name: string): boolean
  getAgentNames(): string[]
}
```

### Example

```typescript
import { createAgentHelpers } from '@/contents/plugins/langchain/lib/agent-helpers'

const helpers = createAgentHelpers(AGENTS, {
  provider: 'ollama',
  temperature: 0.3,
})

// Check if agent exists
if (helpers.hasAgent('task-assistant')) {
  const config = helpers.getAgentConfig('task-assistant')
  const tools = helpers.getAgentTools('task-assistant', { userId, teamId })
}

// List all agents
const agentNames = helpers.getAgentNames()
// ['single-agent', 'orchestrator', 'task-assistant', ...]
```

---

## Types

### LLMProvider

```typescript
type LLMProvider = 'openai' | 'anthropic' | 'ollama'
```

### AgentDefinition

```typescript
interface AgentDefinition {
  provider: LLMProvider
  model?: string
  temperature?: number
  description?: string
  systemPrompt?: string // Filename (loads .md) or inline string (contains \n)
  createTools?: (context: ToolContext) => ToolDefinition<any>[]
}
```
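
For orientation, here is a hedged sketch of an agent registry that could be passed to `createAgentHelpers` (as `AGENTS` in the example above). The agent name, prompt, and `fetchTasks` helper are illustrative, not part of the plugin:

```typescript
// Illustrative only: a hypothetical registry conforming to Record<string, AgentDefinition>.
const AGENTS: Record<string, AgentDefinition> = {
  'task-assistant': {
    provider: 'ollama',
    model: 'llama3.2:3b',
    temperature: 0.3,
    description: 'Handles task-related questions',
    systemPrompt: 'task-assistant', // filename-style prompt, resolved to a .md file
    createTools: (context) => [
      {
        name: 'list_tasks',
        description: 'List tasks for the current user',
        schema: z.object({}),
        // fetchTasks is a hypothetical theme helper; tools must return strings
        func: async () => JSON.stringify(await fetchTasks(context)),
      },
    ],
  },
}
```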

### ThemeLangChainConfig

```typescript
interface ThemeLangChainConfig {
  defaultProvider: LLMProvider
  defaultModel?: string
  defaultTemperature?: number
  agents?: Record<string, AgentDefinition>
}
```

### ToolContext

```typescript
interface ToolContext {
  userId: string
  teamId: string
}
```

### ChatMessage

Used for UI display (different from LangChain BaseMessage).

```typescript
interface ChatMessage {
  id: string
  role: 'user' | 'assistant'
  content: string
  timestamp: number // Unix timestamp in milliseconds
}
```

---

## REST API Endpoints

The plugin provides REST endpoints via `api/sessions/route.ts`.

> **Date Format Note:** REST API responses serialize dates as ISO 8601 strings (e.g., `"2024-01-01T00:00:00.000Z"`), while `memoryStore` methods return JavaScript `Date` objects. When using the REST API, parse dates with `new Date(response.data.createdAt)` if needed.

> **Important:** All responses are wrapped in a standard format:
> ```typescript
> {
>   success: boolean
>   data?: any // Present on success
>   error?: string // Present on failure
>   code?: string // Error code for programmatic handling
> }
> ```

### Headers Required

All endpoints require:
- **Authentication**: Session cookie or `x-api-key` header
- **Team Context**: `x-team-id` header
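
A hedged sketch of a request carrying these headers, assuming cookie-based authentication in the browser (use `x-api-key` instead when calling from a server):

```typescript
// Minimal sketch: list conversations with the required team context header.
const res = await fetch('/api/plugin/langchain/sessions', {
  method: 'GET',
  headers: {
    'x-team-id': teamId, // team context is required on every call
  },
  credentials: 'include', // send the session cookie
})

const body = await res.json() // wrapped { success, data, error, code } format
if (body.success) {
  console.log(`Found ${body.data.count} conversations`)
}
```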

### GET /api/plugin/langchain/sessions

List all conversations or get a specific one.

**Query Parameters:**
- `id` (optional): Session ID to get specific conversation

**Response (list):**
```json
{
  "success": true,
  "data": {
    "sessions": [
      {
        "sessionId": "session-123",
        "name": "My Chat",
        "messageCount": 5,
        "firstMessage": "Hello!",
        "isPinned": false,
        "createdAt": "2024-01-01T00:00:00.000Z",
        "updatedAt": "2024-01-01T00:05:00.000Z"
      }
    ],
    "count": 1,
    "maxAllowed": 50
  }
}
```

**Response (single):**
```json
{
  "success": true,
  "data": {
    "sessionId": "session-123",
    "name": "My Chat",
    "messageCount": 5,
    "firstMessage": "Hello!",
    "isPinned": false,
    "createdAt": "2024-01-01T00:00:00.000Z",
    "updatedAt": "2024-01-01T00:05:00.000Z"
  }
}
```

### POST /api/plugin/langchain/sessions

Create a new empty conversation.

**Request Body:**
```json
{
  "name": "Optional name"
}
```

**Response (success):**
```json
{
  "success": true,
  "data": {
    "sessionId": "generated-uuid",
    "name": "Optional name",
    "createdAt": "2024-01-01T00:00:00.000Z"
  }
}
```

**Response (limit reached):**
```json
{
  "success": false,
  "error": "CONVERSATION_LIMIT_REACHED",
  "message": "Maximum of 50 conversations reached. Delete an existing conversation to create a new one.",
  "data": {
    "currentCount": 50,
    "maxAllowed": 50,
    "oldestSession": {
      "sessionId": "oldest-session-id",
      "name": "Old Chat",
      "updatedAt": "2024-01-01T00:00:00.000Z"
    }
  }
}
```

### PATCH /api/plugin/langchain/sessions

Update a conversation (rename, pin/unpin).

**Request Body:**
```json
{
  "sessionId": "session-123",
  "name": "New Name",
  "isPinned": true
}
```

**Response:**
```json
{
  "success": true,
  "data": {
    "sessionId": "session-123",
    "name": "New Name",
    "isPinned": true,
    "messageCount": 5,
    "firstMessage": "Hello!",
    "createdAt": "2024-01-01T00:00:00.000Z",
    "updatedAt": "2024-01-01T00:10:00.000Z"
  }
}
```

### DELETE /api/plugin/langchain/sessions

Delete a conversation.

**Request Body:**
```json
{
  "sessionId": "session-123"
}
```

**Response:**
```json
{
  "success": true,
  "message": "Conversation deleted successfully",
  "sessionId": "session-123"
}
```

### Error Responses

**401 Unauthorized:**
```json
{ "success": false, "error": "Unauthorized" }
```

**400 Bad Request (missing team):**
```json
{ "success": false, "error": "Team context required", "code": "TEAM_CONTEXT_REQUIRED" }
```

**404 Not Found:**
```json
{ "success": false, "error": "Conversation not found" }
```

---

## Streaming API

Real-time token-by-token responses via Server-Sent Events (SSE).

### POST /api/v1/theme/default/ai/chat/stream

Stream chat responses.

**Request Body:**
```json
{
  "message": "Show my tasks",
  "sessionId": "session-123",
  "agentName": "orchestrator"
}
```

**Response Headers:**
```
Content-Type: text/event-stream
Cache-Control: no-cache
Connection: keep-alive
```

**Response Stream:**
```
data: {"type":"token","content":"I"}

data: {"type":"token","content":" found"}

data: {"type":"tool_start","toolName":"list_tasks"}

data: {"type":"tool_end","toolName":"list_tasks","result":[...]}

data: {"type":"token","content":" 3"}

data: {"type":"token","content":" tasks"}

data: {"type":"done","fullContent":"I found 3 tasks...","tokenUsage":{"inputTokens":50,"outputTokens":30}}

data: [DONE]
```

**Stream Chunk Types:**

| Type | Description |
|------|-------------|
| `token` | A generated token: `{ type: 'token', content: string }` |
| `done` | Stream complete: `{ type: 'done', fullContent: string, tokenUsage?: object }` |
| `error` | Error occurred: `{ type: 'error', error: string }` |
| `tool_start` | Tool invocation started: `{ type: 'tool_start', toolName: string }` |
| `tool_end` | Tool completed: `{ type: 'tool_end', toolName: string, result: any }` |
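
A hedged client-side sketch of consuming this stream with `fetch`. It assumes the SSE framing shown above (each event is a `data:` line followed by a blank line, ending with `[DONE]`) and that the team header is supplied like on the other endpoints:

```typescript
// Minimal sketch: POST to the streaming endpoint and accumulate tokens as they arrive.
const res = await fetch('/api/v1/theme/default/ai/chat/stream', {
  method: 'POST',
  headers: { 'Content-Type': 'application/json', 'x-team-id': teamId },
  body: JSON.stringify({ message: 'Show my tasks', sessionId, agentName: 'orchestrator' }),
})

const reader = res.body!.getReader()
const decoder = new TextDecoder()
let buffer = ''
let assistantText = ''

while (true) {
  const { done, value } = await reader.read()
  if (done) break
  buffer += decoder.decode(value, { stream: true })

  // Events are separated by a blank line; keep any trailing partial event in the buffer.
  const events = buffer.split('\n\n')
  buffer = events.pop() ?? ''

  for (const event of events) {
    if (!event.startsWith('data: ')) continue
    const payload = event.slice('data: '.length)
    if (payload === '[DONE]') continue
    const chunk = JSON.parse(payload)
    if (chunk.type === 'token') assistantText += chunk.content
    else if (chunk.type === 'error') console.error(chunk.error)
  }
}

console.log(assistantText)
```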

> **Full Documentation**: [Streaming](../04-advanced/03-streaming.md)

---

## Observability API

Tracing and metrics endpoints. **Requires superadmin authentication.**

### GET /api/v1/theme/default/observability/traces

List traces with filtering and pagination.

**Query Parameters:**
- `limit` - Results per page (default: 50)
- `offset` - Pagination offset
- `status` - Filter by status: `success` | `error`
- `agentName` - Filter by agent name
- `teamId` - Filter by team (admin only)
- `from` - Start date (ISO 8601)
- `to` - End date (ISO 8601)

**Response:**
```json
{
  "success": true,
  "data": {
    "traces": [
      {
        "traceId": "abc-123",
        "agentName": "orchestrator",
        "status": "success",
        "durationMs": 2340,
        "inputTokens": 150,
        "outputTokens": 80,
        "totalCost": 0.002,
        "llmCalls": 2,
        "toolCalls": 1,
        "startedAt": "2024-12-23T10:00:00Z"
      }
    ],
    "total": 156,
    "hasMore": true
  }
}
```
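
As a hedged sketch, a filtered query against this endpoint might be built like this (the query parameter names come from the list above; superadmin credentials are assumed to be supplied by the surrounding request context):

```typescript
// Illustrative only: list failed orchestrator traces from the last 24 hours.
const params = new URLSearchParams({
  status: 'error',
  agentName: 'orchestrator',
  limit: '20',
  from: new Date(Date.now() - 24 * 60 * 60 * 1000).toISOString(),
})

const res = await fetch(`/api/v1/theme/default/observability/traces?${params}`, {
  headers: { 'x-team-id': teamId }, // plus superadmin auth, per the note above
})
const { success, data } = await res.json()
if (success) {
  for (const trace of data.traces) {
    console.log(`${trace.traceId}: ${trace.durationMs}ms (${trace.llmCalls} LLM calls)`)
  }
}
```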

### GET /api/v1/theme/default/observability/traces/[traceId]

Get single trace with all spans.

**Response:**
```json
{
  "success": true,
  "data": {
    "trace": {
      "traceId": "abc-123",
      "input": "Show my tasks",
      "output": "Found 3 tasks...",
      "status": "success",
      "durationMs": 2340,
      "spans": [
        {
          "spanId": "span-1",
          "name": "router",
          "type": "llm",
          "provider": "anthropic",
          "model": "claude-3-haiku",
          "durationMs": 1200,
          "status": "success"
        },
        {
          "spanId": "span-2",
          "name": "task_handler",
          "type": "tool",
          "durationMs": 340,
          "status": "success"
        }
      ]
    }
  }
}
```

### GET /api/v1/theme/default/observability/metrics

Get aggregate metrics.

**Query Parameters:**
- `period` - Time period: `1h`, `24h`, `7d`, `30d`

**Response:**
```json
{
  "success": true,
  "data": {
    "summary": {
      "totalTraces": 1500,
      "successRate": 98.5,
      "avgLatencyMs": 2340,
      "totalCost": 15.23,
      "totalLlmCalls": 2800,
      "totalToolCalls": 1200
    },
    "byAgent": [
      {
        "agentName": "orchestrator",
        "count": 1500,
        "successRate": 98.5,
        "avgLatencyMs": 2340
      }
    ],
    "byStatus": {
      "success": 1478,
      "error": 22
    }
  }
}
```

> **Full Documentation**: [Observability](../04-advanced/01-observability.md)

---

## Token Usage API

Token usage and cost tracking endpoints.

### GET /api/v1/theme/default/ai/usage

Get current user's usage stats.

**Query Parameters:**
- `period` - `today`, `7d`, `30d`, or `all` (default: `30d`)

**Response:**
```json
{
  "success": true,
  "data": {
    "totalTokens": 150000,
    "totalCost": 2.45,
    "inputTokens": 100000,
    "outputTokens": 50000,
    "requestCount": 450,
    "byModel": {
      "gpt-4o-mini": { "tokens": 120000, "cost": 0.50 },
      "claude-3-haiku": { "tokens": 30000, "cost": 1.95 }
    }
  }
}
```

### GET /api/v1/theme/default/ai/usage/team

Get team-wide usage stats. **Requires admin permission.**

**Query Parameters:**
- `period` - `today`, `7d`, `30d`, or `all` (default: `30d`)

**Response:**
```json
{
  "success": true,
  "data": {
    "totalTokens": 500000,
    "totalCost": 8.50,
    "byUser": {
      "user-1": { "tokens": 200000, "cost": 3.40 },
      "user-2": { "tokens": 300000, "cost": 5.10 }
    }
  }
}
```

> **Full Documentation**: [Token Tracking](../04-advanced/02-token-tracking.md)

---

## Graph Orchestrator

### invokeOrchestrator

Invoke the graph-based orchestrator directly.

```typescript
import { invokeOrchestrator } from '@/contents/plugins/langchain/lib/graph/orchestrator'

const result = await invokeOrchestrator(
  message: string,
  sessionId: string,
  context: { userId: string, teamId: string },
  history: BaseMessage[],
  options?: {
    traceId?: string
    modelConfig?: {
      router?: ModelConfig
      combiner?: ModelConfig
    }
  }
)

interface OrchestratorResult {
  finalResponse: string
  intents: Intent[]
  handlerResults: HandlerResult[]
  completedHandlers: string[]
  error?: string
}
```
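
Reading the signature above as a concrete call, a hedged sketch of a route forwarding one user message might look like this (loading prior history via `memoryStore` is an assumption about the wiring, not a requirement of the function):

```typescript
// Minimal sketch: run one message through the graph orchestrator.
const context = { userId: 'user-123', teamId: 'team-456' }
const history = await memoryStore.getMessages(sessionId, context) // BaseMessage[]

const result = await invokeOrchestrator(
  'Show my tasks and the weather in Berlin',
  sessionId,
  context,
  history,
  { modelConfig: { router: { provider: 'ollama', temperature: 0 } } }
)

if (result.error) {
  console.error('Orchestration failed:', result.error)
} else {
  console.log(result.finalResponse)
  console.log('Handlers used:', result.completedHandlers)
}
```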

### streamChat

Stream chat responses with SSE support.

```typescript
import { streamChat, StreamChunk } from '@/contents/plugins/langchain/lib/streaming'

async function* handleStream(): AsyncGenerator<StreamChunk> {
  yield* streamChat(
    agent,
    message,
    context,
    { modelConfig: { provider: 'openai', model: 'gpt-4o-mini' } },
    {
      sessionId: 'session-123',
      agentName: 'orchestrator',
      signal: abortController.signal,
    }
  )
}
```

---

## Guardrails Service

Security middleware for input/output processing.

```typescript
import { guardrails } from '@/contents/plugins/langchain/lib/guardrails'

// Check for prompt injection
const injectionCheck = guardrails.checkInjection(input, config.promptInjection)
// { safe: boolean, reason?: string, pattern?: string }

// Mask PII
const piiResult = guardrails.maskPII(input, config.piiMasking)
// { masked: string, mappings: [...], hasPII: boolean }

// Filter content
const filterResult = guardrails.filterContent(output, config.contentFilter)
// { filtered: string, blocked: boolean, reason?: string }

// Full input pipeline
const { processed, warnings } = await guardrails.processInput(input, config)

// Full output pipeline
const { processed, blocked } = await guardrails.processOutput(output, config)
```

> **Full Documentation**: [Guardrails](../04-advanced/04-guardrails.md)

---

## Tracer Service

Observability tracing service.

```typescript
import { tracer } from '@/contents/plugins/langchain/lib/tracer'

// Start a trace
const traceContext = await tracer.startTrace(
  context: { userId, teamId },
  agentName: string,
  input: string,
  options?: { sessionId?: string, metadata?: object }
)

// Start a span within a trace
const spanContext = await tracer.startSpan(
  context: { userId, teamId },
  traceId: string,
  options: {
    name: string,
    type: 'llm' | 'tool' | 'chain',
    provider?: string,
    model?: string,
    input?: object,
  }
)

// End a span
await tracer.endSpan(
  context: { userId, teamId },
  traceId: string,
  spanId: string,
  options: { output?: object, error?: Error, tokens?: object }
)

// End a trace
await tracer.endTrace(
  context: { userId, teamId },
  traceId: string,
  options: {
    output?: string,
    error?: Error,
    tokens?: { input: number, output: number, total: number },
    cost?: number,
    llmCalls?: number,
    toolCalls?: number,
  }
)
```
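
Putting the signatures together, here is a hedged end-to-end sketch of tracing one tool call. The `traceId`/`spanId` fields on the returned contexts are assumptions based on how they are referenced above, and `listTasks` is a hypothetical tool helper:

```typescript
// Illustrative only: wrap a single tool invocation in a trace with one span.
const context = { userId: 'user-123', teamId: 'team-456' }

const trace = await tracer.startTrace(context, 'task-assistant', 'Show my tasks', {
  sessionId: 'user-123-1704067200000',
})

const span = await tracer.startSpan(context, trace.traceId, {
  name: 'list_tasks',
  type: 'tool',
  input: { filter: 'open' },
})

try {
  const tasks = await listTasks({ filter: 'open' }) // hypothetical tool call
  await tracer.endSpan(context, trace.traceId, span.spanId, { output: { count: tasks.length } })
  await tracer.endTrace(context, trace.traceId, { output: `Found ${tasks.length} tasks`, toolCalls: 1 })
} catch (error) {
  await tracer.endSpan(context, trace.traceId, span.spanId, { error: error as Error })
  await tracer.endTrace(context, trace.traceId, { error: error as Error })
}
```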

> **Full Documentation**: [Observability](../04-advanced/01-observability.md)

---

## Token Tracker Service

Token usage and cost tracking service.

```typescript
import { tokenTracker } from '@/contents/plugins/langchain/lib/token-tracker'

// Track usage
await tokenTracker.trackUsage({
  context: { userId, teamId },
  sessionId: string,
  provider: string,
  model: string,
  usage: { inputTokens: number, outputTokens: number, totalTokens: number },
  agentName?: string,
})

// Calculate cost
const costs = tokenTracker.calculateCost(model, usage)
// { inputCost: number, outputCost: number, totalCost: number }

// Get usage stats
const stats = await tokenTracker.getUsage(context, period)
// { totalTokens, totalCost, inputTokens, outputTokens, requestCount, byModel }

// Get daily usage (for charts)
const daily = await tokenTracker.getDailyUsage(context, days)
// [{ date, tokens, cost, requests }, ...]

// Get team usage (admin)
const teamStats = await tokenTracker.getTeamUsage(teamId, period)
```

> **Full Documentation**: [Token Tracking](../04-advanced/02-token-tracking.md)

---

## Environment Variables

| Variable | Description | Default | Required |
|----------|-------------|---------|----------|
| `LANGCHAIN_PLUGIN_ENABLED` | Enable/disable plugin | `false` | Yes |
| `LANGCHAIN_PLUGIN_DEBUG` | Enable debug console logging | `false` | No |
| `LOG_ENABLED` | Enable file logging to `logger/ai/` (core variable) | `false` | No |
| `LANGCHAIN_OLLAMA_BASE_URL` | Ollama server URL | `http://localhost:11434` | For Ollama |
| `LANGCHAIN_OLLAMA_MODEL` | Default Ollama model | `llama3.2:3b` | For Ollama |
| `OPENAI_API_KEY` | OpenAI API key | - | For OpenAI |
| `LANGCHAIN_OPENAI_MODEL` | Default OpenAI model | `gpt-4o-mini` | No |
| `LANGCHAIN_OPENAI_BASE_URL` | Custom OpenAI URL (for LM Studio) | - | No |
| `ANTHROPIC_API_KEY` | Anthropic API key | - | For Anthropic |
| `LANGCHAIN_ANTHROPIC_MODEL` | Default Anthropic model | `claude-3-5-sonnet-20241022` | No |
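
For reference, the variables above might be combined into an env file like the following sketch. Values are placeholders only; the packaged `.env.example` is the authoritative template:

```bash
# Illustrative values only — see the plugin's .env.example for the real template
LANGCHAIN_PLUGIN_ENABLED=true
LANGCHAIN_PLUGIN_DEBUG=false
LOG_ENABLED=false

# Ollama (local models)
LANGCHAIN_OLLAMA_BASE_URL=http://localhost:11434
LANGCHAIN_OLLAMA_MODEL=llama3.2:3b

# OpenAI
OPENAI_API_KEY=sk-...
LANGCHAIN_OPENAI_MODEL=gpt-4o-mini

# Anthropic
ANTHROPIC_API_KEY=sk-ant-...
LANGCHAIN_ANTHROPIC_MODEL=claude-3-5-sonnet-20241022
```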

---

## Constants

### Conversation Limits

```typescript
import { CONVERSATION_LIMITS } from '@/contents/plugins/langchain/lib/memory-store'

const CONVERSATION_LIMITS = {
  MAX_CONVERSATIONS: 50, // Per user per team
  MAX_MESSAGES_PER_CONVERSATION: 50 // Sliding window (oldest removed)
}
```

> **Sliding Window:** When a conversation reaches 50 messages, older messages are automatically removed when new ones are added. The most recent 50 messages are always preserved.

---

## Logging

When `LOG_ENABLED=true`, the plugin logs to files in `logger/ai/` using the core `FileLogger` utility:

```
logger/ai/
├── session-abc123-orchestrator.log
├── session-abc123-task.log
└── session-abc123-customer.log
```

> **Note:** The logging uses `FileLogger` from `@/core/lib/utils/file-logger`, a generic utility that can be used by any theme or plugin.

**Log format:**
```
[2024-01-01T12:00:00.000Z] [SESSION_INIT]
{
  "provider": "ollama",
  "model": "qwen2.5:7b",
  "temperature": 0.3,
  "toolsCount": 5
}
--------------------------------------------------
[2024-01-01T12:00:01.000Z] [USER_MESSAGE]
{
  "message": "Show me my tasks"
}
--------------------------------------------------
[2024-01-01T12:00:05.000Z] [AGENT_RESPONSE]
{
  "messages": [...]
}
--------------------------------------------------
```

---

## Error Handling

All async functions may throw errors. Handle them appropriately:

```typescript
try {
  const agent = await createAgent(config)
  const response = await agent.chat(message)
} catch (error) {
  if (error.message.includes('Provider')) {
    // LLM provider issue (API key, rate limit, connection)
    console.error('Provider error:', error.message)
  } else if (error.message.includes('database') || error.message.includes('RLS')) {
    // Database issue (connection, permissions)
    console.error('Database error:', error.message)
  } else {
    // Unknown error
    console.error('Error:', error)
  }
}
```

---

## Next Steps

- [Examples](./03-examples.md) - Real-world implementation examples
- [Graph Orchestrator](../03-orchestration/01-graph-orchestrator.md) - Modern orchestration approach
- [Observability](../04-advanced/01-observability.md) - Tracing and debugging
- [Token Tracking](../04-advanced/02-token-tracking.md) - Usage and cost monitoring
- [Streaming](../04-advanced/03-streaming.md) - Real-time responses
- [Guardrails](../04-advanced/04-guardrails.md) - Security middleware