stoops 0.1.0 → 0.2.0

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.

Potentially problematic release: this version of stoops has been flagged as possibly problematic. Click here for more details.

@@ -0,0 +1,30 @@
1
+ import { I as ILLMSession, R as RoomResolver, L as LangGraphSessionOptions, a as ContentPart } from '../types-CzHDzfHA.js';
2
+ import '../index-DlxJ95ki.js';
3
+ import 'zod';
4
+
5
+ /**
6
+ * LangGraph-based LLM session backend for stoops agents.
7
+ *
8
+ * Connects to the stoops MCP server via HTTP URL (same server as Claude backend).
9
+ * Uses a custom StateGraph with inject/agent/tools nodes.
10
+ */
11
+
12
+ declare class LangGraphSession implements ILLMSession {
13
+ private _systemPrompt;
14
+ private _resolver;
15
+ private _model;
16
+ private _options;
17
+ private _threadId;
18
+ private _processing;
19
+ private _mcpServer;
20
+ private _mcpClient;
21
+ private _graph;
22
+ constructor(systemPrompt: string, resolver: RoomResolver, model: string, options?: LangGraphSessionOptions);
23
+ start(): Promise<void>;
24
+ stop(): Promise<void>;
25
+ setApiKey(key: string): void;
26
+ process(parts: ContentPart[]): Promise<void>;
27
+ }
28
+ declare function createLangGraphSession(systemPrompt: string, resolver: RoomResolver, model: string, options: LangGraphSessionOptions): ILLMSession;
29
+
30
+ export { LangGraphSession, createLangGraphSession };
@@ -0,0 +1,250 @@
1
+ import {
2
+ createFullMcpServer
3
+ } from "../chunk-7PKT5MPI.js";
4
+ import {
5
+ contentPartsToString
6
+ } from "../chunk-BLGV3QN4.js";
7
+ import "../chunk-HQS7HBZR.js";
8
+
9
+ // src/langgraph/session.ts
10
+ var TOKEN_PRICING = {
11
+ "claude-sonnet-4-5-20250929": { input: 3, output: 15 },
12
+ "claude-haiku-4-5-20251001": { input: 0.8, output: 4 },
13
+ "claude-opus-4-5-20250918": { input: 15, output: 75 },
14
+ "claude-opus-4-5-20250929": { input: 15, output: 75 },
15
+ "gpt-4o": { input: 2.5, output: 10 },
16
+ "gpt-4o-mini": { input: 0.15, output: 0.6 },
17
+ "o3": { input: 10, output: 40 },
18
+ "o3-mini": { input: 1.1, output: 4.4 },
19
+ "gemini-2.0-flash": { input: 0.1, output: 0.4 },
20
+ "gemini-2.5-pro": { input: 1.25, output: 10 }
21
+ };
22
+ var MODEL_CONTEXT_WINDOWS = {
23
+ "claude-haiku-4-5-20251001": 2e5,
24
+ "claude-sonnet-4-5-20250929": 2e5,
25
+ "claude-opus-4-5-20250918": 2e5,
26
+ "claude-opus-4-5-20250929": 2e5,
27
+ "gpt-4o": 128e3,
28
+ "gpt-4o-mini": 128e3,
29
+ "o3": 2e5,
30
+ "o3-mini": 2e5,
31
+ "gemini-2.0-flash": 1e6,
32
+ "gemini-2.5-pro": 1e6
33
+ };
34
+ function estimateCost(modelName, inputTokens, outputTokens) {
35
+ const bare = modelName.includes(":") ? modelName.split(":").pop() : modelName;
36
+ const pricing = TOKEN_PRICING[bare];
37
+ if (!pricing) return 0;
38
+ return (inputTokens * pricing.input + outputTokens * pricing.output) / 1e6;
39
+ }
40
+ function getContextWindow(modelName) {
41
+ const bare = modelName.includes(":") ? modelName.split(":").pop() : modelName;
42
+ return MODEL_CONTEXT_WINDOWS[bare] ?? 2e5;
43
+ }
44
+ var LangGraphSession = class {
45
+ _systemPrompt;
46
+ _resolver;
47
+ _model;
48
+ _options;
49
+ _threadId;
50
+ _processing = false;
51
+ _mcpServer = null;
52
+ // eslint-disable-next-line @typescript-eslint/no-explicit-any
53
+ _mcpClient = null;
54
+ // eslint-disable-next-line @typescript-eslint/no-explicit-any
55
+ _graph = null;
56
+ constructor(systemPrompt, resolver, model, options = {}) {
57
+ this._systemPrompt = systemPrompt;
58
+ this._resolver = resolver;
59
+ this._model = model;
60
+ this._options = options;
61
+ this._threadId = crypto.randomUUID();
62
+ }
63
+ async start() {
64
+ try {
65
+ await import("@langchain/langgraph");
66
+ } catch {
67
+ throw new Error(
68
+ "LangGraph backend requires @langchain/langgraph, @langchain/core, @langchain/mcp-adapters, and langchain. Install them with: npm install @langchain/langgraph @langchain/core @langchain/mcp-adapters langchain @langchain/anthropic"
69
+ );
70
+ }
71
+ this._mcpServer = await createFullMcpServer(this._resolver, {
72
+ isEventSeen: this._options.isEventSeen,
73
+ markEventsSeen: this._options.markEventsSeen,
74
+ assignRef: this._options.assignRef,
75
+ resolveRef: this._options.resolveRef
76
+ });
77
+ const mcpUrl = this._mcpServer.url;
78
+ const { StateGraph, MemorySaver, MessagesValue, START, END } = await import("@langchain/langgraph");
79
+ const { StateSchema } = await import("@langchain/langgraph");
80
+ const { initChatModel } = await import("langchain/chat_models/universal");
81
+ const { ToolNode } = await import("@langchain/langgraph/prebuilt");
82
+ const { MultiServerMCPClient } = await import("@langchain/mcp-adapters");
83
+ const mcpConfig = {
84
+ servers: {
85
+ stoops_tools: {
86
+ transport: "streamable_http",
87
+ url: mcpUrl
88
+ }
89
+ }
90
+ };
91
+ this._mcpClient = new MultiServerMCPClient(
92
+ mcpConfig
93
+ );
94
+ const tools = await this._mcpClient.getTools();
95
+ const llm = await initChatModel(this._model, {
96
+ temperature: 0,
97
+ ...this._options.apiKey ? { apiKey: this._options.apiKey } : {}
98
+ });
99
+ const llmWithTools = llm.bindTools(tools);
100
+ const AgentState = new StateSchema({
101
+ messages: MessagesValue
102
+ });
103
+ const options = this._options;
104
+ const injectNode = async (state) => {
105
+ const drained = options.drainEventQueue?.();
106
+ if (!drained || drained.length === 0) return {};
107
+ const { HumanMessage } = await import("@langchain/core/messages");
108
+ const lines = ["While you were responding, this happened:\n"];
109
+ for (const parts of drained) {
110
+ for (const part of parts) {
111
+ if (part.type === "text") lines.push(part.text);
112
+ }
113
+ }
114
+ return { messages: [new HumanMessage({ content: lines.join("\n") })] };
115
+ };
116
+ const agentNode = async (state) => {
117
+ const response = await llmWithTools.invoke(state.messages);
118
+ return { messages: [response] };
119
+ };
120
+ const toolNode = new ToolNode(tools);
121
+ const toolsNodeWrapped = async (state) => {
122
+ const lastMsg = state.messages[state.messages.length - 1];
123
+ const toolCalls = lastMsg?.tool_calls ?? [];
124
+ for (const tc of toolCalls) options.onToolUse?.(tc.name, "started");
125
+ const result = await toolNode.invoke(state);
126
+ for (const tc of toolCalls) options.onToolUse?.(tc.name, "completed");
127
+ return result;
128
+ };
129
+ const shouldContinue = (state) => {
130
+ const lastMsg = state.messages[state.messages.length - 1];
131
+ return lastMsg?.tool_calls && lastMsg.tool_calls.length > 0 ? "tools" : END;
132
+ };
133
+ const checkpointer = new MemorySaver();
134
+ this._graph = new StateGraph(AgentState).addNode("inject", injectNode).addNode("agent", agentNode).addNode("tools", toolsNodeWrapped).addEdge(START, "inject").addEdge("inject", "agent").addConditionalEdges("agent", shouldContinue, ["tools", END]).addEdge("tools", "inject").compile({ checkpointer });
135
+ }
136
+ async stop() {
137
+ try {
138
+ await this._mcpClient?.close?.();
139
+ } catch {
140
+ }
141
+ this._mcpClient = null;
142
+ await this._mcpServer?.stop();
143
+ this._mcpServer = null;
144
+ this._graph = null;
145
+ }
146
+ setApiKey(key) {
147
+ this._options = { ...this._options, apiKey: key };
148
+ }
149
+ async process(parts) {
150
+ if (!this._graph) throw new Error("Session not started");
151
+ if (this._processing) return;
152
+ this._processing = true;
153
+ const inputForTrace = contentPartsToString(parts);
154
+ const startTime = Date.now();
155
+ const turns = [];
156
+ try {
157
+ const { HumanMessage, SystemMessage } = await import("@langchain/core/messages");
158
+ const messageContent = [];
159
+ for (const part of parts) {
160
+ if (part.type === "text") {
161
+ messageContent.push({ type: "text", text: part.text });
162
+ } else {
163
+ messageContent.push({ type: "image_url", image_url: { url: part.url } });
164
+ }
165
+ }
166
+ const state = await this._graph.getState({ configurable: { thread_id: this._threadId } });
167
+ const existingMessageCount = state?.values?.messages?.length ?? 0;
168
+ const isFirstInvocation = existingMessageCount === 0;
169
+ const inputMessages = [];
170
+ if (isFirstInvocation) {
171
+ inputMessages.push(new SystemMessage({ content: this._systemPrompt }));
172
+ }
173
+ inputMessages.push(new HumanMessage({ content: messageContent }));
174
+ const result = await this._graph.invoke(
175
+ { messages: inputMessages },
176
+ { configurable: { thread_id: this._threadId } }
177
+ );
178
+ const allMessages = result.messages ?? [];
179
+ const resultMessages = allMessages.slice(existingMessageCount);
180
+ let totalInputTokens = 0;
181
+ let totalOutputTokens = 0;
182
+ let numTurns = 0;
183
+ for (const msg of resultMessages) {
184
+ const usage = msg?.usage_metadata ?? msg?.response_metadata?.usage;
185
+ if (usage) {
186
+ totalInputTokens += usage.input_tokens ?? usage.prompt_tokens ?? 0;
187
+ totalOutputTokens += usage.output_tokens ?? usage.completion_tokens ?? 0;
188
+ numTurns++;
189
+ }
190
+ if (msg?.tool_calls?.length) {
191
+ for (const tc of msg.tool_calls) {
192
+ turns.push({ type: "tool_use", tool: tc.name, content: tc.args });
193
+ }
194
+ }
195
+ if (msg?.role === "tool" || msg?._getType?.() === "tool") {
196
+ turns.push({ type: "tool_result", tool: msg.name ?? "unknown", content: msg.content });
197
+ }
198
+ }
199
+ const contextWindow = getContextWindow(this._model);
200
+ const autoCompactPct = this._options.autoCompactPct ?? 80;
201
+ const usagePct = Math.round(totalInputTokens / contextWindow * 100);
202
+ if (usagePct >= autoCompactPct) this._options.onContextCompacted?.();
203
+ if (this._options.onQueryComplete) {
204
+ const durationMs = Date.now() - startTime;
205
+ this._options.onQueryComplete({
206
+ totalCostUsd: estimateCost(this._model, totalInputTokens, totalOutputTokens),
207
+ durationMs,
208
+ durationApiMs: durationMs,
209
+ numTurns,
210
+ inputTokens: totalInputTokens,
211
+ outputTokens: totalOutputTokens,
212
+ cacheReadInputTokens: 0,
213
+ cacheCreationInputTokens: 0,
214
+ isError: false,
215
+ contextPct: Math.max(0, Math.min(100, usagePct)),
216
+ input: inputForTrace,
217
+ turns
218
+ });
219
+ }
220
+ } catch (err) {
221
+ if (this._options.onQueryComplete) {
222
+ this._options.onQueryComplete({
223
+ totalCostUsd: 0,
224
+ durationMs: Date.now() - startTime,
225
+ durationApiMs: 0,
226
+ numTurns: 0,
227
+ inputTokens: 0,
228
+ outputTokens: 0,
229
+ cacheReadInputTokens: 0,
230
+ cacheCreationInputTokens: 0,
231
+ isError: true,
232
+ contextPct: 0,
233
+ input: inputForTrace,
234
+ turns
235
+ });
236
+ }
237
+ throw err;
238
+ } finally {
239
+ this._processing = false;
240
+ }
241
+ }
242
+ };
243
+ function createLangGraphSession(systemPrompt, resolver, model, options) {
244
+ return new LangGraphSession(systemPrompt, resolver, model, options);
245
+ }
246
+ export {
247
+ LangGraphSession,
248
+ createLangGraphSession
249
+ };
250
+ //# sourceMappingURL=index.js.map
@@ -0,0 +1 @@
1
+ {"version":3,"sources":["../../src/langgraph/session.ts"],"sourcesContent":["/**\n * LangGraph-based LLM session backend for stoops agents.\n *\n * Connects to the stoops MCP server via HTTP URL (same server as Claude backend).\n * Uses a custom StateGraph with inject/agent/tools nodes.\n */\n\nimport type { RoomResolver, LangGraphSessionOptions, ILLMSession, ContentPart, QueryTurn } from \"../agent/types.js\";\nimport { contentPartsToString } from \"../agent/prompts.js\";\nimport { createFullMcpServer, type StoopsMcpServer } from \"../agent/mcp/index.js\";\n\n// ── Token pricing table (approximate, USD per 1M tokens) ─────────────────────\n// Last updated: 2026-02. Add new models as they launch.\n// Unknown models return cost 0 — callers should not rely on this for billing.\n\nconst TOKEN_PRICING: Record<string, { input: number; output: number }> = {\n \"claude-sonnet-4-5-20250929\": { input: 3.0, output: 15.0 },\n \"claude-haiku-4-5-20251001\": { input: 0.8, output: 4.0 },\n \"claude-opus-4-5-20250918\": { input: 15.0, output: 75.0 },\n \"claude-opus-4-5-20250929\": { input: 15.0, output: 75.0 },\n \"gpt-4o\": { input: 2.5, output: 10.0 },\n \"gpt-4o-mini\": { input: 0.15, output: 0.6 },\n \"o3\": { input: 10.0, output: 40.0 },\n \"o3-mini\": { input: 1.1, output: 4.4 },\n \"gemini-2.0-flash\": { input: 0.1, output: 0.4 },\n \"gemini-2.5-pro\": { input: 1.25, output: 10.0 },\n};\n\nconst MODEL_CONTEXT_WINDOWS: Record<string, number> = {\n \"claude-haiku-4-5-20251001\": 200_000,\n \"claude-sonnet-4-5-20250929\": 200_000,\n \"claude-opus-4-5-20250918\": 200_000,\n \"claude-opus-4-5-20250929\": 200_000,\n \"gpt-4o\": 128_000,\n \"gpt-4o-mini\": 128_000,\n \"o3\": 200_000,\n \"o3-mini\": 200_000,\n \"gemini-2.0-flash\": 1_000_000,\n \"gemini-2.5-pro\": 1_000_000,\n};\n\nfunction estimateCost(modelName: string, inputTokens: number, outputTokens: number): number {\n const bare = modelName.includes(\":\") ? modelName.split(\":\").pop()! 
: modelName;\n const pricing = TOKEN_PRICING[bare];\n if (!pricing) return 0;\n return (inputTokens * pricing.input + outputTokens * pricing.output) / 1_000_000;\n}\n\nfunction getContextWindow(modelName: string): number {\n const bare = modelName.includes(\":\") ? modelName.split(\":\").pop()! : modelName;\n return MODEL_CONTEXT_WINDOWS[bare] ?? 200_000;\n}\n\nexport class LangGraphSession implements ILLMSession {\n private _systemPrompt: string;\n private _resolver: RoomResolver;\n private _model: string;\n private _options: LangGraphSessionOptions;\n private _threadId: string;\n private _processing = false;\n private _mcpServer: StoopsMcpServer | null = null;\n // eslint-disable-next-line @typescript-eslint/no-explicit-any\n private _mcpClient: any = null;\n // eslint-disable-next-line @typescript-eslint/no-explicit-any\n private _graph: any = null;\n\n constructor(\n systemPrompt: string,\n resolver: RoomResolver,\n model: string,\n options: LangGraphSessionOptions = {},\n ) {\n this._systemPrompt = systemPrompt;\n this._resolver = resolver;\n this._model = model;\n this._options = options;\n this._threadId = crypto.randomUUID();\n }\n\n async start(): Promise<void> {\n // Validate LangChain dependencies are installed (they're optional in package.json)\n try {\n await import(\"@langchain/langgraph\");\n } catch {\n throw new Error(\n \"LangGraph backend requires @langchain/langgraph, @langchain/core, @langchain/mcp-adapters, and langchain. 
\" +\n \"Install them with: npm install @langchain/langgraph @langchain/core @langchain/mcp-adapters langchain @langchain/anthropic\",\n );\n }\n\n // Start the shared stoops MCP server\n this._mcpServer = await createFullMcpServer(this._resolver, {\n isEventSeen: this._options.isEventSeen,\n markEventsSeen: this._options.markEventsSeen,\n assignRef: this._options.assignRef,\n resolveRef: this._options.resolveRef,\n });\n\n const mcpUrl = this._mcpServer.url;\n\n const { StateGraph, MemorySaver, MessagesValue, START, END } = await import(\"@langchain/langgraph\");\n const { StateSchema } = await import(\"@langchain/langgraph\");\n const { initChatModel } = await import(\"langchain/chat_models/universal\");\n const { ToolNode } = await import(\"@langchain/langgraph/prebuilt\");\n const { MultiServerMCPClient } = await import(\"@langchain/mcp-adapters\");\n\n // Connect to stoops MCP server via HTTP.\n // MultiServerMCPClient's type doesn't accept arbitrary string keys for server\n // names in its config, but the runtime API does. We assert the config shape\n // precisely and then cast to the constructor's parameter type.\n const mcpConfig = {\n servers: {\n stoops_tools: {\n transport: \"streamable_http\" as const,\n url: mcpUrl,\n },\n },\n };\n this._mcpClient = new MultiServerMCPClient(\n mcpConfig as unknown as ConstructorParameters<typeof MultiServerMCPClient>[0],\n );\n\n const tools = await this._mcpClient.getTools();\n\n const llm = await initChatModel(this._model, {\n temperature: 0,\n ...(this._options.apiKey ? 
{ apiKey: this._options.apiKey } : {}),\n });\n const llmWithTools = llm.bindTools(tools);\n\n const AgentState = new StateSchema({\n messages: MessagesValue,\n });\n\n const options = this._options;\n\n // inject node: drains mid-loop event buffer between tool rounds.\n // Events are already pre-formatted by the runtime (timestamps, room labels,\n // participant icons) — we just extract the text from ContentPart[].\n const injectNode = async (state: { messages: unknown[] }) => {\n const drained = options.drainEventQueue?.();\n if (!drained || drained.length === 0) return {};\n const { HumanMessage } = await import(\"@langchain/core/messages\");\n const lines = [\"While you were responding, this happened:\\n\"];\n for (const parts of drained) {\n for (const part of parts) {\n if (part.type === \"text\") lines.push(part.text);\n }\n }\n return { messages: [new HumanMessage({ content: lines.join(\"\\n\") })] };\n };\n\n const agentNode = async (state: { messages: unknown[] }) => {\n // eslint-disable-next-line @typescript-eslint/no-explicit-any\n const response = await llmWithTools.invoke(state.messages as any);\n return { messages: [response] };\n };\n\n const toolNode = new ToolNode(tools);\n const toolsNodeWrapped = async (state: { messages: unknown[] }) => {\n const lastMsg = state.messages[state.messages.length - 1] as { tool_calls?: Array<{ name: string }> };\n const toolCalls = lastMsg?.tool_calls ?? [];\n for (const tc of toolCalls) options.onToolUse?.(tc.name, \"started\");\n const result = await toolNode.invoke(state);\n for (const tc of toolCalls) options.onToolUse?.(tc.name, \"completed\");\n return result;\n };\n\n const shouldContinue = (state: { messages: unknown[] }) => {\n const lastMsg = state.messages[state.messages.length - 1] as { tool_calls?: unknown[] };\n return lastMsg?.tool_calls && (lastMsg.tool_calls as unknown[]).length > 0 ? 
\"tools\" : END;\n };\n\n const checkpointer = new MemorySaver();\n\n this._graph = new StateGraph(AgentState)\n .addNode(\"inject\", injectNode)\n .addNode(\"agent\", agentNode)\n .addNode(\"tools\", toolsNodeWrapped)\n .addEdge(START, \"inject\")\n .addEdge(\"inject\", \"agent\")\n .addConditionalEdges(\"agent\", shouldContinue, [\"tools\", END])\n .addEdge(\"tools\", \"inject\")\n .compile({ checkpointer });\n }\n\n async stop(): Promise<void> {\n try { await this._mcpClient?.close?.(); } catch { /* best effort */ }\n this._mcpClient = null;\n await this._mcpServer?.stop();\n this._mcpServer = null;\n this._graph = null;\n }\n\n setApiKey(key: string): void {\n this._options = { ...this._options, apiKey: key };\n }\n\n async process(parts: ContentPart[]): Promise<void> {\n if (!this._graph) throw new Error(\"Session not started\");\n if (this._processing) return;\n this._processing = true;\n\n const inputForTrace = contentPartsToString(parts);\n const startTime = Date.now();\n const turns: QueryTurn[] = [];\n\n try {\n const { HumanMessage, SystemMessage } = await import(\"@langchain/core/messages\");\n\n const messageContent: Array<{ type: string; text?: string; image_url?: { url: string } }> = [];\n for (const part of parts) {\n if (part.type === \"text\") {\n messageContent.push({ type: \"text\", text: part.text });\n } else {\n messageContent.push({ type: \"image_url\", image_url: { url: part.url } });\n }\n }\n\n const state = await this._graph.getState({ configurable: { thread_id: this._threadId } });\n const existingMessageCount = state?.values?.messages?.length ?? 
0;\n const isFirstInvocation = existingMessageCount === 0;\n\n const inputMessages: unknown[] = [];\n if (isFirstInvocation) {\n inputMessages.push(new SystemMessage({ content: this._systemPrompt }));\n }\n inputMessages.push(new HumanMessage({ content: messageContent }));\n\n const result = await this._graph.invoke(\n { messages: inputMessages },\n { configurable: { thread_id: this._threadId } },\n );\n\n const allMessages = result.messages ?? [];\n // Only count new messages from this invocation (skip historical ones)\n const resultMessages = allMessages.slice(existingMessageCount);\n let totalInputTokens = 0;\n let totalOutputTokens = 0;\n let numTurns = 0;\n\n // Token usage field names vary by provider:\n // Anthropic: usage_metadata.{input_tokens, output_tokens}\n // OpenAI: response_metadata.usage.{prompt_tokens, completion_tokens}\n for (const msg of resultMessages) {\n const usage = msg?.usage_metadata ?? msg?.response_metadata?.usage;\n if (usage) {\n totalInputTokens += usage.input_tokens ?? usage.prompt_tokens ?? 0;\n totalOutputTokens += usage.output_tokens ?? usage.completion_tokens ?? 0;\n numTurns++;\n }\n if (msg?.tool_calls?.length) {\n for (const tc of msg.tool_calls) {\n turns.push({ type: \"tool_use\", tool: tc.name, content: tc.args });\n }\n }\n if (msg?.role === \"tool\" || msg?._getType?.() === \"tool\") {\n turns.push({ type: \"tool_result\", tool: msg.name ?? \"unknown\", content: msg.content });\n }\n }\n\n const contextWindow = getContextWindow(this._model);\n const autoCompactPct = this._options.autoCompactPct ?? 
80;\n const usagePct = Math.round((totalInputTokens / contextWindow) * 100);\n if (usagePct >= autoCompactPct) this._options.onContextCompacted?.();\n\n if (this._options.onQueryComplete) {\n const durationMs = Date.now() - startTime;\n this._options.onQueryComplete({\n totalCostUsd: estimateCost(this._model, totalInputTokens, totalOutputTokens),\n durationMs,\n durationApiMs: durationMs,\n numTurns,\n inputTokens: totalInputTokens,\n outputTokens: totalOutputTokens,\n cacheReadInputTokens: 0,\n cacheCreationInputTokens: 0,\n isError: false,\n contextPct: Math.max(0, Math.min(100, usagePct)),\n input: inputForTrace,\n turns,\n });\n }\n } catch (err) {\n if (this._options.onQueryComplete) {\n this._options.onQueryComplete({\n totalCostUsd: 0,\n durationMs: Date.now() - startTime,\n durationApiMs: 0,\n numTurns: 0,\n inputTokens: 0,\n outputTokens: 0,\n cacheReadInputTokens: 0,\n cacheCreationInputTokens: 0,\n isError: true,\n contextPct: 0,\n input: inputForTrace,\n turns,\n });\n }\n throw err;\n } finally {\n this._processing = false;\n }\n }\n}\n\nexport function createLangGraphSession(\n systemPrompt: string,\n resolver: RoomResolver,\n model: string,\n options: LangGraphSessionOptions,\n): ILLMSession {\n return new LangGraphSession(systemPrompt, resolver, model, 
options);\n}\n"],"mappings":";;;;;;;;;AAeA,IAAM,gBAAmE;AAAA,EACvE,8BAA8B,EAAE,OAAO,GAAM,QAAQ,GAAK;AAAA,EAC1D,6BAA8B,EAAE,OAAO,KAAM,QAAQ,EAAI;AAAA,EACzD,4BAA8B,EAAE,OAAO,IAAM,QAAQ,GAAK;AAAA,EAC1D,4BAA8B,EAAE,OAAO,IAAM,QAAQ,GAAK;AAAA,EAC1D,UAA8B,EAAE,OAAO,KAAM,QAAQ,GAAK;AAAA,EAC1D,eAA8B,EAAE,OAAO,MAAM,QAAQ,IAAI;AAAA,EACzD,MAA8B,EAAE,OAAO,IAAM,QAAQ,GAAK;AAAA,EAC1D,WAA8B,EAAE,OAAO,KAAM,QAAQ,IAAI;AAAA,EACzD,oBAA8B,EAAE,OAAO,KAAM,QAAQ,IAAI;AAAA,EACzD,kBAA8B,EAAE,OAAO,MAAM,QAAQ,GAAK;AAC5D;AAEA,IAAM,wBAAgD;AAAA,EACpD,6BAA8B;AAAA,EAC9B,8BAA8B;AAAA,EAC9B,4BAA8B;AAAA,EAC9B,4BAA8B;AAAA,EAC9B,UAAkB;AAAA,EAClB,eAAkB;AAAA,EAClB,MAAkB;AAAA,EAClB,WAAkB;AAAA,EAClB,oBAAoB;AAAA,EACpB,kBAAoB;AACtB;AAEA,SAAS,aAAa,WAAmB,aAAqB,cAA8B;AAC1F,QAAM,OAAO,UAAU,SAAS,GAAG,IAAI,UAAU,MAAM,GAAG,EAAE,IAAI,IAAK;AACrE,QAAM,UAAU,cAAc,IAAI;AAClC,MAAI,CAAC,QAAS,QAAO;AACrB,UAAQ,cAAc,QAAQ,QAAQ,eAAe,QAAQ,UAAU;AACzE;AAEA,SAAS,iBAAiB,WAA2B;AACnD,QAAM,OAAO,UAAU,SAAS,GAAG,IAAI,UAAU,MAAM,GAAG,EAAE,IAAI,IAAK;AACrE,SAAO,sBAAsB,IAAI,KAAK;AACxC;AAEO,IAAM,mBAAN,MAA8C;AAAA,EAC3C;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA,cAAc;AAAA,EACd,aAAqC;AAAA;AAAA,EAErC,aAAkB;AAAA;AAAA,EAElB,SAAc;AAAA,EAEtB,YACE,cACA,UACA,OACA,UAAmC,CAAC,GACpC;AACA,SAAK,gBAAgB;AACrB,SAAK,YAAY;AACjB,SAAK,SAAS;AACd,SAAK,WAAW;AAChB,SAAK,YAAY,OAAO,WAAW;AAAA,EACrC;AAAA,EAEA,MAAM,QAAuB;AAE3B,QAAI;AACF,YAAM,OAAO,sBAAsB;AAAA,IACrC,QAAQ;AACN,YAAM,IAAI;AAAA,QACR;AAAA,MAEF;AAAA,IACF;AAGA,SAAK,aAAa,MAAM,oBAAoB,KAAK,WAAW;AAAA,MAC1D,aAAa,KAAK,SAAS;AAAA,MAC3B,gBAAgB,KAAK,SAAS;AAAA,MAC9B,WAAW,KAAK,SAAS;AAAA,MACzB,YAAY,KAAK,SAAS;AAAA,IAC5B,CAAC;AAED,UAAM,SAAS,KAAK,WAAW;AAE/B,UAAM,EAAE,YAAY,aAAa,eAAe,OAAO,IAAI,IAAI,MAAM,OAAO,sBAAsB;AAClG,UAAM,EAAE,YAAY,IAAI,MAAM,OAAO,sBAAsB;AAC3D,UAAM,EAAE,cAAc,IAAI,MAAM,OAAO,iCAAiC;AACxE,UAAM,EAAE,SAAS,IAAI,MAAM,OAAO,+BAA+B;AACjE,UAAM,EAAE,qBAAqB,IAAI,MAAM,OAAO,yBAAyB;AAMvE,UAAM,YAAY;AAAA,MAChB,SAAS;AAAA,QACP,cAAc;AAAA,UACZ,WAAW;AAAA,UACX,KAAK;AAAA,QACP;AAAA,MACF;AAAA,IACF;AACA,SAAK,aAAa,IAAI;AAAA,MACpB;AAAA,IACF;AAEA,UAAM,QAA
Q,MAAM,KAAK,WAAW,SAAS;AAE7C,UAAM,MAAM,MAAM,cAAc,KAAK,QAAQ;AAAA,MAC3C,aAAa;AAAA,MACb,GAAI,KAAK,SAAS,SAAS,EAAE,QAAQ,KAAK,SAAS,OAAO,IAAI,CAAC;AAAA,IACjE,CAAC;AACD,UAAM,eAAe,IAAI,UAAU,KAAK;AAExC,UAAM,aAAa,IAAI,YAAY;AAAA,MACjC,UAAU;AAAA,IACZ,CAAC;AAED,UAAM,UAAU,KAAK;AAKrB,UAAM,aAAa,OAAO,UAAmC;AAC3D,YAAM,UAAU,QAAQ,kBAAkB;AAC1C,UAAI,CAAC,WAAW,QAAQ,WAAW,EAAG,QAAO,CAAC;AAC9C,YAAM,EAAE,aAAa,IAAI,MAAM,OAAO,0BAA0B;AAChE,YAAM,QAAQ,CAAC,6CAA6C;AAC5D,iBAAW,SAAS,SAAS;AAC3B,mBAAW,QAAQ,OAAO;AACxB,cAAI,KAAK,SAAS,OAAQ,OAAM,KAAK,KAAK,IAAI;AAAA,QAChD;AAAA,MACF;AACA,aAAO,EAAE,UAAU,CAAC,IAAI,aAAa,EAAE,SAAS,MAAM,KAAK,IAAI,EAAE,CAAC,CAAC,EAAE;AAAA,IACvE;AAEA,UAAM,YAAY,OAAO,UAAmC;AAE1D,YAAM,WAAW,MAAM,aAAa,OAAO,MAAM,QAAe;AAChE,aAAO,EAAE,UAAU,CAAC,QAAQ,EAAE;AAAA,IAChC;AAEA,UAAM,WAAW,IAAI,SAAS,KAAK;AACnC,UAAM,mBAAmB,OAAO,UAAmC;AACjE,YAAM,UAAU,MAAM,SAAS,MAAM,SAAS,SAAS,CAAC;AACxD,YAAM,YAAY,SAAS,cAAc,CAAC;AAC1C,iBAAW,MAAM,UAAW,SAAQ,YAAY,GAAG,MAAM,SAAS;AAClE,YAAM,SAAS,MAAM,SAAS,OAAO,KAAK;AAC1C,iBAAW,MAAM,UAAW,SAAQ,YAAY,GAAG,MAAM,WAAW;AACpE,aAAO;AAAA,IACT;AAEA,UAAM,iBAAiB,CAAC,UAAmC;AACzD,YAAM,UAAU,MAAM,SAAS,MAAM,SAAS,SAAS,CAAC;AACxD,aAAO,SAAS,cAAe,QAAQ,WAAyB,SAAS,IAAI,UAAU;AAAA,IACzF;AAEA,UAAM,eAAe,IAAI,YAAY;AAErC,SAAK,SAAS,IAAI,WAAW,UAAU,EACpC,QAAQ,UAAU,UAAU,EAC5B,QAAQ,SAAS,SAAS,EAC1B,QAAQ,SAAS,gBAAgB,EACjC,QAAQ,OAAO,QAAQ,EACvB,QAAQ,UAAU,OAAO,EACzB,oBAAoB,SAAS,gBAAgB,CAAC,SAAS,GAAG,CAAC,EAC3D,QAAQ,SAAS,QAAQ,EACzB,QAAQ,EAAE,aAAa,CAAC;AAAA,EAC7B;AAAA,EAEA,MAAM,OAAsB;AAC1B,QAAI;AAAE,YAAM,KAAK,YAAY,QAAQ;AAAA,IAAG,QAAQ;AAAA,IAAoB;AACpE,SAAK,aAAa;AAClB,UAAM,KAAK,YAAY,KAAK;AAC5B,SAAK,aAAa;AAClB,SAAK,SAAS;AAAA,EAChB;AAAA,EAEA,UAAU,KAAmB;AAC3B,SAAK,WAAW,EAAE,GAAG,KAAK,UAAU,QAAQ,IAAI;AAAA,EAClD;AAAA,EAEA,MAAM,QAAQ,OAAqC;AACjD,QAAI,CAAC,KAAK,OAAQ,OAAM,IAAI,MAAM,qBAAqB;AACvD,QAAI,KAAK,YAAa;AACtB,SAAK,cAAc;AAEnB,UAAM,gBAAgB,qBAAqB,KAAK;AAChD,UAAM,YAAY,KAAK,IAAI;AAC3B,UAAM,QAAqB,CAAC;AAE5B,QAAI;AACF,YAAM,EAAE,cAAc,cAAc,IAAI,MAAM,OAAO,0BAA0B;AAE/E,YAAM,iBAAsF,CAAC;AAC7F,iBAAW,QAAQ,OAAO;AACxB,YAAI,KAAK,SAAS,QAAQ;A
ACxB,yBAAe,KAAK,EAAE,MAAM,QAAQ,MAAM,KAAK,KAAK,CAAC;AAAA,QACvD,OAAO;AACL,yBAAe,KAAK,EAAE,MAAM,aAAa,WAAW,EAAE,KAAK,KAAK,IAAI,EAAE,CAAC;AAAA,QACzE;AAAA,MACF;AAEA,YAAM,QAAQ,MAAM,KAAK,OAAO,SAAS,EAAE,cAAc,EAAE,WAAW,KAAK,UAAU,EAAE,CAAC;AACxF,YAAM,uBAAuB,OAAO,QAAQ,UAAU,UAAU;AAChE,YAAM,oBAAoB,yBAAyB;AAEnD,YAAM,gBAA2B,CAAC;AAClC,UAAI,mBAAmB;AACrB,sBAAc,KAAK,IAAI,cAAc,EAAE,SAAS,KAAK,cAAc,CAAC,CAAC;AAAA,MACvE;AACA,oBAAc,KAAK,IAAI,aAAa,EAAE,SAAS,eAAe,CAAC,CAAC;AAEhE,YAAM,SAAS,MAAM,KAAK,OAAO;AAAA,QAC/B,EAAE,UAAU,cAAc;AAAA,QAC1B,EAAE,cAAc,EAAE,WAAW,KAAK,UAAU,EAAE;AAAA,MAChD;AAEA,YAAM,cAAc,OAAO,YAAY,CAAC;AAExC,YAAM,iBAAiB,YAAY,MAAM,oBAAoB;AAC7D,UAAI,mBAAmB;AACvB,UAAI,oBAAoB;AACxB,UAAI,WAAW;AAKf,iBAAW,OAAO,gBAAgB;AAChC,cAAM,QAAQ,KAAK,kBAAkB,KAAK,mBAAmB;AAC7D,YAAI,OAAO;AACT,8BAAoB,MAAM,gBAAgB,MAAM,iBAAiB;AACjE,+BAAqB,MAAM,iBAAiB,MAAM,qBAAqB;AACvE;AAAA,QACF;AACA,YAAI,KAAK,YAAY,QAAQ;AAC3B,qBAAW,MAAM,IAAI,YAAY;AAC/B,kBAAM,KAAK,EAAE,MAAM,YAAY,MAAM,GAAG,MAAM,SAAS,GAAG,KAAK,CAAC;AAAA,UAClE;AAAA,QACF;AACA,YAAI,KAAK,SAAS,UAAU,KAAK,WAAW,MAAM,QAAQ;AACxD,gBAAM,KAAK,EAAE,MAAM,eAAe,MAAM,IAAI,QAAQ,WAAW,SAAS,IAAI,QAAQ,CAAC;AAAA,QACvF;AAAA,MACF;AAEA,YAAM,gBAAgB,iBAAiB,KAAK,MAAM;AAClD,YAAM,iBAAiB,KAAK,SAAS,kBAAkB;AACvD,YAAM,WAAW,KAAK,MAAO,mBAAmB,gBAAiB,GAAG;AACpE,UAAI,YAAY,eAAgB,MAAK,SAAS,qBAAqB;AAEnE,UAAI,KAAK,SAAS,iBAAiB;AACjC,cAAM,aAAa,KAAK,IAAI,IAAI;AAChC,aAAK,SAAS,gBAAgB;AAAA,UAC5B,cAAc,aAAa,KAAK,QAAQ,kBAAkB,iBAAiB;AAAA,UAC3E;AAAA,UACA,eAAe;AAAA,UACf;AAAA,UACA,aAAa;AAAA,UACb,cAAc;AAAA,UACd,sBAAsB;AAAA,UACtB,0BAA0B;AAAA,UAC1B,SAAS;AAAA,UACT,YAAY,KAAK,IAAI,GAAG,KAAK,IAAI,KAAK,QAAQ,CAAC;AAAA,UAC/C,OAAO;AAAA,UACP;AAAA,QACF,CAAC;AAAA,MACH;AAAA,IACF,SAAS,KAAK;AACZ,UAAI,KAAK,SAAS,iBAAiB;AACjC,aAAK,SAAS,gBAAgB;AAAA,UAC5B,cAAc;AAAA,UACd,YAAY,KAAK,IAAI,IAAI;AAAA,UACzB,eAAe;AAAA,UACf,UAAU;AAAA,UACV,aAAa;AAAA,UACb,cAAc;AAAA,UACd,sBAAsB;AAAA,UACtB,0BAA0B;AAAA,UAC1B,SAAS;AAAA,UACT,YAAY;AAAA,UACZ,OAAO;AAAA,UACP;AAAA,QACF,CAAC;AAAA,MACH;AACA,YAAM;AAAA,IACR,UAAE;AACA,WAAK,cAAc;AAAA,IACrB;AAAA,EACF;AACF;AAEO,SAAS
,uBACd,cACA,UACA,OACA,SACa;AACb,SAAO,IAAI,iBAAiB,cAAc,UAAU,OAAO,OAAO;AACpE;","names":[]}