@inkeep/agents-run-api 0.0.0-dev-20250910232631
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/LICENSE.md +49 -0
- package/README.md +117 -0
- package/dist/__tests__/setup.d.ts +4 -0
- package/dist/__tests__/setup.d.ts.map +1 -0
- package/dist/__tests__/setup.js +80 -0
- package/dist/__tests__/utils/testProject.d.ts +18 -0
- package/dist/__tests__/utils/testProject.d.ts.map +1 -0
- package/dist/__tests__/utils/testProject.js +26 -0
- package/dist/__tests__/utils/testRequest.d.ts +8 -0
- package/dist/__tests__/utils/testRequest.d.ts.map +1 -0
- package/dist/__tests__/utils/testRequest.js +32 -0
- package/dist/__tests__/utils/testTenant.d.ts +64 -0
- package/dist/__tests__/utils/testTenant.d.ts.map +1 -0
- package/dist/__tests__/utils/testTenant.js +71 -0
- package/dist/a2a/client.d.ts +182 -0
- package/dist/a2a/client.d.ts.map +1 -0
- package/dist/a2a/client.js +645 -0
- package/dist/a2a/handlers.d.ts +4 -0
- package/dist/a2a/handlers.d.ts.map +1 -0
- package/dist/a2a/handlers.js +656 -0
- package/dist/a2a/transfer.d.ts +18 -0
- package/dist/a2a/transfer.d.ts.map +1 -0
- package/dist/a2a/transfer.js +22 -0
- package/dist/a2a/types.d.ts +63 -0
- package/dist/a2a/types.d.ts.map +1 -0
- package/dist/a2a/types.js +1 -0
- package/dist/agents/Agent.d.ts +151 -0
- package/dist/agents/Agent.d.ts.map +1 -0
- package/dist/agents/Agent.js +1164 -0
- package/dist/agents/ModelFactory.d.ts +62 -0
- package/dist/agents/ModelFactory.d.ts.map +1 -0
- package/dist/agents/ModelFactory.js +208 -0
- package/dist/agents/SystemPromptBuilder.d.ts +14 -0
- package/dist/agents/SystemPromptBuilder.d.ts.map +1 -0
- package/dist/agents/SystemPromptBuilder.js +62 -0
- package/dist/agents/ToolSessionManager.d.ts +53 -0
- package/dist/agents/ToolSessionManager.d.ts.map +1 -0
- package/dist/agents/ToolSessionManager.js +106 -0
- package/dist/agents/artifactTools.d.ts +30 -0
- package/dist/agents/artifactTools.d.ts.map +1 -0
- package/dist/agents/artifactTools.js +463 -0
- package/dist/agents/generateTaskHandler.d.ts +41 -0
- package/dist/agents/generateTaskHandler.d.ts.map +1 -0
- package/dist/agents/generateTaskHandler.js +350 -0
- package/dist/agents/relationTools.d.ts +35 -0
- package/dist/agents/relationTools.d.ts.map +1 -0
- package/dist/agents/relationTools.js +246 -0
- package/dist/agents/types.d.ts +23 -0
- package/dist/agents/types.d.ts.map +1 -0
- package/dist/agents/types.js +1 -0
- package/dist/agents/versions/V1Config.d.ts +21 -0
- package/dist/agents/versions/V1Config.d.ts.map +1 -0
- package/dist/agents/versions/V1Config.js +285 -0
- package/dist/app.d.ts +5 -0
- package/dist/app.d.ts.map +1 -0
- package/dist/app.js +219 -0
- package/dist/data/agentGraph.d.ts +4 -0
- package/dist/data/agentGraph.d.ts.map +1 -0
- package/dist/data/agentGraph.js +73 -0
- package/dist/data/agents.d.ts +4 -0
- package/dist/data/agents.d.ts.map +1 -0
- package/dist/data/agents.js +78 -0
- package/dist/data/conversations.d.ts +59 -0
- package/dist/data/conversations.d.ts.map +1 -0
- package/dist/data/conversations.js +216 -0
- package/dist/data/db/clean.d.ts +6 -0
- package/dist/data/db/clean.d.ts.map +1 -0
- package/dist/data/db/clean.js +77 -0
- package/dist/data/db/dbClient.d.ts +3 -0
- package/dist/data/db/dbClient.d.ts.map +1 -0
- package/dist/data/db/dbClient.js +13 -0
- package/dist/env.d.ts +45 -0
- package/dist/env.d.ts.map +1 -0
- package/dist/env.js +64 -0
- package/dist/handlers/executionHandler.d.ts +36 -0
- package/dist/handlers/executionHandler.d.ts.map +1 -0
- package/dist/handlers/executionHandler.js +415 -0
- package/dist/index.d.ts +11 -0
- package/dist/index.d.ts.map +1 -0
- package/dist/index.js +28 -0
- package/dist/instrumentation.d.ts +13 -0
- package/dist/instrumentation.d.ts.map +1 -0
- package/dist/instrumentation.js +66 -0
- package/dist/logger.d.ts +4 -0
- package/dist/logger.d.ts.map +1 -0
- package/dist/logger.js +32 -0
- package/dist/middleware/api-key-auth.d.ts +22 -0
- package/dist/middleware/api-key-auth.d.ts.map +1 -0
- package/dist/middleware/api-key-auth.js +139 -0
- package/dist/middleware/index.d.ts +2 -0
- package/dist/middleware/index.d.ts.map +1 -0
- package/dist/middleware/index.js +1 -0
- package/dist/openapi.d.ts +2 -0
- package/dist/openapi.d.ts.map +1 -0
- package/dist/openapi.js +36 -0
- package/dist/routes/agents.d.ts +10 -0
- package/dist/routes/agents.d.ts.map +1 -0
- package/dist/routes/agents.js +158 -0
- package/dist/routes/chat.d.ts +10 -0
- package/dist/routes/chat.d.ts.map +1 -0
- package/dist/routes/chat.js +307 -0
- package/dist/routes/chatDataStream.d.ts +10 -0
- package/dist/routes/chatDataStream.d.ts.map +1 -0
- package/dist/routes/chatDataStream.js +185 -0
- package/dist/routes/mcp.d.ts +10 -0
- package/dist/routes/mcp.d.ts.map +1 -0
- package/dist/routes/mcp.js +500 -0
- package/dist/tracer.d.ts +24 -0
- package/dist/tracer.d.ts.map +1 -0
- package/dist/tracer.js +107 -0
- package/dist/types/chat.d.ts +25 -0
- package/dist/types/chat.d.ts.map +1 -0
- package/dist/types/chat.js +1 -0
- package/dist/types/execution-context.d.ts +14 -0
- package/dist/types/execution-context.d.ts.map +1 -0
- package/dist/types/execution-context.js +14 -0
- package/dist/utils/agent-operations.d.ts +93 -0
- package/dist/utils/agent-operations.d.ts.map +1 -0
- package/dist/utils/agent-operations.js +78 -0
- package/dist/utils/artifact-component-schema.d.ts +29 -0
- package/dist/utils/artifact-component-schema.d.ts.map +1 -0
- package/dist/utils/artifact-component-schema.js +119 -0
- package/dist/utils/artifact-parser.d.ts +71 -0
- package/dist/utils/artifact-parser.d.ts.map +1 -0
- package/dist/utils/artifact-parser.js +253 -0
- package/dist/utils/cleanup.d.ts +19 -0
- package/dist/utils/cleanup.d.ts.map +1 -0
- package/dist/utils/cleanup.js +66 -0
- package/dist/utils/data-component-schema.d.ts +6 -0
- package/dist/utils/data-component-schema.d.ts.map +1 -0
- package/dist/utils/data-component-schema.js +43 -0
- package/dist/utils/graph-session.d.ts +230 -0
- package/dist/utils/graph-session.d.ts.map +1 -0
- package/dist/utils/graph-session.js +1199 -0
- package/dist/utils/incremental-stream-parser.d.ts +62 -0
- package/dist/utils/incremental-stream-parser.d.ts.map +1 -0
- package/dist/utils/incremental-stream-parser.js +330 -0
- package/dist/utils/response-formatter.d.ts +26 -0
- package/dist/utils/response-formatter.d.ts.map +1 -0
- package/dist/utils/response-formatter.js +158 -0
- package/dist/utils/stream-helpers.d.ts +186 -0
- package/dist/utils/stream-helpers.d.ts.map +1 -0
- package/dist/utils/stream-helpers.js +603 -0
- package/dist/utils/stream-registry.d.ts +18 -0
- package/dist/utils/stream-registry.d.ts.map +1 -0
- package/dist/utils/stream-registry.js +33 -0
- package/package.json +95 -0
- package/templates/v1/artifact.xml +7 -0
- package/templates/v1/data-component.xml +9 -0
- package/templates/v1/system-prompt.xml +52 -0
- package/templates/v1/thinking-preparation.xml +34 -0
- package/templates/v1/tool.xml +12 -0
|
@@ -0,0 +1,415 @@
|
|
|
1
|
+
import { createMessage, createTask, getActiveAgentForConversation, getFullGraph, getTask, updateTask, } from '@inkeep/agents-core';
|
|
2
|
+
import { trace } from '@opentelemetry/api';
|
|
3
|
+
import { A2AClient } from '../a2a/client.js';
|
|
4
|
+
import { executeTransfer, isTransferResponse } from '../a2a/transfer.js';
|
|
5
|
+
import { getLogger } from '../logger.js';
|
|
6
|
+
import { agentInitializingOp, completionOp, errorOp } from '../utils/agent-operations.js';
|
|
7
|
+
import { graphSessionManager } from '../utils/graph-session.js';
|
|
8
|
+
import { MCPStreamHelper } from '../utils/stream-helpers.js';
|
|
9
|
+
import { registerStreamHelper, unregisterStreamHelper } from '../utils/stream-registry.js';
|
|
10
|
+
import dbClient from '../data/db/dbClient.js';
|
|
11
|
+
import { nanoid } from 'nanoid';
|
|
12
|
+
// Module-scoped structured logger, tagged so log lines can be traced back to this handler.
const logger = getLogger('ExecutionHandler');
|
|
13
|
+
export class ExecutionHandler {
    // Hardcoded error limit - separate from configurable stopWhen
    MAX_ERRORS = 3;
    /**
     * Performs the execution loop.
     *
     * Runs up to the graph's configured transfer limit (stopWhen.transferCountIs, default 10):
     *
     * 1. Look up the active agent for the conversation
     * 2. Send an A2A message to the selected agent
     * 3. Parse the A2A message response
     * 4. Handle transfer responses (switch active agent, loop again)
     * 5. Handle completion responses (persist message, mark task completed, return)
     * 6. If no valid response or transfer, count an error; abort after MAX_ERRORS
     *
     * @param params execution inputs: executionContext (tenant/project/graph/apiKey/baseUrl),
     *               conversationId, userMessage, initialAgentId, requestId (doubles as the
     *               GraphSession id), and sseHelper for streaming operations to the caller
     * @returns {Promise<{success: boolean, iterations: number, response?: string, error?: string}>}
     */
    async execute(params) {
        const { executionContext, conversationId, userMessage, initialAgentId, requestId, sseHelper } = params;
        const { tenantId, projectId, graphId, apiKey, baseUrl } = executionContext;
        // Register streamHelper so agents can access it via requestId
        registerStreamHelper(requestId, sseHelper);
        // Create GraphSession for this entire message execution using requestId as the session ID
        graphSessionManager.createSession(requestId, graphId, tenantId, projectId);
        logger.info({ sessionId: requestId, graphId }, 'Created GraphSession for message execution');
        // Load graph config (also supplies the transfer limit below); status updates are
        // best-effort — a failure here must not abort the run.
        let graphConfig = null;
        try {
            graphConfig = await getFullGraph(dbClient)({ scopes: { tenantId, projectId }, graphId });
            if (graphConfig?.statusUpdates && graphConfig.statusUpdates.enabled !== false) {
                graphSessionManager.initializeStatusUpdates(requestId, graphConfig.statusUpdates, graphConfig.models?.summarizer);
            }
        }
        catch (error) {
            logger.error({
                error: error instanceof Error ? error.message : 'Unknown error',
                stack: error instanceof Error ? error.stack : undefined,
            }, '❌ Failed to initialize status updates, continuing without them');
        }
        let currentAgentId = initialAgentId;
        let iterations = 0;
        let errorCount = 0;
        let task = null;
        let fromAgentId; // Track the agent that executed a transfer
        try {
            // Send agent initializing operation immediately to ensure UI rendering
            await sseHelper.writeOperation(agentInitializingOp(requestId, graphId));
            // Deterministic task id so concurrent requests for the same conversation+request
            // collide on the primary key instead of creating duplicates.
            const taskId = `task_${conversationId}-${requestId}`;
            logger.info({ taskId, currentAgentId, conversationId, requestId }, 'Attempting to create or reuse existing task');
            try {
                // Try to create the task atomically
                task = await createTask(dbClient)({
                    id: taskId,
                    tenantId,
                    projectId,
                    agentId: currentAgentId,
                    contextId: conversationId,
                    status: 'pending',
                    metadata: {
                        conversation_id: conversationId,
                        message_id: requestId,
                        stream_request_id: requestId, // This also serves as the GraphSession ID
                        created_at: new Date().toISOString(),
                        updated_at: new Date().toISOString(),
                        root_agent_id: initialAgentId,
                        agent_id: currentAgentId,
                    },
                });
                logger.info({
                    taskId,
                    createdTaskMetadata: Array.isArray(task) ? task[0]?.metadata : task?.metadata,
                }, 'Task created with metadata');
            }
            catch (error) {
                // Handle race condition: if the task already exists due to a concurrent request,
                // fetch and reuse the existing task instead of failing.
                if (error?.message?.includes('UNIQUE constraint failed') ||
                    error?.message?.includes('PRIMARY KEY constraint failed') ||
                    error?.code === 'SQLITE_CONSTRAINT_PRIMARYKEY') {
                    logger.info({ taskId, error: error.message }, 'Task already exists, fetching existing task');
                    const existingTask = await getTask(dbClient)({ id: taskId });
                    if (existingTask) {
                        task = existingTask;
                        logger.info({ taskId, existingTask }, 'Successfully reused existing task from race condition');
                    }
                    else {
                        // This should not happen, but handle gracefully
                        logger.error({ taskId, error }, 'Task constraint failed but task not found');
                        throw error;
                    }
                }
                else {
                    // Re-throw non-constraint errors
                    logger.error({ taskId, error }, 'Failed to create task due to non-constraint error');
                    throw error;
                }
            }
            // Debug logging for execution handler (structured logging only)
            logger.debug({
                timestamp: new Date().toISOString(),
                executionType: 'create_initial_task',
                conversationId,
                requestId,
                currentAgentId,
                taskId: Array.isArray(task) ? task[0]?.id : task?.id,
                userMessage: userMessage.substring(0, 100), // Truncate for security
            }, 'ExecutionHandler: Initial task created');
            // If createTask returns an array, get the first element
            if (Array.isArray(task))
                task = task[0];
            let currentMessage = userMessage;
            // Get transfer limit from graph configuration
            const maxTransfers = graphConfig?.stopWhen?.transferCountIs ?? 10;
            // Start execution loop (bounded by maxTransfers)
            while (iterations < maxTransfers) {
                iterations++;
                logger.info({ iterations, currentAgentId, graphId, conversationId, fromAgentId }, `Execution loop iteration ${iterations} with agent ${currentAgentId}, transfer from: ${fromAgentId || 'none'}`);
                // Step 1: Determine which agent should handle the message
                const activeAgent = await getActiveAgentForConversation(dbClient)({
                    scopes: { tenantId, projectId },
                    conversationId,
                });
                logger.info({ activeAgent }, 'activeAgent');
                if (activeAgent && activeAgent.activeAgentId !== currentAgentId) {
                    currentAgentId = activeAgent.activeAgentId;
                    logger.info({ currentAgentId }, `Updated current agent to: ${currentAgentId}`);
                }
                // Step 2: Send A2A message to selected agent
                const agentBaseUrl = `${baseUrl}/agents`;
                const a2aClient = new A2AClient(agentBaseUrl, {
                    headers: {
                        Authorization: `Bearer ${apiKey}`,
                        'x-inkeep-tenant-id': tenantId,
                        'x-inkeep-project-id': projectId,
                        'x-inkeep-graph-id': graphId,
                        'x-inkeep-agent-id': currentAgentId,
                    },
                });
                let messageResponse = null;
                // Build message metadata - include fromAgentId only if this is a transfer
                const messageMetadata = {
                    stream_request_id: requestId, // This also serves as the GraphSession ID
                };
                if (fromAgentId) {
                    messageMetadata.fromAgentId = fromAgentId;
                }
                messageResponse = await a2aClient.sendMessage({
                    message: {
                        role: 'user',
                        parts: [
                            {
                                kind: 'text',
                                text: currentMessage,
                            },
                        ],
                        messageId: `${requestId}-iter-${iterations}`,
                        kind: 'message',
                        contextId: conversationId,
                        metadata: messageMetadata,
                    },
                    configuration: {
                        acceptedOutputModes: ['text', 'text/plain'],
                        blocking: false,
                    },
                });
                // Step 3: Parse A2A message response
                if (!messageResponse?.result) {
                    errorCount++;
                    logger.error({ currentAgentId, iterations, errorCount }, `No response from agent ${currentAgentId} on iteration ${iterations} (error ${errorCount}/${this.MAX_ERRORS})`);
                    // Check if we've hit the error limit
                    if (errorCount >= this.MAX_ERRORS) {
                        const errorMessage = `Maximum error limit (${this.MAX_ERRORS}) reached`;
                        logger.error({ maxErrors: this.MAX_ERRORS, errorCount }, errorMessage);
                        return await this.#fail({ sseHelper, streamedError: errorMessage, errorMessage, currentAgentId, task, requestId, iterations });
                    }
                    continue;
                }
                // Step 4: Handle transfer messages
                if (isTransferResponse(messageResponse.result)) {
                    const transferResponse = messageResponse.result;
                    // Extract targetAgentId from transfer response artifacts
                    const targetAgentId = transferResponse.artifacts?.[0]?.parts?.[0]?.data
                        ?.targetAgentId;
                    const transferReason = transferResponse.artifacts?.[0]?.parts?.[1]?.text;
                    logger.info({ targetAgentId, transferReason }, 'transfer response');
                    // Update the current message to the transfer reason so as not to duplicate the user message on every transfer
                    // including the xml because the fromAgent does not always directly address the toAgent in its text
                    currentMessage = `<transfer_context> ${transferReason} </transfer_context>`;
                    const { success, targetAgentId: newAgentId } = await executeTransfer({
                        projectId,
                        tenantId,
                        threadId: conversationId,
                        targetAgentId,
                    });
                    if (success) {
                        // Set fromAgentId to track which agent executed this transfer
                        fromAgentId = currentAgentId;
                        currentAgentId = newAgentId;
                        logger.info({
                            transferFrom: fromAgentId,
                            transferTo: currentAgentId,
                            reason: transferReason,
                        }, 'Transfer executed, tracking fromAgentId for next iteration');
                    }
                    // Continue to next iteration with new agent
                    continue;
                }
                // Step 5: Handle completion — any non-transfer response with parts is final.
                const responseParts = messageResponse.result.artifacts?.flatMap((artifact) => artifact.parts || []) || [];
                if (responseParts && responseParts.length > 0) {
                    // Log graph session data after completion response
                    const graphSessionData = graphSessionManager.getSession(requestId);
                    if (graphSessionData) {
                        const sessionSummary = graphSessionData.getSummary();
                        logger.info(sessionSummary, 'GraphSession data after completion');
                    }
                    // Collect text for database storage and tracing.
                    // NOTE: Do NOT stream content here - agents handle their own streaming
                    let textContent = '';
                    for (const part of responseParts) {
                        // Accept both `kind` (A2A) and legacy `type` discriminators
                        const isTextPart = (part.kind === 'text' || part.type === 'text') && part.text;
                        if (isTextPart) {
                            textContent += part.text;
                        }
                        // Data parts are already processed by the agent's streaming logic
                    }
                    // Attach the response to the active trace span, if any
                    const activeSpan = trace.getActiveSpan();
                    if (activeSpan) {
                        activeSpan.setAttributes({
                            'ai.response.content': textContent || 'No response content',
                            'ai.response.timestamp': new Date().toISOString(),
                            'ai.agent.name': currentAgentId,
                        });
                    }
                    // Store the agent response in the database with both text and parts
                    await createMessage(dbClient)({
                        id: nanoid(),
                        tenantId,
                        projectId,
                        conversationId,
                        role: 'agent',
                        content: {
                            text: textContent || undefined,
                            parts: responseParts.map((part) => ({
                                type: part.kind === 'text' ? 'text' : 'data',
                                text: part.kind === 'text' ? part.text : undefined,
                                data: part.kind === 'data' ? JSON.stringify(part.data) : undefined,
                            })),
                        },
                        visibility: 'user-facing',
                        messageType: 'chat',
                        agentId: currentAgentId,
                        fromAgentId: currentAgentId,
                        taskId: task.id,
                    });
                    // Mark task as completed
                    const updateTaskStart = Date.now();
                    await updateTask(dbClient)({
                        taskId: task.id,
                        data: {
                            status: 'completed',
                            metadata: {
                                ...task.metadata,
                                completed_at: new Date().toISOString(),
                                response: {
                                    text: textContent,
                                    parts: responseParts,
                                    hasText: !!textContent,
                                    hasData: responseParts.some((p) => p.kind === 'data'),
                                },
                            },
                        },
                    });
                    const updateTaskEnd = Date.now();
                    logger.info({ duration: updateTaskEnd - updateTaskStart }, 'Completed updateTask operation');
                    // Send completion data operation before ending session
                    await sseHelper.writeOperation(completionOp(currentAgentId, iterations));
                    // Complete the stream to flush any queued operations
                    await sseHelper.complete();
                    // End the GraphSession and clean up resources
                    logger.info('Ending GraphSession and cleaning up');
                    graphSessionManager.endSession(requestId);
                    // Clean up streamHelper
                    logger.info('Cleaning up streamHelper');
                    unregisterStreamHelper(requestId);
                    // Extract captured response if using MCPStreamHelper
                    let response;
                    if (sseHelper instanceof MCPStreamHelper) {
                        const captured = sseHelper.getCapturedResponse();
                        response = captured.text || 'No response content';
                    }
                    logger.info('ExecutionHandler returning success');
                    return { success: true, iterations, response };
                }
                // Step 6: neither a valid response nor a transfer — count an error
                errorCount++;
                logger.warn({ iterations, errorCount }, `No valid response or transfer on iteration ${iterations} (error ${errorCount}/${this.MAX_ERRORS})`);
                if (errorCount >= this.MAX_ERRORS) {
                    const errorMessage = `Maximum error limit (${this.MAX_ERRORS}) reached`;
                    logger.error({ maxErrors: this.MAX_ERRORS, errorCount }, errorMessage);
                    return await this.#fail({ sseHelper, streamedError: errorMessage, errorMessage, currentAgentId, task, requestId, iterations });
                }
            }
            // Max transfers reached without a completion response
            const errorMessage = `Maximum transfer limit (${maxTransfers}) reached without completion`;
            logger.error({ maxTransfers, iterations }, errorMessage);
            return await this.#fail({ sseHelper, streamedError: errorMessage, errorMessage, currentAgentId, task, requestId, iterations });
        }
        catch (error) {
            logger.error({ error }, 'Error in execution handler');
            const errorMessage = error instanceof Error ? error.message : 'Unknown execution error';
            return await this.#fail({ sseHelper, streamedError: `Execution error: ${errorMessage}`, errorMessage, currentAgentId, task, requestId, iterations });
        }
    }
    /**
     * Shared failure path: stream the error to the client, mark the task failed,
     * release session/stream resources, and build the failure result.
     * Extracted from four previously-duplicated inline blocks.
     */
    async #fail({ sseHelper, streamedError, errorMessage, currentAgentId, task, requestId, iterations }) {
        await sseHelper.writeError(streamedError);
        await sseHelper.writeOperation(errorOp(errorMessage, currentAgentId || 'system'));
        await this.#markTaskFailed(task, errorMessage);
        // Clean up GraphSession and streamHelper on error
        graphSessionManager.endSession(requestId);
        unregisterStreamHelper(requestId);
        return { success: false, error: errorMessage, iterations };
    }
    /** Mark the task failed in the database, preserving existing metadata. No-op when task is null. */
    async #markTaskFailed(task, errorMessage) {
        if (!task)
            return;
        await updateTask(dbClient)({
            taskId: task.id,
            data: {
                status: 'failed',
                metadata: {
                    ...task.metadata,
                    failed_at: new Date().toISOString(),
                    error: errorMessage,
                },
            },
        });
    }
}
|
package/dist/index.d.ts
ADDED
|
@@ -0,0 +1,11 @@
|
|
|
1
|
+
import './instrumentation';
|
|
2
|
+
import { type CredentialStore, type ServerConfig } from '@inkeep/agents-core';
|
|
3
|
+
import { createExecutionHono } from './app';
|
|
4
|
+
declare const app: import("hono").Hono<import("hono/types").BlankEnv, import("hono/types").BlankSchema, "/">;
|
|
5
|
+
export default app;
|
|
6
|
+
export { createExecutionHono };
|
|
7
|
+
export declare function createExecutionApp(config?: {
|
|
8
|
+
serverConfig?: ServerConfig;
|
|
9
|
+
credentialStores?: CredentialStore[];
|
|
10
|
+
}): import("hono").Hono<import("hono/types").BlankEnv, import("hono/types").BlankSchema, "/">;
|
|
11
|
+
//# sourceMappingURL=index.d.ts.map
|
|
@@ -0,0 +1 @@
|
|
|
1
|
+
{"version":3,"file":"index.d.ts","sourceRoot":"","sources":["../src/index.ts"],"names":[],"mappings":"AAAA,OAAO,mBAAmB,CAAC;AAC3B,OAAO,EACL,KAAK,eAAe,EAGpB,KAAK,YAAY,EAClB,MAAM,qBAAqB,CAAC;AAC7B,OAAO,EAAE,mBAAmB,EAAE,MAAM,OAAO,CAAC;AAiB5C,QAAA,MAAM,GAAG,2FAAsD,CAAC;AAGhE,eAAe,GAAG,CAAC;AAGnB,OAAO,EAAE,mBAAmB,EAAE,CAAC;AAG/B,wBAAgB,kBAAkB,CAAC,MAAM,CAAC,EAAE;IAC1C,YAAY,CAAC,EAAE,YAAY,CAAC;IAC5B,gBAAgB,CAAC,EAAE,eAAe,EAAE,CAAC;CACtC,6FAMA"}
|
package/dist/index.js
ADDED
|
@@ -0,0 +1,28 @@
|
|
|
1
|
+
// Importing instrumentation first so OpenTelemetry is initialized before any other module loads.
import './instrumentation';
import { CredentialStoreRegistry, createDefaultCredentialStores, } from '@inkeep/agents-core';
import { createExecutionHono } from './app';
// Create default server configuration
const defaultConfig = {
    port: 3003,
    serverOptions: {
        requestTimeout: 120000, // 120 seconds for long-running execution requests
        keepAliveTimeout: 60000,
        keepAlive: true,
    },
};
// Create default credential stores and wrap them in a registry
const defaultStores = createDefaultCredentialStores();
const defaultRegistry = new CredentialStoreRegistry(defaultStores);
// Create default app instance for simple usage
const app = createExecutionHono(defaultConfig, defaultRegistry);
// Export the default app for Vite dev server and simple deployments
export default app;
// Also export the factory function for advanced usage
export { createExecutionHono };
/**
 * Create an app with custom server config and/or credential stores.
 * Any omitted option falls back to the module-level defaults above.
 */
export function createExecutionApp(config) {
    const serverConfig = config?.serverConfig ?? defaultConfig;
    const stores = config?.credentialStores ?? defaultStores;
    const registry = new CredentialStoreRegistry(stores);
    return createExecutionHono(serverConfig, registry);
}
|
|
@@ -0,0 +1,13 @@
|
|
|
1
|
+
import { NodeSDK } from '@opentelemetry/sdk-node';
|
|
2
|
+
declare class FanOutSpanProcessor {
|
|
3
|
+
private inner;
|
|
4
|
+
constructor(inner: any[]);
|
|
5
|
+
onStart(span: any, parent: any): void;
|
|
6
|
+
onEnd(span: any): void;
|
|
7
|
+
forceFlush(): Promise<void>;
|
|
8
|
+
shutdown(): Promise<void>;
|
|
9
|
+
}
|
|
10
|
+
declare const spanProcessor: FanOutSpanProcessor;
|
|
11
|
+
export declare const sdk: NodeSDK;
|
|
12
|
+
export { spanProcessor };
|
|
13
|
+
//# sourceMappingURL=instrumentation.d.ts.map
|
|
@@ -0,0 +1 @@
|
|
|
1
|
+
{"version":3,"file":"instrumentation.d.ts","sourceRoot":"","sources":["../src/instrumentation.ts"],"names":[],"mappings":"AAMA,OAAO,EAAE,OAAO,EAAE,MAAM,yBAAyB,CAAC;AAOlD,cAAM,mBAAmB;IACX,OAAO,CAAC,KAAK;gBAAL,KAAK,EAAE,GAAG,EAAE;IAChC,OAAO,CAAC,IAAI,EAAE,GAAG,EAAE,MAAM,EAAE,GAAG;IAG9B,KAAK,CAAC,IAAI,EAAE,GAAG;IAGf,UAAU;IAGV,QAAQ;CAGT;AAED,QAAA,MAAM,aAAa,qBAQjB,CAAC;AAEH,eAAO,MAAM,GAAG,SAyBd,CAAC;AAGH,OAAO,EAAE,aAAa,EAAE,CAAC"}
|
|
@@ -0,0 +1,66 @@
|
|
|
1
|
+
import { getNodeAutoInstrumentations } from '@opentelemetry/auto-instrumentations-node';
|
|
2
|
+
import { ALLOW_ALL_BAGGAGE_KEYS, BaggageSpanProcessor, } from '@opentelemetry/baggage-span-processor';
|
|
3
|
+
import { OTLPTraceExporter } from '@opentelemetry/exporter-trace-otlp-proto';
|
|
4
|
+
import { NodeSDK } from '@opentelemetry/sdk-node';
|
|
5
|
+
import { BatchSpanProcessor } from '@opentelemetry/sdk-trace-node';
|
|
6
|
+
// OTLP trace endpoint: env override, or a local collector default.
// NOTE(review): 14318 is a non-standard OTLP/HTTP port — presumably a local collector mapping; verify.
const otlpUrl = process.env.OTEL_EXPORTER_OTLP_ENDPOINT || 'http://localhost:14318/v1/traces';
const otlpExporter = new OTLPTraceExporter({ url: otlpUrl });
|
|
8
|
+
// Minimal fan-out so NodeSDK can accept ONE spanProcessor
|
|
9
|
+
class FanOutSpanProcessor {
|
|
10
|
+
inner;
|
|
11
|
+
constructor(inner) {
|
|
12
|
+
this.inner = inner;
|
|
13
|
+
}
|
|
14
|
+
onStart(span, parent) {
|
|
15
|
+
this.inner.forEach((p) => p.onStart(span, parent));
|
|
16
|
+
}
|
|
17
|
+
onEnd(span) {
|
|
18
|
+
this.inner.forEach((p) => p.onEnd(span));
|
|
19
|
+
}
|
|
20
|
+
forceFlush() {
|
|
21
|
+
return Promise.all(this.inner.map((p) => p.forceFlush?.())).then(() => { });
|
|
22
|
+
}
|
|
23
|
+
shutdown() {
|
|
24
|
+
return Promise.all(this.inner.map((p) => p.shutdown?.())).then(() => { });
|
|
25
|
+
}
|
|
26
|
+
}
|
|
27
|
+
// Aggressive batching so traces show up quickly during local development.
const batchOptions = {
    maxExportBatchSize: 1, // Send immediately (vs 512)
    scheduledDelayMillis: 100, // 100ms delay (vs 5000ms)
    exportTimeoutMillis: 5000, // 5s timeout (vs 30s)
    maxQueueSize: 512, // Smaller queue
};
// Single processor handed to NodeSDK: copies baggage entries onto spans,
// then batches them to the OTLP exporter.
const spanProcessor = new FanOutSpanProcessor([
    new BaggageSpanProcessor(ALLOW_ALL_BAGGAGE_KEYS),
    new BatchSpanProcessor(otlpExporter, batchOptions),
]);
|
|
36
|
+
/**
 * Tracing SDK for the run API. Uses Node auto-instrumentations with custom
 * request hooks that rename HTTP spans to "<METHOD> <path>" for readability.
 */
export const sdk = new NodeSDK({
    serviceName: 'inkeep-chat',
    spanProcessor,
    instrumentations: [
        getNodeAutoInstrumentations({
            '@opentelemetry/instrumentation-http': {
                enabled: true,
                requestHook: (span, request) => {
                    // Incoming requests carry `url`; outgoing ClientRequests carry `path`.
                    const url = request?.url ?? request?.path;
                    if (!url)
                        return;
                    // FIX: `new URL` throws a TypeError on malformed input; a throwing
                    // hook must never be able to interfere with span handling, so guard
                    // it and keep the default span name on failure.
                    try {
                        const u = new URL(url, 'http://localhost');
                        span.updateName(`${request?.method || 'UNKNOWN'} ${u.pathname}`);
                    }
                    catch {
                        // Unparseable URL — leave the instrumentation's default name.
                    }
                },
            },
            '@opentelemetry/instrumentation-undici': {
                requestHook: (span) => {
                    // Undici spans expose request data only via attributes.
                    const method = span.attributes?.['http.request.method'];
                    const host = span.attributes?.['server.address'];
                    const path = span.attributes?.['url.path'];
                    if (method && path)
                        span.updateName(host ? `${method} ${host}${path}` : `${method} ${path}`);
                },
            },
        }),
    ],
});
|
|
63
|
+
// Export the span processor for force flush access
export { spanProcessor };
// SDK starts automatically when imported
// NOTE(review): importing this module has the side effect of starting tracing.
sdk.start();
|
package/dist/logger.d.ts
ADDED
|
@@ -0,0 +1 @@
|
|
|
1
|
+
{"version":3,"file":"logger.d.ts","sourceRoot":"","sources":["../src/logger.ts"],"names":[],"mappings":"AACA,OAAO,KAAK,EAAE,IAAI,EAAE,MAAM,MAAM,CAAC;AAwBjC,wBAAgB,SAAS,CAAC,IAAI,CAAC,EAAE,MAAM,yCAOtC;AAED,wBAAgB,kBAAkB,CAAC,KAAK,EAAE,MAAM,EAAE,EAAE,EAAE,IAAI,iBAEzD"}
|
package/dist/logger.js
ADDED
|
@@ -0,0 +1,32 @@
|
|
|
1
|
+
import { AsyncLocalStorage } from 'node:async_hooks';
|
|
2
|
+
import { pino } from 'pino';
|
|
3
|
+
// import { createGcpLoggingPinoConfig } from '@google-cloud/pino-logging-gcp-config';
|
|
4
|
+
import { env } from './env';
|
|
5
|
+
// Pretty-print transport: synchronous writes straight to stdout.
const prettyTransport = {
    target: 'pino-pretty',
    options: {
        sync: true,
        destination: 1, // stdout
        colorize: true,
        translateTime: 'SYS:standard',
    },
};
// Root pino logger with auth headers redacted; all request-scoped loggers
// below are children of this instance.
const logger = pino({
    level: env.LOG_LEVEL,
    serializers: {
        // Shallow-copy arbitrary objects so pino logs plain data.
        obj: (value) => ({ ...value }),
    },
    redact: ['req.headers.authorization', 'req.headers["x-inkeep-admin-authentication"]'],
    transport: prettyTransport,
});
|
|
21
|
+
// Per-request context store; holds a Map with the current 'requestId'.
const asyncLocalStorage = new AsyncLocalStorage();
/**
 * Create a named child logger. When called inside withRequestContext, the
 * child also carries the current request id as `reqId`.
 */
export function getLogger(name) {
    const store = asyncLocalStorage.getStore();
    const reqId = store?.get('requestId') || undefined;
    return reqId ? logger.child({ reqId, name }) : logger.child({ name });
}
/**
 * Run `fn` with `reqId` bound as the current request id and return its result.
 */
export function withRequestContext(reqId, fn) {
    return asyncLocalStorage.run(new Map([['requestId', reqId]]), fn);
}
|
|
@@ -0,0 +1,22 @@
|
|
|
1
|
+
import { type ExecutionContext } from '@inkeep/agents-core';
/**
 * Middleware to authenticate API requests using Bearer token authentication
 * First checks if token matches INKEEP_AGENTS_RUN_BYPASS_SECRET, then falls back to API key validation
 * Extracts and validates API keys, then adds execution context to the request
 */
export declare const apiKeyAuth: () => import("hono").MiddlewareHandler<{
    Variables: {
        executionContext: ExecutionContext;
    };
}, string, {}>;
/**
 * Maps an API key string to a Promise of its ExecutionContext.
 * NOTE(review): declaration only — validation and failure behavior live in
 * src/middleware/api-key-auth.ts; confirm there before relying on rejects.
 */
export declare const extractContextFromApiKey: (apiKey: string) => Promise<ExecutionContext>;
/**
 * Helper middleware for endpoints that optionally support API key authentication
 * If no auth header is present, it continues without setting the executionContext
 * (note `executionContext` is optional in this handler's Variables).
 */
export declare const optionalAuth: () => import("hono").MiddlewareHandler<{
    Variables: {
        executionContext?: ExecutionContext;
    };
}, string, {}>;
//# sourceMappingURL=api-key-auth.d.ts.map
|
|
@@ -0,0 +1 @@
|
|
|
1
|
+
{"version":3,"file":"api-key-auth.d.ts","sourceRoot":"","sources":["../../src/middleware/api-key-auth.ts"],"names":[],"mappings":"AAAA,OAAO,EAAE,KAAK,gBAAgB,EAAmC,MAAM,qBAAqB,CAAC;AAQ7F;;;;GAIG;AACH,eAAO,MAAM,UAAU;eAER;QACT,gBAAgB,EAAE,gBAAgB,CAAC;KACpC;cAoHD,CAAC;AAEL,eAAO,MAAM,wBAAwB,GAAU,QAAQ,MAAM,8BAgB5D,CAAC;AACF;;;GAGG;AACH,eAAO,MAAM,YAAY;eAEV;QACT,gBAAgB,CAAC,EAAE,gBAAgB,CAAC;KACrC;cAYD,CAAC"}
|