@inkeep/agents-run-api 0.26.2 → 0.28.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/dist/{chunk-TVLDBLRZ.js → chunk-DWEFKQTA.js} +1 -1
- package/dist/{chunk-XLUE6U2L.js → chunk-IMJLQGAX.js} +2 -2
- package/dist/{chunk-IBMWBEXH.js → chunk-TRNLEUK2.js} +1 -1
- package/dist/{chunk-LHCIBW34.js → chunk-Z4TYO3W3.js} +1 -0
- package/dist/{conversations-H2TSLD3U.js → conversations-V6DNH5MW.js} +1 -1
- package/dist/dbClient-PLEBWGM4.js +1 -0
- package/dist/index.cjs +520 -212
- package/dist/index.js +517 -211
- package/dist/instrumentation.cjs +1 -0
- package/dist/instrumentation.js +1 -1
- package/package.json +2 -2
- package/dist/dbClient-CSP4YJOO.js +0 -1
package/dist/index.js
CHANGED
@@ -1,10 +1,10 @@
-import { flushBatchProcessor } from './chunk-
-import { getFormattedConversationHistory, createDefaultConversationHistoryConfig, saveA2AMessageResponse } from './chunk-
-import { dbClient_default } from './chunk-
-import { env } from './chunk-
+import { flushBatchProcessor } from './chunk-DWEFKQTA.js';
+import { getFormattedConversationHistory, createDefaultConversationHistoryConfig, saveA2AMessageResponse } from './chunk-IMJLQGAX.js';
+import { dbClient_default } from './chunk-TRNLEUK2.js';
+import { env } from './chunk-Z4TYO3W3.js';
 import { getLogger } from './chunk-A2S7GSHL.js';
 import { __publicField } from './chunk-PKBMQBKP.js';
-import { getTracer, HeadersScopeSchema, getRequestExecutionContext, createApiError, getAgentWithDefaultSubAgent, contextValidationMiddleware, getConversationId, getFullAgent, createOrGetConversation, getActiveAgentForConversation, setActiveAgentForConversation, getSubAgentById, handleContextResolution, createMessage, generateId, commonGetErrorResponses, loggerFactory, getDataComponent, getProject, createDefaultCredentialStores, CredentialStoreRegistry, listTaskIdsByContextId, getTask, getLedgerArtifacts, upsertLedgerArtifact, createTask, updateTask, setSpanWithError, updateConversation, handleApiError, TaskState, setActiveAgentForThread, getConversation, getAgentById, getRelatedAgentsForAgent, getExternalAgentsForSubAgent, getToolsForAgent, getDataComponentsForAgent, getArtifactComponentsForAgent, dbResultToMcpTool, validateAndGetApiKey, ContextResolver, CredentialStuffer, MCPServerType, getCredentialReference, McpClient, getFunctionToolsForSubAgent, getFunction, getContextConfigById, getFullAgentDefinition, TemplateEngine, agentHasArtifactComponents, MCPTransportType, SPAN_KEYS } from '@inkeep/agents-core';
+import { getTracer, HeadersScopeSchema, getRequestExecutionContext, createApiError, getAgentWithDefaultSubAgent, contextValidationMiddleware, getConversationId, getFullAgent, createOrGetConversation, getActiveAgentForConversation, setActiveAgentForConversation, getSubAgentById, handleContextResolution, createMessage, generateId, commonGetErrorResponses, loggerFactory, getDataComponent, getProject, createDefaultCredentialStores, CredentialStoreRegistry, listTaskIdsByContextId, getTask, getLedgerArtifacts, upsertLedgerArtifact, createTask, updateTask, setSpanWithError, updateConversation, handleApiError, TaskState, setActiveAgentForThread, getConversation, getAgentById, getRelatedAgentsForAgent, getExternalAgentsForSubAgent, getTeamAgentsForSubAgent, getToolsForAgent, getDataComponentsForAgent, getArtifactComponentsForAgent, dbResultToMcpTool, validateAndGetApiKey, verifyServiceToken, validateTargetAgent, ContextResolver, CredentialStuffer, MCPServerType, getCredentialReference, McpClient, getFunctionToolsForSubAgent, getFunction, getContextConfigById, getFullAgentDefinition, TemplateEngine, agentHasArtifactComponents, MCPTransportType, SPAN_KEYS, headers, generateServiceToken } from '@inkeep/agents-core';
 import { otel } from '@hono/otel';
 import { OpenAPIHono, createRoute, z as z$1 } from '@hono/zod-openapi';
 import { trace, propagation, context, SpanStatusCode } from '@opentelemetry/api';
@@ -40,7 +40,8 @@ function createExecutionContext(params) {
 agentId: params.agentId,
 baseUrl: params.baseUrl || process.env.API_URL || "http://localhost:3003",
 apiKeyId: params.apiKeyId,
-subAgentId: params.subAgentId
+subAgentId: params.subAgentId,
+metadata: params.metadata || {}
 };
 }
 
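createExecutionContext now accepts an optional metadata bag (defaulting to {}). A minimal sketch of how the team-delegation metadata introduced later in this diff flows through it; field names come from the diff, concrete values are placeholders:

// Sketch only: field names from this diff, values are placeholders.
const executionContext = createExecutionContext({
  apiKey: "team-agent-jwt",
  tenantId: "tenant-1",
  projectId: "project-1",
  agentId: "target-agent",        // the JWT audience (payload.aud)
  apiKeyId: "team-agent-token",
  baseUrl: "http://localhost:3003",
  subAgentId: void 0,
  metadata: { teamDelegation: true, originAgentId: "origin-agent" }
});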
@@ -62,28 +63,36 @@ var apiKeyAuth = () => createMiddleware(async (c, next) => {
 const reqUrl = new URL(c.req.url);
 const baseUrl = proto && host ? `${proto}://${host}` : host ? `${reqUrl.protocol}//${host}` : `${reqUrl.origin}`;
 if (process.env.ENVIRONMENT === "development" || process.env.ENVIRONMENT === "test") {
+logger.info({}, "development environment");
 let executionContext;
 if (authHeader?.startsWith("Bearer ")) {
+const apiKey2 = authHeader.substring(7);
 try {
-executionContext = await extractContextFromApiKey(
+executionContext = await extractContextFromApiKey(apiKey2, baseUrl);
 if (subAgentId) {
 executionContext.subAgentId = subAgentId;
 }
-
+c.set("executionContext", executionContext);
 } catch {
-
-
-
-
-
-
-
-
-
-
-
-
-
+try {
+executionContext = await extractContextFromTeamAgentToken(apiKey2, baseUrl, subAgentId);
+c.set("executionContext", executionContext);
+} catch {
+executionContext = createExecutionContext({
+apiKey: "development",
+tenantId: tenantId || "test-tenant",
+projectId: projectId || "test-project",
+agentId: agentId || "test-agent",
+apiKeyId: "test-key",
+baseUrl,
+subAgentId
+});
+c.set("executionContext", executionContext);
+logger.info(
+{},
+"Development/test environment - fallback to default context due to invalid API key"
+);
+}
 }
 } else {
 executionContext = createExecutionContext({
@@ -95,12 +104,12 @@ var apiKeyAuth = () => createMiddleware(async (c, next) => {
 baseUrl,
 subAgentId
 });
+c.set("executionContext", executionContext);
 logger.info(
 {},
 "Development/test environment - no API key provided, using default context"
 );
 }
-c.set("executionContext", executionContext);
 await next();
 return;
 }
@@ -131,12 +140,21 @@ var apiKeyAuth = () => createMiddleware(async (c, next) => {
 await next();
 return;
 } else if (apiKey) {
-
-
-
+try {
+const executionContext = await extractContextFromApiKey(apiKey, baseUrl);
+if (subAgentId) {
+executionContext.subAgentId = subAgentId;
+}
+c.set("executionContext", executionContext);
+logger.info({}, "API key authenticated successfully");
+} catch {
+const executionContext = await extractContextFromTeamAgentToken(
+apiKey,
+baseUrl,
+subAgentId
+);
+c.set("executionContext", executionContext);
 }
-c.set("executionContext", executionContext);
-logger.info({}, "API key authenticated successfully");
 await next();
 return;
 } else {
@@ -166,14 +184,24 @@ var apiKeyAuth = () => createMiddleware(async (c, next) => {
 "API key authenticated successfully"
 );
 await next();
-} catch
-
-
+} catch {
+try {
+const executionContext = await extractContextFromTeamAgentToken(
+apiKey,
+baseUrl,
+subAgentId
+);
+c.set("executionContext", executionContext);
+await next();
+} catch (error) {
+if (error instanceof HTTPException) {
+throw error;
+}
+logger.error({ error }, "API key authentication error");
+throw new HTTPException(500, {
+message: "Authentication failed"
+});
 }
-logger.error({ error }, "API key authentication error");
-throw new HTTPException(500, {
-message: "Authentication failed"
-});
 }
 });
 var extractContextFromApiKey = async (apiKey, baseUrl) => {
@@ -195,8 +223,15 @@ var extractContextFromApiKey = async (apiKey, baseUrl) => {
 message: "Invalid or expired API key"
 });
 }
-logger.
-
+logger.debug(
+{
+tenantId: apiKeyRecord.tenantId,
+projectId: apiKeyRecord.projectId,
+agentId: apiKeyRecord.agentId,
+subAgentId: agent.defaultSubAgentId || void 0
+},
+"API key authenticated successfully"
+);
 return createExecutionContext({
 apiKey,
 tenantId: apiKeyRecord.tenantId,
@@ -207,6 +242,53 @@ var extractContextFromApiKey = async (apiKey, baseUrl) => {
 subAgentId: agent.defaultSubAgentId || void 0
 });
 };
+var extractContextFromTeamAgentToken = async (token, baseUrl, expectedSubAgentId) => {
+const result = await verifyServiceToken(token);
+if (!result.valid || !result.payload) {
+logger.warn({ error: result.error }, "Invalid team agent JWT token");
+throw new HTTPException(401, {
+message: `Invalid team agent token: ${result.error || "Unknown error"}`
+});
+}
+const payload = result.payload;
+if (expectedSubAgentId && !validateTargetAgent(payload, expectedSubAgentId)) {
+logger.error(
+{
+tokenTargetAgentId: payload.aud,
+expectedSubAgentId,
+originAgentId: payload.sub
+},
+"Team agent token target mismatch"
+);
+throw new HTTPException(403, {
+message: "Token not valid for the requested agent"
+});
+}
+logger.info(
+{
+originAgentId: payload.sub,
+targetAgentId: payload.aud,
+tenantId: payload.tenantId,
+projectId: payload.projectId
+},
+"Team agent JWT token authenticated successfully"
+);
+return createExecutionContext({
+apiKey: "team-agent-jwt",
+// Not an actual API key
+tenantId: payload.tenantId,
+projectId: payload.projectId,
+agentId: payload.aud,
+// Target agent ID
+apiKeyId: "team-agent-token",
+baseUrl,
+subAgentId: void 0,
+metadata: {
+teamDelegation: true,
+originAgentId: payload.sub
+}
+});
+};
 function setupOpenAPIRoutes(app7) {
 app7.get("/openapi.json", (c) => {
 try {
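Taken together, the middleware changes above make bearer handling two-step: the credential is first treated as an Inkeep API key, and only if that lookup throws is it re-verified as a team-agent service JWT. A rough sketch of that order; the wrapper name is hypothetical, while extractContextFromApiKey and extractContextFromTeamAgentToken are the helpers shown above:

// Hypothetical wrapper illustrating the fallback order in apiKeyAuth above.
async function resolveBearerContext(authHeader, baseUrl, subAgentId) {
  const token = authHeader.substring(7); // strip "Bearer "
  try {
    const ctx = await extractContextFromApiKey(token, baseUrl);
    if (subAgentId) ctx.subAgentId = subAgentId;
    return ctx;
  } catch {
    // Falls back to verifyServiceToken/validateTargetAgent via the new helper.
    return extractContextFromTeamAgentToken(token, baseUrl, subAgentId);
  }
}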
@@ -387,7 +469,7 @@ async function handleMessageSend(c, agent, request) {
 updatedAt: (/* @__PURE__ */ new Date()).toISOString()
 });
 logger2.info({ metadata: params.message.metadata }, "message metadata");
-if (params.message.metadata?.fromSubAgentId || params.message.metadata?.fromExternalAgentId) {
+if (params.message.metadata?.fromSubAgentId || params.message.metadata?.fromExternalAgentId || params.message.metadata?.fromTeamAgentId) {
 const messageText = params.message.parts.filter((part) => part.kind === "text" && "text" in part && part.text).map((part) => part.text).join(" ");
 try {
 const messageData = {
@@ -409,13 +491,18 @@ async function handleMessageSend(c, agent, request) {
 } else if (params.message.metadata?.fromExternalAgentId) {
 messageData.fromExternalAgentId = params.message.metadata.fromExternalAgentId;
 messageData.toSubAgentId = agent.subAgentId;
+} else if (params.message.metadata?.fromTeamAgentId) {
+messageData.fromTeamAgentId = params.message.metadata.fromTeamAgentId;
+messageData.toTeamAgentId = agent.subAgentId;
 }
 await createMessage(dbClient_default)(messageData);
 logger2.info(
 {
 fromSubAgentId: params.message.metadata.fromSubAgentId,
 fromExternalAgentId: params.message.metadata.fromExternalAgentId,
+fromTeamAgentId: params.message.metadata.fromTeamAgentId,
 toSubAgentId: agent.subAgentId,
+toTeamAgentId: params.message.metadata.fromTeamAgentId ? agent.subAgentId : void 0,
 conversationId: effectiveContextId,
 messageType: "a2a-request",
 taskId: task.id
@@ -428,6 +515,7 @@ async function handleMessageSend(c, agent, request) {
 error,
 fromSubAgentId: params.message.metadata.fromSubAgentId,
 fromExternalAgentId: params.message.metadata.fromExternalAgentId,
+fromTeamAgentId: params.message.metadata.fromTeamAgentId,
 toSubAgentId: agent.subAgentId,
 conversationId: effectiveContextId
 },
@@ -1353,9 +1441,11 @@ var _ArtifactService = class _ArtifactService {
 const component = this.context.artifactComponents?.find((ac) => ac.name === request.type);
 let summaryData = {};
 let fullData = {};
+let previewSchema = null;
+let fullSchema = null;
 if (component?.props) {
-
-
+previewSchema = extractPreviewFields(component.props);
+fullSchema = extractFullFields(component.props);
 summaryData = this.extractPropsFromSchema(
 selectedData,
 previewSchema,
@@ -1378,6 +1468,15 @@ var _ArtifactService = class _ArtifactService {
 }
 const cleanedSummaryData = this.cleanEscapedContent(summaryData);
 const cleanedFullData = this.cleanEscapedContent(fullData);
+const schemaValidation = this.validateExtractedData(
+request.artifactId,
+request.type,
+cleanedSummaryData,
+cleanedFullData,
+previewSchema,
+fullSchema,
+component?.props
+);
 const artifactData = {
 artifactId: request.artifactId,
 toolCallId: request.toolCallId,
@@ -1386,7 +1485,7 @@ var _ArtifactService = class _ArtifactService {
 type: request.type,
 data: cleanedSummaryData
 };
-await this.persistArtifact(request, cleanedSummaryData, cleanedFullData, subAgentId);
+await this.persistArtifact(request, cleanedSummaryData, cleanedFullData, subAgentId, schemaValidation);
 await this.cacheArtifact(
 request.artifactId,
 request.toolCallId,
@@ -1496,32 +1595,183 @@ var _ArtifactService = class _ArtifactService {
 * Format raw artifact to standardized summary data format
 */
 formatArtifactSummaryData(artifact, artifactId, toolCallId) {
+let data = artifact.parts?.[0]?.data?.summary;
+let dataSource = "parts[0].data.summary";
+if (!data || typeof data === "object" && Object.keys(data).length === 0) {
+data = artifact.parts?.[0]?.data;
+if (data && !(typeof data === "object" && Object.keys(data).length === 0)) {
+dataSource = "parts[0].data (fallback)";
+logger6.debug(
+{ artifactId, toolCallId, dataSource },
+"Using fallback data source for artifact summary"
+);
+} else {
+data = artifact.data;
+if (data && !(typeof data === "object" && Object.keys(data).length === 0)) {
+dataSource = "artifact.data (fallback)";
+logger6.debug(
+{ artifactId, toolCallId, dataSource },
+"Using fallback data source for artifact summary"
+);
+} else {
+data = {};
+dataSource = "empty (no data found)";
+logger6.warn(
+{
+artifactId,
+toolCallId,
+artifactStructure: {
+hasParts: !!artifact.parts,
+partsLength: artifact.parts?.length,
+hasPartsData: !!artifact.parts?.[0]?.data,
+hasPartsSummary: !!artifact.parts?.[0]?.data?.summary,
+hasArtifactData: !!artifact.data,
+artifactKeys: Object.keys(artifact || {})
+}
+},
+"No valid data found for artifact summary - using empty object"
+);
+}
+}
+}
 return {
 artifactId,
 toolCallId,
 name: artifact.name || "Processing...",
 description: artifact.description || "Name and description being generated...",
 type: artifact.metadata?.artifactType || artifact.artifactType,
-data
+data
 };
 }
 /**
 * Format raw artifact to standardized full data format
 */
 formatArtifactFullData(artifact, artifactId, toolCallId) {
+let data = artifact.parts?.[0]?.data?.full;
+let dataSource = "parts[0].data.full";
+if (!data || typeof data === "object" && Object.keys(data).length === 0) {
+data = artifact.parts?.[0]?.data;
+if (data && !(typeof data === "object" && Object.keys(data).length === 0)) {
+dataSource = "parts[0].data (fallback)";
+logger6.debug(
+{ artifactId, toolCallId, dataSource },
+"Using fallback data source for artifact full data"
+);
+} else {
+data = artifact.data;
+if (data && !(typeof data === "object" && Object.keys(data).length === 0)) {
+dataSource = "artifact.data (fallback)";
+logger6.debug(
+{ artifactId, toolCallId, dataSource },
+"Using fallback data source for artifact full data"
+);
+} else {
+data = {};
+dataSource = "empty (no data found)";
+logger6.warn(
+{
+artifactId,
+toolCallId,
+artifactStructure: {
+hasParts: !!artifact.parts,
+partsLength: artifact.parts?.length,
+hasPartsData: !!artifact.parts?.[0]?.data,
+hasPartsFull: !!artifact.parts?.[0]?.data?.full,
+hasArtifactData: !!artifact.data,
+artifactKeys: Object.keys(artifact || {})
+}
+},
+"No valid data found for artifact full data - using empty object"
+);
+}
+}
+}
 return {
 artifactId,
 toolCallId,
 name: artifact.name || "Processing...",
 description: artifact.description || "Name and description being generated...",
 type: artifact.metadata?.artifactType || artifact.artifactType,
-data
+data
+};
+}
+/**
+* Validate extracted data against the schemas used for extraction
+*/
+validateExtractedData(artifactId, artifactType, summaryData, fullData, previewSchema, fullSchema, originalProps) {
+const validateAgainstSchema = (data, schema) => {
+const actualFields = Object.keys(data || {});
+const expectedFields = schema?.properties ? Object.keys(schema.properties) : [];
+const missingFields = expectedFields.filter((field) => !(field in (data || {})));
+const extraFields = actualFields.filter((field) => !expectedFields.includes(field));
+const requiredFields = schema?.required || [];
+const missingRequired = requiredFields.filter((field) => !(field in (data || {})));
+return {
+hasExpectedFields: missingFields.length === 0,
+missingFields,
+extraFields,
+expectedFields,
+actualFields,
+hasRequiredFields: missingRequired.length === 0,
+missingRequired
+};
+};
+const summaryValidation = validateAgainstSchema(summaryData, previewSchema);
+const fullValidation = validateAgainstSchema(fullData, fullSchema);
+if (!summaryValidation.hasRequiredFields) {
+const error = new Error(
+`Cannot save artifact: Missing required fields [${summaryValidation.missingRequired.join(", ")}] for '${artifactType}' schema. Required: [${summaryValidation.missingRequired.join(", ")}]. Found: [${summaryValidation.actualFields.join(", ")}]. Consider using a different artifact component type that matches your data structure.`
+);
+logger6.error(
+{
+artifactId,
+artifactType,
+requiredFields: summaryValidation.missingRequired,
+actualFields: summaryValidation.actualFields,
+schemaExpected: previewSchema?.properties ? Object.keys(previewSchema.properties) : []
+},
+"Blocking artifact save due to missing required fields"
+);
+throw error;
+}
+if (!summaryValidation.hasExpectedFields || summaryValidation.extraFields.length > 0) {
+logger6.warn(
+{
+artifactId,
+artifactType,
+dataType: "summary",
+expectedFields: summaryValidation.expectedFields,
+actualFields: summaryValidation.actualFields,
+missingFields: summaryValidation.missingFields,
+extraFields: summaryValidation.extraFields
+},
+"Summary data structure does not match preview schema"
+);
+}
+if (!fullValidation.hasExpectedFields || fullValidation.extraFields.length > 0) {
+logger6.warn(
+{
+artifactId,
+artifactType,
+dataType: "full",
+expectedFields: fullValidation.expectedFields,
+actualFields: fullValidation.actualFields,
+missingFields: fullValidation.missingFields,
+extraFields: fullValidation.extraFields
+},
+"Full data structure does not match full schema"
+);
+}
+return {
+summary: summaryValidation,
+full: fullValidation,
+schemaFound: !!originalProps
 };
 }
 /**
 * Persist artifact to database vian agent session
 */
-async persistArtifact(request, summaryData, fullData, subAgentId) {
+async persistArtifact(request, summaryData, fullData, subAgentId, schemaValidation) {
 const effectiveAgentId = subAgentId || this.context.subAgentId;
 if (this.context.streamRequestId && effectiveAgentId && this.context.taskId) {
 await agentSessionManager.recordEvent(
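The validateExtractedData method added above is plain field-presence checking: it compares Object.keys of the extracted data against schema.properties and schema.required, throws when required preview fields are missing, and otherwise only logs warnings. A small worked example of validateAgainstSchema with illustrative inputs:

// Illustrative input/output for the validateAgainstSchema helper above.
const previewSchema = {
  properties: { title: { type: "string" }, url: { type: "string" } },
  required: ["title"]
};
const summaryData = { title: "Quarterly report", pageCount: 12 };
// expectedFields  -> ["title", "url"]
// actualFields    -> ["title", "pageCount"]
// missingFields   -> ["url"]       (hasExpectedFields: false -> warning log)
// extraFields     -> ["pageCount"] (warning log)
// missingRequired -> []            (hasRequiredFields: true -> save proceeds)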
@@ -1543,6 +1793,11 @@ var _ArtifactService = class _ArtifactService {
 sessionId: this.context.sessionId,
 artifactType: request.type
 },
+schemaValidation: schemaValidation || {
+summary: { hasExpectedFields: true, missingFields: [], extraFields: [], expectedFields: [], actualFields: [], hasRequiredFields: true, missingRequired: [] },
+full: { hasExpectedFields: true, missingFields: [], extraFields: [], expectedFields: [], actualFields: [], hasRequiredFields: true, missingRequired: [] },
+schemaFound: false
+},
 tenantId: this.context.tenantId,
 projectId: this.context.projectId,
 contextId: this.context.contextId,
@@ -3037,7 +3292,29 @@ ${this.statusUpdateState?.config.prompt?.trim() || ""}`;
 has_context_id: !!artifactData.contextId,
 has_metadata: !!artifactData.metadata,
 tool_call_id: artifactData.metadata?.toolCallId || "missing",
-pending_generation: !!artifactData.pendingGeneration
+pending_generation: !!artifactData.pendingGeneration,
+// Schema validation attributes
+"schema_validation.schema_found": artifactData.schemaValidation?.schemaFound || false,
+"schema_validation.summary.has_expected_fields": artifactData.schemaValidation?.summary?.hasExpectedFields || true,
+"schema_validation.summary.missing_fields_count": artifactData.schemaValidation?.summary?.missingFields?.length || 0,
+"schema_validation.summary.extra_fields_count": artifactData.schemaValidation?.summary?.extraFields?.length || 0,
+"schema_validation.summary.expected_fields": JSON.stringify(artifactData.schemaValidation?.summary?.expectedFields || []),
+"schema_validation.summary.actual_fields": JSON.stringify(artifactData.schemaValidation?.summary?.actualFields || []),
+"schema_validation.summary.missing_fields": JSON.stringify(artifactData.schemaValidation?.summary?.missingFields || []),
+"schema_validation.summary.extra_fields": JSON.stringify(artifactData.schemaValidation?.summary?.extraFields || []),
+"schema_validation.summary.has_required_fields": artifactData.schemaValidation?.summary?.hasRequiredFields || true,
+"schema_validation.summary.missing_required_count": artifactData.schemaValidation?.summary?.missingRequired?.length || 0,
+"schema_validation.summary.missing_required": JSON.stringify(artifactData.schemaValidation?.summary?.missingRequired || []),
+"schema_validation.full.has_expected_fields": artifactData.schemaValidation?.full?.hasExpectedFields || true,
+"schema_validation.full.missing_fields_count": artifactData.schemaValidation?.full?.missingFields?.length || 0,
+"schema_validation.full.extra_fields_count": artifactData.schemaValidation?.full?.extraFields?.length || 0,
+"schema_validation.full.expected_fields": JSON.stringify(artifactData.schemaValidation?.full?.expectedFields || []),
+"schema_validation.full.actual_fields": JSON.stringify(artifactData.schemaValidation?.full?.actualFields || []),
+"schema_validation.full.missing_fields": JSON.stringify(artifactData.schemaValidation?.full?.missingFields || []),
+"schema_validation.full.extra_fields": JSON.stringify(artifactData.schemaValidation?.full?.extraFields || []),
+"schema_validation.full.has_required_fields": artifactData.schemaValidation?.full?.hasRequiredFields || true,
+"schema_validation.full.missing_required_count": artifactData.schemaValidation?.full?.missingRequired?.length || 0,
+"schema_validation.full.missing_required": JSON.stringify(artifactData.schemaValidation?.full?.missingRequired || [])
 }
 },
 async (span) => {
@@ -5245,8 +5522,10 @@ function createDelegateToAgentTool({
 );
 }
 const isInternal = delegateConfig.type === "internal";
+const isExternal = delegateConfig.type === "external";
+const isTeam = delegateConfig.type === "team";
 let resolvedHeaders = {};
-if (
+if (isExternal) {
 if ((delegateConfig.config.credentialReferenceId || delegateConfig.config.headers) && credentialStoreRegistry) {
 const contextResolver = new ContextResolver(
 tenantId,
@@ -5284,6 +5563,23 @@ function createDelegateToAgentTool({
 headers: delegateConfig.config.headers || void 0
 });
 }
+} else if (isTeam) {
+const contextResolver = new ContextResolver(
+tenantId,
+projectId,
+dbClient_default,
+credentialStoreRegistry
+);
+const context2 = await contextResolver.resolveHeaders(metadata.conversationId, contextId);
+for (const [key, value] of Object.entries(headers)) {
+resolvedHeaders[key] = TemplateEngine.render(value, context2, { strict: true });
+}
+resolvedHeaders.Authorization = `Bearer ${await generateServiceToken({
+tenantId,
+projectId,
+originAgentId: agentId,
+targetAgentId: delegateConfig.config.id
+})}`;
 } else {
 resolvedHeaders = {
 Authorization: `Bearer ${metadata.apiKey}`,
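For a team delegate the tool no longer forwards the caller's API key; it resolves any template headers and then sets Authorization to a service token minted with generateServiceToken. Judging from the verification side earlier in this diff, the origin and target agent ids appear to travel as the token's sub and aud claims. A trimmed sketch with placeholder values:

// Sketch of the "team" branch above; identifiers from the diff, values illustrative.
resolvedHeaders.Authorization = `Bearer ${await generateServiceToken({
  tenantId: "tenant-1",
  projectId: "project-1",
  originAgentId: "support-agent",    // delegating agent (read back as payload.sub)
  targetAgentId: "research-agent"    // delegated-to team agent (read back as payload.aud)
})}`;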
@@ -7144,7 +7440,7 @@ var Agent = class {
 /**
 * Get resolved context using ContextResolver - will return cached data or fetch fresh data as needed
 */
-async getResolvedContext(conversationId,
+async getResolvedContext(conversationId, headers2) {
 try {
 if (!this.config.contextConfigId) {
 logger15.debug({ agentId: this.config.agentId }, "No context config found for agent");
@@ -7168,7 +7464,7 @@ var Agent = class {
 const result = await this.contextResolver.resolve(contextConfig, {
 triggerEvent: "invocation",
 conversationId,
-headers:
+headers: headers2 || {},
 tenantId: this.config.tenantId
 });
 const contextWithBuiltins = {
@@ -7355,7 +7651,7 @@ var Agent = class {
 inputSchema: tool3.inputSchema || tool3.parameters || {},
 usageGuidelines: name.startsWith("transfer_to_") || name.startsWith("delegate_to_") ? `Use this tool to ${name.startsWith("transfer_to_") ? "transfer" : "delegate"} to another agent when appropriate.` : "Use this tool when appropriate for the task at hand."
 }));
-const { getConversationScopedArtifacts } = await import('./conversations-
+const { getConversationScopedArtifacts } = await import('./conversations-V6DNH5MW.js');
 const historyConfig = this.config.conversationHistoryConfig ?? createDefaultConversationHistoryConfig();
 const referenceArtifacts = await getConversationScopedArtifacts({
 tenantId: this.config.tenantId,
@@ -8270,6 +8566,7 @@ var createTaskHandler = (config, credentialStoreRegistry) => {
 const [
 internalRelations,
 externalRelations,
+teamRelations,
 toolsForAgent,
 dataComponents,
 artifactComponents
@@ -8290,6 +8587,14 @@ var createTaskHandler = (config, credentialStoreRegistry) => {
 subAgentId: config.subAgentId
 }
 }),
+getTeamAgentsForSubAgent(dbClient_default)({
+scopes: {
+tenantId: config.tenantId,
+projectId: config.projectId,
+agentId: config.agentId,
+subAgentId: config.subAgentId
+}
+}),
 getToolsForAgent(dbClient_default)({
 scopes: {
 tenantId: config.tenantId,
@@ -8438,6 +8743,17 @@ var createTaskHandler = (config, credentialStoreRegistry) => {
 relationId: relation.id,
 relationType: "delegate"
 }
+})),
+...teamRelations.data.map((relation) => ({
+type: "team",
+config: {
+id: relation.targetAgent.id,
+name: relation.targetAgent.name,
+description: relation.targetAgent.description || "",
+baseUrl: config.baseUrl,
+headers: relation.headers,
+relationId: relation.id
+}
 }))
 ],
 tools: toolsForAgentResult,
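Team relations are folded into the same delegate list as internal and external relations, with type: "team" and no relationType or credential fields. An example of the resulting entry; field names come from the diff, values are made up:

// Example delegate entry produced from a team relation (values are made up).
const teamDelegate = {
  type: "team",
  config: {
    id: "research-agent",
    name: "Research Agent",
    description: "Handles long-running research tasks",
    baseUrl: "http://localhost:3003",  // config.baseUrl of the run API
    headers: {},                       // optional template headers, resolved via ContextResolver/TemplateEngine
    relationId: "rel_123"
  }
};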
@@ -8659,6 +8975,7 @@ var createTaskHandlerConfig = async (params) => {
|
|
|
8659
8975
|
};
|
|
8660
8976
|
|
|
8661
8977
|
// src/data/agents.ts
|
|
8978
|
+
var logger17 = getLogger("agents");
|
|
8662
8979
|
function createAgentCard({
|
|
8663
8980
|
dbAgent,
|
|
8664
8981
|
baseUrl
|
|
@@ -8760,13 +9077,26 @@ async function hydrateAgent({
|
|
|
8760
9077
|
async function getRegisteredAgent(params) {
|
|
8761
9078
|
const { executionContext, credentialStoreRegistry, sandboxConfig } = params;
|
|
8762
9079
|
const { tenantId, projectId, agentId, subAgentId, baseUrl, apiKey } = executionContext;
|
|
9080
|
+
let dbAgent;
|
|
8763
9081
|
if (!subAgentId) {
|
|
8764
|
-
|
|
9082
|
+
const agent = await getAgentWithDefaultSubAgent(dbClient_default)({
|
|
9083
|
+
scopes: { tenantId, projectId, agentId }
|
|
9084
|
+
});
|
|
9085
|
+
logger17.info({ agent }, "agent with default sub agent");
|
|
9086
|
+
if (!agent || !agent.defaultSubAgent) {
|
|
9087
|
+
return null;
|
|
9088
|
+
}
|
|
9089
|
+
dbAgent = agent.defaultSubAgent;
|
|
9090
|
+
} else {
|
|
9091
|
+
const response = await getSubAgentById(dbClient_default)({
|
|
9092
|
+
scopes: { tenantId, projectId, agentId },
|
|
9093
|
+
subAgentId
|
|
9094
|
+
});
|
|
9095
|
+
if (!response) {
|
|
9096
|
+
return null;
|
|
9097
|
+
}
|
|
9098
|
+
dbAgent = response;
|
|
8765
9099
|
}
|
|
8766
|
-
const dbAgent = await getSubAgentById(dbClient_default)({
|
|
8767
|
-
scopes: { tenantId, projectId, agentId },
|
|
8768
|
-
subAgentId
|
|
8769
|
-
});
|
|
8770
9100
|
if (!dbAgent) {
|
|
8771
9101
|
return null;
|
|
8772
9102
|
}
|
|
@@ -8783,7 +9113,7 @@ async function getRegisteredAgent(params) {
|
|
|
8783
9113
|
|
|
8784
9114
|
// src/routes/agents.ts
|
|
8785
9115
|
var app = new OpenAPIHono();
|
|
8786
|
-
var
|
|
9116
|
+
var logger18 = getLogger("agents");
|
|
8787
9117
|
app.openapi(
|
|
8788
9118
|
createRoute({
|
|
8789
9119
|
method: "get",
|
|
@@ -8821,7 +9151,7 @@ app.openapi(
|
|
|
8821
9151
|
tracestate: c.req.header("tracestate"),
|
|
8822
9152
|
baggage: c.req.header("baggage")
|
|
8823
9153
|
};
|
|
8824
|
-
|
|
9154
|
+
logger18.info(
|
|
8825
9155
|
{
|
|
8826
9156
|
otelHeaders,
|
|
8827
9157
|
path: c.req.path,
|
|
@@ -8831,56 +9161,32 @@ app.openapi(
|
|
|
8831
9161
|
);
|
|
8832
9162
|
const executionContext = getRequestExecutionContext(c);
|
|
8833
9163
|
const { tenantId, projectId, agentId, subAgentId } = executionContext;
|
|
8834
|
-
|
|
8835
|
-
|
|
8836
|
-
|
|
8837
|
-
|
|
8838
|
-
|
|
8839
|
-
|
|
8840
|
-
|
|
8841
|
-
|
|
8842
|
-
|
|
8843
|
-
|
|
8844
|
-
|
|
8845
|
-
|
|
8846
|
-
|
|
8847
|
-
|
|
8848
|
-
|
|
8849
|
-
|
|
8850
|
-
|
|
8851
|
-
|
|
8852
|
-
|
|
8853
|
-
|
|
8854
|
-
|
|
8855
|
-
|
|
8856
|
-
|
|
8857
|
-
message: "Agent not found"
|
|
8858
|
-
});
|
|
8859
|
-
}
|
|
8860
|
-
return c.json(agent.agentCard);
|
|
8861
|
-
} else {
|
|
8862
|
-
logger17.info(
|
|
8863
|
-
{
|
|
8864
|
-
message: "getRegisteredAgent (agent-level)",
|
|
8865
|
-
tenantId,
|
|
8866
|
-
projectId,
|
|
8867
|
-
agentId
|
|
8868
|
-
},
|
|
8869
|
-
"agent-level well-known agent.json"
|
|
8870
|
-
);
|
|
8871
|
-
const sandboxConfig = c.get("sandboxConfig");
|
|
8872
|
-
const agent = await getRegisteredAgent({
|
|
8873
|
-
executionContext,
|
|
8874
|
-
sandboxConfig
|
|
9164
|
+
logger18.info({ executionContext }, "executionContext");
|
|
9165
|
+
logger18.info(
|
|
9166
|
+
{
|
|
9167
|
+
message: "getRegisteredAgent (agent-level)",
|
|
9168
|
+
tenantId,
|
|
9169
|
+
projectId,
|
|
9170
|
+
agentId,
|
|
9171
|
+
subAgentId
|
|
9172
|
+
},
|
|
9173
|
+
"agent-level well-known agent.json"
|
|
9174
|
+
);
|
|
9175
|
+
const credentialStores = c.get("credentialStores");
|
|
9176
|
+
const sandboxConfig = c.get("sandboxConfig");
|
|
9177
|
+
const agent = await getRegisteredAgent({
|
|
9178
|
+
executionContext,
|
|
9179
|
+
credentialStoreRegistry: credentialStores,
|
|
9180
|
+
sandboxConfig
|
|
9181
|
+
});
|
|
9182
|
+
logger18.info({ agent }, "agent registered: well-known agent.json");
|
|
9183
|
+
if (!agent) {
|
|
9184
|
+
throw createApiError({
|
|
9185
|
+
code: "not_found",
|
|
9186
|
+
message: "Agent not found"
|
|
8875
9187
|
});
|
|
8876
|
-
if (!agent) {
|
|
8877
|
-
throw createApiError({
|
|
8878
|
-
code: "not_found",
|
|
8879
|
-
message: "Agent not found"
|
|
8880
|
-
});
|
|
8881
|
-
}
|
|
8882
|
-
return c.json(agent.agentCard);
|
|
8883
9188
|
}
|
|
9189
|
+
return c.json(agent.agentCard);
|
|
8884
9190
|
}
|
|
8885
9191
|
);
|
|
8886
9192
|
app.post("/a2a", async (c) => {
|
|
@@ -8889,7 +9195,7 @@ app.post("/a2a", async (c) => {
|
|
|
8889
9195
|
tracestate: c.req.header("tracestate"),
|
|
8890
9196
|
baggage: c.req.header("baggage")
|
|
8891
9197
|
};
|
|
8892
|
-
|
|
9198
|
+
logger18.info(
|
|
8893
9199
|
{
|
|
8894
9200
|
otelHeaders,
|
|
8895
9201
|
path: c.req.path,
|
|
@@ -8900,7 +9206,7 @@ app.post("/a2a", async (c) => {
|
|
|
8900
9206
|
const executionContext = getRequestExecutionContext(c);
|
|
8901
9207
|
const { tenantId, projectId, agentId, subAgentId } = executionContext;
|
|
8902
9208
|
if (subAgentId) {
|
|
8903
|
-
|
|
9209
|
+
logger18.info(
|
|
8904
9210
|
{
|
|
8905
9211
|
message: "a2a (agent-level)",
|
|
8906
9212
|
tenantId,
|
|
@@ -8929,7 +9235,7 @@ app.post("/a2a", async (c) => {
|
|
|
8929
9235
|
}
|
|
8930
9236
|
return a2aHandler(c, agent);
|
|
8931
9237
|
} else {
|
|
8932
|
-
|
|
9238
|
+
logger18.info(
|
|
8933
9239
|
{
|
|
8934
9240
|
message: "a2a (agent-level)",
|
|
8935
9241
|
tenantId,
|
|
@@ -9032,14 +9338,14 @@ function extractTransferData(task) {
|
|
|
9032
9338
|
}
|
|
9033
9339
|
|
|
9034
9340
|
// src/a2a/transfer.ts
|
|
9035
|
-
var
|
|
9341
|
+
var logger19 = getLogger("Transfer");
|
|
9036
9342
|
async function executeTransfer({
|
|
9037
9343
|
tenantId,
|
|
9038
9344
|
threadId,
|
|
9039
9345
|
projectId,
|
|
9040
9346
|
targetSubAgentId
|
|
9041
9347
|
}) {
|
|
9042
|
-
|
|
9348
|
+
logger19.info(
|
|
9043
9349
|
{
|
|
9044
9350
|
targetAgent: targetSubAgentId,
|
|
9045
9351
|
threadId,
|
|
@@ -9054,12 +9360,12 @@ async function executeTransfer({
|
|
|
9054
9360
|
threadId,
|
|
9055
9361
|
subAgentId: targetSubAgentId
|
|
9056
9362
|
});
|
|
9057
|
-
|
|
9363
|
+
logger19.info(
|
|
9058
9364
|
{ targetAgent: targetSubAgentId, threadId },
|
|
9059
9365
|
"Successfully updated active_sub_agent_id in database"
|
|
9060
9366
|
);
|
|
9061
9367
|
} catch (error) {
|
|
9062
|
-
|
|
9368
|
+
logger19.error(
|
|
9063
9369
|
{ error, targetAgent: targetSubAgentId, threadId },
|
|
9064
9370
|
"Failed to update active_sub_agent_id"
|
|
9065
9371
|
);
|
|
@@ -9630,7 +9936,7 @@ function createMCPStreamHelper() {
|
|
|
9630
9936
|
}
|
|
9631
9937
|
|
|
9632
9938
|
// src/handlers/executionHandler.ts
|
|
9633
|
-
var
|
|
9939
|
+
var logger20 = getLogger("ExecutionHandler");
|
|
9634
9940
|
var ExecutionHandler = class {
|
|
9635
9941
|
constructor() {
|
|
9636
9942
|
__publicField(this, "MAX_ERRORS", 3);
|
|
@@ -9665,7 +9971,7 @@ var ExecutionHandler = class {
|
|
|
9665
9971
|
if (emitOperations) {
|
|
9666
9972
|
agentSessionManager.enableEmitOperations(requestId2);
|
|
9667
9973
|
}
|
|
9668
|
-
|
|
9974
|
+
logger20.info(
|
|
9669
9975
|
{ sessionId: requestId2, agentId, conversationId, emitOperations },
|
|
9670
9976
|
"Created AgentSession for message execution"
|
|
9671
9977
|
);
|
|
@@ -9682,7 +9988,7 @@ var ExecutionHandler = class {
|
|
|
9682
9988
|
);
|
|
9683
9989
|
}
|
|
9684
9990
|
} catch (error) {
|
|
9685
|
-
|
|
9991
|
+
logger20.error(
|
|
9686
9992
|
{
|
|
9687
9993
|
error: error instanceof Error ? error.message : "Unknown error",
|
|
9688
9994
|
stack: error instanceof Error ? error.stack : void 0
|
|
@@ -9698,7 +10004,7 @@ var ExecutionHandler = class {
|
|
|
9698
10004
|
try {
|
|
9699
10005
|
await sseHelper.writeOperation(agentInitializingOp(requestId2, agentId));
|
|
9700
10006
|
const taskId = `task_${conversationId}-${requestId2}`;
|
|
9701
|
-
|
|
10007
|
+
logger20.info(
|
|
9702
10008
|
{ taskId, currentAgentId, conversationId, requestId: requestId2 },
|
|
9703
10009
|
"Attempting to create or reuse existing task"
|
|
9704
10010
|
);
|
|
@@ -9722,7 +10028,7 @@ var ExecutionHandler = class {
|
|
|
9722
10028
|
sub_agent_id: currentAgentId
|
|
9723
10029
|
}
|
|
9724
10030
|
});
|
|
9725
|
-
|
|
10031
|
+
logger20.info(
|
|
9726
10032
|
{
|
|
9727
10033
|
taskId,
|
|
9728
10034
|
createdTaskMetadata: Array.isArray(task) ? task[0]?.metadata : task?.metadata
|
|
@@ -9731,27 +10037,27 @@ var ExecutionHandler = class {
|
|
|
9731
10037
|
);
|
|
9732
10038
|
} catch (error) {
|
|
9733
10039
|
if (error?.message?.includes("UNIQUE constraint failed") || error?.message?.includes("PRIMARY KEY constraint failed") || error?.code === "SQLITE_CONSTRAINT_PRIMARYKEY") {
|
|
9734
|
-
|
|
10040
|
+
logger20.info(
|
|
9735
10041
|
{ taskId, error: error.message },
|
|
9736
10042
|
"Task already exists, fetching existing task"
|
|
9737
10043
|
);
|
|
9738
10044
|
const existingTask = await getTask(dbClient_default)({ id: taskId });
|
|
9739
10045
|
if (existingTask) {
|
|
9740
10046
|
task = existingTask;
|
|
9741
|
-
|
|
10047
|
+
logger20.info(
|
|
9742
10048
|
{ taskId, existingTask },
|
|
9743
10049
|
"Successfully reused existing task from race condition"
|
|
9744
10050
|
);
|
|
9745
10051
|
} else {
|
|
9746
|
-
|
|
10052
|
+
logger20.error({ taskId, error }, "Task constraint failed but task not found");
|
|
9747
10053
|
throw error;
|
|
9748
10054
|
}
|
|
9749
10055
|
} else {
|
|
9750
|
-
|
|
10056
|
+
logger20.error({ taskId, error }, "Failed to create task due to non-constraint error");
|
|
9751
10057
|
throw error;
|
|
9752
10058
|
}
|
|
9753
10059
|
}
|
|
9754
|
-
|
|
10060
|
+
logger20.debug(
|
|
9755
10061
|
{
|
|
9756
10062
|
timestamp: (/* @__PURE__ */ new Date()).toISOString(),
|
|
9757
10063
|
executionType: "create_initial_task",
|
|
@@ -9770,7 +10076,7 @@ var ExecutionHandler = class {
|
|
|
9770
10076
|
const maxTransfers = agentConfig?.stopWhen?.transferCountIs ?? 10;
|
|
9771
10077
|
while (iterations < maxTransfers) {
|
|
9772
10078
|
iterations++;
|
|
9773
|
-
|
|
10079
|
+
logger20.info(
|
|
9774
10080
|
{ iterations, currentAgentId, agentId, conversationId, fromSubAgentId },
|
|
9775
10081
|
`Execution loop iteration ${iterations} with agent ${currentAgentId}, transfer from: ${fromSubAgentId || "none"}`
|
|
9776
10082
|
);
|
|
@@ -9778,10 +10084,10 @@ var ExecutionHandler = class {
|
|
|
9778
10084
|
scopes: { tenantId, projectId },
|
|
9779
10085
|
conversationId
|
|
9780
10086
|
});
|
|
9781
|
-
|
|
10087
|
+
logger20.info({ activeAgent }, "activeAgent");
|
|
9782
10088
|
if (activeAgent && activeAgent.activeSubAgentId !== currentAgentId) {
|
|
9783
10089
|
currentAgentId = activeAgent.activeSubAgentId;
|
|
9784
|
-
|
|
10090
|
+
logger20.info({ currentAgentId }, `Updated current agent to: ${currentAgentId}`);
|
|
9785
10091
|
}
|
|
9786
10092
|
const agentBaseUrl = `${baseUrl}/agents`;
|
|
9787
10093
|
const a2aClient = new A2AClient(agentBaseUrl, {
|
|
@@ -9822,13 +10128,13 @@ var ExecutionHandler = class {
|
|
|
9822
10128
|
});
|
|
9823
10129
|
if (!messageResponse?.result) {
|
|
9824
10130
|
errorCount++;
|
|
9825
|
-
|
|
10131
|
+
logger20.error(
|
|
9826
10132
|
{ currentAgentId, iterations, errorCount },
|
|
9827
10133
|
`No response from agent ${currentAgentId} on iteration ${iterations} (error ${errorCount}/${this.MAX_ERRORS})`
|
|
9828
10134
|
);
|
|
9829
10135
|
if (errorCount >= this.MAX_ERRORS) {
|
|
9830
10136
|
const errorMessage2 = `Maximum error limit (${this.MAX_ERRORS}) reached`;
|
|
9831
|
-
|
|
10137
|
+
logger20.error({ maxErrors: this.MAX_ERRORS, errorCount }, errorMessage2);
|
|
9832
10138
|
await sseHelper.writeOperation(errorOp(errorMessage2, currentAgentId || "system"));
|
|
9833
10139
|
if (task) {
|
|
9834
10140
|
await updateTask(dbClient_default)({
|
|
@@ -9852,7 +10158,7 @@ var ExecutionHandler = class {
|
|
|
9852
10158
|
if (isTransferTask(messageResponse.result)) {
|
|
9853
10159
|
const transferData = extractTransferData(messageResponse.result);
|
|
9854
10160
|
if (!transferData) {
|
|
9855
|
-
|
|
10161
|
+
logger20.error(
|
|
9856
10162
|
{ result: messageResponse.result },
|
|
9857
10163
|
"Transfer detected but no transfer data found"
|
|
9858
10164
|
);
|
|
@@ -9861,7 +10167,7 @@ var ExecutionHandler = class {
|
|
|
9861
10167
|
const { targetSubAgentId, fromSubAgentId: transferFromAgent } = transferData;
|
|
9862
10168
|
const firstArtifact = messageResponse.result.artifacts[0];
|
|
9863
10169
|
const transferReason = firstArtifact?.parts[1]?.kind === "text" ? firstArtifact.parts[1].text : "Transfer initiated";
|
|
9864
|
-
|
|
10170
|
+
logger20.info({ targetSubAgentId, transferReason, transferFromAgent }, "Transfer response");
|
|
9865
10171
|
currentMessage = `<transfer_context> ${transferReason} </transfer_context>`;
|
|
9866
10172
|
const { success, targetSubAgentId: newAgentId } = await executeTransfer({
|
|
9867
10173
|
projectId,
|
|
@@ -9872,7 +10178,7 @@ var ExecutionHandler = class {
|
|
|
9872
10178
|
if (success) {
|
|
9873
10179
|
fromSubAgentId = currentAgentId;
|
|
9874
10180
|
currentAgentId = newAgentId;
|
|
9875
|
-
|
|
10181
|
+
logger20.info(
|
|
9876
10182
|
{
|
|
9877
10183
|
transferFrom: fromSubAgentId,
|
|
9878
10184
|
transferTo: currentAgentId,
|
|
@@ -9886,7 +10192,7 @@ var ExecutionHandler = class {
|
|
|
9886
10192
|
let responseParts = [];
|
|
9887
10193
|
if (messageResponse.result.streamedContent?.parts) {
|
|
9888
10194
|
responseParts = messageResponse.result.streamedContent.parts;
|
|
9889
|
-
|
|
10195
|
+
logger20.info(
|
|
9890
10196
|
{ partsCount: responseParts.length },
|
|
9891
10197
|
"Using streamed content for conversation history"
|
|
9892
10198
|
);
|
|
@@ -9894,7 +10200,7 @@ var ExecutionHandler = class {
|
|
|
9894
10200
|
responseParts = messageResponse.result.artifacts?.flatMap(
|
|
9895
10201
|
(artifact) => artifact.parts || []
|
|
9896
10202
|
) || [];
|
|
9897
|
-
|
|
10203
|
+
logger20.info(
|
|
9898
10204
|
{ partsCount: responseParts.length },
|
|
9899
10205
|
"Using artifacts for conversation history (fallback)"
|
|
9900
10206
|
);
|
|
@@ -9903,7 +10209,7 @@ var ExecutionHandler = class {
|
|
|
9903
10209
|
const agentSessionData = agentSessionManager.getSession(requestId2);
|
|
9904
10210
|
if (agentSessionData) {
|
|
9905
10211
|
const sessionSummary = agentSessionData.getSummary();
|
|
9906
|
-
|
|
10212
|
+
logger20.info(sessionSummary, "AgentSession data after completion");
|
|
9907
10213
|
}
|
|
9908
10214
|
let textContent = "";
|
|
9909
10215
|
for (const part of responseParts) {
|
|
@@ -9956,22 +10262,22 @@ var ExecutionHandler = class {
|
|
|
9956
10262
|
}
|
|
9957
10263
|
});
|
|
9958
10264
|
const updateTaskEnd = Date.now();
|
|
9959
|
-
|
|
10265
|
+
logger20.info(
|
|
9960
10266
|
{ duration: updateTaskEnd - updateTaskStart },
|
|
9961
10267
|
"Completed updateTask operation"
|
|
9962
10268
|
);
|
|
9963
10269
|
await sseHelper.writeOperation(completionOp(currentAgentId, iterations));
|
|
9964
10270
|
await sseHelper.complete();
|
|
9965
|
-
|
|
10271
|
+
logger20.info({}, "Ending AgentSession and cleaning up");
|
|
9966
10272
|
agentSessionManager.endSession(requestId2);
|
|
9967
|
-
|
|
10273
|
+
logger20.info({}, "Cleaning up streamHelper");
|
|
9968
10274
|
unregisterStreamHelper(requestId2);
|
|
9969
10275
|
let response;
|
|
9970
10276
|
if (sseHelper instanceof MCPStreamHelper) {
|
|
9971
10277
|
const captured = sseHelper.getCapturedResponse();
|
|
9972
10278
|
response = captured.text || "No response content";
|
|
9973
10279
|
}
|
|
9974
|
-
|
|
10280
|
+
logger20.info({}, "ExecutionHandler returning success");
|
|
9975
10281
|
return { success: true, iterations, response };
|
|
9976
10282
|
} catch (error) {
|
|
9977
10283
|
setSpanWithError(span, error instanceof Error ? error : new Error(String(error)));
|
|
@@ -9982,13 +10288,13 @@ var ExecutionHandler = class {
|
|
|
9982
10288
|
});
|
|
9983
10289
|
}
|
|
9984
10290
|
errorCount++;
|
|
9985
|
-
|
|
10291
|
+
logger20.warn(
|
|
9986
10292
|
{ iterations, errorCount },
|
|
9987
10293
|
`No valid response or transfer on iteration ${iterations} (error ${errorCount}/${this.MAX_ERRORS})`
|
|
9988
10294
|
);
|
|
9989
10295
|
if (errorCount >= this.MAX_ERRORS) {
|
|
9990
10296
|
const errorMessage2 = `Maximum error limit (${this.MAX_ERRORS}) reached`;
|
|
9991
|
-
|
|
10297
|
+
logger20.error({ maxErrors: this.MAX_ERRORS, errorCount }, errorMessage2);
|
|
9992
10298
|
await sseHelper.writeOperation(errorOp(errorMessage2, currentAgentId || "system"));
|
|
9993
10299
|
if (task) {
|
|
9994
10300
|
await updateTask(dbClient_default)({
|
|
@@ -10009,7 +10315,7 @@ var ExecutionHandler = class {
|
|
|
10009
10315
|
}
|
|
10010
10316
|
}
|
|
10011
10317
|
const errorMessage = `Maximum transfer limit (${maxTransfers}) reached without completion`;
|
|
10012
|
-
|
|
10318
|
+
logger20.error({ maxTransfers, iterations }, errorMessage);
|
|
10013
10319
|
await sseHelper.writeOperation(errorOp(errorMessage, currentAgentId || "system"));
|
|
10014
10320
|
if (task) {
|
|
10015
10321
|
await updateTask(dbClient_default)({
|
|
@@ -10028,7 +10334,7 @@ var ExecutionHandler = class {
|
|
|
10028
10334
|
unregisterStreamHelper(requestId2);
|
|
10029
10335
|
return { success: false, error: errorMessage, iterations };
|
|
10030
10336
|
} catch (error) {
|
|
10031
|
-
|
|
10337
|
+
logger20.error({ error }, "Error in execution handler");
|
|
10032
10338
|
const errorMessage = error instanceof Error ? error.message : "Unknown execution error";
|
|
10033
10339
|
await sseHelper.writeOperation(
|
|
10034
10340
|
errorOp(`Execution error: ${errorMessage}`, currentAgentId || "system")
|
|
@@ -10055,7 +10361,7 @@ var ExecutionHandler = class {
|
|
|
10055
10361
|
|
|
10056
10362
|
// src/routes/chat.ts
|
|
10057
10363
|
var app2 = new OpenAPIHono();
|
|
10058
|
-
var
|
|
10364
|
+
var logger21 = getLogger("completionsHandler");
|
|
10059
10365
|
var chatCompletionsRoute = createRoute({
|
|
10060
10366
|
method: "post",
|
|
10061
10367
|
path: "/completions",
|
|
@@ -10173,7 +10479,7 @@ app2.openapi(chatCompletionsRoute, async (c) => {
|
|
|
10173
10479
|
tracestate: c.req.header("tracestate"),
|
|
10174
10480
|
baggage: c.req.header("baggage")
|
|
10175
10481
|
};
|
|
10176
|
-
|
|
10482
|
+
logger21.info(
|
|
10177
10483
|
{
|
|
10178
10484
|
otelHeaders,
|
|
10179
10485
|
path: c.req.path,
|
|
@@ -10282,7 +10588,7 @@ app2.openapi(chatCompletionsRoute, async (c) => {
|
|
|
10282
10588
|
dbClient: dbClient_default,
|
|
10283
10589
|
credentialStores
|
|
10284
10590
|
});
|
|
10285
|
-
|
|
10591
|
+
logger21.info(
|
|
10286
10592
|
{
|
|
10287
10593
|
tenantId,
|
|
10288
10594
|
projectId,
|
|
@@ -10330,7 +10636,7 @@ app2.openapi(chatCompletionsRoute, async (c) => {
|
|
|
10330
10636
|
try {
|
|
10331
10637
|
const sseHelper = createSSEStreamHelper(stream3, requestId2, timestamp);
|
|
10332
10638
|
await sseHelper.writeRole();
|
|
10333
|
-
|
|
10639
|
+
logger21.info({ subAgentId }, "Starting execution");
|
|
10334
10640
|
const emitOperationsHeader = c.req.header("x-emit-operations");
|
|
10335
10641
|
const emitOperations = emitOperationsHeader === "true";
|
|
10336
10642
|
const executionHandler = new ExecutionHandler();
|
|
@@ -10343,7 +10649,7 @@ app2.openapi(chatCompletionsRoute, async (c) => {
|
|
|
10343
10649
|
sseHelper,
|
|
10344
10650
|
emitOperations
|
|
10345
10651
|
});
|
|
10346
|
-
|
|
10652
|
+
logger21.info(
|
|
10347
10653
|
{ result },
|
|
10348
10654
|
`Execution completed: ${result.success ? "success" : "failed"} after ${result.iterations} iterations`
|
|
10349
10655
|
);
|
|
@@ -10357,7 +10663,7 @@ app2.openapi(chatCompletionsRoute, async (c) => {
|
|
|
10357
10663
|
}
|
|
10358
10664
|
await sseHelper.complete();
|
|
10359
10665
|
} catch (error) {
|
|
10360
|
-
|
|
10666
|
+
logger21.error(
|
|
10361
10667
|
{
|
|
10362
10668
|
error: error instanceof Error ? error.message : error,
|
|
10363
10669
|
stack: error instanceof Error ? error.stack : void 0
|
|
@@ -10374,13 +10680,13 @@ app2.openapi(chatCompletionsRoute, async (c) => {
|
|
|
10374
10680
|
);
|
|
10375
10681
|
await sseHelper.complete();
|
|
10376
10682
|
} catch (streamError) {
|
|
10377
|
-
|
|
10683
|
+
logger21.error({ streamError }, "Failed to write error to stream");
|
|
10378
10684
|
}
|
|
10379
10685
|
}
|
|
10380
10686
|
});
|
|
10381
10687
|
});
|
|
10382
10688
|
} catch (error) {
|
|
10383
|
-
|
|
10689
|
+
logger21.error(
|
|
10384
10690
|
{
|
|
10385
10691
|
error: error instanceof Error ? error.message : error,
|
|
10386
10692
|
stack: error instanceof Error ? error.stack : void 0
|
|
@@ -10404,7 +10710,7 @@ var getMessageText = (content) => {
|
|
|
10404
10710
|
};
|
|
10405
10711
|
var chat_default = app2;
|
|
10406
10712
|
var app3 = new OpenAPIHono();
|
|
10407
|
-
var
|
|
10713
|
+
var logger22 = getLogger("chatDataStream");
|
|
10408
10714
|
var chatDataStreamRoute = createRoute({
|
|
10409
10715
|
method: "post",
|
|
10410
10716
|
path: "/chat",
|
|
@@ -10528,7 +10834,7 @@ app3.openapi(chatDataStreamRoute, async (c) => {
|
|
|
10528
10834
|
});
|
|
10529
10835
|
const lastUserMessage = body.messages.filter((m) => m.role === "user").slice(-1)[0];
|
|
10530
10836
|
const userText = typeof lastUserMessage?.content === "string" ? lastUserMessage.content : lastUserMessage?.parts?.map((p) => p.text).join("") || "";
|
|
10531
|
-
|
|
10837
|
+
logger22.info({ userText, lastUserMessage }, "userText");
|
|
10532
10838
|
const messageSpan = trace.getActiveSpan();
|
|
10533
10839
|
if (messageSpan) {
|
|
10534
10840
|
messageSpan.setAttributes({
|
|
@@ -10573,7 +10879,7 @@ app3.openapi(chatDataStreamRoute, async (c) => {
|
|
|
10573
10879
|
await streamHelper.writeOperation(errorOp("Unable to process request", "system"));
|
|
10574
10880
|
}
|
|
10575
10881
|
} catch (err) {
|
|
10576
|
-
|
|
10882
|
+
logger22.error({ err }, "Streaming error");
|
|
10577
10883
|
await streamHelper.writeOperation(errorOp("Internal server error", "system"));
|
|
10578
10884
|
} finally {
|
|
10579
10885
|
if ("cleanup" in streamHelper && typeof streamHelper.cleanup === "function") {
|
|
@@ -10595,7 +10901,7 @@ app3.openapi(chatDataStreamRoute, async (c) => {
|
|
|
10595
10901
|
);
|
|
10596
10902
|
});
|
|
10597
10903
|
} catch (error) {
|
|
10598
|
-
|
|
10904
|
+
logger22.error({ error }, "chatDataStream error");
|
|
10599
10905
|
throw createApiError({
|
|
10600
10906
|
code: "internal_server_error",
|
|
10601
10907
|
message: "Failed to process chat completion"
|
|
@@ -10603,14 +10909,14 @@ app3.openapi(chatDataStreamRoute, async (c) => {
 }
 });
 var chatDataStream_default = app3;
-var
+var logger23 = getLogger("dataComponentPreview");
 var app4 = new OpenAPIHono();
 var generatePreviewRoute = createRoute({
 method: "post",
-path: "/:tenantId/projects/:projectId/data-components/:id/generate-
+path: "/:tenantId/projects/:projectId/data-components/:id/generate-render",
 tags: ["Data Component Preview"],
-summary: "Generate Component
-description: "Generate a React/Tailwind component
+summary: "Generate Component Render",
+description: "Generate a React/Tailwind component render using AI based on the data component schema",
 request: {
 params: z.object({
 tenantId: z.string(),
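The preview route is renamed to end in `generate-render` (the old suffix is truncated on the removed side of the diff). A hedged sketch of calling the renamed endpoint; the base URL and the absence of auth headers are assumptions, while the path shape and the `instructions` / `existingCode` body fields are confirmed by the diff:

```ts
// Assumed base URL; adjust to wherever the run API is hosted.
const baseUrl = "http://localhost:3003"; // assumption
const tenantId = "my-tenant";
const projectId = "my-project";
const dataComponentId = "my-component";

async function generateRender(instructions?: string, existingCode?: string): Promise<Response> {
  return fetch(
    `${baseUrl}/${tenantId}/projects/${projectId}/data-components/${dataComponentId}/generate-render`,
    {
      method: "POST",
      headers: { "Content-Type": "application/json" },
      body: JSON.stringify({ instructions, existingCode }),
    }
  );
  // The response body is a newline-delimited JSON stream of partial objects.
}
```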
@@ -10649,7 +10955,7 @@ app4.openapi(generatePreviewRoute, async (c) => {
 const { tenantId, projectId, id } = c.req.valid("param");
 const body = c.req.valid("json");
 const { instructions, existingCode } = body;
-
+logger23.info(
 {
 tenantId,
 projectId,
@@ -10681,38 +10987,38 @@ app4.openapi(generatePreviewRoute, async (c) => {
 const prompt = buildGenerationPrompt(dataComponent, instructions, existingCode);
 try {
 const modelConfig = ModelFactory.prepareGenerationConfig(project.models.base);
-const
-
-
+const renderSchema = z.object({
+component: z.string().describe("The React component code"),
+mockData: z.any().describe("Sample data matching the props schema")
 });
 const result = streamObject({
 ...modelConfig,
 prompt,
-schema:
+schema: renderSchema,
 temperature: 0.7
 });
 c.header("Content-Type", "text/plain; charset=utf-8");
 c.header("Cache-Control", "no-cache");
 c.header("Connection", "keep-alive");
-const existingData = existingCode && dataComponent.
+const existingData = existingCode && dataComponent.render && typeof dataComponent.render === "object" && "mockData" in dataComponent.render ? dataComponent.render.mockData : null;
 return stream(c, async (stream3) => {
 try {
 for await (const partialObject of result.partialObjectStream) {
-const outputObject = instructions && existingData ? { ...partialObject,
+const outputObject = instructions && existingData ? { ...partialObject, mockData: existingData } : partialObject;
 await stream3.write(JSON.stringify(outputObject) + "\n");
 }
 } catch (error) {
-
+logger23.error(
 { error, tenantId, projectId, dataComponentId: id },
 "Error streaming preview generation"
 );
 await stream3.write(
-JSON.stringify({
+JSON.stringify({ component: "// Error generating component preview", mockData: {} }) + "\n"
 );
 }
 });
 } catch (error) {
-
+logger23.error(
 { error, tenantId, projectId, dataComponentId: id },
 "Error generating component preview"
 );
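The `renderSchema` passed to `streamObject` now emits `{ component, mockData }` objects, and the route writes each partial object as a single JSON line. A sketch of consuming that newline-delimited stream on the client, assuming a `Response` obtained as in the earlier `generateRender` sketch:

```ts
// Read the newline-delimited JSON stream written by the generate-render route:
// each line is a (possibly partial) { component, mockData } object.
interface RenderPayload {
  component?: string;
  mockData?: unknown;
}

async function readRenderStream(res: Response): Promise<RenderPayload | undefined> {
  const reader = res.body!.getReader();
  const decoder = new TextDecoder();
  let buffer = "";
  let latest: RenderPayload | undefined;
  for (;;) {
    const { done, value } = await reader.read();
    if (done) break;
    buffer += decoder.decode(value, { stream: true });
    const lines = buffer.split("\n");
    buffer = lines.pop() ?? ""; // keep the trailing partial line for the next chunk
    for (const line of lines) {
      if (line.trim()) latest = JSON.parse(line) as RenderPayload;
    }
  }
  return latest; // the last object is the most complete partial
}
```

Note the design choice visible in the diff: when `instructions` and an existing render are present (the edit flow), the server keeps the stored `mockData` and only regenerates `component`.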
@@ -10755,13 +11061,13 @@ REQUIREMENTS:
 
 OUTPUT FORMAT:
 You need to generate only one thing:
-1. "
+1. "component": The modified React component code as a string
 
-Return ONLY the
+Return ONLY the component field, the mockData field will be reused from the existing render.
 
 EXAMPLE OUTPUT:
 {
-"
+"component": "import { Mail, User } from 'lucide-react';\\n\\nfunction ${componentName}(props) {\\n // Modified component code here\\n}"
 }
 
 Focus on making the requested changes while maintaining the component's quality and design principles.`;
@@ -10815,13 +11121,13 @@ AVAILABLE SEMANTIC COLOR CLASSES:
 
 OUTPUT FORMAT:
 You need to generate two things:
-1. "
-2. "
+1. "component": The complete React component code as a string
+2. "mockData": Realistic sample data that matches the props schema (as a JSON object)
 
 EXAMPLE OUTPUT (for a user profile schema with name, email, role):
 {
-"
-"
+"component": "import { Mail, User } from 'lucide-react';\\n\\nfunction ${componentName}(props) {\\n return (\\n <div className=\\"p-4 rounded-lg border border-border bg-card\\">\\n <div className=\\"flex items-center gap-2.5 mb-2\\">\\n <User className=\\"size-4 text-muted-foreground\\" />\\n <span className=\\"text-base font-medium text-foreground\\">{props.name}</span>\\n </div>\\n <div className=\\"flex items-center gap-2 text-sm text-muted-foreground\\">\\n <Mail className=\\"size-4\\" />\\n <span>{props.email}</span>\\n </div>\\n <div className=\\"text-xs text-muted-foreground mt-2\\">Role: {props.role}</div>\\n </div>\\n );\\n}",
+"mockData": {
 "name": "Sarah Chen",
 "email": "sarah.chen@example.com",
 "role": "Product Manager"
@@ -10845,7 +11151,7 @@ var dataComponentPreview_default = app4;
 function createMCPSchema(schema) {
 return schema;
 }
-var
+var logger24 = getLogger("mcp");
 var _MockResponseSingleton = class _MockResponseSingleton {
 constructor() {
 __publicField(this, "mockRes");
@@ -10900,21 +11206,21 @@ var createSpoofInitMessage = (mcpProtocolVersion) => ({
 id: 0
 });
 var spoofTransportInitialization = async (transport, req, sessionId, mcpProtocolVersion) => {
-
+logger24.info({ sessionId }, "Spoofing initialization message to set transport state");
 const spoofInitMessage = createSpoofInitMessage(mcpProtocolVersion);
 const mockRes = MockResponseSingleton.getInstance().getMockResponse();
 try {
 await transport.handleRequest(req, mockRes, spoofInitMessage);
-
+logger24.info({ sessionId }, "Successfully spoofed initialization");
 } catch (spoofError) {
-
+logger24.warn({ sessionId, error: spoofError }, "Spoof initialization failed, continuing anyway");
 }
 };
 var validateSession = async (req, res, body, tenantId, projectId, agentId) => {
 const sessionId = req.headers["mcp-session-id"];
-
+logger24.info({ sessionId }, "Received MCP session ID");
 if (!sessionId) {
-
+logger24.info({ body }, "Missing session ID");
 res.writeHead(400).end(
 JSON.stringify({
 jsonrpc: "2.0",
@@ -10940,7 +11246,7 @@ var validateSession = async (req, res, body, tenantId, projectId, agentId) => {
 scopes: { tenantId, projectId },
 conversationId: sessionId
 });
-
+logger24.info(
 {
 sessionId,
 conversationFound: !!conversation,
@@ -10951,7 +11257,7 @@ var validateSession = async (req, res, body, tenantId, projectId, agentId) => {
 "Conversation lookup result"
 );
 if (!conversation || conversation.metadata?.sessionData?.sessionType !== "mcp" || conversation.metadata?.sessionData?.agentId !== agentId) {
-
+logger24.info(
 { sessionId, conversationId: conversation?.id },
 "MCP session not found or invalid"
 );
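Per the condition above, an MCP session is only accepted when a conversation exists whose `metadata.sessionData` marks it as an MCP session for the same agent. A small sketch of that predicate with hypothetical types; only the fields used in the check are modeled:

```ts
// Hypothetical conversation shape; the real record carries more fields.
interface Conversation {
  id: string;
  metadata?: {
    sessionData?: { sessionType?: string; agentId?: string };
  };
}

// Mirrors the validity check in validateSession: the conversation must exist,
// be an MCP session, and belong to the requested agent.
function isValidMcpSession(conversation: Conversation | undefined, agentId: string): boolean {
  return (
    !!conversation &&
    conversation.metadata?.sessionData?.sessionType === "mcp" &&
    conversation.metadata?.sessionData?.agentId === agentId
  );
}

console.log(
  isValidMcpSession(
    { id: "c1", metadata: { sessionData: { sessionType: "mcp", agentId: "a1" } } },
    "a1"
  )
);
```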
@@ -11012,7 +11318,7 @@ var executeAgentQuery = async (executionContext, conversationId, query, defaultS
 requestId: requestId2,
 sseHelper: mcpStreamHelper
 });
-
+logger24.info(
 { result },
 `Execution completed: ${result.success ? "success" : "failed"} after ${result.iterations} iterations`
 );
@@ -11036,7 +11342,7 @@ var executeAgentQuery = async (executionContext, conversationId, query, defaultS
 ]
 };
 };
-var getServer = async (
+var getServer = async (headers2, executionContext, conversationId, credentialStores) => {
 const { tenantId, projectId, agentId } = executionContext;
 setupTracing(conversationId, tenantId, agentId);
 const agent = await getAgentWithDefaultSubAgent(dbClient_default)({
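`getServer` now takes the validated headers as its first argument and forwards them into context resolution as `headers: headers2` (visible in the next hunk). A type-level sketch of the new signature; the parameter and return types are assumptions, since the diff only confirms the parameter order:

```ts
// Assumed types; the diff confirms only the parameter order
// (headers, executionContext, conversationId, credentialStores).
interface ExecutionContext {
  tenantId: string;
  projectId: string;
  agentId: string;
}

type GetServer = (
  headers: Record<string, string> | undefined,
  executionContext: ExecutionContext,
  conversationId: string,
  credentialStores: unknown
) => Promise<unknown>; // resolves to an MCP server instance in the real code
```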
@@ -11092,18 +11398,18 @@ var getServer = async (headers, executionContext, conversationId, credentialStor
 projectId,
 agentId,
 conversationId,
-headers,
+headers: headers2,
 dbClient: dbClient_default,
 credentialStores
 });
-
+logger24.info(
 {
 tenantId,
 projectId,
 agentId,
 conversationId,
 hasContextConfig: !!agent.contextConfigId,
-hasHeaders: !!
+hasHeaders: !!headers2,
 hasValidatedContext: !!resolvedContext
 },
 "parameters"
@@ -11158,7 +11464,7 @@ var validateRequestParameters = (c) => {
 };
 var handleInitializationRequest = async (body, executionContext, validatedContext, req, res, c, credentialStores) => {
 const { tenantId, projectId, agentId } = executionContext;
-
+logger24.info({ body }, "Received initialization request");
 const sessionId = getConversationId();
 const activeSpan = trace.getActiveSpan();
 if (activeSpan) {
@@ -11214,7 +11520,7 @@ var handleInitializationRequest = async (body, executionContext, validatedContex
 }
 }
 });
-
+logger24.info(
 { sessionId, conversationId: conversation.id },
 "Created MCP session as conversation"
 );
@@ -11223,9 +11529,9 @@ var handleInitializationRequest = async (body, executionContext, validatedContex
 });
 const server = await getServer(validatedContext, executionContext, sessionId, credentialStores);
 await server.connect(transport);
-
+logger24.info({ sessionId }, "Server connected for initialization");
 res.setHeader("Mcp-Session-Id", sessionId);
-
+logger24.info(
 {
 sessionId,
 bodyMethod: body?.method,
@@ -11234,7 +11540,7 @@ var handleInitializationRequest = async (body, executionContext, validatedContex
 "About to handle initialization request"
 );
 await transport.handleRequest(req, res, body);
-
+logger24.info({ sessionId }, "Successfully handled initialization request");
 return toFetchResponse(res);
 });
 };
@@ -11261,8 +11567,8 @@ var handleExistingSessionRequest = async (body, executionContext, validatedConte
 sessionId,
 conversation.metadata?.session_data?.mcpProtocolVersion
 );
-
-
+logger24.info({ sessionId }, "Server connected and transport initialized");
+logger24.info(
 {
 sessionId,
 bodyKeys: Object.keys(body || {}),
@@ -11276,9 +11582,9 @@ var handleExistingSessionRequest = async (body, executionContext, validatedConte
 );
 try {
 await transport.handleRequest(req, res, body);
-
+logger24.info({ sessionId }, "Successfully handled MCP request");
 } catch (transportError) {
-
+logger24.error(
 {
 sessionId,
 error: transportError,
@@ -11329,13 +11635,13 @@ app5.openapi(
 }
 const { executionContext } = paramValidation;
 const body = c.get("requestBody") || {};
-
+logger24.info({ body, bodyKeys: Object.keys(body || {}) }, "Parsed request body");
 const isInitRequest = body.method === "initialize";
 const { req, res } = toReqRes(c.req.raw);
 const validatedContext = c.get("validatedContext") || {};
 const credentialStores = c.get("credentialStores");
-
-
+logger24.info({ validatedContext }, "Validated context");
+logger24.info({ req }, "request");
 if (isInitRequest) {
 return await handleInitializationRequest(
 body,
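The route handler above branches on `body.method === "initialize"`: initialization requests create a new session/conversation, anything else must present an existing `mcp-session-id` and goes through `handleExistingSessionRequest`. A condensed sketch of that dispatch with simplified handler signatures:

```ts
// Simplified sketch of the MCP request dispatch shown in the diff: an
// "initialize" call sets up a new session, everything else reuses one.
type McpBody = { method?: string; [key: string]: unknown };

async function dispatchMcpRequest(
  body: McpBody,
  handleInit: (body: McpBody) => Promise<Response>,
  handleExisting: (body: McpBody) => Promise<Response>
): Promise<Response> {
  const isInitRequest = body.method === "initialize";
  return isInitRequest ? handleInit(body) : handleExisting(body);
}
```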
@@ -11357,7 +11663,7 @@ app5.openapi(
 );
 }
 } catch (e) {
-
+logger24.error(
 {
 error: e instanceof Error ? e.message : e,
 stack: e instanceof Error ? e.stack : void 0
@@ -11369,7 +11675,7 @@ app5.openapi(
 }
 );
 app5.get("/", async (c) => {
-
+logger24.info({}, "Received GET MCP request");
 return c.json(
 {
 jsonrpc: "2.0",
@@ -11383,7 +11689,7 @@ app5.get("/", async (c) => {
 );
 });
 app5.delete("/", async (c) => {
-
+logger24.info({}, "Received DELETE MCP request");
 return c.json(
 {
 jsonrpc: "2.0",
@@ -11396,7 +11702,7 @@ app5.delete("/", async (c) => {
 var mcp_default = app5;
 
 // src/app.ts
-var
+var logger25 = getLogger("agents-run-api");
 function createExecutionHono(serverConfig, credentialStores, sandboxConfig) {
 const app7 = new OpenAPIHono();
 app7.use("*", otel());
@@ -11415,7 +11721,7 @@ function createExecutionHono(serverConfig, credentialStores, sandboxConfig) {
 const body = await c.req.json();
 c.set("requestBody", body);
 } catch (error) {
-
+logger25.debug({ error }, "Failed to parse JSON body, continuing without parsed body");
 }
 }
 return next();
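The middleware above pre-parses the JSON body once, stashes it on the context as `requestBody`, and deliberately swallows parse failures so non-JSON requests still flow through. A hedged Hono-style sketch of that idea; the content-type guard, app wiring, and route below are illustrative assumptions, not the package's exact code:

```ts
import { Hono } from "hono";

// Illustrative wiring; in the package this runs inside createExecutionHono on
// an OpenAPIHono instance, but the middleware body follows the same pattern.
const app = new Hono<{ Variables: { requestBody: unknown } }>();

app.use("*", async (c, next) => {
  if (c.req.header("content-type")?.includes("application/json")) {
    try {
      const body = await c.req.json();
      c.set("requestBody", body);
    } catch (error) {
      // Parsing is best-effort: downstream handlers fall back to an empty body.
      console.debug("Failed to parse JSON body, continuing without parsed body", error);
    }
  }
  return next();
});

app.post("/echo", (c) => c.json({ body: c.get("requestBody") ?? {} }));
```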
@@ -11466,8 +11772,8 @@ function createExecutionHono(serverConfig, credentialStores, sandboxConfig) {
 if (!isExpectedError) {
 const errorMessage = err instanceof Error ? err.message : String(err);
 const errorStack = err instanceof Error ? err.stack : void 0;
-if (
-
+if (logger25) {
+logger25.error(
 {
 error: err,
 message: errorMessage,
@@ -11479,8 +11785,8 @@ function createExecutionHono(serverConfig, credentialStores, sandboxConfig) {
 );
 }
 } else {
-if (
-
+if (logger25) {
+logger25.error(
 {
 error: err,
 path: c.req.path,
@@ -11497,8 +11803,8 @@ function createExecutionHono(serverConfig, credentialStores, sandboxConfig) {
 const response = err.getResponse();
 return response;
 } catch (responseError) {
-if (
-
+if (logger25) {
+logger25.error({ error: responseError }, "Error while handling HTTPException response");
 }
 }
 }
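All three error-handling branches now guard on `logger25` before logging, so a missing logger cannot turn error reporting into a second crash. A minimal sketch of that defensive pattern; the function name, log message, and logger type are illustrative stand-ins:

```ts
// Stand-in logger type (see the earlier getLogger sketch); the point here is
// the null-guard around every logging call inside the error handler.
interface ScopedLogger {
  error: (fields: Record<string, unknown>, msg: string) => void;
}

function reportError(logger: ScopedLogger | undefined, err: unknown, path: string): void {
  const message = err instanceof Error ? err.message : String(err);
  const stack = err instanceof Error ? err.stack : undefined;
  if (logger) {
    logger.error({ error: err, message, stack, path }, "Unhandled error");
  }
}
```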
@@ -11532,7 +11838,7 @@ function createExecutionHono(serverConfig, credentialStores, sandboxConfig) {
 app7.use("*", async (c, next) => {
 const executionContext = c.get("executionContext");
 if (!executionContext) {
-
+logger25.debug({}, "Empty execution context");
 return next();
 }
 const { tenantId, projectId, agentId } = executionContext;
@@ -11541,7 +11847,7 @@ function createExecutionHono(serverConfig, credentialStores, sandboxConfig) {
 if (requestBody) {
 conversationId = requestBody.conversationId;
 if (!conversationId) {
-
+logger25.debug({ requestBody }, "No conversation ID found in request body");
 }
 }
 const entries = Object.fromEntries(
@@ -11556,7 +11862,7 @@ function createExecutionHono(serverConfig, credentialStores, sandboxConfig) {
 })
 );
 if (!Object.keys(entries).length) {
-
+logger25.debug({}, "Empty entries for baggage");
 return next();
 }
 const bag = Object.entries(entries).reduce(
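The tail of this hunk folds the collected execution-context entries into an OpenTelemetry baggage object, but the reduce body itself is cut off in the diff. The sketch below shows one plausible shape using the public `@opentelemetry/api` baggage helpers; the field names in the example call are assumptions based on the context fields mentioned above:

```ts
import { context, propagation } from "@opentelemetry/api";

// One plausible shape for the truncated reduce: fold the non-empty execution
// context fields into an OTel baggage and run the rest of the request with it.
function withExecutionBaggage<T>(entries: Record<string, string>, fn: () => T): T {
  const bag = Object.entries(entries).reduce(
    (baggage, [key, value]) => baggage.setEntry(key, { value }),
    propagation.createBaggage()
  );
  return context.with(propagation.setBaggage(context.active(), bag), fn);
}

// Example: tenant/project/agent IDs attached as baggage for downstream spans.
withExecutionBaggage({ tenantId: "t1", projectId: "p1", agentId: "a1" }, () => {
  // handler logic runs here with the baggage on the active context
});
```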
|