@inkeep/agents-run-api 0.2.1 → 0.3.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
package/dist/index.js
CHANGED
@@ -1,4 +1,4 @@
-import { env, __publicField, dbClient_default, getFormattedConversationHistory, createDefaultConversationHistoryConfig, saveA2AMessageResponse } from './chunk-
+import { env, __publicField, dbClient_default, getFormattedConversationHistory, createDefaultConversationHistoryConfig, saveA2AMessageResponse } from './chunk-7PHUFKNP.js';
 import { getNodeAutoInstrumentations } from '@opentelemetry/auto-instrumentations-node';
 import { BaggageSpanProcessor, ALLOW_ALL_BAGGAGE_KEYS } from '@opentelemetry/baggage-span-processor';
 import { AsyncLocalStorageContextManager } from '@opentelemetry/context-async-hooks';
@@ -8,7 +8,8 @@ import { resourceFromAttributes } from '@opentelemetry/resources';
 import { NodeSDK } from '@opentelemetry/sdk-node';
 import { BatchSpanProcessor } from '@opentelemetry/sdk-trace-base';
 import { ATTR_SERVICE_NAME } from '@opentelemetry/semantic-conventions';
-import { getLogger, getTracer, HeadersScopeSchema, getRequestExecutionContext, getAgentGraphWithDefaultAgent, contextValidationMiddleware, getFullGraph, createOrGetConversation, getActiveAgentForConversation, setActiveAgentForConversation, getAgentById, handleContextResolution, createMessage, commonGetErrorResponses, createDefaultCredentialStores, CredentialStoreRegistry, listTaskIdsByContextId, getTask, getLedgerArtifacts,
+import { getLogger, getTracer, HeadersScopeSchema, getRequestExecutionContext, createApiError, getAgentGraphWithDefaultAgent, contextValidationMiddleware, getFullGraph, createOrGetConversation, getActiveAgentForConversation, setActiveAgentForConversation, getAgentById, handleContextResolution, createMessage, commonGetErrorResponses, loggerFactory, createDefaultCredentialStores, CredentialStoreRegistry, listTaskIdsByContextId, getTask, getLedgerArtifacts, getAgentGraphById, createTask, updateTask, setSpanWithError, updateConversation, handleApiError, TaskState, setActiveAgentForThread, getConversation, getRelatedAgentsForGraph, getToolsForAgent, getDataComponentsForAgent, getArtifactComponentsForAgent, validateAndGetApiKey, getProject, ContextResolver, CredentialStuffer, MCPServerType, getCredentialReference, McpClient, getContextConfigById, getFullGraphDefinition, TemplateEngine, graphHasArtifactComponents, MCPTransportType, getExternalAgent } from '@inkeep/agents-core';
+import { otel } from '@hono/otel';
 import { OpenAPIHono, createRoute, z as z$1 } from '@hono/zod-openapi';
 import { trace, propagation, context, SpanStatusCode } from '@opentelemetry/api';
 import { Hono } from 'hono';
@@ -22,7 +23,7 @@ import { streamSSE, stream } from 'hono/streaming';
 import { nanoid } from 'nanoid';
 import destr from 'destr';
 import traverse from 'traverse';
-import { createUIMessageStream, JsonToSseTransformStream, parsePartialJson, generateText, generateObject, tool, streamText } from 'ai';
+import { createUIMessageStream, JsonToSseTransformStream, parsePartialJson, generateText, generateObject, tool, streamText, streamObject } from 'ai';
 import { createAnthropic, anthropic } from '@ai-sdk/anthropic';
 import { createGoogleGenerativeAI, google } from '@ai-sdk/google';
 import { createOpenAI, openai } from '@ai-sdk/openai';
@@ -31,7 +32,6 @@ import { McpServer } from '@modelcontextprotocol/sdk/server/mcp.js';
 import { StreamableHTTPServerTransport } from '@modelcontextprotocol/sdk/server/streamableHttp.js';
 import { z as z$2 } from 'zod/v3';
 import { toReqRes, toFetchResponse } from 'fetch-to-node';
-import { otel } from '@hono/otel';
 
 var maxExportBatchSize = env.OTEL_MAX_EXPORT_BATCH_SIZE ?? (env.ENVIRONMENT === "development" ? 1 : 512);
 var otlpExporter = new OTLPTraceExporter();
@@ -965,7 +965,7 @@ async function getRegisteredAgent(executionContext, credentialStoreRegistry) {
 throw new Error("Agent ID is required");
 }
 const dbAgent = await getAgentById(dbClient_default)({
-scopes: { tenantId, projectId },
+scopes: { tenantId, projectId, graphId },
 agentId
 });
 if (!dbAgent) {
@@ -980,6 +980,38 @@ async function getRegisteredAgent(executionContext, credentialStoreRegistry) {
 apiKey
 });
 }
+async function resolveModelConfig(graphId, agent) {
+if (agent.models?.base?.model) {
+return {
+base: agent.models.base,
+structuredOutput: agent.models.structuredOutput || agent.models.base,
+summarizer: agent.models.summarizer || agent.models.base
+};
+}
+const graph = await getAgentGraphById(dbClient_default)({
+scopes: { tenantId: agent.tenantId, projectId: agent.projectId, graphId }
+});
+if (graph?.models?.base?.model) {
+return {
+base: graph.models.base,
+structuredOutput: agent.models?.structuredOutput || graph.models.structuredOutput || graph.models.base,
+summarizer: agent.models?.summarizer || graph.models.summarizer || graph.models.base
+};
+}
+const project = await getProject(dbClient_default)({
+scopes: { tenantId: agent.tenantId, projectId: agent.projectId }
+});
+if (project?.models?.base?.model) {
+return {
+base: project.models.base,
+structuredOutput: agent.models?.structuredOutput || project.models.structuredOutput || project.models.base,
+summarizer: agent.models?.summarizer || project.models.summarizer || project.models.base
+};
+}
+throw new Error(
+"Base model configuration is required. Please configure models at the project level."
+);
+}
 function agentInitializingOp(sessionId, graphId) {
 return {
 type: "agent_initializing",
@@ -998,24 +1030,19 @@ function completionOp(agentId, iterations) {
 }
 };
 }
-function errorOp(error,
+function errorOp(message, agentId, severity = "error", code) {
 return {
 type: "error",
-
-
-
-
+message,
+agent: agentId,
+severity,
+code,
+timestamp: Date.now()
 };
 }
 function generateToolId() {
 return `tool_${nanoid(8)}`;
 }
-function statusUpdateOp(ctx) {
-return {
-type: "status_update",
-ctx
-};
-}
 var logger4 = getLogger("DataComponentSchema");
 function jsonSchemaToZod(jsonSchema) {
 if (!jsonSchema || typeof jsonSchema !== "object") {
@@ -1145,6 +1172,9 @@ var _ModelFactory = class _ModelFactory {
 );
 }
 const modelSettings = config;
+if (!modelSettings.model) {
+throw new Error("Model configuration is required");
+}
 const modelString = modelSettings.model.trim();
 const { provider, modelName } = _ModelFactory.parseModelString(modelString);
 logger5.debug(
@@ -1267,7 +1297,6 @@ var _ModelFactory = class _ModelFactory {
 */
 __publicField(_ModelFactory, "SUPPORTED_PROVIDERS", ["anthropic", "openai", "google"]);
 var ModelFactory = _ModelFactory;
-var tracer = getTracer("agents-run-api");
 
 // src/utils/stream-registry.ts
 var streamHelperRegistry = /* @__PURE__ */ new Map();
@@ -1283,6 +1312,7 @@ function getStreamHelper(requestId2) {
 function unregisterStreamHelper(requestId2) {
 streamHelperRegistry.delete(requestId2);
 }
+var tracer = getTracer("agents-run-api");
 
 // src/utils/graph-session.ts
 var logger6 = getLogger("GraphSession");
@@ -1591,7 +1621,6 @@ var GraphSession = class {
 }
 this.isGeneratingUpdate = true;
 const statusUpdateState = this.statusUpdateState;
-const graphId = this.graphId;
 try {
 const streamHelper = getStreamHelper(this.sessionId);
 if (!streamHelper) {
@@ -1604,7 +1633,7 @@ var GraphSession = class {
 }
 const now = Date.now();
 const elapsedTime = now - statusUpdateState.startTime;
-let
+let summaryToSend;
 if (statusUpdateState.config.statusComponents && statusUpdateState.config.statusComponents.length > 0) {
 const result = await this.generateStructuredStatusUpdate(
 this.events.slice(statusUpdateState.lastEventCount),
@@ -1613,32 +1642,30 @@ var GraphSession = class {
 statusUpdateState.summarizerModel,
 this.previousSummaries
 );
-if (result.
-for (const
-if (!
+if (result.summaries && result.summaries.length > 0) {
+for (const summary of result.summaries) {
+if (!summary || !summary.type || !summary.data || !summary.data.label || Object.keys(summary.data).length === 0) {
 logger6.warn(
 {
 sessionId: this.sessionId,
-
+summary
 },
 "Skipping empty or invalid structured operation"
 );
 continue;
 }
-const
-type:
-
-
-
-data
-
-)
-}
+const summaryToSend2 = {
+type: summary.data.type || summary.type,
+// Preserve the actual custom type from LLM
+label: summary.data.label,
+details: Object.fromEntries(
+Object.entries(summary.data).filter(([key]) => !["label", "type"].includes(key))
+)
 };
-await streamHelper.
+await streamHelper.writeSummary(summaryToSend2);
 }
-const summaryTexts = result.
-(
+const summaryTexts = result.summaries.map(
+(summary) => JSON.stringify({ type: summary.type, data: summary.data })
 );
 this.previousSummaries.push(...summaryTexts);
 if (this.statusUpdateState) {
@@ -1655,34 +1682,20 @@ var GraphSession = class {
 this.previousSummaries
 );
 this.previousSummaries.push(summary);
-operation = statusUpdateOp({
-summary,
-eventCount: this.events.length,
-elapsedTime,
-currentPhase: "processing",
-activeAgent: "system",
-graphId,
-sessionId: this.sessionId
-});
 }
 if (this.previousSummaries.length > 3) {
 this.previousSummaries.shift();
 }
-
+{
 logger6.warn(
 {
 sessionId: this.sessionId,
-
+summaryToSend
 },
 "Skipping empty or invalid status update operation"
 );
 return;
 }
-await streamHelper.writeOperation(operation);
-if (this.statusUpdateState) {
-this.statusUpdateState.lastUpdateTime = now;
-this.statusUpdateState.lastEventCount = this.events.length;
-}
 } catch (error) {
 logger6.error(
 {
@@ -1815,7 +1828,7 @@ ${previousSummaryContext}` : ""}
 Activities:
 ${userVisibleActivities.join("\n") || "No New Activities"}
 
-
+Create a short 3-5 word label describing the ACTUAL finding. Use sentence case (only capitalize the first word and proper nouns). Examples: "Found admin permissions needed", "Identified three channel types", "OAuth token required".
 
 ${this.statusUpdateState?.config.prompt?.trim() || ""}`;
 const prompt = basePrompt;
@@ -1828,6 +1841,9 @@ ${this.statusUpdateState?.config.prompt?.trim() || ""}`;
 }
 modelToUse = this.statusUpdateState.baseModel;
 }
+if (!modelToUse) {
+throw new Error("No model configuration available");
+}
 const model = ModelFactory.createModel(modelToUse);
 const { text } = await generateText({
 model,
@@ -1937,14 +1953,16 @@ Rules:
 - Fill in data for relevant components only
 - Use 'no_relevant_updates' if nothing substantially new to report. DO NOT WRITE LABELS OR USE OTHER COMPONENTS IF YOU USE THIS COMPONENT.
 - Never repeat previous values, make every update EXTREMELY unique. If you cannot do that the update is not worth mentioning.
-- Labels MUST
+- Labels MUST be short 3-5 word phrases with ACTUAL information discovered. NEVER MAKE UP SOMETHING WITHOUT BACKING IT UP WITH ACTUAL INFORMATION.
+- Use sentence case: only capitalize the first word and proper nouns (e.g., "Admin permissions required", not "Admin Permissions Required"). ALWAYS capitalize the first word of the label.
 - DO NOT use action words like "Searching", "Processing", "Analyzing" - state what was FOUND
 - Include specific details, numbers, requirements, or insights discovered
+- Examples: "Admin permissions required", "Three OAuth steps found", "Token expires daily"
 - You are ONE unified AI system - NEVER mention agents, transfers, delegations, or routing
-- CRITICAL: NEVER use the words "transfer", "delegation", "agent", "routing", or any internal system terminology in labels
+- CRITICAL: NEVER use the words "transfer", "delegation", "agent", "routing", "artifact", or any internal system terminology in labels or any names of agents, tools, or systems.
 - Present all operations as seamless actions by a single system
 - Anonymize all internal operations so that the information appears descriptive and USER FRIENDLY. HIDE ALL INTERNAL OPERATIONS!
-- Bad examples: "Transferring to search agent", "Delegating task", "Routing request", "Processing request", or not using the no_relevant_updates
+- Bad examples: "Transferring to search agent", "continuing transfer to qa agent", "Delegating task", "Routing request", "Processing request", "Artifact found", "Artifact saved", or not using the no_relevant_updates
 - Good examples: "Slack bot needs admin privileges", "Found 3-step OAuth flow required", "Channel limit is 500 per workspace", or use the no_relevant_updates component if nothing new to report.
 
 REMEMBER YOU CAN ONLY USE 'no_relevant_updates' ALONE! IT CANNOT BE CONCATENATED WITH OTHER STATUS UPDATES!
@@ -1960,6 +1978,9 @@ ${this.statusUpdateState?.config.prompt?.trim() || ""}`;
 }
 modelToUse = this.statusUpdateState.baseModel;
 }
+if (!modelToUse) {
+throw new Error("No model configuration available");
+}
 const model = ModelFactory.createModel(modelToUse);
 const { object } = await generateObject({
 model,
@@ -1977,29 +1998,29 @@ ${this.statusUpdateState?.config.prompt?.trim() || ""}`;
 }
 });
 const result = object;
-const
+const summaries = [];
 for (const [componentId, data] of Object.entries(result)) {
 if (componentId === "no_relevant_updates") {
 continue;
 }
 if (data && typeof data === "object" && Object.keys(data).length > 0) {
-
+summaries.push({
 type: componentId,
 data
 });
 }
 }
 span.setAttributes({
-"
+"summaries.count": summaries.length,
 "user_activities.count": userVisibleActivities.length,
 "result_keys.count": Object.keys(result).length
 });
 span.setStatus({ code: SpanStatusCode.OK });
-return {
+return { summaries };
 } catch (error) {
 setSpanWithError(span, error);
 logger6.error({ error }, "Failed to generate structured update, using fallback");
-return {
+return { summaries: [] };
 } finally {
 span.end();
 }
@@ -2212,7 +2233,7 @@ ${this.statusUpdateState?.config.prompt?.trim() || ""}`;
 );
 }
 span.setAttributes({ "validation.passed": true });
-const { getFormattedConversationHistory: getFormattedConversationHistory2 } = await import('./conversations-
+const { getFormattedConversationHistory: getFormattedConversationHistory2 } = await import('./conversations-WDOIWO7W.js');
 const conversationHistory = await getFormattedConversationHistory2({
 tenantId: artifactData.tenantId,
 projectId: artifactData.projectId,
@@ -2250,6 +2271,9 @@ Make it specific and relevant.`;
 }
 modelToUse = this.statusUpdateState.baseModel;
 }
+if (!modelToUse) {
+throw new Error("No model configuration available");
+}
 const model = ModelFactory.createModel(modelToUse);
 const schema = z.object({
 name: z.string().max(50).describe("Concise, descriptive name for the artifact"),
@@ -2528,6 +2552,7 @@ var _ArtifactParser = class _ArtifactParser {
 }
 for (let i = matches.length - 1; i >= 0; i--) {
 const match = matches[i];
+if (match.index === void 0) continue;
 const startIdx = match.index;
 const textAfterMatch = text.slice(startIdx);
 if (!textAfterMatch.includes("/>")) {
@@ -2577,7 +2602,8 @@ var _ArtifactParser = class _ArtifactParser {
 taskId,
 name: artifact.name || "Processing...",
 description: artifact.description || "Name and description being generated...",
-
+type: artifact.metadata?.artifactType || artifact.artifactType,
+// Map artifactType to type for consistency
 artifactSummary: artifact.parts?.[0]?.data?.summary || {}
 };
 }
@@ -2594,10 +2620,11 @@ var _ArtifactParser = class _ArtifactParser {
 let lastIndex = 0;
 for (const match of matches) {
 const [fullMatch, artifactId, taskId] = match;
+if (match.index === void 0) continue;
 const matchStart = match.index;
 if (matchStart > lastIndex) {
 const textBefore = text.slice(lastIndex, matchStart);
-if (textBefore
+if (textBefore) {
 parts.push({ kind: "text", text: textBefore });
 }
 }
@@ -2609,7 +2636,7 @@ var _ArtifactParser = class _ArtifactParser {
 }
 if (lastIndex < text.length) {
 const remainingText = text.slice(lastIndex);
-if (remainingText
+if (remainingText) {
 parts.push({ kind: "text", text: remainingText });
 }
 }
@@ -2719,8 +2746,9 @@ __publicField(_ArtifactParser, "INCOMPLETE_ARTIFACT_REGEX", /<(a(r(t(i(f(a(c(t(:
 var ArtifactParser = _ArtifactParser;
 
 // src/utils/incremental-stream-parser.ts
-
-var
+getLogger("IncrementalStreamParser");
+var _IncrementalStreamParser = class _IncrementalStreamParser {
+// Max number of streamed component IDs to track
 constructor(streamHelper, tenantId, contextId) {
 __publicField(this, "buffer", "");
 __publicField(this, "pendingTextBuffer", "");
@@ -2730,6 +2758,9 @@ var IncrementalStreamParser = class {
 __publicField(this, "collectedParts", []);
 __publicField(this, "contextId");
 __publicField(this, "lastChunkWasToolResult", false);
+__publicField(this, "componentAccumulator", {});
+__publicField(this, "lastStreamedComponents", /* @__PURE__ */ new Map());
+__publicField(this, "componentSnapshots", /* @__PURE__ */ new Map());
 this.streamHelper = streamHelper;
 this.contextId = contextId;
 this.artifactParser = new ArtifactParser(tenantId);
@@ -2744,7 +2775,7 @@ var IncrementalStreamParser = class {
 * Process a new text chunk for text streaming (handles artifact markers)
 */
 async processTextChunk(chunk) {
-if (this.lastChunkWasToolResult && this.buffer === "" && chunk
+if (this.lastChunkWasToolResult && this.buffer === "" && chunk) {
 chunk = "\n\n" + chunk;
 this.lastChunkWasToolResult = false;
 }
@@ -2756,100 +2787,122 @@ var IncrementalStreamParser = class {
 this.buffer = parseResult.remainingBuffer;
 }
 /**
-* Process
+* Process object deltas directly from Vercel AI SDK's fullStream
+* Accumulates components and streams them when they're stable (unchanged between deltas)
 */
-async
-
-
-for (const part of parseResult.completeParts) {
-await this.streamPart(part);
+async processObjectDelta(delta) {
+if (!delta || typeof delta !== "object") {
+return;
 }
-this.
-
-
-
-
-
-
-
-
-
-
-
-
-
-const delta = part.argsTextDelta || "";
-if (jsonBuffer.length + delta.length > MAX_BUFFER_SIZE) {
-logger8.warn(
-{ bufferSize: jsonBuffer.length + delta.length, maxSize: MAX_BUFFER_SIZE },
-"JSON buffer exceeded maximum size, truncating"
-);
-jsonBuffer = jsonBuffer.slice(-MAX_BUFFER_SIZE / 2);
+this.componentAccumulator = this.deepMerge(this.componentAccumulator, delta);
+if (this.componentAccumulator.dataComponents && Array.isArray(this.componentAccumulator.dataComponents)) {
+const components = this.componentAccumulator.dataComponents;
+const currentComponentIds = new Set(components.filter((c) => c?.id).map((c) => c.id));
+for (const [componentId, snapshot] of this.componentSnapshots.entries()) {
+if (!currentComponentIds.has(componentId) && !this.lastStreamedComponents.has(componentId)) {
+try {
+const component = JSON.parse(snapshot);
+if (this.isComponentComplete(component)) {
+await this.streamComponent(component);
+}
+} catch (e) {
+}
 }
-
-
-
-
-
-
-
-
-
-
+}
+for (let i = 0; i < components.length; i++) {
+const component = components[i];
+if (!component?.id) continue;
+const componentKey = component.id;
+const hasBeenStreamed = this.lastStreamedComponents.has(componentKey);
+if (hasBeenStreamed) continue;
+const currentSnapshot = JSON.stringify(component);
+const previousSnapshot = this.componentSnapshots.get(componentKey);
+this.componentSnapshots.set(componentKey, currentSnapshot);
+if (this.componentSnapshots.size > _IncrementalStreamParser.MAX_SNAPSHOT_SIZE) {
+const firstKey = this.componentSnapshots.keys().next().value;
+if (firstKey) {
+this.componentSnapshots.delete(firstKey);
 }
-
-
-
-
-
-
-
-
-
-if (componentMatch[0].length > MAX_COMPONENT_SIZE) {
-logger8.warn(
-{
-size: componentMatch[0].length,
-maxSize: MAX_COMPONENT_SIZE
-},
-"Component exceeds size limit, skipping"
-);
-componentBuffer = "";
-continue;
-}
-try {
-const component = JSON.parse(componentMatch[0]);
-if (typeof component !== "object" || !component.id) {
-logger8.warn({ component }, "Invalid component structure, skipping");
-componentBuffer = "";
-continue;
-}
-const parts = await this.artifactParser.parseObject({
-dataComponents: [component]
-});
-for (const part2 of parts) {
-await this.streamPart(part2);
-}
-componentsStreamed++;
-componentBuffer = "";
-} catch (e) {
-logger8.debug({ error: e }, "Failed to parse component, continuing to accumulate");
-}
-}
+}
+if (component.name === "Text" && component.props?.text) {
+const previousTextContent = previousSnapshot ? JSON.parse(previousSnapshot).props?.text || "" : "";
+const currentTextContent = component.props.text || "";
+if (currentTextContent.length > previousTextContent.length) {
+const newText = currentTextContent.slice(previousTextContent.length);
+if (!this.hasStartedRole) {
+await this.streamHelper.writeRole("assistant");
+this.hasStartedRole = true;
 }
+await this.streamHelper.streamText(newText, 50);
+this.collectedParts.push({
+kind: "text",
+text: newText
+});
 }
-
+continue;
 }
-
-
-const
-
-await this.
+if (this.isComponentComplete(component)) {
+const currentPropsSnapshot = JSON.stringify(component.props);
+const previousPropsSnapshot = previousSnapshot ? JSON.stringify(JSON.parse(previousSnapshot).props) : null;
+if (previousPropsSnapshot === currentPropsSnapshot) {
+await this.streamComponent(component);
 }
 }
-break;
 }
 }
-
+}
+/**
+* Stream a component and mark it as streamed
+* Note: Text components are handled separately with incremental streaming
+*/
+async streamComponent(component) {
+const parts = await this.artifactParser.parseObject({
+dataComponents: [component]
+});
+for (const part of parts) {
+await this.streamPart(part);
+}
+this.lastStreamedComponents.set(component.id, true);
+if (this.lastStreamedComponents.size > _IncrementalStreamParser.MAX_STREAMED_SIZE) {
+const firstKey = this.lastStreamedComponents.keys().next().value;
+if (firstKey) {
+this.lastStreamedComponents.delete(firstKey);
+}
+}
+this.componentSnapshots.delete(component.id);
+}
+/**
+* Check if a component has the basic structure required for streaming
+* Requires id, name, and props object with content
+*/
+isComponentComplete(component) {
+if (!component || !component.id || !component.name) {
+return false;
+}
+if (!component.props || typeof component.props !== "object") {
+return false;
+}
+const isArtifact = component.name === "Artifact" || component.props.artifact_id && component.props.task_id;
+if (isArtifact) {
+return Boolean(component.props.artifact_id && component.props.task_id);
+}
+return true;
+}
+/**
+* Deep merge helper for object deltas
+*/
+deepMerge(target, source) {
+if (!source) return target;
+if (!target) return source;
+const result = { ...target };
+for (const key in source) {
+if (source[key] && typeof source[key] === "object" && !Array.isArray(source[key])) {
+result[key] = this.deepMerge(target[key], source[key]);
+} else {
+result[key] = source[key];
+}
+}
+return result;
 }
 /**
 * Legacy method for backward compatibility - defaults to text processing
@@ -2861,15 +2914,40 @@ var IncrementalStreamParser = class {
 * Process any remaining buffer content at the end of stream
 */
 async finalize() {
-if (this.
+if (this.componentAccumulator.dataComponents && Array.isArray(this.componentAccumulator.dataComponents)) {
+const components = this.componentAccumulator.dataComponents;
+for (let i = 0; i < components.length; i++) {
+const component = components[i];
+if (!component?.id) continue;
+const componentKey = component.id;
+const hasBeenStreamed = this.lastStreamedComponents.has(componentKey);
+if (!hasBeenStreamed && this.isComponentComplete(component) && component.name !== "Text") {
+const parts = await this.artifactParser.parseObject({
+dataComponents: [component]
+});
+for (const part of parts) {
+await this.streamPart(part);
+}
+this.lastStreamedComponents.set(componentKey, true);
+if (this.lastStreamedComponents.size > _IncrementalStreamParser.MAX_STREAMED_SIZE) {
+const firstKey = this.lastStreamedComponents.keys().next().value;
+if (firstKey) {
+this.lastStreamedComponents.delete(firstKey);
+}
+}
+this.componentSnapshots.delete(componentKey);
+}
+}
+}
+if (this.buffer) {
 const part = {
 kind: "text",
-text: this.buffer
+text: this.buffer
 };
 await this.streamPart(part);
 }
-if (this.pendingTextBuffer
-const cleanedText = this.pendingTextBuffer.replace(/<\/?artifact:ref(?:\s[^>]*)?>\/?>/g, "").replace(/<\/?artifact(?:\s[^>]*)?>\/?>/g, "").replace(/<\/(?:\w+:)?artifact>/g, "")
+if (this.pendingTextBuffer) {
+const cleanedText = this.pendingTextBuffer.replace(/<\/?artifact:ref(?:\s[^>]*)?>\/?>/g, "").replace(/<\/?artifact(?:\s[^>]*)?>\/?>/g, "").replace(/<\/(?:\w+:)?artifact>/g, "");
 if (cleanedText) {
 this.collectedParts.push({
 kind: "text",
@@ -2879,6 +2957,9 @@ var IncrementalStreamParser = class {
 }
 this.pendingTextBuffer = "";
 }
+this.componentSnapshots.clear();
+this.lastStreamedComponents.clear();
+this.componentAccumulator = {};
 }
 /**
 * Get all collected parts for building the final response
@@ -2925,30 +3006,6 @@ var IncrementalStreamParser = class {
 remainingBuffer: ""
 };
 }
-/**
-* Parse buffer for complete JSON objects with artifact references (for object streaming)
-*/
-async parseObjectBuffer() {
-const completeParts = [];
-try {
-const parsed = JSON.parse(this.buffer);
-const parts = await this.artifactParser.parseObject(parsed);
-return {
-completeParts: parts,
-remainingBuffer: ""
-};
-} catch {
-const { complete, remaining } = this.artifactParser.parsePartialJSON(this.buffer);
-for (const obj of complete) {
-const parts = await this.artifactParser.parseObject(obj);
-completeParts.push(...parts);
-}
-return {
-completeParts,
-remainingBuffer: remaining
-};
-}
-}
 /**
 * Check if text might be the start of an artifact marker
 */
@@ -2969,7 +3026,7 @@ var IncrementalStreamParser = class {
 this.pendingTextBuffer += part.text;
 if (!this.artifactParser.hasIncompleteArtifact(this.pendingTextBuffer)) {
 const cleanedText = this.pendingTextBuffer.replace(/<\/?artifact:ref(?:\s[^>]*)?>\/?>/g, "").replace(/<\/?artifact(?:\s[^>]*)?>\/?>/g, "").replace(/<\/(?:\w+:)?artifact>/g, "");
-if (cleanedText
+if (cleanedText) {
 await this.streamHelper.streamText(cleanedText, 50);
 }
 this.pendingTextBuffer = "";
@@ -2977,7 +3034,7 @@ var IncrementalStreamParser = class {
 } else if (part.kind === "data" && part.data) {
 if (this.pendingTextBuffer) {
 const cleanedText = this.pendingTextBuffer.replace(/<\/?artifact:ref(?:\s[^>]*)?>\/?>/g, "").replace(/<\/?artifact(?:\s[^>]*)?>\/?>/g, "").replace(/<\/(?:\w+:)?artifact>/g, "");
-if (cleanedText
+if (cleanedText) {
 await this.streamHelper.streamText(cleanedText, 50);
 }
 this.pendingTextBuffer = "";
@@ -2991,6 +3048,11 @@ var IncrementalStreamParser = class {
 }
 }
 };
+// Memory management constants
+__publicField(_IncrementalStreamParser, "MAX_SNAPSHOT_SIZE", 100);
+// Max number of snapshots to keep
+__publicField(_IncrementalStreamParser, "MAX_STREAMED_SIZE", 1e3);
+var IncrementalStreamParser = _IncrementalStreamParser;
 
 // src/utils/response-formatter.ts
 var logger9 = getLogger("ResponseFormatter");
@@ -4377,7 +4439,8 @@ function createDelegateToAgentTool({
 const externalAgent = await getExternalAgent(dbClient_default)({
 scopes: {
 tenantId,
-projectId
+projectId,
+graphId
 },
 agentId: delegateConfig.config.id
 });
@@ -4979,6 +5042,23 @@ var Agent = class {
 __publicField(this, "credentialStoreRegistry");
 this.artifactComponents = config.artifactComponents || [];
 let processedDataComponents = config.dataComponents || [];
+if (processedDataComponents.length > 0) {
+processedDataComponents.push({
+id: "text-content",
+name: "Text",
+description: "Natural conversational text for the user - write naturally without mentioning technical details. Avoid redundancy and repetition with data components.",
+props: {
+type: "object",
+properties: {
+text: {
+type: "string",
+description: "Natural conversational text - respond as if having a normal conversation, never mention JSON, components, schemas, or technical implementation. Avoid redundancy and repetition with data components."
+}
+},
+required: ["text"]
+}
+});
+}
 if (this.artifactComponents.length > 0 && config.dataComponents && config.dataComponents.length > 0) {
 processedDataComponents = [
 ArtifactReferenceSchema.getDataComponent(config.tenantId, config.projectId),
@@ -5249,8 +5329,12 @@ var Agent = class {
 async getMcpTool(tool4) {
 const credentialReferenceId = tool4.credentialReferenceId;
 const toolsForAgent = await getToolsForAgent(dbClient_default)({
-scopes: {
-
+scopes: {
+tenantId: this.config.tenantId,
+projectId: this.config.projectId,
+graphId: this.config.graphId,
+agentId: this.config.id
+}
 });
 const selectedTools = toolsForAgent.data.find((t) => t.toolId === tool4.id)?.selectedTools || void 0;
 let serverConfig;
@@ -5397,9 +5481,9 @@ var Agent = class {
 const graphDefinition = await getFullGraphDefinition(dbClient_default)({
 scopes: {
 tenantId: this.config.tenantId,
-projectId: this.config.projectId
-
-
+projectId: this.config.projectId,
+graphId: this.config.graphId
+}
 });
 return graphDefinition?.graphPrompt || void 0;
 } catch (error) {
@@ -5421,14 +5505,16 @@ var Agent = class {
 const graphDefinition = await getFullGraphDefinition(dbClient_default)({
 scopes: {
 tenantId: this.config.tenantId,
-projectId: this.config.projectId
-
-
+projectId: this.config.projectId,
+graphId: this.config.graphId
+}
 });
 if (!graphDefinition) {
 return false;
 }
-return
+return Object.values(graphDefinition.agents).some(
+(agent) => "artifactComponents" in agent && agent.artifactComponents && agent.artifactComponents.length > 0
+);
 } catch (error) {
 logger15.warn(
 {
@@ -5456,7 +5542,8 @@ Key requirements:
 - Mix artifact references throughout your dataComponents array
 - Each artifact reference must use EXACT IDs from tool outputs
 - Reference artifacts that directly support the adjacent information
-- Follow the pattern: Data \u2192 Supporting Artifact \u2192 Next Data \u2192 Next Artifact
+- Follow the pattern: Data \u2192 Supporting Artifact \u2192 Next Data \u2192 Next Artifact
+- IMPORTANT: In Text components, write naturally as if having a conversation - do NOT mention components, schemas, JSON, structured data, or any technical implementation details`;
 }
 if (hasDataComponents && !hasArtifactComponents) {
 return `Generate the final structured JSON response using the configured data components. Organize the information from the research above into the appropriate structured format based on the available component schemas.
@@ -5464,7 +5551,8 @@ Key requirements:
 Key requirements:
 - Use the exact component structure and property names
 - Fill in all relevant data from the research
-- Ensure data is organized logically and completely
+- Ensure data is organized logically and completely
+- IMPORTANT: In Text components, write naturally as if having a conversation - do NOT mention components, schemas, JSON, structured data, or any technical implementation details`;
 }
 if (!hasDataComponents && hasArtifactComponents) {
 return `Generate the final structured response with artifact references based on the research above. Use the artifact reference component to cite relevant information with exact artifact_id and task_id values from the tool outputs.
@@ -5474,7 +5562,7 @@ Key requirements:
 - Reference artifacts that support your response
 - Never make up or modify artifact IDs`;
 }
-return `Generate the final response based on the research above.`;
+return `Generate the final response based on the research above. Write naturally as if having a conversation.`;
 }
 async buildSystemPrompt(runtimeContext, excludeDataComponents = false) {
 const conversationId = runtimeContext?.metadata?.conversationId || runtimeContext?.contextId;
@@ -5625,9 +5713,9 @@ Key requirements:
 return await graphHasArtifactComponents(dbClient_default)({
 scopes: {
 tenantId: this.config.tenantId,
-projectId: this.config.projectId
-
-
+projectId: this.config.projectId,
+graphId: this.config.graphId
+}
 });
 } catch (error) {
 logger15.error(
@@ -5973,35 +6061,94 @@ ${output}`;
 this.getStructuredOutputModel()
 );
 const phase2TimeoutMs = structuredModelSettings.maxDuration ? structuredModelSettings.maxDuration * 1e3 : CONSTANTS.PHASE_2_TIMEOUT_MS;
-const
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
+const shouldStreamPhase2 = this.getStreamingHelper();
+if (shouldStreamPhase2) {
+const streamResult = streamObject({
+...structuredModelSettings,
+messages: [
+{ role: "user", content: userMessage },
+...reasoningFlow,
+{
+role: "user",
+content: await this.buildPhase2SystemPrompt()
+}
+],
+schema: z.object({
+dataComponents: z.array(dataComponentsSchema)
+}),
+experimental_telemetry: {
+isEnabled: true,
+functionId: this.config.id,
+recordInputs: true,
+recordOutputs: true,
+metadata: {
+phase: "structured_generation"
+}
+},
+abortSignal: AbortSignal.timeout(phase2TimeoutMs)
+});
+const streamHelper = this.getStreamingHelper();
+if (!streamHelper) {
+throw new Error("Stream helper is unexpectedly undefined in streaming context");
+}
+const parser = new IncrementalStreamParser(
+streamHelper,
+this.config.tenantId,
+contextId
+);
+for await (const delta of streamResult.partialObjectStream) {
+if (delta) {
+await parser.processObjectDelta(delta);
 }
-}
-
-
-
-
-
-
-
+}
+await parser.finalize();
+const structuredResponse = await streamResult;
+const collectedParts = parser.getCollectedParts();
+if (collectedParts.length > 0) {
+response.formattedContent = {
+parts: collectedParts.map((part) => ({
+kind: part.kind,
+...part.kind === "text" && { text: part.text },
+...part.kind === "data" && { data: part.data }
+}))
+};
+}
+response = {
+...response,
+object: structuredResponse.object
+};
+textResponse = JSON.stringify(structuredResponse.object, null, 2);
+} else {
+const structuredResponse = await generateObject({
+...structuredModelSettings,
+messages: [
+{ role: "user", content: userMessage },
+...reasoningFlow,
+{
+role: "user",
+content: await this.buildPhase2SystemPrompt()
+}
+],
+schema: z.object({
+dataComponents: z.array(dataComponentsSchema)
+}),
+experimental_telemetry: {
+isEnabled: true,
+functionId: this.config.id,
+recordInputs: true,
+recordOutputs: true,
+metadata: {
+phase: "structured_generation"
+}
+},
+abortSignal: AbortSignal.timeout(phase2TimeoutMs)
+});
+response = {
+...response,
+object: structuredResponse.object
+};
+textResponse = JSON.stringify(structuredResponse.object, null, 2);
+}
 } else {
 textResponse = response.text || "";
 }
@@ -6046,39 +6193,6 @@ ${output}`;
 });
 }
 };
-async function resolveModelConfig(graphId, agent) {
-if (agent.models?.base?.model) {
-return {
-base: agent.models.base,
-structuredOutput: agent.models.structuredOutput || agent.models.base,
-summarizer: agent.models.summarizer || agent.models.base
-};
-}
-const graph = await getAgentGraph(dbClient_default)({
-scopes: { tenantId: agent.tenantId, projectId: agent.projectId },
-graphId
-});
-if (graph?.models?.base?.model) {
-return {
-base: graph.models.base,
-structuredOutput: agent.models?.structuredOutput || graph.models.structuredOutput || graph.models.base,
-summarizer: agent.models?.summarizer || graph.models.summarizer || graph.models.base
-};
-}
-const project = await getProject(dbClient_default)({
-scopes: { tenantId: agent.tenantId, projectId: agent.projectId }
-});
-if (project?.models?.base?.model) {
-return {
-base: project.models.base,
-structuredOutput: agent.models?.structuredOutput || project.models.structuredOutput || project.models.base,
-summarizer: agent.models?.summarizer || project.models.summarizer || project.models.base
-};
-}
-throw new Error(
-"Base model configuration is required. Please configure models at the project level."
-);
-}
 
 // src/agents/generateTaskHandler.ts
 function parseEmbeddedJson(data) {
@@ -6114,31 +6228,34 @@ var createTaskHandler = (config, credentialStoreRegistry) => {
 getRelatedAgentsForGraph(dbClient_default)({
 scopes: {
 tenantId: config.tenantId,
-projectId: config.projectId
+projectId: config.projectId,
+graphId: config.graphId
 },
-graphId: config.graphId,
 agentId: config.agentId
 }),
 getToolsForAgent(dbClient_default)({
 scopes: {
 tenantId: config.tenantId,
-projectId: config.projectId
-
-
+projectId: config.projectId,
+graphId: config.graphId,
+agentId: config.agentId
+}
 }),
 getDataComponentsForAgent(dbClient_default)({
 scopes: {
 tenantId: config.tenantId,
-projectId: config.projectId
-
-
+projectId: config.projectId,
+graphId: config.graphId,
+agentId: config.agentId
+}
 }),
 getArtifactComponentsForAgent(dbClient_default)({
 scopes: {
 tenantId: config.tenantId,
-projectId: config.projectId
-
-
+projectId: config.projectId,
+graphId: config.graphId,
+agentId: config.agentId
+}
 })
 ]);
 logger16.info({ toolsForAgent, internalRelations, externalRelations }, "agent stuff");
@@ -6146,13 +6263,16 @@ var createTaskHandler = (config, credentialStoreRegistry) => {
 internalRelations.map(async (relation) => {
 try {
 const relatedAgent = await getAgentById(dbClient_default)({
-scopes: {
+scopes: {
+tenantId: config.tenantId,
+projectId: config.projectId,
+graphId: config.graphId
+},
 agentId: relation.id
 });
 if (relatedAgent) {
 const relatedAgentRelations = await getRelatedAgentsForGraph(dbClient_default)({
-scopes: { tenantId: config.tenantId, projectId: config.projectId },
-graphId: config.graphId,
+scopes: { tenantId: config.tenantId, projectId: config.projectId, graphId: config.graphId },
 agentId: relation.id
 });
 const enhancedDescription = generateDescriptionWithTransfers(
@@ -6375,16 +6495,17 @@ var createTaskHandlerConfig = async (params) => {
 const agent = await getAgentById(dbClient_default)({
 scopes: {
 tenantId: params.tenantId,
-projectId: params.projectId
+projectId: params.projectId,
+graphId: params.graphId
 },
 agentId: params.agentId
 });
-const agentGraph = await
+const agentGraph = await getAgentGraphById(dbClient_default)({
 scopes: {
 tenantId: params.tenantId,
-projectId: params.projectId
-
-
+projectId: params.projectId,
+graphId: params.graphId
+}
 });
 if (!agent) {
 throw new Error(`Agent not found: ${params.agentId}`);
@@ -6423,10 +6544,14 @@ async function hydrateGraph({
 apiKey
 }) {
 try {
+if (!dbGraph.defaultAgentId) {
+throw new Error(`Graph ${dbGraph.id} does not have a default agent configured`);
+}
 const defaultAgent = await getAgentById(dbClient_default)({
 scopes: {
 tenantId: dbGraph.tenantId,
-projectId: dbGraph.projectId
+projectId: dbGraph.projectId,
+graphId: dbGraph.id
 },
 agentId: dbGraph.defaultAgentId
 });
@@ -6481,7 +6606,7 @@ async function hydrateGraph({
 }
 async function getRegisteredGraph(executionContext) {
 const { tenantId, projectId, graphId, baseUrl, apiKey } = executionContext;
-const dbGraph = await
+const dbGraph = await getAgentGraphById(dbClient_default)({ scopes: { tenantId, projectId, graphId } });
 if (!dbGraph) {
 return null;
 }
@@ -6539,6 +6664,7 @@ app.openapi(
 );
 const executionContext = getRequestExecutionContext(c);
 const { tenantId, projectId, graphId, agentId } = executionContext;
+console.dir("executionContext", executionContext);
 if (agentId) {
 logger17.info(
 {
@@ -6554,7 +6680,10 @@ app.openapi(
 const agent = await getRegisteredAgent(executionContext, credentialStores);
 logger17.info({ agent }, "agent registered: well-known agent.json");
 if (!agent) {
-
+throw createApiError({
+code: "not_found",
+message: "Agent not found"
+});
 }
 return c.json(agent.agentCard);
 } else {
@@ -6569,7 +6698,10 @@ app.openapi(
 );
 const graph = await getRegisteredGraph(executionContext);
 if (!graph) {
-
+throw createApiError({
+code: "not_found",
+message: "Graph not found"
+});
 }
 return c.json(graph.agentCard);
 }
@@ -6626,8 +6758,7 @@ app.post("/a2a", async (c) => {
 "graph-level a2a endpoint"
 );
 const graph = await getAgentGraphWithDefaultAgent(dbClient_default)({
-scopes: { tenantId, projectId }
-graphId
+scopes: { tenantId, projectId, graphId }
 });
 if (!graph) {
 return c.json(
@@ -6639,6 +6770,16 @@ app.post("/a2a", async (c) => {
 404
 );
 }
+if (!graph.defaultAgentId) {
+return c.json(
+{
+jsonrpc: "2.0",
+error: { code: -32004, message: "Graph does not have a default agent configured" },
+id: null
+},
+400
+);
+}
 executionContext.agentId = graph.defaultAgentId;
 const credentialStores = c.get("credentialStores");
 const defaultAgent = await getRegisteredAgent(executionContext, credentialStores);
@@ -6683,7 +6824,7 @@ var SSEStreamHelper = class {
 this.timestamp = timestamp;
 // Stream queuing for proper event ordering
 __publicField(this, "isTextStreaming", false);
-__publicField(this, "
+__publicField(this, "queuedEvents", []);
 }
 /**
 * Write the initial role message
@@ -6748,9 +6889,10 @@ var SSEStreamHelper = class {
 await this.writeContent(JSON.stringify(data));
 }
 /**
-* Write error message
+* Write error message or error event
 */
-async writeError(
+async writeError(error) {
+const errorMessage = typeof error === "string" ? error : error.message;
 await this.writeContent(`
 
 ${errorMessage}`);
@@ -6774,22 +6916,6 @@ ${errorMessage}`);
 })
 });
 }
-/**
-* Write the final [DONE] message
-*/
-async writeDone() {
-await this.stream.writeSSE({
-data: "[DONE]"
-});
-}
-/**
-* Complete the stream with finish reason and done message
-*/
-async complete(finishReason = "stop") {
-await this.flushQueuedOperations();
-await this.writeCompletion(finishReason);
-await this.writeDone();
-}
 async writeData(type, data) {
 await this.stream.writeSSE({
 data: JSON.stringify({
@@ -6808,16 +6934,23 @@ ${errorMessage}`);
|
|
|
6808
6934
|
})
|
|
6809
6935
|
});
|
|
6810
6936
|
}
|
|
6811
|
-
async
|
|
6812
|
-
if (
|
|
6813
|
-
|
|
6814
|
-
type:
|
|
6815
|
-
|
|
6816
|
-
|
|
6817
|
-
|
|
6937
|
+
async writeSummary(summary) {
|
|
6938
|
+
if (this.isTextStreaming) {
|
|
6939
|
+
this.queuedEvents.push({
|
|
6940
|
+
type: "data-summary",
|
|
6941
|
+
event: summary
|
|
6942
|
+
});
|
|
6943
|
+
return;
|
|
6818
6944
|
}
|
|
6945
|
+
await this.flushQueuedOperations();
|
|
6946
|
+
await this.writeData("data-summary", summary);
|
|
6947
|
+
}
|
|
6948
|
+
async writeOperation(operation) {
|
|
6819
6949
|
if (this.isTextStreaming) {
|
|
6820
|
-
this.
|
|
6950
|
+
this.queuedEvents.push({
|
|
6951
|
+
type: "data-operation",
|
|
6952
|
+
event: operation
|
|
6953
|
+
});
|
|
6821
6954
|
return;
|
|
6822
6955
|
}
|
|
6823
6956
|
await this.flushQueuedOperations();
|
|
@@ -6827,15 +6960,31 @@ ${errorMessage}`);
|
|
|
6827
6960
|
* Flush all queued operations in order after text streaming completes
|
|
6828
6961
|
*/
|
|
6829
6962
|
async flushQueuedOperations() {
|
|
6830
|
-
if (this.
|
|
6963
|
+
if (this.queuedEvents.length === 0) {
|
|
6831
6964
|
return;
|
|
6832
6965
|
}
|
|
6833
|
-
const
|
|
6834
|
-
this.
|
|
6835
|
-
for (const
|
|
6836
|
-
await this.writeData(
|
|
6966
|
+
const eventsToFlush = [...this.queuedEvents];
|
|
6967
|
+
this.queuedEvents = [];
|
|
6968
|
+
for (const event of eventsToFlush) {
|
|
6969
|
+
await this.writeData(event.type, event.event);
|
|
6837
6970
|
}
|
|
6838
6971
|
}
|
|
6972
|
+
/**
|
|
6973
|
+
* Write the final [DONE] message
|
|
6974
|
+
*/
|
|
6975
|
+
async writeDone() {
|
|
6976
|
+
await this.stream.writeSSE({
|
|
6977
|
+
data: "[DONE]"
|
|
6978
|
+
});
|
|
6979
|
+
}
|
|
6980
|
+
/**
|
|
6981
|
+
* Complete the stream with finish reason and done message
|
|
6982
|
+
*/
|
|
6983
|
+
async complete(finishReason = "stop") {
|
|
6984
|
+
await this.flushQueuedOperations();
|
|
6985
|
+
await this.writeCompletion(finishReason);
|
|
6986
|
+
await this.writeDone();
|
|
6987
|
+
}
|
|
6839
6988
|
};
|
|
6840
6989
|
function createSSEStreamHelper(stream2, requestId2, timestamp) {
|
|
6841
6990
|
return new SSEStreamHelper(stream2, requestId2, timestamp);
|
|
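Editor's note: the renamed queuedEvents buffer generalizes the old operation queue. While text is streaming, data-summary and data-operation events are held and then flushed in order once the text block finishes. A standalone sketch of that ordering logic (names mirror the diff; the writer is a stub supplied by the caller):

    // Minimal model of the queue-then-flush behaviour in SSEStreamHelper.
    class EventQueueSketch {
      constructor(write) {
        this.write = write;            // (type, event) => void
        this.isTextStreaming = false;
        this.queuedEvents = [];
      }
      emit(type, event) {
        if (this.isTextStreaming) {
          this.queuedEvents.push({ type, event }); // hold until text completes
          return;
        }
        this.flush();
        this.write(type, event);
      }
      flush() {
        const eventsToFlush = [...this.queuedEvents];
        this.queuedEvents = [];
        for (const e of eventsToFlush) this.write(e.type, e.event);
      }
    }

    // Usage: events emitted mid-text are delivered after the text block, in order.
    const out = [];
    const q = new EventQueueSketch((type) => out.push(type));
    q.isTextStreaming = true;
    q.emit("data-summary", { id: "s1" });
    q.emit("data-operation", { id: "o1" });
    q.isTextStreaming = false;
    q.flush(); // out: ["data-summary", "data-operation"]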
@@ -6855,7 +7004,7 @@ var _VercelDataStreamHelper = class _VercelDataStreamHelper {
  __publicField(this, "isCompleted", false);
  // Stream queuing for proper event ordering
  __publicField(this, "isTextStreaming", false);
- __publicField(this, "
+ __publicField(this, "queuedEvents", []);
  // Timing tracking for text sequences (text-end to text-start gap)
  __publicField(this, "lastTextEndTimestamp", 0);
  __publicField(this, "TEXT_GAP_THRESHOLD", 50);
@@ -6967,15 +7116,24 @@ var _VercelDataStreamHelper = class _VercelDataStreamHelper {
  data
  });
  }
- async writeError(
+ async writeError(error) {
  if (this.isCompleted) {
  console.warn("Attempted to write error to completed stream");
  return;
  }
-
-
-
-
+ if (typeof error === "string") {
+ this.writer.write({
+ type: "error",
+ message: error,
+ severity: "error",
+ timestamp: Date.now()
+ });
+ } else {
+ this.writer.write({
+ ...error,
+ type: "error"
+ });
+ }
  }
  async streamData(data) {
  await this.writeContent(JSON.stringify(data));
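Editor's note: writeError now accepts either a plain string or an error-shaped object; strings are wrapped into a { type, message, severity, timestamp } event, while objects are spread and forced to type "error". A small sketch of that normalization, assuming only what the diff shows about the event shape:

    // Normalizes the argument the way the 0.3.0 helpers do before writing it.
    function toErrorEvent(error) {
      if (typeof error === "string") {
        return { type: "error", message: error, severity: "error", timestamp: Date.now() };
      }
      return { ...error, type: "error" };
    }

    console.log(toErrorEvent("boom"));
    console.log(toErrorEvent({ message: "boom", code: "agent_failed" }));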
@@ -6987,20 +7145,6 @@ var _VercelDataStreamHelper = class _VercelDataStreamHelper {
  }
  this.writer.merge(stream2);
  }
- async writeCompletion(_finishReason = "stop") {
- }
- async writeDone() {
- }
- /**
- * Complete the stream and clean up all memory
- * This is the primary cleanup point to prevent memory leaks between requests
- */
- async complete() {
- if (this.isCompleted) return;
- await this.flushQueuedOperations();
- this.isCompleted = true;
- this.cleanup();
- }
  /**
  * Clean up all memory allocations
  * Should be called when the stream helper is no longer needed
@@ -7014,7 +7158,7 @@ var _VercelDataStreamHelper = class _VercelDataStreamHelper {
  this.sentItems.clear();
  this.completedItems.clear();
  this.textId = null;
- this.
+ this.queuedEvents = [];
  this.isTextStreaming = false;
  }
  /**
@@ -7080,7 +7224,9 @@ var _VercelDataStreamHelper = class _VercelDataStreamHelper {
  if (this.writer && !this.isCompleted) {
  this.writer.write({
  type: "error",
-
+ message: `Stream terminated: ${reason}`,
+ severity: "error",
+ timestamp: Date.now()
  });
  }
  } catch (e) {
@@ -7103,23 +7249,33 @@ var _VercelDataStreamHelper = class _VercelDataStreamHelper {
  isCompleted: this.isCompleted
  };
  }
+ async writeSummary(summary) {
+ if (this.isCompleted) {
+ console.warn("Attempted to write summary to completed stream");
+ return;
+ }
+ const now = Date.now();
+ const gapFromLastTextEnd = this.lastTextEndTimestamp > 0 ? now - this.lastTextEndTimestamp : Number.MAX_SAFE_INTEGER;
+ if (this.isTextStreaming || gapFromLastTextEnd < this.TEXT_GAP_THRESHOLD) {
+ this.queuedEvents.push({ type: "data-summary", event: summary });
+ return;
+ }
+ await this.flushQueuedOperations();
+ await this.writer.write({
+ id: "id" in summary ? summary.id : void 0,
+ type: "data-summary",
+ data: summary
+ });
+ }
  async writeOperation(operation) {
  if (this.isCompleted) {
  console.warn("Attempted to write operation to completed stream");
  return;
  }
- if (operation.type === "status_update" && operation.ctx.label) {
- operation = {
- type: operation.type,
- label: operation.ctx.label,
- // Preserve the label for the UI
- ctx: operation.ctx.data
- };
- }
  const now = Date.now();
  const gapFromLastTextEnd = this.lastTextEndTimestamp > 0 ? now - this.lastTextEndTimestamp : Number.MAX_SAFE_INTEGER;
  if (this.isTextStreaming || gapFromLastTextEnd < this.TEXT_GAP_THRESHOLD) {
- this.
+ this.queuedEvents.push({ type: "data-operation", event: operation });
  return;
  }
  await this.flushQueuedOperations();
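Editor's note: the Vercel data-stream helper applies the same queueing to summaries, and additionally treats an event arriving within TEXT_GAP_THRESHOLD (50 ms) of the last text-end as still belonging to the text sequence. A pure-function sketch of that decision:

    // Returns true when the event should be queued rather than written immediately.
    // Mirrors the check in writeSummary/writeOperation; the 50 ms default matches the diff.
    function shouldQueue({ isTextStreaming, lastTextEndTimestamp, now = Date.now(), threshold = 50 }) {
      const gapFromLastTextEnd =
        lastTextEndTimestamp > 0 ? now - lastTextEndTimestamp : Number.MAX_SAFE_INTEGER;
      return isTextStreaming || gapFromLastTextEnd < threshold;
    }

    console.log(shouldQueue({ isTextStreaming: false, lastTextEndTimestamp: Date.now() - 10 }));  // true
    console.log(shouldQueue({ isTextStreaming: false, lastTextEndTimestamp: Date.now() - 500 })); // false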
@@ -7133,19 +7289,33 @@ var _VercelDataStreamHelper = class _VercelDataStreamHelper {
  * Flush all queued operations in order after text streaming completes
  */
  async flushQueuedOperations() {
- if (this.
+ if (this.queuedEvents.length === 0) {
  return;
  }
- const
- this.
- for (const
+ const eventsToFlush = [...this.queuedEvents];
+ this.queuedEvents = [];
+ for (const event of eventsToFlush) {
  this.writer.write({
- id: "id" in
- type:
- data:
+ id: "id" in event.event ? event.event.id : void 0,
+ type: event.type,
+ data: event.event
  });
  }
  }
+ async writeCompletion(_finishReason = "stop") {
+ }
+ async writeDone() {
+ }
+ /**
+ * Complete the stream and clean up all memory
+ * This is the primary cleanup point to prevent memory leaks between requests
+ */
+ async complete() {
+ if (this.isCompleted) return;
+ await this.flushQueuedOperations();
+ this.isCompleted = true;
+ this.cleanup();
+ }
  };
  // Memory management - focused on connection completion cleanup
  __publicField(_VercelDataStreamHelper, "MAX_BUFFER_SIZE", 5 * 1024 * 1024);
@@ -7158,6 +7328,7 @@ var MCPStreamHelper = class {
  __publicField(this, "capturedText", "");
  __publicField(this, "capturedData", []);
  __publicField(this, "capturedOperations", []);
+ __publicField(this, "capturedSummaries", []);
  __publicField(this, "hasError", false);
  __publicField(this, "errorMessage", "");
  __publicField(this, "sessionId");
@@ -7176,18 +7347,27 @@ var MCPStreamHelper = class {
  async streamData(data) {
  this.capturedData.push(data);
  }
+ async streamSummary(summary) {
+ this.capturedSummaries.push(summary);
+ }
+ async streamOperation(operation) {
+ this.capturedOperations.push(operation);
+ }
  async writeData(_type, data) {
  this.capturedData.push(data);
  }
- async
- this.
- this.errorMessage = errorMessage;
- }
- async complete() {
+ async writeSummary(summary) {
+ this.capturedSummaries.push(summary);
  }
  async writeOperation(operation) {
  this.capturedOperations.push(operation);
  }
+ async writeError(error) {
+ this.hasError = true;
+ this.errorMessage = typeof error === "string" ? error : error.message;
+ }
+ async complete() {
+ }
  /**
  * Get the captured response for MCP tool result
  */
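Editor's note: for MCP responses the helper does not stream at all; it captures text, data, operations and (new in 0.3.0) summaries, plus a normalized error message, so they can be folded into a single tool result. A reduced, capture-only sketch (field names follow the diff; the final result shape is assembled elsewhere in the bundle):

    class CaptureSketch {
      constructor() {
        this.capturedSummaries = [];
        this.capturedOperations = [];
        this.hasError = false;
        this.errorMessage = "";
      }
      writeSummary(summary) { this.capturedSummaries.push(summary); }
      writeOperation(op) { this.capturedOperations.push(op); }
      writeError(error) {
        this.hasError = true;
        this.errorMessage = typeof error === "string" ? error : error.message;
      }
    }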
@@ -7204,6 +7384,8 @@ var MCPStreamHelper = class {
  function createMCPStreamHelper() {
  return new MCPStreamHelper();
  }
+
+ // src/handlers/executionHandler.ts
  var logger19 = getLogger("ExecutionHandler");
  var ExecutionHandler = class {
  constructor() {
@@ -7232,7 +7414,7 @@ var ExecutionHandler = class {
  logger19.info({ sessionId: requestId2, graphId }, "Created GraphSession for message execution");
  let graphConfig = null;
  try {
- graphConfig = await getFullGraph(dbClient_default)({ scopes: { tenantId, projectId
+ graphConfig = await getFullGraph(dbClient_default)({ scopes: { tenantId, projectId, graphId } });
  if (graphConfig?.statusUpdates && graphConfig.statusUpdates.enabled !== false) {
  graphSessionManager.initializeStatusUpdates(
  requestId2,
@@ -7386,7 +7568,6 @@ var ExecutionHandler = class {
  if (errorCount >= this.MAX_ERRORS) {
  const errorMessage2 = `Maximum error limit (${this.MAX_ERRORS}) reached`;
  logger19.error({ maxErrors: this.MAX_ERRORS, errorCount }, errorMessage2);
- await sseHelper.writeError(errorMessage2);
  await sseHelper.writeOperation(errorOp(errorMessage2, currentAgentId || "system"));
  if (task) {
  await updateTask(dbClient_default)({
@@ -7527,7 +7708,6 @@ var ExecutionHandler = class {
  if (errorCount >= this.MAX_ERRORS) {
  const errorMessage2 = `Maximum error limit (${this.MAX_ERRORS}) reached`;
  logger19.error({ maxErrors: this.MAX_ERRORS, errorCount }, errorMessage2);
- await sseHelper.writeError(errorMessage2);
  await sseHelper.writeOperation(errorOp(errorMessage2, currentAgentId || "system"));
  if (task) {
  await updateTask(dbClient_default)({
@@ -7549,7 +7729,6 @@ var ExecutionHandler = class {
  }
  const errorMessage = `Maximum transfer limit (${maxTransfers}) reached without completion`;
  logger19.error({ maxTransfers, iterations }, errorMessage);
- await sseHelper.writeError(errorMessage);
  await sseHelper.writeOperation(errorOp(errorMessage, currentAgentId || "system"));
  if (task) {
  await updateTask(dbClient_default)({
@@ -7570,8 +7749,7 @@ var ExecutionHandler = class {
  } catch (error) {
  logger19.error({ error }, "Error in execution handler");
  const errorMessage = error instanceof Error ? error.message : "Unknown execution error";
- await sseHelper.
- await sseHelper.writeOperation(errorOp(errorMessage, currentAgentId || "system"));
+ await sseHelper.writeOperation(errorOp(`Execution error: ${errorMessage}`, currentAgentId || "system"));
  if (task) {
  await updateTask(dbClient_default)({
  taskId: task.id,
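Editor's note: the execution loop no longer emits a separate writeError alongside the error operation; the single writeOperation(errorOp(...)) call is now the one user-visible error signal. A sketch with a stand-in errorOp (the real factory lives elsewhere in this bundle and its exact output is not shown here):

    // Hypothetical stand-in for errorOp(message, agentId).
    function errorOp(message, agentId) {
      return { type: "error", message, agentId, timestamp: Date.now() };
    }

    // 0.3.0 pattern: one operation event per failure, no duplicate writeError call.
    async function reportFailure(sseHelper, message, agentId = "system") {
      await sseHelper.writeOperation(errorOp(message, agentId));
    }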
@@ -7733,8 +7911,7 @@ app2.openapi(chatCompletionsRoute, async (c) => {
  const body = c.get("requestBody") || {};
  const conversationId = body.conversationId || nanoid();
  const fullGraph = await getFullGraph(dbClient_default)({
- scopes: { tenantId, projectId }
- graphId
+ scopes: { tenantId, projectId, graphId }
  });
  let agentGraph;
  let defaultAgentId;
@@ -7751,16 +7928,21 @@ app2.openapi(chatCompletionsRoute, async (c) => {
  defaultAgentId = fullGraph.defaultAgentId || firstAgentId;
  } else {
  agentGraph = await getAgentGraphWithDefaultAgent(dbClient_default)({
- scopes: { tenantId, projectId }
- graphId
+ scopes: { tenantId, projectId, graphId }
  });
  if (!agentGraph) {
-
+ throw createApiError({
+ code: "not_found",
+ message: "Agent graph not found"
+ });
  }
  defaultAgentId = agentGraph.defaultAgentId || "";
  }
  if (!defaultAgentId) {
-
+ throw createApiError({
+ code: "not_found",
+ message: "No default agent found in graph"
+ });
  }
  await createOrGetConversation(dbClient_default)({
  tenantId,
@@ -7781,26 +7963,30 @@ app2.openapi(chatCompletionsRoute, async (c) => {
  }
  const agentId = activeAgent?.activeAgentId || defaultAgentId;
  const agentInfo = await getAgentById(dbClient_default)({
- scopes: { tenantId, projectId },
+ scopes: { tenantId, projectId, graphId },
  agentId
  });
  if (!agentInfo) {
-
+ throw createApiError({
+ code: "not_found",
+ message: "Agent not found"
+ });
  }
  const validatedContext = c.get("validatedContext") || body.requestContext || {};
  const credentialStores = c.get("credentialStores");
- await handleContextResolution(
+ await handleContextResolution({
  tenantId,
  projectId,
- conversationId,
  graphId,
-
-
+ conversationId,
+ requestContext: validatedContext,
+ dbClient: dbClient_default,
  credentialStores
- );
+ });
  logger20.info(
  {
  tenantId,
+ projectId,
  graphId,
  conversationId,
  defaultAgentId,
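Editor's note: handleContextResolution switches from positional arguments to a single options object; the keys visible in the diff are shown below. A call-shape sketch with the helper and its dependencies injected so the snippet stays self-contained (all ids are placeholders):

    // New (0.3.0) call shape: one options object instead of positional args.
    async function resolveContext(handleContextResolution, deps) {
      return handleContextResolution({
        tenantId: "tenant-1",
        projectId: "project-1",
        graphId: "graph-1",
        conversationId: "conv-1",
        requestContext: deps.validatedContext, // previously a positional argument
        dbClient: deps.dbClient,               // now passed by name
        credentialStores: deps.credentialStores
      });
    }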
@@ -7842,41 +8028,69 @@ app2.openapi(chatCompletionsRoute, async (c) => {
  });
  }
  return streamSSE(c, async (stream2) => {
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
+ try {
+ const sseHelper = createSSEStreamHelper(stream2, requestId2, timestamp);
+ await sseHelper.writeRole();
+ logger20.info({ agentId }, "Starting execution");
+ const executionHandler = new ExecutionHandler();
+ const result = await executionHandler.execute({
+ executionContext,
+ conversationId,
+ userMessage,
+ initialAgentId: agentId,
+ requestId: requestId2,
+ sseHelper
+ });
+ logger20.info(
+ { result },
+ `Execution completed: ${result.success ? "success" : "failed"} after ${result.iterations} iterations`
+ );
+ if (!result.success) {
+ await sseHelper.writeOperation(
+ errorOp(
+ "Sorry, I was unable to process your request at this time. Please try again.",
+ "system"
+ )
+ );
+ }
+ await sseHelper.complete();
+ } catch (error) {
+ logger20.error(
+ {
+ error: error instanceof Error ? error.message : error,
+ stack: error instanceof Error ? error.stack : void 0
+ },
+ "Error during streaming execution"
  );
+ try {
+ const sseHelper = createSSEStreamHelper(stream2, requestId2, timestamp);
+ await sseHelper.writeOperation(
+ errorOp(
+ "Sorry, I was unable to process your request at this time. Please try again.",
+ "system"
+ )
+ );
+ await sseHelper.complete();
+ } catch (streamError) {
+ logger20.error({ streamError }, "Failed to write error to stream");
+ }
  }
- await sseHelper.complete();
  });
  } catch (error) {
-
- error: error instanceof Error ? error.message : error,
- stack: error instanceof Error ? error.stack : void 0
- });
- return c.json(
+ logger20.error(
  {
- error:
-
+ error: error instanceof Error ? error.message : error,
+ stack: error instanceof Error ? error.stack : void 0
  },
-
+ "Error in chat completions endpoint before streaming"
  );
+ if (error && typeof error === "object" && "status" in error) {
+ throw error;
+ }
+ throw createApiError({
+ code: "internal_server_error",
+ message: error instanceof Error ? error.message : "Failed to process chat completion"
+ });
  }
  });
  var getMessageText = (content) => {
@@ -7940,6 +8154,7 @@ app3.openapi(chatDataStreamRoute, async (c) => {
  try {
  const executionContext = getRequestExecutionContext(c);
  const { tenantId, projectId, graphId } = executionContext;
+ loggerFactory.getLogger("chatDataStream").debug({ tenantId, projectId, graphId }, "Extracted chatDataStream parameters");
  const body = c.get("requestBody") || {};
  const conversationId = body.conversationId || nanoid();
  const activeSpan = trace.getActiveSpan();
@@ -7952,14 +8167,22 @@ app3.openapi(chatDataStreamRoute, async (c) => {
  });
  }
  const agentGraph = await getAgentGraphWithDefaultAgent(dbClient_default)({
- scopes: { tenantId, projectId }
- graphId
+ scopes: { tenantId, projectId, graphId }
  });
  if (!agentGraph) {
-
+ throw createApiError({
+ code: "not_found",
+ message: "Agent graph not found"
+ });
  }
  const defaultAgentId = agentGraph.defaultAgentId;
  const graphName = agentGraph.name;
+ if (!defaultAgentId) {
+ throw createApiError({
+ code: "bad_request",
+ message: "Graph does not have a default agent configured"
+ });
+ }
  const activeAgent = await getActiveAgentForConversation(dbClient_default)({
  scopes: { tenantId, projectId },
  conversationId
@@ -7973,23 +8196,26 @@ app3.openapi(chatDataStreamRoute, async (c) => {
  }
  const agentId = activeAgent?.activeAgentId || defaultAgentId;
  const agentInfo = await getAgentById(dbClient_default)({
- scopes: { tenantId, projectId },
+ scopes: { tenantId, projectId, graphId },
  agentId
  });
  if (!agentInfo) {
-
+ throw createApiError({
+ code: "not_found",
+ message: "Agent not found"
+ });
  }
  const validatedContext = c.get("validatedContext") || body.requestContext || {};
  const credentialStores = c.get("credentialStores");
- await handleContextResolution(
+ await handleContextResolution({
  tenantId,
  projectId,
- conversationId,
  graphId,
-
-
+ conversationId,
+ requestContext: validatedContext,
+ dbClient: dbClient_default,
  credentialStores
- );
+ });
  const lastUserMessage = body.messages.filter((m) => m.role === "user").slice(-1)[0];
  const userText = typeof lastUserMessage?.content === "string" ? lastUserMessage.content : lastUserMessage?.parts?.map((p) => p.text).join("") || "";
  logger21.info({ userText, lastUserMessage }, "userText");
@@ -8031,11 +8257,11 @@ app3.openapi(chatDataStreamRoute, async (c) => {
  sseHelper: streamHelper
  });
  if (!result.success) {
- await streamHelper.
+ await streamHelper.writeOperation(errorOp("Unable to process request", "system"));
  }
  } catch (err) {
  logger21.error({ err }, "Streaming error");
- await streamHelper.
+ await streamHelper.writeOperation(errorOp("Internal server error", "system"));
  } finally {
  if ("cleanup" in streamHelper && typeof streamHelper.cleanup === "function") {
  streamHelper.cleanup();
@@ -8056,7 +8282,10 @@ app3.openapi(chatDataStreamRoute, async (c) => {
  );
  } catch (error) {
  logger21.error({ error }, "chatDataStream error");
-
+ throw createApiError({
+ code: "internal_server_error",
+ message: "Failed to process chat completion"
+ });
  }
  });
  var chatDataStream_default = app3;
@@ -8258,8 +8487,7 @@ var getServer = async (requestContext, executionContext, conversationId, credent
  const { tenantId, projectId, graphId } = executionContext;
  setupTracing(conversationId, tenantId, graphId);
  const agentGraph = await getAgentGraphWithDefaultAgent(dbClient_default)({
- scopes: { tenantId, projectId }
- graphId
+ scopes: { tenantId, projectId, graphId }
  });
  if (!agentGraph) {
  throw new Error("Agent graph not found");
@@ -8279,9 +8507,20 @@ var getServer = async (requestContext, executionContext, conversationId, credent
  },
  async ({ query }) => {
  try {
+ if (!agentGraph.defaultAgentId) {
+ return {
+ content: [
+ {
+ type: "text",
+ text: `Graph does not have a default agent configured`
+ }
+ ],
+ isError: true
+ };
+ }
  const defaultAgentId = agentGraph.defaultAgentId;
  const agentInfo = await getAgentById(dbClient_default)({
- scopes: { tenantId, projectId },
+ scopes: { tenantId, projectId, graphId },
  agentId: defaultAgentId
  });
  if (!agentInfo) {
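Editor's note: when the graph behind the MCP endpoint has no default agent, the tool now returns an error-flagged result rather than throwing. A sketch of the result shape an MCP client receives (this is the standard content/isError tool-result layout):

    // Result returned by the MCP tool when no default agent is configured.
    const toolResult = {
      content: [{ type: "text", text: "Graph does not have a default agent configured" }],
      isError: true
    };

    // Client-side check:
    if (toolResult.isError) {
      console.warn(toolResult.content.map((c) => c.text).join("\n"));
    }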
@@ -8295,18 +8534,19 @@ var getServer = async (requestContext, executionContext, conversationId, credent
  isError: true
  };
  }
- const resolvedContext = await handleContextResolution(
+ const resolvedContext = await handleContextResolution({
  tenantId,
  projectId,
- conversationId,
  graphId,
+ conversationId,
  requestContext,
- dbClient_default,
+ dbClient: dbClient_default,
  credentialStores
- );
+ });
  logger22.info(
  {
  tenantId,
+ projectId,
  graphId,
  conversationId,
  hasContextConfig: !!agentGraph.contextConfigId,
@@ -8368,8 +8608,7 @@ var handleInitializationRequest = async (body, executionContext, validatedContex
  logger22.info({ body }, "Received initialization request");
  const sessionId = nanoid();
  const agentGraph = await getAgentGraphWithDefaultAgent(dbClient_default)({
- scopes: { tenantId, projectId }
- graphId
+ scopes: { tenantId, projectId, graphId }
  });
  if (!agentGraph) {
  return c.json(
@@ -8381,6 +8620,16 @@ var handleInitializationRequest = async (body, executionContext, validatedContex
  { status: 404 }
  );
  }
+ if (!agentGraph.defaultAgentId) {
+ return c.json(
+ {
+ jsonrpc: "2.0",
+ error: { code: -32001, message: "Graph does not have a default agent configured" },
+ id: body.id || null
+ },
+ { status: 400 }
+ );
+ }
  const conversation = await createOrGetConversation(dbClient_default)({
  id: sessionId,
  tenantId,
@@ -8577,6 +8826,8 @@ app4.delete("/", async (c) => {
  );
  });
  var mcp_default = app4;
+
+ // src/app.ts
  var logger23 = getLogger("agents-run-api");
  function createExecutionHono(serverConfig, credentialStores) {
  const app6 = new OpenAPIHono();