ema-mcp-toolkit 0.2.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/README.md +338 -0
- package/config.example.yaml +32 -0
- package/dist/cli/index.js +333 -0
- package/dist/config.js +136 -0
- package/dist/emaClient.js +398 -0
- package/dist/index.js +109 -0
- package/dist/mcp/handlers-consolidated.js +851 -0
- package/dist/mcp/index.js +15 -0
- package/dist/mcp/prompts.js +1753 -0
- package/dist/mcp/resources.js +624 -0
- package/dist/mcp/server.js +4585 -0
- package/dist/mcp/tools-consolidated.js +590 -0
- package/dist/mcp/tools-legacy.js +736 -0
- package/dist/models.js +8 -0
- package/dist/scheduler.js +21 -0
- package/dist/sdk/client.js +788 -0
- package/dist/sdk/config.js +136 -0
- package/dist/sdk/contracts.js +429 -0
- package/dist/sdk/generation-schema.js +189 -0
- package/dist/sdk/index.js +39 -0
- package/dist/sdk/knowledge.js +2780 -0
- package/dist/sdk/models.js +8 -0
- package/dist/sdk/state.js +88 -0
- package/dist/sdk/sync-options.js +216 -0
- package/dist/sdk/sync.js +220 -0
- package/dist/sdk/validation-rules.js +355 -0
- package/dist/sdk/workflow-generator.js +291 -0
- package/dist/sdk/workflow-intent.js +1585 -0
- package/dist/state.js +88 -0
- package/dist/sync.js +416 -0
- package/dist/syncOptions.js +216 -0
- package/dist/ui.js +334 -0
- package/docs/advisor-comms-assistant-fixes.md +175 -0
- package/docs/api-contracts.md +216 -0
- package/docs/auto-builder-analysis.md +271 -0
- package/docs/data-architecture.md +166 -0
- package/docs/ema-auto-builder-guide.html +394 -0
- package/docs/ema-user-guide.md +1121 -0
- package/docs/mcp-tools-guide.md +149 -0
- package/docs/naming-conventions.md +218 -0
- package/docs/tool-consolidation-proposal.md +427 -0
- package/package.json +95 -0
- package/resources/templates/chat-ai/README.md +119 -0
- package/resources/templates/chat-ai/persona-config.json +111 -0
- package/resources/templates/dashboard-ai/README.md +156 -0
- package/resources/templates/dashboard-ai/persona-config.json +180 -0
- package/resources/templates/voice-ai/README.md +123 -0
- package/resources/templates/voice-ai/persona-config.json +74 -0
- package/resources/templates/voice-ai/workflow-prompt.md +120 -0
@@ -0,0 +1,4585 @@
#!/usr/bin/env node
/**
 * MCP (Model Context Protocol) Server for Ema API
 *
 * Provides tools for interacting with Ema AI Employees (personas), Actions (agents),
 * and Persona Synchronization across multiple environments.
 *
 * Terminology mapping:
 * - API: "persona" <-> UI: "AI Employee"
 * - API: "action" <-> UI: "Agent"
 *
 * Environment Selection:
 * - Tools accept an optional `env` parameter to target specific environments
 * - Default environment is set via EMA_ENV_NAME or first in config
 * - Available environments come from sync config (EMA_AGENT_SYNC_CONFIG)
 */
import { Server } from "@modelcontextprotocol/sdk/server/index.js";
import { StdioServerTransport } from "@modelcontextprotocol/sdk/server/stdio.js";
import { CallToolRequestSchema, ListToolsRequestSchema, ListPromptsRequestSchema, GetPromptRequestSchema, ListResourcesRequestSchema, ReadResourceRequestSchema, } from "@modelcontextprotocol/sdk/types.js";
// Prompts and Resources
import { PromptRegistry, isPromptError } from "./prompts.js";
import { ResourceRegistry, isResourceError } from "./resources.js";
import { EmaClient } from "../sdk/client.js";
import { loadConfigOptional } from "../sdk/config.js";
import { resolveSyncBehavior, loadSyncOptions } from "../sdk/sync-options.js";
import { SyncSDK } from "../sdk/sync.js";
import { fingerprintPersona, transformWorkflowForTarget, getCleanDescription, buildDescriptionWithSyncTag } from "../sync.js";
import { SYNC_METADATA_KEY } from "../sdk/models.js";
// Auto Builder Knowledge
import { AGENT_CATALOG, WORKFLOW_PATTERNS, QUALIFYING_QUESTIONS, PLATFORM_CONCEPTS, WORKFLOW_EXECUTION_MODEL, COMMON_MISTAKES, DEBUG_CHECKLIST, GUIDANCE_TOPICS, VOICE_PERSONA_TEMPLATE, PROJECT_TYPES, getAgentsByCategory, getAgentByName, getWidgetsForPersonaType, checkTypeCompatibility, getQualifyingQuestionsByCategory, getConceptByTerm, suggestAgentsForUseCase, validateWorkflowPrompt,
// Workflow Analysis
analyzeWorkflow, detectWorkflowIssues, validateWorkflowConnections, suggestWorkflowFixes, } from "../sdk/knowledge.js";
// Workflow Compiler (Template-driven)
import { compileWorkflow, } from "../sdk/workflow-generator.js";
// Workflow Intent (Normalization layer)
import { parseInput, intentToSpec, } from "../sdk/workflow-intent.js";
// Consolidated Tools (Unix CLI pattern) - NEW UNIFIED INTERFACE
import { generateConsolidatedTools, } from "./tools-consolidated.js";
// Legacy Tools (backwards compatibility) - DEPRECATED, OFF BY DEFAULT
import { generateLegacyTools, } from "./tools-legacy.js";
// Feature flag: Legacy tools are DISABLED by default to prevent LLM confusion
// Set EMA_ENABLE_LEGACY_TOOLS=true only if you need backwards compatibility
const ENABLE_LEGACY_TOOLS = process.env.EMA_ENABLE_LEGACY_TOOLS === "true";
import { handleEnv, handlePersona, handleAction, handleTemplate, handleKnowledge, handleReference, } from "./handlers-consolidated.js";
// ─────────────────────────────────────────────────────────────────────────────
// Autobuilder Prompt Generation
// ─────────────────────────────────────────────────────────────────────────────
/**
 * Generate an optimized Autobuilder prompt from a natural language description.
 * Uses knowledge of best practices, agent catalog, and workflow patterns.
 */
function generateAutobuilderPrompt(description, personaType) {
    const typeConfig = {
        voice: {
            trigger: "voice_trigger",
            outputNote: "Use results mapping for Voice AI (not WORKFLOW_OUTPUT node)",
        },
        chat: {
            trigger: "chat_trigger",
            outputNote: "Connect final response to WORKFLOW_OUTPUT node",
        },
        dashboard: {
            trigger: "document_trigger",
            outputNote: "Process documents and connect to WORKFLOW_OUTPUT",
        },
    };
    const config = typeConfig[personaType] ?? typeConfig.chat;
    // Detect intents from description
    const hasIntentRouting = /intent|route|categor|branch|if.*then/i.test(description);
    const hasKBSearch = /search|knowledge|faq|document|lookup/i.test(description);
    const hasExternalAction = /servicenow|salesforce|jira|ticket|create|update|api|external/i.test(description);
    const hasHITL = /approv|human|review|escalat/i.test(description);
    // Build structured prompt
    let prompt = `Create a ${personaType} AI Employee workflow for:\n${description}\n\n`;
    prompt += "Requirements:\n";
    prompt += `- Trigger: ${config.trigger}\n`;
    if (hasIntentRouting) {
        prompt += "- Use chat_categorizer for intent routing with clear category definitions\n";
        prompt += "- Include a Fallback category for unmatched queries\n";
        prompt += "- Use runIf conditions to route based on categorizer output\n";
    }
    if (hasKBSearch) {
        prompt += "- Use search_datastore for knowledge base queries\n";
        prompt += "- Use conversation_to_search_query if multi-turn context needed\n";
        prompt += "- Use respond_with_sources for answers with citations\n";
    }
    if (hasExternalAction) {
        prompt += "- Use external_action_caller for external system integration\n";
        if (!hasHITL) {
            prompt += "- Add HITL approval before executing external actions (safety)\n";
        }
    }
    if (hasHITL) {
        prompt += "- Include general_hitl for human approval flows\n";
        prompt += "- Handle both approval and rejection paths\n";
    }
    prompt += `- ${config.outputNote}\n`;
    prompt += "- Ensure all paths lead to a response (no dead ends)\n";
    prompt += "- Use correct input types: chat_conversation for categorizers, user_query for search\n";
    return prompt;
}
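// Illustrative usage (editor's sketch, not part of the published file): for a description like
// "Answer HR policy questions from the knowledge base and escalate refunds for human approval"
// with personaType "chat", the generated prompt begins "Create a chat AI Employee workflow for: ..."
// and then lists "- Trigger: chat_trigger", the search_datastore / respond_with_sources requirements,
// the general_hitl requirements, and the closing rules about dead ends and input types.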
/**
 * Generate a human-readable summary of a workflow for Auto Builder prompts.
 * This creates a description the Auto Builder can understand to recreate the workflow.
 */
function summarizeWorkflow(workflowDef) {
    const actions = workflowDef.actions;
    if (!actions || actions.length === 0) {
        return "Empty workflow with no actions";
    }
    const lines = [];
    // Count action types
    const actionTypes = new Map();
    const nodeNames = [];
    for (const action of actions) {
        const actionType = action.actionType;
        const typeName = actionType?.name?.name ?? "unknown";
        actionTypes.set(typeName, (actionTypes.get(typeName) ?? 0) + 1);
        const nodeName = String(action.name ?? "");
        if (nodeName)
            nodeNames.push(nodeName);
    }
    // Summary header
    lines.push(`Workflow with ${actions.length} nodes:`);
    // List action types and counts
    for (const [type, count] of actionTypes.entries()) {
        lines.push(`- ${type}: ${count}`);
    }
    // List node names
    if (nodeNames.length > 0) {
        lines.push(`\nNodes: ${nodeNames.slice(0, 20).join(", ")}${nodeNames.length > 20 ? "..." : ""}`);
    }
    // Check for categorizer (indicates intent routing)
    const hasCategorizer = Array.from(actionTypes.keys()).some(t => t.includes("categorizer"));
    if (hasCategorizer) {
        lines.push("\nThis workflow uses intent-based routing with a categorizer.");
    }
    // Check for HITL
    const hasHitl = Array.from(actionTypes.keys()).some(t => t.includes("hitl") || t.includes("general_hitl"));
    if (hasHitl) {
        lines.push("This workflow includes human-in-the-loop approval steps.");
    }
    // Include raw workflow JSON for Auto Builder to parse
    lines.push("\n--- Full workflow_def JSON ---");
    lines.push(JSON.stringify(workflowDef, null, 2));
    return lines.join("\n");
}
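// Example output shape (editor's sketch; the node names are hypothetical):
//   Workflow with 4 nodes:
//   - chat_trigger: 1
//   - chat_categorizer: 1
//   - search_datastore: 1
//   - respond_with_sources: 1
//   Nodes: trigger, categorize_intent, search_kb, respond_final
//   This workflow uses intent-based routing with a categorizer.
//   --- Full workflow_def JSON ---  (followed by the pretty-printed workflow_def)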
/**
 * Apply automatic fixes to a workflow based on detected issues.
 * This function attempts to fix common issues like missing WORKFLOW_OUTPUT,
 * wrong input sources, and other structural problems.
 */
function applyWorkflowFixes(workflowDef, issues, persona) {
    // Deep clone the workflow to avoid mutating the original
    const fixedWorkflow = JSON.parse(JSON.stringify(workflowDef));
    const appliedFixes = [];
    const actions = fixedWorkflow.actions;
    if (!actions) {
        return { fixedWorkflow, appliedFixes: [{ issue_type: "unknown", description: "No actions array found in workflow", applied: false }] };
    }
    for (const issue of issues) {
        let fixed = false;
        let fixDescription = "";
        let failReason = "";
        switch (issue.type) {
            case "missing_workflow_output": {
                // For Voice AI, WORKFLOW_OUTPUT is handled via results mapping, not a node
                // Check if results mapping exists
                const results = fixedWorkflow.results;
                if (results && Object.keys(results).length > 0) {
                    // Results mapping exists, this is valid for Voice AI
                    fixed = true;
                    fixDescription = "Workflow uses results mapping (valid for Voice AI) - no WORKFLOW_OUTPUT node needed";
                }
                else {
                    // Try to add results mapping for all response nodes
                    const responseNodes = actions.filter(a => {
                        const name = String(a.name ?? "");
                        return name.includes("respond") || name.includes("response");
                    });
                    if (responseNodes.length > 0) {
                        const newResults = {};
                        for (const respNode of responseNodes) {
                            const nodeName = String(respNode.name);
                            newResults[`${nodeName}.response_with_sources`] = {
                                actionName: nodeName,
                                outputName: "response_with_sources",
                            };
                        }
                        fixedWorkflow.results = newResults;
                        fixed = true;
                        fixDescription = `Added results mapping for ${responseNodes.length} response node(s)`;
                    }
                    else {
                        failReason = "No response nodes found to map to results";
                    }
                }
                break;
            }
            case "wrong_input_source": {
                const targetNode = actions.find(a => String(a.name) === issue.node);
                if (!targetNode || !targetNode.inputs) {
                    failReason = `Could not find node "${issue.node}" or its inputs`;
                    break;
                }
                const inputs = targetNode.inputs;
                const nodeType = String(targetNode.actionType || targetNode.type || "");
                // CASE 1: Email field needs entity extraction
                // This requires adding a new node, not just rebinding
                const isEmailField = issue.reason?.toLowerCase().includes("email") ||
                    issue.current?.toLowerCase().includes("email_to") ||
                    (nodeType.includes("send_email") && inputs.email_to);
                if (isEmailField && inputs.email_to) {
                    // Check if entity_extraction already exists
                    let extractorNode = actions.find(a => String(a.actionType || a.type || "").includes("entity_extraction") ||
                        String(a.name).includes("entity_extract"));
                    if (!extractorNode) {
                        // Create entity_extraction node
                        const extractorName = "entity_extractor_email";
                        extractorNode = {
                            name: extractorName,
                            actionType: "entity_extraction",
                            inputs: {
                                conversation: {
                                    actionOutput: {
                                        actionName: "trigger",
                                        output: "chat_conversation",
                                    },
                                },
                                entities_to_extract: {
                                    literalValue: {
                                        value: "email_address",
                                    },
                                },
                            },
                            outputs: ["email_address", "extracted_entities"],
                        };
                        actions.push(extractorNode);
                        // Wire email_to to use extracted email
                        inputs.email_to = {
                            actionOutput: {
                                actionName: extractorName,
                                output: "email_address",
                            },
                        };
                        fixed = true;
                        fixDescription = `Added entity_extraction node "${extractorName}" and wired ${issue.node}.email_to to use extracted email_address`;
                    }
                    else {
                        // Extractor exists, just rewire
                        inputs.email_to = {
                            actionOutput: {
                                actionName: String(extractorNode.name),
                                output: "email_address",
                            },
                        };
                        fixed = true;
                        fixDescription = `Wired ${issue.node}.email_to to use ${extractorNode.name}.email_address`;
                    }
                    break;
                }
                // CASE 2: Simple rebinding (conversation/query sources)
                let inputField;
                const possibleFields = ["conversation", "query", "input", "text"];
                for (const field of possibleFields) {
                    if (inputs[field]) {
                        inputField = field;
                        break;
                    }
                }
                if (inputField && inputs[inputField]) {
                    const inputBinding = inputs[inputField];
                    if (inputBinding.actionOutput) {
                        const actionOutput = inputBinding.actionOutput;
                        // Update to use recommended source
                        if (issue.recommended === "chat_conversation") {
                            actionOutput.actionName = "trigger";
                            actionOutput.output = "chat_conversation";
                            fixed = true;
                        }
                        else if (issue.recommended === "user_query") {
                            actionOutput.actionName = "trigger";
                            actionOutput.output = "user_query";
                            fixed = true;
                        }
                        else if (issue.recommended === "summarized_conversation") {
                            // Check if summarizer exists
                            const summarizer = actions.find(a => String(a.name).includes("summarizer") ||
                                String(a.name).includes("conversation_to_search"));
                            if (summarizer) {
                                actionOutput.actionName = String(summarizer.name);
                                actionOutput.output = "summarized_conversation";
                                fixed = true;
                            }
                            else {
                                failReason = "Recommended summarized_conversation but no summarizer node exists";
                                break;
                            }
                        }
                        if (fixed) {
                            fixDescription = `Changed ${issue.node}.${inputField} from ${issue.current} to ${issue.recommended}`;
                        }
                    }
                }
                else if (!inputField) {
                    failReason = `Could not find input field (tried: conversation, query, input, text) in node "${issue.node}"`;
                }
                if (!fixed && !failReason) {
                    failReason = `Could not determine how to fix input source for "${issue.node}"`;
                }
                break;
            }
            case "incomplete_hitl": {
                // This requires adding new nodes which is complex
                // Mark as not auto-fixable
                failReason = "HITL paths require manual configuration of success/failure response nodes";
                break;
            }
            case "missing_category_edge": {
                // Check if using runIf pattern (valid alternative to edges)
                const nodesWithRunIf = actions.filter(a => a.runIf !== undefined);
                const categorizerName = issue.node;
                const nodesUsingCategorizer = nodesWithRunIf.filter(a => {
                    const runIf = a.runIf;
                    if (!runIf?.lhs)
                        return false;
                    const lhs = runIf.lhs;
                    const actionOutput = lhs.actionOutput;
                    return actionOutput?.actionName === categorizerName;
                });
                if (nodesUsingCategorizer.length > 0) {
                    fixed = true;
                    fixDescription = `Categorizer "${categorizerName}" uses runIf pattern (${nodesUsingCategorizer.length} conditional nodes) - valid alternative to explicit edges`;
                }
                else {
                    failReason = "Categorizer needs manual configuration of category routing edges";
                }
                break;
            }
            case "malformed_runif": {
                // Fix malformed runIf: "category_<Name>" vs "true" -> "category" vs "<Name>"
                const targetNode = actions.find(a => String(a.name) === issue.node);
                if (targetNode?.runIf) {
                    const runIf = targetNode.runIf;
                    const lhs = runIf.lhs;
                    const rhs = runIf.rhs;
                    if (lhs?.actionOutput && rhs?.inline) {
                        const actionOutput = lhs.actionOutput;
                        const inlineRhs = rhs.inline;
                        const output = String(actionOutput.output ?? "");
                        if (output.startsWith("category_")) {
                            // Extract category name and fix the format
                            const categoryName = output.replace(/^category_/, "");
                            actionOutput.output = "category";
                            inlineRhs.enumValue = categoryName;
                            fixed = true;
                            fixDescription = `Fixed runIf for "${issue.node}": output="category", enumValue="${categoryName}"`;
                        }
                    }
                }
                if (!fixed) {
                    failReason = `Could not find node "${issue.node}" or its runIf condition`;
                }
                break;
            }
            case "orphan": {
                // Remove orphan nodes
                const orphanIndex = actions.findIndex(a => String(a.name) === issue.node);
                if (orphanIndex >= 0) {
                    actions.splice(orphanIndex, 1);
                    fixed = true;
                    fixDescription = `Removed orphan node "${issue.node}" (not connected to workflow)`;
                }
                else {
                    failReason = `Could not find orphan node "${issue.node}" to remove`;
                }
                break;
            }
            case "redundant_search": {
                // This is an optimization, not a critical fix - skip auto-fix
                failReason = "Redundant search is an optimization suggestion - consider consolidating manually";
                break;
            }
            case "type_mismatch": {
                // Type mismatch fix strategy:
                // 1. Handle multiBinding inputs - find and fix bindings inside multiBinding
                // 2. Handle nested named_inputs targets - find binding inside named_inputs.multiBinding
                // 3. For CHAT_CONVERSATION expected: use trigger.chat_conversation, move old to named_inputs
                // 4. For SEARCH_RESULT expected: find actual search node output
                // 5. For DOCUMENT/TEXT_WITH_SOURCES mismatches: move to named_inputs (accepts ANY type)
                const sourceNode = issue.source?.split(".")[0];
                const sourceOutput = issue.source?.split(".")[1];
                const targetNode = issue.target?.split(".")[0];
                const targetInput = issue.target?.split(".")[1];
                const expectedType = issue.expected;
                const gotType = issue.got;
                if (!sourceNode || !targetNode || !targetInput) {
                    failReason = "Missing source/target node information";
                    break;
                }
                const targetAction = actions.find(a => String(a.name) === targetNode);
                if (!targetAction || !targetAction.inputs) {
                    failReason = `Could not find target node "${targetNode}"`;
                    break;
                }
                const inputs = targetAction.inputs;
                let inputBinding = inputs[targetInput];
                let actionOutput;
                let isNestedInNamedInputs = false;
                let nestedBindingIndex = -1;
                // Check for autoDetectedBinding - some bindings use this structure alongside or instead of others
                if (inputBinding && !inputBinding.actionOutput && inputBinding.autoDetectedBinding) {
                    const autoBinding = inputBinding.autoDetectedBinding;
                    if (autoBinding.actionOutput) {
                        actionOutput = autoBinding.actionOutput;
                    }
                }
                // Check if targetInput is a named_inputs reference (e.g., "named_inputs_conversation")
                if (!inputBinding && targetInput.startsWith("named_inputs_")) {
                    const namedInputKey = targetInput.replace("named_inputs_", "");
                    const namedInputs = inputs.named_inputs;
                    if (namedInputs?.multiBinding) {
                        const mb = namedInputs.multiBinding;
                        if (mb.elements) {
                            nestedBindingIndex = mb.elements.findIndex(el => el.namedBinding?.name?.toLowerCase() === namedInputKey.toLowerCase());
                            if (nestedBindingIndex >= 0) {
                                const namedBinding = mb.elements[nestedBindingIndex].namedBinding;
                                if (namedBinding?.value?.actionOutput) {
                                    actionOutput = namedBinding.value.actionOutput;
                                    isNestedInNamedInputs = true;
                                    inputBinding = namedBinding.value;
                                }
                            }
                        }
                    }
                    // If we couldn't find the binding in multiBinding, the type mismatch for named_inputs
                    // is actually acceptable since named_inputs accepts ANY type
                    if (!actionOutput && namedInputs) {
                        fixed = true;
                        fixDescription = `${targetNode}.${targetInput} is in named_inputs (accepts any type) - type mismatch is acceptable`;
                        break;
                    }
                }
                // If direct binding exists, check for actionOutput or multiBinding
                if (!actionOutput && inputBinding) {
                    if (inputBinding.actionOutput) {
                        actionOutput = inputBinding.actionOutput;
                    }
                    else if (inputBinding.multiBinding) {
                        // Handle multiBinding - find the element that references the source node
                        const mb = inputBinding.multiBinding;
                        if (mb.elements) {
                            for (let i = 0; i < mb.elements.length; i++) {
                                const el = mb.elements[i];
                                const elActionOutput = el.namedBinding?.value?.actionOutput;
                                if (elActionOutput && String(elActionOutput.actionName) === sourceNode) {
                                    actionOutput = elActionOutput;
                                    nestedBindingIndex = i;
                                    break;
                                }
                            }
                        }
                        // Also check if elements have direct actionOutput (not inside namedBinding)
                        if (!actionOutput && mb.elements) {
                            for (let i = 0; i < mb.elements.length; i++) {
                                const el = mb.elements[i];
                                // Check for actionOutput directly on element
                                if (el.actionOutput) {
                                    actionOutput = el.actionOutput;
                                    nestedBindingIndex = i;
                                    break;
                                }
                            }
                        }
                        // If source not found in multiBinding, DON'T mark as fixed - the type mismatch is real
                        // We need to find and remove/fix the mismatched binding
                        if (!actionOutput && mb.elements) {
                            // Find ANY binding that has type mismatch and try to fix it
                            for (let i = 0; i < mb.elements.length; i++) {
                                const el = mb.elements[i];
                                const elActionOutput = el.namedBinding?.value?.actionOutput;
                                if (elActionOutput) {
                                    // Use the first actionOutput we find - it's likely the mismatched one
                                    actionOutput = elActionOutput;
                                    nestedBindingIndex = i;
                                    break;
                                }
                            }
                        }
                    }
                }
                if (!actionOutput) {
                    failReason = `No actionOutput binding found for ${targetNode}.${targetInput}`;
                    break;
                }
                // === FIX STRATEGY 1: CHAT_CONVERSATION expected ===
                // If binding is nested in named_inputs, redirect to trigger.chat_conversation
                if (expectedType?.includes("CHAT_CONVERSATION")) {
                    const oldActionName = String(actionOutput.actionName ?? "");
                    const oldOutput = String(actionOutput.output ?? "");
                    if (isNestedInNamedInputs) {
                        // Binding is inside named_inputs - change it to use trigger.chat_conversation
                        actionOutput.actionName = "trigger";
                        actionOutput.output = "chat_conversation";
                        // Rename the named binding to indicate it's now conversation
                        const namedInputs = inputs.named_inputs;
                        const mb = namedInputs.multiBinding;
                        if (mb.elements && nestedBindingIndex >= 0 && mb.elements[nestedBindingIndex]?.namedBinding) {
                            // Keep the old binding as additional context
                            mb.elements.push({
                                namedBinding: {
                                    name: `${targetInput.replace("named_inputs_", "")}_context`,
                                    value: { actionOutput: { actionName: oldActionName, output: oldOutput } }
                                }
                            });
                        }
                        fixed = true;
                        fixDescription = `Fixed ${targetNode}.${targetInput}: now uses trigger.chat_conversation`;
                        break;
                    }
                    // Standard case: direct binding - change to trigger.chat_conversation
                    actionOutput.actionName = "trigger";
                    actionOutput.output = "chat_conversation";
                    // Add old binding to named_inputs so it's still available
                    const namedInputs = inputs.named_inputs;
                    const contextName = targetInput === "conversation" ? "context" : `${targetInput}_context`;
                    if (namedInputs?.multiBinding) {
                        // Append to existing multiBinding
                        const mb = namedInputs.multiBinding;
                        if (!mb.elements)
                            mb.elements = [];
                        mb.elements.push({
                            namedBinding: {
                                name: contextName,
                                value: { actionOutput: { actionName: oldActionName, output: oldOutput } }
                            }
                        });
                    }
                    else {
                        // Create new named_inputs with multiBinding
                        inputs.named_inputs = {
                            multiBinding: {
                                elements: [{
                                        namedBinding: {
                                            name: contextName,
                                            value: { actionOutput: { actionName: oldActionName, output: oldOutput } }
                                        }
                                    }]
                            }
                        };
                    }
                    fixed = true;
                    fixDescription = `Fixed ${targetNode}.${targetInput}: now uses trigger.chat_conversation, old binding moved to named_inputs.${contextName}`;
                    break;
                }
                // === FIX STRATEGY 2: SEARCH_RESULT expected ===
                // Find an actual search node and use its search_results output
                if (expectedType?.includes("SEARCH_RESULT") && gotType?.includes("TEXT_WITH_SOURCES")) {
                    // Find a search node (excluding web_search which has different output)
                    const searchNode = actions.find(a => {
                        const name = String(a.name ?? "");
                        const actionType = a.actionType?.name?.name ?? "";
                        return (name.includes("search") || actionType.includes("search_datastore"))
                            && !name.includes("web_search")
                            && !name.includes("combine");
                    });
                    if (searchNode) {
                        // Change to use search node's output directly
                        // Don't add old binding to named_inputs - it causes type mismatches
                        actionOutput.actionName = String(searchNode.name);
                        actionOutput.output = "search_results";
                        fixed = true;
                        fixDescription = `Fixed ${targetNode}.${targetInput}: now uses ${searchNode.name}.search_results`;
                    }
                    else {
                        failReason = `No search_datastore node found to provide SEARCH_RESULT for ${targetNode}`;
                    }
                    break;
                }
                // === FIX STRATEGY 3: DOCUMENT or other type → remove the mismatched binding ===
                // Simply remove the bad binding - don't move to named_inputs (causes type mismatches)
                if (gotType?.includes("DOCUMENT") ||
                    (expectedType?.includes("TEXT_WITH_SOURCES") && !gotType?.includes("TEXT_WITH_SOURCES"))) {
                    // If binding is inside multiBinding of the target input, remove the bad element
                    if (inputBinding?.multiBinding && nestedBindingIndex >= 0) {
                        const mb = inputBinding.multiBinding;
                        if (mb.elements && mb.elements.length > 1) {
                            // Remove the mismatched element, keep others
                            mb.elements.splice(nestedBindingIndex, 1);
                            fixed = true;
                            fixDescription = `Fixed ${targetNode}.${targetInput}: removed mismatched binding from multiBinding`;
                            break;
                        }
                        else if (mb.elements && mb.elements.length === 1) {
                            // Only one element - remove entire binding
                            delete inputBinding.multiBinding;
                        }
                    }
                    // Remove the mismatched binding from the typed input entirely
                    delete inputs[targetInput];
                    fixed = true;
                    fixDescription = `Fixed ${targetNode}.${targetInput}: removed mismatched binding (input now unbound)`;
                    break;
                }
                // === FALLBACK: Remove mismatched binding ===
                // Don't move to named_inputs - it causes type mismatches
                {
                    // Remove the mismatched binding entirely
                    delete inputs[targetInput];
                    fixed = true;
                    fixDescription = `Fixed ${targetNode}.${targetInput}: removed mismatched binding (${gotType} → expected ${expectedType})`;
                }
                break;
            }
            default: {
                failReason = `Auto-fix not implemented for issue type: ${issue.type}`;
            }
        }
        appliedFixes.push({
            issue_type: issue.type,
            description: fixed ? fixDescription : `Could not fix: ${issue.reason}`,
            applied: fixed,
            reason: fixed ? undefined : failReason,
        });
    }
    return { fixedWorkflow, appliedFixes };
}
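// Illustrative call (editor's sketch; issue objects normally come from detectWorkflowIssues, and the
// node names below are hypothetical):
// const { fixedWorkflow, appliedFixes } = applyWorkflowFixes(persona.workflow_def, [
//     { type: "malformed_runif", node: "respond_password_reset" },
//     { type: "orphan", node: "unused_summarizer" },
// ], persona);
// appliedFixes then reports, per issue, whether a fix was applied and a human-readable description.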
// Well-known Ema environments for auto-detection
const KNOWN_ENVS = {
    demo: "https://api.demo.ema.co",
    dev: "https://api.dev.ema.co",
    staging: "https://api.staging.ema.co",
    prod: "https://api.ema.co",
};
let cachedConfig = null;
let cachedEnvInfos = null;
function getConfig() {
    if (cachedConfig === undefined) {
        const configPath = process.env.EMA_AGENT_SYNC_CONFIG ?? "./config.yaml";
        cachedConfig = loadConfigOptional(configPath);
    }
    return cachedConfig;
}
/**
 * Detect available environments from:
 * 1. YAML config file (if present) - full sync support
 * 2. Environment variables (fallback) - browse-only mode
 * - Looks for EMA_<ENV>_BEARER_TOKEN patterns
 * - Uses well-known URLs or EMA_<ENV>_BASE_URL
 */
function getAvailableEnvironments() {
    if (cachedEnvInfos)
        return cachedEnvInfos;
    // Option 1: From YAML config
    const cfg = getConfig();
    if (cfg?.environments?.length) {
        cachedEnvInfos = cfg.environments.map((e) => ({
            name: e.name,
            baseUrl: e.baseUrl,
            tokenEnvVar: e.bearerTokenEnv,
        }));
        return cachedEnvInfos;
    }
    // Option 2: Auto-detect from env vars (EMA_<ENV>_BEARER_TOKEN pattern)
    const detected = [];
    const tokenPattern = /^EMA_([A-Z]+)_BEARER_TOKEN$/;
    for (const [key, value] of Object.entries(process.env)) {
        if (!value)
            continue;
        const match = key.match(tokenPattern);
        if (match) {
            const envName = match[1].toLowerCase();
            const baseUrlKey = `EMA_${match[1]}_BASE_URL`;
            const baseUrl = process.env[baseUrlKey] ?? KNOWN_ENVS[envName] ?? `https://api.${envName}.ema.co`;
            detected.push({ name: envName, baseUrl, tokenEnvVar: key });
        }
    }
    if (detected.length > 0) {
        cachedEnvInfos = detected;
        return cachedEnvInfos;
    }
    // Option 3: Single default environment
    const baseUrl = process.env.EMA_BASE_URL ?? "https://api.demo.ema.co";
    const envName = process.env.EMA_ENV_NAME ?? "default";
    cachedEnvInfos = [{
            name: envName,
            baseUrl,
            tokenEnvVar: "EMA_BEARER_TOKEN",
        }];
    return cachedEnvInfos;
}
function getDefaultEnvName() {
    const envs = getAvailableEnvironments();
    // Prefer EMA_ENV_NAME, then first in config
    const preferred = process.env.EMA_ENV_NAME;
    if (preferred && envs.some((e) => e.name === preferred)) {
        return preferred;
    }
    return envs[0]?.name ?? "default";
}
function createClient(envName) {
    const targetEnv = envName ?? getDefaultEnvName();
    const envs = getAvailableEnvironments();
    const envInfo = envs.find((e) => e.name === targetEnv);
    if (!envInfo) {
        const available = envs.map((e) => e.name).join(", ");
        throw new Error(`Unknown environment: ${targetEnv}. Available: ${available}`);
    }
    const token = process.env[envInfo.tokenEnvVar];
    if (!token) {
        throw new Error(`Missing token for environment ${targetEnv}. Set ${envInfo.tokenEnvVar}`);
    }
    return new EmaClient({
        name: envInfo.name,
        baseUrl: envInfo.baseUrl,
        bearerToken: token,
    });
}
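// Configuration sketch (editor's illustration; the variable names shown as set are hypothetical):
// with EMA_DEMO_BEARER_TOKEN and EMA_STAGING_BEARER_TOKEN set and no YAML config,
// getAvailableEnvironments() auto-detects
//   [{ name: "demo", baseUrl: "https://api.demo.ema.co", tokenEnvVar: "EMA_DEMO_BEARER_TOKEN" },
//    { name: "staging", baseUrl: "https://api.staging.ema.co", tokenEnvVar: "EMA_STAGING_BEARER_TOKEN" }]
// and createClient("staging") builds an EmaClient against https://api.staging.ema.co using that token.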
function getSyncSDK() {
    const cfg = getConfig();
    if (!cfg)
        return null;
    return new SyncSDK(cfg);
}
async function directSyncPersona(opts) {
    const { name, sourceEnv, targetEnv, dryRun, syncStatus } = opts;
    // Create clients
    const sourceClient = createClient(sourceEnv);
    const targetClient = createClient(targetEnv);
    // Find source persona by name (list doesn't include workflow_def)
    const sourcePersonas = await sourceClient.getPersonasForTenant();
    const sourcePersonaFromList = sourcePersonas.find((p) => p.name === name);
    if (!sourcePersonaFromList) {
        throw new Error(`Persona not found in ${sourceEnv}: ${name}`);
    }
    // Fetch full persona to get workflow_def
    const sourcePersona = await sourceClient.getPersonaById(sourcePersonaFromList.id);
    if (!sourcePersona) {
        throw new Error(`Failed to fetch full persona details: ${sourcePersonaFromList.id}`);
    }
    return directSyncPersonaImpl({ sourcePersona, sourceEnv, targetEnv, sourceClient, targetClient, dryRun, syncStatus });
}
async function directSyncPersonaById(opts) {
    const { personaId, sourceEnv, targetEnv, dryRun, syncStatus } = opts;
    // Create clients
    const sourceClient = createClient(sourceEnv);
    const targetClient = createClient(targetEnv);
    // Get full source persona
    const sourcePersona = await sourceClient.getPersonaById(personaId);
    if (!sourcePersona) {
        throw new Error(`Persona not found in ${sourceEnv}: ${personaId}`);
    }
    return directSyncPersonaImpl({ sourcePersona, sourceEnv, targetEnv, sourceClient, targetClient, dryRun, syncStatus });
}
async function directSyncPersonaImpl(opts) {
    const { sourcePersona, sourceEnv, targetEnv, targetClient, dryRun, syncStatus } = opts;
    // Compute fingerprint
    const fp = fingerprintPersona(sourcePersona);
    // Find existing replica in target by sync tag or name
    let targetPersonaId;
    let created = false;
    // First, check by sync tag (most reliable)
    const syncedResult = await targetClient.findSyncedPersona(sourceEnv, sourcePersona.id);
    if (syncedResult) {
        targetPersonaId = syncedResult.persona.id;
        // Check if fingerprints match - skip if no changes
        if (syncedResult.syncMetadata.master_fingerprint === fp) {
            return {
                success: true,
                source_env: sourceEnv,
                target_env: targetEnv,
                source_persona: { id: sourcePersona.id, name: sourcePersona.name },
                target_persona: { id: targetPersonaId, name: syncedResult.persona.name, created: false },
                fingerprint: fp,
                dry_run: dryRun,
                error: "Already in sync (fingerprints match)",
            };
        }
    }
    // Fallback: find by name
    if (!targetPersonaId && sourcePersona.name) {
        const targetPersonas = await targetClient.getPersonasForTenant();
        const byName = targetPersonas.find((p) => p.name === sourcePersona.name);
        if (byName) {
            targetPersonaId = byName.id;
        }
    }
    if (dryRun) {
        return {
            success: true,
            source_env: sourceEnv,
            target_env: targetEnv,
            source_persona: { id: sourcePersona.id, name: sourcePersona.name },
            target_persona: targetPersonaId
                ? { id: targetPersonaId, name: sourcePersona.name, created: false }
                : undefined,
            fingerprint: fp,
            dry_run: true,
        };
    }
    // Build sync metadata
    const syncMetadata = {
        master_env: sourceEnv,
        master_id: sourcePersona.id,
        synced_at: new Date().toISOString(),
        master_fingerprint: fp,
    };
    // Copy proto_config from source (sync metadata now goes in status_log)
    const protoConfig = { ...(sourcePersona.proto_config ?? {}) };
    delete protoConfig[SYNC_METADATA_KEY]; // Clean any legacy sync tags from proto_config
    if (targetPersonaId) {
        // Update existing persona
        // Transform workflow if present
        let workflow;
        if (sourcePersona.workflow_def) {
            workflow = transformWorkflowForTarget(sourcePersona.workflow_def, sourcePersona.id, targetPersonaId);
        }
        // Clean source description and add compact sync tag
        const cleanSourceDesc = getCleanDescription(sourcePersona.description);
        const descriptionWithTag = buildDescriptionWithSyncTag(cleanSourceDesc, sourceEnv, sourcePersona.id);
        // Clean proto_config description too
        const protoConfigClean = {
            ...protoConfig,
            description: getCleanDescription(protoConfig.description),
        };
        // Update persona with sync tag in description
        await targetClient.updateAiEmployee({
            persona_id: targetPersonaId,
            name: sourcePersona.name,
            description: descriptionWithTag,
            proto_config: protoConfigClean,
            welcome_messages: sourcePersona.welcome_messages,
            embedding_enabled: sourcePersona.embedding_enabled,
            workflow,
            // Optionally sync enabled/disabled status (status can be "ready", "active", "inactive", etc.)
            ...(syncStatus && { enabled_by_user: sourcePersona.status !== "inactive" }),
        });
    }
    else {
        // Create new persona
        const templateId = (sourcePersona.template_id ?? sourcePersona.templateId);
        if (!templateId) {
            throw new Error(`Cannot create persona in ${targetEnv}: source has no template_id`);
        }
        // Clean source description and add compact sync tag
        const cleanSourceDescCreate = getCleanDescription(sourcePersona.description);
        const descriptionWithTagCreate = buildDescriptionWithSyncTag(cleanSourceDescCreate, sourceEnv, sourcePersona.id);
        // Clean proto_config description
        const protoConfigCleanCreate = {
            ...protoConfig,
            description: getCleanDescription(protoConfig.description),
        };
        const createResult = await targetClient.createAiEmployee({
            name: sourcePersona.name ?? "Unnamed Persona",
            description: descriptionWithTagCreate,
            template_id: templateId,
            proto_config: protoConfigCleanCreate,
            welcome_messages: sourcePersona.welcome_messages,
            trigger_type: sourcePersona.trigger_type,
        });
        targetPersonaId = createResult.persona_id ?? createResult.id;
        created = true;
        // Follow-up: Now that we have target persona ID, sync the workflow
        if (sourcePersona.workflow_def && targetPersonaId) {
            const workflow = transformWorkflowForTarget(sourcePersona.workflow_def, sourcePersona.id, targetPersonaId);
            // Update with transformed workflow
            await targetClient.updateAiEmployee({
                persona_id: targetPersonaId,
                proto_config: protoConfigCleanCreate,
                workflow,
            });
        }
    }
    return {
        success: true,
        source_env: sourceEnv,
        target_env: targetEnv,
        source_persona: { id: sourcePersona.id, name: sourcePersona.name },
        target_persona: { id: targetPersonaId, name: sourcePersona.name, created },
        fingerprint: fp,
    };
}
/**
 * Config-less sync all: find all personas with sync tags in target env and re-sync from source
 */
async function directSyncAll(opts) {
    const { targetEnv, dryRun } = opts;
    const targetClient = createClient(targetEnv);
    // Find all personas with sync tags
    const personas = await targetClient.getPersonasForTenant();
    const syncedPersonas = [];
    for (const p of personas) {
        const meta = targetClient.getSyncMetadata(p);
        if (meta) {
            syncedPersonas.push({ persona: p, meta });
        }
    }
    const results = [];
    const errors = [];
    let synced = 0;
    let skipped = 0;
    for (const { persona, meta } of syncedPersonas) {
        try {
            // Get source client for this persona's master env
            const sourceClient = createClient(meta.master_env);
            // Fetch full source persona
            const sourcePersona = await sourceClient.getPersonaById(meta.master_id);
            if (!sourcePersona) {
                errors.push(`Source persona not found: ${meta.master_id} in ${meta.master_env}`);
                continue;
            }
            const result = await directSyncPersonaImpl({
                sourcePersona,
                sourceEnv: meta.master_env,
                targetEnv,
                sourceClient,
                targetClient,
                dryRun,
            });
            results.push(result);
            if (result.error?.includes("Already in sync")) {
                skipped++;
            }
            else {
                synced++;
            }
        }
        catch (e) {
            errors.push(`${persona.name}: ${e instanceof Error ? e.message : String(e)}`);
        }
    }
    return {
        target_env: targetEnv,
        scanned: syncedPersonas.length,
        synced,
        skipped,
        errors,
        results,
    };
}
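// Result sketch (editor's illustration; the counts and environment name are hypothetical):
// directSyncAll({ targetEnv: "staging", dryRun: false }) resolves to something like
//   { target_env: "staging", scanned: 5, synced: 3, skipped: 2, errors: [], results: [...] }
// where "skipped" counts replicas whose master_fingerprint already matched the source persona.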
// Helper to add env parameter to schema - returns Tool-compatible inputSchema
function withEnvParam(props, required = []) {
    const envs = getAvailableEnvironments();
    const envNames = envs.map((e) => e.name);
    return {
        type: "object",
        properties: {
            ...props,
            env: {
                type: "string",
                description: `Target environment. Available: ${envNames.join(", ")}. Default: ${getDefaultEnvName()}`,
            },
        },
        required,
    };
}
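// Example (editor's sketch): withEnvParam({ persona_id: { type: "string" } }, ["persona_id"]) yields an
// inputSchema whose properties are persona_id plus the injected env string property, with only
// persona_id listed as required.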
|
|
957
|
+
// ─────────────────────────────────────────────────────────────────────────────
|
|
958
|
+
// Tool Definitions
|
|
959
|
+
// ─────────────────────────────────────────────────────────────────────────────
|
|
960
|
+
//
|
|
961
|
+
// TOOL STRUCTURE:
|
|
962
|
+
// 1. CONSOLIDATED TOOLS (9 tools) - Unix CLI pattern with flags
|
|
963
|
+
// - env, persona, workflow, action, template, knowledge, reference, sync, demo
|
|
964
|
+
// - Defined in: ./tools-consolidated.ts
|
|
965
|
+
//
|
|
966
|
+
// 2. LEGACY TOOLS (40+ tools) - Old separate tools for backwards compatibility
|
|
967
|
+
// - Defined in: ./tools-legacy.ts
|
|
968
|
+
// - Enable with EMA_ENABLE_LEGACY_TOOLS=true
|
|
969
|
+
//
|
|
970
|
+
// NAMING CONVENTION:
|
|
971
|
+
// - Tool names are defined as BASE NAMES (e.g., "persona", "list_ai_employees")
|
|
972
|
+
// - MCP clients prefix with "mcp_{server}_" (e.g., "mcp_ema_persona")
|
|
973
|
+
//
|
|
974
|
+
// ─────────────────────────────────────────────────────────────────────────────
|
|
975
|
+
/**
|
|
976
|
+
* Generate all available tools based on configuration
|
|
977
|
+
*
|
|
978
|
+
* DEFAULT: Only consolidated tools (9 tools, Unix CLI pattern)
|
|
979
|
+
* OPTIONAL: Set EMA_ENABLE_LEGACY_TOOLS=true to add deprecated tools
|
|
980
|
+
*
|
|
981
|
+
* Why consolidated-only by default:
|
|
982
|
+
* - Prevents LLM confusion from having multiple tools that do the same thing
|
|
983
|
+
* - Ensures consistent tool usage patterns
|
|
984
|
+
* - Reduces context window waste from duplicate tool definitions
|
|
985
|
+
*/
|
|
986
|
+
function generateAllTools() {
|
|
987
|
+
const envNames = getAvailableEnvironments().map(e => e.name);
|
|
988
|
+
const defaultEnv = getDefaultEnvName();
|
|
989
|
+
// Always include consolidated tools (the new unified interface)
|
|
990
|
+
const consolidatedTools = generateConsolidatedTools(envNames, defaultEnv);
|
|
991
|
+
// Only include legacy tools if explicitly enabled (for migration period)
|
|
992
|
+
if (ENABLE_LEGACY_TOOLS) {
|
|
993
|
+
console.error("[MCP] WARNING: Legacy tools enabled via EMA_ENABLE_LEGACY_TOOLS=true");
|
|
994
|
+
console.error("[MCP] This is deprecated and will be removed. Migrate to consolidated tools.");
|
|
995
|
+
const legacyTools = generateLegacyTools(envNames, defaultEnv);
|
|
996
|
+
// Consolidated tools first (preferred), legacy tools after
|
|
997
|
+
return [...consolidatedTools, ...legacyTools];
|
|
998
|
+
}
|
|
999
|
+
return consolidatedTools;
|
|
1000
|
+
}
|
|
1001
|
+
// Generate tools (called once at module load)
|
|
1002
|
+
const TOOLS = generateAllTools();
|
|
1003
|
+
// ─────────────────────────────────────────────────────────────────────────────
|
|
1004
|
+
// ADDITIONAL TOOLS (not in consolidated or legacy)
|
|
1005
|
+
// These are special-purpose tools that don't fit the standard patterns
|
|
1006
|
+
// ─────────────────────────────────────────────────────────────────────────────
|
|
1007
|
+
const ADDITIONAL_TOOLS = [
|
|
1008
|
+
// ─────────────────────────────────────────────────────────────────────────
|
|
1009
|
+
// NOTE: Legacy tools (get_persona, find_personas, create_ai_employee, etc.)
|
|
1010
|
+
// have been moved to ./tools-legacy.ts for backwards compatibility.
|
|
1011
|
+
//
|
|
1012
|
+
// Consolidated tools (persona, workflow, action, etc.) are generated from
|
|
1013
|
+
// ./tools-consolidated.ts following Unix CLI patterns.
|
|
1014
|
+
//
|
|
1015
|
+
// Only special-purpose tools remain here:
|
|
1016
|
+
// - "workflow" (unified tool) - experimental, bridges old and new patterns
|
|
1017
|
+
// - Demo data tools - special-purpose data generation
|
|
1018
|
+
// ─────────────────────────────────────────────────────────────────────────
|
|
1019
|
+
// INLINE LEGACY TOOLS REMOVED
|
|
1020
|
+
// All legacy tools (get_workflow_action, find_workflow_actions, compare_ai_employees,
|
|
1021
|
+
// list_ai_employee_templates, sync, sync_info, list_auto_builder_agents, etc.)
|
|
1022
|
+
// have been moved to ./tools-legacy.ts
|
|
1023
|
+
// ─────────────────────────────────────────────────────────────────────────
|
|
1024
|
+
// ─────────────────────────────────────────────────────────────────────────────
|
|
1025
|
+
// LEGACY INLINE TOOLS REMOVED - See ./tools-legacy.ts
|
|
1026
|
+
// ─────────────────────────────────────────────────────────────────────────────
|
|
1027
|
+
// The following tools have been removed from this file and are now generated
|
|
1028
|
+
// from tools-legacy.ts: list_auto_builder_agents, get_auto_builder_agent,
|
|
1029
|
+
// suggest_agents_for_use_case, get_workflow_pattern, list_workflow_patterns,
|
|
1030
|
+
// check_type_compatibility, get_widget_reference, get_qualifying_questions,
|
|
1031
|
+
// get_voice_persona_template, validate_workflow_prompt, get_auto_builder_guidance,
|
|
1032
|
+
// get_platform_concept, list_platform_concepts, get_common_mistakes,
|
|
1033
|
+
// get_debug_checklist, get_workflow_execution_model, analyze_workflow,
|
|
1034
|
+
// detect_workflow_issues, validate_workflow_connections, suggest_workflow_fixes,
|
|
1035
|
+
// compare_workflow_versions, get_workflow_metrics, compile_workflow,
|
|
1036
|
+
// upload_data_source, delete_data_source, list_data_sources, get_embedding_status,
|
|
1037
|
+
// toggle_embedding
|
|
1038
|
+
// ─────────────────────────────────────────────────────────────────────────────
|
|
1039
|
+
// The remaining legacy tools are handled by the LEGACY_TOOL_MAPPING in
|
|
1040
|
+
// tools-consolidated.ts for routing to consolidated handlers.
|
|
1041
|
+
//
|
|
1042
|
+
// INLINE DEFINITIONS REMOVED - They are now in tools-legacy.ts
|
|
1043
|
+
// (analyze_workflow, detect_workflow_issues, validate_workflow_connections, suggest_workflow_fixes)
|
|
1044
|
+
// (compare_workflow_versions, get_workflow_metrics) - moved to tools-legacy.ts
|
|
1045
|
+
// ─────────────────────────────────────────────────────────────────────────────
|
|
1046
|
+
// Workflow Compilation - Template-driven, no biased patterns
|
|
1047
|
+
// Read ema://catalog/patterns for pattern references, then construct nodes
|
|
1048
|
+
// ─────────────────────────────────────────────────────────────────────────────
|
|
1049
|
+
{
|
|
1050
|
+
name: "compile_workflow",
|
|
1051
|
+
description: `🔧 Compile workflow from node specification. Template-driven - read \`ema://catalog/patterns\` for pattern templates, construct nodes, then compile.
|
|
1052
|
+
|
|
1053
|
+
**Process**:
|
|
1054
|
+
1. Read \`ema://catalog/patterns\` for pattern reference
|
|
1055
|
+
2. Read \`ema://catalog/agents\` for available actions
|
|
1056
|
+
3. Construct nodes array based on requirements
|
|
1057
|
+
4. Call compile_workflow with nodes
|
|
1058
|
+
5. Use deploy_workflow to deploy result
|
|
1059
|
+
|
|
1060
|
+
**Example nodes** (KB search):
|
|
1061
|
+
\`\`\`json
|
|
1062
|
+
[
|
|
1063
|
+
{ "id": "trigger", "action_type": "chat_trigger", "display_name": "Trigger" },
|
|
1064
|
+
{ "id": "search", "action_type": "search", "display_name": "Search", "inputs": { "query": { "type": "action_output", "action_name": "trigger", "output": "user_query" } } },
|
|
1065
|
+
{ "id": "respond", "action_type": "respond_with_sources", "display_name": "Respond", "inputs": { "search_results": { "type": "action_output", "action_name": "search", "output": "search_results" } } }
|
|
1066
|
+
]
|
|
1067
|
+
\`\`\``,
|
|
1068
|
+
inputSchema: {
|
|
1069
|
+
type: "object",
|
|
1070
|
+
properties: {
|
|
1071
|
+
name: { type: "string", description: "Workflow name" },
|
|
1072
|
+
description: { type: "string", description: "Workflow description" },
|
|
1073
|
+
persona_type: { type: "string", enum: ["voice", "chat", "dashboard"], description: "AI type" },
|
|
1074
|
+
nodes: {
|
|
1075
|
+
type: "array",
|
|
1076
|
+
description: "Node definitions",
|
|
1077
|
+
items: {
|
|
1078
|
+
type: "object",
|
|
1079
|
+
properties: {
|
|
1080
|
+
id: { type: "string", description: "Node ID" },
|
|
1081
|
+
action_type: { type: "string", description: "Action type (e.g., chat_trigger, search, respond_with_sources)" },
|
|
1082
|
+
display_name: { type: "string", description: "Display name" },
|
|
1083
|
+
description: { type: "string", description: "Optional description" },
|
|
1084
|
+
inputs: { type: "object", description: "Input bindings (key: input name, value: binding spec)" },
|
|
1085
|
+
run_if: {
|
|
1086
|
+
type: "object",
|
|
1087
|
+
description: "Conditional execution",
|
|
1088
|
+
properties: {
|
|
1089
|
+
source_action: { type: "string" },
|
|
1090
|
+
source_output: { type: "string", description: "Output name to check" },
|
|
1091
|
+
operator: { type: "string", enum: ["eq", "neq", "gt", "lt", "gte", "lte"], description: "Comparison operator" },
|
|
1092
|
+
value: { type: "string", description: "Value to compare against" },
|
|
1093
|
+
},
|
|
1094
|
+
},
|
|
1095
|
+
categories: {
|
|
1096
|
+
type: "array",
|
|
1097
|
+
description: "Categories for categorizer nodes",
|
|
1098
|
+
items: {
|
|
1099
|
+
type: "object",
|
|
1100
|
+
properties: {
|
|
1101
|
+
name: { type: "string", description: "Category name (e.g., 'Password Reset', 'Fallback')" },
|
|
1102
|
+
description: { type: "string", description: "When this category triggers" },
|
|
1103
|
+
examples: { type: "array", items: { type: "string" }, description: "Example phrases" },
|
|
1104
|
+
},
|
|
1105
|
+
},
|
|
1106
|
+
},
|
|
1107
|
+
tools: {
|
|
1108
|
+
type: "array",
|
|
1109
|
+
description: "External tools for external_action_caller nodes",
|
|
1110
|
+
items: {
|
|
1111
|
+
type: "object",
|
|
1112
|
+
properties: {
|
|
1113
|
+
name: { type: "string", description: "Tool name" },
|
|
1114
|
+
namespace: { type: "string", description: "Tool namespace" },
|
|
1115
|
+
},
|
|
1116
|
+
},
|
|
1117
|
+
},
|
|
1118
|
+
disable_human_interaction: { type: "boolean", description: "If true, disable human-in-the-loop (HITL) interaction for this node" },
|
|
1119
|
+
},
|
|
1120
|
+
required: ["id", "action_type", "display_name"],
|
|
1121
|
+
},
|
|
1122
|
+
},
|
|
1123
|
+
result_mappings: {
|
|
1124
|
+
type: "array",
|
|
1125
|
+
description: "Which node outputs map to WORKFLOW_OUTPUT",
|
|
1126
|
+
items: {
|
|
1127
|
+
type: "object",
|
|
1128
|
+
properties: {
|
|
1129
|
+
node_id: { type: "string", description: "Node ID" },
|
|
1130
|
+
output: { type: "string", description: "Output name from the node" },
|
|
1131
|
+
},
|
|
1132
|
+
required: ["node_id", "output"],
|
|
1133
|
+
},
|
|
1134
|
+
},
|
|
1135
|
+
},
|
|
1136
|
+
required: ["name", "description", "persona_type", "nodes", "result_mappings"],
|
|
1137
|
+
},
|
|
1138
|
+
},
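For concreteness, the sketch below assembles a complete arguments object for this tool, matching the schema above; the node IDs, the `result_mappings` output name, and the workflow metadata are illustrative placeholders rather than values defined by this package.

```js
// Hypothetical example arguments for compile_workflow, following the schema above.
// Node IDs, action types, and the mapped output name are illustrative only.
const compileArgs = {
  name: "KB Search Assistant",
  description: "Answers questions from the knowledge base",
  persona_type: "chat",
  nodes: [
    { id: "trigger", action_type: "chat_trigger", display_name: "Trigger" },
    {
      id: "search",
      action_type: "search",
      display_name: "Search",
      inputs: { query: { type: "action_output", action_name: "trigger", output: "user_query" } },
    },
    {
      id: "respond",
      action_type: "respond_with_sources",
      display_name: "Respond",
      inputs: { search_results: { type: "action_output", action_name: "search", output: "search_results" } },
    },
  ],
  // result_mappings routes node outputs to WORKFLOW_OUTPUT; "response" is an assumed output name.
  result_mappings: [{ node_id: "respond", output: "response" }],
};
console.log(JSON.stringify(compileArgs, null, 2));
```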
|
|
1139
|
+
// ─────────────────────────────────────────────────────────────────────────
|
|
1140
|
+
// Data Source Management
|
|
1141
|
+
// TODO: Add support for 3rd party data sources (Google Drive, SharePoint, Confluence, etc.)
|
|
1142
|
+
// ─────────────────────────────────────────────────────────────────────────
|
|
1143
|
+
{
|
|
1144
|
+
name: "upload_data_source",
|
|
1145
|
+
description: "Upload a file from the local filesystem to an AI Employee's knowledge base. The file will be available for RAG/search if embedding is enabled. IMPORTANT: Upload data sources BEFORE deploying workflows that reference them.",
|
|
1146
|
+
inputSchema: {
|
|
1147
|
+
type: "object",
|
|
1148
|
+
properties: {
|
|
1149
|
+
persona_id: {
|
|
1150
|
+
type: "string",
|
|
1151
|
+
description: "The AI Employee ID to upload the file to",
|
|
1152
|
+
},
|
|
1153
|
+
file_path: {
|
|
1154
|
+
type: "string",
|
|
1155
|
+
description: "Absolute path to the file on the local filesystem",
|
|
1156
|
+
},
|
|
1157
|
+
tags: {
|
|
1158
|
+
type: "string",
|
|
1159
|
+
description: "Optional tags for categorizing the file (default: 'fileUpload')",
|
|
1160
|
+
},
|
|
1161
|
+
env: {
|
|
1162
|
+
type: "string",
|
|
1163
|
+
description: "Target environment. Available: dev, demo, staging. Default: demo",
|
|
1164
|
+
},
|
|
1165
|
+
},
|
|
1166
|
+
required: ["persona_id", "file_path"],
|
|
1167
|
+
},
|
|
1168
|
+
},
|
|
1169
|
+
{
|
|
1170
|
+
name: "delete_data_source",
|
|
1171
|
+
description: "Delete a file from an AI Employee's knowledge base.",
|
|
1172
|
+
inputSchema: {
|
|
1173
|
+
type: "object",
|
|
1174
|
+
properties: {
|
|
1175
|
+
persona_id: {
|
|
1176
|
+
type: "string",
|
|
1177
|
+
description: "The AI Employee ID",
|
|
1178
|
+
},
|
|
1179
|
+
file_id: {
|
|
1180
|
+
type: "string",
|
|
1181
|
+
description: "The file ID to delete (from list_data_sources)",
|
|
1182
|
+
},
|
|
1183
|
+
env: {
|
|
1184
|
+
type: "string",
|
|
1185
|
+
description: "Target environment. Available: dev, demo, staging. Default: demo",
|
|
1186
|
+
},
|
|
1187
|
+
},
|
|
1188
|
+
required: ["persona_id", "file_id"],
|
|
1189
|
+
},
|
|
1190
|
+
},
|
|
1191
|
+
{
|
|
1192
|
+
name: "list_data_sources",
|
|
1193
|
+
description: "List data sources (knowledge base files/documents) configured for an AI Employee, including upload status and file count.",
|
|
1194
|
+
inputSchema: {
|
|
1195
|
+
type: "object",
|
|
1196
|
+
properties: {
|
|
1197
|
+
persona_id: {
|
|
1198
|
+
type: "string",
|
|
1199
|
+
description: "The AI Employee ID to list data sources for",
|
|
1200
|
+
},
|
|
1201
|
+
env: {
|
|
1202
|
+
type: "string",
|
|
1203
|
+
description: "Target environment. Available: dev, demo, staging. Default: demo",
|
|
1204
|
+
},
|
|
1205
|
+
},
|
|
1206
|
+
required: ["persona_id"],
|
|
1207
|
+
},
|
|
1208
|
+
},
|
|
1209
|
+
{
|
|
1210
|
+
name: "get_embedding_status",
|
|
1211
|
+
description: "Get the embedding/RAG status for an AI Employee's knowledge base.",
|
|
1212
|
+
inputSchema: {
|
|
1213
|
+
type: "object",
|
|
1214
|
+
properties: {
|
|
1215
|
+
persona_id: {
|
|
1216
|
+
type: "string",
|
|
1217
|
+
description: "The AI Employee ID",
|
|
1218
|
+
},
|
|
1219
|
+
env: {
|
|
1220
|
+
type: "string",
|
|
1221
|
+
description: "Target environment. Available: dev, demo, staging. Default: demo",
|
|
1222
|
+
},
|
|
1223
|
+
},
|
|
1224
|
+
required: ["persona_id"],
|
|
1225
|
+
},
|
|
1226
|
+
},
|
|
1227
|
+
{
|
|
1228
|
+
name: "toggle_embedding",
|
|
1229
|
+
description: "Enable or disable embedding/RAG for an AI Employee's knowledge base. When enabled, uploaded documents are indexed for semantic search.",
|
|
1230
|
+
inputSchema: {
|
|
1231
|
+
type: "object",
|
|
1232
|
+
properties: {
|
|
1233
|
+
persona_id: {
|
|
1234
|
+
type: "string",
|
|
1235
|
+
description: "The AI Employee ID",
|
|
1236
|
+
},
|
|
1237
|
+
enabled: {
|
|
1238
|
+
type: "boolean",
|
|
1239
|
+
description: "Whether to enable (true) or disable (false) embedding",
|
|
1240
|
+
},
|
|
1241
|
+
env: {
|
|
1242
|
+
type: "string",
|
|
1243
|
+
description: "Target environment. Available: dev, demo, staging. Default: demo",
|
|
1244
|
+
},
|
|
1245
|
+
},
|
|
1246
|
+
required: ["persona_id", "enabled"],
|
|
1247
|
+
},
|
|
1248
|
+
},
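The data-source tool descriptions above imply an ordering: upload files, enable embedding, then confirm indexing before deploying workflows that search them. A minimal sketch, assuming a generic `callTool(name, args)` helper for whichever MCP client is in use; the persona ID and file path are placeholders.

```js
// Assumed helper: callTool(name, args) resolves to the tool's JSON result.
// The persona ID and file path are placeholders.
async function setUpKnowledgeBase(callTool, personaId) {
  // 1. Upload source documents first, so workflows deployed later can reference them.
  await callTool("upload_data_source", { persona_id: personaId, file_path: "/abs/path/faq.md" });
  // 2. Enable embedding so uploads are indexed for semantic search.
  await callTool("toggle_embedding", { persona_id: personaId, enabled: true });
  // 3. Check indexing status before relying on search in a workflow.
  return callTool("get_embedding_status", { persona_id: personaId });
}
```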
|
|
1249
|
+
// ─────────────────────────────────────────────────────────────────────────
|
|
1250
|
+
// Unified Workflow Tool
|
|
1251
|
+
// Accepts any input: natural language, partial spec, full spec, or persona_id
|
|
1252
|
+
// Normalizes to WorkflowIntent → validates → generates → deploys
|
|
1253
|
+
// ─────────────────────────────────────────────────────────────────────────
|
|
1254
|
+
{
|
|
1255
|
+
name: "workflow",
|
|
1256
|
+
description: `🔧 UNIFIED workflow tool. Accepts ANY input and normalizes it.
|
|
1257
|
+
|
|
1258
|
+
**Input types** (auto-detected):
|
|
1259
|
+
- Natural language: "IT helpdesk that creates ServiceNow tickets"
|
|
1260
|
+
- Partial spec: { intents: [...], tools: [...] }
|
|
1261
|
+
- Full nodes spec: { nodes: [...], result_mappings: [...] }
|
|
1262
|
+
- Existing persona: persona_id to analyze/improve
|
|
1263
|
+
|
|
1264
|
+
**Process**:
|
|
1265
|
+
1. Parse input → WorkflowIntent (normalized representation)
|
|
1266
|
+
2. Validate completeness → return questions if incomplete
|
|
1267
|
+
3. Generate workflow (local compile or Auto Builder)
|
|
1268
|
+
4. Validate output → auto-fix if enabled
|
|
1269
|
+
5. Deploy if persona_id provided
|
|
1270
|
+
|
|
1271
|
+
**Examples**:
|
|
1272
|
+
\`\`\`
|
|
1273
|
+
workflow("IT helpdesk bot with KB search")
|
|
1274
|
+
workflow({ intents: [{name: "Billing", handler: "search"}], tools: [{namespace: "service_now", action: "Create_Ticket"}] })
|
|
1275
|
+
workflow(persona_id, mode="improve")
|
|
1276
|
+
\`\`\``,
|
|
1277
|
+
inputSchema: withEnvParam({
|
|
1278
|
+
input: {
|
|
1279
|
+
description: "Natural language description, partial spec object, or full nodes spec",
|
|
1280
|
+
},
|
|
1281
|
+
persona_id: {
|
|
1282
|
+
type: "string",
|
|
1283
|
+
description: "For deployment OR to analyze/improve existing workflow",
|
|
1284
|
+
},
|
|
1285
|
+
mode: {
|
|
1286
|
+
type: "string",
|
|
1287
|
+
enum: ["generate", "improve", "analyze"],
|
|
1288
|
+
description: "generate (default): Create new workflow. improve: Fix existing. analyze: Validate only.",
|
|
1289
|
+
},
|
|
1290
|
+
persona_type: {
|
|
1291
|
+
type: "string",
|
|
1292
|
+
enum: ["voice", "chat", "dashboard"],
|
|
1293
|
+
description: "AI type (default: chat, auto-detected from input)",
|
|
1294
|
+
},
|
|
1295
|
+
use_autobuilder: {
|
|
1296
|
+
type: "boolean",
|
|
1297
|
+
description: "Force Auto Builder for generation (default: auto-decide based on complexity)",
|
|
1298
|
+
},
|
|
1299
|
+
auto_deploy: {
|
|
1300
|
+
type: "boolean",
|
|
1301
|
+
description: "Deploy immediately (default: false - returns preview)",
|
|
1302
|
+
},
|
|
1303
|
+
auto_fix: {
|
|
1304
|
+
type: "boolean",
|
|
1305
|
+
description: "Auto-fix detected issues (default: true)",
|
|
1306
|
+
},
|
|
1307
|
+
}, []),
|
|
1308
|
+
},
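The auto-detection described above is not spelled out in this file; the sketch below shows one plausible way the listed input shapes could be told apart, purely as an illustration of the contract rather than the package's actual logic.

```js
// Illustrative sketch of the input auto-detection the description implies.
// The branch names are assumptions, not this package's implementation.
function classifyWorkflowInput(input) {
  if (typeof input === "string") return "natural_language";
  if (input && Array.isArray(input.nodes)) return "full_nodes_spec";
  if (input && (Array.isArray(input.intents) || Array.isArray(input.tools))) return "partial_spec";
  return "unknown";
}

console.log(classifyWorkflowInput("IT helpdesk bot with KB search")); // natural_language
console.log(classifyWorkflowInput({ nodes: [], result_mappings: [] })); // full_nodes_spec
```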
|
|
1309
|
+
// ─────────────────────────────────────────────────────────────────────────
|
|
1310
|
+
// Demo Data Management
|
|
1311
|
+
// Tools for consolidating, transforming, and preparing mock data for RAG
|
|
1312
|
+
// ─────────────────────────────────────────────────────────────────────────
|
|
1313
|
+
{
|
|
1314
|
+
name: "consolidate_demo_data",
|
|
1315
|
+
description: `Transform normalized JSON source files into RAG-optimized Markdown documents for Ema knowledge bases.
|
|
1316
|
+
|
|
1317
|
+
This tool pre-joins related data (like customers + orders + tickets) into denormalized entity documents that work well with semantic search.
|
|
1318
|
+
|
|
1319
|
+
**Why this matters**: RAG systems can't do SQL-style joins at query time. Data relationships must be explicit in the document content.
|
|
1320
|
+
|
|
1321
|
+
**Supported patterns**:
|
|
1322
|
+
- Entity consolidation (customer with all their orders, tickets, contacts)
|
|
1323
|
+
- Product catalogs with cross-references
|
|
1324
|
+
- Scenario documents for demos
|
|
1325
|
+
|
|
1326
|
+
**Output format**: Markdown files with embedded tables, metadata comments, and narrative summaries optimized for Ema's search agents.`,
|
|
1327
|
+
inputSchema: {
|
|
1328
|
+
type: "object",
|
|
1329
|
+
properties: {
|
|
1330
|
+
source_dir: {
|
|
1331
|
+
type: "string",
|
|
1332
|
+
description: "Path to directory containing source JSON files (e.g., './data/source')",
|
|
1333
|
+
},
|
|
1334
|
+
output_dir: {
|
|
1335
|
+
type: "string",
|
|
1336
|
+
description: "Path to output directory for generated Markdown files (e.g., './data/knowledge-base')",
|
|
1337
|
+
},
|
|
1338
|
+
entity_type: {
|
|
1339
|
+
type: "string",
|
|
1340
|
+
enum: ["customer", "product", "employee", "scenario", "custom"],
|
|
1341
|
+
description: "Type of entity being consolidated. Determines document structure.",
|
|
1342
|
+
},
|
|
1343
|
+
primary_file: {
|
|
1344
|
+
type: "string",
|
|
1345
|
+
description: "Name of the primary JSON file (e.g., 'customers.json')",
|
|
1346
|
+
},
|
|
1347
|
+
joins: {
|
|
1348
|
+
type: "array",
|
|
1349
|
+
items: {
|
|
1350
|
+
type: "object",
|
|
1351
|
+
properties: {
|
|
1352
|
+
file: { type: "string", description: "JSON file to join (e.g., 'orders.json')" },
|
|
1353
|
+
on: { type: "string", description: "Foreign key field (e.g., 'customerId')" },
|
|
1354
|
+
as: { type: "string", description: "Name for the joined data (e.g., 'orders')" },
|
|
1355
|
+
},
|
|
1356
|
+
},
|
|
1357
|
+
description: "Array of files to join with the primary file",
|
|
1358
|
+
},
|
|
1359
|
+
id_field: {
|
|
1360
|
+
type: "string",
|
|
1361
|
+
description: "Field name for the entity ID (default: 'id')",
|
|
1362
|
+
},
|
|
1363
|
+
name_field: {
|
|
1364
|
+
type: "string",
|
|
1365
|
+
description: "Field name for the entity name (default: 'name')",
|
|
1366
|
+
},
|
|
1367
|
+
tags: {
|
|
1368
|
+
type: "string",
|
|
1369
|
+
description: "Comma-separated tags to include in document metadata",
|
|
1370
|
+
},
|
|
1371
|
+
},
|
|
1372
|
+
required: ["source_dir", "output_dir", "entity_type", "primary_file"],
|
|
1373
|
+
},
|
|
1374
|
+
},
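As a concrete illustration of the pre-joining idea, the sketch below denormalizes orders onto their customers and emits one Markdown document per customer; the field names and the metadata-comment format are assumptions, not this tool's exact output.

```js
// Minimal sketch of the pre-join idea: denormalize orders onto each customer so a
// RAG index sees the relationship in plain text. Field names and the metadata
// comment format are assumed for illustration.
function consolidateCustomers(customers, orders) {
  return customers.map((c) => {
    const own = orders.filter((o) => o.customerId === c.id);
    const rows = own.map((o) => `| ${o.id} | ${o.status} | ${o.total} |`).join("\n");
    return [
      `# Customer: ${c.name}`,
      `<!-- ema_entity: customer, ema_id: ${c.id} -->`, // assumed metadata-comment shape
      `${c.name} has ${own.length} order(s) on record.`,
      `| Order | Status | Total |`,
      `| --- | --- | --- |`,
      rows,
    ].join("\n");
  });
}
```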
|
|
1375
|
+
{
|
|
1376
|
+
name: "generate_demo_document",
|
|
1377
|
+
description: `Generate a single RAG-optimized Markdown document from provided JSON data.
|
|
1378
|
+
|
|
1379
|
+
Use this for:
|
|
1380
|
+
- Creating individual entity documents programmatically
|
|
1381
|
+
- Testing document formats before batch consolidation
|
|
1382
|
+
- Custom document generation with specific data
|
|
1383
|
+
|
|
1384
|
+
The output follows Ema's knowledge base best practices with metadata comments, tables, and narrative context.`,
|
|
1385
|
+
inputSchema: {
|
|
1386
|
+
type: "object",
|
|
1387
|
+
properties: {
|
|
1388
|
+
entity_type: {
|
|
1389
|
+
type: "string",
|
|
1390
|
+
enum: ["customer", "product", "employee", "scenario", "reference"],
|
|
1391
|
+
description: "Type of document to generate",
|
|
1392
|
+
},
|
|
1393
|
+
data: {
|
|
1394
|
+
type: "object",
|
|
1395
|
+
description: "The entity data as a JSON object",
|
|
1396
|
+
},
|
|
1397
|
+
related_data: {
|
|
1398
|
+
type: "object",
|
|
1399
|
+
description: "Related data to include (e.g., { orders: [...], tickets: [...] })",
|
|
1400
|
+
},
|
|
1401
|
+
output_path: {
|
|
1402
|
+
type: "string",
|
|
1403
|
+
description: "Optional: Path to save the generated document. If not provided, returns the content.",
|
|
1404
|
+
},
|
|
1405
|
+
tags: {
|
|
1406
|
+
type: "string",
|
|
1407
|
+
description: "Comma-separated tags for metadata",
|
|
1408
|
+
},
|
|
1409
|
+
},
|
|
1410
|
+
required: ["entity_type", "data"],
|
|
1411
|
+
},
|
|
1412
|
+
},
|
|
1413
|
+
{
|
|
1414
|
+
name: "validate_demo_document",
|
|
1415
|
+
description: `Validate a Markdown document for RAG optimization and Ema compatibility.
|
|
1416
|
+
|
|
1417
|
+
Checks for:
|
|
1418
|
+
- Required metadata comments (ema_entity, ema_id, ema_tags)
|
|
1419
|
+
- Table formatting
|
|
1420
|
+
- Narrative context presence
|
|
1421
|
+
- Cross-reference consistency
|
|
1422
|
+
- Filename conventions`,
|
|
1423
|
+
inputSchema: {
|
|
1424
|
+
type: "object",
|
|
1425
|
+
properties: {
|
|
1426
|
+
file_path: {
|
|
1427
|
+
type: "string",
|
|
1428
|
+
description: "Path to the Markdown file to validate",
|
|
1429
|
+
},
|
|
1430
|
+
content: {
|
|
1431
|
+
type: "string",
|
|
1432
|
+
description: "Alternatively, provide the document content directly",
|
|
1433
|
+
},
|
|
1434
|
+
},
|
|
1435
|
+
required: [],
|
|
1436
|
+
},
|
|
1437
|
+
},
|
|
1438
|
+
{
|
|
1439
|
+
name: "get_demo_data_template",
|
|
1440
|
+
description: `Get a template for demo data documents based on entity type.
|
|
1441
|
+
|
|
1442
|
+
Returns:
|
|
1443
|
+
- Source JSON schema (what fields to include)
|
|
1444
|
+
- Output Markdown template
|
|
1445
|
+
- Best practices for the entity type
|
|
1446
|
+
- Example data`,
|
|
1447
|
+
inputSchema: {
|
|
1448
|
+
type: "object",
|
|
1449
|
+
properties: {
|
|
1450
|
+
entity_type: {
|
|
1451
|
+
type: "string",
|
|
1452
|
+
enum: ["customer", "product", "employee", "scenario", "reference"],
|
|
1453
|
+
description: "Type of template to get",
|
|
1454
|
+
},
|
|
1455
|
+
include_example: {
|
|
1456
|
+
type: "boolean",
|
|
1457
|
+
description: "Include example data (default: true)",
|
|
1458
|
+
},
|
|
1459
|
+
},
|
|
1460
|
+
required: ["entity_type"],
|
|
1461
|
+
},
|
|
1462
|
+
},
|
|
1463
|
+
// ═══════════════════════════════════════════════════════════════════════════
|
|
1464
|
+
// CONSOLIDATED TOOLS (Unix CLI pattern)
|
|
1465
|
+
// These 9 tools follow Unix conventions: single command with flags
|
|
1466
|
+
// ═══════════════════════════════════════════════════════════════════════════
|
|
1467
|
+
...generateConsolidatedTools(getAvailableEnvironments().map(e => e.name), getDefaultEnvName()),
|
|
1468
|
+
];
|
|
1469
|
+
const toolHandlers = {
|
|
1470
|
+
// Environment Management
|
|
1471
|
+
list_environments: async () => {
|
|
1472
|
+
const envs = getAvailableEnvironments();
|
|
1473
|
+
const defaultEnv = getDefaultEnvName();
|
|
1474
|
+
return {
|
|
1475
|
+
default_environment: defaultEnv,
|
|
1476
|
+
environments: envs.map((e) => ({
|
|
1477
|
+
name: e.name,
|
|
1478
|
+
url: e.baseUrl,
|
|
1479
|
+
is_default: e.name === defaultEnv,
|
|
1480
|
+
})),
|
|
1481
|
+
};
|
|
1482
|
+
},
|
|
1483
|
+
// ─────────────────────────────────────────────────────────────────────────
|
|
1484
|
+
// AI Employee Handlers (Consolidated)
|
|
1485
|
+
// ─────────────────────────────────────────────────────────────────────────
|
|
1486
|
+
get_persona: async (args) => {
|
|
1487
|
+
const client = createClient(args.env);
|
|
1488
|
+
const identifier = String(args.identifier);
|
|
1489
|
+
const includeWorkflow = args.include_workflow === true;
|
|
1490
|
+
const includeFingerprint = args.include_fingerprint === true;
|
|
1491
|
+
// Auto-detect: UUIDs are 36 chars with dashes, otherwise it's a name
|
|
1492
|
+
const isUUID = /^[0-9a-f]{8}-[0-9a-f]{4}-[0-9a-f]{4}-[0-9a-f]{4}-[0-9a-f]{12}$/i.test(identifier);
|
|
1493
|
+
let persona;
|
|
1494
|
+
if (isUUID) {
|
|
1495
|
+
// Fetch by ID - use full fetch if workflow needed, otherwise list
|
|
1496
|
+
if (includeWorkflow || includeFingerprint) {
|
|
1497
|
+
persona = (await client.getPersonaById(identifier)) ?? undefined;
|
|
1498
|
+
}
|
|
1499
|
+
else {
|
|
1500
|
+
const personas = await client.getPersonasForTenant();
|
|
1501
|
+
persona = personas.find((p) => p.id === identifier);
|
|
1502
|
+
}
|
|
1503
|
+
}
|
|
1504
|
+
else {
|
|
1505
|
+
// Fetch by name - always need to list first to find ID
|
|
1506
|
+
const personas = await client.getPersonasForTenant();
|
|
1507
|
+
persona = personas.find((p) => p.name === identifier);
|
|
1508
|
+
// If found and need workflow, fetch full details
|
|
1509
|
+
if (persona && (includeWorkflow || includeFingerprint)) {
|
|
1510
|
+
persona = (await client.getPersonaById(persona.id)) ?? undefined;
|
|
1511
|
+
}
|
|
1512
|
+
}
|
|
1513
|
+
if (!persona) {
|
|
1514
|
+
throw new Error(`AI Employee not found: ${identifier} (searched by ${isUUID ? "ID" : "name"})`);
|
|
1515
|
+
}
|
|
1516
|
+
// Build response
|
|
1517
|
+
const result = {
|
|
1518
|
+
environment: client["env"].name,
|
|
1519
|
+
ai_employee: persona,
|
|
1520
|
+
};
|
|
1521
|
+
// Add fingerprint if requested
|
|
1522
|
+
if (includeFingerprint) {
|
|
1523
|
+
result.fingerprint = fingerprintPersona(persona);
|
|
1524
|
+
}
|
|
1525
|
+
return result;
|
|
1526
|
+
},
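As a usage note, the same handler serves both lookup styles mentioned in the comments above; the sketch below assumes a `callTool(name, args)` helper and uses placeholder values.

```js
// Assumed helper: callTool(name, args) resolves to the tool's JSON result.
// Both calls reach the same handler; the name and UUID below are placeholders.
async function getPersonaExamples(callTool) {
  const byName = await callTool("get_persona", { identifier: "Support Bot" });
  const byId = await callTool("get_persona", {
    identifier: "123e4567-e89b-12d3-a456-426614174000",
    include_workflow: true, // also return the full workflow definition
  });
  return { byName, byId };
}
```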
|
|
1527
|
+
find_personas: async (args) => {
|
|
1528
|
+
const client = createClient(args.env);
|
|
1529
|
+
let personas = await client.getPersonasForTenant();
|
|
1530
|
+
// Apply filters
|
|
1531
|
+
if (args.query) {
|
|
1532
|
+
const q = String(args.query).toLowerCase();
|
|
1533
|
+
personas = personas.filter((p) => p.name?.toLowerCase().includes(q));
|
|
1534
|
+
}
|
|
1535
|
+
if (args.status) {
|
|
1536
|
+
const f = String(args.status).toLowerCase();
|
|
1537
|
+
personas = personas.filter((p) => p.status?.toLowerCase() === f);
|
|
1538
|
+
}
|
|
1539
|
+
if (args.trigger_type) {
|
|
1540
|
+
const f = String(args.trigger_type).toLowerCase();
|
|
1541
|
+
personas = personas.filter((p) => p.trigger_type?.toLowerCase() === f);
|
|
1542
|
+
}
|
|
1543
|
+
if (args.access_level) {
|
|
1544
|
+
const f = String(args.access_level).toLowerCase();
|
|
1545
|
+
personas = personas.filter((p) => p.access_level?.toLowerCase() === f);
|
|
1546
|
+
}
|
|
1547
|
+
if (typeof args.has_workflow === "boolean") {
|
|
1548
|
+
personas = personas.filter((p) => args.has_workflow ? !!p.workflow_id : !p.workflow_id);
|
|
1549
|
+
}
|
|
1550
|
+
if (typeof args.embedding_enabled === "boolean") {
|
|
1551
|
+
personas = personas.filter((p) => p.embedding_enabled === args.embedding_enabled);
|
|
1552
|
+
}
|
|
1553
|
+
const limit = typeof args.limit === "number" ? args.limit : 50;
|
|
1554
|
+
personas = personas.slice(0, limit);
|
|
1555
|
+
return {
|
|
1556
|
+
environment: client["env"].name,
|
|
1557
|
+
count: personas.length,
|
|
1558
|
+
ai_employees: personas.map((p) => ({
|
|
1559
|
+
id: p.id,
|
|
1560
|
+
name: p.name,
|
|
1561
|
+
description: p.description,
|
|
1562
|
+
status: p.status,
|
|
1563
|
+
template_id: p.template_id ?? p.templateId,
|
|
1564
|
+
workflow_id: p.workflow_id,
|
|
1565
|
+
trigger_type: p.trigger_type,
|
|
1566
|
+
access_level: p.access_level,
|
|
1567
|
+
embedding_enabled: p.embedding_enabled,
|
|
1568
|
+
})),
|
|
1569
|
+
};
|
|
1570
|
+
},
|
|
1571
|
+
// ─────────────────────────────────────────────────────────────────────────
|
|
1572
|
+
// AI Employee CRUD Handlers
|
|
1573
|
+
// ─────────────────────────────────────────────────────────────────────────
|
|
1574
|
+
create_ai_employee: async (args) => {
|
|
1575
|
+
const client = createClient(args.env);
|
|
1576
|
+
// Default template IDs for common persona types
|
|
1577
|
+
const DEFAULT_TEMPLATES = {
|
|
1578
|
+
voice: "00000000-0000-0000-0000-00000000001e", // Voice AI template
|
|
1579
|
+
chat: "00000000-0000-0000-0000-000000000004", // Chat AI template
|
|
1580
|
+
dashboard: "00000000-0000-0000-0000-000000000002", // Dashboard AI template
|
|
1581
|
+
};
|
|
1582
|
+
// Determine template_id: explicit > default based on persona_type
|
|
1583
|
+
let templateId = args.template_id ? String(args.template_id) : undefined;
|
|
1584
|
+
const sourcePersonaId = args.source_persona_id ? String(args.source_persona_id) : undefined;
|
|
1585
|
+
// If no template_id or source_persona_id, use default template based on persona_type
|
|
1586
|
+
if (!templateId && !sourcePersonaId) {
|
|
1587
|
+
const personaType = args.persona_type ? String(args.persona_type).toLowerCase() : null;
|
|
1588
|
+
if (personaType && DEFAULT_TEMPLATES[personaType]) {
|
|
1589
|
+
templateId = DEFAULT_TEMPLATES[personaType];
|
|
1590
|
+
}
|
|
1591
|
+
else {
|
|
1592
|
+
throw new Error("Must provide template_id, source_persona_id, or persona_type ('voice', 'chat', 'dashboard')");
|
|
1593
|
+
}
|
|
1594
|
+
}
|
|
1595
|
+
const req = {
|
|
1596
|
+
name: String(args.name),
|
|
1597
|
+
description: args.description ? String(args.description) : undefined,
|
|
1598
|
+
template_id: templateId,
|
|
1599
|
+
source_persona_id: sourcePersonaId,
|
|
1600
|
+
// Note: trigger_type is determined by template, not passed separately
|
|
1601
|
+
};
|
|
1602
|
+
const result = await client.createAiEmployee(req);
|
|
1603
|
+
return {
|
|
1604
|
+
environment: client["env"].name,
|
|
1605
|
+
success: true,
|
|
1606
|
+
persona_id: result.persona_id ?? result.id,
|
|
1607
|
+
status: result.status ?? "created",
|
|
1608
|
+
template_used: templateId,
|
|
1609
|
+
};
|
|
1610
|
+
},
|
|
1611
|
+
update_ai_employee: async (args) => {
|
|
1612
|
+
const client = createClient(args.env);
|
|
1613
|
+
const personaId = String(args.persona_id);
|
|
1614
|
+
// Use getPersonaById for complete data (including full proto_config)
|
|
1615
|
+
const existing = await client.getPersonaById(personaId);
|
|
1616
|
+
if (!existing)
|
|
1617
|
+
throw new Error(`AI Employee not found: ${personaId}`);
|
|
1618
|
+
// Helper to validate widget names
|
|
1619
|
+
const isValidWidget = (w) => {
|
|
1620
|
+
const name = w.name;
|
|
1621
|
+
return typeof name === "string" && name.trim().length > 0;
|
|
1622
|
+
};
|
|
1623
|
+
// Merge proto_config: start with existing, overlay with provided
|
|
1624
|
+
// This ensures we don't lose existing widget configs when updating
|
|
1625
|
+
// Also sanitize existing widgets to remove any with empty names (can happen from UI bugs)
|
|
1626
|
+
let mergedProtoConfig = existing.proto_config ?? {};
|
|
1627
|
+
if (mergedProtoConfig.widgets && Array.isArray(mergedProtoConfig.widgets)) {
|
|
1628
|
+
const sanitizedWidgets = mergedProtoConfig.widgets.filter(isValidWidget);
|
|
1629
|
+
mergedProtoConfig = { ...mergedProtoConfig, widgets: sanitizedWidgets };
|
|
1630
|
+
}
|
|
1631
|
+
if (args.proto_config && typeof args.proto_config === "object") {
|
|
1632
|
+
const providedConfig = args.proto_config;
|
|
1633
|
+
// Deep merge widgets if both exist
|
|
1634
|
+
// Widgets use: name (string like "voiceSettings"), type (number like 38)
|
|
1635
|
+
// Inner configs are stored under a key matching the widget's name (e.g., widget.voiceSettings: {...})
|
|
1636
|
+
const existingWidgetsRaw = mergedProtoConfig.widgets;
|
|
1637
|
+
const newWidgetsRaw = providedConfig.widgets;
|
|
1638
|
+
if (Array.isArray(existingWidgetsRaw) || Array.isArray(newWidgetsRaw)) {
|
|
1639
|
+
// Filter using isValidWidget helper (already defined above)
|
|
1640
|
+
const existingWidgets = (existingWidgetsRaw ?? []).filter(isValidWidget);
|
|
1641
|
+
const newWidgets = (newWidgetsRaw ?? []).filter(isValidWidget);
|
|
1642
|
+
// Merge widgets by 'name' field (e.g., "voiceSettings", "conversationSettings")
|
|
1643
|
+
const widgetMap = new Map();
|
|
1644
|
+
for (const w of existingWidgets) {
|
|
1645
|
+
const key = String(w.name);
|
|
1646
|
+
widgetMap.set(key, { ...w });
|
|
1647
|
+
}
|
|
1648
|
+
for (const w of newWidgets) {
|
|
1649
|
+
const key = String(w.name);
|
|
1650
|
+
const existingWidget = widgetMap.get(key);
|
|
1651
|
+
if (existingWidget) {
|
|
1652
|
+
// Deep merge inner config object (named same as widget's 'name')
|
|
1653
|
+
// e.g., for widget with name="voiceSettings", merge widget.voiceSettings
|
|
1654
|
+
const widgetName = String(w.name);
|
|
1655
|
+
const innerConfig = w[widgetName];
|
|
1656
|
+
const existingInner = existingWidget[widgetName];
|
|
1657
|
+
const mergedWidget = {
|
|
1658
|
+
...existingWidget,
|
|
1659
|
+
...w,
|
|
1660
|
+
};
|
|
1661
|
+
// Deep merge the inner config if both exist
|
|
1662
|
+
if (innerConfig && existingInner) {
|
|
1663
|
+
mergedWidget[widgetName] = { ...existingInner, ...innerConfig };
|
|
1664
|
+
}
|
|
1665
|
+
widgetMap.set(key, mergedWidget);
|
|
1666
|
+
}
|
|
1667
|
+
else {
|
|
1668
|
+
widgetMap.set(key, { ...w });
|
|
1669
|
+
}
|
|
1670
|
+
}
|
|
1671
|
+
// Only include widgets array if we have valid widgets
|
|
1672
|
+
const mergedWidgets = Array.from(widgetMap.values());
|
|
1673
|
+
if (mergedWidgets.length > 0) {
|
|
1674
|
+
mergedProtoConfig = { ...mergedProtoConfig, ...providedConfig, widgets: mergedWidgets };
|
|
1675
|
+
}
|
|
1676
|
+
else {
|
|
1677
|
+
// No valid widgets - exclude the widgets field entirely
|
|
1678
|
+
const { widgets: _unused, ...restProvidedConfig } = providedConfig;
|
|
1679
|
+
mergedProtoConfig = { ...mergedProtoConfig, ...restProvidedConfig };
|
|
1680
|
+
}
|
|
1681
|
+
}
|
|
1682
|
+
else {
|
|
1683
|
+
// Simple shallow merge for non-widget fields
|
|
1684
|
+
mergedProtoConfig = { ...mergedProtoConfig, ...providedConfig };
|
|
1685
|
+
}
|
|
1686
|
+
}
|
|
1687
|
+
// Check if user is trying to pass workflow - redirect them to deploy_workflow
|
|
1688
|
+
if (args.workflow) {
|
|
1689
|
+
return {
|
|
1690
|
+
environment: client["env"].name,
|
|
1691
|
+
success: false,
|
|
1692
|
+
error: "workflow_parameter_deprecated",
|
|
1693
|
+
message: "The 'workflow' parameter has been removed from update_ai_employee. Use deploy_workflow instead - it provides validation, auto-fix, and better error handling for workflow changes.",
|
|
1694
|
+
suggestion: {
|
|
1695
|
+
tool: "deploy_workflow",
|
|
1696
|
+
parameters: {
|
|
1697
|
+
persona_id: personaId,
|
|
1698
|
+
workflow_def: "your_workflow_here",
|
|
1699
|
+
validate_first: true,
|
|
1700
|
+
auto_fix: true,
|
|
1701
|
+
},
|
|
1702
|
+
},
|
|
1703
|
+
};
|
|
1704
|
+
}
|
|
1705
|
+
const req = {
|
|
1706
|
+
persona_id: personaId,
|
|
1707
|
+
name: args.name ? String(args.name) : undefined,
|
|
1708
|
+
description: args.description ? String(args.description) : undefined,
|
|
1709
|
+
proto_config: mergedProtoConfig,
|
|
1710
|
+
embedding_enabled: typeof args.embedding_enabled === "boolean" ? args.embedding_enabled : undefined,
|
|
1711
|
+
enabled_by_user: typeof args.enabled_by_user === "boolean" ? args.enabled_by_user : undefined,
|
|
1712
|
+
};
|
|
1713
|
+
await client.updateAiEmployee(req);
|
|
1714
|
+
return {
|
|
1715
|
+
environment: client["env"].name,
|
|
1716
|
+
success: true,
|
|
1717
|
+
persona_id: personaId,
|
|
1718
|
+
persona_name: existing.name,
|
|
1719
|
+
updated_fields: {
|
|
1720
|
+
name: !!args.name,
|
|
1721
|
+
description: !!args.description,
|
|
1722
|
+
proto_config: !!args.proto_config,
|
|
1723
|
+
embedding_enabled: typeof args.embedding_enabled === "boolean",
|
|
1724
|
+
enabled_by_user: typeof args.enabled_by_user === "boolean",
|
|
1725
|
+
},
|
|
1726
|
+
note: "For workflow changes, use deploy_workflow which provides validation and auto-fix.",
|
|
1727
|
+
};
|
|
1728
|
+
},
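A small worked example of the widget merge performed above, using made-up voice settings: widgets are matched by `name`, and the nested config stored under that same name is merged field by field so untouched settings survive the update.

```js
// Worked example of the merge-by-name behavior above (sample data only).
// A widget keeps its settings under a key equal to its own name.
const existingWidget = { name: "voiceSettings", type: 38, voiceSettings: { voice: "alloy", speed: 1.0 } };
const incomingWidget = { name: "voiceSettings", voiceSettings: { speed: 1.2 } };

const mergedWidget = {
  ...existingWidget,
  ...incomingWidget,
  voiceSettings: { ...existingWidget.voiceSettings, ...incomingWidget.voiceSettings },
};
// mergedWidget.voiceSettings => { voice: "alloy", speed: 1.2 } - the untouched field survives
```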
|
|
1729
|
+
deploy_workflow: async (args) => {
|
|
1730
|
+
const client = createClient(args.env);
|
|
1731
|
+
const personaId = String(args.persona_id);
|
|
1732
|
+
const validateFirst = args.validate_first !== false; // default true
|
|
1733
|
+
const autoFix = args.auto_fix === true; // default false
|
|
1734
|
+
// Get existing persona with full details
|
|
1735
|
+
const persona = await client.getPersonaById(personaId);
|
|
1736
|
+
if (!persona)
|
|
1737
|
+
throw new Error(`AI Employee not found: ${personaId}`);
|
|
1738
|
+
let workflowDef = args.workflow_def;
|
|
1739
|
+
const protoConfig = args.proto_config;
|
|
1740
|
+
if (!workflowDef && !protoConfig) {
|
|
1741
|
+
throw new Error("At least one of workflow_def or proto_config must be provided");
|
|
1742
|
+
}
|
|
1743
|
+
// Sanitize workflow_def to prevent server-side crashes
|
|
1744
|
+
if (workflowDef) {
|
|
1745
|
+
// Fix enumTypes - remove entries with empty/missing names (causes server panic)
|
|
1746
|
+
// EnumType structure in proto: { name: { name: { name: "string", namespaces: [] } }, options: [...] }
|
|
1747
|
+
// The backend calls FlattenNamespacedName(enum.Name.Name) which panics if Name.Name is nil
|
|
1748
|
+
const enumTypes = workflowDef.enumTypes;
|
|
1749
|
+
if (Array.isArray(enumTypes)) {
|
|
1750
|
+
const validEnumTypes = enumTypes.filter(e => {
|
|
1751
|
+
// Navigate the nested structure: e.name.name.name
|
|
1752
|
+
const outerName = e.name;
|
|
1753
|
+
const innerName = outerName?.name;
|
|
1754
|
+
const actualName = innerName?.name;
|
|
1755
|
+
// Must have the full structure with a non-empty string name
|
|
1756
|
+
return typeof actualName === "string" && actualName.trim().length > 0;
|
|
1757
|
+
});
|
|
1758
|
+
if (validEnumTypes.length > 0) {
|
|
1759
|
+
workflowDef.enumTypes = validEnumTypes;
|
|
1760
|
+
}
|
|
1761
|
+
else {
|
|
1762
|
+
// Remove empty enumTypes entirely
|
|
1763
|
+
delete workflowDef.enumTypes;
|
|
1764
|
+
}
|
|
1765
|
+
}
|
|
1766
|
+
// Ensure all actions have a 'name' field (node identifier)
|
|
1767
|
+
const actions = workflowDef.actions;
|
|
1768
|
+
if (Array.isArray(actions)) {
|
|
1769
|
+
for (const action of actions) {
|
|
1770
|
+
// If action has 'actionName' but not 'name', fix it
|
|
1771
|
+
if (!action.name && action.actionName) {
|
|
1772
|
+
action.name = action.actionName;
|
|
1773
|
+
}
|
|
1774
|
+
// Ensure name is a non-empty string
|
|
1775
|
+
if (!action.name || (typeof action.name === "string" && action.name.trim().length === 0)) {
|
|
1776
|
+
// Try to derive from action type
|
|
1777
|
+
const actionType = action.action;
|
|
1778
|
+
if (actionType?.name?.name) {
|
|
1779
|
+
action.name = `${actionType.name.name}_${actions.indexOf(action)}`;
|
|
1780
|
+
}
|
|
1781
|
+
}
|
|
1782
|
+
// CRITICAL: Normalize action structure to include required empty fields
|
|
1783
|
+
// The backend expects these fields to exist (even if empty) or it may 500
|
|
1784
|
+
if (action.typeArguments === undefined) {
|
|
1785
|
+
action.typeArguments = {};
|
|
1786
|
+
}
|
|
1787
|
+
if (action.tools === undefined) {
|
|
1788
|
+
action.tools = [];
|
|
1789
|
+
}
|
|
1790
|
+
if (action.disableHumanInteraction === undefined) {
|
|
1791
|
+
action.disableHumanInteraction = false;
|
|
1792
|
+
}
|
|
1793
|
+
// Ensure displaySettings exists and has required structure
|
|
1794
|
+
if (!action.displaySettings) {
|
|
1795
|
+
action.displaySettings = {
|
|
1796
|
+
displayName: String(action.name || ""),
|
|
1797
|
+
coordinates: { x: 0, y: 0 },
|
|
1798
|
+
description: "",
|
|
1799
|
+
showConfig: 0,
|
|
1800
|
+
};
|
|
1801
|
+
}
|
|
1802
|
+
else {
|
|
1803
|
+
const ds = action.displaySettings;
|
|
1804
|
+
if (ds.description === undefined)
|
|
1805
|
+
ds.description = "";
|
|
1806
|
+
if (ds.showConfig === undefined)
|
|
1807
|
+
ds.showConfig = 0;
|
|
1808
|
+
}
|
|
1809
|
+
// Ensure inputs exists
|
|
1810
|
+
if (action.inputs === undefined) {
|
|
1811
|
+
action.inputs = {};
|
|
1812
|
+
}
|
|
1813
|
+
// Normalize runIf operator enum: backend proto expects numeric values (e.g. 1),
|
|
1814
|
+
// but some JSON payloads use string enums (e.g. "OPERATOR_EQ") which can 500.
|
|
1815
|
+
const runIf = action.runIf;
|
|
1816
|
+
if (runIf && typeof runIf === "object") {
|
|
1817
|
+
const op = runIf.operator;
|
|
1818
|
+
if (typeof op === "string") {
|
|
1819
|
+
const opMap = {
|
|
1820
|
+
OPERATOR_EQ: 1,
|
|
1821
|
+
OPERATOR_NEQ: 2,
|
|
1822
|
+
OPERATOR_GT: 3,
|
|
1823
|
+
OPERATOR_GTE: 4,
|
|
1824
|
+
OPERATOR_LT: 5,
|
|
1825
|
+
OPERATOR_LTE: 6,
|
|
1826
|
+
OPERATOR_IN: 7,
|
|
1827
|
+
OPERATOR_NOT_IN: 8,
|
|
1828
|
+
};
|
|
1829
|
+
if (opMap[op] !== undefined)
|
|
1830
|
+
runIf.operator = opMap[op];
|
|
1831
|
+
}
|
|
1832
|
+
}
|
|
1833
|
+
}
|
|
1834
|
+
}
|
|
1835
|
+
}
|
|
1836
|
+
// Get existing workflow info
|
|
1837
|
+
const existingWorkflow = persona.workflow_def;
|
|
1838
|
+
const existingWorkflowId = persona.workflow_id;
|
|
1839
|
+
// Copy missing top-level workflow fields from existing workflow.
|
|
1840
|
+
// Some backends are strict about presence of these keys.
|
|
1841
|
+
if (workflowDef && existingWorkflow) {
|
|
1842
|
+
const copyIfMissing = (k) => {
|
|
1843
|
+
if (workflowDef[k] === undefined && existingWorkflow[k] !== undefined) {
|
|
1844
|
+
workflowDef[k] = JSON.parse(JSON.stringify(existingWorkflow[k]));
|
|
1845
|
+
}
|
|
1846
|
+
};
|
|
1847
|
+
copyIfMissing("workflowInputs");
|
|
1848
|
+
copyIfMissing("namedResults");
|
|
1849
|
+
copyIfMissing("displayName");
|
|
1850
|
+
copyIfMissing("description");
|
|
1851
|
+
copyIfMissing("namedResultsEditable");
|
|
1852
|
+
copyIfMissing("namedResultsEnabled");
|
|
1853
|
+
copyIfMissing("edges");
|
|
1854
|
+
}
|
|
1855
|
+
// Determine deployment strategy
|
|
1856
|
+
const hasExistingWorkflow = !!existingWorkflowId;
|
|
1857
|
+
let deploymentMethod = hasExistingWorkflow ? "direct_api" : "autobuilder";
|
|
1858
|
+
// Transform workflow to match target persona (same approach sync uses)
|
|
1859
|
+
// This ensures workflowName.namespaces contains the correct persona ID
|
|
1860
|
+
if (workflowDef && hasExistingWorkflow && existingWorkflow) {
|
|
1861
|
+
// Extract source persona ID from incoming workflow (if any)
|
|
1862
|
+
const incomingWfName = workflowDef.workflowName;
|
|
1863
|
+
const sourcePersonaId = incomingWfName?.name?.namespaces?.[2]; // Usually at index 2: ["ema", "templates", "<persona_id>"]
|
|
1864
|
+
if (sourcePersonaId && sourcePersonaId !== personaId) {
|
|
1865
|
+
// Workflow was generated for a different persona - transform it
|
|
1866
|
+
workflowDef = transformWorkflowForTarget(workflowDef, sourcePersonaId, personaId);
|
|
1867
|
+
}
|
|
1868
|
+
// CRITICAL: Copy the exact workflowName structure from existing workflow
|
|
1869
|
+
// The API validates that the workflow name matches exactly
|
|
1870
|
+
const existingWfName = existingWorkflow.workflowName;
|
|
1871
|
+
if (existingWfName) {
|
|
1872
|
+
workflowDef.workflowName = JSON.parse(JSON.stringify(existingWfName));
|
|
1873
|
+
}
|
|
1874
|
+
}
|
|
1875
|
+
else if (workflowDef && hasExistingWorkflow && !existingWorkflow) {
|
|
1876
|
+
// Has workflow_id but no workflow_def (edge case) - construct workflowName from workflow_id
|
|
1877
|
+
// workflow_id format: "ema.templates.<persona_id>.default" or similar
|
|
1878
|
+
const parts = existingWorkflowId.split(".");
|
|
1879
|
+
if (parts.length >= 3) {
|
|
1880
|
+
workflowDef.workflowName = {
|
|
1881
|
+
name: {
|
|
1882
|
+
namespaces: parts.slice(0, -1),
|
|
1883
|
+
name: parts[parts.length - 1],
|
|
1884
|
+
},
|
|
1885
|
+
};
|
|
1886
|
+
}
|
|
1887
|
+
}
|
|
1888
|
+
// Track fixes applied
|
|
1889
|
+
const appliedFixes = [];
|
|
1890
|
+
let fixAttempted = false;
|
|
1891
|
+
// Validate workflow if requested and provided
|
|
1892
|
+
let validationResults = { valid: true, issues: [] };
|
|
1893
|
+
if (validateFirst && workflowDef) {
|
|
1894
|
+
const analysis = analyzeWorkflow(workflowDef, {
|
|
1895
|
+
persona_id: personaId,
|
|
1896
|
+
persona_name: persona.name ?? "Unknown"
|
|
1897
|
+
});
|
|
1898
|
+
if (!analysis.validation_passed) {
|
|
1899
|
+
validationResults.valid = false;
|
|
1900
|
+
validationResults.issues = analysis.issues ?? [];
|
|
1901
|
+
// Auto-fix if enabled
|
|
1902
|
+
if (autoFix && validationResults.issues.length > 0) {
|
|
1903
|
+
fixAttempted = true;
|
|
1904
|
+
const fixResult = applyWorkflowFixes(workflowDef, validationResults.issues, persona);
|
|
1905
|
+
workflowDef = fixResult.fixedWorkflow;
|
|
1906
|
+
appliedFixes.push(...fixResult.appliedFixes);
|
|
1907
|
+
// Re-validate after fixes
|
|
1908
|
+
const reAnalysis = analyzeWorkflow(workflowDef, {
|
|
1909
|
+
persona_id: personaId,
|
|
1910
|
+
persona_name: persona.name ?? "Unknown"
|
|
1911
|
+
});
|
|
1912
|
+
validationResults.valid = reAnalysis.validation_passed;
|
|
1913
|
+
validationResults.issues = reAnalysis.issues ?? [];
|
|
1914
|
+
}
|
|
1915
|
+
}
|
|
1916
|
+
}
|
|
1917
|
+
// If validation failed and not forcing, return the issues
|
|
1918
|
+
if (!validationResults.valid) {
|
|
1919
|
+
return {
|
|
1920
|
+
environment: client["env"].name,
|
|
1921
|
+
success: false,
|
|
1922
|
+
persona_id: personaId,
|
|
1923
|
+
persona_name: persona.name,
|
|
1924
|
+
validation_failed: true,
|
|
1925
|
+
issues: validationResults.issues,
|
|
1926
|
+
auto_fix_attempted: fixAttempted,
|
|
1927
|
+
fixes_applied: appliedFixes.filter(f => f.applied),
|
|
1928
|
+
fixes_failed: appliedFixes.filter(f => !f.applied),
|
|
1929
|
+
remaining_issues: validationResults.issues.length,
|
|
1930
|
+
hint: autoFix
|
|
1931
|
+
? "Some issues could not be auto-fixed. Review the remaining issues and fix manually, or set validate_first=false to skip validation (not recommended)"
|
|
1932
|
+
: "Fix the issues above, enable auto_fix=true for automatic fixes, or set validate_first=false to skip validation (not recommended)",
|
|
1933
|
+
};
|
|
1934
|
+
}
|
|
1935
|
+
// Helper to validate widget names
|
|
1936
|
+
const isValidWidget = (w) => {
|
|
1937
|
+
const name = w.name;
|
|
1938
|
+
return typeof name === "string" && name.trim().length > 0;
|
|
1939
|
+
};
|
|
1940
|
+
// Merge proto_config with existing, preserving widgets
|
|
1941
|
+
// Widgets use: name (string like "voiceSettings"), type (number like 38)
|
|
1942
|
+
// Inner configs are stored under a key matching the widget's name
|
|
1943
|
+
// Also sanitize existing widgets to remove any with empty names (can happen from UI bugs)
|
|
1944
|
+
let mergedProtoConfig = persona.proto_config ?? {};
|
|
1945
|
+
if (mergedProtoConfig.widgets && Array.isArray(mergedProtoConfig.widgets)) {
|
|
1946
|
+
const sanitizedWidgets = mergedProtoConfig.widgets.filter(isValidWidget);
|
|
1947
|
+
mergedProtoConfig = { ...mergedProtoConfig, widgets: sanitizedWidgets };
|
|
1948
|
+
}
|
|
1949
|
+
if (protoConfig) {
|
|
1950
|
+
// Deep merge widgets if both exist
|
|
1951
|
+
const existingWidgetsRaw = mergedProtoConfig.widgets;
|
|
1952
|
+
const newWidgetsRaw = protoConfig.widgets;
|
|
1953
|
+
if (Array.isArray(existingWidgetsRaw) || Array.isArray(newWidgetsRaw)) {
|
|
1954
|
+
// Filter using isValidWidget helper
|
|
1955
|
+
const existingWidgets = (existingWidgetsRaw ?? []).filter(isValidWidget);
|
|
1956
|
+
const newWidgets = (newWidgetsRaw ?? []).filter(isValidWidget);
|
|
1957
|
+
// Merge widgets by 'name' field (e.g., "voiceSettings", "conversationSettings")
|
|
1958
|
+
const widgetMap = new Map();
|
|
1959
|
+
for (const w of existingWidgets) {
|
|
1960
|
+
const key = String(w.name);
|
|
1961
|
+
widgetMap.set(key, { ...w });
|
|
1962
|
+
}
|
|
1963
|
+
for (const w of newWidgets) {
|
|
1964
|
+
const key = String(w.name);
|
|
1965
|
+
const existingWidget = widgetMap.get(key);
|
|
1966
|
+
if (existingWidget) {
|
|
1967
|
+
// Deep merge inner config object (named same as widget's 'name')
|
|
1968
|
+
const widgetName = String(w.name);
|
|
1969
|
+
const innerConfig = w[widgetName];
|
|
1970
|
+
const existingInner = existingWidget[widgetName];
|
|
1971
|
+
const mergedWidget = {
|
|
1972
|
+
...existingWidget,
|
|
1973
|
+
...w,
|
|
1974
|
+
};
|
|
1975
|
+
// Deep merge the inner config if both exist
|
|
1976
|
+
if (innerConfig && existingInner) {
|
|
1977
|
+
mergedWidget[widgetName] = { ...existingInner, ...innerConfig };
|
|
1978
|
+
}
|
|
1979
|
+
widgetMap.set(key, mergedWidget);
|
|
1980
|
+
}
|
|
1981
|
+
else {
|
|
1982
|
+
widgetMap.set(key, { ...w });
|
|
1983
|
+
}
|
|
1984
|
+
}
|
|
1985
|
+
// Only include widgets array if we have valid widgets
|
|
1986
|
+
const mergedWidgets = Array.from(widgetMap.values());
|
|
1987
|
+
if (mergedWidgets.length > 0) {
|
|
1988
|
+
mergedProtoConfig = { ...mergedProtoConfig, ...protoConfig, widgets: mergedWidgets };
|
|
1989
|
+
}
|
|
1990
|
+
else {
|
|
1991
|
+
// No valid widgets - exclude the widgets field entirely
|
|
1992
|
+
const { widgets: _unused, ...restProtoConfig } = protoConfig;
|
|
1993
|
+
mergedProtoConfig = { ...mergedProtoConfig, ...restProtoConfig };
|
|
1994
|
+
}
|
|
1995
|
+
}
|
|
1996
|
+
else {
|
|
1997
|
+
mergedProtoConfig = { ...mergedProtoConfig, ...protoConfig };
|
|
1998
|
+
}
|
|
1999
|
+
}
|
|
2000
|
+
// Build update request
|
|
2001
|
+
const req = {
|
|
2002
|
+
persona_id: personaId,
|
|
2003
|
+
proto_config: mergedProtoConfig,
|
|
2004
|
+
workflow: workflowDef,
|
|
2005
|
+
};
|
|
2006
|
+
// Deployment attempt with automatic fallback
|
|
2007
|
+
let deployedVia = "direct_api";
|
|
2008
|
+
let autobuilderResult;
|
|
2009
|
+
if (deploymentMethod === "direct_api") {
|
|
2010
|
+
try {
|
|
2011
|
+
await client.updateAiEmployee(req);
|
|
2012
|
+
}
|
|
2013
|
+
catch (err) {
|
|
2014
|
+
const errorMessage = err instanceof Error ? err.message : String(err);
|
|
2015
|
+
// If direct API fails due to "no existing workflow", try Auto Builder
|
|
2016
|
+
if (errorMessage.includes("Cannot set persona workflow without existing workflow") && workflowDef) {
|
|
2017
|
+
deploymentMethod = "autobuilder";
|
|
2018
|
+
}
|
|
2019
|
+
else if (errorMessage.includes("Workflow name does not match")) {
|
|
2020
|
+
// This shouldn't happen with our name sync, but handle gracefully
|
|
2021
|
+
throw new Error(`Workflow deployment failed: The workflow structure may be incompatible. ` +
|
|
2022
|
+
`Please use the Ema UI Auto Builder to make changes to this persona's workflow. ` +
|
|
2023
|
+
`(Technical: ${errorMessage})`);
|
|
2024
|
+
}
|
|
2025
|
+
else {
|
|
2026
|
+
// Other API errors - surface clearly
|
|
2027
|
+
throw new Error(`Workflow deployment failed: ${errorMessage}`);
|
|
2028
|
+
}
|
|
2029
|
+
}
|
|
2030
|
+
}
|
|
2031
|
+
// Auto Builder fallback for personas without existing workflows
|
|
2032
|
+
if (deploymentMethod === "autobuilder" && workflowDef) {
|
|
2033
|
+
try {
|
|
2034
|
+
// Generate a prompt that asks the Auto Builder to deploy this specific workflow
|
|
2035
|
+
const workflowSummary = summarizeWorkflow(workflowDef);
|
|
2036
|
+
const prompt = `Deploy this workflow to the persona. The workflow has the following structure:\n\n${workflowSummary}\n\nPlease create and save this workflow.`;
|
|
2037
|
+
// Use the iterate workflow method which handles Auto Builder discovery
|
|
2038
|
+
autobuilderResult = await client.iterateWorkflow(personaId, prompt, { newConversation: true });
|
|
2039
|
+
deployedVia = "autobuilder";
|
|
2040
|
+
// Also update proto_config if provided (Auto Builder may not handle this)
|
|
2041
|
+
if (protoConfig) {
|
|
2042
|
+
await client.updateAiEmployee({
|
|
2043
|
+
persona_id: personaId,
|
|
2044
|
+
proto_config: mergedProtoConfig,
|
|
2045
|
+
});
|
|
2046
|
+
}
|
|
2047
|
+
}
|
|
2048
|
+
catch (autoErr) {
|
|
2049
|
+
const autoErrorMessage = autoErr instanceof Error ? autoErr.message : String(autoErr);
|
|
2050
|
+
// If Auto Builder also fails, provide clear guidance
|
|
2051
|
+
if (autoErrorMessage.includes("No Autobuilder persona found")) {
|
|
2052
|
+
throw new Error(`Cannot deploy workflow: This persona has no existing workflow, and the Ema Auto Builder is not available in this tenant. ` +
|
|
2053
|
+
`Please contact your Ema administrator to enable the Auto Builder, or create a new persona from a workflow template.`);
|
|
2054
|
+
}
|
|
2055
|
+
throw new Error(`Workflow deployment via Auto Builder failed: ${autoErrorMessage}`);
|
|
2056
|
+
}
|
|
2057
|
+
}
|
|
2058
|
+
return {
|
|
2059
|
+
environment: client["env"].name,
|
|
2060
|
+
success: true,
|
|
2061
|
+
persona_id: personaId,
|
|
2062
|
+
persona_name: persona.name,
|
|
2063
|
+
deployed: {
|
|
2064
|
+
workflow_def: !!workflowDef,
|
|
2065
|
+
proto_config: !!protoConfig,
|
|
2066
|
+
},
|
|
2067
|
+
deployment_method: deployedVia,
|
|
2068
|
+
validation_passed: validationResults.valid,
|
|
2069
|
+
auto_fix_applied: fixAttempted && appliedFixes.some(f => f.applied),
|
|
2070
|
+
fixes_applied: appliedFixes.filter(f => f.applied),
|
|
2071
|
+
autobuilder_response: autobuilderResult?.response,
|
|
2072
|
+
note: deployedVia === "autobuilder"
|
|
2073
|
+
? "Workflow deployed via Ema Auto Builder (persona had no existing workflow). Test in the Ema simulator to verify behavior."
|
|
2074
|
+
: fixAttempted && appliedFixes.some(f => f.applied)
|
|
2075
|
+
? `Workflow deployed successfully with ${appliedFixes.filter(f => f.applied).length} auto-fix(es) applied. Test in the Ema simulator to verify behavior.`
|
|
2076
|
+
: workflowDef
|
|
2077
|
+
? "Workflow deployed successfully. Test in the Ema simulator to verify behavior."
|
|
2078
|
+
: "Proto config updated successfully.",
|
|
2079
|
+
};
|
|
2080
|
+
},
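For orientation, a hedged sketch of a typical call to this handler; `callTool(name, args)` is an assumed helper that returns the tool's JSON result, and the persona ID and workflow definition are placeholders supplied by the caller.

```js
// Assumed helper: callTool(name, args) resolves to the tool's JSON result.
// The persona ID and workflow definition are placeholders.
async function deployExample(callTool, personaId, workflowDef) {
  const result = await callTool("deploy_workflow", {
    persona_id: personaId,
    workflow_def: workflowDef,
    validate_first: true, // validate before deploying (the default)
    auto_fix: true,       // opt in to auto-fixes (this handler defaults auto_fix to false)
    env: "demo",
  });
  return { via: result.deployment_method, validated: result.validation_passed };
}
```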
|
|
2081
|
+
optimize_workflow: async (args) => {
|
|
2082
|
+
const client = createClient(args.env);
|
|
2083
|
+
const identifier = args.identifier ? String(args.identifier) : undefined;
|
|
2084
|
+
const targetPersonaId = args.persona_id ? String(args.persona_id) : undefined;
|
|
2085
|
+
const prompt = args.prompt ? String(args.prompt) : undefined;
|
|
2086
|
+
const personaType = args.type ?? "chat";
|
|
2087
|
+
const preview = args.preview === true;
|
|
2088
|
+
// Validate inputs
|
|
2089
|
+
if (!identifier && !targetPersonaId && !prompt) {
|
|
2090
|
+
throw new Error("Provide either an identifier (to fix an existing workflow) or persona_id + prompt (to enhance an existing one)");
|
|
2091
|
+
}
|
|
2092
|
+
let persona = null;
|
|
2093
|
+
let workflowDef;
|
|
2094
|
+
let personaId;
|
|
2095
|
+
let enhancementPrompt = prompt; // Store prompt for enhancement logging
|
|
2096
|
+
// === ALWAYS START BY FETCHING EXISTING WORKFLOW ===
|
|
2097
|
+
// Brownfield: fix existing + apply enhancements from prompt
|
|
2098
|
+
// The prompt describes what to ADD or CHANGE, not a complete replacement
|
|
2099
|
+
const lookupId = identifier ?? targetPersonaId;
|
|
2100
|
+
const isUUID = /^[0-9a-f]{8}-[0-9a-f]{4}-[0-9a-f]{4}-[0-9a-f]{4}-[0-9a-f]{12}$/i.test(lookupId);
|
|
2101
|
+
if (isUUID) {
|
|
2102
|
+
persona = await client.getPersonaById(lookupId);
|
|
2103
|
+
}
|
|
2104
|
+
else {
|
|
2105
|
+
// Search by name
|
|
2106
|
+
const personas = await client.getPersonasForTenant();
|
|
2107
|
+
const match = personas.find((p) => p.name?.toLowerCase() === lookupId.toLowerCase() ||
|
|
2108
|
+
p.name?.toLowerCase().includes(lookupId.toLowerCase()));
|
|
2109
|
+
if (match) {
|
|
2110
|
+
persona = await client.getPersonaById(match.id);
|
|
2111
|
+
}
|
|
2112
|
+
}
|
|
2113
|
+
if (!persona) {
|
|
2114
|
+
throw new Error(`AI Employee "${lookupId}" not found. Check the name or ID.`);
|
|
2115
|
+
}
|
|
2116
|
+
personaId = persona.id;
|
|
2117
|
+
workflowDef = persona.workflow_def;
|
|
2118
|
+
if (!workflowDef) {
|
|
2119
|
+
return {
|
|
2120
|
+
success: false,
|
|
2121
|
+
persona: persona.name,
|
|
2122
|
+
status: "⚠️ No Workflow",
|
|
2123
|
+
message: "This AI Employee has no workflow. Use prompt parameter to generate one: optimize_workflow(persona_id=\"...\", prompt=\"description of what it should do\")",
|
|
2124
|
+
};
|
|
2125
|
+
}
|
|
2126
|
+
// Step 2: Analyze workflow for issues
|
|
2127
|
+
const analysis = analyzeWorkflow(workflowDef, {
|
|
2128
|
+
persona_id: personaId,
|
|
2129
|
+
persona_name: persona.name ?? "Unknown",
|
|
2130
|
+
});
|
|
2131
|
+
const issues = analysis.issues ?? [];
|
|
2132
|
+
const criticalIssues = issues.filter(i => i.severity === "critical");
|
|
2133
|
+
// If no issues, workflow is healthy
|
|
2134
|
+
if (issues.length === 0) {
|
|
2135
|
+
const result = {
|
|
2136
|
+
success: true,
|
|
2137
|
+
persona: persona.name,
|
|
2138
|
+
status: "✅ Workflow is healthy",
|
|
2139
|
+
nodes: analysis.summary?.total_nodes ?? 0,
|
|
2140
|
+
message: "No issues found - workflow is already optimized!",
|
|
2141
|
+
};
|
|
2142
|
+
// If enhancement prompt was provided, note that it can't be auto-applied
|
|
2143
|
+
if (enhancementPrompt) {
|
|
2144
|
+
result.enhancement_note = `Enhancement requested: "${enhancementPrompt}". Since there are no issues to fix, use the Ema UI Auto Builder to make this change manually.`;
|
|
2145
|
+
}
|
|
2146
|
+
return result;
|
|
2147
|
+
}
|
|
2148
|
+
// Step 3: Apply fixes
|
|
2149
|
+
let fixedWorkflow = workflowDef;
|
|
2150
|
+
const appliedFixes = [];
|
|
2151
|
+
if (issues.length > 0) {
|
|
2152
|
+
const fixResult = applyWorkflowFixes(workflowDef, issues, persona);
|
|
2153
|
+
fixedWorkflow = fixResult.fixedWorkflow;
|
|
2154
|
+
appliedFixes.push(...fixResult.appliedFixes);
|
|
2155
|
+
}
|
|
2156
|
+
// Re-analyze after fixes
|
|
2157
|
+
const postFixAnalysis = analyzeWorkflow(fixedWorkflow, {
|
|
2158
|
+
persona_id: personaId,
|
|
2159
|
+
persona_name: persona.name ?? "Unknown",
|
|
2160
|
+
});
|
|
2161
|
+
const remainingIssues = postFixAnalysis.issues ?? [];
|
|
2162
|
+
const remainingCritical = remainingIssues.filter(i => i.severity === "critical");
|
|
2163
|
+
// If preview mode, show what would happen without deploying
|
|
2164
|
+
if (preview) {
|
|
2165
|
+
const fixedCount = appliedFixes.filter(f => f.applied).length;
|
|
2166
|
+
const failedCount = appliedFixes.filter(f => !f.applied).length;
|
|
2167
|
+
const previewResult = {
|
|
2168
|
+
success: true,
|
|
2169
|
+
persona: persona.name,
|
|
2170
|
+
status: "📋 Preview - Fixes Available",
|
|
2171
|
+
mode: "optimize",
|
|
2172
|
+
nodes: fixedWorkflow.actions?.length ?? 0,
|
|
2173
|
+
found_issues: issues.length,
|
|
2174
|
+
can_fix: fixedCount,
|
|
2175
|
+
cannot_fix: failedCount,
|
|
2176
|
+
ready_to_deploy: remainingCritical.length === 0,
|
|
2177
|
+
issues: issues.length > 0 ? issues.map(i => `${i.severity === "critical" ? "❌" : "⚠️"} ${i.type}: ${i.reason}`) : ["No issues found"],
|
|
2178
|
+
fixes: appliedFixes.length > 0 ? appliedFixes.map(f => `${f.applied ? "✅" : "❌"} ${f.description}`) : [],
|
|
2179
|
+
next_step: remainingCritical.length === 0
|
|
2180
|
+
? `Run workflow(persona_id="${personaId}", mode="optimize") without preview to deploy fixes.`
|
|
2181
|
+
: `${remainingCritical.length} critical issue(s) need manual fix in Ema UI.`,
|
|
2182
|
+
};
|
|
2183
|
+
if (enhancementPrompt) {
|
|
2184
|
+
previewResult.enhancement_note = `Enhancement "${enhancementPrompt}" noted. Auto-fixes will be applied first; use Ema UI Auto Builder for the enhancement.`;
|
|
2185
|
+
}
|
|
2186
|
+
return previewResult;
|
|
2187
|
+
}
|
|
2188
|
+
// Step 4: Deploy if no critical issues remain
|
|
2189
|
+
if (remainingCritical.length > 0) {
|
|
2190
|
+
return {
|
|
2191
|
+
success: false,
|
|
2192
|
+
persona: persona.name,
|
|
2193
|
+
status: "❌ Cannot auto-fix",
|
|
2194
|
+
fixed: appliedFixes.filter(f => f.applied).length,
|
|
2195
|
+
remaining_critical: remainingCritical.length,
|
|
2196
|
+
manual_fixes_needed: remainingCritical.map(i => ({
|
|
2197
|
+
problem: i.type,
|
|
2198
|
+
details: i.reason,
|
|
2199
|
+
fix: i.recommendation,
|
|
2200
|
+
})),
|
|
2201
|
+
message: `${remainingCritical.length} issue(s) need manual fix in the Ema UI Auto Builder.`,
|
|
2202
|
+
};
|
|
2203
|
+
}
|
|
2204
|
+
// Sanitize workflow before deployment
|
|
2205
|
+
// Fix enumTypes - check nested structure
|
|
2206
|
+
const enumTypes = fixedWorkflow.enumTypes;
|
|
2207
|
+
if (Array.isArray(enumTypes)) {
|
|
2208
|
+
const validEnumTypes = enumTypes.filter(e => {
|
|
2209
|
+
const outerName = e.name;
|
|
2210
|
+
const innerName = outerName?.name;
|
|
2211
|
+
const actualName = innerName?.name;
|
|
2212
|
+
return typeof actualName === "string" && actualName.trim().length > 0;
|
|
2213
|
+
});
|
|
2214
|
+
if (validEnumTypes.length > 0) {
|
|
2215
|
+
fixedWorkflow.enumTypes = validEnumTypes;
|
|
2216
|
+
}
|
|
2217
|
+
else {
|
|
2218
|
+
delete fixedWorkflow.enumTypes;
|
|
2219
|
+
}
|
|
2220
|
+
}
|
|
2221
|
+
// Copy workflowName from existing
|
|
2222
|
+
const existingWfName = workflowDef.workflowName;
|
|
2223
|
+
if (existingWfName) {
|
|
2224
|
+
fixedWorkflow.workflowName = JSON.parse(JSON.stringify(existingWfName));
|
|
2225
|
+
}
|
|
2226
|
+
// Merge proto_config (sanitize widgets)
|
|
2227
|
+
const isValidWidget = (w) => {
|
|
2228
|
+
const name = w.name;
|
|
2229
|
+
return typeof name === "string" && name.trim().length > 0;
|
|
2230
|
+
};
|
|
2231
|
+
let mergedProtoConfig = persona.proto_config ?? {};
|
|
2232
|
+
if (mergedProtoConfig.widgets && Array.isArray(mergedProtoConfig.widgets)) {
|
|
2233
|
+
const sanitizedWidgets = mergedProtoConfig.widgets.filter(isValidWidget);
|
|
2234
|
+
mergedProtoConfig = { ...mergedProtoConfig, widgets: sanitizedWidgets };
|
|
2235
|
+
}
|
|
2236
|
+
// Build and send request
|
|
2237
|
+
const req = {
|
|
2238
|
+
persona_id: personaId,
|
|
2239
|
+
proto_config: mergedProtoConfig,
|
|
2240
|
+
workflow: fixedWorkflow,
|
|
2241
|
+
};
|
|
2242
|
+
const actionsArr = fixedWorkflow.actions ?? [];
|
|
2243
|
+
try {
|
|
2244
|
+
await client.updateAiEmployee(req);
|
|
2245
|
+
const fixedCount = appliedFixes.filter(f => f.applied).length;
|
|
2246
|
+
const deployResult = {
|
|
2247
|
+
success: true,
|
|
2248
|
+
persona: persona.name,
|
|
2249
|
+
status: "✅ Optimized & Deployed",
|
|
2250
|
+
mode: "optimized",
|
|
2251
|
+
issues_found: issues.length,
|
|
2252
|
+
issues_fixed: fixedCount,
|
|
2253
|
+
nodes: postFixAnalysis.summary?.total_nodes ?? actionsArr.length,
|
|
2254
|
+
message: fixedCount > 0
|
|
2255
|
+
? `Fixed ${fixedCount} issue(s) and deployed!`
|
|
2256
|
+
: "Deployed successfully!",
|
|
2257
|
+
next_step: "Test in the Ema simulator to verify behavior.",
|
|
2258
|
+
};
|
|
2259
|
+
if (enhancementPrompt) {
|
|
2260
|
+
deployResult.enhancement_note = `Fixes applied. Enhancement "${enhancementPrompt}" requires manual implementation via the Ema UI Auto Builder.`;
|
|
2261
|
+
}
|
|
2262
|
+
return deployResult;
|
|
2263
|
+
}
|
|
2264
|
+
catch (err) {
|
|
2265
|
+
const errorMessage = err instanceof Error ? err.message : String(err);
|
|
2266
|
+
// Extract EmaApiError body if available
|
|
2267
|
+
const apiBody = err?.body;
|
|
2268
|
+
const statusCode = err?.statusCode;
|
|
2269
|
+
return {
|
|
2270
|
+
success: false,
|
|
2271
|
+
persona: persona.name,
|
|
2272
|
+
status: "❌ Deploy Failed",
|
|
2273
|
+
error: errorMessage,
|
|
2274
|
+
status_code: statusCode,
|
|
2275
|
+
api_response: apiBody ? (() => { try { return JSON.parse(apiBody); } catch { return apiBody; } })() : undefined, // guard against non-JSON error bodies
|
|
2276
|
+
fixes_attempted: appliedFixes.filter(f => f.applied).length,
|
|
2277
|
+
suggestion: "Check the api_response field for the actual backend error.",
|
|
2278
|
+
};
|
|
2279
|
+
}
|
|
2280
|
+
},
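The preview flag above suggests a two-step pattern: preview the fixes, then re-run to deploy. A hedged sketch, again assuming a `callTool(name, args)` helper and a placeholder identifier.

```js
// Assumed helper: callTool(name, args) resolves to the tool's JSON result.
// Two-step pattern implied by the handler: preview first, then apply.
async function optimizeSafely(callTool, identifier) {
  const preview = await callTool("optimize_workflow", { identifier, preview: true });
  if (preview.ready_to_deploy === false) {
    return preview; // remaining critical issues need manual fixes in the Ema UI
  }
  return callTool("optimize_workflow", { identifier }); // apply fixes and deploy
}
```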
  // ─────────────────────────────────────────────────────────────────────────
  // Action Handlers (Consolidated)
  // ─────────────────────────────────────────────────────────────────────────
  get_workflow_action: async (args) => {
    const client = createClient(args.env);
    const identifier = String(args.identifier);
    const actions = await client.listActions();
    // Try ID first, then name
    const isUUID = /^[0-9a-f]{8}-[0-9a-f]{4}-[0-9a-f]{4}-[0-9a-f]{4}-[0-9a-f]{12}$/i.test(identifier);
    let action = isUUID
      ? actions.find((a) => a.id === identifier)
      : actions.find((a) => a.name?.toLowerCase() === identifier.toLowerCase());
    if (!action) {
      throw new Error(`Action not found: ${identifier} (searched by ${isUUID ? "ID" : "name"})`);
    }
    return { environment: client["env"].name, action };
  },
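  /*
   * Illustrative note (added): `identifier` may be a UUID or a display name, per the regex
   * branch above. Hypothetical calls (values are placeholders):
   *   get_workflow_action({ identifier: "<action-uuid>", env: "staging" })  // lookup by ID
   *   get_workflow_action({ identifier: "Send Email" })                     // lookup by name, case-insensitive
   */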
  find_workflow_actions: async (args) => {
    const client = createClient(args.env);
    // Handle list_categories request
    if (args.list_categories === true) {
      const actions = await client.listActions();
      const categoryMap = new Map();
      for (const a of actions) {
        const cat = a.category ?? "uncategorized";
        categoryMap.set(cat, (categoryMap.get(cat) ?? 0) + 1);
      }
      return {
        environment: client["env"].name,
        categories: Array.from(categoryMap.entries()).map(([name, count]) => ({ name, count })).sort((a, b) => b.count - a.count),
      };
    }
    // Handle persona/workflow scope
    if (args.persona_id) {
      const personaId = String(args.persona_id);
      const personas = await client.getPersonasForTenant();
      const persona = personas.find((p) => p.id === personaId);
      if (!persona)
        throw new Error(`AI Employee not found: ${personaId}`);
      if (!persona.workflow_id) {
        return {
          environment: client["env"].name,
          persona_id: personaId,
          persona_name: persona.name,
          error: "AI Employee has no workflow",
          actions: [],
        };
      }
      const actions = await client.listActionsFromWorkflow(persona.workflow_id);
      return {
        environment: client["env"].name,
        persona_id: personaId,
        persona_name: persona.name,
        workflow_id: persona.workflow_id,
        count: actions.length,
        actions: actions.map((a) => ({
          id: a.id, name: a.name, description: a.description,
          category: a.category, inputs: a.inputs, outputs: a.outputs,
        })),
      };
    }
    if (args.workflow_id) {
      const workflowId = String(args.workflow_id);
      const actions = await client.listActionsFromWorkflow(workflowId);
      return {
        environment: client["env"].name,
        workflow_id: workflowId,
        count: actions.length,
        actions: actions.map((a) => ({
          id: a.id, name: a.name, description: a.description,
          category: a.category, inputs: a.inputs, outputs: a.outputs,
        })),
      };
    }
    // Default: search all actions
    let actions = await client.listActions();
    if (args.query) {
      const q = String(args.query).toLowerCase();
      actions = actions.filter((a) => a.name?.toLowerCase().includes(q));
    }
    if (args.category) {
      const f = String(args.category).toLowerCase();
      actions = actions.filter((a) => a.category?.toLowerCase() === f);
    }
    if (typeof args.enabled === "boolean") {
      actions = actions.filter((a) => a.enabled === args.enabled);
    }
    const limit = typeof args.limit === "number" ? args.limit : 100;
    actions = actions.slice(0, limit);
    return {
      environment: client["env"].name,
      count: actions.length,
      actions: actions.map((a) => ({
        id: a.id, name: a.name, description: a.description,
        category: a.category, enabled: a.enabled, tags: a.tags,
      })),
    };
  },
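  /*
   * Illustrative note (added): hypothetical invocations matching the branches above
   * (argument values are placeholders):
   *   find_workflow_actions({ list_categories: true })                    // category histogram
   *   find_workflow_actions({ persona_id: "<persona_id>" })               // actions in a persona's workflow
   *   find_workflow_actions({ workflow_id: "<workflow_id>" })             // actions in a specific workflow
   *   find_workflow_actions({ query: "email", enabled: true, limit: 20 }) // filtered search
   */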
  // ─────────────────────────────────────────────────────────────────────────
  // Diagnostics & Comparison
  // ─────────────────────────────────────────────────────────────────────────
  compare_ai_employees: async (args) => {
    const env1 = args.env_1 ?? getDefaultEnvName();
    const env2 = args.env_2 ?? env1;
    const id1 = String(args.persona_id_1);
    const id2 = String(args.persona_id_2);
    const client1 = createClient(env1);
    const client2 = env1 === env2 ? client1 : createClient(env2);
    const [p1, p2] = await Promise.all([
      client1.getPersonaById(id1),
      client2.getPersonaById(id2),
    ]);
    if (!p1)
      throw new Error(`AI Employee not found: ${id1} in ${env1}`);
    if (!p2)
      throw new Error(`AI Employee not found: ${id2} in ${env2}`);
    const fp1 = fingerprintPersona(p1);
    const fp2 = fingerprintPersona(p2);
    const compareFields = ["name", "description", "status", "trigger_type", "access_level", "embedding_enabled", "template_id", "workflow_id"];
    const differences = [];
    for (const field of compareFields) {
      const val1 = p1[field] ?? p1[field === "template_id" ? "templateId" : field];
      const val2 = p2[field] ?? p2[field === "template_id" ? "templateId" : field];
      if (JSON.stringify(val1) !== JSON.stringify(val2)) {
        differences.push({ field, value_1: val1, value_2: val2 });
      }
    }
    if (JSON.stringify(p1.proto_config ?? {}) !== JSON.stringify(p2.proto_config ?? {})) {
      differences.push({ field: "proto_config", value_1: "(differs)", value_2: "(differs)" });
    }
    if (JSON.stringify(p1.welcome_messages ?? {}) !== JSON.stringify(p2.welcome_messages ?? {})) {
      differences.push({ field: "welcome_messages", value_1: "(differs)", value_2: "(differs)" });
    }
    return {
      persona_1: { id: id1, env: env1, name: p1.name, fingerprint: fp1 },
      persona_2: { id: id2, env: env2, name: p2.name, fingerprint: fp2 },
      fingerprints_match: fp1 === fp2,
      difference_count: differences.length,
      differences,
    };
  },
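  /*
   * Illustrative note (added): hypothetical cross-environment comparison (IDs and env names
   * are placeholders):
   *   compare_ai_employees({ persona_id_1: "<id-in-prod>", persona_id_2: "<id-in-staging>",
   *                          env_1: "prod", env_2: "staging" })
   * Per the defaults above, env_2 falls back to env_1, and env_1 falls back to the default
   * environment name.
   */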
  list_ai_employee_templates: async (args) => {
    const client = createClient(args.env);
    const personas = await client.getPersonasForTenant();
    const templateMap = new Map();
    for (const p of personas) {
      const templateId = p.template_id ?? p.templateId ?? "unknown";
      const existing = templateMap.get(templateId) ?? { count: 0, names: [] };
      existing.count++;
      if (p.name && existing.names.length < 3)
        existing.names.push(p.name);
      templateMap.set(templateId, existing);
    }
    const templates = Array.from(templateMap.entries())
      .map(([template_id, data]) => ({ template_id, usage_count: data.count, examples: data.names }))
      .sort((a, b) => b.usage_count - a.usage_count);
    return {
      environment: client["env"].name,
      total_ai_employees: personas.length,
      template_count: templates.length,
      templates,
    };
  },
  // ─────────────────────────────────────────────────────────────────────────
  // CONSOLIDATED SYNC HANDLERS
  // ─────────────────────────────────────────────────────────────────────────
  sync: async (args) => {
    const targetEnv = String(args.target_env);
    const sourceEnv = args.source_env ? String(args.source_env) : getDefaultEnvName();
    const dryRun = args.dry_run === true;
    const includeStatus = args.include_status === true;
    const scope = args.scope === "all" ? "all" : "one";
    const identifier = args.identifier ? String(args.identifier) : undefined;
    // Sync all tagged personas
    if (scope === "all" || !identifier) {
      const sdk = getSyncSDK();
      if (sdk) {
        try {
          const result = await sdk.runSync();
          return { success: true, mode: "config", ...result };
        }
        finally {
          sdk.close();
        }
      }
      // Config-less mode
      try {
        const result = await directSyncAll({ targetEnv, dryRun });
        return { success: true, mode: "tags", ...result };
      }
      catch (e) {
        return { success: false, error: e instanceof Error ? e.message : String(e) };
      }
    }
    // Sync single persona
    const isUUID = /^[0-9a-f]{8}-[0-9a-f]{4}-[0-9a-f]{4}-[0-9a-f]{4}-[0-9a-f]{12}$/i.test(identifier);
    const behavior = resolveSyncBehavior({
      personaName: isUUID ? undefined : identifier,
      targetEnv,
      overrides: {
        dry_run: dryRun ? true : undefined,
        sync_status: includeStatus ? true : undefined,
      },
    });
    try {
      const result = isUUID
        ? await directSyncPersonaById({
          personaId: identifier,
          sourceEnv,
          targetEnv,
          dryRun: behavior.dry_run,
          syncStatus: behavior.sync_status,
        })
        : await directSyncPersona({
          name: identifier,
          sourceEnv,
          targetEnv,
          dryRun: behavior.dry_run,
          syncStatus: behavior.sync_status,
        });
      return { ...result, resolved_behavior: behavior };
    }
    catch (e) {
      return { success: false, error: e instanceof Error ? e.message : String(e) };
    }
  },
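  /*
   * Illustrative note (added): hypothetical sync invocations covering the branches above
   * (environment names and identifiers are placeholders):
   *   sync({ scope: "all", target_env: "staging", dry_run: true })          // preview a bulk sync
   *   sync({ identifier: "Billing Assistant", target_env: "staging" })      // single persona by name
   *   sync({ identifier: "<uuid>", source_env: "prod", target_env: "qa" })  // single persona by ID
   * When a sync config SDK is available the bulk path delegates to it; otherwise the
   * tag-driven directSyncAll path is used.
   */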
  sync_info: async (args) => {
    const client = args.env ? createClient(args.env) : undefined;
    // Check if persona is synced
    if (args.persona_id) {
      if (!client)
        throw new Error("env required when checking persona sync status");
      const personaId = String(args.persona_id);
      const personas = await client.getPersonasForTenant();
      const persona = personas.find((p) => p.id === personaId);
      if (!persona)
        throw new Error(`AI Employee not found: ${personaId}`);
      const meta = client.getSyncMetadata(persona);
      return {
        environment: client["env"].name,
        persona_id: personaId,
        persona_name: persona.name,
        is_synced: !!meta,
        sync_metadata: meta,
      };
    }
    // Check by persona name
    if (args.persona_name) {
      const sdk = getSyncSDK();
      if (!sdk)
        return { error: "No sync config found. Set EMA_AGENT_SYNC_CONFIG." };
      try {
        const persona = await sdk.getMasterPersonaByName(String(args.persona_name));
        if (!persona)
          return { error: `Persona not found: ${args.persona_name}` };
        return await sdk.getPersonaSyncStatus(persona.id);
      }
      finally {
        sdk.close();
      }
    }
    // List all synced personas
    if (args.list_synced === true) {
      if (!client)
        throw new Error("env required when listing synced personas");
      const personas = await client.getPersonasForTenant();
      const masterEnvFilter = args.master_env ? String(args.master_env).toLowerCase() : undefined;
      const synced = [];
      for (const p of personas) {
        const meta = client.getSyncMetadata(p);
        if (meta) {
          if (masterEnvFilter && meta.master_env.toLowerCase() !== masterEnvFilter)
            continue;
          synced.push({ persona_id: p.id, persona_name: p.name, sync_metadata: meta });
        }
      }
      return { environment: client["env"].name, count: synced.length, synced_personas: synced };
    }
    // Default: return overall sync config/status
    const sdk = getSyncSDK();
    const options = args.include_options === true ? loadSyncOptions() : undefined;
    if (!sdk) {
      return {
        configured: false,
        error: "No sync config found. Set EMA_AGENT_SYNC_CONFIG.",
        options,
      };
    }
    try {
      const master = sdk.getMasterEnvironment();
      const envs = sdk.getEnvironments();
      const personas = await sdk.listMasterPersonas();
      return {
        configured: true,
        master_environment: { name: master.name, url: master.baseUrl },
        target_environments: envs.filter((e) => !e.isMaster).map((e) => ({ name: e.name, url: e.baseUrl })),
        total_personas: personas.length,
        options,
      };
    }
    finally {
      sdk.close();
    }
  },
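  /*
   * Illustrative note (added): hypothetical sync_info invocations, one per branch above
   * (values are placeholders):
   *   sync_info({ env: "staging", persona_id: "<persona_id>" })  // is this persona synced?
   *   sync_info({ persona_name: "Billing Assistant" })           // status via the sync config SDK
   *   sync_info({ env: "staging", list_synced: true })           // all synced personas in one env
   *   sync_info({ include_options: true })                       // overall config plus sync options
   */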
  // ─────────────────────────────────────────────────────────────────────────
  // Auto Builder Knowledge Handlers
  // ─────────────────────────────────────────────────────────────────────────
  list_auto_builder_agents: async (args) => {
    const category = args.category;
    const agents = category ? getAgentsByCategory(category) : AGENT_CATALOG;
    return {
      count: agents.length,
      category: category ?? "all",
      agents: agents.map(a => ({
        action_name: a.actionName,
        display_name: a.displayName,
        category: a.category,
        description: a.description,
        when_to_use: a.whenToUse,
        inputs: a.inputs.map(i => i.name),
        outputs: a.outputs.map(o => o.name),
        has_critical_rules: !!a.criticalRules?.length,
      })),
    };
  },
  get_auto_builder_agent: async (args) => {
    const actionName = String(args.action_name);
    const agent = getAgentByName(actionName);
    if (!agent) {
      const available = AGENT_CATALOG.slice(0, 15).map(a => a.actionName);
      return {
        error: `Agent not found: ${actionName}`,
        available_examples: available,
        hint: "Use list_auto_builder_agents to see all available agents",
      };
    }
    return {
      action_name: agent.actionName,
      display_name: agent.displayName,
      category: agent.category,
      description: agent.description,
      inputs: agent.inputs,
      outputs: agent.outputs,
      critical_rules: agent.criticalRules ?? [],
      when_to_use: agent.whenToUse,
      when_not_to_use: agent.whenNotToUse,
      example: agent.example,
    };
  },
  suggest_agents_for_use_case: async (args) => {
    const useCase = String(args.use_case);
    const suggestions = suggestAgentsForUseCase(useCase);
    return {
      use_case: useCase,
      suggested_agent_count: suggestions.length,
      suggested_agents: suggestions.map(a => ({
        action_name: a.actionName,
        display_name: a.displayName,
        category: a.category,
        why: a.whenToUse,
        inputs: a.inputs.map(i => `${i.name} (${i.type})`),
        outputs: a.outputs.map(o => `${o.name} (${o.type})`),
      })),
      suggested_flow: suggestions.map(a => a.actionName).join(" → "),
      next_steps: [
        "Use get_auto_builder_agent to get detailed info on each agent",
        "Use get_workflow_pattern for a complete template if a pattern matches",
        "Use get_qualifying_questions to ensure you have all required information",
      ],
    };
  },
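  /*
   * Illustrative note (added): a hypothetical use-case suggestion call:
   *   suggest_agents_for_use_case({ use_case: "answer billing questions from uploaded policy PDFs" })
   * The response pairs each suggested agent with its typed inputs/outputs and joins the
   * action names into a suggested_flow string; any concrete agent names shown by a real
   * call come from AGENT_CATALOG, not from this comment.
   */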
  get_workflow_pattern: async (args) => {
    const patternName = String(args.pattern_name);
    const pattern = WORKFLOW_PATTERNS.find(p => p.name === patternName);
    if (!pattern) {
      return {
        error: `Pattern not found: ${patternName}`,
        available: WORKFLOW_PATTERNS.map(p => ({ name: p.name, description: p.description })),
      };
    }
    return {
      name: pattern.name,
      persona_type: pattern.personaType,
      description: pattern.description,
      use_case: pattern.useCase,
      nodes: pattern.nodes,
      connections: pattern.connections,
      anti_patterns: pattern.antiPatterns ?? [],
      implementation_notes: [
        "Replace * with actual category/handler names",
        "All paths must lead to WORKFLOW_OUTPUT",
        "Include Fallback category for categorizers",
        "Check type compatibility for all connections",
      ],
    };
  },
  list_workflow_patterns: async (args) => {
    const personaType = args.persona_type;
    const patterns = personaType
      ? WORKFLOW_PATTERNS.filter(p => p.personaType === personaType)
      : WORKFLOW_PATTERNS;
    return {
      count: patterns.length,
      persona_type_filter: personaType ?? "all",
      patterns: patterns.map(p => ({
        name: p.name,
        persona_type: p.personaType,
        description: p.description,
        use_case: p.useCase,
        node_count: p.nodes.length,
      })),
    };
  },
  check_type_compatibility: async (args) => {
    const sourceType = String(args.source_type);
    const targetType = String(args.target_type);
    const compat = checkTypeCompatibility(sourceType, targetType);
    if (!compat) {
      return {
        source_type: sourceType,
        target_type: targetType,
        compatible: false,
        note: "No explicit compatibility rule found - likely incompatible",
        recommendation: "Use an intermediate node to convert types, or check if target accepts WELL_KNOWN_TYPE_ANY",
      };
    }
    return {
      source_type: sourceType,
      target_type: targetType,
      compatible: compat.compatible,
      note: compat.note,
      recommendation: compat.compatible
        ? "These types are compatible for direct connection"
        : `Incompatible. ${compat.note || "Use an intermediate node to convert types."}`,
    };
  },
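  /*
   * Illustrative note (added): a hypothetical type check, using type names that appear in
   * the fix hints elsewhere in this file:
   *   check_type_compatibility({ source_type: "WELL_KNOWN_TYPE_CHAT_CONVERSATION",
   *                              target_type: "WELL_KNOWN_TYPE_TEXT_WITH_SOURCES" })
   * Any pair with no explicit compatibility rule is reported as likely incompatible.
   */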
  get_widget_reference: async (args) => {
    const personaType = String(args.persona_type);
    const widgets = getWidgetsForPersonaType(personaType);
    const projectType = PROJECT_TYPES[personaType];
    return {
      persona_type: personaType,
      project_type: projectType,
      widget_count: widgets.length,
      widgets: widgets.map(w => ({
        id: w.id,
        name: w.name,
        description: w.description,
        fields: w.fields,
      })),
      note: `Project type ${projectType} is used in proto_config for ${personaType} AI Employees`,
    };
  },
  get_qualifying_questions: async (args) => {
    const category = args.category;
    const requiredOnly = args.required_only === true;
    let questions = category
      ? getQualifyingQuestionsByCategory(category)
      : QUALIFYING_QUESTIONS;
    if (requiredOnly) {
      questions = questions.filter(q => q.required);
    }
    const grouped = questions.reduce((acc, q) => {
      if (!acc[q.category])
        acc[q.category] = [];
      acc[q.category].push({ question: q.question, why_it_matters: q.whyItMatters, required: q.required });
      return acc;
    }, {});
    return {
      total_questions: questions.length,
      categories: Object.keys(grouped),
      questions_by_category: grouped,
      minimum_required: [
        "AI Type (Voice/Chat/Dashboard)",
        "2-3 intent categories + Fallback",
        "1 primary data source or action",
        "Success output format",
      ],
      questioning_rounds: {
        round_1: "Core Context: AI type, trigger, main intents, data sources",
        round_2: "Workflow Details: Actions, validations, outputs, approvals",
        round_3: "Voice/Chat Specifics: Welcome message, hangup conditions (if applicable)",
      },
    };
  },
  get_voice_persona_template: async () => {
    return {
      template: VOICE_PERSONA_TEMPLATE,
      required_fields: ["welcomeMessage", "identityAndPurpose", "takeActionInstructions", "hangupInstructions"],
      optional_fields: ["transferCallInstructions", "speechCharacteristics", "systemPrompt", "formFillingInstructions", "waitMessage"],
      project_type: PROJECT_TYPES.voice,
      widget_ids: {
        voiceSettings: 38,
        conversationSettings: 39,
        vadSettings: 43,
        dataStorageSettings: 42,
      },
      instructions: [
        "Replace all {placeholder} values with actual content",
        "takeActionInstructions uses </Case N> format for each action",
        "speechCharacteristics should include TTS pronunciation rules",
        "systemPrompt defines tool calling behavior",
      ],
    };
  },
  validate_workflow_prompt: async (args) => {
    const prompt = String(args.prompt);
    const result = validateWorkflowPrompt(prompt);
    return {
      valid: result.valid,
      issue_count: result.issues.length,
      warning_count: result.warnings.length,
      issues: result.issues,
      warnings: result.warnings,
      recommendations: result.issues.length > 0 ? [
        "Add Fallback category to all categorizers",
        "Ensure HITL nodes have both success and failure paths",
        "Map all response nodes to WORKFLOW_OUTPUT",
        "Specify persona type (Voice AI, Chat AI, Dashboard AI)",
        "Check type compatibility for all connections",
      ] : ["Prompt structure looks valid - verify type compatibility after generation"],
    };
  },
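  /*
   * Illustrative note (added): a hypothetical prompt-validation call (the prompt text is a
   * placeholder):
   *   validate_workflow_prompt({ prompt: "Build a Chat AI that routes refund vs. order-status questions ..." })
   * The result reports valid/issues/warnings plus the generic recommendations listed above
   * when issues are found.
   */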
  get_auto_builder_guidance: async (args) => {
    const topic = String(args.topic);
    const guidance = GUIDANCE_TOPICS[topic];
    if (!guidance) {
      return {
        error: `Topic not found: ${topic}`,
        available_topics: Object.keys(GUIDANCE_TOPICS).map(k => ({
          topic: k,
          title: GUIDANCE_TOPICS[k].title,
        })),
      };
    }
    return guidance;
  },
  get_platform_concept: async (args) => {
    const term = String(args.term);
    const concept = getConceptByTerm(term);
    if (!concept) {
      return {
        error: `Concept not found: ${term}`,
        available_concepts: PLATFORM_CONCEPTS.map(c => c.term),
        hint: "Try searching for aliases like 'Persona' (AI Employee) or 'Action' (Agent)",
      };
    }
    return {
      term: concept.term,
      definition: concept.definition,
      aliases: concept.aliases ?? [],
      related_terms: concept.relatedTerms ?? [],
      examples: concept.examples ?? [],
      common_confusions: concept.commonConfusions,
    };
  },
  list_platform_concepts: async () => {
    return {
      count: PLATFORM_CONCEPTS.length,
      concepts: PLATFORM_CONCEPTS.map(c => ({
        term: c.term,
        definition: c.definition,
        aliases: c.aliases ?? [],
      })),
      key_relationships: [
        "AI Employee CONTAINS Workflow (processing logic) + Persona (conversational behavior)",
        "Workflow is made up of Agents/Actions connected by Edges",
        "Agents use Connectors to interact with external systems",
        "HITL = Human-in-the-Loop approval/verification step",
      ],
    };
  },
  get_common_mistakes: async () => {
    return {
      count: COMMON_MISTAKES.length,
      mistakes: COMMON_MISTAKES,
      top_3_critical: [
        COMMON_MISTAKES.find(m => m.mistake.includes("Fallback")),
        COMMON_MISTAKES.find(m => m.mistake.includes("HITL")),
        COMMON_MISTAKES.find(m => m.mistake.includes("duplicate")),
      ].filter(Boolean),
    };
  },
  get_debug_checklist: async () => {
    return {
      total_steps: DEBUG_CHECKLIST.length,
      checklist: DEBUG_CHECKLIST,
      quick_checks: [
        "Is the AI Employee status 'active' or 'ready'?",
        "Does the categorizer have all category edges including Fallback?",
        "Do all paths lead to WORKFLOW_OUTPUT?",
        "Are all connections type-compatible?",
      ],
    };
  },
  get_workflow_execution_model: async () => {
    return {
      ...WORKFLOW_EXECUTION_MODEL,
      summary: "Each user message triggers a NEW workflow execution. Use chat_conversation to detect previous actions and avoid duplicates.",
    };
  },
  // ─────────────────────────────────────────────────────────────────────────
  // Workflow Review & Audit Handlers
  // ─────────────────────────────────────────────────────────────────────────
  analyze_workflow: async (args) => {
    const client = createClient(args.env);
    const personaId = String(args.persona_id);
    const persona = await client.getPersonaById(personaId);
    if (!persona)
      throw new Error(`AI Employee not found: ${personaId}`);
    if (!persona.workflow_def) {
      return {
        environment: client["env"].name,
        persona_id: personaId,
        persona_name: persona.name,
        error: "AI Employee has no workflow_def",
        recommendation: "This persona may not have a generated workflow. Use Auto Builder to create one.",
      };
    }
    const analysis = analyzeWorkflow(persona.workflow_def, {
      persona_id: personaId,
      persona_name: persona.name,
      environment: client["env"].name,
    });
    return {
      ...analysis,
      recommendations: analysis.validation_passed
        ? ["Workflow structure looks valid. Test with sample queries to verify behavior."]
        : [
          "Fix all critical issues before deployment",
          ...analysis.issues
            .filter(i => i.severity === "critical")
            .slice(0, 3)
            .map(i => `Priority: ${i.reason}`),
          "Use suggest_workflow_fixes to get fix proposals",
        ],
    };
  },
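  /*
   * Illustrative note (added): a hypothetical analysis call:
   *   analyze_workflow({ persona_id: "<persona_id>", env: "staging" })
   * The result spreads the analyzer output (summary, issues, issue_summary, validation_passed)
   * and appends the recommendations built above.
   */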
  detect_workflow_issues: async (args) => {
    const workflowDef = args.workflow_def;
    if (!workflowDef || typeof workflowDef !== "object") {
      return {
        error: "Invalid workflow_def - must be an object",
        hint: "Get workflow_def from get_ai_employee_full(persona_id).ai_employee.workflow_def",
      };
    }
    const issues = detectWorkflowIssues(workflowDef);
    const summary = {
      total: issues.length,
      critical: issues.filter(i => i.severity === "critical").length,
      warning: issues.filter(i => i.severity === "warning").length,
      info: issues.filter(i => i.severity === "info").length,
    };
    return {
      issues,
      summary,
      validation_passed: summary.critical === 0,
      issue_types_found: [...new Set(issues.map(i => i.type))],
      next_steps: issues.length > 0
        ? ["Use suggest_workflow_fixes(issues) to get fix proposals"]
        : ["No issues detected - workflow structure looks valid"],
    };
  },
  validate_workflow_connections: async (args) => {
    const workflowDef = args.workflow_def;
    if (!workflowDef || typeof workflowDef !== "object") {
      return {
        error: "Invalid workflow_def - must be an object",
        hint: "Get workflow_def from get_ai_employee_full(persona_id).ai_employee.workflow_def",
      };
    }
    const validations = validateWorkflowConnections(workflowDef);
    const compatible = validations.filter(v => v.compatible);
    const incompatible = validations.filter(v => !v.compatible);
    return {
      total_edges: validations.length,
      compatible_count: compatible.length,
      incompatible_count: incompatible.length,
      all_valid: incompatible.length === 0,
      validations: validations.map(v => ({
        edge: v.edge_id,
        source_type: v.source_type,
        target_type: v.target_type,
        compatible: v.compatible,
        note: v.note,
      })),
      incompatible_edges: incompatible.map(v => ({
        edge: v.edge_id,
        source_type: v.source_type,
        target_type: v.target_type,
        note: v.note,
        fix_hint: v.source_type === "WELL_KNOWN_TYPE_CHAT_CONVERSATION" && v.target_type === "WELL_KNOWN_TYPE_TEXT_WITH_SOURCES"
          ? "Insert conversation_to_search_query between source and target"
          : "Use named_inputs (accepts ANY type) or add intermediate conversion node",
      })),
    };
  },
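  /*
   * Illustrative note (added): both handlers above operate on a raw workflow_def. A
   * hypothetical review pass, following the hint strings in the code, might chain them:
   *   // const wf = (await get_ai_employee_full({ persona_id })).ai_employee.workflow_def;
   *   // detect_workflow_issues({ workflow_def: wf });
   *   // validate_workflow_connections({ workflow_def: wf });
   */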
  suggest_workflow_fixes: async (args) => {
    const issues = args.issues;
    if (!Array.isArray(issues)) {
      return {
        error: "Invalid issues array",
        hint: "Pass issues from detect_workflow_issues or analyze_workflow",
        example_input: [
          { type: "missing_fallback", severity: "critical", node: "intent_classifier", reason: "..." },
          { type: "type_mismatch", severity: "critical", source: "trigger.chat_conversation", target: "search.query", reason: "..." },
        ],
      };
    }
    if (issues.length === 0) {
      return {
        message: "No issues to fix",
        fixes: [],
      };
    }
    const fixes = suggestWorkflowFixes(issues);
    return {
      issue_count: issues.length,
      fix_count: fixes.length,
      fixes,
      application_order: [
        "1. Fix critical issues first (missing_fallback, incomplete_hitl, cycles)",
        "2. Fix type mismatches (may require adding intermediate nodes)",
        "3. Address warnings (wrong_input_source, orphan nodes)",
        "4. Re-run detect_workflow_issues to verify fixes",
      ],
    };
  },
  compare_workflow_versions: async (args) => {
    const client = createClient(args.env);
    const idBefore = String(args.persona_id_before);
    const idAfter = String(args.persona_id_after);
    const [personaBefore, personaAfter] = await Promise.all([
      client.getPersonaById(idBefore),
      client.getPersonaById(idAfter),
    ]);
    if (!personaBefore)
      throw new Error(`AI Employee not found (before): ${idBefore}`);
    if (!personaAfter)
      throw new Error(`AI Employee not found (after): ${idAfter}`);
    const analysisBefore = personaBefore.workflow_def
      ? analyzeWorkflow(personaBefore.workflow_def, { persona_id: idBefore, persona_name: personaBefore.name })
      : null;
    const analysisAfter = personaAfter.workflow_def
      ? analyzeWorkflow(personaAfter.workflow_def, { persona_id: idAfter, persona_name: personaAfter.name })
      : null;
    // Compare fingerprints
    const fpBefore = personaBefore.workflow_def ? fingerprintPersona(personaBefore) : null;
    const fpAfter = personaAfter.workflow_def ? fingerprintPersona(personaAfter) : null;
    return {
      environment: client["env"].name,
      before: {
        persona_id: idBefore,
        name: personaBefore.name,
        fingerprint: fpBefore,
        has_workflow: !!personaBefore.workflow_def,
        node_count: analysisBefore?.summary.total_nodes ?? 0,
        edge_count: analysisBefore?.summary.total_edges ?? 0,
        critical_issues: analysisBefore?.issue_summary.critical ?? 0,
        validation_passed: analysisBefore?.validation_passed ?? false,
      },
      after: {
        persona_id: idAfter,
        name: personaAfter.name,
        fingerprint: fpAfter,
        has_workflow: !!personaAfter.workflow_def,
        node_count: analysisAfter?.summary.total_nodes ?? 0,
        edge_count: analysisAfter?.summary.total_edges ?? 0,
        critical_issues: analysisAfter?.issue_summary.critical ?? 0,
        validation_passed: analysisAfter?.validation_passed ?? false,
      },
      comparison: {
        fingerprints_match: fpBefore === fpAfter,
        node_count_change: (analysisAfter?.summary.total_nodes ?? 0) - (analysisBefore?.summary.total_nodes ?? 0),
        edge_count_change: (analysisAfter?.summary.total_edges ?? 0) - (analysisBefore?.summary.total_edges ?? 0),
        critical_issues_change: (analysisAfter?.issue_summary.critical ?? 0) - (analysisBefore?.issue_summary.critical ?? 0),
        validation_improved: !analysisBefore?.validation_passed && (analysisAfter?.validation_passed ?? false),
        validation_regressed: (analysisBefore?.validation_passed ?? false) && !analysisAfter?.validation_passed,
      },
      issues_before: analysisBefore?.issues.slice(0, 5) ?? [],
      issues_after: analysisAfter?.issues.slice(0, 5) ?? [],
      recommendations: fpBefore === fpAfter
        ? ["No workflow changes detected between versions"]
        : [
          analysisAfter?.validation_passed
            ? "After version passes validation"
            : "After version has validation issues - review before deploying",
          (analysisAfter?.issue_summary.critical ?? 0) > (analysisBefore?.issue_summary.critical ?? 0)
            ? "Warning: More critical issues in after version"
            : (analysisAfter?.issue_summary.critical ?? 0) < (analysisBefore?.issue_summary.critical ?? 0)
              ? "Good: Fewer critical issues in after version"
              : "Same number of critical issues",
        ],
    };
  },
  get_workflow_metrics: async (args) => {
    const client = createClient(args.env);
    const personaId = String(args.persona_id);
    const persona = await client.getPersonaById(personaId);
    if (!persona)
      throw new Error(`AI Employee not found: ${personaId}`);
    if (!persona.workflow_def) {
      return {
        environment: client["env"].name,
        persona_id: personaId,
        persona_name: persona.name,
        error: "AI Employee has no workflow_def",
      };
    }
    const analysis = analyzeWorkflow(persona.workflow_def, {
      persona_id: personaId,
      persona_name: persona.name,
      environment: client["env"].name,
    });
    // Calculate complexity indicators
    const avgEdgesPerNode = analysis.summary.total_nodes > 0
      ? (analysis.summary.total_edges / analysis.summary.total_nodes).toFixed(2)
      : 0;
    // Estimate parallel vs sequential
    const hasParallelBranches = analysis.summary.categorizers_count > 0;
    return {
      environment: client["env"].name,
      persona_id: personaId,
      persona_name: persona.name,
      structure: {
        total_nodes: analysis.summary.total_nodes,
        total_edges: analysis.summary.total_edges,
        has_trigger: analysis.summary.has_trigger,
        has_workflow_output: analysis.summary.has_workflow_output,
      },
      routing: {
        categorizers_count: analysis.summary.categorizers_count,
        hitl_nodes_count: analysis.summary.hitl_nodes_count,
        has_parallel_branches: hasParallelBranches,
      },
      quality: {
        validation_passed: analysis.validation_passed,
        critical_issues: analysis.issue_summary.critical,
        warnings: analysis.issue_summary.warning,
      },
      complexity: {
        avg_edges_per_node: avgEdgesPerNode,
        complexity_rating: analysis.summary.total_nodes <= 5 ? "simple"
          : analysis.summary.total_nodes <= 15 ? "moderate"
            : "complex",
      },
      recommendations: [
        analysis.summary.categorizers_count === 0 && analysis.summary.total_nodes > 3
          ? "Consider adding intent routing with chat_categorizer for better user experience"
          : null,
        analysis.summary.hitl_nodes_count === 0
          ? "No HITL nodes - consider adding human approval for sensitive actions"
          : null,
        analysis.issue_summary.critical > 0
          ? `${analysis.issue_summary.critical} critical issues need attention - use analyze_workflow for details`
          : null,
      ].filter(Boolean),
    };
  },
  // ─────────────────────────────────────────────────────────────────────────
  // Workflow Compilation (Template-driven)
  // ─────────────────────────────────────────────────────────────────────────
  compile_workflow: async (args) => {
    const name = String(args.name);
    const description = String(args.description);
    const personaType = String(args.persona_type);
    const rawNodes = args.nodes;
    const rawResultMappings = args.result_mappings;
    if (!["voice", "chat", "dashboard"].includes(personaType)) {
      throw new Error(`Invalid persona_type: ${personaType}. Must be one of: voice, chat, dashboard`);
    }
    if (!rawNodes || rawNodes.length === 0) {
      throw new Error("At least one node is required");
    }
    if (!rawResultMappings || rawResultMappings.length === 0) {
      throw new Error("At least one result_mapping is required to connect outputs to WORKFLOW_OUTPUT");
    }
    // Convert raw input spec to internal Node format
    const nodes = rawNodes.map((rawNode) => {
      const node = {
        id: rawNode.id,
        actionType: rawNode.action_type,
        displayName: rawNode.display_name,
        description: rawNode.description,
        disableHitl: rawNode.disable_hitl,
      };
      // Convert inputs
      if (rawNode.inputs) {
        node.inputs = {};
        for (const [key, rawBinding] of Object.entries(rawNode.inputs)) {
          const binding = {
            type: rawBinding.type,
            actionName: rawBinding.action_name,
            output: rawBinding.output,
            value: rawBinding.value,
            widgetName: rawBinding.widget_name,
          };
          node.inputs[key] = binding;
        }
      }
      // Convert run_if condition
      if (rawNode.run_if) {
        node.runIf = {
          sourceAction: rawNode.run_if.source_action,
          sourceOutput: rawNode.run_if.source_output,
          operator: rawNode.run_if.operator,
          value: rawNode.run_if.value,
        };
      }
      // Convert categories
      if (rawNode.categories) {
        node.categories = rawNode.categories.map((cat) => ({
          name: cat.name,
          description: cat.description,
          examples: cat.examples,
        }));
      }
      // Convert tools
      if (rawNode.tools) {
        node.tools = rawNode.tools.map((tool) => ({
          name: tool.name,
          namespace: tool.namespace,
        }));
      }
      return node;
    });
    // Convert result mappings
    const resultMappings = rawResultMappings.map((rm) => ({
      nodeId: rm.node_id,
      output: rm.output,
    }));
    // Build the WorkflowSpec
    const spec = {
      name,
      description,
      personaType,
      nodes,
      resultMappings,
    };
    // Compile the workflow
    const result = compileWorkflow(spec);
    return {
      success: true,
      message: `Compiled workflow for ${personaType} AI Employee "${name}" with ${nodes.length} nodes`,
      workflow_def: result.workflow_def,
      proto_config: result.proto_config,
      nodes_compiled: nodes.map((n) => ({ id: n.id, action: n.actionType, display_name: n.displayName })),
      result_mappings: resultMappings,
      usage: 'Deploy with: workflow(mode="deploy", persona_id="<persona_id>", workflow_def=<workflow_def>, proto_config=<proto_config>)',
    };
  },
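  /*
   * Illustrative note (added): a minimal hypothetical compile_workflow payload using only the
   * fields read above; the node ids, action_type strings, binding type, and category names
   * are invented placeholders, not a documented catalog:
   *   compile_workflow({
   *     name: "Refund Helper",
   *     description: "Routes and answers refund questions",
   *     persona_type: "chat",
   *     nodes: [
   *       { id: "classify", action_type: "<categorizer-action>", display_name: "Classify intent",
   *         categories: [{ name: "Refund", description: "Refund requests" },
   *                      { name: "Fallback", description: "Anything else" }] },
   *       { id: "answer", action_type: "<responder-action>", display_name: "Answer",
   *         run_if: { source_action: "classify", source_output: "category",
   *                   operator: "equals", value: "Refund" } }
   *     ],
   *     result_mappings: [{ node_id: "answer", output: "response" }]
   *   })
   */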
  // ─────────────────────────────────────────────────────────────────────────
  // Data Source / Embedding Management
  // TODO: Add support for 3rd party data sources (Google Drive, SharePoint, Confluence, etc.)
  // ─────────────────────────────────────────────────────────────────────────
  upload_data_source: async (args) => {
    const client = createClient(args.env);
    const personaId = String(args.persona_id);
    const filePath = String(args.file_path);
    const tags = args.tags ? String(args.tags) : undefined;
    // Read the file from filesystem
    const fs = await import("fs/promises");
    const path = await import("path");
    // Check if file exists
    try {
      await fs.access(filePath);
    }
    catch {
      throw new Error(`File not found: ${filePath}`);
    }
    const fileContent = await fs.readFile(filePath);
    const filename = path.basename(filePath);
    // Upload the file
    const result = await client.uploadDataSource(personaId, fileContent, filename, { tags });
    // Get the persona to update status_log
    const persona = await client.getPersonaById(personaId);
    if (persona) {
      const existingStatusLog = persona.status_log ?? {};
      const fileUploadLog = existingStatusLog.fileUpload ?? [];
      // Add new file to status log
      const updatedStatusLog = {
        ...existingStatusLog,
        fileUpload: [
          ...fileUploadLog,
          {
            id: result.fileId,
            status: "success",
            filename: result.filename,
            main_comment: "File ingested successfully.",
            sub_comments: [],
          },
        ],
      };
      // Update persona with new status_log
      await client.updateAiEmployee({
        persona_id: personaId,
        proto_config: persona.proto_config ?? {},
        status_log: updatedStatusLog,
      });
    }
    return {
      environment: client["env"].name,
      success: true,
      persona_id: personaId,
      file_id: result.fileId,
      filename: result.filename,
      status: result.status,
      message: `File "${filename}" uploaded successfully`,
      next_steps: [
        'Enable RAG: knowledge(persona_id="<persona_id>", mode="toggle", enabled=true)',
        'Verify upload: knowledge(persona_id="<persona_id>", mode="list")',
      ],
    };
  },
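  /*
   * Illustrative note (added): a hypothetical upload call (path and tag are placeholders):
   *   upload_data_source({ persona_id: "<persona_id>", file_path: "/tmp/policy.pdf", tags: "policies" })
   * The handler reads the file locally, uploads it, and appends a success entry to the
   * persona's status_log.fileUpload history before returning the file metadata.
   */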
  delete_data_source: async (args) => {
    const client = createClient(args.env);
    const personaId = String(args.persona_id);
    const fileId = String(args.file_id);
    const result = await client.deleteDataSource(personaId, fileId);
    return {
      environment: client["env"].name,
      success: result.success,
      persona_id: personaId,
      file_id: fileId,
      message: `Data source file deleted`,
    };
  },
  list_data_sources: async (args) => {
    const client = createClient(args.env);
    const personaId = String(args.persona_id);
    // Get the persona to check its configuration
    const persona = await client.getPersonaById(personaId);
    if (!persona) {
      throw new Error(`AI Employee not found: ${personaId}`);
    }
    // Extract data source info from proto_config widgets
    const protoConfig = persona.proto_config;
    const widgets = (protoConfig?.widgets ?? []);
    // Find fileUpload widget configuration (use correct field names)
    const fileUploadWidget = widgets.find((w) => w.name === "fileUpload" || w.type === "FILE_UPLOAD" || w.type === 3);
    // Extract file count from widget config
    const fileUploadConfig = fileUploadWidget?.fileUpload;
    const localFiles = fileUploadConfig?.localFiles;
    const fileCount = localFiles?.[0]?.numFiles ? parseInt(localFiles[0].numFiles, 10) : 0;
    // Get status log for upload history
    const statusLog = persona.status_log;
    const fileUploadLog = statusLog?.fileUpload ?? [];
    // Try to get actual file list from API
    const files = await client.listDataSourceFiles(personaId);
    return {
      environment: client["env"].name,
      persona_id: personaId,
      persona_name: persona.name,
      embedding_enabled: persona.embedding_enabled ?? false,
      file_count: fileCount,
      files: files.length > 0 ? files : fileUploadLog,
      use_chunking: fileUploadConfig?.useChunking ?? false,
      workflow_id: persona.workflow_id,
      actions: [
        'Upload: knowledge(persona_id="<persona_id>", mode="upload", file="/path/to/file.pdf")',
        'Delete: knowledge(persona_id="<persona_id>", mode="delete", file_id="<file_id>")',
        'Toggle embedding: knowledge(persona_id="<persona_id>", mode="toggle", enabled=true|false)',
      ],
    };
  },
  get_embedding_status: async (args) => {
    const client = createClient(args.env);
    const personaId = String(args.persona_id);
    const persona = await client.getPersonaById(personaId);
    if (!persona) {
      throw new Error(`AI Employee not found: ${personaId}`);
    }
    // Extract RAG-related configuration
    const protoConfig = persona.proto_config;
    const widgets = (protoConfig?.widgets ?? []);
    const fileUploadWidget = widgets.find((w) => w.name === "fileUpload" || w.type === "FILE_UPLOAD" || w.type === 3);
    const fileUploadConfig = fileUploadWidget?.fileUpload;
    return {
      environment: client["env"].name,
      persona_id: personaId,
      persona_name: persona.name,
      embedding_enabled: persona.embedding_enabled ?? false,
      file_upload_configured: !!fileUploadWidget,
      use_chunking: fileUploadConfig?.useChunking ?? false,
      note: persona.embedding_enabled
        ? "Embedding/RAG is enabled - uploaded documents will be indexed for search"
        : "Embedding/RAG is disabled - documents will not be indexed",
    };
  },
  toggle_embedding: async (args) => {
    const client = createClient(args.env);
    const personaId = String(args.persona_id);
    const enabled = Boolean(args.enabled);
    // Get existing persona to preserve other fields
    const persona = await client.getPersonaById(personaId);
    if (!persona) {
      throw new Error(`AI Employee not found: ${personaId}`);
    }
    // Update embedding status
    await client.updateAiEmployee({
      persona_id: personaId,
      proto_config: persona.proto_config ?? {},
      embedding_enabled: enabled,
    });
    return {
      environment: client["env"].name,
      success: true,
      persona_id: personaId,
      persona_name: persona.name,
      embedding_enabled: enabled,
      message: enabled
        ? "Embedding/RAG enabled - uploaded documents will be indexed for semantic search"
        : "Embedding/RAG disabled - documents will not be indexed",
      next_steps: enabled
        ? ['Upload documents: knowledge(persona_id="<persona_id>", mode="upload", file="/path/to/file.pdf")']
        : [],
    };
  },
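  /*
   * Illustrative note (added): a hypothetical toggle (the persona_id is a placeholder):
   *   toggle_embedding({ persona_id: "<persona_id>", enabled: true })
   * The update above preserves the existing proto_config and only flips embedding_enabled.
   */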
|
|
3401
|
+
// ─────────────────────────────────────────────────────────────────────────
|
|
3402
|
+
// Unified Workflow Handler
|
|
3403
|
+
// ─────────────────────────────────────────────────────────────────────────
|
|
3404
|
+
workflow: async (args) => {
|
|
3405
|
+
const client = createClient(args.env);
|
|
3406
|
+
const personaId = args.persona_id ? String(args.persona_id) : undefined;
|
|
3407
|
+
const mode = args.mode ?? "generate";
|
|
3408
|
+
const useAutobuilder = args.use_autobuilder;
|
|
3409
|
+
const autoDeploy = Boolean(args.auto_deploy);
|
|
3410
|
+
const autoFix = args.auto_fix !== false; // Default true
|
|
3411
|
+
// ─────────────────────────────────────────────────────────────────────────
|
|
3412
|
+
// MODE: IMPROVE - Improve existing workflow
|
|
3413
|
+
// ─────────────────────────────────────────────────────────────────────────
|
|
3414
|
+
if (mode === "improve" && personaId) {
|
|
3415
|
+
const persona = await client.getPersonaById(personaId);
|
|
3416
|
+
if (!persona)
|
|
3417
|
+
throw new Error(`AI Employee not found: ${personaId}`);
|
|
3418
|
+
const currentWorkflow = persona.workflow_def;
|
|
3419
|
+
if (!currentWorkflow)
|
|
3420
|
+
throw new Error(`AI Employee "${persona.name}" has no workflow`);
|
|
3421
|
+
const issues = detectWorkflowIssues(currentWorkflow);
|
|
3422
|
+
if (issues.length === 0) {
|
|
3423
|
+
return {
|
|
3424
|
+
environment: client["env"].name,
|
|
3425
|
+
success: true,
|
|
3426
|
+
status: "no_issues",
|
|
3427
|
+
persona_id: personaId,
|
|
3428
|
+
persona_name: persona.name,
|
|
3429
|
+
message: "Workflow has no detected issues",
|
|
3430
|
+
};
|
|
3431
|
+
}
|
|
3432
|
+
// Try auto-fix first
|
|
3433
|
+
if (autoFix) {
|
|
3434
|
+
const fixResult = applyWorkflowFixes(currentWorkflow, issues, persona);
|
|
3435
|
+
if (fixResult.appliedFixes.some((f) => f.applied)) {
|
|
3436
|
+
const newIssues = detectWorkflowIssues(fixResult.fixedWorkflow);
|
|
3437
|
+
if (autoDeploy) {
|
|
3438
|
+
await client.updateAiEmployee({
|
|
3439
|
+
persona_id: personaId,
|
|
3440
|
+
proto_config: persona.proto_config ?? {},
|
|
3441
|
+
workflow: fixResult.fixedWorkflow,
|
|
3442
|
+
});
|
|
3443
                    }
                    return {
                        environment: client["env"].name,
                        success: true,
                        status: autoDeploy ? "deployed" : "improved",
                        persona_id: personaId,
                        persona_name: persona.name,
                        fixes_applied: fixResult.appliedFixes.filter((f) => f.applied).map((f) => f.description),
                        issues_before: issues.length,
                        issues_after: newIssues.length,
                        deployed: autoDeploy,
                        workflow_def: autoDeploy ? undefined : fixResult.fixedWorkflow,
                    };
                }
            }
            return {
                environment: client["env"].name,
                success: false,
                status: "needs_manual_fix",
                persona_id: personaId,
                issues,
            };
        }
        // ─────────────────────────────────────────────────────────────────────────
        // MODE: ANALYZE - Analyze only (no generation)
        // ─────────────────────────────────────────────────────────────────────────
        if (mode === "analyze" && personaId) {
            const persona = await client.getPersonaById(personaId);
            if (!persona)
                throw new Error(`AI Employee not found: ${personaId}`);
            const workflowDef = persona.workflow_def;
            if (!workflowDef) {
                return {
                    environment: client["env"].name,
                    success: true,
                    status: "no_workflow",
                    persona_id: personaId,
                    persona_name: persona.name,
                };
            }
            const issues = detectWorkflowIssues(workflowDef);
            return {
                environment: client["env"].name,
                success: true,
                status: "analyzed",
                persona_id: personaId,
                persona_name: persona.name,
                issues,
                summary: {
                    critical: issues.filter((i) => i.severity === "critical").length,
                    warnings: issues.filter((i) => i.severity === "warning").length,
                },
            };
        }
        // ─────────────────────────────────────────────────────────────────────────
        // MODE: GENERATE - Generate new workflow from input
        // ─────────────────────────────────────────────────────────────────────────
        const input = args.input;
        if (!input && !personaId) {
            return {
                success: false,
                status: "missing_input",
                message: "Provide 'input' (description or spec) or 'persona_id' with mode='improve'",
            };
        }
        // Parse and normalize input
        const parseResult = parseInput(input);
        const { intent, input_type, validation } = parseResult;
        // Override persona_type if provided
        if (args.persona_type) {
            intent.persona_type = args.persona_type;
        }
        // If validation incomplete, return questions
        if (!validation.complete) {
            return {
                environment: client["env"].name,
                success: true,
                status: "needs_input",
                input_type,
                parsed_intent: intent,
                validation,
                message: "Need more information to generate workflow",
                questions: validation.questions,
                suggestions: validation.suggestions,
            };
        }
        // Decide: compile locally vs. use Auto Builder
        const isSimple = !intent.intents || intent.intents.length <= 2;
        const shouldUseAutobuilder = useAutobuilder ?? !isSimple;
        let workflowDef;
        let generationMethod;
        if (shouldUseAutobuilder) {
            // Use Auto Builder
            const prompt = generateAutobuilderPrompt(intent.description, intent.persona_type);
            const autobuilderId = await client.findAutobuilderPersona();
            const conv = await client.createAutobuilderConversation(autobuilderId);
            const result = await client.chatWithAutobuilder(conv.conversation_id, prompt);
            if (!result.workflow_def) {
                return {
                    environment: client["env"].name,
                    success: false,
                    status: "generation_failed",
                    message: "Auto Builder could not generate workflow. Try being more specific.",
                    parsed_intent: intent,
                    autobuilder_response: result.response,
                };
            }
            workflowDef = result.workflow_def;
            generationMethod = "autobuilder";
        }
        else {
            // Compile locally from intent
            const spec = intentToSpec(intent);
            const compiled = compileWorkflow(spec);
            workflowDef = compiled.workflow_def;
            generationMethod = "local_compile";
        }
        // Validate generated workflow
        const issues = detectWorkflowIssues(workflowDef);
        let fixesApplied = [];
        // Auto-fix if needed
        if (issues.length > 0 && autoFix) {
            const mockPersona = { id: personaId ?? "new", name: intent.name };
            const fixResult = applyWorkflowFixes(workflowDef, issues, mockPersona);
            if (fixResult.appliedFixes.some((f) => f.applied)) {
                workflowDef = fixResult.fixedWorkflow;
                fixesApplied = fixResult.appliedFixes.filter((f) => f.applied).map((f) => f.description);
            }
        }
        const finalIssues = detectWorkflowIssues(workflowDef);
        // Deploy if requested
        let deployed = false;
        if (personaId && autoDeploy) {
            const persona = await client.getPersonaById(personaId);
            if (persona) {
                await client.updateAiEmployee({
                    persona_id: personaId,
                    proto_config: persona.proto_config ?? {},
                    workflow: workflowDef,
                });
                deployed = true;
            }
        }
        return {
            environment: client["env"].name,
            success: true,
            status: deployed ? "deployed" : "generated",
            input_type,
            generation_method: generationMethod,
            parsed_intent: intent,
            validation: {
                confidence: validation.confidence,
                suggestions: validation.suggestions,
            },
            issues_found: issues.length,
            auto_fixes: fixesApplied,
            remaining_issues: finalIssues.length,
            workflow_def: deployed ? undefined : workflowDef,
            deployed,
            persona_id: personaId,
            next_steps: deployed
                ? [`Verify: get_persona("${personaId}")`]
                : personaId
                    ? [`Deploy: deploy_workflow("${personaId}", workflow_def)`]
                    : ["Create AI Employee, then deploy workflow"],
        };
    },
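    /*
     * Editor's note (illustrative sketch, not part of the published package): the
     * `workflow` handler above generates a workflow from a natural-language `input`,
     * improves an existing one via `persona_id`, or analyzes it. A minimal greenfield
     * call, assuming the handler reads auto_fix from the tool arguments and the
     * description below is hypothetical:
     *
     *   const generated = await toolHandlers.workflow({
     *     mode: "generate",
     *     input: "Voice agent that checks order status and escalates refund requests",
     *     persona_type: "voice",
     *     auto_fix: true,
     *   });
     *   // generated.status is "generated", or "needs_input" with follow-up questions
     *   // when the parsed intent is incomplete.
     */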
    // ─────────────────────────────────────────────────────────────────────────
    // Demo Data Management Handlers
    // ─────────────────────────────────────────────────────────────────────────
    consolidate_demo_data: async (args) => {
        const fs = await import("fs");
        const path = await import("path");
        const sourceDir = String(args.source_dir);
        const outputDir = String(args.output_dir);
        const entityType = String(args.entity_type);
        const primaryFile = String(args.primary_file);
        const joins = (args.joins ?? []);
        const idField = String(args.id_field ?? "id");
        const nameField = String(args.name_field ?? "name");
        const tags = args.tags ? String(args.tags) : entityType;
        // Ensure directories exist
        if (!fs.existsSync(sourceDir)) {
            throw new Error(`Source directory not found: ${sourceDir}`);
        }
        fs.mkdirSync(outputDir, { recursive: true });
        // Load primary data
        const primaryPath = path.join(sourceDir, primaryFile);
        if (!fs.existsSync(primaryPath)) {
            throw new Error(`Primary file not found: ${primaryPath}`);
        }
        const primaryData = JSON.parse(fs.readFileSync(primaryPath, "utf-8"));
        // Load join data
        const joinData = {};
        for (const join of joins) {
            const joinPath = path.join(sourceDir, join.file);
            if (fs.existsSync(joinPath)) {
                joinData[join.as] = JSON.parse(fs.readFileSync(joinPath, "utf-8"));
            }
        }
        // Generate documents
        const generatedFiles = [];
        for (const entity of primaryData) {
            const entityId = String(entity[idField] ?? "unknown");
            const entityName = String(entity[nameField] ?? entityId);
            // Collect related data
            const related = {};
            for (const join of joins) {
                const data = joinData[join.as] ?? [];
                related[join.as] = data.filter((item) => item[join.on] === entityId);
            }
            // Generate document content
            const content = generateEntityDocument(entityType, entity, related, tags);
            // Write file
            const sanitizedName = entityName.toLowerCase().replace(/[^a-z0-9]+/g, "_");
            const sanitizedId = entityId.toLowerCase().replace(/[^a-z0-9]+/g, "_");
            const filename = `${entityType}_${sanitizedName}_${sanitizedId}.md`;
            const filePath = path.join(outputDir, filename);
            fs.writeFileSync(filePath, content);
            generatedFiles.push(filename);
        }
        return {
            success: true,
            source_dir: sourceDir,
            output_dir: outputDir,
            entity_type: entityType,
            entities_processed: primaryData.length,
            files_generated: generatedFiles,
            next_steps: [
                `Upload: for each file in ${outputDir}, call knowledge(persona_id="<persona_id>", mode="upload", file="/full/path/to/file.md")`,
                'Enable RAG: knowledge(persona_id="<persona_id>", mode="toggle", enabled=true)',
            ],
        };
    },
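    /*
     * Editor's note (illustrative sketch, not part of the published package):
     * consolidate_demo_data joins one primary JSON file against related files on a
     * foreign key and writes one markdown document per entity. A hypothetical layout
     * of ./data/source with customers.json plus orders.json would be invoked as:
     *
     *   await toolHandlers.consolidate_demo_data({
     *     source_dir: "./data/source",
     *     output_dir: "./data/kb",
     *     entity_type: "customer",
     *     primary_file: "customers.json",
     *     joins: [{ file: "orders.json", on: "customerId", as: "orders" }],
     *   });
     *   // A record { id: "CUST-001", name: "Acme Corporation" } becomes
     *   // ./data/kb/customer_acme_corporation_cust_001.md
     */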
    generate_demo_document: async (args) => {
        const fs = await import("fs");
        const entityType = String(args.entity_type);
        const data = args.data;
        const relatedData = (args.related_data ?? {});
        const outputPath = args.output_path ? String(args.output_path) : undefined;
        const tags = args.tags ? String(args.tags) : entityType;
        const content = generateEntityDocument(entityType, data, relatedData, tags);
        if (outputPath) {
            fs.writeFileSync(outputPath, content);
            return {
                success: true,
                output_path: outputPath,
                message: "Document generated and saved",
            };
        }
        return {
            success: true,
            content,
            message: "Document generated (not saved - no output_path provided)",
        };
    },
    validate_demo_document: async (args) => {
        const fs = await import("fs");
        let content;
        if (args.file_path) {
            if (!fs.existsSync(String(args.file_path))) {
                throw new Error(`File not found: ${args.file_path}`);
            }
            content = fs.readFileSync(String(args.file_path), "utf-8");
        }
        else if (args.content) {
            content = String(args.content);
        }
        else {
            throw new Error("Either file_path or content must be provided");
        }
        const issues = [];
        const suggestions = [];
        // Check for metadata comments
        if (!content.includes("<!-- ema_entity:")) {
            issues.push({ severity: "warning", message: "Missing ema_entity metadata comment" });
            suggestions.push("Add: <!-- ema_entity: customer | product | employee | scenario -->");
        }
        if (!content.includes("<!-- ema_id:")) {
            issues.push({ severity: "warning", message: "Missing ema_id metadata comment" });
            suggestions.push("Add: <!-- ema_id: unique-identifier -->");
        }
        if (!content.includes("<!-- ema_tags:")) {
            issues.push({ severity: "info", message: "Missing ema_tags metadata comment" });
            suggestions.push("Add: <!-- ema_tags: comma, separated, tags -->");
        }
        // Check for title
        if (!content.match(/^# /m)) {
            issues.push({ severity: "error", message: "Missing document title (# heading)" });
        }
        // Check for tables
        const hasTables = content.includes("|") && content.includes("---");
        if (!hasTables) {
            issues.push({ severity: "info", message: "No tables detected - consider adding structured data in table format" });
        }
        // Check for narrative content
        const paragraphCount = content.split(/\n\n/).filter((p) => p.trim().length > 100).length;
        if (paragraphCount < 1) {
            issues.push({ severity: "info", message: "Limited narrative content - consider adding relationship summaries for better semantic search" });
        }
        // Check filename convention (if file_path provided)
        if (args.file_path) {
            const filename = String(args.file_path).split("/").pop() ?? "";
            if (!filename.match(/^[a-z]+_[a-z0-9_]+\.md$/)) {
                issues.push({ severity: "info", message: `Filename '${filename}' doesn't follow convention: {entity}_{name}_{id}.md` });
            }
        }
        const criticalCount = issues.filter((i) => i.severity === "error").length;
        const warningCount = issues.filter((i) => i.severity === "warning").length;
        return {
            valid: criticalCount === 0,
            issues,
            suggestions,
            summary: {
                critical: criticalCount,
                warnings: warningCount,
                info: issues.filter((i) => i.severity === "info").length,
            },
            recommendation: criticalCount > 0
                ? "Fix critical issues before uploading"
                : warningCount > 0
                    ? "Document is usable but consider addressing warnings"
                    : "Document follows best practices",
        };
    },
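    /*
     * Editor's note (illustrative sketch, not part of the published package): a
     * document stub that satisfies every check above - metadata comments, a title
     * heading, and at least one table. Passing it as `content` (file name and ids
     * are hypothetical) yields valid: true with no error-severity issues:
     *
     *   await toolHandlers.validate_demo_document({
     *     content: [
     *       "# Customer: Acme Corporation",
     *       "<!-- ema_entity: customer -->",
     *       "<!-- ema_id: CUST-001 -->",
     *       "<!-- ema_tags: customer, enterprise -->",
     *       "| Field | Value |",
     *       "|-------|-------|",
     *       "| Tier | Enterprise |",
     *     ].join("\n"),
     *   });
     */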
    get_demo_data_template: async (args) => {
        const entityType = String(args.entity_type);
        const includeExample = args.include_example !== false;
        const templates = {
            customer: {
                sourceSchema: {
                    id: "string (required) - Unique customer identifier",
                    name: "string (required) - Company/customer name",
                    industry: "string - Industry classification",
                    tier: "string - Account tier (Enterprise, Mid-Market, SMB)",
                    accountManager: "string - Assigned account manager",
                    region: "string - Geographic region",
                    annualRevenue: "number - Annual revenue",
                    employeeCount: "string - Employee count range",
                    contacts: "array - Key contacts [{name, role, email, phone, primary}]",
                },
                joinSuggestions: [
                    { file: "orders.json", on: "customerId", as: "orders" },
                    { file: "tickets.json", on: "customerId", as: "tickets" },
                    { file: "notes.json", on: "customerId", as: "notes" },
                ],
                outputTemplate: `# Customer: {name}

## Metadata
<!-- ema_entity: customer -->
<!-- ema_id: {id} -->
<!-- ema_tags: {tier}, {industry} -->

## Overview
| Field | Value |
|-------|-------|
| Customer ID | {id} |
| Company Name | {name} |
| Industry | {industry} |
| Account Tier | {tier} |

## Relationship Summary
[Narrative describing customer relationship, history, and status]

## Orders
| Order ID | Date | Amount | Status |
|----------|------|--------|--------|
[Order rows]

## Support Tickets
| Ticket ID | Subject | Priority | Status |
|-----------|---------|----------|--------|
[Ticket rows]

## Key Contacts
| Name | Role | Email | Primary |
|------|------|-------|---------|
[Contact rows]
`,
                bestPractices: [
                    "Include lifetime value calculation in Overview",
                    "Add relationship summary with narrative context",
                    "Include risk indicators for at-risk customers",
                    "Cross-reference product SKUs in orders",
                ],
                example: includeExample ? {
                    source: {
                        id: "CUST-001",
                        name: "Acme Corporation",
                        industry: "Manufacturing",
                        tier: "Enterprise",
                        accountManager: "Sarah Johnson",
                        contacts: [
                            { name: "Lisa Park", role: "IT Director", email: "lisa@acme.com", primary: true },
                        ],
                    },
                    related: {
                        orders: [
                            { id: "ORD-001", customerId: "CUST-001", date: "2024-01-15", amount: 45000, status: "Delivered" },
                        ],
                        tickets: [
                            { id: "TKT-001", customerId: "CUST-001", subject: "API issue", priority: "High", status: "Open" },
                        ],
                    },
                } : undefined,
            },
            product: {
                sourceSchema: {
                    sku: "string (required) - Product SKU",
                    name: "string (required) - Product name",
                    category: "string - Product category",
                    price: "number - Unit price",
                    description: "string - Product description",
                    specifications: "object - Technical specifications",
                    compatibility: "array - Compatible product SKUs",
                    incompatibility: "array - Incompatible product SKUs with reasons",
                },
                joinSuggestions: [
                    { file: "pricing_tiers.json", on: "sku", as: "pricing" },
                    { file: "customer_purchases.json", on: "sku", as: "customers" },
                ],
                outputTemplate: `# Product: {name}

## Metadata
<!-- ema_entity: product -->
<!-- ema_id: {sku} -->
<!-- ema_tags: {category} -->

## Basic Information
| Field | Value |
|-------|-------|
| SKU | {sku} |
| Name | {name} |
| Category | {category} |
| Price | {price} |

## Description
{description}

## Specifications
[Technical specs table]

## Compatibility
| Compatible With | SKU | Notes |
|-----------------|-----|-------|
[Compatibility rows]

## Customer Examples
| Customer | Quantity | Use Case |
|----------|----------|----------|
[Customer rows]
`,
                bestPractices: [
                    "Include compatibility matrix",
                    "Add pricing tiers if applicable",
                    "Cross-reference customer deployments",
                    "Include common use cases",
                ],
                example: includeExample ? {
                    source: {
                        sku: "EWP-100",
                        name: "Enterprise Widget Pro",
                        category: "Widgets",
                        price: 999,
                        description: "Flagship enterprise widget for mission-critical applications",
                    },
                } : undefined,
            },
            scenario: {
                sourceSchema: {
                    id: "string (required) - Scenario identifier",
                    name: "string (required) - Scenario name",
                    description: "string - Scenario overview",
                    difficulty: "string - Demo difficulty level",
                    customerContext: "object - Customer data for the scenario",
                    expectedQueries: "array - Queries the user might ask",
                    idealResponse: "string - Example ideal AI response",
                },
                joinSuggestions: [],
                outputTemplate: `# Scenario: {name}

## Metadata
<!-- ema_scenario: {id} -->
<!-- ema_tags: demo, {difficulty} -->

## Overview
{description}

## Customer Context
[Customer details for this scenario]

## Expected Queries
- "{query1}"
- "{query2}"

## Ideal Response Pattern
[How the AI should respond]
`,
                bestPractices: [
                    "Include all context needed to answer expected queries",
                    "Provide ideal response example",
                    "List potential edge cases",
                    "Include escalation triggers",
                ],
                example: includeExample ? {
                    source: {
                        id: "renewal_high_value",
                        name: "High-Value Customer Renewal",
                        description: "Customer up for renewal with competitor evaluation active",
                        difficulty: "intermediate",
                    },
                } : undefined,
            },
            employee: {
                sourceSchema: {
                    id: "string (required) - Employee ID",
                    name: "string (required) - Full name",
                    department: "string - Department",
                    role: "string - Job title",
                    manager: "string - Manager name",
                    email: "string - Email address",
                    location: "string - Office location",
                    startDate: "string - Start date",
                },
                joinSuggestions: [
                    { file: "leave_balances.json", on: "employeeId", as: "leave" },
                    { file: "benefits.json", on: "employeeId", as: "benefits" },
                ],
                outputTemplate: `# Employee: {name}

## Metadata
<!-- ema_entity: employee -->
<!-- ema_id: {id} -->
<!-- ema_tags: {department} -->

## Profile
| Field | Value |
|-------|-------|
| Employee ID | {id} |
| Name | {name} |
| Department | {department} |
| Role | {role} |
| Manager | {manager} |

## Leave Balances
| Type | Available | Used |
|------|-----------|------|
[Leave rows]

## Benefits Enrollment
| Benefit | Plan | Status |
|---------|------|--------|
[Benefits rows]
`,
                bestPractices: [
                    "Include manager hierarchy",
                    "Add leave balances if HR use case",
                    "Include benefits enrollment status",
                    "Consider PII handling requirements",
                ],
                example: includeExample ? {
                    source: {
                        id: "EMP-001",
                        name: "Jane Doe",
                        department: "Engineering",
                        role: "Senior Engineer",
                        manager: "John Smith",
                    },
                } : undefined,
            },
            reference: {
                sourceSchema: {
                    category: "string (required) - Reference category",
                    title: "string (required) - Document title",
                    content: "string - Main content",
                    lastUpdated: "string - Last update date",
                },
                joinSuggestions: [],
                outputTemplate: `# {title}

## Metadata
<!-- ema_doctype: reference -->
<!-- ema_category: {category} -->
<!-- ema_last_updated: {lastUpdated} -->

## Content
{content}

## Related Topics
[Cross-references to related documents]
`,
                bestPractices: [
                    "Include last updated date",
                    "Cross-reference related documents",
                    "Use consistent formatting",
                    "Add search-friendly summaries",
                ],
                example: undefined,
            },
        };
        const template = templates[entityType];
        if (!template) {
            throw new Error(`Unknown entity type: ${entityType}. Available: ${Object.keys(templates).join(", ")}`);
        }
        return {
            entity_type: entityType,
            source_schema: template.sourceSchema,
            suggested_joins: template.joinSuggestions,
            output_template: template.outputTemplate,
            best_practices: template.bestPractices,
            example: template.example,
            usage: {
                consolidate: `demo(mode="consolidate", source="./data/source", output="./data/kb", entity="${entityType}", primary="${entityType}s.json", joins=${JSON.stringify(template.joinSuggestions)})`,
                generate_single: `demo(mode="generate", entity="${entityType}", data={...}, related={...})`,
            },
        };
    },
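    /*
     * Editor's note (illustrative sketch, not part of the published package):
     * get_demo_data_template returns the expected source schema, join suggestions,
     * markdown output template, and best practices for one of the entity types
     * defined above (customer, product, scenario, employee, reference). For example:
     *
     *   const tpl = await toolHandlers.get_demo_data_template({ entity_type: "customer" });
     *   // tpl.suggested_joins includes orders.json / tickets.json / notes.json, and
     *   // tpl.usage.consolidate is a ready-made demo(mode="consolidate", ...) string.
     */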
    // ═══════════════════════════════════════════════════════════════════════════
    // CONSOLIDATED TOOLS (Unix CLI pattern) - 9 tools replace 45
    // ═══════════════════════════════════════════════════════════════════════════
    env: async () => {
        return handleEnv({}, () => getAvailableEnvironments().map(e => ({
            name: e.name,
            isDefault: e.name === getDefaultEnvName(),
        })));
    },
    persona: async (args) => {
        const client = createClient(args.env);
        const DEFAULT_TEMPLATES = {
            voice: "00000000-0000-0000-0000-00000000001e",
            chat: "00000000-0000-0000-0000-000000000004",
            dashboard: "00000000-0000-0000-0000-000000000002",
        };
        return handlePersona(args, client, (type) => DEFAULT_TEMPLATES[type], (env) => createClient(env));
    },
    // Note: 'workflow' handler already exists above - consolidated version adds analyze modes
    // The existing 'workflow' handler is kept for backward compatibility
    action: async (args) => {
        const client = createClient(args.env);
        return handleAction(args, client);
    },
    template: async (args) => {
        return handleTemplate(args);
    },
    knowledge: async (args) => {
        const client = createClient(args.env);
        const fs = await import("fs/promises");
        return handleKnowledge(args, client, (path) => fs.readFile(path));
    },
    reference: async (args) => {
        return handleReference(args);
    },
    // Note: 'sync' handler already exists above - keeping it for full functionality
    // Note: demo is handled via a consolidated adapter below
};
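// Editor's sketch (not part of the published package): the consolidated handlers in
// the map above can be invoked directly in-process, which is handy for smoke tests
// before wiring up an MCP client. This helper only lists the configured environments
// via the argument-less `env` tool; the return shape is whatever handleEnv produces.
async function exampleListEnvironments() {
    const environments = await toolHandlers.env();
    console.error("Configured Ema environments:", environments);
    return environments;
}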
// ─────────────────────────────────────────────────────────────────────────────
// Consolidated Tool Adapters (contract ↔ implementation)
// ─────────────────────────────────────────────────────────────────────────────
//
// The tool schemas in tools-consolidated.ts are the public MCP contract.
// This file still contains a number of proven "legacy" handlers that we reuse
// (e.g. deploy_workflow, optimize_workflow, compile_workflow, etc.).
//
// These adapters ensure the consolidated tool surface behaves as documented,
// while preserving the existing internal implementations.
const legacyWorkflowTool = toolHandlers.workflow;
const legacyDeployWorkflow = toolHandlers.deploy_workflow;
const legacyOptimizeWorkflow = toolHandlers.optimize_workflow;
const legacyCompareWorkflowVersions = toolHandlers.compare_workflow_versions;
const legacyCompileWorkflow = toolHandlers.compile_workflow;
const legacyDetectWorkflowIssues = toolHandlers.detect_workflow_issues;
const legacyValidateWorkflowConnections = toolHandlers.validate_workflow_connections;
const legacySuggestWorkflowFixes = toolHandlers.suggest_workflow_fixes;
const legacySyncRun = toolHandlers.sync;
const legacySyncInfo = toolHandlers.sync_info;
const legacyConsolidateDemoData = toolHandlers.consolidate_demo_data;
const legacyGenerateDemoDocument = toolHandlers.generate_demo_document;
const legacyValidateDemoDocument = toolHandlers.validate_demo_document;
const legacyGetDemoDataTemplate = toolHandlers.get_demo_data_template;
// Unify workflow modes: generate | analyze | deploy | optimize | compare | compile
toolHandlers.workflow = async (args) => {
    // Normalize persona type alias: tool schema uses "type", internal uses "persona_type"
    const normalizedArgs = { ...(args ?? {}) };
    if (normalizedArgs.persona_type === undefined && normalizedArgs.type !== undefined) {
        normalizedArgs.persona_type = normalizedArgs.type;
    }
    const personaId = normalizedArgs.persona_id ? String(normalizedArgs.persona_id) : undefined;
    const workflowDef = normalizedArgs.workflow_def;
    const inputProvided = normalizedArgs.input !== undefined;
    // Normalize mode aliases
    const rawMode = normalizedArgs.mode ? String(normalizedArgs.mode) : undefined;
    const mode = rawMode === "improve" ? "optimize" : rawMode;
    // Default mode selection:
    // - persona_id + input → optimize (BROWNFIELD: enhance existing workflow)
    // - persona_id only or workflow_def → analyze (inspect)
    // - input only → generate (GREENFIELD: create from scratch)
    // - Otherwise → generate (and prompt for missing info)
    const effectiveMode = mode ??
        (personaId && inputProvided ? "optimize" : // BROWNFIELD: existing persona + new requirements
            workflowDef || personaId ? "analyze" : // Inspect existing
                "generate"); // GREENFIELD: new workflow
    switch (effectiveMode) {
        case "generate": {
            const result = await legacyWorkflowTool({ ...normalizedArgs, mode: "generate" });
            // Ensure next_steps point at consolidated tools by default
            if (result && typeof result === "object") {
                const obj = result;
                const persona_id = obj.persona_id ? String(obj.persona_id) : personaId;
                const deployed = obj.deployed === true;
                obj.next_steps = deployed
                    ? persona_id
                        ? [`Verify: persona(id="${persona_id}", include_workflow=true)`]
                        : ["Verify the deployed AI Employee in the Ema UI"]
                    : persona_id
                        ? [
                            `Deploy: workflow(mode="deploy", persona_id="${persona_id}", workflow_def=<workflow_def>, proto_config=<proto_config>)`,
                            `Verify: persona(id="${persona_id}", include_workflow=true)`,
                        ]
                        : [
                            "Create an AI Employee in Ema, then deploy:",
                            `workflow(mode="deploy", persona_id="<persona_id>", workflow_def=<workflow_def>, proto_config=<proto_config>)`,
                        ];
            }
            return result;
        }
        case "analyze": {
            const include = Array.isArray(normalizedArgs.include)
                ? normalizedArgs.include.map(String)
                : ["issues", "connections", "fixes", "metrics"];
            let wf = workflowDef;
            let meta;
            if (!wf && personaId) {
                const client = createClient(normalizedArgs.env);
                const persona = await client.getPersonaById(personaId);
                if (!persona)
                    throw new Error(`AI Employee not found: ${personaId}`);
                wf = persona.workflow_def;
                meta = {
                    persona_id: personaId,
                    persona_name: persona.name,
                    environment: client["env"].name,
                };
            }
            if (!wf) {
                return {
                    error: "Missing workflow to analyze. Provide persona_id or workflow_def.",
                    hint: 'Examples: workflow(mode="analyze", persona_id="...") or workflow(mode="analyze", workflow_def={...})',
                };
            }
            const out = { mode: "analyze", ...(meta ?? {}) };
            // Issues + summary (source of truth)
            const issuesResult = await legacyDetectWorkflowIssues({ workflow_def: wf });
            const issues = issuesResult.issues;
            if (include.includes("issues")) {
                out.issues = issuesResult.issues;
                out.issue_summary = issuesResult.summary;
                out.validation_passed = issuesResult.validation_passed;
            }
            if (include.includes("connections")) {
                out.connections = await legacyValidateWorkflowConnections({ workflow_def: wf });
            }
            if (include.includes("fixes")) {
                out.fixes = Array.isArray(issues) ? await legacySuggestWorkflowFixes({ issues }) : { error: "No issues array available to generate fixes" };
            }
            if (include.includes("metrics")) {
                const actions = wf.actions ?? [];
                const edges = wf.edges ?? [];
                out.metrics = {
                    node_count: Array.isArray(actions) ? actions.length : 0,
                    edge_count: Array.isArray(edges) ? edges.length : 0,
                };
            }
            return out;
        }
        case "deploy": {
            if (!personaId) {
                throw new Error('persona_id is required for workflow(mode="deploy")');
            }
            const validateFirst = normalizedArgs.validate !== false; // default true
            const autoFix = normalizedArgs.auto_fix === true; // default false
            return legacyDeployWorkflow({
                persona_id: personaId,
                workflow_def: workflowDef,
                proto_config: normalizedArgs.proto_config,
                validate_first: validateFirst,
                auto_fix: autoFix,
                env: normalizedArgs.env,
            });
        }
        case "optimize": {
            // optimize_workflow supports both:
            // - fix mode: id (name/id) OR persona_id
            // - generate mode: persona_id + prompt
            const preview = normalizedArgs.preview === true;
            const id = normalizedArgs.id ? String(normalizedArgs.id) : undefined;
            const identifier = normalizedArgs.identifier ? String(normalizedArgs.identifier) : undefined; // deprecated alias
            const idOrIdentifier = id ?? identifier;
            // If a natural-language workflow "input" is provided, treat it as a prompt to optimize_workflow.
            const input = normalizedArgs.input;
            const prompt = typeof input === "string" ? input : undefined;
            return legacyOptimizeWorkflow({
                identifier: idOrIdentifier ?? personaId,
                persona_id: personaId,
                prompt,
                type: normalizedArgs.persona_type,
                preview,
                env: normalizedArgs.env,
            });
        }
        case "compare": {
            if (!personaId) {
                throw new Error('persona_id is required for workflow(mode="compare")');
            }
            const compareTo = normalizedArgs.compare_to ? String(normalizedArgs.compare_to) : undefined;
            if (!compareTo) {
                throw new Error('compare_to is required for workflow(mode="compare")');
            }
            return legacyCompareWorkflowVersions({
                persona_id_before: personaId,
                persona_id_after: compareTo,
                env: normalizedArgs.env,
            });
        }
        case "compile": {
            // Compile requires node specs + result mappings.
            if (!normalizedArgs.name || !normalizedArgs.description) {
                throw new Error('workflow(mode="compile") requires name and description');
            }
            if (!normalizedArgs.persona_type) {
                throw new Error('workflow(mode="compile") requires persona type via type="voice|chat|dashboard"');
            }
            return legacyCompileWorkflow({
                name: normalizedArgs.name,
                description: normalizedArgs.description,
                persona_type: normalizedArgs.persona_type,
                nodes: normalizedArgs.nodes,
                result_mappings: normalizedArgs.result_mappings,
            });
        }
        default: {
            return legacyWorkflowTool({ ...normalizedArgs, mode: effectiveMode });
        }
    }
};
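// Editor's sketch (not part of the published package): the adapter above infers a
// mode when none is given - persona_id plus input becomes "optimize", persona_id or
// workflow_def alone becomes "analyze", and input alone becomes "generate". The id
// and prompt below are hypothetical.
async function exampleBrownfieldImprovement() {
    return toolHandlers.workflow({
        persona_id: "00000000-0000-0000-0000-000000000123",
        input: "Add an escalation branch for refund requests over $500",
        // No mode given: the adapter routes this to optimize_workflow with input as the prompt.
    });
}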
// Unify sync modes: run | status | config
toolHandlers.sync = async (args) => {
    const normalizedArgs = { ...(args ?? {}) };
    const rawMode = normalizedArgs.mode ? String(normalizedArgs.mode) : "run";
    const mode = rawMode;
    // Support both old and new arg names
    const target = (normalizedArgs.target ?? normalizedArgs.target_env);
    const source = (normalizedArgs.source ?? normalizedArgs.source_env);
    const id = normalizedArgs.id;
    const identifier = normalizedArgs.identifier; // deprecated alias
    const idOrIdentifier = id ?? identifier;
    if (mode === "config") {
        return legacySyncInfo({ include_options: true });
    }
    if (mode === "status") {
        const env = normalizedArgs.env;
        if (normalizedArgs.list_synced === true) {
            if (!env)
                throw new Error('env is required for sync(mode="status", list_synced=true)');
            return legacySyncInfo({ list_synced: true, master_env: normalizedArgs.master_env, env });
        }
        if (idOrIdentifier) {
            if (!env)
                throw new Error('env is required for sync(mode="status", id="...")');
            const identifierToResolve = String(idOrIdentifier);
            const isUUID = /^[0-9a-f]{8}-[0-9a-f]{4}-[0-9a-f]{4}-[0-9a-f]{4}-[0-9a-f]{12}$/i.test(identifierToResolve);
            if (isUUID) {
                return legacySyncInfo({ persona_id: identifierToResolve, env });
            }
            // Name lookup: resolve to ID in env, then reuse persona_id path
            const client = createClient(env);
            const personas = await client.getPersonasForTenant();
            const match = personas.find((p) => p.name === identifierToResolve);
            if (!match)
                throw new Error(`AI Employee not found by name in ${env}: ${identifierToResolve}`);
            return legacySyncInfo({ persona_id: match.id, env });
        }
        // Default: overall sync status/config summary
        return legacySyncInfo({ include_options: normalizedArgs.include_options === true });
    }
    // mode === "run" (default)
    if (!target) {
        throw new Error('target (or target_env) is required for sync(mode="run")');
    }
    return legacySyncRun({
        identifier: idOrIdentifier,
        target_env: target,
        source_env: source,
        scope: normalizedArgs.scope,
        dry_run: normalizedArgs.dry_run,
        include_status: normalizedArgs.include_status,
    });
};
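// Editor's sketch (not part of the published package): a dry-run sync of a single
// AI Employee to a target environment through the adapter above. The identifier and
// environment name are hypothetical; real names come from the sync config, which
// sync(mode="config") reports.
async function exampleDryRunSync() {
    return toolHandlers.sync({
        mode: "run",
        id: "Advisor Comms Assistant",
        target: "staging",
        dry_run: true,
    });
}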
// Consolidated demo tool: consolidate | generate | validate | template
toolHandlers.demo = async (args) => {
    const normalizedArgs = { ...(args ?? {}) };
    const mode = normalizedArgs.mode ? String(normalizedArgs.mode) : "template";
    switch (mode) {
        case "consolidate": {
            const source = String(normalizedArgs.source ?? "");
            const output = String(normalizedArgs.output ?? "");
            const entity = String(normalizedArgs.entity ?? "");
            if (!source || !output || !entity) {
                throw new Error('demo(mode="consolidate") requires: source, output, entity');
            }
            return legacyConsolidateDemoData({
                source_dir: source,
                output_dir: output,
                entity_type: entity,
                primary_file: normalizedArgs.primary ?? `${entity}s.json`,
                joins: normalizedArgs.joins ?? [],
                tags: normalizedArgs.tags,
            });
        }
        case "generate": {
            const entity = String(normalizedArgs.entity ?? "");
            if (!entity)
                throw new Error('demo(mode="generate") requires: entity');
            return legacyGenerateDemoDocument({
                entity_type: entity,
                data: normalizedArgs.data ?? {},
                related_data: normalizedArgs.related ?? {},
                output_path: normalizedArgs.output,
                tags: normalizedArgs.tags,
            });
        }
        case "validate": {
            return legacyValidateDemoDocument({
                file_path: normalizedArgs.file,
                content: normalizedArgs.content,
            });
        }
        case "template": {
            const entity = String(normalizedArgs.entity ?? "");
            if (!entity)
                throw new Error('demo(mode="template") requires: entity');
            return legacyGetDemoDataTemplate({
                entity_type: entity,
                include_example: normalizedArgs.include_example,
            });
        }
        default:
            throw new Error(`Unknown demo mode: ${mode}`);
    }
};
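// Editor's sketch (not part of the published package): consolidating raw demo JSON
// into per-customer knowledge-base documents through the adapter above, mirroring the
// usage string returned by demo(mode="template"). Paths and join files are hypothetical.
async function exampleConsolidateCustomers() {
    return toolHandlers.demo({
        mode: "consolidate",
        source: "./data/source",
        output: "./data/kb",
        entity: "customer",
        primary: "customers.json",
        joins: [{ file: "orders.json", on: "customerId", as: "orders" }],
    });
}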
// Helper function for document generation
function generateEntityDocument(entityType, entity, related, tags) {
    const id = String(entity.id ?? entity.sku ?? entity.employeeId ?? "unknown");
    const name = String(entity.name ?? entity.title ?? id);
    let content = `# ${entityType.charAt(0).toUpperCase() + entityType.slice(1)}: ${name}

## Metadata
<!-- ema_entity: ${entityType} -->
<!-- ema_id: ${id} -->
<!-- ema_tags: ${tags} -->
<!-- ema_generated: ${new Date().toISOString().split("T")[0]} -->

## Overview
| Field | Value |
|-------|-------|
`;
    // Add entity fields to overview table
    const skipFields = ["contacts", "items", "notes"];
    for (const [key, value] of Object.entries(entity)) {
        if (!skipFields.includes(key) && value !== null && value !== undefined && typeof value !== "object") {
            const displayKey = key.replace(/([A-Z])/g, " $1").replace(/^./, (s) => s.toUpperCase());
            const displayValue = typeof value === "number" && key.toLowerCase().includes("revenue")
                ? `$${value.toLocaleString()}`
                : String(value);
            content += `| ${displayKey} | ${displayValue} |\n`;
        }
    }
    // Add relationship summary
    const relationshipParts = [];
    for (const [key, items] of Object.entries(related)) {
        if (items.length > 0) {
            relationshipParts.push(`${items.length} ${key}`);
        }
    }
    if (relationshipParts.length > 0) {
        content += `\n## Relationship Summary\nThis ${entityType} has ${relationshipParts.join(", ")}.\n`;
    }
    // Add contacts if present
    const contacts = entity.contacts;
    if (contacts && contacts.length > 0) {
        content += `\n## Key Contacts
| Name | Role | Email | Primary |
|------|------|-------|---------|
`;
        for (const contact of contacts) {
            content += `| ${contact.name ?? ""} | ${contact.role ?? ""} | ${contact.email ?? ""} | ${contact.primary ? "Yes" : "No"} |\n`;
        }
    }
    // Add related data sections
    for (const [key, items] of Object.entries(related)) {
        if (items.length > 0) {
            const sectionTitle = key.charAt(0).toUpperCase() + key.slice(1);
            content += `\n## ${sectionTitle} (${items.length} total)\n`;
            // Get all unique keys from items
            const allKeys = new Set();
            for (const item of items) {
                const itemObj = item;
                for (const k of Object.keys(itemObj)) {
                    if (typeof itemObj[k] !== "object")
                        allKeys.add(k);
                }
            }
            const keys = Array.from(allKeys).slice(0, 6); // Limit columns
            // Create table header
            content += `| ${keys.map((k) => k.replace(/([A-Z])/g, " $1").replace(/^./, (s) => s.toUpperCase())).join(" | ")} |\n`;
            content += `|${keys.map(() => "---").join("|")}|\n`;
            // Add rows
            for (const item of items.slice(0, 10)) { // Limit rows
                const itemObj = item;
                const values = keys.map((k) => {
                    const v = itemObj[k];
                    if (typeof v === "number" && k.toLowerCase().includes("amount")) {
                        return `$${v.toLocaleString()}`;
                    }
                    return String(v ?? "");
                });
                content += `| ${values.join(" | ")} |\n`;
            }
            if (items.length > 10) {
                content += `\n*...and ${items.length - 10} more*\n`;
            }
        }
    }
    return content;
}
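// Editor's sketch (not part of the published package): generateEntityDocument builds
// a markdown document with metadata comments, an Overview table, and one table per
// related collection. The sample entity and order below are hypothetical.
function exampleCustomerDocument() {
    const markdown = generateEntityDocument("customer", { id: "CUST-001", name: "Acme Corporation", tier: "Enterprise" }, { orders: [{ id: "ORD-001", amount: 45000, status: "Delivered" }] }, "customer, enterprise");
    // markdown starts with "# Customer: Acme Corporation" followed by the metadata
    // block, the Overview table, and an "Orders (1 total)" section.
    return markdown;
}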
// ─────────────────────────────────────────────────────────────────────────────
// MCP Server Setup
// ─────────────────────────────────────────────────────────────────────────────
// Initialize registries
const promptRegistry = new PromptRegistry();
const resourceRegistry = new ResourceRegistry();
export async function startMcpServer() {
    const server = new Server({ name: "ema", version: "1.0.0" }, {
        capabilities: {
            tools: {},
            prompts: {},
            resources: {},
        }
    });
    // ─────────────────────────────────────────────────────────────────────────
    // Tool Handlers
    // ─────────────────────────────────────────────────────────────────────────
    server.setRequestHandler(ListToolsRequestSchema, async () => ({ tools: TOOLS }));
    server.setRequestHandler(CallToolRequestSchema, async (request) => {
        const { name, arguments: args } = request.params;
        const handler = toolHandlers[name];
        if (!handler) {
            return {
                content: [{ type: "text", text: JSON.stringify({ error: `Unknown tool: ${name}` }) }],
                isError: true,
            };
        }
        try {
            const result = await handler((args ?? {}));
            return {
                content: [{ type: "text", text: JSON.stringify(result, null, 2) }],
            };
        }
        catch (error) {
            return {
                content: [{ type: "text", text: JSON.stringify({ error: error instanceof Error ? error.message : String(error) }) }],
                isError: true,
            };
        }
    });
    // ─────────────────────────────────────────────────────────────────────────
    // Prompt Handlers
    // ─────────────────────────────────────────────────────────────────────────
    server.setRequestHandler(ListPromptsRequestSchema, async () => {
        const prompts = promptRegistry.list();
        return {
            prompts: prompts.map((p) => ({
                name: p.name,
                description: p.description,
                arguments: p.arguments.map((a) => ({
                    name: a.name,
                    description: a.description,
                    required: a.required,
                })),
            })),
        };
    });
    server.setRequestHandler(GetPromptRequestSchema, async (request) => {
        const { name, arguments: args } = request.params;
        const result = promptRegistry.get(name, args);
        if (isPromptError(result)) {
            throw new Error(`${result.code}: ${result.message}`);
        }
        return {
            description: result.description,
            messages: result.messages.map((m) => ({
                role: m.role,
                content: m.content,
            })),
        };
    });
    // ─────────────────────────────────────────────────────────────────────────
    // Resource Handlers
    // ─────────────────────────────────────────────────────────────────────────
    server.setRequestHandler(ListResourcesRequestSchema, async () => {
        const resources = resourceRegistry.list();
        return {
            resources: resources.map((r) => ({
                uri: r.uri,
                name: r.name,
                description: r.description,
                mimeType: r.mimeType,
            })),
        };
    });
    server.setRequestHandler(ReadResourceRequestSchema, async (request) => {
        const { uri } = request.params;
        const result = await resourceRegistry.read(uri);
        if (isResourceError(result)) {
            throw new Error(`${result.code}: ${result.message}`);
        }
        return {
            contents: [
                {
                    uri: result.uri,
                    mimeType: result.mimeType,
                    text: result.text,
                },
            ],
        };
    });
    const transport = new StdioServerTransport();
    await server.connect(transport);
    console.error("Ema MCP Server started (multi-env) with prompts and resources");
}
startMcpServer().catch((error) => {
    console.error("Failed to start MCP server:", error);
    process.exit(1);
});
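// Editor's sketch (not part of the published package): exercising the server from the
// client half of @modelcontextprotocol/sdk over stdio. The command path and the tool
// call are hypothetical; adjust them to however this package is launched locally.
async function exampleCallFromMcpClient() {
    const { Client } = await import("@modelcontextprotocol/sdk/client/index.js");
    const { StdioClientTransport } = await import("@modelcontextprotocol/sdk/client/stdio.js");
    const client = new Client({ name: "ema-example-client", version: "0.0.1" }, { capabilities: {} });
    await client.connect(new StdioClientTransport({ command: "node", args: ["dist/mcp/server.js"] }));
    // List configured environments through the consolidated `env` tool.
    const result = await client.callTool({ name: "env", arguments: {} });
    console.error(result);
}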