kubiya_control_plane_api-0.9.15-py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (479)
  1. control_plane_api/LICENSE +676 -0
  2. control_plane_api/README.md +350 -0
  3. control_plane_api/__init__.py +4 -0
  4. control_plane_api/__version__.py +8 -0
  5. control_plane_api/alembic/README +1 -0
  6. control_plane_api/alembic/env.py +121 -0
  7. control_plane_api/alembic/script.py.mako +28 -0
  8. control_plane_api/alembic/versions/2613c65c3dbe_initial_database_setup.py +32 -0
  9. control_plane_api/alembic/versions/2df520d4927d_merge_heads.py +28 -0
  10. control_plane_api/alembic/versions/43abf98d6a01_add_paused_status_to_executions.py +73 -0
  11. control_plane_api/alembic/versions/6289854264cb_merge_multiple_heads.py +28 -0
  12. control_plane_api/alembic/versions/6a4d4dc3d8dc_generate_execution_transitions.py +50 -0
  13. control_plane_api/alembic/versions/87d11cf0a783_add_disconnected_status_to_worker_.py +44 -0
  14. control_plane_api/alembic/versions/add_ephemeral_queue_support.py +85 -0
  15. control_plane_api/alembic/versions/add_model_type_to_llm_models.py +31 -0
  16. control_plane_api/alembic/versions/add_plan_executions_table.py +114 -0
  17. control_plane_api/alembic/versions/add_trace_span_tables.py +154 -0
  18. control_plane_api/alembic/versions/add_user_info_to_traces.py +36 -0
  19. control_plane_api/alembic/versions/adjusting_foreign_keys.py +32 -0
  20. control_plane_api/alembic/versions/b4983d976db2_initial_tables.py +1128 -0
  21. control_plane_api/alembic/versions/d181a3b40e71_rename_custom_metadata_to_metadata_in_.py +50 -0
  22. control_plane_api/alembic/versions/df9117888e82_add_missing_columns.py +82 -0
  23. control_plane_api/alembic/versions/f25de6ad895a_missing_migrations.py +34 -0
  24. control_plane_api/alembic/versions/f71305fb69b9_fix_ephemeral_queue_deletion_foreign_key.py +54 -0
  25. control_plane_api/alembic/versions/mark_local_exec_queues_as_ephemeral.py +68 -0
  26. control_plane_api/alembic.ini +148 -0
  27. control_plane_api/api/index.py +12 -0
  28. control_plane_api/app/__init__.py +11 -0
  29. control_plane_api/app/activities/__init__.py +20 -0
  30. control_plane_api/app/activities/agent_activities.py +384 -0
  31. control_plane_api/app/activities/plan_generation_activities.py +499 -0
  32. control_plane_api/app/activities/team_activities.py +424 -0
  33. control_plane_api/app/activities/temporal_cloud_activities.py +588 -0
  34. control_plane_api/app/config/__init__.py +35 -0
  35. control_plane_api/app/config/api_config.py +469 -0
  36. control_plane_api/app/config/config_loader.py +224 -0
  37. control_plane_api/app/config/model_pricing.py +323 -0
  38. control_plane_api/app/config/storage_config.py +159 -0
  39. control_plane_api/app/config.py +115 -0
  40. control_plane_api/app/controllers/__init__.py +0 -0
  41. control_plane_api/app/controllers/execution_environment_controller.py +1315 -0
  42. control_plane_api/app/database.py +135 -0
  43. control_plane_api/app/exceptions.py +408 -0
  44. control_plane_api/app/lib/__init__.py +11 -0
  45. control_plane_api/app/lib/environment.py +65 -0
  46. control_plane_api/app/lib/event_bus/__init__.py +17 -0
  47. control_plane_api/app/lib/event_bus/base.py +136 -0
  48. control_plane_api/app/lib/event_bus/manager.py +335 -0
  49. control_plane_api/app/lib/event_bus/providers/__init__.py +6 -0
  50. control_plane_api/app/lib/event_bus/providers/http_provider.py +166 -0
  51. control_plane_api/app/lib/event_bus/providers/nats_provider.py +324 -0
  52. control_plane_api/app/lib/event_bus/providers/redis_provider.py +233 -0
  53. control_plane_api/app/lib/event_bus/providers/websocket_provider.py +497 -0
  54. control_plane_api/app/lib/job_executor.py +330 -0
  55. control_plane_api/app/lib/kubiya_client.py +293 -0
  56. control_plane_api/app/lib/litellm_pricing.py +166 -0
  57. control_plane_api/app/lib/mcp_validation.py +163 -0
  58. control_plane_api/app/lib/nats/__init__.py +13 -0
  59. control_plane_api/app/lib/nats/credentials_manager.py +288 -0
  60. control_plane_api/app/lib/nats/listener.py +374 -0
  61. control_plane_api/app/lib/planning_prompt_builder.py +153 -0
  62. control_plane_api/app/lib/planning_tools/__init__.py +41 -0
  63. control_plane_api/app/lib/planning_tools/agents.py +409 -0
  64. control_plane_api/app/lib/planning_tools/agno_toolkit.py +836 -0
  65. control_plane_api/app/lib/planning_tools/base.py +119 -0
  66. control_plane_api/app/lib/planning_tools/cognitive_memory_tools.py +403 -0
  67. control_plane_api/app/lib/planning_tools/context_graph_tools.py +545 -0
  68. control_plane_api/app/lib/planning_tools/environments.py +218 -0
  69. control_plane_api/app/lib/planning_tools/knowledge.py +204 -0
  70. control_plane_api/app/lib/planning_tools/models.py +93 -0
  71. control_plane_api/app/lib/planning_tools/planning_service.py +646 -0
  72. control_plane_api/app/lib/planning_tools/resources.py +242 -0
  73. control_plane_api/app/lib/planning_tools/teams.py +334 -0
  74. control_plane_api/app/lib/policy_enforcer_client.py +1016 -0
  75. control_plane_api/app/lib/redis_client.py +803 -0
  76. control_plane_api/app/lib/sqlalchemy_utils.py +486 -0
  77. control_plane_api/app/lib/state_transition_tools/__init__.py +7 -0
  78. control_plane_api/app/lib/state_transition_tools/execution_context.py +388 -0
  79. control_plane_api/app/lib/storage/__init__.py +20 -0
  80. control_plane_api/app/lib/storage/base_provider.py +274 -0
  81. control_plane_api/app/lib/storage/provider_factory.py +157 -0
  82. control_plane_api/app/lib/storage/vercel_blob_provider.py +468 -0
  83. control_plane_api/app/lib/supabase.py +71 -0
  84. control_plane_api/app/lib/supabase_utils.py +138 -0
  85. control_plane_api/app/lib/task_planning/__init__.py +138 -0
  86. control_plane_api/app/lib/task_planning/agent_factory.py +308 -0
  87. control_plane_api/app/lib/task_planning/agents.py +389 -0
  88. control_plane_api/app/lib/task_planning/cache.py +218 -0
  89. control_plane_api/app/lib/task_planning/entity_resolver.py +273 -0
  90. control_plane_api/app/lib/task_planning/helpers.py +293 -0
  91. control_plane_api/app/lib/task_planning/hooks.py +474 -0
  92. control_plane_api/app/lib/task_planning/models.py +503 -0
  93. control_plane_api/app/lib/task_planning/plan_validator.py +166 -0
  94. control_plane_api/app/lib/task_planning/planning_workflow.py +2911 -0
  95. control_plane_api/app/lib/task_planning/runner.py +656 -0
  96. control_plane_api/app/lib/task_planning/streaming_hook.py +213 -0
  97. control_plane_api/app/lib/task_planning/workflow.py +424 -0
  98. control_plane_api/app/lib/templating/__init__.py +88 -0
  99. control_plane_api/app/lib/templating/compiler.py +278 -0
  100. control_plane_api/app/lib/templating/engine.py +178 -0
  101. control_plane_api/app/lib/templating/parsers/__init__.py +29 -0
  102. control_plane_api/app/lib/templating/parsers/base.py +96 -0
  103. control_plane_api/app/lib/templating/parsers/env.py +85 -0
  104. control_plane_api/app/lib/templating/parsers/graph.py +112 -0
  105. control_plane_api/app/lib/templating/parsers/secret.py +87 -0
  106. control_plane_api/app/lib/templating/parsers/simple.py +81 -0
  107. control_plane_api/app/lib/templating/resolver.py +366 -0
  108. control_plane_api/app/lib/templating/types.py +214 -0
  109. control_plane_api/app/lib/templating/validator.py +201 -0
  110. control_plane_api/app/lib/temporal_client.py +232 -0
  111. control_plane_api/app/lib/temporal_credentials_cache.py +178 -0
  112. control_plane_api/app/lib/temporal_credentials_service.py +203 -0
  113. control_plane_api/app/lib/validation/__init__.py +24 -0
  114. control_plane_api/app/lib/validation/runtime_validation.py +388 -0
  115. control_plane_api/app/main.py +531 -0
  116. control_plane_api/app/middleware/__init__.py +10 -0
  117. control_plane_api/app/middleware/auth.py +645 -0
  118. control_plane_api/app/middleware/exception_handler.py +267 -0
  119. control_plane_api/app/middleware/prometheus_middleware.py +173 -0
  120. control_plane_api/app/middleware/rate_limiting.py +384 -0
  121. control_plane_api/app/middleware/request_id.py +202 -0
  122. control_plane_api/app/models/__init__.py +40 -0
  123. control_plane_api/app/models/agent.py +90 -0
  124. control_plane_api/app/models/analytics.py +206 -0
  125. control_plane_api/app/models/associations.py +107 -0
  126. control_plane_api/app/models/auth_user.py +73 -0
  127. control_plane_api/app/models/context.py +161 -0
  128. control_plane_api/app/models/custom_integration.py +99 -0
  129. control_plane_api/app/models/environment.py +64 -0
  130. control_plane_api/app/models/execution.py +125 -0
  131. control_plane_api/app/models/execution_transition.py +50 -0
  132. control_plane_api/app/models/job.py +159 -0
  133. control_plane_api/app/models/llm_model.py +78 -0
  134. control_plane_api/app/models/orchestration.py +66 -0
  135. control_plane_api/app/models/plan_execution.py +102 -0
  136. control_plane_api/app/models/presence.py +49 -0
  137. control_plane_api/app/models/project.py +61 -0
  138. control_plane_api/app/models/project_management.py +85 -0
  139. control_plane_api/app/models/session.py +29 -0
  140. control_plane_api/app/models/skill.py +155 -0
  141. control_plane_api/app/models/system_tables.py +43 -0
  142. control_plane_api/app/models/task_planning.py +372 -0
  143. control_plane_api/app/models/team.py +86 -0
  144. control_plane_api/app/models/trace.py +257 -0
  145. control_plane_api/app/models/user_profile.py +54 -0
  146. control_plane_api/app/models/worker.py +221 -0
  147. control_plane_api/app/models/workflow.py +161 -0
  148. control_plane_api/app/models/workspace.py +50 -0
  149. control_plane_api/app/observability/__init__.py +177 -0
  150. control_plane_api/app/observability/context_logging.py +475 -0
  151. control_plane_api/app/observability/decorators.py +337 -0
  152. control_plane_api/app/observability/local_span_processor.py +702 -0
  153. control_plane_api/app/observability/metrics.py +303 -0
  154. control_plane_api/app/observability/middleware.py +246 -0
  155. control_plane_api/app/observability/optional.py +115 -0
  156. control_plane_api/app/observability/tracing.py +382 -0
  157. control_plane_api/app/policies/README.md +149 -0
  158. control_plane_api/app/policies/approved_users.rego +62 -0
  159. control_plane_api/app/policies/business_hours.rego +51 -0
  160. control_plane_api/app/policies/rate_limiting.rego +100 -0
  161. control_plane_api/app/policies/tool_enforcement/README.md +336 -0
  162. control_plane_api/app/policies/tool_enforcement/bash_command_validation.rego +71 -0
  163. control_plane_api/app/policies/tool_enforcement/business_hours_enforcement.rego +82 -0
  164. control_plane_api/app/policies/tool_enforcement/mcp_tool_allowlist.rego +58 -0
  165. control_plane_api/app/policies/tool_enforcement/production_safeguards.rego +80 -0
  166. control_plane_api/app/policies/tool_enforcement/role_based_tool_access.rego +44 -0
  167. control_plane_api/app/policies/tool_restrictions.rego +86 -0
  168. control_plane_api/app/routers/__init__.py +4 -0
  169. control_plane_api/app/routers/agents.py +382 -0
  170. control_plane_api/app/routers/agents_v2.py +1598 -0
  171. control_plane_api/app/routers/analytics.py +1310 -0
  172. control_plane_api/app/routers/auth.py +59 -0
  173. control_plane_api/app/routers/client_config.py +57 -0
  174. control_plane_api/app/routers/context_graph.py +561 -0
  175. control_plane_api/app/routers/context_manager.py +577 -0
  176. control_plane_api/app/routers/custom_integrations.py +490 -0
  177. control_plane_api/app/routers/enforcer.py +132 -0
  178. control_plane_api/app/routers/environment_context.py +252 -0
  179. control_plane_api/app/routers/environments.py +761 -0
  180. control_plane_api/app/routers/execution_environment.py +847 -0
  181. control_plane_api/app/routers/executions/__init__.py +28 -0
  182. control_plane_api/app/routers/executions/router.py +286 -0
  183. control_plane_api/app/routers/executions/services/__init__.py +22 -0
  184. control_plane_api/app/routers/executions/services/demo_worker_health.py +156 -0
  185. control_plane_api/app/routers/executions/services/status_service.py +420 -0
  186. control_plane_api/app/routers/executions/services/test_worker_health.py +480 -0
  187. control_plane_api/app/routers/executions/services/worker_health.py +514 -0
  188. control_plane_api/app/routers/executions/streaming/__init__.py +22 -0
  189. control_plane_api/app/routers/executions/streaming/deduplication.py +352 -0
  190. control_plane_api/app/routers/executions/streaming/event_buffer.py +353 -0
  191. control_plane_api/app/routers/executions/streaming/event_formatter.py +964 -0
  192. control_plane_api/app/routers/executions/streaming/history_loader.py +588 -0
  193. control_plane_api/app/routers/executions/streaming/live_source.py +693 -0
  194. control_plane_api/app/routers/executions/streaming/streamer.py +849 -0
  195. control_plane_api/app/routers/executions.py +4888 -0
  196. control_plane_api/app/routers/health.py +165 -0
  197. control_plane_api/app/routers/health_v2.py +394 -0
  198. control_plane_api/app/routers/integration_templates.py +496 -0
  199. control_plane_api/app/routers/integrations.py +287 -0
  200. control_plane_api/app/routers/jobs.py +1809 -0
  201. control_plane_api/app/routers/metrics.py +517 -0
  202. control_plane_api/app/routers/models.py +82 -0
  203. control_plane_api/app/routers/models_v2.py +628 -0
  204. control_plane_api/app/routers/plan_executions.py +1481 -0
  205. control_plane_api/app/routers/plan_generation_async.py +304 -0
  206. control_plane_api/app/routers/policies.py +669 -0
  207. control_plane_api/app/routers/presence.py +234 -0
  208. control_plane_api/app/routers/projects.py +987 -0
  209. control_plane_api/app/routers/runners.py +379 -0
  210. control_plane_api/app/routers/runtimes.py +172 -0
  211. control_plane_api/app/routers/secrets.py +171 -0
  212. control_plane_api/app/routers/skills.py +1010 -0
  213. control_plane_api/app/routers/skills_definitions.py +140 -0
  214. control_plane_api/app/routers/storage.py +456 -0
  215. control_plane_api/app/routers/task_planning.py +611 -0
  216. control_plane_api/app/routers/task_queues.py +650 -0
  217. control_plane_api/app/routers/team_context.py +274 -0
  218. control_plane_api/app/routers/teams.py +1747 -0
  219. control_plane_api/app/routers/templates.py +248 -0
  220. control_plane_api/app/routers/traces.py +571 -0
  221. control_plane_api/app/routers/websocket_client.py +479 -0
  222. control_plane_api/app/routers/websocket_executions_status.py +437 -0
  223. control_plane_api/app/routers/websocket_gateway.py +323 -0
  224. control_plane_api/app/routers/websocket_traces.py +576 -0
  225. control_plane_api/app/routers/worker_queues.py +2555 -0
  226. control_plane_api/app/routers/worker_websocket.py +419 -0
  227. control_plane_api/app/routers/workers.py +1004 -0
  228. control_plane_api/app/routers/workflows.py +204 -0
  229. control_plane_api/app/runtimes/__init__.py +6 -0
  230. control_plane_api/app/runtimes/validation.py +344 -0
  231. control_plane_api/app/schemas/__init__.py +1 -0
  232. control_plane_api/app/schemas/job_schemas.py +302 -0
  233. control_plane_api/app/schemas/mcp_schemas.py +311 -0
  234. control_plane_api/app/schemas/template_schemas.py +133 -0
  235. control_plane_api/app/schemas/trace_schemas.py +168 -0
  236. control_plane_api/app/schemas/worker_queue_observability_schemas.py +165 -0
  237. control_plane_api/app/services/__init__.py +1 -0
  238. control_plane_api/app/services/agno_planning_strategy.py +233 -0
  239. control_plane_api/app/services/agno_service.py +838 -0
  240. control_plane_api/app/services/claude_code_planning_service.py +203 -0
  241. control_plane_api/app/services/context_graph_client.py +224 -0
  242. control_plane_api/app/services/custom_integration_service.py +415 -0
  243. control_plane_api/app/services/integration_resolution_service.py +345 -0
  244. control_plane_api/app/services/litellm_service.py +394 -0
  245. control_plane_api/app/services/plan_generator.py +79 -0
  246. control_plane_api/app/services/planning_strategy.py +66 -0
  247. control_plane_api/app/services/planning_strategy_factory.py +118 -0
  248. control_plane_api/app/services/policy_service.py +615 -0
  249. control_plane_api/app/services/state_transition_service.py +755 -0
  250. control_plane_api/app/services/storage_service.py +593 -0
  251. control_plane_api/app/services/temporal_cloud_provisioning.py +150 -0
  252. control_plane_api/app/services/toolsets/context_graph_skill.py +432 -0
  253. control_plane_api/app/services/trace_retention.py +354 -0
  254. control_plane_api/app/services/worker_queue_metrics_service.py +190 -0
  255. control_plane_api/app/services/workflow_cancellation_manager.py +135 -0
  256. control_plane_api/app/services/workflow_operations_service.py +611 -0
  257. control_plane_api/app/skills/__init__.py +100 -0
  258. control_plane_api/app/skills/base.py +239 -0
  259. control_plane_api/app/skills/builtin/__init__.py +37 -0
  260. control_plane_api/app/skills/builtin/agent_communication/__init__.py +8 -0
  261. control_plane_api/app/skills/builtin/agent_communication/skill.py +246 -0
  262. control_plane_api/app/skills/builtin/code_ingestion/__init__.py +4 -0
  263. control_plane_api/app/skills/builtin/code_ingestion/skill.py +267 -0
  264. control_plane_api/app/skills/builtin/cognitive_memory/__init__.py +4 -0
  265. control_plane_api/app/skills/builtin/cognitive_memory/skill.py +174 -0
  266. control_plane_api/app/skills/builtin/contextual_awareness/__init__.py +4 -0
  267. control_plane_api/app/skills/builtin/contextual_awareness/skill.py +387 -0
  268. control_plane_api/app/skills/builtin/data_visualization/__init__.py +4 -0
  269. control_plane_api/app/skills/builtin/data_visualization/skill.py +154 -0
  270. control_plane_api/app/skills/builtin/docker/__init__.py +4 -0
  271. control_plane_api/app/skills/builtin/docker/skill.py +104 -0
  272. control_plane_api/app/skills/builtin/file_generation/__init__.py +4 -0
  273. control_plane_api/app/skills/builtin/file_generation/skill.py +94 -0
  274. control_plane_api/app/skills/builtin/file_system/__init__.py +4 -0
  275. control_plane_api/app/skills/builtin/file_system/skill.py +110 -0
  276. control_plane_api/app/skills/builtin/knowledge_api/__init__.py +5 -0
  277. control_plane_api/app/skills/builtin/knowledge_api/skill.py +124 -0
  278. control_plane_api/app/skills/builtin/python/__init__.py +4 -0
  279. control_plane_api/app/skills/builtin/python/skill.py +92 -0
  280. control_plane_api/app/skills/builtin/remote_filesystem/__init__.py +5 -0
  281. control_plane_api/app/skills/builtin/remote_filesystem/skill.py +170 -0
  282. control_plane_api/app/skills/builtin/shell/__init__.py +4 -0
  283. control_plane_api/app/skills/builtin/shell/skill.py +161 -0
  284. control_plane_api/app/skills/builtin/slack/__init__.py +3 -0
  285. control_plane_api/app/skills/builtin/slack/skill.py +302 -0
  286. control_plane_api/app/skills/builtin/workflow_executor/__init__.py +4 -0
  287. control_plane_api/app/skills/builtin/workflow_executor/skill.py +469 -0
  288. control_plane_api/app/skills/business_intelligence.py +189 -0
  289. control_plane_api/app/skills/config.py +63 -0
  290. control_plane_api/app/skills/loaders/__init__.py +14 -0
  291. control_plane_api/app/skills/loaders/base.py +73 -0
  292. control_plane_api/app/skills/loaders/filesystem_loader.py +199 -0
  293. control_plane_api/app/skills/registry.py +125 -0
  294. control_plane_api/app/utils/helpers.py +12 -0
  295. control_plane_api/app/utils/workflow_executor.py +354 -0
  296. control_plane_api/app/workflows/__init__.py +11 -0
  297. control_plane_api/app/workflows/agent_execution.py +520 -0
  298. control_plane_api/app/workflows/agent_execution_with_skills.py +223 -0
  299. control_plane_api/app/workflows/namespace_provisioning.py +326 -0
  300. control_plane_api/app/workflows/plan_generation.py +254 -0
  301. control_plane_api/app/workflows/team_execution.py +442 -0
  302. control_plane_api/scripts/seed_models.py +240 -0
  303. control_plane_api/scripts/validate_existing_tool_names.py +492 -0
  304. control_plane_api/shared/__init__.py +8 -0
  305. control_plane_api/shared/version.py +17 -0
  306. control_plane_api/test_deduplication.py +274 -0
  307. control_plane_api/test_executor_deduplication_e2e.py +309 -0
  308. control_plane_api/test_job_execution_e2e.py +283 -0
  309. control_plane_api/test_real_integration.py +193 -0
  310. control_plane_api/version.py +38 -0
  311. control_plane_api/worker/__init__.py +0 -0
  312. control_plane_api/worker/activities/__init__.py +0 -0
  313. control_plane_api/worker/activities/agent_activities.py +1585 -0
  314. control_plane_api/worker/activities/approval_activities.py +234 -0
  315. control_plane_api/worker/activities/job_activities.py +199 -0
  316. control_plane_api/worker/activities/runtime_activities.py +1167 -0
  317. control_plane_api/worker/activities/skill_activities.py +282 -0
  318. control_plane_api/worker/activities/team_activities.py +479 -0
  319. control_plane_api/worker/agent_runtime_server.py +370 -0
  320. control_plane_api/worker/binary_manager.py +333 -0
  321. control_plane_api/worker/config/__init__.py +31 -0
  322. control_plane_api/worker/config/worker_config.py +273 -0
  323. control_plane_api/worker/control_plane_client.py +1491 -0
  324. control_plane_api/worker/examples/analytics_integration_example.py +362 -0
  325. control_plane_api/worker/health_monitor.py +159 -0
  326. control_plane_api/worker/metrics.py +237 -0
  327. control_plane_api/worker/models/__init__.py +1 -0
  328. control_plane_api/worker/models/error_events.py +105 -0
  329. control_plane_api/worker/models/inputs.py +89 -0
  330. control_plane_api/worker/runtimes/__init__.py +35 -0
  331. control_plane_api/worker/runtimes/agent_runtime/runtime.py +485 -0
  332. control_plane_api/worker/runtimes/agno/__init__.py +34 -0
  333. control_plane_api/worker/runtimes/agno/config.py +248 -0
  334. control_plane_api/worker/runtimes/agno/hooks.py +385 -0
  335. control_plane_api/worker/runtimes/agno/mcp_builder.py +195 -0
  336. control_plane_api/worker/runtimes/agno/runtime.py +1063 -0
  337. control_plane_api/worker/runtimes/agno/utils.py +163 -0
  338. control_plane_api/worker/runtimes/base.py +979 -0
  339. control_plane_api/worker/runtimes/claude_code/__init__.py +38 -0
  340. control_plane_api/worker/runtimes/claude_code/cleanup.py +184 -0
  341. control_plane_api/worker/runtimes/claude_code/client_pool.py +529 -0
  342. control_plane_api/worker/runtimes/claude_code/config.py +829 -0
  343. control_plane_api/worker/runtimes/claude_code/hooks.py +482 -0
  344. control_plane_api/worker/runtimes/claude_code/litellm_proxy.py +1702 -0
  345. control_plane_api/worker/runtimes/claude_code/mcp_builder.py +467 -0
  346. control_plane_api/worker/runtimes/claude_code/mcp_discovery.py +558 -0
  347. control_plane_api/worker/runtimes/claude_code/runtime.py +1546 -0
  348. control_plane_api/worker/runtimes/claude_code/tool_mapper.py +403 -0
  349. control_plane_api/worker/runtimes/claude_code/utils.py +149 -0
  350. control_plane_api/worker/runtimes/factory.py +173 -0
  351. control_plane_api/worker/runtimes/model_utils.py +107 -0
  352. control_plane_api/worker/runtimes/validation.py +93 -0
  353. control_plane_api/worker/services/__init__.py +1 -0
  354. control_plane_api/worker/services/agent_communication_tools.py +908 -0
  355. control_plane_api/worker/services/agent_executor.py +485 -0
  356. control_plane_api/worker/services/agent_executor_v2.py +793 -0
  357. control_plane_api/worker/services/analytics_collector.py +457 -0
  358. control_plane_api/worker/services/analytics_service.py +464 -0
  359. control_plane_api/worker/services/approval_tools.py +310 -0
  360. control_plane_api/worker/services/approval_tools_agno.py +207 -0
  361. control_plane_api/worker/services/cancellation_manager.py +177 -0
  362. control_plane_api/worker/services/code_ingestion_tools.py +465 -0
  363. control_plane_api/worker/services/contextual_awareness_tools.py +405 -0
  364. control_plane_api/worker/services/data_visualization.py +834 -0
  365. control_plane_api/worker/services/event_publisher.py +531 -0
  366. control_plane_api/worker/services/jira_tools.py +257 -0
  367. control_plane_api/worker/services/remote_filesystem_tools.py +498 -0
  368. control_plane_api/worker/services/runtime_analytics.py +328 -0
  369. control_plane_api/worker/services/session_service.py +365 -0
  370. control_plane_api/worker/services/skill_context_enhancement.py +181 -0
  371. control_plane_api/worker/services/skill_factory.py +471 -0
  372. control_plane_api/worker/services/system_prompt_enhancement.py +410 -0
  373. control_plane_api/worker/services/team_executor.py +715 -0
  374. control_plane_api/worker/services/team_executor_v2.py +1866 -0
  375. control_plane_api/worker/services/tool_enforcement.py +254 -0
  376. control_plane_api/worker/services/workflow_executor/__init__.py +52 -0
  377. control_plane_api/worker/services/workflow_executor/event_processor.py +287 -0
  378. control_plane_api/worker/services/workflow_executor/event_publisher.py +210 -0
  379. control_plane_api/worker/services/workflow_executor/executors/__init__.py +15 -0
  380. control_plane_api/worker/services/workflow_executor/executors/base.py +270 -0
  381. control_plane_api/worker/services/workflow_executor/executors/json_executor.py +50 -0
  382. control_plane_api/worker/services/workflow_executor/executors/python_executor.py +50 -0
  383. control_plane_api/worker/services/workflow_executor/models.py +142 -0
  384. control_plane_api/worker/services/workflow_executor_tools.py +1748 -0
  385. control_plane_api/worker/skills/__init__.py +12 -0
  386. control_plane_api/worker/skills/builtin/context_graph_search/README.md +213 -0
  387. control_plane_api/worker/skills/builtin/context_graph_search/__init__.py +5 -0
  388. control_plane_api/worker/skills/builtin/context_graph_search/agno_impl.py +808 -0
  389. control_plane_api/worker/skills/builtin/context_graph_search/skill.yaml +67 -0
  390. control_plane_api/worker/skills/builtin/contextual_awareness/__init__.py +4 -0
  391. control_plane_api/worker/skills/builtin/contextual_awareness/agno_impl.py +62 -0
  392. control_plane_api/worker/skills/builtin/data_visualization/agno_impl.py +18 -0
  393. control_plane_api/worker/skills/builtin/data_visualization/skill.yaml +84 -0
  394. control_plane_api/worker/skills/builtin/docker/agno_impl.py +65 -0
  395. control_plane_api/worker/skills/builtin/docker/skill.yaml +60 -0
  396. control_plane_api/worker/skills/builtin/file_generation/agno_impl.py +47 -0
  397. control_plane_api/worker/skills/builtin/file_generation/skill.yaml +64 -0
  398. control_plane_api/worker/skills/builtin/file_system/agno_impl.py +32 -0
  399. control_plane_api/worker/skills/builtin/file_system/skill.yaml +54 -0
  400. control_plane_api/worker/skills/builtin/knowledge_api/__init__.py +4 -0
  401. control_plane_api/worker/skills/builtin/knowledge_api/agno_impl.py +50 -0
  402. control_plane_api/worker/skills/builtin/knowledge_api/skill.yaml +66 -0
  403. control_plane_api/worker/skills/builtin/python/agno_impl.py +25 -0
  404. control_plane_api/worker/skills/builtin/python/skill.yaml +60 -0
  405. control_plane_api/worker/skills/builtin/schema_fix_mixin.py +260 -0
  406. control_plane_api/worker/skills/builtin/shell/agno_impl.py +31 -0
  407. control_plane_api/worker/skills/builtin/shell/skill.yaml +60 -0
  408. control_plane_api/worker/skills/builtin/slack/__init__.py +3 -0
  409. control_plane_api/worker/skills/builtin/slack/agno_impl.py +1282 -0
  410. control_plane_api/worker/skills/builtin/slack/skill.yaml +276 -0
  411. control_plane_api/worker/skills/builtin/workflow_executor/agno_impl.py +62 -0
  412. control_plane_api/worker/skills/builtin/workflow_executor/skill.yaml +79 -0
  413. control_plane_api/worker/skills/loaders/__init__.py +5 -0
  414. control_plane_api/worker/skills/loaders/base.py +23 -0
  415. control_plane_api/worker/skills/loaders/filesystem_loader.py +357 -0
  416. control_plane_api/worker/skills/registry.py +208 -0
  417. control_plane_api/worker/tests/__init__.py +1 -0
  418. control_plane_api/worker/tests/conftest.py +12 -0
  419. control_plane_api/worker/tests/e2e/__init__.py +0 -0
  420. control_plane_api/worker/tests/e2e/test_context_graph_real_api.py +338 -0
  421. control_plane_api/worker/tests/e2e/test_context_graph_templates_e2e.py +523 -0
  422. control_plane_api/worker/tests/e2e/test_enforcement_e2e.py +344 -0
  423. control_plane_api/worker/tests/e2e/test_execution_flow.py +571 -0
  424. control_plane_api/worker/tests/e2e/test_single_execution_mode.py +656 -0
  425. control_plane_api/worker/tests/integration/__init__.py +0 -0
  426. control_plane_api/worker/tests/integration/test_builtin_skills_fixes.py +245 -0
  427. control_plane_api/worker/tests/integration/test_context_graph_search_integration.py +365 -0
  428. control_plane_api/worker/tests/integration/test_control_plane_integration.py +308 -0
  429. control_plane_api/worker/tests/integration/test_hook_enforcement_integration.py +579 -0
  430. control_plane_api/worker/tests/integration/test_scheduled_job_workflow.py +237 -0
  431. control_plane_api/worker/tests/integration/test_system_prompt_enhancement_integration.py +343 -0
  432. control_plane_api/worker/tests/unit/__init__.py +0 -0
  433. control_plane_api/worker/tests/unit/test_builtin_skill_autoload.py +396 -0
  434. control_plane_api/worker/tests/unit/test_context_graph_search.py +450 -0
  435. control_plane_api/worker/tests/unit/test_context_graph_templates.py +403 -0
  436. control_plane_api/worker/tests/unit/test_control_plane_client.py +401 -0
  437. control_plane_api/worker/tests/unit/test_control_plane_client_jobs.py +345 -0
  438. control_plane_api/worker/tests/unit/test_job_activities.py +353 -0
  439. control_plane_api/worker/tests/unit/test_skill_context_enhancement.py +321 -0
  440. control_plane_api/worker/tests/unit/test_system_prompt_enhancement.py +415 -0
  441. control_plane_api/worker/tests/unit/test_tool_enforcement.py +324 -0
  442. control_plane_api/worker/utils/__init__.py +1 -0
  443. control_plane_api/worker/utils/chunk_batcher.py +330 -0
  444. control_plane_api/worker/utils/environment.py +65 -0
  445. control_plane_api/worker/utils/error_publisher.py +260 -0
  446. control_plane_api/worker/utils/event_batcher.py +256 -0
  447. control_plane_api/worker/utils/logging_config.py +335 -0
  448. control_plane_api/worker/utils/logging_helper.py +326 -0
  449. control_plane_api/worker/utils/parameter_validator.py +120 -0
  450. control_plane_api/worker/utils/retry_utils.py +60 -0
  451. control_plane_api/worker/utils/streaming_utils.py +665 -0
  452. control_plane_api/worker/utils/tool_validation.py +332 -0
  453. control_plane_api/worker/utils/workspace_manager.py +163 -0
  454. control_plane_api/worker/websocket_client.py +393 -0
  455. control_plane_api/worker/worker.py +1297 -0
  456. control_plane_api/worker/workflows/__init__.py +0 -0
  457. control_plane_api/worker/workflows/agent_execution.py +909 -0
  458. control_plane_api/worker/workflows/scheduled_job_wrapper.py +332 -0
  459. control_plane_api/worker/workflows/team_execution.py +611 -0
  460. kubiya_control_plane_api-0.9.15.dist-info/METADATA +354 -0
  461. kubiya_control_plane_api-0.9.15.dist-info/RECORD +479 -0
  462. kubiya_control_plane_api-0.9.15.dist-info/WHEEL +5 -0
  463. kubiya_control_plane_api-0.9.15.dist-info/entry_points.txt +5 -0
  464. kubiya_control_plane_api-0.9.15.dist-info/licenses/LICENSE +676 -0
  465. kubiya_control_plane_api-0.9.15.dist-info/top_level.txt +3 -0
  466. scripts/__init__.py +1 -0
  467. scripts/migrations.py +39 -0
  468. scripts/seed_worker_queues.py +128 -0
  469. scripts/setup_agent_runtime.py +142 -0
  470. worker_internal/__init__.py +1 -0
  471. worker_internal/planner/__init__.py +1 -0
  472. worker_internal/planner/activities.py +1499 -0
  473. worker_internal/planner/agent_tools.py +197 -0
  474. worker_internal/planner/event_models.py +148 -0
  475. worker_internal/planner/event_publisher.py +67 -0
  476. worker_internal/planner/models.py +199 -0
  477. worker_internal/planner/retry_logic.py +134 -0
  478. worker_internal/planner/worker.py +300 -0
  479. worker_internal/planner/workflows.py +970 -0
control_plane_api/worker/services/workflow_executor_tools.py
@@ -0,0 +1,1748 @@
+"""
+Workflow Executor Tools for Agent Control Plane Worker
+
+This module provides tools for agents to execute workflows defined via
+JSON or Python DSL. Agents can call these tools to run multi-step workflows
+with parameter injection and streaming execution.
+
+Workflows execute remotely on specified runners using the Kubiya SDK.
+"""
+
+import json
+import structlog
+import asyncio
+import os
+import hashlib
+from typing import Optional, Callable, Dict, Any, List
+from agno.tools import Toolkit
+from control_plane_api.worker.skills.builtin.schema_fix_mixin import SchemaFixMixin
+
+logger = structlog.get_logger(__name__)
+
+
+class WorkflowExecutorTools(SchemaFixMixin, Toolkit):
+    """
+    Workflow Executor toolkit for running workflows from agents.
+
+    Agents can use these tools to:
+    - Execute JSON-defined workflows with parameters
+    - Run Python DSL workflows
+    - Stream workflow execution events
+    - Get workflow execution status
+    """
+
+    def __init__(
+        self,
+        name: Optional[str] = None,
+        workflows: Optional[List[Dict[str, Any]]] = None,
+        validation_enabled: bool = True,
+        default_runner: Optional[str] = None,
+        timeout: int = 3600,
+        default_parameters: Optional[Dict[str, Any]] = None,
+        stream_callback: Optional[Callable[[str], None]] = None,
+        kubiya_api_key: Optional[str] = None,
+        kubiya_api_base: Optional[str] = None,
+        execution_id: Optional[str] = None,  # Add execution_id parameter
+        # Legacy parameters for backwards compatibility
+        workflow_type: Optional[str] = None,
+        workflow_definition: Optional[str] = None,
+        python_dsl_code: Optional[str] = None,
+    ):
+        """
+        Initialize WorkflowExecutorTools.
+
+        Args:
+            name: Skill instance name (defaults to "workflow_executor")
+            workflows: List of workflow definitions. Each workflow becomes a separate tool.
+                Format: [{"name": "analyze-logs", "type": "json", "definition": {...}}, ...]
+            validation_enabled: Enable pre-execution validation
+            default_runner: Default runner/environment name
+            timeout: Maximum execution timeout in seconds
+            default_parameters: Default parameter values to use for all workflows
+            stream_callback: Optional callback for streaming output
+            kubiya_api_key: Kubiya API key (defaults to KUBIYA_API_KEY env var)
+            kubiya_api_base: Kubiya API base URL (defaults to KUBIYA_API_BASE env var)
+            workflow_type: LEGACY - Type of workflow ("json" or "python_dsl")
+            workflow_definition: LEGACY - JSON workflow definition string
+            python_dsl_code: LEGACY - Python DSL code string
+        """
+        super().__init__(name=name or "workflow_executor")
+
+        self.validation_enabled = validation_enabled
+        self.default_runner = default_runner or "default"
+        self.timeout = timeout
+        self.default_parameters = default_parameters or {}
+        self.stream_callback = stream_callback
+        self.execution_id = execution_id or os.environ.get("EXECUTION_ID")  # Store execution_id
+
+        print(f"\nšŸ” WORKFLOW EXECUTOR __init__ DEBUG:")
+        print(f" Received execution_id param: {execution_id}")
+        print(f" EXECUTION_ID env var: {os.environ.get('EXECUTION_ID')}")
+        print(f" Final self.execution_id: {self.execution_id}\n")
+
+        # Get Kubiya API credentials from parameters or environment
+        self.kubiya_api_key = kubiya_api_key or os.environ.get("KUBIYA_API_KEY")
+        self.kubiya_api_base = kubiya_api_base or os.environ.get("KUBIYA_API_BASE", "https://api.kubiya.ai")
+
+        if not self.kubiya_api_key:
+            logger.warning("No KUBIYA_API_KEY provided - workflow execution will fail")
+
+        # Get control plane client for publishing events
+        try:
+            from control_plane_api.worker.control_plane_client import get_control_plane_client
+            self.control_plane = get_control_plane_client()
+        except Exception as e:
+            logger.warning(f"Failed to get control plane client: {e}")
+            self.control_plane = None
+
+        # Initialize Kubiya SDK client for remote execution
+        self.kubiya_client = None
+        if self.kubiya_api_key:
+            try:
+                from kubiya import KubiyaClient
+
+                self.kubiya_client = KubiyaClient(
+                    api_key=self.kubiya_api_key,
+                    base_url=self.kubiya_api_base,
+                    runner=self.default_runner,
+                    timeout=self.timeout
+                )
+                logger.info(f"Initialized Kubiya SDK client for remote workflow execution (runner: {self.default_runner})")
+            except ImportError as e:
+                logger.error(f"Failed to import Kubiya SDK: {e}. Install with: pip install git+https://github.com/kubiyabot/sdk-py.git@main")
+                self.kubiya_client = None
+
+        # Handle legacy single workflow format
+        if workflow_definition or python_dsl_code:
+            logger.info("Using legacy single-workflow format")
+
+            legacy_workflow = {
+                "name": "default",
+                "type": workflow_type or "json",
+            }
+            if workflow_type == "json" and workflow_definition:
+                legacy_workflow["definition"] = workflow_definition
+            elif workflow_type == "python_dsl" and python_dsl_code:
+                legacy_workflow["code"] = python_dsl_code
+
+            workflows = [legacy_workflow]
+
+            # Store legacy attributes for backward compatibility
+            self.workflow_type = workflow_type
+            self.workflow_definition = workflow_definition
+            self.python_dsl_code = python_dsl_code
+
+            # Parse workflow data for legacy JSON workflows
+            if workflow_type == "json" and workflow_definition:
+                try:
+                    self.workflow_data = json.loads(workflow_definition) if isinstance(workflow_definition, str) else workflow_definition
+                except Exception as e:
+                    logger.error(f"Failed to parse legacy workflow definition: {e}")
+                    self.workflow_data = None
+            else:
+                self.workflow_data = None
+        else:
+            # Not using legacy format - no legacy attributes
+            self.workflow_type = None
+            self.workflow_definition = None
+            self.python_dsl_code = None
+            self.workflow_data = None
+
+        # Store workflows collection
+        self.workflows = workflows or []
+
+        # Dynamically register a tool for each workflow
+        for workflow in self.workflows:
+            self._register_workflow_tool(workflow)
+
+        # If no workflows registered (empty or legacy format), register default execution tool
+        if not self.workflows or len(self.workflows) == 0:
+            logger.warning("No workflows configured in WorkflowExecutorTools")
+
+        # Register helper tools
+        self.register(self.list_all_workflows)
+        self.register(self.get_workflow_info)
+
+        # Fix: Rebuild function schemas with proper parameters
+        self._rebuild_function_schemas()
+
+    def _register_workflow_tool(self, workflow: Dict[str, Any]):
+        """
+        Dynamically register a tool method for a specific workflow.
+
+        Creates a method named after the workflow that executes it on the configured runner.
+
+        Args:
+            workflow: Workflow definition dict with name, type, and definition/code
+        """
+        workflow_name = workflow.get("name", "unknown")
+        workflow_type = workflow.get("type", "json")
+
+        # Use clean workflow name as method name (replace hyphens/spaces with underscores)
+        # For "analyze-logs" workflow → method name "analyze_logs"
+        # For "default" workflow (legacy) → use the toolkit name
+        safe_name = workflow_name.replace("-", "_").replace(" ", "_").lower()
+
+        # If this is the default workflow, use the skill name
+        if workflow_name == "default" and self.name != "workflow_executor":
+            method_name = self.name
+        else:
+            method_name = safe_name
+
+        # Create a closure that captures the workflow definition
+        def workflow_executor(parameters: Optional[Dict[str, Any]] = None) -> str:
+            f"""
+            Execute the '{workflow_name}' workflow on the configured runner.
+
+            This workflow executes on the runner specified in the workflow definition
+            using the Kubiya SDK. All steps are executed in dependency order.
+
+            Args:
+                parameters: Dictionary of parameters to inject into the workflow.
+                    Parameters can be referenced in workflow steps using {{{{param_name}}}} syntax.
+
+            Returns:
+                str: Formatted workflow execution results including step outputs and status.
+
+            Examples:
+                # Execute workflow with parameters
+                {method_name}(parameters={{"environment": "production", "version": "v1.2.3"}})
+            """
+            return self._execute_specific_workflow(workflow, parameters)
+
+        # Set proper docstring
+        workflow_executor.__doc__ = f"""
+        Execute the '{workflow_name}' workflow on the configured runner.
+
+        Type: {workflow_type}
+        Runner: Specified in workflow definition or default_runner config
+
+        Args:
+            parameters: Optional dictionary of parameters to inject into workflow steps.
+                Reference parameters in steps using {{{{param_name}}}} syntax.
+
+        Returns:
+            str: Workflow execution results including all step outputs.
+        """
+
+        # Set method name for proper tool registration
+        workflow_executor.__name__ = method_name
+
+        # Register as a tool
+        self.register(workflow_executor)
+
+        # Also set as attribute on self for direct access
+        setattr(self, method_name, workflow_executor)
+
+        logger.info(f"Registered workflow tool: {method_name} for workflow '{workflow_name}'")
+
+    def _execute_specific_workflow(
+        self,
+        workflow: Dict[str, Any],
+        parameters: Optional[Dict[str, Any]] = None
+    ) -> str:
+        """
+        Execute a specific workflow from the collection.
+
+        Args:
+            workflow: Workflow definition
+            parameters: Execution parameters
+
+        Returns:
+            str: Formatted execution result
+        """
+        try:
+            workflow_name = workflow.get("name", "unknown")
+            workflow_type = workflow.get("type", "json")
+
+            # Merge default parameters with runtime parameters
+            # Runtime parameters override defaults
+            params = {**self.default_parameters, **(parameters or {})}
+
+            # Determine runner
+            effective_runner = self.default_runner
+            if workflow_type == "json":
+                workflow_def = workflow.get("definition")
+                if isinstance(workflow_def, str):
+                    workflow_data = json.loads(workflow_def)
+                else:
+                    workflow_data = workflow_def
+
+                effective_runner = workflow_data.get("runner") or self.default_runner
+            else:
+                effective_runner = self.default_runner
+
+            # Stream start message
+            if self.stream_callback:
+                self.stream_callback(
+                    f"šŸš€ Starting workflow: {workflow_name}\n"
+                    f" Type: {workflow_type}\n"
+                    f" Parameters: {json.dumps(params, indent=2)}\n"
+                    f" Runner: {effective_runner}\n\n"
+                )
+
+            # Execute based on workflow type
+            if workflow_type == "json":
+                result = self._execute_json_workflow_specific(workflow, params, effective_runner)
+            elif workflow_type == "python_dsl":
+                result = self._execute_python_dsl_workflow_specific(workflow, params, effective_runner)
+            else:
+                raise ValueError(f"Unsupported workflow type: {workflow_type}")
+
+            # Stream completion message
+            if self.stream_callback:
+                self.stream_callback(f"\nāœ… Workflow '{workflow_name}' completed successfully\n")
+
+            return result
+
+        except Exception as e:
+            error_msg = f"āŒ Workflow '{workflow.get('name', 'unknown')}' execution failed: {str(e)}"
+            logger.error(error_msg, exc_info=True)
+
+            if self.stream_callback:
+                self.stream_callback(f"\n{error_msg}\n")
+
+            return error_msg
+
+    def _execute_json_workflow_specific(
+        self,
+        workflow: Dict[str, Any],
+        parameters: Dict[str, Any],
+        runner: str
+    ) -> str:
+        """Execute a JSON workflow."""
+        workflow_def = workflow.get("definition")
+        if isinstance(workflow_def, str):
+            workflow_data = json.loads(workflow_def)
+        else:
+            workflow_data = workflow_def
+
+        if not workflow_data:
+            raise ValueError("No workflow definition available")
+
+        if not self.kubiya_client:
+            raise RuntimeError("Kubiya SDK client not initialized")
+
+        # Ensure runner is set
+        workflow_data["runner"] = runner
+
+        # Remove 'triggers' key if it exists - not needed for direct execution
+        # The DAG builder rejects this key when executing workflows directly
+        if "triggers" in workflow_data:
+            logger.debug(f"Removing 'triggers' key from workflow definition (not needed for execution)")
+            workflow_data.pop("triggers")
+
+        # Execute remotely
+        from datetime import datetime
+        import time
+        start_time = datetime.utcnow()
+
+        # Generate unique message_id for workflow streaming
+        workflow_message_id = f"{self.execution_id}_{int(time.time() * 1000000)}" if self.execution_id else None
+
+        if self.stream_callback:
+            self.stream_callback(f"ā–¶ļø Submitting to runner '{runner}'...\n\n")
+
+        # Publish workflow start to control plane
+        print(f"\n{'='*80}")
+        print(f"šŸ“” WORKFLOW STREAMING DEBUG")
+        print(f"{'='*80}")
+        print(f"control_plane exists: {self.control_plane is not None}")
+        print(f"execution_id: {self.execution_id}")
+        print(f"workflow_message_id: {workflow_message_id}")
+        print(f"{'='*80}\n")
+
+        if self.control_plane and self.execution_id and workflow_message_id:
+            try:
+                print(f"šŸ“” Publishing workflow start to control plane...")
+                self.control_plane.publish_event(
+                    execution_id=self.execution_id,
+                    event_type="message_chunk",
+                    data={
+                        "role": "assistant",
+                        "content": f"šŸš€ Starting workflow: {workflow_data.get('name', 'unknown')}\nā–¶ļø Submitting to runner '{runner}'...\n\n",
+                        "is_chunk": True,
+                        "message_id": workflow_message_id,
+                        "source": "workflow",
+                    }
+                )
+                print(f"āœ… Successfully published workflow start to control plane\n")
+            except Exception as e:
+                print(f"āŒ Failed to publish workflow start: {e}\n")
+                logger.error(f"āŒ Failed to publish workflow start: {e}", exc_info=True)
+        else:
+            print(f"āš ļø Skipping control plane publish (one or more required fields is None)\n")
+
+        # āœ… Enable streaming to capture real-time workflow output
+        response = self.kubiya_client.execute_workflow(
+            workflow_definition=workflow_data,
+            parameters=parameters,
+            stream=True
+        )
+
+        # Accumulate streaming results
+        accumulated_output = []
+        event_count = 0
+        step_outputs = {}
+        current_step = None
+        seen_events = set()  # Track event hashes to prevent duplicates
+
+        # Register workflow for cancellation tracking
+        from control_plane_api.app.services.workflow_cancellation_manager import workflow_cancellation_manager
+        cancellation_event = workflow_cancellation_manager.register_workflow(self.execution_id, workflow_message_id)
+
+        # Iterate over streaming results (SDK yields JSON strings)
+        for event in response:
+            event_count += 1
+
+            # Check for cancellation FIRST (immediate response)
+            if cancellation_event.is_set():
+                logger.warning("āš ļø Workflow execution cancelled by user")
+                workflow_cancellation_manager.clear_cancellation(self.execution_id, workflow_message_id)
+                return f"āŒ Workflow execution cancelled by user\n\nWorkflow: {workflow_data.get('name', 'unknown')}\nCancelled at: {datetime.utcnow().isoformat()}"
+
+            # Skip None/empty events
+            if event is None:
+                logger.debug(f"ā­ļø Skipping None event #{event_count}")
+                continue
+
+            # šŸ” DEBUG: Print raw event to understand SDK response format
+            print(f"\n{'='*80}")
+            print(f"šŸ” RAW SDK EVENT #{event_count}")
+            print(f"{'='*80}")
+            print(f"Type: {type(event).__name__}")
+            print(f"Length: {len(str(event)) if event else 0}")
+            print(f"Repr: {repr(event)[:500]}")
+            if isinstance(event, (str, bytes)):
+                print(f"First 200 chars: {str(event)[:200]}")
+            print(f"{'='*80}\n")
+
+            # Debug: Log raw event with actual content
+            event_repr = repr(event)[:500]  # Use repr to see exact content
+            logger.info(f"šŸ“¦ Received event #{event_count} (type={type(event).__name__}, length={len(str(event)) if event else 0})")
+            logger.debug(f" Raw content: {event_repr}")
+
+            # Parse the event (SDK yields JSON strings or bytes)
+            try:
+                if isinstance(event, bytes):
+                    # Skip empty bytes
+                    if not event:
+                        logger.debug(f" ā­ļø Skipping empty bytes")
+                        continue
+
+                    # Decode bytes to string first
+                    logger.debug(f" šŸ”„ Decoding bytes to string...")
+                    event = event.decode('utf-8')
+                    logger.debug(f" āœ… Decoded to string (length={len(event)})")
+
+                if isinstance(event, str):
+                    # Skip empty strings
+                    if not event.strip():
+                        logger.debug(f" ā­ļø Skipping empty string event")
+                        continue
+
+                    # Handle SSE (Server-Sent Events) format: "data: 2:{json}"
+                    # The SDK sometimes returns events with this prefix
+                    if event.startswith("data: "):
+                        logger.debug(f" šŸ”„ Stripping SSE 'data: ' prefix...")
+                        event = event[6:]  # Remove "data: " prefix (6 chars)
+
+                        # Also strip the message ID prefix like "2:"
+                        if ":" in event and event.split(":", 1)[0].isdigit():
+                            logger.debug(f" šŸ”„ Stripping message ID prefix...")
+                            event = event.split(":", 1)[1]  # Remove "2:" or similar prefix
+
+                        logger.debug(f" āœ… Cleaned SSE event (length={len(event)})")
+
+                    # Try to parse as JSON
+                    logger.debug(f" šŸ”„ Parsing JSON string...")
+                    event_data = json.loads(event)
+                    logger.debug(f" āœ… Parsed JSON: type={event_data.get('type', 'unknown')}")
+                elif isinstance(event, dict):
+                    # Already a dict
+                    logger.debug(f" āœ… Already a dict: type={event.get('type', 'unknown')}")
+                    event_data = event
+                else:
+                    # Unknown type, treat as plain text
+                    logger.warning(f" āš ļø Unknown event type: {type(event).__name__}, treating as plain text")
+                    event_str = str(event)
+                    if event_str.strip():  # Only add non-empty text
+                        accumulated_output.append(event_str)
+                        if self.stream_callback:
+                            self.stream_callback(f"{event_str}\n")
+                    continue
+            except (json.JSONDecodeError, UnicodeDecodeError) as e:
+                # If not valid JSON or can't decode, treat as plain text
+                logger.warning(f" āš ļø Failed to parse event: {e}, treating as plain text")
+                event_str = str(event)
+                if event_str.strip():  # Only add non-empty text
+                    accumulated_output.append(event_str)
+                    if self.stream_callback:
+                        self.stream_callback(f"{event_str}\n")
+                continue
+
+            # Extract meaningful content based on event type
+            event_type = event_data.get("type", "unknown")
+            logger.info(f" šŸŽÆ Event type: {event_type}")
+
+            # Handle actual Kubiya workflow event types
+            if event_type == "step_output":
+                # step_output contains the actual workflow output in step.output
+                step = event_data.get("step", {})
+                step_name = step.get("name", "unknown")
+                output = step.get("output", "")
+
+                if output.strip():
+                    # Deduplicate events - SDK sends same event twice (plain JSON + SSE format)
+                    event_hash = hashlib.md5(f"{step_name}:{output}".encode()).hexdigest()
+                    if event_hash in seen_events:
+                        print(f" ā­ļø Skipping duplicate event: {step_name} - {output[:50]}")
+                        logger.info(f" ā­ļø Skipping duplicate event: {step_name} - {output[:50]}")
+                        continue
+                    seen_events.add(event_hash)
+
+                    logger.info(f" šŸ“ Step output: {step_name} - {output[:100]}")
+
+                    # Format for display
+                    formatted_output = f"```\n{output}\n```\n"
+
+                    # Stream to callback if provided
+                    if self.stream_callback:
+                        self.stream_callback(formatted_output)
+
+                    # Publish to control plane as message chunk
+                    if self.control_plane and self.execution_id and workflow_message_id:
+                        try:
+                            print(f"šŸ“” Publishing step output to control plane (len={len(formatted_output)})")
+                            result = self.control_plane.publish_event(
+                                execution_id=self.execution_id,
+                                event_type="message_chunk",
+                                data={
+                                    "role": "assistant",
+                                    "content": formatted_output,
+                                    "is_chunk": True,
+                                    "message_id": workflow_message_id,
+                                    "source": "workflow",
+                                    "metadata": {
+                                        "step_name": step_name,
+                                        "event_type": "step_output",
+                                    }
+                                }
+                            )
+                            print(f"āœ… Published step output: {result}")
+                        except Exception as e:
+                            print(f"āŒ Failed to publish workflow output: {e}")
+                            logger.error(f"Failed to publish workflow output to control plane: {e}", exc_info=True)
+
+                    accumulated_output.append(output)
+
+                    # Track by step
+                    if step_name not in step_outputs:
+                        step_outputs[step_name] = []
+                    step_outputs[step_name].append(output)
+
+            elif event_type == "step_running":
+                # Step is starting
+                step = event_data.get("step", {})
+                step_name = step.get("name", "unknown")
+                current_step = step_name
+                formatted = f"\nā–¶ļø Step: {step_name}"
+                logger.info(f" ā–¶ļø Starting step: {step_name}")
+                accumulated_output.append(formatted)
+                if self.stream_callback:
+                    self.stream_callback(f"{formatted}\n")
+
+                # Publish to control plane as message chunk
+                if self.control_plane and self.execution_id and workflow_message_id:
+                    try:
+                        self.control_plane.publish_event(
+                            execution_id=self.execution_id,
+                            event_type="message_chunk",
+                            data={
+                                "role": "assistant",
+                                "content": formatted,
+                                "is_chunk": True,
+                                "message_id": workflow_message_id,
+                                "source": "workflow",
+                                "metadata": {
+                                    "step_name": step_name,
+                                    "event_type": "step_start",
+                                }
+                            }
+                        )
+                    except Exception as e:
+                        logger.debug(f"Failed to publish step_running to control plane: {e}")
+
+            elif event_type == "step_complete":
+                # Step finished
+                step = event_data.get("step", {})
+                step_name = step.get("name", "unknown")
+                status = step.get("status", "unknown")
+                icon = "āœ…" if status == "finished" else "āŒ"
+                formatted = f"{icon} Step '{step_name}' {status}"
+                logger.info(f" {icon} Step completed: {step_name} ({status})")
+                accumulated_output.append(formatted)
+                current_step = None
+                if self.stream_callback:
+                    self.stream_callback(f"{formatted}\n")
+
+                # Publish to control plane as message chunk
+                if self.control_plane and self.execution_id and workflow_message_id:
+                    try:
+                        self.control_plane.publish_event(
+                            execution_id=self.execution_id,
+                            event_type="message_chunk",
+                            data={
+                                "role": "assistant",
+                                "content": formatted,
+                                "is_chunk": True,
+                                "message_id": workflow_message_id,
+                                "source": "workflow",
+                                "metadata": {
+                                    "step_name": step_name,
+                                    "status": status,
+                                    "event_type": "step_complete",
+                                }
+                            }
+                        )
+                    except Exception as e:
+                        logger.debug(f"Failed to publish step_complete to control plane: {e}")
+
+            elif event_type == "workflow_complete":
+                # Workflow finished
+                dag_name = event_data.get("dagName", "unknown")
+                status = event_data.get("status", "unknown")
+                success = event_data.get("success", False)
+                icon = "āœ…" if success else "āŒ"
+                formatted = f"{icon} Workflow '{dag_name}' {status}"
+                logger.info(f" {icon} Workflow completed: {dag_name} ({status}, success={success})")
+                accumulated_output.append(formatted)
+                if self.stream_callback:
+                    self.stream_callback(f"{formatted}\n")
+
+            elif event_type == "log":
+                # Filter out noisy internal workflow runner logs
+                message = event_data.get("message", "")
+                level = event_data.get("level", "info")
+
+                # Skip internal/noisy log messages
+                noisy_patterns = [
+                    "[SSE]",
+                    "Published workflow stream event",
+                    "Stored workflow data",
+                    "Emitting log event",
+                    "msg=status requestId",
+                ]
+
+                # Check if message contains any noisy pattern
+                if any(pattern in message for pattern in noisy_patterns):
+                    logger.debug(f" šŸ”‡ Skipping noisy log: {message[:50]}")
+                    continue
+
+                # Only show meaningful log messages
+                formatted = f"[{level.upper()}] {message}"
+                logger.info(f" šŸ’¬ Log message: {message[:100]}")
+                accumulated_output.append(formatted)
+                if self.stream_callback:
+                    self.stream_callback(f"{formatted}\n")
+
+            elif event_type == "error":
+                error_msg = event_data.get("message", str(event_data))
+                formatted = f"āŒ Error: {error_msg}"
+                logger.error(f" āŒ Workflow error: {error_msg}")
+                accumulated_output.append(formatted)
+                if self.stream_callback:
+                    self.stream_callback(f"{formatted}\n")
+
+            elif event_type == "heartbeat":
+                # Skip heartbeat events in output
+                logger.debug(f" šŸ’“ Heartbeat (skipping)")
+                continue
+
+            else:
+                # For unknown event types, log but don't show to user
+                logger.info(f" ā“ Unknown event type: {event_type}")
+                logger.debug(f" Raw data: {json.dumps(event_data)[:200]}")
+                # Skip unknown events instead of showing raw JSON
+                continue
+
+        end_time = datetime.utcnow()
+        duration = (end_time - start_time).total_seconds()
+
+        # Format complete results for Claude to see
+        result_text = f"\n{'='*60}\n"
+        result_text += f"Workflow Execution: {workflow_data.get('name', 'unknown')}\n"
+        result_text += f"{'='*60}\n\n"
+        result_text += f"Status: āœ… Completed\n"
+        result_text += f"Duration: {duration:.2f}s\n"
+        result_text += f"Runner: {runner}\n"
+        result_text += f"Parameters: {json.dumps(parameters, indent=2)}\n"
+        result_text += f"\nTotal Events: {event_count}\n"
+
+        # Include all captured output in the result
+        if accumulated_output:
+            result_text += f"\n{'='*60}\n"
+            result_text += f"Workflow Output:\n"
+            result_text += f"{'='*60}\n\n"
+            result_text += "\n".join(accumulated_output)
+            logger.info(f"āœ… Workflow execution complete: {event_count} events processed, {len(accumulated_output)} output lines accumulated")
+        else:
+            logger.warning(f"āš ļø No workflow output accumulated (received {event_count} events but none produced output)")
+
+        logger.debug(f"Final result preview: {result_text[:500]}")
+
+        # Close the workflow streaming message (empty content marks end of stream)
+        # The agent will process the workflow result and generate its OWN response
+        # with a different message_id - that's the next message the user sees
+        if self.control_plane and self.execution_id and workflow_message_id:
+            try:
+                print(f"\nšŸ“” Closing workflow streaming message...")
+                self.control_plane.publish_event(
+                    execution_id=self.execution_id,
+                    event_type="message_chunk",
+                    data={
+                        "role": "assistant",
+                        "content": "",  # Empty - just marks end of workflow stream
+                        "is_chunk": False,  # Final chunk - closes the streaming message
+                        "message_id": workflow_message_id,
+                        "source": "workflow",
+                        "metadata": {
+                            "event_type": "workflow_stream_end",
+                            "duration": duration,
+                            "total_events": event_count,
+                        }
+                    }
+ )
716
+ print(f"āœ… Workflow stream closed\n")
717
+ except Exception as e:
718
+ logger.debug(f"Failed to close workflow stream: {e}")
719
+
720
+ # Return result to agent - agent will process and respond with its OWN message_id
721
+ return result_text
722
+
723
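For orientation, the control-plane streaming contract used above reduces to a series of message_chunk events sharing one message_id: intermediate chunks carry content with is_chunk=True, and a final empty chunk with is_chunk=False closes the stream. A condensed sketch of that pattern, using only the publish_event call shape shown above (the helper name is illustrative, not part of the package):

    def stream_workflow_text(control_plane, execution_id, message_id, text, final=False):
        # Intermediate chunks stream content; the final chunk is empty and closes the message
        control_plane.publish_event(
            execution_id=execution_id,
            event_type="message_chunk",
            data={
                "role": "assistant",
                "content": "" if final else text,
                "is_chunk": not final,
                "message_id": message_id,
                "source": "workflow",
            },
        )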
+ def _execute_python_dsl_workflow_specific(
724
+ self,
725
+ workflow: Dict[str, Any],
726
+ parameters: Dict[str, Any],
727
+ runner: str
728
+ ) -> str:
729
+ """Execute a Python DSL workflow."""
730
+ python_code = workflow.get("code")
731
+ if not python_code:
732
+ raise ValueError("No Python DSL code available")
733
+
734
+ if not self.kubiya_client:
735
+ raise RuntimeError("Kubiya SDK client not initialized")
736
+
737
+ workflow_name = workflow.get("name", "python-dsl-workflow")
738
+ # NOTE: workflow_message_id is referenced throughout this method but was never
+ # defined in it; a UUID is assumed here so streamed chunks can share one id.
+ import uuid
+ workflow_message_id = str(uuid.uuid4())
+
739
+ # Create workflow definition for remote execution
740
+ workflow_definition = {
741
+ "name": workflow_name,
742
+ "description": f"Python DSL workflow: {workflow_name}",
743
+ "runner": runner,
744
+ "steps": [
745
+ {
746
+ "name": "execute_python_dsl",
747
+ "description": "Execute Python DSL workflow code",
748
+ "executor": {
749
+ "type": "python_dsl",
750
+ "config": {"code": python_code}
751
+ }
752
+ }
753
+ ]
754
+ }
755
+
756
+ from datetime import datetime
757
+ start_time = datetime.utcnow()
758
+
759
+ if self.stream_callback:
760
+ self.stream_callback(f"ā–¶ļø Submitting to runner '{runner}'...\n\n")
761
+
762
+ # āœ… Enable streaming to capture real-time workflow output
763
+ response = self.kubiya_client.execute_workflow(
764
+ workflow_definition=workflow_definition,
765
+ parameters=parameters,
766
+ stream=True
767
+ )
768
+
769
+ # Accumulate streaming results
770
+ accumulated_output = []
771
+ event_count = 0
772
+ step_outputs = {}
773
+ current_step = None
774
+ seen_events = set() # Track event hashes to prevent duplicates
775
+
776
+ # Register workflow for cancellation tracking
777
+ from control_plane_api.app.services.workflow_cancellation_manager import workflow_cancellation_manager
778
+ cancellation_event = workflow_cancellation_manager.register_workflow(self.execution_id, workflow_message_id)
779
+
780
+ # Iterate over streaming results (SDK yields JSON strings)
781
+ for event in response:
782
+ event_count += 1
783
+
784
+ # Check for cancellation FIRST (immediate response)
785
+ if cancellation_event.is_set():
786
+ logger.warning("āš ļø Workflow execution cancelled by user")
787
+ workflow_cancellation_manager.clear_cancellation(self.execution_id, workflow_message_id)
788
+ return f"āŒ Workflow execution cancelled by user\n\nWorkflow: {workflow_data.get('name', 'unknown')}\nCancelled at: {datetime.utcnow().isoformat()}"
789
+
790
+ # Skip None/empty events
791
+ if event is None:
792
+ logger.debug(f"ā­ļø Skipping None event #{event_count}")
793
+ continue
794
+
795
+ # šŸ” DEBUG: Print raw event to understand SDK response format
796
+ print(f"\n{'='*80}")
797
+ print(f"šŸ” RAW SDK EVENT #{event_count}")
798
+ print(f"{'='*80}")
799
+ print(f"Type: {type(event).__name__}")
800
+ print(f"Length: {len(str(event)) if event else 0}")
801
+ print(f"Repr: {repr(event)[:500]}")
802
+ if isinstance(event, (str, bytes)):
803
+ print(f"First 200 chars: {str(event)[:200]}")
804
+ print(f"{'='*80}\n")
805
+
806
+ # Debug: Log raw event with actual content
807
+ event_repr = repr(event)[:500] # Use repr to see exact content
808
+ logger.info(f"šŸ“¦ Received event #{event_count} (type={type(event).__name__}, length={len(str(event)) if event else 0})")
809
+ logger.debug(f" Raw content: {event_repr}")
810
+
811
+ # Parse the event (SDK yields JSON strings or bytes)
812
+ try:
813
+ if isinstance(event, bytes):
814
+ # Skip empty bytes
815
+ if not event:
816
+ logger.debug(f" ā­ļø Skipping empty bytes")
817
+ continue
818
+
819
+ # Decode bytes to string first
820
+ logger.debug(f" šŸ”„ Decoding bytes to string...")
821
+ event = event.decode('utf-8')
822
+ logger.debug(f" āœ… Decoded to string (length={len(event)})")
823
+
824
+ if isinstance(event, str):
825
+ # Skip empty strings
826
+ if not event.strip():
827
+ logger.debug(f" ā­ļø Skipping empty string event")
828
+ continue
829
+
830
+ # Handle SSE (Server-Sent Events) format: "data: 2:{json}"
831
+ # The SDK sometimes returns events with this prefix
832
+ if event.startswith("data: "):
833
+ logger.debug(f" šŸ”„ Stripping SSE 'data: ' prefix...")
834
+ event = event[6:] # Remove "data: " prefix (6 chars)
835
+
836
+ # Also strip the message ID prefix like "2:"
837
+ if ":" in event and event.split(":", 1)[0].isdigit():
838
+ logger.debug(f" šŸ”„ Stripping message ID prefix...")
839
+ event = event.split(":", 1)[1] # Remove "2:" or similar prefix
840
+
841
+ logger.debug(f" āœ… Cleaned SSE event (length={len(event)})")
842
+
843
+ # Try to parse as JSON
844
+ logger.debug(f" šŸ”„ Parsing JSON string...")
845
+ event_data = json.loads(event)
846
+ logger.debug(f" āœ… Parsed JSON: type={event_data.get('type', 'unknown')}")
847
+ elif isinstance(event, dict):
848
+ # Already a dict
849
+ logger.debug(f" āœ… Already a dict: type={event.get('type', 'unknown')}")
850
+ event_data = event
851
+ else:
852
+ # Unknown type, treat as plain text
853
+ logger.warning(f" āš ļø Unknown event type: {type(event).__name__}, treating as plain text")
854
+ event_str = str(event)
855
+ if event_str.strip(): # Only add non-empty text
856
+ accumulated_output.append(event_str)
857
+ if self.stream_callback:
858
+ self.stream_callback(f"{event_str}\n")
859
+ continue
860
+ except (json.JSONDecodeError, UnicodeDecodeError) as e:
861
+ # If not valid JSON or can't decode, treat as plain text
862
+ logger.warning(f" āš ļø Failed to parse event: {e}, treating as plain text")
863
+ event_str = str(event)
864
+ if event_str.strip(): # Only add non-empty text
865
+ accumulated_output.append(event_str)
866
+ if self.stream_callback:
867
+ self.stream_callback(f"{event_str}\n")
868
+ continue
869
+
870
+ # Extract meaningful content based on event type
871
+ event_type = event_data.get("type", "unknown")
872
+ logger.info(f" šŸŽÆ Event type: {event_type}")
873
+
874
+ # Handle actual Kubiya workflow event types
875
+ if event_type == "step_output":
876
+ # step_output contains the actual workflow output in step.output
877
+ step = event_data.get("step", {})
878
+ step_name = step.get("name", "unknown")
879
+ output = step.get("output", "")
880
+
881
+ if output.strip():
882
+ # Deduplicate events - SDK sends same event twice (plain JSON + SSE format)
883
+ event_hash = hashlib.md5(f"{step_name}:{output}".encode()).hexdigest()
884
+ if event_hash in seen_events:
885
+ print(f" ā­ļø Skipping duplicate event: {step_name} - {output[:50]}")
886
+ logger.info(f" ā­ļø Skipping duplicate event: {step_name} - {output[:50]}")
887
+ continue
888
+ seen_events.add(event_hash)
889
+
890
+ logger.info(f" šŸ“ Step output: {step_name} - {output[:100]}")
891
+
892
+ # Format for display
893
+ formatted_output = f"```\n{output}\n```\n"
894
+
895
+ # Stream to callback if provided
896
+ if self.stream_callback:
897
+ self.stream_callback(formatted_output)
898
+
899
+ # Publish to control plane as message chunk
900
+ if self.control_plane and self.execution_id and workflow_message_id:
901
+ try:
902
+ print(f"šŸ“” Publishing step output to control plane (len={len(formatted_output)})")
903
+ result = self.control_plane.publish_event(
904
+ execution_id=self.execution_id,
905
+ event_type="message_chunk",
906
+ data={
907
+ "role": "assistant",
908
+ "content": formatted_output,
909
+ "is_chunk": True,
910
+ "message_id": workflow_message_id,
911
+ "source": "workflow",
912
+ "metadata": {
913
+ "step_name": step_name,
914
+ "event_type": "step_output",
915
+ }
916
+ }
917
+ )
918
+ print(f"āœ… Published step output: {result}")
919
+ except Exception as e:
920
+ print(f"āŒ Failed to publish workflow output: {e}")
921
+ logger.error(f"Failed to publish workflow output to control plane: {e}", exc_info=True)
922
+
923
+ accumulated_output.append(output)
924
+
925
+ # Track by step
926
+ if step_name not in step_outputs:
927
+ step_outputs[step_name] = []
928
+ step_outputs[step_name].append(output)
929
+
930
+ elif event_type == "step_running":
931
+ # Step is starting
932
+ step = event_data.get("step", {})
933
+ step_name = step.get("name", "unknown")
934
+ current_step = step_name
935
+ formatted = f"\nā–¶ļø Step: {step_name}"
936
+ logger.info(f" ā–¶ļø Starting step: {step_name}")
937
+ accumulated_output.append(formatted)
938
+ if self.stream_callback:
939
+ self.stream_callback(f"{formatted}\n")
940
+
941
+ # Publish to control plane as message chunk
942
+ if self.control_plane and self.execution_id and workflow_message_id:
943
+ try:
944
+ self.control_plane.publish_event(
945
+ execution_id=self.execution_id,
946
+ event_type="message_chunk",
947
+ data={
948
+ "role": "assistant",
949
+ "content": formatted,
950
+ "is_chunk": True,
951
+ "message_id": workflow_message_id,
952
+ "source": "workflow",
953
+ "metadata": {
954
+ "step_name": step_name,
955
+ "event_type": "step_start",
956
+ }
957
+ }
958
+ )
959
+ except Exception as e:
960
+ logger.debug(f"Failed to publish step_running to control plane: {e}")
961
+
962
+ elif event_type == "step_complete":
963
+ # Step finished
964
+ step = event_data.get("step", {})
965
+ step_name = step.get("name", "unknown")
966
+ status = step.get("status", "unknown")
967
+ icon = "āœ…" if status == "finished" else "āŒ"
968
+ formatted = f"{icon} Step '{step_name}' {status}"
969
+ logger.info(f" {icon} Step completed: {step_name} ({status})")
970
+ accumulated_output.append(formatted)
971
+ current_step = None
972
+ if self.stream_callback:
973
+ self.stream_callback(f"{formatted}\n")
974
+
975
+ # Publish to control plane as message chunk
976
+ if self.control_plane and self.execution_id and workflow_message_id:
977
+ try:
978
+ self.control_plane.publish_event(
979
+ execution_id=self.execution_id,
980
+ event_type="message_chunk",
981
+ data={
982
+ "role": "assistant",
983
+ "content": formatted,
984
+ "is_chunk": True,
985
+ "message_id": workflow_message_id,
986
+ "source": "workflow",
987
+ "metadata": {
988
+ "step_name": step_name,
989
+ "status": status,
990
+ "event_type": "step_complete",
991
+ }
992
+ }
993
+ )
994
+ except Exception as e:
995
+ logger.debug(f"Failed to publish step_complete to control plane: {e}")
996
+
997
+ elif event_type == "workflow_complete":
998
+ # Workflow finished
999
+ dag_name = event_data.get("dagName", "unknown")
1000
+ status = event_data.get("status", "unknown")
1001
+ success = event_data.get("success", False)
1002
+ icon = "āœ…" if success else "āŒ"
1003
+ formatted = f"{icon} Workflow '{dag_name}' {status}"
1004
+ logger.info(f" {icon} Workflow completed: {dag_name} ({status}, success={success})")
1005
+ accumulated_output.append(formatted)
1006
+ if self.stream_callback:
1007
+ self.stream_callback(f"{formatted}\n")
1008
+
1009
+ elif event_type == "log":
1010
+ # Filter out noisy internal workflow runner logs
1011
+ message = event_data.get("message", "")
1012
+ level = event_data.get("level", "info")
1013
+
1014
+ # Skip internal/noisy log messages
1015
+ noisy_patterns = [
1016
+ "[SSE]",
1017
+ "Published workflow stream event",
1018
+ "Stored workflow data",
1019
+ "Emitting log event",
1020
+ "msg=status requestId",
1021
+ ]
1022
+
1023
+ # Check if message contains any noisy pattern
1024
+ if any(pattern in message for pattern in noisy_patterns):
1025
+ logger.debug(f" šŸ”‡ Skipping noisy log: {message[:50]}")
1026
+ continue
1027
+
1028
+ # Only show meaningful log messages
1029
+ formatted = f"[{level.upper()}] {message}"
1030
+ logger.info(f" šŸ’¬ Log message: {message[:100]}")
1031
+ accumulated_output.append(formatted)
1032
+ if self.stream_callback:
1033
+ self.stream_callback(f"{formatted}\n")
1034
+
1035
+ elif event_type == "error":
1036
+ error_msg = event_data.get("message", str(event_data))
1037
+ formatted = f"āŒ Error: {error_msg}"
1038
+ logger.error(f" āŒ Workflow error: {error_msg}")
1039
+ accumulated_output.append(formatted)
1040
+ if self.stream_callback:
1041
+ self.stream_callback(f"{formatted}\n")
1042
+
1043
+ elif event_type == "heartbeat":
1044
+ # Skip heartbeat events in output
1045
+ logger.debug(f" šŸ’“ Heartbeat (skipping)")
1046
+ continue
1047
+
1048
+ else:
1049
+ # For unknown event types, log but don't show to user
1050
+ logger.info(f" ā“ Unknown event type: {event_type}")
1051
+ logger.debug(f" Raw data: {json.dumps(event_data)[:200]}")
1052
+ # Skip unknown events instead of showing raw JSON
1053
+ continue
1054
+
1055
+ end_time = datetime.utcnow()
1056
+ duration = (end_time - start_time).total_seconds()
1057
+
1058
+ result_text = f"\n{'='*60}\n"
1059
+ result_text += f"Python DSL Workflow: {workflow_name}\n"
1060
+ result_text += f"{'='*60}\n\n"
1061
+ result_text += f"Status: āœ… Completed\n"
1062
+ result_text += f"Duration: {duration:.2f}s\n"
1063
+ result_text += f"Runner: {runner}\n"
1064
+ result_text += f"\nTotal Events: {event_count}\n"
1065
+
1066
+ # Include all captured output in the result
1067
+ if accumulated_output:
1068
+ result_text += f"\n{'='*60}\n"
1069
+ result_text += f"Workflow Output:\n"
1070
+ result_text += f"{'='*60}\n\n"
1071
+ result_text += "\n".join(accumulated_output)
1072
+ logger.info(f"āœ… Workflow execution complete: {event_count} events processed, {len(accumulated_output)} output lines accumulated")
1073
+ else:
1074
+ logger.warning(f"āš ļø No workflow output accumulated (received {event_count} events but none produced output)")
1075
+
1076
+ logger.debug(f"Final result preview: {result_text[:500]}")
1077
+
1078
+ # Close the workflow streaming message (empty content marks end of stream)
1079
+ # The agent will process the workflow result and generate its OWN response
1080
+ # with a different message_id - that's the next message the user sees
1081
+ if self.control_plane and self.execution_id and workflow_message_id:
1082
+ try:
1083
+ print(f"\nšŸ“” Closing workflow streaming message...")
1084
+ self.control_plane.publish_event(
1085
+ execution_id=self.execution_id,
1086
+ event_type="message_chunk",
1087
+ data={
1088
+ "role": "assistant",
1089
+ "content": "", # Empty - just marks end of workflow stream
1090
+ "is_chunk": False, # Final chunk - closes the streaming message
1091
+ "message_id": workflow_message_id,
1092
+ "source": "workflow",
1093
+ "metadata": {
1094
+ "event_type": "workflow_stream_end",
1095
+ "duration": duration,
1096
+ "total_events": event_count,
1097
+ }
1098
+ }
1099
+ )
1100
+ print(f"āœ… Workflow stream closed\n")
1101
+ except Exception as e:
1102
+ logger.debug(f"Failed to close workflow stream: {e}")
1103
+
1104
+ # Return result to agent - agent will process and respond with its OWN message_id
1105
+ return result_text
1106
+
1107
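The event-parsing loop above first strips an SSE "data: " prefix and then a numeric message-id prefix such as "2:" before JSON-decoding each event. A standalone sketch of that normalization, assuming the same prefix conventions (the helper name is illustrative):

    import json

    def normalize_sse_event(raw: str) -> dict:
        # 'data: 2:{"type": "heartbeat"}' -> {"type": "heartbeat"}
        if raw.startswith("data: "):
            raw = raw[6:]
        head, sep, rest = raw.partition(":")
        if sep and head.isdigit():
            raw = rest
        return json.loads(raw)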
+ def list_all_workflows(self) -> str:
1108
+ """
1109
+ List all available workflows in this skill instance.
1110
+
1111
+ Returns:
1112
+ str: Formatted list of all workflows with their names, types, and descriptions.
1113
+
1114
+ Examples:
1115
+ # List all workflows
1116
+ list_all_workflows()
1117
+ """
1118
+ if not self.workflows:
1119
+ return "No workflows defined in this skill instance."
1120
+
1121
+ result = f"\nšŸ“‹ Available Workflows ({len(self.workflows)}):\n"
1122
+ result += "=" * 60 + "\n\n"
1123
+
1124
+ for idx, workflow in enumerate(self.workflows, 1):
1125
+ name = workflow.get("name", "unknown")
1126
+ wf_type = workflow.get("type", "unknown")
1127
+ safe_name = name.replace("-", "_").replace(" ", "_").lower()
1128
+
1129
+ result += f"{idx}. {name} ({wf_type})\n"
1130
+ result += f" Tool: execute_workflow_{safe_name}()\n"
1131
+
1132
+ # Get description from workflow definition
1133
+ if wf_type == "json":
1134
+ wf_def = workflow.get("definition")
1135
+ if isinstance(wf_def, str):
1136
+ try:
1137
+ wf_data = json.loads(wf_def)
1138
+ desc = wf_data.get("description", "No description")
1139
+ steps = len(wf_data.get("steps", []))
1140
+ result += f" Description: {desc}\n"
1141
+ result += f" Steps: {steps}\n"
1142
+ except Exception:
1143
+ pass
1144
+ elif isinstance(wf_def, dict):
1145
+ desc = wf_def.get("description", "No description")
1146
+ steps = len(wf_def.get("steps", []))
1147
+ result += f" Description: {desc}\n"
1148
+ result += f" Steps: {steps}\n"
1149
+
1150
+ result += "\n"
1151
+
1152
+ return result
1153
+
1154
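For reference, the per-workflow tool names listed above are derived by lowercasing the workflow name and replacing hyphens and spaces with underscores. A minimal sketch of that mapping (the workflow name is illustrative):

    def tool_name_for(workflow_name: str) -> str:
        # Mirrors the safe_name logic in list_all_workflows()
        safe = workflow_name.replace("-", "_").replace(" ", "_").lower()
        return f"execute_workflow_{safe}"

    print(tool_name_for("Deploy App-Prod"))  # -> execute_workflow_deploy_app_prod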
+ def execute_workflow(
1155
+ self,
1156
+ parameters: Optional[Dict[str, Any]] = None,
1157
+ override_timeout: Optional[int] = None,
1158
+ ) -> str:
1159
+ """
1160
+ Execute the first configured workflow with the provided parameters.
1161
+
1162
+ LEGACY METHOD: For backward compatibility with single-workflow format.
1163
+ For multi-workflow skills, use execute_workflow_<name>() methods instead.
1164
+
1165
+ This tool allows agents to run multi-step workflows by providing
1166
+ parameter values. The workflow will execute all steps in dependency
1167
+ order and return the results.
1168
+
1169
+ The runner/environment is determined from the workflow definition itself,
1170
+ not passed as a parameter. This ensures workflows execute in their
1171
+ intended environments.
1172
+
1173
+ Args:
1174
+ parameters: Dictionary of parameters to inject into the workflow.
1175
+ These can be referenced in workflow steps using {{param_name}} syntax.
1176
+ override_timeout: Optional timeout override in seconds.
1177
+ If not provided, uses the timeout from configuration.
1178
+
1179
+ Returns:
1180
+ str: A formatted string containing the workflow execution results,
1181
+ including step outputs and any errors encountered.
1182
+
1183
+ Examples:
1184
+ # Execute a deployment workflow with environment parameter
1185
+ execute_workflow(parameters={"environment": "production", "version": "v1.2.3"})
1186
+
1187
+ # Execute with timeout override
1188
+ execute_workflow(
1189
+ parameters={"data_source": "s3://bucket/data"},
1190
+ override_timeout=7200
1191
+ )
1192
+ """
1193
+ try:
1194
+ # For multi-workflow format, execute the first workflow
1195
+ if self.workflows:
1196
+ if len(self.workflows) > 1:
1197
+ logger.warning(
1198
+ "Multiple workflows defined but execute_workflow() called. "
1199
+ "Executing first workflow. Use execute_workflow_<name>() for specific workflows."
1200
+ )
1201
+ return self._execute_specific_workflow(self.workflows[0], parameters)
1202
+
1203
+ # Legacy single-workflow format
1204
+ # Use provided parameters or empty dict
1205
+ params = parameters or {}
1206
+
1207
+ # Determine runner from workflow definition, falling back to default_runner from config
1208
+ effective_runner = None
1209
+ if hasattr(self, 'workflow_type') and self.workflow_type == "json" and hasattr(self, 'workflow_data') and self.workflow_data:
1210
+ # Prefer the runner from the workflow definition, otherwise the configured default
1211
+ effective_runner = self.workflow_data.get("runner") or self.default_runner
1212
+ else:
1213
+ effective_runner = self.default_runner
1214
+
1215
+ # Determine timeout
1216
+ effective_timeout = override_timeout or self.timeout
1217
+
1218
+ # Stream start message
1219
+ if self.stream_callback:
1220
+ self.stream_callback(
1221
+ f"šŸš€ Starting workflow execution...\n"
1222
+ f" Workflow Type: {getattr(self, 'workflow_type', 'unknown')}\n"
1223
+ f" Parameters: {json.dumps(params, indent=2)}\n"
1224
+ f" Runner: {effective_runner or 'default'}\n"
1225
+ f" Timeout: {effective_timeout}s\n\n"
1226
+ )
1227
+
1228
+ # Execute based on workflow type
1229
+ if hasattr(self, 'workflow_type'):
1230
+ if self.workflow_type == "json":
1231
+ result = self._execute_json_workflow(params, effective_runner, effective_timeout)
1232
+ elif self.workflow_type == "python_dsl":
1233
+ result = self._execute_python_dsl_workflow(params, effective_runner, effective_timeout)
1234
+ else:
1235
+ raise ValueError(f"Unsupported workflow type: {self.workflow_type}")
1236
+ else:
1237
+ raise ValueError("No workflow configured")
1238
+
1239
+ # Stream completion message
1240
+ if self.stream_callback:
1241
+ self.stream_callback(f"\nāœ… Workflow execution completed successfully\n")
1242
+
1243
+ return result
1244
+
1245
+ except Exception as e:
1246
+ error_msg = f"āŒ Workflow execution failed: {str(e)}"
1247
+ logger.error(error_msg, exc_info=True)
1248
+
1249
+ if self.stream_callback:
1250
+ self.stream_callback(f"\n{error_msg}\n")
1251
+
1252
+ return error_msg
1253
+
1254
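The legacy path above resolves the runner by preferring a runner named in the workflow definition and falling back to the skill's default_runner. A compact sketch of that precedence (values are illustrative):

    from typing import Optional

    def resolve_runner(workflow_data: dict, default_runner: Optional[str]) -> Optional[str]:
        # Workflow-level runner wins; otherwise use the skill-level default
        return (workflow_data or {}).get("runner") or default_runner

    resolve_runner({"runner": "prod-runner"}, "kubiya-hosted")  # -> "prod-runner"
    resolve_runner({}, "kubiya-hosted")                         # -> "kubiya-hosted"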
+ def _execute_json_workflow(
1255
+ self,
1256
+ parameters: Dict[str, Any],
1257
+ runner: Optional[str],
1258
+ timeout: int
1259
+ ) -> str:
1260
+ """Execute a JSON workflow using kubiya SDK (remote execution)."""
1261
+ if not self.workflow_data:
1262
+ raise ValueError("No workflow definition available")
1263
+
1264
+ if not self.kubiya_client:
1265
+ raise RuntimeError("Kubiya SDK client not initialized - cannot execute workflow remotely")
1266
+
1267
+ workflow_name = self.workflow_data.get("name", "unknown")
1268
+ steps = self.workflow_data.get("steps", [])
1269
+
1270
+ if self.stream_callback:
1271
+ self.stream_callback(f"šŸ“‹ Workflow: {workflow_name}\n")
1272
+ self.stream_callback(f" Steps: {len(steps)}\n")
1273
+ self.stream_callback(f" Runner: {runner or self.default_runner}\n\n")
1274
+
1275
+ try:
1276
+ # Execute workflow remotely using Kubiya SDK
1277
+ from datetime import datetime
1278
+ start_time = datetime.utcnow()
1279
+
1280
+ if self.stream_callback:
1281
+ self.stream_callback(f"ā–¶ļø Submitting to runner '{runner or self.default_runner}'...\n\n")
1282
+
1283
+ # Submit workflow definition to remote runner
1284
+ # The workflow_data already contains the complete workflow definition
1285
+ workflow_def = dict(self.workflow_data)
1286
+
1287
+ # Ensure runner is set correctly
1288
+ workflow_def["runner"] = runner or self.default_runner
1289
+
1290
+ # Remove 'triggers' key if it exists - not needed for direct execution
1291
+ # The DAG builder rejects this key when executing workflows directly
1292
+ if "triggers" in workflow_def:
1293
+ logger.debug(f"Removing 'triggers' key from workflow definition (not needed for execution)")
1294
+ workflow_def.pop("triggers")
1295
+
1296
+ # āœ… Enable streaming to capture real-time workflow output
1297
+ response = self.kubiya_client.execute_workflow(
1298
+ workflow_definition=workflow_def,
1299
+ parameters=parameters,
1300
+ stream=True
1301
+ )
1302
+
1303
+ # Accumulate streaming results
1304
+ accumulated_output = []
1305
+ event_count = 0
1306
+
1307
+ # Iterate over streaming results
1308
+ for event in response:
1309
+ event_count += 1
1310
+
1311
+ # Stream to user in real-time
1312
+ if self.stream_callback:
1313
+ if isinstance(event, str):
1314
+ self.stream_callback(f"{event}\n")
1315
+ accumulated_output.append(event)
1316
+ elif isinstance(event, dict):
1317
+ event_type = event.get("type", "event")
1318
+ event_data = event.get("data", event)
1319
+ formatted_event = f"[{event_type}] {json.dumps(event_data, indent=2)}"
1320
+ self.stream_callback(f"{formatted_event}\n")
1321
+ accumulated_output.append(formatted_event)
1322
+ else:
1323
+ formatted_event = str(event)
1324
+ self.stream_callback(f"{formatted_event}\n")
1325
+ accumulated_output.append(formatted_event)
1326
+
1327
+ end_time = datetime.utcnow()
1328
+ duration = (end_time - start_time).total_seconds()
1329
+
1330
+ # Format results
1331
+ result_text = f"\n{'='*60}\n"
1332
+ result_text += f"Workflow Execution Summary\n"
1333
+ result_text += f"{'='*60}\n\n"
1334
+ result_text += f"Workflow: {workflow_name}\n"
1335
+ result_text += f"Runner: {runner or self.default_runner}\n"
1336
+ result_text += f"Status: āœ… Completed\n"
1337
+ result_text += f"Duration: {duration:.2f}s\n"
1338
+ result_text += f"Steps: {len(steps)}\n"
1339
+ result_text += f"Parameters: {json.dumps(parameters, indent=2)}\n"
1340
+ result_text += f"\nTotal Events: {event_count}\n"
1341
+
1342
+ # Include all captured output in the result
1343
+ if accumulated_output:
1344
+ result_text += f"\n{'='*60}\n"
1345
+ result_text += f"Workflow Output:\n"
1346
+ result_text += f"{'='*60}\n\n"
1347
+ result_text += "\n".join(accumulated_output)
1348
+
1349
+ if self.stream_callback:
1350
+ self.stream_callback(f"\nāœ… Workflow execution completed in {duration:.2f}s\n")
1351
+
1352
+ return result_text
1353
+
1354
+ except Exception as e:
1355
+ error_msg = f"JSON workflow execution failed: {str(e)}"
1356
+ logger.error(error_msg, exc_info=True)
1357
+ raise RuntimeError(error_msg)
1358
+
1359
+ def _execute_python_dsl_workflow(
1360
+ self,
1361
+ parameters: Dict[str, Any],
1362
+ runner: Optional[str],
1363
+ timeout: int
1364
+ ) -> str:
1365
+ """Execute a Python DSL workflow using kubiya SDK (remote execution)."""
1366
+ if not self.python_dsl_code:
1367
+ raise ValueError("No Python DSL code available")
1368
+
1369
+ if not self.kubiya_client:
1370
+ raise RuntimeError("Kubiya SDK client not initialized - cannot execute workflow remotely")
1371
+
1372
+ if self.stream_callback:
1373
+ self.stream_callback(f"šŸ Submitting Python DSL workflow for remote execution...\n\n")
1374
+
1375
+ try:
1376
+ # Parse the Python DSL code to extract workflow name
1377
+ # For now, we'll create a workflow definition that the runner can execute
1378
+ workflow_name = "python-dsl-workflow"
1379
+
1380
+ # Try to extract workflow name from code
1381
+ if "name=" in self.python_dsl_code:
1382
+ try:
1383
+ import re
1384
+ match = re.search(r'name\s*=\s*["\']([^"\']+)["\']', self.python_dsl_code)
1385
+ if match:
1386
+ workflow_name = match.group(1)
1387
+ except Exception:
1388
+ pass
1389
+
1390
+ if self.stream_callback:
1391
+ self.stream_callback(f"šŸ“‹ Workflow: {workflow_name}\n")
1392
+ self.stream_callback(f" Runner: {runner or self.default_runner}\n")
1393
+ self.stream_callback(f" Parameters: {json.dumps(parameters)}\n\n")
1394
+
1395
+ # Create workflow definition for remote execution
1396
+ # The runner will execute the Python DSL code
1397
+ workflow_definition = {
1398
+ "name": workflow_name,
1399
+ "description": "Python DSL workflow",
1400
+ "runner": runner or self.default_runner,
1401
+ "steps": [
1402
+ {
1403
+ "name": "execute_python_dsl",
1404
+ "description": "Execute Python DSL workflow code",
1405
+ "executor": {
1406
+ "type": "python_dsl",
1407
+ "config": {
1408
+ "code": self.python_dsl_code
1409
+ }
1410
+ }
1411
+ }
1412
+ ]
1413
+ }
1414
+
1415
+ # Execute workflow remotely using Kubiya SDK
1416
+ from datetime import datetime
1417
+ start_time = datetime.utcnow()
1418
+
1419
+ if self.stream_callback:
1420
+ self.stream_callback(f"ā–¶ļø Submitting to runner '{runner or self.default_runner}'...\n\n")
1421
+
1422
+ # āœ… Enable streaming to capture real-time workflow output
1423
+ response = self.kubiya_client.execute_workflow(
1424
+ workflow_definition=workflow_definition,
1425
+ parameters=parameters,
1426
+ stream=True
1427
+ )
1428
+
1429
+ # Accumulate streaming results
1430
+ accumulated_output = []
1431
+ event_count = 0
1432
+
1433
+ # Iterate over streaming results
1434
+ for event in response:
1435
+ event_count += 1
1436
+
1437
+ # Stream to user in real-time
1438
+ if self.stream_callback:
1439
+ if isinstance(event, str):
1440
+ self.stream_callback(f"{event}\n")
1441
+ accumulated_output.append(event)
1442
+ elif isinstance(event, dict):
1443
+ event_type = event.get("type", "event")
1444
+ event_data = event.get("data", event)
1445
+ formatted_event = f"[{event_type}] {json.dumps(event_data, indent=2)}"
1446
+ self.stream_callback(f"{formatted_event}\n")
1447
+ accumulated_output.append(formatted_event)
1448
+ else:
1449
+ formatted_event = str(event)
1450
+ self.stream_callback(f"{formatted_event}\n")
1451
+ accumulated_output.append(formatted_event)
1452
+
1453
+ end_time = datetime.utcnow()
1454
+ duration = (end_time - start_time).total_seconds()
1455
+
1456
+ # Format results
1457
+ result_text = f"\n{'='*60}\n"
1458
+ result_text += f"Python DSL Workflow Execution Summary\n"
1459
+ result_text += f"{'='*60}\n\n"
1460
+ result_text += f"Workflow: {workflow_name}\n"
1461
+ result_text += f"Runner: {runner or self.default_runner}\n"
1462
+ result_text += f"Status: āœ… Completed\n"
1463
+ result_text += f"Duration: {duration:.2f}s\n"
1464
+ result_text += f"Parameters: {json.dumps(parameters, indent=2)}\n"
1465
+ result_text += f"\nTotal Events: {event_count}\n"
1466
+
1467
+ # Include all captured output in the result
1468
+ if accumulated_output:
1469
+ result_text += f"\n{'='*60}\n"
1470
+ result_text += f"Workflow Output:\n"
1471
+ result_text += f"{'='*60}\n\n"
1472
+ result_text += "\n".join(accumulated_output)
1473
+
1474
+ if self.stream_callback:
1475
+ self.stream_callback(f"\nāœ… Workflow execution completed in {duration:.2f}s\n")
1476
+
1477
+ return result_text
1478
+
1479
+ except Exception as e:
1480
+ error_msg = f"Python DSL workflow execution failed: {str(e)}"
1481
+ logger.error(error_msg, exc_info=True)
1482
+ raise RuntimeError(error_msg)
1483
+
1484
+ def _inject_parameters(self, config: Dict[str, Any], parameters: Dict[str, Any]) -> Dict[str, Any]:
1485
+ """Inject parameters into configuration values."""
1486
+ result = {}
1487
+
1488
+ for key, value in config.items():
1489
+ if isinstance(value, str):
1490
+ # Replace {{param_name}} with parameter value
1491
+ for param_name, param_value in parameters.items():
1492
+ value = value.replace(f"{{{{{param_name}}}}}", str(param_value))
1493
+ result[key] = value
1494
+ elif isinstance(value, dict):
1495
+ result[key] = self._inject_parameters(value, parameters)
1496
+ elif isinstance(value, list):
+ # Recurse into dicts and substitute placeholders in strings; other items pass through unchanged
+ injected_items = []
+ for item in value:
+ if isinstance(item, dict):
+ injected_items.append(self._inject_parameters(item, parameters))
+ elif isinstance(item, str):
+ for param_name, param_value in parameters.items():
+ item = item.replace(f"{{{{{param_name}}}}}", str(param_value))
+ injected_items.append(item)
+ else:
+ injected_items.append(item)
+ result[key] = injected_items
1506
+ else:
1507
+ result[key] = value
1508
+
1509
+ return result
1510
+
1511
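A minimal sketch of how the placeholder substitution above behaves on a nested config (values are illustrative, not taken from the package):

    config = {"command": "deploy --env {{environment}}", "opts": {"tag": "{{version}}"}}
    params = {"environment": "production", "version": "v1.2.3"}
    # _inject_parameters(config, params) would yield:
    # {"command": "deploy --env production", "opts": {"tag": "v1.2.3"}}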
+ def list_workflow_steps(self, workflow_name: Optional[str] = None) -> str:
1512
+ """
1513
+ List all steps in the configured workflow(s).
1514
+
1515
+ LEGACY METHOD: For multi-workflow skills, this lists all workflows.
1516
+ For legacy single-workflow format, lists steps of that workflow.
1517
+
1518
+ Args:
1519
+ workflow_name: Optional workflow name to filter by (multi-workflow only)
1520
+
1521
+ Returns:
1522
+ str: A formatted string listing all workflow steps with their
1523
+ descriptions, executor types, and dependencies.
1524
+
1525
+ Examples:
1526
+ # List all steps in the workflow
1527
+ list_workflow_steps()
1528
+ """
1529
+ try:
1530
+ # Multi-workflow format
1531
+ if self.workflows:
1532
+ if workflow_name:
1533
+ # Find specific workflow
1534
+ workflow = next((w for w in self.workflows if w.get("name") == workflow_name), None)
1535
+ if not workflow:
1536
+ return f"āŒ Workflow '{workflow_name}' not found"
1537
+ workflows_to_show = [workflow]
1538
+ else:
1539
+ workflows_to_show = self.workflows
1540
+
1541
+ result = f"\nšŸ“‹ Workflows: {len(workflows_to_show)}\n"
1542
+ result += "=" * 60 + "\n\n"
1543
+
1544
+ for wf in workflows_to_show:
1545
+ wf_name = wf.get("name", "unknown")
1546
+ wf_type = wf.get("type", "unknown")
1547
+
1548
+ result += f"Workflow: {wf_name} ({wf_type})\n"
1549
+
1550
+ if wf_type == "json":
1551
+ wf_def = wf.get("definition")
1552
+ if isinstance(wf_def, str):
1553
+ try:
1554
+ wf_data = json.loads(wf_def)
1555
+ except Exception:
1556
+ result += " āŒ Invalid JSON definition\n\n"
1557
+ continue
1558
+ else:
1559
+ wf_data = wf_def
1560
+
1561
+ if wf_data:
1562
+ workflow_desc = wf_data.get("description", "No description")
1563
+ steps = wf_data.get("steps", [])
1564
+
1565
+ result += f" Description: {workflow_desc}\n"
1566
+ result += f" Total Steps: {len(steps)}\n\n"
1567
+
1568
+ if steps:
1569
+ result += " Steps:\n"
1570
+ for idx, step in enumerate(steps, 1):
1571
+ step_name = step.get("name", "unknown")
1572
+ step_desc = step.get("description", "")
1573
+ executor = step.get("executor", {})
1574
+ executor_type = executor.get("type", "unknown")
1575
+ depends_on = step.get("depends_on", [])
1576
+
1577
+ result += f" {idx}. {step_name}\n"
1578
+ if step_desc:
1579
+ result += f" Description: {step_desc}\n"
1580
+ result += f" Executor: {executor_type}\n"
1581
+ if depends_on:
1582
+ result += f" Depends on: {', '.join(depends_on)}\n"
1583
+ else:
1584
+ result += " (No steps defined)\n"
1585
+
1586
+ elif wf_type == "python_dsl":
1587
+ result += " Type: Python DSL\n"
1588
+ result += " (To view steps, execute the workflow)\n"
1589
+
1590
+ result += "\n"
1591
+
1592
+ return result
1593
+
1594
+ # Legacy single-workflow format
1595
+ if self.workflow_type == "json":
1596
+ if not self.workflow_data:
1597
+ return "āŒ No workflow definition available"
1598
+
1599
+ workflow_name_legacy = self.workflow_data.get("name", "unknown")
1600
+ workflow_desc = self.workflow_data.get("description", "No description")
1601
+ steps = self.workflow_data.get("steps", [])
1602
+
1603
+ result = f"\nšŸ“‹ Workflow: {workflow_name_legacy}\n"
1604
+ result += f" Description: {workflow_desc}\n"
1605
+ result += f" Total Steps: {len(steps)}\n\n"
1606
+
1607
+ if not steps:
1608
+ result += " (No steps defined)\n"
1609
+ return result
1610
+
1611
+ result += "Steps:\n"
1612
+ for idx, step in enumerate(steps, 1):
1613
+ step_name = step.get("name", "unknown")
1614
+ step_desc = step.get("description", "")
1615
+ executor = step.get("executor", {})
1616
+ executor_type = executor.get("type", "unknown")
1617
+ depends_on = step.get("depends_on", [])
1618
+
1619
+ result += f"\n{idx}. {step_name}\n"
1620
+ if step_desc:
1621
+ result += f" Description: {step_desc}\n"
1622
+ result += f" Executor: {executor_type}\n"
1623
+ if depends_on:
1624
+ result += f" Depends on: {', '.join(depends_on)}\n"
1625
+
1626
+ return result
1627
+
1628
+ elif self.workflow_type == "python_dsl":
1629
+ return f"\nšŸ Python DSL Workflow\n\nTo view steps, execute the workflow.\n"
1630
+
1631
+ else:
1632
+ return "āŒ No workflow configured"
1633
+
1634
+ except Exception as e:
1635
+ logger.error(f"Failed to list workflow steps: {e}", exc_info=True)
1636
+ return f"āŒ Error listing workflow steps: {str(e)}"
1637
+
1638
+ def get_workflow_info(self) -> str:
1639
+ """
1640
+ Get detailed information about the configured workflow(s).
1641
+
1642
+ This tool provides comprehensive information about the workflow
1643
+ including its name, description, type, number of steps, triggers,
1644
+ and configuration.
1645
+
1646
+ For multi-workflow skills, lists all workflows with their configurations.
1647
+ For legacy single-workflow format, shows that workflow's information.
1648
+
1649
+ Returns:
1650
+ str: A formatted string with complete workflow information.
1651
+
1652
+ Examples:
1653
+ # Get workflow information
1654
+ get_workflow_info()
1655
+ """
1656
+ try:
1657
+ result = f"\n{'='*60}\n"
1658
+ result += f"Workflow Executor Information\n"
1659
+ result += f"{'='*60}\n\n"
1660
+
1661
+ result += f"Validation Enabled: {self.validation_enabled}\n"
1662
+ result += f"Timeout: {self.timeout}s\n"
1663
+ result += f"Default Runner: {self.default_runner or 'None'}\n"
1664
+ result += f"Total Workflows: {len(self.workflows)}\n\n"
1665
+
1666
+ # Multi-workflow format
1667
+ if self.workflows:
1668
+ result += "Configured Workflows:\n"
1669
+ result += "-" * 60 + "\n\n"
1670
+
1671
+ for idx, workflow in enumerate(self.workflows, 1):
1672
+ wf_name = workflow.get("name", "unknown")
1673
+ wf_type = workflow.get("type", "unknown")
1674
+ safe_name = wf_name.replace("-", "_").replace(" ", "_").lower()
1675
+
1676
+ result += f"{idx}. {wf_name}\n"
1677
+ result += f" Type: {wf_type}\n"
1678
+ result += f" Tool: execute_workflow_{safe_name}()\n"
1679
+
1680
+ if wf_type == "json":
1681
+ wf_def = workflow.get("definition")
1682
+ if isinstance(wf_def, str):
1683
+ try:
1684
+ wf_data = json.loads(wf_def)
1685
+ except Exception:
1686
+ result += " āŒ Invalid JSON definition\n\n"
1687
+ continue
1688
+ else:
1689
+ wf_data = wf_def
1690
+
1691
+ if wf_data:
1692
+ workflow_desc = wf_data.get("description", "No description")
1693
+ steps = wf_data.get("steps", [])
1694
+ triggers = wf_data.get("triggers", [])
1695
+ workflow_runner = wf_data.get("runner")
1696
+
1697
+ result += f" Description: {workflow_desc}\n"
1698
+ result += f" Steps: {len(steps)}\n"
1699
+ result += f" Triggers: {len(triggers)}\n"
1700
+
1701
+ # Show runner hierarchy
1702
+ if workflow_runner:
1703
+ result += f" Runner: {workflow_runner} (specified in workflow)\n"
1704
+ elif self.default_runner:
1705
+ result += f" Runner: {self.default_runner} (from skill config)\n"
1706
+ else:
1707
+ result += f" Runner: default (no runner specified)\n"
1708
+
1709
+ elif wf_type == "python_dsl":
1710
+ python_code = workflow.get("code", "")
1711
+ result += f" Code Length: {len(python_code)} characters\n"
1712
+
1713
+ result += "\n"
1714
+
1715
+ return result
1716
+
1717
+ # Legacy single-workflow format
1718
+ result += f"Type: {self.workflow_type or 'none'}\n\n"
1719
+
1720
+ if self.workflow_type == "json" and self.workflow_data:
1721
+ workflow_name = self.workflow_data.get("name", "unknown")
1722
+ workflow_desc = self.workflow_data.get("description", "No description")
1723
+ steps = self.workflow_data.get("steps", [])
1724
+ triggers = self.workflow_data.get("triggers", [])
1725
+ workflow_runner = self.workflow_data.get("runner")
1726
+
1727
+ result += f"Name: {workflow_name}\n"
1728
+ result += f"Description: {workflow_desc}\n"
1729
+ result += f"Steps: {len(steps)}\n"
1730
+ result += f"Triggers: {len(triggers)}\n"
1731
+
1732
+ # Show runner hierarchy
1733
+ if workflow_runner:
1734
+ result += f"Workflow Runner: {workflow_runner} (will be used for execution)\n"
1735
+ elif self.default_runner:
1736
+ result += f"Workflow Runner: {self.default_runner} (from skill config)\n"
1737
+ else:
1738
+ result += f"Workflow Runner: default (no runner specified)\n"
1739
+
1740
+ elif self.workflow_type == "python_dsl":
1741
+ result += f"Python DSL Workflow\n"
1742
+ result += f"Code Length: {len(self.python_dsl_code or '')} characters\n"
1743
+
1744
+ return result
1745
+
1746
+ except Exception as e:
1747
+ logger.error(f"Failed to get workflow info: {e}", exc_info=True)
1748
+ return f"āŒ Error getting workflow info: {str(e)}"