kubiya-control-plane-api 0.9.15 (py3-none-any.whl)
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- control_plane_api/LICENSE +676 -0
- control_plane_api/README.md +350 -0
- control_plane_api/__init__.py +4 -0
- control_plane_api/__version__.py +8 -0
- control_plane_api/alembic/README +1 -0
- control_plane_api/alembic/env.py +121 -0
- control_plane_api/alembic/script.py.mako +28 -0
- control_plane_api/alembic/versions/2613c65c3dbe_initial_database_setup.py +32 -0
- control_plane_api/alembic/versions/2df520d4927d_merge_heads.py +28 -0
- control_plane_api/alembic/versions/43abf98d6a01_add_paused_status_to_executions.py +73 -0
- control_plane_api/alembic/versions/6289854264cb_merge_multiple_heads.py +28 -0
- control_plane_api/alembic/versions/6a4d4dc3d8dc_generate_execution_transitions.py +50 -0
- control_plane_api/alembic/versions/87d11cf0a783_add_disconnected_status_to_worker_.py +44 -0
- control_plane_api/alembic/versions/add_ephemeral_queue_support.py +85 -0
- control_plane_api/alembic/versions/add_model_type_to_llm_models.py +31 -0
- control_plane_api/alembic/versions/add_plan_executions_table.py +114 -0
- control_plane_api/alembic/versions/add_trace_span_tables.py +154 -0
- control_plane_api/alembic/versions/add_user_info_to_traces.py +36 -0
- control_plane_api/alembic/versions/adjusting_foreign_keys.py +32 -0
- control_plane_api/alembic/versions/b4983d976db2_initial_tables.py +1128 -0
- control_plane_api/alembic/versions/d181a3b40e71_rename_custom_metadata_to_metadata_in_.py +50 -0
- control_plane_api/alembic/versions/df9117888e82_add_missing_columns.py +82 -0
- control_plane_api/alembic/versions/f25de6ad895a_missing_migrations.py +34 -0
- control_plane_api/alembic/versions/f71305fb69b9_fix_ephemeral_queue_deletion_foreign_key.py +54 -0
- control_plane_api/alembic/versions/mark_local_exec_queues_as_ephemeral.py +68 -0
- control_plane_api/alembic.ini +148 -0
- control_plane_api/api/index.py +12 -0
- control_plane_api/app/__init__.py +11 -0
- control_plane_api/app/activities/__init__.py +20 -0
- control_plane_api/app/activities/agent_activities.py +384 -0
- control_plane_api/app/activities/plan_generation_activities.py +499 -0
- control_plane_api/app/activities/team_activities.py +424 -0
- control_plane_api/app/activities/temporal_cloud_activities.py +588 -0
- control_plane_api/app/config/__init__.py +35 -0
- control_plane_api/app/config/api_config.py +469 -0
- control_plane_api/app/config/config_loader.py +224 -0
- control_plane_api/app/config/model_pricing.py +323 -0
- control_plane_api/app/config/storage_config.py +159 -0
- control_plane_api/app/config.py +115 -0
- control_plane_api/app/controllers/__init__.py +0 -0
- control_plane_api/app/controllers/execution_environment_controller.py +1315 -0
- control_plane_api/app/database.py +135 -0
- control_plane_api/app/exceptions.py +408 -0
- control_plane_api/app/lib/__init__.py +11 -0
- control_plane_api/app/lib/environment.py +65 -0
- control_plane_api/app/lib/event_bus/__init__.py +17 -0
- control_plane_api/app/lib/event_bus/base.py +136 -0
- control_plane_api/app/lib/event_bus/manager.py +335 -0
- control_plane_api/app/lib/event_bus/providers/__init__.py +6 -0
- control_plane_api/app/lib/event_bus/providers/http_provider.py +166 -0
- control_plane_api/app/lib/event_bus/providers/nats_provider.py +324 -0
- control_plane_api/app/lib/event_bus/providers/redis_provider.py +233 -0
- control_plane_api/app/lib/event_bus/providers/websocket_provider.py +497 -0
- control_plane_api/app/lib/job_executor.py +330 -0
- control_plane_api/app/lib/kubiya_client.py +293 -0
- control_plane_api/app/lib/litellm_pricing.py +166 -0
- control_plane_api/app/lib/mcp_validation.py +163 -0
- control_plane_api/app/lib/nats/__init__.py +13 -0
- control_plane_api/app/lib/nats/credentials_manager.py +288 -0
- control_plane_api/app/lib/nats/listener.py +374 -0
- control_plane_api/app/lib/planning_prompt_builder.py +153 -0
- control_plane_api/app/lib/planning_tools/__init__.py +41 -0
- control_plane_api/app/lib/planning_tools/agents.py +409 -0
- control_plane_api/app/lib/planning_tools/agno_toolkit.py +836 -0
- control_plane_api/app/lib/planning_tools/base.py +119 -0
- control_plane_api/app/lib/planning_tools/cognitive_memory_tools.py +403 -0
- control_plane_api/app/lib/planning_tools/context_graph_tools.py +545 -0
- control_plane_api/app/lib/planning_tools/environments.py +218 -0
- control_plane_api/app/lib/planning_tools/knowledge.py +204 -0
- control_plane_api/app/lib/planning_tools/models.py +93 -0
- control_plane_api/app/lib/planning_tools/planning_service.py +646 -0
- control_plane_api/app/lib/planning_tools/resources.py +242 -0
- control_plane_api/app/lib/planning_tools/teams.py +334 -0
- control_plane_api/app/lib/policy_enforcer_client.py +1016 -0
- control_plane_api/app/lib/redis_client.py +803 -0
- control_plane_api/app/lib/sqlalchemy_utils.py +486 -0
- control_plane_api/app/lib/state_transition_tools/__init__.py +7 -0
- control_plane_api/app/lib/state_transition_tools/execution_context.py +388 -0
- control_plane_api/app/lib/storage/__init__.py +20 -0
- control_plane_api/app/lib/storage/base_provider.py +274 -0
- control_plane_api/app/lib/storage/provider_factory.py +157 -0
- control_plane_api/app/lib/storage/vercel_blob_provider.py +468 -0
- control_plane_api/app/lib/supabase.py +71 -0
- control_plane_api/app/lib/supabase_utils.py +138 -0
- control_plane_api/app/lib/task_planning/__init__.py +138 -0
- control_plane_api/app/lib/task_planning/agent_factory.py +308 -0
- control_plane_api/app/lib/task_planning/agents.py +389 -0
- control_plane_api/app/lib/task_planning/cache.py +218 -0
- control_plane_api/app/lib/task_planning/entity_resolver.py +273 -0
- control_plane_api/app/lib/task_planning/helpers.py +293 -0
- control_plane_api/app/lib/task_planning/hooks.py +474 -0
- control_plane_api/app/lib/task_planning/models.py +503 -0
- control_plane_api/app/lib/task_planning/plan_validator.py +166 -0
- control_plane_api/app/lib/task_planning/planning_workflow.py +2911 -0
- control_plane_api/app/lib/task_planning/runner.py +656 -0
- control_plane_api/app/lib/task_planning/streaming_hook.py +213 -0
- control_plane_api/app/lib/task_planning/workflow.py +424 -0
- control_plane_api/app/lib/templating/__init__.py +88 -0
- control_plane_api/app/lib/templating/compiler.py +278 -0
- control_plane_api/app/lib/templating/engine.py +178 -0
- control_plane_api/app/lib/templating/parsers/__init__.py +29 -0
- control_plane_api/app/lib/templating/parsers/base.py +96 -0
- control_plane_api/app/lib/templating/parsers/env.py +85 -0
- control_plane_api/app/lib/templating/parsers/graph.py +112 -0
- control_plane_api/app/lib/templating/parsers/secret.py +87 -0
- control_plane_api/app/lib/templating/parsers/simple.py +81 -0
- control_plane_api/app/lib/templating/resolver.py +366 -0
- control_plane_api/app/lib/templating/types.py +214 -0
- control_plane_api/app/lib/templating/validator.py +201 -0
- control_plane_api/app/lib/temporal_client.py +232 -0
- control_plane_api/app/lib/temporal_credentials_cache.py +178 -0
- control_plane_api/app/lib/temporal_credentials_service.py +203 -0
- control_plane_api/app/lib/validation/__init__.py +24 -0
- control_plane_api/app/lib/validation/runtime_validation.py +388 -0
- control_plane_api/app/main.py +531 -0
- control_plane_api/app/middleware/__init__.py +10 -0
- control_plane_api/app/middleware/auth.py +645 -0
- control_plane_api/app/middleware/exception_handler.py +267 -0
- control_plane_api/app/middleware/prometheus_middleware.py +173 -0
- control_plane_api/app/middleware/rate_limiting.py +384 -0
- control_plane_api/app/middleware/request_id.py +202 -0
- control_plane_api/app/models/__init__.py +40 -0
- control_plane_api/app/models/agent.py +90 -0
- control_plane_api/app/models/analytics.py +206 -0
- control_plane_api/app/models/associations.py +107 -0
- control_plane_api/app/models/auth_user.py +73 -0
- control_plane_api/app/models/context.py +161 -0
- control_plane_api/app/models/custom_integration.py +99 -0
- control_plane_api/app/models/environment.py +64 -0
- control_plane_api/app/models/execution.py +125 -0
- control_plane_api/app/models/execution_transition.py +50 -0
- control_plane_api/app/models/job.py +159 -0
- control_plane_api/app/models/llm_model.py +78 -0
- control_plane_api/app/models/orchestration.py +66 -0
- control_plane_api/app/models/plan_execution.py +102 -0
- control_plane_api/app/models/presence.py +49 -0
- control_plane_api/app/models/project.py +61 -0
- control_plane_api/app/models/project_management.py +85 -0
- control_plane_api/app/models/session.py +29 -0
- control_plane_api/app/models/skill.py +155 -0
- control_plane_api/app/models/system_tables.py +43 -0
- control_plane_api/app/models/task_planning.py +372 -0
- control_plane_api/app/models/team.py +86 -0
- control_plane_api/app/models/trace.py +257 -0
- control_plane_api/app/models/user_profile.py +54 -0
- control_plane_api/app/models/worker.py +221 -0
- control_plane_api/app/models/workflow.py +161 -0
- control_plane_api/app/models/workspace.py +50 -0
- control_plane_api/app/observability/__init__.py +177 -0
- control_plane_api/app/observability/context_logging.py +475 -0
- control_plane_api/app/observability/decorators.py +337 -0
- control_plane_api/app/observability/local_span_processor.py +702 -0
- control_plane_api/app/observability/metrics.py +303 -0
- control_plane_api/app/observability/middleware.py +246 -0
- control_plane_api/app/observability/optional.py +115 -0
- control_plane_api/app/observability/tracing.py +382 -0
- control_plane_api/app/policies/README.md +149 -0
- control_plane_api/app/policies/approved_users.rego +62 -0
- control_plane_api/app/policies/business_hours.rego +51 -0
- control_plane_api/app/policies/rate_limiting.rego +100 -0
- control_plane_api/app/policies/tool_enforcement/README.md +336 -0
- control_plane_api/app/policies/tool_enforcement/bash_command_validation.rego +71 -0
- control_plane_api/app/policies/tool_enforcement/business_hours_enforcement.rego +82 -0
- control_plane_api/app/policies/tool_enforcement/mcp_tool_allowlist.rego +58 -0
- control_plane_api/app/policies/tool_enforcement/production_safeguards.rego +80 -0
- control_plane_api/app/policies/tool_enforcement/role_based_tool_access.rego +44 -0
- control_plane_api/app/policies/tool_restrictions.rego +86 -0
- control_plane_api/app/routers/__init__.py +4 -0
- control_plane_api/app/routers/agents.py +382 -0
- control_plane_api/app/routers/agents_v2.py +1598 -0
- control_plane_api/app/routers/analytics.py +1310 -0
- control_plane_api/app/routers/auth.py +59 -0
- control_plane_api/app/routers/client_config.py +57 -0
- control_plane_api/app/routers/context_graph.py +561 -0
- control_plane_api/app/routers/context_manager.py +577 -0
- control_plane_api/app/routers/custom_integrations.py +490 -0
- control_plane_api/app/routers/enforcer.py +132 -0
- control_plane_api/app/routers/environment_context.py +252 -0
- control_plane_api/app/routers/environments.py +761 -0
- control_plane_api/app/routers/execution_environment.py +847 -0
- control_plane_api/app/routers/executions/__init__.py +28 -0
- control_plane_api/app/routers/executions/router.py +286 -0
- control_plane_api/app/routers/executions/services/__init__.py +22 -0
- control_plane_api/app/routers/executions/services/demo_worker_health.py +156 -0
- control_plane_api/app/routers/executions/services/status_service.py +420 -0
- control_plane_api/app/routers/executions/services/test_worker_health.py +480 -0
- control_plane_api/app/routers/executions/services/worker_health.py +514 -0
- control_plane_api/app/routers/executions/streaming/__init__.py +22 -0
- control_plane_api/app/routers/executions/streaming/deduplication.py +352 -0
- control_plane_api/app/routers/executions/streaming/event_buffer.py +353 -0
- control_plane_api/app/routers/executions/streaming/event_formatter.py +964 -0
- control_plane_api/app/routers/executions/streaming/history_loader.py +588 -0
- control_plane_api/app/routers/executions/streaming/live_source.py +693 -0
- control_plane_api/app/routers/executions/streaming/streamer.py +849 -0
- control_plane_api/app/routers/executions.py +4888 -0
- control_plane_api/app/routers/health.py +165 -0
- control_plane_api/app/routers/health_v2.py +394 -0
- control_plane_api/app/routers/integration_templates.py +496 -0
- control_plane_api/app/routers/integrations.py +287 -0
- control_plane_api/app/routers/jobs.py +1809 -0
- control_plane_api/app/routers/metrics.py +517 -0
- control_plane_api/app/routers/models.py +82 -0
- control_plane_api/app/routers/models_v2.py +628 -0
- control_plane_api/app/routers/plan_executions.py +1481 -0
- control_plane_api/app/routers/plan_generation_async.py +304 -0
- control_plane_api/app/routers/policies.py +669 -0
- control_plane_api/app/routers/presence.py +234 -0
- control_plane_api/app/routers/projects.py +987 -0
- control_plane_api/app/routers/runners.py +379 -0
- control_plane_api/app/routers/runtimes.py +172 -0
- control_plane_api/app/routers/secrets.py +171 -0
- control_plane_api/app/routers/skills.py +1010 -0
- control_plane_api/app/routers/skills_definitions.py +140 -0
- control_plane_api/app/routers/storage.py +456 -0
- control_plane_api/app/routers/task_planning.py +611 -0
- control_plane_api/app/routers/task_queues.py +650 -0
- control_plane_api/app/routers/team_context.py +274 -0
- control_plane_api/app/routers/teams.py +1747 -0
- control_plane_api/app/routers/templates.py +248 -0
- control_plane_api/app/routers/traces.py +571 -0
- control_plane_api/app/routers/websocket_client.py +479 -0
- control_plane_api/app/routers/websocket_executions_status.py +437 -0
- control_plane_api/app/routers/websocket_gateway.py +323 -0
- control_plane_api/app/routers/websocket_traces.py +576 -0
- control_plane_api/app/routers/worker_queues.py +2555 -0
- control_plane_api/app/routers/worker_websocket.py +419 -0
- control_plane_api/app/routers/workers.py +1004 -0
- control_plane_api/app/routers/workflows.py +204 -0
- control_plane_api/app/runtimes/__init__.py +6 -0
- control_plane_api/app/runtimes/validation.py +344 -0
- control_plane_api/app/schemas/__init__.py +1 -0
- control_plane_api/app/schemas/job_schemas.py +302 -0
- control_plane_api/app/schemas/mcp_schemas.py +311 -0
- control_plane_api/app/schemas/template_schemas.py +133 -0
- control_plane_api/app/schemas/trace_schemas.py +168 -0
- control_plane_api/app/schemas/worker_queue_observability_schemas.py +165 -0
- control_plane_api/app/services/__init__.py +1 -0
- control_plane_api/app/services/agno_planning_strategy.py +233 -0
- control_plane_api/app/services/agno_service.py +838 -0
- control_plane_api/app/services/claude_code_planning_service.py +203 -0
- control_plane_api/app/services/context_graph_client.py +224 -0
- control_plane_api/app/services/custom_integration_service.py +415 -0
- control_plane_api/app/services/integration_resolution_service.py +345 -0
- control_plane_api/app/services/litellm_service.py +394 -0
- control_plane_api/app/services/plan_generator.py +79 -0
- control_plane_api/app/services/planning_strategy.py +66 -0
- control_plane_api/app/services/planning_strategy_factory.py +118 -0
- control_plane_api/app/services/policy_service.py +615 -0
- control_plane_api/app/services/state_transition_service.py +755 -0
- control_plane_api/app/services/storage_service.py +593 -0
- control_plane_api/app/services/temporal_cloud_provisioning.py +150 -0
- control_plane_api/app/services/toolsets/context_graph_skill.py +432 -0
- control_plane_api/app/services/trace_retention.py +354 -0
- control_plane_api/app/services/worker_queue_metrics_service.py +190 -0
- control_plane_api/app/services/workflow_cancellation_manager.py +135 -0
- control_plane_api/app/services/workflow_operations_service.py +611 -0
- control_plane_api/app/skills/__init__.py +100 -0
- control_plane_api/app/skills/base.py +239 -0
- control_plane_api/app/skills/builtin/__init__.py +37 -0
- control_plane_api/app/skills/builtin/agent_communication/__init__.py +8 -0
- control_plane_api/app/skills/builtin/agent_communication/skill.py +246 -0
- control_plane_api/app/skills/builtin/code_ingestion/__init__.py +4 -0
- control_plane_api/app/skills/builtin/code_ingestion/skill.py +267 -0
- control_plane_api/app/skills/builtin/cognitive_memory/__init__.py +4 -0
- control_plane_api/app/skills/builtin/cognitive_memory/skill.py +174 -0
- control_plane_api/app/skills/builtin/contextual_awareness/__init__.py +4 -0
- control_plane_api/app/skills/builtin/contextual_awareness/skill.py +387 -0
- control_plane_api/app/skills/builtin/data_visualization/__init__.py +4 -0
- control_plane_api/app/skills/builtin/data_visualization/skill.py +154 -0
- control_plane_api/app/skills/builtin/docker/__init__.py +4 -0
- control_plane_api/app/skills/builtin/docker/skill.py +104 -0
- control_plane_api/app/skills/builtin/file_generation/__init__.py +4 -0
- control_plane_api/app/skills/builtin/file_generation/skill.py +94 -0
- control_plane_api/app/skills/builtin/file_system/__init__.py +4 -0
- control_plane_api/app/skills/builtin/file_system/skill.py +110 -0
- control_plane_api/app/skills/builtin/knowledge_api/__init__.py +5 -0
- control_plane_api/app/skills/builtin/knowledge_api/skill.py +124 -0
- control_plane_api/app/skills/builtin/python/__init__.py +4 -0
- control_plane_api/app/skills/builtin/python/skill.py +92 -0
- control_plane_api/app/skills/builtin/remote_filesystem/__init__.py +5 -0
- control_plane_api/app/skills/builtin/remote_filesystem/skill.py +170 -0
- control_plane_api/app/skills/builtin/shell/__init__.py +4 -0
- control_plane_api/app/skills/builtin/shell/skill.py +161 -0
- control_plane_api/app/skills/builtin/slack/__init__.py +3 -0
- control_plane_api/app/skills/builtin/slack/skill.py +302 -0
- control_plane_api/app/skills/builtin/workflow_executor/__init__.py +4 -0
- control_plane_api/app/skills/builtin/workflow_executor/skill.py +469 -0
- control_plane_api/app/skills/business_intelligence.py +189 -0
- control_plane_api/app/skills/config.py +63 -0
- control_plane_api/app/skills/loaders/__init__.py +14 -0
- control_plane_api/app/skills/loaders/base.py +73 -0
- control_plane_api/app/skills/loaders/filesystem_loader.py +199 -0
- control_plane_api/app/skills/registry.py +125 -0
- control_plane_api/app/utils/helpers.py +12 -0
- control_plane_api/app/utils/workflow_executor.py +354 -0
- control_plane_api/app/workflows/__init__.py +11 -0
- control_plane_api/app/workflows/agent_execution.py +520 -0
- control_plane_api/app/workflows/agent_execution_with_skills.py +223 -0
- control_plane_api/app/workflows/namespace_provisioning.py +326 -0
- control_plane_api/app/workflows/plan_generation.py +254 -0
- control_plane_api/app/workflows/team_execution.py +442 -0
- control_plane_api/scripts/seed_models.py +240 -0
- control_plane_api/scripts/validate_existing_tool_names.py +492 -0
- control_plane_api/shared/__init__.py +8 -0
- control_plane_api/shared/version.py +17 -0
- control_plane_api/test_deduplication.py +274 -0
- control_plane_api/test_executor_deduplication_e2e.py +309 -0
- control_plane_api/test_job_execution_e2e.py +283 -0
- control_plane_api/test_real_integration.py +193 -0
- control_plane_api/version.py +38 -0
- control_plane_api/worker/__init__.py +0 -0
- control_plane_api/worker/activities/__init__.py +0 -0
- control_plane_api/worker/activities/agent_activities.py +1585 -0
- control_plane_api/worker/activities/approval_activities.py +234 -0
- control_plane_api/worker/activities/job_activities.py +199 -0
- control_plane_api/worker/activities/runtime_activities.py +1167 -0
- control_plane_api/worker/activities/skill_activities.py +282 -0
- control_plane_api/worker/activities/team_activities.py +479 -0
- control_plane_api/worker/agent_runtime_server.py +370 -0
- control_plane_api/worker/binary_manager.py +333 -0
- control_plane_api/worker/config/__init__.py +31 -0
- control_plane_api/worker/config/worker_config.py +273 -0
- control_plane_api/worker/control_plane_client.py +1491 -0
- control_plane_api/worker/examples/analytics_integration_example.py +362 -0
- control_plane_api/worker/health_monitor.py +159 -0
- control_plane_api/worker/metrics.py +237 -0
- control_plane_api/worker/models/__init__.py +1 -0
- control_plane_api/worker/models/error_events.py +105 -0
- control_plane_api/worker/models/inputs.py +89 -0
- control_plane_api/worker/runtimes/__init__.py +35 -0
- control_plane_api/worker/runtimes/agent_runtime/runtime.py +485 -0
- control_plane_api/worker/runtimes/agno/__init__.py +34 -0
- control_plane_api/worker/runtimes/agno/config.py +248 -0
- control_plane_api/worker/runtimes/agno/hooks.py +385 -0
- control_plane_api/worker/runtimes/agno/mcp_builder.py +195 -0
- control_plane_api/worker/runtimes/agno/runtime.py +1063 -0
- control_plane_api/worker/runtimes/agno/utils.py +163 -0
- control_plane_api/worker/runtimes/base.py +979 -0
- control_plane_api/worker/runtimes/claude_code/__init__.py +38 -0
- control_plane_api/worker/runtimes/claude_code/cleanup.py +184 -0
- control_plane_api/worker/runtimes/claude_code/client_pool.py +529 -0
- control_plane_api/worker/runtimes/claude_code/config.py +829 -0
- control_plane_api/worker/runtimes/claude_code/hooks.py +482 -0
- control_plane_api/worker/runtimes/claude_code/litellm_proxy.py +1702 -0
- control_plane_api/worker/runtimes/claude_code/mcp_builder.py +467 -0
- control_plane_api/worker/runtimes/claude_code/mcp_discovery.py +558 -0
- control_plane_api/worker/runtimes/claude_code/runtime.py +1546 -0
- control_plane_api/worker/runtimes/claude_code/tool_mapper.py +403 -0
- control_plane_api/worker/runtimes/claude_code/utils.py +149 -0
- control_plane_api/worker/runtimes/factory.py +173 -0
- control_plane_api/worker/runtimes/model_utils.py +107 -0
- control_plane_api/worker/runtimes/validation.py +93 -0
- control_plane_api/worker/services/__init__.py +1 -0
- control_plane_api/worker/services/agent_communication_tools.py +908 -0
- control_plane_api/worker/services/agent_executor.py +485 -0
- control_plane_api/worker/services/agent_executor_v2.py +793 -0
- control_plane_api/worker/services/analytics_collector.py +457 -0
- control_plane_api/worker/services/analytics_service.py +464 -0
- control_plane_api/worker/services/approval_tools.py +310 -0
- control_plane_api/worker/services/approval_tools_agno.py +207 -0
- control_plane_api/worker/services/cancellation_manager.py +177 -0
- control_plane_api/worker/services/code_ingestion_tools.py +465 -0
- control_plane_api/worker/services/contextual_awareness_tools.py +405 -0
- control_plane_api/worker/services/data_visualization.py +834 -0
- control_plane_api/worker/services/event_publisher.py +531 -0
- control_plane_api/worker/services/jira_tools.py +257 -0
- control_plane_api/worker/services/remote_filesystem_tools.py +498 -0
- control_plane_api/worker/services/runtime_analytics.py +328 -0
- control_plane_api/worker/services/session_service.py +365 -0
- control_plane_api/worker/services/skill_context_enhancement.py +181 -0
- control_plane_api/worker/services/skill_factory.py +471 -0
- control_plane_api/worker/services/system_prompt_enhancement.py +410 -0
- control_plane_api/worker/services/team_executor.py +715 -0
- control_plane_api/worker/services/team_executor_v2.py +1866 -0
- control_plane_api/worker/services/tool_enforcement.py +254 -0
- control_plane_api/worker/services/workflow_executor/__init__.py +52 -0
- control_plane_api/worker/services/workflow_executor/event_processor.py +287 -0
- control_plane_api/worker/services/workflow_executor/event_publisher.py +210 -0
- control_plane_api/worker/services/workflow_executor/executors/__init__.py +15 -0
- control_plane_api/worker/services/workflow_executor/executors/base.py +270 -0
- control_plane_api/worker/services/workflow_executor/executors/json_executor.py +50 -0
- control_plane_api/worker/services/workflow_executor/executors/python_executor.py +50 -0
- control_plane_api/worker/services/workflow_executor/models.py +142 -0
- control_plane_api/worker/services/workflow_executor_tools.py +1748 -0
- control_plane_api/worker/skills/__init__.py +12 -0
- control_plane_api/worker/skills/builtin/context_graph_search/README.md +213 -0
- control_plane_api/worker/skills/builtin/context_graph_search/__init__.py +5 -0
- control_plane_api/worker/skills/builtin/context_graph_search/agno_impl.py +808 -0
- control_plane_api/worker/skills/builtin/context_graph_search/skill.yaml +67 -0
- control_plane_api/worker/skills/builtin/contextual_awareness/__init__.py +4 -0
- control_plane_api/worker/skills/builtin/contextual_awareness/agno_impl.py +62 -0
- control_plane_api/worker/skills/builtin/data_visualization/agno_impl.py +18 -0
- control_plane_api/worker/skills/builtin/data_visualization/skill.yaml +84 -0
- control_plane_api/worker/skills/builtin/docker/agno_impl.py +65 -0
- control_plane_api/worker/skills/builtin/docker/skill.yaml +60 -0
- control_plane_api/worker/skills/builtin/file_generation/agno_impl.py +47 -0
- control_plane_api/worker/skills/builtin/file_generation/skill.yaml +64 -0
- control_plane_api/worker/skills/builtin/file_system/agno_impl.py +32 -0
- control_plane_api/worker/skills/builtin/file_system/skill.yaml +54 -0
- control_plane_api/worker/skills/builtin/knowledge_api/__init__.py +4 -0
- control_plane_api/worker/skills/builtin/knowledge_api/agno_impl.py +50 -0
- control_plane_api/worker/skills/builtin/knowledge_api/skill.yaml +66 -0
- control_plane_api/worker/skills/builtin/python/agno_impl.py +25 -0
- control_plane_api/worker/skills/builtin/python/skill.yaml +60 -0
- control_plane_api/worker/skills/builtin/schema_fix_mixin.py +260 -0
- control_plane_api/worker/skills/builtin/shell/agno_impl.py +31 -0
- control_plane_api/worker/skills/builtin/shell/skill.yaml +60 -0
- control_plane_api/worker/skills/builtin/slack/__init__.py +3 -0
- control_plane_api/worker/skills/builtin/slack/agno_impl.py +1282 -0
- control_plane_api/worker/skills/builtin/slack/skill.yaml +276 -0
- control_plane_api/worker/skills/builtin/workflow_executor/agno_impl.py +62 -0
- control_plane_api/worker/skills/builtin/workflow_executor/skill.yaml +79 -0
- control_plane_api/worker/skills/loaders/__init__.py +5 -0
- control_plane_api/worker/skills/loaders/base.py +23 -0
- control_plane_api/worker/skills/loaders/filesystem_loader.py +357 -0
- control_plane_api/worker/skills/registry.py +208 -0
- control_plane_api/worker/tests/__init__.py +1 -0
- control_plane_api/worker/tests/conftest.py +12 -0
- control_plane_api/worker/tests/e2e/__init__.py +0 -0
- control_plane_api/worker/tests/e2e/test_context_graph_real_api.py +338 -0
- control_plane_api/worker/tests/e2e/test_context_graph_templates_e2e.py +523 -0
- control_plane_api/worker/tests/e2e/test_enforcement_e2e.py +344 -0
- control_plane_api/worker/tests/e2e/test_execution_flow.py +571 -0
- control_plane_api/worker/tests/e2e/test_single_execution_mode.py +656 -0
- control_plane_api/worker/tests/integration/__init__.py +0 -0
- control_plane_api/worker/tests/integration/test_builtin_skills_fixes.py +245 -0
- control_plane_api/worker/tests/integration/test_context_graph_search_integration.py +365 -0
- control_plane_api/worker/tests/integration/test_control_plane_integration.py +308 -0
- control_plane_api/worker/tests/integration/test_hook_enforcement_integration.py +579 -0
- control_plane_api/worker/tests/integration/test_scheduled_job_workflow.py +237 -0
- control_plane_api/worker/tests/integration/test_system_prompt_enhancement_integration.py +343 -0
- control_plane_api/worker/tests/unit/__init__.py +0 -0
- control_plane_api/worker/tests/unit/test_builtin_skill_autoload.py +396 -0
- control_plane_api/worker/tests/unit/test_context_graph_search.py +450 -0
- control_plane_api/worker/tests/unit/test_context_graph_templates.py +403 -0
- control_plane_api/worker/tests/unit/test_control_plane_client.py +401 -0
- control_plane_api/worker/tests/unit/test_control_plane_client_jobs.py +345 -0
- control_plane_api/worker/tests/unit/test_job_activities.py +353 -0
- control_plane_api/worker/tests/unit/test_skill_context_enhancement.py +321 -0
- control_plane_api/worker/tests/unit/test_system_prompt_enhancement.py +415 -0
- control_plane_api/worker/tests/unit/test_tool_enforcement.py +324 -0
- control_plane_api/worker/utils/__init__.py +1 -0
- control_plane_api/worker/utils/chunk_batcher.py +330 -0
- control_plane_api/worker/utils/environment.py +65 -0
- control_plane_api/worker/utils/error_publisher.py +260 -0
- control_plane_api/worker/utils/event_batcher.py +256 -0
- control_plane_api/worker/utils/logging_config.py +335 -0
- control_plane_api/worker/utils/logging_helper.py +326 -0
- control_plane_api/worker/utils/parameter_validator.py +120 -0
- control_plane_api/worker/utils/retry_utils.py +60 -0
- control_plane_api/worker/utils/streaming_utils.py +665 -0
- control_plane_api/worker/utils/tool_validation.py +332 -0
- control_plane_api/worker/utils/workspace_manager.py +163 -0
- control_plane_api/worker/websocket_client.py +393 -0
- control_plane_api/worker/worker.py +1297 -0
- control_plane_api/worker/workflows/__init__.py +0 -0
- control_plane_api/worker/workflows/agent_execution.py +909 -0
- control_plane_api/worker/workflows/scheduled_job_wrapper.py +332 -0
- control_plane_api/worker/workflows/team_execution.py +611 -0
- kubiya_control_plane_api-0.9.15.dist-info/METADATA +354 -0
- kubiya_control_plane_api-0.9.15.dist-info/RECORD +479 -0
- kubiya_control_plane_api-0.9.15.dist-info/WHEEL +5 -0
- kubiya_control_plane_api-0.9.15.dist-info/entry_points.txt +5 -0
- kubiya_control_plane_api-0.9.15.dist-info/licenses/LICENSE +676 -0
- kubiya_control_plane_api-0.9.15.dist-info/top_level.txt +3 -0
- scripts/__init__.py +1 -0
- scripts/migrations.py +39 -0
- scripts/seed_worker_queues.py +128 -0
- scripts/setup_agent_runtime.py +142 -0
- worker_internal/__init__.py +1 -0
- worker_internal/planner/__init__.py +1 -0
- worker_internal/planner/activities.py +1499 -0
- worker_internal/planner/agent_tools.py +197 -0
- worker_internal/planner/event_models.py +148 -0
- worker_internal/planner/event_publisher.py +67 -0
- worker_internal/planner/models.py +199 -0
- worker_internal/planner/retry_logic.py +134 -0
- worker_internal/planner/worker.py +300 -0
- worker_internal/planner/workflows.py +970 -0
|
@@ -0,0 +1,1809 @@
|
|
|
1
|
+
"""
|
|
2
|
+
Jobs router for scheduled and webhook-triggered executions.
|
|
3
|
+
|
|
4
|
+
This router handles:
|
|
5
|
+
- CRUD operations for jobs
|
|
6
|
+
- Manual job triggering
|
|
7
|
+
- Webhook URL generation and triggering
|
|
8
|
+
- Cron schedule management with Temporal
|
|
9
|
+
- Job execution history
|
|
10
|
+
|
|
11
|
+
Uses SQLAlchemy ORM for all database operations.
|
|
12
|
+
"""
|
|
13
|
+
|
|
14
|
+
from fastapi import APIRouter, Depends, HTTPException, status, Request, Header
|
|
15
|
+
from typing import List, Optional
|
|
16
|
+
from datetime import datetime, timezone, timedelta
|
|
17
|
+
import structlog
|
|
18
|
+
import uuid as uuid_module
|
|
19
|
+
import hmac
|
|
20
|
+
import hashlib
|
|
21
|
+
import secrets
|
|
22
|
+
import json
|
|
23
|
+
|
|
24
|
+
from sqlalchemy.orm import Session
|
|
25
|
+
from sqlalchemy import desc
|
|
26
|
+
|
|
27
|
+
from control_plane_api.app.middleware.auth import get_current_organization
|
|
28
|
+
from control_plane_api.app.database import get_db
|
|
29
|
+
from control_plane_api.app.lib.sqlalchemy_utils import model_to_dict
|
|
30
|
+
from control_plane_api.app.lib.temporal_client import get_temporal_client
|
|
31
|
+
from control_plane_api.app.lib.job_executor import select_worker_queue, substitute_prompt_parameters
|
|
32
|
+
from control_plane_api.app.workflows.agent_execution import AgentExecutionWorkflow
|
|
33
|
+
from control_plane_api.app.workflows.team_execution import TeamExecutionWorkflow
|
|
34
|
+
from control_plane_api.app.routers.executions import validate_job_exists
|
|
35
|
+
from control_plane_api.app.routers.execution_environment import resolve_agent_execution_environment_internal
|
|
36
|
+
from control_plane_api.app.schemas.job_schemas import (
|
|
37
|
+
JobCreate,
|
|
38
|
+
JobUpdate,
|
|
39
|
+
JobResponse,
|
|
40
|
+
JobTriggerRequest,
|
|
41
|
+
JobTriggerResponse,
|
|
42
|
+
JobExecutionHistoryResponse,
|
|
43
|
+
JobExecutionHistoryItem,
|
|
44
|
+
WebhookPayload,
|
|
45
|
+
ExecutionEnvironment,
|
|
46
|
+
)
|
|
47
|
+
from control_plane_api.app.models.job import Job, JobExecution
|
|
48
|
+
from control_plane_api.app.models.execution import Execution
|
|
49
|
+
from control_plane_api.app.models.agent import Agent
|
|
50
|
+
from control_plane_api.app.models.team import Team
|
|
51
|
+
from control_plane_api.app.observability import (
|
|
52
|
+
instrument_endpoint,
|
|
53
|
+
create_span_with_context,
|
|
54
|
+
add_span_event,
|
|
55
|
+
add_span_error,
|
|
56
|
+
)
|
|
57
|
+
from temporalio.client import Schedule, ScheduleActionStartWorkflow, ScheduleSpec, ScheduleIntervalSpec, SchedulePolicy, ScheduleOverlapPolicy
|
|
58
|
+
from croniter import croniter
|
|
59
|
+
|
|
60
|
+
logger = structlog.get_logger()
|
|
61
|
+
|
|
62
|
+
router = APIRouter()
|
|
63
|
+
|
|
64
|
+
|
|
65
|
+
def generate_webhook_secret() -> str:
|
|
66
|
+
"""Generate a secure random webhook secret"""
|
|
67
|
+
return secrets.token_urlsafe(32)
|
|
68
|
+
|
|
69
|
+
|
|
70
|
+
def generate_webhook_path() -> str:
|
|
71
|
+
"""Generate a unique webhook URL path"""
|
|
72
|
+
return secrets.token_urlsafe(16)
|
|
73
|
+
|
|
74
|
+
|
|
75
|
+
async def start_job_execution(
|
|
76
|
+
job: dict,
|
|
77
|
+
organization_id: str,
|
|
78
|
+
trigger_type: str,
|
|
79
|
+
trigger_metadata: dict,
|
|
80
|
+
db: Session,
|
|
81
|
+
token: str,
|
|
82
|
+
parameters: Optional[dict] = None,
|
|
83
|
+
) -> tuple[str, str]:
|
|
84
|
+
"""
|
|
85
|
+
Start a job execution by directly triggering the appropriate workflow.
|
|
86
|
+
|
|
87
|
+
Args:
|
|
88
|
+
job: Job data as dict
|
|
89
|
+
organization_id: Organization ID
|
|
90
|
+
trigger_type: Type of trigger (manual, cron, webhook)
|
|
91
|
+
trigger_metadata: Metadata about the trigger
|
|
92
|
+
db: SQLAlchemy database session
|
|
93
|
+
token: Authentication token for fetching Temporal credentials
|
|
94
|
+
parameters: Optional parameters for prompt substitution
|
|
95
|
+
|
|
96
|
+
Returns:
|
|
97
|
+
Tuple of (workflow_id, execution_id)
|
|
98
|
+
"""
|
|
99
|
+
# Get org-specific Temporal credentials and client
|
|
100
|
+
from control_plane_api.app.lib.temporal_credentials_service import get_temporal_credentials_for_org
|
|
101
|
+
from control_plane_api.app.lib.temporal_client import get_temporal_client_for_org
|
|
102
|
+
|
|
103
|
+
temporal_credentials = await get_temporal_credentials_for_org(
|
|
104
|
+
org_id=organization_id,
|
|
105
|
+
token=token,
|
|
106
|
+
use_fallback=True # Enable fallback during migration
|
|
107
|
+
)
|
|
108
|
+
|
|
109
|
+
temporal_client = await get_temporal_client_for_org(
|
|
110
|
+
namespace=temporal_credentials["namespace"],
|
|
111
|
+
api_key=temporal_credentials["api_key"],
|
|
112
|
+
host=temporal_credentials["host"],
|
|
113
|
+
)
|
|
114
|
+
|
|
115
|
+
planning_mode = job.get("planning_mode")
|
|
116
|
+
entity_type = job.get("entity_type")
|
|
117
|
+
entity_id = job.get("entity_id")
|
|
118
|
+
|
|
119
|
+
# Get the appropriate worker queue based on job configuration
|
|
120
|
+
worker_queue_name, _ = await select_worker_queue(
|
|
121
|
+
organization_id=organization_id,
|
|
122
|
+
executor_type=job.get("executor_type", "auto"),
|
|
123
|
+
worker_queue_name=job.get("worker_queue_name"),
|
|
124
|
+
environment_name=job.get("environment_name"),
|
|
125
|
+
)
|
|
126
|
+
|
|
127
|
+
if not worker_queue_name:
|
|
128
|
+
raise ValueError("No workers are currently running for your organization. Please start a worker to execute jobs.")
|
|
129
|
+
|
|
130
|
+
# Extract runner_name from worker_queue_name (format: "org_id.runner_name")
|
|
131
|
+
runner_name = worker_queue_name.split(".")[-1] if "." in worker_queue_name else worker_queue_name
|
|
132
|
+
|
|
133
|
+
# Get entity name for display
|
|
134
|
+
entity_name = job.get("entity_name")
|
|
135
|
+
if not entity_name and entity_id and entity_type:
|
|
136
|
+
# Try to get entity name from database using SQLAlchemy
|
|
137
|
+
try:
|
|
138
|
+
if entity_type == "agent":
|
|
139
|
+
entity_obj = db.query(Agent).filter(Agent.id == entity_id).first()
|
|
140
|
+
elif entity_type == "team":
|
|
141
|
+
entity_obj = db.query(Team).filter(Team.id == entity_id).first()
|
|
142
|
+
else:
|
|
143
|
+
entity_obj = None
|
|
144
|
+
if entity_obj:
|
|
145
|
+
entity_name = entity_obj.name
|
|
146
|
+
except Exception as e:
|
|
147
|
+
logger.warning("failed_to_get_entity_name", entity_type=entity_type, entity_id=entity_id, error=str(e))
|
|
148
|
+
|
|
149
|
+
# Substitute parameters in prompt template
|
|
150
|
+
prompt = job.get("prompt_template", "")
|
|
151
|
+
if parameters:
|
|
152
|
+
prompt = substitute_prompt_parameters(prompt, parameters)
|
|
153
|
+
|
|
154
|
+
# For webhook triggers, append webhook context to the prompt
|
|
155
|
+
if trigger_type == "webhook" and (parameters or trigger_metadata.get("metadata")):
|
|
156
|
+
webhook_context = "\n\n---\nWebhook Context:\n"
|
|
157
|
+
if parameters:
|
|
158
|
+
webhook_context += f"Parameters: {json.dumps(parameters, indent=2)}\n"
|
|
159
|
+
if trigger_metadata.get("metadata"):
|
|
160
|
+
webhook_context += f"Metadata: {json.dumps(trigger_metadata.get('metadata'), indent=2)}\n"
|
|
161
|
+
prompt = prompt + webhook_context
|
|
162
|
+
|
|
163
|
+
# Generate execution ID
|
|
164
|
+
execution_id = str(uuid_module.uuid4())
|
|
165
|
+
execution_uuid = uuid_module.UUID(execution_id)
|
|
166
|
+
|
|
167
|
+
# Determine execution_type based on entity_type
|
|
168
|
+
execution_type_value = entity_type.upper() if entity_type else "AGENT"
|
|
169
|
+
|
|
170
|
+
# Map trigger_type to trigger_source
|
|
171
|
+
trigger_source_map = {
|
|
172
|
+
"manual": "job_manual",
|
|
173
|
+
"cron": "job_cron",
|
|
174
|
+
"webhook": "job_webhook",
|
|
175
|
+
}
|
|
176
|
+
trigger_source = trigger_source_map.get(trigger_type, "job_manual")
|
|
177
|
+
|
|
178
|
+
now = datetime.now(timezone.utc)
|
|
179
|
+
|
|
180
|
+
# Create placeholder execution record using SQLAlchemy
|
|
181
|
+
execution = Execution(
|
|
182
|
+
id=execution_uuid,
|
|
183
|
+
organization_id=organization_id,
|
|
184
|
+
execution_type=execution_type_value,
|
|
185
|
+
entity_id=uuid_module.UUID(entity_id) if entity_id else None,
|
|
186
|
+
entity_name=entity_name,
|
|
187
|
+
runner_name=runner_name,
|
|
188
|
+
trigger_source=trigger_source,
|
|
189
|
+
trigger_metadata={
|
|
190
|
+
"job_id": job["id"],
|
|
191
|
+
"job_name": job.get("name"),
|
|
192
|
+
"trigger_type": trigger_type,
|
|
193
|
+
**trigger_metadata,
|
|
194
|
+
},
|
|
195
|
+
user_id=trigger_metadata.get("user_id"),
|
|
196
|
+
user_email=trigger_metadata.get("triggered_by") or trigger_metadata.get("user_email"),
|
|
197
|
+
user_name=trigger_metadata.get("user_name"),
|
|
198
|
+
user_avatar=trigger_metadata.get("user_avatar"),
|
|
199
|
+
status="pending",
|
|
200
|
+
prompt=prompt if parameters else job.get("prompt_template", ""),
|
|
201
|
+
execution_metadata={
|
|
202
|
+
"job_id": job["id"],
|
|
203
|
+
"job_name": job.get("name"),
|
|
204
|
+
"trigger_type": trigger_type,
|
|
205
|
+
**trigger_metadata,
|
|
206
|
+
},
|
|
207
|
+
created_at=now,
|
|
208
|
+
updated_at=now,
|
|
209
|
+
)
|
|
210
|
+
|
|
211
|
+
db.add(execution)
|
|
212
|
+
db.commit()
|
|
213
|
+
|
|
214
|
+
logger.info(
|
|
215
|
+
"created_placeholder_execution",
|
|
216
|
+
execution_id=execution_id,
|
|
217
|
+
job_id=job["id"],
|
|
218
|
+
organization_id=organization_id,
|
|
219
|
+
)
|
|
220
|
+
|
|
221
|
+
# VALIDATION: Verify job still exists before creating junction record
|
|
222
|
+
# This prevents foreign key constraint violations if job was deleted
|
|
223
|
+
try:
|
|
224
|
+
await validate_job_exists(
|
|
225
|
+
db=db,
|
|
226
|
+
job_id=job["id"],
|
|
227
|
+
organization_id=organization_id,
|
|
228
|
+
logger_context={
|
|
229
|
+
"execution_id": execution_id,
|
|
230
|
+
"trigger_type": trigger_type,
|
|
231
|
+
"source": "start_job_execution",
|
|
232
|
+
}
|
|
233
|
+
)
|
|
234
|
+
except HTTPException as validation_error:
|
|
235
|
+
logger.error(
|
|
236
|
+
"job_validation_failed_during_execution_start",
|
|
237
|
+
job_id=job["id"],
|
|
238
|
+
execution_id=execution_id,
|
|
239
|
+
error_code=validation_error.status_code,
|
|
240
|
+
error_detail=validation_error.detail,
|
|
241
|
+
)
|
|
242
|
+
# Clean up the execution record we just created
|
|
243
|
+
db.query(Execution).filter(Execution.id == execution_uuid).delete()
|
|
244
|
+
db.commit()
|
|
245
|
+
logger.info("cleaned_up_orphaned_execution", execution_id=execution_id)
|
|
246
|
+
raise
|
|
247
|
+
|
|
248
|
+
# Create job_executions junction record to track this execution was triggered by a job
|
|
249
|
+
job_execution = JobExecution(
|
|
250
|
+
id=f"jobexec_{uuid_module.uuid4()}",
|
|
251
|
+
job_id=job["id"],
|
|
252
|
+
execution_id=execution_uuid,
|
|
253
|
+
organization_id=organization_id,
|
|
254
|
+
trigger_type=trigger_type,
|
|
255
|
+
trigger_metadata=trigger_metadata,
|
|
256
|
+
execution_status="pending",
|
|
257
|
+
created_at=now,
|
|
258
|
+
)
|
|
259
|
+
|
|
260
|
+
try:
|
|
261
|
+
db.add(job_execution)
|
|
262
|
+
db.commit()
|
|
263
|
+
logger.info(
|
|
264
|
+
"job_execution_junction_created",
|
|
265
|
+
job_id=job["id"],
|
|
266
|
+
execution_id=execution_id,
|
|
267
|
+
)
|
|
268
|
+
except Exception as e:
|
|
269
|
+
logger.error(
|
|
270
|
+
"failed_to_create_job_execution_junction",
|
|
271
|
+
error=str(e),
|
|
272
|
+
execution_id=execution_id,
|
|
273
|
+
job_id=job["id"],
|
|
274
|
+
)
|
|
275
|
+
db.rollback()
|
|
276
|
+
# Clean up the execution record if junction record creation fails
|
|
277
|
+
db.query(Execution).filter(Execution.id == execution_uuid).delete()
|
|
278
|
+
db.commit()
|
|
279
|
+
logger.info("cleaned_up_orphaned_execution_after_junction_failure", execution_id=execution_id)
|
|
280
|
+
raise HTTPException(
|
|
281
|
+
status_code=status.HTTP_500_INTERNAL_SERVER_ERROR,
|
|
282
|
+
detail=f"Failed to create job execution record: {str(e)}"
|
|
283
|
+
)
|
|
284
|
+
|
|
285
|
+
# Prepare workflow input based on entity type
|
|
286
|
+
workflow_class = None
|
|
287
|
+
workflow_input = None
|
|
288
|
+
|
|
289
|
+
if planning_mode == "predefined_agent" and entity_type == "agent":
|
|
290
|
+
# Start AgentExecutionWorkflow
|
|
291
|
+
workflow_class = AgentExecutionWorkflow
|
|
292
|
+
|
|
293
|
+
# Get agent details using SQLAlchemy
|
|
294
|
+
agent_obj = db.query(Agent).filter(Agent.id == entity_id).first()
|
|
295
|
+
if not agent_obj:
|
|
296
|
+
raise ValueError(f"Agent {entity_id} not found")
|
|
297
|
+
|
|
298
|
+
agent = model_to_dict(agent_obj)
|
|
299
|
+
agent_config = agent.get("configuration", {}) or {}
|
|
300
|
+
|
|
301
|
+
# Resolve execution environment properly (same as regular agent executions)
|
|
302
|
+
# Token is None for job executions (no user context)
|
|
303
|
+
try:
|
|
304
|
+
resolved_env = await resolve_agent_execution_environment_internal(
|
|
305
|
+
agent_id=entity_id,
|
|
306
|
+
org_id=organization_id,
|
|
307
|
+
db=db,
|
|
308
|
+
token=None # No user token for job executions
|
|
309
|
+
)
|
|
310
|
+
except Exception as e:
|
|
311
|
+
logger.error(
|
|
312
|
+
"failed_to_resolve_execution_environment_for_job",
|
|
313
|
+
agent_id=entity_id,
|
|
314
|
+
job_id=job["id"],
|
|
315
|
+
error=str(e)
|
|
316
|
+
)
|
|
317
|
+
# Fallback to empty if resolution fails
|
|
318
|
+
resolved_env = {"mcp_servers": {}}
|
|
319
|
+
|
|
320
|
+
workflow_input = {
|
|
321
|
+
"execution_id": execution_id,
|
|
322
|
+
"agent_id": entity_id,
|
|
323
|
+
"organization_id": organization_id,
|
|
324
|
+
"prompt": prompt,
|
|
325
|
+
"system_prompt": job.get("system_prompt") or agent_config.get("system_prompt"),
|
|
326
|
+
"model_id": agent.get("model_id"),
|
|
327
|
+
"model_config": agent.get("model_config", {}) or {},
|
|
328
|
+
"agent_config": {**agent_config, **(job.get("config", {}) or {})},
|
|
329
|
+
"mcp_servers": resolved_env.get("mcp_servers", {}),
|
|
330
|
+
"user_metadata": {
|
|
331
|
+
"job_id": job["id"],
|
|
332
|
+
"job_name": job.get("name"),
|
|
333
|
+
"trigger_type": trigger_type,
|
|
334
|
+
**trigger_metadata,
|
|
335
|
+
},
|
|
336
|
+
"runtime_type": agent.get("runtime") or agent_config.get("runtime") or "default",
|
|
337
|
+
}
|
|
338
|
+
|
|
339
|
+
elif planning_mode == "predefined_team" and entity_type == "team":
|
|
340
|
+
# Start TeamExecutionWorkflow
|
|
341
|
+
workflow_class = TeamExecutionWorkflow
|
|
342
|
+
|
|
343
|
+
# Get team details using SQLAlchemy
|
|
344
|
+
team_obj = db.query(Team).filter(Team.id == entity_id).first()
|
|
345
|
+
team = model_to_dict(team_obj) if team_obj else {}
|
|
346
|
+
team_config = team.get("configuration", {}) or {}
|
|
347
|
+
|
|
348
|
+
workflow_input = {
|
|
349
|
+
"execution_id": execution_id,
|
|
350
|
+
"team_id": entity_id,
|
|
351
|
+
"organization_id": organization_id,
|
|
352
|
+
"prompt": prompt,
|
|
353
|
+
"system_prompt": job.get("system_prompt"),
|
|
354
|
+
"config": job.get("config", {}) or {},
|
|
355
|
+
"user_metadata": {
|
|
356
|
+
"job_id": job["id"],
|
|
357
|
+
"job_name": job.get("name"),
|
|
358
|
+
"trigger_type": trigger_type,
|
|
359
|
+
**trigger_metadata,
|
|
360
|
+
},
|
|
361
|
+
"runtime_type": team.get("runtime") or team_config.get("runtime") or "default",
|
|
362
|
+
}
|
|
363
|
+
else:
|
|
364
|
+
raise ValueError(f"Unsupported planning_mode '{planning_mode}' or entity_type '{entity_type}'")
|
|
365
|
+
|
|
366
|
+
# Start the workflow
|
|
367
|
+
# Use standard workflow ID format for consistency with direct agent/team executions
|
|
368
|
+
if entity_type == "agent":
|
|
369
|
+
workflow_id = f"agent-execution-{execution_id}"
|
|
370
|
+
elif entity_type == "team":
|
|
371
|
+
workflow_id = f"team-execution-{execution_id}"
|
|
372
|
+
else:
|
|
373
|
+
# Fallback for other entity types
|
|
374
|
+
workflow_id = f"job-{job['id']}-{trigger_type}-{uuid_module.uuid4()}"
|
|
375
|
+
|
|
376
|
+
await temporal_client.start_workflow(
|
|
377
|
+
workflow_class.run,
|
|
378
|
+
workflow_input,
|
|
379
|
+
id=workflow_id,
|
|
380
|
+
task_queue=worker_queue_name,
|
|
381
|
+
)
|
|
382
|
+
|
|
383
|
+
logger.info(
|
|
384
|
+
"job_execution_started",
|
|
385
|
+
job_id=job["id"],
|
|
386
|
+
workflow_id=workflow_id,
|
|
387
|
+
execution_id=execution_id,
|
|
388
|
+
trigger_type=trigger_type,
|
|
389
|
+
workflow_name=workflow_class.__name__,
|
|
390
|
+
worker_queue=worker_queue_name,
|
|
391
|
+
)
|
|
392
|
+
|
|
393
|
+
return workflow_id, execution_id
|
|
394
|
+
|
|
395
|
+
|
|
396
|
+
def verify_webhook_signature(payload: bytes, signature: str, secret: str) -> bool:
|
|
397
|
+
"""
|
|
398
|
+
Verify HMAC signature for webhook payload.
|
|
399
|
+
|
|
400
|
+
Args:
|
|
401
|
+
payload: Raw request body bytes
|
|
402
|
+
signature: Signature from X-Webhook-Signature header
|
|
403
|
+
secret: Webhook secret from database
|
|
404
|
+
|
|
405
|
+
Returns:
|
|
406
|
+
True if signature is valid
|
|
407
|
+
"""
|
|
408
|
+
expected_signature = hmac.new(
|
|
409
|
+
secret.encode(),
|
|
410
|
+
payload,
|
|
411
|
+
hashlib.sha256
|
|
412
|
+
).hexdigest()
|
|
413
|
+
return hmac.compare_digest(signature, expected_signature)
|
|
414
|
+
|
|
415
|
+
|
|
416
|
+
async def create_temporal_schedule(
|
|
417
|
+
job_id: str,
|
|
418
|
+
organization_id: str,
|
|
419
|
+
job_data: dict,
|
|
420
|
+
cron_schedule: str,
|
|
421
|
+
cron_timezone: str,
|
|
422
|
+
db: Session,
|
|
423
|
+
token: str = "",
|
|
424
|
+
) -> str:
|
|
425
|
+
"""
|
|
426
|
+
Create Temporal Schedule for cron-based job.
|
|
427
|
+
|
|
428
|
+
The schedule directly triggers AgentExecutionWorkflow or TeamExecutionWorkflow
|
|
429
|
+
based on the job's planning_mode and entity configuration.
|
|
430
|
+
|
|
431
|
+
Args:
|
|
432
|
+
job_id: Job ID
|
|
433
|
+
organization_id: Organization ID
|
|
434
|
+
job_data: Complete job data including entity info, prompt, config
|
|
435
|
+
cron_schedule: Cron expression
|
|
436
|
+
cron_timezone: Timezone for schedule
|
|
437
|
+
db: SQLAlchemy database session
|
|
438
|
+
token: Authentication token for fetching Temporal credentials (defaults to empty/fallback)
|
|
439
|
+
|
|
440
|
+
Returns:
|
|
441
|
+
Temporal Schedule ID
|
|
442
|
+
"""
|
|
443
|
+
# Get org-specific Temporal credentials and client
|
|
444
|
+
from control_plane_api.app.lib.temporal_credentials_service import get_temporal_credentials_for_org
|
|
445
|
+
from control_plane_api.app.lib.temporal_client import get_temporal_client_for_org
|
|
446
|
+
|
|
447
|
+
temporal_credentials = await get_temporal_credentials_for_org(
|
|
448
|
+
org_id=organization_id,
|
|
449
|
+
token=token,
|
|
450
|
+
use_fallback=True # Enable fallback for schedule operations
|
|
451
|
+
)
|
|
452
|
+
|
|
453
|
+
client = await get_temporal_client_for_org(
|
|
454
|
+
namespace=temporal_credentials["namespace"],
|
|
455
|
+
api_key=temporal_credentials["api_key"],
|
|
456
|
+
host=temporal_credentials["host"],
|
|
457
|
+
)
|
|
458
|
+
schedule_id = f"job-{job_id}"
|
|
459
|
+
|
|
460
|
+
try:
|
|
461
|
+
# Determine execution type from planning_mode
|
|
462
|
+
planning_mode = job_data.get("planning_mode")
|
|
463
|
+
entity_type = job_data.get("entity_type")
|
|
464
|
+
entity_id = job_data.get("entity_id")
|
|
465
|
+
|
|
466
|
+
# Get the appropriate worker queue based on job configuration
|
|
467
|
+
executor_type = job_data.get("executor_type", "auto")
|
|
468
|
+
requested_queue = job_data.get("worker_queue_name")
|
|
469
|
+
requested_env = job_data.get("environment_name")
|
|
470
|
+
|
|
471
|
+
logger.info(
|
|
472
|
+
"resolving_worker_queue_for_job",
|
|
473
|
+
job_id=job_id,
|
|
474
|
+
executor_type=executor_type,
|
|
475
|
+
requested_queue=requested_queue,
|
|
476
|
+
requested_env=requested_env,
|
|
477
|
+
)
|
|
478
|
+
|
|
479
|
+
worker_queue_name, _ = await select_worker_queue(
|
|
480
|
+
organization_id=organization_id,
|
|
481
|
+
executor_type=executor_type,
|
|
482
|
+
worker_queue_name=requested_queue,
|
|
483
|
+
environment_name=requested_env,
|
|
484
|
+
)
|
|
485
|
+
|
|
486
|
+
if not worker_queue_name:
|
|
487
|
+
# Provide detailed error message based on executor type
|
|
488
|
+
if executor_type == "specific_queue":
|
|
489
|
+
error_detail = (
|
|
490
|
+
f"Requested worker queue '{requested_queue}' has no active workers. "
|
|
491
|
+
f"Please start workers on this queue before creating the job."
|
|
492
|
+
)
|
|
493
|
+
elif executor_type == "environment" and requested_env:
|
|
494
|
+
error_detail = (
|
|
495
|
+
f"No active workers found in environment '{requested_env}'. "
|
|
496
|
+
f"Please start workers in this environment before creating the job."
|
|
497
|
+
)
|
|
498
|
+
else:
|
|
499
|
+
error_detail = (
|
|
500
|
+
f"No workers are currently running in your organization. "
|
|
501
|
+
f"Please start at least one worker before creating scheduled jobs."
|
|
502
|
+
)
|
|
503
|
+
|
|
504
|
+
logger.error(
|
|
505
|
+
"no_workers_available_for_job",
|
|
506
|
+
job_id=job_id,
|
|
507
|
+
executor_type=executor_type,
|
|
508
|
+
requested_queue=requested_queue,
|
|
509
|
+
requested_env=requested_env,
|
|
510
|
+
)
|
|
511
|
+
raise ValueError(error_detail)
|
|
512
|
+
|
|
513
|
+
logger.info(
|
|
514
|
+
"resolved_worker_queue_for_cron_job",
|
|
515
|
+
job_id=job_id,
|
|
516
|
+
worker_queue=worker_queue_name,
|
|
517
|
+
planning_mode=planning_mode,
|
|
518
|
+
entity_type=entity_type,
|
|
519
|
+
)
|
|
520
|
+
|
|
521
|
+
# Prepare workflow input based on entity type
|
|
522
|
+
# Use ScheduledJobWrapperWorkflow which handles execution_id generation
|
|
523
|
+
workflow_name = "ScheduledJobWrapperWorkflow"
|
|
524
|
+
workflow_input = None
|
|
525
|
+
|
|
526
|
+
if planning_mode == "predefined_agent" and entity_type == "agent":
|
|
527
|
+
# Get agent details using SQLAlchemy
|
|
528
|
+
agent_obj = db.query(Agent).filter(Agent.id == entity_id).first()
|
|
529
|
+
if not agent_obj:
|
|
530
|
+
raise ValueError(f"Agent {entity_id} not found")
|
|
531
|
+
|
|
532
|
+
agent = model_to_dict(agent_obj)
|
|
533
|
+
agent_config = agent.get("configuration", {}) or {}
|
|
534
|
+
|
|
535
|
+
# Resolve execution environment properly (same as regular agent executions)
|
|
536
|
+
# Token is None for cron job schedules (no user context)
|
|
537
|
+
try:
|
|
538
|
+
resolved_env = await resolve_agent_execution_environment_internal(
|
|
539
|
+
agent_id=entity_id,
|
|
540
|
+
org_id=organization_id,
|
|
541
|
+
db=db,
|
|
542
|
+
token=None # No user token for cron schedules
|
|
543
|
+
)
|
|
544
|
+
except Exception as e:
|
|
545
|
+
logger.error(
|
|
546
|
+
"failed_to_resolve_execution_environment_for_cron_job",
|
|
547
|
+
agent_id=entity_id,
|
|
548
|
+
job_id=job_id,
|
|
549
|
+
error=str(e)
|
|
550
|
+
)
|
|
551
|
+
# Fallback to empty if resolution fails
            resolved_env = {"mcp_servers": {}}

            workflow_input = {
                "execution_id": None, # Will be generated by wrapper workflow
                "agent_id": entity_id,
                "organization_id": organization_id,
                "prompt": job_data.get("prompt_template", ""),
                "system_prompt": job_data.get("system_prompt") or agent_config.get("system_prompt"),
                "model_id": agent.get("model_id"),
                "model_config": agent.get("model_config", {}) or {},
                "agent_config": {**agent_config, **(job_data.get("config", {}) or {})},
                "mcp_servers": resolved_env.get("mcp_servers", {}),
                "user_metadata": {
                    "job_id": job_id,
                    "job_name": job_data.get("name"),
                    "trigger_type": "cron",
                    "user_id": job_data.get("created_by"),
                    "user_email": job_data.get("created_by_email"),
                    "user_name": job_data.get("created_by_name"),
                },
                "runtime_type": agent.get("runtime") or agent_config.get("runtime") or "default",
            }

        elif planning_mode == "predefined_team" and entity_type == "team":
            # Get team details using SQLAlchemy
            team_obj = db.query(Team).filter(Team.id == entity_id).first()
            team = model_to_dict(team_obj) if team_obj else {}
            team_config = team.get("configuration", {}) or {}

            workflow_input = {
                "execution_id": None, # Will be generated by wrapper workflow
                "team_id": entity_id,
                "organization_id": organization_id,
                "prompt": job_data.get("prompt_template", ""),
                "system_prompt": job_data.get("system_prompt"),
                "model_config": {},
                "team_config": {**team_config, **(job_data.get("config", {}) or {})},
                "mcp_servers": {},
                "user_metadata": {
                    "job_id": job_id,
                    "job_name": job_data.get("name"),
                    "trigger_type": "cron",
                    "user_id": job_data.get("created_by"),
                    "user_email": job_data.get("created_by_email"),
                    "user_name": job_data.get("created_by_name"),
                },
                "runtime_type": team.get("runtime") or team_config.get("runtime") or "default",
            }
        else:
            raise ValueError(f"Unsupported planning_mode '{planning_mode}' or entity_type '{entity_type}' for cron jobs")

        # Create schedule action
        action = ScheduleActionStartWorkflow(
            workflow_name,
            workflow_input,
            id=f"job-{job_id}-{{{{SCHEDULE_ID}}}}",
            task_queue=worker_queue_name,
        )

        # Parse cron expression for schedule spec
        # Temporal accepts standard 5-field cron format: minute hour day month day_of_week
        # No need to add seconds field - Temporal handles it automatically
        temporal_cron = cron_schedule

        schedule_spec = ScheduleSpec(
            cron_expressions=[temporal_cron],
            time_zone_name=cron_timezone,
        )

        # Create schedule with enhanced error handling
        try:
            logger.info(
                "creating_temporal_schedule",
                schedule_id=schedule_id,
                workflow_name=workflow_name,
                worker_queue=worker_queue_name,
                cron_expression=temporal_cron,
                timezone=cron_timezone,
                job_id=job_id,
            )

            await client.create_schedule(
                schedule_id,
                Schedule(
                    action=action,
                    spec=schedule_spec,
                    policy=SchedulePolicy(
                        overlap=ScheduleOverlapPolicy.ALLOW_ALL,
                        catchup_window=timedelta(seconds=60), # Only catch up for recent misses
                    ),
                ),
            )

            logger.info(
                "temporal_schedule_created_successfully",
                schedule_id=schedule_id,
                job_id=job_id,
                cron_schedule=cron_schedule,
                worker_queue=worker_queue_name,
            )

            return schedule_id

        except Exception as temporal_error:
            # Enhanced error reporting for Temporal schedule creation failures
            error_msg = str(temporal_error)
            error_type = type(temporal_error).__name__

            logger.error(
                "temporal_schedule_creation_failed",
                error=error_msg,
                error_type=error_type,
                schedule_id=schedule_id,
                job_id=job_id,
                worker_queue=worker_queue_name,
                workflow_name=workflow_name,
                cron_expression=temporal_cron,
            )

            # Provide actionable error messages
            if "connection" in error_msg.lower() or "unavailable" in error_msg.lower():
                detail = (
                    f"Cannot connect to Temporal server. "
                    f"Please verify TEMPORAL_HOST and TEMPORAL_NAMESPACE are correctly configured. "
                    f"Error: {error_msg}"
                )
            elif "permission" in error_msg.lower() or "unauthorized" in error_msg.lower():
                detail = (
                    f"Insufficient permissions to create Temporal schedule. "
                    f"Please verify Temporal API key or certificate authentication. "
                    f"Error: {error_msg}"
                )
            elif "already exists" in error_msg.lower() or "duplicate" in error_msg.lower():
                detail = (
                    f"Schedule '{schedule_id}' already exists. "
                    f"Please check if this job was already scheduled. "
                    f"Error: {error_msg}"
                )
            elif "invalid cron" in error_msg.lower():
                detail = (
                    f"Invalid cron expression '{cron_schedule}'. "
                    f"Please verify the cron format is correct. "
                    f"Error: {error_msg}"
                )
            else:
                detail = f"Failed to create Temporal schedule: {error_msg}"

            raise HTTPException(
                status_code=status.HTTP_500_INTERNAL_SERVER_ERROR,
                detail=detail
            )

    except Exception as e:
        logger.error(
            "failed_to_create_temporal_schedule",
            error=str(e),
            job_id=job_id,
            cron_schedule=cron_schedule,
        )
        raise HTTPException(
            status_code=status.HTTP_500_INTERNAL_SERVER_ERROR,
            detail=f"Failed to create Temporal schedule: {str(e)}"
        )

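The schedule above passes the job's cron expression straight through to Temporal's ScheduleSpec, so it must be a standard 5-field expression (minute hour day month day_of_week) interpreted in cron_timezone. A minimal sketch of previewing upcoming fire times with croniter, the same library the create_job handler below uses to populate next_execution_at; the expression is an illustrative example, not anything shipped with the package:

    from datetime import datetime, timezone
    from croniter import croniter

    cron_expression = "0 9 * * 1-5"  # example: 09:00 UTC on weekdays
    it = croniter(cron_expression, datetime.now(timezone.utc))
    for _ in range(3):
        print(it.get_next(datetime))  # next three scheduled run times
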
async def delete_temporal_schedule(schedule_id: str) -> None:
    """Delete Temporal Schedule"""
    client = await get_temporal_client()

    try:
        handle = client.get_schedule_handle(schedule_id)
        await handle.delete()

        logger.info("temporal_schedule_deleted", schedule_id=schedule_id)

    except Exception as e:
        logger.error(
            "failed_to_delete_temporal_schedule",
            error=str(e),
            schedule_id=schedule_id,
        )
        # Don't raise - schedule might not exist


async def pause_temporal_schedule(schedule_id: str) -> None:
    """Pause Temporal Schedule"""
    client = await get_temporal_client()

    try:
        handle = client.get_schedule_handle(schedule_id)
        await handle.pause()

        logger.info("temporal_schedule_paused", schedule_id=schedule_id)

    except Exception as e:
        logger.error(
            "failed_to_pause_temporal_schedule",
            error=str(e),
            schedule_id=schedule_id,
        )
        raise


async def unpause_temporal_schedule(schedule_id: str) -> None:
    """Unpause Temporal Schedule"""
    client = await get_temporal_client()

    try:
        handle = client.get_schedule_handle(schedule_id)
        await handle.unpause()

        logger.info("temporal_schedule_unpaused", schedule_id=schedule_id)

    except Exception as e:
        logger.error(
            "failed_to_unpause_temporal_schedule",
            error=str(e),
            schedule_id=schedule_id,
        )
        raise

@router.post("", response_model=JobResponse, status_code=status.HTTP_201_CREATED)
@instrument_endpoint("jobs.create_job")
async def create_job(
    job_data: JobCreate,
    request: Request,
    organization: dict = Depends(get_current_organization),
    db: Session = Depends(get_db),
):
    """
    Create a new job.

    Jobs can be triggered via:
    - Cron schedule (requires cron_schedule parameter)
    - Webhook (generates unique webhook URL)
    - Manual API trigger

    **Request Body:**
    - name: Job name
    - trigger_type: "cron", "webhook", or "manual"
    - cron_schedule: Cron expression (required for cron trigger)
    - planning_mode: "on_the_fly", "predefined_agent", "predefined_team", or "predefined_workflow"
    - entity_id: Entity ID (required for predefined modes)
    - prompt_template: Prompt template with {{variable}} placeholders
    - executor_type: "auto", "specific_queue", or "environment"
    """
    organization_id = organization["id"]

    logger.info(
        "creating_job",
        organization_id=organization_id,
        name=job_data.name,
        trigger_type=job_data.trigger_type,
    )

    try:
        job_id = f"job_{uuid_module.uuid4()}"
        now = datetime.now(timezone.utc)

        # Generate webhook URL if trigger_type is webhook
        webhook_url_path = None
        webhook_secret = None
        if job_data.trigger_type == "webhook":
            webhook_url_path = f"/api/v1/jobs/webhook/{generate_webhook_path()}"
            webhook_secret = generate_webhook_secret()

        # If entity_id is provided, fetch entity name using SQLAlchemy
        entity_name = None
        if job_data.entity_id and job_data.entity_type:
            try:
                if job_data.entity_type == "agent":
                    entity_obj = db.query(Agent).filter(
                        Agent.id == job_data.entity_id,
                        Agent.organization_id == organization_id
                    ).first()
                elif job_data.entity_type == "team":
                    entity_obj = db.query(Team).filter(
                        Team.id == job_data.entity_id,
                        Team.organization_id == organization_id
                    ).first()
                else:
                    entity_obj = None
                if entity_obj:
                    entity_name = entity_obj.name
            except Exception as e:
                logger.warning("failed_to_get_entity_name", error=str(e))

        # Create Job model instance
        job = Job(
            id=job_id,
            organization_id=organization_id,
            name=job_data.name,
            description=job_data.description,
            enabled=job_data.enabled,
            status="active" if job_data.enabled else "disabled",
            trigger_type=job_data.trigger_type,
            cron_schedule=job_data.cron_schedule,
            cron_timezone=job_data.cron_timezone or "UTC",
            webhook_url_path=webhook_url_path,
            webhook_secret=webhook_secret,
            temporal_schedule_id=None,
            planning_mode=job_data.planning_mode,
            entity_type=job_data.entity_type,
            entity_id=job_data.entity_id,
            entity_name=entity_name,
            prompt_template=job_data.prompt_template,
            system_prompt=job_data.system_prompt,
            executor_type=job_data.executor_type,
            worker_queue_name=job_data.worker_queue_name,
            environment_name=job_data.environment_name,
            config=job_data.config or {},
            execution_environment=job_data.execution_environment.model_dump() if job_data.execution_environment else {},
            total_executions=0,
            successful_executions=0,
            failed_executions=0,
            execution_history=[],
            last_execution_id=None,
            last_execution_at=None,
            next_execution_at=None,
            last_triggered_at=None,
            created_by=organization.get("user_id"),
            updated_by=None,
            created_at=now,
            updated_at=now,
        )

        # Create Temporal Schedule for cron jobs (need job_record dict for schedule creation)
        if job_data.trigger_type == "cron" and job_data.enabled:
            job_record = model_to_dict(job)
            job_record["created_by_email"] = organization.get("user_email")
            job_record["created_by_name"] = organization.get("user_name")

            temporal_schedule_id = await create_temporal_schedule(
                job_id=job_id,
                organization_id=organization_id,
                job_data=job_record,
                cron_schedule=job_data.cron_schedule,
                cron_timezone=job_data.cron_timezone or "UTC",
                db=db,
            )
            job.temporal_schedule_id = temporal_schedule_id

            # Calculate next execution time
            cron_iter = croniter(job_data.cron_schedule, datetime.now(timezone.utc))
            next_execution = cron_iter.get_next(datetime)
            job.next_execution_at = next_execution

        # Insert job into database
        db.add(job)
        db.commit()
        db.refresh(job)

        logger.info(
            "job_created",
            job_id=job_id,
            name=job_data.name,
            trigger_type=job_data.trigger_type,
        )

        # Build response
        response_data = model_to_dict(job)
        response_data["created_by_email"] = organization.get("user_email")
        response_data["created_by_name"] = organization.get("user_name")

        # Add full webhook URL to response
        if webhook_url_path:
            response_data["webhook_url"] = f"{str(request.base_url).rstrip('/')}{webhook_url_path}"

        return JobResponse(**response_data)

    except HTTPException:
        raise
    except Exception as e:
        db.rollback()
        logger.error(
            "failed_to_create_job",
            error=str(e),
            organization_id=organization_id,
        )
        raise HTTPException(
            status_code=status.HTTP_500_INTERNAL_SERVER_ERROR,
            detail=f"Failed to create job: {str(e)}"
        )

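A minimal sketch of calling this endpoint to register a cron-triggered job, assuming the router is mounted at /api/v1/jobs (consistent with the webhook_url_path the handler generates) and that the deployment accepts a standard Authorization bearer header; the host, token, agent id, and field values are illustrative placeholders rather than anything defined by the package:

    import httpx

    payload = {
        "name": "nightly-report",
        "trigger_type": "cron",
        "cron_schedule": "0 2 * * *",          # 02:00 daily, 5-field cron
        "cron_timezone": "UTC",
        "planning_mode": "predefined_agent",
        "entity_type": "agent",
        "entity_id": "<agent-id>",
        "prompt_template": "Generate the nightly report for {{date}}",
        "executor_type": "auto",
        "enabled": True,
    }
    resp = httpx.post(
        "https://<control-plane-host>/api/v1/jobs",
        json=payload,
        headers={"Authorization": "Bearer <token>"},
    )
    resp.raise_for_status()
    job = resp.json()
    print(job["id"], job.get("next_execution_at"))  # schedule is created before the job row is committed
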
@router.get("", response_model=List[JobResponse])
@instrument_endpoint("jobs.list_jobs")
async def list_jobs(
    request: Request,
    organization: dict = Depends(get_current_organization),
    db: Session = Depends(get_db),
    enabled: Optional[bool] = None,
    trigger_type: Optional[str] = None,
):
    """
    List all jobs for the organization.

    **Query Parameters:**
    - enabled: Filter by enabled status (true/false)
    - trigger_type: Filter by trigger type ("cron", "webhook", "manual")
    """
    organization_id = organization["id"]

    try:
        # Build query using SQLAlchemy
        query = db.query(Job).filter(Job.organization_id == organization_id)

        if enabled is not None:
            query = query.filter(Job.enabled == enabled)

        if trigger_type:
            query = query.filter(Job.trigger_type == trigger_type)

        job_objects = query.order_by(desc(Job.created_at)).all()

        # Build responses with full webhook URLs and enrich with user emails
        base_url = str(request.base_url).rstrip("/")

        # Collect unique user IDs
        user_ids = set()
        for job_obj in job_objects:
            if job_obj.created_by:
                user_ids.add(job_obj.created_by)
            if job_obj.updated_by:
                user_ids.add(job_obj.updated_by)

        # Fetch user details from Kubiya API
        user_emails = {}
        if user_ids:
            try:
                import httpx
                org_id = organization_id

                kubiya_url = "https://api.kubiya.ai/api/v2/users?limit=0&page=1&status=active"

                headers = {
                    "Accept": "application/json",
                    "X-Organization-ID": org_id,
                    "X-Kubiya-Client": "agentmesh-backend",
                }

                auth_header = request.headers.get("authorization")
                if auth_header:
                    headers["Authorization"] = auth_header

                async with httpx.AsyncClient() as http_client:
                    response = await http_client.get(kubiya_url, headers=headers, timeout=10.0)
                    if response.status_code == 200:
                        data = response.json()
                        users = data.get("items", [])
                        for user in users:
                            user_uuid = user.get("uuid") or user.get("_id") or user.get("id")
                            if user_uuid in user_ids:
                                user_emails[user_uuid] = user.get("email") or user.get("name") or user_uuid
                    else:
                        logger.warning("kubiya_api_users_fetch_failed", status_code=response.status_code)
            except Exception as e:
                logger.warning("failed_to_fetch_user_emails", error=str(e))

        jobs = []
        for job_obj in job_objects:
            job_data = model_to_dict(job_obj)
            if job_obj.webhook_url_path:
                job_data["webhook_url"] = f"{base_url}{job_obj.webhook_url_path}"

            # Enrich with user email if available
            if job_obj.created_by and job_obj.created_by in user_emails:
                job_data["created_by_email"] = user_emails[job_obj.created_by]
            if job_obj.updated_by and job_obj.updated_by in user_emails:
                job_data["updated_by_email"] = user_emails[job_obj.updated_by]

            jobs.append(JobResponse(**job_data))

        return jobs

    except Exception as e:
        logger.error(
            "failed_to_list_jobs",
            error=str(e),
            organization_id=organization_id,
        )
        raise HTTPException(
            status_code=status.HTTP_500_INTERNAL_SERVER_ERROR,
            detail=f"Failed to list jobs: {str(e)}"
        )

@router.get("/{job_id}", response_model=JobResponse)
@instrument_endpoint("jobs.get_job")
async def get_job(
    job_id: str,
    request: Request,
    organization: dict = Depends(get_current_organization),
    db: Session = Depends(get_db),
):
    """Get job details by ID"""
    organization_id = organization["id"]

    try:
        job_obj = db.query(Job).filter(
            Job.id == job_id,
            Job.organization_id == organization_id
        ).first()

        if not job_obj:
            raise HTTPException(
                status_code=status.HTTP_404_NOT_FOUND,
                detail=f"Job {job_id} not found"
            )

        job_data = model_to_dict(job_obj)

        # Add full webhook URL
        if job_obj.webhook_url_path:
            base_url = str(request.base_url).rstrip("/")
            job_data["webhook_url"] = f"{base_url}{job_obj.webhook_url_path}"

        return JobResponse(**job_data)

    except HTTPException:
        raise
    except Exception as e:
        logger.error(
            "failed_to_get_job",
            error=str(e),
            job_id=job_id,
        )
        raise HTTPException(
            status_code=status.HTTP_500_INTERNAL_SERVER_ERROR,
            detail=f"Failed to get job: {str(e)}"
        )

@router.patch("/{job_id}", response_model=JobResponse)
@instrument_endpoint("jobs.update_job")
async def update_job(
    job_id: str,
    job_data: JobUpdate,
    request: Request,
    organization: dict = Depends(get_current_organization),
    db: Session = Depends(get_db),
):
    """
    Update job configuration.

    **Note:** Updating cron_schedule will recreate the Temporal Schedule.
    """
    organization_id = organization["id"]

    try:
        # Fetch existing job using SQLAlchemy
        job_obj = db.query(Job).filter(
            Job.id == job_id,
            Job.organization_id == organization_id
        ).first()

        if not job_obj:
            raise HTTPException(
                status_code=status.HTTP_404_NOT_FOUND,
                detail=f"Job {job_id} not found"
            )

        existing_job = model_to_dict(job_obj)

        # Build update data
        update_data = {}
        for field, value in job_data.model_dump(exclude_unset=True).items():
            if value is not None:
                if field == "execution_environment" and isinstance(value, ExecutionEnvironment):
                    update_data[field] = value.model_dump()
                else:
                    update_data[field] = value

        if not update_data:
            raise HTTPException(
                status_code=status.HTTP_400_BAD_REQUEST,
                detail="No fields to update"
            )

        update_data["updated_by"] = organization.get("user_id")
        update_data["updated_at"] = datetime.now(timezone.utc)

        # If entity_id is being updated, fetch entity name using SQLAlchemy
        if "entity_id" in update_data:
            entity_type = update_data.get("entity_type", existing_job.get("entity_type"))
            entity_id = update_data["entity_id"]
            try:
                if entity_type == "agent":
                    entity_obj = db.query(Agent).filter(
                        Agent.id == entity_id,
                        Agent.organization_id == organization_id
                    ).first()
                elif entity_type == "team":
                    entity_obj = db.query(Team).filter(
                        Team.id == entity_id,
                        Team.organization_id == organization_id
                    ).first()
                else:
                    entity_obj = None
                if entity_obj:
                    update_data["entity_name"] = entity_obj.name
            except Exception as e:
                logger.warning("failed_to_get_entity_name_during_update", error=str(e))

        # Handle schedule updates - recreate if any workflow input fields change
        schedule_affecting_fields = {
            "cron_schedule", "cron_timezone", "entity_id", "entity_type",
            "prompt_template", "system_prompt", "config"
        }

        should_recreate_schedule = (
            existing_job.get("trigger_type") == "cron" and
            existing_job.get("enabled", True) and
            existing_job.get("temporal_schedule_id") and
            any(field in update_data for field in schedule_affecting_fields)
        )

        if should_recreate_schedule:
            logger.info(
                "recreating_temporal_schedule_due_to_updates",
                job_id=job_id,
                updated_fields=[f for f in update_data.keys() if f in schedule_affecting_fields],
            )

            # Delete existing schedule
            try:
                await delete_temporal_schedule(existing_job["temporal_schedule_id"])
            except Exception as delete_error:
                logger.error(
                    "failed_to_delete_schedule_during_update",
                    job_id=job_id,
                    schedule_id=existing_job["temporal_schedule_id"],
                    error=str(delete_error),
                )

            # Merge existing job data with updates for schedule
            updated_job_data = {**existing_job, **update_data}

            # Create new schedule
            try:
                temporal_schedule_id = await create_temporal_schedule(
                    job_id=job_id,
                    organization_id=organization_id,
                    job_data=updated_job_data,
                    cron_schedule=update_data.get("cron_schedule", existing_job.get("cron_schedule")),
                    cron_timezone=update_data.get("cron_timezone", existing_job.get("cron_timezone", "UTC")),
                    db=db,
                )
                update_data["temporal_schedule_id"] = temporal_schedule_id

                # Calculate next execution time if cron_schedule changed
                if "cron_schedule" in update_data:
                    cron_iter = croniter(update_data["cron_schedule"], datetime.now(timezone.utc))
                    next_execution = cron_iter.get_next(datetime)
                    update_data["next_execution_at"] = next_execution

                logger.info(
                    "temporal_schedule_recreated_successfully",
                    job_id=job_id,
                    new_schedule_id=temporal_schedule_id,
                )
            except Exception as create_error:
                logger.error(
                    "failed_to_recreate_schedule_during_update",
                    job_id=job_id,
                    error=str(create_error),
                )
                update_data["temporal_schedule_id"] = None
                raise HTTPException(
                    status_code=status.HTTP_500_INTERNAL_SERVER_ERROR,
                    detail=f"Failed to recreate Temporal schedule: {str(create_error)}"
                )

        # Update job using SQLAlchemy
        for key, value in update_data.items():
            if hasattr(job_obj, key):
                setattr(job_obj, key, value)

        db.commit()
        db.refresh(job_obj)

        logger.info(
            "job_updated",
            job_id=job_id,
            updated_fields=list(update_data.keys()),
        )

        job_data_response = model_to_dict(job_obj)

        # Add full webhook URL
        if job_obj.webhook_url_path:
            base_url = str(request.base_url).rstrip("/")
            job_data_response["webhook_url"] = f"{base_url}{job_obj.webhook_url_path}"

        return JobResponse(**job_data_response)

    except HTTPException:
        raise
    except Exception as e:
        db.rollback()
        logger.error(
            "failed_to_update_job",
            error=str(e),
            job_id=job_id,
        )
        raise HTTPException(
            status_code=status.HTTP_500_INTERNAL_SERVER_ERROR,
            detail=f"Failed to update job: {str(e)}"
        )

@router.delete("/{job_id}", status_code=status.HTTP_204_NO_CONTENT)
@instrument_endpoint("jobs.delete_job")
async def delete_job(
    job_id: str,
    organization: dict = Depends(get_current_organization),
    db: Session = Depends(get_db),
):
    """Delete a job and its Temporal Schedule"""
    organization_id = organization["id"]

    try:
        # Fetch job details for audit logging using SQLAlchemy
        job_obj = db.query(Job).filter(
            Job.id == job_id,
            Job.organization_id == organization_id
        ).first()

        if not job_obj:
            raise HTTPException(
                status_code=status.HTTP_404_NOT_FOUND,
                detail=f"Job {job_id} not found"
            )

        # Enhanced logging for job deletion audit trail
        logger.info(
            "job_deletion_initiated",
            job_id=job_id,
            job_name=job_obj.name,
            organization_id=organization_id,
            temporal_schedule_id=job_obj.temporal_schedule_id,
            enabled=job_obj.enabled,
            trigger_type=job_obj.trigger_type,
        )

        # Delete Temporal Schedule
        if job_obj.temporal_schedule_id:
            try:
                await delete_temporal_schedule(job_obj.temporal_schedule_id)
                logger.info(
                    "temporal_schedule_deleted",
                    job_id=job_id,
                    schedule_id=job_obj.temporal_schedule_id,
                )
            except Exception as temporal_error:
                # Log but don't fail - we still want to delete from DB
                logger.error(
                    "failed_to_delete_temporal_schedule",
                    job_id=job_id,
                    schedule_id=job_obj.temporal_schedule_id,
                    error=str(temporal_error),
                    note="Job will be deleted from DB anyway. Run cleanup script to remove orphaned schedule.",
                )

        # Delete job from database using SQLAlchemy
        job_name = job_obj.name # Store before deletion
        db.delete(job_obj)
        db.commit()

        logger.info(
            "job_deleted_successfully",
            job_id=job_id,
            job_name=job_name,
            organization_id=organization_id,
        )

    except HTTPException:
        raise
    except Exception as e:
        logger.error(
            "failed_to_delete_job",
            error=str(e),
            job_id=job_id,
        )
        raise HTTPException(
            status_code=status.HTTP_500_INTERNAL_SERVER_ERROR,
            detail=f"Failed to delete job: {str(e)}"
        )

@router.post("/{job_id}/trigger", response_model=JobTriggerResponse)
@instrument_endpoint("jobs.trigger_job")
async def trigger_job(
    job_id: str,
    trigger_data: JobTriggerRequest,
    request: Request,
    organization: dict = Depends(get_current_organization),
    db: Session = Depends(get_db),
):
    """
    Manually trigger a job execution.

    **Request Body:**
    - parameters: Dictionary of parameters to substitute in prompt template
    - config_override: Optional config overrides for this execution
    """
    organization_id = organization["id"]

    try:
        # Validate job exists and is enabled using SQLAlchemy
        job_obj = db.query(Job).filter(
            Job.id == job_id,
            Job.organization_id == organization_id
        ).first()

        if not job_obj:
            raise HTTPException(
                status_code=status.HTTP_404_NOT_FOUND,
                detail=f"Job {job_id} not found"
            )

        job = model_to_dict(job_obj)

        if not job.get("enabled"):
            raise HTTPException(
                status_code=status.HTTP_400_BAD_REQUEST,
                detail="Job is disabled"
            )

        # Apply config overrides if provided
        if trigger_data.config_override:
            job = {**job, "config": {**(job.get("config") or {}), **trigger_data.config_override}}

        # Start the job execution directly (same as UI does)
        workflow_id, execution_id = await start_job_execution(
            job=job,
            organization_id=organization_id,
            trigger_type="manual",
            trigger_metadata={
                "triggered_by": organization.get("user_email"),
                "user_id": organization.get("user_id"),
                "user_email": organization.get("user_email"),
                "user_name": organization.get("user_name"),
            },
            db=db,
            token=request.state.kubiya_token,
            parameters=trigger_data.parameters,
        )

        return JobTriggerResponse(
            job_id=job_id,
            workflow_id=workflow_id,
            execution_id=execution_id,
            status="started",
            message="Job execution started successfully",
        )

    except HTTPException:
        raise
    except Exception as e:
        logger.error(
            "failed_to_trigger_job",
            error=str(e),
            job_id=job_id,
        )
        raise HTTPException(
            status_code=status.HTTP_500_INTERNAL_SERVER_ERROR,
            detail=f"Failed to trigger job: {str(e)}"
        )

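A minimal sketch of a manual trigger call, under the same /api/v1/jobs mount-point and bearer-token assumptions as the creation example above; per the handler, parameters are substituted into the job's prompt_template and config_override is merged over the stored config. The override key shown is purely illustrative:

    import httpx

    resp = httpx.post(
        "https://<control-plane-host>/api/v1/jobs/<job-id>/trigger",
        json={
            "parameters": {"date": "2025-01-01"},   # fills {{date}} in the prompt template
            "config_override": {"max_retries": 2},  # hypothetical config key, shown only as an example
        },
        headers={"Authorization": "Bearer <token>"},
    )
    resp.raise_for_status()
    print(resp.json()["execution_id"], resp.json()["status"])
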
@router.post("/{job_id}/enable", response_model=JobResponse)
@instrument_endpoint("jobs.enable_job")
async def enable_job(
    job_id: str,
    request: Request,
    organization: dict = Depends(get_current_organization),
    db: Session = Depends(get_db),
):
    """Enable a job and unpause its Temporal Schedule"""
    organization_id = organization["id"]

    try:
        # Fetch job using SQLAlchemy
        job_obj = db.query(Job).filter(
            Job.id == job_id,
            Job.organization_id == organization_id
        ).first()

        if not job_obj:
            raise HTTPException(
                status_code=status.HTTP_404_NOT_FOUND,
                detail=f"Job {job_id} not found"
            )

        job = model_to_dict(job_obj)

        # Unpause Temporal Schedule if it exists
        if job_obj.temporal_schedule_id:
            try:
                await unpause_temporal_schedule(job_obj.temporal_schedule_id)
                logger.info(
                    "temporal_schedule_unpaused",
                    job_id=job_id,
                    schedule_id=job_obj.temporal_schedule_id,
                )
            except Exception as temporal_error:
                logger.error(
                    "failed_to_unpause_temporal_schedule",
                    job_id=job_id,
                    schedule_id=job_obj.temporal_schedule_id,
                    error=str(temporal_error),
                )
                raise HTTPException(
                    status_code=status.HTTP_500_INTERNAL_SERVER_ERROR,
                    detail=f"Failed to unpause Temporal schedule: {str(temporal_error)}"
                )

            # Update job status
            job_obj.enabled = True
            job_obj.status = "active"
            job_obj.updated_at = datetime.now(timezone.utc)

        elif job_obj.trigger_type == "cron":
            # Create schedule if it doesn't exist
            try:
                temporal_schedule_id = await create_temporal_schedule(
                    job_id=job_id,
                    organization_id=organization_id,
                    job_data=job,
                    cron_schedule=job_obj.cron_schedule,
                    cron_timezone=job_obj.cron_timezone or "UTC",
                    db=db,
                )
            except Exception as create_error:
                logger.error(
                    "failed_to_create_temporal_schedule_during_enable",
                    job_id=job_id,
                    error=str(create_error),
                )
                raise HTTPException(
                    status_code=status.HTTP_500_INTERNAL_SERVER_ERROR,
                    detail=f"Failed to create Temporal schedule: {str(create_error)}"
                )

            # Update job with schedule ID
            job_obj.temporal_schedule_id = temporal_schedule_id
            job_obj.enabled = True
            job_obj.status = "active"
            job_obj.updated_at = datetime.now(timezone.utc)

            # Calculate next execution time
            cron_iter = croniter(job_obj.cron_schedule, datetime.now(timezone.utc))
            next_execution = cron_iter.get_next(datetime)
            job_obj.next_execution_at = next_execution
        else:
            # Just enable the job (non-cron jobs)
            job_obj.enabled = True
            job_obj.status = "active"
            job_obj.updated_at = datetime.now(timezone.utc)

        db.commit()
        db.refresh(job_obj)

        logger.info("job_enabled", job_id=job_id)

        job_data = model_to_dict(job_obj)
        if job_obj.webhook_url_path:
            base_url = str(request.base_url).rstrip("/")
            job_data["webhook_url"] = f"{base_url}{job_obj.webhook_url_path}"

        return JobResponse(**job_data)

    except HTTPException:
        raise
    except Exception as e:
        db.rollback()
        logger.error(
            "failed_to_enable_job",
            error=str(e),
            job_id=job_id,
        )
        raise HTTPException(
            status_code=status.HTTP_500_INTERNAL_SERVER_ERROR,
            detail=f"Failed to enable job: {str(e)}"
        )

@router.post("/{job_id}/disable", response_model=JobResponse)
@instrument_endpoint("jobs.disable_job")
async def disable_job(
    job_id: str,
    request: Request,
    organization: dict = Depends(get_current_organization),
    db: Session = Depends(get_db),
):
    """Disable a job and pause its Temporal Schedule"""
    organization_id = organization["id"]

    try:
        # Fetch job using SQLAlchemy
        job_obj = db.query(Job).filter(
            Job.id == job_id,
            Job.organization_id == organization_id
        ).first()

        if not job_obj:
            raise HTTPException(
                status_code=status.HTTP_404_NOT_FOUND,
                detail=f"Job {job_id} not found"
            )

        # Pause Temporal Schedule if it exists
        if job_obj.temporal_schedule_id:
            try:
                await pause_temporal_schedule(job_obj.temporal_schedule_id)
                logger.info(
                    "temporal_schedule_paused",
                    job_id=job_id,
                    schedule_id=job_obj.temporal_schedule_id,
                )
            except Exception as temporal_error:
                logger.error(
                    "failed_to_pause_temporal_schedule",
                    job_id=job_id,
                    schedule_id=job_obj.temporal_schedule_id,
                    error=str(temporal_error),
                )
                raise HTTPException(
                    status_code=status.HTTP_500_INTERNAL_SERVER_ERROR,
                    detail=f"Failed to pause Temporal schedule: {str(temporal_error)}"
                )

        # Update job status using SQLAlchemy
        job_obj.enabled = False
        job_obj.status = "disabled"
        job_obj.updated_at = datetime.now(timezone.utc)

        db.commit()
        db.refresh(job_obj)

        logger.info("job_disabled", job_id=job_id)

        job_data = model_to_dict(job_obj)
        if job_obj.webhook_url_path:
            base_url = str(request.base_url).rstrip("/")
            job_data["webhook_url"] = f"{base_url}{job_obj.webhook_url_path}"

        return JobResponse(**job_data)

    except HTTPException:
        raise
    except Exception as e:
        db.rollback()
        logger.error(
            "failed_to_disable_job",
            error=str(e),
            job_id=job_id,
        )
        raise HTTPException(
            status_code=status.HTTP_500_INTERNAL_SERVER_ERROR,
            detail=f"Failed to disable job: {str(e)}"
        )

@router.get("/{job_id}/executions", response_model=JobExecutionHistoryResponse)
@instrument_endpoint("jobs.get_job_executions")
async def get_job_executions(
    job_id: str,
    organization: dict = Depends(get_current_organization),
    db: Session = Depends(get_db),
    limit: int = 50,
    offset: int = 0,
):
    """
    Get execution history for a job.

    **Query Parameters:**
    - limit: Maximum number of executions to return (default: 50)
    - offset: Number of executions to skip (default: 0)
    """
    from sqlalchemy.orm import joinedload
    from sqlalchemy import func

    organization_id = organization["id"]

    try:
        # Fetch job executions with joined execution data using SQLAlchemy
        job_execution_objects = db.query(JobExecution).options(
            joinedload(JobExecution.execution)
        ).filter(
            JobExecution.job_id == job_id,
            JobExecution.organization_id == organization_id
        ).order_by(desc(JobExecution.created_at)).offset(offset).limit(limit).all()

        # Count total executions
        total_count = db.query(func.count(JobExecution.id)).filter(
            JobExecution.job_id == job_id,
            JobExecution.organization_id == organization_id
        ).scalar() or 0

        executions = []
        for job_exec in job_execution_objects:
            execution = job_exec.execution
            execution_data = model_to_dict(execution) if execution else {}
            executions.append(
                JobExecutionHistoryItem(
                    execution_id=str(execution.id) if execution else None,
                    trigger_type=job_exec.trigger_type,
                    status=execution_data.get("status"),
                    started_at=execution_data.get("started_at"),
                    completed_at=execution_data.get("completed_at"),
                    duration_ms=job_exec.execution_duration_ms,
                    error_message=execution_data.get("error_message"),
                    trigger_metadata=execution_data.get("trigger_metadata"),
                )
            )

        return JobExecutionHistoryResponse(
            job_id=job_id,
            total_count=total_count,
            executions=executions,
        )

    except Exception as e:
        logger.error(
            "failed_to_get_job_executions",
            error=str(e),
            job_id=job_id,
        )
        raise HTTPException(
            status_code=status.HTTP_500_INTERNAL_SERVER_ERROR,
            detail=f"Failed to get job executions: {str(e)}"
        )

@router.post("/webhook/{webhook_path}", response_model=JobTriggerResponse)
@instrument_endpoint("jobs.trigger_webhook")
async def trigger_webhook(
    webhook_path: str,
    payload: WebhookPayload,
    request: Request,
    x_webhook_signature: Optional[str] = Header(None),
    db: Session = Depends(get_db),
):
    """
    Trigger a job via webhook.

    **Security:**
    - Requires HMAC signature in X-Webhook-Signature header
    - Signature format: hex(HMAC-SHA256(secret, request_body))

    **Request Body:**
    - parameters: Dictionary of parameters to substitute in prompt template
    - config_override: Optional config overrides for this execution
    - metadata: Additional metadata for this trigger
    """
    try:
        # Fetch job by webhook path using SQLAlchemy
        webhook_url_path = f"/api/v1/jobs/webhook/{webhook_path}"
        job_obj = db.query(Job).filter(
            Job.webhook_url_path == webhook_url_path
        ).first()

        if not job_obj:
            raise HTTPException(
                status_code=status.HTTP_404_NOT_FOUND,
                detail="Webhook not found"
            )

        job = model_to_dict(job_obj)

        # Verify webhook signature
        if not x_webhook_signature:
            raise HTTPException(
                status_code=status.HTTP_401_UNAUTHORIZED,
                detail="Missing X-Webhook-Signature header"
            )

        # Get raw request body for signature verification
        body = await request.body()
        if not verify_webhook_signature(body, x_webhook_signature, job_obj.webhook_secret):
            raise HTTPException(
                status_code=status.HTTP_401_UNAUTHORIZED,
                detail="Invalid webhook signature"
            )

        # Validate job is enabled
        if not job_obj.enabled:
            raise HTTPException(
                status_code=status.HTTP_400_BAD_REQUEST,
                detail="Job is disabled"
            )

        # Apply config overrides if provided
        if payload.config_override:
            job = {**job, "config": {**(job.get("config") or {}), **payload.config_override}}

        # Start the job execution directly (same as UI does)
        # Include webhook payload in trigger_metadata so agent can access it
        # Extract user info from metadata if provided, otherwise mark as external
        webhook_metadata = payload.metadata or {}

        # For webhooks, get a worker token from the organization's environment
        # This allows webhooks to use org-specific Temporal credentials
        from control_plane_api.app.models.environment import Environment
        env = db.query(Environment).filter(
            Environment.organization_id == job["organization_id"],
            Environment.status == "ready"
        ).first()

        # Use worker token if available, otherwise empty (will fallback to env vars)
        webhook_token = env.worker_token if env and env.worker_token else ""

        workflow_id, execution_id = await start_job_execution(
            job=job,
            organization_id=job["organization_id"],
            trigger_type="webhook",
            trigger_metadata={
                "webhook_path": webhook_path,
                "webhook_payload": {
                    "parameters": payload.parameters or {},
                    "config_override": payload.config_override or {},
                    "metadata": webhook_metadata,
                },
                "parameters": payload.parameters or {},
                "metadata": webhook_metadata,
                "triggered_by": webhook_metadata.get("user_email") or webhook_metadata.get("triggered_by") or "webhook",
                "user_id": webhook_metadata.get("user_id"),
                "user_email": webhook_metadata.get("user_email"),
                "user_name": webhook_metadata.get("user_name"),
            },
            db=db,
            token=webhook_token, # Use org's worker token for authentication
            parameters=payload.parameters,
        )

        return JobTriggerResponse(
            job_id=job["id"],
            workflow_id=workflow_id,
            execution_id=execution_id,
            status="started",
            message="Job execution started successfully via webhook",
        )

    except HTTPException:
        raise
    except Exception as e:
        logger.error(
            "failed_to_trigger_webhook",
            error=str(e),
            webhook_path=webhook_path,
        )
        raise HTTPException(
            status_code=status.HTTP_500_INTERNAL_SERVER_ERROR,
            detail=f"Failed to trigger webhook: {str(e)}"
        )