kubiya-control-plane-api 0.1.0__py3-none-any.whl → 0.3.4__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Potentially problematic release.
This version of kubiya-control-plane-api might be problematic. Click here for more details.
- control_plane_api/README.md +266 -0
- control_plane_api/__init__.py +0 -0
- control_plane_api/__version__.py +1 -0
- control_plane_api/alembic/README +1 -0
- control_plane_api/alembic/env.py +98 -0
- control_plane_api/alembic/script.py.mako +28 -0
- control_plane_api/alembic/versions/1382bec74309_initial_migration_with_all_models.py +251 -0
- control_plane_api/alembic/versions/1f54bc2a37e3_add_analytics_tables.py +162 -0
- control_plane_api/alembic/versions/2e4cb136dc10_rename_toolset_ids_to_skill_ids_in_teams.py +30 -0
- control_plane_api/alembic/versions/31cd69a644ce_add_skill_templates_table.py +28 -0
- control_plane_api/alembic/versions/89e127caa47d_add_jobs_and_job_executions_tables.py +161 -0
- control_plane_api/alembic/versions/add_llm_models_table.py +51 -0
- control_plane_api/alembic/versions/b0e10697f212_add_runtime_column_to_teams_simple.py +42 -0
- control_plane_api/alembic/versions/ce43b24b63bf_add_execution_trigger_source_and_fix_.py +155 -0
- control_plane_api/alembic/versions/d4eaf16e3f8d_rename_toolsets_to_skills.py +84 -0
- control_plane_api/alembic/versions/efa2dc427da1_rename_metadata_to_custom_metadata.py +32 -0
- control_plane_api/alembic/versions/f973b431d1ce_add_workflow_executor_to_skill_types.py +44 -0
- control_plane_api/alembic.ini +148 -0
- control_plane_api/api/index.py +12 -0
- control_plane_api/app/__init__.py +11 -0
- control_plane_api/app/activities/__init__.py +20 -0
- control_plane_api/app/activities/agent_activities.py +379 -0
- control_plane_api/app/activities/team_activities.py +410 -0
- control_plane_api/app/activities/temporal_cloud_activities.py +577 -0
- control_plane_api/app/config/__init__.py +35 -0
- control_plane_api/app/config/api_config.py +354 -0
- control_plane_api/app/config/model_pricing.py +318 -0
- control_plane_api/app/config.py +95 -0
- control_plane_api/app/database.py +135 -0
- control_plane_api/app/exceptions.py +408 -0
- control_plane_api/app/lib/__init__.py +11 -0
- control_plane_api/app/lib/job_executor.py +312 -0
- control_plane_api/app/lib/kubiya_client.py +235 -0
- control_plane_api/app/lib/litellm_pricing.py +166 -0
- control_plane_api/app/lib/planning_tools/__init__.py +22 -0
- control_plane_api/app/lib/planning_tools/agents.py +155 -0
- control_plane_api/app/lib/planning_tools/base.py +189 -0
- control_plane_api/app/lib/planning_tools/environments.py +214 -0
- control_plane_api/app/lib/planning_tools/resources.py +240 -0
- control_plane_api/app/lib/planning_tools/teams.py +198 -0
- control_plane_api/app/lib/policy_enforcer_client.py +939 -0
- control_plane_api/app/lib/redis_client.py +436 -0
- control_plane_api/app/lib/supabase.py +71 -0
- control_plane_api/app/lib/temporal_client.py +138 -0
- control_plane_api/app/lib/validation/__init__.py +20 -0
- control_plane_api/app/lib/validation/runtime_validation.py +287 -0
- control_plane_api/app/main.py +128 -0
- control_plane_api/app/middleware/__init__.py +8 -0
- control_plane_api/app/middleware/auth.py +513 -0
- control_plane_api/app/middleware/exception_handler.py +267 -0
- control_plane_api/app/middleware/rate_limiting.py +384 -0
- control_plane_api/app/middleware/request_id.py +202 -0
- control_plane_api/app/models/__init__.py +27 -0
- control_plane_api/app/models/agent.py +79 -0
- control_plane_api/app/models/analytics.py +206 -0
- control_plane_api/app/models/associations.py +81 -0
- control_plane_api/app/models/environment.py +63 -0
- control_plane_api/app/models/execution.py +93 -0
- control_plane_api/app/models/job.py +179 -0
- control_plane_api/app/models/llm_model.py +75 -0
- control_plane_api/app/models/presence.py +49 -0
- control_plane_api/app/models/project.py +47 -0
- control_plane_api/app/models/session.py +38 -0
- control_plane_api/app/models/team.py +66 -0
- control_plane_api/app/models/workflow.py +55 -0
- control_plane_api/app/policies/README.md +121 -0
- control_plane_api/app/policies/approved_users.rego +62 -0
- control_plane_api/app/policies/business_hours.rego +51 -0
- control_plane_api/app/policies/rate_limiting.rego +100 -0
- control_plane_api/app/policies/tool_restrictions.rego +86 -0
- control_plane_api/app/routers/__init__.py +4 -0
- control_plane_api/app/routers/agents.py +364 -0
- control_plane_api/app/routers/agents_v2.py +1260 -0
- control_plane_api/app/routers/analytics.py +1014 -0
- control_plane_api/app/routers/context_manager.py +562 -0
- control_plane_api/app/routers/environment_context.py +270 -0
- control_plane_api/app/routers/environments.py +715 -0
- control_plane_api/app/routers/execution_environment.py +517 -0
- control_plane_api/app/routers/executions.py +1911 -0
- control_plane_api/app/routers/health.py +92 -0
- control_plane_api/app/routers/health_v2.py +326 -0
- control_plane_api/app/routers/integrations.py +274 -0
- control_plane_api/app/routers/jobs.py +1344 -0
- control_plane_api/app/routers/models.py +82 -0
- control_plane_api/app/routers/models_v2.py +361 -0
- control_plane_api/app/routers/policies.py +639 -0
- control_plane_api/app/routers/presence.py +234 -0
- control_plane_api/app/routers/projects.py +902 -0
- control_plane_api/app/routers/runners.py +379 -0
- control_plane_api/app/routers/runtimes.py +172 -0
- control_plane_api/app/routers/secrets.py +155 -0
- control_plane_api/app/routers/skills.py +1001 -0
- control_plane_api/app/routers/skills_definitions.py +140 -0
- control_plane_api/app/routers/task_planning.py +1256 -0
- control_plane_api/app/routers/task_queues.py +654 -0
- control_plane_api/app/routers/team_context.py +270 -0
- control_plane_api/app/routers/teams.py +1400 -0
- control_plane_api/app/routers/worker_queues.py +1545 -0
- control_plane_api/app/routers/workers.py +935 -0
- control_plane_api/app/routers/workflows.py +204 -0
- control_plane_api/app/runtimes/__init__.py +6 -0
- control_plane_api/app/runtimes/validation.py +344 -0
- control_plane_api/app/schemas/job_schemas.py +295 -0
- control_plane_api/app/services/__init__.py +1 -0
- control_plane_api/app/services/agno_service.py +619 -0
- control_plane_api/app/services/litellm_service.py +190 -0
- control_plane_api/app/services/policy_service.py +525 -0
- control_plane_api/app/services/temporal_cloud_provisioning.py +150 -0
- control_plane_api/app/skills/__init__.py +44 -0
- control_plane_api/app/skills/base.py +229 -0
- control_plane_api/app/skills/business_intelligence.py +189 -0
- control_plane_api/app/skills/data_visualization.py +154 -0
- control_plane_api/app/skills/docker.py +104 -0
- control_plane_api/app/skills/file_generation.py +94 -0
- control_plane_api/app/skills/file_system.py +110 -0
- control_plane_api/app/skills/python.py +92 -0
- control_plane_api/app/skills/registry.py +65 -0
- control_plane_api/app/skills/shell.py +102 -0
- control_plane_api/app/skills/workflow_executor.py +469 -0
- control_plane_api/app/utils/workflow_executor.py +354 -0
- control_plane_api/app/workflows/__init__.py +11 -0
- control_plane_api/app/workflows/agent_execution.py +507 -0
- control_plane_api/app/workflows/agent_execution_with_skills.py +222 -0
- control_plane_api/app/workflows/namespace_provisioning.py +326 -0
- control_plane_api/app/workflows/team_execution.py +399 -0
- control_plane_api/scripts/seed_models.py +239 -0
- control_plane_api/worker/__init__.py +0 -0
- control_plane_api/worker/activities/__init__.py +0 -0
- control_plane_api/worker/activities/agent_activities.py +1241 -0
- control_plane_api/worker/activities/approval_activities.py +234 -0
- control_plane_api/worker/activities/runtime_activities.py +388 -0
- control_plane_api/worker/activities/skill_activities.py +267 -0
- control_plane_api/worker/activities/team_activities.py +1217 -0
- control_plane_api/worker/config/__init__.py +31 -0
- control_plane_api/worker/config/worker_config.py +275 -0
- control_plane_api/worker/control_plane_client.py +529 -0
- control_plane_api/worker/examples/analytics_integration_example.py +362 -0
- control_plane_api/worker/models/__init__.py +1 -0
- control_plane_api/worker/models/inputs.py +89 -0
- control_plane_api/worker/runtimes/__init__.py +31 -0
- control_plane_api/worker/runtimes/base.py +789 -0
- control_plane_api/worker/runtimes/claude_code_runtime.py +1443 -0
- control_plane_api/worker/runtimes/default_runtime.py +617 -0
- control_plane_api/worker/runtimes/factory.py +173 -0
- control_plane_api/worker/runtimes/validation.py +93 -0
- control_plane_api/worker/services/__init__.py +1 -0
- control_plane_api/worker/services/agent_executor.py +422 -0
- control_plane_api/worker/services/agent_executor_v2.py +383 -0
- control_plane_api/worker/services/analytics_collector.py +457 -0
- control_plane_api/worker/services/analytics_service.py +464 -0
- control_plane_api/worker/services/approval_tools.py +310 -0
- control_plane_api/worker/services/approval_tools_agno.py +207 -0
- control_plane_api/worker/services/cancellation_manager.py +177 -0
- control_plane_api/worker/services/data_visualization.py +827 -0
- control_plane_api/worker/services/jira_tools.py +257 -0
- control_plane_api/worker/services/runtime_analytics.py +328 -0
- control_plane_api/worker/services/session_service.py +194 -0
- control_plane_api/worker/services/skill_factory.py +175 -0
- control_plane_api/worker/services/team_executor.py +574 -0
- control_plane_api/worker/services/team_executor_v2.py +465 -0
- control_plane_api/worker/services/workflow_executor_tools.py +1418 -0
- control_plane_api/worker/tests/__init__.py +1 -0
- control_plane_api/worker/tests/e2e/__init__.py +0 -0
- control_plane_api/worker/tests/e2e/test_execution_flow.py +571 -0
- control_plane_api/worker/tests/integration/__init__.py +0 -0
- control_plane_api/worker/tests/integration/test_control_plane_integration.py +308 -0
- control_plane_api/worker/tests/unit/__init__.py +0 -0
- control_plane_api/worker/tests/unit/test_control_plane_client.py +401 -0
- control_plane_api/worker/utils/__init__.py +1 -0
- control_plane_api/worker/utils/chunk_batcher.py +305 -0
- control_plane_api/worker/utils/retry_utils.py +60 -0
- control_plane_api/worker/utils/streaming_utils.py +373 -0
- control_plane_api/worker/worker.py +753 -0
- control_plane_api/worker/workflows/__init__.py +0 -0
- control_plane_api/worker/workflows/agent_execution.py +589 -0
- control_plane_api/worker/workflows/team_execution.py +429 -0
- kubiya_control_plane_api-0.3.4.dist-info/METADATA +229 -0
- kubiya_control_plane_api-0.3.4.dist-info/RECORD +182 -0
- kubiya_control_plane_api-0.3.4.dist-info/entry_points.txt +2 -0
- kubiya_control_plane_api-0.3.4.dist-info/top_level.txt +1 -0
- kubiya_control_plane_api-0.1.0.dist-info/METADATA +0 -66
- kubiya_control_plane_api-0.1.0.dist-info/RECORD +0 -5
- kubiya_control_plane_api-0.1.0.dist-info/top_level.txt +0 -1
- {kubiya_control_plane_api-0.1.0.dist-info/licenses → control_plane_api}/LICENSE +0 -0
- {kubiya_control_plane_api-0.1.0.dist-info → kubiya_control_plane_api-0.3.4.dist-info}/WHEEL +0 -0
|
@@ -0,0 +1,1344 @@
|
|
|
1
|
+
"""
|
|
2
|
+
Jobs router for scheduled and webhook-triggered executions.
|
|
3
|
+
|
|
4
|
+
This router handles:
|
|
5
|
+
- CRUD operations for jobs
|
|
6
|
+
- Manual job triggering
|
|
7
|
+
- Webhook URL generation and triggering
|
|
8
|
+
- Cron schedule management with Temporal
|
|
9
|
+
- Job execution history
|
|
10
|
+
"""
|
|
11
|
+
|
|
12
|
+
from fastapi import APIRouter, Depends, HTTPException, status, Request, Header
|
|
13
|
+
from typing import List, Optional
|
|
14
|
+
from datetime import datetime, timezone
|
|
15
|
+
import structlog
|
|
16
|
+
import uuid
|
|
17
|
+
import hmac
|
|
18
|
+
import hashlib
|
|
19
|
+
import secrets
|
|
20
|
+
import asyncio
|
|
21
|
+
|
|
22
|
+
from control_plane_api.app.middleware.auth import get_current_organization
|
|
23
|
+
from control_plane_api.app.lib.supabase import get_supabase
|
|
24
|
+
from control_plane_api.app.lib.temporal_client import get_temporal_client
|
|
25
|
+
from control_plane_api.app.lib.job_executor import select_worker_queue, substitute_prompt_parameters
|
|
26
|
+
from control_plane_api.app.schemas.job_schemas import (
|
|
27
|
+
JobCreate,
|
|
28
|
+
JobUpdate,
|
|
29
|
+
JobResponse,
|
|
30
|
+
JobTriggerRequest,
|
|
31
|
+
JobTriggerResponse,
|
|
32
|
+
JobExecutionHistoryResponse,
|
|
33
|
+
JobExecutionHistoryItem,
|
|
34
|
+
WebhookPayload,
|
|
35
|
+
ExecutionEnvironment,
|
|
36
|
+
)
|
|
37
|
+
from temporalio.client import Schedule, ScheduleActionStartWorkflow, ScheduleSpec, ScheduleIntervalSpec
|
|
38
|
+
from croniter import croniter
|
|
39
|
+
|
|
40
|
+
logger = structlog.get_logger()
|
|
41
|
+
|
|
42
|
+
router = APIRouter()
|
|
43
|
+
|
|
44
|
+
|
|
45
|
+
def generate_webhook_secret() -> str:
    """Return a cryptographically strong, URL-safe secret for signing webhooks.

    32 bytes of entropy encode to a 43-character base64url token.
    """
    entropy_bytes = 32
    return secrets.token_urlsafe(entropy_bytes)
|
|
48
|
+
|
|
49
|
+
|
|
50
|
+
def generate_webhook_path() -> str:
    """Return a unique, URL-safe path segment for a job's webhook endpoint.

    16 bytes of entropy encode to a 22-character base64url token.
    """
    entropy_bytes = 16
    return secrets.token_urlsafe(entropy_bytes)
|
|
53
|
+
|
|
54
|
+
|
|
55
|
+
async def start_job_execution(
    job: dict,
    organization_id: str,
    trigger_type: str,
    trigger_metadata: dict,
    parameters: Optional[dict] = None,
) -> tuple[str, str]:
    """
    Start a job execution by directly triggering the appropriate workflow.

    Resolves a worker queue, inserts placeholder ``executions`` and
    ``job_executions`` rows (so the UI can query the execution immediately),
    then starts AgentExecutionWorkflow or TeamExecutionWorkflow on Temporal
    depending on the job's ``planning_mode``/``entity_type``.

    Args:
        job: Job row as a dict. Reads keys such as ``id``, ``name``,
            ``planning_mode``, ``entity_type``, ``entity_id``,
            ``entity_name``, ``prompt_template``, ``system_prompt``,
            ``config``, ``executor_type``, ``worker_queue_name``,
            ``environment_name``.
        organization_id: Organization that owns the job.
        trigger_type: One of "manual", "cron", "webhook"; anything else
            falls back to the "job_manual" trigger source.
        trigger_metadata: Extra metadata merged into the execution records;
            may carry user-attribution keys (``user_id``, ``triggered_by``,
            ``user_email``, ``user_name``, ``user_avatar``).
        parameters: Optional values substituted into the prompt template.

    Raises:
        ValueError: If no worker queue is available, the referenced agent
            does not exist, or the planning_mode/entity_type combination
            is unsupported.

    Returns:
        Tuple of (workflow_id, execution_id)
    """
    # NOTE(review): get_supabase is already imported at module level; this
    # local import is redundant.
    from control_plane_api.app.lib.supabase import get_supabase

    supabase = get_supabase()
    temporal_client = await get_temporal_client()

    planning_mode = job.get("planning_mode")
    entity_type = job.get("entity_type")
    entity_id = job.get("entity_id")

    # Get the appropriate worker queue based on job configuration
    worker_queue_name, _ = await select_worker_queue(
        organization_id=organization_id,
        executor_type=job.get("executor_type", "auto"),
        worker_queue_name=job.get("worker_queue_name"),
        environment_name=job.get("environment_name"),
    )

    if not worker_queue_name:
        raise ValueError("No workers are currently running for your organization. Please start a worker to execute jobs.")

    # Extract runner_name from worker_queue_name (format: "org_id.runner_name")
    runner_name = worker_queue_name.split(".")[-1] if "." in worker_queue_name else worker_queue_name

    # Get entity name for display; fall back to a DB lookup when the job row
    # does not carry it. Lookup failure is non-fatal (display-only field).
    entity_name = job.get("entity_name")
    if not entity_name and entity_id and entity_type:
        # Try to get entity name from database
        entity_table = f"{entity_type}s"  # agent -> agents, team -> teams
        try:
            entity_result = supabase.table(entity_table).select("name").eq("id", entity_id).single().execute()
            if entity_result.data:
                entity_name = entity_result.data.get("name")
        except Exception as e:
            logger.warning("failed_to_get_entity_name", entity_type=entity_type, entity_id=entity_id, error=str(e))

    # Substitute parameters in prompt template
    prompt = job.get("prompt_template", "")
    if parameters:
        prompt = substitute_prompt_parameters(prompt, parameters)

    # Generate execution ID
    execution_id = str(uuid.uuid4())

    # Determine execution_type based on entity_type (defaults to AGENT)
    execution_type_value = entity_type.upper() if entity_type else "AGENT"

    # Map trigger_type to trigger_source
    trigger_source_map = {
        "manual": "job_manual",
        "cron": "job_cron",
        "webhook": "job_webhook",
    }
    trigger_source = trigger_source_map.get(trigger_type, "job_manual")

    # Create placeholder execution record so UI can immediately query it
    # The workflow will update this record with actual execution data
    # NOTE(review): datetime/timezone are already imported at module level;
    # this local import is redundant.
    from datetime import datetime, timezone
    now = datetime.now(timezone.utc).isoformat()

    execution_record = {
        "id": execution_id,
        "organization_id": organization_id,
        "execution_type": execution_type_value,
        "entity_id": entity_id,
        "entity_name": entity_name,
        "runner_name": runner_name,
        "trigger_source": trigger_source,
        "trigger_metadata": {
            "job_id": job["id"],
            "job_name": job.get("name"),
            "trigger_type": trigger_type,
            **trigger_metadata,
        },
        # User attribution from trigger metadata
        "user_id": trigger_metadata.get("user_id"),
        "user_email": trigger_metadata.get("triggered_by") or trigger_metadata.get("user_email"),
        "user_name": trigger_metadata.get("user_name"),
        "user_avatar": trigger_metadata.get("user_avatar"),
        "status": "pending",
        # When no parameters were supplied, store the raw template as-is.
        "prompt": prompt if parameters else job.get("prompt_template", ""),
        "created_at": now,
        "updated_at": now,
        "execution_metadata": {
            "job_id": job["id"],
            "job_name": job.get("name"),
            "trigger_type": trigger_type,
            **trigger_metadata,
        },
    }

    # Insert the placeholder record
    supabase.table("executions").insert(execution_record).execute()

    # Create job_executions junction record to track this execution was triggered by a job
    job_execution_record = {
        "id": str(uuid.uuid4()),
        "job_id": job["id"],
        "execution_id": execution_id,
        "organization_id": organization_id,
        "trigger_type": trigger_type,
        "trigger_metadata": trigger_metadata,
        "execution_status": "pending",
        "created_at": now,
    }
    supabase.table("job_executions").insert(job_execution_record).execute()

    logger.info(
        "created_placeholder_execution",
        execution_id=execution_id,
        job_id=job["id"],
        trigger_type=trigger_type,
    )

    # Prepare workflow input based on entity type
    workflow_name = None
    workflow_input = None

    if planning_mode == "predefined_agent" and entity_type == "agent":
        # Start AgentExecutionWorkflow
        workflow_name = "AgentExecutionWorkflow"

        # Get agent details
        agent_result = supabase.table("agents").select("*").eq("id", entity_id).single().execute()
        if not agent_result.data:
            raise ValueError(f"Agent {entity_id} not found")

        agent = agent_result.data
        agent_config = agent.get("configuration", {})

        workflow_input = {
            "execution_id": execution_id,
            "agent_id": entity_id,
            "organization_id": organization_id,
            "prompt": prompt,
            # Job-level system prompt wins over the agent's configured one.
            "system_prompt": job.get("system_prompt") or agent_config.get("system_prompt"),
            "model_id": agent.get("model_id"),
            "model_config": agent.get("model_config", {}),
            # Job config keys override the agent's stored configuration.
            "agent_config": {**agent_config, **(job.get("config", {}))},
            "mcp_servers": agent_config.get("mcpServers", {}),
            "user_metadata": {
                "job_id": job["id"],
                "job_name": job.get("name"),
                "trigger_type": trigger_type,
                **trigger_metadata,
            },
        }

    elif planning_mode == "predefined_team" and entity_type == "team":
        # Start TeamExecutionWorkflow
        workflow_name = "TeamExecutionWorkflow"

        workflow_input = {
            "execution_id": execution_id,
            "team_id": entity_id,
            "organization_id": organization_id,
            "prompt": prompt,
            "system_prompt": job.get("system_prompt"),
            "config": job.get("config", {}),
            "user_metadata": {
                "job_id": job["id"],
                "job_name": job.get("name"),
                "trigger_type": trigger_type,
                **trigger_metadata,
            },
        }
    else:
        raise ValueError(f"Unsupported planning_mode '{planning_mode}' or entity_type '{entity_type}'")

    # Start the workflow
    # Use standard workflow ID format for consistency with direct agent/team executions
    if entity_type == "agent":
        workflow_id = f"agent-execution-{execution_id}"
    elif entity_type == "team":
        workflow_id = f"team-execution-{execution_id}"
    else:
        # Fallback for other entity types
        workflow_id = f"job-{job['id']}-{trigger_type}-{uuid.uuid4()}"

    await temporal_client.start_workflow(
        workflow_name,
        workflow_input,
        id=workflow_id,
        task_queue=worker_queue_name,
    )

    logger.info(
        "job_execution_started",
        job_id=job["id"],
        workflow_id=workflow_id,
        execution_id=execution_id,
        trigger_type=trigger_type,
        workflow_name=workflow_name,
        worker_queue=worker_queue_name,
    )

    return workflow_id, execution_id
|
|
264
|
+
|
|
265
|
+
|
|
266
|
+
def verify_webhook_signature(payload: bytes, signature: str, secret: str) -> bool:
    """
    Verify the HMAC-SHA256 signature of a webhook payload.

    Args:
        payload: Raw request body bytes.
        signature: Hex digest from the X-Webhook-Signature header.
        secret: Webhook secret stored for the job.

    Returns:
        True if the signature matches, False otherwise.
    """
    # Recompute the digest and compare in constant time to avoid timing leaks.
    computed = hmac.new(secret.encode(), payload, hashlib.sha256).hexdigest()
    return hmac.compare_digest(signature, computed)
|
|
284
|
+
|
|
285
|
+
|
|
286
|
+
async def create_temporal_schedule(
    job_id: str,
    organization_id: str,
    job_data: dict,
    cron_schedule: str,
    cron_timezone: str,
) -> str:
    """
    Create Temporal Schedule for cron-based job.

    The schedule directly triggers AgentExecutionWorkflow or TeamExecutionWorkflow
    based on the job's planning_mode and entity configuration.

    Args:
        job_id: Job ID
        organization_id: Organization ID
        job_data: Complete job data including entity info, prompt, config
        cron_schedule: Cron expression (standard 5-field form)
        cron_timezone: Timezone for schedule

    Raises:
        HTTPException: 500 when schedule creation fails for any reason
            (the underlying error is logged and wrapped).

    Returns:
        Temporal Schedule ID
    """
    # NOTE(review): get_supabase is already imported at module level; this
    # local import is redundant.
    from control_plane_api.app.lib.supabase import get_supabase

    client = await get_temporal_client()
    supabase = get_supabase()
    # One schedule per job, deterministic id so it can be found/deleted later.
    schedule_id = f"job-{job_id}"

    try:
        # Determine execution type from planning_mode
        planning_mode = job_data.get("planning_mode")
        entity_type = job_data.get("entity_type")
        entity_id = job_data.get("entity_id")

        # Get the appropriate worker queue based on job configuration
        worker_queue_name, _ = await select_worker_queue(
            organization_id=organization_id,
            executor_type=job_data.get("executor_type", "auto"),
            worker_queue_name=job_data.get("worker_queue_name"),
            environment_name=job_data.get("environment_name"),
        )

        if not worker_queue_name:
            raise ValueError(
                f"No workers are currently running. Please start a worker before creating a cron job."
            )

        logger.info(
            "resolved_worker_queue_for_cron_job",
            job_id=job_id,
            worker_queue=worker_queue_name,
            planning_mode=planning_mode,
            entity_type=entity_type,
        )

        # Prepare workflow input based on entity type
        workflow_name = None
        workflow_input = None

        if planning_mode == "predefined_agent" and entity_type == "agent":
            # Schedule AgentExecutionWorkflow
            workflow_name = "AgentExecutionWorkflow"

            # Get agent details
            agent_result = supabase.table("agents").select("*").eq("id", entity_id).single().execute()
            if not agent_result.data:
                raise ValueError(f"Agent {entity_id} not found")

            agent = agent_result.data
            agent_config = agent.get("configuration", {})

            workflow_input = {
                "execution_id": None,  # Will be generated by workflow
                "agent_id": entity_id,
                "organization_id": organization_id,
                # Cron runs have no caller-supplied parameters, so the raw
                # template is sent as the prompt.
                "prompt": job_data.get("prompt_template", ""),
                # Job-level system prompt wins over the agent's configured one.
                "system_prompt": job_data.get("system_prompt") or agent_config.get("system_prompt"),
                "model_id": agent.get("model_id"),
                "model_config": agent.get("model_config", {}),
                # Job config keys override the agent's stored configuration.
                "agent_config": {**agent_config, **(job_data.get("config", {}))},
                "mcp_servers": agent_config.get("mcpServers", {}),
                "user_metadata": {
                    "job_id": job_id,
                    "job_name": job_data.get("name"),
                    "trigger_type": "cron",
                },
            }

        elif planning_mode == "predefined_team" and entity_type == "team":
            # Schedule TeamExecutionWorkflow
            workflow_name = "TeamExecutionWorkflow"

            workflow_input = {
                "execution_id": None,  # Will be generated by workflow
                "team_id": entity_id,
                "organization_id": organization_id,
                "prompt": job_data.get("prompt_template", ""),
                "system_prompt": job_data.get("system_prompt"),
                "config": job_data.get("config", {}),
                "user_metadata": {
                    "job_id": job_id,
                    "job_name": job_data.get("name"),
                    "trigger_type": "cron",
                },
            }
        else:
            raise ValueError(f"Unsupported planning_mode '{planning_mode}' or entity_type '{entity_type}' for cron jobs")

        # Create schedule action
        # NOTE(review): the id below renders as the literal
        # "job-<job_id>-{{SCHEDULE_ID}}" after f-string formatting — confirm
        # this placeholder is actually substituted by Temporal, as the SDK
        # normally derives scheduled-workflow IDs itself.
        action = ScheduleActionStartWorkflow(
            workflow_name,
            workflow_input,
            id=f"job-{job_id}-{{{{SCHEDULE_ID}}}}",
            task_queue=worker_queue_name,
        )

        # Parse cron expression for schedule spec
        # Temporal uses cron format: second minute hour day month day_of_week
        # Standard cron is: minute hour day month day_of_week
        # We need to add "0" for seconds
        # NOTE(review): confirm against temporalio docs that ScheduleSpec
        # cron_expressions expect a leading seconds field; standard 5-field
        # expressions are commonly accepted as-is.
        temporal_cron = f"0 {cron_schedule}"

        schedule_spec = ScheduleSpec(
            cron_expressions=[temporal_cron],
            time_zone_name=cron_timezone,
        )

        # Create schedule
        await client.create_schedule(
            schedule_id,
            Schedule(
                action=action,
                spec=schedule_spec,
            ),
        )

        logger.info(
            "temporal_schedule_created",
            schedule_id=schedule_id,
            job_id=job_id,
            cron_schedule=cron_schedule,
        )

        return schedule_id

    except Exception as e:
        logger.error(
            "failed_to_create_temporal_schedule",
            error=str(e),
            job_id=job_id,
            cron_schedule=cron_schedule,
        )
        raise HTTPException(
            status_code=status.HTTP_500_INTERNAL_SERVER_ERROR,
            detail=f"Failed to create Temporal schedule: {str(e)}"
        )
|
|
443
|
+
|
|
444
|
+
|
|
445
|
+
async def delete_temporal_schedule(schedule_id: str) -> None:
    """Delete the Temporal Schedule with the given id (best effort).

    Failures are logged and swallowed: the schedule may never have been
    created, or may already have been removed.
    """
    client = await get_temporal_client()

    try:
        schedule = client.get_schedule_handle(schedule_id)
        await schedule.delete()
        logger.info("temporal_schedule_deleted", schedule_id=schedule_id)
    except Exception as exc:
        # Don't raise - schedule might not exist
        logger.error(
            "failed_to_delete_temporal_schedule",
            error=str(exc),
            schedule_id=schedule_id,
        )
|
|
462
|
+
|
|
463
|
+
|
|
464
|
+
async def pause_temporal_schedule(schedule_id: str) -> None:
    """Pause the Temporal Schedule with the given id.

    Any failure is logged and re-raised so callers can surface the error.
    """
    client = await get_temporal_client()

    try:
        await client.get_schedule_handle(schedule_id).pause()
        logger.info("temporal_schedule_paused", schedule_id=schedule_id)
    except Exception as exc:
        logger.error(
            "failed_to_pause_temporal_schedule",
            error=str(exc),
            schedule_id=schedule_id,
        )
        raise
|
|
481
|
+
|
|
482
|
+
|
|
483
|
+
async def unpause_temporal_schedule(schedule_id: str) -> None:
    """Resume a previously paused Temporal Schedule.

    Any failure is logged and re-raised so callers can surface the error.
    """
    client = await get_temporal_client()

    try:
        await client.get_schedule_handle(schedule_id).unpause()
        logger.info("temporal_schedule_unpaused", schedule_id=schedule_id)
    except Exception as exc:
        logger.error(
            "failed_to_unpause_temporal_schedule",
            error=str(exc),
            schedule_id=schedule_id,
        )
        raise
|
|
500
|
+
|
|
501
|
+
|
|
502
|
+
@router.post("", response_model=JobResponse, status_code=status.HTTP_201_CREATED)
async def create_job(
    job_data: JobCreate,
    request: Request,
    organization: dict = Depends(get_current_organization),
):
    """
    Create a new job.

    Jobs can be triggered via:
    - Cron schedule (requires cron_schedule parameter)
    - Webhook (generates unique webhook URL)
    - Manual API trigger

    **Request Body:**
    - name: Job name
    - trigger_type: "cron", "webhook", or "manual"
    - cron_schedule: Cron expression (required for cron trigger)
    - planning_mode: "on_the_fly", "predefined_agent", "predefined_team", or "predefined_workflow"
    - entity_id: Entity ID (required for predefined modes)
    - prompt_template: Prompt template with {{variable}} placeholders
    - executor_type: "auto", "specific_queue", or "environment"

    Returns the created job; for webhook jobs the response additionally
    contains the absolute `webhook_url`. Raises HTTP 500 on any failure.
    """
    client = get_supabase()
    organization_id = organization["id"]

    logger.info(
        "creating_job",
        organization_id=organization_id,
        name=job_data.name,
        trigger_type=job_data.trigger_type,
    )

    try:
        job_id = str(uuid.uuid4())
        now = datetime.now(timezone.utc).isoformat()

        # Generate webhook URL if trigger_type is webhook
        webhook_url_path = None
        webhook_secret = None
        if job_data.trigger_type == "webhook":
            webhook_url_path = f"/api/v1/jobs/webhook/{generate_webhook_path()}"
            webhook_secret = generate_webhook_secret()

        # Prepare job data
        job_record = {
            "id": job_id,
            "organization_id": organization_id,
            "name": job_data.name,
            "description": job_data.description,
            "enabled": job_data.enabled,
            "status": "active" if job_data.enabled else "disabled",
            "trigger_type": job_data.trigger_type,
            "cron_schedule": job_data.cron_schedule,
            "cron_timezone": job_data.cron_timezone or "UTC",
            "webhook_url_path": webhook_url_path,
            "webhook_secret": webhook_secret,
            "temporal_schedule_id": None,
            "planning_mode": job_data.planning_mode,
            "entity_type": job_data.entity_type,
            "entity_id": job_data.entity_id,
            "prompt_template": job_data.prompt_template,
            "system_prompt": job_data.system_prompt,
            "executor_type": job_data.executor_type,
            "worker_queue_name": job_data.worker_queue_name,
            "environment_name": job_data.environment_name,
            "config": job_data.config,
            "execution_environment": job_data.execution_environment.dict() if job_data.execution_environment else {},
            "total_executions": 0,
            "successful_executions": 0,
            "failed_executions": 0,
            "execution_history": [],
            "last_execution_id": None,
            "last_execution_at": None,
            "next_execution_at": None,
            "last_triggered_at": None,
            "created_by": organization.get("user_id"),
            "updated_by": None,
            "created_at": now,
            "updated_at": now,
        }

        # If entity_id is provided, fetch entity name for denormalized display.
        if job_data.entity_id and job_data.entity_type:
            # Table name is derived by pluralizing the entity type
            # (e.g. "agent" -> "agents").
            table_name = f"{job_data.entity_type}s"
            entity_result = (
                client.table(table_name)
                .select("name")
                .eq("id", job_data.entity_id)
                .eq("organization_id", organization_id)
                .execute()
            )
            if entity_result.data:
                job_record["entity_name"] = entity_result.data[0]["name"]

        # Create Temporal Schedule for cron jobs (only when enabled, so a
        # disabled cron job carries no live schedule).
        if job_data.trigger_type == "cron" and job_data.enabled:
            temporal_schedule_id = await create_temporal_schedule(
                job_id=job_id,
                organization_id=organization_id,
                job_data=job_record,
                cron_schedule=job_data.cron_schedule,
                cron_timezone=job_data.cron_timezone or "UTC",
            )
            job_record["temporal_schedule_id"] = temporal_schedule_id

            # Calculate next execution time from the cron expression.
            cron_iter = croniter(job_data.cron_schedule, datetime.now(timezone.utc))
            next_execution = cron_iter.get_next(datetime)
            job_record["next_execution_at"] = next_execution.isoformat()

        # Insert job into database
        result = client.table("jobs").insert(job_record).execute()

        logger.info(
            "job_created",
            job_id=job_id,
            name=job_data.name,
            trigger_type=job_data.trigger_type,
        )

        # Build response
        job = result.data[0]
        response_data = {**job}

        # Add full webhook URL to response.
        # BUG FIX: the original used .rstrip('') which strips nothing, so the
        # trailing "/" of request.base_url produced a double slash in the URL.
        # Use .rstrip("/") as every other handler in this module does.
        if webhook_url_path:
            response_data["webhook_url"] = f"{str(request.base_url).rstrip('/')}{webhook_url_path}"

        return JobResponse(**response_data)

    except Exception as e:
        logger.error(
            "failed_to_create_job",
            error=str(e),
            organization_id=organization_id,
        )
        raise HTTPException(
            status_code=status.HTTP_500_INTERNAL_SERVER_ERROR,
            detail=f"Failed to create job: {str(e)}"
        )
|
|
643
|
+
|
|
644
|
+
|
|
645
|
+
@router.get("", response_model=List[JobResponse])
async def list_jobs(
    request: Request,
    organization: dict = Depends(get_current_organization),
    enabled: Optional[bool] = None,
    trigger_type: Optional[str] = None,
):
    """
    List all jobs for the organization.

    **Query Parameters:**
    - enabled: Filter by enabled status (true/false)
    - trigger_type: Filter by trigger type ("cron", "webhook", "manual")
    """
    supabase = get_supabase()
    org_id = organization["id"]

    try:
        jobs_query = supabase.table("jobs").select("*").eq("organization_id", org_id)

        # Apply optional filters only when the caller supplied them.
        if enabled is not None:
            jobs_query = jobs_query.eq("enabled", enabled)
        if trigger_type:
            jobs_query = jobs_query.eq("trigger_type", trigger_type)

        rows = jobs_query.order("created_at", desc=True).execute()

        # Expand each stored webhook path into an absolute URL for the response.
        root = str(request.base_url).rstrip("/")
        responses = []
        for row in rows.data:
            payload = dict(row)
            path = row.get("webhook_url_path")
            if path:
                payload["webhook_url"] = f"{root}{path}"
            responses.append(JobResponse(**payload))

        return responses

    except Exception as exc:
        logger.error(
            "failed_to_list_jobs",
            error=str(exc),
            organization_id=org_id,
        )
        raise HTTPException(
            status_code=status.HTTP_500_INTERNAL_SERVER_ERROR,
            detail=f"Failed to list jobs: {str(exc)}"
        )
|
|
694
|
+
|
|
695
|
+
|
|
696
|
+
@router.get("/{job_id}", response_model=JobResponse)
async def get_job(
    job_id: str,
    request: Request,
    organization: dict = Depends(get_current_organization),
):
    """Get job details by ID (404 if not found in this organization)."""
    supabase = get_supabase()
    org_id = organization["id"]

    try:
        rows = (
            supabase.table("jobs")
            .select("*")
            .eq("id", job_id)
            .eq("organization_id", org_id)
            .execute()
        )

        if not rows.data:
            raise HTTPException(
                status_code=status.HTTP_404_NOT_FOUND,
                detail=f"Job {job_id} not found"
            )

        record = rows.data[0]
        payload = dict(record)

        # Expose the absolute webhook URL when the job has a webhook path.
        path = record.get("webhook_url_path")
        if path:
            payload["webhook_url"] = f"{str(request.base_url).rstrip('/')}{path}"

        return JobResponse(**payload)

    except HTTPException:
        raise
    except Exception as exc:
        logger.error(
            "failed_to_get_job",
            error=str(exc),
            job_id=job_id,
        )
        raise HTTPException(
            status_code=status.HTTP_500_INTERNAL_SERVER_ERROR,
            detail=f"Failed to get job: {str(exc)}"
        )
|
|
743
|
+
|
|
744
|
+
|
|
745
|
+
@router.patch("/{job_id}", response_model=JobResponse)
async def update_job(
    job_id: str,
    job_data: JobUpdate,
    request: Request,
    organization: dict = Depends(get_current_organization),
):
    """
    Update job configuration.

    Partial update: only fields explicitly set in the request body are
    written. Raises 404 if the job is not found, 400 if no updatable
    fields were supplied, 500 on unexpected failure.

    **Note:** Updating cron_schedule will recreate the Temporal Schedule.
    """
    client = get_supabase()
    organization_id = organization["id"]

    try:
        # Fetch existing job (scoped to the caller's organization)
        result = (
            client.table("jobs")
            .select("*")
            .eq("id", job_id)
            .eq("organization_id", organization_id)
            .execute()
        )

        if not result.data:
            raise HTTPException(
                status_code=status.HTTP_404_NOT_FOUND,
                detail=f"Job {job_id} not found"
            )

        existing_job = result.data[0]

        # Build update data from the fields the client actually sent.
        # NOTE(review): fields explicitly set to null are dropped by the
        # `value is not None` filter, so a field cannot be cleared via
        # PATCH — confirm this is intended.
        update_data = {}
        for field, value in job_data.dict(exclude_unset=True).items():
            if value is not None:
                # execution_environment is a nested model; store its dict form
                if field == "execution_environment" and isinstance(value, ExecutionEnvironment):
                    update_data[field] = value.dict()
                else:
                    update_data[field] = value

        if not update_data:
            raise HTTPException(
                status_code=status.HTTP_400_BAD_REQUEST,
                detail="No fields to update"
            )

        # Stamp audit metadata on every successful update.
        update_data["updated_by"] = organization.get("user_id")
        update_data["updated_at"] = datetime.now(timezone.utc).isoformat()

        # If entity_id is being updated (together with entity_type), refresh
        # the denormalized entity_name from the referenced table.
        if "entity_id" in update_data and "entity_type" in update_data:
            entity_type = update_data.get("entity_type", existing_job["entity_type"])
            entity_id = update_data["entity_id"]
            # Table name is derived by pluralizing the entity type.
            table_name = f"{entity_type}s"
            entity_result = (
                client.table(table_name)
                .select("name")
                .eq("id", entity_id)
                .eq("organization_id", organization_id)
                .execute()
            )
            if entity_result.data:
                update_data["entity_name"] = entity_result.data[0]["name"]

        # Handle cron schedule updates: the Temporal Schedule cannot be
        # edited in place here, so it is deleted and recreated.
        if "cron_schedule" in update_data and existing_job["trigger_type"] == "cron":
            # Delete existing schedule (best-effort; see delete_temporal_schedule)
            if existing_job.get("temporal_schedule_id"):
                await delete_temporal_schedule(existing_job["temporal_schedule_id"])

            # Create new schedule if job is enabled
            if existing_job.get("enabled", True):
                # Merge existing job data with updates for schedule
                updated_job_data = {**existing_job, **update_data}

                temporal_schedule_id = await create_temporal_schedule(
                    job_id=job_id,
                    organization_id=organization_id,
                    job_data=updated_job_data,
                    cron_schedule=update_data["cron_schedule"],
                    cron_timezone=update_data.get("cron_timezone", existing_job.get("cron_timezone", "UTC")),
                )
                update_data["temporal_schedule_id"] = temporal_schedule_id

                # Calculate next execution time from the new cron expression
                cron_iter = croniter(update_data["cron_schedule"], datetime.now(timezone.utc))
                next_execution = cron_iter.get_next(datetime)
                update_data["next_execution_at"] = next_execution.isoformat()

        # Persist the accumulated changes to the jobs table.
        result = (
            client.table("jobs")
            .update(update_data)
            .eq("id", job_id)
            .eq("organization_id", organization_id)
            .execute()
        )

        logger.info(
            "job_updated",
            job_id=job_id,
            updated_fields=list(update_data.keys()),
        )

        job = result.data[0]
        job_data_response = {**job}

        # Add full webhook URL (absolute form of the stored path)
        if job.get("webhook_url_path"):
            base_url = str(request.base_url).rstrip("/")
            job_data_response["webhook_url"] = f"{base_url}{job['webhook_url_path']}"

        return JobResponse(**job_data_response)

    except HTTPException:
        raise
    except Exception as e:
        logger.error(
            "failed_to_update_job",
            error=str(e),
            job_id=job_id,
        )
        raise HTTPException(
            status_code=status.HTTP_500_INTERNAL_SERVER_ERROR,
            detail=f"Failed to update job: {str(e)}"
        )
|
|
873
|
+
|
|
874
|
+
|
|
875
|
+
@router.delete("/{job_id}", status_code=status.HTTP_204_NO_CONTENT)
async def delete_job(
    job_id: str,
    organization: dict = Depends(get_current_organization),
):
    """Delete a job and its Temporal Schedule"""
    supabase = get_supabase()
    org_id = organization["id"]

    try:
        # Look up only the schedule id so the Temporal side can be cleaned up.
        rows = (
            supabase.table("jobs")
            .select("temporal_schedule_id")
            .eq("id", job_id)
            .eq("organization_id", org_id)
            .execute()
        )

        if not rows.data:
            raise HTTPException(
                status_code=status.HTTP_404_NOT_FOUND,
                detail=f"Job {job_id} not found"
            )

        # Tear down the Temporal Schedule before removing the DB row.
        schedule_id = rows.data[0].get("temporal_schedule_id")
        if schedule_id:
            await delete_temporal_schedule(schedule_id)

        # Remove the database row itself.
        supabase.table("jobs").delete().eq("id", job_id).eq("organization_id", org_id).execute()

        logger.info("job_deleted", job_id=job_id)

    except HTTPException:
        raise
    except Exception as exc:
        logger.error(
            "failed_to_delete_job",
            error=str(exc),
            job_id=job_id,
        )
        raise HTTPException(
            status_code=status.HTTP_500_INTERNAL_SERVER_ERROR,
            detail=f"Failed to delete job: {str(exc)}"
        )
|
|
923
|
+
|
|
924
|
+
|
|
925
|
+
@router.post("/{job_id}/trigger", response_model=JobTriggerResponse)
async def trigger_job(
    job_id: str,
    trigger_data: JobTriggerRequest,
    request: Request,
    organization: dict = Depends(get_current_organization),
):
    """
    Manually trigger a job execution.

    **Request Body:**
    - parameters: Dictionary of parameters to substitute in prompt template
    - config_override: Optional config overrides for this execution

    Raises 404 if the job does not exist, 400 if it is disabled,
    500 on any unexpected failure.
    """
    client = get_supabase()
    organization_id = organization["id"]
    # FIX: removed unused `temporal_client = await get_temporal_client()` —
    # it opened a Temporal connection on every manual trigger without ever
    # being used (start_job_execution manages its own client).

    try:
        # Validate job exists and is enabled
        result = (
            client.table("jobs")
            .select("*")
            .eq("id", job_id)
            .eq("organization_id", organization_id)
            .execute()
        )

        if not result.data:
            raise HTTPException(
                status_code=status.HTTP_404_NOT_FOUND,
                detail=f"Job {job_id} not found"
            )

        job = result.data[0]

        if not job.get("enabled"):
            raise HTTPException(
                status_code=status.HTTP_400_BAD_REQUEST,
                detail="Job is disabled"
            )

        # Apply config overrides if provided (shallow merge; overrides win)
        if trigger_data.config_override:
            job = {**job, "config": {**job.get("config", {}), **trigger_data.config_override}}

        # Start the job execution directly (same as UI does)
        workflow_id, execution_id = await start_job_execution(
            job=job,
            organization_id=organization_id,
            trigger_type="manual",
            trigger_metadata={
                "triggered_by": organization.get("user_email"),
                "user_id": organization.get("user_id"),
            },
            parameters=trigger_data.parameters,
        )

        return JobTriggerResponse(
            job_id=job_id,
            workflow_id=workflow_id,
            execution_id=execution_id,
            status="started",
            message="Job execution started successfully",
        )

    except HTTPException:
        raise
    except Exception as e:
        logger.error(
            "failed_to_trigger_job",
            error=str(e),
            job_id=job_id,
        )
        raise HTTPException(
            status_code=status.HTTP_500_INTERNAL_SERVER_ERROR,
            detail=f"Failed to trigger job: {str(e)}"
        )
|
|
1003
|
+
|
|
1004
|
+
|
|
1005
|
+
@router.post("/{job_id}/enable", response_model=JobResponse)
async def enable_job(
    job_id: str,
    request: Request,
    organization: dict = Depends(get_current_organization),
):
    """
    Enable a job and unpause its Temporal Schedule.

    For cron jobs without a schedule, a new Temporal Schedule is created.
    Raises 404 if the job is not found, 500 on unexpected failure.
    """
    client = get_supabase()
    organization_id = organization["id"]

    try:
        # Fetch job
        result = (
            client.table("jobs")
            .select("*")
            .eq("id", job_id)
            .eq("organization_id", organization_id)
            .execute()
        )

        if not result.data:
            raise HTTPException(
                status_code=status.HTTP_404_NOT_FOUND,
                detail=f"Job {job_id} not found"
            )

        job = result.data[0]

        # BUG FIX: previously, when a temporal_schedule_id already existed the
        # handler only unpaused the schedule and never wrote enabled=True to
        # the database, so the job stayed "disabled" in the DB (rejecting
        # manual triggers) while cron runs resumed. All branches now persist
        # the enabled state.
        update_data = {
            "enabled": True,
            "status": "active",
            "updated_at": datetime.now(timezone.utc).isoformat(),
        }

        if job.get("temporal_schedule_id"):
            # Unpause the existing Temporal Schedule.
            await unpause_temporal_schedule(job["temporal_schedule_id"])
        elif job.get("trigger_type") == "cron":
            # Create schedule if it doesn't exist
            temporal_schedule_id = await create_temporal_schedule(
                job_id=job_id,
                organization_id=organization_id,
                job_data=job,
                cron_schedule=job["cron_schedule"],
                cron_timezone=job.get("cron_timezone", "UTC"),
            )
            update_data["temporal_schedule_id"] = temporal_schedule_id

            # Calculate next execution time from the cron expression.
            cron_iter = croniter(job["cron_schedule"], datetime.now(timezone.utc))
            next_execution = cron_iter.get_next(datetime)
            update_data["next_execution_at"] = next_execution.isoformat()

        # Persist the enabled state (and any new schedule id) in one update.
        result = (
            client.table("jobs")
            .update(update_data)
            .eq("id", job_id)
            .eq("organization_id", organization_id)
            .execute()
        )

        job = result.data[0]

        logger.info("job_enabled", job_id=job_id)

        job_data = {**job}
        if job.get("webhook_url_path"):
            base_url = str(request.base_url).rstrip("/")
            job_data["webhook_url"] = f"{base_url}{job['webhook_url_path']}"

        return JobResponse(**job_data)

    except HTTPException:
        raise
    except Exception as e:
        logger.error(
            "failed_to_enable_job",
            error=str(e),
            job_id=job_id,
        )
        raise HTTPException(
            status_code=status.HTTP_500_INTERNAL_SERVER_ERROR,
            detail=f"Failed to enable job: {str(e)}"
        )
|
|
1105
|
+
|
|
1106
|
+
|
|
1107
|
+
@router.post("/{job_id}/disable", response_model=JobResponse)
async def disable_job(
    job_id: str,
    request: Request,
    organization: dict = Depends(get_current_organization),
):
    """Disable a job and pause its Temporal Schedule"""
    supabase = get_supabase()
    org_id = organization["id"]

    try:
        rows = (
            supabase.table("jobs")
            .select("*")
            .eq("id", job_id)
            .eq("organization_id", org_id)
            .execute()
        )

        if not rows.data:
            raise HTTPException(
                status_code=status.HTTP_404_NOT_FOUND,
                detail=f"Job {job_id} not found"
            )

        current = rows.data[0]

        # Pause the Temporal side first so no new runs start.
        schedule_id = current.get("temporal_schedule_id")
        if schedule_id:
            await pause_temporal_schedule(schedule_id)

        # Persist the disabled state.
        updated = (
            supabase.table("jobs")
            .update({
                "enabled": False,
                "status": "disabled",
                "updated_at": datetime.now(timezone.utc).isoformat(),
            })
            .eq("id", job_id)
            .eq("organization_id", org_id)
            .execute()
        )

        logger.info("job_disabled", job_id=job_id)

        record = updated.data[0]
        payload = dict(record)
        path = record.get("webhook_url_path")
        if path:
            payload["webhook_url"] = f"{str(request.base_url).rstrip('/')}{path}"

        return JobResponse(**payload)

    except HTTPException:
        raise
    except Exception as exc:
        logger.error(
            "failed_to_disable_job",
            error=str(exc),
            job_id=job_id,
        )
        raise HTTPException(
            status_code=status.HTTP_500_INTERNAL_SERVER_ERROR,
            detail=f"Failed to disable job: {str(exc)}"
        )
|
|
1174
|
+
|
|
1175
|
+
|
|
1176
|
+
@router.get("/{job_id}/executions", response_model=JobExecutionHistoryResponse)
async def get_job_executions(
    job_id: str,
    organization: dict = Depends(get_current_organization),
    limit: int = 50,
    offset: int = 0,
):
    """
    Get execution history for a job.

    **Query Parameters:**
    - limit: Maximum number of executions to return (default: 50)
    - offset: Number of executions to skip (default: 0)

    Returns a paginated history plus the exact total count.
    """
    client = get_supabase()
    organization_id = organization["id"]

    try:
        # Fetch executions from job_executions table with join to executions
        result = (
            client.table("job_executions")
            .select("*, executions(*)")
            .eq("job_id", job_id)
            .eq("organization_id", organization_id)
            .order("created_at", desc=True)
            .range(offset, offset + limit - 1)
            .execute()
        )

        # Count total executions (exact count, no row data needed)
        count_result = (
            client.table("job_executions")
            .select("id", count="exact")
            .eq("job_id", job_id)
            .eq("organization_id", organization_id)
            .execute()
        )

        executions = []
        for item in result.data:
            # BUG FIX: the joined "executions" key can be present with a None
            # value when the related row is missing; `.get("executions", {})`
            # then returned None and crashed on `.get("id")`. `or {}` covers
            # both the missing-key and null-value cases.
            execution = item.get("executions") or {}
            executions.append(
                JobExecutionHistoryItem(
                    execution_id=execution.get("id"),
                    trigger_type=item.get("trigger_type"),
                    status=execution.get("status"),
                    started_at=execution.get("started_at"),
                    completed_at=execution.get("completed_at"),
                    duration_ms=item.get("execution_duration_ms"),
                    error_message=execution.get("error_message"),
                )
            )

        return JobExecutionHistoryResponse(
            job_id=job_id,
            total_count=count_result.count or 0,
            executions=executions,
        )

    except Exception as e:
        logger.error(
            "failed_to_get_job_executions",
            error=str(e),
            job_id=job_id,
        )
        raise HTTPException(
            status_code=status.HTTP_500_INTERNAL_SERVER_ERROR,
            detail=f"Failed to get job executions: {str(e)}"
        )
|
|
1245
|
+
|
|
1246
|
+
|
|
1247
|
+
@router.post("/webhook/{webhook_path}", response_model=JobTriggerResponse)
async def trigger_webhook(
    webhook_path: str,
    payload: WebhookPayload,
    request: Request,
    x_webhook_signature: Optional[str] = Header(None),
):
    """
    Trigger a job via webhook.

    **Security:**
    - Requires HMAC signature in X-Webhook-Signature header
    - Signature format: hex(HMAC-SHA256(secret, request_body))

    **Request Body:**
    - parameters: Dictionary of parameters to substitute in prompt template
    - config_override: Optional config overrides for this execution
    - metadata: Additional metadata for this trigger

    Raises 404 for an unknown webhook path, 401 for a missing or invalid
    signature, 400 when the job is disabled, 500 on unexpected failure.
    """
    client = get_supabase()
    # FIX: removed unused `temporal_client = await get_temporal_client()` —
    # it opened a Temporal connection on every webhook delivery without ever
    # being used (start_job_execution manages its own client).

    try:
        # Fetch job by webhook path (no org scoping here: the path itself is
        # the lookup key and the secret gates access).
        webhook_url_path = f"/api/v1/jobs/webhook/{webhook_path}"
        result = (
            client.table("jobs")
            .select("*")
            .eq("webhook_url_path", webhook_url_path)
            .execute()
        )

        if not result.data:
            raise HTTPException(
                status_code=status.HTTP_404_NOT_FOUND,
                detail="Webhook not found"
            )

        job = result.data[0]

        # Verify webhook signature
        if not x_webhook_signature:
            raise HTTPException(
                status_code=status.HTTP_401_UNAUTHORIZED,
                detail="Missing X-Webhook-Signature header"
            )

        # Get raw request body for signature verification (FastAPI caches the
        # body, so re-reading after model parsing is safe).
        body = await request.body()
        if not verify_webhook_signature(body, x_webhook_signature, job["webhook_secret"]):
            raise HTTPException(
                status_code=status.HTTP_401_UNAUTHORIZED,
                detail="Invalid webhook signature"
            )

        # Validate job is enabled
        if not job.get("enabled"):
            raise HTTPException(
                status_code=status.HTTP_400_BAD_REQUEST,
                detail="Job is disabled"
            )

        # Apply config overrides if provided (shallow merge; overrides win)
        if payload.config_override:
            job = {**job, "config": {**job.get("config", {}), **payload.config_override}}

        # Start the job execution directly (same as UI does)
        workflow_id, execution_id = await start_job_execution(
            job=job,
            organization_id=job["organization_id"],
            trigger_type="webhook",
            trigger_metadata={
                "webhook_path": webhook_path,
                "metadata": payload.metadata or {},
            },
            parameters=payload.parameters,
        )

        return JobTriggerResponse(
            job_id=job["id"],
            workflow_id=workflow_id,
            execution_id=execution_id,
            status="started",
            message="Job execution started successfully via webhook",
        )

    except HTTPException:
        raise
    except Exception as e:
        logger.error(
            "failed_to_trigger_webhook",
            error=str(e),
            webhook_path=webhook_path,
        )
        raise HTTPException(
            status_code=status.HTTP_500_INTERNAL_SERVER_ERROR,
            detail=f"Failed to trigger webhook: {str(e)}"
        )
|