loom-core 1.0.1.tar.gz → 1.0.2.tar.gz

This diff shows the changes between publicly released package versions as they appear in their respective public registries. It is provided for informational purposes only.
Files changed (71)
  1. {loom_core-1.0.1 → loom_core-1.0.2}/PKG-INFO +1 -1
  2. loom_core-1.0.2/loom/web/__init__.py +5 -0
  3. loom_core-1.0.2/loom/web/api/__init__.py +4 -0
  4. loom_core-1.0.2/loom/web/api/events.py +315 -0
  5. loom_core-1.0.2/loom/web/api/graphs.py +236 -0
  6. loom_core-1.0.2/loom/web/api/logs.py +342 -0
  7. loom_core-1.0.2/loom/web/api/stats.py +283 -0
  8. loom_core-1.0.2/loom/web/api/tasks.py +333 -0
  9. loom_core-1.0.2/loom/web/api/workflows.py +524 -0
  10. loom_core-1.0.2/loom/web/main.py +306 -0
  11. loom_core-1.0.2/loom/web/schemas.py +656 -0
  12. {loom_core-1.0.1 → loom_core-1.0.2}/loom_core.egg-info/PKG-INFO +1 -1
  13. {loom_core-1.0.1 → loom_core-1.0.2}/loom_core.egg-info/SOURCES.txt +10 -0
  14. {loom_core-1.0.1 → loom_core-1.0.2}/pyproject.toml +2 -2
  15. {loom_core-1.0.1 → loom_core-1.0.2}/LICENSE +0 -0
  16. {loom_core-1.0.1 → loom_core-1.0.2}/MANIFEST.in +0 -0
  17. {loom_core-1.0.1 → loom_core-1.0.2}/QUICKSTART.md +0 -0
  18. {loom_core-1.0.1 → loom_core-1.0.2}/README.md +0 -0
  19. {loom_core-1.0.1 → loom_core-1.0.2}/loom/__init__.py +0 -0
  20. {loom_core-1.0.1 → loom_core-1.0.2}/loom/__main__.py +0 -0
  21. {loom_core-1.0.1 → loom_core-1.0.2}/loom/cli/__init__.py +0 -0
  22. {loom_core-1.0.1 → loom_core-1.0.2}/loom/cli/cli.py +0 -0
  23. {loom_core-1.0.1 → loom_core-1.0.2}/loom/common/activity.py +0 -0
  24. {loom_core-1.0.1 → loom_core-1.0.2}/loom/common/config.py +0 -0
  25. {loom_core-1.0.1 → loom_core-1.0.2}/loom/common/errors.py +0 -0
  26. {loom_core-1.0.1 → loom_core-1.0.2}/loom/common/workflow.py +0 -0
  27. {loom_core-1.0.1 → loom_core-1.0.2}/loom/core/__init__.py +0 -0
  28. {loom_core-1.0.1 → loom_core-1.0.2}/loom/core/compiled.py +0 -0
  29. {loom_core-1.0.1 → loom_core-1.0.2}/loom/core/context.py +0 -0
  30. {loom_core-1.0.1 → loom_core-1.0.2}/loom/core/engine.py +0 -0
  31. {loom_core-1.0.1 → loom_core-1.0.2}/loom/core/graph.py +0 -0
  32. {loom_core-1.0.1 → loom_core-1.0.2}/loom/core/handle.py +0 -0
  33. {loom_core-1.0.1 → loom_core-1.0.2}/loom/core/logger.py +0 -0
  34. {loom_core-1.0.1 → loom_core-1.0.2}/loom/core/runner.py +0 -0
  35. {loom_core-1.0.1 → loom_core-1.0.2}/loom/core/state.py +0 -0
  36. {loom_core-1.0.1 → loom_core-1.0.2}/loom/core/worker.py +0 -0
  37. {loom_core-1.0.1 → loom_core-1.0.2}/loom/core/workflow.py +0 -0
  38. {loom_core-1.0.1 → loom_core-1.0.2}/loom/database/__init__.py +0 -0
  39. {loom_core-1.0.1 → loom_core-1.0.2}/loom/database/db.py +0 -0
  40. {loom_core-1.0.1 → loom_core-1.0.2}/loom/decorators/__init__.py +0 -0
  41. {loom_core-1.0.1 → loom_core-1.0.2}/loom/decorators/activity.py +0 -0
  42. {loom_core-1.0.1 → loom_core-1.0.2}/loom/decorators/workflow.py +0 -0
  43. {loom_core-1.0.1 → loom_core-1.0.2}/loom/lib/progress.py +0 -0
  44. {loom_core-1.0.1 → loom_core-1.0.2}/loom/lib/utils.py +0 -0
  45. {loom_core-1.0.1 → loom_core-1.0.2}/loom/migrations/down/001_setup_pragma.sql +0 -0
  46. {loom_core-1.0.1 → loom_core-1.0.2}/loom/migrations/down/002_create_workflows.sql +0 -0
  47. {loom_core-1.0.1 → loom_core-1.0.2}/loom/migrations/down/003.create_events.sql +0 -0
  48. {loom_core-1.0.1 → loom_core-1.0.2}/loom/migrations/down/004.create_tasks.sql +0 -0
  49. {loom_core-1.0.1 → loom_core-1.0.2}/loom/migrations/down/005.create_indexes.sql +0 -0
  50. {loom_core-1.0.1 → loom_core-1.0.2}/loom/migrations/down/006_auto_update_triggers.sql +0 -0
  51. {loom_core-1.0.1 → loom_core-1.0.2}/loom/migrations/down/007_create_logs.sql +0 -0
  52. {loom_core-1.0.1 → loom_core-1.0.2}/loom/migrations/up/001_setup_pragma.sql +0 -0
  53. {loom_core-1.0.1 → loom_core-1.0.2}/loom/migrations/up/002_create_workflows.sql +0 -0
  54. {loom_core-1.0.1 → loom_core-1.0.2}/loom/migrations/up/003_create_events.sql +0 -0
  55. {loom_core-1.0.1 → loom_core-1.0.2}/loom/migrations/up/004_create_tasks.sql +0 -0
  56. {loom_core-1.0.1 → loom_core-1.0.2}/loom/migrations/up/005_create_indexes.sql +0 -0
  57. {loom_core-1.0.1 → loom_core-1.0.2}/loom/migrations/up/006_auto_update_triggers.sql +0 -0
  58. {loom_core-1.0.1 → loom_core-1.0.2}/loom/migrations/up/007_create_logs.sql +0 -0
  59. {loom_core-1.0.1 → loom_core-1.0.2}/loom/schemas/__init__.py +0 -0
  60. {loom_core-1.0.1 → loom_core-1.0.2}/loom/schemas/activity.py +0 -0
  61. {loom_core-1.0.1 → loom_core-1.0.2}/loom/schemas/database.py +0 -0
  62. {loom_core-1.0.1 → loom_core-1.0.2}/loom/schemas/events.py +0 -0
  63. {loom_core-1.0.1 → loom_core-1.0.2}/loom/schemas/graph.py +0 -0
  64. {loom_core-1.0.1 → loom_core-1.0.2}/loom/schemas/tasks.py +0 -0
  65. {loom_core-1.0.1 → loom_core-1.0.2}/loom/schemas/workflow.py +0 -0
  66. {loom_core-1.0.1 → loom_core-1.0.2}/loom_core.egg-info/dependency_links.txt +0 -0
  67. {loom_core-1.0.1 → loom_core-1.0.2}/loom_core.egg-info/entry_points.txt +0 -0
  68. {loom_core-1.0.1 → loom_core-1.0.2}/loom_core.egg-info/requires.txt +0 -0
  69. {loom_core-1.0.1 → loom_core-1.0.2}/loom_core.egg-info/top_level.txt +0 -0
  70. {loom_core-1.0.1 → loom_core-1.0.2}/setup.cfg +0 -0
  71. {loom_core-1.0.1 → loom_core-1.0.2}/setup.py +0 -0
{loom_core-1.0.1 → loom_core-1.0.2}/PKG-INFO
@@ -1,6 +1,6 @@
 Metadata-Version: 2.4
 Name: loom-core
-Version: 1.0.1
+Version: 1.0.2
 Summary: Durable workflow orchestration engine for Python
 Home-page: https://github.com/satadeep3927/loom
 Author: Satadeep Dasgupta
loom_core-1.0.2/loom/web/__init__.py
@@ -0,0 +1,5 @@
+"""Loom Web Dashboard Module
+
+This module provides a FastAPI-based web interface for monitoring and managing
+Loom workflows, including REST APIs and server-sent events for real-time updates.
+"""
loom_core-1.0.2/loom/web/api/__init__.py
@@ -0,0 +1,4 @@
+"""API Modules
+
+This package contains all API route handlers organized by resource type.
+"""
loom_core-1.0.2/loom/web/api/events.py
@@ -0,0 +1,315 @@
+"""Event API Endpoints
+
+Provides REST endpoints for querying workflow events across the system.
+"""
+
+import json
+import math
+from typing import Any
+
+from fastapi import APIRouter, Depends, HTTPException
+from fastapi.responses import StreamingResponse
+
+from ...database.db import Database
+from ..schemas import (
+    ErrorResponse,
+    EventDetail,
+    EventListParams,
+    EventType,
+    PaginatedResponse,
+    PaginationMeta,
+    WorkflowStatus,
+)
+
+router = APIRouter()
+
+
+async def get_db():
+    """Database dependency"""
+    async with Database[Any, Any]() as db:
+        yield db
+
+
+@router.get(
+    "/",
+    response_model=PaginatedResponse[EventDetail],
+    summary="List events",
+    description="""
+    Retrieve a paginated list of events across all workflows with optional filtering.
+
+    **Filtering Options:**
+    - `workflow_id`: Filter by specific workflow
+    - `type`: Filter by event type (WORKFLOW_STARTED, STEP_START, etc.)
+    - `since`: Filter events after specified timestamp
+
+    **Sorting Options:**
+    - `sort_by`: Field to sort by (id, created_at)
+    - `sort_order`: Sort direction (asc/desc, default desc for recent-first)
+
+    **Pagination:**
+    - `page`: Page number (1-based)
+    - `per_page`: Items per page (1-1000, default 100)
+
+    **Use Cases:**
+    - System-wide event monitoring
+    - Debugging cross-workflow issues
+    - Audit trail and compliance reporting
+    """,
+    responses={
+        400: {"model": ErrorResponse, "description": "Invalid request parameters"},
+        500: {"model": ErrorResponse, "description": "Internal server error"},
+    },
+)
+async def list_events(
+    params: EventListParams = Depends(), db: Database = Depends(get_db)
+):
+    """List events with pagination and filtering"""
+    try:
+        # Build WHERE clause
+        where_conditions = []
+        query_params = []
+
+        if params.workflow_id:
+            where_conditions.append("e.workflow_id = ?")
+            query_params.append(params.workflow_id)
+
+        if params.type:
+            where_conditions.append("e.type = ?")
+            query_params.append(params.type.value)
+
+        if params.since:
+            where_conditions.append("e.created_at >= ?")
+            query_params.append(params.since.isoformat())
+
+        where_clause = (
+            f"WHERE {' AND '.join(where_conditions)}" if where_conditions else ""
+        )
+
+        # Get total count
+        count_sql = f"""
+            SELECT COUNT(*) as total
+            FROM events e
+            JOIN workflows w ON e.workflow_id = w.id
+            {where_clause}
+        """
+        count_result = await db.fetchone(count_sql, tuple(query_params))
+        total = count_result["total"] if count_result else 0
+
+        # Calculate pagination
+        pages = math.ceil(total / params.per_page) if total > 0 else 1
+        offset = (params.page - 1) * params.per_page
+
+        # Build ORDER BY clause
+        order_clause = f"ORDER BY e.{params.sort_by} {params.sort_order.upper()}"
+
+        # Get events for current page with workflow info
+        events_sql = f"""
+            SELECT
+                e.id,
+                e.workflow_id,
+                w.name as workflow_name,
+                w.status as workflow_status,
+                e.type,
+                e.payload,
+                e.created_at
+            FROM events e
+            JOIN workflows w ON e.workflow_id = w.id
+            {where_clause}
+            {order_clause}
+            LIMIT {params.per_page} OFFSET {offset}
+        """
+
+        events = await db.query(events_sql, tuple(query_params))
+
+        # Convert to response models
+        event_details = [
+            EventDetail(
+                id=e["id"],
+                workflow_id=e["workflow_id"],
+                workflow_name=e["workflow_name"],
+                workflow_status=WorkflowStatus(e["workflow_status"]),
+                type=EventType(e["type"]),
+                payload=json.loads(e["payload"]),
+                created_at=e["created_at"],
+            )
+            for e in events
+        ]
+
+        # Build pagination metadata
+        meta = PaginationMeta(
+            page=params.page,
+            per_page=params.per_page,
+            total=total,
+            pages=pages,
+            has_prev=params.page > 1,
+            has_next=params.page < pages,
+        )
+
+        return PaginatedResponse(data=event_details, meta=meta)
+
+    except Exception as e:
+        raise HTTPException(status_code=500, detail=f"Failed to list events: {str(e)}")
+
+
+@router.get(
+    "/{event_id}",
+    response_model=EventDetail,
+    summary="Get event details",
+    description="""
+    Retrieve complete information for a specific event.
+
+    **Returns:**
+    - Complete event data including payload
+    - Parent workflow context and status
+    - Event timing information
+
+    **Use Cases:**
+    - Debug specific event handling
+    - Investigate event payload data
+    - Understand event context
+    """,
+    responses={
+        404: {"model": ErrorResponse, "description": "Event not found"},
+        500: {"model": ErrorResponse, "description": "Internal server error"},
+    },
+)
+async def get_event(event_id: int, db: Database = Depends(get_db)):
+    """Get detailed event information"""
+    try:
+        # Get event info with workflow context
+        event_sql = """
+            SELECT
+                e.id,
+                e.workflow_id,
+                w.name as workflow_name,
+                w.status as workflow_status,
+                e.type,
+                e.payload,
+                e.created_at
+            FROM events e
+            JOIN workflows w ON e.workflow_id = w.id
+            WHERE e.id = ?
+        """
+
+        event = await db.fetchone(event_sql, (event_id,))
+        if not event:
+            raise HTTPException(status_code=404, detail=f"Event {event_id} not found")
+
+        return EventDetail(
+            id=event["id"],
+            workflow_id=event["workflow_id"],
+            workflow_name=event["workflow_name"],
+            workflow_status=WorkflowStatus(event["workflow_status"]),
+            type=EventType(event["type"]),
+            payload=json.loads(event["payload"]),
+            created_at=event["created_at"],
+        )
+
+    except HTTPException:
+        raise
+    except Exception as e:
+        raise HTTPException(status_code=500, detail=f"Failed to get event: {str(e)}")
+
+
+@router.get(
+    "/stream",
+    summary="Stream all events (SSE)",
+    description="""
+    **Server-Sent Events stream for real-time system-wide event monitoring.**
+
+    This endpoint provides a persistent connection that streams new events
+    as they occur across all workflows in the system.
+
+    **Stream Format:**
+    ```
+    data: {"id": 123, "workflow_id": "abc", "type": "STEP_START", "payload": {...}}
+
+    data: {"id": 124, "workflow_id": "def", "type": "WORKFLOW_COMPLETED", "payload": {...}}
+    ```
+
+    **Usage (JavaScript):**
+    ```javascript
+    const eventSource = new EventSource('/api/events/stream');
+    eventSource.onmessage = function(event) {
+        const data = JSON.parse(event.data);
+        console.log('System event:', data);
+    };
+    ```
+
+    **Use Cases:**
+    - Real-time system monitoring dashboard
+    - Live activity feeds
+    - Event-driven UI updates
+    - System health monitoring
+    """,
+    responses={500: {"model": ErrorResponse, "description": "Internal server error"}},
)
+async def stream_all_events(db: Database = Depends(get_db)):
+    """Stream all system events via Server-Sent Events"""
+    try:
+
+        async def event_stream():
+            import asyncio
+            import json
+
+            last_event_id = 0
+
+            # Get current max event ID to avoid replaying history
+            max_id_sql = "SELECT MAX(id) as max_id FROM events"
+            max_result = await db.fetchone(max_id_sql, ())
+            if max_result and max_result["max_id"]:
+                last_event_id = max_result["max_id"]
+
+            while True:
+                try:
+                    # Check for new events across all workflows
+                    events_sql = """
+                        SELECT
+                            e.id,
+                            e.workflow_id,
+                            w.name as workflow_name,
+                            e.type,
+                            e.payload,
+                            e.created_at
+                        FROM events e
+                        JOIN workflows w ON e.workflow_id = w.id
+                        WHERE e.id > ?
+                        ORDER BY e.id ASC
+                        LIMIT 100
+                    """
+                    new_events = await db.query(events_sql, (last_event_id,))
+
+                    for event in new_events:
+                        event_data = {
+                            "id": event["id"],
+                            "workflow_id": event["workflow_id"],
+                            "workflow_name": event["workflow_name"],
+                            "type": event["type"],
+                            "payload": json.loads(event["payload"]),
+                            "created_at": event["created_at"],
+                        }
+                        yield f"data: {json.dumps(event_data)}\n\n"
+                        last_event_id = event["id"]
+
+                    # Poll every second
+                    await asyncio.sleep(1)
+
+                except Exception as e:
+                    yield f"data: {json.dumps({'error': str(e)})}\n\n"
+                    # Don't break on errors, continue polling
+                    await asyncio.sleep(5)
+
+        return StreamingResponse(
+            event_stream(),
+            media_type="text/plain",
+            headers={
+                "Cache-Control": "no-cache",
+                "Connection": "keep-alive",
+                "Content-Type": "text/event-stream",
+            },
+        )
+
+    except Exception as e:
+        raise HTTPException(
+            status_code=500, detail=f"Failed to stream events: {str(e)}"
+        )
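
For orientation, here is a minimal client sketch for the events API added above. It is illustrative only and does not ship with loom-core: the base URL `http://localhost:8000` is a placeholder, and the `/api/events` mount prefix is an assumption taken from the docstring's JavaScript example rather than from this diff (the actual mounting happens in loom/web/main.py, not shown in this section).

```python
"""Hypothetical client for the loom-core 1.0.2 events API (sketch only)."""
import json

import httpx

# Assumption: the dashboard runs locally and mounts the router at /api/events.
BASE_URL = "http://localhost:8000"


def list_recent_events() -> None:
    """Fetch the first page of events (newest first, per the default sort)."""
    resp = httpx.get(f"{BASE_URL}/api/events/", params={"page": 1, "per_page": 10})
    resp.raise_for_status()
    body = resp.json()  # PaginatedResponse: {"data": [...], "meta": {...}}
    for event in body["data"]:
        print(event["id"], event["type"], event["workflow_name"])
    print("total events:", body["meta"]["total"])


def follow_event_stream() -> None:
    """Consume the Server-Sent Events stream line by line."""
    with httpx.stream("GET", f"{BASE_URL}/api/events/stream", timeout=None) as resp:
        for line in resp.iter_lines():
            # SSE data frames look like: data: {"id": 123, "type": ...}
            if line.startswith("data: "):
                event = json.loads(line[len("data: "):])
                print("event:", event.get("type"), event.get("workflow_id"))


if __name__ == "__main__":
    list_recent_events()
```

The field names used (`data`, `meta`, `total`, `type`, `workflow_name`) come directly from the `PaginatedResponse`, `PaginationMeta`, and `EventDetail` usage in the diff above.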
loom_core-1.0.2/loom/web/api/graphs.py
@@ -0,0 +1,236 @@
+"""Graph API Endpoints
+
+Provides REST endpoints for generating workflow definition graphs.
+"""
+
+from typing import Any, Dict
+from fastapi import APIRouter, HTTPException, Query, Depends
+from enum import Enum
+
+from ...core.graph import WorkflowAnalyzer, generate_mermaid_graph, generate_graphviz_dot
+from ...common.workflow import workflow_registry
+from ...database.db import Database
+from ..schemas import ErrorResponse
+from ...schemas.graph import WorkflowDefinitionGraph, GraphResponse, GraphFormat
+
+router = APIRouter()
+
+
+async def get_db():
+    """Database dependency"""
+    async with Database[Any, Any]() as db:
+        yield db
+
+
+class GraphFormatEnum(str, Enum):
+    """Supported graph output formats"""
+    JSON = "json"
+    MERMAID = "mermaid"
+    DOT = "dot"
+
+
+@router.get(
+    "/workflow/{workflow_id}/definition",
+    response_model=WorkflowDefinitionGraph,
+    summary="Get workflow definition graph",
+    description="""
+    Generate a static workflow definition graph showing the structure of steps,
+    activities, timers, and state dependencies as defined in the workflow code.
+
+    This is similar to Airflow's DAG view - it shows the workflow structure
+    based on code analysis, not runtime execution.
+
+    **Features:**
+    - Step sequence and dependencies
+    - Activity calls within each step
+    - Timer/sleep operations
+    - State read/write dependencies
+    - Workflow metadata
+
+    **Node Types:**
+    - `step`: Workflow steps (blue boxes)
+    - `activity`: Activity calls (green circles)
+    - `timer`: Sleep/delay operations (yellow diamonds)
+    - `state`: State variables (red hexagons)
+
+    **Edge Types:**
+    - `sequence`: Step-to-step flow
+    - `calls`: Step calls activity
+    - `reads`: Reads from state
+    - `writes`: Writes to state
+    - `waits`: Step waits on timer
+    """,
+    responses={
+        404: {"model": ErrorResponse, "description": "Workflow not found"},
+        400: {"model": ErrorResponse, "description": "Invalid workflow definition"},
+        500: {"model": ErrorResponse, "description": "Analysis failed"}
+    }
+)
+async def get_workflow_definition_graph(workflow_id: str, db: Database = Depends(get_db)):
+    """Get workflow definition graph as structured data"""
+    try:
+        # Get workflow info from database
+        workflow_info = await db.get_workflow_info(workflow_id)
+        if not workflow_info:
+            raise HTTPException(
+                status_code=404,
+                detail=f"Workflow with ID '{workflow_id}' not found"
+            )
+
+        # Get workflow class using module and name from database
+        workflow_class = workflow_registry(workflow_info["module"], workflow_info["name"])
+
+        # Analyze workflow definition
+        graph = WorkflowAnalyzer.analyze_workflow_definition(workflow_class)
+
+        return graph
+
+    except ValueError as e:
+        raise HTTPException(status_code=400, detail=str(e))
+    except (ModuleNotFoundError, AttributeError, TypeError) as e:
+        raise HTTPException(status_code=400, detail=f"Failed to load workflow class: {str(e)}")
+    except Exception as e:
+        raise HTTPException(
+            status_code=500,
+            detail=f"Failed to analyze workflow: {str(e)}"
+        )
+
+
+@router.get(
+    "/workflow/{workflow_id}/definition/render",
+    response_model=GraphResponse,
+    summary="Render workflow definition graph",
+    description="""
+    Generate a workflow definition graph in various output formats for visualization.
+
+    **Supported Formats:**
+    - `json`: Structured JSON data (same as /definition endpoint)
+    - `mermaid`: Mermaid diagram syntax for rendering
+    - `dot`: GraphViz DOT format for advanced visualization
+
+    **Usage Examples:**
+    - Use `mermaid` format to render in web UIs or documentation
+    - Use `dot` format for GraphViz tools (dot, neato, fdp, etc.)
+    - Use `json` format for custom visualization libraries
+
+    **Mermaid Example:**
+    ```
+    graph TD
+        step_process["Process Order"]
+        activity_payment("process_payment")
+        state_paid{state.payment_confirmed}
+        step_process --> activity_payment
+        step_process --> state_paid
+    ```
+    """,
+    responses={
+        404: {"model": ErrorResponse, "description": "Workflow not found"},
+        400: {"model": ErrorResponse, "description": "Invalid format or workflow"},
+        500: {"model": ErrorResponse, "description": "Rendering failed"}
+    }
+)
+async def render_workflow_definition_graph(
+    workflow_id: str,
+    format: GraphFormatEnum = Query(
+        GraphFormatEnum.MERMAID,
+        description="Output format for the graph"
+    ),
+    db: Database = Depends(get_db)
+):
+    """Render workflow definition graph in specified format"""
+    try:
+        # Get workflow info from database
+        workflow_info = await db.get_workflow_info(workflow_id)
+        if not workflow_info:
+            raise HTTPException(
+                status_code=404,
+                detail=f"Workflow with ID '{workflow_id}' not found"
+            )
+
+        # Get workflow class using module and name from database
+        workflow_class = workflow_registry(workflow_info["module"], workflow_info["name"])
+
+        # Analyze workflow definition
+        graph = WorkflowAnalyzer.analyze_workflow_definition(workflow_class)
+
+        # Generate output based on format
+        if format == GraphFormatEnum.JSON:
+            content = graph.json(indent=2)
+        elif format == GraphFormatEnum.MERMAID:
+            content = generate_mermaid_graph(graph)
+        elif format == GraphFormatEnum.DOT:
+            content = generate_graphviz_dot(graph)
+        else:
+            raise HTTPException(
+                status_code=400,
+                detail=f"Unsupported format: {format}"
+            )
+
+        return GraphResponse(
+            format=format.value,
+            content=content,
+            metadata={
+                "workflow_id": workflow_id,
+                "workflow_name": workflow_info["name"],
+                "node_count": len(graph.nodes),
+                "edge_count": len(graph.edges),
+                **graph.metadata
+            }
+        )
+
+    except ValueError as e:
+        raise HTTPException(status_code=400, detail=str(e))
+    except (ModuleNotFoundError, AttributeError, TypeError) as e:
+        raise HTTPException(status_code=400, detail=f"Failed to load workflow class: {str(e)}")
+    except Exception as e:
+        raise HTTPException(
+            status_code=500,
+            detail=f"Failed to render graph: {str(e)}"
+        )
+
+
+@router.get(
+    "/workflows/",
+    response_model=Dict[str, Any],
+    summary="List workflows for graph generation",
+    description="""
+    Get a list of all workflows in the database that can be analyzed for graphs.
+
+    Returns workflow IDs, names, versions, and basic metadata for each workflow.
+    Use the workflow ID with the graph endpoints to generate visualizations.
+    """
+)
+async def list_workflows_for_graphs(db: Database = Depends(get_db)):
+    """List all workflows available for graph generation"""
+    try:
+        # Get all workflows from database
+        workflows_sql = """
+            SELECT id, name, description, version, module, status, created_at, updated_at
+            FROM workflows
+            ORDER BY created_at DESC
+        """
+        workflows = await db.query(workflows_sql)
+
+        workflow_list = []
+        for workflow in workflows:
+            workflow_list.append({
+                "id": workflow["id"],
+                "name": workflow["name"],
+                "description": workflow["description"] or "",
+                "version": workflow["version"],
+                "module": workflow["module"],
+                "status": workflow["status"],
+                "created_at": workflow["created_at"],
+                "updated_at": workflow["updated_at"]
+            })
+
+        return {
+            "total_count": len(workflow_list),
+            "workflows": workflow_list
+        }
+
+    except Exception as e:
+        raise HTTPException(
+            status_code=500,
+            detail=f"Failed to list workflows: {str(e)}"
+        )
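
And a similar hedged sketch for the graph endpoints above. The `/api/graphs` prefix and base URL are assumptions, not confirmed by this diff (the router is mounted in loom/web/main.py, which this section does not show), and the workflow ID is a placeholder; the route path, `format` query parameter, and `GraphResponse` fields (`format`, `content`, `metadata`) are taken from the code above.

```python
"""Hypothetical client for the loom-core 1.0.2 graph API (sketch only)."""
import httpx

# Assumptions: local dashboard, graph router mounted at /api/graphs.
BASE_URL = "http://localhost:8000"


def fetch_mermaid(workflow_id: str) -> str:
    """Return the Mermaid source for a workflow's definition graph."""
    resp = httpx.get(
        f"{BASE_URL}/api/graphs/workflow/{workflow_id}/definition/render",
        params={"format": "mermaid"},  # json and dot are also supported
    )
    resp.raise_for_status()
    body = resp.json()  # GraphResponse: format, content, metadata
    print("nodes:", body["metadata"]["node_count"],
          "edges:", body["metadata"]["edge_count"])
    return body["content"]


if __name__ == "__main__":
    # "abc123" is a placeholder; list real IDs via GET /api/graphs/workflows/
    print(fetch_mermaid("abc123"))
```

The returned `content` string can be pasted into any Mermaid renderer, which matches the intended use described in the endpoint's own docstring.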