loom-core 1.0.0-py3-none-any.whl → 1.0.2-py3-none-any.whl

@@ -0,0 +1,524 @@
+ """Workflow API Endpoints
+
+ Provides REST endpoints for managing and querying workflows.
+ """
+
+ import asyncio
+ import json
+ import math
+ from datetime import datetime
+ from typing import Any, Optional
+
+ from fastapi import APIRouter, Depends, HTTPException
+ from fastapi.responses import StreamingResponse
+
+ from ...database.db import Database
+ from ..schemas import (
+     ErrorResponse,
+     EventListParams,
+     EventSummary,
+     LogEntry,
+     LogListParams,
+     PaginatedResponse,
+     PaginationMeta,
+     WorkflowDetail,
+     WorkflowListParams,
+     WorkflowStatus,
+     WorkflowSummary,
+ )
+
+ router = APIRouter()
+
+
+ async def get_db():
+     """Database dependency"""
+     async with Database[Any, Any]() as db:
+         yield db
+
+
+ def calculate_duration(workflow: dict) -> Optional[int]:
+     """Calculate workflow duration in seconds if completed"""
+     if workflow["status"] in ["COMPLETED", "FAILED", "CANCELED"]:
+         created = datetime.fromisoformat(workflow["created_at"].replace("Z", "+00:00"))
+         updated = datetime.fromisoformat(workflow["updated_at"].replace("Z", "+00:00"))
+         return int((updated - created).total_seconds())
+     return None
+
+
+ @router.get(
+     "/",
+     response_model=PaginatedResponse[WorkflowSummary],
+     summary="List workflows",
+     description="""
+ Retrieve a paginated list of workflows with optional filtering and sorting.
+
+ **Filtering Options:**
+ - `status`: Filter by workflow execution status
+ - `name`: Filter by workflow name (partial match, case-insensitive)
+
+ **Sorting Options:**
+ - `sort_by`: Field to sort by (created_at, updated_at, name, status)
+ - `sort_order`: Sort direction (asc/desc)
+
+ **Pagination:**
+ - `page`: Page number (1-based)
+ - `per_page`: Items per page (1-1000, default 50)
+
+ **Response includes:**
+ - List of workflow summaries for the requested page
+ - Pagination metadata (total count, page info, etc.)
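+
+ **Example request (illustrative; assumes the router is mounted at `/api/workflows`):**
+ ```
+ GET /api/workflows/?status=RUNNING&page=1&per_page=25&sort_by=created_at&sort_order=desc
+ ```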
+ """,
+     responses={
+         400: {"model": ErrorResponse, "description": "Invalid request parameters"},
+         500: {"model": ErrorResponse, "description": "Internal server error"},
+     },
+ )
+ async def list_workflows(
+     params: WorkflowListParams = Depends(), db: Database = Depends(get_db)
+ ):
+     """List workflows with pagination and filtering"""
+     try:
+         # Build WHERE clause
+         where_conditions = []
+         query_params = []
+
+         if params.status:
+             where_conditions.append("status = ?")
+             query_params.append(params.status.value)
+
+         if params.name:
+             where_conditions.append("name LIKE ?")
+             query_params.append(f"%{params.name}%")
+
+         where_clause = (
+             f"WHERE {' AND '.join(where_conditions)}" if where_conditions else ""
+         )
+
+         # Get total count
+         count_sql = f"SELECT COUNT(*) as total FROM workflows {where_clause}"
+         count_result = await db.fetchone(count_sql, tuple(query_params))
+         total = count_result["total"] if count_result else 0
+
+         # Calculate pagination
+         pages = math.ceil(total / params.per_page) if total > 0 else 1
+         offset = (params.page - 1) * params.per_page
+
+         # Build ORDER BY clause
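+         # sort_by/sort_order (and the integer per_page/offset below) are interpolated
+         # into the SQL text rather than bound as parameters, so they are assumed to be
+         # constrained to known-safe values by the validated WorkflowListParams schema.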
+         order_clause = f"ORDER BY {params.sort_by} {params.sort_order.upper()}"
+
+         # Get workflows for current page
+         workflows_sql = f"""
+             SELECT id, name, status, created_at, updated_at
+             FROM workflows
+             {where_clause}
+             {order_clause}
+             LIMIT {params.per_page} OFFSET {offset}
+         """
+
+         workflows = await db.query(workflows_sql, tuple(query_params))
+
+         # Convert to response models
+         workflow_summaries = [
+             WorkflowSummary(
+                 id=w["id"],
+                 name=w["name"],
+                 status=WorkflowStatus(w["status"]),
+                 created_at=w["created_at"],
+                 updated_at=w["updated_at"],
+                 duration=calculate_duration(dict(w)),
+             )
+             for w in workflows
+         ]
+
+         # Build pagination metadata
+         meta = PaginationMeta(
+             page=params.page,
+             per_page=params.per_page,
+             total=total,
+             pages=pages,
+             has_prev=params.page > 1,
+             has_next=params.page < pages,
+         )
+
+         return PaginatedResponse(data=workflow_summaries, meta=meta)
+
+     except Exception as e:
+         raise HTTPException(
+             status_code=500, detail=f"Failed to list workflows: {str(e)}"
+         )
+
+
+ @router.get(
+     "/{workflow_id}",
+     response_model=WorkflowDetail,
+     summary="Get workflow details",
+     description="""
+ Retrieve complete information for a specific workflow.
+
+ **Returns:**
+ - Complete workflow metadata (name, version, module, etc.)
+ - Current execution status and timing information
+ - Workflow input data and current state
+ - Event count and computed statistics
+
+ **Computed Fields:**
+ - `duration`: Execution time in seconds (for completed workflows)
+ - `event_count`: Total number of events generated
+ - `current_state`: Reconstructed workflow state from events
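+
+ **Example response (excerpt, illustrative values):**
+ ```
+ {"id": "abc123", "name": "nightly-sync", "status": "COMPLETED",
+  "duration": 330, "event_count": 42, "current_state": {"step": "done"}}
+ ```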
+ """,
+     responses={
+         404: {"model": ErrorResponse, "description": "Workflow not found"},
+         500: {"model": ErrorResponse, "description": "Internal server error"},
+     },
+ )
+ async def get_workflow(workflow_id: str, db: Database = Depends(get_db)):
+     """Get detailed workflow information"""
+     try:
+         # Get workflow info
+         workflow = await db.get_workflow_info(workflow_id)
+
+         # Get event count
+         event_count_sql = "SELECT COUNT(*) as count FROM events WHERE workflow_id = ?"
+         event_count_result = await db.fetchone(event_count_sql, (workflow_id,))
+         event_count = event_count_result["count"] if event_count_result else 0
+
+         # Reconstruct current state from STATE_SET/STATE_UPDATE events
+         state_events_sql = """
+             SELECT type, payload FROM events
+             WHERE workflow_id = ? AND type IN ('STATE_SET', 'STATE_UPDATE')
+             ORDER BY id ASC
+         """
+         state_events = await db.query(state_events_sql, (workflow_id,))
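+
+         # Payload shapes implied by the handling below: STATE_SET carries
+         # {"key": ..., "value": ...}; STATE_UPDATE carries {"values": {...}}.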
+
+         current_state = {}
+         for event in state_events:
+             payload = json.loads(event["payload"])
+             if event["type"] == "STATE_SET":
+                 current_state[payload["key"]] = payload["value"]
+             elif event["type"] == "STATE_UPDATE":
+                 current_state.update(payload["values"])
+
+         return WorkflowDetail(
+             id=workflow["id"],
+             name=workflow["name"],
+             description=workflow.get("description", ""),
+             version=workflow.get("version") or "1.0.0",
+             module=workflow["module"],
+             status=WorkflowStatus(workflow["status"]),
+             input=json.loads(workflow["input"]),
+             created_at=workflow["created_at"],
+             updated_at=workflow["updated_at"],
+             duration=calculate_duration(workflow),
+             event_count=event_count,
+             current_state=current_state,
+         )
+
+     except Exception as e:
+         if "not found" in str(e).lower():
+             raise HTTPException(
+                 status_code=404, detail=f"Workflow {workflow_id} not found"
+             )
+         raise HTTPException(status_code=500, detail=f"Failed to get workflow: {str(e)}")
+
+
+ @router.get(
+     "/{workflow_id}/events",
+     response_model=PaginatedResponse[EventSummary],
+     summary="Get workflow events",
+     description="""
+ Retrieve paginated events for a specific workflow in chronological order.
+
+ **Filtering Options:**
+ - `type`: Filter by event type
+ - `since`: Filter events created at or after the specified timestamp
+
+ **Sorting:**
+ - Events are sorted by ID (creation order) by default
+ - Use `sort_order=asc` for chronological order, `desc` for reverse
+
+ **Use Cases:**
+ - Debug workflow execution flow
+ - Audit trail and compliance
+ - Understanding state changes over time
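+
+ **Example request (illustrative):**
+ ```
+ GET /api/workflows/abc123/events?type=STEP_START&since=2024-01-01T00:00:00Z&sort_order=asc
+ ```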
+ """,
+     responses={
+         404: {"model": ErrorResponse, "description": "Workflow not found"},
+         500: {"model": ErrorResponse, "description": "Internal server error"},
+     },
+ )
+ async def get_workflow_events(
+     workflow_id: str,
+     params: EventListParams = Depends(),
+     db: Database = Depends(get_db),
+ ):
+     """Get paginated events for a workflow"""
+     try:
+         # Verify workflow exists
+         await db.get_workflow_info(workflow_id)
+
+         # Build WHERE clause
+         where_conditions = ["workflow_id = ?"]
+         query_params = [workflow_id]
+
+         if params.type:
+             where_conditions.append("type = ?")
+             query_params.append(params.type.value)
+
+         if params.since:
+             where_conditions.append("created_at >= ?")
+             query_params.append(params.since.isoformat())
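+             # Text comparison on created_at is assumed valid because timestamps
+             # are stored in lexicographically ordered ISO-8601 form.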
+
+         where_clause = f"WHERE {' AND '.join(where_conditions)}"
+
+         # Get total count
+         count_sql = f"SELECT COUNT(*) as total FROM events {where_clause}"
+         count_result = await db.fetchone(count_sql, tuple(query_params))
+         total = count_result["total"] if count_result else 0
+
+         # Calculate pagination
+         pages = math.ceil(total / params.per_page) if total > 0 else 1
+         offset = (params.page - 1) * params.per_page
+
+         # Get events for current page
+         order_clause = f"ORDER BY {params.sort_by} {params.sort_order.upper()}"
+         events_sql = f"""
+             SELECT id, workflow_id, type, payload, created_at
+             FROM events
+             {where_clause}
+             {order_clause}
+             LIMIT {params.per_page} OFFSET {offset}
+         """
+
+         events = await db.query(events_sql, tuple(query_params))
+
+         # Convert to response models
+         event_summaries = [
+             EventSummary(
+                 id=e["id"],
+                 workflow_id=e["workflow_id"],
+                 type=e["type"],
+                 payload=json.loads(e["payload"]),
+                 created_at=e["created_at"],
+             )
+             for e in events
+         ]
+
+         # Build pagination metadata
+         meta = PaginationMeta(
+             page=params.page,
+             per_page=params.per_page,
+             total=total,
+             pages=pages,
+             has_prev=params.page > 1,
+             has_next=params.page < pages,
+         )
+
+         return PaginatedResponse(data=event_summaries, meta=meta)
+
+     except HTTPException:
+         raise
+     except Exception as e:
+         if "not found" in str(e).lower():
+             raise HTTPException(
+                 status_code=404, detail=f"Workflow {workflow_id} not found"
+             )
+         raise HTTPException(status_code=500, detail=f"Failed to get events: {str(e)}")
+
+
+ @router.get(
+     "/{workflow_id}/logs",
+     response_model=PaginatedResponse[LogEntry],
+     summary="Get workflow logs",
+     description="""
+ Retrieve paginated log entries for a specific workflow.
+
+ **Filtering Options:**
+ - `level`: Filter by log level (DEBUG, INFO, WARNING, ERROR)
+ - `since`: Filter logs created at or after the specified timestamp
+
+ **Use Cases:**
+ - Debug workflow execution issues
+ - Monitor workflow progress and state changes
+ - Troubleshoot failed workflows
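+
+ **Example request (illustrative):**
+ ```
+ GET /api/workflows/abc123/logs?level=ERROR&since=2024-01-01T00:00:00Z
+ ```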
+ """,
+     responses={
+         404: {"model": ErrorResponse, "description": "Workflow not found"},
+         500: {"model": ErrorResponse, "description": "Internal server error"},
+     },
+ )
+ async def get_workflow_logs(
+     workflow_id: str, params: LogListParams = Depends(), db: Database = Depends(get_db)
+ ):
+     """Get paginated logs for a workflow"""
+     try:
+         # Verify workflow exists
+         workflow_info = await db.get_workflow_info(workflow_id)
+
+         # Build WHERE clause
+         where_conditions = ["workflow_id = ?"]
+         query_params = [workflow_id]
+
+         if params.level:
+             where_conditions.append("level = ?")
+             query_params.append(params.level.value)
+
+         if params.since:
+             where_conditions.append("created_at >= ?")
+             query_params.append(params.since.isoformat())
+
+         where_clause = f"WHERE {' AND '.join(where_conditions)}"
+
+         # Get total count
+         count_sql = f"SELECT COUNT(*) as total FROM logs {where_clause}"
+         count_result = await db.fetchone(count_sql, tuple(query_params))
+         total = count_result["total"] if count_result else 0
+
+         # Calculate pagination
+         pages = math.ceil(total / params.per_page) if total > 0 else 1
+         offset = (params.page - 1) * params.per_page
+
+         # Get logs for current page
+         order_clause = f"ORDER BY {params.sort_by} {params.sort_order.upper()}"
+         logs_sql = f"""
+             SELECT id, workflow_id, level, message, created_at
+             FROM logs
+             {where_clause}
+             {order_clause}
+             LIMIT {params.per_page} OFFSET {offset}
+         """
+
+         logs = await db.query(logs_sql, tuple(query_params))
+
+         # Convert to response models
+         log_entries = [
+             LogEntry(
+                 id=log["id"],
+                 workflow_id=log["workflow_id"],
+                 workflow_name=workflow_info["name"],
+                 level=log["level"],
+                 message=log["message"],
+                 created_at=log["created_at"],
+             )
+             for log in logs
+         ]
+
+         # Build pagination metadata
+         meta = PaginationMeta(
+             page=params.page,
+             per_page=params.per_page,
+             total=total,
+             pages=pages,
+             has_prev=params.page > 1,
+             has_next=params.page < pages,
+         )
+
+         return PaginatedResponse(data=log_entries, meta=meta)
+
+     except HTTPException:
+         raise
+     except Exception as e:
+         if "not found" in str(e).lower():
+             raise HTTPException(
+                 status_code=404, detail=f"Workflow {workflow_id} not found"
+             )
+         raise HTTPException(status_code=500, detail=f"Failed to get logs: {str(e)}")
+
+
+ @router.get(
+     "/{workflow_id}/events/stream",
+     summary="Stream workflow events (SSE)",
+     description="""
+ **Server-Sent Events stream for real-time workflow updates.**
+
+ This endpoint provides a persistent connection that streams new events
+ as they occur for the specified workflow.
+
+ **Stream Format:**
+ ```
+ data: {"id": 123, "type": "STEP_START", "payload": {...}, "created_at": "..."}
+
+ data: {"id": 124, "type": "STATE_SET", "payload": {...}, "created_at": "..."}
+ ```
+
+ **Usage (JavaScript):**
+ ```javascript
+ const eventSource = new EventSource('/api/workflows/abc123/events/stream');
+ eventSource.onmessage = function(event) {
+     const data = JSON.parse(event.data);
+     console.log('New event:', data);
+ };
+ ```
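+
+ **Usage (curl, illustrative; host and port depend on the deployment):**
+ ```
+ curl -N http://localhost:8000/api/workflows/abc123/events/stream
+ ```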
+
+ **Connection Management:**
+ - Auto-reconnects on connection loss (EventSource default behavior)
+ - Streams only new events (no historical replay)
+ - Closes automatically when workflow reaches terminal state
+ """,
+     responses={404: {"model": ErrorResponse, "description": "Workflow not found"}},
+ )
+ async def stream_workflow_events(workflow_id: str, db: Database = Depends(get_db)):
+     """Stream workflow events via Server-Sent Events"""
+     try:
+         # Verify workflow exists
+         await db.get_workflow_info(workflow_id)
+
+         async def event_stream():
+             last_event_id = 0
+
+             # Get current max event ID to avoid replaying history
+             max_id_sql = "SELECT MAX(id) as max_id FROM events WHERE workflow_id = ?"
+             max_result = await db.fetchone(max_id_sql, (workflow_id,))
+             if max_result and max_result["max_id"]:
+                 last_event_id = max_result["max_id"]
+
+             while True:
+                 try:
+                     # Check for new events
+                     events_sql = """
+                         SELECT id, type, payload, created_at
+                         FROM events
+                         WHERE workflow_id = ? AND id > ?
+                         ORDER BY id ASC
+                     """
+                     new_events = await db.query(
+                         events_sql, (workflow_id, last_event_id)
+                     )
+
+                     for event in new_events:
+                         event_data = {
+                             "id": event["id"],
+                             "type": event["type"],
+                             "payload": json.loads(event["payload"]),
+                             "created_at": event["created_at"],
+                         }
+                         yield f"data: {json.dumps(event_data)}\n\n"
+                         last_event_id = event["id"]
+
+                         # Close stream if workflow is done
+                         if event["type"] in [
+                             "WORKFLOW_COMPLETED",
+                             "WORKFLOW_FAILED",
+                             "WORKFLOW_CANCELLED",
+                         ]:
+                             return
+
+                     # Poll every second
+                     await asyncio.sleep(1)
+
+                 except Exception as e:
+                     yield f"data: {json.dumps({'error': str(e)})}\n\n"
+                     return
+
+         return StreamingResponse(
+             event_stream(),
+             media_type="text/event-stream",
+             headers={
+                 "Cache-Control": "no-cache",
+                 "Connection": "keep-alive",
+             },
+         )
+
+     except Exception as e:
+         if "not found" in str(e).lower():
+             raise HTTPException(
+                 status_code=404, detail=f"Workflow {workflow_id} not found"
+             )
+         raise HTTPException(
+             status_code=500, detail=f"Failed to stream events: {str(e)}"
+         )