kailash-0.1.0-py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (69)
  1. kailash/__init__.py +31 -0
  2. kailash/__main__.py +11 -0
  3. kailash/cli/__init__.py +5 -0
  4. kailash/cli/commands.py +563 -0
  5. kailash/manifest.py +778 -0
  6. kailash/nodes/__init__.py +23 -0
  7. kailash/nodes/ai/__init__.py +26 -0
  8. kailash/nodes/ai/agents.py +417 -0
  9. kailash/nodes/ai/models.py +488 -0
  10. kailash/nodes/api/__init__.py +52 -0
  11. kailash/nodes/api/auth.py +567 -0
  12. kailash/nodes/api/graphql.py +480 -0
  13. kailash/nodes/api/http.py +598 -0
  14. kailash/nodes/api/rate_limiting.py +572 -0
  15. kailash/nodes/api/rest.py +665 -0
  16. kailash/nodes/base.py +1032 -0
  17. kailash/nodes/base_async.py +128 -0
  18. kailash/nodes/code/__init__.py +32 -0
  19. kailash/nodes/code/python.py +1021 -0
  20. kailash/nodes/data/__init__.py +125 -0
  21. kailash/nodes/data/readers.py +496 -0
  22. kailash/nodes/data/sharepoint_graph.py +623 -0
  23. kailash/nodes/data/sql.py +380 -0
  24. kailash/nodes/data/streaming.py +1168 -0
  25. kailash/nodes/data/vector_db.py +964 -0
  26. kailash/nodes/data/writers.py +529 -0
  27. kailash/nodes/logic/__init__.py +6 -0
  28. kailash/nodes/logic/async_operations.py +702 -0
  29. kailash/nodes/logic/operations.py +551 -0
  30. kailash/nodes/transform/__init__.py +5 -0
  31. kailash/nodes/transform/processors.py +379 -0
  32. kailash/runtime/__init__.py +6 -0
  33. kailash/runtime/async_local.py +356 -0
  34. kailash/runtime/docker.py +697 -0
  35. kailash/runtime/local.py +434 -0
  36. kailash/runtime/parallel.py +557 -0
  37. kailash/runtime/runner.py +110 -0
  38. kailash/runtime/testing.py +347 -0
  39. kailash/sdk_exceptions.py +307 -0
  40. kailash/tracking/__init__.py +7 -0
  41. kailash/tracking/manager.py +885 -0
  42. kailash/tracking/metrics_collector.py +342 -0
  43. kailash/tracking/models.py +535 -0
  44. kailash/tracking/storage/__init__.py +0 -0
  45. kailash/tracking/storage/base.py +113 -0
  46. kailash/tracking/storage/database.py +619 -0
  47. kailash/tracking/storage/filesystem.py +543 -0
  48. kailash/utils/__init__.py +0 -0
  49. kailash/utils/export.py +924 -0
  50. kailash/utils/templates.py +680 -0
  51. kailash/visualization/__init__.py +62 -0
  52. kailash/visualization/api.py +732 -0
  53. kailash/visualization/dashboard.py +951 -0
  54. kailash/visualization/performance.py +808 -0
  55. kailash/visualization/reports.py +1471 -0
  56. kailash/workflow/__init__.py +15 -0
  57. kailash/workflow/builder.py +245 -0
  58. kailash/workflow/graph.py +827 -0
  59. kailash/workflow/mermaid_visualizer.py +628 -0
  60. kailash/workflow/mock_registry.py +63 -0
  61. kailash/workflow/runner.py +302 -0
  62. kailash/workflow/state.py +238 -0
  63. kailash/workflow/visualization.py +588 -0
  64. kailash-0.1.0.dist-info/METADATA +710 -0
  65. kailash-0.1.0.dist-info/RECORD +69 -0
  66. kailash-0.1.0.dist-info/WHEEL +5 -0
  67. kailash-0.1.0.dist-info/entry_points.txt +2 -0
  68. kailash-0.1.0.dist-info/licenses/LICENSE +21 -0
  69. kailash-0.1.0.dist-info/top_level.txt +1 -0
@@ -0,0 +1,619 @@
+ """Database storage backend for task tracking."""
+
+ import json
+ from datetime import datetime
+ from typing import List, Optional
+ from uuid import uuid4
+
+ from ..models import TaskMetrics, TaskRun, TaskStatus, WorkflowRun
+ from .base import StorageBackend
+
+
+ class DatabaseStorage(StorageBackend):
+     """Database storage backend using SQLite."""
+
+     def __init__(self, connection_string: str = "sqlite:///~/.kailash/tracking.db"):
+         """Initialize database storage.
+
+         Args:
+             connection_string: Database connection string
+         """
+         import os
+         import sqlite3
+
+         # Derive a filesystem path from a sqlite:// connection string
+         if connection_string.startswith("sqlite://"):
+             db_path = connection_string[len("sqlite://"):]
+             # "sqlite:///~/..." leaves a slash in front of "~"; drop it so
+             # expanduser can resolve the home directory
+             if db_path.startswith("/~"):
+                 db_path = db_path[1:]
+             db_path = os.path.expanduser(db_path)
+             parent = os.path.dirname(db_path)
+             if parent:
+                 os.makedirs(parent, exist_ok=True)
+         else:
+             # Not a sqlite:// connection string; assume it's a direct path
+             db_path = connection_string
+
+         # This implementation talks to SQLite directly
+         self.db_path = db_path
+         self.conn = sqlite3.connect(db_path)
+         self._initialize_schema()
+
+     def _initialize_schema(self):
+         """Initialize database schema."""
+         cursor = self.conn.cursor()
+
+         # Create runs table
+         cursor.execute(
+             """
+             CREATE TABLE IF NOT EXISTS workflow_runs (
+                 run_id TEXT PRIMARY KEY,
+                 workflow_name TEXT NOT NULL,
+                 status TEXT NOT NULL,
+                 started_at TEXT NOT NULL,
+                 ended_at TEXT,
+                 metadata TEXT,
+                 error TEXT
+             )
+             """
+         )
+
+         # Create tasks table
+         cursor.execute(
+             """
+             CREATE TABLE IF NOT EXISTS tasks (
+                 task_id TEXT PRIMARY KEY,
+                 run_id TEXT NOT NULL,
+                 node_id TEXT NOT NULL,
+                 node_type TEXT NOT NULL,
+                 status TEXT NOT NULL,
+                 started_at TEXT,
+                 ended_at TEXT,
+                 result TEXT,
+                 error TEXT,
+                 metadata TEXT,
+                 input_data TEXT,
+                 output_data TEXT,
+                 FOREIGN KEY (run_id) REFERENCES workflow_runs(run_id)
+             )
+             """
+         )
+
+         # Create metrics table
+         cursor.execute(
+             """
+             CREATE TABLE IF NOT EXISTS metrics (
+                 task_id TEXT PRIMARY KEY,
+                 cpu_usage REAL,
+                 memory_usage_mb REAL,
+                 duration REAL,
+                 custom_metrics TEXT,
+                 FOREIGN KEY (task_id) REFERENCES tasks(task_id)
+             )
+             """
+         )
+
+         # Mirror of the tasks table, for compatibility with code that uses task_runs
+         cursor.execute(
+             """
+             CREATE TABLE IF NOT EXISTS task_runs (
+                 task_id TEXT PRIMARY KEY,
+                 run_id TEXT NOT NULL,
+                 node_id TEXT NOT NULL,
+                 node_type TEXT NOT NULL,
+                 status TEXT NOT NULL,
+                 started_at TEXT,
+                 ended_at TEXT,
+                 result TEXT,
+                 error TEXT,
+                 metadata TEXT,
+                 input_data TEXT,
+                 output_data TEXT,
+                 FOREIGN KEY (run_id) REFERENCES workflow_runs(run_id)
+             )
+             """
+         )
+
+         # Create indexes
+         cursor.execute(
+             "CREATE INDEX IF NOT EXISTS idx_runs_workflow ON workflow_runs(workflow_name)"
+         )
+         cursor.execute(
+             "CREATE INDEX IF NOT EXISTS idx_runs_status ON workflow_runs(status)"
+         )
+         cursor.execute("CREATE INDEX IF NOT EXISTS idx_tasks_run ON tasks(run_id)")
+         cursor.execute("CREATE INDEX IF NOT EXISTS idx_tasks_node ON tasks(node_id)")
+         cursor.execute("CREATE INDEX IF NOT EXISTS idx_tasks_status ON tasks(status)")
+         cursor.execute(
+             "CREATE INDEX IF NOT EXISTS idx_tasks_started_at ON tasks(started_at)"
+         )
+
+         # Indexes for task_runs table
+         cursor.execute(
+             "CREATE INDEX IF NOT EXISTS idx_task_runs_run ON task_runs(run_id)"
+         )
+         cursor.execute(
+             "CREATE INDEX IF NOT EXISTS idx_task_runs_node ON task_runs(node_id)"
+         )
+         cursor.execute(
+             "CREATE INDEX IF NOT EXISTS idx_task_runs_status ON task_runs(status)"
+         )
+
+         self.conn.commit()
+
+     def save_run(self, run: WorkflowRun) -> None:
+         """Save a workflow run."""
+         cursor = self.conn.cursor()
+
+         cursor.execute(
+             """
+             INSERT OR REPLACE INTO workflow_runs
+             (run_id, workflow_name, status, started_at, ended_at, metadata, error)
+             VALUES (?, ?, ?, ?, ?, ?, ?)
+             """,
+             (
+                 run.run_id,
+                 run.workflow_name,
+                 run.status,
+                 run.started_at.isoformat(),
+                 run.ended_at.isoformat() if run.ended_at else None,
+                 json.dumps(run.metadata),
+                 run.error,
+             ),
+         )
+
+         self.conn.commit()
+
+     def load_run(self, run_id: str) -> Optional[WorkflowRun]:
+         """Load a workflow run by ID."""
+         cursor = self.conn.cursor()
+
+         cursor.execute(
+             "SELECT * FROM workflow_runs WHERE run_id = ?",
+             (run_id,),
+         )
+
+         row = cursor.fetchone()
+         if not row:
+             return None
+
+         # Convert row to dict
+         columns = [desc[0] for desc in cursor.description]
+         data = dict(zip(columns, row))
+
+         # Parse JSON metadata
+         data["metadata"] = json.loads(data["metadata"] or "{}")
+
+         # Load task IDs
+         cursor.execute("SELECT task_id FROM task_runs WHERE run_id = ?", (run_id,))
+         data["tasks"] = [task_row[0] for task_row in cursor.fetchall()]
+
+         return WorkflowRun.model_validate(data)
+
+     def list_runs(
+         self, workflow_name: Optional[str] = None, status: Optional[str] = None
+     ) -> List[WorkflowRun]:
+         """List workflow runs."""
+         cursor = self.conn.cursor()
+
+         query = "SELECT * FROM workflow_runs WHERE 1=1"
+         params = []
+
+         if workflow_name:
+             query += " AND workflow_name = ?"
+             params.append(workflow_name)
+
+         if status:
+             query += " AND status = ?"
+             params.append(status)
+
+         query += " ORDER BY started_at DESC"
+
+         cursor.execute(query, params)
+
+         runs = []
+         columns = [desc[0] for desc in cursor.description]
+
+         # Materialize the result set first; the cursor is reused inside the loop
+         for row in cursor.fetchall():
+             data = dict(zip(columns, row))
+             data["metadata"] = json.loads(data["metadata"] or "{}")
+
+             # Load task IDs
+             cursor.execute(
+                 "SELECT task_id FROM task_runs WHERE run_id = ?", (data["run_id"],)
+             )
+             data["tasks"] = [task_row[0] for task_row in cursor.fetchall()]
+
+             runs.append(WorkflowRun.model_validate(data))
+
+         return runs
+
+     def save_task(self, task: TaskRun) -> None:
+         """Save a task."""
+         cursor = self.conn.cursor()
+
+         # Insert into both tables for compatibility
+         for table_name in ["tasks", "task_runs"]:
+             cursor.execute(
+                 f"""
+                 INSERT OR REPLACE INTO {table_name}
+                 (task_id, run_id, node_id, node_type, status, started_at, ended_at,
+                  result, error, metadata, input_data, output_data)
+                 VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?)
+                 """,
+                 (
+                     task.task_id,
+                     task.run_id,
+                     task.node_id,
+                     task.node_type,
+                     task.status,
+                     task.started_at.isoformat() if task.started_at else None,
+                     task.ended_at.isoformat() if task.ended_at else None,
+                     json.dumps(task.result) if task.result else None,
+                     task.error,
+                     json.dumps(task.metadata),
+                     json.dumps(task.input_data) if task.input_data else None,
+                     json.dumps(task.output_data) if task.output_data else None,
+                 ),
+             )
+
+         # Save metrics if present
+         if hasattr(task, "metrics") and task.metrics:
+             cursor.execute(
+                 """
+                 INSERT OR REPLACE INTO metrics
+                 (task_id, cpu_usage, memory_usage_mb, duration, custom_metrics)
+                 VALUES (?, ?, ?, ?, ?)
+                 """,
+                 (
+                     task.task_id,
+                     task.metrics.cpu_usage,
+                     task.metrics.memory_usage_mb,
+                     task.metrics.duration,
+                     (
+                         json.dumps(task.metrics.custom_metrics)
+                         if hasattr(task.metrics, "custom_metrics")
+                         else None
+                     ),
+                 ),
+             )
+
+         self.conn.commit()
+
+     def load_task(self, task_id: str) -> Optional[TaskRun]:
+         """Load a task by ID."""
+         cursor = self.conn.cursor()
+
+         # Try the tasks table first
+         cursor.execute("SELECT * FROM tasks WHERE task_id = ?", (task_id,))
+
+         row = cursor.fetchone()
+         if not row:
+             # Fall back to task_runs if not found
+             cursor.execute("SELECT * FROM task_runs WHERE task_id = ?", (task_id,))
+             row = cursor.fetchone()
+
+         if not row:
+             return None
+
+         # Convert row to dict
+         columns = [desc[0] for desc in cursor.description]
+         data = dict(zip(columns, row))
+
+         # Parse JSON fields
+         if data["result"]:
+             data["result"] = json.loads(data["result"])
+         data["metadata"] = json.loads(data["metadata"] or "{}")
+         if data.get("input_data"):
+             try:
+                 data["input_data"] = json.loads(data["input_data"])
+             except (json.JSONDecodeError, TypeError):
+                 # Either already a dict (leave as-is) or a plain string that
+                 # is not valid JSON; wrap the latter so validation succeeds
+                 if isinstance(data["input_data"], str) and data["input_data"].strip():
+                     data["input_data"] = {"value": data["input_data"]}
+         if data.get("output_data"):
+             try:
+                 data["output_data"] = json.loads(data["output_data"])
+             except (json.JSONDecodeError, TypeError):
+                 if isinstance(data["output_data"], str) and data["output_data"].strip():
+                     data["output_data"] = {"value": data["output_data"]}
+
+         task = TaskRun.model_validate(data)
+
+         # Load metrics if available
+         cursor.execute("SELECT * FROM metrics WHERE task_id = ?", (task_id,))
+         metrics_row = cursor.fetchone()
+         if metrics_row:
+             metrics_columns = [desc[0] for desc in cursor.description]
+             metrics_data = dict(zip(metrics_columns, metrics_row))
+
+             # Parse custom metrics if present
+             if metrics_data.get("custom_metrics"):
+                 metrics_data["custom_metrics"] = json.loads(
+                     metrics_data["custom_metrics"]
+                 )
+
+             # Create metrics object
+             task.metrics = TaskMetrics(
+                 cpu_usage=metrics_data.get("cpu_usage"),
+                 memory_usage=metrics_data.get("memory_usage_mb"),
+                 duration=metrics_data.get("duration"),
+                 custom_metrics=metrics_data.get("custom_metrics"),
+             )
+
+         return task
+
+     def list_tasks(
+         self,
+         run_id: str,
+         node_id: Optional[str] = None,
+         status: Optional[TaskStatus] = None,
+     ) -> List[TaskRun]:
+         """List tasks for a run."""
+         cursor = self.conn.cursor()
+
+         query = "SELECT * FROM tasks WHERE run_id = ?"
+         params = [run_id]
+
+         if node_id:
+             query += " AND node_id = ?"
+             params.append(node_id)
+
+         if status:
+             query += " AND status = ?"
+             params.append(status)
+
+         query += " ORDER BY started_at"
+
+         cursor.execute(query, params)
+
+         tasks = []
+         columns = [desc[0] for desc in cursor.description]
+
+         for row in cursor.fetchall():
+             data = dict(zip(columns, row))
+
+             # Parse JSON fields
+             if data["result"]:
+                 data["result"] = json.loads(data["result"])
+             data["metadata"] = json.loads(data["metadata"] or "{}")
+             if data.get("input_data"):
+                 try:
+                     data["input_data"] = json.loads(data["input_data"])
+                 except (json.JSONDecodeError, TypeError):
+                     # Already a dict (leave as-is) or a non-JSON string (wrap it)
+                     if isinstance(data["input_data"], str) and data["input_data"].strip():
+                         data["input_data"] = {"value": data["input_data"]}
+             if data.get("output_data"):
+                 try:
+                     data["output_data"] = json.loads(data["output_data"])
+                 except (json.JSONDecodeError, TypeError):
+                     if isinstance(data["output_data"], str) and data["output_data"].strip():
+                         data["output_data"] = {"value": data["output_data"]}
+
+             tasks.append(TaskRun.model_validate(data))
+
+         return tasks
+
+     def clear(self) -> None:
+         """Clear all stored data."""
+         cursor = self.conn.cursor()
+         cursor.execute("DELETE FROM metrics")
+         cursor.execute("DELETE FROM tasks")
+         cursor.execute("DELETE FROM task_runs")
+         cursor.execute("DELETE FROM workflow_runs")
+         self.conn.commit()
+
+     def export_run(self, run_id: str, output_path: str) -> None:
+         """Export a run and its tasks to a JSON file."""
+         run = self.load_run(run_id)
+         if not run:
+             raise ValueError(f"Run {run_id} not found")
+
+         tasks = self.list_tasks(run_id)
+
+         export_data = {
+             "run": run.to_dict(),
+             "tasks": [task.to_dict() for task in tasks],
+         }
+
+         with open(output_path, "w") as f:
+             json.dump(export_data, f, indent=2)
+
+     def import_run(self, input_path: str) -> str:
+         """Import a run and its tasks from a JSON file."""
+         with open(input_path, "r") as f:
+             import_data = json.load(f)
+
+         # Import run
+         run_data = import_data["run"]
+         run = WorkflowRun.model_validate(run_data)
+
+         # Generate a new run ID to avoid conflicts
+         run.run_id = str(uuid4())
+
+         # Save run
+         self.save_run(run)
+
+         # Import tasks under the new run ID, with fresh task IDs so that
+         # re-importing cannot overwrite an existing run's tasks via
+         # INSERT OR REPLACE on the task_id primary key
+         for task_data in import_data.get("tasks", []):
+             task = TaskRun.model_validate(task_data)
+             task.run_id = run.run_id
+             task.task_id = str(uuid4())
+             self.save_task(task)
+
+         return run.run_id
+
+     def get_task(self, task_id: str) -> Optional[TaskRun]:
+         """Load a task by ID.
+
+         Alias for load_task for API compatibility.
+
+         Args:
+             task_id: Task ID to load
+
+         Returns:
+             TaskRun or None if not found
+         """
+         return self.load_task(task_id)
+
+     def get_all_tasks(self) -> List[TaskRun]:
+         """Get all tasks.
+
+         Returns:
+             List of all TaskRun objects
+         """
+         cursor = self.conn.cursor()
+         cursor.execute("SELECT * FROM task_runs")
+
+         tasks = []
+         columns = [desc[0] for desc in cursor.description]
+
+         for row in cursor.fetchall():
+             data = dict(zip(columns, row))
+
+             # Parse JSON fields
+             if data["result"]:
+                 data["result"] = json.loads(data["result"])
+             data["metadata"] = json.loads(data["metadata"] or "{}")
+
+             tasks.append(TaskRun.model_validate(data))
+
+         return tasks
+
+     def update_task(self, task: TaskRun) -> None:
+         """Update an existing task.
+
+         Uses save_task internally, since save_task performs INSERT OR REPLACE.
+
+         Args:
+             task: TaskRun to update
+         """
+         self.save_task(task)
+
+     def delete_task(self, task_id: str) -> None:
+         """Delete a task and its associated metrics.
+
+         Args:
+             task_id: Task ID to delete
+         """
+         cursor = self.conn.cursor()
+         cursor.execute("DELETE FROM metrics WHERE task_id = ?", (task_id,))
+         cursor.execute("DELETE FROM tasks WHERE task_id = ?", (task_id,))
+         cursor.execute("DELETE FROM task_runs WHERE task_id = ?", (task_id,))
+         self.conn.commit()
+
+     def query_tasks(
+         self,
+         node_id: Optional[str] = None,
+         status: Optional[TaskStatus] = None,
+         started_after: Optional[datetime] = None,
+         completed_before: Optional[datetime] = None,
+     ) -> List[TaskRun]:
+         """Query tasks with filters.
+
+         Args:
+             node_id: Filter by node ID
+             status: Filter by status
+             started_after: Filter by start time (inclusive)
+             completed_before: Filter by completion time (exclusive)
+
+         Returns:
+             List of matching TaskRun objects
+         """
+         cursor = self.conn.cursor()
+
+         query = "SELECT * FROM task_runs WHERE 1=1"
+         params = []
+
+         if node_id:
+             query += " AND node_id = ?"
+             params.append(node_id)
+
+         if status:
+             query += " AND status = ?"
+             params.append(status)
+
+         if started_after:
+             query += " AND started_at >= ?"
+             params.append(
+                 started_after.isoformat()
+                 if hasattr(started_after, "isoformat")
+                 else started_after
+             )
+
+         if completed_before:
+             query += " AND ended_at < ?"
+             params.append(
+                 completed_before.isoformat()
+                 if hasattr(completed_before, "isoformat")
+                 else completed_before
+             )
+
+         cursor.execute(query, params)
+
+         tasks = []
+         columns = [desc[0] for desc in cursor.description]
+
+         for row in cursor.fetchall():
+             data = dict(zip(columns, row))
+
+             # Parse JSON fields
+             if data["result"]:
+                 data["result"] = json.loads(data["result"])
+             data["metadata"] = json.loads(data["metadata"] or "{}")
+
+             tasks.append(TaskRun.model_validate(data))
+
+         return tasks
+
+     def _execute_query(self, query, params=()):
+         """Execute a query with parameters.
+
+         This is a helper method for tests that mock query execution.
+
+         Args:
+             query: SQL query string
+             params: Query parameters
+
+         Returns:
+             Cursor after execution
+
+         Raises:
+             sqlite3.Error: If the query fails
+         """
+         cursor = self.conn.cursor()
+         cursor.execute(query, params)
+         return cursor
+
+     def __del__(self):
+         """Close database connection."""
+         if hasattr(self, "conn"):
+             self.conn.close()
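
For reference, a minimal usage sketch of this backend. The WorkflowRun and TaskRun field values shown are assumptions inferred from the column names and pydantic-style model_validate calls above, not documented kailash API, so treat it as illustrative only:

    from datetime import datetime, timezone
    from kailash.tracking.models import TaskRun, WorkflowRun
    from kailash.tracking.storage.database import DatabaseStorage

    # Opens (or creates) ~/.kailash/tracking.db and initializes the schema
    storage = DatabaseStorage()

    # Hypothetical field values; the real models may require more fields
    run = WorkflowRun(
        run_id="run-001",
        workflow_name="example",
        status="running",
        started_at=datetime.now(timezone.utc),
        metadata={},
    )
    storage.save_run(run)

    task = TaskRun(
        task_id="task-001",
        run_id=run.run_id,
        node_id="reader",
        node_type="CSVReader",
        status="completed",
        started_at=datetime.now(timezone.utc),
        output_data={"rows": 42},
        metadata={},
    )
    storage.save_task(task)

    # Round trip: load_run collects task IDs from the task_runs table
    loaded = storage.load_run(run.run_id)
    print(loaded.tasks)  # expected: ["task-001"]

Since datetimes are stored via isoformat() and structured fields via json.dumps(), everything written to the TEXT columns round-trips through load_run / load_task without a separate serialization layer.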