supervaizer 0.9.6__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (50) hide show
  1. supervaizer/__init__.py +88 -0
  2. supervaizer/__version__.py +10 -0
  3. supervaizer/account.py +304 -0
  4. supervaizer/account_service.py +87 -0
  5. supervaizer/admin/routes.py +1254 -0
  6. supervaizer/admin/templates/agent_detail.html +145 -0
  7. supervaizer/admin/templates/agents.html +175 -0
  8. supervaizer/admin/templates/agents_grid.html +80 -0
  9. supervaizer/admin/templates/base.html +233 -0
  10. supervaizer/admin/templates/case_detail.html +230 -0
  11. supervaizer/admin/templates/cases_list.html +182 -0
  12. supervaizer/admin/templates/cases_table.html +134 -0
  13. supervaizer/admin/templates/console.html +389 -0
  14. supervaizer/admin/templates/dashboard.html +153 -0
  15. supervaizer/admin/templates/job_detail.html +192 -0
  16. supervaizer/admin/templates/jobs_list.html +180 -0
  17. supervaizer/admin/templates/jobs_table.html +122 -0
  18. supervaizer/admin/templates/navigation.html +153 -0
  19. supervaizer/admin/templates/recent_activity.html +81 -0
  20. supervaizer/admin/templates/server.html +105 -0
  21. supervaizer/admin/templates/server_status_cards.html +121 -0
  22. supervaizer/agent.py +816 -0
  23. supervaizer/case.py +400 -0
  24. supervaizer/cli.py +135 -0
  25. supervaizer/common.py +283 -0
  26. supervaizer/event.py +181 -0
  27. supervaizer/examples/controller-template.py +195 -0
  28. supervaizer/instructions.py +145 -0
  29. supervaizer/job.py +379 -0
  30. supervaizer/job_service.py +155 -0
  31. supervaizer/lifecycle.py +417 -0
  32. supervaizer/parameter.py +173 -0
  33. supervaizer/protocol/__init__.py +11 -0
  34. supervaizer/protocol/a2a/__init__.py +21 -0
  35. supervaizer/protocol/a2a/model.py +227 -0
  36. supervaizer/protocol/a2a/routes.py +99 -0
  37. supervaizer/protocol/acp/__init__.py +21 -0
  38. supervaizer/protocol/acp/model.py +198 -0
  39. supervaizer/protocol/acp/routes.py +74 -0
  40. supervaizer/py.typed +1 -0
  41. supervaizer/routes.py +667 -0
  42. supervaizer/server.py +554 -0
  43. supervaizer/server_utils.py +54 -0
  44. supervaizer/storage.py +436 -0
  45. supervaizer/telemetry.py +81 -0
  46. supervaizer-0.9.6.dist-info/METADATA +245 -0
  47. supervaizer-0.9.6.dist-info/RECORD +50 -0
  48. supervaizer-0.9.6.dist-info/WHEEL +4 -0
  49. supervaizer-0.9.6.dist-info/entry_points.txt +2 -0
  50. supervaizer-0.9.6.dist-info/licenses/LICENSE.md +346 -0
supervaizer/storage.py ADDED
@@ -0,0 +1,436 @@
1
+ # Copyright (c) 2024-2025 Alain Prasquier - Supervaize.com. All rights reserved.
+ #
+ # This Source Code Form is subject to the terms of the Mozilla Public License, v. 2.0.
+ # If a copy of the MPL was not distributed with this file, you can obtain one at
+ # https://mozilla.org/MPL/2.0/.
12
+
13
+ import os
14
+ import threading
15
+ from pathlib import Path
16
+ from typing import TYPE_CHECKING, Any, Dict, Generic, List, Optional, TypeVar
17
+
18
+ from tinydb import Query, TinyDB
19
+
20
+ from supervaizer.common import log, singleton
21
+ from supervaizer.lifecycle import WorkflowEntity
22
+
23
+ if TYPE_CHECKING:
24
+ from supervaizer.case import Case
25
+ from supervaizer.job import Job
26
+ from supervaizer.lifecycle import EntityEvents, EntityStatus
27
+
28
+ T = TypeVar("T", bound=WorkflowEntity)
29
+
30
+ DATA_STORAGE_PATH = os.getenv("DATA_STORAGE_PATH", "./data")
31
+
32
+
33
@singleton
class StorageManager:
    """
    Thread-safe TinyDB-based persistence manager for WorkflowEntity instances.

    Stores entities in separate tables by type, with foreign key relationships
    represented as ID references (Job.case_ids, Case.job_id).

    All public methods serialize access through a single lock, so one shared
    instance (enforced by @singleton) is safe to use from multiple threads.
    """

    def __init__(self, db_path: str = f"{DATA_STORAGE_PATH}/entities.json"):
        """
        Initialize the storage manager.

        Args:
            db_path: Path to the TinyDB JSON file
        """
        self.db_path: Path = Path(db_path)
        self._lock = threading.Lock()

        # Ensure data directory exists
        self.db_path.parent.mkdir(parents=True, exist_ok=True)

        # Initialize TinyDB without caching middleware for immediate persistence.
        # Note: Thread safety is handled by our own threading lock.
        self._db = TinyDB(
            db_path,
            sort_keys=True,
            indent=2,
        )

    def save_object(self, type: str, obj: Dict[str, Any]) -> None:
        """
        Save an object to the appropriate table.

        Args:
            type: The object type (class name)
            obj: Dictionary representation of the object

        Raises:
            ValueError: If obj has no truthy 'id' field.
        """
        with self._lock:
            table = self._db.table(type)
            obj_id = obj.get("id")

            if not obj_id:
                raise ValueError(
                    f"[StorageManager] §SSSS01 Object must have an 'id' field: {obj}"
                )

            # Use upsert to handle both new and existing objects
            query = Query()
            table.upsert(obj, query.id == obj_id)

    def get_objects(self, type: str) -> List[Dict[str, Any]]:
        """
        Get all objects of a specific type.

        Args:
            type: The object type (class name)

        Returns:
            List of object dictionaries (plain copies, safe for callers to mutate)
        """
        with self._lock:
            table = self._db.table(type)
            documents = table.all()
            return [dict(doc) for doc in documents]

    def get_object_by_id(self, type: str, obj_id: str) -> Optional[Dict[str, Any]]:
        """
        Get a specific object by its ID.

        Args:
            type: The object type (class name)
            obj_id: The object ID

        Returns:
            Object dictionary if found, None otherwise
        """
        with self._lock:
            table = self._db.table(type)
            query = Query()
            result = table.search(query.id == obj_id)
            # Fix: copy the TinyDB Document into a plain dict so callers cannot
            # mutate the stored record (consistent with get_objects /
            # get_cases_for_job, which already return copies).
            return dict(result[0]) if result else None

    def delete_object(self, type: str, obj_id: str) -> bool:
        """
        Delete an object by its ID.

        Args:
            type: The object type (class name)
            obj_id: The object ID

        Returns:
            True if object was deleted, False if not found
        """
        with self._lock:
            table = self._db.table(type)
            query = Query()
            deleted_count = len(table.remove(query.id == obj_id))

            if deleted_count > 0:
                log.debug(f"Deleted {type} object with ID: {obj_id}")
                return True
            return False

    def reset_storage(self) -> None:
        """
        Reset storage by clearing all tables but preserving the database file.
        """
        with self._lock:
            # Clear all tables
            for table_name in self._db.tables():
                self._db.drop_table(table_name)

            log.info("Storage reset - all tables cleared")

    def get_cases_for_job(self, job_id: str) -> List[Dict[str, Any]]:
        """
        Helper method to get all cases for a specific job.

        Args:
            job_id: The job ID

        Returns:
            List of case dictionaries
        """
        with self._lock:
            table = self._db.table("Case")
            query = Query()
            documents = table.search(query.job_id == job_id)
            return [dict(doc) for doc in documents]

    def close(self) -> None:
        """Close the database connection (idempotent; safe if already closed)."""
        with self._lock:
            if hasattr(self, "_db") and self._db is not None:
                try:
                    if hasattr(self._db, "close"):
                        self._db.close()
                        log.info("StorageManager database closed")
                except ValueError as e:
                    # Handle the case where the file is already closed
                    if "I/O operation on closed file" in str(e):
                        log.debug("Database file already closed")
                    else:
                        raise
184
+
185
+
186
class EntityRepository(Generic[T]):
    """
    Generic repository for WorkflowEntity types with type-safe operations.

    Provides higher-level abstraction over StorageManager for specific entity types.
    """

    def __init__(
        self, entity_class: type[T], storage_manager: Optional[StorageManager] = None
    ):
        """
        Initialize repository for a specific entity type.

        Args:
            entity_class: The entity class this repository manages
            storage_manager: Optional storage manager instance
        """
        self.entity_class = entity_class
        self.type_name = entity_class.__name__
        # Fall back to the process-wide singleton when none is supplied.
        self.storage = storage_manager if storage_manager is not None else StorageManager()

    def get_by_id(self, entity_id: str) -> Optional[T]:
        """Return the stored entity with *entity_id*, or None when absent."""
        raw = self.storage.get_object_by_id(self.type_name, entity_id)
        return self._from_dict(raw) if raw else None

    def save(self, entity: T) -> None:
        """Serialize *entity* and write it to storage."""
        self.storage.save_object(self.type_name, self._to_dict(entity))

    def get_all(self) -> List[T]:
        """Return every stored entity of this repository's type."""
        return [
            self._from_dict(raw) for raw in self.storage.get_objects(self.type_name)
        ]

    def delete(self, entity_id: str) -> bool:
        """Delete the entity with *entity_id*; True if something was removed."""
        return self.storage.delete_object(self.type_name, entity_id)

    def _to_dict(self, entity: T) -> Dict[str, Any]:
        """Convert entity to dictionary using its to_dict property."""
        if hasattr(entity, "to_dict"):
            return dict(entity.to_dict)
        # Fallback for entities without to_dict: harvest dataclass fields.
        return {
            name: getattr(entity, name)
            for name in getattr(entity, "__dataclass_fields__", {})
            if hasattr(entity, name)
        }

    def _from_dict(self, data: Dict[str, Any]) -> T:
        """
        Convert dictionary back to entity instance.

        Pydantic-style classes (anything exposing ``model_validate``) are
        validated; other classes are constructed directly from the keys.
        """
        validate = getattr(self.entity_class, "model_validate", None)
        if validate is not None:
            return validate(data)  # type: ignore
        return self.entity_class(**data)
279
+
280
+
281
class PersistentEntityLifecycle:
    """
    Enhanced EntityLifecycle that automatically persists entity state changes.

    This class wraps the original EntityLifecycle methods to add persistence.
    """

    @staticmethod
    def _persist(entity: T, storage: Optional[StorageManager]) -> None:
        """Write *entity*'s current state to storage.

        Shared by transition() and handle_event(); falls back to the
        singleton StorageManager when *storage* is None.
        """
        storage_mgr = storage or StorageManager()
        entity_dict = entity.to_dict if hasattr(entity, "to_dict") else vars(entity)
        storage_mgr.save_object(type(entity).__name__, entity_dict)

    @staticmethod
    def transition(
        entity: T, to_status: "EntityStatus", storage: Optional[StorageManager] = None
    ) -> tuple[bool, str]:
        """
        Transition an entity and automatically persist the change.

        Args:
            entity: The entity to transition
            to_status: Target status
            storage: Optional storage manager instance

        Returns:
            Tuple of (success, error_message)
        """
        # Import here to avoid circular imports
        from supervaizer.lifecycle import EntityLifecycle

        # Perform the transition
        success, error = EntityLifecycle.transition(entity, to_status)

        # If successful, persist the entity
        if success:
            PersistentEntityLifecycle._persist(entity, storage)
            log.debug(
                f"[Storage transition] Auto-persisted {type(entity).__name__} {entity.id} after transition to {to_status}"
            )

        return success, error

    @staticmethod
    def handle_event(
        entity: T, event: "EntityEvents", storage: Optional[StorageManager] = None
    ) -> tuple[bool, str]:
        """
        Handle an event and automatically persist the change.

        Args:
            entity: The entity to handle event for
            event: The event to handle
            storage: Optional storage manager instance

        Returns:
            Tuple of (success, error_message)
        """
        # Import here to avoid circular imports
        from supervaizer.lifecycle import EntityLifecycle

        # Handle the event
        success, error = EntityLifecycle.handle_event(entity, event)

        # If successful, persist the entity
        if success:
            PersistentEntityLifecycle._persist(entity, storage)
            log.debug(
                f"[Storage handle_event] Auto-persisted {type(entity).__name__} {entity.id} after handling event {event}"
            )

        return success, error
351
+
352
+
353
def create_job_repository() -> "EntityRepository[Job]":
    """Build a repository bound to the Job entity type."""
    # Imported lazily to avoid a circular dependency at module load time.
    from supervaizer.job import Job

    return EntityRepository(entity_class=Job)
358
+
359
+
360
def create_case_repository() -> "EntityRepository[Case]":
    """Build a repository bound to the Case entity type."""
    # Imported lazily to avoid a circular dependency at module load time.
    from supervaizer.case import Case

    return EntityRepository(entity_class=Case)
365
+
366
+
367
def _register_running(
    storage: StorageManager,
    type_name: str,
    model_cls: Any,
    register: Any,
    running_values: List[str],
    error_code: str,
) -> int:
    """
    Re-register stored entities of *type_name* whose status is a running state.

    Uses ``model_construct`` to rebuild each entity without triggering
    ``__init__`` side effects, then hands it to *register* (e.g.
    ``Jobs().add_job``). Failures are logged with *error_code* and skipped.

    Returns:
        Number of entities successfully loaded.
    """
    loaded = 0
    for data in storage.get_objects(type_name):
        if data.get("status") not in running_values:
            continue
        try:
            # Use model_construct to avoid triggering __init__ side effects
            entity = model_cls.model_construct(**data)
            # Manually add to registry since we bypassed __init__
            register(entity)
            loaded += 1
        except Exception as e:
            log.error(
                f"[Storage] {error_code} Failed to load {type_name.lower()} {data.get('id', 'unknown')}: {e}"
            )
    return loaded


def load_running_entities_on_startup() -> None:
    """
    Load all running entities from storage and populate registries at startup.

    This function loads jobs and cases that are in running states:
    - IN_PROGRESS
    - CANCELLING
    - AWAITING

    This ensures that after a server restart, all running workflows
    continue to be accessible through the in-memory registries.
    """
    from supervaizer.case import Case, Cases
    from supervaizer.job import Job, Jobs
    from supervaizer.lifecycle import EntityStatus

    storage = StorageManager()

    # Clear existing registries to start fresh
    Jobs().reset()
    Cases().reset()

    # Compute the set of running-status values once; the previous version
    # rebuilt this list for every stored entity inside the loops.
    running_values = [status.value for status in EntityStatus.status_running()]

    loaded_jobs = _register_running(
        storage, "Job", Job, Jobs().add_job, running_values, "§SSL01"
    )
    loaded_cases = _register_running(
        storage, "Case", Case, Cases().add_case, running_values, "§SSL03"
    )

    log.info(
        f"[Storage] Entity re-loading complete: {loaded_jobs} running jobs, {loaded_cases} running cases"
    )
434
+
435
+
436
# Module-level instance: importing this module eagerly initializes the
# on-disk store (StorageManager is a @singleton, so this is the shared one).
storage_manager = StorageManager()
@@ -0,0 +1,81 @@
1
+ # Copyright (c) 2024-2025 Alain Prasquier - Supervaize.com. All rights reserved.
2
+ #
3
+ # This Source Code Form is subject to the terms of the Mozilla Public License, v. 2.0.
4
+ # If a copy of the MPL was not distributed with this file, you can obtain one at
5
+ # https://mozilla.org/MPL/2.0/.
6
+
7
+
8
+ from enum import Enum
9
+ from typing import Any, ClassVar, Dict
10
+
11
+ from pydantic import BaseModel
12
+
13
+ from supervaizer.__version__ import VERSION
14
+
15
+ # TODO: Use OpenTelemetry / OpenInference standard - Consider connecting to Arize Phoenix observability backend for storage and visualization.
16
+
17
+
18
class TelemetryType(str, Enum):
    """Kind of telemetry payload an agent can emit (serialized as its string value)."""

    LOGS = "logs"
    METRICS = "metrics"
    EVENTS = "events"
    TRACES = "traces"
    EXCEPTIONS = "exceptions"
    DIAGNOSTICS = "diagnostics"
    CUSTOM = "custom"
26
+
27
+
28
class TelemetryCategory(str, Enum):
    """Functional area a telemetry record belongs to (serialized as its string value)."""

    SYSTEM = "system"
    APPLICATION = "application"
    USER_INTERACTION = "user_interaction"
    SECURITY = "security"
    BUSINESS = "business"
    ENVIRONMENT = "environment"
    NETWORKING = "networking"
    CUSTOM = "custom"
37
+
38
+
39
class TelemetrySeverity(str, Enum):
    """Importance level of a telemetry record, from least to most severe."""

    DEBUG = "debug"
    INFO = "info"
    WARNING = "warning"
    ERROR = "error"
    CRITICAL = "critical"
45
+
46
+
47
class TelemetryModel(BaseModel):
    """Pydantic schema for a single telemetry record sent by an agent."""

    # Library version shared by all instances (ClassVar, not a pydantic field).
    supervaizer_VERSION: ClassVar[str] = VERSION
    # ID of the agent emitting this record (camelCase to match the wire format).
    agentId: str
    # Kind of telemetry payload (logs, metrics, traces, ...).
    type: TelemetryType
    # Functional area the record belongs to (system, security, ...).
    category: TelemetryCategory
    # Importance level (debug ... critical).
    severity: TelemetrySeverity
    # Free-form, type-specific payload.
    details: Dict[str, Any]
54
+
55
+
56
class Telemetry(TelemetryModel):
    """Base class for all telemetry data in the Supervaize Control system.

    Telemetry represents monitoring and observability data sent from agents to the control system.
    This includes logs, metrics, events, traces, exceptions, diagnostics and custom telemetry.

    Inherits from TelemetryModel which defines the core telemetry attributes:
    - agentId: The ID of the agent sending the telemetry
    - type: The TelemetryType enum indicating the telemetry category (logs, metrics, etc)
    - category: The TelemetryCategory enum for the functional area (system, application, etc)
    - severity: The TelemetrySeverity enum indicating importance (debug, info, warning, etc)
    - details: A dictionary containing telemetry-specific details
    """

    # NOTE: the former __init__ override only forwarded **kwargs to
    # super().__init__(); pydantic's BaseModel already does exactly that,
    # and the override obscured the generated model signature, so it was
    # removed (behavior is unchanged).

    @property
    def payload(self) -> Dict[str, Any]:
        """Wire-format dict sent to the control system, with enums unwrapped
        to their string values and field names matching the remote API."""
        return {
            "agentId": self.agentId,
            "eventType": self.type.value,
            "severity": self.severity.value,
            "eventCategory": self.category.value,
            "details": self.details,
        }