dory-sdk 2.1.0__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (69)
  1. dory/__init__.py +70 -0
  2. dory/auto_instrument.py +142 -0
  3. dory/cli/__init__.py +5 -0
  4. dory/cli/main.py +290 -0
  5. dory/cli/templates.py +333 -0
  6. dory/config/__init__.py +23 -0
  7. dory/config/defaults.py +50 -0
  8. dory/config/loader.py +361 -0
  9. dory/config/presets.py +325 -0
  10. dory/config/schema.py +152 -0
  11. dory/core/__init__.py +27 -0
  12. dory/core/app.py +404 -0
  13. dory/core/context.py +209 -0
  14. dory/core/lifecycle.py +214 -0
  15. dory/core/meta.py +121 -0
  16. dory/core/modes.py +479 -0
  17. dory/core/processor.py +654 -0
  18. dory/core/signals.py +122 -0
  19. dory/decorators.py +142 -0
  20. dory/errors/__init__.py +117 -0
  21. dory/errors/classification.py +362 -0
  22. dory/errors/codes.py +495 -0
  23. dory/health/__init__.py +10 -0
  24. dory/health/probes.py +210 -0
  25. dory/health/server.py +306 -0
  26. dory/k8s/__init__.py +11 -0
  27. dory/k8s/annotation_watcher.py +184 -0
  28. dory/k8s/client.py +251 -0
  29. dory/k8s/pod_metadata.py +182 -0
  30. dory/logging/__init__.py +9 -0
  31. dory/logging/logger.py +175 -0
  32. dory/metrics/__init__.py +7 -0
  33. dory/metrics/collector.py +301 -0
  34. dory/middleware/__init__.py +36 -0
  35. dory/middleware/connection_tracker.py +608 -0
  36. dory/middleware/request_id.py +321 -0
  37. dory/middleware/request_tracker.py +501 -0
  38. dory/migration/__init__.py +11 -0
  39. dory/migration/configmap.py +260 -0
  40. dory/migration/serialization.py +167 -0
  41. dory/migration/state_manager.py +301 -0
  42. dory/monitoring/__init__.py +23 -0
  43. dory/monitoring/opentelemetry.py +462 -0
  44. dory/py.typed +2 -0
  45. dory/recovery/__init__.py +60 -0
  46. dory/recovery/golden_image.py +480 -0
  47. dory/recovery/golden_snapshot.py +561 -0
  48. dory/recovery/golden_validator.py +518 -0
  49. dory/recovery/partial_recovery.py +479 -0
  50. dory/recovery/recovery_decision.py +242 -0
  51. dory/recovery/restart_detector.py +142 -0
  52. dory/recovery/state_validator.py +187 -0
  53. dory/resilience/__init__.py +45 -0
  54. dory/resilience/circuit_breaker.py +454 -0
  55. dory/resilience/retry.py +389 -0
  56. dory/sidecar/__init__.py +6 -0
  57. dory/sidecar/main.py +75 -0
  58. dory/sidecar/server.py +329 -0
  59. dory/simple.py +342 -0
  60. dory/types.py +75 -0
  61. dory/utils/__init__.py +25 -0
  62. dory/utils/errors.py +59 -0
  63. dory/utils/retry.py +115 -0
  64. dory/utils/timeout.py +80 -0
  65. dory_sdk-2.1.0.dist-info/METADATA +663 -0
  66. dory_sdk-2.1.0.dist-info/RECORD +69 -0
  67. dory_sdk-2.1.0.dist-info/WHEEL +5 -0
  68. dory_sdk-2.1.0.dist-info/entry_points.txt +3 -0
  69. dory_sdk-2.1.0.dist-info/top_level.txt +1 -0
dory/migration/configmap.py
@@ -0,0 +1,260 @@
+ """
+ ConfigMap storage backend for state persistence.
+
+ Uses Kubernetes ConfigMaps to store processor state during migrations.
+ """
+
+ import logging
+ import time
+ from typing import Any
+
+ from dory.utils.errors import DoryK8sError, DoryStateError
+
+ logger = logging.getLogger(__name__)
+
+ # Optional kubernetes import - gracefully handle if not available
+ try:
+     from kubernetes import client, config
+     from kubernetes.client.rest import ApiException
+     K8S_AVAILABLE = True
+ except ImportError:
+     K8S_AVAILABLE = False
+     client = None
+     config = None
+     ApiException = Exception
+
+
+ class ConfigMapStore:
+     """
+     Store and retrieve state from Kubernetes ConfigMaps.
+
+     ConfigMap naming convention: dory-state-{processor_id}
+     TTL: Auto-cleanup after 1 hour if not claimed.
+     """
+
+     STATE_CONFIGMAP_PREFIX = "dory-state-"
+     STATE_KEY = "state"
+     TTL_ANNOTATION = "dory.io/state-ttl"
+     CREATED_ANNOTATION = "dory.io/created-timestamp"
+     OWNER_LABEL = "dory.io/state-owner"
+     DEFAULT_TTL_SECONDS = 3600  # 1 hour
+
+     def __init__(self, namespace: str | None = None):
+         """
+         Initialize ConfigMap store.
+
+         Args:
+             namespace: Kubernetes namespace (defaults to current pod's namespace)
+         """
+         self._namespace = namespace
+         self._api: Any = None
+         self._initialized = False
+
+     def _ensure_initialized(self) -> None:
+         """Initialize Kubernetes client if not already done."""
+         if self._initialized:
+             return
+
+         if not K8S_AVAILABLE:
+             raise DoryK8sError(
+                 "Kubernetes client not available. "
+                 "Install with: pip install kubernetes"
+             )
+
+         try:
+             # Try in-cluster config first
+             config.load_incluster_config()
+             logger.debug("Using in-cluster Kubernetes config")
+         except config.ConfigException:
+             try:
+                 # Fall back to kubeconfig
+                 config.load_kube_config()
+                 logger.debug("Using kubeconfig")
+             except config.ConfigException as e:
+                 raise DoryK8sError(f"Failed to load Kubernetes config: {e}", cause=e)
+
+         self._api = client.CoreV1Api()
+
+         # Get namespace from pod environment if not specified
+         if not self._namespace:
+             import os
+             self._namespace = os.environ.get("POD_NAMESPACE", "default")
+
+         self._initialized = True
+
+     def _configmap_name(self, processor_id: str) -> str:
+         """Generate ConfigMap name for processor."""
+         return f"{self.STATE_CONFIGMAP_PREFIX}{processor_id}"
+
+     async def save(
+         self,
+         processor_id: str,
+         state_json: str,
+         ttl_seconds: int | None = None,
+     ) -> None:
+         """
+         Save state to ConfigMap.
+
+         Args:
+             processor_id: Processor ID
+             state_json: JSON-serialized state
+             ttl_seconds: TTL for auto-cleanup (default 1 hour)
+
+         Raises:
+             DoryK8sError: If ConfigMap operation fails
+         """
+         self._ensure_initialized()
+
+         cm_name = self._configmap_name(processor_id)
+         ttl = ttl_seconds or self.DEFAULT_TTL_SECONDS
+
+         configmap = client.V1ConfigMap(
+             metadata=client.V1ObjectMeta(
+                 name=cm_name,
+                 namespace=self._namespace,
+                 labels={
+                     self.OWNER_LABEL: "true",
+                 },
+                 annotations={
+                     self.TTL_ANNOTATION: str(ttl),
+                     self.CREATED_ANNOTATION: time.strftime(
+                         "%Y-%m-%dT%H:%M:%SZ", time.gmtime()
+                     ),
+                 },
+             ),
+             data={
+                 self.STATE_KEY: state_json,
+             },
+         )
+
+         try:
+             # Try to create first
+             self._api.create_namespaced_config_map(
+                 namespace=self._namespace,
+                 body=configmap,
+             )
+             logger.debug(f"Created state ConfigMap: {cm_name}")
+
+         except ApiException as e:
+             if e.status == 409:
+                 # Already exists, update it
+                 try:
+                     self._api.replace_namespaced_config_map(
+                         name=cm_name,
+                         namespace=self._namespace,
+                         body=configmap,
+                     )
+                     logger.debug(f"Updated state ConfigMap: {cm_name}")
+                 except ApiException as e2:
+                     raise DoryK8sError(
+                         f"Failed to update ConfigMap {cm_name}: {e2}",
+                         cause=e2,
+                     )
+             else:
+                 raise DoryK8sError(
+                     f"Failed to create ConfigMap {cm_name}: {e}",
+                     cause=e,
+                 )
+
+     async def load(self, processor_id: str) -> str | None:
+         """
+         Load state from ConfigMap.
+
+         Args:
+             processor_id: Processor ID
+
+         Returns:
+             JSON-serialized state, or None if not found
+
+         Raises:
+             DoryK8sError: If ConfigMap operation fails
+         """
+         self._ensure_initialized()
+
+         cm_name = self._configmap_name(processor_id)
+
+         try:
+             configmap = self._api.read_namespaced_config_map(
+                 name=cm_name,
+                 namespace=self._namespace,
+             )
+
+             state_json = configmap.data.get(self.STATE_KEY)
+             if state_json:
+                 logger.debug(f"Loaded state from ConfigMap: {cm_name}")
+                 return state_json
+             else:
+                 logger.warning(f"ConfigMap {cm_name} exists but has no state data")
+                 return None
+
+         except ApiException as e:
+             if e.status == 404:
+                 logger.debug(f"State ConfigMap not found: {cm_name}")
+                 return None
+             raise DoryK8sError(
+                 f"Failed to read ConfigMap {cm_name}: {e}",
+                 cause=e,
+             )
+
+     async def delete(self, processor_id: str) -> bool:
+         """
+         Delete state ConfigMap.
+
+         Args:
+             processor_id: Processor ID
+
+         Returns:
+             True if deleted, False if not found
+
+         Raises:
+             DoryK8sError: If ConfigMap operation fails
+         """
+         self._ensure_initialized()
+
+         cm_name = self._configmap_name(processor_id)
+
+         try:
+             self._api.delete_namespaced_config_map(
+                 name=cm_name,
+                 namespace=self._namespace,
+             )
+             logger.debug(f"Deleted state ConfigMap: {cm_name}")
+             return True
+
+         except ApiException as e:
+             if e.status == 404:
+                 logger.debug(f"State ConfigMap not found for deletion: {cm_name}")
+                 return False
+             raise DoryK8sError(
+                 f"Failed to delete ConfigMap {cm_name}: {e}",
+                 cause=e,
+             )
+
+     async def exists(self, processor_id: str) -> bool:
+         """
+         Check if state ConfigMap exists.
+
+         Args:
+             processor_id: Processor ID
+
+         Returns:
+             True if ConfigMap exists
+         """
+         self._ensure_initialized()
+
+         cm_name = self._configmap_name(processor_id)
+
+         try:
+             self._api.read_namespaced_config_map(
+                 name=cm_name,
+                 namespace=self._namespace,
+             )
+             return True
+
+         except ApiException as e:
+             if e.status == 404:
+                 return False
+             raise DoryK8sError(
+                 f"Failed to check ConfigMap {cm_name}: {e}",
+                 cause=e,
+             )
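
For orientation, a minimal usage sketch of the store above, assuming the kubernetes client is installed and an in-cluster config or kubeconfig is reachable; the namespace, processor ID, and JSON payload are illustrative and not taken from the package:

# Hypothetical sketch (not part of the diff): parking a JSON state blob in a
# ConfigMap named dory-state-<processor_id>, then reclaiming and deleting it.
import asyncio

from dory.migration.configmap import ConfigMapStore

async def main() -> None:
    store = ConfigMapStore(namespace="default")  # or rely on POD_NAMESPACE

    # save() creates dory-state-processor-1, or replaces it on a 409 conflict.
    await store.save("processor-1", '{"offset": 42}', ttl_seconds=600)

    if await store.exists("processor-1"):
        state_json = await store.load("processor-1")  # None if not found
        print(state_json)
        await store.delete("processor-1")             # False if already gone

asyncio.run(main())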
dory/migration/serialization.py
@@ -0,0 +1,167 @@
+ """
+ State serialization utilities.
+
+ Handles JSON serialization/deserialization with checksum validation.
+ """
+
+ import hashlib
+ import json
+ import time
+ from dataclasses import dataclass
+ from typing import Any
+
+ from dory.utils.errors import DoryStateError
+
+
+ @dataclass
+ class StateEnvelope:
+     """
+     Envelope wrapping state data with metadata.
+
+     Attributes:
+         payload: The actual state data
+         metadata: Metadata about when/where state was created
+         checksum: SHA256 checksum of payload for integrity
+     """
+
+     payload: dict[str, Any]
+     metadata: dict[str, Any]
+     checksum: str
+
+     def to_dict(self) -> dict[str, Any]:
+         """Convert to dictionary for serialization."""
+         return {
+             "payload": self.payload,
+             "metadata": self.metadata,
+             "checksum": self.checksum,
+         }
+
+     @classmethod
+     def from_dict(cls, data: dict[str, Any]) -> "StateEnvelope":
+         """Create from dictionary."""
+         return cls(
+             payload=data["payload"],
+             metadata=data["metadata"],
+             checksum=data["checksum"],
+         )
+
+
+ class StateSerializer:
+     """
+     Serializes and deserializes state with integrity checking.
+
+     Uses JSON format with SHA256 checksums for integrity validation.
+     """
+
+     @staticmethod
+     def compute_checksum(payload: dict[str, Any]) -> str:
+         """
+         Compute SHA256 checksum for payload.
+
+         Args:
+             payload: State payload
+
+         Returns:
+             Hex-encoded SHA256 checksum
+         """
+         payload_json = json.dumps(payload, sort_keys=True)
+         return hashlib.sha256(payload_json.encode()).hexdigest()
+
+     def serialize(
+         self,
+         state: dict[str, Any],
+         processor_id: str,
+         pod_name: str,
+         restart_count: int = 0,
+     ) -> str:
+         """
+         Serialize state to JSON string with envelope.
+
+         Args:
+             state: State dictionary to serialize
+             processor_id: Processor ID for metadata
+             pod_name: Pod name for metadata
+             restart_count: Current restart count
+
+         Returns:
+             JSON string with state envelope
+         """
+         envelope = StateEnvelope(
+             payload=state,
+             metadata={
+                 "timestamp": time.time(),
+                 "timestamp_iso": time.strftime("%Y-%m-%dT%H:%M:%SZ", time.gmtime()),
+                 "processor_id": processor_id,
+                 "pod_name": pod_name,
+                 "restart_count": restart_count,
+             },
+             checksum=self.compute_checksum(state),
+         )
+
+         return json.dumps(envelope.to_dict(), indent=2)
+
+     def deserialize(self, data: str) -> dict[str, Any]:
+         """
+         Deserialize state from JSON string.
+
+         Args:
+             data: JSON string with state envelope
+
+         Returns:
+             State payload dictionary
+
+         Raises:
+             DoryStateError: If deserialization or validation fails
+         """
+         try:
+             envelope_dict = json.loads(data)
+         except json.JSONDecodeError as e:
+             raise DoryStateError(f"Invalid JSON in state data: {e}", cause=e)
+
+         try:
+             envelope = StateEnvelope.from_dict(envelope_dict)
+         except KeyError as e:
+             raise DoryStateError(f"Missing field in state envelope: {e}", cause=e)
+
+         # Validate checksum
+         expected_checksum = self.compute_checksum(envelope.payload)
+         if envelope.checksum != expected_checksum:
+             raise DoryStateError(
+                 f"State checksum mismatch: expected {expected_checksum}, "
+                 f"got {envelope.checksum}"
+             )
+
+         return envelope.payload
+
+     def deserialize_with_metadata(self, data: str) -> StateEnvelope:
+         """
+         Deserialize state with full envelope including metadata.
+
+         Args:
+             data: JSON string with state envelope
+
+         Returns:
+             StateEnvelope with payload and metadata
+
+         Raises:
+             DoryStateError: If deserialization or validation fails
+         """
+         try:
+             envelope_dict = json.loads(data)
+         except json.JSONDecodeError as e:
+             raise DoryStateError(f"Invalid JSON in state data: {e}", cause=e)
+
+         try:
+             envelope = StateEnvelope.from_dict(envelope_dict)
+         except KeyError as e:
+             raise DoryStateError(f"Missing field in state envelope: {e}", cause=e)
+
+         # Validate checksum
+         expected_checksum = self.compute_checksum(envelope.payload)
+         if envelope.checksum != expected_checksum:
+             raise DoryStateError(
+                 f"State checksum mismatch: expected {expected_checksum}, "
+                 f"got {envelope.checksum}"
+             )
+
+         return envelope
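
A minimal round-trip sketch of the serializer above, runnable without a cluster; the processor ID, pod name, and payload values are illustrative only:

# Hypothetical sketch (not part of the diff): the envelope produced by
# StateSerializer and the checksum validation on the way back in.
import json

from dory.migration.serialization import StateSerializer
from dory.utils.errors import DoryStateError

serializer = StateSerializer()

# Wrap a payload with metadata and a SHA256 checksum over the sorted-key JSON.
blob = serializer.serialize(
    {"offset": 42, "watermark": "2024-01-01T00:00:00Z"},
    processor_id="processor-1",
    pod_name="pod-a",
    restart_count=1,
)
print(json.loads(blob)["checksum"])  # hex-encoded SHA256 of the payload

# An untouched blob deserializes back to the original payload...
assert serializer.deserialize(blob) == {"offset": 42, "watermark": "2024-01-01T00:00:00Z"}

# ...while tampering with the payload trips the checksum check.
tampered = blob.replace('"offset": 42', '"offset": 43')
try:
    serializer.deserialize(tampered)
except DoryStateError as e:
    print(f"rejected: {e}")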