puffinflow 2.dev0__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- puffinflow/__init__.py +132 -0
- puffinflow/core/__init__.py +110 -0
- puffinflow/core/agent/__init__.py +320 -0
- puffinflow/core/agent/base.py +1635 -0
- puffinflow/core/agent/checkpoint.py +50 -0
- puffinflow/core/agent/context.py +521 -0
- puffinflow/core/agent/decorators/__init__.py +90 -0
- puffinflow/core/agent/decorators/builder.py +454 -0
- puffinflow/core/agent/decorators/flexible.py +714 -0
- puffinflow/core/agent/decorators/inspection.py +144 -0
- puffinflow/core/agent/dependencies.py +57 -0
- puffinflow/core/agent/scheduling/__init__.py +21 -0
- puffinflow/core/agent/scheduling/builder.py +160 -0
- puffinflow/core/agent/scheduling/exceptions.py +35 -0
- puffinflow/core/agent/scheduling/inputs.py +137 -0
- puffinflow/core/agent/scheduling/parser.py +209 -0
- puffinflow/core/agent/scheduling/scheduler.py +413 -0
- puffinflow/core/agent/state.py +141 -0
- puffinflow/core/config.py +62 -0
- puffinflow/core/coordination/__init__.py +137 -0
- puffinflow/core/coordination/agent_group.py +359 -0
- puffinflow/core/coordination/agent_pool.py +629 -0
- puffinflow/core/coordination/agent_team.py +577 -0
- puffinflow/core/coordination/coordinator.py +720 -0
- puffinflow/core/coordination/deadlock.py +1759 -0
- puffinflow/core/coordination/fluent_api.py +421 -0
- puffinflow/core/coordination/primitives.py +478 -0
- puffinflow/core/coordination/rate_limiter.py +520 -0
- puffinflow/core/observability/__init__.py +47 -0
- puffinflow/core/observability/agent.py +139 -0
- puffinflow/core/observability/alerting.py +73 -0
- puffinflow/core/observability/config.py +127 -0
- puffinflow/core/observability/context.py +88 -0
- puffinflow/core/observability/core.py +147 -0
- puffinflow/core/observability/decorators.py +105 -0
- puffinflow/core/observability/events.py +71 -0
- puffinflow/core/observability/interfaces.py +196 -0
- puffinflow/core/observability/metrics.py +137 -0
- puffinflow/core/observability/tracing.py +209 -0
- puffinflow/core/reliability/__init__.py +27 -0
- puffinflow/core/reliability/bulkhead.py +96 -0
- puffinflow/core/reliability/circuit_breaker.py +149 -0
- puffinflow/core/reliability/leak_detector.py +122 -0
- puffinflow/core/resources/__init__.py +77 -0
- puffinflow/core/resources/allocation.py +790 -0
- puffinflow/core/resources/pool.py +645 -0
- puffinflow/core/resources/quotas.py +567 -0
- puffinflow/core/resources/requirements.py +217 -0
- puffinflow/version.py +21 -0
- puffinflow-2.dev0.dist-info/METADATA +334 -0
- puffinflow-2.dev0.dist-info/RECORD +55 -0
- puffinflow-2.dev0.dist-info/WHEEL +5 -0
- puffinflow-2.dev0.dist-info/entry_points.txt +3 -0
- puffinflow-2.dev0.dist-info/licenses/LICENSE +21 -0
- puffinflow-2.dev0.dist-info/top_level.txt +1 -0
|
@@ -0,0 +1,50 @@
|
|
|
1
|
+
"""Checkpoint management for agents."""
|
|
2
|
+
|
|
3
|
+
import time
|
|
4
|
+
from dataclasses import dataclass
|
|
5
|
+
from typing import TYPE_CHECKING, Any, Optional
|
|
6
|
+
|
|
7
|
+
if TYPE_CHECKING:
|
|
8
|
+
from .base import Agent
|
|
9
|
+
from .state import (
|
|
10
|
+
AgentStatus,
|
|
11
|
+
PrioritizedState,
|
|
12
|
+
StateMetadata,
|
|
13
|
+
)
|
|
14
|
+
|
|
15
|
+
|
|
16
|
+
@dataclass
class AgentCheckpoint:
    """Point-in-time snapshot of an agent's execution state.

    Built via :meth:`create_from_agent` so an agent can later be inspected
    at (or restored to) this moment. Nested structures are deep-copied and
    sets re-created so later mutation of the live agent never leaks into
    the checkpoint.
    """

    timestamp: float
    agent_name: str
    agent_status: "AgentStatus"
    priority_queue: list["PrioritizedState"]
    state_metadata: dict[str, "StateMetadata"]
    running_states: set[str]
    completed_states: set[str]
    completed_once: set[str]
    shared_state: dict[str, Any]
    session_start: Optional[float]

    @classmethod
    def create_from_agent(cls, agent: "Agent") -> "AgentCheckpoint":
        """Capture an isolated snapshot of *agent*'s mutable state."""
        from copy import deepcopy

        snapshot = {
            "timestamp": time.time(),
            "agent_name": agent.name,
            "agent_status": agent.status,
            # Deep-copy nested containers so the checkpoint is insulated
            # from further mutation of the live agent.
            "priority_queue": deepcopy(agent.priority_queue),
            "state_metadata": deepcopy(agent.state_metadata),
            # Fresh sets suffice here: the members are plain strings.
            "running_states": set(agent.running_states),
            "completed_states": set(agent.completed_states),
            "completed_once": set(agent.completed_once),
            "shared_state": deepcopy(agent.shared_state),
            # Agents created before session tracking may lack the attribute.
            "session_start": getattr(agent, "session_start", None),
        }
        return cls(**snapshot)
|
|
@@ -0,0 +1,521 @@
|
|
|
1
|
+
"""Context with rich content management."""
|
|
2
|
+
|
|
3
|
+
import asyncio
|
|
4
|
+
import contextlib
|
|
5
|
+
import time
|
|
6
|
+
from typing import Any, Callable, Optional, TypeVar, Union
|
|
7
|
+
|
|
8
|
+
try:
    # Prefer the top-level BaseModel (Pydantic v2 layout).
    from pydantic import BaseModel as PydanticBaseModel

    _PYD_VER = 2  # detected major Pydantic version; 0 means unavailable
    _PBM: Any = PydanticBaseModel
except ImportError:
    try:
        # Fall back to the v1 compatibility namespace shipped with Pydantic v2.
        # NOTE(review): on a genuine Pydantic v1 install the first import also
        # succeeds, so _PYD_VER would report 2 there — confirm intended.
        from pydantic.v1 import BaseModel as PydanticBaseModel  # type: ignore

        _PYD_VER = 1
        _PBM = PydanticBaseModel
    except ImportError as _e:
        # Pydantic is absent entirely: substitute a bare placeholder class so
        # isinstance checks remain valid, and remember the import error so
        # Context._ensure_pydantic() can re-raise it with context.
        _PBM = type("BaseModel", (object,), {})
        _PYD_VER = 0
        _PYD_ERR = _e

from typing import Protocol, runtime_checkable

# Type variable bound to the (possibly placeholder) BaseModel, letting the
# typed getters preserve the caller's concrete model type.
_PBM_T = TypeVar("_PBM_T", bound=_PBM)
|
|
27
|
+
|
|
28
|
+
|
|
29
|
+
@runtime_checkable
class TypedContextData(Protocol):
    """Protocol for typed context data.

    Intentionally empty: it declares no members, so an ``isinstance`` check
    against this runtime-checkable Protocol accepts any object. It currently
    serves only as a structural marker type for annotations.
    """

    pass
|
|
34
|
+
|
|
35
|
+
|
|
36
|
+
class StateType:
    """String constants naming the categories of context state.

    These selectors are consumed by ``Context.remove_state``,
    ``Context.clear_state`` and ``Context.get_keys`` to choose which storage
    area an operation applies to.
    """

    ANY = "any"  # both typed and untyped state
    TYPED = "typed"  # Pydantic-backed typed data
    UNTYPED = "untyped"  # plain shared-state entries
|
|
42
|
+
|
|
43
|
+
|
|
44
|
+
class Context:
    """Enhanced context for agent state management with rich content support.

    Wraps a caller-supplied ``shared_state`` dict and layers several storage
    areas on top of it:

    - plain variables, stored directly in ``shared_state``
    - typed data / typed variables (Pydantic-backed; ``set_typed`` and
      ``set_typed_variable``)
    - validated Pydantic data kept in ``shared_state`` (``set_validated_data``)
    - write-once constants and secrets (``const_`` / ``secret_`` prefixes)
    - per-context outputs, metadata, metrics, and a TTL-based cache

    Type bookkeeping for typed/validated entries is persisted back into
    ``shared_state`` under the ``_meta_*`` prefixes so that a new ``Context``
    built over the same dict can restore it (see ``_restore_metadata``).
    """

    # Reserved key prefixes used to persist type metadata into shared_state.
    _META_TYPED = "_meta_typed_"
    _META_VALIDATED = "_meta_validated_"
    _META_METADATA = "_meta_metadata_"
    # Keys starting with these prefixes are write-once and guarded.
    _IMMUTABLE_PREFIXES = ("const_", "secret_")

    def __init__(self, shared_state: dict[str, Any], cache_ttl: int = 300) -> None:
        # NOTE(review): the annotation says dict, yet None is tolerated here.
        self.shared_state = shared_state if shared_state is not None else {}
        self.cache_ttl = cache_ttl  # default TTL (seconds) for set_cached()
        self._typed_data: dict[str, Any] = {}  # key -> Pydantic model instance
        self._typed_var_types: dict[str, type] = {}  # key -> pinned type
        self._validated_types: dict[str, type] = {}  # key -> pinned type
        self._cache: dict[str, tuple] = {}  # (value, expiry_time)
        self._outputs: dict[str, Any] = {}
        self._metadata: dict[str, Any] = {}
        self._metrics: dict[str, Union[int, float]] = {}
        # Rebuild the type registries from any _meta_* keys already present.
        self._restore_metadata()

    def _restore_metadata(self) -> None:
        """Restore metadata from shared state.

        Re-populates the typed/validated type registries from the
        ``_meta_typed_*`` / ``_meta_validated_*`` entries that a previous
        Context persisted into the same ``shared_state`` dict.
        """
        # Restore typed data metadata
        typed_keys = [k for k in self.shared_state if k.startswith(self._META_TYPED)]
        for k in typed_keys:
            orig = k[len(self._META_TYPED) :]
            self._typed_var_types[orig] = self.shared_state[k]

        # Restore validated data metadata
        validated_keys = [
            k for k in self.shared_state if k.startswith(self._META_VALIDATED)
        ]
        for k in validated_keys:
            orig = k[len(self._META_VALIDATED) :]
            self._validated_types[orig] = self.shared_state[k]

    @staticmethod
    def _now() -> float:
        """Get current timestamp (seconds since the epoch)."""
        return time.time()

    def _ensure_pydantic(self) -> None:
        """Ensure Pydantic is available.

        Raises:
            ImportError: when Pydantic could not be imported at module load
                time (``_PYD_VER == 0``), including the original error text.
        """
        if _PYD_VER == 0:
            raise ImportError(f"Pydantic is required for typed operations: {_PYD_ERR}")

    def _guard_reserved(self, key: str) -> None:
        """Guard against reserved key prefixes.

        Raises ValueError for keys that would collide with the write-once
        ``const_`` / ``secret_`` namespaces.
        """
        if any(key.startswith(prefix) for prefix in self._IMMUTABLE_PREFIXES):
            raise ValueError(f"Cannot modify reserved key: {key}")

    def _persist_meta(self, prefix: str, key: str, cls: type) -> None:
        """Persist type metadata to shared state so other Contexts can restore it."""
        meta_key = f"{prefix}{key}"
        self.shared_state[meta_key] = cls

    # Basic state management
    def set_state(self, key: str, value: Any) -> None:
        """Set a state value (rejects reserved const_/secret_ keys)."""
        self._guard_reserved(key)
        self.shared_state[key] = value

    def get_state(self, key: str, default: Any = None) -> Any:
        """Get a state value."""
        return self.shared_state.get(key, default)

    # Typed data management
    def set_typed(self, key: str, value: _PBM) -> None:
        """Set typed data with Pydantic model validation.

        The value lives in the per-context ``_typed_data`` store (not in
        ``shared_state``); only its type is persisted for restoration.
        """
        self._ensure_pydantic()
        if not isinstance(value, _PBM):
            raise TypeError(f"Value must be a Pydantic model, got {type(value)}")

        self._typed_data[key] = value
        self._typed_var_types[key] = type(value)
        self._persist_meta(self._META_TYPED, key, type(value))

    def get_typed(self, key: str, expected: type[_PBM_T]) -> Optional[_PBM_T]:
        """Get typed data, or None when missing or not an *expected* instance."""
        self._ensure_pydantic()
        val = self._typed_data.get(key)
        if val is None:
            return None

        if not isinstance(val, expected):
            return None

        return val

    def update_typed(self, key: str, **updates: Any) -> None:
        """Update fields in typed data.

        Rebuilds the model from its current fields plus *updates* so the
        model's validation runs again. Silently does nothing when *key* is
        absent or not a Pydantic model.
        """
        self._ensure_pydantic()
        current = self._typed_data.get(key)
        if current and isinstance(current, _PBM):
            # Create new instance with updated fields
            # NOTE(review): .dict() is the Pydantic v1 API (deprecated in v2
            # in favour of .model_dump()) — confirm supported versions.
            updated_data = current.dict()
            updated_data.update(updates)
            new_instance = type(current)(**updated_data)
            self._typed_data[key] = new_instance

    # Variable management (free variables)
    def set_variable(self, key: str, value: Any) -> None:
        """Set a variable in shared state (reserved prefixes rejected)."""
        if any(key.startswith(prefix) for prefix in self._IMMUTABLE_PREFIXES):
            raise ValueError(f"Cannot set variable with reserved prefix: {key}")
        self.shared_state[key] = value

    def get_variable(self, key: str, default: Any = None) -> Any:
        """Get a variable from shared state."""
        return self.shared_state.get(key, default)

    def get_variable_keys(self) -> set[str]:
        """Get all variable keys, excluding reserved and metadata prefixes."""
        return {
            k
            for k in self.shared_state
            if not any(k.startswith(prefix) for prefix in self._IMMUTABLE_PREFIXES)
            and not k.startswith(self._META_TYPED)
            and not k.startswith(self._META_VALIDATED)
            and not k.startswith(self._META_METADATA)
        }

    # Typed variables (with type consistency checking)
    def set_typed_variable(self, key: str, value: Any) -> None:
        """Set a typed variable with type consistency checking.

        The first write pins the variable's concrete type; later writes must
        be instances of that pinned type or TypeError is raised.
        """
        if any(key.startswith(prefix) for prefix in self._IMMUTABLE_PREFIXES):
            raise ValueError(f"Cannot set typed variable with reserved prefix: {key}")

        current_cls = self._typed_var_types.get(key)
        if current_cls and not isinstance(value, current_cls):
            raise TypeError(
                f"Type mismatch for {key}: expected {current_cls}, got {type(value)}"
            )

        self.shared_state[key] = value
        if key not in self._typed_var_types:
            # First write: pin the type and persist it for other Contexts.
            cls = type(value)
            self._typed_var_types[key] = cls
            self._persist_meta(self._META_TYPED, key, cls)

    def get_typed_variable(self, key: str, expected: type[Any]) -> Optional[Any]:
        """Get a typed variable, or None when missing or of the wrong type."""
        val = self.shared_state.get(key)
        if val is None:
            return None

        if not isinstance(val, expected):
            return None

        return val

    # Validated data (Pydantic models stored in shared state)
    def set_validated_data(self, key: str, value: _PBM) -> None:
        """Set validated Pydantic data in shared state.

        Like ``set_typed_variable`` the first write pins the type, but the
        value must be a Pydantic model and is stored in ``shared_state``.
        """
        self._ensure_pydantic()
        self._guard_reserved(key)
        if not isinstance(value, _PBM):
            raise TypeError(f"Value must be a Pydantic model, got {type(value)}")

        current_cls = self._validated_types.get(key)
        if current_cls and not isinstance(value, current_cls):
            raise TypeError(
                f"Type mismatch for {key}: expected {current_cls}, got {type(value)}"
            )

        self.shared_state[key] = value
        self._validated_types[key] = type(value)
        self._persist_meta(self._META_VALIDATED, key, type(value))

    def get_validated_data(self, key: str, expected: type[_PBM_T]) -> Optional[_PBM_T]:
        """Get validated data, or None when missing or not an *expected* instance."""
        self._ensure_pydantic()
        val = self.shared_state.get(key)
        if val is None:
            return None

        if not isinstance(val, expected):
            return None

        return val

    # Immutable data (constants and secrets)
    def _set_immutable(self, prefix: str, key: str, value: Any) -> None:
        """Set immutable data with prefix; raises ValueError on re-write."""
        full = f"{prefix}{key}"
        if full in self.shared_state:
            raise ValueError(f"Immutable key {key} already exists")
        self.shared_state[full] = value

    def set_constant(self, key: str, value: Any) -> None:
        """Set a constant value (immutable, write-once)."""
        self._set_immutable("const_", key, value)

    def get_constant(self, key: str, default: Any = None) -> Any:
        """Get a constant value."""
        return self.shared_state.get(f"const_{key}", default)

    def set_secret(self, key: str, value: str) -> None:
        """Set a secret value (immutable, string only)."""
        if not isinstance(value, str):
            raise TypeError("Secrets must be strings")
        self._set_immutable("secret_", key, value)

    def get_secret(self, key: str) -> Optional[str]:
        """Get a secret value, or None when absent."""
        return self.shared_state.get(f"secret_{key}")

    # Output management
    def set_output(self, key: str, value: Any) -> None:
        """Set an output value (per-context; not written to shared_state)."""
        self._outputs[key] = value

    def get_output(self, key: str, default: Any = None) -> Any:
        """Get an output value."""
        return self._outputs.get(key, default)

    def get_output_keys(self) -> set[str]:
        """Get all output keys."""
        return set(self._outputs.keys())

    def get_all_outputs(self) -> dict[str, Any]:
        """Get a shallow copy of all outputs."""
        return self._outputs.copy()

    # Metadata management
    def set_metadata(self, key: str, value: Any) -> None:
        """Set metadata value (kept locally and mirrored into shared_state)."""
        self._metadata[key] = value
        # Also persist to shared state for cross-agent access
        self.shared_state[f"{self._META_METADATA}{key}"] = value

    def get_metadata(self, key: str, default: Any = None) -> Any:
        """Get metadata value; local entries win over the shared mirror.

        NOTE(review): a locally stored value of None is indistinguishable
        from "absent" here and falls through to the shared/default lookup.
        """
        # Try local metadata first, then shared state
        value = self._metadata.get(key)
        if value is not None:
            return value
        return self.shared_state.get(f"{self._META_METADATA}{key}", default)

    def get_all_metadata(self) -> dict[str, Any]:
        """Get all metadata, merging local entries over shared ones."""
        result = self._metadata.copy()
        # Add shared metadata
        for key, value in self.shared_state.items():
            if key.startswith(self._META_METADATA):
                orig_key = key[len(self._META_METADATA) :]
                if orig_key not in result:
                    result[orig_key] = value
        return result

    # Metrics management
    def set_metric(self, key: str, value: Union[int, float]) -> None:
        """Set a metric value (numbers only)."""
        if not isinstance(value, (int, float)):
            raise TypeError("Metrics must be numeric")
        self._metrics[key] = value

    def get_metric(self, key: str, default: Union[int, float] = 0) -> Union[int, float]:
        """Get a metric value."""
        return self._metrics.get(key, default)

    def increment_metric(self, key: str, amount: Union[int, float] = 1) -> None:
        """Increment a metric (implicitly starting from 0 when absent)."""
        current = self._metrics.get(key, 0)
        self._metrics[key] = current + amount

    def get_all_metrics(self) -> dict[str, Union[int, float]]:
        """Get a shallow copy of all metrics."""
        return self._metrics.copy()

    # Cache management with TTL
    def set_cached(self, key: str, value: Any, ttl: Optional[int] = None) -> None:
        """Set a cached value with TTL in seconds (defaults to ``cache_ttl``).

        A non-positive TTL stores the entry already expired.
        """
        if ttl is None:
            ttl = self.cache_ttl

        expiry_time = self._now() + ttl if ttl > 0 else self._now() - 1
        self._cache[key] = (value, expiry_time)

    def get_cached(self, key: str, default: Any = None) -> Any:
        """Get a cached value, respecting TTL (expired entries are evicted)."""
        if key not in self._cache:
            return default

        value, expiry_time = self._cache[key]
        if self._now() > expiry_time:
            del self._cache[key]
            return default

        return value

    def clear_expired_cache(self) -> int:
        """Clear expired cache entries and return count cleared."""
        now = self._now()
        expired_keys = [
            key for key, (_, expiry_time) in self._cache.items() if now > expiry_time
        ]

        for key in expired_keys:
            del self._cache[key]

        return len(expired_keys)

    # State management and cleanup
    def remove_state(self, key: str, state_type: str = StateType.ANY) -> bool:
        """Remove state data by type; returns True when anything was removed.

        NOTE(review): the persisted ``_meta_typed_*`` entry in shared_state
        is not removed here — confirm whether that is intentional.
        """
        removed = False

        if (
            state_type in (StateType.ANY, StateType.UNTYPED)
            and key in self.shared_state
        ):
            del self.shared_state[key]
            removed = True

        if state_type in (StateType.ANY, StateType.TYPED):
            if key in self._typed_data:
                del self._typed_data[key]
                removed = True
            if key in self._typed_var_types:
                del self._typed_var_types[key]
                removed = True

        return removed

    def clear_state(self, state_type: str = StateType.ANY) -> None:
        """Clear state data by type (constants/secrets are always kept)."""
        if state_type in (StateType.ANY, StateType.UNTYPED):
            # Clear non-reserved keys from shared state
            # NOTE(review): _meta_metadata_ keys are NOT excluded below, so
            # shared metadata is cleared together with untyped state —
            # confirm this asymmetry with get_variable_keys() is intended.
            keys_to_remove = [
                k
                for k in self.shared_state
                if not any(k.startswith(prefix) for prefix in self._IMMUTABLE_PREFIXES)
                and not k.startswith(self._META_TYPED)
                and not k.startswith(self._META_VALIDATED)
            ]
            for key in keys_to_remove:
                del self.shared_state[key]

        if state_type in (StateType.ANY, StateType.TYPED):
            self._typed_data.clear()
            self._typed_var_types.clear()

    def get_keys(self, state_type: str = StateType.ANY) -> set[str]:
        """Get keys by state type."""
        keys = set()

        if state_type in (StateType.ANY, StateType.UNTYPED):
            keys.update(self.get_variable_keys())

        if state_type in (StateType.ANY, StateType.TYPED):
            keys.update(self._typed_data.keys())

        return keys

    # Human-in-the-loop functionality
    async def human_in_the_loop(
        self,
        prompt: str,
        timeout: Optional[float] = None,
        default: Optional[str] = None,
        validator: Optional[Callable[[str], bool]] = None,
    ) -> Optional[str]:
        """Get human input with optional timeout and validation.

        Args:
            prompt: Text shown to the user by ``input()``.
            timeout: Per-attempt timeout in seconds; when falsy, falls back
                to plain blocking ``input()``.
            default: Returned on timeout or when all attempts fail.
            validator: Predicate over the reply; an invalid reply consumes
                one of the 3 attempts before re-prompting.
        """
        max_attempts = 3
        attempt = 0

        while attempt < max_attempts:
            try:
                if timeout:
                    # Async input with timeout
                    # NOTE(review): get_event_loop() inside a coroutine is
                    # deprecated since 3.10; get_running_loop() is preferred.
                    reply = await asyncio.wait_for(
                        asyncio.get_event_loop().run_in_executor(None, input, prompt),
                        timeout=timeout,
                    )
                else:
                    # Synchronous input (blocks the event loop while waiting)
                    reply = input(prompt)

                # Validate if validator provided
                if validator:
                    if validator(reply):
                        return reply
                    else:
                        print("Invalid input, please try again.")
                        attempt += 1
                        continue

                return reply

            except asyncio.TimeoutError:
                # First timeout wins: give up immediately with the default.
                return default
            except Exception:
                # Any other input failure (e.g. EOF) consumes an attempt.
                attempt += 1
                if attempt >= max_attempts:
                    return default

        return default

    # Content inspection
    def get_content_summary(self) -> dict[str, Any]:
        """Get a summary (item counts) of all content in context."""
        return {
            "variables": len(self.get_variable_keys()),
            "outputs": len(self._outputs),
            "metadata": len(self._metadata),
            "metrics": len(self._metrics),
            "typed_data": len(self._typed_data),
            "cached_items": len(self._cache),
            "constants": len([k for k in self.shared_state if k.startswith("const_")]),
            "secrets": len([k for k in self.shared_state if k.startswith("secret_")]),
        }

    def export_content(self, include_secrets: bool = False) -> dict[str, Any]:
        """Export all context content as a plain dict (prefixes stripped)."""
        content = {
            "variables": {k: self.shared_state[k] for k in self.get_variable_keys()},
            "outputs": self._outputs.copy(),
            "metadata": self.get_all_metadata(),
            "metrics": self._metrics.copy(),
            "typed_data": self._typed_data.copy(),
        }

        # Add constants
        constants = {
            k[6:]: v for k, v in self.shared_state.items() if k.startswith("const_")
        }
        if constants:
            content["constants"] = constants

        # Add secrets if requested
        if include_secrets:
            secrets = {
                k[7:]: v
                for k, v in self.shared_state.items()
                if k.startswith("secret_")
            }
            if secrets:
                content["secrets"] = secrets

        return content

    def import_content(self, content: dict[str, Any]) -> None:
        """Import content into context (inverse of ``export_content``).

        Pre-existing constants/secrets are preserved: the ValueError raised
        on re-writing an immutable key is suppressed per entry.
        """
        # Import variables
        if "variables" in content:
            for key, value in content["variables"].items():
                self.set_variable(key, value)

        # Import outputs
        if "outputs" in content:
            self._outputs.update(content["outputs"])

        # Import metadata
        if "metadata" in content:
            for key, value in content["metadata"].items():
                self.set_metadata(key, value)

        # Import metrics
        if "metrics" in content:
            for key, value in content["metrics"].items():
                self.set_metric(key, value)

        # Import typed data
        if "typed_data" in content:
            self._typed_data.update(content["typed_data"])

        # Import constants
        if "constants" in content:
            for key, value in content["constants"].items():
                with contextlib.suppress(ValueError):
                    self.set_constant(key, value)

        # Import secrets
        if "secrets" in content:
            for key, value in content["secrets"].items():
                with contextlib.suppress(ValueError):
                    self.set_secret(key, value)
|
|
@@ -0,0 +1,90 @@
|
|
|
1
|
+
"""Enhanced state decorators with flexible configuration."""
|
|
2
|
+
|
|
3
|
+
# Import flexible decorator as the main state decorator
|
|
4
|
+
# Import builder pattern
|
|
5
|
+
from .builder import (
|
|
6
|
+
StateBuilder,
|
|
7
|
+
build_state,
|
|
8
|
+
cpu_state,
|
|
9
|
+
exclusive_state,
|
|
10
|
+
gpu_state,
|
|
11
|
+
memory_state,
|
|
12
|
+
)
|
|
13
|
+
from .builder import concurrent_state as builder_concurrent_state
|
|
14
|
+
from .builder import critical_state as builder_critical_state
|
|
15
|
+
from .builder import high_priority_state as builder_high_priority_state
|
|
16
|
+
from .flexible import (
|
|
17
|
+
PROFILES,
|
|
18
|
+
FlexibleStateDecorator,
|
|
19
|
+
StateProfile,
|
|
20
|
+
batch_state,
|
|
21
|
+
concurrent_state,
|
|
22
|
+
cpu_intensive,
|
|
23
|
+
create_custom_decorator,
|
|
24
|
+
critical_state,
|
|
25
|
+
get_profile,
|
|
26
|
+
gpu_accelerated,
|
|
27
|
+
io_intensive,
|
|
28
|
+
list_profiles,
|
|
29
|
+
memory_intensive,
|
|
30
|
+
minimal_state,
|
|
31
|
+
network_intensive,
|
|
32
|
+
quick_state,
|
|
33
|
+
state,
|
|
34
|
+
synchronized_state,
|
|
35
|
+
)
|
|
36
|
+
|
|
37
|
+
# Import inspection utilities
|
|
38
|
+
from .inspection import (
|
|
39
|
+
compare_states,
|
|
40
|
+
get_state_config,
|
|
41
|
+
get_state_coordination,
|
|
42
|
+
get_state_rate_limit,
|
|
43
|
+
get_state_requirements,
|
|
44
|
+
get_state_summary,
|
|
45
|
+
is_puffinflow_state,
|
|
46
|
+
list_state_metadata,
|
|
47
|
+
)
|
|
48
|
+
|
|
49
|
+
# Explicit public API of the decorators package. The list is alphabetical;
# the inline comments mark where each feature group's names begin.
__all__ = [
    "PROFILES",
    "FlexibleStateDecorator",
    # Builder pattern
    "StateBuilder",
    "StateProfile",
    "batch_state",
    "build_state",
    "builder_concurrent_state",
    "builder_critical_state",
    "builder_high_priority_state",
    "compare_states",
    "concurrent_state",
    "cpu_intensive",
    "cpu_state",
    "create_custom_decorator",
    "critical_state",
    "exclusive_state",
    # Profile management
    "get_profile",
    "get_state_config",
    "get_state_coordination",
    "get_state_rate_limit",
    "get_state_requirements",
    "get_state_summary",
    "gpu_accelerated",
    "gpu_state",
    "io_intensive",
    # Inspection utilities
    "is_puffinflow_state",
    "list_profiles",
    "list_state_metadata",
    "memory_intensive",
    "memory_state",
    # Profile-based decorators
    "minimal_state",
    "network_intensive",
    "quick_state",
    # Main decorator
    "state",
    "synchronized_state",
]
|