penguiflow 1.0.3__py3-none-any.whl → 2.1.0__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Potentially problematic release.
This version of penguiflow might be problematic. Click here for more details.
- penguiflow/__init__.py +45 -3
- penguiflow/admin.py +174 -0
- penguiflow/bus.py +30 -0
- penguiflow/core.py +941 -57
- penguiflow/errors.py +113 -0
- penguiflow/metrics.py +105 -0
- penguiflow/middlewares.py +6 -7
- penguiflow/patterns.py +47 -5
- penguiflow/policies.py +149 -0
- penguiflow/remote.py +486 -0
- penguiflow/state.py +64 -0
- penguiflow/streaming.py +142 -0
- penguiflow/testkit.py +269 -0
- penguiflow/types.py +15 -1
- penguiflow/viz.py +133 -24
- penguiflow-2.1.0.dist-info/METADATA +646 -0
- penguiflow-2.1.0.dist-info/RECORD +25 -0
- penguiflow-2.1.0.dist-info/entry_points.txt +2 -0
- penguiflow-2.1.0.dist-info/top_level.txt +2 -0
- penguiflow_a2a/__init__.py +19 -0
- penguiflow_a2a/server.py +695 -0
- penguiflow-1.0.3.dist-info/METADATA +0 -425
- penguiflow-1.0.3.dist-info/RECORD +0 -13
- penguiflow-1.0.3.dist-info/top_level.txt +0 -1
- {penguiflow-1.0.3.dist-info → penguiflow-2.1.0.dist-info}/WHEEL +0 -0
- {penguiflow-1.0.3.dist-info → penguiflow-2.1.0.dist-info}/licenses/LICENSE +0 -0
penguiflow/errors.py
ADDED
|
@@ -0,0 +1,113 @@
|
|
|
1
|
+
"""Traceable exception surface for PenguiFlow."""
|
|
2
|
+
|
|
3
|
+
from __future__ import annotations
|
|
4
|
+
|
|
5
|
+
from collections.abc import Mapping
|
|
6
|
+
from enum import Enum
|
|
7
|
+
from typing import Any
|
|
8
|
+
|
|
9
|
+
|
|
10
|
+
class FlowErrorCode(str, Enum):
    """Stable, machine-readable error codes emitted by the runtime.

    Subclassing ``str`` keeps members directly comparable to (and
    serialisable as) their string values.
    """

    # Node-level failures
    NODE_TIMEOUT = "NODE_TIMEOUT"
    NODE_EXCEPTION = "NODE_EXCEPTION"
    # Trace-level terminations
    TRACE_CANCELLED = "TRACE_CANCELLED"
    DEADLINE_EXCEEDED = "DEADLINE_EXCEEDED"
    # Budget exhaustion
    HOP_BUDGET_EXHAUSTED = "HOP_BUDGET_EXHAUSTED"
    TOKEN_BUDGET_EXHAUSTED = "TOKEN_BUDGET_EXHAUSTED"
|
|
19
|
+
|
|
20
|
+
|
|
21
|
+
class FlowError(Exception):
    """Exception that carries trace/node metadata for a failed flow step.

    Wraps the underlying exception (if any) together with the trace id,
    node identity and a stable error code so downstream handlers can react
    without string-matching messages.
    """

    __slots__ = (
        "trace_id",
        "node_name",
        "node_id",
        "code",
        "message",
        "original_exc",
        "metadata",
        "exception_type",
    )

    def __init__(
        self,
        *,
        trace_id: str | None,
        node_name: str | None,
        code: FlowErrorCode | str,
        message: str,
        original_exc: BaseException | None = None,
        node_id: str | None = None,
        metadata: Mapping[str, Any] | None = None,
    ) -> None:
        super().__init__(message)
        self.trace_id = trace_id
        self.node_name = node_name
        self.node_id = node_id
        # Normalise enum members so ``code`` is always a plain string.
        if isinstance(code, FlowErrorCode):
            self.code = code.value
        else:
            self.code = str(code)
        self.message = message
        self.original_exc = original_exc
        # Copy caller-supplied metadata so later mutation cannot leak in.
        self.metadata = {} if metadata is None else dict(metadata)
        self.exception_type = (
            None if original_exc is None else type(original_exc).__name__
        )

    def __str__(self) -> str:  # pragma: no cover - debug helper
        suffix = ""
        if self.trace_id:
            suffix += f" trace={self.trace_id}"
        if self.node_name:
            suffix += f" node={self.node_name}"
        return f"[{self.code}] {self.message}{suffix}".strip()

    def unwrap(self) -> BaseException | None:
        """Return the wrapped exception, if any."""

        return self.original_exc

    def to_payload(self) -> dict[str, Any]:
        """Return a JSON-serialisable representation of the error.

        Optional fields are included only when set, keeping payloads small.
        """

        payload: dict[str, Any] = {
            "code": self.code,
            "message": self.message,
        }
        for key, value in (
            ("trace_id", self.trace_id),
            ("node_name", self.node_name),
            ("node_id", self.node_id),
            ("exception_type", self.exception_type),
        ):
            if value is not None:
                payload[key] = value
        if self.metadata:
            payload["metadata"] = dict(self.metadata)
        return payload

    @classmethod
    def from_exception(
        cls,
        *,
        trace_id: str | None,
        node_name: str | None,
        node_id: str | None,
        exc: BaseException,
        code: FlowErrorCode,
        message: str | None = None,
        metadata: Mapping[str, Any] | None = None,
    ) -> FlowError:
        """Build a ``FlowError`` from an underlying exception.

        Falls back to ``str(exc)`` and finally the exception class name
        when no explicit *message* is provided.
        """

        text = message or str(exc) or type(exc).__name__
        return cls(
            trace_id=trace_id,
            node_name=node_name,
            node_id=node_id,
            code=code,
            message=text,
            original_exc=exc,
            metadata=metadata,
        )
|
|
111
|
+
|
|
112
|
+
|
|
113
|
+
__all__ = ["FlowError", "FlowErrorCode"]
|
penguiflow/metrics.py
ADDED
|
@@ -0,0 +1,105 @@
|
|
|
1
|
+
"""Observability primitives for PenguiFlow."""
|
|
2
|
+
|
|
3
|
+
from __future__ import annotations
|
|
4
|
+
|
|
5
|
+
from collections.abc import Mapping
|
|
6
|
+
from dataclasses import dataclass, field
|
|
7
|
+
from types import MappingProxyType
|
|
8
|
+
from typing import Any
|
|
9
|
+
|
|
10
|
+
|
|
11
|
+
@dataclass(frozen=True, slots=True)
|
|
12
|
+
class FlowEvent:
|
|
13
|
+
"""Structured runtime event emitted around node execution."""
|
|
14
|
+
|
|
15
|
+
event_type: str
|
|
16
|
+
ts: float
|
|
17
|
+
node_name: str | None
|
|
18
|
+
node_id: str | None
|
|
19
|
+
trace_id: str | None
|
|
20
|
+
attempt: int
|
|
21
|
+
latency_ms: float | None
|
|
22
|
+
queue_depth_in: int
|
|
23
|
+
queue_depth_out: int
|
|
24
|
+
outgoing_edges: int
|
|
25
|
+
queue_maxsize: int
|
|
26
|
+
trace_pending: int | None
|
|
27
|
+
trace_inflight: int
|
|
28
|
+
trace_cancelled: bool
|
|
29
|
+
extra: Mapping[str, Any] = field(default_factory=dict)
|
|
30
|
+
|
|
31
|
+
def __post_init__(self) -> None:
|
|
32
|
+
object.__setattr__(self, "extra", MappingProxyType(dict(self.extra)))
|
|
33
|
+
|
|
34
|
+
@property
|
|
35
|
+
def queue_depth(self) -> int:
|
|
36
|
+
"""Return the combined depth of incoming and outgoing queues."""
|
|
37
|
+
|
|
38
|
+
return self.queue_depth_in + self.queue_depth_out
|
|
39
|
+
|
|
40
|
+
def to_payload(self) -> dict[str, Any]:
|
|
41
|
+
"""Render a dictionary payload suitable for structured logging."""
|
|
42
|
+
|
|
43
|
+
payload: dict[str, Any] = {
|
|
44
|
+
"ts": self.ts,
|
|
45
|
+
"event": self.event_type,
|
|
46
|
+
"node_name": self.node_name,
|
|
47
|
+
"node_id": self.node_id,
|
|
48
|
+
"trace_id": self.trace_id,
|
|
49
|
+
"latency_ms": self.latency_ms,
|
|
50
|
+
"q_depth_in": self.queue_depth_in,
|
|
51
|
+
"q_depth_out": self.queue_depth_out,
|
|
52
|
+
"q_depth_total": self.queue_depth,
|
|
53
|
+
"outgoing": self.outgoing_edges,
|
|
54
|
+
"queue_maxsize": self.queue_maxsize,
|
|
55
|
+
"attempt": self.attempt,
|
|
56
|
+
"trace_inflight": self.trace_inflight,
|
|
57
|
+
"trace_cancelled": self.trace_cancelled,
|
|
58
|
+
}
|
|
59
|
+
if self.trace_pending is not None:
|
|
60
|
+
payload["trace_pending"] = self.trace_pending
|
|
61
|
+
if self.extra:
|
|
62
|
+
payload.update(self.extra)
|
|
63
|
+
return payload
|
|
64
|
+
|
|
65
|
+
def metric_samples(self) -> dict[str, float]:
|
|
66
|
+
"""Derive numeric metrics for integrations such as MLflow."""
|
|
67
|
+
|
|
68
|
+
metrics: dict[str, float] = {
|
|
69
|
+
"queue_depth_in": float(self.queue_depth_in),
|
|
70
|
+
"queue_depth_out": float(self.queue_depth_out),
|
|
71
|
+
"queue_depth_total": float(self.queue_depth),
|
|
72
|
+
"attempt": float(self.attempt),
|
|
73
|
+
"trace_inflight": float(self.trace_inflight),
|
|
74
|
+
"trace_cancelled": 1.0 if self.trace_cancelled else 0.0,
|
|
75
|
+
}
|
|
76
|
+
if self.trace_pending is not None:
|
|
77
|
+
metrics["trace_pending"] = float(self.trace_pending)
|
|
78
|
+
if self.latency_ms is not None:
|
|
79
|
+
metrics["latency_ms"] = self.latency_ms
|
|
80
|
+
if (latency := self.extra.get("latency_ms")) is not None:
|
|
81
|
+
# Allow extra payloads to inject a latency override for retries.
|
|
82
|
+
try:
|
|
83
|
+
metrics["latency_ms"] = float(latency)
|
|
84
|
+
except (TypeError, ValueError): # pragma: no cover - defensive
|
|
85
|
+
pass
|
|
86
|
+
return metrics
|
|
87
|
+
|
|
88
|
+
def tag_values(self) -> dict[str, str]:
|
|
89
|
+
"""Return string tags describing the event."""
|
|
90
|
+
|
|
91
|
+
tags: dict[str, str] = {"event_type": self.event_type}
|
|
92
|
+
if self.node_name is not None:
|
|
93
|
+
tags["node_name"] = self.node_name
|
|
94
|
+
if self.node_id is not None:
|
|
95
|
+
tags["node_id"] = self.node_id
|
|
96
|
+
if self.trace_id is not None:
|
|
97
|
+
tags["trace_id"] = self.trace_id
|
|
98
|
+
if self.extra:
|
|
99
|
+
for key, value in self.extra.items():
|
|
100
|
+
if isinstance(value, str | int | float | bool):
|
|
101
|
+
tags[key] = str(value)
|
|
102
|
+
return tags
|
|
103
|
+
|
|
104
|
+
|
|
105
|
+
__all__ = ["FlowEvent"]
|
penguiflow/middlewares.py
CHANGED
|
@@ -1,17 +1,16 @@
|
|
|
1
|
-
"""Middleware hooks for PenguiFlow.
|
|
2
|
-
|
|
3
|
-
Instrumentation arrives in Phase 3.
|
|
4
|
-
"""
|
|
1
|
+
"""Middleware hooks for PenguiFlow."""
|
|
5
2
|
|
|
6
3
|
from __future__ import annotations
|
|
7
4
|
|
|
8
5
|
from typing import Protocol
|
|
9
6
|
|
|
7
|
+
from .metrics import FlowEvent
|
|
8
|
+
|
|
10
9
|
|
|
11
10
|
class Middleware(Protocol):
    """Structural type for middleware hooks.

    Implementations are awaited once per :class:`FlowEvent` emitted by the
    runtime and should return ``None``.
    """

    async def __call__(self, event: FlowEvent) -> None:
        ...


__all__ = ["Middleware", "FlowEvent"]
|
penguiflow/patterns.py
CHANGED
|
@@ -11,6 +11,7 @@ from pydantic import BaseModel
|
|
|
11
11
|
from pydantic.type_adapter import TypeAdapter
|
|
12
12
|
|
|
13
13
|
from .node import Node, NodePolicy
|
|
14
|
+
from .policies import PolicyLike, RoutingRequest, evaluate_policy
|
|
14
15
|
from .types import Message
|
|
15
16
|
|
|
16
17
|
PayloadT = TypeVar("PayloadT")
|
|
@@ -47,6 +48,8 @@ async def map_concurrent(
|
|
|
47
48
|
def predicate_router(
|
|
48
49
|
name: str,
|
|
49
50
|
predicate: Callable[[Any], Sequence[Node | str] | Node | str | None],
|
|
51
|
+
*,
|
|
52
|
+
policy: PolicyLike | None = None,
|
|
50
53
|
) -> Node:
|
|
51
54
|
"""Create a node that routes messages based on predicate outputs."""
|
|
52
55
|
|
|
@@ -56,15 +59,36 @@ def predicate_router(
|
|
|
56
59
|
return
|
|
57
60
|
|
|
58
61
|
normalized = _normalize_targets(ctx, targets)
|
|
59
|
-
if normalized:
|
|
60
|
-
|
|
62
|
+
if not normalized:
|
|
63
|
+
return
|
|
61
64
|
|
|
62
|
-
|
|
65
|
+
selected = normalized
|
|
66
|
+
if policy is not None:
|
|
67
|
+
request = RoutingRequest(
|
|
68
|
+
message=msg,
|
|
69
|
+
context=ctx,
|
|
70
|
+
node=router_node,
|
|
71
|
+
proposed=tuple(normalized),
|
|
72
|
+
trace_id=getattr(msg, "trace_id", None),
|
|
73
|
+
)
|
|
74
|
+
decision = await evaluate_policy(policy, request)
|
|
75
|
+
if decision is None:
|
|
76
|
+
return
|
|
77
|
+
selected = _normalize_targets(ctx, decision)
|
|
78
|
+
if not selected:
|
|
79
|
+
return
|
|
80
|
+
|
|
81
|
+
await ctx.emit(msg, to=selected)
|
|
82
|
+
|
|
83
|
+
router_node = Node(router, name=name, policy=NodePolicy(validate="none"))
|
|
84
|
+
return router_node
|
|
63
85
|
|
|
64
86
|
|
|
65
87
|
def union_router(
|
|
66
88
|
name: str,
|
|
67
89
|
union_model: type[BaseModel],
|
|
90
|
+
*,
|
|
91
|
+
policy: PolicyLike | None = None,
|
|
68
92
|
) -> Node:
|
|
69
93
|
"""Route based on a discriminated union Pydantic model."""
|
|
70
94
|
|
|
@@ -77,9 +101,27 @@ def union_router(
|
|
|
77
101
|
normalized = _normalize_targets(ctx, target)
|
|
78
102
|
if not normalized:
|
|
79
103
|
raise KeyError(f"No successor matches '{target}'")
|
|
80
|
-
await ctx.emit(validated, to=normalized)
|
|
81
104
|
|
|
82
|
-
|
|
105
|
+
selected = normalized
|
|
106
|
+
if policy is not None:
|
|
107
|
+
request = RoutingRequest(
|
|
108
|
+
message=validated,
|
|
109
|
+
context=ctx,
|
|
110
|
+
node=router_node,
|
|
111
|
+
proposed=tuple(normalized),
|
|
112
|
+
trace_id=getattr(validated, "trace_id", None),
|
|
113
|
+
)
|
|
114
|
+
decision = await evaluate_policy(policy, request)
|
|
115
|
+
if decision is None:
|
|
116
|
+
return
|
|
117
|
+
selected = _normalize_targets(ctx, decision)
|
|
118
|
+
if not selected:
|
|
119
|
+
return
|
|
120
|
+
|
|
121
|
+
await ctx.emit(validated, to=selected)
|
|
122
|
+
|
|
123
|
+
router_node = Node(router, name=name, policy=NodePolicy(validate="none"))
|
|
124
|
+
return router_node
|
|
83
125
|
|
|
84
126
|
|
|
85
127
|
def join_k(name: str, k: int) -> Node:
|
penguiflow/policies.py
ADDED
|
@@ -0,0 +1,149 @@
|
|
|
1
|
+
"""Policy helpers for dynamic routing decisions."""
|
|
2
|
+
|
|
3
|
+
from __future__ import annotations
|
|
4
|
+
|
|
5
|
+
import inspect
|
|
6
|
+
import json
|
|
7
|
+
import os
|
|
8
|
+
from collections.abc import Awaitable, Callable, Mapping, Sequence
|
|
9
|
+
from dataclasses import dataclass
|
|
10
|
+
from typing import TYPE_CHECKING, Any, Protocol, TypeAlias, cast
|
|
11
|
+
|
|
12
|
+
from .node import Node
|
|
13
|
+
|
|
14
|
+
if TYPE_CHECKING:  # pragma: no cover - import cycle guard
    from .core import Context
else:  # pragma: no cover - runtime fallback
    class Context:  # type: ignore[too-many-ancestors]
        """Runtime stand-in so ``Context`` annotations resolve without
        importing :mod:`.core` (which would create a circular import)."""


# A policy decision: nothing, a single target (node or name), or several.
RoutingDecisionType: TypeAlias = None | Node | str | Sequence[Node | str]
|
|
23
|
+
|
|
24
|
+
|
|
25
|
+
@dataclass(slots=True)
|
|
26
|
+
class RoutingRequest:
|
|
27
|
+
"""Information provided to routing policies."""
|
|
28
|
+
|
|
29
|
+
message: Any
|
|
30
|
+
context: Context
|
|
31
|
+
node: Node
|
|
32
|
+
proposed: tuple[Node, ...]
|
|
33
|
+
trace_id: str | None
|
|
34
|
+
|
|
35
|
+
@property
|
|
36
|
+
def node_name(self) -> str:
|
|
37
|
+
return self.node.name or self.node.node_id
|
|
38
|
+
|
|
39
|
+
@property
|
|
40
|
+
def proposed_names(self) -> tuple[str, ...]:
|
|
41
|
+
names: list[str] = []
|
|
42
|
+
for candidate in self.proposed:
|
|
43
|
+
names.append(candidate.name or candidate.node_id)
|
|
44
|
+
return tuple(names)
|
|
45
|
+
|
|
46
|
+
|
|
47
|
+
class RoutingPolicy(Protocol):
    """Structural interface implemented by router policies.

    ``select`` may be synchronous or return an awaitable; callers resolve
    either form via :func:`evaluate_policy`.
    """

    def select(
        self, request: RoutingRequest
    ) -> RoutingDecisionType | Awaitable[RoutingDecisionType]:
        """Return the desired routing targets for *request*."""
|
|
54
|
+
|
|
55
|
+
|
|
56
|
+
PolicyCallable = Callable[
|
|
57
|
+
[RoutingRequest], RoutingDecisionType | Awaitable[RoutingDecisionType]
|
|
58
|
+
]
|
|
59
|
+
PolicyLike = RoutingPolicy | PolicyCallable
|
|
60
|
+
|
|
61
|
+
|
|
62
|
+
async def evaluate_policy(
    policy: PolicyLike,
    request: RoutingRequest,
) -> RoutingDecisionType:
    """Run *policy* against *request*, awaiting the result when needed.

    Accepts either an object exposing a ``select`` method or a bare
    callable; both synchronous and asynchronous implementations work.
    """

    # Objects with a ``select`` attribute take the RoutingPolicy path;
    # everything else is treated as a plain callable.
    if hasattr(policy, "select"):
        outcome = policy.select(request)
    else:
        outcome = policy(request)

    if inspect.isawaitable(outcome):
        outcome = await outcome
    return outcome
|
|
77
|
+
|
|
78
|
+
|
|
79
|
+
KeyFn = Callable[[RoutingRequest], str | None]
|
|
80
|
+
|
|
81
|
+
|
|
82
|
+
class DictRoutingPolicy:
    """Config-driven routing policy: look decisions up by a request key.

    A key function (default: the request's ``trace_id``) extracts the
    lookup key; the mapping supplies the decision, with a configurable
    fallback when the key is absent or ``None``.
    """

    def __init__(
        self,
        mapping: Mapping[str, RoutingDecisionType],
        *,
        default: RoutingDecisionType = None,
        key_getter: KeyFn | None = None,
    ) -> None:
        # Copy so later caller-side mutation cannot change routing.
        self._mapping: dict[str, RoutingDecisionType] = dict(mapping)
        self._default = default
        if key_getter is None:
            self._key_getter = lambda request: request.trace_id
        else:
            self._key_getter = key_getter

    def select(self, request: RoutingRequest) -> RoutingDecisionType:
        """Return the mapped decision for *request*, or the default."""
        key = self._key_getter(request)
        if key is None:
            return self._default
        return self._mapping.get(key, self._default)

    def update_mapping(self, mapping: Mapping[str, RoutingDecisionType]) -> None:
        """Replace the lookup table wholesale."""
        self._mapping = dict(mapping)

    def set_default(self, decision: RoutingDecisionType) -> None:
        """Change the fallback decision."""
        self._default = decision

    @classmethod
    def from_json(cls, payload: str, **kwargs: Any) -> DictRoutingPolicy:
        """Build a policy from a JSON object string.

        Raises ``TypeError`` when the payload does not decode to a mapping.
        """
        data = json.loads(payload)
        if not isinstance(data, Mapping):
            raise TypeError("JSON payload must decode to a mapping")
        return cls(data, **kwargs)

    @classmethod
    def from_json_file(cls, path: str, **kwargs: Any) -> DictRoutingPolicy:
        """Build a policy from a JSON file on disk."""
        with open(path, encoding="utf-8") as fh:
            return cls.from_json(fh.read(), **kwargs)

    @classmethod
    def from_env(
        cls,
        env_var: str,
        *,
        loader: Callable[[str], Mapping[str, RoutingDecisionType]] | None = None,
        default: RoutingDecisionType = None,
        key_getter: KeyFn | None = None,
    ) -> DictRoutingPolicy:
        """Build a policy from an environment variable.

        The raw value is parsed as JSON unless a custom *loader* is given.
        Raises ``KeyError`` when the variable is unset and ``TypeError``
        when the parsed value is not a mapping.
        """
        raw = os.getenv(env_var)
        if raw is None:
            raise KeyError(f"Environment variable '{env_var}' not set")
        data = json.loads(raw) if loader is None else loader(raw)
        if not isinstance(data, Mapping):
            raise TypeError("Policy loader must return a mapping")
        return cls(data, default=default, key_getter=key_getter)
|
|
139
|
+
|
|
140
|
+
|
|
141
|
+
__all__ = [
|
|
142
|
+
"DictRoutingPolicy",
|
|
143
|
+
"PolicyCallable",
|
|
144
|
+
"PolicyLike",
|
|
145
|
+
"RoutingDecisionType",
|
|
146
|
+
"RoutingPolicy",
|
|
147
|
+
"RoutingRequest",
|
|
148
|
+
"evaluate_policy",
|
|
149
|
+
]
|