jaf-py 2.5.10-py3-none-any.whl → 2.5.11-py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- jaf/__init__.py +154 -57
- jaf/a2a/__init__.py +42 -21
- jaf/a2a/agent.py +79 -126
- jaf/a2a/agent_card.py +87 -78
- jaf/a2a/client.py +30 -66
- jaf/a2a/examples/client_example.py +12 -12
- jaf/a2a/examples/integration_example.py +38 -47
- jaf/a2a/examples/server_example.py +56 -53
- jaf/a2a/memory/__init__.py +0 -4
- jaf/a2a/memory/cleanup.py +28 -21
- jaf/a2a/memory/factory.py +155 -133
- jaf/a2a/memory/providers/composite.py +21 -26
- jaf/a2a/memory/providers/in_memory.py +89 -83
- jaf/a2a/memory/providers/postgres.py +117 -115
- jaf/a2a/memory/providers/redis.py +128 -121
- jaf/a2a/memory/serialization.py +77 -87
- jaf/a2a/memory/tests/run_comprehensive_tests.py +112 -83
- jaf/a2a/memory/tests/test_cleanup.py +211 -94
- jaf/a2a/memory/tests/test_serialization.py +73 -68
- jaf/a2a/memory/tests/test_stress_concurrency.py +186 -133
- jaf/a2a/memory/tests/test_task_lifecycle.py +138 -120
- jaf/a2a/memory/types.py +91 -53
- jaf/a2a/protocol.py +95 -125
- jaf/a2a/server.py +90 -118
- jaf/a2a/standalone_client.py +30 -43
- jaf/a2a/tests/__init__.py +16 -33
- jaf/a2a/tests/run_tests.py +17 -53
- jaf/a2a/tests/test_agent.py +40 -140
- jaf/a2a/tests/test_client.py +54 -117
- jaf/a2a/tests/test_integration.py +28 -82
- jaf/a2a/tests/test_protocol.py +54 -139
- jaf/a2a/tests/test_types.py +50 -136
- jaf/a2a/types.py +58 -34
- jaf/cli.py +21 -41
- jaf/core/__init__.py +7 -1
- jaf/core/agent_tool.py +93 -72
- jaf/core/analytics.py +257 -207
- jaf/core/checkpoint.py +223 -0
- jaf/core/composition.py +249 -235
- jaf/core/engine.py +817 -519
- jaf/core/errors.py +55 -42
- jaf/core/guardrails.py +276 -202
- jaf/core/handoff.py +47 -31
- jaf/core/parallel_agents.py +69 -75
- jaf/core/performance.py +75 -73
- jaf/core/proxy.py +43 -44
- jaf/core/proxy_helpers.py +24 -27
- jaf/core/regeneration.py +220 -129
- jaf/core/state.py +68 -66
- jaf/core/streaming.py +115 -108
- jaf/core/tool_results.py +111 -101
- jaf/core/tools.py +114 -116
- jaf/core/tracing.py +269 -210
- jaf/core/types.py +371 -151
- jaf/core/workflows.py +209 -168
- jaf/exceptions.py +46 -38
- jaf/memory/__init__.py +1 -6
- jaf/memory/approval_storage.py +54 -77
- jaf/memory/factory.py +4 -4
- jaf/memory/providers/in_memory.py +216 -180
- jaf/memory/providers/postgres.py +216 -146
- jaf/memory/providers/redis.py +173 -116
- jaf/memory/types.py +70 -51
- jaf/memory/utils.py +36 -34
- jaf/plugins/__init__.py +12 -12
- jaf/plugins/base.py +105 -96
- jaf/policies/__init__.py +0 -1
- jaf/policies/handoff.py +37 -46
- jaf/policies/validation.py +76 -52
- jaf/providers/__init__.py +6 -3
- jaf/providers/mcp.py +97 -51
- jaf/providers/model.py +360 -279
- jaf/server/__init__.py +1 -1
- jaf/server/main.py +7 -11
- jaf/server/server.py +514 -359
- jaf/server/types.py +208 -52
- jaf/utils/__init__.py +17 -18
- jaf/utils/attachments.py +111 -116
- jaf/utils/document_processor.py +175 -174
- jaf/visualization/__init__.py +1 -1
- jaf/visualization/example.py +111 -110
- jaf/visualization/functional_core.py +46 -71
- jaf/visualization/graphviz.py +154 -189
- jaf/visualization/imperative_shell.py +7 -16
- jaf/visualization/types.py +8 -4
- {jaf_py-2.5.10.dist-info → jaf_py-2.5.11.dist-info}/METADATA +2 -2
- jaf_py-2.5.11.dist-info/RECORD +97 -0
- jaf_py-2.5.10.dist-info/RECORD +0 -96
- {jaf_py-2.5.10.dist-info → jaf_py-2.5.11.dist-info}/WHEEL +0 -0
- {jaf_py-2.5.10.dist-info → jaf_py-2.5.11.dist-info}/entry_points.txt +0 -0
- {jaf_py-2.5.10.dist-info → jaf_py-2.5.11.dist-info}/licenses/LICENSE +0 -0
- {jaf_py-2.5.10.dist-info → jaf_py-2.5.11.dist-info}/top_level.txt +0 -0
jaf/core/performance.py
CHANGED

```diff
@@ -18,6 +18,7 @@ from .types import TraceEvent, RunId, TraceId
 @dataclass(frozen=True)
 class PerformanceMetrics:
     """Comprehensive performance metrics for agent execution."""
+
     execution_time_ms: float
     memory_usage_mb: float
     peak_memory_mb: float
@@ -27,27 +28,30 @@ class PerformanceMetrics:
     tool_call_count: int
     error_count: int
     retry_count: int
-
+
     def to_dict(self) -> Dict[str, Any]:
         """Convert metrics to dictionary for serialization."""
         return {
-
-
-
-
-
-
-
-
-
+            "execution_time_ms": self.execution_time_ms,
+            "memory_usage_mb": self.memory_usage_mb,
+            "peak_memory_mb": self.peak_memory_mb,
+            "token_count": self.token_count,
+            "cache_hit_rate": self.cache_hit_rate,
+            "llm_call_count": self.llm_call_count,
+            "tool_call_count": self.tool_call_count,
+            "error_count": self.error_count,
+            "retry_count": self.retry_count,
         }
 
 
 @dataclass(frozen=True)
 class PerformanceEvent:
     """Performance-related trace event."""
-
-
+
+    type: str = "performance_metrics"
+    data: PerformanceMetrics = field(
+        default_factory=lambda: PerformanceMetrics(0, 0, 0, 0, 0, 0, 0, 0, 0)
+    )
     timestamp: float = field(default_factory=time.time)
     run_id: Optional[RunId] = None
     trace_id: Optional[TraceId] = None
@@ -56,10 +60,10 @@
 class PerformanceMonitor:
     """
     Performance monitoring system for JAF agents.
-
+
     Tracks execution metrics, memory usage, and provides performance insights.
     """
-
+
     def __init__(self):
         self.start_time: Optional[float] = None
         self.start_memory: Optional[float] = None
@@ -72,22 +76,22 @@ class PerformanceMonitor:
         self.errors: int = 0
         self.retries: int = 0
         self.process = psutil.Process()
-
+
     def start_monitoring(self) -> None:
         """Start performance monitoring."""
         self.start_time = time.time()
         self.start_memory = self._get_memory_usage()
         self.peak_memory = self.start_memory
-
+
     def stop_monitoring(self) -> PerformanceMetrics:
         """Stop monitoring and return collected metrics."""
         if self.start_time is None:
             raise ValueError("Monitoring not started")
-
+
         execution_time = (time.time() - self.start_time) * 1000  # Convert to ms
         current_memory = self._get_memory_usage()
         cache_hit_rate = self._calculate_cache_hit_rate()
-
+
         return PerformanceMetrics(
             execution_time_ms=execution_time,
             memory_usage_mb=current_memory,
@@ -97,36 +101,36 @@ class PerformanceMonitor:
             llm_call_count=self.llm_calls,
             tool_call_count=self.tool_calls,
             error_count=self.errors,
-            retry_count=self.retries
+            retry_count=self.retries,
         )
-
+
     def record_llm_call(self, token_count: int = 0) -> None:
         """Record an LLM call with optional token count."""
         self.llm_calls += 1
         self.token_count += token_count
         self._update_peak_memory()
-
+
     def record_tool_call(self) -> None:
         """Record a tool call."""
         self.tool_calls += 1
         self._update_peak_memory()
-
+
     def record_cache_hit(self) -> None:
         """Record a cache hit."""
         self.cache_hits += 1
-
+
     def record_cache_miss(self) -> None:
         """Record a cache miss."""
         self.cache_misses += 1
-
+
     def record_error(self) -> None:
         """Record an error occurrence."""
         self.errors += 1
-
+
     def record_retry(self) -> None:
         """Record a retry attempt."""
         self.retries += 1
-
+
     def _get_memory_usage(self) -> float:
         """Get current memory usage in MB."""
         try:
@@ -134,13 +138,13 @@ class PerformanceMonitor:
             return memory_info.rss / 1024 / 1024  # Convert bytes to MB
         except (psutil.NoSuchProcess, psutil.AccessDenied):
             return 0.0
-
+
     def _update_peak_memory(self) -> None:
         """Update peak memory usage."""
         current_memory = self._get_memory_usage()
         if current_memory > self.peak_memory:
             self.peak_memory = current_memory
-
+
     def _calculate_cache_hit_rate(self) -> float:
         """Calculate cache hit rate as a percentage."""
         total_cache_operations = self.cache_hits + self.cache_misses
@@ -153,11 +157,11 @@
 async def monitor_performance(
     run_id: Optional[RunId] = None,
     trace_id: Optional[TraceId] = None,
-    on_complete: Optional[Callable[[PerformanceMetrics], None]] = None
+    on_complete: Optional[Callable[[PerformanceMetrics], None]] = None,
 ) -> AsyncIterator[PerformanceMonitor]:
     """
     Context manager for performance monitoring.
-
+
     Usage:
         async with monitor_performance() as monitor:
             monitor.record_llm_call(150)  # 150 tokens
@@ -165,50 +169,46 @@ async def monitor_performance(
     """
     monitor = PerformanceMonitor()
     monitor.start_monitoring()
-
+
     try:
         yield monitor
     finally:
         metrics = monitor.stop_monitoring()
-
+
         if on_complete:
             on_complete(metrics)
-
+
         # Emit performance event
-        event = PerformanceEvent(
-            data=metrics,
-            run_id=run_id,
-            trace_id=trace_id
-        )
+        event = PerformanceEvent(data=metrics, run_id=run_id, trace_id=trace_id)
 
 
 class PerformanceCollector:
     """
     Collects and aggregates performance metrics across multiple runs.
     """
-
+
     def __init__(self):
         self.metrics_history: List[PerformanceMetrics] = []
         self.run_metrics: Dict[str, PerformanceMetrics] = {}
-
+
     def collect_metrics(self, metrics: PerformanceMetrics, run_id: Optional[str] = None) -> None:
         """Collect performance metrics from a run."""
         self.metrics_history.append(metrics)
         if run_id:
             self.run_metrics[run_id] = metrics
-
+
     def get_average_metrics(self, last_n: Optional[int] = None) -> Optional[PerformanceMetrics]:
         """Get average metrics across runs."""
         if not self.metrics_history:
             return None
-
+
         metrics_to_analyze = self.metrics_history[-last_n:] if last_n else self.metrics_history
-
+
         if not metrics_to_analyze:
             return None
-
+
         count = len(metrics_to_analyze)
-
+
         return PerformanceMetrics(
             execution_time_ms=sum(m.execution_time_ms for m in metrics_to_analyze) / count,
             memory_usage_mb=sum(m.memory_usage_mb for m in metrics_to_analyze) / count,
@@ -218,52 +218,54 @@ class PerformanceCollector:
             llm_call_count=sum(m.llm_call_count for m in metrics_to_analyze) / count,
             tool_call_count=sum(m.tool_call_count for m in metrics_to_analyze) / count,
             error_count=sum(m.error_count for m in metrics_to_analyze) / count,
-            retry_count=sum(m.retry_count for m in metrics_to_analyze) / count
+            retry_count=sum(m.retry_count for m in metrics_to_analyze) / count,
         )
-
+
     def get_performance_summary(self) -> Dict[str, Any]:
         """Get comprehensive performance summary."""
         if not self.metrics_history:
-            return {
-
+            return {"status": "no_data"}
+
         recent_metrics = self.get_average_metrics(last_n=10)
         all_time_metrics = self.get_average_metrics()
-
+
         return {
-
-
-
-
+            "total_runs": len(self.metrics_history),
+            "recent_average": recent_metrics.to_dict() if recent_metrics else None,
+            "all_time_average": all_time_metrics.to_dict() if all_time_metrics else None,
+            "performance_trends": self._analyze_trends(),
         }
-
+
     def _analyze_trends(self) -> Dict[str, str]:
         """Analyze performance trends."""
         if len(self.metrics_history) < 2:
-            return {
-
-        recent =
+            return {"status": "insufficient_data"}
+
+        recent = (
+            self.metrics_history[-5:] if len(self.metrics_history) >= 5 else self.metrics_history
+        )
         older = self.metrics_history[:-5] if len(self.metrics_history) >= 10 else []
-
+
         if not older:
-            return {
-
+            return {"status": "insufficient_historical_data"}
+
         recent_avg_time = sum(m.execution_time_ms for m in recent) / len(recent)
         older_avg_time = sum(m.execution_time_ms for m in older) / len(older)
-
-        time_trend =
-
+
+        time_trend = "improving" if recent_avg_time < older_avg_time else "degrading"
+
         recent_avg_memory = sum(m.memory_usage_mb for m in recent) / len(recent)
         older_avg_memory = sum(m.memory_usage_mb for m in older) / len(older)
-
-        memory_trend =
-
+
+        memory_trend = "improving" if recent_avg_memory < older_avg_memory else "degrading"
+
         return {
-
-
-
-
-
-
+            "execution_time_trend": time_trend,
+            "memory_usage_trend": memory_trend,
+            "recent_avg_time_ms": recent_avg_time,
+            "older_avg_time_ms": older_avg_time,
+            "recent_avg_memory_mb": recent_avg_memory,
+            "older_avg_memory_mb": older_avg_memory,
         }
```
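Most of the edits above are mechanical reformatting (double quotes, trailing commas, blank-line whitespace), but the hunks also show the shape of the performance API: `monitor_performance()` yields a `PerformanceMonitor`, hands the final `PerformanceMetrics` to `on_complete`, and `PerformanceCollector` aggregates metrics across runs. A minimal usage sketch follows, assuming only the signatures visible in these hunks and the module path `jaf.core.performance` implied by the file location:

```python
# Sketch based on the signatures shown in the diff above; the import path is
# assumed from the file location, not confirmed by this diff.
import asyncio

from jaf.core.performance import PerformanceCollector, monitor_performance

collector = PerformanceCollector()


async def run_once(run_id: str) -> None:
    # monitor_performance() is used as an async context manager (per its
    # docstring) and passes the final PerformanceMetrics to `on_complete`.
    async with monitor_performance(
        on_complete=lambda metrics: collector.collect_metrics(metrics, run_id=run_id)
    ) as monitor:
        monitor.record_llm_call(150)  # one LLM call, ~150 tokens
        monitor.record_tool_call()
        monitor.record_cache_miss()


async def main() -> None:
    # _analyze_trends() compares the last five runs against older ones, so at
    # least ten collected runs are needed before a real trend is reported.
    for i in range(12):
        await run_once(f"run-{i}")
    summary = collector.get_performance_summary()
    print(summary["performance_trends"])  # execution_time_trend, memory_usage_trend, ...


asyncio.run(main())
```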
jaf/core/proxy.py
CHANGED

```diff
@@ -14,6 +14,7 @@ from urllib.parse import urlparse
 @dataclass
 class ProxyAuth:
     """Proxy authentication configuration."""
+
     username: str
     password: str
 
@@ -21,100 +22,98 @@ class ProxyAuth:
 @dataclass
 class ProxyConfig:
     """Proxy configuration for HTTP clients."""
+
     http_proxy: Optional[str] = None
     https_proxy: Optional[str] = None
     no_proxy: Optional[str] = None
     auth: Optional[ProxyAuth] = None
-
+
     @classmethod
-    def from_environment(cls) ->
+    def from_environment(cls) -> "ProxyConfig":
         """Create proxy configuration from environment variables."""
         return cls(
-            http_proxy=os.getenv(
-            https_proxy=os.getenv(
-            no_proxy=os.getenv(
+            http_proxy=os.getenv("HTTP_PROXY") or os.getenv("http_proxy"),
+            https_proxy=os.getenv("HTTPS_PROXY") or os.getenv("https_proxy"),
+            no_proxy=os.getenv("NO_PROXY") or os.getenv("no_proxy"),
             auth=ProxyAuth(
-                username=os.getenv(
-
-
+                username=os.getenv("PROXY_USERNAME", ""), password=os.getenv("PROXY_PASSWORD", "")
+            )
+            if os.getenv("PROXY_USERNAME")
+            else None,
         )
-
+
     @classmethod
-    def from_url(cls, proxy_url: str, auth: Optional[ProxyAuth] = None) ->
+    def from_url(cls, proxy_url: str, auth: Optional[ProxyAuth] = None) -> "ProxyConfig":
         """Create proxy configuration from a single URL."""
-        return cls(
-
-            https_proxy=proxy_url,
-            auth=auth
-        )
-
+        return cls(http_proxy=proxy_url, https_proxy=proxy_url, auth=auth)
+
     def to_httpx_proxies(self) -> Optional[Dict[str, str]]:
         """Convert to httpx proxies format."""
         if not self.http_proxy and not self.https_proxy:
             return None
-
+
         proxies = {}
-
+
         if self.http_proxy:
-            proxies[
-
+            proxies["http://"] = self._add_auth_to_url(self.http_proxy)
+
         if self.https_proxy:
-            proxies[
-
+            proxies["https://"] = self._add_auth_to_url(self.https_proxy)
+
         return proxies if proxies else None
-
+
     def to_openai_proxies(self) -> Optional[Dict[str, str]]:
         """Convert to OpenAI client proxies format."""
         # OpenAI client supports httpx-style proxy configuration
         return self.to_httpx_proxies()
-
+
     def _add_auth_to_url(self, url: str) -> str:
         """Add authentication to proxy URL if configured."""
         if not self.auth or not self.auth.username:
             return url
-
+
         parsed = urlparse(url)
-
+
         # If URL already has auth, don't override
-        if
+        if "@" in parsed.netloc:
             return url
-
+
         auth_string = f"{self.auth.username}:{self.auth.password}"
-
+
         # Reconstruct URL with auth
         if parsed.port:
             netloc = f"{auth_string}@{parsed.hostname}:{parsed.port}"
         else:
             netloc = f"{auth_string}@{parsed.hostname}"
-
+
         return f"{parsed.scheme}://{netloc}{parsed.path}"
-
+
     def should_bypass_proxy(self, host: str) -> bool:
         """Check if a host should bypass the proxy based on no_proxy settings."""
         if not self.no_proxy:
             return False
-
-        no_proxy_hosts = [h.strip() for h in self.no_proxy.split(
-
+
+        no_proxy_hosts = [h.strip() for h in self.no_proxy.split(",")]
+
         for no_proxy_host in no_proxy_hosts:
             if not no_proxy_host:
                 continue
-
+
             # Exact match
             if host == no_proxy_host:
                 return True
-
+
             # Wildcard match (e.g., *.example.com)
-            if no_proxy_host.startswith(
+            if no_proxy_host.startswith("*"):
                 suffix = no_proxy_host[1:]
                 if host.endswith(suffix):
                     return True
-
+
             # Domain suffix match
-            if no_proxy_host.startswith(
+            if no_proxy_host.startswith("."):
                 if host.endswith(no_proxy_host) or host == no_proxy_host[1:]:
                     return True
-
+
         return False
 
 
@@ -122,20 +121,20 @@ def create_proxy_config(
     proxy_url: Optional[str] = None,
     username: Optional[str] = None,
     password: Optional[str] = None,
-    no_proxy: Optional[str] = None
+    no_proxy: Optional[str] = None,
 ) -> ProxyConfig:
     """Create a proxy configuration with optional parameters."""
     auth = ProxyAuth(username, password) if username else None
-
+
     if proxy_url:
         config = ProxyConfig.from_url(proxy_url, auth)
         if no_proxy:
             config.no_proxy = no_proxy
         return config
-
+
     return ProxyConfig.from_environment()
 
 
 def get_default_proxy_config() -> ProxyConfig:
     """Get the default proxy configuration from environment variables."""
-    return ProxyConfig.from_environment()
+    return ProxyConfig.from_environment()
```
jaf/core/proxy_helpers.py
CHANGED

```diff
@@ -17,11 +17,11 @@ def make_proxy_enabled_litellm_provider(
     proxy_url: Optional[str] = None,
     proxy_username: Optional[str] = None,
     proxy_password: Optional[str] = None,
-    use_env_proxy: bool = True
+    use_env_proxy: bool = True,
 ):
     """
     Create a LiteLLM provider with proxy support.
-
+
     Args:
         base_url: Base URL for the LiteLLM server
         api_key: API key (defaults to "anything" for local servers)
@@ -30,19 +30,18 @@ def make_proxy_enabled_litellm_provider(
         proxy_username: Proxy username for authentication
         proxy_password: Proxy password for authentication
         use_env_proxy: Whether to use proxy settings from environment variables
-
+
     Returns:
         ModelProvider with proxy configuration
     """
     proxy_config = None
-
+
     if proxy_url:
         # Use explicitly provided proxy settings
         from .proxy import ProxyAuth, create_proxy_config
+
         proxy_config = create_proxy_config(
-            proxy_url=proxy_url,
-            username=proxy_username,
-            password=proxy_password
+            proxy_url=proxy_url, username=proxy_username, password=proxy_password
         )
     elif use_env_proxy:
         # Use environment-based proxy settings
@@ -50,61 +49,59 @@ def make_proxy_enabled_litellm_provider(
         # Only use proxy if actually configured in environment
         if not proxy_config.http_proxy and not proxy_config.https_proxy:
             proxy_config = None
-
+
     return make_litellm_provider(
         base_url=base_url,
         api_key=api_key,
         default_timeout=default_timeout,
-        proxy_config=proxy_config
+        proxy_config=proxy_config,
     )
 
 
-
-
 def get_proxy_info() -> Dict[str, Any]:
     """
     Get information about current proxy configuration from environment.
-
+
     Returns:
         Dictionary with proxy configuration details
     """
     proxy_config = get_default_proxy_config()
-
+
     return {
         "http_proxy": proxy_config.http_proxy,
         "https_proxy": proxy_config.https_proxy,
         "no_proxy": proxy_config.no_proxy,
         "has_auth": proxy_config.auth is not None,
         "auth_username": proxy_config.auth.username if proxy_config.auth else None,
-        "is_configured": bool(proxy_config.http_proxy or proxy_config.https_proxy)
+        "is_configured": bool(proxy_config.http_proxy or proxy_config.https_proxy),
    }
 
 
 def validate_proxy_config(proxy_config: ProxyConfig) -> Dict[str, Any]:
     """
     Validate proxy configuration and return validation results.
-
+
     Args:
         proxy_config: Proxy configuration to validate
-
+
     Returns:
         Dictionary with validation results
     """
-    results = {
-
-        "warnings": [],
-        "errors": []
-    }
-
+    results = {"valid": True, "warnings": [], "errors": []}
+
     # Check if at least one proxy is configured
     if not proxy_config.http_proxy and not proxy_config.https_proxy:
         results["warnings"].append("No proxy URLs configured")
-
+
     # Validate proxy URLs if configured
-    for proxy_type, proxy_url in [
+    for proxy_type, proxy_url in [
+        ("HTTP", proxy_config.http_proxy),
+        ("HTTPS", proxy_config.https_proxy),
+    ]:
         if proxy_url:
             try:
                 from urllib.parse import urlparse
+
                 parsed = urlparse(proxy_url)
                 if not parsed.scheme:
                     results["errors"].append(f"{proxy_type} proxy URL missing scheme: {proxy_url}")
@@ -115,12 +112,12 @@ def validate_proxy_config(proxy_config: ProxyConfig) -> Dict[str, Any]:
             except Exception as e:
                 results["errors"].append(f"Invalid {proxy_type} proxy URL: {e}")
                 results["valid"] = False
-
+
     # Check authentication consistency
     if proxy_config.auth:
         if not proxy_config.auth.username:
             results["warnings"].append("Proxy authentication configured but username is empty")
         if not proxy_config.auth.password:
             results["warnings"].append("Proxy authentication configured but password is empty")
-
-    return results
+
+    return results
```