agentic-layer-sdk 0.13.2__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- agentic_layer_sdk-0.13.2.dist-info/METADATA +24 -0
- agentic_layer_sdk-0.13.2.dist-info/RECORD +9 -0
- agentic_layer_sdk-0.13.2.dist-info/WHEEL +4 -0
- agenticlayer/_otel.py +98 -0
- agenticlayer/config.py +75 -0
- agenticlayer/constants.py +6 -0
- agenticlayer/loguru_config.py +131 -0
- agenticlayer/otel_starlette.py +94 -0
- agenticlayer/py.typed +0 -0
|
@@ -0,0 +1,24 @@
|
|
|
1
|
+
Metadata-Version: 2.4
|
|
2
|
+
Name: agentic-layer-sdk
|
|
3
|
+
Version: 0.13.2
|
|
4
|
+
Requires-Python: <3.15,>=3.14
|
|
5
|
+
Requires-Dist: a2a-sdk
|
|
6
|
+
Requires-Dist: loguru>=0.7.3
|
|
7
|
+
Requires-Dist: opentelemetry-exporter-otlp-proto-grpc
|
|
8
|
+
Requires-Dist: opentelemetry-exporter-otlp-proto-http
|
|
9
|
+
Requires-Dist: opentelemetry-instrumentation-httpx
|
|
10
|
+
Requires-Dist: opentelemetry-instrumentation-starlette
|
|
11
|
+
Requires-Dist: pydantic>=2
|
|
12
|
+
Requires-Dist: starlette
|
|
13
|
+
Description-Content-Type: text/markdown
|
|
14
|
+
|
|
15
|
+
# agentic-layer-sdk
|
|
16
|
+
|
|
17
|
+
Framework-independent shared utilities for the Agentic Layer SDK.
|
|
18
|
+
|
|
19
|
+
This package contains common code used by all agent framework adapters:
|
|
20
|
+
- Configuration models (`SubAgent`, `McpTool`)
|
|
21
|
+
- Constants
|
|
22
|
+
- Logging setup (loguru)
|
|
23
|
+
- OpenTelemetry setup
|
|
24
|
+
- Starlette OpenTelemetry instrumentation
|
|
@@ -0,0 +1,9 @@
|
|
|
1
|
+
agenticlayer/_otel.py,sha256=K2qoSbC7kV7mkubuTtlh0QYUQKKKMpyaq51lT4JwRY0,4123
|
|
2
|
+
agenticlayer/config.py,sha256=mL7rinG2Uv5bn5xxN6Ap2FB6dE5BWKjKSWMCRRfiABc,1998
|
|
3
|
+
agenticlayer/constants.py,sha256=pdY0MPJp-nYHFHvok3slQLg7GVCfYXXYDjtcdY_e9ro,347
|
|
4
|
+
agenticlayer/loguru_config.py,sha256=muEe9Jiy2DwZDWwqx4Wtu1oeQao6HXgXYGG_ZqPj734,4282
|
|
5
|
+
agenticlayer/otel_starlette.py,sha256=akDLnoFWjM0xunO8eh1dvP2SWZpgbeTvL2tW1B7dlJM,3822
|
|
6
|
+
agenticlayer/py.typed,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
|
|
7
|
+
agentic_layer_sdk-0.13.2.dist-info/METADATA,sha256=v2X-dU9K12LehM0iexWAPawZRoEl33gi4gLob8qIFS0,759
|
|
8
|
+
agentic_layer_sdk-0.13.2.dist-info/WHEEL,sha256=QccIxa26bgl1E6uMy58deGWi-0aeIkkangHcxk2kWfw,87
|
|
9
|
+
agentic_layer_sdk-0.13.2.dist-info/RECORD,,
|
agenticlayer/_otel.py
ADDED
|
@@ -0,0 +1,98 @@
|
|
|
1
|
+
"""Framework-independent OpenTelemetry setup."""
|
|
2
|
+
|
|
3
|
+
import logging
|
|
4
|
+
import os
|
|
5
|
+
|
|
6
|
+
import httpx
|
|
7
|
+
from opentelemetry import metrics, trace
|
|
8
|
+
from opentelemetry._logs import set_logger_provider
|
|
9
|
+
from opentelemetry.exporter.otlp.proto.http._log_exporter import OTLPLogExporter
|
|
10
|
+
from opentelemetry.exporter.otlp.proto.http.metric_exporter import OTLPMetricExporter
|
|
11
|
+
from opentelemetry.instrumentation.httpx import HTTPXClientInstrumentor
|
|
12
|
+
from opentelemetry.sdk._logs import LoggerProvider, LoggingHandler
|
|
13
|
+
from opentelemetry.sdk._logs.export import BatchLogRecordProcessor
|
|
14
|
+
from opentelemetry.sdk.metrics import MeterProvider
|
|
15
|
+
from opentelemetry.sdk.metrics.export import PeriodicExportingMetricReader
|
|
16
|
+
from opentelemetry.sdk.trace import TracerProvider
|
|
17
|
+
from opentelemetry.sdk.trace.export import BatchSpanProcessor
|
|
18
|
+
|
|
19
|
+
_logger = logging.getLogger(__name__)
|
|
20
|
+
|
|
21
|
+
|
|
22
|
+
def _is_text_content(content_type: str) -> bool:
|
|
23
|
+
"""Check if content type is text-based and safe to log."""
|
|
24
|
+
text_types = ("application/json", "application/xml", "text/", "application/x-www-form-urlencoded")
|
|
25
|
+
return any(ct in content_type.lower() for ct in text_types)
|
|
26
|
+
|
|
27
|
+
|
|
28
|
+
def _decode_body(body: bytes) -> str:
|
|
29
|
+
"""Decode body bytes to string."""
|
|
30
|
+
return body.decode("utf-8", errors="replace")
|
|
31
|
+
|
|
32
|
+
|
|
33
|
+
def request_hook(span: trace.Span, request: httpx.Request) -> None:
    """Hook to log request body at DEBUG level.

    Only text-like bodies are logged. Streaming requests whose body has not
    been buffered are skipped so the hook never consumes the caller's stream.
    """
    try:
        content_type = request.headers.get("content-type", "")
        if not _is_text_content(content_type):
            return

        # NOTE: httpx always assigns ``request.stream`` (a ByteStream wrapper),
        # so an ``is not None`` check cannot distinguish buffered from streaming
        # requests — it would skip every request. Accessing ``request.content``
        # instead raises ``RequestNotRead`` for unbuffered streaming requests.
        try:
            body = request.content
        except httpx.RequestNotRead:
            # Streaming request; reading the body here would consume the stream.
            return

        if body:
            _logger.debug("HTTP request body: %s", _decode_body(body))
    except Exception:
        # Body logging is best-effort; never let it break the request.
        _logger.exception("Failed to log request body")
|
|
45
|
+
|
|
46
|
+
|
|
47
|
+
def response_hook(span: trace.Span, request: httpx.Request, response: httpx.Response) -> None:
    """Hook to log response body at DEBUG level."""
    try:
        # A response whose stream has not been consumed yet is still streaming;
        # reading its body here would drain it for the actual caller.
        if hasattr(response, "is_stream_consumed") and not response.is_stream_consumed:
            return

        ctype = response.headers.get("content-type", "")
        if not _is_text_content(ctype):
            return
        if hasattr(response, "content") and response.content:
            _logger.debug("HTTP response body: %s", _decode_body(response.content))
    except Exception:
        # Body logging is best-effort; never let it break the response.
        _logger.exception("Failed to log response body")
|
|
59
|
+
|
|
60
|
+
|
|
61
|
+
def setup_otel() -> None:
    """Set up OpenTelemetry tracing, logging and metrics (framework-independent).

    Installs global providers (tracer, logger, meter), attaches an OTLP
    handler to the root stdlib logger, and instruments all HTTPX clients
    with the body-logging hooks defined above. Call once at startup.
    """
    # Set log level for urllib3 to WARNING to reduce noise (like sending logs to OTLP)
    logging.getLogger("urllib3").setLevel(logging.WARNING)

    # Traces: exporter protocol follows OTEL_EXPORTER_OTLP_PROTOCOL (default grpc).
    trace_provider = TracerProvider()
    if os.environ.get("OTEL_EXPORTER_OTLP_PROTOCOL", "grpc") == "grpc":
        # Imported lazily so the grpc exporter is only loaded when selected.
        from opentelemetry.exporter.otlp.proto.grpc.trace_exporter import OTLPSpanExporter as OTLPSpanExporterGrpc

        trace_provider.add_span_processor(BatchSpanProcessor(OTLPSpanExporterGrpc()))
    else:
        from opentelemetry.exporter.otlp.proto.http.trace_exporter import OTLPSpanExporter as OTLPSpanExporterHttp

        trace_provider.add_span_processor(BatchSpanProcessor(OTLPSpanExporterHttp()))
    trace.set_tracer_provider(trace_provider)

    # Instrument HTTPX clients (this also transfers the trace context automatically)
    HTTPXClientInstrumentor().instrument(
        request_hook=request_hook,
        response_hook=response_hook,
    )

    # Logs
    logger_provider = LoggerProvider()
    logger_provider.add_log_record_processor(BatchLogRecordProcessor(OTLPLogExporter()))
    # Sets the global default logger provider
    set_logger_provider(logger_provider)

    # Attach OTLP handler to root logger so stdlib log records are exported too.
    logging.getLogger().addHandler(LoggingHandler(level=logging.NOTSET, logger_provider=logger_provider))

    # Sets the global default meter provider
    metrics.set_meter_provider(
        MeterProvider(
            metric_readers=[PeriodicExportingMetricReader(OTLPMetricExporter())],
        )
    )
|
agenticlayer/config.py
ADDED
|
@@ -0,0 +1,75 @@
|
|
|
1
|
+
"""
|
|
2
|
+
Configuration parsing for sub-agents and tools.
|
|
3
|
+
Parses JSON configurations to create RemoteA2aAgents, AgentTools and McpToolsets.
|
|
4
|
+
"""
|
|
5
|
+
|
|
6
|
+
import json
|
|
7
|
+
from enum import Enum
|
|
8
|
+
|
|
9
|
+
from pydantic import AnyHttpUrl, BaseModel
|
|
10
|
+
|
|
11
|
+
|
|
12
|
+
class InteractionType(str, Enum):
    """How a parent agent interacts with a configured sub-agent."""

    # Sub-agent is invoked as a tool call (the default in SubAgent).
    TOOL_CALL = "tool_call"
    # Control is transferred to the sub-agent.
    TRANSFER = "transfer"
|
|
15
|
+
|
|
16
|
+
|
|
17
|
+
class SubAgent(BaseModel):
    """Configuration for one remote sub-agent, parsed from the SUB_AGENTS JSON."""

    # Identifier of the sub-agent (the key in the SUB_AGENTS mapping).
    name: str
    # Base URL of the remote agent; validated as an HTTP(S) URL by pydantic.
    url: AnyHttpUrl
    # Defaults to exposing the sub-agent as a tool call when not specified.
    interaction_type: InteractionType = InteractionType.TOOL_CALL
|
|
21
|
+
|
|
22
|
+
|
|
23
|
+
class McpTool(BaseModel):
    """Configuration for one MCP tool endpoint, parsed from the AGENT_TOOLS JSON."""

    # Identifier of the tool (the key in the AGENT_TOOLS mapping).
    name: str
    # URL of the MCP server; validated as an HTTP(S) URL by pydantic.
    url: AnyHttpUrl
    # Request timeout in seconds.
    timeout: int = 30
    # HTTP header names to forward to the tool. NOTE: the mutable default is
    # safe here because pydantic creates a fresh copy per instance.
    propagate_headers: list[str] = []
|
|
28
|
+
|
|
29
|
+
|
|
30
|
+
def parse_sub_agents(sub_agents_config: str) -> list[SubAgent]:
    """
    Parse sub-agent definitions from a JSON string.

    Format: {"agent_name": {"url": "http://agent_url", "interaction_type": "transfer|tool_call"}, ...}

    :param sub_agents_config: JSON mapping of agent name to its configuration
        (typically the SUB_AGENTS environment variable).
    :return: A list of SubAgent models; ``interaction_type`` defaults to
        ``tool_call`` when omitted.
    :raises ValueError: If the string is not valid JSON.
    """

    try:
        agents_map = json.loads(sub_agents_config)
    except json.JSONDecodeError as e:
        # Chain the decode error as the cause instead of packing it into args.
        raise ValueError("Invalid JSON in SUB_AGENTS environment variable: " + sub_agents_config) from e

    return [
        SubAgent(
            name=agent_name,
            url=config["url"],
            interaction_type=InteractionType(config.get("interaction_type", "tool_call")),
        )
        for agent_name, config in agents_map.items()
    ]
|
|
53
|
+
|
|
54
|
+
|
|
55
|
+
def parse_tools(tools_config: str) -> list[McpTool]:
    """
    Parse MCP tool definitions from a JSON string.

    Format: {"tool_name": {"url": "http://tool_url", "timeout": 30, "propagate_headers": ["authorization"]}, ...}

    :param tools_config: JSON mapping of tool name to its configuration
        (typically the AGENT_TOOLS environment variable).
    :return: A list of McpTool models.
    :raises ValueError: If the string is not valid JSON.
    """

    try:
        tools_map = json.loads(tools_config)
    except json.JSONDecodeError as e:
        # Chain the decode error as the cause instead of packing it into args.
        raise ValueError("Invalid JSON in AGENT_TOOLS environment variable: " + tools_config) from e

    return [
        McpTool(
            name=name,
            url=config["url"],
            timeout=config.get("timeout", 30),
            # Previously dropped on the floor: forward configured headers so
            # McpTool.propagate_headers is actually populated from the config.
            propagate_headers=config.get("propagate_headers", []),
        )
        for name, config in tools_map.items()
    ]
|
|
@@ -0,0 +1,6 @@
|
|
|
1
|
+
"""Constants shared across the agenticlayer package."""

# Prefix used to store propagated HTTP headers in ADK session state as flat primitive keys.
# Each header is stored as a separate string entry: f"{HTTP_HEADERS_SESSION_KEY}.{header_name_lower}"
# e.g. "http_headers.authorization" -> "Bearer token"
HTTP_HEADERS_SESSION_KEY = "http_headers"
|
|
@@ -0,0 +1,131 @@
|
|
|
1
|
+
from __future__ import annotations
|
|
2
|
+
|
|
3
|
+
import inspect
|
|
4
|
+
import json
|
|
5
|
+
import logging
|
|
6
|
+
import os
|
|
7
|
+
import sys
|
|
8
|
+
import traceback
|
|
9
|
+
from logging import Logger
|
|
10
|
+
from typing import Callable, NotRequired, TextIO, TypedDict, Union
|
|
11
|
+
|
|
12
|
+
import loguru
|
|
13
|
+
from loguru import logger
|
|
14
|
+
|
|
15
|
+
|
|
16
|
+
class JsonRecord(TypedDict):
    """Shape of one JSON log line produced by ``_serialize``/``_json_sink``."""

    # ISO-8601 timestamp of the log record.
    timestamp: str
    # Logger name (may be None for records without one).
    name: str | None
    # Level name, e.g. "INFO".
    level: str
    # Formatted log message.
    message: str
    # Function the record originated from.
    function: str
    # Module the record originated from.
    module: str
    # Source line number of the log call.
    line: int
    # Present only when the record carries an exception: keys "stack",
    # "kind" (exception class name) and "message".
    exception: NotRequired[dict[str, str]]
|
|
25
|
+
|
|
26
|
+
|
|
27
|
+
def _serialize(record: loguru.Record) -> str:
|
|
28
|
+
log: JsonRecord = {
|
|
29
|
+
"timestamp": record["time"].isoformat(),
|
|
30
|
+
"name": record["name"],
|
|
31
|
+
"level": record["level"].name,
|
|
32
|
+
"message": record["message"],
|
|
33
|
+
"function": record["function"],
|
|
34
|
+
"module": record["module"],
|
|
35
|
+
"line": record["line"],
|
|
36
|
+
}
|
|
37
|
+
|
|
38
|
+
if record["exception"] is not None:
|
|
39
|
+
log["exception"] = {
|
|
40
|
+
"stack": "".join(
|
|
41
|
+
traceback.format_exception(
|
|
42
|
+
record["exception"].type,
|
|
43
|
+
record["exception"].value,
|
|
44
|
+
record["exception"].traceback,
|
|
45
|
+
)
|
|
46
|
+
),
|
|
47
|
+
"kind": getattr(record["exception"].type, "__name__", "None"),
|
|
48
|
+
"message": str(record["exception"].value),
|
|
49
|
+
}
|
|
50
|
+
|
|
51
|
+
return json.dumps(log)
|
|
52
|
+
|
|
53
|
+
|
|
54
|
+
def _json_sink(message: loguru.Message) -> None:
    """Loguru sink that writes each record to stderr as one JSON line."""
    line = _serialize(message.record)
    sys.stderr.write(f"{line}\n")
|
|
57
|
+
|
|
58
|
+
|
|
59
|
+
def _configure_loguru() -> None:
    """Install a single loguru sink chosen by the LOG_FORMAT / LOGLEVEL env vars."""
    sink: Union[TextIO, Callable[[loguru.Message], None]]
    if os.environ.get("LOG_FORMAT", "Text") == "JSON":
        sink = _json_sink
    else:
        sink = sys.stderr

    # Replace loguru's default handler with exactly one configured sink.
    logger.remove()
    logger.add(
        sink,
        filter={
            # Default level for all loggers (INFO unless LOGLEVEL overrides it).
            "": os.environ.get("LOGLEVEL", "INFO"),
            # Reduce verbosity of some noisy loggers
            "a2a.utils.telemetry": "INFO",
        },
    )
|
|
79
|
+
|
|
80
|
+
|
|
81
|
+
def setup_logging() -> None:
    """Initializes the application so that logging is handled by loguru"""

    _configure_loguru()

    # Some libraries we use log to standard logging and not to loguru. To also get the logs from these frameworks, we
    # add a handler to the root logger of standard logging, that converts the log entries to loguru. This way
    # loguru has the final say regarding logging, and we don't get a mixture of both logging frameworks
    logging.basicConfig(handlers=[InterceptHandler()], level=logging.NOTSET, force=True)

    # We have to replace the existing handlers from the loggers we want to intercept as well.
    for _log, _logger in logging.root.manager.loggerDict.items():
        if not isinstance(_logger, Logger) or len(_logger.handlers) == 0:
            # logger not yet created or no custom handlers. Skipping
            continue
        # Iterate over a copy: removeHandler() mutates _logger.handlers, and
        # removing entries from the list being iterated would skip handlers.
        for handler in list(_logger.handlers):
            if not isinstance(handler, logging.StreamHandler):
                # We only replace stream handlers, which write to the console
                # NullHandlers and other handlers are not replaced
                continue
            _logger.removeHandler(handler)
            _logger.addHandler(InterceptHandler())

        # Prevent duplicate logs
        if _logger.propagate:
            logger.debug("Disable propagate for logger {}", _log)
            _logger.propagate = False
|
|
109
|
+
|
|
110
|
+
|
|
111
|
+
class InterceptHandler(logging.Handler):
    """
    A Handler for the standard python logging that sends all incoming logs to loguru. Taken from the loguru documentation
    https://loguru.readthedocs.io/en/stable/overview.html
    """

    def emit(self, record: logging.LogRecord) -> None:
        """Forward a stdlib LogRecord to loguru, preserving level and call site."""
        # Get corresponding Loguru level if it exists.
        level: str | int
        try:
            level = logger.level(record.levelname).name
        except ValueError:
            # Custom level name unknown to loguru: fall back to the numeric level.
            level = record.levelno

        # Find caller from where originated the logged message.
        # Walk up past frames inside the logging module so loguru attributes the
        # record to the real call site rather than to this handler.
        frame, depth = inspect.currentframe(), 0
        while frame and (depth == 0 or frame.f_code.co_filename == logging.__file__):
            frame = frame.f_back
            depth += 1

        logger.opt(depth=depth, exception=record.exc_info).log(level, record.getMessage())
|
|
@@ -0,0 +1,94 @@
|
|
|
1
|
+
"""OpenTelemetry instrumentation for Starlette applications."""
|
|
2
|
+
|
|
3
|
+
import logging
|
|
4
|
+
import os
|
|
5
|
+
from typing import Any, Dict
|
|
6
|
+
|
|
7
|
+
from a2a.utils.constants import AGENT_CARD_WELL_KNOWN_PATH
|
|
8
|
+
from opentelemetry import trace
|
|
9
|
+
from starlette.applications import Starlette
|
|
10
|
+
|
|
11
|
+
from ._otel import _decode_body, _is_text_content
|
|
12
|
+
|
|
13
|
+
_logger = logging.getLogger(__name__)
|
|
14
|
+
|
|
15
|
+
|
|
16
|
+
def _starlette_server_request_hook(span: trace.Span, scope: Dict[str, Any]) -> None:
    """Hook to log Starlette request body at DEBUG level if enabled.

    Note: This captures the body from the ASGI scope's cached body if available.
    It does not consume the request stream to avoid breaking request handling.
    """

    try:
        # Non-HTTP scopes (e.g. websocket, lifespan) carry no request body.
        if scope.get("type") != "http":
            return

        # Read only a body that earlier middleware already cached in the scope;
        # touching the receive channel directly would consume the stream.
        body = scope.get("body")
        if not body:
            return

        # ASGI headers are a list of (bytes, bytes) pairs.
        raw_headers = dict(scope.get("headers", []))
        content_type = raw_headers.get(b"content-type", b"").decode("latin1")
        if _is_text_content(content_type):
            _logger.debug("Starlette request body: %s", _decode_body(body))
    except Exception:
        _logger.exception("Failed to log Starlette request body")
|
|
41
|
+
|
|
42
|
+
|
|
43
|
+
def _starlette_client_request_hook(span: trace.Span, scope: Dict[str, Any], message: Dict[str, Any]) -> None:
    """Hook to log Starlette client request body at DEBUG level if enabled."""
    try:
        # Only the "http.request" ASGI message carries request body bytes.
        if message.get("type") != "http.request":
            return
        body = message.get("body")
        if not body:
            return

        # ASGI headers live in the scope as (bytes, bytes) pairs.
        raw_headers = dict(scope.get("headers", []))
        content_type = raw_headers.get(b"content-type", b"").decode("latin1")
        if _is_text_content(content_type):
            _logger.debug("Starlette client request body: %s", _decode_body(body))
    except Exception:
        _logger.exception("Failed to log Starlette client request body")
|
|
58
|
+
|
|
59
|
+
|
|
60
|
+
def _starlette_client_response_hook(span: trace.Span, scope: Dict[str, Any], message: Dict[str, Any]) -> None:
    """Hook to log Starlette client response body at DEBUG level if enabled."""

    try:
        # Only "http.response.body" ASGI messages carry response payload bytes.
        if message.get("type") != "http.response.body":
            return
        body = message.get("body")
        if body:
            _logger.debug("Starlette client response body: %s", _decode_body(body))
    except Exception:
        _logger.exception("Failed to log Starlette client response body")
|
|
71
|
+
|
|
72
|
+
|
|
73
|
+
def instrument_starlette_app(app: Starlette) -> None:
    """Instrument a Starlette application with OpenTelemetry.

    Args:
        app: The Starlette application to instrument

    Note:
        This should be called after setup_otel() has been called to set up the tracer provider.
        Body logging for Starlette is limited compared to HTTPX as it must avoid consuming
        request/response streams. Bodies are only captured when already buffered in the ASGI scope.
    """

    # env needs to be set here since _excluded_urls is initialized at module import time
    # (presumably excluding the agent-card well-known path from tracing — confirm intent)
    os.environ.setdefault("OTEL_PYTHON_STARLETTE_EXCLUDED_URLS", AGENT_CARD_WELL_KNOWN_PATH)
    # Imported lazily so the env var above is set before the instrumentation module reads it.
    from opentelemetry.instrumentation.starlette import StarletteInstrumentor

    StarletteInstrumentor().instrument_app(
        app,
        server_request_hook=_starlette_server_request_hook,
        client_request_hook=_starlette_client_request_hook,
        client_response_hook=_starlette_client_response_hook,
    )
|
agenticlayer/py.typed
ADDED
|
File without changes
|