agentshadow 0.1.0__tar.gz
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- agentshadow-0.1.0/PKG-INFO +19 -0
- agentshadow-0.1.0/README.md +10 -0
- agentshadow-0.1.0/agentshadow.egg-info/PKG-INFO +19 -0
- agentshadow-0.1.0/agentshadow.egg-info/SOURCES.txt +19 -0
- agentshadow-0.1.0/agentshadow.egg-info/dependency_links.txt +1 -0
- agentshadow-0.1.0/agentshadow.egg-info/requires.txt +3 -0
- agentshadow-0.1.0/agentshadow.egg-info/top_level.txt +1 -0
- agentshadow-0.1.0/agenttrace/__init__.py +28 -0
- agentshadow-0.1.0/agenttrace/client.py +45 -0
- agentshadow-0.1.0/agenttrace/contracts.py +98 -0
- agentshadow-0.1.0/agenttrace/crewai.py +80 -0
- agentshadow-0.1.0/agenttrace/langchain.py +287 -0
- agentshadow-0.1.0/agenttrace/llamaindex.py +135 -0
- agentshadow-0.1.0/agenttrace/trace.py +172 -0
- agentshadow-0.1.0/pyproject.toml +18 -0
- agentshadow-0.1.0/setup.cfg +4 -0
- agentshadow-0.1.0/tests/test_contracts.py +96 -0
- agentshadow-0.1.0/tests/test_crewai_callback.py +56 -0
- agentshadow-0.1.0/tests/test_langchain_callback.py +76 -0
- agentshadow-0.1.0/tests/test_llamaindex_callback.py +65 -0
- agentshadow-0.1.0/tests/test_trace.py +137 -0
|
@@ -0,0 +1,19 @@
|
|
|
1
|
+
Metadata-Version: 2.4
|
|
2
|
+
Name: agentshadow
|
|
3
|
+
Version: 0.1.0
|
|
4
|
+
Summary: AgentTrace Python SDK
|
|
5
|
+
Requires-Python: >=3.11
|
|
6
|
+
Description-Content-Type: text/markdown
|
|
7
|
+
Provides-Extra: async
|
|
8
|
+
Requires-Dist: aiohttp>=3.9; extra == "async"
|
|
9
|
+
|
|
10
|
+
# SDK Environment
|
|
11
|
+
|
|
12
|
+
Use `uv` for local development:
|
|
13
|
+
|
|
14
|
+
```bash
|
|
15
|
+
cd sdk
|
|
16
|
+
uv sync --dev
|
|
17
|
+
source .venv/bin/activate
|
|
18
|
+
pytest -q
|
|
19
|
+
```
|
|
@@ -0,0 +1,19 @@
|
|
|
1
|
+
Metadata-Version: 2.4
|
|
2
|
+
Name: agentshadow
|
|
3
|
+
Version: 0.1.0
|
|
4
|
+
Summary: AgentTrace Python SDK
|
|
5
|
+
Requires-Python: >=3.11
|
|
6
|
+
Description-Content-Type: text/markdown
|
|
7
|
+
Provides-Extra: async
|
|
8
|
+
Requires-Dist: aiohttp>=3.9; extra == "async"
|
|
9
|
+
|
|
10
|
+
# SDK Environment
|
|
11
|
+
|
|
12
|
+
Use `uv` for local development:
|
|
13
|
+
|
|
14
|
+
```bash
|
|
15
|
+
cd sdk
|
|
16
|
+
uv sync --dev
|
|
17
|
+
source .venv/bin/activate
|
|
18
|
+
pytest -q
|
|
19
|
+
```
|
|
@@ -0,0 +1,19 @@
|
|
|
1
|
+
README.md
|
|
2
|
+
pyproject.toml
|
|
3
|
+
agentshadow.egg-info/PKG-INFO
|
|
4
|
+
agentshadow.egg-info/SOURCES.txt
|
|
5
|
+
agentshadow.egg-info/dependency_links.txt
|
|
6
|
+
agentshadow.egg-info/requires.txt
|
|
7
|
+
agentshadow.egg-info/top_level.txt
|
|
8
|
+
agenttrace/__init__.py
|
|
9
|
+
agenttrace/client.py
|
|
10
|
+
agenttrace/contracts.py
|
|
11
|
+
agenttrace/crewai.py
|
|
12
|
+
agenttrace/langchain.py
|
|
13
|
+
agenttrace/llamaindex.py
|
|
14
|
+
agenttrace/trace.py
|
|
15
|
+
tests/test_contracts.py
|
|
16
|
+
tests/test_crewai_callback.py
|
|
17
|
+
tests/test_langchain_callback.py
|
|
18
|
+
tests/test_llamaindex_callback.py
|
|
19
|
+
tests/test_trace.py
|
|
@@ -0,0 +1 @@
|
|
|
1
|
+
|
|
@@ -0,0 +1 @@
|
|
|
1
|
+
agenttrace
|
|
@@ -0,0 +1,28 @@
|
|
|
1
|
+
from agenttrace.contracts import Run, Span, SpanType, build_ingest_payload
|
|
2
|
+
from agenttrace.langchain import AgentTraceCallbackHandler
|
|
3
|
+
from agenttrace.trace import RunTraceContext, trace
|
|
4
|
+
|
|
5
|
+
|
|
6
|
+
def __getattr__(name: str):
|
|
7
|
+
if name == "AgentTraceCrewAIHandler":
|
|
8
|
+
from agenttrace.crewai import AgentTraceCrewAIHandler
|
|
9
|
+
|
|
10
|
+
return AgentTraceCrewAIHandler
|
|
11
|
+
if name == "AgentTraceLlamaIndexHandler":
|
|
12
|
+
from agenttrace.llamaindex import AgentTraceLlamaIndexHandler
|
|
13
|
+
|
|
14
|
+
return AgentTraceLlamaIndexHandler
|
|
15
|
+
raise AttributeError(f"module 'agenttrace' has no attribute {name!r}")
|
|
16
|
+
|
|
17
|
+
|
|
18
|
+
__all__ = [
|
|
19
|
+
"AgentTraceCallbackHandler",
|
|
20
|
+
"AgentTraceCrewAIHandler",
|
|
21
|
+
"AgentTraceLlamaIndexHandler",
|
|
22
|
+
"Run",
|
|
23
|
+
"RunTraceContext",
|
|
24
|
+
"Span",
|
|
25
|
+
"SpanType",
|
|
26
|
+
"build_ingest_payload",
|
|
27
|
+
"trace",
|
|
28
|
+
]
|
|
@@ -0,0 +1,45 @@
|
|
|
1
|
+
from __future__ import annotations
|
|
2
|
+
|
|
3
|
+
import json
|
|
4
|
+
import os
|
|
5
|
+
from typing import Any
|
|
6
|
+
from urllib import request
|
|
7
|
+
|
|
8
|
+
|
|
9
|
+
class IngestClient:
    """Minimal HTTP client that POSTs trace payloads to the ingest API.

    The synchronous path uses only the standard library; ``async_ingest``
    requires the optional ``aiohttp`` dependency (the "async" extra).
    """

    def __init__(self, api_url: str, api_key: str | None = None, timeout_seconds: float = 5.0) -> None:
        # Trailing slash is stripped so f"{api_url}/v1/ingest" never doubles up.
        self.api_url = api_url.rstrip("/")
        self.api_key = api_key
        self.timeout_seconds = timeout_seconds

    @classmethod
    def from_env(cls) -> IngestClient | None:
        """Build a client from AGENTTRACE_API_URL / AGENTTRACE_API_KEY.

        Returns None when AGENTTRACE_API_URL is unset or empty, which lets
        callers treat ingestion as disabled.
        """
        api_url = os.getenv("AGENTTRACE_API_URL")
        if not api_url:
            return None
        api_key = os.getenv("AGENTTRACE_API_KEY")
        return cls(api_url=api_url, api_key=api_key)

    def _request_parts(self, payload: dict[str, Any]) -> tuple[str, bytes, dict[str, str]]:
        """Encode the payload and build the URL/header set.

        Shared by ingest and async_ingest so the endpoint and auth header
        cannot drift apart between the sync and async paths.
        """
        data = json.dumps(payload).encode("utf-8")
        url = f"{self.api_url}/v1/ingest"
        headers = {"Content-Type": "application/json"}
        if self.api_key:
            headers["Authorization"] = f"Bearer {self.api_key}"
        return url, data, headers

    def ingest(self, payload: dict[str, Any]) -> None:
        """Synchronously POST *payload* to /v1/ingest (response body is ignored)."""
        url, data, headers = self._request_parts(payload)
        req = request.Request(url=url, data=data, headers=headers, method="POST")
        with request.urlopen(req, timeout=self.timeout_seconds):
            return

    async def async_ingest(self, payload: dict[str, Any]) -> None:
        """Asynchronously POST *payload* to /v1/ingest via aiohttp."""
        import aiohttp  # optional dependency: installed via the "async" extra

        url, data, headers = self._request_parts(payload)
        async with aiohttp.ClientSession() as session:
            async with session.post(
                url, data=data, headers=headers, timeout=aiohttp.ClientTimeout(total=self.timeout_seconds)
            ):
                return
|
|
@@ -0,0 +1,98 @@
|
|
|
1
|
+
from __future__ import annotations
|
|
2
|
+
|
|
3
|
+
import json
|
|
4
|
+
from dataclasses import asdict, dataclass, field
|
|
5
|
+
from datetime import datetime
|
|
6
|
+
from typing import Any, Literal
|
|
7
|
+
from uuid import UUID
|
|
8
|
+
|
|
9
|
+
SpanType = Literal["llm", "tool", "retrieval", "chain", "agent"]
|
|
10
|
+
|
|
11
|
+
|
|
12
|
+
def _ensure_json_serializable(value: Any, field_name: str) -> None:
|
|
13
|
+
try:
|
|
14
|
+
json.dumps(value)
|
|
15
|
+
except TypeError as exc:
|
|
16
|
+
raise ValueError(f"{field_name} must be JSON serializable") from exc
|
|
17
|
+
|
|
18
|
+
|
|
19
|
+
@dataclass(slots=True)
|
|
20
|
+
class Run:
|
|
21
|
+
run_id: UUID
|
|
22
|
+
name: str
|
|
23
|
+
created_at: datetime
|
|
24
|
+
metadata_json: dict[str, Any] = field(default_factory=dict)
|
|
25
|
+
|
|
26
|
+
def validate(self) -> None:
|
|
27
|
+
if not self.name:
|
|
28
|
+
raise ValueError("run.name must be non-empty")
|
|
29
|
+
_ensure_json_serializable(self.metadata_json, "run.metadata_json")
|
|
30
|
+
|
|
31
|
+
def to_dict(self) -> dict[str, Any]:
|
|
32
|
+
self.validate()
|
|
33
|
+
payload = asdict(self)
|
|
34
|
+
payload["run_id"] = str(self.run_id)
|
|
35
|
+
payload["created_at"] = self.created_at.isoformat()
|
|
36
|
+
return payload
|
|
37
|
+
|
|
38
|
+
|
|
39
|
+
@dataclass(slots=True)
|
|
40
|
+
class Span:
|
|
41
|
+
span_id: UUID
|
|
42
|
+
run_id: UUID
|
|
43
|
+
parent_span_id: UUID | None
|
|
44
|
+
span_type: SpanType
|
|
45
|
+
name: str
|
|
46
|
+
start_time: datetime
|
|
47
|
+
end_time: datetime
|
|
48
|
+
duration_ms: int
|
|
49
|
+
input_json: Any
|
|
50
|
+
output_json: Any
|
|
51
|
+
error: str | None
|
|
52
|
+
metadata_json: dict[str, Any] = field(default_factory=dict)
|
|
53
|
+
|
|
54
|
+
def validate(self) -> None:
|
|
55
|
+
if not self.name:
|
|
56
|
+
raise ValueError("span.name must be non-empty")
|
|
57
|
+
if self.duration_ms < 0:
|
|
58
|
+
raise ValueError("span.duration_ms must be >= 0")
|
|
59
|
+
_ensure_json_serializable(self.input_json, "span.input_json")
|
|
60
|
+
_ensure_json_serializable(self.output_json, "span.output_json")
|
|
61
|
+
_ensure_json_serializable(self.metadata_json, "span.metadata_json")
|
|
62
|
+
if self.start_time > self.end_time:
|
|
63
|
+
raise ValueError("span.start_time must be <= span.end_time")
|
|
64
|
+
|
|
65
|
+
def to_dict(self) -> dict[str, Any]:
|
|
66
|
+
self.validate()
|
|
67
|
+
payload = asdict(self)
|
|
68
|
+
payload["span_id"] = str(self.span_id)
|
|
69
|
+
payload["run_id"] = str(self.run_id)
|
|
70
|
+
payload["parent_span_id"] = str(self.parent_span_id) if self.parent_span_id else None
|
|
71
|
+
payload["start_time"] = self.start_time.isoformat()
|
|
72
|
+
payload["end_time"] = self.end_time.isoformat()
|
|
73
|
+
return payload
|
|
74
|
+
|
|
75
|
+
|
|
76
|
+
def build_ingest_payload(runs: list[Run], spans: list[Span]) -> dict[str, Any]:
    """Validate cross-references and assemble the ingest request body.

    Raises ValueError when a span points at an unknown run, or at a parent
    span that does not belong to the same run.
    """
    known_runs = {run.run_id for run in runs}
    spans_per_run: dict[UUID, set[UUID]] = {}

    # First pass: validate each span and index span ids by owning run.
    for span in spans:
        span.validate()
        spans_per_run.setdefault(span.run_id, set()).add(span.span_id)

    # Second pass: referential checks need the complete index from pass one.
    for span in spans:
        if span.run_id not in known_runs:
            raise ValueError(f"span {span.span_id} references unknown run_id {span.run_id}")
        siblings = spans_per_run.get(span.run_id, set())
        if span.parent_span_id and span.parent_span_id not in siblings:
            raise ValueError(
                f"span {span.span_id} has parent_span_id {span.parent_span_id} "
                f"not found in run {span.run_id}"
            )

    body = {
        "runs": [run.to_dict() for run in runs],
        "spans": [span.to_dict() for span in spans],
    }
    _ensure_json_serializable(body, "ingest payload")
    return body
|
|
@@ -0,0 +1,80 @@
|
|
|
1
|
+
"""CrewAI integration for AgentTrace.
|
|
2
|
+
|
|
3
|
+
Usage::
|
|
4
|
+
|
|
5
|
+
from agenttrace.crewai import AgentTraceCrewAIHandler
|
|
6
|
+
|
|
7
|
+
handler = AgentTraceCrewAIHandler()
|
|
8
|
+
crew = Crew(
|
|
9
|
+
...,
|
|
10
|
+
step_callback=handler.step_callback,
|
|
11
|
+
task_callback=handler.task_callback,
|
|
12
|
+
)
|
|
13
|
+
"""
|
|
14
|
+
|
|
15
|
+
from __future__ import annotations
|
|
16
|
+
|
|
17
|
+
from datetime import datetime, timezone
|
|
18
|
+
from typing import Any
|
|
19
|
+
from uuid import uuid4
|
|
20
|
+
|
|
21
|
+
from agenttrace.trace import trace
|
|
22
|
+
|
|
23
|
+
|
|
24
|
+
def _now_utc() -> datetime:
|
|
25
|
+
return datetime.now(timezone.utc)
|
|
26
|
+
|
|
27
|
+
|
|
28
|
+
def _safe_str(value: Any) -> str:
|
|
29
|
+
try:
|
|
30
|
+
return str(value)
|
|
31
|
+
except Exception: # noqa: BLE001
|
|
32
|
+
return "<unserializable>"
|
|
33
|
+
|
|
34
|
+
|
|
35
|
+
class AgentTraceCrewAIHandler:
    """Callback handler for CrewAI step_callback / task_callback hooks."""

    def step_callback(self, step_output: Any) -> None:
        """Called by CrewAI after each agent step."""
        ctx = trace.current()
        if ctx is None:
            # No active AgentTrace run context: nothing to record.
            return

        timestamp = _now_utc()
        # Zero-duration span: CrewAI only tells us the step finished.
        ctx.add_span(
            span_type="agent",
            name="crewai_step",
            start_time=timestamp,
            end_time=timestamp,
            input_json={},
            output_json={"step_output": _safe_str(step_output)},
            metadata_json={"source": "crewai"},
        )

    def task_callback(self, task_output: Any) -> None:
        """Called by CrewAI after each task completes."""
        ctx = trace.current()
        if ctx is None:
            return

        timestamp = _now_utc()
        payload: dict[str, Any] = {"task_output": _safe_str(task_output)}

        # CrewAI TaskOutput objects may have structured attributes
        for attr in ("description", "summary", "raw"):
            value = getattr(task_output, attr, None)
            if value is not None:
                payload[attr] = _safe_str(value)

        ctx.add_span(
            span_type="chain",
            name="crewai_task",
            start_time=timestamp,
            end_time=timestamp,
            input_json={},
            output_json=payload,
            metadata_json={"source": "crewai"},
        )
|
|
@@ -0,0 +1,287 @@
|
|
|
1
|
+
from __future__ import annotations
|
|
2
|
+
|
|
3
|
+
from dataclasses import dataclass
|
|
4
|
+
from datetime import datetime, timezone
|
|
5
|
+
import json
|
|
6
|
+
from typing import Any
|
|
7
|
+
from uuid import UUID, uuid4
|
|
8
|
+
|
|
9
|
+
from agenttrace.trace import trace
|
|
10
|
+
|
|
11
|
+
|
|
12
|
+
def _now_utc() -> datetime:
|
|
13
|
+
return datetime.now(timezone.utc)
|
|
14
|
+
|
|
15
|
+
|
|
16
|
+
def _to_run_key(value: UUID | str | None) -> str | None:
|
|
17
|
+
if value is None:
|
|
18
|
+
return None
|
|
19
|
+
return str(value)
|
|
20
|
+
|
|
21
|
+
|
|
22
|
+
def _pick_name(serialized: dict[str, Any] | None, fallback: str) -> str:
|
|
23
|
+
if not serialized:
|
|
24
|
+
return fallback
|
|
25
|
+
value = serialized.get("name")
|
|
26
|
+
if isinstance(value, str) and value:
|
|
27
|
+
return value
|
|
28
|
+
return fallback
|
|
29
|
+
|
|
30
|
+
|
|
31
|
+
def _safe_json(value: Any, seen: set[int] | None = None) -> Any:
|
|
32
|
+
if seen is None:
|
|
33
|
+
seen = set()
|
|
34
|
+
if value is None or isinstance(value, (str, int, float, bool)):
|
|
35
|
+
return value
|
|
36
|
+
|
|
37
|
+
value_id = id(value)
|
|
38
|
+
if value_id in seen:
|
|
39
|
+
return "<recursive>"
|
|
40
|
+
|
|
41
|
+
if isinstance(value, dict):
|
|
42
|
+
seen.add(value_id)
|
|
43
|
+
return {str(k): _safe_json(v, seen) for k, v in value.items()}
|
|
44
|
+
if isinstance(value, (list, tuple, set)):
|
|
45
|
+
seen.add(value_id)
|
|
46
|
+
return [_safe_json(v, seen) for v in value]
|
|
47
|
+
if isinstance(value, bytes):
|
|
48
|
+
return value.decode("utf-8", errors="replace")
|
|
49
|
+
|
|
50
|
+
for attr in ("model_dump", "dict"):
|
|
51
|
+
fn = getattr(value, attr, None)
|
|
52
|
+
if callable(fn):
|
|
53
|
+
try:
|
|
54
|
+
return _safe_json(fn(), seen)
|
|
55
|
+
except Exception: # noqa: BLE001
|
|
56
|
+
pass
|
|
57
|
+
|
|
58
|
+
try:
|
|
59
|
+
json.dumps(value)
|
|
60
|
+
return value
|
|
61
|
+
except TypeError:
|
|
62
|
+
return repr(value)
|
|
63
|
+
|
|
64
|
+
|
|
65
|
+
@dataclass(slots=True)
class _PendingSpan:
    """Bookkeeping for a span that has started but not yet ended.

    Created in AgentTraceCallbackHandler._start and consumed by ._end,
    where it is combined with the end-time/output into a real span.
    """

    span_id: str
    span_type: str
    name: str
    start_time: datetime
    input_json: Any
    metadata_json: dict[str, Any]
    parent_run_key: str | None  # stringified LangChain parent run id; None at the root
|
|
74
|
+
|
|
75
|
+
|
|
76
|
+
class AgentTraceCallbackHandler:
    """LangChain callback handler that records callback events as AgentTrace spans.

    Spans are buffered in ``_pending`` between the ``on_*_start`` and the
    matching ``on_*_end`` / ``on_*_error`` callbacks, then attached to the
    ambient trace context (``trace.current()``) when the event finishes.
    If no trace context is active at end time, the event is dropped.
    """

    def __init__(self) -> None:
        # LangChain callback manager expects these handler attributes.
        self.raise_error = False
        self.run_inline = True
        self.ignore_llm = False
        self.ignore_chat_model = False
        self.ignore_chain = False
        self.ignore_agent = False
        self.ignore_retriever = False
        self.ignore_tool = False
        self.ignore_custom_event = False
        self.ignore_retry = False

        # Spans started but not yet finished, keyed by stringified run_id.
        self._pending: dict[str, _PendingSpan] = {}
        # Last span id emitted per run key; used to resolve parent_span_id.
        # NOTE(review): entries are never evicted, so this grows for the
        # lifetime of the handler — confirm handlers are per-run/short-lived.
        self._span_id_by_run: dict[str, str] = {}

    def _start(
        self,
        *,
        span_type: str,
        name: str,
        input_json: Any,
        run_id: UUID,
        parent_run_id: UUID | None,
        metadata_json: dict[str, Any] | None = None,
    ) -> None:
        """Buffer a pending span for *run_id*; it is emitted later by ``_end``."""
        run_key = _to_run_key(run_id)
        span_id = str(uuid4())
        self._pending[run_key] = _PendingSpan(
            span_id=span_id,
            span_type=span_type,
            name=name,
            start_time=_now_utc(),
            input_json=_safe_json(input_json),
            metadata_json=_safe_json(metadata_json or {}),
            parent_run_key=_to_run_key(parent_run_id),
        )
        self._span_id_by_run[run_key] = span_id

    def _end(self, *, run_id: UUID, output_json: Any, error: str | None = None) -> None:
        """Finish the pending span for *run_id* and add it to the active trace.

        No-op when there is no active trace context. An end/error callback
        with no matching start still produces a synthetic "unknown" span.
        """
        ctx = trace.current()
        if ctx is None:
            return
        run_key = _to_run_key(run_id)
        pending = self._pending.pop(run_key, None)
        end_time = _now_utc()
        if pending is None:
            # End arrived without a recorded start: zero-duration placeholder.
            pending = _PendingSpan(
                span_id=str(uuid4()),
                span_type="chain",
                name="unknown",
                start_time=end_time,
                input_json={},
                metadata_json={},
                parent_run_key=None,
            )

        # Map LangChain's parent run onto the span previously emitted for it.
        parent_span_id = None
        if pending.parent_run_key:
            parent_span_id = self._span_id_by_run.get(pending.parent_run_key)

        span = ctx.add_span(
            span_id=pending.span_id,
            span_type=pending.span_type,  # type: ignore[arg-type]
            name=pending.name,
            start_time=pending.start_time,
            end_time=end_time,
            input_json=pending.input_json,
            output_json=_safe_json(output_json),
            error=error,
            metadata_json=pending.metadata_json,
            parent_span_id=parent_span_id,
        )
        # Re-record the id from the returned span (presumably authoritative —
        # add_span may assign its own) so children link to the right parent.
        self._span_id_by_run[run_key] = str(span.span_id)

    def on_llm_start(
        self,
        serialized: dict[str, Any],
        prompts: list[str],
        *,
        run_id: UUID,
        parent_run_id: UUID | None = None,
        **_: Any,
    ) -> None:
        """Open an "llm" span for a completion-style LLM call."""
        self._start(
            span_type="llm",
            name=_pick_name(serialized, "llm"),
            input_json={"prompts": prompts},
            run_id=run_id,
            parent_run_id=parent_run_id,
            metadata_json={"serialized": serialized},
        )

    def on_llm_end(self, response: Any, *, run_id: UUID, **_: Any) -> None:
        """Close the LLM span with the model response."""
        self._end(run_id=run_id, output_json={"response": response})

    def on_chat_model_start(
        self,
        serialized: dict[str, Any],
        messages: list[list[Any]],
        *,
        run_id: UUID,
        parent_run_id: UUID | None = None,
        **_: Any,
    ) -> None:
        """Open an "llm" span for a chat-model call (messages, not prompts)."""
        self._start(
            span_type="llm",
            name=_pick_name(serialized, "chat_model"),
            input_json={"messages": messages},
            run_id=run_id,
            parent_run_id=parent_run_id,
            metadata_json={"serialized": serialized},
        )

    def on_chat_model_end(self, response: Any, *, run_id: UUID, **_: Any) -> None:
        """Close the chat-model span with the model response."""
        self._end(run_id=run_id, output_json={"response": response})

    def on_llm_new_token(self, token: str, *, run_id: UUID, **_: Any) -> None:
        # Token-level streaming is not persisted in v0.
        return

    def on_llm_error(self, error: BaseException, *, run_id: UUID, **_: Any) -> None:
        """Close the LLM span with an error string and empty output."""
        self._end(
            run_id=run_id,
            output_json={},
            error=f"{type(error).__name__}: {error}",
        )

    def on_tool_start(
        self,
        serialized: dict[str, Any],
        input_str: str,
        *,
        run_id: UUID,
        parent_run_id: UUID | None = None,
        **_: Any,
    ) -> None:
        """Open a "tool" span for a tool invocation."""
        self._start(
            span_type="tool",
            name=_pick_name(serialized, "tool"),
            input_json={"input": input_str},
            run_id=run_id,
            parent_run_id=parent_run_id,
            metadata_json={"serialized": serialized},
        )

    def on_tool_end(self, output: Any, *, run_id: UUID, **_: Any) -> None:
        """Close the tool span with the tool output."""
        self._end(run_id=run_id, output_json={"output": output})

    def on_tool_error(self, error: BaseException, *, run_id: UUID, **_: Any) -> None:
        """Close the tool span with an error string and empty output."""
        self._end(
            run_id=run_id,
            output_json={},
            error=f"{type(error).__name__}: {error}",
        )

    def on_retriever_start(
        self,
        serialized: dict[str, Any],
        query: str,
        *,
        run_id: UUID,
        parent_run_id: UUID | None = None,
        **_: Any,
    ) -> None:
        """Open a "retrieval" span for a retriever query."""
        self._start(
            span_type="retrieval",
            name=_pick_name(serialized, "retriever"),
            input_json={"query": query},
            run_id=run_id,
            parent_run_id=parent_run_id,
            metadata_json={"serialized": serialized},
        )

    def on_retriever_end(self, documents: Any, *, run_id: UUID, **_: Any) -> None:
        """Close the retrieval span with the retrieved documents."""
        self._end(run_id=run_id, output_json={"documents": documents})

    def on_retriever_error(self, error: BaseException, *, run_id: UUID, **_: Any) -> None:
        """Close the retrieval span with an error string and empty output."""
        self._end(
            run_id=run_id,
            output_json={},
            error=f"{type(error).__name__}: {error}",
        )

    def on_chain_start(
        self,
        serialized: dict[str, Any],
        inputs: Any,
        *,
        run_id: UUID,
        parent_run_id: UUID | None = None,
        **_: Any,
    ) -> None:
        """Open a "chain" span; *inputs* is recorded as-is (not wrapped in a dict)."""
        self._start(
            span_type="chain",
            name=_pick_name(serialized, "chain"),
            input_json=inputs,
            run_id=run_id,
            parent_run_id=parent_run_id,
            metadata_json={"serialized": serialized},
        )

    def on_chain_end(self, outputs: dict[str, Any], *, run_id: UUID, **_: Any) -> None:
        """Close the chain span with the chain outputs."""
        self._end(run_id=run_id, output_json=outputs)

    def on_chain_error(self, error: BaseException, *, run_id: UUID, **_: Any) -> None:
        """Close the chain span with an error string and empty output."""
        self._end(
            run_id=run_id,
            output_json={},
            error=f"{type(error).__name__}: {error}",
        )
|