traceagent-0.1.0.tar.gz
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- traceagent-0.1.0/.gitignore +38 -0
- traceagent-0.1.0/PKG-INFO +102 -0
- traceagent-0.1.0/README.md +67 -0
- traceagent-0.1.0/agentlens/__init__.py +47 -0
- traceagent-0.1.0/agentlens/auto.py +201 -0
- traceagent-0.1.0/agentlens/callbacks/__init__.py +1 -0
- traceagent-0.1.0/agentlens/callbacks/client.py +61 -0
- traceagent-0.1.0/agentlens/callbacks/decorator.py +142 -0
- traceagent-0.1.0/agentlens/callbacks/langchain_cb.py +516 -0
- traceagent-0.1.0/agentlens/config.py +62 -0
- traceagent-0.1.0/agentlens/core.py +160 -0
- traceagent-0.1.0/agentlens/models.py +245 -0
- traceagent-0.1.0/pyproject.toml +59 -0
--- /dev/null
+++ traceagent-0.1.0/.gitignore
@@ -0,0 +1,38 @@
+# Byte-compiled
+__pycache__/
+*.py[cod]
+*$py.class
+
+# Distribution
+dist/
+build/
+*.egg-info/
+*.egg
+
+# Virtual environments
+.venv/
+venv/
+env/
+
+# IDE
+.vscode/
+.idea/
+*.swp
+*.swo
+
+# OS
+.DS_Store
+Thumbs.db
+
+# Database
+*.db
+
+# Node
+dashboard/node_modules/
+
+# Built dashboard (generated — don't commit)
+server/agentlens_server/static/
+
+# Env
+.env
+.env.local
--- /dev/null
+++ traceagent-0.1.0/PKG-INFO
@@ -0,0 +1,102 @@
+Metadata-Version: 2.4
+Name: traceagent
+Version: 0.1.0
+Summary: Local-first observability for AI agents. Lightweight SDK — trace LangChain, LangGraph, and any Python code.
+Project-URL: Homepage, https://github.com/agentlens/agentlens
+Project-URL: Documentation, https://github.com/agentlens/agentlens#readme
+Project-URL: Repository, https://github.com/agentlens/agentlens
+Project-URL: Issues, https://github.com/agentlens/agentlens/issues
+Author: AgentLens Contributors
+License: MIT
+Keywords: agents,ai,debugging,langchain,langgraph,llm,observability,tracing
+Classifier: Development Status :: 3 - Alpha
+Classifier: Intended Audience :: Developers
+Classifier: License :: OSI Approved :: MIT License
+Classifier: Programming Language :: Python :: 3
+Classifier: Programming Language :: Python :: 3.9
+Classifier: Programming Language :: Python :: 3.10
+Classifier: Programming Language :: Python :: 3.11
+Classifier: Programming Language :: Python :: 3.12
+Classifier: Programming Language :: Python :: 3.13
+Classifier: Topic :: Scientific/Engineering :: Artificial Intelligence
+Classifier: Topic :: Software Development :: Debuggers
+Requires-Python: >=3.9
+Requires-Dist: pydantic>=2.0
+Provides-Extra: all
+Requires-Dist: langchain-core>=0.1.0; extra == 'all'
+Provides-Extra: dev
+Requires-Dist: httpx>=0.24; extra == 'dev'
+Requires-Dist: pytest-asyncio>=0.21; extra == 'dev'
+Requires-Dist: pytest>=7.0; extra == 'dev'
+Requires-Dist: ruff>=0.1.0; extra == 'dev'
+Provides-Extra: langchain
+Requires-Dist: langchain-core>=0.1.0; extra == 'langchain'
+Description-Content-Type: text/markdown
+
+# AgentLens SDK
+
+**Local-first observability for AI agents.** Lightweight Python SDK — trace LangChain, LangGraph, and any Python code.
+
+## Install
+
+```bash
+pip install agentlens
+```
+
+## Quick Start
+
+### Option 1: Environment Variables (zero code changes, like LangSmith)
+
+```bash
+export AGENTLENS_ENABLED=true
+export AGENTLENS_ENDPOINT=http://localhost:6832  # your AgentLens server
+```
+
+```python
+import agentlens  # auto-traces all LangChain/LangGraph calls
+
+result = graph.invoke({"messages": [HumanMessage(content="Hello!")]})
+```
+
+### Option 2: Explicit Callback
+
+```python
+from agentlens import AgentLens
+
+lens = AgentLens(server_url="http://localhost:6832")
+
+result = graph.invoke(
+    {"messages": [HumanMessage(content="Hello!")]},
+    config={"callbacks": [lens.callback()]}
+)
+```
+
+### Option 3: Decorators & Context Managers
+
+```python
+from agentlens import AgentLens
+
+lens = AgentLens()
+
+@lens.observe(name="search", kind="tool")
+def search(query: str):
+    return results
+
+with lens.trace("My Pipeline") as t:
+    result = search("hello")
+    t.outputs = {"result": result}
+```
+
+## Configuration
+
+| Env Variable | Description | Default |
+|---|---|---|
+| `AGENTLENS_ENABLED` | Enable tracing | `false` |
+| `AGENTLENS_ENDPOINT` | Server URL | `http://localhost:6832` |
+| `AGENTLENS_PROJECT` | Project name | `default` |
+| `AGENTLENS_TAGS` | Comma-separated tags | |
+| `AGENTLENS_AUTO_TRACE` | Auto-attach to LangChain | `true` when enabled |
+
+## Server
+
+The SDK sends traces to an AgentLens server. See [agentlens-server](../server/) for setup.
--- /dev/null
+++ traceagent-0.1.0/README.md
@@ -0,0 +1,67 @@
+# AgentLens SDK
+
+**Local-first observability for AI agents.** Lightweight Python SDK — trace LangChain, LangGraph, and any Python code.
+
+## Install
+
+```bash
+pip install agentlens
+```
+
+## Quick Start
+
+### Option 1: Environment Variables (zero code changes, like LangSmith)
+
+```bash
+export AGENTLENS_ENABLED=true
+export AGENTLENS_ENDPOINT=http://localhost:6832  # your AgentLens server
+```
+
+```python
+import agentlens  # auto-traces all LangChain/LangGraph calls
+
+result = graph.invoke({"messages": [HumanMessage(content="Hello!")]})
+```
+
+### Option 2: Explicit Callback
+
+```python
+from agentlens import AgentLens
+
+lens = AgentLens(server_url="http://localhost:6832")
+
+result = graph.invoke(
+    {"messages": [HumanMessage(content="Hello!")]},
+    config={"callbacks": [lens.callback()]}
+)
+```
+
+### Option 3: Decorators & Context Managers
+
+```python
+from agentlens import AgentLens
+
+lens = AgentLens()
+
+@lens.observe(name="search", kind="tool")
+def search(query: str):
+    return results
+
+with lens.trace("My Pipeline") as t:
+    result = search("hello")
+    t.outputs = {"result": result}
+```
+
+## Configuration
+
+| Env Variable | Description | Default |
+|---|---|---|
+| `AGENTLENS_ENABLED` | Enable tracing | `false` |
+| `AGENTLENS_ENDPOINT` | Server URL | `http://localhost:6832` |
+| `AGENTLENS_PROJECT` | Project name | `default` |
+| `AGENTLENS_TAGS` | Comma-separated tags | |
+| `AGENTLENS_AUTO_TRACE` | Auto-attach to LangChain | `true` when enabled |
+
+## Server
+
+The SDK sends traces to an AgentLens server. See [agentlens-server](../server/) for setup.
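The Option 3 snippet in the README above is a fragment: `results` is undefined and the target server is left implicit. Below is a minimal self-contained sketch of the same flow, assuming the `AgentLens(server_url=...)`, `observe()`, and `trace()` API shown in the README; `fake_search` and its return value are illustrative stand-ins, not part of the package.

```python
from agentlens import AgentLens

# Point the SDK at a local AgentLens server (the default endpoint from the table above).
lens = AgentLens(server_url="http://localhost:6832")

# Each call to fake_search is recorded as a "tool" span.
@lens.observe(name="search", kind="tool")
def fake_search(query: str) -> list[str]:
    return [f"result for {query}"]

# Group the spans under one named trace and attach the final outputs to it.
with lens.trace("My Pipeline") as t:
    hits = fake_search("hello")
    t.outputs = {"result": hits}
```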
--- /dev/null
+++ traceagent-0.1.0/agentlens/__init__.py
@@ -0,0 +1,47 @@
+"""
+AgentLens - Local-first observability for AI agents.
+
+See everything your agents do, locally.
+
+Option 1 — Environment Variables (like LangSmith, zero code changes):
+
+    export AGENTLENS_ENABLED=true
+    export AGENTLENS_ENDPOINT=http://localhost:6832  # optional
+
+    # Then just import agentlens anywhere and all LangChain/LangGraph
+    # calls are traced automatically:
+    import agentlens
+
+Option 2 — Programmatic (one-liner):
+
+    import agentlens
+    agentlens.configure(enabled=True, project="my-project")
+
+Option 3 — Explicit callback:
+
+    from agentlens import AgentLens
+    lens = AgentLens()
+    result = graph.invoke(input, config={"callbacks": [lens.callback()]})
+"""
+
+from agentlens.core import AgentLens
+from agentlens.auto import configure, disable_auto_trace
+from agentlens.config import config
+from agentlens.models import Span, SpanKind, SpanStatus, Trace, TraceStatus
+
+__version__ = "0.1.0"
+__all__ = [
+    "AgentLens",
+    "configure",
+    "disable_auto_trace",
+    "config",
+    "Trace",
+    "Span",
+    "TraceStatus",
+    "SpanStatus",
+    "SpanKind",
+]
+
+# ---- Auto-activate if env vars are set (like LangSmith) ----
+if config.is_active and config.auto_trace:
+    configure(enabled=True)
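The auto-activation block at the bottom of `__init__.py` means that setting the environment variables before the first `import agentlens` is enough; no other code change is needed. A minimal sketch, assuming the variables documented in the README; the `"demo"` project name is illustrative.

```python
import os

# Must be set before agentlens is first imported, since activation runs at import time.
os.environ["AGENTLENS_ENABLED"] = "true"
os.environ["AGENTLENS_PROJECT"] = "demo"

import agentlens  # the import itself installs the global LangChain callback

# Any LangChain/LangGraph code executed after this point is traced automatically.
```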
--- /dev/null
+++ traceagent-0.1.0/agentlens/auto.py
@@ -0,0 +1,201 @@
+"""Auto-tracing module — registers AgentLens as a global LangChain callback.
+
+When AGENTLENS_ENABLED=true, this hooks into LangChain's callback system
+so ALL LLM/chain/tool calls are traced automatically — zero code changes needed.
+
+This mirrors how LangSmith works with LANGCHAIN_TRACING_V2=true.
+"""
+
+from __future__ import annotations
+
+import logging
+from typing import Any
+
+from agentlens.config import config
+
+logger = logging.getLogger("agentlens")
+
+_auto_trace_installed = False
+
+
+def configure(
+    enabled: bool | None = None,
+    endpoint: str | None = None,
+    project: str | None = None,
+    tags: list[str] | None = None,
+    auto_trace: bool | None = None,
+) -> None:
+    """
+    Configure AgentLens programmatically. Call this once at the top of your app.
+
+    This is the alternative to setting environment variables.
+
+    Usage:
+        import agentlens
+        agentlens.configure(enabled=True, project="my-project")
+
+        # Now all LangChain/LangGraph calls are traced automatically!
+        result = graph.invoke({"messages": [...]})
+
+    Args:
+        enabled: Enable tracing (default reads AGENTLENS_ENABLED env var)
+        endpoint: Server URL (default: http://localhost:6832)
+        project: Project name for grouping traces
+        tags: Default tags added to every trace
+        auto_trace: Auto-register as global LangChain callback
+    """
+    import os
+
+    if enabled is not None:
+        os.environ["AGENTLENS_ENABLED"] = str(enabled).lower()
+    if endpoint is not None:
+        os.environ["AGENTLENS_ENDPOINT"] = endpoint
+    if project is not None:
+        os.environ["AGENTLENS_PROJECT"] = project
+    if tags is not None:
+        os.environ["AGENTLENS_TAGS"] = ",".join(tags)
+    if auto_trace is not None:
+        os.environ["AGENTLENS_AUTO_TRACE"] = str(auto_trace).lower()
+
+    # Reload config from env
+    config.reload()
+
+    # Install auto-tracing if enabled
+    if config.is_active and config.auto_trace:
+        install_auto_trace()
+
+
+def install_auto_trace() -> None:
+    """
+    Register AgentLens as a global LangChain callback handler.
+
+    After calling this, ALL LangChain/LangGraph invocations are traced
+    automatically without passing callbacks explicitly.
+    """
+    global _auto_trace_installed
+    if _auto_trace_installed:
+        return
+
+    try:
+        from langchain_core.callbacks import BaseCallbackHandler
+        from langchain_core.callbacks.manager import (
+            CallbackManager,
+        )
+        # Try to use the global callback approach
+        _install_via_env(config)
+        _auto_trace_installed = True
+        logger.info(
+            f"AgentLens auto-tracing enabled → {config.endpoint} "
+            f"(project: {config.project})"
+        )
+    except ImportError:
+        logger.debug(
+            "langchain-core not installed — auto-trace unavailable. "
+            "Use lens.callback() manually or install langchain-core."
+        )
+
+
+def _install_via_env(cfg: Any) -> None:
+    """
+    Hook into LangChain's global callback system.
+
+    LangChain checks for callbacks in multiple places. We register our handler
+    as a global default so it runs on every invocation.
+    """
+    from agentlens.callbacks.client import IngestClient
+    from agentlens.callbacks.langchain_cb import AgentLensLangChainCallback
+
+    client = IngestClient(base_url=cfg.endpoint)
+
+    handler = AgentLensLangChainCallback(
+        client=client,
+        trace_name=cfg.project,
+        tags=cfg.tags,
+        metadata={"project": cfg.project, "auto_trace": True},
+    )
+
+    # Method 1: Set as environment-level default callbacks
+    try:
+        import langchain_core
+
+        if hasattr(langchain_core, "callbacks"):
+            from langchain_core.globals import set_llm_cache
+            pass
+    except Exception:
+        pass
+
+    # Method 2: Monkey-patch the default callback manager constructor
+    # This is the most reliable approach (same pattern LangSmith uses)
+    try:
+        from langchain_core.callbacks.manager import CallbackManager
+
+        _original_configure = CallbackManager.configure
+
+        @classmethod  # type: ignore
+        def _patched_configure(
+            cls,
+            inheritable_callbacks=None,
+            local_callbacks=None,
+            verbose=False,
+            inheritable_tags=None,
+            local_tags=None,
+            inheritable_metadata=None,
+            local_metadata=None,
+        ):
+            # Create a fresh handler for each run to avoid state leaks
+            run_handler = AgentLensLangChainCallback(
+                client=client,
+                trace_name=cfg.project,
+                tags=cfg.tags,
+                metadata={"project": cfg.project, "auto_trace": True},
+            )
+
+            # Inject our handler into the inheritable callbacks
+            if inheritable_callbacks is None:
+                inheritable_callbacks = [run_handler]
+            elif isinstance(inheritable_callbacks, list):
+                # Don't add duplicates
+                has_agentlens = any(
+                    isinstance(cb, AgentLensLangChainCallback)
+                    for cb in inheritable_callbacks
+                )
+                if not has_agentlens:
+                    inheritable_callbacks = [run_handler] + list(inheritable_callbacks)
+            elif isinstance(inheritable_callbacks, CallbackManager):
+                has_agentlens = any(
+                    isinstance(cb, AgentLensLangChainCallback)
+                    for cb in inheritable_callbacks.handlers
+                )
+                if not has_agentlens:
+                    inheritable_callbacks.add_handler(run_handler)
+
+            return _original_configure.__func__(
+                cls,
+                inheritable_callbacks=inheritable_callbacks,
+                local_callbacks=local_callbacks,
+                verbose=verbose,
+                inheritable_tags=inheritable_tags,
+                local_tags=local_tags,
+                inheritable_metadata=inheritable_metadata,
+                local_metadata=local_metadata,
+            )
+
+        CallbackManager.configure = _patched_configure
+        logger.debug("AgentLens auto-trace installed via CallbackManager patch")
+
+    except Exception as e:
+        logger.warning(f"Could not install auto-trace: {e}")
+
+
+def disable_auto_trace() -> None:
+    """Remove AgentLens from global callbacks."""
+    global _auto_trace_installed
+    try:
+        from langchain_core.callbacks.manager import CallbackManager
+
+        if hasattr(CallbackManager.configure, "__wrapped__"):
+            CallbackManager.configure = CallbackManager.configure.__wrapped__
+    except Exception:
+        pass
+    _auto_trace_installed = False
+    logger.info("AgentLens auto-tracing disabled")
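The core of `_install_via_env()` is "Method 2": wrapping `CallbackManager.configure` so that every callback manager LangChain builds already contains an AgentLens handler. Below is a stripped-down toy of that pattern with no LangChain dependency; `Manager` and `LoggingHandler` are illustrative names only, not part of AgentLens or LangChain.

```python
class Manager:
    """Stand-in for LangChain's CallbackManager."""

    def __init__(self, handlers):
        self.handlers = handlers

    @classmethod
    def configure(cls, handlers=None):
        return cls(handlers or [])


class LoggingHandler:
    """Stand-in for AgentLensLangChainCallback."""

    def on_event(self, name):
        print(f"traced: {name}")


_original = Manager.configure  # keep a reference so the patch can delegate


@classmethod
def _patched(cls, handlers=None):
    handlers = list(handlers or [])
    # Inject the default handler once, mirroring the duplicate check in auto.py.
    if not any(isinstance(h, LoggingHandler) for h in handlers):
        handlers.insert(0, LoggingHandler())
    return _original.__func__(cls, handlers)


Manager.configure = _patched

mgr = Manager.configure()               # caller passed no handlers...
mgr.handlers[0].on_event("llm_start")   # ...but the injected one is there: prints "traced: llm_start"
```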
--- /dev/null
+++ traceagent-0.1.0/agentlens/callbacks/__init__.py
@@ -0,0 +1 @@
+"""Callbacks package — framework integrations."""
--- /dev/null
+++ traceagent-0.1.0/agentlens/callbacks/client.py
@@ -0,0 +1,61 @@
+"""HTTP client for sending trace data to the AgentLens server."""
+
+from __future__ import annotations
+
+import json
+import logging
+import threading
+import queue
+from typing import Any
+from urllib.request import Request, urlopen
+from urllib.error import URLError
+
+logger = logging.getLogger("agentlens")
+
+
+class IngestClient:
+    """
+    Lightweight HTTP client that sends trace/span data to the AgentLens server.
+    Uses a background thread so tracing never blocks your agent.
+    """
+
+    def __init__(self, base_url: str = "http://localhost:6832"):
+        self.base_url = base_url.rstrip("/")
+        self._queue: queue.Queue[tuple[str, str, dict]] = queue.Queue(maxsize=10_000)
+        self._thread = threading.Thread(target=self._worker, daemon=True)
+        self._thread.start()
+
+    def _worker(self) -> None:
+        """Background worker that drains the queue and sends to server."""
+        while True:
+            try:
+                method, path, data = self._queue.get(timeout=1.0)
+                self._send(method, path, data)
+            except queue.Empty:
+                continue
+            except Exception as e:
+                logger.debug(f"AgentLens ingest error: {e}")
+
+    def _send(self, method: str, path: str, data: dict) -> None:
+        url = f"{self.base_url}/api{path}"
+        body = json.dumps(data, default=str).encode("utf-8")
+        req = Request(url, data=body, method=method)
+        req.add_header("Content-Type", "application/json")
+        try:
+            with urlopen(req, timeout=5) as resp:
+                resp.read()
+        except URLError:
+            # Server might not be running — that's fine, don't crash user's code
+            pass
+
+    def send_trace(self, trace_data: dict) -> None:
+        self._queue.put(("POST", "/ingest/trace", {"trace": trace_data}))
+
+    def update_trace(self, trace_data: dict) -> None:
+        self._queue.put(("POST", "/ingest/trace/update", {"trace": trace_data}))
+
+    def send_span(self, span_data: dict) -> None:
+        self._queue.put(("POST", "/ingest/span", {"span": span_data}))
+
+    def update_span(self, span_data: dict) -> None:
+        self._queue.put(("POST", "/ingest/span/update", {"span": span_data}))
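`IngestClient` never blocks the caller: every public method just enqueues a `(method, path, payload)` tuple for the daemon thread, and delivery failures are swallowed. A minimal usage sketch follows; the payload fields (`id`, `name`, `status`) are illustrative, since the real schema is defined in `agentlens/models.py`, which is not shown in this excerpt.

```python
from agentlens.callbacks.client import IngestClient

client = IngestClient(base_url="http://localhost:6832")

# Enqueued, then POSTed to /api/ingest/trace by the background worker.
client.send_trace({"id": "trace-123", "name": "demo", "status": "running"})

# Later, enqueued and POSTed to /api/ingest/trace/update.
client.update_trace({"id": "trace-123", "status": "completed"})

# Both calls return immediately; if no server is listening, the data is dropped silently.
```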
--- /dev/null
+++ traceagent-0.1.0/agentlens/callbacks/decorator.py
@@ -0,0 +1,142 @@
+"""Decorator-based tracing for any Python function."""
+
+from __future__ import annotations
+
+import functools
+import inspect
+import traceback
+from typing import Any, Callable, TypeVar
+
+from agentlens.callbacks.client import IngestClient
+from agentlens.models import Span, SpanKind, Trace, _new_id
+
+F = TypeVar("F", bound=Callable[..., Any])
+
+
+class TracerContext:
+    """Manages trace context for decorator-based tracing."""
+
+    def __init__(self, client: IngestClient):
+        self._client = client
+        self._current_trace_id: str | None = None
+        self._span_stack: list[str] = []  # stack of span IDs
+
+    @property
+    def current_trace_id(self) -> str | None:
+        return self._current_trace_id
+
+    @property
+    def current_span_id(self) -> str | None:
+        return self._span_stack[-1] if self._span_stack else None
+
+    def start_trace(self, name: str, inputs: dict | None = None, **kwargs: Any) -> Trace:
+        trace = Trace(name=name, inputs=inputs, **kwargs)
+        self._current_trace_id = trace.id
+        self._span_stack.clear()
+        self._client.send_trace(trace.model_dump())
+        return trace
+
+    def end_trace(self, trace: Trace, outputs: dict | None = None, error: str | None = None):
+        trace.finish(outputs=outputs, error=error)
+        self._client.update_trace(trace.model_dump())
+        self._current_trace_id = None
+        self._span_stack.clear()
+
+    def start_span(
+        self,
+        name: str,
+        kind: SpanKind = SpanKind.CUSTOM,
+        inputs: dict | None = None,
+        **kwargs: Any,
+    ) -> Span:
+        if not self._current_trace_id:
+            # Auto-create a trace
+            trace = self.start_trace(name)
+
+        span = Span(
+            trace_id=self._current_trace_id,
+            parent_id=self.current_span_id,
+            name=name,
+            kind=kind,
+            inputs=inputs,
+            **kwargs,
+        )
+        self._span_stack.append(span.id)
+        self._client.send_span(span.model_dump())
+        return span
+
+    def end_span(self, span: Span, outputs: dict | None = None, error: str | None = None):
+        span.finish(outputs=outputs, error=error)
+        self._client.update_span(span.model_dump())
+        if self._span_stack and self._span_stack[-1] == span.id:
+            self._span_stack.pop()
+
+
+def observe(
+    client: IngestClient,
+    tracer: TracerContext,
+    name: str | None = None,
+    kind: SpanKind = SpanKind.CUSTOM,
+) -> Callable[[F], F]:
+    """
+    Decorator that traces a function call as a span.
+
+    Usage:
+        lens = AgentLens()
+
+        @lens.observe(name="search", kind="tool")
+        def search(query: str):
+            return results
+    """
+
+    def decorator(fn: F) -> F:
+        span_name = name or fn.__qualname__
+
+        if inspect.iscoroutinefunction(fn):
+
+            @functools.wraps(fn)
+            async def async_wrapper(*args: Any, **kwargs: Any) -> Any:
+                # Capture inputs
+                sig = inspect.signature(fn)
+                try:
+                    bound = sig.bind(*args, **kwargs)
+                    bound.apply_defaults()
+                    inputs = {k: repr(v)[:500] for k, v in bound.arguments.items()}
+                except Exception:
+                    inputs = {"args": repr(args)[:500], "kwargs": repr(kwargs)[:500]}
+
+                span = tracer.start_span(span_name, kind=kind, inputs=inputs)
+                try:
+                    result = await fn(*args, **kwargs)
+                    tracer.end_span(span, outputs={"result": repr(result)[:1000]})
+                    return result
+                except Exception as e:
+                    tracer.end_span(span, error=f"{type(e).__name__}: {e}")
+                    raise
+
+            return async_wrapper  # type: ignore
+
+        else:
+
+            @functools.wraps(fn)
+            def sync_wrapper(*args: Any, **kwargs: Any) -> Any:
+                sig = inspect.signature(fn)
+                try:
+                    bound = sig.bind(*args, **kwargs)
+                    bound.apply_defaults()
+                    inputs = {k: repr(v)[:500] for k, v in bound.arguments.items()}
+                except Exception:
+                    inputs = {"args": repr(args)[:500], "kwargs": repr(kwargs)[:500]}
+
+                span = tracer.start_span(span_name, kind=kind, inputs=inputs)
+                try:
+                    result = fn(*args, **kwargs)
+                    tracer.end_span(span, outputs={"result": repr(result)[:1000]})
+                    return result
+                except Exception as e:
+                    tracer.end_span(span, error=f"{type(e).__name__}: {e}")
+                    raise
+
+            return sync_wrapper  # type: ignore
+
+    return decorator
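The module-level `observe()` takes the client and tracer explicitly; the `AgentLens.observe()` method shown in the README presumably wires these in for you (the contents of `agentlens/core.py` are not part of this excerpt). A minimal sketch using this module directly, under that assumption; `add` is an illustrative function, and `SpanKind.CUSTOM` is the default kind imported above from `agentlens.models`.

```python
from agentlens.callbacks.client import IngestClient
from agentlens.callbacks.decorator import TracerContext, observe
from agentlens.models import SpanKind

client = IngestClient(base_url="http://localhost:6832")
tracer = TracerContext(client)

# Trace a plain function as a CUSTOM span; inputs and outputs are captured via repr().
@observe(client, tracer, name="add", kind=SpanKind.CUSTOM)
def add(a: int, b: int) -> int:
    return a + b

add(2, 3)  # no active trace yet, so start_span() auto-creates one named "add"
```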