fiddler-langgraph 0.1.1__tar.gz → 1.0.0__tar.gz
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- {fiddler_langgraph-0.1.1/fiddler_langgraph.egg-info → fiddler_langgraph-1.0.0}/PKG-INFO +9 -4
- {fiddler_langgraph-0.1.1 → fiddler_langgraph-1.0.0}/PUBLIC.md +5 -5
- {fiddler_langgraph-0.1.1 → fiddler_langgraph-1.0.0}/README.md +8 -3
- fiddler_langgraph-1.0.0/fiddler_langgraph/VERSION +1 -0
- {fiddler_langgraph-0.1.1 → fiddler_langgraph-1.0.0}/fiddler_langgraph/core/attributes.py +0 -21
- {fiddler_langgraph-0.1.1 → fiddler_langgraph-1.0.0}/fiddler_langgraph/core/client.py +59 -27
- fiddler_langgraph-1.0.0/fiddler_langgraph/tracing/instrumentation.py +536 -0
- {fiddler_langgraph-0.1.1 → fiddler_langgraph-1.0.0/fiddler_langgraph.egg-info}/PKG-INFO +9 -4
- fiddler_langgraph-0.1.1/fiddler_langgraph/VERSION +0 -1
- fiddler_langgraph-0.1.1/fiddler_langgraph/tracing/instrumentation.py +0 -264
- {fiddler_langgraph-0.1.1 → fiddler_langgraph-1.0.0}/MANIFEST.in +0 -0
- {fiddler_langgraph-0.1.1 → fiddler_langgraph-1.0.0}/fiddler_langgraph/__init__.py +0 -0
- {fiddler_langgraph-0.1.1 → fiddler_langgraph-1.0.0}/fiddler_langgraph/core/__init__.py +0 -0
- {fiddler_langgraph-0.1.1 → fiddler_langgraph-1.0.0}/fiddler_langgraph/core/span_processor.py +0 -0
- {fiddler_langgraph-0.1.1 → fiddler_langgraph-1.0.0}/fiddler_langgraph/tracing/__init__.py +0 -0
- {fiddler_langgraph-0.1.1 → fiddler_langgraph-1.0.0}/fiddler_langgraph/tracing/callback.py +0 -0
- {fiddler_langgraph-0.1.1 → fiddler_langgraph-1.0.0}/fiddler_langgraph/tracing/jsonl_capture.py +0 -0
- {fiddler_langgraph-0.1.1 → fiddler_langgraph-1.0.0}/fiddler_langgraph/tracing/util.py +0 -0
- {fiddler_langgraph-0.1.1 → fiddler_langgraph-1.0.0}/fiddler_langgraph.egg-info/SOURCES.txt +0 -0
- {fiddler_langgraph-0.1.1 → fiddler_langgraph-1.0.0}/fiddler_langgraph.egg-info/dependency_links.txt +0 -0
- {fiddler_langgraph-0.1.1 → fiddler_langgraph-1.0.0}/fiddler_langgraph.egg-info/requires.txt +0 -0
- {fiddler_langgraph-0.1.1 → fiddler_langgraph-1.0.0}/fiddler_langgraph.egg-info/top_level.txt +0 -0
- {fiddler_langgraph-0.1.1 → fiddler_langgraph-1.0.0}/pyproject.toml +0 -0
- {fiddler_langgraph-0.1.1 → fiddler_langgraph-1.0.0}/setup.cfg +0 -0
{fiddler_langgraph-0.1.1/fiddler_langgraph.egg-info → fiddler_langgraph-1.0.0}/PKG-INFO

@@ -1,6 +1,6 @@
 Metadata-Version: 2.4
 Name: fiddler-langgraph
-Version: 0.1.1
+Version: 1.0.0
 Summary: Python SDK for instrumenting GenAI Applications with Fiddler
 Home-page: https://fiddler.ai
 Author: Fiddler AI
@@ -87,7 +87,7 @@ from fiddler_langgraph import FiddlerClient
 
 # Initialize the FiddlerClient with basic configuration
 client = FiddlerClient(
-    url="https://
+    url="https://your-instance.fiddler.ai",
     api_key="fdl_api_key",
     application_id="fdl_application_id"  # Must be a valid UUID4
 )
@@ -152,7 +152,7 @@ The Fiddler SDK provides flexible configuration options for OpenTelemetry integr
 client = FiddlerClient(
     api_key="your-api-key",
     application_id="your-app-id",  # Must be a valid UUID4
-    url="https://
+    url="https://your-instance.fiddler.ai"
 )
 ```
 
@@ -178,6 +178,7 @@ sampler = sampling.TraceIdRatioBased(0.1)  # Sample 10% of traces
 client = FiddlerClient(
     api_key="your-api-key",
     application_id="your-app-id",
+    url="https://your-instance.fiddler.ai",
     span_limits=custom_limits,
     sampler=sampler,
     console_tracer=False,  # Set to True for debugging
@@ -196,6 +197,7 @@ from opentelemetry.exporter.otlp.proto.http.trace_exporter import Compression
 client = FiddlerClient(
     api_key="your-api-key",
     application_id="your-app-id",
+    url="https://your-instance.fiddler.ai",
     compression=Compression.Gzip,
 )
 
@@ -203,6 +205,7 @@ client = FiddlerClient(
 client = FiddlerClient(
     api_key="your-api-key",
     application_id="your-app-id",
+    url="https://your-instance.fiddler.ai",
     compression=Compression.NoCompression,
 )
 
@@ -210,6 +213,7 @@ client = FiddlerClient(
 client = FiddlerClient(
     api_key="your-api-key",
     application_id="your-app-id",
+    url="https://your-instance.fiddler.ai",
     compression=Compression.Deflate,
 )
 ```
@@ -229,7 +233,8 @@ os.environ['OTEL_BSP_EXPORT_TIMEOUT'] = '10000'
 
 client = FiddlerClient(
     api_key="your-api-key",
-    application_id="your-app-id"
+    application_id="your-app-id",
+    url="https://your-instance.fiddler.ai"
 )
 ```
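
The last hunk above combines the batch-span-processor environment variables with the now-explicit `url` argument. A minimal sketch of that pattern, assuming the standard OpenTelemetry `OTEL_BSP_*` variables and placeholder credentials; the values are illustrative, and the variables are typically set before the client initializes its tracer:

```python
import os

from fiddler_langgraph import FiddlerClient

# Batch-span-processor export timeout in milliseconds, as in the hunk above.
os.environ['OTEL_BSP_EXPORT_TIMEOUT'] = '10000'

# url is required in 1.0.0; the key and application ID here are placeholders.
client = FiddlerClient(
    api_key="your-api-key",
    application_id="your-app-id",
    url="https://your-instance.fiddler.ai",
)
```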
{fiddler_langgraph-0.1.1 → fiddler_langgraph-1.0.0}/PUBLIC.md

@@ -30,7 +30,7 @@ pip install fiddler-langgraph
 
 ## Requirements
 
-- Python 3.10, 3.11, 3.12,
+- Python 3.10, 3.11, 3.12, or 3.13
 - LangGraph >= 0.3.28 and <= 1.0.2 or Langchain >= 0.3.28 and <= 1.0.2
 
 ### With Example Dependencies
@@ -49,7 +49,7 @@ from fiddler_langgraph.tracing.instrumentation import LangGraphInstrumentor, set
 
 # Initialize the FiddlerClient
 client = FiddlerClient(
-    url="https://
+    url="https://your-instance.fiddler.ai",
    api_key="your-api-key",
     application_id="your-application-id"  # Must be a valid UUID4
 )
@@ -73,9 +73,8 @@ app.invoke({"messages": [{"role": "user", "content": "Write a novel"}]})
 ## Documentation
 
 * 📚 [Complete Documentation](https://docs.fiddler.ai/)
-* 🚀 [
-* 📖 [API Reference](https://docs.fiddler.ai/)
-* 💡 [Example Notebooks](https://github.com/fiddler-labs/fiddler-examples)
+* 🚀 [LangGraph Quick Start Guide](https://docs.fiddler.ai/developers/tutorials/llm-monitoring/langgraph-sdk-quick-start)
+* 📖 [API Reference](https://docs.fiddler.ai/api/fiddler-langgraph-sdk/langgraph)
 
 ## Example Usage
 
@@ -132,6 +131,7 @@ sampler = sampling.TraceIdRatioBased(0.1)  # Sample 10% of traces
 client = FiddlerClient(
     api_key="your-api-key",
     application_id="your-app-id",
+    url="https://your-instance.fiddler.ai",
     span_limits=custom_limits,
     sampler=sampler,
     console_tracer=False,  # Set to True for debugging
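
The quick-start hunks above only show the client construction and the documentation links. A minimal end-to-end sketch of the flow they describe, assuming placeholder credentials and an OpenAI-backed agent standing in for the guide's `app` (an OpenAI API key would be needed to actually invoke it):

```python
from langchain_openai import ChatOpenAI
from langgraph.prebuilt import create_react_agent

from fiddler_langgraph import FiddlerClient
from fiddler_langgraph.tracing.instrumentation import LangGraphInstrumentor, set_conversation_id

client = FiddlerClient(
    url="https://your-instance.fiddler.ai",  # placeholder instance URL
    api_key="your-api-key",                  # placeholder credentials
    application_id="your-application-id",    # must be a valid UUID4
)

# Patch LangChain's callback manager so every run of the graph is traced.
LangGraphInstrumentor(client=client).instrument()

# Group all spans from this run under one conversation ID.
set_conversation_id("quickstart-conversation-1")

# A trivial agent standing in for the quick start's `app`.
app = create_react_agent(ChatOpenAI(model="gpt-4"), tools=[])
app.invoke({"messages": [{"role": "user", "content": "Write a novel"}]})
```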
{fiddler_langgraph-0.1.1 → fiddler_langgraph-1.0.0}/README.md

@@ -33,7 +33,7 @@ from fiddler_langgraph import FiddlerClient
 
 # Initialize the FiddlerClient with basic configuration
 client = FiddlerClient(
-    url="https://
+    url="https://your-instance.fiddler.ai",
     api_key="fdl_api_key",
     application_id="fdl_application_id"  # Must be a valid UUID4
 )
@@ -98,7 +98,7 @@ The Fiddler SDK provides flexible configuration options for OpenTelemetry integr
 client = FiddlerClient(
     api_key="your-api-key",
     application_id="your-app-id",  # Must be a valid UUID4
-    url="https://
+    url="https://your-instance.fiddler.ai"
 )
 ```
 
@@ -124,6 +124,7 @@ sampler = sampling.TraceIdRatioBased(0.1)  # Sample 10% of traces
 client = FiddlerClient(
     api_key="your-api-key",
     application_id="your-app-id",
+    url="https://your-instance.fiddler.ai",
     span_limits=custom_limits,
     sampler=sampler,
     console_tracer=False,  # Set to True for debugging
@@ -142,6 +143,7 @@ from opentelemetry.exporter.otlp.proto.http.trace_exporter import Compression
 client = FiddlerClient(
     api_key="your-api-key",
     application_id="your-app-id",
+    url="https://your-instance.fiddler.ai",
     compression=Compression.Gzip,
 )
 
@@ -149,6 +151,7 @@ client = FiddlerClient(
 client = FiddlerClient(
     api_key="your-api-key",
     application_id="your-app-id",
+    url="https://your-instance.fiddler.ai",
     compression=Compression.NoCompression,
 )
 
@@ -156,6 +159,7 @@ client = FiddlerClient(
 client = FiddlerClient(
     api_key="your-api-key",
     application_id="your-app-id",
+    url="https://your-instance.fiddler.ai",
     compression=Compression.Deflate,
 )
 ```
@@ -175,7 +179,8 @@ os.environ['OTEL_BSP_EXPORT_TIMEOUT'] = '10000'
 
 client = FiddlerClient(
     api_key="your-api-key",
-    application_id="your-app-id"
+    application_id="your-app-id",
+    url="https://your-instance.fiddler.ai"
 )
 ```
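
Several of the README hunks above add `url` to configurations that also tune span limits, sampling, and export compression. A combined sketch of those options, using the same OpenTelemetry imports and calls that appear in the diff; the limit and sampling ratio are illustrative:

```python
from opentelemetry.exporter.otlp.proto.http.trace_exporter import Compression
from opentelemetry.sdk.trace import SpanLimits, sampling

from fiddler_langgraph import FiddlerClient

custom_limits = SpanLimits(max_span_attributes=64)  # illustrative limit
sampler = sampling.TraceIdRatioBased(0.1)           # Sample 10% of traces

client = FiddlerClient(
    api_key="your-api-key",
    application_id="your-app-id",
    url="https://your-instance.fiddler.ai",
    span_limits=custom_limits,
    sampler=sampler,
    compression=Compression.Gzip,  # Gzip, Deflate, or NoCompression per the README
    console_tracer=False,          # Set to True for debugging
)
```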
fiddler_langgraph-1.0.0/fiddler_langgraph/VERSION (new file)

@@ -0,0 +1 @@
+1.0.0
{fiddler_langgraph-0.1.1 → fiddler_langgraph-1.0.0}/fiddler_langgraph/core/attributes.py

@@ -3,8 +3,6 @@
 import contextvars
 from typing import Any
 
-from pydantic import ConfigDict, validate_call
-
 # Key used for storing Fiddler-specific attributes in metadata dictionary
 FIDDLER_METADATA_KEY = '_fiddler_attributes'
 
@@ -66,22 +64,3 @@ _CONVERSATION_ID: contextvars.ContextVar[str] = contextvars.ContextVar(
 _CUSTOM_ATTRIBUTES: contextvars.ContextVar[dict[str, Any]] = contextvars.ContextVar(
     '_CUSTOM_ATTRIBUTES'
 )
-
-
-@validate_call(config=ConfigDict(strict=True, arbitrary_types_allowed=True))
-def add_session_attributes(key: str, value: str) -> None:
-    """Adds Fiddler-specific attributes to a runnable's metadata.
-
-    This is used for various runnable types like Pregel nodes, LLM calls, tool
-    calls, and retriever calls.
-
-    Args:
-        key (str): The attribute key to add or update.
-        value (str): The attribute value to set.
-    """
-    try:
-        current_attributes = _CUSTOM_ATTRIBUTES.get().copy()
-    except LookupError:
-        current_attributes = {}
-    current_attributes[key] = value
-    _CUSTOM_ATTRIBUTES.set(current_attributes)
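
This removal is a relocation rather than a deletion: `add_session_attributes` reappears in `fiddler_langgraph/tracing/instrumentation.py` later in this diff. A minimal sketch of the import path 1.0.0 code would use, with placeholder attribute values:

```python
# In 1.0.0 the helper lives in the tracing module, not core.attributes.
from fiddler_langgraph.tracing.instrumentation import add_session_attributes

add_session_attributes("user_id", "user_12345")      # values are placeholders
add_session_attributes("environment", "production")  # applies to all spans in this context
```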
{fiddler_langgraph-0.1.1 → fiddler_langgraph-1.0.0}/fiddler_langgraph/core/client.py

@@ -60,7 +60,7 @@ class FiddlerClient:
         self,
         api_key: str,
         application_id: str,
-        url: str
+        url: str,
         console_tracer: bool = False,
         span_limits: SpanLimits | None = _default_span_limits,
         sampler: sampling.Sampler | None = None,
@@ -76,41 +76,69 @@ class FiddlerClient:
         Args:
             api_key (str): The API key for authenticating with the Fiddler backend. **Required**.
             application_id (str): The unique identifier (UUID4) for the application. **Required**.
-            url (str): The base URL for
-
-
+            url (str): The base URL for your Fiddler instance. This is specific to your
+                deployment, whether hosted, VPC-deployed, on-premise, or local development
+                (e.g., `https://your-instance.fiddler.ai`, `http://localhost:4318`). **Required**.
             console_tracer (bool): If True, traces will be printed to the console
                 instead of being sent to the Fiddler backend. Useful for debugging.
                 Defaults to `False`.
             span_limits (SpanLimits | None): Configuration for span limits, such as the
                 maximum number of attributes or events. Defaults to a restrictive
-                set of internal limits.
+                set of internal limits (32 events/links/attributes, 2048 char limit).
             sampler (sampling.Sampler | None): The sampler for deciding which spans to record.
-                Defaults to `None`, which uses the parent-based OpenTelemetry sampler
+                Defaults to `None`, which uses the parent-based always-on OpenTelemetry sampler
+                (100% sampling).
             compression (Compression): The compression for exporting traces.
-                Can be `Compression.Gzip` or `Compression.NoCompression`.
-                Defaults to `Compression.Gzip
+                Can be `Compression.Gzip`, `Compression.Deflate`, or `Compression.NoCompression`.
+                Defaults to `Compression.Gzip` (recommended for production).
             jsonl_capture_enabled (bool): Whether to enable JSONL capture of trace data.
                 When enabled, all span data will be captured and saved to a JSONL file
-                in OpenTelemetry format for analysis. Defaults to `False`.
+                in OpenTelemetry format for offline analysis. Defaults to `False`.
             jsonl_file_path (str): Path to the JSONL file where trace data will be saved.
                 Only used when `jsonl_capture_enabled` is `True`. Defaults to
                 "fiddler_trace_data.jsonl".
 
         Raises:
             ValueError: If `application_id` is not a valid UUID4 or if the
-                `url` is not a valid HTTPS URL.
+                `url` is not a valid HTTP/HTTPS URL.
 
         Examples:
-
-
-
-
-
-
-
-
-
+            Basic connection to your Fiddler instance:
+
+            .. code-block:: python
+
+                client = FiddlerClient(
+                    api_key='YOUR_API_KEY',
+                    application_id='YOUR_APPLICATION_ID',
+                    url='https://your-instance.fiddler.ai',
+                )
+
+            High-volume applications with custom configuration:
+
+            .. code-block:: python
+
+                from opentelemetry.sdk.trace import SpanLimits, sampling
+                from opentelemetry.exporter.otlp.proto.http.trace_exporter import Compression
+
+                client = FiddlerClient(
+                    api_key='YOUR_API_KEY',
+                    application_id='YOUR_APPLICATION_ID',
+                    url='https://your-instance.fiddler.ai',
+                    span_limits=SpanLimits(max_span_attributes=64),
+                    sampler=sampling.TraceIdRatioBased(0.1),  # Sample 10% of traces
+                    compression=Compression.Gzip,
+                )
+
+            Local development with console output:
+
+            .. code-block:: python
+
+                client = FiddlerClient(
+                    api_key='dev-key',
+                    application_id='00000000-0000-0000-0000-000000000000',
+                    url='http://localhost:4318',
+                    console_tracer=True,  # Print traces to console for debugging
+                )
         """
         # Validate application_id is a valid UUID4
 
@@ -216,9 +244,11 @@ class FiddlerClient:
             ValueError: If the tracer has already been initialized.
 
         Examples:
-
-
-
+            .. code-block:: python
+
+                from fiddler_langgraph import FiddlerClient
+                client = FiddlerClient(api_key='...', application_id='...', url='https://your-instance.fiddler.ai')
+                client.update_resource({'service.version': '1.2.3'})
         """
         if self._tracer is not None:
             raise ValueError('Cannot update resource after tracer is initialized')
@@ -305,11 +335,13 @@ class FiddlerClient:
             RuntimeError: If tracer initialization fails.
 
         Examples:
-
-
-
-
-
+            .. code-block:: python
+
+                from fiddler_langgraph import FiddlerClient
+                client = FiddlerClient(api_key='...', application_id='...', url='https://your-instance.fiddler.ai')
+                tracer = client.get_tracer()
+                with tracer.start_as_current_span('my-operation'):
+                    print('Doing some work...')
         """
         if self._tracer is None:
             self._initialize_tracer()
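
The constructor docstring above promises a `ValueError` for an `application_id` that is not a valid UUID4, and the first statement after the docstring is a validation step. A sketch of the kind of check that description implies, using only the standard library; this is illustrative and not necessarily the SDK's internal implementation:

```python
import uuid

def _looks_like_uuid4(value: str) -> bool:
    # uuid.UUID raises ValueError on malformed input; .version exposes the UUID version.
    try:
        return uuid.UUID(value).version == 4
    except ValueError:
        return False

print(_looks_like_uuid4("123e4567-e89b-42d3-a456-426614174000"))  # True
print(_looks_like_uuid4("not-a-uuid"))                            # False
```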
fiddler_langgraph-1.0.0/fiddler_langgraph/tracing/instrumentation.py (new file)

@@ -0,0 +1,536 @@
+"""LangGraph instrumentation module for Fiddler."""
+
+from collections.abc import Callable, Collection
+from typing import Any, cast
+
+from langchain_core.callbacks import BaseCallbackManager
+from langchain_core.language_models import BaseLanguageModel
+from langchain_core.retrievers import BaseRetriever
+from langchain_core.runnables import RunnableBinding
+from langchain_core.tools import BaseTool
+from opentelemetry.instrumentation.instrumentor import (  # type: ignore[attr-defined]
+    BaseInstrumentor,
+)
+from pydantic import ConfigDict, validate_call
+from wrapt import wrap_function_wrapper
+
+from fiddler_langgraph.core.attributes import (
+    _CONVERSATION_ID,
+    _CUSTOM_ATTRIBUTES,
+    FIDDLER_METADATA_KEY,
+    FiddlerSpanAttributes,
+)
+from fiddler_langgraph.core.client import FiddlerClient
+from fiddler_langgraph.tracing.callback import _CallbackHandler
+from fiddler_langgraph.tracing.util import _check_langgraph_version, _get_package_version
+
+
+@validate_call(config=ConfigDict(strict=True))
+def set_conversation_id(conversation_id: str) -> None:
+    """Enables end-to-end tracing of multi-step workflows and conversations.
+
+    The primary purpose of set_conversation_id is to enable end-to-end tracing
+    of a multi-step workflow. Modern agentic applications often involve a complex
+    sequence of events to fulfill a single user request. The result in your Fiddler
+    dashboard is that you can instantly filter for and view the entire, ordered
+    sequence of operations that constituted a single conversation or task. This is
+    crucial for debugging complex failures, analyzing latency across an entire
+    workflow, and understanding the agent's behavior from start to finish.
+
+    This will remain in use until it is called again with a new conversation ID.
+
+    Args:
+        conversation_id (str): Unique identifier for the conversation session. **Required**.
+
+    Returns:
+        None
+
+    Examples:
+        .. code-block:: python
+
+            from langgraph.prebuilt import create_react_agent
+            from fiddler_langgraph.tracing.instrumentation import set_conversation_id
+            import uuid
+
+            # Basic usage
+            agent = create_react_agent(model, tools=[])
+            conversation_id = str(uuid.uuid4())
+            set_conversation_id(conversation_id)
+            agent.invoke({"messages": [{"role": "user", "content": "Write me a novel"}]})
+
+            # Multi-turn conversation tracking
+            def handle_conversation(user_id, session_id):
+                # Create a unique conversation ID combining user and session
+                conversation_id = f"{user_id}_{session_id}_{uuid.uuid4()}"
+                set_conversation_id(conversation_id)
+                return conversation_id
+
+            # Different conversation types
+            business_conversation_id = f"business_{uuid.uuid4()}"
+            support_conversation_id = f"support_{uuid.uuid4()}"
+    """
+    _CONVERSATION_ID.set(conversation_id)
+
+
+@validate_call(config=ConfigDict(strict=True, arbitrary_types_allowed=True))
+def add_session_attributes(key: str, value: str) -> None:
+    """Adds custom session-level attributes that persist across all spans in the current context.
+
+    Session attributes are key-value pairs that apply to all operations within the current
+    execution context (thread or async coroutine). Use this to add metadata that describes
+    the session environment, such as user information, deployment environment, or feature flags.
+
+    These attributes are stored in context variables and automatically included in all spans
+    created during the session. They persist until the context ends or the attribute is updated
+    with a new value.
+
+    Note: Context variables are shallow copied - modifications to mutable values (lists, dicts)
+    are shared between contexts.
+
+    Args:
+        key (str): The attribute key to add or update. Will be formatted as
+            'fiddler.session.user.{key}' in the OpenTelemetry span. **Required**.
+        value (str): The attribute value to set. **Required**.
+
+    Returns:
+        None
+
+    Examples:
+        .. code-block:: python
+
+            from fiddler_langgraph.tracing.instrumentation import add_session_attributes
+
+            # Add user information to all spans in this session
+            add_session_attributes("user_id", "user_12345")
+            add_session_attributes("tier", "premium")
+
+            # Add deployment environment context
+            add_session_attributes("environment", "production")
+            add_session_attributes("region", "us-west-2")
+
+            # Update an existing attribute
+            add_session_attributes("user_id", "user_67890")  # Overwrites previous value
+    """
+    try:
+        current_attributes = _CUSTOM_ATTRIBUTES.get().copy()
+    except LookupError:
+        current_attributes = {}
+    current_attributes[key] = value
+    _CUSTOM_ATTRIBUTES.set(current_attributes)
+
+
+@validate_call(config=ConfigDict(strict=True, arbitrary_types_allowed=True))
+def _set_default_metadata(
+    node: BaseLanguageModel | BaseRetriever | BaseTool,
+) -> None:
+    """Ensures a node has the default Fiddler metadata dictionary.
+
+    If `node.metadata` does not exist or is not a dictionary, it will be
+    initialized. This function modifies the node in place.
+
+    Args:
+        node (BaseLanguageModel | BaseRetriever | BaseTool): The node to modify.
+    """
+    if not hasattr(node, 'metadata'):
+        node.metadata = {}
+    if not isinstance(node.metadata, dict):
+        node.metadata = {}
+    metadata = node.metadata
+    if FIDDLER_METADATA_KEY not in metadata:
+        metadata[FIDDLER_METADATA_KEY] = {}
+
+
+@validate_call(config=ConfigDict(strict=True, arbitrary_types_allowed=True))
+def add_span_attributes(
+    node: BaseLanguageModel | BaseRetriever | BaseTool,
+    **kwargs: Any,
+) -> None:
+    """Adds custom span-level attributes to a specific runnable component's metadata.
+
+    Span attributes are key-value pairs that apply to a specific component (LLM, tool, or retriever)
+    and are included in the OpenTelemetry spans created when that component executes. Use this to
+    add metadata that describes the component's configuration, purpose, or operational context.
+
+    Unlike session attributes (which apply to all spans in a context), span attributes are scoped
+    to individual components. This is useful for:
+    - Identifying which model or tool is being used
+    - Tagging components by purpose or category
+    - Adding version information or deployment metadata
+    - Tracking A/B test variants or experimental configurations
+
+    The attributes are stored in the component's metadata dictionary under the key
+    '_fiddler_attributes' and will be automatically included in spans when the component executes.
+    Attributes persist for the lifetime of the component instance.
+
+    Supported component types:
+    - **BaseLanguageModel**: LLM calls (ChatOpenAI, ChatAnthropic, etc.)
+    - **BaseRetriever**: Document retrieval operations
+    - **BaseTool**: Tool/function calls in agent workflows
+
+    Args:
+        node (BaseLanguageModel | BaseRetriever | BaseTool): The LangChain component to annotate
+            with custom attributes. The component's metadata will be modified in place. **Required**.
+        **kwargs (Any): Arbitrary keyword arguments representing the attributes to add. Each
+            key-value pair will be stored as a span attribute. Keys should be strings, and values
+            can be any type (though simple types like str, int, bool are recommended for
+            observability). **Required** (at least one attribute).
+
+    Returns:
+        None
+
+    Examples:
+        Tagging an LLM with model information:
+
+        .. code-block:: python
+
+            from langchain_openai import ChatOpenAI
+            from fiddler_langgraph.tracing.instrumentation import add_span_attributes
+
+            llm = ChatOpenAI(model="gpt-4")
+            add_span_attributes(
+                llm,
+                model_name="gpt-4",
+                provider="openai",
+                purpose="summarization"
+            )
+
+        Adding version and environment metadata:
+
+        .. code-block:: python
+
+            add_span_attributes(
+                llm,
+                version="v2.1.0",
+                environment="production",
+                region="us-west-2"
+            )
+
+        Tagging tools in a multi-tool agent:
+
+        .. code-block:: python
+
+            from langchain.tools import Tool
+
+            search_tool = Tool(
+                name="search",
+                func=search_function,
+                description="Search the web"
+            )
+            add_span_attributes(
+                search_tool,
+                tool_category="external_api",
+                rate_limit="100/min",
+                cost_per_call=0.001
+            )
+
+        A/B testing different retrievers:
+
+        .. code-block:: python
+
+            from langchain_community.vectorstores import FAISS
+
+            retriever_a = FAISS.from_documents(docs, embeddings).as_retriever()
+            add_span_attributes(
+                retriever_a,
+                variant="semantic_search",
+                experiment_id="exp_2024_q1",
+                retrieval_strategy="similarity"
+            )
+
+            retriever_b = FAISS.from_documents(docs, embeddings).as_retriever(
+                search_type="mmr"
+            )
+            add_span_attributes(
+                retriever_b,
+                variant="mmr_search",
+                experiment_id="exp_2024_q1",
+                retrieval_strategy="maximum_marginal_relevance"
+            )
+
+        Combining with session attributes:
+
+        .. code-block:: python
+
+            from fiddler_langgraph.tracing.instrumentation import (
+                add_session_attributes,
+                add_span_attributes,
+                set_conversation_id
+            )
+
+            # Session-level: applies to all spans
+            set_conversation_id("conv_12345")
+            add_session_attributes("user_id", "user_789")
+
+            # Span-level: applies only to this LLM's spans
+            llm = ChatOpenAI(model="gpt-4-turbo")
+            add_span_attributes(
+                llm,
+                model_tier="premium",
+                use_case="customer_support"
+            )
+
+    Note:
+        - Attributes are stored in the component's `metadata` dictionary, which persists
+          for the lifetime of the component instance
+        - If the component doesn't have a `metadata` attribute, one will be created
+        - Multiple calls to `add_span_attributes` on the same component will merge attributes
+        - Later calls with the same key will overwrite previous values
+        - This modifies the component in place - no need to reassign the return value
+    """
+    _set_default_metadata(node)
+    metadata = cast(dict[str, Any], node.metadata)
+    fiddler_attrs = cast(dict[str, Any], metadata.get(FIDDLER_METADATA_KEY, {}))
+    for key, value in kwargs.items():
+        fiddler_attrs[key] = value
+
+
+@validate_call(config=ConfigDict(strict=True))
+def set_llm_context(llm: BaseLanguageModel | RunnableBinding, context: str) -> None:
+    """Sets additional context information on a language model instance.
+
+    This context provides environmental or operational information that will be
+    attached to all spans created for this model. Use this to add relevant metadata
+    such as user preferences, session state, or runtime conditions that influenced
+    the LLM's behavior. This is valuable for debugging and understanding why the
+    model produced specific outputs.
+
+    Supports both `BaseLanguageModel` instances and `RunnableBinding` objects. When a
+    `RunnableBinding` is provided, the context is automatically set on the underlying
+    bound object (which must be a `BaseLanguageModel`).
+
+    For more information on RunnableBinding, see:
+    https://python.langchain.com/api_reference/core/runnables/langchain_core.runnables.base.RunnableBinding.html
+
+    Args:
+        llm (BaseLanguageModel | RunnableBinding): The language model instance or binding. **Required**.
+        context (str): The context string to add. This will be included in span attributes
+            as 'gen_ai.llm.context'. **Required**.
+
+    Raises:
+        TypeError: If a RunnableBinding is provided but its bound object is not a BaseLanguageModel.
+
+    Examples:
+        Basic usage with ChatOpenAI:
+
+        .. code-block:: python
+
+            from langchain_openai import ChatOpenAI
+            from fiddler_langgraph.tracing.instrumentation import set_llm_context
+
+            llm = ChatOpenAI(model="gpt-4")
+            set_llm_context(llm, "User prefers concise responses")
+
+        With user preferences:
+
+        .. code-block:: python
+
+            set_llm_context(llm, "User language: Spanish, Expertise: Beginner")
+
+        Using with RunnableBinding:
+
+        .. code-block:: python
+
+            bound_llm = llm.bind(temperature=0.7, max_tokens=100)
+            set_llm_context(bound_llm, "Creative writing mode with token limits")
+
+        Adding session context:
+
+        .. code-block:: python
+
+            import uuid
+            session_id = uuid.uuid4()
+            set_llm_context(llm, f"Session: {session_id}, Environment: Production")
+    """
+    if isinstance(llm, RunnableBinding):
+        if not isinstance(llm.bound, BaseLanguageModel):
+            raise TypeError(
+                'llm must be a BaseLanguageModel or a RunnableBinding of a BaseLanguageModel'
+            )
+        # RunnableBinding has config attribute (which can store metadata), however these are not passed
+        # to the callback handlers. So we need to use the bound object directly.
+        _llm = llm.bound
+    else:
+        _llm = llm
+
+    _set_default_metadata(_llm)
+
+    if _llm.metadata is None:
+        _llm.metadata = {}
+    fiddler_attrs = cast(dict[str, Any], _llm.metadata.get(FIDDLER_METADATA_KEY, {}))
+    fiddler_attrs[FiddlerSpanAttributes.LLM_CONTEXT] = context
+
+
+class LangGraphInstrumentor(BaseInstrumentor):
+    """An OpenTelemetry instrumentor for LangGraph applications.
+
+    This class provides automatic instrumentation for applications built with
+    LangGraph. It captures traces from the execution of LangGraph graphs and
+    sends them to the Fiddler platform for monitoring and analysis.
+
+    Instrumentation works by monkey-patching LangChain's callback system to inject
+    a custom callback handler that captures trace data. Once instrumented, all
+    LangGraph operations will automatically generate telemetry data.
+
+    Note: Instrumentation persists for the lifetime of the application unless
+    explicitly removed by calling `uninstrument()`. Calling `instrument()` multiple
+    times is safe - it will not create duplicate handlers.
+
+    Thread Safety: The instrumentation applies globally to the process and affects
+    all threads. In concurrent environments (multi-threading, async), all contexts
+    share the same instrumented callback system.
+
+    To use the instrumentor, you first need to create a `FiddlerClient`
+    instance. Then, you can create an instance of `LangGraphInstrumentor` and
+    call the `instrument()` method.
+
+    Examples:
+        Basic usage:
+
+        .. code-block:: python
+
+            from fiddler_langgraph import FiddlerClient
+            from fiddler_langgraph.tracing import LangGraphInstrumentor
+
+            client = FiddlerClient(api_key="...", application_id="...", url="https://your-instance.fiddler.ai")
+            instrumentor = LangGraphInstrumentor(client=client)
+            instrumentor.instrument()
+
+        Removing instrumentation:
+
+        .. code-block:: python
+
+            # Clean up instrumentation when shutting down
+            instrumentor.uninstrument()
+
+        Context manager pattern (advanced):
+
+        .. code-block:: python
+
+            with LangGraphInstrumentor(client).instrument():
+                # Instrumented operations here
+                agent.invoke({"messages": [...]})
+            # Automatically uninstrumented after block
+
+    Attributes:
+        _client (FiddlerClient): The FiddlerClient instance used for configuration.
+        _tracer (_CallbackHandler | None): The callback handler instance for tracing.
+        _langgraph_version: The installed LangGraph version.
+        _langchain_version: The installed LangChain Core version.
+        _fiddler_langgraph_version: The Fiddler LangGraph SDK version.
+    """
+
+    def __init__(self, client: FiddlerClient):
+        """Initializes the LangGraphInstrumentor.
+
+        Args:
+            client (FiddlerClient): The `FiddlerClient` instance. **Required**.
+
+        Raises:
+            ImportError: If LangGraph version is incompatible or not installed.
+        """
+        super().__init__()
+        self._client = client
+        self._langgraph_version = _get_package_version('langgraph')
+        self._langchain_version = _get_package_version('langchain_core')
+        self._fiddler_langgraph_version = _get_package_version('fiddler_langgraph')
+
+        self._client.update_resource(
+            {
+                'lib.langgraph.version': self._langgraph_version.public,
+                'lib.langchain_core.version': self._langchain_version.public,
+                'lib.fiddler-langgraph.version': self._fiddler_langgraph_version.public,
+            }
+        )
+        self._tracer: _CallbackHandler | None = None
+        self._original_callback_manager_init: Callable[..., None] | None = None
+
+        # Check LangGraph version compatibility - we don't add this to dependencies
+        # because we leave it to the user to install the correct version of LangGraph
+        # We will check if the user installed version is compatible with the version of fiddler-langgraph
+        _check_langgraph_version(self._langgraph_version)
+
+    def instrumentation_dependencies(self) -> Collection[str]:
+        """Returns the package dependencies required for this instrumentor.
+
+        Returns:
+            Collection[str]: A collection of package dependency strings.
+        """
+        return ('langchain_core >= 0.1.0',)
+
+    def _instrument(self, **kwargs: Any) -> None:
+        """Instruments LangGraph by monkey-patching `BaseCallbackManager`.
+
+        This method injects a custom callback handler into LangGraph's callback
+        system to capture trace data. This is done by wrapping the `__init__`
+        method of `BaseCallbackManager` to inject a `_CallbackHandler`.
+
+        Raises:
+            ValueError: If the tracer is not initialized in the FiddlerClient.
+        """
+        import langchain_core
+
+        tracer = self._client.get_tracer()
+        if tracer is None:
+            raise ValueError('Context tracer is not initialized')
+
+        self._tracer = _CallbackHandler(tracer)
+        self._original_callback_manager_init = langchain_core.callbacks.BaseCallbackManager.__init__
+        wrap_function_wrapper(
+            module='langchain_core.callbacks',
+            name='BaseCallbackManager.__init__',
+            wrapper=_BaseCallbackManagerInit(self._tracer),
+        )
+
+    def _uninstrument(self, **kwargs: Any) -> None:
+        """Removes the instrumentation from LangGraph.
+
+        This is done by restoring the original `__init__` method on the
+        `BaseCallbackManager` class.
+        """
+        import langchain_core
+
+        if self._original_callback_manager_init is not None:
+            setattr(  # noqa: B010
+                langchain_core.callbacks.BaseCallbackManager,
+                '__init__',
+                self._original_callback_manager_init,
+            )
+            self._original_callback_manager_init = None
+        self._tracer = None
+
+
+class _BaseCallbackManagerInit:
+    """A wrapper class for `BaseCallbackManager.__init__` to inject Fiddler's callback handler."""
+
+    __slots__ = ('_callback_handler',)
+
+    def __init__(self, callback_handler: _CallbackHandler):
+        """Initializes the wrapper.
+
+        Args:
+            callback_handler (_CallbackHandler): The Fiddler callback handler instance
+                to be injected into the callback manager.
+        """
+        self._callback_handler = callback_handler
+
+    def __call__(
+        self,
+        wrapped: Callable[..., None],
+        instance: 'BaseCallbackManager',
+        args: Any,
+        kwargs: Any,
+    ) -> None:
+        """Calls the original `__init__` and then adds the Fiddler handler.
+
+        It also ensures that the handler is not added multiple times if it
+        already exists in the list of inheritable handlers.
+        """
+        wrapped(*args, **kwargs)
+        for handler in instance.inheritable_handlers:
+            # Handlers may be copied when new managers are created, so we
+            # don't want to keep adding. E.g. see the following location.
+            # https://github.com/langchain-ai/langchain/blob/5c2538b9f7fb64afed2a918b621d9d8681c7ae32/libs/core/langchain_core/callbacks/manager.py#L1876
+            if isinstance(handler, type(self._callback_handler)):
+                break
+        else:
+            instance.add_handler(self._callback_handler, True)
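
The new module's core mechanism is `wrapt.wrap_function_wrapper` intercepting `BaseCallbackManager.__init__` and appending the Fiddler handler at most once. A toy, self-contained sketch of that same pattern (stand-in classes, not Fiddler's or LangChain's actual ones), to make the injection concrete:

```python
from wrapt import wrap_function_wrapper

class Manager:
    """Stand-in for BaseCallbackManager: it just collects handlers."""
    def __init__(self) -> None:
        self.handlers: list[object] = []

    def add_handler(self, handler: object) -> None:
        self.handlers.append(handler)

class InjectHandler:
    """Wrapper in the style of _BaseCallbackManagerInit: run the real __init__,
    then add our handler unless one of the same type is already present."""
    def __init__(self, handler: object) -> None:
        self._handler = handler

    def __call__(self, wrapped, instance, args, kwargs) -> None:
        wrapped(*args, **kwargs)
        if not any(isinstance(h, type(self._handler)) for h in instance.handlers):
            instance.add_handler(self._handler)

class TraceHandler:
    """Stand-in for the Fiddler callback handler."""

# Patch Manager.__init__ in this module, mirroring how the instrumentor patches
# 'langchain_core.callbacks.BaseCallbackManager.__init__'.
wrap_function_wrapper(module=__name__, name='Manager.__init__', wrapper=InjectHandler(TraceHandler()))

m = Manager()
assert any(isinstance(h, TraceHandler) for h in m.handlers)  # handler was injected exactly once
```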
{fiddler_langgraph-0.1.1 → fiddler_langgraph-1.0.0/fiddler_langgraph.egg-info}/PKG-INFO

@@ -1,6 +1,6 @@
 Metadata-Version: 2.4
 Name: fiddler-langgraph
-Version: 0.1.1
+Version: 1.0.0
 Summary: Python SDK for instrumenting GenAI Applications with Fiddler
 Home-page: https://fiddler.ai
 Author: Fiddler AI
@@ -87,7 +87,7 @@ from fiddler_langgraph import FiddlerClient
 
 # Initialize the FiddlerClient with basic configuration
 client = FiddlerClient(
-    url="https://
+    url="https://your-instance.fiddler.ai",
     api_key="fdl_api_key",
     application_id="fdl_application_id"  # Must be a valid UUID4
 )
@@ -152,7 +152,7 @@ The Fiddler SDK provides flexible configuration options for OpenTelemetry integr
 client = FiddlerClient(
     api_key="your-api-key",
     application_id="your-app-id",  # Must be a valid UUID4
-    url="https://
+    url="https://your-instance.fiddler.ai"
 )
 ```
 
@@ -178,6 +178,7 @@ sampler = sampling.TraceIdRatioBased(0.1)  # Sample 10% of traces
 client = FiddlerClient(
     api_key="your-api-key",
     application_id="your-app-id",
+    url="https://your-instance.fiddler.ai",
     span_limits=custom_limits,
     sampler=sampler,
     console_tracer=False,  # Set to True for debugging
@@ -196,6 +197,7 @@ from opentelemetry.exporter.otlp.proto.http.trace_exporter import Compression
 client = FiddlerClient(
     api_key="your-api-key",
     application_id="your-app-id",
+    url="https://your-instance.fiddler.ai",
     compression=Compression.Gzip,
 )
 
@@ -203,6 +205,7 @@ client = FiddlerClient(
 client = FiddlerClient(
     api_key="your-api-key",
     application_id="your-app-id",
+    url="https://your-instance.fiddler.ai",
     compression=Compression.NoCompression,
 )
 
@@ -210,6 +213,7 @@ client = FiddlerClient(
 client = FiddlerClient(
     api_key="your-api-key",
     application_id="your-app-id",
+    url="https://your-instance.fiddler.ai",
     compression=Compression.Deflate,
 )
 ```
@@ -229,7 +233,8 @@ os.environ['OTEL_BSP_EXPORT_TIMEOUT'] = '10000'
 
 client = FiddlerClient(
     api_key="your-api-key",
-    application_id="your-app-id"
+    application_id="your-app-id",
+    url="https://your-instance.fiddler.ai"
 )
 ```
fiddler_langgraph-0.1.1/fiddler_langgraph/VERSION (removed)

@@ -1 +0,0 @@
-0.1.1
fiddler_langgraph-0.1.1/fiddler_langgraph/tracing/instrumentation.py (removed)

@@ -1,264 +0,0 @@
-"""LangGraph instrumentation module for Fiddler."""
-
-from collections.abc import Callable, Collection
-from typing import Any, cast
-
-from langchain_core.callbacks import BaseCallbackManager
-from langchain_core.language_models import BaseLanguageModel
-from langchain_core.retrievers import BaseRetriever
-from langchain_core.runnables import RunnableBinding
-from langchain_core.tools import BaseTool
-from opentelemetry.instrumentation.instrumentor import (  # type: ignore[attr-defined]
-    BaseInstrumentor,
-)
-from pydantic import ConfigDict, validate_call
-from wrapt import wrap_function_wrapper
-
-from fiddler_langgraph.core.attributes import (
-    _CONVERSATION_ID,
-    FIDDLER_METADATA_KEY,
-    FiddlerSpanAttributes,
-)
-from fiddler_langgraph.core.client import FiddlerClient
-from fiddler_langgraph.tracing.callback import _CallbackHandler
-from fiddler_langgraph.tracing.util import _check_langgraph_version, _get_package_version
-
-
-@validate_call(config=ConfigDict(strict=True))
-def set_conversation_id(conversation_id: str) -> None:
-    """Set the conversation ID for the current application invocation.
-    This will remain in use until it is called again with a new conversation ID.
-    Note (Robin 11th Sep 2025): This should be moved to the core.attributes module in the future.
-    """
-    _CONVERSATION_ID.set(conversation_id)
-
-
-@validate_call(config=ConfigDict(strict=True, arbitrary_types_allowed=True))
-def _set_default_metadata(
-    node: BaseLanguageModel | BaseRetriever | BaseTool,
-) -> None:
-    """Ensures a node has the default Fiddler metadata dictionary.
-
-    If `node.metadata` does not exist or is not a dictionary, it will be
-    initialized. This function modifies the node in place.
-
-    Args:
-        node (BaseLanguageModel | BaseRetriever | BaseTool): The node to modify.
-    """
-    if not hasattr(node, 'metadata'):
-        node.metadata = {}
-    if not isinstance(node.metadata, dict):
-        node.metadata = {}
-    metadata = node.metadata
-    if FIDDLER_METADATA_KEY not in metadata:
-        metadata[FIDDLER_METADATA_KEY] = {}
-
-
-@validate_call(config=ConfigDict(strict=True, arbitrary_types_allowed=True))
-def add_span_attributes(
-    node: BaseLanguageModel | BaseRetriever | BaseTool,
-    **kwargs: Any,
-) -> None:
-    """Adds Fiddler-specific attributes to a runnable's metadata.
-
-    This is used for various runnable types like LLM calls, tool
-    calls, and retriever calls.
-
-    Args:
-        node (BaseLanguageModel | BaseRetriever | BaseTool): The runnable node.
-        **kwargs: The attributes to add as key-value pairs.
-    """
-    _set_default_metadata(node)
-    metadata = cast(dict[str, Any], node.metadata)
-    fiddler_attrs = cast(dict[str, Any], metadata.get(FIDDLER_METADATA_KEY, {}))
-    for key, value in kwargs.items():
-        fiddler_attrs[key] = value
-
-
-@validate_call(config=ConfigDict(strict=True))
-def set_llm_context(llm: BaseLanguageModel | RunnableBinding, context: str) -> None:
-    """Sets a context string on a language model instance.
-    If the language model is a RunnableBinding, the context will be set on the bound object.
-
-    https://python.langchain.com/api_reference/core/runnables/langchain_core.runnables.base.RunnableBinding.html
-
-    The bound object of the RunnableBinding must be a BaseLanguageModel.
-    This context can be used to provide additional information about the
-    environment or data that the language model is being used in. This
-    information will be attached to the spans created for this model.
-    In case the user passes a RunnableBinding, the context will be set on the
-    bound object.
-
-    Args:
-        llm (BaseLanguageModel | RunnableBinding): The language model instance. **Required**.
-        context (str): The context string to add. **Required**.
-
-    Examples:
-        >>> from langchain_openai import ChatOpenAI
-        >>> from fiddler_langgraph.tracing.instrumentation import set_llm_context
-        >>>
-        >>> llm = ChatOpenAI()
-        >>> set_llm_context(llm, "This is a test context.")
-        >>>
-        >>> # If you are using a RunnableBinding, you can pass the bound object
-        >>> # directly to set_llm_context.
-        >>> bound_llm = llm.bind(x=1)
-        >>> set_llm_context(bound_llm, "This is a test context.")
-    """
-    if isinstance(llm, RunnableBinding):
-        if not isinstance(llm.bound, BaseLanguageModel):
-            raise TypeError(
-                'llm must be a BaseLanguageModel or a RunnableBinding of a BaseLanguageModel'
-            )
-        # RunnableBinding has config attribute (which can store metadata), however these are not passed
-        # to the callback handlers. So we need to use the bound object directly.
-        _llm = llm.bound
-    else:
-        _llm = llm
-
-    _set_default_metadata(_llm)
-
-    if _llm.metadata is None:
-        _llm.metadata = {}
-    fiddler_attrs = cast(dict[str, Any], _llm.metadata.get(FIDDLER_METADATA_KEY, {}))
-    fiddler_attrs[FiddlerSpanAttributes.LLM_CONTEXT] = context
-
-
-class LangGraphInstrumentor(BaseInstrumentor):
-    """An OpenTelemetry instrumentor for LangGraph applications.
-
-    This class provides automatic instrumentation for applications built with
-    LangGraph. It captures traces from the execution of LangGraph graphs and
-    sends them to the Fiddler platform.
-
-    To use the instrumentor, you first need to create a `FiddlerClient`
-    instance. Then, you can create an instance of `LangGraphInstrumentor` and
-    call the `instrument()` method.
-
-    Examples:
-        >>> from fiddler_langgraph import FiddlerClient
-        >>> from fiddler_langgraph.tracing import LangGraphInstrumentor
-        >>>
-        >>> client = FiddlerClient(api_key="...", application_id="...")
-        >>> instrumentor = LangGraphInstrumentor(client=client)
-        >>> instrumentor.instrument()
-
-    Attributes:
-        _client (FiddlerClient): The FiddlerClient instance used for configuration.
-    """
-
-    def __init__(self, client: FiddlerClient):
-        """Initializes the LangGraphInstrumentor.
-
-        Args:
-            client (FiddlerClient): The `FiddlerClient` instance. **Required**.
-
-        Raises:
-            ImportError: If LangGraph version is incompatible or not installed.
-        """
-        super().__init__()
-        self._client = client
-        self._langgraph_version = _get_package_version('langgraph')
-        self._langchain_version = _get_package_version('langchain_core')
-        self._fiddler_langgraph_version = _get_package_version('fiddler_langgraph')
-
-        self._client.update_resource(
-            {
-                'lib.langgraph.version': self._langgraph_version.public,
-                'lib.langchain_core.version': self._langchain_version.public,
-                'lib.fiddler-langgraph.version': self._fiddler_langgraph_version.public,
-            }
-        )
-        self._tracer: _CallbackHandler | None = None
-        self._original_callback_manager_init: Callable[..., None] | None = None
-
-        # Check LangGraph version compatibility - we don't add this to dependencies
-        # because we leave it to the user to install the correct version of LangGraph
-        # We will check if the user installed version is compatible with the version of fiddler-langgraph
-        _check_langgraph_version(self._langgraph_version)
-
-    def instrumentation_dependencies(self) -> Collection[str]:
-        """Returns the package dependencies required for this instrumentor.
-
-        Returns:
-            Collection[str]: A collection of package dependency strings.
-        """
-        return ('langchain_core >= 0.1.0',)
-
-    def _instrument(self, **kwargs: Any) -> None:
-        """Instruments LangGraph by monkey-patching `BaseCallbackManager`.
-
-        This method injects a custom callback handler into LangGraph's callback
-        system to capture trace data. This is done by wrapping the `__init__`
-        method of `BaseCallbackManager` to inject a `_CallbackHandler`.
-
-        Raises:
-            ValueError: If the tracer is not initialized in the FiddlerClient.
-        """
-        import langchain_core
-
-        tracer = self._client.get_tracer()
-        if tracer is None:
-            raise ValueError('Context tracer is not initialized')
-
-        self._tracer = _CallbackHandler(tracer)
-        self._original_callback_manager_init = langchain_core.callbacks.BaseCallbackManager.__init__
-        wrap_function_wrapper(
-            module='langchain_core.callbacks',
-            name='BaseCallbackManager.__init__',
-            wrapper=_BaseCallbackManagerInit(self._tracer),
-        )
-
-    def _uninstrument(self, **kwargs: Any) -> None:
-        """Removes the instrumentation from LangGraph.
-
-        This is done by restoring the original `__init__` method on the
-        `BaseCallbackManager` class.
-        """
-        import langchain_core
-
-        if self._original_callback_manager_init is not None:
-            setattr(  # noqa: B010
-                langchain_core.callbacks.BaseCallbackManager,
-                '__init__',
-                self._original_callback_manager_init,
-            )
-            self._original_callback_manager_init = None
-        self._tracer = None
-
-
-class _BaseCallbackManagerInit:
-    """A wrapper class for `BaseCallbackManager.__init__` to inject Fiddler's callback handler."""
-
-    __slots__ = ('_callback_handler',)
-
-    def __init__(self, callback_handler: _CallbackHandler):
-        """Initializes the wrapper.
-
-        Args:
-            callback_handler (_CallbackHandler): The Fiddler callback handler instance
-                to be injected into the callback manager.
-        """
-        self._callback_handler = callback_handler
-
-    def __call__(
-        self,
-        wrapped: Callable[..., None],
-        instance: 'BaseCallbackManager',
-        args: Any,
-        kwargs: Any,
-    ) -> None:
-        """Calls the original `__init__` and then adds the Fiddler handler.
-
-        It also ensures that the handler is not added multiple times if it
-        already exists in the list of inheritable handlers.
-        """
-        wrapped(*args, **kwargs)
-        for handler in instance.inheritable_handlers:
-            # Handlers may be copied when new managers are created, so we
-            # don't want to keep adding. E.g. see the following location.
-            # https://github.com/langchain-ai/langchain/blob/5c2538b9f7fb64afed2a918b621d9d8681c7ae32/libs/core/langchain_core/callbacks/manager.py#L1876
-            if isinstance(handler, type(self._callback_handler)):
-                break
-        else:
-            instance.add_handler(self._callback_handler, True)
All remaining files listed above ({fiddler_langgraph-0.1.1 → fiddler_langgraph-1.0.0}) were renamed for the new version without content changes.