docent-python 0.1.3a0__py3-none-any.whl → 0.1.5a0__py3-none-any.whl
This diff shows the changes between two publicly released versions of the package, as they appear in their public registries. It is provided for informational purposes only.
Potentially problematic release.
This version of docent-python might be problematic.
- docent/data_models/__init__.py +2 -9
- docent/data_models/agent_run.py +30 -20
- docent/data_models/metadata.py +229 -229
- docent/data_models/transcript.py +56 -16
- docent/loaders/load_inspect.py +37 -25
- docent/sdk/client.py +33 -23
- docent/trace.py +868 -304
- docent/trace_temp.py +1086 -0
- {docent_python-0.1.3a0.dist-info → docent_python-0.1.5a0.dist-info}/METADATA +1 -2
- {docent_python-0.1.3a0.dist-info → docent_python-0.1.5a0.dist-info}/RECORD +12 -12
- docent/trace_alt.py +0 -497
- {docent_python-0.1.3a0.dist-info → docent_python-0.1.5a0.dist-info}/WHEEL +0 -0
- {docent_python-0.1.3a0.dist-info → docent_python-0.1.5a0.dist-info}/licenses/LICENSE.md +0 -0
docent/trace.py
CHANGED
```diff
@@ -1,7 +1,5 @@
-import asyncio
 import atexit
 import contextvars
-import inspect
 import itertools
 import logging
 import os
@@ -12,8 +10,11 @@ import uuid
 from collections import defaultdict
 from contextlib import asynccontextmanager, contextmanager
 from contextvars import ContextVar, Token
-from
+from datetime import datetime, timezone
+from enum import Enum
+from typing import Any, AsyncIterator, Callable, Dict, Iterator, List, Optional, Set, Union

+import requests
 from opentelemetry import trace
 from opentelemetry.context import Context
 from opentelemetry.exporter.otlp.proto.grpc.trace_exporter import OTLPSpanExporter as GRPCExporter
@@ -39,36 +40,23 @@ logger.disabled = True

 # Default configuration
 DEFAULT_ENDPOINT = "https://api.docent.transluce.org/rest/telemetry"
+DEFAULT_COLLECTION_NAME = "default-collection-name"


-
-    """
-    try:
-        # Check if we're in an async function
-        frame = inspect.currentframe()
-        while frame:
-            if frame.f_code.co_flags & inspect.CO_COROUTINE:
-                return True
-            frame = frame.f_back
-        return False
-    except:
-        return False
+class Instruments(Enum):
+    """Enumeration of available instrument types."""

-
-
-
-
-        asyncio.get_running_loop()
-        return True
-    except RuntimeError:
-        return False
+    OPENAI = "openai"
+    ANTHROPIC = "anthropic"
+    BEDROCK = "bedrock"
+    LANGCHAIN = "langchain"


 def _is_notebook() -> bool:
     """Check if we're running in a Jupyter notebook."""
     try:
         return "ipykernel" in sys.modules
-    except:
+    except Exception:
         return False

```
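For orientation, a minimal sketch of how the new `Instruments` enum and the `instruments`/`block_instruments` parameters introduced in this release might be used. The import path assumes the module layout shown in this diff (`docent/trace.py`), and the collection name is a placeholder.

```python
# Illustrative only: selecting which auto-instrumentation to enable at init time,
# based on the Instruments enum and initialize_tracing() parameters added in 0.1.5a0.
from docent.trace import Instruments, initialize_tracing

initialize_tracing(
    collection_name="my-collection",                          # placeholder name
    instruments={Instruments.OPENAI, Instruments.ANTHROPIC},  # enable only these
    block_instruments={Instruments.LANGCHAIN},                # explicitly disable LangChain
)
```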
```diff
@@ -77,7 +65,7 @@ class DocentTracer:

     def __init__(
         self,
-        collection_name: str =
+        collection_name: str = DEFAULT_COLLECTION_NAME,
         collection_id: Optional[str] = None,
         agent_run_id: Optional[str] = None,
         endpoint: Union[str, List[str]] = DEFAULT_ENDPOINT,
@@ -86,7 +74,8 @@ class DocentTracer:
         enable_console_export: bool = False,
         enable_otlp_export: bool = True,
         disable_batch: bool = False,
-
+        instruments: Optional[Set[Instruments]] = None,
+        block_instruments: Optional[Set[Instruments]] = None,
     ):
         """
         Initialize Docent tracing manager.
@@ -101,7 +90,8 @@ class DocentTracer:
             enable_console_export: Whether to export to console
             enable_otlp_export: Whether to export to OTLP endpoint
             disable_batch: Whether to disable batch processing (use SimpleSpanProcessor)
-
+            instruments: Set of instruments to enable (None = all instruments)
+            block_instruments: Set of instruments to explicitly disable
         """
         self.collection_name: str = collection_name
         self.collection_id: str = collection_id if collection_id else str(uuid.uuid4())
@@ -129,22 +119,30 @@ class DocentTracer:
         self.enable_console_export = enable_console_export
         self.enable_otlp_export = enable_otlp_export
         self.disable_batch = disable_batch
-        self.
+        self.disabled_instruments: Set[Instruments] = {Instruments.LANGCHAIN}
+        self.instruments = instruments or (set(Instruments) - self.disabled_instruments)
+        self.block_instruments = block_instruments or set()

         # Use separate tracer provider to avoid interfering with existing OTEL setup
         self._tracer_provider: Optional[TracerProvider] = None
-        self.
-        self._root_context: Context = Context()
+        self._root_context: Optional[Context] = Context()
         self._tracer: Optional[trace.Tracer] = None
         self._initialized: bool = False
         self._cleanup_registered: bool = False
         self._disabled: bool = False
         self._spans_processors: List[Union[BatchSpanProcessor, SimpleSpanProcessor]] = []

-        #
+        # Base HTTP endpoint for direct API calls (scores, metadata, trace-done)
+        if len(self.endpoints) > 0:
+            self._api_endpoint_base: Optional[str] = self.endpoints[0]
+
+        # Context variables for agent_run_id and transcript_id
         self._collection_id_var: ContextVar[str] = contextvars.ContextVar("docent_collection_id")
         self._agent_run_id_var: ContextVar[str] = contextvars.ContextVar("docent_agent_run_id")
         self._transcript_id_var: ContextVar[str] = contextvars.ContextVar("docent_transcript_id")
+        self._transcript_group_id_var: ContextVar[str] = contextvars.ContextVar(
+            "docent_transcript_group_id"
+        )
         self._attributes_var: ContextVar[dict[str, Any]] = contextvars.ContextVar(
             "docent_attributes"
         )
@@ -154,18 +152,17 @@ class DocentTracer:
         )
         self._transcript_counter_lock = threading.Lock()

-    def
-        """
-        Get the current span from our isolated context.
-        This never touches the global OpenTelemetry context.
+    def get_current_agent_run_id(self) -> Optional[str]:
         """
-
-        return None
+        Get the current agent run ID from context.

+        Returns:
+            The current agent run ID if available, None otherwise
+        """
         try:
-            return
-        except
-            return
+            return self._agent_run_id_var.get()
+        except LookupError:
+            return self.default_agent_run_id

     def _register_cleanup(self):
         """Register cleanup handlers."""
@@ -187,7 +184,7 @@ class DocentTracer:

     def _next_span_order(self, transcript_id: str) -> int:
         """
-        Get the next
+        Get the next span order for a given transcript_id.
         Thread-safe and guaranteed to be unique and monotonic.
         """
         with self._transcript_counter_lock:
@@ -252,17 +249,16 @@ class DocentTracer:
                 resource=Resource.create({"service.name": self.collection_name})
             )

-            # Add custom span processor for
+            # Add custom span processor for agent_run_id and transcript_id
             class ContextSpanProcessor(SpanProcessor):
                 def __init__(self, manager: "DocentTracer"):
                     self.manager: "DocentTracer" = manager

                 def on_start(self, span: Span, parent_context: Optional[Context] = None) -> None:
-                    # Add collection_id, agent_run_id, transcript_id, and any other current attributes
-                    # Always add collection_id as it's always available
+                    # Add collection_id, agent_run_id, transcript_id, transcript_group_id, and any other current attributes
                     span.set_attribute("collection_id", self.manager.collection_id)

-                    #
+                    # Set agent_run_id from context
                     try:
                         agent_run_id: str = self.manager._agent_run_id_var.get()
                         if agent_run_id:
@@ -274,7 +270,15 @@ class DocentTracer:
                         span.set_attribute("agent_run_id_default", True)
                         span.set_attribute("agent_run_id", self.manager.default_agent_run_id)

-                    #
+                    # Set transcript_group_id from context
+                    try:
+                        transcript_group_id: str = self.manager._transcript_group_id_var.get()
+                        if transcript_group_id:
+                            span.set_attribute("transcript_group_id", transcript_group_id)
+                    except LookupError:
+                        pass
+
+                    # Set transcript_id from context
                     try:
                         transcript_id: str = self.manager._transcript_id_var.get()
                         if transcript_id:
@@ -286,7 +290,7 @@ class DocentTracer:
                         # transcript_id not available, skip it
                         pass

-                    #
+                    # Set custom attributes from context
                     try:
                         attributes: dict[str, Any] = self.manager._attributes_var.get()
                         for key, value in attributes.items():
@@ -340,51 +344,45 @@ class DocentTracer:
             # Get tracer from our isolated provider (don't set global provider)
             self._tracer = self._tracer_provider.get_tracer(__name__)

-            # Start root span
-            self._root_span = self._tracer.start_span(
-                "application_session",
-                attributes={
-                    "service.name": self.collection_name,
-                    "session.type": "application_root",
-                },
-            )
-            self._root_context = trace.set_span_in_context(
-                self._root_span, context=self._root_context
-            )
-
             # Instrument threading for better context propagation
             try:
                 ThreadingInstrumentor().instrument()
             except Exception as e:
                 logger.warning(f"Failed to instrument threading: {e}")

+            enabled_instruments = self.instruments - self.block_instruments
+
             # Instrument OpenAI with our isolated tracer provider
-
-
-
-
-
+            if Instruments.OPENAI in enabled_instruments:
+                try:
+                    OpenAIInstrumentor().instrument(tracer_provider=self._tracer_provider)
+                    logger.info("Instrumented OpenAI")
+                except Exception as e:
+                    logger.warning(f"Failed to instrument OpenAI: {e}")

             # Instrument Anthropic with our isolated tracer provider
-
-
-
-
-
+            if Instruments.ANTHROPIC in enabled_instruments:
+                try:
+                    AnthropicInstrumentor().instrument(tracer_provider=self._tracer_provider)
+                    logger.info("Instrumented Anthropic")
+                except Exception as e:
+                    logger.warning(f"Failed to instrument Anthropic: {e}")

             # Instrument Bedrock with our isolated tracer provider
-
-
-
-
-
+            if Instruments.BEDROCK in enabled_instruments:
+                try:
+                    BedrockInstrumentor().instrument(tracer_provider=self._tracer_provider)
+                    logger.info("Instrumented Bedrock")
+                except Exception as e:
+                    logger.warning(f"Failed to instrument Bedrock: {e}")

             # Instrument LangChain with our isolated tracer provider
-
-
-
-
-
+            if Instruments.LANGCHAIN in enabled_instruments:
+                try:
+                    LangchainInstrumentor().instrument(tracer_provider=self._tracer_provider)
+                    logger.info("Instrumented LangChain")
+                except Exception as e:
+                    logger.warning(f"Failed to instrument LangChain: {e}")

             # Register cleanup handlers
             self._register_cleanup()
@@ -398,30 +396,14 @@ class DocentTracer:
             raise

     def cleanup(self):
-        """Clean up Docent tracing resources."""
+        """Clean up Docent tracing resources and signal trace completion to backend."""
         try:
-            #
-
-
-
-
-
-                    "event.type": "trace_end",
-                },
-            )
-            end_span.end()
-
-            if (
-                self._root_span
-                and hasattr(self._root_span, "is_recording")
-                and self._root_span.is_recording()
-            ):
-                self._root_span.end()
-            elif self._root_span:
-                # Fallback if is_recording is not available
-                self._root_span.end()
-
-            self._root_span = None
+            # Notify backend that trace is done (no span creation)
+            try:
+                self._send_trace_done()
+            except Exception as e:
+                logger.warning(f"Failed to notify trace done: {e}")
+
             self._root_context = None  # type: ignore

             # Shutdown our isolated tracer provider
@@ -485,48 +467,6 @@ class DocentTracer:
             self.initialize()
         return self._root_context

-    @contextmanager
-    def span(self, name: str, attributes: Optional[Dict[str, Any]] = None) -> Iterator[Span]:
-        """
-        Context manager for creating spans with attributes.
-        """
-        if not self._initialized:
-            self.initialize()
-
-        if self._tracer is None:
-            raise RuntimeError("Tracer not initialized")
-
-        span_attributes: dict[str, Any] = attributes or {}
-
-        with self._tracer.start_as_current_span(
-            name, context=self._root_context, attributes=span_attributes
-        ) as span:
-            yield span
-
-    @asynccontextmanager
-    async def async_span(
-        self, name: str, attributes: Optional[Dict[str, Any]] = None
-    ) -> AsyncIterator[Span]:
-        """
-        Async context manager for creating spans with attributes.
-
-        Args:
-            name: Name of the span
-            attributes: Dictionary of attributes to add to the span
-        """
-        if not self._initialized:
-            self.initialize()
-
-        if self._tracer is None:
-            raise RuntimeError("Tracer not initialized")
-
-        span_attributes: dict[str, Any] = attributes or {}
-
-        with self._tracer.start_as_current_span(
-            name, context=self._root_context, attributes=span_attributes
-        ) as span:
-            yield span
-
     @contextmanager
     def agent_run_context(
         self,
@@ -541,7 +481,7 @@ class DocentTracer:
         Args:
             agent_run_id: Optional agent run ID (auto-generated if not provided)
             transcript_id: Optional transcript ID (auto-generated if not provided)
-            metadata: Optional nested dictionary of metadata to
+            metadata: Optional nested dictionary of metadata to send to backend
             **attributes: Additional attributes to add to the context

         Yields:
@@ -550,9 +490,6 @@ class DocentTracer:
         if not self._initialized:
             self.initialize()

-        if self._tracer is None:
-            raise RuntimeError("Tracer not initialized")
-
         if agent_run_id is None:
             agent_run_id = str(uuid.uuid4())
         if transcript_id is None:
@@ -564,20 +501,14 @@ class DocentTracer:
         attributes_token: Token[dict[str, Any]] = self._attributes_var.set(attributes)

         try:
-            #
-
-
-
-
-
-
-
-            ) as _span:
-                # Attach metadata as events if provided
-                if metadata:
-                    _add_metadata_event_to_span(_span, metadata)
-
-                yield agent_run_id, transcript_id
+            # Send metadata directly to backend if provided
+            if metadata:
+                try:
+                    self.send_agent_run_metadata(agent_run_id, metadata)
+                except Exception as e:
+                    logger.warning(f"Failed sending agent run metadata: {e}")
+
+            yield agent_run_id, transcript_id
         finally:
             self._agent_run_id_var.reset(agent_run_id_token)
             self._transcript_id_var.reset(transcript_id_token)
@@ -598,7 +529,7 @@ class DocentTracer:
         Args:
             agent_run_id: Optional agent run ID (auto-generated if not provided)
             transcript_id: Optional transcript ID (auto-generated if not provided)
-            metadata: Optional nested dictionary of metadata to
+            metadata: Optional nested dictionary of metadata to send to backend
             **attributes: Additional attributes to add to the context

         Yields:
@@ -607,9 +538,6 @@ class DocentTracer:
         if not self._initialized:
             self.initialize()

-        if self._tracer is None:
-            raise RuntimeError("Tracer not initialized")
-
         if agent_run_id is None:
             agent_run_id = str(uuid.uuid4())
         if transcript_id is None:
@@ -621,117 +549,415 @@ class DocentTracer:
         attributes_token: Token[dict[str, Any]] = self._attributes_var.set(attributes)

         try:
-            #
-
-
-
-
-
-
-
-            ) as _span:
-                # Attach metadata as events if provided
-                if metadata:
-                    _add_metadata_event_to_span(_span, metadata)
-
-                yield agent_run_id, transcript_id
+            # Send metadata directly to backend if provided
+            if metadata:
+                try:
+                    self.send_agent_run_metadata(agent_run_id, metadata)
+                except Exception as e:
+                    logger.warning(f"Failed sending agent run metadata: {e}")
+
+            yield agent_run_id, transcript_id
         finally:
             self._agent_run_id_var.reset(agent_run_id_token)
             self._transcript_id_var.reset(transcript_id_token)
             self._attributes_var.reset(attributes_token)

-    def
+    def _api_headers(self) -> Dict[str, str]:
+        """
+        Get the API headers for HTTP requests.
+
+        Returns:
+            Dictionary of headers including Authorization
+        """
+        return {
+            "Content-Type": "application/json",
+            "Authorization": f"Bearer {self.headers.get('Authorization', '').replace('Bearer ', '')}",
+        }
+
+    def _post_json(self, path: str, data: Dict[str, Any]) -> None:
+        if not self._api_endpoint_base:
+            raise RuntimeError("API endpoint base is not configured")
+        url = f"{self._api_endpoint_base}{path}"
+        try:
+            resp = requests.post(url, json=data, headers=self._api_headers(), timeout=10)
+            resp.raise_for_status()
+        except requests.exceptions.RequestException as e:
+            logger.error(f"Failed POST {url}: {e}")
+
+    def send_agent_run_score(
         self,
-        agent_run_id:
+        agent_run_id: str,
+        name: str,
+        score: float,
+        attributes: Optional[Dict[str, Any]] = None,
+    ) -> None:
+        """
+        Send a score to the backend for a specific agent run.
+
+        Args:
+            agent_run_id: The agent run ID
+            name: Name of the score metric
+            score: Numeric score value
+            attributes: Optional additional attributes
+        """
+        collection_id = self.collection_id
+        payload: Dict[str, Any] = {
+            "collection_id": collection_id,
+            "agent_run_id": agent_run_id,
+            "score_name": name,
+            "score_value": score,
+            "timestamp": datetime.now(timezone.utc).isoformat(),
+        }
+        if attributes:
+            payload.update(attributes)
+        self._post_json("/v1/scores", payload)
+
+    def send_agent_run_metadata(self, agent_run_id: str, metadata: Dict[str, Any]) -> None:
+        collection_id = self.collection_id
+        payload: Dict[str, Any] = {
+            "collection_id": collection_id,
+            "agent_run_id": agent_run_id,
+            "metadata": metadata,
+            "timestamp": datetime.now(timezone.utc).isoformat(),
+        }
+        self._post_json("/v1/agent-run-metadata", payload)
+
+    def send_transcript_metadata(
+        self,
+        transcript_id: str,
+        name: Optional[str] = None,
+        description: Optional[str] = None,
+        transcript_group_id: Optional[str] = None,
+        metadata: Optional[Dict[str, Any]] = None,
+    ) -> None:
+        """
+        Send transcript data to the backend.
+
+        Args:
+            transcript_id: The transcript ID
+            name: Optional transcript name
+            description: Optional transcript description
+            transcript_group_id: Optional transcript group ID
+            metadata: Optional metadata to send
+        """
+        collection_id = self.collection_id
+        payload: Dict[str, Any] = {
+            "collection_id": collection_id,
+            "transcript_id": transcript_id,
+            "timestamp": datetime.now(timezone.utc).isoformat(),
+        }
+
+        # Only add fields that are provided
+        if name is not None:
+            payload["name"] = name
+        if description is not None:
+            payload["description"] = description
+        if transcript_group_id is not None:
+            payload["transcript_group_id"] = transcript_group_id
+        if metadata is not None:
+            payload["metadata"] = metadata
+
+        self._post_json("/v1/transcript-metadata", payload)
+
+    def get_current_transcript_id(self) -> Optional[str]:
+        """
+        Get the current transcript ID from context.
+
+        Returns:
+            The current transcript ID if available, None otherwise
+        """
+        try:
+            return self._transcript_id_var.get()
+        except LookupError:
+            return None
+
+    def get_current_transcript_group_id(self) -> Optional[str]:
+        """
+        Get the current transcript group ID from context.
+
+        Returns:
+            The current transcript group ID if available, None otherwise
+        """
+        try:
+            return self._transcript_group_id_var.get()
+        except LookupError:
+            return None
+
+    @contextmanager
+    def transcript_context(
+        self,
+        name: Optional[str] = None,
         transcript_id: Optional[str] = None,
-
-
+        description: Optional[str] = None,
+        metadata: Optional[Dict[str, Any]] = None,
+        transcript_group_id: Optional[str] = None,
+    ) -> Iterator[str]:
         """
-
+        Context manager for setting up a transcript context.

         Args:
-
+            name: Optional transcript name
             transcript_id: Optional transcript ID (auto-generated if not provided)
-
+            description: Optional transcript description
+            metadata: Optional metadata to send to backend
+            transcript_group_id: Optional transcript group ID

-
-
+        Yields:
+            The transcript ID
         """
         if not self._initialized:
-
+            raise RuntimeError(
+                "Tracer is not initialized. Call initialize_tracing() before using transcript context."
+            )

-        if
-
+        if transcript_id is None:
+            transcript_id = str(uuid.uuid4())
+
+        # Determine transcript group ID before setting new context
+        if transcript_group_id is None:
+            try:
+                transcript_group_id = self._transcript_group_id_var.get()
+            except LookupError:
+                # No current transcript group context, this transcript has no group
+                transcript_group_id = None
+
+        # Set context variable for this execution context
+        transcript_id_token: Token[str] = self._transcript_id_var.set(transcript_id)
+
+        try:
+            # Send transcript data and metadata to backend
+            try:
+                self.send_transcript_metadata(
+                    transcript_id, name, description, transcript_group_id, metadata
+                )
+            except Exception as e:
+                logger.warning(f"Failed sending transcript data: {e}")
+
+            yield transcript_id
+        finally:
+            # Reset context variable to previous state
+            self._transcript_id_var.reset(transcript_id_token)
+
+    @asynccontextmanager
+    async def async_transcript_context(
+        self,
+        name: Optional[str] = None,
+        transcript_id: Optional[str] = None,
+        description: Optional[str] = None,
+        metadata: Optional[Dict[str, Any]] = None,
+        transcript_group_id: Optional[str] = None,
+    ) -> AsyncIterator[str]:
+        """
+        Async context manager for setting up a transcript context.
+
+        Args:
+            name: Optional transcript name
+            transcript_id: Optional transcript ID (auto-generated if not provided)
+            description: Optional transcript description
+            metadata: Optional metadata to send to backend
+            transcript_group_id: Optional transcript group ID
+
+        Yields:
+            The transcript ID
+        """
+        if not self._initialized:
+            raise RuntimeError(
+                "Tracer is not initialized. Call initialize_tracing() before using transcript context."
+            )

-        if agent_run_id is None:
-            agent_run_id = str(uuid.uuid4())
         if transcript_id is None:
             transcript_id = str(uuid.uuid4())

-
-
-
-
-
+        # Determine transcript group ID before setting new context
+        if transcript_group_id is None:
+            try:
+                transcript_group_id = self._transcript_group_id_var.get()
+            except LookupError:
+                # No current transcript group context, this transcript has no group
+                transcript_group_id = None

-
-
-
+        # Set context variable for this execution context
+        transcript_id_token: Token[str] = self._transcript_id_var.set(transcript_id)
+
+        try:
+            # Send transcript data and metadata to backend
+            try:
+                self.send_transcript_metadata(
+                    transcript_id, name, description, transcript_group_id, metadata
+                )
+            except Exception as e:
+                logger.warning(f"Failed sending transcript data: {e}")

-
+            yield transcript_id
+        finally:
+            # Reset context variable to previous state
+            self._transcript_id_var.reset(transcript_id_token)

-    def
+    def send_transcript_group_metadata(
+        self,
+        transcript_group_id: str,
+        name: Optional[str] = None,
+        description: Optional[str] = None,
+        parent_transcript_group_id: Optional[str] = None,
+        metadata: Optional[Dict[str, Any]] = None,
+    ) -> None:
         """
-
+        Send transcript group data to the backend.

         Args:
-
+            transcript_group_id: The transcript group ID
+            name: Optional transcript group name
+            description: Optional transcript group description
+            parent_transcript_group_id: Optional parent transcript group ID
+            metadata: Optional metadata to send
         """
-
-
+        collection_id = self.collection_id
+        payload: Dict[str, Any] = {
+            "collection_id": collection_id,
+            "transcript_group_id": transcript_group_id,
+            "timestamp": datetime.now(timezone.utc).isoformat(),
+        }
+
+        if name is not None:
+            payload["name"] = name
+        if description is not None:
+            payload["description"] = description
+        if parent_transcript_group_id is not None:
+            payload["parent_transcript_group_id"] = parent_transcript_group_id
+        if metadata is not None:
+            payload["metadata"] = metadata

-
+        self._post_json("/v1/transcript-group-metadata", payload)
+
+    @contextmanager
+    def transcript_group_context(
+        self,
+        name: Optional[str] = None,
+        transcript_group_id: Optional[str] = None,
+        description: Optional[str] = None,
+        metadata: Optional[Dict[str, Any]] = None,
+        parent_transcript_group_id: Optional[str] = None,
+    ) -> Iterator[str]:
         """
-
+        Context manager for setting up a transcript group context.

         Args:
-            name:
-
+            name: Optional transcript group name
+            transcript_group_id: Optional transcript group ID (auto-generated if not provided)
+            description: Optional transcript group description
+            metadata: Optional metadata to send to backend
+            parent_transcript_group_id: Optional parent transcript group ID

-
-            The
+        Yields:
+            The transcript group ID
         """
         if not self._initialized:
-
-
-
-            raise RuntimeError("Tracer not initialized")
+            raise RuntimeError(
+                "Tracer is not initialized. Call initialize_tracing() before using transcript group context."
+            )

-
+        if transcript_group_id is None:
+            transcript_group_id = str(uuid.uuid4())

-
-
+        # Determine parent transcript group ID before setting new context
+        if parent_transcript_group_id is None:
+            try:
+                parent_transcript_group_id = self._transcript_group_id_var.get()
+            except LookupError:
+                # No current transcript group context, this becomes a root group
+                parent_transcript_group_id = None
+
+        # Set context variable for this execution context
+        transcript_group_id_token: Token[str] = self._transcript_group_id_var.set(
+            transcript_group_id
         )

-
+        try:
+            # Send transcript group data and metadata to backend
+            try:
+                self.send_transcript_group_metadata(
+                    transcript_group_id, name, description, parent_transcript_group_id, metadata
+                )
+            except Exception as e:
+                logger.warning(f"Failed sending transcript group data: {e}")
+
+            yield transcript_group_id
+        finally:
+            # Reset context variable to previous state
+            self._transcript_group_id_var.reset(transcript_group_id_token)

-
+    @asynccontextmanager
+    async def async_transcript_group_context(
+        self,
+        name: Optional[str] = None,
+        transcript_group_id: Optional[str] = None,
+        description: Optional[str] = None,
+        metadata: Optional[Dict[str, Any]] = None,
+        parent_transcript_group_id: Optional[str] = None,
+    ) -> AsyncIterator[str]:
         """
-
+        Async context manager for setting up a transcript group context.

         Args:
-
+            name: Optional transcript group name
+            transcript_group_id: Optional transcript group ID (auto-generated if not provided)
+            description: Optional transcript group description
+            metadata: Optional metadata to send to backend
+            parent_transcript_group_id: Optional parent transcript group ID
+
+        Yields:
+            The transcript group ID
         """
-        if
-
+        if not self._initialized:
+            raise RuntimeError(
+                "Tracer is not initialized. Call initialize_tracing() before using transcript group context."
+            )
+
+        if transcript_group_id is None:
+            transcript_group_id = str(uuid.uuid4())
+
+        # Determine parent transcript group ID before setting new context
+        if parent_transcript_group_id is None:
+            try:
+                parent_transcript_group_id = self._transcript_group_id_var.get()
+            except LookupError:
+                # No current transcript group context, this becomes a root group
+                parent_transcript_group_id = None
+
+        # Set context variable for this execution context
+        transcript_group_id_token: Token[str] = self._transcript_group_id_var.set(
+            transcript_group_id
+        )
+
+        try:
+            # Send transcript group data and metadata to backend
+            try:
+                self.send_transcript_group_metadata(
+                    transcript_group_id, name, description, parent_transcript_group_id, metadata
+                )
+            except Exception as e:
+                logger.warning(f"Failed sending transcript group data: {e}")
+
+            yield transcript_group_id
+        finally:
+            # Reset context variable to previous state
+            self._transcript_group_id_var.reset(transcript_group_id_token)
+
+    def _send_trace_done(self) -> None:
+        collection_id = self.collection_id
+        payload: Dict[str, Any] = {
+            "collection_id": collection_id,
+            "status": "completed",
+            "timestamp": datetime.now(timezone.utc).isoformat(),
+        }
+        self._post_json("/v1/trace-done", payload)


-# Global instance for easy access
 _global_tracer: Optional[DocentTracer] = None


 def initialize_tracing(
-    collection_name: str =
+    collection_name: str = DEFAULT_COLLECTION_NAME,
     collection_id: Optional[str] = None,
     endpoint: Union[str, List[str]] = DEFAULT_ENDPOINT,
     headers: Optional[Dict[str, str]] = None,
```
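A hedged sketch of the backend helpers the hunk above adds to `DocentTracer`. The run and transcript IDs below are placeholders, and it is assumed that an API key or `Authorization` header has already been configured (the diff shows the key is otherwise read from the environment).

```python
# Illustrative only: calling the new DocentTracer helpers directly.
# IDs are placeholders; initialize_tracing() returns the global tracer per this diff.
from docent.trace import initialize_tracing

tracer = initialize_tracing("my-collection")

# POSTs to {endpoint}/v1/scores with collection_id, agent_run_id, score_name, score_value
tracer.send_agent_run_score("run-123", "task_success", 1.0)

# POSTs to {endpoint}/v1/transcript-metadata; only the provided fields are included
tracer.send_transcript_metadata("transcript-456", name="step_1", metadata={"model": "gpt-4"})
```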
```diff
@@ -739,7 +965,8 @@ def initialize_tracing(
     enable_console_export: bool = False,
     enable_otlp_export: bool = True,
     disable_batch: bool = False,
-
+    instruments: Optional[Set[Instruments]] = None,
+    block_instruments: Optional[Set[Instruments]] = None,
 ) -> DocentTracer:
     """
     Initialize the global Docent tracer.
@@ -756,7 +983,8 @@ def initialize_tracing(
         enable_console_export: Whether to export spans to console
         enable_otlp_export: Whether to export spans to OTLP endpoint
         disable_batch: Whether to disable batch processing (use SimpleSpanProcessor)
-
+        instruments: Set of instruments to enable (None = all instruments).
+        block_instruments: Set of instruments to explicitly disable.

     Returns:
         The initialized Docent tracer
@@ -765,6 +993,7 @@ def initialize_tracing(
        # Basic setup
        initialize_tracing("my-collection")
    """
+
    global _global_tracer

    # Check for API key in environment variable if not provided as parameter
@@ -782,12 +1011,10 @@ def initialize_tracing(
             enable_console_export=enable_console_export,
             enable_otlp_export=enable_otlp_export,
             disable_batch=disable_batch,
-
+            instruments=instruments,
+            block_instruments=block_instruments,
         )
         _global_tracer.initialize()
-    else:
-        # If already initialized, ensure it's properly set up
-        _global_tracer.initialize()

     return _global_tracer

@@ -795,8 +1022,7 @@ def initialize_tracing(
 def get_tracer() -> DocentTracer:
     """Get the global Docent tracer."""
     if _global_tracer is None:
-
-        return initialize_tracing()
+        raise RuntimeError("Docent tracer not initialized")
     return _global_tracer


@@ -827,20 +1053,9 @@ def set_disabled(disabled: bool) -> None:
     _global_tracer.set_disabled(disabled)


-def get_api_key() -> Optional[str]:
-    """
-    Get the API key from environment variable.
-
-    Returns:
-        The API key from DOCENT_API_KEY environment variable, or None if not set
-    """
-    return os.environ.get("DOCENT_API_KEY")
-
-
 def agent_run_score(name: str, score: float, attributes: Optional[Dict[str, Any]] = None) -> None:
     """
-
-    Automatically works in both sync and async contexts.
+    Send a score to the backend for the current agent run.

     Args:
         name: Name of the score metric
@@ -848,22 +1063,16 @@ def agent_run_score(name: str, score: float, attributes: Optional[Dict[str, Any]
         attributes: Optional additional attributes for the score event
     """
     try:
-
-
-
-
-
-
-
-
-            if attributes:
-                event_attributes.update(attributes)
-
-            current_span.add_event(name="agent_run_score", attributes=event_attributes)
-        else:
-            logger.warning("No current span available for recording score")
+        tracer: DocentTracer = get_tracer()
+        agent_run_id = tracer.get_current_agent_run_id()
+
+        if not agent_run_id:
+            logger.warning("No active agent run context. Score will not be sent.")
+            return
+
+        tracer.send_agent_run_score(agent_run_id, name, score, attributes)
     except Exception as e:
-        logger.error(f"Failed to
+        logger.error(f"Failed to send score: {e}")


 def _flatten_dict(d: Dict[str, Any], prefix: str = "") -> Dict[str, Any]:
@@ -878,31 +1087,9 @@ def _flatten_dict(d: Dict[str, Any], prefix: str = "") -> Dict[str, Any]:
     return flattened


-def _add_metadata_event_to_span(span: Span, metadata: Dict[str, Any]) -> None:
-    """
-    Add metadata as an event to a span.
-
-    Args:
-        span: The span to add the event to
-        metadata: Dictionary of metadata (can be nested)
-    """
-    if span and hasattr(span, "add_event"):
-        event_attributes: dict[str, Any] = {
-            "event.type": "metadata",
-        }
-
-        # Flatten nested metadata and add as event attributes
-        flattened_metadata = _flatten_dict(metadata)
-        for key, value in flattened_metadata.items():
-            event_attributes[f"metadata.{key}"] = value
-        span.add_event(name="agent_run_metadata", attributes=event_attributes)
-
-
 def agent_run_metadata(metadata: Dict[str, Any]) -> None:
     """
-
-    Automatically works in both sync and async contexts.
-    Supports nested dictionaries by flattening them with dot notation.
+    Send metadata directly to the backend for the current agent run.

     Args:
         metadata: Dictionary of metadata to attach to the current span (can be nested)
```
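A minimal sketch of the reworked module-level helpers in context. It assumes tracing is initialized first, since `get_tracer()` now raises instead of auto-initializing, and relies on the tracer's `agent_run_context` yielding `(agent_run_id, transcript_id)` as shown in the hunks above.

```python
# Illustrative only: recording metadata and a score for the current agent run.
from docent.trace import agent_run_metadata, agent_run_score, get_tracer, initialize_tracing

initialize_tracing("my-collection")

with get_tracer().agent_run_context() as (agent_run_id, transcript_id):
    # Both helpers now POST directly to the backend instead of adding span events
    agent_run_metadata({"user": {"id": "123"}, "config": {"model": "gpt-4"}})
    agent_run_score("task_success", 1.0)
```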
```diff
@@ -912,28 +1099,49 @@ def agent_run_metadata(metadata: Dict[str, Any]) -> None:
         agent_run_metadata({"user": {"id": "123", "name": "John"}, "config": {"model": "gpt-4"}})
     """
     try:
-
-
-
-
-
+        tracer = get_tracer()
+        agent_run_id = tracer.get_current_agent_run_id()
+        if not agent_run_id:
+            logger.warning("No active agent run context. Metadata will not be sent.")
+            return
+
+        tracer.send_agent_run_metadata(agent_run_id, metadata)
     except Exception as e:
-        logger.error(f"Failed to
+        logger.error(f"Failed to send metadata: {e}")


-
-
-
+def transcript_metadata(
+    name: Optional[str] = None,
+    description: Optional[str] = None,
+    transcript_group_id: Optional[str] = None,
+    metadata: Optional[Dict[str, Any]] = None,
+) -> None:
     """
-
-
+    Send transcript metadata directly to the backend for the current transcript.
+
+    Args:
+        name: Optional transcript name
+        description: Optional transcript description
+        parent_transcript_id: Optional parent transcript ID
+        metadata: Optional metadata to send
+
+    Example:
+        transcript_metadata(name="data_processing", description="Process user data")
+        transcript_metadata(metadata={"user": "John", "model": "gpt-4"})
+        transcript_metadata(name="validation", parent_transcript_id="parent-123")
     """
-
-
-
-
-
-
+    try:
+        tracer = get_tracer()
+        transcript_id = tracer.get_current_transcript_id()
+        if not transcript_id:
+            logger.warning("No active transcript context. Metadata will not be sent.")
+            return
+
+        tracer.send_transcript_metadata(
+            transcript_id, name, description, transcript_group_id, metadata
+        )
+    except Exception as e:
+        logger.error(f"Failed to send transcript metadata: {e}")


 class AgentRunContext:
@@ -1084,3 +1292,359 @@ def agent_run_context(
         pass
     """
     return AgentRunContext(agent_run_id, transcript_id, metadata=metadata, **attributes)
+
+
+class TranscriptContext:
+    """Context manager for creating and managing transcripts."""
+
+    def __init__(
+        self,
+        name: Optional[str] = None,
+        transcript_id: Optional[str] = None,
+        description: Optional[str] = None,
+        metadata: Optional[Dict[str, Any]] = None,
+        transcript_group_id: Optional[str] = None,
+    ):
+        self.name = name
+        self.transcript_id = transcript_id
+        self.description = description
+        self.metadata = metadata
+        self.transcript_group_id = transcript_group_id
+        self._sync_context: Optional[Any] = None
+        self._async_context: Optional[Any] = None
+
+    def __enter__(self) -> str:
+        """Sync context manager entry."""
+        self._sync_context = get_tracer().transcript_context(
+            name=self.name,
+            transcript_id=self.transcript_id,
+            description=self.description,
+            metadata=self.metadata,
+            transcript_group_id=self.transcript_group_id,
+        )
+        return self._sync_context.__enter__()
+
+    def __exit__(self, exc_type: type[BaseException], exc_val: Any, exc_tb: Any) -> None:
+        """Sync context manager exit."""
+        if self._sync_context:
+            self._sync_context.__exit__(exc_type, exc_val, exc_tb)
+
+    async def __aenter__(self) -> str:
+        """Async context manager entry."""
+        self._async_context = get_tracer().async_transcript_context(
+            name=self.name,
+            transcript_id=self.transcript_id,
+            description=self.description,
+            metadata=self.metadata,
+            transcript_group_id=self.transcript_group_id,
+        )
+        return await self._async_context.__aenter__()
+
+    async def __aexit__(self, exc_type: Any, exc_val: Any, exc_tb: Any) -> None:
+        """Async context manager exit."""
+        if self._async_context:
+            await self._async_context.__aexit__(exc_type, exc_val, exc_tb)
+
+
+def transcript(
+    func: Optional[Callable[..., Any]] = None,
+    *,
+    name: Optional[str] = None,
+    transcript_id: Optional[str] = None,
+    description: Optional[str] = None,
+    metadata: Optional[Dict[str, Any]] = None,
+    transcript_group_id: Optional[str] = None,
+):
+    """
+    Decorator to wrap a function in a transcript context.
+    Injects transcript_id as a function attribute.
+
+    Example:
+        @transcript
+        def my_func(x, y):
+            print(my_func.docent.transcript_id)
+
+        @transcript(name="data_processing", description="Process user data")
+        def my_func_with_name(x, y):
+            print(my_func_with_name.docent.transcript_id)
+
+        @transcript(metadata={"user": "John", "model": "gpt-4"})
+        async def my_async_func(z):
+            print(my_async_func.docent.transcript_id)
+    """
+    import functools
+    import inspect
+
+    def decorator(f: Callable[..., Any]) -> Callable[..., Any]:
+        if inspect.iscoroutinefunction(f):
+
+            @functools.wraps(f)
+            async def async_wrapper(*args: Any, **kwargs: Any) -> Any:
+                async with TranscriptContext(
+                    name=name,
+                    transcript_id=transcript_id,
+                    description=description,
+                    metadata=metadata,
+                    transcript_group_id=transcript_group_id,
+                ) as transcript_id_result:
+                    # Store docent data as function attributes
+                    setattr(
+                        async_wrapper,
+                        "docent",
+                        type(
+                            "DocentData",
+                            (),
+                            {
+                                "transcript_id": transcript_id_result,
+                            },
+                        )(),
+                    )
+                    return await f(*args, **kwargs)
+
+            return async_wrapper
+        else:
+
+            @functools.wraps(f)
+            def sync_wrapper(*args: Any, **kwargs: Any) -> Any:
+                with TranscriptContext(
+                    name=name,
+                    transcript_id=transcript_id,
+                    description=description,
+                    metadata=metadata,
+                    transcript_group_id=transcript_group_id,
+                ) as transcript_id_result:
+                    # Store docent data as function attributes
+                    setattr(
+                        sync_wrapper,
+                        "docent",
+                        type(
+                            "DocentData",
+                            (),
+                            {
+                                "transcript_id": transcript_id_result,
+                            },
+                        )(),
+                    )
+                    return f(*args, **kwargs)
+
+            return sync_wrapper
+
+    if func is None:
+        return decorator
+    else:
+        return decorator(func)
+
+
+def transcript_context(
+    name: Optional[str] = None,
+    transcript_id: Optional[str] = None,
+    description: Optional[str] = None,
+    metadata: Optional[Dict[str, Any]] = None,
+    transcript_group_id: Optional[str] = None,
+) -> TranscriptContext:
+    """
+    Create a transcript context for tracing.
+
+    Args:
+        name: Optional transcript name
+        transcript_id: Optional transcript ID (auto-generated if not provided)
+        description: Optional transcript description
+        metadata: Optional metadata to attach to the transcript
+        parent_transcript_id: Optional parent transcript ID
+
+    Returns:
+        A context manager that can be used with both 'with' and 'async with'
+
+    Example:
+        # Sync usage
+        with transcript_context(name="data_processing") as transcript_id:
+            pass
+
+        # Async usage
+        async with transcript_context(description="Process user data") as transcript_id:
+            pass
+
+        # With metadata
+        with transcript_context(metadata={"user": "John", "model": "gpt-4"}) as transcript_id:
+            pass
+    """
+    return TranscriptContext(name, transcript_id, description, metadata, transcript_group_id)
+
+
+class TranscriptGroupContext:
+    """Context manager for creating and managing transcript groups."""
+
+    def __init__(
+        self,
+        name: Optional[str] = None,
+        transcript_group_id: Optional[str] = None,
+        description: Optional[str] = None,
+        metadata: Optional[Dict[str, Any]] = None,
+        parent_transcript_group_id: Optional[str] = None,
+    ):
+        self.name = name
+        self.transcript_group_id = transcript_group_id
+        self.description = description
+        self.metadata = metadata
+        self.parent_transcript_group_id = parent_transcript_group_id
+        self._sync_context: Optional[Any] = None
+        self._async_context: Optional[Any] = None
+
+    def __enter__(self) -> str:
+        """Sync context manager entry."""
+        self._sync_context = get_tracer().transcript_group_context(
+            name=self.name,
+            transcript_group_id=self.transcript_group_id,
+            description=self.description,
+            metadata=self.metadata,
+            parent_transcript_group_id=self.parent_transcript_group_id,
+        )
+        return self._sync_context.__enter__()
+
+    def __exit__(self, exc_type: type[BaseException], exc_val: Any, exc_tb: Any) -> None:
+        """Sync context manager exit."""
+        if self._sync_context:
+            self._sync_context.__exit__(exc_type, exc_val, exc_tb)
+
+    async def __aenter__(self) -> str:
+        """Async context manager entry."""
+        self._async_context = get_tracer().async_transcript_group_context(
+            name=self.name,
+            transcript_group_id=self.transcript_group_id,
+            description=self.description,
+            metadata=self.metadata,
+            parent_transcript_group_id=self.parent_transcript_group_id,
+        )
+        return await self._async_context.__aenter__()
+
+    async def __aexit__(self, exc_type: Any, exc_val: Any, exc_tb: Any) -> None:
+        """Async context manager exit."""
+        if self._async_context:
+            await self._async_context.__aexit__(exc_type, exc_val, exc_tb)
+
+
+def transcript_group(
+    func: Optional[Callable[..., Any]] = None,
+    *,
+    name: Optional[str] = None,
+    transcript_group_id: Optional[str] = None,
+    description: Optional[str] = None,
+    metadata: Optional[Dict[str, Any]] = None,
+    parent_transcript_group_id: Optional[str] = None,
+):
+    """
+    Decorator to wrap a function in a transcript group context.
+    Injects transcript_group_id as a function attribute.
+
+    Example:
+        @transcript_group
+        def my_func(x, y):
+            print(my_func.docent.transcript_group_id)
+
+        @transcript_group(name="data_processing", description="Process user data")
+        def my_func_with_name(x, y):
+            print(my_func_with_name.docent.transcript_group_id)
+
+        @transcript_group(metadata={"user": "John", "model": "gpt-4"})
+        async def my_async_func(z):
+            print(my_async_func.docent.transcript_group_id)
+    """
+    import functools
+    import inspect
+
+    def decorator(f: Callable[..., Any]) -> Callable[..., Any]:
+        if inspect.iscoroutinefunction(f):
+
+            @functools.wraps(f)
+            async def async_wrapper(*args: Any, **kwargs: Any) -> Any:
+                async with TranscriptGroupContext(
+                    name=name,
+                    transcript_group_id=transcript_group_id,
+                    description=description,
+                    metadata=metadata,
+                    parent_transcript_group_id=parent_transcript_group_id,
+                ) as transcript_group_id_result:
+                    # Store docent data as function attributes
+                    setattr(
+                        async_wrapper,
+                        "docent",
+                        type(
+                            "DocentData",
+                            (),
+                            {
+                                "transcript_group_id": transcript_group_id_result,
+                            },
+                        )(),
+                    )
+                    return await f(*args, **kwargs)
+
+            return async_wrapper
+        else:
+
+            @functools.wraps(f)
+            def sync_wrapper(*args: Any, **kwargs: Any) -> Any:
+                with TranscriptGroupContext(
+                    name=name,
+                    transcript_group_id=transcript_group_id,
+                    description=description,
+                    metadata=metadata,
+                    parent_transcript_group_id=parent_transcript_group_id,
+                ) as transcript_group_id_result:
+                    # Store docent data as function attributes
+                    setattr(
+                        sync_wrapper,
+                        "docent",
+                        type(
+                            "DocentData",
+                            (),
+                            {
+                                "transcript_group_id": transcript_group_id_result,
+                            },
+                        )(),
+                    )
+                    return f(*args, **kwargs)
+
+            return sync_wrapper
+
+    if func is None:
+        return decorator
+    else:
+        return decorator(func)
+
+
+def transcript_group_context(
+    name: Optional[str] = None,
+    transcript_group_id: Optional[str] = None,
+    description: Optional[str] = None,
+    metadata: Optional[Dict[str, Any]] = None,
+    parent_transcript_group_id: Optional[str] = None,
+) -> TranscriptGroupContext:
+    """
+    Create a transcript group context for tracing.
+
+    Args:
+        name: Optional transcript group name
+        transcript_group_id: Optional transcript group ID (auto-generated if not provided)
+        description: Optional transcript group description
+        metadata: Optional metadata to attach to the transcript group
+        parent_transcript_group_id: Optional parent transcript group ID
+
+    Returns:
+        A context manager that can be used with both 'with' and 'async with'
+
+    Example:
+        # Sync usage
+        with transcript_group_context(name="data_processing") as transcript_group_id:
+            pass
+
+        # Async usage
+        async with transcript_group_context(description="Process user data") as transcript_group_id:
+            pass
+
+        # With metadata
+        with transcript_group_context(metadata={"user": "John", "model": "gpt-4"}) as transcript_group_id:
+            pass
+    """
+    return TranscriptGroupContext(
+        name, transcript_group_id, description, metadata, parent_transcript_group_id
+    )
```