docent-python 0.1.2a0__tar.gz → 0.1.3a0__tar.gz

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.

Potentially problematic release: this version of docent-python might be problematic.

Files changed (33)
  1. {docent_python-0.1.2a0 → docent_python-0.1.3a0}/.gitignore +2 -0
  2. docent_python-0.1.3a0/PKG-INFO +25 -0
  3. {docent_python-0.1.2a0 → docent_python-0.1.3a0}/docent/trace.py +81 -67
  4. docent_python-0.1.3a0/docent/trace_alt.py +497 -0
  5. {docent_python-0.1.2a0 → docent_python-0.1.3a0}/pyproject.toml +11 -10
  6. docent_python-0.1.3a0/uv.lock +2030 -0
  7. docent_python-0.1.2a0/PKG-INFO +0 -24
  8. docent_python-0.1.2a0/uv.lock +0 -955
  9. {docent_python-0.1.2a0 → docent_python-0.1.3a0}/LICENSE.md +0 -0
  10. {docent_python-0.1.2a0 → docent_python-0.1.3a0}/README.md +0 -0
  11. {docent_python-0.1.2a0 → docent_python-0.1.3a0}/docent/__init__.py +0 -0
  12. {docent_python-0.1.2a0 → docent_python-0.1.3a0}/docent/_log_util/__init__.py +0 -0
  13. {docent_python-0.1.2a0 → docent_python-0.1.3a0}/docent/_log_util/logger.py +0 -0
  14. {docent_python-0.1.2a0 → docent_python-0.1.3a0}/docent/data_models/__init__.py +0 -0
  15. {docent_python-0.1.2a0 → docent_python-0.1.3a0}/docent/data_models/_tiktoken_util.py +0 -0
  16. {docent_python-0.1.2a0 → docent_python-0.1.3a0}/docent/data_models/agent_run.py +0 -0
  17. {docent_python-0.1.2a0 → docent_python-0.1.3a0}/docent/data_models/chat/__init__.py +0 -0
  18. {docent_python-0.1.2a0 → docent_python-0.1.3a0}/docent/data_models/chat/content.py +0 -0
  19. {docent_python-0.1.2a0 → docent_python-0.1.3a0}/docent/data_models/chat/message.py +0 -0
  20. {docent_python-0.1.2a0 → docent_python-0.1.3a0}/docent/data_models/chat/tool.py +0 -0
  21. {docent_python-0.1.2a0 → docent_python-0.1.3a0}/docent/data_models/citation.py +0 -0
  22. {docent_python-0.1.2a0 → docent_python-0.1.3a0}/docent/data_models/metadata.py +0 -0
  23. {docent_python-0.1.2a0 → docent_python-0.1.3a0}/docent/data_models/regex.py +0 -0
  24. {docent_python-0.1.2a0 → docent_python-0.1.3a0}/docent/data_models/shared_types.py +0 -0
  25. {docent_python-0.1.2a0 → docent_python-0.1.3a0}/docent/data_models/transcript.py +0 -0
  26. {docent_python-0.1.2a0 → docent_python-0.1.3a0}/docent/loaders/load_inspect.py +0 -0
  27. {docent_python-0.1.2a0 → docent_python-0.1.3a0}/docent/py.typed +0 -0
  28. {docent_python-0.1.2a0 → docent_python-0.1.3a0}/docent/samples/__init__.py +0 -0
  29. {docent_python-0.1.2a0 → docent_python-0.1.3a0}/docent/samples/load.py +0 -0
  30. {docent_python-0.1.2a0 → docent_python-0.1.3a0}/docent/samples/log.eval +0 -0
  31. {docent_python-0.1.2a0 → docent_python-0.1.3a0}/docent/samples/tb_airline.json +0 -0
  32. {docent_python-0.1.2a0 → docent_python-0.1.3a0}/docent/sdk/__init__.py +0 -0
  33. {docent_python-0.1.2a0 → docent_python-0.1.3a0}/docent/sdk/client.py +0 -0
{docent_python-0.1.2a0 → docent_python-0.1.3a0}/.gitignore
@@ -190,3 +190,5 @@ cython_debug/
 # personal
 personal/caden/*
 inspect_evals
+
+*.swp
docent_python-0.1.3a0/PKG-INFO
@@ -0,0 +1,25 @@
+Metadata-Version: 2.4
+Name: docent-python
+Version: 0.1.3a0
+Summary: Docent SDK
+Project-URL: Homepage, https://github.com/TransluceAI/docent
+Project-URL: Issues, https://github.com/TransluceAI/docent/issues
+Project-URL: Docs, https://transluce-docent.readthedocs-hosted.com/en/latest
+Author-email: Transluce <info@transluce.org>
+License-Expression: MIT
+License-File: LICENSE.md
+Requires-Python: >=3.11
+Requires-Dist: opentelemetry-api>=1.34.1
+Requires-Dist: opentelemetry-exporter-otlp-proto-grpc>=1.34.1
+Requires-Dist: opentelemetry-exporter-otlp-proto-http>=1.34.1
+Requires-Dist: opentelemetry-instrumentation-anthropic>=0.44.1
+Requires-Dist: opentelemetry-instrumentation-bedrock>=0.44.1
+Requires-Dist: opentelemetry-instrumentation-langchain>=0.44.1
+Requires-Dist: opentelemetry-instrumentation-openai>=0.44.1
+Requires-Dist: opentelemetry-instrumentation-threading>=0.55b1
+Requires-Dist: opentelemetry-sdk>=1.34.1
+Requires-Dist: pydantic>=2.11.7
+Requires-Dist: pyyaml>=6.0.2
+Requires-Dist: tiktoken>=0.7.0
+Requires-Dist: tqdm>=4.67.1
+Requires-Dist: traceloop-sdk>=0.44.1
{docent_python-0.1.2a0 → docent_python-0.1.3a0}/docent/trace.py
@@ -15,6 +15,7 @@ from contextvars import ContextVar, Token
 from typing import Any, AsyncIterator, Callable, Dict, Iterator, List, Optional, Union

 from opentelemetry import trace
+from opentelemetry.context import Context
 from opentelemetry.exporter.otlp.proto.grpc.trace_exporter import OTLPSpanExporter as GRPCExporter
 from opentelemetry.exporter.otlp.proto.http.trace_exporter import OTLPSpanExporter as HTTPExporter
 from opentelemetry.instrumentation.anthropic import AnthropicInstrumentor
@@ -23,17 +24,18 @@ from opentelemetry.instrumentation.langchain import LangchainInstrumentor
 from opentelemetry.instrumentation.openai import OpenAIInstrumentor
 from opentelemetry.instrumentation.threading import ThreadingInstrumentor
 from opentelemetry.sdk.resources import Resource
-from opentelemetry.sdk.trace import ReadableSpan, TracerProvider
+from opentelemetry.sdk.trace import ReadableSpan, SpanProcessor, TracerProvider
 from opentelemetry.sdk.trace.export import (
     BatchSpanProcessor,
     ConsoleSpanExporter,
     SimpleSpanProcessor,
 )
+from opentelemetry.trace import Span

 # Configure logging
 logging.basicConfig(level=logging.INFO)
 logger = logging.getLogger(__name__)
-logging.disable()
+logger.disabled = True

 # Default configuration
 DEFAULT_ENDPOINT = "https://api.docent.transluce.org/rest/telemetry"
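
Note on the logging change above: 0.1.2a0 called logging.disable(), which raises the global disable level and silences every logger in the host process; 0.1.3a0 only mutes the module's own logger. A minimal standalone sketch of the difference (not part of the package):

    import logging

    logging.basicConfig(level=logging.INFO)
    docent_logger = logging.getLogger("docent.trace")
    app_logger = logging.getLogger("myapp")

    # Old approach: a process-wide switch. With no argument it disables
    # everything up to CRITICAL, so unrelated loggers go quiet too.
    logging.disable()
    app_logger.error("suppressed, even though this logger is unrelated")
    logging.disable(logging.NOTSET)  # restore

    # New approach: only the SDK's logger is muted.
    docent_logger.disabled = True
    docent_logger.info("suppressed")
    app_logger.info("still emitted")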
@@ -130,26 +132,41 @@ class DocentTracer:
         self.span_postprocess_callback = span_postprocess_callback

         # Use separate tracer provider to avoid interfering with existing OTEL setup
-        self._tracer_provider: Optional[Any] = None
-        self._root_span: Optional[Any] = None
-        self._root_context: Optional[Any] = None
-        self._tracer: Optional[Any] = None
+        self._tracer_provider: Optional[TracerProvider] = None
+        self._root_span: Optional[Span] = None
+        self._root_context: Context = Context()
+        self._tracer: Optional[trace.Tracer] = None
         self._initialized: bool = False
         self._cleanup_registered: bool = False
         self._disabled: bool = False
-        self._spans_processors: List[Any] = []
+        self._spans_processors: List[Union[BatchSpanProcessor, SimpleSpanProcessor]] = []

         # Context variables for agent_run_id and transcript_id (thread/async safe)
-        self._collection_id_var: ContextVar[str] = contextvars.ContextVar("collection_id")
-        self._agent_run_id_var: ContextVar[str] = contextvars.ContextVar("agent_run_id")
-        self._transcript_id_var: ContextVar[str] = contextvars.ContextVar("transcript_id")
-        self._attributes_var: ContextVar[dict[str, Any]] = contextvars.ContextVar("attributes")
+        self._collection_id_var: ContextVar[str] = contextvars.ContextVar("docent_collection_id")
+        self._agent_run_id_var: ContextVar[str] = contextvars.ContextVar("docent_agent_run_id")
+        self._transcript_id_var: ContextVar[str] = contextvars.ContextVar("docent_transcript_id")
+        self._attributes_var: ContextVar[dict[str, Any]] = contextvars.ContextVar(
+            "docent_attributes"
+        )
         # Store atomic span order counters per transcript_id to persist across context switches
         self._transcript_counters: defaultdict[str, itertools.count[int]] = defaultdict(
             lambda: itertools.count(0)
         )
         self._transcript_counter_lock = threading.Lock()

+    def get_current_docent_span(self) -> Optional[Span]:
+        """
+        Get the current span from our isolated context.
+        This never touches the global OpenTelemetry context.
+        """
+        if self._root_context is None:
+            return None
+
+        try:
+            return trace.get_current_span(context=self._root_context)
+        except Exception:
+            return None
+
     def _register_cleanup(self):
         """Register cleanup handlers."""
         if self._cleanup_registered:
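
The new get_current_docent_span helper resolves the active span against the tracer's isolated root context instead of the global OpenTelemetry context. A hedged usage sketch (it assumes get_tracer is importable from docent.trace, as the calls later in this diff suggest):

    from typing import Optional

    from opentelemetry.trace import Span

    from docent.trace import get_tracer  # assumed export; get_tracer() appears later in this diff

    # Fetch whatever span is current in Docent's isolated context and
    # attach an event to it, without touching the global OTEL context.
    current: Optional[Span] = get_tracer().get_current_docent_span()
    if current is not None and current.is_recording():
        current.add_event("checkpoint", {"step": 1})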
@@ -176,7 +193,7 @@ class DocentTracer:
         with self._transcript_counter_lock:
             return next(self._transcript_counters[transcript_id])

-    def _signal_handler(self, signum: int, frame: Any):
+    def _signal_handler(self, signum: int, frame: Optional[object]):
         """Handle shutdown signals."""
         self.cleanup()
         sys.exit(0)
@@ -213,13 +230,15 @@ class DocentTracer:

         return exporters

-    def _create_span_processor(self, exporter: Any) -> Any:
+    def _create_span_processor(
+        self, exporter: Union[HTTPExporter, GRPCExporter, ConsoleSpanExporter]
+    ) -> Union[SimpleSpanProcessor, BatchSpanProcessor]:
         """Create appropriate span processor based on configuration."""
         if self.disable_batch or _is_notebook():
-            simple_processor: Any = SimpleSpanProcessor(exporter)
+            simple_processor: SimpleSpanProcessor = SimpleSpanProcessor(exporter)
             return simple_processor
         else:
-            batch_processor: Any = BatchSpanProcessor(exporter)
+            batch_processor: BatchSpanProcessor = BatchSpanProcessor(exporter)
             return batch_processor

     def initialize(self):
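
The retyped _create_span_processor keeps the same selection rule: a synchronous SimpleSpanProcessor when batching is disabled or when running in a notebook, a BatchSpanProcessor otherwise. A standalone sketch of that pattern against the OpenTelemetry SDK (the function and flag names here are illustrative, not part of the package):

    from typing import Union

    from opentelemetry.sdk.trace.export import (
        BatchSpanProcessor,
        ConsoleSpanExporter,
        SimpleSpanProcessor,
        SpanExporter,
    )

    def make_processor(
        exporter: SpanExporter, disable_batch: bool, in_notebook: bool
    ) -> Union[SimpleSpanProcessor, BatchSpanProcessor]:
        # SimpleSpanProcessor exports each span as it ends (predictable in notebooks);
        # BatchSpanProcessor queues spans and exports them from a background thread.
        if disable_batch or in_notebook:
            return SimpleSpanProcessor(exporter)
        return BatchSpanProcessor(exporter)

    processor = make_processor(ConsoleSpanExporter(), disable_batch=False, in_notebook=False)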
@@ -234,11 +253,11 @@ class DocentTracer:
         )

         # Add custom span processor for run_id and transcript_id
-        class ContextSpanProcessor:
+        class ContextSpanProcessor(SpanProcessor):
             def __init__(self, manager: "DocentTracer"):
                 self.manager: "DocentTracer" = manager

-            def on_start(self, span: Any, parent_context: Any = None) -> None:
+            def on_start(self, span: Span, parent_context: Optional[Context] = None) -> None:
                 # Add collection_id, agent_run_id, transcript_id, and any other current attributes
                 # Always add collection_id as it's always available
                 span.set_attribute("collection_id", self.manager.collection_id)
@@ -276,14 +295,14 @@ class DocentTracer:
                     # attributes not available, skip them
                     pass

-            def on_end(self, span: Any) -> None:
+            def on_end(self, span: ReadableSpan) -> None:
                 pass

             def shutdown(self) -> None:
                 pass

-            def force_flush(self) -> None:
-                pass
+            def force_flush(self, timeout_millis: Optional[float] = None) -> bool:
+                return True

         # Configure span exporters for our isolated provider
         if self.enable_otlp_export:
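
ContextSpanProcessor now subclasses SpanProcessor, so its hooks line up with the SDK interface: on_start receives the live Span plus an optional Context, on_end receives a ReadableSpan, and force_flush takes a timeout and returns a bool. A minimal standalone processor with the same shape (the attribute it stamps is illustrative, not the package's behavior):

    from typing import Optional

    from opentelemetry.context import Context
    from opentelemetry.sdk.trace import ReadableSpan, SpanProcessor
    from opentelemetry.trace import Span

    class StampingProcessor(SpanProcessor):
        """Stamp every span started under this provider with a fixed attribute."""

        def __init__(self, deployment: str) -> None:
            self.deployment = deployment

        def on_start(self, span: Span, parent_context: Optional[Context] = None) -> None:
            span.set_attribute("deployment", self.deployment)  # illustrative attribute

        def on_end(self, span: ReadableSpan) -> None:
            pass  # finished spans are handled by the exporters

        def shutdown(self) -> None:
            pass

        def force_flush(self, timeout_millis: Optional[float] = None) -> bool:
            return True  # no internal queue, so flushing always succeeds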
@@ -294,7 +313,9 @@ class DocentTracer:
             if otlp_exporters:
                 # Create a processor for each exporter
                 for exporter in otlp_exporters:
-                    otlp_processor: Any = self._create_span_processor(exporter)
+                    otlp_processor: Union[SimpleSpanProcessor, BatchSpanProcessor] = (
+                        self._create_span_processor(exporter)
+                    )
                     self._tracer_provider.add_span_processor(otlp_processor)
                     self._spans_processors.append(otlp_processor)

@@ -305,8 +326,10 @@ class DocentTracer:
                 logger.warning("Failed to initialize OTLP exporter")

         if self.enable_console_export:
-            console_exporter: Any = ConsoleSpanExporter()
-            console_processor: Any = self._create_span_processor(console_exporter)
+            console_exporter: ConsoleSpanExporter = ConsoleSpanExporter()
+            console_processor: Union[SimpleSpanProcessor, BatchSpanProcessor] = (
+                self._create_span_processor(console_exporter)
+            )
             self._tracer_provider.add_span_processor(console_processor)
             self._spans_processors.append(console_processor)

@@ -318,9 +341,6 @@ class DocentTracer:
         self._tracer = self._tracer_provider.get_tracer(__name__)

         # Start root span
-        if self._tracer is None:
-            raise RuntimeError("Failed to get tracer from provider")
-
         self._root_span = self._tracer.start_span(
             "application_session",
             attributes={
@@ -328,8 +348,9 @@ class DocentTracer:
                 "session.type": "application_root",
             },
         )
-        if self._root_span is not None:
-            self._root_context = trace.set_span_in_context(self._root_span)
+        self._root_context = trace.set_span_in_context(
+            self._root_span, context=self._root_context
+        )

         # Instrument threading for better context propagation
         try:
@@ -401,7 +422,7 @@ class DocentTracer:
                 self._root_span.end()

             self._root_span = None
-            self._root_context = None
+            self._root_context = None  # type: ignore

             # Shutdown our isolated tracer provider
             if self._tracer_provider:
@@ -451,21 +472,21 @@ class DocentTracer:
         self.close()

     @property
-    def tracer(self) -> Optional[Any]:
+    def tracer(self) -> Optional[trace.Tracer]:
         """Get the tracer instance."""
         if not self._initialized:
             self.initialize()
         return self._tracer

     @property
-    def root_context(self) -> Optional[Any]:
+    def root_context(self) -> Optional[Context]:
         """Get the root context."""
         if not self._initialized:
             self.initialize()
         return self._root_context

     @contextmanager
-    def span(self, name: str, attributes: Optional[Dict[str, Any]] = None) -> Iterator[Any]:
+    def span(self, name: str, attributes: Optional[Dict[str, Any]] = None) -> Iterator[Span]:
         """
         Context manager for creating spans with attributes.
         """
@@ -485,7 +506,7 @@ class DocentTracer:
     @asynccontextmanager
     async def async_span(
         self, name: str, attributes: Optional[Dict[str, Any]] = None
-    ) -> AsyncIterator[Any]:
+    ) -> AsyncIterator[Span]:
         """
         Async context manager for creating spans with attributes.

@@ -513,7 +534,7 @@ class DocentTracer:
         transcript_id: Optional[str] = None,
         metadata: Optional[Dict[str, Any]] = None,
         **attributes: Any,
-    ) -> Iterator[Any]:
+    ) -> Iterator[tuple[str, str]]:
         """
         Context manager for setting up an agent run context.

@@ -524,7 +545,7 @@ class DocentTracer:
             **attributes: Additional attributes to add to the context

         Yields:
-            Tuple of (context, agent_run_id, transcript_id)
+            Tuple of (agent_run_id, transcript_id)
         """
         if not self._initialized:
             self.initialize()
@@ -556,8 +577,7 @@ class DocentTracer:
                 if metadata:
                     _add_metadata_event_to_span(_span, metadata)

-                context = trace.get_current_span().get_span_context()
-                yield context, agent_run_id, transcript_id
+                yield agent_run_id, transcript_id
         finally:
             self._agent_run_id_var.reset(agent_run_id_token)
             self._transcript_id_var.reset(transcript_id_token)
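
Callers of agent_run_context previously unpacked a 3-tuple; as of this version only the two IDs are yielded. A hedged migration sketch (it assumes agent_run_context is importable from docent.trace, matching the docstring examples later in this diff):

    from docent.trace import agent_run_context

    # 0.1.2a0 yielded (context, agent_run_id, transcript_id);
    # 0.1.3a0 drops the SpanContext and yields only the IDs.
    with agent_run_context(metadata={"task": "demo"}) as (agent_run_id, transcript_id):
        print(agent_run_id, transcript_id)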
@@ -570,7 +590,7 @@ class DocentTracer:
         transcript_id: Optional[str] = None,
         metadata: Optional[Dict[str, Any]] = None,
         **attributes: Any,
-    ) -> AsyncIterator[Any]:
+    ) -> AsyncIterator[tuple[str, str]]:
         """
         Async context manager for setting up an agent run context.
         Modifies the OpenTelemetry context so all spans inherit agent_run_id and transcript_id.
@@ -582,7 +602,7 @@ class DocentTracer:
             **attributes: Additional attributes to add to the context

         Yields:
-            Tuple of (context, agent_run_id, transcript_id)
+            Tuple of (agent_run_id, transcript_id)
         """
         if not self._initialized:
             self.initialize()
@@ -596,9 +616,9 @@ class DocentTracer:
             transcript_id = str(uuid.uuid4())

         # Set context variables for this execution context
-        agent_run_id_token: Any = self._agent_run_id_var.set(agent_run_id)
-        transcript_id_token: Any = self._transcript_id_var.set(transcript_id)
-        attributes_token: Any = self._attributes_var.set(attributes)
+        agent_run_id_token: Token[str] = self._agent_run_id_var.set(agent_run_id)
+        transcript_id_token: Token[str] = self._transcript_id_var.set(transcript_id)
+        attributes_token: Token[dict[str, Any]] = self._attributes_var.set(attributes)

         try:
             # Create a span with the agent run attributes
@@ -614,8 +634,7 @@ class DocentTracer:
                 if metadata:
                     _add_metadata_event_to_span(_span, metadata)

-                context = trace.get_current_span().get_span_context()
-                yield context, agent_run_id, transcript_id
+                yield agent_run_id, transcript_id
         finally:
             self._agent_run_id_var.reset(agent_run_id_token)
             self._transcript_id_var.reset(transcript_id_token)
@@ -661,7 +680,7 @@ class DocentTracer:

         return span, agent_run_id, transcript_id

-    def stop_transcript(self, span: Any) -> None:
+    def stop_transcript(self, span: Span) -> None:
         """
         Manually stop a transcript span.

@@ -671,7 +690,7 @@ class DocentTracer:
         if span and hasattr(span, "end"):
             span.end()

-    def start_span(self, name: str, attributes: Optional[Dict[str, Any]] = None) -> Any:
+    def start_span(self, name: str, attributes: Optional[Dict[str, Any]] = None) -> Span:
         """
         Manually start a span.

@@ -690,13 +709,13 @@ class DocentTracer:

         span_attributes: dict[str, Any] = attributes or {}

-        span: Any = self._tracer.start_span(
+        span: Span = self._tracer.start_span(
             name, context=self._root_context, attributes=span_attributes
         )

         return span

-    def stop_span(self, span: Any) -> None:
+    def stop_span(self, span: Span) -> None:
         """
         Manually stop a span.

@@ -829,7 +848,8 @@ def agent_run_score(name: str, score: float, attributes: Optional[Dict[str, Any]
         attributes: Optional additional attributes for the score event
     """
     try:
-        current_span: Any = trace.get_current_span()
+        # Get current span from our isolated context instead of global context
+        current_span: Optional[Span] = get_tracer().get_current_docent_span()
         if current_span and hasattr(current_span, "add_event"):
             event_attributes: dict[str, Any] = {
                 "score.name": name,
@@ -858,7 +878,7 @@ def _flatten_dict(d: Dict[str, Any], prefix: str = "") -> Dict[str, Any]:
     return flattened


-def _add_metadata_event_to_span(span: Any, metadata: Dict[str, Any]) -> None:
+def _add_metadata_event_to_span(span: Span, metadata: Dict[str, Any]) -> None:
     """
     Add metadata as an event to a span.

@@ -892,7 +912,7 @@ def agent_run_metadata(metadata: Dict[str, Any]) -> None:
         agent_run_metadata({"user": {"id": "123", "name": "John"}, "config": {"model": "gpt-4"}})
     """
     try:
-        current_span: Any = trace.get_current_span()
+        current_span: Optional[Span] = get_tracer().get_current_docent_span()
         if current_span:
             _add_metadata_event_to_span(current_span, metadata)
         else:
@@ -903,7 +923,7 @@ def agent_run_metadata(metadata: Dict[str, Any]) -> None:

 # Unified functions that automatically detect context
 @asynccontextmanager
-async def span(name: str, attributes: Optional[Dict[str, Any]] = None) -> AsyncIterator[Any]:
+async def span(name: str, attributes: Optional[Dict[str, Any]] = None) -> AsyncIterator[Span]:
     """
     Automatically choose sync or async span based on context.
     Can be used with both 'with' and 'async with'.
@@ -933,7 +953,7 @@ class AgentRunContext:
         self._sync_context: Optional[Any] = None
         self._async_context: Optional[Any] = None

-    def __enter__(self) -> Any:
+    def __enter__(self) -> tuple[str, str]:
         """Sync context manager entry."""
         self._sync_context = get_tracer().agent_run_context(
             self.agent_run_id, self.transcript_id, metadata=self.metadata, **self.attributes
@@ -945,7 +965,7 @@ class AgentRunContext:
         if self._sync_context:
             self._sync_context.__exit__(exc_type, exc_val, exc_tb)

-    async def __aenter__(self) -> Any:
+    async def __aenter__(self) -> tuple[str, str]:
         """Async context manager entry."""
         self._async_context = get_tracer().async_agent_run_context(
             self.agent_run_id, self.transcript_id, metadata=self.metadata, **self.attributes
@@ -963,13 +983,13 @@ def agent_run(
 ):
     """
     Decorator to wrap a function in an agent_run_context (sync or async).
-    Injects context, agent_run_id, and transcript_id as function attributes.
+    Injects agent_run_id and transcript_id as function attributes.
     Optionally accepts metadata to attach to the agent run context.

     Example:
         @agent_run
         def my_func(x, y):
-            print(my_func.docent.context, my_func.docent.agent_run_id, my_func.docent.transcript_id)
+            print(my_func.docent.agent_run_id, my_func.docent.transcript_id)

         @agent_run(metadata={"user": "John", "model": "gpt-4"})
         def my_func_with_metadata(x, y):
@@ -987,11 +1007,7 @@ def agent_run(

         @functools.wraps(f)
         async def async_wrapper(*args: Any, **kwargs: Any) -> Any:
-            async with AgentRunContext(metadata=metadata) as (
-                context,
-                agent_run_id,
-                transcript_id,
-            ):
+            async with AgentRunContext(metadata=metadata) as (agent_run_id, transcript_id):
                 # Store docent data as function attributes
                 setattr(
                     async_wrapper,
@@ -1000,7 +1016,6 @@ def agent_run(
                     "DocentData",
                     (),
                     {
-                        "context": context,
                         "agent_run_id": agent_run_id,
                         "transcript_id": transcript_id,
                     },
@@ -1013,7 +1028,7 @@ def agent_run(

         @functools.wraps(f)
         def sync_wrapper(*args: Any, **kwargs: Any) -> Any:
-            with AgentRunContext(metadata=metadata) as (context, agent_run_id, transcript_id):
+            with AgentRunContext(metadata=metadata) as (agent_run_id, transcript_id):
                 # Store docent data as function attributes
                 setattr(
                     sync_wrapper,
@@ -1022,7 +1037,6 @@ def agent_run(
                     "DocentData",
                     (),
                     {
-                        "context": context,
                         "agent_run_id": agent_run_id,
                         "transcript_id": transcript_id,
                     },
@@ -1058,15 +1072,15 @@ def agent_run_context(

     Example:
         # Sync usage
-        with agent_run_context() as (context, agent_run_id, transcript_id):
+        with agent_run_context() as (agent_run_id, transcript_id):
             pass

         # Async usage
-        async with agent_run_context() as (context, agent_run_id, transcript_id):
+        async with agent_run_context() as (agent_run_id, transcript_id):
             pass

         # With metadata
-        with agent_run_context(metadata={"user": "John", "model": "gpt-4"}) as (context, agent_run_id, transcript_id):
+        with agent_run_context(metadata={"user": "John", "model": "gpt-4"}) as (agent_run_id, transcript_id):
             pass
     """
     return AgentRunContext(agent_run_id, transcript_id, metadata=metadata, **attributes)