docent-python 0.1.0a9__py3-none-any.whl → 0.1.2a0__py3-none-any.whl

This diff compares the contents of two publicly released versions of this package, as published to a supported registry. It is provided for informational purposes only.

Potentially problematic release: this version of docent-python might be problematic.

@@ -151,7 +151,9 @@ class Transcript(BaseModel):
 
             # System messages are their own unit
             if role == "system":
-                assert not current_unit, "System message should be the first message"
+                # Start a new unit if there's a current unit in progress
+                if current_unit:
+                    _start_new_unit()
                 units.append([i])
 
             # User message always starts a new unit UNLESS the previous message was a user message
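
For context, a standalone sketch of the behavioral change (the message list and grouping loop below are illustrative, not the actual Transcript implementation): a system message that arrives mid-transcript now closes the unit in progress instead of failing the old assertion.

# Illustrative only: mirrors the new system-message rule from the hunk above.
messages = [
    {"role": "system"},     # initial system prompt
    {"role": "user"},
    {"role": "assistant"},
    {"role": "system"},     # mid-transcript system message: previously an AssertionError
    {"role": "user"},
]

units: list[list[int]] = []
current_unit: list[int] = []

def _start_new_unit() -> None:
    # Close out the unit in progress.
    units.append(list(current_unit))
    current_unit.clear()

for i, msg in enumerate(messages):
    if msg["role"] == "system":
        if current_unit:
            _start_new_unit()
        units.append([i])   # system messages are their own unit
    else:
        current_unit.append(i)

if current_unit:
    _start_new_unit()

print(units)  # [[0], [1, 2], [3], [4]]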
docent/sdk/client.py CHANGED
@@ -24,8 +24,8 @@ class Docent:
 
     def __init__(
         self,
-        server_url: str = "https://aws-docent-backend.transluce.org",
-        web_url: str = "https://docent-alpha.transluce.org",
+        server_url: str = "https://api.docent.transluce.org",
+        web_url: str = "https://docent.transluce.org",
         api_key: str | None = None,
     ):
         self._server_url = server_url.rstrip("/") + "/rest"
@@ -291,3 +291,50 @@ class Docent:
         # We do this to avoid metadata validation failing
         # TODO(mengk): kinda hacky
         return AgentRunWithoutMetadataValidator.model_validate(response.json())
+
+    def make_collection_public(self, collection_id: str) -> dict[str, Any]:
+        """Make a collection publicly accessible to anyone with the link.
+
+        Args:
+            collection_id: ID of the Collection to make public.
+
+        Returns:
+            dict: API response data.
+
+        Raises:
+            requests.exceptions.HTTPError: If the API request fails.
+        """
+        url = f"{self._server_url}/{collection_id}/make_public"
+        response = self._session.post(url)
+        response.raise_for_status()
+
+        logger.info(f"Successfully made Collection '{collection_id}' public")
+        return response.json()
+
+    def share_collection_with_email(self, collection_id: str, email: str) -> dict[str, Any]:
+        """Share a collection with a specific user by email address.
+
+        Args:
+            collection_id: ID of the Collection to share.
+            email: Email address of the user to share with.
+
+        Returns:
+            dict: API response data.
+
+        Raises:
+            requests.exceptions.HTTPError: If the API request fails.
+        """
+        url = f"{self._server_url}/{collection_id}/share_with_email"
+        payload = {"email": email}
+        response = self._session.post(url, json=payload)
+
+        try:
+            response.raise_for_status()
+        except requests.exceptions.HTTPError:
+            if response.status_code == 404:
+                raise ValueError(f"The user you are trying to share with ({email}) does not exist.")
+            else:
+                raise  # Re-raise the original exception
+
+        logger.info(f"Successfully shared Collection '{collection_id}' with {email}")
+        return response.json()
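
For context, a minimal usage sketch of the two new sharing methods (the API key, collection ID, and email below are placeholders; the collection is assumed to already exist):

from docent.sdk.client import Docent

client = Docent(api_key="YOUR_DOCENT_API_KEY")  # placeholder key
collection_id = "your-collection-id"            # placeholder Collection ID

# Anyone with the link can now view the collection.
client.make_collection_public(collection_id)

# Share with a specific user; raises ValueError if no account matches the email.
client.share_collection_with_email(collection_id, "colleague@example.com")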
docent/trace.py ADDED
@@ -0,0 +1,1072 @@
+import asyncio
+import atexit
+import contextvars
+import inspect
+import itertools
+import logging
+import os
+import signal
+import sys
+import threading
+import uuid
+from collections import defaultdict
+from contextlib import asynccontextmanager, contextmanager
+from contextvars import ContextVar, Token
+from typing import Any, AsyncIterator, Callable, Dict, Iterator, List, Optional, Union
+
+from opentelemetry import trace
+from opentelemetry.exporter.otlp.proto.grpc.trace_exporter import OTLPSpanExporter as GRPCExporter
+from opentelemetry.exporter.otlp.proto.http.trace_exporter import OTLPSpanExporter as HTTPExporter
+from opentelemetry.instrumentation.anthropic import AnthropicInstrumentor
+from opentelemetry.instrumentation.bedrock import BedrockInstrumentor
+from opentelemetry.instrumentation.langchain import LangchainInstrumentor
+from opentelemetry.instrumentation.openai import OpenAIInstrumentor
+from opentelemetry.instrumentation.threading import ThreadingInstrumentor
+from opentelemetry.sdk.resources import Resource
+from opentelemetry.sdk.trace import ReadableSpan, TracerProvider
+from opentelemetry.sdk.trace.export import (
+    BatchSpanProcessor,
+    ConsoleSpanExporter,
+    SimpleSpanProcessor,
+)
+
+# Configure logging
+logging.basicConfig(level=logging.INFO)
+logger = logging.getLogger(__name__)
+logging.disable()
+
+# Default configuration
+DEFAULT_ENDPOINT = "https://api.docent.transluce.org/rest/telemetry"
+
+
+def _is_async_context() -> bool:
+    """Detect if we're in an async context."""
+    try:
+        # Check if we're in an async function
+        frame = inspect.currentframe()
+        while frame:
+            if frame.f_code.co_flags & inspect.CO_COROUTINE:
+                return True
+            frame = frame.f_back
+        return False
+    except:
+        return False
+
+
+def _is_running_in_event_loop() -> bool:
+    """Check if we're running in an event loop."""
+    try:
+        asyncio.get_running_loop()
+        return True
+    except RuntimeError:
+        return False
+
+
+def _is_notebook() -> bool:
+    """Check if we're running in a Jupyter notebook."""
+    try:
+        return "ipykernel" in sys.modules
+    except:
+        return False
+
+
+class DocentTracer:
+    """Manages Docent tracing setup and provides tracing utilities."""
+
+    def __init__(
+        self,
+        collection_name: str = "default-collection-name",
+        collection_id: Optional[str] = None,
+        agent_run_id: Optional[str] = None,
+        endpoint: Union[str, List[str]] = DEFAULT_ENDPOINT,
+        headers: Optional[Dict[str, str]] = None,
+        api_key: Optional[str] = None,
+        enable_console_export: bool = False,
+        enable_otlp_export: bool = True,
+        disable_batch: bool = False,
+        span_postprocess_callback: Optional[Callable[[ReadableSpan], None]] = None,
+    ):
+        """
+        Initialize Docent tracing manager.
+
+        Args:
+            collection_name: Name of the collection for resource attributes
+            collection_id: Optional collection ID (auto-generated if not provided)
+            agent_run_id: Optional agent_run_id to use for code outside of an agent run context (auto-generated if not provided)
+            endpoint: OTLP endpoint URL(s) - can be a single string or list of strings for multiple endpoints
+            headers: Optional headers for authentication
+            api_key: Optional API key for bearer token authentication (takes precedence over env var)
+            enable_console_export: Whether to export to console
+            enable_otlp_export: Whether to export to OTLP endpoint
+            disable_batch: Whether to disable batch processing (use SimpleSpanProcessor)
+            span_postprocess_callback: Optional callback for post-processing spans
+        """
+        self.collection_name: str = collection_name
+        self.collection_id: str = collection_id if collection_id else str(uuid.uuid4())
+        self.default_agent_run_id: str = agent_run_id if agent_run_id else str(uuid.uuid4())
+        self.endpoints: List[str]
+
+        # Handle endpoint parameter - convert to list if it's a string
+        if isinstance(endpoint, str):
+            self.endpoints = [endpoint]
+        else:
+            self.endpoints = endpoint
+
+        # Build headers with authentication if provided
+        self.headers = headers or {}
+
+        # Handle API key authentication (takes precedence over custom headers)
+        if api_key:
+            self.headers["Authorization"] = f"Bearer {api_key}"
+            logger.info(f"Using API key authentication for {self.collection_name}")
+        elif self.headers.get("Authorization"):
+            logger.info(f"Using custom Authorization header for {self.collection_name}")
+        else:
+            logger.info(f"No authentication configured for {self.collection_name}")
+
+        self.enable_console_export = enable_console_export
+        self.enable_otlp_export = enable_otlp_export
+        self.disable_batch = disable_batch
+        self.span_postprocess_callback = span_postprocess_callback
+
+        # Use separate tracer provider to avoid interfering with existing OTEL setup
+        self._tracer_provider: Optional[Any] = None
+        self._root_span: Optional[Any] = None
+        self._root_context: Optional[Any] = None
+        self._tracer: Optional[Any] = None
+        self._initialized: bool = False
+        self._cleanup_registered: bool = False
+        self._disabled: bool = False
+        self._spans_processors: List[Any] = []
+
+        # Context variables for agent_run_id and transcript_id (thread/async safe)
+        self._collection_id_var: ContextVar[str] = contextvars.ContextVar("collection_id")
+        self._agent_run_id_var: ContextVar[str] = contextvars.ContextVar("agent_run_id")
+        self._transcript_id_var: ContextVar[str] = contextvars.ContextVar("transcript_id")
+        self._attributes_var: ContextVar[dict[str, Any]] = contextvars.ContextVar("attributes")
+        # Store atomic span order counters per transcript_id to persist across context switches
+        self._transcript_counters: defaultdict[str, itertools.count[int]] = defaultdict(
+            lambda: itertools.count(0)
+        )
+        self._transcript_counter_lock = threading.Lock()
+
+    def _register_cleanup(self):
+        """Register cleanup handlers."""
+        if self._cleanup_registered:
+            return
+
+        # Register atexit handler
+        atexit.register(self.cleanup)
+
+        # Register signal handlers for graceful shutdown
+        try:
+            signal.signal(signal.SIGINT, self._signal_handler)
+            signal.signal(signal.SIGTERM, self._signal_handler)
+        except (ValueError, OSError):
+            # Signal handlers might not work in all environments
+            pass
+
+        self._cleanup_registered = True
+
+    def _next_span_order(self, transcript_id: str) -> int:
+        """
+        Get the next atomic span order for a given transcript_id.
+        Thread-safe and guaranteed to be unique and monotonic.
+        """
+        with self._transcript_counter_lock:
+            return next(self._transcript_counters[transcript_id])
+
+    def _signal_handler(self, signum: int, frame: Any):
+        """Handle shutdown signals."""
+        self.cleanup()
+        sys.exit(0)
+
+    def _init_spans_exporter(self, endpoint: str) -> Optional[Union[HTTPExporter, GRPCExporter]]:
+        """Initialize the appropriate span exporter based on endpoint."""
+        if not self.enable_otlp_export:
+            return None
+
+        try:
+            if "http" in endpoint.lower() or "https" in endpoint.lower():
+                http_exporter: HTTPExporter = HTTPExporter(
+                    endpoint=f"{endpoint}/v1/traces", headers=self.headers
+                )
+                return http_exporter
+            else:
+                grpc_exporter: GRPCExporter = GRPCExporter(endpoint=endpoint, headers=self.headers)
+                return grpc_exporter
+        except Exception as e:
+            logger.error(f"Failed to initialize span exporter for {endpoint}: {e}")
+            return None
+
+    def _init_spans_exporters(self) -> List[Union[HTTPExporter, GRPCExporter]]:
+        """Initialize span exporters for all endpoints."""
+        exporters: List[Union[HTTPExporter, GRPCExporter]] = []
+
+        for endpoint in self.endpoints:
+            exporter = self._init_spans_exporter(endpoint)
+            if exporter:
+                exporters.append(exporter)
+                logger.info(f"Initialized exporter for endpoint: {endpoint}")
+            else:
+                logger.warning(f"Failed to initialize exporter for endpoint: {endpoint}")
+
+        return exporters
+
+    def _create_span_processor(self, exporter: Any) -> Any:
+        """Create appropriate span processor based on configuration."""
+        if self.disable_batch or _is_notebook():
+            simple_processor: Any = SimpleSpanProcessor(exporter)
+            return simple_processor
+        else:
+            batch_processor: Any = BatchSpanProcessor(exporter)
+            return batch_processor
+
+    def initialize(self):
+        """Initialize Docent tracing setup."""
+        if self._initialized or self._disabled:
+            return
+
+        try:
+            # Create our own isolated tracer provider
+            self._tracer_provider = TracerProvider(
+                resource=Resource.create({"service.name": self.collection_name})
+            )
+
+            # Add custom span processor for run_id and transcript_id
+            class ContextSpanProcessor:
+                def __init__(self, manager: "DocentTracer"):
+                    self.manager: "DocentTracer" = manager
+
+                def on_start(self, span: Any, parent_context: Any = None) -> None:
+                    # Add collection_id, agent_run_id, transcript_id, and any other current attributes
+                    # Always add collection_id as it's always available
+                    span.set_attribute("collection_id", self.manager.collection_id)
+
+                    # Handle agent_run_id
+                    try:
+                        agent_run_id: str = self.manager._agent_run_id_var.get()
+                        if agent_run_id:
+                            span.set_attribute("agent_run_id", agent_run_id)
+                        else:
+                            span.set_attribute("agent_run_id_default", True)
+                            span.set_attribute("agent_run_id", self.manager.default_agent_run_id)
+                    except LookupError:
+                        span.set_attribute("agent_run_id_default", True)
+                        span.set_attribute("agent_run_id", self.manager.default_agent_run_id)
+
+                    # Handle transcript_id
+                    try:
+                        transcript_id: str = self.manager._transcript_id_var.get()
+                        if transcript_id:
+                            span.set_attribute("transcript_id", transcript_id)
+                            # Add atomic span order number
+                            span_order: int = self.manager._next_span_order(transcript_id)
+                            span.set_attribute("span_order", span_order)
+                    except LookupError:
+                        # transcript_id not available, skip it
+                        pass
+
+                    # Handle attributes
+                    try:
+                        attributes: dict[str, Any] = self.manager._attributes_var.get()
+                        for key, value in attributes.items():
+                            span.set_attribute(key, value)
+                    except LookupError:
+                        # attributes not available, skip them
+                        pass
+
+                def on_end(self, span: Any) -> None:
+                    pass
+
+                def shutdown(self) -> None:
+                    pass
+
+                def force_flush(self) -> None:
+                    pass
+
+            # Configure span exporters for our isolated provider
+            if self.enable_otlp_export:
+                otlp_exporters: List[Union[HTTPExporter, GRPCExporter]] = (
+                    self._init_spans_exporters()
+                )
+
+                if otlp_exporters:
+                    # Create a processor for each exporter
+                    for exporter in otlp_exporters:
+                        otlp_processor: Any = self._create_span_processor(exporter)
+                        self._tracer_provider.add_span_processor(otlp_processor)
+                        self._spans_processors.append(otlp_processor)
+
+                    logger.info(
+                        f"Added {len(otlp_exporters)} OTLP exporters for {len(self.endpoints)} endpoints"
+                    )
+                else:
+                    logger.warning("Failed to initialize OTLP exporter")
+
+            if self.enable_console_export:
+                console_exporter: Any = ConsoleSpanExporter()
+                console_processor: Any = self._create_span_processor(console_exporter)
+                self._tracer_provider.add_span_processor(console_processor)
+                self._spans_processors.append(console_processor)
+
+            # Add our custom context span processor
+            context_processor = ContextSpanProcessor(self)
+            self._tracer_provider.add_span_processor(context_processor)
+
+            # Get tracer from our isolated provider (don't set global provider)
+            self._tracer = self._tracer_provider.get_tracer(__name__)
+
+            # Start root span
+            if self._tracer is None:
+                raise RuntimeError("Failed to get tracer from provider")
+
+            self._root_span = self._tracer.start_span(
+                "application_session",
+                attributes={
+                    "service.name": self.collection_name,
+                    "session.type": "application_root",
+                },
+            )
+            if self._root_span is not None:
+                self._root_context = trace.set_span_in_context(self._root_span)
+
+            # Instrument threading for better context propagation
+            try:
+                ThreadingInstrumentor().instrument()
+            except Exception as e:
+                logger.warning(f"Failed to instrument threading: {e}")
+
+            # Instrument OpenAI with our isolated tracer provider
+            try:
+                OpenAIInstrumentor().instrument(tracer_provider=self._tracer_provider)
+                logger.info("Instrumented OpenAI")
+            except Exception as e:
+                logger.warning(f"Failed to instrument OpenAI: {e}")
+
+            # Instrument Anthropic with our isolated tracer provider
+            try:
+                AnthropicInstrumentor().instrument(tracer_provider=self._tracer_provider)
+                logger.info("Instrumented Anthropic")
+            except Exception as e:
+                logger.warning(f"Failed to instrument Anthropic: {e}")
+
+            # Instrument Bedrock with our isolated tracer provider
+            try:
+                BedrockInstrumentor().instrument(tracer_provider=self._tracer_provider)
+                logger.info("Instrumented Bedrock")
+            except Exception as e:
+                logger.warning(f"Failed to instrument Bedrock: {e}")
+
+            # Instrument LangChain with our isolated tracer provider
+            try:
+                LangchainInstrumentor().instrument(tracer_provider=self._tracer_provider)
+                logger.info("Instrumented LangChain")
+            except Exception as e:
+                logger.warning(f"Failed to instrument LangChain: {e}")
+
+            # Register cleanup handlers
+            self._register_cleanup()
+
+            self._initialized = True
+            logger.info(f"Docent tracing initialized for {self.collection_name}")
+
+        except Exception as e:
+            logger.error(f"Failed to initialize Docent tracing: {e}")
+            self._disabled = True
+            raise
+
+    def cleanup(self):
+        """Clean up Docent tracing resources."""
+        try:
+            # Create an explicit end-of-trace span before ending the root span
+            if self._tracer and self._root_span:
+                end_span = self._tracer.start_span(
+                    "trace_end",
+                    context=self._root_context,
+                    attributes={
+                        "event.type": "trace_end",
+                    },
+                )
+                end_span.end()
+
+            if (
+                self._root_span
+                and hasattr(self._root_span, "is_recording")
+                and self._root_span.is_recording()
+            ):
+                self._root_span.end()
+            elif self._root_span:
+                # Fallback if is_recording is not available
+                self._root_span.end()
+
+            self._root_span = None
+            self._root_context = None
+
+            # Shutdown our isolated tracer provider
+            if self._tracer_provider:
+                self._tracer_provider.shutdown()
+                self._tracer_provider = None
+        except Exception as e:
+            logger.error(f"Error during cleanup: {e}")
+
+    def close(self):
+        """Explicitly close the Docent tracing manager."""
+        try:
+            self.cleanup()
+            if self._cleanup_registered:
+                atexit.unregister(self.cleanup)
+                self._cleanup_registered = False
+        except Exception as e:
+            logger.error(f"Error during close: {e}")
+
+    def flush(self) -> None:
+        """Force flush all spans to exporters."""
+        try:
+            for processor in self._spans_processors:
+                if hasattr(processor, "force_flush"):
+                    processor.force_flush()
+        except Exception as e:
+            logger.error(f"Error during flush: {e}")
+
+    def set_disabled(self, disabled: bool) -> None:
+        """Enable or disable tracing."""
+        self._disabled = disabled
+        if disabled and self._initialized:
+            self.cleanup()
+
+    def verify_initialized(self) -> bool:
+        """Verify if the manager is properly initialized."""
+        if self._disabled:
+            return False
+        return self._initialized
+
+    def __enter__(self) -> "DocentTracer":
+        """Context manager entry."""
+        self.initialize()
+        return self
+
+    def __exit__(self, exc_type: type[BaseException], exc_val: Any, exc_tb: Any) -> None:
+        """Context manager exit."""
+        self.close()
+
+    @property
+    def tracer(self) -> Optional[Any]:
+        """Get the tracer instance."""
+        if not self._initialized:
+            self.initialize()
+        return self._tracer
+
+    @property
+    def root_context(self) -> Optional[Any]:
+        """Get the root context."""
+        if not self._initialized:
+            self.initialize()
+        return self._root_context
+
+    @contextmanager
+    def span(self, name: str, attributes: Optional[Dict[str, Any]] = None) -> Iterator[Any]:
+        """
+        Context manager for creating spans with attributes.
+        """
+        if not self._initialized:
+            self.initialize()
+
+        if self._tracer is None:
+            raise RuntimeError("Tracer not initialized")
+
+        span_attributes: dict[str, Any] = attributes or {}
+
+        with self._tracer.start_as_current_span(
+            name, context=self._root_context, attributes=span_attributes
+        ) as span:
+            yield span
+
+    @asynccontextmanager
+    async def async_span(
+        self, name: str, attributes: Optional[Dict[str, Any]] = None
+    ) -> AsyncIterator[Any]:
+        """
+        Async context manager for creating spans with attributes.
+
+        Args:
+            name: Name of the span
+            attributes: Dictionary of attributes to add to the span
+        """
+        if not self._initialized:
+            self.initialize()
+
+        if self._tracer is None:
+            raise RuntimeError("Tracer not initialized")
+
+        span_attributes: dict[str, Any] = attributes or {}
+
+        with self._tracer.start_as_current_span(
+            name, context=self._root_context, attributes=span_attributes
+        ) as span:
+            yield span
+
+    @contextmanager
+    def agent_run_context(
+        self,
+        agent_run_id: Optional[str] = None,
+        transcript_id: Optional[str] = None,
+        metadata: Optional[Dict[str, Any]] = None,
+        **attributes: Any,
+    ) -> Iterator[Any]:
+        """
+        Context manager for setting up an agent run context.
+
+        Args:
+            agent_run_id: Optional agent run ID (auto-generated if not provided)
+            transcript_id: Optional transcript ID (auto-generated if not provided)
+            metadata: Optional nested dictionary of metadata to attach as events
+            **attributes: Additional attributes to add to the context
+
+        Yields:
+            Tuple of (context, agent_run_id, transcript_id)
+        """
+        if not self._initialized:
+            self.initialize()
+
+        if self._tracer is None:
+            raise RuntimeError("Tracer not initialized")
+
+        if agent_run_id is None:
+            agent_run_id = str(uuid.uuid4())
+        if transcript_id is None:
+            transcript_id = str(uuid.uuid4())
+
+        # Set context variables for this execution context
+        agent_run_id_token: Token[str] = self._agent_run_id_var.set(agent_run_id)
+        transcript_id_token: Token[str] = self._transcript_id_var.set(transcript_id)
+        attributes_token: Token[dict[str, Any]] = self._attributes_var.set(attributes)
+
+        try:
+            # Create a span with the agent run attributes
+            span_attributes: dict[str, Any] = {
+                "agent_run_id": agent_run_id,
+                "transcript_id": transcript_id,
+                **attributes,
+            }
+            with self._tracer.start_as_current_span(
+                "agent_run_context", context=self._root_context, attributes=span_attributes
+            ) as _span:
+                # Attach metadata as events if provided
+                if metadata:
+                    _add_metadata_event_to_span(_span, metadata)
+
+                context = trace.get_current_span().get_span_context()
+                yield context, agent_run_id, transcript_id
+        finally:
+            self._agent_run_id_var.reset(agent_run_id_token)
+            self._transcript_id_var.reset(transcript_id_token)
+            self._attributes_var.reset(attributes_token)
+
+    @asynccontextmanager
+    async def async_agent_run_context(
+        self,
+        agent_run_id: Optional[str] = None,
+        transcript_id: Optional[str] = None,
+        metadata: Optional[Dict[str, Any]] = None,
+        **attributes: Any,
+    ) -> AsyncIterator[Any]:
+        """
+        Async context manager for setting up an agent run context.
+        Modifies the OpenTelemetry context so all spans inherit agent_run_id and transcript_id.
+
+        Args:
+            agent_run_id: Optional agent run ID (auto-generated if not provided)
+            transcript_id: Optional transcript ID (auto-generated if not provided)
+            metadata: Optional nested dictionary of metadata to attach as events
+            **attributes: Additional attributes to add to the context
+
+        Yields:
+            Tuple of (context, agent_run_id, transcript_id)
+        """
+        if not self._initialized:
+            self.initialize()
+
+        if self._tracer is None:
+            raise RuntimeError("Tracer not initialized")
+
+        if agent_run_id is None:
+            agent_run_id = str(uuid.uuid4())
+        if transcript_id is None:
+            transcript_id = str(uuid.uuid4())
+
+        # Set context variables for this execution context
+        agent_run_id_token: Any = self._agent_run_id_var.set(agent_run_id)
+        transcript_id_token: Any = self._transcript_id_var.set(transcript_id)
+        attributes_token: Any = self._attributes_var.set(attributes)
+
+        try:
+            # Create a span with the agent run attributes
+            span_attributes: dict[str, Any] = {
+                "agent_run_id": agent_run_id,
+                "transcript_id": transcript_id,
+                **attributes,
+            }
+            with self._tracer.start_as_current_span(
+                "agent_run_context", context=self._root_context, attributes=span_attributes
+            ) as _span:
+                # Attach metadata as events if provided
+                if metadata:
+                    _add_metadata_event_to_span(_span, metadata)
+
+                context = trace.get_current_span().get_span_context()
+                yield context, agent_run_id, transcript_id
+        finally:
+            self._agent_run_id_var.reset(agent_run_id_token)
+            self._transcript_id_var.reset(transcript_id_token)
+            self._attributes_var.reset(attributes_token)
+
+    def start_transcript(
+        self,
+        agent_run_id: Optional[str] = None,
+        transcript_id: Optional[str] = None,
+        **attributes: Any,
+    ) -> tuple[Any, str, str]:
+        """
+        Manually start a transcript span.
+
+        Args:
+            agent_run_id: Optional agent run ID (auto-generated if not provided)
+            transcript_id: Optional transcript ID (auto-generated if not provided)
+            **attributes: Additional attributes to add to the span
+
+        Returns:
+            Tuple of (span, agent_run_id, transcript_id)
+        """
+        if not self._initialized:
+            self.initialize()
+
+        if self._tracer is None:
+            raise RuntimeError("Tracer not initialized")
+
+        if agent_run_id is None:
+            agent_run_id = str(uuid.uuid4())
+        if transcript_id is None:
+            transcript_id = str(uuid.uuid4())
+
+        span_attributes: dict[str, Any] = {
+            "agent_run_id": agent_run_id,
+            "transcript_id": transcript_id,
+            **attributes,
+        }
+
+        span: Any = self._tracer.start_span(
+            "transcript_span", context=self._root_context, attributes=span_attributes
+        )
+
+        return span, agent_run_id, transcript_id
+
+    def stop_transcript(self, span: Any) -> None:
+        """
+        Manually stop a transcript span.
+
+        Args:
+            span: The span to stop
+        """
+        if span and hasattr(span, "end"):
+            span.end()
+
+    def start_span(self, name: str, attributes: Optional[Dict[str, Any]] = None) -> Any:
+        """
+        Manually start a span.
+
+        Args:
+            name: Name of the span
+            attributes: Dictionary of attributes to add to the span
+
+        Returns:
+            The created span
+        """
+        if not self._initialized:
+            self.initialize()
+
+        if self._tracer is None:
+            raise RuntimeError("Tracer not initialized")
+
+        span_attributes: dict[str, Any] = attributes or {}
+
+        span: Any = self._tracer.start_span(
+            name, context=self._root_context, attributes=span_attributes
+        )
+
+        return span
+
+    def stop_span(self, span: Any) -> None:
+        """
+        Manually stop a span.
+
+        Args:
+            span: The span to stop
+        """
+        if span and hasattr(span, "end"):
+            span.end()
+
+
+# Global instance for easy access
+_global_tracer: Optional[DocentTracer] = None
+
+
+def initialize_tracing(
+    collection_name: str = "default-service",
+    collection_id: Optional[str] = None,
+    endpoint: Union[str, List[str]] = DEFAULT_ENDPOINT,
+    headers: Optional[Dict[str, str]] = None,
+    api_key: Optional[str] = None,
+    enable_console_export: bool = False,
+    enable_otlp_export: bool = True,
+    disable_batch: bool = False,
+    span_postprocess_callback: Optional[Callable[[ReadableSpan], None]] = None,
+) -> DocentTracer:
+    """
+    Initialize the global Docent tracer.
+
+    This is the primary entry point for setting up Docent tracing.
+    It creates a global singleton instance that can be accessed via get_tracer().
+
+    Args:
+        collection_name: Name of the collection
+        collection_id: Optional collection ID (auto-generated if not provided)
+        endpoint: OTLP endpoint URL(s) for span export - can be a single string or list of strings for multiple endpoints
+        headers: Optional headers for authentication
+        api_key: Optional API key for bearer token authentication (takes precedence over env var)
+        enable_console_export: Whether to export spans to console
+        enable_otlp_export: Whether to export spans to OTLP endpoint
+        disable_batch: Whether to disable batch processing (use SimpleSpanProcessor)
+        span_postprocess_callback: Optional callback for post-processing spans
+
+    Returns:
+        The initialized Docent tracer
+
+    Example:
+        # Basic setup
+        initialize_tracing("my-collection")
+    """
+    global _global_tracer
+
+    # Check for API key in environment variable if not provided as parameter
+    if api_key is None:
+        env_api_key: Optional[str] = os.environ.get("DOCENT_API_KEY")
+        api_key = env_api_key
+
+    if _global_tracer is None:
+        _global_tracer = DocentTracer(
+            collection_name=collection_name,
+            collection_id=collection_id,
+            endpoint=endpoint,
+            headers=headers,
+            api_key=api_key,
+            enable_console_export=enable_console_export,
+            enable_otlp_export=enable_otlp_export,
+            disable_batch=disable_batch,
+            span_postprocess_callback=span_postprocess_callback,
+        )
+        _global_tracer.initialize()
+    else:
+        # If already initialized, ensure it's properly set up
+        _global_tracer.initialize()
+
+    return _global_tracer
+
+
+def get_tracer() -> DocentTracer:
+    """Get the global Docent tracer."""
+    if _global_tracer is None:
+        # Auto-initialize with defaults if not already done
+        return initialize_tracing()
+    return _global_tracer
+
+
+def close_tracing() -> None:
+    """Close the global Docent tracer."""
+    global _global_tracer
+    if _global_tracer:
+        _global_tracer.close()
+        _global_tracer = None
+
+
+def flush_tracing() -> None:
+    """Force flush all spans to exporters."""
+    if _global_tracer:
+        _global_tracer.flush()
+
+
+def verify_initialized() -> bool:
+    """Verify if the global Docent tracer is properly initialized."""
+    if _global_tracer is None:
+        return False
+    return _global_tracer.verify_initialized()
+
+
+def set_disabled(disabled: bool) -> None:
+    """Enable or disable global tracing."""
+    if _global_tracer:
+        _global_tracer.set_disabled(disabled)
+
+
+def get_api_key() -> Optional[str]:
+    """
+    Get the API key from environment variable.
+
+    Returns:
+        The API key from DOCENT_API_KEY environment variable, or None if not set
+    """
+    return os.environ.get("DOCENT_API_KEY")
+
+
+def agent_run_score(name: str, score: float, attributes: Optional[Dict[str, Any]] = None) -> None:
+    """
+    Record a score event on the current span.
+    Automatically works in both sync and async contexts.
+
+    Args:
+        name: Name of the score metric
+        score: Numeric score value
+        attributes: Optional additional attributes for the score event
+    """
+    try:
+        current_span: Any = trace.get_current_span()
+        if current_span and hasattr(current_span, "add_event"):
+            event_attributes: dict[str, Any] = {
+                "score.name": name,
+                "score.value": score,
+                "event.type": "score",
+            }
+            if attributes:
+                event_attributes.update(attributes)
+
+            current_span.add_event(name="agent_run_score", attributes=event_attributes)
+        else:
+            logger.warning("No current span available for recording score")
+    except Exception as e:
+        logger.error(f"Failed to record score event: {e}")
+
+
+def _flatten_dict(d: Dict[str, Any], prefix: str = "") -> Dict[str, Any]:
+    """Flatten nested dictionary with dot notation."""
+    flattened: Dict[str, Any] = {}
+    for key, value in d.items():
+        new_key = f"{prefix}.{key}" if prefix else key
+        if isinstance(value, dict):
+            flattened.update(_flatten_dict(dict(value), new_key))  # type: ignore
+        else:
+            flattened[new_key] = value
+    return flattened
+
+
+def _add_metadata_event_to_span(span: Any, metadata: Dict[str, Any]) -> None:
+    """
+    Add metadata as an event to a span.
+
+    Args:
+        span: The span to add the event to
+        metadata: Dictionary of metadata (can be nested)
+    """
+    if span and hasattr(span, "add_event"):
+        event_attributes: dict[str, Any] = {
+            "event.type": "metadata",
+        }
+
+        # Flatten nested metadata and add as event attributes
+        flattened_metadata = _flatten_dict(metadata)
+        for key, value in flattened_metadata.items():
+            event_attributes[f"metadata.{key}"] = value
+        span.add_event(name="agent_run_metadata", attributes=event_attributes)
+
+
+def agent_run_metadata(metadata: Dict[str, Any]) -> None:
+    """
+    Record metadata as an event on the current span.
+    Automatically works in both sync and async contexts.
+    Supports nested dictionaries by flattening them with dot notation.
+
+    Args:
+        metadata: Dictionary of metadata to attach to the current span (can be nested)
+
+    Example:
+        agent_run_metadata({"user": "John", "id": 123, "flagged": True})
+        agent_run_metadata({"user": {"id": "123", "name": "John"}, "config": {"model": "gpt-4"}})
+    """
+    try:
+        current_span: Any = trace.get_current_span()
+        if current_span:
+            _add_metadata_event_to_span(current_span, metadata)
+        else:
+            logger.warning("No current span available for recording metadata")
+    except Exception as e:
+        logger.error(f"Failed to record metadata event: {e}")
+
+
+# Unified functions that automatically detect context
+@asynccontextmanager
+async def span(name: str, attributes: Optional[Dict[str, Any]] = None) -> AsyncIterator[Any]:
+    """
+    Automatically choose sync or async span based on context.
+    Can be used with both 'with' and 'async with'.
+    """
+    if _is_async_context() or _is_running_in_event_loop():
+        async with get_tracer().async_span(name, attributes) as span:
+            yield span
+    else:
+        with get_tracer().span(name, attributes) as span:
+            yield span
+
+
+class AgentRunContext:
+    """Context manager that works in both sync and async contexts."""
+
+    def __init__(
+        self,
+        agent_run_id: Optional[str] = None,
+        transcript_id: Optional[str] = None,
+        metadata: Optional[Dict[str, Any]] = None,
+        **attributes: Any,
+    ):
+        self.agent_run_id = agent_run_id
+        self.transcript_id = transcript_id
+        self.metadata = metadata
+        self.attributes: dict[str, Any] = attributes
+        self._sync_context: Optional[Any] = None
+        self._async_context: Optional[Any] = None
+
+    def __enter__(self) -> Any:
+        """Sync context manager entry."""
+        self._sync_context = get_tracer().agent_run_context(
+            self.agent_run_id, self.transcript_id, metadata=self.metadata, **self.attributes
+        )
+        return self._sync_context.__enter__()
+
+    def __exit__(self, exc_type: type[BaseException], exc_val: Any, exc_tb: Any) -> None:
+        """Sync context manager exit."""
+        if self._sync_context:
+            self._sync_context.__exit__(exc_type, exc_val, exc_tb)
+
+    async def __aenter__(self) -> Any:
+        """Async context manager entry."""
+        self._async_context = get_tracer().async_agent_run_context(
+            self.agent_run_id, self.transcript_id, metadata=self.metadata, **self.attributes
+        )
+        return await self._async_context.__aenter__()
+
+    async def __aexit__(self, exc_type: Any, exc_val: Any, exc_tb: Any) -> None:
+        """Async context manager exit."""
+        if self._async_context:
+            await self._async_context.__aexit__(exc_type, exc_val, exc_tb)
+
+
+def agent_run(
+    func: Optional[Callable[..., Any]] = None, *, metadata: Optional[Dict[str, Any]] = None
+):
+    """
+    Decorator to wrap a function in an agent_run_context (sync or async).
+    Injects context, agent_run_id, and transcript_id as function attributes.
+    Optionally accepts metadata to attach to the agent run context.
+
+    Example:
+        @agent_run
+        def my_func(x, y):
+            print(my_func.docent.context, my_func.docent.agent_run_id, my_func.docent.transcript_id)
+
+        @agent_run(metadata={"user": "John", "model": "gpt-4"})
+        def my_func_with_metadata(x, y):
+            print(my_func_with_metadata.docent.agent_run_id)
+
+        @agent_run(metadata={"config": {"model": "gpt-4", "temperature": 0.7}})
+        async def my_async_func(z):
+            print(my_async_func.docent.agent_run_id)
+    """
+    import functools
+    import inspect
+
+    def decorator(f: Callable[..., Any]) -> Callable[..., Any]:
+        if inspect.iscoroutinefunction(f):
+
+            @functools.wraps(f)
+            async def async_wrapper(*args: Any, **kwargs: Any) -> Any:
+                async with AgentRunContext(metadata=metadata) as (
+                    context,
+                    agent_run_id,
+                    transcript_id,
+                ):
+                    # Store docent data as function attributes
+                    setattr(
+                        async_wrapper,
+                        "docent",
+                        type(
+                            "DocentData",
+                            (),
+                            {
+                                "context": context,
+                                "agent_run_id": agent_run_id,
+                                "transcript_id": transcript_id,
+                            },
+                        )(),
+                    )
+                    return await f(*args, **kwargs)
+
+            return async_wrapper
+        else:
+
+            @functools.wraps(f)
+            def sync_wrapper(*args: Any, **kwargs: Any) -> Any:
+                with AgentRunContext(metadata=metadata) as (context, agent_run_id, transcript_id):
+                    # Store docent data as function attributes
+                    setattr(
+                        sync_wrapper,
+                        "docent",
+                        type(
+                            "DocentData",
+                            (),
+                            {
+                                "context": context,
+                                "agent_run_id": agent_run_id,
+                                "transcript_id": transcript_id,
+                            },
+                        )(),
+                    )
+                    return f(*args, **kwargs)
+
+            return sync_wrapper
+
+    if func is None:
+        return decorator
+    else:
+        return decorator(func)
+
+
+def agent_run_context(
+    agent_run_id: Optional[str] = None,
+    transcript_id: Optional[str] = None,
+    metadata: Optional[Dict[str, Any]] = None,
+    **attributes: Any,
+) -> AgentRunContext:
+    """
+    Create an agent run context for tracing.
+
+    Args:
+        agent_run_id: Optional agent run ID (auto-generated if not provided)
+        transcript_id: Optional transcript ID (auto-generated if not provided)
+        metadata: Optional nested dictionary of metadata to attach as events
+        **attributes: Additional attributes to add to the context
+
+    Returns:
+        A context manager that can be used with both 'with' and 'async with'
+
+    Example:
+        # Sync usage
+        with agent_run_context() as (context, agent_run_id, transcript_id):
+            pass
+
+        # Async usage
+        async with agent_run_context() as (context, agent_run_id, transcript_id):
+            pass
+
+        # With metadata
+        with agent_run_context(metadata={"user": "John", "model": "gpt-4"}) as (context, agent_run_id, transcript_id):
+            pass
+    """
+    return AgentRunContext(agent_run_id, transcript_id, metadata=metadata, **attributes)
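
For context, a minimal usage sketch of the new tracing module (the collection name, metadata values, and stubbed model call are placeholders; a DOCENT_API_KEY in the environment is assumed):

from docent.trace import (
    agent_run,
    agent_run_metadata,
    agent_run_score,
    close_tracing,
    initialize_tracing,
)

# Reads DOCENT_API_KEY from the environment if api_key is not passed explicitly.
initialize_tracing(collection_name="my-collection")

@agent_run(metadata={"experiment": "demo"})  # placeholder metadata
def solve_task(question: str) -> str:
    # Any instrumented LLM call made here (OpenAI, Anthropic, Bedrock, LangChain)
    # is exported with this run's agent_run_id and transcript_id attached.
    answer = "stubbed answer"  # placeholder for a real model call
    agent_run_metadata({"question": question})
    agent_run_score("task_success", 1.0)
    return answer

solve_task("What is 2 + 2?")
close_tracing()  # flush and shut down exporters before exit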
@@ -0,0 +1,24 @@
+Metadata-Version: 2.4
+Name: docent-python
+Version: 0.1.2a0
+Summary: Docent SDK
+Project-URL: Homepage, https://github.com/TransluceAI/docent
+Project-URL: Issues, https://github.com/TransluceAI/docent/issues
+Project-URL: Docs, https://transluce-docent.readthedocs-hosted.com/en/latest
+Author-email: Transluce <info@transluce.org>
+License-Expression: MIT
+License-File: LICENSE.md
+Requires-Python: >=3.11
+Requires-Dist: opentelemetry-api>=1.35.0
+Requires-Dist: opentelemetry-exporter-otlp-proto-grpc>=1.35.0
+Requires-Dist: opentelemetry-exporter-otlp-proto-http>=1.35.0
+Requires-Dist: opentelemetry-instrumentation-anthropic>=0.42.0
+Requires-Dist: opentelemetry-instrumentation-bedrock>=0.42.0
+Requires-Dist: opentelemetry-instrumentation-langchain>=0.43.1
+Requires-Dist: opentelemetry-instrumentation-openai>=0.42.0
+Requires-Dist: opentelemetry-instrumentation-threading>=0.56b0
+Requires-Dist: opentelemetry-sdk>=1.35.0
+Requires-Dist: pydantic>=2.11.7
+Requires-Dist: pyyaml>=6.0.2
+Requires-Dist: tiktoken>=0.7.0
+Requires-Dist: tqdm>=4.67.1
@@ -1,5 +1,6 @@
 docent/__init__.py,sha256=J2BbO6rzilfw9WXRUeolr439EGFezqbMU_kCpCCryRA,59
 docent/py.typed,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
+docent/trace.py,sha256=dWQiSo93YfHNuyC3zi_NHz_uymygMzEVtoQE_1wEyM0,39960
 docent/_log_util/__init__.py,sha256=3HXXrxrSm8PxwG4llotrCnSnp7GuroK1FNHsdg6f7aE,73
 docent/_log_util/logger.py,sha256=kwM0yRW1IJd6-XTorjWn48B4l8qvD2ZM6VDjY5eskQI,4422
 docent/data_models/__init__.py,sha256=-F8Er1RXPX6SEKFLnoi6OMhHq57KJqyHx1McP2rCsGQ,483
@@ -9,7 +10,7 @@ docent/data_models/citation.py,sha256=WsVQZcBT2EJD24ysyeVOC5Xfo165RI7P5_cOnJBgHj
 docent/data_models/metadata.py,sha256=oq2rO0a-914YKKznz-yrlQR28gBAmryRrwZL0QiaBGg,8702
 docent/data_models/regex.py,sha256=0ciIerkrNwb91bY5mTcyO5nDWH67xx2tZYObV52fmBo,1684
 docent/data_models/shared_types.py,sha256=jjm-Dh5S6v7UKInW7SEqoziOsx6Z7Uu4e3VzgCbTWvc,225
-docent/data_models/transcript.py,sha256=7xYCPi6gIUftX2tjOcteryQE9GWV7ThRZv4PmpteJhM,13793
+docent/data_models/transcript.py,sha256=K6q40-EoSe-escmunX22LrN2T9QhPhxI9S9hgUaKx-4,13851
 docent/data_models/chat/__init__.py,sha256=O04XQ2NmO8GTWqkkB_Iydj8j_CucZuLhoyMVTxJN_cs,570
 docent/data_models/chat/content.py,sha256=Co-jO8frQa_DSP11wJuhPX0s-GpJk8yqtKqPeiAIZ_U,1672
 docent/data_models/chat/message.py,sha256=iAo38kbV6wYbFh8S23cxLy6HY4C_i3PzQ6RpSQG5dxM,3861
@@ -20,8 +21,8 @@ docent/samples/load.py,sha256=ZGE07r83GBNO4A0QBh5aQ18WAu3mTWA1vxUoHd90nrM,207
 docent/samples/log.eval,sha256=orrW__9WBfANq7NwKsPSq9oTsQRcG6KohG5tMr_X_XY,397708
 docent/samples/tb_airline.json,sha256=eR2jFFRtOw06xqbEglh6-dPewjifOk-cuxJq67Dtu5I,47028
 docent/sdk/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
-docent/sdk/client.py,sha256=pj8sARtXQo80OJCbJH4AWzS_j_AJe5uNP-TsTn37oG8,10701
-docent_python-0.1.0a9.dist-info/METADATA,sha256=h6BF5r7AmXs3Rn14Pk-0bFIMWrWf4Clhebl60mpWdTw,521
-docent_python-0.1.0a9.dist-info/WHEEL,sha256=qtCwoSJWgHk21S1Kb4ihdzI2rlJ1ZKaIurTj_ngOhyQ,87
-docent_python-0.1.0a9.dist-info/licenses/LICENSE.md,sha256=vOHzq3K4Ndu0UV9hPrtXvlD7pHOjyDQmGjHuLSIkRQY,1087
-docent_python-0.1.0a9.dist-info/RECORD,,
+docent/sdk/client.py,sha256=uyhTisb9bHk7Hd2G4UKLdfvuiAmYOOqJiwEPbYWN9IE,12371
+docent_python-0.1.2a0.dist-info/METADATA,sha256=u8jTcnP8WZ_qABVDi5zABxI3d34oLMY8hLHb55oeO_s,1037
+docent_python-0.1.2a0.dist-info/WHEEL,sha256=qtCwoSJWgHk21S1Kb4ihdzI2rlJ1ZKaIurTj_ngOhyQ,87
+docent_python-0.1.2a0.dist-info/licenses/LICENSE.md,sha256=vOHzq3K4Ndu0UV9hPrtXvlD7pHOjyDQmGjHuLSIkRQY,1087
+docent_python-0.1.2a0.dist-info/RECORD,,
@@ -1,15 +0,0 @@
-Metadata-Version: 2.4
-Name: docent-python
-Version: 0.1.0a9
-Summary: Docent SDK
-Project-URL: Homepage, https://github.com/TransluceAI/docent
-Project-URL: Issues, https://github.com/TransluceAI/docent/issues
-Project-URL: Docs, https://transluce-docent.readthedocs-hosted.com/en/latest
-Author-email: Transluce <info@transluce.org>
-License-Expression: MIT
-License-File: LICENSE.md
-Requires-Python: >=3.11
-Requires-Dist: pydantic>=2.11.7
-Requires-Dist: pyyaml>=6.0.2
-Requires-Dist: tiktoken>=0.7.0
-Requires-Dist: tqdm>=4.67.1