openai-agents 0.0.1__py3-none-any.whl → 0.0.3__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.

Potentially problematic release.


This version of openai-agents might be problematic; see the registry's advisory listing for more details.

Files changed (53)
  1. agents/__init__.py +223 -0
  2. agents/_config.py +23 -0
  3. agents/_debug.py +17 -0
  4. agents/_run_impl.py +792 -0
  5. agents/_utils.py +61 -0
  6. agents/agent.py +159 -0
  7. agents/agent_output.py +144 -0
  8. agents/computer.py +107 -0
  9. agents/exceptions.py +63 -0
  10. agents/extensions/handoff_filters.py +67 -0
  11. agents/extensions/handoff_prompt.py +19 -0
  12. agents/function_schema.py +340 -0
  13. agents/guardrail.py +320 -0
  14. agents/handoffs.py +236 -0
  15. agents/items.py +246 -0
  16. agents/lifecycle.py +105 -0
  17. agents/logger.py +3 -0
  18. agents/model_settings.py +36 -0
  19. agents/models/__init__.py +0 -0
  20. agents/models/_openai_shared.py +34 -0
  21. agents/models/fake_id.py +5 -0
  22. agents/models/interface.py +107 -0
  23. agents/models/openai_chatcompletions.py +952 -0
  24. agents/models/openai_provider.py +65 -0
  25. agents/models/openai_responses.py +384 -0
  26. agents/result.py +220 -0
  27. agents/run.py +904 -0
  28. agents/run_context.py +26 -0
  29. agents/stream_events.py +58 -0
  30. agents/strict_schema.py +167 -0
  31. agents/tool.py +288 -0
  32. agents/tracing/__init__.py +97 -0
  33. agents/tracing/create.py +306 -0
  34. agents/tracing/logger.py +3 -0
  35. agents/tracing/processor_interface.py +69 -0
  36. agents/tracing/processors.py +261 -0
  37. agents/tracing/scope.py +45 -0
  38. agents/tracing/setup.py +211 -0
  39. agents/tracing/span_data.py +188 -0
  40. agents/tracing/spans.py +264 -0
  41. agents/tracing/traces.py +195 -0
  42. agents/tracing/util.py +17 -0
  43. agents/usage.py +22 -0
  44. agents/version.py +7 -0
  45. openai_agents-0.0.3.dist-info/METADATA +204 -0
  46. openai_agents-0.0.3.dist-info/RECORD +49 -0
  47. openai_agents-0.0.3.dist-info/licenses/LICENSE +21 -0
  48. openai-agents/example.py +0 -2
  49. openai_agents-0.0.1.dist-info/METADATA +0 -17
  50. openai_agents-0.0.1.dist-info/RECORD +0 -6
  51. openai_agents-0.0.1.dist-info/licenses/LICENSE +0 -20
  52. {openai-agents → agents/extensions}/__init__.py +0 -0
  53. {openai_agents-0.0.1.dist-info → openai_agents-0.0.3.dist-info}/WHEEL +0 -0
@@ -0,0 +1,306 @@
1
+ from __future__ import annotations
2
+
3
+ from collections.abc import Mapping, Sequence
4
+ from typing import TYPE_CHECKING, Any
5
+
6
+ from .logger import logger
7
+ from .setup import GLOBAL_TRACE_PROVIDER
8
+ from .span_data import (
9
+ AgentSpanData,
10
+ CustomSpanData,
11
+ FunctionSpanData,
12
+ GenerationSpanData,
13
+ GuardrailSpanData,
14
+ HandoffSpanData,
15
+ ResponseSpanData,
16
+ )
17
+ from .spans import Span
18
+ from .traces import Trace
19
+
20
+ if TYPE_CHECKING:
21
+ from openai.types.responses import Response
22
+
23
+
24
def trace(
    workflow_name: str,
    trace_id: str | None = None,
    group_id: str | None = None,
    metadata: dict[str, Any] | None = None,
    disabled: bool = False,
) -> Trace:
    """Create (but do not start) a new trace.

    Use the returned object either as a context manager (`with trace(...):`) or by
    calling `trace.start()` and `trace.finish()` yourself.

    Args:
        workflow_name: Name of the logical app or workflow, e.g. "code_bot" for a
            coding agent or "customer_support_agent" for a support agent.
        trace_id: Optional trace ID; one is generated if omitted. Prefer
            `util.gen_trace_id()` to guarantee correct formatting.
        group_id: Optional grouping identifier linking multiple traces from the
            same conversation or process (e.g. a chat thread ID).
        metadata: Optional user-defined metadata dictionary attached to the trace.
        disabled: If True, a Trace object is still returned but it will not be
            recorded.

    Returns:
        The newly created trace object.
    """
    # Nested traces are almost always accidental; warn but still create one.
    if GLOBAL_TRACE_PROVIDER.get_current_trace() is not None:
        logger.warning(
            "Trace already exists. Creating a new trace, but this is probably a mistake."
        )

    return GLOBAL_TRACE_PROVIDER.create_trace(
        name=workflow_name,
        trace_id=trace_id,
        group_id=group_id,
        metadata=metadata,
        disabled=disabled,
    )
68
+
69
+
70
def get_current_trace() -> Trace | None:
    """Fetch the trace active in the current context.

    Returns:
        The active trace, or None when no trace is running.
    """
    active = GLOBAL_TRACE_PROVIDER.get_current_trace()
    return active
73
+
74
+
75
def get_current_span() -> Span[Any] | None:
    """Fetch the span active in the current context.

    Returns:
        The active span, or None when no span is running.
    """
    active = GLOBAL_TRACE_PROVIDER.get_current_span()
    return active
78
+
79
+
80
def agent_span(
    name: str,
    handoffs: list[str] | None = None,
    tools: list[str] | None = None,
    output_type: str | None = None,
    span_id: str | None = None,
    parent: Trace | Span[Any] | None = None,
    disabled: bool = False,
) -> Span[AgentSpanData]:
    """Create (but do not start) an agent span.

    Start it with `with agent_span(...):` or `span.start()` + `span.finish()`.

    Args:
        name: The name of the agent.
        handoffs: Optional list of agent names this agent could hand off to.
        tools: Optional list of tool names available to this agent.
        output_type: Optional name of the output type produced by the agent.
        span_id: Optional span ID; generated if omitted. Prefer
            `util.gen_span_id()` to guarantee correct formatting.
        parent: Parent span or trace; defaults to the current trace/span.
        disabled: If True, the span is returned but never recorded.

    Returns:
        The newly created agent span.
    """
    data = AgentSpanData(
        name=name,
        handoffs=handoffs,
        tools=tools,
        output_type=output_type,
    )
    return GLOBAL_TRACE_PROVIDER.create_span(
        span_data=data, span_id=span_id, parent=parent, disabled=disabled
    )
113
+
114
+
115
def function_span(
    name: str,
    input: str | None = None,
    output: str | None = None,
    span_id: str | None = None,
    parent: Trace | Span[Any] | None = None,
    disabled: bool = False,
) -> Span[FunctionSpanData]:
    """Create (but do not start) a function span.

    Start it with `with function_span(...):` or `span.start()` + `span.finish()`.

    Args:
        name: The name of the function.
        input: The input to the function.
        output: The output of the function.
        span_id: Optional span ID; generated if omitted. Prefer
            `util.gen_span_id()` to guarantee correct formatting.
        parent: Parent span or trace; defaults to the current trace/span.
        disabled: If True, the span is returned but never recorded.

    Returns:
        The newly created function span.
    """
    data = FunctionSpanData(name=name, input=input, output=output)
    return GLOBAL_TRACE_PROVIDER.create_span(
        span_data=data, span_id=span_id, parent=parent, disabled=disabled
    )
146
+
147
+
148
def generation_span(
    input: Sequence[Mapping[str, Any]] | None = None,
    output: Sequence[Mapping[str, Any]] | None = None,
    model: str | None = None,
    model_config: Mapping[str, Any] | None = None,
    usage: dict[str, Any] | None = None,
    span_id: str | None = None,
    parent: Trace | Span[Any] | None = None,
    disabled: bool = False,
) -> Span[GenerationSpanData]:
    """Create (but do not start) a generation span.

    Start it with `with generation_span(...):` or `span.start()` + `span.finish()`.

    Captures the details of one model generation: the input message sequence, any
    generated outputs, the model name and configuration, and usage data. To record
    only a model response identifier, use `response_span()` instead.

    Args:
        input: The sequence of input messages sent to the model.
        output: The sequence of output messages received from the model.
        model: The model identifier used for the generation.
        model_config: The model configuration (hyperparameters) used.
        usage: A dictionary of usage information (input tokens, output tokens, etc.).
        span_id: Optional span ID; generated if omitted. Prefer
            `util.gen_span_id()` to guarantee correct formatting.
        parent: Parent span or trace; defaults to the current trace/span.
        disabled: If True, the span is returned but never recorded.

    Returns:
        The newly created generation span.
    """
    data = GenerationSpanData(
        input=input,
        output=output,
        model=model,
        model_config=model_config,
        usage=usage,
    )
    return GLOBAL_TRACE_PROVIDER.create_span(
        span_data=data, span_id=span_id, parent=parent, disabled=disabled
    )
190
+
191
+
192
def response_span(
    response: Response | None = None,
    span_id: str | None = None,
    parent: Trace | Span[Any] | None = None,
    disabled: bool = False,
) -> Span[ResponseSpanData]:
    """Create (but do not start) a response span.

    Start it with `with response_span(...):` or `span.start()` + `span.finish()`.

    Args:
        response: The OpenAI Response object.
        span_id: Optional span ID; generated if omitted. Prefer
            `util.gen_span_id()` to guarantee correct formatting.
        parent: Parent span or trace; defaults to the current trace/span.
        disabled: If True, the span is returned but never recorded.

    Returns:
        The newly created response span.
    """
    data = ResponseSpanData(response=response)
    return GLOBAL_TRACE_PROVIDER.create_span(
        span_data=data, span_id=span_id, parent=parent, disabled=disabled
    )
216
+
217
+
218
def handoff_span(
    from_agent: str | None = None,
    to_agent: str | None = None,
    span_id: str | None = None,
    parent: Trace | Span[Any] | None = None,
    disabled: bool = False,
) -> Span[HandoffSpanData]:
    """Create (but do not start) a handoff span.

    Start it with `with handoff_span(...):` or `span.start()` + `span.finish()`.

    Args:
        from_agent: The name of the agent that is handing off.
        to_agent: The name of the agent that is receiving the handoff.
        span_id: Optional span ID; generated if omitted. Prefer
            `util.gen_span_id()` to guarantee correct formatting.
        parent: Parent span or trace; defaults to the current trace/span.
        disabled: If True, the span is returned but never recorded.

    Returns:
        The newly created handoff span.
    """
    data = HandoffSpanData(from_agent=from_agent, to_agent=to_agent)
    return GLOBAL_TRACE_PROVIDER.create_span(
        span_data=data, span_id=span_id, parent=parent, disabled=disabled
    )
247
+
248
+
249
def custom_span(
    name: str,
    data: dict[str, Any] | None = None,
    span_id: str | None = None,
    parent: Trace | Span[Any] | None = None,
    disabled: bool = False,
) -> Span[CustomSpanData]:
    """Create (but do not start) a custom span carrying caller-defined metadata.

    Start it with `with custom_span(...):` or `span.start()` + `span.finish()`.

    Args:
        name: The name of the custom span.
        data: Arbitrary structured data to associate with the span.
        span_id: Optional span ID; generated if omitted. Prefer
            `util.gen_span_id()` to guarantee correct formatting.
        parent: Parent span or trace; defaults to the current trace/span.
        disabled: If True, the span is returned but never recorded.

    Returns:
        The newly created custom span.
    """
    # A missing payload is normalized to an empty dict so consumers always see a mapping.
    span_data = CustomSpanData(name=name, data=data or {})
    return GLOBAL_TRACE_PROVIDER.create_span(
        span_data=span_data, span_id=span_id, parent=parent, disabled=disabled
    )
279
+
280
+
281
def guardrail_span(
    name: str,
    triggered: bool = False,
    span_id: str | None = None,
    parent: Trace | Span[Any] | None = None,
    disabled: bool = False,
) -> Span[GuardrailSpanData]:
    """Create (but do not start) a guardrail span.

    Start it with `with guardrail_span(...):` or `span.start()` + `span.finish()`.

    Args:
        name: The name of the guardrail.
        triggered: Whether the guardrail was triggered.
        span_id: Optional span ID; generated if omitted. Prefer
            `util.gen_span_id()` to guarantee correct formatting.
        parent: Parent span or trace; defaults to the current trace/span.
        disabled: If True, the span is returned but never recorded.

    Returns:
        The newly created guardrail span.
    """
    data = GuardrailSpanData(name=name, triggered=triggered)
    return GLOBAL_TRACE_PROVIDER.create_span(
        span_data=data, span_id=span_id, parent=parent, disabled=disabled
    )
@@ -0,0 +1,3 @@
1
import logging

# Module-level logger shared by the tracing package; configure the
# "openai.agents.tracing" logger to control its output.
logger = logging.getLogger("openai.agents.tracing")
@@ -0,0 +1,69 @@
1
+ import abc
2
+ from typing import TYPE_CHECKING, Any
3
+
4
+ if TYPE_CHECKING:
5
+ from .spans import Span
6
+ from .traces import Trace
7
+
8
+
9
class TracingProcessor(abc.ABC):
    """Interface for processing traces and spans as they start and finish.

    Implementations receive lifecycle callbacks and typically forward the
    items to an exporter (see `TracingExporter`).
    """

    @abc.abstractmethod
    def on_trace_start(self, trace: "Trace") -> None:
        """Called when a trace is started.

        Args:
            trace: The trace that started.
        """
        pass

    @abc.abstractmethod
    def on_trace_end(self, trace: "Trace") -> None:
        """Called when a trace is finished.

        Args:
            trace: The trace that finished.
        """
        pass

    @abc.abstractmethod
    def on_span_start(self, span: "Span[Any]") -> None:
        """Called when a span is started.

        Args:
            span: The span that started.
        """
        pass

    @abc.abstractmethod
    def on_span_end(self, span: "Span[Any]") -> None:
        """Called when a span is finished. Should not block or raise exceptions.

        Args:
            span: The span that finished.
        """
        pass

    @abc.abstractmethod
    def shutdown(self) -> None:
        """Called when the application stops."""
        pass

    @abc.abstractmethod
    def force_flush(self) -> None:
        """Forces an immediate flush of all queued spans/traces."""
        pass
57
+
58
+
59
class TracingExporter(abc.ABC):
    """Exports traces and spans. For example, could log them or send them to a backend."""

    @abc.abstractmethod
    def export(self, items: list["Trace | Span[Any]"]) -> None:
        """Exports a list of traces and spans.

        Args:
            items: The items to export. May contain a mix of Trace and Span
                objects.
        """
        pass
@@ -0,0 +1,261 @@
1
+ from __future__ import annotations
2
+
3
+ import os
4
+ import queue
5
+ import random
6
+ import threading
7
+ import time
8
+ from typing import Any
9
+
10
+ import httpx
11
+
12
+ from .logger import logger
13
+ from .processor_interface import TracingExporter, TracingProcessor
14
+ from .spans import Span
15
+ from .traces import Trace
16
+
17
+
18
class ConsoleSpanExporter(TracingExporter):
    """Debugging exporter that writes every trace and span to stdout."""

    def export(self, items: list[Trace | Span[Any]]) -> None:
        """Print each item: traces get a one-line summary, spans their full export."""
        for item in items:
            if isinstance(item, Trace):
                print(f"[Exporter] Export trace_id={item.trace_id}, name={item.name}, ")
                continue
            print(f"[Exporter] Export span: {item.export()}")
27
+
28
+
29
class BackendSpanExporter(TracingExporter):
    """Exports traces and spans by POSTing them to an OpenAI ingest endpoint.

    Transient failures (5xx, network errors) are retried with exponential
    backoff plus jitter; client errors (4xx) are treated as permanent and the
    batch is dropped.
    """

    def __init__(
        self,
        api_key: str | None = None,
        organization: str | None = None,
        project: str | None = None,
        endpoint: str = "https://api.openai.com/v1/traces/ingest",
        max_retries: int = 3,
        base_delay: float = 1.0,
        max_delay: float = 30.0,
    ):
        """
        Args:
            api_key: The API key for the "Authorization" header. Defaults to
                `os.environ["OPENAI_API_KEY"]` if not provided.
            organization: The OpenAI organization to use. Defaults to
                `os.environ["OPENAI_ORG_ID"]` if not provided.
            project: The OpenAI project to use. Defaults to
                `os.environ["OPENAI_PROJECT_ID"]` if not provided.
            endpoint: The HTTP endpoint to which traces/spans are posted.
            max_retries: Maximum number of retries upon failures.
            base_delay: Base delay (in seconds) for the first backoff.
            max_delay: Maximum delay (in seconds) for backoff growth.
        """
        self.api_key = api_key or os.environ.get("OPENAI_API_KEY")
        self.organization = organization or os.environ.get("OPENAI_ORG_ID")
        self.project = project or os.environ.get("OPENAI_PROJECT_ID")
        self.endpoint = endpoint
        self.max_retries = max_retries
        self.base_delay = base_delay
        self.max_delay = max_delay

        # Keep a client open for connection pooling across multiple export calls
        self._client = httpx.Client(timeout=httpx.Timeout(timeout=60, connect=5.0))

    def set_api_key(self, api_key: str):
        """Set the OpenAI API key for the exporter.

        Args:
            api_key: The OpenAI API key to use. This is the same key used by the OpenAI Python
                client.
        """
        self.api_key = api_key

    def export(self, items: list[Trace | Span[Any]]) -> None:
        """Serialize `items` and POST them to the backend, retrying transient errors.

        Args:
            items: The traces/spans to export. Items whose `export()` returns a
                falsy value are skipped.
        """
        if not items:
            return

        if not self.api_key:
            logger.warning("OPENAI_API_KEY is not set, skipping trace export")
            return

        # Call item.export() exactly once per item (the original called it twice),
        # and drop items that serialize to nothing.
        data = [exported for exported in (item.export() for item in items) if exported]
        if not data:
            # Nothing serializable; avoid posting an empty payload.
            return
        payload = {"data": data}

        headers = {
            "Authorization": f"Bearer {self.api_key}",
            "Content-Type": "application/json",
            "OpenAI-Beta": "traces=v1",
        }

        # Exponential backoff loop
        attempt = 0
        delay = self.base_delay
        while True:
            attempt += 1
            try:
                response = self._client.post(url=self.endpoint, headers=headers, json=payload)

                # If the response is successful, we're done. (The original logged
                # counts from two lists it never populated, so it always said 0.)
                if response.status_code < 300:
                    logger.debug(f"Exported {len(data)} items")
                    return

                # If the response is a client error (4xx), we won't retry
                if 400 <= response.status_code < 500:
                    logger.error(f"Tracing client error {response.status_code}: {response.text}")
                    return

                # For 5xx or other unexpected codes, treat it as transient and retry
                logger.warning(f"Server error {response.status_code}, retrying.")
            except httpx.RequestError as exc:
                # Network or other I/O error, we'll retry
                logger.warning(f"Request failed: {exc}")

            # If we reach here, we need to retry or give up
            if attempt >= self.max_retries:
                logger.error("Max retries reached, giving up on this batch.")
                return

            # Exponential backoff + jitter
            sleep_time = delay + random.uniform(0, 0.1 * delay)  # 10% jitter
            time.sleep(sleep_time)
            delay = min(delay * 2, self.max_delay)

    def close(self):
        """Close the underlying HTTP client."""
        self._client.close()
130
+
131
+
132
class BatchTraceProcessor(TracingProcessor):
    """Buffers traces/spans in a thread-safe queue and exports them on a background thread.

    Implementation notes:
    1. Using Queue, which is thread-safe.
    2. Using a background thread to export spans, to minimize any performance issues.
    3. Spans are stored in memory until they are exported.
    """

    def __init__(
        self,
        exporter: TracingExporter,
        max_queue_size: int = 8192,
        max_batch_size: int = 128,
        schedule_delay: float = 5.0,
        export_trigger_ratio: float = 0.7,
    ):
        """
        Args:
            exporter: The exporter to use.
            max_queue_size: The maximum number of spans to store in the queue. After this, we will
                start dropping spans.
            max_batch_size: The maximum number of spans to export in a single batch.
            schedule_delay: The delay between checks for new spans to export.
            export_trigger_ratio: The ratio of the queue size at which we will trigger an export.
        """
        self._exporter = exporter
        self._queue: queue.Queue[Trace | Span[Any]] = queue.Queue(maxsize=max_queue_size)
        self._max_queue_size = max_queue_size
        self._max_batch_size = max_batch_size
        self._schedule_delay = schedule_delay

        # The queue size threshold at which we export immediately.
        self._export_trigger_size = int(max_queue_size * export_trigger_ratio)

        # Track when we next *must* perform a scheduled export
        self._next_export_time = time.time() + self._schedule_delay

        # Fix: the original assigned this event twice; one assignment suffices.
        self._shutdown_event = threading.Event()
        self._worker_thread = threading.Thread(target=self._run, daemon=True)
        self._worker_thread.start()

    def on_trace_start(self, trace: Trace) -> None:
        """Enqueue the trace for export; drops it (with a warning) when the queue is full."""
        try:
            self._queue.put_nowait(trace)
        except queue.Full:
            logger.warning("Queue is full, dropping trace.")

    def on_trace_end(self, trace: Trace) -> None:
        # We send traces via on_trace_start, so we don't need to do anything here.
        pass

    def on_span_start(self, span: Span[Any]) -> None:
        # We send spans via on_span_end, so we don't need to do anything here.
        pass

    def on_span_end(self, span: Span[Any]) -> None:
        """Enqueue the span for export; drops it (with a warning) when the queue is full."""
        try:
            self._queue.put_nowait(span)
        except queue.Full:
            logger.warning("Queue is full, dropping span.")

    def shutdown(self, timeout: float | None = None):
        """
        Called when the application stops. We signal our thread to stop, then join it.
        """
        self._shutdown_event.set()
        self._worker_thread.join(timeout=timeout)

    def force_flush(self):
        """
        Forces an immediate flush of all queued spans.
        """
        self._export_batches(force=True)

    def _run(self):
        """Worker loop: flush on schedule, or immediately once the queue crosses the trigger size."""
        while not self._shutdown_event.is_set():
            current_time = time.time()
            queue_size = self._queue.qsize()

            # If it's time for a scheduled flush or queue is above the trigger threshold
            if current_time >= self._next_export_time or queue_size >= self._export_trigger_size:
                self._export_batches(force=False)
                # Reset the next scheduled flush time
                self._next_export_time = time.time() + self._schedule_delay
            else:
                # Sleep a short interval so we don't busy-wait.
                time.sleep(0.2)

        # Final drain after shutdown
        self._export_batches(force=True)

    def _export_batches(self, force: bool = False):
        """Drains the queue and exports in batches. If force=True, export everything.
        Otherwise, export up to `max_batch_size` repeatedly until the queue is empty or below a
        certain threshold.
        """
        while True:
            items_to_export: list[Span[Any] | Trace] = []

            # Gather a batch of spans up to max_batch_size (uncapped when forcing)
            while not self._queue.empty() and (
                force or len(items_to_export) < self._max_batch_size
            ):
                try:
                    items_to_export.append(self._queue.get_nowait())
                except queue.Empty:
                    # Another thread might have emptied the queue between checks
                    break

            # If we collected nothing, we're done
            if not items_to_export:
                break

            # Export the batch
            self._exporter.export(items_to_export)
247
+
248
+
249
# Create a shared global instance: one exporter and one processor are reused
# process-wide so all traces/spans funnel through a single batching pipeline.
_global_exporter = BackendSpanExporter()
_global_processor = BatchTraceProcessor(_global_exporter)


def default_exporter() -> BackendSpanExporter:
    """The default exporter, which exports traces and spans to the backend in batches."""
    return _global_exporter


def default_processor() -> BatchTraceProcessor:
    """The default processor, which exports traces and spans to the backend in batches."""
    return _global_processor
@@ -0,0 +1,45 @@
1
+ # Holds the current active span
2
+ import contextvars
3
+ from typing import TYPE_CHECKING, Any
4
+
5
+ from .logger import logger
6
+
7
+ if TYPE_CHECKING:
8
+ from .spans import Span
9
+ from .traces import Trace
10
+
11
# Context-local handle to the span currently in effect (None when no span is active).
_current_span: contextvars.ContextVar["Span[Any] | None"] = contextvars.ContextVar(
    "current_span", default=None
)

# Context-local handle to the trace currently in effect (None when no trace is active).
_current_trace: contextvars.ContextVar["Trace | None"] = contextvars.ContextVar(
    "current_trace", default=None
)
18
+
19
+
20
class Scope:
    """Static accessors for the context-local current span and trace.

    Backed by the module-level contextvars above, so each thread / async
    context sees its own current span and trace.
    """

    @classmethod
    def get_current_span(cls) -> "Span[Any] | None":
        """Return the span bound in the current context, or None."""
        return _current_span.get()

    @classmethod
    def set_current_span(cls, span: "Span[Any] | None") -> "contextvars.Token[Span[Any] | None]":
        """Bind `span` in the current context; the returned token restores the prior value."""
        return _current_span.set(span)

    @classmethod
    def reset_current_span(cls, token: "contextvars.Token[Span[Any] | None]") -> None:
        """Restore the span that was current before the matching set_current_span()."""
        _current_span.reset(token)

    @classmethod
    def get_current_trace(cls) -> "Trace | None":
        """Return the trace bound in the current context, or None."""
        return _current_trace.get()

    @classmethod
    def set_current_trace(cls, trace: "Trace | None") -> "contextvars.Token[Trace | None]":
        """Bind `trace` in the current context; the returned token restores the prior value."""
        logger.debug(f"Setting current trace: {trace.trace_id if trace else None}")
        return _current_trace.set(trace)

    @classmethod
    def reset_current_trace(cls, token: "contextvars.Token[Trace | None]") -> None:
        """Restore the trace that was current before the matching set_current_trace()."""
        logger.debug("Resetting current trace")
        _current_trace.reset(token)
+ _current_trace.reset(token)