lmnr 0.7.17.tar.gz → 0.7.19.tar.gz

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.

Files changed (103)
  1. {lmnr-0.7.17 → lmnr-0.7.19}/PKG-INFO +1 -1
  2. {lmnr-0.7.17 → lmnr-0.7.19}/pyproject.toml +1 -1
  3. {lmnr-0.7.17 → lmnr-0.7.19}/src/lmnr/opentelemetry_lib/opentelemetry/instrumentation/google_genai/__init__.py +21 -9
  4. {lmnr-0.7.17 → lmnr-0.7.19}/src/lmnr/opentelemetry_lib/tracing/exporter.py +26 -5
  5. {lmnr-0.7.17 → lmnr-0.7.19}/src/lmnr/opentelemetry_lib/tracing/processor.py +52 -30
  6. {lmnr-0.7.17 → lmnr-0.7.19}/src/lmnr/sdk/laminar.py +93 -7
  7. {lmnr-0.7.17 → lmnr-0.7.19}/src/lmnr/version.py +1 -1
  8. {lmnr-0.7.17 → lmnr-0.7.19}/README.md +0 -0
  9. {lmnr-0.7.17 → lmnr-0.7.19}/src/lmnr/__init__.py +0 -0
  10. {lmnr-0.7.17 → lmnr-0.7.19}/src/lmnr/cli.py +0 -0
  11. {lmnr-0.7.17 → lmnr-0.7.19}/src/lmnr/opentelemetry_lib/.flake8 +0 -0
  12. {lmnr-0.7.17 → lmnr-0.7.19}/src/lmnr/opentelemetry_lib/__init__.py +0 -0
  13. {lmnr-0.7.17 → lmnr-0.7.19}/src/lmnr/opentelemetry_lib/decorators/__init__.py +0 -0
  14. {lmnr-0.7.17 → lmnr-0.7.19}/src/lmnr/opentelemetry_lib/litellm/__init__.py +0 -0
  15. {lmnr-0.7.17 → lmnr-0.7.19}/src/lmnr/opentelemetry_lib/litellm/utils.py +0 -0
  16. {lmnr-0.7.17 → lmnr-0.7.19}/src/lmnr/opentelemetry_lib/opentelemetry/instrumentation/anthropic/__init__.py +0 -0
  17. {lmnr-0.7.17 → lmnr-0.7.19}/src/lmnr/opentelemetry_lib/opentelemetry/instrumentation/anthropic/config.py +0 -0
  18. {lmnr-0.7.17 → lmnr-0.7.19}/src/lmnr/opentelemetry_lib/opentelemetry/instrumentation/anthropic/event_emitter.py +0 -0
  19. {lmnr-0.7.17 → lmnr-0.7.19}/src/lmnr/opentelemetry_lib/opentelemetry/instrumentation/anthropic/event_models.py +0 -0
  20. {lmnr-0.7.17 → lmnr-0.7.19}/src/lmnr/opentelemetry_lib/opentelemetry/instrumentation/anthropic/span_utils.py +0 -0
  21. {lmnr-0.7.17 → lmnr-0.7.19}/src/lmnr/opentelemetry_lib/opentelemetry/instrumentation/anthropic/streaming.py +0 -0
  22. {lmnr-0.7.17 → lmnr-0.7.19}/src/lmnr/opentelemetry_lib/opentelemetry/instrumentation/anthropic/utils.py +0 -0
  23. {lmnr-0.7.17 → lmnr-0.7.19}/src/lmnr/opentelemetry_lib/opentelemetry/instrumentation/anthropic/version.py +0 -0
  24. {lmnr-0.7.17 → lmnr-0.7.19}/src/lmnr/opentelemetry_lib/opentelemetry/instrumentation/cua_agent/__init__.py +0 -0
  25. {lmnr-0.7.17 → lmnr-0.7.19}/src/lmnr/opentelemetry_lib/opentelemetry/instrumentation/cua_computer/__init__.py +0 -0
  26. {lmnr-0.7.17 → lmnr-0.7.19}/src/lmnr/opentelemetry_lib/opentelemetry/instrumentation/cua_computer/utils.py +0 -0
  27. {lmnr-0.7.17 → lmnr-0.7.19}/src/lmnr/opentelemetry_lib/opentelemetry/instrumentation/google_genai/config.py +0 -0
  28. {lmnr-0.7.17 → lmnr-0.7.19}/src/lmnr/opentelemetry_lib/opentelemetry/instrumentation/google_genai/schema_utils.py +0 -0
  29. {lmnr-0.7.17 → lmnr-0.7.19}/src/lmnr/opentelemetry_lib/opentelemetry/instrumentation/google_genai/utils.py +0 -0
  30. {lmnr-0.7.17 → lmnr-0.7.19}/src/lmnr/opentelemetry_lib/opentelemetry/instrumentation/groq/__init__.py +0 -0
  31. {lmnr-0.7.17 → lmnr-0.7.19}/src/lmnr/opentelemetry_lib/opentelemetry/instrumentation/groq/config.py +0 -0
  32. {lmnr-0.7.17 → lmnr-0.7.19}/src/lmnr/opentelemetry_lib/opentelemetry/instrumentation/groq/event_emitter.py +0 -0
  33. {lmnr-0.7.17 → lmnr-0.7.19}/src/lmnr/opentelemetry_lib/opentelemetry/instrumentation/groq/event_models.py +0 -0
  34. {lmnr-0.7.17 → lmnr-0.7.19}/src/lmnr/opentelemetry_lib/opentelemetry/instrumentation/groq/span_utils.py +0 -0
  35. {lmnr-0.7.17 → lmnr-0.7.19}/src/lmnr/opentelemetry_lib/opentelemetry/instrumentation/groq/utils.py +0 -0
  36. {lmnr-0.7.17 → lmnr-0.7.19}/src/lmnr/opentelemetry_lib/opentelemetry/instrumentation/groq/version.py +0 -0
  37. {lmnr-0.7.17 → lmnr-0.7.19}/src/lmnr/opentelemetry_lib/opentelemetry/instrumentation/langgraph/__init__.py +0 -0
  38. {lmnr-0.7.17 → lmnr-0.7.19}/src/lmnr/opentelemetry_lib/opentelemetry/instrumentation/langgraph/utils.py +0 -0
  39. {lmnr-0.7.17 → lmnr-0.7.19}/src/lmnr/opentelemetry_lib/opentelemetry/instrumentation/openai/__init__.py +0 -0
  40. {lmnr-0.7.17 → lmnr-0.7.19}/src/lmnr/opentelemetry_lib/opentelemetry/instrumentation/openai/shared/__init__.py +0 -0
  41. {lmnr-0.7.17 → lmnr-0.7.19}/src/lmnr/opentelemetry_lib/opentelemetry/instrumentation/openai/shared/chat_wrappers.py +0 -0
  42. {lmnr-0.7.17 → lmnr-0.7.19}/src/lmnr/opentelemetry_lib/opentelemetry/instrumentation/openai/shared/completion_wrappers.py +0 -0
  43. {lmnr-0.7.17 → lmnr-0.7.19}/src/lmnr/opentelemetry_lib/opentelemetry/instrumentation/openai/shared/config.py +0 -0
  44. {lmnr-0.7.17 → lmnr-0.7.19}/src/lmnr/opentelemetry_lib/opentelemetry/instrumentation/openai/shared/embeddings_wrappers.py +0 -0
  45. {lmnr-0.7.17 → lmnr-0.7.19}/src/lmnr/opentelemetry_lib/opentelemetry/instrumentation/openai/shared/event_emitter.py +0 -0
  46. {lmnr-0.7.17 → lmnr-0.7.19}/src/lmnr/opentelemetry_lib/opentelemetry/instrumentation/openai/shared/event_models.py +0 -0
  47. {lmnr-0.7.17 → lmnr-0.7.19}/src/lmnr/opentelemetry_lib/opentelemetry/instrumentation/openai/shared/image_gen_wrappers.py +0 -0
  48. {lmnr-0.7.17 → lmnr-0.7.19}/src/lmnr/opentelemetry_lib/opentelemetry/instrumentation/openai/utils.py +0 -0
  49. {lmnr-0.7.17 → lmnr-0.7.19}/src/lmnr/opentelemetry_lib/opentelemetry/instrumentation/openai/v0/__init__.py +0 -0
  50. {lmnr-0.7.17 → lmnr-0.7.19}/src/lmnr/opentelemetry_lib/opentelemetry/instrumentation/openai/v1/__init__.py +0 -0
  51. {lmnr-0.7.17 → lmnr-0.7.19}/src/lmnr/opentelemetry_lib/opentelemetry/instrumentation/openai/v1/assistant_wrappers.py +0 -0
  52. {lmnr-0.7.17 → lmnr-0.7.19}/src/lmnr/opentelemetry_lib/opentelemetry/instrumentation/openai/v1/event_handler_wrapper.py +0 -0
  53. {lmnr-0.7.17 → lmnr-0.7.19}/src/lmnr/opentelemetry_lib/opentelemetry/instrumentation/openai/v1/responses_wrappers.py +0 -0
  54. {lmnr-0.7.17 → lmnr-0.7.19}/src/lmnr/opentelemetry_lib/opentelemetry/instrumentation/openai/version.py +0 -0
  55. {lmnr-0.7.17 → lmnr-0.7.19}/src/lmnr/opentelemetry_lib/opentelemetry/instrumentation/openhands_ai/__init__.py +0 -0
  56. {lmnr-0.7.17 → lmnr-0.7.19}/src/lmnr/opentelemetry_lib/opentelemetry/instrumentation/opentelemetry/__init__.py +0 -0
  57. {lmnr-0.7.17 → lmnr-0.7.19}/src/lmnr/opentelemetry_lib/opentelemetry/instrumentation/skyvern/__init__.py +0 -0
  58. {lmnr-0.7.17 → lmnr-0.7.19}/src/lmnr/opentelemetry_lib/opentelemetry/instrumentation/threading/__init__.py +0 -0
  59. {lmnr-0.7.17 → lmnr-0.7.19}/src/lmnr/opentelemetry_lib/tracing/__init__.py +0 -0
  60. {lmnr-0.7.17 → lmnr-0.7.19}/src/lmnr/opentelemetry_lib/tracing/_instrument_initializers.py +0 -0
  61. {lmnr-0.7.17 → lmnr-0.7.19}/src/lmnr/opentelemetry_lib/tracing/attributes.py +0 -0
  62. {lmnr-0.7.17 → lmnr-0.7.19}/src/lmnr/opentelemetry_lib/tracing/context.py +0 -0
  63. {lmnr-0.7.17 → lmnr-0.7.19}/src/lmnr/opentelemetry_lib/tracing/instruments.py +0 -0
  64. {lmnr-0.7.17 → lmnr-0.7.19}/src/lmnr/opentelemetry_lib/tracing/tracer.py +0 -0
  65. {lmnr-0.7.17 → lmnr-0.7.19}/src/lmnr/opentelemetry_lib/utils/__init__.py +0 -0
  66. {lmnr-0.7.17 → lmnr-0.7.19}/src/lmnr/opentelemetry_lib/utils/json_encoder.py +0 -0
  67. {lmnr-0.7.17 → lmnr-0.7.19}/src/lmnr/opentelemetry_lib/utils/package_check.py +0 -0
  68. {lmnr-0.7.17 → lmnr-0.7.19}/src/lmnr/opentelemetry_lib/utils/wrappers.py +0 -0
  69. {lmnr-0.7.17 → lmnr-0.7.19}/src/lmnr/py.typed +0 -0
  70. {lmnr-0.7.17 → lmnr-0.7.19}/src/lmnr/sdk/__init__.py +0 -0
  71. {lmnr-0.7.17 → lmnr-0.7.19}/src/lmnr/sdk/browser/__init__.py +0 -0
  72. {lmnr-0.7.17 → lmnr-0.7.19}/src/lmnr/sdk/browser/browser_use_cdp_otel.py +0 -0
  73. {lmnr-0.7.17 → lmnr-0.7.19}/src/lmnr/sdk/browser/browser_use_otel.py +0 -0
  74. {lmnr-0.7.17 → lmnr-0.7.19}/src/lmnr/sdk/browser/bubus_otel.py +0 -0
  75. {lmnr-0.7.17 → lmnr-0.7.19}/src/lmnr/sdk/browser/cdp_utils.py +0 -0
  76. {lmnr-0.7.17 → lmnr-0.7.19}/src/lmnr/sdk/browser/patchright_otel.py +0 -0
  77. {lmnr-0.7.17 → lmnr-0.7.19}/src/lmnr/sdk/browser/playwright_otel.py +0 -0
  78. {lmnr-0.7.17 → lmnr-0.7.19}/src/lmnr/sdk/browser/pw_utils.py +0 -0
  79. {lmnr-0.7.17 → lmnr-0.7.19}/src/lmnr/sdk/browser/recorder/record.umd.min.cjs +0 -0
  80. {lmnr-0.7.17 → lmnr-0.7.19}/src/lmnr/sdk/browser/utils.py +0 -0
  81. {lmnr-0.7.17 → lmnr-0.7.19}/src/lmnr/sdk/client/asynchronous/async_client.py +0 -0
  82. {lmnr-0.7.17 → lmnr-0.7.19}/src/lmnr/sdk/client/asynchronous/resources/__init__.py +0 -0
  83. {lmnr-0.7.17 → lmnr-0.7.19}/src/lmnr/sdk/client/asynchronous/resources/agent.py +0 -0
  84. {lmnr-0.7.17 → lmnr-0.7.19}/src/lmnr/sdk/client/asynchronous/resources/base.py +0 -0
  85. {lmnr-0.7.17 → lmnr-0.7.19}/src/lmnr/sdk/client/asynchronous/resources/browser_events.py +0 -0
  86. {lmnr-0.7.17 → lmnr-0.7.19}/src/lmnr/sdk/client/asynchronous/resources/evals.py +0 -0
  87. {lmnr-0.7.17 → lmnr-0.7.19}/src/lmnr/sdk/client/asynchronous/resources/evaluators.py +0 -0
  88. {lmnr-0.7.17 → lmnr-0.7.19}/src/lmnr/sdk/client/asynchronous/resources/tags.py +0 -0
  89. {lmnr-0.7.17 → lmnr-0.7.19}/src/lmnr/sdk/client/synchronous/resources/__init__.py +0 -0
  90. {lmnr-0.7.17 → lmnr-0.7.19}/src/lmnr/sdk/client/synchronous/resources/agent.py +0 -0
  91. {lmnr-0.7.17 → lmnr-0.7.19}/src/lmnr/sdk/client/synchronous/resources/base.py +0 -0
  92. {lmnr-0.7.17 → lmnr-0.7.19}/src/lmnr/sdk/client/synchronous/resources/browser_events.py +0 -0
  93. {lmnr-0.7.17 → lmnr-0.7.19}/src/lmnr/sdk/client/synchronous/resources/evals.py +0 -0
  94. {lmnr-0.7.17 → lmnr-0.7.19}/src/lmnr/sdk/client/synchronous/resources/evaluators.py +0 -0
  95. {lmnr-0.7.17 → lmnr-0.7.19}/src/lmnr/sdk/client/synchronous/resources/tags.py +0 -0
  96. {lmnr-0.7.17 → lmnr-0.7.19}/src/lmnr/sdk/client/synchronous/sync_client.py +0 -0
  97. {lmnr-0.7.17 → lmnr-0.7.19}/src/lmnr/sdk/datasets.py +0 -0
  98. {lmnr-0.7.17 → lmnr-0.7.19}/src/lmnr/sdk/decorators.py +0 -0
  99. {lmnr-0.7.17 → lmnr-0.7.19}/src/lmnr/sdk/eval_control.py +0 -0
  100. {lmnr-0.7.17 → lmnr-0.7.19}/src/lmnr/sdk/evaluations.py +0 -0
  101. {lmnr-0.7.17 → lmnr-0.7.19}/src/lmnr/sdk/log.py +0 -0
  102. {lmnr-0.7.17 → lmnr-0.7.19}/src/lmnr/sdk/types.py +0 -0
  103. {lmnr-0.7.17 → lmnr-0.7.19}/src/lmnr/sdk/utils.py +0 -0
{lmnr-0.7.17 → lmnr-0.7.19}/PKG-INFO

@@ -1,6 +1,6 @@
 Metadata-Version: 2.4
 Name: lmnr
-Version: 0.7.17
+Version: 0.7.19
 Summary: Python SDK for Laminar
 Author: lmnr.ai
 Author-email: lmnr.ai <founders@lmnr.ai>
{lmnr-0.7.17 → lmnr-0.7.19}/pyproject.toml

@@ -6,7 +6,7 @@

 [project]
 name = "lmnr"
-version = "0.7.17"
+version = "0.7.19"
 description = "Python SDK for Laminar"
 authors = [
     { name = "lmnr.ai", email = "founders@lmnr.ai" }
{lmnr-0.7.17 → lmnr-0.7.19}/src/lmnr/opentelemetry_lib/opentelemetry/instrumentation/google_genai/__init__.py

@@ -206,15 +206,17 @@ def _set_request_attributes(span, args, kwargs):
             contents = [contents]
         for content in contents:
             processed_content = process_content_union(content)
-            content_str = get_content(processed_content)
+            content_payload = get_content(processed_content)
+            if isinstance(content_payload, dict):
+                content_payload = [content_payload]

             set_span_attribute(
                 span,
                 f"{gen_ai_attributes.GEN_AI_PROMPT}.{i}.content",
                 (
-                    content_str
-                    if isinstance(content_str, str)
-                    else json_dumps(content_str)
+                    content_payload
+                    if isinstance(content_payload, str)
+                    else json_dumps(content_payload)
                 ),
             )
             blocks = (
@@ -318,20 +320,22 @@ def _set_response_attributes(span, response: types.GenerateContentResponse):
     for candidate in candidates_list:
         has_content = False
         processed_content = process_content_union(candidate.content)
-        content_str = get_content(processed_content)
+        content_payload = get_content(processed_content)
+        if isinstance(content_payload, dict):
+            content_payload = [content_payload]

         set_span_attribute(
             span, f"{gen_ai_attributes.GEN_AI_COMPLETION}.{i}.role", "model"
         )
-        if content_str:
+        if content_payload:
             has_content = True
             set_span_attribute(
                 span,
                 f"{gen_ai_attributes.GEN_AI_COMPLETION}.{i}.content",
                 (
-                    content_str
-                    if isinstance(content_str, str)
-                    else json_dumps(content_str)
+                    content_payload
+                    if isinstance(content_payload, str)
+                    else json_dumps(content_payload)
                 ),
             )
             blocks = (
@@ -380,6 +384,10 @@ def _build_from_streaming_response(
     aggregated_usage_metadata = defaultdict(int)
     model_version = None
     for chunk in response:
+        try:
+            span.add_event("llm.content.completion.chunk")
+        except Exception:
+            pass
         # Important: do all processing in a separate sync function, that is
         # wrapped in @dont_throw. If we did it here, the @dont_throw on top of
         # this function would not be able to catch the errors, as they are
@@ -430,6 +438,10 @@ async def _abuild_from_streaming_response(
     aggregated_usage_metadata = defaultdict(int)
     model_version = None
     async for chunk in response:
+        try:
+            span.add_event("llm.content.completion.chunk")
+        except Exception:
+            pass
         # Important: do all processing in a separate sync function, that is
         # wrapped in @dont_throw. If we did it here, the @dont_throw on top of
         # this function would not be able to catch the errors, as they are
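
The google_genai changes above do two things: when get_content returns a single dict, it is now wrapped in a list before being JSON-encoded into the prompt and completion content attributes, and every streamed chunk records an llm.content.completion.chunk event on the span. A minimal sketch of the serialization rule, using json.dumps directly in place of the instrumentation's json_dumps helper:

```python
import json

def serialize_content(payload):
    # Mirrors the updated attribute logic: a lone dict is wrapped in a list,
    # strings pass through unchanged, and everything else is JSON-encoded.
    if isinstance(payload, dict):
        payload = [payload]
    return payload if isinstance(payload, str) else json.dumps(payload)

print(serialize_content("hello"))
# hello
print(serialize_content({"type": "text", "text": "hi"}))
# [{"type": "text", "text": "hi"}]
```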
{lmnr-0.7.17 → lmnr-0.7.19}/src/lmnr/opentelemetry_lib/tracing/exporter.py

@@ -1,5 +1,6 @@
 import grpc
 import re
+import threading
 from opentelemetry.sdk.trace.export import SpanExporter, SpanExportResult
 from opentelemetry.sdk.trace import ReadableSpan
 from opentelemetry.exporter.otlp.proto.grpc.trace_exporter import (
@@ -22,6 +23,7 @@ class LaminarSpanExporter(SpanExporter):
     headers: dict[str, str]
     timeout: float
     force_http: bool
+    _instance_lock: threading.RLock

     def __init__(
         self,
@@ -31,6 +33,7 @@ class LaminarSpanExporter(SpanExporter):
         timeout_seconds: int = 30,
         force_http: bool = False,
     ):
+        self._instance_lock = threading.RLock()
         url = base_url or from_env("LMNR_BASE_URL") or "https://api.lmnr.ai"
         url = url.rstrip("/")
         if match := re.search(r":(\d{1,5})$", url):
@@ -74,27 +77,45 @@ class LaminarSpanExporter(SpanExporter):
                 "- set the OTEL_ENDPOINT environment variable\n"
                 "- pass the base_url parameter to Laminar.initialize"
             )
+        self._init_instance()

+    def _init_instance(self):
+        # Create new instance first (outside critical section for performance)
         if self.force_http:
-            self.instance = HTTPOTLPSpanExporter(
+            new_instance = HTTPOTLPSpanExporter(
                 endpoint=self.endpoint,
                 headers=self.headers,
                 compression=HTTPCompression.Gzip,
                 timeout=self.timeout,
             )
         else:
-            self.instance = OTLPSpanExporter(
+            new_instance = OTLPSpanExporter(
                 endpoint=self.endpoint,
                 headers=self.headers,
                 timeout=self.timeout,
                 compression=grpc.Compression.Gzip,
             )

+        # Atomic swap with proper cleanup
+        with self._instance_lock:
+            old_instance: OTLPSpanExporter | HTTPOTLPSpanExporter | None = getattr(
+                self, "instance", None
+            )
+            if old_instance is not None:
+                try:
+                    old_instance.shutdown()
+                except Exception as e:
+                    logger.warning(f"Error shutting down old exporter instance: {e}")
+            self.instance = new_instance
+
     def export(self, spans: list[ReadableSpan]) -> SpanExportResult:
-        return self.instance.export(spans)
+        with self._instance_lock:
+            return self.instance.export(spans)

     def shutdown(self) -> None:
-        return self.instance.shutdown()
+        with self._instance_lock:
+            return self.instance.shutdown()

     def force_flush(self, timeout_millis: int = 30000) -> bool:
-        return self.instance.force_flush(timeout_millis)
+        with self._instance_lock:
+            return self.instance.force_flush(timeout_millis)
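
The exporter now owns a re-entrant lock and funnels construction through _init_instance, so export, shutdown, and force_flush cannot race with a reinitialization triggered from the processor's force_reinit. A simplified sketch of that swap pattern, with a generic factory callable standing in for the real OTLP exporter classes:

```python
import threading

class SwappableExporter:
    """Sketch of the pattern above: build the replacement outside the lock,
    then swap it in and shut the old one down under the lock, so concurrent
    export/shutdown/force_flush calls never see a half-initialized backend."""

    def __init__(self, factory):
        self._factory = factory          # callable that builds a new backend
        self._lock = threading.RLock()
        self._instance = None
        self.reinit()

    def reinit(self):
        new_instance = self._factory()   # potentially slow, done outside the lock
        with self._lock:
            old_instance, self._instance = self._instance, new_instance
            if old_instance is not None:
                try:
                    old_instance.shutdown()
                except Exception:
                    pass  # best-effort cleanup, as in the diff above

    def export(self, spans):
        with self._lock:
            return self._instance.export(spans)
```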
{lmnr-0.7.17 → lmnr-0.7.19}/src/lmnr/opentelemetry_lib/tracing/processor.py

@@ -1,4 +1,5 @@
 import logging
+import threading
 import uuid

 from opentelemetry.sdk.trace.export import (
@@ -30,6 +31,8 @@ class LaminarSpanProcessor(SpanProcessor):
     __span_id_to_path: dict[int, list[str]] = {}
     __span_id_lists: dict[int, list[str]] = {}
     max_export_batch_size: int
+    _instance_lock: threading.RLock
+    _paths_lock: threading.RLock

     def __init__(
         self,
@@ -42,6 +45,8 @@ class LaminarSpanProcessor(SpanProcessor):
         disable_batch: bool = False,
         exporter: SpanExporter | None = None,
     ):
+        self._instance_lock = threading.RLock()
+        self._paths_lock = threading.RLock()
         self.logger = get_default_logger(__name__)
         self.max_export_batch_size = max_export_batch_size
         self.exporter = exporter or LaminarSpanExporter(
@@ -60,20 +65,25 @@ class LaminarSpanProcessor(SpanProcessor):
         )

     def on_start(self, span: Span, parent_context: Context | None = None):
-        parent_span_path = list(span.attributes.get(PARENT_SPAN_PATH, tuple())) or (
-            self.__span_id_to_path.get(span.parent.span_id) if span.parent else None
-        )
-        parent_span_ids_path = list(
-            span.attributes.get(PARENT_SPAN_IDS_PATH, tuple())
-        ) or (self.__span_id_lists.get(span.parent.span_id, []) if span.parent else [])
-        span_path = parent_span_path + [span.name] if parent_span_path else [span.name]
-        span_ids_path = parent_span_ids_path + [
-            str(uuid.UUID(int=span.get_span_context().span_id))
-        ]
-        span.set_attribute(SPAN_PATH, span_path)
-        span.set_attribute(SPAN_IDS_PATH, span_ids_path)
-        self.__span_id_to_path[span.get_span_context().span_id] = span_path
-        self.__span_id_lists[span.get_span_context().span_id] = span_ids_path
+        with self._paths_lock:
+            parent_span_path = list(span.attributes.get(PARENT_SPAN_PATH, tuple())) or (
+                self.__span_id_to_path.get(span.parent.span_id) if span.parent else None
+            )
+            parent_span_ids_path = list(
+                span.attributes.get(PARENT_SPAN_IDS_PATH, tuple())
+            ) or (
+                self.__span_id_lists.get(span.parent.span_id, []) if span.parent else []
+            )
+            span_path = (
+                parent_span_path + [span.name] if parent_span_path else [span.name]
+            )
+            span_ids_path = parent_span_ids_path + [
+                str(uuid.UUID(int=span.get_span_context().span_id))
+            ]
+            span.set_attribute(SPAN_PATH, span_path)
+            span.set_attribute(SPAN_IDS_PATH, span_ids_path)
+            self.__span_id_to_path[span.get_span_context().span_id] = span_path
+            self.__span_id_lists[span.get_span_context().span_id] = span_ids_path

         span.set_attribute(SPAN_INSTRUMENTATION_SOURCE, "python")
         span.set_attribute(SPAN_SDK_VERSION, __version__)
@@ -84,13 +94,16 @@ class LaminarSpanProcessor(SpanProcessor):
             for key, value in graph_context.items():
                 span.set_attribute(f"lmnr.association.properties.{key}", value)

-        self.instance.on_start(span, parent_context)
+        with self._instance_lock:
+            self.instance.on_start(span, parent_context)

     def on_end(self, span: Span):
-        self.instance.on_end(span)
+        with self._instance_lock:
+            self.instance.on_end(span)

     def force_flush(self, timeout_millis: int = 30000) -> bool:
-        return self.instance.force_flush(timeout_millis)
+        with self._instance_lock:
+            return self.instance.force_flush(timeout_millis)

     def force_reinit(self):
         if not isinstance(self.exporter, LaminarSpanExporter):
@@ -98,23 +111,32 @@ class LaminarSpanProcessor(SpanProcessor):
                 "LaminarSpanProcessor is not using LaminarSpanExporter, cannot force reinit"
             )
             return
-        self.instance.shutdown()
-        disable_batch = isinstance(self.instance, SimpleSpanProcessor)
-        del self.exporter.instance
-        del self.instance

+        # Reinitialize exporter (thread-safe, handles its own locking)
         self.exporter._init_instance()
-        self.instance = (
-            SimpleSpanProcessor(self.exporter)
-            if disable_batch
-            else BatchSpanProcessor(
-                self.exporter, max_export_batch_size=self.max_export_batch_size
+
+        with self._instance_lock:
+            old_instance = self.instance
+            disable_batch = isinstance(old_instance, SimpleSpanProcessor)
+
+            try:
+                old_instance.shutdown()
+            except Exception as e:
+                self.logger.debug(f"Error shutting down old processor instance: {e}")
+
+            self.instance = (
+                SimpleSpanProcessor(self.exporter)
+                if disable_batch
+                else BatchSpanProcessor(
+                    self.exporter, max_export_batch_size=self.max_export_batch_size
+                )
             )
-        )

     def shutdown(self):
-        self.instance.shutdown()
+        with self._instance_lock:
+            self.instance.shutdown()

     def clear(self):
-        self.__span_id_to_path = {}
-        self.__span_id_lists = {}
+        with self._paths_lock:
+            self.__span_id_to_path = {}
+            self.__span_id_lists = {}
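
The processor applies the same idea with two locks: _instance_lock serializes calls into the wrapped Simple/BatchSpanProcessor, while _paths_lock protects the span-path dictionaries shared by on_start and clear. A simplified sketch of that path bookkeeping, using plain integers and names rather than real OpenTelemetry spans:

```python
import threading

class SpanPathBook:
    """Simplified sketch of the on_start bookkeeping above: a span's path is
    its parent's path plus its own name, and the lookup table is guarded by
    one lock so concurrent span starts cannot interleave."""

    def __init__(self):
        self._lock = threading.RLock()
        self._paths: dict[int, list[str]] = {}

    def register(self, span_id: int, name: str, parent_id: int | None) -> list[str]:
        with self._lock:
            parent = self._paths.get(parent_id, []) if parent_id is not None else []
            path = parent + [name]
            self._paths[span_id] = path
            return path

    def clear(self) -> None:
        with self._lock:
            self._paths = {}

book = SpanPathBook()
print(book.register(1, "outer", None))    # ['outer']
print(book.register(2, "llm_call", 1))    # ['outer', 'llm_call']
```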
{lmnr-0.7.17 → lmnr-0.7.19}/src/lmnr/sdk/laminar.py

@@ -1,5 +1,5 @@
 from contextlib import contextmanager
-from contextvars import Context
+from contextvars import Context, Token
 import warnings
 from lmnr.opentelemetry_lib import TracerManager
 from lmnr.opentelemetry_lib.tracing import TracerWrapper, get_current_context
@@ -434,17 +434,17 @@ class Laminar:
             with Laminar.use_span(span):
                 with Laminar.start_as_current_span("foo_inner"):
                     some_function()
-
+
         def bar():
             with Laminar.use_span(span):
                 openai_client.chat.completions.create()
-
+
         span = Laminar.start_span("outer")
         foo(span)
         bar(span)
         # IMPORTANT: End the span manually
         span.end()
-
+
         # Results in:
         # | outer
         # | | foo
@@ -642,6 +642,92 @@ class Laminar:
         if end_on_exit:
             span.end()

+    @classmethod
+    def start_active_span(
+        cls,
+        name: str,
+        input: Any = None,
+        span_type: Literal["DEFAULT", "LLM", "TOOL"] = "DEFAULT",
+        context: Context | None = None,
+        parent_span_context: LaminarSpanContext | None = None,
+        tags: list[str] | None = None,
+    ) -> tuple[Span, Token[Context] | None]:
+        """Start a new span. Useful for manual instrumentation.
+        If `span_type` is set to `"LLM"`, you should report usage and response
+        attributes manually. See `Laminar.set_span_attributes` for more
+        information. Returns the span and a context token that can be used to
+        detach the context.
+
+        Usage example:
+        ```python
+        from src.lmnr import Laminar
+        def foo():
+            with Laminar.start_active_span("foo_inner"):
+                some_function()
+
+        def bar():
+            openai_client.chat.completions.create()
+
+        span, ctx_token = Laminar.start_active_span("outer")
+        foo()
+        bar()
+        # IMPORTANT: End the span manually
+        Laminar.end_active_span(span, ctx_token)
+
+        # Results in:
+        # | outer
+        # | | foo
+        # | | | foo_inner
+        # | | bar
+        # | | | openai.chat
+        ```
+
+        Args:
+            name (str): name of the span
+            input (Any, optional): input to the span. Will be sent as an\
+                attribute, so must be json serializable. Defaults to None.
+            span_type (Literal["DEFAULT", "LLM", "TOOL"], optional):\
+                type of the span. If you use `"LLM"`, you should report usage\
+                and response attributes manually. Defaults to "DEFAULT".
+            context (Context | None, optional): raw OpenTelemetry context\
+                to attach the span to. Defaults to None.
+            parent_span_context (LaminarSpanContext | None, optional): parent\
+                span context to use for the span. Useful for continuing traces\
+                across services. If parent_span_context is a\
+                raw OpenTelemetry span context, or if it is a dictionary or string\
+                obtained from `Laminar.get_laminar_span_context_dict()` or\
+                `Laminar.get_laminar_span_context_str()` respectively, it will be\
+                converted to a `LaminarSpanContext` if possible. See also\
+                `Laminar.get_span_context`, `Laminar.get_span_context_dict` and\
+                `Laminar.get_span_context_str` for more information.
+                Defaults to None.
+            tags (list[str] | None, optional): tags to set for the span.
+                Defaults to None.
+        """
+        span = cls.start_span(
+            name, input, span_type, context, parent_span_context, tags
+        )
+        if not cls.is_initialized():
+            return span, None
+        wrapper = TracerWrapper()
+        context = wrapper.push_span_context(span)
+        context_token = context_api.attach(context)
+        return span, context_token
+
+    @classmethod
+    def end_active_span(cls, span: Span, ctx_token: Token[Context]):
+        """End an active span."""
+        span.end()
+        if not cls.is_initialized():
+            return
+        wrapper = TracerWrapper()
+        try:
+            wrapper.pop_span_context()
+            if ctx_token is not None:
+                context_api.detach(ctx_token)
+        except Exception:
+            pass
+
     @classmethod
     def set_span_output(cls, output: Any = None):
         """Set the output of the current span. Useful for manual
@@ -671,12 +757,12 @@ class Laminar:
         instrumentation.
         Example:
         ```python
-        with L.start_as_current_span(
+        with Laminar.start_as_current_span(
             name="my_span_name", input=input["messages"], span_type="LLM"
         ):
             response = await my_custom_call_to_openai(input)
-            L.set_span_output(response["choices"][0]["message"]["content"])
-            L.set_span_attributes({
+            Laminar.set_span_output(response["choices"][0]["message"]["content"])
+            Laminar.set_span_attributes({
                 Attributes.PROVIDER: 'openai',
                 Attributes.REQUEST_MODEL: input["model"],
                 Attributes.RESPONSE_MODEL: response["model"],
{lmnr-0.7.17 → lmnr-0.7.19}/src/lmnr/version.py

@@ -3,7 +3,7 @@ import httpx
 from packaging import version


-__version__ = "0.7.17"
+__version__ = "0.7.19"
 PYTHON_VERSION = f"{sys.version_info.major}.{sys.version_info.minor}"
