lmnr 0.5.2__py3-none-any.whl → 0.6.0__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (48)
  1. lmnr/__init__.py +7 -2
  2. lmnr/cli.py +10 -8
  3. lmnr/opentelemetry_lib/__init__.py +55 -0
  4. lmnr/{openllmetry_sdk/decorators/base.py → opentelemetry_lib/decorators/__init__.py} +24 -15
  5. lmnr/{openllmetry_sdk → opentelemetry_lib}/opentelemetry/instrumentation/google_genai/utils.py +1 -1
  6. lmnr/opentelemetry_lib/tracing/__init__.py +139 -0
  7. lmnr/opentelemetry_lib/tracing/_instrument_initializers.py +398 -0
  8. lmnr/{openllmetry_sdk → opentelemetry_lib}/tracing/attributes.py +14 -7
  9. lmnr/opentelemetry_lib/tracing/context_properties.py +53 -0
  10. lmnr/opentelemetry_lib/tracing/exporter.py +60 -0
  11. lmnr/opentelemetry_lib/tracing/instruments.py +121 -0
  12. lmnr/opentelemetry_lib/tracing/processor.py +96 -0
  13. lmnr/{openllmetry_sdk/tracing/context_manager.py → opentelemetry_lib/tracing/tracer.py} +6 -1
  14. lmnr/{openllmetry_sdk → opentelemetry_lib}/utils/package_check.py +3 -1
  15. lmnr/sdk/browser/browser_use_otel.py +20 -3
  16. lmnr/sdk/browser/patchright_otel.py +177 -0
  17. lmnr/sdk/browser/playwright_otel.py +16 -7
  18. lmnr/sdk/browser/pw_utils.py +116 -74
  19. lmnr/sdk/browser/rrweb/rrweb.umd.min.cjs +98 -0
  20. lmnr/sdk/client/asynchronous/resources/agent.py +22 -1
  21. lmnr/sdk/client/synchronous/resources/agent.py +23 -1
  22. lmnr/sdk/decorators.py +5 -3
  23. lmnr/sdk/eval_control.py +3 -2
  24. lmnr/sdk/evaluations.py +10 -16
  25. lmnr/sdk/laminar.py +16 -34
  26. lmnr/sdk/types.py +2 -0
  27. lmnr/sdk/utils.py +2 -3
  28. lmnr/version.py +1 -1
  29. {lmnr-0.5.2.dist-info → lmnr-0.6.0.dist-info}/METADATA +65 -63
  30. lmnr-0.6.0.dist-info/RECORD +54 -0
  31. {lmnr-0.5.2.dist-info → lmnr-0.6.0.dist-info}/WHEEL +1 -1
  32. lmnr/openllmetry_sdk/__init__.py +0 -75
  33. lmnr/openllmetry_sdk/config/__init__.py +0 -12
  34. lmnr/openllmetry_sdk/decorators/__init__.py +0 -0
  35. lmnr/openllmetry_sdk/instruments.py +0 -41
  36. lmnr/openllmetry_sdk/tracing/__init__.py +0 -1
  37. lmnr/openllmetry_sdk/tracing/content_allow_list.py +0 -24
  38. lmnr/openllmetry_sdk/tracing/tracing.py +0 -998
  39. lmnr/openllmetry_sdk/utils/in_memory_span_exporter.py +0 -61
  40. lmnr/sdk/browser/rrweb/rrweb.min.js +0 -18
  41. lmnr-0.5.2.dist-info/RECORD +0 -54
  42. /lmnr/{openllmetry_sdk → opentelemetry_lib}/.flake8 +0 -0
  43. /lmnr/{openllmetry_sdk → opentelemetry_lib}/opentelemetry/instrumentation/google_genai/__init__.py +0 -0
  44. /lmnr/{openllmetry_sdk → opentelemetry_lib}/opentelemetry/instrumentation/google_genai/config.py +0 -0
  45. /lmnr/{openllmetry_sdk → opentelemetry_lib}/utils/__init__.py +0 -0
  46. /lmnr/{openllmetry_sdk → opentelemetry_lib}/utils/json_encoder.py +0 -0
  47. {lmnr-0.5.2.dist-info → lmnr-0.6.0.dist-info}/LICENSE +0 -0
  48. {lmnr-0.5.2.dist-info → lmnr-0.6.0.dist-info}/entry_points.txt +0 -0
lmnr/__init__.py CHANGED
@@ -13,8 +13,10 @@ from .sdk.types import (
 )
 from .sdk.decorators import observe
 from .sdk.types import LaminarSpanContext
-from .openllmetry_sdk import Instruments
-from .openllmetry_sdk.tracing.attributes import Attributes
+from .opentelemetry_lib.tracing.attributes import Attributes
+from .opentelemetry_lib.tracing.instruments import Instruments
+from .opentelemetry_lib.tracing.processor import LaminarSpanProcessor
+from .opentelemetry_lib.tracing.tracer import get_laminar_tracer_provider, get_tracer
 from opentelemetry.trace import use_span

 __all__ = [
@@ -29,9 +31,12 @@ __all__ = [
     "LaminarClient",
     "LaminarDataset",
     "LaminarSpanContext",
+    "LaminarSpanProcessor",
     "RunAgentResponseChunk",
     "StepChunkContent",
     "TracingLevel",
+    "get_laminar_tracer_provider",
+    "get_tracer",
     "evaluate",
     "observe",
     "use_span",
lmnr/cli.py CHANGED
@@ -1,18 +1,17 @@
 from argparse import ArgumentParser
 import asyncio
 import importlib.util
-import logging
 import os
 import re
 import sys
+from typing import Optional
+
+from lmnr.sdk.evaluations import Evaluation

 from .sdk.eval_control import PREPARE_ONLY, EVALUATION_INSTANCE
-from .sdk.log import ColorfulFormatter
+from .sdk.log import get_default_logger

-LOG = logging.getLogger(__name__)
-console_log_handler = logging.StreamHandler()
-console_log_handler.setFormatter(ColorfulFormatter())
-LOG.addHandler(console_log_handler)
+LOG = get_default_logger(__name__)


 EVAL_DIR = "evals"
@@ -28,7 +27,10 @@ async def run_evaluation(args):
         if re.match(r".*_eval\.py$", f) or re.match(r"eval_.*\.py$", f)
     ]
     if len(files) == 0:
-        LOG.error("No evaluation files found in evals directory")
+        LOG.error("No evaluation files found in `evals` directory")
+        LOG.info(
+            "Eval files must be located in the `evals` directory and must be named *_eval.py or eval_*.py"
+        )
         return
     files.sort()
     LOG.info(f"Located {len(files)} evaluation files in {EVAL_DIR}")
@@ -53,7 +55,7 @@ async def run_evaluation(args):
         sys.modules[name] = mod

         spec.loader.exec_module(mod)
-        evaluation = EVALUATION_INSTANCE.get()
+        evaluation: Optional[Evaluation] = EVALUATION_INSTANCE.get()
         if evaluation is None:
             LOG.warning("Evaluation instance not found")
             if args.fail_on_error:
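For reference, the naming rule spelled out in the new log message can be checked in isolation; this standalone snippet (not part of the package) mirrors the two regexes used above:

    import re

    PATTERNS = (r".*_eval\.py$", r"eval_.*\.py$")

    def is_eval_file(name: str) -> bool:
        # Same check as in run_evaluation: suffix "_eval.py" or prefix "eval_".
        return any(re.match(p, name) for p in PATTERNS)

    assert is_eval_file("rag_eval.py")
    assert is_eval_file("eval_rag.py")
    assert not is_eval_file("evaluation.py")  # matches neither pattern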
lmnr/opentelemetry_lib/__init__.py ADDED
@@ -0,0 +1,55 @@
+import sys
+
+from typing import Optional, Set
+from opentelemetry.sdk.trace.export import SpanExporter
+from opentelemetry.sdk.resources import SERVICE_NAME
+
+from lmnr.opentelemetry_lib.tracing.instruments import Instruments
+from lmnr.opentelemetry_lib.tracing import TracerWrapper
+
+MAX_MANUAL_SPAN_PAYLOAD_SIZE = 1024 * 1024  # 1MB
+
+
+class TracerManager:
+    __tracer_wrapper: TracerWrapper
+
+    @staticmethod
+    def init(
+        app_name: Optional[str] = sys.argv[0],
+        disable_batch=False,
+        exporter: Optional[SpanExporter] = None,
+        resource_attributes: dict = {},
+        instruments: Optional[Set[Instruments]] = None,
+        base_url: str = "https://api.lmnr.ai",
+        port: int = 8443,
+        http_port: int = 443,
+        project_api_key: Optional[str] = None,
+        max_export_batch_size: Optional[int] = None,
+        force_http: bool = False,
+        timeout_seconds: int = 30,
+    ) -> None:
+        enable_content_tracing = True
+
+        # Tracer init
+        resource_attributes.update({SERVICE_NAME: app_name})
+        TracerWrapper.set_static_params(resource_attributes, enable_content_tracing)
+        TracerManager.__tracer_wrapper = TracerWrapper(
+            disable_batch=disable_batch,
+            exporter=exporter,
+            instruments=instruments,
+            base_url=base_url,
+            port=port,
+            http_port=http_port,
+            project_api_key=project_api_key,
+            max_export_batch_size=max_export_batch_size,
+            force_http=force_http,
+            timeout_seconds=timeout_seconds,
+        )
+
+    @staticmethod
+    def flush() -> bool:
+        return TracerManager.__tracer_wrapper.flush()
+
+    @staticmethod
+    def shutdown():
+        TracerManager.__tracer_wrapper.shutdown()
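A minimal usage sketch for the new TracerManager entry point; the values below are placeholders, and only the parameter and method names come from the code above:

    from lmnr.opentelemetry_lib import TracerManager

    TracerManager.init(
        app_name="my-service",                     # placeholder
        project_api_key="<LMNR_PROJECT_API_KEY>",  # placeholder
        disable_batch=True,                        # e.g. for short-lived scripts
    )
    # ... run instrumented code ...
    TracerManager.flush()
    TracerManager.shutdown()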
lmnr/{openllmetry_sdk/decorators/base.py → opentelemetry_lib/decorators/__init__.py} RENAMED
@@ -1,5 +1,5 @@
-import json
 from functools import wraps
+import json
 import logging
 import pydantic
 import types
@@ -10,11 +10,11 @@ from opentelemetry import context as context_api
 from opentelemetry.trace import Span

 from lmnr.sdk.utils import get_input_from_func_args, is_method
-from lmnr.openllmetry_sdk.tracing import get_tracer
-from lmnr.openllmetry_sdk.tracing.attributes import SPAN_INPUT, SPAN_OUTPUT, SPAN_TYPE
-from lmnr.openllmetry_sdk.tracing.tracing import TracerWrapper
-from lmnr.openllmetry_sdk.utils.json_encoder import JSONEncoder
-from lmnr.openllmetry_sdk.config import MAX_MANUAL_SPAN_PAYLOAD_SIZE
+from lmnr.opentelemetry_lib import MAX_MANUAL_SPAN_PAYLOAD_SIZE
+from lmnr.opentelemetry_lib.tracing.tracer import get_tracer
+from lmnr.opentelemetry_lib.tracing.attributes import SPAN_INPUT, SPAN_OUTPUT, SPAN_TYPE
+from lmnr.opentelemetry_lib.tracing import TracerWrapper
+from lmnr.opentelemetry_lib.utils.json_encoder import JSONEncoder


 class CustomJSONEncoder(JSONEncoder):
@@ -86,7 +86,16 @@ def entity_method(

             # span will be ended in the generator
             if isinstance(res, types.GeneratorType):
-                return _handle_generator(span, res)
+                return _handle_generator(span, ctx_token, res)
+            if isinstance(res, types.AsyncGeneratorType):
+                # async def foo() -> AsyncGenerator[int, None]:
+                # is not considered async in a classical sense in Python,
+                # so we handle this inside the sync wrapper.
+                # In particular, CO_COROUTINE is different from CO_ASYNC_GENERATOR.
+                # Flags are listed from LSB here:
+                # https://docs.python.org/3/library/inspect.html#inspect-module-co-flags
+                # See also: https://groups.google.com/g/python-tulip/c/6rWweGXLutU?pli=1
+                return _ahandle_generator(span, ctx_token, res)

             try:
                 if not ignore_output:
@@ -161,6 +170,8 @@ def aentity_method(

             # span will be ended in the generator
             if isinstance(res, types.AsyncGeneratorType):
+                # probably unreachable, read the comment in the similar
+                # part of the sync wrapper.
                 return await _ahandle_generator(span, ctx_token, res)

             try:
@@ -185,24 +196,22 @@ def aentity_method(
     return decorate


-def _handle_generator(span, res):
-    # for some reason the SPAN_KEY is not being set in the context of the generator, so we re-set it
-    context_api.attach(trace.set_span_in_context(span))
+def _handle_generator(span, ctx_token, res):
     yield from res

     span.end()
-
-    # Note: we don't detach the context here as this fails in some situations
-    # https://github.com/open-telemetry/opentelemetry-python/issues/2606
-    # This is not a problem since the context will be detached automatically during garbage collection
+    if ctx_token is not None:
+        context_api.detach(ctx_token)


 async def _ahandle_generator(span, ctx_token, res):
+    # async with contextlib.aclosing(res) as closing_gen:
     async for part in res:
         yield part

     span.end()
-    context_api.detach(ctx_token)
+    if ctx_token is not None:
+        context_api.detach(ctx_token)


 def _process_exception(span: Span, e: Exception):
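The CO_ASYNC_GENERATOR comment above can be verified with the standard library alone; this standalone snippet (not part of the package) shows why a function declared async def with a yield still reaches the sync wrapper and has to be detected by its return value:

    import inspect
    import types

    async def agen():
        yield 1

    async def coro():
        return 1

    # An async-generator function is not a coroutine function, so a decorator
    # that branches on iscoroutinefunction() sends it down the sync path.
    assert not inspect.iscoroutinefunction(agen)
    assert inspect.iscoroutinefunction(coro)
    assert inspect.isasyncgenfunction(agen)

    # What the sync wrapper now checks for at call time:
    assert isinstance(agen(), types.AsyncGeneratorType)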
lmnr/{openllmetry_sdk → opentelemetry_lib}/opentelemetry/instrumentation/google_genai/utils.py RENAMED
@@ -34,7 +34,7 @@ def dont_throw(func):
             return func(*args, **kwargs)
         except Exception as e:
             logger.debug(
-                "OpenLLMetry failed to trace in %s, error: %s",
+                "Laminar failed to trace in %s, error: %s",
                 func.__name__,
                 traceback.format_exc(),
             )
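The renamed message belongs to this module's dont_throw helper, which wraps instrumentation calls so that tracing failures are swallowed instead of propagating to user code. A simplified standalone sketch of that pattern, matching the behavior visible in the hunk (the real decorator may differ in details):

    import logging
    import traceback
    from functools import wraps

    logger = logging.getLogger(__name__)

    def dont_throw(func):
        @wraps(func)
        def wrapper(*args, **kwargs):
            try:
                return func(*args, **kwargs)
            except Exception:
                # Swallow the error; surface it only at debug level.
                logger.debug(
                    "Laminar failed to trace in %s, error: %s",
                    func.__name__,
                    traceback.format_exc(),
                )
        return wrapper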
lmnr/opentelemetry_lib/tracing/__init__.py ADDED
@@ -0,0 +1,139 @@
+import atexit
+import logging
+
+from lmnr.opentelemetry_lib.tracing.processor import LaminarSpanProcessor
+from lmnr.sdk.client.asynchronous.async_client import AsyncLaminarClient
+from lmnr.sdk.client.synchronous.sync_client import LaminarClient
+from lmnr.sdk.log import VerboseColorfulFormatter
+from lmnr.opentelemetry_lib.tracing.instruments import (
+    Instruments,
+    init_instrumentations,
+)
+
+from opentelemetry import trace
+from opentelemetry.sdk.resources import Resource
+from opentelemetry.sdk.trace import TracerProvider, SpanProcessor
+from opentelemetry.sdk.trace.export import SpanExporter
+
+from typing import Optional, Set
+
+
+module_logger = logging.getLogger(__name__)
+console_log_handler = logging.StreamHandler()
+console_log_handler.setFormatter(VerboseColorfulFormatter())
+module_logger.addHandler(console_log_handler)
+
+
+TRACER_NAME = "lmnr.tracer"
+
+MAX_EVENTS_OR_ATTRIBUTES_PER_SPAN = 5000
+
+
+class TracerWrapper(object):
+    resource_attributes: dict = {}
+    enable_content_tracing: bool = True
+    __tracer_provider: Optional[TracerProvider] = None
+    __logger: logging.Logger
+    __client: LaminarClient
+    __async_client: AsyncLaminarClient
+    __resource: Resource
+    __span_processor: SpanProcessor
+
+    def __new__(
+        cls,
+        disable_batch=False,
+        exporter: Optional[SpanExporter] = None,
+        instruments: Optional[Set[Instruments]] = None,
+        block_instruments: Optional[Set[Instruments]] = None,
+        base_url: str = "https://api.lmnr.ai",
+        port: int = 8443,
+        http_port: int = 443,
+        project_api_key: Optional[str] = None,
+        max_export_batch_size: Optional[int] = None,
+        force_http: bool = False,
+        timeout_seconds: int = 10,
+    ) -> "TracerWrapper":
+        base_http_url = f"{base_url}:{http_port}"
+        cls._initialize_logger(cls)
+        if not hasattr(cls, "instance"):
+            obj = cls.instance = super(TracerWrapper, cls).__new__(cls)
+
+            obj.__client = LaminarClient(
+                base_url=base_http_url,
+                project_api_key=project_api_key,
+            )
+            obj.__async_client = AsyncLaminarClient(
+                base_url=base_http_url,
+                project_api_key=project_api_key,
+            )
+
+            obj.__resource = Resource(attributes=TracerWrapper.resource_attributes)
+            obj.__tracer_provider = TracerProvider(resource=obj.__resource)
+
+            obj.__span_processor = LaminarSpanProcessor(
+                base_url=base_url,
+                api_key=project_api_key,
+                port=http_port if force_http else port,
+                exporter=exporter,
+                max_export_batch_size=max_export_batch_size,
+                timeout_seconds=timeout_seconds,
+                force_http=force_http,
+                disable_batch=disable_batch,
+            )
+
+            obj.__tracer_provider.add_span_processor(obj.__span_processor)
+
+            init_instrumentations(
+                tracer_provider=obj.__tracer_provider,
+                instruments=instruments,
+                block_instruments=block_instruments,
+                client=obj.__client,
+                async_client=obj.__async_client,
+            )
+
+            # Force flushes for debug environments (e.g. local development)
+            atexit.register(obj.exit_handler)
+
+        return cls.instance
+
+    def exit_handler(self):
+        if isinstance(self.__span_processor, LaminarSpanProcessor):
+            self.__span_processor.clear()
+        self.flush()
+
+    def _initialize_logger(self):
+        self.__logger = logging.getLogger(__name__)
+        console_log_handler = logging.StreamHandler()
+        console_log_handler.setFormatter(VerboseColorfulFormatter())
+        self.__logger.addHandler(console_log_handler)
+
+    @staticmethod
+    def set_static_params(
+        resource_attributes: dict,
+        enable_content_tracing: bool,
+    ) -> None:
+        TracerWrapper.resource_attributes = resource_attributes
+        TracerWrapper.enable_content_tracing = enable_content_tracing
+
+    @classmethod
+    def verify_initialized(cls) -> bool:
+        return hasattr(cls, "instance")
+
+    @classmethod
+    def clear(cls):
+        # Any state cleanup. Now used in between tests
+        if isinstance(cls.instance.__span_processor, LaminarSpanProcessor):
+            cls.instance.__span_processor.clear()
+
+    def shutdown(self):
+        if self.__tracer_provider is None:
+            return
+        self.__tracer_provider.shutdown()
+
+    def flush(self):
+        return self.__span_processor.force_flush()
+
+    def get_tracer(self):
+        if self.__tracer_provider is None:
+            return trace.get_tracer_provider().get_tracer(TRACER_NAME)
+        return self.__tracer_provider.get_tracer(TRACER_NAME)
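TracerWrapper.__new__ caches the instance on the class, so constructing it again returns the same object and skips re-initialization. A short sketch of what that implies (values are placeholders; set_static_params must run first, as TracerManager.init does above):

    from lmnr.opentelemetry_lib.tracing import TracerWrapper

    TracerWrapper.set_static_params({"service.name": "my-service"}, True)

    first = TracerWrapper(project_api_key="<LMNR_PROJECT_API_KEY>")
    second = TracerWrapper()  # cached instance; new arguments are ignored
    assert first is second
    assert TracerWrapper.verify_initialized()

    first.flush()      # force_flush() on the LaminarSpanProcessor
    first.shutdown()   # shuts down the underlying TracerProvider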