lmnr 0.4.53.dev0__py3-none-any.whl → 0.7.26__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- lmnr/__init__.py +32 -11
- lmnr/cli/__init__.py +270 -0
- lmnr/cli/datasets.py +371 -0
- lmnr/cli/evals.py +111 -0
- lmnr/cli/rules.py +42 -0
- lmnr/opentelemetry_lib/__init__.py +70 -0
- lmnr/opentelemetry_lib/decorators/__init__.py +337 -0
- lmnr/opentelemetry_lib/litellm/__init__.py +685 -0
- lmnr/opentelemetry_lib/litellm/utils.py +100 -0
- lmnr/opentelemetry_lib/opentelemetry/instrumentation/anthropic/__init__.py +849 -0
- lmnr/opentelemetry_lib/opentelemetry/instrumentation/anthropic/config.py +13 -0
- lmnr/opentelemetry_lib/opentelemetry/instrumentation/anthropic/event_emitter.py +211 -0
- lmnr/opentelemetry_lib/opentelemetry/instrumentation/anthropic/event_models.py +41 -0
- lmnr/opentelemetry_lib/opentelemetry/instrumentation/anthropic/span_utils.py +401 -0
- lmnr/opentelemetry_lib/opentelemetry/instrumentation/anthropic/streaming.py +425 -0
- lmnr/opentelemetry_lib/opentelemetry/instrumentation/anthropic/utils.py +332 -0
- lmnr/opentelemetry_lib/opentelemetry/instrumentation/anthropic/version.py +1 -0
- lmnr/opentelemetry_lib/opentelemetry/instrumentation/claude_agent/__init__.py +451 -0
- lmnr/opentelemetry_lib/opentelemetry/instrumentation/claude_agent/proxy.py +144 -0
- lmnr/opentelemetry_lib/opentelemetry/instrumentation/cua_agent/__init__.py +100 -0
- lmnr/opentelemetry_lib/opentelemetry/instrumentation/cua_computer/__init__.py +476 -0
- lmnr/opentelemetry_lib/opentelemetry/instrumentation/cua_computer/utils.py +12 -0
- lmnr/opentelemetry_lib/opentelemetry/instrumentation/google_genai/__init__.py +599 -0
- lmnr/opentelemetry_lib/opentelemetry/instrumentation/google_genai/config.py +9 -0
- lmnr/opentelemetry_lib/opentelemetry/instrumentation/google_genai/schema_utils.py +26 -0
- lmnr/opentelemetry_lib/opentelemetry/instrumentation/google_genai/utils.py +330 -0
- lmnr/opentelemetry_lib/opentelemetry/instrumentation/groq/__init__.py +488 -0
- lmnr/opentelemetry_lib/opentelemetry/instrumentation/groq/config.py +8 -0
- lmnr/opentelemetry_lib/opentelemetry/instrumentation/groq/event_emitter.py +143 -0
- lmnr/opentelemetry_lib/opentelemetry/instrumentation/groq/event_models.py +41 -0
- lmnr/opentelemetry_lib/opentelemetry/instrumentation/groq/span_utils.py +229 -0
- lmnr/opentelemetry_lib/opentelemetry/instrumentation/groq/utils.py +92 -0
- lmnr/opentelemetry_lib/opentelemetry/instrumentation/groq/version.py +1 -0
- lmnr/opentelemetry_lib/opentelemetry/instrumentation/kernel/__init__.py +381 -0
- lmnr/opentelemetry_lib/opentelemetry/instrumentation/kernel/utils.py +36 -0
- lmnr/opentelemetry_lib/opentelemetry/instrumentation/langgraph/__init__.py +121 -0
- lmnr/opentelemetry_lib/opentelemetry/instrumentation/langgraph/utils.py +60 -0
- lmnr/opentelemetry_lib/opentelemetry/instrumentation/openai/__init__.py +61 -0
- lmnr/opentelemetry_lib/opentelemetry/instrumentation/openai/shared/__init__.py +472 -0
- lmnr/opentelemetry_lib/opentelemetry/instrumentation/openai/shared/chat_wrappers.py +1185 -0
- lmnr/opentelemetry_lib/opentelemetry/instrumentation/openai/shared/completion_wrappers.py +305 -0
- lmnr/opentelemetry_lib/opentelemetry/instrumentation/openai/shared/config.py +16 -0
- lmnr/opentelemetry_lib/opentelemetry/instrumentation/openai/shared/embeddings_wrappers.py +312 -0
- lmnr/opentelemetry_lib/opentelemetry/instrumentation/openai/shared/event_emitter.py +100 -0
- lmnr/opentelemetry_lib/opentelemetry/instrumentation/openai/shared/event_models.py +41 -0
- lmnr/opentelemetry_lib/opentelemetry/instrumentation/openai/shared/image_gen_wrappers.py +68 -0
- lmnr/opentelemetry_lib/opentelemetry/instrumentation/openai/utils.py +197 -0
- lmnr/opentelemetry_lib/opentelemetry/instrumentation/openai/v0/__init__.py +176 -0
- lmnr/opentelemetry_lib/opentelemetry/instrumentation/openai/v1/__init__.py +368 -0
- lmnr/opentelemetry_lib/opentelemetry/instrumentation/openai/v1/assistant_wrappers.py +325 -0
- lmnr/opentelemetry_lib/opentelemetry/instrumentation/openai/v1/event_handler_wrapper.py +135 -0
- lmnr/opentelemetry_lib/opentelemetry/instrumentation/openai/v1/responses_wrappers.py +786 -0
- lmnr/opentelemetry_lib/opentelemetry/instrumentation/openai/version.py +1 -0
- lmnr/opentelemetry_lib/opentelemetry/instrumentation/openhands_ai/__init__.py +388 -0
- lmnr/opentelemetry_lib/opentelemetry/instrumentation/opentelemetry/__init__.py +69 -0
- lmnr/opentelemetry_lib/opentelemetry/instrumentation/skyvern/__init__.py +191 -0
- lmnr/opentelemetry_lib/opentelemetry/instrumentation/threading/__init__.py +197 -0
- lmnr/opentelemetry_lib/tracing/__init__.py +263 -0
- lmnr/opentelemetry_lib/tracing/_instrument_initializers.py +516 -0
- lmnr/{openllmetry_sdk → opentelemetry_lib}/tracing/attributes.py +21 -8
- lmnr/opentelemetry_lib/tracing/context.py +200 -0
- lmnr/opentelemetry_lib/tracing/exporter.py +153 -0
- lmnr/opentelemetry_lib/tracing/instruments.py +140 -0
- lmnr/opentelemetry_lib/tracing/processor.py +193 -0
- lmnr/opentelemetry_lib/tracing/span.py +398 -0
- lmnr/opentelemetry_lib/tracing/tracer.py +57 -0
- lmnr/opentelemetry_lib/tracing/utils.py +62 -0
- lmnr/opentelemetry_lib/utils/package_check.py +18 -0
- lmnr/opentelemetry_lib/utils/wrappers.py +11 -0
- lmnr/sdk/browser/__init__.py +0 -0
- lmnr/sdk/browser/background_send_events.py +158 -0
- lmnr/sdk/browser/browser_use_cdp_otel.py +100 -0
- lmnr/sdk/browser/browser_use_otel.py +142 -0
- lmnr/sdk/browser/bubus_otel.py +71 -0
- lmnr/sdk/browser/cdp_utils.py +518 -0
- lmnr/sdk/browser/inject_script.js +514 -0
- lmnr/sdk/browser/patchright_otel.py +151 -0
- lmnr/sdk/browser/playwright_otel.py +322 -0
- lmnr/sdk/browser/pw_utils.py +363 -0
- lmnr/sdk/browser/recorder/record.umd.min.cjs +84 -0
- lmnr/sdk/browser/utils.py +70 -0
- lmnr/sdk/client/asynchronous/async_client.py +180 -0
- lmnr/sdk/client/asynchronous/resources/__init__.py +6 -0
- lmnr/sdk/client/asynchronous/resources/base.py +32 -0
- lmnr/sdk/client/asynchronous/resources/browser_events.py +41 -0
- lmnr/sdk/client/asynchronous/resources/datasets.py +131 -0
- lmnr/sdk/client/asynchronous/resources/evals.py +266 -0
- lmnr/sdk/client/asynchronous/resources/evaluators.py +85 -0
- lmnr/sdk/client/asynchronous/resources/tags.py +83 -0
- lmnr/sdk/client/synchronous/resources/__init__.py +6 -0
- lmnr/sdk/client/synchronous/resources/base.py +32 -0
- lmnr/sdk/client/synchronous/resources/browser_events.py +40 -0
- lmnr/sdk/client/synchronous/resources/datasets.py +131 -0
- lmnr/sdk/client/synchronous/resources/evals.py +263 -0
- lmnr/sdk/client/synchronous/resources/evaluators.py +85 -0
- lmnr/sdk/client/synchronous/resources/tags.py +83 -0
- lmnr/sdk/client/synchronous/sync_client.py +191 -0
- lmnr/sdk/datasets/__init__.py +94 -0
- lmnr/sdk/datasets/file_utils.py +91 -0
- lmnr/sdk/decorators.py +163 -26
- lmnr/sdk/eval_control.py +3 -2
- lmnr/sdk/evaluations.py +403 -191
- lmnr/sdk/laminar.py +1080 -549
- lmnr/sdk/log.py +7 -2
- lmnr/sdk/types.py +246 -134
- lmnr/sdk/utils.py +151 -7
- lmnr/version.py +46 -0
- {lmnr-0.4.53.dev0.dist-info → lmnr-0.7.26.dist-info}/METADATA +152 -106
- lmnr-0.7.26.dist-info/RECORD +116 -0
- lmnr-0.7.26.dist-info/WHEEL +4 -0
- lmnr-0.7.26.dist-info/entry_points.txt +3 -0
- lmnr/cli.py +0 -101
- lmnr/openllmetry_sdk/.python-version +0 -1
- lmnr/openllmetry_sdk/__init__.py +0 -72
- lmnr/openllmetry_sdk/config/__init__.py +0 -9
- lmnr/openllmetry_sdk/decorators/base.py +0 -185
- lmnr/openllmetry_sdk/instruments.py +0 -38
- lmnr/openllmetry_sdk/tracing/__init__.py +0 -1
- lmnr/openllmetry_sdk/tracing/content_allow_list.py +0 -24
- lmnr/openllmetry_sdk/tracing/context_manager.py +0 -13
- lmnr/openllmetry_sdk/tracing/tracing.py +0 -884
- lmnr/openllmetry_sdk/utils/in_memory_span_exporter.py +0 -61
- lmnr/openllmetry_sdk/utils/package_check.py +0 -7
- lmnr/openllmetry_sdk/version.py +0 -1
- lmnr/sdk/datasets.py +0 -55
- lmnr-0.4.53.dev0.dist-info/LICENSE +0 -75
- lmnr-0.4.53.dev0.dist-info/RECORD +0 -33
- lmnr-0.4.53.dev0.dist-info/WHEEL +0 -4
- lmnr-0.4.53.dev0.dist-info/entry_points.txt +0 -3
- /lmnr/{openllmetry_sdk → opentelemetry_lib}/.flake8 +0 -0
- /lmnr/{openllmetry_sdk → opentelemetry_lib}/utils/__init__.py +0 -0
- /lmnr/{openllmetry_sdk → opentelemetry_lib}/utils/json_encoder.py +0 -0
- /lmnr/{openllmetry_sdk/decorators/__init__.py → py.typed} +0 -0
lmnr/sdk/browser/utils.py

@@ -0,0 +1,70 @@
+import asyncio
+import logging
+import time
+
+from lmnr.sdk.client.asynchronous.async_client import AsyncLaminarClient
+from lmnr.sdk.client.synchronous.sync_client import LaminarClient
+
+logger = logging.getLogger(__name__)
+
+
+def with_tracer_wrapper(func):
+    """Helper for providing tracer for wrapper functions."""
+
+    def _with_tracer(tracer, to_wrap):
+        def wrapper(wrapped, instance, args, kwargs):
+            return func(tracer, to_wrap, wrapped, instance, args, kwargs)
+
+        return wrapper
+
+    return _with_tracer
+
+
+def with_tracer_and_client_wrapper(func):
+    """Helper for providing tracer and client for wrapper functions."""
+
+    def _with_tracer_and_client(
+        tracer, client: LaminarClient | AsyncLaminarClient, to_wrap
+    ):
+        def wrapper(wrapped, instance, args, kwargs):
+            return func(tracer, client, to_wrap, wrapped, instance, args, kwargs)
+
+        return wrapper
+
+    return _with_tracer_and_client
+
+
+def retry_sync(func, retries=5, delay=0.5, error_message="Operation failed"):
+    """Utility function for retry logic in synchronous operations"""
+    for attempt in range(retries):
+        try:
+            result = func()
+            if result:  # If function returns truthy value, consider it successful
+                return result
+            if attempt == retries - 1:  # Last attempt
+                logger.debug(f"{error_message} after all retries")
+                return None
+        except Exception as e:
+            if attempt == retries - 1:  # Last attempt
+                logger.error(f"{error_message}: {e}")
+                return None
+        time.sleep(delay)
+    return None
+
+
+async def retry_async(func, retries=5, delay=0.5, error_message="Operation failed"):
+    """Utility function for retry logic in asynchronous operations"""
+    for attempt in range(retries):
+        try:
+            result = await func()
+            if result:  # If function returns truthy value, consider it successful
+                return result
+            if attempt == retries - 1:  # Last attempt
+                logger.error(f"{error_message} after all retries")
+                return None
+        except Exception as e:
+            if attempt == retries - 1:  # Last attempt
+                logger.error(f"{error_message}: {e}")
+                return None
+        await asyncio.sleep(delay)
+    return None
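These retry helpers treat a truthy return value as success, retry up to `retries` times with a fixed `delay` between attempts, and give up with `None` after the last attempt, logging rather than raising. A minimal usage sketch, assuming the module path shown above and a made-up `flaky_fetch` coroutine (not part of the package):

```python
import asyncio
import random

# Module path inferred from the file list above; adjust if it lives elsewhere.
from lmnr.sdk.browser.utils import retry_async


async def flaky_fetch() -> list[dict] | None:
    """Hypothetical operation that only sometimes returns a truthy result."""
    return [{"type": "click"}] if random.random() > 0.5 else None


async def main():
    # Retries up to 3 times, sleeping 0.1s between attempts; returns None
    # (and logs) if every attempt raises or returns a falsy value.
    events = await retry_async(
        flaky_fetch, retries=3, delay=0.1, error_message="Fetching events failed"
    )
    print(events)


asyncio.run(main())
```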

lmnr/sdk/client/asynchronous/async_client.py

@@ -0,0 +1,180 @@
+"""
+Laminar HTTP client. Used to send data to/from the Laminar API.
+"""
+
+import httpx
+import re
+from typing import TypeVar
+from types import TracebackType
+
+from lmnr.sdk.client.asynchronous.resources import (
+    AsyncBrowserEvents,
+    AsyncEvals,
+    AsyncTags,
+    AsyncEvaluators,
+)
+from lmnr.sdk.client.asynchronous.resources.datasets import AsyncDatasets
+from lmnr.sdk.utils import from_env
+
+_T = TypeVar("_T", bound="AsyncLaminarClient")
+
+
+class AsyncLaminarClient:
+    __base_url: str
+    __project_api_key: str
+    __client: httpx.AsyncClient = None
+
+    def __init__(
+        self,
+        base_url: str | None = None,
+        project_api_key: str | None = None,
+        port: int | None = None,
+        timeout: int = 3600,
+    ):
+        """Initializer for the Laminar HTTP client.
+
+        Args:
+            base_url (str | None): base URL of the Laminar API. If not
+                provided, the LMNR_BASE_URL environment variable is used or we
+                default to "https://api.lmnr.ai".
+            project_api_key (str | None): Laminar project API key. If not
+                provided, the LMNR_PROJECT_API_KEY environment variable is used.
+            port (int | None, optional): port of the Laminar API HTTP server.\
+                Overrides any port in the base URL.
+                Defaults to None. If none is provided, the default port (443) will
+                be used.
+            timeout (int, optional): global timeout seconds for the HTTP client.\
+                Applied to all httpx operations, i.e. connect, read, get_from_pool, etc.
+                Defaults to 3600.
+        """
+        # If port is already in the base URL, use it as is
+        base_url = base_url or from_env("LMNR_BASE_URL") or "https://api.lmnr.ai"
+        if match := re.search(r":(\d{1,5})$", base_url):
+            base_url = base_url[: -len(match.group(0))]
+            if port is None:
+                port = int(match.group(1))
+
+        base_url = base_url.rstrip("/")
+        self.__base_url = f"{base_url}:{port or 443}"
+        self.__project_api_key = project_api_key or from_env("LMNR_PROJECT_API_KEY")
+        if not self.__project_api_key:
+            raise ValueError(
+                "Project API key is not set. Please set the LMNR_PROJECT_API_KEY environment "
+                "variable or pass project_api_key to the initializer."
+            )
+
+        self.__client = httpx.AsyncClient(
+            headers=self._headers(),
+            timeout=timeout,
+            # Context: If the server responds with a 413, the connection becomes
+            # poisoned and freezes on subsequent requests, and there is no way
+            # to recover or recycle such connection.
+            # Setting max_keepalive_connections to 0 will resolve this, but is
+            # less efficient, as it will create a new connection
+            # (not client, so still better) for each request.
+            #
+            # Note: from my experiments with a simple python server, forcing the
+            # server to read/consume the request payload from the socket seems
+            # to resolve this, but I haven't figured out how to do that in our
+            # real actix-web backend server and whether it makes sense to do so.
+            #
+            # TODO: investigate if there are better ways to fix this rather than
+            # setting keepalive_expiry to 0. Other alternative: migrate to
+            # requests + aiohttp.
+            #
+            # limits=httpx.Limits(
+            #     max_keepalive_connections=0,
+            #     keepalive_expiry=0,
+            # ),
+        )
+
+        # Initialize resource objects
+        self.__evals = AsyncEvals(
+            self.__client, self.__base_url, self.__project_api_key
+        )
+        self.__evaluators = AsyncEvaluators(
+            self.__client, self.__base_url, self.__project_api_key
+        )
+        self.__browser_events = AsyncBrowserEvents(
+            self.__client, self.__base_url, self.__project_api_key
+        )
+        self.__tags = AsyncTags(self.__client, self.__base_url, self.__project_api_key)
+        self.__datasets = AsyncDatasets(
+            self.__client, self.__base_url, self.__project_api_key
+        )
+
+    @property
+    def evals(self) -> AsyncEvals:
+        """Get the Evals resource.
+
+        Returns:
+            AsyncEvals: The Evals resource instance.
+        """
+        return self.__evals
+
+    @property
+    def _browser_events(self) -> AsyncBrowserEvents:
+        """Get the BrowserEvents resource.
+
+        Returns:
+            AsyncBrowserEvents: The BrowserEvents resource instance.
+        """
+        return self.__browser_events
+
+    @property
+    def tags(self) -> AsyncTags:
+        """Get the Tags resource.
+
+        Returns:
+            AsyncTags: The Tags resource instance.
+        """
+        return self.__tags
+
+    @property
+    def evaluators(self) -> AsyncEvaluators:
+        """Get the Evaluators resource.
+
+        Returns:
+            AsyncEvaluators: The Evaluators resource instance.
+        """
+        return self.__evaluators
+
+    @property
+    def datasets(self) -> AsyncDatasets:
+        """Get the Datasets resource.
+
+        Returns:
+            AsyncDatasets: The Datasets resource instance.
+        """
+        return self.__datasets
+
+    @property
+    def is_closed(self) -> bool:
+        return self.__client.is_closed
+
+    async def close(self) -> None:
+        """Close the underlying HTTPX client.
+
+        The client will *not* be usable after this.
+        """
+        if hasattr(self, "__client"):
+            await self.__client.aclose()
+
+    async def __aenter__(self: _T) -> _T:
+        return self
+
+    async def __aexit__(
+        self,
+        exc_type: type[BaseException] | None,
+        exc: BaseException | None,
+        exc_tb: TracebackType | None,
+    ) -> None:
+        await self.close()
+
+    def _headers(self) -> dict[str, str]:
+        assert self.__project_api_key is not None, "Project API key is not set"
+        return {
+            "Authorization": "Bearer " + self.__project_api_key,
+            "Content-Type": "application/json",
+            "Accept": "application/json",
+        }
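`AsyncLaminarClient` wires every resource (`evals`, `evaluators`, `tags`, `datasets`, plus the internal `_browser_events`) to a single `httpx.AsyncClient` and implements `__aenter__`/`__aexit__`, so the natural usage is an `async with` block that guarantees the underlying connection pool is closed. A brief illustrative sketch (not from the package docs), assuming `LMNR_PROJECT_API_KEY` is set in the environment:

```python
import asyncio

from lmnr.sdk.client.asynchronous.async_client import AsyncLaminarClient


async def main():
    # Reads LMNR_PROJECT_API_KEY (and optionally LMNR_BASE_URL) from the
    # environment; defaults to https://api.lmnr.ai on port 443.
    async with AsyncLaminarClient() as client:
        # Resources are exposed as properties on the client.
        datasets = await client.datasets.list_datasets()
        print(datasets)


asyncio.run(main())
```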

lmnr/sdk/client/asynchronous/resources/__init__.py

@@ -0,0 +1,6 @@
+from lmnr.sdk.client.asynchronous.resources.browser_events import AsyncBrowserEvents
+from lmnr.sdk.client.asynchronous.resources.evals import AsyncEvals
+from lmnr.sdk.client.asynchronous.resources.tags import AsyncTags
+from lmnr.sdk.client.asynchronous.resources.evaluators import AsyncEvaluators
+
+__all__ = ["AsyncEvals", "AsyncBrowserEvents", "AsyncTags", "AsyncEvaluators"]

lmnr/sdk/client/asynchronous/resources/base.py

@@ -0,0 +1,32 @@
+"""Base class for resource objects."""
+
+import httpx
+
+
+class BaseAsyncResource:
+    """Base class for all API resources."""
+
+    def __init__(self, client: httpx.AsyncClient, base_url: str, project_api_key: str):
+        """Initialize the resource.
+
+        Args:
+            client (httpx.AsyncClient): HTTP client instance
+            base_url (str): Base URL for the API
+            project_api_key (str): Project API key
+        """
+        self._client = client
+        self._base_url = base_url
+        self._project_api_key = project_api_key
+
+    def _headers(self) -> dict[str, str]:
+        """Generate request headers with authentication.
+
+        Returns:
+            dict[str, str]: Headers dictionary
+        """
+        assert self._project_api_key is not None, "Project API key is not set"
+        return {
+            "Authorization": "Bearer " + self._project_api_key,
+            "Content-Type": "application/json",
+            "Accept": "application/json",
+        }

lmnr/sdk/client/asynchronous/resources/browser_events.py

@@ -0,0 +1,41 @@
+"""Resource for sending browser events."""
+
+import gzip
+import json
+
+from lmnr.sdk.client.asynchronous.resources.base import BaseAsyncResource
+
+from lmnr.version import PYTHON_VERSION, __version__
+
+
+class AsyncBrowserEvents(BaseAsyncResource):
+    """Resource for sending browser events."""
+
+    async def send(
+        self,
+        session_id: str,
+        trace_id: str,
+        events: list[dict],
+    ):
+        url = self._base_url + "/v1/browser-sessions/events"
+        payload = {
+            "sessionId": session_id,
+            "traceId": trace_id,
+            "events": events,
+            "source": f"python@{PYTHON_VERSION}",
+            "sdkVersion": __version__,
+        }
+
+        compressed_payload = gzip.compress(json.dumps(payload).encode("utf-8"))
+        response = await self._client.post(
+            url,
+            content=compressed_payload,
+            headers={
+                **self._headers(),
+                "Content-Encoding": "gzip",
+            },
+        )
+        if response.status_code != 200:
+            raise ValueError(
+                f"Failed to send events: [{response.status_code}] {response.text}"
+            )

lmnr/sdk/client/asynchronous/resources/datasets.py

@@ -0,0 +1,131 @@
+"""Datasets resource for interacting with Laminar datasets API."""
+
+import math
+import uuid
+
+from lmnr.sdk.client.asynchronous.resources.base import BaseAsyncResource
+from lmnr.sdk.log import get_default_logger
+from lmnr.sdk.types import (
+    Datapoint,
+    Dataset,
+    GetDatapointsResponse,
+    PushDatapointsResponse,
+)
+from lmnr.sdk.utils import serialize
+
+logger = get_default_logger(__name__)
+
+DEFAULT_DATASET_PULL_LIMIT = 100
+DEFAULT_DATASET_PUSH_BATCH_SIZE = 100
+
+
+class AsyncDatasets(BaseAsyncResource):
+    """Resource for interacting with Laminar datasets API."""
+
+    async def list_datasets(self) -> list[Dataset]:
+        """List all datasets."""
+        response = await self._client.get(
+            f"{self._base_url}/v1/datasets",
+            headers=self._headers(),
+        )
+        if response.status_code != 200:
+            raise ValueError(
+                f"Error listing datasets: [{response.status_code}] {response.text}"
+            )
+        return [Dataset.model_validate(dataset) for dataset in response.json()]
+
+    async def get_dataset_by_name(self, name: str) -> list[Dataset]:
+        """Get a dataset by name."""
+        response = await self._client.get(
+            f"{self._base_url}/v1/datasets",
+            params={"name": name},
+            headers=self._headers(),
+        )
+        if response.status_code != 200:
+            raise ValueError(
+                f"Error getting dataset: [{response.status_code}] {response.text}"
+            )
+        return [Dataset.model_validate(dataset) for dataset in response.json()]
+
+    async def push(
+        self,
+        points: list[Datapoint],
+        name: str | None = None,
+        id: uuid.UUID | None = None,
+        batch_size: int = DEFAULT_DATASET_PUSH_BATCH_SIZE,
+        create_dataset: bool = False,
+    ) -> PushDatapointsResponse | None:
+        """Push data to a dataset."""
+
+        if name is None and id is None:
+            raise ValueError("Either name or id must be provided")
+
+        if name is not None and id is not None:
+            raise ValueError("Only one of name or id must be provided")
+
+        if create_dataset and name is None:
+            raise ValueError("Name must be provided when creating a new dataset")
+
+        identifier = {"name": name} if name is not None else {"datasetId": id}
+
+        batch_num = 0
+        total_batches = math.ceil(len(points) / batch_size)
+        response = None
+        for i in range(0, len(points), batch_size):
+            batch_num += 1
+            logger.debug(f"Pushing batch {batch_num} of {total_batches}")
+            batch = points[i : i + batch_size]
+            response = await self._client.post(
+                f"{self._base_url}/v1/datasets/datapoints",
+                json={
+                    **identifier,
+                    "datapoints": [serialize(point) for point in batch],
+                    "createDataset": create_dataset,
+                },
+                headers=self._headers(),
+            )
+
+            # 201 when creating a new dataset
+            if response.status_code not in [200, 201]:
+                raise ValueError(
+                    f"Error pushing data to dataset: [{response.status_code}] {response.text}"
+                )
+
+            response = PushDatapointsResponse.model_validate(response.json())
+        # Currently, the response only contains the dataset ID,
+        # so it's safe to return the last response only.
+        return response
+
+    async def pull(
+        self,
+        name: str | None = None,
+        id: uuid.UUID | None = None,
+        # TODO: move const to one file, import in CLI
+        limit: int = DEFAULT_DATASET_PULL_LIMIT,
+        offset: int = 0,
+    ) -> GetDatapointsResponse:
+        """Pull data from a dataset."""
+
+        if name is None and id is None:
+            raise ValueError("Either name or id must be provided")
+
+        if name is not None and id is not None:
+            raise ValueError("Only one of name or id must be provided")
+
+        identifier = {"name": name} if name is not None else {"datasetId": id}
+
+        params = {
+            **identifier,
+            "offset": offset,
+            "limit": limit,
+        }
+        response = await self._client.get(
+            f"{self._base_url}/v1/datasets/datapoints",
+            params=params,
+            headers=self._headers(),
+        )
+        if response.status_code != 200:
+            raise ValueError(
+                f"Error pulling data from dataset: [{response.status_code}] {response.text}"
+            )
+        return GetDatapointsResponse.model_validate(response.json())
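For reference, a hedged end-to-end sketch of the datasets resource: `push` sends datapoints in batches (100 per request by default) and requires exactly one of `name` or `id`, while `pull` pages through datapoints with `limit`/`offset`. The `Datapoint` fields used below (`data`, `target`) are an assumption, since `lmnr.sdk.types` is not shown in this diff:

```python
import asyncio

from lmnr.sdk.client.asynchronous.async_client import AsyncLaminarClient
from lmnr.sdk.types import Datapoint


async def main():
    async with AsyncLaminarClient() as client:
        # Assumed Datapoint fields; check lmnr.sdk.types for the actual schema.
        points = [Datapoint(data={"question": "What is 2 + 2?"}, target={"answer": "4"})]

        # Exactly one of name/id may be given; create_dataset=True requires a name.
        pushed = await client.datasets.push(points, name="demo-dataset", create_dataset=True)
        print(pushed)

        # Pull the first page of up to 10 datapoints from the same dataset.
        page = await client.datasets.pull(name="demo-dataset", limit=10, offset=0)
        print(page)


asyncio.run(main())
```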