lmnr 0.4.66__py3-none-any.whl → 0.5.1__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (37)
  1. lmnr/__init__.py +30 -0
  2. lmnr/openllmetry_sdk/__init__.py +4 -16
  3. lmnr/openllmetry_sdk/tracing/attributes.py +0 -1
  4. lmnr/openllmetry_sdk/tracing/tracing.py +30 -10
  5. lmnr/sdk/browser/browser_use_otel.py +4 -4
  6. lmnr/sdk/browser/playwright_otel.py +299 -228
  7. lmnr/sdk/browser/pw_utils.py +289 -0
  8. lmnr/sdk/browser/utils.py +18 -53
  9. lmnr/sdk/client/asynchronous/async_client.py +157 -0
  10. lmnr/sdk/client/asynchronous/resources/__init__.py +13 -0
  11. lmnr/sdk/client/asynchronous/resources/agent.py +220 -0
  12. lmnr/sdk/client/asynchronous/resources/base.py +32 -0
  13. lmnr/sdk/client/asynchronous/resources/browser_events.py +40 -0
  14. lmnr/sdk/client/asynchronous/resources/evals.py +64 -0
  15. lmnr/sdk/client/asynchronous/resources/pipeline.py +89 -0
  16. lmnr/sdk/client/asynchronous/resources/semantic_search.py +60 -0
  17. lmnr/sdk/client/synchronous/resources/__init__.py +7 -0
  18. lmnr/sdk/client/synchronous/resources/agent.py +215 -0
  19. lmnr/sdk/client/synchronous/resources/base.py +32 -0
  20. lmnr/sdk/client/synchronous/resources/browser_events.py +40 -0
  21. lmnr/sdk/client/synchronous/resources/evals.py +102 -0
  22. lmnr/sdk/client/synchronous/resources/pipeline.py +89 -0
  23. lmnr/sdk/client/synchronous/resources/semantic_search.py +60 -0
  24. lmnr/sdk/client/synchronous/sync_client.py +170 -0
  25. lmnr/sdk/datasets.py +7 -2
  26. lmnr/sdk/evaluations.py +59 -35
  27. lmnr/sdk/laminar.py +34 -174
  28. lmnr/sdk/types.py +124 -23
  29. lmnr/sdk/utils.py +10 -0
  30. lmnr/version.py +6 -6
  31. {lmnr-0.4.66.dist-info → lmnr-0.5.1.dist-info}/METADATA +88 -38
  32. lmnr-0.5.1.dist-info/RECORD +55 -0
  33. {lmnr-0.4.66.dist-info → lmnr-0.5.1.dist-info}/WHEEL +1 -1
  34. lmnr/sdk/client.py +0 -313
  35. lmnr-0.4.66.dist-info/RECORD +0 -39
  36. {lmnr-0.4.66.dist-info → lmnr-0.5.1.dist-info}/LICENSE +0 -0
  37. {lmnr-0.4.66.dist-info → lmnr-0.5.1.dist-info}/entry_points.txt +0 -0
lmnr/sdk/laminar.py CHANGED
@@ -1,13 +1,12 @@
 from contextlib import contextmanager
 from contextvars import Context
-from lmnr.openllmetry_sdk import Traceloop
+from lmnr.openllmetry_sdk import TracerManager
 from lmnr.openllmetry_sdk.instruments import Instruments
 from lmnr.openllmetry_sdk.tracing import get_tracer
 from lmnr.openllmetry_sdk.tracing.attributes import (
     ASSOCIATION_PROPERTIES,
     Attributes,
     SPAN_TYPE,
-    OVERRIDE_PARENT_SPAN,
 )
 from lmnr.openllmetry_sdk.config import MAX_MANUAL_SPAN_PAYLOAD_SIZE
 from lmnr.openllmetry_sdk.decorators.base import json_dumps
@@ -20,19 +19,14 @@ from opentelemetry.exporter.otlp.proto.grpc.trace_exporter import (
 from opentelemetry.sdk.trace.id_generator import RandomIdGenerator
 from opentelemetry.util.types import AttributeValue
 
-from typing import Any, Awaitable, Literal, Optional, Set, Union
+from typing import Any, Literal, Optional, Set, Union
 
-import atexit
 import copy
 import datetime
-import dotenv
-import json
 import logging
 import os
-import random
 import re
 import uuid
-import warnings
 
 from lmnr.openllmetry_sdk.tracing.attributes import (
     SESSION_ID,
@@ -46,15 +40,12 @@ from lmnr.openllmetry_sdk.tracing.tracing import (
     set_association_properties,
     update_association_properties,
 )
-from lmnr.sdk.client import LaminarClient
+from lmnr.sdk.utils import from_env
 
 from .log import VerboseColorfulFormatter
 
 from .types import (
     LaminarSpanContext,
-    PipelineRunResponse,
-    NodeInput,
-    SemanticSearchResponse,
     TraceType,
     TracingLevel,
 )
@@ -64,14 +55,12 @@ class Laminar:
     __base_http_url: str
     __base_grpc_url: str
     __project_api_key: Optional[str] = None
-    __env: dict[str, str] = {}
     __initialized: bool = False
 
     @classmethod
     def initialize(
         cls,
         project_api_key: Optional[str] = None,
-        env: dict[str, str] = {},
         base_url: Optional[str] = None,
         http_port: Optional[int] = None,
         grpc_port: Optional[int] = None,
@@ -92,10 +81,6 @@ class Laminar:
                 LMNR_PROJECT_API_KEY environment variable\
                 in os.environ or in .env file.
                 Defaults to None.
-            env (dict[str, str], optional): Default environment passed to\
-                `run` requests, unless overriden at request time.\
-                Usually, model provider keys are stored here.
-                Defaults to {}.
             base_url (Optional[str], optional): Laminar API url. Do NOT include\
                 the port number, use `http_port` and `grpc_port`.\
                 If not specified, defaults to https://api.lmnr.ai.
@@ -119,38 +104,31 @@ class Laminar:
         Raises:
             ValueError: If project API key is not set
         """
-        cls.__project_api_key = project_api_key or os.environ.get(
-            "LMNR_PROJECT_API_KEY"
-        )
-        if not cls.__project_api_key:
-            dotenv_path = dotenv.find_dotenv(usecwd=True)
-            cls.__project_api_key = dotenv.get_key(
-                dotenv_path=dotenv_path, key_to_get="LMNR_PROJECT_API_KEY"
-            )
+        cls.__project_api_key = project_api_key or from_env("LMNR_PROJECT_API_KEY")
         if not cls.__project_api_key:
             raise ValueError(
                 "Please initialize the Laminar object with"
                 " your project API key or set the LMNR_PROJECT_API_KEY"
                 " environment variable in your environment or .env file"
             )
-        url = re.sub(r"/$", "", base_url or "https://api.lmnr.ai")
-        if re.search(r":\d{1,5}$", url):
-            raise ValueError(
-                "Please provide the `base_url` without the port number. "
-                "Use the `http_port` and `grpc_port` arguments instead."
-            )
+
+        cls._initialize_logger()
+
+        url = base_url or from_env("LMNR_BASE_URL") or "https://api.lmnr.ai"
+        url = url.rstrip("/")
+        if match := re.search(r":(\d{1,5})$", url):
+            url = url[: -len(match.group(0))]
+            if http_port is None:
+                cls.__logger.info(f"Using HTTP port from base URL: {match.group(1)}")
+                http_port = int(match.group(1))
+            else:
+                cls.__logger.info(f"Using HTTP port passed as an argument: {http_port}")
+
         cls.__base_http_url = f"{url}:{http_port or 443}"
         cls.__base_grpc_url = f"{url}:{grpc_port or 8443}"
 
-        cls.__env = env
         cls.__initialized = True
-        cls._initialize_logger()
-        LaminarClient.initialize(
-            base_url=cls.__base_http_url,
-            project_api_key=cls.__project_api_key,
-        )
-        atexit.register(LaminarClient.shutdown)
-        if not os.environ.get("OTEL_ATTRIBUTE_COUNT_LIMIT"):
+        if not os.getenv("OTEL_ATTRIBUTE_COUNT_LIMIT"):
             # each message is at least 2 attributes: role and content,
             # but the default attribute limit is 128, so raise it
             os.environ["OTEL_ATTRIBUTE_COUNT_LIMIT"] = "10000"
@@ -163,7 +141,7 @@ class Laminar:
         # "`pip install --upgrade lmnr`."
         # )
 
-        Traceloop.init(
+        TracerManager.init(
             base_http_url=cls.__base_http_url,
             project_api_key=cls.__project_api_key,
             exporter=OTLPSpanExporter(
@@ -195,83 +173,6 @@ class Laminar:
         console_log_handler.setFormatter(VerboseColorfulFormatter())
         cls.__logger.addHandler(console_log_handler)
 
-    @classmethod
-    def run(
-        cls,
-        pipeline: str,
-        inputs: dict[str, NodeInput],
-        env: dict[str, str] = {},
-        metadata: dict[str, str] = {},
-        parent_span_id: Optional[uuid.UUID] = None,
-        trace_id: Optional[uuid.UUID] = None,
-    ) -> Union[PipelineRunResponse, Awaitable[PipelineRunResponse]]:
-        """Runs the pipeline with the given inputs. If called from an async
-        function, must be awaited.
-
-        Args:
-            pipeline (str): name of the Laminar pipeline.\
-                The pipeline must have a target version set.
-            inputs (dict[str, NodeInput]):
-                inputs to the endpoint's target pipeline.\
-                Keys in the dictionary must match input node names
-            env (dict[str, str], optional):
-                Environment variables for the pipeline execution.
-                Defaults to {}.
-            metadata (dict[str, str], optional):
-                any custom metadata to be stored with execution trace.
-                Defaults to {}.
-            parent_span_id (Optional[uuid.UUID], optional): parent span id for\
-                the resulting span.
-                Defaults to None.
-            trace_id (Optional[uuid.UUID], optional): trace id for the\
-                resulting trace.
-                Defaults to None.
-
-        Returns:
-            PipelineRunResponse: response object containing the outputs
-
-        Raises:
-            ValueError: if project API key is not set
-            PipelineRunError: if the endpoint run fails
-        """
-        return LaminarClient.run_pipeline(
-            pipeline=pipeline,
-            inputs=inputs,
-            env=env or cls.__env,
-            metadata=metadata,
-            parent_span_id=parent_span_id,
-            trace_id=trace_id,
-        )
-
-    @classmethod
-    def semantic_search(
-        cls,
-        query: str,
-        dataset_id: uuid.UUID,
-        limit: Optional[int] = None,
-        threshold: Optional[float] = None,
-    ) -> SemanticSearchResponse:
-        """Perform a semantic search on a dataset. If called from an async
-        function, must be awaited.
-
-        Args:
-            query (str): query string to search by
-            dataset_id (uuid.UUID): id of the dataset to search in
-            limit (Optional[int], optional): maximum number of results to\
-                return. Defaults to None.
-            threshold (Optional[float], optional): minimum score for a result\
-                to be returned. Defaults to None.
-
-        Returns:
-            SemanticSearchResponse: response object containing the search results sorted by score in descending order
-        """
-        return LaminarClient.semantic_search(
-            query=query,
-            dataset_id=dataset_id,
-            limit=limit,
-            threshold=threshold,
-        )
-
     @classmethod
     def event(
         cls,
@@ -326,8 +227,6 @@ class Laminar:
         context: Optional[Context] = None,
         labels: Optional[list[str]] = None,
         parent_span_context: Optional[LaminarSpanContext] = None,
-        # deprecated, use parent_span_context instead
-        trace_id: Optional[uuid.UUID] = None,
     ):
         """Start a new span as the current span. Useful for manual
         instrumentation. If `span_type` is set to `"LLM"`, you should report
@@ -362,9 +261,6 @@ class Laminar:
                 Defaults to None.
             labels (Optional[list[str]], optional): labels to set for the\
                 span. Defaults to None.
-            trace_id (Optional[uuid.UUID], optional): [Deprecated] override\
-                the trace id for the span. If not provided, use the current\
-                trace id. Defaults to None.
         """
 
         if not cls.is_initialized():
@@ -379,13 +275,6 @@ class Laminar:
 
         with get_tracer() as tracer:
             ctx = context or context_api.get_current()
-            if trace_id is not None:
-                warnings.warn(
-                    "trace_id provided to `Laminar.start_as_current_span`"
-                    " is deprecated, use parent_span_context instead",
-                    DeprecationWarning,
-                    stacklevel=2,
-                )
             if parent_span_context is not None:
                 span_context = LaminarSpanContext.try_to_otel_span_context(
                     parent_span_context, cls.__logger
@@ -393,16 +282,6 @@ class Laminar:
                 ctx = trace.set_span_in_context(
                     trace.NonRecordingSpan(span_context), ctx
                 )
-            elif trace_id is not None and isinstance(trace_id, uuid.UUID):
-                span_context = trace.SpanContext(
-                    trace_id=int(trace_id),
-                    span_id=random.getrandbits(64),
-                    is_remote=False,
-                    trace_flags=trace.TraceFlags(trace.TraceFlags.SAMPLED),
-                )
-                ctx = trace.set_span_in_context(
-                    trace.NonRecordingSpan(span_context), ctx
-                )
             ctx_token = attach(ctx)
             label_props = {}
             try:
@@ -421,8 +300,6 @@ class Laminar:
                     **(label_props),
                 },
             ) as span:
-                if trace_id is not None and isinstance(trace_id, uuid.UUID):
-                    span.set_attribute(OVERRIDE_PARENT_SPAN, True)
                 if input is not None:
                     serialized_input = json_dumps(input)
                     if len(serialized_input) > MAX_MANUAL_SPAN_PAYLOAD_SIZE:
@@ -493,8 +370,6 @@ class Laminar:
         context: Optional[Context] = None,
         parent_span_context: Optional[LaminarSpanContext] = None,
         labels: Optional[dict[str, str]] = None,
-        # deprecated, use parent_span_context instead
-        trace_id: Optional[uuid.UUID] = None,
     ):
         """Start a new span. Useful for manual instrumentation.
         If `span_type` is set to `"LLM"`, you should report usage and response
@@ -548,9 +423,6 @@ class Laminar:
                 Defaults to None.
             labels (Optional[dict[str, str]], optional): labels to set for the\
                 span. Defaults to None.
-            trace_id (Optional[uuid.UUID], optional): Deprecated, use\
-                `parent_span_context` instead. If provided, it will be used to\
-                set the trace id for the span.
         """
         if not cls.is_initialized():
             return trace.NonRecordingSpan(
@@ -563,13 +435,6 @@ class Laminar:
 
         with get_tracer() as tracer:
             ctx = context or context_api.get_current()
-            if trace_id is not None:
-                warnings.warn(
-                    "trace_id provided to `Laminar.start_span`"
-                    " is deprecated, use parent_span_context instead",
-                    DeprecationWarning,
-                    stacklevel=2,
-                )
             if parent_span_context is not None:
                 span_context = LaminarSpanContext.try_to_otel_span_context(
                     parent_span_context, cls.__logger
@@ -577,16 +442,6 @@ class Laminar:
                 ctx = trace.set_span_in_context(
                     trace.NonRecordingSpan(span_context), ctx
                 )
-            elif trace_id is not None and isinstance(trace_id, uuid.UUID):
-                span_context = trace.SpanContext(
-                    trace_id=int(trace_id),
-                    span_id=random.getrandbits(64),
-                    is_remote=False,
-                    trace_flags=trace.TraceFlags(trace.TraceFlags.SAMPLED),
-                )
-                ctx = trace.set_span_in_context(
-                    trace.NonRecordingSpan(span_context), ctx
-                )
             label_props = {}
             try:
                 if labels:
@@ -606,8 +461,6 @@ class Laminar:
                     **(label_props),
                 },
             )
-            if trace_id is not None and isinstance(trace_id, uuid.UUID):
-                span.set_attribute(OVERRIDE_PARENT_SPAN, True)
             if input is not None:
                 serialized_input = json_dumps(input)
                 if len(serialized_input) > MAX_MANUAL_SPAN_PAYLOAD_SIZE:
@@ -743,7 +596,7 @@ class Laminar:
         span_context = cls.get_laminar_span_context(span)
         if span_context is None:
             return None
-        return span_context.to_dict()
+        return span_context.model_dump()
 
     @classmethod
     def serialize_span_context(cls, span: Optional[trace.Span] = None) -> Optional[str]:
@@ -777,7 +630,7 @@ class Laminar:
         span_context = cls.get_laminar_span_context(span)
         if span_context is None:
             return None
-        return json.dumps(span_context.to_dict())
+        return str(span_context)
 
     @classmethod
     def deserialize_span_context(
@@ -786,14 +639,21 @@ class Laminar:
         return LaminarSpanContext.deserialize(span_context)
 
     @classmethod
-    def shutdown(cls):
-        Traceloop.flush()
-        LaminarClient.shutdown()
+    def flush(cls) -> bool:
+        """Flush the internal tracer.
+
+        Returns:
+            bool: True if the tracer was flushed, False otherwise
+            (e.g. no tracer or timeout).
+        """
+        if not cls.is_initialized():
+            return False
+        return TracerManager.flush()
 
     @classmethod
-    async def shutdown_async(cls):
-        Traceloop.flush()
-        await LaminarClient.shutdown_async()
+    def shutdown(cls):
+        # other shutdown logic could be added here
+        cls.flush()
 
     @classmethod
     def set_session(
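
Taken together, the laminar.py changes drop the pipeline/semantic-search helpers and the old shutdown pair in favor of a plain flush()/shutdown(), and remove the deprecated trace_id arguments in favor of parent_span_context. A rough 0.5.1 usage sketch follows; the name/input parameters and the context-manager behavior of start_as_current_span are assumed from earlier releases, and the key value is a placeholder:

    from lmnr import Laminar

    # `env` is gone from initialize(); a port embedded in base_url is now
    # split out automatically when http_port is not given.
    Laminar.initialize(
        project_api_key="lmnr-project-api-key",  # placeholder
        base_url="https://api.lmnr.ai",
    )

    with Laminar.start_as_current_span(name="my-step", input={"query": "hi"}) as span:
        # serialize_span_context now returns the JSON produced by
        # LaminarSpanContext.__str__; restore it elsewhere with
        # Laminar.deserialize_span_context(serialized)
        serialized = Laminar.serialize_span_context(span)

    Laminar.flush()     # returns False if initialize() was never called
    Laminar.shutdown()  # now simply flushes; shutdown_async() was removed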
lmnr/sdk/types.py CHANGED
@@ -1,11 +1,11 @@
 import logging
-import aiohttp
 import datetime
 from enum import Enum
+import httpx
 import json
 from opentelemetry.trace import SpanContext, TraceFlags
 import pydantic
-from typing import Any, Awaitable, Callable, Optional, Union
+from typing import Any, Awaitable, Callable, Literal, Optional, Union
 import uuid
 
 from .utils import serialize
@@ -91,11 +91,15 @@ class PipelineRunError(Exception):
     error_code: str
     error_message: str
 
-    def __init__(self, response: aiohttp.ClientResponse):
+    def __init__(self, response: httpx.Response):
         try:
             resp_json = response.json()
-            self.error_code = resp_json["error_code"]
-            self.error_message = resp_json["error_message"]
+            try:
+                resp_dict = dict(resp_json)
+            except Exception:
+                resp_dict = {}
+            self.error_code = resp_dict.get("error_code")
+            self.error_message = resp_dict.get("error_message")
             super().__init__(self.error_message)
         except Exception:
             super().__init__(response.text)
@@ -225,7 +229,6 @@ class SpanType(Enum):
 
 class TraceType(Enum):
     DEFAULT = "DEFAULT"
-    EVENT = "EVENT"  # deprecated
     EVALUATION = "EVALUATION"
 
 
@@ -257,21 +260,8 @@ class LaminarSpanContext(pydantic.BaseModel):
     span_id: uuid.UUID
     is_remote: bool = pydantic.Field(default=False)
 
-    # uuid is not serializable by default, so we need to convert it to a string
-    def to_dict(self):
-        return {
-            "traceId": str(self.trace_id),
-            "spanId": str(self.span_id),
-            "isRemote": self.is_remote,
-        }
-
-    @classmethod
-    def from_dict(cls, data: dict[str, Any]) -> "LaminarSpanContext":
-        return cls(
-            trace_id=uuid.UUID(data.get("traceId") or data.get("trace_id")),
-            span_id=uuid.UUID(data.get("spanId") or data.get("span_id")),
-            is_remote=data.get("isRemote") or data.get("is_remote") or False,
-        )
+    def __str__(self) -> str:
+        return self.model_dump_json()
 
     @classmethod
     def try_to_otel_span_context(
@@ -316,8 +306,119 @@ class LaminarSpanContext(pydantic.BaseModel):
     @classmethod
     def deserialize(cls, data: Union[dict[str, Any], str]) -> "LaminarSpanContext":
         if isinstance(data, dict):
-            return cls.from_dict(data)
+            # Convert camelCase to snake_case for known fields
+            converted_data = {
+                "trace_id": data.get("trace_id") or data.get("traceId"),
+                "span_id": data.get("span_id") or data.get("spanId"),
+                "is_remote": data.get("is_remote") or data.get("isRemote", False),
+            }
+            return cls.model_validate(converted_data)
         elif isinstance(data, str):
-            return cls.from_dict(json.loads(data))
+            return cls.deserialize(json.loads(data))
         else:
             raise ValueError("Invalid span_context provided")
+
+
+class ModelProvider(str, Enum):
+    ANTHROPIC = "anthropic"
+    BEDROCK = "bedrock"
+
+
+# class AgentChatMessageContentTextBlock(pydantic.BaseModel):
+#     type: Literal["text"]
+#     text: str
+
+
+# class AgentChatMessageImageUrlBlock(pydantic.BaseModel):
+#     type: Literal["image"]
+#     imageUrl: str
+
+
+# class AgentChatMessageImageBase64Block(pydantic.BaseModel):
+#     type: Literal["image"]
+#     imageB64: str
+
+
+# class AgentChatMessageImageBlock(pydantic.RootModel):
+#     root: Union[AgentChatMessageImageUrlBlock, AgentChatMessageImageBase64Block]
+
+
+# class AgentChatMessageContentBlock(pydantic.RootModel):
+#     root: Union[AgentChatMessageContentTextBlock, AgentChatMessageImageBlock]
+
+
+# class AgentChatMessageContent(pydantic.RootModel):
+#     root: Union[str, list[AgentChatMessageContentBlock]]
+
+
+# class AgentChatMessage(pydantic.BaseModel):
+#     role: str
+#     content: AgentChatMessageContent
+#     name: Optional[str] = None
+#     toolCallId: Optional[str] = None
+#     isStateMessage: bool = False
+
+
+# class AgentState(pydantic.BaseModel):
+#     messages: str = pydantic.Field(default="")
+#     messages: list[AgentChatMessage] = pydantic.Field(default_factory=list)
+#     browser_state: Optional[BrowserState] = None
+
+
+class RunAgentRequest(pydantic.BaseModel):
+    prompt: str
+    state: Optional[str] = pydantic.Field(default=None)
+    parent_span_context: Optional[str] = pydantic.Field(default=None)
+    model_provider: Optional[ModelProvider] = pydantic.Field(default=None)
+    model: Optional[str] = pydantic.Field(default=None)
+    stream: bool = pydantic.Field(default=False)
+    enable_thinking: bool = pydantic.Field(default=True)
+    cdp_url: Optional[str] = pydantic.Field(default=None)
+    return_screenshots: bool = pydantic.Field(default=False)
+
+    def to_dict(self):
+        result = {
+            "prompt": self.prompt,
+            "stream": self.stream,
+            "enableThinking": self.enable_thinking,
+            "returnScreenshots": self.return_screenshots,
+        }
+        if self.state:
+            result["state"] = self.state
+        if self.parent_span_context:
+            result["parentSpanContext"] = self.parent_span_context
+        if self.model_provider:
+            result["modelProvider"] = self.model_provider.value
+        if self.model:
+            result["model"] = self.model
+        if self.cdp_url:
+            result["cdpUrl"] = self.cdp_url
+        return result
+
+
+class ActionResult(pydantic.BaseModel):
+    isDone: bool = pydantic.Field(default=False)
+    content: Optional[str] = pydantic.Field(default=None)
+    error: Optional[str] = pydantic.Field(default=None)
+
+
+class AgentOutput(pydantic.BaseModel):
+    result: ActionResult = pydantic.Field(default_factory=ActionResult)
+
+
+class StepChunkContent(pydantic.BaseModel):
+    chunkType: Literal["step"]
+    messageId: uuid.UUID
+    actionResult: ActionResult
+    summary: str
+    screenshot: Optional[str] = pydantic.Field(default=None)
+
+
+class FinalOutputChunkContent(pydantic.BaseModel):
+    chunkType: Literal["finalOutput"]
+    messageId: uuid.UUID
+    content: AgentOutput
+
+
+class RunAgentResponseChunk(pydantic.RootModel):
+    root: Union[StepChunkContent, FinalOutputChunkContent]
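
The new agent types in lmnr/sdk/types.py are plain pydantic models, so a caller can build a request and parse streamed chunks roughly like this (a sketch with made-up values; the field names come straight from the diff above):

    import uuid
    from lmnr.sdk.types import RunAgentRequest, RunAgentResponseChunk

    req = RunAgentRequest(
        prompt="Find the pricing page",
        model_provider="anthropic",   # validated into ModelProvider.ANTHROPIC
        stream=True,
    )
    # to_dict() emits camelCase keys and skips unset optional fields:
    # {'prompt': 'Find the pricing page', 'stream': True,
    #  'enableThinking': True, 'returnScreenshots': False, 'modelProvider': 'anthropic'}
    payload = req.to_dict()

    # A streamed chunk is either a step or the final output; the RootModel
    # resolves whichever variant matches.
    chunk = RunAgentResponseChunk.model_validate({
        "chunkType": "finalOutput",
        "messageId": str(uuid.uuid4()),
        "content": {"result": {"isDone": True, "content": "Done"}},
    })
    assert type(chunk.root).__name__ == "FinalOutputChunkContent"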
lmnr/sdk/utils.py CHANGED
@@ -1,8 +1,10 @@
 import asyncio
 import datetime
 import dataclasses
+import dotenv
 import enum
 import inspect
+import os
 import pydantic
 import queue
 import typing
@@ -97,3 +99,11 @@ def get_input_from_func_args(
         if i < len(func_args):
             res[k] = func_args[i]
     return res
+
+
+def from_env(key: str) -> typing.Optional[str]:
+    if val := os.getenv(key):
+        return val
+    dotenv_path = dotenv.find_dotenv(usecwd=True)
+    # use DotEnv directly so we can set verbose to False
+    return dotenv.main.DotEnv(dotenv_path, verbose=False, encoding="utf-8").get(key)
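
The new from_env helper centralizes the lookup that initialize() previously did inline: the process environment wins, then the nearest .env file found from the current working directory. A quick sketch of the lookup:

    from lmnr.sdk.utils import from_env

    api_key = from_env("LMNR_PROJECT_API_KEY")  # os.environ first, then .env
    if api_key is None:
        raise RuntimeError("LMNR_PROJECT_API_KEY is not set")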
lmnr/version.py CHANGED
@@ -1,15 +1,15 @@
 import sys
-import requests
+import httpx
 from packaging import version
 
 
-SDK_VERSION = "0.4.66"
+__version__ = "0.5.1"
 PYTHON_VERSION = f"{sys.version_info.major}.{sys.version_info.minor}"
 
 
 def is_latest_version() -> bool:
     try:
-        return version.parse(SDK_VERSION) >= version.parse(get_latest_pypi_version())
+        return version.parse(__version__) >= version.parse(get_latest_pypi_version())
     except Exception:
         return True
 
@@ -20,7 +20,7 @@ def get_latest_pypi_version() -> str:
     Returns the version string or raises an exception if unable to fetch.
     """
     try:
-        response = requests.get("https://pypi.org/pypi/lmnr/json")
+        response = httpx.get("https://pypi.org/pypi/lmnr/json")
         response.raise_for_status()
 
         releases = response.json()["releases"]
@@ -35,7 +35,7 @@ def get_latest_pypi_version() -> str:
         if not stable_versions:
             # do not scare the user, assume they are on
             # latest version
-            return SDK_VERSION
+            return __version__
 
         latest_version = max(stable_versions, key=version.parse)
         return latest_version
@@ -43,4 +43,4 @@ def get_latest_pypi_version() -> str:
     except Exception:
         # do not scare the user, assume they are on
         # latest version
-        return SDK_VERSION
+        return __version__
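
With SDK_VERSION renamed to __version__ and requests swapped for httpx, the version check is used the same way as before, e.g.:

    from lmnr.version import __version__, is_latest_version

    print(__version__)           # "0.5.1"
    if not is_latest_version():  # quietly returns True if PyPI is unreachable
        print("A newer lmnr release is available on PyPI")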