lmnr 0.2.15__py3-none-any.whl → 0.3.0__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (43)
  1. lmnr/__init__.py +4 -4
  2. lmnr/sdk/client.py +161 -0
  3. lmnr/sdk/collector.py +177 -0
  4. lmnr/sdk/constants.py +1 -0
  5. lmnr/sdk/context.py +456 -0
  6. lmnr/sdk/decorators.py +277 -0
  7. lmnr/sdk/interface.py +339 -0
  8. lmnr/sdk/providers/__init__.py +2 -0
  9. lmnr/sdk/providers/base.py +28 -0
  10. lmnr/sdk/providers/fallback.py +131 -0
  11. lmnr/sdk/providers/openai.py +140 -0
  12. lmnr/sdk/providers/utils.py +33 -0
  13. lmnr/sdk/tracing_types.py +197 -0
  14. lmnr/sdk/types.py +69 -0
  15. lmnr/sdk/utils.py +102 -0
  16. lmnr-0.3.0.dist-info/METADATA +185 -0
  17. lmnr-0.3.0.dist-info/RECORD +21 -0
  18. lmnr/cli/__init__.py +0 -0
  19. lmnr/cli/__main__.py +0 -4
  20. lmnr/cli/cli.py +0 -230
  21. lmnr/cli/parser/__init__.py +0 -0
  22. lmnr/cli/parser/nodes/__init__.py +0 -45
  23. lmnr/cli/parser/nodes/code.py +0 -36
  24. lmnr/cli/parser/nodes/condition.py +0 -30
  25. lmnr/cli/parser/nodes/input.py +0 -25
  26. lmnr/cli/parser/nodes/json_extractor.py +0 -29
  27. lmnr/cli/parser/nodes/llm.py +0 -56
  28. lmnr/cli/parser/nodes/output.py +0 -27
  29. lmnr/cli/parser/nodes/router.py +0 -37
  30. lmnr/cli/parser/nodes/semantic_search.py +0 -53
  31. lmnr/cli/parser/nodes/types.py +0 -153
  32. lmnr/cli/parser/parser.py +0 -62
  33. lmnr/cli/parser/utils.py +0 -49
  34. lmnr/cli/zip.py +0 -16
  35. lmnr/sdk/endpoint.py +0 -186
  36. lmnr/sdk/registry.py +0 -29
  37. lmnr/sdk/remote_debugger.py +0 -148
  38. lmnr/types.py +0 -101
  39. lmnr-0.2.15.dist-info/METADATA +0 -187
  40. lmnr-0.2.15.dist-info/RECORD +0 -28
  41. {lmnr-0.2.15.dist-info → lmnr-0.3.0.dist-info}/LICENSE +0 -0
  42. {lmnr-0.2.15.dist-info → lmnr-0.3.0.dist-info}/WHEEL +0 -0
  43. {lmnr-0.2.15.dist-info → lmnr-0.3.0.dist-info}/entry_points.txt +0 -0
lmnr/sdk/context.py ADDED
@@ -0,0 +1,456 @@
1
+ from .collector import ThreadManager
2
+ from .client import Laminar
3
+ from .providers import Provider
4
+ from .providers.fallback import FallbackProvider
5
+ from .tracing_types import EvaluateEvent, Event, Span, Trace
6
+ from .types import PipelineRunResponse
7
+ from .utils import PROVIDER_NAME_TO_OBJECT, is_async_iterator, is_iterator
8
+
9
+ from contextvars import ContextVar
10
+ from typing import Any, AsyncGenerator, Generator, Literal, Optional, Union
11
+ import atexit
12
+ import datetime
13
+ import dotenv
14
+ import inspect
15
+ import logging
16
+ import os
17
+ import pydantic
18
+ import uuid
19
+
20
+
21
# Per-execution-context stack of observations: the open Trace sits at index 0,
# with the currently open Spans nested above it.
# NOTE(review): `default=[]` is a shared mutable default; the code only ever
# replaces the list via `.set(...)` (never mutates it in place), so this is
# safe today — confirm before adding in-place mutation.
_lmnr_stack_context: ContextVar[list[Union[Span, Trace]]] = ContextVar(
    "lmnr_stack_context", default=[]
)

# Id of the trace currently active in this context, or None when no trace is
# open. NOTE(review): it is set from `trace.id`, which appears to be a UUID
# rather than a str — annotation may be inaccurate; verify against Trace.
_root_trace_id_context: ContextVar[Optional[str]] = ContextVar(
    "root_trace_id_context", default=None
)
27
+
28
+
29
class LaminarContextManager:
    """Manages Laminar traces and spans for the current execution context and
    enqueues finished observations for background upload via a ThreadManager.
    """

    _log = logging.getLogger("laminar.context_manager")

    def __init__(
        self,
        project_api_key: Optional[str] = None,  # fixed: was `str = None`
        threads: int = 1,
        max_task_queue_size: int = 1000,
    ):
        """
        :param project_api_key: Laminar project API key. Falls back to the
            ``LMNR_PROJECT_API_KEY`` environment variable, then to the nearest
            ``.env`` file (searched upward from the current working directory).
        :param threads: number of background sender threads.
        :param max_task_queue_size: maximum size of the pending-observation
            queue handed to the ThreadManager.
        """
        self.project_api_key = project_api_key or os.environ.get("LMNR_PROJECT_API_KEY")
        if not self.project_api_key:
            dotenv_path = dotenv.find_dotenv(usecwd=True)
            self.project_api_key = dotenv.get_key(
                dotenv_path=dotenv_path, key_to_get="LMNR_PROJECT_API_KEY"
            )
        self.laminar = Laminar(project_api_key=self.project_api_key)
        self.thread_manager = ThreadManager(
            client=self.laminar,
            max_task_queue_size=max_task_queue_size,
            threads=threads,
        )
        # atexit executes callbacks last-in-first-out, so registering here
        # (after the ThreadManager exists) means the trace is finalized and
        # enqueued BEFORE the thread manager shuts down, so the final trace
        # update still reaches the server.
        atexit.register(self._force_finalize_trace)
55
+ def observe_start(
56
+ self,
57
+ # span attributes
58
+ name: str,
59
+ input: Optional[Any] = None,
60
+ metadata: Optional[dict[str, Any]] = None,
61
+ attributes: Optional[dict[str, Any]] = None,
62
+ span_type: Literal["DEFAULT", "LLM"] = "DEFAULT",
63
+ check_event_names: list[str] = None,
64
+ # trace attributes
65
+ user_id: Optional[str] = None,
66
+ session_id: Optional[str] = None,
67
+ release: Optional[str] = None,
68
+ ) -> Span:
69
+ trace_id = _root_trace_id_context.get()
70
+ if not trace_id:
71
+ session_id = session_id or str(uuid.uuid4())
72
+ trace_id = uuid.uuid4()
73
+ trace = self.update_trace(
74
+ id=trace_id,
75
+ user_id=user_id,
76
+ session_id=session_id,
77
+ release=release,
78
+ start_time=datetime.datetime.now(datetime.timezone.utc),
79
+ )
80
+ _root_trace_id_context.set(trace.id)
81
+ _lmnr_stack_context.set([trace])
82
+
83
+ parent = _lmnr_stack_context.get()[-1] if _lmnr_stack_context.get() else None
84
+ parent_span_id = parent.id if isinstance(parent, Span) else None
85
+ span = self.create_span(
86
+ name=name,
87
+ trace_id=trace_id,
88
+ input=input,
89
+ metadata=metadata,
90
+ attributes=attributes,
91
+ parent_span_id=parent_span_id,
92
+ span_type=span_type,
93
+ check_event_names=check_event_names,
94
+ )
95
+ stack = _lmnr_stack_context.get()
96
+ _lmnr_stack_context.set(stack + [span])
97
+ return span
98
+
99
+ def observe_end(
100
+ self,
101
+ span: Span,
102
+ provider_name: str = None,
103
+ result: Optional[Any] = None,
104
+ metadata: Optional[dict[str, Any]] = None,
105
+ attributes: Optional[dict[str, Any]] = None,
106
+ error: Optional[Exception] = None,
107
+ ) -> Any:
108
+ stack = _lmnr_stack_context.get()
109
+ if not stack:
110
+ return
111
+ provider = PROVIDER_NAME_TO_OBJECT.get(provider_name, FallbackProvider())
112
+ new_stack = stack[:-1]
113
+ _lmnr_stack_context.set(new_stack)
114
+
115
+ if len(new_stack) == 1 and isinstance(stack[0], Trace):
116
+ trace = stack[0]
117
+ self.update_trace(
118
+ id=trace.id,
119
+ start_time=trace.startTime,
120
+ end_time=datetime.datetime.now(datetime.timezone.utc),
121
+ user_id=trace.userId,
122
+ session_id=trace.sessionId,
123
+ release=trace.release,
124
+ metadata=metadata,
125
+ )
126
+ _root_trace_id_context.set(None)
127
+ _lmnr_stack_context.set([])
128
+
129
+ if error is not None:
130
+ self.update_current_trace(
131
+ success=False, end_time=datetime.datetime.now(datetime.timezone.utc)
132
+ )
133
+
134
+ if inspect.isgenerator(result) or is_iterator(result):
135
+ return self._collect_generator_result(
136
+ provider=provider,
137
+ generator=result,
138
+ span=span,
139
+ metadata=metadata,
140
+ attributes=attributes,
141
+ )
142
+ elif inspect.isasyncgen(result) or is_async_iterator(result):
143
+ return self._collect_async_generator_result(
144
+ provider=provider,
145
+ generator=result,
146
+ span=span,
147
+ metadata=metadata,
148
+ attributes=attributes,
149
+ )
150
+ if span.spanType == "LLM" and error is None:
151
+ attributes = self._extract_llm_attributes_from_response(
152
+ provider=provider, response=result
153
+ )
154
+ return self._finalize_span(
155
+ span,
156
+ provider=provider,
157
+ result=error or result,
158
+ metadata=metadata,
159
+ attributes=attributes,
160
+ )
161
+
162
+ def update_current_span(
163
+ self,
164
+ metadata: Optional[dict[str, Any]] = None,
165
+ check_event_names: list[str] = None,
166
+ override: bool = False,
167
+ ):
168
+ stack = _lmnr_stack_context.get()
169
+ if not stack:
170
+ return
171
+ span = stack[-1]
172
+ new_metadata = (
173
+ metadata if override else {**(span.metadata or {}), **(metadata or {})}
174
+ )
175
+ new_check_event_names = (
176
+ check_event_names
177
+ if override
178
+ else span.evaluateEvents + (check_event_names or [])
179
+ )
180
+ self.update_span(
181
+ span=span,
182
+ metadata=new_metadata,
183
+ evaluate_events=new_check_event_names,
184
+ )
185
+
186
+ def update_current_trace(
187
+ self,
188
+ user_id: Optional[str] = None,
189
+ session_id: Optional[str] = None,
190
+ release: Optional[str] = None,
191
+ metadata: Optional[dict[str, Any]] = None,
192
+ success: bool = True,
193
+ end_time: Optional[datetime.datetime] = None,
194
+ ):
195
+ existing_trace = (
196
+ _lmnr_stack_context.get()[0] if _lmnr_stack_context.get() else None
197
+ )
198
+ if not existing_trace:
199
+ return
200
+ self.update_trace(
201
+ id=existing_trace.id,
202
+ start_time=existing_trace.startTime,
203
+ end_time=end_time,
204
+ user_id=user_id or existing_trace.userId,
205
+ session_id=session_id or existing_trace.sessionId,
206
+ release=release or existing_trace.release,
207
+ metadata=metadata or existing_trace.metadata,
208
+ success=success if success is not None else existing_trace.success,
209
+ )
210
+
211
+ def update_trace(
212
+ self,
213
+ id: uuid.UUID,
214
+ start_time: Optional[datetime.datetime] = None,
215
+ end_time: Optional[datetime.datetime] = None,
216
+ user_id: Optional[str] = None,
217
+ session_id: Optional[str] = None,
218
+ release: Optional[str] = None,
219
+ metadata: Optional[dict[str, Any]] = None,
220
+ success: bool = True,
221
+ ) -> Trace:
222
+ trace = Trace(
223
+ start_time=start_time,
224
+ end_time=end_time,
225
+ id=id,
226
+ user_id=user_id,
227
+ session_id=session_id,
228
+ release=release,
229
+ metadata=metadata,
230
+ success=success,
231
+ )
232
+ self._add_observation(trace)
233
+ return trace
234
+
235
+ def create_span(
236
+ self,
237
+ name: str,
238
+ trace_id: uuid.UUID,
239
+ start_time: Optional[datetime.datetime] = None,
240
+ span_type: Literal["DEFAULT", "LLM"] = "DEFAULT",
241
+ id: Optional[uuid.UUID] = None,
242
+ parent_span_id: Optional[uuid.UUID] = None,
243
+ input: Optional[Any] = None,
244
+ metadata: Optional[dict[str, Any]] = None,
245
+ attributes: Optional[dict[str, Any]] = None,
246
+ check_event_names: list[str] = None,
247
+ ) -> Span:
248
+ span = Span(
249
+ name=name,
250
+ trace_id=trace_id,
251
+ start_time=start_time or datetime.datetime.now(datetime.timezone.utc),
252
+ id=id,
253
+ parent_span_id=parent_span_id,
254
+ input=input,
255
+ metadata=metadata,
256
+ attributes=attributes,
257
+ span_type=span_type,
258
+ evaluate_events=check_event_names or [],
259
+ )
260
+ return span
261
+
262
+ def update_span(
263
+ self,
264
+ span: Span,
265
+ finalize: bool = False,
266
+ end_time: Optional[datetime.datetime] = None,
267
+ output: Optional[Any] = None,
268
+ metadata: Optional[dict[str, Any]] = None,
269
+ attributes: Optional[dict[str, Any]] = None,
270
+ evaluate_events: Optional[list[EvaluateEvent]] = None,
271
+ ) -> Span:
272
+ span.update(
273
+ end_time=end_time,
274
+ output=output,
275
+ metadata=metadata,
276
+ attributes=attributes,
277
+ evaluate_events=evaluate_events,
278
+ )
279
+ if finalize:
280
+ self._add_observation(span)
281
+ return span
282
+
283
+ def event(
284
+ self,
285
+ name: str,
286
+ value: Optional[Union[str, int]] = None,
287
+ timestamp: Optional[datetime.datetime] = None,
288
+ ):
289
+ span = _lmnr_stack_context.get()[-1] if _lmnr_stack_context.get() else None
290
+ if not span or not isinstance(span, Span):
291
+ self._log.warning(f"No active span to send event. Ignoring event. {name}")
292
+ return
293
+ event = Event(
294
+ name=name,
295
+ span_id=span.id,
296
+ timestamp=timestamp,
297
+ value=value,
298
+ )
299
+ span.add_event(event)
300
+
301
+ def evaluate_event(self, name: str, data: str):
302
+ stack = _lmnr_stack_context.get()
303
+ if not stack or not isinstance(stack[-1], Span):
304
+ self._log.warning(
305
+ f"No active span to add check event. Ignoring event. {name}"
306
+ )
307
+ return
308
+ stack[-1].evaluateEvents.append(EvaluateEvent(name=name, data=data))
309
+
310
+ def run_pipeline(
311
+ self,
312
+ pipeline: str,
313
+ inputs: dict[str, Any],
314
+ env: dict[str, str] = {},
315
+ metadata: dict[str, str] = {},
316
+ ) -> PipelineRunResponse:
317
+ span = _lmnr_stack_context.get()[-1] if _lmnr_stack_context.get() else None
318
+ span_id = span.id if isinstance(span, Span) else None
319
+ trace = _lmnr_stack_context.get()[0] if _lmnr_stack_context.get() else None
320
+ trace_id = trace.id if isinstance(trace, Trace) else None
321
+ return self.laminar.run(
322
+ pipeline=pipeline,
323
+ inputs=inputs,
324
+ env=env,
325
+ metadata=metadata,
326
+ parent_span_id=span_id,
327
+ trace_id=trace_id,
328
+ )
329
+
330
+ def _force_finalize_trace(self):
331
+ self.update_current_trace(end_time=datetime.datetime.now(datetime.timezone.utc))
332
+
333
+ def _add_observation(self, observation: Union[Span, Trace]) -> bool:
334
+ return self.thread_manager.add_task(observation)
335
+
336
+ def _extract_llm_attributes_from_response(
337
+ self,
338
+ provider: Provider,
339
+ response: Union[str, dict[str, Any], pydantic.BaseModel],
340
+ ) -> dict[str, Any]:
341
+ return provider.extract_llm_attributes_from_response(response)
342
+
343
+ def _stream_list_to_dict(
344
+ self, provider: Provider, response: list[Any]
345
+ ) -> dict[str, Any]:
346
+ return provider.stream_list_to_dict(response)
347
+
348
+ def _extract_llm_output(
349
+ self,
350
+ provider: Provider,
351
+ result: Union[dict[str, Any], pydantic.BaseModel],
352
+ ) -> str:
353
+ return provider.extract_llm_output(result)
354
+
355
+ def _finalize_span(
356
+ self,
357
+ span: Span,
358
+ provider: Provider = None,
359
+ result: Optional[Any] = None,
360
+ metadata: Optional[dict[str, Any]] = None,
361
+ attributes: Optional[dict[str, Any]] = None,
362
+ ) -> Any:
363
+ self.update_span(
364
+ span=span,
365
+ finalize=True,
366
+ output=(
367
+ result
368
+ if span.spanType != "LLM"
369
+ else self._extract_llm_output(provider, result)
370
+ ),
371
+ metadata=metadata,
372
+ attributes=attributes,
373
+ )
374
+ return result
375
+
376
+ def _collect_generator_result(
377
+ self,
378
+ generator: Generator,
379
+ span: Span,
380
+ provider: Provider = None,
381
+ metadata: Optional[dict[str, Any]] = None,
382
+ attributes: Optional[dict[str, Any]] = None,
383
+ ) -> Generator:
384
+ items = []
385
+ try:
386
+ for item in generator:
387
+ items.append(item)
388
+ yield item
389
+
390
+ finally:
391
+ output = items
392
+ if all(isinstance(item, str) for item in items):
393
+ output = "".join(items)
394
+ if span.spanType == "LLM":
395
+ collected = self._stream_list_to_dict(
396
+ provider=provider, response=output
397
+ )
398
+ attributes = self._extract_llm_attributes_from_response(
399
+ provider=provider, response=collected
400
+ )
401
+ self._finalize_span(
402
+ span=span,
403
+ provider=provider,
404
+ result=collected,
405
+ metadata=metadata,
406
+ attributes=attributes,
407
+ )
408
+
409
+ async def _collect_async_generator_result(
410
+ self,
411
+ generator: AsyncGenerator,
412
+ span: Span,
413
+ provider: Provider = None,
414
+ metadata: Optional[dict[str, Any]] = None,
415
+ attributes: Optional[dict[str, Any]] = None,
416
+ ) -> AsyncGenerator:
417
+ items = []
418
+ try:
419
+ async for item in generator:
420
+ items.append(item)
421
+ yield item
422
+
423
+ finally:
424
+ output = items
425
+ if all(isinstance(item, str) for item in items):
426
+ output = "".join(items)
427
+ if span.spanType == "LLM":
428
+ collected = self._stream_list_to_dict(
429
+ provider=provider, response=output
430
+ )
431
+ attributes = self._extract_llm_attributes_from_response(
432
+ provider=provider, response=collected
433
+ )
434
+ self._finalize_span(
435
+ span=span,
436
+ provider=provider,
437
+ result=collected,
438
+ metadata=metadata,
439
+ attributes=attributes,
440
+ )
441
+
442
+
443
# TODO: add lock for thread safety
class LaminarSingleton:
    """Process-wide holder of a single LaminarContextManager instance."""

    _instance = None
    _l: Optional[LaminarContextManager] = None

    def __new__(cls):
        # Create the singleton instance only on first construction.
        if cls._instance is None:
            cls._instance = super().__new__(cls)
        return cls._instance

    def get(cls, *args, **kwargs) -> LaminarContextManager:
        """Return the shared LaminarContextManager, building it lazily with
        the given arguments on the first call.

        NOTE(review): despite the ``cls`` parameter name this is an instance
        method; it behaves as intended only because a single instance exists.
        """
        if cls._l is None:
            cls._l = LaminarContextManager(*args, **kwargs)
        return cls._l