lmnr 0.3.6__py3-none-any.whl → 0.4.0__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
lmnr/sdk/context.py DELETED
@@ -1,464 +0,0 @@
1
- from .collector import ThreadManager
2
- from .client import Laminar
3
- from .providers import Provider
4
- from .providers.fallback import FallbackProvider
5
- from .tracing_types import EvaluateEvent, Event, Span, Trace
6
- from .types import PipelineRunResponse
7
- from .utils import PROVIDER_NAME_TO_OBJECT, is_async_iterator, is_iterator
8
-
9
- from contextvars import ContextVar
10
- from typing import Any, AsyncGenerator, Generator, Literal, Optional, Union
11
- import atexit
12
- import datetime
13
- import dotenv
14
- import inspect
15
- import logging
16
- import os
17
- import pydantic
18
- import uuid
19
-
20
-
21
# Stack of the observations open in the current execution context: the root
# Trace sits at the bottom, with nested Spans stacked above it.
_lmnr_stack_context: ContextVar[list[Union[Span, Trace]]] = ContextVar(
    "lmnr_stack_context", default=[]
)
# Id of the active root trace for the current execution context, or None
# when no trace has been started yet.
_root_trace_id_context: ContextVar[Optional[str]] = ContextVar(
    "root_trace_id_context", default=None
)
27
-
28
-
29
class LaminarContextManager:
    """Per-execution-context manager of Laminar traces and spans.

    Keeps the stack of the currently open trace and spans in context
    variables (`_lmnr_stack_context` / `_root_trace_id_context`) and hands
    finished observations to a background `ThreadManager`, which sends them
    to the Laminar backend.
    """

    _log = logging.getLogger("laminar.context_manager")

    def __init__(
        self,
        project_api_key: str = None,
        threads: int = 1,
        max_task_queue_size: int = 1000,
    ):
        """Create the Laminar client and start the background sender.

        Args:
            project_api_key: Laminar project API key. Falls back to the
                LMNR_PROJECT_API_KEY environment variable, then to the
                nearest .env file.
            threads: number of sender threads in the ThreadManager.
            max_task_queue_size: maximum number of queued observations.
        """
        self.project_api_key = project_api_key or os.environ.get(
            "LMNR_PROJECT_API_KEY"
        )
        if not self.project_api_key:
            dotenv_path = dotenv.find_dotenv(usecwd=True)
            self.project_api_key = dotenv.get_key(
                dotenv_path=dotenv_path, key_to_get="LMNR_PROJECT_API_KEY"
            )
        self.laminar = Laminar(project_api_key=self.project_api_key)
        self.thread_manager = ThreadManager(
            client=self.laminar,
            max_task_queue_size=max_task_queue_size,
            threads=threads,
        )
        # atexit executes functions last in first out, so we want to make sure
        # that we finalize the trace before the thread manager is closed, so
        # the updated trace is sent to the server.
        atexit.register(self._force_finalize_trace)

    def observe_start(
        self,
        # span attributes
        name: str,
        input: Optional[Any] = None,
        metadata: Optional[dict[str, Any]] = None,
        attributes: Optional[dict[str, Any]] = None,
        span_type: Literal["DEFAULT", "LLM"] = "DEFAULT",
        check_event_names: list[str] = None,
        # trace attributes
        user_id: Optional[str] = None,
        session_id: Optional[str] = None,
        release: Optional[str] = None,
    ) -> Span:
        """Open a new span, creating a root trace first if none is active.

        The new span is pushed onto the context stack and returned; it must
        later be closed with `observe_end`.
        """
        trace_id = _root_trace_id_context.get()
        if not trace_id:
            # No active trace in this context: start one and make it the
            # bottom of the stack.
            session_id = session_id or str(uuid.uuid4())
            trace_id = uuid.uuid4()
            trace = self.update_trace(
                id=trace_id,
                user_id=user_id,
                session_id=session_id,
                release=release,
            )
            _root_trace_id_context.set(trace.id)
            _lmnr_stack_context.set([trace])

        # The parent span, if any, is whatever sits on top of the stack.
        parent = _lmnr_stack_context.get()[-1] if _lmnr_stack_context.get() else None
        parent_span_id = parent.id if isinstance(parent, Span) else None
        span = self.create_span(
            name=name,
            trace_id=trace_id,
            input=input,
            metadata=metadata,
            attributes=attributes,
            parent_span_id=parent_span_id,
            span_type=span_type,
            check_event_names=check_event_names,
        )
        stack = _lmnr_stack_context.get()
        _lmnr_stack_context.set(stack + [span])
        return span

    def observe_end(
        self,
        span: Span,
        provider_name: str = None,
        result: Optional[Any] = None,
        metadata: Optional[dict[str, Any]] = None,
        attributes: Optional[dict[str, Any]] = None,
        error: Optional[Exception] = None,
    ) -> Any:
        """Close `span` and return `result`.

        If `result` is a (sync or async) generator/iterator, a wrapping
        generator is returned instead and the span is finalized only once
        the stream is exhausted. If this was the last open span, the root
        trace is re-sent and the context is cleared.
        """
        stack = _lmnr_stack_context.get()
        if not stack:
            # NOTE(review): with no active stack the result is dropped and
            # None is returned — preserved from the original behavior.
            return
        provider = PROVIDER_NAME_TO_OBJECT.get(provider_name, FallbackProvider())
        new_stack = stack[:-1]
        _lmnr_stack_context.set(new_stack)

        if len(new_stack) == 1 and isinstance(stack[0], Trace):
            # Only the root trace remains: re-send it and clear the context.
            trace = stack[0]
            self.update_trace(
                id=trace.id,
                user_id=trace.userId,
                session_id=trace.sessionId,
                release=trace.release,
                metadata=metadata,
            )
            _root_trace_id_context.set(None)
            _lmnr_stack_context.set([])

        if error is not None:
            # NOTE(review): if the stack was just cleared above, this is a
            # no-op, because update_current_trace requires an active trace.
            self.update_current_trace(success=False)

        if inspect.isgenerator(result) or is_iterator(result):
            return self._collect_generator_result(
                provider=provider,
                generator=result,
                span=span,
                metadata=metadata,
                attributes=attributes,
            )
        elif inspect.isasyncgen(result) or is_async_iterator(result):
            return self._collect_async_generator_result(
                provider=provider,
                generator=result,
                span=span,
                metadata=metadata,
                attributes=attributes,
            )
        if span.spanType == "LLM" and error is None:
            attributes = self._extract_llm_attributes_from_response(
                provider=provider, response=result
            )
        return self._finalize_span(
            span,
            provider=provider,
            result=error or result,
            metadata=metadata,
            attributes=attributes,
        )

    def update_current_span(
        self,
        metadata: Optional[dict[str, Any]] = None,
        attributes: Optional[dict[str, Any]] = None,
        evaluate_events: list[EvaluateEvent] = None,
        override: bool = False,
    ):
        """Merge (or, with `override=True`, replace) metadata, attributes,
        and evaluate events on the innermost active span."""
        stack = _lmnr_stack_context.get()
        # The top of the stack may be the root Trace when no span is open;
        # only a Span can be updated here (same guard as `event` /
        # `evaluate_event`).
        if not stack or not isinstance(stack[-1], Span):
            return
        span = stack[-1]
        new_metadata = (
            metadata if override else {**(span.metadata or {}), **(metadata or {})}
        )
        new_evaluate_events = (
            evaluate_events
            if override
            else span.evaluateEvents + (evaluate_events or [])
        )
        new_attributes = (
            attributes
            if override
            else {**(span.attributes or {}), **(attributes or {})}
        )
        self.update_span(
            span=span,
            metadata=new_metadata,
            evaluate_events=new_evaluate_events,
            attributes=new_attributes,
        )

    def update_current_trace(
        self,
        user_id: Optional[str] = None,
        session_id: Optional[str] = None,
        release: Optional[str] = None,
        metadata: Optional[dict[str, Any]] = None,
        success: bool = True,
    ):
        """Re-send the active root trace with updated fields; no-op when no
        trace is active. Explicit arguments win over the existing values."""
        existing_trace = (
            _lmnr_stack_context.get()[0] if _lmnr_stack_context.get() else None
        )
        if not existing_trace:
            return
        self.update_trace(
            id=existing_trace.id,
            user_id=user_id or existing_trace.userId,
            session_id=session_id or existing_trace.sessionId,
            release=release or existing_trace.release,
            metadata=metadata or existing_trace.metadata,
            success=success if success is not None else existing_trace.success,
        )

    def update_trace(
        self,
        id: uuid.UUID,
        user_id: Optional[str] = None,
        session_id: Optional[str] = None,
        release: Optional[str] = None,
        metadata: Optional[dict[str, Any]] = None,
        success: bool = True,
    ) -> Trace:
        """Build a Trace object, queue it for sending, and return it."""
        trace = Trace(
            id=id,
            user_id=user_id,
            session_id=session_id,
            release=release,
            metadata=metadata,
            success=success,
        )
        self._add_observation(trace)
        return trace

    def create_span(
        self,
        name: str,
        trace_id: uuid.UUID,
        start_time: Optional[datetime.datetime] = None,
        span_type: Literal["DEFAULT", "LLM"] = "DEFAULT",
        id: Optional[uuid.UUID] = None,
        parent_span_id: Optional[uuid.UUID] = None,
        input: Optional[Any] = None,
        metadata: Optional[dict[str, Any]] = None,
        attributes: Optional[dict[str, Any]] = None,
        check_event_names: list[str] = None,
    ) -> Span:
        """Internal method to create a span object. Use `ObservationContext.span` instead."""
        span = Span(
            name=name,
            trace_id=trace_id,
            # Default the start time to "now" in UTC.
            start_time=start_time or datetime.datetime.now(datetime.timezone.utc),
            id=id,
            parent_span_id=parent_span_id,
            input=input,
            metadata=metadata,
            attributes=attributes,
            span_type=span_type,
            evaluate_events=check_event_names or [],
        )
        return span

    def update_span(
        self,
        span: Span,
        finalize: bool = False,
        input: Optional[Any] = None,
        end_time: Optional[datetime.datetime] = None,
        output: Optional[Any] = None,
        metadata: Optional[dict[str, Any]] = None,
        attributes: Optional[dict[str, Any]] = None,
        evaluate_events: Optional[list[EvaluateEvent]] = None,
        override: bool = False,
    ) -> Span:
        """Internal method to update a span object. Use `SpanContext.update()` instead."""
        span.update(
            input=input or span.input,
            output=output or span.output,
            end_time=end_time,
            metadata=metadata,
            attributes=attributes,
            evaluate_events=evaluate_events,
            override=override,
        )
        if finalize:
            # Only finalized spans are queued for sending to the backend.
            self._add_observation(span)
        return span

    def event(
        self,
        name: str,
        value: Optional[Union[str, int, float, bool]] = None,
        timestamp: Optional[datetime.datetime] = None,
    ):
        """Attach a named event to the innermost active span; warns and
        ignores the event when no span is active."""
        span = _lmnr_stack_context.get()[-1] if _lmnr_stack_context.get() else None
        if not span or not isinstance(span, Span):
            self._log.warning(f"No active span to send event. Ignoring event. {name}")
            return
        event = Event(
            name=name,
            span_id=span.id,
            timestamp=timestamp,
            value=value,
        )
        span.add_event(event)

    def evaluate_event(self, name: str, data: str):
        """Attach an evaluate (check) event to the innermost active span;
        warns and ignores the event when no span is active."""
        stack = _lmnr_stack_context.get()
        if not stack or not isinstance(stack[-1], Span):
            self._log.warning(
                f"No active span to add check event. Ignoring event. {name}"
            )
            return
        stack[-1].evaluateEvents.append(
            EvaluateEvent(
                name=name,
                data=data,
                timestamp=datetime.datetime.now(datetime.timezone.utc),
            )
        )

    def run_pipeline(
        self,
        pipeline: str,
        inputs: dict[str, Any],
        env: dict[str, str] = None,
        metadata: dict[str, str] = None,
    ) -> PipelineRunResponse:
        """Run a Laminar pipeline, attributing it to the current span/trace.

        `env` and `metadata` default to empty dicts. (Fix: the defaults were
        mutable `{}` literals shared across calls.)
        """
        env = env if env is not None else {}
        metadata = metadata if metadata is not None else {}
        span = _lmnr_stack_context.get()[-1] if _lmnr_stack_context.get() else None
        span_id = span.id if isinstance(span, Span) else None
        trace = _lmnr_stack_context.get()[0] if _lmnr_stack_context.get() else None
        trace_id = trace.id if isinstance(trace, Trace) else None
        return self.laminar.run(
            pipeline=pipeline,
            inputs=inputs,
            env=env,
            metadata=metadata,
            parent_span_id=span_id,
            trace_id=trace_id,
        )

    def _force_finalize_trace(self):
        """atexit hook; currently a no-op."""
        # TODO: flush in progress spans as error?
        pass

    def _add_observation(self, observation: Union[Span, Trace]) -> bool:
        """Queue an observation on the background sender; returns whether it
        was accepted."""
        return self.thread_manager.add_task(observation)

    def _extract_llm_attributes_from_response(
        self,
        provider: Provider,
        response: Union[str, dict[str, Any], pydantic.BaseModel],
    ) -> dict[str, Any]:
        """Delegate LLM attribute extraction to the provider."""
        return provider.extract_llm_attributes_from_response(response)

    def _stream_list_to_dict(
        self, provider: Provider, response: list[Any]
    ) -> dict[str, Any]:
        """Delegate collapsing a streamed chunk list to the provider."""
        return provider.stream_list_to_dict(response)

    def _extract_llm_output(
        self,
        provider: Provider,
        result: Union[dict[str, Any], pydantic.BaseModel],
    ) -> str:
        """Delegate LLM output extraction to the provider."""
        return provider.extract_llm_output(result)

    def _finalize_span(
        self,
        span: Span,
        provider: Provider = None,
        result: Optional[Any] = None,
        metadata: Optional[dict[str, Any]] = None,
        attributes: Optional[dict[str, Any]] = None,
    ) -> Any:
        """Set the span output (provider-extracted for LLM spans), queue the
        span for sending, and return `result` unchanged."""
        self.update_span(
            span=span,
            finalize=True,
            output=(
                result
                if span.spanType != "LLM"
                else self._extract_llm_output(provider, result)
            ),
            metadata=metadata,
            attributes=attributes,
        )
        return result

    def _collect_generator_result(
        self,
        generator: Generator,
        span: Span,
        provider: Provider = None,
        metadata: Optional[dict[str, Any]] = None,
        attributes: Optional[dict[str, Any]] = None,
    ) -> Generator:
        """Proxy `generator`, collecting its items, and finalize `span` once
        the stream is exhausted or closed."""
        items = []
        try:
            for item in generator:
                items.append(item)
                yield item

        finally:
            output = items
            if all(isinstance(item, str) for item in items):
                output = "".join(items)
            # Fix: `collected` was previously assigned only inside the LLM
            # branch, raising NameError in this `finally` for non-LLM spans.
            collected = output
            if span.spanType == "LLM":
                collected = self._stream_list_to_dict(
                    provider=provider, response=output
                )
                attributes = self._extract_llm_attributes_from_response(
                    provider=provider, response=collected
                )
            self._finalize_span(
                span=span,
                provider=provider,
                result=collected,
                metadata=metadata,
                attributes=attributes,
            )

    async def _collect_async_generator_result(
        self,
        generator: AsyncGenerator,
        span: Span,
        provider: Provider = None,
        metadata: Optional[dict[str, Any]] = None,
        attributes: Optional[dict[str, Any]] = None,
    ) -> AsyncGenerator:
        """Async counterpart of `_collect_generator_result`."""
        items = []
        try:
            async for item in generator:
                items.append(item)
                yield item

        finally:
            output = items
            if all(isinstance(item, str) for item in items):
                output = "".join(items)
            # Fix: same NameError as in `_collect_generator_result` when the
            # span is not an LLM span.
            collected = output
            if span.spanType == "LLM":
                collected = self._stream_list_to_dict(
                    provider=provider, response=output
                )
                attributes = self._extract_llm_attributes_from_response(
                    provider=provider, response=collected
                )
            self._finalize_span(
                span=span,
                provider=provider,
                result=collected,
                metadata=metadata,
                attributes=attributes,
            )
450
-
451
# TODO: add lock for thread safety
class LaminarSingleton:
    """Process-wide accessor for a single shared LaminarContextManager."""

    _instance = None
    # Lazily created shared manager; populated on the first `get()` call.
    _l: Optional[LaminarContextManager] = None

    def __new__(cls):
        # Always hand out the same instance.
        if not cls._instance:
            cls._instance = super(LaminarSingleton, cls).__new__(cls)
        return cls._instance

    def get(self, *args, **kwargs) -> LaminarContextManager:
        """Return the shared LaminarContextManager, creating it on first use.

        Extra arguments are forwarded to the LaminarContextManager
        constructor only on the creating call; later calls ignore them.

        Fix: the first parameter was misleadingly named `cls` on an instance
        method, and the cached manager was stored as an instance attribute
        despite `_l` being declared a class attribute. The cache now lives on
        the class, as declared; external behavior is unchanged because the
        class is a singleton.
        """
        if not self._l:
            type(self)._l = LaminarContextManager(*args, **kwargs)
        return self._l