lmnr 0.2.15__py3-none-any.whl → 0.3.0__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (43)
  1. lmnr/__init__.py +4 -4
  2. lmnr/sdk/client.py +161 -0
  3. lmnr/sdk/collector.py +177 -0
  4. lmnr/sdk/constants.py +1 -0
  5. lmnr/sdk/context.py +456 -0
  6. lmnr/sdk/decorators.py +277 -0
  7. lmnr/sdk/interface.py +339 -0
  8. lmnr/sdk/providers/__init__.py +2 -0
  9. lmnr/sdk/providers/base.py +28 -0
  10. lmnr/sdk/providers/fallback.py +131 -0
  11. lmnr/sdk/providers/openai.py +140 -0
  12. lmnr/sdk/providers/utils.py +33 -0
  13. lmnr/sdk/tracing_types.py +197 -0
  14. lmnr/sdk/types.py +69 -0
  15. lmnr/sdk/utils.py +102 -0
  16. lmnr-0.3.0.dist-info/METADATA +185 -0
  17. lmnr-0.3.0.dist-info/RECORD +21 -0
  18. lmnr/cli/__init__.py +0 -0
  19. lmnr/cli/__main__.py +0 -4
  20. lmnr/cli/cli.py +0 -230
  21. lmnr/cli/parser/__init__.py +0 -0
  22. lmnr/cli/parser/nodes/__init__.py +0 -45
  23. lmnr/cli/parser/nodes/code.py +0 -36
  24. lmnr/cli/parser/nodes/condition.py +0 -30
  25. lmnr/cli/parser/nodes/input.py +0 -25
  26. lmnr/cli/parser/nodes/json_extractor.py +0 -29
  27. lmnr/cli/parser/nodes/llm.py +0 -56
  28. lmnr/cli/parser/nodes/output.py +0 -27
  29. lmnr/cli/parser/nodes/router.py +0 -37
  30. lmnr/cli/parser/nodes/semantic_search.py +0 -53
  31. lmnr/cli/parser/nodes/types.py +0 -153
  32. lmnr/cli/parser/parser.py +0 -62
  33. lmnr/cli/parser/utils.py +0 -49
  34. lmnr/cli/zip.py +0 -16
  35. lmnr/sdk/endpoint.py +0 -186
  36. lmnr/sdk/registry.py +0 -29
  37. lmnr/sdk/remote_debugger.py +0 -148
  38. lmnr/types.py +0 -101
  39. lmnr-0.2.15.dist-info/METADATA +0 -187
  40. lmnr-0.2.15.dist-info/RECORD +0 -28
  41. {lmnr-0.2.15.dist-info → lmnr-0.3.0.dist-info}/LICENSE +0 -0
  42. {lmnr-0.2.15.dist-info → lmnr-0.3.0.dist-info}/WHEEL +0 -0
  43. {lmnr-0.2.15.dist-info → lmnr-0.3.0.dist-info}/entry_points.txt +0 -0
lmnr/__init__.py CHANGED
@@ -1,4 +1,4 @@
- from .sdk.endpoint import Laminar
- from .types import ChatMessage, EndpointRunError, EndpointRunResponse, NodeInput
- from .sdk.remote_debugger import RemoteDebugger as LaminarRemoteDebugger
- from .sdk.registry import Registry as Pipeline
+ from .sdk.client import Laminar
+ from .sdk.decorators import observe, lmnr_context, wrap_llm_call
+ from .sdk.interface import trace, TraceContext, SpanContext
+ from .sdk.types import ChatMessage, PipelineRunError, PipelineRunResponse, NodeInput
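The top-level package now re-exports the new SDK surface: the Laminar client, the observe/lmnr_context/wrap_llm_call decorators, the trace interface with TraceContext and SpanContext, and the pipeline types. A minimal sketch of the new import line, assuming the wheel is installed as lmnr (only the names, not their behavior, are confirmed by this hunk):

    # Names re-exported by lmnr/__init__.py in 0.3.0; the behavior of observe/trace is
    # defined in decorators.py and interface.py, which are added elsewhere in this diff.
    from lmnr import Laminar, observe, lmnr_context, wrap_llm_call, trace, ChatMessage, NodeInput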
lmnr/sdk/client.py ADDED
@@ -0,0 +1,161 @@
+ from .tracing_types import Span, Trace
+
+ from pydantic.alias_generators import to_snake
+ from typing import Any, Optional, Union
+ import dotenv
+ import json
+ import logging
+ import os
+ import requests
+ import uuid
+
+ from .types import (
+     PipelineRunError,
+     PipelineRunResponse,
+     NodeInput,
+     PipelineRunRequest,
+ )
+
+
+ class APIError(Exception):
+     def __init__(self, status: Union[int, str], message: str, details: Any = None):
+         self.message = message
+         self.status = status
+         self.details = details
+
+     def __str__(self):
+         msg = "{0} ({1}): {2}"
+         return msg.format(self.message, self.status, self.details)
+
+
+ class Laminar:
+     _base_url = "https://api.lmnr.ai"
+
+     def __init__(self, project_api_key: Optional[str] = None):
+         self.project_api_key = project_api_key or os.environ.get("LMNR_PROJECT_API_KEY")
+         if not self.project_api_key:
+             dotenv_path = dotenv.find_dotenv(usecwd=True)
+             self.project_api_key = dotenv.get_key(
+                 dotenv_path=dotenv_path, key_to_get="LMNR_PROJECT_API_KEY"
+             )
+         if not self.project_api_key:
+             raise ValueError(
+                 "Please initialize the Laminar object with your project API key or set "
+                 "the LMNR_PROJECT_API_KEY environment variable in your environment or .env file"
+             )
+
+     def run(
+         self,
+         pipeline: str,
+         inputs: dict[str, NodeInput],
+         env: dict[str, str] = {},
+         metadata: dict[str, str] = {},
+         parent_span_id: Optional[uuid.UUID] = None,
+         trace_id: Optional[uuid.UUID] = None,
+     ) -> PipelineRunResponse:
+         """Runs the pipeline with the given inputs
+
+         Args:
+             pipeline (str): name of the Laminar pipeline
+             inputs (dict[str, NodeInput]):
+                 inputs to the endpoint's target pipeline.
+                 Keys in the dictionary must match input node names
+             env (dict[str, str], optional):
+                 Environment variables for the pipeline execution.
+                 Defaults to {}.
+             metadata (dict[str, str], optional):
+                 any custom metadata to be stored
+                 with execution trace. Defaults to {}.
+             parent_span_id (Optional[uuid.UUID], optional):
+                 parent span id for the resulting span.
+                 Must usually be SpanContext.id()
+                 Defaults to None.
+             trace_id (Optional[uuid.UUID], optional):
+                 trace id for the resulting trace.
+                 Must usually be TraceContext.id()
+                 Defaults to None.
+
+         Returns:
+             PipelineRunResponse: response object containing the outputs
+
+         Raises:
+             ValueError: if project API key is not set
+             PipelineRunError: if the endpoint run fails
+         """
+         if self.project_api_key is None:
+             raise ValueError(
+                 "Please initialize the Laminar object with your project API key or set "
+                 "the LMNR_PROJECT_API_KEY environment variable"
+             )
+         try:
+             request = PipelineRunRequest(
+                 inputs=inputs,
+                 pipeline=pipeline,
+                 env=env,
+                 metadata=metadata,
+                 parent_span_id=parent_span_id,
+                 trace_id=trace_id,
+             )
+         except Exception as e:
+             raise ValueError(f"Invalid request: {e}")
+
+         response = requests.post(
+             self._base_url + "/v1/pipeline/run",
+             data=json.dumps(request.to_dict()),
+             headers=self._headers(),
+         )
+         if response.status_code != 200:
+             raise PipelineRunError(response)
+         try:
+             resp_json = response.json()
+             keys = list(resp_json.keys())
+             for key in keys:
+                 value = resp_json[key]
+                 del resp_json[key]
+                 resp_json[to_snake(key)] = value
+             return PipelineRunResponse(**resp_json)
+         except Exception:
+             raise PipelineRunError(response)
+
+     def batch_post_traces(self, batch: list[Union[Span, Trace]]):
+         log = logging.getLogger("laminar.client")
+         url = self._base_url + "/v1/traces"
+         data = json.dumps({"traces": [item.to_dict() for item in batch]})
+         log.debug(f"making request to {url}")
+         headers = self._headers()
+         res = requests.post(url, data=data, headers=headers)
+
+         if res.status_code == 200:
+             log.debug("data uploaded successfully")
+
+         return self._process_response(
+             res, success_message="data uploaded successfully", return_json=False
+         )
+
+     def _process_response(
+         self, res: requests.Response, success_message: str, return_json: bool = True
+     ) -> Union[requests.Response, Any]:
+         log = logging.getLogger("laminar.client")
+         log.debug("received response: %s", res.text)
+         if res.status_code in (200, 201):
+             log.debug(success_message)
+             if return_json:
+                 try:
+                     return res.json()
+                 except json.JSONDecodeError:
+                     log.error("Response is not valid JSON.")
+                     raise APIError(res.status_code, "Invalid JSON response received")
+             else:
+                 return res
+         try:
+             payload = res.json()
+             log.error("received error response: %s", payload)
+             raise APIError(res.status_code, payload)
+         except (KeyError, ValueError):
+             raise APIError(res.status_code, res.text)
+
+     def _headers(self):
+         return {
+             "Authorization": "Bearer " + self.project_api_key,
+             "Content-Type": "application/json",
+         }
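Taken together with the run docstring above, a hedged usage sketch of the new client; the pipeline name, input key, and env values below are placeholders, not values from this package:

    from lmnr import Laminar

    # Assumes LMNR_PROJECT_API_KEY is set in the environment or a .env file;
    # otherwise pass project_api_key= explicitly, or a ValueError is raised.
    client = Laminar()

    response = client.run(
        pipeline="my_pipeline",            # hypothetical pipeline name
        inputs={"input_node": "Hello"},    # keys must match the pipeline's input node names
        env={"OPENAI_API_KEY": "sk-..."},  # per-run environment variables
        metadata={"source": "example"},    # stored with the execution trace
    )
    print(response)  # PipelineRunResponse; its fields are defined in lmnr/sdk/types.py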
lmnr/sdk/collector.py ADDED
@@ -0,0 +1,177 @@
+ from .client import APIError, Laminar
+ from .tracing_types import Span, Trace
+
+ from queue import Queue, Empty, Full
+ from typing import Union
+
+ import atexit
+ import backoff
+ import logging
+ import time
+ import threading
+
+
+ class Collector(threading.Thread):
+     _log = logging.getLogger("laminar.collector")
+     _queue: Queue[Union[Span, Trace]]
+     _client: Laminar
+     _flush_interval: float
+
+     def __init__(
+         self,
+         queue: Queue[Union[Span, Trace]],
+         client: Laminar,
+         flush_interval: float = 5.0,
+     ):
+         super().__init__()
+         self.daemon = True
+         self._queue = queue
+         self.running = True
+         self._flush_interval = flush_interval
+         self._client = client
+
+     def run(self):
+         """Runs the collector."""
+         self._log.debug("collector is running...")
+         while self.running:
+             self.upload()
+
+     def upload(self):
+         """Upload the next batch of items, return whether successful."""
+         batch = self._next()
+         if len(batch) == 0:
+             return
+
+         try:
+             self._upload_batch(batch)
+         except Exception as e:
+             self._log.exception("error uploading: %s", e)
+         finally:
+             # mark items as acknowledged from queue
+             for _ in batch:
+                 self._queue.task_done()
+
+     def pause(self):
+         self.running = False
+
+     def _next(self):
+         items = []
+         start_time = time.monotonic()
+
+         while True:
+             elapsed = time.monotonic() - start_time
+             if elapsed >= self._flush_interval:
+                 break
+             try:
+                 item = self._queue.get(
+                     block=True, timeout=self._flush_interval - elapsed
+                 )
+                 items.append(item)
+             except Empty:
+                 break
+         return items
+
+     def _upload_batch(self, batch: list[Union[Trace, Span]]):
+         self._log.debug("uploading batch of %d items", len(batch))
+
+         @backoff.on_exception(backoff.expo, Exception, max_tries=5)
+         def execute_task_with_backoff(batch: list[Union[Trace, Span]]):
+             try:
+                 self._client.batch_post_traces(batch=batch)
+             except Exception as e:
+                 if (
+                     isinstance(e, APIError)
+                     and 400 <= int(e.status) < 500
+                     and int(e.status) != 429
+                 ):
+                     self._log.warn(
+                         f"Received {e.status} error by Laminar server, not retrying: {e.message}"
+                     )
+                     return
+
+                 raise e
+
+         execute_task_with_backoff(batch)
+         self._log.debug("successfully uploaded batch of %d items", len(batch))
+
+
+ class ThreadManager:
+     _log = logging.getLogger("laminar.task_manager")
+     _queue: Queue[Union[Span, Trace]]
+     _client: Laminar
+     _max_task_queue_size: int
+     _flush_interval: float
+     _collectors: list[Collector] = []
+     _threads: int
+
+     def __init__(
+         self,
+         client: Laminar,
+         flush_interval: float = 2.0,
+         max_task_queue_size: int = 1000,
+         threads: int = 1,
+     ):
+         self._max_task_queue_size = max_task_queue_size
+         self._threads = threads
+         self._queue = Queue(maxsize=self._max_task_queue_size)
+         self._flush_interval = flush_interval
+         self._client = client
+         for _ in range(self._threads):
+             collector = Collector(
+                 queue=self._queue,
+                 client=self._client,
+                 flush_interval=flush_interval,
+             )
+             self._collectors.append(collector)
+             collector.start()
+         atexit.register(self.join)
+
+     def add_task(self, event: Union[Span, Trace]) -> bool:
+         try:
+             self._queue.put(event, block=False)
+             return True
+         except Full:
+             self._log.warning("queue is full")
+             return False
+         except Exception as e:
+             self._log.exception(f"Exception in adding task {e}")
+
+             return False
+
+     def flush(self):
+         """Forces a flush from the internal queue to the server"""
+         self._log.debug("flushing queue")
+         queue = self._queue
+         size = queue.qsize()
+         queue.join()
+         # Note that this message may not be precise, because of threading.
+         self._log.debug("successfully flushed about %s items.", size)
+
+     def join(self):
+         """Ends the collector threads once the queue is empty.
+         Blocks execution until finished
+         """
+         self._log.debug(f"joining {len(self._collectors)} collector threads")
+
+         # pause all collectors before joining them so we don't have to wait for multiple
+         # flush intervals to join them all.
+         for collector in self._collectors:
+             collector.pause()
+
+         for i, collector in enumerate(self._collectors):
+             try:
+                 collector.join()
+             except RuntimeError:
+                 # collector thread has not started
+                 pass
+
+             self._log.debug(f"collector thread {i} joined")
+
+     def shutdown(self):
+         """Flush all messages and cleanly shutdown the client"""
+         self._log.debug("shutdown initiated")
+
+         self.flush()
+         self.join()
+
+         self._log.debug("shutdown completed")
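The collector is a batching background uploader: each Collector thread drains the shared queue for up to flush_interval seconds, then posts the batch through Laminar.batch_post_traces with exponential backoff, giving up without retries on 4xx responses other than 429. A hedged sketch of driving ThreadManager directly (in the SDK it is presumably wired up by the tracing context rather than called by hand, and the Span/Trace construction is omitted because tracing_types.py is not shown in this section):

    from lmnr.sdk.client import Laminar
    from lmnr.sdk.collector import ThreadManager

    manager = ThreadManager(client=Laminar(), flush_interval=2.0, threads=1)
    # manager.add_task(span_or_trace)  # enqueue a Span or Trace; returns False if the queue is full
    manager.flush()                    # blocks until everything queued so far has been uploaded
    manager.shutdown()                 # flush, then pause and join the collector threads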
lmnr/sdk/constants.py ADDED
@@ -0,0 +1 @@
+ CURRENT_TRACING_VERSION = "0.1.0"