lmnr 0.4.64__py3-none-any.whl → 0.4.66__py3-none-any.whl

This diff shows the changes between two publicly released versions of the package, as they appear in their respective public registries. It is provided for informational purposes only.
lmnr/sdk/laminar.py CHANGED
@@ -17,13 +17,12 @@ from opentelemetry.exporter.otlp.proto.grpc.trace_exporter import (
     OTLPSpanExporter,
     Compression,
 )
+from opentelemetry.sdk.trace.id_generator import RandomIdGenerator
 from opentelemetry.util.types import AttributeValue
 
-from pydantic.alias_generators import to_snake
 from typing import Any, Awaitable, Literal, Optional, Set, Union
 
-import aiohttp
-import asyncio
+import atexit
 import copy
 import datetime
 import dotenv
@@ -31,9 +30,7 @@ import json
 import logging
 import os
 import random
-import requests
 import re
-import urllib.parse
 import uuid
 import warnings
 
@@ -44,23 +41,19 @@ from lmnr.openllmetry_sdk.tracing.attributes import (
     TRACE_TYPE,
 )
 from lmnr.openllmetry_sdk.tracing.tracing import (
+    get_association_properties,
     remove_association_properties,
     set_association_properties,
     update_association_properties,
 )
+from lmnr.sdk.client import LaminarClient
 
 from .log import VerboseColorfulFormatter
 
 from .types import (
-    InitEvaluationResponse,
-    EvaluationResultDatapoint,
-    GetDatapointsResponse,
     LaminarSpanContext,
-    PipelineRunError,
     PipelineRunResponse,
     NodeInput,
-    PipelineRunRequest,
-    SemanticSearchRequest,
     SemanticSearchResponse,
     TraceType,
     TracingLevel,
@@ -140,7 +133,7 @@ class Laminar:
                 " your project API key or set the LMNR_PROJECT_API_KEY"
                 " environment variable in your environment or .env file"
             )
-        url = base_url or "https://api.lmnr.ai"
+        url = re.sub(r"/$", "", base_url or "https://api.lmnr.ai")
         if re.search(r":\d{1,5}$", url):
             raise ValueError(
                 "Please provide the `base_url` without the port number. "
@@ -152,6 +145,15 @@ class Laminar:
         cls.__env = env
         cls.__initialized = True
         cls._initialize_logger()
+        LaminarClient.initialize(
+            base_url=cls.__base_http_url,
+            project_api_key=cls.__project_api_key,
+        )
+        atexit.register(LaminarClient.shutdown)
+        if not os.environ.get("OTEL_ATTRIBUTE_COUNT_LIMIT"):
+            # each message is at least 2 attributes: role and content,
+            # but the default attribute limit is 128, so raise it
+            os.environ["OTEL_ATTRIBUTE_COUNT_LIMIT"] = "10000"
 
         # if not is_latest_version():
         #     cls.__logger.warning(
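Taken together, the two hunks above change initialization in three ways: `base_url` is normalized by stripping a trailing slash, the shared `LaminarClient` is set up and its shutdown registered via `atexit`, and `OTEL_ATTRIBUTE_COUNT_LIMIT` is raised to 10000 only when the variable is unset. A hedged usage sketch (the API key value is a placeholder):

```python
import os

from lmnr import Laminar

# An explicit limit set before initialize() is respected; otherwise the SDK
# now defaults it to "10000" so long message lists do not hit the default
# 128-attribute cap.
os.environ["OTEL_ATTRIBUTE_COUNT_LIMIT"] = "512"

# A trailing slash in base_url is stripped before the port check runs.
Laminar.initialize(
    project_api_key="<LMNR_PROJECT_API_KEY>",
    base_url="https://api.lmnr.ai/",
)
# LaminarClient.shutdown is registered with atexit, so short-lived scripts
# need no explicit cleanup.
```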
@@ -232,35 +234,14 @@ class Laminar:
             ValueError: if project API key is not set
             PipelineRunError: if the endpoint run fails
         """
-        if cls.__project_api_key is None:
-            raise ValueError(
-                "Please initialize the Laminar object with your project "
-                "API key or set the LMNR_PROJECT_API_KEY environment variable"
-            )
-        try:
-            current_span = trace.get_current_span()
-            if current_span != trace.INVALID_SPAN:
-                parent_span_id = parent_span_id or uuid.UUID(
-                    int=current_span.get_span_context().span_id
-                )
-                trace_id = trace_id or uuid.UUID(
-                    int=current_span.get_span_context().trace_id
-                )
-            request = PipelineRunRequest(
-                inputs=inputs,
-                pipeline=pipeline,
-                env=env or cls.__env,
-                metadata=metadata,
-                parent_span_id=parent_span_id,
-                trace_id=trace_id,
-            )
-            loop = asyncio.get_event_loop()
-            if loop.is_running():
-                return cls.__run(request)
-            else:
-                return asyncio.run(cls.__run(request))
-        except Exception as e:
-            raise ValueError(f"Invalid request: {e}")
+        return LaminarClient.run_pipeline(
+            pipeline=pipeline,
+            inputs=inputs,
+            env=env or cls.__env,
+            metadata=metadata,
+            parent_span_id=parent_span_id,
+            trace_id=trace_id,
+        )
 
     @classmethod
     def semantic_search(
@@ -284,17 +265,12 @@ class Laminar:
         Returns:
             SemanticSearchResponse: response object containing the search results sorted by score in descending order
         """
-        request = SemanticSearchRequest(
+        return LaminarClient.semantic_search(
             query=query,
             dataset_id=dataset_id,
             limit=limit,
             threshold=threshold,
         )
-        loop = asyncio.get_event_loop()
-        if loop.is_running():
-            return cls.__semantic_search(request)
-        else:
-            return asyncio.run(cls.__semantic_search(request))
 
     @classmethod
     def event(
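The two methods above keep their public signatures but now delegate to `LaminarClient` instead of juggling `aiohttp` sessions and event loops themselves. A hedged usage sketch; the pipeline-run entry point is shown as `Laminar.run` (its name is not visible in this hunk), and the pipeline name, inputs, and dataset id are placeholders:

```python
import uuid

from lmnr import Laminar

Laminar.initialize(project_api_key="<LMNR_PROJECT_API_KEY>")

# Delegates to LaminarClient.run_pipeline under the hood.
run_response = Laminar.run(
    pipeline="example_pipeline",  # placeholder pipeline name
    inputs={"query": "hello"},    # placeholder inputs
)

# Delegates to LaminarClient.semantic_search.
search_response = Laminar.semantic_search(
    query="hello",
    dataset_id=uuid.UUID(int=0),  # placeholder dataset id
    limit=5,
    threshold=0.5,
)
```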
@@ -348,7 +324,7 @@ class Laminar:
             Literal["DEFAULT"], Literal["LLM"], Literal["TOOL"]
         ] = "DEFAULT",
         context: Optional[Context] = None,
-        labels: Optional[dict[str, str]] = None,
+        labels: Optional[list[str]] = None,
         parent_span_context: Optional[LaminarSpanContext] = None,
         # deprecated, use parent_span_context instead
         trace_id: Optional[uuid.UUID] = None,
@@ -384,7 +360,7 @@ class Laminar:
                 `Laminar.get_span_context`, `Laminar.get_span_context_dict` and\
                 `Laminar.get_span_context_str` for more information.
                 Defaults to None.
-            labels (Optional[dict[str, str]], optional): labels to set for the\
+            labels (Optional[list[str]], optional): labels to set for the\
                 span. Defaults to None.
             trace_id (Optional[uuid.UUID], optional): [Deprecated] override\
                 the trace id for the span. If not provided, use the current\
@@ -392,7 +368,13 @@ class Laminar:
         """
 
         if not cls.is_initialized():
-            yield
+            yield trace.NonRecordingSpan(
+                trace.SpanContext(
+                    trace_id=RandomIdGenerator().generate_trace_id(),
+                    span_id=RandomIdGenerator().generate_span_id(),
+                    is_remote=False,
+                )
+            )
             return
 
         with get_tracer() as tracer:
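With this hunk, entering the context manager before `Laminar.initialize()` yields an OpenTelemetry `NonRecordingSpan` with freshly generated ids instead of a bare `None`, so span methods remain safe to call. A brief sketch of the expected behavior:

```python
from lmnr import Laminar

# Laminar.initialize() has intentionally NOT been called here.
with Laminar.start_as_current_span("uninstrumented_block") as span:
    span.set_attribute("key", "value")  # no-op on a non-recording span
    print(span.is_recording())          # False: nothing is recorded or exported
```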
@@ -425,10 +407,7 @@ class Laminar:
             label_props = {}
             try:
                 if labels:
-                    label_props = dict(
-                        (f"{ASSOCIATION_PROPERTIES}.label.{k}", json_dumps(v))
-                        for k, v in labels.items()  # noqa: F821
-                    )
+                    label_props = {f"{ASSOCIATION_PROPERTIES}.labels": labels}
             except Exception:
                 cls.__logger.warning(
                     f"`start_as_current_span` Could not set labels: {labels}. "
@@ -466,7 +445,7 @@ class Laminar:
 
     @classmethod
     @contextmanager
-    def with_labels(cls, labels: dict[str, str], context: Optional[Context] = None):
+    def with_labels(cls, labels: list[str], context: Optional[Context] = None):
         """Set labels for spans within this `with` context. This is useful for
         adding labels to the spans created in the auto-instrumentations.
 
@@ -481,17 +460,21 @@ class Laminar:
             openai_client.chat.completions.create()
         ```
         """
+        if not cls.is_initialized():
+            yield
+            return
+
         with get_tracer():
             label_props = labels.copy()
-            label_props = dict(
-                (f"label.{k}", json_dumps(v)) for k, v in label_props.items()
-            )
+            prev_labels = get_association_properties(context).get("labels", [])
             update_association_properties(
-                label_props, set_on_current_span=False, context=context
+                {"labels": prev_labels + label_props},
+                set_on_current_span=False,
+                context=context,
             )
             yield
             try:
-                remove_association_properties(label_props)
+                set_association_properties({"labels": prev_labels})
             except Exception:
                 cls.__logger.warning(
                     f"`with_labels` Could not remove labels: {labels}. They will be "
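Labels are now a flat list of strings rather than a `dict[str, str]`, both on individual spans and in `with_labels`, and nested `with_labels` blocks extend the previously stored list via the association properties. A hedged usage sketch (the label values are placeholders):

```python
from lmnr import Laminar

Laminar.initialize(project_api_key="<LMNR_PROJECT_API_KEY>")

# Per-span labels: a list of strings instead of key/value pairs.
with Laminar.start_as_current_span("handle_request", labels=["beta", "eu"]):
    ...

# Labels attached to auto-instrumented spans created inside the block,
# e.g. openai_client.chat.completions.create().
with Laminar.with_labels(["beta"]):
    ...
```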
@@ -569,6 +552,15 @@ class Laminar:
                 `parent_span_context` instead. If provided, it will be used to\
                 set the trace id for the span.
         """
+        if not cls.is_initialized():
+            return trace.NonRecordingSpan(
+                trace.SpanContext(
+                    trace_id=RandomIdGenerator().generate_trace_id(),
+                    span_id=RandomIdGenerator().generate_span_id(),
+                    is_remote=False,
+                )
+            )
+
         with get_tracer() as tracer:
             ctx = context or context_api.get_current()
             if trace_id is not None:
@@ -598,10 +590,9 @@ class Laminar:
             label_props = {}
             try:
                 if labels:
-                    label_props = dict(
-                        (f"{ASSOCIATION_PROPERTIES}.label.{k}", json_dumps(v))
-                        for k, v in labels.items()  # noqa: F821
-                    )
+                    label_props = {
+                        f"{ASSOCIATION_PROPERTIES}.labels": json_dumps(labels)
+                    }
             except Exception:
                 cls.__logger.warning(
                     f"`start_span` Could not set labels: {labels}. They will be "
@@ -749,7 +740,14 @@ class Laminar:
     def get_laminar_span_context_dict(
         cls, span: Optional[trace.Span] = None
     ) -> Optional[dict]:
-        """Get the laminar span context for a given span as a dictionary.
+        span_context = cls.get_laminar_span_context(span)
+        if span_context is None:
+            return None
+        return span_context.to_dict()
+
+    @classmethod
+    def serialize_span_context(cls, span: Optional[trace.Span] = None) -> Optional[str]:
+        """Get the laminar span context for a given span as a string.
         If no span is provided, the current active span will be used.
 
         This is useful for continuing a trace across services.
@@ -758,13 +756,13 @@ class Laminar:
         ```python
         # service A:
         with Laminar.start_as_current_span("service_a"):
-            span_context = Laminar.get_laminar_span_context_dict()
+            span_context = Laminar.serialize_span_context()
             # send span_context to service B
             call_service_b(request, headers={"laminar-span-context": span_context})
 
         # service B:
         def call_service_b(request, headers):
-            span_context = LaminarSpanContext.from_dict(headers["laminar-span-context"])
+            span_context = Laminar.deserialize_span_context(headers["laminar-span-context"])
             with Laminar.start_as_current_span("service_b", parent_span_context=span_context):
                 # rest of the function
                 pass
@@ -776,22 +774,13 @@ class Laminar:
         service_b
         ```
         """
-        span_context = cls.get_laminar_span_context(span)
-        if span_context is None:
-            return None
-        return span_context.to_dict()
-
-    @classmethod
-    def get_laminar_span_context_str(
-        cls, span: Optional[trace.Span] = None
-    ) -> Optional[str]:
         span_context = cls.get_laminar_span_context(span)
         if span_context is None:
             return None
         return json.dumps(span_context.to_dict())
 
     @classmethod
-    def deserialize_laminar_span_context(
+    def deserialize_span_context(
         cls, span_context: Union[dict, str]
     ) -> LaminarSpanContext:
         return LaminarSpanContext.deserialize(span_context)
@@ -799,6 +788,12 @@ class Laminar:
     @classmethod
     def shutdown(cls):
         Traceloop.flush()
+        LaminarClient.shutdown()
+
+    @classmethod
+    async def shutdown_async(cls):
+        Traceloop.flush()
+        await LaminarClient.shutdown_async()
 
     @classmethod
     def set_session(
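A hedged sketch of the new `shutdown_async`, which lets async applications flush traces and close the underlying client without blocking the event loop; synchronous callers can keep using `Laminar.shutdown()`:

```python
import asyncio

from lmnr import Laminar


async def main() -> None:
    Laminar.initialize(project_api_key="<LMNR_PROJECT_API_KEY>")
    # ... traced work ...
    await Laminar.shutdown_async()  # flushes Traceloop and closes LaminarClient


asyncio.run(main())
```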
@@ -848,74 +843,6 @@ class Laminar:
         props.pop("user_id", None)
         set_association_properties(props)
 
-    @classmethod
-    async def init_eval(
-        cls, name: Optional[str] = None, group_name: Optional[str] = None
-    ) -> InitEvaluationResponse:
-        async with aiohttp.ClientSession() as session:
-            async with session.post(
-                cls.__base_http_url + "/v1/evals",
-                json={
-                    "name": name,
-                    "groupName": group_name,
-                },
-                headers=cls._headers(),
-            ) as response:
-                resp_json = await response.json()
-                return InitEvaluationResponse.model_validate(resp_json)
-
-    @classmethod
-    async def save_eval_datapoints(
-        cls,
-        eval_id: uuid.UUID,
-        datapoints: list[EvaluationResultDatapoint],
-        groupName: Optional[str] = None,
-    ):
-        async with aiohttp.ClientSession() as session:
-
-            async with session.post(
-                cls.__base_http_url + f"/v1/evals/{eval_id}/datapoints",
-                json={
-                    "points": [datapoint.to_dict() for datapoint in datapoints],
-                    "groupName": groupName,
-                },
-                headers=cls._headers(),
-            ) as response:
-                if response.status != 200:
-                    raise ValueError(
-                        f"Error saving evaluation datapoints: {response.text}"
-                    )
-
-    @classmethod
-    def get_datapoints(
-        cls,
-        dataset_name: str,
-        offset: int,
-        limit: int,
-    ) -> GetDatapointsResponse:
-        # TODO: Use aiohttp. Currently, this function is called from within
-        # `LaminarDataset.__len__`, which is sync, but can be called from
-        # both sync and async. Python does not make it easy to mix things this
-        # way, so we should probably refactor `LaminarDataset`.
-        params = {"name": dataset_name, "offset": offset, "limit": limit}
-        url = (
-            cls.__base_http_url
-            + "/v1/datasets/datapoints?"
-            + urllib.parse.urlencode(params)
-        )
-        response = requests.get(url, headers=cls._headers())
-        if response.status_code != 200:
-            try:
-                resp_json = response.json()
-                raise ValueError(
-                    f"Error fetching datapoints: [{response.status_code}] {json.dumps(resp_json)}"
-                )
-            except requests.exceptions.RequestException:
-                raise ValueError(
-                    f"Error fetching datapoints: [{response.status_code}] {response.text}"
-                )
-        return GetDatapointsResponse.model_validate(response.json())
-
     @classmethod
     def _headers(cls):
         assert cls.__project_api_key is not None, "Project API key is not set"
@@ -937,52 +864,3 @@ class Laminar:
             TRACE_TYPE: trace_type.value,
         }
         update_association_properties(association_properties)
-
-    @classmethod
-    async def __run(
-        cls,
-        request: PipelineRunRequest,
-    ) -> PipelineRunResponse:
-        async with aiohttp.ClientSession() as session:
-            async with session.post(
-                cls.__base_http_url + "/v1/pipeline/run",
-                data=json.dumps(request.to_dict()),
-                headers=cls._headers(),
-            ) as response:
-                if response.status != 200:
-                    raise PipelineRunError(response)
-                try:
-                    resp_json = await response.json()
-                    keys = list(resp_json.keys())
-                    for key in keys:
-                        value = resp_json[key]
-                        del resp_json[key]
-                        resp_json[to_snake(key)] = value
-                    return PipelineRunResponse(**resp_json)
-                except Exception:
-                    raise PipelineRunError(response)
-
-    @classmethod
-    async def __semantic_search(
-        cls,
-        request: SemanticSearchRequest,
-    ) -> SemanticSearchResponse:
-        async with aiohttp.ClientSession() as session:
-            async with session.post(
-                cls.__base_http_url + "/v1/semantic-search",
-                data=json.dumps(request.to_dict()),
-                headers=cls._headers(),
-            ) as response:
-                if response.status != 200:
-                    raise ValueError(
-                        f"Error performing semantic search: [{response.status}] {response.text}"
-                    )
-                try:
-                    resp_json = await response.json()
-                    for result in resp_json["results"]:
-                        result["dataset_id"] = uuid.UUID(result["datasetId"])
-                    return SemanticSearchResponse(**resp_json)
-                except Exception as e:
-                    raise ValueError(
-                        f"Error parsing semantic search response: status={response.status} error={e}"
-                    )
lmnr/sdk/types.py CHANGED
@@ -153,7 +153,32 @@ class InitEvaluationResponse(pydantic.BaseModel):
     projectId: uuid.UUID
 
 
+class PartialEvaluationDatapoint(pydantic.BaseModel):
+    id: uuid.UUID
+    data: EvaluationDatapointData
+    target: EvaluationDatapointTarget
+    index: int
+    trace_id: uuid.UUID
+    executor_span_id: uuid.UUID
+
+    # uuid is not serializable by default, so we need to convert it to a string
+    def to_dict(self):
+        try:
+            return {
+                "id": str(self.id),
+                "data": str(serialize(self.data))[:100],
+                "target": str(serialize(self.target))[:100],
+                "index": self.index,
+                "traceId": str(self.trace_id),
+                "executorSpanId": str(self.executor_span_id),
+            }
+        except Exception as e:
+            raise ValueError(f"Error serializing PartialEvaluationDatapoint: {e}")
+
+
 class EvaluationResultDatapoint(pydantic.BaseModel):
+    id: uuid.UUID
+    index: int
     data: EvaluationDatapointData
     target: EvaluationDatapointTarget
     executor_output: ExecutorFunctionReturnType
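A hedged sketch of the new `PartialEvaluationDatapoint`, assuming `data` and `target` accept plain JSON-like values: `to_dict` stringifies the UUIDs, camelCases the keys, and keeps only a 100-character preview of `data` and `target`, mirroring `EvaluationResultDatapoint.to_dict`:

```python
import uuid

from lmnr.sdk.types import PartialEvaluationDatapoint

point = PartialEvaluationDatapoint(
    id=uuid.uuid4(),
    data={"question": "What is 2 + 2?"},  # placeholder payload
    target={"answer": "4"},               # placeholder payload
    index=0,
    trace_id=uuid.uuid4(),
    executor_span_id=uuid.uuid4(),
)

# Wire-format keys: id, data, target, index, traceId, executorSpanId.
print(point.to_dict())
```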
@@ -161,7 +186,6 @@ class EvaluationResultDatapoint(pydantic.BaseModel):
     human_evaluators: list[HumanEvaluator] = pydantic.Field(default_factory=list)
     trace_id: uuid.UUID
     executor_span_id: uuid.UUID
-    index: int
 
     # uuid is not serializable by default, so we need to convert it to a string
     def to_dict(self):
@@ -169,6 +193,7 @@ class EvaluationResultDatapoint(pydantic.BaseModel):
             return {
                 # preserve only preview of the data, target and executor output
                 # (full data is in trace)
+                "id": str(self.id),
                 "data": str(serialize(self.data))[:100],
                 "target": str(serialize(self.target))[:100],
                 "executorOutput": str(serialize(self.executor_output))[:100],
@@ -243,9 +268,9 @@ class LaminarSpanContext(pydantic.BaseModel):
     @classmethod
     def from_dict(cls, data: dict[str, Any]) -> "LaminarSpanContext":
         return cls(
-            trace_id=uuid.UUID(data["traceId"]),
-            span_id=uuid.UUID(data["spanId"]),
-            is_remote=data["isRemote"],
+            trace_id=uuid.UUID(data.get("traceId") or data.get("trace_id")),
+            span_id=uuid.UUID(data.get("spanId") or data.get("span_id")),
+            is_remote=data.get("isRemote") or data.get("is_remote") or False,
         )
 
     @classmethod
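`LaminarSpanContext.from_dict` now accepts either camelCase or snake_case keys and defaults `is_remote` to `False` when the flag is missing, which makes round-tripping contexts between services more forgiving. A brief sketch:

```python
import uuid

from lmnr.sdk.types import LaminarSpanContext

camel = {"traceId": str(uuid.uuid4()), "spanId": str(uuid.uuid4()), "isRemote": True}
snake = {"trace_id": str(uuid.uuid4()), "span_id": str(uuid.uuid4())}

ctx_a = LaminarSpanContext.from_dict(camel)
ctx_b = LaminarSpanContext.from_dict(snake)  # is_remote falls back to False
```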
lmnr/version.py CHANGED
@@ -3,7 +3,7 @@ import requests
 from packaging import version
 
 
-SDK_VERSION = "0.4.64"
+SDK_VERSION = "0.4.66"
 PYTHON_VERSION = f"{sys.version_info.major}.{sys.version_info.minor}"
 
 
@@ -1,6 +1,6 @@
 Metadata-Version: 2.3
 Name: lmnr
-Version: 0.4.64
+Version: 0.4.66
 Summary: Python SDK for Laminar
 License: Apache-2.0
 Author: lmnr.ai
@@ -45,60 +45,60 @@ Requires-Dist: grpcio (<1.68.0)
 Requires-Dist: opentelemetry-api (>=1.28.0)
 Requires-Dist: opentelemetry-exporter-otlp-proto-grpc (>=1.28.0)
 Requires-Dist: opentelemetry-exporter-otlp-proto-http (>=1.28.0)
-Requires-Dist: opentelemetry-instrumentation-alephalpha (>=0.37.1) ; extra == "alephalpha"
-Requires-Dist: opentelemetry-instrumentation-alephalpha (>=0.37.1) ; extra == "all"
-Requires-Dist: opentelemetry-instrumentation-anthropic (>=0.37.1) ; extra == "all"
-Requires-Dist: opentelemetry-instrumentation-anthropic (>=0.37.1) ; extra == "anthropic"
-Requires-Dist: opentelemetry-instrumentation-bedrock (>=0.37.1) ; extra == "all"
-Requires-Dist: opentelemetry-instrumentation-bedrock (>=0.37.1) ; extra == "bedrock"
-Requires-Dist: opentelemetry-instrumentation-chromadb (>=0.37.1) ; extra == "all"
-Requires-Dist: opentelemetry-instrumentation-chromadb (>=0.37.1) ; extra == "chromadb"
-Requires-Dist: opentelemetry-instrumentation-cohere (>=0.37.1) ; extra == "all"
-Requires-Dist: opentelemetry-instrumentation-cohere (>=0.37.1) ; extra == "cohere"
-Requires-Dist: opentelemetry-instrumentation-google-generativeai (>=0.37.1) ; extra == "all"
-Requires-Dist: opentelemetry-instrumentation-google-generativeai (>=0.37.1) ; extra == "google-generativeai"
-Requires-Dist: opentelemetry-instrumentation-groq (>=0.37.1) ; extra == "all"
-Requires-Dist: opentelemetry-instrumentation-groq (>=0.37.1) ; extra == "groq"
-Requires-Dist: opentelemetry-instrumentation-haystack (>=0.37.1) ; extra == "all"
-Requires-Dist: opentelemetry-instrumentation-haystack (>=0.37.1) ; extra == "haystack"
-Requires-Dist: opentelemetry-instrumentation-lancedb (>=0.37.1) ; extra == "all"
-Requires-Dist: opentelemetry-instrumentation-lancedb (>=0.37.1) ; extra == "lancedb"
-Requires-Dist: opentelemetry-instrumentation-langchain (>=0.37.1) ; extra == "all"
-Requires-Dist: opentelemetry-instrumentation-langchain (>=0.37.1) ; extra == "langchain"
-Requires-Dist: opentelemetry-instrumentation-llamaindex (>=0.37.1) ; extra == "all"
-Requires-Dist: opentelemetry-instrumentation-llamaindex (>=0.37.1) ; extra == "llamaindex"
-Requires-Dist: opentelemetry-instrumentation-marqo (>=0.37.1) ; extra == "all"
-Requires-Dist: opentelemetry-instrumentation-marqo (>=0.37.1) ; extra == "marqo"
-Requires-Dist: opentelemetry-instrumentation-milvus (>=0.37.1) ; extra == "all"
-Requires-Dist: opentelemetry-instrumentation-milvus (>=0.37.1) ; extra == "milvus"
-Requires-Dist: opentelemetry-instrumentation-mistralai (>=0.37.1) ; extra == "all"
-Requires-Dist: opentelemetry-instrumentation-mistralai (>=0.37.1) ; extra == "mistralai"
-Requires-Dist: opentelemetry-instrumentation-ollama (>=0.37.1) ; extra == "all"
-Requires-Dist: opentelemetry-instrumentation-ollama (>=0.37.1) ; extra == "ollama"
-Requires-Dist: opentelemetry-instrumentation-openai (>=0.37.1) ; extra == "all"
-Requires-Dist: opentelemetry-instrumentation-openai (>=0.37.1) ; extra == "openai"
-Requires-Dist: opentelemetry-instrumentation-pinecone (>=0.37.1) ; extra == "all"
-Requires-Dist: opentelemetry-instrumentation-pinecone (>=0.37.1) ; extra == "pinecone"
-Requires-Dist: opentelemetry-instrumentation-qdrant (>=0.37.1) ; extra == "all"
-Requires-Dist: opentelemetry-instrumentation-qdrant (>=0.37.1) ; extra == "qdrant"
-Requires-Dist: opentelemetry-instrumentation-replicate (>=0.37.1) ; extra == "all"
-Requires-Dist: opentelemetry-instrumentation-replicate (>=0.37.1) ; extra == "replicate"
+Requires-Dist: opentelemetry-instrumentation-alephalpha (>=0.38.12) ; extra == "alephalpha"
+Requires-Dist: opentelemetry-instrumentation-alephalpha (>=0.38.12) ; extra == "all"
+Requires-Dist: opentelemetry-instrumentation-anthropic (>=0.38.12) ; extra == "all"
+Requires-Dist: opentelemetry-instrumentation-anthropic (>=0.38.12) ; extra == "anthropic"
+Requires-Dist: opentelemetry-instrumentation-bedrock (>=0.38.12) ; extra == "all"
+Requires-Dist: opentelemetry-instrumentation-bedrock (>=0.38.12) ; extra == "bedrock"
+Requires-Dist: opentelemetry-instrumentation-chromadb (>=0.38.12) ; extra == "all"
+Requires-Dist: opentelemetry-instrumentation-chromadb (>=0.38.12) ; extra == "chromadb"
+Requires-Dist: opentelemetry-instrumentation-cohere (>=0.38.12) ; extra == "all"
+Requires-Dist: opentelemetry-instrumentation-cohere (>=0.38.12) ; extra == "cohere"
+Requires-Dist: opentelemetry-instrumentation-google-generativeai (>=0.38.12) ; extra == "all"
+Requires-Dist: opentelemetry-instrumentation-google-generativeai (>=0.38.12) ; extra == "google-generativeai"
+Requires-Dist: opentelemetry-instrumentation-groq (>=0.38.12) ; extra == "all"
+Requires-Dist: opentelemetry-instrumentation-groq (>=0.38.12) ; extra == "groq"
+Requires-Dist: opentelemetry-instrumentation-haystack (>=0.38.12) ; extra == "all"
+Requires-Dist: opentelemetry-instrumentation-haystack (>=0.38.12) ; extra == "haystack"
+Requires-Dist: opentelemetry-instrumentation-lancedb (>=0.38.12) ; extra == "all"
+Requires-Dist: opentelemetry-instrumentation-lancedb (>=0.38.12) ; extra == "lancedb"
+Requires-Dist: opentelemetry-instrumentation-langchain (>=0.38.12) ; extra == "all"
+Requires-Dist: opentelemetry-instrumentation-langchain (>=0.38.12) ; extra == "langchain"
+Requires-Dist: opentelemetry-instrumentation-llamaindex (>=0.38.12) ; extra == "all"
+Requires-Dist: opentelemetry-instrumentation-llamaindex (>=0.38.12) ; extra == "llamaindex"
+Requires-Dist: opentelemetry-instrumentation-marqo (>=0.38.12) ; extra == "all"
+Requires-Dist: opentelemetry-instrumentation-marqo (>=0.38.12) ; extra == "marqo"
+Requires-Dist: opentelemetry-instrumentation-milvus (>=0.38.12) ; extra == "all"
+Requires-Dist: opentelemetry-instrumentation-milvus (>=0.38.12) ; extra == "milvus"
+Requires-Dist: opentelemetry-instrumentation-mistralai (>=0.38.12) ; extra == "all"
+Requires-Dist: opentelemetry-instrumentation-mistralai (>=0.38.12) ; extra == "mistralai"
+Requires-Dist: opentelemetry-instrumentation-ollama (>=0.38.12) ; extra == "all"
+Requires-Dist: opentelemetry-instrumentation-ollama (>=0.38.12) ; extra == "ollama"
+Requires-Dist: opentelemetry-instrumentation-openai (>=0.38.12) ; extra == "all"
+Requires-Dist: opentelemetry-instrumentation-openai (>=0.38.12) ; extra == "openai"
+Requires-Dist: opentelemetry-instrumentation-pinecone (>=0.38.12) ; extra == "all"
+Requires-Dist: opentelemetry-instrumentation-pinecone (>=0.38.12) ; extra == "pinecone"
+Requires-Dist: opentelemetry-instrumentation-qdrant (>=0.38.12) ; extra == "all"
+Requires-Dist: opentelemetry-instrumentation-qdrant (>=0.38.12) ; extra == "qdrant"
+Requires-Dist: opentelemetry-instrumentation-replicate (>=0.38.12) ; extra == "all"
+Requires-Dist: opentelemetry-instrumentation-replicate (>=0.38.12) ; extra == "replicate"
 Requires-Dist: opentelemetry-instrumentation-requests (>=0.50b0)
-Requires-Dist: opentelemetry-instrumentation-sagemaker (>=0.37.1) ; extra == "all"
-Requires-Dist: opentelemetry-instrumentation-sagemaker (>=0.37.1) ; extra == "sagemaker"
+Requires-Dist: opentelemetry-instrumentation-sagemaker (>=0.38.12) ; extra == "all"
+Requires-Dist: opentelemetry-instrumentation-sagemaker (>=0.38.12) ; extra == "sagemaker"
 Requires-Dist: opentelemetry-instrumentation-sqlalchemy (>=0.50b0)
 Requires-Dist: opentelemetry-instrumentation-threading (>=0.50b0)
-Requires-Dist: opentelemetry-instrumentation-together (>=0.37.1) ; extra == "all"
-Requires-Dist: opentelemetry-instrumentation-together (>=0.37.1) ; extra == "together"
-Requires-Dist: opentelemetry-instrumentation-transformers (>=0.37.1) ; extra == "all"
-Requires-Dist: opentelemetry-instrumentation-transformers (>=0.37.1) ; extra == "transformers"
+Requires-Dist: opentelemetry-instrumentation-together (>=0.38.12) ; extra == "all"
+Requires-Dist: opentelemetry-instrumentation-together (>=0.38.12) ; extra == "together"
+Requires-Dist: opentelemetry-instrumentation-transformers (>=0.38.12) ; extra == "all"
+Requires-Dist: opentelemetry-instrumentation-transformers (>=0.38.12) ; extra == "transformers"
 Requires-Dist: opentelemetry-instrumentation-urllib3 (>=0.50b0)
-Requires-Dist: opentelemetry-instrumentation-vertexai (>=0.37.1) ; extra == "all"
-Requires-Dist: opentelemetry-instrumentation-vertexai (>=0.37.1) ; extra == "vertexai"
-Requires-Dist: opentelemetry-instrumentation-watsonx (>=0.37.1) ; extra == "all"
-Requires-Dist: opentelemetry-instrumentation-watsonx (>=0.37.1) ; extra == "watsonx"
-Requires-Dist: opentelemetry-instrumentation-weaviate (>=0.37.1) ; extra == "all"
-Requires-Dist: opentelemetry-instrumentation-weaviate (>=0.37.1) ; extra == "weaviate"
+Requires-Dist: opentelemetry-instrumentation-vertexai (>=0.38.12) ; extra == "all"
+Requires-Dist: opentelemetry-instrumentation-vertexai (>=0.38.12) ; extra == "vertexai"
+Requires-Dist: opentelemetry-instrumentation-watsonx (>=0.38.12) ; extra == "all"
+Requires-Dist: opentelemetry-instrumentation-watsonx (>=0.38.12) ; extra == "watsonx"
+Requires-Dist: opentelemetry-instrumentation-weaviate (>=0.38.12) ; extra == "all"
+Requires-Dist: opentelemetry-instrumentation-weaviate (>=0.38.12) ; extra == "weaviate"
 Requires-Dist: opentelemetry-sdk (>=1.28.0)
 Requires-Dist: opentelemetry-semantic-conventions-ai (>=0.4.2)
 Requires-Dist: pydantic (>=2.0.3)