langtrace-python-sdk 3.5.0__py3-none-any.whl → 3.6.0__py3-none-any.whl

This diff shows the content of publicly available package versions released to one of the supported registries. It is provided for informational purposes only and reflects the changes between package versions as they appear in their respective public registries.
examples/graphlit_example/__init__.py (new file)
@@ -0,0 +1,9 @@
+import asyncio
+from examples.graphlit_example.conversation import complete
+from langtrace_python_sdk import with_langtrace_root_span
+
+
+class GraphlitRunner:
+    @with_langtrace_root_span("GraphlitRun")
+    def run(self):
+        asyncio.run(complete())
examples/graphlit_example/conversation.py (new file)
@@ -0,0 +1,87 @@
+import asyncio
+from dotenv import find_dotenv, load_dotenv
+from langtrace_python_sdk import langtrace
+from langtrace_python_sdk.utils.with_root_span import with_langtrace_root_span
+from openai import OpenAI
+from graphlit import Graphlit
+from graphlit_api import input_types, enums, exceptions
+
+_ = load_dotenv(find_dotenv())
+
+langtrace.init()
+
+
+graphlit = Graphlit()
+
+langtrace.init()
+client = OpenAI()
+
+
+async def complete():
+    uri = "https://themixchief.com"
+    try:
+        ingest_response = await graphlit.client.ingest_uri(uri=uri, is_synchronous=True)
+        content_id = ingest_response.ingest_uri.id if ingest_response.ingest_uri is not None else None
+
+        if content_id is not None:
+            print(f'Ingested content [{content_id}]:')
+
+            prompt = "In 1 sentence, what does mixchief do."
+
+            model = "gpt-4o"
+
+            specification_input = input_types.SpecificationInput(
+                name=f"OpenAI [{str(enums.OpenAIModels.GPT4O_128K)}]",
+                type=enums.SpecificationTypes.COMPLETION,
+                serviceType=enums.ModelServiceTypes.OPEN_AI,
+                openAI=input_types.OpenAIModelPropertiesInput(
+                    model=enums.OpenAIModels.GPT4O_128K,
+                )
+            )
+
+            specification_response = await graphlit.client.create_specification(specification_input)
+            specification_id = specification_response.create_specification.id if specification_response.create_specification is not None else None
+
+            if specification_id is not None:
+                print(f'Created specification [{specification_id}].')
+
+                conversation_input = input_types.ConversationInput(
+                    name="Conversation",
+                    specification=input_types.EntityReferenceInput(
+                        id=specification_id
+                    ),
+                )
+
+                conversation_response = await graphlit.client.create_conversation(conversation_input)
+                conversation_id = conversation_response.create_conversation.id if conversation_response.create_conversation is not None else None
+
+                if conversation_id is not None:
+                    print(f'Created conversation [{conversation_id}].')
+
+                    format_response = await graphlit.client.format_conversation(prompt, conversation_id)
+                    formatted_message = format_response.format_conversation.message.message if format_response.format_conversation is not None and format_response.format_conversation.message is not None else None
+
+                    if formatted_message is not None:
+                        stream_response = client.chat.completions.create(
+                            model=model,
+                            messages=[{"role": "user", "content": formatted_message}],
+                            temperature=0.1,
+                            top_p=0.2,
+                            stream=True
+                        )
+
+                        completion = ""
+
+                        for chunk in stream_response:
+                            delta = chunk.choices[0].delta.content
+
+                            if delta is not None:
+                                completion += delta
+
+                        if completion is not None:
+                            # NOTE: stores completion back into conversation
+                            complete_response = await graphlit.client.complete_conversation(completion, conversation_id)
+
+                            print(complete_response.complete_conversation.message.message if complete_response.complete_conversation is not None and complete_response.complete_conversation.message is not None else "None")
+    except exceptions.GraphQLClientError as e:
+        print(f"Graphlit API error: {e}")
langtrace_python_sdk/constants/instrumentation/common.py
@@ -40,6 +40,7 @@ SERVICE_PROVIDERS = {
     "AWS_BEDROCK": "AWS Bedrock",
     "CEREBRAS": "Cerebras",
     "MILVUS": "Milvus",
+    "GRAPHLIT": "Graphlit",
 }
 
 LANGTRACE_ADDITIONAL_SPAN_ATTRIBUTES_KEY = "langtrace_additional_attributes"
langtrace_python_sdk/instrumentation/__init__.py
@@ -26,6 +26,10 @@ from .pymongo import PyMongoInstrumentation
 from .qdrant import QdrantInstrumentation
 from .vertexai import VertexAIInstrumentation
 from .weaviate import WeaviateInstrumentation
+from .cerebras import CerebrasInstrumentation
+from .milvus import MilvusInstrumentation
+from .google_genai import GoogleGenaiInstrumentation
+from .graphlit import GraphlitInstrumentation
 
 __all__ = [
     "AnthropicInstrumentation",
@@ -56,4 +60,5 @@ __all__ = [
     "MilvusInstrumentation",
     "GoogleGenaiInstrumentation",
     "CrewaiToolsInstrumentation",
+    "GraphlitInstrumentation",
 ]
langtrace_python_sdk/instrumentation/graphlit/__init__.py (new file)
@@ -0,0 +1,3 @@
+from .instrumentation import GraphlitInstrumentation
+
+__all__ = ["GraphlitInstrumentation"]
langtrace_python_sdk/instrumentation/graphlit/instrumentation.py (new file)
@@ -0,0 +1,57 @@
+from opentelemetry.instrumentation.instrumentor import BaseInstrumentor
+from opentelemetry.trace import get_tracer
+from wrapt import wrap_function_wrapper as _W
+from typing import Collection
+from importlib_metadata import version as v
+from .patch import patch_graphlit_operation
+
+class GraphlitInstrumentation(BaseInstrumentor):
+
+    def instrumentation_dependencies(self) -> Collection[str]:
+        return ["graphlit-client >= 1.0.0"]
+
+    def _instrument(self, **kwargs):
+        tracer_provider = kwargs.get("tracer_provider")
+        tracer = get_tracer(__name__, "", tracer_provider)
+        version = v("graphlit-client")
+        try:
+            _W(
+                "graphlit.graphlit",
+                "Client.ingest_uri",
+                patch_graphlit_operation("ingest_uri", version, tracer),
+            )
+            _W(
+                "graphlit.graphlit",
+                "Client.create_feed",
+                patch_graphlit_operation("create_feed", version, tracer),
+            )
+            _W(
+                "graphlit.graphlit",
+                "Client.create_specification",
+                patch_graphlit_operation("create_specification", version, tracer),
+            )
+            _W(
+                "graphlit.graphlit",
+                "Client.create_conversation",
+                patch_graphlit_operation("create_conversation", version, tracer),
+            )
+            _W(
+                "graphlit.graphlit",
+                "Client.format_conversation",
+                patch_graphlit_operation("format_conversation", version, tracer),
+            )
+            _W(
+                "graphlit.graphlit",
+                "Client.complete_conversation",
+                patch_graphlit_operation("complete_conversation", version, tracer),
+            )
+            _W(
+                "graphlit.graphlit",
+                "Client.prompt_conversation",
+                patch_graphlit_operation("prompt_conversation", version, tracer),
+            )
+        except Exception:
+            pass
+
+    def _uninstrument(self, **kwargs):
+        pass
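
Editor's note: the instrumentor above relies on wrapt's wrap_function_wrapper (imported as _W) to replace each async Client method with a traced coroutine. The following is a minimal sketch of that pattern outside the SDK, not the SDK's own code; DemoClient, demo_method, and make_wrapper are hypothetical stand-ins, and only wrapt is assumed to be installed.

    import asyncio

    from wrapt import wrap_function_wrapper


    class DemoClient:
        async def demo_method(self, x):
            # Stand-in for an async API call such as Client.ingest_uri.
            return x * 2


    def make_wrapper(operation_name):
        # Same shape as patch_graphlit_operation: a factory returning an async wrapt wrapper.
        async def traced(wrapped, instance, args, kwargs):
            print(f"start {operation_name}")            # a span would start here
            result = await wrapped(*args, **kwargs)     # call the original coroutine
            print(f"end {operation_name} -> {result}")  # attributes/status would be set here
            return result
        return traced


    # Mirrors _W("graphlit.graphlit", "Client.ingest_uri", patch_graphlit_operation(...)).
    wrap_function_wrapper(__name__, "DemoClient.demo_method", make_wrapper("demo_method"))

    print(asyncio.run(DemoClient().demo_method(21)))  # prints the hooks, then 42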
langtrace_python_sdk/instrumentation/graphlit/patch.py (new file)
@@ -0,0 +1,69 @@
+import json
+from importlib_metadata import version as v
+from langtrace.trace_attributes import FrameworkSpanAttributes
+from opentelemetry import baggage
+from opentelemetry.trace import Span, SpanKind, Tracer
+from opentelemetry.trace.status import Status, StatusCode
+
+from langtrace_python_sdk.constants import LANGTRACE_SDK_NAME
+from langtrace_python_sdk.constants.instrumentation.common import (
+    LANGTRACE_ADDITIONAL_SPAN_ATTRIBUTES_KEY,
+    SERVICE_PROVIDERS,
+)
+from langtrace_python_sdk.utils.llm import set_span_attributes
+from langtrace_python_sdk.utils.misc import serialize_args, serialize_kwargs
+
+
+def patch_graphlit_operation(operation_name, version, tracer: Tracer):
+    async def traced_method(wrapped, instance, args, kwargs):
+        service_provider = SERVICE_PROVIDERS["GRAPHLIT"]
+        extra_attributes = baggage.get_baggage(LANGTRACE_ADDITIONAL_SPAN_ATTRIBUTES_KEY)
+        span_attributes = {
+            "langtrace.sdk.name": "langtrace-python-sdk",
+            "langtrace.service.name": service_provider,
+            "langtrace.service.type": "framework",
+            "langtrace.service.version": version,
+            "langtrace.version": v(LANGTRACE_SDK_NAME),
+            **(extra_attributes if extra_attributes is not None else {}),
+        }
+
+        span_attributes["langchain.metadata"] = serialize_kwargs(**kwargs)
+        span_attributes["langchain.inputs"] = serialize_args(*args)
+
+        attributes = FrameworkSpanAttributes(**span_attributes)
+
+        with tracer.start_as_current_span(
+            name=f"graphlit.{operation_name}", kind=SpanKind.CLIENT
+        ) as span:
+            try:
+                set_span_attributes(span, attributes)
+                result = await wrapped(*args, **kwargs)
+
+                if result:
+                    operation_result = json.loads(result.model_dump_json())[operation_name]
+                    if operation_name == "complete_conversation" or operation_name == "prompt_conversation" or operation_name == "format_conversation":
+                        set_graphlit_conversation_attributes(span, operation_result)
+                    else:
+                        for key, value in operation_result.items():
+                            span.set_attribute(f"graphlit.{operation_name}.{key}", str(value))
+
+                span.set_status(Status(StatusCode.OK))
+
+                return result
+
+            except Exception as err:
+                span.record_exception(err)
+                span.set_status(Status(StatusCode.ERROR, str(err)))
+                raise
+
+    return traced_method
+
+
+def set_graphlit_conversation_attributes(span: Span, response):
+    if not response or "message" not in response:
+        return
+
+    span.set_attribute(f"graphlit.conversation.id", response['conversation']['id'])
+
+    for key, value in response['message'].items():
+        span.set_attribute(f"graphlit.conversation.{key}", str(value))
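
Editor's note: for the conversation-style operations (format_conversation, complete_conversation, prompt_conversation), the helper above records the conversation id and then flattens every field of the response message onto the span. A self-contained sketch of that flattening against a plain dict; the payload and its field names are invented for illustration, not Graphlit's actual schema.

    # Hypothetical operation_result dict, shaped like what patch.py passes in.
    response = {
        "conversation": {"id": "conv-123"},
        "message": {"role": "ASSISTANT", "message": "MixChief helps ...", "tokens": 42},
    }

    # Same flattening logic as set_graphlit_conversation_attributes, collected into a dict.
    attributes = {"graphlit.conversation.id": response["conversation"]["id"]}
    for key, value in response["message"].items():
        attributes[f"graphlit.conversation.{key}"] = str(value)

    print(attributes)
    # {'graphlit.conversation.id': 'conv-123', 'graphlit.conversation.role': 'ASSISTANT',
    #  'graphlit.conversation.message': 'MixChief helps ...', 'graphlit.conversation.tokens': '42'}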
langtrace_python_sdk/langtrace.py
@@ -29,9 +29,52 @@ from opentelemetry.exporter.otlp.proto.http.trace_exporter import \
 from opentelemetry.instrumentation.sqlalchemy import SQLAlchemyInstrumentor
 from opentelemetry.sdk.resources import SERVICE_NAME, Resource
 from opentelemetry.sdk.trace import TracerProvider
-from opentelemetry.sdk.trace.export import (BatchSpanProcessor,
-                                            ConsoleSpanExporter,
-                                            SimpleSpanProcessor)
+from opentelemetry.sdk.trace.export import (
+    BatchSpanProcessor,
+    ConsoleSpanExporter,
+    SimpleSpanProcessor,
+)
+
+from opentelemetry.exporter.otlp.proto.grpc.trace_exporter import (
+    OTLPSpanExporter as GRPCExporter,
+)
+from opentelemetry.exporter.otlp.proto.http.trace_exporter import (
+    OTLPSpanExporter as HTTPExporter,
+)
+from langtrace_python_sdk.constants.exporter.langtrace_exporter import (
+    LANGTRACE_REMOTE_URL,
+    LANGTRACE_SESSION_ID_HEADER,
+)
+from langtrace_python_sdk.instrumentation import (
+    AnthropicInstrumentation,
+    ChromaInstrumentation,
+    CohereInstrumentation,
+    CrewAIInstrumentation,
+    DspyInstrumentation,
+    EmbedchainInstrumentation,
+    GeminiInstrumentation,
+    GroqInstrumentation,
+    LangchainCommunityInstrumentation,
+    LangchainCoreInstrumentation,
+    LangchainInstrumentation,
+    LanggraphInstrumentation,
+    LiteLLMInstrumentation,
+    LlamaindexInstrumentation,
+    MistralInstrumentation,
+    AWSBedrockInstrumentation,
+    OllamaInstrumentor,
+    OpenAIInstrumentation,
+    PineconeInstrumentation,
+    QdrantInstrumentation,
+    AutogenInstrumentation,
+    VertexAIInstrumentation,
+    WeaviateInstrumentation,
+    PyMongoInstrumentation,
+    CerebrasInstrumentation,
+    MilvusInstrumentation,
+    GoogleGenaiInstrumentation,
+    GraphlitInstrumentation,
+)
 from opentelemetry.util.re import parse_env_headers
 from sentry_sdk.types import Event, Hint
 
@@ -279,6 +322,7 @@ def init(
         "google-cloud-aiplatform": VertexAIInstrumentation(),
         "google-generativeai": GeminiInstrumentation(),
         "google-genai": GoogleGenaiInstrumentation(),
+        "graphlit-client": GraphlitInstrumentation(),
         "mistralai": MistralInstrumentation(),
         "boto3": AWSBedrockInstrumentation(),
         "autogen": AutogenInstrumentation(),
langtrace_python_sdk/version.py
@@ -1 +1 @@
-__version__ = "3.5.0"
+__version__ = "3.6.0"
langtrace_python_sdk-3.6.0.dist-info/METADATA
@@ -1,6 +1,6 @@
 Metadata-Version: 2.4
 Name: langtrace-python-sdk
-Version: 3.5.0
+Version: 3.6.0
 Summary: Python SDK for LangTrace
 Project-URL: Homepage, https://github.com/Scale3-Labs/langtrace-python-sdk
 Author-email: Scale3 Labs <engineering@scale3labs.com>
@@ -23,7 +23,7 @@ Requires-Dist: sentry-sdk>=2.14.0
 Requires-Dist: setuptools
 Requires-Dist: sqlalchemy
 Requires-Dist: tiktoken>=0.1.1
-Requires-Dist: trace-attributes==7.1.1
+Requires-Dist: trace-attributes==7.2.0
 Requires-Dist: transformers>=4.11.3
 Requires-Dist: ujson>=5.10.0
 Provides-Extra: dev
@@ -33,6 +33,7 @@ Requires-Dist: cohere; extra == 'dev'
 Requires-Dist: embedchain; extra == 'dev'
 Requires-Dist: google-cloud-aiplatform; extra == 'dev'
 Requires-Dist: google-generativeai; extra == 'dev'
+Requires-Dist: graphlit-client; extra == 'dev'
 Requires-Dist: groq; extra == 'dev'
 Requires-Dist: langchain; extra == 'dev'
 Requires-Dist: langchain-community; extra == 'dev'
langtrace_python_sdk-3.6.0.dist-info/RECORD
@@ -56,6 +56,8 @@ examples/gemini_example/function_tools.py,sha256=ZOBrdPy_8s3NDfsF5A4RXIoUi2VXlD8
 examples/gemini_example/main.py,sha256=issqP-0ofP2NzscXuT4ZoD9TemAyTt04o_EhhIrUo7g,2480
 examples/google_genai_example/__init__.py,sha256=k2Hp4asgvKzrKV_E8sJ-_TDxCvP7aLXZdV-dxVUJKwo,175
 examples/google_genai_example/main.py,sha256=DcEom8iqlTRFZhDxGEpBNs5GZDOHiZkVakSBM2bwcx4,717
+examples/graphlit_example/__init__.py,sha256=12d7tBnPRdrsDv2szKjc_idu8VdG7ovjvQLo1ko9NdM,253
+examples/graphlit_example/conversation.py,sha256=1mQCADquj_zCxB-XS9KZuLYy7fSe0GTkL320DEKvhRQ,3635
 examples/hiveagent_example/basic.py,sha256=Sd7I5w8w5Xx7ODaydTY30yiq9HwJDMKHQywrZjgehP0,441
 examples/inspect_ai_example/basic_eval.py,sha256=hDg2BB9ONNpOGRVH08HsghnS1373sOnq6dyDmUQd9gY,1040
 examples/langchain_example/__init__.py,sha256=hYsiJzR050yCwip4-KZD9EhSLeBMeOUU1eQzyXSfwRs,669
@@ -110,8 +112,8 @@ examples/vertexai_example/main.py,sha256=gndId5X5ksD-ycxnAWMdEqIDbLc3kz5Vt8vm4YP
 examples/weaviate_example/__init__.py,sha256=8JMDBsRSEV10HfTd-YC7xb4txBjD3la56snk-Bbg2Kw,618
 examples/weaviate_example/query_text.py,sha256=wPHQTc_58kPoKTZMygVjTj-2ZcdrIuaausJfMxNQnQc,127162
 langtrace_python_sdk/__init__.py,sha256=VZM6i71NR7pBQK6XvJWRelknuTYUhqwqE7PlicKa5Wg,1166
-langtrace_python_sdk/langtrace.py,sha256=ekJbkRnmJ2VHv0nrfqK8NiRvYI2ZA7YHK7r12_TiQCU,13425
-langtrace_python_sdk/version.py,sha256=j0HWOe68EJbDKQHKURp6LgON8HyoWHbKmvvCvTB1OzA,22
+langtrace_python_sdk/langtrace.py,sha256=SeJdpry-dICKwytlzEovX33s-bJahGD5PTwpJb7dqVo,14618
+langtrace_python_sdk/version.py,sha256=pnBAJI_5J7ByFo6sxUkzCfK3L9wcGJP7yeF7OJLugqk,22
 langtrace_python_sdk/constants/__init__.py,sha256=3CNYkWMdd1DrkGqzLUgNZXjdAlM6UFMlf_F-odAToyc,146
 langtrace_python_sdk/constants/exporter/langtrace_exporter.py,sha256=EVCrouYCpY98f0KSaKr4PzNxPULTZZO6dSA_crEOyJU,106
 langtrace_python_sdk/constants/instrumentation/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
@@ -119,7 +121,7 @@ langtrace_python_sdk/constants/instrumentation/anthropic.py,sha256=YX3llt3zwDY6X
 langtrace_python_sdk/constants/instrumentation/aws_bedrock.py,sha256=QwKtO4NBarOZoGkt5cFCcpxAw3zvZxcMMWBbzPPGv-g,422
 langtrace_python_sdk/constants/instrumentation/chroma.py,sha256=hiPGYdHS0Yj4Kh3eaYBbuCAl_swqIygu80yFqkOgdak,955
 langtrace_python_sdk/constants/instrumentation/cohere.py,sha256=9yD133VdrYZ5BoJR4nJHlj67gHEImB9-KsD-NkzHW1I,1159
-langtrace_python_sdk/constants/instrumentation/common.py,sha256=CLL1YNXvmCSuYDCZUMUcs_plomCPcBqjITBt2uiN0RE,1224
+langtrace_python_sdk/constants/instrumentation/common.py,sha256=8yPsGPo0q8PBXkST74mFKlmlg54c-iPSzc-dqeLigew,1252
 langtrace_python_sdk/constants/instrumentation/embedchain.py,sha256=HodCJvaFjILoOG50OwFObxfVxt_8VUaIAIqvgoN3tzo,278
 langtrace_python_sdk/constants/instrumentation/gemini.py,sha256=UAmfgg9FM7uNeOCdPfWlir6OIH-8BoxFGPRpdBd9ZZs,358
 langtrace_python_sdk/constants/instrumentation/groq.py,sha256=VFXmIl4aqGY_fS0PAmjPj_Qm7Tibxbx7Ur_e7rQpqXc,134
@@ -136,7 +138,7 @@ langtrace_python_sdk/constants/instrumentation/weaviate.py,sha256=gtv-JBxvNGClEM
 langtrace_python_sdk/extensions/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
 langtrace_python_sdk/extensions/langtrace_exporter.py,sha256=ckd8dMmY6h2oxE04p1JFLwUB5PSJX_Cy4eDFEM6aj4Y,6605
 langtrace_python_sdk/extensions/langtrace_filesystem.py,sha256=34fZutG28EJ66l67OvTGsydAH3ZpXgikdE7hVLqBpG4,7863
-langtrace_python_sdk/instrumentation/__init__.py,sha256=eu3dzgG3rjwFPYLqMkvc-7xV15QRz37Bhe1LavNA7-s,2176
+langtrace_python_sdk/instrumentation/__init__.py,sha256=V7RS5u2L4wNBbaLpFlNBykj_sGVFBj63CDM2VYrlSFM,2394
 langtrace_python_sdk/instrumentation/anthropic/__init__.py,sha256=donrurJAGYlxrSRA3BIf76jGeUcAx9Tq8CVpah68S0Y,101
 langtrace_python_sdk/instrumentation/anthropic/instrumentation.py,sha256=ndXdruI0BG7n75rsuEpKjfzePxrZxg40gZ39ONmD_v4,1845
 langtrace_python_sdk/instrumentation/anthropic/patch.py,sha256=ztPN4VZujoxYOKhTbFnup7Ibms9NAzYCPAJY43NUgKw,4935
@@ -176,6 +178,9 @@ langtrace_python_sdk/instrumentation/gemini/patch.py,sha256=PG5E5v253x2ufQ81-aUU
 langtrace_python_sdk/instrumentation/google_genai/__init__.py,sha256=nQ2kSsUDpLGZ_kz2QGddDwnawL9juVA7pW4G-BOe5VI,98
 langtrace_python_sdk/instrumentation/google_genai/instrumentation.py,sha256=oqKDhVcRRHhD5tK3adAIQo9Bpe7CA-k33gla1IymwXU,1080
 langtrace_python_sdk/instrumentation/google_genai/patch.py,sha256=X0TWY1D4XHweaNu70PlXBDzKEaHIibLpkJiIsp4jF6A,4115
+langtrace_python_sdk/instrumentation/graphlit/__init__.py,sha256=mkUPwozR-I-D5ZrumJs1gS7sSouY0UN68Ne9LiNEBTs,92
+langtrace_python_sdk/instrumentation/graphlit/instrumentation.py,sha256=V9GRZ6cj2lt20xCVfL55lGU4p0HlZxAYUqwpWogXDtY,2074
+langtrace_python_sdk/instrumentation/graphlit/patch.py,sha256=4LNqY8hOnkRKJ8QWc7mczs1vs-4Tvfug57B55omB4Cg,2928
 langtrace_python_sdk/instrumentation/groq/__init__.py,sha256=ZXeq_nrej6Lm_uoMFEg8wbSejhjB2UJ5IoHQBPc2-C0,91
 langtrace_python_sdk/instrumentation/groq/instrumentation.py,sha256=Ttf07XVKhdYY1_fqJc7QWiSdmgEhEVyQB_3Az2_wqYo,1832
 langtrace_python_sdk/instrumentation/groq/patch.py,sha256=J0h8SXEw2LyMIJhKZTVydEysyKfSLWkCuhEharzDS4w,23161
@@ -280,8 +285,8 @@ tests/pinecone/cassettes/test_query.yaml,sha256=b5v9G3ssUy00oG63PlFUR3JErF2Js-5A
 tests/pinecone/cassettes/test_upsert.yaml,sha256=neWmQ1v3d03V8WoLl8FoFeeCYImb8pxlJBWnFd_lITU,38607
 tests/qdrant/conftest.py,sha256=9n0uHxxIjWk9fbYc4bx-uP8lSAgLBVx-cV9UjnsyCHM,381
 tests/qdrant/test_qdrant.py,sha256=pzjAjVY2kmsmGfrI2Gs2xrolfuaNHz7l1fqGQCjp5_o,3353
-langtrace_python_sdk-3.5.0.dist-info/METADATA,sha256=o2pEymNDig6HG7KcrftPBRhqH6mDgSGrlgpiOMBIBfo,15643
-langtrace_python_sdk-3.5.0.dist-info/WHEEL,sha256=qtCwoSJWgHk21S1Kb4ihdzI2rlJ1ZKaIurTj_ngOhyQ,87
-langtrace_python_sdk-3.5.0.dist-info/entry_points.txt,sha256=1_b9-qvf2fE7uQNZcbUei9vLpFZBbbh9LrtGw95ssAo,70
-langtrace_python_sdk-3.5.0.dist-info/licenses/LICENSE,sha256=QwcOLU5TJoTeUhuIXzhdCEEDDvorGiC6-3YTOl4TecE,11356
-langtrace_python_sdk-3.5.0.dist-info/RECORD,,
+langtrace_python_sdk-3.6.0.dist-info/METADATA,sha256=L4S2ZdCsOhBL5aDTgAACAYdGyHL1P8zOR-wwmB3Zcgo,15690
+langtrace_python_sdk-3.6.0.dist-info/WHEEL,sha256=qtCwoSJWgHk21S1Kb4ihdzI2rlJ1ZKaIurTj_ngOhyQ,87
+langtrace_python_sdk-3.6.0.dist-info/entry_points.txt,sha256=1_b9-qvf2fE7uQNZcbUei9vLpFZBbbh9LrtGw95ssAo,70
+langtrace_python_sdk-3.6.0.dist-info/licenses/LICENSE,sha256=QwcOLU5TJoTeUhuIXzhdCEEDDvorGiC6-3YTOl4TecE,11356
+langtrace_python_sdk-3.6.0.dist-info/RECORD,,