langtrace-python-sdk 2.0.6__py3-none-any.whl → 2.0.8__py3-none-any.whl

This diff compares the contents of two publicly released versions of the package as published to one of the supported registries. It is provided for informational purposes only and reflects the changes between the versions as they appear in the public registry.
@@ -1,81 +1,91 @@
- from unittest.mock import MagicMock, patch, call
- from langtrace_python_sdk.instrumentation.pinecone.patch import generic_patch
- from opentelemetry.trace import SpanKind
- import importlib.metadata
- import pinecone
- from opentelemetry.trace import SpanKind
- from opentelemetry.trace.status import Status, StatusCode
+ import pytest
+ import importlib
+ import json
  from langtrace_python_sdk.constants.instrumentation.common import SERVICE_PROVIDERS
  from langtrace_python_sdk.constants.instrumentation.pinecone import APIS
- import unittest
- import json
- from tests.utils import common_setup
-
-
- class TestPinecone(unittest.TestCase):
-     data = {
-         "status": "success",
-         "message": "Data upserted successfully",
-         "upserted_ids": [1, 2, 3],
-     }
-
-     def setUp(self):
-         self.pinecone_mock, self.tracer, self.span = common_setup(
-             self.data, "pinecone.Index.upsert"
-         )

-     def tearDown(self):
-         self.pinecone_mock.stop()

-     def test_pinecone(self):
+ def create_embedding(openai_client):
+     result = openai_client.embeddings.create(
+         model="text-embedding-ada-002",
+         input="Some random text string goes here",
+         encoding_format="float",
+     )
+     return result.data[0].embedding

-         # Arrange
-         version = importlib.metadata.version("pinecone-client")
-         method = "UPSERT"
-         vectors = [[1, 2, 3], [4, 5, 6]]

-         # Act
-         wrapped_function = generic_patch(
-             pinecone.Index.upsert, method, version, self.tracer
-         )
-         result = wrapped_function(MagicMock(), MagicMock(), (vectors,), {})
-
-         # Assert
-         self.assertTrue(
-             self.tracer.start_as_current_span.called_once_with(
-                 "pinecone.data.index", kind=SpanKind.CLIENT
-             )
-         )
-         api = APIS[method]
-         service_provider = SERVICE_PROVIDERS["PINECONE"]
-         expected_attributes = {
-             "langtrace.sdk.name": "langtrace-python-sdk",
-             "langtrace.service.name": service_provider,
-             "langtrace.service.type": "vectordb",
-             "langtrace.service.version": version,
-             "langtrace.version": "1.0.0",
-             "db.system": "pinecone",
-             "db.operation": api["OPERATION"],
-         }
-         self.assertTrue(
-             self.span.set_attribute.has_calls(
-                 [call(key, value) for key, value in expected_attributes.items()],
-                 any_order=True,
-             )
-         )
-
-         actual_calls = self.span.set_attribute.call_args_list
-
-         for key, value in expected_attributes.items():
-             self.assertIn(call(key, value), actual_calls)
+ @pytest.mark.vcr()
+ def test_upsert(openai_client, pinecone_client, exporter):
+     embedding = create_embedding(openai_client)
+     unique_id = "unique_random_id"
+     data_to_upsert = {
+         "id": unique_id,
+         "values": embedding,
+         "metadata": {"random": "random"},
+     }
+     index = pinecone_client.Index("test-index")
+     index.upsert(vectors=[data_to_upsert], namespace="test-namespace")
+     spans = exporter.get_finished_spans()
+     pinecone_span = spans[-1]

-         self.assertEqual(self.span.set_status.call_count, 1)
-         self.assertTrue(self.span.set_status.has_calls([call(Status(StatusCode.OK))]))
+     assert pinecone_span.name == APIS["UPSERT"]["METHOD"]
+     attributes = pinecone_span.attributes

-         expected_result = ["status", "message", "upserted_ids"]
-         result_keys = json.loads(result).keys()
-         self.assertSetEqual(set(expected_result), set(result_keys), "Keys mismatch")
+     assert attributes.get("langtrace.sdk.name") == "langtrace-python-sdk"
+     assert attributes.get("langtrace.service.name") == SERVICE_PROVIDERS["PINECONE"]
+     assert attributes.get("langtrace.service.type") == "vectordb"
+     assert attributes.get("langtrace.service.version") == importlib.metadata.version(
+         "pinecone-client"
+     )
+     assert attributes.get("langtrace.version") == "1.0.0"
+     assert attributes.get("db.system") == "pinecone"
+     assert attributes.get("db.operation") == APIS["UPSERT"]["OPERATION"]


- if __name__ == "__main__":
-     unittest.main()
+ @pytest.mark.vcr()
+ def test_query(openai_client, pinecone_client, exporter):
+     embedding = create_embedding(openai_client)
+     unique_id = "unique_random_id"
+     data_to_upsert = {
+         "id": unique_id,
+         "values": embedding,
+         "metadata": {"random": "random"},
+     }
+     index = pinecone_client.Index("test-index")
+     index.upsert(vectors=[data_to_upsert], namespace="test-namespace")
+     filter = {"random": "random"}
+     res = index.query(
+         vector=embedding,
+         top_k=3,
+         include_values=True,
+         namespace="test-namespace",
+         include_metadata=True,
+         filter=filter,
+     )
+     spans = exporter.get_finished_spans()
+     query_span = spans[-1]
+     assert query_span.name == APIS["QUERY"]["METHOD"]
+     attributes = query_span.attributes
+     assert attributes.get("langtrace.sdk.name") == "langtrace-python-sdk"
+     assert attributes.get("langtrace.service.name") == SERVICE_PROVIDERS["PINECONE"]
+     assert attributes.get("langtrace.service.type") == "vectordb"
+     assert attributes.get("langtrace.service.version") == importlib.metadata.version(
+         "pinecone-client"
+     )
+     assert attributes.get("langtrace.version") == "1.0.0"
+     assert attributes.get("db.system") == "pinecone"
+     assert attributes.get("db.operation") == APIS["QUERY"]["OPERATION"]
+     assert attributes.get("db.query.top_k") == 3
+     assert attributes.get("db.query.namespace") == "test-namespace"
+     assert attributes.get("db.query.include_values") is True
+     assert attributes.get("db.query.include_metadata") is True
+     assert attributes.get("db.query.usage.read_units") == 6
+     assert json.loads(attributes.get("db.query.filter")) == filter
+     res_matches = res.matches
+     events = query_span.events
+     assert len(res_matches) == len(events)
+     for idx, event in enumerate(events):
+         assert event.name == "db.query.match"
+         attrs = event.attributes
+         assert attrs.get("db.query.match.id") == res_matches[idx].id
+         assert attrs.get("db.query.match.score") == res_matches[idx].score
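The rewritten Pinecone tests above depend on openai_client, pinecone_client, and exporter fixtures that are not part of this diff. The following is a minimal sketch of what such a conftest could look like, modeled on the Qdrant conftest added later in this release; the PineconeInstrumentation import path, the in-memory exporter wiring, and the API-key handling are assumptions rather than the package's actual test configuration.

# Hypothetical conftest sketch: the fixture names match the tests above, but the
# wiring below is assumed, not taken from the released package.
import os

import pytest
from openai import OpenAI
from pinecone import Pinecone
from opentelemetry import trace
from opentelemetry.sdk.trace import TracerProvider
from opentelemetry.sdk.trace.export import SimpleSpanProcessor
from opentelemetry.sdk.trace.export.in_memory_span_exporter import InMemorySpanExporter

# Assumed by analogy with QdrantInstrumentation in the conftest below; verify the path.
from langtrace_python_sdk.instrumentation.pinecone.instrumentation import (
    PineconeInstrumentation,
)


@pytest.fixture(scope="session", autouse=True)
def instrument():
    # Patch the Pinecone client once for the whole test session.
    PineconeInstrumentation().instrument()


@pytest.fixture(scope="session")
def exporter():
    # Collect finished spans in memory so tests can assert on their attributes.
    span_exporter = InMemorySpanExporter()
    provider = TracerProvider()
    provider.add_span_processor(SimpleSpanProcessor(span_exporter))
    trace.set_tracer_provider(provider)
    return span_exporter


@pytest.fixture
def openai_client():
    return OpenAI()  # reads OPENAI_API_KEY from the environment


@pytest.fixture
def pinecone_client():
    return Pinecone(api_key=os.environ.get("PINECONE_API_KEY", "test-api-key"))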
@@ -0,0 +1,19 @@
+ """Unit tests configuration module."""
+
+ import pytest
+
+
+ from langtrace_python_sdk.instrumentation.qdrant.instrumentation import (
+     QdrantInstrumentation,
+ )
+ from qdrant_client import QdrantClient
+
+
+ @pytest.fixture
+ def qdrant_client():
+     return QdrantClient(":memory:")
+
+
+ @pytest.fixture(scope="session", autouse=True)
+ def instrument():
+     QdrantInstrumentation().instrument()
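This conftest instruments the Qdrant client once per session, but the exporter fixture that the Qdrant tests consume is not defined here. Because test_qdrant_upsert reads spans[0] and test_qdrant_search reads spans[-1], spans left over from earlier tests can shift those indices. One hedged way to keep them stable, assuming exporter wraps an OpenTelemetry InMemorySpanExporter as sketched earlier, is an autouse fixture that clears it before every test:

# Hypothetical addition to this conftest; assumes `exporter` is an
# opentelemetry.sdk.trace.export.in_memory_span_exporter.InMemorySpanExporter.
import pytest


@pytest.fixture(autouse=True)
def clear_exporter(exporter):
    exporter.clear()  # drop spans recorded by previous tests
    yield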
@@ -0,0 +1,92 @@
+ from langtrace_python_sdk.constants.instrumentation.common import SERVICE_PROVIDERS
+ from qdrant_client.models import PointStruct, Distance, VectorParams
+ import importlib
+
+
+ COLLECTION_NAME = "test_collection"
+ EMBEDDING_DIM = 4
+
+
+ def test_qdrant_upsert(qdrant_client, exporter):
+     qdrant_client.create_collection(
+         collection_name=COLLECTION_NAME,
+         vectors_config=VectorParams(size=EMBEDDING_DIM, distance=Distance.DOT),
+     )
+
+     qdrant_client.upsert(
+         collection_name=COLLECTION_NAME,
+         wait=True,
+         points=[
+             PointStruct(
+                 id=1, vector=[0.05, 0.61, 0.76, 0.74], payload={"city": "Berlin"}
+             ),
+             PointStruct(
+                 id=2, vector=[0.19, 0.81, 0.75, 0.11], payload={"city": "London"}
+             ),
+             PointStruct(
+                 id=3, vector=[0.36, 0.55, 0.47, 0.94], payload={"city": "Moscow"}
+             ),
+             PointStruct(
+                 id=4, vector=[0.18, 0.01, 0.85, 0.80], payload={"city": "New York"}
+             ),
+             PointStruct(
+                 id=5, vector=[0.24, 0.18, 0.22, 0.44], payload={"city": "Beijing"}
+             ),
+             PointStruct(
+                 id=6, vector=[0.35, 0.08, 0.11, 0.44], payload={"city": "Mumbai"}
+             ),
+         ],
+     )
+     spans = exporter.get_finished_spans()
+     upsert_span = spans[0]
+     attributes = upsert_span.attributes
+     assert attributes.get("langtrace.sdk.name") == "langtrace-python-sdk"
+     assert attributes.get("langtrace.service.name") == SERVICE_PROVIDERS["QDRANT"]
+     assert attributes.get("langtrace.service.type") == "vectordb"
+     assert attributes.get("langtrace.service.version") == importlib.metadata.version(
+         "qdrant_client"
+     )
+     assert attributes.get("db.system") == SERVICE_PROVIDERS["QDRANT"].lower()
+     assert attributes.get("db.operation") == "upsert"
+     assert attributes.get("db.upsert.points_count") == 6
+     assert attributes.get("db.collection.name") == COLLECTION_NAME
+
+
+ def test_qdrant_search(qdrant_client, exporter):
+     qdrant_client.create_collection(
+         collection_name=COLLECTION_NAME,
+         vectors_config=VectorParams(size=EMBEDDING_DIM, distance=Distance.DOT),
+     )
+
+     qdrant_client.upsert(
+         collection_name=COLLECTION_NAME,
+         wait=True,
+         points=[
+             PointStruct(
+                 id=1, vector=[0.05, 0.61, 0.76, 0.74], payload={"city": "Berlin"}
+             ),
+             PointStruct(
+                 id=2, vector=[0.19, 0.81, 0.75, 0.11], payload={"city": "London"}
+             ),
+             PointStruct(
+                 id=3, vector=[0.36, 0.55, 0.47, 0.94], payload={"city": "Moscow"}
+             ),
+             PointStruct(
+                 id=4, vector=[0.18, 0.01, 0.85, 0.80], payload={"city": "New York"}
+             ),
+             PointStruct(
+                 id=5, vector=[0.24, 0.18, 0.22, 0.44], payload={"city": "Beijing"}
+             ),
+             PointStruct(
+                 id=6, vector=[0.35, 0.08, 0.11, 0.44], payload={"city": "Mumbai"}
+             ),
+         ],
+     )
+     qdrant_client.search(
+         collection_name=COLLECTION_NAME, query_vector=[0.2, 0.1, 0.9, 0.7], limit=3
+     )
+
+     spans = exporter.get_finished_spans()
+     search_span = spans[-1]
+     attributes = search_span.attributes
+     assert attributes.get("db.operation") == "search"
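Both the Pinecone and Qdrant tests repeat the same block of langtrace.* attribute assertions. A small helper along the following lines could factor that duplication out; the helper is hypothetical and not part of langtrace-python-sdk.

# Hypothetical test helper; the attribute keys are taken from the assertions in
# this diff, but the function itself is not shipped with the package.
import importlib.metadata


def assert_common_langtrace_attributes(span, service_name, package_name):
    attributes = span.attributes
    assert attributes.get("langtrace.sdk.name") == "langtrace-python-sdk"
    assert attributes.get("langtrace.service.name") == service_name
    assert attributes.get("langtrace.service.type") == "vectordb"
    assert attributes.get("langtrace.service.version") == importlib.metadata.version(
        package_name
    )


# Example usage inside test_upsert:
#     assert_common_langtrace_attributes(
#         pinecone_span, SERVICE_PROVIDERS["PINECONE"], "pinecone-client"
#     )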