langtrace-python-sdk 2.2.6__py3-none-any.whl → 2.2.7__py3-none-any.whl
This diff shows the changes between two publicly released versions of this package as published to a supported registry. It is provided for informational purposes only and reflects the package contents exactly as they appear in the public registry.
- examples/otlp_example/otlp_with_langtrace.py +59 -0
- langtrace_python_sdk/instrumentation/cohere/patch.py +2 -2
- langtrace_python_sdk/instrumentation/openai/patch.py +5 -5
- langtrace_python_sdk/utils/llm.py +2 -1
- langtrace_python_sdk/version.py +1 -1
- {langtrace_python_sdk-2.2.6.dist-info → langtrace_python_sdk-2.2.7.dist-info}/METADATA +2 -2
- {langtrace_python_sdk-2.2.6.dist-info → langtrace_python_sdk-2.2.7.dist-info}/RECORD +10 -10
- examples/ollama_example/basic_example_2.py +0 -34
- {langtrace_python_sdk-2.2.6.dist-info → langtrace_python_sdk-2.2.7.dist-info}/WHEEL +0 -0
- {langtrace_python_sdk-2.2.6.dist-info → langtrace_python_sdk-2.2.7.dist-info}/entry_points.txt +0 -0
- {langtrace_python_sdk-2.2.6.dist-info → langtrace_python_sdk-2.2.7.dist-info}/licenses/LICENSE +0 -0
examples/otlp_example/otlp_with_langtrace.py
ADDED
@@ -0,0 +1,59 @@
+# Instructions
+# 1. Run the OpenTelemetry Collector with the OTLP receiver enabled
+# Create otel-config.yaml with the following content:
+# receivers:
+#   otlp:
+#     protocols:
+#       grpc:
+#         endpoint: "0.0.0.0:4317"
+#       http:
+#         endpoint: "0.0.0.0:4318"
+
+# exporters:
+#   logging:
+#     loglevel: debug
+
+# service:
+#   pipelines:
+#     traces:
+#       receivers: [otlp]
+#       exporters: [logging]
+# docker pull otel/opentelemetry-collector:latest
+# docker run --rm -p 4317:4317 -p 4318:4318 -v $(pwd)/otel-config.yaml:/otel-config.yaml otel/opentelemetry-collector --config otel-config.yaml
+# 2. Run the following code
+
+from langtrace_python_sdk import langtrace
+from openai import OpenAI
+from opentelemetry.exporter.otlp.proto.http.trace_exporter import OTLPSpanExporter
+
+
+# Configure the OTLP exporter to use the correct endpoint and API key
+otlp_endpoint = "http://localhost:4318/v1/traces"
+otlp_exporter = OTLPSpanExporter(
+    endpoint=otlp_endpoint,
+    headers=(("Content-Type", "application/json"),))
+langtrace.init(custom_remote_exporter=otlp_exporter, batch=False)
+
+
+def chat_with_openai():
+    client = OpenAI()
+    messages = [
+        {
+            "role": "user",
+            "content": "Hello, I'm a human.",
+        },
+    ]
+    chat_completion = client.chat.completions.create(
+        messages=messages,
+        stream=False,
+        model="gpt-3.5-turbo",
+    )
+    print(chat_completion.choices[0].message.content)
+
+
+def main():
+    chat_with_openai()
+
+
+if __name__ == "__main__":
+    main()
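For reference, the same wiring also works against the collector's gRPC receiver (port 4317 in the otel-config.yaml above). The sketch below is illustrative only and is not part of the release; it assumes the opentelemetry-exporter-otlp-proto-grpc package is installed and reuses the langtrace.init(custom_remote_exporter=..., batch=False) call shown in the new example.

```python
# Hypothetical variant of the new example: export spans over gRPC instead of HTTP.
# Assumes opentelemetry-exporter-otlp-proto-grpc is installed.
from langtrace_python_sdk import langtrace
from opentelemetry.exporter.otlp.proto.grpc.trace_exporter import OTLPSpanExporter

grpc_exporter = OTLPSpanExporter(
    endpoint="http://localhost:4317",  # gRPC receiver opened by the collector config above
    insecure=True,                     # local collector, no TLS
)

# Same init call the new example uses; batch=False flushes each span immediately.
langtrace.init(custom_remote_exporter=grpc_exporter, batch=False)
```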
langtrace_python_sdk/instrumentation/cohere/patch.py
CHANGED
@@ -44,7 +44,7 @@ def rerank(original_method, version, tracer):

     span_attributes = {
         **get_langtrace_attributes(version, service_provider),
-        **get_llm_request_attributes(kwargs),
+        **get_llm_request_attributes(kwargs, operation_name="rerank"),
         **get_llm_url(instance),
         SpanAttributes.LLM_REQUEST_MODEL: kwargs.get("model") or "command-r-plus",
         SpanAttributes.LLM_URL: APIS["RERANK"]["URL"],
@@ -121,7 +121,7 @@ def embed(original_method, version, tracer):

     span_attributes = {
         **get_langtrace_attributes(version, service_provider),
-        **get_llm_request_attributes(kwargs),
+        **get_llm_request_attributes(kwargs, operation_name="embed"),
         **get_llm_url(instance),
         SpanAttributes.LLM_URL: APIS["EMBED"]["URL"],
         SpanAttributes.LLM_PATH: APIS["EMBED"]["ENDPOINT"],
langtrace_python_sdk/instrumentation/openai/patch.py
CHANGED
@@ -55,7 +55,7 @@ def images_generate(original_method, version, tracer):
     service_provider = SERVICE_PROVIDERS["OPENAI"]
     span_attributes = {
         **get_langtrace_attributes(version, service_provider, vendor_type="llm"),
-        **get_llm_request_attributes(kwargs),
+        **get_llm_request_attributes(kwargs, operation_name="images_generate"),
         **get_llm_url(instance),
         SpanAttributes.LLM_PATH: APIS["IMAGES_GENERATION"]["ENDPOINT"],
         **get_extra_attributes(),
@@ -118,7 +118,7 @@ def async_images_generate(original_method, version, tracer):

     span_attributes = {
         **get_langtrace_attributes(version, service_provider, vendor_type="llm"),
-        **get_llm_request_attributes(kwargs),
+        **get_llm_request_attributes(kwargs, operation_name="images_generate"),
         **get_llm_url(instance),
         SpanAttributes.LLM_PATH: APIS["IMAGES_GENERATION"]["ENDPOINT"],
         **get_extra_attributes(),
@@ -181,7 +181,7 @@ def images_edit(original_method, version, tracer):

     span_attributes = {
         **get_langtrace_attributes(version, service_provider, vendor_type="llm"),
-        **get_llm_request_attributes(kwargs),
+        **get_llm_request_attributes(kwargs, operation_name="images_edit"),
         **get_llm_url(instance),
         SpanAttributes.LLM_PATH: APIS["IMAGES_EDIT"]["ENDPOINT"],
         SpanAttributes.LLM_RESPONSE_FORMAT: kwargs.get("response_format"),
@@ -432,7 +432,7 @@ def embeddings_create(original_method, version, tracer):

     span_attributes = {
         **get_langtrace_attributes(version, service_provider, vendor_type="llm"),
-        **get_llm_request_attributes(kwargs),
+        **get_llm_request_attributes(kwargs, operation_name="embed"),
         **get_llm_url(instance),
         SpanAttributes.LLM_PATH: APIS["EMBEDDINGS_CREATE"]["ENDPOINT"],
         SpanAttributes.LLM_REQUEST_DIMENSIONS: kwargs.get("dimensions"),
@@ -490,7 +490,7 @@ def async_embeddings_create(original_method, version, tracer):

     span_attributes = {
         **get_langtrace_attributes(version, service_provider, vendor_type="llm"),
-        **get_llm_request_attributes(kwargs),
+        **get_llm_request_attributes(kwargs, operation_name="embed"),
         SpanAttributes.LLM_PATH: APIS["EMBEDDINGS_CREATE"]["ENDPOINT"],
         SpanAttributes.LLM_REQUEST_DIMENSIONS: kwargs.get("dimensions"),
         **get_extra_attributes(),
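All of these call sites follow the same pattern: each wrapper now passes an explicit operation_name to the shared get_llm_request_attributes helper (see the utils/llm.py hunk below), which folds it into the span attribute dict. A rough sketch of that flow, illustrative only, assuming the helper can be called directly with a plain kwargs dict; the actual attribute key strings come from the pinned trace-attributes package.

```python
# Illustrative only: how the patched wrappers now build span attributes.
# Assumes get_llm_request_attributes works when invoked standalone with a bare kwargs dict.
from langtrace_python_sdk.utils.llm import get_llm_request_attributes

# Hypothetical request kwargs mirroring the keys the helper reads (model, user, stream, ...).
kwargs = {"model": "text-embedding-ada-002", "user": "demo", "stream": False}

# Same call the embeddings wrappers now make; operation_name defaults to "chat".
attributes = get_llm_request_attributes(kwargs, operation_name="embed")

for key, value in attributes.items():
    print(key, "=", value)  # includes the new LLM operation-name attribute
```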
langtrace_python_sdk/utils/llm.py
CHANGED
@@ -92,7 +92,7 @@ def get_langtrace_attributes(version, service_provider, vendor_type="llm"):
     }


-def get_llm_request_attributes(kwargs, prompts=None, model=None):
+def get_llm_request_attributes(kwargs, prompts=None, model=None, operation_name="chat"):

     user = kwargs.get("user", None)
     if prompts is None:
@@ -111,6 +111,7 @@ def get_llm_request_attributes(kwargs, prompts=None, model=None):
     top_p = kwargs.get("p", None) or kwargs.get("top_p", None)
     tools = kwargs.get("tools", None)
     return {
+        SpanAttributes.LLM_OPERATION_NAME: operation_name,
         SpanAttributes.LLM_REQUEST_MODEL: model or kwargs.get("model"),
         SpanAttributes.LLM_IS_STREAMING: kwargs.get("stream"),
         SpanAttributes.LLM_REQUEST_TEMPERATURE: kwargs.get("temperature"),
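With the helper change in place, every span emitted by the instrumented clients should carry the operation name. A hypothetical way to eyeball this locally, assuming langtrace.init accepts any SpanExporter as custom_remote_exporter (the diff only shows it used with an OTLP exporter):

```python
# Hypothetical check, not part of the release: capture spans in memory and
# inspect their attributes, including the new operation-name attribute.
from langtrace_python_sdk import langtrace
from openai import OpenAI
from opentelemetry.sdk.trace.export.in_memory_span_exporter import InMemorySpanExporter

exporter = InMemorySpanExporter()
langtrace.init(custom_remote_exporter=exporter, batch=False)

client = OpenAI()  # needs OPENAI_API_KEY in the environment
client.embeddings.create(model="text-embedding-ada-002", input="hello")

for span in exporter.get_finished_spans():
    # Each instrumented span should now carry the operation name ("embed" here)
    # alongside the existing request attributes.
    print(span.name, dict(span.attributes))
```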
langtrace_python_sdk/version.py
CHANGED
@@ -1 +1 @@
-__version__ = "2.2.6"
+__version__ = "2.2.7"
{langtrace_python_sdk-2.2.6.dist-info → langtrace_python_sdk-2.2.7.dist-info}/METADATA
RENAMED
@@ -1,6 +1,6 @@
 Metadata-Version: 2.3
 Name: langtrace-python-sdk
-Version: 2.2.6
+Version: 2.2.7
 Summary: Python SDK for LangTrace
 Project-URL: Homepage, https://github.com/Scale3-Labs/langtrace-python-sdk
 Author-email: Scale3 Labs <engineering@scale3labs.com>
@@ -20,7 +20,7 @@ Requires-Dist: opentelemetry-instrumentation>=0.46b0
 Requires-Dist: opentelemetry-sdk>=1.25.0
 Requires-Dist: sqlalchemy
 Requires-Dist: tiktoken>=0.1.1
-Requires-Dist: trace-attributes
+Requires-Dist: trace-attributes==7.0.0
 Provides-Extra: dev
 Requires-Dist: anthropic; extra == 'dev'
 Requires-Dist: chromadb; extra == 'dev'
{langtrace_python_sdk-2.2.6.dist-info → langtrace_python_sdk-2.2.7.dist-info}/RECORD
RENAMED
@@ -33,7 +33,6 @@ examples/llamaindex_example/basic.py,sha256=aFZngkye95sjUr4wc2Uo_Je0iEexXpNcdlV0
 examples/llamaindex_example/data/abramov.txt,sha256=Ou-GyWZm5AjHLgxviBoRE9ikNv5MScsF0cd--0vVVhI,32667
 examples/ollama_example/__init__.py,sha256=qOx0jGCPuSpRCPiqtDVm7F0z8hIZ8C75hDZ_C8Apz-s,399
 examples/ollama_example/basic.py,sha256=EPbsigOF4xBDBgLgAD0EzPo737ycVm7aXZr7F5Xt-A4,1062
-examples/ollama_example/basic_example_2.py,sha256=h6hLX4Mot6H8ezg970zrh5XFVyI4zMVHeYLhvAMTQlQ,953
 examples/openai_example/__init__.py,sha256=MU4CELvhe2EU6d4Okg-bTfjvfGxQO7PNzqMw1yrVeCA,828
 examples/openai_example/async_tool_calling_nonstreaming.py,sha256=H1-CrNfNDfqAkB5wEipITXlW2OsYL7XD5uQb6k3C6ps,3865
 examples/openai_example/async_tool_calling_streaming.py,sha256=LaSKmn_Unv55eTHXYdEmKjo39eNuB3ASOBV-m8U1HfU,7136
@@ -49,6 +48,7 @@ examples/openai_example/tool_calling_streaming.py,sha256=mV1RbyAoVhumGRPpqPWQ6PM
 examples/openai_example/resources/lounge_flamingo.png,sha256=aspniTtmWqwLp3YUhYqAe2ze8nJaq-bTSW7uUJudtd0,2416234
 examples/openai_example/resources/mask.png,sha256=mUE9Dfp-x8jI0Nh4WGr0P9pueUqEZfpjwxR-6Rxzxz4,2483660
 examples/otlp_example/otlp_basic.py,sha256=Ykbzu6EpO-V1wQsPePgC16eLFVym91r-ZR-SDj2mIT0,1346
+examples/otlp_example/otlp_with_langtrace.py,sha256=0x5UOrisqaOSkBP8nbaNqL83_gB-jNM0Uq5FXZ9B4Q0,1535
 examples/perplexity_example/basic.py,sha256=bp7n27gaugJkaFVyt8pjaEfi66lYcqP6eFFjPewUShY,668
 examples/pinecone_example/__init__.py,sha256=_rvn7Ygt_QWMQoa5wB2GB0S9gZVrlJrPrEhXqU3hPKw,427
 examples/pinecone_example/basic.py,sha256=5MoHZMBxHMdC61oj-CP19gj9SxSvIcDrQL934JPZoQs,1549
@@ -60,7 +60,7 @@ examples/weaviate_example/__init__.py,sha256=8JMDBsRSEV10HfTd-YC7xb4txBjD3la56sn
 examples/weaviate_example/query_text.py,sha256=sG8O-bXQpflBAiYpgE_M2X7GcHUlZNgl_wJW8_h-W6Q,127024
 langtrace_python_sdk/__init__.py,sha256=VZM6i71NR7pBQK6XvJWRelknuTYUhqwqE7PlicKa5Wg,1166
 langtrace_python_sdk/langtrace.py,sha256=1L0IjME-pzEYht92QfwByPZr3H1MClTrqQdoN1KyKJY,7689
-langtrace_python_sdk/version.py,sha256=
+langtrace_python_sdk/version.py,sha256=ihs-OTG_m4F3lSAGofZ1X6UqOU41N4HD297yIDBaSZ0,22
 langtrace_python_sdk/constants/__init__.py,sha256=P8QvYwt5czUNDZsKS64vxm9Dc41ptGbuF1TFtAF6nv4,44
 langtrace_python_sdk/constants/exporter/langtrace_exporter.py,sha256=5MNjnAOg-4am78J3gVMH6FSwq5N8TOj72ugkhsw4vi0,46
 langtrace_python_sdk/constants/instrumentation/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
@@ -88,7 +88,7 @@ langtrace_python_sdk/instrumentation/chroma/instrumentation.py,sha256=nT6PS6bsrI
 langtrace_python_sdk/instrumentation/chroma/patch.py,sha256=JfFc8SDfwkEyIwTd1yM6jwa1vu5hZH6IXyxAEcQQQOs,9010
 langtrace_python_sdk/instrumentation/cohere/__init__.py,sha256=sGUSLdTUyYf36Tm6L5jQflhzCqvmWrhnBOMYHjvp6Hs,95
 langtrace_python_sdk/instrumentation/cohere/instrumentation.py,sha256=YQFHZIBd7SSPD4b6Va-ZR0thf_AuBCqj5yzHLHJVWnM,2121
-langtrace_python_sdk/instrumentation/cohere/patch.py,sha256=
+langtrace_python_sdk/instrumentation/cohere/patch.py,sha256=_0mmJ9eCjokkJTQTXWYK2hSd5XY02_iu5PW6HB6DvQI,21175
 langtrace_python_sdk/instrumentation/crewai/__init__.py,sha256=_UBKfvQv7l0g2_wnmA5F6CdSAFH0atNOVPd49zsN3aM,88
 langtrace_python_sdk/instrumentation/crewai/instrumentation.py,sha256=q07x6nnig9JPxDT6ZylyIShfXWjNafKBetnNcA1UdEU,1836
 langtrace_python_sdk/instrumentation/crewai/patch.py,sha256=Vnpip9Pbk4UFbTFHoUrHtAnDgsaihwSvZBgtUeOtLr8,6109
@@ -121,7 +121,7 @@ langtrace_python_sdk/instrumentation/ollama/instrumentation.py,sha256=jdsvkqUJAA
 langtrace_python_sdk/instrumentation/ollama/patch.py,sha256=Twi3yeGgBj0DadBmZ0X0DsMPx71iSdL4R3OjOw3-p_E,8132
 langtrace_python_sdk/instrumentation/openai/__init__.py,sha256=VPHRNCQEdkizIVP2d0Uw_a7t8XOTSTprEIB8oboJFbs,95
 langtrace_python_sdk/instrumentation/openai/instrumentation.py,sha256=A0BJHRLcZ74TNVg6I0I9M5YWvSpAtXwMmME6N5CEQ_M,2945
-langtrace_python_sdk/instrumentation/openai/patch.py,sha256=
+langtrace_python_sdk/instrumentation/openai/patch.py,sha256=T0g9BbUw5JWSupZbWCF6sQxO2Auj_oPpAFw0RdVkKLg,24075
 langtrace_python_sdk/instrumentation/pinecone/__init__.py,sha256=DzXyGh9_MGWveJvXULkFwdkf7PbG2s3bAWtT1Dmz7Ok,99
 langtrace_python_sdk/instrumentation/pinecone/instrumentation.py,sha256=HDXkRITrVPwdQEoOYJOfMzZE_2-vDDvuqHTlD8W1lQw,1845
 langtrace_python_sdk/instrumentation/pinecone/patch.py,sha256=KiIRRz8kk47FllFT746Cb_w6F6M60AN_pcsguD979E4,5172
@@ -137,7 +137,7 @@ langtrace_python_sdk/instrumentation/weaviate/patch.py,sha256=rRD6WfQcNGYpw9teoC
 langtrace_python_sdk/types/__init__.py,sha256=KDW6S74FDxpeBa9xoH5zVEYfmRjccCCHzlW7lTJg1TA,3194
 langtrace_python_sdk/utils/__init__.py,sha256=QPF7SMuiz_003fLCHkRrgNb9NjqErDQ5cQr6pkJReKc,724
 langtrace_python_sdk/utils/langtrace_sampler.py,sha256=BupNndHbU9IL_wGleKetz8FdcveqHMBVz1bfKTTW80w,1753
-langtrace_python_sdk/utils/llm.py,sha256=
+langtrace_python_sdk/utils/llm.py,sha256=bvJkU3IGt_ssx5taY_Dx5GkgxGJ9Jqc1CrKElMVJ5to,12983
 langtrace_python_sdk/utils/misc.py,sha256=CD9NWRLxLpFd0YwlHJqzlpFNedXVWtAKGOjQWnDCo8k,838
 langtrace_python_sdk/utils/prompt_registry.py,sha256=n5dQMVLBw8aJZY8Utvf67bncc25ELf6AH9BYw8_hSzo,2619
 langtrace_python_sdk/utils/sdk_version_checker.py,sha256=FzjIWZjn53cX0LEVPdipQd1fO9lG8iGVUEVUs9Hyk6M,1713
@@ -186,8 +186,8 @@ tests/pinecone/cassettes/test_query.yaml,sha256=b5v9G3ssUy00oG63PlFUR3JErF2Js-5A
 tests/pinecone/cassettes/test_upsert.yaml,sha256=neWmQ1v3d03V8WoLl8FoFeeCYImb8pxlJBWnFd_lITU,38607
 tests/qdrant/conftest.py,sha256=9n0uHxxIjWk9fbYc4bx-uP8lSAgLBVx-cV9UjnsyCHM,381
 tests/qdrant/test_qdrant.py,sha256=pzjAjVY2kmsmGfrI2Gs2xrolfuaNHz7l1fqGQCjp5_o,3353
-langtrace_python_sdk-2.2.6.dist-info/METADATA,sha256=
-langtrace_python_sdk-2.2.6.dist-info/WHEEL,sha256=
-langtrace_python_sdk-2.2.6.dist-info/entry_points.txt,sha256=
-langtrace_python_sdk-2.2.6.dist-info/licenses/LICENSE,sha256=
-langtrace_python_sdk-2.2.6.dist-info/RECORD,,
+langtrace_python_sdk-2.2.7.dist-info/METADATA,sha256=YHtbzww64LbwfyNm1bLXWgf43rV3NYYCJ18vQUVzg_4,14464
+langtrace_python_sdk-2.2.7.dist-info/WHEEL,sha256=1yFddiXMmvYK7QYTqtRNtX66WJ0Mz8PYEiEUoOUUxRY,87
+langtrace_python_sdk-2.2.7.dist-info/entry_points.txt,sha256=1_b9-qvf2fE7uQNZcbUei9vLpFZBbbh9LrtGw95ssAo,70
+langtrace_python_sdk-2.2.7.dist-info/licenses/LICENSE,sha256=QwcOLU5TJoTeUhuIXzhdCEEDDvorGiC6-3YTOl4TecE,11356
+langtrace_python_sdk-2.2.7.dist-info/RECORD,,
examples/ollama_example/basic_example_2.py
DELETED
@@ -1,34 +0,0 @@
-from langtrace_python_sdk import langtrace
-from openai import OpenAI
-from opentelemetry.exporter.otlp.proto.http.trace_exporter import OTLPSpanExporter
-
-service_name = "langtrace-python-ollama"
-otlp_endpoint = "http://localhost:4318/v1/traces"
-otlp_exporter = OTLPSpanExporter(
-    endpoint=otlp_endpoint,
-    headers=(("Content-Type", "application/json"),))
-langtrace.init(custom_remote_exporter=otlp_exporter, batch=False)
-
-
-def chat_with_ollama():
-    # Use the OpenAI endpoint, not the Ollama API.
-    base_url = "http://localhost:11434/v1"
-    client = OpenAI(base_url=base_url, api_key="unused")
-    messages = [
-        {
-            "role": "user",
-            "content": "Hello, I'm a human.",
-        },
-    ]
-    chat_completion = client.chat.completions.create(
-        model="llama3", messages=messages
-    )
-    print(chat_completion.choices[0].message.content)
-
-
-def main():
-    chat_with_ollama()
-
-
-if __name__ == "__main__":
-    main()
{langtrace_python_sdk-2.2.6.dist-info → langtrace_python_sdk-2.2.7.dist-info}/WHEEL
RENAMED
File without changes
{langtrace_python_sdk-2.2.6.dist-info → langtrace_python_sdk-2.2.7.dist-info}/entry_points.txt
RENAMED
File without changes
{langtrace_python_sdk-2.2.6.dist-info → langtrace_python_sdk-2.2.7.dist-info}/licenses/LICENSE
RENAMED
File without changes