lmnr 0.4.6__py3-none-any.whl → 0.4.7__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (48)
  1. lmnr/sdk/decorators.py +2 -7
  2. lmnr/sdk/laminar.py +69 -4
  3. lmnr/traceloop_sdk/.flake8 +12 -0
  4. lmnr/traceloop_sdk/.python-version +1 -0
  5. lmnr/traceloop_sdk/README.md +16 -0
  6. lmnr/traceloop_sdk/__init__.py +138 -0
  7. lmnr/traceloop_sdk/config/__init__.py +13 -0
  8. lmnr/traceloop_sdk/decorators/__init__.py +131 -0
  9. lmnr/traceloop_sdk/decorators/base.py +253 -0
  10. lmnr/traceloop_sdk/instruments.py +29 -0
  11. lmnr/traceloop_sdk/metrics/__init__.py +0 -0
  12. lmnr/traceloop_sdk/metrics/metrics.py +176 -0
  13. lmnr/traceloop_sdk/tests/__init__.py +1 -0
  14. lmnr/traceloop_sdk/tests/cassettes/test_association_properties/test_langchain_and_external_association_properties.yaml +101 -0
  15. lmnr/traceloop_sdk/tests/cassettes/test_association_properties/test_langchain_association_properties.yaml +99 -0
  16. lmnr/traceloop_sdk/tests/cassettes/test_manual/test_manual_report.yaml +98 -0
  17. lmnr/traceloop_sdk/tests/cassettes/test_manual/test_resource_attributes.yaml +98 -0
  18. lmnr/traceloop_sdk/tests/cassettes/test_privacy_no_prompts/test_simple_workflow.yaml +199 -0
  19. lmnr/traceloop_sdk/tests/cassettes/test_prompt_management/test_prompt_management.yaml +202 -0
  20. lmnr/traceloop_sdk/tests/cassettes/test_sdk_initialization/test_resource_attributes.yaml +199 -0
  21. lmnr/traceloop_sdk/tests/cassettes/test_tasks/test_task_io_serialization_with_langchain.yaml +96 -0
  22. lmnr/traceloop_sdk/tests/cassettes/test_workflows/test_simple_aworkflow.yaml +98 -0
  23. lmnr/traceloop_sdk/tests/cassettes/test_workflows/test_simple_workflow.yaml +199 -0
  24. lmnr/traceloop_sdk/tests/cassettes/test_workflows/test_streaming_workflow.yaml +167 -0
  25. lmnr/traceloop_sdk/tests/conftest.py +111 -0
  26. lmnr/traceloop_sdk/tests/test_association_properties.py +229 -0
  27. lmnr/traceloop_sdk/tests/test_manual.py +48 -0
  28. lmnr/traceloop_sdk/tests/test_nested_tasks.py +47 -0
  29. lmnr/traceloop_sdk/tests/test_privacy_no_prompts.py +50 -0
  30. lmnr/traceloop_sdk/tests/test_sdk_initialization.py +57 -0
  31. lmnr/traceloop_sdk/tests/test_tasks.py +32 -0
  32. lmnr/traceloop_sdk/tests/test_workflows.py +261 -0
  33. lmnr/traceloop_sdk/tracing/__init__.py +2 -0
  34. lmnr/traceloop_sdk/tracing/content_allow_list.py +24 -0
  35. lmnr/traceloop_sdk/tracing/context_manager.py +13 -0
  36. lmnr/traceloop_sdk/tracing/manual.py +57 -0
  37. lmnr/traceloop_sdk/tracing/tracing.py +1078 -0
  38. lmnr/traceloop_sdk/utils/__init__.py +26 -0
  39. lmnr/traceloop_sdk/utils/in_memory_span_exporter.py +61 -0
  40. lmnr/traceloop_sdk/utils/json_encoder.py +20 -0
  41. lmnr/traceloop_sdk/utils/package_check.py +8 -0
  42. lmnr/traceloop_sdk/version.py +1 -0
  43. {lmnr-0.4.6.dist-info → lmnr-0.4.7.dist-info}/METADATA +40 -3
  44. lmnr-0.4.7.dist-info/RECORD +53 -0
  45. lmnr-0.4.6.dist-info/RECORD +0 -13
  46. {lmnr-0.4.6.dist-info → lmnr-0.4.7.dist-info}/LICENSE +0 -0
  47. {lmnr-0.4.6.dist-info → lmnr-0.4.7.dist-info}/WHEEL +0 -0
  48. {lmnr-0.4.6.dist-info → lmnr-0.4.7.dist-info}/entry_points.txt +0 -0
lmnr/traceloop_sdk/instruments.py
@@ -0,0 +1,29 @@
+ from enum import Enum
+
+
+ class Instruments(Enum):
+     OPENAI = "openai"
+     ANTHROPIC = "anthropic"
+     COHERE = "cohere"
+     PINECONE = "pinecone"
+     CHROMA = "chroma"
+     GOOGLE_GENERATIVEAI = "google_generativeai"
+     LANGCHAIN = "langchain"
+     MISTRAL = "mistral"
+     OLLAMA = "ollama"
+     LLAMA_INDEX = "llama_index"
+     MILVUS = "milvus"
+     TRANSFORMERS = "transformers"
+     TOGETHER = "together"
+     REDIS = "redis"
+     REQUESTS = "requests"
+     URLLIB3 = "urllib3"
+     PYMYSQL = "pymysql"
+     BEDROCK = "bedrock"
+     REPLICATE = "replicate"
+     VERTEXAI = "vertexai"
+     WATSONX = "watsonx"
+     WEAVIATE = "weaviate"
+     ALEPHALPHA = "alephalpha"
+     MARQO = "marqo"
+     LANCEDB = "lancedb"
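
Sketch (not part of the diff): the Instruments enum above only declares string identifiers for the instrumentations the vendored traceloop SDK knows about. A minimal, hypothetical way to consume such a flag set; the select_instruments helper is illustrative only and does not exist in the package.

from lmnr.traceloop_sdk.instruments import Instruments

# Hypothetical helper: map user-supplied instrumentation names onto the enum
# members declared above; names the SDK does not know about are ignored.
def select_instruments(requested: set[str]) -> set[Instruments]:
    known = {i.value: i for i in Instruments}
    return {known[name] for name in requested if name in known}

# select_instruments({"openai", "langchain", "not-a-real-one"})
# -> {Instruments.OPENAI, Instruments.LANGCHAIN}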
lmnr/traceloop_sdk/metrics/__init__.py
File without changes
lmnr/traceloop_sdk/metrics/metrics.py
@@ -0,0 +1,176 @@
+ from collections.abc import Sequence
+ from typing import Dict
+
+ from opentelemetry.exporter.otlp.proto.grpc.metric_exporter import (
+     OTLPMetricExporter as GRPCExporter,
+ )
+ from opentelemetry.exporter.otlp.proto.http.metric_exporter import (
+     OTLPMetricExporter as HTTPExporter,
+ )
+ from opentelemetry.semconv_ai import Meters
+ from opentelemetry.sdk.metrics import MeterProvider
+ from opentelemetry.sdk.metrics.export import (
+     PeriodicExportingMetricReader,
+     MetricExporter,
+ )
+ from opentelemetry.sdk.metrics.view import View, ExplicitBucketHistogramAggregation
+ from opentelemetry.sdk.resources import Resource
+
+ from opentelemetry import metrics
+
+
+ class MetricsWrapper(object):
+     resource_attributes: dict = {}
+     endpoint: str = None
+     # if it needs headers?
+     headers: Dict[str, str] = {}
+     __metrics_exporter: MetricExporter = None
+     __metrics_provider: MeterProvider = None
+
+     def __new__(cls, exporter: MetricExporter = None) -> "MetricsWrapper":
+         if not hasattr(cls, "instance"):
+             obj = cls.instance = super(MetricsWrapper, cls).__new__(cls)
+             if not MetricsWrapper.endpoint:
+                 return obj
+
+             obj.__metrics_exporter = (
+                 exporter
+                 if exporter
+                 else init_metrics_exporter(
+                     MetricsWrapper.endpoint, MetricsWrapper.headers
+                 )
+             )
+
+             obj.__metrics_provider = init_metrics_provider(
+                 obj.__metrics_exporter, MetricsWrapper.resource_attributes
+             )
+
+         return cls.instance
+
+     @staticmethod
+     def set_static_params(
+         resource_attributes: dict,
+         endpoint: str,
+         headers: Dict[str, str],
+     ) -> None:
+         MetricsWrapper.resource_attributes = resource_attributes
+         MetricsWrapper.endpoint = endpoint
+         MetricsWrapper.headers = headers
+
+
+ def init_metrics_exporter(endpoint: str, headers: Dict[str, str]) -> MetricExporter:
+     if "http" in endpoint.lower() or "https" in endpoint.lower():
+         return HTTPExporter(endpoint=f"{endpoint}/v1/metrics", headers=headers)
+     else:
+         return GRPCExporter(endpoint=endpoint, headers=headers)
+
+
+ def init_metrics_provider(
+     exporter: MetricExporter, resource_attributes: dict = None
+ ) -> MeterProvider:
+     resource = (
+         Resource.create(resource_attributes)
+         if resource_attributes
+         else Resource.create()
+     )
+     reader = PeriodicExportingMetricReader(exporter)
+     provider = MeterProvider(
+         metric_readers=[reader],
+         resource=resource,
+         views=metric_views(),
+     )
+
+     metrics.set_meter_provider(provider)
+     return provider
+
+
+ def metric_views() -> Sequence[View]:
+     return [
+         View(
+             instrument_name=Meters.LLM_TOKEN_USAGE,
+             aggregation=ExplicitBucketHistogramAggregation(
+                 [
+                     0.01,
+                     0.02,
+                     0.04,
+                     0.08,
+                     0.16,
+                     0.32,
+                     0.64,
+                     1.28,
+                     2.56,
+                     5.12,
+                     10.24,
+                     20.48,
+                     40.96,
+                     81.92,
+                 ]
+             ),
+         ),
+         View(
+             instrument_name=Meters.LLM_OPERATION_DURATION,
+             aggregation=ExplicitBucketHistogramAggregation(
+                 [
+                     1,
+                     4,
+                     16,
+                     64,
+                     256,
+                     1024,
+                     4096,
+                     16384,
+                     65536,
+                     262144,
+                     1048576,
+                     4194304,
+                     16777216,
+                     67108864,
+                 ]
+             ),
+         ),
+         View(
+             instrument_name=Meters.PINECONE_DB_QUERY_DURATION,
+             aggregation=ExplicitBucketHistogramAggregation(
+                 [
+                     0.01,
+                     0.02,
+                     0.04,
+                     0.08,
+                     0.16,
+                     0.32,
+                     0.64,
+                     1.28,
+                     2.56,
+                     5.12,
+                     10.24,
+                     20.48,
+                     40.96,
+                     81.92,
+                 ]
+             ),
+         ),
+         View(
+             instrument_name=Meters.PINECONE_DB_QUERY_SCORES,
+             aggregation=ExplicitBucketHistogramAggregation(
+                 [
+                     -1,
+                     -0.875,
+                     -0.75,
+                     -0.625,
+                     -0.5,
+                     -0.375,
+                     -0.25,
+                     -0.125,
+                     0,
+                     0.125,
+                     0.25,
+                     0.375,
+                     0.5,
+                     0.625,
+                     0.75,
+                     0.875,
+                     1,
+                 ]
+             ),
+         ),
+     ]
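
Sketch (not part of the diff): metrics.py above builds a singleton MeterProvider with histogram views for LLM and Pinecone metrics. A minimal usage sketch based only on the functions shown in this hunk; the endpoint, headers, and resource attributes below are placeholder values.

from lmnr.traceloop_sdk.metrics.metrics import MetricsWrapper

# MetricsWrapper() stays a no-op singleton until set_static_params() has been
# called with a non-empty endpoint.
MetricsWrapper.set_static_params(
    resource_attributes={"service.name": "my-app"},    # placeholder
    endpoint="https://collector.example.com:4318",     # "http" in the URL selects the HTTP OTLP exporter
    headers={"Authorization": "Bearer <token>"},       # placeholder
)
MetricsWrapper()  # builds the exporter, PeriodicExportingMetricReader, views, and global MeterProvider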
lmnr/traceloop_sdk/tests/__init__.py
@@ -0,0 +1 @@
+ """unit tests."""
lmnr/traceloop_sdk/tests/cassettes/test_association_properties/test_langchain_and_external_association_properties.yaml
@@ -0,0 +1,101 @@
+ interactions:
+ - request:
+     body: '{"messages": [{"content": "You are helpful assistant", "role": "system"},
+       {"content": "tell me a short joke", "role": "user"}], "model": "gpt-3.5-turbo",
+       "logprobs": false, "n": 1, "stream": false, "temperature": 0.7}'
+     headers:
+       accept:
+       - application/json
+       accept-encoding:
+       - gzip, deflate
+       connection:
+       - keep-alive
+       content-length:
+       - '217'
+       content-type:
+       - application/json
+       host:
+       - api.openai.com
+       user-agent:
+       - OpenAI/Python 1.35.15
+       x-stainless-arch:
+       - arm64
+       x-stainless-async:
+       - 'false'
+       x-stainless-lang:
+       - python
+       x-stainless-os:
+       - MacOS
+       x-stainless-package-version:
+       - 1.35.15
+       x-stainless-runtime:
+       - CPython
+       x-stainless-runtime-version:
+       - 3.9.5
+     method: POST
+     uri: https://api.openai.com/v1/chat/completions
+   response:
+     body:
+       string: !!binary |
+         H4sIAAAAAAAAA1SRS0/DMBCE7/kVW1+4tKgJCo9eEHABhHooLyGEKtfdJgbHa7wbaFX1v6OEtIWL
+         D/PtrGbW6wRA2bkagTKlFlMFN7jIbiaP44fF59Pt5Pj65epu6Zbf44un69MxLVW/cdDsHY1sXYeG
+         quBQLPlfbCJqwWZrepLlaTo8S7MWVDRH19iKIIOjw3wgdZzRYJhmeecsyRpkNYLXBABg3b5NRj/H
+         pRrBsL9VKmTWBarRbghARXKNojSzZdFeVH8PDXlB38a+ryP24LlcwZz8gQAbi14sC4PEmgW0UMXn
+         cIlG14wgJa6g0h8IdQD8wriS0vqi93d9xEXNuqnna+c6fbPL66gIkWbc8Z2+sN5yOY2omXyTjYWC
+         aukmAXhr71L/q6pCpCrIVOgDfbMw686i9j+xh2neQSHRbq8f5UmXT/GKBavpwvoCY4i2PVLbYpP8
+         AAAA//8DAFwYnEsjAgAA
+     headers:
+       CF-Cache-Status:
+       - DYNAMIC
+       CF-RAY:
+       - 8bbd5d409b62b0bd-ATL
+       Connection:
+       - keep-alive
+       Content-Encoding:
+       - gzip
+       Content-Type:
+       - application/json
+       Date:
+       - Sat, 31 Aug 2024 13:28:32 GMT
+       Server:
+       - cloudflare
+       Set-Cookie:
+       - __cf_bm=3B.f5aMPKiXVHyNDAIAPma3ZGvDnGViQrDAMvyT4n_8-1725110912-1.0.1.1-.elzfgXAenLSaVeAmcRwzq2OROEZMEvOpxSRlQ7PPZ8n6nkbc2NfZXBU1bijPQNxQ28MLNRJFyh4B4Mq4G3PPA;
+         path=/; expires=Sat, 31-Aug-24 13:58:32 GMT; domain=.api.openai.com; HttpOnly;
+         Secure; SameSite=None
+       - _cfuvid=LakflcrbwsF6x0qpc03TIL8jU8c3IjMCt5dua3l4dVA-1725110912530-0.0.1.1-604800000;
+         path=/; domain=.api.openai.com; HttpOnly; Secure; SameSite=None
+       Transfer-Encoding:
+       - chunked
+       X-Content-Type-Options:
+       - nosniff
+       access-control-expose-headers:
+       - X-Request-ID
+       alt-svc:
+       - h3=":443"; ma=86400
+       openai-organization:
+       - traceloop
+       openai-processing-ms:
+       - '233'
+       openai-version:
+       - '2020-10-01'
+       strict-transport-security:
+       - max-age=15552000; includeSubDomains; preload
+       x-ratelimit-limit-requests:
+       - '5000'
+       x-ratelimit-limit-tokens:
+       - '4000000'
+       x-ratelimit-remaining-requests:
+       - '4999'
+       x-ratelimit-remaining-tokens:
+       - '3999970'
+       x-ratelimit-reset-requests:
+       - 12ms
+       x-ratelimit-reset-tokens:
+       - 0s
+       x-request-id:
+       - req_459694e3c39fd24575ad9deb5b65a831
+     status:
+       code: 200
+       message: OK
+ version: 1
lmnr/traceloop_sdk/tests/cassettes/test_association_properties/test_langchain_association_properties.yaml
@@ -0,0 +1,99 @@
+ interactions:
+ - request:
+     body: '{"messages": [{"content": "You are helpful assistant", "role": "system"},
+       {"content": "tell me a short joke", "role": "user"}], "model": "gpt-3.5-turbo",
+       "logprobs": false, "n": 1, "stream": false, "temperature": 0.7}'
+     headers:
+       accept:
+       - application/json
+       accept-encoding:
+       - gzip, deflate
+       connection:
+       - keep-alive
+       content-length:
+       - '217'
+       content-type:
+       - application/json
+       host:
+       - api.openai.com
+       user-agent:
+       - OpenAI/Python 1.35.15
+       x-stainless-arch:
+       - arm64
+       x-stainless-async:
+       - 'false'
+       x-stainless-lang:
+       - python
+       x-stainless-os:
+       - MacOS
+       x-stainless-package-version:
+       - 1.35.15
+       x-stainless-runtime:
+       - CPython
+       x-stainless-runtime-version:
+       - 3.9.5
+     method: POST
+     uri: https://api.openai.com/v1/chat/completions
+   response:
+     body:
+       string: !!binary |
+         H4sIAAAAAAAAA1SRzU7DMBCE73mKxRcuLYLSFsgFgbhwBCSQ+FHlONvE4Hgt71olQn135BBauPgw
+         s7P6dvxVAChbqxKUabWYLrjphdwk01VXzb1teXOX6pquzp9vKSwfFzdqkhNUvaOR39SRoS44FEv+
+         xzYRtWDeenI2m13Mzxfz5WB0VKPLsSbI9PRoMZUUK5oen8wWY7Ila5BVCS8FAMDX8GZGX+OnKuF4
+         8qt0yKwbVOVuCEBFcllRmtmyaC9qsjcNeUE/YD+kiBNoMSJYBg3cUhR4pw+ENUXoKZWv/tU/tT0Y
+         Sq72hwLSIlTW9MYh5N01pABVD1YY3foSrtHoxAhWYKMZZEMgNmJ9oEaE7Y7dURMiVflOn5zb6Wvr
+         LberiJrJZ04WCj/xbQHwNnSU/p2tQqQuyEroA31eOBsrUvtf+WMuR1NItNvr82Ux8inuWbBbra1v
+         MIZoh8IyZbEtvgEAAP//AwAFMxvRLwIAAA==
+     headers:
+       CF-Cache-Status:
+       - DYNAMIC
+       CF-RAY:
+       - 8aef251d0f8609c9-HFA
+       Connection:
+       - keep-alive
+       Content-Encoding:
+       - gzip
+       Content-Type:
+       - application/json
+       Date:
+       - Tue, 06 Aug 2024 12:49:06 GMT
+       Server:
+       - cloudflare
+       Set-Cookie:
+       - __cf_bm=SWNMVXkujzFcfmlIKZqekPtNke27ztV8lzplh5iUMes-1722948546-1.0.1.1-oRP4d5x5PiBOFu.G77C4XWWMxrsKragvFMrFNCIRwudZ7Z2NMrKgMa_A7eqXOME9rU2sqUIpS9c8T9SBEAR7Fg;
+         path=/; expires=Tue, 06-Aug-24 13:19:06 GMT; domain=.api.openai.com; HttpOnly;
+         Secure; SameSite=None
+       - _cfuvid=VuBkx.e0oap2GPCBPG260hGbmwEpoHckoHEm5vALKZs-1722948546860-0.0.1.1-604800000;
+         path=/; domain=.api.openai.com; HttpOnly; Secure; SameSite=None
+       Transfer-Encoding:
+       - chunked
+       X-Content-Type-Options:
+       - nosniff
+       alt-svc:
+       - h3=":443"; ma=86400
+       openai-organization:
+       - traceloop
+       openai-processing-ms:
+       - '400'
+       openai-version:
+       - '2020-10-01'
+       strict-transport-security:
+       - max-age=15552000; includeSubDomains; preload
+       x-ratelimit-limit-requests:
+       - '5000'
+       x-ratelimit-limit-tokens:
+       - '4000000'
+       x-ratelimit-remaining-requests:
+       - '4999'
+       x-ratelimit-remaining-tokens:
+       - '3999970'
+       x-ratelimit-reset-requests:
+       - 12ms
+       x-ratelimit-reset-tokens:
+       - 0s
+       x-request-id:
+       - req_bf54266d3b5b08c26a6dc51f55dd208c
+     status:
+       code: 200
+       message: OK
+ version: 1
lmnr/traceloop_sdk/tests/cassettes/test_manual/test_manual_report.yaml
@@ -0,0 +1,98 @@
+ interactions:
+ - request:
+     body: '{"messages": [{"role": "user", "content": "Tell me a joke about opentelemetry"}],
+       "model": "gpt-3.5-turbo"}'
+     headers:
+       accept:
+       - application/json
+       accept-encoding:
+       - gzip, deflate
+       connection:
+       - keep-alive
+       content-length:
+       - '107'
+       content-type:
+       - application/json
+       host:
+       - api.openai.com
+       user-agent:
+       - OpenAI/Python 1.35.13
+       x-stainless-arch:
+       - arm64
+       x-stainless-async:
+       - 'false'
+       x-stainless-lang:
+       - python
+       x-stainless-os:
+       - MacOS
+       x-stainless-package-version:
+       - 1.35.13
+       x-stainless-runtime:
+       - CPython
+       x-stainless-runtime-version:
+       - 3.9.5
+     method: POST
+     uri: https://api.openai.com/v1/chat/completions
+   response:
+     body:
+       string: !!binary |
+         H4sIAAAAAAAAA1RRy07DMBC85ysWn1vUB6XQCxISIB5HJF5CletsElPHa603hYD678hpaMXFh5md
+         8ezsTwagbK4WoEylxdTBDc/dw93zi71/vbotwtNN+Xy9fnl4XH9/3xdnp2qQFLT6QCN/qmNDdXAo
+         lvyONoxaMLmO55PxaD6azmcdUVOOLsnKIMPp8WwoDa9oOBpPZr2yImswqgW8ZQAAP92bMvocv9QC
+         RoM/pMYYdYlqsR8CUEwuIUrHaKNoL2pwIA15Qd/FfqpayG0OUiFQQC/osEbhFnLcoKOADCtGvYYm
+         wKeVKk1ahqBZPPIFXKLRTcQEt/CJjCCWMQcqwJDvvnYtCGtjfdmLcYPcQk0bPFJ9qu1+HUdlYFql
+         1X3j3B4vrLexWjLqSD5Fj0JhJ99mAO9dbc2/JlRgqoMshdbok+F4trNTh0MdyMlJTwqJdgd8ep71
+         +VRso2C9LKwvkQPbrsOUMttmvwAAAP//AwAebllYQgIAAA==
+     headers:
+       CF-Cache-Status:
+       - DYNAMIC
+       CF-RAY:
+       - 8a3c07512da0135d-ATL
+       Connection:
+       - keep-alive
+       Content-Encoding:
+       - gzip
+       Content-Type:
+       - application/json
+       Date:
+       - Mon, 15 Jul 2024 19:06:15 GMT
+       Server:
+       - cloudflare
+       Set-Cookie:
+       - __cf_bm=14DjWb_t0PhGL5mOhK8gsqaD2anNOF1J7Y8Lo_SpKpw-1721070375-1.0.1.1-HZ1yyYErVn.USbzwQt76wp1v0Fpbz2MvF04IOMJMUI7ZFXPv0Np1tZ8z2AthYPyy1oxDYakl9du4ysPr.pp_jg;
+         path=/; expires=Mon, 15-Jul-24 19:36:15 GMT; domain=.api.openai.com; HttpOnly;
+         Secure; SameSite=None
+       - _cfuvid=KuBmiwwXOTWsR0nU52KjyIkpVEjiHsE8MSSzFnGTEv0-1721070375445-0.0.1.1-604800000;
+         path=/; domain=.api.openai.com; HttpOnly; Secure; SameSite=None
+       Transfer-Encoding:
+       - chunked
+       X-Content-Type-Options:
+       - nosniff
+       alt-svc:
+       - h3=":443"; ma=86400
+       openai-organization:
+       - traceloop
+       openai-processing-ms:
+       - '381'
+       openai-version:
+       - '2020-10-01'
+       strict-transport-security:
+       - max-age=15552000; includeSubDomains; preload
+       x-ratelimit-limit-requests:
+       - '5000'
+       x-ratelimit-limit-tokens:
+       - '160000'
+       x-ratelimit-remaining-requests:
+       - '4999'
+       x-ratelimit-remaining-tokens:
+       - '159974'
+       x-ratelimit-reset-requests:
+       - 12ms
+       x-ratelimit-reset-tokens:
+       - 9ms
+       x-request-id:
+       - req_7e9ec34ca2189a55d52eeb1828fcef25
+     status:
+       code: 200
+       message: OK
+ version: 1
lmnr/traceloop_sdk/tests/cassettes/test_manual/test_resource_attributes.yaml
@@ -0,0 +1,98 @@
+ interactions:
+ - request:
+     body: '{"messages": [{"role": "user", "content": "Tell me a joke about opentelemetry"}],
+       "model": "gpt-3.5-turbo"}'
+     headers:
+       accept:
+       - application/json
+       accept-encoding:
+       - gzip, deflate
+       connection:
+       - keep-alive
+       content-length:
+       - '107'
+       content-type:
+       - application/json
+       host:
+       - api.openai.com
+       user-agent:
+       - OpenAI/Python 1.35.13
+       x-stainless-arch:
+       - arm64
+       x-stainless-async:
+       - 'false'
+       x-stainless-lang:
+       - python
+       x-stainless-os:
+       - MacOS
+       x-stainless-package-version:
+       - 1.35.13
+       x-stainless-runtime:
+       - CPython
+       x-stainless-runtime-version:
+       - 3.9.5
+     method: POST
+     uri: https://api.openai.com/v1/chat/completions
+   response:
+     body:
+       string: !!binary |
+         H4sIAAAAAAAAA1SRT2/CMAzF7/0UXi67AGphhcFlGmib2DhO2mGaUEhNG0jjKHE1EOK7Tyn/tIsP
+         7/3r3D17tmdPAqB0qSYgVCMZtU6Pxu3Hz7fZ9+dcT2c3zevT4Wa2nh9mHw/v+5kYRAdtflBx4bpW
+         U1ZkDbLK4aUAAPgatmX0DX6qCQx7F6VBZl2hmpyHAFQkF5Wg09qyaC+qd4GKvKCPsZ/rgD14rvaQ
+         k98TYGvRi2VhkFizgBaq+ByusejaEaTGPTT6A6EJgJ8Yd1Jpb4r+1PWRVy3puJ6vnTvpl7u8gYoQ
+         acWn3Pkz6zWX64iayTfZWCio/W4SgLfuLu2/qipEqoKshbboY8HZ+CS1f8UupnkHhUS7qz7Nk1M+
+         xXsWrNZb6wuMIdruSG2LQ/IHAAD//wMAgnnaciwCAAA=
+     headers:
+       CF-Cache-Status:
+       - DYNAMIC
+       CF-RAY:
+       - 8a3c06ec694fada4-ATL
+       Connection:
+       - keep-alive
+       Content-Encoding:
+       - gzip
+       Content-Type:
+       - application/json
+       Date:
+       - Mon, 15 Jul 2024 19:05:59 GMT
+       Server:
+       - cloudflare
+       Set-Cookie:
+       - __cf_bm=7r92jUdUEA4wJGNCNqX1y_usNja6ZbX4SM4xdbD8r3E-1721070359-1.0.1.1-se82CXovc7ndM.lFT9BKWR72qvK2lRgsPlK5YnmE9otDNYE8e9R9v3CMBKy3SHO9cHAlhedMkC0x0GHKzILUPA;
+         path=/; expires=Mon, 15-Jul-24 19:35:59 GMT; domain=.api.openai.com; HttpOnly;
+         Secure; SameSite=None
+       - _cfuvid=DQePSM_v9bEa3hSaZw7w90aZlxFtRtbZunAmUoOiG98-1721070359012-0.0.1.1-604800000;
+         path=/; domain=.api.openai.com; HttpOnly; Secure; SameSite=None
+       Transfer-Encoding:
+       - chunked
+       X-Content-Type-Options:
+       - nosniff
+       alt-svc:
+       - h3=":443"; ma=86400
+       openai-organization:
+       - traceloop
+       openai-processing-ms:
+       - '443'
+       openai-version:
+       - '2020-10-01'
+       strict-transport-security:
+       - max-age=15552000; includeSubDomains; preload
+       x-ratelimit-limit-requests:
+       - '5000'
+       x-ratelimit-limit-tokens:
+       - '160000'
+       x-ratelimit-remaining-requests:
+       - '4999'
+       x-ratelimit-remaining-tokens:
+       - '159974'
+       x-ratelimit-reset-requests:
+       - 12ms
+       x-ratelimit-reset-tokens:
+       - 9ms
+       x-request-id:
+       - req_2c640127e33a8865f70ee056e6105cf3
+     status:
+       code: 200
+       message: OK
+ version: 1