lmnr 0.4.5__py3-none-any.whl → 0.4.7__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (48)
  1. lmnr/sdk/decorators.py +2 -7
  2. lmnr/sdk/laminar.py +79 -9
  3. lmnr/traceloop_sdk/.flake8 +12 -0
  4. lmnr/traceloop_sdk/.python-version +1 -0
  5. lmnr/traceloop_sdk/README.md +16 -0
  6. lmnr/traceloop_sdk/__init__.py +138 -0
  7. lmnr/traceloop_sdk/config/__init__.py +13 -0
  8. lmnr/traceloop_sdk/decorators/__init__.py +131 -0
  9. lmnr/traceloop_sdk/decorators/base.py +253 -0
  10. lmnr/traceloop_sdk/instruments.py +29 -0
  11. lmnr/traceloop_sdk/metrics/__init__.py +0 -0
  12. lmnr/traceloop_sdk/metrics/metrics.py +176 -0
  13. lmnr/traceloop_sdk/tests/__init__.py +1 -0
  14. lmnr/traceloop_sdk/tests/cassettes/test_association_properties/test_langchain_and_external_association_properties.yaml +101 -0
  15. lmnr/traceloop_sdk/tests/cassettes/test_association_properties/test_langchain_association_properties.yaml +99 -0
  16. lmnr/traceloop_sdk/tests/cassettes/test_manual/test_manual_report.yaml +98 -0
  17. lmnr/traceloop_sdk/tests/cassettes/test_manual/test_resource_attributes.yaml +98 -0
  18. lmnr/traceloop_sdk/tests/cassettes/test_privacy_no_prompts/test_simple_workflow.yaml +199 -0
  19. lmnr/traceloop_sdk/tests/cassettes/test_prompt_management/test_prompt_management.yaml +202 -0
  20. lmnr/traceloop_sdk/tests/cassettes/test_sdk_initialization/test_resource_attributes.yaml +199 -0
  21. lmnr/traceloop_sdk/tests/cassettes/test_tasks/test_task_io_serialization_with_langchain.yaml +96 -0
  22. lmnr/traceloop_sdk/tests/cassettes/test_workflows/test_simple_aworkflow.yaml +98 -0
  23. lmnr/traceloop_sdk/tests/cassettes/test_workflows/test_simple_workflow.yaml +199 -0
  24. lmnr/traceloop_sdk/tests/cassettes/test_workflows/test_streaming_workflow.yaml +167 -0
  25. lmnr/traceloop_sdk/tests/conftest.py +111 -0
  26. lmnr/traceloop_sdk/tests/test_association_properties.py +229 -0
  27. lmnr/traceloop_sdk/tests/test_manual.py +48 -0
  28. lmnr/traceloop_sdk/tests/test_nested_tasks.py +47 -0
  29. lmnr/traceloop_sdk/tests/test_privacy_no_prompts.py +50 -0
  30. lmnr/traceloop_sdk/tests/test_sdk_initialization.py +57 -0
  31. lmnr/traceloop_sdk/tests/test_tasks.py +32 -0
  32. lmnr/traceloop_sdk/tests/test_workflows.py +261 -0
  33. lmnr/traceloop_sdk/tracing/__init__.py +2 -0
  34. lmnr/traceloop_sdk/tracing/content_allow_list.py +24 -0
  35. lmnr/traceloop_sdk/tracing/context_manager.py +13 -0
  36. lmnr/traceloop_sdk/tracing/manual.py +57 -0
  37. lmnr/traceloop_sdk/tracing/tracing.py +1078 -0
  38. lmnr/traceloop_sdk/utils/__init__.py +26 -0
  39. lmnr/traceloop_sdk/utils/in_memory_span_exporter.py +61 -0
  40. lmnr/traceloop_sdk/utils/json_encoder.py +20 -0
  41. lmnr/traceloop_sdk/utils/package_check.py +8 -0
  42. lmnr/traceloop_sdk/version.py +1 -0
  43. {lmnr-0.4.5.dist-info → lmnr-0.4.7.dist-info}/METADATA +40 -3
  44. lmnr-0.4.7.dist-info/RECORD +53 -0
  45. lmnr-0.4.5.dist-info/RECORD +0 -13
  46. {lmnr-0.4.5.dist-info → lmnr-0.4.7.dist-info}/LICENSE +0 -0
  47. {lmnr-0.4.5.dist-info → lmnr-0.4.7.dist-info}/WHEEL +0 -0
  48. {lmnr-0.4.5.dist-info → lmnr-0.4.7.dist-info}/entry_points.txt +0 -0
@@ -0,0 +1,253 @@
1
+ import json
2
+ from functools import wraps
3
+ import os
4
+ import types
5
+ from typing import Any, Optional
6
+ import warnings
7
+
8
+ from opentelemetry import trace
9
+ from opentelemetry import context as context_api
10
+ from opentelemetry.semconv_ai import SpanAttributes, TraceloopSpanKindValues
11
+
12
+ from lmnr.traceloop_sdk.tracing import get_tracer, set_workflow_name
13
+ from lmnr.traceloop_sdk.tracing.tracing import (
14
+ TracerWrapper,
15
+ set_entity_path,
16
+ get_chained_entity_path,
17
+ )
18
+ from lmnr.traceloop_sdk.utils import camel_to_snake
19
+ from lmnr.traceloop_sdk.utils.json_encoder import JSONEncoder
20
+
21
+
22
class CustomJSONEncoder(JSONEncoder):
    """JSON encoder that never raises on unsupported types.

    Falls back to ``str(o)`` for any object the base encoder rejects, so
    telemetry serialization is always best-effort rather than fatal.
    """

    def default(self, o: Any) -> Any:
        try:
            return super().default(o)
        except TypeError:
            # Unsupported type: degrade to its string representation.
            return str(o)
28
+
29
+
30
def _json_dumps(data: dict) -> str:
    """Serialize *data* to JSON, suppressing RuntimeWarnings.

    Returns an empty JSON object (``"{}"``) if serialization fails for any
    reason — telemetry payloads must never propagate errors to the caller.
    """
    try:
        with warnings.catch_warnings():
            warnings.simplefilter("ignore", RuntimeWarning)
            return json.dumps(data, cls=CustomJSONEncoder)
    except Exception:
        # Deliberate best-effort swallow; serialization failure is not fatal.
        # Telemetry().log_exception(e)
        return "{}"
39
+
40
+
41
def entity_method(
    name: Optional[str] = None,
    version: Optional[int] = None,
    tlp_span_kind: Optional[TraceloopSpanKindValues] = TraceloopSpanKindValues.TASK,
):
    """Decorator factory that traces a synchronous function as an entity span.

    Args:
        name: Entity name for the span; defaults to the function's ``__name__``.
        version: Optional entity version recorded as a span attribute.
        tlp_span_kind: Entity kind (task/tool/workflow/agent); workflows and
            agents also set the workflow name for the trace.

    Returns:
        A decorator wrapping the function with span creation, optional
        input/output recording, and context attach/detach.
    """

    def decorate(fn):
        @wraps(fn)
        def wrap(*args, **kwargs):
            # Tracing not initialized: run the function untouched.
            if not TracerWrapper.verify_initialized():
                return fn(*args, **kwargs)

            entity_name = name or fn.__name__
            if tlp_span_kind in [
                TraceloopSpanKindValues.WORKFLOW,
                TraceloopSpanKindValues.AGENT,
            ]:
                set_workflow_name(entity_name)
            span_name = f"{entity_name}.{tlp_span_kind.value}"

            with get_tracer() as tracer:
                span = tracer.start_span(span_name)
                ctx = trace.set_span_in_context(span)
                ctx_token = context_api.attach(ctx)

                if tlp_span_kind in [
                    TraceloopSpanKindValues.TASK,
                    TraceloopSpanKindValues.TOOL,
                ]:
                    entity_path = get_chained_entity_path(entity_name)
                    set_entity_path(entity_path)

                span.set_attribute(
                    SpanAttributes.TRACELOOP_SPAN_KIND, tlp_span_kind.value
                )
                span.set_attribute(SpanAttributes.TRACELOOP_ENTITY_NAME, entity_name)
                if version:
                    span.set_attribute(SpanAttributes.TRACELOOP_ENTITY_VERSION, version)

                try:
                    if _should_send_prompts():
                        span.set_attribute(
                            SpanAttributes.TRACELOOP_ENTITY_INPUT,
                            _json_dumps({"args": args, "kwargs": kwargs}),
                        )
                except TypeError:
                    pass

                # BUGFIX: previously an exception raised by fn leaked the span
                # (never ended) and left the context attached; close both
                # before re-raising.
                try:
                    res = fn(*args, **kwargs)
                except Exception:
                    span.end()
                    context_api.detach(ctx_token)
                    raise

                # span will be ended in the generator
                if isinstance(res, types.GeneratorType):
                    return _handle_generator(span, res)

                try:
                    if _should_send_prompts():
                        span.set_attribute(
                            SpanAttributes.TRACELOOP_ENTITY_OUTPUT,
                            _json_dumps(res),
                        )
                except TypeError:
                    pass

                span.end()
                context_api.detach(ctx_token)

            return res

        return wrap

    return decorate
111
+
112
+
113
def entity_class(
    name: Optional[str],
    version: Optional[int],
    method_name: str,
    tlp_span_kind: Optional[TraceloopSpanKindValues] = TraceloopSpanKindValues.TASK,
):
    """Class decorator: trace one method of the class via ``entity_method``.

    The entity name defaults to the snake_case form of the class name when
    *name* is not supplied.
    """

    def decorator(cls):
        entity_name = name if name else camel_to_snake(cls.__name__)
        original = getattr(cls, method_name)
        traced = entity_method(
            name=entity_name, version=version, tlp_span_kind=tlp_span_kind
        )(original)
        setattr(cls, method_name, traced)
        return cls

    return decorator
132
+
133
+
134
+ # Async Decorators
135
+
136
+
137
def aentity_method(
    name: Optional[str] = None,
    version: Optional[int] = None,
    tlp_span_kind: Optional[TraceloopSpanKindValues] = TraceloopSpanKindValues.TASK,
):
    """Async counterpart of ``entity_method``: trace a coroutine function.

    Args mirror ``entity_method``. If the wrapped coroutine returns an async
    generator, the generator is wrapped so the span ends (and the context is
    detached) once it is exhausted.
    """

    def decorate(fn):
        @wraps(fn)
        async def wrap(*args, **kwargs):
            if not TracerWrapper.verify_initialized():
                # BUGFIX: removed stray `print("Tracer not initialized")`
                # debug output — a library must not write to stdout (the sync
                # wrapper has no such print either).
                return await fn(*args, **kwargs)

            entity_name = name or fn.__name__
            if tlp_span_kind in [
                TraceloopSpanKindValues.WORKFLOW,
                TraceloopSpanKindValues.AGENT,
            ]:
                set_workflow_name(entity_name)
            span_name = f"{entity_name}.{tlp_span_kind.value}"

            with get_tracer() as tracer:
                span = tracer.start_span(span_name)
                ctx = trace.set_span_in_context(span)
                ctx_token = context_api.attach(ctx)

                if tlp_span_kind in [
                    TraceloopSpanKindValues.TASK,
                    TraceloopSpanKindValues.TOOL,
                ]:
                    entity_path = get_chained_entity_path(entity_name)
                    set_entity_path(entity_path)

                span.set_attribute(
                    SpanAttributes.TRACELOOP_SPAN_KIND, tlp_span_kind.value
                )
                span.set_attribute(SpanAttributes.TRACELOOP_ENTITY_NAME, entity_name)
                if version:
                    span.set_attribute(SpanAttributes.TRACELOOP_ENTITY_VERSION, version)

                try:
                    if _should_send_prompts():
                        span.set_attribute(
                            SpanAttributes.TRACELOOP_ENTITY_INPUT,
                            _json_dumps({"args": args, "kwargs": kwargs}),
                        )
                except TypeError:
                    pass

                # BUGFIX: end the span and detach the context if fn raises,
                # instead of leaking both (mirrors the sync wrapper fix).
                try:
                    res = await fn(*args, **kwargs)
                except Exception:
                    span.end()
                    context_api.detach(ctx_token)
                    raise

                # span will be ended in the generator
                if isinstance(res, types.AsyncGeneratorType):
                    # BUGFIX: the original `return await _ahandle_generator(...)`
                    # always raised TypeError — an async generator cannot be
                    # awaited (PEP 525). Return it so callers can `async for`.
                    return _ahandle_generator(span, ctx_token, res)

                try:
                    if _should_send_prompts():
                        # BUGFIX: was `json.dumps(res)`; use _json_dumps for
                        # consistency with the sync wrapper and the input
                        # serialization (custom encoder + warning suppression).
                        span.set_attribute(
                            SpanAttributes.TRACELOOP_ENTITY_OUTPUT,
                            _json_dumps(res),
                        )
                except TypeError:
                    pass

                span.end()
                context_api.detach(ctx_token)

            return res

        return wrap

    return decorate
207
+
208
+
209
def aentity_class(
    name: Optional[str],
    version: Optional[int],
    method_name: str,
    tlp_span_kind: Optional[TraceloopSpanKindValues] = TraceloopSpanKindValues.TASK,
):
    """Async class decorator: trace one async method via ``aentity_method``.

    The entity name defaults to the snake_case form of the class name when
    *name* is not supplied.
    """

    def decorator(cls):
        entity_name = name if name else camel_to_snake(cls.__name__)
        original = getattr(cls, method_name)
        traced = aentity_method(
            name=entity_name, version=version, tlp_span_kind=tlp_span_kind
        )(original)
        setattr(cls, method_name, traced)
        return cls

    return decorator
228
+
229
+
230
def _handle_generator(span, res):
    """Delegate to generator *res*, ending *span* once it is exhausted.

    Uses ``yield from`` so ``send``/``throw`` are forwarded to the wrapped
    generator unchanged.
    """
    # for some reason the SPAN_KEY is not being set in the context of the generator, so we re-set it
    context_api.attach(trace.set_span_in_context(span))
    yield from res

    # Reached only on normal exhaustion of res; the span then closes.
    span.end()

    # Note: we don't detach the context here as this fails in some situations
    # https://github.com/open-telemetry/opentelemetry-python/issues/2606
    # This is not a problem since the context will be detached automatically during garbage collection
240
+
241
+
242
async def _ahandle_generator(span, ctx_token, res):
    """Re-yield every item of async generator *res*, then close the span.

    Unlike the sync variant, the context token is explicitly detached here
    after the generator is exhausted.
    """
    async for item in res:
        yield item

    span.end()
    context_api.detach(ctx_token)
248
+
249
+
250
def _should_send_prompts():
    """Return truthy when prompt/response content may be recorded on spans.

    Content tracing is on unless TRACELOOP_TRACE_CONTENT is set to a value
    other than "true"; a context-level override can re-enable it.
    """
    env_setting = (os.getenv("TRACELOOP_TRACE_CONTENT") or "true").lower()
    return env_setting == "true" or context_api.get_value(
        "override_enable_content_tracing"
    )
@@ -0,0 +1,29 @@
1
+ from enum import Enum
2
+
3
+
4
class Instruments(Enum):
    """Instrumentable third-party integrations, keyed by package identifier.

    Member order is preserved from the original definition; values are the
    lowercase identifiers used to select instrumentations.
    """

    # LLM / embedding providers
    OPENAI = "openai"
    ANTHROPIC = "anthropic"
    COHERE = "cohere"
    # Vector stores
    PINECONE = "pinecone"
    CHROMA = "chroma"
    GOOGLE_GENERATIVEAI = "google_generativeai"
    # Frameworks
    LANGCHAIN = "langchain"
    MISTRAL = "mistral"
    OLLAMA = "ollama"
    LLAMA_INDEX = "llama_index"
    MILVUS = "milvus"
    TRANSFORMERS = "transformers"
    TOGETHER = "together"
    # Infrastructure / transport
    REDIS = "redis"
    REQUESTS = "requests"
    URLLIB3 = "urllib3"
    PYMYSQL = "pymysql"
    # Hosted model platforms
    BEDROCK = "bedrock"
    REPLICATE = "replicate"
    VERTEXAI = "vertexai"
    WATSONX = "watsonx"
    WEAVIATE = "weaviate"
    ALEPHALPHA = "alephalpha"
    MARQO = "marqo"
    LANCEDB = "lancedb"
File without changes
@@ -0,0 +1,176 @@
1
+ from collections.abc import Sequence
2
+ from typing import Dict
3
+
4
+ from opentelemetry.exporter.otlp.proto.grpc.metric_exporter import (
5
+ OTLPMetricExporter as GRPCExporter,
6
+ )
7
+ from opentelemetry.exporter.otlp.proto.http.metric_exporter import (
8
+ OTLPMetricExporter as HTTPExporter,
9
+ )
10
+ from opentelemetry.semconv_ai import Meters
11
+ from opentelemetry.sdk.metrics import MeterProvider
12
+ from opentelemetry.sdk.metrics.export import (
13
+ PeriodicExportingMetricReader,
14
+ MetricExporter,
15
+ )
16
+ from opentelemetry.sdk.metrics.view import View, ExplicitBucketHistogramAggregation
17
+ from opentelemetry.sdk.resources import Resource
18
+
19
+ from opentelemetry import metrics
20
+
21
+
22
class MetricsWrapper(object):
    """Singleton owning the OTLP metrics exporter and global MeterProvider.

    Static configuration must be supplied via ``set_static_params`` before
    the first instantiation; if no endpoint is configured, construction
    returns an inert instance with no exporter/provider.
    """

    # Class-level configuration shared by the singleton, populated by
    # set_static_params() before the first __new__ call.
    resource_attributes: dict = {}
    endpoint: str = None  # NOTE(review): annotated str but defaults to None
    # if it needs headers?
    headers: Dict[str, str] = {}
    # Name-mangled (per-class) private state; set on the singleton instance.
    __metrics_exporter: MetricExporter = None
    __metrics_provider: MeterProvider = None

    def __new__(cls, exporter: MetricExporter = None) -> "MetricsWrapper":
        # Classic singleton: only the first call builds exporter/provider;
        # every later call returns the cached instance unchanged.
        if not hasattr(cls, "instance"):
            obj = cls.instance = super(MetricsWrapper, cls).__new__(cls)
            if not MetricsWrapper.endpoint:
                # No endpoint configured -> leave the wrapper inert.
                return obj

            # Use the injected exporter if given, otherwise build one from
            # the statically configured endpoint/headers.
            obj.__metrics_exporter = (
                exporter
                if exporter
                else init_metrics_exporter(
                    MetricsWrapper.endpoint, MetricsWrapper.headers
                )
            )

            obj.__metrics_provider = init_metrics_provider(
                obj.__metrics_exporter, MetricsWrapper.resource_attributes
            )

        return cls.instance

    @staticmethod
    def set_static_params(
        resource_attributes: dict,
        endpoint: str,
        headers: Dict[str, str],
    ) -> None:
        """Store configuration consumed by the next (first) instantiation."""
        MetricsWrapper.resource_attributes = resource_attributes
        MetricsWrapper.endpoint = endpoint
        MetricsWrapper.headers = headers
59
+
60
+
61
def init_metrics_exporter(endpoint: str, headers: Dict[str, str]) -> MetricExporter:
    """Pick an OTLP metric exporter based on the endpoint scheme.

    HTTP(S) URLs get the OTLP/HTTP exporter with the ``/v1/metrics`` path
    appended; anything else is treated as a gRPC target.
    """
    # BUGFIX: the old check (`"http" in endpoint.lower() or "https" in
    # endpoint.lower()`) had a dead second clause ("https" contains "http")
    # and matched the substring anywhere, so a gRPC host such as
    # "http-collector:4317" was misrouted to the HTTP exporter. Match the
    # URL scheme prefix instead.
    if endpoint.lower().startswith(("http://", "https://")):
        return HTTPExporter(endpoint=f"{endpoint}/v1/metrics", headers=headers)
    return GRPCExporter(endpoint=endpoint, headers=headers)
66
+
67
+
68
def init_metrics_provider(
    exporter: MetricExporter, resource_attributes: dict = None
) -> MeterProvider:
    """Build a MeterProvider around *exporter* and install it globally.

    Resource attributes, when provided, are merged into the provider's
    Resource; the provider is registered as the process-wide meter provider.
    """
    if resource_attributes:
        resource = Resource.create(resource_attributes)
    else:
        resource = Resource.create()

    provider = MeterProvider(
        metric_readers=[PeriodicExportingMetricReader(exporter)],
        resource=resource,
        views=metric_views(),
    )

    metrics.set_meter_provider(provider)
    return provider
85
+
86
+
87
def metric_views() -> Sequence[View]:
    """Histogram views with explicit buckets for LLM and Pinecone metrics."""
    # Doubling buckets from 10ms to ~82s, used by both the token-usage and
    # the Pinecone query-duration histograms.
    small_duration_buckets = [
        0.01, 0.02, 0.04, 0.08, 0.16, 0.32, 0.64,
        1.28, 2.56, 5.12, 10.24, 20.48, 40.96, 81.92,
    ]
    # Powers of four from 1 to 64M for LLM operation duration.
    operation_duration_buckets = [4 ** i for i in range(14)]
    # Similarity scores in [-1, 1] at 0.125 granularity.
    score_buckets = [
        -1, -0.875, -0.75, -0.625, -0.5, -0.375, -0.25, -0.125,
        0, 0.125, 0.25, 0.375, 0.5, 0.625, 0.75, 0.875, 1,
    ]

    return [
        View(
            instrument_name=Meters.LLM_TOKEN_USAGE,
            aggregation=ExplicitBucketHistogramAggregation(
                list(small_duration_buckets)
            ),
        ),
        View(
            instrument_name=Meters.LLM_OPERATION_DURATION,
            aggregation=ExplicitBucketHistogramAggregation(
                operation_duration_buckets
            ),
        ),
        View(
            instrument_name=Meters.PINECONE_DB_QUERY_DURATION,
            aggregation=ExplicitBucketHistogramAggregation(
                list(small_duration_buckets)
            ),
        ),
        View(
            instrument_name=Meters.PINECONE_DB_QUERY_SCORES,
            aggregation=ExplicitBucketHistogramAggregation(score_buckets),
        ),
    ]
@@ -0,0 +1 @@
1
+ """unit tests."""
@@ -0,0 +1,101 @@
1
+ interactions:
2
+ - request:
3
+ body: '{"messages": [{"content": "You are helpful assistant", "role": "system"},
4
+ {"content": "tell me a short joke", "role": "user"}], "model": "gpt-3.5-turbo",
5
+ "logprobs": false, "n": 1, "stream": false, "temperature": 0.7}'
6
+ headers:
7
+ accept:
8
+ - application/json
9
+ accept-encoding:
10
+ - gzip, deflate
11
+ connection:
12
+ - keep-alive
13
+ content-length:
14
+ - '217'
15
+ content-type:
16
+ - application/json
17
+ host:
18
+ - api.openai.com
19
+ user-agent:
20
+ - OpenAI/Python 1.35.15
21
+ x-stainless-arch:
22
+ - arm64
23
+ x-stainless-async:
24
+ - 'false'
25
+ x-stainless-lang:
26
+ - python
27
+ x-stainless-os:
28
+ - MacOS
29
+ x-stainless-package-version:
30
+ - 1.35.15
31
+ x-stainless-runtime:
32
+ - CPython
33
+ x-stainless-runtime-version:
34
+ - 3.9.5
35
+ method: POST
36
+ uri: https://api.openai.com/v1/chat/completions
37
+ response:
38
+ body:
39
+ string: !!binary |
40
+ H4sIAAAAAAAAA1SRS0/DMBCE7/kVW1+4tKgJCo9eEHABhHooLyGEKtfdJgbHa7wbaFX1v6OEtIWL
41
+ D/PtrGbW6wRA2bkagTKlFlMFN7jIbiaP44fF59Pt5Pj65epu6Zbf44un69MxLVW/cdDsHY1sXYeG
42
+ quBQLPlfbCJqwWZrepLlaTo8S7MWVDRH19iKIIOjw3wgdZzRYJhmeecsyRpkNYLXBABg3b5NRj/H
43
+ pRrBsL9VKmTWBarRbghARXKNojSzZdFeVH8PDXlB38a+ryP24LlcwZz8gQAbi14sC4PEmgW0UMXn
44
+ cIlG14wgJa6g0h8IdQD8wriS0vqi93d9xEXNuqnna+c6fbPL66gIkWbc8Z2+sN5yOY2omXyTjYWC
45
+ aukmAXhr71L/q6pCpCrIVOgDfbMw686i9j+xh2neQSHRbq8f5UmXT/GKBavpwvoCY4i2PVLbYpP8
46
+ AAAA//8DAFwYnEsjAgAA
47
+ headers:
48
+ CF-Cache-Status:
49
+ - DYNAMIC
50
+ CF-RAY:
51
+ - 8bbd5d409b62b0bd-ATL
52
+ Connection:
53
+ - keep-alive
54
+ Content-Encoding:
55
+ - gzip
56
+ Content-Type:
57
+ - application/json
58
+ Date:
59
+ - Sat, 31 Aug 2024 13:28:32 GMT
60
+ Server:
61
+ - cloudflare
62
+ Set-Cookie:
63
+ - __cf_bm=3B.f5aMPKiXVHyNDAIAPma3ZGvDnGViQrDAMvyT4n_8-1725110912-1.0.1.1-.elzfgXAenLSaVeAmcRwzq2OROEZMEvOpxSRlQ7PPZ8n6nkbc2NfZXBU1bijPQNxQ28MLNRJFyh4B4Mq4G3PPA;
64
+ path=/; expires=Sat, 31-Aug-24 13:58:32 GMT; domain=.api.openai.com; HttpOnly;
65
+ Secure; SameSite=None
66
+ - _cfuvid=LakflcrbwsF6x0qpc03TIL8jU8c3IjMCt5dua3l4dVA-1725110912530-0.0.1.1-604800000;
67
+ path=/; domain=.api.openai.com; HttpOnly; Secure; SameSite=None
68
+ Transfer-Encoding:
69
+ - chunked
70
+ X-Content-Type-Options:
71
+ - nosniff
72
+ access-control-expose-headers:
73
+ - X-Request-ID
74
+ alt-svc:
75
+ - h3=":443"; ma=86400
76
+ openai-organization:
77
+ - traceloop
78
+ openai-processing-ms:
79
+ - '233'
80
+ openai-version:
81
+ - '2020-10-01'
82
+ strict-transport-security:
83
+ - max-age=15552000; includeSubDomains; preload
84
+ x-ratelimit-limit-requests:
85
+ - '5000'
86
+ x-ratelimit-limit-tokens:
87
+ - '4000000'
88
+ x-ratelimit-remaining-requests:
89
+ - '4999'
90
+ x-ratelimit-remaining-tokens:
91
+ - '3999970'
92
+ x-ratelimit-reset-requests:
93
+ - 12ms
94
+ x-ratelimit-reset-tokens:
95
+ - 0s
96
+ x-request-id:
97
+ - req_459694e3c39fd24575ad9deb5b65a831
98
+ status:
99
+ code: 200
100
+ message: OK
101
+ version: 1
@@ -0,0 +1,99 @@
1
+ interactions:
2
+ - request:
3
+ body: '{"messages": [{"content": "You are helpful assistant", "role": "system"},
4
+ {"content": "tell me a short joke", "role": "user"}], "model": "gpt-3.5-turbo",
5
+ "logprobs": false, "n": 1, "stream": false, "temperature": 0.7}'
6
+ headers:
7
+ accept:
8
+ - application/json
9
+ accept-encoding:
10
+ - gzip, deflate
11
+ connection:
12
+ - keep-alive
13
+ content-length:
14
+ - '217'
15
+ content-type:
16
+ - application/json
17
+ host:
18
+ - api.openai.com
19
+ user-agent:
20
+ - OpenAI/Python 1.35.15
21
+ x-stainless-arch:
22
+ - arm64
23
+ x-stainless-async:
24
+ - 'false'
25
+ x-stainless-lang:
26
+ - python
27
+ x-stainless-os:
28
+ - MacOS
29
+ x-stainless-package-version:
30
+ - 1.35.15
31
+ x-stainless-runtime:
32
+ - CPython
33
+ x-stainless-runtime-version:
34
+ - 3.9.5
35
+ method: POST
36
+ uri: https://api.openai.com/v1/chat/completions
37
+ response:
38
+ body:
39
+ string: !!binary |
40
+ H4sIAAAAAAAAA1SRzU7DMBCE73mKxRcuLYLSFsgFgbhwBCSQ+FHlONvE4Hgt71olQn135BBauPgw
41
+ s7P6dvxVAChbqxKUabWYLrjphdwk01VXzb1teXOX6pquzp9vKSwfFzdqkhNUvaOR39SRoS44FEv+
42
+ xzYRtWDeenI2m13Mzxfz5WB0VKPLsSbI9PRoMZUUK5oen8wWY7Ila5BVCS8FAMDX8GZGX+OnKuF4
43
+ 8qt0yKwbVOVuCEBFcllRmtmyaC9qsjcNeUE/YD+kiBNoMSJYBg3cUhR4pw+ENUXoKZWv/tU/tT0Y
44
+ Sq72hwLSIlTW9MYh5N01pABVD1YY3foSrtHoxAhWYKMZZEMgNmJ9oEaE7Y7dURMiVflOn5zb6Wvr
45
+ LberiJrJZ04WCj/xbQHwNnSU/p2tQqQuyEroA31eOBsrUvtf+WMuR1NItNvr82Ux8inuWbBbra1v
46
+ MIZoh8IyZbEtvgEAAP//AwAFMxvRLwIAAA==
47
+ headers:
48
+ CF-Cache-Status:
49
+ - DYNAMIC
50
+ CF-RAY:
51
+ - 8aef251d0f8609c9-HFA
52
+ Connection:
53
+ - keep-alive
54
+ Content-Encoding:
55
+ - gzip
56
+ Content-Type:
57
+ - application/json
58
+ Date:
59
+ - Tue, 06 Aug 2024 12:49:06 GMT
60
+ Server:
61
+ - cloudflare
62
+ Set-Cookie:
63
+ - __cf_bm=SWNMVXkujzFcfmlIKZqekPtNke27ztV8lzplh5iUMes-1722948546-1.0.1.1-oRP4d5x5PiBOFu.G77C4XWWMxrsKragvFMrFNCIRwudZ7Z2NMrKgMa_A7eqXOME9rU2sqUIpS9c8T9SBEAR7Fg;
64
+ path=/; expires=Tue, 06-Aug-24 13:19:06 GMT; domain=.api.openai.com; HttpOnly;
65
+ Secure; SameSite=None
66
+ - _cfuvid=VuBkx.e0oap2GPCBPG260hGbmwEpoHckoHEm5vALKZs-1722948546860-0.0.1.1-604800000;
67
+ path=/; domain=.api.openai.com; HttpOnly; Secure; SameSite=None
68
+ Transfer-Encoding:
69
+ - chunked
70
+ X-Content-Type-Options:
71
+ - nosniff
72
+ alt-svc:
73
+ - h3=":443"; ma=86400
74
+ openai-organization:
75
+ - traceloop
76
+ openai-processing-ms:
77
+ - '400'
78
+ openai-version:
79
+ - '2020-10-01'
80
+ strict-transport-security:
81
+ - max-age=15552000; includeSubDomains; preload
82
+ x-ratelimit-limit-requests:
83
+ - '5000'
84
+ x-ratelimit-limit-tokens:
85
+ - '4000000'
86
+ x-ratelimit-remaining-requests:
87
+ - '4999'
88
+ x-ratelimit-remaining-tokens:
89
+ - '3999970'
90
+ x-ratelimit-reset-requests:
91
+ - 12ms
92
+ x-ratelimit-reset-tokens:
93
+ - 0s
94
+ x-request-id:
95
+ - req_bf54266d3b5b08c26a6dc51f55dd208c
96
+ status:
97
+ code: 200
98
+ message: OK
99
+ version: 1