langtrace-python-sdk 1.3.5__tar.gz → 1.3.6__tar.gz

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (102)
  1. {langtrace_python_sdk-1.3.5 → langtrace_python_sdk-1.3.6}/PKG-INFO +5 -1
  2. {langtrace_python_sdk-1.3.5 → langtrace_python_sdk-1.3.6}/pyproject.toml +7 -1
  3. {langtrace_python_sdk-1.3.5 → langtrace_python_sdk-1.3.6}/src/langtrace_python_sdk/instrumentation/openai/patch.py +60 -20
  4. langtrace_python_sdk-1.3.6/src/langtrace_python_sdk/version.py +1 -0
  5. langtrace_python_sdk-1.3.6/src/tests/openai/cassettes/test_async_chat_completion_streaming.yaml +158 -0
  6. langtrace_python_sdk-1.3.6/src/tests/openai/cassettes/test_async_image_generation.yaml +97 -0
  7. langtrace_python_sdk-1.3.6/src/tests/openai/cassettes/test_chat_completion.yaml +101 -0
  8. langtrace_python_sdk-1.3.6/src/tests/openai/cassettes/test_chat_completion_streaming.yaml +200860 -0
  9. langtrace_python_sdk-1.3.6/src/tests/openai/cassettes/test_image_generation.yaml +97 -0
  10. langtrace_python_sdk-1.3.6/src/tests/openai/conftest.py +45 -0
  11. langtrace_python_sdk-1.3.6/src/tests/openai/test_chat_completion.py +142 -0
  12. langtrace_python_sdk-1.3.6/src/tests/openai/test_embeddings.py +0 -0
  13. langtrace_python_sdk-1.3.6/src/tests/openai/test_image_generation.py +77 -0
  14. langtrace_python_sdk-1.3.5/src/langtrace_python_sdk/version.py +0 -1
  15. langtrace_python_sdk-1.3.5/src/tests/openai/test_chat_completion.py +0 -91
  16. langtrace_python_sdk-1.3.5/src/tests/openai/test_image_generation.py +0 -76
  17. {langtrace_python_sdk-1.3.5 → langtrace_python_sdk-1.3.6}/.gitignore +0 -0
  18. {langtrace_python_sdk-1.3.5 → langtrace_python_sdk-1.3.6}/LICENSE +0 -0
  19. {langtrace_python_sdk-1.3.5 → langtrace_python_sdk-1.3.6}/README.md +0 -0
  20. {langtrace_python_sdk-1.3.5 → langtrace_python_sdk-1.3.6}/src/__init__.py +0 -0
  21. {langtrace_python_sdk-1.3.5 → langtrace_python_sdk-1.3.6}/src/examples/__init__.py +0 -0
  22. {langtrace_python_sdk-1.3.5 → langtrace_python_sdk-1.3.6}/src/examples/anthropic_example/__init__.py +0 -0
  23. {langtrace_python_sdk-1.3.5 → langtrace_python_sdk-1.3.6}/src/examples/anthropic_example/completion.py +0 -0
  24. {langtrace_python_sdk-1.3.5 → langtrace_python_sdk-1.3.6}/src/examples/chroma_example/__init__.py +0 -0
  25. {langtrace_python_sdk-1.3.5 → langtrace_python_sdk-1.3.6}/src/examples/chroma_example/basic.py +0 -0
  26. {langtrace_python_sdk-1.3.5 → langtrace_python_sdk-1.3.6}/src/examples/cohere_example/__init__.py +0 -0
  27. {langtrace_python_sdk-1.3.5 → langtrace_python_sdk-1.3.6}/src/examples/cohere_example/chat.py +0 -0
  28. {langtrace_python_sdk-1.3.5 → langtrace_python_sdk-1.3.6}/src/examples/cohere_example/chat_stream.py +0 -0
  29. {langtrace_python_sdk-1.3.5 → langtrace_python_sdk-1.3.6}/src/examples/cohere_example/embed_create.py +0 -0
  30. {langtrace_python_sdk-1.3.5 → langtrace_python_sdk-1.3.6}/src/examples/fastapi_example/basic_route.py +0 -0
  31. {langtrace_python_sdk-1.3.5 → langtrace_python_sdk-1.3.6}/src/examples/hiveagent_example/basic.py +0 -0
  32. {langtrace_python_sdk-1.3.5 → langtrace_python_sdk-1.3.6}/src/examples/langchain_example/__init__.py +0 -0
  33. {langtrace_python_sdk-1.3.5 → langtrace_python_sdk-1.3.6}/src/examples/langchain_example/basic.py +0 -0
  34. {langtrace_python_sdk-1.3.5 → langtrace_python_sdk-1.3.6}/src/examples/langchain_example/tool.py +0 -0
  35. {langtrace_python_sdk-1.3.5 → langtrace_python_sdk-1.3.6}/src/examples/llamaindex_example/__init__.py +0 -0
  36. {langtrace_python_sdk-1.3.5 → langtrace_python_sdk-1.3.6}/src/examples/llamaindex_example/agent.py +0 -0
  37. {langtrace_python_sdk-1.3.5 → langtrace_python_sdk-1.3.6}/src/examples/llamaindex_example/basic.py +0 -0
  38. {langtrace_python_sdk-1.3.5 → langtrace_python_sdk-1.3.6}/src/examples/llamaindex_example/data/abramov.txt +0 -0
  39. {langtrace_python_sdk-1.3.5 → langtrace_python_sdk-1.3.6}/src/examples/openai/__init__.py +0 -0
  40. {langtrace_python_sdk-1.3.5 → langtrace_python_sdk-1.3.6}/src/examples/openai/async_tool_calling_nonstreaming.py +0 -0
  41. {langtrace_python_sdk-1.3.5 → langtrace_python_sdk-1.3.6}/src/examples/openai/async_tool_calling_streaming.py +0 -0
  42. {langtrace_python_sdk-1.3.5 → langtrace_python_sdk-1.3.6}/src/examples/openai/chat_completion.py +0 -0
  43. {langtrace_python_sdk-1.3.5 → langtrace_python_sdk-1.3.6}/src/examples/openai/embeddings_create.py +0 -0
  44. {langtrace_python_sdk-1.3.5 → langtrace_python_sdk-1.3.6}/src/examples/openai/function_calling.py +0 -0
  45. {langtrace_python_sdk-1.3.5 → langtrace_python_sdk-1.3.6}/src/examples/openai/images_generate.py +0 -0
  46. {langtrace_python_sdk-1.3.5 → langtrace_python_sdk-1.3.6}/src/examples/openai/tool_calling_nonstreaming.py +0 -0
  47. {langtrace_python_sdk-1.3.5 → langtrace_python_sdk-1.3.6}/src/examples/openai/tool_calling_streaming.py +0 -0
  48. {langtrace_python_sdk-1.3.5 → langtrace_python_sdk-1.3.6}/src/examples/perplexity_example/basic.py +0 -0
  49. {langtrace_python_sdk-1.3.5 → langtrace_python_sdk-1.3.6}/src/examples/pinecone_example/__init__.py +0 -0
  50. {langtrace_python_sdk-1.3.5 → langtrace_python_sdk-1.3.6}/src/examples/pinecone_example/basic.py +0 -0
  51. {langtrace_python_sdk-1.3.5 → langtrace_python_sdk-1.3.6}/src/langtrace_python_sdk/__init__.py +0 -0
  52. {langtrace_python_sdk-1.3.5 → langtrace_python_sdk-1.3.6}/src/langtrace_python_sdk/constants/__init__.py +0 -0
  53. {langtrace_python_sdk-1.3.5 → langtrace_python_sdk-1.3.6}/src/langtrace_python_sdk/constants/exporter/langtrace_exporter.py +0 -0
  54. {langtrace_python_sdk-1.3.5 → langtrace_python_sdk-1.3.6}/src/langtrace_python_sdk/constants/instrumentation/__init__.py +0 -0
  55. {langtrace_python_sdk-1.3.5 → langtrace_python_sdk-1.3.6}/src/langtrace_python_sdk/constants/instrumentation/anthropic.py +0 -0
  56. {langtrace_python_sdk-1.3.5 → langtrace_python_sdk-1.3.6}/src/langtrace_python_sdk/constants/instrumentation/chroma.py +0 -0
  57. {langtrace_python_sdk-1.3.5 → langtrace_python_sdk-1.3.6}/src/langtrace_python_sdk/constants/instrumentation/cohere.py +0 -0
  58. {langtrace_python_sdk-1.3.5 → langtrace_python_sdk-1.3.6}/src/langtrace_python_sdk/constants/instrumentation/common.py +0 -0
  59. {langtrace_python_sdk-1.3.5 → langtrace_python_sdk-1.3.6}/src/langtrace_python_sdk/constants/instrumentation/openai.py +0 -0
  60. {langtrace_python_sdk-1.3.5 → langtrace_python_sdk-1.3.6}/src/langtrace_python_sdk/constants/instrumentation/pinecone.py +0 -0
  61. {langtrace_python_sdk-1.3.5 → langtrace_python_sdk-1.3.6}/src/langtrace_python_sdk/extensions/__init__.py +0 -0
  62. {langtrace_python_sdk-1.3.5 → langtrace_python_sdk-1.3.6}/src/langtrace_python_sdk/extensions/langtrace_exporter.py +0 -0
  63. {langtrace_python_sdk-1.3.5 → langtrace_python_sdk-1.3.6}/src/langtrace_python_sdk/instrumentation/__init__.py +0 -0
  64. {langtrace_python_sdk-1.3.5 → langtrace_python_sdk-1.3.6}/src/langtrace_python_sdk/instrumentation/anthropic/__init__.py +0 -0
  65. {langtrace_python_sdk-1.3.5 → langtrace_python_sdk-1.3.6}/src/langtrace_python_sdk/instrumentation/anthropic/instrumentation.py +0 -0
  66. {langtrace_python_sdk-1.3.5 → langtrace_python_sdk-1.3.6}/src/langtrace_python_sdk/instrumentation/anthropic/patch.py +0 -0
  67. {langtrace_python_sdk-1.3.5 → langtrace_python_sdk-1.3.6}/src/langtrace_python_sdk/instrumentation/chroma/__init__.py +0 -0
  68. {langtrace_python_sdk-1.3.5 → langtrace_python_sdk-1.3.6}/src/langtrace_python_sdk/instrumentation/chroma/instrumentation.py +0 -0
  69. {langtrace_python_sdk-1.3.5 → langtrace_python_sdk-1.3.6}/src/langtrace_python_sdk/instrumentation/chroma/patch.py +0 -0
  70. {langtrace_python_sdk-1.3.5 → langtrace_python_sdk-1.3.6}/src/langtrace_python_sdk/instrumentation/cohere/__init__.py +0 -0
  71. {langtrace_python_sdk-1.3.5 → langtrace_python_sdk-1.3.6}/src/langtrace_python_sdk/instrumentation/cohere/instrumentation.py +0 -0
  72. {langtrace_python_sdk-1.3.5 → langtrace_python_sdk-1.3.6}/src/langtrace_python_sdk/instrumentation/cohere/patch.py +0 -0
  73. {langtrace_python_sdk-1.3.5 → langtrace_python_sdk-1.3.6}/src/langtrace_python_sdk/instrumentation/langchain/__init__.py +0 -0
  74. {langtrace_python_sdk-1.3.5 → langtrace_python_sdk-1.3.6}/src/langtrace_python_sdk/instrumentation/langchain/instrumentation.py +0 -0
  75. {langtrace_python_sdk-1.3.5 → langtrace_python_sdk-1.3.6}/src/langtrace_python_sdk/instrumentation/langchain/patch.py +0 -0
  76. {langtrace_python_sdk-1.3.5 → langtrace_python_sdk-1.3.6}/src/langtrace_python_sdk/instrumentation/langchain_community/__init__.py +0 -0
  77. {langtrace_python_sdk-1.3.5 → langtrace_python_sdk-1.3.6}/src/langtrace_python_sdk/instrumentation/langchain_community/instrumentation.py +0 -0
  78. {langtrace_python_sdk-1.3.5 → langtrace_python_sdk-1.3.6}/src/langtrace_python_sdk/instrumentation/langchain_community/patch.py +0 -0
  79. {langtrace_python_sdk-1.3.5 → langtrace_python_sdk-1.3.6}/src/langtrace_python_sdk/instrumentation/langchain_core/__init__.py +0 -0
  80. {langtrace_python_sdk-1.3.5 → langtrace_python_sdk-1.3.6}/src/langtrace_python_sdk/instrumentation/langchain_core/instrumentation.py +0 -0
  81. {langtrace_python_sdk-1.3.5 → langtrace_python_sdk-1.3.6}/src/langtrace_python_sdk/instrumentation/langchain_core/patch.py +0 -0
  82. {langtrace_python_sdk-1.3.5 → langtrace_python_sdk-1.3.6}/src/langtrace_python_sdk/instrumentation/llamaindex/__init__.py +0 -0
  83. {langtrace_python_sdk-1.3.5 → langtrace_python_sdk-1.3.6}/src/langtrace_python_sdk/instrumentation/llamaindex/instrumentation.py +0 -0
  84. {langtrace_python_sdk-1.3.5 → langtrace_python_sdk-1.3.6}/src/langtrace_python_sdk/instrumentation/llamaindex/patch.py +0 -0
  85. {langtrace_python_sdk-1.3.5 → langtrace_python_sdk-1.3.6}/src/langtrace_python_sdk/instrumentation/openai/__init__.py +0 -0
  86. {langtrace_python_sdk-1.3.5 → langtrace_python_sdk-1.3.6}/src/langtrace_python_sdk/instrumentation/openai/instrumentation.py +0 -0
  87. {langtrace_python_sdk-1.3.5 → langtrace_python_sdk-1.3.6}/src/langtrace_python_sdk/instrumentation/pinecone/__init__.py +0 -0
  88. {langtrace_python_sdk-1.3.5 → langtrace_python_sdk-1.3.6}/src/langtrace_python_sdk/instrumentation/pinecone/instrumentation.py +0 -0
  89. {langtrace_python_sdk-1.3.5 → langtrace_python_sdk-1.3.6}/src/langtrace_python_sdk/instrumentation/pinecone/patch.py +0 -0
  90. {langtrace_python_sdk-1.3.5 → langtrace_python_sdk-1.3.6}/src/langtrace_python_sdk/langtrace.py +0 -0
  91. {langtrace_python_sdk-1.3.5 → langtrace_python_sdk-1.3.6}/src/langtrace_python_sdk/utils/__init__.py +0 -0
  92. {langtrace_python_sdk-1.3.5 → langtrace_python_sdk-1.3.6}/src/langtrace_python_sdk/utils/llm.py +0 -0
  93. {langtrace_python_sdk-1.3.5 → langtrace_python_sdk-1.3.6}/src/langtrace_python_sdk/utils/with_root_span.py +0 -0
  94. {langtrace_python_sdk-1.3.5 → langtrace_python_sdk-1.3.6}/src/run_example.py +0 -0
  95. {langtrace_python_sdk-1.3.5 → langtrace_python_sdk-1.3.6}/src/tests/__init__.py +0 -0
  96. {langtrace_python_sdk-1.3.5 → langtrace_python_sdk-1.3.6}/src/tests/anthropic/test_anthropic.py +0 -0
  97. {langtrace_python_sdk-1.3.5 → langtrace_python_sdk-1.3.6}/src/tests/chroma/test_chroma.py +0 -0
  98. {langtrace_python_sdk-1.3.5 → langtrace_python_sdk-1.3.6}/src/tests/langchain/test_langchain.py +0 -0
  99. {langtrace_python_sdk-1.3.5 → langtrace_python_sdk-1.3.6}/src/tests/langchain/test_langchain_community.py +0 -0
  100. {langtrace_python_sdk-1.3.5 → langtrace_python_sdk-1.3.6}/src/tests/langchain/test_langchain_core.py +0 -0
  101. {langtrace_python_sdk-1.3.5 → langtrace_python_sdk-1.3.6}/src/tests/pinecone/test_pinecone.py +0 -0
  102. {langtrace_python_sdk-1.3.5 → langtrace_python_sdk-1.3.6}/src/tests/utils.py +0 -0
{langtrace_python_sdk-1.3.5 → langtrace_python_sdk-1.3.6}/PKG-INFO
@@ -1,6 +1,6 @@
  Metadata-Version: 2.3
  Name: langtrace-python-sdk
- Version: 1.3.5
+ Version: 1.3.6
  Summary: Python SDK for LangTrace
  Project-URL: Homepage, https://github.com/Scale3-Labs/langtrace-python-sdk
  Author-email: Scale3 Labs <engineering@scale3labs.com>
@@ -25,6 +25,10 @@ Requires-Dist: langchain-openai; extra == 'dev'
  Requires-Dist: llama-index; extra == 'dev'
  Requires-Dist: openai; extra == 'dev'
  Requires-Dist: python-dotenv; extra == 'dev'
+ Provides-Extra: test
+ Requires-Dist: pytest; extra == 'test'
+ Requires-Dist: pytest-asyncio; extra == 'test'
+ Requires-Dist: pytest-vcr; extra == 'test'
  Description-Content-Type: text/markdown

  # [Langtrace](https://www.langtrace.ai)
{langtrace_python_sdk-1.3.5 → langtrace_python_sdk-1.3.6}/pyproject.toml
@@ -41,6 +41,12 @@ dev = [
  "cohere"
  ]

+ test = [
+ "pytest",
+ "pytest-vcr",
+ "pytest-asyncio"
+ ]
+


  [project.urls]
@@ -56,7 +62,7 @@ include = [
  ]

  [tool.hatch.build.targets.wheel]
- packages = ["src/langtrace_python_sdk", "src/examples"]
+ packages = ["src/langtrace_python_sdk", "src/examples", "src/tests"]

  # [tool.semantic_release]
  # version_variable = "pyproject.toml:version" # version location
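The new test extra above pulls in pytest, pytest-vcr, and pytest-asyncio, and the cassette YAML files added later in this diff are the fixtures those tests replay instead of calling api.openai.com. As a hedged sketch only (not the package's actual test code; the client setup and assertion are illustrative assumptions), a pytest-vcr test that consumes cassettes/test_chat_completion.yaml would look roughly like this:

import pytest
from openai import OpenAI


@pytest.mark.vcr()  # pytest-vcr replays cassettes/<test_name>.yaml on later runs
def test_chat_completion():
    client = OpenAI(api_key="test-key")  # dummy key; never sent when a cassette is replayed
    response = client.chat.completions.create(
        model="gpt-4",
        messages=[{"role": "user", "content": "Say this is a test three times"}],
        stream=False,
    )
    assert response.choices[0].message.content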
{langtrace_python_sdk-1.3.5 → langtrace_python_sdk-1.3.6}/src/langtrace_python_sdk/instrumentation/openai/patch.py
@@ -121,7 +121,8 @@ def async_images_generate(original_method, version, tracer):
  with tracer.start_as_current_span(
  APIS["IMAGES_GENERATION"]["METHOD"], kind=SpanKind.CLIENT
  ) as span:
- async for field, value in attributes.model_dump(by_alias=True).items():
+ items = attributes.model_dump(by_alias=True).items()
+ for field, value in items:
  if value is not None:
  span.set_attribute(field, value)
  try:
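The async_images_generate change above is a bug fix rather than pure formatting: attributes.model_dump(by_alias=True) returns a plain dict, so its items() view cannot be consumed with async for. A minimal sketch of the corrected pattern, using a hypothetical stand-in for the SDK's span-attributes model:

from pydantic import BaseModel


class ImageAttributes(BaseModel):  # hypothetical stand-in for the SDK's attributes model
    model: str | None = None
    prompt: str | None = None


def set_span_attributes(span, attributes: ImageAttributes) -> None:
    # model_dump() yields a regular dict, so a plain for loop is required;
    # only non-None fields are copied onto the OpenTelemetry span.
    for field, value in attributes.model_dump(by_alias=True).items():
        if value is not None:
            span.set_attribute(field, value)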
@@ -325,7 +326,9 @@ def chat_completions_create(original_method, version, tracer):
  span.end()
  raise

- def handle_streaming_response(result, span, prompt_tokens, function_call=False, tool_calls=False):
+ def handle_streaming_response(
+ result, span, prompt_tokens, function_call=False, tool_calls=False
+ ):
  """Process and yield streaming response chunks."""
  result_content = []
  span.add_event(Event.STREAM_START.value)
@@ -343,12 +346,16 @@ def chat_completions_create(original_method, version, tracer):
  content = [choice.delta.content]
  elif function_call:
  for choice in chunk.choices:
- if choice.delta and choice.delta.function_call and choice.delta.function_call.arguments is not None:
- token_counts = estimate_tokens(choice.delta.function_call.arguments)
- completion_tokens += token_counts
- content = [
+ if (
+ choice.delta
+ and choice.delta.function_call
+ and choice.delta.function_call.arguments is not None
+ ):
+ token_counts = estimate_tokens(
  choice.delta.function_call.arguments
- ]
+ )
+ completion_tokens += token_counts
+ content = [choice.delta.function_call.arguments]
  elif tool_calls:
  # TODO(Karthik): Tool calls streaming is tricky. The chunks after the
  # first one are missing the function name and id though the arguments
@@ -357,7 +364,14 @@ def chat_completions_create(original_method, version, tracer):
  else:
  content = []
  span.add_event(
- Event.STREAM_OUTPUT.value, {"response": "".join(content) if len(content) > 0 and content[0] is not None else ""}
+ Event.STREAM_OUTPUT.value,
+ {
+ "response": (
+ "".join(content)
+ if len(content) > 0 and content[0] is not None
+ else ""
+ )
+ },
  )
  result_content.append(content[0] if len(content) > 0 else "")
  yield chunk
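For context, handle_streaming_response (and its async twin in the next hunks) wraps the OpenAI stream in a generator that re-yields every chunk unchanged while recording stream events and an estimated completion-token count on the span. A simplified, hedged sketch of that wrap-and-re-yield pattern follows; the event names and the estimate_tokens callable are assumptions here, not copies of the SDK's own definitions:

def wrap_stream(result, span, estimate_tokens):
    """Re-yield chunks from an OpenAI stream while recording span events."""
    completion_tokens = 0
    result_content = []
    span.add_event("stream.start")  # illustrative event name
    try:
        for chunk in result:
            content = [
                choice.delta.content
                for choice in chunk.choices
                if choice.delta and choice.delta.content is not None
            ]
            completion_tokens += sum(estimate_tokens(text) for text in content)
            span.add_event("stream.output", {"response": "".join(content)})
            result_content.append(content[0] if content else "")
            yield chunk  # the caller's iteration is unaffected
    finally:
        span.add_event("stream.end")
        span.end()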
@@ -559,7 +573,9 @@ def async_chat_completions_create(original_method, version, tracer):
  span.end()
  raise

- async def ahandle_streaming_response(result, span, prompt_tokens, function_call=False, tool_calls=False):
+ async def ahandle_streaming_response(
+ result, span, prompt_tokens, function_call=False, tool_calls=False
+ ):
  """Process and yield streaming response chunks."""
  result_content = []
  span.add_event(Event.STREAM_START.value)
@@ -577,12 +593,16 @@ def async_chat_completions_create(original_method, version, tracer):
  content = [choice.delta.content]
  elif function_call:
  for choice in chunk.choices:
- if choice.delta and choice.delta.function_call and choice.delta.function_call.arguments is not None:
- token_counts = estimate_tokens(choice.delta.function_call.arguments)
- completion_tokens += token_counts
- content = [
+ if (
+ choice.delta
+ and choice.delta.function_call
+ and choice.delta.function_call.arguments is not None
+ ):
+ token_counts = estimate_tokens(
  choice.delta.function_call.arguments
- ]
+ )
+ completion_tokens += token_counts
+ content = [choice.delta.function_call.arguments]
  elif tool_calls:
  # TODO(Karthik): Tool calls streaming is tricky. The chunks after the
  # first one are missing the function name and id though the arguments
@@ -591,7 +611,14 @@ def async_chat_completions_create(original_method, version, tracer):
  else:
  content = []
  span.add_event(
- Event.STREAM_OUTPUT.value, {"response": "".join(content) if len(content) > 0 and content[0] is not None else ""}
+ Event.STREAM_OUTPUT.value,
+ {
+ "response": (
+ "".join(content)
+ if len(content) > 0 and content[0] is not None
+ else ""
+ )
+ },
  )
  result_content.append(content[0] if len(content) > 0 else "")
  yield chunk
@@ -756,11 +783,19 @@ def async_embeddings_create(original_method, version, tracer):

  def extract_content(choice):
  # Check if choice.message exists and has a content attribute
- if hasattr(choice, 'message') and hasattr(choice.message, 'content') and choice.message.content is not None:
+ if (
+ hasattr(choice, "message")
+ and hasattr(choice.message, "content")
+ and choice.message.content is not None
+ ):
  return choice.message.content

  # Check if choice.message has tool_calls and extract information accordingly
- elif hasattr(choice, 'message') and hasattr(choice.message, 'tool_calls') and choice.message.tool_calls is not None:
+ elif (
+ hasattr(choice, "message")
+ and hasattr(choice.message, "tool_calls")
+ and choice.message.tool_calls is not None
+ ):
  result = [
  {
  "id": tool_call.id,
@@ -768,13 +803,18 @@ def extract_content(choice):
  "function": {
  "name": tool_call.function.name,
  "arguments": tool_call.function.arguments,
- }
- } for tool_call in choice.message.tool_calls
+ },
+ }
+ for tool_call in choice.message.tool_calls
  ]
  return result

  # Check if choice.message has a function_call and extract information accordingly
- elif hasattr(choice, 'message') and hasattr(choice.message, 'function_call') and choice.message.function_call is not None:
+ elif (
+ hasattr(choice, "message")
+ and hasattr(choice.message, "function_call")
+ and choice.message.function_call is not None
+ ):
  return {
  "name": choice.message.function_call.name,
  "arguments": choice.message.function_call.arguments,
langtrace_python_sdk-1.3.6/src/langtrace_python_sdk/version.py
@@ -0,0 +1 @@
+ __version__ = "1.3.6"
langtrace_python_sdk-1.3.6/src/tests/openai/cassettes/test_async_chat_completion_streaming.yaml
@@ -0,0 +1,158 @@
+ interactions:
+ - request:
+ body: '{"messages": [{"role": "user", "content": "Say this is a test three times"}],
+ "model": "gpt-4", "stream": true}'
+ headers:
+ accept:
+ - application/json
+ accept-encoding:
+ - gzip, deflate
+ connection:
+ - keep-alive
+ content-length:
+ - '111'
+ content-type:
+ - application/json
+ host:
+ - api.openai.com
+ user-agent:
+ - AsyncOpenAI/Python 1.23.2
+ x-stainless-arch:
+ - arm64
+ x-stainless-async:
+ - async:asyncio
+ x-stainless-lang:
+ - python
+ x-stainless-os:
+ - MacOS
+ x-stainless-package-version:
+ - 1.23.2
+ x-stainless-runtime:
+ - CPython
+ x-stainless-runtime-version:
+ - 3.11.5
+ method: POST
+ uri: https://api.openai.com/v1/chat/completions
+ response:
+ body:
+ string: 'data: {"id":"chatcmpl-9G7iUffCm37bRsnrZcMjcD55dwZcN","object":"chat.completion.chunk","created":1713629938,"model":"gpt-4-0613","system_fingerprint":null,"choices":[{"index":0,"delta":{"role":"assistant","content":""},"logprobs":null,"finish_reason":null}]}
+
+
+ data: {"id":"chatcmpl-9G7iUffCm37bRsnrZcMjcD55dwZcN","object":"chat.completion.chunk","created":1713629938,"model":"gpt-4-0613","system_fingerprint":null,"choices":[{"index":0,"delta":{"content":"This"},"logprobs":null,"finish_reason":null}]}
+
+
+ data: {"id":"chatcmpl-9G7iUffCm37bRsnrZcMjcD55dwZcN","object":"chat.completion.chunk","created":1713629938,"model":"gpt-4-0613","system_fingerprint":null,"choices":[{"index":0,"delta":{"content":"
+ is"},"logprobs":null,"finish_reason":null}]}
+
+
+ data: {"id":"chatcmpl-9G7iUffCm37bRsnrZcMjcD55dwZcN","object":"chat.completion.chunk","created":1713629938,"model":"gpt-4-0613","system_fingerprint":null,"choices":[{"index":0,"delta":{"content":"
+ a"},"logprobs":null,"finish_reason":null}]}
+
+
+ data: {"id":"chatcmpl-9G7iUffCm37bRsnrZcMjcD55dwZcN","object":"chat.completion.chunk","created":1713629938,"model":"gpt-4-0613","system_fingerprint":null,"choices":[{"index":0,"delta":{"content":"
+ test"},"logprobs":null,"finish_reason":null}]}
+
+
+ data: {"id":"chatcmpl-9G7iUffCm37bRsnrZcMjcD55dwZcN","object":"chat.completion.chunk","created":1713629938,"model":"gpt-4-0613","system_fingerprint":null,"choices":[{"index":0,"delta":{"content":"."},"logprobs":null,"finish_reason":null}]}
+
+
+ data: {"id":"chatcmpl-9G7iUffCm37bRsnrZcMjcD55dwZcN","object":"chat.completion.chunk","created":1713629938,"model":"gpt-4-0613","system_fingerprint":null,"choices":[{"index":0,"delta":{"content":"
+ This"},"logprobs":null,"finish_reason":null}]}
+
+
+ data: {"id":"chatcmpl-9G7iUffCm37bRsnrZcMjcD55dwZcN","object":"chat.completion.chunk","created":1713629938,"model":"gpt-4-0613","system_fingerprint":null,"choices":[{"index":0,"delta":{"content":"
+ is"},"logprobs":null,"finish_reason":null}]}
+
+
+ data: {"id":"chatcmpl-9G7iUffCm37bRsnrZcMjcD55dwZcN","object":"chat.completion.chunk","created":1713629938,"model":"gpt-4-0613","system_fingerprint":null,"choices":[{"index":0,"delta":{"content":"
+ a"},"logprobs":null,"finish_reason":null}]}
+
+
+ data: {"id":"chatcmpl-9G7iUffCm37bRsnrZcMjcD55dwZcN","object":"chat.completion.chunk","created":1713629938,"model":"gpt-4-0613","system_fingerprint":null,"choices":[{"index":0,"delta":{"content":"
+ test"},"logprobs":null,"finish_reason":null}]}
+
+
+ data: {"id":"chatcmpl-9G7iUffCm37bRsnrZcMjcD55dwZcN","object":"chat.completion.chunk","created":1713629938,"model":"gpt-4-0613","system_fingerprint":null,"choices":[{"index":0,"delta":{"content":"."},"logprobs":null,"finish_reason":null}]}
+
+
+ data: {"id":"chatcmpl-9G7iUffCm37bRsnrZcMjcD55dwZcN","object":"chat.completion.chunk","created":1713629938,"model":"gpt-4-0613","system_fingerprint":null,"choices":[{"index":0,"delta":{"content":"
+ This"},"logprobs":null,"finish_reason":null}]}
+
+
+ data: {"id":"chatcmpl-9G7iUffCm37bRsnrZcMjcD55dwZcN","object":"chat.completion.chunk","created":1713629938,"model":"gpt-4-0613","system_fingerprint":null,"choices":[{"index":0,"delta":{"content":"
+ is"},"logprobs":null,"finish_reason":null}]}
+
+
+ data: {"id":"chatcmpl-9G7iUffCm37bRsnrZcMjcD55dwZcN","object":"chat.completion.chunk","created":1713629938,"model":"gpt-4-0613","system_fingerprint":null,"choices":[{"index":0,"delta":{"content":"
+ a"},"logprobs":null,"finish_reason":null}]}
+
+
+ data: {"id":"chatcmpl-9G7iUffCm37bRsnrZcMjcD55dwZcN","object":"chat.completion.chunk","created":1713629938,"model":"gpt-4-0613","system_fingerprint":null,"choices":[{"index":0,"delta":{"content":"
+ test"},"logprobs":null,"finish_reason":null}]}
+
+
+ data: {"id":"chatcmpl-9G7iUffCm37bRsnrZcMjcD55dwZcN","object":"chat.completion.chunk","created":1713629938,"model":"gpt-4-0613","system_fingerprint":null,"choices":[{"index":0,"delta":{"content":"."},"logprobs":null,"finish_reason":null}]}
+
+
+ data: {"id":"chatcmpl-9G7iUffCm37bRsnrZcMjcD55dwZcN","object":"chat.completion.chunk","created":1713629938,"model":"gpt-4-0613","system_fingerprint":null,"choices":[{"index":0,"delta":{},"logprobs":null,"finish_reason":"stop"}]}
+
+
+ data: [DONE]
+
+
+ '
+ headers:
+ CF-Cache-Status:
+ - DYNAMIC
+ CF-RAY:
+ - 8776740aaa3e73a3-MRS
+ Cache-Control:
+ - no-cache, must-revalidate
+ Connection:
+ - keep-alive
+ Content-Type:
+ - text/event-stream
+ Date:
+ - Sat, 20 Apr 2024 16:18:59 GMT
+ Server:
+ - cloudflare
+ Set-Cookie:
+ - __cf_bm=VDKSK7GoYoxisSawjMl9W0b7YZarMfekW_Y69gq5ons-1713629939-1.0.1.1-fH6Sc.9fQ0Kb4MvzvnBRlAmk_cXfYNeNbDd_K6pZeMNxnmMy3qiDlS.olHx3Y7rfDhYg7a3FffrCHr.Xu8j_Uw;
+ path=/; expires=Sat, 20-Apr-24 16:48:59 GMT; domain=.api.openai.com; HttpOnly;
+ Secure; SameSite=None
+ - _cfuvid=GmZ5rJWGxt2nlOYm6_pDkhIo_8V.YkD6O9_B1qloO7g-1713629939085-0.0.1.1-604800000;
+ path=/; domain=.api.openai.com; HttpOnly; Secure; SameSite=None
+ Transfer-Encoding:
+ - chunked
+ access-control-allow-origin:
+ - '*'
+ alt-svc:
+ - h3=":443"; ma=86400
+ openai-model:
+ - gpt-4-0613
+ openai-organization:
+ - scale3-1
+ openai-processing-ms:
+ - '247'
+ openai-version:
+ - '2020-10-01'
+ strict-transport-security:
+ - max-age=15724800; includeSubDomains
+ x-ratelimit-limit-requests:
+ - '10000'
+ x-ratelimit-limit-tokens:
+ - '300000'
+ x-ratelimit-remaining-requests:
+ - '9999'
+ x-ratelimit-remaining-tokens:
+ - '299975'
+ x-ratelimit-reset-requests:
+ - 6ms
+ x-ratelimit-reset-tokens:
+ - 5ms
+ x-request-id:
+ - req_4c6f987df5c44fb9c842d54126d2608d
+ status:
+ code: 200
+ message: OK
+ version: 1
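Note that the recorded request headers above contain no Authorization entry, which implies the conftest.py added in this release (not shown in this diff) filters credentials before cassettes are written. A hypothetical sketch of that kind of wiring, using pytest-vcr's vcr_config fixture; the real conftest.py contents may differ:

import pytest


@pytest.fixture(scope="module")
def vcr_config():
    # Strip secrets from recorded interactions before they land in the cassette YAML.
    return {"filter_headers": ["authorization", "api-key"]}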
langtrace_python_sdk-1.3.6/src/tests/openai/cassettes/test_async_image_generation.yaml
@@ -0,0 +1,97 @@
+ interactions:
+ - request:
+ body: '{"prompt": "A charming and adorable baby sea otter. This small, fluffy
+ creature is floating gracefully on its back, with its tiny webbed paws folded
+ cutely over its fuzzy belly. It has big, round, innocent eyes that are brimming
+ with youthful curiosity. As it blissfully floats on the calm, sparkling ocean
+ surface under the glow of the golden sunset, it playfully tosses a shiny seashell
+ from one paw to another, showcasing its playful and distinctively otter-like
+ behavior.", "model": "dall-e-3"}'
+ headers:
+ accept:
+ - application/json
+ accept-encoding:
+ - gzip, deflate
+ connection:
+ - keep-alive
+ content-length:
+ - '498'
+ content-type:
+ - application/json
+ host:
+ - api.openai.com
+ user-agent:
+ - AsyncOpenAI/Python 1.23.2
+ x-stainless-arch:
+ - arm64
+ x-stainless-async:
+ - async:asyncio
+ x-stainless-lang:
+ - python
+ x-stainless-os:
+ - MacOS
+ x-stainless-package-version:
+ - 1.23.2
+ x-stainless-runtime:
+ - CPython
+ x-stainless-runtime-version:
+ - 3.11.5
+ method: POST
+ uri: https://api.openai.com/v1/images/generations
+ response:
+ body:
+ string: !!binary |
+ H4sIAAAAAAAAA1yRYWvbSBCGv/tXDIb4kyRLliLLAXMkF0KT9O5S6qZJj+OY1Y6krda78s7KrlL6
+ 3w8ptJD7sgy87868z8z3GcC8dISe5PwCknWS5mmaxmkwChI9zi/g7xkAwPfpBZg7Oiom+W/n7L7z
+ 8wuYXzs8AULZoNsrUwMaCSitQ6EJBIoBmBCs9+Qi2DUEvEetA6h0X1UDTPN7R6AYKm3Rjz1qhyVV
+ vdYDWAPKMwgs22CqvDIDnEgIktDhiaGyWpKEsvc0+o/kJl/Vv7wMIEjrIYJbD9xZNzZSdQDO9kYG
+ oIyxJRkPNBBDpbQmCSflGxhs75uq11D2TllWfojgk5HkwDcEtbYnsBUg1ONsA9wbJh9M4oQ84YLQ
+ ivkVY0LjkQahRL0PgDt0rR5pbUk49nAVljRl7TQOr/+8ZSYGBG5GbibkhrSGytk9WEPjCsBbQGN9
+ Qy6ARtWNVnUz7XHcg1TslSm9OlLws/FrvlCrlkBQg0dlXTQPfh65d3q8bON9xxfLpUUlUWvCTnXO
+ Si57joS2Iiqto+ikjLQnjgz5ZefUET0travDq6/509O3bzd1e0fPN6bhlXnSsXHLnsmF7w77zrn2
+ /v74+Phyu7u/2mXNEycfl2pfh9zau/cPxbP4ozk8mMPjuvr6/O7D5jrqTP0b++0qXmVhnIWreJfk
+ Z+llcn6WXsbplwXTG614o3Vbt+DjaEjCuAjjfMFuKxaOS7lVRitDY+23ao81LTtTL7i1Sm5zRJQk
+ KcwqkYZZvilCLKo8zItMrtdFLuJ8veDWK7nFrChLPM9DyiWGWZFRiEWShZuyyDb5+UqUIh2tv1Im
+ m91qdZZeppsxZf5lwe0bhP+LvBULbt9CqHp7WMnbQ//591vR9B293BWbPz8J3WZ19dcy+fxM7c1V
+ zh+uH46b4iy9nk93/jED+Gf2Y/YfAAAA//8DAOKMzHwGBAAA
+ headers:
+ CF-Cache-Status:
+ - DYNAMIC
+ CF-RAY:
+ - 8776c5e52c3141e5-MRS
+ Connection:
+ - keep-alive
+ Content-Encoding:
+ - gzip
+ Content-Type:
+ - application/json
+ Date:
+ - Sat, 20 Apr 2024 17:15:03 GMT
+ Server:
+ - cloudflare
+ Set-Cookie:
+ - __cf_bm=PPmQaZgp0kP_w8e9X7YcZGZtYzSLNfQUY.TG.SD7Mm0-1713633303-1.0.1.1-RkZB0XF8ApSlz3LiOh15SGhomE_XSMJJ2hL6wVlaShtkXHWvH.VZhzRGqOG.rxD9v..GY0sNMFx7xtMSpCePWg;
+ path=/; expires=Sat, 20-Apr-24 17:45:03 GMT; domain=.api.openai.com; HttpOnly;
+ Secure; SameSite=None
+ - _cfuvid=2YN.H.aWijGSLvi2MBRgIoIhFI2FYDWLqNZOdzmNK80-1713633303345-0.0.1.1-604800000;
+ path=/; domain=.api.openai.com; HttpOnly; Secure; SameSite=None
+ Transfer-Encoding:
+ - chunked
+ access-control-allow-origin:
+ - '*'
+ alt-svc:
+ - h3=":443"; ma=86400
+ openai-organization:
+ - scale3-1
+ openai-processing-ms:
+ - '12124'
+ openai-version:
+ - '2020-10-01'
+ strict-transport-security:
+ - max-age=15724800; includeSubDomains
+ x-request-id:
+ - req_e6ff12ba31f025e69137c82fbff524fb
+ status:
+ code: 200
+ message: OK
+ version: 1
langtrace_python_sdk-1.3.6/src/tests/openai/cassettes/test_chat_completion.yaml
@@ -0,0 +1,101 @@
+ interactions:
+ - request:
+ body: '{"messages": [{"role": "user", "content": "Say this is a test three times"}],
+ "model": "gpt-4", "stream": false}'
+ headers:
+ accept:
+ - application/json
+ accept-encoding:
+ - gzip, deflate
+ connection:
+ - keep-alive
+ content-length:
+ - '112'
+ content-type:
+ - application/json
+ host:
+ - api.openai.com
+ user-agent:
+ - OpenAI/Python 1.23.2
+ x-stainless-arch:
+ - arm64
+ x-stainless-async:
+ - 'false'
+ x-stainless-lang:
+ - python
+ x-stainless-os:
+ - MacOS
+ x-stainless-package-version:
+ - 1.23.2
+ x-stainless-runtime:
+ - CPython
+ x-stainless-runtime-version:
+ - 3.11.5
+ method: POST
+ uri: https://api.openai.com/v1/chat/completions
+ response:
+ body:
+ string: !!binary |
+ H4sIAAAAAAAAA4SQzWrDMBCE736KReckxE5sE99KCg0UeiiBBkoJirKx1cpaVdpA25B3L3Kcn556
+ EWhGM/utDgmA0FtRgVCNZNU6M5w9FKq521NerGafWfm8XMwXPy+r+WOZPd2LQUzQ5h0Vn1MjRa0z
+ yJrsyVYeJWNsTct0UmR5Ock7o6UtmhirHQ+nw3GRTvpEQ1phEBW8JgAAh+6MbHaLX6KC8eCstBiC
+ rFFUl0cAwpOJipAh6MDSshhcTUWW0Xa4y0YH0AEkMAYewT930ZccL9MN1c7TJpLavTEXfaetDs3a
+ owxk46TA5E7xYwLw1m25/wMunKfW8ZrpA20sTKenOnH9zxsz700mluaqZ7Ok5xPhOzC26522NXrn
+ dbdypEyOyS8AAAD//wMA4oDeWukBAAA=
+ headers:
+ CF-Cache-Status:
+ - DYNAMIC
+ CF-RAY:
+ - 87760d6f7ac6077a-MRS
+ Cache-Control:
+ - no-cache, must-revalidate
+ Connection:
+ - keep-alive
+ Content-Encoding:
+ - gzip
+ Content-Type:
+ - application/json
+ Date:
+ - Sat, 20 Apr 2024 15:08:56 GMT
+ Server:
+ - cloudflare
+ Set-Cookie:
+ - __cf_bm=.AWrS_oG3OU4o0c3bVjHojpv.kEUTemvQkhWuz9iz5U-1713625736-1.0.1.1-4NWMdmDl_wiWkhSU1E_K0o93evj.kwjYpG.N0O35W8ILiLMnk.fiJyCvlFOzyLJxK1VRH2JnM0znP_As2May1A;
+ path=/; expires=Sat, 20-Apr-24 15:38:56 GMT; domain=.api.openai.com; HttpOnly;
+ Secure; SameSite=None
+ - _cfuvid=LwFiBqHo.B57JZUvIXF2NgWHbiSKHD34H9ak.jh4FYw-1713625736732-0.0.1.1-604800000;
+ path=/; domain=.api.openai.com; HttpOnly; Secure; SameSite=None
+ Transfer-Encoding:
+ - chunked
+ access-control-allow-origin:
+ - '*'
+ alt-svc:
+ - h3=":443"; ma=86400
+ openai-model:
+ - gpt-4-0613
+ openai-organization:
+ - scale3-1
+ openai-processing-ms:
+ - '829'
+ openai-version:
+ - '2020-10-01'
+ strict-transport-security:
+ - max-age=15724800; includeSubDomains
+ x-ratelimit-limit-requests:
+ - '10000'
+ x-ratelimit-limit-tokens:
+ - '300000'
+ x-ratelimit-remaining-requests:
+ - '9999'
+ x-ratelimit-remaining-tokens:
+ - '299975'
+ x-ratelimit-reset-requests:
+ - 6ms
+ x-ratelimit-reset-tokens:
+ - 5ms
+ x-request-id:
+ - req_de693fb8a8b5e9790ff16c8fb0350074
+ status:
+ code: 200
+ message: OK
+ version: 1