promptlayer 1.0.63__tar.gz → 1.0.64__tar.gz

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.

Potentially problematic release: this version of promptlayer might be problematic.

Files changed (22)
  1. {promptlayer-1.0.63 → promptlayer-1.0.64}/PKG-INFO +1 -1
  2. {promptlayer-1.0.63 → promptlayer-1.0.64}/promptlayer/__init__.py +1 -1
  3. {promptlayer-1.0.63 → promptlayer-1.0.64}/promptlayer/promptlayer_mixins.py +3 -4
  4. {promptlayer-1.0.63 → promptlayer-1.0.64}/pyproject.toml +1 -1
  5. {promptlayer-1.0.63 → promptlayer-1.0.64}/LICENSE +0 -0
  6. {promptlayer-1.0.63 → promptlayer-1.0.64}/README.md +0 -0
  7. {promptlayer-1.0.63 → promptlayer-1.0.64}/promptlayer/groups/__init__.py +0 -0
  8. {promptlayer-1.0.63 → promptlayer-1.0.64}/promptlayer/groups/groups.py +0 -0
  9. {promptlayer-1.0.63 → promptlayer-1.0.64}/promptlayer/promptlayer.py +2 -2
  10. {promptlayer-1.0.63 → promptlayer-1.0.64}/promptlayer/promptlayer_base.py +0 -0
  11. {promptlayer-1.0.63 → promptlayer-1.0.64}/promptlayer/span_exporter.py +0 -0
  12. {promptlayer-1.0.63 → promptlayer-1.0.64}/promptlayer/streaming/__init__.py +0 -0
  13. {promptlayer-1.0.63 → promptlayer-1.0.64}/promptlayer/streaming/blueprint_builder.py +0 -0
  14. {promptlayer-1.0.63 → promptlayer-1.0.64}/promptlayer/streaming/response_handlers.py +0 -0
  15. {promptlayer-1.0.63 → promptlayer-1.0.64}/promptlayer/streaming/stream_processor.py +0 -0
  16. {promptlayer-1.0.63 → promptlayer-1.0.64}/promptlayer/templates.py +0 -0
  17. {promptlayer-1.0.63 → promptlayer-1.0.64}/promptlayer/track/__init__.py +0 -0
  18. {promptlayer-1.0.63 → promptlayer-1.0.64}/promptlayer/track/track.py +0 -0
  19. {promptlayer-1.0.63 → promptlayer-1.0.64}/promptlayer/types/__init__.py +0 -0
  20. {promptlayer-1.0.63 → promptlayer-1.0.64}/promptlayer/types/prompt_template.py +0 -0
  21. {promptlayer-1.0.63 → promptlayer-1.0.64}/promptlayer/types/request_log.py +0 -0
  22. {promptlayer-1.0.63 → promptlayer-1.0.64}/promptlayer/utils.py +0 -0
PKG-INFO
@@ -1,6 +1,6 @@
 Metadata-Version: 2.3
 Name: promptlayer
-Version: 1.0.63
+Version: 1.0.64
 Summary: PromptLayer is a platform for prompt engineering and tracks your LLM requests.
 License: Apache-2.0
 Author: Magniv
promptlayer/__init__.py
@@ -1,4 +1,4 @@
 from .promptlayer import AsyncPromptLayer, PromptLayer
 
-__version__ = "1.0.63"
+__version__ = "1.0.64"
 __all__ = ["PromptLayer", "AsyncPromptLayer", "__version__"]
promptlayer/promptlayer_mixins.py
@@ -215,6 +215,7 @@ class PromptLayerMixin:
         metadata: Union[Dict[str, str], None],
         provider: Union[str, None] = None,
         model: Union[str, None] = None,
+        model_parameter_overrides: Union[Dict[str, Any], None] = None,
     ) -> Dict[str, Any]:
         params = {}
 
@@ -230,6 +231,8 @@ class PromptLayerMixin:
             params["provider"] = provider
         if model:
             params["model"] = model
+        if model_parameter_overrides:
+            params["model_parameter_overrides"] = model_parameter_overrides
 
         return params
 
@@ -239,7 +242,6 @@ class PromptLayerMixin:
         prompt_blueprint,
         prompt_template,
         prompt_blueprint_model,
-        model_parameter_overrides,
         stream,
         is_async=False,
     ):
@@ -257,9 +259,6 @@ class PromptLayerMixin:
         elif provider_base_url := prompt_blueprint.get("provider_base_url"):
             client_kwargs["base_url"] = provider_base_url["url"]
 
-        if model_parameter_overrides:
-            function_kwargs.update(model_parameter_overrides)
-
         if stream and provider in ["openai", "openai.azure"]:
             function_kwargs["stream_options"] = {"include_usage": True}
 
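Net effect of the promptlayer_mixins.py hunks: model_parameter_overrides is now added to the params dict built for the template fetch, and is no longer merged into function_kwargs before the provider call. Below is a minimal sketch of the new params-building behavior written as a standalone helper; the helper name, the metadata handling, and the example values are assumptions for illustration, not part of this diff.

from typing import Any, Dict, Union

def build_get_prompt_template_params(
    metadata: Union[Dict[str, str], None] = None,
    provider: Union[str, None] = None,
    model: Union[str, None] = None,
    model_parameter_overrides: Union[Dict[str, Any], None] = None,
) -> Dict[str, Any]:
    # Mirrors the post-1.0.64 hunks above; the metadata branch is assumed.
    params: Dict[str, Any] = {}
    if metadata:
        params["metadata"] = metadata
    if provider:
        params["provider"] = provider
    if model:
        params["model"] = model
    if model_parameter_overrides:
        # New in 1.0.64: overrides are sent with the template-fetch params.
        params["model_parameter_overrides"] = model_parameter_overrides
    return params

print(build_get_prompt_template_params(
    provider="openai",
    model="gpt-4o",
    model_parameter_overrides={"temperature": 0.2},
))
# {'provider': 'openai', 'model': 'gpt-4o', 'model_parameter_overrides': {'temperature': 0.2}}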
pyproject.toml
@@ -1,6 +1,6 @@
 [tool.poetry]
 name = "promptlayer"
-version = "1.0.63"
+version = "1.0.64"
 description = "PromptLayer is a platform for prompt engineering and tracks your LLM requests."
 authors = ["Magniv <hello@magniv.io>"]
 license = "Apache-2.0"
promptlayer/promptlayer.py
@@ -135,6 +135,7 @@ class PromptLayer(PromptLayerMixin):
             metadata=metadata,
             provider=provider,
             model=model,
+            model_parameter_overrides=model_parameter_overrides,
         )
         prompt_blueprint = self.templates.get(prompt_name, get_prompt_template_params)
         prompt_blueprint_model = self._validate_and_extract_model_from_prompt_blueprint(
@@ -144,7 +145,6 @@ class PromptLayer(PromptLayerMixin):
             prompt_blueprint=prompt_blueprint,
             prompt_template=prompt_blueprint["prompt_template"],
             prompt_blueprint_model=prompt_blueprint_model,
-            model_parameter_overrides=model_parameter_overrides,
             stream=stream,
         )
 
@@ -572,6 +572,7 @@ class AsyncPromptLayer(PromptLayerMixin):
             metadata=metadata,
             provider=provider,
             model=model,
+            model_parameter_overrides=model_parameter_overrides,
         )
         prompt_blueprint = await self.templates.get(prompt_name, get_prompt_template_params)
         prompt_blueprint_model = self._validate_and_extract_model_from_prompt_blueprint(
@@ -581,7 +582,6 @@ class AsyncPromptLayer(PromptLayerMixin):
             prompt_blueprint=prompt_blueprint,
             prompt_template=prompt_blueprint["prompt_template"],
             prompt_blueprint_model=prompt_blueprint_model,
-            model_parameter_overrides=model_parameter_overrides,
             stream=stream,
             is_async=True,
         )