promptlayer 1.0.4__tar.gz → 1.0.6__tar.gz

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.


@@ -1,6 +1,6 @@
 Metadata-Version: 2.1
 Name: promptlayer
-Version: 1.0.4
+Version: 1.0.6
 Summary: PromptLayer is a platform for prompt engineering and tracks your LLM requests.
 License: Apache-2.0
 Author: Magniv
@@ -103,6 +103,8 @@ class PromptLayer:
             template_get_params["label"] = prompt_release_label
         if input_variables:
             template_get_params["input_variables"] = input_variables
+        if metadata:
+            template_get_params["metadata_filters"] = metadata
         prompt_blueprint = self.templates.get(prompt_name, template_get_params)
         prompt_template = prompt_blueprint["prompt_template"]
         if not prompt_blueprint["llm_kwargs"]:
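
The new metadata_filters pass-through means a metadata argument on the enclosing client method is forwarded with the template fetch, so only template versions whose metadata matches are retrieved. A minimal usage sketch, assuming the enclosing method is the client's run() and using placeholder prompt and metadata values:

from promptlayer import PromptLayer

promptlayer_client = PromptLayer(api_key="pl_...")  # placeholder key

# Assumption: run() accepts `metadata` and forwards it as
# template_get_params["metadata_filters"], per the hunk above.
response = promptlayer_client.run(
    prompt_name="welcome-email",       # hypothetical template name
    input_variables={"user": "Ada"},
    metadata={"environment": "prod"},  # filters which template version is fetched
)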
@@ -8,6 +8,7 @@ class GetPromptTemplate(TypedDict, total=False):
     label: str
     provider: str
     input_variables: Dict[str, str]
+    metadata_filters: Dict[str, str]
 
 
 TemplateFormat = Literal["f-string", "jinja2"]
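
The matching change on the typing side widens GetPromptTemplate so metadata_filters can also be passed explicitly when fetching a template. A sketch of filling the TypedDict by hand and handing it to the templates API; the import path for the type is an assumption and the values are placeholders:

from promptlayer import PromptLayer
# Assumption: GetPromptTemplate is importable from the package's types module.
from promptlayer.types import GetPromptTemplate

client = PromptLayer(api_key="pl_...")  # placeholder key

params: GetPromptTemplate = {
    "label": "prod",
    "input_variables": {"user": "Ada"},
    "metadata_filters": {"team": "growth"},  # new optional key in 1.0.6
}
# templates.get(name, params) is the call shown in the hunk above.
prompt_blueprint = client.templates.get("welcome-email", params)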
@@ -41,7 +41,12 @@ def promptlayer_api_handler(
         isinstance(response, types.GeneratorType)
         or isinstance(response, types.AsyncGeneratorType)
         or type(response).__name__
-        in ["Stream", "AsyncStream", "AsyncMessageStreamManager"]
+        in [
+            "Stream",
+            "AsyncStream",
+            "AsyncMessageStreamManager",
+            "MessageStreamManager",
+        ]
     ):
         return GeneratorProxy(
             response,
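
The handler decides whether to wrap a provider response by comparing its class name against a small allow-list rather than importing provider SDK types; 1.0.6 adds Anthropic's synchronous MessageStreamManager to that list. A self-contained sketch of the same duck-typed check (the function and constant names here are illustrative, not part of the package):

import types

# Class names of provider streaming objects that should be proxied.
# 1.0.6 adds "MessageStreamManager", the synchronous counterpart of
# "AsyncMessageStreamManager".
STREAMING_TYPE_NAMES = [
    "Stream",
    "AsyncStream",
    "AsyncMessageStreamManager",
    "MessageStreamManager",
]


def is_streaming_response(response) -> bool:
    # Match plain (async) generators or known provider stream wrappers by name.
    return (
        isinstance(response, (types.GeneratorType, types.AsyncGeneratorType))
        or type(response).__name__ in STREAMING_TYPE_NAMES
    )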
@@ -356,6 +361,19 @@ class GeneratorProxy:
                 self.api_key,
             )
 
+    def __enter__(self):
+        api_request_arguments = self.api_request_arugments
+        if hasattr(self.generator, "_MessageStreamManager__api_request"):
+            stream = self.generator.__enter__()
+            return GeneratorProxy(
+                stream,
+                api_request_arguments,
+                self.api_key,
+            )
+
+    def __exit__(self, exc_type, exc_val, exc_tb):
+        pass
+
     async def __aexit__(self, exc_type, exc_val, exc_tb):
         pass
 
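The new __enter__ detects Anthropic's synchronous MessageStreamManager by probing for its name-mangled private attribute and, if found, enters the underlying manager and re-wraps the resulting stream in another GeneratorProxy. A small, self-contained sketch of how that name-mangling probe works, using a stand-in class with the same name (it is not Anthropic's real class):

class MessageStreamManager:
    def __init__(self):
        # Double-underscore attributes are name-mangled to
        # _<ClassName>__<attr>, i.e. _MessageStreamManager__api_request here.
        self.__api_request = {"model": "placeholder"}

    def __enter__(self):
        return iter(["hello", " world"])  # fake event stream for illustration

    def __exit__(self, exc_type, exc_val, exc_tb):
        return False


manager = MessageStreamManager()
# This is the same probe the proxy uses to recognize the manager type:
assert hasattr(manager, "_MessageStreamManager__api_request")

with manager as stream:
    print("".join(stream))  # hello world
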
@@ -458,7 +476,7 @@ class GeneratorProxy:
                 hasattr(result.choices[0].delta, "content")
                 and result.choices[0].delta.content is not None
             ):
-                response[
+                response["content"] = response[
                     "content"
                 ] = f"{response['content']}{result.choices[0].delta.content}"
             final_result = deepcopy(self.results[-1])
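
This hunk sits in the code that reassembles a full chat completion from streamed chunks: each chunk's choices[0].delta.content is appended to a running response["content"] string (the change prepends an explicit response["content"] = target to the line-wrapped assignment, which does not alter the stored value). A stripped-down sketch of that accumulation over a hypothetical list of OpenAI-style chunks rather than a live stream:

from types import SimpleNamespace

# Stand-ins for streamed chat-completion chunks; real chunks come from the
# provider SDK, not from this sketch.
chunks = [
    SimpleNamespace(choices=[SimpleNamespace(delta=SimpleNamespace(content="Hel"))]),
    SimpleNamespace(choices=[SimpleNamespace(delta=SimpleNamespace(content="lo"))]),
    SimpleNamespace(choices=[SimpleNamespace(delta=SimpleNamespace(content=None))]),
]

response = {"role": "assistant", "content": ""}
for result in chunks:
    delta = result.choices[0].delta
    if hasattr(delta, "content") and delta.content is not None:
        response["content"] = f"{response['content']}{delta.content}"

print(response["content"])  # Hello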
@@ -1,6 +1,6 @@
 [tool.poetry]
 name = "promptlayer"
-version = "1.0.4"
+version = "1.0.6"
 description = "PromptLayer is a platform for prompt engineering and tracks your LLM requests."
 authors = ["Magniv <hello@magniv.io>"]
 license = "Apache-2.0"