promptlayer 1.0.13.tar.gz → 1.0.14.tar.gz

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.

Potentially problematic release: this version of promptlayer might be problematic.

PKG-INFO
@@ -1,6 +1,6 @@
 Metadata-Version: 2.1
 Name: promptlayer
-Version: 1.0.13
+Version: 1.0.14
 Summary: PromptLayer is a platform for prompt engineering and tracks your LLM requests.
 License: Apache-2.0
 Author: Magniv
promptlayer/__init__.py
@@ -1,4 +1,4 @@
 from .promptlayer import PromptLayer
 
-__version__ = "1.0.13"
+__version__ = "1.0.14"
 __all__ = ["PromptLayer", "__version__"]
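
The only code change in promptlayer/__init__.py is the version bump, and __version__ stays exported through __all__. A minimal sketch for confirming the upgrade took effect in a given environment (assumes promptlayer 1.0.14 is already installed there):

# Quick sanity check after upgrading (assumes promptlayer 1.0.14 is installed).
import promptlayer

print(promptlayer.__version__)              # expected: "1.0.14"
assert promptlayer.__version__ == "1.0.14"  # fail fast if an older build is still on the path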
promptlayer/promptlayer.py
@@ -374,14 +374,14 @@ class PromptLayer:
         else:
             return self._run_internal(**_run_internal_kwargs)
 
-    def traceable(self, metadata=None):
+    def traceable(self, attributes=None):
         def decorator(func):
             @wraps(func)
             def sync_wrapper(*args, **kwargs):
                 if self.tracer:
                     with self.tracer.start_as_current_span(func.__name__) as span:
-                        if metadata:
-                            for key, value in metadata.items():
+                        if attributes:
+                            for key, value in attributes.items():
                                 span.set_attribute(key, value)
 
                         span.set_attribute(
@@ -398,8 +398,8 @@ class PromptLayer:
             async def async_wrapper(*args, **kwargs):
                 if self.tracer:
                     with self.tracer.start_as_current_span(func.__name__) as span:
-                        if metadata:
-                            for key, value in metadata.items():
+                        if attributes:
+                            for key, value in attributes.items():
                                 span.set_attribute(key, value)
 
                         span.set_attribute(
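
The functional change in this release is the rename of traceable's keyword argument from metadata to attributes; each key/value pair is still copied onto the OpenTelemetry span via span.set_attribute(key, value). Given the signature shown above, callers that still pass metadata= will get a TypeError (unexpected keyword argument) after upgrading. A minimal usage sketch follows; the api_key value and the enable_tracing constructor flag are placeholders/assumptions, since neither appears in this diff:

# Hedged usage sketch for the renamed keyword (was metadata=, now attributes=).
# enable_tracing=True is an assumed constructor flag; it is not shown in this diff.
from promptlayer import PromptLayer

pl = PromptLayer(api_key="pl_xxxxxxxx", enable_tracing=True)

# Before 1.0.14: @pl.traceable(metadata={"env": "prod"})
@pl.traceable(attributes={"env": "prod", "feature": "summarize"})
def summarize(text: str) -> str:
    # Each attributes key/value is set on the current span via span.set_attribute.
    return text[:100]

summarize("Some long document ...")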
pyproject.toml
@@ -1,6 +1,6 @@
 [tool.poetry]
 name = "promptlayer"
-version = "1.0.13"
+version = "1.0.14"
 description = "PromptLayer is a platform for prompt engineering and tracks your LLM requests."
 authors = ["Magniv <hello@magniv.io>"]
 license = "Apache-2.0"
The remaining two files are unchanged.