promptlayer 1.0.19__tar.gz → 1.0.20__tar.gz

This diff shows the changes between publicly released versions of the package, as they appear in their public registry. It is provided for informational purposes only.

Potentially problematic release: this version of promptlayer might be problematic.

@@ -1,6 +1,6 @@
 Metadata-Version: 2.1
 Name: promptlayer
-Version: 1.0.19
+Version: 1.0.20
 Summary: PromptLayer is a platform for prompt engineering and tracks your LLM requests.
 License: Apache-2.0
 Author: Magniv
@@ -1,4 +1,4 @@
 from .promptlayer import PromptLayer
 
-__version__ = "1.0.19"
+__version__ = "1.0.20"
 __all__ = ["PromptLayer", "__version__"]
@@ -15,6 +15,7 @@ from promptlayer.promptlayer_base import PromptLayerBase
 from promptlayer.span_exporter import PromptLayerSpanExporter
 from promptlayer.templates import TemplateManager
 from promptlayer.track import TrackManager
+from promptlayer.types.prompt_template import PromptTemplate
 from promptlayer.utils import (
     anthropic_request,
     anthropic_stream_completion,
@@ -24,6 +25,7 @@ from promptlayer.utils import (
     openai_stream_completion,
     stream_response,
     track_request,
+    util_log_request,
 )
 
 MAP_PROVIDER_TO_FUNCTION_NAME = {
@@ -399,3 +401,45 @@ class PromptLayer:
             return async_wrapper if asyncio.iscoroutinefunction(func) else sync_wrapper
 
         return decorator
+
+    def log_request(
+        self,
+        *,
+        provider: str,
+        model: str,
+        input: PromptTemplate,
+        output: PromptTemplate,
+        request_start_time: float,
+        request_end_time: float,
+        parameters: Dict[str, Any] = {},
+        tags: List[str] = [],
+        metadata: Dict[str, str] = {},
+        prompt_name: Union[str, None] = None,
+        prompt_version_number: Union[int, None] = None,
+        prompt_input_variables: Dict[str, Any] = {},
+        input_tokens: int = 0,
+        output_tokens: int = 0,
+        price: float = 0.0,
+        function_name: str = "",
+        score: int = 0,
+    ):
+        return util_log_request(
+            self.api_key,
+            provider=provider,
+            model=model,
+            input=input,
+            output=output,
+            request_start_time=request_start_time,
+            request_end_time=request_end_time,
+            parameters=parameters,
+            tags=tags,
+            metadata=metadata,
+            prompt_name=prompt_name,
+            prompt_version_number=prompt_version_number,
+            prompt_input_variables=prompt_input_variables,
+            input_tokens=input_tokens,
+            output_tokens=output_tokens,
+            price=price,
+            function_name=function_name,
+            score=score,
+        )
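
The most substantial change in this release is this new public log_request method, which records an LLM request made outside of PromptLayer's wrapped provider clients. A minimal usage sketch follows; the api_key constructor argument and the chat-style PromptTemplate dict shapes are assumptions based on the rest of the SDK, not shown in this diff:

import time

from promptlayer import PromptLayer

# Assumes the client takes an API key at construction, as elsewhere in the SDK.
promptlayer_client = PromptLayer(api_key="pl_...")  # hypothetical key

request_start_time = time.time()
# ... call your LLM provider directly here ...
request_end_time = time.time()

log = promptlayer_client.log_request(
    provider="openai",
    model="gpt-4o-mini",
    # input/output take PromptTemplate dicts; this chat shape is an assumption
    # based on the SDK's prompt_template types.
    input={
        "type": "chat",
        "messages": [
            {"role": "user", "content": [{"type": "text", "text": "Hello!"}]},
        ],
    },
    output={
        "type": "chat",
        "messages": [
            {"role": "assistant", "content": [{"type": "text", "text": "Hi there."}]},
        ],
    },
    request_start_time=request_start_time,
    request_end_time=request_end_time,
    input_tokens=9,
    output_tokens=4,
)

# log_request returns None on any failure (see util_log_request below),
# so guard the result before using it.
if log is not None:
    print(f"Logged request {log['id']}")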
@@ -0,0 +1,4 @@
+from . import prompt_template
+from .request_log import RequestLog
+
+__all__ = ["prompt_template", "RequestLog"]
@@ -157,13 +157,13 @@ class BasePromptTemplate(TypedDict, total=False):
     tags: List[str]
 
 
-class PromptVersion(TypedDict, total=False):
+class PromptBlueprint(TypedDict, total=False):
    prompt_template: PromptTemplate
    commit_message: str
    metadata: Metadata
 
 
-class PublishPromptTemplate(BasePromptTemplate, PromptVersion, total=False):
+class PublishPromptTemplate(BasePromptTemplate, PromptBlueprint, total=False):
    release_labels: Optional[List[str]] = None
 
 
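
Note that PromptVersion to PromptBlueprint is a straight rename of an exported TypedDict, so downstream code importing the old name breaks on upgrade:

# Before (promptlayer 1.0.19)
from promptlayer.types.prompt_template import PromptVersion

# After (promptlayer 1.0.20)
from promptlayer.types.prompt_template import PromptBlueprint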
@@ -0,0 +1,8 @@
+from typing import TypedDict, Union
+
+from .prompt_template import PromptBlueprint
+
+
+class RequestLog(TypedDict):
+    id: int
+    prompt_version: Union[PromptBlueprint, None]
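
This new request_log module defines the shape of a successful log response: a numeric id, plus the prompt version the request was associated with, if any. A hedged sketch of consuming it (the handle_log helper is illustrative, not part of the SDK):

from typing import Union

from promptlayer.types import RequestLog


def handle_log(log: Union[RequestLog, None]) -> None:
    # None means logging failed; prompt_version is None when the request
    # was not tied to a prompt template.
    if log is None:
        return
    print(f"Logged request {log['id']}")
    if log["prompt_version"] is not None:
        # PromptBlueprint is total=False, so use .get for optional keys.
        print(log["prompt_version"].get("commit_message"))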
@@ -13,6 +13,7 @@ from typing import Callable, Generator, List, Union
 import requests
 from opentelemetry import context, trace
 
+from promptlayer.types import RequestLog
 from promptlayer.types.prompt_template import (
     GetPromptTemplate,
     GetPromptTemplateResponse,
@@ -907,3 +908,25 @@ def get_api_key():
             "Please set your PROMPTLAYER_API_KEY environment variable or set API KEY in code using 'promptlayer.api_key = <your_api_key>' "
         )
     return api_key
+
+
+def util_log_request(api_key: str, **kwargs) -> Union[RequestLog, None]:
+    try:
+        response = requests.post(
+            f"{URL_API_PROMPTLAYER}/log-request",
+            headers={"X-API-KEY": api_key},
+            json=kwargs,
+        )
+        if response.status_code != 201:
+            warn_on_bad_response(
+                response,
+                "WARNING: While logging your request PromptLayer had the following error",
+            )
+            return None
+        return response.json()
+    except Exception as e:
+        print(
+            f"WARNING: While tracking your prompt PromptLayer had the following error: {e}",
+            file=sys.stderr,
+        )
+        return None
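
Two details of util_log_request worth noting: success is signaled only by HTTP 201, with every failure path degrading to a stderr warning plus a None return rather than an exception, and because **kwargs is forwarded verbatim as the JSON body, keyword naming is enforced by PromptLayer.log_request above, not here. A direct call is therefore possible but unchecked; a sketch, where the completion-style template dicts are an assumption based on the SDK's prompt_template types:

import time

from promptlayer.utils import util_log_request

log = util_log_request(
    "pl_...",  # hypothetical API key
    provider="openai",
    model="gpt-4o-mini",
    input={"type": "completion", "prompt": {"type": "text", "text": "Hello"}},
    output={"type": "completion", "prompt": {"type": "text", "text": "Hi."}},
    request_start_time=time.time() - 1.5,
    request_end_time=time.time(),
)
print(log)  # RequestLog dict on success, None on failure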
@@ -1,6 +1,6 @@
 [tool.poetry]
 name = "promptlayer"
-version = "1.0.19"
+version = "1.0.20"
 description = "PromptLayer is a platform for prompt engineering and tracks your LLM requests."
 authors = ["Magniv <hello@magniv.io>"]
 license = "Apache-2.0"
@@ -1,3 +0,0 @@
-from . import prompt_template
-
-__all__ = ["prompt_template"]