promptlayer-1.0.24-py3-none-any.whl → promptlayer-1.0.25-py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.

Potentially problematic release.

promptlayer/__init__.py CHANGED
@@ -1,4 +1,4 @@
  from .promptlayer import PromptLayer

- __version__ = "1.0.24"
+ __version__ = "1.0.25"
  __all__ = ["PromptLayer", "__version__"]

promptlayer/promptlayer.py CHANGED
@@ -24,7 +24,7 @@ from promptlayer.utils import (
      openai_request,
      openai_stream_chat,
      openai_stream_completion,
-     run_workflow_request,
+     run_workflow_async,
      stream_response,
      track_request,
      util_log_request,
@@ -380,15 +380,17 @@ class PromptLayer:
          workflow_version: Optional[
              int
          ] = None, # This is the version number, not the version ID
+         return_all_outputs: Optional[bool] = False,
      ) -> Dict[str, Any]:
          try:
-             result = run_workflow_request(
+             result = run_workflow_async(
                  workflow_name=workflow_name,
                  input_variables=input_variables or {},
                  metadata=metadata,
                  workflow_label_name=workflow_label_name,
                  workflow_version_number=workflow_version,
                  api_key=self.api_key,
+                 return_all_outputs=return_all_outputs,
              )
              return result
          except Exception as e:
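
Per this hunk, the client method on PromptLayer (its name sits outside the hunk, but it is presumably the client's run_workflow method) gains a return_all_outputs flag that it now forwards to the new run_workflow_async wrapper. A hedged usage sketch; the API key, workflow name, and input variables are placeholders, and return_all_outputs=True is assumed to request every node's output rather than only the final one:

```python
from promptlayer import PromptLayer

# Placeholder credentials and workflow details -- substitute real values.
pl = PromptLayer(api_key="pl_...")

result = pl.run_workflow(
    workflow_name="example-workflow",      # hypothetical workflow name
    input_variables={"topic": "testing"},  # hypothetical inputs
    workflow_version=None,                 # the version number, not the version ID
    return_all_outputs=True,               # new in 1.0.25; False remains the default
)
print(result)
```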
promptlayer/utils.py CHANGED
@@ -10,7 +10,9 @@ from copy import deepcopy
  from enum import Enum
  from typing import Any, Callable, Dict, Generator, List, Optional, Union

+ import aiohttp
  import requests
+ from ably import AblyRealtime
  from opentelemetry import context, trace

  from promptlayer.types import RequestLog
@@ -27,7 +29,7 @@ URL_API_PROMPTLAYER = os.environ.setdefault(
  )


- def run_workflow_request(
+ def run_workflow_async(
      *,
      workflow_name: str,
      input_variables: Dict[str, Any],
@@ -35,37 +37,103 @@ def run_workflow_request(
      workflow_label_name: Optional[str] = None,
      workflow_version_number: Optional[int] = None,
      api_key: str,
+     return_all_outputs: Optional[bool] = False,
+     timeout: Optional[int] = 120,
+ ) -> Dict[str, Any]:
+     return asyncio.run(
+         run_workflow_request(
+             workflow_name=workflow_name,
+             input_variables=input_variables,
+             metadata=metadata,
+             workflow_label_name=workflow_label_name,
+             workflow_version_number=workflow_version_number,
+             api_key=api_key,
+             return_all_outputs=return_all_outputs,
+             timeout=timeout,
+         )
+     )
+
+
+ async def run_workflow_request(
+     *,
+     workflow_name: str,
+     input_variables: Dict[str, Any],
+     metadata: Optional[Dict[str, str]] = None,
+     workflow_label_name: Optional[str] = None,
+     workflow_version_number: Optional[int] = None,
+     api_key: str,
+     return_all_outputs: Optional[bool] = None,
+     timeout: Optional[int] = 120,
  ) -> Dict[str, Any]:
      payload = {
          "input_variables": input_variables,
          "metadata": metadata,
          "workflow_label_name": workflow_label_name,
          "workflow_version_number": workflow_version_number,
+         "return_all_outputs": return_all_outputs,
      }

      url = f"{URL_API_PROMPTLAYER}/workflows/{workflow_name}/run"
      headers = {"X-API-KEY": api_key}

      try:
-         response = requests.post(url, json=payload, headers=headers)
-     except requests.exceptions.RequestException as e:
+         async with aiohttp.ClientSession() as session:
+             async with session.post(url, json=payload, headers=headers) as response:
+                 if response.status != 201:
+                     error_message = f"Failed to run workflow: {response.status} {await response.text()}"
+                     print(error_message)
+                     raise Exception(error_message)
+                 result = await response.json()
+     except Exception as e:
          error_message = f"Failed to run workflow: {e}"
-         print(error_message, file=sys.stderr)
+         print(error_message)
          raise Exception(error_message)

-     if response.status_code != 201:
-         try:
-             error_details = response.json().get("error", "Unknown error")
-         except ValueError:
-             error_details = response.text or "Unknown error"
+     execution_id = result.get("workflow_version_execution_id")
+     if not execution_id:
+         raise Exception("No execution ID returned from workflow run")

-         error_message = f"Failed to run workflow: {error_details}"
-         print(error_message, file=sys.stderr)
-         raise Exception(error_message)
+     channel_name = f"workflow_updates:{execution_id}"
+
+     ws_response = requests.post(
+         f"{URL_API_PROMPTLAYER}/ws-token-request-library",
+         headers=headers,
+         params={"capability": channel_name},
+     )
+     token_details = ws_response.json()["token_details"]
+
+     # Initialize Ably client
+     ably_client = AblyRealtime(token=token_details["token"])
+
+     # Subscribe to the channel named after the execution ID
+     channel = ably_client.channels.get(channel_name)
+
+     final_output = {}
+     message_received_event = asyncio.Event()
+
+     async def message_listener(message):
+         if message.name == "set_workflow_node_output":
+             data = json.loads(message.data)
+             if data.get("status") == "workflow_complete":
+                 final_output.update(data.get("final_output", {}))
+                 message_received_event.set()
+
+     # Subscribe to the channel
+     await channel.subscribe("set_workflow_node_output", message_listener)
+
+     # Wait for the message or timeout
+     try:
+         await asyncio.wait_for(message_received_event.wait(), timeout)
+     except asyncio.TimeoutError:
+         channel.unsubscribe("set_workflow_node_output", message_listener)
+         await ably_client.close()
+         raise Exception("Workflow execution did not complete properly")

-     result = response.json()
+     # Unsubscribe from the channel
+     channel.unsubscribe("set_workflow_node_output", message_listener)
+     await ably_client.close()

-     return result
+     return final_output


  def promptlayer_api_handler(
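
Taken together, the rewritten run_workflow_request now: POSTs to /workflows/{workflow_name}/run and expects a 201 with a workflow_version_execution_id, fetches an Ably token from /ws-token-request-library scoped to workflow_updates:{execution_id}, subscribes to that channel, and blocks on an asyncio.Event until a set_workflow_node_output message with status "workflow_complete" arrives or the timeout expires. The waiting mechanism itself is plain asyncio; below is a minimal, self-contained sketch of that event-plus-timeout pattern with the Ably channel replaced by a simulated publisher (simulate_publisher and its 0.1 s delay are illustrative stand-ins, not part of the package):

```python
import asyncio
import json


async def wait_for_completion(timeout: float = 120) -> dict:
    final_output: dict = {}
    done = asyncio.Event()

    # Mirrors utils.message_listener: only a "workflow_complete" payload
    # captures final_output and sets the event.
    def on_message(name: str, data: str) -> None:
        if name == "set_workflow_node_output":
            payload = json.loads(data)
            if payload.get("status") == "workflow_complete":
                final_output.update(payload.get("final_output", {}))
                done.set()

    # Simulated publisher standing in for the Ably channel (illustrative only).
    async def simulate_publisher() -> None:
        await asyncio.sleep(0.1)
        on_message(
            "set_workflow_node_output",
            json.dumps({"status": "workflow_complete", "final_output": {"answer": 42}}),
        )

    publisher = asyncio.create_task(simulate_publisher())
    try:
        await asyncio.wait_for(done.wait(), timeout)
    except asyncio.TimeoutError:
        publisher.cancel()
        raise Exception("Workflow execution did not complete properly")
    return final_output


print(asyncio.run(wait_for_completion()))  # -> {'answer': 42}
```

Note that the synchronous run_workflow_async entry point simply drives this coroutine with asyncio.run, so it is intended for ordinary blocking call sites; asyncio.run cannot be invoked from code that is already running inside an event loop.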

{promptlayer-1.0.24.dist-info → promptlayer-1.0.25.dist-info}/METADATA RENAMED
@@ -1,6 +1,6 @@
  Metadata-Version: 2.1
  Name: promptlayer
- Version: 1.0.24
+ Version: 1.0.25
  Summary: PromptLayer is a platform for prompt engineering and tracks your LLM requests.
  License: Apache-2.0
  Author: Magniv
@@ -13,6 +13,7 @@ Classifier: Programming Language :: Python :: 3.10
  Classifier: Programming Language :: Python :: 3.11
  Classifier: Programming Language :: Python :: 3.12
  Classifier: Programming Language :: Python :: 3.13
+ Requires-Dist: ably (>=2.0.6,<3.0.0)
  Requires-Dist: opentelemetry-api (>=1.26.0,<2.0.0)
  Requires-Dist: opentelemetry-sdk (>=1.26.0,<2.0.0)
  Requires-Dist: requests (>=2.31.0,<3.0.0)

{promptlayer-1.0.24.dist-info → promptlayer-1.0.25.dist-info}/RECORD RENAMED
@@ -1,7 +1,7 @@
- promptlayer/__init__.py,sha256=JTVK-6FTroG1GYTAY06uIChUCDWGll51745efZYKqlw,102
+ promptlayer/__init__.py,sha256=RBD1elQNn3QMEckznmpU0Sw1FOhFNU6EgPXEM5yc5oE,102
  promptlayer/groups/__init__.py,sha256=-xs-2cn0nc0D_5YxZr3nC86iTdRVZmBhEpOKDJXE-sQ,224
  promptlayer/groups/groups.py,sha256=yeO6T0TM3qB0ondZRiHhcH8G06YygrpFoM8b9RmoIao,165
- promptlayer/promptlayer.py,sha256=Qyju-mDJcf0NiGjV6SQHsyf1syXHekvKhzRzExOn31k,16719
+ promptlayer/promptlayer.py,sha256=Zl4nGbCzukfadiLWCPjGyXTN5Zmrjzu_u2jHn9nK6NY,16822
  promptlayer/promptlayer_base.py,sha256=sev-EZehRXJSZSmJtMkqmAUK1345pqbDY_lNjPP5MYA,7158
  promptlayer/span_exporter.py,sha256=zIJNsb3Fe6yb5wKLDmkoPF2wqFjk1p39E0jWHD2plzI,2658
  promptlayer/templates.py,sha256=aY_-BCrL0AgIdYEUE28pi0AP_avTVAgwv5hgzrh75vo,717
@@ -10,8 +10,8 @@ promptlayer/track/track.py,sha256=XNEZT9yNiRBPp9vaDZo_f0dP_ldOu8q1qafpVfS5Ze8,16
  promptlayer/types/__init__.py,sha256=xJcvQuOk91ZBBePb40-1FDNDKYrZoH5lPE2q6_UhprM,111
  promptlayer/types/prompt_template.py,sha256=TUXLXvuvew0EBLfTMBa2LhFeQoF7R-tcFKg7_UUtHMQ,4433
  promptlayer/types/request_log.py,sha256=xU6bcxQar6GaBOJlgZTavXUV3FjE8sF_nSjPu4Ya_00,174
- promptlayer/utils.py,sha256=Oyk_n0hIbYaCFK98QaUzHQeEdGxfQL3HRfXY-AlbPm0,33662
- promptlayer-1.0.24.dist-info/LICENSE,sha256=xx0jnfkXJvxRnG63LTGOxlggYnIysveWIZ6H3PNdCrQ,11357
- promptlayer-1.0.24.dist-info/METADATA,sha256=iGmVrzJEanaYQsBLYY7t2hF4ly4jCAHQnJJD01QHXdc,4660
- promptlayer-1.0.24.dist-info/WHEEL,sha256=Nq82e9rUAnEjt98J6MlVmMCZb-t9cYE2Ir1kpBmnWfs,88
- promptlayer-1.0.24.dist-info/RECORD,,
+ promptlayer/utils.py,sha256=yvlZDT9bunypNnIE4Xa8f2O5219F347HLKO3TaVmLyM,36135
+ promptlayer-1.0.25.dist-info/LICENSE,sha256=xx0jnfkXJvxRnG63LTGOxlggYnIysveWIZ6H3PNdCrQ,11357
+ promptlayer-1.0.25.dist-info/METADATA,sha256=1IJZrzSuS_440xIzR2Kavo7QPFVbL9-rpK9-ieft4OI,4697
+ promptlayer-1.0.25.dist-info/WHEEL,sha256=Nq82e9rUAnEjt98J6MlVmMCZb-t9cYE2Ir1kpBmnWfs,88
+ promptlayer-1.0.25.dist-info/RECORD,,