prompty 0.1.19__py2.py3-none-any.whl → 0.1.20__py2.py3-none-any.whl

Sign up to get free protection for your applications and to get access to all the features.
prompty/azure/executor.py CHANGED
@@ -91,7 +91,7 @@ class AzureOpenAIExecutor(Invoker):
91
91
  elif self.api == "completion":
92
92
  trace("signature", "AzureOpenAI.completions.create")
93
93
  args = {
94
- "prompt": data.item,
94
+ "prompt": data,
95
95
  "model": self.deployment,
96
96
  **self.parameters,
97
97
  }
@@ -111,10 +111,22 @@ class AzureOpenAIExecutor(Invoker):
111
111
  trace("result", response)
112
112
 
113
113
  elif self.api == "image":
114
- raise NotImplementedError("Azure OpenAI Image API is not implemented yet")
114
+ trace("signature", "AzureOpenAI.images.generate")
115
+ args = {
116
+ "prompt": data,
117
+ "model": self.deployment,
118
+ **self.parameters,
119
+ }
120
+ trace("inputs", args)
121
+ response = client.images.generate.create(**args)
122
+ trace("result", response)
115
123
 
116
124
  # stream response
117
125
  if isinstance(response, Iterator):
118
- return PromptyStream("AzureOpenAIExecutor", response)
126
+ if self.api == "chat":
127
+ # TODO: handle the case where there might be no usage in the stream
128
+ return PromptyStream("AzureOpenAIExecutor", response)
129
+ else:
130
+ return PromptyStream("AzureOpenAIExecutor", response)
119
131
  else:
120
132
  return response
@@ -1,5 +1,6 @@
1
1
  from typing import Iterator
2
2
  from openai.types.completion import Completion
3
+ from openai.types.images_response import ImagesResponse
3
4
  from openai.types.chat.chat_completion import ChatCompletion
4
5
  from ..core import Invoker, InvokerFactory, Prompty, PromptyStream, ToolCall
5
6
  from openai.types.create_embedding_response import CreateEmbeddingResponse
@@ -50,6 +51,17 @@ class AzureOpenAIProcessor(Invoker):
50
51
  return data.data[0].embedding
51
52
  else:
52
53
  return [item.embedding for item in data.data]
54
+ elif isinstance(data, ImagesResponse):
55
+ self.prompty.model.parameters
56
+ item: ImagesResponse = data
57
+
58
+ if len(data.data) == 0:
59
+ raise ValueError("Invalid data")
60
+ elif len(data.data) == 1:
61
+ return data.data[0].url if item.data[0].url else item.data[0].b64_json
62
+ else:
63
+ return [item.url if item.url else item.b64_json for item in data.data]
64
+
53
65
  elif isinstance(data, Iterator):
54
66
 
55
67
  def generator():
prompty/core.py CHANGED
@@ -561,7 +561,9 @@ class AsyncPromptyStream(AsyncIterator):
561
561
  # StopIteration is raised
562
562
  # contents are exhausted
563
563
  if len(self.items) > 0:
564
- with Tracer.start(f"{self.name}.AsyncPromptyStream") as trace:
564
+ with Tracer.start("AsyncPromptyStream") as trace:
565
+ trace("signature", f"{self.name}.AsyncPromptyStream")
566
+ trace("inputs", "None")
565
567
  trace("result", [to_dict(s) for s in self.items])
566
568
 
567
569
  raise StopIteration
prompty/tracer.py CHANGED
@@ -1,6 +1,7 @@
1
1
  import os
2
2
  import json
3
3
  import inspect
4
+ import traceback
4
5
  import importlib
5
6
  import contextlib
6
7
  from pathlib import Path
@@ -176,7 +177,8 @@ def _trace_async(
176
177
  "result",
177
178
  {
178
179
  "exception": {
179
- "type": type(e).__name__,
180
+ "type": type(e),
181
+ "traceback": traceback.format_tb(),
180
182
  "message": str(e),
181
183
  "args": to_dict(e.args),
182
184
  }
@@ -1,6 +1,6 @@
1
1
  Metadata-Version: 2.1
2
2
  Name: prompty
3
- Version: 0.1.19
3
+ Version: 0.1.20
4
4
  Summary: Prompty is a new asset class and format for LLM prompts that aims to provide observability, understandability, and portability for developers. It includes spec, tooling, and a runtime. This Prompty runtime supports Python
5
5
  Author-Email: Seth Juarez <seth.juarez@microsoft.com>
6
6
  Requires-Dist: pyyaml>=6.0.1
@@ -177,7 +177,7 @@ def get_response(customerId, prompt):
177
177
 
178
178
  ```
179
179
 
180
- In this case, whenever this code is executed, a `.ptrace` file will be created in the `path/to/output` directory. This file will contain the trace of the execution of the `get_response` function, the execution of the `get_customer` function, and the prompty internals that generated the response.
180
+ In this case, whenever this code is executed, a `.tracy` file will be created in the `path/to/output` directory. This file will contain the trace of the execution of the `get_response` function, the execution of the `get_customer` function, and the prompty internals that generated the response.
181
181
 
182
182
  ## OpenTelemetry Tracing
183
183
  You can add OpenTelemetry tracing to your application using the same hook mechanism. In your application, you might create something like `trace_span` to trace the execution of your prompts:
@@ -206,10 +206,10 @@ This will produce spans during the execution of the prompt that can be sent to a
206
206
  The Prompty runtime also comes with a CLI tool that allows you to run prompts from the command line. The CLI tool is installed with the Python package.
207
207
 
208
208
  ```bash
209
- prompty -s path/to/prompty/file
209
+ prompty -s path/to/prompty/file -e .env
210
210
  ```
211
211
 
212
- This will execute the prompt and print the response to the console. It also has default tracing enabled.
212
+ This will execute the prompt and print the response to the console. If there are any environment variables the CLI should take into account, you can pass those in via the `-e` flag. It also has default tracing enabled.
213
213
 
214
214
  ## Contributing
215
215
 We welcome contributions to the Prompty project! This community-led project is open to all contributors. The project can be found on [GitHub](https://github.com/Microsoft/prompty).
@@ -1,13 +1,13 @@
1
- prompty-0.1.19.dist-info/METADATA,sha256=PnyGMOsBjZ6U9t76hJ3xQNFUULeE1nr7ef8MWz9SJgA,8916
2
- prompty-0.1.19.dist-info/WHEEL,sha256=CuZGaXTwoRLAOVv0AcE3bCTxO5ejVuBEJkUBe9C-kvk,94
3
- prompty-0.1.19.dist-info/entry_points.txt,sha256=9y1lKPWUpPWRJzUslcVH-gMwbNoa2PzjyoZsKYLQqyw,45
4
- prompty-0.1.19.dist-info/licenses/LICENSE,sha256=KWSC4z9cfML_t0xThoQYjzTdcZQj86Y_mhXdatzU-KM,1052
1
+ prompty-0.1.20.dist-info/METADATA,sha256=zZL7kvH2TAuXCRThW1v53zq2pgaReO-7IGvwSLgCPSk,9037
2
+ prompty-0.1.20.dist-info/WHEEL,sha256=CuZGaXTwoRLAOVv0AcE3bCTxO5ejVuBEJkUBe9C-kvk,94
3
+ prompty-0.1.20.dist-info/entry_points.txt,sha256=9y1lKPWUpPWRJzUslcVH-gMwbNoa2PzjyoZsKYLQqyw,45
4
+ prompty-0.1.20.dist-info/licenses/LICENSE,sha256=KWSC4z9cfML_t0xThoQYjzTdcZQj86Y_mhXdatzU-KM,1052
5
5
  prompty/__init__.py,sha256=XTUgJ3xT7HYJieuWW5PBItey0BWneg3G7iBBjIeNJZU,11628
6
6
  prompty/azure/__init__.py,sha256=ptGajCh68s_tugPv45Y4GJCyBToNFCExUzUh9yIBIfo,292
7
- prompty/azure/executor.py,sha256=M52n_hxxT57-_VdGYK-pXoELT0CDwnJaxN3xNNabgp8,4210
8
- prompty/azure/processor.py,sha256=e9CcKG665zvCLPeJfS91FM6c_W_6YY0mVENxinCo19A,2253
7
+ prompty/azure/executor.py,sha256=4BiVgDcoDaJ2QcSd3O1LIm_3Fh-ln6wjqO2ZoVYjslc,4700
8
+ prompty/azure/processor.py,sha256=UEuMqYlvfQM2loQ08g1zS7M08Kzxh69wskycZ_I5fIg,2755
9
9
  prompty/cli.py,sha256=k8Rxm41fMFNvmnsX737UiN6v-7756tpoJPN4rPXMNcU,3726
10
- prompty/core.py,sha256=VzhOp2tulAKAF0I_5ULlII6QZ0thN1FRtPqPhwDpjPY,17226
10
+ prompty/core.py,sha256=2ui3IHheqeFa6lErlL5bDJbPVcRvTDIMlgCfIdCYBXw,17331
11
11
  prompty/openai/__init__.py,sha256=16LxFrG_qGMg_Nx_BTMkCZupPEADsi8Gj234uFiXoZo,273
12
12
  prompty/openai/executor.py,sha256=tpX2vkPIJKM4XqEU0KzahgALcR_IBRrY_ca-woST-uc,3314
13
13
  prompty/openai/processor.py,sha256=Cw-_O_r9B5QqiCsfIglI5lcJgKCStkse2iIDbPWxfhg,2169
@@ -16,5 +16,5 @@ prompty/renderers.py,sha256=RSHFQFx7AtKLUfsMLCXR0a56Mb7DL1NJNgjUqgg3IqU,776
16
16
  prompty/serverless/__init__.py,sha256=NPqoFATEMQ96G8OQkVcGxUWU4llIQCwxfJePPo8YFY8,279
17
17
  prompty/serverless/executor.py,sha256=jbTnYE2aq8oS5PWcZ96NhhjL-gU1a_tlUoB449QqHaY,4648
18
18
  prompty/serverless/processor.py,sha256=pft1XGbPzo0MzQMbAt1VxsLsvRrjQO3B8MXEE2PfSA0,1982
19
- prompty/tracer.py,sha256=dtF7rlJVNkl30m5ZE_9nROGW6Nmm8vGSSWS6p3M3MXI,10436
20
- prompty-0.1.19.dist-info/RECORD,,
19
+ prompty/tracer.py,sha256=RDI7Ull1HDAnNhOBg0ZjRVeeuu_pl1Z-knXvgY0yGvo,10508
20
+ prompty-0.1.20.dist-info/RECORD,,