prompty 0.1.10__py3-none-any.whl → 0.1.33__py3-none-any.whl

Sign up to get free protection for your applications and to get access to all the features.
prompty/executors.py DELETED
@@ -1,94 +0,0 @@
1
import azure.identity
import importlib.metadata
from typing import Any, Iterator

from openai import AzureOpenAI

from .core import Invoker, InvokerFactory, Prompty, PromptyStream

# Runtime package version, reported to the service via the user-agent headers.
VERSION = importlib.metadata.version("prompty")


@InvokerFactory.register_executor("azure")
@InvokerFactory.register_executor("azure_openai")
class AzureOpenAIExecutor(Invoker):
    """Execute a Prompty against the Azure OpenAI chat/completion/embedding APIs."""

    def __init__(self, prompty: Prompty) -> None:
        """Build the AzureOpenAI client from the prompty's model configuration.

        Parameters
        ----------
        prompty : Prompty
            The loaded prompty asset; its model configuration supplies the
            AzureOpenAI constructor kwargs (every key except "type").
        """
        super().__init__(prompty)
        kwargs = {
            key: value
            for key, value in self.prompty.model.configuration.items()
            if key != "type"
        }

        # No explicit api_key: fall back to Azure AD token authentication.
        if "api_key" not in kwargs:
            # A client_id selects a user-assigned managed identity;
            # otherwise use the default credential chain.
            if "client_id" in kwargs:
                credential = azure.identity.ManagedIdentityCredential(
                    client_id=kwargs.pop("client_id"),
                )
            else:
                credential = azure.identity.DefaultAzureCredential(
                    exclude_shared_token_cache_credential=True
                )

            kwargs["azure_ad_token_provider"] = (
                azure.identity.get_bearer_token_provider(
                    credential, "https://cognitiveservices.azure.com/.default"
                )
            )

        self.client = AzureOpenAI(
            default_headers={
                # BUG FIX: was f"prompty{VERSION}" (missing "/" separator) —
                # now consistent with the x-ms-useragent header below.
                "User-Agent": f"prompty/{VERSION}",
                "x-ms-useragent": f"prompty/{VERSION}",
            },
            **kwargs,
        )

        self.api = self.prompty.model.api
        self.deployment = self.prompty.model.configuration["azure_deployment"]
        self.parameters = self.prompty.model.parameters

    def invoke(self, data: Any) -> Any:
        """Invoke the Azure OpenAI API.

        Parameters
        ----------
        data : Any
            The data to send to the Azure OpenAI API; for the "chat" and
            "embedding" apis a non-list value is wrapped in a list, and for
            "completion" the prompt is read from ``data.item``.

        Returns
        -------
        Any
            The API response, wrapped in a PromptyStream when the service
            returns a streaming iterator.

        Raises
        ------
        NotImplementedError
            If the prompty's api is "image".
        ValueError
            If the prompty's api is not a supported value.
        """
        if self.api == "chat":
            response = self.client.chat.completions.create(
                model=self.deployment,
                messages=data if isinstance(data, list) else [data],
                **self.parameters,
            )
        elif self.api == "completion":
            response = self.client.completions.create(
                prompt=data.item,
                model=self.deployment,
                **self.parameters,
            )
        elif self.api == "embedding":
            response = self.client.embeddings.create(
                input=data if isinstance(data, list) else [data],
                model=self.deployment,
                **self.parameters,
            )
        elif self.api == "image":
            raise NotImplementedError("Azure OpenAI Image API is not implemented yet")
        else:
            # BUG FIX: an unrecognized api previously fell through every branch
            # and raised UnboundLocalError on `response`; fail clearly instead.
            raise ValueError(f"Unsupported api: {self.api}")

        # Streaming responses come back as an iterator; wrap for tracing.
        if isinstance(response, Iterator):
            return PromptyStream("AzureOpenAIExecutor", response)
        return response
@@ -1,136 +0,0 @@
1
- Metadata-Version: 2.1
2
- Name: prompty
3
- Version: 0.1.10
4
- Summary: Prompty is a new asset class and format for LLM prompts that aims to provide observability, understandability, and portability for developers. It includes spec, tooling, and a runtime. This Prompty runtime supports Python
5
- Author-Email: Seth Juarez <seth.juarez@microsoft.com>
6
- License: MIT
7
- Requires-Python: >=3.9
8
- Requires-Dist: pyyaml>=6.0.1
9
- Requires-Dist: pydantic>=2.8.2
10
- Requires-Dist: jinja2>=3.1.4
11
- Requires-Dist: openai>=1.35.10
12
- Requires-Dist: azure-identity>=1.17.1
13
- Requires-Dist: python-dotenv>=1.0.1
14
- Requires-Dist: click>=8.1.7
15
- Description-Content-Type: text/markdown
16
-
17
-
18
- Prompty is an asset class and format for LLM prompts designed to enhance observability, understandability, and portability for developers. The primary goal is to accelerate the developer inner loop of prompt engineering and prompt source management in a cross-language and cross-platform implementation.
19
-
20
- The file format has a supporting toolchain with a VS Code extension and runtimes in multiple programming languages to simplify and accelerate your AI application development.
21
-
22
- The tooling comes together in three ways: the *prompty file asset*, the *VS Code extension tool*, and *runtimes* in multiple programming languages.
23
-
24
- ## The Prompty File Format
25
- Prompty is a language agnostic prompt asset for creating prompts and engineering the responses. Learn more about the format [here](https://prompty.ai/docs/prompty-file-spec).
26
-
27
- Example prompty file:
28
- ```markdown
29
- ---
30
- name: Basic Prompt
31
- description: A basic prompt that uses the GPT-3 chat API to answer questions
32
- authors:
33
- - sethjuarez
34
- - jietong
35
- model:
36
- api: chat
37
- configuration:
38
- azure_deployment: gpt-35-turbo
39
- sample:
40
- firstName: Jane
41
- lastName: Doe
42
- question: What is the meaning of life?
43
- ---
44
- system:
45
- You are an AI assistant who helps people find information.
46
- As the assistant, you answer questions briefly, succinctly,
47
- and in a personable manner using markdown and even add some personal flair with appropriate emojis.
48
-
49
- # Customer
50
- You are helping {{firstName}} {{lastName}} to find answers to their questions.
51
- Use their name to address them in your responses.
52
-
53
- user:
54
- {{question}}
55
- ```
56
-
57
-
58
- ## The Prompty VS Code Extension
59
- Run Prompty files directly in VS Code. This Visual Studio Code extension offers an intuitive prompt playground within VS Code to streamline the prompt engineering process. You can find the Prompty extension in the Visual Studio Code Marketplace.
60
-
61
- Download the [VS Code extension here](https://marketplace.visualstudio.com/items?itemName=ms-toolsai.prompty).
62
-
63
-
64
- ## Using this Prompty Runtime
65
- The Python runtime is a simple way to run your prompts in Python. The runtime is available as a Python package and can be installed using pip.
66
-
67
- ```bash
68
- pip install prompty
69
- ```
70
-
71
- Simple usage example:
72
-
73
- ```python
74
- import prompty
75
-
76
- # execute the prompt
77
- response = prompty.execute("path/to/prompty/file")
78
-
79
- print(response)
80
- ```
81
-
82
- ## Using Tracing in Prompty
83
- Prompty supports tracing to help you understand the execution of your prompts. The built-in tracing dumps the execution of the prompt to a file.
84
-
85
- ```python
86
- import prompty
87
- from prompty.tracer import Trace, PromptyTracer
88
-
89
- # add default tracer
90
- Trace.add_tracer("prompty", PromptyTracer("path/to/trace/dir"))
91
-
92
- # execute the prompt
93
- response = prompty.execute("path/to/prompty/file")
94
-
95
- print(response)
96
- ```
97
-
98
- You can also bring your own tracer by creating a `Tracer` class.
99
- Simple example:
100
-
101
- ```python
102
- import prompty
103
- from prompty.tracer import Tracer
104
-
105
- class MyTracer(Tracer):
106
-
107
- def start(self, name: str) -> None:
108
- print(f"Starting {name}")
109
-
110
- def add(self, key: str, value: Any) -> None:
111
- print(f"Adding {key} with value {value}")
112
-
113
- def end(self) -> None:
114
- print("Ending")
115
-
116
- # add your tracer
117
- Trace.add_tracer("my_tracer", MyTracer())
118
-
119
- # execute the prompt
120
- response = prompty.execute("path/to/prompty/file")
121
-
122
- ```
123
-
124
- To define your own tracer, you can subclass the `Tracer` class, implement the `start`, `add`, and `end` methods, and then add it to the `Trace` instance. You can add as many tracers as you like — they will all be called in order.
125
-
126
- ## CLI
127
- The Prompty runtime also comes with a CLI tool that allows you to run prompts from the command line. The CLI tool is installed with the Python package.
128
-
129
- ```bash
130
- prompty -s path/to/prompty/file
131
- ```
132
-
133
- This will execute the prompt and print the response to the console. It also has default tracing enabled.
134
-
135
- ## Contributing
136
- We welcome contributions to the Prompty project! This community-led project is open to all contributors. The project can be found on [GitHub](https://github.com/Microsoft/prompty).
@@ -1,12 +0,0 @@
1
- prompty-0.1.10.dist-info/METADATA,sha256=nO36pVf-OKjnsEZRKWV0q7XHuKLBOI7Hw2a9_8dtrIs,4669
2
- prompty-0.1.10.dist-info/WHEEL,sha256=rSwsxJWe3vzyR5HCwjWXQruDgschpei4h_giTm0dJVE,90
3
- prompty-0.1.10.dist-info/licenses/LICENSE,sha256=KWSC4z9cfML_t0xThoQYjzTdcZQj86Y_mhXdatzU-KM,1052
4
- prompty/__init__.py,sha256=Msp8eiKdrDq0wyl6G5DFDH8r5BxM2_E60uzzL7_MJ5w,11183
5
- prompty/cli.py,sha256=_bx_l5v7OGhtAn4d_73b8tyfEw7OOkjCqGMQPu0YP5A,2489
6
- prompty/core.py,sha256=osKfdwc_uDhQpsRpB8TyvWELUfXPBAoS2iLEU-hPJW0,15373
7
- prompty/executors.py,sha256=z_SXF-i2qBbxmsBexQ4Ouiqwil6L0lU2wWfwIeSN-eE,3083
8
- prompty/parsers.py,sha256=4mmIn4SVNs8B0R1BufanqUJk8v4r0OEEo8yx6UOxQpA,4670
9
- prompty/processors.py,sha256=VaB7fGyaeIPRGuAZ9KTwktx7MIkfCtPALLQgNko1-Gk,2310
10
- prompty/renderers.py,sha256=RSHFQFx7AtKLUfsMLCXR0a56Mb7DL1NJNgjUqgg3IqU,776
11
- prompty/tracer.py,sha256=_C8bSoGNvucYnuS-DZSx6dq6iuEsVWWZxj-hUrgiTIw,6333
12
- prompty-0.1.10.dist-info/RECORD,,