prompty 0.1.14__tar.gz → 0.1.16__tar.gz
- {prompty-0.1.14 → prompty-0.1.16}/PKG-INFO +1 -2
- {prompty-0.1.14 → prompty-0.1.16}/README.md +0 -1
- {prompty-0.1.14 → prompty-0.1.16}/prompty/__init__.py +17 -6
- prompty-0.1.16/prompty/azure/__init__.py +10 -0
- prompty-0.1.16/prompty/azure_openai/__init__.py +10 -0
- {prompty-0.1.14 → prompty-0.1.16}/prompty/core.py +26 -0
- prompty-0.1.16/prompty/openai/__init__.py +10 -0
- {prompty-0.1.14 → prompty-0.1.16}/prompty/openai/executor.py +1 -1
- {prompty-0.1.14 → prompty-0.1.16}/prompty/openai/processor.py +1 -1
- prompty-0.1.16/prompty/serverless/__init__.py +8 -0
- {prompty-0.1.14 → prompty-0.1.16}/prompty/tracer.py +1 -1
- {prompty-0.1.14 → prompty-0.1.16}/pyproject.toml +4 -1
- prompty-0.1.14/prompty/azure/__init__.py +0 -3
- prompty-0.1.14/prompty/openai/__init__.py +0 -3
- prompty-0.1.14/prompty/serverless/__init__.py +0 -3
- {prompty-0.1.14 → prompty-0.1.16}/LICENSE +0 -0
- {prompty-0.1.14 → prompty-0.1.16}/prompty/azure/executor.py +0 -0
- {prompty-0.1.14 → prompty-0.1.16}/prompty/azure/processor.py +0 -0
- {prompty-0.1.14 → prompty-0.1.16}/prompty/cli.py +0 -0
- {prompty-0.1.14 → prompty-0.1.16}/prompty/parsers.py +0 -0
- {prompty-0.1.14 → prompty-0.1.16}/prompty/renderers.py +0 -0
- {prompty-0.1.14 → prompty-0.1.16}/prompty/serverless/executor.py +0 -0
- {prompty-0.1.14 → prompty-0.1.16}/prompty/serverless/processor.py +0 -0
- {prompty-0.1.14 → prompty-0.1.16}/tests/fake_azure_executor.py +0 -0
- {prompty-0.1.14 → prompty-0.1.16}/tests/fake_serverless_executor.py +0 -0
- {prompty-0.1.14 → prompty-0.1.16}/tests/generated/1contoso.md +0 -0
- {prompty-0.1.14 → prompty-0.1.16}/tests/generated/2contoso.md +0 -0
- {prompty-0.1.14 → prompty-0.1.16}/tests/generated/3contoso.md +0 -0
- {prompty-0.1.14 → prompty-0.1.16}/tests/generated/4contoso.md +0 -0
- {prompty-0.1.14 → prompty-0.1.16}/tests/generated/basic.prompty.md +0 -0
- {prompty-0.1.14 → prompty-0.1.16}/tests/generated/camping.jpg +0 -0
- {prompty-0.1.14 → prompty-0.1.16}/tests/generated/context.prompty.md +0 -0
- {prompty-0.1.14 → prompty-0.1.16}/tests/generated/contoso_multi.md +0 -0
- {prompty-0.1.14 → prompty-0.1.16}/tests/generated/faithfulness.prompty.md +0 -0
- {prompty-0.1.14 → prompty-0.1.16}/tests/generated/groundedness.prompty.md +0 -0
- {prompty-0.1.14 → prompty-0.1.16}/tests/hello_world-goodbye_world-hello_again.embedding.json +0 -0
- {prompty-0.1.14 → prompty-0.1.16}/tests/hello_world.embedding.json +0 -0
- {prompty-0.1.14 → prompty-0.1.16}/tests/prompts/__init__.py +0 -0
- {prompty-0.1.14 → prompty-0.1.16}/tests/prompts/basic.prompty +0 -0
- {prompty-0.1.14 → prompty-0.1.16}/tests/prompts/basic.prompty.execution.json +0 -0
- {prompty-0.1.14 → prompty-0.1.16}/tests/prompts/basic_json_output.prompty +0 -0
- {prompty-0.1.14 → prompty-0.1.16}/tests/prompts/camping.jpg +0 -0
- {prompty-0.1.14 → prompty-0.1.16}/tests/prompts/chat.prompty +0 -0
- {prompty-0.1.14 → prompty-0.1.16}/tests/prompts/context.json +0 -0
- {prompty-0.1.14 → prompty-0.1.16}/tests/prompts/context.prompty +0 -0
- {prompty-0.1.14 → prompty-0.1.16}/tests/prompts/context.prompty.execution.json +0 -0
- {prompty-0.1.14 → prompty-0.1.16}/tests/prompts/embedding.prompty +0 -0
- {prompty-0.1.14 → prompty-0.1.16}/tests/prompts/embedding.prompty.execution.json +0 -0
- {prompty-0.1.14 → prompty-0.1.16}/tests/prompts/evaluation.prompty +0 -0
- {prompty-0.1.14 → prompty-0.1.16}/tests/prompts/faithfulness.prompty +0 -0
- {prompty-0.1.14 → prompty-0.1.16}/tests/prompts/faithfulness.prompty.execution.json +0 -0
- {prompty-0.1.14 → prompty-0.1.16}/tests/prompts/fake.prompty +0 -0
- {prompty-0.1.14 → prompty-0.1.16}/tests/prompts/funcfile.json +0 -0
- {prompty-0.1.14 → prompty-0.1.16}/tests/prompts/funcfile.prompty +0 -0
- {prompty-0.1.14 → prompty-0.1.16}/tests/prompts/functions.prompty +0 -0
- {prompty-0.1.14 → prompty-0.1.16}/tests/prompts/functions.prompty.execution.json +0 -0
- {prompty-0.1.14 → prompty-0.1.16}/tests/prompts/groundedness.prompty +0 -0
- {prompty-0.1.14 → prompty-0.1.16}/tests/prompts/groundedness.prompty.execution.json +0 -0
- {prompty-0.1.14 → prompty-0.1.16}/tests/prompts/prompty.json +0 -0
- {prompty-0.1.14 → prompty-0.1.16}/tests/prompts/serverless.prompty +0 -0
- {prompty-0.1.14 → prompty-0.1.16}/tests/prompts/serverless.prompty.execution.json +0 -0
- {prompty-0.1.14 → prompty-0.1.16}/tests/prompts/serverless_stream.prompty +0 -0
- {prompty-0.1.14 → prompty-0.1.16}/tests/prompts/serverless_stream.prompty.execution.json +0 -0
- {prompty-0.1.14 → prompty-0.1.16}/tests/prompts/streaming.prompty +0 -0
- {prompty-0.1.14 → prompty-0.1.16}/tests/prompts/streaming.prompty.execution.json +0 -0
- {prompty-0.1.14 → prompty-0.1.16}/tests/prompts/sub/__init__.py +0 -0
- {prompty-0.1.14 → prompty-0.1.16}/tests/prompts/sub/basic.prompty +0 -0
- {prompty-0.1.14 → prompty-0.1.16}/tests/prompts/sub/sub/__init__.py +0 -0
- {prompty-0.1.14 → prompty-0.1.16}/tests/prompts/sub/sub/basic.prompty +0 -0
- {prompty-0.1.14 → prompty-0.1.16}/tests/prompts/sub/sub/prompty.json +0 -0
- {prompty-0.1.14 → prompty-0.1.16}/tests/prompts/sub/sub/test.py +0 -0
- {prompty-0.1.14 → prompty-0.1.16}/tests/prompts/test.py +0 -0
- {prompty-0.1.14 → prompty-0.1.16}/tests/prompty.json +0 -0
- {prompty-0.1.14 → prompty-0.1.16}/tests/test_common.py +0 -0
- {prompty-0.1.14 → prompty-0.1.16}/tests/test_execute.py +0 -0
- {prompty-0.1.14 → prompty-0.1.16}/tests/test_factory_invoker.py +0 -0
- {prompty-0.1.14 → prompty-0.1.16}/tests/test_path_exec.py +0 -0
- {prompty-0.1.14 → prompty-0.1.16}/tests/test_tracing.py +0 -0
{prompty-0.1.14 → prompty-0.1.16}/PKG-INFO
@@ -1,6 +1,6 @@
 Metadata-Version: 2.1
 Name: prompty
-Version: 0.1.14
+Version: 0.1.16
 Summary: Prompty is a new asset class and format for LLM prompts that aims to provide observability, understandability, and portability for developers. It includes spec, tooling, and a runtime. This Prompty runtime supports Python
 Author-Email: Seth Juarez <seth.juarez@microsoft.com>
 Requires-Dist: pyyaml>=6.0.1
{prompty-0.1.14 → prompty-0.1.16}/README.md
@@ -39,7 +39,6 @@ model:
   api: chat
   configuration:
     api_version: 2023-12-01-preview
-    azure_deployment: gpt-35-turbo
     azure_endpoint: ${env:AZURE_OPENAI_ENDPOINT}
     azure_deployment: ${env:AZURE_OPENAI_DEPLOYMENT:gpt-35-turbo}
   sample:
{prompty-0.1.14 → prompty-0.1.16}/prompty/__init__.py
@@ -6,6 +6,7 @@ from typing import Dict, List, Union
 from prompty.tracer import trace
 from prompty.core import (
     Frontmatter,
+    InvokerException,
     InvokerFactory,
     ModelSettings,
     Prompty,
@@ -321,18 +322,28 @@ def run(
     if parameters != {}:
         prompt.model.parameters = param_hoisting(parameters, prompt.model.parameters)
 
+    invoker_type = prompt.model.configuration["type"]
+
+    # invoker registration check
+    if not InvokerFactory.has_invoker("executor", invoker_type):
+        raise InvokerException(
+            f"{invoker_type} Invoker has not been registered properly.", invoker_type
+        )
+
     # execute
-    executor = InvokerFactory.create_executor(
-        prompt.model.configuration["type"], prompt
-    )
+    executor = InvokerFactory.create_executor(invoker_type, prompt)
     result = executor(content)
 
     # skip?
     if not raw:
+        # invoker registration check
+        if not InvokerFactory.has_invoker("processor", invoker_type):
+            raise InvokerException(
+                f"{invoker_type} Invoker has not been registered properly.", invoker_type
+            )
+
         # process
-        processor = InvokerFactory.create_processor(
-            prompt.model.configuration["type"], prompt
-        )
+        processor = InvokerFactory.create_processor(invoker_type, prompt)
         result = processor(result)
 
     return result
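The practical effect of this guard: if a .prompty file declares a model type whose invokers were never registered (typically because the matching subpackage was never imported), run() now fails early with an actionable InvokerException instead of a bare KeyError from the registry lookup. A minimal sketch of how a caller would see this, assuming the top-level prompty.execute helper and a prompty file whose configuration type is azure (file name and flow are illustrative, not from this diff):

    import prompty

    try:
        # assumes basic.prompty uses type: azure and prompty.azure was never imported
        result = prompty.execute("basic.prompty")
    except Exception as e:  # InvokerException in 0.1.16
        print(e)  # "azure Invoker has not been registered properly. Make sure to pip install ..."

    import prompty.azure  # importing the subpackage first registers the invokers
    result = prompty.execute("basic.prompty")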
prompty-0.1.16/prompty/azure/__init__.py
@@ -0,0 +1,10 @@
+# __init__.py
+from prompty.core import InvokerException
+
+try:
+    from .executor import AzureOpenAIExecutor
+    from .processor import AzureOpenAIProcessor
+except ImportError:
+    raise InvokerException(
+        "Error registering AzureOpenAIExecutor and AzureOpenAIProcessor", "azure"
+    )
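The pattern here, repeated for the azure_openai, openai, and serverless subpackages below, is import-time registration: importing the subpackage imports its executor and processor modules (which presumably register themselves via the same @InvokerFactory.register_* decorators seen in processor.py further down), and the try/except turns a missing optional dependency into the more actionable InvokerException. A sketch of the intended usage, under the assumption that the azure extra pulls in the required SDK packages:

    # Install the extra first, e.g. `pip install "prompty[azure]"`, then:
    import prompty.azure  # importing runs executor.py / processor.py and registers the "azure" invokers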
prompty-0.1.16/prompty/azure_openai/__init__.py
@@ -0,0 +1,10 @@
+# __init__.py
+from prompty.core import InvokerException
+
+try:
+    from ..azure.executor import AzureOpenAIExecutor
+    from ..azure.processor import AzureOpenAIProcessor
+except ImportError:
+    raise InvokerException(
+        "Error registering AzureOpenAIExecutor and AzureOpenAIProcessor", "azure"
+    )
{prompty-0.1.14 → prompty-0.1.16}/prompty/core.py
@@ -343,6 +343,21 @@ class InvokerFactory:
     _executors: Dict[str, Invoker] = {}
     _processors: Dict[str, Invoker] = {}
 
+    @classmethod
+    def has_invoker(
+        cls, type: Literal["renderer", "parser", "executor", "processor"], name: str
+    ) -> bool:
+        if type == "renderer":
+            return name in cls._renderers
+        elif type == "parser":
+            return name in cls._parsers
+        elif type == "executor":
+            return name in cls._executors
+        elif type == "processor":
+            return name in cls._processors
+        else:
+            raise ValueError(f"Type {type} not found")
+
     @classmethod
     def add_renderer(cls, name: str, invoker: Invoker) -> None:
         cls._renderers[name] = invoker
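has_invoker is a plain membership test against the four registries, with a ValueError for an unknown invoker type. A quick sketch of its behavior, based only on the code above:

    from prompty.core import InvokerFactory

    InvokerFactory.has_invoker("executor", "NOOP")    # True: core.py registers a NOOP executor
    InvokerFactory.has_invoker("executor", "azure")   # True only once prompty.azure has been imported
    InvokerFactory.has_invoker("widget", "azure")     # raises ValueError("Type widget not found")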
@@ -416,6 +431,17 @@ class InvokerFactory:
         return cls._processors[name](prompty)
 
 
+class InvokerException(Exception):
+    """Exception class for Invoker"""
+
+    def __init__(self, message: str, type: str) -> None:
+        super().__init__(message)
+        self.type = type
+
+    def __str__(self) -> str:
+        return f"{super().__str__()}. Make sure to pip install any necessary package extras (i.e. could be something like `pip install prompty[{self.type}]`) for {self.type} as well as import the appropriate invokers (i.e. could be something like `import prompty.{self.type}`)."
+
+
 @InvokerFactory.register_renderer("NOOP")
 @InvokerFactory.register_parser("NOOP")
 @InvokerFactory.register_executor("NOOP")
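InvokerException carries the invoker type alongside the message, and its __str__ appends the pip-extra and import hint. A small sketch of what actually surfaces to the user:

    from prompty.core import InvokerException

    try:
        raise InvokerException("serverless Invoker has not been registered properly.", "serverless")
    except InvokerException as e:
        print(e.type)  # serverless
        print(e)       # message plus the `pip install prompty[serverless]` / `import prompty.serverless` hint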
prompty-0.1.16/prompty/openai/__init__.py
@@ -0,0 +1,10 @@
+# __init__.py
+from prompty.core import InvokerException
+
+try:
+    from .executor import OpenAIExecutor
+    from .processor import OpenAIProcessor
+except ImportError:
+    raise InvokerException(
+        "Error registering OpenAIExecutor and OpenAIProcessor", "openai"
+    )
{prompty-0.1.14 → prompty-0.1.16}/prompty/openai/processor.py
@@ -6,7 +6,7 @@ from openai.types.create_embedding_response import CreateEmbeddingResponse
 
 
 @InvokerFactory.register_processor("openai")
-class
+class OpenAIProcessor(Invoker):
     """OpenAI Processor"""
 
     def __init__(self, prompty: Prompty) -> None:
prompty-0.1.16/prompty/serverless/__init__.py
@@ -0,0 +1,8 @@
+# __init__.py
+from prompty.core import InvokerException
+
+try:
+    from .executor import ServerlessExecutor
+    from .processor import ServerlessProcessor
+except ImportError:
+    raise InvokerException("Error registering ServerlessExecutor and ServerlessProcessor", "serverless")
{prompty-0.1.14 → prompty-0.1.16}/prompty/tracer.py
@@ -232,7 +232,7 @@ class PromptyTracer:
         if len(self.stack) == 0:
             trace_file = (
                 self.output
-                / f"{frame['name']}.{datetime.now().strftime('%Y%m%d.%H%M%S')}.
+                / f"{frame['name']}.{datetime.now().strftime('%Y%m%d.%H%M%S')}.tracy"
             )
 
         v = importlib.metadata.version("prompty")
{prompty-0.1.14 → prompty-0.1.16}/pyproject.toml
@@ -13,7 +13,7 @@ dependencies = [
     "python-dotenv>=1.0.1",
     "click>=8.1.7",
 ]
-version = "0.1.14"
+version = "0.1.16"
 
 [project.optional-dependencies]
 azure = [
@@ -27,6 +27,9 @@ serverless = [
     "azure-ai-inference>=1.0.0b3",
 ]
 
+[project.scripts]
+prompty = "prompty.cli:run"
+
 [tool.pdm]
 distribution = true
 
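The new [project.scripts] table makes installers generate a prompty console command that resolves to prompty.cli:run. A hedged Python equivalent (assuming run is a click command, which the click>=8.1.7 dependency suggests; not confirmed by this diff):

    from prompty.cli import run

    run()  # parses sys.argv the same way the installed `prompty` command would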