prompt_caller-0.0.1-py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
prompt_caller/__init__.py: file without changes (an empty file, 0 bytes per the RECORD below)
@@ -0,0 +1,7 @@
+ from prompt_caller import PromptCaller
+
+ ai = PromptCaller()
+
+ response = ai.call("sample", {"expression": "3+8/9"})
+
+ print(response)
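
The seven lines above are evidently the package's __main__.py (the RECORD further down lists it at 139 bytes). Running it needs two things the wheel does not provide: an OpenAI API key, which the module below picks up through load_dotenv(), and a prompts/sample.prompt file, a hedged example of which follows the module. As an assumed pre-flight check, not part of the package, the credential side reduces to:

    import os

    from dotenv import load_dotenv

    # Assumed setup, not shipped with the wheel: prompt_caller.py calls
    # load_dotenv() at import time, so an OPENAI_API_KEY entry in a local .env
    # file (or one already exported in the shell) is what ChatOpenAI will use.
    load_dotenv()
    if not os.getenv("OPENAI_API_KEY"):
        raise RuntimeError("Set OPENAI_API_KEY in .env or the environment before running __main__")
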
@@ -0,0 +1,95 @@
+ import os
+ import re
+
+ import yaml
+ from dotenv import load_dotenv
+ from jinja2 import Template
+ from langchain_core.messages import HumanMessage, SystemMessage
+ from langchain_openai import ChatOpenAI
+ from pydantic import BaseModel, Field, create_model
+
+ load_dotenv()
+
+
+ class PromptCaller:
+
+     def _loadPrompt(self, file_path):
+         with open(file_path, "r") as file:
+             content = file.read()
+
+         # Split YAML header and the body
+         header, body = content.split("---", 2)[1:]
+
+         # Parse the YAML header
+         model_config = yaml.safe_load(header.strip())
+
+         # Step 2: Parse the JSX body and return it
+         return model_config, body.strip()
+
+     def _renderTemplate(self, body, context):
+         template = Template(body)
+         return template.render(context)
+
+     def _parseJSXBody(self, body):
+         elements = []
+         tag_pattern = r"<(system|user|assistant)>(.*?)</\1>"
+
+         matches = re.findall(tag_pattern, body, re.DOTALL)
+
+         for tag, content in matches:
+             elements.append({"role": tag, "content": content.strip()})
+
+         return elements
+
+     def loadPrompt(self, promptName, context=None):
+         # initialize context
+         if context is None:
+             context = {}
+
+         configuration, template = self._loadPrompt(
+             os.path.join("prompts", f"{promptName}.prompt")
+         )
+
+         template = self._renderTemplate(template, context)
+
+         parsedMessages = self._parseJSXBody(template)
+
+         messages = []
+
+         for message in parsedMessages:
+             if message.get("role") == "system":
+                 messages.append(SystemMessage(content=message.get("content")))
+
+             if message.get("role") == "user":
+                 messages.append(HumanMessage(content=message.get("content")))
+
+         return configuration, messages
+
+     def createPydanticModel(self, dynamic_dict):
+         # Create a dynamic Pydantic model from the dictionary
+         fields = {
+             key: (str, Field(description=f"Description for {key}"))
+             for key in dynamic_dict.keys()
+         }
+         # Dynamically create the Pydantic model with the fields
+         return create_model("DynamicModel", **fields)
+
+     def call(self, promptName, context=None):
+
+         configuration, messages = self.loadPrompt(promptName, context)
+
+         output = None
+
+         if "output" in configuration:
+             output = configuration.get("output")
+             configuration.pop("output")
+
+         chat = ChatOpenAI(**configuration)
+
+         if output:
+             dynamicModel = self.createPydanticModel(output)
+             chat = chat.with_structured_output(dynamicModel)
+
+         response = chat.invoke(messages)
+
+         return response
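
The module above is prompt_caller/prompt_caller.py from the RECORD below. It expects each prompt to live at prompts/<name>.prompt as a YAML front-matter header followed by a Jinja2-templated body whose <system> and <user> tags become LangChain messages (the regex also recognizes <assistant>, but loadPrompt only maps system and user). Every header key except output is passed straight to ChatOpenAI, and output, when present, has its keys turned into string fields of a dynamically created Pydantic model for structured output. No prompt file ships in the wheel, so the round trip below is a sketch under assumptions: the header keys (model, temperature) and the result field are illustrative choices, not part of the package.

    import os

    from prompt_caller import PromptCaller

    # Hypothetical prompts/sample.prompt; every field is assumed, chosen only to
    # match what _loadPrompt, _parseJSXBody, and call() can handle.
    SAMPLE_PROMPT = """---
    model: gpt-4o-mini
    temperature: 0
    output:
      result: the evaluated value of the expression
    ---
    <system>
    You are a careful arithmetic assistant.
    </system>
    <user>
    Evaluate {{ expression }} and answer with the value only.
    </user>
    """

    os.makedirs("prompts", exist_ok=True)
    with open(os.path.join("prompts", "sample.prompt"), "w") as f:
        f.write(SAMPLE_PROMPT)

    ai = PromptCaller()
    # Because the header declares `output`, call() wraps ChatOpenAI with a
    # dynamically created Pydantic model, so the response exposes `.result`.
    response = ai.call("sample", {"expression": "3+8/9"})
    print(response.result)

If the header declared no output key, call() would instead return the raw message object from chat.invoke(messages), as in the __main__.py example earlier.
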
@@ -0,0 +1,21 @@
+ Metadata-Version: 2.1
+ Name: prompt_caller
+ Version: 0.0.1
+ Summary: This package is responsible for calling prompts in a specific format. It uses LangChain and OpenAI API
+ Author: Thiago Nepomuceno
+ Author-email: thiago@neps.academy
+ Classifier: Programming Language :: Python :: 3
+ Classifier: License :: OSI Approved :: MIT License
+ Classifier: Operating System :: OS Independent
+ Description-Content-Type: text/markdown
+ Requires-Dist: toml
+
+ # Neps Academy AI
+
+ This package is responsible for all Neps Academy features related to AI
+
+ # Build and Upload
+
+ python setup.py sdist bdist_wheel
+
+ twine upload dist/\*
@@ -0,0 +1,7 @@
+ prompt_caller/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
+ prompt_caller/__main__.py,sha256=dJ0dYtVmnhZuoV79R6YiAIta1ZkUKb-TEX4VEuYbgk0,139
+ prompt_caller/prompt_caller.py,sha256=VmmMLiOWsTC-545ejXcfOP0neA3k4aDA2SLCYNGD2nI,2824
+ prompt_caller-0.0.1.dist-info/METADATA,sha256=B8L0iifaUnM3idkMsLV_AE455LVTE51WMFwKKnuhdCI,625
+ prompt_caller-0.0.1.dist-info/WHEEL,sha256=eOLhNAGa2EW3wWl_TU484h7q1UNgy0JXjjoqKoxAAQc,92
+ prompt_caller-0.0.1.dist-info/top_level.txt,sha256=iihiDRq-0VrKB8IKjxf7Lrtv-fLMq4tvgM4fH3x0I94,14
+ prompt_caller-0.0.1.dist-info/RECORD,,
@@ -0,0 +1,5 @@
+ Wheel-Version: 1.0
+ Generator: bdist_wheel (0.44.0)
+ Root-Is-Purelib: true
+ Tag: py3-none-any
+
@@ -0,0 +1 @@
+ prompt_caller