drako 0.1__tar.gz
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- drako-0.1/PKG-INFO +7 -0
- drako-0.1/drako/__init__.py +0 -0
- drako-0.1/drako/cli.py +93 -0
- drako-0.1/drako/main.py +19 -0
- drako-0.1/drako.egg-info/PKG-INFO +7 -0
- drako-0.1/drako.egg-info/SOURCES.txt +9 -0
- drako-0.1/drako.egg-info/dependency_links.txt +1 -0
- drako-0.1/drako.egg-info/entry_points.txt +2 -0
- drako-0.1/drako.egg-info/top_level.txt +1 -0
- drako-0.1/setup.cfg +4 -0
- drako-0.1/setup.py +21 -0
drako-0.1/PKG-INFO
ADDED
|
File without changes
|
drako-0.1/drako/cli.py
ADDED
|
@@ -0,0 +1,93 @@
|
|
|
1
|
+
from langchain_ollama import OllamaLLM
from langchain_core.prompts import ChatPromptTemplate

# Global variable for the user-editable portion of the template.
# This text is the persona preamble prepended verbatim to every prompt
# (see build_template); it can be replaced at runtime via the /edit command.
# NOTE(review): the typos ("dont", "obvius") ship to the model as-is —
# confirm whether they are intentional flavor or should be fixed.
editable = """
You are a friendly AI. You are funny, but dont make it obvius.

Alias: Drako, Gemma, Bot
"""
|
|
10
|
+
|
|
11
|
+
def build_template():
    """Assemble the full prompt template used by the chain.

    The user-editable persona (module global ``editable``) is prefixed to a
    fixed scaffold that carries the ``{context}`` and ``{question}``
    placeholders consumed by ``ChatPromptTemplate.from_template``.
    """
    scaffold = (
        "\n"
        "Here is the conversation history: {context}\n"
        "\n"
        "Prompt: {question}\n"
        "\n"
        "Answer:\n"
    )
    return editable + scaffold
|
|
20
|
+
|
|
21
|
+
# Initialize model and chain.
# These five module-level names are rebound at runtime by the /model and
# /edit commands inside handle_conversation (declared there via `global`),
# so they must stay at module scope with these exact names.
raw_model = "gemma2:2b"  # Ollama model tag; assumed already pulled locally.
model = OllamaLLM(model=raw_model)
template = build_template()
prompt = ChatPromptTemplate.from_template(template)
# LCEL pipe: the rendered prompt is fed into the LLM.
chain = prompt | model
|
|
27
|
+
|
|
28
|
+
def handle_conversation():
    """Run the interactive chatbot REPL.

    Reads user input in a loop, dispatches slash-commands (/bye, /? or
    /help, /model, /temp, /edit, /info) and otherwise forwards the input
    to the LLM chain, accumulating the running transcript in ``context``.

    Side effects: reads stdin, writes stdout, and rebinds the module
    globals ``raw_model``/``model``/``editable``/``template``/``prompt``/
    ``chain`` in response to /model and /edit.
    """
    global raw_model, model, editable, template, prompt, chain  # Use globals so we can update them.
    context = ""  # Running transcript fed back to the model on every turn.
    print("An Ollama AI model. /? or /help for list of commands.")
    print("---New conversation---")

    while True:
        user_input = input("You: ")
        # Normalize once so commands match case-insensitively and tolerate
        # stray whitespace (previously "/bye " fell through to the LLM).
        command = user_input.strip().lower()

        if command == "/bye":
            break

        elif command in ("/?", "/help"):
            print(" | /bye - for exiting chat.")
            print(" | /model - for selecting model. (Not recommended.)")
            print(" | /temp - for viewing the editable template.")
            print(" | /edit - for editing the template.")
            print(" | /info - for information about this chatbot")
            continue

        elif command == "/model":
            print(f"Current model: {raw_model}")
            raw_model = input(
                " -WARNING- Changing the model may require installation.\n"
                "Enter new model to install/select: "
            )
            model = OllamaLLM(model=raw_model)  # Update model
            # Rebuild the chain with the new model.
            chain = prompt | model
            continue

        elif command == "/temp":
            print("\n--- Editable Template ---")
            print(editable.strip())
            print("-------------------------\n")
            continue

        elif command == "/edit":
            new_editable = input("Enter new editable text for the template: ")
            if new_editable.strip():
                editable = new_editable
                template = build_template()
                prompt = ChatPromptTemplate.from_template(template)
                chain = prompt | model  # Rebuild the chain with updated template.
                print("Template updated!")
            else:
                # Blank/whitespace-only input leaves the template untouched.
                print("Template not changed.")
            continue

        elif command == "/info":
            print("\n--- Chatbot Information ---")
            print("Current Model:", raw_model)
            # Show only the editable part, not the internal formatting.
            print("Template (editable part only):")
            print(editable.strip())
            print("---------------------------\n")
            continue

        # Normal chat turn: pass the ORIGINAL (un-normalized) input to the
        # model, then append the exchange to the transcript.
        try:
            result = chain.invoke({"context": context, "question": user_input})
            print("Bot:", result)
            context += f"\nUser: {user_input}\nAI: {result}"
        except Exception as e:
            # Broad catch is deliberate: keep the REPL alive on model or
            # connection errors rather than crashing the session.
            print("Error:", str(e))
|
|
92
|
+
|
|
93
|
+
# Guard the REPL launch: previously this module started the chat as a side
# effect of being imported, so `from drako.cli import handle_conversation`
# in drako/main.py launched the conversation at import time and then again
# when cli() called it explicitly.
if __name__ == "__main__":
    handle_conversation()
|
drako-0.1/drako/main.py
ADDED
|
@@ -0,0 +1,19 @@
|
|
|
1
|
+
import sys
|
|
2
|
+
from drako.cli import handle_conversation  # Import the conversation handler from drako/cli.py
|
|
3
|
+
|
|
4
|
+
def cli():
    """Entry point for the ``drako`` console script.

    ``drako run`` starts the interactive chatbot; a missing or unknown
    subcommand prints usage/an error and exits with status 1.
    """
    # Check if a subcommand is provided.
    if len(sys.argv) < 2:
        # The installed console script is named "drako" (see setup.py's
        # entry_points), so advertise that name — not "gemma".
        print("Usage: drako run")
        sys.exit(1)

    subcommand = sys.argv[1].lower()
    if subcommand == "run":
        # Hand off to the interactive REPL defined in drako/cli.py.
        handle_conversation()
    else:
        print(f"Unknown command: {subcommand}")
        sys.exit(1)
|
|
17
|
+
|
|
18
|
+
# Allow running this module directly (e.g. `python -m drako.main`) in
# addition to the installed console-script entry point.
if __name__ == "__main__":
    cli()
|
|
@@ -0,0 +1 @@
|
|
|
1
|
+
|
|
@@ -0,0 +1 @@
|
|
|
1
|
+
drako
|
drako-0.1/setup.cfg
ADDED
drako-0.1/setup.py
ADDED
|
@@ -0,0 +1,21 @@
|
|
|
1
|
+
from setuptools import setup, find_packages

setup(
    name="drako",  # The name of your package
    version="0.1",
    packages=find_packages(),
    entry_points={
        "console_scripts": [
            # "drako" is the command available after install.
            # It must point at drako.main:cli — drako/cli.py defines no
            # "main", so the previous target "drako.cli:main" made the
            # installed command fail with an import error at launch.
            "drako = drako.main:cli"
        ],
    },
    # drako/cli.py imports these at runtime, so they must be declared or a
    # bare `pip install drako` yields ModuleNotFoundError on first run.
    install_requires=[
        "langchain_ollama",
        "langchain_core",
    ],
    author="ProjectCuboid",
    description="CLI for DrakoAI",
)
|