llm-dialog-manager 0.1.2480__py3-none-any.whl → 0.2.1__py3-none-any.whl
- llm_dialog_manager/__init__.py +1 -1
- llm_dialog_manager/agent.py +49 -30
- {llm_dialog_manager-0.1.2480.dist-info → llm_dialog_manager-0.2.1.dist-info}/METADATA +1 -1
- llm_dialog_manager-0.2.1.dist-info/RECORD +9 -0
- {llm_dialog_manager-0.1.2480.dist-info → llm_dialog_manager-0.2.1.dist-info}/WHEEL +1 -1
- llm_dialog_manager-0.1.2480.dist-info/RECORD +0 -9
- {llm_dialog_manager-0.1.2480.dist-info → llm_dialog_manager-0.2.1.dist-info}/LICENSE +0 -0
- {llm_dialog_manager-0.1.2480.dist-info → llm_dialog_manager-0.2.1.dist-info}/top_level.txt +0 -0
llm_dialog_manager/__init__.py
CHANGED
llm_dialog_manager/agent.py
CHANGED
@@ -113,27 +113,53 @@ def completion(model: str, messages: List[Dict[str, str]], max_tokens: int = 100
         return response.content[0].text
 
     elif "gemini" in model:
-        [content of the 21 removed lines (old 116-136) was not captured in this diff view]
+        try:
+            # First try OpenAI-style API
+            client = openai.OpenAI(
+                api_key=api_key,
+                base_url="https://generativelanguage.googleapis.com/v1beta/"
+            )
+            # Remove any system message from the beginning if present
+            if messages and messages[0]["role"] == "system":
+                system_msg = messages.pop(0)
+                # Prepend system message to first user message if exists
+                if messages:
+                    messages[0]["content"] = f"{system_msg['content']}\n\n{messages[0]['content']}"
+
+            response = client.chat.completions.create(
+                model=model,
+                messages=messages,
+                temperature=temperature
+            )
+
+            return response.choices[0].message.content
+
+        except Exception as e:
+            # If OpenAI-style API fails, fall back to Google's genai library
+            logger.info("Falling back to Google's genai library")
+            genai.configure(api_key=api_key)
+
+            # Convert messages to Gemini format
+            gemini_messages = []
+            for msg in messages:
+                if msg["role"] == "system":
+                    # Prepend system message to first user message if exists
+                    if gemini_messages:
+                        gemini_messages[0].parts[0].text = f"{msg['content']}\n\n{gemini_messages[0].parts[0].text}"
+                else:
+                    gemini_messages.append({"role": msg["role"], "parts": [{"text": msg["content"]}]})
+
+            # Create Gemini model and generate response
+            model = genai.GenerativeModel(model_name=model)
+            response = model.generate_content(
+                gemini_messages,
+                generation_config=genai.types.GenerationConfig(
+                    temperature=temperature,
+                    max_output_tokens=max_tokens
+                )
+            )
+
+            return response.text
 
     elif "grok" in model:
         # Randomly choose between OpenAI and Anthropic SDK
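Note: the hunk above swaps the old Gemini branch for a dual-path strategy: first try Gemini's OpenAI-compatible endpoint, then fall back to the google-generativeai SDK on any exception. Below is a minimal standalone sketch of that pattern, assuming the openai and google-generativeai packages are installed and api_key holds a Gemini key; the name gemini_completion is illustrative, not part of this package's API (the fallback here uses plain dict access where the diff mutates .parts[0].text attributes on dicts).

import logging
from typing import Dict, List

import openai
import google.generativeai as genai

logger = logging.getLogger(__name__)

def gemini_completion(model: str, messages: List[Dict[str, str]], api_key: str,
                      max_tokens: int = 1000, temperature: float = 0.5) -> str:
    try:
        # Path 1: Gemini's OpenAI-compatible endpoint.
        client = openai.OpenAI(
            api_key=api_key,
            base_url="https://generativelanguage.googleapis.com/v1beta/",
        )
        # This endpoint takes no separate system role, so fold a leading
        # system message into the first user turn.
        if messages and messages[0]["role"] == "system":
            system_msg = messages.pop(0)
            if messages:
                messages[0]["content"] = f"{system_msg['content']}\n\n{messages[0]['content']}"
        response = client.chat.completions.create(
            model=model, messages=messages, temperature=temperature
        )
        return response.choices[0].message.content
    except Exception:
        # Path 2: the google-generativeai SDK, with messages converted to
        # Gemini's {"role", "parts"} shape. (Gemini expects "user"/"model"
        # roles; the role mapping is omitted here, as in the diff.)
        logger.info("Falling back to Google's genai library")
        genai.configure(api_key=api_key)
        gemini_messages = []
        for msg in messages:
            if msg["role"] == "system":
                # Fold system text into the first collected turn, if any.
                if gemini_messages:
                    gemini_messages[0]["parts"][0]["text"] = (
                        f"{msg['content']}\n\n{gemini_messages[0]['parts'][0]['text']}"
                    )
            else:
                gemini_messages.append({"role": msg["role"], "parts": [{"text": msg["content"]}]})
        gemini_model = genai.GenerativeModel(model_name=model)
        response = gemini_model.generate_content(
            gemini_messages,
            generation_config=genai.types.GenerationConfig(
                temperature=temperature, max_output_tokens=max_tokens
            ),
        )
        return response.text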
@@ -249,13 +275,6 @@ if __name__ == "__main__":
     # write a test for detect finding agent
     text = "I think the answer is 42"
 
-    # from agent.messageloader import information_detector_messages
-
-    # # Now you can print or use information_detector_messages as needed
-    # information_detector_agent = Agent("gemini-1.5-pro", information_detector_messages)
-    # information_detector_agent.add_message("user", text)
-    # response = information_detector_agent.generate_response()
-    # print(response)
     agent = Agent("claude-3-5-sonnet-20241022", "you are an assistant", memory_enabled=True)
 
     # Format the prompt to check if the section is the last one in the outline
@@ -267,6 +286,6 @@ if __name__ == "__main__":
 
     print(agent.generate_response(max_tokens=20, temperature=0.0))
     print(agent.history[:])
-
-    print(
+    last_message = agent.history.pop()
+    print(last_message)
     print(agent.history[:])
llm_dialog_manager-0.2.1.dist-info/RECORD
ADDED
@@ -0,0 +1,9 @@
+llm_dialog_manager/__init__.py,sha256=joo2odAcSJIo-p3bwQbGxYok_vWUlI2RjkaAmAdKX3o,86
+llm_dialog_manager/agent.py,sha256=cLwDTgCpO8teYaedIiee8XWU8vE9icBe_NNoAklm79w,11388
+llm_dialog_manager/chat_history.py,sha256=xKA-oQCv8jv_g8EhXrG9h1S8Icbj2FfqPIhbty5vra4,6033
+llm_dialog_manager/key_manager.py,sha256=shvxmn4zUtQx_p-x1EFyOmnk-WlhigbpKtxTKve-zXk,4421
+llm_dialog_manager-0.2.1.dist-info/LICENSE,sha256=vWGbYgGuWpWrXL8-xi6pNcX5UzD6pWoIAZmcetyfbus,1064
+llm_dialog_manager-0.2.1.dist-info/METADATA,sha256=F-7unvFE1V-Lk6G_nBvHHXxza3RHjXtB-LJlk_iP2rM,5193
+llm_dialog_manager-0.2.1.dist-info/WHEEL,sha256=a7TGlA-5DaHMRrarXjVbQagU3Man_dCnGIWMJr5kRWo,91
+llm_dialog_manager-0.2.1.dist-info/top_level.txt,sha256=u2EQEXW0NGAt0AAHT7jx1odXZ4rZfjcgbmJhvKFuMkI,19
+llm_dialog_manager-0.2.1.dist-info/RECORD,,
llm_dialog_manager-0.1.2480.dist-info/RECORD
DELETED
@@ -1,9 +0,0 @@
-llm_dialog_manager/__init__.py,sha256=AfSsuE8ty-luFdLkdgQ98oQfbOYqYM8SO6p4tMDcDZ4,89
-llm_dialog_manager/agent.py,sha256=dExW-pfPpRLedi-iJsn5S_iImMmWGI1ixztVqWCmMHE,10303
-llm_dialog_manager/chat_history.py,sha256=xKA-oQCv8jv_g8EhXrG9h1S8Icbj2FfqPIhbty5vra4,6033
-llm_dialog_manager/key_manager.py,sha256=shvxmn4zUtQx_p-x1EFyOmnk-WlhigbpKtxTKve-zXk,4421
-llm_dialog_manager-0.1.2480.dist-info/LICENSE,sha256=vWGbYgGuWpWrXL8-xi6pNcX5UzD6pWoIAZmcetyfbus,1064
-llm_dialog_manager-0.1.2480.dist-info/METADATA,sha256=1MvkDLGrjfAsNe8HTAK0_BNNTMLO_UPZEmi27pGujkY,5196
-llm_dialog_manager-0.1.2480.dist-info/WHEEL,sha256=P9jw-gEje8ByB7_hXoICnHtVCrEwMQh-630tKvQWehc,91
-llm_dialog_manager-0.1.2480.dist-info/top_level.txt,sha256=u2EQEXW0NGAt0AAHT7jx1odXZ4rZfjcgbmJhvKFuMkI,19
-llm_dialog_manager-0.1.2480.dist-info/RECORD,,
{llm_dialog_manager-0.1.2480.dist-info → llm_dialog_manager-0.2.1.dist-info}/LICENSE
File without changes
{llm_dialog_manager-0.1.2480.dist-info → llm_dialog_manager-0.2.1.dist-info}/top_level.txt
File without changes