llm-dialog-manager 0.2.1__tar.gz → 0.2.4__tar.gz
Sign up to get free protection for your applications and to get access to all the features.
- {llm_dialog_manager-0.2.1 → llm_dialog_manager-0.2.4}/PKG-INFO +1 -1
- {llm_dialog_manager-0.2.1 → llm_dialog_manager-0.2.4}/llm_dialog_manager/__init__.py +1 -1
- {llm_dialog_manager-0.2.1 → llm_dialog_manager-0.2.4}/llm_dialog_manager/agent.py +9 -2
- {llm_dialog_manager-0.2.1 → llm_dialog_manager-0.2.4}/llm_dialog_manager.egg-info/PKG-INFO +1 -1
- {llm_dialog_manager-0.2.1 → llm_dialog_manager-0.2.4}/pyproject.toml +1 -1
- {llm_dialog_manager-0.2.1 → llm_dialog_manager-0.2.4}/tests/test_chat_history.py +1 -1
- {llm_dialog_manager-0.2.1 → llm_dialog_manager-0.2.4}/LICENSE +0 -0
- {llm_dialog_manager-0.2.1 → llm_dialog_manager-0.2.4}/README.md +0 -0
- {llm_dialog_manager-0.2.1 → llm_dialog_manager-0.2.4}/llm_dialog_manager/chat_history.py +0 -0
- {llm_dialog_manager-0.2.1 → llm_dialog_manager-0.2.4}/llm_dialog_manager/key_manager.py +0 -0
- {llm_dialog_manager-0.2.1 → llm_dialog_manager-0.2.4}/llm_dialog_manager.egg-info/SOURCES.txt +0 -0
- {llm_dialog_manager-0.2.1 → llm_dialog_manager-0.2.4}/llm_dialog_manager.egg-info/dependency_links.txt +0 -0
- {llm_dialog_manager-0.2.1 → llm_dialog_manager-0.2.4}/llm_dialog_manager.egg-info/requires.txt +0 -0
- {llm_dialog_manager-0.2.1 → llm_dialog_manager-0.2.4}/llm_dialog_manager.egg-info/top_level.txt +0 -0
- {llm_dialog_manager-0.2.1 → llm_dialog_manager-0.2.4}/setup.cfg +0 -0
- {llm_dialog_manager-0.2.1 → llm_dialog_manager-0.2.4}/tests/test_agent.py +0 -0
- {llm_dialog_manager-0.2.1 → llm_dialog_manager-0.2.4}/tests/test_key_manager.py +0 -0
@@ -275,7 +275,14 @@ if __name__ == "__main__":
|
|
275
275
|
# write a test for detect finding agent
|
276
276
|
text = "I think the answer is 42"
|
277
277
|
|
278
|
-
|
278
|
+
# from agent.messageloader import information_detector_messages
|
279
|
+
|
280
|
+
# # Now you can print or use information_detector_messages as needed
|
281
|
+
# information_detector_agent = Agent("gemini-1.5-pro", information_detector_messages)
|
282
|
+
# information_detector_agent.add_message("user", text)
|
283
|
+
# response = information_detector_agent.generate_response()
|
284
|
+
# print(response)
|
285
|
+
agent = Agent("gemini-1.5-pro-002", "you are an assistant", memory_enabled=True)
|
279
286
|
|
280
287
|
# Format the prompt to check if the section is the last one in the outline
|
281
288
|
prompt = f"Say: {text}\n"
|
@@ -284,7 +291,7 @@ if __name__ == "__main__":
|
|
284
291
|
agent.add_message("user", prompt)
|
285
292
|
agent.add_message("assistant", "the answer")
|
286
293
|
|
287
|
-
print(agent.generate_response(
|
294
|
+
print(agent.generate_response())
|
288
295
|
print(agent.history[:])
|
289
296
|
last_message = agent.history.pop()
|
290
297
|
print(last_message)
|
@@ -4,7 +4,7 @@ build-backend = "setuptools.build_meta"
|
|
4
4
|
|
5
5
|
[project]
|
6
6
|
name = "llm_dialog_manager"
|
7
|
-
version = "0.2.1"
|
7
|
+
version = "0.2.4"
|
8
8
|
description = "A Python package for managing LLM chat conversation history"
|
9
9
|
readme = "README.md"
|
10
10
|
authors = [{ name = "xihajun", email = "work@2333.fun" }]
|
File without changes
|
File without changes
|
File without changes
|
File without changes
|
{llm_dialog_manager-0.2.1 → llm_dialog_manager-0.2.4}/llm_dialog_manager.egg-info/SOURCES.txt
RENAMED
File without changes
|
File without changes
|
{llm_dialog_manager-0.2.1 → llm_dialog_manager-0.2.4}/llm_dialog_manager.egg-info/requires.txt
RENAMED
File without changes
|
{llm_dialog_manager-0.2.1 → llm_dialog_manager-0.2.4}/llm_dialog_manager.egg-info/top_level.txt
RENAMED
File without changes
|
File without changes
|
File without changes
|
File without changes
|