llm-dialog-manager 0.1.2479.tar.gz → 0.2.7.tar.gz

Files changed (17)
  1. {llm_dialog_manager-0.1.2479 → llm_dialog_manager-0.2.7}/PKG-INFO +1 -1
  2. {llm_dialog_manager-0.1.2479 → llm_dialog_manager-0.2.7}/llm_dialog_manager/__init__.py +1 -1
  3. {llm_dialog_manager-0.1.2479 → llm_dialog_manager-0.2.7}/llm_dialog_manager/agent.py +2 -11
  4. {llm_dialog_manager-0.1.2479 → llm_dialog_manager-0.2.7}/llm_dialog_manager.egg-info/PKG-INFO +1 -1
  5. {llm_dialog_manager-0.1.2479 → llm_dialog_manager-0.2.7}/pyproject.toml +1 -1
  6. {llm_dialog_manager-0.1.2479 → llm_dialog_manager-0.2.7}/LICENSE +0 -0
  7. {llm_dialog_manager-0.1.2479 → llm_dialog_manager-0.2.7}/README.md +0 -0
  8. {llm_dialog_manager-0.1.2479 → llm_dialog_manager-0.2.7}/llm_dialog_manager/chat_history.py +0 -0
  9. {llm_dialog_manager-0.1.2479 → llm_dialog_manager-0.2.7}/llm_dialog_manager/key_manager.py +0 -0
  10. {llm_dialog_manager-0.1.2479 → llm_dialog_manager-0.2.7}/llm_dialog_manager.egg-info/SOURCES.txt +0 -0
  11. {llm_dialog_manager-0.1.2479 → llm_dialog_manager-0.2.7}/llm_dialog_manager.egg-info/dependency_links.txt +0 -0
  12. {llm_dialog_manager-0.1.2479 → llm_dialog_manager-0.2.7}/llm_dialog_manager.egg-info/requires.txt +0 -0
  13. {llm_dialog_manager-0.1.2479 → llm_dialog_manager-0.2.7}/llm_dialog_manager.egg-info/top_level.txt +0 -0
  14. {llm_dialog_manager-0.1.2479 → llm_dialog_manager-0.2.7}/setup.cfg +0 -0
  15. {llm_dialog_manager-0.1.2479 → llm_dialog_manager-0.2.7}/tests/test_agent.py +0 -0
  16. {llm_dialog_manager-0.1.2479 → llm_dialog_manager-0.2.7}/tests/test_chat_history.py +0 -0
  17. {llm_dialog_manager-0.1.2479 → llm_dialog_manager-0.2.7}/tests/test_key_manager.py +0 -0
{llm_dialog_manager-0.1.2479 → llm_dialog_manager-0.2.7}/PKG-INFO
@@ -1,6 +1,6 @@
 Metadata-Version: 2.1
 Name: llm_dialog_manager
-Version: 0.1.2479
+Version: 0.2.7
 Summary: A Python package for managing LLM chat conversation history
 Author-email: xihajun <work@2333.fun>
 License: MIT
{llm_dialog_manager-0.1.2479 → llm_dialog_manager-0.2.7}/llm_dialog_manager/__init__.py
@@ -1,4 +1,4 @@
 from .chat_history import ChatHistory
 from .agent import Agent
 
-__version__ = "0.1.2479"
+__version__ = "0.2.7"
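
A quick way to confirm the bump after upgrading is to read the package's own version string; this minimal sketch only relies on the names shown in the hunk above (Agent, ChatHistory, and __version__):

import llm_dialog_manager
from llm_dialog_manager import Agent, ChatHistory  # public exports are unchanged in this release

# The module-level version string tracks the release number.
print(llm_dialog_manager.__version__)  # expected: 0.2.7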
{llm_dialog_manager-0.1.2479 → llm_dialog_manager-0.2.7}/llm_dialog_manager/agent.py
@@ -117,7 +117,6 @@ def completion(model: str, messages: List[Dict[str, str]], max_tokens: int = 100
         api_key=api_key,
         base_url="https://generativelanguage.googleapis.com/v1beta/"
     )
-    print(api_key)
     # Remove any system message from the beginning if present
     if messages and messages[0]["role"] == "system":
         system_msg = messages.pop(0)
@@ -131,7 +130,6 @@ def completion(model: str, messages: List[Dict[str, str]], max_tokens: int = 100
         # max_tokens=max_tokens,
         temperature=temperature
     )
-    print(len(response.choices))
 
     return response.choices[0].message.content
 
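
The two hunks above only delete debug print() calls (one of which leaked the API key to stdout); the surrounding flow is unchanged. As a rough sketch of what that Gemini branch of completion() looks like after the cleanup, assuming the standard openai client and an illustrative model name (neither the helper name nor its signature is part of the package's API):

from openai import OpenAI

def gemini_completion_sketch(api_key: str, messages: list, temperature: float = 0.7) -> str:
    # Client pointed at the Gemini OpenAI-compatible endpoint, as in the diff context.
    client = OpenAI(
        api_key=api_key,
        base_url="https://generativelanguage.googleapis.com/v1beta/",
    )

    # Remove any system message from the beginning if present (as in the diff);
    # the real completion() folds it into the request elsewhere.
    if messages and messages[0]["role"] == "system":
        system_msg = messages.pop(0)  # kept for parity with the original code

    response = client.chat.completions.create(
        model="gemini-1.5-pro",  # illustrative; the real code derives this from its `model` argument
        messages=messages,
        # max_tokens=max_tokens,  # still commented out in this release
        temperature=temperature,
    )
    # The debug prints of api_key and len(response.choices) are gone in 0.2.7.
    return response.choices[0].message.content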
@@ -249,13 +247,6 @@ if __name__ == "__main__":
     # write a test for detect finding agent
     text = "I think the answer is 42"
 
-    # from agent.messageloader import information_detector_messages
-
-    # # Now you can print or use information_detector_messages as needed
-    # information_detector_agent = Agent("gemini-1.5-pro", information_detector_messages)
-    # information_detector_agent.add_message("user", text)
-    # response = information_detector_agent.generate_response()
-    # print(response)
     agent = Agent("claude-3-5-sonnet-20241022", "you are an assistant", memory_enabled=True)
 
     # Format the prompt to check if the section is the last one in the outline
@@ -267,6 +258,6 @@ if __name__ == "__main__":
 
     print(agent.generate_response(max_tokens=20, temperature=0.0))
     print(agent.history[:])
-    a = agent.history.pop()
-    print(a)
+    last_message = agent.history.pop()
+    print(last_message)
     print(agent.history[:])
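
In the __main__ example itself, the only behavioral change is the clearer variable name (a → last_message); the commented-out information-detector block is simply removed. Usage along these lines exercises the same calls the diff touches (API keys and model access are assumed, and the prompt text is illustrative):

from llm_dialog_manager import Agent

agent = Agent("claude-3-5-sonnet-20241022", "you are an assistant", memory_enabled=True)
agent.add_message("user", "Is this section the last one in the outline?")  # illustrative prompt

print(agent.generate_response(max_tokens=20, temperature=0.0))
print(agent.history[:])

last_message = agent.history.pop()  # renamed from `a` in 0.2.7
print(last_message)
print(agent.history[:])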
{llm_dialog_manager-0.1.2479 → llm_dialog_manager-0.2.7}/llm_dialog_manager.egg-info/PKG-INFO
@@ -1,6 +1,6 @@
 Metadata-Version: 2.1
 Name: llm_dialog_manager
-Version: 0.1.2479
+Version: 0.2.7
 Summary: A Python package for managing LLM chat conversation history
 Author-email: xihajun <work@2333.fun>
 License: MIT
{llm_dialog_manager-0.1.2479 → llm_dialog_manager-0.2.7}/pyproject.toml
@@ -4,7 +4,7 @@ build-backend = "setuptools.build_meta"
 
 [project]
 name = "llm_dialog_manager"
-version = "0.1.2479"
+version = "0.2.7"
 description = "A Python package for managing LLM chat conversation history"
 readme = "README.md"
 authors = [{ name = "xihajun", email = "work@2333.fun" }]