llm-dialog-manager 0.2.1.tar.gz → 0.2.6.tar.gz

Files changed (17)
  1. {llm_dialog_manager-0.2.1 → llm_dialog_manager-0.2.6}/PKG-INFO +23 -67
  2. {llm_dialog_manager-0.2.1 → llm_dialog_manager-0.2.6}/README.md +22 -66
  3. {llm_dialog_manager-0.2.1 → llm_dialog_manager-0.2.6}/llm_dialog_manager/__init__.py +1 -1
  4. {llm_dialog_manager-0.2.1 → llm_dialog_manager-0.2.6}/llm_dialog_manager/agent.py +9 -2
  5. {llm_dialog_manager-0.2.1 → llm_dialog_manager-0.2.6}/llm_dialog_manager.egg-info/PKG-INFO +23 -67
  6. {llm_dialog_manager-0.2.1 → llm_dialog_manager-0.2.6}/pyproject.toml +1 -1
  7. {llm_dialog_manager-0.2.1 → llm_dialog_manager-0.2.6}/tests/test_chat_history.py +1 -1
  8. {llm_dialog_manager-0.2.1 → llm_dialog_manager-0.2.6}/LICENSE +0 -0
  9. {llm_dialog_manager-0.2.1 → llm_dialog_manager-0.2.6}/llm_dialog_manager/chat_history.py +0 -0
  10. {llm_dialog_manager-0.2.1 → llm_dialog_manager-0.2.6}/llm_dialog_manager/key_manager.py +0 -0
  11. {llm_dialog_manager-0.2.1 → llm_dialog_manager-0.2.6}/llm_dialog_manager.egg-info/SOURCES.txt +0 -0
  12. {llm_dialog_manager-0.2.1 → llm_dialog_manager-0.2.6}/llm_dialog_manager.egg-info/dependency_links.txt +0 -0
  13. {llm_dialog_manager-0.2.1 → llm_dialog_manager-0.2.6}/llm_dialog_manager.egg-info/requires.txt +0 -0
  14. {llm_dialog_manager-0.2.1 → llm_dialog_manager-0.2.6}/llm_dialog_manager.egg-info/top_level.txt +0 -0
  15. {llm_dialog_manager-0.2.1 → llm_dialog_manager-0.2.6}/setup.cfg +0 -0
  16. {llm_dialog_manager-0.2.1 → llm_dialog_manager-0.2.6}/tests/test_agent.py +0 -0
  17. {llm_dialog_manager-0.2.1 → llm_dialog_manager-0.2.6}/tests/test_key_manager.py +0 -0

{llm_dialog_manager-0.2.1 → llm_dialog_manager-0.2.6}/PKG-INFO

@@ -1,6 +1,6 @@
 Metadata-Version: 2.1
 Name: llm_dialog_manager
-Version: 0.2.1
+Version: 0.2.6
 Summary: A Python package for managing LLM chat conversation history
 Author-email: xihajun <work@2333.fun>
 License: MIT
@@ -73,23 +73,29 @@ pip install llm-dialog-manager
 
 ## Quick Start
 
-### Basic Usage
 
-```python
-from llm_dialog_manager import ChatHistory
+### Environment Variables
 
-# Initialize with a system message
-history = ChatHistory("You are a helpful assistant")
+Create a `.env` file in your project root:
 
-# Add messages
-history.add_user_message("Hello!")
-history.add_assistant_message("Hi there! How can I help you today?")
+```bash
+# OpenAI
+OPENAI_API_KEY_1=your-key-1
+OPENAI_API_BASE_1=https://api.openai.com/v1
 
-# Print conversation
-print(history)
+# Anthropic
+ANTHROPIC_API_KEY_1=your-anthropic-key
+ANTHROPIC_API_BASE_1=https://api.anthropic.com
+
+# Google
+GEMINI_API_KEY=your-gemini-key
+
+# X.AI
+XAI_API_KEY=your-x-key
 ```
 
-### Using the AI Agent
+### Basic Usage
+
 
 ```python
 from llm_dialog_manager import Agent
@@ -106,65 +112,15 @@ response = agent.generate_response()
 agent.save_conversation()
 ```
 
-## Advanced Features
-
-### Managing Multiple API Keys
-
-```python
-from llm_dialog_manager import Agent
-
-# Use specific API key
-agent = Agent("gpt-4", api_key="your-api-key")
-
-# Or use environment variables
-# OPENAI_API_KEY_1=key1
-# OPENAI_API_KEY_2=key2
-# The system will automatically handle load balancing
-```
-
-### Conversation Management
-
-```python
-from llm_dialog_manager import ChatHistory
-
-history = ChatHistory()
-
-# Add messages with role validation
-history.add_message("Hello system", "system")
-history.add_message("Hello user", "user")
-history.add_message("Hello assistant", "assistant")
-
-# Search conversations
-results = history.search_for_keyword("hello")
-
-# Get conversation status
-status = history.conversation_status()
-history.display_conversation_status()
-
-# Get conversation snippets
-snippet = history.get_conversation_snippet(1)
-history.display_snippet(1)
-```
-
-## Environment Variables
-
-Create a `.env` file in your project root:
+### Setup Debugging Console
 
 ```bash
-# OpenAI
-OPENAI_API_KEY_1=your-key-1
-OPENAI_API_BASE_1=https://api.openai.com/v1
-
-# Anthropic
-ANTHROPIC_API_KEY_1=your-anthropic-key
-ANTHROPIC_API_BASE_1=https://api.anthropic.com
+python app.py
+# open localhost:8000
+```
+https://github.com/user-attachments/assets/5f640029-24e6-44ea-a3a3-02eb3de0d4df
 
-# Google
-GEMINI_API_KEY=your-gemini-key
 
-# X.AI
-XAI_API_KEY=your-x-key
-```
 
 ## Development
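
Taken together, these hunks drop the old ChatHistory quick start from the README and replace it with an environment-driven Agent workflow plus a small debugging console. A minimal end-to-end sketch of that workflow, assuming the `.env` layout shown above and using python-dotenv to load it (the diff does not show whether the package loads `.env` on its own); only calls that appear elsewhere in this diff (`Agent`, `add_message`, `generate_response`, `save_conversation`) are used:

```python
# Sketch of the 0.2.6 quick-start flow, assuming python-dotenv is installed
# and a .env file as described in the README hunks above.
from dotenv import load_dotenv
from llm_dialog_manager import Agent

load_dotenv()  # picks up OPENAI_API_KEY_1, ANTHROPIC_API_KEY_1, GEMINI_API_KEY, ...

agent = Agent("gemini-1.5-pro-002", "you are an assistant", memory_enabled=True)
agent.add_message("user", "Say: I think the answer is 42")
response = agent.generate_response()
print(response)

# Persist the conversation, as shown in the README context lines
agent.save_conversation()
```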
 

{llm_dialog_manager-0.2.1 → llm_dialog_manager-0.2.6}/README.md

@@ -26,23 +26,29 @@ pip install llm-dialog-manager
 
 ## Quick Start
 
-### Basic Usage
 
-```python
-from llm_dialog_manager import ChatHistory
+### Environment Variables
 
-# Initialize with a system message
-history = ChatHistory("You are a helpful assistant")
+Create a `.env` file in your project root:
 
-# Add messages
-history.add_user_message("Hello!")
-history.add_assistant_message("Hi there! How can I help you today?")
+```bash
+# OpenAI
+OPENAI_API_KEY_1=your-key-1
+OPENAI_API_BASE_1=https://api.openai.com/v1
 
-# Print conversation
-print(history)
+# Anthropic
+ANTHROPIC_API_KEY_1=your-anthropic-key
+ANTHROPIC_API_BASE_1=https://api.anthropic.com
+
+# Google
+GEMINI_API_KEY=your-gemini-key
+
+# X.AI
+XAI_API_KEY=your-x-key
 ```
 
-### Using the AI Agent
+### Basic Usage
+
 
 ```python
 from llm_dialog_manager import Agent
@@ -59,65 +65,15 @@ response = agent.generate_response()
 agent.save_conversation()
 ```
 
-## Advanced Features
-
-### Managing Multiple API Keys
-
-```python
-from llm_dialog_manager import Agent
-
-# Use specific API key
-agent = Agent("gpt-4", api_key="your-api-key")
-
-# Or use environment variables
-# OPENAI_API_KEY_1=key1
-# OPENAI_API_KEY_2=key2
-# The system will automatically handle load balancing
-```
-
-### Conversation Management
-
-```python
-from llm_dialog_manager import ChatHistory
-
-history = ChatHistory()
-
-# Add messages with role validation
-history.add_message("Hello system", "system")
-history.add_message("Hello user", "user")
-history.add_message("Hello assistant", "assistant")
-
-# Search conversations
-results = history.search_for_keyword("hello")
-
-# Get conversation status
-status = history.conversation_status()
-history.display_conversation_status()
-
-# Get conversation snippets
-snippet = history.get_conversation_snippet(1)
-history.display_snippet(1)
-```
-
-## Environment Variables
-
-Create a `.env` file in your project root:
+### Setup Debugging Console
 
 ```bash
-# OpenAI
-OPENAI_API_KEY_1=your-key-1
-OPENAI_API_BASE_1=https://api.openai.com/v1
-
-# Anthropic
-ANTHROPIC_API_KEY_1=your-anthropic-key
-ANTHROPIC_API_BASE_1=https://api.anthropic.com
+python app.py
+# open localhost:8000
+```
+https://github.com/user-attachments/assets/5f640029-24e6-44ea-a3a3-02eb3de0d4df
 
-# Google
-GEMINI_API_KEY=your-gemini-key
 
-# X.AI
-XAI_API_KEY=your-x-key
-```
 
 ## Development
 

{llm_dialog_manager-0.2.1 → llm_dialog_manager-0.2.6}/llm_dialog_manager/__init__.py

@@ -1,4 +1,4 @@
 from .chat_history import ChatHistory
 from .agent import Agent
 
-__version__ = "0.2.1"
+__version__ = "0.2.6"
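
Since `__init__.py` now exports `__version__ = "0.2.6"`, a quick way to confirm which release is actually installed (a trivial sketch, not taken from the package docs):

```python
import llm_dialog_manager

# __version__ is set in llm_dialog_manager/__init__.py, bumped to 0.2.6 in this release
print(llm_dialog_manager.__version__)  # expected: 0.2.6
```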

{llm_dialog_manager-0.2.1 → llm_dialog_manager-0.2.6}/llm_dialog_manager/agent.py

@@ -275,7 +275,14 @@ if __name__ == "__main__":
     # write a test for detect finding agent
     text = "I think the answer is 42"
 
-    agent = Agent("claude-3-5-sonnet-20241022", "you are an assistant", memory_enabled=True)
+    # from agent.messageloader import information_detector_messages
+
+    # # Now you can print or use information_detector_messages as needed
+    # information_detector_agent = Agent("gemini-1.5-pro", information_detector_messages)
+    # information_detector_agent.add_message("user", text)
+    # response = information_detector_agent.generate_response()
+    # print(response)
+    agent = Agent("gemini-1.5-pro-002", "you are an assistant", memory_enabled=True)
 
     # Format the prompt to check if the section is the last one in the outline
     prompt = f"Say: {text}\n"
@@ -284,7 +291,7 @@ if __name__ == "__main__":
     agent.add_message("user", prompt)
     agent.add_message("assistant", "the answer")
 
-    print(agent.generate_response(max_tokens=20, temperature=0.0))
+    print(agent.generate_response())
     print(agent.history[:])
     last_message = agent.history.pop()
     print(last_message)
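
The rewritten `__main__` block in `agent.py` swaps the Claude model for `gemini-1.5-pro-002`, keeps the information-detector experiment as commented-out code, and now calls `generate_response()` without explicit `max_tokens`/`temperature` arguments. Reassembled as a standalone script it reads roughly as follows (a sketch mirroring the diff; a valid `GEMINI_API_KEY` from the `.env` section above is assumed):

```python
# Smoke test for the 0.2.6 Agent, mirroring the updated __main__ block in agent.py.
from llm_dialog_manager import Agent

text = "I think the answer is 42"

agent = Agent("gemini-1.5-pro-002", "you are an assistant", memory_enabled=True)

# Exercise the basic message/response cycle
prompt = f"Say: {text}\n"
agent.add_message("user", prompt)
agent.add_message("assistant", "the answer")

print(agent.generate_response())  # defaults now; no max_tokens/temperature passed
print(agent.history[:])
last_message = agent.history.pop()
print(last_message)
```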

{llm_dialog_manager-0.2.1 → llm_dialog_manager-0.2.6}/llm_dialog_manager.egg-info/PKG-INFO

@@ -1,6 +1,6 @@
 Metadata-Version: 2.1
 Name: llm_dialog_manager
-Version: 0.2.1
+Version: 0.2.6
 Summary: A Python package for managing LLM chat conversation history
 Author-email: xihajun <work@2333.fun>
 License: MIT
@@ -73,23 +73,29 @@ pip install llm-dialog-manager
 
 ## Quick Start
 
-### Basic Usage
 
-```python
-from llm_dialog_manager import ChatHistory
+### Environment Variables
 
-# Initialize with a system message
-history = ChatHistory("You are a helpful assistant")
+Create a `.env` file in your project root:
 
-# Add messages
-history.add_user_message("Hello!")
-history.add_assistant_message("Hi there! How can I help you today?")
+```bash
+# OpenAI
+OPENAI_API_KEY_1=your-key-1
+OPENAI_API_BASE_1=https://api.openai.com/v1
 
-# Print conversation
-print(history)
+# Anthropic
+ANTHROPIC_API_KEY_1=your-anthropic-key
+ANTHROPIC_API_BASE_1=https://api.anthropic.com
+
+# Google
+GEMINI_API_KEY=your-gemini-key
+
+# X.AI
+XAI_API_KEY=your-x-key
 ```
 
-### Using the AI Agent
+### Basic Usage
+
 
 ```python
 from llm_dialog_manager import Agent
@@ -106,65 +112,15 @@ response = agent.generate_response()
 agent.save_conversation()
 ```
 
-## Advanced Features
-
-### Managing Multiple API Keys
-
-```python
-from llm_dialog_manager import Agent
-
-# Use specific API key
-agent = Agent("gpt-4", api_key="your-api-key")
-
-# Or use environment variables
-# OPENAI_API_KEY_1=key1
-# OPENAI_API_KEY_2=key2
-# The system will automatically handle load balancing
-```
-
-### Conversation Management
-
-```python
-from llm_dialog_manager import ChatHistory
-
-history = ChatHistory()
-
-# Add messages with role validation
-history.add_message("Hello system", "system")
-history.add_message("Hello user", "user")
-history.add_message("Hello assistant", "assistant")
-
-# Search conversations
-results = history.search_for_keyword("hello")
-
-# Get conversation status
-status = history.conversation_status()
-history.display_conversation_status()
-
-# Get conversation snippets
-snippet = history.get_conversation_snippet(1)
-history.display_snippet(1)
-```
-
-## Environment Variables
-
-Create a `.env` file in your project root:
+### Setup Debugging Console
 
 ```bash
-# OpenAI
-OPENAI_API_KEY_1=your-key-1
-OPENAI_API_BASE_1=https://api.openai.com/v1
-
-# Anthropic
-ANTHROPIC_API_KEY_1=your-anthropic-key
-ANTHROPIC_API_BASE_1=https://api.anthropic.com
+python app.py
+# open localhost:8000
+```
+https://github.com/user-attachments/assets/5f640029-24e6-44ea-a3a3-02eb3de0d4df
 
-# Google
-GEMINI_API_KEY=your-gemini-key
 
-# X.AI
-XAI_API_KEY=your-x-key
-```
 
 ## Development
 

{llm_dialog_manager-0.2.1 → llm_dialog_manager-0.2.6}/pyproject.toml

@@ -4,7 +4,7 @@ build-backend = "setuptools.build_meta"
 
 [project]
 name = "llm_dialog_manager"
-version = "0.2.1"
+version = "0.2.6"
 description = "A Python package for managing LLM chat conversation history"
 readme = "README.md"
 authors = [{ name = "xihajun", email = "work@2333.fun" }]

{llm_dialog_manager-0.2.1 → llm_dialog_manager-0.2.6}/tests/test_chat_history.py

@@ -1,5 +1,5 @@
 import pytest
-from ai_chat_history import ChatHistory
+from llm_dialog_manager import ChatHistory
 
 def test_chat_history_initialization():
     # Test empty initialization
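
The last hunk fixes a stale import: the test still referenced the package's earlier name `ai_chat_history`. With the corrected import, the test module might look like the sketch below; the test bodies are truncated in this diff, so the assertions here are illustrative only, built from `ChatHistory` calls shown in the pre-0.2.6 README earlier in this diff:

```python
from llm_dialog_manager import ChatHistory  # corrected import from this hunk


def test_chat_history_initialization():
    # Test empty initialization -- the real assertions are cut off in the hunk
    # above, so this check is only an illustrative stand-in.
    history = ChatHistory()
    assert history is not None


def test_add_messages():
    # The system-message constructor and add_message(content, role) appear in
    # the pre-0.2.6 README removed earlier in this diff.
    history = ChatHistory("You are a helpful assistant")
    history.add_message("Hello user", "user")
    history.add_message("Hello assistant", "assistant")
```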