llm-dialog-manager 0.1.2480__py3-none-any.whl → 0.2.4__py3-none-any.whl
This diff compares publicly available package versions released to one of the supported registries. It is provided for informational purposes only and reflects the changes between versions as they appear in their respective public registries.
- llm_dialog_manager/__init__.py +1 -1
- llm_dialog_manager/agent.py +51 -25
- {llm_dialog_manager-0.1.2480.dist-info → llm_dialog_manager-0.2.4.dist-info}/METADATA +10 -10
- llm_dialog_manager-0.2.4.dist-info/RECORD +9 -0
- {llm_dialog_manager-0.1.2480.dist-info → llm_dialog_manager-0.2.4.dist-info}/WHEEL +1 -1
- llm_dialog_manager-0.1.2480.dist-info/RECORD +0 -9
- {llm_dialog_manager-0.1.2480.dist-info → llm_dialog_manager-0.2.4.dist-info}/LICENSE +0 -0
- {llm_dialog_manager-0.1.2480.dist-info → llm_dialog_manager-0.2.4.dist-info}/top_level.txt +0 -0
llm_dialog_manager/__init__.py
CHANGED
llm_dialog_manager/agent.py
CHANGED
@@ -113,27 +113,53 @@ def completion(model: str, messages: List[Dict[str, str]], max_tokens: int = 100
         return response.content[0].text

     elif "gemini" in model:
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
+        try:
+            # First try OpenAI-style API
+            client = openai.OpenAI(
+                api_key=api_key,
+                base_url="https://generativelanguage.googleapis.com/v1beta/"
+            )
+            # Remove any system message from the beginning if present
+            if messages and messages[0]["role"] == "system":
+                system_msg = messages.pop(0)
+                # Prepend system message to first user message if exists
+                if messages:
+                    messages[0]["content"] = f"{system_msg['content']}\n\n{messages[0]['content']}"
+
+            response = client.chat.completions.create(
+                model=model,
+                messages=messages,
+                temperature=temperature
+            )
+
+            return response.choices[0].message.content
+
+        except Exception as e:
+            # If OpenAI-style API fails, fall back to Google's genai library
+            logger.info("Falling back to Google's genai library")
+            genai.configure(api_key=api_key)
+
+            # Convert messages to Gemini format
+            gemini_messages = []
+            for msg in messages:
+                if msg["role"] == "system":
+                    # Prepend system message to first user message if exists
+                    if gemini_messages:
+                        gemini_messages[0].parts[0].text = f"{msg['content']}\n\n{gemini_messages[0].parts[0].text}"
+                else:
+                    gemini_messages.append({"role": msg["role"], "parts": [{"text": msg["content"]}]})
+
+            # Create Gemini model and generate response
+            model = genai.GenerativeModel(model_name=model)
+            response = model.generate_content(
+                gemini_messages,
+                generation_config=genai.types.GenerationConfig(
+                    temperature=temperature,
+                    max_output_tokens=max_tokens
+                )
+            )
+
+            return response.text

     elif "grok" in model:
         # Randomly choose between OpenAI and Anthropic SDK
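The new Gemini branch first tries Google's OpenAI-compatible endpoint via the openai client and, on any exception, falls back to the google-generativeai SDK. A minimal usage sketch of the reworked completion() helper follows; the import path, model name, and message contents are illustrative assumptions, and only the parameters visible in the hunk header (model, messages, max_tokens) are used:

```python
# Hypothetical usage of the changed completion() helper.
from llm_dialog_manager.agent import completion  # assumed import path

messages = [
    {"role": "system", "content": "You are a concise assistant."},
    {"role": "user", "content": "Name one use of a dialog manager."},
]

# For "gemini" models the system message is folded into the first user
# message before the OpenAI-style call; the genai fallback instead
# converts the list into Gemini's {"role", "parts"} shape.
print(completion("gemini-1.5-flash", messages, max_tokens=100))
```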
@@ -256,7 +282,7 @@ if __name__ == "__main__":
     # information_detector_agent.add_message("user", text)
     # response = information_detector_agent.generate_response()
     # print(response)
-    agent = Agent("
+    agent = Agent("gemini-1.5-pro-002", "you are an assistant", memory_enabled=True)

     # Format the prompt to check if the section is the last one in the outline
     prompt = f"Say: {text}\n"
@@ -265,8 +291,8 @@ if __name__ == "__main__":
     agent.add_message("user", prompt)
     agent.add_message("assistant", "the answer")

-    print(agent.generate_response(
+    print(agent.generate_response())
     print(agent.history[:])
-
-    print(
+    last_message = agent.history.pop()
+    print(last_message)
     print(agent.history[:])
{llm_dialog_manager-0.1.2480.dist-info → llm_dialog_manager-0.2.4.dist-info}/METADATA
CHANGED
@@ -1,6 +1,6 @@
 Metadata-Version: 2.1
 Name: llm_dialog_manager
-Version: 0.1.2480
+Version: 0.2.4
 Summary: A Python package for managing LLM chat conversation history
 Author-email: xihajun <work@2333.fun>
 License: MIT
@@ -25,25 +25,25 @@ Requires-Dist: google-generativeai>=0.1.0
 Requires-Dist: python-dotenv>=1.0.0
 Requires-Dist: typing-extensions>=4.0.0
 Requires-Dist: uuid>=1.30
-Provides-Extra: all
-Requires-Dist: pytest>=8.0.0; extra == "all"
-Requires-Dist: pytest-asyncio>=0.21.1; extra == "all"
-Requires-Dist: pytest-cov>=4.1.0; extra == "all"
-Requires-Dist: black>=23.9.1; extra == "all"
-Requires-Dist: isort>=5.12.0; extra == "all"
 Provides-Extra: dev
 Requires-Dist: pytest>=8.0.0; extra == "dev"
 Requires-Dist: pytest-asyncio>=0.21.1; extra == "dev"
 Requires-Dist: pytest-cov>=4.1.0; extra == "dev"
 Requires-Dist: black>=23.9.1; extra == "dev"
 Requires-Dist: isort>=5.12.0; extra == "dev"
-Provides-Extra: lint
-Requires-Dist: black>=22.0; extra == "lint"
-Requires-Dist: isort>=5.0; extra == "lint"
 Provides-Extra: test
 Requires-Dist: pytest>=6.0; extra == "test"
 Requires-Dist: pytest-asyncio>=0.14.0; extra == "test"
 Requires-Dist: pytest-cov>=2.0; extra == "test"
+Provides-Extra: lint
+Requires-Dist: black>=22.0; extra == "lint"
+Requires-Dist: isort>=5.0; extra == "lint"
+Provides-Extra: all
+Requires-Dist: pytest>=8.0.0; extra == "all"
+Requires-Dist: pytest-asyncio>=0.21.1; extra == "all"
+Requires-Dist: pytest-cov>=4.1.0; extra == "all"
+Requires-Dist: black>=23.9.1; extra == "all"
+Requires-Dist: isort>=5.12.0; extra == "all"

 # LLM Dialog Manager

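The metadata change is a reorder plus the version bump: the dev, test, lint, and all extras carry the same pins as before, with lint and all now listed after test. A quick standard-library check of what an installed copy declares (a sketch; the distribution name matches the Name field above):

```python
from importlib.metadata import metadata

md = metadata("llm_dialog_manager")
print(md["Version"])                 # '0.2.4'
print(md.get_all("Provides-Extra"))  # ['dev', 'test', 'lint', 'all'] for 0.2.4
```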
llm_dialog_manager-0.2.4.dist-info/RECORD
ADDED
@@ -0,0 +1,9 @@
+llm_dialog_manager/__init__.py,sha256=Ki4WpdLEEIJ_fi9daYLWSsbxWmp_gmIllCRZH8xgTBw,86
+llm_dialog_manager/agent.py,sha256=aST_n9jU1tuHjMUK0ytCmWq3wYkZp9VHLcg4Q4Y7Tcw,11731
+llm_dialog_manager/chat_history.py,sha256=xKA-oQCv8jv_g8EhXrG9h1S8Icbj2FfqPIhbty5vra4,6033
+llm_dialog_manager/key_manager.py,sha256=shvxmn4zUtQx_p-x1EFyOmnk-WlhigbpKtxTKve-zXk,4421
+llm_dialog_manager-0.2.4.dist-info/LICENSE,sha256=vWGbYgGuWpWrXL8-xi6pNcX5UzD6pWoIAZmcetyfbus,1064
+llm_dialog_manager-0.2.4.dist-info/METADATA,sha256=n6l6z8jf4kqgEhqkRIZPhF9CuM9Q6OU0aj_5fQIEY84,5193
+llm_dialog_manager-0.2.4.dist-info/WHEEL,sha256=PZUExdf71Ui_so67QXpySuHtCi3-J3wvF4ORK6k_S8U,91
+llm_dialog_manager-0.2.4.dist-info/top_level.txt,sha256=u2EQEXW0NGAt0AAHT7jx1odXZ4rZfjcgbmJhvKFuMkI,19
+llm_dialog_manager-0.2.4.dist-info/RECORD,,
llm_dialog_manager-0.1.2480.dist-info/RECORD
REMOVED
@@ -1,9 +0,0 @@
-llm_dialog_manager/__init__.py,sha256=AfSsuE8ty-luFdLkdgQ98oQfbOYqYM8SO6p4tMDcDZ4,89
-llm_dialog_manager/agent.py,sha256=dExW-pfPpRLedi-iJsn5S_iImMmWGI1ixztVqWCmMHE,10303
-llm_dialog_manager/chat_history.py,sha256=xKA-oQCv8jv_g8EhXrG9h1S8Icbj2FfqPIhbty5vra4,6033
-llm_dialog_manager/key_manager.py,sha256=shvxmn4zUtQx_p-x1EFyOmnk-WlhigbpKtxTKve-zXk,4421
-llm_dialog_manager-0.1.2480.dist-info/LICENSE,sha256=vWGbYgGuWpWrXL8-xi6pNcX5UzD6pWoIAZmcetyfbus,1064
-llm_dialog_manager-0.1.2480.dist-info/METADATA,sha256=1MvkDLGrjfAsNe8HTAK0_BNNTMLO_UPZEmi27pGujkY,5196
-llm_dialog_manager-0.1.2480.dist-info/WHEEL,sha256=P9jw-gEje8ByB7_hXoICnHtVCrEwMQh-630tKvQWehc,91
-llm_dialog_manager-0.1.2480.dist-info/top_level.txt,sha256=u2EQEXW0NGAt0AAHT7jx1odXZ4rZfjcgbmJhvKFuMkI,19
-llm_dialog_manager-0.1.2480.dist-info/RECORD,,
{llm_dialog_manager-0.1.2480.dist-info → llm_dialog_manager-0.2.4.dist-info}/LICENSE
File without changes
{llm_dialog_manager-0.1.2480.dist-info → llm_dialog_manager-0.2.4.dist-info}/top_level.txt
File without changes