llm-dialog-manager 0.1.2480__py3-none-any.whl → 0.2.1__py3-none-any.whl

--- llm_dialog_manager/__init__.py
+++ llm_dialog_manager/__init__.py
@@ -1,4 +1,4 @@
 from .chat_history import ChatHistory
 from .agent import Agent
 
-__version__ = "0.1.2480"
+__version__ = "0.2.1"
--- llm_dialog_manager/agent.py
+++ llm_dialog_manager/agent.py
@@ -113,27 +113,53 @@ def completion(model: str, messages: List[Dict[str, str]], max_tokens: int = 100
         return response.content[0].text
 
     elif "gemini" in model:
-        client = openai.OpenAI(
-            api_key=api_key,
-            base_url="https://generativelanguage.googleapis.com/v1beta/"
-        )
-        print(api_key)
-        # Remove any system message from the beginning if present
-        if messages and messages[0]["role"] == "system":
-            system_msg = messages.pop(0)
-            # Prepend system message to first user message if exists
-            if messages:
-                messages[0]["content"] = f"{system_msg['content']}\n\n{messages[0]['content']}"
-
-        response = client.chat.completions.create(
-            model=model,
-            messages=messages,
-            # max_tokens=max_tokens,
-            temperature=temperature
-        )
-        print(len(response.choices))
-
-        return response.choices[0].message.content
+        try:
+            # First try OpenAI-style API
+            client = openai.OpenAI(
+                api_key=api_key,
+                base_url="https://generativelanguage.googleapis.com/v1beta/"
+            )
+            # Remove any system message from the beginning if present
+            if messages and messages[0]["role"] == "system":
+                system_msg = messages.pop(0)
+                # Prepend system message to first user message if exists
+                if messages:
+                    messages[0]["content"] = f"{system_msg['content']}\n\n{messages[0]['content']}"
+
+            response = client.chat.completions.create(
+                model=model,
+                messages=messages,
+                temperature=temperature
+            )
+
+            return response.choices[0].message.content
+
+        except Exception as e:
+            # If OpenAI-style API fails, fall back to Google's genai library
+            logger.info("Falling back to Google's genai library")
+            genai.configure(api_key=api_key)
+
+            # Convert messages to Gemini format
+            gemini_messages = []
+            for msg in messages:
+                if msg["role"] == "system":
+                    # Prepend system message to first user message if exists
+                    if gemini_messages:
+                        gemini_messages[0].parts[0].text = f"{msg['content']}\n\n{gemini_messages[0].parts[0].text}"
+                else:
+                    gemini_messages.append({"role": msg["role"], "parts": [{"text": msg["content"]}]})
+
+            # Create Gemini model and generate response
+            model = genai.GenerativeModel(model_name=model)
+            response = model.generate_content(
+                gemini_messages,
+                generation_config=genai.types.GenerationConfig(
+                    temperature=temperature,
+                    max_output_tokens=max_tokens
+                )
+            )
+
+            return response.text
 
     elif "grok" in model:
         # Randomly choose between OpenAI and Anthropic SDK
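The new Gemini branch first calls Google's OpenAI-compatible endpoint and, on any exception, falls back to the google-generativeai SDK. Below is a minimal, standalone sketch of that same fallback pattern, assuming a `GEMINI_API_KEY` environment variable; the helper name `gemini_completion` is illustrative and not part of the package, and the `user`/`model` role remapping is an adjustment this sketch makes that the diff above does not.

```python
# Hypothetical standalone sketch of the fallback pattern introduced above.
# Assumptions: GEMINI_API_KEY is set; gemini_completion is an illustrative
# helper name, not part of llm_dialog_manager's public API.
import os

import openai
import google.generativeai as genai


def gemini_completion(messages, model_name="gemini-1.5-pro",
                      temperature=0.5, max_tokens=1000):
    api_key = os.environ["GEMINI_API_KEY"]
    try:
        # Preferred path: Google's OpenAI-compatible endpoint.
        client = openai.OpenAI(
            api_key=api_key,
            base_url="https://generativelanguage.googleapis.com/v1beta/",
        )
        response = client.chat.completions.create(
            model=model_name,
            messages=messages,
            temperature=temperature,
        )
        return response.choices[0].message.content
    except Exception:
        # Fallback path: the google-generativeai SDK. Gemini expects
        # "user"/"model" roles, so assistant turns are remapped here
        # (an addition of this sketch; the package keeps roles as-is).
        genai.configure(api_key=api_key)
        gemini_messages = [
            {"role": "model" if m["role"] == "assistant" else "user",
             "parts": [{"text": m["content"]}]}
            for m in messages
            if m["role"] != "system"  # the package merges system text upstream
        ]
        gemini_model = genai.GenerativeModel(model_name=model_name)
        response = gemini_model.generate_content(
            gemini_messages,
            generation_config=genai.types.GenerationConfig(
                temperature=temperature,
                max_output_tokens=max_tokens,
            ),
        )
        return response.text
```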
@@ -249,13 +275,6 @@ if __name__ == "__main__":
     # write a test for detect finding agent
     text = "I think the answer is 42"
 
-    # from agent.messageloader import information_detector_messages
-
-    # # Now you can print or use information_detector_messages as needed
-    # information_detector_agent = Agent("gemini-1.5-pro", information_detector_messages)
-    # information_detector_agent.add_message("user", text)
-    # response = information_detector_agent.generate_response()
-    # print(response)
     agent = Agent("claude-3-5-sonnet-20241022", "you are an assistant", memory_enabled=True)
 
     # Format the prompt to check if the section is the last one in the outline
@@ -267,6 +286,6 @@ if __name__ == "__main__":
 
     print(agent.generate_response(max_tokens=20, temperature=0.0))
     print(agent.history[:])
-    a = agent.history.pop()
-    print(a)
+    last_message = agent.history.pop()
+    print(last_message)
     print(agent.history[:])
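For reference, the `__main__` smoke test above corresponds to roughly the following usage of the package. The calls mirror only what is visible in this diff (constructor arguments, `add_message`, `generate_response`, and `history`); the prompt text is a placeholder.

```python
# Usage sketch based solely on the calls shown in the diff above.
from llm_dialog_manager import Agent

agent = Agent("claude-3-5-sonnet-20241022", "you are an assistant", memory_enabled=True)
agent.add_message("user", "Is this the last section of the outline?")

print(agent.generate_response(max_tokens=20, temperature=0.0))
print(agent.history[:])             # full conversation so far

last_message = agent.history.pop()  # remove and inspect the latest message
print(last_message)
print(agent.history[:])
```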
--- llm_dialog_manager-0.1.2480.dist-info/METADATA
+++ llm_dialog_manager-0.2.1.dist-info/METADATA
@@ -1,6 +1,6 @@
 Metadata-Version: 2.1
 Name: llm_dialog_manager
-Version: 0.1.2480
+Version: 0.2.1
 Summary: A Python package for managing LLM chat conversation history
 Author-email: xihajun <work@2333.fun>
 License: MIT
--- /dev/null
+++ llm_dialog_manager-0.2.1.dist-info/RECORD
@@ -0,0 +1,9 @@
+llm_dialog_manager/__init__.py,sha256=joo2odAcSJIo-p3bwQbGxYok_vWUlI2RjkaAmAdKX3o,86
+llm_dialog_manager/agent.py,sha256=cLwDTgCpO8teYaedIiee8XWU8vE9icBe_NNoAklm79w,11388
+llm_dialog_manager/chat_history.py,sha256=xKA-oQCv8jv_g8EhXrG9h1S8Icbj2FfqPIhbty5vra4,6033
+llm_dialog_manager/key_manager.py,sha256=shvxmn4zUtQx_p-x1EFyOmnk-WlhigbpKtxTKve-zXk,4421
+llm_dialog_manager-0.2.1.dist-info/LICENSE,sha256=vWGbYgGuWpWrXL8-xi6pNcX5UzD6pWoIAZmcetyfbus,1064
+llm_dialog_manager-0.2.1.dist-info/METADATA,sha256=F-7unvFE1V-Lk6G_nBvHHXxza3RHjXtB-LJlk_iP2rM,5193
+llm_dialog_manager-0.2.1.dist-info/WHEEL,sha256=a7TGlA-5DaHMRrarXjVbQagU3Man_dCnGIWMJr5kRWo,91
+llm_dialog_manager-0.2.1.dist-info/top_level.txt,sha256=u2EQEXW0NGAt0AAHT7jx1odXZ4rZfjcgbmJhvKFuMkI,19
+llm_dialog_manager-0.2.1.dist-info/RECORD,,
--- llm_dialog_manager-0.1.2480.dist-info/WHEEL
+++ llm_dialog_manager-0.2.1.dist-info/WHEEL
@@ -1,5 +1,5 @@
 Wheel-Version: 1.0
-Generator: setuptools (75.3.0)
+Generator: setuptools (75.4.0)
 Root-Is-Purelib: true
 Tag: py3-none-any
 
--- llm_dialog_manager-0.1.2480.dist-info/RECORD
+++ /dev/null
@@ -1,9 +0,0 @@
-llm_dialog_manager/__init__.py,sha256=AfSsuE8ty-luFdLkdgQ98oQfbOYqYM8SO6p4tMDcDZ4,89
-llm_dialog_manager/agent.py,sha256=dExW-pfPpRLedi-iJsn5S_iImMmWGI1ixztVqWCmMHE,10303
-llm_dialog_manager/chat_history.py,sha256=xKA-oQCv8jv_g8EhXrG9h1S8Icbj2FfqPIhbty5vra4,6033
-llm_dialog_manager/key_manager.py,sha256=shvxmn4zUtQx_p-x1EFyOmnk-WlhigbpKtxTKve-zXk,4421
-llm_dialog_manager-0.1.2480.dist-info/LICENSE,sha256=vWGbYgGuWpWrXL8-xi6pNcX5UzD6pWoIAZmcetyfbus,1064
-llm_dialog_manager-0.1.2480.dist-info/METADATA,sha256=1MvkDLGrjfAsNe8HTAK0_BNNTMLO_UPZEmi27pGujkY,5196
-llm_dialog_manager-0.1.2480.dist-info/WHEEL,sha256=P9jw-gEje8ByB7_hXoICnHtVCrEwMQh-630tKvQWehc,91
-llm_dialog_manager-0.1.2480.dist-info/top_level.txt,sha256=u2EQEXW0NGAt0AAHT7jx1odXZ4rZfjcgbmJhvKFuMkI,19
-llm_dialog_manager-0.1.2480.dist-info/RECORD,,