chat-console 0.3.993-py3-none-any.whl → 0.3.995-py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- app/__init__.py +1 -1
- app/api/openai.py +26 -13
- {chat_console-0.3.993.dist-info → chat_console-0.3.995.dist-info}/METADATA +1 -1
- {chat_console-0.3.993.dist-info → chat_console-0.3.995.dist-info}/RECORD +8 -8
- {chat_console-0.3.993.dist-info → chat_console-0.3.995.dist-info}/WHEEL +0 -0
- {chat_console-0.3.993.dist-info → chat_console-0.3.995.dist-info}/entry_points.txt +0 -0
- {chat_console-0.3.993.dist-info → chat_console-0.3.995.dist-info}/licenses/LICENSE +0 -0
- {chat_console-0.3.993.dist-info → chat_console-0.3.995.dist-info}/top_level.txt +0 -0
app/__init__.py
CHANGED
app/api/openai.py
CHANGED
@@ -53,12 +53,18 @@ class OpenAIClient(BaseModelClient):
         """Generate a text completion using OpenAI"""
         processed_messages = self._prepare_messages(messages, style)
 
-
-
-
-
-
-
+        # Create parameters dict, omitting max_tokens if it's None
+        params = {
+            "model": model,
+            "messages": processed_messages,
+            "temperature": temperature,
+        }
+
+        # Only add max_tokens if it's not None
+        if max_tokens is not None:
+            params["max_tokens"] = max_tokens
+
+        response = await self.client.chat.completions.create(**params)
 
         return response.choices[0].message.content
 
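For readers skimming the hunk above: the non-streaming path now assembles the request keyword arguments in a dict, includes max_tokens only when a value was supplied, and expands the dict into the client call. Below is a minimal, self-contained sketch of that pattern, assuming the openai>=1.x AsyncOpenAI client; the complete() helper and its defaults are illustrative, not chat-console's actual OpenAIClient method.

from openai import AsyncOpenAI


async def complete(messages, model="gpt-4o-mini", temperature=0.7, max_tokens=None):
    # Illustrative helper; chat-console's real code lives on OpenAIClient.
    client = AsyncOpenAI()  # reads OPENAI_API_KEY from the environment

    # Build the request kwargs, leaving max_tokens out entirely when unset
    params = {
        "model": model,
        "messages": messages,
        "temperature": temperature,
    }
    if max_tokens is not None:
        params["max_tokens"] = max_tokens

    response = await client.chat.completions.create(**params)
    return response.choices[0].message.content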
@@ -113,13 +119,20 @@ class OpenAIClient(BaseModelClient):
 
         while retry_count <= max_retries:
             try:
-
-
-
-
-
-                stream
-
+                # Create parameters dict, omitting max_tokens if it's None
+                params = {
+                    "model": model,
+                    "messages": api_messages,
+                    "temperature": temperature,
+                    "stream": True,
+                }
+
+                # Only add max_tokens if it's not None
+                if max_tokens is not None:
+                    params["max_tokens"] = max_tokens
+
+                debug_log(f"OpenAI: creating stream with params: {params}")
+                stream = await self.client.chat.completions.create(**params)
 
                 # Store the stream for potential cancellation
                 self._active_stream = stream
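The streaming hunk applies the same conditional-kwargs pattern, with stream=True added and a debug_log call before the request. As a rough sketch of equivalent standalone streaming code, again assuming the openai>=1.x AsyncOpenAI client; the stream_text() helper and its delta handling are assumptions, not the package's exact implementation, which also handles retries and cancellation via self._active_stream.

from openai import AsyncOpenAI


async def stream_text(messages, model="gpt-4o-mini", temperature=0.7, max_tokens=None):
    # Illustrative async generator that yields text deltas as they arrive.
    client = AsyncOpenAI()

    params = {
        "model": model,
        "messages": messages,
        "temperature": temperature,
        "stream": True,
    }
    if max_tokens is not None:
        params["max_tokens"] = max_tokens  # omitted entirely when unset

    stream = await client.chat.completions.create(**params)
    async for chunk in stream:
        # Some chunks (role-only or final chunks) carry no text content.
        if chunk.choices and chunk.choices[0].delta.content:
            yield chunk.choices[0].delta.content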
@@ -1,6 +1,6 @@
|
|
1
1
|
Metadata-Version: 2.4
|
2
2
|
Name: chat-console
|
3
|
-
Version: 0.3.
|
3
|
+
Version: 0.3.995
|
4
4
|
Summary: A command-line interface for chatting with LLMs, storing chats and (future) rag interactions
|
5
5
|
Home-page: https://github.com/wazacraftrfid/chat-console
|
6
6
|
Author: Johnathan Greenaway
|
{chat_console-0.3.993.dist-info → chat_console-0.3.995.dist-info}/RECORD
CHANGED
@@ -1,4 +1,4 @@
-app/__init__.py,sha256=
+app/__init__.py,sha256=7Y7rb5EnGtgcyFHL5Z-6rT9sxtKiZfSrs4YsouTOYNw,132
 app/config.py,sha256=xeRGXcKbNvAdQGkaJJBipM4yHZJTM1y4ZFoW764APOU,7661
 app/database.py,sha256=nt8CVuDpy6zw8mOYqDcfUmNw611t7Ln7pz22M0b6-MI,9967
 app/main.py,sha256=WOcMP6yRwoEzftTSHf0e3zVK1aEuBgKMAsNbzHyKgiA,77427
@@ -8,7 +8,7 @@ app/api/__init__.py,sha256=A8UL84ldYlv8l7O-yKzraVFcfww86SgWfpl4p7R03-w,62
 app/api/anthropic.py,sha256=uInwNvGLJ_iPUs4BjdwaqXTU6NfmK1SzX7498Pt44fI,10667
 app/api/base.py,sha256=ELHl7K0jn0OuOfub7lVboigIbym0sv1se_-bCLscPJ8,10232
 app/api/ollama.py,sha256=eFG24nI2MlF57z9EHiA97v02NgFJ0kxaPUX26xAXFsg,66154
-app/api/openai.py,sha256=
+app/api/openai.py,sha256=vWk8kmA5VWbYcGpXbNp1fk9ZzvxMn6g8nLHHA3CO0vY,11395
 app/ui/__init__.py,sha256=RndfbQ1Tv47qdSiuQzvWP96lPS547SDaGE-BgOtiP_w,55
 app/ui/chat_interface.py,sha256=oSDZi0Jgj_L8WnBh1RuJpIeIcN-RQ38CNejwsXiWTVg,18267
 app/ui/chat_list.py,sha256=WQTYVNSSXlx_gQal3YqILZZKL9UiTjmNMIDX2I9pAMM,11205
@@ -16,9 +16,9 @@ app/ui/model_browser.py,sha256=pdblLVkdyVF0_Bo02bqbErGAtieyH-y6IfhMOPEqIso,71124
 app/ui/model_selector.py,sha256=2G0TOXfcNodrXZOhLeaJJ2iG3Nck4c_NN1AvUAmaF3M,19172
 app/ui/search.py,sha256=b-m14kG3ovqW1-i0qDQ8KnAqFJbi5b1FLM9dOnbTyIs,9763
 app/ui/styles.py,sha256=04AhPuLrOd2yenfRySFRestPeuTPeMLzhmMB67NdGvw,5615
-chat_console-0.3.
-chat_console-0.3.
-chat_console-0.3.
-chat_console-0.3.
-chat_console-0.3.
-chat_console-0.3.
+chat_console-0.3.995.dist-info/licenses/LICENSE,sha256=srHZ3fvcAuZY1LHxE7P6XWju2njRCHyK6h_ftEbzxSE,1057
+chat_console-0.3.995.dist-info/METADATA,sha256=bJfytsa2etmHsB_yjV-SrlBTRXX6X7Omf85_c1miTR4,2923
+chat_console-0.3.995.dist-info/WHEEL,sha256=ck4Vq1_RXyvS4Jt6SI0Vz6fyVs4GWg7AINwpsaGEgPE,91
+chat_console-0.3.995.dist-info/entry_points.txt,sha256=kkVdEc22U9PAi2AeruoKklfkng_a_aHAP6VRVwrAD7c,67
+chat_console-0.3.995.dist-info/top_level.txt,sha256=io9g7LCbfmTG1SFKgEOGXmCFB9uMP2H5lerm0HiHWQE,4
+chat_console-0.3.995.dist-info/RECORD,,
{chat_console-0.3.993.dist-info → chat_console-0.3.995.dist-info}/WHEEL
File without changes
{chat_console-0.3.993.dist-info → chat_console-0.3.995.dist-info}/entry_points.txt
File without changes
{chat_console-0.3.993.dist-info → chat_console-0.3.995.dist-info}/licenses/LICENSE
File without changes
{chat_console-0.3.993.dist-info → chat_console-0.3.995.dist-info}/top_level.txt
File without changes