pygpt-net 2.7.6__py3-none-any.whl → 2.7.8__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- pygpt_net/CHANGELOG.txt +13 -0
- pygpt_net/__init__.py +3 -3
- pygpt_net/app.py +5 -1
- pygpt_net/controller/assistant/batch.py +2 -2
- pygpt_net/controller/assistant/files.py +7 -6
- pygpt_net/controller/assistant/threads.py +0 -0
- pygpt_net/controller/chat/command.py +0 -0
- pygpt_net/controller/chat/remote_tools.py +3 -9
- pygpt_net/controller/chat/stream.py +2 -2
- pygpt_net/controller/chat/{handler/worker.py → stream_worker.py} +13 -35
- pygpt_net/controller/dialogs/confirm.py +35 -58
- pygpt_net/controller/lang/mapping.py +9 -9
- pygpt_net/controller/remote_store/{google/batch.py → batch.py} +209 -252
- pygpt_net/controller/remote_store/remote_store.py +982 -13
- pygpt_net/core/command/command.py +0 -0
- pygpt_net/core/db/viewer.py +1 -1
- pygpt_net/core/debug/models.py +2 -2
- pygpt_net/core/realtime/worker.py +3 -1
- pygpt_net/{controller/remote_store/google → core/remote_store/anthropic}/__init__.py +0 -1
- pygpt_net/core/remote_store/anthropic/files.py +211 -0
- pygpt_net/core/remote_store/anthropic/store.py +208 -0
- pygpt_net/core/remote_store/openai/store.py +5 -4
- pygpt_net/core/remote_store/remote_store.py +5 -1
- pygpt_net/{controller/remote_store/openai → core/remote_store/xai}/__init__.py +0 -1
- pygpt_net/core/remote_store/xai/files.py +225 -0
- pygpt_net/core/remote_store/xai/store.py +219 -0
- pygpt_net/data/config/config.json +18 -5
- pygpt_net/data/config/models.json +193 -4
- pygpt_net/data/config/settings.json +179 -36
- pygpt_net/data/icons/folder_eye.svg +1 -0
- pygpt_net/data/icons/folder_eye_filled.svg +1 -0
- pygpt_net/data/icons/folder_open.svg +1 -0
- pygpt_net/data/icons/folder_open_filled.svg +1 -0
- pygpt_net/data/locale/locale.de.ini +6 -3
- pygpt_net/data/locale/locale.en.ini +46 -12
- pygpt_net/data/locale/locale.es.ini +6 -3
- pygpt_net/data/locale/locale.fr.ini +6 -3
- pygpt_net/data/locale/locale.it.ini +6 -3
- pygpt_net/data/locale/locale.pl.ini +7 -4
- pygpt_net/data/locale/locale.uk.ini +6 -3
- pygpt_net/data/locale/locale.zh.ini +6 -3
- pygpt_net/icons.qrc +4 -0
- pygpt_net/icons_rc.py +282 -138
- pygpt_net/plugin/cmd_mouse_control/worker.py +2 -1
- pygpt_net/plugin/cmd_mouse_control/worker_sandbox.py +2 -1
- pygpt_net/provider/api/anthropic/__init__.py +10 -3
- pygpt_net/provider/api/anthropic/chat.py +342 -11
- pygpt_net/provider/api/anthropic/computer.py +844 -0
- pygpt_net/provider/api/anthropic/remote_tools.py +172 -0
- pygpt_net/provider/api/anthropic/store.py +307 -0
- pygpt_net/{controller/chat/handler/anthropic_stream.py → provider/api/anthropic/stream.py} +99 -10
- pygpt_net/provider/api/anthropic/tools.py +32 -77
- pygpt_net/provider/api/anthropic/utils.py +30 -0
- pygpt_net/{controller/chat/handler → provider/api/anthropic/worker}/__init__.py +0 -0
- pygpt_net/provider/api/anthropic/worker/importer.py +278 -0
- pygpt_net/provider/api/google/chat.py +62 -9
- pygpt_net/provider/api/google/store.py +124 -3
- pygpt_net/{controller/chat/handler/google_stream.py → provider/api/google/stream.py} +92 -25
- pygpt_net/provider/api/google/utils.py +185 -0
- pygpt_net/provider/api/google/worker/importer.py +16 -28
- pygpt_net/provider/api/langchain/__init__.py +0 -0
- pygpt_net/{controller/chat/handler/langchain_stream.py → provider/api/langchain/stream.py} +1 -1
- pygpt_net/provider/api/llama_index/__init__.py +0 -0
- pygpt_net/{controller/chat/handler/llamaindex_stream.py → provider/api/llama_index/stream.py} +1 -1
- pygpt_net/provider/api/openai/assistants.py +2 -2
- pygpt_net/provider/api/openai/image.py +2 -2
- pygpt_net/provider/api/openai/store.py +4 -1
- pygpt_net/{controller/chat/handler/openai_stream.py → provider/api/openai/stream.py} +1 -1
- pygpt_net/provider/api/openai/utils.py +69 -3
- pygpt_net/provider/api/openai/worker/importer.py +19 -61
- pygpt_net/provider/api/openai/worker/importer_assistants.py +230 -0
- pygpt_net/provider/api/x_ai/__init__.py +138 -15
- pygpt_net/provider/api/x_ai/audio.py +43 -11
- pygpt_net/provider/api/x_ai/chat.py +92 -4
- pygpt_net/provider/api/x_ai/image.py +149 -47
- pygpt_net/provider/api/x_ai/realtime/__init__.py +12 -0
- pygpt_net/provider/api/x_ai/realtime/client.py +1825 -0
- pygpt_net/provider/api/x_ai/realtime/realtime.py +198 -0
- pygpt_net/provider/api/x_ai/{remote.py → remote_tools.py} +183 -70
- pygpt_net/provider/api/x_ai/responses.py +507 -0
- pygpt_net/provider/api/x_ai/store.py +610 -0
- pygpt_net/{controller/chat/handler/xai_stream.py → provider/api/x_ai/stream.py} +42 -10
- pygpt_net/provider/api/x_ai/tools.py +59 -8
- pygpt_net/{controller/chat/handler → provider/api/x_ai}/utils.py +1 -2
- pygpt_net/provider/api/x_ai/vision.py +1 -4
- pygpt_net/provider/api/x_ai/worker/importer.py +308 -0
- pygpt_net/provider/audio_input/xai_grok_voice.py +390 -0
- pygpt_net/provider/audio_output/xai_tts.py +325 -0
- pygpt_net/provider/core/config/patch.py +39 -3
- pygpt_net/provider/core/config/patches/patch_before_2_6_42.py +2 -2
- pygpt_net/provider/core/model/patch.py +39 -1
- pygpt_net/tools/image_viewer/tool.py +334 -34
- pygpt_net/tools/image_viewer/ui/dialogs.py +319 -22
- pygpt_net/tools/text_editor/ui/dialogs.py +3 -2
- pygpt_net/tools/text_editor/ui/widgets.py +0 -0
- pygpt_net/ui/dialog/assistant.py +1 -1
- pygpt_net/ui/dialog/plugins.py +13 -5
- pygpt_net/ui/dialog/remote_store.py +552 -0
- pygpt_net/ui/dialogs.py +3 -5
- pygpt_net/ui/layout/ctx/ctx_list.py +58 -7
- pygpt_net/ui/menu/tools.py +6 -13
- pygpt_net/ui/widget/dialog/base.py +16 -5
- pygpt_net/ui/widget/dialog/{remote_store_google.py → remote_store.py} +10 -10
- pygpt_net/ui/widget/element/button.py +4 -4
- pygpt_net/ui/widget/image/display.py +2 -2
- pygpt_net/ui/widget/lists/context.py +2 -2
- pygpt_net/ui/widget/textarea/editor.py +0 -0
- {pygpt_net-2.7.6.dist-info → pygpt_net-2.7.8.dist-info}/METADATA +15 -2
- {pygpt_net-2.7.6.dist-info → pygpt_net-2.7.8.dist-info}/RECORD +107 -89
- pygpt_net/controller/remote_store/google/store.py +0 -615
- pygpt_net/controller/remote_store/openai/batch.py +0 -524
- pygpt_net/controller/remote_store/openai/store.py +0 -699
- pygpt_net/ui/dialog/remote_store_google.py +0 -539
- pygpt_net/ui/dialog/remote_store_openai.py +0 -539
- pygpt_net/ui/widget/dialog/remote_store_openai.py +0 -56
- pygpt_net/ui/widget/lists/remote_store_google.py +0 -248
- pygpt_net/ui/widget/lists/remote_store_openai.py +0 -317
- {pygpt_net-2.7.6.dist-info → pygpt_net-2.7.8.dist-info}/LICENSE +0 -0
- {pygpt_net-2.7.6.dist-info → pygpt_net-2.7.8.dist-info}/WHEEL +0 -0
- {pygpt_net-2.7.6.dist-info → pygpt_net-2.7.8.dist-info}/entry_points.txt +0 -0
@@ -6,7 +6,7 @@
 # GitHub: https://github.com/szczyglis-dev/py-gpt #
 # MIT License #
 # Created By : Marcin Szczygliński #
-# Updated Date:
+# Updated Date: 2026.01.05 20:00:00 #
 # ================================================== #
 
 import json

@@ -152,78 +152,16 @@ class Tools:
             if not params.get("type"):
                 params["type"] = "object"
 
-
+            # pass through tool as client tool
+            tool_def = {
                 "name": name,
                 "description": desc,
                 "input_schema": params or {"type": "object"},
-            })
-
-        return tools
-
-    def build_remote_tools(self, model: ModelItem = None) -> List[dict]:
-        """
-        Build Anthropic server tools (remote tools) based on config flags.
-        Currently supports: Web Search tool.
-
-        Returns a list of tool dicts to be appended to 'tools' in messages.create.
-
-        :param model: ModelItem
-        :return: List of remote tool dicts
-        """
-        cfg = self.window.core.config
-        tools: List[dict] = []
-
-        # sonnet-3.5 is not supported
-        if model and model.id and model.id.startswith("claude-3-5"):
-            return tools
-
-        is_web = self.window.controller.chat.remote_tools.enabled(model, "web_search")  # get global config
-
-        # Web Search tool
-        if is_web:
-            ttype = cfg.get("remote_tools.anthropic.web_search.type", "web_search_20250305")  # stable as of docs
-            tname = "web_search"
-
-            tool_def: Dict[str, Any] = {
-                "type": ttype,
-                "name": tname,
             }
 
-            #
-
-
-            tool_def["max_uses"] = max_uses
-
-            def parse_csv_list(key: str) -> list:
-                raw = cfg.get(key, "")
-                if not raw:
-                    return []
-                if isinstance(raw, list):
-                    return [str(x).strip() for x in raw if str(x).strip()]
-                return [s.strip() for s in str(raw).split(",") if s.strip()]
-
-            allowed = parse_csv_list("remote_tools.anthropic.web_search.allowed_domains")
-            blocked = parse_csv_list("remote_tools.anthropic.web_search.blocked_domains")
-            if allowed:
-                tool_def["allowed_domains"] = allowed
-            elif blocked:
-                tool_def["blocked_domains"] = blocked
-
-            # Location (approximate)
-            loc_city = cfg.get("remote_tools.anthropic.web_search.user_location.city")
-            loc_region = cfg.get("remote_tools.anthropic.web_search.user_location.region")
-            loc_country = cfg.get("remote_tools.anthropic.web_search.user_location.country")
-            loc_tz = cfg.get("remote_tools.anthropic.web_search.user_location.timezone")
-            if any([loc_city, loc_region, loc_country, loc_tz]):
-                tool_def["user_location"] = {
-                    "type": "approximate",
-                    "city": str(loc_city) if loc_city else None,
-                    "region": str(loc_region) if loc_region else None,
-                    "country": str(loc_country) if loc_country else None,
-                    "timezone": str(loc_tz) if loc_tz else None,
-                }
-                # remove None fields
-                tool_def["user_location"] = {k: v for k, v in tool_def["user_location"].items() if v is not None}
+            # optional: allow defer_loading for tool search when configured per-tool (kept compatible)
+            if isinstance(fn, dict) and fn.get("defer_loading") is True:
+                tool_def["defer_loading"] = True
 
             tools.append(tool_def)
 

@@ -231,28 +169,45 @@ class Tools:
 
     def merge_tools_dedup(self, primary: List[dict], secondary: List[dict]) -> List[dict]:
         """
-        Remove duplicate tools
+        Remove duplicate tools, preserving order:
 
         - First from primary list
-        - Then from secondary list if
+        - Then from secondary list if not already present
+
+        Dedup rules:
+        * Tools with a 'name' are deduped by name.
+        * MCP toolsets (type == 'mcp_toolset') are deduped by (type, mcp_server_name).
+        * Tools without a 'name' use (type) as a fallback key.
 
         :param primary: Primary list of tool dicts
         :param secondary: Secondary list of tool dicts
        :return: Merged list of tool dicts without duplicates
         """
+        def key_for(t: dict) -> str:
+            name = t.get("name")
+            if name:
+                return f"name::{name}"
+            ttype = t.get("type")
+            if ttype == "mcp_toolset":
+                return f"mcp::{t.get('mcp_server_name', '')}"
+            return f"type::{ttype}"
+
         result: List[dict] = []
         seen = set()
+
         for t in primary or []:
-
-            if
-            seen.add(
+            k = key_for(t)
+            if k not in seen:
+                seen.add(k)
                 result.append(t)
+
         for t in secondary or []:
-
-            if
+            k = key_for(t)
+            if k in seen:
                 continue
-            seen.add(
+            seen.add(k)
             result.append(t)
+
         return result
 
     def get_all_tools(self, model: ModelItem, functions: list) -> List[dict]:

@@ -264,5 +219,5 @@ class Tools:
         :return: Combined list of tool dicts
         """
         base_tools = self.prepare(model, functions)
-        remote_tools = self.build_remote_tools(model)
+        remote_tools = self.window.core.api.anthropic.remote_tools.build_remote_tools(model)
         return self.merge_tools_dedup(base_tools, remote_tools)
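For readers skimming the Tools hunks above (by the file list they appear to belong to pygpt_net/provider/api/anthropic/tools.py), here is a minimal standalone sketch of the dedup keying that merge_tools_dedup() now documents; the tool dicts below are made-up examples, not values taken from the package:

    # Hypothetical illustration of the key_for() rule added in the hunk above.
    def key_for(t: dict) -> str:
        name = t.get("name")
        if name:
            return f"name::{name}"
        ttype = t.get("type")
        if ttype == "mcp_toolset":
            return f"mcp::{t.get('mcp_server_name', '')}"
        return f"type::{ttype}"

    primary = [{"name": "web_search", "type": "web_search_20250305"}]
    secondary = [
        {"name": "web_search", "type": "web_search_20250305"},  # same key "name::web_search" -> skipped
        {"type": "mcp_toolset", "mcp_server_name": "fs"},        # key "mcp::fs" -> kept
    ]
    seen, merged = set(), []
    for t in primary + secondary:
        k = key_for(t)
        if k not in seen:
            seen.add(k)
            merged.append(t)
    print(merged)  # the web_search tool once, plus the MCP toolset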
@@ -0,0 +1,30 @@
+#!/usr/bin/env python3
+# -*- coding: utf-8 -*-
+# ================================================== #
+# This file is a part of PYGPT package #
+# Website: https://pygpt.net #
+# GitHub: https://github.com/szczyglis-dev/py-gpt #
+# MIT License #
+# Created By : Marcin Szczygliński #
+# Updated Date: 2026.01.05 20:00:00 #
+# ================================================== #
+
+from typing import Any, Optional
+
+
+def as_int(val: Any) -> Optional[int]:
+    """
+    Coerce to int if possible, else None.
+
+    :param val: Input value
+    :return: int or None
+    """
+    if val is None:
+        return None
+    try:
+        return int(val)
+    except Exception:
+        try:
+            return int(float(val))
+        except Exception:
+            return None

File without changes
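A short illustration of the coercion rules of the new as_int() helper (assuming it is importable from pygpt_net.provider.api.anthropic.utils, the new file shown above):

    from pygpt_net.provider.api.anthropic.utils import as_int

    as_int(5)       # 5
    as_int("7")     # 7
    as_int("3.9")   # 3    (int() fails, falls back to int(float(val)))
    as_int(None)    # None
    as_int("abc")   # None (both conversions fail)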
@@ -0,0 +1,278 @@
+#!/usr/bin/env python3
+# -*- coding: utf-8 -*-
+# ================================================== #
+# This file is a part of PYGPT package #
+# Website: https://pygpt.net #
+# GitHub: https://github.com/szczyglis-dev/py-gpt #
+# MIT License #
+# Created By : Marcin Szczygliński #
+# Updated Date: 2026.01.05 17:00:00 #
+# ================================================== #
+
+import os
+
+from PySide6.QtCore import QObject, Signal, QRunnable, Slot
+
+
+class Importer(QObject):
+    def __init__(self, window=None):
+        """
+        Importer core (Anthropic Files)
+
+        :param window: Window instance
+        """
+        super(Importer, self).__init__()
+        self.window = window
+        self.worker = None
+
+    @Slot(str, object)
+    def handle_error(self, mode: str, err: any):
+        batch = self.window.controller.remote_store.batch
+        if mode == "import_files":
+            batch.handle_imported_files_failed(err)
+        elif mode == "truncate_files":
+            batch.handle_truncated_files_failed(err)
+        elif mode == "upload_files":
+            batch.handle_uploaded_files_failed(err)
+        elif mode in "vector_stores":
+            batch.handle_imported_stores_failed(err)
+        elif mode in "truncate_vector_stores":
+            batch.handle_truncated_stores_failed(err)
+        elif mode in "refresh_vector_stores":
+            batch.handle_refreshed_stores_failed(err)
+
+    @Slot(str, str, int)
+    def handle_finished(self, mode: str, store_id: str = None, num: int = 0):
+        batch = self.window.controller.remote_store.batch
+        if mode == "import_files":
+            batch.handle_imported_files(num)
+        elif mode == "truncate_files":
+            batch.handle_truncated_files(store_id, num)
+        elif mode == "upload_files":
+            batch.handle_uploaded_files(num)
+        elif mode == "vector_stores":
+            batch.handle_imported_stores(num)
+        elif mode == "truncate_vector_stores":
+            batch.handle_truncated_stores(num)
+        elif mode == "refresh_vector_stores":
+            batch.handle_refreshed_stores(num)
+
+    @Slot(str, str)
+    def handle_status(self, mode: str, msg: str):
+        self.window.controller.assistant.batch.handle_status_change(mode, msg)
+
+    @Slot(str, str)
+    def handle_log(self, mode: str, msg: str):
+        self.window.controller.assistant.threads.log(mode + ": " + msg)
+
+    def import_vector_stores(self):
+        """Create/ensure pseudo-store and import files list."""
+        self.worker = ImportWorker()
+        self.worker.window = self.window
+        self.worker.mode = "vector_stores"
+        self.connect_signals(self.worker)
+        self.window.threadpool.start(self.worker)
+
+    def truncate_vector_stores(self):
+        """Clear local pseudo-store metadata (no remote action)."""
+        self.worker = ImportWorker()
+        self.worker.window = self.window
+        self.worker.mode = "truncate_vector_stores"
+        self.connect_signals(self.worker)
+        self.window.threadpool.start(self.worker)
+
+    def truncate_files(self, store_id: str = None):
+        """Remove all files via Files API."""
+        self.worker = ImportWorker()
+        self.worker.window = self.window
+        self.worker.mode = "truncate_files"
+        self.worker.store_id = store_id
+        self.connect_signals(self.worker)
+        self.window.threadpool.start(self.worker)
+
+    def upload_files(self, store_id: str, files: list = None):
+        """Upload files to Files API."""
+        self.worker = ImportWorker()
+        self.worker.window = self.window
+        self.worker.mode = "upload_files"
+        self.worker.store_id = store_id
+        self.worker.files = files or []
+        self.connect_signals(self.worker)
+        self.window.threadpool.start(self.worker)
+
+    def refresh_vector_stores(self):
+        """Refresh pseudo-store status."""
+        self.worker = ImportWorker()
+        self.worker.window = self.window
+        self.worker.mode = "refresh_vector_stores"
+        self.connect_signals(self.worker)
+        self.window.threadpool.start(self.worker)
+
+    def import_files(self, store_id: str = None):
+        """Import files from Files API."""
+        self.worker = ImportWorker()
+        self.worker.window = self.window
+        self.worker.mode = "import_files"
+        self.worker.store_id = store_id
+        self.connect_signals(self.worker)
+        self.window.threadpool.start(self.worker)
+
+    def connect_signals(self, worker):
+        worker.signals.finished.connect(self.handle_finished)
+        worker.signals.error.connect(self.handle_error)
+        worker.signals.status.connect(self.handle_status)
+        worker.signals.log.connect(self.handle_log)
+
+
+class ImportWorkerSignals(QObject):
+    status = Signal(str, str)  # mode, message
+    finished = Signal(str, str, int)  # mode, store_id, num
+    error = Signal(str, object)  # mode, error
+    log = Signal(str, str)  # mode, message
+
+
+class ImportWorker(QRunnable):
+    """Import worker (Anthropic)"""
+    def __init__(self, *args, **kwargs):
+        super().__init__()
+        self.signals = ImportWorkerSignals()
+        self.window = None
+        self.mode = "vector_stores"
+        self.store_id = "files"
+        self.files = []
+
+    @Slot()
+    def run(self):
+        try:
+            if self.mode == "vector_stores":
+                if self.import_vector_stores():
+                    self.import_files()
+            elif self.mode == "truncate_vector_stores":
+                self.truncate_vector_stores()
+            elif self.mode == "refresh_vector_stores":
+                self.refresh_vector_stores()
+            elif self.mode == "truncate_files":
+                self.truncate_files()
+            elif self.mode == "import_files":
+                self.import_files()
+            elif self.mode == "upload_files":
+                self.upload_files()
+        except Exception as e:
+            self.signals.error.emit(self.mode, e)
+        finally:
+            self.cleanup()
+
+    def import_vector_stores(self, silent: bool = False) -> bool:
+        """
+        Ensure pseudo-store exists locally.
+        """
+        try:
+            self.log("Ensuring Anthropic workspace store...")
+            items = {}
+            # Single pseudo-store object
+            store = self.window.core.remote_store.anthropic.create("Files")
+            items[store.id] = store
+            self.window.core.remote_store.anthropic.import_items(items)
+            if not silent:
+                self.signals.finished.emit("vector_stores", self.store_id, 1)
+            return True
+        except Exception as e:
+            self.log("API error: {}".format(e))
+            self.signals.error.emit("vector_stores", e)
+            return False
+
+    def truncate_vector_stores(self, silent: bool = False) -> bool:
+        try:
+            self.log("Truncating local pseudo-store...")
+            self.window.core.remote_store.anthropic.items = {}
+            self.window.core.remote_store.anthropic.save()
+            if not silent:
+                self.signals.finished.emit("truncate_vector_stores", self.store_id, 1)
+            return True
+        except Exception as e:
+            self.log("API error: {}".format(e))
+            self.signals.error.emit("truncate_vector_stores", e)
+            return False
+
+    def refresh_vector_stores(self, silent: bool = False) -> bool:
+        try:
+            self.log("Refreshing workspace status...")
+            # Ensure exists
+            if "files" not in self.window.core.remote_store.anthropic.items:
+                self.import_vector_stores(silent=True)
+            store = self.window.core.remote_store.anthropic.items["files"]
+            self.window.controller.remote_store.refresh_store(store, update=False, provider="anthropic")
+            if not silent:
+                self.signals.finished.emit("refresh_vector_stores", self.store_id, 1)
+            return True
+        except Exception as e:
+            self.log("API error: {}".format(e))
+            self.signals.error.emit("refresh_vector_stores", e)
+            return False
+
+    def truncate_files(self, silent: bool = False) -> bool:
+        try:
+            self.log("Removing all files via Anthropic Files API...")
+            num = self.window.core.api.anthropic.store.remove_files(callback=self.callback)
+            self.window.core.remote_store.anthropic.files.truncate_local()
+            if not silent:
+                self.signals.finished.emit("truncate_files", self.store_id, num)
+            return True
+        except Exception as e:
+            self.log("API error: {}".format(e))
+            self.signals.error.emit("truncate_files", e)
+            return False
+
+    def upload_files(self, silent: bool = False) -> bool:
+        num = 0
+        try:
+            self.log("Uploading files to Anthropic Files API...")
+            for path in self.files:
+                try:
+                    f = self.window.core.api.anthropic.store.upload(path)
+                    if f is not None:
+                        self.window.core.remote_store.anthropic.files.insert("files", f)
+                        num += 1
+                        msg = "Uploaded file: {}/{}".format(num, len(self.files))
+                        self.signals.status.emit("upload_files", msg)
+                        self.log(msg)
+                    else:
+                        self.signals.status.emit("upload_files", "Failed to upload: {}".format(os.path.basename(path)))
+                except Exception as e:
+                    self.window.core.debug.log(e)
+                    self.signals.status.emit("upload_files", "Failed to upload: {}".format(os.path.basename(path)))
+            if not silent:
+                self.signals.finished.emit("upload_files", self.store_id, num)
+            return True
+        except Exception as e:
+            self.log("API error: {}".format(e))
+            self.signals.error.emit("upload_files", e)
+            return False
+
+    def import_files(self, silent: bool = False) -> bool:
+        try:
+            self.log("Importing files from Anthropic Files API...")
+            self.window.core.remote_store.anthropic.files.truncate_local()
+            num = self.window.core.api.anthropic.store.import_files(callback=self.callback)
+            if not silent:
+                self.signals.finished.emit("import_files", self.store_id, num)
+            return True
+        except Exception as e:
+            self.log("API error: {}".format(e))
+            self.signals.error.emit("import_files", e)
+            return False
+
+    def callback(self, msg: str):
+        self.log(msg)
+
+    def log(self, msg: str):
+        self.signals.log.emit(self.mode, msg)
+
+    def cleanup(self):
+        sig = self.signals
+        self.signals = None
+        if sig is not None:
+            try:
+                sig.deleteLater()
+            except RuntimeError:
+                pass
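The new Importer/ImportWorker pair follows the standard Qt threadpool pattern: the controller-facing methods configure a QRunnable, wire up its signals, and hand it to the window's thread pool. A simplified, self-contained sketch of that pattern (not the package's code; it assumes only PySide6 and a running event loop):

    import sys
    from PySide6.QtCore import QCoreApplication, QObject, QRunnable, QThreadPool, Signal, Slot

    class WorkerSignals(QObject):
        finished = Signal(str, str, int)  # mode, store_id, num

    class Worker(QRunnable):
        def __init__(self, mode: str):
            super().__init__()
            self.signals = WorkerSignals()
            self.mode = mode

        @Slot()
        def run(self):
            # real work (Files API calls) would happen here, off the GUI thread
            self.signals.finished.emit(self.mode, "files", 3)

    app = QCoreApplication(sys.argv)
    worker = Worker("import_files")
    worker.signals.finished.connect(lambda mode, sid, num: (print(mode, sid, num), app.quit()))
    QThreadPool.globalInstance().start(worker)
    app.exec()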
@@ -6,7 +6,7 @@
 # GitHub: https://github.com/szczyglis-dev/py-gpt #
 # MIT License #
 # Created By : Marcin Szczygliński #
-# Updated Date: 2026.01.
+# Updated Date: 2026.01.05 20:00:00 #
 # ================================================== #
 
 import os

@@ -121,12 +121,8 @@ class Chat:
 
         # Enable Computer Use tool in computer mode (use the official Tool/ComputerUse object)
         if mode == MODE_COMPUTER or (model and isinstance(model.id, str) and "computer-use" in model.id.lower()):
-
-            tools = [
-                computer_use=gtypes.ComputerUse(
-                    environment=comp_env,
-                )
-            )]  # reset tools to only Computer Use (multiple tools not supported together)
+            tool = self.window.core.api.google.computer.get_tool()
+            tools = [tool]  # reset tools to only Computer Use (multiple tools not supported together)
 
         # Some models cannot use tools; keep behavior for image-only models
         if model and isinstance(model.id, str) and "-image" in model.id:

@@ -366,6 +362,12 @@ class Chat:
         except Exception:
             pass
 
+        # Download Files API file_data parts if present
+        try:
+            self._maybe_download_response_files(response, ctx)
+        except Exception:
+            pass
+
     def extract_text(self, response) -> str:
         """
         Extract output text.

@@ -796,7 +798,7 @@ class Chat:
                 return bytes(data)
             if isinstance(data, str):
                 import base64
-                return base64.b64decode(data)
+                return base64.b64encode(bytes()) if data == "" else base64.b64decode(data)
         except Exception:
             return None
         return None

@@ -1004,4 +1006,55 @@ class Chat:
                 out.append({"type": typ, "uri": uri})
                 continue
 
-        return out
+        return out
+
+    def _maybe_download_response_files(self, response, ctx: CtxItem) -> None:
+        """
+        Inspect non-stream response parts for Files API references and download them.
+        """
+        try:
+            cands = getattr(response, "candidates", None) or []
+            if not cands:
+                return
+            first = cands[0]
+            content = getattr(first, "content", None)
+            parts = getattr(content, "parts", None) or []
+        except Exception:
+            parts = []
+
+        if not parts:
+            return
+
+        downloaded: List[str] = []
+        for p in parts:
+            fdata = getattr(p, "file_data", None)
+            if not fdata:
+                continue
+            try:
+                uri = getattr(fdata, "file_uri", None) or getattr(fdata, "uri", None)
+                prefer = getattr(fdata, "file_name", None) or getattr(fdata, "display_name", None)
+                if not uri or not isinstance(uri, str):
+                    continue
+                # Only Gemini Files API refs are supported for direct download
+                save_path = self.window.core.api.google.store.download_to_dir(uri, prefer_name=prefer)
+                if save_path:
+                    downloaded.append(save_path)
+            except Exception:
+                continue
+
+        if downloaded:
+            downloaded = self.window.core.filesystem.make_local_list(downloaded)
+            if not isinstance(ctx.files, list):
+                ctx.files = []
+            for path in downloaded:
+                if path not in ctx.files:
+                    ctx.files.append(path)
+            images = []
+            for path in downloaded:
+                ext = os.path.splitext(path)[1].lower().lstrip(".")
+                if ext in ["png", "jpg", "jpeg", "gif", "bmp", "tiff", "webp"]:
+                    images.append(path)
+            if images:
+                if not isinstance(ctx.images, list):
+                    ctx.images = []
+                ctx.images += images