pygpt-net 2.7.7__py3-none-any.whl → 2.7.8__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- pygpt_net/CHANGELOG.txt +7 -0
- pygpt_net/__init__.py +3 -3
- pygpt_net/app.py +5 -1
- pygpt_net/controller/assistant/batch.py +2 -2
- pygpt_net/controller/assistant/files.py +7 -6
- pygpt_net/controller/assistant/threads.py +0 -0
- pygpt_net/controller/chat/command.py +0 -0
- pygpt_net/controller/dialogs/confirm.py +35 -58
- pygpt_net/controller/lang/mapping.py +9 -9
- pygpt_net/controller/remote_store/{google/batch.py → batch.py} +209 -252
- pygpt_net/controller/remote_store/remote_store.py +982 -13
- pygpt_net/core/command/command.py +0 -0
- pygpt_net/core/db/viewer.py +1 -1
- pygpt_net/core/realtime/worker.py +3 -1
- pygpt_net/{controller/remote_store/google → core/remote_store/anthropic}/__init__.py +0 -1
- pygpt_net/core/remote_store/anthropic/files.py +211 -0
- pygpt_net/core/remote_store/anthropic/store.py +208 -0
- pygpt_net/core/remote_store/openai/store.py +5 -4
- pygpt_net/core/remote_store/remote_store.py +5 -1
- pygpt_net/{controller/remote_store/openai → core/remote_store/xai}/__init__.py +0 -1
- pygpt_net/core/remote_store/xai/files.py +225 -0
- pygpt_net/core/remote_store/xai/store.py +219 -0
- pygpt_net/data/config/config.json +9 -6
- pygpt_net/data/config/models.json +5 -4
- pygpt_net/data/config/settings.json +54 -1
- pygpt_net/data/icons/folder_eye.svg +1 -0
- pygpt_net/data/icons/folder_eye_filled.svg +1 -0
- pygpt_net/data/icons/folder_open.svg +1 -0
- pygpt_net/data/icons/folder_open_filled.svg +1 -0
- pygpt_net/data/locale/locale.de.ini +4 -3
- pygpt_net/data/locale/locale.en.ini +14 -4
- pygpt_net/data/locale/locale.es.ini +4 -3
- pygpt_net/data/locale/locale.fr.ini +4 -3
- pygpt_net/data/locale/locale.it.ini +4 -3
- pygpt_net/data/locale/locale.pl.ini +5 -4
- pygpt_net/data/locale/locale.uk.ini +4 -3
- pygpt_net/data/locale/locale.zh.ini +4 -3
- pygpt_net/icons.qrc +4 -0
- pygpt_net/icons_rc.py +282 -138
- pygpt_net/provider/api/anthropic/__init__.py +2 -0
- pygpt_net/provider/api/anthropic/chat.py +84 -1
- pygpt_net/provider/api/anthropic/store.py +307 -0
- pygpt_net/provider/api/anthropic/stream.py +75 -0
- pygpt_net/provider/api/anthropic/worker/__init__.py +0 -0
- pygpt_net/provider/api/anthropic/worker/importer.py +278 -0
- pygpt_net/provider/api/google/chat.py +59 -2
- pygpt_net/provider/api/google/store.py +124 -3
- pygpt_net/provider/api/google/stream.py +91 -24
- pygpt_net/provider/api/google/worker/importer.py +16 -28
- pygpt_net/provider/api/openai/assistants.py +2 -2
- pygpt_net/provider/api/openai/store.py +4 -1
- pygpt_net/provider/api/openai/worker/importer.py +19 -61
- pygpt_net/provider/api/openai/worker/importer_assistants.py +230 -0
- pygpt_net/provider/api/x_ai/__init__.py +30 -6
- pygpt_net/provider/api/x_ai/audio.py +43 -11
- pygpt_net/provider/api/x_ai/chat.py +92 -4
- pygpt_net/provider/api/x_ai/realtime/__init__.py +12 -0
- pygpt_net/provider/api/x_ai/realtime/client.py +1825 -0
- pygpt_net/provider/api/x_ai/realtime/realtime.py +198 -0
- pygpt_net/provider/api/x_ai/remote_tools.py +19 -1
- pygpt_net/provider/api/x_ai/store.py +610 -0
- pygpt_net/provider/api/x_ai/stream.py +30 -9
- pygpt_net/provider/api/x_ai/worker/importer.py +308 -0
- pygpt_net/provider/audio_input/xai_grok_voice.py +390 -0
- pygpt_net/provider/audio_output/xai_tts.py +325 -0
- pygpt_net/provider/core/config/patch.py +18 -3
- pygpt_net/provider/core/config/patches/patch_before_2_6_42.py +2 -2
- pygpt_net/provider/core/model/patch.py +13 -0
- pygpt_net/tools/image_viewer/tool.py +334 -34
- pygpt_net/tools/image_viewer/ui/dialogs.py +317 -21
- pygpt_net/ui/dialog/assistant.py +1 -1
- pygpt_net/ui/dialog/plugins.py +13 -5
- pygpt_net/ui/dialog/remote_store.py +552 -0
- pygpt_net/ui/dialogs.py +3 -5
- pygpt_net/ui/layout/ctx/ctx_list.py +58 -7
- pygpt_net/ui/menu/tools.py +6 -13
- pygpt_net/ui/widget/dialog/{remote_store_google.py → remote_store.py} +10 -10
- pygpt_net/ui/widget/element/button.py +4 -4
- pygpt_net/ui/widget/image/display.py +2 -2
- pygpt_net/ui/widget/lists/context.py +2 -2
- {pygpt_net-2.7.7.dist-info → pygpt_net-2.7.8.dist-info}/METADATA +9 -2
- {pygpt_net-2.7.7.dist-info → pygpt_net-2.7.8.dist-info}/RECORD +82 -70
- pygpt_net/controller/remote_store/google/store.py +0 -615
- pygpt_net/controller/remote_store/openai/batch.py +0 -524
- pygpt_net/controller/remote_store/openai/store.py +0 -699
- pygpt_net/ui/dialog/remote_store_google.py +0 -539
- pygpt_net/ui/dialog/remote_store_openai.py +0 -539
- pygpt_net/ui/widget/dialog/remote_store_openai.py +0 -56
- pygpt_net/ui/widget/lists/remote_store_google.py +0 -248
- pygpt_net/ui/widget/lists/remote_store_openai.py +0 -317
- {pygpt_net-2.7.7.dist-info → pygpt_net-2.7.8.dist-info}/LICENSE +0 -0
- {pygpt_net-2.7.7.dist-info → pygpt_net-2.7.8.dist-info}/WHEEL +0 -0
- {pygpt_net-2.7.7.dist-info → pygpt_net-2.7.8.dist-info}/entry_points.txt +0 -0
@@ -362,6 +362,12 @@ class Chat:
         except Exception:
             pass
 
+        # Download Files API file_data parts if present
+        try:
+            self._maybe_download_response_files(response, ctx)
+        except Exception:
+            pass
+
     def extract_text(self, response) -> str:
         """
         Extract output text.
@@ -792,7 +798,7 @@
                 return bytes(data)
             if isinstance(data, str):
                 import base64
-                return base64.b64decode(data)
+                return base64.b64encode(bytes()) if data == "" else base64.b64decode(data)
         except Exception:
             return None
         return None
@@ -1000,4 +1006,55 @@
             out.append({"type": typ, "uri": uri})
             continue
 
-        return out
+        return out
+
+    def _maybe_download_response_files(self, response, ctx: CtxItem) -> None:
+        """
+        Inspect non-stream response parts for Files API references and download them.
+        """
+        try:
+            cands = getattr(response, "candidates", None) or []
+            if not cands:
+                return
+            first = cands[0]
+            content = getattr(first, "content", None)
+            parts = getattr(content, "parts", None) or []
+        except Exception:
+            parts = []
+
+        if not parts:
+            return
+
+        downloaded: List[str] = []
+        for p in parts:
+            fdata = getattr(p, "file_data", None)
+            if not fdata:
+                continue
+            try:
+                uri = getattr(fdata, "file_uri", None) or getattr(fdata, "uri", None)
+                prefer = getattr(fdata, "file_name", None) or getattr(fdata, "display_name", None)
+                if not uri or not isinstance(uri, str):
+                    continue
+                # Only Gemini Files API refs are supported for direct download
+                save_path = self.window.core.api.google.store.download_to_dir(uri, prefer_name=prefer)
+                if save_path:
+                    downloaded.append(save_path)
+            except Exception:
+                continue
+
+        if downloaded:
+            downloaded = self.window.core.filesystem.make_local_list(downloaded)
+            if not isinstance(ctx.files, list):
+                ctx.files = []
+            for path in downloaded:
+                if path not in ctx.files:
+                    ctx.files.append(path)
+            images = []
+            for path in downloaded:
+                ext = os.path.splitext(path)[1].lower().lstrip(".")
+                if ext in ["png", "jpg", "jpeg", "gif", "bmp", "tiff", "webp"]:
+                    images.append(path)
+            if images:
+                if not isinstance(ctx.images, list):
+                    ctx.images = []
+                ctx.images += images
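Review note on the hunks above (the non-streaming Google chat path): the new `_maybe_download_response_files` helper walks `response.candidates[0].content.parts`, downloads every `file_data` reference through the Google store, and files the results into `ctx.files`, plus `ctx.images` for image extensions. A minimal standalone sketch of that classification step follows; `collect_downloads`, the `SimpleNamespace` stubs, and the injected `download` callable are illustrative stand-ins, not pygpt-net APIs.

```python
import os
from types import SimpleNamespace

def collect_downloads(parts, ctx, download):
    """Download file_data parts and split results into ctx.files / ctx.images."""
    image_ext = {"png", "jpg", "jpeg", "gif", "bmp", "tiff", "webp"}
    downloaded = []
    for p in parts:
        fdata = getattr(p, "file_data", None)
        if not fdata:
            continue
        uri = getattr(fdata, "file_uri", None)
        if not uri:
            continue
        path = download(uri)  # in the real code: store.download_to_dir(uri, ...)
        if path:
            downloaded.append(path)
    ctx.files += [p for p in downloaded if p not in ctx.files]
    ctx.images += [p for p in downloaded
                   if os.path.splitext(p)[1].lstrip(".").lower() in image_ext]
    return downloaded

# usage with fake objects standing in for google-genai parts and a CtxItem
parts = [SimpleNamespace(file_data=SimpleNamespace(file_uri="files/abc123"))]
ctx = SimpleNamespace(files=[], images=[])
collect_downloads(parts, ctx, download=lambda uri: "/tmp/abc123.png")
print(ctx.files, ctx.images)  # ['/tmp/abc123.png'] ['/tmp/abc123.png']
```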
@@ -6,12 +6,13 @@
 # GitHub: https://github.com/szczyglis-dev/py-gpt #
 # MIT License #
 # Created By : Marcin Szczygliński #
-# Updated Date: 2026.01.
+# Updated Date: 2026.01.06 06:00:00 #
 # ================================================== #
 
 import os
 import time
-
+import mimetypes
+from typing import Optional, List, Dict, Any, Union
 
 from pygpt_net.item.store import RemoteStoreItem
 
@@ -63,6 +64,53 @@ class Store:
             v = hi
         return v
 
+    def _download_dir(self) -> str:
+        """
+        Resolve target download directory (uses download.dir if set).
+        """
+        if self.window.core.config.has("download.dir") and self.window.core.config.get("download.dir") != "":
+            dir_path = os.path.join(
+                self.window.core.config.get_user_dir('data'),
+                self.window.core.config.get("download.dir"),
+            )
+        else:
+            dir_path = self.window.core.config.get_user_dir('data')
+        os.makedirs(dir_path, exist_ok=True)
+        return dir_path
+
+    def _ensure_unique_path(self, dir_path: str, filename: str) -> str:
+        """
+        Ensure unique filename in dir, add timestamp prefix if exists.
+        """
+        path = os.path.join(dir_path, filename)
+        if os.path.exists(path):
+            prefix = time.strftime("%Y%m%d_%H%M%S_")
+            path = os.path.join(dir_path, f"{prefix}{filename}")
+        return path
+
+    def _guess_filename(self, file_meta: Any, fallback: str = "downloaded.bin") -> str:
+        """
+        Best-effort filename from File metadata or URI.
+        """
+        name = None
+        for attr in ("display_name", "filename", "name", "file_name"):
+            try:
+                val = getattr(file_meta, attr, None)
+                if not name and isinstance(val, str) and val:
+                    name = os.path.basename(val)
+            except Exception:
+                pass
+            if not name and isinstance(file_meta, dict):
+                val = file_meta.get(attr)
+                if isinstance(val, str) and val:
+                    name = os.path.basename(val)
+
+        if not name:
+            # allow URI-like strings
+            if isinstance(file_meta, str):
+                name = os.path.basename(file_meta.split("?")[0].split("#")[0])
+        return name or fallback
+
     # -----------------------------
     # Files service (global)
     # -----------------------------
@@ -105,6 +153,79 @@ class Store:
         if res is not None:
             return file_name
 
+    def download(self, file: Union[str, Any], path: str) -> bool:
+        """
+        Download a Files API item into the given path.
+
+        :param file: file name ('files/...'), file object, or file URI
+        :param path: target local path
+        :return: True on success
+        """
+        client = self.get_client()
+        data = None
+        try:
+            data = client.files.download(file=file)
+        except Exception:
+            pass
+        if not data:
+            return False
+        # google-genai returns bytes
+        try:
+            with open(path, "wb") as f:
+                f.write(data if isinstance(data, (bytes, bytearray)) else bytes(data))
+            return True
+        except Exception:
+            return False
+
+    def download_to_dir(self, file: Union[str, Any], prefer_name: Optional[str] = None) -> Optional[str]:
+        """
+        Download a Files API item into configured download directory.
+
+        :param file: file name ('files/...'), file object, or file URI
+        :param prefer_name: optional preferred filename
+        :return: saved path or None
+        """
+        dir_path = self._download_dir()
+        filename = None
+
+        # Try to resolve filename from metadata
+        file_meta = None
+        try:
+            name = None
+            if isinstance(file, str) and file.startswith("files/"):
+                name = file
+            elif hasattr(file, "name"):
+                name = getattr(file, "name", None)
+
+            if name:
+                file_meta = self.get_file(name)
+        except Exception:
+            file_meta = None
+
+        if prefer_name and isinstance(prefer_name, str):
+            filename = os.path.basename(prefer_name)
+
+        if not filename:
+            filename = self._guess_filename(file_meta if file_meta is not None else file)
+
+        # Infer extension from mime, if missing
+        if not os.path.splitext(filename)[1] and file_meta is not None:
+            try:
+                mime = getattr(file_meta, "mime_type", None)
+                if isinstance(file_meta, dict):
+                    mime = file_meta.get("mime_type", mime)
+                if mime:
+                    ext = mimetypes.guess_extension(mime) or ""
+                    if ext:
+                        filename = filename + ext
+            except Exception:
+                pass
+
+        path = self._ensure_unique_path(dir_path, filename)
+        if self.download(file, path):
+            return path
+        return None
+
     def get_files_ids_all(
         self,
         items: list,
@@ -377,7 +498,7 @@ class Store:
         for doc_name in files:
             self.log("Removing document from store [{}]:{} ".format(store_id, doc_name))
             self.delete_store_file(store_id, doc_name)
-        num += 1
+            num += 1
         return num
 
     def remove_all(self, callback: Optional[callable] = None) -> int:
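The store hunks above add the download plumbing (`_download_dir`, `_ensure_unique_path`, `_guess_filename`, `download`, `download_to_dir`). The naming rules are: prefer an explicit name, fall back to metadata, append a `mimetypes`-guessed extension when none is present, and avoid clobbering an existing file by prefixing a timestamp. A small sketch of just that path-resolution logic, assuming the same conventions; `resolve_target_path` is illustrative and not part of the package:

```python
import mimetypes
import os
import time
from typing import Optional

def resolve_target_path(dir_path: str, filename: str, mime: Optional[str] = None) -> str:
    """Sketch of the naming rules assumed by download_to_dir(): append a guessed
    extension when the name has none, then avoid overwriting an existing file by
    prefixing a timestamp."""
    if not os.path.splitext(filename)[1] and mime:
        filename += mimetypes.guess_extension(mime) or ""
    path = os.path.join(dir_path, filename)
    if os.path.exists(path):
        path = os.path.join(dir_path, time.strftime("%Y%m%d_%H%M%S_") + filename)
    return path

# e.g. a Files API item reported as image/png but named without an extension:
print(resolve_target_path("/tmp", "chart", mime="image/png"))  # -> /tmp/chart.png
```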
@@ -93,6 +93,43 @@ def process_google_chunk(ctx, core, state, chunk) -> Optional[str]:
     except Exception:
         pass
 
+    def _try_download_uri(uri: Optional[str], prefer_name: Optional[str] = None) -> Optional[str]:
+        """
+        Attempt to download a Files API URI via store; return local path or None.
+        """
+        if not isinstance(uri, str) or not uri:
+            return None
+        try:
+            path = core.api.google.store.download_to_dir(uri, prefer_name=prefer_name)
+            return path
+        except Exception:
+            return None
+
+    def _append_downloaded(paths):
+        if not paths:
+            return
+        try:
+            loc = core.filesystem.make_local_list(paths)
+        except Exception:
+            loc = paths
+        if not isinstance(ctx.files, list):
+            ctx.files = []
+        for p in loc:
+            if p not in ctx.files:
+                ctx.files.append(p)
+        # images
+        imgs = []
+        for p in loc:
+            ext = p.lower().rsplit(".", 1)[-1] if "." in p else ""
+            if ext in ["png", "jpg", "jpeg", "gif", "bmp", "tiff", "webp"]:
+                imgs.append(p)
+        if imgs:
+            if not isinstance(ctx.images, list):
+                ctx.images = []
+            for p in imgs:
+                if p not in ctx.images:
+                    ctx.images.append(p)
+
     # Collect function calls from Responses API style stream
     if fc_list:
         for fc in fc_list:
@@ -114,6 +151,23 @@ def process_google_chunk(ctx, core, state, chunk) -> Optional[str]:
             content = getattr(cand, "content", None)
             parts = getattr(content, "parts", None) or []
             for p in parts:
+                # Download Files API file_data parts if present
+                try:
+                    fdata = getattr(p, "file_data", None)
+                    if fdata:
+                        uri = getattr(fdata, "file_uri", None) or getattr(fdata, "uri", None)
+                        name = getattr(fdata, "file_name", None) or getattr(fdata, "display_name", None)
+                        if uri and isinstance(uri, str):
+                            if not hasattr(state, "google_downloaded_uris"):
+                                state.google_downloaded_uris = set()
+                            if uri not in state.google_downloaded_uris:
+                                save = _try_download_uri(uri, name)
+                                if save:
+                                    _append_downloaded([save])
+                                    state.google_downloaded_uris.add(uri)
+                except Exception:
+                    pass
+
                 fn = getattr(p, "function_call", None)
                 if not fn:
                     continue
@@ -132,7 +186,6 @@ def process_google_chunk(ctx, core, state, chunk) -> Optional[str]:
                     pass
 
     # Interactions API / Deep Research: collect streaming deltas and metadata
-    # Handles event_type, event_id, interaction.start/complete/status_update, and content.delta variants
     try:
         event_type = _get(chunk, "event_type", None)
         if event_type:
@@ -215,10 +268,8 @@ def process_google_chunk(ctx, core, state, chunk) -> Optional[str]:
                     content_obj = _get(delta, "content", None)
                     thought_txt = None
                     if content_obj is not None:
-                        # TextContent path
                         thought_txt = _get(content_obj, "text", None)
                     if thought_txt is None:
-                        # Some SDKs expose 'thought' or 'content.text' differently
                         thought_txt = _get(delta, "thought", None)
                     if thought_txt:
                         _ensure_list_attr(state, "google_thought_summaries")
@@ -252,7 +303,6 @@ def process_google_chunk(ctx, core, state, chunk) -> Optional[str]:
 
                 # Function result delta (optional store)
                 elif delta_type == "function_result":
-                    # Can be used to log tool results; not altering UI text
                     _ensure_list_attr(state, "google_function_results")
                     try:
                         state.google_function_results.append(_to_plain_dict(delta))
@@ -269,7 +319,6 @@ def process_google_chunk(ctx, core, state, chunk) -> Optional[str]:
                     else:
                         response_parts.append(str(code_txt))
                 elif delta_type == "code_execution_result":
-                    # Close code block; keep output logging internal if needed
                     if state.is_code:
                         response_parts.append("\n\n```\n-----------\n")
                         state.is_code = False
@@ -282,7 +331,7 @@ def process_google_chunk(ctx, core, state, chunk) -> Optional[str]:
                 # Images in stream
                 elif delta_type == "image":
                     # ImageDelta may contain base64 data or uri
-                    mime =
+                    mime = _get(delta, "mime_type", None)
                     data_b64 = _get(delta, "data", None)
                     uri = _get(delta, "uri", None)
                     if data_b64:
@@ -299,12 +348,17 @@ def process_google_chunk(ctx, core, state, chunk) -> Optional[str]:
                         except Exception:
                             pass
                     elif uri:
-
-
-
-
-
-
+                        # Try to download Files API content when URI is a file ref
+                        save = _try_download_uri(uri)
+                        if save:
+                            _append_downloaded([save])
+                        else:
+                            try:
+                                if not hasattr(ctx, "urls") or ctx.urls is None:
+                                    ctx.urls = []
+                                ctx.urls.append(uri)
+                            except Exception:
+                                pass
 
                 # URL context call/result (Deep Research tool)
                 elif delta_type == "url_context_call":
@@ -368,7 +422,6 @@ def process_google_chunk(ctx, core, state, chunk) -> Optional[str]:
                     except Exception:
                         pass
 
-                # Thought signature delta (optional, store)
                 elif delta_type == "thought_signature":
                     _ensure_list_attr(state, "google_thought_signatures")
                     try:
@@ -380,12 +433,16 @@ def process_google_chunk(ctx, core, state, chunk) -> Optional[str]:
                 elif delta_type in ("audio", "video", "document"):
                     uri = _get(delta, "uri", None)
                     if uri:
-
-
-
-
-
-
+                        save = _try_download_uri(uri)
+                        if save:
+                            _append_downloaded([save])
+                        else:
+                            try:
+                                if not hasattr(ctx, "urls") or ctx.urls is None:
+                                    ctx.urls = []
+                                ctx.urls.append(uri)
+                            except Exception:
+                                pass
 
     except Exception:
         pass
@@ -459,11 +516,21 @@ def process_google_chunk(ctx, core, state, chunk) -> Optional[str]:
             fdata = getattr(p, "file_data", None)
             if fdata:
                 uri = getattr(fdata, "file_uri", None) or getattr(fdata, "uri", None)
-
-                if uri
-                if
-
-
+                prefer = getattr(fdata, "file_name", None) or getattr(fdata, "display_name", None)
+                if uri:
+                    if not hasattr(state, "google_downloaded_uris"):
+                        state.google_downloaded_uris = set()
+                    if uri not in state.google_downloaded_uris:
+                        save = _try_download_uri(uri, prefer)
+                        if save:
+                            _append_downloaded([save])
+                            state.google_downloaded_uris.add(uri)
+                    # keep original behavior for image http links
+                    mime = (getattr(fdata, "mime_type", "") or "").lower()
+                    if uri.startswith(("http://", "https://")) and mime.startswith("image/"):
+                        if ctx.urls is None:
+                            ctx.urls = []
+                        ctx.urls.append(uri)
 
     collect_google_citations(ctx, state, chunk)
 
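In the streaming path above, repeated `file_data` chunks are deduplicated per stream through a `google_downloaded_uris` set kept on the stream state object, so a given URI is fetched at most once. A minimal sketch of that guard; the `download_once` helper and the lambda `fetch` are illustrative, not part of the module:

```python
from types import SimpleNamespace

def download_once(state, uri, fetch):
    """Fetch a URI at most once per stream, mirroring the
    state.google_downloaded_uris guard used in the diff above."""
    if not hasattr(state, "google_downloaded_uris"):
        state.google_downloaded_uris = set()
    if uri in state.google_downloaded_uris:
        return None  # already handled in an earlier chunk
    path = fetch(uri)
    if path:
        state.google_downloaded_uris.add(uri)
    return path

state = SimpleNamespace()
fetch = lambda uri: f"/tmp/{uri.rsplit('/', 1)[-1]}"
print(download_once(state, "files/abc123", fetch))  # /tmp/abc123
print(download_once(state, "files/abc123", fetch))  # None (deduplicated)
```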
@@ -6,7 +6,7 @@
 # GitHub: https://github.com/szczyglis-dev/py-gpt #
 # MIT License #
 # Created By : Marcin Szczygliński #
-# Updated Date: 2026.01.
+# Updated Date: 2026.01.06 06:00:00 #
 # ================================================== #
 
 import os
@@ -33,18 +33,19 @@ class Importer(QObject):
         :param mode: mode
         :param err: error message
         """
+        batch = self.window.controller.remote_store.batch
         if mode == "import_files":
-
+            batch.handle_imported_files_failed(err)
         elif mode == "truncate_files":
-
+            batch.handle_truncated_files_failed(err)
         elif mode == "upload_files":
-
+            batch.handle_uploaded_files_failed(err)
         elif mode in "vector_stores":
-
+            batch.handle_imported_stores_failed(err)
         elif mode in "truncate_vector_stores":
-
+            batch.handle_truncated_stores_failed(err)
         elif mode in "refresh_vector_stores":
-
+            batch.handle_refreshed_stores_failed(err)
 
     @Slot(str, str, int)
     def handle_finished(self, mode: str, store_id: str = None, num: int = 0):
@@ -55,18 +56,19 @@ class Importer(QObject):
         :param store_id: store ID
         :param num: number of affected items
         """
+        batch = self.window.controller.remote_store.batch
         if mode == "import_files":
-
+            batch.handle_imported_files(num)
        elif mode == "truncate_files":
-
+            batch.handle_truncated_files(store_id, num)
         elif mode == "upload_files":
-
+            batch.handle_uploaded_files(num)
         elif mode == "vector_stores":
-
+            batch.handle_imported_stores(num)
         elif mode == "truncate_vector_stores":
-
+            batch.handle_truncated_stores(num)
         elif mode == "refresh_vector_stores":
-
+            batch.handle_refreshed_stores(num)
 
     @Slot(str, str)
     def handle_status(self, mode: str, msg: str):
@@ -216,20 +218,6 @@ class ImportWorker(QRunnable):
         finally:
             self.cleanup()
 
-    def import_assistants(self, silent: bool = False) -> bool:
-        """
-        Import assistants (not used for Google by default; kept for parity)
-
-        :param silent: silent mode
-        """
-        try:
-            if not silent:
-                self.signals.finished.emit("assistants", self.store_id, 0)
-            return True
-        except Exception as e:
-            self.signals.error.emit("assistants", e)
-            return False
-
     def import_vector_stores(self, silent: bool = False) -> bool:
         """
         Import File Search stores
@@ -282,7 +270,7 @@ class ImportWorker(QRunnable):
         for id in stores:
             store = stores[id]
             try:
-                self.window.controller.remote_store.
+                self.window.controller.remote_store.refresh_store(store, update=False, provider="google")
                 num += 1
             except Exception as e:
                 self.log("Failed to refresh store: {}".format(id))
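The importer hunks above now resolve `self.window.controller.remote_store.batch` once and then dispatch on the `mode` string through an if/elif chain. For reference, the same dispatch written as a lookup table is sketched below; the handler names are the ones visible in the diff, while `dispatch_finished` itself is hypothetical, and the explicit chain the code keeps is arguably just as readable for six modes.

```python
# Table-driven sketch of the handle_finished() dispatch; `batch` is assumed to
# expose the handlers shown in the diff above.
def dispatch_finished(batch, mode: str, store_id: str = None, num: int = 0) -> None:
    handlers = {
        "import_files": lambda: batch.handle_imported_files(num),
        "truncate_files": lambda: batch.handle_truncated_files(store_id, num),
        "upload_files": lambda: batch.handle_uploaded_files(num),
        "vector_stores": lambda: batch.handle_imported_stores(num),
        "truncate_vector_stores": lambda: batch.handle_truncated_stores(num),
        "refresh_vector_stores": lambda: batch.handle_refreshed_stores(num),
    }
    handler = handlers.get(mode)
    if handler:
        handler()
```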
@@ -6,7 +6,7 @@
 # GitHub: https://github.com/szczyglis-dev/py-gpt #
 # MIT License #
 # Created By : Marcin Szczygliński #
-# Updated Date:
+# Updated Date: 2026.01.05 17:00:00 #
 # ================================================== #
 
 import json
@@ -16,7 +16,7 @@ from pygpt_net.item.assistant import AssistantItem
 from pygpt_net.item.ctx import CtxItem
 
 from .worker.assistants import AssistantsWorker, EventHandler
-from .worker.
+from .worker.importer_assistants import Importer
 
 
 class Assistants:
@@ -6,7 +6,7 @@
 # GitHub: https://github.com/szczyglis-dev/py-gpt #
 # MIT License #
 # Created By : Marcin Szczygliński #
-# Updated Date: 2026.01.
+# Updated Date: 2026.01.05 17:00:00 #
 # ================================================== #
 
 import os
@@ -14,6 +14,8 @@ from typing import Optional, List
 
 from pygpt_net.item.store import RemoteStoreItem
 
+from .worker.importer import Importer
+
 
 class Store:
     def __init__(self, window=None):
@@ -23,6 +25,7 @@ class Store:
         :param window: Window instance
         """
         self.window = window
+        self.importer = Importer(window)
 
     def get_client(self):
         """
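The final hunks wire an `Importer` into the OpenAI `Store` constructor, following the same Qt worker pattern used by the new importer modules: a `QRunnable` that reports back through signals on a `QObject`, which the Slot handlers shown earlier forward to the batch controller. A minimal, self-contained sketch of that pattern is below; the class names, signal signatures, and the empty `run()` body are assumptions for illustration, not the actual `ImportWorker` implementation.

```python
from PySide6.QtCore import QCoreApplication, QObject, QRunnable, QThreadPool, Signal, Slot

class WorkerSignals(QObject):
    finished = Signal(str, str, int)   # mode, store_id, num
    error = Signal(str, object)        # mode, exception

class ImportWorker(QRunnable):
    """Background import job; results come back via signals, not return values."""
    def __init__(self, mode: str, store_id: str = ""):
        super().__init__()
        self.signals = WorkerSignals()
        self.mode = mode
        self.store_id = store_id

    @Slot()
    def run(self):
        try:
            num = 0  # ... call the provider API and count imported items here ...
            self.signals.finished.emit(self.mode, self.store_id, num)
        except Exception as e:
            self.signals.error.emit(self.mode, e)

if __name__ == "__main__":
    app = QCoreApplication([])
    worker = ImportWorker("vector_stores")
    # in the real modules these connect to the controller's batch handlers
    worker.signals.finished.connect(lambda mode, sid, num: print(mode, num))
    QThreadPool.globalInstance().start(worker)
    QThreadPool.globalInstance().waitForDone()
```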