pygpt-net 2.7.4__py3-none-any.whl → 2.7.5__py3-none-any.whl

This diff compares two publicly released versions of the package as published to a supported registry. It is provided for informational purposes only and reflects the changes between those versions as they appear in the registry.
Files changed (133)
  1. pygpt_net/CHANGELOG.txt +7 -0
  2. pygpt_net/__init__.py +3 -3
  3. pygpt_net/app_core.py +4 -2
  4. pygpt_net/controller/__init__.py +5 -1
  5. pygpt_net/controller/assistant/assistant.py +1 -4
  6. pygpt_net/controller/assistant/batch.py +5 -504
  7. pygpt_net/controller/assistant/editor.py +5 -5
  8. pygpt_net/controller/assistant/files.py +16 -16
  9. pygpt_net/controller/chat/handler/google_stream.py +307 -1
  10. pygpt_net/controller/chat/handler/worker.py +8 -1
  11. pygpt_net/controller/chat/image.py +2 -2
  12. pygpt_net/controller/dialogs/confirm.py +73 -101
  13. pygpt_net/controller/lang/mapping.py +9 -9
  14. pygpt_net/controller/painter/capture.py +50 -1
  15. pygpt_net/controller/presets/presets.py +2 -1
  16. pygpt_net/controller/remote_store/__init__.py +12 -0
  17. pygpt_net/{provider/core/assistant_file/db_sqlite → controller/remote_store/google}/__init__.py +2 -2
  18. pygpt_net/controller/remote_store/google/batch.py +402 -0
  19. pygpt_net/controller/remote_store/google/store.py +615 -0
  20. pygpt_net/controller/remote_store/openai/__init__.py +12 -0
  21. pygpt_net/controller/remote_store/openai/batch.py +524 -0
  22. pygpt_net/controller/{assistant → remote_store/openai}/store.py +63 -60
  23. pygpt_net/controller/remote_store/remote_store.py +35 -0
  24. pygpt_net/controller/ui/ui.py +20 -1
  25. pygpt_net/core/assistants/assistants.py +3 -15
  26. pygpt_net/core/db/database.py +5 -3
  27. pygpt_net/core/locale/placeholder.py +35 -0
  28. pygpt_net/core/remote_store/__init__.py +12 -0
  29. pygpt_net/core/remote_store/google/__init__.py +11 -0
  30. pygpt_net/core/remote_store/google/files.py +224 -0
  31. pygpt_net/core/remote_store/google/store.py +248 -0
  32. pygpt_net/core/remote_store/openai/__init__.py +11 -0
  33. pygpt_net/core/{assistants → remote_store/openai}/files.py +26 -19
  34. pygpt_net/core/{assistants → remote_store/openai}/store.py +32 -15
  35. pygpt_net/core/remote_store/remote_store.py +24 -0
  36. pygpt_net/data/config/config.json +8 -4
  37. pygpt_net/data/config/models.json +77 -3
  38. pygpt_net/data/config/settings.json +45 -0
  39. pygpt_net/data/locale/locale.de.ini +41 -41
  40. pygpt_net/data/locale/locale.en.ini +53 -43
  41. pygpt_net/data/locale/locale.es.ini +41 -41
  42. pygpt_net/data/locale/locale.fr.ini +41 -41
  43. pygpt_net/data/locale/locale.it.ini +41 -41
  44. pygpt_net/data/locale/locale.pl.ini +42 -42
  45. pygpt_net/data/locale/locale.uk.ini +41 -41
  46. pygpt_net/data/locale/locale.zh.ini +41 -41
  47. pygpt_net/data/locale/plugin.cmd_history.de.ini +1 -1
  48. pygpt_net/data/locale/plugin.cmd_history.en.ini +1 -1
  49. pygpt_net/data/locale/plugin.cmd_history.es.ini +1 -1
  50. pygpt_net/data/locale/plugin.cmd_history.fr.ini +1 -1
  51. pygpt_net/data/locale/plugin.cmd_history.it.ini +1 -1
  52. pygpt_net/data/locale/plugin.cmd_history.pl.ini +1 -1
  53. pygpt_net/data/locale/plugin.cmd_history.uk.ini +1 -1
  54. pygpt_net/data/locale/plugin.cmd_history.zh.ini +1 -1
  55. pygpt_net/data/locale/plugin.cmd_mouse_control.en.ini +14 -0
  56. pygpt_net/data/locale/plugin.cmd_web.de.ini +1 -1
  57. pygpt_net/data/locale/plugin.cmd_web.en.ini +1 -1
  58. pygpt_net/data/locale/plugin.cmd_web.es.ini +1 -1
  59. pygpt_net/data/locale/plugin.cmd_web.fr.ini +1 -1
  60. pygpt_net/data/locale/plugin.cmd_web.it.ini +1 -1
  61. pygpt_net/data/locale/plugin.cmd_web.pl.ini +1 -1
  62. pygpt_net/data/locale/plugin.cmd_web.uk.ini +1 -1
  63. pygpt_net/data/locale/plugin.cmd_web.zh.ini +1 -1
  64. pygpt_net/data/locale/plugin.idx_llama_index.de.ini +2 -2
  65. pygpt_net/data/locale/plugin.idx_llama_index.en.ini +2 -2
  66. pygpt_net/data/locale/plugin.idx_llama_index.es.ini +2 -2
  67. pygpt_net/data/locale/plugin.idx_llama_index.fr.ini +2 -2
  68. pygpt_net/data/locale/plugin.idx_llama_index.it.ini +2 -2
  69. pygpt_net/data/locale/plugin.idx_llama_index.pl.ini +2 -2
  70. pygpt_net/data/locale/plugin.idx_llama_index.uk.ini +2 -2
  71. pygpt_net/data/locale/plugin.idx_llama_index.zh.ini +2 -2
  72. pygpt_net/item/assistant.py +1 -211
  73. pygpt_net/item/ctx.py +3 -1
  74. pygpt_net/item/store.py +238 -0
  75. pygpt_net/migrations/Version20260102190000.py +35 -0
  76. pygpt_net/migrations/__init__.py +3 -1
  77. pygpt_net/plugin/cmd_mouse_control/config.py +470 -1
  78. pygpt_net/plugin/cmd_mouse_control/plugin.py +488 -22
  79. pygpt_net/plugin/cmd_mouse_control/worker.py +464 -87
  80. pygpt_net/plugin/cmd_mouse_control/worker_sandbox.py +729 -0
  81. pygpt_net/plugin/idx_llama_index/config.py +2 -2
  82. pygpt_net/provider/api/google/__init__.py +16 -54
  83. pygpt_net/provider/api/google/chat.py +546 -129
  84. pygpt_net/provider/api/google/computer.py +190 -0
  85. pygpt_net/provider/api/google/realtime/realtime.py +2 -2
  86. pygpt_net/provider/api/google/remote_tools.py +93 -0
  87. pygpt_net/provider/api/google/store.py +546 -0
  88. pygpt_net/provider/api/google/worker/__init__.py +0 -0
  89. pygpt_net/provider/api/google/worker/importer.py +392 -0
  90. pygpt_net/provider/api/openai/computer.py +10 -1
  91. pygpt_net/provider/api/openai/store.py +6 -6
  92. pygpt_net/provider/api/openai/worker/importer.py +24 -24
  93. pygpt_net/provider/core/config/patch.py +16 -1
  94. pygpt_net/provider/core/config/patches/patch_before_2_6_42.py +3 -3
  95. pygpt_net/provider/core/model/patch.py +17 -3
  96. pygpt_net/provider/core/preset/json_file.py +13 -7
  97. pygpt_net/provider/core/{assistant_file → remote_file}/__init__.py +1 -1
  98. pygpt_net/provider/core/{assistant_file → remote_file}/base.py +9 -9
  99. pygpt_net/provider/core/remote_file/db_sqlite/__init__.py +12 -0
  100. pygpt_net/provider/core/{assistant_file → remote_file}/db_sqlite/patch.py +1 -1
  101. pygpt_net/provider/core/{assistant_file → remote_file}/db_sqlite/provider.py +23 -20
  102. pygpt_net/provider/core/{assistant_file → remote_file}/db_sqlite/storage.py +35 -27
  103. pygpt_net/provider/core/{assistant_file → remote_file}/db_sqlite/utils.py +5 -4
  104. pygpt_net/provider/core/{assistant_store → remote_store}/__init__.py +1 -1
  105. pygpt_net/provider/core/{assistant_store → remote_store}/base.py +10 -10
  106. pygpt_net/provider/core/{assistant_store → remote_store}/db_sqlite/__init__.py +1 -1
  107. pygpt_net/provider/core/{assistant_store → remote_store}/db_sqlite/patch.py +1 -1
  108. pygpt_net/provider/core/{assistant_store → remote_store}/db_sqlite/provider.py +16 -15
  109. pygpt_net/provider/core/{assistant_store → remote_store}/db_sqlite/storage.py +30 -23
  110. pygpt_net/provider/core/{assistant_store → remote_store}/db_sqlite/utils.py +5 -4
  111. pygpt_net/provider/core/{assistant_store → remote_store}/json_file.py +9 -9
  112. pygpt_net/provider/llms/google.py +2 -2
  113. pygpt_net/ui/base/config_dialog.py +3 -2
  114. pygpt_net/ui/dialog/assistant.py +3 -3
  115. pygpt_net/ui/dialog/plugins.py +3 -1
  116. pygpt_net/ui/dialog/remote_store_google.py +539 -0
  117. pygpt_net/ui/dialog/{assistant_store.py → remote_store_openai.py} +95 -95
  118. pygpt_net/ui/dialogs.py +5 -3
  119. pygpt_net/ui/layout/chat/attachments_uploaded.py +3 -3
  120. pygpt_net/ui/layout/toolbox/computer_env.py +26 -8
  121. pygpt_net/ui/menu/tools.py +13 -5
  122. pygpt_net/ui/widget/dialog/remote_store_google.py +56 -0
  123. pygpt_net/ui/widget/dialog/{assistant_store.py → remote_store_openai.py} +9 -9
  124. pygpt_net/ui/widget/element/button.py +4 -4
  125. pygpt_net/ui/widget/lists/remote_store_google.py +248 -0
  126. pygpt_net/ui/widget/lists/{assistant_store.py → remote_store_openai.py} +21 -21
  127. pygpt_net/ui/widget/option/checkbox_list.py +47 -9
  128. pygpt_net/ui/widget/option/combo.py +39 -3
  129. {pygpt_net-2.7.4.dist-info → pygpt_net-2.7.5.dist-info}/METADATA +33 -2
  130. {pygpt_net-2.7.4.dist-info → pygpt_net-2.7.5.dist-info}/RECORD +133 -108
  131. {pygpt_net-2.7.4.dist-info → pygpt_net-2.7.5.dist-info}/LICENSE +0 -0
  132. {pygpt_net-2.7.4.dist-info → pygpt_net-2.7.5.dist-info}/WHEEL +0 -0
  133. {pygpt_net-2.7.4.dist-info → pygpt_net-2.7.5.dist-info}/entry_points.txt +0 -0
pygpt_net/controller/assistant/editor.py
@@ -6,7 +6,7 @@
  # GitHub: https://github.com/szczyglis-dev/py-gpt #
  # MIT License #
  # Created By : Marcin Szczygliński #
- # Updated Date: 2025.08.24 23:00:00 #
+ # Updated Date: 2026.01.02 20:00:00 #
  # ================================================== #
 
  import copy
@@ -115,9 +115,9 @@ class Editor:
  current_id = self.get_selected_store_id()
  if current_id is not None:
  current_idx = self.get_choice_idx_by_id(current_id)
- stores = self.window.core.assistants.store.get_all()
+ stores = self.window.core.remote_store.openai.get_all()
  for id in list(stores.keys()):
- if self.window.core.assistants.store.is_hidden(id):
+ if self.window.core.remote_store.openai.is_hidden(id):
  continue
  if stores[id].name is None or stores[id].name == "":
  items[id] = id
@@ -158,10 +158,10 @@ class Editor:
  :param store_id: store ID
  :return: combo idx
  """
- stores = self.window.core.assistants.store.get_all()
+ stores = self.window.core.remote_store.openai.get_all()
  i = 1
  for id in list(stores.keys()):
- if self.window.core.assistants.store.is_hidden(id):
+ if self.window.core.remote_store.openai.is_hidden(id):
  continue # ignore empty names
  if id == store_id:
  return i
pygpt_net/controller/assistant/files.py
@@ -6,7 +6,7 @@
  # GitHub: https://github.com/szczyglis-dev/py-gpt #
  # MIT License #
  # Created By : Marcin Szczygliński #
- # Updated Date: 2025.12.27 00:00:00 #
+ # Updated Date: 2026.01.02 19:00:00 #
  # ================================================== #
 
  import os
@@ -31,7 +31,7 @@ class Files:
  def update(self):
  """Update assistants files list"""
  self.update_list()
- self.window.controller.assistant.store.update_files_list()
+ self.window.controller.remote_store.openai.update_files_list()
 
  def select(self, idx: int):
  """
@@ -48,7 +48,7 @@
 
  # get file by list index
  thread_id = self.window.core.config.get('assistant_thread')
- file_id = self.window.core.assistants.files.get_file_id_by_idx(idx, assistant.vector_store, thread_id)
+ file_id = self.window.core.remote_store.openai.files.get_file_id_by_idx(idx, assistant.vector_store, thread_id)
  self.window.core.assistants.current_file = file_id
 
  def count_upload(
@@ -96,7 +96,7 @@
 
  ids = idx if isinstance(idx, list) else [idx]
  for idx in ids:
- file_id = self.window.core.assistants.files.get_file_id_by_idx(idx, assistant.vector_store, thread_id)
+ file_id = self.window.core.remote_store.openai.files.get_file_id_by_idx(idx, assistant.vector_store, thread_id)
  self.window.controller.attachment.download(file_id) # download file
 
  def rename(self, idx: int):
@@ -115,7 +115,7 @@
  thread_id = self.window.core.config.get('assistant_thread')
 
  # get file by list index
- file = self.window.core.assistants.files.get_file_by_idx(idx, assistant.vector_store, thread_id)
+ file = self.window.core.remote_store.openai.files.get_file_by_idx(idx, assistant.vector_store, thread_id)
  if file is None:
  return
 
@@ -139,7 +139,7 @@
  :param record_id: file record ID
  :param name: new name
  """
- self.window.core.assistants.files.rename(
+ self.window.core.remote_store.openai.files.rename(
  record_id,
  name,
  )
@@ -167,21 +167,21 @@
  if self.window.core.assistants.has(id):
  assistant = self.window.core.assistants.get_by_id(id)
  thread_id = self.window.core.config.get('assistant_thread')
- items = self.window.core.assistants.files.get_by_store_or_thread(assistant.vector_store, thread_id)
+ items = self.window.core.remote_store.openai.files.get_by_store_or_thread(assistant.vector_store, thread_id)
  self.window.update_status(trans('status.sending'))
  QApplication.processEvents()
 
  for id in list(items.keys()):
  file = items[id]
  try:
- self.window.core.assistants.files.delete(file) # delete from DB, API and vector stores
+ self.window.core.remote_store.openai.files.delete(file) # delete from DB, API and vector stores
  except Exception as e:
  self.window.update_status(trans('status.error'))
  self.window.ui.dialogs.alert(e)
 
  # update store status
  if assistant.vector_store:
- self.window.controller.assistant.store.refresh_by_store_id(assistant.vector_store)
+ self.window.controller.remote_store.openai.refresh_by_store_id(assistant.vector_store)
 
  self.window.update_status(trans('status.deleted'))
 
@@ -215,7 +215,7 @@
  # get files by list index
  ids = idx if isinstance(idx, list) else [idx]
  for idx in ids:
- file = self.window.core.assistants.files.get_file_by_idx(idx, assistant.vector_store, thread_id)
+ file = self.window.core.remote_store.openai.files.get_file_by_idx(idx, assistant.vector_store, thread_id)
  if file is None:
  continue
  files.append(file)
@@ -224,11 +224,11 @@
  self.window.update_status(trans('status.sending'))
  QApplication.processEvents()
  try:
- self.window.core.assistants.files.delete(files) # delete from DB, API and vector stores
+ self.window.core.remote_store.openai.files.delete(files) # delete from DB, API and vector stores
 
  # update store status
  if assistant.vector_store:
- self.window.controller.assistant.store.refresh_by_store_id(assistant.vector_store)
+ self.window.controller.remote_store.openai.refresh_by_store_id(assistant.vector_store)
 
  self.window.update_status(trans('status.deleted'))
  except Exception as e:
@@ -312,7 +312,7 @@
  attachment,
  )
 
- self.window.core.assistants.files.create(
+ self.window.core.remote_store.openai.files.create(
  assistant,
  thread_id,
  new_id,
@@ -341,7 +341,7 @@
  if num > 0:
  # update store status
  if assistant.vector_store:
- self.window.controller.assistant.store.refresh_by_store_id(assistant.vector_store)
+ self.window.controller.remote_store.openai.refresh_by_store_id(assistant.vector_store)
 
  self.update_list() # update uploaded list UI
 
@@ -371,7 +371,7 @@
  assistant = self.window.core.assistants.get_by_id(assistant_id)
  if assistant is None:
  return
- items = self.window.core.assistants.files.get_by_store_or_thread(assistant.vector_store, thread_id)
+ items = self.window.core.remote_store.openai.files.get_by_store_or_thread(assistant.vector_store, thread_id)
  self.window.ui.chat.input.attachments_uploaded.update(items)
  self.update_tab()
 
@@ -385,7 +385,7 @@
  assistant = self.window.core.assistants.get_by_id(assistant_id)
  if assistant is None:
  return # no assistant
- num_files = self.window.core.assistants.files.count_by_store_or_thread(assistant.vector_store, thread_id)
+ num_files = self.window.core.remote_store.openai.files.count_by_store_or_thread(assistant.vector_store, thread_id)
  suffix = ''
  # append num of files
  if num_files > 0:
pygpt_net/controller/chat/handler/google_stream.py
@@ -6,7 +6,7 @@
  # GitHub: https://github.com/szczyglis-dev/py-gpt #
  # MIT License #
  # Created By : Marcin Szczygliński #
- # Updated Date: 2025.09.05 00:00:00 #
+ # Updated Date: 2026.01.03 02:10:00 #
  # ================================================== #
 
  import base64
@@ -20,6 +20,10 @@ def process_google_chunk(ctx, core, state, chunk) -> Optional[str]:
  """
  Google python-genai streaming chunk.
 
+ Supports:
+ - Responses API streaming (generate_content)
+ - Interactions API streaming (including Deep Research agent and generic interactions)
+
  :param ctx: Chat context
  :param core: Core controller
  :param state: Chat state
@@ -70,6 +74,26 @@ def process_google_chunk(ctx, core, state, chunk) -> Optional[str]:
  return [_to_plain_dict(x) for x in obj]
  return obj
 
+ def _get(obj: Any, name: str, default: Any = None) -> Any:
+ """Safe getattr or dict get."""
+ try:
+ if hasattr(obj, name):
+ return getattr(obj, name)
+ except Exception:
+ pass
+ if isinstance(obj, dict):
+ return obj.get(name, default)
+ return default
+
+ def _ensure_list_attr(obj: Any, name: str):
+ """Ensure list attribute exists."""
+ if not hasattr(obj, name) or not isinstance(getattr(obj, name), list):
+ try:
+ setattr(obj, name, [])
+ except Exception:
+ pass
+
+ # Collect function calls from Responses API style stream
  if fc_list:
  for fc in fc_list:
  name = getattr(fc, "name", "") or ""
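For orientation (not part of the diff): the new `_get()` helper above is duck-typed, so the same streaming code can read fields from either google-genai SDK objects (attribute access) or plain dicts. A minimal, self-contained sketch of that behavior, with made-up sample chunks:

```python
# Illustration only -- mirrors the _get() helper added in the hunk above.
from types import SimpleNamespace

def _get(obj, name, default=None):
    """Attribute lookup first, then dict lookup, then default."""
    try:
        if hasattr(obj, name):
            return getattr(obj, name)
    except Exception:
        pass
    if isinstance(obj, dict):
        return obj.get(name, default)
    return default

attr_chunk = SimpleNamespace(event_type="content.delta")  # SDK-object style (hypothetical sample)
dict_chunk = {"event_type": "content.delta"}              # plain-dict style (hypothetical sample)

assert _get(attr_chunk, "event_type") == "content.delta"
assert _get(dict_chunk, "event_type") == "content.delta"
assert _get(dict_chunk, "missing", "fallback") == "fallback"
```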
@@ -107,6 +131,273 @@ def process_google_chunk(ctx, core, state, chunk) -> Optional[str]:
  except Exception:
  pass
 
+ # Interactions API / Deep Research: collect streaming deltas and metadata
+ # Handles event_type, event_id, interaction.start/complete/status_update, and content.delta variants
+ try:
+ event_type = _get(chunk, "event_type", None)
+ if event_type:
+ # Track last event id for reconnection
+ event_id = _get(chunk, "event_id", None)
+ if event_id:
+ try:
+ state.google_last_event_id = event_id
+ except Exception:
+ pass
+ try:
+ if not hasattr(ctx, "extra") or ctx.extra is None:
+ ctx.extra = {}
+ ctx.extra["google_last_event_id"] = event_id
+ except Exception:
+ pass
+
+ # Interaction lifecycle events
+ if event_type == "interaction.start":
+ interaction = _get(chunk, "interaction", None)
+ interaction_id = _get(interaction, "id", None)
+ if interaction_id:
+ try:
+ state.google_interaction_id = interaction_id
+ except Exception:
+ pass
+ try:
+ if not hasattr(ctx, "extra") or ctx.extra is None:
+ ctx.extra = {}
+ ctx.extra["google_interaction_id"] = interaction_id
+ except Exception:
+ pass
+
+ elif event_type == "interaction.status_update":
+ status = _get(chunk, "status", None)
+ if status:
+ try:
+ state.google_interaction_status = status
+ except Exception:
+ pass
+ try:
+ if not hasattr(ctx, "extra") or ctx.extra is None:
+ ctx.extra = {}
+ ctx.extra["google_interaction_status"] = status
+ except Exception:
+ pass
+
+ elif event_type == "interaction.complete":
+ # Capture usage from the final interaction if available
+ interaction = _get(chunk, "interaction", None)
+ usage = _get(interaction, "usage", None)
+ if usage:
+ try:
+ capture_google_usage(state, usage)
+ except Exception:
+ pass
+
+ elif event_type == "error":
+ err = _get(chunk, "error", {}) or {}
+ try:
+ if not hasattr(ctx, "extra") or ctx.extra is None:
+ ctx.extra = {}
+ ctx.extra["google_interactions_error"] = _to_plain_dict(err)
+ except Exception:
+ pass
+
+ # Content deltas
+ if event_type == "content.delta":
+ delta = _get(chunk, "delta", {}) or {}
+ delta_type = (_get(delta, "type", "") or "").lower()
+
+ # Text delta
+ if delta_type == "text":
+ txt = _get(delta, "text", None)
+ if txt:
+ response_parts.append(txt)
+
+ # Thought summaries (Deep Research thinking summaries)
+ elif delta_type in ("thought", "thought_summary"):
+ content_obj = _get(delta, "content", None)
+ thought_txt = None
+ if content_obj is not None:
+ # TextContent path
+ thought_txt = _get(content_obj, "text", None)
+ if thought_txt is None:
+ # Some SDKs expose 'thought' or 'content.text' differently
+ thought_txt = _get(delta, "thought", None)
+ if thought_txt:
+ _ensure_list_attr(state, "google_thought_summaries")
+ try:
+ state.google_thought_summaries.append(thought_txt)
+ except Exception:
+ pass
+ try:
+ if not hasattr(ctx, "extra") or ctx.extra is None:
+ ctx.extra = {}
+ if "google_thought_summaries" not in ctx.extra or not isinstance(ctx.extra["google_thought_summaries"], list):
+ ctx.extra["google_thought_summaries"] = []
+ ctx.extra["google_thought_summaries"].append(thought_txt)
+ except Exception:
+ pass
+
+ # Function call delta (Interactions API tool/function calling)
+ elif delta_type == "function_call":
+ fname = _get(delta, "name", "") or ""
+ fargs_obj = _get(delta, "arguments", {}) or {}
+ call_id = _get(delta, "id", "") or ""
+ fargs_dict = _to_plain_dict(fargs_obj) or {}
+ new_calls.append({
+ "id": call_id,
+ "type": "function",
+ "function": {
+ "name": fname,
+ "arguments": json.dumps(fargs_dict, ensure_ascii=False),
+ }
+ })
+
+ # Function result delta (optional store)
+ elif delta_type == "function_result":
+ # Can be used to log tool results; not altering UI text
+ _ensure_list_attr(state, "google_function_results")
+ try:
+ state.google_function_results.append(_to_plain_dict(delta))
+ except Exception:
+ pass
+
+ # Code execution: code + result
+ elif delta_type == "code_execution_call":
+ lang = (_get(delta, "language", None) or "python").strip() or "python"
+ code_txt = _get(delta, "code", "") or ""
+ if not state.is_code:
+ response_parts.append(f"\n\n**Code interpreter**\n```{lang.lower()}\n{code_txt}")
+ state.is_code = True
+ else:
+ response_parts.append(str(code_txt))
+ elif delta_type == "code_execution_result":
+ # Close code block; keep output logging internal if needed
+ if state.is_code:
+ response_parts.append("\n\n```\n-----------\n")
+ state.is_code = False
+ _ensure_list_attr(state, "google_code_results")
+ try:
+ state.google_code_results.append(_to_plain_dict(delta))
+ except Exception:
+ pass
+
+ # Images in stream
+ elif delta_type == "image":
+ # ImageDelta may contain base64 data or uri
+ mime = (_get(delta, "mime_type", "") or "").lower()
+ data_b64 = _get(delta, "data", None)
+ uri = _get(delta, "uri", None)
+ if data_b64:
+ try:
+ img_bytes = base64.b64decode(data_b64)
+ save_path = core.image.gen_unique_path(ctx)
+ with open(save_path, "wb") as f:
+ f.write(img_bytes)
+ if not isinstance(ctx.images, list):
+ ctx.images = []
+ ctx.images.append(save_path)
+ state.image_paths.append(save_path)
+ state.has_google_inline_image = True
+ except Exception:
+ pass
+ elif uri:
+ try:
+ if not hasattr(ctx, "urls") or ctx.urls is None:
+ ctx.urls = []
+ ctx.urls.append(uri)
+ except Exception:
+ pass
+
+ # URL context call/result (Deep Research tool)
+ elif delta_type == "url_context_call":
+ urls = _get(delta, "urls", []) or []
+ _ensure_list_attr(state, "google_url_context_calls")
+ try:
+ state.google_url_context_calls.append({"urls": list(urls)})
+ except Exception:
+ pass
+ elif delta_type == "url_context_result":
+ url = _get(delta, "url", None)
+ status = _get(delta, "status", None)
+ _ensure_list_attr(state, "google_url_context_results")
+ try:
+ state.google_url_context_results.append({"url": url, "status": status, "raw": _to_plain_dict(delta)})
+ except Exception:
+ pass
+ if url:
+ try:
+ if not hasattr(ctx, "urls") or ctx.urls is None:
+ ctx.urls = []
+ ctx.urls.append(url)
+ except Exception:
+ pass
+
+ # Google Search call/result (Deep Research tool)
+ elif delta_type == "google_search_call":
+ queries = _get(delta, "queries", []) or []
+ _ensure_list_attr(state, "google_research_queries")
+ try:
+ state.google_research_queries.extend(list(queries))
+ except Exception:
+ pass
+ elif delta_type == "google_search_result":
+ url = _get(delta, "url", None)
+ title = _get(delta, "title", None)
+ rendered = _get(delta, "rendered_content", None)
+ _ensure_list_attr(state, "google_search_results")
+ try:
+ state.google_search_results.append({
+ "url": url,
+ "title": title,
+ "rendered_content": rendered,
+ "raw": _to_plain_dict(delta),
+ })
+ except Exception:
+ pass
+ if url:
+ try:
+ if not hasattr(ctx, "urls") or ctx.urls is None:
+ ctx.urls = []
+ ctx.urls.append(url)
+ except Exception:
+ pass
+
+ # File search results (optional)
+ elif delta_type == "file_search_result":
+ _ensure_list_attr(state, "google_file_search_results")
+ try:
+ state.google_file_search_results.append(_to_plain_dict(delta))
+ except Exception:
+ pass
+
+ # Thought signature delta (optional, store)
+ elif delta_type == "thought_signature":
+ _ensure_list_attr(state, "google_thought_signatures")
+ try:
+ state.google_thought_signatures.append(_to_plain_dict(delta))
+ except Exception:
+ pass
+
+ # Other modalities: audio/video/document (store URIs if available)
+ elif delta_type in ("audio", "video", "document"):
+ uri = _get(delta, "uri", None)
+ if uri:
+ try:
+ if not hasattr(ctx, "urls") or ctx.urls is None:
+ ctx.urls = []
+ ctx.urls.append(uri)
+ except Exception:
+ pass
+
+ except Exception:
+ pass
+
+ # Let Computer Use handler inspect chunk and tool calls (no-op if irrelevant)
+ new_calls, has_calls = core.api.google.computer.handle_stream_chunk(ctx, chunk, new_calls)
+ if has_calls:
+ ctx.extra["function_response_required"] = True # required for automatic with-screenshot response
+ ctx.extra["function_response_source"] = "ctx.tool_calls"
+ ctx.extra["function_response_reason"] = "computer_use"
+ state.force_func_call = True
+
  if new_calls:
  seen = {(tc["function"]["name"], tc["function"]["arguments"]) for tc in state.tool_calls}
  for tc in new_calls:
@@ -115,6 +406,7 @@ def process_google_chunk(ctx, core, state, chunk) -> Optional[str]:
  state.tool_calls.append(tc)
  seen.add(key)
 
+ # Responses API-specific: parts parsing (code, images, file_data, etc.)
  try:
  cands = getattr(chunk, "candidates", None) or []
  for cand in cands:
@@ -178,4 +470,18 @@ def process_google_chunk(ctx, core, state, chunk) -> Optional[str]:
  except Exception:
  pass
 
+ # Interactions API citations: try to collect from delta.annotations if present
+ try:
+ if _get(chunk, "event_type", None) == "content.delta":
+ delta = _get(chunk, "delta", {}) or {}
+ annotations = _get(delta, "annotations", None)
+ if annotations:
+ _ensure_list_attr(state, "google_annotations")
+ try:
+ state.google_annotations.extend(_to_plain_dict(annotations) or [])
+ except Exception:
+ pass
+ except Exception:
+ pass
+
  return "".join(response_parts) if response_parts else None
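A toy, standalone sketch of the two-level dispatch the new Interactions API branch performs (first on `event_type`, then on `delta.type`). The event objects below are mocked with `SimpleNamespace`; real chunks come from the google-genai SDK and carry more fields, so treat this as an assumption-laden illustration rather than the actual handler:

```python
from types import SimpleNamespace

def collect_text(events):
    """Accumulate text deltas and remember the interaction id, mirroring the branching above."""
    parts, interaction_id = [], None
    for ev in events:
        etype = getattr(ev, "event_type", None)
        if etype == "interaction.start":
            interaction_id = getattr(ev.interaction, "id", None)
        elif etype == "content.delta":
            delta = getattr(ev, "delta", None)
            if delta is not None and getattr(delta, "type", "") == "text":
                parts.append(getattr(delta, "text", "") or "")
        elif etype == "interaction.complete":
            break  # final event; the real handler also captures usage here
    return interaction_id, "".join(parts)

stream = [
    SimpleNamespace(event_type="interaction.start",
                    interaction=SimpleNamespace(id="int_123")),
    SimpleNamespace(event_type="content.delta",
                    delta=SimpleNamespace(type="text", text="Hello ")),
    SimpleNamespace(event_type="content.delta",
                    delta=SimpleNamespace(type="text", text="world")),
    SimpleNamespace(event_type="interaction.complete",
                    interaction=SimpleNamespace(usage=None)),
]
print(collect_text(stream))  # ('int_123', 'Hello world')
```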
pygpt_net/controller/chat/handler/worker.py
@@ -62,6 +62,7 @@ class ChunkType(str, Enum):
  LANGCHAIN_CHAT = "langchain_chat" # LangChain chat (deprecated)
  LLAMA_CHAT = "llama_chat" # LlamaIndex chat
  GOOGLE = "google" # Google SDK
+ GOOGLE_INTERACTIONS_API = "api_google_interactions" # Google SDK, deep research - interactions
  ANTHROPIC = "anthropic" # Anthropic SDK
  XAI_SDK = "xai_sdk" # xAI SDK
  RAW = "raw" # Raw string fallback
@@ -161,6 +162,12 @@ class StreamWorker(QRunnable):
  state.chunk_type = ChunkType.API_CHAT_RESPONSES
  else:
  continue
+ elif ctx.use_google_interactions_api:
+ if hasattr(chunk, 'event_type'):
+ etype = chunk.event_type # type: ignore[assignment]
+ state.chunk_type = ChunkType.GOOGLE_INTERACTIONS_API
+ else:
+ continue
  else:
  state.chunk_type = self._detect_chunk_type(chunk)
 
@@ -512,7 +519,7 @@
  return self._process_langchain_chat(chunk)
  if t == ChunkType.LLAMA_CHAT:
  return self._process_llama_chat(state, chunk)
- if t == ChunkType.GOOGLE:
+ if t == ChunkType.GOOGLE or t == ChunkType.GOOGLE_INTERACTIONS_API:
  return self._process_google_chunk(ctx, core, state, chunk)
  if t == ChunkType.ANTHROPIC:
  return self._process_anthropic_chunk(ctx, core, state, chunk)
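To make the routing above concrete: when `ctx.use_google_interactions_api` is set, any chunk exposing an `event_type` attribute is tagged `GOOGLE_INTERACTIONS_API` and then funneled into the same `_process_google_chunk()` path as regular `GOOGLE` chunks, which is why the google_stream handler gained the event-type branches. A reduced sketch (the enum is redeclared locally just for illustration):

```python
from enum import Enum
from types import SimpleNamespace
from typing import Optional

class ChunkType(str, Enum):
    GOOGLE = "google"
    GOOGLE_INTERACTIONS_API = "api_google_interactions"

def classify(chunk, use_google_interactions_api: bool) -> Optional[ChunkType]:
    """Mirror of the new branch: tag Interactions API events, skip chunks without event_type."""
    if use_google_interactions_api:
        if hasattr(chunk, "event_type"):
            return ChunkType.GOOGLE_INTERACTIONS_API
        return None  # the worker does 'continue' here
    return ChunkType.GOOGLE  # stand-in for self._detect_chunk_type(chunk) in the real worker

print(classify(SimpleNamespace(event_type="content.delta"), True))  # ChunkType.GOOGLE_INTERACTIONS_API
print(classify(SimpleNamespace(text="hi"), True))                   # None -> chunk skipped
```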
pygpt_net/controller/chat/image.py
@@ -6,7 +6,7 @@
  # GitHub: https://github.com/szczyglis-dev/py-gpt #
  # MIT License #
  # Created By : Marcin Szczygliński #
- # Updated Date: 2025.12.31 16:00:00 #
+ # Updated Date: 2026.01.01 15:00:00 #
  # ================================================== #
 
  import os
@@ -186,7 +186,7 @@ class Image:
  f"![image]({ico_download})[**{trans('action.download')}**](bridge://download/{safe_path})")
  """
  urls.append(f"[**{trans('action.open')}**]({safe_path}) | "
- f"[**{trans('action.download')}**](bridge://download/{safe_path})")
+ f"[**{trans('action.save_as')}**](bridge://download/{safe_path})")
  i += 1
  string += "\n".join(urls)