pygpt-net 2.5.14__py3-none-any.whl → 2.5.16__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (43)
  1. pygpt_net/CHANGELOG.txt +12 -0
  2. pygpt_net/__init__.py +3 -3
  3. pygpt_net/controller/chat/input.py +9 -2
  4. pygpt_net/controller/chat/stream.py +65 -17
  5. pygpt_net/controller/lang/mapping.py +4 -2
  6. pygpt_net/controller/model/__init__.py +3 -1
  7. pygpt_net/controller/model/importer.py +337 -0
  8. pygpt_net/controller/settings/editor.py +3 -0
  9. pygpt_net/core/bridge/worker.py +4 -2
  10. pygpt_net/core/command/__init__.py +33 -2
  11. pygpt_net/core/models/__init__.py +6 -3
  12. pygpt_net/core/models/ollama.py +7 -2
  13. pygpt_net/data/config/config.json +9 -4
  14. pygpt_net/data/config/models.json +22 -22
  15. pygpt_net/data/locale/locale.de.ini +18 -0
  16. pygpt_net/data/locale/locale.en.ini +19 -2
  17. pygpt_net/data/locale/locale.es.ini +18 -0
  18. pygpt_net/data/locale/locale.fr.ini +18 -0
  19. pygpt_net/data/locale/locale.it.ini +18 -0
  20. pygpt_net/data/locale/locale.pl.ini +19 -1
  21. pygpt_net/data/locale/locale.uk.ini +18 -0
  22. pygpt_net/data/locale/locale.zh.ini +17 -0
  23. pygpt_net/item/ctx.py +2 -1
  24. pygpt_net/item/model.py +5 -1
  25. pygpt_net/plugin/cmd_files/__init__.py +2 -2
  26. pygpt_net/plugin/cmd_files/worker.py +2 -2
  27. pygpt_net/provider/core/model/json_file.py +3 -0
  28. pygpt_net/provider/core/model/patch.py +24 -1
  29. pygpt_net/provider/gpt/__init__.py +54 -21
  30. pygpt_net/provider/gpt/responses.py +279 -0
  31. pygpt_net/provider/gpt/vision.py +40 -16
  32. pygpt_net/provider/llms/ollama.py +7 -2
  33. pygpt_net/provider/llms/ollama_custom.py +693 -0
  34. pygpt_net/ui/dialog/models_importer.py +82 -0
  35. pygpt_net/ui/dialogs.py +3 -1
  36. pygpt_net/ui/menu/config.py +18 -7
  37. pygpt_net/ui/widget/dialog/model_importer.py +55 -0
  38. pygpt_net/ui/widget/lists/model_importer.py +151 -0
  39. {pygpt_net-2.5.14.dist-info → pygpt_net-2.5.16.dist-info}/METADATA +75 -9
  40. {pygpt_net-2.5.14.dist-info → pygpt_net-2.5.16.dist-info}/RECORD +43 -37
  41. {pygpt_net-2.5.14.dist-info → pygpt_net-2.5.16.dist-info}/LICENSE +0 -0
  42. {pygpt_net-2.5.14.dist-info → pygpt_net-2.5.16.dist-info}/WHEEL +0 -0
  43. {pygpt_net-2.5.14.dist-info → pygpt_net-2.5.16.dist-info}/entry_points.txt +0 -0
pygpt_net/CHANGELOG.txt CHANGED
@@ -1,3 +1,15 @@
+ 2.5.16 (2025-06-25)
+
+ - OpenAI API upgraded to 1.91.0.
+ - Chat mode migrated to Responses API with native built-in web search tool. (beta)
+ - Fixed file_read tool in I/O plugin.
+
+ 2.5.15 (2025-06-24)
+
+ - Added Ollama models importer in "Settings -> Models -> Import from Ollama".
+ - Fixed Ollama provider in the newest LlamaIndex.
+ - Added the ability to set a custom base URL for Ollama -> ENV: OLLAMA_API_BASE.
+
  2.5.14 (2025-06-23)

  - Fix: crash if empty shortcuts in config.
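
Note (not part of the diff): the 2.5.15 entry above adds OLLAMA_API_BASE support. A minimal sketch of pointing the Ollama integration at a non-default server by exporting that variable before start-up; the URL below is only an example value, and the fallback matches the default used by the importer later in this diff:

    import os

    # must be set before the Ollama client is created; example server address
    os.environ["OLLAMA_API_BASE"] = "http://192.168.1.50:11434"

    # reading it back with the same fallback the importer uses
    base_url = os.environ.get("OLLAMA_API_BASE", "http://localhost:11434")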
pygpt_net/__init__.py CHANGED
@@ -6,15 +6,15 @@
  # GitHub: https://github.com/szczyglis-dev/py-gpt #
  # MIT License #
  # Created By : Marcin Szczygliński #
- # Updated Date: 2025-06-23 19:00:00 #
+ # Updated Date: 2025-06-25 02:00:00 #
  # ================================================== #

  __author__ = "Marcin Szczygliński"
  __copyright__ = "Copyright 2025, Marcin Szczygliński"
  __credits__ = ["Marcin Szczygliński"]
  __license__ = "MIT"
- __version__ = "2.5.14"
- __build__ = "2025-06-23"
+ __version__ = "2.5.16"
+ __build__ = "2025-06-25"
  __maintainer__ = "Marcin Szczygliński"
  __github__ = "https://github.com/szczyglis-dev/py-gpt"
  __report__ = "https://github.com/szczyglis-dev/py-gpt/issues"
pygpt_net/controller/chat/input.py CHANGED
@@ -6,9 +6,9 @@
  # GitHub: https://github.com/szczyglis-dev/py-gpt #
  # MIT License #
  # Created By : Marcin Szczygliński #
- # Updated Date: 2025.02.02 02:00:00 #
+ # Updated Date: 2025.06.24 16:00:00 #
  # ================================================== #
-
+ import os
  from typing import Optional, Any, Dict

  from pygpt_net.core.bridge import BridgeContext
@@ -80,6 +80,13 @@ class Input:
          model_data = self.window.core.models.get(model)
          if model_data is not None and model_data.is_ollama():
              model_id = model_data.get_ollama_model()
+             # load ENV vars first
+             if ('env' in model_data.llama_index
+                     and model_data.llama_index['env'] is not None):
+                 for item in model_data.llama_index['env']:
+                     key = item.get('name', '').strip()
+                     value = item.get('value', '').strip()
+                     os.environ[key] = value
              status = self.window.core.models.ollama.check_model(model_id)
              is_installed = status.get('is_installed', False)
              is_model = status.get('is_model', False)
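
Note (not part of the diff): the env list iterated above is expected to be a list of name/value dicts stored under the model's llama_index config. A standalone sketch of the same export step; llama_index_cfg and its entry are illustrative only:

    import os

    # hypothetical model config fragment, shaped as the loop above expects
    llama_index_cfg = {
        "env": [
            {"name": "OLLAMA_API_BASE", "value": "http://localhost:11434"},
        ],
    }

    for item in llama_index_cfg.get("env") or []:
        key = item.get("name", "").strip()
        value = item.get("value", "").strip()
        if key:
            os.environ[key] = value  # exported before the Ollama model check runs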
pygpt_net/controller/chat/stream.py CHANGED
@@ -6,7 +6,7 @@
  # GitHub: https://github.com/szczyglis-dev/py-gpt #
  # MIT License #
  # Created By : Marcin Szczygliński #
- # Updated Date: 2025.03.02 19:00:00 #
+ # Updated Date: 2025.06.25 02:00:00 #
  # ================================================== #

  from typing import Any
@@ -36,6 +36,9 @@ class Stream:
          output_tokens = 0
          begin = True
          error = None
+         tool_calls = []
+         fn_args_buffers = {}
+         citations = []

          # chunks: stream begin
          data = {
@@ -60,24 +63,32 @@
              if error is not None:
                  break # break if error

+             etype = None
              response = None
              chunk_type = "raw"
-             if (hasattr(chunk, 'choices')
-                     and chunk.choices[0] is not None
-                     and hasattr(chunk.choices[0], 'delta')
-                     and chunk.choices[0].delta is not None):
-                 chunk_type = "api_chat"
-             elif (hasattr(chunk, 'choices')
-                     and chunk.choices[0] is not None
-                     and hasattr(chunk.choices[0], 'text')
-                     and chunk.choices[0].text is not None):
-                 chunk_type = "api_completion"
-             elif (hasattr(chunk, 'content')
-                     and chunk.content is not None):
-                 chunk_type = "langchain_chat"
-             elif (hasattr(chunk, 'delta')
-                     and chunk.delta is not None):
-                 chunk_type = "llama_chat"
+             if ctx.use_responses_api:
+                 if hasattr(chunk, 'type'): # streaming event type
+                     etype = chunk.type
+                     chunk_type = "api_chat_responses" # responses API
+                 else:
+                     continue
+             else:
+                 if (hasattr(chunk, 'choices')
+                         and chunk.choices[0] is not None
+                         and hasattr(chunk.choices[0], 'delta')
+                         and chunk.choices[0].delta is not None):
+                     chunk_type = "api_chat" # chat completions API
+                 elif (hasattr(chunk, 'choices')
+                         and chunk.choices[0] is not None
+                         and hasattr(chunk.choices[0], 'text')
+                         and chunk.choices[0].text is not None):
+                     chunk_type = "api_completion"
+                 elif (hasattr(chunk, 'content')
+                         and chunk.content is not None):
+                     chunk_type = "langchain_chat"
+                 elif (hasattr(chunk, 'delta')
+                         and chunk.delta is not None):
+                     chunk_type = "llama_chat"

              # OpenAI chat completion
              if chunk_type == "api_chat":
@@ -110,6 +121,43 @@
                          if tool_chunk.function.arguments:
                              tool_call["function"]["arguments"] += tool_chunk.function.arguments

+             elif chunk_type == "api_chat_responses":
+
+                 if etype == "response.output_text.delta":
+                     response = chunk.delta
+
+                 # ---------- function_call ----------
+                 elif etype == "response.output_item.added" and chunk.item.type == "function_call":
+                     tool_calls.append({
+                         "id": chunk.item.id,
+                         "type": "function",
+                         "function": {"name": chunk.item.name, "arguments": ""}
+                     })
+                     fn_args_buffers[chunk.item.id] = ""
+
+                 elif etype == "response.function_call_arguments.delta":
+                     fn_args_buffers[chunk.item_id] += chunk.delta
+
+                 elif etype == "response.function_call_arguments.done":
+                     for tc in tool_calls:
+                         if tc["id"] == chunk.item_id:
+                             tc["function"]["arguments"] = fn_args_buffers[chunk.item_id]
+                             break
+                     fn_args_buffers.pop(chunk.item_id, None)
+
+                 # ---------- annotations ----------
+                 elif etype == "response.output_text.annotation.added":
+                     if chunk.annotation['type'] == "url_citation":
+                         if citations is None:
+                             citations = []
+                         url_citation = chunk.annotation['url']
+                         citations.append(url_citation)
+                         ctx.urls = citations
+
+                 # ---------- end / error ----------
+                 elif etype in {"response.done", "response.failed", "error"}:
+                     pass
+
              # OpenAI completion
              elif chunk_type == "api_completion":
                  if chunk.choices[0].text is not None:
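
Note (not part of the diff): the new "api_chat_responses" branch consumes typed streaming events from the OpenAI Responses API instead of Chat Completions deltas. A minimal standalone sketch of the same event loop; the model name and prompt are placeholders, and attribute access mirrors the handler above:

    from openai import OpenAI  # openai >= 1.x

    client = OpenAI()
    stream = client.responses.create(model="gpt-4o", input="Hello", stream=True)

    text, tool_calls, fn_args, citations = "", [], {}, []
    for event in stream:
        if event.type == "response.output_text.delta":
            text += event.delta  # incremental assistant text
        elif event.type == "response.output_item.added" and event.item.type == "function_call":
            tool_calls.append({"id": event.item.id, "type": "function",
                               "function": {"name": event.item.name, "arguments": ""}})
            fn_args[event.item.id] = ""
        elif event.type == "response.function_call_arguments.delta":
            fn_args[event.item_id] += event.delta  # buffer partial JSON arguments
        elif event.type == "response.function_call_arguments.done":
            for tc in tool_calls:
                if tc["id"] == event.item_id:
                    tc["function"]["arguments"] = fn_args.pop(event.item_id, "")
                    break
        elif event.type == "response.output_text.annotation.added":
            if event.annotation["type"] == "url_citation":
                citations.append(event.annotation["url"])  # web search citations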
pygpt_net/controller/lang/mapping.py CHANGED
@@ -6,7 +6,7 @@
  # GitHub: https://github.com/szczyglis-dev/py-gpt #
  # MIT License #
  # Created By : Marcin Szczygliński #
- # Updated Date: 2025.01.18 16:00:00 #
+ # Updated Date: 2025.06.24 02:00:00 #
  # ================================================== #

  from typing import Dict
@@ -351,6 +351,7 @@ class Mapping:
          menu_title['config.edit.css'] = 'menu.config.edit.css'
          menu_title['config.edit.json'] = 'menu.config.edit.json'
          menu_title['config.profile'] = 'menu.config.profile'
+         menu_title['config.models'] = 'menu.config.models'
          menu_title['menu.lang'] = 'menu.lang'
          menu_title['menu.debug'] = 'menu.debug'
          menu_title['menu.theme'] = 'menu.theme'
@@ -376,7 +377,8 @@
          menu_text['app.clear_history_groups'] = 'menu.file_clear_history_groups'
          menu_text['app.exit'] = 'menu.file.exit'
          menu_text['config.settings'] = 'menu.config.settings'
-         menu_text['config.models'] = 'menu.config.models'
+         menu_text['config.models.edit'] = 'menu.config.models.edit'
+         menu_text['config.models.import.ollama'] = 'menu.config.models.import.ollama'
          menu_text['config.access'] = 'menu.config.access'
          menu_text['config.open_dir'] = 'menu.config.open_dir'
          menu_text['config.change_dir'] = 'menu.config.change_dir'
pygpt_net/controller/model/__init__.py CHANGED
@@ -6,7 +6,7 @@
  # GitHub: https://github.com/szczyglis-dev/py-gpt #
  # MIT License #
  # Created By : Marcin Szczygliński #
- # Updated Date: 2025.02.02 02:00:00 #
+ # Updated Date: 2025.06.24 02:00:00 #
  # ================================================== #

  from typing import Optional
@@ -15,6 +15,7 @@ from pygpt_net.core.events import Event, AppEvent
  from pygpt_net.item.model import ModelItem

  from .editor import Editor
+ from .importer import Importer


  class Model:
@@ -26,6 +27,7 @@
          """
          self.window = window
          self.editor = Editor(window)
+         self.importer = Importer(window)

      def select(self, model: str):
          """
pygpt_net/controller/model/importer.py ADDED
@@ -0,0 +1,337 @@
+ #!/usr/bin/env python3
+ # -*- coding: utf-8 -*-
+ # ================================================== #
+ # This file is a part of PYGPT package #
+ # Website: https://pygpt.net #
+ # GitHub: https://github.com/szczyglis-dev/py-gpt #
+ # MIT License #
+ # Created By : Marcin Szczygliński #
+ # Updated Date: 2025.06.24 02:00:00 #
+ # ================================================== #
+
+ import copy
+ import os
+ from typing import List, Dict, Optional
+
+ from pygpt_net.utils import trans
+
+
+ class Importer:
+     def __init__(self, window=None):
+         """
+         Models importer controller
+
+         :param window: Window instance
+         """
+         self.window = window
+         self.dialog = False
+         self.initialized = False
+         self.width = 800
+         self.height = 500
+         self.items_available = {}  # available models for import
+         self.items_current = {}  # current models in use
+         self.pending = {}  # waiting to be imported models
+         self.removed = {}  # waiting to be removed models
+         self.selected_available = None  # selected available model
+         self.selected_current = None  # selected current model
+         self.all = False  # show all models, not only available for import
+
+     def in_current(self, model: str) -> bool:
+         """
+         Check if model is in current list
+
+         :param model: model ID
+         :return: True if model is in current list, False otherwise
+         """
+         if model in self.items_current:
+             return True
+         for key in list(self.items_current.keys()):
+             m = self.items_current[key]
+             if m.id == model:  # also check in IDs
+                 return True
+         return False
+
+     def change_available(self):
+         """On change available model selection"""
+         val = self.window.ui.nodes["models.importer.available"].selectionModel().currentIndex()
+         idx = val.row()
+         if idx < 0:
+             self.selected_available = None
+             self.window.ui.nodes["models.importer.add"].setEnabled(False)
+         else:
+             self.selected_available = self.get_by_idx(idx, self.items_available)
+             if self.items_available.get(self.selected_available) is None:
+                 self.selected_available = None
+                 self.window.ui.nodes["models.importer.add"].setEnabled(False)
+             else:
+                 # if not in current then enable add button
+                 if not self.in_current(self.selected_available):
+                     self.window.ui.nodes["models.importer.add"].setEnabled(True)
+                 else:
+                     self.window.ui.nodes["models.importer.add"].setEnabled(False)
+
+     def change_current(self):
+         """On change current model selection"""
+         val = self.window.ui.nodes["models.importer.current"].selectionModel().currentIndex()
+         idx = val.row()
+         if idx < 0:
+             self.selected_current = None
+             self.window.ui.nodes["models.importer.remove"].setEnabled(False)
+         else:
+             self.selected_current = self.get_by_idx(idx, self.items_current)
+             if self.items_current.get(self.selected_current) is None:
+                 self.selected_current = None
+                 self.window.ui.nodes["models.importer.remove"].setEnabled(False)
+             else:
+                 if self.selected_current in self.items_current and self.items_current[self.selected_current].imported:
+                     self.window.ui.nodes["models.importer.remove"].setEnabled(True)
+                 else:
+                     self.window.ui.nodes["models.importer.remove"].setEnabled(False)
+
+     def add(self):
+         """Add model to current list"""
+         if self.selected_available is None:
+             self.set_status(trans('models.importer.error.add.no_model'))
+             return
+         if self.in_current(self.selected_available):
+             self.set_status(trans('models.importer.error.add.not_exists'))
+             return
+         model = self.items_available[self.selected_available]
+         self.items_current[self.selected_available] = model
+         if self.selected_available not in self.pending:
+             self.pending[self.selected_available] = model
+         if self.selected_available in self.removed:
+             del self.removed[self.selected_available]
+         if not self.all:
+             del self.items_available[self.selected_available]
+         self.refresh()
+
+     def remove(self):
+         """Remove model from current list"""
+         if self.selected_current is None:
+             self.set_status(trans('models.importer.error.remove.no_model'))
+             return
+         if not self.in_current(self.selected_current):
+             self.set_status(trans('models.importer.error.remove.not_exists'))
+             return
+         model = self.items_current[self.selected_current]
+         self.items_available[self.selected_current] = model
+         if self.selected_current not in self.removed:
+             self.removed[self.selected_current] = model
+         del self.items_current[self.selected_current]
+         if self.selected_current in self.pending:
+             del self.pending[self.selected_current]
+         self.refresh()
+
+     def setup(self):
+         """Set up importer"""
+         idx = None
+         self.window.model_importer.setup(idx)  # widget dialog setup
+
+     def toggle_editor(self):
+         """Toggle models importer dialog"""
+         if self.dialog:
+             self.close()
+         else:
+             self.open()
+
+     def open(self, force: bool = False):
+         """
+         Open models editor dialog
+
+         :param force: force open dialog
+         """
+         if not self.initialized:
+             self.setup()
+             self.initialized = True
+         if not self.dialog or force:
+             self.pending = {}
+             self.removed = {}
+             self.init()
+             self.window.ui.dialogs.open(
+                 "models.importer",
+                 width=self.width,
+                 height=self.height,
+             )
+             self.dialog = True
+
+     def close(self):
+         """Close models importer dialog"""
+         if self.dialog:
+             self.window.ui.dialogs.close('models.importer')
+             self.dialog = False
+
+     def cancel(self):
+         """Cancel models importer dialog"""
+         self.close()
+
+     def init(self):
+         """Initialize importer"""
+         if self.initialized and self.window.ui.nodes["models.importer.available.all"].isChecked():
+             self.all = True
+
+         base_url = "http://localhost:11434"
+         if 'OLLAMA_API_BASE' in os.environ:
+             base_url = os.environ['OLLAMA_API_BASE']
+         self.window.ui.nodes["models.importer.url"].setText(base_url)
+         self.items_available = self.get_ollama_available()
+         self.items_current = self.get_ollama_current()
+         self.refresh()
+
+     def toggle_all(self, all: bool):
+         """
+         Toggle all models visibility
+
+         :param all: show all models, not only available for import
+         """
+         self.all = all
+         self.refresh(reload=True)
+
+     def set_status(self, status: str):
+         """
+         Set status message
+
+         :param status: status message
+         """
+         if self.initialized:
+             self.window.ui.nodes["models.importer.status"].setText(status)
+
+     def get_ollama_current(self) -> Dict:
+         """
+         Get current ollama models
+
+         :return: ollama models dictionary
+         """
+         items = copy.deepcopy(self.window.core.models.items)
+         for key in list(items.keys()):
+             if (items[key].llama_index is None
+                     or 'provider' not in items[key].llama_index
+                     or items[key].llama_index['provider'] != 'ollama'):
+                 del items[key]
+         return items
+
+     def get_ollama_available(self) -> Dict:
+         """
+         Get available ollama models
+
+         :return: ollama models dictionary
+         """
+         models = {}
+         status = self.window.core.models.ollama.get_status()
+         if not status['status']:
+             self.set_status(trans('models.importer.error.no_connection'))
+             return models
+         else:
+             ollama_models = status.get('models', [])
+             if not ollama_models:
+                 self.set_status(trans('models.importer.error.no_models'))
+                 return models
+             else:
+                 for model in ollama_models:
+                     name = model.get('name').replace(":latest", "")
+                     m = self.window.core.models.create_empty(append=False)
+                     m.id = name
+                     m.name = name
+                     m.mode = [
+                         "llama_index",
+                         "agent",
+                         "agent_llama",
+                         "expert",
+                     ]
+                     m.llama_index['provider'] = 'ollama'
+                     m.llama_index['mode'] = ['chat']
+                     m.llama_index['args'] = [
+                         {
+                             'name': 'model',
+                             'value': name,
+                             'type': 'str'
+                         }
+                     ]
+                     m.langchain['provider'] = 'ollama'
+                     m.langchain['mode'] = ['chat']
+                     m.langchain['args'] = [
+                         {
+                             'name': 'model',
+                             'value': name,
+                             'type': 'str'
+                         }
+                     ]
+                     m.imported = True
+                     m.ctx = 32000  # default
+                     key = m.id
+                     #if key in self.items_current:
+                     #key += "_imported"
+                     models[key] = m
+                 self.set_status(trans('models.importer.loaded'))
+         return models
+
+     def from_pending(self):
+         """Move pending models to base list"""
+         added = False
+         base_models = self.window.core.models.items
+         for key in list(self.pending.keys()):
+             if key not in base_models:
+                 base_models[key] = copy.deepcopy(self.pending[key])
+                 base_models[key].imported = True
+                 added = True
+         for key in list(self.removed.keys()):
+             if key in base_models:
+                 del base_models[key]
+                 added = True
+         self.pending = {}
+         self.removed = {}
+         if added:
+             self.window.core.models.save()
+             self.set_status(trans('models.importer.status.imported'))
+
+     def save(self, persist: bool = True):
+         """
+         Save models
+
+         :param persist: persist to file and close dialog
+         """
+         self.from_pending()
+         self.window.controller.model.init_list()
+         self.window.controller.model.update()
+         self.close()
+
+     def refresh(self, reload: bool = False):
+         """
+         Reload items
+
+         :param reload: reload available models
+         """
+         if reload:
+             self.items_available = self.get_ollama_available()
+
+         # remove from available if already in current
+         if not self.all:
+             for key in list(self.items_available.keys()):
+                 if self.in_current(key):
+                     del self.items_available[key]
+
+         self.window.ui.nodes['models.importer.editor'].update_available(self.items_available)
+         self.window.ui.nodes['models.importer.editor'].update_current(self.items_current)
+
+     def get_by_idx(self, idx: int, items: Dict) -> Optional[str]:
+         """
+         Get model key by list index
+
+         :param idx: list index
+         :param items: items dictionary
+         :return: model key
+         """
+         model_idx = 0
+         for id in self.get_ids(items):
+             if model_idx == idx:
+                 return id
+             model_idx += 1
+         return None
+
+     def get_ids(self, items: Dict) -> List[str]:
+         """
+         Return models ids
+
+         :return: model ids list
+         """
+         return list(items.keys())
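
Note (not part of the diff): the helper models.ollama.get_status(), used by get_ollama_available() above, is not shown in this changeset. A minimal standalone equivalent is sketched below; it assumes Ollama's standard /api/tags REST endpoint, the requests library, and the same OLLAMA_API_BASE fallback used in init(), and the function name get_ollama_status is hypothetical:

    import os
    import requests

    def get_ollama_status(timeout: float = 5.0) -> dict:
        """Return {'status': bool, 'models': [{'name': ...}, ...]} like the core helper."""
        base_url = os.environ.get("OLLAMA_API_BASE", "http://localhost:11434")
        try:
            resp = requests.get(f"{base_url}/api/tags", timeout=timeout)
            resp.raise_for_status()
            return {"status": True, "models": resp.json().get("models", [])}
        except requests.RequestException:
            return {"status": False, "models": []}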
pygpt_net/controller/settings/editor.py CHANGED
@@ -203,6 +203,9 @@ class Editor:
          if self.config_changed('access.shortcuts'):
              self.window.setup_global_shortcuts()

+         # update ENV
+         self.window.core.config.setup_env()
+
          self.before_config = copy.deepcopy(self.window.core.config.all())
          self.window.controller.settings.close_window(id)

pygpt_net/core/bridge/worker.py CHANGED
@@ -6,7 +6,7 @@
  # GitHub: https://github.com/szczyglis-dev/py-gpt #
  # MIT License #
  # Created By : Marcin Szczygliński #
- # Updated Date: 2024.11.29 23:00:00 #
+ # Updated Date: 2025.06.25 02:00:00 #
  # ================================================== #

  from PySide6.QtCore import QObject, Signal, QRunnable, Slot
@@ -15,6 +15,7 @@ from pygpt_net.core.types import (
      MODE_AGENT_LLAMA,
      MODE_LANGCHAIN,
      MODE_LLAMA_INDEX,
+     MODE_ASSISTANT,
  )
  from pygpt_net.core.events import KernelEvent, Event

@@ -48,7 +49,8 @@ class BridgeWorker(QObject, QRunnable):
          self.handle_post_prompt_async()

          # ADDITIONAL CONTEXT: append additional context from attachments
-         self.handle_additional_context()
+         if self.mode != MODE_ASSISTANT:
+             self.handle_additional_context()

          # POST PROMPT END: handle post prompt end event
          self.handle_post_prompt_end()
pygpt_net/core/command/__init__.py CHANGED
@@ -6,7 +6,7 @@
  # GitHub: https://github.com/szczyglis-dev/py-gpt #
  # MIT License #
  # Created By : Marcin Szczygliński #
- # Updated Date: 2025.02.02 02:00:00 #
+ # Updated Date: 2025.06.25 02:00:00 #
  # ================================================== #

  import copy
@@ -228,6 +228,34 @@ class Command:
                  print("Error parsing tool call: " + str(e))
          return parsed

+     def unpack_tool_calls_responses(
+             self,
+             tool_calls: List
+     ) -> List[Dict[str, Any]]:
+         """
+         Unpack tool calls from OpenAI response
+
+         :param tool_calls: tool calls list
+         :return: parsed tool calls list
+         """
+         parsed = []
+         for tool_call in tool_calls:
+             try:
+                 parsed.append(
+                     {
+                         "id": tool_call.id,
+                         "type": "function",
+                         "function": {
+                             "name": tool_call.name,
+                             "arguments": json.loads(tool_call.arguments)
+                         }
+                     }
+                 )
+             except Exception as e:
+                 self.window.core.debug.log(e)
+                 print("Error parsing tool call: " + str(e))
+         return parsed
+
      def unpack_tool_calls_chunks(
              self,
              ctx: CtxItem,
@@ -503,6 +531,9 @@
          if "required" in param and param["required"]:
              required.append(param["name"])

+     if len(required) > 0:
+         params["required"] = required
+
      # extract params and convert to JSON schema format
      for param in cmd["params"]:
          try:
@@ -570,7 +601,7 @@
              elif params["properties"][key]["type"] == "list":
                  params["properties"][key]["type"] = "array"
                  params["properties"][key]["items"] = {
-                     "$ref": "#"
+                     "type": "string"
                  }
          except Exception as e:
              print(e)
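
Note (not part of the diff): the last two hunks change how command specs are converted into a JSON schema for native tool calls. For a hypothetical command with one required list-type param, the resulting fragment now looks like the sketch below; array items are typed as strings instead of the recursive {"$ref": "#"} placeholder, and "required" is emitted whenever any param is marked required. The "paths" parameter name is illustrative only:

    params = {
        "type": "object",
        "properties": {
            "paths": {                        # hypothetical list-type param
                "type": "array",
                "items": {"type": "string"},  # was: {"$ref": "#"}
            },
        },
        "required": ["paths"],
    }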