pygpt-net 2.6.10__py3-none-any.whl → 2.6.12__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (46)
  1. pygpt_net/CHANGELOG.txt +10 -0
  2. pygpt_net/__init__.py +3 -3
  3. pygpt_net/app.py +7 -1
  4. pygpt_net/config.py +11 -11
  5. pygpt_net/controller/access/access.py +49 -2
  6. pygpt_net/controller/chat/attachment.py +13 -13
  7. pygpt_net/controller/chat/command.py +4 -4
  8. pygpt_net/controller/chat/common.py +9 -14
  9. pygpt_net/controller/chat/files.py +2 -2
  10. pygpt_net/controller/chat/input.py +4 -4
  11. pygpt_net/controller/chat/output.py +4 -4
  12. pygpt_net/controller/chat/render.py +11 -6
  13. pygpt_net/controller/chat/response.py +7 -7
  14. pygpt_net/controller/chat/stream.py +11 -6
  15. pygpt_net/controller/chat/text.py +15 -10
  16. pygpt_net/controller/command/command.py +7 -7
  17. pygpt_net/controller/ctx/ctx.py +9 -5
  18. pygpt_net/controller/debug/debug.py +2 -2
  19. pygpt_net/core/ctx/bag.py +2 -1
  20. pygpt_net/core/debug/debug.py +17 -3
  21. pygpt_net/core/dispatcher/dispatcher.py +5 -3
  22. pygpt_net/core/events/render.py +3 -0
  23. pygpt_net/core/render/base.py +4 -4
  24. pygpt_net/core/render/web/body.py +83 -88
  25. pygpt_net/core/render/web/parser.py +11 -6
  26. pygpt_net/core/render/web/pid.py +19 -4
  27. pygpt_net/core/render/web/renderer.py +217 -74
  28. pygpt_net/data/config/config.json +3 -3
  29. pygpt_net/data/config/models.json +3 -3
  30. pygpt_net/data/config/presets/agent_openai.json +1 -1
  31. pygpt_net/data/config/presets/agent_openai_assistant.json +1 -1
  32. pygpt_net/data/config/presets/agent_planner.json +1 -1
  33. pygpt_net/data/config/presets/agent_react.json +1 -1
  34. pygpt_net/item/ctx.py +3 -3
  35. pygpt_net/launcher.py +2 -9
  36. pygpt_net/provider/gpt/__init__.py +13 -4
  37. pygpt_net/tools/code_interpreter/body.py +2 -3
  38. pygpt_net/ui/main.py +8 -3
  39. pygpt_net/ui/widget/textarea/html.py +2 -7
  40. pygpt_net/ui/widget/textarea/web.py +52 -28
  41. pygpt_net/utils.py +15 -8
  42. {pygpt_net-2.6.10.dist-info → pygpt_net-2.6.12.dist-info}/METADATA +12 -2
  43. {pygpt_net-2.6.10.dist-info → pygpt_net-2.6.12.dist-info}/RECORD +46 -46
  44. {pygpt_net-2.6.10.dist-info → pygpt_net-2.6.12.dist-info}/LICENSE +0 -0
  45. {pygpt_net-2.6.10.dist-info → pygpt_net-2.6.12.dist-info}/WHEEL +0 -0
  46. {pygpt_net-2.6.10.dist-info → pygpt_net-2.6.12.dist-info}/entry_points.txt +0 -0
pygpt_net/controller/chat/text.py CHANGED
@@ -6,7 +6,7 @@
  # GitHub: https://github.com/szczyglis-dev/py-gpt #
  # MIT License #
  # Created By : Marcin Szczygliński #
- # Updated Date: 2025.08.06 01:00:00 #
+ # Updated Date: 2025.08.18 01:00:00 #
  # ================================================== #
 
  from typing import Optional
@@ -73,6 +73,7 @@ class Text:
  ai_name = event.data['value']
 
  # prepare mode, model, etc.
+ agent_provider = None # agent provider (llama or openai)
  mode = self.window.core.config.get('mode')
  model = self.window.core.config.get('model')
  model_data = self.window.core.models.get(model)
@@ -93,11 +94,11 @@ class Text:
  agent_provider = self.window.core.config.get('agent.openai.provider')
 
  # o1 models: disable stream mode
- if mode in [MODE_AGENT_LLAMA, MODE_AUDIO]:
+ if mode in (MODE_AGENT_LLAMA, MODE_AUDIO):
  stream_mode = False
- if mode in [MODE_LLAMA_INDEX] and idx_mode == "retrieval":
+ if mode == MODE_LLAMA_INDEX and idx_mode == "retrieval":
  stream_mode = False
- if mode in [MODE_LLAMA_INDEX]:
+ if mode == MODE_LLAMA_INDEX:
  if not self.window.core.idx.chat.is_stream_allowed():
  stream_mode = False
 
@@ -156,7 +157,11 @@ class Text:
  self.window.dispatch(event)
 
  # agent or expert mode
- sys_prompt = self.window.controller.agent.experts.append_prompts(mode, sys_prompt, parent_id)
+ sys_prompt = self.window.controller.agent.experts.append_prompts(
+ mode,
+ sys_prompt,
+ parent_id
+ )
 
  # on pre prompt event
  event = Event(Event.PRE_PROMPT, {
@@ -221,7 +226,7 @@ class Text:
  files = self.window.core.attachments.get_all(mode)
  num_files = len(files)
  if num_files > 0:
- self.window.controller.chat.log("Attachments ({}): {}".format(mode, num_files))
+ self.window.controller.chat.log(f"Attachments ({mode}): {num_files}")
 
  # assistant
  assistant_id = self.window.core.config.get('assistant')
@@ -257,7 +262,7 @@ class Text:
  'reply': reply,
  'internal': internal,
  }
- if mode in [MODE_AGENT_LLAMA, MODE_AGENT_OPENAI]:
+ if mode in (MODE_AGENT_LLAMA, MODE_AGENT_OPENAI):
  extra['agent_idx'] = agent_idx
  extra['agent_provider'] = agent_provider
 
@@ -269,8 +274,8 @@ class Text:
  self.window.dispatch(event)
 
  except Exception as e:
- self.window.controller.chat.log("Bridge call ERROR: {}".format(e)) # log
+ self.window.controller.chat.log(f"Bridge call ERROR: {e}") # log
  self.window.controller.chat.handle_error(e)
- print("Error when calling bridge: " + str(e))
+ print(f"Error when calling bridge: {e}")
 
- return ctx
+ return ctx
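
Note: the hunks above bind agent_provider = None before the mode checks so the name is always defined when it is later packed into extra, and they replace one-element list membership tests with tuples or direct equality. A minimal, self-contained sketch of the same pattern; the mode constants and the config dict below are illustrative stand-ins, not the project's real objects:

    # Sketch only: constants and config are stand-ins for the real ones.
    MODE_AGENT_LLAMA = "agent_llama"
    MODE_AGENT_OPENAI = "agent_openai"

    def build_extra(mode: str, config: dict) -> dict:
        agent_provider = None  # bound up front, so it exists even outside agent modes
        if mode == MODE_AGENT_OPENAI:
            agent_provider = config.get("agent.openai.provider")
        extra = {}
        if mode in (MODE_AGENT_LLAMA, MODE_AGENT_OPENAI):  # tuple membership instead of [..]
            extra["agent_provider"] = agent_provider
        return extra

    print(build_extra("agent_openai", {"agent.openai.provider": "openai_agents"}))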
pygpt_net/controller/command/command.py CHANGED
@@ -6,7 +6,7 @@
  # GitHub: https://github.com/szczyglis-dev/py-gpt #
  # MIT License #
  # Created By : Marcin Szczygliński #
- # Updated Date: 2025.07.26 18:00:00 #
+ # Updated Date: 2025.08.18 01:00:00 #
  # ================================================== #
 
  import json
@@ -45,9 +45,9 @@ class Command:
  :param all: True to dispatch to all plugins
  :param execute_only: True to dispatch only to plugins with execute event
  """
- self.window.core.debug.info("Dispatch CMD event begin: " + event.name)
+ self.window.core.debug.info(f"Dispatch CMD event begin: {event.name}")
  if self.window.core.debug.enabled():
- self.window.core.debug.debug("EVENT BEFORE: " + str(event))
+ self.window.core.debug.debug(f"EVENT BEFORE: {str(event)}")
 
  # begin reply stack
  if event.name in self.flush_events:
@@ -69,7 +69,7 @@ class Command:
  self.stop = False # unlock needed here
  break
  if self.window.core.debug.enabled():
- self.window.core.debug.debug("Apply [{}] to plugin: ".format(event.name) + id)
+ self.window.core.debug.debug(f"Apply [{event.name}] to plugin: {id}")
 
  self.window.stateChanged.emit(self.window.STATE_BUSY)
  self.window.core.dispatcher.apply(id, event)
@@ -84,7 +84,7 @@ class Command:
 
  :param event: event object
  """
- self.window.core.debug.info("Dispatch CMD event begin: " + event.name)
+ self.window.core.debug.info(f"Dispatch CMD event begin: {event.name}")
  if event.name in self.flush_events:
  self.window.controller.kernel.replies.clear()
  for id in self.window.core.plugins.get_ids():
@@ -156,7 +156,7 @@ class Command:
  if ctx is not None:
  self.window.core.debug.info("Reply...")
  if self.window.core.debug.enabled():
- self.window.core.debug.debug("CTX REPLY: " + str(ctx))
+ self.window.core.debug.debug(f"CTX REPLY: {str(ctx)}")
 
  self.window.update_status("") # Clear status
  if ctx.reply:
@@ -176,4 +176,4 @@ class Command:
  'context': context,
  'extra': extra,
  })
- self.window.dispatch(event)
+ self.window.dispatch(event)
pygpt_net/controller/ctx/ctx.py CHANGED
@@ -22,6 +22,7 @@ from .summarizer import Summarizer
  from .extra import Extra
 
  from pygpt_net.utils import trans
+ from ...core.types import MODE_ASSISTANT
 
 
  class Ctx:
@@ -293,7 +294,7 @@ class Ctx:
 
  mode = self.window.core.ctx.get_mode()
  assistant_id = None
- if mode == 'assistant':
+ if mode == MODE_ASSISTANT:
  assistant_id = self.window.core.config.get('assistant')
  self.window.controller.assistant.files.update() # always update assistant files
 
@@ -462,7 +463,7 @@ class Ctx:
  ctrl.presets.set(mode, preset)
  ctrl.presets.refresh()
 
- if mode == 'assistant':
+ if mode == MODE_ASSISTANT:
  if assistant_id is not None:
  ctrl.assistant.select_by_id(assistant_id)
  else:
@@ -480,7 +481,7 @@ class Ctx:
  id = None
  if self.window.core.ctx.is_allowed_for_mode(mode, False):
  self.window.core.ctx.update()
- if mode == 'assistant':
+ if mode == MODE_ASSISTANT:
  if self.window.core.ctx.get_assistant() is not None:
  id = self.window.core.ctx.get_assistant()
  else:
@@ -560,9 +561,12 @@ class Ctx:
  msg=trans('ctx.delete.item.confirm'),
  )
  return
+ item = self.window.core.ctx.get_item_by_id(id)
+ event = RenderEvent(RenderEvent.ITEM_DELETE_ID, {
+ "ctx": item,
+ })
  self.window.core.ctx.remove_item(id)
- self.refresh()
- self.update(no_scroll=True)
+ self.window.dispatch(event)
 
  def delete_history(self, force: bool = False):
  """
pygpt_net/controller/debug/debug.py CHANGED
@@ -6,7 +6,7 @@
  # GitHub: https://github.com/szczyglis-dev/py-gpt #
  # MIT License #
  # Created By : Marcin Szczygliński #
- # Updated Date: 2025.08.11 18:00:00 #
+ # Updated Date: 2025.08.18 01:00:00 #
  # ================================================== #
 
  from datetime import datetime
@@ -147,7 +147,7 @@ class Debug(QObject):
  :param message: message
  :param source_id: source ID
  """
- data = "[JS] Line " + str(line_number) + ": " + message
+ data = f"[JS] Line {line_number}: {message}"
  self.log(data, window=True)
 
  @Slot(object)
pygpt_net/core/ctx/bag.py CHANGED
@@ -6,7 +6,7 @@
  # GitHub: https://github.com/szczyglis-dev/py-gpt #
  # MIT License #
  # Created By : Marcin Szczygliński #
- # Updated Date: 2025.08.11 00:00:00 #
+ # Updated Date: 2025.08.19 07:00:00 #
  # ================================================== #
 
  from typing import List
@@ -44,6 +44,7 @@ class Bag:
  def clear_items(self):
  """Clear items"""
  self.items.clear()
+ self.items = []
 
  def count_items(self) -> int:
  """
pygpt_net/core/debug/debug.py CHANGED
@@ -6,7 +6,7 @@
  # GitHub: https://github.com/szczyglis-dev/py-gpt #
  # MIT License #
  # Created By : Marcin Szczygliński #
- # Updated Date: 2025.08.15 23:00:00 #
+ # Updated Date: 2025.08.19 07:00:00 #
  # ================================================== #
 
  import gc
@@ -21,6 +21,7 @@ from pathlib import Path
  from typing import Any, Tuple
 
  import psutil
+ from PySide6.QtCore import QObject
  from PySide6.QtWidgets import QApplication
 
  from pygpt_net.config import Config
@@ -337,8 +338,17 @@ class Debug:
  :param label: label for memory usage
  :return: formatted memory usage string
  """
- mem_mb = self._process.memory_info().rss / (1024 * 1024)
- data = f"Memory Usage: {mem_mb:.2f} MB"
+ rss_mb = self._process.memory_info().rss / (1024 * 1024)
+ uss_mb = getattr(self._process.memory_full_info(), "uss", 0) / 1024 / 1024
+ data = f"RSS={rss_mb:.0f} MB USS={uss_mb:.0f} MB"
+
+ children_parts = []
+ for c in self._process.children(recursive=True):
+ children_parts.append(
+ f"{c.pid} {c.name()} {round(c.memory_info().rss / 1024 / 1024)} MB"
+ )
+ if children_parts:
+ data += "\n" + "\n".join(children_parts)
  print(f"[{label}] {data}")
  return data
 
@@ -398,6 +408,10 @@ class Debug:
  stats.append(f"Widgets: {num_widgets}")
  stats.append(f"Threadpool: {num_threads}")
 
+ # count all QObjects in app
+ qobjects = sum(1 for obj in QApplication.allWidgets() if isinstance(obj, QObject))
+ stats.append(f"QObjects: {qobjects}")
+
  res += "\n" + "\n".join(stats)
  print("\n".join(stats))
  return res
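
Note: the memory report above now distinguishes RSS from USS and adds a per-child breakdown (useful when QtWebEngine spawns renderer subprocesses). A standalone psutil sketch of the same calls; be aware that memory_full_info() can raise psutil.AccessDenied on some platforms, which the getattr fallback in the diff does not cover:

    import psutil

    proc = psutil.Process()  # current process
    rss_mb = proc.memory_info().rss / (1024 * 1024)
    uss_mb = getattr(proc.memory_full_info(), "uss", 0) / (1024 * 1024)
    print(f"RSS={rss_mb:.0f} MB USS={uss_mb:.0f} MB")

    # per-child breakdown, e.g. web engine renderer processes
    for child in proc.children(recursive=True):
        child_mb = round(child.memory_info().rss / (1024 * 1024))
        print(child.pid, child.name(), f"{child_mb} MB")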
pygpt_net/core/dispatcher/dispatcher.py CHANGED
@@ -6,7 +6,7 @@
  # GitHub: https://github.com/szczyglis-dev/py-gpt #
  # MIT License #
  # Created By : Marcin Szczygliński #
- # Updated Date: 2025.07.17 01:00:00 #
+ # Updated Date: 2025.08.18 01:00:00 #
  # ================================================== #
 
  from typing import List, Tuple
@@ -62,7 +62,8 @@ class Dispatcher:
  debug.debug(f"[event] Before handle: {event}")
 
  if event.stop:
- debug.info(f"[event] Skipping... (stopped): {event.name}")
+ if log_event:
+ debug.info(f"[event] Skipping... (stopped): {event.name}")
  return [], event
 
  handled = False
@@ -100,7 +101,8 @@ class Dispatcher:
  for pid in plugin_ids:
  if controller.plugins.is_enabled(pid) or all:
  if event.stop:
- debug.info(f"[event] Skipping... (stopped): {event.name}")
+ if log_event:
+ debug.info(f"[event] Skipping... (stopped): {event.name}")
  break
  if log_event and debug.enabled():
  debug.debug(f"[event] Apply [{event.name}] to plugin: {pid}")
pygpt_net/core/events/render.py CHANGED
@@ -58,6 +58,9 @@ class RenderEvent(BaseEvent):
  ACTION_REGEN_SUBMIT = "render.action.regen.submit"
  ACTION_EDIT_SUBMIT = "render.action.edit.submit"
 
+ ITEM_DELETE_ID = "render.item.delete.id"
+ ITEM_DELETE_FROM_ID = "render.item.delete.from_id"
+
  STATE_IDLE = "render.state.idle"
  STATE_BUSY = "render.state.busy"
  STATE_ERROR = "render.state.error"
pygpt_net/core/render/base.py CHANGED
@@ -251,19 +251,19 @@ class BaseRenderer:
  """
  pass
 
- def remove_item(self, id: int):
+ def remove_item(self, ctx: CtxItem):
  """
  Remove item from output
 
- :param id: context item ID
+ :param ctx: context item
  """
  pass
 
- def remove_items_from(self, id: int):
+ def remove_items_from(self, ctx: CtxItem):
  """
  Remove item from output
 
- :param id: context item ID
+ :param ctx: context item
  """
  pass
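
Note: remove_item() and remove_items_from() now take the full CtxItem rather than a bare integer ID, so concrete renderers can key on ctx.id while still having access to the rest of the item. A hypothetical subclass sketch; the ListRenderer and the stand-in CtxItem below are illustrative, not part of the package:

    from dataclasses import dataclass, field
    from typing import List

    @dataclass
    class CtxItem:            # minimal stand-in for pygpt_net.item.ctx.CtxItem
        id: int
        output: str = ""

    @dataclass
    class ListRenderer:       # illustrative renderer implementing the new signatures
        items: List[CtxItem] = field(default_factory=list)

        def remove_item(self, ctx: CtxItem):
            """Remove a single item from output, keyed by the item's ID."""
            self.items = [i for i in self.items if i.id != ctx.id]

        def remove_items_from(self, ctx: CtxItem):
            """Remove the given item and everything after it."""
            self.items = [i for i in self.items if i.id < ctx.id]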