pygpt-net 2.6.44__py3-none-any.whl → 2.6.45__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
@@ -118,7 +118,7 @@ class Output:
118
118
 
119
119
  nodes['inline.vision'] = HelpLabel(trans('inline.vision'))
120
120
  nodes['inline.vision'].setVisible(False)
121
- nodes['inline.vision'].setContentsMargins(0, 0, 0, 0)
121
+ nodes['inline.vision'].setContentsMargins(3, 2, 0, 0)
122
122
 
123
123
  opts_layout = QHBoxLayout()
124
124
  opts_layout.setContentsMargins(0, 0, 0, 0)
@@ -505,10 +505,10 @@ class Bridge(QObject):
505
505
  super(Bridge, self).__init__(parent)
506
506
  self.window = window
507
507
 
508
- chunk = Signal(str, str) # name, chunk
509
- node = Signal(str) # content
510
- nodeReplace = Signal(str) # content
511
- nodeInput = Signal(str) # content
508
+ chunk = Signal(str, str, str) # name, chunk, type
509
+ node = Signal(str) # JSON payload
510
+ nodeReplace = Signal(str) # JSON payload
511
+ nodeInput = Signal(str) # raw text
512
512
  readyChanged = Signal(bool)
513
513
 
514
514
  @Slot(int)
pygpt_net/utils.py CHANGED
@@ -285,9 +285,10 @@ def natsort(l: list) -> list:
285
285
  alphanum_key = lambda key: [convert(c) for c in re.split('([0-9]+)', key)]
286
286
  return sorted(l, key=alphanum_key)
287
287
 
288
- def mem_clean():
288
+ def mem_clean(force: bool = False) -> bool:
289
289
  """Clean memory by removing unused objects"""
290
- return
290
+ if not force:
291
+ return False
291
292
  import sys, gc
292
293
  ok = False
293
294
  try:
@@ -1,6 +1,6 @@
1
1
  Metadata-Version: 2.3
2
2
  Name: pygpt-net
3
- Version: 2.6.44
3
+ Version: 2.6.45
4
4
  Summary: Desktop AI Assistant powered by: OpenAI GPT-5, GPT-4, o1, o3, Gemini, Claude, Grok, DeepSeek, and other models supported by Llama Index, and Ollama. Chatbot, agents, completion, image generation, vision analysis, speech-to-text, plugins, internet access, file handling, command execution and more.
5
5
  License: MIT
6
6
  Keywords: ai,api,api key,app,assistant,bielik,chat,chatbot,chatgpt,claude,dall-e,deepseek,desktop,gemini,gpt,gpt-3.5,gpt-4,gpt-4-vision,gpt-4o,gpt-5,gpt-oss,gpt3.5,gpt4,grok,langchain,llama-index,llama3,mistral,o1,o3,ollama,openai,presets,py-gpt,py_gpt,pygpt,pyside,qt,text completion,tts,ui,vision,whisper
@@ -118,7 +118,7 @@ Description-Content-Type: text/markdown
118
118
 
119
119
  [![pygpt](https://snapcraft.io/pygpt/badge.svg)](https://snapcraft.io/pygpt)
120
120
 
121
- Release: **2.6.44** | build: **2025-09-12** | Python: **>=3.10, <3.14**
121
+ Release: **2.6.45** | build: **2025-09-13** | Python: **>=3.10, <3.14**
122
122
 
123
123
  > Official website: https://pygpt.net | Documentation: https://pygpt.readthedocs.io
124
124
  >
@@ -1236,6 +1236,8 @@ This file is located in your working directory. You can add new models provided
1236
1236
 
1237
1237
  You can import new models by manually editing `models.json` or by using the model importer in the `Config -> Models -> Import` menu.
1238
1238
 
1239
+ **Tip:** The models on the list are sorted by provider, not by manufacturer. A model from a particular manufacturer may be available through different providers (e.g., OpenAI models can be provided by the `OpenAI API` or by `OpenRouter`). If you want to use a specific model through a particular provider, you need to configure the provider in `Config -> Models -> Edit`, or import it directly via `Config -> Models -> Import`.
1240
+
1239
1241
  **Tip**: Anthropic and Deepseek API providers use VoyageAI for embeddings (Chat with Files and attachments RAG), so you must also configure the Voyage API key if you want to use embeddings from these providers.
1240
1242
 
1241
1243
  ## Adding a custom model
@@ -1245,7 +1247,7 @@ You can add your own models. See the section `Extending PyGPT / Adding a new mod
1245
1247
  There is built-in support for those LLM providers:
1246
1248
 
1247
1249
  - `Anthropic`
1248
- - `Azure OpenAI`
1250
+ - `Azure OpenAI` (native SDK)
1249
1251
  - `Deepseek API`
1250
1252
  - `Google` (native SDK)
1251
1253
  - `HuggingFace API`
@@ -1256,7 +1258,7 @@ There is built-in support for those LLM providers:
1256
1258
  - `OpenAI` (native SDK)
1257
1259
  - `OpenRouter`
1258
1260
  - `Perplexity`
1259
- - `xAI`
1261
+ - `xAI` (native SDK)
1260
1262
 
1261
1263
  ## How to use local or non-GPT models
1262
1264
 
@@ -2319,11 +2321,9 @@ Config -> Settings...
2319
2321
 
2320
2322
  **Layout**
2321
2323
 
2322
- - `Zoom`: Adjusts the zoom in chat window (web render view). `WebEngine / Chromium` render mode only.
2323
-
2324
2324
  - `Style (chat)`: Chat style (Blocks, or ChatGPT-like, or ChatGPT-like Wide. `WebEngine / Chromium` render mode only.
2325
2325
 
2326
- - `Code syntax highlight`: Syntax highlight theme in code blocks. `WebEngine / Chromium` render mode only.
2326
+ - `Zoom`: Adjusts the zoom in chat window (web render view). `WebEngine / Chromium` render mode only.
2327
2327
 
2328
2328
  - `Font Size (chat window)`: Adjusts the font size in the chat window (plain-text) and notepads.
2329
2329
 
@@ -2339,12 +2339,30 @@ Config -> Settings...
2339
2339
 
2340
2340
  - `DPI factor`: DPI factor. Restart of the application is required for this option to take effect. Default: 1.0.
2341
2341
 
2342
+ - `Auto-collapse user message (px)`: Auto-collapse user message after N pixels of height; set to 0 to disable auto-collapse.
2343
+
2342
2344
  - `Display tips (help descriptions)`: Display help tips, Default: True.
2343
2345
 
2344
2346
  - `Store dialog window positions`: Enable or disable dialogs positions store/restore, Default: True.
2345
2347
 
2346
2348
  - `Use theme colors in chat window`: Use color theme in chat window, Default: True.
2347
2349
 
2350
+ **Code syntax**
2351
+
2352
+ - `Code syntax highlight`: Syntax highlight theme in code blocks. `WebEngine / Chromium` render mode only.
2353
+
2354
+ - `Disable syntax highlight`: Option to disable syntax highlighting in code blocks. `WebEngine / Chromium` render mode only.
2355
+
2356
+ - `Max chars to highlight (static)`: Sets the maximum number of characters to be highlighted in static content. Set to 0 to disable. `WebEngine / Chromium` render mode only.
2357
+
2358
+ - `Max lines to highlight (static)`: Sets the maximum number of lines to be highlighted in static content. Set to 0 to disable. `WebEngine / Chromium` render mode only.
2359
+
2360
+ - `Max lines to highlight (real-time)`: Sets the maximum number of lines to be highlighted in real-time stream mode. Set to 0 to disable. `WebEngine / Chromium` render mode only.
2361
+
2362
+ - `Highlight every N chars (real-time)`: Sets the interval for highlighting every N characters in real-time stream mode. `WebEngine / Chromium` render mode only.
2363
+
2364
+ - `Highlight every N line (real-time)`: Sets the interval for highlighting every N lines in real-time stream mode. `WebEngine / Chromium` render mode only.
2365
+
2348
2366
  **Files and attachments**
2349
2367
 
2350
2368
  - `Store attachments in the workdir upload directory`: Enable to store a local copy of uploaded attachments for future use. Default: True
@@ -2399,7 +2417,14 @@ Config -> Settings...
2399
2417
 
2400
2418
  **Remote tools**
2401
2419
 
2402
- Enable/disable remote tools, like Web Search or Image generation to use in OpenAI Responses API (OpenAI models and Chat mode only).
2420
+ Enable/disable remote tools, such as Web Search, MCP, or Image generation.
2421
+
2422
+ Remote tools are available for these providers, and only via their native SDKs:
2423
+
2424
+ - Anthropic
2425
+ - Google
2426
+ - OpenAI
2427
+ - xAI
2403
2428
 
2404
2429
  **Models**
2405
2430
 
@@ -2447,7 +2472,9 @@ Enable/disable remote tools, like Web Search or Image generation to use in OpenA
2447
2472
 
2448
2473
  - `Image generate`: Prompt for generating prompts for image generation (if raw-mode is disabled).
2449
2474
 
2450
- **Images**
2475
+ **Images and video**
2476
+
2477
+ **Image**
2451
2478
 
2452
2479
  - `Image size`: The resolution of the generated images (DALL-E). Default: 1024x1024.
2453
2480
 
@@ -2455,15 +2482,33 @@ Enable/disable remote tools, like Web Search or Image generation to use in OpenA
2455
2482
 
2456
2483
  - `Prompt generation model`: Model used for generating prompts for image generation (if raw-mode is disabled).
2457
2484
 
2458
- **Vision**
2485
+ **Video**
2486
+
2487
+ - `Aspect ratio`: Specifies the frame aspect ratio (e.g., 16:9, 9:16, 1:1). Availability depends on the selected model.
2488
+
2489
+ - `Video duration`: Sets the clip length in seconds; limits may vary by model.
2490
+
2491
+ - `FPS`: Determines the frames per second (e.g., 24, 25, 30). Values may be rounded or ignored by the model.
2459
2492
 
2460
- - `Vision: Camera Input Device`: Video capture camera index (index of the camera, default: 0).
2493
+ - `Generate audio`: Option to include synthesized background audio if supported by the model.
2461
2494
 
2462
- - `Vision: Camera capture width (px)`: Video capture resolution (width).
2495
+ - `Negative prompt`: Specifies words or phrases to avoid in the output (comma-separated).
2463
2496
 
2464
- - `Vision: Camera capture height (px)`: Video capture resolution (height).
2497
+ - `Prompt enhancement model`: Defines the LLM used to refine your prompt before video generation. This is not the video model.
2465
2498
 
2466
- - `Vision: Image capture quality`: Video capture image JPEG quality (%).
2499
+ - `Video resolution`: Sets the target output resolution (e.g., 720p, 1080p). Availability depends on the model.
2500
+
2501
+ - `Seed`: Provides an optional random seed for reproducible results; leave empty for random.
2502
+
2503
+ **Vision and camera**
2504
+
2505
+ - `Camera Input Device`: Video capture camera index (index of the camera, default: 0).
2506
+
2507
+ - `Camera capture width (px)`: Video capture resolution (width).
2508
+
2509
+ - `Camera capture height (px)`: Video capture resolution (height).
2510
+
2511
+ - `Image capture quality`: Video capture image JPEG quality (%).
2467
2512
 
2468
2513
  **Audio**
2469
2514
 
@@ -3567,6 +3612,11 @@ may consume additional tokens that are not displayed in the main window.
3567
3612
 
3568
3613
  ## Recent changes:
3569
3614
 
3615
+ **2.6.45 (2025-09-13)**
3616
+
3617
+ - Improved: Parsing of custom markup in the stream.
3618
+ - Improved: Message block parsing moved to JavaScript.
3619
+
3570
3620
  **2.6.44 (2025-09-12)**
3571
3621
 
3572
3622
  - Added: Auto-collapse for large user input blocks.
@@ -1,7 +1,7 @@
1
- pygpt_net/CHANGELOG.txt,sha256=Cw-TW3b3CwTpFURkOaR1dLZakBEHm4X4gJWWzpycmbE,105702
1
+ pygpt_net/CHANGELOG.txt,sha256=2n-19YWzWCKMhuC_XixSimjNlo7hzhbmjA3i1VfsstQ,105831
2
2
  pygpt_net/LICENSE,sha256=dz9sfFgYahvu2NZbx4C1xCsVn9GVer2wXcMkFRBvqzY,1146
3
- pygpt_net/__init__.py,sha256=juzDYd78vIydeJrcQHoNSjT2uEwmeU0rr5tDOOdKkL0,1373
4
- pygpt_net/app.py,sha256=2Zi8o7OLEzIaUDztGSisJLOtTbpfTPbUH9V0c9NldbQ,22014
3
+ pygpt_net/__init__.py,sha256=8puOMfx58AKv9_yr0bg6ZqXhXUo-Te7Bh3vqletELtw,1373
4
+ pygpt_net/app.py,sha256=prS80WfKSu8U_Ox9oUdxgzgHgRB1nvQQAMFTNltiECY,21954
5
5
  pygpt_net/app_core.py,sha256=y7mHzH_sdUg2yoG_BKvMdJA6v5jCBSHhwcXqIZfxXs4,4169
6
6
  pygpt_net/config.py,sha256=SCps_FfwdrynVAgpn37Ci1qTN8BFC05IGl9sYIi9e0w,16720
7
7
  pygpt_net/controller/__init__.py,sha256=9TxP-zGmcblpNgh7oBf2lYvtBIAmlhkre1qkVRyLUKU,6459
@@ -72,12 +72,12 @@ pygpt_net/controller/config/field/textarea.py,sha256=Ln545IHzXBeFIjnfMIpmlUr-V3w
72
72
  pygpt_net/controller/config/placeholder.py,sha256=-PWPNILPVkxMsY64aYnKTWvgUIvx7KA2Nwfd2LW_K30,16711
73
73
  pygpt_net/controller/ctx/__init__.py,sha256=0wH7ziC75WscBW8cxpeGBwEz5tolo_kCxGPoz2udI_E,507
74
74
  pygpt_net/controller/ctx/common.py,sha256=1jjRfEK1S4IqnzEGg1CIF-QqSN_83NLpaVtfB610NcM,6592
75
- pygpt_net/controller/ctx/ctx.py,sha256=pnczNTAj6fCE0s7NbIqZxpbI6zNHBqz-qYccpcCeWJc,39165
75
+ pygpt_net/controller/ctx/ctx.py,sha256=i14sK7LbFHJQ_cvNI0X68vM1yBtSbp38a9NW-tLIlBw,39401
76
76
  pygpt_net/controller/ctx/extra.py,sha256=WApWjnIfl3SoI0VZVbptvjjqhFPJl-dSfqW12tlBHrY,8599
77
77
  pygpt_net/controller/ctx/summarizer.py,sha256=UNsq-JTARblGNT97uSMpZEVzdUuDJ8YA2j2dw9R2X3o,3079
78
78
  pygpt_net/controller/debug/__init__.py,sha256=dOJGTICjvTtrPIEDOsxCzcOHsfu8AFPLpSKbdN0q0KI,509
79
79
  pygpt_net/controller/debug/debug.py,sha256=gkkzBq6nLT-5GQD5549o0KldeGcH8Pbqwfe8ZqejG64,9045
80
- pygpt_net/controller/debug/fixtures.py,sha256=HrXjpzM610QHHpklkEtmJi2IrG44nebqAghxtwXOo-Y,2720
80
+ pygpt_net/controller/debug/fixtures.py,sha256=D0u3CM9KjFXnWPpZ09iUIlQLTEJd-6eGijNKRK0aweU,2735
81
81
  pygpt_net/controller/dialogs/__init__.py,sha256=jI2WisG3lzbeyf__1Y7g7wWrxlr1QnYBDDt4t_3eeYk,513
82
82
  pygpt_net/controller/dialogs/confirm.py,sha256=PgVOJS31quY24Fs9XhFplcMnKweKbAxGWDKSjlBgLJU,16860
83
83
  pygpt_net/controller/dialogs/debug.py,sha256=v6E85vyCwfaDG9XZysxhBjRwlrDkbYC-NxUnDamNRpk,5980
@@ -236,10 +236,10 @@ pygpt_net/core/debug/assistants.py,sha256=fUYVdMsdtfWd3njirvw2lvBv7CgPHdz_9ZSDVr
236
236
  pygpt_net/core/debug/attachments.py,sha256=K-iVhHUrLDxi89Hchf7lQEst21SOOd8SK8xfuCJ7kTU,1685
237
237
  pygpt_net/core/debug/config.py,sha256=MSxRLVIWZ0VHrB4p66fyM-nfQLMi2D6enDzJLDQEb40,2305
238
238
  pygpt_net/core/debug/console/__init__.py,sha256=HAeCpOIaHnDNzW8Y_E-2W7pvKiCwwzBcETLcEyKONhA,517
239
- pygpt_net/core/debug/console/console.py,sha256=vW4aLy-bPEW4G-SOBjGp6HXqY6TGynjCaGM0RPlJygU,4176
239
+ pygpt_net/core/debug/console/console.py,sha256=5MMnAtYg5_88GYHFE5pMuPLQkrysOQL-xOCVmCO8T7w,4248
240
240
  pygpt_net/core/debug/context.py,sha256=pNGrYBe6bmyMCU1MJPjAXNT-4SubEu-1DFGJafXwI_U,5838
241
241
  pygpt_net/core/debug/db.py,sha256=TQtNpCjrcFw943nae3OIyq0af_okjr-aTfFKS_QhQQk,776
242
- pygpt_net/core/debug/debug.py,sha256=pD37POYCYO7DSk96iOP51ukt5hOI5-7AwUcH8pxyoMw,14035
242
+ pygpt_net/core/debug/debug.py,sha256=aQTzqxHgf0Uq8b30FO7vA9V7Zz44ld3jAbRo2S1CQG8,14040
243
243
  pygpt_net/core/debug/events.py,sha256=OCVWSCCHkZkoQFjwO8c76xL02Gdcphv8eRJoXKrd-dQ,2206
244
244
  pygpt_net/core/debug/indexes.py,sha256=M8Uf6NT1LswyXOdpZ_QzkBbhvv08U1rhMb6PV7OaNtE,5416
245
245
  pygpt_net/core/debug/kernel.py,sha256=kFw3dp2Azl_g6omMoxlieV8E8pn5JqtI2PTHBmJ0fAU,1317
@@ -275,7 +275,7 @@ pygpt_net/core/filesystem/types.py,sha256=1HFubxAHYup_SLQ7SlR5EvZb3KgVyd8K8vBRUk
275
275
  pygpt_net/core/filesystem/url.py,sha256=PAhO73dlw_jBu7HN5RKHSVbbtrXTR4FoU_S1tD7dGvU,3528
276
276
  pygpt_net/core/fixtures/__init__,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
277
277
  pygpt_net/core/fixtures/stream/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
278
- pygpt_net/core/fixtures/stream/generator.py,sha256=oznoZaVFb09MfPZ_AZ0Wa-GIC-_8BsASl-_6UHhpIrw,9388
278
+ pygpt_net/core/fixtures/stream/generator.py,sha256=Gjm68CMUOpQiYLRwG0dxVgSPw_cMJLYcG74Ll_i2hvg,9331
279
279
  pygpt_net/core/history/__init__.py,sha256=OVtJM8Cf-9WV9-WmB6x__qB3QK4ZGaYzjpl4Fk8RdWM,511
280
280
  pygpt_net/core/history/history.py,sha256=PDE5Ut03mEgY9YPLZjqrimKQAyxoE7itViuqFV-VQf0,3123
281
281
  pygpt_net/core/idx/__init__.py,sha256=8-HStPMODmgzC3dBaJB6MDqGJHCHnKxNdt30Vzyu3cM,507
@@ -349,12 +349,12 @@ pygpt_net/core/render/plain/helpers.py,sha256=tWbdJZBfjnuHLAUDVYLTfv3Vt-h7XN1tg6
349
349
  pygpt_net/core/render/plain/pid.py,sha256=Zp-TNjKI3yReuSJxC8qRkOh2ou7zK6abFbXjebbpsO8,1484
350
350
  pygpt_net/core/render/plain/renderer.py,sha256=CA0pkTvzgO50vJlrkAkfMBvg60ffiEvDCyVkONKYkL4,16063
351
351
  pygpt_net/core/render/web/__init__.py,sha256=istp5dsn6EkLEP7lOBeDb8RjodUcWZqjcEvTroaTT-w,489
352
- pygpt_net/core/render/web/body.py,sha256=cUd_haY9Rg2O_eMoMFftkEwmNocvQsDKLGadRmlVLI4,19223
352
+ pygpt_net/core/render/web/body.py,sha256=NOfRinlwwk0j20mEQTp-Ayu7EvEbON2BZCnsL-ZZ9LA,29055
353
353
  pygpt_net/core/render/web/debug.py,sha256=784RYXF6inn_bkRtYD1_FllQSyk67JmxKGWPiAyscKw,7138
354
- pygpt_net/core/render/web/helpers.py,sha256=a0zyjQffQRyZTcRzXVuho7YtYzvLIwerWxPOO11yjXI,6624
354
+ pygpt_net/core/render/web/helpers.py,sha256=8-JkbEOOLoCEAkylJWr8yk3aIliQBMyqcatsyW99VWY,7340
355
355
  pygpt_net/core/render/web/parser.py,sha256=pDFc9Tf8P-jvrDilXyT1fukcQHbixHRJ9Dn9hF10Gko,12892
356
356
  pygpt_net/core/render/web/pid.py,sha256=F33x_OtrHL9BDMXx_JUbfo8-DOiN4vo1Tv4rrRVADTo,4343
357
- pygpt_net/core/render/web/renderer.py,sha256=wzhjh6SoEODgMSIYISnJYB9e1N0Mt6t5JMO7EK3jdfw,71953
357
+ pygpt_net/core/render/web/renderer.py,sha256=xjBeH-up3ZdKGLcmLU6fkzGqr_w5H6ysJpJo_bYJjio,68959
358
358
  pygpt_net/core/render/web/syntax_highlight.py,sha256=QSLGF5cJL_Xeqej7_TYwY_5C2w9enXV_cMEuaJ3C43U,2005
359
359
  pygpt_net/core/settings/__init__.py,sha256=GQ6_gJ2jf_Chm7ZuZLvkcvEh_sfMDVMBieeoJi2iPI4,512
360
360
  pygpt_net/core/settings/settings.py,sha256=Ix06y-gJ3q7NJDf55XAWBBYulBLpinBqzYqsytH_9mo,8686
@@ -395,8 +395,8 @@ pygpt_net/css_rc.py,sha256=PX6g9z5BsD-DXISuR2oq3jHcjiKfcJ4HsgcHez6wGMc,27762
395
395
  pygpt_net/data/audio/click_off.mp3,sha256=aNiRDP1pt-Jy7ija4YKCNFBwvGWbzU460F4pZWZDS90,65201
396
396
  pygpt_net/data/audio/click_on.mp3,sha256=qfdsSnthAEHVXzeyN4LlC0OvXuyW8p7stb7VXtlvZ1k,65201
397
397
  pygpt_net/data/audio/ok.mp3,sha256=LTiV32pEBkpUGBkKkcOdOFB7Eyt_QoP2Nv6c5AaXftk,32256
398
- pygpt_net/data/config/config.json,sha256=OwAbTYFOGf3ouLFaoY6rTzYVcaLGCe_Yjs3ldQFJnbQ,30845
399
- pygpt_net/data/config/models.json,sha256=OOkqQrUYQ9HhuQtUgkOWjqlcMmrBG01wvKQ21Frt7nE,118192
398
+ pygpt_net/data/config/config.json,sha256=GQO1iYnN0tPL-chQ1Sfsnjx1lWn1CJPCZQC_zeYje0w,30845
399
+ pygpt_net/data/config/models.json,sha256=HXBgeS6b6pIy21BXrLV04I73D9IYyXeIddkS0nhHgZs,118192
400
400
  pygpt_net/data/config/modes.json,sha256=IpjLOm428_vs6Ma9U-YQTNKJNtZw-qyM1lwhh73xl1w,2111
401
401
  pygpt_net/data/config/presets/agent_code_act.json,sha256=GYHqhxtKFLUCvRI3IJAJ7Qe1k8yD9wGGNwManldWzlI,754
402
402
  pygpt_net/data/config/presets/agent_openai.json,sha256=bpDJgLRey_effQkzFRoOEGd4aHUrmzeODSDdNzrf62I,730
@@ -454,6 +454,7 @@ pygpt_net/data/css/web-chatgpt_wide.css,sha256=4eTgIrbG6dRMcPYG9TyjsKn6R9eTla6lp
454
454
  pygpt_net/data/css/web-chatgpt_wide.dark.css,sha256=C4wgIbI8oOQ7BdnCTX3ceuXRDBsp8pvvCBjiOhn0ZO0,1500
455
455
  pygpt_net/data/css/web-chatgpt_wide.darkest.css,sha256=oclQT4ZjjIU-icpNaBoci81Ya6jWRoRYP9dtnofR5xg,1452
456
456
  pygpt_net/data/css/web-chatgpt_wide.light.css,sha256=WWhVX4xbDOPgW7O2SIRZCvWR1N8fzqviFO5fhe-AvDs,1503
457
+ pygpt_net/data/fixtures/fake_stream.txt,sha256=3C8uBnMlk8lbHKwxmcxxDsu9BXtGH-huXdol8yRrn5g,264970
457
458
  pygpt_net/data/fonts/Lato/Lato-Black.ttf,sha256=iUTaWoYezgAYX6Fz6mUyTn1Hl6qGPG-g8D4GaAWXS2w,69484
458
459
  pygpt_net/data/fonts/Lato/Lato-BlackItalic.ttf,sha256=G7asNGubFS-gk9VAMO_NBz4o5R7AB8-3bCAflv103mc,71948
459
460
  pygpt_net/data/fonts/Lato/Lato-Bold.ttf,sha256=e3IFmfiu07rFuVMf7PZ1DI-n5ZO3J3ObwGkvzA9Vtng,73316
@@ -659,7 +660,7 @@ pygpt_net/data/icons/window.svg,sha256=Ac0i_zydkTg_hHi-ZzTRNW0uQ5z7j-2TjNX6iwv3o
659
660
  pygpt_net/data/icons/work.svg,sha256=6dAOEVzYU-6RUn3NpTvf6l2d4dD9vnalkYQTwjKNRrs,365
660
661
  pygpt_net/data/icons/zoom_in.svg,sha256=tFULsXTslx_gX6wi0XUCk8hiFlptvYCr922ipMQvNrM,422
661
662
  pygpt_net/data/icons/zoom_out.svg,sha256=HA01vUxYKU4KuOzZ74O6K6fzpEg_b6XW5fJ6U-XUwzs,396
662
- pygpt_net/data/js/app.js,sha256=jWswdPnNb1uKo4sr1r3xPwDa59R6ROraTrlqU4Y5Xds,220543
663
+ pygpt_net/data/js/app.js,sha256=5wvIcaT2xjA4EGlnkKgmxrsErHhOFJGLF7MMsMpQY_E,263192
663
664
  pygpt_net/data/js/highlight/DIGESTS.md,sha256=bFIjBE_BrA2dkd0CM5SXLKQIccKOUcUeRFUK4c_3kWM,75091
664
665
  pygpt_net/data/js/highlight/LICENSE,sha256=bAgUMVkdnfaWyC3FmP4UI3ZbiimbIA7QCyga_Q9kxJA,1514
665
666
  pygpt_net/data/js/highlight/README.md,sha256=iUuAsk62y3lZEbACdBntj-_CEEN4gUfcbi4hkmEL6n0,1717
@@ -1676,7 +1677,7 @@ pygpt_net/data/js/markdown-it/markdown-it-katex.min.js,sha256=nWYrb1ouvTQ-qaHXWC
1676
1677
  pygpt_net/data/js/markdown-it/markdown-it.min.js,sha256=OMcKHnypGrQOLZ5uYBKYUacX7Rx9Ssu91Bv5UDeRz2g,123618
1677
1678
  pygpt_net/data/languages.csv,sha256=fvtER6vnTXFHQslCh-e0xCfZDQ-ijgW4GYpOJG4U7LY,8289
1678
1679
  pygpt_net/data/locale/locale.de.ini,sha256=nFqwNFFWxlZaTX2Pzs2w-uiD3qZMdenTBTjIVMQAKlQ,106273
1679
- pygpt_net/data/locale/locale.en.ini,sha256=kjgHF5Bw6HndsN4yxk1iwAnqEhWGT0K1JBTAAqKxcdM,100198
1680
+ pygpt_net/data/locale/locale.en.ini,sha256=H1JiWfZHElugm66djuzNRf5sgKHcdckdZ2CQGgvwFrQ,100198
1680
1681
  pygpt_net/data/locale/locale.es.ini,sha256=boozgxr57UrQe8B0M4sYfQbjJBmGMxZ3SSPIHz7yqj4,106936
1681
1682
  pygpt_net/data/locale/locale.fr.ini,sha256=aa5aJlVsXzfnYxyWf6_cEWVZcEH_XBycCs6177ypX5E,109674
1682
1683
  pygpt_net/data/locale/locale.it.ini,sha256=W2uCdniurtvhpzFTIX0TgZmVPMe0A2wQQUmH668dvZU,104620
@@ -1880,7 +1881,7 @@ pygpt_net/item/notepad.py,sha256=7v3A3HJjew0IoFM65Li0xfoSEdJzfEUZM1IH1ND0Bxw,180
1880
1881
  pygpt_net/item/preset.py,sha256=9VeMlWjUvRKYyVu1XPxaZOFZvL-N_ThXZR_M38TbpWA,8523
1881
1882
  pygpt_net/item/prompt.py,sha256=xequZDTEv4eKjmC5OLqZnNrbyy0YA4LHe0k2RpPiBw0,1745
1882
1883
  pygpt_net/js.qrc,sha256=Dyp5qWokt0MGSOmNfI6ASawF8Xo1f4SZArUJ8vckcbQ,526
1883
- pygpt_net/js_rc.py,sha256=Hbz0aqZJXzJTfKYTHEQJN7JXnkRpZ0KCwOLfkHgPBao,2333809
1884
+ pygpt_net/js_rc.py,sha256=P9XgLOEVTZ-jgX3mK09i1Hhi1vAz_lUKZr6OtyiuZQA,2385174
1884
1885
  pygpt_net/launcher.py,sha256=bqR175OVZ_Q3yKsIM5NiHB1S1b-vXrIglsQyr6zyrWU,10125
1885
1886
  pygpt_net/migrations/Version20231227152900.py,sha256=1Rw1mK2mVQs0B2HrbxHICu1Pd1X5jg4yZIrytnR5N5Y,2849
1886
1887
  pygpt_net/migrations/Version20231230095000.py,sha256=A1_e9oC_E4LSo9uBFiiI2dKH7N-SERFp7DMX1R_8LXQ,906
@@ -2415,7 +2416,7 @@ pygpt_net/ui/layout/chat/chat.py,sha256=qB4RwT9N0eCtrbyasgO0Cxvcm2nXACck6MflGAoQ
2415
2416
  pygpt_net/ui/layout/chat/explorer.py,sha256=Jg6aK5qTCTNgb4EXr-zeZXSexARQSzn4W8unqV1MGe8,1358
2416
2417
  pygpt_net/ui/layout/chat/input.py,sha256=Wnb29-1MQPD4AUU4CZN4vz6tba_L6tgIyJX-Xr4cgxY,9969
2417
2418
  pygpt_net/ui/layout/chat/markdown.py,sha256=hjYY8Da1z0IZZD086_csMcDY1wwagpuQTDZ-XfgeNgs,18656
2418
- pygpt_net/ui/layout/chat/output.py,sha256=b1qY1C2Fs_k3fOA47JnkHXfvRDdptHbEZpMF6aiwA8g,7280
2419
+ pygpt_net/ui/layout/chat/output.py,sha256=Skt2-Zeka7fY_uzh_8H8MsvusJaWpaesfk2Ezyft6p4,7280
2419
2420
  pygpt_net/ui/layout/chat/painter.py,sha256=XmogOPKRIBMldZOmJDNSVZLqFC_JTCXLu6Eyfw1Da3c,8552
2420
2421
  pygpt_net/ui/layout/ctx/__init__.py,sha256=NJ9L0yJKIx1nKnk2sczp7ILWVbu2hfpvUz4E56EFuPI,509
2421
2422
  pygpt_net/ui/layout/ctx/ctx.py,sha256=GDJyolAnFlAd49bbu9-LGsCxOUTAImSH5In4i8YHFOo,1653
@@ -2562,12 +2563,12 @@ pygpt_net/ui/widget/textarea/output.py,sha256=krWta3GHwdlPOqcxLln150bo7iUOtbFL_y
2562
2563
  pygpt_net/ui/widget/textarea/rename.py,sha256=NwuGRIeWMo7WfsMguAFpTqdOz1eTiXbxrDXGsbWF_TY,1358
2563
2564
  pygpt_net/ui/widget/textarea/search_input.py,sha256=aoOlunBwxn-z3gIMNKfnghHX00sC36wQHl87dRlDJlM,5227
2564
2565
  pygpt_net/ui/widget/textarea/url.py,sha256=xbNQxoM5fYI1ZWbvybQkPmNPrIq3yhtNPBOSOWftZCg,1337
2565
- pygpt_net/ui/widget/textarea/web.py,sha256=7NJJuTMG-9KDOSIXn1GexnsRkApDB24HSVziHskI4vQ,18064
2566
+ pygpt_net/ui/widget/textarea/web.py,sha256=t6ZppPxmkpw6z2hgL0w0-0EF_ez_0ZPi-JYWiTnpN6E,18086
2566
2567
  pygpt_net/ui/widget/vision/__init__.py,sha256=8HT4tQFqQogEEpGYTv2RplKBthlsFKcl5egnv4lzzEw,488
2567
2568
  pygpt_net/ui/widget/vision/camera.py,sha256=v1qEncaZr5pXocO5Cpk_lsgfCMvfFigdJmzsYfzvCl0,1877
2568
- pygpt_net/utils.py,sha256=FeoWestHkhaewYKqLswHsslChqCnakJdxUJdexd0Pw8,9653
2569
- pygpt_net-2.6.44.dist-info/LICENSE,sha256=rbPqNB_xxANH8hKayJyIcTwD4bj4Y2G-Mcm85r1OImM,1126
2570
- pygpt_net-2.6.44.dist-info/METADATA,sha256=DCmvMgiqjfXI6nt4FJbfxXBSsn0WnGplvclU5kLUTYc,160100
2571
- pygpt_net-2.6.44.dist-info/WHEEL,sha256=b4K_helf-jlQoXBBETfwnf4B04YC67LOev0jo4fX5m8,88
2572
- pygpt_net-2.6.44.dist-info/entry_points.txt,sha256=qvpII6UHIt8XfokmQWnCYQrTgty8FeJ9hJvOuUFCN-8,43
2573
- pygpt_net-2.6.44.dist-info/RECORD,,
2569
+ pygpt_net/utils.py,sha256=7lZj_YSzx7ZfvqFtjYThEvRJNSBZzrJyK7ZxDAtYPAQ,9708
2570
+ pygpt_net-2.6.45.dist-info/LICENSE,sha256=rbPqNB_xxANH8hKayJyIcTwD4bj4Y2G-Mcm85r1OImM,1126
2571
+ pygpt_net-2.6.45.dist-info/METADATA,sha256=VrR-NeU2xxa-gd8eWdRfbKUtnPn_q7KqJ0LpSTzywWA,162703
2572
+ pygpt_net-2.6.45.dist-info/WHEEL,sha256=b4K_helf-jlQoXBBETfwnf4B04YC67LOev0jo4fX5m8,88
2573
+ pygpt_net-2.6.45.dist-info/entry_points.txt,sha256=qvpII6UHIt8XfokmQWnCYQrTgty8FeJ9hJvOuUFCN-8,43
2574
+ pygpt_net-2.6.45.dist-info/RECORD,,