pygpt-net 2.6.25__py3-none-any.whl → 2.6.27__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- pygpt_net/CHANGELOG.txt +8 -0
- pygpt_net/__init__.py +1 -1
- pygpt_net/app.py +3 -1
- pygpt_net/controller/access/voice.py +3 -5
- pygpt_net/controller/audio/audio.py +9 -6
- pygpt_net/controller/audio/ui.py +263 -0
- pygpt_net/controller/chat/common.py +17 -1
- pygpt_net/controller/model/importer.py +17 -3
- pygpt_net/controller/theme/theme.py +4 -1
- pygpt_net/core/audio/backend/native.py +113 -79
- pygpt_net/core/audio/backend/pyaudio.py +16 -19
- pygpt_net/core/audio/backend/pygame.py +12 -15
- pygpt_net/core/audio/capture.py +10 -9
- pygpt_net/core/audio/context.py +3 -6
- pygpt_net/core/models/models.py +5 -1
- pygpt_net/core/types/openai.py +2 -1
- pygpt_net/data/config/config.json +18 -4
- pygpt_net/data/config/models.json +2 -2
- pygpt_net/data/config/settings.json +58 -10
- pygpt_net/data/locale/locale.de.ini +2 -0
- pygpt_net/data/locale/locale.en.ini +10 -2
- pygpt_net/data/locale/locale.es.ini +2 -0
- pygpt_net/data/locale/locale.fr.ini +2 -0
- pygpt_net/data/locale/locale.it.ini +2 -0
- pygpt_net/data/locale/locale.pl.ini +3 -1
- pygpt_net/data/locale/locale.uk.ini +2 -0
- pygpt_net/data/locale/locale.zh.ini +2 -0
- pygpt_net/plugin/audio_input/simple.py +5 -10
- pygpt_net/plugin/audio_output/plugin.py +4 -17
- pygpt_net/provider/core/config/patch.py +10 -1
- pygpt_net/provider/llms/local.py +8 -2
- pygpt_net/provider/llms/open_router.py +104 -0
- pygpt_net/ui/layout/chat/input.py +5 -2
- pygpt_net/ui/main.py +1 -2
- pygpt_net/ui/widget/audio/bar.py +5 -1
- pygpt_net/ui/widget/textarea/input.py +475 -50
- {pygpt_net-2.6.25.dist-info → pygpt_net-2.6.27.dist-info}/METADATA +46 -35
- {pygpt_net-2.6.25.dist-info → pygpt_net-2.6.27.dist-info}/RECORD +41 -39
- {pygpt_net-2.6.25.dist-info → pygpt_net-2.6.27.dist-info}/LICENSE +0 -0
- {pygpt_net-2.6.25.dist-info → pygpt_net-2.6.27.dist-info}/WHEEL +0 -0
- {pygpt_net-2.6.25.dist-info → pygpt_net-2.6.27.dist-info}/entry_points.txt +0 -0
|
@@ -1,6 +1,6 @@
|
|
|
1
1
|
Metadata-Version: 2.3
|
|
2
2
|
Name: pygpt-net
|
|
3
|
-
Version: 2.6.
|
|
3
|
+
Version: 2.6.27
|
|
4
4
|
Summary: Desktop AI Assistant powered by: OpenAI GPT-5, GPT-4, o1, o3, Gemini, Claude, Grok, DeepSeek, and other models supported by Llama Index, and Ollama. Chatbot, agents, completion, image generation, vision analysis, speech-to-text, plugins, internet access, file handling, command execution and more.
|
|
5
5
|
License: MIT
|
|
6
6
|
Keywords: ai,api,api key,app,assistant,bielik,chat,chatbot,chatgpt,claude,dall-e,deepseek,desktop,gemini,gpt,gpt-3.5,gpt-4,gpt-4-vision,gpt-4o,gpt-5,gpt-oss,gpt3.5,gpt4,grok,langchain,llama-index,llama3,mistral,o1,o3,ollama,openai,presets,py-gpt,py_gpt,pygpt,pyside,qt,text completion,tts,ui,vision,whisper
|
|
@@ -51,6 +51,7 @@ Requires-Dist: llama-index-embeddings-huggingface-api (>=0.3.1,<0.4.0)
|
|
|
51
51
|
Requires-Dist: llama-index-embeddings-mistralai (>=0.3.0,<0.4.0)
|
|
52
52
|
Requires-Dist: llama-index-embeddings-ollama (>=0.5.0,<0.6.0)
|
|
53
53
|
Requires-Dist: llama-index-embeddings-openai (>=0.3.1,<0.4.0)
|
|
54
|
+
Requires-Dist: llama-index-embeddings-openai-like (>=0.1.1,<0.2.0)
|
|
54
55
|
Requires-Dist: llama-index-embeddings-voyageai (>=0.3.6,<0.4.0)
|
|
55
56
|
Requires-Dist: llama-index-llms-anthropic (>=0.6.19,<0.7.0)
|
|
56
57
|
Requires-Dist: llama-index-llms-azure-openai (>=0.3.4,<0.4.0)
|
|
@@ -116,7 +117,7 @@ Description-Content-Type: text/markdown
|
|
|
116
117
|
|
|
117
118
|
[](https://snapcraft.io/pygpt)
|
|
118
119
|
|
|
119
|
-
Release: **2.6.
|
|
120
|
+
Release: **2.6.27** | build: **2025-08-26** | Python: **>=3.10, <3.14**
|
|
120
121
|
|
|
121
122
|
> Official website: https://pygpt.net | Documentation: https://pygpt.readthedocs.io
|
|
122
123
|
>
|
|
@@ -486,41 +487,41 @@ Please go to `Debugging and Logging` section for instructions on how to log and
|
|
|
486
487
|
|
|
487
488
|
# Quick Start
|
|
488
489
|
|
|
489
|
-
## Setting-up
|
|
490
|
+
## Setting-up API Key(s)
|
|
490
491
|
|
|
491
|
-
|
|
492
|
+
You can configure API keys for various providers, such as OpenAI, Anthropic, Google, xAI, Perplexity, OpenRouter, and more. This flexibility allows you to use different providers based on your needs.
|
|
492
493
|
|
|
493
|
-
During the initial
|
|
494
|
+
During the initial setup, configure your API keys within the application.
|
|
494
495
|
|
|
495
496
|
To do so, navigate to the menu:
|
|
496
497
|
|
|
497
|
-
|
|
498
|
-
Config -> Settings -> API Keys
|
|
499
|
-
```
|
|
498
|
+
`Config -> Settings -> API Keys`
|
|
500
499
|
|
|
501
|
-
|
|
500
|
+
Here, you can add or manage API keys for any supported provider.
|
|
502
501
|
|
|
503
502
|

|
|
504
503
|
|
|
505
|
-
|
|
504
|
+
**Configuring Provider**
|
|
506
505
|
|
|
507
|
-
|
|
506
|
+
1. **Select the Provider:** Choose the tab for your provider.
|
|
507
|
+
2. **Enter the API Key:** Paste the corresponding API key for the selected provider.
|
|
508
508
|
|
|
509
|
-
|
|
509
|
+
**Example**
|
|
510
510
|
|
|
511
|
-
|
|
511
|
+
- **OpenAI:** Obtain your API key by registering on the OpenAI website: https://platform.openai.com and navigating to https://platform.openai.com/account/api-keys.
|
|
512
|
+
- **Anthropic, Google, etc.:** Follow similar steps on their respective platforms.
|
|
512
513
|
|
|
513
|
-
**Note:** The ability to use models
|
|
514
|
+
**Note:** The ability to use models or services depends on your access level with the respective provider. If you wish to use custom API endpoints or local APIs that do not require API keys, simply enter any value into the API key field to bypass prompts about an empty key.
|
|
514
515
|
|
|
515
|
-
#
|
|
516
|
+
# Work modes
|
|
516
517
|
|
|
517
518
|
## Chat
|
|
518
519
|
|
|
519
520
|
**+ Inline Vision and Image generation**
|
|
520
521
|
|
|
521
|
-
This mode in **PyGPT** mirrors `ChatGPT`, allowing you to chat with models such as `
|
|
522
|
+
This mode in **PyGPT** mirrors `ChatGPT`, allowing you to chat with models such as `GPT-5`, `GPT-4`, `o1`, `o3`, and `Claude`, `Gemini`, `Grok`, `Perplexity (sonar)`, `Deepseek`, and others. It works by using the `Responses` and `ChatCompletions` OpenAI API (or compatible). You can select the API endpoint to use in: `Config -> Settings -> API Keys`.
|
|
522
523
|
|
|
523
|
-
**Tip: This mode directly uses the OpenAI
|
|
524
|
+
**Tip: This mode directly uses the OpenAI SDK. Other models, such as Gemini, Claude, Grok, Sonar, or Llama3, are supported in Chat mode via LlamaIndex or OpenAI API compatible endpoints (if available), which the application switches to in the background when working with models other than OpenAI.**
|
|
524
525
|
|
|
525
526
|
The main part of the interface is a chat window where you see your conversations. Below it is a message box for typing. On the right side, you can set up or change the model and system prompt. You can also save these settings as presets to easily switch between models or tasks.
|
|
526
527
|
|
|
@@ -628,7 +629,7 @@ You can also develop and provide your own custom loader and register it within t
|
|
|
628
629
|
LlamaIndex is also integrated with context database - you can use data from database (your context history) as additional context in discussion.
|
|
629
630
|
Options for indexing existing context history or enabling real-time indexing new ones (from database) are available in `Settings / Indexes / LlamaIndex` section.
|
|
630
631
|
|
|
631
|
-
**WARNING:** remember that when indexing content, API calls to the embedding model are used. Each indexing consumes additional tokens. Always control the number of tokens used on the
|
|
632
|
+
**WARNING:** remember that when indexing content, API calls to the embedding model are used. Each indexing consumes additional tokens. Always control the number of tokens used on the provider's page.
|
|
632
633
|
|
|
633
634
|
**Tip:** Using the Chat with Files mode, you have default access to files manually indexed from the /data directory. However, you can use additional context by attaching a file - such additional context from the attachment does not land in the main index, but only in a temporary one, available only for the given conversation.
|
|
634
635
|
|
|
@@ -654,8 +655,6 @@ In the `Settings -> LlamaIndex -> Data loaders` section you can define the addit
|
|
|
654
655
|
|
|
655
656
|
## Chat with Audio
|
|
656
657
|
|
|
657
|
-
2024-11-26: currently in beta.
|
|
658
|
-
|
|
659
658
|
This mode works like the Chat mode but with native support for audio input and output using a multimodal model - `gpt-4o-audio`. In this mode, audio input and output are directed to and from the model directly, without the use of external plugins. This enables faster and better audio communication.
|
|
660
659
|
|
|
661
660
|
More info: https://platform.openai.com/docs/guides/audio/quickstart
|
|
@@ -664,8 +663,6 @@ Currently, in beta. Tool and function calls are not enabled in this mode.
|
|
|
664
663
|
|
|
665
664
|
## Research
|
|
666
665
|
|
|
667
|
-
2025-03-02: currently in beta.
|
|
668
|
-
|
|
669
666
|
This mode (when using Sonar and R1 models) operates using the Perplexity API: https://perplexity.ai.
|
|
670
667
|
|
|
671
668
|
It allows for deep web searching and utilizes Sonar models, available in `Perplexity AI`.
|
|
@@ -730,7 +727,7 @@ Images are stored in ``img`` directory in **PyGPT** user data folder.
|
|
|
730
727
|
|
|
731
728
|
## Vision
|
|
732
729
|
|
|
733
|
-
This mode enables image analysis using the `
|
|
730
|
+
This mode enables image analysis using the `GPT-5`, `GPT-4o` and other vision (multimodal) models. Functioning much like the chat mode,
|
|
734
731
|
it also allows you to upload images or provide URLs to images. The vision feature can analyze both local
|
|
735
732
|
images and those found online.
|
|
736
733
|
|
|
@@ -834,8 +831,6 @@ You can change the prompts used for evaluating the response in `Settings -> Prom
|
|
|
834
831
|
|
|
835
832
|
## Agent (OpenAI)
|
|
836
833
|
|
|
837
|
-
**Added in: 2.5.76** - currently in beta.
|
|
838
|
-
|
|
839
834
|
The mode operates on the `openai-agents` library integrated into the application:
|
|
840
835
|
|
|
841
836
|
https://github.com/openai/openai-agents-python
|
|
@@ -964,9 +959,7 @@ You can choose which internal mode the agent should use in the settings:
|
|
|
964
959
|
|
|
965
960
|
```Settings / Agent (autonomous) / Sub-mode to use```
|
|
966
961
|
|
|
967
|
-
|
|
968
|
-
|
|
969
|
-
Default is: `chat`.
|
|
962
|
+
Default mode is: `Chat`.
|
|
970
963
|
|
|
971
964
|
If you want to use the LlamaIndex mode when running the agent, you can also specify which index `LlamaIndex` should use with the option:
|
|
972
965
|
|
|
@@ -1005,8 +998,6 @@ Give me a list of active experts.
|
|
|
1005
998
|
|
|
1006
999
|
## Computer use
|
|
1007
1000
|
|
|
1008
|
-
**2.5.71**: Currently in beta.
|
|
1009
|
-
|
|
1010
1001
|
This mode allows for autonomous computer control.
|
|
1011
1002
|
|
|
1012
1003
|
In this mode, the model takes control of the mouse and keyboard and can navigate within the user's environment. The `Computer use` remote tool is used here: https://platform.openai.com/docs/guides/tools-computer-use, combined with the `Mouse and Keyboard` plugin.
|
|
@@ -1122,6 +1113,12 @@ Files such as jpg, png, and similar images are a special case. By default, image
|
|
|
1122
1113
|
|
|
1123
1114
|
To use the `RAG` mode, the file must be indexed in the vector database. This occurs automatically at the time of upload if the `Auto-index on upload` option in the `Attachments` tab is enabled. When uploading large files, such indexing might take a while - therefore, if you are using the `Full context` option, which does not use the index, you can disable the `Auto-index` option to speed up the upload of the attachment. In this case, it will only be indexed when the `RAG` option is called for the first time, and until then, attachment will be available in the form of `Full context` and `Summary`.
|
|
1124
1115
|
|
|
1116
|
+
**Embeddings**
|
|
1117
|
+
|
|
1118
|
+
When using RAG to query attachments, the documents are indexed into a temporary vector store. With multiple providers and models available, you can select the model used for querying attachments in: `Config -> Settings -> Files and Attachments`. You can also choose the embedding models for specified providers in `Config -> Settings -> Indexes / LlamaIndex -> Embeddings -> Default embedding models` list. By default, when querying an attachment using RAG, the default embedding model and provider corresponding to the RAG query model will be used. If no default configuration is provided for a specific provider, the global embedding configuration will be used.
|
|
1119
|
+
|
|
1120
|
+
For example, if the RAG query model is `gpt-4o-mini`, then the default model for the provider `OpenAI` will be used. If the default model for `OpenAI` is not specified on the list, the global provider and model will be used.
|
|
1121
|
+
|
|
1125
1122
|
## Downloading files
|
|
1126
1123
|
|
|
1127
1124
|
**PyGPT** enables the automatic download and saving of files created by the model. This is carried out in the background, with the files being saved to an `data` folder located within the user's working directory. To view or manage these files, users can navigate to the `Files` tab which features a file browser for this specific directory. Here, users have the interface to handle all files sent by the AI.
|
|
@@ -1170,7 +1167,7 @@ The name of the currently active profile is shown as (Profile Name) in the windo
|
|
|
1170
1167
|
|
|
1171
1168
|
## Built-in models
|
|
1172
1169
|
|
|
1173
|
-
PyGPT has
|
|
1170
|
+
PyGPT has a preconfigured list of models (as of 2025-07-26):
|
|
1174
1171
|
|
|
1175
1172
|
- `bielik-11b-v2.3-instruct:Q4_K_M` (Ollama)
|
|
1176
1173
|
- `chatgpt-4o-latest` (OpenAI)
|
|
@@ -1249,7 +1246,9 @@ PyGPT has built-in support for models (as of 2025-07-26):
|
|
|
1249
1246
|
All models are specified in the configuration file `models.json`, which you can customize.
|
|
1250
1247
|
This file is located in your working directory. You can add new models provided directly by `OpenAI API` (or compatible) and those supported by `LlamaIndex` or `Ollama` to this file. Configuration for LlamaIndex is placed in `llama_index` key.
|
|
1251
1248
|
|
|
1252
|
-
|
|
1249
|
+
You can import new models by manually editing `models.json` or by using the model importer in the `Config -> Models -> Import` menu.
|
|
1250
|
+
|
|
1251
|
+
**Tip**: Anthropic and Deepseek API providers use VoyageAI for embeddings (Chat with Files and attachments RAG), so you must also configure the Voyage API key if you want to use embeddings from these providers.
|
|
1253
1252
|
|
|
1254
1253
|
## Adding a custom model
|
|
1255
1254
|
|
|
@@ -1267,6 +1266,7 @@ There is built-in support for those LLM providers:
|
|
|
1267
1266
|
- `Mistral AI`
|
|
1268
1267
|
- `Ollama`
|
|
1269
1268
|
- `OpenAI`
|
|
1269
|
+
- `OpenRouter`
|
|
1270
1270
|
- `Perplexity`
|
|
1271
1271
|
- `xAI`
|
|
1272
1272
|
|
|
@@ -1933,7 +1933,7 @@ PyGPT can be extended with:
|
|
|
1933
1933
|
|
|
1934
1934
|
- custom web search engine providers
|
|
1935
1935
|
|
|
1936
|
-
- custom agents
|
|
1936
|
+
- custom agents (LlamaIndex or OpenAI Agents)
|
|
1937
1937
|
|
|
1938
1938
|
|
|
1939
1939
|
See the section `Extending PyGPT / Adding a custom plugin` for more details.
|
|
@@ -2111,7 +2111,7 @@ a particular query will consume and displays this estimate in real time. This gi
|
|
|
2111
2111
|
control over your token usage. The app provides detailed information about the tokens used for the user's prompt,
|
|
2112
2112
|
the system prompt, any additional data, and those used within the context (the memory of previous entries).
|
|
2113
2113
|
|
|
2114
|
-
**Remember that these are only approximate calculations and do not include, for example, the number of tokens consumed by some plugins. You can find the exact number of tokens used on
|
|
2114
|
+
**Remember that these are only approximate calculations and do not include, for example, the number of tokens consumed by some plugins. You can find the exact number of tokens used on the provider's website.**
|
|
2115
2115
|
|
|
2116
2116
|

|
|
2117
2117
|
|
|
@@ -2467,6 +2467,8 @@ Enable/disable remote tools, like Web Search or Image generation to use in OpenA
|
|
|
2467
2467
|
|
|
2468
2468
|
- `Audio notify microphone listening start/stop`: enables audio "tick" notify when microphone listening started/ended.
|
|
2469
2469
|
|
|
2470
|
+
- `Continuous Audio Recording (Chunks)`: Enable recording in chunks for long audio recordings in notepad (voice notes).
|
|
2471
|
+
|
|
2470
2472
|
**Indexes / LlamaIndex**
|
|
2471
2473
|
|
|
2472
2474
|
**General**
|
|
@@ -3035,7 +3037,7 @@ PyGPT can be extended with:
|
|
|
3035
3037
|
|
|
3036
3038
|
- custom web search engine providers
|
|
3037
3039
|
|
|
3038
|
-
- custom agents
|
|
3040
|
+
- custom agents (LlamaIndex or OpenAI Agents)
|
|
3039
3041
|
|
|
3040
3042
|
**Examples (tutorial files)**
|
|
3041
3043
|
|
|
@@ -3141,6 +3143,7 @@ There is built-in support for those LLM providers:
|
|
|
3141
3143
|
- Local models (OpenAI API compatible)
|
|
3142
3144
|
- Ollama
|
|
3143
3145
|
- OpenAI
|
|
3146
|
+
- OpenRouter
|
|
3144
3147
|
- Perplexity
|
|
3145
3148
|
- xAI
|
|
3146
3149
|
|
|
@@ -3540,6 +3543,14 @@ may consume additional tokens that are not displayed in the main window.
|
|
|
3540
3543
|
|
|
3541
3544
|
## Recent changes:
|
|
3542
3545
|
|
|
3546
|
+
**2.6.27 (2025-08-26)**
|
|
3547
|
+
|
|
3548
|
+
- Simplified audio input: A microphone icon has been added to the input field.
|
|
3549
|
+
|
|
3550
|
+
**2.6.26 (2025-08-26)**
|
|
3551
|
+
|
|
3552
|
+
- Added new provider: OpenRouter (#133).
|
|
3553
|
+
|
|
3543
3554
|
**2.6.25 (2025-08-26)**
|
|
3544
3555
|
|
|
3545
3556
|
- Fixed the empty agent ID issue in OpenAI Agents evaluation.
|
|
@@ -1,14 +1,14 @@
|
|
|
1
|
-
pygpt_net/CHANGELOG.txt,sha256=
|
|
1
|
+
pygpt_net/CHANGELOG.txt,sha256=d25hMVdSIdlMirnV9GwQiFxfdYogn_vQPm8ESYXTEZ8,102310
|
|
2
2
|
pygpt_net/LICENSE,sha256=dz9sfFgYahvu2NZbx4C1xCsVn9GVer2wXcMkFRBvqzY,1146
|
|
3
|
-
pygpt_net/__init__.py,sha256=
|
|
4
|
-
pygpt_net/app.py,sha256=
|
|
3
|
+
pygpt_net/__init__.py,sha256=O1wVaFcshzNvIoOz9Wr0nSHNFaV53ZsS-H6PyB9AI7c,1373
|
|
4
|
+
pygpt_net/app.py,sha256=yj9spTUBBXrsKAFF3FlQrLgNNtFMXKLnsRIRxlXtqCY,21233
|
|
5
5
|
pygpt_net/config.py,sha256=LCKrqQfePVNrAvH3EY_1oZx1Go754sDoyUneJ0iGWFI,16660
|
|
6
6
|
pygpt_net/container.py,sha256=NsMSHURaEC_eW8vrCNdztwqkxB7jui3yVlzUOMYvCHg,4124
|
|
7
7
|
pygpt_net/controller/__init__.py,sha256=JEG5RwohoHCcqhBVoUhlwDJwsAhMbCRfl89QTtVXW9Q,6145
|
|
8
8
|
pygpt_net/controller/access/__init__.py,sha256=_XZxGy5U93JGU49GbIB9E_I26_uRV_Zbz18lcp7u23A,510
|
|
9
9
|
pygpt_net/controller/access/access.py,sha256=nPttwQf6RZHJAlXZ-3fnlcplwXxcJWp8ciq3FMsSssI,3974
|
|
10
10
|
pygpt_net/controller/access/control.py,sha256=nMGWzg60jNJMVAHIrism0_APzVMpbLAOcXG6mJuOSJ8,17332
|
|
11
|
-
pygpt_net/controller/access/voice.py,sha256=
|
|
11
|
+
pygpt_net/controller/access/voice.py,sha256=vMFRpHJeUlHzjrfdoCD1S9cVBNm17hvn13vRh9WcYvk,15703
|
|
12
12
|
pygpt_net/controller/agent/__init__.py,sha256=GRKHllr8kuzoA2_rRHiQv27znsEcwLCiuNuU4G9xVZw,509
|
|
13
13
|
pygpt_net/controller/agent/agent.py,sha256=2MYfXbWB4z11eAo5Y3Iz8kll3RKwA156E8D_BioXUSc,6319
|
|
14
14
|
pygpt_net/controller/agent/common.py,sha256=55CHhV-dsWeNe5QvdvNoyhEYVhQNrHt_Lv-VDTuiYRc,3871
|
|
@@ -25,7 +25,8 @@ pygpt_net/controller/assistant/threads.py,sha256=eh8qJfcMFJSWfciXMpsLvRgcdaK-ilX
|
|
|
25
25
|
pygpt_net/controller/attachment/__init__.py,sha256=-5owOyszPze-YLQuTtFLQnSwEj_ftTxFwAP_3jPNpss,514
|
|
26
26
|
pygpt_net/controller/attachment/attachment.py,sha256=jO45LFBY9tbVWXPGwHenAMZUIFPdubIkoSr10aAUMHQ,20942
|
|
27
27
|
pygpt_net/controller/audio/__init__.py,sha256=Ci5ClV3DKuMCLtFqQEOr5qun--tlIzKkQlwj9ug_kI0,509
|
|
28
|
-
pygpt_net/controller/audio/audio.py,sha256=
|
|
28
|
+
pygpt_net/controller/audio/audio.py,sha256=sW7RC9oL20D7lLS5i7mxSHn9brCcJLd0T0FMET2puGw,13665
|
|
29
|
+
pygpt_net/controller/audio/ui.py,sha256=XFTd3XneK-uXLsNWYZJmJsvIQV-t81r5SU7Qq0F0n9E,7762
|
|
29
30
|
pygpt_net/controller/calendar/__init__.py,sha256=AyzoNqYgxV35CMEzoi_SCSsQh4ehg_Wu_2nsK3xsbyg,512
|
|
30
31
|
pygpt_net/controller/calendar/calendar.py,sha256=s55RkCFQPFzdDoQ2zp3kohlNdpiWxdSxQtsaROeiigw,4424
|
|
31
32
|
pygpt_net/controller/calendar/note.py,sha256=AkOQ0FslaDkQbNwXBg95XhtPi3KzhKiFd374L8kTKBA,10169
|
|
@@ -36,7 +37,7 @@ pygpt_net/controller/chat/attachment.py,sha256=hU1Slpop6rebGcLTwln-zL5SjXxhDNJUC
|
|
|
36
37
|
pygpt_net/controller/chat/audio.py,sha256=QsU36McxqlRoP6B-NSeck968g1M8JhlLkLwGLunbapw,3210
|
|
37
38
|
pygpt_net/controller/chat/chat.py,sha256=3cOxTeub6X7ui3TxoP9rAi4GPuwUeOEcLd-TG5ogbc4,3003
|
|
38
39
|
pygpt_net/controller/chat/command.py,sha256=eKFQzP0tehZ3S_G7RoZVMTebQFSjPIpix3t7vDB7MWc,5291
|
|
39
|
-
pygpt_net/controller/chat/common.py,sha256=
|
|
40
|
+
pygpt_net/controller/chat/common.py,sha256=SaWRLcm0VfJbadvqbmIIxHe4ltWq1wcFbYgkpCkyt98,17710
|
|
40
41
|
pygpt_net/controller/chat/files.py,sha256=QZAi1Io57EU7htKt9M5I9OoGAFX51OH2V5-NsJktOto,2838
|
|
41
42
|
pygpt_net/controller/chat/image.py,sha256=2mZdFiCyej1RimfsKn0CSuu9kLOw0Za6B3lhEUEz9nU,8027
|
|
42
43
|
pygpt_net/controller/chat/input.py,sha256=5CKEHSzx1SU1F-ktIUt9VA3TLtxP5kSqWyvYzANqruY,7846
|
|
@@ -99,7 +100,7 @@ pygpt_net/controller/mode/__init__.py,sha256=1Kcz0xHc2IW_if9S9eQozBUvIu69eLAe7T-
|
|
|
99
100
|
pygpt_net/controller/mode/mode.py,sha256=F3rERGN_sAgAqDITFYd1Nj56_4MiBIS9TwjjSPH1uEc,7437
|
|
100
101
|
pygpt_net/controller/model/__init__.py,sha256=mQXq9u269D8TD3u_44J6DFFyHKkaZplk-tRFCssBGbE,509
|
|
101
102
|
pygpt_net/controller/model/editor.py,sha256=_WDVFTrgZKM5Y8MZiWur4e5oSuRbXr-Q3PDozVtZ9fw,16384
|
|
102
|
-
pygpt_net/controller/model/importer.py,sha256=
|
|
103
|
+
pygpt_net/controller/model/importer.py,sha256=yODAd4eNOdWEt12TP7DmXuqmu900s-mVbF_6e3hB61g,23531
|
|
103
104
|
pygpt_net/controller/model/model.py,sha256=E0VfgIwNn75pjnB_v3RnqHr6jV1Eeua8VgpreQlA8vI,9132
|
|
104
105
|
pygpt_net/controller/notepad/__init__.py,sha256=ZbMh4D6nsGuI4AwYMdegfij5ubmUznEE_UcqSSDjSPk,511
|
|
105
106
|
pygpt_net/controller/notepad/notepad.py,sha256=mQgXalIMKkYTVKGkUD1mEHkHIzhTlt3QSiSb5eIhZgo,10767
|
|
@@ -125,7 +126,7 @@ pygpt_net/controller/theme/common.py,sha256=z5mzpMnfkTeFstKm_uodDboAa3xj5vTpMKGC
|
|
|
125
126
|
pygpt_net/controller/theme/markdown.py,sha256=iH34dsZWyXCtIZuuRBHiAV__W0P4bY-7OuzEwehizr0,6064
|
|
126
127
|
pygpt_net/controller/theme/menu.py,sha256=17D8mW5i97D_nENKFM2EZ9KgR7RP0whiaYXLzHO2rb8,7452
|
|
127
128
|
pygpt_net/controller/theme/nodes.py,sha256=RTaxLR2aXXDrVNIpS9585xbFQlqrGI3gAO7Jng8NUHs,4871
|
|
128
|
-
pygpt_net/controller/theme/theme.py,sha256=
|
|
129
|
+
pygpt_net/controller/theme/theme.py,sha256=Qow-s3s8dN_L4AXgCPFCEdaiF9cOipJUJMehdVndmKU,8883
|
|
129
130
|
pygpt_net/controller/tools/__init__.py,sha256=ds63rOuwLEIe-SlY_sQkhWSdXS0lfVwseUiHkg2NTD4,509
|
|
130
131
|
pygpt_net/controller/tools/tools.py,sha256=bWxdwL3J2-WHBS3MBiKsS3kTW_rQI_nS9z8-8iKifKg,2920
|
|
131
132
|
pygpt_net/controller/ui/__init__.py,sha256=cxfh2SYeEDATGAZpcYDqCxYfp4KReQ1CYehevSf89EU,507
|
|
@@ -170,11 +171,11 @@ pygpt_net/core/attachments/worker.py,sha256=NgZmrAPGzD8_97UXgvMZKYI_DtJKRTOG62_Q
|
|
|
170
171
|
pygpt_net/core/audio/__init__.py,sha256=SNShKpjqXzLhaSKxWiM8b6lasHRkrdSZ8ck-X7RJ-VY,509
|
|
171
172
|
pygpt_net/core/audio/audio.py,sha256=_WaS1JHT9S3BqmzEc7bOcWwMfth6ueMJQVs13jzoy4c,7709
|
|
172
173
|
pygpt_net/core/audio/backend/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
|
|
173
|
-
pygpt_net/core/audio/backend/native.py,sha256=
|
|
174
|
-
pygpt_net/core/audio/backend/pyaudio.py,sha256=
|
|
175
|
-
pygpt_net/core/audio/backend/pygame.py,sha256=
|
|
176
|
-
pygpt_net/core/audio/capture.py,sha256=
|
|
177
|
-
pygpt_net/core/audio/context.py,sha256=
|
|
174
|
+
pygpt_net/core/audio/backend/native.py,sha256=0dTEp-ZZKniDGXEWnHIbA8iLDV2ntUslkINlbDxP0PA,23641
|
|
175
|
+
pygpt_net/core/audio/backend/pyaudio.py,sha256=GWS3cBpFxOb7T8_8q9JhVEcSL0LMggFtCSwd8ycAbNU,18508
|
|
176
|
+
pygpt_net/core/audio/backend/pygame.py,sha256=jVzPY2_RRjRgQu_aU5Frn-t1V5oSvHSVuxmX-16dyDM,16717
|
|
177
|
+
pygpt_net/core/audio/capture.py,sha256=z0uvhTmShq0hdnQgWxaIZ5tPXxN6ThuHbOwjwpg5q44,3813
|
|
178
|
+
pygpt_net/core/audio/context.py,sha256=7Hgph0cvpB5_RE66icGT7vp98iMnHUbvxCmfBSAutqY,1050
|
|
178
179
|
pygpt_net/core/audio/output.py,sha256=qhXw1rziBShha84b1lBKstNlA470Yuz-abkx9aNId6U,2737
|
|
179
180
|
pygpt_net/core/audio/whisper.py,sha256=WZ_fNQ06s1NBxyoYB-lTFqDO6ARcnq9MZFekRaTNxTo,993
|
|
180
181
|
pygpt_net/core/bridge/__init__.py,sha256=RCrT3CuP8-Gf_APr5mBXyNcRigrfHcgS-SYVVP_9flE,510
|
|
@@ -271,7 +272,7 @@ pygpt_net/core/llm/llm.py,sha256=O4dSkOvs0nKQffdFuGSlScvPCyrTFTNRFJTH-frqOnM,238
|
|
|
271
272
|
pygpt_net/core/locale/__init__.py,sha256=5fmTz0u-DvCrII3KqfVAnd8YIQ8F_JDPfN16z5JRcU4,510
|
|
272
273
|
pygpt_net/core/locale/locale.py,sha256=lplM0fr0oFOcp8Nhoss7EGfbnAqE_kQnX0KbttQjgP0,6059
|
|
273
274
|
pygpt_net/core/models/__init__.py,sha256=EpJrNNINMcaO4Qc6a87IWZkfBMx7G9YJN-pdLpcqH3w,510
|
|
274
|
-
pygpt_net/core/models/models.py,sha256=
|
|
275
|
+
pygpt_net/core/models/models.py,sha256=kvy5tpsbDwEws0W6v3Y4CjbV2VrGtu1_d4RubstzS_A,15782
|
|
275
276
|
pygpt_net/core/models/ollama.py,sha256=MiCt1Nzd3VHjnj7a0CmGjqUkPuD7401obd7G7KQIZzU,3189
|
|
276
277
|
pygpt_net/core/modes/__init__.py,sha256=dKpce7VTQCzmSfNBT1WHd_zKzXRthRs7ZKqHQSEtftc,509
|
|
277
278
|
pygpt_net/core/modes/modes.py,sha256=Dm1mChW26dzjrMe8QPUAbwnl95o62vyqbQVxwztMX5A,3065
|
|
@@ -330,7 +331,7 @@ pygpt_net/core/types/console.py,sha256=vzYZ4fYkwK71ECJB7Qop0qcVIC6USLxxKuFN-Zweu
|
|
|
330
331
|
pygpt_net/core/types/mode.py,sha256=jJIFch8lRo5q7lr3uX77odFjXBHwG1_h7_CxE56slSg,858
|
|
331
332
|
pygpt_net/core/types/model.py,sha256=V8O9yipzqyTmVjzeESQ1xvZpSdRU6UYmvWJ1M2Kxs5A,549
|
|
332
333
|
pygpt_net/core/types/multimodal.py,sha256=yeKLZ5MrCHU5LhWwFE-yGApt-FB59kTmElo3G7td9uw,594
|
|
333
|
-
pygpt_net/core/types/openai.py,sha256=
|
|
334
|
+
pygpt_net/core/types/openai.py,sha256=LQyJ506IbG2EJxLVHgKBDAjMLi_qbIC7ow_kM2zHwkw,1516
|
|
334
335
|
pygpt_net/core/types/tools.py,sha256=BdonNwytk5SxYtYdlDkMg5lMvFoXz3CQJHZ__oVlm_8,1223
|
|
335
336
|
pygpt_net/core/updater/__init__.py,sha256=fC4g0Xn9S8lLxGbop1q2o2qi9IZegoVayNVWemgBwds,511
|
|
336
337
|
pygpt_net/core/updater/updater.py,sha256=cykBw8BqJlJNisWnpXnSPZ25Gfw3Ufyd9a_dUuEo3p8,16740
|
|
@@ -347,8 +348,8 @@ pygpt_net/css_rc.py,sha256=i13kX7irhbYCWZ5yJbcMmnkFp_UfS4PYnvRFSPF7XXo,11349
|
|
|
347
348
|
pygpt_net/data/audio/click_off.mp3,sha256=aNiRDP1pt-Jy7ija4YKCNFBwvGWbzU460F4pZWZDS90,65201
|
|
348
349
|
pygpt_net/data/audio/click_on.mp3,sha256=qfdsSnthAEHVXzeyN4LlC0OvXuyW8p7stb7VXtlvZ1k,65201
|
|
349
350
|
pygpt_net/data/audio/ok.mp3,sha256=LTiV32pEBkpUGBkKkcOdOFB7Eyt_QoP2Nv6c5AaXftk,32256
|
|
350
|
-
pygpt_net/data/config/config.json,sha256=
|
|
351
|
-
pygpt_net/data/config/models.json,sha256=
|
|
351
|
+
pygpt_net/data/config/config.json,sha256=CPa4-sH8OmZXEymUSarvRN-FLPFPifQviX8JftBeEpU,26026
|
|
352
|
+
pygpt_net/data/config/models.json,sha256=2wF9zzowNmmRds1yRgTJMqLAmLX19oKH27uW_iroXwc,110162
|
|
352
353
|
pygpt_net/data/config/modes.json,sha256=M882iiqX_R2sNQl9cqZ3k-uneEvO9wpARtHRMLx_LHw,2265
|
|
353
354
|
pygpt_net/data/config/presets/agent_code_act.json,sha256=GYHqhxtKFLUCvRI3IJAJ7Qe1k8yD9wGGNwManldWzlI,754
|
|
354
355
|
pygpt_net/data/config/presets/agent_openai.json,sha256=bpDJgLRey_effQkzFRoOEGd4aHUrmzeODSDdNzrf62I,730
|
|
@@ -383,7 +384,7 @@ pygpt_net/data/config/presets/current.vision.json,sha256=x1ll5B3ROSKYQA6l27PRGXU
|
|
|
383
384
|
pygpt_net/data/config/presets/dalle_white_cat.json,sha256=esqUb43cqY8dAo7B5u99tRC0MBV5lmlrVLnJhTSkL8w,552
|
|
384
385
|
pygpt_net/data/config/presets/joke_agent.json,sha256=R6n9P7KRb0s-vZWZE7kHdlOfXAx1yYrPmUw8uLyw8OE,474
|
|
385
386
|
pygpt_net/data/config/presets/joke_expert.json,sha256=jjcoIYEOaEp8kLoIbecxQROiq4J3Zess5w8_HmngPOY,671
|
|
386
|
-
pygpt_net/data/config/settings.json,sha256=
|
|
387
|
+
pygpt_net/data/config/settings.json,sha256=FYQual-ajyQGqUDXJpZFpmOSmpuzoElTTFuUP8hvlhM,69392
|
|
387
388
|
pygpt_net/data/config/settings_section.json,sha256=OLWgjs3hHFzk50iwzVyUpcFW7dfochOnbZS0vDoMlDU,1158
|
|
388
389
|
pygpt_net/data/css/fix_windows.css,sha256=Mks14Vg25ncbMqZJfAMStrhvZmgHF6kU75ohTWRZeI8,664
|
|
389
390
|
pygpt_net/data/css/fix_windows.dark.css,sha256=7hGbT_qI5tphYC_WlFpJRDAcmjBb0AQ2Yc-y-_Zzf2M,161
|
|
@@ -1607,14 +1608,14 @@ pygpt_net/data/js/katex/fonts/KaTeX_Typewriter-Regular.woff2,sha256=cdUX1ngneHz6
|
|
|
1607
1608
|
pygpt_net/data/js/katex/katex.min.css,sha256=lVaKnUaQNG4pI71WHffQZVALLQF4LMZEk4nOia8U9ow,23532
|
|
1608
1609
|
pygpt_net/data/js/katex/katex.min.js,sha256=KLASOtKS2x8pUxWVzCDmlWJ4jhuLb0vtrgakbD6gDDo,276757
|
|
1609
1610
|
pygpt_net/data/languages.csv,sha256=fvtER6vnTXFHQslCh-e0xCfZDQ-ijgW4GYpOJG4U7LY,8289
|
|
1610
|
-
pygpt_net/data/locale/locale.de.ini,sha256
|
|
1611
|
-
pygpt_net/data/locale/locale.en.ini,sha256=
|
|
1612
|
-
pygpt_net/data/locale/locale.es.ini,sha256=
|
|
1613
|
-
pygpt_net/data/locale/locale.fr.ini,sha256=
|
|
1614
|
-
pygpt_net/data/locale/locale.it.ini,sha256=
|
|
1615
|
-
pygpt_net/data/locale/locale.pl.ini,sha256=
|
|
1616
|
-
pygpt_net/data/locale/locale.uk.ini,sha256=
|
|
1617
|
-
pygpt_net/data/locale/locale.zh.ini,sha256=
|
|
1611
|
+
pygpt_net/data/locale/locale.de.ini,sha256=z7X-5AjmZrqkLQGbel9u_rYSuoF9_gt4aaxwMq_zTiw,102827
|
|
1612
|
+
pygpt_net/data/locale/locale.en.ini,sha256=DQ8AvAA9V4LgWYCdIsZH-NPltNlxL705ncBcwOZnOTQ,94182
|
|
1613
|
+
pygpt_net/data/locale/locale.es.ini,sha256=LkR1AGN-XvBdg5zptmOg-UQXmQS9nhLePAO8cillmGo,103487
|
|
1614
|
+
pygpt_net/data/locale/locale.fr.ini,sha256=qfaLjDnmwsZr8q4UOoBPIu3z8FlrEo6-zHegfRgJ340,106292
|
|
1615
|
+
pygpt_net/data/locale/locale.it.ini,sha256=ekPv-WlnAhGcGO8Fi3wkgC_77JfDX9PjVaUiqn4OrPk,101282
|
|
1616
|
+
pygpt_net/data/locale/locale.pl.ini,sha256=E8XlrZR__yN353h16nWQanxTC9WKknSCI7YI1t08UUM,101028
|
|
1617
|
+
pygpt_net/data/locale/locale.uk.ini,sha256=cTQuWq2sbCSOqula6zr_iV6AYDyaKEMxsvXOzW1C-88,140816
|
|
1618
|
+
pygpt_net/data/locale/locale.zh.ini,sha256=dHbX2rjc46RHYk9T7ldjUGlqH7GvpbBC6SEGOXR-jI8,90170
|
|
1618
1619
|
pygpt_net/data/locale/plugin.agent.de.ini,sha256=BY28KpfFvgfVYJzcw2o5ScWnR4uuErIYGyc3NVHlmTw,1714
|
|
1619
1620
|
pygpt_net/data/locale/plugin.agent.en.ini,sha256=HwOWCI7e8uzlIgyRWRVyr1x6Xzs8Xjv5pfEc7jfLOo4,1728
|
|
1620
1621
|
pygpt_net/data/locale/plugin.agent.es.ini,sha256=bqaJQne8HPKFVtZ8Ukzo1TSqVW41yhYbGUqW3j2x1p8,1680
|
|
@@ -1835,11 +1836,11 @@ pygpt_net/plugin/agent/plugin.py,sha256=FcYwDE4QofWc0Utp89__bRpTYYbXcMk8xKRaM8GJ
|
|
|
1835
1836
|
pygpt_net/plugin/audio_input/__init__.py,sha256=c2WZ3EJZbtdBebTHvvTAURbe7ATo0oXjkKzblgJsMnk,511
|
|
1836
1837
|
pygpt_net/plugin/audio_input/config.py,sha256=x57IVxBapJp9rwos327T6U0jTFSPeRJ6BorqfYxJ4u0,9197
|
|
1837
1838
|
pygpt_net/plugin/audio_input/plugin.py,sha256=vGwDtsPkwCEwiz2ePnzW48Tuhr0VHEc7kQua11VE7tI,15714
|
|
1838
|
-
pygpt_net/plugin/audio_input/simple.py,sha256=
|
|
1839
|
+
pygpt_net/plugin/audio_input/simple.py,sha256=5zEAGsbzSE74nZC7D7cKHmPkSXa4VPG8L8ATtUPRDx8,6580
|
|
1839
1840
|
pygpt_net/plugin/audio_input/worker.py,sha256=06-08cQ63n7V-47tjUdUdE_gs5GMQTG--b5fxeHP1Zs,12186
|
|
1840
1841
|
pygpt_net/plugin/audio_output/__init__.py,sha256=UglI8YPtzF_-buENrR0vqDuvzlK3CJdIXKx-iaJozZM,510
|
|
1841
1842
|
pygpt_net/plugin/audio_output/config.py,sha256=IA2K-9fQMZSwYGyi30Uh5qAlYwuqwaHo3dtDJ13vQdo,1208
|
|
1842
|
-
pygpt_net/plugin/audio_output/plugin.py,sha256=
|
|
1843
|
+
pygpt_net/plugin/audio_output/plugin.py,sha256=plnsKSyfe7Ad5O5gTKWXmMKVkb1aOR_LOylw002iRwo,9845
|
|
1843
1844
|
pygpt_net/plugin/audio_output/worker.py,sha256=XhkY0uYlx1UIuAeWB3CA9MLvvDxI870E0iKJ0O2Lx10,3718
|
|
1844
1845
|
pygpt_net/plugin/base/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
|
|
1845
1846
|
pygpt_net/plugin/base/config.py,sha256=q5WAcF-h3KZH4bJFYANasM7UmV1v1c43fF1EZ05iF7Y,848
|
|
@@ -2038,7 +2039,7 @@ pygpt_net/provider/core/calendar/db_sqlite/storage.py,sha256=QDclQCQdr4QyRIqjgGX
|
|
|
2038
2039
|
pygpt_net/provider/core/config/__init__.py,sha256=jQQgG9u_ZLsZWXustoc1uvC-abUvj4RBKPAM30-f2Kc,488
|
|
2039
2040
|
pygpt_net/provider/core/config/base.py,sha256=cbvzbMNqL2XgC-36gGubnU37t94AX7LEw0lecb2Nm80,1365
|
|
2040
2041
|
pygpt_net/provider/core/config/json_file.py,sha256=GCcpCRQnBiSLWwlGbG9T3ZgiHkTfp5Jsg2KYkZcakBw,6789
|
|
2041
|
-
pygpt_net/provider/core/config/patch.py,sha256=
|
|
2042
|
+
pygpt_net/provider/core/config/patch.py,sha256=SipJw4rQ7TxOGd7zq5Ppjro28a2zAhIWYPBLKqkylag,123759
|
|
2042
2043
|
pygpt_net/provider/core/ctx/__init__.py,sha256=jQQgG9u_ZLsZWXustoc1uvC-abUvj4RBKPAM30-f2Kc,488
|
|
2043
2044
|
pygpt_net/provider/core/ctx/base.py,sha256=Tfb4MDNe9BXXPU3lbzpdYwJF9S1oa2-mzgu5XT4It9g,3003
|
|
2044
2045
|
pygpt_net/provider/core/ctx/db_sqlite/__init__.py,sha256=0dP8VhI4bnFsQQKxAkaleKFlyaMycDD_cnE7gBCa57Y,512
|
|
@@ -2128,10 +2129,11 @@ pygpt_net/provider/llms/llama_index/openai/responses.py,sha256=dOzrPV0u1CQRloU2c
|
|
|
2128
2129
|
pygpt_net/provider/llms/llama_index/openai/utils.py,sha256=IdvbjJ2y5zWDkntoPgBZ2pGbcrYIbGbg1smoju2XkUI,29243
|
|
2129
2130
|
pygpt_net/provider/llms/llama_index/x_ai/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
|
|
2130
2131
|
pygpt_net/provider/llms/llama_index/x_ai/embedding.py,sha256=QrGgpkD0F4Jm5cMJgN6oYai7UK_bJ0YoGr7Uvy5GtRU,2458
|
|
2131
|
-
pygpt_net/provider/llms/local.py,sha256=
|
|
2132
|
+
pygpt_net/provider/llms/local.py,sha256=UGFFbao5zOaoztg7ED4kyz3-QYTDSGozhaXm2T-G5K0,2570
|
|
2132
2133
|
pygpt_net/provider/llms/mistral.py,sha256=e8pcWyNT8HjA3KLZL1vO7z4FlBxer-QCVpaGtKgQ4UY,3858
|
|
2133
2134
|
pygpt_net/provider/llms/ollama.py,sha256=vVqA22eH-APgyfHCaHSvJlAgxLSvspvZSaOCeaKWQCw,4434
|
|
2134
2135
|
pygpt_net/provider/llms/ollama_custom.py,sha256=WVbLiEEwnz5loKiLy7EYmpuWz0Tp5Vhd1vOUB2051kI,24167
|
|
2136
|
+
pygpt_net/provider/llms/open_router.py,sha256=WydEHidDtkTCJ2_4xmjGNxCttzj8WcLL0oc3iTui3QE,3569
|
|
2135
2137
|
pygpt_net/provider/llms/openai.py,sha256=oaPCEffOQI3TGe_l15Ta3Mt_MKshxsONRCSc59fINaE,5419
|
|
2136
2138
|
pygpt_net/provider/llms/perplexity.py,sha256=DO5RZaUEDmRhps0Hoa1OX05no5n4uxT4JjwOGWPshPY,3899
|
|
2137
2139
|
pygpt_net/provider/llms/x_ai.py,sha256=TkOdSce3OndH4lSoYgBB7FkljM0eqbSX3OT5UtT0Cqc,4509
|
|
@@ -2298,7 +2300,7 @@ pygpt_net/ui/layout/chat/attachments_uploaded.py,sha256=MZA0aFOm9iKbYc6NrM7Ivg2i
|
|
|
2298
2300
|
pygpt_net/ui/layout/chat/calendar.py,sha256=hE9Gl0h5kPXe0OUkimRfys2aFti0Y4wzKxhh1gyGnjs,6578
|
|
2299
2301
|
pygpt_net/ui/layout/chat/chat.py,sha256=qB4RwT9N0eCtrbyasgO0Cxvcm2nXACck6MflGAoQqK0,2221
|
|
2300
2302
|
pygpt_net/ui/layout/chat/explorer.py,sha256=Jg6aK5qTCTNgb4EXr-zeZXSexARQSzn4W8unqV1MGe8,1358
|
|
2301
|
-
pygpt_net/ui/layout/chat/input.py,sha256=
|
|
2303
|
+
pygpt_net/ui/layout/chat/input.py,sha256=0EHzkZiZSzWXUuP_CSS5Fz1Ykx_TOgtEBMMipy1pjg0,9965
|
|
2302
2304
|
pygpt_net/ui/layout/chat/markdown.py,sha256=hjYY8Da1z0IZZD086_csMcDY1wwagpuQTDZ-XfgeNgs,18656
|
|
2303
2305
|
pygpt_net/ui/layout/chat/output.py,sha256=b1qY1C2Fs_k3fOA47JnkHXfvRDdptHbEZpMF6aiwA8g,7280
|
|
2304
2306
|
pygpt_net/ui/layout/chat/painter.py,sha256=fOoGvVHnKpkpilK-3ZgZh6kCh9uxINNtsCT8NUtOarQ,5761
|
|
@@ -2322,7 +2324,7 @@ pygpt_net/ui/layout/toolbox/presets.py,sha256=OSpVhZjmdI11ZH3YB3G1Vx9XFvQbJdTXu6
|
|
|
2322
2324
|
pygpt_net/ui/layout/toolbox/prompt.py,sha256=subUUZJgkCmvSRekZcgVYs6wzl-MYPBLXKTs0wcJFgw,2663
|
|
2323
2325
|
pygpt_net/ui/layout/toolbox/toolbox.py,sha256=CvYn_rCJ3JgDfXqw5IqslMDtNxd8Hyd5lgbqGpSUbus,2988
|
|
2324
2326
|
pygpt_net/ui/layout/toolbox/vision.py,sha256=E6-lLfU3vrWdlprayr6gxFs7F7AGkn4OIrFXrQ9p5XA,2035
|
|
2325
|
-
pygpt_net/ui/main.py,sha256=
|
|
2327
|
+
pygpt_net/ui/main.py,sha256=ytc69FgZgEr_JpHRDOeJTeMtqLsvRdGsRW29oeGg5zE,14116
|
|
2326
2328
|
pygpt_net/ui/menu/__init__.py,sha256=wAIKG9wLWfYv6tpXCTXptWb_XKoCc-4lYWLDvV1bVYk,508
|
|
2327
2329
|
pygpt_net/ui/menu/about.py,sha256=BtelbYhpXJGgsoEwsPuw61wVuGkzogpY3FVvWtd09HE,4619
|
|
2328
2330
|
pygpt_net/ui/menu/audio.py,sha256=3vQhMq8vk_h7yb_Gk2dZMRviFR2PExgR_ynpgOkyl-g,4226
|
|
@@ -2340,7 +2342,7 @@ pygpt_net/ui/widget/__init__.py,sha256=X9-pucLqQF9_ocDV-qNY6EQAJ_4dubGb-7TcWIzCX
|
|
|
2340
2342
|
pygpt_net/ui/widget/anims/loader.py,sha256=PzxHraeABUyMIZlg4Rk_tbJnUPmiwxlhdcHaCkURWWw,5989
|
|
2341
2343
|
pygpt_net/ui/widget/anims/toggles.py,sha256=9mX_yRwYJJxseb3cNIkWed_euv-iRNPzOBxiAz8wLjk,5935
|
|
2342
2344
|
pygpt_net/ui/widget/audio/__init__.py,sha256=8HT4tQFqQogEEpGYTv2RplKBthlsFKcl5egnv4lzzEw,488
|
|
2343
|
-
pygpt_net/ui/widget/audio/bar.py,sha256=
|
|
2345
|
+
pygpt_net/ui/widget/audio/bar.py,sha256=1TnNx5x4sXx-Iu4zPtwID7DGf97TH3OABG8BKyiC0Lk,3275
|
|
2344
2346
|
pygpt_net/ui/widget/audio/input.py,sha256=t9VAhP15HkSOvNV2crI3Kg6AgrQDj-wSQiiYTMlvK60,1721
|
|
2345
2347
|
pygpt_net/ui/widget/audio/input_button.py,sha256=QDfA8WBskY0pkN8EMXZ97017Sj11t4p9b_jCz4Q6aRY,4498
|
|
2346
2348
|
pygpt_net/ui/widget/audio/output.py,sha256=UxkiCnVT9DNFeByDGTFW_CK0LW8xSvhEK1zygtHvx4k,1586
|
|
@@ -2436,7 +2438,7 @@ pygpt_net/ui/widget/textarea/create.py,sha256=f4SrAW-2hjkKYIPrwVliSYH-LkgsQP8G13
|
|
|
2436
2438
|
pygpt_net/ui/widget/textarea/editor.py,sha256=qCMFJk8T7f4u3TFYKi02r2kOFSzzwrasWm_zSMsisPE,5153
|
|
2437
2439
|
pygpt_net/ui/widget/textarea/find.py,sha256=fQu6t-_LTZGFRNCkezywtMVsL-DocIkGBR_HbRFq61g,1534
|
|
2438
2440
|
pygpt_net/ui/widget/textarea/html.py,sha256=4DOnUYtHBhN-6X5w13GK-ceAAvTPd8M4mH_N-c3L_h0,12344
|
|
2439
|
-
pygpt_net/ui/widget/textarea/input.py,sha256=
|
|
2441
|
+
pygpt_net/ui/widget/textarea/input.py,sha256=uN6EFpcA-rm2ICdcy6UhuID9SAqe_sZCnIjSZHvAHNk,23654
|
|
2440
2442
|
pygpt_net/ui/widget/textarea/name.py,sha256=vcyAY_pJWJoS_IJqdJjhIeDSniTL9rfpt8aaobWNFVY,1132
|
|
2441
2443
|
pygpt_net/ui/widget/textarea/notepad.py,sha256=Yl2fvJyRzADxvZe0B44QG8E15LdZtPjwGcdM2iIcgu4,9858
|
|
2442
2444
|
pygpt_net/ui/widget/textarea/output.py,sha256=krWta3GHwdlPOqcxLln150bo7iUOtbFL_yJzMucGOFU,6246
|
|
@@ -2447,8 +2449,8 @@ pygpt_net/ui/widget/textarea/web.py,sha256=cqs5i67bD19_BNgcYL7NXlwYBei4UYSL_IYPZ
|
|
|
2447
2449
|
pygpt_net/ui/widget/vision/__init__.py,sha256=8HT4tQFqQogEEpGYTv2RplKBthlsFKcl5egnv4lzzEw,488
|
|
2448
2450
|
pygpt_net/ui/widget/vision/camera.py,sha256=v1qEncaZr5pXocO5Cpk_lsgfCMvfFigdJmzsYfzvCl0,1877
|
|
2449
2451
|
pygpt_net/utils.py,sha256=GBAXOpp_Wjfu7Al7TnTV62-R-JPMiP9GuPXLJ0HmeJU,8906
|
|
2450
|
-
pygpt_net-2.6.
|
|
2451
|
-
pygpt_net-2.6.
|
|
2452
|
-
pygpt_net-2.6.
|
|
2453
|
-
pygpt_net-2.6.
|
|
2454
|
-
pygpt_net-2.6.
|
|
2452
|
+
pygpt_net-2.6.27.dist-info/LICENSE,sha256=rbPqNB_xxANH8hKayJyIcTwD4bj4Y2G-Mcm85r1OImM,1126
|
|
2453
|
+
pygpt_net-2.6.27.dist-info/METADATA,sha256=tGZmVyRsBUbWGdxw2ppqgpImXmGIL29bgdA9W_DwyZM,159554
|
|
2454
|
+
pygpt_net-2.6.27.dist-info/WHEEL,sha256=b4K_helf-jlQoXBBETfwnf4B04YC67LOev0jo4fX5m8,88
|
|
2455
|
+
pygpt_net-2.6.27.dist-info/entry_points.txt,sha256=qvpII6UHIt8XfokmQWnCYQrTgty8FeJ9hJvOuUFCN-8,43
|
|
2456
|
+
pygpt_net-2.6.27.dist-info/RECORD,,
|
|
File without changes
|
|
File without changes
|
|
File without changes
|