gpt-pr 0.7.0__tar.gz → 0.7.1__tar.gz

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.

Potentially problematic release.


This version of gpt-pr might be problematic. Click here for more details.

Files changed (34)
  1. gpt_pr-0.7.1/PKG-INFO +286 -0
  2. {gpt-pr-0.7.0 → gpt_pr-0.7.1}/README.md +4 -6
  3. gpt_pr-0.7.1/gpt_pr/__init__.py +3 -0
  4. {gpt-pr-0.7.0/gptpr → gpt_pr-0.7.1/gpt_pr}/checkversion.py +22 -19
  5. gpt_pr-0.7.1/gpt_pr/gh.py +44 -0
  6. gpt_pr-0.7.1/gpt_pr/gpt.py +8 -0
  7. gpt_pr-0.7.1/gpt_pr/main.py +117 -0
  8. gpt_pr-0.7.1/gpt_pr/prdata.py +238 -0
  9. {gpt-pr-0.7.0/gptpr → gpt_pr-0.7.1/gpt_pr}/test_checkversion.py +44 -38
  10. {gpt-pr-0.7.0/gptpr → gpt_pr-0.7.1/gpt_pr}/test_config.py +55 -35
  11. gpt_pr-0.7.1/gpt_pr/test_gh.py +60 -0
  12. gpt_pr-0.7.1/gpt_pr/test_prdata.py +17 -0
  13. gpt_pr-0.7.1/pyproject.toml +35 -0
  14. gpt-pr-0.7.0/MANIFEST.in +0 -3
  15. gpt-pr-0.7.0/PKG-INFO +0 -9
  16. gpt-pr-0.7.0/gpt_pr.egg-info/PKG-INFO +0 -9
  17. gpt-pr-0.7.0/gpt_pr.egg-info/SOURCES.txt +0 -24
  18. gpt-pr-0.7.0/gpt_pr.egg-info/dependency_links.txt +0 -1
  19. gpt-pr-0.7.0/gpt_pr.egg-info/entry_points.txt +0 -3
  20. gpt-pr-0.7.0/gpt_pr.egg-info/not-zip-safe +0 -1
  21. gpt-pr-0.7.0/gpt_pr.egg-info/requires.txt +0 -59
  22. gpt-pr-0.7.0/gpt_pr.egg-info/top_level.txt +0 -1
  23. gpt-pr-0.7.0/gptpr/__init__.py +0 -0
  24. gpt-pr-0.7.0/gptpr/gh.py +0 -37
  25. gpt-pr-0.7.0/gptpr/main.py +0 -102
  26. gpt-pr-0.7.0/gptpr/prdata.py +0 -199
  27. gpt-pr-0.7.0/gptpr/test_prdata.py +0 -13
  28. gpt-pr-0.7.0/gptpr/version.py +0 -1
  29. gpt-pr-0.7.0/requirements.txt +0 -40
  30. gpt-pr-0.7.0/setup.cfg +0 -7
  31. gpt-pr-0.7.0/setup.py +0 -50
  32. {gpt-pr-0.7.0/gptpr → gpt_pr-0.7.1/gpt_pr}/config.py +0 -0
  33. {gpt-pr-0.7.0/gptpr → gpt_pr-0.7.1/gpt_pr}/consolecolor.py +0 -0
  34. {gpt-pr-0.7.0/gptpr → gpt_pr-0.7.1/gpt_pr}/gitutil.py +0 -0
gpt_pr-0.7.1/PKG-INFO ADDED
@@ -0,0 +1,286 @@
1
+ Metadata-Version: 2.4
2
+ Name: gpt-pr
3
+ Version: 0.7.1
4
+ Summary: Automate your GitHub workflow with GPT-PR: an OpenAI powered library for streamlined PR generation.
5
+ Author: alissonperez
6
+ Author-email: 756802+alissonperez@users.noreply.github.com
7
+ Requires-Python: >=3.9,<4.0
8
+ Classifier: Programming Language :: Python :: 3
9
+ Classifier: Programming Language :: Python :: 3.9
10
+ Classifier: Programming Language :: Python :: 3.10
11
+ Classifier: Programming Language :: Python :: 3.11
12
+ Classifier: Programming Language :: Python :: 3.12
13
+ Classifier: Programming Language :: Python :: 3.13
14
+ Classifier: Programming Language :: Python :: 3.14
15
+ Requires-Dist: fire (>=0.7.1,<0.8.0)
16
+ Requires-Dist: gitpython (>=3.1.45,<4.0.0)
17
+ Requires-Dist: inquirerpy (>=0.3.4,<0.4.0)
18
+ Requires-Dist: openai (>=2.5.0,<3.0.0)
19
+ Requires-Dist: prompt-toolkit (>=3.0.52,<4.0.0)
20
+ Requires-Dist: pygithub (>=2.8.1,<3.0.0)
21
+ Requires-Dist: requests (>=2.32.5,<3.0.0)
22
+ Requires-Dist: tiktoken (>=0.12.0,<0.13.0)
23
+ Description-Content-Type: text/markdown
24
+
25
+ # GPT-PR
26
+
27
+ GPT-PR is an open-source command-line tool designed to streamline your GitHub workflow for opening PRs. Leveraging OpenAI's ChatGPT API, it automatically opens a GitHub Pull Request with a predefined description and title directly from your current project directory.
28
+
29
+ [![asciicast](https://asciinema.org/a/u0PwZlNjAGZcdXPPrjf84wj2A.svg)](https://asciinema.org/a/u0PwZlNjAGZcdXPPrjf84wj2A)
30
+
31
+ ## Quick Start
32
+
33
+ For a more detailed explanation, see [Installation](#installation) and [Configuration](#configuration).
34
+
35
+ ### 1. Installation
36
+
37
+ ```bash
38
+ pip install -U gpt-pr
39
+ ```
40
+
41
+ If you don't have the `pip` command available, follow [these instructions](https://pip.pypa.io/en/stable/installation/) to install it on different platforms.
42
+
43
+ ### 2. Fill OpenAI API Key
44
+
45
+ 1. Go to [OpenAI API Keys](https://platform.openai.com/api-keys) and generate a new key.
46
+ 2. Run the following command to fill your key in GPT-PR (it will be stored in `~/.gpt-pr.ini`):
47
+
48
+ ```bash
49
+ gpt-pr-config set openai_api_key MY-API-KEY-VALUE
50
+ ```
51
+
52
+ ### 3. Generate a GitHub GH Token to Open PRs
53
+
54
+ 1. Go to [GitHub Settings](https://github.com/settings/tokens), choose `Generate new token (classic)`, and select all permissions under `repo` (full control of private repositories).
55
+ 2. Run the following command to fill your GH token (it will also be stored in `~/.gpt-pr.ini`):
56
+
57
+ ```bash
58
+ gpt-pr-config set gh_token MY-GH-TOKEN-VALUE
59
+ ```
60
+
61
+ ### 4. Ready to NEVER WRITE A PR AGAIN
62
+
63
+ 1. Make your changes, commit them, and push to origin (important!).
64
+ 2. Run the following command in your project directory:
65
+
66
+ ```bash
67
+ gpt-pr
68
+ ```
69
+
70
+ 3. Answer the questions. At the end, you'll receive the URL of a freshly opened PR.
71
+
72
+ ## Contributing and Feedback
73
+
74
+ We welcome your contributions and feedback to help improve GPT-PR! Here’s how you can get involved:
75
+
76
+ ### Open Issues
77
+
78
+ - **Feature Requests**: Have an idea for a new feature? We’d love to hear it! Open an issue to request new features or enhancements.
79
+ - **Bug Reports**: Encountered a bug? Let us know by opening an issue with detailed information so we can fix it.
80
+ - **General Feedback**: Any other suggestions or feedback? Feel free to share your thoughts.
81
+
82
+ To open an issue, go to the [Issues](https://github.com/your-repo/gpt-pr/issues) section of our GitHub repository. Your contributions are very welcome and highly appreciated!
83
+
84
+ More details about it at our [CONTRIBUTING](./CONTRIBUTING.md) guide.
85
+
86
+ ## Table of Contents
87
+
88
+ - [Features](#features)
89
+ - [Prerequisites](#prerequisites)
90
+ - [Installation](#installation)
91
+ - [Configuration](#configuration)
92
+ - [Usage](#usage)
93
+ - [How to Contribute](#how-to-contribute)
94
+ - [Roadmap](#roadmap)
95
+
96
+ ## Features
97
+
98
+ - Analyzes the diff changes of the current branch against the `main` branch.
99
+ - Provides an option to exclude certain file changes from PR generation (for instance, you can ignore a `package.lock` file with 5k lines changed).
100
+ - Incorporates commit messages into the process.
101
+
102
+ ## Prerequisites
103
+
104
+ Before getting started, make sure you have the following installed:
105
+
106
+ - Python 3.9 or higher
107
+ - [Poetry](https://python-poetry.org/)
108
+
109
+ ## Installation
110
+
111
+ You can install and use GPT-PR in one of two ways. Choose the option that best suits your needs.
112
+
113
+ ### Option 1: Using `pip install` (Recommended)
114
+
115
+ 1. Install OR Update the package:
116
+
117
+ ```bash
118
+ pip install -U gpt-pr
119
+ ```
120
+
121
+ 2. Setup API keys for GitHub and OpenAI, take a look at [Configuration](#configuration).
122
+
123
+ 3. Inside the Git repository you are working on, ensure you have pushed your branch to origin, then run:
124
+
125
+ ```bash
126
+ gpt-pr --help
127
+ ```
128
+
129
+ ### Option 2: Cloning the code (NOT recommended)
130
+
131
+ 1. Clone the repository:
132
+
133
+ ```bash
134
+ git clone https://github.com/alissonperez/gpt-pr.git
135
+ ```
136
+
137
+ 2. Navigate to the project directory and install dependencies:
138
+
139
+ ```bash
140
+ cd gpt-pr
141
+ poetry install
142
+ ```
143
+
144
+ After setting up API keys ([Configuration](#configuration)), you can use GPT-PR within any git project directory. Suppose you've cloned **this project** to `~/workplace/gpt-pr`, here's how you can use it:
145
+
146
+ ```bash
147
+ poetry run python gpt_pr/main.py --help
148
+ ```
149
+
150
+ ## Configuration
151
+
152
+ ### See all configs available
153
+
154
+ To print all default configs and what is being used, just run:
155
+
156
+ ```bash
157
+ gpt-pr-config print
158
+ ```
159
+
160
+ ### Setting up GitHub Token (`GH_TOKEN`)
161
+
162
+ GPT-PR tool will look for a `GH_TOKEN` in current shell env var OR in gpt-pr config file (at `~/.gpt-pr.ini`).
163
+
164
+ To authenticate with GitHub, generate and export a GitHub Personal Access Token:
165
+
166
+ 1. Navigate to [GitHub's Personal Access Token page](https://github.com/settings/tokens).
167
+ 2. Click "Generate new token."
168
+ 3. Provide a description and select the required permissions `repo` for the token.
169
+ 4. Click "Generate token" at the bottom of the page.
170
+ 5. Copy the generated token.
171
+ 6. Set `gh_token` config running (supposing your gh token is `ghp_4Mb1QEr9gY5e8Lk3tN1KjPzX7W9z2V4HtJ2b`):
172
+
173
+ ```bash
174
+ gpt-pr-config set gh_token ghp_4Mb1QEr9gY5e8Lk3tN1KjPzX7W9z2V4HtJ2b
175
+ ```
176
+
177
+ Or just export it as an environment variable in your shell initializer:
178
+
179
+ ```bash
180
+ export GH_TOKEN=your_generated_token_here
181
+ ```
182
+
183
+ ### Setting up OpenAI API Key (`OPENAI_API_KEY`)
184
+
185
+ GPT-PR tool will look for a `OPENAI_API_KEY` env var in current shell OR in gpt-pr config file (at `~/.gpt-pr.ini`).
186
+
187
+ This project needs to interact with the ChatGPT API to generate the pull request description. So, you need to generate and export an OpenAI API Key:
188
+
189
+ 1. Navigate to [OpenAI's API Key page](https://platform.openai.com/signup).
190
+ 2. If you don't have an account, sign up and log in.
191
+ 3. Go to the API Keys section and click "Create new key."
192
+ 4. Provide a description and click "Create."
193
+ 5. Copy the generated API key.
194
+ 6. Set `openai_api_key` config running (supposing your openai_api_key is `QEr9gY5e8Lk3tN1KjPzX7W9z2V4Ht`):
195
+
196
+ ```bash
197
+ gpt-pr-config set openai_api_key QEr9gY5e8Lk3tN1KjPzX7W9z2V4Ht
198
+ ```
199
+
200
+ Or just export it as an environment variable in your shell initializer:
201
+
202
+ ```bash
203
+ export OPENAI_API_KEY=your_generated_api_key_here
204
+ ```
205
+
206
+ ### Setting Max Input LLM Tokens
207
+
208
+ You can adjust the maximum number of input tokens allowed when calling the LLM model by modifying the corresponding setting.
209
+
210
+ For example, to change the maximum to 20,000 tokens, use the following command:
211
+
212
+ ```bash
213
+ gpt-pr-config set input_max_tokens 20000
214
+ ```
215
+
216
+ ### Change OpenAI model
217
+
218
+ To change OpenAI model, just run:
219
+
220
+ ```bash
221
+ gpt-pr-config set openai_model gpt-4o-mini
222
+ ```
223
+
224
+ > Obs.: `gpt-4o-mini` already is the default model of the project
225
+
226
+ To see a full list of available models, access [OpenAI Models Documentation](https://platform.openai.com/docs/models)
227
+
228
+ ### GPT-PR Library Signature in PRs
229
+
230
+ To help other developers recognize and understand the use of the GPT-PR library in generating pull requests, we have included an optional signature feature. By default, this feature is enabled and appends the text "Generated by GPT-PR" at the end of each pull request. This transparency fosters better collaboration and awareness among team members about the tools being utilized in the development process.
231
+
232
+ If you prefer to disable this feature, simply run the following command:
233
+
234
+ ```bash
235
+ gpt-pr-config set add_tool_signature false
236
+ ```
237
+
238
+ ### Reset config
239
+
240
+ To reset any config to default value, just run:
241
+
242
+ ```bash
243
+ gpt-pr-config reset config_name
244
+ ```
245
+
246
+ Example:
247
+
248
+ ```bash
249
+ gpt-pr-config reset openai_model
250
+ ```
251
+
252
+ ## Usage
253
+
254
+ ### Generating Github Pull Requests
255
+
256
+ To create a Pull request from your current branch commits to merge with `main` branch, just run:
257
+
258
+ ```
259
+ gpt-pr
260
+ ```
261
+
262
+ If you would like to compare with other base branch that is not `main`, just use `-b` param:
263
+
264
+ ```
265
+ gpt-pr -b my-other-branch
266
+ ```
267
+
268
+ ### Usage help
269
+
270
+ To show help commands:
271
+
272
+ ```
273
+ gpt-pr -h
274
+ ```
275
+
276
+ Output:
277
+ ![image](https://github.com/alissonperez/gpt-pr/assets/756802/cc6c0ca4-5759-44ce-ad35-e4e7305b3875)
278
+
279
+ ## Roadmap
280
+
281
+ - [x] Improve execution method, possibly through a shell script or at least an alias in bash rc files.
282
+ - Change to use with pip installation and console_scripts entry point.
283
+ - [x] Fetch GitHub PR templates from the current project.
284
+ - [ ] Add configuration to set which LLM and model should be used (OpenAI GPT, Mistral, etc...)
285
+ - [ ] Add unit tests.
286
+
@@ -79,8 +79,8 @@ More details about it at our [CONTRIBUTING](./CONTRIBUTING.md) guide.
79
79
 
80
80
  Before getting started, make sure you have the following installed:
81
81
 
82
- - Python 3.7 or higher
83
- - [Pipenv](https://pipenv.pypa.io/en/latest/)
82
+ - Python 3.9 or higher
83
+ - [Poetry](https://python-poetry.org/)
84
84
 
85
85
  ## Installation
86
86
 
@@ -114,15 +114,13 @@ git clone https://github.com/alissonperez/gpt-pr.git
114
114
 
115
115
  ```bash
116
116
  cd gpt-pr
117
- pipenv install
117
+ poetry install
118
118
  ```
119
119
 
120
120
  After setting up API keys ([Configuration](#configuration)), you can use GPT-PR within any git project directory. Suppose you've cloned **this project** to `~/workplace/gpt-pr`, here's how you can use it:
121
121
 
122
122
  ```bash
123
- PYTHONPATH=~/workplace/gpt-pr/gpt-pr \
124
- PIPENV_PIPFILE=~/workplace/gpt-pr/Pipfile \
125
- pipenv run python ~/workplace/gpt-pr/gptpr/main.py --help
123
+ poetry run python gpt_pr/main.py --help
126
124
  ```
127
125
 
128
126
  ## Configuration
@@ -0,0 +1,3 @@
1
+ from importlib.metadata import version
2
+
3
+ __version__ = version(__name__)
@@ -2,35 +2,35 @@ import requests
2
2
  import os
3
3
  import json
4
4
  import tempfile
5
- from gptpr.version import __version__
5
+ from gpt_pr import __version__
6
6
  from datetime import datetime, timedelta
7
7
 
8
- from gptpr import consolecolor as cc
8
+ from gpt_pr import consolecolor as cc
9
9
 
10
10
 
11
- PACKAGE_NAME = 'gpt-pr'
12
- CACHE_FILE = os.path.join(os.path.expanduser("~"), '.gpt_pr_update_cache.json')
11
+ PACKAGE_NAME = "gpt-pr"
12
+ CACHE_FILE = os.path.join(os.path.expanduser("~"), ".gpt_pr_update_cache.json")
13
13
  CACHE_DURATION = timedelta(days=1)
14
14
 
15
15
 
16
16
  def cache_daily_version(func):
17
17
  def wrapper(*args, **kwargs):
18
18
  cache = load_cache()
19
- last_checked = cache.get('last_checked')
19
+ last_checked = cache.get("last_checked")
20
20
 
21
21
  if last_checked:
22
22
  last_checked = datetime.fromisoformat(last_checked)
23
23
 
24
24
  if datetime.now() - last_checked < CACHE_DURATION:
25
25
  # Use cached version info
26
- latest_version = cache.get('latest_version')
26
+ latest_version = cache.get("latest_version")
27
27
  if latest_version:
28
28
  return latest_version
29
29
 
30
30
  latest_version = func(*args, **kwargs)
31
31
  cache = {
32
- 'last_checked': datetime.now().isoformat(),
33
- 'latest_version': latest_version
32
+ "last_checked": datetime.now().isoformat(),
33
+ "latest_version": latest_version,
34
34
  }
35
35
  save_cache(cache)
36
36
 
@@ -41,18 +41,18 @@ def cache_daily_version(func):
41
41
 
42
42
  def get_cache_file_path():
43
43
  temp_dir = tempfile.gettempdir()
44
- return os.path.join(temp_dir, f'{PACKAGE_NAME}_update_cache.json')
44
+ return os.path.join(temp_dir, f"{PACKAGE_NAME}_update_cache.json")
45
45
 
46
46
 
47
47
  @cache_daily_version
48
48
  def get_latest_version():
49
- url = f'https://pypi.org/pypi/{PACKAGE_NAME}/json'
49
+ url = f"https://pypi.org/pypi/{PACKAGE_NAME}/json"
50
50
 
51
51
  try:
52
52
  response = requests.get(url)
53
53
  response.raise_for_status()
54
54
  data = response.json()
55
- return data['info']['version']
55
+ return data["info"]["version"]
56
56
  except requests.exceptions.RequestException as e:
57
57
  print(f"Error fetching latest version info: {e}")
58
58
  return None
@@ -61,7 +61,7 @@ def get_latest_version():
61
61
  def load_cache():
62
62
  cache_file = get_cache_file_path()
63
63
  if os.path.exists(cache_file):
64
- with open(cache_file, 'r') as file:
64
+ with open(cache_file, "r") as file:
65
65
  return json.load(file)
66
66
 
67
67
  return {}
@@ -69,7 +69,7 @@ def load_cache():
69
69
 
70
70
  def save_cache(data):
71
71
  cache_file = get_cache_file_path()
72
- with open(cache_file, 'w') as file:
72
+ with open(cache_file, "w") as file:
73
73
  file.write(json.dumps(data))
74
74
 
75
75
 
@@ -77,12 +77,15 @@ def check_for_updates():
77
77
  latest_version = get_latest_version()
78
78
 
79
79
  if latest_version and latest_version != __version__:
80
- print('')
81
- print(cc.yellow(
82
- f'A new version of {PACKAGE_NAME} is available ({latest_version}). '
83
- f'You are using version {__version__}. Please update by running'),
84
- cc.green(f'pip install --upgrade {PACKAGE_NAME}.'))
85
- print('')
80
+ print("")
81
+ print(
82
+ cc.yellow(
83
+ f"A new version of {PACKAGE_NAME} is available ({latest_version}). "
84
+ f"You are using version {__version__}. Please update by running"
85
+ ),
86
+ cc.green(f"pip install --upgrade {PACKAGE_NAME}."),
87
+ )
88
+ print("")
86
89
 
87
90
 
88
91
  if __name__ == "__main__":
@@ -0,0 +1,44 @@
1
+ import os
2
+ from github import Github
3
+ from InquirerPy import inquirer
4
+ from gpt_pr.config import config, config_command_example, CONFIG_README_SECTION
5
+
6
+
7
+ def _get_gh_token():
8
+ gh_token = config.get_user_config("GH_TOKEN")
9
+ if not gh_token:
10
+ gh_token = os.environ.get("GH_TOKEN")
11
+
12
+ if not gh_token:
13
+ print(
14
+ 'Please set "gh_token" config. Just run:',
15
+ config_command_example("gh_token", "[my gh token]"),
16
+ "more about at",
17
+ CONFIG_README_SECTION,
18
+ )
19
+ raise SystemExit(1)
20
+
21
+ return gh_token
22
+
23
+
24
+ def create_pr(pr_data, yield_confirmation, gh=None):
25
+ if not gh:
26
+ gh = Github(_get_gh_token())
27
+
28
+ repo = gh.get_repo(f"{pr_data.branch_info.owner}/{pr_data.branch_info.repo}")
29
+
30
+ pr_confirmation = (
31
+ yield_confirmation
32
+ or inquirer.confirm(message="Create GitHub PR?", default=True).execute()
33
+ )
34
+
35
+ if pr_confirmation:
36
+ pr = repo.create_pull(
37
+ title=pr_data.title,
38
+ body=pr_data.create_body(),
39
+ head=pr_data.branch_info.branch,
40
+ base=pr_data.branch_info.base_branch,
41
+ )
42
+ print("Pull request created successfully: ", pr.html_url)
43
+ else:
44
+ print("cancelling...")
@@ -0,0 +1,8 @@
1
+ import tiktoken
2
+
3
+
4
+ def num_tokens_from_string(string: str, encoding_name: str) -> int:
5
+ """Returns the number of tokens in a text string."""
6
+ encoding = tiktoken.get_encoding(encoding_name)
7
+ num_tokens = len(encoding.encode(string))
8
+ return num_tokens
@@ -0,0 +1,117 @@
1
+ import fire
2
+ from InquirerPy import inquirer
3
+
4
+ from gpt_pr.gitutil import get_branch_info
5
+ from gpt_pr.gh import create_pr
6
+ from gpt_pr.prdata import get_pr_data
7
+ from gpt_pr import __version__
8
+ from gpt_pr.config import config, config_command_example, CONFIG_README_SECTION
9
+ from gpt_pr import consolecolor as cc
10
+ from gpt_pr.checkversion import check_for_updates
11
+
12
+
13
+ def run(base_branch="main", origin="origin", yield_confirmation=False, version=False):
14
+ """
15
+ Create Pull Requests from current branch with base branch (default 'main' branch)
16
+ """
17
+
18
+ if version:
19
+ print("Current version:", __version__)
20
+ return
21
+
22
+ branch_info = get_branch_info(base_branch, origin, yield_confirmation)
23
+
24
+ if not branch_info:
25
+ return
26
+
27
+ pr_data = None
28
+ generate_pr_data = True
29
+ while generate_pr_data:
30
+ pr_data = get_pr_data(branch_info)
31
+ print("")
32
+ print("#########################################")
33
+ print(pr_data.to_display())
34
+ print("#########################################")
35
+ print("")
36
+
37
+ if yield_confirmation:
38
+ break
39
+
40
+ generate_pr_data = not inquirer.confirm(
41
+ message="Create PR with this? If 'no', let's try again...", default=True
42
+ ).execute()
43
+
44
+ if generate_pr_data:
45
+ print("Generating another PR data...")
46
+
47
+ create_pr(pr_data, yield_confirmation)
48
+
49
+
50
+ def set_config(name, value):
51
+ name = name.upper()
52
+ config.set_user_config(name, value)
53
+ config.persist()
54
+
55
+ print("Config value", cc.bold(name), "set to", cc.yellow(value))
56
+
57
+
58
+ def get_config(name):
59
+ upper_name = name.upper()
60
+ print(
61
+ "Config value",
62
+ cc.bold(name),
63
+ "=",
64
+ cc.yellow(config.get_user_config(upper_name)),
65
+ )
66
+
67
+
68
+ def reset_config(name):
69
+ upper_name = name.upper()
70
+ config.reset_user_config(upper_name)
71
+ print(
72
+ "Config value",
73
+ cc.bold(name),
74
+ "=",
75
+ cc.yellow(config.get_user_config(upper_name)),
76
+ )
77
+
78
+
79
+ def print_config():
80
+ print("Config values at", cc.yellow(config.get_filepath()))
81
+ print("")
82
+ print(
83
+ "To set values, just run:",
84
+ cc.yellow(config_command_example("[config name]", "[value]")),
85
+ )
86
+ print("More about at", cc.yellow(CONFIG_README_SECTION))
87
+ print("")
88
+ current_section = None
89
+ for section, option, value in config.all_values():
90
+ if current_section != section:
91
+ print("")
92
+ current_section = section
93
+
94
+ print(f"[{cc.bold(section)}]", option, "=", cc.yellow(value))
95
+
96
+
97
+ def main():
98
+ check_for_updates()
99
+
100
+ fire.Fire(run)
101
+
102
+
103
+ def run_config():
104
+ check_for_updates()
105
+
106
+ fire.Fire(
107
+ {
108
+ "set": set_config,
109
+ "get": get_config,
110
+ "print": print_config,
111
+ "reset": reset_config,
112
+ }
113
+ )
114
+
115
+
116
+ if __name__ == "__main__":
117
+ main()