gpt-pr 0.2.1__py3-none-any.whl → 0.4.0__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Potentially problematic release.
This version of gpt-pr might be problematic.
- {gpt_pr-0.2.1.dist-info → gpt_pr-0.4.0.dist-info}/METADATA +6 -6
- gpt_pr-0.4.0.dist-info/RECORD +17 -0
- {gpt_pr-0.2.1.dist-info → gpt_pr-0.4.0.dist-info}/entry_points.txt +1 -0
- gptpr/checkversion.py +89 -0
- gptpr/config.py +97 -0
- gptpr/gh.py +15 -5
- gptpr/main.py +50 -0
- gptpr/prdata.py +19 -7
- gptpr/test_checkversion.py +126 -0
- gptpr/test_config.py +99 -0
- gptpr/version.py +1 -1
- gpt_pr-0.2.1.dist-info/RECORD +0 -13
- {gpt_pr-0.2.1.dist-info → gpt_pr-0.4.0.dist-info}/WHEEL +0 -0
- {gpt_pr-0.2.1.dist-info → gpt_pr-0.4.0.dist-info}/top_level.txt +0 -0

{gpt_pr-0.2.1.dist-info → gpt_pr-0.4.0.dist-info}/METADATA
CHANGED

@@ -1,6 +1,6 @@
 Metadata-Version: 2.1
 Name: gpt-pr
-Version: 0.2.1
+Version: 0.4.0
 Summary: Automate your GitHub workflow with GPT-PR: an OpenAI powered library for streamlined PR generation.
 Home-page: http://github.com/alissonperez/gpt-pr
 Author: Alisson R. Perez

@@ -8,20 +8,19 @@ Author-email: alissonperez@outlook.com
 License: MIT
 Requires-Python: >=3.7
 Requires-Dist: cffi ==1.15.1
-Requires-Dist: cryptography ==42.0.
+Requires-Dist: cryptography ==42.0.7
 Requires-Dist: fire ==0.6.0
 Requires-Dist: pycparser ==2.21
 Requires-Dist: wcwidth ==0.2.13
 Requires-Dist: charset-normalizer ==3.3.2 ; python_full_version >= "3.7.0"
 Requires-Dist: prompt-toolkit ==3.0.43 ; python_full_version >= "3.7.0"
 Requires-Dist: openai ==1.14.0 ; python_full_version >= "3.7.1"
-Requires-Dist: exceptiongroup ==1.2.
+Requires-Dist: exceptiongroup ==1.2.1 ; python_version < "3.11"
 Requires-Dist: cached-property ==1.5.2 ; python_version < "3.8"
-Requires-Dist: typing-extensions ==4.7.1 ; python_version < "3.8"
 Requires-Dist: importlib-metadata ==6.7.0 ; python_version == "3.7"
 Requires-Dist: six ==1.16.0 ; python_version >= "2.7" and python_version not in "3.0, 3.1, 3.2"
 Requires-Dist: deprecated ==1.2.14 ; python_version >= "2.7" and python_version not in "3.0, 3.1, 3.2, 3.3"
-Requires-Dist: idna ==3.
+Requires-Dist: idna ==3.7 ; python_version >= "3.5"
 Requires-Dist: certifi ==2024.2.2 ; python_version >= "3.6"
 Requires-Dist: distro ==1.9.0 ; python_version >= "3.6"
 Requires-Dist: pynacl ==1.5.0 ; python_version >= "3.6"

@@ -41,7 +40,8 @@ Requires-Dist: requests ==2.31.0 ; python_version >= "3.7"
 Requires-Dist: smmap ==5.0.1 ; python_version >= "3.7"
 Requires-Dist: sniffio ==1.3.1 ; python_version >= "3.7"
 Requires-Dist: termcolor ==2.3.0 ; python_version >= "3.7"
-Requires-Dist: tqdm ==4.66.
+Requires-Dist: tqdm ==4.66.4 ; python_version >= "3.7"
+Requires-Dist: typing-extensions ==4.7.1 ; python_version >= "3.7"
 Requires-Dist: urllib3 ==2.0.7 ; python_version >= "3.7"
 Requires-Dist: zipp ==3.15.0 ; python_version >= "3.7"
 Requires-Dist: inquirerpy ==0.3.4 ; python_version >= "3.7" and python_version < "4.0"

gpt_pr-0.4.0.dist-info/RECORD
ADDED

@@ -0,0 +1,17 @@
+gptpr/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
+gptpr/checkversion.py,sha256=9RfB9emkRwk6QJ_KfSZ7zYQQeOcsze-0783qILxoJ_U,2350
+gptpr/config.py,sha256=YVQgTuJYVwqAabqK-E3xSzlc4QUvZb0q4dUW-_pY2cg,2609
+gptpr/consolecolor.py,sha256=_JmBMNjIflWMlgP2VkCWu6uQLR9oHBy52uV3TRJJgF4,800
+gptpr/gh.py,sha256=uSWY_TzbrAM00neOBkyfV5vxDO4FzMtIrs-Zczp-Tck,1127
+gptpr/gitutil.py,sha256=NBD3iRnbFEPRU47w7c5TowwtZieDYkU4zybvv0PoOU0,5783
+gptpr/main.py,sha256=gen_8YXYAUJU0BpzULZ28j9Br6Y9PTqTr9JEm4nwsJI,2694
+gptpr/prdata.py,sha256=ejx4zLRViJ83OmqvlxGWnw7alo8RPL_YdlwEJFhMQ8g,6183
+gptpr/test_checkversion.py,sha256=WtJ3v4MMkFG0Kob0R1wi_nwVhcQFd4mXtKCKZHajEhM,4266
+gptpr/test_config.py,sha256=_vP-3RJf8WXGGQESr5bCUbmxf8owc1uVJXMSBF_MtH0,2712
+gptpr/test_prdata.py,sha256=rSJ-yqOdw-iYdBWyqnA2SXbdrhT8KgIkRTTf9SY1S1g,474
+gptpr/version.py,sha256=42STGor_9nKYXumfeV5tiyD_M8VdcddX7CEexmibPBk,22
+gpt_pr-0.4.0.dist-info/METADATA,sha256=BzXUg-M78hvaQ-Ih7dhrYi9VabHZ-xG6hlU833INufg,2638
+gpt_pr-0.4.0.dist-info/WHEEL,sha256=oiQVh_5PnQM0E3gPdiz09WCNmwiHDMaGer_elqB3coM,92
+gpt_pr-0.4.0.dist-info/entry_points.txt,sha256=WhcbcQXqo5-IGliYWiYMhop4-Wm7bcH2ljFKLWrmO7c,81
+gpt_pr-0.4.0.dist-info/top_level.txt,sha256=DZcbzlsjh4BD8njGcvhOeCZ83U_oYWgCn0w8qx5--04,6
+gpt_pr-0.4.0.dist-info/RECORD,,

gptpr/checkversion.py
ADDED

@@ -0,0 +1,89 @@
+import requests
+import os
+import json
+import tempfile
+from gptpr.version import __version__
+from datetime import datetime, timedelta
+
+from gptpr import consolecolor as cc
+
+
+PACKAGE_NAME = 'gpt-pr'
+CACHE_FILE = os.path.join(os.path.expanduser("~"), '.gpt_pr_update_cache.json')
+CACHE_DURATION = timedelta(days=1)
+
+
+def cache_daily_version(func):
+    def wrapper(*args, **kwargs):
+        cache = load_cache()
+        last_checked = cache.get('last_checked')
+
+        if last_checked:
+            last_checked = datetime.fromisoformat(last_checked)
+
+            if datetime.now() - last_checked < CACHE_DURATION:
+                # Use cached version info
+                latest_version = cache.get('latest_version')
+                return latest_version
+
+        latest_version = func(*args, **kwargs)
+        cache = {
+            'last_checked': datetime.now().isoformat(),
+            'latest_version': latest_version
+        }
+        save_cache(cache)
+
+        return latest_version
+
+    return wrapper
+
+
+def get_cache_file_path():
+    temp_dir = tempfile.gettempdir()
+    return os.path.join(temp_dir, f'{PACKAGE_NAME}_update_cache.json')
+
+
+@cache_daily_version
+def get_latest_version():
+    url = f'https://pypi.org/pypi/{PACKAGE_NAME}/json'
+
+    try:
+        response = requests.get(url)
+        response.raise_for_status()
+        data = response.json()
+        return data['info']['version']
+    except requests.exceptions.RequestException as e:
+        print(f"Error fetching latest version info: {e}")
+        return None
+
+
+def load_cache():
+    cache_file = get_cache_file_path()
+    if os.path.exists(cache_file):
+        with open(cache_file, 'r') as file:
+            return json.load(file)
+
+    return {}
+
+
+def save_cache(data):
+    cache_file = get_cache_file_path()
+    with open(cache_file, 'w') as file:
+        file.write(json.dumps(data))
+
+
+def check_for_updates():
+    latest_version = get_latest_version()
+
+    if latest_version and latest_version != __version__:
+        print('')
+        print(cc.yellow(
+            f'A new version of {PACKAGE_NAME} is available ({latest_version}). '
+            f'You are using version {__version__}. Please update by running'),
+            cc.green(f'pip install --upgrade {PACKAGE_NAME}.'))
+        print('')
+
+
+if __name__ == "__main__":
+    check_for_updates()
+    # Your CLI code here

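For context, a minimal illustrative sketch of how this new update check behaves once gpt-pr 0.4.0 is installed (usage example only, not part of the package diff):

# Illustrative usage of the update-check module added in 0.4.0.
# Assumes gpt-pr 0.4.0 is installed so that gptpr.checkversion is importable.
from gptpr.checkversion import get_latest_version, check_for_updates

# First call queries https://pypi.org/pypi/gpt-pr/json; the result is cached
# in a JSON file under the system temp dir for CACHE_DURATION (one day),
# so repeated calls within that window return the cached value.
print(get_latest_version())

# Prints an upgrade hint only when the fetched/cached version differs
# from the installed __version__.
check_for_updates()
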
gptpr/config.py
ADDED

@@ -0,0 +1,97 @@
+from copy import deepcopy
+import configparser
+import os
+
+
+def config_command_example(name, value_sample):
+    return f'gpt-pr-config set {name} {value_sample}'
+
+
+CONFIG_README_SECTION = 'https://github.com/alissonperez/gpt-pr?tab=readme-ov-file#authentication--api-keys'
+
+
+class Config:
+
+    config_filename = '.gpt-pr.ini'
+
+    _default_config = {
+        # Github
+        'GH_TOKEN': '',
+
+        # Open AI info
+        'OPENAI_MODEL': 'gpt-4o',
+        'OPENAI_API_KEY': '',
+    }
+
+    def __init__(self, config_dir=None):
+        self.default_config = deepcopy(self._default_config)
+        self._config_dir = config_dir or os.path.expanduser('~')
+        self._config = configparser.ConfigParser()
+        self._initialized = False
+
+    def load(self):
+        if self._initialized:
+            return
+
+        config_file_path = self.get_filepath()
+
+        if os.path.exists(config_file_path):
+            self._config.read(config_file_path)
+            self._ensure_default_values()
+        else:
+            self._config['user'] = {}
+            self._config['DEFAULT'] = deepcopy(self.default_config)
+            self.persist()
+
+        self._initialized = True
+
+    def _ensure_default_values(self):
+        added = False
+        for key, value in self.default_config.items():
+            if key not in self._config['DEFAULT']:
+                self._config['DEFAULT'][key] = value
+                added = True
+
+        if added:
+            self.persist()
+
+    def persist(self):
+        config_file_path = self.get_filepath()
+
+        with open(config_file_path, 'w') as configfile:
+            self._config.write(configfile)
+
+    def get_filepath(self):
+        return os.path.join(self._config_dir, self.config_filename)
+
+    def set_user_config(self, name, value):
+        self.load()
+        self._config['user'][name] = value
+
+    def reset_user_config(self, name):
+        self.load()
+        self._config['user'][name] = self.default_config[name]
+        self.persist()
+
+    def get_user_config(self, name):
+        self.load()
+        return self._config['user'][name]
+
+    def all_values(self):
+        self.load()
+
+        # iterate over all sections and values and return them in a list
+        result = []
+
+        # add default section
+        for option in self._config['DEFAULT']:
+            result.append(('DEFAULT', option, self._config['DEFAULT'][option]))
+
+        for section in self._config.sections():
+            for option in self._config[section]:
+                result.append((section, option, self._config[section][option]))
+
+        return result
+
+
+config = Config()

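A minimal illustrative sketch of how this new Config class can be used (assumes gpt-pr 0.4.0 is installed; values persist to the .gpt-pr.ini file in the user's home directory):

# Illustrative usage of gptpr.config.Config added in 0.4.0.
from gptpr.config import Config

cfg = Config()  # config_dir defaults to the user's home directory
cfg.set_user_config('OPENAI_MODEL', 'gpt-4o')
cfg.persist()

print(cfg.get_filepath())                   # e.g. ~/.gpt-pr.ini
print(cfg.get_user_config('OPENAI_MODEL'))  # gpt-4o
for section, option, value in cfg.all_values():
    print(section, option, value)
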
gptpr/gh.py
CHANGED

@@ -1,14 +1,24 @@
 import os
 from github import Github
 from InquirerPy import inquirer
+from gptpr.config import config, config_command_example, CONFIG_README_SECTION
 
-GH_TOKEN = os.environ.get('GH_TOKEN')
 
-
-
-
+def _get_gh_token():
+    gh_token = config.get_user_config('GH_TOKEN')
+    if not gh_token:
+        gh_token = os.environ.get('GH_TOKEN')
 
-
+    if not gh_token:
+        print('Please set "gh_token" config. Just run:',
+              config_command_example('gh_token', '[my gh token]'),
+              'more about at', CONFIG_README_SECTION)
+        exit(1)
+
+    return gh_token
+
+
+gh = Github(_get_gh_token())
 
 
 def create_pr(pr_data, yield_confirmation):

gptpr/main.py
CHANGED

@@ -5,6 +5,9 @@ from gptpr.gitutil import get_branch_info
 from gptpr.gh import create_pr
 from gptpr.prdata import get_pr_data
 from gptpr.version import __version__
+from gptpr.config import config, config_command_example, CONFIG_README_SECTION
+from gptpr import consolecolor as cc
+from gptpr.checkversion import check_for_updates
 
 
 def run(base_branch='main', yield_confirmation=False, version=False):

@@ -44,9 +47,56 @@ def run(base_branch='main', yield_confirmation=False, version=False):
     create_pr(pr_data, yield_confirmation)
 
 
+def set_config(name, value):
+    name = name.upper()
+    config.set_user_config(name, value)
+    config.persist()
+
+    print('Config value', cc.bold(name), 'set to', cc.yellow(value))
+
+
+def get_config(name):
+    upper_name = name.upper()
+    print('Config value', cc.bold(name), '=', cc.yellow(config.get_user_config(upper_name)))
+
+
+def reset_config(name):
+    upper_name = name.upper()
+    config.reset_user_config(upper_name)
+    print('Config value', cc.bold(name), '=', cc.yellow(config.get_user_config(upper_name)))
+
+
+def print_config():
+    print('Config values at', cc.yellow(config.get_filepath()))
+    print('')
+    print('To set values, just run:', cc.yellow(config_command_example('[config name]', '[value]')))
+    print('More about at', cc.yellow(CONFIG_README_SECTION))
+    print('')
+    current_section = None
+    for section, option, value in config.all_values():
+        if current_section != section:
+            print('')
+            current_section = section
+
+        print(f'[{cc.bold(section)}]', option, '=', cc.yellow(value))
+
+
 def main():
+    check_for_updates()
+
     fire.Fire(run)
 
 
+def run_config():
+    check_for_updates()
+
+    fire.Fire({
+        'set': set_config,
+        'get': get_config,
+        'print': print_config,
+        'reset': reset_config
+    })
+
+
 if __name__ == '__main__':
     main()

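For illustration, the config helpers added above can also be called directly from Python; the new run_config entry point dispatches to the same functions via fire.Fire (the console-script name gpt-pr-config is suggested by config_command_example in gptpr/config.py):

# Illustrative only; assumes gpt-pr 0.4.0 is installed.
from gptpr.main import set_config, get_config, print_config, reset_config

set_config('openai_model', 'gpt-4o')   # uppercases the name and persists it to the config file
get_config('openai_model')             # prints the stored value
print_config()                         # dumps the DEFAULT and user sections
reset_config('openai_model')           # restores the packaged default
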
gptpr/prdata.py
CHANGED

@@ -4,6 +4,7 @@ import os
 from openai import OpenAI
 
 from gptpr.gitutil import BranchInfo
+from gptpr.config import config
 import gptpr.consolecolor as cc
 
 TOKENIZER_RATIO = 4

@@ -37,6 +38,20 @@ def _get_pr_template():
     return pr_template
 
 
+def _get_open_ai_key():
+    api_key = config.get_user_config('OPENAI_API_KEY')
+
+    if not api_key:
+        api_key = os.environ.get('OPENAI_API_KEY')
+
+    if not api_key:
+        print('Please set "openai_api_key" config, just run:',
+              cc.yellow('gpt-pr-config set openai_api_key [open ai key]'))
+        exit(1)
+
+    return api_key
+
+
 @dataclass
 class PrData():
     branch_info: BranchInfo

@@ -108,17 +123,14 @@ def get_pr_data(branch_info):
     else:
         messages.append({'role': 'user', 'content': 'Diff changes:\n' + branch_info.diff})
 
-
-
-    if not openai_api_key:
-        print("Please set OPENAI_API_KEY environment variable.")
-        exit(1)
+    client = OpenAI(api_key=_get_open_ai_key())
 
-
+    openai_model = config.get_user_config('OPENAI_MODEL')
+    print('Using OpenAI model:', cc.yellow(openai_model))
 
     chat_completion = client.chat.completions.create(
         messages=messages,
-        model=
+        model=openai_model,
         functions=functions,
         function_call={'name': 'create_pr'},
         temperature=0,

gptpr/test_checkversion.py
ADDED

@@ -0,0 +1,126 @@
+import pytest
+import requests
+import json
+from datetime import datetime
+from unittest.mock import patch, mock_open
+
+from gptpr.version import __version__
+from gptpr.checkversion import (get_latest_version, load_cache,
+                                save_cache, check_for_updates,
+                                CACHE_DURATION)
+
+
+@pytest.fixture
+def mock_requests_get(mocker):
+    return mocker.patch('requests.get')
+
+
+@pytest.fixture
+def mock_os_path_exists(mocker):
+    return mocker.patch('os.path.exists')
+
+
+@pytest.fixture
+def mock_open_file(mocker):
+    return mocker.patch('builtins.open', mock_open())
+
+
+@pytest.fixture
+def mock_datetime(mocker):
+    return mocker.patch('gptpr.checkversion.datetime')
+
+
+def test_get_latest_version(mock_requests_get, mock_os_path_exists):
+    mock_os_path_exists.return_value = False
+    mock_response = mock_requests_get.return_value
+    mock_response.raise_for_status.return_value = None
+    mock_response.json.return_value = {'info': {'version': '2.0.0'}}
+
+    assert get_latest_version() == '2.0.0'
+
+
+def test_get_latest_version_error(mock_requests_get, mock_os_path_exists):
+    mock_os_path_exists.return_value = False
+    mock_requests_get.side_effect = requests.exceptions.RequestException
+
+    assert get_latest_version() is None
+
+
+def test_load_cache(mock_os_path_exists, mock_open_file):
+    mock_os_path_exists.return_value = True
+    mock_open_file.return_value.read.return_value = json.dumps({
+        'last_checked': datetime.now().isoformat(),
+        'latest_version': '2.0.0'
+    })
+
+    cache = load_cache()
+    assert cache['latest_version'] == '2.0.0'
+
+
+def test_load_cache_no_file(mock_os_path_exists):
+    mock_os_path_exists.return_value = False
+
+    cache = load_cache()
+    assert cache == {}
+
+
+def test_save_cache(mock_open_file):
+    data = {
+        'last_checked': datetime.now().isoformat(),
+        'latest_version': '2.0.0'
+    }
+
+    save_cache(data)
+    mock_open_file.return_value.write.assert_called_once_with(json.dumps(data))
+
+
+def test_check_for_updates_new_version(mocker, mock_datetime, mock_requests_get, mock_open_file):
+    # Set up mocks
+    last_checked_str = (datetime(2024, 1, 1) - CACHE_DURATION).isoformat()
+    mock_datetime.now.return_value = datetime(2024, 1, 2)
+    mock_datetime.fromisoformat.return_value = datetime.fromisoformat(last_checked_str)
+    mock_open_file.return_value.read.return_value = json.dumps({
+        'last_checked': last_checked_str,
+        'latest_version': '1.0.0'
+    })
+    mock_requests_get.return_value.raise_for_status.return_value = None
+    mock_requests_get.return_value.json.return_value = {'info': {'version': '2.0.0'}}
+
+    # Capture the print statements
+    with patch('builtins.print') as mocked_print:
+        check_for_updates()
+        assert mocked_print.call_count == 3
+
+
+def test_check_for_updates_no_new_version(mocker, mock_datetime, mock_requests_get, mock_open_file):
+    # Set up mocks
+    last_checked_str = (datetime(2024, 1, 1) - CACHE_DURATION).isoformat()
+    mock_datetime.now.return_value = datetime(2024, 1, 2)
+    mock_datetime.fromisoformat.return_value = datetime.fromisoformat(last_checked_str)
+    mock_open_file.return_value.read.return_value = json.dumps({
+        'last_checked': (datetime(2024, 1, 1) - CACHE_DURATION).isoformat(),
+        'latest_version': __version__
+    })
+    mock_requests_get.return_value.raise_for_status.return_value = None
+    mock_requests_get.return_value.json.return_value = {'info': {'version': __version__}}
+
+    # Capture the print statements
+    with patch('builtins.print') as mocked_print:
+        check_for_updates()
+        assert mocked_print.call_count == 0
+
+
+def test_check_for_updates_cache_valid(mock_datetime, mock_open_file):
+    # Set up mocks
+    last_checked_str = datetime(2024, 1, 2).isoformat()
+    mock_datetime.now.return_value = datetime(2024, 1, 2)
+    mock_datetime.fromisoformat.return_value = datetime.fromisoformat(last_checked_str)
+    mock_open_file.return_value.read.return_value = json.dumps({
+        'last_checked': last_checked_str,
+        'latest_version': __version__
+    })
+
+    # Capture the print statements
+    with patch('builtins.print') as mocked_print:
+        check_for_updates()
+        assert mocked_print.call_count == 0

gptpr/test_config.py
ADDED

@@ -0,0 +1,99 @@
+import os
+import configparser
+
+from pytest import fixture
+
+from gptpr.config import Config
+
+
+@fixture
+def temp_config(tmpdir):
+    temp_dir = tmpdir.mkdir('config_dir')
+    config = Config(temp_dir)
+    return config, temp_dir
+
+
+def _check_config(config, temp_dir, config_list):
+    # Read the configuration file and verify its contents
+    config_to_test = configparser.ConfigParser()
+    config_to_test.read(os.path.join(str(temp_dir), config.config_filename))
+
+    for section, key, value in config_list:
+        assert config_to_test[section][key] == value
+
+
+def test_init_config_file(temp_config):
+    config, temp_dir = temp_config
+    config.load()
+
+    # Check if the file exists
+    assert os.path.isfile(os.path.join(str(temp_dir), config.config_filename))
+
+    _check_config(config, temp_dir, [
+        ('DEFAULT', 'OPENAI_MODEL', 'gpt-4o'),
+        ('DEFAULT', 'OPENAI_API_KEY', ''),
+    ])
+
+
+def test_new_default_value_should_be_added(temp_config):
+    config, temp_dir = temp_config
+    config.load()  # data was written to the file
+
+    new_config = Config(temp_dir)
+
+    # Add a new default value
+    new_config.default_config['NEW_DEFAULT'] = 'new_default_value'
+    new_config.load()  # Should update config file...
+
+    _check_config(new_config, temp_dir, [
+        ('DEFAULT', 'NEW_DEFAULT', 'new_default_value'),
+    ])
+
+
+def test_set_user_config(temp_config):
+    config, temp_dir = temp_config
+
+    config.set_user_config('OPENAI_MODEL', 'gpt-3.5')
+    config.persist()
+
+    # Read the configuration file and verify its contents
+    config_to_test = configparser.ConfigParser()
+    config_to_test.read(os.path.join(str(temp_dir), config.config_filename))
+
+    _check_config(config, temp_dir, [
+        ('user', 'OPENAI_MODEL', 'gpt-3.5'),
+        ('user', 'OPENAI_API_KEY', ''),
+    ])
+
+
+def test_all_values(temp_config):
+    config, temp_dir = temp_config
+
+    all_values = config.all_values()
+
+    assert all_values == [
+        ('DEFAULT', 'gh_token', ''),
+        ('DEFAULT', 'openai_model', 'gpt-4o'),
+        ('DEFAULT', 'openai_api_key', ''),
+        ('user', 'gh_token', ''),
+        ('user', 'openai_model', 'gpt-4o'),
+        ('user', 'openai_api_key', ''),
+    ]
+
+
+def test_reset_user_config(temp_config):
+    config, temp_dir = temp_config
+
+    config.set_user_config('OPENAI_MODEL', 'gpt-3.5')
+    config.persist()
+
+    config.reset_user_config('OPENAI_MODEL')
+
+    # Read the configuration file and verify its contents
+    config_to_test = configparser.ConfigParser()
+    config_to_test.read(os.path.join(str(temp_dir), config.config_filename))
+
+    _check_config(config, temp_dir, [
+        ('user', 'OPENAI_MODEL', 'gpt-4o'),
+        ('user', 'OPENAI_API_KEY', ''),
+    ])

gptpr/version.py
CHANGED

@@ -1 +1 @@
-__version__ = "0.2.1"
+__version__ = "0.4.0"

gpt_pr-0.2.1.dist-info/RECORD
DELETED

@@ -1,13 +0,0 @@
-gptpr/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
-gptpr/consolecolor.py,sha256=_JmBMNjIflWMlgP2VkCWu6uQLR9oHBy52uV3TRJJgF4,800
-gptpr/gh.py,sha256=aunn_8wi4pGoGkjdqyA2MZVAAxfItpnnmDCp20-i-WA,786
-gptpr/gitutil.py,sha256=NBD3iRnbFEPRU47w7c5TowwtZieDYkU4zybvv0PoOU0,5783
-gptpr/main.py,sha256=rkalqLcc1Nh5WH51w7ayEMIYNoScrRBNVYl3KLuZFdY,1270
-gptpr/prdata.py,sha256=y4VodgCiSaARPXzJHsg32-cqrR_nZqyyGSLij6ee-oo,5846
-gptpr/test_prdata.py,sha256=rSJ-yqOdw-iYdBWyqnA2SXbdrhT8KgIkRTTf9SY1S1g,474
-gptpr/version.py,sha256=HfjVOrpTnmZ-xVFCYSVmX50EXaBQeJteUHG-PD6iQs8,22
-gpt_pr-0.2.1.dist-info/METADATA,sha256=rJ-4clCl3pJh99vscyuuVQsLt4XQzqpgf4K67YYMZJY,2637
-gpt_pr-0.2.1.dist-info/WHEEL,sha256=oiQVh_5PnQM0E3gPdiz09WCNmwiHDMaGer_elqB3coM,92
-gpt_pr-0.2.1.dist-info/entry_points.txt,sha256=aXCkyNdoUHfSJXVhRKHj8m09twDfcDmY7xC66u5N3hE,43
-gpt_pr-0.2.1.dist-info/top_level.txt,sha256=DZcbzlsjh4BD8njGcvhOeCZ83U_oYWgCn0w8qx5--04,6
-gpt_pr-0.2.1.dist-info/RECORD,,

{gpt_pr-0.2.1.dist-info → gpt_pr-0.4.0.dist-info}/WHEEL
File without changes

{gpt_pr-0.2.1.dist-info → gpt_pr-0.4.0.dist-info}/top_level.txt
File without changes