gpt-pr 0.4.1__tar.gz → 0.6.0__tar.gz

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.

Potentially problematic release.


This version of gpt-pr might be problematic. Click here for more details.

Files changed (26)
  1. {gpt-pr-0.4.1 → gpt-pr-0.6.0}/PKG-INFO +1 -1
  2. {gpt-pr-0.4.1 → gpt-pr-0.6.0}/README.md +29 -9
  3. {gpt-pr-0.4.1 → gpt-pr-0.6.0}/gpt_pr.egg-info/PKG-INFO +1 -1
  4. {gpt-pr-0.4.1 → gpt-pr-0.6.0}/gpt_pr.egg-info/requires.txt +3 -3
  5. {gpt-pr-0.4.1 → gpt-pr-0.6.0}/gptpr/config.py +10 -3
  6. {gpt-pr-0.4.1 → gpt-pr-0.6.0}/gptpr/gh.py +1 -1
  7. {gpt-pr-0.4.1 → gpt-pr-0.6.0}/gptpr/prdata.py +60 -34
  8. {gpt-pr-0.4.1 → gpt-pr-0.6.0}/gptpr/test_config.py +23 -4
  9. gpt-pr-0.6.0/gptpr/version.py +1 -0
  10. {gpt-pr-0.4.1 → gpt-pr-0.6.0}/requirements.txt +3 -3
  11. gpt-pr-0.4.1/gptpr/version.py +0 -1
  12. {gpt-pr-0.4.1 → gpt-pr-0.6.0}/MANIFEST.in +0 -0
  13. {gpt-pr-0.4.1 → gpt-pr-0.6.0}/gpt_pr.egg-info/SOURCES.txt +0 -0
  14. {gpt-pr-0.4.1 → gpt-pr-0.6.0}/gpt_pr.egg-info/dependency_links.txt +0 -0
  15. {gpt-pr-0.4.1 → gpt-pr-0.6.0}/gpt_pr.egg-info/entry_points.txt +0 -0
  16. {gpt-pr-0.4.1 → gpt-pr-0.6.0}/gpt_pr.egg-info/not-zip-safe +0 -0
  17. {gpt-pr-0.4.1 → gpt-pr-0.6.0}/gpt_pr.egg-info/top_level.txt +0 -0
  18. {gpt-pr-0.4.1 → gpt-pr-0.6.0}/gptpr/__init__.py +0 -0
  19. {gpt-pr-0.4.1 → gpt-pr-0.6.0}/gptpr/checkversion.py +0 -0
  20. {gpt-pr-0.4.1 → gpt-pr-0.6.0}/gptpr/consolecolor.py +0 -0
  21. {gpt-pr-0.4.1 → gpt-pr-0.6.0}/gptpr/gitutil.py +0 -0
  22. {gpt-pr-0.4.1 → gpt-pr-0.6.0}/gptpr/main.py +0 -0
  23. {gpt-pr-0.4.1 → gpt-pr-0.6.0}/gptpr/test_checkversion.py +0 -0
  24. {gpt-pr-0.4.1 → gpt-pr-0.6.0}/gptpr/test_prdata.py +0 -0
  25. {gpt-pr-0.4.1 → gpt-pr-0.6.0}/setup.cfg +0 -0
  26. {gpt-pr-0.4.1 → gpt-pr-0.6.0}/setup.py +0 -0
@@ -1,6 +1,6 @@
1
1
  Metadata-Version: 2.1
2
2
  Name: gpt-pr
3
- Version: 0.4.1
3
+ Version: 0.6.0
4
4
  Summary: Automate your GitHub workflow with GPT-PR: an OpenAI powered library for streamlined PR generation.
5
5
  Home-page: http://github.com/alissonperez/gpt-pr
6
6
  Author: Alisson R. Perez
@@ -33,14 +33,12 @@ You can install and use GPT-PR in one of two ways. Choose the option that best s
33
33
 
34
34
  ### Option 1: Using `pip install` (Recommended)
35
35
 
36
- 1. Install the package:
36
+ 1. Install OR Update the package:
37
37
 
38
38
  ```bash
39
39
  pip install -U gpt-pr
40
40
  ```
41
41
 
42
- > Note: Use this command to **update** gpt-pr package to the latest version.
43
-
44
42
  2. Setup API keys for GitHub and OpenAI, take a look at [Configuration](#configuration).
45
43
 
46
44
  3. Inside the Git repository you are working on, ensure you have pushed your branch to origin, then run:
@@ -49,7 +47,7 @@ pip install -U gpt-pr
49
47
  gpt-pr --help
50
48
  ```
51
49
 
52
- ### Option 2: Cloning the code
50
+ ### Option 2: Cloning the code (NOT recommended)
53
51
 
54
52
  1. Clone the repository:
55
53
 
@@ -74,6 +72,14 @@ pipenv run python ~/workplace/gpt-pr/gptpr/main.py --help
74
72
 
75
73
  ## Configuration
76
74
 
75
+ ### See all configs available
76
+
77
+ To print all default configs and what is being used, just run:
78
+
79
+ ```bash
80
+ gpt-pr-config print
81
+ ```
82
+
77
83
  ### Setting up GitHub Token (`GH_TOKEN`)
78
84
 
79
85
  GPT-PR tool will look for a `GH_TOKEN` in current shell env var OR in gpt-pr config file (at `~/.gpt-pr.ini`).
@@ -120,22 +126,36 @@ Or just export it as an environment variable in your shell initializer:
120
126
  export OPENAI_API_KEY=your_generated_api_key_here
121
127
  ```
122
128
 
129
+ ### Setting Max Input LLM Tokens
130
+
131
+ You can adjust the maximum number of input tokens allowed when calling the LLM model by modifying the corresponding setting.
132
+
133
+ For example, to change the maximum to 20,000 tokens, use the following command:
134
+
135
+ ```bash
136
+ gpt-pr-config set input_max_tokens 20000
137
+ ```
138
+
123
139
  ### Change OpenAI model
124
140
 
125
141
  To change OpenAI model, just run:
126
142
 
127
143
  ```bash
128
- gpt-pr-config set openai_model gpt-3.5-turbo
144
+ gpt-pr-config set openai_model gpt-4o-mini
129
145
  ```
130
146
 
147
+ > Obs.: `gpt-4o-mini` already is the default model of the project
148
+
131
149
  To see a full list of available models, access [OpenAI Models Documentation](https://platform.openai.com/docs/models)
132
150
 
133
- ### See all configs available
151
+ ### GPT-PR Library Signature in PRs
134
152
 
135
- To print all default configs and what is being used, just run:
153
+ To help other developers recognize and understand the use of the GPT-PR library in generating pull requests, we have included an optional signature feature. By default, this feature is enabled and appends the text "Generated by GPT-PR" at the end of each pull request. This transparency fosters better collaboration and awareness among team members about the tools being utilized in the development process.
154
+
155
+ If you prefer to disable this feature, simply run the following command:
136
156
 
137
157
  ```bash
138
- gpt-pr-config print
158
+ gpt-pr-config set add_tool_signature false
139
159
  ```
140
160
 
141
161
  ### Reset config
@@ -143,7 +163,7 @@ gpt-pr-config print
143
163
  To reset any config to default value, just run:
144
164
 
145
165
  ```bash
146
- gpt-pr-config reset config-name
166
+ gpt-pr-config reset config_name
147
167
  ```
148
168
 
149
169
  Example:
@@ -1,6 +1,6 @@
1
1
  Metadata-Version: 2.1
2
2
  Name: gpt-pr
3
- Version: 0.4.1
3
+ Version: 0.6.0
4
4
  Summary: Automate your GitHub workflow with GPT-PR: an OpenAI powered library for streamlined PR generation.
5
5
  Home-page: http://github.com/alissonperez/gpt-pr
6
6
  Author: Alisson R. Perez
@@ -1,5 +1,5 @@
1
1
  cffi==1.15.1
2
- cryptography==42.0.7
2
+ cryptography==43.0.0
3
3
  fire==0.6.0
4
4
  pycparser==2.21
5
5
  wcwidth==0.2.13
@@ -12,7 +12,7 @@ prompt-toolkit==3.0.43
12
12
  openai==1.14.0
13
13
 
14
14
  [:python_version < "3.11"]
15
- exceptiongroup==1.2.1
15
+ exceptiongroup==1.2.2
16
16
 
17
17
  [:python_version < "3.8"]
18
18
  cached-property==1.5.2
@@ -30,7 +30,7 @@ deprecated==1.2.14
30
30
  idna==3.7
31
31
 
32
32
  [:python_version >= "3.6"]
33
- certifi==2024.2.2
33
+ certifi==2024.7.4
34
34
  distro==1.9.0
35
35
  pynacl==1.5.0
36
36
  wrapt==1.16.0
@@ -7,7 +7,8 @@ def config_command_example(name, value_sample):
7
7
  return f'gpt-pr-config set {name} {value_sample}'
8
8
 
9
9
 
10
- CONFIG_README_SECTION = 'https://github.com/alissonperez/gpt-pr?tab=readme-ov-file#authentication--api-keys'
10
+ CONFIG_PROJECT_REPO_URL = 'https://github.com/alissonperez/gpt-pr'
11
+ CONFIG_README_SECTION = f'{CONFIG_PROJECT_REPO_URL}?tab=readme-ov-file#configuration'
11
12
 
12
13
 
13
14
  class Config:
@@ -15,11 +16,17 @@ class Config:
15
16
  config_filename = '.gpt-pr.ini'
16
17
 
17
18
  _default_config = {
19
+ # Amenities
20
+ 'ADD_TOOL_SIGNATURE': 'true', # Add GPT-PR signature to PRs
21
+
18
22
  # Github
19
23
  'GH_TOKEN': '',
20
24
 
25
+ # LLM input MAX Tokens
26
+ 'INPUT_MAX_TOKENS': '15000',
27
+
21
28
  # Open AI info
22
- 'OPENAI_MODEL': 'gpt-4o',
29
+ 'OPENAI_MODEL': 'gpt-4o-mini',
23
30
  'OPENAI_API_KEY': '',
24
31
  }
25
32
 
@@ -66,7 +73,7 @@ class Config:
66
73
 
67
74
  def set_user_config(self, name, value):
68
75
  self.load()
69
- self._config['user'][name] = value
76
+ self._config['user'][name] = str(value)
70
77
 
71
78
  def reset_user_config(self, name):
72
79
  self.load()
@@ -30,7 +30,7 @@ def create_pr(pr_data, yield_confirmation):
30
30
  default=True).execute()
31
31
 
32
32
  if pr_confirmation:
33
- pr = repo.create_pull(title=pr_data.title, body=pr_data.body,
33
+ pr = repo.create_pull(title=pr_data.title, body=pr_data.create_body(),
34
34
  head=pr_data.branch_info.branch, base=pr_data.branch_info.base_branch)
35
35
  print("Pull request created successfully: ", pr.html_url)
36
36
  else:
@@ -4,11 +4,10 @@ import os
4
4
  from openai import OpenAI
5
5
 
6
6
  from gptpr.gitutil import BranchInfo
7
- from gptpr.config import config
7
+ from gptpr.config import config, CONFIG_PROJECT_REPO_URL
8
8
  import gptpr.consolecolor as cc
9
9
 
10
10
  TOKENIZER_RATIO = 4
11
- MAX_TOKENS = 6000
12
11
 
13
12
  DEFAULT_PR_TEMPLATE = ('### Ref. [Link]\n\n## What was done?\n[Fill here]\n\n'
14
13
  '## How was it done?\n[Fill here]\n\n'
@@ -52,6 +51,10 @@ def _get_open_ai_key():
52
51
  return api_key
53
52
 
54
53
 
54
+ def _count_tokens(text):
55
+ return len(text.split(' '))
56
+
57
+
55
58
  @dataclass
56
59
  class PrData():
57
60
  branch_info: BranchInfo
@@ -64,9 +67,18 @@ class PrData():
64
67
  f'{cc.bold("Title")}: {cc.yellow(self.title)}',
65
68
  f'{cc.bold("Branch name")}: {cc.yellow(self.branch_info.branch)}',
66
69
  f'{cc.bold("Base branch")}: {cc.yellow(self.branch_info.base_branch)}',
67
- f'{cc.bold("PR Description")}:\n{self.body}',
70
+ f'{cc.bold("PR Description")}:\n{self.create_body()}',
68
71
  ])
69
72
 
73
+ def create_body(self):
74
+ body = self.body
75
+
76
+ if config.get_user_config('ADD_TOOL_SIGNATURE') == 'true':
77
+ pr_signature = f'Generated by [GPT-PR]({CONFIG_PROJECT_REPO_URL})'
78
+ body += '\n\n---\n\n' + pr_signature
79
+
80
+ return body
81
+
70
82
 
71
83
  functions = [
72
84
  {
@@ -91,6 +103,35 @@ functions = [
91
103
 
92
104
 
93
105
  def get_pr_data(branch_info):
106
+ client = OpenAI(api_key=_get_open_ai_key())
107
+
108
+ messages = _get_messages(branch_info)
109
+
110
+ openai_model = config.get_user_config('OPENAI_MODEL')
111
+ print('Using OpenAI model:', cc.yellow(openai_model))
112
+
113
+ chat_completion = client.chat.completions.create(
114
+ messages=messages,
115
+ model=openai_model,
116
+ functions=functions,
117
+ function_call={'name': 'create_pr'},
118
+ temperature=0,
119
+ max_tokens=1000,
120
+ top_p=1,
121
+ frequency_penalty=0,
122
+ presence_penalty=0
123
+ )
124
+
125
+ arguments = _parse_json(chat_completion.choices[0].message.function_call.arguments)
126
+
127
+ return PrData(
128
+ branch_info=branch_info,
129
+ title=arguments['title'],
130
+ body=arguments['description']
131
+ )
132
+
133
+
134
+ def _get_messages(branch_info):
94
135
  system_content = ('You are a development assistant designed to craft Git pull requests '
95
136
  'by incorporating information from main and secondary commits, diff changes, '
96
137
  'and adhering to a provided PR template. Your output includes a complete PR '
@@ -108,45 +149,30 @@ def get_pr_data(branch_info):
108
149
  messages.append({'role': 'user', 'content': 'main commits: ' + '\n'.join(branch_info.highlight_commits)})
109
150
  messages.append({'role': 'user', 'content': 'secondary commits: ' + '\n'.join(branch_info.commits)})
110
151
  else:
111
- messages.append({'role': 'user', 'content': 'git commits: ' + '\n'.join(branch_info.commits)})
152
+ messages.append({'role': 'user', 'content': 'git commits: \n' + '\n'.join(branch_info.commits)})
112
153
 
113
154
  messages.append({'role': 'user', 'content': 'PR template:\n' + _get_pr_template()})
114
155
 
115
- current_total_length = sum([len(m['content']) for m in messages])
156
+ joined_messages = '\n'.join([m['content'] for m in messages])
157
+ current_total_tokens = _count_tokens(joined_messages)
158
+
159
+ input_max_tokens = int(config.get_user_config('INPUT_MAX_TOKENS'))
116
160
 
117
- if current_total_length / TOKENIZER_RATIO > MAX_TOKENS:
118
- raise Exception(f'Current total length {current_total_length} is greater than max tokens {MAX_TOKENS}')
161
+ if current_total_tokens > input_max_tokens:
162
+ exp_message = (f'Length of {current_total_tokens} tokens for basic prompt '
163
+ f'(description and commits) is greater than max tokens {input_max_tokens} '
164
+ '(config \'input_max_tokens\')')
165
+ raise Exception(exp_message)
119
166
 
120
- total_length_with_diff = current_total_length + len(branch_info.diff)
121
- if total_length_with_diff / TOKENIZER_RATIO > MAX_TOKENS:
122
- print('Total content length (with diff) is too big.', cc.red('Skipping diff content...'))
167
+ total_tokens_with_diff = current_total_tokens + _count_tokens(branch_info.diff)
168
+ if total_tokens_with_diff > input_max_tokens:
169
+ print_msg = (f'Length git changes with diff is too big (total is {total_tokens_with_diff}, '
170
+ f'\'input_max_tokens\' config is {input_max_tokens}).')
171
+ print(print_msg, cc.red('Skipping changes diff content...'))
123
172
  else:
124
173
  messages.append({'role': 'user', 'content': 'Diff changes:\n' + branch_info.diff})
125
174
 
126
- client = OpenAI(api_key=_get_open_ai_key())
127
-
128
- openai_model = config.get_user_config('OPENAI_MODEL')
129
- print('Using OpenAI model:', cc.yellow(openai_model))
130
-
131
- chat_completion = client.chat.completions.create(
132
- messages=messages,
133
- model=openai_model,
134
- functions=functions,
135
- function_call={'name': 'create_pr'},
136
- temperature=0,
137
- max_tokens=512,
138
- top_p=1,
139
- frequency_penalty=0,
140
- presence_penalty=0
141
- )
142
-
143
- arguments = _parse_json(chat_completion.choices[0].message.function_call.arguments)
144
-
145
- return PrData(
146
- branch_info=branch_info,
147
- title=arguments['title'],
148
- body=arguments['description']
149
- )
175
+ return messages
150
176
 
151
177
 
152
178
  def _parse_json(content):
@@ -30,7 +30,7 @@ def test_init_config_file(temp_config):
30
30
  assert os.path.isfile(os.path.join(str(temp_dir), config.config_filename))
31
31
 
32
32
  _check_config(config, temp_dir, [
33
- ('DEFAULT', 'OPENAI_MODEL', 'gpt-4o'),
33
+ ('DEFAULT', 'OPENAI_MODEL', 'gpt-4o-mini'),
34
34
  ('DEFAULT', 'OPENAI_API_KEY', ''),
35
35
  ])
36
36
 
@@ -66,17 +66,36 @@ def test_set_user_config(temp_config):
66
66
  ])
67
67
 
68
68
 
69
+ def test_set_user_config_with_int_value(temp_config):
70
+ config, temp_dir = temp_config
71
+
72
+ config.set_user_config('INPUT_MAX_TOKENS', 100)
73
+ config.persist()
74
+
75
+ # Read the configuration file and verify its contents
76
+ config_to_test = configparser.ConfigParser()
77
+ config_to_test.read(os.path.join(str(temp_dir), config.config_filename))
78
+
79
+ _check_config(config, temp_dir, [
80
+ ('user', 'INPUT_MAX_TOKENS', '100'),
81
+ ])
82
+
83
+
69
84
  def test_all_values(temp_config):
70
85
  config, temp_dir = temp_config
71
86
 
72
87
  all_values = config.all_values()
73
88
 
74
89
  assert all_values == [
90
+ ('DEFAULT', 'add_tool_signature', 'true'),
75
91
  ('DEFAULT', 'gh_token', ''),
76
- ('DEFAULT', 'openai_model', 'gpt-4o'),
92
+ ('DEFAULT', 'input_max_tokens', '15000'),
93
+ ('DEFAULT', 'openai_model', 'gpt-4o-mini'),
77
94
  ('DEFAULT', 'openai_api_key', ''),
95
+ ('user', 'add_tool_signature', 'true'),
78
96
  ('user', 'gh_token', ''),
79
- ('user', 'openai_model', 'gpt-4o'),
97
+ ('user', 'input_max_tokens', '15000'),
98
+ ('user', 'openai_model', 'gpt-4o-mini'),
80
99
  ('user', 'openai_api_key', ''),
81
100
  ]
82
101
 
@@ -94,6 +113,6 @@ def test_reset_user_config(temp_config):
94
113
  config_to_test.read(os.path.join(str(temp_dir), config.config_filename))
95
114
 
96
115
  _check_config(config, temp_dir, [
97
- ('user', 'OPENAI_MODEL', 'gpt-4o'),
116
+ ('user', 'OPENAI_MODEL', 'gpt-4o-mini'),
98
117
  ('user', 'OPENAI_API_KEY', ''),
99
118
  ])
@@ -0,0 +1 @@
1
+ __version__ = "0.6.0"
@@ -2,13 +2,13 @@
2
2
  annotated-types==0.5.0; python_version >= '3.7'
3
3
  anyio==3.7.1; python_version >= '3.7'
4
4
  cached-property==1.5.2; python_version < '3.8'
5
- certifi==2024.2.2; python_version >= '3.6'
5
+ certifi==2024.7.4; python_version >= '3.6'
6
6
  cffi==1.15.1
7
7
  charset-normalizer==3.3.2; python_full_version >= '3.7.0'
8
- cryptography==42.0.7
8
+ cryptography==43.0.0
9
9
  deprecated==1.2.14; python_version >= '2.7' and python_version not in '3.0, 3.1, 3.2, 3.3'
10
10
  distro==1.9.0; python_version >= '3.6'
11
- exceptiongroup==1.2.1; python_version < '3.11'
11
+ exceptiongroup==1.2.2; python_version < '3.11'
12
12
  fire==0.6.0
13
13
  gitdb==4.0.11; python_version >= '3.7'
14
14
  gitpython==3.1.42; python_version >= '3.7'
@@ -1 +0,0 @@
1
- __version__ = "0.4.1"
File without changes
File without changes
File without changes
File without changes
File without changes
File without changes
File without changes
File without changes
File without changes