commit-maker 0.2.2-py3-none-any.whl → 0.3.0-py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- commit_maker/colored.py +59 -0
- commit_maker/custom_int_prompt.py +6 -0
- commit_maker/cut_think_part.py +6 -0
- commit_maker/main.py +259 -388
- commit_maker/mistral.py +68 -0
- commit_maker/ollama.py +65 -0
- commit_maker/rich_custom_formatter.py +11 -0
- commit_maker-0.3.0.dist-info/METADATA +127 -0
- commit_maker-0.3.0.dist-info/RECORD +14 -0
- {commit_maker-0.2.2.dist-info → commit_maker-0.3.0.dist-info}/WHEEL +1 -1
- commit_maker-0.2.2.dist-info/METADATA +0 -154
- commit_maker-0.2.2.dist-info/RECORD +0 -8
- {commit_maker-0.2.2.dist-info → commit_maker-0.3.0.dist-info}/entry_points.txt +0 -0
- {commit_maker-0.2.2.dist-info → commit_maker-0.3.0.dist-info}/licenses/LICENSE +0 -0
- {commit_maker-0.2.2.dist-info → commit_maker-0.3.0.dist-info}/top_level.txt +0 -0
commit_maker/main.py
CHANGED
@@ -1,6 +1,4 @@
-# CLI
-# noqa: F841
-
+# CLI utility that generates commit messages using AI.
 import argparse
 import importlib
 import os
@@ -8,105 +6,43 @@ import subprocess
 
 import requests
 import rich.console
-import rich_argparse
 
-
+from .colored import colored
+from .custom_int_prompt import CustomIntPrompt
+from .cut_think_part import cut_think
+from .mistral import MistralAI
+from .ollama import Ollama
+from .rich_custom_formatter import CustomFormatter
+
+# Constants
 mistral_api_key = os.environ.get("MISTRAL_API_KEY")
 console = rich.console.Console()
+prompt = CustomIntPrompt()
+available_langs = ["en", "ru"]
 
-#
-# rich_argparse documentation)
-rich_argparse.RichHelpFormatter.styles = {
-    "argparse.args": "cyan bold",
-    "argparse.groups": "green bold",
-    "argparse.metavar": "dark_cyan",
-    "argparse.prog": "dark_green bold",
-}
-
-
-# Functions for colored output
-def bold(text: str) -> str:
-    """Returns bold text
-
-    Args:
-        text (str): Text
-
-    Returns:
-        str: Bold text
-    """
-    bold_start = "\033[1m"
-    bold_end = "\033[0m"
-    return f"{bold_start}{text}{bold_end}"
-
-
-def colored(
-    string: str,
-    color: str,
-    text_bold: bool = True,
-) -> str:
-    """Function for 'painting' strings for pretty output
-
-    Args:
-        string (str): The string to paint
-        color (str): Paint color ['red', 'yellow', 'green', 'magenta',\
-            'blue', 'cyan', 'reset']
-        text_bold (bool, optional): Bold text or not. Defaults to True.
-
-    Returns:
-        str: The painted string
-
-    Example:
-        `print(colored(string='Success!', color='green'))` # Prints 'Success!'
-        in green
-    """
-    COLOR_RED = "\033[31m"
-    COLOR_GREEN = "\033[32m"
-    COLOR_YELLOW = "\033[33m"
-    COLOR_BLUE = "\033[94m"
-    COLOR_MAGENTA = "\033[95m"
-    COLOR_CYAN = "\033[96m"
-    COLOR_RESET = "\033[0m"
-    COLORS_DICT = {
-        "red": COLOR_RED,
-        "green": COLOR_GREEN,
-        "yellow": COLOR_YELLOW,
-        "blue": COLOR_BLUE,
-        "magenta": COLOR_MAGENTA,
-        "cyan": COLOR_CYAN,
-        "reset": COLOR_RESET,
-    }
-    return (
-        bold(f"{COLORS_DICT[color]}{string}{COLORS_DICT['reset']}")
-        if text_bold
-        else f"{COLORS_DICT[color]}{string}{COLORS_DICT['reset']}"
-    )
-
-
-# Parameter parser
+# Argument parser
 parser = argparse.ArgumentParser(
     prog="commit_maker",
-    description="CLI
-    "
-
-    formatter_class=rich_argparse.RichHelpFormatter,
+    description="CLI utility that generates commit messages using AI. "
+    "Supports local models/Mistral AI API. Local models use ollama.",
+    formatter_class=CustomFormatter,
 )
 
-#
-general_params = parser.add_argument_group("
+# General parameters
+general_params = parser.add_argument_group("General parameters")
 general_params.add_argument(
     "-l",
     "--local-models",
     action="store_true",
     default=False,
-    help="
+    help="Use local models",
 )
 general_params.add_argument(
     "-d",
     "--dry-run",
     action="store_true",
     default=False,
-    help="
-    "changes, without creating a commit",
+    help="Dry run: show commit message without creating commit",
 )
 general_params.add_argument(
     "-V",
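The `bold`/`colored` ANSI helpers removed above were not dropped: they moved into the new `commit_maker/colored.py` module (+59 lines in the file list) and are imported back at the top of `main.py`. A minimal sketch of what that module presumably contains, condensed from the removed in-file implementation (the published file may differ):

    # commit_maker/colored.py, sketched from the helpers removed above
    ANSI = {
        "red": "\033[31m",
        "green": "\033[32m",
        "yellow": "\033[33m",
        "blue": "\033[94m",
        "magenta": "\033[95m",
        "cyan": "\033[96m",
        "reset": "\033[0m",
    }


    def bold(text: str) -> str:
        """Wrap text in ANSI bold start/end markers."""
        return f"\033[1m{text}\033[0m"


    def colored(string: str, color: str, text_bold: bool = True) -> str:
        """Paint a string for terminal output, bold by default."""
        painted = f"{ANSI[color]}{string}{ANSI['reset']}"
        return bold(painted) if text_bold else painted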
@@ -114,173 +50,65 @@ general_params.add_argument(
     action="version",
     version=f"%(prog)s {importlib.metadata.version('commit-maker')}",
 )
+general_params.add_argument(
+    "-o",
+    "--timeout",
+    type=int,
+    default=None,
+    help="Change timeout for models. Default is None.",
+)
 
-#
-generation_params = parser.add_argument_group("
+# Generation parameters
+generation_params = parser.add_argument_group("Generation parameters")
 generation_params.add_argument(
     "-t",
     "--temperature",
     default=1.0,
     type=float,
-    help="
-
+    help="Model temperature for message generation. "
+    "Range: [0.0, 1.5]. Default: 1.0",
 )
 generation_params.add_argument(
     "-m",
     "--max-symbols",
     type=int,
     default=200,
-    help="
+    help="Maximum commit message length. Default: 200",
 )
 generation_params.add_argument(
     "-M",
     "--model",
     type=str,
-    help="
+    help="Model to be used by ollama",
 )
 generation_params.add_argument(
     "-e",
     "--exclude",
     nargs="+",
     default=[],
-    help="
+    help="Files to exclude when generating commit message",
+)
+generation_params.add_argument(
+    "-w",
+    "--wish",
+    default=None,
+    type=str,
+    help="Custom wishes/edits for the commit message",
+)
+generation_params.add_argument(
+    "-L",
+    "--language",
+    choices=available_langs,
+    default="ru",
+    help="Language of generated commit message (en/ru)",
 )
 
 
-#
-class MistralAI:
-    """Class for talking to MistralAI.
-    Written using requests."""
-
-    def __init__(
-        self,
-        api_key: str,
-        model: str = "mistral-small-latest",
-    ):
-        """Initialize the class
-
-        Args:
-            api_key (str): MistralAI API key
-        """
-        self.url = "https://api.mistral.ai/v1/chat/completions"
-        self.api_key = api_key
-        self.headers = {
-            "Content-Type": "application/json",
-            "Accept": "application/json",
-            "Authorization": f"Bearer {api_key}",
-        }
-        self.model = model
-
-    def message(
-        self,
-        message: str,
-        role: str = "user",
-        temperature: float = 0.7,
-    ) -> str:
-        """Message function
-
-        Args:
-            message (str): The message
-            role (str, optional): Role. Defaults to "user".
-
-        Returns:
-            str: JSON response/Err
-        """
-        data = {
-            "model": self.model,
-            "messages": [
-                {
-                    "role": role,
-                    "content": message,
-                }
-            ],
-            "temperature": 0.7,
-        }
-        try:
-            response = requests.post(
-                url=self.url,
-                json=data,
-                headers=self.headers,
-                timeout=60,
-            )
-            response.raise_for_status()
-            return response.json()["choices"][0]["message"]["content"]
-
-        except requests.exceptions.RequestException as e:
-            print(colored(f"Error when contacting Mistral AI: {e}", "red"))
-        except KeyError:
-            print(colored("Error parsing the response from Mistral AI", "red"))
-
-
-# Class for using the Ollama API
-class Ollama:
-    """Class for talking to local Ollama models.
-    Written using requests."""
-
-    def __init__(
-        self,
-        model: str,
-    ):
-        """Initialize the class"""
-        self.model = model
-        self.url = "http://localhost:11434/api/chat"
-        self.headers = {
-            "Content-Type": "application/json",
-            "Accept": "application/json",
-        }
-
-    def message(
-        self,
-        message: str,
-        temperature: float = 0.7,
-        role: str = "user",
-    ) -> str:
-        """Message function
-
-        Args:
-            message (str): The message
-            model (str): The model to talk to
-            temperature (float, optional): Chat temperature. Defaults to 0.7
-            role (str, optional): Role in the message.
-
-        Returns:
-            str: JSON response/Err
-        """
-        data = {
-            "model": self.model,
-            "messages": [
-                {
-                    "role": role,
-                    "content": message,
-                }
-            ],
-            "options": {
-                "temperature": temperature,
-            },
-            "stream": False,
-        }
-
-        try:
-            response = requests.post(
-                url=self.url,
-                json=data,
-                headers=self.headers,
-                timeout=60,
-            )
-            response.raise_for_status()  # raises an error on a bad status
-            return response.json()["choices"][0]["message"]["content"]
-
-        except requests.exceptions.RequestException as e:
-            print(colored(f"Error when contacting Ollama: {e}", "red"))
-        except KeyError:
-            print(colored("Error parsing the response from Ollama", "red"))
-
-
-# main function
+# Main function
 
 
 def main() -> None:
-    #
+    # Parsing arguments
     parsed_args = parser.parse_args()
     use_local_models = parsed_args.local_models
    max_symbols = parsed_args.max_symbols
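The inline `MistralAI` and `Ollama` classes removed here now live in the new `commit_maker/mistral.py` and `commit_maker/ollama.py` modules. Judging by the call sites later in this diff, their `message` method was also reworked: it now takes a `messages` list (system plus user roles) and `temperature`/`timeout` keywords instead of a single string. A rough sketch of the relocated Ollama client under those assumptions; the return expression is also an assumption, since Ollama's `/api/chat` wraps the reply as `{"message": {"content": ...}}` rather than the OpenAI-style `choices` list the removed code indexed:

    # commit_maker/ollama.py, inferred from the removed class and the
    # new call sites; not the verbatim 0.3.0 source.
    import requests


    class Ollama:
        def __init__(self, model: str):
            self.model = model
            self.url = "http://localhost:11434/api/chat"

        def message(
            self,
            messages: list,
            temperature: float = 0.7,
            timeout: int | None = None,
        ) -> str:
            data = {
                "model": self.model,
                "messages": messages,
                "options": {"temperature": temperature},
                "stream": False,
            }
            response = requests.post(url=self.url, json=data, timeout=timeout)
            response.raise_for_status()
            return response.json()["message"]["content"]

The new flags compose into invocations like `commit_maker -l -M llama3 -L en -o 120 -w "mention the config rename"` (a hypothetical example).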
@@ -288,24 +116,31 @@ def main() -> None:
     dry_run = parsed_args.dry_run
     temperature = parsed_args.temperature
     excluded_files = parsed_args.exclude
-
-
-
-
-
-
-
-
-
+    wish = parsed_args.wish
+    timeout = parsed_args.timeout
+    lang = parsed_args.language
+
+    # AI prompt
+    prompt_for_ai = f"""You are a git commit message generator.
+Generate a single commit message in
+{"Russian" if lang == "ru" else "English"} that:
+Clearly summarizes the purpose of the changes.
+Does not exceed {max_symbols} characters.
+Uses information from git status and git diff.
+Takes into account user preferences: {wish}.
+Output only the commit message — plain text, no markdown, no
+explanations, no formatting."""
 
     try:
         if not use_local_models and not mistral_api_key:
-            print(
-
+            console.print(
+                "MISTRAL_API_KEY not found for API usage!",
+                style="red",
+                highlight=False,
             )
             return
 
-        #
+        # Get git version if available
         git_version = subprocess.run(  # noqa
             ["git", "--version"],
             capture_output=True,
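One consequence of the new prompt f-string worth noting: when `-w/--wish` is omitted, `wish` is `None`, and Python interpolates the literal text `None` into the prompt rather than dropping the clause:

    # Plain Python f-string behavior, shown for illustration
    >>> wish = None
    >>> f"Takes into account user preferences: {wish}."
    'Takes into account user preferences: None.'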
@@ -313,98 +148,12 @@ def main() -> None:
             encoding="utf-8",
         ).stdout
 
-        if
-            # Get the list of models from Ollama, if Ollama is present
-            ollama_list_of_models = (
-                subprocess.run(
-                    ["ollama", "ls"],
-                    capture_output=True,
-                    text=True,
-                    encoding="utf-8",
-                )
-                .stdout.strip()
-                .split("\n")
-            )
-            ollama_list_of_models = [
-                i.split()[0] for i in ollama_list_of_models[1:]
-            ]
-        else:
-            ollama_list_of_models = 0
-
-        # Handle a missing ollama
-        if not ollama_list_of_models and use_local_models:
-            print(
-                colored(
-                    "Ollama is not installed or the model list is empty!", "yellow"
-                )
-                + " To install, go to https://ollama.com/download"
-            )
-            return None
-        elif not use_local_models and model:
-            print(
-                f"To use {model} locally, use the flag "
-                + colored("--local-models", "yellow")
-                + ". If you need help: "
-                + colored("--help", "yellow")
-            )
-            return None
-        elif ollama_list_of_models and use_local_models:
-            if not model:
-                if len(ollama_list_of_models) > 1:
-                    print(
-                        colored(
-                            "To use local models, you need "
-                            "to select a model:",
-                            "yellow",
-                        )
-                        + "\n"
-                        + "\n".join(
-                            [
-                                f"{i + 1}. {colored(model, 'magenta', False,)}"
-                                for i, model in enumerate(
-                                    ollama_list_of_models
-                                )
-                            ]
-                        )
-                    )
-                    model_is_selected = False
-                    while not model_is_selected:
-                        model = input(
-                            colored(
-                                "Enter a number from 1 to "
-                                f"{len(ollama_list_of_models)}: ",
-                                "yellow",
-                            )
-                        )
-                        model = int(model) if model.isdigit() else -1
-                        if model > len(ollama_list_of_models) or model == -1:
-                            continue
-                        model = ollama_list_of_models[model - 1]
-                        model_is_selected = True
-                        break
-                else:
-                    model = ollama_list_of_models[0]
-            else:
-                if model not in ollama_list_of_models:
-                    print(
-                        colored(
-                            f"{model} is not an available model! ", "red"
-                        )
-                        + "Available models: "
-                        + colored(
-                            f"{', '.join(ollama_list_of_models)}", "yellow"
-                        )
-                    )
-                    return None
-            if model:
-                print("Selected model: " + colored(model, "yellow"))
-
-        # Check whether .git exists
+        # Check if .git exists
         dot_git = ".git" in os.listdir("./")
 
-        #
+        # If .git exists
         if dot_git:
-            #
+            # Get commit differences
             git_status = subprocess.run(
                 ["git", "status"],
                 capture_output=True,
@@ -421,9 +170,7 @@ def main() -> None:
 
             if excluded_files:
                 git_diff_command = ["git", "diff", "--staged", "--", "."]
-                git_diff_command.extend(
-                    [f":!{file}" for file in excluded_files]
-                )
+                git_diff_command.extend([f":!{file}" for file in excluded_files])  # noqa
             else:
                 git_diff_command = ["git", "diff", "--staged"]
 
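The single-line `extend` above builds git pathspecs of the form `:!<path>`, git's exclude magic, so excluded files never reach the model. For example, with `--exclude poetry.lock` (a hypothetical flag value) the command becomes:

    # Resulting argument list for --exclude poetry.lock
    git_diff_command = ["git", "diff", "--staged", "--", ".", ":!poetry.lock"]
    # Shell equivalent: git diff --staged -- . ':!poetry.lock'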
@@ -444,15 +191,18 @@ def main() -> None:
                     encoding="utf-8",
                 ).stdout
             )
-            ):  #
-                print(
+            ):  # Check for no changes
+                console.print(
+                    "[red]No changes added![/red]",
+                    highlight=False,
+                )
                 return None
             if not git_diff.stdout:
                 if not dry_run:
                     if (
                         input(
-                            colored("
-                            + "
+                            colored("No staged changes!", "red")
+                            + " Add all automatically using "
                             + colored("git add -A", "yellow")
                             + "? [y/N]: "
                         )
@@ -462,17 +212,18 @@ def main() -> None:
                             ["git", "add", "-A"],
                         )
                     else:
-                        print(
-
-
-
+                        console.print(
+                            "Add required files manually.",
+                            style="yellow",
+                            highlight=False,
                         )
                         return None
                 else:
-                    print(
-
-
-
+                    console.print(
+                        "[red]Nothing to commit![/red]"
+                        " Add required files using "
+                        "[yellow]git add <filename>[/yellow]",
+                        highlight=False,
                     )
                     return None
             git_diff = subprocess.run(
@@ -486,18 +237,123 @@ def main() -> None:
                 capture_output=True,
                 encoding="utf-8",
             ).stdout:
-                print(
-
-
-
-
+                console.print(
+                    "[red]Note: You have unstaged changes![/red]"
+                    " To add more files, press "
+                    "[yellow]Ctrl + C[/yellow] and run "
+                    "[yellow]git add <filename>[/yellow]",
+                    highlight=False,
+                )
+
+            if use_local_models:
+                # Check Ollama installation
+                try:
+                    subprocess.run(
+                        ["ollama", "--version"],
+                        text=True,
+                        capture_output=True,
+                    )
+                except FileNotFoundError:
+                    console.print(
+                        "Ollama is not installed!",
+                        style="red bold",
                 )
-
-
-
-
-
+                    return None
+
+                # Check if Ollama is running
+                ollama_served = (
+                    requests.get("http://localhost:11434").status_code == 200
             )
+
+                if ollama_served:
+                    # Get list of models from Ollama
+                    ollama_models_json = requests.get(
+                        "http://localhost:11434/api/tags"
+                    ).json()
+                    if ollama_models_json["models"]:
+                        ollama_list_of_models = [
+                            i["model"] for i in ollama_models_json["models"]
+                        ]
+                    else:
+                        console.print(
+                            "[yellow]Ollama model list is empty!"
+                            "[/yellow] To install models, visit "
+                            "https://ollama.com/models",
+                            highlight=False,
+                        )
+                        return None
+                else:
+                    console.print(
+                        "[yellow]Ollama server not running\n"
+                        "or not installed![/yellow]"
+                    )
+                    return None
+            else:
+                ollama_list_of_models = 0
+
+            # Handle missing Ollama
+            if not ollama_list_of_models and use_local_models:
+                console.print(
+                    "[yellow]Ollama is not installed or model list is empty!"
+                    "[/yellow] To install, visit "
+                    "https://ollama.com/download",
+                    highlight=False,
+                )
+                return None
+            elif not use_local_models and model:
+                console.print(
+                    f"To use {model} locally, use the flag "
+                    "[yellow]--local-models[/yellow]. For help: "
+                    "[yellow]--help[/yellow]",
+                    highlight=False,
+                )
+                return None
+            elif ollama_list_of_models and use_local_models:
+                if not model:
+                    if len(ollama_list_of_models) > 1:
+                        console.print(
+                            "[yellow]Select a local model:[/yellow]\n"
+                            + "\n".join(
+                                [
+                                    f"[magenta]{i + 1}. {model}[/magenta]"
+                                    for i, model in enumerate(ollama_list_of_models)  # noqa
+                                ]
+                            ),
+                            highlight=False,
+                        )
+                        model_is_selected = False
+                        while not model_is_selected:
+                            model = prompt.ask(
+                                "[yellow]Enter a number from 1 to "
+                                f"{len(ollama_list_of_models)}[/yellow]",
+                            )
+                            if not (1 <= model <= len(ollama_list_of_models)):
+                                console.print(
+                                    "[red]Enter a valid number![/red]",
+                                    highlight=False,
+                                )
+                                continue
+                            model = ollama_list_of_models[model - 1]
+                            model_is_selected = True
+                            break
+                    else:
+                        model = ollama_list_of_models[0]
+                else:
+                    if model not in ollama_list_of_models:
+                        console.print(
+                            f"[red]{model} is not an available model!"
+                            "[/red] "
+                            "Available models: [yellow]"
+                            f"{', '.join(ollama_list_of_models)}[/yellow]",
+                            highlight=False,
+                        )
+                        return None
+                if model:
+                    console.print(
+                        f"Selected model: [yellow]{model}[/yellow]",
+                        highlight=False,
+                    )
+            # Create AI client
             if use_local_models:
                 client = Ollama(model=model)
             else:
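Model discovery now goes through Ollama's HTTP API instead of parsing `ollama ls` output (removed in an earlier hunk), and the numeric menu answer comes from `prompt.ask`, i.e. the new `CustomIntPrompt`, which presumably wraps rich's `IntPrompt` and returns an `int` (hence the `1 <= model <= len(...)` range check). The code assumes `/api/tags` responds with a `models` array whose entries carry a `model` name, roughly:

    # Shape of the /api/tags JSON this code relies on; real entries carry
    # more fields, and the values here are only illustrative.
    {
        "models": [
            {"model": "llama3:latest"},
            {"model": "qwen2.5-coder:7b"},
        ]
    }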
@@ -509,19 +365,30 @@ def main() -> None:
             retry = True
             while retry:
                 with console.status(
-                    "[magenta bold]
+                    "[magenta bold]Generating commit message...",
                     spinner_style="magenta",
                 ):
-                    commit_message =
-                    message
-
-
-
-
-
+                    commit_message = cut_think(
+                        client.message(
+                            messages=[
+                                {
+                                    "role": "system",
+                                    "content": prompt_for_ai,
+                                },
+                                {
+                                    "role": "user",
+                                    "content": "Git status: "
+                                    + git_status.stdout
+                                    + "Git diff: "
+                                    + git_diff.stdout,
+                                },
+                            ],
+                            temperature=temperature,
+                            timeout=timeout,
+                        )
                     )
                 commit_with_message_from_ai = input(
-                    "
+                    "Commit with message "
                     + colored(f"'{commit_message}'", "yellow")
                     + "? [y/N/r]: "
                 )
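`cut_think` comes from the new six-line `commit_maker/cut_think_part.py`. Judging by its name and its position wrapping `client.message`, it strips the `<think>...</think>` block that reasoning models (for example deepseek-r1 under ollama) prepend to their replies, so only the commit message survives. A plausible sketch under that assumption:

    # commit_maker/cut_think_part.py, sketched from the name and call site
    import re


    def cut_think(text: str) -> str:
        """Drop a <think>...</think> reasoning block, if present."""
        return re.sub(r"<think>.*?</think>", "", text, flags=re.DOTALL).strip()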
@@ -533,37 +400,45 @@ def main() -> None:
                         ["git", "commit", "-m", f"{commit_message}"],
                         encoding="utf-8",
                     )
-                    print(
-
-
-
-                        + "Git status: "
-                        + git_status.stdout
-                        + "Git diff: "
-                        + git_diff.stdout,
-                        temperature=temperature,
-                    )
-                    print(
-                        colored(
-                            "The commit message was generated successfully:", "green", False
+                    console.print(
+                        "Commit created successfully!",
+                        style="green bold",
+                        highlight=False,
                     )
-
-
-
-
-
-
+            else:
+                with console.status(
+                    "[magenta bold]Generating commit message...",
+                    spinner_style="magenta",
+                ):
+                    commit_message = cut_think(
+                        client.message(
+                            messages=[
+                                {
+                                    "role": "system",
+                                    "content": prompt_for_ai,
+                                },
+                                {
+                                    "role": "user",
+                                    "content": "Git status: "
+                                    + git_status.stdout
+                                    + "Git diff: "
+                                    + git_diff.stdout,
+                                },
+                            ],
+                            temperature=temperature,
+                            timeout=timeout,
+                        )
                     )
-                )
+                console.print(commit_message, style="yellow", highlight=False)
             return None
 
-        #
+        # If .git does not exist
         else:
             init_git_repo = (
                 True
                 if input(
-                    colored("
-                    + "
+                    colored("Git repository not initialized!", "red")
+                    + " Run "
                     + colored("git init", "yellow")
                     + "? [y/N]: "
                 )
@@ -593,21 +468,17 @@ def main() -> None:
                 ),
             )
             if input(
-                "
+                "Make first commit with message "
                 + colored("'Initial commit?'", "yellow")
                 + " [y/N]: "
             )
             == "y"
             else None
         )
-    except
-
-
-
-            else colored(str(e), "red")
-        )
-    except Exception as e:
-        print(colored("Error:", "red") + " " + str(e))
+    except KeyboardInterrupt:
+        return None
+    except Exception:
+        console.print_exception()
 
 
 if __name__ == "__main__":