git-llm-tool 0.1.0 (py3-none-any.whl)

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.

Potentially problematic release.


This version of git-llm-tool might be problematic.

@@ -0,0 +1,202 @@
+"""Base LLM provider interface."""
+
+from abc import ABC, abstractmethod
+from typing import Dict, Any, Optional
+
+from git_llm_tool.core.config import AppConfig
+
+
+class PromptTemplates:
+    """Centralized prompt templates for better code readability."""
+
+    # Base prompt with conventional commit types
+    BASE_COMMIT_PROMPT = """Based on the following git diff, generate a concise commit message in {language}.
+
+**Conventional Commit types**:
+- feat: new feature
+- fix: bug fix
+- docs: documentation changes
+- style: formatting, missing semicolons, etc
+- refactor: code restructuring without changing functionality
+- test: adding or modifying tests
+- chore: maintenance tasks
+
+Git diff:
+```
+{diff}
+```"""
+
+    # Format for commits without Jira tickets
+    NO_JIRA_FORMAT = """
+Generate the commit message in **bulleted format**:
+- feat: description of new features
+- fix: description of bug fixes
+- docs: description of documentation changes
+(include only the types that apply to your changes)
+
+Example format:
+- feat: add user authentication endpoints
+- fix: resolve login validation issue"""
+
+    # Format for commits with Jira tickets
+    JIRA_FORMAT = """
+**Jira ticket found**: {jira_ticket}
+
+Generate the commit message in this **exact format**:
+{jira_ticket} <summary> #time <time_spent>
+- feat: detailed description of new features
+- fix: detailed description of bug fixes
+- docs: detailed description of documentation changes
+(include only the types that apply to your changes)
+
+Where:
+- First line: {jira_ticket} <brief_summary> #time <time_spent>
+- Following lines: List each change type with "- type: description" format
+- Only include the conventional commit types that actually apply to the changes"""
+
+    # Time tracking instructions
+    EXACT_TIME_INSTRUCTION = """
+**Use exact time**: #time {work_hours}
+
+Example format:
+{jira_ticket} Implement user authentication system #time {work_hours}
+- feat: add login and registration endpoints
+- feat: implement JWT token validation
+- docs: update API documentation"""
+
+    ESTIMATE_TIME_INSTRUCTION = """
+**Estimate appropriate time** (e.g., #time 2h, #time 45m, #time 30m)
+
+Example format:
+{jira_ticket} Implement user authentication system #time 2h
+- feat: add login and registration endpoints
+- feat: implement JWT token validation
+- docs: update API documentation"""
+
+    # Final instruction
+    FINAL_INSTRUCTION = "\n\nGenerate ONLY the commit message in the specified format, no additional text or explanation."
+
+
+class LlmProvider(ABC):
+    """Abstract base class for LLM providers."""
+
+    def __init__(self, config: AppConfig):
+        """Initialize the provider with configuration."""
+        self.config = config
+
+    @abstractmethod
+    def generate_commit_message(
+        self,
+        diff: str,
+        jira_ticket: Optional[str] = None,
+        work_hours: Optional[str] = None,
+        **kwargs,
+    ) -> str:
+        """Generate commit message from git diff and optional Jira information.
+
+        Args:
+            diff: Git diff output
+            jira_ticket: Jira ticket number (optional)
+            work_hours: Work hours spent (optional)
+            **kwargs: Additional provider-specific arguments
+
+        Returns:
+            Generated commit message
+
+        Raises:
+            ApiError: If API call fails
+        """
+        pass
+
+    @abstractmethod
+    def generate_changelog(self, commit_messages: list[str], **kwargs) -> str:
+        """Generate changelog from commit messages.
+
+        Args:
+            commit_messages: List of commit messages
+            **kwargs: Additional provider-specific arguments
+
+        Returns:
+            Generated changelog in markdown format
+
+        Raises:
+            ApiError: If API call fails
+        """
+        pass
+
+    def _build_commit_prompt(
+        self,
+        diff: str,
+        jira_ticket: Optional[str] = None,
+        work_hours: Optional[str] = None,
+    ) -> str:
+        """Build prompt for commit message generation."""
+
+        # Prepare all template variables
+        template_vars = {
+            'language': self.config.llm.language,
+            'diff': diff,
+            'jira_ticket': jira_ticket or '',
+            'work_hours': work_hours or ''
+        }
+
+        # Build prompt components
+        prompt_parts = [PromptTemplates.BASE_COMMIT_PROMPT]
+
+        # Add format-specific instructions
+        if jira_ticket:
+            prompt_parts.append(PromptTemplates.JIRA_FORMAT)
+
+            # Add time tracking instructions
+            if work_hours:
+                prompt_parts.append(PromptTemplates.EXACT_TIME_INSTRUCTION)
+            else:
+                prompt_parts.append(PromptTemplates.ESTIMATE_TIME_INSTRUCTION)
+        else:
+            prompt_parts.append(PromptTemplates.NO_JIRA_FORMAT)
+
+        # Add final instruction
+        prompt_parts.append(PromptTemplates.FINAL_INSTRUCTION)
+
+        # Combine and format all parts at once
+        full_template = ''.join(prompt_parts)
+        return full_template.format(**template_vars)
+
+    def _build_changelog_prompt(self, commit_messages: list[str]) -> str:
+        """Build prompt for changelog generation."""
+        commits_text = "\n".join([f"- {msg}" for msg in commit_messages])
+
+        return f"""Generate a structured changelog in {self.config.llm.language} from the following commit messages.
+
+Organize by categories:
+✨ **Features**
+🐛 **Bug Fixes**
+📚 **Documentation**
+🎨 **Style**
+♻️ **Refactoring**
+🧪 **Tests**
+🔧 **Chores**
+💥 **Breaking Changes**
+
+Only include categories that have items. Use markdown format.
+
+Commit messages:
+{commits_text}
+
+Generate the changelog:"""
+
+    @abstractmethod
+    def _make_api_call(self, prompt: str, **kwargs) -> str:
+        """Make API call to the LLM provider.
+
+        Args:
+            prompt: The prompt to send
+            **kwargs: Provider-specific arguments
+
+        Returns:
+            Generated text response
+
+        Raises:
+            ApiError: If API call fails
+        """
+        pass
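
The hunk above is the shared provider interface: prompt assembly (`_build_commit_prompt`, `_build_changelog_prompt`) lives in the base class, while concrete providers supply `generate_commit_message`, `generate_changelog`, and `_make_api_call`. A minimal sketch of how a subclass is expected to plug in, using a hypothetical `EchoProvider` that is not part of this package:

```python
# Illustrative only: a hypothetical provider wired into the LlmProvider base class.
from typing import Optional

from git_llm_tool.providers.base import LlmProvider


class EchoProvider(LlmProvider):
    """Hypothetical provider that returns the built prompt instead of calling an API."""

    def generate_commit_message(
        self,
        diff: str,
        jira_ticket: Optional[str] = None,
        work_hours: Optional[str] = None,
        **kwargs,
    ) -> str:
        # Prompt assembly is inherited from the base class; only delivery differs per provider.
        return self._make_api_call(self._build_commit_prompt(diff, jira_ticket, work_hours))

    def generate_changelog(self, commit_messages: list[str], **kwargs) -> str:
        return self._make_api_call(self._build_changelog_prompt(commit_messages))

    def _make_api_call(self, prompt: str, **kwargs) -> str:
        # A real provider would call its SDK here; this stub just echoes the prompt.
        return prompt
```

The shipped providers below follow the same shape; only `_make_api_call` talks to an SDK.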
@@ -0,0 +1,77 @@
+"""LLM Provider factory for automatic provider selection."""
+
+from git_llm_tool.core.config import AppConfig
+from git_llm_tool.core.exceptions import ApiError
+from git_llm_tool.providers.base import LlmProvider
+from git_llm_tool.providers.openai import OpenAiProvider
+from git_llm_tool.providers.azure_openai import AzureOpenAiProvider
+from git_llm_tool.providers.anthropic import AnthropicProvider
+from git_llm_tool.providers.gemini import GeminiProvider
+
+
+def get_provider(config: AppConfig) -> LlmProvider:
+    """Get appropriate LLM provider based on model name in config.
+
+    Args:
+        config: Application configuration
+
+    Returns:
+        Initialized LLM provider
+
+    Raises:
+        ApiError: If no suitable provider is found or API key is missing
+    """
+    model = config.llm.default_model.lower()
+
+    # Check if Azure OpenAI is configured (highest priority for OpenAI-compatible models)
+    if config.llm.azure_openai and config.llm.azure_openai.get("endpoint"):
+        if model.startswith(("gpt-", "o1-")) or "azure" in model:
+            if "azure_openai" not in config.llm.api_keys:
+                raise ApiError("Azure OpenAI API key required for Azure OpenAI models")
+            return AzureOpenAiProvider(config)
+
+    # OpenAI models (regular OpenAI API)
+    if model.startswith(("gpt-", "o1-")):
+        if "openai" not in config.llm.api_keys:
+            raise ApiError("OpenAI API key required for GPT models")
+        return OpenAiProvider(config)
+
+    # Anthropic models
+    elif model.startswith("claude-"):
+        if "anthropic" not in config.llm.api_keys:
+            raise ApiError("Anthropic API key required for Claude models")
+        return AnthropicProvider(config)
+
+    # Google models
+    elif model.startswith("gemini-"):
+        if "google" not in config.llm.api_keys:
+            raise ApiError("Google API key required for Gemini models")
+        return GeminiProvider(config)
+
+    # Fallback logic - try providers in order of preference
+    else:
+        # Try Azure OpenAI first if configured
+        if config.llm.azure_openai and config.llm.azure_openai.get("endpoint") and "azure_openai" in config.llm.api_keys:
+            return AzureOpenAiProvider(config)
+
+        # Try OpenAI second (most common)
+        elif "openai" in config.llm.api_keys:
+            return OpenAiProvider(config)
+
+        # Try Anthropic third
+        elif "anthropic" in config.llm.api_keys:
+            return AnthropicProvider(config)
+
+        # Try Google last
+        elif "google" in config.llm.api_keys:
+            return GeminiProvider(config)
+
+        # No API keys available
+        else:
+            raise ApiError(
+                "No API keys configured. Please set at least one API key:\n"
+                " git-llm config set llm.api_keys.openai sk-your-key\n"
+                " git-llm config set llm.api_keys.azure_openai your-azure-key\n"
+                " git-llm config set llm.api_keys.anthropic sk-ant-your-key\n"
+                " git-llm config set llm.api_keys.google your-google-key"
+            )
@@ -0,0 +1,83 @@
+"""Google Gemini LLM provider implementation."""
+
+from typing import Optional
+import google.generativeai as genai
+
+from git_llm_tool.core.config import AppConfig
+from git_llm_tool.core.exceptions import ApiError
+from git_llm_tool.providers.base import LlmProvider
+
+
+class GeminiProvider(LlmProvider):
+    """Google Gemini provider implementation."""
+
+    def __init__(self, config: AppConfig):
+        """Initialize Gemini provider."""
+        super().__init__(config)
+
+        # Get API key
+        api_key = config.llm.api_keys.get("google")
+        if not api_key:
+            raise ApiError("Google API key not found in configuration")
+
+        # Configure Gemini
+        genai.configure(api_key=api_key)
+
+        # Determine model
+        model = config.llm.default_model
+        if not model.startswith("gemini-"):
+            # Fallback to Gemini Pro if model doesn't look like Google model
+            model = "gemini-1.5-pro"
+        self.model = genai.GenerativeModel(model)
+
+    def generate_commit_message(
+        self,
+        diff: str,
+        jira_ticket: Optional[str] = None,
+        work_hours: Optional[str] = None,
+        **kwargs
+    ) -> str:
+        """Generate commit message using Gemini API."""
+        prompt = self._build_commit_prompt(diff, jira_ticket, work_hours)
+        return self._make_api_call(prompt, **kwargs)
+
+    def generate_changelog(
+        self,
+        commit_messages: list[str],
+        **kwargs
+    ) -> str:
+        """Generate changelog using Gemini API."""
+        prompt = self._build_changelog_prompt(commit_messages)
+        return self._make_api_call(prompt, **kwargs)
+
+    def _make_api_call(self, prompt: str, **kwargs) -> str:
+        """Make API call to Gemini."""
+        try:
+            # Configure generation parameters
+            generation_config = genai.types.GenerationConfig(
+                max_output_tokens=kwargs.get("max_tokens", 150),
+                temperature=kwargs.get("temperature", 0.7),
+            )
+
+            # Make API call
+            response = self.model.generate_content(
+                prompt,
+                generation_config=generation_config
+            )
+
+            # Extract response text
+            if response.text:
+                return response.text.strip()
+
+            raise ApiError("Empty response from Gemini API")
+
+        except Exception as e:
+            # Gemini exceptions are not well documented, so catch all
+            if "API_KEY" in str(e).upper():
+                raise ApiError("Invalid Google API key")
+            elif "QUOTA" in str(e).upper() or "RATE" in str(e).upper():
+                raise ApiError("Google API rate limit exceeded")
+            elif "CONNECTION" in str(e).upper() or "NETWORK" in str(e).upper():
+                raise ApiError("Failed to connect to Google API")
+            else:
+                raise ApiError(f"Google API error: {e}")
@@ -0,0 +1,93 @@
+"""OpenAI LLM provider implementation."""
+
+from typing import Optional
+import openai
+
+from git_llm_tool.core.config import AppConfig
+from git_llm_tool.core.exceptions import ApiError
+from git_llm_tool.providers.base import LlmProvider
+
+
+class OpenAiProvider(LlmProvider):
+    """OpenAI GPT provider implementation."""
+
+    def __init__(self, config: AppConfig):
+        """Initialize OpenAI provider."""
+        super().__init__(config)
+
+        # Get API key
+        api_key = config.llm.api_keys.get("openai")
+        if not api_key:
+            raise ApiError("OpenAI API key not found in configuration")
+
+        # Initialize OpenAI client
+        self.client = openai.OpenAI(api_key=api_key)
+
+        # Determine model
+        model = config.llm.default_model
+        if not model.startswith(("gpt-", "o1-")):
+            # Fallback to GPT-4o if model doesn't look like OpenAI model
+            model = "gpt-4o"
+        self.model = model
+
+    def generate_commit_message(
+        self,
+        diff: str,
+        jira_ticket: Optional[str] = None,
+        work_hours: Optional[str] = None,
+        **kwargs
+    ) -> str:
+        """Generate commit message using OpenAI API."""
+        prompt = self._build_commit_prompt(diff, jira_ticket, work_hours)
+        return self._make_api_call(prompt, **kwargs)
+
+    def generate_changelog(
+        self,
+        commit_messages: list[str],
+        **kwargs
+    ) -> str:
+        """Generate changelog using OpenAI API."""
+        prompt = self._build_changelog_prompt(commit_messages)
+        return self._make_api_call(prompt, **kwargs)
+
+    def _make_api_call(self, prompt: str, **kwargs) -> str:
+        """Make API call to OpenAI."""
+        try:
+            # Default parameters
+            api_params = {
+                "model": self.model,
+                "messages": [
+                    {
+                        "role": "system",
+                        "content": "You are a helpful assistant that generates git commit messages and changelogs."
+                    },
+                    {
+                        "role": "user",
+                        "content": prompt
+                    }
+                ],
+                "max_tokens": kwargs.get("max_tokens", 150),
+                "temperature": kwargs.get("temperature", 0.7),
+            }
+
+            # Make API call
+            response = self.client.chat.completions.create(**api_params)
+
+            # Extract response text
+            if response.choices and len(response.choices) > 0:
+                content = response.choices[0].message.content
+                if content:
+                    return content.strip()
+
+            raise ApiError("Empty response from OpenAI API")
+
+        except openai.AuthenticationError:
+            raise ApiError("Invalid OpenAI API key")
+        except openai.RateLimitError:
+            raise ApiError("OpenAI API rate limit exceeded")
+        except openai.APIConnectionError:
+            raise ApiError("Failed to connect to OpenAI API")
+        except openai.APIError as e:
+            raise ApiError(f"OpenAI API error: {e}")
+        except Exception as e:
+            raise ApiError(f"Unexpected error calling OpenAI API: {e}")
@@ -0,0 +1,21 @@
+MIT License
+
+Copyright (c) 2024 skyler-gogolook
+
+Permission is hereby granted, free of charge, to any person obtaining a copy
+of this software and associated documentation files (the "Software"), to deal
+in the Software without restriction, including without limitation the rights
+to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
+copies of the Software, and to permit persons to whom the Software is
+furnished to do so, subject to the following conditions:
+
+The above copyright notice and this permission notice shall be included in all
+copies or substantial portions of the Software.
+
+THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
+IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
+FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
+AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
+LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
+OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
+SOFTWARE.