craftcommit-cli 0.3.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/README.md +222 -0
- package/dist/ai.d.ts +9 -0
- package/dist/ai.d.ts.map +1 -0
- package/dist/ai.js +189 -0
- package/dist/ai.js.map +1 -0
- package/dist/cli.d.ts +3 -0
- package/dist/cli.d.ts.map +1 -0
- package/dist/cli.js +262 -0
- package/dist/cli.js.map +1 -0
- package/dist/config.d.ts +16 -0
- package/dist/config.d.ts.map +1 -0
- package/dist/config.js +94 -0
- package/dist/config.js.map +1 -0
- package/package.json +52 -0
- package/src/ai.ts +268 -0
- package/src/cli.ts +310 -0
- package/src/config.ts +118 -0
- package/tsconfig.json +20 -0
package/src/ai.ts
ADDED
|
@@ -0,0 +1,268 @@
|
|
|
1
|
+
import OpenAI from 'openai';
|
|
2
|
+
import { GoogleGenerativeAI } from '@google/generative-ai';
|
|
3
|
+
import type { Provider, Language } from './config.js';
|
|
4
|
+
|
|
5
|
+
/** A single Conventional Commits suggestion returned by an AI provider. */
export interface CommitMessage {
  // Conventional Commits type, e.g. "feat", "fix", "chore".
  type: string;
  // Optional affected area (e.g. "auth"); omitted when changes span areas.
  scope?: string;
  // Imperative-mood summary; validateCandidates trims and caps it at 72 chars.
  subject: string;
}
|
|
10
|
+
|
|
11
|
+
// System prompt for English output: instructs the model to return strict JSON
// with exactly 3 Conventional Commits candidates (parsed by validateCandidates).
const SYSTEM_PROMPT_EN = `You are a commit message generator. Analyze the git diff and generate 3 different commit message suggestions following Conventional Commits format.

Output JSON only: { "candidates": [{ "type": "...", "scope": "...", "subject": "..." }, ...] }

Rules:
- type: one of feat, fix, docs, style, refactor, perf, test, build, ci, chore
- scope: optional, short identifier for affected area (e.g., "auth", "api", "ui"). Omit if changes span multiple areas
- subject: imperative mood (e.g., "add", "fix", "update"), max 72 chars, no period at end, lowercase first letter
- Provide 3 different variations with different wording or focus

Example output:
{ "candidates": [
  { "type": "feat", "scope": "auth", "subject": "add OAuth2 login support" },
  { "type": "feat", "scope": "auth", "subject": "implement OAuth2 authentication flow" },
  { "type": "feat", "subject": "add social login via OAuth2" }
]}`;
|
|
27
|
+
|
|
28
|
+
// Korean variant of the system prompt: type/scope stay English (machine-readable),
// only the subject is written in Korean.
const SYSTEM_PROMPT_KO = `You are a commit message generator. Analyze the git diff and generate 3 different commit message suggestions following Conventional Commits format.

Output JSON only: { "candidates": [{ "type": "...", "scope": "...", "subject": "..." }, ...] }

Rules:
- type: one of feat, fix, docs, style, refactor, perf, test, build, ci, chore (MUST be in English)
- scope: optional, short identifier for affected area (MUST be in English)
- subject: MUST be written in Korean, max 72 chars, no period at end
- Provide 3 different variations with different wording or focus

Example output:
{ "candidates": [
  { "type": "feat", "scope": "auth", "subject": "OAuth2 로그인 지원 추가" },
  { "type": "feat", "scope": "auth", "subject": "OAuth2 인증 흐름 구현" },
  { "type": "feat", "subject": "OAuth2를 통한 소셜 로그인 추가" }
]}`;
|
|
44
|
+
|
|
45
|
+
function getSystemPrompt(language: Language, issueReference?: string): string {
|
|
46
|
+
let prompt = language === 'korean' ? SYSTEM_PROMPT_KO : SYSTEM_PROMPT_EN;
|
|
47
|
+
|
|
48
|
+
if (issueReference) {
|
|
49
|
+
prompt += `\n\nIMPORTANT: Include "${issueReference}" at the end of each subject.`;
|
|
50
|
+
}
|
|
51
|
+
|
|
52
|
+
return prompt;
|
|
53
|
+
}
|
|
54
|
+
|
|
55
|
+
function buildUserPrompt(diff: string, fileSummary: string): string {
|
|
56
|
+
return `Generate 3 commit message candidates for the following changes:
|
|
57
|
+
|
|
58
|
+
## Files Changed:
|
|
59
|
+
${fileSummary || 'No files changed'}
|
|
60
|
+
|
|
61
|
+
## Diff:
|
|
62
|
+
${diff || 'No diff available'}`;
|
|
63
|
+
}
|
|
64
|
+
|
|
65
|
+
function parseJsonFromText(text: string): unknown {
|
|
66
|
+
const jsonMatch = text.match(/```(?:json)?\s*([\s\S]*?)```/) ||
|
|
67
|
+
text.match(/(\{[\s\S]*\})/);
|
|
68
|
+
|
|
69
|
+
if (jsonMatch) {
|
|
70
|
+
return JSON.parse(jsonMatch[1].trim());
|
|
71
|
+
}
|
|
72
|
+
|
|
73
|
+
return JSON.parse(text);
|
|
74
|
+
}
|
|
75
|
+
|
|
76
|
+
function validateCandidates(data: unknown): CommitMessage[] {
|
|
77
|
+
if (typeof data !== 'object' || data === null) {
|
|
78
|
+
throw new Error('Response is not an object');
|
|
79
|
+
}
|
|
80
|
+
|
|
81
|
+
const obj = data as Record<string, unknown>;
|
|
82
|
+
|
|
83
|
+
if (!Array.isArray(obj.candidates)) {
|
|
84
|
+
throw new Error('Missing candidates array');
|
|
85
|
+
}
|
|
86
|
+
|
|
87
|
+
return obj.candidates.slice(0, 3).map((candidate) => {
|
|
88
|
+
const c = candidate as Record<string, unknown>;
|
|
89
|
+
let subject = String(c.subject || '').trim();
|
|
90
|
+
|
|
91
|
+
if (subject.endsWith('.')) {
|
|
92
|
+
subject = subject.slice(0, -1);
|
|
93
|
+
}
|
|
94
|
+
|
|
95
|
+
if (subject.length > 72) {
|
|
96
|
+
subject = subject.substring(0, 69) + '...';
|
|
97
|
+
}
|
|
98
|
+
|
|
99
|
+
return {
|
|
100
|
+
type: String(c.type || 'chore'),
|
|
101
|
+
scope: c.scope ? String(c.scope).trim() : undefined,
|
|
102
|
+
subject
|
|
103
|
+
};
|
|
104
|
+
});
|
|
105
|
+
}
|
|
106
|
+
|
|
107
|
+
async function callOpenAI(
|
|
108
|
+
apiKey: string,
|
|
109
|
+
model: string,
|
|
110
|
+
diff: string,
|
|
111
|
+
fileSummary: string,
|
|
112
|
+
language: Language,
|
|
113
|
+
issueReference?: string
|
|
114
|
+
): Promise<CommitMessage[]> {
|
|
115
|
+
const client = new OpenAI({ apiKey });
|
|
116
|
+
|
|
117
|
+
const response = await client.chat.completions.create({
|
|
118
|
+
model,
|
|
119
|
+
messages: [
|
|
120
|
+
{ role: 'system', content: getSystemPrompt(language, issueReference) },
|
|
121
|
+
{ role: 'user', content: buildUserPrompt(diff, fileSummary) }
|
|
122
|
+
],
|
|
123
|
+
response_format: { type: 'json_object' },
|
|
124
|
+
temperature: 0.7,
|
|
125
|
+
max_tokens: 500
|
|
126
|
+
});
|
|
127
|
+
|
|
128
|
+
const content = response.choices[0]?.message?.content;
|
|
129
|
+
if (!content) {
|
|
130
|
+
throw new Error('Empty response from OpenAI');
|
|
131
|
+
}
|
|
132
|
+
|
|
133
|
+
return validateCandidates(JSON.parse(content));
|
|
134
|
+
}
|
|
135
|
+
|
|
136
|
+
async function callGroq(
|
|
137
|
+
apiKey: string,
|
|
138
|
+
model: string,
|
|
139
|
+
diff: string,
|
|
140
|
+
fileSummary: string,
|
|
141
|
+
language: Language,
|
|
142
|
+
issueReference?: string
|
|
143
|
+
): Promise<CommitMessage[]> {
|
|
144
|
+
const client = new OpenAI({
|
|
145
|
+
apiKey,
|
|
146
|
+
baseURL: 'https://api.groq.com/openai/v1'
|
|
147
|
+
});
|
|
148
|
+
|
|
149
|
+
const response = await client.chat.completions.create({
|
|
150
|
+
model,
|
|
151
|
+
messages: [
|
|
152
|
+
{ role: 'system', content: getSystemPrompt(language, issueReference) },
|
|
153
|
+
{ role: 'user', content: buildUserPrompt(diff, fileSummary) }
|
|
154
|
+
],
|
|
155
|
+
response_format: { type: 'json_object' },
|
|
156
|
+
temperature: 0.7,
|
|
157
|
+
max_tokens: 500
|
|
158
|
+
});
|
|
159
|
+
|
|
160
|
+
const content = response.choices[0]?.message?.content;
|
|
161
|
+
if (!content) {
|
|
162
|
+
throw new Error('Empty response from Groq');
|
|
163
|
+
}
|
|
164
|
+
|
|
165
|
+
return validateCandidates(JSON.parse(content));
|
|
166
|
+
}
|
|
167
|
+
|
|
168
|
+
async function callGemini(
|
|
169
|
+
apiKey: string,
|
|
170
|
+
model: string,
|
|
171
|
+
diff: string,
|
|
172
|
+
fileSummary: string,
|
|
173
|
+
language: Language,
|
|
174
|
+
issueReference?: string
|
|
175
|
+
): Promise<CommitMessage[]> {
|
|
176
|
+
const genAI = new GoogleGenerativeAI(apiKey);
|
|
177
|
+
const geminiModel = genAI.getGenerativeModel({
|
|
178
|
+
model,
|
|
179
|
+
generationConfig: {
|
|
180
|
+
temperature: 0.7,
|
|
181
|
+
maxOutputTokens: 500,
|
|
182
|
+
responseMimeType: 'application/json'
|
|
183
|
+
}
|
|
184
|
+
});
|
|
185
|
+
|
|
186
|
+
const prompt = `${getSystemPrompt(language, issueReference)}
|
|
187
|
+
|
|
188
|
+
${buildUserPrompt(diff, fileSummary)}`;
|
|
189
|
+
|
|
190
|
+
const result = await geminiModel.generateContent(prompt);
|
|
191
|
+
const text = result.response.text();
|
|
192
|
+
|
|
193
|
+
if (!text) {
|
|
194
|
+
throw new Error('Empty response from Gemini');
|
|
195
|
+
}
|
|
196
|
+
|
|
197
|
+
return validateCandidates(parseJsonFromText(text));
|
|
198
|
+
}
|
|
199
|
+
|
|
200
|
+
async function callOllama(
|
|
201
|
+
ollamaUrl: string,
|
|
202
|
+
model: string,
|
|
203
|
+
diff: string,
|
|
204
|
+
fileSummary: string,
|
|
205
|
+
language: Language,
|
|
206
|
+
issueReference?: string
|
|
207
|
+
): Promise<CommitMessage[]> {
|
|
208
|
+
const response = await fetch(`${ollamaUrl}/api/chat`, {
|
|
209
|
+
method: 'POST',
|
|
210
|
+
headers: { 'Content-Type': 'application/json' },
|
|
211
|
+
body: JSON.stringify({
|
|
212
|
+
model,
|
|
213
|
+
messages: [
|
|
214
|
+
{ role: 'system', content: getSystemPrompt(language, issueReference) },
|
|
215
|
+
{ role: 'user', content: buildUserPrompt(diff, fileSummary) }
|
|
216
|
+
],
|
|
217
|
+
format: 'json',
|
|
218
|
+
stream: false
|
|
219
|
+
})
|
|
220
|
+
});
|
|
221
|
+
|
|
222
|
+
if (!response.ok) {
|
|
223
|
+
if (response.status === 404) {
|
|
224
|
+
throw new Error(`Model "${model}" not found. Run: ollama pull ${model}`);
|
|
225
|
+
}
|
|
226
|
+
throw new Error(`Ollama request failed: ${response.status}`);
|
|
227
|
+
}
|
|
228
|
+
|
|
229
|
+
const data = await response.json() as { message?: { content?: string } };
|
|
230
|
+
const content = data.message?.content;
|
|
231
|
+
|
|
232
|
+
if (!content) {
|
|
233
|
+
throw new Error('Empty response from Ollama');
|
|
234
|
+
}
|
|
235
|
+
|
|
236
|
+
return validateCandidates(parseJsonFromText(content));
|
|
237
|
+
}
|
|
238
|
+
|
|
239
|
+
export async function generateCommitMessages(
|
|
240
|
+
provider: Provider,
|
|
241
|
+
apiKey: string,
|
|
242
|
+
diff: string,
|
|
243
|
+
fileSummary: string,
|
|
244
|
+
model: string,
|
|
245
|
+
language: Language,
|
|
246
|
+
ollamaUrl?: string,
|
|
247
|
+
issueReference?: string
|
|
248
|
+
): Promise<CommitMessage[]> {
|
|
249
|
+
switch (provider) {
|
|
250
|
+
case 'openai':
|
|
251
|
+
return callOpenAI(apiKey, model, diff, fileSummary, language, issueReference);
|
|
252
|
+
case 'groq':
|
|
253
|
+
return callGroq(apiKey, model, diff, fileSummary, language, issueReference);
|
|
254
|
+
case 'gemini':
|
|
255
|
+
return callGemini(apiKey, model, diff, fileSummary, language, issueReference);
|
|
256
|
+
case 'ollama':
|
|
257
|
+
return callOllama(ollamaUrl || 'http://localhost:11434', model, diff, fileSummary, language, issueReference);
|
|
258
|
+
default:
|
|
259
|
+
throw new Error(`Unknown provider: ${provider}`);
|
|
260
|
+
}
|
|
261
|
+
}
|
|
262
|
+
|
|
263
|
+
export function formatCommitMessage(commit: CommitMessage): string {
|
|
264
|
+
if (commit.scope) {
|
|
265
|
+
return `${commit.type}(${commit.scope}): ${commit.subject}`;
|
|
266
|
+
}
|
|
267
|
+
return `${commit.type}: ${commit.subject}`;
|
|
268
|
+
}
|
package/src/cli.ts
ADDED
|
@@ -0,0 +1,310 @@
|
|
|
1
|
+
#!/usr/bin/env node
|
|
2
|
+
|
|
3
|
+
import { Command } from 'commander';
|
|
4
|
+
import chalk from 'chalk';
|
|
5
|
+
import ora from 'ora';
|
|
6
|
+
import inquirer from 'inquirer';
|
|
7
|
+
import clipboardy from 'clipboardy';
|
|
8
|
+
import { simpleGit, SimpleGit } from 'simple-git';
|
|
9
|
+
import { loadConfig, saveConfig, getApiKey, getDefaultModel, type Provider, type Language } from './config.js';
|
|
10
|
+
import { generateCommitMessages, formatCommitMessage, type CommitMessage } from './ai.js';
|
|
11
|
+
|
|
12
|
+
// CLI version reported by --version; keep in sync with package.json "version".
const VERSION = '0.3.0';

// Single simple-git instance bound to the process's current working directory.
const git: SimpleGit = simpleGit();
|
|
15
|
+
|
|
16
|
+
async function getStagedDiff(): Promise<{ diff: string; fileSummary: string }> {
|
|
17
|
+
const [diff, fileSummary] = await Promise.all([
|
|
18
|
+
git.diff(['--cached']),
|
|
19
|
+
git.diff(['--cached', '--name-status'])
|
|
20
|
+
]);
|
|
21
|
+
|
|
22
|
+
return { diff, fileSummary };
|
|
23
|
+
}
|
|
24
|
+
|
|
25
|
+
async function hasStagedChanges(): Promise<boolean> {
|
|
26
|
+
const diff = await git.diff(['--cached', '--name-only']);
|
|
27
|
+
return diff.trim().length > 0;
|
|
28
|
+
}
|
|
29
|
+
|
|
30
|
+
async function getCurrentBranch(): Promise<string> {
|
|
31
|
+
const branchSummary = await git.branch();
|
|
32
|
+
return branchSummary.current;
|
|
33
|
+
}
|
|
34
|
+
|
|
35
|
+
function extractIssueFromBranch(branchName: string, pattern?: string): string | null {
|
|
36
|
+
if (!pattern) return null;
|
|
37
|
+
|
|
38
|
+
try {
|
|
39
|
+
const regex = new RegExp(pattern);
|
|
40
|
+
const match = branchName.match(regex);
|
|
41
|
+
return match?.[1] || match?.[0] || null;
|
|
42
|
+
} catch {
|
|
43
|
+
return null;
|
|
44
|
+
}
|
|
45
|
+
}
|
|
46
|
+
|
|
47
|
+
/**
 * Main generate flow: read staged changes, ask the configured AI provider for
 * commit message candidates, let the user pick one, then either auto-commit
 * (--commit) or copy the message to the clipboard.
 *
 * CLI flags take precedence over config-file/env values. Exits the process
 * with code 1 on any unrecoverable error and 0 on user cancel.
 *
 * NOTE(review): user-facing text references "commitcraft" / ~/.commitcraftrc
 * while the package is named "craftcommit-cli" — confirm intended branding.
 */
async function generateCommand(options: {
  provider?: Provider;
  model?: string;
  language?: Language;
  commit?: boolean;
  issuePattern?: string;
  issuePrefix?: string;
}) {
  const config = loadConfig();

  // Override config with CLI options
  const provider = options.provider || config.provider;
  const model = options.model || config.model || getDefaultModel(provider);
  const language = options.language || config.language;

  // Check for API key. getApiKey returns a non-empty sentinel for ollama,
  // so this branch only triggers for the hosted providers.
  const apiKey = getApiKey({ ...config, provider });
  if (!apiKey) {
    console.error(chalk.red(`\nNo API key found for ${provider}.`));
    console.log(chalk.yellow(`\nSet your API key:`));
    console.log(chalk.gray(`  Environment: OPENAI_API_KEY, GROQ_API_KEY, or GEMINI_API_KEY`));
    console.log(chalk.gray(`  Config file: ~/.commitcraftrc`));
    console.log(chalk.gray(`  Or run: commitcraft config\n`));
    process.exit(1);
  }

  // Check for staged changes
  const hasChanges = await hasStagedChanges();
  if (!hasChanges) {
    console.log(chalk.yellow('\nNo staged changes found.'));
    console.log(chalk.gray('Stage files with: git add <files>\n'));
    process.exit(1);
  }

  // Get diff
  const spinner = ora('Getting staged changes...').start();
  let diff: string;
  let fileSummary: string;

  try {
    const result = await getStagedDiff();
    diff = result.diff;
    fileSummary = result.fileSummary;

    // Staged names exist but no diff content (e.g. mode-only changes may
    // behave oddly) — bail rather than send an empty prompt.
    if (!diff.trim()) {
      spinner.fail('No diff content found');
      process.exit(1);
    }

    // Truncate large diffs to keep the prompt within model token limits.
    const MAX_CHARS = 12000;
    if (diff.length > MAX_CHARS) {
      diff = diff.substring(0, MAX_CHARS) + '\n... [truncated]';
    }

    spinner.succeed('Got staged changes');
  } catch (error) {
    spinner.fail('Failed to get staged changes');
    console.error(chalk.red(error instanceof Error ? error.message : String(error)));
    process.exit(1);
  }

  // Extract issue from branch if pattern is configured
  let issueReference: string | undefined;
  if (options.issuePattern) {
    try {
      const branchName = await getCurrentBranch();
      const issue = extractIssueFromBranch(branchName, options.issuePattern);
      if (issue) {
        issueReference = (options.issuePrefix || '') + issue;
        console.log(chalk.blue(`\nDetected issue: ${issueReference}`));
      }
    } catch {
      // Ignore branch detection errors — the issue tag is a nice-to-have.
    }
  }

  // Generate commit messages
  const generateSpinner = ora(`Generating commit messages via ${provider}...`).start();
  let candidates: CommitMessage[];

  try {
    candidates = await generateCommitMessages(
      provider,
      apiKey,
      diff,
      fileSummary,
      model,
      language,
      config.ollamaUrl,
      issueReference
    );
    generateSpinner.succeed('Generated commit messages');
  } catch (error) {
    generateSpinner.fail('Failed to generate commit messages');
    const message = error instanceof Error ? error.message : String(error);

    // Map the most common failure modes to friendlier messages.
    if (message.includes('401') || message.includes('Unauthorized')) {
      console.error(chalk.red(`\nInvalid ${provider} API key.`));
    } else if (message.includes('ECONNREFUSED')) {
      console.error(chalk.red(`\nCannot connect to Ollama at ${config.ollamaUrl}`));
    } else {
      console.error(chalk.red(`\n${message}`));
    }
    process.exit(1);
  }

  // Format choices for inquirer
  const choices = candidates.map((commit, index) => ({
    name: `${index + 1}. ${formatCommitMessage(commit)}`,
    value: commit
  }));

  // Sentinel entry: a null value signals "Cancel" below (cast needed to fit
  // the inferred choice value type).
  choices.push({
    name: chalk.gray('Cancel'),
    value: null as unknown as CommitMessage
  });

  // Let user select a message
  console.log();
  const { selected } = await inquirer.prompt<{ selected: CommitMessage | null }>([
    {
      type: 'list',
      name: 'selected',
      message: 'Select a commit message:',
      choices
    }
  ]);

  if (!selected) {
    console.log(chalk.yellow('\nCancelled.'));
    process.exit(0);
  }

  const commitMessage = formatCommitMessage(selected);

  if (options.commit) {
    // Auto-commit with the selected message.
    const commitSpinner = ora('Committing...').start();
    try {
      await git.commit(commitMessage);
      commitSpinner.succeed(chalk.green(`Committed: ${commitMessage}`));
    } catch (error) {
      commitSpinner.fail('Failed to commit');
      console.error(chalk.red(error instanceof Error ? error.message : String(error)));
      process.exit(1);
    }
  } else {
    // Copy to clipboard
    try {
      await clipboardy.write(commitMessage);
      console.log(chalk.green(`\n✓ Copied to clipboard: ${commitMessage}`));
    } catch {
      // Clipboard might not work in all environments (headless/SSH) —
      // degrade to just printing the selection.
      console.log(chalk.green(`\n✓ Selected: ${commitMessage}`));
      console.log(chalk.gray('(Copy manually if clipboard is not available)'));
    }
  }
}
|
|
206
|
+
|
|
207
|
+
/**
 * Interactive setup wizard: prompts for provider, API key (hosted providers
 * only), output language, model, and — for Ollama — the server URL, then
 * persists the answers to ~/.commitcraftrc via saveConfig.
 */
async function configCommand() {
  console.log(chalk.bold('\nCommitCraft CLI Configuration\n'));

  const { provider } = await inquirer.prompt<{ provider: Provider }>([
    {
      type: 'list',
      name: 'provider',
      message: 'Select AI provider:',
      choices: [
        { name: 'OpenAI', value: 'openai' },
        { name: 'Groq', value: 'groq' },
        { name: 'Gemini', value: 'gemini' },
        { name: 'Ollama (local)', value: 'ollama' }
      ]
    }
  ]);

  let apiKey: string | undefined;

  // Ollama runs locally and needs no key — skip the key prompt for it.
  if (provider !== 'ollama') {
    const { key } = await inquirer.prompt<{ key: string }>([
      {
        type: 'password',
        name: 'key',
        message: `Enter your ${provider} API key:`,
        mask: '*'
      }
    ]);
    apiKey = key;
  }

  const { language } = await inquirer.prompt<{ language: Language }>([
    {
      type: 'list',
      name: 'language',
      message: 'Select language for commit messages:',
      choices: [
        { name: 'English', value: 'english' },
        { name: 'Korean', value: 'korean' }
      ]
    }
  ]);

  const { model } = await inquirer.prompt<{ model: string }>([
    {
      type: 'input',
      name: 'model',
      message: 'Model (leave empty for default):',
      default: getDefaultModel(provider)
    }
  ]);

  const configToSave: Record<string, string> = {
    provider,
    language,
    model
  };

  if (apiKey) {
    // Dynamic key name matches the Config fields, e.g. "openaiApiKey".
    const keyName = `${provider}ApiKey`;
    configToSave[keyName] = apiKey;
  }

  if (provider === 'ollama') {
    const { ollamaUrl } = await inquirer.prompt<{ ollamaUrl: string }>([
      {
        type: 'input',
        name: 'ollamaUrl',
        message: 'Ollama URL:',
        default: 'http://localhost:11434'
      }
    ]);
    configToSave.ollamaUrl = ollamaUrl;
  }

  saveConfig(configToSave);
  console.log(chalk.green('\n✓ Configuration saved to ~/.commitcraftrc\n'));
}
|
|
285
|
+
|
|
286
|
+
// Main CLI wiring. "generate" is registered as the default command, so
// running the bare binary generates messages from the staged diff.
const program = new Command();

program
  .name('commitcraft')
  .description('Generate AI-powered commit messages')
  .version(VERSION);

program
  .command('generate', { isDefault: true })
  .description('Generate commit message from staged changes')
  .option('-p, --provider <provider>', 'AI provider (openai, groq, gemini, ollama)')
  .option('-m, --model <model>', 'Model to use')
  .option('-l, --language <language>', 'Language (english, korean)')
  .option('-c, --commit', 'Auto-commit with selected message')
  .option('--issue-pattern <pattern>', 'Regex to extract issue from branch name')
  .option('--issue-prefix <prefix>', 'Prefix for issue reference (e.g., #)')
  .action(generateCommand);

program
  .command('config')
  .description('Configure API keys and preferences')
  .action(configCommand);

// Parse process.argv and dispatch to the matching command.
program.parse();
|
package/src/config.ts
ADDED
|
@@ -0,0 +1,118 @@
|
|
|
1
|
+
import fs from 'fs';
|
|
2
|
+
import path from 'path';
|
|
3
|
+
import os from 'os';
|
|
4
|
+
import dotenv from 'dotenv';
|
|
5
|
+
|
|
6
|
+
// Supported AI backends; 'ollama' is the only local (keyless) one.
export type Provider = 'openai' | 'groq' | 'gemini' | 'ollama';
// Output language for generated commit subjects.
export type Language = 'english' | 'korean';

/** Effective runtime configuration merged from env vars and rc files. */
export interface Config {
  provider: Provider;
  // Model identifier passed through to the provider API.
  model: string;
  language: Language;
  // Base URL of a local Ollama server.
  ollamaUrl: string;
  // Per-provider API keys; absent unless set via env or config file.
  openaiApiKey?: string;
  groqApiKey?: string;
  geminiApiKey?: string;
}
|
|
18
|
+
|
|
19
|
+
// Config file name; looked up in both the cwd and the home directory.
const CONFIG_FILE = '.commitcraftrc';
// Home-directory config path; also the write target for saveConfig.
const CONFIG_PATH = path.join(os.homedir(), CONFIG_FILE);

// Fallback model per provider when none is configured explicitly.
const DEFAULT_MODELS: Record<Provider, string> = {
  openai: 'gpt-4o-mini',
  groq: 'llama-3.1-8b-instant',
  gemini: 'gemini-1.5-flash',
  ollama: 'llama3.2'
};
|
|
28
|
+
|
|
29
|
+
function loadEnvFile(): void {
|
|
30
|
+
// Load from current directory .env first
|
|
31
|
+
const localEnv = path.join(process.cwd(), '.env');
|
|
32
|
+
if (fs.existsSync(localEnv)) {
|
|
33
|
+
dotenv.config({ path: localEnv });
|
|
34
|
+
}
|
|
35
|
+
|
|
36
|
+
// Then load from home directory
|
|
37
|
+
const homeEnv = path.join(os.homedir(), '.env');
|
|
38
|
+
if (fs.existsSync(homeEnv)) {
|
|
39
|
+
dotenv.config({ path: homeEnv });
|
|
40
|
+
}
|
|
41
|
+
}
|
|
42
|
+
|
|
43
|
+
function loadConfigFile(): Partial<Config> {
|
|
44
|
+
// Check local config first
|
|
45
|
+
const localConfig = path.join(process.cwd(), CONFIG_FILE);
|
|
46
|
+
if (fs.existsSync(localConfig)) {
|
|
47
|
+
try {
|
|
48
|
+
const content = fs.readFileSync(localConfig, 'utf-8');
|
|
49
|
+
return JSON.parse(content);
|
|
50
|
+
} catch {
|
|
51
|
+
// Ignore parse errors
|
|
52
|
+
}
|
|
53
|
+
}
|
|
54
|
+
|
|
55
|
+
// Then check home directory
|
|
56
|
+
if (fs.existsSync(CONFIG_PATH)) {
|
|
57
|
+
try {
|
|
58
|
+
const content = fs.readFileSync(CONFIG_PATH, 'utf-8');
|
|
59
|
+
return JSON.parse(content);
|
|
60
|
+
} catch {
|
|
61
|
+
// Ignore parse errors
|
|
62
|
+
}
|
|
63
|
+
}
|
|
64
|
+
|
|
65
|
+
return {};
|
|
66
|
+
}
|
|
67
|
+
|
|
68
|
+
export function loadConfig(): Config {
|
|
69
|
+
loadEnvFile();
|
|
70
|
+
const fileConfig = loadConfigFile();
|
|
71
|
+
|
|
72
|
+
const provider = (process.env.COMMITCRAFT_PROVIDER || fileConfig.provider || 'openai') as Provider;
|
|
73
|
+
|
|
74
|
+
return {
|
|
75
|
+
provider,
|
|
76
|
+
model: process.env.COMMITCRAFT_MODEL || fileConfig.model || DEFAULT_MODELS[provider],
|
|
77
|
+
language: (process.env.COMMITCRAFT_LANGUAGE || fileConfig.language || 'english') as Language,
|
|
78
|
+
ollamaUrl: process.env.OLLAMA_URL || fileConfig.ollamaUrl || 'http://localhost:11434',
|
|
79
|
+
openaiApiKey: process.env.OPENAI_API_KEY || fileConfig.openaiApiKey,
|
|
80
|
+
groqApiKey: process.env.GROQ_API_KEY || fileConfig.groqApiKey,
|
|
81
|
+
geminiApiKey: process.env.GEMINI_API_KEY || fileConfig.geminiApiKey
|
|
82
|
+
};
|
|
83
|
+
}
|
|
84
|
+
|
|
85
|
+
export function saveConfig(config: Partial<Config>): void {
|
|
86
|
+
let existingConfig: Partial<Config> = {};
|
|
87
|
+
|
|
88
|
+
if (fs.existsSync(CONFIG_PATH)) {
|
|
89
|
+
try {
|
|
90
|
+
const content = fs.readFileSync(CONFIG_PATH, 'utf-8');
|
|
91
|
+
existingConfig = JSON.parse(content);
|
|
92
|
+
} catch {
|
|
93
|
+
// Ignore parse errors
|
|
94
|
+
}
|
|
95
|
+
}
|
|
96
|
+
|
|
97
|
+
const newConfig = { ...existingConfig, ...config };
|
|
98
|
+
fs.writeFileSync(CONFIG_PATH, JSON.stringify(newConfig, null, 2));
|
|
99
|
+
}
|
|
100
|
+
|
|
101
|
+
export function getApiKey(config: Config): string | undefined {
|
|
102
|
+
switch (config.provider) {
|
|
103
|
+
case 'openai':
|
|
104
|
+
return config.openaiApiKey;
|
|
105
|
+
case 'groq':
|
|
106
|
+
return config.groqApiKey;
|
|
107
|
+
case 'gemini':
|
|
108
|
+
return config.geminiApiKey;
|
|
109
|
+
case 'ollama':
|
|
110
|
+
return 'ollama-no-key-needed';
|
|
111
|
+
default:
|
|
112
|
+
return undefined;
|
|
113
|
+
}
|
|
114
|
+
}
|
|
115
|
+
|
|
116
|
+
/** Built-in default model identifier for the given provider. */
export function getDefaultModel(provider: Provider): string {
  return DEFAULT_MODELS[provider];
}
|
package/tsconfig.json
ADDED
|
@@ -0,0 +1,20 @@
|
|
|
1
|
+
{
|
|
2
|
+
"compilerOptions": {
|
|
3
|
+
"target": "ES2022",
|
|
4
|
+
"module": "NodeNext",
|
|
5
|
+
"moduleResolution": "NodeNext",
|
|
6
|
+
"lib": ["ES2022"],
|
|
7
|
+
"outDir": "./dist",
|
|
8
|
+
"rootDir": "./src",
|
|
9
|
+
"strict": true,
|
|
10
|
+
"esModuleInterop": true,
|
|
11
|
+
"skipLibCheck": true,
|
|
12
|
+
"forceConsistentCasingInFileNames": true,
|
|
13
|
+
"resolveJsonModule": true,
|
|
14
|
+
"declaration": true,
|
|
15
|
+
"declarationMap": true,
|
|
16
|
+
"sourceMap": true
|
|
17
|
+
},
|
|
18
|
+
"include": ["src/**/*"],
|
|
19
|
+
"exclude": ["node_modules", "dist"]
|
|
20
|
+
}
|