@zjex/git-workflow 0.2.5 → 0.2.7
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/README.md +114 -4
- package/dist/index.js +519 -49
- package/package.json +2 -2
- package/src/ai-service.ts +350 -0
- package/src/commands/commit.ts +121 -36
- package/src/commands/init.ts +156 -9
- package/src/config.ts +9 -0
- package/src/index.ts +23 -1
- package/src/update-notifier.ts +25 -11
- package/test-ctrl-c.mjs +0 -32
- package/test-full-flow.mjs +0 -108
- package/test-update-flow.mjs +0 -98
package/src/ai-service.ts
ADDED
@@ -0,0 +1,350 @@
+import { execOutput } from "./utils.js";
+import type { GwConfig } from "./config.js";
+
+interface AIProvider {
+  name: string;
+  endpoint: string;
+  defaultModel: string;
+  free: boolean;
+  needsKey: boolean;
+}
+
+const AI_PROVIDERS: Record<string, AIProvider> = {
+  github: {
+    name: "GitHub Models",
+    endpoint: "https://models.github.ai/inference/chat/completions",
+    defaultModel: "gpt-4o-mini",
+    free: true,
+    needsKey: true,
+  },
+  groq: {
+    name: "Groq",
+    endpoint: "https://api.groq.com/openai/v1/chat/completions",
+    defaultModel: "llama-3.1-8b-instant",
+    free: true,
+    needsKey: true,
+  },
+  openai: {
+    name: "OpenAI",
+    endpoint: "https://api.openai.com/v1/chat/completions",
+    defaultModel: "gpt-4o-mini",
+    free: false,
+    needsKey: true,
+  },
+  claude: {
+    name: "Claude",
+    endpoint: "https://api.anthropic.com/v1/messages",
+    defaultModel: "claude-3-haiku-20240307",
+    free: false,
+    needsKey: true,
+  },
+  ollama: {
+    name: "Ollama",
+    endpoint: "http://localhost:11434/api/generate",
+    defaultModel: "qwen2.5-coder:7b",
+    free: true,
+    needsKey: false,
+  },
+};
+
+/**
+ * Get the git diff content
+ */
+function getGitDiff(): string {
+  try {
+    // Get staged changes first
+    const diff = execOutput("git diff --cached");
+    if (!diff) {
+      // Nothing staged: fall back to all changes
+      return execOutput("git diff");
+    }
+    return diff;
+  } catch {
+    return "";
+  }
+}
+
+/**
+ * Build the AI prompt
+ */
+function buildPrompt(diff: string, language: string): string {
+  const isZh = language === "zh-CN";
+
+  const systemPrompt = isZh
+    ? `你是一个专业的 Git commit message 生成助手。请根据提供的 git diff 生成符合 Conventional Commits 规范的 commit message。
+
+规则:
+1. 格式:<type>(<scope>): <subject>
+2. type 必须是以下之一:feat, fix, docs, style, refactor, perf, test, build, ci, chore, revert
+3. scope 是可选的,表示影响范围
+4. subject 用中文描述,简洁明了,不超过 50 字
+5. 只返回 commit message,不要有其他解释
+
+示例:
+- feat(auth): 添加用户登录功能
+- fix(api): 修复数据获取失败的问题
+- docs(readme): 更新安装说明`
+    : `You are a professional Git commit message generator. Generate a commit message following Conventional Commits specification based on the provided git diff.
+
+Rules:
+1. Format: <type>(<scope>): <subject>
+2. type must be one of: feat, fix, docs, style, refactor, perf, test, build, ci, chore, revert
+3. scope is optional, indicates the affected area
+4. subject should be concise, no more than 50 characters
+5. Return only the commit message, no explanations
+
+Examples:
+- feat(auth): add user login functionality
+- fix(api): resolve data fetching failure
+- docs(readme): update installation guide`;
+
+  const userPrompt = isZh
+    ? `请根据以下 git diff 生成 commit message:\n\n${diff}`
+    : `Generate a commit message based on the following git diff:\n\n${diff}`;
+
+  return `${systemPrompt}\n\n${userPrompt}`;
+}
+
+/**
+ * Call the GitHub Models API
+ */
+async function callGitHubAPI(
+  prompt: string,
+  apiKey: string,
+  model: string,
+  maxTokens: number
+): Promise<string> {
+  const response = await fetch(AI_PROVIDERS.github.endpoint, {
+    method: "POST",
+    headers: {
+      Authorization: `Bearer ${apiKey}`,
+      "Content-Type": "application/json",
+    },
+    body: JSON.stringify({
+      model,
+      messages: [{ role: "user", content: prompt }],
+      max_tokens: maxTokens,
+      temperature: 0.3,
+    }),
+  });
+
+  if (!response.ok) {
+    const error = await response.text();
+    throw new Error(`GitHub Models API 错误: ${response.status} ${error}`);
+  }
+
+  const data = await response.json();
+  return data.choices[0]?.message?.content?.trim() || "";
+}
+
+/**
+ * Call the Groq API
+ */
+async function callGroqAPI(
+  prompt: string,
+  apiKey: string,
+  model: string,
+  maxTokens: number
+): Promise<string> {
+  const response = await fetch(AI_PROVIDERS.groq.endpoint, {
+    method: "POST",
+    headers: {
+      Authorization: `Bearer ${apiKey}`,
+      "Content-Type": "application/json",
+    },
+    body: JSON.stringify({
+      model,
+      messages: [{ role: "user", content: prompt }],
+      max_tokens: maxTokens,
+      temperature: 0.3,
+    }),
+  });
+
+  if (!response.ok) {
+    const error = await response.text();
+    throw new Error(`Groq API 错误: ${response.status} ${error}`);
+  }
+
+  const data = await response.json();
+  return data.choices[0]?.message?.content?.trim() || "";
+}
+
+/**
+ * Call the OpenAI API
+ */
+async function callOpenAIAPI(
+  prompt: string,
+  apiKey: string,
+  model: string,
+  maxTokens: number
+): Promise<string> {
+  const response = await fetch(AI_PROVIDERS.openai.endpoint, {
+    method: "POST",
+    headers: {
+      Authorization: `Bearer ${apiKey}`,
+      "Content-Type": "application/json",
+    },
+    body: JSON.stringify({
+      model,
+      messages: [{ role: "user", content: prompt }],
+      max_tokens: maxTokens,
+      temperature: 0.3,
+    }),
+  });
+
+  if (!response.ok) {
+    const error = await response.text();
+    throw new Error(`OpenAI API 错误: ${response.status} ${error}`);
+  }
+
+  const data = await response.json();
+  return data.choices[0]?.message?.content?.trim() || "";
+}
+
+/**
+ * Call the Claude API
+ */
+async function callClaudeAPI(
+  prompt: string,
+  apiKey: string,
+  model: string,
+  maxTokens: number
+): Promise<string> {
+  const response = await fetch(AI_PROVIDERS.claude.endpoint, {
+    method: "POST",
+    headers: {
+      "x-api-key": apiKey,
+      "anthropic-version": "2023-06-01",
+      "Content-Type": "application/json",
+    },
+    body: JSON.stringify({
+      model,
+      messages: [{ role: "user", content: prompt }],
+      max_tokens: maxTokens,
+      temperature: 0.3,
+    }),
+  });
+
+  if (!response.ok) {
+    const error = await response.text();
+    throw new Error(`Claude API 错误: ${response.status} ${error}`);
+  }
+
+  const data = await response.json();
+  return data.content[0]?.text?.trim() || "";
+}
+
+/**
+ * Call the Ollama API
+ */
+async function callOllamaAPI(
+  prompt: string,
+  model: string,
+  maxTokens: number
+): Promise<string> {
+  try {
+    const response = await fetch(AI_PROVIDERS.ollama.endpoint, {
+      method: "POST",
+      headers: { "Content-Type": "application/json" },
+      body: JSON.stringify({
+        model,
+        prompt,
+        stream: false,
+        options: {
+          num_predict: maxTokens,
+          temperature: 0.3,
+        },
+      }),
+    });
+
+    if (!response.ok) {
+      throw new Error(`Ollama 未运行或模型未安装`);
+    }
+
+    const data = await response.json();
+    return data.response?.trim() || "";
+  } catch (error) {
+    throw new Error(
+      `Ollama 连接失败。请确保:\n1. 已安装 Ollama (https://ollama.com)\n2. 运行 'ollama serve'\n3. 下载模型 'ollama pull ${model}'`
+    );
+  }
+}
+
+/**
+ * Generate an AI commit message
+ */
+export async function generateAICommitMessage(
+  config: GwConfig
+): Promise<string> {
+  const aiConfig = config.aiCommit || {};
+  const provider = aiConfig.provider || "groq";
+  const language = aiConfig.language || "zh-CN";
+  const maxTokens = aiConfig.maxTokens || 200;
+
+  // Get the git diff
+  const diff = getGitDiff();
+  if (!diff) {
+    throw new Error("没有检测到代码更改");
+  }
+
+  // Limit the diff length to avoid exceeding the token limit
+  const maxDiffLength = 4000;
+  const truncatedDiff =
+    diff.length > maxDiffLength ? diff.slice(0, maxDiffLength) + "\n..." : diff;
+
+  // Build the prompt
+  const prompt = buildPrompt(truncatedDiff, language);
+
+  // Call the matching API for the configured provider
+  const providerInfo = AI_PROVIDERS[provider];
+  if (!providerInfo) {
+    throw new Error(`不支持的 AI 提供商: ${provider}`);
+  }
+
+  const model = aiConfig.model || providerInfo.defaultModel;
+
+  // Get the API key
+  const apiKey = aiConfig.apiKey || "";
+
+  if (providerInfo.needsKey && !apiKey) {
+    throw new Error(
+      `${providerInfo.name} 需要 API key。请运行 'gw init' 配置 AI commit,或在 .gwrc.json 中设置 aiCommit.apiKey`
+    );
+  }
+
+  // Call the API
+  switch (provider) {
+    case "github":
+      return await callGitHubAPI(prompt, apiKey, model, maxTokens);
+    case "groq":
+      return await callGroqAPI(prompt, apiKey, model, maxTokens);
+    case "openai":
+      return await callOpenAIAPI(prompt, apiKey, model, maxTokens);
+    case "claude":
+      return await callClaudeAPI(prompt, apiKey, model, maxTokens);
+    case "ollama":
+      return await callOllamaAPI(prompt, model, maxTokens);
+    default:
+      throw new Error(`不支持的 AI 提供商: ${provider}`);
+  }
+}
+
+/**
+ * Check whether AI commit is available
+ */
+export function isAICommitAvailable(config: GwConfig): boolean {
+  const aiConfig = config.aiCommit;
+  if (!aiConfig) return true; // enabled by default
+
+  // Return false only when explicitly disabled
+  if (aiConfig.enabled === false) return false;
+
+  return true;
+}
+
+/**
+ * Get AI provider info
+ */
+export function getProviderInfo(provider: string): AIProvider | null {
+  return AI_PROVIDERS[provider] || null;
+}
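
The new module is driven entirely by the aiCommit block of the gw config (note the .gwrc.json hint in the apiKey error above, and the +9 lines in package/src/config.ts). As a reading aid only, here is a minimal sketch of that block limited to the fields ai-service.ts actually reads; the real schema lives in GwConfig and is not part of this diff, so the field types and the placeholder key are assumptions inferred from usage.

// Illustrative only: the aiCommit fields read by generateAICommitMessage()
// and isAICommitAvailable(). Shapes are inferred from the code above, not
// from the GwConfig definition in package/src/config.ts (not shown here).
const exampleAiCommitConfig = {
  aiCommit: {
    enabled: true,                  // only an explicit `false` disables the AI option
    provider: "groq",               // "github" | "groq" | "openai" | "claude" | "ollama" (default "groq")
    model: "llama-3.1-8b-instant",  // optional; falls back to the provider's defaultModel
    apiKey: "<your-key>",           // required for every provider except "ollama"
    language: "zh-CN",              // "zh-CN" (default) selects the Chinese prompt; anything else, English
    maxTokens: 200,                 // default 200; sent as max_tokens / num_predict
  },
};
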
package/src/commands/commit.ts
CHANGED
@@ -1,8 +1,9 @@
 import { execSync } from "child_process";
-import { select, input,
+import { select, input, checkbox } from "@inquirer/prompts";
 import ora from "ora";
 import { colors, theme, execOutput, divider } from "../utils.js";
 import { getConfig } from "../config.js";
+import { generateAICommitMessage, isAICommitAvailable } from "../ai-service.js";
 
 // Conventional Commits types + Gitmoji
 const DEFAULT_COMMIT_TYPES = [
@@ -140,6 +141,119 @@ export async function commit(): Promise<void> {
     divider();
   }
 
+  // Ask whether to generate the message with AI or write it manually
+  const aiAvailable = isAICommitAvailable(config);
+  let commitMode: "ai" | "manual" = "manual";
+
+  if (aiAvailable) {
+    commitMode = await select({
+      message: "选择 commit 方式:",
+      choices: [
+        {
+          name: "🤖 AI 自动生成 commit message",
+          value: "ai",
+          description: "使用 AI 分析代码变更自动生成",
+        },
+        {
+          name: "✍️ 手动编写 commit message",
+          value: "manual",
+          description: "传统的交互式输入方式",
+        },
+      ],
+      theme,
+    });
+  }
+
+  let message: string;
+
+  if (commitMode === "ai") {
+    // AI generation mode
+    const spinner = ora("AI 正在分析代码变更...").start();
+
+    try {
+      const aiMessage = await generateAICommitMessage(config);
+      spinner.succeed("AI 生成完成");
+
+      console.log("");
+      console.log("AI 生成的 commit message:");
+      console.log(colors.green(aiMessage));
+      divider();
+
+      const useAI = await select({
+        message: "使用这个 commit message?",
+        choices: [
+          { name: "✅ 使用", value: true },
+          { name: "❌ 不使用,切换到手动模式", value: false },
+        ],
+        theme,
+      });
+
+      if (useAI) {
+        message = aiMessage;
+      } else {
+        spinner.info("切换到手动模式");
+        commitMode = "manual";
+      }
+    } catch (error) {
+      spinner.fail("AI 生成失败");
+      console.log(
+        colors.red(error instanceof Error ? error.message : String(error))
+      );
+      console.log(colors.yellow("\n切换到手动模式..."));
+      divider();
+      commitMode = "manual";
+    }
+  }
+
+  if (commitMode === "manual") {
+    // Manual input mode (the original flow)
+    message = await buildManualCommitMessage(config);
+  }
+
+  divider();
+  console.log("提交信息预览:");
+  console.log(colors.green(message));
+  divider();
+
+  const shouldCommit = await select({
+    message: "确认提交?",
+    choices: [
+      { name: "✅ 确认提交", value: true },
+      { name: "❌ 取消", value: false },
+    ],
+    theme,
+  });
+
+  if (!shouldCommit) {
+    console.log(colors.yellow("已取消"));
+    return;
+  }
+
+  const spinner = ora("正在提交...").start();
+
+  try {
+    // Using -m, so quotes in the message must be escaped
+    const escapedMessage = message.replace(/"/g, '\\"');
+    execSync(`git commit -m "${escapedMessage}"`, { stdio: "pipe" });
+    spinner.succeed("提交成功");
+
+    // Show the commit hash
+    const commitHash = execOutput("git rev-parse --short HEAD");
+    console.log(colors.dim(`commit: ${commitHash}`));
+  } catch (error) {
+    spinner.fail("提交失败");
+    if (error instanceof Error) {
+      console.log(colors.red(error.message));
+    }
+  }
+}
+
+/**
+ * Build a commit message manually
+ */
+async function buildManualCommitMessage(
+  config: ReturnType<typeof getConfig>
+): Promise<string> {
   // Get the commit types (custom emoji supported)
   const commitTypes = getCommitTypes(config);
 
@@ -177,9 +291,12 @@ export async function commit(): Promise<void> {
   });
 
   // Does this include a breaking change?
-  const hasBreaking = await
+  const hasBreaking = await select({
     message: "是否包含破坏性变更 (BREAKING CHANGE)?",
-
+    choices: [
+      { name: "否", value: false },
+      { name: "是", value: true },
+    ],
     theme,
   });
 
@@ -227,37 +344,5 @@ export async function commit(): Promise<void> {
     }
   }
 
-
-  console.log("提交信息预览:");
-  console.log(colors.green(message));
-  divider();
-
-  const shouldCommit = await confirm({
-    message: "确认提交?",
-    default: true,
-    theme,
-  });
-
-  if (!shouldCommit) {
-    console.log(colors.yellow("已取消"));
-    return;
-  }
-
-  const spinner = ora("正在提交...").start();
-
-  try {
-    // Using -m, so quotes in the message must be escaped
-    const escapedMessage = message.replace(/"/g, '\\"');
-    execSync(`git commit -m "${escapedMessage}"`, { stdio: "pipe" });
-    spinner.succeed("提交成功");
-
-    // Show the commit hash
-    const commitHash = execOutput("git rev-parse --short HEAD");
-    console.log(colors.dim(`commit: ${commitHash}`));
-  } catch (error) {
-    spinner.fail("提交失败");
-    if (error instanceof Error) {
-      console.log(colors.red(error.message));
-    }
-  }
+  return message;
 }