code-agent-auto-commit 1.1.1 → 1.3.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/README.md +3 -1
- package/dist/cli.js +40 -0
- package/dist/core/ai.d.ts +3 -2
- package/dist/core/ai.js +176 -56
- package/dist/core/run.js +35 -16
- package/dist/types.d.ts +18 -0
- package/package.json +1 -1
package/README.md
CHANGED
@@ -21,13 +21,15 @@
 ## Installation
 
 ```bash
-pnpm add -g code-agent-auto-commit
+pnpm add -g code-agent-auto-commit@latest
 ```
 
 To update to the latest version:
 
 ```bash
 pnpm update -g code-agent-auto-commit
+OR:
+pnpm add -g code-agent-auto-commit@latest
 ```
 
 Then use the short command:

package/dist/cli.js
CHANGED
@@ -9,6 +9,7 @@ const node_path_1 = __importDefault(require("node:path"));
 const claude_1 = require("./adapters/claude");
 const codex_1 = require("./adapters/codex");
 const opencode_1 = require("./adapters/opencode");
+const ai_1 = require("./core/ai");
 const config_1 = require("./core/config");
 const fs_1 = require("./core/fs");
 const run_1 = require("./core/run");
@@ -89,6 +90,7 @@ Usage:
   cac status [--scope project|global] [--worktree <path>] [--config <path>]
   cac run [--tool opencode|codex|claude|manual] [--worktree <path>] [--config <path>] [--event-json <json>] [--event-stdin]
   cac set-worktree <path> [--config <path>]
+  cac ai <message> [--config <path>]
   cac version
 `);
 }
@@ -240,6 +242,40 @@ async function commandRun(flags, positionals) {
         console.log(`- ${item.hash.slice(0, 12)} ${item.message}`);
     }
     console.log(`Pushed: ${result.pushed ? "yes" : "no"}`);
+    if (result.tokenUsage) {
+        console.log(`AI tokens: ${result.tokenUsage.totalTokens} (prompt: ${result.tokenUsage.promptTokens}, completion: ${result.tokenUsage.completionTokens})`);
+    }
+    if (result.aiWarning) {
+        console.warn(`\nWarning: AI commit message failed — ${result.aiWarning}`);
+        console.warn(`Using fallback prefix instead. Run "cac ai hello" to test your AI config.`);
+    }
+}
+async function commandAI(flags, positionals) {
+    const message = positionals.join(" ").trim();
+    if (!message) {
+        console.error(`Usage: cac ai <message>`);
+        console.error(`Example: cac ai "hello, are you there?"`);
+        process.exitCode = 1;
+        return;
+    }
+    const worktree = node_path_1.default.resolve(getStringFlag(flags, "worktree") ?? process.cwd());
+    const explicitConfig = getStringFlag(flags, "config");
+    const loaded = (0, config_1.loadConfig)({ explicitPath: explicitConfig, worktree });
+    console.log(`Provider: ${loaded.config.ai.defaultProvider}`);
+    console.log(`Model: ${loaded.config.ai.model}`);
+    console.log(`Sending: "${message}"`);
+    console.log();
+    const result = await (0, ai_1.testAI)(loaded.config.ai, message);
+    if (!result.ok) {
+        console.error(`AI test failed: ${result.error}`);
+        process.exitCode = 1;
+        return;
+    }
+    console.log(`Reply: ${result.reply}`);
+    if (result.usage) {
+        console.log(`Tokens: ${result.usage.totalTokens} (prompt: ${result.usage.promptTokens}, completion: ${result.usage.completionTokens})`);
+    }
+    console.log(`\nAI is configured correctly.`);
 }
 async function main() {
     const argv = process.argv.slice(2);
@@ -278,6 +314,10 @@ async function main() {
         await commandRun(parsed.flags, parsed.positionals);
         return;
     }
+    if (command === "ai") {
+        await commandAI(parsed.flags, parsed.positionals);
+        return;
+    }
     throw new Error(`Unknown command: ${command}`);
 }
 main().catch((error) => {

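The new `ai` subcommand is essentially a connectivity check: it loads the config, sends one user message through `testAI`, and prints the reply plus token usage. A minimal sketch of exercising the same helper directly from a script is shown below; the deep `dist/...` import paths and the `loadConfig` options shape are taken from this diff, but whether the package actually exposes those paths to consumers is an assumption.

```ts
// Sketch only: drives the same code path as `cac ai "<message>"`.
// Assumption: the compiled internals are importable at these dist paths.
import { loadConfig } from "code-agent-auto-commit/dist/core/config";
import { testAI } from "code-agent-auto-commit/dist/core/ai";

async function smokeTest(message: string): Promise<void> {
  // Same options shape commandAI builds: optional explicit config path plus a worktree.
  const loaded = loadConfig({ explicitPath: undefined, worktree: process.cwd() });
  const result = await testAI(loaded.config.ai, message);
  if (!result.ok) {
    console.error(`AI test failed: ${result.error}`);
    return;
  }
  console.log(`Reply: ${result.reply}`);
  if (result.usage) {
    console.log(`Tokens: ${result.usage.totalTokens}`);
  }
}

smokeTest("hello, are you there?").catch(console.error);
```
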
package/dist/core/ai.d.ts
CHANGED
@@ -1,2 +1,3 @@
-import type { AIConfig, CommitSummary } from "../types";
-export declare function generateCommitMessage(ai: AIConfig, summary: CommitSummary, maxLength: number): Promise<
+import type { AIConfig, AIGenerateResult, AITestResult, CommitSummary } from "../types";
+export declare function generateCommitMessage(ai: AIConfig, summary: CommitSummary, maxLength: number): Promise<AIGenerateResult>;
+export declare function testAI(ai: AIConfig, userMessage: string): Promise<AITestResult>;

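Both functions now resolve to structured results rather than a bare string, so callers can distinguish "no message" from "failed for this reason". A small illustrative consumer of the two result shapes (types as declared above; the handling itself is only an example):

```ts
import type { AIGenerateResult, AITestResult } from "../types";

// Illustrative only: shows how the new result shapes are meant to be read.
function describeGenerate(result: AIGenerateResult): string {
  if (result.message) {
    return result.message; // the AI produced a usable commit message
  }
  // warning carries the reason (config problem, HTTP error, timeout, ...)
  return `fallback needed: ${result.warning ?? "no reason given"}`;
}

function describeTest(result: AITestResult): string {
  return result.ok ? `ok: ${result.reply}` : `failed: ${result.error}`;
}
```
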
package/dist/core/ai.js
CHANGED
@@ -1,52 +1,56 @@
 "use strict";
 Object.defineProperty(exports, "__esModule", { value: true });
 exports.generateCommitMessage = generateCommitMessage;
-
+exports.testAI = testAI;
+const VALID_TYPES = new Set([
+    "feat", "fix", "refactor", "docs", "style", "test",
+    "chore", "perf", "ci", "build", "revert",
+]);
+const TYPE_ALIASES = {
+    feature: "feat",
+    bugfix: "fix",
+    hotfix: "fix",
+    refactoring: "refactor",
+    refector: "refactor",
+};
 function normalizeCommitType(raw) {
     const value = raw.trim().toLowerCase();
-    if (value
-        return
-    }
-    if (value === "fix" || value === "bugfix" || value === "hotfix") {
-        return "fix";
-    }
-    if (value === "refector"
-        || value === "refactor"
-        || value === "refactoring"
-        || value === "chore"
-        || value === "docs"
-        || value === "style"
-        || value === "test"
-        || value === "perf"
-        || value === "build"
-        || value === "ci"
-        || value === "revert") {
-        return "refector";
+    if (VALID_TYPES.has(value)) {
+        return value;
     }
-    return undefined;
+    return TYPE_ALIASES[value] ?? undefined;
 }
 function formatTypedMessage(raw, maxLength) {
-    const conventional = raw.match(/^([a-zA-Z-]+)(
-
-
-
-
-
-
-
-
+    const conventional = raw.match(/^([a-zA-Z-]+)(\([^)]*\))?\s*:\s*(.+)$/);
+    if (conventional) {
+        const type = normalizeCommitType(conventional[1]) ?? "chore";
+        const scope = conventional[2] ?? "";
+        const subject = conventional[3]
+            .replace(/^['"`]+|['"`]+$/g, "")
+            .replace(/^[-:]+/, "")
+            .trim();
+        if (subject.length === 0)
+            return "";
+        const full = `${type}${scope}: ${subject}`;
+        if (full.length <= maxLength)
+            return full;
+        const prefix = `${type}${scope}: `;
+        const available = maxLength - prefix.length;
+        if (available <= 1)
+            return prefix.trimEnd().slice(0, maxLength);
+        return `${prefix}${subject.slice(0, available - 1).trimEnd()}…`;
     }
-    const
-
-
+    const subject = raw.replace(/^['"`]+|['"`]+$/g, "").trim();
+    if (subject.length === 0)
+        return "";
+    const prefix = "chore: ";
+    const full = `${prefix}${subject}`;
+    if (full.length <= maxLength)
         return full;
-    }
     const available = maxLength - prefix.length;
-    if (available <= 1)
+    if (available <= 1)
         return prefix.trimEnd().slice(0, maxLength);
-    }
-    return `${prefix}${subjectCandidate.slice(0, available - 1).trimEnd()}…`;
+    return `${prefix}${subject.slice(0, available - 1).trimEnd()}…`;
 }
 function normalizeMessage(raw, maxLength) {
     const withoutThinking = raw
@@ -108,6 +112,28 @@ function buildUserPrompt(summary, maxLength) {
         summary.patch || "(none)",
     ].join("\n");
 }
+function validateAIConfig(ai) {
+    if (!ai.enabled) {
+        return "ai.enabled is false";
+    }
+    const { provider, model } = splitModelRef(ai.model, ai.defaultProvider);
+    if (!provider || !model) {
+        return `invalid ai.model "${ai.model}" — expected "provider/model" format`;
+    }
+    const providerConfig = ai.providers[provider];
+    if (!providerConfig) {
+        return `provider "${provider}" not found in ai.providers (available: ${Object.keys(ai.providers).join(", ") || "none"})`;
+    }
+    const apiKey = getApiKey(providerConfig);
+    if (!apiKey) {
+        const envName = providerConfig.apiKeyEnv;
+        if (envName) {
+            return `API key not found — env var "${envName}" is not set. Run: export ${envName}='your-key'`;
+        }
+        return `no API key configured for provider "${provider}" — set apiKeyEnv or apiKey in config`;
+    }
+    return undefined;
+}
 async function generateOpenAiStyleMessage(provider, model, summary, maxLength, signal) {
     const apiKey = getApiKey(provider);
     const headers = {
@@ -137,15 +163,23 @@ async function generateOpenAiStyleMessage(provider, model, summary, maxLength, s
         signal,
     });
     if (!response.ok) {
-
+        const body = await response.text().catch(() => "");
+        return { content: undefined, usage: undefined, error: `HTTP ${response.status}: ${body.slice(0, 200)}` };
     }
     const payload = (await response.json());
-
+    const usage = payload.usage
+        ? {
+            promptTokens: payload.usage.prompt_tokens ?? 0,
+            completionTokens: payload.usage.completion_tokens ?? 0,
+            totalTokens: payload.usage.total_tokens ?? 0,
+        }
+        : undefined;
+    return { content: payload.choices?.[0]?.message?.content, usage };
 }
 async function generateAnthropicStyleMessage(provider, model, summary, maxLength, signal) {
     const apiKey = getApiKey(provider);
     if (!apiKey) {
-        return undefined;
+        return { content: undefined, usage: undefined, error: "no API key" };
     }
     const headers = {
         "Content-Type": "application/json",
@@ -171,39 +205,125 @@ async function generateAnthropicStyleMessage(provider, model, summary, maxLength
         signal,
     });
     if (!response.ok) {
-
+        const body = await response.text().catch(() => "");
+        return { content: undefined, usage: undefined, error: `HTTP ${response.status}: ${body.slice(0, 200)}` };
     }
     const payload = (await response.json());
     const firstText = payload.content?.find((item) => item.type === "text")?.text;
-
+    const usage = payload.usage
+        ? {
+            promptTokens: payload.usage.input_tokens ?? 0,
+            completionTokens: payload.usage.output_tokens ?? 0,
+            totalTokens: (payload.usage.input_tokens ?? 0) + (payload.usage.output_tokens ?? 0),
+        }
+        : undefined;
+    return { content: firstText, usage };
 }
 async function generateCommitMessage(ai, summary, maxLength) {
-
-
+    const configError = validateAIConfig(ai);
+    if (configError) {
+        return { message: undefined, usage: undefined, warning: configError };
     }
     const { provider, model } = splitModelRef(ai.model, ai.defaultProvider);
-    if (!provider || !model) {
-        return undefined;
-    }
     const providerConfig = ai.providers[provider];
-
-
+    const controller = new AbortController();
+    const timeout = setTimeout(() => controller.abort(), ai.timeoutMs);
+    try {
+        let result;
+        if (providerConfig.api === "openai-completions") {
+            result = await generateOpenAiStyleMessage(providerConfig, model, summary, maxLength, controller.signal);
+        }
+        else {
+            result = await generateAnthropicStyleMessage(providerConfig, model, summary, maxLength, controller.signal);
+        }
+        if (result.error) {
+            return { message: undefined, usage: result.usage, warning: result.error };
+        }
+        const normalized = normalizeMessage(result.content ?? "", maxLength);
+        return { message: normalized || undefined, usage: result.usage };
+    }
+    catch (err) {
+        const msg = err instanceof Error && err.name === "AbortError"
+            ? `AI request timed out after ${ai.timeoutMs}ms`
+            : `AI request failed: ${err instanceof Error ? err.message : String(err)}`;
+        return { message: undefined, usage: undefined, warning: msg };
+    }
+    finally {
+        clearTimeout(timeout);
     }
+}
+async function testAI(ai, userMessage) {
+    const configError = validateAIConfig(ai);
+    if (configError) {
+        return { ok: false, error: configError };
+    }
+    const { provider, model } = splitModelRef(ai.model, ai.defaultProvider);
+    const providerConfig = ai.providers[provider];
+    const apiKey = getApiKey(providerConfig);
     const controller = new AbortController();
     const timeout = setTimeout(() => controller.abort(), ai.timeoutMs);
     try {
-        let content;
        if (providerConfig.api === "openai-completions") {
-
+            const headers = {
+                "Content-Type": "application/json",
+                Authorization: `Bearer ${apiKey}`,
+                ...(providerConfig.headers ?? {}),
+            };
+            const response = await fetch(`${providerConfig.baseUrl.replace(/\/$/, "")}/chat/completions`, {
+                method: "POST",
+                headers,
+                body: JSON.stringify({
+                    model,
+                    temperature: 0.2,
+                    messages: [{ role: "user", content: userMessage }],
+                }),
+                signal: controller.signal,
+            });
+            if (!response.ok) {
+                const body = await response.text().catch(() => "");
+                return { ok: false, error: `HTTP ${response.status}: ${body.slice(0, 300)}` };
+            }
+            const payload = (await response.json());
+            const reply = payload.choices?.[0]?.message?.content ?? "";
+            const usage = payload.usage
+                ? { promptTokens: payload.usage.prompt_tokens ?? 0, completionTokens: payload.usage.completion_tokens ?? 0, totalTokens: payload.usage.total_tokens ?? 0 }
+                : undefined;
+            return { ok: true, reply, usage };
         }
         else {
-
+            const headers = {
+                "Content-Type": "application/json",
+                "x-api-key": apiKey,
+                "anthropic-version": "2023-06-01",
+                ...(providerConfig.headers ?? {}),
+            };
+            const response = await fetch(`${providerConfig.baseUrl.replace(/\/$/, "")}/messages`, {
+                method: "POST",
+                headers,
+                body: JSON.stringify({
+                    model,
+                    max_tokens: 256,
+                    messages: [{ role: "user", content: userMessage }],
+                }),
+                signal: controller.signal,
+            });
+            if (!response.ok) {
+                const body = await response.text().catch(() => "");
+                return { ok: false, error: `HTTP ${response.status}: ${body.slice(0, 300)}` };
+            }
+            const payload = (await response.json());
+            const reply = payload.content?.find((item) => item.type === "text")?.text ?? "";
+            const usage = payload.usage
+                ? { promptTokens: payload.usage.input_tokens ?? 0, completionTokens: payload.usage.output_tokens ?? 0, totalTokens: (payload.usage.input_tokens ?? 0) + (payload.usage.output_tokens ?? 0) }
+                : undefined;
+            return { ok: true, reply, usage };
         }
-        const normalized = normalizeMessage(content ?? "", maxLength);
-        return normalized || undefined;
     }
-    catch {
-
+    catch (err) {
+        const msg = err instanceof Error && err.name === "AbortError"
+            ? `request timed out after ${ai.timeoutMs}ms`
+            : `request failed: ${err instanceof Error ? err.message : String(err)}`;
+        return { ok: false, error: msg };
     }
     finally {
         clearTimeout(timeout);

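The old `normalizeCommitType` if-chain (which mapped refactor-like and chore-like types to the misspelled "refector") is replaced by a whitelist plus an alias table, and `formatTypedMessage` now strips quotes and truncates against `maxLength`. A standalone sketch of just the type-normalization rules, with illustrative inputs:

```ts
// Standalone sketch mirroring the new VALID_TYPES / TYPE_ALIASES logic above.
const VALID_TYPES = new Set([
  "feat", "fix", "refactor", "docs", "style", "test",
  "chore", "perf", "ci", "build", "revert",
]);
const TYPE_ALIASES: Record<string, string> = {
  feature: "feat",
  bugfix: "fix",
  hotfix: "fix",
  refactoring: "refactor",
  refector: "refactor",
};

function normalizeCommitType(raw: string): string | undefined {
  const value = raw.trim().toLowerCase();
  if (VALID_TYPES.has(value)) {
    return value;
  }
  return TYPE_ALIASES[value]; // undefined for unrecognized types
}

// Illustrative inputs (not taken from the package's tests):
normalizeCommitType("Feature");  // -> "feat"
normalizeCommitType("refector"); // -> "refactor" (the 1.1.1 code returned "refector" here)
normalizeCommitType("wip");      // -> undefined
```
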
package/dist/core/run.js
CHANGED
@@ -17,10 +17,11 @@ function normalizeFallbackType(prefix) {
     if (/(^|[^a-z])(fix|bugfix|hotfix)([^a-z]|$)/.test(value)) {
         return "fix";
     }
-    if (/(^|[^a-z])(
-
+    if (/(^|[^a-z])(refactor|chore|docs|style|test|perf|build|ci|revert)([^a-z]|$)/.test(value)) {
+        const match = value.match(/(refactor|chore|docs|style|test|perf|build|ci|revert)/);
+        return match ? match[1] : "chore";
     }
-    return "
+    return "chore";
 }
 function fallbackSingleMessage(prefix, count) {
     const suffix = count === 1 ? "file" : "files";
@@ -59,14 +60,14 @@ function filterFiles(files, include, exclude) {
 }
 async function buildMessage(prefix, maxLength, aiConfig, stagedPath, fallback, worktree) {
     const summary = (0, git_1.getStagedSummary)(worktree, stagedPath);
-    const
-    if (
-        return
+    const result = await (0, ai_1.generateCommitMessage)(aiConfig, summary, maxLength);
+    if (result.message) {
+        return { message: result.message, usage: result.usage };
     }
-
-
-
-    return
+    const msg = fallback.length <= maxLength
+        ? fallback
+        : `${normalizeFallbackType(prefix)}: update changes`;
+    return { message: msg, usage: result.usage, warning: result.warning };
 }
 async function runAutoCommit(context, configOptions) {
     const { config } = (0, config_1.loadConfig)(configOptions);
@@ -92,6 +93,15 @@ async function runAutoCommit(context, configOptions) {
         };
     }
     const commits = [];
+    const totalUsage = { promptTokens: 0, completionTokens: 0, totalTokens: 0 };
+    let firstWarning;
+    function addUsage(usage) {
+        if (!usage)
+            return;
+        totalUsage.promptTokens += usage.promptTokens;
+        totalUsage.completionTokens += usage.completionTokens;
+        totalUsage.totalTokens += usage.totalTokens;
+    }
     if (config.commit.mode === "single") {
         for (const file of changed) {
             (0, git_1.stagePath)(worktree, file.path);
@@ -106,11 +116,14 @@ async function runAutoCommit(context, configOptions) {
             };
         }
         const fallback = fallbackSingleMessage(config.commit.fallbackPrefix, changed.length);
-        const
-
+        const result = await buildMessage(config.commit.fallbackPrefix, config.commit.maxMessageLength, config.ai, undefined, fallback, worktree);
+        addUsage(result.usage);
+        if (result.warning && !firstWarning)
+            firstWarning = result.warning;
+        const hash = (0, git_1.commit)(worktree, result.message);
         commits.push({
             hash,
-            message,
+            message: result.message,
             files: changed.map((item) => item.path),
         });
     }
@@ -124,11 +137,14 @@ async function runAutoCommit(context, configOptions) {
                 continue;
             }
             const fallback = fallbackPerFileMessage(config.commit.fallbackPrefix, file);
-            const
-
+            const result = await buildMessage(config.commit.fallbackPrefix, config.commit.maxMessageLength, config.ai, file.path, fallback, worktree);
+            addUsage(result.usage);
+            if (result.warning && !firstWarning)
+                firstWarning = result.warning;
+            const hash = (0, git_1.commit)(worktree, result.message);
             commits.push({
                 hash,
-                message,
+                message: result.message,
                 files: [file.path],
             });
         }
@@ -139,10 +155,13 @@ async function runAutoCommit(context, configOptions) {
         (0, git_1.push)(worktree, config.push.remote, branch, config.push.provider);
         pushed = true;
     }
+    const hasUsage = totalUsage.totalTokens > 0;
     return {
         skipped: false,
         worktree,
         committed: commits,
         pushed,
+        tokenUsage: hasUsage ? totalUsage : undefined,
+        aiWarning: firstWarning,
     };
 }

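`buildMessage` now returns an object instead of a plain string: the AI message when one was produced, otherwise the hand-built fallback (or a generic `<type>: update changes` when the fallback itself exceeds the length limit), with token usage and the AI warning carried along so `commandRun` can report them. A condensed sketch of that decision with the git calls omitted:

```ts
// Condensed sketch of the new buildMessage fallback decision (git staging/commit omitted).
interface TokenUsage { promptTokens: number; completionTokens: number; totalTokens: number; }
interface AIGenerateResult { message?: string; usage?: TokenUsage; warning?: string; }

function chooseMessage(
  aiResult: AIGenerateResult,
  fallback: string,
  fallbackType: string, // what normalizeFallbackType(prefix) would produce, e.g. "chore"
  maxLength: number,
): { message: string; usage?: TokenUsage; warning?: string } {
  if (aiResult.message) {
    return { message: aiResult.message, usage: aiResult.usage };
  }
  const message = fallback.length <= maxLength
    ? fallback
    : `${fallbackType}: update changes`;
  // The warning travels with the fallback so the CLI can print it after the commit list.
  return { message, usage: aiResult.usage, warning: aiResult.warning };
}
```
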
package/dist/types.d.ts
CHANGED
@@ -57,12 +57,30 @@ export interface CommitRecord {
     message: string;
     files: string[];
 }
+export interface TokenUsage {
+    promptTokens: number;
+    completionTokens: number;
+    totalTokens: number;
+}
+export interface AIGenerateResult {
+    message: string | undefined;
+    usage: TokenUsage | undefined;
+    warning?: string;
+}
+export interface AITestResult {
+    ok: boolean;
+    reply?: string;
+    usage?: TokenUsage;
+    error?: string;
+}
 export interface RunResult {
     skipped: boolean;
     reason?: string;
     worktree: string;
     committed: CommitRecord[];
     pushed: boolean;
+    tokenUsage?: TokenUsage;
+    aiWarning?: string;
 }
 export interface RunContext {
     tool: ToolName;

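The two optional additions to `RunResult` are what the CLI reads when printing token counts and the AI warning after a run. A small illustrative consumer (field names as declared above; the formatting is just an example):

```ts
import type { RunResult } from "./types"; // path is illustrative

function summarizeRun(result: RunResult): string {
  const lines = [
    `commits: ${result.committed.length}`,
    `pushed: ${result.pushed ? "yes" : "no"}`,
  ];
  if (result.tokenUsage) {
    lines.push(`AI tokens: ${result.tokenUsage.totalTokens}`);
  }
  if (result.aiWarning) {
    lines.push(`AI warning: ${result.aiWarning}`);
  }
  return lines.join("\n");
}
```
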
package/package.json
CHANGED