@kud/ai-conventional-commit-cli 2.0.2 → 3.0.0

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
@@ -0,0 +1,120 @@
1
+ // src/config.ts
2
+ import { cosmiconfig } from "cosmiconfig";
3
+ import { resolve, dirname, join } from "path";
4
+ import { existsSync, readFileSync, writeFileSync, mkdirSync } from "fs";
5
+ import { homedir } from "os";
6
// Baseline configuration. Every field here can be overridden by the global
// config file, a project-level config, or AICC_* environment variables
// (layering is applied in loadConfigDetailed).
var DEFAULTS = {
  // Model identifier in "provider/model" form.
  model: process.env.AICC_MODEL || "github-copilot/gpt-4.1",
  privacy: process.env.AICC_PRIVACY || "low",
  style: process.env.AICC_STYLE || "standard",
  // Number of recent commits sampled when inferring commit style.
  styleSamples: parseInt(process.env.AICC_STYLE_SAMPLES || "120", 10),
  maxTokens: parseInt(process.env.AICC_MAX_TOKENS || "512", 10),
  maxFileLines: parseInt(process.env.AICC_MAX_FILE_LINES || "1000", 10),
  // Globs for files whose diffs are skipped: lockfiles, build output,
  // generated/minified assets, and source maps.
  skipFilePatterns: [
    "**/package-lock.json",
    "**/yarn.lock",
    "**/pnpm-lock.yaml",
    "**/bun.lockb",
    "**/composer.lock",
    "**/Gemfile.lock",
    "**/Cargo.lock",
    "**/poetry.lock",
    "**/*.d.ts",
    "**/dist/**",
    "**/build/**",
    "**/.next/**",
    "**/out/**",
    "**/coverage/**",
    "**/*.min.js",
    "**/*.min.css",
    "**/*.map"
  ],
  // Cache lives inside .git so it is per-repo and never committed.
  cacheDir: ".git/.aicc-cache",
  plugins: [],
  verbose: process.env.AICC_VERBOSE === "true"
};
36
/**
 * Absolute path of the user-global config file.
 * Honors XDG_CONFIG_HOME; falls back to ~/.config otherwise.
 * (Intentionally `||`, not `??`: an empty XDG_CONFIG_HOME also falls back.)
 * @returns {string} e.g. ~/.config/ai-conventional-commit-cli/aicc.json
 */
function getGlobalConfigPath() {
  const configRoot = process.env.XDG_CONFIG_HOME || join(homedir(), ".config");
  return resolve(configRoot, "ai-conventional-commit-cli", "aicc.json");
}
40
/**
 * Persist a partial config into the global aicc.json, shallow-merging the
 * given keys over whatever is already on disk. A corrupt existing file is
 * overwritten (with a warning when AICC_VERBOSE=true).
 * @param {object} partial - Keys to write/overwrite.
 * @returns {string} Absolute path of the file that was written.
 */
function saveGlobalConfig(partial) {
  const filePath = getGlobalConfigPath();
  const dir = dirname(filePath);
  if (!existsSync(dir)) {
    mkdirSync(dir, { recursive: true });
  }
  let current = {};
  if (existsSync(filePath)) {
    try {
      current = JSON.parse(readFileSync(filePath, "utf8")) || {};
    } catch (e) {
      // Best-effort: a malformed file is treated as empty and replaced.
      if (process.env.AICC_VERBOSE === "true") {
        console.error("[ai-cc] Failed to parse existing global config, overwriting.");
      }
    }
  }
  const payload = `${JSON.stringify({ ...current, ...partial }, null, 2)}\n`;
  writeFileSync(filePath, payload, "utf8");
  return filePath;
}
58
/**
 * Convenience wrapper: resolve only the merged config, discarding the
 * per-layer provenance returned by loadConfigDetailed.
 * @param {string} [cwd] - Directory the project-config search starts from.
 * @returns {Promise<object>} Fully merged configuration.
 */
async function loadConfig(cwd = process.cwd()) {
  const { config } = await loadConfigDetailed(cwd);
  return config;
}
61
/**
 * Build the effective configuration and report where each key came from.
 * Precedence, lowest to highest: DEFAULTS < global file < project file
 * (cosmiconfig "aicc" search) < AICC_* environment variables.
 * @param {string} [cwd] - Directory cosmiconfig starts searching from; also
 *   the base for resolving relative plugin paths.
 * @returns {Promise<{config: object, raw: object}>} `config` carries a
 *   `_sources` map of key -> "default"|"global"|"project"|"env".
 */
async function loadConfigDetailed(cwd = process.cwd()) {
  // Layer 1: user-global config file (best-effort; corrupt JSON is ignored).
  let globalCfg = {};
  const globalPath = getGlobalConfigPath();
  if (existsSync(globalPath)) {
    try {
      globalCfg = JSON.parse(readFileSync(globalPath, "utf8")) || {};
    } catch (e) {
      if (process.env.AICC_VERBOSE === "true") {
        console.error("[ai-cc] Failed to parse global config, ignoring.");
      }
    }
  }

  // Layer 2: project config discovered by cosmiconfig.
  const searchResult = await cosmiconfig("aicc").search(cwd);
  const projectCfg = searchResult?.config || {};

  // Layer 3: environment overrides (highest precedence).
  const { env } = process;
  const envCfg = {};
  if (env.AICC_MODEL) envCfg.model = env.AICC_MODEL;
  if (env.AICC_PRIVACY) envCfg.privacy = env.AICC_PRIVACY;
  if (env.AICC_STYLE) envCfg.style = env.AICC_STYLE;
  if (env.AICC_STYLE_SAMPLES) envCfg.styleSamples = parseInt(env.AICC_STYLE_SAMPLES, 10);
  if (env.AICC_MAX_TOKENS) envCfg.maxTokens = parseInt(env.AICC_MAX_TOKENS, 10);
  if (env.AICC_MAX_FILE_LINES) envCfg.maxFileLines = parseInt(env.AICC_MAX_FILE_LINES, 10);
  if (env.AICC_VERBOSE) envCfg.verbose = env.AICC_VERBOSE === "true";

  const merged = { ...DEFAULTS, ...globalCfg, ...projectCfg, ...envCfg };

  // Silently drop plugin entries whose resolved path does not exist on disk.
  merged.plugins = (merged.plugins || []).filter((p) => existsSync(resolve(cwd, p)));
  if (!merged.skipFilePatterns) {
    merged.skipFilePatterns = DEFAULTS.skipFilePatterns;
  }

  // Provenance: the highest layer that defines a key wins.
  const sources = {};
  for (const key of Object.keys(merged)) {
    sources[key] =
      key in envCfg ? "env" :
      key in projectCfg ? "project" :
      key in globalCfg ? "global" :
      "default";
  }
  merged._sources = sources;

  return {
    config: merged,
    raw: { defaults: DEFAULTS, global: globalCfg, project: projectCfg, env: envCfg }
  };
}
114
+
115
+ export {
116
+ getGlobalConfigPath,
117
+ saveGlobalConfig,
118
+ loadConfig,
119
+ loadConfigDetailed
120
+ };
@@ -171,7 +171,18 @@ Refine now.`
171
171
 
172
172
  // src/model/provider.ts
173
173
  import { z } from "zod";
174
+ import { createServer } from "net";
174
175
  import { createOpencode } from "@opencode-ai/sdk";
176
/**
 * Ask the OS for an unused TCP port by binding a throwaway server to port 0
 * on loopback, reading the assigned port, then releasing it.
 * NOTE(review): inherently racy — another process may claim the port between
 * close() and the caller's own bind; acceptable for a local dev server.
 * @returns {Promise<number>} An ephemeral port that was free at probe time.
 */
function findFreePort() {
  return new Promise((resolvePort, reject) => {
    const probe = createServer();
    probe.on("error", reject);
    probe.listen(0, "127.0.0.1", () => {
      const { port } = probe.address();
      probe.close(() => resolvePort(port));
    });
  });
}
175
186
  var OpenCodeProvider = class {
176
187
  constructor(model = "github-copilot/gpt-4.1") {
177
188
  this.model = model;
@@ -210,11 +221,16 @@ ${userAggregate}`;
210
221
  const start = Date.now();
211
222
  let server;
212
223
  try {
213
- const opencode = await createOpencode({ signal: ac.signal });
224
+ if (debug) console.error("[ai-cc][provider] starting opencode server");
225
+ const port = await findFreePort();
226
+ const opencode = await createOpencode({ signal: ac.signal, port });
214
227
  server = opencode.server;
215
- const { client } = opencode;
228
+ const client = opencode.client;
216
229
  const session = await client.session.create({ body: { title: "aicc" } });
217
- if (!session.data) throw new Error("Failed to create opencode session");
230
+ if (!session.data) {
231
+ const errMsg = session.error?.message ?? JSON.stringify(session.error) ?? "unknown";
232
+ throw new Error(`Failed to create opencode session: ${errMsg}`);
233
+ }
218
234
  const result = await client.session.prompt({
219
235
  path: { id: session.data.id },
220
236
  body: {