@corbat-tech/coco 2.0.0 → 2.2.0

This diff shows the content of publicly available package versions that have been released to one of the supported registries. It is provided for informational purposes only and reflects the changes between package versions as they appear in their respective public registries.
package/dist/index.d.ts CHANGED
@@ -379,7 +379,7 @@ interface Orchestrator {
379
379
  interface OrchestratorConfig {
380
380
  projectPath: string;
381
381
  provider: {
382
- type: "anthropic" | "openai" | "gemini" | "kimi" | "lmstudio";
382
+ type: "anthropic" | "openai" | "gemini" | "kimi" | "kimi-code" | "lmstudio";
383
383
  apiKey?: string;
384
384
  model: string;
385
385
  maxTokens?: number;
@@ -500,6 +500,7 @@ declare const CocoConfigSchema: z.ZodObject<{
500
500
  openai: "openai";
501
501
  gemini: "gemini";
502
502
  kimi: "kimi";
503
+ "kimi-code": "kimi-code";
503
504
  lmstudio: "lmstudio";
504
505
  deepseek: "deepseek";
505
506
  mistral: "mistral";
@@ -2958,6 +2959,15 @@ declare class AnthropicProvider implements LLMProvider {
2958
2959
  * Ensure client is initialized
2959
2960
  */
2960
2961
  private ensureInitialized;
2962
+ /**
2963
+ * Extract system prompt from messages array or options.
2964
+ *
2965
+ * The agent-loop passes the system message as the first element of the
2966
+ * messages array (role: "system"). convertMessages() strips it out because
2967
+ * Anthropic requires it as a top-level parameter — but all callers forgot
2968
+ * to also pass it via options.system. This helper bridges that gap.
2969
+ */
2970
+ private extractSystem;
2961
2971
  /**
2962
2972
  * Convert messages to Anthropic format
2963
2973
  */
@@ -3071,7 +3081,7 @@ declare class TaskError extends CocoError {
3071
3081
  /**
3072
3082
  * Supported provider types
3073
3083
  */
3074
- type ProviderType = "anthropic" | "openai" | "codex" | "gemini" | "kimi" | "lmstudio" | "ollama" | "groq" | "openrouter" | "mistral" | "deepseek" | "together" | "huggingface";
3084
+ type ProviderType = "anthropic" | "openai" | "codex" | "gemini" | "kimi" | "kimi-code" | "lmstudio" | "ollama" | "groq" | "openrouter" | "mistral" | "deepseek" | "together" | "huggingface";
3075
3085
  /**
3076
3086
  * Create a provider by type
3077
3087
  */
package/dist/index.js CHANGED
@@ -118,6 +118,8 @@ function getApiKey(provider) {
118
118
  return process.env["GEMINI_API_KEY"] ?? process.env["GOOGLE_API_KEY"];
119
119
  case "kimi":
120
120
  return process.env["KIMI_API_KEY"] ?? process.env["MOONSHOT_API_KEY"];
121
+ case "kimi-code":
122
+ return process.env["KIMI_CODE_API_KEY"];
121
123
  case "lmstudio":
122
124
  return process.env["LMSTUDIO_API_KEY"] ?? "lm-studio";
123
125
  case "ollama":
@@ -148,6 +150,8 @@ function getBaseUrl(provider) {
148
150
  return process.env["OPENAI_BASE_URL"];
149
151
  case "kimi":
150
152
  return process.env["KIMI_BASE_URL"] ?? "https://api.moonshot.ai/v1";
153
+ case "kimi-code":
154
+ return process.env["KIMI_CODE_BASE_URL"] ?? "https://api.kimi.com/coding/v1";
151
155
  case "lmstudio":
152
156
  return process.env["LMSTUDIO_BASE_URL"] ?? "http://localhost:1234/v1";
153
157
  case "ollama":
@@ -180,6 +184,8 @@ function getDefaultModel(provider) {
180
184
  return process.env["GEMINI_MODEL"] ?? "gemini-3-flash-preview";
181
185
  case "kimi":
182
186
  return process.env["KIMI_MODEL"] ?? "kimi-k2.5";
187
+ case "kimi-code":
188
+ return process.env["KIMI_CODE_MODEL"] ?? "kimi-for-coding";
183
189
  case "lmstudio":
184
190
  return process.env["LMSTUDIO_MODEL"] ?? "local-model";
185
191
  case "ollama":
@@ -220,6 +226,7 @@ var init_env = __esm({
220
226
  "codex",
221
227
  "gemini",
222
228
  "kimi",
229
+ "kimi-code",
223
230
  "lmstudio",
224
231
  "ollama",
225
232
  "groq",
@@ -3934,6 +3941,17 @@ var DEFAULT_QUALITY_THRESHOLDS = {
3934
3941
  maxIterations: 10,
3935
3942
  minIterations: 2
3936
3943
  };
3944
+
3945
+ // src/utils/subprocess-registry.ts
3946
+ var activeSubprocesses = /* @__PURE__ */ new Set();
3947
+ function trackSubprocess(proc) {
3948
+ activeSubprocesses.add(proc);
3949
+ const cleanup = () => activeSubprocesses.delete(proc);
3950
+ proc.then(cleanup, cleanup);
3951
+ return proc;
3952
+ }
3953
+
3954
+ // src/quality/analyzers/coverage.ts
3937
3955
  async function detectTestFramework(projectPath) {
3938
3956
  try {
3939
3957
  const pkgPath = join(projectPath, "package.json");
@@ -4044,12 +4062,16 @@ var CoverageAnalyzer = class {
4044
4062
  }
4045
4063
  const commands = this.buildCoverageCommand(framework, coverageTool);
4046
4064
  try {
4047
- const result = await execa(commands.command, commands.args, {
4065
+ const proc = execa(commands.command, commands.args, {
4048
4066
  cwd: this.projectPath,
4049
4067
  reject: false,
4050
- timeout: 12e4
4068
+ timeout: 12e4,
4051
4069
  // 2 minutes
4070
+ cleanup: true
4071
+ // kill process tree on parent exit
4052
4072
  });
4073
+ trackSubprocess(proc);
4074
+ const result = await proc;
4053
4075
  if (result.exitCode !== 0 && !result.stdout.includes("coverage")) {
4054
4076
  throw new Error(`Tests failed: ${result.stderr || result.stdout}`);
4055
4077
  }
@@ -4687,7 +4709,14 @@ var BuildVerifier = class {
4687
4709
  if (!SAFE_BUILD_PATTERN.test(buildCommand.trim())) {
4688
4710
  return {
4689
4711
  success: false,
4690
- errors: [{ file: "", line: 0, column: 0, message: `Unsafe build command rejected: ${buildCommand}` }],
4712
+ errors: [
4713
+ {
4714
+ file: "",
4715
+ line: 0,
4716
+ column: 0,
4717
+ message: `Unsafe build command rejected: ${buildCommand}`
4718
+ }
4719
+ ],
4691
4720
  warnings: [],
4692
4721
  duration: Date.now() - startTime,
4693
4722
  stdout: "",
@@ -4979,12 +5008,16 @@ var CorrectnessAnalyzer = class {
4979
5008
  return { passed: 0, failed: 0, skipped: 0 };
4980
5009
  }
4981
5010
  try {
4982
- const result = await execa(cmd.command, cmd.args, {
5011
+ const proc = execa(cmd.command, cmd.args, {
4983
5012
  cwd: this.projectPath,
4984
5013
  reject: false,
4985
- timeout: 3e5
5014
+ timeout: 3e5,
4986
5015
  // 5 minutes
5016
+ cleanup: true
5017
+ // kill process tree on parent exit
4987
5018
  });
5019
+ trackSubprocess(proc);
5020
+ const result = await proc;
4988
5021
  const output = result.stdout + "\n" + result.stderr;
4989
5022
  switch (framework) {
4990
5023
  case "vitest":
@@ -6242,9 +6275,7 @@ async function loadProjectConfig(projectPath) {
6242
6275
  const parsed = JSON.parse(raw);
6243
6276
  const result = ProjectConfigSchema.safeParse(parsed);
6244
6277
  if (!result.success) {
6245
- throw new Error(
6246
- `Invalid ${PROJECT_CONFIG_FILENAME} at ${configPath}: ${result.error.message}`
6247
- );
6278
+ throw new Error(`Invalid ${PROJECT_CONFIG_FILENAME} at ${configPath}: ${result.error.message}`);
6248
6279
  }
6249
6280
  let config = result.data;
6250
6281
  if (config.extend) {
@@ -6330,10 +6361,7 @@ function mergeThresholds(base, overrides) {
6330
6361
  }
6331
6362
  function resolvedThresholds(projectConfig) {
6332
6363
  if (!projectConfig) return DEFAULT_QUALITY_THRESHOLDS;
6333
- return mergeThresholds(
6334
- DEFAULT_QUALITY_THRESHOLDS,
6335
- thresholdsFromProjectConfig(projectConfig)
6336
- );
6364
+ return mergeThresholds(DEFAULT_QUALITY_THRESHOLDS, thresholdsFromProjectConfig(projectConfig));
6337
6365
  }
6338
6366
  function weightsFromProjectConfig(config) {
6339
6367
  const overrides = config.quality?.weights;
@@ -6416,9 +6444,7 @@ function getFileExtension(filePath) {
6416
6444
  function buildEvidence(dominant, counts, totalSourceFiles, files) {
6417
6445
  const evidence = [];
6418
6446
  const dominantCount = counts.get(dominant) ?? 0;
6419
- evidence.push(
6420
- `${dominantCount} of ${totalSourceFiles} source files are ${dominant}`
6421
- );
6447
+ evidence.push(`${dominantCount} of ${totalSourceFiles} source files are ${dominant}`);
6422
6448
  const configFiles = ["tsconfig.json", "pom.xml", "build.gradle", "Cargo.toml", "go.mod"];
6423
6449
  for (const cfg of configFiles) {
6424
6450
  if (files.some((f) => path15.basename(f) === cfg)) {
@@ -6665,10 +6691,7 @@ var JavaStyleAnalyzer = class {
6665
6691
  for (const { path: filePath, content } of files) {
6666
6692
  violations.push(...this.checkFile(filePath, content));
6667
6693
  }
6668
- const deduction = violations.reduce(
6669
- (sum, v) => sum + (v.severity === "error" ? 10 : 5),
6670
- 0
6671
- );
6694
+ const deduction = violations.reduce((sum, v) => sum + (v.severity === "error" ? 10 : 5), 0);
6672
6695
  const score = Math.max(0, 100 - deduction);
6673
6696
  return { score, violations };
6674
6697
  }
@@ -6697,7 +6720,9 @@ var JavaStyleAnalyzer = class {
6697
6720
  severity: "error"
6698
6721
  });
6699
6722
  }
6700
- const methodMatch = /\b(?:public|private|protected|static)\s+(?!class|interface|enum|record|new\b)(?:void|[\w<>[\]]+)\s+([A-Z]\w*)\s*\(/.exec(line);
6723
+ const methodMatch = /\b(?:public|private|protected|static)\s+(?!class|interface|enum|record|new\b)(?:void|[\w<>[\]]+)\s+([A-Z]\w*)\s*\(/.exec(
6724
+ line
6725
+ );
6701
6726
  if (methodMatch && !line.trim().startsWith("class") && !line.includes("class ")) {
6702
6727
  violations.push({
6703
6728
  rule: "MethodName",
@@ -6750,7 +6775,13 @@ var JavaDocumentationAnalyzer = class {
6750
6775
  async analyze(files) {
6751
6776
  const javaFiles = files ?? await findJavaFiles(this.projectPath, { srcPattern: "src/main/**/*.java" });
6752
6777
  if (!javaFiles.length) {
6753
- return { score: 100, javadocCoverage: 1, totalMethods: 0, documentedMethods: 0, undocumentedPublicMethods: [] };
6778
+ return {
6779
+ score: 100,
6780
+ javadocCoverage: 1,
6781
+ totalMethods: 0,
6782
+ documentedMethods: 0,
6783
+ undocumentedPublicMethods: []
6784
+ };
6754
6785
  }
6755
6786
  const fileContents = await Promise.all(
6756
6787
  javaFiles.map(async (f) => ({
@@ -6762,7 +6793,13 @@ var JavaDocumentationAnalyzer = class {
6762
6793
  }
6763
6794
  analyzeContent(files) {
6764
6795
  if (!files.length) {
6765
- return { score: 100, javadocCoverage: 1, totalMethods: 0, documentedMethods: 0, undocumentedPublicMethods: [] };
6796
+ return {
6797
+ score: 100,
6798
+ javadocCoverage: 1,
6799
+ totalMethods: 0,
6800
+ documentedMethods: 0,
6801
+ undocumentedPublicMethods: []
6802
+ };
6766
6803
  }
6767
6804
  let totalMethods = 0;
6768
6805
  let documentedMethods = 0;
@@ -6825,7 +6862,13 @@ var JavaCoverageAnalyzer = class {
6825
6862
  }
6826
6863
  parseJacocoXml(xml2) {
6827
6864
  if (!xml2.trim()) {
6828
- return { score: 0, lineCoverage: 0, branchCoverage: 0, methodCoverage: 0, reportFound: false };
6865
+ return {
6866
+ score: 0,
6867
+ lineCoverage: 0,
6868
+ branchCoverage: 0,
6869
+ methodCoverage: 0,
6870
+ reportFound: false
6871
+ };
6829
6872
  }
6830
6873
  const lineCoverage = this.extractCoverage(xml2, "LINE");
6831
6874
  const branchCoverage = this.extractCoverage(xml2, "BRANCH");
@@ -6834,9 +6877,7 @@ var JavaCoverageAnalyzer = class {
6834
6877
  return { score, lineCoverage, branchCoverage, methodCoverage, reportFound: true };
6835
6878
  }
6836
6879
  extractCoverage(xml2, type) {
6837
- const regex = new RegExp(
6838
- `<counter\\s+type="${type}"\\s+missed="(\\d+)"\\s+covered="(\\d+)"`
6839
- );
6880
+ const regex = new RegExp(`<counter\\s+type="${type}"\\s+missed="(\\d+)"\\s+covered="(\\d+)"`);
6840
6881
  const match = regex.exec(xml2);
6841
6882
  if (!match) return 0;
6842
6883
  const missed = parseInt(match[1] ?? "0", 10);
@@ -11498,7 +11539,7 @@ var AnthropicProvider = class {
11498
11539
  model: options?.model ?? this.config.model ?? DEFAULT_MODEL,
11499
11540
  max_tokens: options?.maxTokens ?? this.config.maxTokens ?? 8192,
11500
11541
  temperature: options?.temperature ?? this.config.temperature ?? 0,
11501
- system: options?.system,
11542
+ system: this.extractSystem(messages, options?.system),
11502
11543
  messages: this.convertMessages(messages),
11503
11544
  stop_sequences: options?.stopSequences
11504
11545
  });
@@ -11528,7 +11569,7 @@ var AnthropicProvider = class {
11528
11569
  model: options?.model ?? this.config.model ?? DEFAULT_MODEL,
11529
11570
  max_tokens: options?.maxTokens ?? this.config.maxTokens ?? 8192,
11530
11571
  temperature: options?.temperature ?? this.config.temperature ?? 0,
11531
- system: options?.system,
11572
+ system: this.extractSystem(messages, options?.system),
11532
11573
  messages: this.convertMessages(messages),
11533
11574
  tools: this.convertTools(options.tools),
11534
11575
  tool_choice: options.toolChoice ? this.convertToolChoice(options.toolChoice) : void 0
@@ -11560,7 +11601,7 @@ var AnthropicProvider = class {
11560
11601
  model: options?.model ?? this.config.model ?? DEFAULT_MODEL,
11561
11602
  max_tokens: options?.maxTokens ?? this.config.maxTokens ?? 8192,
11562
11603
  temperature: options?.temperature ?? this.config.temperature ?? 0,
11563
- system: options?.system,
11604
+ system: this.extractSystem(messages, options?.system),
11564
11605
  messages: this.convertMessages(messages)
11565
11606
  });
11566
11607
  for await (const event of stream) {
@@ -11586,7 +11627,7 @@ var AnthropicProvider = class {
11586
11627
  model: options?.model ?? this.config.model ?? DEFAULT_MODEL,
11587
11628
  max_tokens: options?.maxTokens ?? this.config.maxTokens ?? 8192,
11588
11629
  temperature: options?.temperature ?? this.config.temperature ?? 0,
11589
- system: options?.system,
11630
+ system: this.extractSystem(messages, options?.system),
11590
11631
  messages: this.convertMessages(messages),
11591
11632
  tools: this.convertTools(options.tools),
11592
11633
  tool_choice: options.toolChoice ? this.convertToolChoice(options.toolChoice) : void 0
@@ -11709,6 +11750,22 @@ var AnthropicProvider = class {
11709
11750
  });
11710
11751
  }
11711
11752
  }
11753
+ /**
11754
+ * Extract system prompt from messages array or options.
11755
+ *
11756
+ * The agent-loop passes the system message as the first element of the
11757
+ * messages array (role: "system"). convertMessages() strips it out because
11758
+ * Anthropic requires it as a top-level parameter — but all callers forgot
11759
+ * to also pass it via options.system. This helper bridges that gap.
11760
+ */
11761
+ extractSystem(messages, optionsSystem) {
11762
+ if (optionsSystem !== void 0) return optionsSystem;
11763
+ const systemMsg = messages.find((m) => m.role === "system");
11764
+ if (!systemMsg) return void 0;
11765
+ if (typeof systemMsg.content === "string") return systemMsg.content;
11766
+ const text = systemMsg.content.filter((b) => b.type === "text").map((b) => b.text).join("");
11767
+ return text || void 0;
11768
+ }
11712
11769
  /**
11713
11770
  * Convert messages to Anthropic format
11714
11771
  */
@@ -11869,6 +11926,7 @@ var CONTEXT_WINDOWS2 = {
11869
11926
  "kimi-k2.5": 262144,
11870
11927
  "kimi-k2-0324": 131072,
11871
11928
  "kimi-latest": 131072,
11929
+ "kimi-for-coding": 131072,
11872
11930
  "moonshot-v1-8k": 8e3,
11873
11931
  "moonshot-v1-32k": 32e3,
11874
11932
  "moonshot-v1-128k": 128e3,
@@ -11945,7 +12003,12 @@ var LOCAL_MODEL_PATTERNS = [
11945
12003
  "gemma",
11946
12004
  "starcoder"
11947
12005
  ];
11948
- var MODELS_WITH_THINKING_MODE = ["kimi-k2.5", "kimi-k2-0324", "kimi-latest"];
12006
+ var MODELS_WITH_THINKING_MODE = [
12007
+ "kimi-k2.5",
12008
+ "kimi-k2-0324",
12009
+ "kimi-latest",
12010
+ "kimi-for-coding"
12011
+ ];
11949
12012
  var OpenAIProvider = class {
11950
12013
  id;
11951
12014
  name;
@@ -12564,6 +12627,20 @@ function createKimiProvider(config) {
12564
12627
  }
12565
12628
  return provider;
12566
12629
  }
12630
+ function createKimiCodeProvider(config) {
12631
+ const provider = new OpenAIProvider("kimi-code", "Kimi Code");
12632
+ const kimiCodeConfig = {
12633
+ ...config,
12634
+ baseUrl: config?.baseUrl ?? process.env["KIMI_CODE_BASE_URL"] ?? "https://api.kimi.com/coding/v1",
12635
+ apiKey: config?.apiKey ?? process.env["KIMI_CODE_API_KEY"],
12636
+ model: config?.model ?? "kimi-for-coding"
12637
+ };
12638
+ if (kimiCodeConfig.apiKey) {
12639
+ provider.initialize(kimiCodeConfig).catch(() => {
12640
+ });
12641
+ }
12642
+ return provider;
12643
+ }
12567
12644
  var OAUTH_CONFIGS = {
12568
12645
  /**
12569
12646
  * OpenAI OAuth (ChatGPT Plus/Pro subscriptions)
@@ -13067,7 +13144,7 @@ var GeminiProvider = class {
13067
13144
  temperature: options?.temperature ?? this.config.temperature ?? 0,
13068
13145
  stopSequences: options?.stopSequences
13069
13146
  },
13070
- systemInstruction: options?.system
13147
+ systemInstruction: this.extractSystem(messages, options?.system)
13071
13148
  });
13072
13149
  const { history, lastMessage } = this.convertMessages(messages);
13073
13150
  const chat = model.startChat({ history });
@@ -13095,7 +13172,7 @@ var GeminiProvider = class {
13095
13172
  maxOutputTokens: options?.maxTokens ?? this.config.maxTokens ?? 8192,
13096
13173
  temperature: options?.temperature ?? this.config.temperature ?? 0
13097
13174
  },
13098
- systemInstruction: options?.system,
13175
+ systemInstruction: this.extractSystem(messages, options?.system),
13099
13176
  tools,
13100
13177
  toolConfig: {
13101
13178
  functionCallingConfig: {
@@ -13124,7 +13201,7 @@ var GeminiProvider = class {
13124
13201
  maxOutputTokens: options?.maxTokens ?? this.config.maxTokens ?? 8192,
13125
13202
  temperature: options?.temperature ?? this.config.temperature ?? 0
13126
13203
  },
13127
- systemInstruction: options?.system
13204
+ systemInstruction: this.extractSystem(messages, options?.system)
13128
13205
  });
13129
13206
  const { history, lastMessage } = this.convertMessages(messages);
13130
13207
  const chat = model.startChat({ history });
@@ -13158,7 +13235,7 @@ var GeminiProvider = class {
13158
13235
  maxOutputTokens: options?.maxTokens ?? this.config.maxTokens ?? 8192,
13159
13236
  temperature: options?.temperature ?? this.config.temperature ?? 0
13160
13237
  },
13161
- systemInstruction: options?.system,
13238
+ systemInstruction: this.extractSystem(messages, options?.system),
13162
13239
  tools,
13163
13240
  toolConfig: {
13164
13241
  functionCallingConfig: {
@@ -13259,6 +13336,21 @@ var GeminiProvider = class {
13259
13336
  });
13260
13337
  }
13261
13338
  }
13339
+ /**
13340
+ * Extract system prompt from messages array or options.
13341
+ *
13342
+ * convertMessages() skips system-role messages ("handled via systemInstruction"),
13343
+ * but all callers forgot to also pass it via options.system. This helper bridges
13344
+ * that gap — mirrors the same fix applied to AnthropicProvider.
13345
+ */
13346
+ extractSystem(messages, optionsSystem) {
13347
+ if (optionsSystem !== void 0) return optionsSystem;
13348
+ const systemMsg = messages.find((m) => m.role === "system");
13349
+ if (!systemMsg) return void 0;
13350
+ if (typeof systemMsg.content === "string") return systemMsg.content;
13351
+ const text = systemMsg.content.filter((b) => b.type === "text").map((b) => b.text).join("");
13352
+ return text || void 0;
13353
+ }
13262
13354
  /**
13263
13355
  * Convert messages to Gemini format
13264
13356
  */
@@ -13457,6 +13549,10 @@ async function createProvider(type, config = {}) {
13457
13549
  provider = createKimiProvider(mergedConfig);
13458
13550
  await provider.initialize(mergedConfig);
13459
13551
  return provider;
13552
+ case "kimi-code":
13553
+ provider = createKimiCodeProvider(mergedConfig);
13554
+ await provider.initialize(mergedConfig);
13555
+ return provider;
13460
13556
  case "lmstudio":
13461
13557
  provider = new OpenAIProvider("lmstudio", "LM Studio");
13462
13558
  mergedConfig.baseUrl = mergedConfig.baseUrl ?? "http://localhost:1234/v1";
@@ -13988,6 +14084,7 @@ var ProviderConfigSchema = z.object({
13988
14084
  "codex",
13989
14085
  "gemini",
13990
14086
  "kimi",
14087
+ "kimi-code",
13991
14088
  "lmstudio",
13992
14089
  "ollama",
13993
14090
  "groq",
@@ -15890,12 +15987,16 @@ Examples:
15890
15987
  tool: "run_tests"
15891
15988
  });
15892
15989
  }
15893
- const result = await execa(command, args, {
15990
+ const proc = execa(command, args, {
15894
15991
  cwd: projectDir,
15895
15992
  reject: false,
15896
- timeout: 3e5
15993
+ timeout: 3e5,
15897
15994
  // 5 minute timeout
15995
+ cleanup: true
15996
+ // kill process tree on parent exit
15898
15997
  });
15998
+ trackSubprocess(proc);
15999
+ const result = await proc;
15899
16000
  const duration = performance.now() - startTime;
15900
16001
  return parseTestResults(
15901
16002
  detectedFramework,