@corbat-tech/coco 1.2.2 → 1.2.3

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
package/dist/index.js CHANGED
@@ -3,10 +3,12 @@ import * as path14 from 'path';
  import path14__default, { dirname, join, basename } from 'path';
  import * as fs4 from 'fs';
  import fs4__default, { readFileSync, constants } from 'fs';
- import { fileURLToPath } from 'url';
- import { randomUUID } from 'crypto';
  import * as fs14 from 'fs/promises';
  import fs14__default, { readFile, access, readdir } from 'fs/promises';
+ import chalk3 from 'chalk';
+ import * as p3 from '@clack/prompts';
+ import { fileURLToPath } from 'url';
+ import { randomUUID } from 'crypto';
  import { execa } from 'execa';
  import { parse } from '@typescript-eslint/typescript-estree';
  import { glob } from 'glob';
@@ -17,8 +19,6 @@ import { Logger } from 'tslog';
  import Anthropic from '@anthropic-ai/sdk';
  import OpenAI from 'openai';
  import 'http';
- import '@clack/prompts';
- import chalk3 from 'chalk';
  import { GoogleGenerativeAI, FunctionCallingMode } from '@google/generative-ai';
  import JSON5 from 'json5';
  import 'events';
@@ -38,10 +38,15 @@ import typescript from 'highlight.js/lib/languages/typescript';
  import xml from 'highlight.js/lib/languages/xml';
  import yaml from 'highlight.js/lib/languages/yaml';

+ var __defProp = Object.defineProperty;
  var __getOwnPropNames = Object.getOwnPropertyNames;
  var __esm = (fn, res) => function __init() {
  return fn && (res = (0, fn[__getOwnPropNames(fn)[0]])(fn = 0)), res;
  };
+ var __export = (target, all) => {
+ for (var name in all)
+ __defProp(target, name, { get: all[name], enumerable: true });
+ };
  var COCO_HOME, CONFIG_PATHS;
  var init_paths = __esm({
  "src/config/paths.ts"() {
@@ -171,6 +176,119 @@ var init_env = __esm({
  });
  }
  });
+ function getAllowedPaths() {
+ return [...sessionAllowedPaths];
+ }
+ function isWithinAllowedPath(absolutePath, operation) {
+ const normalizedTarget = path14__default.normalize(absolutePath);
+ for (const entry of sessionAllowedPaths) {
+ const normalizedAllowed = path14__default.normalize(entry.path);
+ if (normalizedTarget === normalizedAllowed || normalizedTarget.startsWith(normalizedAllowed + path14__default.sep)) {
+ if (operation === "read") return true;
+ if (entry.level === "write") return true;
+ }
+ }
+ return false;
+ }
+ function addAllowedPathToSession(dirPath, level) {
+ const absolute = path14__default.resolve(dirPath);
+ if (sessionAllowedPaths.some((e) => path14__default.normalize(e.path) === path14__default.normalize(absolute))) {
+ return;
+ }
+ sessionAllowedPaths.push({
+ path: absolute,
+ authorizedAt: (/* @__PURE__ */ new Date()).toISOString(),
+ level
+ });
+ }
+ async function persistAllowedPath(dirPath, level) {
+ if (!currentProjectPath) return;
+ const absolute = path14__default.resolve(dirPath);
+ const store = await loadStore();
+ if (!store.projects[currentProjectPath]) {
+ store.projects[currentProjectPath] = [];
+ }
+ const entries = store.projects[currentProjectPath];
+ const normalized = path14__default.normalize(absolute);
+ if (entries.some((e) => path14__default.normalize(e.path) === normalized)) {
+ return;
+ }
+ entries.push({
+ path: absolute,
+ authorizedAt: (/* @__PURE__ */ new Date()).toISOString(),
+ level
+ });
+ await saveStore(store);
+ }
+ async function loadStore() {
+ try {
+ const content = await fs14__default.readFile(STORE_FILE, "utf-8");
+ return { ...DEFAULT_STORE, ...JSON.parse(content) };
+ } catch {
+ return { ...DEFAULT_STORE };
+ }
+ }
+ async function saveStore(store) {
+ try {
+ await fs14__default.mkdir(path14__default.dirname(STORE_FILE), { recursive: true });
+ await fs14__default.writeFile(STORE_FILE, JSON.stringify(store, null, 2), "utf-8");
+ } catch {
+ }
+ }
+ var STORE_FILE, DEFAULT_STORE, sessionAllowedPaths, currentProjectPath;
+ var init_allowed_paths = __esm({
+ "src/tools/allowed-paths.ts"() {
+ init_paths();
+ STORE_FILE = path14__default.join(CONFIG_PATHS.home, "allowed-paths.json");
+ DEFAULT_STORE = {
+ version: 1,
+ projects: {}
+ };
+ sessionAllowedPaths = [];
+ currentProjectPath = "";
+ }
+ });
+
+ // src/cli/repl/allow-path-prompt.ts
+ var allow_path_prompt_exports = {};
+ __export(allow_path_prompt_exports, {
+ promptAllowPath: () => promptAllowPath
+ });
+ async function promptAllowPath(dirPath) {
+ const absolute = path14__default.resolve(dirPath);
+ console.log();
+ console.log(chalk3.yellow(" \u26A0 Access denied \u2014 path is outside the project directory"));
+ console.log(chalk3.dim(` \u{1F4C1} ${absolute}`));
+ console.log();
+ const action = await p3.select({
+ message: "Grant access to this directory?",
+ options: [
+ { value: "session-write", label: "\u2713 Allow write (this session)" },
+ { value: "session-read", label: "\u25D0 Allow read-only (this session)" },
+ { value: "persist-write", label: "\u26A1 Allow write (remember for this project)" },
+ { value: "persist-read", label: "\u{1F4BE} Allow read-only (remember for this project)" },
+ { value: "no", label: "\u2717 Deny" }
+ ]
+ });
+ if (p3.isCancel(action) || action === "no") {
+ return false;
+ }
+ const level = action.includes("read") ? "read" : "write";
+ const persist = action.startsWith("persist");
+ addAllowedPathToSession(absolute, level);
+ if (persist) {
+ await persistAllowedPath(absolute, level);
+ }
+ const levelLabel = level === "write" ? "write" : "read-only";
+ const persistLabel = persist ? " (remembered)" : "";
+ console.log(chalk3.green(` \u2713 Access granted: ${levelLabel}${persistLabel}`));
+ return true;
+ }
+ var init_allow_path_prompt = __esm({
+ "src/cli/repl/allow-path-prompt.ts"() {
+ init_allowed_paths();
+ }
+ });
  function findPackageJson() {
  let dir = dirname(fileURLToPath(import.meta.url));
  for (let i = 0; i < 10; i++) {
@@ -3776,10 +3894,10 @@ var CoverageAnalyzer = class {
  join(this.projectPath, ".coverage", "coverage-summary.json"),
  join(this.projectPath, "coverage", "lcov-report", "coverage-summary.json")
  ];
- for (const path32 of possiblePaths) {
+ for (const path36 of possiblePaths) {
  try {
- await access(path32, constants.R_OK);
- const content = await readFile(path32, "utf-8");
+ await access(path36, constants.R_OK);
+ const content = await readFile(path36, "utf-8");
  const report = JSON.parse(content);
  return parseCoverageSummary(report);
  } catch {
@@ -11893,9 +12011,9 @@ function createInitialState(config) {
  }
  async function loadExistingState(projectPath) {
  try {
- const fs33 = await import('fs/promises');
+ const fs35 = await import('fs/promises');
  const statePath = `${projectPath}/.coco/state/project.json`;
- const content = await fs33.readFile(statePath, "utf-8");
+ const content = await fs35.readFile(statePath, "utf-8");
  const data = JSON.parse(content);
  data.createdAt = new Date(data.createdAt);
  data.updatedAt = new Date(data.updatedAt);
@@ -11905,13 +12023,13 @@ async function loadExistingState(projectPath) {
  }
  }
  async function saveState(state) {
- const fs33 = await import('fs/promises');
+ const fs35 = await import('fs/promises');
  const statePath = `${state.path}/.coco/state`;
- await fs33.mkdir(statePath, { recursive: true });
+ await fs35.mkdir(statePath, { recursive: true });
  const filePath = `${statePath}/project.json`;
  const tmpPath = `${filePath}.tmp.${Date.now()}`;
- await fs33.writeFile(tmpPath, JSON.stringify(state, null, 2), "utf-8");
- await fs33.rename(tmpPath, filePath);
+ await fs35.writeFile(tmpPath, JSON.stringify(state, null, 2), "utf-8");
+ await fs35.rename(tmpPath, filePath);
  }
  function getPhaseExecutor(phase) {
  switch (phase) {
@@ -11970,20 +12088,20 @@ async function createPhaseContext(config, state) {
  };
  const tools = {
  file: {
- async read(path32) {
- const fs33 = await import('fs/promises');
- return fs33.readFile(path32, "utf-8");
+ async read(path36) {
+ const fs35 = await import('fs/promises');
+ return fs35.readFile(path36, "utf-8");
  },
- async write(path32, content) {
- const fs33 = await import('fs/promises');
+ async write(path36, content) {
+ const fs35 = await import('fs/promises');
  const nodePath = await import('path');
- await fs33.mkdir(nodePath.dirname(path32), { recursive: true });
- await fs33.writeFile(path32, content, "utf-8");
+ await fs35.mkdir(nodePath.dirname(path36), { recursive: true });
+ await fs35.writeFile(path36, content, "utf-8");
  },
- async exists(path32) {
- const fs33 = await import('fs/promises');
+ async exists(path36) {
+ const fs35 = await import('fs/promises');
  try {
- await fs33.access(path32);
+ await fs35.access(path36);
  return true;
  } catch {
  return false;
@@ -12132,9 +12250,9 @@ async function createSnapshot(state) {
  var MAX_CHECKPOINT_VERSIONS = 5;
  async function getCheckpointFiles(state, phase) {
  try {
- const fs33 = await import('fs/promises');
+ const fs35 = await import('fs/promises');
  const checkpointDir = `${state.path}/.coco/checkpoints`;
- const files = await fs33.readdir(checkpointDir);
+ const files = await fs35.readdir(checkpointDir);
  const phaseFiles = files.filter((f) => f.startsWith(`snapshot-pre-${phase}-`) && f.endsWith(".json")).sort((a, b) => {
  const tsA = parseInt(a.split("-").pop()?.replace(".json", "") ?? "0", 10);
  const tsB = parseInt(b.split("-").pop()?.replace(".json", "") ?? "0", 10);
@@ -12147,11 +12265,11 @@ async function getCheckpointFiles(state, phase) {
  }
  async function cleanupOldCheckpoints(state, phase) {
  try {
- const fs33 = await import('fs/promises');
+ const fs35 = await import('fs/promises');
  const files = await getCheckpointFiles(state, phase);
  if (files.length > MAX_CHECKPOINT_VERSIONS) {
  const filesToDelete = files.slice(MAX_CHECKPOINT_VERSIONS);
- await Promise.all(filesToDelete.map((f) => fs33.unlink(f).catch(() => {
+ await Promise.all(filesToDelete.map((f) => fs35.unlink(f).catch(() => {
  })));
  }
  } catch {
@@ -12159,13 +12277,13 @@ async function cleanupOldCheckpoints(state, phase) {
  }
  }
  async function saveSnapshot(state, snapshotId) {
- const fs33 = await import('fs/promises');
+ const fs35 = await import('fs/promises');
  const snapshotPath = `${state.path}/.coco/checkpoints/snapshot-${snapshotId}.json`;
  const snapshotDir = `${state.path}/.coco/checkpoints`;
- await fs33.mkdir(snapshotDir, { recursive: true });
+ await fs35.mkdir(snapshotDir, { recursive: true });
  const createdAt = state.createdAt instanceof Date ? state.createdAt.toISOString() : String(state.createdAt);
  const updatedAt = state.updatedAt instanceof Date ? state.updatedAt.toISOString() : String(state.updatedAt);
- await fs33.writeFile(
+ await fs35.writeFile(
  snapshotPath,
  JSON.stringify(
  {
@@ -12540,24 +12658,7 @@ z.string().regex(
  /^\d+\.\d+\.\d+$/,
  "Version must be in semver format (e.g., 1.0.0)"
  );
-
- // src/tools/allowed-paths.ts
- init_paths();
- path14__default.join(CONFIG_PATHS.home, "allowed-paths.json");
- var sessionAllowedPaths = [];
- function isWithinAllowedPath(absolutePath, operation) {
- const normalizedTarget = path14__default.normalize(absolutePath);
- for (const entry of sessionAllowedPaths) {
- const normalizedAllowed = path14__default.normalize(entry.path);
- if (normalizedTarget === normalizedAllowed || normalizedTarget.startsWith(normalizedAllowed + path14__default.sep)) {
- if (operation === "read") return true;
- if (entry.level === "write") return true;
- }
- }
- return false;
- }
-
- // src/tools/file.ts
+ init_allowed_paths();
  var SENSITIVE_PATTERNS = [
  /\.env(?:\.\w+)?$/,
  // .env, .env.local, etc.
@@ -13421,7 +13522,8 @@ function truncateOutput(output, maxLength = 5e4) {
  [Output truncated - ${output.length - maxLength} more characters]`;
  }
  function getGit(cwd) {
- return simpleGit(cwd ?? process.cwd());
+ const baseDir = cwd ?? process.cwd();
+ return simpleGit({ baseDir });
  }
  var gitStatusTool = defineTool({
  name: "git_status",
@@ -13782,9 +13884,11 @@ var gitTools = [
  gitInitTool
  ];
  function generateSimpleCommitMessage() {
+ const cwd = process.cwd();
  try {
  const diff = execSync("git diff --cached --name-only", {
  encoding: "utf-8",
+ cwd,
  stdio: ["pipe", "pipe", "ignore"]
  });
  const files = diff.trim().split("\n").filter(Boolean);
@@ -13820,6 +13924,7 @@ var checkProtectedBranchTool = defineTool({
  try {
  const branch = execSync("git rev-parse --abbrev-ref HEAD", {
  encoding: "utf-8",
+ cwd: process.cwd(),
  stdio: ["pipe", "pipe", "ignore"]
  }).trim();
  const protected_branches = ["main", "master", "develop", "production"];
@@ -13855,7 +13960,7 @@ var simpleAutoCommitTool = defineTool({
  async execute(input) {
  try {
  try {
- execSync("git diff --cached --quiet", { stdio: "ignore" });
+ execSync("git diff --cached --quiet", { cwd: process.cwd(), stdio: "ignore" });
  return {
  stdout: "",
  stderr: "No staged changes to commit",
@@ -13867,6 +13972,7 @@ var simpleAutoCommitTool = defineTool({
  const message = input.message || generateSimpleCommitMessage();
  execSync(`git commit -m "${message}"`, {
  encoding: "utf-8",
+ cwd: process.cwd(),
  stdio: "pipe"
  });
  return {
@@ -16468,9 +16574,17 @@ Examples:
  }
  });
  var diffTools = [showDiffTool];
- async function fileExists(path32) {
+ async function fileExists(filePath) {
+ try {
+ await fs14__default.access(filePath);
+ return true;
+ } catch {
+ return false;
+ }
+ }
+ async function fileExists2(path36) {
  try {
- await access(path32);
+ await access(path36);
  return true;
  } catch {
  return false;
@@ -16485,7 +16599,7 @@ async function dirHasFiles(dir) {
  }
  }
  async function detectMaturity(cwd) {
- const hasPackageJson = await fileExists(join(cwd, "package.json"));
+ const hasPackageJson = await fileExists2(join(cwd, "package.json"));
  if (!hasPackageJson) {
  const otherManifests = [
  "go.mod",
@@ -16498,7 +16612,7 @@ async function detectMaturity(cwd) {
  ];
  let hasAnyManifest = false;
  for (const m of otherManifests) {
- if (await fileExists(join(cwd, m))) {
+ if (await fileExists2(join(cwd, m))) {
  hasAnyManifest = true;
  break;
  }
@@ -16539,7 +16653,7 @@ async function detectMaturity(cwd) {
  cwd,
  ignore: ["node_modules/**", "dist/**", "build/**"]
  });
- const hasCI = await fileExists(join(cwd, ".github/workflows")) && await dirHasFiles(join(cwd, ".github/workflows"));
+ const hasCI = await fileExists2(join(cwd, ".github/workflows")) && await dirHasFiles(join(cwd, ".github/workflows"));
  const lintConfigs = [
  ".eslintrc.js",
  ".eslintrc.json",
@@ -16552,7 +16666,7 @@ async function detectMaturity(cwd) {
  ];
  let hasLintConfig = false;
  for (const config of lintConfigs) {
- if (await fileExists(join(cwd, config))) {
+ if (await fileExists2(join(cwd, config))) {
  hasLintConfig = true;
  break;
  }
@@ -16560,7 +16674,7 @@ async function detectMaturity(cwd) {
  if (!hasLintConfig && hasPackageJson) {
  try {
  const pkgRaw = await import('fs/promises').then(
- (fs33) => fs33.readFile(join(cwd, "package.json"), "utf-8")
+ (fs35) => fs35.readFile(join(cwd, "package.json"), "utf-8")
  );
  const pkg = JSON.parse(pkgRaw);
  if (pkg.scripts?.lint || pkg.scripts?.["lint:fix"]) {
@@ -16605,7 +16719,8 @@ var SECURITY_PATTERNS2 = [
  regex: /console\.(log|debug|info)\(/,
  severity: "minor",
  category: "best-practice",
- message: "Remove console.log \u2014 use structured logging instead"
+ message: "Remove console.log \u2014 use structured logging instead",
+ excludePaths: /\/(cli|repl|bin|scripts)\//
  }
  ];
  var CORRECTNESS_PATTERNS = [
@@ -16685,6 +16800,7 @@ function analyzePatterns(diff) {
  for (const line of hunk.lines) {
  if (line.type !== "add") continue;
  for (const pattern of ALL_PATTERNS) {
+ if (pattern.excludePaths?.test(file.path)) continue;
  if (pattern.regex.test(line.content)) {
  findings.push({
  file: file.path,
@@ -16701,7 +16817,8 @@ function analyzePatterns(diff) {
  }
  return findings;
  }
- function checkTestCoverage(diff) {
+ var TEST_COVERAGE_LARGE_CHANGE_THRESHOLD = 15;
+ async function checkTestCoverage(diff, cwd) {
  const findings = [];
  const changedSrc = [];
  const changedTests = /* @__PURE__ */ new Set();
@@ -16711,22 +16828,35 @@ function checkTestCoverage(diff) {
  changedTests.add(file.path);
  } else if (/\.(ts|tsx|js|jsx)$/.test(file.path)) {
  if (file.additions > 5) {
- changedSrc.push(file.path);
+ changedSrc.push({ path: file.path, additions: file.additions });
  }
  }
  }
  for (const src of changedSrc) {
- const baseName = src.replace(/\.(ts|tsx|js|jsx)$/, "");
+ const baseName = src.path.replace(/\.(ts|tsx|js|jsx)$/, "");
  const hasTestChange = [...changedTests].some(
  (t) => t.includes(baseName.split("/").pop()) || t.startsWith(baseName)
  );
  if (!hasTestChange) {
- findings.push({
- file: src,
- severity: "minor",
- category: "testing",
- message: "Logic changes without corresponding test updates"
- });
+ const ext = src.path.match(/\.(ts|tsx|js|jsx)$/)?.[0] ?? ".ts";
+ const testExists = await fileExists(path14__default.join(cwd, `${baseName}.test${ext}`)) || await fileExists(path14__default.join(cwd, `${baseName}.spec${ext}`));
+ if (testExists) {
+ if (src.additions >= TEST_COVERAGE_LARGE_CHANGE_THRESHOLD) {
+ findings.push({
+ file: src.path,
+ severity: "info",
+ category: "testing",
+ message: "Test file exists but was not updated \u2014 verify existing tests cover these changes"
+ });
+ }
+ } else {
+ findings.push({
+ file: src.path,
+ severity: "minor",
+ category: "testing",
+ message: "Logic changes without corresponding test updates"
+ });
+ }
  }
  }
  return findings;
@@ -16876,7 +17006,7 @@ Examples:
  const maturity = maturityInfo.level;
  let allFindings = [];
  allFindings.push(...analyzePatterns(diff));
- allFindings.push(...checkTestCoverage(diff));
+ allFindings.push(...await checkTestCoverage(diff, projectDir));
  allFindings.push(...checkDocumentation(diff));
  if (runLinter) {
  try {
@@ -16927,8 +17057,8 @@ Examples:
16927
17057
  }
16928
17058
  });
16929
17059
  var reviewTools = [reviewCodeTool];
16930
- var fs22 = await import('fs/promises');
16931
- var path22 = await import('path');
17060
+ var fs23 = await import('fs/promises');
17061
+ var path24 = await import('path');
16932
17062
  var { glob: glob12 } = await import('glob');
16933
17063
  var DEFAULT_MAX_FILES = 200;
16934
17064
  var LANGUAGE_EXTENSIONS = {
@@ -16954,7 +17084,7 @@ var DEFAULT_EXCLUDES = [
16954
17084
  "**/*.d.ts"
16955
17085
  ];
16956
17086
  function detectLanguage2(filePath) {
16957
- const ext = path22.extname(filePath).toLowerCase();
17087
+ const ext = path24.extname(filePath).toLowerCase();
16958
17088
  for (const [lang, extensions] of Object.entries(LANGUAGE_EXTENSIONS)) {
16959
17089
  if (extensions.includes(ext)) return lang;
16960
17090
  }
@@ -17363,9 +17493,9 @@ Examples:
17363
17493
  }),
17364
17494
  async execute({ path: rootPath, include, exclude, languages, maxFiles, depth }) {
17365
17495
  const startTime = performance.now();
17366
- const absPath = path22.resolve(rootPath);
17496
+ const absPath = path24.resolve(rootPath);
17367
17497
  try {
17368
- const stat2 = await fs22.stat(absPath);
17498
+ const stat2 = await fs23.stat(absPath);
17369
17499
  if (!stat2.isDirectory()) {
17370
17500
  throw new ToolError(`Path is not a directory: ${absPath}`, {
17371
17501
  tool: "codebase_map"
@@ -17402,14 +17532,14 @@ Examples:
17402
17532
  let totalDefinitions = 0;
17403
17533
  let exportedSymbols = 0;
17404
17534
  for (const file of limitedFiles) {
17405
- const fullPath = path22.join(absPath, file);
17535
+ const fullPath = path24.join(absPath, file);
17406
17536
  const language = detectLanguage2(file);
17407
17537
  if (!language) continue;
17408
17538
  if (languages && !languages.includes(language)) {
17409
17539
  continue;
17410
17540
  }
17411
17541
  try {
17412
- const content = await fs22.readFile(fullPath, "utf-8");
17542
+ const content = await fs23.readFile(fullPath, "utf-8");
17413
17543
  const lineCount = content.split("\n").length;
17414
17544
  const parsed = parseFile(content, language);
17415
17545
  const definitions = depth === "overview" ? parsed.definitions.filter((d) => d.exported) : parsed.definitions;
@@ -17442,23 +17572,23 @@ Examples:
17442
17572
  });
17443
17573
  var codebaseMapTools = [codebaseMapTool];
17444
17574
  init_paths();
17445
- var fs23 = await import('fs/promises');
17446
- var path23 = await import('path');
17447
- var crypto2 = await import('crypto');
17448
- var GLOBAL_MEMORIES_DIR = path23.join(COCO_HOME, "memories");
17575
+ var fs24 = await import('fs/promises');
17576
+ var path25 = await import('path');
17577
+ var crypto3 = await import('crypto');
17578
+ var GLOBAL_MEMORIES_DIR = path25.join(COCO_HOME, "memories");
17449
17579
  var PROJECT_MEMORIES_DIR = ".coco/memories";
17450
17580
  var DEFAULT_MAX_MEMORIES = 1e3;
17451
17581
  async function ensureDir(dirPath) {
17452
- await fs23.mkdir(dirPath, { recursive: true });
17582
+ await fs24.mkdir(dirPath, { recursive: true });
17453
17583
  }
17454
17584
  function getMemoriesDir(scope) {
17455
17585
  return scope === "global" ? GLOBAL_MEMORIES_DIR : PROJECT_MEMORIES_DIR;
17456
17586
  }
17457
17587
  async function loadIndex(scope) {
17458
17588
  const dir = getMemoriesDir(scope);
17459
- const indexPath = path23.join(dir, "index.json");
17589
+ const indexPath = path25.join(dir, "index.json");
17460
17590
  try {
17461
- const content = await fs23.readFile(indexPath, "utf-8");
17591
+ const content = await fs24.readFile(indexPath, "utf-8");
17462
17592
  return JSON.parse(content);
17463
17593
  } catch {
17464
17594
  return [];
@@ -17467,14 +17597,14 @@ async function loadIndex(scope) {
17467
17597
  async function saveIndex(scope, index) {
17468
17598
  const dir = getMemoriesDir(scope);
17469
17599
  await ensureDir(dir);
17470
- const indexPath = path23.join(dir, "index.json");
17471
- await fs23.writeFile(indexPath, JSON.stringify(index, null, 2), "utf-8");
17600
+ const indexPath = path25.join(dir, "index.json");
17601
+ await fs24.writeFile(indexPath, JSON.stringify(index, null, 2), "utf-8");
17472
17602
  }
17473
17603
  async function loadMemory(scope, id) {
17474
17604
  const dir = getMemoriesDir(scope);
17475
- const memPath = path23.join(dir, `${id}.json`);
17605
+ const memPath = path25.join(dir, `${id}.json`);
17476
17606
  try {
17477
- const content = await fs23.readFile(memPath, "utf-8");
17607
+ const content = await fs24.readFile(memPath, "utf-8");
17478
17608
  return JSON.parse(content);
17479
17609
  } catch {
17480
17610
  return null;
@@ -17483,8 +17613,8 @@ async function loadMemory(scope, id) {
17483
17613
  async function saveMemory(scope, memory) {
17484
17614
  const dir = getMemoriesDir(scope);
17485
17615
  await ensureDir(dir);
17486
- const memPath = path23.join(dir, `${memory.id}.json`);
17487
- await fs23.writeFile(memPath, JSON.stringify(memory, null, 2), "utf-8");
17616
+ const memPath = path25.join(dir, `${memory.id}.json`);
17617
+ await fs24.writeFile(memPath, JSON.stringify(memory, null, 2), "utf-8");
17488
17618
  }
17489
17619
  var createMemoryTool = defineTool({
17490
17620
  name: "create_memory",
@@ -17526,7 +17656,7 @@ Examples:
17526
17656
  { tool: "create_memory" }
17527
17657
  );
17528
17658
  }
17529
- const id = crypto2.randomUUID();
17659
+ const id = crypto3.randomUUID();
17530
17660
  const memory = {
17531
17661
  id,
17532
17662
  key,
@@ -17636,17 +17766,17 @@ Examples:
17636
17766
  }
17637
17767
  });
17638
17768
  var memoryTools = [createMemoryTool, recallMemoryTool, listMemoriesTool];
17639
- var fs24 = await import('fs/promises');
17640
- var crypto3 = await import('crypto');
17769
+ var fs25 = await import('fs/promises');
17770
+ var crypto4 = await import('crypto');
17641
17771
  var CHECKPOINT_FILE = ".coco/checkpoints.json";
17642
17772
  var DEFAULT_MAX_CHECKPOINTS = 50;
17643
17773
  var STASH_PREFIX = "coco-cp";
17644
17774
  async function ensureCocoDir() {
17645
- await fs24.mkdir(".coco", { recursive: true });
17775
+ await fs25.mkdir(".coco", { recursive: true });
17646
17776
  }
17647
17777
  async function loadCheckpoints() {
17648
17778
  try {
17649
- const content = await fs24.readFile(CHECKPOINT_FILE, "utf-8");
17779
+ const content = await fs25.readFile(CHECKPOINT_FILE, "utf-8");
17650
17780
  return JSON.parse(content);
17651
17781
  } catch {
17652
17782
  return [];
@@ -17654,7 +17784,7 @@ async function loadCheckpoints() {
17654
17784
  }
17655
17785
  async function saveCheckpoints(checkpoints) {
17656
17786
  await ensureCocoDir();
17657
- await fs24.writeFile(CHECKPOINT_FILE, JSON.stringify(checkpoints, null, 2), "utf-8");
17787
+ await fs25.writeFile(CHECKPOINT_FILE, JSON.stringify(checkpoints, null, 2), "utf-8");
17658
17788
  }
17659
17789
  async function execGit(args) {
17660
17790
  const { execaCommand } = await import('execa');
@@ -17692,7 +17822,7 @@ Examples:
17692
17822
  description: z.string().min(1).max(200).describe("Description of this checkpoint")
17693
17823
  }),
17694
17824
  async execute({ description }) {
17695
- const id = crypto3.randomUUID().slice(0, 8);
17825
+ const id = crypto4.randomUUID().slice(0, 8);
17696
17826
  const timestamp = (/* @__PURE__ */ new Date()).toISOString();
17697
17827
  const stashMessage = `${STASH_PREFIX}-${id}-${description.replace(/\s+/g, "-").slice(0, 50)}`;
17698
17828
  const changedFiles = await getChangedFiles();
@@ -17814,8 +17944,8 @@ Examples:
17814
17944
  }
17815
17945
  });
17816
17946
  var checkpointTools = [createCheckpointTool, restoreCheckpointTool, listCheckpointsTool];
17817
- var fs25 = await import('fs/promises');
17818
- var path24 = await import('path');
17947
+ var fs26 = await import('fs/promises');
17948
+ var path26 = await import('path');
17819
17949
  var { glob: glob13 } = await import('glob');
17820
17950
  var INDEX_DIR = ".coco/search-index";
17821
17951
  var DEFAULT_CHUNK_SIZE = 20;
@@ -17941,20 +18071,20 @@ async function getEmbedding(text) {
17941
18071
  }
17942
18072
  async function loadIndex2(indexDir) {
17943
18073
  try {
17944
- const indexPath = path24.join(indexDir, "index.json");
17945
- const content = await fs25.readFile(indexPath, "utf-8");
18074
+ const indexPath = path26.join(indexDir, "index.json");
18075
+ const content = await fs26.readFile(indexPath, "utf-8");
17946
18076
  return JSON.parse(content);
17947
18077
  } catch {
17948
18078
  return null;
17949
18079
  }
17950
18080
  }
17951
18081
  async function saveIndex2(indexDir, index) {
17952
- await fs25.mkdir(indexDir, { recursive: true });
17953
- const indexPath = path24.join(indexDir, "index.json");
17954
- await fs25.writeFile(indexPath, JSON.stringify(index), "utf-8");
18082
+ await fs26.mkdir(indexDir, { recursive: true });
18083
+ const indexPath = path26.join(indexDir, "index.json");
18084
+ await fs26.writeFile(indexPath, JSON.stringify(index), "utf-8");
17955
18085
  }
17956
18086
  function isBinary(filePath) {
17957
- return BINARY_EXTENSIONS.has(path24.extname(filePath).toLowerCase());
18087
+ return BINARY_EXTENSIONS.has(path26.extname(filePath).toLowerCase());
17958
18088
  }
17959
18089
  var semanticSearchTool = defineTool({
17960
18090
  name: "semantic_search",
@@ -17979,8 +18109,8 @@ Examples:
17979
18109
  const effectivePath = rootPath ?? ".";
17980
18110
  const effectiveMaxResults = maxResults ?? 10;
17981
18111
  const effectiveThreshold = threshold ?? 0.3;
17982
- const absPath = path24.resolve(effectivePath);
17983
- const indexDir = path24.join(absPath, INDEX_DIR);
18112
+ const absPath = path26.resolve(effectivePath);
18113
+ const indexDir = path26.join(absPath, INDEX_DIR);
17984
18114
  let index = reindex ? null : await loadIndex2(indexDir);
17985
18115
  if (!index) {
17986
18116
  const pattern = include ?? "**/*";
@@ -17993,10 +18123,10 @@ Examples:
17993
18123
  const chunks = [];
17994
18124
  for (const file of files) {
17995
18125
  if (isBinary(file)) continue;
17996
- const fullPath = path24.join(absPath, file);
18126
+ const fullPath = path26.join(absPath, file);
17997
18127
  try {
17998
- const stat2 = await fs25.stat(fullPath);
17999
- const content = await fs25.readFile(fullPath, "utf-8");
18128
+ const stat2 = await fs26.stat(fullPath);
18129
+ const content = await fs26.readFile(fullPath, "utf-8");
18000
18130
  if (content.length > 1e5) continue;
18001
18131
  const fileChunks = chunkContent(content, DEFAULT_CHUNK_SIZE);
18002
18132
  for (const chunk of fileChunks) {
@@ -18055,8 +18185,8 @@ Examples:
18055
18185
  }
18056
18186
  });
18057
18187
  var semanticSearchTools = [semanticSearchTool];
18058
- var fs26 = await import('fs/promises');
18059
- var path25 = await import('path');
18188
+ var fs27 = await import('fs/promises');
18189
+ var path27 = await import('path');
18060
18190
  var { glob: glob14 } = await import('glob');
18061
18191
  async function parseClassRelationships(rootPath, include) {
18062
18192
  const pattern = include ?? "**/*.{ts,tsx,js,jsx}";
@@ -18069,7 +18199,7 @@ async function parseClassRelationships(rootPath, include) {
18069
18199
  const interfaces = [];
18070
18200
  for (const file of files.slice(0, 100)) {
18071
18201
  try {
18072
- const content = await fs26.readFile(path25.join(rootPath, file), "utf-8");
18202
+ const content = await fs27.readFile(path27.join(rootPath, file), "utf-8");
18073
18203
  const lines = content.split("\n");
18074
18204
  for (let i = 0; i < lines.length; i++) {
18075
18205
  const line = lines[i];
@@ -18188,14 +18318,14 @@ async function generateClassDiagram(rootPath, include) {
18188
18318
  };
18189
18319
  }
18190
18320
  async function generateArchitectureDiagram(rootPath) {
18191
- const entries = await fs26.readdir(rootPath, { withFileTypes: true });
18321
+ const entries = await fs27.readdir(rootPath, { withFileTypes: true });
18192
18322
  const dirs = entries.filter(
18193
18323
  (e) => e.isDirectory() && !e.name.startsWith(".") && !["node_modules", "dist", "build", "coverage", "__pycache__", "target"].includes(e.name)
18194
18324
  );
18195
18325
  const lines = ["graph TD"];
18196
18326
  let nodeCount = 0;
18197
18327
  let edgeCount = 0;
18198
- const rootName = path25.basename(rootPath);
18328
+ const rootName = path27.basename(rootPath);
18199
18329
  lines.push(` ROOT["${rootName}"]`);
18200
18330
  nodeCount++;
18201
18331
  for (const dir of dirs) {
@@ -18205,7 +18335,7 @@ async function generateArchitectureDiagram(rootPath) {
18205
18335
  nodeCount++;
18206
18336
  edgeCount++;
18207
18337
  try {
18208
- const subEntries = await fs26.readdir(path25.join(rootPath, dir.name), {
18338
+ const subEntries = await fs27.readdir(path27.join(rootPath, dir.name), {
18209
18339
  withFileTypes: true
18210
18340
  });
18211
18341
  const subDirs = subEntries.filter(
@@ -18328,7 +18458,7 @@ Examples:
18328
18458
  tool: "generate_diagram"
18329
18459
  });
18330
18460
  }
18331
- const absPath = rootPath ? path25.resolve(rootPath) : process.cwd();
18461
+ const absPath = rootPath ? path27.resolve(rootPath) : process.cwd();
18332
18462
  switch (type) {
18333
18463
  case "class":
18334
18464
  return generateClassDiagram(absPath, include);
@@ -18389,8 +18519,8 @@ Examples:
18389
18519
  }
18390
18520
  });
18391
18521
  var diagramTools = [generateDiagramTool];
18392
- var fs27 = await import('fs/promises');
18393
- var path26 = await import('path');
18522
+ var fs28 = await import('fs/promises');
18523
+ var path28 = await import('path');
18394
18524
  var DEFAULT_MAX_PAGES = 20;
18395
18525
  var MAX_FILE_SIZE = 50 * 1024 * 1024;
18396
18526
  function parsePageRange(rangeStr, totalPages) {
@@ -18425,9 +18555,9 @@ Examples:
18425
18555
  }),
18426
18556
  async execute({ path: filePath, pages, maxPages }) {
18427
18557
  const startTime = performance.now();
18428
- const absPath = path26.resolve(filePath);
18558
+ const absPath = path28.resolve(filePath);
18429
18559
  try {
18430
- const stat2 = await fs27.stat(absPath);
18560
+ const stat2 = await fs28.stat(absPath);
18431
18561
  if (!stat2.isFile()) {
18432
18562
  throw new ToolError(`Path is not a file: ${absPath}`, {
18433
18563
  tool: "read_pdf"
@@ -18458,7 +18588,7 @@ Examples:
18458
18588
  }
18459
18589
  try {
18460
18590
  const pdfParse = await import('pdf-parse');
18461
- const dataBuffer = await fs27.readFile(absPath);
18591
+ const dataBuffer = await fs28.readFile(absPath);
18462
18592
  const pdfData = await pdfParse.default(dataBuffer, {
18463
18593
  max: maxPages
18464
18594
  });
@@ -18504,8 +18634,8 @@ Examples:
18504
18634
  }
18505
18635
  });
18506
18636
  var pdfTools = [readPdfTool];
18507
- var fs28 = await import('fs/promises');
18508
- var path27 = await import('path');
18637
+ var fs29 = await import('fs/promises');
18638
+ var path29 = await import('path');
18509
18639
  var SUPPORTED_FORMATS = /* @__PURE__ */ new Set([".png", ".jpg", ".jpeg", ".gif", ".webp", ".bmp"]);
18510
18640
  var MAX_IMAGE_SIZE = 20 * 1024 * 1024;
18511
18641
  var MIME_TYPES = {
@@ -18533,15 +18663,15 @@ Examples:
18533
18663
  async execute({ path: filePath, prompt, provider }) {
18534
18664
  const startTime = performance.now();
18535
18665
  const effectivePrompt = prompt ?? "Describe this image in detail. If it's code or a UI, identify the key elements.";
18536
- const absPath = path27.resolve(filePath);
18666
+ const absPath = path29.resolve(filePath);
18537
18667
  const cwd = process.cwd();
18538
- if (!absPath.startsWith(cwd + path27.sep) && absPath !== cwd) {
18668
+ if (!absPath.startsWith(cwd + path29.sep) && absPath !== cwd) {
18539
18669
  throw new ToolError(
18540
18670
  `Path traversal denied: '${filePath}' resolves outside the project directory`,
18541
18671
  { tool: "read_image" }
18542
18672
  );
18543
18673
  }
18544
- const ext = path27.extname(absPath).toLowerCase();
18674
+ const ext = path29.extname(absPath).toLowerCase();
18545
18675
  if (!SUPPORTED_FORMATS.has(ext)) {
18546
18676
  throw new ToolError(
18547
18677
  `Unsupported image format '${ext}'. Supported: ${Array.from(SUPPORTED_FORMATS).join(", ")}`,
@@ -18549,7 +18679,7 @@ Examples:
18549
18679
  );
18550
18680
  }
18551
18681
  try {
18552
- const stat2 = await fs28.stat(absPath);
18682
+ const stat2 = await fs29.stat(absPath);
18553
18683
  if (!stat2.isFile()) {
18554
18684
  throw new ToolError(`Path is not a file: ${absPath}`, {
18555
18685
  tool: "read_image"
@@ -18570,7 +18700,7 @@ Examples:
18570
18700
  if (error instanceof ToolError) throw error;
18571
18701
  throw error;
18572
18702
  }
18573
- const imageBuffer = await fs28.readFile(absPath);
18703
+ const imageBuffer = await fs29.readFile(absPath);
18574
18704
  const base64 = imageBuffer.toString("base64");
18575
18705
  const mimeType = MIME_TYPES[ext] ?? "image/png";
18576
18706
  const selectedProvider = provider ?? "anthropic";
@@ -18683,7 +18813,7 @@ Examples:
18683
18813
  }
18684
18814
  });
18685
18815
  var imageTools = [readImageTool];
18686
- var path28 = await import('path');
18816
+ var path30 = await import('path');
18687
18817
  var DANGEROUS_PATTERNS2 = [
18688
18818
  /\bDROP\s+(?:TABLE|DATABASE|INDEX|VIEW)\b/i,
18689
18819
  /\bTRUNCATE\b/i,
@@ -18714,7 +18844,7 @@ Examples:
18714
18844
  async execute({ database, query, params, readonly: isReadonlyParam }) {
18715
18845
  const isReadonly = isReadonlyParam ?? true;
18716
18846
  const startTime = performance.now();
18717
- const absPath = path28.resolve(database);
18847
+ const absPath = path30.resolve(database);
18718
18848
  if (isReadonly && isDangerousSql(query)) {
18719
18849
  throw new ToolError(
18720
18850
  "Write operations (INSERT, UPDATE, DELETE, DROP, ALTER, TRUNCATE, CREATE) are blocked in readonly mode. Set readonly: false to allow writes.",
@@ -18787,7 +18917,7 @@ Examples:
18787
18917
  }),
18788
18918
  async execute({ database, table }) {
18789
18919
  const startTime = performance.now();
18790
- const absPath = path28.resolve(database);
18920
+ const absPath = path30.resolve(database);
18791
18921
  try {
18792
18922
  const { default: Database } = await import('better-sqlite3');
18793
18923
  const db = new Database(absPath, { readonly: true, fileMustExist: true });
@@ -18840,14 +18970,14 @@ Examples:
18840
18970
  }
18841
18971
  });
18842
18972
  var databaseTools = [sqlQueryTool, inspectSchemaTool];
18843
- var fs29 = await import('fs/promises');
18844
- var path29 = await import('path');
18973
+ var fs30 = await import('fs/promises');
18974
+ var path31 = await import('path');
18845
18975
  var AnalyzeFileSchema = z.object({
18846
18976
  filePath: z.string().describe("Path to file to analyze"),
18847
18977
  includeAst: z.boolean().default(false).describe("Include AST in result")
18848
18978
  });
18849
18979
  async function analyzeFile(filePath, includeAst = false) {
18850
- const content = await fs29.readFile(filePath, "utf-8");
18980
+ const content = await fs30.readFile(filePath, "utf-8");
18851
18981
  const lines = content.split("\n").length;
18852
18982
  const functions = [];
18853
18983
  const classes = [];
@@ -18951,10 +19081,10 @@ async function analyzeDirectory(dirPath) {
18951
19081
  try {
18952
19082
  const analysis = await analyzeFile(file, false);
18953
19083
  totalLines += analysis.lines;
18954
- const ext = path29.extname(file);
19084
+ const ext = path31.extname(file);
18955
19085
  filesByType[ext] = (filesByType[ext] || 0) + 1;
18956
19086
  fileStats.push({
18957
- file: path29.relative(dirPath, file),
19087
+ file: path31.relative(dirPath, file),
18958
19088
  lines: analysis.lines,
18959
19089
  complexity: analysis.complexity.cyclomatic
18960
19090
  });
@@ -19277,13 +19407,13 @@ ${completed.map((r) => `- ${r.agentId}: Success`).join("\n")}`;
19277
19407
  }
19278
19408
  });
19279
19409
  var agentCoordinatorTools = [createAgentPlanTool, delegateTaskTool, aggregateResultsTool];
19280
- var fs30 = await import('fs/promises');
19410
+ var fs31 = await import('fs/promises');
19281
19411
  var SuggestImprovementsSchema = z.object({
19282
19412
  filePath: z.string().describe("File to analyze for improvement suggestions"),
19283
19413
  context: z.string().optional().describe("Additional context about the code")
19284
19414
  });
19285
19415
  async function analyzeAndSuggest(filePath, _context) {
19286
- const content = await fs30.readFile(filePath, "utf-8");
19416
+ const content = await fs31.readFile(filePath, "utf-8");
19287
19417
  const lines = content.split("\n");
19288
19418
  const suggestions = [];
19289
19419
  for (let i = 0; i < lines.length; i++) {
@@ -19375,7 +19505,7 @@ async function analyzeAndSuggest(filePath, _context) {
19375
19505
  if (filePath.endsWith(".ts") && !filePath.includes("test") && !filePath.includes(".d.ts") && line.includes("export ")) {
19376
19506
  const testPath = filePath.replace(".ts", ".test.ts");
19377
19507
  try {
19378
- await fs30.access(testPath);
19508
+ await fs31.access(testPath);
19379
19509
  } catch {
19380
19510
  suggestions.push({
19381
19511
  type: "testing",
@@ -19432,7 +19562,7 @@ var calculateCodeScoreTool = defineTool({
19432
19562
  async execute(input) {
19433
19563
  const { filePath } = input;
19434
19564
  const suggestions = await analyzeAndSuggest(filePath);
19435
- const content = await fs30.readFile(filePath, "utf-8");
19565
+ const content = await fs31.readFile(filePath, "utf-8");
19436
19566
  const lines = content.split("\n");
19437
19567
  const nonEmptyLines = lines.filter((l) => l.trim()).length;
19438
19568
  let score = 100;
@@ -19466,8 +19596,8 @@ var calculateCodeScoreTool = defineTool({
19466
19596
  }
19467
19597
  });
19468
19598
  var smartSuggestionsTools = [suggestImprovementsTool, calculateCodeScoreTool];
19469
- var fs31 = await import('fs/promises');
19470
- var path30 = await import('path');
19599
+ var fs32 = await import('fs/promises');
19600
+ var path32 = await import('path');
19471
19601
  var ContextMemoryStore = class {
19472
19602
  items = /* @__PURE__ */ new Map();
19473
19603
  learnings = /* @__PURE__ */ new Map();
@@ -19479,7 +19609,7 @@ var ContextMemoryStore = class {
19479
19609
  }
19480
19610
  async load() {
19481
19611
  try {
19482
- const content = await fs31.readFile(this.storePath, "utf-8");
19612
+ const content = await fs32.readFile(this.storePath, "utf-8");
19483
19613
  const data = JSON.parse(content);
19484
19614
  this.items = new Map(Object.entries(data.items || {}));
19485
19615
  this.learnings = new Map(Object.entries(data.learnings || {}));
@@ -19487,15 +19617,15 @@ var ContextMemoryStore = class {
19487
19617
  }
19488
19618
  }
19489
19619
  async save() {
19490
- const dir = path30.dirname(this.storePath);
19491
- await fs31.mkdir(dir, { recursive: true });
19620
+ const dir = path32.dirname(this.storePath);
19621
+ await fs32.mkdir(dir, { recursive: true });
19492
19622
  const data = {
19493
19623
  sessionId: this.sessionId,
19494
19624
  items: Object.fromEntries(this.items),
19495
19625
  learnings: Object.fromEntries(this.learnings),
19496
19626
  savedAt: Date.now()
19497
19627
  };
19498
- await fs31.writeFile(this.storePath, JSON.stringify(data, null, 2));
19628
+ await fs32.writeFile(this.storePath, JSON.stringify(data, null, 2));
19499
19629
  }
19500
19630
  addContext(id, item) {
19501
19631
  this.items.set(id, item);
@@ -19645,11 +19775,11 @@ var getLearnedPatternsTool = defineTool({
19645
19775
  const patterns = store.getFrequentPatterns(typedInput.limit);
19646
19776
  return {
19647
19777
  totalPatterns: patterns.length,
19648
- patterns: patterns.map((p3) => ({
19649
- pattern: p3.pattern,
19650
- preference: p3.userPreference,
19651
- frequency: p3.frequency,
19652
- lastUsed: new Date(p3.lastUsed).toISOString()
19778
+ patterns: patterns.map((p4) => ({
19779
+ pattern: p4.pattern,
19780
+ preference: p4.userPreference,
19781
+ frequency: p4.frequency,
19782
+ lastUsed: new Date(p4.lastUsed).toISOString()
19653
19783
  }))
19654
19784
  };
19655
19785
  }
@@ -19660,11 +19790,11 @@ var contextEnhancerTools = [
19660
19790
  recordLearningTool,
19661
19791
  getLearnedPatternsTool
19662
19792
  ];
19663
- var fs32 = await import('fs/promises');
19664
- var path31 = await import('path');
19793
+ var fs33 = await import('fs/promises');
19794
+ var path33 = await import('path');
19665
19795
  async function discoverSkills(skillsDir) {
19666
19796
  try {
19667
- const files = await fs32.readdir(skillsDir);
19797
+ const files = await fs33.readdir(skillsDir);
19668
19798
  return files.filter((f) => f.endsWith(".ts") || f.endsWith(".js"));
19669
19799
  } catch {
19670
19800
  return [];
@@ -19672,12 +19802,12 @@ async function discoverSkills(skillsDir) {
19672
19802
  }
19673
19803
  async function loadSkillMetadata(skillPath) {
19674
19804
  try {
19675
- const content = await fs32.readFile(skillPath, "utf-8");
19805
+ const content = await fs33.readFile(skillPath, "utf-8");
19676
19806
  const nameMatch = content.match(/@name\s+(\S+)/);
19677
19807
  const descMatch = content.match(/@description\s+(.+)/);
19678
19808
  const versionMatch = content.match(/@version\s+(\S+)/);
19679
19809
  return {
19680
- name: nameMatch?.[1] || path31.basename(skillPath, path31.extname(skillPath)),
19810
+ name: nameMatch?.[1] || path33.basename(skillPath, path33.extname(skillPath)),
19681
19811
  description: descMatch?.[1] || "No description",
19682
19812
  version: versionMatch?.[1] || "1.0.0",
19683
19813
  dependencies: []
@@ -19721,7 +19851,7 @@ var discoverSkillsTool = defineTool({
19721
19851
  const { skillsDir } = input;
19722
19852
  const skills = await discoverSkills(skillsDir);
19723
19853
  const metadata = await Promise.all(
19724
- skills.map((s) => loadSkillMetadata(path31.join(skillsDir, s)))
19854
+ skills.map((s) => loadSkillMetadata(path33.join(skillsDir, s)))
19725
19855
  );
19726
19856
  return {
19727
19857
  skillsDir,
@@ -19844,14 +19974,17 @@ export const ${typedInput.name}Tool = defineTool({
  }
  });
  var skillEnhancerTools = [discoverSkillsTool, validateSkillTool, createCustomToolTool];
+ function gitExec(cmd, opts = {}) {
+ return execSync(cmd, { encoding: "utf-8", cwd: process.cwd(), ...opts });
+ }
  function analyzeRepoHealth() {
  const issues = [];
  const recommendations = [];
  let score = 100;
  try {
  try {
- execSync("git status --porcelain", { stdio: "pipe" });
- const status = execSync("git status --porcelain", { encoding: "utf-8" });
+ gitExec("git status --porcelain", { stdio: "pipe" });
+ const status = gitExec("git status --porcelain");
  if (status.trim()) {
  issues.push("Uncommitted changes present");
  score -= 10;
@@ -19859,7 +19992,7 @@ function analyzeRepoHealth() {
  } catch {
  }
  try {
- const untracked = execSync("git ls-files --others --exclude-standard", { encoding: "utf-8" });
+ const untracked = gitExec("git ls-files --others --exclude-standard");
  if (untracked.trim()) {
  const count = untracked.trim().split("\n").length;
  issues.push(`${count} untracked files`);
@@ -19869,9 +20002,9 @@ function analyzeRepoHealth() {
  } catch {
  }
  try {
- const branch = execSync("git rev-parse --abbrev-ref HEAD", { encoding: "utf-8" }).trim();
- const local = execSync("git rev-parse HEAD", { encoding: "utf-8" }).trim();
- const remote = execSync(`git rev-parse origin/${branch}`, { encoding: "utf-8" }).trim();
+ const branch = gitExec("git rev-parse --abbrev-ref HEAD").trim();
+ const local = gitExec("git rev-parse HEAD").trim();
+ const remote = gitExec(`git rev-parse origin/${branch}`).trim();
  if (local !== remote) {
  issues.push("Branch is not up-to-date with remote");
  score -= 15;
@@ -19880,14 +20013,14 @@ function analyzeRepoHealth() {
  } catch {
  }
  try {
- const files = execSync("git ls-files", { encoding: "utf-8" }).trim().split("\n");
+ const files = gitExec("git ls-files").trim().split("\n");
  if (files.length > 1e3) {
  recommendations.push("Repository has many files, consider using .gitignore");
  }
  } catch {
  }
  try {
- const conflicts = execSync("git diff --name-only --diff-filter=U", { encoding: "utf-8" });
+ const conflicts = gitExec("git diff --name-only --diff-filter=U");
  if (conflicts.trim()) {
  issues.push("Merge conflicts present");
  score -= 30;
@@ -19903,12 +20036,11 @@ function analyzeRepoHealth() {
  }
  }
  function getCommitStats() {
  try {
- const count = execSync("git rev-list --count HEAD", { encoding: "utf-8" }).trim();
- const authors = execSync('git log --format="%an" | sort -u', {
- encoding: "utf-8",
+ const count = gitExec("git rev-list --count HEAD").trim();
+ const authors = gitExec('git log --format="%an" | sort -u', {
  shell: "/bin/bash"
  }).trim().split("\n");
- const lastCommit = execSync('git log -1 --format="%cr"', { encoding: "utf-8" }).trim();
+ const lastCommit = gitExec('git log -1 --format="%cr"').trim();
  return {
  totalCommits: parseInt(count, 10),
  authors,
@@ -19981,7 +20113,7 @@ var recommendBranchTool = defineTool({
  const branchName = `${prefix}/${slug}`;
  let exists = false;
  try {
- execSync(`git rev-parse --verify ${branchName}`, { stdio: "ignore" });
+ execSync(`git rev-parse --verify ${branchName}`, { cwd: process.cwd(), stdio: "ignore" });
  exists = true;
  } catch {
  exists = false;
@@ -19996,6 +20128,104 @@ var recommendBranchTool = defineTool({
  }
  });
  var gitEnhancedTools = [analyzeRepoHealthTool, getCommitStatsTool, recommendBranchTool];
+ init_allowed_paths();
+ var BLOCKED_SYSTEM_PATHS = [
+ "/etc",
+ "/var",
+ "/usr",
+ "/root",
+ "/sys",
+ "/proc",
+ "/boot",
+ "/bin",
+ "/sbin"
+ ];
+ var authorizePathTool = defineTool({
+ name: "authorize_path",
+ description: `Request user permission to access a directory outside the project root.
+
+ Use this BEFORE attempting file operations on external directories. The user will see
+ an interactive prompt where they choose to allow or deny access.
+
+ Returns whether the path was authorized. If authorized, subsequent file operations
+ on that directory will succeed.
+
+ Examples:
+ - Need to read config from another project: authorize_path({ path: "/home/user/other-project" })
+ - Need to access shared libraries: authorize_path({ path: "/opt/shared/libs", reason: "Read shared type definitions" })`,
+ category: "config",
+ parameters: z.object({
+ path: z.string().min(1).describe("Absolute path to the directory to authorize"),
+ reason: z.string().optional().describe("Why access is needed (shown to user for context)")
+ }),
+ async execute({ path: dirPath, reason }) {
+ const absolute = path14__default.resolve(dirPath);
+ if (isWithinAllowedPath(absolute, "read")) {
+ return {
+ authorized: true,
+ path: absolute,
+ message: "Path is already authorized."
+ };
+ }
+ for (const blocked of BLOCKED_SYSTEM_PATHS) {
+ const normalizedBlocked = path14__default.normalize(blocked);
+ if (absolute === normalizedBlocked || absolute.startsWith(normalizedBlocked + path14__default.sep)) {
+ return {
+ authorized: false,
+ path: absolute,
+ message: `System path '${blocked}' cannot be authorized for security reasons.`
+ };
+ }
+ }
+ const cwd = process.cwd();
+ if (absolute === path14__default.normalize(cwd) || absolute.startsWith(path14__default.normalize(cwd) + path14__default.sep)) {
+ return {
+ authorized: true,
+ path: absolute,
+ message: "Path is within the project directory \u2014 already accessible."
+ };
+ }
+ try {
+ const stat2 = await fs14__default.stat(absolute);
+ if (!stat2.isDirectory()) {
+ return {
+ authorized: false,
+ path: absolute,
+ message: `Not a directory: ${absolute}`
+ };
+ }
+ } catch {
+ return {
+ authorized: false,
+ path: absolute,
+ message: `Directory not found: ${absolute}`
+ };
+ }
+ const existing = getAllowedPaths();
+ if (existing.some((e) => path14__default.normalize(e.path) === path14__default.normalize(absolute))) {
+ return {
+ authorized: true,
+ path: absolute,
+ message: "Path is already authorized."
+ };
+ }
+ const { promptAllowPath: promptAllowPath2 } = await Promise.resolve().then(() => (init_allow_path_prompt(), allow_path_prompt_exports));
+ const wasAuthorized = await promptAllowPath2(absolute);
+ if (wasAuthorized) {
+ return {
+ authorized: true,
+ path: absolute,
+ message: `Access granted to ${absolute}.${reason ? ` Reason: ${reason}` : ""}`
+ };
+ }
+ return {
+ authorized: false,
+ path: absolute,
+ message: "User denied access to this directory."
+ };
+ }
+ });
+ var authorizePathTools = [authorizePathTool];

  // src/tools/index.ts
  function registerAllTools(registry) {
@@ -20028,7 +20258,8 @@ function registerAllTools(registry) {
  ...smartSuggestionsTools,
  ...contextEnhancerTools,
  ...skillEnhancerTools,
- ...gitEnhancedTools
+ ...gitEnhancedTools,
+ ...authorizePathTools
  ];
  for (const tool of allTools) {
  registry.register(tool);