@corbat-tech/coco 1.2.2 → 1.3.0

This diff shows the changes between two publicly released versions of the package, as they appear in their respective public registries. It is provided for informational purposes only.
package/dist/index.js CHANGED
@@ -3,10 +3,12 @@ import * as path14 from 'path';
3
3
  import path14__default, { dirname, join, basename } from 'path';
4
4
  import * as fs4 from 'fs';
5
5
  import fs4__default, { readFileSync, constants } from 'fs';
6
- import { fileURLToPath } from 'url';
7
- import { randomUUID } from 'crypto';
8
6
  import * as fs14 from 'fs/promises';
9
7
  import fs14__default, { readFile, access, readdir } from 'fs/promises';
8
+ import chalk3 from 'chalk';
9
+ import * as p3 from '@clack/prompts';
10
+ import { fileURLToPath } from 'url';
11
+ import { randomUUID } from 'crypto';
10
12
  import { execa } from 'execa';
11
13
  import { parse } from '@typescript-eslint/typescript-estree';
12
14
  import { glob } from 'glob';
@@ -17,8 +19,6 @@ import { Logger } from 'tslog';
17
19
  import Anthropic from '@anthropic-ai/sdk';
18
20
  import OpenAI from 'openai';
19
21
  import 'http';
20
- import '@clack/prompts';
21
- import chalk3 from 'chalk';
22
22
  import { GoogleGenerativeAI, FunctionCallingMode } from '@google/generative-ai';
23
23
  import JSON5 from 'json5';
24
24
  import 'events';
@@ -38,10 +38,15 @@ import typescript from 'highlight.js/lib/languages/typescript';
38
38
  import xml from 'highlight.js/lib/languages/xml';
39
39
  import yaml from 'highlight.js/lib/languages/yaml';
40
40
 
41
+ var __defProp = Object.defineProperty;
41
42
  var __getOwnPropNames = Object.getOwnPropertyNames;
42
43
  var __esm = (fn, res) => function __init() {
43
44
  return fn && (res = (0, fn[__getOwnPropNames(fn)[0]])(fn = 0)), res;
44
45
  };
46
+ var __export = (target, all) => {
47
+ for (var name in all)
48
+ __defProp(target, name, { get: all[name], enumerable: true });
49
+ };
45
50
  var COCO_HOME, CONFIG_PATHS;
46
51
  var init_paths = __esm({
47
52
  "src/config/paths.ts"() {
@@ -112,6 +117,8 @@ function getApiKey(provider) {
112
117
  return process.env["KIMI_API_KEY"] ?? process.env["MOONSHOT_API_KEY"];
113
118
  case "lmstudio":
114
119
  return process.env["LMSTUDIO_API_KEY"] ?? "lm-studio";
120
+ case "ollama":
121
+ return process.env["OLLAMA_API_KEY"] ?? "ollama";
115
122
  case "codex":
116
123
  return void 0;
117
124
  default:
@@ -128,6 +135,8 @@ function getBaseUrl(provider) {
128
135
  return process.env["KIMI_BASE_URL"] ?? "https://api.moonshot.ai/v1";
129
136
  case "lmstudio":
130
137
  return process.env["LMSTUDIO_BASE_URL"] ?? "http://localhost:1234/v1";
138
+ case "ollama":
139
+ return process.env["OLLAMA_BASE_URL"] ?? "http://localhost:11434/v1";
131
140
  case "codex":
132
141
  return "https://chatgpt.com/backend-api/codex/responses";
133
142
  default:
@@ -139,22 +148,24 @@ function getDefaultModel(provider) {
139
148
  case "anthropic":
140
149
  return process.env["ANTHROPIC_MODEL"] ?? "claude-opus-4-6-20260115";
141
150
  case "openai":
142
- return process.env["OPENAI_MODEL"] ?? "gpt-5.2-codex";
151
+ return process.env["OPENAI_MODEL"] ?? "gpt-5.3-codex";
143
152
  case "gemini":
144
153
  return process.env["GEMINI_MODEL"] ?? "gemini-3-flash-preview";
145
154
  case "kimi":
146
155
  return process.env["KIMI_MODEL"] ?? "kimi-k2.5";
147
156
  case "lmstudio":
148
157
  return process.env["LMSTUDIO_MODEL"] ?? "local-model";
158
+ case "ollama":
159
+ return process.env["OLLAMA_MODEL"] ?? "llama3.1";
149
160
  case "codex":
150
- return process.env["CODEX_MODEL"] ?? "gpt-5.2-codex";
161
+ return process.env["CODEX_MODEL"] ?? "gpt-5.3-codex";
151
162
  default:
152
- return "gpt-5.2-codex";
163
+ return "gpt-5.3-codex";
153
164
  }
154
165
  }
155
166
  function getDefaultProvider() {
156
167
  const provider = process.env["COCO_PROVIDER"]?.toLowerCase();
157
- if (provider && ["anthropic", "openai", "codex", "gemini", "kimi", "lmstudio"].includes(provider)) {
168
+ if (provider && ["anthropic", "openai", "codex", "gemini", "kimi", "lmstudio", "ollama"].includes(provider)) {
158
169
  return provider;
159
170
  }
160
171
  return "anthropic";
@@ -171,6 +182,119 @@ var init_env = __esm({
171
182
  });
172
183
  }
173
184
  });
185
+ function getAllowedPaths() {
186
+ return [...sessionAllowedPaths];
187
+ }
188
+ function isWithinAllowedPath(absolutePath, operation) {
189
+ const normalizedTarget = path14__default.normalize(absolutePath);
190
+ for (const entry of sessionAllowedPaths) {
191
+ const normalizedAllowed = path14__default.normalize(entry.path);
192
+ if (normalizedTarget === normalizedAllowed || normalizedTarget.startsWith(normalizedAllowed + path14__default.sep)) {
193
+ if (operation === "read") return true;
194
+ if (entry.level === "write") return true;
195
+ }
196
+ }
197
+ return false;
198
+ }
199
+ function addAllowedPathToSession(dirPath, level) {
200
+ const absolute = path14__default.resolve(dirPath);
201
+ if (sessionAllowedPaths.some((e) => path14__default.normalize(e.path) === path14__default.normalize(absolute))) {
202
+ return;
203
+ }
204
+ sessionAllowedPaths.push({
205
+ path: absolute,
206
+ authorizedAt: (/* @__PURE__ */ new Date()).toISOString(),
207
+ level
208
+ });
209
+ }
210
+ async function persistAllowedPath(dirPath, level) {
211
+ if (!currentProjectPath) return;
212
+ const absolute = path14__default.resolve(dirPath);
213
+ const store = await loadStore();
214
+ if (!store.projects[currentProjectPath]) {
215
+ store.projects[currentProjectPath] = [];
216
+ }
217
+ const entries = store.projects[currentProjectPath];
218
+ const normalized = path14__default.normalize(absolute);
219
+ if (entries.some((e) => path14__default.normalize(e.path) === normalized)) {
220
+ return;
221
+ }
222
+ entries.push({
223
+ path: absolute,
224
+ authorizedAt: (/* @__PURE__ */ new Date()).toISOString(),
225
+ level
226
+ });
227
+ await saveStore(store);
228
+ }
229
+ async function loadStore() {
230
+ try {
231
+ const content = await fs14__default.readFile(STORE_FILE, "utf-8");
232
+ return { ...DEFAULT_STORE, ...JSON.parse(content) };
233
+ } catch {
234
+ return { ...DEFAULT_STORE };
235
+ }
236
+ }
237
+ async function saveStore(store) {
238
+ try {
239
+ await fs14__default.mkdir(path14__default.dirname(STORE_FILE), { recursive: true });
240
+ await fs14__default.writeFile(STORE_FILE, JSON.stringify(store, null, 2), "utf-8");
241
+ } catch {
242
+ }
243
+ }
244
+ var STORE_FILE, DEFAULT_STORE, sessionAllowedPaths, currentProjectPath;
245
+ var init_allowed_paths = __esm({
246
+ "src/tools/allowed-paths.ts"() {
247
+ init_paths();
248
+ STORE_FILE = path14__default.join(CONFIG_PATHS.home, "allowed-paths.json");
249
+ DEFAULT_STORE = {
250
+ version: 1,
251
+ projects: {}
252
+ };
253
+ sessionAllowedPaths = [];
254
+ currentProjectPath = "";
255
+ }
256
+ });
257
+
258
+ // src/cli/repl/allow-path-prompt.ts
259
+ var allow_path_prompt_exports = {};
260
+ __export(allow_path_prompt_exports, {
261
+ promptAllowPath: () => promptAllowPath
262
+ });
263
+ async function promptAllowPath(dirPath) {
264
+ const absolute = path14__default.resolve(dirPath);
265
+ console.log();
266
+ console.log(chalk3.yellow(" \u26A0 Access denied \u2014 path is outside the project directory"));
267
+ console.log(chalk3.dim(` \u{1F4C1} ${absolute}`));
268
+ console.log();
269
+ const action = await p3.select({
270
+ message: "Grant access to this directory?",
271
+ options: [
272
+ { value: "session-write", label: "\u2713 Allow write (this session)" },
273
+ { value: "session-read", label: "\u25D0 Allow read-only (this session)" },
274
+ { value: "persist-write", label: "\u26A1 Allow write (remember for this project)" },
275
+ { value: "persist-read", label: "\u{1F4BE} Allow read-only (remember for this project)" },
276
+ { value: "no", label: "\u2717 Deny" }
277
+ ]
278
+ });
279
+ if (p3.isCancel(action) || action === "no") {
280
+ return false;
281
+ }
282
+ const level = action.includes("read") ? "read" : "write";
283
+ const persist = action.startsWith("persist");
284
+ addAllowedPathToSession(absolute, level);
285
+ if (persist) {
286
+ await persistAllowedPath(absolute, level);
287
+ }
288
+ const levelLabel = level === "write" ? "write" : "read-only";
289
+ const persistLabel = persist ? " (remembered)" : "";
290
+ console.log(chalk3.green(` \u2713 Access granted: ${levelLabel}${persistLabel}`));
291
+ return true;
292
+ }
293
+ var init_allow_path_prompt = __esm({
294
+ "src/cli/repl/allow-path-prompt.ts"() {
295
+ init_allowed_paths();
296
+ }
297
+ });
174
298
  function findPackageJson() {
175
299
  let dir = dirname(fileURLToPath(import.meta.url));
176
300
  for (let i = 0; i < 10; i++) {
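
The allowed-paths module added above keeps a per-session list of authorized directories: any authorized entry satisfies read checks, while write checks additionally require the entry's level to be "write". A short sketch of that flow, with hypothetical directory names:

    // Hypothetical paths; behavior follows addAllowedPathToSession/isWithinAllowedPath above.
    addAllowedPathToSession("/home/user/shared-lib", "read");
    isWithinAllowedPath("/home/user/shared-lib/src/util.ts", "read");  // true  — prefix match on an authorized entry
    isWithinAllowedPath("/home/user/shared-lib/src/util.ts", "write"); // false — entry was granted read-only
    isWithinAllowedPath("/home/user/elsewhere/file.ts", "read");       // false — never authorized
    // persistAllowedPath() additionally records the grant for the current project in
    // allowed-paths.json under the coco config home (CONFIG_PATHS.home).
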
@@ -3776,10 +3900,10 @@ var CoverageAnalyzer = class {
3776
3900
  join(this.projectPath, ".coverage", "coverage-summary.json"),
3777
3901
  join(this.projectPath, "coverage", "lcov-report", "coverage-summary.json")
3778
3902
  ];
3779
- for (const path32 of possiblePaths) {
3903
+ for (const path37 of possiblePaths) {
3780
3904
  try {
3781
- await access(path32, constants.R_OK);
3782
- const content = await readFile(path32, "utf-8");
3905
+ await access(path37, constants.R_OK);
3906
+ const content = await readFile(path37, "utf-8");
3783
3907
  const report = JSON.parse(content);
3784
3908
  return parseCoverageSummary(report);
3785
3909
  } catch {
@@ -10208,7 +10332,7 @@ function createAnthropicProvider(config) {
10208
10332
  }
10209
10333
  return provider;
10210
10334
  }
10211
- var DEFAULT_MODEL2 = "gpt-5.2-codex";
10335
+ var DEFAULT_MODEL2 = "gpt-5.3-codex";
10212
10336
  var CONTEXT_WINDOWS2 = {
10213
10337
  // OpenAI models
10214
10338
  "gpt-4o": 128e3,
@@ -10219,6 +10343,10 @@ var CONTEXT_WINDOWS2 = {
10219
10343
  o1: 2e5,
10220
10344
  "o1-mini": 128e3,
10221
10345
  "o3-mini": 2e5,
10346
+ "o4-mini": 2e5,
10347
+ // GPT-4.1 series (Feb 2026)
10348
+ "gpt-4.1": 1048576,
10349
+ "gpt-4.1-mini": 1048576,
10222
10350
  // GPT-5 series (2025-2026)
10223
10351
  "gpt-5": 4e5,
10224
10352
  "gpt-5.2": 4e5,
@@ -10226,6 +10354,7 @@ var CONTEXT_WINDOWS2 = {
10226
10354
  "gpt-5.2-thinking": 4e5,
10227
10355
  "gpt-5.2-instant": 4e5,
10228
10356
  "gpt-5.2-pro": 4e5,
10357
+ "gpt-5.3-codex": 4e5,
10229
10358
  // Kimi/Moonshot models
10230
10359
  "kimi-k2.5": 262144,
10231
10360
  "kimi-k2-0324": 131072,
@@ -11021,10 +11150,11 @@ async function getCachedADCToken() {
11021
11150
 
11022
11151
  // src/providers/codex.ts
11023
11152
  var CODEX_API_ENDPOINT = "https://chatgpt.com/backend-api/codex/responses";
11024
- var DEFAULT_MODEL3 = "gpt-5.2-codex";
11153
+ var DEFAULT_MODEL3 = "gpt-5.3-codex";
11025
11154
  var CONTEXT_WINDOWS3 = {
11026
- "gpt-5-codex": 2e5,
11155
+ "gpt-5.3-codex": 2e5,
11027
11156
  "gpt-5.2-codex": 2e5,
11157
+ "gpt-5-codex": 2e5,
11028
11158
  "gpt-5.1-codex": 2e5,
11029
11159
  "gpt-5": 2e5,
11030
11160
  "gpt-5.2": 2e5,
@@ -11772,6 +11902,11 @@ async function createProvider(type, config = {}) {
11772
11902
  mergedConfig.baseUrl = mergedConfig.baseUrl ?? "http://localhost:1234/v1";
11773
11903
  mergedConfig.apiKey = mergedConfig.apiKey ?? "lm-studio";
11774
11904
  break;
11905
+ case "ollama":
11906
+ provider = new OpenAIProvider();
11907
+ mergedConfig.baseUrl = mergedConfig.baseUrl ?? "http://localhost:11434/v1";
11908
+ mergedConfig.apiKey = mergedConfig.apiKey ?? "ollama";
11909
+ break;
11775
11910
  default:
11776
11911
  throw new ProviderError(`Unknown provider type: ${type}`, {
11777
11912
  provider: type
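
As with "lmstudio", the new "ollama" case reuses the OpenAI-compatible provider and only swaps in local defaults. A hedged usage sketch, assuming a local Ollama server on the default port:

    // Sketch only — relies on the defaults added in the "ollama" case above.
    const provider = await createProvider("ollama", {
      model: "llama3.1"  // any model name served by the local Ollama instance
      // baseUrl defaults to "http://localhost:11434/v1", apiKey to the "ollama" placeholder
    });
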
@@ -11893,9 +12028,9 @@ function createInitialState(config) {
11893
12028
  }
11894
12029
  async function loadExistingState(projectPath) {
11895
12030
  try {
11896
- const fs33 = await import('fs/promises');
12031
+ const fs36 = await import('fs/promises');
11897
12032
  const statePath = `${projectPath}/.coco/state/project.json`;
11898
- const content = await fs33.readFile(statePath, "utf-8");
12033
+ const content = await fs36.readFile(statePath, "utf-8");
11899
12034
  const data = JSON.parse(content);
11900
12035
  data.createdAt = new Date(data.createdAt);
11901
12036
  data.updatedAt = new Date(data.updatedAt);
@@ -11905,13 +12040,13 @@ async function loadExistingState(projectPath) {
11905
12040
  }
11906
12041
  }
11907
12042
  async function saveState(state) {
11908
- const fs33 = await import('fs/promises');
12043
+ const fs36 = await import('fs/promises');
11909
12044
  const statePath = `${state.path}/.coco/state`;
11910
- await fs33.mkdir(statePath, { recursive: true });
12045
+ await fs36.mkdir(statePath, { recursive: true });
11911
12046
  const filePath = `${statePath}/project.json`;
11912
12047
  const tmpPath = `${filePath}.tmp.${Date.now()}`;
11913
- await fs33.writeFile(tmpPath, JSON.stringify(state, null, 2), "utf-8");
11914
- await fs33.rename(tmpPath, filePath);
12048
+ await fs36.writeFile(tmpPath, JSON.stringify(state, null, 2), "utf-8");
12049
+ await fs36.rename(tmpPath, filePath);
11915
12050
  }
11916
12051
  function getPhaseExecutor(phase) {
11917
12052
  switch (phase) {
@@ -11970,20 +12105,20 @@ async function createPhaseContext(config, state) {
11970
12105
  };
11971
12106
  const tools = {
11972
12107
  file: {
11973
- async read(path32) {
11974
- const fs33 = await import('fs/promises');
11975
- return fs33.readFile(path32, "utf-8");
12108
+ async read(path37) {
12109
+ const fs36 = await import('fs/promises');
12110
+ return fs36.readFile(path37, "utf-8");
11976
12111
  },
11977
- async write(path32, content) {
11978
- const fs33 = await import('fs/promises');
12112
+ async write(path37, content) {
12113
+ const fs36 = await import('fs/promises');
11979
12114
  const nodePath = await import('path');
11980
- await fs33.mkdir(nodePath.dirname(path32), { recursive: true });
11981
- await fs33.writeFile(path32, content, "utf-8");
12115
+ await fs36.mkdir(nodePath.dirname(path37), { recursive: true });
12116
+ await fs36.writeFile(path37, content, "utf-8");
11982
12117
  },
11983
- async exists(path32) {
11984
- const fs33 = await import('fs/promises');
12118
+ async exists(path37) {
12119
+ const fs36 = await import('fs/promises');
11985
12120
  try {
11986
- await fs33.access(path32);
12121
+ await fs36.access(path37);
11987
12122
  return true;
11988
12123
  } catch {
11989
12124
  return false;
@@ -11996,9 +12131,9 @@ async function createPhaseContext(config, state) {
11996
12131
  },
11997
12132
  bash: {
11998
12133
  async exec(command, options = {}) {
11999
- const { execa: execa9 } = await import('execa');
12134
+ const { execa: execa11 } = await import('execa');
12000
12135
  try {
12001
- const result = await execa9(command, {
12136
+ const result = await execa11(command, {
12002
12137
  shell: true,
12003
12138
  cwd: options.cwd || state.path,
12004
12139
  timeout: options.timeout,
@@ -12021,8 +12156,8 @@ async function createPhaseContext(config, state) {
12021
12156
  },
12022
12157
  git: {
12023
12158
  async status() {
12024
- const { execa: execa9 } = await import('execa');
12025
- const result = await execa9("git", ["status", "--porcelain", "-b"], { cwd: state.path });
12159
+ const { execa: execa11 } = await import('execa');
12160
+ const result = await execa11("git", ["status", "--porcelain", "-b"], { cwd: state.path });
12026
12161
  const lines = result.stdout.split("\n");
12027
12162
  const branchLine = lines[0] || "";
12028
12163
  const branch = branchLine.replace("## ", "").split("...")[0] || "main";
@@ -12035,24 +12170,24 @@ async function createPhaseContext(config, state) {
12035
12170
  };
12036
12171
  },
12037
12172
  async commit(message, files) {
12038
- const { execa: execa9 } = await import('execa');
12173
+ const { execa: execa11 } = await import('execa');
12039
12174
  if (files && files.length > 0) {
12040
- await execa9("git", ["add", ...files], { cwd: state.path });
12175
+ await execa11("git", ["add", ...files], { cwd: state.path });
12041
12176
  }
12042
- await execa9("git", ["commit", "-m", message], { cwd: state.path });
12177
+ await execa11("git", ["commit", "-m", message], { cwd: state.path });
12043
12178
  },
12044
12179
  async push() {
12045
- const { execa: execa9 } = await import('execa');
12046
- await execa9("git", ["push"], { cwd: state.path });
12180
+ const { execa: execa11 } = await import('execa');
12181
+ await execa11("git", ["push"], { cwd: state.path });
12047
12182
  }
12048
12183
  },
12049
12184
  test: {
12050
12185
  async run(pattern) {
12051
- const { execa: execa9 } = await import('execa');
12186
+ const { execa: execa11 } = await import('execa');
12052
12187
  try {
12053
12188
  const args = ["test", "--reporter=json"];
12054
12189
  if (pattern) args.push(pattern);
12055
- await execa9("pnpm", args, { cwd: state.path });
12190
+ await execa11("pnpm", args, { cwd: state.path });
12056
12191
  return {
12057
12192
  passed: 0,
12058
12193
  failed: 0,
@@ -12132,9 +12267,9 @@ async function createSnapshot(state) {
12132
12267
  var MAX_CHECKPOINT_VERSIONS = 5;
12133
12268
  async function getCheckpointFiles(state, phase) {
12134
12269
  try {
12135
- const fs33 = await import('fs/promises');
12270
+ const fs36 = await import('fs/promises');
12136
12271
  const checkpointDir = `${state.path}/.coco/checkpoints`;
12137
- const files = await fs33.readdir(checkpointDir);
12272
+ const files = await fs36.readdir(checkpointDir);
12138
12273
  const phaseFiles = files.filter((f) => f.startsWith(`snapshot-pre-${phase}-`) && f.endsWith(".json")).sort((a, b) => {
12139
12274
  const tsA = parseInt(a.split("-").pop()?.replace(".json", "") ?? "0", 10);
12140
12275
  const tsB = parseInt(b.split("-").pop()?.replace(".json", "") ?? "0", 10);
@@ -12147,11 +12282,11 @@ async function getCheckpointFiles(state, phase) {
12147
12282
  }
12148
12283
  async function cleanupOldCheckpoints(state, phase) {
12149
12284
  try {
12150
- const fs33 = await import('fs/promises');
12285
+ const fs36 = await import('fs/promises');
12151
12286
  const files = await getCheckpointFiles(state, phase);
12152
12287
  if (files.length > MAX_CHECKPOINT_VERSIONS) {
12153
12288
  const filesToDelete = files.slice(MAX_CHECKPOINT_VERSIONS);
12154
- await Promise.all(filesToDelete.map((f) => fs33.unlink(f).catch(() => {
12289
+ await Promise.all(filesToDelete.map((f) => fs36.unlink(f).catch(() => {
12155
12290
  })));
12156
12291
  }
12157
12292
  } catch {
@@ -12159,13 +12294,13 @@ async function cleanupOldCheckpoints(state, phase) {
12159
12294
  }
12160
12295
  async function saveSnapshot(state, snapshotId) {
12161
12296
  try {
12162
- const fs33 = await import('fs/promises');
12297
+ const fs36 = await import('fs/promises');
12163
12298
  const snapshotPath = `${state.path}/.coco/checkpoints/snapshot-${snapshotId}.json`;
12164
12299
  const snapshotDir = `${state.path}/.coco/checkpoints`;
12165
- await fs33.mkdir(snapshotDir, { recursive: true });
12300
+ await fs36.mkdir(snapshotDir, { recursive: true });
12166
12301
  const createdAt = state.createdAt instanceof Date ? state.createdAt.toISOString() : String(state.createdAt);
12167
12302
  const updatedAt = state.updatedAt instanceof Date ? state.updatedAt.toISOString() : String(state.updatedAt);
12168
- await fs33.writeFile(
12303
+ await fs36.writeFile(
12169
12304
  snapshotPath,
12170
12305
  JSON.stringify(
12171
12306
  {
@@ -12263,7 +12398,7 @@ function generateId() {
12263
12398
  return `proj_${Date.now().toString(36)}_${Math.random().toString(36).slice(2, 9)}`;
12264
12399
  }
12265
12400
  var ProviderConfigSchema = z.object({
12266
- type: z.enum(["anthropic", "openai", "gemini", "kimi"]).default("anthropic"),
12401
+ type: z.enum(["anthropic", "openai", "gemini", "kimi", "lmstudio", "ollama"]).default("anthropic"),
12267
12402
  apiKey: z.string().optional(),
12268
12403
  model: z.string().default("claude-sonnet-4-20250514"),
12269
12404
  maxTokens: z.number().min(1).max(2e5).default(8192),
@@ -12351,6 +12486,22 @@ var ToolsConfigSchema = z.object({
12351
12486
  threshold: z.number().min(0).max(1).default(0.3)
12352
12487
  }).optional()
12353
12488
  });
12489
+ var ShipConfigSchema = z.object({
12490
+ /** Default base branch for PRs */
12491
+ defaultBaseBranch: z.string().default("main"),
12492
+ /** Auto-detect version bump from commit history */
12493
+ autoDetectBump: z.boolean().default(true),
12494
+ /** Use squash merge for PRs */
12495
+ squashMerge: z.boolean().default(true),
12496
+ /** Delete feature branch after merge */
12497
+ deleteBranchAfterMerge: z.boolean().default(true),
12498
+ /** Create PRs as draft by default */
12499
+ draftPr: z.boolean().default(false),
12500
+ /** CI check timeout in ms (default 10 minutes) */
12501
+ ciCheckTimeoutMs: z.number().default(6e5),
12502
+ /** CI check poll interval in ms (default 15 seconds) */
12503
+ ciCheckPollMs: z.number().default(15e3)
12504
+ });
12354
12505
  var CocoConfigSchema = z.object({
12355
12506
  project: ProjectConfigSchema,
12356
12507
  provider: ProviderConfigSchema.default({
@@ -12377,7 +12528,8 @@ var CocoConfigSchema = z.object({
12377
12528
  stack: StackConfigSchema.optional(),
12378
12529
  integrations: IntegrationsConfigSchema.optional(),
12379
12530
  mcp: MCPConfigSchema.optional(),
12380
- tools: ToolsConfigSchema.optional()
12531
+ tools: ToolsConfigSchema.optional(),
12532
+ ship: ShipConfigSchema.optional()
12381
12533
  });
12382
12534
  function createDefaultConfigObject(projectName, language = "typescript") {
12383
12535
  return {
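
The new ShipConfigSchema (and the matching optional ship key on CocoConfigSchema) describes release/PR behavior. A config object using only the schema defaults would look like the sketch below, with values taken from the defaults above:

    // Illustrative object matching ShipConfigSchema's defaults.
    const ship = {
      defaultBaseBranch: "main",
      autoDetectBump: true,
      squashMerge: true,
      deleteBranchAfterMerge: true,
      draftPr: false,
      ciCheckTimeoutMs: 600000, // 10 minutes
      ciCheckPollMs: 15000      // 15 seconds
    };
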
@@ -12540,24 +12692,7 @@ z.string().regex(
12540
12692
  /^\d+\.\d+\.\d+$/,
12541
12693
  "Version must be in semver format (e.g., 1.0.0)"
12542
12694
  );
12543
-
12544
- // src/tools/allowed-paths.ts
12545
- init_paths();
12546
- path14__default.join(CONFIG_PATHS.home, "allowed-paths.json");
12547
- var sessionAllowedPaths = [];
12548
- function isWithinAllowedPath(absolutePath, operation) {
12549
- const normalizedTarget = path14__default.normalize(absolutePath);
12550
- for (const entry of sessionAllowedPaths) {
12551
- const normalizedAllowed = path14__default.normalize(entry.path);
12552
- if (normalizedTarget === normalizedAllowed || normalizedTarget.startsWith(normalizedAllowed + path14__default.sep)) {
12553
- if (operation === "read") return true;
12554
- if (entry.level === "write") return true;
12555
- }
12556
- }
12557
- return false;
12558
- }
12559
-
12560
- // src/tools/file.ts
12695
+ init_allowed_paths();
12561
12696
  var SENSITIVE_PATTERNS = [
12562
12697
  /\.env(?:\.\w+)?$/,
12563
12698
  // .env, .env.local, etc.
@@ -13421,7 +13556,8 @@ function truncateOutput(output, maxLength = 5e4) {
13421
13556
  [Output truncated - ${output.length - maxLength} more characters]`;
13422
13557
  }
13423
13558
  function getGit(cwd) {
13424
- return simpleGit(cwd ?? process.cwd());
13559
+ const baseDir = cwd ?? process.cwd();
13560
+ return simpleGit({ baseDir });
13425
13561
  }
13426
13562
  var gitStatusTool = defineTool({
13427
13563
  name: "git_status",
@@ -13782,9 +13918,11 @@ var gitTools = [
13782
13918
  gitInitTool
13783
13919
  ];
13784
13920
  function generateSimpleCommitMessage() {
13921
+ const cwd = process.cwd();
13785
13922
  try {
13786
13923
  const diff = execSync("git diff --cached --name-only", {
13787
13924
  encoding: "utf-8",
13925
+ cwd,
13788
13926
  stdio: ["pipe", "pipe", "ignore"]
13789
13927
  });
13790
13928
  const files = diff.trim().split("\n").filter(Boolean);
@@ -13820,6 +13958,7 @@ var checkProtectedBranchTool = defineTool({
13820
13958
  try {
13821
13959
  const branch = execSync("git rev-parse --abbrev-ref HEAD", {
13822
13960
  encoding: "utf-8",
13961
+ cwd: process.cwd(),
13823
13962
  stdio: ["pipe", "pipe", "ignore"]
13824
13963
  }).trim();
13825
13964
  const protected_branches = ["main", "master", "develop", "production"];
@@ -13855,7 +13994,7 @@ var simpleAutoCommitTool = defineTool({
13855
13994
  async execute(input) {
13856
13995
  try {
13857
13996
  try {
13858
- execSync("git diff --cached --quiet", { stdio: "ignore" });
13997
+ execSync("git diff --cached --quiet", { cwd: process.cwd(), stdio: "ignore" });
13859
13998
  return {
13860
13999
  stdout: "",
13861
14000
  stderr: "No staged changes to commit",
@@ -13867,6 +14006,7 @@ var simpleAutoCommitTool = defineTool({
13867
14006
  const message = input.message || generateSimpleCommitMessage();
13868
14007
  execSync(`git commit -m "${message}"`, {
13869
14008
  encoding: "utf-8",
14009
+ cwd: process.cwd(),
13870
14010
  stdio: "pipe"
13871
14011
  });
13872
14012
  return {
@@ -16468,9 +16608,17 @@ Examples:
16468
16608
  }
16469
16609
  });
16470
16610
  var diffTools = [showDiffTool];
16471
- async function fileExists(path32) {
16611
+ async function fileExists(filePath) {
16472
16612
  try {
16473
- await access(path32);
16613
+ await fs14__default.access(filePath);
16614
+ return true;
16615
+ } catch {
16616
+ return false;
16617
+ }
16618
+ }
16619
+ async function fileExists2(path37) {
16620
+ try {
16621
+ await access(path37);
16474
16622
  return true;
16475
16623
  } catch {
16476
16624
  return false;
@@ -16485,7 +16633,7 @@ async function dirHasFiles(dir) {
16485
16633
  }
16486
16634
  }
16487
16635
  async function detectMaturity(cwd) {
16488
- const hasPackageJson = await fileExists(join(cwd, "package.json"));
16636
+ const hasPackageJson = await fileExists2(join(cwd, "package.json"));
16489
16637
  if (!hasPackageJson) {
16490
16638
  const otherManifests = [
16491
16639
  "go.mod",
@@ -16498,7 +16646,7 @@ async function detectMaturity(cwd) {
16498
16646
  ];
16499
16647
  let hasAnyManifest = false;
16500
16648
  for (const m of otherManifests) {
16501
- if (await fileExists(join(cwd, m))) {
16649
+ if (await fileExists2(join(cwd, m))) {
16502
16650
  hasAnyManifest = true;
16503
16651
  break;
16504
16652
  }
@@ -16539,7 +16687,7 @@ async function detectMaturity(cwd) {
16539
16687
  cwd,
16540
16688
  ignore: ["node_modules/**", "dist/**", "build/**"]
16541
16689
  });
16542
- const hasCI = await fileExists(join(cwd, ".github/workflows")) && await dirHasFiles(join(cwd, ".github/workflows"));
16690
+ const hasCI = await fileExists2(join(cwd, ".github/workflows")) && await dirHasFiles(join(cwd, ".github/workflows"));
16543
16691
  const lintConfigs = [
16544
16692
  ".eslintrc.js",
16545
16693
  ".eslintrc.json",
@@ -16552,7 +16700,7 @@ async function detectMaturity(cwd) {
16552
16700
  ];
16553
16701
  let hasLintConfig = false;
16554
16702
  for (const config of lintConfigs) {
16555
- if (await fileExists(join(cwd, config))) {
16703
+ if (await fileExists2(join(cwd, config))) {
16556
16704
  hasLintConfig = true;
16557
16705
  break;
16558
16706
  }
@@ -16560,7 +16708,7 @@ async function detectMaturity(cwd) {
16560
16708
  if (!hasLintConfig && hasPackageJson) {
16561
16709
  try {
16562
16710
  const pkgRaw = await import('fs/promises').then(
16563
- (fs33) => fs33.readFile(join(cwd, "package.json"), "utf-8")
16711
+ (fs36) => fs36.readFile(join(cwd, "package.json"), "utf-8")
16564
16712
  );
16565
16713
  const pkg = JSON.parse(pkgRaw);
16566
16714
  if (pkg.scripts?.lint || pkg.scripts?.["lint:fix"]) {
@@ -16605,7 +16753,8 @@ var SECURITY_PATTERNS2 = [
16605
16753
  regex: /console\.(log|debug|info)\(/,
16606
16754
  severity: "minor",
16607
16755
  category: "best-practice",
16608
- message: "Remove console.log \u2014 use structured logging instead"
16756
+ message: "Remove console.log \u2014 use structured logging instead",
16757
+ excludePaths: /\/(cli|repl|bin|scripts)\//
16609
16758
  }
16610
16759
  ];
16611
16760
  var CORRECTNESS_PATTERNS = [
@@ -16685,6 +16834,7 @@ function analyzePatterns(diff) {
16685
16834
  for (const line of hunk.lines) {
16686
16835
  if (line.type !== "add") continue;
16687
16836
  for (const pattern of ALL_PATTERNS) {
16837
+ if (pattern.excludePaths?.test(file.path)) continue;
16688
16838
  if (pattern.regex.test(line.content)) {
16689
16839
  findings.push({
16690
16840
  file: file.path,
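
The new excludePaths field lets a review pattern opt out of whole directory families; analyzePatterns now consults it before applying the regex, so console.log calls are still flagged in library code but skipped under CLI-style paths. A small sketch with hypothetical file paths:

    // Sketch of the skip logic added above.
    const pattern = { regex: /console\.(log|debug|info)\(/, excludePaths: /\/(cli|repl|bin|scripts)\// };
    pattern.excludePaths.test("src/cli/repl/prompt.ts"); // true  -> finding suppressed for this file
    pattern.excludePaths.test("src/tools/file.ts");      // false -> regex still applied line by line
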
@@ -16701,7 +16851,8 @@ function analyzePatterns(diff) {
16701
16851
  }
16702
16852
  return findings;
16703
16853
  }
16704
- function checkTestCoverage(diff) {
16854
+ var TEST_COVERAGE_LARGE_CHANGE_THRESHOLD = 15;
16855
+ async function checkTestCoverage(diff, cwd) {
16705
16856
  const findings = [];
16706
16857
  const changedSrc = [];
16707
16858
  const changedTests = /* @__PURE__ */ new Set();
@@ -16711,22 +16862,35 @@ function checkTestCoverage(diff) {
16711
16862
  changedTests.add(file.path);
16712
16863
  } else if (/\.(ts|tsx|js|jsx)$/.test(file.path)) {
16713
16864
  if (file.additions > 5) {
16714
- changedSrc.push(file.path);
16865
+ changedSrc.push({ path: file.path, additions: file.additions });
16715
16866
  }
16716
16867
  }
16717
16868
  }
16718
16869
  for (const src of changedSrc) {
16719
- const baseName = src.replace(/\.(ts|tsx|js|jsx)$/, "");
16870
+ const baseName = src.path.replace(/\.(ts|tsx|js|jsx)$/, "");
16720
16871
  const hasTestChange = [...changedTests].some(
16721
16872
  (t) => t.includes(baseName.split("/").pop()) || t.startsWith(baseName)
16722
16873
  );
16723
16874
  if (!hasTestChange) {
16724
- findings.push({
16725
- file: src,
16726
- severity: "minor",
16727
- category: "testing",
16728
- message: "Logic changes without corresponding test updates"
16729
- });
16875
+ const ext = src.path.match(/\.(ts|tsx|js|jsx)$/)?.[0] ?? ".ts";
16876
+ const testExists = await fileExists(path14__default.join(cwd, `${baseName}.test${ext}`)) || await fileExists(path14__default.join(cwd, `${baseName}.spec${ext}`));
16877
+ if (testExists) {
16878
+ if (src.additions >= TEST_COVERAGE_LARGE_CHANGE_THRESHOLD) {
16879
+ findings.push({
16880
+ file: src.path,
16881
+ severity: "info",
16882
+ category: "testing",
16883
+ message: "Test file exists but was not updated \u2014 verify existing tests cover these changes"
16884
+ });
16885
+ }
16886
+ } else {
16887
+ findings.push({
16888
+ file: src.path,
16889
+ severity: "minor",
16890
+ category: "testing",
16891
+ message: "Logic changes without corresponding test updates"
16892
+ });
16893
+ }
16730
16894
  }
16731
16895
  }
16732
16896
  return findings;
@@ -16876,7 +17040,7 @@ Examples:
16876
17040
  const maturity = maturityInfo.level;
16877
17041
  let allFindings = [];
16878
17042
  allFindings.push(...analyzePatterns(diff));
16879
- allFindings.push(...checkTestCoverage(diff));
17043
+ allFindings.push(...await checkTestCoverage(diff, projectDir));
16880
17044
  allFindings.push(...checkDocumentation(diff));
16881
17045
  if (runLinter) {
16882
17046
  try {
@@ -16927,8 +17091,8 @@ Examples:
16927
17091
  }
16928
17092
  });
16929
17093
  var reviewTools = [reviewCodeTool];
16930
- var fs22 = await import('fs/promises');
16931
- var path22 = await import('path');
17094
+ var fs23 = await import('fs/promises');
17095
+ var path24 = await import('path');
16932
17096
  var { glob: glob12 } = await import('glob');
16933
17097
  var DEFAULT_MAX_FILES = 200;
16934
17098
  var LANGUAGE_EXTENSIONS = {
@@ -16954,7 +17118,7 @@ var DEFAULT_EXCLUDES = [
16954
17118
  "**/*.d.ts"
16955
17119
  ];
16956
17120
  function detectLanguage2(filePath) {
16957
- const ext = path22.extname(filePath).toLowerCase();
17121
+ const ext = path24.extname(filePath).toLowerCase();
16958
17122
  for (const [lang, extensions] of Object.entries(LANGUAGE_EXTENSIONS)) {
16959
17123
  if (extensions.includes(ext)) return lang;
16960
17124
  }
@@ -17363,9 +17527,9 @@ Examples:
17363
17527
  }),
17364
17528
  async execute({ path: rootPath, include, exclude, languages, maxFiles, depth }) {
17365
17529
  const startTime = performance.now();
17366
- const absPath = path22.resolve(rootPath);
17530
+ const absPath = path24.resolve(rootPath);
17367
17531
  try {
17368
- const stat2 = await fs22.stat(absPath);
17532
+ const stat2 = await fs23.stat(absPath);
17369
17533
  if (!stat2.isDirectory()) {
17370
17534
  throw new ToolError(`Path is not a directory: ${absPath}`, {
17371
17535
  tool: "codebase_map"
@@ -17402,14 +17566,14 @@ Examples:
17402
17566
  let totalDefinitions = 0;
17403
17567
  let exportedSymbols = 0;
17404
17568
  for (const file of limitedFiles) {
17405
- const fullPath = path22.join(absPath, file);
17569
+ const fullPath = path24.join(absPath, file);
17406
17570
  const language = detectLanguage2(file);
17407
17571
  if (!language) continue;
17408
17572
  if (languages && !languages.includes(language)) {
17409
17573
  continue;
17410
17574
  }
17411
17575
  try {
17412
- const content = await fs22.readFile(fullPath, "utf-8");
17576
+ const content = await fs23.readFile(fullPath, "utf-8");
17413
17577
  const lineCount = content.split("\n").length;
17414
17578
  const parsed = parseFile(content, language);
17415
17579
  const definitions = depth === "overview" ? parsed.definitions.filter((d) => d.exported) : parsed.definitions;
@@ -17442,23 +17606,23 @@ Examples:
17442
17606
  });
17443
17607
  var codebaseMapTools = [codebaseMapTool];
17444
17608
  init_paths();
17445
- var fs23 = await import('fs/promises');
17446
- var path23 = await import('path');
17447
- var crypto2 = await import('crypto');
17448
- var GLOBAL_MEMORIES_DIR = path23.join(COCO_HOME, "memories");
17609
+ var fs24 = await import('fs/promises');
17610
+ var path25 = await import('path');
17611
+ var crypto3 = await import('crypto');
17612
+ var GLOBAL_MEMORIES_DIR = path25.join(COCO_HOME, "memories");
17449
17613
  var PROJECT_MEMORIES_DIR = ".coco/memories";
17450
17614
  var DEFAULT_MAX_MEMORIES = 1e3;
17451
17615
  async function ensureDir(dirPath) {
17452
- await fs23.mkdir(dirPath, { recursive: true });
17616
+ await fs24.mkdir(dirPath, { recursive: true });
17453
17617
  }
17454
17618
  function getMemoriesDir(scope) {
17455
17619
  return scope === "global" ? GLOBAL_MEMORIES_DIR : PROJECT_MEMORIES_DIR;
17456
17620
  }
17457
17621
  async function loadIndex(scope) {
17458
17622
  const dir = getMemoriesDir(scope);
17459
- const indexPath = path23.join(dir, "index.json");
17623
+ const indexPath = path25.join(dir, "index.json");
17460
17624
  try {
17461
- const content = await fs23.readFile(indexPath, "utf-8");
17625
+ const content = await fs24.readFile(indexPath, "utf-8");
17462
17626
  return JSON.parse(content);
17463
17627
  } catch {
17464
17628
  return [];
@@ -17467,14 +17631,14 @@ async function loadIndex(scope) {
17467
17631
  async function saveIndex(scope, index) {
17468
17632
  const dir = getMemoriesDir(scope);
17469
17633
  await ensureDir(dir);
17470
- const indexPath = path23.join(dir, "index.json");
17471
- await fs23.writeFile(indexPath, JSON.stringify(index, null, 2), "utf-8");
17634
+ const indexPath = path25.join(dir, "index.json");
17635
+ await fs24.writeFile(indexPath, JSON.stringify(index, null, 2), "utf-8");
17472
17636
  }
17473
17637
  async function loadMemory(scope, id) {
17474
17638
  const dir = getMemoriesDir(scope);
17475
- const memPath = path23.join(dir, `${id}.json`);
17639
+ const memPath = path25.join(dir, `${id}.json`);
17476
17640
  try {
17477
- const content = await fs23.readFile(memPath, "utf-8");
17641
+ const content = await fs24.readFile(memPath, "utf-8");
17478
17642
  return JSON.parse(content);
17479
17643
  } catch {
17480
17644
  return null;
@@ -17483,8 +17647,8 @@ async function loadMemory(scope, id) {
17483
17647
  async function saveMemory(scope, memory) {
17484
17648
  const dir = getMemoriesDir(scope);
17485
17649
  await ensureDir(dir);
17486
- const memPath = path23.join(dir, `${memory.id}.json`);
17487
- await fs23.writeFile(memPath, JSON.stringify(memory, null, 2), "utf-8");
17650
+ const memPath = path25.join(dir, `${memory.id}.json`);
17651
+ await fs24.writeFile(memPath, JSON.stringify(memory, null, 2), "utf-8");
17488
17652
  }
17489
17653
  var createMemoryTool = defineTool({
17490
17654
  name: "create_memory",
@@ -17526,7 +17690,7 @@ Examples:
17526
17690
  { tool: "create_memory" }
17527
17691
  );
17528
17692
  }
17529
- const id = crypto2.randomUUID();
17693
+ const id = crypto3.randomUUID();
17530
17694
  const memory = {
17531
17695
  id,
17532
17696
  key,
@@ -17636,17 +17800,17 @@ Examples:
17636
17800
  }
17637
17801
  });
17638
17802
  var memoryTools = [createMemoryTool, recallMemoryTool, listMemoriesTool];
17639
- var fs24 = await import('fs/promises');
17640
- var crypto3 = await import('crypto');
17803
+ var fs25 = await import('fs/promises');
17804
+ var crypto4 = await import('crypto');
17641
17805
  var CHECKPOINT_FILE = ".coco/checkpoints.json";
17642
17806
  var DEFAULT_MAX_CHECKPOINTS = 50;
17643
17807
  var STASH_PREFIX = "coco-cp";
17644
17808
  async function ensureCocoDir() {
17645
- await fs24.mkdir(".coco", { recursive: true });
17809
+ await fs25.mkdir(".coco", { recursive: true });
17646
17810
  }
17647
17811
  async function loadCheckpoints() {
17648
17812
  try {
17649
- const content = await fs24.readFile(CHECKPOINT_FILE, "utf-8");
17813
+ const content = await fs25.readFile(CHECKPOINT_FILE, "utf-8");
17650
17814
  return JSON.parse(content);
17651
17815
  } catch {
17652
17816
  return [];
@@ -17654,7 +17818,7 @@ async function loadCheckpoints() {
17654
17818
  }
17655
17819
  async function saveCheckpoints(checkpoints) {
17656
17820
  await ensureCocoDir();
17657
- await fs24.writeFile(CHECKPOINT_FILE, JSON.stringify(checkpoints, null, 2), "utf-8");
17821
+ await fs25.writeFile(CHECKPOINT_FILE, JSON.stringify(checkpoints, null, 2), "utf-8");
17658
17822
  }
17659
17823
  async function execGit(args) {
17660
17824
  const { execaCommand } = await import('execa');
@@ -17692,7 +17856,7 @@ Examples:
17692
17856
  description: z.string().min(1).max(200).describe("Description of this checkpoint")
17693
17857
  }),
17694
17858
  async execute({ description }) {
17695
- const id = crypto3.randomUUID().slice(0, 8);
17859
+ const id = crypto4.randomUUID().slice(0, 8);
17696
17860
  const timestamp = (/* @__PURE__ */ new Date()).toISOString();
17697
17861
  const stashMessage = `${STASH_PREFIX}-${id}-${description.replace(/\s+/g, "-").slice(0, 50)}`;
17698
17862
  const changedFiles = await getChangedFiles();
@@ -17814,8 +17978,8 @@ Examples:
17814
17978
  }
17815
17979
  });
17816
17980
  var checkpointTools = [createCheckpointTool, restoreCheckpointTool, listCheckpointsTool];
17817
- var fs25 = await import('fs/promises');
17818
- var path24 = await import('path');
17981
+ var fs26 = await import('fs/promises');
17982
+ var path26 = await import('path');
17819
17983
  var { glob: glob13 } = await import('glob');
17820
17984
  var INDEX_DIR = ".coco/search-index";
17821
17985
  var DEFAULT_CHUNK_SIZE = 20;
@@ -17941,20 +18105,20 @@ async function getEmbedding(text) {
17941
18105
  }
17942
18106
  async function loadIndex2(indexDir) {
17943
18107
  try {
17944
- const indexPath = path24.join(indexDir, "index.json");
17945
- const content = await fs25.readFile(indexPath, "utf-8");
18108
+ const indexPath = path26.join(indexDir, "index.json");
18109
+ const content = await fs26.readFile(indexPath, "utf-8");
17946
18110
  return JSON.parse(content);
17947
18111
  } catch {
17948
18112
  return null;
17949
18113
  }
17950
18114
  }
17951
18115
  async function saveIndex2(indexDir, index) {
17952
- await fs25.mkdir(indexDir, { recursive: true });
17953
- const indexPath = path24.join(indexDir, "index.json");
17954
- await fs25.writeFile(indexPath, JSON.stringify(index), "utf-8");
18116
+ await fs26.mkdir(indexDir, { recursive: true });
18117
+ const indexPath = path26.join(indexDir, "index.json");
18118
+ await fs26.writeFile(indexPath, JSON.stringify(index), "utf-8");
17955
18119
  }
17956
18120
  function isBinary(filePath) {
17957
- return BINARY_EXTENSIONS.has(path24.extname(filePath).toLowerCase());
18121
+ return BINARY_EXTENSIONS.has(path26.extname(filePath).toLowerCase());
17958
18122
  }
17959
18123
  var semanticSearchTool = defineTool({
17960
18124
  name: "semantic_search",
@@ -17979,8 +18143,8 @@ Examples:
17979
18143
  const effectivePath = rootPath ?? ".";
17980
18144
  const effectiveMaxResults = maxResults ?? 10;
17981
18145
  const effectiveThreshold = threshold ?? 0.3;
17982
- const absPath = path24.resolve(effectivePath);
17983
- const indexDir = path24.join(absPath, INDEX_DIR);
18146
+ const absPath = path26.resolve(effectivePath);
18147
+ const indexDir = path26.join(absPath, INDEX_DIR);
17984
18148
  let index = reindex ? null : await loadIndex2(indexDir);
17985
18149
  if (!index) {
17986
18150
  const pattern = include ?? "**/*";
@@ -17993,10 +18157,10 @@ Examples:
17993
18157
  const chunks = [];
17994
18158
  for (const file of files) {
17995
18159
  if (isBinary(file)) continue;
17996
- const fullPath = path24.join(absPath, file);
18160
+ const fullPath = path26.join(absPath, file);
17997
18161
  try {
17998
- const stat2 = await fs25.stat(fullPath);
17999
- const content = await fs25.readFile(fullPath, "utf-8");
18162
+ const stat2 = await fs26.stat(fullPath);
18163
+ const content = await fs26.readFile(fullPath, "utf-8");
18000
18164
  if (content.length > 1e5) continue;
18001
18165
  const fileChunks = chunkContent(content, DEFAULT_CHUNK_SIZE);
18002
18166
  for (const chunk of fileChunks) {
@@ -18055,8 +18219,8 @@ Examples:
18055
18219
  }
18056
18220
  });
18057
18221
  var semanticSearchTools = [semanticSearchTool];
18058
- var fs26 = await import('fs/promises');
18059
- var path25 = await import('path');
18222
+ var fs27 = await import('fs/promises');
18223
+ var path27 = await import('path');
18060
18224
  var { glob: glob14 } = await import('glob');
18061
18225
  async function parseClassRelationships(rootPath, include) {
18062
18226
  const pattern = include ?? "**/*.{ts,tsx,js,jsx}";
@@ -18069,7 +18233,7 @@ async function parseClassRelationships(rootPath, include) {
18069
18233
  const interfaces = [];
18070
18234
  for (const file of files.slice(0, 100)) {
18071
18235
  try {
18072
- const content = await fs26.readFile(path25.join(rootPath, file), "utf-8");
18236
+ const content = await fs27.readFile(path27.join(rootPath, file), "utf-8");
18073
18237
  const lines = content.split("\n");
18074
18238
  for (let i = 0; i < lines.length; i++) {
18075
18239
  const line = lines[i];
@@ -18188,14 +18352,14 @@ async function generateClassDiagram(rootPath, include) {
18188
18352
  };
18189
18353
  }
18190
18354
  async function generateArchitectureDiagram(rootPath) {
18191
- const entries = await fs26.readdir(rootPath, { withFileTypes: true });
18355
+ const entries = await fs27.readdir(rootPath, { withFileTypes: true });
18192
18356
  const dirs = entries.filter(
18193
18357
  (e) => e.isDirectory() && !e.name.startsWith(".") && !["node_modules", "dist", "build", "coverage", "__pycache__", "target"].includes(e.name)
18194
18358
  );
18195
18359
  const lines = ["graph TD"];
18196
18360
  let nodeCount = 0;
18197
18361
  let edgeCount = 0;
18198
- const rootName = path25.basename(rootPath);
18362
+ const rootName = path27.basename(rootPath);
18199
18363
  lines.push(` ROOT["${rootName}"]`);
18200
18364
  nodeCount++;
18201
18365
  for (const dir of dirs) {
@@ -18205,7 +18369,7 @@ async function generateArchitectureDiagram(rootPath) {
18205
18369
  nodeCount++;
18206
18370
  edgeCount++;
18207
18371
  try {
18208
- const subEntries = await fs26.readdir(path25.join(rootPath, dir.name), {
18372
+ const subEntries = await fs27.readdir(path27.join(rootPath, dir.name), {
18209
18373
  withFileTypes: true
18210
18374
  });
18211
18375
  const subDirs = subEntries.filter(
@@ -18328,7 +18492,7 @@ Examples:
18328
18492
  tool: "generate_diagram"
18329
18493
  });
18330
18494
  }
18331
- const absPath = rootPath ? path25.resolve(rootPath) : process.cwd();
18495
+ const absPath = rootPath ? path27.resolve(rootPath) : process.cwd();
18332
18496
  switch (type) {
18333
18497
  case "class":
18334
18498
  return generateClassDiagram(absPath, include);
@@ -18389,8 +18553,8 @@ Examples:
18389
18553
  }
18390
18554
  });
18391
18555
  var diagramTools = [generateDiagramTool];
18392
- var fs27 = await import('fs/promises');
18393
- var path26 = await import('path');
18556
+ var fs28 = await import('fs/promises');
18557
+ var path28 = await import('path');
18394
18558
  var DEFAULT_MAX_PAGES = 20;
18395
18559
  var MAX_FILE_SIZE = 50 * 1024 * 1024;
18396
18560
  function parsePageRange(rangeStr, totalPages) {
@@ -18425,9 +18589,9 @@ Examples:
18425
18589
  }),
18426
18590
  async execute({ path: filePath, pages, maxPages }) {
18427
18591
  const startTime = performance.now();
18428
- const absPath = path26.resolve(filePath);
18592
+ const absPath = path28.resolve(filePath);
18429
18593
  try {
18430
- const stat2 = await fs27.stat(absPath);
18594
+ const stat2 = await fs28.stat(absPath);
18431
18595
  if (!stat2.isFile()) {
18432
18596
  throw new ToolError(`Path is not a file: ${absPath}`, {
18433
18597
  tool: "read_pdf"
@@ -18458,7 +18622,7 @@ Examples:
18458
18622
  }
18459
18623
  try {
18460
18624
  const pdfParse = await import('pdf-parse');
18461
- const dataBuffer = await fs27.readFile(absPath);
18625
+ const dataBuffer = await fs28.readFile(absPath);
18462
18626
  const pdfData = await pdfParse.default(dataBuffer, {
18463
18627
  max: maxPages
18464
18628
  });
@@ -18504,8 +18668,8 @@ Examples:
18504
18668
  }
18505
18669
  });
18506
18670
  var pdfTools = [readPdfTool];
18507
- var fs28 = await import('fs/promises');
18508
- var path27 = await import('path');
18671
+ var fs29 = await import('fs/promises');
18672
+ var path29 = await import('path');
18509
18673
  var SUPPORTED_FORMATS = /* @__PURE__ */ new Set([".png", ".jpg", ".jpeg", ".gif", ".webp", ".bmp"]);
18510
18674
  var MAX_IMAGE_SIZE = 20 * 1024 * 1024;
18511
18675
  var MIME_TYPES = {
@@ -18533,15 +18697,15 @@ Examples:
18533
18697
  async execute({ path: filePath, prompt, provider }) {
18534
18698
  const startTime = performance.now();
18535
18699
  const effectivePrompt = prompt ?? "Describe this image in detail. If it's code or a UI, identify the key elements.";
18536
- const absPath = path27.resolve(filePath);
18700
+ const absPath = path29.resolve(filePath);
18537
18701
  const cwd = process.cwd();
18538
- if (!absPath.startsWith(cwd + path27.sep) && absPath !== cwd) {
18702
+ if (!absPath.startsWith(cwd + path29.sep) && absPath !== cwd) {
18539
18703
  throw new ToolError(
18540
18704
  `Path traversal denied: '${filePath}' resolves outside the project directory`,
18541
18705
  { tool: "read_image" }
18542
18706
  );
18543
18707
  }
18544
- const ext = path27.extname(absPath).toLowerCase();
18708
+ const ext = path29.extname(absPath).toLowerCase();
18545
18709
  if (!SUPPORTED_FORMATS.has(ext)) {
18546
18710
  throw new ToolError(
18547
18711
  `Unsupported image format '${ext}'. Supported: ${Array.from(SUPPORTED_FORMATS).join(", ")}`,
@@ -18549,7 +18713,7 @@ Examples:
18549
18713
  );
18550
18714
  }
18551
18715
  try {
18552
- const stat2 = await fs28.stat(absPath);
18716
+ const stat2 = await fs29.stat(absPath);
18553
18717
  if (!stat2.isFile()) {
18554
18718
  throw new ToolError(`Path is not a file: ${absPath}`, {
18555
18719
  tool: "read_image"
@@ -18570,7 +18734,7 @@ Examples:
18570
18734
  if (error instanceof ToolError) throw error;
18571
18735
  throw error;
18572
18736
  }
18573
- const imageBuffer = await fs28.readFile(absPath);
18737
+ const imageBuffer = await fs29.readFile(absPath);
18574
18738
  const base64 = imageBuffer.toString("base64");
18575
18739
  const mimeType = MIME_TYPES[ext] ?? "image/png";
18576
18740
  const selectedProvider = provider ?? "anthropic";
@@ -18683,7 +18847,7 @@ Examples:
18683
18847
  }
18684
18848
  });
18685
18849
  var imageTools = [readImageTool];
18686
- var path28 = await import('path');
18850
+ var path30 = await import('path');
18687
18851
  var DANGEROUS_PATTERNS2 = [
18688
18852
  /\bDROP\s+(?:TABLE|DATABASE|INDEX|VIEW)\b/i,
18689
18853
  /\bTRUNCATE\b/i,
@@ -18714,7 +18878,7 @@ Examples:
18714
18878
  async execute({ database, query, params, readonly: isReadonlyParam }) {
18715
18879
  const isReadonly = isReadonlyParam ?? true;
18716
18880
  const startTime = performance.now();
18717
- const absPath = path28.resolve(database);
18881
+ const absPath = path30.resolve(database);
18718
18882
  if (isReadonly && isDangerousSql(query)) {
18719
18883
  throw new ToolError(
18720
18884
  "Write operations (INSERT, UPDATE, DELETE, DROP, ALTER, TRUNCATE, CREATE) are blocked in readonly mode. Set readonly: false to allow writes.",
@@ -18787,7 +18951,7 @@ Examples:
18787
18951
  }),
18788
18952
  async execute({ database, table }) {
18789
18953
  const startTime = performance.now();
18790
- const absPath = path28.resolve(database);
18954
+ const absPath = path30.resolve(database);
18791
18955
  try {
18792
18956
  const { default: Database } = await import('better-sqlite3');
18793
18957
  const db = new Database(absPath, { readonly: true, fileMustExist: true });
@@ -18840,14 +19004,14 @@ Examples:
18840
19004
  }
18841
19005
  });
18842
19006
  var databaseTools = [sqlQueryTool, inspectSchemaTool];
18843
- var fs29 = await import('fs/promises');
18844
- var path29 = await import('path');
19007
+ var fs30 = await import('fs/promises');
19008
+ var path31 = await import('path');
18845
19009
  var AnalyzeFileSchema = z.object({
18846
19010
  filePath: z.string().describe("Path to file to analyze"),
18847
19011
  includeAst: z.boolean().default(false).describe("Include AST in result")
18848
19012
  });
18849
19013
  async function analyzeFile(filePath, includeAst = false) {
18850
- const content = await fs29.readFile(filePath, "utf-8");
19014
+ const content = await fs30.readFile(filePath, "utf-8");
18851
19015
  const lines = content.split("\n").length;
18852
19016
  const functions = [];
18853
19017
  const classes = [];
@@ -18951,10 +19115,10 @@ async function analyzeDirectory(dirPath) {
18951
19115
  try {
18952
19116
  const analysis = await analyzeFile(file, false);
18953
19117
  totalLines += analysis.lines;
18954
- const ext = path29.extname(file);
19118
+ const ext = path31.extname(file);
18955
19119
  filesByType[ext] = (filesByType[ext] || 0) + 1;
18956
19120
  fileStats.push({
18957
- file: path29.relative(dirPath, file),
19121
+ file: path31.relative(dirPath, file),
18958
19122
  lines: analysis.lines,
18959
19123
  complexity: analysis.complexity.cyclomatic
18960
19124
  });
@@ -19277,13 +19441,13 @@ ${completed.map((r) => `- ${r.agentId}: Success`).join("\n")}`;
19277
19441
  }
19278
19442
  });
19279
19443
  var agentCoordinatorTools = [createAgentPlanTool, delegateTaskTool, aggregateResultsTool];
19280
- var fs30 = await import('fs/promises');
19444
+ var fs31 = await import('fs/promises');
19281
19445
  var SuggestImprovementsSchema = z.object({
19282
19446
  filePath: z.string().describe("File to analyze for improvement suggestions"),
19283
19447
  context: z.string().optional().describe("Additional context about the code")
19284
19448
  });
19285
19449
  async function analyzeAndSuggest(filePath, _context) {
19286
- const content = await fs30.readFile(filePath, "utf-8");
19450
+ const content = await fs31.readFile(filePath, "utf-8");
19287
19451
  const lines = content.split("\n");
19288
19452
  const suggestions = [];
19289
19453
  for (let i = 0; i < lines.length; i++) {
@@ -19375,7 +19539,7 @@ async function analyzeAndSuggest(filePath, _context) {
19375
19539
  if (filePath.endsWith(".ts") && !filePath.includes("test") && !filePath.includes(".d.ts") && line.includes("export ")) {
19376
19540
  const testPath = filePath.replace(".ts", ".test.ts");
19377
19541
  try {
19378
- await fs30.access(testPath);
19542
+ await fs31.access(testPath);
19379
19543
  } catch {
19380
19544
  suggestions.push({
19381
19545
  type: "testing",
@@ -19432,7 +19596,7 @@ var calculateCodeScoreTool = defineTool({
19432
19596
  async execute(input) {
19433
19597
  const { filePath } = input;
19434
19598
  const suggestions = await analyzeAndSuggest(filePath);
19435
- const content = await fs30.readFile(filePath, "utf-8");
19599
+ const content = await fs31.readFile(filePath, "utf-8");
19436
19600
  const lines = content.split("\n");
19437
19601
  const nonEmptyLines = lines.filter((l) => l.trim()).length;
19438
19602
  let score = 100;
@@ -19466,8 +19630,8 @@ var calculateCodeScoreTool = defineTool({
19466
19630
  }
19467
19631
  });
19468
19632
  var smartSuggestionsTools = [suggestImprovementsTool, calculateCodeScoreTool];
19469
- var fs31 = await import('fs/promises');
19470
- var path30 = await import('path');
19633
+ var fs32 = await import('fs/promises');
19634
+ var path32 = await import('path');
19471
19635
  var ContextMemoryStore = class {
19472
19636
  items = /* @__PURE__ */ new Map();
19473
19637
  learnings = /* @__PURE__ */ new Map();
@@ -19479,7 +19643,7 @@ var ContextMemoryStore = class {
19479
19643
  }
19480
19644
  async load() {
19481
19645
  try {
19482
- const content = await fs31.readFile(this.storePath, "utf-8");
19646
+ const content = await fs32.readFile(this.storePath, "utf-8");
19483
19647
  const data = JSON.parse(content);
19484
19648
  this.items = new Map(Object.entries(data.items || {}));
19485
19649
  this.learnings = new Map(Object.entries(data.learnings || {}));
@@ -19487,15 +19651,15 @@ var ContextMemoryStore = class {
19487
19651
  }
19488
19652
  }
19489
19653
  async save() {
19490
- const dir = path30.dirname(this.storePath);
19491
- await fs31.mkdir(dir, { recursive: true });
19654
+ const dir = path32.dirname(this.storePath);
19655
+ await fs32.mkdir(dir, { recursive: true });
19492
19656
  const data = {
19493
19657
  sessionId: this.sessionId,
19494
19658
  items: Object.fromEntries(this.items),
19495
19659
  learnings: Object.fromEntries(this.learnings),
19496
19660
  savedAt: Date.now()
19497
19661
  };
19498
- await fs31.writeFile(this.storePath, JSON.stringify(data, null, 2));
19662
+ await fs32.writeFile(this.storePath, JSON.stringify(data, null, 2));
19499
19663
  }
19500
19664
  addContext(id, item) {
19501
19665
  this.items.set(id, item);
@@ -19645,11 +19809,11 @@ var getLearnedPatternsTool = defineTool({
19645
19809
  const patterns = store.getFrequentPatterns(typedInput.limit);
19646
19810
  return {
19647
19811
  totalPatterns: patterns.length,
19648
- patterns: patterns.map((p3) => ({
19649
- pattern: p3.pattern,
19650
- preference: p3.userPreference,
19651
- frequency: p3.frequency,
19652
- lastUsed: new Date(p3.lastUsed).toISOString()
19812
+ patterns: patterns.map((p4) => ({
19813
+ pattern: p4.pattern,
19814
+ preference: p4.userPreference,
19815
+ frequency: p4.frequency,
19816
+ lastUsed: new Date(p4.lastUsed).toISOString()
19653
19817
  }))
19654
19818
  };
19655
19819
  }
@@ -19660,11 +19824,11 @@ var contextEnhancerTools = [
19660
19824
  recordLearningTool,
19661
19825
  getLearnedPatternsTool
19662
19826
  ];
19663
- var fs32 = await import('fs/promises');
19664
- var path31 = await import('path');
19827
+ var fs33 = await import('fs/promises');
19828
+ var path33 = await import('path');
19665
19829
  async function discoverSkills(skillsDir) {
19666
19830
  try {
19667
- const files = await fs32.readdir(skillsDir);
19831
+ const files = await fs33.readdir(skillsDir);
19668
19832
  return files.filter((f) => f.endsWith(".ts") || f.endsWith(".js"));
19669
19833
  } catch {
19670
19834
  return [];
@@ -19672,12 +19836,12 @@ async function discoverSkills(skillsDir) {
19672
19836
  }
19673
19837
  async function loadSkillMetadata(skillPath) {
19674
19838
  try {
19675
- const content = await fs32.readFile(skillPath, "utf-8");
19839
+ const content = await fs33.readFile(skillPath, "utf-8");
19676
19840
  const nameMatch = content.match(/@name\s+(\S+)/);
19677
19841
  const descMatch = content.match(/@description\s+(.+)/);
19678
19842
  const versionMatch = content.match(/@version\s+(\S+)/);
19679
19843
  return {
19680
- name: nameMatch?.[1] || path31.basename(skillPath, path31.extname(skillPath)),
19844
+ name: nameMatch?.[1] || path33.basename(skillPath, path33.extname(skillPath)),
19681
19845
  description: descMatch?.[1] || "No description",
19682
19846
  version: versionMatch?.[1] || "1.0.0",
19683
19847
  dependencies: []
@@ -19721,7 +19885,7 @@ var discoverSkillsTool = defineTool({
19721
19885
  const { skillsDir } = input;
19722
19886
  const skills = await discoverSkills(skillsDir);
19723
19887
  const metadata = await Promise.all(
19724
- skills.map((s) => loadSkillMetadata(path31.join(skillsDir, s)))
19888
+ skills.map((s) => loadSkillMetadata(path33.join(skillsDir, s)))
19725
19889
  );
19726
19890
  return {
19727
19891
  skillsDir,
@@ -19844,14 +20008,17 @@ export const ${typedInput.name}Tool = defineTool({
19844
20008
  }
19845
20009
  });
19846
20010
  var skillEnhancerTools = [discoverSkillsTool, validateSkillTool, createCustomToolTool];
20011
+ function gitExec(cmd, opts = {}) {
20012
+ return execSync(cmd, { encoding: "utf-8", cwd: process.cwd(), ...opts });
20013
+ }
19847
20014
  function analyzeRepoHealth() {
19848
20015
  const issues = [];
19849
20016
  const recommendations = [];
19850
20017
  let score = 100;
19851
20018
  try {
19852
20019
  try {
19853
- execSync("git status --porcelain", { stdio: "pipe" });
19854
- const status = execSync("git status --porcelain", { encoding: "utf-8" });
20020
+ gitExec("git status --porcelain", { stdio: "pipe" });
20021
+ const status = gitExec("git status --porcelain");
19855
20022
  if (status.trim()) {
19856
20023
  issues.push("Uncommitted changes present");
19857
20024
  score -= 10;
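
This hunk introduces gitExec, and the hunks that follow swap the remaining execSync call sites in analyzeRepoHealth and getCommitStats over to it, so the utf-8 encoding and working directory are pinned in one place instead of being repeated per call. A minimal standalone sketch of the same pattern (the safeGit and uncommittedChangeCount names are illustrative, not part of the package):

    import { execSync } from "node:child_process";

    // Centralised git runner: utf-8 output and an explicit cwd by default,
    // so individual call sites stay one-liners.
    function safeGit(cmd: string, opts: { cwd?: string } = {}): string {
      return execSync(cmd, { encoding: "utf-8", cwd: opts.cwd ?? process.cwd() });
    }

    // Example call site, mirroring analyzeRepoHealth(): a failing git command
    // (for instance, outside a repository) is treated as "no data", not an error.
    function uncommittedChangeCount(): number {
      try {
        const status = safeGit("git status --porcelain");
        return status.trim() ? status.trim().split("\n").length : 0;
      } catch {
        return 0;
      }
    }
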
@@ -19859,7 +20026,7 @@ function analyzeRepoHealth() {
19859
20026
  } catch {
19860
20027
  }
19861
20028
  try {
19862
- const untracked = execSync("git ls-files --others --exclude-standard", { encoding: "utf-8" });
20029
+ const untracked = gitExec("git ls-files --others --exclude-standard");
19863
20030
  if (untracked.trim()) {
19864
20031
  const count = untracked.trim().split("\n").length;
19865
20032
  issues.push(`${count} untracked files`);
@@ -19869,9 +20036,9 @@ function analyzeRepoHealth() {
19869
20036
  } catch {
19870
20037
  }
19871
20038
  try {
19872
- const branch = execSync("git rev-parse --abbrev-ref HEAD", { encoding: "utf-8" }).trim();
19873
- const local = execSync("git rev-parse HEAD", { encoding: "utf-8" }).trim();
19874
- const remote = execSync(`git rev-parse origin/${branch}`, { encoding: "utf-8" }).trim();
20039
+ const branch = gitExec("git rev-parse --abbrev-ref HEAD").trim();
20040
+ const local = gitExec("git rev-parse HEAD").trim();
20041
+ const remote = gitExec(`git rev-parse origin/${branch}`).trim();
19875
20042
  if (local !== remote) {
19876
20043
  issues.push("Branch is not up-to-date with remote");
19877
20044
  score -= 15;
@@ -19880,14 +20047,14 @@ function analyzeRepoHealth() {
19880
20047
  } catch {
19881
20048
  }
19882
20049
  try {
19883
- const files = execSync("git ls-files", { encoding: "utf-8" }).trim().split("\n");
20050
+ const files = gitExec("git ls-files").trim().split("\n");
19884
20051
  if (files.length > 1e3) {
19885
20052
  recommendations.push("Repository has many files, consider using .gitignore");
19886
20053
  }
19887
20054
  } catch {
19888
20055
  }
19889
20056
  try {
19890
- const conflicts = execSync("git diff --name-only --diff-filter=U", { encoding: "utf-8" });
20057
+ const conflicts = gitExec("git diff --name-only --diff-filter=U");
19891
20058
  if (conflicts.trim()) {
19892
20059
  issues.push("Merge conflicts present");
19893
20060
  score -= 30;
@@ -19903,12 +20070,11 @@ function analyzeRepoHealth() {
19903
20070
  }
19904
20071
  function getCommitStats() {
19905
20072
  try {
19906
- const count = execSync("git rev-list --count HEAD", { encoding: "utf-8" }).trim();
19907
- const authors = execSync('git log --format="%an" | sort -u', {
19908
- encoding: "utf-8",
20073
+ const count = gitExec("git rev-list --count HEAD").trim();
20074
+ const authors = gitExec('git log --format="%an" | sort -u', {
19909
20075
  shell: "/bin/bash"
19910
20076
  }).trim().split("\n");
19911
- const lastCommit = execSync('git log -1 --format="%cr"', { encoding: "utf-8" }).trim();
20077
+ const lastCommit = gitExec('git log -1 --format="%cr"').trim();
19912
20078
  return {
19913
20079
  totalCommits: parseInt(count, 10),
19914
20080
  authors,
@@ -19981,7 +20147,7 @@ var recommendBranchTool = defineTool({
19981
20147
  const branchName = `${prefix}/${slug}`;
19982
20148
  let exists = false;
19983
20149
  try {
19984
- execSync(`git rev-parse --verify ${branchName}`, { stdio: "ignore" });
20150
+ execSync(`git rev-parse --verify ${branchName}`, { cwd: process.cwd(), stdio: "ignore" });
19985
20151
  exists = true;
19986
20152
  } catch {
19987
20153
  exists = false;
@@ -19996,6 +20162,449 @@ var recommendBranchTool = defineTool({
19996
20162
  }
19997
20163
  });
19998
20164
  var gitEnhancedTools = [analyzeRepoHealthTool, getCommitStatsTool, recommendBranchTool];
20165
+ async function ghExec(args, cwd) {
20166
+ try {
20167
+ const result = await execa("gh", args, {
20168
+ cwd: cwd ?? process.cwd(),
20169
+ timeout: 6e4
20170
+ });
20171
+ return { stdout: result.stdout, stderr: result.stderr };
20172
+ } catch (error) {
20173
+ const message = error instanceof Error ? error.message : String(error);
20174
+ throw new ToolError(`gh command failed: ${message}`, {
20175
+ tool: "github",
20176
+ cause: error instanceof Error ? error : void 0
20177
+ });
20178
+ }
20179
+ }
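
All of the new GitHub tools shell out to the gh CLI through this single ghExec helper: execa with a 60-second timeout, an optional working directory, and any failure (gh missing, not authenticated, non-zero exit) rethrown as the package's ToolError. A rough standalone equivalent, with a plain Error standing in for ToolError and runGh as an illustrative name:

    import { execa } from "execa";

    // Run a gh subcommand and return its output; any failure surfaces as one
    // wrapped error with the original attached as the cause.
    async function runGh(args: string[], cwd?: string): Promise<{ stdout: string; stderr: string }> {
      try {
        const result = await execa("gh", args, { cwd: cwd ?? process.cwd(), timeout: 60_000 });
        return { stdout: result.stdout, stderr: result.stderr };
      } catch (error) {
        const message = error instanceof Error ? error.message : String(error);
        throw new Error(`gh command failed: ${message}`, { cause: error });
      }
    }

Each gh_* tool below is then a thin argument builder plus JSON or regex parsing layered on top of this one call.
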
20180
+ var ghCheckAuthTool = defineTool({
20181
+ name: "gh_check_auth",
20182
+ description: "Check if the GitHub CLI is installed and authenticated.",
20183
+ category: "git",
20184
+ parameters: z.object({
20185
+ cwd: z.string().optional()
20186
+ }),
20187
+ async execute({ cwd }) {
20188
+ try {
20189
+ const { stdout } = await ghExec(["auth", "status"], cwd);
20190
+ const userMatch = stdout.match(/Logged in to .+ as (\S+)/);
20191
+ return {
20192
+ authenticated: true,
20193
+ user: userMatch?.[1]
20194
+ };
20195
+ } catch {
20196
+ return { authenticated: false, error: "gh CLI not authenticated. Run: gh auth login" };
20197
+ }
20198
+ }
20199
+ });
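
gh_check_auth never throws: it probes gh auth status, pulls a username out of a "Logged in to ... as ..." line when one is present, and otherwise reports authenticated: false with a hint to run gh auth login. A hedged sketch of how a caller might branch on that result shape (ensureGhAuth is a hypothetical consumer, not part of the package):

    type AuthResult = { authenticated: boolean; user?: string; error?: string };

    // Gate a PR workflow on the auth check; authentication itself stays a
    // one-time manual step for the user.
    async function ensureGhAuth(check: () => Promise<AuthResult>): Promise<string> {
      const res = await check();
      if (!res.authenticated) {
        throw new Error(res.error ?? "gh CLI not authenticated. Run: gh auth login");
      }
      return res.user ?? "unknown";
    }
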
20200
+ var ghRepoInfoTool = defineTool({
20201
+ name: "gh_repo_info",
20202
+ description: "Get GitHub repository information (name, default branch, URL).",
20203
+ category: "git",
20204
+ parameters: z.object({
20205
+ cwd: z.string().optional()
20206
+ }),
20207
+ async execute({ cwd }) {
20208
+ const { stdout } = await ghExec(
20209
+ ["repo", "view", "--json", "name,nameWithOwner,defaultBranchRef,url,isPrivate"],
20210
+ cwd
20211
+ );
20212
+ const data = JSON.parse(stdout);
20213
+ return {
20214
+ name: data.name,
20215
+ fullName: data.nameWithOwner,
20216
+ defaultBranch: data.defaultBranchRef.name,
20217
+ url: data.url,
20218
+ private: data.isPrivate
20219
+ };
20220
+ }
20221
+ });
20222
+ var ghPrCreateTool = defineTool({
20223
+ name: "gh_pr_create",
20224
+ description: "Create a GitHub pull request.",
20225
+ category: "git",
20226
+ parameters: z.object({
20227
+ title: z.string().describe("PR title"),
20228
+ body: z.string().describe("PR body (markdown)"),
20229
+ base: z.string().optional().describe("Base branch (default: repo default)"),
20230
+ draft: z.boolean().optional().default(false),
20231
+ cwd: z.string().optional()
20232
+ }),
20233
+ async execute({ title, body, base, draft, cwd }) {
20234
+ const args = ["pr", "create", "--title", title, "--body", body];
20235
+ if (base) args.push("--base", base);
20236
+ if (draft) args.push("--draft");
20237
+ const { stdout } = await ghExec(args, cwd);
20238
+ const url = stdout.trim();
20239
+ const numberMatch = url.match(/\/pull\/(\d+)/);
20240
+ return {
20241
+ number: numberMatch ? parseInt(numberMatch[1], 10) : 0,
20242
+ url
20243
+ };
20244
+ }
20245
+ });
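
gh_pr_create builds the gh pr create argument list from the validated input and then recovers the PR number from the URL that gh prints on stdout. For a draft PR against main the call reduces to roughly the following (the input values and the example URL are invented; the argument order matches the execute body above):

    // Input as the agent would supply it (validated by the zod schema above).
    const input = {
      title: "feat: add GitHub tools",
      body: "Adds gh-based PR and release helpers.",
      base: "main",
      draft: true,
    };

    // Argument list handed to gh, built exactly as in execute():
    const args = ["pr", "create", "--title", input.title, "--body", input.body];
    if (input.base) args.push("--base", input.base);
    if (input.draft) args.push("--draft");
    // → gh pr create --title ... --body ... --base main --draft

    // gh prints the new PR URL; the number is parsed back out of it.
    const url = "https://github.com/owner/repo/pull/42"; // example output
    const number = Number(url.match(/\/pull\/(\d+)/)?.[1] ?? 0); // 42
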
20246
+ var ghPrMergeTool = defineTool({
20247
+ name: "gh_pr_merge",
20248
+ description: "Merge a GitHub pull request.",
20249
+ category: "git",
20250
+ parameters: z.object({
20251
+ number: z.number().describe("PR number"),
20252
+ method: z.enum(["squash", "merge", "rebase"]).optional().default("squash"),
20253
+ deleteBranch: z.boolean().optional().default(true),
20254
+ subject: z.string().optional().describe("Merge commit subject line"),
20255
+ body: z.string().optional().describe("Merge commit body"),
20256
+ cwd: z.string().optional()
20257
+ }),
20258
+ async execute({ number, method, deleteBranch, subject, body, cwd }) {
20259
+ const args = ["pr", "merge", String(number), `--${method}`];
20260
+ if (deleteBranch) args.push("--delete-branch");
20261
+ if (subject) args.push("--subject", subject);
20262
+ if (body) args.push("--body", body);
20263
+ await ghExec(args, cwd);
20264
+ return { merged: true, method };
20265
+ }
20266
+ });
20267
+ var ghPrChecksTool = defineTool({
20268
+ name: "gh_pr_checks",
20269
+ description: "Get CI check statuses for a pull request.",
20270
+ category: "git",
20271
+ parameters: z.object({
20272
+ number: z.number().describe("PR number"),
20273
+ cwd: z.string().optional()
20274
+ }),
20275
+ async execute({ number, cwd }) {
20276
+ const { stdout } = await ghExec(
20277
+ ["pr", "checks", String(number), "--json", "name,state,conclusion,detailsUrl"],
20278
+ cwd
20279
+ );
20280
+ const raw = JSON.parse(stdout);
20281
+ const checks = raw.map((c) => {
20282
+ let status = "pending";
20283
+ if (c.state === "SUCCESS" || c.conclusion === "SUCCESS") status = "pass";
20284
+ else if (c.state === "FAILURE" || c.conclusion === "FAILURE") status = "fail";
20285
+ else if (c.state === "SKIPPED" || c.conclusion === "SKIPPED") status = "skipping";
20286
+ return {
20287
+ name: c.name,
20288
+ status,
20289
+ conclusion: c.conclusion || c.state,
20290
+ url: c.detailsUrl
20291
+ };
20292
+ });
20293
+ return {
20294
+ checks,
20295
+ allPassed: checks.length > 0 && checks.every((c) => c.status === "pass" || c.status === "skipping"),
20296
+ anyFailed: checks.some((c) => c.status === "fail"),
20297
+ anyPending: checks.some((c) => c.status === "pending")
20298
+ };
20299
+ }
20300
+ });
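
gh_pr_checks normalises each reported check into pass, fail, skipping, or pending, then derives three aggregate flags so a caller can poll until either allPassed or anyFailed flips. A small sketch of that aggregation over an invented check list (the summarize helper is illustrative; the rules match the return object above):

    type CheckStatus = "pass" | "fail" | "skipping" | "pending";
    interface Check { name: string; status: CheckStatus }

    // Skipped checks do not block allPassed; any pending check keeps anyPending true.
    function summarize(checks: Check[]) {
      return {
        allPassed: checks.length > 0 && checks.every(c => c.status === "pass" || c.status === "skipping"),
        anyFailed: checks.some(c => c.status === "fail"),
        anyPending: checks.some(c => c.status === "pending"),
      };
    }

    // Example: one green, one skipped, one still running.
    summarize([
      { name: "build", status: "pass" },
      { name: "docs", status: "skipping" },
      { name: "e2e", status: "pending" },
    ]); // → { allPassed: false, anyFailed: false, anyPending: true }
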
20301
+ var ghPrListTool = defineTool({
20302
+ name: "gh_pr_list",
20303
+ description: "List pull requests, optionally filtered by head branch.",
20304
+ category: "git",
20305
+ parameters: z.object({
20306
+ head: z.string().optional().describe("Filter by head branch name"),
20307
+ state: z.string().optional().default("open"),
20308
+ cwd: z.string().optional()
20309
+ }),
20310
+ async execute({ head, state, cwd }) {
20311
+ const args = ["pr", "list", "--json", "number,title,url,state", "--state", state];
20312
+ if (head) args.push("--head", head);
20313
+ const { stdout } = await ghExec(args, cwd);
20314
+ const raw = JSON.parse(stdout);
20315
+ return { prs: raw };
20316
+ }
20317
+ });
20318
+ var ghReleaseCreateTool = defineTool({
20319
+ name: "gh_release_create",
20320
+ description: "Create a GitHub release with notes.",
20321
+ category: "git",
20322
+ parameters: z.object({
20323
+ tag: z.string().describe("Tag name (e.g., v1.2.3)"),
20324
+ title: z.string().optional().describe("Release title"),
20325
+ notes: z.string().optional().describe("Release notes (markdown)"),
20326
+ draft: z.boolean().optional().default(false),
20327
+ prerelease: z.boolean().optional().default(false),
20328
+ cwd: z.string().optional()
20329
+ }),
20330
+ async execute({ tag, title, notes, draft, prerelease, cwd }) {
20331
+ const args = ["release", "create", tag];
20332
+ if (title) args.push("--title", title);
20333
+ if (notes) args.push("--notes", notes);
20334
+ if (draft) args.push("--draft");
20335
+ if (prerelease) args.push("--prerelease");
20336
+ const { stdout } = await ghExec(args, cwd);
20337
+ return { url: stdout.trim(), tag };
20338
+ }
20339
+ });
20340
+ var githubTools = [
20341
+ ghCheckAuthTool,
20342
+ ghRepoInfoTool,
20343
+ ghPrCreateTool,
20344
+ ghPrMergeTool,
20345
+ ghPrChecksTool,
20346
+ ghPrListTool,
20347
+ ghReleaseCreateTool
20348
+ ];
20349
+ var INTERPRETER_MAP = {
20350
+ ".py": ["python3"],
20351
+ ".sh": ["bash"],
20352
+ ".bash": ["bash"],
20353
+ ".zsh": ["zsh"],
20354
+ ".js": ["node"],
20355
+ ".ts": ["npx", "tsx"],
20356
+ ".rb": ["ruby"],
20357
+ ".pl": ["perl"],
20358
+ ".lua": ["lua"],
20359
+ ".php": ["php"]
20360
+ };
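
This interpreter map drives exec mode of the new open_file tool: the file extension selects a command prefix, and the script path plus any user arguments are appended to it, so a .ts file resolves to npx tsx <file> ... while an extension-less +x binary is run directly. A sketch of that resolution step (resolveCommand is an illustrative name; the map below is a subset of the one above):

    import * as path from "node:path";

    const INTERPRETERS: Record<string, string[]> = {
      ".py": ["python3"],
      ".sh": ["bash"],
      ".js": ["node"],
      ".ts": ["npx", "tsx"],
    };

    // Returns [command, ...args] for a known script type, or undefined when the
    // extension is unknown (the tool then falls back to executable files, or refuses).
    function resolveCommand(file: string, userArgs: string[]): string[] | undefined {
      const interp = INTERPRETERS[path.extname(file).toLowerCase()];
      if (!interp) return undefined;
      const [cmd, ...prefix] = interp;
      return [cmd, ...prefix, file, ...userArgs];
    }

    resolveCommand("scripts/migrate.ts", ["--dry-run"]);
    // → ["npx", "tsx", "scripts/migrate.ts", "--dry-run"]
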
20361
+ var BLOCKED_PATHS2 = ["/etc", "/var", "/usr", "/root", "/sys", "/proc", "/boot", "/dev"];
20362
+ var BLOCKED_EXEC_PATTERNS = [
20363
+ /\.env(?:\.\w+)?$/,
20364
+ /\.pem$/,
20365
+ /\.key$/,
20366
+ /id_rsa/,
20367
+ /credentials\.\w+$/i,
20368
+ /secrets?\.\w+$/i
20369
+ ];
20370
+ var DANGEROUS_ARG_PATTERNS = [
20371
+ /\brm\s+-rf\s+\/(?!\w)/,
20372
+ /\bsudo\s+rm/,
20373
+ /\bdd\s+if=.*of=\/dev\//,
20374
+ /`[^`]+`/,
20375
+ /\$\([^)]+\)/,
20376
+ /\beval\s+/,
20377
+ /\bcurl\s+.*\|\s*(ba)?sh/
20378
+ ];
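
Before anything is opened or executed, three denylists are consulted: system directory prefixes, filename patterns for secrets (.env files, keys, credentials), and regexes for obviously destructive argument strings such as a recursive rm on /, piping curl into a shell, or backtick and $(...) substitution. A quick illustration of what the argument check catches, using a subset of the same regexes:

    const DANGEROUS = [
      /\brm\s+-rf\s+\/(?!\w)/,   // rm -rf /  (but not rm -rf /tmp/x)
      /\bcurl\s+.*\|\s*(ba)?sh/, // curl ... | sh
      /\$\([^)]+\)/,             // $(command substitution)
    ];

    const hasDangerousArgs = (args: string[]) =>
      DANGEROUS.some(p => p.test(args.join(" ")));

    hasDangerousArgs(["--env", "staging"]);                    // false
    hasDangerousArgs(["--post-install", "curl evil.sh | sh"]); // true
    hasDangerousArgs(["rm", "-rf", "/"]);                      // true
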
20379
+ function getSystemOpenCommand() {
20380
+ return process.platform === "darwin" ? "open" : "xdg-open";
20381
+ }
20382
+ function hasNullByte2(str) {
20383
+ return str.includes("\0");
20384
+ }
20385
+ function isBlockedPath(absolute) {
20386
+ for (const blocked of BLOCKED_PATHS2) {
20387
+ const normalizedBlocked = path14__default.normalize(blocked);
20388
+ if (absolute === normalizedBlocked || absolute.startsWith(normalizedBlocked + path14__default.sep)) {
20389
+ return blocked;
20390
+ }
20391
+ }
20392
+ return void 0;
20393
+ }
20394
+ function isBlockedExecFile(filePath) {
20395
+ return BLOCKED_EXEC_PATTERNS.some((p4) => p4.test(filePath));
20396
+ }
20397
+ function hasDangerousArgs(args) {
20398
+ const joined = args.join(" ");
20399
+ return DANGEROUS_ARG_PATTERNS.some((p4) => p4.test(joined));
20400
+ }
20401
+ function getInterpreter(ext) {
20402
+ return INTERPRETER_MAP[ext.toLowerCase()];
20403
+ }
20404
+ async function isExecutable(filePath) {
20405
+ try {
20406
+ await fs14__default.access(filePath, fs14__default.constants.X_OK);
20407
+ return true;
20408
+ } catch {
20409
+ return false;
20410
+ }
20411
+ }
20412
+ var openFileTool = defineTool({
20413
+ name: "open_file",
20414
+ description: `Open a file with the system application or execute a script/binary.
20415
+
20416
+ Mode "open" (default): Opens the file with the OS default application.
20417
+ - HTML files \u2192 browser
20418
+ - Images \u2192 image viewer
20419
+ - PDFs \u2192 PDF reader
20420
+ - Directories \u2192 file manager
20421
+
20422
+ Mode "exec": Executes a script or binary.
20423
+ - .py \u2192 python3, .sh \u2192 bash, .js \u2192 node, .ts \u2192 npx tsx
20424
+ - .rb \u2192 ruby, .pl \u2192 perl, .lua \u2192 lua, .php \u2192 php
20425
+ - Binaries with +x permissions \u2192 direct execution
20426
+
20427
+ Examples:
20428
+ - Open in browser: { "path": "docs/index.html" }
20429
+ - View image: { "path": "screenshot.png" }
20430
+ - Run script: { "path": "scripts/setup.sh", "mode": "exec" }
20431
+ - Run with args: { "path": "deploy.py", "mode": "exec", "args": ["--env", "staging"] }`,
20432
+ category: "bash",
20433
+ parameters: z.object({
20434
+ path: z.string().describe("File path to open or execute"),
20435
+ mode: z.enum(["open", "exec"]).optional().default("open").describe("open = system app, exec = run script"),
20436
+ args: z.array(z.string()).optional().default([]).describe("Arguments for exec mode"),
20437
+ cwd: z.string().optional().describe("Working directory"),
20438
+ timeout: z.number().optional().describe("Timeout in ms for exec mode (default 120000)")
20439
+ }),
20440
+ async execute({ path: filePath, mode = "open", args = [], cwd, timeout }) {
20441
+ const start = performance.now();
20442
+ if (!filePath || hasNullByte2(filePath)) {
20443
+ throw new ToolError("Invalid file path", { tool: "open_file" });
20444
+ }
20445
+ const workDir = cwd ?? process.cwd();
20446
+ const absolute = path14__default.isAbsolute(filePath) ? path14__default.normalize(filePath) : path14__default.resolve(workDir, filePath);
20447
+ const blockedBy = isBlockedPath(absolute);
20448
+ if (blockedBy) {
20449
+ throw new ToolError(`Access to system path '${blockedBy}' is not allowed`, {
20450
+ tool: "open_file"
20451
+ });
20452
+ }
20453
+ try {
20454
+ await fs14__default.access(absolute);
20455
+ } catch {
20456
+ throw new ToolError(`File not found: ${absolute}`, { tool: "open_file" });
20457
+ }
20458
+ if (mode === "open") {
20459
+ const cmd = getSystemOpenCommand();
20460
+ await execa(cmd, [absolute], { timeout: 1e4 });
20461
+ return {
20462
+ action: "opened",
20463
+ path: absolute,
20464
+ resolvedCommand: cmd,
20465
+ duration: performance.now() - start
20466
+ };
20467
+ }
20468
+ if (isBlockedExecFile(absolute)) {
20469
+ throw new ToolError(`Execution of sensitive file is blocked: ${path14__default.basename(absolute)}`, {
20470
+ tool: "open_file"
20471
+ });
20472
+ }
20473
+ if (args.length > 0 && hasDangerousArgs(args)) {
20474
+ throw new ToolError("Arguments contain dangerous patterns", { tool: "open_file" });
20475
+ }
20476
+ const ext = path14__default.extname(absolute);
20477
+ const interpreter = getInterpreter(ext);
20478
+ const executable = await isExecutable(absolute);
20479
+ let command;
20480
+ let cmdArgs;
20481
+ if (interpreter) {
20482
+ command = interpreter[0];
20483
+ cmdArgs = [...interpreter.slice(1), absolute, ...args];
20484
+ } else if (executable) {
20485
+ command = absolute;
20486
+ cmdArgs = [...args];
20487
+ } else {
20488
+ throw new ToolError(
20489
+ `Cannot execute '${path14__default.basename(absolute)}': no known interpreter for '${ext || "(no extension)"}' and file is not executable`,
20490
+ { tool: "open_file" }
20491
+ );
20492
+ }
20493
+ const result = await execa(command, cmdArgs, {
20494
+ cwd: workDir,
20495
+ timeout: timeout ?? 12e4,
20496
+ reject: false
20497
+ });
20498
+ return {
20499
+ action: "executed",
20500
+ path: absolute,
20501
+ resolvedCommand: interpreter ? interpreter.join(" ") : absolute,
20502
+ stdout: result.stdout || void 0,
20503
+ stderr: result.stderr || void 0,
20504
+ exitCode: result.exitCode ?? 0,
20505
+ duration: performance.now() - start
20506
+ };
20507
+ }
20508
+ });
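
Taken together, exec mode runs through: null-byte and path validation, the system-path and secrets blocklists, the dangerous-argument check, interpreter or +x resolution, and finally execa with reject: false, so a non-zero exit comes back as data (exitCode, stderr) rather than a thrown error. A hedged example of the result shape an agent sees back (field names come from the return object above; the values are invented):

    // open_file({ path: "scripts/setup.sh", mode: "exec", args: ["--ci"] })
    // might resolve to something like:
    const exampleResult = {
      action: "executed",
      path: "/home/dev/project/scripts/setup.sh",
      resolvedCommand: "bash",          // interpreter chosen from the .sh extension
      stdout: "setup complete",
      stderr: undefined,                // empty output is normalised to undefined
      exitCode: 0,
      duration: 412.7,                  // milliseconds, from performance.now()
    };
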
20509
+ var openTools = [openFileTool];
20510
+ init_allowed_paths();
20511
+ var BLOCKED_SYSTEM_PATHS = [
20512
+ "/etc",
20513
+ "/var",
20514
+ "/usr",
20515
+ "/root",
20516
+ "/sys",
20517
+ "/proc",
20518
+ "/boot",
20519
+ "/bin",
20520
+ "/sbin"
20521
+ ];
20522
+ var authorizePathTool = defineTool({
20523
+ name: "authorize_path",
20524
+ description: `Request user permission to access a directory outside the project root.
20525
+
20526
+ Use this BEFORE attempting file operations on external directories. The user will see
20527
+ an interactive prompt where they choose to allow or deny access.
20528
+
20529
+ Returns whether the path was authorized. If authorized, subsequent file operations
20530
+ on that directory will succeed.
20531
+
20532
+ Examples:
20533
+ - Need to read config from another project: authorize_path({ path: "/home/user/other-project" })
20534
+ - Need to access shared libraries: authorize_path({ path: "/opt/shared/libs", reason: "Read shared type definitions" })`,
20535
+ category: "config",
20536
+ parameters: z.object({
20537
+ path: z.string().min(1).describe("Absolute path to the directory to authorize"),
20538
+ reason: z.string().optional().describe("Why access is needed (shown to user for context)")
20539
+ }),
20540
+ async execute({ path: dirPath, reason }) {
20541
+ const absolute = path14__default.resolve(dirPath);
20542
+ if (isWithinAllowedPath(absolute, "read")) {
20543
+ return {
20544
+ authorized: true,
20545
+ path: absolute,
20546
+ message: "Path is already authorized."
20547
+ };
20548
+ }
20549
+ for (const blocked of BLOCKED_SYSTEM_PATHS) {
20550
+ const normalizedBlocked = path14__default.normalize(blocked);
20551
+ if (absolute === normalizedBlocked || absolute.startsWith(normalizedBlocked + path14__default.sep)) {
20552
+ return {
20553
+ authorized: false,
20554
+ path: absolute,
20555
+ message: `System path '${blocked}' cannot be authorized for security reasons.`
20556
+ };
20557
+ }
20558
+ }
20559
+ const cwd = process.cwd();
20560
+ if (absolute === path14__default.normalize(cwd) || absolute.startsWith(path14__default.normalize(cwd) + path14__default.sep)) {
20561
+ return {
20562
+ authorized: true,
20563
+ path: absolute,
20564
+ message: "Path is within the project directory \u2014 already accessible."
20565
+ };
20566
+ }
20567
+ try {
20568
+ const stat2 = await fs14__default.stat(absolute);
20569
+ if (!stat2.isDirectory()) {
20570
+ return {
20571
+ authorized: false,
20572
+ path: absolute,
20573
+ message: `Not a directory: ${absolute}`
20574
+ };
20575
+ }
20576
+ } catch {
20577
+ return {
20578
+ authorized: false,
20579
+ path: absolute,
20580
+ message: `Directory not found: ${absolute}`
20581
+ };
20582
+ }
20583
+ const existing = getAllowedPaths();
20584
+ if (existing.some((e) => path14__default.normalize(e.path) === path14__default.normalize(absolute))) {
20585
+ return {
20586
+ authorized: true,
20587
+ path: absolute,
20588
+ message: "Path is already authorized."
20589
+ };
20590
+ }
20591
+ const { promptAllowPath: promptAllowPath2 } = await Promise.resolve().then(() => (init_allow_path_prompt(), allow_path_prompt_exports));
20592
+ const wasAuthorized = await promptAllowPath2(absolute);
20593
+ if (wasAuthorized) {
20594
+ return {
20595
+ authorized: true,
20596
+ path: absolute,
20597
+ message: `Access granted to ${absolute}.${reason ? ` Reason: ${reason}` : ""}`
20598
+ };
20599
+ }
20600
+ return {
20601
+ authorized: false,
20602
+ path: absolute,
20603
+ message: "User denied access to this directory."
20604
+ };
20605
+ }
20606
+ });
20607
+ var authorizePathTools = [authorizePathTool];
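
authorize_path is the consent gate for directories outside the project: already-allowed paths short-circuit to authorized, system prefixes are refused outright, in-project paths are reported as already accessible, the target must exist and be a directory, and only then is the interactive promptAllowPath prompt shown; the user's answer comes back as a plain { authorized, path, message } object. A compressed, runnable sketch of that decision order, with placeholder stand-ins (alreadyAllowed, askUser) for the package's own helpers:

    import * as path from "node:path";
    import { promises as fs } from "node:fs";

    const BLOCKED = ["/etc", "/var", "/usr", "/root", "/sys", "/proc", "/boot", "/bin", "/sbin"];
    const alreadyAllowed = (_p: string) => false;       // placeholder for the allow-list lookup
    const askUser = async (_p: string) => true;         // placeholder for the interactive prompt

    async function authorizePath(dirPath: string) {
      const absolute = path.resolve(dirPath);
      if (alreadyAllowed(absolute)) {
        return { authorized: true, path: absolute, message: "Path is already authorized." };
      }
      if (BLOCKED.some(b => absolute === b || absolute.startsWith(b + path.sep))) {
        return { authorized: false, path: absolute, message: "System path cannot be authorized." };
      }
      if (absolute.startsWith(path.normalize(process.cwd()) + path.sep)) {
        return { authorized: true, path: absolute, message: "Within the project, already accessible." };
      }
      const stat = await fs.stat(absolute).catch(() => undefined);
      if (!stat?.isDirectory()) {
        return { authorized: false, path: absolute, message: "Not an existing directory." };
      }
      const ok = await askUser(absolute);               // allow / deny
      return { authorized: ok, path: absolute, message: ok ? "Access granted." : "User denied access." };
    }
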
19999
20608
 
20000
20609
  // src/tools/index.ts
20001
20610
  function registerAllTools(registry) {
@@ -20028,7 +20637,10 @@ function registerAllTools(registry) {
20028
20637
  ...smartSuggestionsTools,
20029
20638
  ...contextEnhancerTools,
20030
20639
  ...skillEnhancerTools,
20031
- ...gitEnhancedTools
20640
+ ...gitEnhancedTools,
20641
+ ...githubTools,
20642
+ ...openTools,
20643
+ ...authorizePathTools
20032
20644
  ];
20033
20645
  for (const tool of allTools) {
20034
20646
  registry.register(tool);
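
The net effect of this release on the tool surface: three new groups (githubTools, openTools, authorizePathTools) are spread into the flat list that registerAllTools walks, so they are registered exactly like the existing git tools; the registration loop itself is unchanged. A minimal sketch of that registry pattern, assuming only a name property and a Map-backed store (simplified, not the package's actual ToolRegistry):

    interface Tool { name: string; description: string }

    class ToolRegistry {
      private tools = new Map<string, Tool>();
      register(tool: Tool) { this.tools.set(tool.name, tool); }
      list(): string[] { return [...this.tools.keys()]; }
    }

    // 1.3.0 simply appends the new groups to the existing spread:
    const githubTools: Tool[] = [/* gh_check_auth, gh_pr_create, ... */];
    const openTools: Tool[] = [/* open_file */];
    const authorizePathTools: Tool[] = [/* authorize_path */];

    const registry = new ToolRegistry();
    for (const tool of [...githubTools, ...openTools, ...authorizePathTools]) {
      registry.register(tool);
    }
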