mcp-agents 0.5.2 → 0.5.3

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (3) hide show
  1. package/README.md +61 -20
  2. package/package.json +1 -1
  3. package/server.js +32 -13
package/README.md CHANGED
@@ -15,16 +15,29 @@ MCP server that wraps AI CLI tools — [Claude Code](https://docs.anthropic.com/
15
15
 
16
16
  Only the CLI you select with `--provider` needs to be present.
17
17
 
18
+ ## Install
19
+
20
+ ```bash
21
+ npm install -g mcp-agents
22
+ ```
23
+
24
+ Global install is **strongly recommended** over `npx -y mcp-agents@latest`. The `npx`
25
+ approach performs a network round-trip on every cold start, which can exceed MCP client
26
+ connection timeouts and cause "stream disconnected" errors.
27
+
28
+ **Tip:** If your project's `.mcp.json` references `mcp-agents`, add `npm install -g mcp-agents`
29
+ to your setup script (e.g. `bin/setup`) so new developers get it automatically.
30
+
18
31
  ## Quick test
19
32
 
20
33
  ```bash
21
34
  # Default provider (codex)
22
- npx mcp-agents
35
+ mcp-agents
23
36
 
24
37
  # Specific provider
25
- npx mcp-agents --provider claude
26
- npx mcp-agents --provider gemini
27
- npx mcp-agents --provider gemini --sandbox false
38
+ mcp-agents --provider claude
39
+ mcp-agents --provider gemini
40
+ mcp-agents --provider gemini --sandbox false
28
41
  ```
29
42
 
30
43
  The server speaks [JSON-RPC over stdio](https://modelcontextprotocol.io/docs/concepts/transports#stdio). It prints `[mcp-agents] ready (provider: <name>)` to stderr when it's listening.
@@ -57,29 +70,29 @@ Each `--provider` flag maps to a single exposed tool:
57
70
  ### `codex` (pass-through)
58
71
 
59
72
  The codex provider passes through to Codex's native MCP server (`codex mcp-server`)
60
- with configurable flags:
73
+ using `-c key=value` config overrides:
61
74
 
62
- | CLI Flag | Default | Codex flag |
63
- |----------|---------|------------|
64
- | `--model` | `gpt-5.3-codex` | `-m <model>` |
65
- | `--model_reasoning_effort` | `high` | `-c model_reasoning_effort=<value>` |
75
+ | CLI Flag | Default | Codex config key |
76
+ |----------|---------|-----------------|
77
+ | `--model` | `gpt-5.3-codex` | `model` |
78
+ | `--model_reasoning_effort` | `high` | `model_reasoning_effort` |
66
79
 
67
- Hardcoded defaults: `-s read-only -a never` (safe for MCP server mode).
80
+ Hardcoded defaults: `sandbox_mode=read-only`, `approval_policy=never` (safe for MCP server mode).
68
81
 
69
82
  ## Integration with Claude Code
70
83
 
71
- Add entries to your project's `.mcp.json`:
84
+ Add entries to your project's `.mcp.json` (requires `npm install -g mcp-agents`):
72
85
 
73
86
  ```json
74
87
  {
75
88
  "mcpServers": {
76
89
  "codex": {
77
- "command": "npx",
78
- "args": ["-y", "mcp-agents@latest", "--provider", "codex"]
90
+ "command": "mcp-agents",
91
+ "args": ["--provider", "codex"]
79
92
  },
80
93
  "gemini": {
81
- "command": "npx",
82
- "args": ["-y", "mcp-agents@latest", "--provider", "gemini", "--sandbox", "false"]
94
+ "command": "mcp-agents",
95
+ "args": ["--provider", "gemini", "--sandbox", "false"]
83
96
  }
84
97
  }
85
98
  }
@@ -87,33 +100,61 @@ Add entries to your project's `.mcp.json`:
87
100
 
88
101
  Override codex defaults:
89
102
 
103
+ ```json
104
+ {
105
+ "mcpServers": {
106
+ "codex": {
107
+ "command": "mcp-agents",
108
+ "args": ["--provider", "codex", "--model", "o3-pro", "--model_reasoning_effort", "medium"]
109
+ }
110
+ }
111
+ }
112
+ ```
113
+
114
+ <details>
115
+ <summary>Alternative: using npx (slower, not recommended)</summary>
116
+
90
117
  ```json
91
118
  {
92
119
  "mcpServers": {
93
120
  "codex": {
94
121
  "command": "npx",
95
- "args": ["-y", "mcp-agents@latest", "--provider", "codex", "--model", "o3-pro", "--model_reasoning_effort", "medium"]
122
+ "args": ["-y", "mcp-agents@latest", "--provider", "codex"]
96
123
  }
97
124
  }
98
125
  }
99
126
  ```
100
127
 
128
+ > **Warning:** `npx -y mcp-agents@latest` performs a network round-trip on every cold
129
+ > start (~70s), which can exceed MCP client connection timeouts.
130
+
131
+ </details>
132
+
101
133
  ## Integration with OpenAI Codex
102
134
 
103
135
  Add two entries to `~/.codex/config.toml` — one per provider you want available:
104
136
 
105
137
  ```toml
106
138
  [mcp_servers.claude-code]
107
- command = "npx"
108
- args = ["-y", "mcp-agents", "--provider", "claude"]
139
+ command = "mcp-agents"
140
+ args = ["--provider", "claude"]
109
141
 
110
142
  [mcp_servers.gemini]
111
- command = "npx"
112
- args = ["-y", "mcp-agents", "--provider", "gemini", "--sandbox", "false"]
143
+ command = "mcp-agents"
144
+ args = ["--provider", "gemini", "--sandbox", "false"]
113
145
  ```
114
146
 
115
147
  Then in a Codex session you can call the `claude_code` or `gemini` tools, which shell out to the respective CLIs.
116
148
 
149
+ ## Development
150
+
151
+ ```bash
152
+ npm install
153
+ npm link # symlinks mcp-agents to your local server.js
154
+ ```
155
+
156
+ After `npm link`, any edits to `server.js` take effect immediately — no reinstall needed.
157
+
117
158
  ## How it works
118
159
 
119
160
  1. An MCP client connects over stdio
package/package.json CHANGED
@@ -1,6 +1,6 @@
1
1
  {
2
2
  "name": "mcp-agents",
3
- "version": "0.5.2",
3
+ "version": "0.5.3",
4
4
  "description": "MCP server that wraps AI CLI tools (Claude Code, Gemini CLI, Codex CLI) for use by any MCP client",
5
5
  "type": "module",
6
6
  "bin": {
package/server.js CHANGED
@@ -92,8 +92,8 @@ Usage: mcp-agents [options]
92
92
 
93
93
  Options:
94
94
  --provider <name> CLI backend to use (${providers}) [default: codex]
95
- --model <model> Model to use (codex) [default: gpt-5.3-codex]
96
- --model_reasoning_effort <e> Reasoning effort (codex) [default: high]
95
+ --model <model> Codex model [default: gpt-5.3-codex]
96
+ --model_reasoning_effort <e> Codex reasoning effort [default: high]
97
97
  --sandbox <bool> Gemini sandbox mode (true/false) [default: false]
98
98
  --timeout <seconds> Default timeout per call [default: 300]
99
99
  --help, -h Show this help message
@@ -274,28 +274,47 @@ function runCli(command, args, opts = {}) {
274
274
  */
275
275
  function runCodexPassthrough({ model, modelReasoningEffort }) {
276
276
  const args = [
277
- "-m",
278
- model || "gpt-5.3-codex",
279
- "-s",
280
- "read-only",
281
- "-a",
282
- "never",
283
- "-c",
284
- `model_reasoning_effort=${modelReasoningEffort || "high"}`,
285
277
  "mcp-server",
278
+ "-c", `model=${model || "gpt-5.3-codex"}`,
279
+ "-c", "sandbox_mode=read-only",
280
+ "-c", "approval_policy=never",
281
+ "-c", `model_reasoning_effort=${modelReasoningEffort || "high"}`,
286
282
  ];
287
283
 
288
284
  logErr(`[mcp-agents] passthrough: codex ${args.join(" ")}`);
289
285
 
290
- const child = spawn("codex", args, { stdio: "inherit" });
286
+ const child = spawn("codex", args, {
287
+ stdio: ["inherit", "inherit", "pipe"],
288
+ });
289
+
290
+ child.stderr.on("data", (chunk) => {
291
+ logErr(`[codex] ${chunk.toString().trimEnd()}`);
292
+ });
293
+
294
+ const SIGNAL_CODES = { SIGHUP: 1, SIGINT: 2, SIGTERM: 15 };
295
+ for (const sig of ["SIGTERM", "SIGINT", "SIGHUP"]) {
296
+ process.once(sig, () => {
297
+ child.kill(sig);
298
+ setTimeout(() => {
299
+ child.kill("SIGKILL");
300
+ process.exit(128 + SIGNAL_CODES[sig]);
301
+ }, 5000).unref();
302
+ });
303
+ }
291
304
 
292
305
  child.on("error", (err) => {
293
306
  logErr(`[mcp-agents] failed to start codex: ${err.message}`);
294
307
  process.exitCode = 1;
295
308
  });
296
309
 
297
- child.on("exit", (code) => {
298
- process.exitCode = code ?? 1;
310
+ child.on("exit", (code, signal) => {
311
+ if (signal) {
312
+ logErr(`[mcp-agents] codex killed by ${signal}`);
313
+ process.exitCode = 128 + (SIGNAL_CODES[signal] ?? 0);
314
+ } else {
315
+ if (code !== 0) logErr(`[mcp-agents] codex exited with code ${code}`);
316
+ process.exitCode = code ?? 1;
317
+ }
299
318
  });
300
319
  }
301
320