consult-llm-mcp 2.13.2 → 2.13.3
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/CHANGELOG.md +6 -0
- package/README.md +2 -1
- package/package.json +5 -5
package/CHANGELOG.md
CHANGED
|
@@ -1,5 +1,11 @@
|
|
|
1
1
|
# Changelog
|
|
2
2
|
|
|
3
|
+
## v2.13.3 (2026-04-23)
|
|
4
|
+
|
|
5
|
+
- Added gpt-5.5 model support ($5/$30 per million tokens). The `openai` selector
|
|
6
|
+
now resolves to gpt-5.5, and the Cursor CLI backend automatically appends the
|
|
7
|
+
reasoning effort suffix when routing through cursor-agent
|
|
8
|
+
|
|
3
9
|
## v2.13.2 (2026-04-22)
|
|
4
10
|
|
|
5
11
|
- Added Anthropic provider support with the `claude-opus-4-7` model. Configure
|
package/README.md
CHANGED
|
@@ -646,7 +646,7 @@ built-in fallback.
|
|
|
646
646
|
| Selector | Priority |
|
|
647
647
|
| ----------- | -------------------------------------------------------------- |
|
|
648
648
|
| `gemini` | gemini-3.1-pro-preview → gemini-3-pro-preview → gemini-2.5-pro |
|
|
649
|
-
| `openai` | gpt-5.4 → gpt-5.3-codex → gpt-5.2 → gpt-5.2-codex |
|
|
649
|
+
| `openai` | gpt-5.5 → gpt-5.4 → gpt-5.3-codex → gpt-5.2 → gpt-5.2-codex |
|
|
650
650
|
| `anthropic` | claude-opus-4-7 |
|
|
651
651
|
| `deepseek` | deepseek-reasoner |
|
|
652
652
|
|
|
@@ -719,6 +719,7 @@ models complex questions.
|
|
|
719
719
|
- **gemini-3.1-pro-preview**: Google's Gemini 3.1 Pro Preview
|
|
720
720
|
- **deepseek-reasoner**: DeepSeek's reasoning model
|
|
721
721
|
- **MiniMax-M2.7**: MiniMax's M2.7 reasoning model (204K context)
|
|
722
|
+
- **gpt-5.5**: OpenAI's GPT-5.5 model
|
|
722
723
|
- **gpt-5.4**: OpenAI's GPT-5.4 model
|
|
723
724
|
- **gpt-5.2**: OpenAI's GPT-5.2 model
|
|
724
725
|
- **gpt-5.3-codex**: OpenAI's Codex model based on GPT-5.3
|
package/package.json
CHANGED
|
@@ -1,6 +1,6 @@
|
|
|
1
1
|
{
|
|
2
2
|
"name": "consult-llm-mcp",
|
|
3
|
-
"version": "2.13.2",
|
|
3
|
+
"version": "2.13.3",
|
|
4
4
|
"description": "MCP server for consulting powerful AI models",
|
|
5
5
|
"repository": {
|
|
6
6
|
"type": "git",
|
|
@@ -31,9 +31,9 @@
|
|
|
31
31
|
"ai"
|
|
32
32
|
],
|
|
33
33
|
"optionalDependencies": {
|
|
34
|
-
"consult-llm-mcp-darwin-arm64": "2.13.2",
|
|
35
|
-
"consult-llm-mcp-darwin-x64": "2.13.2",
|
|
36
|
-
"consult-llm-mcp-linux-x64": "2.13.2",
|
|
37
|
-
"consult-llm-mcp-linux-arm64": "2.13.2"
|
|
34
|
+
"consult-llm-mcp-darwin-arm64": "2.13.3",
|
|
35
|
+
"consult-llm-mcp-darwin-x64": "2.13.3",
|
|
36
|
+
"consult-llm-mcp-linux-x64": "2.13.3",
|
|
37
|
+
"consult-llm-mcp-linux-arm64": "2.13.3"
|
|
38
38
|
}
|
|
39
39
|
}
|