phone-a-friend-mcp-server 0.1.0__tar.gz → 0.1.2__tar.gz
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- phone_a_friend_mcp_server-0.1.2/PKG-INFO +205 -0
- phone_a_friend_mcp_server-0.1.2/README.md +181 -0
- {phone_a_friend_mcp_server-0.1.0 → phone_a_friend_mcp_server-0.1.2}/pyproject.toml +12 -12
- {phone_a_friend_mcp_server-0.1.0 → phone_a_friend_mcp_server-0.1.2}/src/phone_a_friend_mcp_server/__init__.py +4 -7
- {phone_a_friend_mcp_server-0.1.0 → phone_a_friend_mcp_server-0.1.2}/src/phone_a_friend_mcp_server/config.py +47 -8
- {phone_a_friend_mcp_server-0.1.0 → phone_a_friend_mcp_server-0.1.2}/src/phone_a_friend_mcp_server/server.py +18 -13
- {phone_a_friend_mcp_server-0.1.0 → phone_a_friend_mcp_server-0.1.2}/src/phone_a_friend_mcp_server/tools/fax_tool.py +45 -5
- {phone_a_friend_mcp_server-0.1.0 → phone_a_friend_mcp_server-0.1.2}/src/phone_a_friend_mcp_server/tools/phone_tool.py +15 -10
- phone_a_friend_mcp_server-0.1.2/tests/test_tools.py +184 -0
- {phone_a_friend_mcp_server-0.1.0 → phone_a_friend_mcp_server-0.1.2}/uv.lock +12 -9
- phone_a_friend_mcp_server-0.1.0/PKG-INFO +0 -320
- phone_a_friend_mcp_server-0.1.0/README.md +0 -289
- phone_a_friend_mcp_server-0.1.0/tests/test_tools.py +0 -112
- {phone_a_friend_mcp_server-0.1.0 → phone_a_friend_mcp_server-0.1.2}/.gitignore +0 -0
- {phone_a_friend_mcp_server-0.1.0 → phone_a_friend_mcp_server-0.1.2}/.pre-commit-config.yaml +0 -0
- {phone_a_friend_mcp_server-0.1.0 → phone_a_friend_mcp_server-0.1.2}/Dockerfile +0 -0
- {phone_a_friend_mcp_server-0.1.0 → phone_a_friend_mcp_server-0.1.2}/LICENSE +0 -0
- {phone_a_friend_mcp_server-0.1.0 → phone_a_friend_mcp_server-0.1.2}/src/phone_a_friend_mcp_server/__main__.py +0 -0
- {phone_a_friend_mcp_server-0.1.0 → phone_a_friend_mcp_server-0.1.2}/src/phone_a_friend_mcp_server/client/__init__.py +0 -0
- {phone_a_friend_mcp_server-0.1.0 → phone_a_friend_mcp_server-0.1.2}/src/phone_a_friend_mcp_server/tools/__init__.py +0 -0
- {phone_a_friend_mcp_server-0.1.0 → phone_a_friend_mcp_server-0.1.2}/src/phone_a_friend_mcp_server/tools/base_tools.py +0 -0
- {phone_a_friend_mcp_server-0.1.0 → phone_a_friend_mcp_server-0.1.2}/src/phone_a_friend_mcp_server/tools/tool_manager.py +0 -0
- {phone_a_friend_mcp_server-0.1.0 → phone_a_friend_mcp_server-0.1.2}/tests/__init__.py +0 -0
@@ -0,0 +1,205 @@
+Metadata-Version: 2.4
+Name: phone-a-friend-mcp-server
+Version: 0.1.2
+Summary: MCP Server for Phone-a-Friend assistance
+Project-URL: GitHub, https://github.com/abhishekbhakat/phone-a-friend-mcp-server
+Project-URL: Issues, https://github.com/abhishekbhakat/phone-a-friend-mcp-server/issues
+Author-email: Abhishek Bhakat <abhishek.bhakat@hotmail.com>
+License-Expression: MIT
+License-File: LICENSE
+Classifier: Development Status :: 3 - Alpha
+Classifier: Operating System :: OS Independent
+Classifier: Programming Language :: Python :: 3.10
+Classifier: Programming Language :: Python :: 3.11
+Classifier: Programming Language :: Python :: 3.12
+Requires-Python: >=3.11
+Requires-Dist: aiofiles>=24.1.0
+Requires-Dist: aiohttp>=3.12.7
+Requires-Dist: click>=8.2.1
+Requires-Dist: mcp>=1.9.2
+Requires-Dist: pydantic-ai-slim[anthropic,google,openai]>=0.2.14
+Requires-Dist: pydantic>=2.11.5
+Requires-Dist: pyyaml>=6.0.0
+Description-Content-Type: text/markdown
+
+# Phone-a-Friend MCP Server 🧠📞
+
+An AI-to-AI consultation system that enables one AI to "phone a friend" (another AI) for critical thinking, long context reasoning, and complex problem solving via OpenRouter.
+
+## The Problem 🤔
+
+Sometimes an AI encounters complex problems that require:
+- **Deep critical thinking** beyond immediate capabilities
+- **Long context reasoning** with extensive information
+- **Multi-step analysis** that benefits from external perspective
+- **Specialized expertise** from different AI models
+
+## The Solution 📞
+
+Phone-a-Friend MCP Server creates a **two-step consultation process**:
+
+1. **Context + Reasoning**: Package all relevant context and send to external AI for deep analysis
+2. **Extract Actionable Insights**: Process the reasoning response into usable format for the primary AI
+
+This enables AI systems to leverage other AI models as "consultants" for complex reasoning tasks.
+
+## Architecture 🏗️
+
+```
+Primary AI → Phone-a-Friend MCP → OpenRouter → External AI (GPT-4, Claude, etc.) → Processed Response → Primary AI
+```
+
+**Sequential Workflow:**
+1. `analyze_context` - Gather and structure all relevant context
+2. `get_critical_thinking` - Send context to external AI via OpenRouter for reasoning
+3. `extract_actionable_insights` - Process response into actionable format
+
+## When to Use 🎯
+
+**Ideal for:**
+- Complex multi-step problems requiring deep analysis
+- Situations needing long context reasoning (>100k tokens)
+- Cross-domain expertise consultation
+- Critical decision-making with high stakes
+- Problems requiring multiple perspectives
+
+## Installation 🚀
+
+1. Clone the repository:
+```bash
+git clone https://github.com/abhishekbhakat/phone-a-friend-mcp-server.git
+cd phone-a-friend-mcp-server
+```
+
+2. Install dependencies:
+```bash
+uv pip install -e .
+```
+
+3. Configure API access (choose one method):
+
+**Option A: Environment Variables**
+```bash
+export OPENROUTER_API_KEY="your-openrouter-key"
+# OR
+export OPENAI_API_KEY="your-openai-key"
+# OR
+export ANTHROPIC_API_KEY="your-anthropic-key"
+# OR
+export GOOGLE_API_KEY="your-google-key"
+```
+
+**Option B: CLI Arguments**
+```bash
+phone-a-friend-mcp-server --api-key "your-api-key" --provider openai
+```
+
+## Usage 💡
+
+### Command Line Options
+```bash
+
+# Custom base URL (if needed)
+phone-a-friend-mcp-server --base-url "https://custom-api.example.com"
+
+# Temperature control (0.0 = deterministic, 2.0 = very creative)
+phone-a-friend-mcp-server --temperature 0.4
+
+# Combined example
+phone-a-friend-mcp-server --api-key "sk-..." --provider openai --model "o3" -v
+```
+
+### Environment Variables (Optional)
+```bash
+
+# Optional model overrides
+export PHONE_A_FRIEND_MODEL="your-preferred-model"
+export PHONE_A_FRIEND_PROVIDER="your-preferred-provider"
+export PHONE_A_FRIEND_BASE_URL="https://custom-api.example.com"
+
+# Temperature control (0.0-2.0, where 0.0 = deterministic, 2.0 = very creative)
+export PHONE_A_FRIEND_TEMPERATURE=0.4
+```
+
+## Model Selection 🤖
+
+Default reasoning models to be selected:
+- **OpenAI**: o3
+- **Anthropic**: Claude 4 Opus
+- **Google**: Gemini 2.5 Pro Preview 05-06 (automatically set temperature to 0.0)
+- **OpenRouter**: For other models like Deepseek or Qwen
+
+You can override the auto-selection by setting `PHONE_A_FRIEND_MODEL` environment variable or using the `--model` CLI option.
+
+## Available Tools 🛠️
+
+### phone_a_friend
+📞 Consult external AI for critical thinking and complex reasoning. Makes API calls to get responses.
+
+### fax_a_friend
+📠 Generate master prompt file for manual AI consultation. Creates file for copy-paste workflow.
+
+**Parameters (both tools):**
+- `all_related_context` (required): All context related to the problem
+- `any_additional_context` (optional): Additional helpful context
+- `task` (required): Specific task or question for the AI
+
+
+## Use Cases 🎯
+
+1. In-depth Reasoning for Vibe Coding
+2. For complex algorithms, data structures, or mathematical computations
+3. Frontend Development with React, Vue, CSS, or modern frontend frameworks
+
+## Claude Desktop Configuration 🖥️
+
+To use Phone-a-Friend MCP server with Claude Desktop, add this configuration to your `claude_desktop_config.json` file:
+
+### Configuration File Location
+- **macOS**: `~/Library/Application Support/Claude/claude_desktop_config.json`
+- **Windows**: `%APPDATA%\Claude\claude_desktop_config.json`
+
+### Configuration
+
+**Option 1: Using uv (Recommended)**
+```json
+{
+  "mcpServers": {
+    "phone-a-friend": {
+      "command": "uvx",
+      "args": [
+        "--refresh",
+        "phone-a-friend-mcp-server",
+      ],
+      "env": {
+        "OPENROUTER_API_KEY": "your-openrouter-api-key",
+        "PHONE_A_FRIEND_MODEL": "anthropic/claude-4-opus",
+        "PHONE_A_FRIEND_TEMPERATURE": "0.4"
+      }
+    }
+  }
+}
+```
+
+### Environment Variables in Configuration
+
+You can configure different AI providers directly in the Claude Desktop config:
+
+```json
+{
+  "mcpServers": {
+    "phone-a-friend": {
+      "command": "phone-a-friend-mcp-server",
+      "env": {
+        "OPENROUTER_API_KEY": "your-openrouter-api-key",
+        "PHONE_A_FRIEND_MODEL": "anthropic/claude-4-opus",
+        "PHONE_A_FRIEND_TEMPERATURE": "0.4"
+      }
+    }
+  }
+}
+```
+
+## License 📄
+
+MIT License - see LICENSE file for details.
@@ -0,0 +1,181 @@
+# Phone-a-Friend MCP Server 🧠📞
+
+An AI-to-AI consultation system that enables one AI to "phone a friend" (another AI) for critical thinking, long context reasoning, and complex problem solving via OpenRouter.
+
+## The Problem 🤔
+
+Sometimes an AI encounters complex problems that require:
+- **Deep critical thinking** beyond immediate capabilities
+- **Long context reasoning** with extensive information
+- **Multi-step analysis** that benefits from external perspective
+- **Specialized expertise** from different AI models
+
+## The Solution 📞
+
+Phone-a-Friend MCP Server creates a **two-step consultation process**:
+
+1. **Context + Reasoning**: Package all relevant context and send to external AI for deep analysis
+2. **Extract Actionable Insights**: Process the reasoning response into usable format for the primary AI
+
+This enables AI systems to leverage other AI models as "consultants" for complex reasoning tasks.
+
+## Architecture 🏗️
+
+```
+Primary AI → Phone-a-Friend MCP → OpenRouter → External AI (GPT-4, Claude, etc.) → Processed Response → Primary AI
+```
+
+**Sequential Workflow:**
+1. `analyze_context` - Gather and structure all relevant context
+2. `get_critical_thinking` - Send context to external AI via OpenRouter for reasoning
+3. `extract_actionable_insights` - Process response into actionable format
+
+## When to Use 🎯
+
+**Ideal for:**
+- Complex multi-step problems requiring deep analysis
+- Situations needing long context reasoning (>100k tokens)
+- Cross-domain expertise consultation
+- Critical decision-making with high stakes
+- Problems requiring multiple perspectives
+
+## Installation 🚀
+
+1. Clone the repository:
+```bash
+git clone https://github.com/abhishekbhakat/phone-a-friend-mcp-server.git
+cd phone-a-friend-mcp-server
+```
+
+2. Install dependencies:
+```bash
+uv pip install -e .
+```
+
+3. Configure API access (choose one method):
+
+**Option A: Environment Variables**
+```bash
+export OPENROUTER_API_KEY="your-openrouter-key"
+# OR
+export OPENAI_API_KEY="your-openai-key"
+# OR
+export ANTHROPIC_API_KEY="your-anthropic-key"
+# OR
+export GOOGLE_API_KEY="your-google-key"
+```
+
+**Option B: CLI Arguments**
+```bash
+phone-a-friend-mcp-server --api-key "your-api-key" --provider openai
+```
+
+## Usage 💡
+
+### Command Line Options
+```bash
+
+# Custom base URL (if needed)
+phone-a-friend-mcp-server --base-url "https://custom-api.example.com"
+
+# Temperature control (0.0 = deterministic, 2.0 = very creative)
+phone-a-friend-mcp-server --temperature 0.4
+
+# Combined example
+phone-a-friend-mcp-server --api-key "sk-..." --provider openai --model "o3" -v
+```
+
+### Environment Variables (Optional)
+```bash
+
+# Optional model overrides
+export PHONE_A_FRIEND_MODEL="your-preferred-model"
+export PHONE_A_FRIEND_PROVIDER="your-preferred-provider"
+export PHONE_A_FRIEND_BASE_URL="https://custom-api.example.com"
+
+# Temperature control (0.0-2.0, where 0.0 = deterministic, 2.0 = very creative)
+export PHONE_A_FRIEND_TEMPERATURE=0.4
+```
+
+## Model Selection 🤖
+
+Default reasoning models to be selected:
+- **OpenAI**: o3
+- **Anthropic**: Claude 4 Opus
+- **Google**: Gemini 2.5 Pro Preview 05-06 (automatically set temperature to 0.0)
+- **OpenRouter**: For other models like Deepseek or Qwen
+
+You can override the auto-selection by setting `PHONE_A_FRIEND_MODEL` environment variable or using the `--model` CLI option.
+
+## Available Tools 🛠️
+
+### phone_a_friend
+📞 Consult external AI for critical thinking and complex reasoning. Makes API calls to get responses.
+
+### fax_a_friend
+📠 Generate master prompt file for manual AI consultation. Creates file for copy-paste workflow.
+
+**Parameters (both tools):**
+- `all_related_context` (required): All context related to the problem
+- `any_additional_context` (optional): Additional helpful context
+- `task` (required): Specific task or question for the AI
+
+
+## Use Cases 🎯
+
+1. In-depth Reasoning for Vibe Coding
+2. For complex algorithms, data structures, or mathematical computations
+3. Frontend Development with React, Vue, CSS, or modern frontend frameworks
+
+## Claude Desktop Configuration 🖥️
+
+To use Phone-a-Friend MCP server with Claude Desktop, add this configuration to your `claude_desktop_config.json` file:
+
+### Configuration File Location
+- **macOS**: `~/Library/Application Support/Claude/claude_desktop_config.json`
+- **Windows**: `%APPDATA%\Claude\claude_desktop_config.json`
+
+### Configuration
+
+**Option 1: Using uv (Recommended)**
+```json
+{
+  "mcpServers": {
+    "phone-a-friend": {
+      "command": "uvx",
+      "args": [
+        "--refresh",
+        "phone-a-friend-mcp-server",
+      ],
+      "env": {
+        "OPENROUTER_API_KEY": "your-openrouter-api-key",
+        "PHONE_A_FRIEND_MODEL": "anthropic/claude-4-opus",
+        "PHONE_A_FRIEND_TEMPERATURE": "0.4"
+      }
+    }
+  }
+}
+```
+
+### Environment Variables in Configuration
+
+You can configure different AI providers directly in the Claude Desktop config:
+
+```json
+{
+  "mcpServers": {
+    "phone-a-friend": {
+      "command": "phone-a-friend-mcp-server",
+      "env": {
+        "OPENROUTER_API_KEY": "your-openrouter-api-key",
+        "PHONE_A_FRIEND_MODEL": "anthropic/claude-4-opus",
+        "PHONE_A_FRIEND_TEMPERATURE": "0.4"
+      }
+    }
+  }
+}
+```
+
+## License 📄
+
+MIT License - see LICENSE file for details.
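The README above names the two tools and their shared parameters but stops short of a programmatic call. A minimal sketch of invoking `phone_a_friend` over stdio with the `mcp` Python SDK (declared above as `mcp>=1.9.2`) could look like the following; the tool and argument names come from the README, while the client API usage, server command, environment value, and argument contents are assumptions for illustration, not part of this package's documentation.

```python
import asyncio

from mcp import ClientSession, StdioServerParameters
from mcp.client.stdio import stdio_client


async def consult() -> None:
    # Launch the server the same way Claude Desktop would (assumed command and env).
    server = StdioServerParameters(
        command="phone-a-friend-mcp-server",
        env={"OPENROUTER_API_KEY": "your-openrouter-api-key"},
    )
    async with stdio_client(server) as (read, write):
        async with ClientSession(read, write) as session:
            await session.initialize()
            # Parameter names follow the README: all_related_context, task,
            # and optionally any_additional_context.
            result = await session.call_tool(
                "phone_a_friend",
                arguments={
                    "all_related_context": "Full source of the module under review...",
                    "task": "Identify concurrency bugs and propose fixes.",
                },
            )
            print(result.content)


asyncio.run(consult())
```

Per the README, `fax_a_friend` takes the same documented parameters but writes a master prompt file for the manual copy-paste workflow instead of making an API call.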
@@ -1,6 +1,6 @@
 [project]
 name = "phone-a-friend-mcp-server"
-version = "0.1.0"
+version = "0.1.2"
 description = "MCP Server for Phone-a-Friend assistance"
 readme = "README.md"
 requires-python = ">=3.11"
@@ -14,7 +14,7 @@ dependencies = [
     "pydantic>=2.11.5",
     "pydantic-ai-slim[openai,anthropic,google]>=0.2.14",
     "click>=8.2.1",
-    "pyyaml>=6.0.0",
+    "pyyaml>=6.0.0",
 ]
 classifiers = [
     "Development Status :: 3 - Alpha",
@@ -32,16 +32,6 @@ Issues = "https://github.com/abhishekbhakat/phone-a-friend-mcp-server/issues"
 [project.scripts]
 phone-a-friend-mcp-server = "phone_a_friend_mcp_server.__main__:main"
 
-[project.optional-dependencies]
-dev = [
-    "build>=1.2.2.post1",
-    "pre-commit>=4.2.0",
-    "pytest>=8.3.4", # No newer version found in search results
-    "pytest-asyncio>=0.26.0",
-    "pytest-mock>=3.14.1",
-    "ruff>=0.11.12"
-]
-
 [build-system]
 requires = ["hatchling"]
 build-backend = "hatchling.build"
@@ -78,3 +68,13 @@ lint.unfixable = []
 [tool.ruff.format]
 quote-style = "double"
 indent-style = "space"
+
+[dependency-groups]
+dev = [
+    "build>=1.2.2.post1",
+    "pre-commit>=4.2.0",
+    "pytest>=8.4.0",
+    "pytest-asyncio>=1.0.0",
+    "pytest-mock>=3.14.1",
+    "ruff>=0.11.12",
+]
@@ -15,7 +15,8 @@ from phone_a_friend_mcp_server.server import serve
 @click.option("--model", help="Model to use (e.g., 'gpt-4', 'anthropic/claude-3.5-sonnet')")
 @click.option("--provider", help="Provider type ('openai', 'openrouter', 'anthropic', 'google')")
 @click.option("--base-url", help="Base URL for API")
-def main(verbose: int, api_key: str = None, model: str = None, provider: str = None, base_url: str = None) -> None:
+@click.option("--temperature", type=float, help="Temperature for the model (0.0-2.0). Lower values = more deterministic, higher = more creative")
+def main(verbose: int, api_key: str = None, model: str = None, provider: str = None, base_url: str = None, temperature: float = None) -> None:
     """MCP server for Phone-a-Friend AI consultation"""
     logging_level = logging.WARN
     if verbose == 1:
@@ -25,7 +26,6 @@ def main(verbose: int, api_key: str = None, model: str = None, provider: str = N
 
     logging.basicConfig(level=logging_level, stream=sys.stderr)
 
-    # Read environment variables with proper precedence
     config_api_key = (
         api_key
         or os.environ.get("OPENROUTER_API_KEY")
@@ -37,15 +37,12 @@
     config_model = model or os.environ.get("PHONE_A_FRIEND_MODEL")
     config_provider = provider or os.environ.get("PHONE_A_FRIEND_PROVIDER")
    config_base_url = base_url or os.environ.get("PHONE_A_FRIEND_BASE_URL")
-
-    # Initialize configuration
+    config_temperature = temperature
     try:
-        config = PhoneAFriendConfig(api_key=config_api_key, model=config_model, provider=config_provider, base_url=config_base_url)
+        config = PhoneAFriendConfig(api_key=config_api_key, model=config_model, provider=config_provider, base_url=config_base_url, temperature=config_temperature)
     except ValueError as e:
         click.echo(f"Configuration error: {e}", err=True)
         sys.exit(1)
-
-    # Start the server
     asyncio.run(serve(config))
 
 
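The `__init__.py` changes above keep the existing precedence rule: an explicit CLI flag always wins, and environment variables are consulted only when the flag is absent. A condensed illustration of that pattern follows; the helper below is hypothetical, written only to show the resolution order (the real code inlines these `or` chains), and the ordering of the API-key variables beyond `OPENROUTER_API_KEY` is an assumption.

```python
import os


def resolve_setting(cli_value: str | None, *env_vars: str) -> str | None:
    """Return the CLI flag if given, otherwise the first non-empty environment variable."""
    if cli_value is not None:
        return cli_value
    for name in env_vars:
        value = os.environ.get(name)
        if value:
            return value
    return None


# Mirrors how --model / PHONE_A_FRIEND_MODEL interact: the flag always wins.
model = resolve_setting(None, "PHONE_A_FRIEND_MODEL")

# Mirrors the api_key chain; OPENROUTER_API_KEY is checked first per the code above.
api_key = resolve_setting(
    None, "OPENROUTER_API_KEY", "ANTHROPIC_API_KEY", "GOOGLE_API_KEY", "OPENAI_API_KEY"
)
```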
@@ -1,28 +1,30 @@
+import os
+
+
 class PhoneAFriendConfig:
     """Centralized configuration for Phone-a-Friend MCP server."""
 
-    def __init__(self, api_key: str | None = None, model: str | None = None, base_url: str | None = None, provider: str | None = None) -> None:
+    def __init__(self, api_key: str | None = None, model: str | None = None, base_url: str | None = None, provider: str | None = None, temperature: float | None = None) -> None:
         """Initialize configuration with provided values.
 
         Args:
             api_key: API key for external AI services
             model: Model to use (e.g., 'gpt-4', 'anthropic/claude-3.5-sonnet')
             base_url: Custom base URL for API (optional, providers use defaults)
-            provider: Provider type ('openai', 'openrouter', 'anthropic')
+            provider: Provider type ('openai', 'openrouter', 'anthropic', 'google')
+            temperature: Temperature value for the model (0.0-2.0), overrides defaults
         """
         self.api_key = api_key
         self.provider = provider or self._detect_provider()
         self.model = model or self._get_default_model()
-        self.base_url = base_url
+        self.base_url = base_url
+        self.temperature = self._validate_temperature(temperature)
 
-        # Validate required configuration
         if not self.api_key:
             raise ValueError(f"Missing required API key for {self.provider}. Set {self._get_env_var_name()} environment variable or pass --api-key")
 
     def _detect_provider(self) -> str:
         """Detect provider based on available environment variables."""
-        import os
-
         if os.environ.get("OPENROUTER_API_KEY"):
             return "openrouter"
         elif os.environ.get("ANTHROPIC_API_KEY"):
@@ -32,12 +34,11 @@ class PhoneAFriendConfig:
         elif os.environ.get("OPENAI_API_KEY"):
             return "openai"
         else:
-            # Default to OpenAI
             return "openai"
 
     def _get_default_model(self) -> str:
         """Get default model based on provider."""
-        models = {"openai": "o3", "openrouter": "anthropic/claude-4-opus", "anthropic": "claude-4-opus", "google": "gemini-2.5-pro-preview-05-06"}
+        models = {"openai": "o3", "openrouter": "anthropic/claude-4-opus", "anthropic": "claude-4-opus", "google": "gemini-2.5-pro-preview-06-05"}
         if self.provider not in models:
             raise ValueError(f"Unknown provider: {self.provider}. Supported providers: {list(models.keys())}")
         return models[self.provider]
@@ -46,3 +47,41 @@ class PhoneAFriendConfig:
         """Get environment variable name for the provider."""
         env_vars = {"openai": "OPENAI_API_KEY", "openrouter": "OPENROUTER_API_KEY", "anthropic": "ANTHROPIC_API_KEY", "google": "GOOGLE_API_KEY or GEMINI_API_KEY"}
         return env_vars.get(self.provider, "OPENAI_API_KEY")
+
+    def _validate_temperature(self, temperature: float | None) -> float | None:
+        """Validate temperature value or get from environment variable."""
+        temp_value = temperature
+        if temp_value is None:
+            env_temp = os.environ.get("PHONE_A_FRIEND_TEMPERATURE")
+            if env_temp is not None:
+                try:
+                    temp_value = float(env_temp)
+                except ValueError:
+                    raise ValueError(f"Invalid temperature value in PHONE_A_FRIEND_TEMPERATURE: {env_temp}")
+
+        if temp_value is None:
+            temp_value = self._get_default_temperature_for_model()
+
+        if temp_value is not None:
+            if not isinstance(temp_value, int | float):
+                raise ValueError(f"Temperature must be a number, got {type(temp_value).__name__}")
+            if not (0.0 <= temp_value <= 2.0):
+                raise ValueError(f"Temperature must be between 0.0 and 2.0, got {temp_value}")
+
+        return temp_value
+
+    def _get_default_temperature_for_model(self) -> float | None:
+        """Get default temperature for specific models that benefit from it."""
+        default_temperatures = {
+            "gemini-2.5-pro-preview-06-05": 0.0,
+        }
+
+        return default_temperatures.get(self.model)
+
+    def get_temperature(self) -> float | None:
+        """Get the temperature setting for the current model.
+
+        Returns:
+            Temperature value if set, None otherwise
+        """
+        return self.temperature
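The new temperature handling in `PhoneAFriendConfig` resolves values in this order: explicit argument, then the `PHONE_A_FRIEND_TEMPERATURE` environment variable, then a per-model default (currently only the Gemini preview model), and finally validates the result against the 0.0-2.0 range. A minimal sketch exercising that behavior, assuming the package is installed locally so the import path matches the source tree shown in this diff:

```python
import os

from phone_a_friend_mcp_server.config import PhoneAFriendConfig

os.environ["PHONE_A_FRIEND_TEMPERATURE"] = "0.7"

# Explicit argument wins over the environment variable.
cfg = PhoneAFriendConfig(api_key="sk-test", provider="openai", temperature=0.2)
assert cfg.get_temperature() == 0.2

# Without an explicit value, the environment variable is used.
cfg = PhoneAFriendConfig(api_key="sk-test", provider="openai")
assert cfg.get_temperature() == 0.7

# Out-of-range values are rejected by _validate_temperature.
try:
    PhoneAFriendConfig(api_key="sk-test", provider="openai", temperature=3.5)
except ValueError as exc:
    print(exc)  # Temperature must be between 0.0 and 2.0, got 3.5

# With nothing set, the Google preview model falls back to its 0.0 default.
del os.environ["PHONE_A_FRIEND_TEMPERATURE"]
cfg = PhoneAFriendConfig(api_key="sk-test", provider="google")
assert cfg.get_temperature() == 0.0
```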
@@ -12,6 +12,22 @@ from phone_a_friend_mcp_server.tools.tool_manager import ToolManager
 logger = logging.getLogger(__name__)
 
 
+def _format_tool_result(result: Any) -> str:
+    """Format tool result for display."""
+    if isinstance(result, dict):
+        formatted_result = ""
+        for key, value in result.items():
+            if isinstance(value, list):
+                formatted_result += f"{key.title()}:\n"
+                for item in value:
+                    formatted_result += f"  • {item}\n"
+            else:
+                formatted_result += f"{key.title()}: {value}\n"
+        return formatted_result.strip()
+    else:
+        return str(result)
+
+
 async def serve(config: PhoneAFriendConfig) -> None:
     """Start the Phone-a-Friend MCP server.
 
@@ -55,19 +71,8 @@ async def serve(config: PhoneAFriendConfig) -> None:
             logger.info(f"Calling tool: {name} with arguments: {arguments}")
             tool = tool_manager.get_tool(name)
             result = await tool.run(**arguments)
-
-            if isinstance(result, dict):
-                formatted_result = ""
-                for key, value in result.items():
-                    if isinstance(value, list):
-                        formatted_result += f"{key.title()}:\n"
-                        for item in value:
-                            formatted_result += f"  • {item}\n"
-                    else:
-                        formatted_result += f"{key.title()}: {value}\n"
-                return [TextContent(type="text", text=formatted_result.strip())]
-            else:
-                return [TextContent(type="text", text=str(result))]
+            formatted_result = _format_tool_result(result)
+            return [TextContent(type="text", text=formatted_result)]
 
         except Exception as e:
             logger.error("Tool execution failed: %s", e)
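The handler refactor above replaces the inline formatting loop with the module-level `_format_tool_result` helper, so `call_tool` now just delegates and wraps the string in a `TextContent`. Behavior is unchanged: dict results become title-cased `Key: value` lines with bulleted lists, and anything else falls back to `str()`. A small sketch of that formatting, assuming the package is installed (the helper is private, so importing it here is purely for illustration):

```python
from phone_a_friend_mcp_server.server import _format_tool_result

# A dict result becomes title-cased "Key: value" lines, with lists bulleted.
result = {
    "status": "success",
    "recommendations": ["cache the parser", "add a retry budget"],
}
print(_format_tool_result(result))
# Status: success
# Recommendations:
#   • cache the parser
#   • add a retry budget

# Non-dict results fall back to str().
print(_format_tool_result("plain text answer"))
```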
|