iflow-mcp_the-ai-workshops-searxng-mcp-server 0.1.0__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- .dockerignore +13 -0
- .env +3 -0
- .github/FUNDING.yml +4 -0
- 3184_process.log +3 -0
- Dockerfile +22 -0
- Local SearXNG MCP Agent Template.json +134 -0
- PKG-INFO +372 -0
- README.md +362 -0
- iflow_mcp_the_ai_workshops_searxng_mcp_server-0.1.0.dist-info/METADATA +372 -0
- iflow_mcp_the_ai_workshops_searxng_mcp_server-0.1.0.dist-info/RECORD +21 -0
- iflow_mcp_the_ai_workshops_searxng_mcp_server-0.1.0.dist-info/WHEEL +4 -0
- iflow_mcp_the_ai_workshops_searxng_mcp_server-0.1.0.dist-info/entry_points.txt +2 -0
- language.json +1 -0
- package.json +13 -0
- package_name +1 -0
- push_info.json +5 -0
- pyproject.toml +21 -0
- requirements.txt +3 -0
- server.py +120 -0
- wrangler.jsonc +4 -0
- wrangler.toml +12 -0
.dockerignore
ADDED
.env
ADDED
.github/FUNDING.yml
ADDED
3184_process.log
ADDED
Dockerfile
ADDED
@@ -0,0 +1,22 @@
# syntax=docker/dockerfile:1

FROM python:3.11-slim

WORKDIR /app

# Install system dependencies (if any)
RUN apt-get update && apt-get install -y --no-install-recommends \
    && rm -rf /var/lib/apt/lists/*

# Copy requirements and install
COPY requirements.txt .
RUN pip install --no-cache-dir -r requirements.txt

# Copy the rest of the code
COPY . .

# Expose the default port (can be overridden)
EXPOSE 32769

# Entrypoint for SSE (default)
CMD ["python", "server.py"]
Local SearXNG MCP Agent Template.json
ADDED
@@ -0,0 +1,134 @@
{
  "name": "Agent",
  "nodes": [
    {
      "parameters": {
        "options": {}
      },
      "type": "@n8n/n8n-nodes-langchain.chatTrigger",
      "typeVersion": 1.1,
      "position": [
        0,
        0
      ],
      "id": "3651ed70-8127-4baa-9e90-f8cfcbd097cb",
      "name": "When chat message received"
    },
    {
      "parameters": {
        "options": {
          "systemMessage": "You are a helpful assistant"
        }
      },
      "type": "@n8n/n8n-nodes-langchain.agent",
      "typeVersion": 1.8,
      "position": [
        220,
        0
      ],
      "id": "e2e8e789-2930-4712-a566-34d2ada35258",
      "name": "AI Agent"
    },
    {
      "parameters": {
        "model": {
          "__rl": true,
          "value": "gpt-4.1-mini",
          "mode": "list",
          "cachedResultName": "gpt-4.1-mini"
        },
        "options": {}
      },
      "type": "@n8n/n8n-nodes-langchain.lmChatOpenAi",
      "typeVersion": 1.2,
      "position": [
        100,
        220
      ],
      "id": "91043d43-1839-4d53-a217-57c82b79f640",
      "name": "OpenAI Chat Model"
    },
    {
      "parameters": {},
      "type": "@n8n/n8n-nodes-langchain.memoryBufferWindow",
      "typeVersion": 1.3,
      "position": [
        320,
        220
      ],
      "id": "678bda29-fd99-4021-a8e0-f6e9abae2eff",
      "name": "Simple Memory"
    },
    {
      "parameters": {
        "sseEndpoint": "http://host.docker.internal:32769/sse"
      },
      "type": "@n8n/n8n-nodes-langchain.mcpClientTool",
      "typeVersion": 1,
      "position": [
        480,
        220
      ],
      "id": "6a6e9557-dcec-47ce-98c4-c69229dddc17",
      "name": "SearXNG MCP Tool"
    }
  ],
  "pinData": {},
  "connections": {
    "When chat message received": {
      "main": [
        [
          {
            "node": "AI Agent",
            "type": "main",
            "index": 0
          }
        ]
      ]
    },
    "OpenAI Chat Model": {
      "ai_languageModel": [
        [
          {
            "node": "AI Agent",
            "type": "ai_languageModel",
            "index": 0
          }
        ]
      ]
    },
    "Simple Memory": {
      "ai_memory": [
        [
          {
            "node": "AI Agent",
            "type": "ai_memory",
            "index": 0
          }
        ]
      ]
    },
    "SearXNG MCP Tool": {
      "ai_tool": [
        [
          {
            "node": "AI Agent",
            "type": "ai_tool",
            "index": 0
          }
        ]
      ]
    }
  },
  "active": false,
  "settings": {
    "executionOrder": "v1"
  },
  "versionId": "61023ccc-0124-4f00-8e2b-3852b530b26f",
  "meta": {
    "templateCredsSetupCompleted": true,
    "instanceId": "REMOVED"
  },
  "id": "5uL9T6bjMsNOrmKv",
  "tags": []
}
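For reference, the workflow above wires an n8n chat trigger to an AI Agent node (OpenAI chat model plus window-buffer memory) and exposes this package's server to the agent through an MCP Client Tool node pointed at `http://host.docker.internal:32769/sse`. It can be imported through the n8n editor (Import from File) or, as a sketch assuming the `n8n` CLI is available inside your n8n container, from the command line:

```bash
# Sketch: import the bundled template into a self-hosted n8n instance
# (assumes the n8n CLI is reachable inside the n8n container).
n8n import:workflow --input="Local SearXNG MCP Agent Template.json"
```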
PKG-INFO
ADDED
@@ -0,0 +1,372 @@
Metadata-Version: 2.4
Name: iflow-mcp_the-ai-workshops-searxng-mcp-server
Version: 0.1.0
Summary: MCP server to search the web using SearXNG instance
Requires-Python: >=3.9
Requires-Dist: fastmcp>=0.1.0
Requires-Dist: httpx>=0.27.0
Requires-Dist: python-dotenv>=1.0.0
Description-Content-Type: text/markdown

# SearXNG MCP Server

An SSE-based Model Context Protocol (MCP) server integrated with [SearXNG](https://github.com/searxng/searxng), giving AI agents powerful, privacy-respecting web search capabilities.

---

## Overview

This project demonstrates how to build an MCP server that enables AI agents to perform web searches using a SearXNG instance. It serves as a practical template for creating your own MCP servers, using SearXNG as a backend.

The implementation follows the best practices laid out by Anthropic for building MCP servers, allowing seamless integration with any MCP-compatible client.

---

## Prerequisites

- Python 3.9+
- Access to a running SearXNG instance (local or remote)
- Docker (optional, for containerized deployment)
- [uv](https://github.com/astral-sh/uv) (optional, for fast Python dependency management)
- [Smithery](https://github.com/The-AI-Workshops/smithery) (optional, for MCP server management)

### SearXNG Server (Required)

You must have a SearXNG server running and accessible. The recommended way is via Docker:

```bash
docker run -d --name=searxng -p 32768:8080 -v "/root/searxng:/etc/searxng" \
  -e "BASE_URL=http://0.0.0.0:32768/" \
  -e "INSTANCE_NAME=home" \
  --restart always searxng/searxng
```

- This will run SearXNG on port 32768 and persist its configuration in `/root/searxng`.
- The MCP server expects SearXNG to be available at `http://172.17.0.1:32768` by default (see `.env`); a quick way to verify the instance is reachable is shown below.
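A minimal sketch of that check, assuming the instance is reachable on `localhost:32768` and has the `json` output format enabled in its `settings.yml` (the server's `format=json` queries depend on it):

```bash
# Verify the SearXNG JSON API responds before starting the MCP server
curl "http://localhost:32768/search?q=test&format=json" | head -c 300
```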

---

## Installation

### Using uv

Install uv if you don't have it:

```bash
pip install uv
```

Clone this repository:

```bash
git clone https://github.com/The-AI-Workshops/searxng-mcp-server.git
cd searxng-mcp-server/dev/searXNG-mcp
```

Install dependencies:

```bash
uv pip install -r requirements.txt
```

Create a `.env` file based on the provided example:

```bash
nano .env
# Edit .env as needed
```

Configure your environment variables in the `.env` file (see the Configuration section).

---

### Using Docker (Recommended)

Build the Docker image:

```bash
docker build -t mcp/searxng-mcp .
```

Create a `.env` file, configure your environment variables, and then run the image:

```bash
docker run -d --env-file ./.env -p 32769:32769 mcp/searxng-mcp
```

---

### Using Smithery

[Smithery](https://github.com/The-AI-Workshops/smithery) is a command-line tool for managing AI agent tools and MCP servers.

Install Smithery if you don't have it (see the Smithery documentation for the various installation methods, e.g., using pipx):

```bash
pipx install smithery
```

Install the SearXNG MCP server using Smithery:

```bash
smithery install @The-AI-Workshops/searxng-mcp-server
```

This will install the server and its dependencies into a dedicated environment managed by Smithery.

After installation, Smithery will report the path to the installed server. Navigate to that directory to configure it. For example, if Smithery installs tools into `~/.smithery/tools/`, the path might be `~/.smithery/tools/The-AI-Workshops/searxng-mcp-server`.

Create a `.env` file in the server's directory by copying the example:

```bash
# Example:
# cd ~/.smithery/tools/The-AI-Workshops/searxng-mcp-server
cp .env.example .env
nano .env
# Edit .env as needed
```

Configure your environment variables in the `.env` file (see the Configuration section).

---

## Configuration

The following environment variables can be configured in your `.env` file:

| Variable         | Description                                | Example                 |
|------------------|--------------------------------------------|-------------------------|
| SEARXNG_BASE_URL | Base URL of your SearXNG instance          | http://172.17.0.1:32768 |
| HOST             | Host to bind to when using SSE transport   | 0.0.0.0                 |
| PORT             | Port to listen on when using SSE transport | 32769                   |
| TRANSPORT        | Transport protocol (sse or stdio)          | sse                     |
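For illustration, a `.env` using the example values above would look like this (a sketch built from the table; the `.env` actually shipped with the package may differ):

```bash
# .env — example values taken from the table above
SEARXNG_BASE_URL=http://172.17.0.1:32768
HOST=0.0.0.0
PORT=32769
TRANSPORT=sse
```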

---

## Running the Server

### Using uv

**SSE Transport**

Set `TRANSPORT=sse` in `.env`, then:

```bash
uv run dev/searXNG-mcp/server.py
```

**Stdio Transport**

With stdio, the MCP client itself spins up the MCP server, so there is nothing to run at this point.

---

### Using Docker

**SSE Transport**

```bash
docker build -t mcp/searxng-mcp .
docker run --rm -it -p 32769:32769 --env-file dev/searXNG-mcp/.env -v $(pwd)/dev/searXNG-mcp:/app mcp/searxng-mcp
```

- The `-v $(pwd)/dev/searXNG-mcp:/app` mount lets you live-edit the code and `.env` file on your host and have the changes reflected in the running container.
- The server will be available at `http://localhost:32769/sse`; a quick reachability check is shown below.
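A minimal sketch of that check, assuming the default port (an SSE endpoint keeps the connection open, so interrupt with Ctrl+C once you see the initial event):

```bash
# Confirm the MCP server's SSE endpoint is up (Ctrl+C to stop streaming)
curl -N http://localhost:32769/sse
```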

**Stdio Transport**

With stdio, the MCP client itself spins up the MCP server container, so there is nothing to run at this point.

---

### Running with Smithery

**SSE Transport**

Set `TRANSPORT=sse` in `.env` in the Smithery-installed server directory.
Then you can typically run the server using the Python interpreter from the virtual environment Smithery created for the tool:

```bash
# Navigate to the server directory, e.g.,
# cd ~/.smithery/tools/The-AI-Workshops/searxng-mcp-server
~/.smithery/venvs/The-AI-Workshops_searxng-mcp-server/bin/python server.py
```

Alternatively, if Smithery provides a direct run command for installed tools (check the Smithery documentation):

```bash
smithery run @The-AI-Workshops/searxng-mcp-server
```

The server will be available based on your HOST and PORT settings in `.env` (e.g., `http://localhost:32769/sse`).

**Stdio Transport**

With stdio, the MCP client itself will spin up the server. The client configuration needs to point to the `server.py` script within the Smithery-managed directory, either via `smithery exec` or via the direct path to the Python interpreter in the tool's virtual environment. See the "Integration with MCP Clients" section for examples.

---

## Integration with MCP Clients

### SSE Configuration

Once you have the server running with SSE transport, you can connect to it using this configuration:

```json
{
  "mcpServers": {
    "searxng": {
      "transport": "sse",
      "url": "http://localhost:32769/sse"
    }
  }
}
```

**Note for Windsurf users:** Use `serverUrl` instead of `url` in your configuration:

```json
{
  "mcpServers": {
    "searxng": {
      "transport": "sse",
      "serverUrl": "http://localhost:32769/sse"
    }
  }
}
```

**Note for n8n users:** Use `host.docker.internal` instead of `localhost`, since n8n has to reach outside of its own container to the host machine. The full URL in the MCP node would then be `http://host.docker.internal:32769/sse`; this is exactly the endpoint used by the bundled `Local SearXNG MCP Agent Template.json` workflow, which you can import as a starting point.

Make sure to update the port if you are using a value other than the default 32769.

---

### Python with Stdio Configuration

Add this server to your MCP configuration for Claude Desktop, Windsurf, or any other MCP client:

```json
{
  "mcpServers": {
    "searxng": {
      "command": "python",
      "args": ["dev/searXNG-mcp/server.py"],
      "env": {
        "TRANSPORT": "stdio",
        "SEARXNG_BASE_URL": "http://localhost:32768",
        "HOST": "0.0.0.0",
        "PORT": "32769"
      }
    }
  }
}
```

---

### Docker with Stdio Configuration

```json
{
  "mcpServers": {
    "searxng": {
      "command": "docker",
      "args": ["run", "--rm", "-i",
               "-e", "TRANSPORT",
               "-e", "SEARXNG_BASE_URL",
               "-e", "HOST",
               "-e", "PORT",
               "mcp/searxng-mcp"],
      "env": {
        "TRANSPORT": "stdio",
        "SEARXNG_BASE_URL": "http://localhost:32768",
        "HOST": "0.0.0.0",
        "PORT": "32769"
      }
    }
  }
}
```

---

### Smithery with Stdio Configuration

If you installed the server using Smithery, you can configure your MCP client to run it via stdio. Smithery provides an `exec` command to run executables from within the tool's environment. (The comments below are explanatory only; remove them if your client requires strict JSON.)

```json
{
  "mcpServers": {
    "searxng": {
      "command": "smithery",
      "args": ["exec", "@The-AI-Workshops/searxng-mcp-server", "--", "python", "server.py"],
      // "cwd" (current working directory) might be handled automatically by Smithery.
      // If server.py is in a subdirectory, adjust the script path, e.g., "python", "path/to/server.py".
      "env": {
        "TRANSPORT": "stdio",
        "SEARXNG_BASE_URL": "http://localhost:32768", // Adjust as needed
        "HOST": "0.0.0.0",  // Typically not used by the stdio server itself, but good to set
        "PORT": "32769"     // Typically not used by the stdio server itself
      }
    }
  }
}
```

Alternatively, you can find the path to the Python interpreter in the virtual environment created by Smithery (e.g., `~/.smithery/venvs/The-AI-Workshops_searxng-mcp-server/bin/python`) and the path to `server.py` (e.g., `~/.smithery/tools/The-AI-Workshops/searxng-mcp-server/server.py`) and use those directly:

```json
{
  "mcpServers": {
    "searxng": {
      "command": "~/.smithery/venvs/The-AI-Workshops_searxng-mcp-server/bin/python",
      "args": ["~/.smithery/tools/The-AI-Workshops/searxng-mcp-server/server.py"],
      // "cwd" should be the directory containing server.py if you are not using absolute paths in args,
      // or if server.py relies on relative paths for other files (like .env).
      // Example: "cwd": "~/.smithery/tools/The-AI-Workshops/searxng-mcp-server",
      "env": {
        "TRANSPORT": "stdio",
        "SEARXNG_BASE_URL": "http://localhost:32768"
        // Other necessary env vars from .env can be duplicated here
      }
    }
  }
}
```

Ensure the paths are correct for your Smithery installation and that the `.env` file is discoverable by `server.py` (usually by setting `cwd` to the server's root directory, or by ensuring `server.py` loads it from an absolute path if Smithery sets one).

---

## Building Your Own Server

This template provides a foundation for building more complex MCP servers. To build your own:

- Add your own tools by creating functions decorated with `@mcp.tool()` (a minimal sketch follows this list)
- Create your own lifespan function to add your own dependencies (clients, database connections, etc.)
- Add prompts and resources as well with `@mcp.resource()` and `@mcp.prompt()`
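For instance, a new tool can be as small as the following sketch. It assumes the `fastmcp` dependency from `requirements.txt`; the server and tool names here are illustrative and not part of this package, so mirror how `server.py` constructs its own `FastMCP` instance.

```python
# Minimal sketch of a custom MCP tool using the fastmcp dependency.
from fastmcp import FastMCP

mcp = FastMCP("my-custom-server")

@mcp.tool()
def word_count(text: str) -> int:
    """Count the words in a piece of text."""
    return len(text.split())

if __name__ == "__main__":
    # Defaults to stdio; see server.py for how SSE and the HOST/PORT
    # environment variables are wired up in this project.
    mcp.run()
```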

---

## SearXNG Search Tool Parameters

The `search` tool supports the following parameters (all optional except `q`):

- `q` (required): The search query string.
- `categories`: Comma-separated list of active search categories.
- `engines`: Comma-separated list of active search engines.
- `language`: Code of the language.
- `page`: Search page number (default: 1).
- `time_range`: [day, month, year]
- `format`: [json, csv, rss] (default: json)
- `results_on_new_tab`: [0, 1]
- `image_proxy`: [true, false]
- `autocomplete`: [google, dbpedia, duckduckgo, mwmbl, startpage, wikipedia, stract, swisscows, qwant]
- `safesearch`: [0, 1, 2]
- `theme`: [simple]
- `enabled_plugins`: List of enabled plugins.
- `disabled_plugins`: List of disabled plugins.
- `enabled_engines`: List of enabled engines.
- `disabled_engines`: List of disabled engines.

See the [SearXNG documentation](https://docs.searxng.org/) for more details.
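To make the mapping concrete, here is an illustrative `httpx` call showing how these parameters translate into a request against a SearXNG instance's `/search` endpoint. The helper below is not part of the package (the actual tool lives in `server.py`); it is a sketch assuming a local instance on port 32768 with JSON output enabled.

```python
# Illustrative only: how the parameters above map onto SearXNG's /search endpoint.
import httpx

def searxng_search(base_url: str, q: str, **params) -> dict:
    """Query a SearXNG instance and return the parsed JSON response."""
    query = {"q": q, "format": "json", **params}
    response = httpx.get(f"{base_url}/search", params=query, timeout=30.0)
    response.raise_for_status()
    return response.json()

if __name__ == "__main__":
    results = searxng_search(
        "http://localhost:32768",
        "model context protocol",
        time_range="month",
        safesearch=1,
    )
    print(len(results.get("results", [])), "results")
```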

---

## License

MIT License