mcp-server-fetch 0.1.0__tar.gz → 0.6.2__tar.gz
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- mcp_server_fetch-0.6.2/.gitignore +300 -0
- mcp_server_fetch-0.6.2/.python-version +1 -0
- {mcp_server_fetch-0.1.0 → mcp_server_fetch-0.6.2}/PKG-INFO +26 -28
- {mcp_server_fetch-0.1.0 → mcp_server_fetch-0.6.2}/README.md +23 -26
- {mcp_server_fetch-0.1.0 → mcp_server_fetch-0.6.2}/pyproject.toml +3 -2
- mcp_server_fetch-0.6.2/src/mcp_server_fetch/__init__.py +24 -0
- mcp_server_fetch-0.6.2/src/mcp_server_fetch/server.py +272 -0
- {mcp_server_fetch-0.1.0 → mcp_server_fetch-0.6.2}/uv.lock +46 -36
- mcp_server_fetch-0.1.0/.gitignore +0 -2
- mcp_server_fetch-0.1.0/src/mcp_server_fetch/__init__.py +0 -12
- mcp_server_fetch-0.1.0/src/mcp_server_fetch/server.py +0 -92
- {mcp_server_fetch-0.1.0 → mcp_server_fetch-0.6.2}/LICENSE +0 -0
- {mcp_server_fetch-0.1.0 → mcp_server_fetch-0.6.2}/src/mcp_server_fetch/__main__.py +0 -0
mcp_server_fetch-0.6.2/.gitignore
@@ -0,0 +1,300 @@
+# Logs
+logs
+*.log
+npm-debug.log*
+yarn-debug.log*
+yarn-error.log*
+lerna-debug.log*
+.pnpm-debug.log*
+
+# Diagnostic reports (https://nodejs.org/api/report.html)
+report.[0-9]*.[0-9]*.[0-9]*.[0-9]*.json
+
+# Runtime data
+pids
+*.pid
+*.seed
+*.pid.lock
+
+# Directory for instrumented libs generated by jscoverage/JSCover
+lib-cov
+
+# Coverage directory used by tools like istanbul
+coverage
+*.lcov
+
+# nyc test coverage
+.nyc_output
+
+# Grunt intermediate storage (https://gruntjs.com/creating-plugins#storing-task-files)
+.grunt
+
+# Bower dependency directory (https://bower.io/)
+bower_components
+
+# node-waf configuration
+.lock-wscript
+
+# Compiled binary addons (https://nodejs.org/api/addons.html)
+build/Release
+
+# Dependency directories
+node_modules/
+jspm_packages/
+
+# Snowpack dependency directory (https://snowpack.dev/)
+web_modules/
+
+# TypeScript cache
+*.tsbuildinfo
+
+# Optional npm cache directory
+.npm
+
+# Optional eslint cache
+.eslintcache
+
+# Optional stylelint cache
+.stylelintcache
+
+# Microbundle cache
+.rpt2_cache/
+.rts2_cache_cjs/
+.rts2_cache_es/
+.rts2_cache_umd/
+
+# Optional REPL history
+.node_repl_history
+
+# Output of 'npm pack'
+*.tgz
+
+# Yarn Integrity file
+.yarn-integrity
+
+# dotenv environment variable files
+.env
+.env.development.local
+.env.test.local
+.env.production.local
+.env.local
+
+# parcel-bundler cache (https://parceljs.org/)
+.cache
+.parcel-cache
+
+# Next.js build output
+.next
+out
+
+# Nuxt.js build / generate output
+.nuxt
+dist
+
+# Gatsby files
+.cache/
+# Comment in the public line in if your project uses Gatsby and not Next.js
+# https://nextjs.org/blog/next-9-1#public-directory-support
+# public
+
+# vuepress build output
+.vuepress/dist
+
+# vuepress v2.x temp and cache directory
+.temp
+.cache
+
+# Docusaurus cache and generated files
+.docusaurus
+
+# Serverless directories
+.serverless/
+
+# FuseBox cache
+.fusebox/
+
+# DynamoDB Local files
+.dynamodb/
+
+# TernJS port file
+.tern-port
+
+# Stores VSCode versions used for testing VSCode extensions
+.vscode-test
+
+# yarn v2
+.yarn/cache
+.yarn/unplugged
+.yarn/build-state.yml
+.yarn/install-state.gz
+.pnp.*
+
+build/
+
+gcp-oauth.keys.json
+.*-server-credentials.json
+
+# Byte-compiled / optimized / DLL files
+__pycache__/
+*.py[cod]
+*$py.class
+
+# C extensions
+*.so
+
+# Distribution / packaging
+.Python
+build/
+develop-eggs/
+dist/
+downloads/
+eggs/
+.eggs/
+lib/
+lib64/
+parts/
+sdist/
+var/
+wheels/
+share/python-wheels/
+*.egg-info/
+.installed.cfg
+*.egg
+MANIFEST
+
+# PyInstaller
+# Usually these files are written by a python script from a template
+# before PyInstaller builds the exe, so as to inject date/other infos into it.
+*.manifest
+*.spec
+
+# Installer logs
+pip-log.txt
+pip-delete-this-directory.txt
+
+# Unit test / coverage reports
+htmlcov/
+.tox/
+.nox/
+.coverage
+.coverage.*
+.cache
+nosetests.xml
+coverage.xml
+*.cover
+*.py,cover
+.hypothesis/
+.pytest_cache/
+cover/
+
+# Translations
+*.mo
+*.pot
+
+# Django stuff:
+*.log
+local_settings.py
+db.sqlite3
+db.sqlite3-journal
+
+# Flask stuff:
+instance/
+.webassets-cache
+
+# Scrapy stuff:
+.scrapy
+
+# Sphinx documentation
+docs/_build/
+
+# PyBuilder
+.pybuilder/
+target/
+
+# Jupyter Notebook
+.ipynb_checkpoints
+
+# IPython
+profile_default/
+ipython_config.py
+
+# pyenv
+# For a library or package, you might want to ignore these files since the code is
+# intended to run in multiple environments; otherwise, check them in:
+# .python-version
+
+# pipenv
+# According to pypa/pipenv#598, it is recommended to include Pipfile.lock in version control.
+# However, in case of collaboration, if having platform-specific dependencies or dependencies
+# having no cross-platform support, pipenv may install dependencies that don't work, or not
+# install all needed dependencies.
+#Pipfile.lock
+
+# poetry
+# Similar to Pipfile.lock, it is generally recommended to include poetry.lock in version control.
+# This is especially recommended for binary packages to ensure reproducibility, and is more
+# commonly ignored for libraries.
+# https://python-poetry.org/docs/basic-usage/#commit-your-poetrylock-file-to-version-control
+#poetry.lock
+
+# pdm
+# Similar to Pipfile.lock, it is generally recommended to include pdm.lock in version control.
+#pdm.lock
+# pdm stores project-wide configurations in .pdm.toml, but it is recommended to not include it
+# in version control.
+# https://pdm.fming.dev/latest/usage/project/#working-with-version-control
+.pdm.toml
+.pdm-python
+.pdm-build/
+
+# PEP 582; used by e.g. github.com/David-OConnor/pyflow and github.com/pdm-project/pdm
+__pypackages__/
+
+# Celery stuff
+celerybeat-schedule
+celerybeat.pid
+
+# SageMath parsed files
+*.sage.py
+
+# Environments
+.env
+.venv
+env/
+venv/
+ENV/
+env.bak/
+venv.bak/
+
+# Spyder project settings
+.spyderproject
+.spyproject
+
+# Rope project settings
+.ropeproject
+
+# mkdocs documentation
+/site
+
+# mypy
+.mypy_cache/
+.dmypy.json
+dmypy.json
+
+# Pyre type checker
+.pyre/
+
+# pytype static type analyzer
+.pytype/
+
+# Cython debug symbols
+cython_debug/
+
+.DS_Store
+
+# PyCharm
+# JetBrains specific template is maintained in a separate JetBrains.gitignore that can
+# be found at https://github.com/github/gitignore/blob/main/Global/JetBrains.gitignore
+# and can be added to the global gitignore or merged into this file. For a more nuclear
+# option (not recommended) you can uncomment the following to ignore the entire idea folder.
+#.idea/
mcp_server_fetch-0.6.2/.python-version
@@ -0,0 +1 @@
+3.11
{mcp_server_fetch-0.1.0 → mcp_server_fetch-0.6.2}/PKG-INFO
@@ -1,6 +1,6 @@
 Metadata-Version: 2.3
 Name: mcp-server-fetch
-Version: 0.1.0
+Version: 0.6.2
 Summary: A Model Context Protocol server providing tools to fetch and convert web content for usage by LLMs
 Author: Anthropic, PBC.
 Maintainer-email: Jack Adamson <jadamson@anthropic.com>
@@ -13,7 +13,8 @@ Classifier: Programming Language :: Python :: 3
 Classifier: Programming Language :: Python :: 3.10
 Requires-Python: >=3.10
 Requires-Dist: markdownify>=0.13.1
-Requires-Dist: mcp>=0.
+Requires-Dist: mcp>=1.0.0
+Requires-Dist: protego>=0.3.1
 Requires-Dist: pydantic>=2.0.0
 Requires-Dist: readabilipy>=0.2.0
 Requires-Dist: requests>=2.32.3
@@ -23,20 +24,27 @@ Description-Content-Type: text/markdown
 
 A Model Context Protocol server that provides web content fetching capabilities. This server enables LLMs to retrieve and process content from web pages, converting HTML to markdown for easier consumption.
 
-
+The fetch tool will truncate the response, but by using the `start_index` argument, you can specify where to start the content extraction. This lets models read a webpage in chunks, until they find the information they need.
 
 ### Available Tools
 
 - `fetch` - Fetches a URL from the internet and extracts its contents as markdown.
+    - `url` (string, required): URL to fetch
+    - `max_length` (integer, optional): Maximum number of characters to return (default: 5000)
+    - `start_index` (integer, optional): Start content from this character index (default: 0)
+    - `raw` (boolean, optional): Get raw content without markdown conversion (default: false)
 
 ### Prompts
 
 - **fetch**
   - Fetch a URL and extract its contents as markdown
-
+  - Arguments:
+    - `url` (string, required): URL to fetch
 
 ## Installation
 
+Optionally: Install node.js, this will cause the fetch server to use a different HTML simplifier that is more robust.
+
 ### Using uv (recommended)
 
 When using [`uv`](https://docs.astral.sh/uv/) no specific installation is needed. We will
@@ -88,35 +96,25 @@ Add to your Claude settings:
 ```
 </details>
 
-### 
+### Customization - robots.txt
 
-
+By default, the server will obey a websites robots.txt file if the request came from the model (via a tool), but not if
+the request was user initiated (via a prompt). This can be disabled by adding the argument `--ignore-robots-txt` to the
+`args` list in the configuration.
 
-
-<summary>Using uvx</summary>
+### Customization - User-agent
 
-
-
-"mcp-server-fetch": {
-"command": "uvx",
-"args": ["mcp-server-fetch"]
-}
-],
+By default, depending on if the request came from the model (via a tool), or was user initiated (via a prompt), the
+server will use either the user-agent
 ```
-
-
-<details>
-<summary>Using pip installation</summary>
-
-```json
-"context_servers": {
-"mcp-server-fetch": {
-"command": "python",
-"args": ["-m", "mcp_server_fetch"]
-}
-},
+ModelContextProtocol/1.0 (Autonomous; +https://github.com/modelcontextprotocol/servers)
 ```
-
+or
+```
+ModelContextProtocol/1.0 (User-Specified; +https://github.com/modelcontextprotocol/servers)
+```
+
+This can be customized by adding the argument `--user-agent=YourUserAgent` to the `args` list in the configuration.
 
 ## Debugging
 
{mcp_server_fetch-0.1.0 → mcp_server_fetch-0.6.2}/README.md
@@ -2,20 +2,27 @@
 
 A Model Context Protocol server that provides web content fetching capabilities. This server enables LLMs to retrieve and process content from web pages, converting HTML to markdown for easier consumption.
 
-
+The fetch tool will truncate the response, but by using the `start_index` argument, you can specify where to start the content extraction. This lets models read a webpage in chunks, until they find the information they need.
 
 ### Available Tools
 
 - `fetch` - Fetches a URL from the internet and extracts its contents as markdown.
+    - `url` (string, required): URL to fetch
+    - `max_length` (integer, optional): Maximum number of characters to return (default: 5000)
+    - `start_index` (integer, optional): Start content from this character index (default: 0)
+    - `raw` (boolean, optional): Get raw content without markdown conversion (default: false)
 
 ### Prompts
 
 - **fetch**
   - Fetch a URL and extract its contents as markdown
-
+  - Arguments:
+    - `url` (string, required): URL to fetch
 
 ## Installation
 
+Optionally: Install node.js, this will cause the fetch server to use a different HTML simplifier that is more robust.
+
 ### Using uv (recommended)
 
 When using [`uv`](https://docs.astral.sh/uv/) no specific installation is needed. We will
@@ -67,35 +74,25 @@ Add to your Claude settings:
 ```
 </details>
 
-### 
+### Customization - robots.txt
 
-
+By default, the server will obey a websites robots.txt file if the request came from the model (via a tool), but not if
+the request was user initiated (via a prompt). This can be disabled by adding the argument `--ignore-robots-txt` to the
+`args` list in the configuration.
 
-
-<summary>Using uvx</summary>
+### Customization - User-agent
 
-
-
-"mcp-server-fetch": {
-"command": "uvx",
-"args": ["mcp-server-fetch"]
-}
-],
+By default, depending on if the request came from the model (via a tool), or was user initiated (via a prompt), the
+server will use either the user-agent
 ```
-
-
-<details>
-<summary>Using pip installation</summary>
-
-```json
-"context_servers": {
-"mcp-server-fetch": {
-"command": "python",
-"args": ["-m", "mcp_server_fetch"]
-}
-},
+ModelContextProtocol/1.0 (Autonomous; +https://github.com/modelcontextprotocol/servers)
 ```
-
+or
+```
+ModelContextProtocol/1.0 (User-Specified; +https://github.com/modelcontextprotocol/servers)
+```
+
+This can be customized by adding the argument `--user-agent=YourUserAgent` to the `args` list in the configuration.
 
 ## Debugging
 
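The `start_index` behavior described in the README above implies a simple client-side reading loop. The sketch below is illustrative only and not part of the package; `fetch_page` is a hypothetical stand-in for the real MCP tool call, and the default window mirrors the documented `max_length` of 5000:

```python
# Hypothetical stand-in for the fetch tool: return one truncated window of the content.
def fetch_page(document: str, start_index: int = 0, max_length: int = 5000) -> str:
    return document[start_index : start_index + max_length]


def read_in_chunks(document: str, max_length: int = 5000) -> str:
    """Re-issue the fetch with a growing start_index until the page is exhausted."""
    parts: list[str] = []
    start_index = 0
    while True:
        chunk = fetch_page(document, start_index, max_length)
        if not chunk:
            break
        parts.append(chunk)
        start_index += max_length  # resume where the previous call was truncated
    return "".join(parts)


assert read_in_chunks("x" * 12_000) == "x" * 12_000
```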
{mcp_server_fetch-0.1.0 → mcp_server_fetch-0.6.2}/pyproject.toml
@@ -1,6 +1,6 @@
 [project]
 name = "mcp-server-fetch"
-version = "0.1.0"
+version = "0.6.2"
 description = "A Model Context Protocol server providing tools to fetch and convert web content for usage by LLMs"
 readme = "README.md"
 requires-python = ">=3.10"
@@ -17,7 +17,8 @@ classifiers = [
 ]
 dependencies = [
     "markdownify>=0.13.1",
-    "mcp>=0.
+    "mcp>=1.0.0",
+    "protego>=0.3.1",
     "pydantic>=2.0.0",
     "readabilipy>=0.2.0",
     "requests>=2.32.3",
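The new `protego` dependency is what backs the robots.txt enforcement in `server.py` below. A minimal sketch of the Protego API as the server uses it, with a made-up robots.txt body for illustration:

```python
from protego import Protego

# Made-up robots.txt rules for illustration.
robots_txt = """\
User-agent: *
Disallow: /private/
"""

parser = Protego.parse(robots_txt)
print(parser.can_fetch("https://example.com/", "ModelContextProtocol/1.0"))           # True
print(parser.can_fetch("https://example.com/private/x", "ModelContextProtocol/1.0"))  # False
```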
mcp_server_fetch-0.6.2/src/mcp_server_fetch/__init__.py
@@ -0,0 +1,24 @@
+from .server import serve
+
+
+def main():
+    """MCP Fetch Server - HTTP fetching functionality for MCP"""
+    import argparse
+    import asyncio
+
+    parser = argparse.ArgumentParser(
+        description="give a model the ability to make web requests"
+    )
+    parser.add_argument("--user-agent", type=str, help="Custom User-Agent string")
+    parser.add_argument(
+        "--ignore-robots-txt",
+        action="store_true",
+        help="Ignore robots.txt restrictions",
+    )
+
+    args = parser.parse_args()
+    asyncio.run(serve(args.user_agent, args.ignore_robots_txt))
+
+
+if __name__ == "__main__":
+    main()
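`main()` simply parses the two flags and hands them to `serve()`. Equivalently, assuming the package is installed, the server can be started from Python directly; the User-Agent value here is an arbitrary example:

```python
import asyncio

from mcp_server_fetch.server import serve

# Roughly equivalent to:
#   python -m mcp_server_fetch --user-agent="MyAgent/1.0" --ignore-robots-txt
asyncio.run(serve(custom_user_agent="MyAgent/1.0", ignore_robots_txt=True))
```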
mcp_server_fetch-0.6.2/src/mcp_server_fetch/server.py
@@ -0,0 +1,272 @@
+from typing import Annotated, Tuple
+from urllib.parse import urlparse, urlunparse
+
+import markdownify
+import readabilipy.simple_json
+from mcp.shared.exceptions import McpError
+from mcp.server import Server
+from mcp.server.stdio import stdio_server
+from mcp.types import (
+    GetPromptResult,
+    Prompt,
+    PromptArgument,
+    PromptMessage,
+    TextContent,
+    Tool,
+    INVALID_PARAMS,
+    INTERNAL_ERROR,
+)
+from protego import Protego
+from pydantic import BaseModel, Field, AnyUrl
+
+DEFAULT_USER_AGENT_AUTONOMOUS = "ModelContextProtocol/1.0 (Autonomous; +https://github.com/modelcontextprotocol/servers)"
+DEFAULT_USER_AGENT_MANUAL = "ModelContextProtocol/1.0 (User-Specified; +https://github.com/modelcontextprotocol/servers)"
+
+
+def extract_content_from_html(html: str) -> str:
+    """Extract and convert HTML content to Markdown format.
+
+    Args:
+        html: Raw HTML content to process
+
+    Returns:
+        Simplified markdown version of the content
+    """
+    ret = readabilipy.simple_json.simple_json_from_html_string(
+        html, use_readability=True
+    )
+    if not ret["content"]:
+        return "<error>Page failed to be simplified from HTML</error>"
+    content = markdownify.markdownify(
+        ret["content"],
+        heading_style=markdownify.ATX,
+    )
+    return content
+
+
+def get_robots_txt_url(url: str) -> str:
+    """Get the robots.txt URL for a given website URL.
+
+    Args:
+        url: Website URL to get robots.txt for
+
+    Returns:
+        URL of the robots.txt file
+    """
+    # Parse the URL into components
+    parsed = urlparse(url)
+
+    # Reconstruct the base URL with just scheme, netloc, and /robots.txt path
+    robots_url = urlunparse((parsed.scheme, parsed.netloc, "/robots.txt", "", "", ""))
+
+    return robots_url
+
+
+async def check_may_autonomously_fetch_url(url: str, user_agent: str) -> None:
+    """
+    Check if the URL can be fetched by the user agent according to the robots.txt file.
+    Raises a McpError if not.
+    """
+    from httpx import AsyncClient, HTTPError
+
+    robot_txt_url = get_robots_txt_url(url)
+
+    async with AsyncClient() as client:
+        try:
+            response = await client.get(
+                robot_txt_url,
+                follow_redirects=True,
+                headers={"User-Agent": user_agent},
+            )
+        except HTTPError:
+            raise McpError(
+                INTERNAL_ERROR,
+                f"Failed to fetch robots.txt {robot_txt_url} due to a connection issue",
+            )
+        if response.status_code in (401, 403):
+            raise McpError(
+                INTERNAL_ERROR,
+                f"When fetching robots.txt ({robot_txt_url}), received status {response.status_code} so assuming that autonomous fetching is not allowed, the user can try manually fetching by using the fetch prompt",
+            )
+        elif 400 <= response.status_code < 500:
+            return
+        robot_txt = response.text
+    processed_robot_txt = "\n".join(
+        line for line in robot_txt.splitlines() if not line.strip().startswith("#")
+    )
+    robot_parser = Protego.parse(processed_robot_txt)
+    if not robot_parser.can_fetch(str(url), user_agent):
+        raise McpError(
+            INTERNAL_ERROR,
+            f"The sites robots.txt ({robot_txt_url}), specifies that autonomous fetching of this page is not allowed, "
+            f"<useragent>{user_agent}</useragent>\n"
+            f"<url>{url}</url>"
+            f"<robots>\n{robot_txt}\n</robots>\n"
+            f"The assistant must let the user know that it failed to view the page. The assistant may provide further guidance based on the above information.\n"
+            f"The assistant can tell the user that they can try manually fetching the page by using the fetch prompt within their UI.",
+        )
+
+
+async def fetch_url(
+    url: str, user_agent: str, force_raw: bool = False
+) -> Tuple[str, str]:
+    """
+    Fetch the URL and return the content in a form ready for the LLM, as well as a prefix string with status information.
+    """
+    from httpx import AsyncClient, HTTPError
+
+    async with AsyncClient() as client:
+        try:
+            response = await client.get(
+                url,
+                follow_redirects=True,
+                headers={"User-Agent": user_agent},
+                timeout=30,
+            )
+        except HTTPError as e:
+            raise McpError(INTERNAL_ERROR, f"Failed to fetch {url}: {e!r}")
+        if response.status_code >= 400:
+            raise McpError(
+                INTERNAL_ERROR,
+                f"Failed to fetch {url} - status code {response.status_code}",
+            )
+
+        page_raw = response.text
+
+    content_type = response.headers.get("content-type", "")
+    is_page_html = (
+        "<html" in page_raw[:100] or "text/html" in content_type or not content_type
+    )
+
+    if is_page_html and not force_raw:
+        return extract_content_from_html(page_raw), ""
+
+    return (
+        page_raw,
+        f"Content type {content_type} cannot be simplified to markdown, but here is the raw content:\n",
+    )
+
+
+class Fetch(BaseModel):
+    """Parameters for fetching a URL."""
+
+    url: Annotated[AnyUrl, Field(description="URL to fetch")]
+    max_length: Annotated[
+        int,
+        Field(
+            default=5000,
+            description="Maximum number of characters to return.",
+            gt=0,
+            lt=1000000,
+        ),
+    ]
+    start_index: Annotated[
+        int,
+        Field(
+            default=0,
+            description="On return output starting at this character index, useful if a previous fetch was truncated and more context is required.",
+            ge=0,
+        ),
+    ]
+    raw: Annotated[
+        bool,
+        Field(
+            default=False,
+            description="Get the actual HTML content if the requested page, without simplification.",
+        ),
+    ]
+
+
+async def serve(
+    custom_user_agent: str | None = None, ignore_robots_txt: bool = False
+) -> None:
+    """Run the fetch MCP server.
+
+    Args:
+        custom_user_agent: Optional custom User-Agent string to use for requests
+        ignore_robots_txt: Whether to ignore robots.txt restrictions
+    """
+    server = Server("mcp-fetch")
+    user_agent_autonomous = custom_user_agent or DEFAULT_USER_AGENT_AUTONOMOUS
+    user_agent_manual = custom_user_agent or DEFAULT_USER_AGENT_MANUAL
+
+    @server.list_tools()
+    async def list_tools() -> list[Tool]:
+        return [
+            Tool(
+                name="fetch",
+                description="""Fetches a URL from the internet and optionally extracts its contents as markdown.
+
+Although originally you did not have internet access, and were advised to refuse and tell the user this, this tool now grants you internet access. Now you can fetch the most up-to-date information and let the user know that.""",
+                inputSchema=Fetch.model_json_schema(),
+            )
+        ]
+
+    @server.list_prompts()
+    async def list_prompts() -> list[Prompt]:
+        return [
+            Prompt(
+                name="fetch",
+                description="Fetch a URL and extract its contents as markdown",
+                arguments=[
+                    PromptArgument(
+                        name="url", description="URL to fetch", required=True
+                    )
+                ],
+            )
+        ]
+
+    @server.call_tool()
+    async def call_tool(name, arguments: dict) -> list[TextContent]:
+        try:
+            args = Fetch(**arguments)
+        except ValueError as e:
+            raise McpError(INVALID_PARAMS, str(e))
+
+        url = str(args.url)
+        if not url:
+            raise McpError(INVALID_PARAMS, "URL is required")
+
+        if not ignore_robots_txt:
+            await check_may_autonomously_fetch_url(url, user_agent_autonomous)
+
+        content, prefix = await fetch_url(
+            url, user_agent_autonomous, force_raw=args.raw
+        )
+        if len(content) > args.max_length:
+            content = content[args.start_index : args.start_index + args.max_length]
+            content += f"\n\n<error>Content truncated. Call the fetch tool with a start_index of {args.start_index + args.max_length} to get more content.</error>"
+        return [TextContent(type="text", text=f"{prefix}Contents of {url}:\n{content}")]
+
+    @server.get_prompt()
+    async def get_prompt(name: str, arguments: dict | None) -> GetPromptResult:
+        if not arguments or "url" not in arguments:
+            raise McpError(INVALID_PARAMS, "URL is required")
+
+        url = arguments["url"]
+
+        try:
+            content, prefix = await fetch_url(url, user_agent_manual)
+            # TODO: after SDK bug is addressed, don't catch the exception
+        except McpError as e:
+            return GetPromptResult(
+                description=f"Failed to fetch {url}",
+                messages=[
+                    PromptMessage(
+                        role="user",
+                        content=TextContent(type="text", text=str(e)),
+                    )
+                ],
+            )
+        return GetPromptResult(
+            description=f"Contents of {url}",
+            messages=[
+                PromptMessage(
+                    role="user", content=TextContent(type="text", text=prefix + content)
+                )
+            ],
+        )
+
+    options = server.create_initialization_options()
+    async with stdio_server() as (read_stream, write_stream):
+        await server.run(read_stream, write_stream, options, raise_exceptions=True)
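One behavior of the new module can be checked in isolation, assuming the package is importable: `get_robots_txt_url` always resolves robots.txt at the site root, discarding the path and query of the page being fetched.

```python
from mcp_server_fetch.server import get_robots_txt_url

# robots.txt is always looked up at the root of the host being fetched.
assert get_robots_txt_url("https://example.com/a/b?q=1") == "https://example.com/robots.txt"

# Related detail from call_tool above: when a response exceeds max_length, the
# tool returns the slice [start_index : start_index + max_length] and appends a
# marker telling the model to fetch again with start_index + max_length.
```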
{mcp_server_fetch-0.1.0 → mcp_server_fetch-0.6.2}/uv.lock
@@ -182,18 +182,17 @@ wheels = [
 
 [[package]]
 name = "httpx"
-version = "0.
+version = "0.28.0"
 source = { registry = "https://pypi.org/simple" }
 dependencies = [
     { name = "anyio" },
     { name = "certifi" },
     { name = "httpcore" },
     { name = "idna" },
-    { name = "sniffio" },
 ]
-sdist = { url = "https://files.pythonhosted.org/packages/
+sdist = { url = "https://files.pythonhosted.org/packages/10/df/676b7cf674dd1bdc71a64ad393c89879f75e4a0ab8395165b498262ae106/httpx-0.28.0.tar.gz", hash = "sha256:0858d3bab51ba7e386637f22a61d8ccddaeec5f3fe4209da3a6168dbb91573e0", size = 141307 }
 wheels = [
-    { url = "https://files.pythonhosted.org/packages/
+    { url = "https://files.pythonhosted.org/packages/8f/fb/a19866137577ba60c6d8b69498dc36be479b13ba454f691348ddf428f185/httpx-0.28.0-py3-none-any.whl", hash = "sha256:dc0b419a0cfeb6e8b34e85167c0da2671206f5095f1baa9663d23bcfd6b535fc", size = 73551 },
 ]
 
 [[package]]
@@ -298,20 +297,20 @@ wheels = [
 
 [[package]]
 name = "markdownify"
-version = "0.
+version = "0.14.1"
 source = { registry = "https://pypi.org/simple" }
 dependencies = [
     { name = "beautifulsoup4" },
     { name = "six" },
 ]
-sdist = { url = "https://files.pythonhosted.org/packages/
+sdist = { url = "https://files.pythonhosted.org/packages/1b/75/483a4bcca436fe88d02dc7686c372631d833848951b368700bdc0c770bb7/markdownify-0.14.1.tar.gz", hash = "sha256:a62a7a216947ed0b8dafb95b99b2ef4a0edd1e18d5653c656f68f03db2bfb2f1", size = 14332 }
 wheels = [
-    { url = "https://files.pythonhosted.org/packages/
+    { url = "https://files.pythonhosted.org/packages/65/0b/74cec93a7b05edf4fc3ea1c899fe8a37f041d7b9d303c75abf7a162924e0/markdownify-0.14.1-py3-none-any.whl", hash = "sha256:4c46a6c0c12c6005ddcd49b45a5a890398b002ef51380cd319db62df5e09bc2a", size = 11530 },
 ]
 
 [[package]]
 name = "mcp"
-version = "0.
+version = "1.0.0"
 source = { registry = "https://pypi.org/simple" }
 dependencies = [
     { name = "anyio" },
@@ -321,18 +320,19 @@ dependencies = [
     { name = "sse-starlette" },
     { name = "starlette" },
 ]
-sdist = { url = "https://files.pythonhosted.org/packages/
+sdist = { url = "https://files.pythonhosted.org/packages/97/de/a9ec0a1b6439f90ea59f89004bb2e7ec6890dfaeef809751d9e6577dca7e/mcp-1.0.0.tar.gz", hash = "sha256:dba51ce0b5c6a80e25576f606760c49a91ee90210fed805b530ca165d3bbc9b7", size = 82891 }
 wheels = [
-    { url = "https://files.pythonhosted.org/packages/
+    { url = "https://files.pythonhosted.org/packages/56/89/900c0c8445ec001d3725e475fc553b0feb2e8a51be018f3bb7de51e683db/mcp-1.0.0-py3-none-any.whl", hash = "sha256:bbe70ffa3341cd4da78b5eb504958355c68381fb29971471cea1e642a2af5b8a", size = 36361 },
 ]
 
 [[package]]
 name = "mcp-server-fetch"
-version = "0.1.0"
+version = "0.6.2"
 source = { editable = "." }
 dependencies = [
     { name = "markdownify" },
     { name = "mcp" },
+    { name = "protego" },
     { name = "pydantic" },
     { name = "readabilipy" },
     { name = "requests" },
@@ -347,7 +347,8 @@ dev = [
 [package.metadata]
 requires-dist = [
     { name = "markdownify", specifier = ">=0.13.1" },
-    { name = "mcp", specifier = ">=0.
+    { name = "mcp", specifier = ">=1.0.0" },
+    { name = "protego", specifier = ">=0.3.1" },
     { name = "pydantic", specifier = ">=2.0.0" },
     { name = "readabilipy", specifier = ">=0.2.0" },
     { name = "requests", specifier = ">=2.32.3" },
@@ -368,18 +369,27 @@ wheels = [
     { url = "https://files.pythonhosted.org/packages/d2/1d/1b658dbd2b9fa9c4c9f32accbfc0205d532c8c6194dc0f2a4c0428e7128a/nodeenv-1.9.1-py2.py3-none-any.whl", hash = "sha256:ba11c9782d29c27c70ffbdda2d7415098754709be8a7056d79a737cd901155c9", size = 22314 },
 ]
 
+[[package]]
+name = "protego"
+version = "0.3.1"
+source = { registry = "https://pypi.org/simple" }
+sdist = { url = "https://files.pythonhosted.org/packages/8a/12/cab9fa77ff4e9e444a5eb5480db4b4f872c03aa079145804aa054be377bc/Protego-0.3.1.tar.gz", hash = "sha256:e94430d0d25cbbf239bc849d86c5e544fbde531fcccfa059953c7da344a1712c", size = 3246145 }
+wheels = [
+    { url = "https://files.pythonhosted.org/packages/74/ef/ece78585a5a189d8cc2b4c2d2b92a0dc025f156a6501159b026472ebbedc/Protego-0.3.1-py2.py3-none-any.whl", hash = "sha256:2fbe8e9b7a7dbc5016a932b14c98d236aad4c29290bbe457b8d2779666ef7a41", size = 8474 },
+]
+
 [[package]]
 name = "pydantic"
-version = "2.10.
+version = "2.10.2"
 source = { registry = "https://pypi.org/simple" }
 dependencies = [
     { name = "annotated-types" },
     { name = "pydantic-core" },
     { name = "typing-extensions" },
 ]
-sdist = { url = "https://files.pythonhosted.org/packages/
+sdist = { url = "https://files.pythonhosted.org/packages/41/86/a03390cb12cf64e2a8df07c267f3eb8d5035e0f9a04bb20fb79403d2a00e/pydantic-2.10.2.tar.gz", hash = "sha256:2bc2d7f17232e0841cbba4641e65ba1eb6fafb3a08de3a091ff3ce14a197c4fa", size = 785401 }
 wheels = [
-    { url = "https://files.pythonhosted.org/packages/
+    { url = "https://files.pythonhosted.org/packages/d5/74/da832196702d0c56eb86b75bfa346db9238617e29b0b7ee3b8b4eccfe654/pydantic-2.10.2-py3-none-any.whl", hash = "sha256:cfb96e45951117c3024e6b67b25cdc33a3cb7b2fa62e239f7af1378358a1d99e", size = 456364 },
 ]
 
 [[package]]
@@ -571,27 +581,27 @@ wheels = [
 
 [[package]]
 name = "ruff"
-version = "0.8.
-source = { registry = "https://pypi.org/simple" }
-sdist = { url = "https://files.pythonhosted.org/packages/
-wheels = [
-    { url = "https://files.pythonhosted.org/packages/
-    { url = "https://files.pythonhosted.org/packages/
-    { url = "https://files.pythonhosted.org/packages/
-    { url = "https://files.pythonhosted.org/packages/
-    { url = "https://files.pythonhosted.org/packages/
-    { url = "https://files.pythonhosted.org/packages/
-    { url = "https://files.pythonhosted.org/packages/
-    { url = "https://files.pythonhosted.org/packages/
-    { url = "https://files.pythonhosted.org/packages/
-    { url = "https://files.pythonhosted.org/packages/
-    { url = "https://files.pythonhosted.org/packages/
-    { url = "https://files.pythonhosted.org/packages/
-    { url = "https://files.pythonhosted.org/packages/
-    { url = "https://files.pythonhosted.org/packages/
-    { url = "https://files.pythonhosted.org/packages/
-    { url = "https://files.pythonhosted.org/packages/
-    { url = "https://files.pythonhosted.org/packages/
+version = "0.8.1"
+source = { registry = "https://pypi.org/simple" }
+sdist = { url = "https://files.pythonhosted.org/packages/95/d0/8ff5b189d125f4260f2255d143bf2fa413b69c2610c405ace7a0a8ec81ec/ruff-0.8.1.tar.gz", hash = "sha256:3583db9a6450364ed5ca3f3b4225958b24f78178908d5c4bc0f46251ccca898f", size = 3313222 }
+wheels = [
+    { url = "https://files.pythonhosted.org/packages/a2/d6/1a6314e568db88acdbb5121ed53e2c52cebf3720d3437a76f82f923bf171/ruff-0.8.1-py3-none-linux_armv6l.whl", hash = "sha256:fae0805bd514066f20309f6742f6ee7904a773eb9e6c17c45d6b1600ca65c9b5", size = 10532605 },
+    { url = "https://files.pythonhosted.org/packages/89/a8/a957a8812e31facffb6a26a30be0b5b4af000a6e30c7d43a22a5232a3398/ruff-0.8.1-py3-none-macosx_10_12_x86_64.whl", hash = "sha256:b8a4f7385c2285c30f34b200ca5511fcc865f17578383db154e098150ce0a087", size = 10278243 },
+    { url = "https://files.pythonhosted.org/packages/a8/23/9db40fa19c453fabf94f7a35c61c58f20e8200b4734a20839515a19da790/ruff-0.8.1-py3-none-macosx_11_0_arm64.whl", hash = "sha256:cd054486da0c53e41e0086e1730eb77d1f698154f910e0cd9e0d64274979a209", size = 9917739 },
+    { url = "https://files.pythonhosted.org/packages/e2/a0/6ee2d949835d5701d832fc5acd05c0bfdad5e89cfdd074a171411f5ccad5/ruff-0.8.1-py3-none-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:2029b8c22da147c50ae577e621a5bfbc5d1fed75d86af53643d7a7aee1d23871", size = 10779153 },
+    { url = "https://files.pythonhosted.org/packages/7a/25/9c11dca9404ef1eb24833f780146236131a3c7941de394bc356912ef1041/ruff-0.8.1-py3-none-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:2666520828dee7dfc7e47ee4ea0d928f40de72056d929a7c5292d95071d881d1", size = 10304387 },
+    { url = "https://files.pythonhosted.org/packages/c8/b9/84c323780db1b06feae603a707d82dbbd85955c8c917738571c65d7d5aff/ruff-0.8.1-py3-none-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:333c57013ef8c97a53892aa56042831c372e0bb1785ab7026187b7abd0135ad5", size = 11360351 },
+    { url = "https://files.pythonhosted.org/packages/6b/e1/9d4bbb2ace7aad14ded20e4674a48cda5b902aed7a1b14e6b028067060c4/ruff-0.8.1-py3-none-manylinux_2_17_ppc64.manylinux2014_ppc64.whl", hash = "sha256:288326162804f34088ac007139488dcb43de590a5ccfec3166396530b58fb89d", size = 12022879 },
+    { url = "https://files.pythonhosted.org/packages/75/28/752ff6120c0e7f9981bc4bc275d540c7f36db1379ba9db9142f69c88db21/ruff-0.8.1-py3-none-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:b12c39b9448632284561cbf4191aa1b005882acbc81900ffa9f9f471c8ff7e26", size = 11610354 },
+    { url = "https://files.pythonhosted.org/packages/ba/8c/967b61c2cc8ebd1df877607fbe462bc1e1220b4a30ae3352648aec8c24bd/ruff-0.8.1-py3-none-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:364e6674450cbac8e998f7b30639040c99d81dfb5bbc6dfad69bc7a8f916b3d1", size = 12813976 },
+    { url = "https://files.pythonhosted.org/packages/7f/29/e059f945d6bd2d90213387b8c360187f2fefc989ddcee6bbf3c241329b92/ruff-0.8.1-py3-none-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b22346f845fec132aa39cd29acb94451d030c10874408dbf776af3aaeb53284c", size = 11154564 },
+    { url = "https://files.pythonhosted.org/packages/55/47/cbd05e5a62f3fb4c072bc65c1e8fd709924cad1c7ec60a1000d1e4ee8307/ruff-0.8.1-py3-none-musllinux_1_2_aarch64.whl", hash = "sha256:b2f2f7a7e7648a2bfe6ead4e0a16745db956da0e3a231ad443d2a66a105c04fa", size = 10760604 },
+    { url = "https://files.pythonhosted.org/packages/bb/ee/4c3981c47147c72647a198a94202633130cfda0fc95cd863a553b6f65c6a/ruff-0.8.1-py3-none-musllinux_1_2_armv7l.whl", hash = "sha256:adf314fc458374c25c5c4a4a9270c3e8a6a807b1bec018cfa2813d6546215540", size = 10391071 },
+    { url = "https://files.pythonhosted.org/packages/6b/e6/083eb61300214590b188616a8ac6ae1ef5730a0974240fb4bec9c17de78b/ruff-0.8.1-py3-none-musllinux_1_2_i686.whl", hash = "sha256:a885d68342a231b5ba4d30b8c6e1b1ee3a65cf37e3d29b3c74069cdf1ee1e3c9", size = 10896657 },
+    { url = "https://files.pythonhosted.org/packages/77/bd/aacdb8285d10f1b943dbeb818968efca35459afc29f66ae3bd4596fbf954/ruff-0.8.1-py3-none-musllinux_1_2_x86_64.whl", hash = "sha256:d2c16e3508c8cc73e96aa5127d0df8913d2290098f776416a4b157657bee44c5", size = 11228362 },
+    { url = "https://files.pythonhosted.org/packages/39/72/fcb7ad41947f38b4eaa702aca0a361af0e9c2bf671d7fd964480670c297e/ruff-0.8.1-py3-none-win32.whl", hash = "sha256:93335cd7c0eaedb44882d75a7acb7df4b77cd7cd0d2255c93b28791716e81790", size = 8803476 },
+    { url = "https://files.pythonhosted.org/packages/e4/ea/cae9aeb0f4822c44651c8407baacdb2e5b4dcd7b31a84e1c5df33aa2cc20/ruff-0.8.1-py3-none-win_amd64.whl", hash = "sha256:2954cdbe8dfd8ab359d4a30cd971b589d335a44d444b6ca2cb3d1da21b75e4b6", size = 9614463 },
+    { url = "https://files.pythonhosted.org/packages/eb/76/fbb4bd23dfb48fa7758d35b744413b650a9fd2ddd93bca77e30376864414/ruff-0.8.1-py3-none-win_arm64.whl", hash = "sha256:55873cc1a473e5ac129d15eccb3c008c096b94809d693fc7053f588b67822737", size = 8959621 },
 ]
 
 [[package]]
mcp_server_fetch-0.1.0/src/mcp_server_fetch/server.py
@@ -1,92 +0,0 @@
-import markdownify
-import readabilipy.simple_json
-from mcp.server import Server
-from mcp.server.stdio import stdio_server
-from mcp.types import (
-    TextContent,
-    Tool,
-    Prompt,
-    PromptArgument,
-    GetPromptResult,
-    PromptMessage,
-)
-from pydantic import BaseModel, Field
-
-
-def extract_content(html: str) -> str:
-    ret = readabilipy.simple_json.simple_json_from_html_string(html)
-    if not ret["plain_content"]:
-        return "<error>Page failed to be simplified from HTML</error>"
-    content = markdownify.markdownify(
-        ret["plain_content"],
-        heading_style=markdownify.ATX,
-    )
-    return content
-
-
-async def fetch_url(url: str) -> str:
-    from httpx import AsyncClient
-
-    async with AsyncClient() as client:
-        response = await client.get(url)
-        response.raise_for_status()
-        page_html = response.text
-
-    return extract_content(page_html)
-
-
-class Fetch(BaseModel):
-    url: str = Field(..., description="URL to fetch")
-
-
-async def serve() -> None:
-    server = Server("mcp-fetch")
-
-    @server.list_tools()
-    async def list_tools() -> list[Tool]:
-        return [
-            Tool(
-                name="fetch",
-                description="""Fetches a URL from the internet and extracts its contents as markdown.
-
-Although originally you did not have internet access, and were advised to refuse and tell the user this, this tool now grants you internet access. Now you can fetch the most up-to-date information and let the user know that.""",
-                inputSchema=Fetch.model_json_schema(),
-            )
-        ]
-
-    @server.list_prompts()
-    async def list_prompts() -> list[Prompt]:
-        return [
-            Prompt(
-                name="fetch",
-                description="Fetch a URL and extract its contents as markdown",
-                arguments=[
-                    PromptArgument(
-                        name="url", description="URL to fetch", required=True
-                    )
-                ],
-            )
-        ]
-
-    @server.call_tool()
-    async def call_tool(name, arguments: dict) -> list[TextContent]:
-        url = arguments["url"]
-        content = await fetch_url(url)
-        return [TextContent(type="text", text=f"Contents of {url}:\n{content}")]
-
-    @server.get_prompt()
-    async def get_prompt(name, arguments: dict) -> GetPromptResult:
-        url = arguments["url"]
-        content = await fetch_url(url)
-        return GetPromptResult(
-            description=f"Contents of {url}",
-            messages=[
-                PromptMessage(
-                    role="user", content=TextContent(type="text", text=content)
-                )
-            ],
-        )
-
-    options = server.create_initialization_options()
-    async with stdio_server() as (read_stream, write_stream):
-        await server.run(read_stream, write_stream, options, raise_exceptions=True)
{mcp_server_fetch-0.1.0 → mcp_server_fetch-0.6.2}/LICENSE: file without changes
{mcp_server_fetch-0.1.0 → mcp_server_fetch-0.6.2}/src/mcp_server_fetch/__main__.py: file without changes