iflow-mcp_modal-mcp-toolbox 0.1.12__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
@@ -0,0 +1,89 @@
+ Metadata-Version: 2.4
+ Name: iflow-mcp_modal-mcp-toolbox
+ Version: 0.1.12
+ Summary: A collection of Model Context Protocol (MCP) tools for Modal
+ Author-email: Philipp Eisen <hello@philippeisen.de>
+ License-File: LICENSE
+ Requires-Python: <3.13,>=3.10
+ Requires-Dist: mcp>=1.3.0
+ Requires-Dist: modal>=0.73.43
+ Description-Content-Type: text/markdown
+
+ # Modal MCP Toolbox 🛠️
+
+ [![smithery badge](https://smithery.ai/badge/@philipp-eisen/modal-mcp-toolbox)](https://smithery.ai/server/@philipp-eisen/modal-mcp-toolbox)
+
+ A collection of Model Context Protocol (MCP) tools that run on Modal.
+ This lets you extend the capabilities of your LLM in tools such as [Goose](https://block.github.io/goose/) or the [Claude Desktop App](https://claude.ai/download).
+
+ <a href="https://glama.ai/mcp/servers/ai78w0p5mc"><img width="380" height="200" src="https://glama.ai/mcp/servers/ai78w0p5mc/badge" alt="Modal Toolbox MCP server" /></a>
+
+ ## Tools
+
+ - `run_python_code_in_sandbox`: Lets you run Python code in a sandboxed environment (see the sketch below).
+ - `generate_flux_image`: Generate an image using the FLUX model.
+
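+ Both tools are plain async Python functions exported by the package, so the sandbox tool can be smoke-tested directly, outside of an MCP client. A minimal sketch, assuming the package is installed locally and your Modal CLI is logged in:
+
+ ```python
+ import asyncio
+
+ from modal_mcp_toolbox.code import run_python_code_in_sandbox
+
+
+ async def main() -> None:
+     # Spins up a Modal sandbox, runs the snippet, and returns its stdout.
+     result = await run_python_code_in_sandbox("print('Hello from the sandbox!')")
+     print(result.text)
+
+
+ asyncio.run(main())
+ ```
+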
+ ## Demo
+
+ ### Flux Image Generation
+
+ ![🎬Flux Image Generation](./assets/flux.gif)
+
+ ### Python Code Execution
+
+ ![🎬Python Code Execution](./assets/python-sandbox.gif)
+
+ ## Prerequisites
+
+ - A [Modal account](https://modal.com/signup) and a configured Modal CLI.
+ - [UV](https://github.com/astral-sh/uv?tab=readme-ov-file#installation)
+ - A client that supports MCP, such as the [Claude Desktop App](https://claude.ai/download) or [Goose](https://block.github.io/goose/).
+
+ The tools run against your Modal account, so you need to be signed in with the Modal CLI.
+
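+ If you want to confirm your Modal setup before wiring up an MCP client, here is a minimal sketch using the same Modal primitives the toolbox relies on (the app name below is illustrative):
+
+ ```python
+ import modal
+
+ # Create (or reuse) a throwaway app and run a trivial command in a sandbox.
+ app = modal.App.lookup("mcp-toolbox--smoke-test", create_if_missing=True)
+ sb = modal.Sandbox.create(image=modal.Image.debian_slim(), app=app)
+ try:
+     proc = sb.exec("python", "-c", "print('modal is set up')")
+     proc.wait()
+     print(proc.stdout.read())
+ finally:
+     sb.terminate()
+ ```
+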
+ ## Installation
+
+ Installation depends on the client that uses the MCP server. Here are instructions for Claude and Goose.
+
+ ### Claude
+
+ Go to `Settings > Developer` in the Claude Desktop App and click on Edit Config.
+ ![🖼️Claude Settings](./assets/claude-settings.png)
+
+ Add the config for the MCP server. My config looks like this:
+
+ ```json
+ {
+   "mcpServers": {
+     "modal-toolbox": {
+       "command": "uvx",
+       "args": ["modal-mcp-toolbox"]
+     }
+   }
+ }
+ ```
+
+ ### Goose
+
+ Go to `Settings` and click on Add.
+
+ ![🖼️Goose Settings](./assets/goose-settings-1.png)
+
+ Then add an extension like in the screenshot below.
+ The important part is to set the command to:
+
+ ```
+ uvx modal-mcp-toolbox
+ ```
+
+ You can fill in the rest as you like.
+
+ ![🖼️Goose MCP Settings](./assets/goose-settings-2.png)
+
+ ### Installing via Smithery (not working currently)
+
+ To install Modal MCP Toolbox for Claude Desktop automatically via [Smithery](https://smithery.ai/server/@philipp-eisen/modal-mcp-toolbox):
+
+ ```bash
+ npx -y @smithery/cli install @philipp-eisen/modal-mcp-toolbox --client claude
+ ```
@@ -0,0 +1,9 @@
+ modal_mcp_toolbox/__init__.py,sha256=7CI-2YSApcw-itJA6uABy_iL42CB_vFf2DKwK3qd8jk,857
+ modal_mcp_toolbox/__main__.py,sha256=7pRdxSEidTQ4IciFf6qBuhweD8klMGdKTGiNxc6Nce0,43
+ modal_mcp_toolbox/code.py,sha256=jCaTiQuJ4kBM6WmAQA51K2u3BwP7mxQuWw3t7Iepa7g,3012
+ modal_mcp_toolbox/flux.py,sha256=zJyyhcSgWrBBQBd375hTgNrcgesQzd7b6sHjKE9zFlo,5396
+ iflow_mcp_modal_mcp_toolbox-0.1.12.dist-info/METADATA,sha256=Gdz-SHxhD_0hfIdw6X71b2DoSM6mIxQPJ5z_8tfQPiw,2721
+ iflow_mcp_modal_mcp_toolbox-0.1.12.dist-info/WHEEL,sha256=qtCwoSJWgHk21S1Kb4ihdzI2rlJ1ZKaIurTj_ngOhyQ,87
+ iflow_mcp_modal_mcp_toolbox-0.1.12.dist-info/entry_points.txt,sha256=RLhgHKt9SLV7sY5yzLQkDXy8PYr9IKW3nDRSbOUGF58,61
+ iflow_mcp_modal_mcp_toolbox-0.1.12.dist-info/licenses/LICENSE,sha256=n0FejJtmhOuaIfMIXu7XNk044eH1Kmh89jjHfYrHekQ,1070
+ iflow_mcp_modal_mcp_toolbox-0.1.12.dist-info/RECORD,,
@@ -0,0 +1,4 @@
+ Wheel-Version: 1.0
+ Generator: hatchling 1.27.0
+ Root-Is-Purelib: true
+ Tag: py3-none-any
@@ -0,0 +1,2 @@
+ [console_scripts]
+ modal-mcp-toolbox = modal_mcp_toolbox:main
@@ -0,0 +1,21 @@
+ MIT License
+
+ Copyright (c) 2025 Philipp Eisen
+
+ Permission is hereby granted, free of charge, to any person obtaining a copy
+ of this software and associated documentation files (the "Software"), to deal
+ in the Software without restriction, including without limitation the rights
+ to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
+ copies of the Software, and to permit persons to whom the Software is
+ furnished to do so, subject to the following conditions:
+
+ The above copyright notice and this permission notice shall be included in all
+ copies or substantial portions of the Software.
+
+ THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
+ IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
+ FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
+ AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
+ LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
+ OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
+ SOFTWARE.
@@ -0,0 +1,29 @@
+ import argparse
+ import logging
+ from importlib.metadata import version
+
+ from mcp.server.fastmcp import FastMCP
+
+ from modal_mcp_toolbox.code import run_python_code_in_sandbox
+ from modal_mcp_toolbox.flux import generate_flux_image
+
+ server = FastMCP("modal-toolbox")
+
+ server.add_tool(run_python_code_in_sandbox)
+ server.add_tool(generate_flux_image)
+
+
+ logger = logging.getLogger(__name__)
+
+
+ def main():
+     """MCP Modal Sandbox: A sandbox for running python code in a safe environment."""
+     parser = argparse.ArgumentParser(description="A sandbox for running python code in a safe environment.")
+     parser.add_argument("--version", action="version", version=version("iflow-mcp_modal-mcp-toolbox"))
+     parser.parse_args()
+     server.run()
+
+
+ if __name__ == "__main__":
+     logging.basicConfig(level=logging.INFO, handlers=[logging.StreamHandler()])
+     main()
@@ -0,0 +1,3 @@
+ from modal_mcp_toolbox import main
+
+ main()
@@ -0,0 +1,75 @@
+ from pathlib import Path
+ from typing import Annotated
+
+ import modal
+ from mcp import ErrorData, McpError
+ from mcp.types import INVALID_PARAMS, Annotations, TextContent
+ from pydantic import Field
+
+
+ async def run_python_code_in_sandbox(
+     code: Annotated[str, Field(description="The python code to run.")],
+     requirements: Annotated[list[str] | None, Field(description="The requirements to install.")] = None,
+     python_version: Annotated[str, Field(description="The python version to use. If not provided defaults to 3.13")] = "3.13",
+     mount_directory: Annotated[
+         str | None,
+         Field(
+             description="Allows you to make a local directory available at `/mounted-dir` for the code in `code`. Needs to be an absolute path. "
+             "Writes to this directory will NOT be reflected in the local directory."
+         ),
+     ] = None,
+     pull_files: Annotated[
+         list[tuple[str, str]] | None,
+         Field(
+             description="List of tuples (absolute_path_sandbox_file, absolute_path_local_file). "
+             "When provided downloads the file(s) from the sandbox to the local file(s)."
+         ),
+     ] = None,
+ ) -> TextContent:
+     """
+     Runs python code in a safe environment and returns the output.
+
+     Usage:
+         run_python_code_in_sandbox("print('Hello, world!')")
+         run_python_code_in_sandbox("import requests\nprint(requests.get('https://icanhazip.com').text)", requirements=["requests"])
+     """
+
+     app = modal.App.lookup("mcp-toolbox--code", create_if_missing=True)
+     image = modal.Image.debian_slim(python_version=python_version).pip_install(requirements or [])
+
+     mounts: list[modal.Mount] = []
+     if mount_directory:
+         mounts.append(modal.Mount.from_local_dir(mount_directory, remote_path="/mounted-dir"))
+
+     sb = modal.Sandbox.create(image=image, app=app, mounts=mounts)
+     try:
+         exc = sb.exec("python", "-c", code)
+         exc.wait()
+         if exc.returncode != 0:
+             stderr = exc.stderr.read()
+             raise McpError(
+                 ErrorData(
+                     code=INVALID_PARAMS,
+                     message=f"Error running code:\n{stderr}",
+                 )
+             )
+
+         if pull_files:
+             for remote_file, local_file in pull_files:
+                 if not Path(local_file).parent.exists():
+                     Path(local_file).parent.mkdir(parents=True, exist_ok=True)
+
+                 if Path(local_file).exists():
+                     raise McpError(
+                         ErrorData(
+                             code=INVALID_PARAMS,
+                             message=f"File {local_file} already exists.",
+                         )
+                     )
+                 with sb.open(remote_file, "rb") as f:
+                     with open(local_file, "wb") as f2:
+                         f2.write(f.read())
+         return TextContent(type="text", text=exc.stdout.read(), annotations=Annotations(audience=["user", "assistant"], priority=0.5))
+
+     finally:
+         sb.terminate()
@@ -0,0 +1,168 @@
+ # based on https://github.com/modal-labs/modal-examples/blob/main/06_gpu_and_ml/stable_diffusion/flux.py
+ import logging
+ from importlib.metadata import PackageNotFoundError, version
+ from io import BytesIO
+ from typing import Annotated
+
+ import modal
+ from mcp.server.fastmcp import Context, Image
+ from mcp.types import Annotations, ImageContent
+ from modal.exception import NotFoundError
+ from modal.runner import deploy_app
+ from pydantic import Field
+
+ logger = logging.getLogger(__name__)
+
+ MINUTES = 60  # seconds
+ VARIANT = "schnell"  # or "dev", but note [dev] requires you to accept terms and conditions on HF
+ NUM_INFERENCE_STEPS = 4  # use ~50 for [dev], smaller for [schnell]
+ IMAGE_FORMAT = "JPEG"
+
+
+ cuda_version = "12.4.0"  # should be no greater than host CUDA version
+ flavor = "devel"  # includes full CUDA toolkit
+ operating_sys = "ubuntu22.04"
+ tag = f"{cuda_version}-{flavor}-{operating_sys}"
+
+ cuda_dev_image = modal.Image.from_registry(f"nvidia/cuda:{tag}", add_python="3.11").entrypoint([])
+ diffusers_commit_sha = "81cf3b2f155f1de322079af28f625349ee21ec6b"
+
+ flux_image = (
+     cuda_dev_image.apt_install(
+         "git",
+         "libglib2.0-0",
+         "libsm6",
+         "libxrender1",
+         "libxext6",
+         "ffmpeg",
+         "libgl1",
+     )
+     .pip_install(
+         "invisible_watermark==0.2.0",
+         "transformers==4.44.0",
+         "huggingface_hub[hf_transfer]==0.26.2",
+         "accelerate==0.33.0",
+         "safetensors==0.4.4",
+         "sentencepiece==0.2.0",
+         "torch==2.5.0",
+         f"git+https://github.com/huggingface/diffusers.git@{diffusers_commit_sha}",
+         "numpy<2",
+         # This is a bit of a hack to ensure that the version of modal-mcp-toolbox baked into the image matches the local version.
+         # -- not really ideal
+         f"iflow-mcp_modal-mcp-toolbox=={version('iflow-mcp_modal-mcp-toolbox')}",
+     )
+     .env({"HF_HUB_ENABLE_HF_TRANSFER": "1", "HF_HUB_CACHE": "/cache"})
+ )
+
+
+ flux_image = flux_image.env(
+     {
+         "TORCHINDUCTOR_CACHE_DIR": "/root/.inductor-cache",
+         "TORCHINDUCTOR_FX_GRAPH_CACHE": "1",
+     }
+ )
+
+
+ app_name = "mcp-toolbox--flux"
+ app = modal.App(app_name, image=flux_image)
+
+ with flux_image.imports():
+     import torch
+     from diffusers import FluxPipeline
+
+
+ @app.cls(
+     gpu="L40S",
+     scaledown_window=5 * MINUTES,
+     image=flux_image,
+     volumes={
+         "/cache": modal.Volume.from_name("hf-hub-cache", create_if_missing=True),
+     },
+     enable_memory_snapshot=True,
+ )
+ class Model:
+     # Runs at snapshot time: load the pipeline on CPU so it is captured in the memory snapshot.
+     @modal.enter(snap=True)
+     def load(self):
+         print("🔄 loading model...")
+         pipe = FluxPipeline.from_pretrained(f"black-forest-labs/FLUX.1-{VARIANT}", torch_dtype=torch.bfloat16)
+         self.pipe = _optimize(pipe)
+
+     # Runs after the snapshot is restored on each cold start: move the pipeline to the GPU.
+     @modal.enter(snap=False)
+     def setup(self):
+         print("🔄 moving model to GPU...")
+         self.pipe = self.pipe.to("cuda")
+
+     @modal.method()
+     def inference(self, prompt: str) -> bytes:
+         print("🎨 generating image...")
+         out = self.pipe(
+             prompt,
+             output_type="pil",
+             num_inference_steps=NUM_INFERENCE_STEPS,
+         ).images[0]
+
+         byte_stream = BytesIO()
+         out.save(byte_stream, format=IMAGE_FORMAT)
+         return byte_stream.getvalue()
+
+
+ @app.function(
+     # This is a bit of a hack to ensure that the version of modal-mcp-toolbox deployed remotely matches the local version.
+     # -- not really ideal
+     image=modal.Image.debian_slim().pip_install(f"iflow-mcp_modal-mcp-toolbox=={version('iflow-mcp_modal-mcp-toolbox')}"),
+     scaledown_window=5 * MINUTES,
+ )
+ def get_version():
+     try:
+         # Look up the same distribution name that _ensure_app_deployment_is_up_to_date compares against.
+         print("modal_mcp_toolbox version:", version("iflow-mcp_modal-mcp-toolbox"))
+         return version("iflow-mcp_modal-mcp-toolbox")
+     except PackageNotFoundError:
+         print("modal_mcp_toolbox version: unknown")
+         return "unknown"
+
+
+ def _optimize(pipe):
+     # fuse QKV projections in Transformer and VAE
+     pipe.transformer.fuse_qkv_projections()
+     pipe.vae.fuse_qkv_projections()
+
+     # switch memory layout to Torch's preferred, channels_last
+     pipe.transformer.to(memory_format=torch.channels_last)
+     pipe.vae.to(memory_format=torch.channels_last)
+
+     return pipe
+
+
+ async def _ensure_app_deployment_is_up_to_date(ctx: Context):
+     try:
+         fn = modal.Function.from_name(app_name, "get_version")
+         remote_version = await fn.remote.aio()
+
+         if remote_version != version("iflow-mcp_modal-mcp-toolbox"):
+             await ctx.info("App is out of date. Deploying ...")
+             logger.info("App is out of date. Deploying ...")
+             deploy_app(app)
+     except NotFoundError:
+         await ctx.info("App not found. Deploying...")
+         logger.info("App not found. Deploying...")
+         deploy_app(app)
+
+
+ async def generate_flux_image(prompt: Annotated[str, Field(description="The prompt to generate an image for")], ctx: Context) -> ImageContent:
+     """Lets you generate an image using the Flux model."""
+     await _ensure_app_deployment_is_up_to_date(ctx)
+
+     cls = modal.Cls.from_name(app_name, Model.__name__)
+     image_bytes = await cls().inference.remote.aio(prompt)
+     image_content = Image(data=image_bytes, format=IMAGE_FORMAT).to_image_content()
+     image_content.annotations = Annotations(audience=["user", "assistant"], priority=0.5)
+     return image_content
+
+
+ if __name__ == "__main__":
+     deploy_app(app)
+
+
+ @app.local_entrypoint()
+ async def main():
+     print(await get_version.remote.aio())