qiskit-mcp-server 0.1.0__tar.gz → 0.1.1__tar.gz

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (22)
  1. {qiskit_mcp_server-0.1.0 → qiskit_mcp_server-0.1.1}/PKG-INFO +62 -4
  2. {qiskit_mcp_server-0.1.0 → qiskit_mcp_server-0.1.1}/README.md +60 -2
  3. qiskit_mcp_server-0.1.1/examples/README.md +295 -0
  4. qiskit_mcp_server-0.1.1/examples/langchain_agent.ipynb +1084 -0
  5. qiskit_mcp_server-0.1.1/examples/langchain_agent.py +375 -0
  6. {qiskit_mcp_server-0.1.0 → qiskit_mcp_server-0.1.1}/pyproject.toml +8 -2
  7. qiskit_mcp_server-0.1.1/server.json +25 -0
  8. {qiskit_mcp_server-0.1.0 → qiskit_mcp_server-0.1.1}/.env.example +0 -0
  9. {qiskit_mcp_server-0.1.0 → qiskit_mcp_server-0.1.1}/.gitignore +0 -0
  10. {qiskit_mcp_server-0.1.0 → qiskit_mcp_server-0.1.1}/LICENSE +0 -0
  11. {qiskit_mcp_server-0.1.0 → qiskit_mcp_server-0.1.1}/run_tests.sh +0 -0
  12. {qiskit_mcp_server-0.1.0 → qiskit_mcp_server-0.1.1}/src/qiskit_mcp_server/__init__.py +0 -0
  13. {qiskit_mcp_server-0.1.0 → qiskit_mcp_server-0.1.1}/src/qiskit_mcp_server/circuit_serialization.py +0 -0
  14. {qiskit_mcp_server-0.1.0 → qiskit_mcp_server-0.1.1}/src/qiskit_mcp_server/py.typed +0 -0
  15. {qiskit_mcp_server-0.1.0 → qiskit_mcp_server-0.1.1}/src/qiskit_mcp_server/server.py +0 -0
  16. {qiskit_mcp_server-0.1.0 → qiskit_mcp_server-0.1.1}/src/qiskit_mcp_server/transpiler.py +0 -0
  17. {qiskit_mcp_server-0.1.0 → qiskit_mcp_server-0.1.1}/src/qiskit_mcp_server/utils.py +0 -0
  18. {qiskit_mcp_server-0.1.0 → qiskit_mcp_server-0.1.1}/tests/__init__.py +0 -0
  19. {qiskit_mcp_server-0.1.0 → qiskit_mcp_server-0.1.1}/tests/conftest.py +0 -0
  20. {qiskit_mcp_server-0.1.0 → qiskit_mcp_server-0.1.1}/tests/test_circuit_serialization.py +0 -0
  21. {qiskit_mcp_server-0.1.0 → qiskit_mcp_server-0.1.1}/tests/test_transpiler.py +0 -0
  22. {qiskit_mcp_server-0.1.0 → qiskit_mcp_server-0.1.1}/uv.lock +0 -0
@@ -1,6 +1,6 @@
  Metadata-Version: 2.4
  Name: qiskit-mcp-server
- Version: 0.1.0
+ Version: 0.1.1
  Summary: MCP server for Qiskit quantum computing capabilities with circuit serialization utilities
  Project-URL: Homepage, https://github.com/Qiskit/mcp-servers
  Project-URL: Repository, https://github.com/Qiskit/mcp-servers
@@ -10,7 +10,7 @@ Author-email: "Quantum+AI Team. IBM Quantum" <Quantum.Plus.AI@ibm.com>
  License: Apache-2.0
  License-File: LICENSE
  Requires-Python: <3.15,>=3.10
- Requires-Dist: fastmcp>=2.8.1
+ Requires-Dist: fastmcp<3,>=2.8.1
  Requires-Dist: nest-asyncio>=1.5.0
  Requires-Dist: pydantic>=2.0.0
  Requires-Dist: python-dotenv>=1.0.0
@@ -39,6 +39,8 @@ Description-Content-Type: text/markdown

  # Qiskit MCP Server

+ <!-- mcp-name: io.github.Qiskit/qiskit-mcp-server -->
+
  A Model Context Protocol (MCP) server that provides quantum circuit transpilation capabilities using Qiskit's pass managers. This server enables AI assistants to optimize quantum circuits for various hardware targets.

  ## Features
@@ -48,7 +50,7 @@ A Model Context Protocol (MCP) server that provides quantum circuit transpilatio
  - **Topology Support**: Built-in support for linear, ring, grid, and custom coupling maps
  - **Circuit Analysis**: Analyze circuit complexity without transpilation
  - **Optimization Comparison**: Compare results across all optimization levels
- - **Dual API**: Supports both async (MCP) and sync (DSPy, Jupyter, scripts) usage
+ - **Dual API**: Supports both async (MCP) and sync (Jupyter, scripts) usage

  ## Prerequisites

@@ -163,7 +165,7 @@ transpiled_qpy = result["transpiled_circuit"]["circuit_qpy"]
  result2 = await transpile_circuit(transpiled_qpy, circuit_format="qpy", optimization_level=3)
  ```

- ### Sync Usage (DSPy, Jupyter, Scripts)
+ ### Sync Usage (Scripts, Jupyter)

  ```python
  from qiskit_mcp_server.transpiler import transpile_circuit, analyze_circuit
@@ -190,6 +192,62 @@ for level in range(4):
      print(f"Level {level}: depth={result['depth']}, size={result['size']}")
  ```

+ **LangChain Integration Example:**
+
+ > **Note:** To run LangChain examples you will need to install the dependencies:
+ > ```bash
+ > pip install langchain langchain-mcp-adapters langchain-openai python-dotenv
+ > ```
+
+ ```python
+ import asyncio
+ import os
+ from langchain.agents import create_agent
+ from langchain_mcp_adapters.client import MultiServerMCPClient
+ from langchain_mcp_adapters.tools import load_mcp_tools
+ from langchain_openai import ChatOpenAI
+ from dotenv import load_dotenv
+
+ # Load environment variables (OPENAI_API_KEY, etc.)
+ load_dotenv()
+
+ # Sample Bell state circuit
+ SAMPLE_BELL = """
+ OPENQASM 3.0;
+ include "stdgates.inc";
+ qubit[2] q;
+ h q[0];
+ cx q[0], q[1];
+ """
+
+ async def main():
+     # Configure MCP client
+     mcp_client = MultiServerMCPClient({
+         "qiskit": {
+             "transport": "stdio",
+             "command": "qiskit-mcp-server",
+             "args": [],
+             "env": {},
+         }
+     })
+
+     # Use persistent session for efficient tool calls
+     async with mcp_client.session("qiskit") as session:
+         tools = await load_mcp_tools(session)
+
+         # Create agent with LLM
+         llm = ChatOpenAI(model="gpt-5.2", temperature=0)
+         agent = create_agent(llm, tools)
+
+         # Run a query
+         response = await agent.ainvoke(f"Transpile this circuit for IBM Heron: {SAMPLE_BELL}")
+         print(response)
+
+ asyncio.run(main())
+ ```
+
+ For more LLM providers (Anthropic, Google, Ollama, Watsonx) and detailed examples including Jupyter notebooks, see the [examples/](examples/) directory.
+
  ## API Reference

  ### Tools
@@ -1,5 +1,7 @@
  # Qiskit MCP Server

+ <!-- mcp-name: io.github.Qiskit/qiskit-mcp-server -->
+
  A Model Context Protocol (MCP) server that provides quantum circuit transpilation capabilities using Qiskit's pass managers. This server enables AI assistants to optimize quantum circuits for various hardware targets.

  ## Features
@@ -9,7 +11,7 @@ A Model Context Protocol (MCP) server that provides quantum circuit transpilatio
  - **Topology Support**: Built-in support for linear, ring, grid, and custom coupling maps
  - **Circuit Analysis**: Analyze circuit complexity without transpilation
  - **Optimization Comparison**: Compare results across all optimization levels
- - **Dual API**: Supports both async (MCP) and sync (DSPy, Jupyter, scripts) usage
+ - **Dual API**: Supports both async (MCP) and sync (Jupyter, scripts) usage

  ## Prerequisites

@@ -124,7 +126,7 @@ transpiled_qpy = result["transpiled_circuit"]["circuit_qpy"]
  result2 = await transpile_circuit(transpiled_qpy, circuit_format="qpy", optimization_level=3)
  ```

- ### Sync Usage (DSPy, Jupyter, Scripts)
+ ### Sync Usage (Scripts, Jupyter)

  ```python
  from qiskit_mcp_server.transpiler import transpile_circuit, analyze_circuit
@@ -151,6 +153,62 @@ for level in range(4):
      print(f"Level {level}: depth={result['depth']}, size={result['size']}")
  ```

+ **LangChain Integration Example:**
+
+ > **Note:** To run LangChain examples you will need to install the dependencies:
+ > ```bash
+ > pip install langchain langchain-mcp-adapters langchain-openai python-dotenv
+ > ```
+
+ ```python
+ import asyncio
+ import os
+ from langchain.agents import create_agent
+ from langchain_mcp_adapters.client import MultiServerMCPClient
+ from langchain_mcp_adapters.tools import load_mcp_tools
+ from langchain_openai import ChatOpenAI
+ from dotenv import load_dotenv
+
+ # Load environment variables (OPENAI_API_KEY, etc.)
+ load_dotenv()
+
+ # Sample Bell state circuit
+ SAMPLE_BELL = """
+ OPENQASM 3.0;
+ include "stdgates.inc";
+ qubit[2] q;
+ h q[0];
+ cx q[0], q[1];
+ """
+
+ async def main():
+     # Configure MCP client
+     mcp_client = MultiServerMCPClient({
+         "qiskit": {
+             "transport": "stdio",
+             "command": "qiskit-mcp-server",
+             "args": [],
+             "env": {},
+         }
+     })
+
+     # Use persistent session for efficient tool calls
+     async with mcp_client.session("qiskit") as session:
+         tools = await load_mcp_tools(session)
+
+         # Create agent with LLM
+         llm = ChatOpenAI(model="gpt-5.2", temperature=0)
+         agent = create_agent(llm, tools)
+
+         # Run a query
+         response = await agent.ainvoke(f"Transpile this circuit for IBM Heron: {SAMPLE_BELL}")
+         print(response)
+
+ asyncio.run(main())
+ ```
+
+ For more LLM providers (Anthropic, Google, Ollama, Watsonx) and detailed examples including Jupyter notebooks, see the [examples/](examples/) directory.
+
  ## API Reference

  ### Tools
@@ -0,0 +1,295 @@
+ # Qiskit MCP Server Examples
+
+ This directory contains examples demonstrating how to build AI agents that interact with Qiskit's transpiler through the **qiskit-mcp-server**.
+
+ ## Available Examples
+
+ | File | Description |
+ |------|-------------|
+ | [`langchain_agent.ipynb`](langchain_agent.ipynb) | **Jupyter Notebook** - Interactive tutorial with step-by-step examples |
+ | [`langchain_agent.py`](langchain_agent.py) | **Python Script** - Command-line agent with multiple LLM provider support |
+
+ ## LangChain Agent Example
+
+ The examples show how to create an AI agent using LangChain that connects to the qiskit-mcp-server via the Model Context Protocol (MCP).
+
+ ### Quick Start with Jupyter Notebook
+
+ For an interactive experience, open the notebook:
+
+ ```bash
+ jupyter notebook langchain_agent.ipynb
+ ```
+
+ The notebook includes:
+ - Step-by-step setup instructions
+ - Multiple LLM provider options (just run the cell for your provider)
+ - Sample QASM circuits for testing
+ - Interactive examples for transpilation and analysis
+ - A custom query cell for your own circuits
+
+ ### Features
+
+ The agent can:
+
+ - Transpile quantum circuits with configurable optimization levels (0-3)
+ - Analyze circuit structure and complexity
+ - Compare optimization levels to find the best settings
+ - Target different hardware backends (IBM Eagle, Heron, etc.)
+ - Apply different topologies (linear, ring, grid, heavy_hex, full)
+
+ ### Supported LLM Providers
+
+ | Provider | Package | Default Model | API Key Required |
+ |----------|---------|---------------|------------------|
+ | OpenAI | `langchain-openai` | gpt-5.2 | Yes (`OPENAI_API_KEY`) |
+ | Anthropic | `langchain-anthropic` | claude-sonnet-4-5-20250929 | Yes (`ANTHROPIC_API_KEY`) |
+ | Google | `langchain-google-genai` | gemini-3-pro-preview | Yes (`GOOGLE_API_KEY`) |
+ | Ollama | `langchain-ollama` | llama3.3 | No (runs locally) |
+ | Watsonx | `langchain-ibm` | ibm/granite-4-h-small | Yes (`WATSONX_APIKEY`, `WATSONX_PROJECT_ID`) |
+
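The table above lists each provider's integration package; switching provider amounts to constructing a different chat model and handing it to `create_agent`. As a rough editorial sketch (not part of the packaged examples, with model names taken from the table defaults), either of the following could stand in for the `ChatOpenAI` instance used elsewhere in this README:

```python
# Editorial sketch: alternative chat models for create_agent(); the model
# strings are the table defaults and may need updating for your account.
from langchain_anthropic import ChatAnthropic  # pip install langchain-anthropic
from langchain_ollama import ChatOllama        # pip install langchain-ollama

llm_claude = ChatAnthropic(model="claude-sonnet-4-5-20250929", temperature=0)
llm_local = ChatOllama(model="llama3.3", temperature=0)  # no API key; needs a running Ollama server
```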
+ ### Architecture
+
+ ```
+ ┌─────────────┐     MCP Protocol     ┌──────────────────────────┐
+ │  LangChain  │ ◄──────────────────► │   qiskit-mcp-server      │
+ │   Agent     │                      │                          │
+ └─────────────┘                      │  ┌────────────────────┐  │
+                                      │  │ Qiskit Transpiler  │  │
+                                      │  └────────────────────┘  │
+                                      └──────────────────────────┘
+ ```
+
+ ### Prerequisites
+
+ 1. **Python 3.10+**
+
+ 2. **Install the MCP server:**
+
+    ```bash
+    pip install qiskit-mcp-server
+    ```
+
+ 3. **Install LangChain dependencies:**
+
+    ```bash
+    # Core dependencies
+    pip install langchain langchain-mcp-adapters python-dotenv
+
+    # Install at least ONE of the following based on your preferred LLM provider(s):
+    pip install langchain-openai          # For OpenAI
+    pip install langchain-anthropic       # For Anthropic Claude
+    pip install langchain-google-genai    # For Google Gemini
+    pip install langchain-ollama          # For local Ollama
+    pip install langchain-ibm             # For IBM Watsonx
+    ```
+
+ 4. **Set up environment variables:**
+
+    ```bash
+    # LLM API key (depends on provider)
+    export OPENAI_API_KEY="your-openai-api-key"        # For OpenAI
+    export ANTHROPIC_API_KEY="your-anthropic-api-key"  # For Anthropic
+    export GOOGLE_API_KEY="your-google-api-key"        # For Google
+    # No API key needed for Ollama (runs locally)
+
+    # For Watsonx
+    export WATSONX_APIKEY="your-watsonx-api-key"
+    export WATSONX_PROJECT_ID="your-project-id"
+    export WATSONX_URL="https://us-south.ml.cloud.ibm.com"  # Optional, this is the default
+    ```
+
+    Or create a `.env` file:
+
+    ```env
+    OPENAI_API_KEY=your-openai-api-key
+
+    # For Watsonx
+    WATSONX_APIKEY=your-watsonx-api-key
+    WATSONX_PROJECT_ID=your-project-id
+    ```
+
+ **Note:** This server doesn't require IBM Quantum credentials - it uses local Qiskit transpilation.
+
+ ### Running the Example
+
+ **Interactive mode with OpenAI (default):**
+
+ ```bash
+ cd examples
+ python langchain_agent.py
+ ```
+
+ **With Anthropic Claude:**
+
+ ```bash
+ python langchain_agent.py --provider anthropic
+ ```
+
+ **With Google Gemini:**
+
+ ```bash
+ python langchain_agent.py --provider google
+ ```
+
+ **With local Ollama (no API key needed):**
+
+ ```bash
+ # First, make sure Ollama is running with a model pulled
+ # ollama pull llama3.3
+ python langchain_agent.py --provider ollama --model llama3.3
+ ```
+
+ **With IBM Watsonx:**
+
+ ```bash
+ python langchain_agent.py --provider watsonx
+ # Or with a specific model
+ python langchain_agent.py --provider watsonx --model ibm/granite-4-h-small
+ ```
+
+ **Single query mode:**
+
+ ```bash
+ python langchain_agent.py --single
+ python langchain_agent.py --provider anthropic --single
+ ```
+
+ **Custom model:**
+
+ ```bash
+ python langchain_agent.py --provider openai --model gpt-4-turbo
+ python langchain_agent.py --provider anthropic --model claude-3-haiku-20240307
+ ```
+
+ ### Example Interactions
+
+ Once running, you can ask the agent questions like:
+
+ - "Transpile my bell circuit for IBM Heron"
+ - "Analyze my ghz circuit"
+ - "Compare optimization levels for my qft circuit"
+ - "Transpile this circuit with linear topology"
+ - "What's the depth of this circuit after transpilation?"
+
+ The agent comes with sample circuits built-in:
+ - `bell`: 2-qubit Bell state circuit
+ - `ghz`: 4-qubit GHZ state circuit
+ - `qft`: 3-qubit QFT circuit
+
+ ### Available MCP Tools
+
+ The agent has access to these tools provided by the MCP server:
+
+ | Tool | Description |
+ |------|-------------|
+ | `transpile_circuit_tool` | Transpile a circuit with configurable optimization |
+ | `analyze_circuit_tool` | Analyze circuit structure without transpiling |
+ | `compare_optimization_levels_tool` | Compare all optimization levels (0-3) |
+
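As a quick sanity check (an editorial sketch, not part of the packaged examples), the same `MultiServerMCPClient` setup used throughout this README can be used to print the tools the server actually exposes:

```python
# Editorial sketch: list the tools exposed by qiskit-mcp-server, reusing the
# stdio client configuration shown in the examples above.
import asyncio

from langchain_mcp_adapters.client import MultiServerMCPClient
from langchain_mcp_adapters.tools import load_mcp_tools

async def list_tools():
    client = MultiServerMCPClient({
        "qiskit": {"transport": "stdio", "command": "qiskit-mcp-server", "args": [], "env": {}}
    })
    async with client.session("qiskit") as session:
        for tool in await load_mcp_tools(session):
            print(f"{tool.name}: {tool.description}")

asyncio.run(list_tools())
```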
190
+ ### Basis Gate Presets
191
+
192
+ | Preset | Gates | Description |
193
+ |--------|-------|-------------|
194
+ | `ibm_eagle` | id, rz, sx, x, ecr, reset | IBM Eagle r3 (127 qubits, uses ECR) |
195
+ | `ibm_heron` | id, rz, sx, x, cz, reset | IBM Heron (133-156 qubits, uses CZ) |
196
+ | `ibm_legacy` | id, rz, sx, x, cx, reset | Older IBM systems (uses CX) |
197
+
198
+ ### Available Topologies
199
+
200
+ | Topology | Description |
201
+ |----------|-------------|
202
+ | `linear` | Chain connectivity (qubit i ↔ i+1) |
203
+ | `ring` | Linear with wraparound |
204
+ | `grid` | 2D grid connectivity |
205
+ | `heavy_hex` | IBM heavy-hex topology |
206
+ | `full` | All-to-all connectivity |
207
+
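The preset and topology names in the two tables above can be requested in plain language, since the agent forwards them to the transpilation tools. A few example prompts (an editorial sketch; the exact phrasing is up to you) that could be passed to `agent.ainvoke(...)`, or to `run_agent_query(...)` described under "Using as a Library" below:

```python
# Editorial sketch: prompts that exercise specific basis-gate presets and
# topologies, using the sample circuit shipped with the example script.
from langchain_agent import SAMPLE_BELL_STATE  # available when run from examples/

prompts = [
    f"Transpile this circuit for ibm_heron with a linear topology:\n{SAMPLE_BELL_STATE}",
    f"Transpile this circuit for ibm_eagle using the heavy_hex topology:\n{SAMPLE_BELL_STATE}",
    f"Compare optimization levels for this circuit on a ring topology:\n{SAMPLE_BELL_STATE}",
]
```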
+ ### Optimization Levels
+
+ | Level | Description | Use Case |
+ |-------|-------------|----------|
+ | 0 | No optimization, only basis gate decomposition | Quick iterations, debugging |
+ | 1 | Light optimization with default layout | Development, prototyping |
+ | 2 | Medium optimization with noise-aware layout | Production use (recommended) |
+ | 3 | Heavy optimization for best results | Critical applications, small circuits |
+
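To pick a level empirically, the `compare_optimization_levels_tool` can be called through the agent, or the synchronous API can be looped over directly. The sketch below is editorial: it assumes `transpile_circuit` accepts an OpenQASM 3 string by default and returns a dict containing `depth` and `size`, as in the comparison loop shown in the top-level README; check the API Reference for the exact signature.

```python
# Editorial sketch: compare optimization levels with the synchronous API.
# Assumes transpile_circuit() takes an OpenQASM 3 string and returns a dict
# with "depth" and "size" keys; adjust if the actual signature differs.
from qiskit_mcp_server.transpiler import transpile_circuit

BELL_QASM = """
OPENQASM 3.0;
include "stdgates.inc";
qubit[2] q;
h q[0];
cx q[0], q[1];
"""

for level in range(4):
    result = transpile_circuit(BELL_QASM, optimization_level=level)
    print(f"Level {level}: depth={result['depth']}, size={result['size']}")
```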
+ ### Using as a Library
+
+ You can import and use the agent in your own async code:
+
+ ```python
+ import asyncio
+ from langchain_agent import (
+     get_mcp_client,
+     create_transpiler_agent_with_session,
+     run_agent_query,
+     SAMPLE_BELL_STATE,
+ )
+
+ async def main():
+     # Use persistent session for efficient tool calls
+     mcp_client = get_mcp_client()
+     async with mcp_client.session("qiskit") as session:
+         agent = await create_transpiler_agent_with_session(session, provider="openai")
+
+         # Run queries
+         query = f"Transpile this circuit for IBM Heron:\n{SAMPLE_BELL_STATE}"
+         response = await run_agent_query(agent, query)
+         print(response)
+
+ asyncio.run(main())
+ ```
+
+ ### Customizing the Agent
+
+ You can modify the system prompt or use a different LLM by creating your own agent setup:
+
+ ```python
+ import asyncio
+ from langchain.agents import create_agent
+ from langchain_mcp_adapters.client import MultiServerMCPClient
+ from langchain_mcp_adapters.tools import load_mcp_tools
+ from langchain_openai import ChatOpenAI
+
+ async def create_custom_agent():
+     # Configure MCP client
+     mcp_client = MultiServerMCPClient({
+         "qiskit": {
+             "transport": "stdio",
+             "command": "qiskit-mcp-server",
+             "args": [],
+             "env": {},
+         }
+     })
+
+     # Use persistent session for efficient tool calls
+     async with mcp_client.session("qiskit") as session:
+         tools = await load_mcp_tools(session)
+
+         # Custom system prompt
+         system_prompt = "You are a quantum circuit optimization expert..."
+
+         llm = ChatOpenAI(model="gpt-5.2", temperature=0)
+         agent = create_agent(llm, tools, system_prompt=system_prompt)
+
+         # Use the agent within the session context
+         # ... your agent logic here ...
+
+ asyncio.run(create_custom_agent())
+ ```
+
+ ### Troubleshooting
+
+ **"MCP server not found"**
+ - Ensure `qiskit-mcp-server` is installed and available in your PATH
+ - Try running `qiskit-mcp-server` directly to verify installation
+
+ **"Invalid QASM"**
+ - Ensure your QASM circuit is valid QASM 3.0 or QASM 2.0 syntax
+ - Include the `include "stdgates.inc";` line for standard gates
+
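When a circuit is rejected, it can help to start from a string that is known to parse. An editorial sketch, assuming the synchronous `analyze_circuit` accepts a QASM string as suggested by the top-level README's sync-usage section:

```python
# Editorial sketch: a known-good OpenQASM 3.0 circuit (the same Bell state used
# throughout the examples) for sanity-checking parsing before debugging your
# own QASM. analyze_circuit is assumed to accept a QASM string; see the API Reference.
from qiskit_mcp_server.transpiler import analyze_circuit

VALID_QASM3 = """
OPENQASM 3.0;
include "stdgates.inc";
qubit[2] q;
h q[0];
cx q[0], q[1];
"""

print(analyze_circuit(VALID_QASM3))
```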
+ **"Transpilation slow"**
+ - Use optimization level 2 instead of 3 for larger circuits
+ - Level 3 can be very slow for circuits with >20 qubits or >500 gates
+ - Use the `compare_optimization_levels` tool to find the best level for your circuit