iflow-mcp_amansingh0311-mcp-qdrant-openai 0.1.0__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
File without changes
@@ -0,0 +1,164 @@
#!/usr/bin/env python3
"""
Qdrant MCP Server - Provides vector search capabilities using OpenAI embeddings
"""

import os
import json
from typing import Dict, List, Optional, Any
from dataclasses import dataclass

# Load environment variables from .env file
try:
    from dotenv import load_dotenv

    load_dotenv()
except ImportError:
    print(
        "Warning: python-dotenv not installed. Environment variables must be set manually."
    )

import openai
from qdrant_client import QdrantClient
from qdrant_client.http.models import Filter, FieldCondition, MatchValue

from mcp.server.fastmcp import FastMCP, Context


# Initialize Qdrant client
def get_qdrant_client() -> QdrantClient:
    """Get Qdrant client from environment variables or defaults"""
    url = os.getenv("QDRANT_URL", "http://localhost:6333")
    api_key = os.getenv("QDRANT_API_KEY", None)

    if url.startswith("http"):
        return QdrantClient(url=url, api_key=api_key)
    else:
        # For local file-based storage
        return QdrantClient(path=url)


# Function to create embeddings
def get_embedding(text: str, model: str = "text-embedding-3-small") -> List[float]:
    """Get embeddings from OpenAI API"""
    client = openai.OpenAI(api_key=os.getenv("OPENAI_API_KEY"))

    response = client.embeddings.create(model=model, input=text)

    return response.data[0].embedding


# Create MCP Server
mcp = FastMCP("Qdrant-OpenAI")


@dataclass
class QdrantContext:
    client: QdrantClient


@mcp.tool()
async def query_collection(
    collection_name: str,
    query_text: str,
    limit: int = 5,
    model: str = "text-embedding-3-small",
) -> str:
    """
    Search a Qdrant collection using semantic search with OpenAI embeddings.

    Args:
        collection_name: Name of the Qdrant collection to search
        query_text: The search query in natural language
        limit: Maximum number of results to return (default: 5)
        model: OpenAI embedding model to use (default: text-embedding-3-small)

    Returns:
        JSON string containing search results
    """
    # Create a Qdrant client (a new client is built per call; no shared context is used)
    client = get_qdrant_client()

    # Generate embedding for query
    query_vector = get_embedding(query_text, model)

    # Search Qdrant
    try:
        search_result = client.search(
            collection_name=collection_name,
            query_vector=query_vector,
            limit=limit,
        )

        # Format results
        results = []
        for scored_point in search_result:
            result = {
                "id": scored_point.id,
                "score": scored_point.score,
                "payload": scored_point.payload,
            }
            results.append(result)

        return json.dumps({"results": results}, indent=2)

    except Exception as e:
        return json.dumps({"error": str(e)})


@mcp.tool()
async def list_collections() -> str:
    """
    List all available collections in the Qdrant database.

    Returns:
        JSON string containing the list of collections
    """
    client = get_qdrant_client()

    try:
        collections = client.get_collections()
        return json.dumps(
            {"collections": [c.name for c in collections.collections]}, indent=2
        )
    except Exception as e:
        return json.dumps({"error": str(e)})


@mcp.tool()
async def collection_info(collection_name: str) -> str:
    """
    Get information about a specific collection.

    Args:
        collection_name: Name of the collection

    Returns:
        JSON string containing collection information
    """
    client = get_qdrant_client()

    try:
        collection_info = client.get_collection(collection_name)
        return json.dumps(
            {
                "vectors_count": collection_info.vectors_count,
                "points_count": collection_info.points_count,
                "dimension": collection_info.config.params.vectors.size,
                "distance": collection_info.config.params.vectors.distance,
            },
            indent=2,
        )
    except Exception as e:
        return json.dumps({"error": str(e)})


def main():
    """Main entry point for the MCP server"""
    mcp.run(transport="stdio")


if __name__ == "__main__":
    # Run the server
    main()
@@ -0,0 +1,116 @@
Metadata-Version: 2.4
Name: iflow-mcp_amansingh0311-mcp-qdrant-openai
Version: 0.1.0
Summary: MCP Qdrant Server with OpenAI Embeddings
Requires-Python: >=3.10
Requires-Dist: mcp>=1.2.0
Requires-Dist: openai>=1.0.0
Requires-Dist: python-dotenv>=1.0.0
Requires-Dist: qdrant-client>=1.6.0
Provides-Extra: cli
Requires-Dist: mcp[cli]; extra == 'cli'
Description-Content-Type: text/markdown

[![MseeP.ai Security Assessment Badge](https://mseep.net/pr/amansingh0311-mcp-qdrant-openai-badge.png)](https://mseep.ai/app/amansingh0311-mcp-qdrant-openai)

# MCP Qdrant Server with OpenAI Embeddings

[![smithery badge](https://smithery.ai/badge/@amansingh0311/mcp-qdrant-openai)](https://smithery.ai/server/@amansingh0311/mcp-qdrant-openai)

This MCP server provides vector search capabilities using the Qdrant vector database and OpenAI embeddings.

## Features

- Semantic search in Qdrant collections using OpenAI embeddings
- List available collections
- View collection information

## Prerequisites

- Python 3.10+ installed
- Qdrant instance (local or remote)
- OpenAI API key

## Installation

### Installing via Smithery

To install Qdrant Vector Search Server for Claude Desktop automatically via [Smithery](https://smithery.ai/server/@amansingh0311/mcp-qdrant-openai):

```bash
npx -y @smithery/cli install @amansingh0311/mcp-qdrant-openai --client claude
```

### Manual Installation

1. Clone this repository:

```bash
git clone https://github.com/yourusername/mcp-qdrant-openai.git
cd mcp-qdrant-openai
```

2. Install dependencies:

```bash
pip install -r requirements.txt
```

## Configuration

Set the following environment variables:

- `OPENAI_API_KEY`: Your OpenAI API key
- `QDRANT_URL`: URL to your Qdrant instance (default: "http://localhost:6333")
- `QDRANT_API_KEY`: Your Qdrant API key (if applicable)

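For a local setup, these can be exported in the shell or placed in a `.env` file, which the server loads via `python-dotenv` at startup. A minimal sketch with placeholder values:

```bash
# Placeholder values - substitute your own key and endpoint
export OPENAI_API_KEY="sk-..."
export QDRANT_URL="http://localhost:6333"
export QDRANT_API_KEY="..."   # only needed if your Qdrant instance requires an API key
```
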
## Usage

### Run the server directly

```bash
python mcp_qdrant_server.py
```

### Run with MCP CLI

```bash
mcp dev mcp_qdrant_server.py
```

### Installing in Claude Desktop

```bash
mcp install mcp_qdrant_server.py --name "Qdrant-OpenAI"
```

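Alternatively, the server can be registered by hand in Claude Desktop's `claude_desktop_config.json`. A minimal sketch, assuming the repository was cloned to `/path/to/mcp-qdrant-openai` (the path and key below are placeholders, not values from this package):

```json
{
  "mcpServers": {
    "Qdrant-OpenAI": {
      "command": "python",
      "args": ["/path/to/mcp-qdrant-openai/mcp_qdrant_server.py"],
      "env": {
        "OPENAI_API_KEY": "sk-...",
        "QDRANT_URL": "http://localhost:6333"
      }
    }
  }
}
```
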
## Available Tools

### query_collection

Search a Qdrant collection using semantic search with OpenAI embeddings.

- `collection_name`: Name of the Qdrant collection to search
- `query_text`: The search query in natural language
- `limit`: Maximum number of results to return (default: 5)
- `model`: OpenAI embedding model to use (default: text-embedding-3-small)

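The tool returns a JSON string. Based on the server code above, a successful call has the shape below (the id, score, and payload fields are illustrative and depend on how the collection was populated); failures are returned as `{"error": "..."}`:

```json
{
  "results": [
    {
      "id": 42,
      "score": 0.83,
      "payload": {"text": "An illustrative stored document..."}
    }
  ]
}
```
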
### list_collections

List all available collections in the Qdrant database.

### collection_info

Get information about a specific collection.

- `collection_name`: Name of the collection to get information about

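Like the other tools, it returns a JSON string with the fields emitted by the server code above; the values here are illustrative:

```json
{
  "vectors_count": 1200,
  "points_count": 1200,
  "dimension": 1536,
  "distance": "Cosine"
}
```
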
## Example Usage in Claude Desktop

Once installed in Claude Desktop, you can use the tools like this:

```
What collections are available in my Qdrant database?

Search for documents about climate change in my "documents" collection.

Show me information about the "articles" collection.
```
@@ -0,0 +1,6 @@
iflow_mcp_amansingh0311_mcp_qdrant_openai/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
iflow_mcp_amansingh0311_mcp_qdrant_openai/mcp_qdrant_server.py,sha256=oddX4Yccy7rNQdtXCfLaW-1Z9FWpmQ33lVoOSJuXs-I,4275
iflow_mcp_amansingh0311_mcp_qdrant_openai-0.1.0.dist-info/METADATA,sha256=RA6WM_gZWdLNBntjljcg152PSX1nszSQx1Rw4ec_2uY,2955
iflow_mcp_amansingh0311_mcp_qdrant_openai-0.1.0.dist-info/WHEEL,sha256=WLgqFyCfm_KASv4WHyYy0P3pM_m7J5L9k2skdKLirC8,87
iflow_mcp_amansingh0311_mcp_qdrant_openai-0.1.0.dist-info/entry_points.txt,sha256=5Y0zdzV8uqQe7R2ao6p9g2Rh0C0jF72MO9bLmAZO81c,103
iflow_mcp_amansingh0311_mcp_qdrant_openai-0.1.0.dist-info/RECORD,,
@@ -0,0 +1,4 @@
Wheel-Version: 1.0
Generator: hatchling 1.28.0
Root-Is-Purelib: true
Tag: py3-none-any
@@ -0,0 +1,2 @@
[console_scripts]
mcp-qdrant-openai = iflow_mcp_amansingh0311_mcp_qdrant_openai.mcp_qdrant_server:main
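The console script above maps to `main()` in `mcp_qdrant_server.py`, which starts the MCP server over stdio. A minimal sketch, assuming the wheel is installable from the registry it was published to:

```bash
pip install iflow-mcp_amansingh0311-mcp-qdrant-openai
mcp-qdrant-openai   # starts the MCP server on stdio
```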