glide-mcp 0.1.6__tar.gz

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.

Potentially problematic release.


This version of glide-mcp might be problematic. Click here for more details.

@@ -0,0 +1,11 @@
1
+ # Remove AI code slop
2
+
3
+ remove all AI generated slop introduced in this branch.
4
+
5
+ This includes:
6
+ - Extra comments that a human wouldn't add, or that are inconsistent with the rest of the file
7
+ - Extra defensive checks or try/catch blocks that are abnormal for that area of the codebase (especially if called by trusted / validated codepaths)
8
+ - Casts to any to get around type issues
9
+ - Any other style that is inconsistent with the file
10
+
11
+ Report at the end with only a 1-3 sentence summary of what you changed
@@ -0,0 +1,4 @@
1
+ VOYAGEAI_API_KEY=pa-...
2
+ CEREBRAS_API_KEY=csk...
3
+ HELIX_LOCAL=False # or True
4
+ HELIX_API_ENDPOINT=https://your-helix-deployment.dev
@@ -0,0 +1,9 @@
1
+ .helix/
2
+ target/
3
+ *.log
4
+
5
+ .env
6
+
7
+ __pycache__/
8
+
9
+ glide.egg-info/
@@ -0,0 +1 @@
1
+ 3.13
@@ -0,0 +1,21 @@
1
+ MIT License
2
+
3
+ Copyright (c) 2025 SoarAILabs
4
+
5
+ Permission is hereby granted, free of charge, to any person obtaining a copy
6
+ of this software and associated documentation files (the "Software"), to deal
7
+ in the Software without restriction, including without limitation the rights
8
+ to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
9
+ copies of the Software, and to permit persons to whom the Software is
10
+ furnished to do so, subject to the following conditions:
11
+
12
+ The above copyright notice and this permission notice shall be included in all
13
+ copies or substantial portions of the Software.
14
+
15
+ THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
16
+ IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
17
+ FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
18
+ AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
19
+ LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
20
+ OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
21
+ SOFTWARE.
@@ -0,0 +1,67 @@
1
+ Metadata-Version: 2.4
2
+ Name: glide-mcp
3
+ Version: 0.1.6
4
+ Summary: mcp server that tries to save you from git troubles
5
+ License-File: LICENSE
6
+ Requires-Python: >=3.13
7
+ Requires-Dist: black>=25.9.0
8
+ Requires-Dist: cerebras-cloud-sdk>=1.56.1
9
+ Requires-Dist: fastmcp>=2.12.5
10
+ Requires-Dist: helix-py>=0.2.30
11
+ Requires-Dist: numpy>=2.3.4
12
+ Requires-Dist: pytest-asyncio>=1.2.0
13
+ Requires-Dist: pytest>=8.4.2
14
+ Requires-Dist: python-dotenv>=1.1.1
15
+ Description-Content-Type: text/markdown
16
+
17
+ ## How to Use
18
+
19
+ ### 1. Clone the repository
20
+
21
+ ```bash
22
+ git clone https://github.com/SoarAILabs/glide.git
23
+ ```
24
+
25
+ ### 2. Navigate to the project directory
26
+
27
+ ```bash
28
+ cd glide
29
+ ```
30
+
31
+ ### 3. Start the server
32
+
33
+ ```bash
34
+ uv run python -m src.mcp.app
35
+ ```
36
+
37
+ > **Note:** Currently, only [Cursor](https://www.cursor.so/) is supported as the MCP Client.
38
+
39
+ ### 4. Configure Cursor to use your local MCP server
40
+
41
+ **One-Click Install:**
42
+
43
+ [![Install MCP Server](https://cursor.com/deeplink/mcp-install-dark.svg)](https://cursor.com/en-US/install-mcp?name=glide-mcp&config=eyJlbnYiOnsiVk9ZQUdFQUlfQVBJX0tFWSI6IiIsIkhFTElYX0FQSV9FTkRQT0lOVCI6IiIsIkNFUkVCUkFTX0FQSV9LRVkiOiIiLCJDRVJFQlJBU19NT0RFTF9JRCI6InF3ZW4tMy0zMmIiLCJIRUxJWF9MT0NBTCI6IiJ9LCJjb21tYW5kIjoidXZ4IC0tZnJvbSBnbGlkZS1tY3AgZ2xpZGUifQ%3D%3D)
44
+
45
+ **Manual Installation:**
46
+
47
+ Add the following to your `mcp.json` configuration in Cursor:
48
+
49
+ ```json
50
+ {
51
+ "mcpServers": {
52
+ "glide": {
53
+ "url": "http://127.0.0.1:8000/mcp"
54
+ }
55
+ }
56
+ }
57
+ ```
58
+
59
+ > **Note:** The port (`8000` above) is just an example.
60
+ > To use a different port, open `src/mcp/app.py` and update the following lines accordingly:
61
+
62
+ ```python
63
+ if __name__ == "__main__":
64
+ mcp.run(transport="streamable-http", host="127.0.0.1", port=8000)
65
+ ```
66
+
67
+ Replace `8000` with your desired port number.
@@ -0,0 +1,51 @@
1
+ ## How to Use
2
+
3
+ ### 1. Clone the repository
4
+
5
+ ```bash
6
+ git clone https://github.com/SoarAILabs/glide.git
7
+ ```
8
+
9
+ ### 2. Navigate to the project directory
10
+
11
+ ```bash
12
+ cd glide
13
+ ```
14
+
15
+ ### 3. Start the server
16
+
17
+ ```bash
18
+ uv run python -m src.mcp.app
19
+ ```
20
+
21
+ > **Note:** Currently, only [Cursor](https://www.cursor.so/) is supported as the MCP Client.
22
+
23
+ ### 4. Configure Cursor to use your local MCP server
24
+
25
+ **One-Click Install:**
26
+
27
+ [![Install MCP Server](https://cursor.com/deeplink/mcp-install-dark.svg)](https://cursor.com/en-US/install-mcp?name=glide-mcp&config=eyJlbnYiOnsiVk9ZQUdFQUlfQVBJX0tFWSI6IiIsIkhFTElYX0FQSV9FTkRQT0lOVCI6IiIsIkNFUkVCUkFTX0FQSV9LRVkiOiIiLCJDRVJFQlJBU19NT0RFTF9JRCI6InF3ZW4tMy0zMmIiLCJIRUxJWF9MT0NBTCI6IiJ9LCJjb21tYW5kIjoidXZ4IC0tZnJvbSBnbGlkZS1tY3AgZ2xpZGUifQ%3D%3D)
28
+
29
+ **Manual Installation:**
30
+
31
+ Add the following to your `mcp.json` configuration in Cursor:
32
+
33
+ ```json
34
+ {
35
+ "mcpServers": {
36
+ "glide": {
37
+ "url": "http://127.0.0.1:8000/mcp"
38
+ }
39
+ }
40
+ }
41
+ ```
42
+
43
+ > **Note:** The port (`8000` above) is just an example.
44
+ > To use a different port, open `src/mcp/app.py` and update the following lines accordingly:
45
+
46
+ ```python
47
+ if __name__ == "__main__":
48
+ mcp.run(transport="streamable-http", host="127.0.0.1", port=8000)
49
+ ```
50
+
51
+ Replace `8000` with your desired port number.
@@ -0,0 +1,131 @@
1
+ // Minimal queries for ingesting Git graph data and retrieving semantic diffs.
2
+ // Commit messages are generated in the app using retrieved diff summaries.
3
+
4
+ // Ingestion helpers
5
+
6
+ // createRepository: creates the root repository node; needed to scope all data.
7
+ QUERY createRepository(repo_id: String, name: String) =>
8
+ repo <- AddN<Repository>({
9
+ repo_id: repo_id,
10
+ name: name
11
+ })
12
+ RETURN repo
13
+
14
+ // createBranch: creates a branch and links it to its repo
15
+ QUERY createBranch(repo_id: String, branch_id: String, name: String) =>
16
+ repo <- N<Repository>({repo_id: repo_id})
17
+ branch <- AddN<Branch>({
18
+ branch_id: branch_id,
19
+ name: name
20
+ })
21
+ AddE<HAS_BRANCH>()::From(repo)::To(branch)
22
+ RETURN branch
23
+
24
+ // createCommit: creates a commit on a branch
25
+ QUERY createCommit(
26
+ branch_id: String,
27
+ commit_id: String,
28
+ short_id: String,
29
+ author: String,
30
+ committed_at: Date,
31
+ is_merge: Boolean
32
+ ) =>
33
+ branch <- N<Branch>({branch_id: branch_id})
34
+ commit <- AddN<Commit>({
35
+ commit_id: commit_id,
36
+ short_id: short_id,
37
+ author: author,
38
+ committed_at: committed_at,
39
+ is_merge: is_merge
40
+ })
41
+ AddE<HAS_COMMIT>()::From(branch)::To(commit)
42
+ RETURN commit
43
+
44
+
45
+ // linkParentCommit: records DAG parentage
46
+ QUERY linkParentCommit(child_commit_id: String, parent_commit_id: String) =>
47
+ child <- N<Commit>({commit_id: child_commit_id})
48
+ parent <- N<Commit>({commit_id: parent_commit_id})
49
+ AddE<PARENT>()::From(child)::To(parent)
50
+ RETURN "OK"
51
+
52
+
53
+ // createFile: creates a file node; enables path-scoped queries and file-level analytics.
54
+ QUERY createFile(file_id: String, path: String, language: String) =>
55
+ file <- AddN<File>({
56
+ file_id: file_id,
57
+ path: path,
58
+ language: language
59
+ })
60
+ RETURN file
61
+
62
+
63
+ // createDiff: attaches a diff with a precomputed Voyage vector
64
+ QUERY createDiff(
65
+ commit_id: String,
66
+ file_id: String,
67
+ diff_id: String,
68
+ kind: String,
69
+ additions: I64,
70
+ deletions: I64,
71
+ summary: String,
72
+ vec: [F64]
73
+ ) =>
74
+ commit <- N<Commit>({commit_id: commit_id})
75
+ file <- N<File>({file_id: file_id})
76
+ diff <- AddN<Diff>({
77
+ diff_id: diff_id,
78
+ kind: kind,
79
+ additions: additions,
80
+ deletions: deletions,
81
+ summary: summary
82
+ })
83
+ embedding <- AddV<DiffEmbedding>(vec)
84
+ AddE<HAS_DIFF>()::From(commit)::To(diff)
85
+ AddE<AFFECTS_FILE>()::From(diff)::To(file)
86
+ AddE<HAS_EMBEDDING>()::From(diff)::To(embedding)
87
+ RETURN diff
88
+
89
+
90
+
91
+
92
+ // Search & retrieval
93
+
94
+ // getSimilarDiffsByVector: ANN over diffs using vectors
95
+ QUERY getSimilarDiffsByVector(vec: [F64], k: I64) =>
96
+ embeddings <- SearchV<DiffEmbedding>(vec, k)
97
+ results <- embeddings::In<HAS_EMBEDDING>
98
+ RETURN results::{
99
+ diff_id: diff_id,
100
+ kind: kind,
101
+ additions: additions,
102
+ deletions: deletions,
103
+ summary: summary,
104
+ commit_id: _::In<HAS_DIFF>::{commit_id},
105
+ commit_message: _::In<HAS_DIFF>::{message},
106
+ file_path: _::Out<AFFECTS_FILE>::{path}
107
+ }
108
+
109
+
110
+ // getDiffIdsForRepo: collects diff IDs under a repo
111
+ QUERY getDiffIdsForRepo(repo_id: String) =>
112
+ diffs <- N<Repository>({repo_id: repo_id})::Out<HAS_BRANCH>::Out<HAS_COMMIT>::Out<HAS_DIFF>
113
+ RETURN diffs::{ diff_id: diff_id }
114
+
115
+
116
+ // getDiffIdsForBranch: collects diff IDs under a branch
117
+ QUERY getDiffIdsForBranch(branch_id: String) =>
118
+ diffs <- N<Branch>({branch_id: branch_id})::Out<HAS_COMMIT>::Out<HAS_DIFF>
119
+ RETURN diffs::{ diff_id: diff_id }
120
+
121
+ // getCommitDiffSummaries: returns per-diff summaries and paths for a commit
122
+ QUERY getCommitDiffSummaries(commit_id: String) =>
123
+ diffs <- N<Commit>({commit_id: commit_id})::Out<HAS_DIFF>
124
+ RETURN diffs::{
125
+ diff_id: diff_id,
126
+ kind: kind,
127
+ additions: additions,
128
+ deletions: deletions,
129
+ summary: summary,
130
+ file_path: _::Out<AFFECTS_FILE>::{path}
131
+ }
@@ -0,0 +1,77 @@
1
+ // Nodes
2
+ N::Repository {
3
+ INDEX repo_id: String,
4
+ INDEX name: String,
5
+ created_at: Date DEFAULT NOW
6
+ }
7
+
8
+ N::Branch {
9
+ INDEX branch_id: String,
10
+ name: String,
11
+ created_at: Date DEFAULT NOW
12
+ }
13
+
14
+ N::Commit {
15
+ INDEX commit_id: String,
16
+ short_id: String,
17
+ author: String,
18
+ message: String,
19
+ committed_at: Date,
20
+ is_merge: Boolean DEFAULT false
21
+ }
22
+
23
+ N::File {
24
+ INDEX file_id: String,
25
+ path: String,
26
+ language: String
27
+ }
28
+
29
+ N::Diff {
30
+ INDEX diff_id: String,
31
+ kind: String,
32
+ additions: I64,
33
+ deletions: I64,
34
+ summary: String
35
+ }
36
+
37
+ // Vector type for embeddings
38
+ V::DiffEmbedding {
39
+ vector: [F64]
40
+ }
41
+
42
+ // Edges
43
+ E::HAS_BRANCH {
44
+ From: Repository,
45
+ To: Branch,
46
+ Properties: {}
47
+ }
48
+
49
+ E::HAS_COMMIT {
50
+ From: Branch,
51
+ To: Commit,
52
+ Properties: {}
53
+ }
54
+
55
+ E::PARENT {
56
+ From: Commit,
57
+ To: Commit,
58
+ Properties: {}
59
+ }
60
+
61
+ E::HAS_DIFF {
62
+ From: Commit,
63
+ To: Diff,
64
+ Properties: {}
65
+ }
66
+
67
+ E::AFFECTS_FILE {
68
+ From: Diff,
69
+ To: File,
70
+ Properties: {}
71
+ }
72
+
73
+ E::HAS_EMBEDDING {
74
+ From: Diff,
75
+ To: DiffEmbedding,
76
+ Properties: {}
77
+ }
@@ -0,0 +1,18 @@
1
+ [project]
2
+ name = "glide"
3
+ queries = "./db/"
4
+
5
+ [local.dev]
6
+ port = 6969
7
+ build_mode = "dev"
8
+
9
+ [local.glide]
10
+ build_mode = "dev"
11
+
12
+ [cloud.production.fly]
13
+ build_mode = "debug"
14
+ vm_size = "shared-cpu-4x"
15
+ volume = "helix_glide_production_data:/data"
16
+ volume_initial_size = 20
17
+ private = false
18
+ auth_type = "Cli"
@@ -0,0 +1,32 @@
1
+ [project]
2
+ name = "glide-mcp"
3
+ version = "0.1.6"
4
+ description = "mcp server that tries to save you from git troubles"
5
+ readme = "README.md"
6
+ requires-python = ">=3.13"
7
+ dependencies = [
8
+ "black>=25.9.0",
9
+ "cerebras-cloud-sdk>=1.56.1",
10
+ "fastmcp>=2.12.5",
11
+ "helix-py>=0.2.30",
12
+ "numpy>=2.3.4",
13
+ "pytest>=8.4.2",
14
+ "pytest-asyncio>=1.2.0",
15
+ "python-dotenv>=1.1.1",
16
+ ]
17
+
18
+ [build-system]
19
+ requires = ["hatchling"]
20
+ build-backend = "hatchling.build"
21
+
22
+ [tool.hatch.build.targets.wheel]
23
+ packages = ["src"]
24
+
25
+ [tool.uv]
26
+ package = true
27
+
28
+ [tool.uv.workspace]
29
+ members = []
30
+
31
+ [project.scripts]
32
+ glide = "src.mcp.app:main"
@@ -0,0 +1,9 @@
1
+ # Runtime and development dependencies (mirrors pyproject.toml)
2
+ black>=25.9.0
3
+ cerebras-cloud-sdk>=1.56.1
4
+ fastmcp>=2.12.5
5
+ helix-py>=0.2.30
6
+ numpy>=2.3.4
7
+ pytest>=8.4.2
8
+ pytest-asyncio>=1.2.0
9
+ python-dotenv>=1.1.1
@@ -0,0 +1,111 @@
1
+ import os
2
+ import asyncio
3
+ from typing import Any, Dict, List, Optional
4
+ from dotenv import load_dotenv
5
+ from cerebras.cloud.sdk import AsyncCerebras
6
+
7
+ load_dotenv()
8
+
9
+ # Default model; override per-call via the `model` argument
10
+ DEFAULT_MODEL_ID: str = os.getenv("CEREBRAS_MODEL_ID", "qwen-3-coder-480b")
11
+
12
+ _async_client: Optional[AsyncCerebras] = None
13
+
14
+
15
+ def _get_api_key(explicit_api_key: Optional[str] = None) -> str:
16
+ api_key = explicit_api_key or os.getenv("CEREBRAS_API_KEY", "")
17
+ if not api_key:
18
+ raise RuntimeError(
19
+ "CEREBRAS_API_KEY is not set. Set it in the environment or pass api_key explicitly."
20
+ )
21
+ return api_key
22
+
23
+
24
def init_cerebras_async_client(api_key: Optional[str] = None) -> AsyncCerebras:
    """
    Create and cache the process-wide AsyncCerebras client.

    Idempotent: the first call constructs the client, later calls return
    the same cached instance (any `api_key` passed later is ignored).
    """
    global _async_client
    if _async_client is not None:
        return _async_client
    _async_client = AsyncCerebras(api_key=_get_api_key(api_key))
    return _async_client
34
+
35
+
36
def get_cerebras_async_client() -> AsyncCerebras:
    """Accessor for the shared AsyncCerebras client; lazily initializes it."""
    return init_cerebras_async_client()
39
+
40
+
41
async def chat(
    messages: List[Dict[str, str]],
    *,
    model: Optional[str] = None,
    temperature: Optional[float] = None,
    max_tokens: Optional[int] = None,
    api_key: Optional[str] = None,
    extra_params: Optional[Dict[str, Any]] = None,
) -> str:
    """
    Send a structured chat to Cerebras and return the assistant's message content.

    messages: List of {"role": "user"|"system"|"assistant", "content": str}
    model: Model name; defaults to DEFAULT_MODEL_ID
    temperature, max_tokens: Optional generation controls
    api_key: Optional override for API key (avoids relying on env)
    extra_params: Additional keyword arguments passed through to the API
    """
    client = init_cerebras_async_client(api_key)
    # Forward only the generation controls that were actually supplied.
    generation_opts: Dict[str, Any] = {}
    if temperature is not None:
        generation_opts["temperature"] = temperature
    if max_tokens is not None:
        generation_opts["max_tokens"] = max_tokens
    response = await client.chat.completions.create(
        messages=messages,
        model=model or DEFAULT_MODEL_ID,
        **generation_opts,
        **(extra_params or {}),
    )
    return response.choices[0].message.content
68
+
69
+
70
async def complete(
    prompt: str,
    *,
    system: Optional[str] = None,
    model: Optional[str] = None,
    temperature: Optional[float] = None,
    max_tokens: Optional[int] = None,
    api_key: Optional[str] = None,
    extra_params: Optional[Dict[str, Any]] = None,
) -> str:
    """
    Convenience wrapper for single-turn prompts. Builds messages from `system` and `prompt`.
    """
    conversation: List[Dict[str, str]] = (
        [{"role": "system", "content": system}] if system else []
    )
    conversation.append({"role": "user", "content": prompt})
    return await chat(
        conversation,
        model=model,
        temperature=temperature,
        max_tokens=max_tokens,
        api_key=api_key,
        extra_params=extra_params,
    )
95
+
96
+
97
+ __all__ = [
98
+ "init_cerebras_async_client",
99
+ "get_cerebras_async_client",
100
+ "chat",
101
+ "complete",
102
+ "DEFAULT_MODEL_ID",
103
+ ]
104
+
105
+
106
if __name__ == "__main__":

    async def _demo() -> None:
        # Smoke test: send one prompt and print the model's reply.
        print(await complete("Why is fast inference important?"))

    asyncio.run(_demo())
@@ -0,0 +1,11 @@
1
+ You are the assistant of Glide by the Interaction Company of California. You are the "execution engine" of Glide, helping split large commits for Glide-MCP, while the MCP client talks to the user. Your job is to execute and accomplish a goal, and you do not have direct access to the user.
2
+
3
+ Your final output is directed to Glide MCP Client, which handles user conversations and presents your results to the user. Focus on providing Glide with adequate contextual information; you are not responsible for framing responses in a user-friendly way.
4
+
5
+ If you need more data from Glide or the user, include that request in your final output message.
6
+
7
+ If you ever need to send a message to the user, you should tell Glide to forward that message to the user.
8
+
9
+ You should seek to accomplish tasks with as much parallelism as possible. If tasks don't need to be sequential, launch them in parallel. This includes spawning multiple subagents simultaneously for both search operations and MCP integrations when the information could be found in multiple sources.
10
+
11
+ EXTREMELY IMPORTANT: Never make up information if you can't find it. If you can't find something or you aren't sure about something, relay this to the inbound agent instead of guessing.
@@ -0,0 +1,59 @@
1
+ from helix.embedding.voyageai_client import VoyageAIEmbedder
2
+ from helix import Chunk
3
+ import os
4
+
5
+ # Lazy-loaded embedder - only created when needed
6
+ _voyage_embedder = None
7
+
8
+
9
def _get_embedder():
    """Return the shared VoyageAIEmbedder, constructing it on first use."""
    global _voyage_embedder
    if _voyage_embedder is not None:
        return _voyage_embedder
    _voyage_embedder = VoyageAIEmbedder()
    return _voyage_embedder
15
+
16
+
17
# Maps file extensions to the language names accepted by Chunk.code_chunk.
_EXT_TO_LANGUAGE = {
    "py": "python",
    "js": "javascript",
    "ts": "typescript",
    "jsx": "javascript",
    "tsx": "typescript",
    "java": "java",
    "cpp": "cpp",
    "c": "c",
    "cs": "csharp",
    "go": "go",
    "rs": "rust",
    "rb": "ruby",
    "php": "php",
    "swift": "swift",
    "kt": "kotlin",
    "scala": "scala",
    "sh": "bash",
    # HelixQL has no dedicated grammar; python is the closest fit — TODO confirm
    "hx": "python",
}


def embed_code(code: str, file_path: str | None = None):
    """Chunk `code` and return Voyage embeddings for it.

    code: The text to embed (typically a diff, so it may not parse as code).
    file_path: Optional path whose extension selects a language-aware chunker;
        unknown or missing extensions fall back to plain token chunking.
    """
    # Diffs are text, and code_chunk has API compatibility issues, so
    # token_chunk is the fallback whenever language-aware chunking fails.
    code_chunks = None
    try:
        if file_path:
            ext = os.path.splitext(file_path)[1].lstrip(".").lower()
            language = _EXT_TO_LANGUAGE.get(ext)
            if language:
                code_chunks = Chunk.code_chunk(code, language=language)
    except Exception:
        # code_chunk failed; fall through to token_chunk below.
        code_chunks = None
    if code_chunks is None:
        code_chunks = Chunk.token_chunk(code)

    voyage_embedder = _get_embedder()
    # NOTE(review): this embeds the *string repr* of the whole chunk list as a
    # single document rather than the individual chunks — looks unintended;
    # confirm whether embed_batch(code_chunks) was meant before changing it.
    code_embeddings = voyage_embedder.embed_batch([f"{code_chunks}"])

    return code_embeddings