google-drive-forge 0.2.0__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
@@ -0,0 +1,9 @@
1
+ from .intelligent_client import IntelligentDriveClient
2
+ from .executor import ScriptExecutor
3
+ from .skill_loader import SkillLoader
4
+ from .client import DriveClient
5
+
6
+ # Alias for branding
7
+ ForgeClient = IntelligentDriveClient
8
+
9
+ __all__ = ["IntelligentDriveClient", "ForgeClient", "ScriptExecutor", "SkillLoader", "DriveClient"]
@@ -0,0 +1,59 @@
1
+ import logging
2
+ import os
3
+ import sys
4
+ from mcp.server.fastmcp import FastMCP
5
+ from .intelligent_client import IntelligentDriveClient
6
+ from .tools import register_tools, register_intelligent_tools
7
+ from .resources import register_resources
8
+ from .executor import ScriptExecutor
9
+ from .skill_loader import SkillLoader
10
+ from .audit import AuditLogger
11
+
12
+ # Configure logging
13
+ logging.basicConfig()
14
+ logger = logging.getLogger(__name__)
15
+ logger.setLevel(logging.INFO)
16
+
17
+ # Initialize FastMCP Server
18
+ mcp = FastMCP("google-drive-forge")
19
+
20
+ # Setup Paths from Environment or Defaults
21
+ BASE_DIR = os.path.dirname(os.path.abspath(__file__))
22
+ PROJECT_ROOT = os.path.dirname(os.path.dirname(BASE_DIR))
23
+
24
+ SKILLS_DIR = os.getenv("GOOGLE_DRIVE_SKILLS_DIR", os.path.join(BASE_DIR, "skills"))
25
+ # Default to current python if no specific venv provided or found
26
+ DEFAULT_VENV = os.path.join(os.path.dirname(BASE_DIR), "antigravity-env")
27
+ PYTHON_EXE = os.getenv("GOOGLE_DRIVE_PYTHON_PATH")
28
+
29
+ if not PYTHON_EXE:
30
+ if os.path.exists(DEFAULT_VENV):
31
+ PYTHON_EXE = DEFAULT_VENV
32
+ else:
33
+ PYTHON_EXE = sys.executable
34
+
35
+ AUDIT_LOG = os.getenv("GOOGLE_DRIVE_AUDIT_LOG", os.path.join(PROJECT_ROOT, "docs/research/intelligent_audit.log"))
36
+
37
+ try:
38
+ # Initialize Core Components
39
+ audit = AuditLogger(AUDIT_LOG)
40
+ client = IntelligentDriveClient(audit=audit)
41
+ executor = ScriptExecutor(PYTHON_EXE, SKILLS_DIR)
42
+ loader = SkillLoader(SKILLS_DIR)
43
+
44
+ # Register Components
45
+ register_tools(mcp, client)
46
+ register_resources(mcp, client)
47
+ register_intelligent_tools(mcp, client, executor, loader, audit)
48
+
49
+ except Exception as e:
50
+ logger.error(f"Failed to initialize server components: {e}")
51
+ @mcp.tool()
52
+ def status() -> str:
53
+ return f"Server failed to initialize: {str(e)}. Please check setup."
54
+
55
+ def main():
56
+ mcp.run()
57
+
58
+ if __name__ == "__main__":
59
+ main()
@@ -0,0 +1,37 @@
1
+ import os
2
+ import datetime
3
+ import logging
4
+
5
+ logger = logging.getLogger(__name__)
6
+
7
+ class AuditLogger:
8
+ def __init__(self, audit_log_path: str):
9
+ self.audit_log_path = audit_log_path
10
+ # Ensure directory exists
11
+ os.makedirs(os.path.dirname(audit_log_path), exist_ok=True)
12
+
13
+ def log_event(self, event_type: str, details: str, status: str = "INFO"):
14
+ """
15
+ Logs an intelligent operation event.
16
+ """
17
+ timestamp = datetime.datetime.now().strftime("%Y-%m-%d %H:%M:%S")
18
+ log_entry = f"[{timestamp}] [{status}] [{event_type}] {details}\n"
19
+
20
+ try:
21
+ with open(self.audit_log_path, "a") as f:
22
+ f.write(log_entry)
23
+ except Exception as e:
24
+ logger.error(f"Failed to write to audit log: {e}")
25
+
26
+ def log_recovery(self, original_id: str, recovered_name: str, success: bool):
27
+ """
28
+ Specific log for autonomous recovery events.
29
+ """
30
+ status = "SUCCESS" if success else "FAILURE"
31
+ self.log_event("RECOVERY", f"Target ID: {original_id} -> Found: {recovered_name}", status)
32
+
33
+ def log_skill_creation(self, skill_name: str):
34
+ """
35
+ Specific log for new skill forging.
36
+ """
37
+ self.log_event("SKILL_FORGE", f"New capability created: {skill_name}")
@@ -0,0 +1,75 @@
1
+ import os
2
+ import json
3
+ import logging
4
+ from google.oauth2.credentials import Credentials
5
+ from google_auth_oauthlib.flow import InstalledAppFlow
6
+ from google.auth.transport.requests import Request
7
+
8
+ # Configure logger for this module
9
+ logger = logging.getLogger(__name__)
10
+
11
+ # Scopes required for the application
12
+ SCOPES = ['https://www.googleapis.com/auth/drive']
13
+
14
+ # Base directory for relative paths (this file lives inside the google_drive_forge/ package, so go up one level)
15
+ BASE_DIR = os.path.dirname(os.path.dirname(os.path.abspath(__file__)))
16
+ TOKEN_PATH = os.path.join(BASE_DIR, 'token.json')
17
+ CREDENTIALS_PATH = os.path.join(BASE_DIR, 'credentials.json')
18
+
19
+ def get_credentials() -> Credentials:
20
+ """
21
+ Retrieves OAuth2 credentials.
22
+ Refreshes expired tokens if possible, or triggers a new login flow.
23
+ """
24
+ creds = None
25
+
26
+ # 1. Try to load existing token
27
+ if os.path.exists(TOKEN_PATH):
28
+ try:
29
+ creds = Credentials.from_authorized_user_file(TOKEN_PATH, SCOPES)
30
+ except Exception as e:
31
+ logger.warning(f"Error loading token.json: {e}")
32
+ # Invalid token file, ignore it
33
+
34
+ # 2. If no valid credentials, login or refresh
35
+ if not creds or not creds.valid:
36
+ if creds and creds.expired and creds.refresh_token:
37
+ logger.info("Refreshing access token...")
38
+ try:
39
+ creds.refresh(Request())
40
+ except Exception as e:
41
+ logger.warning(f"Error refreshing token: {e}. Initiating new login.")
42
+ creds = _login_flow()
43
+ else:
44
+ logger.info("No valid token found. Initiating login flow...")
45
+ creds = _login_flow()
46
+
47
+ # 3. Save the new/refreshed token
48
+ with open(TOKEN_PATH, 'w') as token:
49
+ token.write(creds.to_json())
50
+ logger.info(f"Token saved to {TOKEN_PATH}")
51
+
52
+ return creds
53
+
54
+ def _login_flow() -> Credentials:
55
+ """Helper to run the interactive OAuth flow."""
56
+ if not os.path.exists(CREDENTIALS_PATH):
57
+ raise FileNotFoundError(
58
+ f"Credentials file not found at {CREDENTIALS_PATH}. "
59
+ "Please download it from Google Cloud Console and place it there."
60
+ )
61
+
62
+ flow = InstalledAppFlow.from_client_secrets_file(CREDENTIALS_PATH, SCOPES)
63
+
64
+ # Check for headless environment
65
+ is_headless = os.getenv("GOOGLE_DRIVE_HEADLESS_AUTH", "false").lower() == "true"
66
+
67
+ if is_headless:
68
+ logger.info("Headless mode detected. Please follow the URL to authenticate.")
69
+ # run_local_server with open_browser=False is the modern replacement for run_console
70
+ creds = flow.run_local_server(port=0, open_browser=False)
71
+ else:
72
+ # Standard flow with browser
73
+ creds = flow.run_local_server(port=0)
74
+
75
+ return creds
@@ -0,0 +1,140 @@
1
+ import io
2
+ import logging
3
+ import functools
4
+ from typing import List, Dict, Any, Optional, Union
5
+ from googleapiclient.discovery import build
6
+ from googleapiclient.http import MediaIoBaseDownload, MediaFileUpload
7
+ from googleapiclient.errors import HttpError
8
+ from tenacity import retry, stop_after_attempt, wait_exponential, retry_if_exception_type
9
+
10
+ from .auth import get_credentials
11
+
12
+ # Setup basic logging
13
+ logging.basicConfig(level=logging.INFO)
14
+ logger = logging.getLogger(__name__)
15
+
16
+ class DriveClient:
17
+ def __init__(self):
18
+ self.creds = get_credentials()
19
+ self.service = build('drive', 'v3', credentials=self.creds)
20
+
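+ # NOTE: lru_cache on a bound method keys on (self, q, limit), keeps this client
+ # alive for the lifetime of the cache, and can serve stale listings after
+ # uploads or deletions; callers should treat the returned list as read-only.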
21
+ @functools.lru_cache(maxsize=128)
22
+ def _cached_list_files(self, q: str, limit: int) -> List[Dict[str, Any]]:
23
+ """Internal cached method for listing files."""
24
+ results = self.service.files().list(
25
+ q=q,
26
+ pageSize=limit,
27
+ fields="nextPageToken, files(id, name, mimeType, parents, owners, modifiedTime, webViewLink, size)"
28
+ ).execute()
29
+ return results.get('files', [])
30
+
31
+ @retry(
32
+ retry=retry_if_exception_type(HttpError),
33
+ stop=stop_after_attempt(3),
34
+ wait=wait_exponential(multiplier=1, min=2, max=10)
35
+ )
36
+ def list_files(self, query: str = None, limit: int = 10) -> List[Dict[str, Any]]:
37
+ """
38
+ Lists files with retry logic.
39
+ """
40
+ try:
41
+ # Default query to not show trashed files if no query provided
42
+ if not query:
43
+ q = "trashed = false"
44
+ else:
45
+ q = f"({query}) and trashed = false"
46
+
47
+ return self._cached_list_files(q, limit)
48
+ except HttpError as error:
49
+ logger.error(f"An error occurred: {error}")
50
+ raise
51
+
52
+ @functools.lru_cache(maxsize=256)
53
+ def get_file_metadata(self, file_id: str) -> Dict[str, Any]:
54
+ """Get detailed metadata for a file."""
55
+ return self.service.files().get(
56
+ fileId=file_id,
57
+ fields="id, name, mimeType, parents, owners, modifiedTime, webViewLink, size, exportLinks"
58
+ ).execute()
59
+
60
+ def download_file(self, file_id: str, export_mime_type: Optional[str] = None) -> bytes:
61
+ """
62
+ Downloads a file's content.
63
+ Handles binary downloads and Google Workspace document exports.
64
+ """
65
+ try:
66
+ meta = self.get_file_metadata(file_id)
67
+ mime_type = meta.get('mimeType')
68
+
69
+ # Handle Google Workspace documents (Docs, Sheets, Slides)
70
+ if mime_type == 'application/vnd.google-apps.document':
71
+ # Default to text/plain if requested or for general text-based use
72
+ target_mime = export_mime_type or 'application/pdf'
73
+ request = self.service.files().export_media(fileId=file_id, mimeType=target_mime)
74
+ elif mime_type == 'application/vnd.google-apps.spreadsheet':
75
+ target_mime = export_mime_type or 'application/vnd.openxmlformats-officedocument.spreadsheetml.sheet'
76
+ request = self.service.files().export_media(fileId=file_id, mimeType=target_mime)
77
+ elif mime_type == 'application/vnd.google-apps.presentation':
78
+ target_mime = export_mime_type or 'application/pdf'
79
+ request = self.service.files().export_media(fileId=file_id, mimeType=target_mime)
80
+ else:
81
+ # Standard binary download
82
+ request = self.service.files().get_media(fileId=file_id)
83
+
84
+ file_io = io.BytesIO()
85
+ downloader = MediaIoBaseDownload(file_io, request)
86
+ done = False
87
+ while not done:
88
+ status, done = downloader.next_chunk()
89
+
90
+ return file_io.getvalue()
91
+ except HttpError as error:
92
+ logger.error(f"Error downloading file {file_id}: {error}")
93
+ raise
94
+
95
+ def create_folder(self, name: str, parent_id: str = 'root') -> Dict[str, Any]:
96
+ """Create a new folder."""
97
+ file_metadata = {
98
+ 'name': name,
99
+ 'mimeType': 'application/vnd.google-apps.folder',
100
+ 'parents': [parent_id]
101
+ }
102
+ return self.service.files().create(body=file_metadata, fields='id, name, webViewLink').execute()
103
+
104
+ def upload_file(self, name: str, content: Union[str, bytes], parent_id: str = 'root', mime_type: str = 'text/plain') -> Dict[str, Any]:
105
+ """Upload a file."""
106
+ file_metadata = {
107
+ 'name': name,
108
+ 'parents': [parent_id]
109
+ }
110
+
111
+ if isinstance(content, str):
112
+ content_bytes = io.BytesIO(content.encode('utf-8'))
113
+ else:
114
+ content_bytes = io.BytesIO(content)
115
+
116
+ # Use MediaIoBaseUpload for in-memory bytes
117
+ from googleapiclient.http import MediaIoBaseUpload
118
+ media = MediaIoBaseUpload(content_bytes, mimetype=mime_type, resumable=True)
119
+
120
+ return self.service.files().create(
121
+ body=file_metadata,
122
+ media_body=media,
123
+ fields='id, name, webViewLink'
124
+ ).execute()
125
+
126
+ def trash_file(self, file_id: str) -> Dict[str, Any]:
127
+ """Move a file to trash."""
128
+ body = {'trashed': True}
129
+ return self.service.files().update(fileId=file_id, body=body).execute()
130
+
131
+ def list_folder_children(self, folder_id: str, limit: int = 100) -> List[Dict[str, Any]]:
132
+ """List all children of a specific folder."""
133
+ query = f"'{folder_id}' in parents"
134
+ return self.list_files(query=query, limit=limit)
135
+
136
+ def search(self, text: str, limit: int = 20) -> List[Dict[str, Any]]:
137
+ """Perform a semantic/name search."""
138
+ # Simple name contains search for now, can be expanded
139
+ query = f"name contains '{text}'"
140
+ return self.list_files(query=query, limit=limit)
@@ -0,0 +1,73 @@
1
+ import subprocess
2
+ import os
3
+ import sys
4
+ import logging
5
+
6
+ logger = logging.getLogger(__name__)
7
+
8
+ class ScriptExecutor:
9
+ def __init__(self, python_path: str, skills_dir: str):
10
+ self.skills_dir = skills_dir
11
+
12
+ # Resolve the actual python executable
13
+ if os.path.isfile(python_path):
14
+ self.python_exe = python_path
15
+ else:
16
+ # Assume it's a venv directory
17
+ # Support both Unix (bin) and Windows (Scripts)
18
+ bin_path = os.path.join(python_path, "bin", "python")
19
+ if not os.path.exists(bin_path):
20
+ # Fallback for Windows or common alternative structures
21
+ bin_path = os.path.join(python_path, "Scripts", "python.exe")
22
+ if not os.path.exists(bin_path):
23
+ # Second fallback: check if python is in the folder directly
24
+ bin_path = os.path.join(python_path, "python")
25
+
26
+ self.python_exe = bin_path if os.path.exists(bin_path) else python_path
27
+
28
+ logger.info(f"ScriptExecutor initialized with Python: {self.python_exe}")
29
+
30
+ def run_skill(self, skill_name: str, args: list = None) -> str:
31
+ """
32
+ Runs a skill's main script.
33
+ Assumes the skill is in a folder: skills_dir/skill_name/script.py
34
+ """
35
+ script_path = os.path.join(self.skills_dir, skill_name, "script.py")
36
+
37
+ if not os.path.exists(script_path):
38
+ return f"Error: Skill script not found at {script_path}"
39
+
40
+ cmd = [self.python_exe, script_path]
41
+ if args:
42
+ cmd.extend(args)
43
+
44
+ try:
45
+ # Add the current project directory to PYTHONPATH so scripts can import google_drive_forge
46
+ env = os.environ.copy()
47
+ # __file__ is inside google_drive_forge/, so go up one level to get the package root
48
+ package_root = os.path.dirname(os.path.dirname(os.path.abspath(__file__)))
49
+
50
+ # Ensure the skills can find the modules by importing 'google_drive_forge'
51
+ current_pythonpath = env.get("PYTHONPATH", "")
52
+ if current_pythonpath:
53
+ env["PYTHONPATH"] = f"{package_root}{os.pathsep}{current_pythonpath}"
54
+ else:
55
+ env["PYTHONPATH"] = package_root
56
+
57
+ result = subprocess.run(
58
+ cmd,
59
+ capture_output=True,
60
+ text=True,
61
+ check=False,
62
+ env=env,
63
+ cwd=package_root
64
+ )
65
+
66
+ output = result.stdout
67
+ if result.stderr:
68
+ output += f"\n--- Errors/Warnings ---\n{result.stderr}"
69
+
70
+ return output if output.strip() else "Script executed successfully with no output."
71
+
72
+ except Exception as e:
73
+ return f"Failed to execute skill: {str(e)}"
@@ -0,0 +1,103 @@
1
+ import logging
2
+ import functools
3
+ from typing import List, Dict, Any, Optional, Callable
4
+ from googleapiclient.errors import HttpError
5
+ from .client import DriveClient
6
+
7
+ from .audit import AuditLogger
8
+
9
+ logger = logging.getLogger(__name__)
10
+
11
+ def self_healing_recovery(func: Callable):
12
+ """
13
+ Decorator that attempts autonomous recovery on Google Drive API failures.
14
+ Specifically targets 404 (Not Found) errors and logs events.
15
+ """
16
+ @functools.wraps(func)
17
+ def wrapper(self, *args, **kwargs):
18
+ try:
19
+ return func(self, *args, **kwargs)
20
+ except HttpError as error:
21
+ if error.resp.status == 404:
22
+ file_id = kwargs.get('file_id') or (args[0] if args else None)
23
+ if file_id and isinstance(file_id, str):
24
+ message = f"File ID '{file_id}' not found. Suggesting search recovery."
25
+ if hasattr(self, 'audit'):
26
+ self.audit.log_recovery(file_id, "Unknown (Need Search)", False)
27
+
28
+ logger.info(f"Self-healing: {message}")
29
+ raise HttpError(error.resp, f"Autonomous Recovery: {message} Use 'search_files' to find the new ID.".encode())
30
+ raise
31
+ return wrapper
32
+
33
+ class IntelligentDriveClient(DriveClient):
34
+ """
35
+ An advanced Drive client that implements autonomous patterns and self-healing.
36
+ """
37
+ def __init__(self, audit: Optional[AuditLogger] = None):
38
+ super().__init__()
39
+ self.audit = audit
40
+
41
+ @self_healing_recovery
42
+ def get_file_metadata(self, file_id: str) -> Dict[str, Any]:
43
+ return super().get_file_metadata(file_id)
44
+
45
+ @self_healing_recovery
46
+ def download_file(self, file_id: str, export_mime_type: Optional[str] = None) -> bytes:
47
+ """
48
+ Enhanced download with MIME-type intelligence.
49
+ Autonomously determines if a file needs export vs download.
50
+ """
51
+ try:
52
+ return super().download_file(file_id, export_mime_type=export_mime_type)
53
+ except HttpError as error:
54
+ # Add specific MIME-type failure handling if super().download_file fails
55
+ logger.error(f"Intelligent Download failed for {file_id}: {error}")
56
+ raise
57
+
58
+ def find_and_heal_path(self, path: str) -> Optional[str]:
59
+ """
60
+ Autonomous Path Discovery with Active Healing.
61
+ If a path like /Project/2026/Budgt fails, it auto-corrects to the closest match.
62
+ """
63
+ parts = [p for p in path.split('/') if p]
64
+ current_parent = 'root'
65
+ healed_path_parts = []
66
+
67
+ for part in parts:
68
+ # Try exact match first
69
+ results = self.list_files(query=f"name = '{part}' and '{current_parent}' in parents")
70
+
71
+ if results:
72
+ current_parent = results[0]['id']
73
+ healed_path_parts.append(part)
74
+ continue
75
+
76
+ # Exact match failed. Attempt Active Healing.
77
+ # 1. Get all children of the current parent
78
+ children = self.list_files(query=f"'{current_parent}' in parents")
79
+
80
+ # 2. Simple fuzzy match: case-insensitive match or name contains
81
+ matches = [c for c in children if part.lower() in c['name'].lower()]
82
+
83
+ if len(matches) == 1:
84
+ # High confidence recovery
85
+ healed_name = matches[0]['name']
86
+ old_id = current_parent
87
+ current_parent = matches[0]['id']
88
+
89
+ logger.info(f"Active Healing: Resolved '{part}' -> '{healed_name}' in folder {old_id}")
90
+ if self.audit:
91
+ self.audit.log_recovery(part, healed_name, True)
92
+
93
+ healed_path_parts.append(healed_name)
94
+ else:
95
+ # No definitive match
96
+ if self.audit:
97
+ self.audit.log_recovery(part, "Ambiguous/Not Found", False)
98
+
99
+ suggestion_names = [c["name"] for c in children]
100
+ logger.warning(f"Path break at '{part}'. Suggestions: {suggestion_names}")
101
+ return None
102
+
103
+ return current_parent
@@ -0,0 +1,29 @@
1
+ from mcp.server.fastmcp import Context, FastMCP
2
+ from .client import DriveClient
3
+
4
+ def register_resources(mcp: FastMCP, client: DriveClient):
5
+ """Registers resource handlers to the MCP server."""
6
+
7
+ @mcp.resource("gdrive://{file_id}/content")
8
+ def get_file_content(file_id: str) -> str:
9
+ """
10
+ Reads the content of a file from Google Drive.
11
+ Automatic export for Google Docs/Sheets/Slides (to PDF/Excel).
12
+ """
13
+ try:
14
+ content_bytes = client.download_file(file_id)
15
+
16
+ # Try to decode as text first
17
+ try:
18
+ return content_bytes.decode('utf-8')
19
+ except UnicodeDecodeError:
20
+ # If binary, return a representation or base64?
21
+ # MCP text resources expect string. For binary, we might need a different approach
22
+ # or just indicate it's binary.
23
+ # For now, let's return a clear message or a hex representation if small?
24
+ # A better pattern for binary files in MCP is usually not raw text resource
25
+ # unless using the 'blob' resource type (which FastMCP might wrap differently).
26
+ # FastMCP resource decorators typically return text.
27
+ return f"<Binary Content: {len(content_bytes)} bytes>"
28
+ except Exception as e:
29
+ return f"Error reading file {file_id}: {str(e)}"
@@ -0,0 +1,65 @@
1
+ import os
2
+ import yaml
3
+ import logging
4
+ from typing import List, Dict, Any, Optional
5
+
6
+ logger = logging.getLogger(__name__)
7
+
8
+ class SkillMetadata:
9
+ def __init__(self, name: str, description: str, folder_path: str):
10
+ self.name = name
11
+ self.description = description
12
+ self.folder_path = folder_path
13
+
14
+ class SkillLoader:
15
+ def __init__(self, skills_dir: str):
16
+ self.skills_dir = skills_dir
17
+
18
+ def discover_skills(self) -> List[SkillMetadata]:
19
+ """
20
+ Scans the skills directory for valid skill folders (containing SKILL.md).
21
+ """
22
+ skills = []
23
+ if not os.path.exists(self.skills_dir):
24
+ logger.warning(f"Skills directory not found: {self.skills_dir}")
25
+ return skills
26
+
27
+ for entry in os.scandir(self.skills_dir):
28
+ if entry.is_dir():
29
+ skill_md_path = os.path.join(entry.path, "SKILL.md")
30
+ if os.path.exists(skill_md_path):
31
+ try:
32
+ meta = self._parse_skill_md(skill_md_path, entry.path)
33
+ if meta:
34
+ skills.append(meta)
35
+ except Exception as e:
36
+ logger.error(f"Error parsing {skill_md_path}: {e}")
37
+
38
+ return skills
39
+
40
+ def _parse_skill_md(self, file_path: str, folder_path: str) -> Optional[SkillMetadata]:
41
+ """
42
+ Parses the YAML frontmatter from a SKILL.md file.
43
+ """
44
+ with open(file_path, "r") as f:
45
+ content = f.read()
46
+
47
+ if not content.startswith("---"):
48
+ return None
49
+
50
+ # Basic YAML frontmatter extraction
51
+ parts = content.split("---", 2)
52
+ if len(parts) < 3:
53
+ return None
54
+
55
+ try:
56
+ frontmatter = yaml.safe_load(parts[1])
57
+ name = frontmatter.get("name")
58
+ description = frontmatter.get("description")
59
+
60
+ if name and description:
61
+ return SkillMetadata(name, description, folder_path)
62
+ except Exception:
63
+ return None
64
+
65
+ return None
@@ -0,0 +1,324 @@
1
+ from typing import Optional, List
2
+ from mcp.server.fastmcp import Context, FastMCP
3
+ from .client import DriveClient
4
+ from .executor import ScriptExecutor
5
+ from .skill_loader import SkillLoader
6
+ from .audit import AuditLogger
7
+
8
+ def register_tools(mcp: FastMCP, client: DriveClient):
9
+ """Registers tool handlers to the MCP server."""
10
+
11
+ @mcp.tool()
12
+ def list_files(limit: int = 20) -> str:
13
+ """
14
+ List the most recent files in Google Drive.
15
+
16
+ Args:
17
+ limit: Number of files to return (default 20, max 100).
18
+ """
19
+ import json
20
+ files = client.list_files(limit=limit)
21
+ return json.dumps(files, indent=2)
22
+
23
+ @mcp.tool()
24
+ def search_files(query: str, limit: int = 20) -> str:
25
+ """
26
+ Search for files in Google Drive by name.
27
+
28
+ Args:
29
+ query: The search text (e.g. project name).
30
+ limit: Max results.
31
+ """
32
+ import json
33
+ files = client.search(query, limit=limit)
34
+ return json.dumps(files, indent=2)
35
+
36
+ @mcp.tool()
37
+ def list_folder(folder_id: str, limit: int = 50) -> str:
38
+ """
39
+ List all children (files and subfolders) of a specific folder.
40
+
41
+ Args:
42
+ folder_id: The ID of the folder to list. Use 'root' for top level.
43
+ limit: Limit results.
44
+ """
45
+ import json
46
+ files = client.list_folder_children(folder_id, limit=limit)
47
+ return json.dumps(files, indent=2)
48
+
49
+ @mcp.tool()
50
+ def get_file_metadata(file_id: str) -> str:
51
+ """
52
+ Get detailed metadata for a file.
53
+
54
+ Args:
55
+ file_id: The ID of the file.
56
+ """
57
+ import json
58
+ meta = client.get_file_metadata(file_id)
59
+ return json.dumps(meta, indent=2)
60
+
61
+ @mcp.tool()
62
+ def create_folder(name: str, parent_id: str = 'root') -> str:
63
+ """
64
+ Create a new folder.
65
+
66
+ Args:
67
+ name: Name of the new folder.
68
+ parent_id: ID of the parent folder (default 'root').
69
+ """
70
+ import json
71
+ res = client.create_folder(name, parent_id)
72
+ return json.dumps(res, indent=2)
73
+
74
+ @mcp.tool()
75
+ def upload_file(name: str, content: str, parent_id: str = 'root') -> str:
76
+ """
77
+ Upload a text file to Google Drive.
78
+
79
+ Args:
80
+ name: Name of the file.
81
+ content: Text content of the file.
82
+ parent_id: ID of the parent folder.
83
+ """
84
+ import json
85
+ res = client.upload_file(name, content, parent_id=parent_id)
86
+ return json.dumps(res, indent=2)
87
+
88
+ @mcp.tool()
89
+ def trash_file(file_id: str) -> str:
90
+ """
91
+ Move a file to the trash.
92
+
93
+ Args:
94
+ file_id: ID of the file to trash.
95
+ """
96
+ import json
97
+ res = client.trash_file(file_id)
98
+ return json.dumps(res, indent=2)
99
+
100
+ def register_intelligent_tools(mcp: FastMCP, client: DriveClient, executor: ScriptExecutor, loader: SkillLoader, audit: AuditLogger):
101
+ """Registers the 'Forge' and 'Autonomy' tools to the MCP server."""
102
+
103
+ @mcp.tool()
104
+ def create_skill(name: str, code: str, description: str) -> str:
105
+ """
106
+ Forges a new capability (Skill) by writing a Python script.
107
+
108
+ Args:
109
+ name: Technical name of the skill (e.g., 'archive_old_files'). No spaces.
110
+ code: The Python code for the script.
111
+ description: What this skill does (will be saved in SKILL.md).
112
+ """
113
+ import os
114
+ import re
115
+
116
+ # Sanitize name
117
+ safe_name = re.sub(r'[^a-zA-Z0-9_]', '_', name).lower()
118
+
119
+ skill_dir = os.path.join(loader.skills_dir, safe_name)
120
+ os.makedirs(skill_dir, exist_ok=True)
121
+
122
+ # Write script.py
123
+ with open(os.path.join(skill_dir, "script.py"), "w") as f:
124
+ f.write(code)
125
+
126
+ # Write SKILL.md
127
+ skill_md_content = f"""---
128
+ name: {safe_name}
129
+ description: {description}
130
+ ---
131
+
132
+ {description}
133
+ """
134
+ with open(os.path.join(skill_dir, "SKILL.md"), "w") as f:
135
+ f.write(skill_md_content)
136
+
137
+ # Log to Audit
138
+ audit.log_skill_creation(safe_name)
139
+
140
+ return f"Skill '{safe_name}' forged successfully in {skill_dir}"
141
+
142
+ @mcp.tool()
143
+ def list_skills() -> str:
144
+ """
145
+ Lists all available AI-forged skills in the library.
146
+ """
147
+ import json
148
+ skills = loader.discover_skills()
149
+ return json.dumps([{"name": s.name, "description": s.description} for s in skills], indent=2)
150
+
151
+ @mcp.tool()
152
+ def update_skill(name: str, code: str, description: Optional[str] = None) -> str:
153
+ """
154
+ Updates an existing skill with new code or description.
155
+
156
+ Args:
157
+ name: Technical name of the skill to update.
158
+ code: The new Python code.
159
+ description: Optional updated description.
160
+ """
161
+ import os
162
+ import re
163
+
164
+ safe_name = re.sub(r'[^a-zA-Z0-9_]', '_', name).lower()
165
+ skill_dir = os.path.join(loader.skills_dir, safe_name)
166
+
167
+ if not os.path.exists(skill_dir):
168
+ return f"Error: Skill '{safe_name}' does not exist. Use create_skill first."
169
+
170
+ # Update script.py
171
+ with open(os.path.join(skill_dir, "script.py"), "w") as f:
172
+ f.write(code)
173
+
174
+ # Update SKILL.md if description provided
175
+ if description:
176
+ skill_md_content = f"""---
177
+ name: {safe_name}
178
+ description: {description}
179
+ ---
180
+
181
+ {description}
182
+ """
183
+ with open(os.path.join(skill_dir, "SKILL.md"), "w") as f:
184
+ f.write(skill_md_content)
185
+
186
+ audit.log_event("SKILL_UPDATE", f"Capability updated: {safe_name}")
187
+ return f"Skill '{safe_name}' updated successfully."
188
+
189
+ @mcp.tool()
190
+ def run_skill(name: str, args: Optional[List[str]] = None) -> str:
191
+ """
192
+ Executes an AI-forged skill from the library.
193
+
194
+ Args:
195
+ name: The name of the skill to run.
196
+ args: Optional list of command-line arguments for the script.
197
+ """
198
+ import re
199
+ safe_name = re.sub(r'[^a-zA-Z0-9_]', '_', name).lower()
200
+ return executor.run_skill(safe_name, args)
201
+
202
+ @mcp.tool()
203
+ def resolve_path(path: str) -> str:
204
+ """
205
+ Intelligently resolves a human-readable path (e.g., '/Projects/2026') to a File ID.
206
+ Includes autonomous healing if the path is broken.
207
+
208
+ Args:
209
+ path: The full path to resolve.
210
+ """
211
+ file_id = client.find_and_heal_path(path)
212
+ if file_id:
213
+ return f"Resolved '{path}' to ID: {file_id}"
214
+ return f"Error: Could not resolve path '{path}'. Check logs for suggestions."
215
+
216
+
217
+ @mcp.tool()
218
+ def get_skill_guide() -> str:
219
+ """
220
+ Returns the detailed manual (SKILL.md) for Google Drive Forge.
221
+ Read this to understand how to use autonomous features, The Forge, and script templates.
222
+ """
223
+ import os
224
+
225
+ # Locate SKILL.md in the root of the MCP installation
226
+ # Helper: tools.py lives inside the google_drive_forge/ package, so the root is one level up
227
+ base_dir = os.path.dirname(os.path.dirname(os.path.abspath(__file__)))
228
+ skill_md_path = os.path.join(base_dir, "SKILL.md")
229
+
230
+ if not os.path.exists(skill_md_path):
231
+ return "Error: SKILL.md not found in the MCP root directory."
232
+
233
+ with open(skill_md_path, "r", encoding="utf-8") as f:
234
+ return f.read()
235
+
236
+ @mcp.tool()
237
+ def download_to_local(file_id: str, local_path: str) -> str:
238
+ """
239
+ Downloads a file from Drive to the local filesystem.
240
+ Automatically converts Google Docs/Sheets to meaningful text/markdown formats.
241
+
242
+ Args:
243
+ file_id: The ID of the file to download.
244
+ local_path: Absolute path on the local machine to save the file.
245
+ """
246
+ import os
247
+ try:
248
+ # 1. Get Metadata to check type
249
+ meta = client.get_file_metadata(file_id)
250
+ name = meta.get('name')
251
+ mime_type = meta.get('mimeType')
252
+
253
+ # 2. Determine conversion (if needed)
254
+ content = None
255
+ final_path = local_path
256
+
257
+ # Normalize path: if directory, append filename
258
+ if os.path.isdir(local_path) or local_path.endswith(os.sep):
259
+ os.makedirs(local_path, exist_ok=True)
260
+ final_path = os.path.join(local_path, name)
261
+
262
+ # Ensure the parent directory exists (skip when final_path has no directory part)
263
+ parent_dir = os.path.dirname(final_path)
+ if parent_dir:
+     os.makedirs(parent_dir, exist_ok=True)
264
+
265
+ if mime_type == 'application/vnd.google-apps.document':
266
+ # Export as text for Docs
267
+ content = client.download_file(file_id, export_mime_type='text/plain')
268
+ if not final_path.endswith(('.txt', '.md')):
269
+ final_path += '.md'
270
+ elif mime_type == 'application/vnd.google-apps.spreadsheet':
271
+ # Export Sheets as Excel (.xlsx)
272
+ content = client.download_file(file_id, export_mime_type='application/vnd.openxmlformats-officedocument.spreadsheetml.sheet')
273
+ if not final_path.endswith('.xlsx'):
274
+ final_path += '.xlsx'
275
+ else:
276
+ # Binary / Default
277
+ content = client.download_file(file_id)
278
+
279
+ # 3. Write to Disk
280
+ with open(final_path, 'wb') as f:
281
+ if isinstance(content, str):
282
+ f.write(content.encode('utf-8'))
283
+ else:
284
+ f.write(content)
285
+
286
+ return f"Successfully downloaded '{name}' to '{final_path}'"
287
+
288
+ except Exception as e:
289
+ return f"Error downloading file: {str(e)}"
290
+
291
+ @mcp.tool()
292
+ def smart_read(path: str) -> str:
293
+ """
294
+ Resolves a path and reads its content in one step.
295
+ Autonomously handles path healing and MIME-type conversion.
296
+
297
+ Args:
298
+ path: Path to the file.
299
+ """
300
+ file_id = client.find_and_heal_path(path)
301
+ if not file_id:
302
+ return f"Error: Could not resolve path '{path}'"
303
+
304
+ try:
305
+ # Check if it's a Google Doc that needs text export
306
+ meta = client.get_file_metadata(file_id)
307
+ mime_type = meta.get('mimeType')
308
+
309
+ if mime_type == 'application/vnd.google-apps.document':
310
+ # Force text export for reading
311
+ content_bytes = client.download_file(file_id, export_mime_type='text/plain')
312
+ else:
313
+ content_bytes = client.download_file(file_id)
314
+
315
+ # Try to decode
316
+ if isinstance(content_bytes, str):
317
+ return content_bytes
318
+
319
+ try:
320
+ return content_bytes.decode('utf-8')
321
+ except UnicodeDecodeError:
322
+ return f"<Binary Content: {len(content_bytes)} bytes> (MIME: {mime_type})"
323
+ except Exception as e:
324
+ return f"Error reading file at '{path}': {str(e)}"
@@ -0,0 +1,136 @@
1
+ Metadata-Version: 2.4
2
+ Name: google-drive-forge
3
+ Version: 0.2.0
4
+ Summary: Autonomous Google Drive MCP with Skill Forge Capabilities
5
+ Project-URL: Homepage, https://github.com/traylinx/google_drive_forge
6
+ Project-URL: Repository, https://github.com/traylinx/google_drive_forge
7
+ Project-URL: Documentation, https://github.com/traylinx/google_drive_forge#readme
8
+ Author-email: Sebastian Velandia <sebastian@traylinx.com>
9
+ License-Expression: MIT
10
+ License-File: LICENSE
11
+ Keywords: agentic,ai,automation,google-drive,mcp,skill-forge
12
+ Classifier: Development Status :: 4 - Beta
13
+ Classifier: Intended Audience :: Developers
14
+ Classifier: License :: OSI Approved :: MIT License
15
+ Classifier: Programming Language :: Python :: 3
16
+ Classifier: Programming Language :: Python :: 3.10
17
+ Classifier: Programming Language :: Python :: 3.11
18
+ Classifier: Programming Language :: Python :: 3.12
19
+ Classifier: Topic :: Software Development :: Libraries :: Python Modules
20
+ Requires-Python: >=3.10
21
+ Requires-Dist: google-api-python-client
22
+ Requires-Dist: google-auth
23
+ Requires-Dist: google-auth-httplib2
24
+ Requires-Dist: google-auth-oauthlib
25
+ Requires-Dist: mcp
26
+ Requires-Dist: pyyaml
27
+ Requires-Dist: tenacity
28
+ Description-Content-Type: text/markdown
29
+
30
+ <div align="center">
31
+ <img src="assets/banner.png" width="250" alt="Google Drive Forge Icon">
32
+ <h1>Google Drive Forge</h1>
33
+ </div>
34
+
35
+ [![MCP](https://img.shields.io/badge/MCP-Protocol-blue.svg)](https://modelcontextprotocol.io)
36
+ [![License: MIT](https://img.shields.io/badge/License-MIT-yellow.svg)](https://opensource.org/licenses/MIT)
37
+ [![Python 3.10+](https://img.shields.io/badge/python-3.10+-blue.svg)](https://www.python.org/downloads/)
38
+
39
+ Transform your AI agent from a simple file explorer into an **Autonomous Power-User**. This isn't just a Google Drive connector; it's a cognitive layer for your cloud storage.
40
+
41
+ ---
42
+
43
+ ## 💎 The "Intelligent" Edge
44
+
45
+ Most MCP servers just list files. This one **reasons** about them.
46
+
47
+ | Feature | Description | Benefit |
48
+ | :------------------ | :-------------------------------------------------------- | :-------------------------------------------------------------- |
49
+ | **🔨 The Forge** | On-the-fly Python "Skill" creation and execution. | Solve complex tasks like "Archive all PDFs older than 30 days". |
50
+ | **🩹 Path Healing** | Fuzzy, human-like path resolution (e.g., `/Work/Drafts`). | No more hunting for obscure File IDs. |
51
+ | **🧠 Auto-Recovery** | Intercepts 404s and suggests correct paths/files. | Agent remains autonomous even when it makes mistakes. |
52
+ | **🔍 Smart Read** | Auto-detection of MIME types and content conversion. | Seamless interaction with Sheets, Docs, and PDFs. |
53
+ | **📜 Audit Log** | Transparent tracking of all autonomous decisions. | Complete visibility into what your agent is doing. |
54
+
55
+ ---
56
+
57
+ ## 🛠 Features Breakdown
58
+
59
+ ### 📂 Standard Operations
60
+ - **Discovery**: `list_files`, `search_files`, `list_folder`.
61
+ - **Management**: `create_folder`, `upload_file`, `trash_file`.
62
+ - **Metadata**: Deep metadata inspection for any file object.
63
+
64
+ ### ⚡ The Forge (Agent Skills)
65
+ Empower your agent to expand its own capabilities. Using the [Agent Skills](https://agentskills.io) standard, the agent can:
66
+ - **`create_skill`**: Write Python logic to perform multi-step Drive operations.
67
+ - **`list_skills`**: Browse its library of forged capabilities.
68
+ - **`run_skill`**: Execute its custom logic with full API access.
69
+
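+ A forged skill is simply a folder under the skills directory containing a `script.py` (plus a `SKILL.md` manifest). Below is a minimal, hypothetical sketch of what the agent might write via `create_skill` — the skill name and query are illustrative, not shipped with the package:
+
+ ```python
+ # skills/list_large_pdfs/script.py  (hypothetical forged skill)
+ from google_drive_forge import DriveClient
+
+ client = DriveClient()  # reuses the locally stored OAuth token
+ for f in client.list_files(query="mimeType = 'application/pdf'", limit=25):
+     print(f["name"], f.get("size", "?"), f["id"])
+ ```
+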
70
+ ### 🧭 Autonomous Navigation
71
+ - **`resolve_path`**: Converts `/Project/2026/Budget.xlsx` into a working ID, healing broken paths automatically.
72
+ - **`smart_read`**: A high-level tool that handles resolution, downloading, and decoding in one step.
73
+
74
+ ---
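+ Both tools delegate to `IntelligentDriveClient.find_and_heal_path`, which walks the path segment by segment and falls back to a case-insensitive "contains" match when an exact name lookup fails. A rough sketch of driving it directly from Python (the path below is illustrative):
+
+ ```python
+ from google_drive_forge import ForgeClient  # alias of IntelligentDriveClient
+
+ client = ForgeClient()
+ file_id = client.find_and_heal_path("/Project/2026/Budgt")  # typo heals to the closest match if unambiguous
+ if file_id:
+     content = client.download_file(file_id, export_mime_type="text/plain")  # bytes; Google Docs export as plain text
+ ```
+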
75
+
76
+ ## � Getting Started
77
+
78
+ ### 1. Requirements
79
+ - Python 3.10 or higher.
80
+ - A Google Cloud Project with the **Drive API** enabled.
81
+
82
+ ### 2. Deep Dive Into Setup
83
+ For a step-by-step walkthrough on generating your `credentials.json` and `token.json`, please refer to our:
84
+ 👉 **[Comprehensive Setup Guide](docs/setup.md)**
85
+
86
+ ### 3. Quick Configuration
87
+ Add this to your MCP host configuration (e.g., Antigravity):
88
+
89
+ ```json
90
+ "google-drive": {
91
+ "command": "python",
92
+ "args": ["/path/to/server.py"],
93
+ "env": {
94
+ "GOOGLE_DRIVE_CREDENTIALS": "/path/to/credentials.json",
95
+ "GOOGLE_DRIVE_HEADLESS_AUTH": "false",
96
+ "GOOGLE_DRIVE_PYTHON_PATH": "/usr/bin/python3"
97
+ }
98
+ }
99
+ ```
100
+
101
+ #### ⚙️ Environment Variables
102
+ | Variable | Description | Default |
103
+ | :--------------------------- | :------------------------------------------------ | :------------------- |
104
+ | `GOOGLE_DRIVE_CREDENTIALS` | Path to your `credentials.json`. | `./credentials.json` |
105
+ | `GOOGLE_DRIVE_HEADLESS_AUTH` | Set to `true` for console-based OAuth on servers. | `false` |
106
+ | `GOOGLE_DRIVE_PYTHON_PATH` | Path to a specific Python executable or venv. | `sys.executable` |
107
+ | `GOOGLE_DRIVE_SKILLS_DIR` | Where to store forged AI Skills. | `./skills` |
108
+
109
+ ---
110
+
111
+ ## 🛡️ Security Implications
112
+
113
+ **Please Read Carefully:**
114
+ This MCP server implements **The Forge**, which allows AI agents to write and execute arbitrary Python code on your machine.
115
+ * **Local Use Only**: This tool is designed for local power-users.
116
+ * **Sandbox**: It does not currently implement a sandbox for executed scripts. Only use it with agents you trust to handle your local filesystem.
117
+
118
+ ---
119
+
120
+ ## Contributing & Extension
121
+
122
+ We believe in open autonomy. If you have ideas for new core skills or improvements to the path-healing engine:
123
+ 1. Check the [Contributing Guide](CONTRIBUTING.md).
124
+ 2. Explore the `skills/` directory for examples of forged logic.
125
+ 3. Submit a PR!
126
+
127
+ ---
128
+
129
+ ## ⚖️ License
130
+
131
+ Distributed under the **MIT License**. See `LICENSE` for more information.
132
+
133
+ ---
134
+ <p align="center">
135
+ Built with ❤️ for the next generation of AI Agents.
136
+ </p>
@@ -0,0 +1,15 @@
1
+ google_drive_forge/__init__.py,sha256=z_vaI-6hkbxOK0JGD31H2n2i_yIkQ3cAFA3xF3I-xLI,321
2
+ google_drive_forge/__main__.py,sha256=F8Ucw5KoaMRuG6Elo7LIMr1xJsSh0h0FmgeX637jSLk,1859
3
+ google_drive_forge/audit.py,sha256=k5tpSwUEzgITMz0oPmT2rP76a0VHLexkSRx3ZZ0nWgw,1304
4
+ google_drive_forge/auth.py,sha256=oIVQVn148z33loEt5Irts2fMUOqqvczy_aJMReNNkrs,2771
5
+ google_drive_forge/client.py,sha256=Mnf45E3WB6z8tLEx3jGTyzmFlJr11L-z-AFqbfelHiI,5809
6
+ google_drive_forge/executor.py,sha256=kwUrRKVqqrqGHzWpHaCSWCP5bDnigo9qtAwrfA7Md2Y,2861
7
+ google_drive_forge/intelligent_client.py,sha256=I5k7mlmRHByhAlNksdkeTSGpXRA98IMaCxMmFw8EEmg,4228
8
+ google_drive_forge/resources.py,sha256=6UEeFY8D0mPaQ8aBeLIB1H7lo7689i9uX9_YhkHWG6Y,1402
9
+ google_drive_forge/skill_loader.py,sha256=LMboX7L-TIamVv0Ynhtjc4TnxTEyWhY-Xsf646cUnK4,2114
10
+ google_drive_forge/tools.py,sha256=UWW1rPN4itpj5CHWYZScGXPDqekd2vNpr4_o-ElKJEc,11230
11
+ google_drive_forge-0.2.0.dist-info/METADATA,sha256=Zp8jrtFp6wwhbCkx98mpFU_zvq6_Wx9iSwC-b94vbbU,6087
12
+ google_drive_forge-0.2.0.dist-info/WHEEL,sha256=WLgqFyCfm_KASv4WHyYy0P3pM_m7J5L9k2skdKLirC8,87
13
+ google_drive_forge-0.2.0.dist-info/entry_points.txt,sha256=5WcFKx8JkaDAXsvd0EUThiUmSgYDvwoI_pNVoMqPNoI,72
14
+ google_drive_forge-0.2.0.dist-info/licenses/LICENSE,sha256=uq6rz2JUfUAfzLRl8jdFrOsB-cyVv0LNRtbsn0ikPac,1088
15
+ google_drive_forge-0.2.0.dist-info/RECORD,,
@@ -0,0 +1,4 @@
1
+ Wheel-Version: 1.0
2
+ Generator: hatchling 1.28.0
3
+ Root-Is-Purelib: true
4
+ Tag: py3-none-any
@@ -0,0 +1,2 @@
1
+ [console_scripts]
2
+ google-drive-forge = google_drive_forge.__main__:main
@@ -0,0 +1,21 @@
1
+ MIT License
2
+
3
+ Copyright (c) 2024 Google Drive Forge Contributors
4
+
5
+ Permission is hereby granted, free of charge, to any person obtaining a copy
6
+ of this software and associated documentation files (the "Software"), to deal
7
+ in the Software without restriction, including without limitation the rights
8
+ to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
9
+ copies of the Software, and to permit persons to whom the Software is
10
+ furnished to do so, subject to the following conditions:
11
+
12
+ The above copyright notice and this permission notice shall be included in all
13
+ copies or substantial portions of the Software.
14
+
15
+ THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
16
+ IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
17
+ FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
18
+ AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
19
+ LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
20
+ OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
21
+ SOFTWARE.