workspace-mcp 1.1.7__py3-none-any.whl → 1.1.9__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- auth/google_auth.py +1 -1
- auth/oauth21/__init__.py +108 -0
- auth/oauth21/compat.py +422 -0
- auth/oauth21/config.py +380 -0
- auth/oauth21/discovery.py +232 -0
- auth/oauth21/example_config.py +303 -0
- auth/oauth21/handler.py +440 -0
- auth/oauth21/http.py +270 -0
- auth/oauth21/jwt.py +438 -0
- auth/oauth21/middleware.py +426 -0
- auth/oauth21/oauth2.py +353 -0
- auth/oauth21/sessions.py +519 -0
- auth/oauth21/tokens.py +392 -0
- auth/oauth_callback_server.py +1 -1
- auth/service_decorator.py +2 -5
- core/comments.py +0 -3
- core/server.py +35 -36
- core/utils.py +3 -4
- gcalendar/calendar_tools.py +4 -5
- gchat/chat_tools.py +0 -1
- gdocs/docs_tools.py +73 -16
- gdrive/drive_tools.py +1 -3
- gforms/forms_tools.py +0 -1
- gmail/gmail_tools.py +184 -70
- gsheets/sheets_tools.py +0 -2
- gslides/slides_tools.py +1 -3
- gtasks/tasks_tools.py +1 -2
- main.py +2 -2
- {workspace_mcp-1.1.7.dist-info → workspace_mcp-1.1.9.dist-info}/METADATA +3 -2
- workspace_mcp-1.1.9.dist-info/RECORD +48 -0
- workspace_mcp-1.1.7.dist-info/RECORD +0 -36
- {workspace_mcp-1.1.7.dist-info → workspace_mcp-1.1.9.dist-info}/WHEEL +0 -0
- {workspace_mcp-1.1.7.dist-info → workspace_mcp-1.1.9.dist-info}/entry_points.txt +0 -0
- {workspace_mcp-1.1.7.dist-info → workspace_mcp-1.1.9.dist-info}/licenses/LICENSE +0 -0
- {workspace_mcp-1.1.7.dist-info → workspace_mcp-1.1.9.dist-info}/top_level.txt +0 -0
auth/oauth21/tokens.py
ADDED
@@ -0,0 +1,392 @@
+"""
+Token Validator
+
+Validates and parses Bearer tokens, supporting both JWT and opaque token formats.
+Implements token introspection per RFC7662 for opaque tokens.
+"""
+
+import logging
+from typing import Dict, Any, Optional, List
+from datetime import datetime, timezone
+
+import aiohttp
+import jwt
+from cachetools import TTLCache
+
+from .discovery import AuthorizationServerDiscovery
+
+logger = logging.getLogger(__name__)
+
+
+class TokenValidationError(Exception):
+    """Exception raised when token validation fails."""
+
+    def __init__(self, message: str, error_code: str = "invalid_token"):
+        super().__init__(message)
+        self.error_code = error_code
+
+
+class TokenValidator:
+    """Validates and parses Bearer tokens."""
+
+    def __init__(
+        self,
+        discovery_service: Optional[AuthorizationServerDiscovery] = None,
+        cache_ttl: int = 300,  # 5 minutes
+        max_cache_size: int = 1000,
+    ):
+        """
+        Initialize the token validator.
+
+        Args:
+            discovery_service: Authorization server discovery service
+            cache_ttl: Token validation cache TTL in seconds
+            max_cache_size: Maximum number of cached validations
+        """
+        self.discovery = discovery_service or AuthorizationServerDiscovery()
+        self.validation_cache = TTLCache(maxsize=max_cache_size, ttl=cache_ttl)
+        self.jwks_cache = TTLCache(maxsize=10, ttl=3600)  # 1 hour for JWKS
+        self._session: Optional[aiohttp.ClientSession] = None
+
+    async def _get_session(self) -> aiohttp.ClientSession:
+        """Get or create HTTP session."""
+        if self._session is None or self._session.closed:
+            self._session = aiohttp.ClientSession(
+                timeout=aiohttp.ClientTimeout(total=30),
+                headers={"User-Agent": "MCP-OAuth2.1-Client/1.0"},
+            )
+        return self._session
+
+    async def close(self):
+        """Clean up resources."""
+        if self._session and not self._session.closed:
+            await self._session.close()
+        await self.discovery.close()
+
+    def _is_jwt_format(self, token: str) -> bool:
+        """
+        Check if token appears to be in JWT format.
+
+        Args:
+            token: Token to check
+
+        Returns:
+            True if token appears to be JWT
+        """
+        # JWT has 3 parts separated by dots
+        parts = token.split('.')
+        return len(parts) == 3
+
+    async def validate_token(
+        self,
+        token: str,
+        expected_audience: Optional[str] = None,
+        required_scopes: Optional[List[str]] = None,
+        authorization_server_url: Optional[str] = None,
+    ) -> Dict[str, Any]:
+        """
+        Validate token and extract claims.
+
+        Args:
+            token: Bearer token to validate
+            expected_audience: Expected audience claim
+            required_scopes: Required scopes
+            authorization_server_url: Authorization server URL for introspection
+
+        Returns:
+            Dictionary containing validated token information
+
+        Raises:
+            TokenValidationError: If token validation fails
+        """
+        # Check cache first
+        cache_key = f"token:{hash(token)}:{expected_audience}:{','.join(required_scopes or [])}"
+        if cache_key in self.validation_cache:
+            logger.debug("Using cached token validation result")
+            return self.validation_cache[cache_key]
+
+        try:
+            if self._is_jwt_format(token):
+                result = await self._validate_jwt_token(token, expected_audience, required_scopes)
+            else:
+                result = await self._validate_opaque_token(
+                    token, expected_audience, required_scopes, authorization_server_url
+                )
+
+            # Cache successful validation
+            self.validation_cache[cache_key] = result
+            return result
+
+        except Exception as e:
+            if isinstance(e, TokenValidationError):
+                raise
+            else:
+                logger.error(f"Unexpected error validating token: {e}")
+                raise TokenValidationError(f"Token validation failed: {str(e)}")
+
+    async def _validate_jwt_token(
+        self,
+        token: str,
+        expected_audience: Optional[str] = None,
+        required_scopes: Optional[List[str]] = None,
+    ) -> Dict[str, Any]:
+        """Validate JWT token."""
+        try:
+            # First decode without verification to get issuer and key ID
+            unverified_payload = jwt.decode(token, options={"verify_signature": False})
+            issuer = unverified_payload.get("iss")
+
+            if not issuer:
+                raise TokenValidationError("JWT missing issuer claim")
+
+            # Get JWKS for signature verification
+            jwks = await self._fetch_jwks(issuer)
+
+            # Decode and verify the JWT
+            payload = jwt.decode(
+                token,
+                key=jwks,
+                algorithms=["RS256", "ES256"],
+                audience=expected_audience,
+                issuer=issuer,
+                options={
+                    "verify_exp": True,
+                    "verify_aud": expected_audience is not None,
+                    "verify_iss": True,
+                }
+            )
+
+            # Extract user identity
+            user_identity = self.extract_user_identity(payload)
+
+            # Validate scopes if required
+            if required_scopes:
+                token_scopes = self._extract_scopes_from_jwt(payload)
+                if not self._validate_scopes(token_scopes, required_scopes):
+                    raise TokenValidationError(
+                        f"Insufficient scope. Required: {required_scopes}, Got: {token_scopes}",
+                        error_code="insufficient_scope"
+                    )
+
+            return {
+                "valid": True,
+                "token_type": "jwt",
+                "user_identity": user_identity,
+                "scopes": self._extract_scopes_from_jwt(payload),
+                "expires_at": payload.get("exp"),
+                "issuer": issuer,
+                "audience": payload.get("aud"),
+                "claims": payload,
+            }
+
+        except jwt.ExpiredSignatureError:
+            raise TokenValidationError("JWT token has expired", error_code="invalid_token")
+        except jwt.InvalidAudienceError:
+            raise TokenValidationError("JWT audience mismatch", error_code="invalid_token")
+        except jwt.InvalidIssuerError:
+            raise TokenValidationError("JWT issuer invalid", error_code="invalid_token")
+        except jwt.InvalidSignatureError:
+            raise TokenValidationError("JWT signature verification failed", error_code="invalid_token")
+        except jwt.InvalidTokenError as e:
+            raise TokenValidationError(f"Invalid JWT token: {str(e)}", error_code="invalid_token")
+
+    async def _validate_opaque_token(
+        self,
+        token: str,
+        expected_audience: Optional[str] = None,
+        required_scopes: Optional[List[str]] = None,
+        authorization_server_url: Optional[str] = None,
+    ) -> Dict[str, Any]:
+        """Validate opaque token via introspection."""
+        if not authorization_server_url:
+            # Try to determine from discovery
+            servers = await self.discovery.discover_authorization_servers()
+            if servers:
+                authorization_server_url = servers[0].get("issuer")
+
+        if not authorization_server_url:
+            raise TokenValidationError("No authorization server URL for token introspection")
+
+        introspection_result = await self.introspect_opaque_token(token, authorization_server_url)
+
+        if not introspection_result.get("active", False):
+            raise TokenValidationError("Token is not active", error_code="invalid_token")
+
+        # Validate audience if provided
+        if expected_audience:
+            token_audience = introspection_result.get("aud")
+            if token_audience and token_audience != expected_audience:
+                raise TokenValidationError("Token audience mismatch", error_code="invalid_token")
+
+        # Validate scopes if required
+        if required_scopes:
+            token_scopes = introspection_result.get("scope", "").split()
+            if not self._validate_scopes(token_scopes, required_scopes):
+                raise TokenValidationError(
+                    f"Insufficient scope. Required: {required_scopes}, Got: {token_scopes}",
+                    error_code="insufficient_scope"
+                )
+
+        # Extract user identity
+        user_identity = self.extract_user_identity(introspection_result)
+
+        return {
+            "valid": True,
+            "token_type": "opaque",
+            "user_identity": user_identity,
+            "scopes": token_scopes if required_scopes else introspection_result.get("scope", "").split(),
+            "expires_at": introspection_result.get("exp"),
+            "issuer": introspection_result.get("iss"),
+            "audience": introspection_result.get("aud"),
+            "claims": introspection_result,
+        }
+
+    async def introspect_opaque_token(
+        self,
+        token: str,
+        authorization_server_url: str,
+    ) -> Dict[str, Any]:
+        """
+        Query authorization server for opaque token details per RFC7662.
+
+        Args:
+            token: Opaque token to introspect
+            authorization_server_url: Authorization server URL
+
+        Returns:
+            Token introspection response
+
+        Raises:
+            TokenValidationError: If introspection fails
+        """
+        # Get authorization server metadata
+        as_metadata = await self.discovery.get_authorization_server_metadata(authorization_server_url)
+        introspection_endpoint = as_metadata.get("introspection_endpoint")
+
+        if not introspection_endpoint:
+            raise TokenValidationError("Authorization server does not support token introspection")
+
+        session = await self._get_session()
+
+        try:
+            # Prepare introspection request
+            data = {"token": token}
+            headers = {
+                "Content-Type": "application/x-www-form-urlencoded",
+                "Accept": "application/json",
+            }
+
+            async with session.post(introspection_endpoint, data=data, headers=headers) as response:
+                if response.status != 200:
+                    raise TokenValidationError(f"Token introspection failed: {response.status}")
+
+                result = await response.json()
+                logger.debug("Token introspection completed")
+                return result
+
+        except aiohttp.ClientError as e:
+            raise TokenValidationError(f"Failed to introspect token: {str(e)}")
+
+    def extract_user_identity(self, token_payload: Dict[str, Any]) -> str:
+        """
+        Extract user email/identity from validated token.
+
+        Args:
+            token_payload: Validated token payload/claims
+
+        Returns:
+            User email or identifier
+
+        Raises:
+            TokenValidationError: If no user identity found
+        """
+        # Try different claim names for user identity
+        identity_claims = ["email", "sub", "preferred_username", "upn", "unique_name"]
+
+        for claim in identity_claims:
+            value = token_payload.get(claim)
+            if value:
+                # Prefer email-like identities
+                if "@" in str(value):
+                    return str(value)
+                elif claim == "email":  # Email claim should be email
+                    return str(value)
+
+        # Fallback to first available identity claim
+        for claim in identity_claims:
+            value = token_payload.get(claim)
+            if value:
+                return str(value)
+
+        raise TokenValidationError("No user identity found in token", error_code="invalid_token")
+
+    async def _fetch_jwks(self, issuer: str) -> Dict[str, Any]:
+        """Fetch and cache JWKS from authorization server."""
+        cache_key = f"jwks:{issuer}"
+        if cache_key in self.jwks_cache:
+            return self.jwks_cache[cache_key]
+
+        # Get JWKS URI from metadata
+        as_metadata = await self.discovery.get_authorization_server_metadata(issuer)
+        jwks_uri = as_metadata.get("jwks_uri")
+
+        if not jwks_uri:
+            raise TokenValidationError(f"No JWKS URI found for issuer {issuer}")
+
+        session = await self._get_session()
+
+        try:
+            async with session.get(jwks_uri) as response:
+                if response.status != 200:
+                    raise TokenValidationError(f"Failed to fetch JWKS: {response.status}")
+
+                jwks = await response.json()
+                self.jwks_cache[cache_key] = jwks
+                logger.debug(f"Fetched JWKS from {jwks_uri}")
+                return jwks
+
+        except aiohttp.ClientError as e:
+            raise TokenValidationError(f"Failed to fetch JWKS: {str(e)}")
+
+    def _extract_scopes_from_jwt(self, payload: Dict[str, Any]) -> List[str]:
+        """Extract scopes from JWT payload."""
+        # Try different scope claim formats
+        scope_claim = payload.get("scope") or payload.get("scp")
+
+        if isinstance(scope_claim, str):
+            return scope_claim.split()
+        elif isinstance(scope_claim, list):
+            return scope_claim
+        else:
+            return []
+
+    def _validate_scopes(self, token_scopes: List[str], required_scopes: List[str]) -> bool:
+        """Check if token has all required scopes."""
+        token_scope_set = set(token_scopes)
+        required_scope_set = set(required_scopes)
+        return required_scope_set.issubset(token_scope_set)
+
+    def is_token_expired(self, token_info: Dict[str, Any]) -> bool:
+        """
+        Check if token is expired.
+
+        Args:
+            token_info: Token information from validation
+
+        Returns:
+            True if token is expired
+        """
+        exp = token_info.get("expires_at")
+        if not exp:
+            return False  # No expiration info
+
+        try:
+            if isinstance(exp, (int, float)):
+                exp_time = datetime.fromtimestamp(exp, tz=timezone.utc)
+            else:
+                exp_time = datetime.fromisoformat(str(exp))
+
+            return datetime.now(timezone.utc) >= exp_time
+        except (ValueError, TypeError):
+            logger.warning(f"Invalid expiration time format: {exp}")
+            return False

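The entry point of this new module is TokenValidator.validate_token, which dispatches to JWT verification or RFC 7662 introspection based on the token shape and caches successful results. A minimal usage sketch against the class as added above; the token value, audience, and scope names are illustrative placeholders, not values shipped with the package:

import asyncio

from auth.oauth21.tokens import TokenValidator, TokenValidationError


async def main() -> None:
    validator = TokenValidator()
    try:
        # Hypothetical inputs; substitute a real Bearer token and your resource's audience/scopes.
        info = await validator.validate_token(
            token="<bearer-token>",
            expected_audience="https://example.com/mcp",
            required_scopes=["openid", "email"],
        )
        print(info["user_identity"], info["scopes"])
    except TokenValidationError as exc:
        print(f"Rejected: {exc} ({exc.error_code})")
    finally:
        await validator.close()


asyncio.run(main())
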
auth/oauth_callback_server.py
CHANGED
@@ -172,7 +172,7 @@ class MinimalOAuthServer:
                 self.server_thread.join(timeout=3.0)

             self.is_running = False
-            logger.info(
+            logger.info("Minimal OAuth server stopped")

         except Exception as e:
             logger.error(f"Error stopping minimal OAuth server: {e}", exc_info=True)

auth/service_decorator.py
CHANGED
@@ -6,10 +6,6 @@ from datetime import datetime, timedelta

 from google.auth.exceptions import RefreshError
 from auth.google_auth import get_authenticated_google_service, GoogleAuthenticationError
-
-logger = logging.getLogger(__name__)
-
-# Import scope constants
 from auth.scopes import (
     GMAIL_READONLY_SCOPE, GMAIL_SEND_SCOPE, GMAIL_COMPOSE_SCOPE, GMAIL_MODIFY_SCOPE, GMAIL_LABELS_SCOPE,
     DRIVE_READONLY_SCOPE, DRIVE_FILE_SCOPE,

@@ -22,6 +18,8 @@ from auth.scopes import (
     TASKS_SCOPE, TASKS_READONLY_SCOPE
 )

+logger = logging.getLogger(__name__)
+
 # Service configuration mapping
 SERVICE_CONFIGS = {
     "gmail": {"service": "gmail", "version": "v1"},

@@ -391,7 +389,6 @@ def clear_service_cache(user_email: Optional[str] = None) -> int:

 def get_cache_stats() -> Dict[str, Any]:
     """Get service cache statistics."""
-    now = datetime.now()
     valid_entries = 0
     expired_entries = 0

core/comments.py
CHANGED
@@ -7,10 +7,7 @@ All Google Workspace apps (Docs, Sheets, Slides) use the Drive API for comment o

 import logging
 import asyncio
-from typing import Dict, Any

-from mcp import types
-from googleapiclient.errors import HttpError

 from auth.service_decorator import require_google_service
 from core.server import server

core/server.py
CHANGED
@@ -6,7 +6,6 @@ from importlib import metadata
 from fastapi import Header
 from fastapi.responses import HTMLResponse

-from mcp import types

 from mcp.server.fastmcp import FastMCP
 from starlette.requests import Request

@@ -18,41 +17,41 @@ from auth.oauth_responses import create_error_response, create_success_response,
 # Import shared configuration
 from auth.scopes import (
     OAUTH_STATE_TO_SESSION_ID_MAP,
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
+    SCOPES,
+    USERINFO_EMAIL_SCOPE,  # noqa: F401
+    OPENID_SCOPE,  # noqa: F401
+    CALENDAR_READONLY_SCOPE,  # noqa: F401
+    CALENDAR_EVENTS_SCOPE,  # noqa: F401
+    DRIVE_READONLY_SCOPE,  # noqa: F401
+    DRIVE_FILE_SCOPE,  # noqa: F401
+    GMAIL_READONLY_SCOPE,  # noqa: F401
+    GMAIL_SEND_SCOPE,  # noqa: F401
+    GMAIL_COMPOSE_SCOPE,  # noqa: F401
+    GMAIL_MODIFY_SCOPE,  # noqa: F401
+    GMAIL_LABELS_SCOPE,  # noqa: F401
+    BASE_SCOPES,  # noqa: F401
+    CALENDAR_SCOPES,  # noqa: F401
+    DRIVE_SCOPES,  # noqa: F401
+    GMAIL_SCOPES,  # noqa: F401
+    DOCS_READONLY_SCOPE,  # noqa: F401
+    DOCS_WRITE_SCOPE,  # noqa: F401
+    CHAT_READONLY_SCOPE,  # noqa: F401
+    CHAT_WRITE_SCOPE,  # noqa: F401
+    CHAT_SPACES_SCOPE,  # noqa: F401
+    CHAT_SCOPES,  # noqa: F401
+    SHEETS_READONLY_SCOPE,  # noqa: F401
+    SHEETS_WRITE_SCOPE,  # noqa: F401
+    SHEETS_SCOPES,  # noqa: F401
+    FORMS_BODY_SCOPE,  # noqa: F401
+    FORMS_BODY_READONLY_SCOPE,  # noqa: F401
+    FORMS_RESPONSES_READONLY_SCOPE,  # noqa: F401
+    FORMS_SCOPES,  # noqa: F401
+    SLIDES_SCOPE,  # noqa: F401
+    SLIDES_READONLY_SCOPE,  # noqa: F401
+    SLIDES_SCOPES,  # noqa: F401
+    TASKS_SCOPE,  # noqa: F401
+    TASKS_READONLY_SCOPE,  # noqa: F401
+    TASKS_SCOPES,  # noqa: F401
 )

 # Configure logging

core/utils.py
CHANGED
@@ -1,10 +1,9 @@
 import io
 import logging
 import os
-import
-import
+import zipfile
+import xml.etree.ElementTree as ET
 import ssl
-import time
 import asyncio
 import functools

@@ -70,7 +69,7 @@ def check_credentials_directory_permissions(credentials_dir: str = None) -> None
         try:
             if os.path.exists(credentials_dir):
                 os.rmdir(credentials_dir)
-        except:
+        except (PermissionError, OSError):
             pass
         raise PermissionError(
             f"Cannot create or write to credentials directory '{os.path.abspath(credentials_dir)}': {e}"

gcalendar/calendar_tools.py
CHANGED
@@ -10,7 +10,6 @@ import asyncio
 import re
 from typing import List, Optional, Dict, Any

-from mcp import types
 from googleapiclient.errors import HttpError
 from googleapiclient.discovery import build

@@ -300,7 +299,7 @@ async def create_event(
                     title = filename
                     logger.info(f"[create_event] Using filename '{filename}' as attachment title")
                 else:
-                    logger.info(
+                    logger.info("[create_event] No filename found, using generic title")
             except Exception as e:
                 logger.warning(f"Could not fetch metadata for file {file_id}: {e}")
             event_body["attachments"].append({

@@ -397,7 +396,7 @@ async def modify_event(
             # might handle this more robustly or require start/end with timezone.
             # For now, we'll log a warning and skip applying timezone if start/end are missing.
             logger.warning(
-
+                "[modify_event] Timezone provided but start_time and end_time are missing. Timezone will not be applied unless start/end times are also provided."
             )

     if not event_body:

@@ -416,7 +415,7 @@ async def modify_event(
                 lambda: service.events().get(calendarId=calendar_id, eventId=event_id).execute()
             )
             logger.info(
-
+                "[modify_event] Successfully verified event exists before update"
             )
         except HttpError as get_error:
             if get_error.resp.status == 404:

@@ -475,7 +474,7 @@ async def delete_event(service, user_google_email: str, event_id: str, calendar_
                 lambda: service.events().get(calendarId=calendar_id, eventId=event_id).execute()
             )
             logger.info(
-
+                "[delete_event] Successfully verified event exists before deletion"
             )
         except HttpError as get_error:
             if get_error.resp.status == 404:
|