amazon-ads-mcp 0.2.7__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- amazon_ads_mcp/__init__.py +11 -0
- amazon_ads_mcp/auth/__init__.py +33 -0
- amazon_ads_mcp/auth/base.py +211 -0
- amazon_ads_mcp/auth/hooks.py +172 -0
- amazon_ads_mcp/auth/manager.py +791 -0
- amazon_ads_mcp/auth/oauth_state_store.py +277 -0
- amazon_ads_mcp/auth/providers/__init__.py +14 -0
- amazon_ads_mcp/auth/providers/direct.py +393 -0
- amazon_ads_mcp/auth/providers/example_auth0.py.example +216 -0
- amazon_ads_mcp/auth/providers/openbridge.py +512 -0
- amazon_ads_mcp/auth/registry.py +146 -0
- amazon_ads_mcp/auth/secure_token_store.py +297 -0
- amazon_ads_mcp/auth/token_store.py +723 -0
- amazon_ads_mcp/config/__init__.py +5 -0
- amazon_ads_mcp/config/sampling.py +111 -0
- amazon_ads_mcp/config/settings.py +366 -0
- amazon_ads_mcp/exceptions.py +314 -0
- amazon_ads_mcp/middleware/__init__.py +11 -0
- amazon_ads_mcp/middleware/authentication.py +1474 -0
- amazon_ads_mcp/middleware/caching.py +177 -0
- amazon_ads_mcp/middleware/oauth.py +175 -0
- amazon_ads_mcp/middleware/sampling.py +112 -0
- amazon_ads_mcp/models/__init__.py +320 -0
- amazon_ads_mcp/models/amc_models.py +837 -0
- amazon_ads_mcp/models/api_responses.py +847 -0
- amazon_ads_mcp/models/base_models.py +215 -0
- amazon_ads_mcp/models/builtin_responses.py +496 -0
- amazon_ads_mcp/models/dsp_models.py +556 -0
- amazon_ads_mcp/models/stores_brands.py +610 -0
- amazon_ads_mcp/server/__init__.py +6 -0
- amazon_ads_mcp/server/__main__.py +6 -0
- amazon_ads_mcp/server/builtin_prompts.py +269 -0
- amazon_ads_mcp/server/builtin_tools.py +962 -0
- amazon_ads_mcp/server/file_routes.py +547 -0
- amazon_ads_mcp/server/html_templates.py +149 -0
- amazon_ads_mcp/server/mcp_server.py +327 -0
- amazon_ads_mcp/server/openapi_utils.py +158 -0
- amazon_ads_mcp/server/sampling_handler.py +251 -0
- amazon_ads_mcp/server/server_builder.py +751 -0
- amazon_ads_mcp/server/sidecar_loader.py +178 -0
- amazon_ads_mcp/server/transform_executor.py +827 -0
- amazon_ads_mcp/tools/__init__.py +22 -0
- amazon_ads_mcp/tools/cache_management.py +105 -0
- amazon_ads_mcp/tools/download_tools.py +267 -0
- amazon_ads_mcp/tools/identity.py +236 -0
- amazon_ads_mcp/tools/oauth.py +598 -0
- amazon_ads_mcp/tools/profile.py +150 -0
- amazon_ads_mcp/tools/profile_listing.py +285 -0
- amazon_ads_mcp/tools/region.py +320 -0
- amazon_ads_mcp/tools/region_identity.py +175 -0
- amazon_ads_mcp/utils/__init__.py +6 -0
- amazon_ads_mcp/utils/async_compat.py +215 -0
- amazon_ads_mcp/utils/errors.py +452 -0
- amazon_ads_mcp/utils/export_content_type_resolver.py +249 -0
- amazon_ads_mcp/utils/export_download_handler.py +579 -0
- amazon_ads_mcp/utils/header_resolver.py +81 -0
- amazon_ads_mcp/utils/http/__init__.py +56 -0
- amazon_ads_mcp/utils/http/circuit_breaker.py +127 -0
- amazon_ads_mcp/utils/http/client_manager.py +329 -0
- amazon_ads_mcp/utils/http/request.py +207 -0
- amazon_ads_mcp/utils/http/resilience.py +512 -0
- amazon_ads_mcp/utils/http/resilient_client.py +195 -0
- amazon_ads_mcp/utils/http/retry.py +76 -0
- amazon_ads_mcp/utils/http_client.py +873 -0
- amazon_ads_mcp/utils/media/__init__.py +21 -0
- amazon_ads_mcp/utils/media/negotiator.py +243 -0
- amazon_ads_mcp/utils/media/types.py +199 -0
- amazon_ads_mcp/utils/openapi/__init__.py +16 -0
- amazon_ads_mcp/utils/openapi/json.py +55 -0
- amazon_ads_mcp/utils/openapi/loader.py +263 -0
- amazon_ads_mcp/utils/openapi/refs.py +46 -0
- amazon_ads_mcp/utils/region_config.py +200 -0
- amazon_ads_mcp/utils/response_wrapper.py +171 -0
- amazon_ads_mcp/utils/sampling_helpers.py +156 -0
- amazon_ads_mcp/utils/sampling_wrapper.py +173 -0
- amazon_ads_mcp/utils/security.py +630 -0
- amazon_ads_mcp/utils/tool_naming.py +137 -0
- amazon_ads_mcp-0.2.7.dist-info/METADATA +664 -0
- amazon_ads_mcp-0.2.7.dist-info/RECORD +82 -0
- amazon_ads_mcp-0.2.7.dist-info/WHEEL +4 -0
- amazon_ads_mcp-0.2.7.dist-info/entry_points.txt +3 -0
- amazon_ads_mcp-0.2.7.dist-info/licenses/LICENSE +21 -0
|
@@ -0,0 +1,723 @@
|
|
|
1
|
+
"""Token storage abstraction for unified token management.
|
|
2
|
+
|
|
3
|
+
This module provides a pluggable token storage system that serves as the
|
|
4
|
+
single source of truth for all authentication tokens across providers.
|
|
5
|
+
"""
|
|
6
|
+
|
|
7
|
+
import base64
|
|
8
|
+
import json
|
|
9
|
+
import logging
|
|
10
|
+
import os
|
|
11
|
+
import threading
|
|
12
|
+
import time
|
|
13
|
+
from abc import ABC, abstractmethod
|
|
14
|
+
from dataclasses import dataclass
|
|
15
|
+
from datetime import datetime, timedelta, timezone
|
|
16
|
+
from enum import Enum
|
|
17
|
+
from pathlib import Path
|
|
18
|
+
from typing import Any, Dict, Optional
|
|
19
|
+
|
|
20
|
+
try:
|
|
21
|
+
from cryptography.fernet import Fernet
|
|
22
|
+
|
|
23
|
+
CRYPTOGRAPHY_AVAILABLE = True
|
|
24
|
+
except ImportError:
|
|
25
|
+
CRYPTOGRAPHY_AVAILABLE = False
|
|
26
|
+
|
|
27
|
+
logger = logging.getLogger(__name__)
|
|
28
|
+
|
|
29
|
+
|
|
30
|
+
class TokenKind(Enum):
    """Enumerates the categories of credentials the store manages."""

    # Long-lived credential used to mint new access tokens.
    REFRESH = "refresh"
    # Short-lived bearer credential sent on API calls.
    ACCESS = "access"
    # JWT issued by a specific provider (e.g. OpenBridge).
    PROVIDER_JWT = "provider_jwt"
|
|
36
|
+
|
|
37
|
+
|
|
38
|
+
@dataclass
class TokenKey:
    """Composite identifier for a stored token.

    A token is uniquely addressed by the provider that issued it, the
    identity it belongs to, its kind, and optional scoping fields
    (region, marketplace, profile).
    """

    provider_type: str  # e.g. "direct", "openbridge"
    identity_id: str  # e.g. "default", "direct-auth", remote identity ID
    token_kind: TokenKind  # category of token being stored
    region: Optional[str] = None  # e.g. "na", "eu", "fe"
    marketplace: Optional[str] = None  # e.g. "ATVPDKIKX0DER"
    profile_id: Optional[str] = None  # Amazon Ads profile ID

    def to_string(self) -> str:
        """Render the key as a colon-delimited storage string.

        Absent optional fields are encoded with the sentinels "global"
        (region) and "none" (marketplace, profile) so the string always
        has exactly six fields.
        """
        region_part = self.region if self.region else "global"
        marketplace_part = self.marketplace if self.marketplace else "none"
        profile_part = self.profile_id if self.profile_id else "none"
        return ":".join(
            (
                self.provider_type,
                self.identity_id,
                self.token_kind.value,
                region_part,
                marketplace_part,
                profile_part,
            )
        )

    @classmethod
    def from_string(cls, key_str: str) -> "TokenKey":
        """Rebuild a key from its colon-delimited storage string.

        Raises:
            ValueError: If the string does not contain exactly six fields.
        """
        fields = key_str.split(":")
        if len(fields) != 6:
            raise ValueError(f"Invalid token key format: {key_str}")

        provider, identity, kind, region, marketplace, profile = fields
        return cls(
            provider_type=provider,
            identity_id=identity,
            token_kind=TokenKind(kind),
            region=None if region == "global" else region,
            marketplace=None if marketplace == "none" else marketplace,
            profile_id=None if profile == "none" else profile,
        )
|
|
79
|
+
|
|
80
|
+
|
|
81
|
+
@dataclass
class TokenEntry:
    """A stored token plus its lifecycle metadata.

    Attributes:
        value: The raw token string.
        expires_at: When the token expires (ideally timezone-aware UTC).
        metadata: Extra attributes such as scope or token_type.
        created_at: When this entry was created; defaults to now (UTC).
    """

    value: str  # The actual token
    expires_at: datetime  # When the token expires
    metadata: Dict[str, Any]  # Additional metadata (scope, token_type, etc.)
    # Fix: was annotated as bare ``datetime`` with a None default, which
    # mis-states the accepted type; it is filled in by __post_init__.
    created_at: Optional[datetime] = None

    def __post_init__(self) -> None:
        # Default the creation timestamp so age-based ordering always works.
        if self.created_at is None:
            self.created_at = datetime.now(timezone.utc)

    def is_expired(self, buffer_seconds: int = 300) -> bool:
        """Return True if the token is expired or expires within the buffer.

        Args:
            buffer_seconds: Safety margin so callers refresh slightly early.
        """
        expiry_with_buffer = self.expires_at - timedelta(seconds=buffer_seconds)
        # Normalize a naive expiry to UTC so the comparison never raises.
        now = datetime.now(timezone.utc)
        if expiry_with_buffer.tzinfo is None:
            expiry_with_buffer = expiry_with_buffer.replace(tzinfo=timezone.utc)
        return now >= expiry_with_buffer

    def to_dict(self) -> dict:
        """Serialize for storage (timestamps as ISO-8601 strings)."""
        return {
            "value": self.value,
            "expires_at": self.expires_at.isoformat(),
            "metadata": self.metadata,
            "created_at": self.created_at.isoformat(),
        }

    @classmethod
    def from_dict(cls, data: dict) -> "TokenEntry":
        """Deserialize from storage; a missing "metadata" key defaults to {}."""
        return cls(
            value=data["value"],
            expires_at=datetime.fromisoformat(data["expires_at"]),
            metadata=data.get("metadata", {}),
            created_at=datetime.fromisoformat(data["created_at"]),
        )
|
|
121
|
+
|
|
122
|
+
|
|
123
|
+
class TokenStore(ABC):
    """Interface for pluggable token storage backends."""

    @abstractmethod
    async def get(self, key: TokenKey) -> Optional[TokenEntry]:
        """Look up a token by key.

        Implementations return None when the key is unknown or the entry
        has expired (and may clean up expired entries eagerly).
        """
        ...

    @abstractmethod
    async def set(self, key: TokenKey, entry: TokenEntry) -> None:
        """Insert or replace a token."""
        ...

    @abstractmethod
    async def invalidate(self, key: TokenKey) -> None:
        """Drop a single token."""
        ...

    @abstractmethod
    async def invalidate_pattern(
        self,
        provider_type: Optional[str] = None,
        identity_id: Optional[str] = None,
        token_kind: Optional[TokenKind] = None,
        region: Optional[str] = None,
    ) -> int:
        """Drop every token matching the provided fields.

        Omitted (None) fields match everything.

        Returns:
            The number of invalidated entries.
        """
        ...

    @abstractmethod
    async def clear(self) -> None:
        """Drop every stored token."""
        ...

    async def get_access_token(
        self,
        provider_type: str,
        identity_id: str,
        region: Optional[str] = None,
        profile_id: Optional[str] = None,
    ) -> Optional[TokenEntry]:
        """Shorthand for fetching an ACCESS-kind token."""
        lookup_key = TokenKey(
            provider_type=provider_type,
            identity_id=identity_id,
            token_kind=TokenKind.ACCESS,
            region=region,
            profile_id=profile_id,
        )
        return await self.get(lookup_key)

    async def set_access_token(
        self,
        provider_type: str,
        identity_id: str,
        token: str,
        expires_at: datetime,
        metadata: Dict[str, Any] = None,
        region: Optional[str] = None,
        profile_id: Optional[str] = None,
    ) -> None:
        """Shorthand for storing an ACCESS-kind token."""
        storage_key = TokenKey(
            provider_type=provider_type,
            identity_id=identity_id,
            token_kind=TokenKind.ACCESS,
            region=region,
            profile_id=profile_id,
        )
        new_entry = TokenEntry(
            value=token,
            expires_at=expires_at,
            metadata=metadata if metadata else {},
        )
        await self.set(storage_key, new_entry)
|
|
200
|
+
|
|
201
|
+
|
|
202
|
+
class InMemoryTokenStore(TokenStore):
    """In-memory token storage with TTL and automatic cleanup.

    Entries live in a plain dict guarded by a ``threading.Lock``.
    Expired entries are dropped lazily on read and swept periodically
    during ``get``; a size cap evicts the oldest entry when a new key
    would exceed it.
    """

    def __init__(
        self,
        max_entries: int = 1000,
        cleanup_interval: int = 300,  # 5 minutes between sweeps
        default_ttl: int = 3600,  # 1 hour (hint only; not enforced here)
    ):
        """Initialize the store.

        Args:
            max_entries: Hard cap on stored tokens; oldest is evicted beyond it.
            cleanup_interval: Minimum seconds between expired-entry sweeps.
            default_ttl: Default TTL hint in seconds (unused by this class).
        """
        self._store: Dict[str, TokenEntry] = {}
        self._lock = threading.Lock()
        self._max_entries = max_entries
        self._cleanup_interval = cleanup_interval
        self._default_ttl = default_ttl
        self._last_cleanup = time.time()

    async def get(self, key: TokenKey) -> Optional[TokenEntry]:
        """Get a token, removing it (and returning None) if expired."""
        now = time.time()

        # Periodic sweep keeps the dict from accumulating dead entries.
        if now - self._last_cleanup > self._cleanup_interval:
            await self._cleanup()
            self._last_cleanup = now

        with self._lock:
            key_str = key.to_string()
            entry = self._store.get(key_str)

            if entry:
                if entry.is_expired():
                    # Lazily purge the expired entry on read.
                    del self._store[key_str]
                    logger.debug(f"Removed expired token: {key_str}")
                    return None
                return entry

            return None

    async def set(self, key: TokenKey, entry: TokenEntry) -> None:
        """Store a token, enforcing the size limit.

        Bug fix: eviction now only happens when inserting a NEW key.
        Previously, updating an existing key while at capacity evicted an
        unrelated (oldest) token even though the store would not grow.
        """
        with self._lock:
            key_str = key.to_string()

            # Enforce max entries (simple LRU-ish: drop the entry with the
            # oldest created_at), but only when this write grows the store.
            if key_str not in self._store and len(self._store) >= self._max_entries:
                oldest_key = min(
                    self._store.keys(), key=lambda k: self._store[k].created_at
                )
                del self._store[oldest_key]
                logger.debug(f"Evicted oldest token due to size limit: {oldest_key}")

            self._store[key_str] = entry
            logger.debug(f"Stored token: {key_str}")

    async def invalidate(self, key: TokenKey) -> None:
        """Remove a specific token (no-op if absent)."""
        with self._lock:
            key_str = key.to_string()
            if key_str in self._store:
                del self._store[key_str]
                logger.debug(f"Invalidated token: {key_str}")

    async def invalidate_pattern(
        self,
        provider_type: Optional[str] = None,
        identity_id: Optional[str] = None,
        token_kind: Optional[TokenKind] = None,
        region: Optional[str] = None,
    ) -> int:
        """Invalidate every token matching the provided fields.

        Omitted (None) fields match everything.

        Returns:
            The number of removed entries.
        """
        with self._lock:
            to_remove = []

            for key_str in self._store.keys():
                key = TokenKey.from_string(key_str)

                # Skip entries that fail any provided criterion.
                if provider_type and key.provider_type != provider_type:
                    continue
                if identity_id and key.identity_id != identity_id:
                    continue
                if token_kind and key.token_kind != token_kind:
                    continue
                if region and key.region != region:
                    continue

                to_remove.append(key_str)

            # Remove matching entries (outside the iteration above).
            for key_str in to_remove:
                del self._store[key_str]

            if to_remove:
                logger.info(f"Invalidated {len(to_remove)} tokens matching pattern")

            return len(to_remove)

    async def clear(self) -> None:
        """Clear all tokens."""
        with self._lock:
            count = len(self._store)
            self._store.clear()
            logger.info(f"Cleared {count} tokens from store")

    async def _cleanup(self) -> None:
        """Remove all expired entries in a single pass."""
        with self._lock:
            expired_keys = [
                key for key, entry in self._store.items() if entry.is_expired()
            ]

            for key in expired_keys:
                del self._store[key]

            if expired_keys:
                logger.info(f"Cleaned up {len(expired_keys)} expired tokens")
|
|
319
|
+
|
|
320
|
+
|
|
321
|
+
class PersistentTokenStore(InMemoryTokenStore):
    """Token storage with optional file-based persistence.

    Extends InMemoryTokenStore with file persistence for refresh tokens only.
    Access tokens are kept in memory for performance.
    """

    def __init__(
        self,
        storage_path: Optional[Path] = None,
        encrypt_at_rest: bool = True,
        **kwargs,
    ):
        """Initialize the store and load any persisted refresh tokens.

        Args:
            storage_path: Explicit token-file path. When None, resolution
                order is: AMAZON_ADS_CACHE_DIR env var, then a Docker
                cache dir (/app/.cache), then the user's home directory.
            encrypt_at_rest: Encrypt the token file with Fernet when the
                cryptography package is available.
            **kwargs: Forwarded to InMemoryTokenStore.__init__.
        """
        super().__init__(**kwargs)

        # Determine storage path based on environment
        if storage_path is None:
            # Check for explicit cache directory from environment
            cache_dir = os.getenv("AMAZON_ADS_CACHE_DIR")
            if cache_dir:
                storage_path = Path(cache_dir) / "tokens.json"
                logger.info(
                    f"Using cache directory from AMAZON_ADS_CACHE_DIR: {storage_path}"
                )
            # Check if we're in a Docker container with /app/.cache available
            elif Path("/app/.cache").exists() and Path("/app/.cache").is_dir():
                storage_path = Path("/app/.cache") / "amazon-ads-mcp" / "tokens.json"
                logger.info(f"Using Docker cache directory: {storage_path}")
            else:
                # Fall back to home directory for local development
                storage_path = Path.home() / ".amazon-ads-mcp" / "tokens.json"
                logger.info(f"Using local cache directory: {storage_path}")

        self._storage_path = storage_path
        self._encrypt_at_rest = encrypt_at_rest
        self._cipher = None  # Fernet instance, or None when unavailable/disabled

        # Initialize encryption if requested and available
        if self._encrypt_at_rest:
            self._cipher = self._initialize_encryption()

        # Create directory with restricted permissions (owner-only)
        try:
            self._storage_path.parent.mkdir(mode=0o700, parents=True, exist_ok=True)
        except PermissionError:
            # In some Docker environments, we might not be able to set permissions
            self._storage_path.parent.mkdir(parents=True, exist_ok=True)
            logger.warning(
                f"Could not set restricted permissions on {self._storage_path.parent}"
            )

        # Load existing tokens on startup
        self._load_from_disk()

        # Warn about security implications of token persistence
        logger.info(
            f"Token persistence ENABLED. Refresh tokens will be stored at {self._storage_path}\n"
            f"Security considerations:\n"
            f"  - Tokens are encrypted at rest, but the encryption key is stored alongside\n"
            f"  - Anyone with access to the volume/filesystem can potentially decrypt tokens\n"
            f"  - For production use, set AMAZON_ADS_ENCRYPTION_KEY externally\n"
            f"  - Consider in-memory-only storage (AMAZON_ADS_TOKEN_PERSIST=false) if possible"
        )

    async def set(self, key: TokenKey, entry: TokenEntry) -> None:
        """Store token, persisting refresh tokens to disk."""
        await super().set(key, entry)

        # Only persist refresh tokens; access tokens stay memory-only.
        if key.token_kind == TokenKind.REFRESH:
            await self._persist_to_disk()

    async def invalidate(self, key: TokenKey) -> None:
        """Invalidate token, updating persistence if needed."""
        await super().invalidate(key)

        # Update disk if it was a refresh token
        if key.token_kind == TokenKind.REFRESH:
            await self._persist_to_disk()

    async def clear(self) -> None:
        """Clear all tokens including persistent storage."""
        await super().clear()

        # Clear persistent storage by deleting the token file entirely.
        if self._storage_path.exists():
            self._storage_path.unlink()
            logger.info(f"Cleared persistent token storage at {self._storage_path}")

    def _load_from_disk(self) -> None:
        """Load refresh tokens from disk on startup.

        Best-effort: any failure is logged and the store starts empty.
        Non-refresh and already-expired entries in the file are skipped.
        """
        if not self._storage_path.exists():
            return

        try:
            with open(self._storage_path, "r") as f:
                data = json.load(f)

            # Decrypt if needed
            if self._encrypt_at_rest:
                data = self._decrypt_data(data)

            # Load refresh tokens into memory
            for key_str, entry_dict in data.items():
                key = TokenKey.from_string(key_str)

                # Only load refresh tokens
                if key.token_kind != TokenKind.REFRESH:
                    continue

                entry = TokenEntry.from_dict(entry_dict)

                # Skip expired tokens
                if not entry.is_expired():
                    self._store[key_str] = entry

            logger.info(f"Loaded {len(self._store)} tokens from persistent storage")

        except Exception as e:
            logger.error(f"Failed to load tokens from disk: {e}")

    async def _persist_to_disk(self) -> None:
        """Save refresh tokens to disk (best-effort; failures are logged).

        Writes to a temp file and atomically renames it over the target
        so a crash mid-write cannot corrupt the stored token file.
        """
        try:
            # Extract only refresh tokens
            refresh_tokens = {}
            with self._lock:
                for key_str, entry in self._store.items():
                    key = TokenKey.from_string(key_str)
                    if key.token_kind == TokenKind.REFRESH:
                        refresh_tokens[key_str] = entry.to_dict()

            # Encrypt if needed
            data = refresh_tokens
            if self._encrypt_at_rest:
                data = self._encrypt_data(data)

            # Write with atomic operation for safety
            temp_path = self._storage_path.with_suffix(".tmp")
            with open(temp_path, "w") as f:
                json.dump(data, f, indent=2)

            # Atomic move
            temp_path.replace(self._storage_path)

            # Try to set restrictive permissions (may fail in some Docker environments)
            try:
                self._storage_path.chmod(0o600)
            except (PermissionError, OSError):
                pass  # Permissions might not be settable in Docker

            logger.debug(f"Persisted {len(refresh_tokens)} refresh tokens to disk")

        except Exception as e:
            logger.error(f"Failed to persist tokens to disk: {e}")

    def _initialize_encryption(self) -> Optional[Any]:
        """Initialize encryption cipher for token storage.

        Key resolution order:
        1. AMAZON_ADS_ENCRYPTION_KEY environment variable (44-char
           base64-encoded Fernet key).
        2. A random key persisted next to the token file as
           ``.encryption.key`` (generated on first use, reused after).

        If the cryptography package is missing, raises RuntimeError
        unless AMAZON_ADS_ALLOW_PLAINTEXT_PERSIST=true explicitly opts
        into plaintext storage (then returns None).

        Returns:
            Fernet cipher instance or None if encryption unavailable
        """
        if not CRYPTOGRAPHY_AVAILABLE:
            # Check if plaintext persistence is explicitly allowed
            allow_plaintext = (
                os.getenv("AMAZON_ADS_ALLOW_PLAINTEXT_PERSIST", "false").lower()
                == "true"
            )

            error_msg = (
                "SECURITY ERROR: cryptography library not installed!\n"
                "Tokens would be stored in PLAINTEXT without encryption.\n"
                "Options:\n"
                "1. Install cryptography: pip install cryptography (RECOMMENDED)\n"
                "2. Disable persistence: AMAZON_ADS_TOKEN_PERSIST=false\n"
                "3. Allow plaintext (INSECURE): AMAZON_ADS_ALLOW_PLAINTEXT_PERSIST=true"
            )

            if not allow_plaintext:
                logger.error(error_msg)
                raise RuntimeError(
                    "Refusing to store tokens in plaintext. "
                    "Install cryptography or set AMAZON_ADS_ALLOW_PLAINTEXT_PERSIST=true (not recommended)"
                )
            else:
                logger.warning(
                    "WARNING: Plaintext token storage explicitly allowed!\n"
                    "This is INSECURE and should not be used in production.\n"
                    "Install cryptography: pip install cryptography"
                )
                return None

        try:
            # Try to get encryption key from environment
            env_key = os.getenv("AMAZON_ADS_ENCRYPTION_KEY")

            if env_key:
                # Use provided key (should be base64-encoded Fernet key)
                try:
                    # Validate key format
                    if (
                        len(env_key) != 44
                    ):  # Fernet keys are exactly 44 chars when base64-encoded
                        raise ValueError(
                            f"Invalid key length: {len(env_key)} (expected 44)"
                        )

                    cipher = Fernet(
                        env_key.encode() if isinstance(env_key, str) else env_key
                    )
                    logger.info("Using encryption key from AMAZON_ADS_ENCRYPTION_KEY")
                    return cipher
                except Exception as e:
                    # An invalid env key is NOT fatal: we warn loudly and
                    # fall through to the persisted/generated key below,
                    # which means previously encrypted files become unreadable.
                    logger.error(
                        f"CRITICAL: Invalid AMAZON_ADS_ENCRYPTION_KEY: {e}\n"
                        f"Previously encrypted tokens will be UNREADABLE!\n"
                        f'Generate a valid key with: python -c "from cryptography.fernet import Fernet; print(Fernet.generate_key().decode())"\n'
                        f"Falling back to auto-generated random key"
                    )

            # Generate and persist a strong random key
            # This ensures we always use strong encryption, never weak deterministic keys
            # Use the parent directory of storage_path for the encryption key
            key_file = self._storage_path.parent / ".encryption.key"

            # Check if we're in production-like environment
            is_production = (
                os.getenv("ENV") in ["production", "prod", "staging"]
                or os.getenv("ENVIRONMENT") in ["production", "prod", "staging"]
                or os.getenv("NODE_ENV") in ["production", "prod", "staging"]
            )

            if is_production:
                # In production, warn but don't fail
                logger.warning(
                    "SECURITY WARNING: No AMAZON_ADS_ENCRYPTION_KEY set in production!\n"
                    "Using auto-generated key. For better security, set an explicit key:\n"
                    'Generate one with: python -c "from cryptography.fernet import Fernet; print(Fernet.generate_key().decode())"\n'
                    "Then set: export AMAZON_ADS_ENCRYPTION_KEY='<generated-key>'"
                )

            if key_file.exists():
                # Load existing key
                try:
                    with open(key_file, 'rb') as f:
                        key = f.read()
                    cipher = Fernet(key)
                    logger.info("Loaded persistent encryption key from cache")
                    return cipher
                except Exception as e:
                    # Unreadable/corrupt key file: regenerate below.
                    logger.warning(f"Failed to load existing key: {e}, generating new one")

            # Generate a new random key with strong entropy
            key = Fernet.generate_key()
            cipher = Fernet(key)

            # Save the key for persistence
            try:
                key_file.parent.mkdir(parents=True, exist_ok=True)
                # Set restrictive permissions (owner read/write only)
                with open(key_file, 'wb') as f:
                    f.write(key)
                os.chmod(key_file, 0o600)
                logger.info(
                    f"Generated and saved new encryption key to {key_file}\n"
                    "This strong random key will be reused across sessions.\n"
                    "For explicit control, set AMAZON_ADS_ENCRYPTION_KEY environment variable."
                )
            except Exception as e:
                # Key stays usable in-memory for this process even if
                # persisting it failed; next run will generate a new one.
                logger.warning(f"Could not persist encryption key: {e}")

            return cipher

        except Exception as e:
            logger.error(f"Failed to initialize encryption: {e}")
            return None

    def _encrypt_data(self, data: dict) -> dict:
        """Encrypt token data for at-rest storage.

        Uses Fernet symmetric encryption when available.
        Falls back to plaintext with warning if encryption unavailable
        (only when AMAZON_ADS_ALLOW_PLAINTEXT_PERSIST=true; otherwise
        raises rather than silently downgrading security).
        """
        if not self._cipher:
            if self._encrypt_at_rest:
                # Check if we explicitly allow plaintext
                allow_plaintext = (
                    os.getenv("AMAZON_ADS_ALLOW_PLAINTEXT_PERSIST", "false").lower()
                    == "true"
                )
                if not allow_plaintext:
                    raise RuntimeError(
                        "Cannot encrypt tokens - cryptography not available. "
                        "Install cryptography or set AMAZON_ADS_ALLOW_PLAINTEXT_PERSIST=true"
                    )
                logger.warning(
                    "SECURITY WARNING: Storing tokens in PLAINTEXT (encryption unavailable). "
                    "Install cryptography: pip install cryptography"
                )
            return data

        try:
            # Serialize data to JSON
            json_data = json.dumps(data, separators=(",", ":"))

            # Encrypt the JSON string
            encrypted_bytes = self._cipher.encrypt(json_data.encode())

            # Return as a dict with encrypted data; the "_encrypted" marker
            # lets _decrypt_data distinguish this envelope from plaintext.
            return {
                "_encrypted": True,
                "_version": "1.0",
                "data": base64.b64encode(encrypted_bytes).decode("ascii"),
            }
        except Exception as e:
            # Encryption failure is critical - never fall back to plaintext
            logger.error(f"CRITICAL: Encryption failed: {e}")

            # Check if plaintext persistence is explicitly allowed (for testing only)
            if os.getenv("AMAZON_ADS_ALLOW_PLAINTEXT_PERSIST") == "true":
                logger.warning(
                    "AMAZON_ADS_ALLOW_PLAINTEXT_PERSIST is enabled - storing in plaintext.\n"
                    "This should ONLY be used for testing!"
                )
                return data

            # Raise the exception to prevent silent security downgrade
            raise ValueError(
                f"Token encryption failed: {e}\n"
                "Refusing to store tokens in plaintext.\n"
                "To allow plaintext storage for testing, set AMAZON_ADS_ALLOW_PLAINTEXT_PERSIST=true"
            ) from e

    def _decrypt_data(self, data: dict) -> dict:
        """Decrypt token data from storage.

        Handles both encrypted and plaintext data for compatibility.
        Returns {} (empty store) when decryption is impossible or fails,
        rather than raising, so startup can proceed without tokens.
        """
        # Check if data is encrypted (marker written by _encrypt_data)
        if not isinstance(data, dict) or not data.get("_encrypted"):
            # Data is not encrypted, return as-is
            return data

        if not self._cipher:
            logger.error("Cannot decrypt data - encryption not available")
            return {}

        try:
            # Extract and decode the encrypted data
            encrypted_b64 = data.get("data", "")
            encrypted_bytes = base64.b64decode(encrypted_b64)

            # Decrypt the data
            decrypted_bytes = self._cipher.decrypt(encrypted_bytes)

            # Parse the JSON
            return json.loads(decrypted_bytes.decode())

        except Exception as e:
            logger.error(f"Decryption failed: {e}")
            return {}
|
|
686
|
+
|
|
687
|
+
|
|
688
|
+
# Factory function for creating token stores
|
|
689
|
+
def create_token_store(persist: bool = True, **kwargs) -> TokenStore:
    """Build the appropriate token store.

    SECURITY NOTE: the AMAZON_ADS_TOKEN_PERSIST environment variable
    overrides the ``persist`` argument; its default of "false" keeps
    tokens in memory only.

    With persistence enabled:
    - Tokens are encrypted at rest via Fernet (AES-128) when the
      cryptography package is installed
    - Without cryptography, plaintext storage requires an explicit
      opt-in (AMAZON_ADS_ALLOW_PLAINTEXT_PERSIST=true)
    - Production deployments should supply AMAZON_ADS_ENCRYPTION_KEY

    The locally persisted auto-generated key is intended for
    development use and must not be relied on for production security.

    Args:
        persist: Request persistent storage (subject to the env override).
        **kwargs: Extra configuration forwarded to the chosen store.

    Returns:
        TokenStore instance (InMemory or Persistent based on config)
    """
    # The environment wins over the argument; any value other than
    # "true"/"false" leaves the caller's choice untouched.
    env_persist = os.getenv("AMAZON_ADS_TOKEN_PERSIST", "false").lower()
    if env_persist == "true":
        persist = True
    elif env_persist == "false":
        persist = False

    if not persist:
        logger.info("Creating in-memory token store")
        return InMemoryTokenStore(**kwargs)

    logger.info("Creating persistent token store")
    return PersistentTokenStore(**kwargs)
|