api-mocker 0.1.2__py3-none-any.whl → 0.1.3__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- api_mocker/__init__.py +1 -1
- api_mocker/advanced.py +391 -0
- api_mocker/analytics.py +368 -0
- api_mocker/cli.py +167 -0
- api_mocker/dashboard.py +386 -0
- api_mocker-0.1.3.dist-info/METADATA +441 -0
- api_mocker-0.1.3.dist-info/RECORD +17 -0
- api_mocker-0.1.2.dist-info/METADATA +0 -657
- api_mocker-0.1.2.dist-info/RECORD +0 -14
- {api_mocker-0.1.2.dist-info → api_mocker-0.1.3.dist-info}/WHEEL +0 -0
- {api_mocker-0.1.2.dist-info → api_mocker-0.1.3.dist-info}/entry_points.txt +0 -0
- {api_mocker-0.1.2.dist-info → api_mocker-0.1.3.dist-info}/licenses/LICENSE +0 -0
- {api_mocker-0.1.2.dist-info → api_mocker-0.1.3.dist-info}/top_level.txt +0 -0
api_mocker/analytics.py
ADDED
|
@@ -0,0 +1,368 @@
|
|
|
1
|
+
"""
|
|
2
|
+
Analytics and metrics tracking for api-mocker.
|
|
3
|
+
"""
|
|
4
|
+
|
|
5
|
+
import json
|
|
6
|
+
import time
|
|
7
|
+
import uuid
|
|
8
|
+
from datetime import datetime, timezone
|
|
9
|
+
from pathlib import Path
|
|
10
|
+
from typing import Dict, List, Optional, Any
|
|
11
|
+
import sqlite3
|
|
12
|
+
import threading
|
|
13
|
+
from dataclasses import dataclass, asdict
|
|
14
|
+
import hashlib
|
|
15
|
+
import platform
|
|
16
|
+
import psutil
|
|
17
|
+
import requests
|
|
18
|
+
from contextlib import contextmanager
|
|
19
|
+
|
|
20
|
+
@dataclass
class RequestMetrics:
    """Snapshot of a single handled HTTP request."""

    request_id: str               # unique id assigned to this request
    timestamp: float              # epoch seconds when handling started
    method: str                   # HTTP verb, e.g. "GET"
    path: str                     # path component of the requested URL
    status_code: int              # HTTP status returned to the client
    response_time_ms: float       # wall-clock handling time, milliseconds
    request_size_bytes: int       # approximate size of the incoming request
    response_size_bytes: int      # approximate size of the outgoing response
    user_agent: str               # User-Agent header value (may be empty)
    ip_address: str               # client address (may be empty)
    path_params: Dict[str, str]   # parameters extracted from the path
    query_params: Dict[str, str]  # parameters from the query string
|
|
35
|
+
|
|
36
|
+
@dataclass
class ServerMetrics:
    """Aggregate health/traffic snapshot for one server instance."""

    server_id: str                  # unique id of the reporting server
    start_time: float               # epoch seconds when the server started
    uptime_seconds: float           # seconds elapsed since start_time
    total_requests: int             # requests handled since start
    requests_per_minute: float      # requests observed in the last minute
    average_response_time_ms: float # mean handling time across all requests
    error_rate: float               # percentage of responses with status >= 400
    memory_usage_mb: float          # process/host memory footprint, megabytes
    cpu_usage_percent: float        # CPU utilisation percentage
    active_connections: int         # currently open connections
|
|
49
|
+
|
|
50
|
+
@dataclass
class UserSession:
    """Activity record for one client session."""

    session_id: str        # unique id of the session
    start_time: float      # epoch seconds when the session began
    last_activity: float   # epoch seconds of the most recent request
    total_requests: int    # requests made during this session
    unique_endpoints: int  # count of distinct paths the session has hit
    user_agent: str        # User-Agent header value (may be empty)
    ip_address: str        # client address (may be empty)
|
|
60
|
+
|
|
61
|
+
class AnalyticsManager:
    """Collects and persists analytics for a mock-server instance.

    Request, server and feature-usage metrics are stored in a local SQLite
    database.  Writes are serialized through a lock so a single manager can
    be shared by concurrent request handlers; each manager gets a unique
    ``server_id`` so several server processes can share one database file.
    """

    def __init__(self, db_path: str = "api_mocker_analytics.db"):
        """Open (creating if necessary) the analytics database at *db_path*."""
        self.db_path = Path(db_path)
        self.db_path.parent.mkdir(parents=True, exist_ok=True)
        self._lock = threading.Lock()
        self.server_id = str(uuid.uuid4())
        self.start_time = time.time()
        self._init_database()

    def _init_database(self):
        """Create the analytics tables and indexes if they do not yet exist."""
        with sqlite3.connect(self.db_path) as conn:
            conn.execute("""
                CREATE TABLE IF NOT EXISTS request_metrics (
                    id INTEGER PRIMARY KEY AUTOINCREMENT,
                    request_id TEXT NOT NULL,
                    timestamp REAL NOT NULL,
                    method TEXT NOT NULL,
                    path TEXT NOT NULL,
                    status_code INTEGER NOT NULL,
                    response_time_ms REAL NOT NULL,
                    request_size_bytes INTEGER NOT NULL,
                    response_size_bytes INTEGER NOT NULL,
                    user_agent TEXT,
                    ip_address TEXT,
                    path_params TEXT,
                    query_params TEXT,
                    server_id TEXT NOT NULL
                )
            """)

            conn.execute("""
                CREATE TABLE IF NOT EXISTS server_metrics (
                    id INTEGER PRIMARY KEY AUTOINCREMENT,
                    server_id TEXT NOT NULL,
                    timestamp REAL NOT NULL,
                    uptime_seconds REAL NOT NULL,
                    total_requests INTEGER NOT NULL,
                    requests_per_minute REAL NOT NULL,
                    average_response_time_ms REAL NOT NULL,
                    error_rate REAL NOT NULL,
                    memory_usage_mb REAL NOT NULL,
                    cpu_usage_percent REAL NOT NULL,
                    active_connections INTEGER NOT NULL
                )
            """)

            conn.execute("""
                CREATE TABLE IF NOT EXISTS user_sessions (
                    id INTEGER PRIMARY KEY AUTOINCREMENT,
                    session_id TEXT NOT NULL,
                    start_time REAL NOT NULL,
                    last_activity REAL NOT NULL,
                    total_requests INTEGER NOT NULL,
                    unique_endpoints INTEGER NOT NULL,
                    user_agent TEXT,
                    ip_address TEXT,
                    server_id TEXT NOT NULL
                )
            """)

            conn.execute("""
                CREATE TABLE IF NOT EXISTS feature_usage (
                    id INTEGER PRIMARY KEY AUTOINCREMENT,
                    feature_name TEXT NOT NULL,
                    usage_count INTEGER NOT NULL,
                    last_used REAL NOT NULL,
                    server_id TEXT NOT NULL
                )
            """)

            # Indexes on the hot query columns (time ranges and per-path
            # aggregations) used by get_server_metrics / get_analytics_summary.
            conn.execute("CREATE INDEX IF NOT EXISTS idx_request_timestamp ON request_metrics(timestamp)")
            conn.execute("CREATE INDEX IF NOT EXISTS idx_request_path ON request_metrics(path)")
            conn.execute("CREATE INDEX IF NOT EXISTS idx_server_metrics_timestamp ON server_metrics(timestamp)")

    def track_request(self, metrics: "RequestMetrics"):
        """Persist the metrics for a single handled request."""
        with self._lock:
            with sqlite3.connect(self.db_path) as conn:
                conn.execute("""
                    INSERT INTO request_metrics
                    (request_id, timestamp, method, path, status_code, response_time_ms,
                     request_size_bytes, response_size_bytes, user_agent, ip_address,
                     path_params, query_params, server_id)
                    VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?)
                """, (
                    metrics.request_id, metrics.timestamp, metrics.method, metrics.path,
                    metrics.status_code, metrics.response_time_ms, metrics.request_size_bytes,
                    metrics.response_size_bytes, metrics.user_agent, metrics.ip_address,
                    # Param dicts are stored as JSON text columns.
                    json.dumps(metrics.path_params), json.dumps(metrics.query_params),
                    self.server_id
                ))

    def get_server_metrics(self) -> "ServerMetrics":
        """Compute and return a current ServerMetrics snapshot.

        Aggregates are computed over this server's rows only (filtered by
        ``server_id``); system figures come from psutil.
        """
        with sqlite3.connect(self.db_path) as conn:
            total_requests = conn.execute(
                "SELECT COUNT(*) FROM request_metrics WHERE server_id = ?",
                (self.server_id,)
            ).fetchone()[0]

            # Requests observed during the trailing 60 seconds.
            one_minute_ago = time.time() - 60
            recent_requests = conn.execute(
                "SELECT COUNT(*) FROM request_metrics WHERE server_id = ? AND timestamp > ?",
                (self.server_id, one_minute_ago)
            ).fetchone()[0]

            # AVG() returns NULL (None) when there are no rows; default to 0.
            avg_response_time = conn.execute(
                "SELECT AVG(response_time_ms) FROM request_metrics WHERE server_id = ?",
                (self.server_id,)
            ).fetchone()[0] or 0

            total_errors = conn.execute(
                "SELECT COUNT(*) FROM request_metrics WHERE server_id = ? AND status_code >= 400",
                (self.server_id,)
            ).fetchone()[0]
            error_rate = (total_errors / total_requests * 100) if total_requests > 0 else 0

            # System metrics.  Memory is reported in megabytes to match the
            # memory_usage_mb field (a previous revision stored a percentage
            # here, which mislabeled the value).
            memory_usage_mb = psutil.virtual_memory().used / (1024 * 1024)
            cpu_usage = psutil.cpu_percent()

            return ServerMetrics(
                server_id=self.server_id,
                start_time=self.start_time,
                uptime_seconds=time.time() - self.start_time,
                total_requests=total_requests,
                requests_per_minute=recent_requests,
                average_response_time_ms=avg_response_time,
                error_rate=error_rate,
                memory_usage_mb=memory_usage_mb,
                cpu_usage_percent=cpu_usage,
                active_connections=0  # TODO: Implement connection tracking
            )

    def track_feature_usage(self, feature_name: str):
        """Increment the usage counter for *feature_name* (creating it at 1)."""
        with self._lock:
            with sqlite3.connect(self.db_path) as conn:
                existing = conn.execute(
                    "SELECT usage_count FROM feature_usage WHERE feature_name = ? AND server_id = ?",
                    (feature_name, self.server_id)
                ).fetchone()

                if existing:
                    conn.execute("""
                        UPDATE feature_usage
                        SET usage_count = usage_count + 1, last_used = ?
                        WHERE feature_name = ? AND server_id = ?
                    """, (time.time(), feature_name, self.server_id))
                else:
                    conn.execute("""
                        INSERT INTO feature_usage (feature_name, usage_count, last_used, server_id)
                        VALUES (?, 1, ?, ?)
                    """, (feature_name, time.time(), self.server_id))

    def get_analytics_summary(self, hours: int = 24) -> Dict[str, Any]:
        """Return an aggregate report for the trailing *hours*-hour window.

        The report contains request totals, method/status distributions,
        the most popular and slowest endpoints, feature usage counters and
        a live ServerMetrics snapshot.
        """
        since = time.time() - (hours * 3600)

        with sqlite3.connect(self.db_path) as conn:
            total_requests = conn.execute(
                "SELECT COUNT(*) FROM request_metrics WHERE server_id = ? AND timestamp > ?",
                (self.server_id, since)
            ).fetchone()[0]

            methods = conn.execute("""
                SELECT method, COUNT(*) as count
                FROM request_metrics
                WHERE server_id = ? AND timestamp > ?
                GROUP BY method
            """, (self.server_id, since)).fetchall()

            status_codes = conn.execute("""
                SELECT status_code, COUNT(*) as count
                FROM request_metrics
                WHERE server_id = ? AND timestamp > ?
                GROUP BY status_code
            """, (self.server_id, since)).fetchall()

            popular_endpoints = conn.execute("""
                SELECT path, COUNT(*) as count
                FROM request_metrics
                WHERE server_id = ? AND timestamp > ?
                GROUP BY path
                ORDER BY count DESC
                LIMIT 10
            """, (self.server_id, since)).fetchall()

            response_times = conn.execute("""
                SELECT path, AVG(response_time_ms) as avg_time
                FROM request_metrics
                WHERE server_id = ? AND timestamp > ?
                GROUP BY path
                ORDER BY avg_time DESC
                LIMIT 10
            """, (self.server_id, since)).fetchall()

            # Feature usage is cumulative (not windowed by `since`).
            feature_usage = conn.execute("""
                SELECT feature_name, usage_count
                FROM feature_usage
                WHERE server_id = ?
                ORDER BY usage_count DESC
            """, (self.server_id,)).fetchall()

            return {
                "period_hours": hours,
                "total_requests": total_requests,
                "methods": dict(methods),
                "status_codes": dict(status_codes),
                "popular_endpoints": dict(popular_endpoints),
                "slowest_endpoints": dict(response_times),
                "feature_usage": dict(feature_usage),
                "server_metrics": asdict(self.get_server_metrics())
            }

    def export_analytics(self, output_path: str, format: str = "json"):
        """Export the 24h analytics summary to *output_path*.

        Supported formats: ``json`` (pretty-printed object) and ``csv``
        (flattened section/key/value rows).  Raises ValueError for any
        other format instead of silently writing nothing.
        """
        summary = self.get_analytics_summary()
        fmt = format.lower()

        if fmt == "json":
            with open(output_path, 'w') as f:
                json.dump(summary, f, indent=2)
        elif fmt == "csv":
            import csv
            with open(output_path, 'w', newline='') as f:
                writer = csv.writer(f)
                writer.writerow(["section", "key", "value"])
                for section, value in summary.items():
                    if isinstance(value, dict):
                        for key, item in value.items():
                            writer.writerow([section, key, item])
                    else:
                        writer.writerow([section, "", value])
        else:
            raise ValueError(f"Unsupported export format: {format}")

    def cleanup_old_data(self, days: int = 30):
        """Delete request/server/session rows older than *days* days.

        Feature-usage counters are cumulative and deliberately retained.
        """
        cutoff = time.time() - (days * 24 * 3600)

        with sqlite3.connect(self.db_path) as conn:
            conn.execute(
                "DELETE FROM request_metrics WHERE timestamp < ?",
                (cutoff,)
            )
            conn.execute(
                "DELETE FROM server_metrics WHERE timestamp < ?",
                (cutoff,)
            )
            conn.execute(
                "DELETE FROM user_sessions WHERE last_activity < ?",
                (cutoff,)
            )
|
|
319
|
+
|
|
320
|
+
class AnalyticsMiddleware:
    """FastAPI middleware for automatic analytics tracking."""

    def __init__(self, analytics_manager: AnalyticsManager):
        self.analytics = analytics_manager

    async def __call__(self, request, call_next):
        started = time.time()

        # Every incoming request counts as one "http_request" feature use.
        self.analytics.track_feature_usage("http_request")

        response = await call_next(request)

        elapsed_ms = (time.time() - started) * 1000

        # Sizes are rough estimates derived from the string form of the
        # parts we can inspect without consuming the body stream.
        inbound = len(str(request.headers)) + len(str(request.query_params))
        if hasattr(request, 'body'):
            inbound += len(str(request.body))

        outbound = len(str(response.headers))
        if hasattr(response, 'body'):
            outbound += len(str(response.body))

        record = RequestMetrics(
            request_id=str(uuid.uuid4()),
            timestamp=started,
            method=request.method,
            path=request.url.path,
            status_code=response.status_code,
            response_time_ms=elapsed_ms,
            request_size_bytes=inbound,
            response_size_bytes=outbound,
            user_agent=request.headers.get("user-agent", ""),
            ip_address=request.client.host if request.client else "",
            path_params=dict(request.path_params),
            query_params=dict(request.query_params)
        )
        self.analytics.track_request(record)

        return response
|
api_mocker/cli.py
CHANGED
|
@@ -11,6 +11,9 @@ from api_mocker import MockServer
|
|
|
11
11
|
from api_mocker.openapi import OpenAPIParser, PostmanImporter
|
|
12
12
|
from api_mocker.recorder import RequestRecorder, ProxyRecorder, ReplayEngine
|
|
13
13
|
from api_mocker.plugins import PluginManager, BUILTIN_PLUGINS
|
|
14
|
+
from api_mocker.analytics import AnalyticsManager
|
|
15
|
+
from api_mocker.dashboard import DashboardManager
|
|
16
|
+
from api_mocker.advanced import AdvancedFeatures, RateLimitConfig, CacheConfig, AuthConfig
|
|
14
17
|
|
|
15
18
|
app = typer.Typer(help="api-mocker: The industry-standard, production-ready, free API mocking and development acceleration tool.")
|
|
16
19
|
console = Console()
|
|
@@ -493,5 +496,169 @@ api-mocker record https://api.example.com --output recordings/recorded.json
|
|
|
493
496
|
console.print(f"[red]✗[/red] Failed to create project: {e}")
|
|
494
497
|
raise typer.Exit(1)
|
|
495
498
|
|
|
499
|
+
@app.command()
def analytics(
    action: str = typer.Argument(..., help="Analytics action (dashboard, export, summary)"),
    hours: int = typer.Option(24, "--hours", help="Time period for analytics (hours)"),
    output: str = typer.Option(None, "--output", help="Output file for export"),
    format: str = typer.Option("json", "--format", help="Export format (json, csv)"),
):
    """Manage analytics and metrics.

    Actions:
        dashboard - launch the live analytics dashboard
        export    - write an analytics report to a file (json or csv)
        summary   - print an aggregate table for the last *hours* hours
    """
    try:
        analytics_manager = AnalyticsManager()

        if action == "dashboard":
            console.print("[blue]📊[/blue] Starting analytics dashboard...")
            dashboard = DashboardManager(analytics_manager)
            dashboard.start()

        elif action == "export":
            # Default to a timestamped file name when none is supplied.
            if not output:
                output = f"analytics-{int(time.time())}.{format}"

            console.print(f"[blue]📤[/blue] Exporting analytics to {output}...")
            analytics_manager.export_analytics(output, format)
            console.print(f"[green]✓[/green] Analytics exported to: {output}")

        elif action == "summary":
            console.print(f"[blue]📈[/blue] Generating analytics summary for last {hours} hours...")
            summary = analytics_manager.get_analytics_summary(hours)

            table = Table(title=f"Analytics Summary (Last {hours} hours)")
            table.add_column("Metric", style="cyan")
            table.add_column("Value", style="green")

            table.add_row("Total Requests", str(summary["total_requests"]))
            table.add_row("Popular Endpoints", str(len(summary["popular_endpoints"])))
            table.add_row("Average Response Time", f"{summary['server_metrics']['average_response_time_ms']:.2f}ms")
            table.add_row("Error Rate", f"{summary['server_metrics']['error_rate']:.2f}%")

            console.print(table)

        else:
            console.print(f"[red]✗[/red] Unknown action: {action}")
            raise typer.Exit(1)

    except typer.Exit:
        # Intentional exits (e.g. unknown action) must not be relabeled as
        # analytics errors by the generic handler below.
        raise
    except Exception as e:
        console.print(f"[red]✗[/red] Analytics error: {e}")
        raise typer.Exit(1)
|
|
546
|
+
|
|
547
|
+
def _load_feature_config(config_file: str) -> dict:
    """Load a feature-configuration mapping from a JSON or YAML file."""
    with open(config_file, 'r') as f:
        if config_file.endswith(('.yaml', '.yml')):
            import yaml
            return yaml.safe_load(f)
        return json.load(f)


@app.command()
def advanced(
    feature: str = typer.Argument(..., help="Advanced feature (rate-limit, cache, auth, health)"),
    config_file: str = typer.Option(None, "--config", help="Configuration file path"),
    enable: bool = typer.Option(True, "--enable/--disable", help="Enable or disable feature"),
):
    """Configure advanced features.

    Features:
        rate-limit - configure request throttling
        cache      - configure response caching
        auth       - configure token authentication
        health     - run and report the built-in health checks

    Defaults are used unless --config points at a JSON/YAML file with a
    matching top-level section ("rate_limit", "cache" or "auth").
    """
    try:
        if feature == "rate-limit":
            console.print("[blue]🛡️[/blue] Configuring rate limiting...")

            config = RateLimitConfig(
                requests_per_minute=60,
                requests_per_hour=1000,
                burst_size=10
            )
            if config_file:
                config = RateLimitConfig(**_load_feature_config(config_file).get("rate_limit", {}))

            console.print("[green]✓[/green] Rate limiting configured:")
            console.print(f"  - Requests per minute: {config.requests_per_minute}")
            console.print(f"  - Requests per hour: {config.requests_per_hour}")
            console.print(f"  - Burst size: {config.burst_size}")

        elif feature == "cache":
            console.print("[blue]⚡[/blue] Configuring caching...")

            config = CacheConfig(
                enabled=True,
                ttl_seconds=300,
                max_size=1000,
                strategy="lru"
            )
            if config_file:
                config = CacheConfig(**_load_feature_config(config_file).get("cache", {}))

            console.print("[green]✓[/green] Caching configured:")
            console.print(f"  - Enabled: {config.enabled}")
            console.print(f"  - TTL: {config.ttl_seconds} seconds")
            console.print(f"  - Max size: {config.max_size}")
            console.print(f"  - Strategy: {config.strategy}")

        elif feature == "auth":
            console.print("[blue]🔐[/blue] Configuring authentication...")

            config = AuthConfig(
                enabled=True,
                secret_key="your-secret-key-change-this",
                algorithm="HS256",
                token_expiry_hours=24
            )
            if config_file:
                config = AuthConfig(**_load_feature_config(config_file).get("auth", {}))

            console.print("[green]✓[/green] Authentication configured:")
            console.print(f"  - Enabled: {config.enabled}")
            console.print(f"  - Algorithm: {config.algorithm}")
            console.print(f"  - Token expiry: {config.token_expiry_hours} hours")

        elif feature == "health":
            console.print("[blue]🏥[/blue] Running health checks...")

            from api_mocker.advanced import HealthChecker, check_database_connection, check_memory_usage, check_disk_space

            health_checker = HealthChecker()
            health_checker.add_check("database", check_database_connection)
            health_checker.add_check("memory", check_memory_usage)
            health_checker.add_check("disk", check_disk_space)

            status = health_checker.get_health_status()

            table = Table(title="Health Check Results")
            table.add_column("Check", style="cyan")
            table.add_column("Status", style="green")

            for check_name, check_status in status["checks"].items():
                status_icon = "✓" if check_status else "✗"
                status_color = "green" if check_status else "red"
                table.add_row(check_name, f"[{status_color}]{status_icon}[/{status_color}]")

            console.print(table)
            console.print(f"Overall status: {status['status']}")

        else:
            console.print(f"[red]✗[/red] Unknown feature: {feature}")
            raise typer.Exit(1)

    except typer.Exit:
        # Intentional exits (e.g. unknown feature) must not be relabeled as
        # feature errors by the generic handler below.
        raise
    except Exception as e:
        console.print(f"[red]✗[/red] Advanced feature error: {e}")
        raise typer.Exit(1)
|
|
662
|
+
|
|
496
663
|
if __name__ == "__main__":
|
|
497
664
|
app()
|