@joaosens/fullstack-prompts 1.0.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/.prompts/About.Me.md +188 -0
- package/.prompts/Backend.Rules.md +325 -0
- package/.prompts/Frontend.Rules.md +366 -0
- package/.prompts/Main.Context.md +165 -0
- package/.prompts/System.Example.md +2165 -0
- package/package.json +18 -0
|
@@ -0,0 +1,2165 @@
|
|
|
1
|
+
# Production Engineering Reference
|
|
2
|
+
## GitHub Analytics Platform — Senior Architecture Patterns
|
|
3
|
+
|
|
4
|
+
---
|
|
5
|
+
|
|
6
|
+
# TABLE OF CONTENTS
|
|
7
|
+
|
|
8
|
+
1. Project Structure & App Factory
|
|
9
|
+
2. Middleware Architecture
|
|
10
|
+
3. Logging Middleware
|
|
11
|
+
4. Redis Rate Limiting
|
|
12
|
+
5. JWT Auth System
|
|
13
|
+
6. Service Layer Patterns
|
|
14
|
+
7. DTO / Pydantic Validation
|
|
15
|
+
8. PostgreSQL Integration (Async)
|
|
16
|
+
9. Docker Infrastructure
|
|
17
|
+
10. External API Integration (GitHub)
|
|
18
|
+
11. Retry & Resilience Patterns
|
|
19
|
+
12. Error Handling Architecture
|
|
20
|
+
13. Frontend Service Abstraction
|
|
21
|
+
14. React Scalable Folder Structure
|
|
22
|
+
15. TailwindCSS System
|
|
23
|
+
16. React Three Fiber Scene Architecture
|
|
24
|
+
17. API Client Abstraction
|
|
25
|
+
18. Observability & Logging Patterns
|
|
26
|
+
|
|
27
|
+
---
|
|
28
|
+
|
|
29
|
+
# 1. FASTAPI MODULAR ARCHITECTURE
|
|
30
|
+
|
|
31
|
+
## Why This Structure
|
|
32
|
+
|
|
33
|
+
The application is split by responsibility, not by file type. Each domain (auth, github, users) is self-contained. The app factory pattern (`create_app`) keeps the entry point clean and testable — you can instantiate the app with different configs for testing without touching global state.
|
|
34
|
+
|
|
35
|
+
## Project Layout
|
|
36
|
+
|
|
37
|
+
```
|
|
38
|
+
backend/
|
|
39
|
+
├── app/
|
|
40
|
+
│ ├── __init__.py
|
|
41
|
+
│ ├── main.py # App factory — entry point
|
|
42
|
+
│ ├── config.py # Typed settings via Pydantic BaseSettings
|
|
43
|
+
│ ├── dependencies.py # Shared FastAPI dependencies
|
|
44
|
+
│ │
|
|
45
|
+
│ ├── core/
|
|
46
|
+
│ │ ├── database.py # Async SQLAlchemy engine + session factory
|
|
47
|
+
│ │ ├── redis.py # Redis connection pool
|
|
48
|
+
│ │ ├── security.py # JWT encode/decode, password hashing
|
|
49
|
+
│ │ └── exceptions.py # Domain exception hierarchy
|
|
50
|
+
│ │
|
|
51
|
+
│ ├── middleware/
|
|
52
|
+
│ │ ├── logging.py # Structured request/response logging
|
|
53
|
+
│ │ ├── rate_limit.py # Redis-backed rate limiting
|
|
54
|
+
│ │ └── auth.py # JWT validation middleware (if global)
|
|
55
|
+
│ │
|
|
56
|
+
│ ├── modules/
|
|
57
|
+
│ │ ├── auth/
|
|
58
|
+
│ │ │ ├── router.py # Route definitions only
|
|
59
|
+
│ │ │ ├── service.py # Auth business logic
|
|
60
|
+
│ │ │ ├── schemas.py # Request/response DTOs
|
|
61
|
+
│ │ │ └── dependencies.py # Auth-specific FastAPI deps
|
|
62
|
+
│ │ │
|
|
63
|
+
│ │ ├── github/
|
|
64
|
+
│ │ │ ├── router.py
|
|
65
|
+
│ │ │ ├── service.py # GitHub orchestration logic
|
|
66
|
+
│ │ │ ├── client.py # GitHub HTTP client (external API)
|
|
67
|
+
│ │ │ ├── schemas.py
|
|
68
|
+
│ │ │ └── cache.py # GitHub-specific Redis caching
|
|
69
|
+
│ │ │
|
|
70
|
+
│ │ └── users/
|
|
71
|
+
│ │ ├── router.py
|
|
72
|
+
│ │ ├── service.py
|
|
73
|
+
│ │ ├── schemas.py
|
|
74
|
+
│ │ └── repository.py # DB access layer for users
|
|
75
|
+
│ │
|
|
76
|
+
│ └── models/
|
|
77
|
+
│ ├── user.py # SQLAlchemy ORM models
|
|
78
|
+
│ └── github_stat.py
|
|
79
|
+
│
|
|
80
|
+
├── tests/
|
|
81
|
+
│ ├── conftest.py
|
|
82
|
+
│ ├── test_auth/
|
|
83
|
+
│ └── test_github/
|
|
84
|
+
│
|
|
85
|
+
├── Dockerfile
|
|
86
|
+
├── docker-compose.yml
|
|
87
|
+
└── pyproject.toml
|
|
88
|
+
```
|
|
89
|
+
|
|
90
|
+
## App Factory — `main.py`
|
|
91
|
+
|
|
92
|
+
```python
|
|
93
|
+
# app/main.py
|
|
94
|
+
from contextlib import asynccontextmanager
|
|
95
|
+
|
|
96
|
+
from fastapi import FastAPI
|
|
97
|
+
from fastapi.middleware.cors import CORSMiddleware
|
|
98
|
+
|
|
99
|
+
from app.config import get_settings
|
|
100
|
+
from app.core.database import init_db
|
|
101
|
+
from app.core.redis import init_redis, close_redis
|
|
102
|
+
from app.middleware.logging import LoggingMiddleware
|
|
103
|
+
from app.middleware.rate_limit import RateLimitMiddleware
|
|
104
|
+
from app.modules.auth.router import router as auth_router
|
|
105
|
+
from app.modules.github.router import router as github_router
|
|
106
|
+
from app.modules.users.router import router as users_router
|
|
107
|
+
|
|
108
|
+
|
|
109
|
+
@asynccontextmanager
async def lifespan(app: FastAPI):
    """
    Startup/shutdown hook replacing the deprecated on_event handlers
    (FastAPI 0.93+). Everything before `yield` runs before the first
    request is accepted; everything after runs once on shutdown.
    This is where connection pools are created and torn down.
    """
    settings = get_settings()

    # Fail fast: if the database or Redis is unreachable the app never starts.
    await init_db()
    await init_redis(settings.REDIS_URL)

    yield  # The application serves requests while suspended here.

    await close_redis()
|
|
124
|
+
|
|
125
|
+
|
|
126
|
+
def create_app() -> FastAPI:
    """
    Application factory. Builds a fully configured FastAPI instance so
    tests can create isolated apps without touching global state.

    Returns:
        FastAPI: app with middleware, routers, and lifespan wired up.
    """
    settings = get_settings()

    app = FastAPI(
        title="GitHub Analytics API",
        version="1.0.0",
        # Hide interactive docs in production to reduce attack surface.
        docs_url="/docs" if settings.ENVIRONMENT != "production" else None,
        redoc_url=None,
        lifespan=lifespan,
    )

    # Starlette stacks middleware: the LAST one added is the OUTERMOST.
    # Add innermost first so the request-in order is
    # CORS -> RateLimit -> Logging -> router, matching the documented
    # execution order. (The original added CORS first, which made it the
    # INNERMOST layer — preflight and rejected CORS requests would have
    # consumed rate-limit quota and been logged as ordinary traffic.)
    app.add_middleware(LoggingMiddleware)
    app.add_middleware(RateLimitMiddleware)

    # CORS added last = outermost wrapper.
    app.add_middleware(
        CORSMiddleware,
        allow_origins=settings.ALLOWED_ORIGINS,
        allow_credentials=True,
        allow_methods=["*"],
        allow_headers=["*"],
    )

    # Routers — prefix and tag grouping for OpenAPI clarity
    app.include_router(auth_router, prefix="/auth", tags=["auth"])
    app.include_router(github_router, prefix="/github", tags=["github"])
    app.include_router(users_router, prefix="/users", tags=["users"])

    return app


app = create_app()
|
|
159
|
+
```
|
|
160
|
+
|
|
161
|
+
## Typed Settings — `config.py`
|
|
162
|
+
|
|
163
|
+
```python
|
|
164
|
+
# app/config.py
|
|
165
|
+
from functools import lru_cache
from typing import List

from pydantic import PostgresDsn, RedisDsn, field_validator
from pydantic_settings import BaseSettings, SettingsConfigDict
|
|
170
|
+
|
|
171
|
+
|
|
172
|
+
class Settings(BaseSettings):
    """
    Typed application configuration loaded from the environment and an
    optional `.env` file. Fields without defaults — SECRET_KEY,
    DATABASE_URL, REDIS_URL, GITHUB_API_TOKEN — are required; validation
    fails fast at startup when they are missing.
    """

    # pydantic-settings v2 configuration. The nested `class Config`
    # used previously is the deprecated pydantic v1 idiom.
    model_config = SettingsConfigDict(env_file=".env", case_sensitive=True)

    ENVIRONMENT: str = "development"

    # JWT signing configuration.
    SECRET_KEY: str
    ALGORITHM: str = "HS256"
    ACCESS_TOKEN_EXPIRE_MINUTES: int = 30
    REFRESH_TOKEN_EXPIRE_DAYS: int = 7

    # DSN types validate scheme/host shape at startup.
    DATABASE_URL: PostgresDsn
    REDIS_URL: RedisDsn

    GITHUB_API_TOKEN: str
    GITHUB_API_BASE_URL: str = "https://api.github.com"

    # Default covers the local Vite dev server.
    ALLOWED_ORIGINS: List[str] = ["http://localhost:5173"]

    RATE_LIMIT_REQUESTS: int = 100
    RATE_LIMIT_WINDOW_SECONDS: int = 60
|
|
193
|
+
|
|
194
|
+
|
|
195
|
+
@lru_cache
def get_settings() -> Settings:
    """
    Return the process-wide Settings instance.

    `lru_cache` memoizes the first call, so the environment is parsed
    exactly once per process; every subsequent call from anywhere in
    the application receives the same cached object.
    """
    return Settings()
|
|
202
|
+
```
|
|
203
|
+
|
|
204
|
+
---
|
|
205
|
+
|
|
206
|
+
# 2. MIDDLEWARE ARCHITECTURE
|
|
207
|
+
|
|
208
|
+
## Design Principle
|
|
209
|
+
|
|
210
|
+
Each middleware handles exactly one concern. Middleware runs outside the route lifecycle — it cannot access route dependencies, so it must be self-contained. Think of middleware as infrastructure, not business logic.
|
|
211
|
+
|
|
212
|
+
## Middleware Execution Order
|
|
213
|
+
|
|
214
|
+
```
|
|
215
|
+
Request IN:
|
|
216
|
+
CORS → RateLimit → Logging → Router → Service → DB
|
|
217
|
+
|
|
218
|
+
Response OUT:
|
|
219
|
+
DB → Service → Router → Logging → RateLimit → CORS
|
|
220
|
+
```
|
|
221
|
+
|
|
222
|
+
The outermost middleware (added last via `add_middleware`) wraps everything. FastAPI's `add_middleware` uses a stack — last added = outermost wrapper.
|
|
223
|
+
|
|
224
|
+
---
|
|
225
|
+
|
|
226
|
+
# 3. LOGGING MIDDLEWARE
|
|
227
|
+
|
|
228
|
+
## Why Structured Logging
|
|
229
|
+
|
|
230
|
+
Raw print statements are noise. Structured logs are queryable. When production incidents happen, you need to filter by `request_id`, `user_id`, `route`, or `duration_ms` — not grep through unstructured strings.
|
|
231
|
+
|
|
232
|
+
```python
|
|
233
|
+
# app/middleware/logging.py
|
|
234
|
+
import time
|
|
235
|
+
import uuid
|
|
236
|
+
import logging
|
|
237
|
+
from typing import Callable
|
|
238
|
+
|
|
239
|
+
from fastapi import Request, Response
|
|
240
|
+
from starlette.middleware.base import BaseHTTPMiddleware
|
|
241
|
+
from starlette.types import ASGIApp
|
|
242
|
+
|
|
243
|
+
logger = logging.getLogger("api.requests")
|
|
244
|
+
|
|
245
|
+
|
|
246
|
+
class LoggingMiddleware(BaseHTTPMiddleware):
    """
    Structured logging for the full request/response lifecycle.

    - Generates a UUID per request and stores it on `request.state` so
      downstream code can correlate its own log lines.
    - Echoes the id back via the `X-Request-ID` response header so
      support can correlate client reports with server logs.
    - Never logs request bodies by default — PII risk.

    The boilerplate `__init__` that only delegated to
    `BaseHTTPMiddleware.__init__` has been removed; the base class
    already provides it.
    """

    async def dispatch(self, request: Request, call_next: Callable) -> Response:
        """Log start/end (or failure) of the request, timing it precisely."""
        request_id = str(uuid.uuid4())
        # perf_counter is monotonic — immune to wall-clock adjustments.
        start_time = time.perf_counter()

        # Attach request_id to request state for downstream access
        request.state.request_id = request_id

        logger.info(
            "request_started",
            extra={
                "request_id": request_id,
                "method": request.method,
                "path": request.url.path,
                "client_ip": self._get_client_ip(request),
                "user_agent": request.headers.get("user-agent", "unknown"),
            },
        )

        try:
            response = await call_next(request)
            duration_ms = (time.perf_counter() - start_time) * 1000

            logger.info(
                "request_completed",
                extra={
                    "request_id": request_id,
                    "method": request.method,
                    "path": request.url.path,
                    "status_code": response.status_code,
                    "duration_ms": round(duration_ms, 2),
                },
            )

            # Propagate request_id to client for support correlation
            response.headers["X-Request-ID"] = request_id
            return response

        except Exception as exc:
            duration_ms = (time.perf_counter() - start_time) * 1000
            logger.error(
                "request_failed",
                extra={
                    "request_id": request_id,
                    "method": request.method,
                    "path": request.url.path,
                    "duration_ms": round(duration_ms, 2),
                    "error": str(exc),
                },
                exc_info=True,
            )
            # Re-raise so the framework's exception handlers still run.
            raise

    @staticmethod
    def _get_client_ip(request: Request) -> str:
        """
        Respect X-Forwarded-For when behind NGINX/load balancer.
        Falls back to direct connection IP.
        """
        forwarded_for = request.headers.get("X-Forwarded-For")
        if forwarded_for:
            # First entry is the originating client; later hops append.
            return forwarded_for.split(",")[0].strip()
        return request.client.host if request.client else "unknown"
|
|
318
|
+
|
|
319
|
+
|
|
320
|
+
def configure_logging(environment: str) -> None:
    """
    One-time logging setup, called at startup.

    Development gets a human-readable console format at DEBUG level;
    every other environment gets JSON lines at INFO level so the logs
    are machine-queryable in production.
    """
    import logging.config

    is_dev = environment == "development"

    logging.config.dictConfig(
        {
            "version": 1,
            "disable_existing_loggers": False,
            "formatters": {
                "json": {
                    "()": "pythonjsonlogger.jsonlogger.JsonFormatter",
                    "format": "%(asctime)s %(name)s %(levelname)s %(message)s",
                },
                "console": {
                    "format": "%(asctime)s | %(levelname)-8s | %(name)s | %(message)s",
                    "datefmt": "%H:%M:%S",
                },
            },
            "handlers": {
                "console": {
                    "class": "logging.StreamHandler",
                    "formatter": "console" if is_dev else "json",
                },
            },
            "root": {
                "level": "DEBUG" if is_dev else "INFO",
                "handlers": ["console"],
            },
        }
    )
|
|
355
|
+
```
|
|
356
|
+
|
|
357
|
+
---
|
|
358
|
+
|
|
359
|
+
# 4. REDIS RATE LIMITING
|
|
360
|
+
|
|
361
|
+
## Architecture Decision
|
|
362
|
+
|
|
363
|
+
Rate limiting at the middleware layer means it fires before any route logic — no DB queries, no service calls, no wasted compute. The fixed-window algorithm via Redis atomic operations (`INCR` + `EXPIRE`) prevents thundering herd on burst traffic; a true sliding window would require Lua scripting or sorted sets.
|
|
364
|
+
|
|
365
|
+
Per-IP is the baseline. For authenticated routes, swap the key to `user_id` for per-user limits — this prevents a single user from burning shared IP capacity.
|
|
366
|
+
|
|
367
|
+
```python
|
|
368
|
+
# app/middleware/rate_limit.py
|
|
369
|
+
import logging
|
|
370
|
+
from typing import Callable
|
|
371
|
+
|
|
372
|
+
from fastapi import Request, Response
|
|
373
|
+
from fastapi.responses import JSONResponse
|
|
374
|
+
from starlette.middleware.base import BaseHTTPMiddleware
|
|
375
|
+
|
|
376
|
+
from app.config import get_settings
|
|
377
|
+
from app.core.redis import get_redis
|
|
378
|
+
|
|
379
|
+
logger = logging.getLogger("api.rate_limit")
|
|
380
|
+
|
|
381
|
+
|
|
382
|
+
class RateLimitMiddleware(BaseHTTPMiddleware):
    """
    Fixed-window rate limiter backed by Redis.

    Key design choices:
    - INCR + EXPIRE is atomic enough for our use case (minor race on first request is acceptable)
    - For strict accuracy (e.g. a true sliding window) use Redis Lua scripts or MULTI/EXEC
    - Health endpoints are excluded from limiting
    - Redis outages fail OPEN: availability over strict limiting

    NOTE: the docstring previously claimed "sliding-window" — INCR with a
    keyed TTL is a FIXED window; bursts straddling a window boundary can
    briefly see up to 2x the configured limit.
    """

    # Infrastructure endpoints that must never be throttled.
    EXCLUDED_PATHS = {"/health", "/metrics", "/docs", "/openapi.json"}

    async def dispatch(self, request: Request, call_next: Callable) -> Response:
        if request.url.path in self.EXCLUDED_PATHS:
            return await call_next(request)

        settings = get_settings()
        redis = await get_redis()

        identifier = self._get_identifier(request)
        key = f"rate_limit:{identifier}"

        # The try block covers ONLY the Redis calls. The previous version
        # wrapped call_next() as well, so any exception raised by a
        # downstream handler was misreported as a Redis failure AND the
        # handler was executed a second time via the fail-open path —
        # duplicating side effects. Keeping the try narrow fixes both.
        try:
            current = await redis.incr(key)
            if current == 1:
                # First request in window — set expiry
                await redis.expire(key, settings.RATE_LIMIT_WINDOW_SECONDS)
        except Exception as exc:
            # Redis failure must not block requests — fail open
            logger.error(
                "rate_limit_redis_failure",
                extra={"error": str(exc)},
                exc_info=True,
            )
            return await call_next(request)

        remaining = max(0, settings.RATE_LIMIT_REQUESTS - current)

        if current > settings.RATE_LIMIT_REQUESTS:
            logger.warning(
                "rate_limit_exceeded",
                extra={
                    "identifier": identifier,
                    "count": current,
                    "limit": settings.RATE_LIMIT_REQUESTS,
                    "path": request.url.path,
                },
            )
            return JSONResponse(
                status_code=429,
                content={
                    "error": "rate_limit_exceeded",
                    "message": "Too many requests. Please slow down.",
                    "retry_after": settings.RATE_LIMIT_WINDOW_SECONDS,
                },
                headers={
                    "X-RateLimit-Limit": str(settings.RATE_LIMIT_REQUESTS),
                    "X-RateLimit-Remaining": "0",
                    "Retry-After": str(settings.RATE_LIMIT_WINDOW_SECONDS),
                },
            )

        response = await call_next(request)

        # Attach rate limit headers for client awareness
        response.headers["X-RateLimit-Limit"] = str(settings.RATE_LIMIT_REQUESTS)
        response.headers["X-RateLimit-Remaining"] = str(remaining)

        return response

    def _get_identifier(self, request: Request) -> str:
        """
        Prefer user_id from JWT state if already resolved.
        Falls back to IP for unauthenticated routes.
        """
        user_id = getattr(request.state, "user_id", None)
        if user_id:
            return f"user:{user_id}"

        forwarded_for = request.headers.get("X-Forwarded-For")
        if forwarded_for:
            return f"ip:{forwarded_for.split(',')[0].strip()}"

        client_host = request.client.host if request.client else "unknown"
        return f"ip:{client_host}"
|
|
469
|
+
```
|
|
470
|
+
|
|
471
|
+
## Redis Connection Pool — `core/redis.py`
|
|
472
|
+
|
|
473
|
+
```python
|
|
474
|
+
# app/core/redis.py
|
|
475
|
+
import logging
|
|
476
|
+
from typing import Optional
|
|
477
|
+
|
|
478
|
+
import redis.asyncio as aioredis
|
|
479
|
+
from redis.asyncio import Redis
|
|
480
|
+
|
|
481
|
+
logger = logging.getLogger("app.redis")
|
|
482
|
+
|
|
483
|
+
_redis_client: Optional[Redis] = None
|
|
484
|
+
|
|
485
|
+
|
|
486
|
+
async def init_redis(url: str) -> None:
    """
    Create the module-level Redis client and verify connectivity.

    Propagates any connection error from the ping so that startup
    aborts instead of serving requests against a dead cache.
    """
    global _redis_client

    _redis_client = aioredis.from_url(
        str(url),
        encoding="utf-8",
        decode_responses=True,
        max_connections=20,
    )

    # Validate connection on startup — fail fast rather than on first use.
    await _redis_client.ping()
    # Log only the part after '@' so embedded credentials never reach logs.
    logger.info("redis_connected", extra={"url": str(url).split("@")[-1]})
|
|
497
|
+
|
|
498
|
+
|
|
499
|
+
async def get_redis() -> Redis:
    """
    Return the shared Redis client.

    Raises:
        RuntimeError: when called before init_redis() has run — a loud
        failure beats a confusing AttributeError downstream.
    """
    if _redis_client is None:
        raise RuntimeError("Redis not initialized. Call init_redis() first.")
    return _redis_client
|
|
503
|
+
|
|
504
|
+
|
|
505
|
+
async def close_redis() -> None:
    """Close the shared Redis client, if one was ever created. Idempotent."""
    global _redis_client
    if _redis_client:
        await _redis_client.aclose()
        logger.info("redis_disconnected")
|
|
510
|
+
```
|
|
511
|
+
|
|
512
|
+
---
|
|
513
|
+
|
|
514
|
+
# 5. JWT AUTH SYSTEM
|
|
515
|
+
|
|
516
|
+
## Token Architecture
|
|
517
|
+
|
|
518
|
+
Access tokens are short-lived (30m). Refresh tokens are long-lived (7d) and stored in the DB, which allows revocation. This is a deliberate tradeoff — pure stateless JWT can't be revoked, so the refresh token anchors the session.
|
|
519
|
+
|
|
520
|
+
Never store tokens in localStorage. HttpOnly cookies prevent XSS exfiltration.
|
|
521
|
+
|
|
522
|
+
```python
|
|
523
|
+
# app/core/security.py
|
|
524
|
+
from datetime import datetime, timedelta, timezone
|
|
525
|
+
from typing import Any
|
|
526
|
+
|
|
527
|
+
import bcrypt
|
|
528
|
+
from jose import JWTError, jwt
|
|
529
|
+
|
|
530
|
+
from app.config import get_settings
|
|
531
|
+
|
|
532
|
+
|
|
533
|
+
def hash_password(plain: str) -> str:
    """Hash a plaintext password with bcrypt using a fresh random salt."""
    salt = bcrypt.gensalt()
    digest = bcrypt.hashpw(plain.encode(), salt)
    return digest.decode()
|
|
535
|
+
|
|
536
|
+
|
|
537
|
+
def verify_password(plain: str, hashed: str) -> bool:
    """Return True when *plain* matches the stored bcrypt hash *hashed*."""
    return bcrypt.checkpw(plain.encode(), hashed.encode())
|
|
539
|
+
|
|
540
|
+
|
|
541
|
+
def create_access_token(
    subject: str | int,
    extra_claims: dict[str, Any] | None = None,
) -> str:
    """
    Issue a signed, short-lived access JWT.

    Args:
        subject: user identifier, stored as a string in the `sub` claim.
        extra_claims: optional additional claims merged into the payload.
            Merged last, so callers can override the reserved claims —
            preserved for backward compatibility with the old behavior.

    Returns:
        The encoded JWT string.
    """
    settings = get_settings()
    now = datetime.now(timezone.utc)

    payload = {
        "sub": str(subject),
        "iat": now,
        "exp": now + timedelta(minutes=settings.ACCESS_TOKEN_EXPIRE_MINUTES),
        # Token type lets validators reject a refresh token used as access.
        "type": "access",
        # Default is None, not {} — a mutable default dict is shared across
        # calls and would leak claims between requests if ever mutated.
        **(extra_claims or {}),
    }

    return jwt.encode(payload, settings.SECRET_KEY, algorithm=settings.ALGORITHM)
|
|
554
|
+
|
|
555
|
+
|
|
556
|
+
def create_refresh_token(subject: str | int) -> str:
    """
    Issue a signed, long-lived refresh JWT for *subject*.

    Carries `type: "refresh"` so it can never be accepted where an
    access token is required.
    """
    settings = get_settings()
    issued_at = datetime.now(timezone.utc)
    expires_at = issued_at + timedelta(days=settings.REFRESH_TOKEN_EXPIRE_DAYS)

    claims = {
        "sub": str(subject),
        "iat": issued_at,
        "exp": expires_at,
        "type": "refresh",
    }
    return jwt.encode(claims, settings.SECRET_KEY, algorithm=settings.ALGORITHM)
|
|
568
|
+
|
|
569
|
+
|
|
570
|
+
def decode_token(token: str) -> dict[str, Any]:
    """
    Verify signature and expiry of *token* and return its claim set.

    Raises:
        JWTError: on an invalid, tampered, or expired token — callers
        must handle this exception.
    """
    settings = get_settings()
    return jwt.decode(token, settings.SECRET_KEY, algorithms=[settings.ALGORITHM])
|
|
577
|
+
```
|
|
578
|
+
|
|
579
|
+
## Auth Dependencies — `modules/auth/dependencies.py`
|
|
580
|
+
|
|
581
|
+
```python
|
|
582
|
+
# app/modules/auth/dependencies.py
|
|
583
|
+
import logging
|
|
584
|
+
from typing import Annotated
|
|
585
|
+
|
|
586
|
+
from fastapi import Depends, HTTPException, status
|
|
587
|
+
from fastapi.security import HTTPAuthorizationCredentials, HTTPBearer
|
|
588
|
+
|
|
589
|
+
from app.core.security import decode_token
|
|
590
|
+
from app.modules.users.repository import UserRepository
|
|
591
|
+
from app.models.user import User
|
|
592
|
+
|
|
593
|
+
logger = logging.getLogger("api.auth")
|
|
594
|
+
|
|
595
|
+
# auto_error=False lets us raise our own 401 with a consistent shape
# instead of the scheme's default error.
bearer_scheme = HTTPBearer(auto_error=False)


async def get_current_user(
    credentials: Annotated[
        HTTPAuthorizationCredentials | None, Depends(bearer_scheme)
    ],
    user_repo: Annotated[UserRepository, Depends()],
) -> User:
    """
    Dependency for protected routes: validate the bearer JWT, load the
    matching user from the DB, and inject it into the route handler.

    The raw token is deliberately never logged — PII/security risk.
    """
    if not credentials:
        raise HTTPException(
            status_code=status.HTTP_401_UNAUTHORIZED,
            detail="Missing authentication token",
            headers={"WWW-Authenticate": "Bearer"},
        )

    try:
        payload = decode_token(credentials.credentials)
    except Exception:
        logger.warning("invalid_token_attempt")
        raise HTTPException(
            status_code=status.HTTP_401_UNAUTHORIZED,
            detail="Invalid or expired token",
            headers={"WWW-Authenticate": "Bearer"},
        )

    # Refresh tokens must never pass as access tokens.
    if payload.get("type") != "access":
        raise HTTPException(
            status_code=status.HTTP_401_UNAUTHORIZED,
            detail="Invalid token type",
        )

    user = await user_repo.get_by_id(int(payload.get("sub")))

    if not user or not user.is_active:
        raise HTTPException(
            status_code=status.HTTP_401_UNAUTHORIZED,
            detail="User not found or inactive",
        )

    return user


# Type alias for injection clarity
CurrentUser = Annotated[User, Depends(get_current_user)]
|
|
647
|
+
```
|
|
648
|
+
|
|
649
|
+
## Auth Service — `modules/auth/service.py`
|
|
650
|
+
|
|
651
|
+
```python
|
|
652
|
+
# app/modules/auth/service.py
|
|
653
|
+
import logging
|
|
654
|
+
from datetime import datetime, timezone
|
|
655
|
+
|
|
656
|
+
from app.core.security import (
|
|
657
|
+
create_access_token,
|
|
658
|
+
create_refresh_token,
|
|
659
|
+
hash_password,
|
|
660
|
+
verify_password,
|
|
661
|
+
)
|
|
662
|
+
from app.core.exceptions import AuthenticationError, ConflictError
|
|
663
|
+
from app.modules.users.repository import UserRepository
|
|
664
|
+
from app.modules.auth.schemas import TokenPair, RegisterRequest, LoginRequest
|
|
665
|
+
|
|
666
|
+
logger = logging.getLogger("api.auth.service")
|
|
667
|
+
|
|
668
|
+
|
|
669
|
+
class AuthService:
    """
    All authentication business rules live here: the repository only
    talks to the database, and routers only speak HTTP.
    """

    def __init__(self, user_repo: UserRepository):
        self.user_repo = user_repo

    async def register(self, payload: RegisterRequest) -> TokenPair:
        """Create a new account and return its first token pair.

        Raises:
            ConflictError: when the email is already registered.
        """
        if await self.user_repo.get_by_email(payload.email):
            raise ConflictError(f"Email already registered: {payload.email}")

        user = await self.user_repo.create(
            email=payload.email,
            username=payload.username,
            password_hash=hash_password(payload.password),
        )

        logger.info("user_registered", extra={"user_id": user.id})
        return self._generate_token_pair(user.id)

    async def login(self, payload: LoginRequest) -> TokenPair:
        """Authenticate credentials and return a fresh token pair.

        Raises:
            AuthenticationError: on any credential failure.
        """
        user = await self.user_repo.get_by_email(payload.email)

        # Constant-time-safe: always verify even if user not found
        # to prevent email enumeration via timing attack
        stored_hash = (
            user.password_hash if user else "$2b$12$placeholder_hash_for_timing"
        )
        password_valid = verify_password(payload.password, stored_hash)

        if not user or not password_valid:
            logger.warning(
                "failed_login_attempt",
                extra={"email": payload.email},
            )
            raise AuthenticationError("Invalid credentials")

        logger.info("user_logged_in", extra={"user_id": user.id})
        return self._generate_token_pair(user.id)

    def _generate_token_pair(self, user_id: int) -> TokenPair:
        """Mint a matched access/refresh JWT pair for *user_id*."""
        return TokenPair(
            access_token=create_access_token(subject=user_id),
            refresh_token=create_refresh_token(subject=user_id),
            token_type="bearer",
        )
|
|
720
|
+
```
|
|
721
|
+
|
|
722
|
+
---
|
|
723
|
+
|
|
724
|
+
# 6. SERVICE LAYER PATTERNS
|
|
725
|
+
|
|
726
|
+
## Why a Service Layer Exists
|
|
727
|
+
|
|
728
|
+
Routes answer the question "what HTTP contract do we expose?" Services answer "what does this application actually do?" Keeping them separate means you can call services from other services, background workers, CLI scripts, or tests — without needing an HTTP context.
|
|
729
|
+
|
|
730
|
+
```python
|
|
731
|
+
# app/modules/github/service.py
|
|
732
|
+
import logging
|
|
733
|
+
from typing import Optional
|
|
734
|
+
|
|
735
|
+
from app.modules.github.client import GitHubClient
|
|
736
|
+
from app.modules.github.cache import GitHubCache
|
|
737
|
+
from app.modules.github.schemas import (
|
|
738
|
+
GitHubUserStats,
|
|
739
|
+
GitHubRepoSummary,
|
|
740
|
+
GitHubAnalytics,
|
|
741
|
+
)
|
|
742
|
+
from app.core.exceptions import NotFoundError, ExternalAPIError
|
|
743
|
+
|
|
744
|
+
logger = logging.getLogger("api.github.service")
|
|
745
|
+
|
|
746
|
+
|
|
747
|
+
class GitHubService:
    """
    Orchestrates GitHub data retrieval: cache first, then the GitHub
    API, transforming raw payloads into domain schemas along the way.

    Collaborators:
    - GitHubClient owns all outbound HTTP.
    - GitHubCache owns all Redis caching.

    This class knows nothing about HTTP requests/responses, JWT/auth,
    or DB connection management.
    """

    def __init__(self, client: GitHubClient, cache: GitHubCache):
        self.client = client
        self.cache = cache

    async def get_user_analytics(self, github_username: str) -> GitHubAnalytics:
        """Return analytics for *github_username*, served from cache on a hit.

        Raises:
            NotFoundError: when the GitHub user does not exist.
        """
        cache_key = f"analytics:{github_username}"

        cached = await self.cache.get(cache_key)
        if cached:
            logger.debug("cache_hit", extra={"key": cache_key})
            return GitHubAnalytics.model_validate(cached)

        logger.info(
            "fetching_github_analytics",
            extra={"username": github_username},
        )

        profile = await self.client.get_user(github_username)
        if not profile:
            raise NotFoundError(f"GitHub user not found: {github_username}")

        repositories = await self.client.get_user_repos(github_username)

        summaries = [
            GitHubRepoSummary(
                name=repo["name"],
                stars=repo.get("stargazers_count", 0),
                forks=repo.get("forks_count", 0),
                language=repo.get("language"),
                description=repo.get("description"),
            )
            for repo in repositories[:10]  # Top 10 by default
        ]

        analytics = GitHubAnalytics(
            username=github_username,
            public_repos=profile["public_repos"],
            followers=profile["followers"],
            following=profile["following"],
            total_stars=sum(repo.get("stargazers_count", 0) for repo in repositories),
            top_languages=self._aggregate_languages(repositories),
            repos=summaries,
        )

        # Five-minute TTL: the data is expensive to build but tolerates staleness.
        await self.cache.set(cache_key, analytics.model_dump(), ttl=300)

        return analytics

    def _aggregate_languages(self, repos: list[dict]) -> dict[str, int]:
        """Count repos per primary language; keep the 8 most common."""
        counts: dict[str, int] = {}
        for repo in repos:
            language = repo.get("language")
            if language:
                counts[language] = counts.get(language, 0) + 1
        ranked = sorted(counts.items(), key=lambda item: item[1], reverse=True)
        return dict(ranked[:8])
|
|
816
|
+
```
|
|
817
|
+
|
|
818
|
+
## Router — Thin by Design
|
|
819
|
+
|
|
820
|
+
```python
|
|
821
|
+
# app/modules/github/router.py
|
|
822
|
+
from typing import Annotated
|
|
823
|
+
|
|
824
|
+
from fastapi import APIRouter, Depends
|
|
825
|
+
|
|
826
|
+
from app.modules.auth.dependencies import CurrentUser
|
|
827
|
+
from app.modules.github.service import GitHubService
|
|
828
|
+
from app.modules.github.schemas import GitHubAnalytics
|
|
829
|
+
from app.dependencies import get_github_service
|
|
830
|
+
|
|
831
|
+
router = APIRouter()


@router.get("/stats/{username}", response_model=GitHubAnalytics)
async def get_github_stats(
    username: str,
    current_user: CurrentUser,
    service: Annotated[GitHubService, Depends(get_github_service)],
) -> GitHubAnalytics:
    """
    Thin HTTP adapter: validate the path parameter, delegate to the
    service, return its result.

    Authentication happens entirely inside the CurrentUser dependency;
    no business logic belongs in the router.
    """
    return await service.get_user_analytics(username)
|
|
849
|
+
```
|
|
850
|
+
|
|
851
|
+
---
|
|
852
|
+
|
|
853
|
+
# 7. DTO / PYDANTIC VALIDATION
|
|
854
|
+
|
|
855
|
+
## Design Philosophy
|
|
856
|
+
|
|
857
|
+
DTOs (Data Transfer Objects) are the contract between your API and the outside world. They validate input before it ever touches a service, and they control exactly what gets serialized into responses. Never return raw ORM models directly to clients.
|
|
858
|
+
|
|
859
|
+
```python
|
|
860
|
+
# app/modules/auth/schemas.py
|
|
861
|
+
from pydantic import BaseModel, EmailStr, Field, field_validator
|
|
862
|
+
import re
|
|
863
|
+
|
|
864
|
+
|
|
865
|
+
class RegisterRequest(BaseModel):
    """Registration payload — validated before any service code runs."""

    # Syntactic email validation via pydantic's EmailStr
    email: EmailStr
    # 3-32 chars; alphanumerics plus "_" and "-" only
    username: str = Field(min_length=3, max_length=32, pattern=r"^[a-zA-Z0-9_-]+$")
    password: str = Field(min_length=8, max_length=128)

    @field_validator("password")
    @classmethod
    def password_strength(cls, v: str) -> str:
        """Reject passwords missing an uppercase letter or a digit."""
        if not re.search(r"[A-Z]", v):
            raise ValueError("Password must contain at least one uppercase letter")
        if not re.search(r"[0-9]", v):
            raise ValueError("Password must contain at least one digit")
        return v
|
|
878
|
+
|
|
879
|
+
|
|
880
|
+
class LoginRequest(BaseModel):
    """Credentials payload for the login endpoint."""

    email: EmailStr
    password: str
|
|
883
|
+
|
|
884
|
+
|
|
885
|
+
class TokenPair(BaseModel):
    """Access + refresh token pair returned on login and refresh."""

    access_token: str
    refresh_token: str
    # OAuth2-style token type; this API always issues "bearer"
    token_type: str = "bearer"
|
|
889
|
+
|
|
890
|
+
|
|
891
|
+
class RefreshRequest(BaseModel):
    """Payload for exchanging a refresh token for a new access token."""

    refresh_token: str
|
|
893
|
+
|
|
894
|
+
|
|
895
|
+
# app/modules/github/schemas.py
|
|
896
|
+
from typing import Optional
|
|
897
|
+
from pydantic import BaseModel, Field
|
|
898
|
+
|
|
899
|
+
|
|
900
|
+
class GitHubRepoSummary(BaseModel):
    """Condensed single-repository view used inside analytics responses."""

    name: str
    stars: int = Field(ge=0)
    forks: int = Field(ge=0)
    # Primary language as reported by GitHub; None when undetected
    language: Optional[str] = None
    description: Optional[str] = None
|
|
906
|
+
|
|
907
|
+
|
|
908
|
+
class GitHubAnalytics(BaseModel):
    """Aggregated analytics for one GitHub user."""

    username: str
    public_repos: int
    followers: int
    following: int
    # Sum of stargazer counts across the fetched repos
    total_stars: int
    # Language name -> repo count, most common first
    top_languages: dict[str, int]
    repos: list[GitHubRepoSummary]

    # Allow construction from attribute-bearing objects (e.g. ORM rows)
    model_config = {"from_attributes": True}
|
|
918
|
+
|
|
919
|
+
|
|
920
|
+
# Shared error response schema — consistent across all endpoints
class ErrorResponse(BaseModel):
    """Uniform error envelope emitted by the global exception handlers."""

    # Machine-readable error code (e.g. "not_found")
    error: str
    # Human-readable description, safe to show to clients
    message: str
    # Correlation id for tracing the failing request in logs
    request_id: Optional[str] = None
|
|
925
|
+
```
|
|
926
|
+
|
|
927
|
+
---
|
|
928
|
+
|
|
929
|
+
# 8. POSTGRESQL INTEGRATION (ASYNC)
|
|
930
|
+
|
|
931
|
+
## Why Async SQLAlchemy
|
|
932
|
+
|
|
933
|
+
Blocking DB calls in an async framework nullify async benefits. With `asyncpg` + `async_sessionmaker`, DB I/O yields the event loop, allowing concurrent request handling on a single process.
|
|
934
|
+
|
|
935
|
+
```python
|
|
936
|
+
# app/core/database.py
|
|
937
|
+
import logging
|
|
938
|
+
from typing import AsyncGenerator
|
|
939
|
+
|
|
940
|
+
from sqlalchemy.ext.asyncio import (
|
|
941
|
+
AsyncSession,
|
|
942
|
+
async_sessionmaker,
|
|
943
|
+
create_async_engine,
|
|
944
|
+
)
|
|
945
|
+
from sqlalchemy.orm import DeclarativeBase
|
|
946
|
+
|
|
947
|
+
from app.config import get_settings
|
|
948
|
+
|
|
949
|
+
logger = logging.getLogger("app.database")
|
|
950
|
+
|
|
951
|
+
_engine = None
|
|
952
|
+
_session_factory = None
|
|
953
|
+
|
|
954
|
+
|
|
955
|
+
class Base(DeclarativeBase):
    """Declarative base that all ORM models in the app inherit from."""
    pass
|
|
957
|
+
|
|
958
|
+
|
|
959
|
+
async def init_db() -> None:
    """
    Create the async engine and session factory (module-level singletons).

    Must run once at application startup; `get_session` raises
    RuntimeError until it has.
    """
    global _engine, _session_factory

    settings = get_settings()

    _engine = create_async_engine(
        # Force the asyncpg driver — settings may hold a sync-style DSN
        str(settings.DATABASE_URL).replace("postgresql://", "postgresql+asyncpg://"),
        pool_size=10,
        max_overflow=20,
        pool_pre_ping=True,  # Validates connections before checkout
        # SQL echo only in development — noisy and slow in prod
        echo=settings.ENVIRONMENT == "development",
    )

    _session_factory = async_sessionmaker(
        bind=_engine,
        expire_on_commit=False,  # Prevent lazy-load failures after commit
        class_=AsyncSession,
    )

    logger.info("database_initialized")
|
|
979
|
+
|
|
980
|
+
|
|
981
|
+
async def get_session() -> AsyncGenerator[AsyncSession, None]:
    """
    FastAPI dependency. Yields a session per request, commits on success,
    rolls back on exception. Never leaks connections.

    Raises:
        RuntimeError: if called before `init_db` has run.
    """
    if _session_factory is None:
        raise RuntimeError("Database not initialized")

    async with _session_factory() as session:
        try:
            # The request handler runs while the generator is suspended here
            yield session
            await session.commit()
        except Exception:
            await session.rollback()
            raise
        finally:
            # Belt-and-braces: the async-with block also closes the session
            await session.close()
|
|
998
|
+
```
|
|
999
|
+
|
|
1000
|
+
## ORM Model
|
|
1001
|
+
|
|
1002
|
+
```python
|
|
1003
|
+
# app/models/user.py
|
|
1004
|
+
from datetime import datetime, timezone
|
|
1005
|
+
from typing import Optional
|
|
1006
|
+
|
|
1007
|
+
from sqlalchemy import Boolean, DateTime, Integer, String, func
|
|
1008
|
+
from sqlalchemy.orm import Mapped, mapped_column
|
|
1009
|
+
|
|
1010
|
+
from app.core.database import Base
|
|
1011
|
+
|
|
1012
|
+
|
|
1013
|
+
class User(Base):
    """Persisted account record backing authentication."""

    __tablename__ = "users"

    id: Mapped[int] = mapped_column(Integer, primary_key=True, index=True)
    # Unique, indexed — used as the login identifier
    email: Mapped[str] = mapped_column(String(255), unique=True, index=True, nullable=False)
    username: Mapped[str] = mapped_column(String(64), unique=True, index=True, nullable=False)
    # Hashed credential only — plaintext passwords are never stored
    password_hash: Mapped[str] = mapped_column(String(255), nullable=False)
    # Soft-disable flag — presumably blocks login while keeping the row; confirm in auth flow
    is_active: Mapped[bool] = mapped_column(Boolean, default=True, nullable=False)
    # Set by the database at insert time (timezone-aware)
    created_at: Mapped[datetime] = mapped_column(
        DateTime(timezone=True),
        server_default=func.now(),
        nullable=False,
    )
    # Touched on every UPDATE; None until the first modification
    updated_at: Mapped[Optional[datetime]] = mapped_column(
        DateTime(timezone=True),
        onupdate=func.now(),
        nullable=True,
    )
|
|
1031
|
+
```
|
|
1032
|
+
|
|
1033
|
+
## Repository Pattern
|
|
1034
|
+
|
|
1035
|
+
```python
|
|
1036
|
+
# app/modules/users/repository.py
|
|
1037
|
+
import logging
|
|
1038
|
+
from typing import Optional
|
|
1039
|
+
|
|
1040
|
+
from sqlalchemy import select
|
|
1041
|
+
from sqlalchemy.ext.asyncio import AsyncSession
|
|
1042
|
+
from fastapi import Depends
|
|
1043
|
+
|
|
1044
|
+
from app.core.database import get_session
|
|
1045
|
+
from app.models.user import User
|
|
1046
|
+
|
|
1047
|
+
logger = logging.getLogger("api.users.repository")
|
|
1048
|
+
|
|
1049
|
+
|
|
1050
|
+
class UserRepository:
    """
    Isolates all DB access for the User domain.

    Why a repository and not direct session usage in services?
    - Services should not know about SQLAlchemy internals
    - Repositories are mockable in tests
    - Query logic stays in one place
    """

    def __init__(self, session: AsyncSession = Depends(get_session)):
        # Per-request session injected by FastAPI; the transaction
        # boundary (commit/rollback) is owned by get_session, not here
        self.session = session

    async def get_by_id(self, user_id: int) -> Optional[User]:
        """Fetch a user by primary key; None when no row matches."""
        result = await self.session.execute(
            select(User).where(User.id == user_id)
        )
        return result.scalar_one_or_none()

    async def get_by_email(self, email: str) -> Optional[User]:
        """Fetch a user by unique email; None when no row matches."""
        result = await self.session.execute(
            select(User).where(User.email == email)
        )
        return result.scalar_one_or_none()

    async def create(self, email: str, username: str, password_hash: str) -> User:
        """Insert a new user and return it with its generated id populated."""
        user = User(email=email, username=username, password_hash=password_hash)
        self.session.add(user)
        await self.session.flush()  # Gets ID without committing — session manages tx
        await self.session.refresh(user)
        logger.info("user_created", extra={"user_id": user.id})
        return user
|
|
1082
|
+
```
|
|
1083
|
+
|
|
1084
|
+
---
|
|
1085
|
+
|
|
1086
|
+
# 9. DOCKER INFRASTRUCTURE
|
|
1087
|
+
|
|
1088
|
+
## Philosophy
|
|
1089
|
+
|
|
1090
|
+
Dev and prod containers differ intentionally. Dev mounts source code as volumes for hot reload. Prod builds a minimal image with no dev dependencies, non-root user, and a single process.
|
|
1091
|
+
|
|
1092
|
+
## `Dockerfile` (Production)
|
|
1093
|
+
|
|
1094
|
+
```dockerfile
|
|
1095
|
+
# Dockerfile
FROM python:3.12-slim AS base

# No .pyc files, unbuffered stdout for container logs, no pip cache in layers
ENV PYTHONDONTWRITEBYTECODE=1 \
    PYTHONUNBUFFERED=1 \
    PIP_NO_CACHE_DIR=1

WORKDIR /app

# Install system deps separately — layer caches unless deps change
RUN apt-get update && apt-get install -y --no-install-recommends \
    libpq-dev gcc \
    && rm -rf /var/lib/apt/lists/*

# ------- Dependency layer (cached unless pyproject.toml changes) -------
FROM base AS deps
COPY pyproject.toml ./
# NOTE(review): `pip install .` with only pyproject.toml copied will fail
# for a standard package layout (no source present at build time) — confirm
# the project builds with dynamic/empty package discovery, or copy the
# source into this stage first.
RUN pip install --upgrade pip && pip install .

# ------- Production image -------
FROM base AS production

# Non-root user — security baseline
RUN addgroup --system appgroup && adduser --system --group appuser

# Bring installed site-packages and console scripts over from the deps stage
COPY --from=deps /usr/local/lib/python3.12 /usr/local/lib/python3.12
COPY --from=deps /usr/local/bin /usr/local/bin

COPY --chown=appuser:appgroup ./app ./app

USER appuser

EXPOSE 8000

CMD ["uvicorn", "app.main:app", "--host", "0.0.0.0", "--port", "8000", "--workers", "2"]
|
|
1130
|
+
```
|
|
1131
|
+
|
|
1132
|
+
## `docker-compose.yml` (Development)
|
|
1133
|
+
|
|
1134
|
+
```yaml
|
|
1135
|
+
# docker-compose.yml
# NOTE(review): the top-level `version` key is obsolete and ignored by
# Compose v2 — confirm whether the team's tooling still requires it.
version: "3.9"

services:
  api:
    build:
      context: .
      target: base # Dev target — no prod optimizations
    # --reload watches the volume-mounted source for changes
    command: uvicorn app.main:app --host 0.0.0.0 --port 8000 --reload
    volumes:
      - ./app:/app/app # Hot reload via volume mount
    env_file: .env
    ports:
      - "8000:8000"
    # Block startup until both backing services report healthy
    depends_on:
      postgres:
        condition: service_healthy
      redis:
        condition: service_healthy
    networks:
      - backend

  postgres:
    image: postgres:16-alpine
    environment:
      POSTGRES_USER: ${POSTGRES_USER}
      POSTGRES_PASSWORD: ${POSTGRES_PASSWORD}
      POSTGRES_DB: ${POSTGRES_DB}
    volumes:
      - postgres_data:/var/lib/postgresql/data
    healthcheck:
      test: ["CMD-SHELL", "pg_isready -U ${POSTGRES_USER}"]
      interval: 5s
      timeout: 5s
      retries: 5
    networks:
      - backend

  redis:
    image: redis:7-alpine
    # Bounded memory with LRU eviction — sane default for a pure cache
    command: redis-server --maxmemory 256mb --maxmemory-policy allkeys-lru
    volumes:
      - redis_data:/data
    healthcheck:
      test: ["CMD", "redis-cli", "ping"]
      interval: 5s
      timeout: 3s
      retries: 5
    networks:
      - backend

  nginx:
    image: nginx:alpine
    ports:
      - "80:80"
    volumes:
      # Read-only mount of the dev reverse-proxy config
      - ./nginx/dev.conf:/etc/nginx/nginx.conf:ro
    depends_on:
      - api
    networks:
      - backend

volumes:
  postgres_data:
  redis_data:

networks:
  backend:
    driver: bridge
|
|
1204
|
+
```
|
|
1205
|
+
|
|
1206
|
+
---
|
|
1207
|
+
|
|
1208
|
+
# 10. EXTERNAL API INTEGRATION — GITHUB CLIENT
|
|
1209
|
+
|
|
1210
|
+
## Design Principles
|
|
1211
|
+
|
|
1212
|
+
The GitHub client is infrastructure, not business logic. It speaks HTTP. It knows nothing about your domain. The service knows what data to request; the client knows how to transport it.
|
|
1213
|
+
|
|
1214
|
+
Retry is built in at the client level — transient server failures (500, 502, 503, 504) are transparently retried before the error surfaces to the service, while rate-limit responses (429) raise a dedicated error immediately so callers can back off instead of hammering the API.
|
|
1215
|
+
|
|
1216
|
+
```python
|
|
1217
|
+
# app/modules/github/client.py
|
|
1218
|
+
import logging
|
|
1219
|
+
from typing import Any, Optional
|
|
1220
|
+
|
|
1221
|
+
import httpx
|
|
1222
|
+
from tenacity import (
|
|
1223
|
+
retry,
|
|
1224
|
+
retry_if_exception_type,
|
|
1225
|
+
stop_after_attempt,
|
|
1226
|
+
wait_exponential,
|
|
1227
|
+
before_sleep_log,
|
|
1228
|
+
)
|
|
1229
|
+
|
|
1230
|
+
from app.config import get_settings
|
|
1231
|
+
from app.core.exceptions import ExternalAPIError, RateLimitError
|
|
1232
|
+
|
|
1233
|
+
logger = logging.getLogger("api.github.client")
|
|
1234
|
+
|
|
1235
|
+
RETRYABLE_STATUS_CODES = {429, 500, 502, 503, 504}
|
|
1236
|
+
|
|
1237
|
+
|
|
1238
|
+
class GitHubClient:
    """
    GitHub REST API client.

    Responsibilities:
    - Authenticate requests with token
    - Serialize/deserialize HTTP
    - Retry on transient failures
    - Surface clean domain exceptions (not httpx errors)

    Does NOT:
    - Cache responses (GitHubCache's job)
    - Transform data into domain models (service's job)
    - Know about users or analytics logic
    """

    def __init__(self):
        settings = get_settings()
        self._base_url = settings.GITHUB_API_BASE_URL
        self._token = settings.GITHUB_API_TOKEN
        # Lazily created; recreated if a previous client was closed
        self._client: Optional[httpx.AsyncClient] = None

    async def _get_client(self) -> httpx.AsyncClient:
        """Return the shared AsyncClient, creating it on first use."""
        if self._client is None or self._client.is_closed:
            self._client = httpx.AsyncClient(
                base_url=self._base_url,
                headers={
                    "Authorization": f"Bearer {self._token}",
                    "Accept": "application/vnd.github.v3+json",
                    "X-GitHub-Api-Version": "2022-11-28",
                },
                timeout=httpx.Timeout(connect=5.0, read=15.0, write=5.0, pool=5.0),
            )
        return self._client

    @retry(
        retry=retry_if_exception_type(ExternalAPIError),
        stop=stop_after_attempt(3),
        wait=wait_exponential(multiplier=1, min=1, max=8),
        before_sleep=before_sleep_log(logger, logging.WARNING),
        reraise=True,
    )
    async def get_user(self, username: str) -> dict[str, Any]:
        """Fetch a user's public profile (returns None for unknown users)."""
        return await self._get("/users/{username}", username=username)

    @retry(
        retry=retry_if_exception_type(ExternalAPIError),
        stop=stop_after_attempt(3),
        wait=wait_exponential(multiplier=1, min=1, max=8),
        before_sleep=before_sleep_log(logger, logging.WARNING),
        reraise=True,
    )
    async def get_user_repos(
        self,
        username: str,
        per_page: int = 100,
    ) -> list[dict[str, Any]]:
        """Fetch up to `per_page` repositories for a user."""
        # NOTE(review): GitHub documents sort values created|updated|pushed|
        # full_name for this endpoint — "stars" may be silently ignored; confirm.
        return await self._get(
            f"/users/{username}/repos",
            params={"per_page": per_page, "sort": "stars", "direction": "desc"},
        )

    async def _get(self, path: str, params: Optional[dict] = None, **kwargs) -> Any:
        """
        Perform a GET and translate transport/HTTP failures into domain errors.

        `path` may contain `{placeholders}` filled from **kwargs.
        Returns the decoded JSON body, or None on 404 — callers must handle
        the None case themselves.

        Raises:
            RateLimitError: on 429 (not retried — callers should back off).
            ExternalAPIError: on timeouts, transport errors, and 5xx/other
                failures (retried by the decorated public methods).
        """
        # BUGFIX: `params` previously defaulted to a shared mutable `{}`
        # (classic mutable-default pitfall). `None` is the safe sentinel;
        # httpx treats params=None as "no query parameters".
        url = path.format(**kwargs) if kwargs else path
        client = await self._get_client()

        try:
            response = await client.get(url, params=params)
        except httpx.TimeoutException as exc:
            logger.error("github_timeout", extra={"path": url})
            raise ExternalAPIError(f"GitHub API timeout: {url}") from exc
        except httpx.RequestError as exc:
            logger.error("github_request_error", extra={"path": url, "error": str(exc)})
            raise ExternalAPIError(f"GitHub API request failed: {url}") from exc

        if response.status_code == 404:
            return None

        # 429 is checked before the retryable set so rate limits surface
        # immediately instead of being hammered by the retry decorator
        if response.status_code == 429:
            reset = response.headers.get("X-RateLimit-Reset", "unknown")
            logger.warning("github_rate_limited", extra={"reset": reset})
            raise RateLimitError(f"GitHub rate limit exceeded. Reset: {reset}")

        if response.status_code in RETRYABLE_STATUS_CODES:
            logger.warning(
                "github_retryable_error",
                extra={"status": response.status_code, "path": url},
            )
            raise ExternalAPIError(
                f"GitHub API error {response.status_code}: {url}"
            )

        if not response.is_success:
            logger.error(
                "github_api_error",
                extra={"status": response.status_code, "path": url},
            )
            raise ExternalAPIError(
                f"GitHub API unexpected error {response.status_code}"
            )

        return response.json()

    async def close(self) -> None:
        """Dispose of the underlying HTTP client (idempotent)."""
        if self._client and not self._client.is_closed:
            await self._client.aclose()
|
|
1344
|
+
```
|
|
1345
|
+
|
|
1346
|
+
---
|
|
1347
|
+
|
|
1348
|
+
# 11. ERROR HANDLING ARCHITECTURE
|
|
1349
|
+
|
|
1350
|
+
## Domain Exception Hierarchy
|
|
1351
|
+
|
|
1352
|
+
```python
|
|
1353
|
+
# app/core/exceptions.py
|
|
1354
|
+
|
|
1355
|
+
|
|
1356
|
+
class AppError(Exception):
    """Base for all application errors.

    Subclasses override `status_code` / `error_code`; the global handler
    maps them to the shared error response shape.
    """
    status_code: int = 500
    error_code: str = "internal_error"
|
|
1360
|
+
|
|
1361
|
+
|
|
1362
|
+
class AuthenticationError(AppError):
    """Missing or invalid credentials (HTTP 401)."""
    status_code = 401
    error_code = "authentication_error"
|
|
1365
|
+
|
|
1366
|
+
|
|
1367
|
+
class AuthorizationError(AppError):
    """Authenticated but not permitted (HTTP 403)."""
    status_code = 403
    error_code = "authorization_error"
|
|
1370
|
+
|
|
1371
|
+
|
|
1372
|
+
class NotFoundError(AppError):
    """Requested resource does not exist (HTTP 404)."""
    status_code = 404
    error_code = "not_found"
|
|
1375
|
+
|
|
1376
|
+
|
|
1377
|
+
class ConflictError(AppError):
    """State conflict, e.g. duplicate unique value (HTTP 409)."""
    status_code = 409
    error_code = "conflict"
|
|
1380
|
+
|
|
1381
|
+
|
|
1382
|
+
class ValidationError(AppError):
    """Domain-level validation failure (HTTP 422)."""
    status_code = 422
    error_code = "validation_error"
|
|
1385
|
+
|
|
1386
|
+
|
|
1387
|
+
class ExternalAPIError(AppError):
    """Upstream service failed or misbehaved (HTTP 502)."""
    status_code = 502
    error_code = "external_api_error"
|
|
1390
|
+
|
|
1391
|
+
|
|
1392
|
+
class RateLimitError(AppError):
    """Upstream or local rate limit exceeded (HTTP 429)."""
    status_code = 429
    error_code = "rate_limit_error"
|
|
1395
|
+
```
|
|
1396
|
+
|
|
1397
|
+
## Global Exception Handler
|
|
1398
|
+
|
|
1399
|
+
```python
|
|
1400
|
+
# app/core/error_handlers.py
|
|
1401
|
+
import logging
|
|
1402
|
+
from fastapi import FastAPI, Request
|
|
1403
|
+
from fastapi.responses import JSONResponse
|
|
1404
|
+
from pydantic import ValidationError as PydanticValidationError
|
|
1405
|
+
|
|
1406
|
+
from app.core.exceptions import AppError
|
|
1407
|
+
|
|
1408
|
+
logger = logging.getLogger("api.errors")
|
|
1409
|
+
|
|
1410
|
+
|
|
1411
|
+
def register_exception_handlers(app: FastAPI) -> None:
    """
    Centralized exception handling.
    All exceptions normalize to the same response shape.
    Internal details are logged, never leaked to clients.
    """

    @app.exception_handler(AppError)
    async def app_error_handler(request: Request, exc: AppError) -> JSONResponse:
        # request_id is attached by middleware; may be absent in tests
        request_id = getattr(request.state, "request_id", None)

        # 5xx are operator errors (log at ERROR with traceback);
        # 4xx are client errors (WARNING, no traceback)
        log_level = logging.ERROR if exc.status_code >= 500 else logging.WARNING
        logger.log(
            log_level,
            "app_error",
            extra={
                "request_id": request_id,
                "error_code": exc.error_code,
                "status_code": exc.status_code,
                "message": str(exc),
                "path": request.url.path,
            },
            exc_info=exc.status_code >= 500,
        )

        return JSONResponse(
            status_code=exc.status_code,
            content={
                "error": exc.error_code,
                "message": str(exc),
                "request_id": request_id,
            },
        )

    @app.exception_handler(PydanticValidationError)
    async def pydantic_error_handler(
        request: Request, exc: PydanticValidationError
    ) -> JSONResponse:
        # Pydantic's structured error list is safe to expose to clients
        return JSONResponse(
            status_code=422,
            content={
                "error": "validation_error",
                "message": "Request validation failed",
                "details": exc.errors(),
            },
        )

    @app.exception_handler(Exception)
    async def unhandled_exception_handler(
        request: Request, exc: Exception
    ) -> JSONResponse:
        # Last-resort catch-all: log the full traceback, return a generic
        # message so internals never leak
        request_id = getattr(request.state, "request_id", None)
        logger.error(
            "unhandled_exception",
            extra={"request_id": request_id, "path": request.url.path},
            exc_info=True,
        )
        return JSONResponse(
            status_code=500,
            content={
                "error": "internal_error",
                "message": "An unexpected error occurred",
                "request_id": request_id,
            },
        )
|
|
1476
|
+
```
|
|
1477
|
+
|
|
1478
|
+
---
|
|
1479
|
+
|
|
1480
|
+
# 12. FRONTEND SERVICE ABSTRACTION
|
|
1481
|
+
|
|
1482
|
+
## Why a Service Layer on the Frontend
|
|
1483
|
+
|
|
1484
|
+
Components should not contain `fetch` calls. API communication belongs in a service layer that handles auth headers, error normalization, and response typing. Components just consume hooks; hooks consume services.
|
|
1485
|
+
|
|
1486
|
+
## API Client — The Transport Layer
|
|
1487
|
+
|
|
1488
|
+
```typescript
|
|
1489
|
+
// src/services/api-client.ts
|
|
1490
|
+
import { TokenStorage } from "@/lib/token-storage";
|
|
1491
|
+
|
|
1492
|
+
interface RequestOptions extends RequestInit {
  // When true, no Authorization header is attached and 401s are not
  // retried via token refresh (used by the refresh call itself)
  skipAuth?: boolean;
}
|
|
1495
|
+
|
|
1496
|
+
// Normalized error thrown for every non-OK API response; mirrors the
// backend's { error, message, request_id } envelope.
class ApiError extends Error {
  constructor(
    public readonly status: number,
    public readonly code: string,
    message: string,
    public readonly requestId?: string
  ) {
    super(message);
    this.name = "ApiError";
  }
}
|
|
1507
|
+
|
|
1508
|
+
async function apiClient<T>(
|
|
1509
|
+
path: string,
|
|
1510
|
+
options: RequestOptions = {}
|
|
1511
|
+
): Promise<T> {
|
|
1512
|
+
const { skipAuth = false, ...fetchOptions } = options;
|
|
1513
|
+
|
|
1514
|
+
const headers: Record<string, string> = {
|
|
1515
|
+
"Content-Type": "application/json",
|
|
1516
|
+
...(fetchOptions.headers as Record<string, string>),
|
|
1517
|
+
};
|
|
1518
|
+
|
|
1519
|
+
if (!skipAuth) {
|
|
1520
|
+
const token = TokenStorage.getAccessToken();
|
|
1521
|
+
if (token) headers["Authorization"] = `Bearer ${token}`;
|
|
1522
|
+
}
|
|
1523
|
+
|
|
1524
|
+
const response = await fetch(`${import.meta.env.VITE_API_BASE_URL}${path}`, {
|
|
1525
|
+
...fetchOptions,
|
|
1526
|
+
headers,
|
|
1527
|
+
});
|
|
1528
|
+
|
|
1529
|
+
const requestId = response.headers.get("X-Request-ID") ?? undefined;
|
|
1530
|
+
|
|
1531
|
+
if (!response.ok) {
|
|
1532
|
+
let errorBody: { error?: string; message?: string } = {};
|
|
1533
|
+
try {
|
|
1534
|
+
errorBody = await response.json();
|
|
1535
|
+
} catch {
|
|
1536
|
+
// Non-JSON error body — use defaults
|
|
1537
|
+
}
|
|
1538
|
+
|
|
1539
|
+
// Silently attempt token refresh on 401
|
|
1540
|
+
if (response.status === 401 && !skipAuth) {
|
|
1541
|
+
const refreshed = await attemptTokenRefresh();
|
|
1542
|
+
if (refreshed) {
|
|
1543
|
+
return apiClient<T>(path, options); // Retry original request
|
|
1544
|
+
}
|
|
1545
|
+
}
|
|
1546
|
+
|
|
1547
|
+
throw new ApiError(
|
|
1548
|
+
response.status,
|
|
1549
|
+
errorBody.error ?? "unknown_error",
|
|
1550
|
+
errorBody.message ?? "An unexpected error occurred",
|
|
1551
|
+
requestId
|
|
1552
|
+
);
|
|
1553
|
+
}
|
|
1554
|
+
|
|
1555
|
+
if (response.status === 204) return undefined as T;
|
|
1556
|
+
return response.json() as Promise<T>;
|
|
1557
|
+
}
|
|
1558
|
+
|
|
1559
|
+
// Exchange the stored refresh token for a new access token.
// Returns true on success; on any failure, clears all stored tokens
// (forcing a fresh login) and returns false.
async function attemptTokenRefresh(): Promise<boolean> {
  const refreshToken = TokenStorage.getRefreshToken();
  if (!refreshToken) return false;

  try {
    const result = await apiClient<{ access_token: string }>(
      "/auth/refresh",
      {
        method: "POST",
        body: JSON.stringify({ refresh_token: refreshToken }),
        // The refresh call must never itself trigger a refresh attempt
        skipAuth: true,
      }
    );
    TokenStorage.setAccessToken(result.access_token);
    return true;
  } catch {
    // Refresh token rejected or network failure — drop the session
    TokenStorage.clear();
    return false;
  }
}
|
|
1579
|
+
|
|
1580
|
+
export { apiClient, ApiError };
|
|
1581
|
+
```
|
|
1582
|
+
|
|
1583
|
+
## GitHub Service
|
|
1584
|
+
|
|
1585
|
+
```typescript
|
|
1586
|
+
// src/services/github.service.ts
|
|
1587
|
+
import { apiClient } from "./api-client";
|
|
1588
|
+
|
|
1589
|
+
// Mirrors the backend's GitHubRepoSummary schema (snake_case preserved)
export interface RepoSummary {
  name: string;
  stars: number;
  forks: number;
  language: string | null;
  description: string | null;
}
|
|
1596
|
+
|
|
1597
|
+
// Mirrors the backend's GitHubAnalytics schema (snake_case preserved)
export interface GitHubAnalytics {
  username: string;
  public_repos: number;
  followers: number;
  following: number;
  total_stars: number;
  // Language name -> repo count
  top_languages: Record<string, number>;
  repos: RepoSummary[];
}
|
|
1606
|
+
|
|
1607
|
+
// Thin feature service: typed wrappers over the shared apiClient.
export const githubService = {
  // GET aggregated analytics for a GitHub username (auth handled by apiClient)
  async getAnalytics(username: string): Promise<GitHubAnalytics> {
    return apiClient<GitHubAnalytics>(`/github/stats/${username}`);
  },
};
|
|
1612
|
+
```
|
|
1613
|
+
|
|
1614
|
+
## React Hook — Consumes the Service
|
|
1615
|
+
|
|
1616
|
+
```typescript
|
|
1617
|
+
// src/hooks/useGithubAnalytics.ts
|
|
1618
|
+
import { useCallback, useEffect, useRef, useState } from "react";
|
|
1619
|
+
import { githubService, GitHubAnalytics } from "@/services/github.service";
|
|
1620
|
+
import { ApiError } from "@/services/api-client";
|
|
1621
|
+
|
|
1622
|
+
// Hook-internal fetch state; exactly one of data/error is set when not loading
interface State {
  data: GitHubAnalytics | null;
  isLoading: boolean;
  error: string | null;
}
|
|
1627
|
+
|
|
1628
|
+
export function useGithubAnalytics(username: string | null) {
|
|
1629
|
+
const [state, setState] = useState<State>({
|
|
1630
|
+
data: null,
|
|
1631
|
+
isLoading: false,
|
|
1632
|
+
error: null,
|
|
1633
|
+
});
|
|
1634
|
+
|
|
1635
|
+
const fetch = useCallback(async () => {
|
|
1636
|
+
if (!username) return;
|
|
1637
|
+
|
|
1638
|
+
setState({ data: null, isLoading: true, error: null });
|
|
1639
|
+
|
|
1640
|
+
try {
|
|
1641
|
+
const data = await githubService.getAnalytics(username);
|
|
1642
|
+
setState({ data, isLoading: false, error: null });
|
|
1643
|
+
} catch (err) {
|
|
1644
|
+
const message =
|
|
1645
|
+
err instanceof ApiError
|
|
1646
|
+
? err.message
|
|
1647
|
+
: "Failed to load GitHub analytics";
|
|
1648
|
+
setState({ data: null, isLoading: false, error: message });
|
|
1649
|
+
}
|
|
1650
|
+
}, [username]);
|
|
1651
|
+
|
|
1652
|
+
useEffect(() => {
|
|
1653
|
+
fetch();
|
|
1654
|
+
}, [fetch]);
|
|
1655
|
+
|
|
1656
|
+
return { ...state, refetch: fetch };
|
|
1657
|
+
}
|
|
1658
|
+
```
|
|
1659
|
+
|
|
1660
|
+
---
|
|
1661
|
+
|
|
1662
|
+
# 13. REACT SCALABLE FOLDER STRUCTURE
|
|
1663
|
+
|
|
1664
|
+
## Principle: Feature-Based, Not Type-Based
|
|
1665
|
+
|
|
1666
|
+
Grouping by feature keeps related code co-located. When you work on GitHub analytics, everything you need is in `/features/github` — not scattered across `/components`, `/services`, `/hooks`.
|
|
1667
|
+
|
|
1668
|
+
```
|
|
1669
|
+
frontend/
|
|
1670
|
+
├── src/
|
|
1671
|
+
│ ├── app/
|
|
1672
|
+
│ │ ├── App.tsx # Root with providers
|
|
1673
|
+
│ │ ├── router.tsx # Route definitions
|
|
1674
|
+
│ │ └── providers.tsx # Auth, Query, Theme providers
|
|
1675
|
+
│ │
|
|
1676
|
+
│ ├── features/
|
|
1677
|
+
│ │ ├── auth/
|
|
1678
|
+
│ │ │ ├── components/
|
|
1679
|
+
│ │ │ │ ├── LoginForm.tsx
|
|
1680
|
+
│ │ │ │ └── RegisterForm.tsx
|
|
1681
|
+
│ │ │ ├── hooks/
|
|
1682
|
+
│ │ │ │ └── useAuth.ts
|
|
1683
|
+
│ │ │ ├── services/
|
|
1684
|
+
│ │ │ │ └── auth.service.ts
|
|
1685
|
+
│ │ │ └── index.ts # Public API for the feature
|
|
1686
|
+
│ │ │
|
|
1687
|
+
│ │ └── github/
|
|
1688
|
+
│ │ ├── components/
|
|
1689
|
+
│ │ │ ├── AnalyticsDashboard.tsx
|
|
1690
|
+
│ │ │ ├── RepoList.tsx
|
|
1691
|
+
│ │ │ ├── LanguageChart.tsx
|
|
1692
|
+
│ │ │ └── StatsGrid.tsx
|
|
1693
|
+
│ │ ├── hooks/
|
|
1694
|
+
│ │ │ └── useGithubAnalytics.ts
|
|
1695
|
+
│ │ ├── services/
|
|
1696
|
+
│ │ │ └── github.service.ts
|
|
1697
|
+
│ │ ├── three/ # R3F scene for this feature
|
|
1698
|
+
│ │ │ ├── GlobeScene.tsx
|
|
1699
|
+
│ │ │ └── ActivityMesh.tsx
|
|
1700
|
+
│ │ └── index.ts
|
|
1701
|
+
│ │
|
|
1702
|
+
│ ├── components/ # Truly shared UI primitives
|
|
1703
|
+
│ │ ├── ui/
|
|
1704
|
+
│ │ │ ├── Button.tsx
|
|
1705
|
+
│ │ │ ├── Card.tsx
|
|
1706
|
+
│ │ │ ├── Spinner.tsx
|
|
1707
|
+
│ │ │ └── ErrorBoundary.tsx
|
|
1708
|
+
│ │ └── layout/
|
|
1709
|
+
│ │ ├── Navbar.tsx
|
|
1710
|
+
│ │ └── PageShell.tsx
|
|
1711
|
+
│ │
|
|
1712
|
+
│ ├── lib/
|
|
1713
|
+
│ │ ├── token-storage.ts # Auth token management
|
|
1714
|
+
│ │ └── cn.ts # Tailwind class merge utility
|
|
1715
|
+
│ │
|
|
1716
|
+
│ ├── services/
|
|
1717
|
+
│ │ └── api-client.ts # Base HTTP client (shared)
|
|
1718
|
+
│ │
|
|
1719
|
+
│ ├── styles/
|
|
1720
|
+
│ │ ├── globals.css
|
|
1721
|
+
│ │ └── tailwind.config.ts
|
|
1722
|
+
│ │
|
|
1723
|
+
│ └── types/
|
|
1724
|
+
│ └── api.ts # Shared API type definitions
|
|
1725
|
+
│
|
|
1726
|
+
├── public/
|
|
1727
|
+
├── index.html
|
|
1728
|
+
├── vite.config.ts
|
|
1729
|
+
└── package.json
|
|
1730
|
+
```
|
|
1731
|
+
|
|
1732
|
+
---
|
|
1733
|
+
|
|
1734
|
+
# 14. TAILWINDCSS SYSTEM
|
|
1735
|
+
|
|
1736
|
+
## Design Tokens via Config
|
|
1737
|
+
|
|
1738
|
+
```typescript
|
|
1739
|
+
// tailwind.config.ts
|
|
1740
|
+
import type { Config } from "tailwindcss";
|
|
1741
|
+
|
|
1742
|
+
export default {
  // Files scanned for class names at build time; unmatched classes are purged.
  content: ["./index.html", "./src/**/*.{ts,tsx}"],
  theme: {
    extend: {
      colors: {
        // Semantic color system — not raw hex values in components
        brand: {
          50: "#f0f9ff",
          500: "#0ea5e9",
          600: "#0284c7",
          900: "#0c4a6e",
        },
        // Dark-theme surface layers: page background, raised panels, separators.
        surface: {
          DEFAULT: "#0f172a",
          elevated: "#1e293b",
          border: "#334155",
        },
        // Text hierarchy from highest to lowest emphasis.
        text: {
          primary: "#f1f5f9",
          secondary: "#94a3b8",
          muted: "#475569",
        },
      },
      fontFamily: {
        sans: ["Inter", "system-ui", "sans-serif"],
        mono: ["JetBrains Mono", "monospace"],
      },
      // Named animation shorthands; each pairs with a keyframes entry below.
      animation: {
        "fade-in": "fadeIn 0.2s ease-out",
        "slide-up": "slideUp 0.3s ease-out",
      },
      keyframes: {
        fadeIn: {
          "0%": { opacity: "0" },
          "100%": { opacity: "1" },
        },
        slideUp: {
          "0%": { opacity: "0", transform: "translateY(8px)" },
          "100%": { opacity: "1", transform: "translateY(0)" },
        },
      },
    },
  },
  plugins: [],
} satisfies Config;
|
|
1787
|
+
```
|
|
1788
|
+
|
|
1789
|
+
## Component Composition Pattern
|
|
1790
|
+
|
|
1791
|
+
```tsx
|
|
1792
|
+
// src/lib/cn.ts
|
|
1793
|
+
import { clsx, type ClassValue } from "clsx";
|
|
1794
|
+
import { twMerge } from "tailwind-merge";
|
|
1795
|
+
|
|
1796
|
+
// Merges Tailwind classes and resolves conflicts deterministically
|
|
1797
|
+
export function cn(...inputs: ClassValue[]) {
|
|
1798
|
+
return twMerge(clsx(inputs));
|
|
1799
|
+
}
|
|
1800
|
+
|
|
1801
|
+
// src/components/ui/Button.tsx
|
|
1802
|
+
import { cn } from "@/lib/cn";
|
|
1803
|
+
import { type ButtonHTMLAttributes, forwardRef } from "react";
|
|
1804
|
+
|
|
1805
|
+
type Variant = "primary" | "secondary" | "ghost" | "danger";
type Size = "sm" | "md" | "lg";

interface ButtonProps extends ButtonHTMLAttributes<HTMLButtonElement> {
  variant?: Variant;
  size?: Size;
  isLoading?: boolean;
}

// Per-variant visual treatment; a keyed lookup avoids conditional class strings.
const VARIANT_STYLES: Record<Variant, string> = {
  primary:
    "bg-brand-500 text-white hover:bg-brand-600 focus:ring-brand-500",
  secondary:
    "bg-surface-elevated text-text-primary border border-surface-border hover:bg-surface-border",
  ghost:
    "text-text-secondary hover:text-text-primary hover:bg-surface-elevated",
  danger: "bg-red-600 text-white hover:bg-red-700 focus:ring-red-500",
};

// Height / padding / typography per size step.
const SIZE_STYLES: Record<Size, string> = {
  sm: "h-8 px-3 text-sm",
  md: "h-10 px-4 text-sm",
  lg: "h-12 px-6 text-base",
};

/**
 * Shared button primitive. Forwards its ref to the underlying <button>
 * and disables itself whenever `isLoading` shows the inline spinner.
 */
export const Button = forwardRef<HTMLButtonElement, ButtonProps>(
  function ButtonImpl(
    {
      variant = "primary",
      size = "md",
      isLoading = false,
      disabled,
      className,
      children,
      ...rest
    },
    ref
  ) {
    // Base layout + focus/disabled treatment, then variant/size, then
    // caller overrides last so `className` wins conflicts via cn().
    const classes = cn(
      "inline-flex items-center justify-center gap-2 rounded-lg font-medium",
      "transition-colors duration-150 focus:outline-none focus:ring-2 focus:ring-offset-2",
      "disabled:opacity-50 disabled:cursor-not-allowed",
      VARIANT_STYLES[variant],
      SIZE_STYLES[size],
      className
    );

    return (
      <button
        ref={ref}
        disabled={disabled || isLoading}
        className={classes}
        {...rest}
      >
        {isLoading && (
          <span className="h-4 w-4 animate-spin rounded-full border-2 border-current border-t-transparent" />
        )}
        {children}
      </button>
    );
  }
);

Button.displayName = "Button";
|
|
1865
|
+
```
|
|
1866
|
+
|
|
1867
|
+
---
|
|
1868
|
+
|
|
1869
|
+
# 15. REACT THREE FIBER SCENE ARCHITECTURE
|
|
1870
|
+
|
|
1871
|
+
## Performance Principles
|
|
1872
|
+
|
|
1873
|
+
R3F renders inside a WebGL context on a continuous loop. Every unnecessary re-render is a frame budget hit. Keep scene logic inside R3F-specific components; never let scene state leak into React's reconciler unnecessarily.
|
|
1874
|
+
|
|
1875
|
+
```tsx
|
|
1876
|
+
// src/features/github/three/GlobeScene.tsx
|
|
1877
|
+
import { Canvas } from "@react-three/fiber";
|
|
1878
|
+
import { OrbitControls, Environment, Stars } from "@react-three/drei";
|
|
1879
|
+
import { Suspense, memo } from "react";
|
|
1880
|
+
import { ActivityGlobe } from "./ActivityGlobe";
|
|
1881
|
+
import { CommitParticles } from "./CommitParticles";
|
|
1882
|
+
import type { GitHubAnalytics } from "../services/github.service";
|
|
1883
|
+
|
|
1884
|
+
interface GlobeSceneProps {
  analytics: GitHubAnalytics;
}

/**
 * Scene entry point. Canvas is isolated here.
 * memo prevents re-render when parent re-renders with same analytics ref.
 *
 * Performance decisions:
 * - dpr capped at [1, 2] — prevents GPU overload on high-DPI displays
 * - gl.antialias: true — acceptable cost at this scene complexity
 * - frameloop stays at the default "always": this scene animates every
 *   frame (OrbitControls autoRotate + useFrame rotation in ActivityGlobe),
 *   and under frameloop="demand" those updates freeze because nothing
 *   calls invalidate() per frame.
 */
export const GlobeScene = memo(({ analytics }: GlobeSceneProps) => (
  <div className="h-[500px] w-full rounded-xl overflow-hidden">
    <Canvas
      camera={{ position: [0, 0, 4], fov: 60 }}
      dpr={[1, 2]}
      gl={{ antialias: true, alpha: true }}
    >
      <Suspense fallback={null}>
        <SceneContent analytics={analytics} />
      </Suspense>
    </Canvas>
  </div>
));

GlobeScene.displayName = "GlobeScene";
|
|
1913
|
+
|
|
1914
|
+
// Separate inner component: keeps Canvas config clean
|
|
1915
|
+
// Inner scene graph, separated from GlobeScene so Canvas config stays clean.
function SceneContent({ analytics }: GlobeSceneProps) {
  return (
    <>
      {/* Low ambient fill plus one key light from the upper-right. */}
      <ambientLight intensity={0.4} />
      <directionalLight position={[5, 5, 5]} intensity={1.2} />

      {/* Background dressing: starfield and image-based lighting preset. */}
      <Stars radius={80} depth={50} count={3000} factor={4} fade />
      <Environment preset="night" />

      {/* Data-driven visuals; particle count scales with total stars. */}
      <ActivityGlobe analytics={analytics} />
      <CommitParticles count={analytics.total_stars} />

      {/* Zoom-limited orbit with a slow idle spin; panning disabled. */}
      <OrbitControls
        enablePan={false}
        minDistance={2.5}
        maxDistance={8}
        autoRotate
        autoRotateSpeed={0.4}
      />
    </>
  );
}
|
|
1937
|
+
```
|
|
1938
|
+
|
|
1939
|
+
```tsx
|
|
1940
|
+
// src/features/github/three/ActivityGlobe.tsx
|
|
1941
|
+
import { useRef, useMemo } from "react";
|
|
1942
|
+
import { useFrame } from "@react-three/fiber";
|
|
1943
|
+
import { Sphere, MeshDistortMaterial } from "@react-three/drei";
|
|
1944
|
+
import * as THREE from "three";
|
|
1945
|
+
import type { GitHubAnalytics } from "../services/github.service";
|
|
1946
|
+
|
|
1947
|
+
interface ActivityGlobeProps {
  analytics: GitHubAnalytics;
}

/**
 * Distorted sphere whose surface turbulence scales with the account's
 * total stars, with a slow two-axis idle rotation.
 */
export function ActivityGlobe({ analytics }: ActivityGlobeProps) {
  const globeRef = useRef<THREE.Mesh>(null);

  // Map star count onto a 0.1–0.5 distortion range; recomputed only when
  // the analytics data actually changes.
  const distortionFactor = useMemo(() => {
    const ratio = Math.min(analytics.total_stars / 10000, 1);
    return 0.1 + ratio * 0.4;
  }, [analytics.total_stars]);

  // Runs every frame — allocation-free, and bails out until the mesh mounts.
  useFrame(({ clock }) => {
    const mesh = globeRef.current;
    if (!mesh) return;
    const elapsed = clock.elapsedTime;
    mesh.rotation.y = elapsed * 0.08;
    mesh.rotation.x = Math.sin(elapsed * 0.05) * 0.05;
  });

  return (
    <Sphere ref={globeRef} args={[1.4, 64, 64]}>
      <MeshDistortMaterial
        color="#0ea5e9"
        distort={distortionFactor}
        speed={1.5}
        roughness={0.2}
        metalness={0.8}
        transparent
        opacity={0.85}
      />
    </Sphere>
  );
}
|
|
1982
|
+
```
|
|
1983
|
+
|
|
1984
|
+
---
|
|
1985
|
+
|
|
1986
|
+
# 16. OBSERVABILITY PATTERNS
|
|
1987
|
+
|
|
1988
|
+
## Structured Log Configuration
|
|
1989
|
+
|
|
1990
|
+
```python
|
|
1991
|
+
# app/core/logging.py
|
|
1992
|
+
import logging
|
|
1993
|
+
import sys
|
|
1994
|
+
from typing import Any
|
|
1995
|
+
|
|
1996
|
+
|
|
1997
|
+
class RequestContextFilter(logging.Filter):
    """
    Injects request_id into all log records when available.

    Records logged outside a request (startup, background tasks) receive the
    placeholder "no-request-context", so formatters referencing
    %(request_id)s never raise. Enables filtering all logs for a specific
    request in production log systems.
    """

    def filter(self, record: logging.LogRecord) -> bool:
        # logging filters may mutate records; returning True keeps the
        # record flowing to handlers.
        if not hasattr(record, "request_id"):
            record.request_id = "no-request-context"
        return True


def setup_logging(environment: str) -> None:
    """
    Configure the root logger for the given environment.

    JSON output in prod (ingested by Datadog, Loki, CloudWatch); readable
    console output in dev. Safe to call more than once: existing root
    handlers are replaced rather than stacked.

    Args:
        environment: "development" enables DEBUG level and console format;
            anything else gets INFO level, "production" additionally
            attempts JSON formatting.
    """
    root_logger = logging.getLogger()
    root_logger.setLevel(logging.DEBUG if environment == "development" else logging.INFO)

    # Idempotency guard: without this, every call (app factory re-entry,
    # test suites) stacks another handler and duplicates every log line.
    root_logger.handlers.clear()

    handler = logging.StreamHandler(sys.stdout)
    handler.addFilter(RequestContextFilter())

    if environment == "production":
        try:
            # Optional dependency — fall back to plain text when absent.
            from pythonjsonlogger import jsonlogger

            formatter: logging.Formatter = jsonlogger.JsonFormatter(
                fmt="%(asctime)s %(name)s %(levelname)s %(message)s %(request_id)s"
            )
        except ImportError:
            formatter = logging.Formatter(
                "%(asctime)s | %(levelname)s | %(name)s | %(message)s"
            )
    else:
        formatter = logging.Formatter(
            "%(asctime)s | %(levelname)-8s | %(name)-30s | %(message)s",
            datefmt="%H:%M:%S",
        )

    handler.setFormatter(formatter)
    root_logger.addHandler(handler)

    # Suppress noisy third-party loggers.
    logging.getLogger("httpx").setLevel(logging.WARNING)
    logging.getLogger("sqlalchemy.engine").setLevel(
        logging.INFO if environment == "development" else logging.WARNING
    )
|
|
2044
|
+
```
|
|
2045
|
+
|
|
2046
|
+
## Health Check Endpoint
|
|
2047
|
+
|
|
2048
|
+
```python
|
|
2049
|
+
# app/modules/health/router.py
|
|
2050
|
+
import logging
|
|
2051
|
+
from fastapi import APIRouter
|
|
2052
|
+
from fastapi.responses import JSONResponse
|
|
2053
|
+
|
|
2054
|
+
from app.core.database import get_engine
|
|
2055
|
+
from app.core.redis import get_redis
|
|
2056
|
+
|
|
2057
|
+
logger = logging.getLogger("api.health")
|
|
2058
|
+
router = APIRouter()
|
|
2059
|
+
|
|
2060
|
+
|
|
2061
|
+
@router.get("/health")
async def health_check() -> JSONResponse:
    """
    Infrastructure health endpoint.

    Used by Docker HEALTHCHECK, NGINX upstream checks, k8s readiness probes.

    Returns:
        200 with per-dependency statuses when everything is reachable,
        503 ("degraded") if any critical dependency is down.
    """
    # SQLAlchemy 2.x connections reject plain SQL strings ("Not an
    # executable object") — textual SQL must be wrapped in text().
    from sqlalchemy import text

    checks: dict[str, str] = {}
    healthy = True

    # Database check — a trivial round-trip proves connectivity and auth.
    try:
        engine = get_engine()
        async with engine.connect() as conn:
            await conn.execute(text("SELECT 1"))
        checks["database"] = "ok"
    except Exception as exc:
        logger.error("health_check_db_failed", extra={"error": str(exc)})
        checks["database"] = "failed"
        healthy = False

    # Redis check
    try:
        redis = await get_redis()
        await redis.ping()
        checks["redis"] = "ok"
    except Exception as exc:
        logger.error("health_check_redis_failed", extra={"error": str(exc)})
        checks["redis"] = "failed"
        healthy = False

    status_code = 200 if healthy else 503
    return JSONResponse(
        status_code=status_code,
        content={"status": "healthy" if healthy else "degraded", "checks": checks},
    )
|
|
2097
|
+
```
|
|
2098
|
+
|
|
2099
|
+
---
|
|
2100
|
+
|
|
2101
|
+
# 17. SHARED DEPENDENCIES — `dependencies.py`
|
|
2102
|
+
|
|
2103
|
+
```python
|
|
2104
|
+
# app/dependencies.py
|
|
2105
|
+
from typing import Annotated
|
|
2106
|
+
|
|
2107
|
+
from fastapi import Depends
|
|
2108
|
+
|
|
2109
|
+
from app.modules.github.client import GitHubClient
|
|
2110
|
+
from app.modules.github.cache import GitHubCache
|
|
2111
|
+
from app.modules.github.service import GitHubService
|
|
2112
|
+
from app.modules.users.repository import UserRepository
|
|
2113
|
+
from app.core.database import get_session
|
|
2114
|
+
from sqlalchemy.ext.asyncio import AsyncSession
|
|
2115
|
+
|
|
2116
|
+
|
|
2117
|
+
def get_github_client() -> GitHubClient:
    """Build and return a new GitHubClient instance."""
    client = GitHubClient()
    return client
|
|
2119
|
+
|
|
2120
|
+
|
|
2121
|
+
def get_github_cache() -> GitHubCache:
    """Build and return a new GitHubCache instance."""
    cache = GitHubCache()
    return cache
|
|
2123
|
+
|
|
2124
|
+
|
|
2125
|
+
def get_github_service(
    client: Annotated[GitHubClient, Depends(get_github_client)],
    cache: Annotated[GitHubCache, Depends(get_github_cache)],
) -> GitHubService:
    """Assemble a GitHubService from its injected client and cache."""
    service = GitHubService(client=client, cache=cache)
    return service
|
|
2130
|
+
|
|
2131
|
+
|
|
2132
|
+
def get_user_repository(
    session: Annotated[AsyncSession, Depends(get_session)],
) -> UserRepository:
    """Wrap the injected database session in a UserRepository."""
    repository = UserRepository(session=session)
    return repository
|
|
2136
|
+
```
|
|
2137
|
+
|
|
2138
|
+
---
|
|
2139
|
+
|
|
2140
|
+
# PRODUCTION TRADEOFFS — ENGINEERING NOTES
|
|
2141
|
+
|
|
2142
|
+
## What This Architecture Optimizes For
|
|
2143
|
+
|
|
2144
|
+
- **Debuggability**: Every request has a `request_id`. Every failure is logged with context. On-call engineers can trace any incident end-to-end.
|
|
2145
|
+
- **Testability**: Services are injectable. Repositories are mockable. Routes are thin. You can test the entire business logic layer without HTTP.
|
|
2146
|
+
- **Resilience**: Redis failures don't block requests. GitHub API failures are retried transparently. External errors are translated into domain errors before surfacing.
|
|
2147
|
+
- **Maintainability**: New engineers find feature logic in `features/github/` or `modules/github/` — not spread across 12 files in different directories.
|
|
2148
|
+
|
|
2149
|
+
## What This Architecture Trades Off
|
|
2150
|
+
|
|
2151
|
+
- **Initial velocity**: More files, more boilerplate than a single-file FastAPI script. The payoff comes at scale, not day one.
|
|
2152
|
+
- **Flexibility**: DTOs enforcing strict response shapes mean API changes require schema updates. This is the right tradeoff for a contract-driven API.
|
|
2153
|
+
- **Async complexity**: Async SQLAlchemy has subtleties (session lifecycle, lazy loading). The patterns above handle these correctly, but they require understanding the async execution model.
|
|
2154
|
+
|
|
2155
|
+
## Next Layer: Worker Systems
|
|
2156
|
+
|
|
2157
|
+
When GitHub data fetching becomes too slow for synchronous request/response, move it to a background worker:
|
|
2158
|
+
|
|
2159
|
+
```
|
|
2160
|
+
Request → API → Publish job to RabbitMQ → Return 202 Accepted
|
|
2161
|
+
Worker → Consume job → Fetch GitHub → Store in DB → Publish event
|
|
2162
|
+
WebSocket / SSE → Push completion to client
|
|
2163
|
+
```
|
|
2164
|
+
|
|
2165
|
+
The current service layer is already structured to support this transition — the `GitHubService` methods can be called from a worker with zero modification.
|