agri-secure-framework 2.0.0__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- agri_secure/__init__.py +857 -0
- agri_secure/adapters/__init__.py +0 -0
- agri_secure/adapters/cache.py +527 -0
- agri_secure/adapters/database.py +529 -0
- agri_secure/admin.py +330 -0
- agri_secure/blockchain/__init__.py +0 -0
- agri_secure/blockchain/ledger.py +609 -0
- agri_secure/cli.py +565 -0
- agri_secure/core/__init__.py +0 -0
- agri_secure/core/access_control.py +750 -0
- agri_secure/core/authentication.py +685 -0
- agri_secure/core/encryption.py +493 -0
- agri_secure/exceptions/__init__.py +307 -0
- agri_secure/models/__init__.py +0 -0
- agri_secure/models/schemas.py +570 -0
- agri_secure/utils/__init__.py +0 -0
- agri_secure/utils/helpers.py +362 -0
- agri_secure/utils/validators.py +361 -0
- agri_secure_framework-2.0.0.dist-info/METADATA +230 -0
- agri_secure_framework-2.0.0.dist-info/RECORD +24 -0
- agri_secure_framework-2.0.0.dist-info/WHEEL +5 -0
- agri_secure_framework-2.0.0.dist-info/entry_points.txt +3 -0
- agri_secure_framework-2.0.0.dist-info/licenses/LICENSE +21 -0
- agri_secure_framework-2.0.0.dist-info/top_level.txt +1 -0
agri_secure/__init__.py
ADDED
|
@@ -0,0 +1,857 @@
|
|
|
1
|
+
"""
|
|
2
|
+
agri_secure - Production-ready cybersecurity framework for agricultural data systems
|
|
3
|
+
===============================================================================
|
|
4
|
+
|
|
5
|
+
A comprehensive framework that provides:
|
|
6
|
+
- Military-grade encryption (AES-256)
|
|
7
|
+
- Multi-factor authentication
|
|
8
|
+
- Role-based access control
|
|
9
|
+
- Blockchain-inspired audit trails
|
|
10
|
+
- Cloud-native architecture
|
|
11
|
+
- Database agnostic (PostgreSQL, MySQL, MongoDB, etc.)
|
|
12
|
+
- Web framework adapters (Django, Flask, FastAPI)
|
|
13
|
+
|
|
14
|
+
Basic Usage:
|
|
15
|
+
>>> from agri_secure import AgriculturalSecurity
|
|
16
|
+
>>>
|
|
17
|
+
>>> # Initialize the framework
|
|
18
|
+
>>> security = AgriculturalSecurity(
|
|
19
|
+
... app_name="my_farm_app",
|
|
20
|
+
... environment="production",
|
|
21
|
+
... encryption_key=os.getenv("ENCRYPTION_KEY")
|
|
22
|
+
... )
|
|
23
|
+
>>>
|
|
24
|
+
>>> # Secure data sharing
|
|
25
|
+
>>> result = security.share_data(
|
|
26
|
+
... data={"farmer": "John", "yield": 500},
|
|
27
|
+
... recipient="ministry",
|
|
28
|
+
... user_role="extension_officer"
|
|
29
|
+
... )
|
|
30
|
+
"""
|
|
31
|
+
|
|
32
|
+
import logging
|
|
33
|
+
import os
|
|
34
|
+
import sys
|
|
35
|
+
from typing import Dict, Any, Optional, Union, List
|
|
36
|
+
from datetime import datetime, timedelta
|
|
37
|
+
from pathlib import Path
|
|
38
|
+
|
|
39
|
+
# Version / package metadata
__version__ = "2.0.0"
__author__ = "Zambia Agricultural Cybersecurity Initiative"
__license__ = "MIT"

# Configure root logger
# NOTE(review): configuring the *root* logger (and opening 'agri_secure.log'
# in the current working directory) at import time is a side effect that
# affects every consumer of this package. Library best practice is to attach
# a NullHandler to the package logger and let the application configure
# handlers — consider confirming downstream apps do not depend on this.
logging.basicConfig(
    level=logging.INFO,
    format='%(asctime)s - %(name)s - %(levelname)s - %(message)s',
    handlers=[
        logging.StreamHandler(sys.stdout),
        logging.FileHandler('agri_secure.log')
    ]
)

# Package-level logger (child loggers are created per-app in AgriculturalSecurity).
logger = logging.getLogger(__name__)

# Import core components
# These re-exports make the public API importable directly from `agri_secure`.
from .core.encryption import AgriculturalEncryption
from .core.authentication import AgriculturalAuthenticator
from .core.access_control import AgriculturalAccessControl
from .blockchain.ledger import AgriculturalLedger
from .models.schemas import (
    UserBase, UserCreate, UserInDB, UserResponse,
    FarmBase, FarmCreate, FarmInDB, FarmResponse,
    DataClassification, UserRole
)
from .exceptions import (
    AgriSecureError, AuthenticationError, AuthorizationError,
    EncryptionError, ValidationError, BlockchainError
)
from .utils.validators import DataValidator
from .utils.helpers import generate_id, hash_password, verify_password
|
|
72
|
+
|
|
73
|
+
class AgriculturalSecurity:
    """
    Main framework class - production-ready agricultural security system.

    This class integrates all security components (encryption, authentication,
    access control, blockchain audit trail, database/cache adapters, metrics
    and tracing) into a unified interface. Designed for high performance,
    scalability, and ease of use.
    """

    def __init__(
        self,
        app_name: str,
        environment: str = "development",
        encryption_key: Optional[str] = None,
        jwt_secret: Optional[str] = None,
        database_url: Optional[str] = None,
        redis_url: Optional[str] = None,
        mfa_required: bool = True,
        blockchain_enabled: bool = True,
        log_level: str = "INFO",
        metrics_enabled: bool = True,
        tracing_enabled: bool = False,
        cache_ttl: int = 3600,
        max_failed_attempts: int = 5,
        session_timeout: int = 28800,  # 8 hours
        token_expiry: int = 3600,  # 1 hour
        backup_enabled: bool = True,
        auto_migrate: bool = False
    ):
        """
        Initialize the agricultural security framework.

        Args:
            app_name: Name of your application
            environment: development/staging/production
            encryption_key: Master encryption key (from environment)
            jwt_secret: Secret for JWT tokens
            database_url: Database connection string
            redis_url: Redis URL for caching/sessions
            mfa_required: Require multi-factor authentication
            blockchain_enabled: Enable blockchain audit trail
            log_level: Logging level
            metrics_enabled: Enable Prometheus metrics
            tracing_enabled: Enable OpenTelemetry tracing
            cache_ttl: Cache TTL in seconds
            max_failed_attempts: Max failed login attempts before lockout
            session_timeout: Session timeout in seconds
            token_expiry: JWT token expiry in seconds
            backup_enabled: Enable automatic backups
            auto_migrate: Auto-run database migrations

        Raises:
            AgriSecureError: If production config is incomplete or any
                component fails to initialize.
        """
        self.app_name = app_name
        self.environment = environment
        self.start_time = datetime.utcnow()

        # Configure logging
        self._configure_logging(log_level)
        self.logger = logging.getLogger(f"agri_secure.{app_name}")
        self.logger.info(f"Initializing Agricultural Security Framework v{__version__}")
        self.logger.info(f"Environment: {environment}")

        # Production must not fall back to generated/implicit secrets.
        if environment == "production":
            self._validate_production_config(
                encryption_key, jwt_secret, database_url
            )

        # Initialize components
        try:
            # Core security
            self.encryption = AgriculturalEncryption(
                master_key=encryption_key,
                key_rotation_days=90,
                algorithm="AES-256-GCM"
            )
            self.logger.debug("Encryption module initialized")

            self.auth = AgriculturalAuthenticator(
                secret_key=jwt_secret,
                mfa_required=mfa_required,
                token_expiry=token_expiry,
                max_failed_attempts=max_failed_attempts,
                redis_url=redis_url,
                session_timeout=session_timeout
            )
            self.logger.debug("Authentication module initialized")

            self.access = AgriculturalAccessControl(
                cache_ttl=cache_ttl,
                redis_url=redis_url
            )
            self.logger.debug("Access control module initialized")

            # Blockchain ledger (optional — self.ledger is always defined so
            # later `if self.ledger:` checks are safe).
            if blockchain_enabled:
                self.ledger = AgriculturalLedger(
                    database_url=database_url,
                    consensus="raft",
                    backup_enabled=backup_enabled
                )
                self.logger.debug("Blockchain ledger initialized")
            else:
                self.ledger = None
                self.logger.warning("Blockchain ledger disabled")

            # Database connection
            if database_url:
                self._init_database(database_url, auto_migrate)

            # Cache connection
            if redis_url:
                self._init_cache(redis_url)

            # Metrics and tracing
            if metrics_enabled:
                self._init_metrics()

            if tracing_enabled:
                self._init_tracing()

            self.logger.info("Framework initialization complete")

        except Exception as e:
            self.logger.error(f"Failed to initialize framework: {e}")
            # FIX: chain the original exception so the root cause survives.
            raise AgriSecureError(f"Initialization failed: {e}") from e

    def _configure_logging(self, level: str):
        """Configure structured logging for cloud environments."""
        log_level = getattr(logging, level.upper(), logging.INFO)

        # Use JSON logging in production for better cloud integration
        if self.environment == "production":
            from pythonjsonlogger import jsonlogger

            log_handler = logging.StreamHandler(sys.stdout)
            formatter = jsonlogger.JsonFormatter(
                fmt='%(asctime)s %(name)s %(levelname)s %(message)s'
            )
            log_handler.setFormatter(formatter)

            # Replaces any handlers installed at import time.
            logging.root.handlers = [log_handler]
            logging.root.setLevel(log_level)

    def _validate_production_config(self, encryption_key, jwt_secret, database_url):
        """Validate required configuration for production.

        Raises:
            AgriSecureError: Listing every missing required setting at once.
        """
        missing = []

        if not encryption_key:
            missing.append("ENCRYPTION_KEY")
        if not jwt_secret:
            missing.append("JWT_SECRET")
        if not database_url:
            missing.append("DATABASE_URL")

        if missing:
            raise AgriSecureError(
                f"Production environment requires: {', '.join(missing)}. "
                "Set these in environment variables or .env file."
            )

    def _init_database(self, database_url: str, auto_migrate: bool):
        """Initialize database connection (sets self.db)."""
        # Log only the host/db portion — never the credentials before the '@'.
        self.logger.info(f"Connecting to database: {database_url.split('@')[-1]}")
        from .adapters.database import DatabaseAdapter
        self.db = DatabaseAdapter(database_url, auto_migrate)

    def _init_cache(self, redis_url: str):
        """Initialize Redis cache (sets self.cache)."""
        self.logger.info("Connecting to Redis cache")
        from .adapters.cache import CacheAdapter
        self.cache = CacheAdapter(redis_url)

    def _init_metrics(self):
        """Initialize Prometheus metrics (sets self.metrics)."""
        from prometheus_client import Counter, Histogram, Gauge

        self.metrics = {
            'requests': Counter('agri_secure_requests_total', 'Total requests'),
            'errors': Counter('agri_secure_errors_total', 'Total errors'),
            'response_time': Histogram('agri_secure_response_seconds', 'Response time'),
            'active_users': Gauge('agri_secure_active_users', 'Active users')
        }
        self.logger.debug("Metrics initialized")

    def _init_tracing(self):
        """Initialize OpenTelemetry tracing (sets self.tracer)."""
        from opentelemetry import trace
        from opentelemetry.sdk.trace import TracerProvider
        from opentelemetry.sdk.trace.export import BatchSpanProcessor
        from opentelemetry.exporter.otlp.proto.grpc.trace_exporter import OTLPSpanExporter

        trace.set_tracer_provider(TracerProvider())
        # FIX: keep the tracer on the instance; previously it was bound to a
        # local variable only, so health_check's hasattr(self, 'tracer')
        # could never report tracing as operational.
        self.tracer = trace.get_tracer(__name__)
        self.logger.debug("Tracing initialized")

    def share_data(
        self,
        data: Dict[str, Any],
        recipient: Union[str, List[str]],
        user_role: str,
        user_id: Optional[str] = None,
        consent_verified: bool = False,
        purpose: str = "agricultural_analysis",
        expiry: Optional[datetime] = None,
        encrypt: bool = True,
        audit: bool = True
    ) -> Dict[str, Any]:
        """
        Securely share agricultural data with authorized recipients.

        This is the main method for data sharing. It handles:
        - Access control verification
        - Data encryption
        - Consent management
        - Audit logging
        - Blockchain recording
        - Recipient notification

        Args:
            data: Agricultural data to share (dictionary)
            recipient: Recipient ID(s) - single user or list
            user_role: Role of user sharing data
            user_id: ID of user sharing data
            consent_verified: Whether farmer consent obtained
            purpose: Reason for sharing (for audit)
            expiry: When the share permission expires (defaults to 30 days)
            encrypt: Whether to encrypt the data
            audit: Whether to record in audit log

        Returns:
            Dictionary with:
            - share_id: Unique identifier for this share
            - transaction_id: Blockchain transaction ID
            - encrypted_data: Encrypted data (if encrypt=True)
            - recipients: List of recipients
            - timestamp: When shared
            - expires_at: When access expires

        Raises:
            AuthorizationError: If user not authorized to share
            ValidationError: If data is invalid
            EncryptionError: If encryption fails
        """
        start_time = datetime.utcnow()

        try:
            # Update metrics
            if hasattr(self, 'metrics'):
                self.metrics['requests'].inc()

            self.logger.info(
                f"Data share attempt: {user_role} ({user_id}) -> {recipient}"
            )

            # Validate input
            if not isinstance(data, dict):
                raise ValidationError("Data must be a dictionary")

            # Check authorization
            if not self.access.check_permission(
                user_role=user_role,
                resource_type="farm_data",
                action="share",
                resource_owner=data.get("farmer_id"),
                current_user=user_id
            ):
                self.logger.warning(
                    f"Access denied: {user_role} attempted unauthorized share"
                )
                if hasattr(self, 'metrics'):
                    self.metrics['errors'].inc()
                raise AuthorizationError(
                    f"{user_role} not authorized to share this data"
                )

            # Verify consent for farmer data (farmers may share their own
            # data without a separate consent record)
            if "farmer_id" in data and not consent_verified:
                if user_role != "farmer":
                    raise AuthorizationError(
                        "Farmer consent required for data sharing"
                    )

            # Generate share ID
            share_id = generate_id(prefix="share")

            # Prepare metadata
            metadata = {
                "share_id": share_id,
                "shared_by": user_id or user_role,
                "shared_by_role": user_role,
                "recipients": [recipient] if isinstance(recipient, str) else recipient,
                "purpose": purpose,
                "timestamp": datetime.utcnow().isoformat(),
                "expires_at": (expiry or datetime.utcnow() + timedelta(days=30)).isoformat(),
                "data_types": list(data.keys())
            }

            # Encrypt data if requested
            encrypted_payload = None
            if encrypt:
                encrypted_payload = self.encryption.encrypt_data(
                    data=data,
                    context={
                        "purpose": purpose,
                        "recipients": metadata["recipients"]
                    }
                )
                self.logger.debug("Data encrypted successfully")

            # Record in blockchain if enabled
            transaction_id = None
            if self.ledger and audit:
                transaction_id = self.ledger.add_transaction(
                    transaction_type="DATA_SHARE",
                    data={
                        "share_id": share_id,
                        "metadata": metadata,
                        "data_hash": self.encryption.hash_data(data)
                    },
                    initiator=user_id or user_role
                )
                self.logger.debug(f"Blockchain transaction: {transaction_id}")

            # Store share record
            share_record = {
                "share_id": share_id,
                "transaction_id": transaction_id,
                "metadata": metadata,
                "encrypted_data": encrypted_payload,
                "status": "active"
            }

            # Save to database if available
            if hasattr(self, 'db'):
                self.db.save_share(share_record)

            # Cache share record until the share expires.
            # FIX: ttl takes an integer number of seconds (see cache_ttl and
            # create_user's ttl=86400); previously an ISO timestamp *string*
            # was passed here.
            if hasattr(self, 'cache'):
                ttl_seconds = max(
                    0,
                    int(
                        (
                            datetime.fromisoformat(metadata["expires_at"])
                            - datetime.utcnow()
                        ).total_seconds()
                    )
                )
                self.cache.set(
                    f"share:{share_id}",
                    share_record,
                    ttl=ttl_seconds
                )

            # Record response time
            if hasattr(self, 'metrics'):
                duration = (datetime.utcnow() - start_time).total_seconds()
                self.metrics['response_time'].observe(duration)

            self.logger.info(f"Data shared successfully: {share_id}")

            return {
                "success": True,
                "share_id": share_id,
                "transaction_id": transaction_id,
                "encrypted_data": encrypted_payload,
                "recipients": metadata["recipients"],
                "timestamp": metadata["timestamp"],
                "expires_at": metadata["expires_at"],
                "verification_url": f"/api/verify/{transaction_id}" if transaction_id else None
            }

        except Exception as e:
            self.logger.error(f"Data sharing failed: {str(e)}", exc_info=True)
            if hasattr(self, 'metrics'):
                self.metrics['errors'].inc()
            raise

    def receive_data(
        self,
        share_id: str,
        user_role: str,
        user_id: str,
        access_reason: str = "operational_need"
    ) -> Dict[str, Any]:
        """
        Receive and decrypt shared data.

        Args:
            share_id: Share identifier from share_data()
            user_role: Role of receiving user
            user_id: ID of receiving user
            access_reason: Why user needs this data

        Returns:
            Decrypted data with metadata

        Raises:
            ValidationError: If the share does not exist or holds no
                encrypted payload (created with encrypt=False)
            AuthorizationError: If the share expired or the user is not
                an authorized recipient
        """
        self.logger.info(f"Data receive attempt: {user_role} ({user_id}) -> {share_id}")

        # Get share record
        share_record = None

        # Try cache first
        if hasattr(self, 'cache'):
            share_record = self.cache.get(f"share:{share_id}")

        # Try database if not in cache
        if not share_record and hasattr(self, 'db'):
            share_record = self.db.get_share(share_id)

        if not share_record:
            raise ValidationError(f"Share {share_id} not found")

        # Check if share has expired
        expires_at = datetime.fromisoformat(share_record["metadata"]["expires_at"])
        if expires_at < datetime.utcnow():
            raise AuthorizationError("Share has expired")

        # Check if user is authorized recipient (admins/auditors bypass)
        if user_id not in share_record["metadata"]["recipients"]:
            if user_role not in ["system_admin", "auditor"]:
                raise AuthorizationError("Not authorized to receive this data")

        # FIX: shares created with encrypt=False store no payload; fail with
        # a clear error instead of handing None to decrypt_data.
        if share_record["encrypted_data"] is None:
            raise ValidationError(
                f"Share {share_id} has no encrypted payload to decrypt"
            )

        # Decrypt data
        decrypted_data = self.encryption.decrypt_data(
            encrypted_payload=share_record["encrypted_data"],
            context={"user_role": user_role, "user_id": user_id}
        )

        # Record access in blockchain
        if self.ledger:
            self.ledger.add_transaction(
                transaction_type="DATA_ACCESS",
                data={
                    "share_id": share_id,
                    "accessor": user_id,
                    "accessor_role": user_role,
                    "reason": access_reason,
                    "timestamp": datetime.utcnow().isoformat()
                },
                initiator=user_id
            )

        self.logger.info(f"Data accessed successfully: {share_id}")

        return {
            "success": True,
            "data": decrypted_data,
            "metadata": share_record["metadata"],
            "accessed_at": datetime.utcnow().isoformat()
        }

    def verify_data_integrity(
        self,
        transaction_id: str,
        include_proof: bool = True
    ) -> Dict[str, Any]:
        """
        Verify that data hasn't been tampered with.

        Args:
            transaction_id: Blockchain transaction ID
            include_proof: Include cryptographic proof

        Returns:
            Verification results with proof if requested

        Raises:
            AgriSecureError: If the blockchain ledger is disabled
        """
        if not self.ledger:
            raise AgriSecureError("Blockchain ledger not enabled")

        self.logger.info(f"Verifying transaction: {transaction_id}")

        verification = self.ledger.verify_transaction(
            transaction_id,
            include_proof=include_proof
        )

        return {
            "transaction_id": transaction_id,
            "verified": verification["valid"],
            "timestamp": verification["timestamp"],
            "block_height": verification.get("block_height"),
            "previous_hash": verification.get("previous_hash"),
            "proof": verification.get("proof") if include_proof else None,
            "message": "Data integrity confirmed" if verification["valid"]
                      else "Data may have been tampered with"
        }

    def get_audit_trail(
        self,
        user_id: Optional[str] = None,
        resource_id: Optional[str] = None,
        start_date: Optional[datetime] = None,
        end_date: Optional[datetime] = None,
        limit: int = 1000,
        offset: int = 0,
        include_blockchain: bool = True
    ) -> Dict[str, Any]:
        """
        Get comprehensive audit trail.

        Merges events from the database (if configured) and the blockchain
        ledger (if enabled), sorted newest-first.

        Args:
            user_id: Filter by user
            resource_id: Filter by resource
            start_date: Start of date range
            end_date: End of date range
            limit: Max records to return
            offset: Pagination offset
            include_blockchain: Include blockchain records

        Returns:
            Audit trail with metadata
        """
        self.logger.info(f"Generating audit trail for user={user_id}, resource={resource_id}")

        events = []

        # Get from database if available
        if hasattr(self, 'db'):
            db_events = self.db.get_audit_events(
                user_id=user_id,
                resource_id=resource_id,
                start_date=start_date,
                end_date=end_date,
                limit=limit,
                offset=offset
            )
            events.extend(db_events)

        # Get from blockchain if requested
        if include_blockchain and self.ledger:
            blockchain_events = self.ledger.get_transactions(
                user_id=user_id,
                resource_id=resource_id,
                start_date=start_date,
                end_date=end_date,
                limit=limit
            )
            events.extend(blockchain_events)

        # Sort by timestamp, newest first
        events.sort(key=lambda x: x.get("timestamp", ""), reverse=True)

        return {
            "total_events": len(events),
            "events": events[:limit],
            "filters": {
                "user_id": user_id,
                "resource_id": resource_id,
                "start_date": start_date.isoformat() if start_date else None,
                "end_date": end_date.isoformat() if end_date else None
            },
            "generated_at": datetime.utcnow().isoformat()
        }

    def health_check(self, deep: bool = False) -> Dict[str, Any]:
        """
        Comprehensive health check of all components.

        Args:
            deep: Perform deep checks (may be slower)

        Returns:
            Health status of all modules; top-level "status" degrades to
            "degraded" when any module fails.
        """
        health = {
            "status": "healthy",
            "version": __version__,
            "app_name": self.app_name,
            "environment": self.environment,
            "uptime": (datetime.utcnow() - self.start_time).total_seconds(),
            "timestamp": datetime.utcnow().isoformat(),
            "modules": {}
        }

        # Check encryption with a round-trip
        try:
            test_data = {"test": "health_check", "timestamp": datetime.utcnow().isoformat()}
            encrypted = self.encryption.encrypt_data(test_data)
            decrypted = self.encryption.decrypt_data(encrypted)

            if decrypted["test"] == test_data["test"]:
                health["modules"]["encryption"] = {
                    "status": "operational",
                    "algorithm": "AES-256-GCM",
                    "key_rotation_days": self.encryption.key_rotation_days
                }
            else:
                health["modules"]["encryption"] = {"status": "degraded", "error": "Encryption/decryption mismatch"}
                health["status"] = "degraded"
        except Exception as e:
            health["modules"]["encryption"] = {"status": "failed", "error": str(e)}
            health["status"] = "degraded"

        # Check authentication
        try:
            health["modules"]["authentication"] = {
                "status": "operational",
                "mfa_required": self.auth.mfa_required,
                "active_sessions": len(self.auth.active_sessions) if hasattr(self.auth, 'active_sessions') else 0
            }
        except Exception as e:
            health["modules"]["authentication"] = {"status": "failed", "error": str(e)}
            health["status"] = "degraded"

        # Check database
        if hasattr(self, 'db'):
            try:
                db_status = self.db.health_check()
                health["modules"]["database"] = db_status
                if db_status.get("status") != "operational":
                    health["status"] = "degraded"
            except Exception as e:
                health["modules"]["database"] = {"status": "failed", "error": str(e)}
                health["status"] = "degraded"

        # Check cache
        if hasattr(self, 'cache'):
            try:
                cache_status = self.cache.health_check()
                health["modules"]["cache"] = cache_status
                if cache_status.get("status") != "operational":
                    health["status"] = "degraded"
            except Exception as e:
                health["modules"]["cache"] = {"status": "failed", "error": str(e)}
                health["status"] = "degraded"

        # Check blockchain
        if self.ledger:
            try:
                if deep:
                    # Deep check - verify entire chain
                    verification = self.ledger.verify_chain()
                    health["modules"]["blockchain"] = {
                        "status": "operational" if verification["valid"] else "compromised",
                        "blocks": verification["blocks_checked"],
                        "height": verification.get("height")
                    }
                    if not verification["valid"]:
                        health["status"] = "degraded"
                else:
                    health["modules"]["blockchain"] = {
                        "status": "operational",
                        "height": self.ledger.get_height()
                    }
            except Exception as e:
                health["modules"]["blockchain"] = {"status": "failed", "error": str(e)}
                health["status"] = "degraded"

        # Check metrics
        if hasattr(self, 'metrics'):
            health["modules"]["metrics"] = {"status": "operational"}

        # Check tracing (self.tracer is set by _init_tracing)
        if hasattr(self, 'tracer'):
            health["modules"]["tracing"] = {"status": "operational"}

        return health

    def create_user(
        self,
        username: str,
        password: str,
        role: str,
        email: Optional[str] = None,
        phone: Optional[str] = None,
        metadata: Optional[Dict] = None
    ) -> Dict[str, Any]:
        """
        Create a new user in the system.

        Args:
            username: Unique username
            password: User password (will be hashed)
            role: User role (farmer, extension_officer, etc.)
            email: User email (for notifications)
            phone: User phone (for MFA)
            metadata: Additional user metadata

        Returns:
            User object with ID and MFA setup

        Raises:
            ValidationError: If role is not a valid UserRole value
        """
        self.logger.info(f"Creating user: {username} with role: {role}")

        # Validate role
        if role not in [r.value for r in UserRole]:
            valid_roles = [r.value for r in UserRole]
            raise ValidationError(f"Invalid role. Must be one of: {valid_roles}")

        # Create user in authentication system
        user = self.auth.register_user(
            username=username,
            password=password,
            role=role,
            email=email,
            phone=phone
        )

        # Add metadata
        if metadata:
            user["metadata"] = metadata

        # Save to database if available
        if hasattr(self, 'db'):
            self.db.save_user(user)

        # Cache user for one day
        if hasattr(self, 'cache'):
            self.cache.set(f"user:{user['user_id']}", user, ttl=86400)

        # Record in blockchain
        if self.ledger:
            self.ledger.add_transaction(
                transaction_type="USER_CREATED",
                data={"user_id": user["user_id"], "role": role, "username": username},
                initiator="system"
            )

        self.logger.info(f"User created successfully: {user['user_id']}")

        return user

    def authenticate_user(
        self,
        username: str,
        password: str,
        mfa_code: Optional[str] = None,
        ip_address: Optional[str] = None,
        user_agent: Optional[str] = None
    ) -> Dict[str, Any]:
        """
        Authenticate a user with MFA.

        Args:
            username: Username
            password: Password
            mfa_code: MFA code from authenticator app
            ip_address: Client IP for audit
            user_agent: Client user agent for audit

        Returns:
            Session with JWT token and user info
        """
        self.logger.info(f"Authentication attempt: {username} from {ip_address}")

        # Authenticate
        session = self.auth.authenticate(
            username=username,
            password=password,
            mfa_code=mfa_code
        )

        # Add audit info
        session["ip_address"] = ip_address
        session["user_agent"] = user_agent
        session["authenticated_at"] = datetime.utcnow().isoformat()

        # Record in blockchain
        if self.ledger:
            self.ledger.add_transaction(
                transaction_type="USER_LOGIN",
                data={
                    "user_id": session["user_id"],
                    "ip_address": ip_address,
                    "success": True
                },
                initiator=session["user_id"]
            )

        # Update metrics
        if hasattr(self, 'metrics'):
            self.metrics['active_users'].inc()

        self.logger.info(f"Authentication successful: {username}")

        return session

    def __repr__(self) -> str:
        return f"<AgriculturalSecurity app={self.app_name} env={self.environment}>"

    def __str__(self) -> str:
        return f"Agricultural Security Framework - {self.app_name} ({self.environment})"
|
|
845
|
+
|
|
846
|
+
# Export main class
# Public API of the package: the framework facade plus the enums and
# exception types that callers need for handling its results and errors.
__all__ = [
    "AgriculturalSecurity",
    "UserRole",
    "DataClassification",
    "AgriSecureError",
    "AuthenticationError",
    "AuthorizationError",
    "EncryptionError",
    "ValidationError",
    "BlockchainError"
]
|