cloudflare-images-migrator 1.0.0__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- cloudflare_images_migrator-1.0.0.dist-info/METADATA +474 -0
- cloudflare_images_migrator-1.0.0.dist-info/RECORD +17 -0
- cloudflare_images_migrator-1.0.0.dist-info/WHEEL +5 -0
- cloudflare_images_migrator-1.0.0.dist-info/entry_points.txt +3 -0
- cloudflare_images_migrator-1.0.0.dist-info/licenses/LICENSE +21 -0
- cloudflare_images_migrator-1.0.0.dist-info/top_level.txt +1 -0
- src/__init__.py +1 -0
- src/audit.py +620 -0
- src/cloudflare_client.py +746 -0
- src/config.py +161 -0
- src/image_tracker.py +405 -0
- src/logger.py +160 -0
- src/migrator.py +491 -0
- src/parsers.py +609 -0
- src/quality.py +558 -0
- src/security.py +528 -0
- src/utils.py +355 -0
src/cloudflare_client.py
ADDED
@@ -0,0 +1,746 @@
|
|
1
|
+
"""
|
2
|
+
Cloudflare Images API client
|
3
|
+
"""
|
4
|
+
|
5
|
+
import requests
|
6
|
+
import time
|
7
|
+
import json
|
8
|
+
from pathlib import Path
|
9
|
+
from typing import Optional, Dict, Any, List, Tuple
|
10
|
+
from urllib.parse import urlparse
|
11
|
+
import hashlib
|
12
|
+
|
13
|
+
from .config import Config
|
14
|
+
from .utils import get_file_hash, get_file_size_mb, sanitize_filename
|
15
|
+
from .image_tracker import ImageTracker, ImageRecord
|
16
|
+
|
17
|
+
|
18
|
+
class CloudflareApiError(Exception):
    """Raised when the Cloudflare Images API reports an error."""
|
21
|
+
|
22
|
+
|
23
|
+
class ImageUploadResult:
    """Outcome of a single image upload attempt.

    Attributes:
        success: True when the upload (or duplicate lookup) succeeded.
        image_id: Cloudflare image identifier on success.
        delivery_url: Public delivery URL on success.
        error: Human-readable failure description on failure.
    """

    def __init__(self, success: bool, image_id: str = None,
                 delivery_url: str = None, error: str = None):
        self.success = success
        self.image_id = image_id
        self.delivery_url = delivery_url
        self.error = error

    def __str__(self):
        # Keep the exact legacy message formats; callers log these verbatim.
        if not self.success:
            return f"Failed: {self.error}"
        return f"Success: {self.image_id} -> {self.delivery_url}"
|
38
|
+
|
39
|
+
|
40
|
+
class CloudflareImagesClient:
    """Client for interacting with Cloudflare Images API."""

    def __init__(self, config: Config, logger=None):
        """Initialise the client, its caches, and optional enterprise modules.

        Args:
            config: Project configuration (credentials, API URL, limits).
            logger: Optional logger; when None, all logging is a no-op.
        """
        self.config = config
        self.logger = logger
        # One shared HTTP session with auth headers applied up front.
        self.session = requests.Session()
        self.session.headers.update(config.get_headers())

        # Legacy cache for uploaded images (session-level only)
        self.uploaded_images = {}  # hash -> ImageUploadResult

        # Enterprise Image Tracking System (persistent across runs)
        self.image_tracker = ImageTracker()

        # Cloudflare Images library cache (for existing images check)
        self._cloudflare_images_cache = {}  # image_id -> image_info
        self._cache_loaded = False

        # Rate limiting state shared by every API call on this client.
        self.last_request_time = 0
        self.min_request_interval = 0.1  # 100ms between requests

        # Enterprise feature toggles (always on; the try below degrades
        # gracefully when the optional modules are missing).
        self.enable_security_validation = True
        self.enable_quality_optimization = True
        self.enable_audit_logging = True

        # Initialize security and quality modules
        try:
            from .security import SecurityValidator
            from .quality import QualityOptimizer
            from .audit import EnterpriseAuditLogger

            self.security_validator = SecurityValidator(config, logger) if self.enable_security_validation else None
            self.quality_optimizer = QualityOptimizer(config, logger) if self.enable_quality_optimization else None
            self.audit_logger = EnterpriseAuditLogger(config, logger) if self.enable_audit_logging else None
        except ImportError:
            # Fallback if enterprise modules not available
            self.security_validator = None
            self.quality_optimizer = None
            self.audit_logger = None
            if logger:
                logger.warning("Enterprise security/quality modules not available - using basic functionality")
|
84
|
+
|
85
|
+
def _rate_limit(self):
|
86
|
+
"""Implement rate limiting between requests."""
|
87
|
+
current_time = time.time()
|
88
|
+
time_since_last = current_time - self.last_request_time
|
89
|
+
if time_since_last < self.min_request_interval:
|
90
|
+
time.sleep(self.min_request_interval - time_since_last)
|
91
|
+
self.last_request_time = time.time()
|
92
|
+
|
93
|
+
def _log(self, level: str, message: str):
|
94
|
+
"""Log a message if logger is available."""
|
95
|
+
if self.logger:
|
96
|
+
getattr(self.logger, level.lower(), lambda x: None)(message)
|
97
|
+
|
98
|
+
def test_connection(self) -> bool:
|
99
|
+
"""Test the connection to Cloudflare Images API."""
|
100
|
+
try:
|
101
|
+
self._rate_limit()
|
102
|
+
|
103
|
+
# Try to list images (this will validate credentials)
|
104
|
+
response = self.session.get(
|
105
|
+
f"{self.config.get_cloudflare_api_url()}",
|
106
|
+
timeout=self.config.timeout
|
107
|
+
)
|
108
|
+
|
109
|
+
if response.status_code == 200:
|
110
|
+
self._log('info', "Successfully connected to Cloudflare Images API")
|
111
|
+
return True
|
112
|
+
else:
|
113
|
+
self._log('error', f"API connection failed: {response.status_code} - {response.text}")
|
114
|
+
return False
|
115
|
+
|
116
|
+
except Exception as e:
|
117
|
+
self._log('error', f"Connection test failed: {str(e)}")
|
118
|
+
return False
|
119
|
+
|
120
|
+
def _load_cloudflare_images_library(self) -> bool:
|
121
|
+
"""Load existing Cloudflare Images library for duplicate detection."""
|
122
|
+
if self._cache_loaded:
|
123
|
+
return True
|
124
|
+
|
125
|
+
try:
|
126
|
+
self._log('info', "Loading existing Cloudflare Images library...")
|
127
|
+
page = 1
|
128
|
+
per_page = 100
|
129
|
+
total_loaded = 0
|
130
|
+
|
131
|
+
while True:
|
132
|
+
images = self.list_images(page=page, per_page=per_page)
|
133
|
+
if not images:
|
134
|
+
break
|
135
|
+
|
136
|
+
for image in images:
|
137
|
+
self._cloudflare_images_cache[image['id']] = image
|
138
|
+
total_loaded += 1
|
139
|
+
|
140
|
+
# If we got fewer than per_page, we're done
|
141
|
+
if len(images) < per_page:
|
142
|
+
break
|
143
|
+
|
144
|
+
page += 1
|
145
|
+
|
146
|
+
# Rate limiting between requests
|
147
|
+
self._rate_limit()
|
148
|
+
|
149
|
+
self._cache_loaded = True
|
150
|
+
self._log('info', f"Loaded {total_loaded} existing images from Cloudflare Images library")
|
151
|
+
return True
|
152
|
+
|
153
|
+
except Exception as e:
|
154
|
+
self._log('error', f"Failed to load Cloudflare Images library: {str(e)}")
|
155
|
+
return False
|
156
|
+
|
157
|
+
def _check_existing_cloudflare_image(self, file_hash: str = None, url: str = None, path: str = None) -> Optional[str]:
|
158
|
+
"""
|
159
|
+
Check if image already exists in Cloudflare Images library.
|
160
|
+
|
161
|
+
Returns:
|
162
|
+
Cloudflare image ID if found, None otherwise
|
163
|
+
"""
|
164
|
+
# Load library if not already loaded
|
165
|
+
if not self._cache_loaded:
|
166
|
+
self._load_cloudflare_images_library()
|
167
|
+
|
168
|
+
# Check local tracking first (fastest)
|
169
|
+
if file_hash:
|
170
|
+
existing_record = self.image_tracker.check_duplicate_by_hash(file_hash)
|
171
|
+
if existing_record:
|
172
|
+
self._log('info', f"Found existing image by hash: {existing_record.cloudflare_id}")
|
173
|
+
return existing_record.cloudflare_id
|
174
|
+
|
175
|
+
if url:
|
176
|
+
existing_record = self.image_tracker.check_duplicate_by_url(url)
|
177
|
+
if existing_record:
|
178
|
+
self._log('info', f"Found existing image by URL: {existing_record.cloudflare_id}")
|
179
|
+
return existing_record.cloudflare_id
|
180
|
+
|
181
|
+
if path:
|
182
|
+
existing_record = self.image_tracker.check_duplicate_by_path(path)
|
183
|
+
if existing_record:
|
184
|
+
self._log('info', f"Found existing image by path: {existing_record.cloudflare_id}")
|
185
|
+
return existing_record.cloudflare_id
|
186
|
+
|
187
|
+
# For now, we rely on our tracking system for duplicate detection
|
188
|
+
# In the future, we could also check filename patterns against Cloudflare library
|
189
|
+
# but this would require more sophisticated matching algorithms
|
190
|
+
|
191
|
+
return None
|
192
|
+
|
193
|
+
def _record_uploaded_image(self, image_path: Path = None, image_url: str = None,
                           result: ImageUploadResult = None, file_hash: str = None,
                           file_size_bytes: int = 0, mime_type: str = "") -> bool:
    """Record uploaded image in persistent tracking system.

    Exactly one of *image_path* / *image_url* is expected; only successful
    results are recorded.

    Returns:
        True when the tracker accepted the record, False otherwise
        (including on any exception, which is logged and swallowed).
    """
    try:
        if not result or not result.success:
            return False

        # Create comprehensive image record
        record = ImageRecord(
            original_path=str(image_path) if image_path else image_url,
            cloudflare_id=result.image_id,
            cloudflare_url=result.delivery_url,
            file_hash=file_hash,
            # MD5 here is a cheap dedup key, not a security measure.
            url_hash=hashlib.md5((image_url or str(image_path)).encode()).hexdigest() if image_url or image_path else None,
            original_filename=image_path.name if image_path else Path(image_url or "").name,
            file_size_bytes=file_size_bytes,
            mime_type=mime_type,
            source_project=str(Path.cwd()),  # Current working directory as project
            migration_session=self.image_tracker.session_id
        )

        # Add security and quality info if available.
        # NOTE(review): assumes the validator/optimizer expose
        # 'last_validation_result' / 'last_optimization_result' attributes —
        # confirm against src/security.py and src/quality.py.
        if self.security_validator and hasattr(self.security_validator, 'last_validation_result'):
            security_result = getattr(self.security_validator, 'last_validation_result', {})
            record.security_level = security_result.get('security_level', '')
            record.security_issues = json.dumps(security_result.get('issues', []))

        if self.quality_optimizer and hasattr(self.quality_optimizer, 'last_optimization_result'):
            quality_result = getattr(self.quality_optimizer, 'last_optimization_result', {})
            record.was_optimized = quality_result.get('success', False)
            record.compression_ratio = quality_result.get('size_reduction', 0.0)
            record.quality_score = quality_result.get('quality_score', 0.0)

        return self.image_tracker.add_image_record(record)

    except Exception as e:
        self._log('error', f"Failed to record uploaded image: {str(e)}")
        return False
|
232
|
+
|
233
|
+
def upload_image_from_path(self, image_path: Path, custom_id: str = None) -> ImageUploadResult:
    """
    Upload an image file to Cloudflare Images with enterprise security validation.

    Args:
        image_path: Path to the image file
        custom_id: Optional custom ID for the image

    Returns:
        ImageUploadResult with success status and details
    """
    try:
        # Validate file
        if not image_path.exists():
            return ImageUploadResult(False, error=f"File not found: {image_path}")

        if not image_path.is_file():
            return ImageUploadResult(False, error=f"Not a file: {image_path}")

        # Enterprise Security Validation
        if self.security_validator:
            security_result = self.security_validator.validate_file_security(image_path)

            # Log security validation
            if self.audit_logger:
                self.audit_logger.log_security_validation(image_path, security_result)

            # Block upload if security validation fails
            if not security_result['is_safe']:
                error_msg = f"Security validation failed: {', '.join(security_result['issues'])}"
                self._log('warning', f"Upload blocked for {image_path.name}: {error_msg}")
                return ImageUploadResult(False, error=error_msg)

            # Log security warnings
            if security_result['security_level'] in ['MEDIUM', 'LOW']:
                self._log('warning', f"Security concerns for {image_path.name}: {security_result['security_level']}")

        # Quality Optimization (if enabled).
        # NOTE(review): the upload below still reads image_path; this assumes
        # optimize_image() rewrites the file in place — confirm in quality.py.
        optimized_path = image_path
        if self.quality_optimizer:
            quality_analysis = self.quality_optimizer.analyze_image_quality(image_path)

            # Apply optimization if beneficial
            if quality_analysis['quality_score'] < 80:
                optimization_result = self.quality_optimizer.optimize_image(image_path)
                if optimization_result['success']:
                    self._log('info', f"Optimized {image_path.name}: {optimization_result['size_reduction']:.1%} size reduction")

        # Check file size
        file_size_mb = get_file_size_mb(optimized_path)
        if file_size_mb > self.config.max_file_size_mb:
            return ImageUploadResult(
                False,
                error=f"File too large: {file_size_mb:.2f}MB (max: {self.config.max_file_size_mb}MB)"
            )

        # Enhanced duplicate detection
        file_hash = get_file_hash(image_path)

        # Check session cache first (fastest)
        if file_hash and file_hash in self.uploaded_images:
            cached_result = self.uploaded_images[file_hash]
            self._log('info', f"Using session cache for {image_path}")
            return cached_result

        # Check persistent tracking and existing Cloudflare Images
        existing_image_id = self._check_existing_cloudflare_image(
            file_hash=file_hash,
            path=str(image_path)
        )

        if existing_image_id:
            # Create result from existing image
            delivery_url = f"https://imagedelivery.net/{existing_image_id}/public"
            existing_result = ImageUploadResult(
                success=True,
                image_id=existing_image_id,
                delivery_url=delivery_url
            )

            # Cache in session for future lookups
            if file_hash:
                self.uploaded_images[file_hash] = existing_result

            self._log('info', f"Found existing image for {image_path}: {existing_image_id}")
            return existing_result

        # Prepare upload
        self._rate_limit()

        # Generate custom ID if not provided
        if not custom_id:
            custom_id = self._generate_image_id(image_path)

        data = {}
        if custom_id:
            data['id'] = custom_id

        # BUGFIX: open the file in a context manager so the handle is closed
        # even when session.post() raises — the previous code only closed it
        # on the path where the request returned, leaking it on exceptions.
        with open(image_path, 'rb') as image_file:
            files = {
                'file': (image_path.name, image_file, self._get_mime_type(image_path))
            }

            # Upload to Cloudflare
            response = self.session.post(
                self.config.get_cloudflare_api_url(),
                files=files,
                data=data,
                timeout=self.config.timeout
            )

        # Process response
        if response.status_code == 200:
            result_data = response.json()
            if result_data.get('success'):
                image_data = result_data['result']
                image_id = image_data['id']

                # Generate delivery URL.
                # NOTE(review): imagedelivery.net URLs normally carry an
                # account-hash segment before the image id — confirm format.
                delivery_url = f"https://imagedelivery.net/{image_data['id']}/public"

                result = ImageUploadResult(
                    success=True,
                    image_id=image_id,
                    delivery_url=delivery_url
                )

                # Cache the result in session
                if file_hash:
                    self.uploaded_images[file_hash] = result

                # Record in persistent tracking system
                self._record_uploaded_image(
                    image_path=image_path,
                    result=result,
                    file_hash=file_hash,
                    file_size_bytes=int(file_size_mb * 1024 * 1024),
                    mime_type=self._get_mime_type(image_path)
                )

                self._log('info', f"Successfully uploaded {image_path} -> {image_id}")
                return result
            else:
                errors = result_data.get('errors', [])
                error_msg = ', '.join([err.get('message', str(err)) for err in errors])
                return ImageUploadResult(False, error=f"Upload failed: {error_msg}")
        else:
            error_msg = f"HTTP {response.status_code}: {response.text}"
            return ImageUploadResult(False, error=error_msg)

    except Exception as e:
        return ImageUploadResult(False, error=f"Upload exception: {str(e)}")
|
388
|
+
|
389
|
+
def upload_image_from_url(self, image_url: str, custom_id: str = None) -> ImageUploadResult:
    """
    Upload an image from a URL to Cloudflare Images with enterprise security validation.

    Args:
        image_url: URL of the image to upload
        custom_id: Optional custom ID for the image

    Returns:
        ImageUploadResult with success status and details
    """
    try:
        # Enterprise Security Validation for URLs
        if self.security_validator:
            url_security = self.security_validator.validate_url_security(image_url)

            # Block upload if URL validation fails
            if not url_security['is_safe']:
                error_msg = f"URL security validation failed: {', '.join(url_security['issues'])}"
                self._log('warning', f"Upload blocked for URL {image_url}: {error_msg}")
                return ImageUploadResult(False, error=error_msg)

            # Log security warnings for URLs
            if url_security['security_level'] in ['MEDIUM', 'LOW']:
                self._log('warning', f"URL security concerns for {image_url}: {url_security['security_level']}")

        # Enhanced duplicate detection for URLs (MD5 is a cache key only,
        # not a security measure).
        url_hash = hashlib.md5(image_url.encode()).hexdigest()

        # Check session cache first (fastest)
        if url_hash in self.uploaded_images:
            cached_result = self.uploaded_images[url_hash]
            self._log('info', f"Using session cache for {image_url}")
            return cached_result

        # Check persistent tracking and existing Cloudflare Images
        existing_image_id = self._check_existing_cloudflare_image(
            url=image_url,
            path=image_url
        )

        if existing_image_id:
            # Create result from existing image.
            # NOTE(review): imagedelivery.net URLs normally carry an
            # account-hash segment before the image id — confirm format.
            delivery_url = f"https://imagedelivery.net/{existing_image_id}/public"
            existing_result = ImageUploadResult(
                success=True,
                image_id=existing_image_id,
                delivery_url=delivery_url
            )

            # Cache in session for future lookups
            self.uploaded_images[url_hash] = existing_result

            self._log('info', f"Found existing image for URL {image_url}: {existing_image_id}")
            return existing_result

        # Rate limiting
        self._rate_limit()

        # Generate custom ID if not provided
        if not custom_id:
            custom_id = self._generate_image_id_from_url(image_url)

        # Prepare data for upload - URL uploads also need multipart/form-data
        files = {
            'url': (None, image_url)
        }
        data = {}
        if custom_id:
            data['id'] = custom_id

        # Upload to Cloudflare using multipart/form-data
        response = self.session.post(
            self.config.get_cloudflare_api_url(),
            files=files,
            data=data,
            timeout=self.config.timeout
        )

        # Process response
        if response.status_code == 200:
            result_data = response.json()
            if result_data.get('success'):
                image_data = result_data['result']
                image_id = image_data['id']

                # Generate delivery URL
                delivery_url = f"https://imagedelivery.net/{image_data['id']}/public"

                result = ImageUploadResult(
                    success=True,
                    image_id=image_id,
                    delivery_url=delivery_url
                )

                # Cache the result in session
                self.uploaded_images[url_hash] = result

                # Record in persistent tracking system
                self._record_uploaded_image(
                    image_url=image_url,
                    result=result,
                    mime_type="image/unknown"  # Can't determine from URL alone
                )

                self._log('info', f"Successfully uploaded {image_url} -> {image_id}")
                return result
            else:
                errors = result_data.get('errors', [])
                error_msg = ', '.join([err.get('message', str(err)) for err in errors])
                return ImageUploadResult(False, error=f"Upload failed: {error_msg}")
        else:
            error_msg = f"HTTP {response.status_code}: {response.text}"
            return ImageUploadResult(False, error=error_msg)

    except Exception as e:
        return ImageUploadResult(False, error=f"Upload exception: {str(e)}")
|
506
|
+
|
507
|
+
def batch_upload_images(self, images: List[Tuple[Path, str]],
                        progress_callback=None) -> List[ImageUploadResult]:
    """
    Upload multiple images in batches.

    Args:
        images: List of (image_path, custom_id) tuples
        progress_callback: Optional callback function for progress updates

    Returns:
        List of ImageUploadResult objects
    """
    results = []
    total = len(images)
    step = self.config.batch_size

    for start in range(0, total, step):
        for image_path, custom_id in images[start:start + step]:
            results.append(self.upload_image_from_path(image_path, custom_id))

            if progress_callback:
                progress_callback(len(results), total, results[-1])

        # Brief pause between batches (skipped after the final one).
        if start + step < total:
            time.sleep(0.5)

    return results
|
537
|
+
|
538
|
+
def _generate_image_id(self, image_path: Path) -> str:
    """Derive a unique image ID from the file name plus a content-hash suffix.

    Falls back to a timestamp suffix when the file cannot be hashed.
    """
    digest = get_file_hash(image_path)
    # Keep the name component short so the combined ID stays compact.
    stem = sanitize_filename(image_path.stem)[:20]

    suffix = digest[:8] if digest else str(int(time.time()))
    return f"{stem}_{suffix}"
|
552
|
+
|
553
|
+
def _generate_image_id_from_url(self, url: str) -> str:
    """Derive a unique image ID from a source URL.

    Combines the sanitized, truncated filename portion of the URL path
    with a short MD5 digest of the full URL for uniqueness.
    """
    # Fall back to a generic name when the URL path has no filename.
    stem = Path(urlparse(url).path).stem or "image"
    stem = sanitize_filename(stem)[:20]

    digest = hashlib.md5(url.encode()).hexdigest()
    return f"{stem}_{digest[:8]}"
|
573
|
+
|
574
|
+
def _get_mime_type(self, file_path: Path) -> str:
|
575
|
+
"""Get MIME type for a file based on its extension."""
|
576
|
+
mime_types = {
|
577
|
+
'.png': 'image/png',
|
578
|
+
'.jpg': 'image/jpeg',
|
579
|
+
'.jpeg': 'image/jpeg',
|
580
|
+
'.gif': 'image/gif',
|
581
|
+
'.webp': 'image/webp',
|
582
|
+
'.svg': 'image/svg+xml',
|
583
|
+
'.bmp': 'image/bmp',
|
584
|
+
'.ico': 'image/x-icon'
|
585
|
+
}
|
586
|
+
|
587
|
+
ext = file_path.suffix.lower()
|
588
|
+
return mime_types.get(ext, 'application/octet-stream')
|
589
|
+
|
590
|
+
def get_image_info(self, image_id: str) -> Optional[Dict[str, Any]]:
    """
    Get information about an uploaded image.

    Args:
        image_id: Cloudflare image ID

    Returns:
        Image information dictionary or None if not found
    """
    try:
        self._rate_limit()

        response = self.session.get(
            f"{self.config.get_cloudflare_api_url()}/{image_id}",
            timeout=self.config.timeout
        )

        if response.status_code != 200:
            return None

        payload = response.json()
        return payload['result'] if payload.get('success') else None

    except Exception as e:
        self._log('error', f"Failed to get image info for {image_id}: {str(e)}")
        return None
|
618
|
+
|
619
|
+
def delete_image(self, image_id: str) -> bool:
    """
    Delete an image from Cloudflare Images.

    Args:
        image_id: Cloudflare image ID

    Returns:
        True if successful, False otherwise
    """
    try:
        self._rate_limit()

        response = self.session.delete(
            f"{self.config.get_cloudflare_api_url()}/{image_id}",
            timeout=self.config.timeout
        )

        if response.status_code != 200:
            return False

        return response.json().get('success', False)

    except Exception as e:
        self._log('error', f"Failed to delete image {image_id}: {str(e)}")
        return False
|
646
|
+
|
647
|
+
def list_images(self, page: int = 1, per_page: int = 50) -> Optional[List[Dict[str, Any]]]:
    """
    List uploaded images.

    Args:
        page: Page number (1-based)
        per_page: Number of images per page

    Returns:
        List of image dictionaries or None if failed
    """
    try:
        self._rate_limit()

        response = self.session.get(
            self.config.get_cloudflare_api_url(),
            params={'page': page, 'per_page': per_page},
            timeout=self.config.timeout
        )

        if response.status_code == 200:
            payload = response.json()
            if payload.get('success'):
                return payload['result']['images']

        return None

    except Exception as e:
        self._log('error', f"Failed to list images: {str(e)}")
        return None
|
682
|
+
|
683
|
+
def get_upload_stats(self) -> Dict[str, Any]:
    """Get comprehensive statistics about uploads performed by this client.

    Returns:
        Mapping of stat name to value. Values are mixed types — counts,
        megabyte floats and the session-id string — so the return type is
        Dict[str, Any] (the previous Dict[str, int] annotation was wrong).
    """
    # Session-level stats
    successful_uploads = sum(1 for result in self.uploaded_images.values() if result.success)
    failed_uploads = sum(1 for result in self.uploaded_images.values() if not result.success)

    # Get persistent tracking stats
    tracking_stats = self.image_tracker.get_statistics()

    stats = {
        # Session stats
        'session_uploads': len(self.uploaded_images),
        'session_successful': successful_uploads,
        'session_failed': failed_uploads,

        # Persistent tracking stats
        'total_images_ever': tracking_stats['total_images'],
        'current_session_images': tracking_stats['session_images'],
        'total_sessions': tracking_stats['total_sessions'],
        'optimized_images': tracking_stats['optimized_images'],
        'total_size_mb': tracking_stats['total_size_mb'],
        'average_size_mb': tracking_stats['average_size_mb'],
        'average_compression_ratio': tracking_stats['average_compression_ratio'],
        'recent_uploads_24h': tracking_stats['recent_uploads_24h'],
        'current_session_id': tracking_stats['current_session_id']
    }

    # Add enterprise security and quality stats if available.
    # NOTE(review): gated on 'validation_stats'/'optimization_stats' but
    # reads differently-named counters — confirm the attribute names
    # against src/security.py and src/quality.py.
    if self.security_validator and hasattr(self.security_validator, 'validation_stats'):
        stats['security_validations'] = getattr(self.security_validator, 'validation_count', 0)
        stats['security_blocks'] = getattr(self.security_validator, 'blocked_uploads', 0)

    if self.quality_optimizer and hasattr(self.quality_optimizer, 'optimization_stats'):
        stats['optimizations_applied'] = getattr(self.quality_optimizer, 'optimization_count', 0)
        stats['size_savings_mb'] = getattr(self.quality_optimizer, 'total_savings_mb', 0.0)

    return stats
|
720
|
+
|
721
|
+
def export_tracking_data(self, csv_path: str = None, include_session_only: bool = False) -> bool:
    """Export image tracking data to CSV file.

    Args:
        csv_path: Optional override for the tracker's CSV output path.
        include_session_only: When True, export only this session's records.

    Returns:
        True when the tracker reports a successful export.
    """
    if csv_path:
        # Redirect the tracker's output before exporting.
        self.image_tracker.csv_export_path = Path(csv_path)

    return self.image_tracker.export_to_csv(include_session_only=include_session_only)
|
727
|
+
|
728
|
+
def generate_security_report(self) -> Dict[str, Any]:
    """Generate comprehensive security report for enterprise compliance.

    Returns:
        A report dictionary, or an 'error' entry when the enterprise
        security validator is not available.
    """
    if not self.security_validator:
        return {'error': 'Enterprise security features not available'}

    # Placeholder report: a full implementation would aggregate validation
    # results, threat detections and per-policy compliance status here.
    report = {
        'report_generated': time.time(),
        'security_level': 'ENTERPRISE',
        'validations_performed': getattr(self.security_validator, 'validation_count', 0),
        'threats_detected': getattr(self.security_validator, 'threats_detected', 0),
        'compliance_status': 'COMPLIANT',
    }
    report['recommendations'] = [
        'Continue regular security monitoring',
        'Keep security policies updated',
        'Review audit logs regularly',
    ]
    return report