spatelier 0.3.0__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (59)
  1. analytics/__init__.py +1 -0
  2. analytics/reporter.py +497 -0
  3. cli/__init__.py +1 -0
  4. cli/app.py +147 -0
  5. cli/audio.py +129 -0
  6. cli/cli_analytics.py +320 -0
  7. cli/cli_utils.py +282 -0
  8. cli/error_handlers.py +122 -0
  9. cli/files.py +299 -0
  10. cli/update.py +325 -0
  11. cli/video.py +823 -0
  12. cli/worker.py +615 -0
  13. core/__init__.py +1 -0
  14. core/analytics_dashboard.py +368 -0
  15. core/base.py +303 -0
  16. core/base_service.py +69 -0
  17. core/config.py +345 -0
  18. core/database_service.py +116 -0
  19. core/decorators.py +263 -0
  20. core/error_handler.py +210 -0
  21. core/file_tracker.py +254 -0
  22. core/interactive_cli.py +366 -0
  23. core/interfaces.py +166 -0
  24. core/job_queue.py +437 -0
  25. core/logger.py +79 -0
  26. core/package_updater.py +469 -0
  27. core/progress.py +228 -0
  28. core/service_factory.py +295 -0
  29. core/streaming.py +299 -0
  30. core/worker.py +765 -0
  31. database/__init__.py +1 -0
  32. database/connection.py +265 -0
  33. database/metadata.py +516 -0
  34. database/models.py +288 -0
  35. database/repository.py +592 -0
  36. database/transcription_storage.py +219 -0
  37. modules/__init__.py +1 -0
  38. modules/audio/__init__.py +5 -0
  39. modules/audio/converter.py +197 -0
  40. modules/video/__init__.py +16 -0
  41. modules/video/converter.py +191 -0
  42. modules/video/fallback_extractor.py +334 -0
  43. modules/video/services/__init__.py +18 -0
  44. modules/video/services/audio_extraction_service.py +274 -0
  45. modules/video/services/download_service.py +852 -0
  46. modules/video/services/metadata_service.py +190 -0
  47. modules/video/services/playlist_service.py +445 -0
  48. modules/video/services/transcription_service.py +491 -0
  49. modules/video/transcription_service.py +385 -0
  50. modules/video/youtube_api.py +397 -0
  51. spatelier/__init__.py +33 -0
  52. spatelier-0.3.0.dist-info/METADATA +260 -0
  53. spatelier-0.3.0.dist-info/RECORD +59 -0
  54. spatelier-0.3.0.dist-info/WHEEL +5 -0
  55. spatelier-0.3.0.dist-info/entry_points.txt +2 -0
  56. spatelier-0.3.0.dist-info/licenses/LICENSE +21 -0
  57. spatelier-0.3.0.dist-info/top_level.txt +7 -0
  58. utils/__init__.py +1 -0
  59. utils/helpers.py +250 -0
core/interfaces.py ADDED
@@ -0,0 +1,166 @@
1
+ """
2
+ Core interfaces for dependency injection and service layer.
3
+
4
+ This module defines abstract interfaces for the service layer,
5
+ enabling dependency injection and better testability.
6
+ """
7
+
8
+ from abc import ABC, abstractmethod
9
+ from pathlib import Path
10
+ from typing import Any, Dict, List, Optional, Union
11
+
12
+ from core.base import ProcessingResult
13
+ from core.config import Config
14
+
15
+
16
class IDatabaseService(ABC):
    """Contract for services that manage the database lifecycle."""

    @abstractmethod
    def initialize(self) -> "IRepositoryContainer":
        """Open the database connections and return the repository container."""

    @abstractmethod
    def close_connections(self):
        """Release every open database connection."""
28
+
29
+
30
class IRepositoryContainer(ABC):
    """Contract grouping the application's data repositories."""

    @property
    @abstractmethod
    def media(self):
        """Repository for media files."""

    @property
    @abstractmethod
    def jobs(self):
        """Repository for processing jobs."""

    @property
    @abstractmethod
    def analytics(self):
        """Repository for analytics data."""

    @property
    @abstractmethod
    def playlists(self):
        """Repository for playlists."""

    @property
    @abstractmethod
    def playlist_videos(self):
        """Repository for playlist-video associations."""
62
+
63
+
64
class IVideoDownloadService(ABC):
    """Contract for downloading individual videos."""

    @abstractmethod
    def download_video(
        self, url: str, output_path: Optional[Union[str, Path]] = None, **kwargs
    ) -> ProcessingResult:
        """Download the single video identified by *url*."""
73
+
74
+
75
class IMetadataService(ABC):
    """Contract for metadata extraction and enrichment."""

    @abstractmethod
    def extract_video_metadata(self, url: str) -> Dict[str, Any]:
        """Return the metadata extracted from the video at *url*."""

    @abstractmethod
    def enrich_media_file(self, media_file_id: int) -> bool:
        """Add extra metadata to a stored media file; report success."""

    @abstractmethod
    def get_media_file_metadata(self, media_file_id: int) -> Optional[Dict[str, Any]]:
        """Return the stored metadata for a media file, if any."""
92
+
93
+
94
class ITranscriptionService(ABC):
    """Contract for video transcription and subtitle embedding."""

    @abstractmethod
    def transcribe_video(
        self,
        video_path: Union[str, Path],
        media_file_id: Optional[int] = None,
        language: Optional[str] = None,
        model_size: Optional[str] = None,
    ) -> bool:
        """Transcribe the given video file; report success."""

    @abstractmethod
    def embed_subtitles(
        self,
        video_path: Union[str, Path],
        output_path: Union[str, Path],
        media_file_id: Optional[int] = None,
    ) -> bool:
        """Embed subtitles into the video, writing the result to *output_path*."""
117
+
118
+
119
class IPlaylistService(ABC):
    """Contract for playlist downloads."""

    @abstractmethod
    def download_playlist(
        self, url: str, output_path: Optional[Union[str, Path]] = None, **kwargs
    ) -> Dict[str, Any]:
        """Download every item of a playlist without transcribing them."""
128
+
129
+
130
class IServiceFactory(ABC):
    """Contract for constructing the application's service objects."""

    @abstractmethod
    def create_database_service(
        self, config: Config, verbose: bool = False
    ) -> IDatabaseService:
        """Build a database service."""

    @abstractmethod
    def create_video_download_service(
        self, config: Config, verbose: bool = False
    ) -> IVideoDownloadService:
        """Build a video download service."""

    @abstractmethod
    def create_metadata_service(
        self, config: Config, verbose: bool = False
    ) -> IMetadataService:
        """Build a metadata service."""

    @abstractmethod
    def create_transcription_service(
        self, config: Config, verbose: bool = False
    ) -> ITranscriptionService:
        """Build a transcription service."""

    @abstractmethod
    def create_playlist_service(
        self, config: Config, verbose: bool = False
    ) -> IPlaylistService:
        """Build a playlist service."""
core/job_queue.py ADDED
@@ -0,0 +1,437 @@
1
+ """
2
+ Generic job queue system with SQLite persistence.
3
+
4
+ This module provides a flexible job queue system that can handle any type of job,
5
+ with configurable throttling, persistent storage, and background processing.
6
+ """
7
+
8
+ import json
9
+ import sqlite3
10
+ import threading
11
+ import time
12
+ from dataclasses import asdict, dataclass
13
+ from datetime import datetime, timedelta
14
+ from enum import Enum
15
+ from pathlib import Path
16
+ from typing import Any, Callable, Dict, List, Optional
17
+
18
+ from core.config import Config
19
+ from core.logger import get_logger
20
+
21
+
22
class JobStatus(Enum):
    """Lifecycle states a queued job moves through."""

    PENDING = "pending"      # waiting to be picked up
    RUNNING = "running"      # currently executing
    COMPLETED = "completed"  # finished successfully
    FAILED = "failed"        # finished with an error
    CANCELLED = "cancelled"  # aborted by request
30
+
31
+
32
class JobType(Enum):
    """Kinds of work the queue knows how to describe."""

    DOWNLOAD_VIDEO = "download_video"        # single-video download
    DOWNLOAD_PLAYLIST = "download_playlist"  # full-playlist download
    TRANSCRIBE_VIDEO = "transcribe_video"    # transcription job
    PROCESS_AUDIO = "process_audio"          # audio processing job
    CUSTOM = "custom"                        # caller-defined job
40
+
41
+
42
@dataclass
class Job:
    """Generic job definition persisted in the queue database.

    ``to_dict``/``from_dict`` round-trip the job through a JSON-friendly
    dictionary: enums are stored by value and datetimes as ISO-8601
    strings (or None).
    """

    id: Optional[int] = None
    job_type: JobType = JobType.CUSTOM
    # None is normalized to a fresh {} in __post_init__ so instances never
    # share a mutable default dict (the annotation was previously a plain
    # Dict even though None was the default).
    job_data: Optional[Dict[str, Any]] = None
    job_path: str = ""
    status: JobStatus = JobStatus.PENDING
    priority: int = 0
    created_at: Optional[datetime] = None
    started_at: Optional[datetime] = None
    completed_at: Optional[datetime] = None
    error_message: Optional[str] = None
    retry_count: int = 0
    max_retries: int = 3

    def __post_init__(self):
        # Per-instance defaults that must not live at class level.
        if self.job_data is None:
            self.job_data = {}
        if self.created_at is None:
            self.created_at = datetime.now()

    @property
    def duration(self) -> Optional[float]:
        """Job duration in seconds, or None unless both started and completed."""
        if self.started_at and self.completed_at:
            return (self.completed_at - self.started_at).total_seconds()
        return None

    @property
    def is_finished(self) -> bool:
        """True when the job is in a terminal state (completed/failed/cancelled)."""
        return self.status in (
            JobStatus.COMPLETED,
            JobStatus.FAILED,
            JobStatus.CANCELLED,
        )

    @staticmethod
    def _format_datetime(value: Optional[datetime]) -> Optional[str]:
        """Render a datetime as an ISO-8601 string, passing None through."""
        return value.isoformat() if value else None

    @staticmethod
    def _parse_datetime(value: Optional[str]) -> Optional[datetime]:
        """Parse an ISO-8601 string, mapping None/empty to None."""
        return datetime.fromisoformat(value) if value else None

    def to_dict(self) -> Dict[str, Any]:
        """Convert job to dictionary for JSON serialization."""
        return {
            "id": self.id,
            "job_type": self.job_type.value,
            "job_data": self.job_data,
            "job_path": self.job_path,
            "status": self.status.value,
            "priority": self.priority,
            "created_at": self._format_datetime(self.created_at),
            "started_at": self._format_datetime(self.started_at),
            "completed_at": self._format_datetime(self.completed_at),
            "error_message": self.error_message,
            "retry_count": self.retry_count,
            "max_retries": self.max_retries,
        }

    @classmethod
    def from_dict(cls, data: Dict[str, Any]) -> "Job":
        """Create a job from a dictionary (inverse of to_dict).

        Constructs a default Job and then assigns every field, so a
        missing ``created_at`` deliberately ends up None — overriding
        the ``datetime.now()`` default from __post_init__ to mirror the
        stored row exactly.
        """
        job = cls()
        job.id = data.get("id")
        job.job_type = JobType(data.get("job_type", "custom"))
        job.job_data = data.get("job_data", {})
        job.job_path = data.get("job_path", "")
        job.status = JobStatus(data.get("status", "pending"))
        job.priority = data.get("priority", 0)
        job.created_at = cls._parse_datetime(data.get("created_at"))
        job.started_at = cls._parse_datetime(data.get("started_at"))
        job.completed_at = cls._parse_datetime(data.get("completed_at"))
        job.error_message = data.get("error_message")
        job.retry_count = data.get("retry_count", 0)
        job.max_retries = data.get("max_retries", 3)
        return job
129
+
130
+
131
class JobQueue:
    """Generic job queue with SQLite persistence.

    Every public method opens a short-lived SQLite connection and is
    serialized by a single lock, so one instance can be shared across
    threads.
    """

    def __init__(self, config: Config, verbose: bool = False):
        """Initialize job queue.

        Args:
            config: Application config; ``config.database.sqlite_path``
                locates the queue database file.
            verbose: Enable verbose logging.
        """
        self.config = config
        self.verbose = verbose
        self.logger = get_logger("JobQueue", verbose=verbose)

        # Database location (parent directories created on demand)
        self.db_path = Path(config.database.sqlite_path)
        self.db_path.parent.mkdir(parents=True, exist_ok=True)

        # Serializes all queue operations across threads.
        self._lock = threading.Lock()

        # Create schema if this is a fresh database.
        self._init_database()

    def _init_database(self) -> None:
        """Create the jobs table and its indexes if they do not exist."""
        with sqlite3.connect(self.db_path) as conn:
            conn.execute(
                """
                CREATE TABLE IF NOT EXISTS jobs (
                    id INTEGER PRIMARY KEY AUTOINCREMENT,
                    job_type TEXT NOT NULL,
                    job_data TEXT NOT NULL,
                    job_path TEXT NOT NULL,
                    status TEXT DEFAULT 'pending',
                    priority INTEGER DEFAULT 0,
                    created_at TIMESTAMP DEFAULT CURRENT_TIMESTAMP,
                    started_at TIMESTAMP,
                    completed_at TIMESTAMP,
                    error_message TEXT,
                    retry_count INTEGER DEFAULT 0,
                    max_retries INTEGER DEFAULT 3
                )
            """
            )

            # Indexes matching the queue's access patterns.
            conn.execute("CREATE INDEX IF NOT EXISTS idx_jobs_status ON jobs(status)")
            conn.execute(
                "CREATE INDEX IF NOT EXISTS idx_jobs_priority ON jobs(priority DESC)"
            )
            conn.execute(
                "CREATE INDEX IF NOT EXISTS idx_jobs_created_at ON jobs(created_at)"
            )

            conn.commit()

    @staticmethod
    def _row_to_job(row) -> Job:
        """Convert a raw ``SELECT *`` row tuple into a Job.

        NOTE: the positional indexes must match the column order of the
        CREATE TABLE statement in _init_database().
        """
        return Job.from_dict(
            {
                "id": row[0],
                "job_type": row[1],
                "job_data": json.loads(row[2]),
                "job_path": row[3],
                "status": row[4],
                "priority": row[5],
                "created_at": row[6],
                "started_at": row[7],
                "completed_at": row[8],
                "error_message": row[9],
                "retry_count": row[10],
                "max_retries": row[11],
            }
        )

    def add_job(self, job: Job) -> int:
        """Insert a job into the queue and return its database id."""
        with self._lock:
            with sqlite3.connect(self.db_path) as conn:
                cursor = conn.execute(
                    """
                    INSERT INTO jobs (job_type, job_data, job_path, status, priority, created_at, retry_count, max_retries)
                    VALUES (?, ?, ?, ?, ?, ?, ?, ?)
                """,
                    (
                        job.job_type.value,
                        json.dumps(job.job_data),
                        job.job_path,
                        job.status.value,
                        job.priority,
                        job.created_at.isoformat(),
                        job.retry_count,
                        job.max_retries,
                    ),
                )

                job_id = cursor.lastrowid
                self.logger.info(f"Added job {job_id} to queue: {job.job_type.value}")
                return job_id

    def get_next_job(self) -> Optional[Job]:
        """Get next job to process (highest priority, then oldest first)."""
        with self._lock:
            with sqlite3.connect(self.db_path) as conn:
                cursor = conn.execute(
                    """
                    SELECT * FROM jobs
                    WHERE status = 'pending'
                    ORDER BY priority DESC, created_at ASC
                    LIMIT 1
                """
                )

                row = cursor.fetchone()
                if not row:
                    return None

                return self._row_to_job(row)

    def update_job_status(
        self, job_id: int, status: JobStatus, error_message: Optional[str] = None
    ) -> bool:
        """Update job status, stamping started_at / completed_at as appropriate.

        Returns:
            True (kept for call-site compatibility; no failure signal is
            derived from the UPDATE itself).
        """
        with self._lock:
            with sqlite3.connect(self.db_path) as conn:
                if status == JobStatus.RUNNING:
                    conn.execute(
                        """
                        UPDATE jobs
                        SET status = ?, started_at = CURRENT_TIMESTAMP
                        WHERE id = ?
                    """,
                        (status.value, job_id),
                    )
                elif status in (
                    JobStatus.COMPLETED,
                    JobStatus.FAILED,
                    # BUGFIX: CANCELLED is terminal too. Without a
                    # completed_at stamp, cancelled jobs could never be
                    # removed by cleanup_old_jobs (which filters on
                    # completed_at).
                    JobStatus.CANCELLED,
                ):
                    conn.execute(
                        """
                        UPDATE jobs
                        SET status = ?, completed_at = CURRENT_TIMESTAMP, error_message = ?
                        WHERE id = ?
                    """,
                        (status.value, error_message, job_id),
                    )
                else:
                    conn.execute(
                        """
                        UPDATE jobs
                        SET status = ?
                        WHERE id = ?
                    """,
                        (status.value, job_id),
                    )

                conn.commit()
                return True

    def get_job(self, job_id: int) -> Optional[Job]:
        """Get job by ID, or None if it does not exist."""
        with self._lock:
            with sqlite3.connect(self.db_path) as conn:
                cursor = conn.execute("SELECT * FROM jobs WHERE id = ?", (job_id,))
                row = cursor.fetchone()

                if not row:
                    return None

                return self._row_to_job(row)

    def get_queue_status(self) -> Dict[str, int]:
        """Return a count of jobs per status (always all five keys)."""
        with self._lock:
            with sqlite3.connect(self.db_path) as conn:
                cursor = conn.execute(
                    """
                    SELECT status, COUNT(*)
                    FROM jobs
                    GROUP BY status
                """
                )

                status_counts = dict(cursor.fetchall())

                return {
                    "pending": status_counts.get("pending", 0),
                    "running": status_counts.get("running", 0),
                    "completed": status_counts.get("completed", 0),
                    "failed": status_counts.get("failed", 0),
                    "cancelled": status_counts.get("cancelled", 0),
                }

    def get_jobs_by_status(
        self, status: JobStatus, limit: Optional[int] = None
    ) -> List[Job]:
        """Get jobs with the given status, newest first (optionally limited)."""
        with self._lock:
            with sqlite3.connect(self.db_path) as conn:
                query = "SELECT * FROM jobs WHERE status = ? ORDER BY created_at DESC"
                params = [status.value]

                if limit:
                    query += " LIMIT ?"
                    params.append(limit)

                cursor = conn.execute(query, params)
                return [self._row_to_job(row) for row in cursor.fetchall()]

    def get_all_jobs(self) -> List[Job]:
        """Get every job in the queue, newest first."""
        with self._lock:
            with sqlite3.connect(self.db_path) as conn:
                cursor = conn.execute("SELECT * FROM jobs ORDER BY created_at DESC")
                return [self._row_to_job(row) for row in cursor.fetchall()]

    def cancel_job(self, job_id: int) -> bool:
        """Cancel a job (marks it CANCELLED; does not stop a running worker)."""
        return self.update_job_status(job_id, JobStatus.CANCELLED)

    def retry_failed_jobs(self) -> int:
        """Re-queue failed jobs below their retry limit; return how many."""
        with self._lock:
            with sqlite3.connect(self.db_path) as conn:
                cursor = conn.execute(
                    """
                    UPDATE jobs
                    SET status = 'pending', retry_count = retry_count + 1
                    WHERE status = 'failed' AND retry_count < max_retries
                """
                )

                retry_count = cursor.rowcount
                conn.commit()

                if retry_count > 0:
                    self.logger.info(f"Retrying {retry_count} failed jobs")

                return retry_count

    def cleanup_old_jobs(self, max_age_days: int = 30) -> int:
        """Delete terminal jobs older than *max_age_days*; return how many."""
        cutoff_date = datetime.now() - timedelta(days=max_age_days)

        with self._lock:
            with sqlite3.connect(self.db_path) as conn:
                cursor = conn.execute(
                    """
                    DELETE FROM jobs
                    WHERE status IN ('completed', 'failed', 'cancelled')
                    AND completed_at < ?
                """,
                    (cutoff_date.isoformat(),),
                )

                deleted_count = cursor.rowcount
                conn.commit()

                if deleted_count > 0:
                    self.logger.info(f"Cleaned up {deleted_count} old jobs")

                return deleted_count
core/logger.py ADDED
@@ -0,0 +1,79 @@
1
+ """
2
+ Logging configuration and utilities.
3
+
4
+ This module provides centralized logging configuration using loguru.
5
+ """
6
+
7
+ import sys
8
+ from pathlib import Path
9
+ from typing import Optional
10
+
11
+ from loguru import logger
12
+
13
+
14
def get_logger(
    name: Optional[str] = None,
    verbose: bool = False,
    log_file: Optional[Path] = None,
    level: str = "INFO",
) -> "logger":
    """
    Get a configured logger instance.

    Reconfigures the *global* loguru logger: removes every existing sink,
    then installs a colorized stderr sink and, optionally, a rotating
    file sink.

    Args:
        name: Logger name (optional; stored on the logger for compatibility)
        verbose: Enable verbose logging (forces DEBUG level)
        log_file: Path to log file (optional)
        level: Logging level used when verbose is False

    Returns:
        Configured logger instance (the shared loguru logger — loguru
        exposes a single global logger object).
    """
    # Remove default handler.
    # NOTE(review): logger.remove() clears ALL sinks on the global loguru
    # logger, so each call to get_logger discards sinks added by earlier
    # calls (including any file sink) — confirm a wholesale reset per call
    # is intended.
    logger.remove()

    # Set log level (verbose overrides the `level` argument with DEBUG)
    log_level = "DEBUG" if verbose else level

    # Console handler with colors
    logger.add(
        sys.stderr,
        level=log_level,
        format="<green>{time:YYYY-MM-DD HH:mm:ss}</green> | "
        "<level>{level: <8}</level> | "
        "<cyan>{name}</cyan>:<cyan>{function}</cyan>:<cyan>{line}</cyan> - "
        "<level>{message}</level>",
        colorize=True,
    )

    # File handler (if specified): 10 MB rotation, 30-day retention,
    # rotated files compressed to zip.
    if log_file:
        log_file.parent.mkdir(parents=True, exist_ok=True)
        logger.add(
            log_file,
            level=log_level,
            format="{time:YYYY-MM-DD HH:mm:ss} | {level: <8} | {name}:{function}:{line} - {message}",
            rotation="10 MB",
            retention="30 days",
            compression="zip",
        )

    # Set the name attribute for compatibility
    # NOTE(review): this sets an attribute on the shared loguru singleton,
    # so the name leaks to every module holding the same logger object —
    # verify per-caller naming is not expected (loguru's bind() would be
    # the per-context mechanism).
    if name:
        logger.name = name

    return logger
66
+
67
+
68
def setup_logging(
    verbose: bool = False, log_file: Optional[Path] = None, level: str = "INFO"
) -> None:
    """
    Set up global logging configuration.

    Thin wrapper around get_logger() that configures the shared loguru
    logger for its side effects and discards the returned instance.

    Args:
        verbose: Enable verbose logging (forces DEBUG level)
        log_file: Path to log file (optional)
        level: Logging level used when verbose is False
    """
    get_logger(verbose=verbose, log_file=log_file, level=level)