py-alaska 0.1.0__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- py_alaska/SmBlock.py +263 -0
- py_alaska/__init__.py +63 -0
- py_alaska/div_logo.png +0 -0
- py_alaska/gconfig.py +1241 -0
- py_alaska/imi_camera.py +391 -0
- py_alaska/tab_camera.py +730 -0
- py_alaska/task_manager.py +661 -0
- py_alaska/task_monitor.py +1533 -0
- py_alaska/task_performance.py +550 -0
- py_alaska/task_signal.py +238 -0
- py_alaska-0.1.0.dist-info/METADATA +263 -0
- py_alaska-0.1.0.dist-info/RECORD +15 -0
- py_alaska-0.1.0.dist-info/WHEEL +5 -0
- py_alaska-0.1.0.dist-info/licenses/LICENSE +21 -0
- py_alaska-0.1.0.dist-info/top_level.txt +1 -0
py_alaska/gconfig.py
ADDED
|
@@ -0,0 +1,1241 @@
|
|
|
1
|
+
"""
|
|
2
|
+
╔══════════════════════════════════════════════════════════════════════════════╗
|
|
3
|
+
║ ALASKA v2.0 로봇 얼라인 비전 시스템 - GConfig Module ║
|
|
4
|
+
╠══════════════════════════════════════════════════════════════════════════════╣
|
|
5
|
+
║ Project : ALASKA v2.0 ║
|
|
6
|
+
║ Company : 동일비전소유 ║
|
|
7
|
+
║ Version : 2.0.0 ║
|
|
8
|
+
║ Date : 2026-01-31 ║
|
|
9
|
+
╠══════════════════════════════════════════════════════════════════════════════╣
|
|
10
|
+
║ Description: ║
|
|
11
|
+
║ Global configuration management for multiprocess environments ║
|
|
12
|
+
║ - YAML/JSON configuration file support ║
|
|
13
|
+
║ - Memory caching with mtime-based invalidation ║
|
|
14
|
+
║ - File locking for multiprocess synchronization ║
|
|
15
|
+
║ - Automatic backup and recovery ║
|
|
16
|
+
║ - Security hardening (path validation, checksum, audit logging) ║
|
|
17
|
+
╚══════════════════════════════════════════════════════════════════════════════╝
|
|
18
|
+
"""
|
|
19
|
+
|
|
20
|
+
from __future__ import annotations
|
|
21
|
+
|
|
22
|
+
import hashlib
|
|
23
|
+
import json
|
|
24
|
+
import os
|
|
25
|
+
import platform
|
|
26
|
+
import shutil
|
|
27
|
+
import sys
|
|
28
|
+
import tempfile
|
|
29
|
+
import time
|
|
30
|
+
from datetime import datetime
|
|
31
|
+
from pathlib import Path
|
|
32
|
+
from typing import Any, Dict, List, Optional, Union
|
|
33
|
+
|
|
34
|
+
# Conditional imports for file locking
|
|
35
|
+
if platform.system() == 'Windows':
|
|
36
|
+
import msvcrt
|
|
37
|
+
else:
|
|
38
|
+
import fcntl
|
|
39
|
+
|
|
40
|
+
# Optional YAML support
|
|
41
|
+
try:
|
|
42
|
+
import yaml
|
|
43
|
+
YAML_AVAILABLE = True
|
|
44
|
+
except ImportError:
|
|
45
|
+
YAML_AVAILABLE = False
|
|
46
|
+
|
|
47
|
+
|
|
48
|
+
# ==============================================================================
|
|
49
|
+
# Exception Classes
|
|
50
|
+
# ==============================================================================
|
|
51
|
+
|
|
52
|
+
class ConfigError(Exception):
    """Root of the configuration exception hierarchy.

    Carries the human-readable message plus the file or config path the
    error relates to (``None`` when not applicable).
    """

    def __init__(self, message: str, path: Optional[str] = None):
        super().__init__(message)
        self.message = message
        self.path = path
|
|
59
|
+
|
|
60
|
+
|
|
61
|
+
class ConfigFileNotFoundError(ConfigError):
    """Raised when the configuration file is missing on disk."""

    def __init__(self, filepath: str):
        message = f"Configuration file not found: {filepath}"
        super().__init__(message, filepath)
|
|
66
|
+
|
|
67
|
+
|
|
68
|
+
class ConfigPathNotFoundError(ConfigError):
    """Raised when a dot-notation path is absent during a set operation."""

    def __init__(self, path: str):
        message = f"Path not found, cannot create new path: {path}"
        super().__init__(message, path)
|
|
73
|
+
|
|
74
|
+
|
|
75
|
+
class ConfigLockTimeoutError(ConfigError):
    """Raised when the file lock could not be acquired within the timeout."""

    def __init__(self, filepath: str, timeout: float):
        super().__init__(f"Lock timeout after {timeout}s: {filepath}", filepath)
        # Keep the timeout that was exceeded so callers can back off/retry.
        self.timeout = timeout
|
|
81
|
+
|
|
82
|
+
|
|
83
|
+
class ConfigValidationError(ConfigError):
    """Raised when configuration data fails validation checks."""
|
|
86
|
+
|
|
87
|
+
|
|
88
|
+
class ConfigParseError(ConfigError):
    """Raised when a configuration file cannot be parsed."""

    def __init__(self, filepath: str, reason: str):
        message = f"Parse error in {filepath}: {reason}"
        super().__init__(message, filepath)
        self.reason = reason
|
|
94
|
+
|
|
95
|
+
|
|
96
|
+
class ConfigIntegrityError(ConfigError):
    """Raised when a file's checksum does not match the stored value."""

    def __init__(self, filepath: str, expected: str, actual: str):
        message = (
            f"Checksum mismatch for {filepath}: "
            f"expected {expected[:8]}..., got {actual[:8]}..."
        )
        super().__init__(message, filepath)
        self.expected = expected
        self.actual = actual
|
|
106
|
+
|
|
107
|
+
|
|
108
|
+
class ConfigSecurityError(ConfigError):
    """Raised when a security violation is detected.

    ``violation_type`` takes one of the class-level constant codes below.
    """

    # Violation type codes
    PATH_TRAVERSAL = "PATH_TRAVERSAL"
    SYMLINK_ATTACK = "SYMLINK_ATTACK"
    SIZE_EXCEEDED = "SIZE_EXCEEDED"
    INVALID_PATH = "INVALID_PATH"

    def __init__(self, message: str, violation_type: str, path: Optional[str] = None):
        super().__init__(message, path)
        self.violation_type = violation_type
|
|
119
|
+
|
|
120
|
+
|
|
121
|
+
class ConfigStaleLockError(ConfigError):
    """Raised when a lock file belonging to a dead process is found."""

    def __init__(self, filepath: str, pid: int):
        message = f"Stale lock detected for {filepath} (PID: {pid})"
        super().__init__(message, filepath)
        self.pid = pid
|
|
127
|
+
|
|
128
|
+
|
|
129
|
+
class ConfigMandatoryFieldError(ConfigError):
    """Raised when a mandatory configuration field is missing.

    The class constants name the dot-notation paths that every
    configuration file is expected to define.
    """

    APP_INFO_NAME = "app_info.name"
    APP_INFO_VERSION = "app_info.version"
    APP_INFO_ID = "app_info.id"
    TASK_CONFIG = "task_config"

    # All required fields, in the order they are checked.
    MANDATORY_FIELDS = [APP_INFO_NAME, APP_INFO_VERSION, APP_INFO_ID, TASK_CONFIG]

    def __init__(self, field: str, reason: str = "Mandatory field missing"):
        super().__init__(f"{reason}: {field}", field)
        self.field = field
        self.reason = reason
|
|
143
|
+
|
|
144
|
+
|
|
145
|
+
class ConfigRecoveryError(ConfigError):
    """Raised when every backup restore attempt has failed."""

    def __init__(self, filepath: str, tried_backups: List[str]):
        self.tried_backups = tried_backups
        attempted = len(tried_backups)
        super().__init__(
            f"Recovery failed for {filepath}, tried {attempted} backups",
            filepath
        )
|
|
154
|
+
|
|
155
|
+
|
|
156
|
+
# ==============================================================================
|
|
157
|
+
# PathParser - Static utility class for dot notation path parsing
|
|
158
|
+
# ==============================================================================
|
|
159
|
+
|
|
160
|
+
class PathParser:
    """Static utilities for parsing dot-notation paths (e.g. ``"a.b.c"``)."""

    @staticmethod
    def parse(path: str) -> List[str]:
        """Split *path* into its component keys; an empty path yields []."""
        if not path:
            return []
        return path.split('.')

    @staticmethod
    def validate(path: str, max_depth: int = 10) -> bool:
        """Return True when *path* is a well-formed dot-notation path.

        Rejects empty/non-string paths, paths deeper than *max_depth*,
        empty segments, and segments starting with an underscore
        (reserved/private keys).
        """
        if not path or not isinstance(path, str):
            return False

        parts = path.split('.')
        if len(parts) > max_depth:
            return False

        # Note: consecutive dots ('..') produce an empty segment, so the
        # 'not part' test below also covers that case — no separate
        # substring check is needed.
        return all(part and not part.startswith('_') for part in parts)

    @staticmethod
    def get_parent(path: str) -> str:
        """Return the parent path, or "" when *path* has a single key."""
        parts = path.rsplit('.', 1)
        return parts[0] if len(parts) > 1 else ""

    @staticmethod
    def get_key(path: str) -> str:
        """Return the final key of *path* ("" for an empty path)."""
        parts = path.rsplit('.', 1)
        return parts[-1] if parts else ""
|
|
203
|
+
|
|
204
|
+
|
|
205
|
+
# ==============================================================================
|
|
206
|
+
# ConfigCache - Memory cache management
|
|
207
|
+
# ==============================================================================
|
|
208
|
+
|
|
209
|
+
class ConfigCache:
    """In-memory cache of a parsed configuration file.

    Holds the last-parsed data together with the file mtime at parse time
    so staleness can be detected cheaply without re-reading the file.
    """

    __slots__ = ('_data', '_mtime', '_filepath')

    def __init__(self, filepath: str):
        self._data: Dict[str, Any] = {}   # parsed configuration tree
        self._mtime: float = 0.0          # file mtime at last refresh()
        self._filepath = filepath

    def get(self, path: str, default: Any = None) -> Any:
        """Return the value at dot-notation *path*, or *default* if absent."""
        keys = PathParser.parse(path)
        return self._get_nested(self._data, keys, default)

    def set(self, path: str, value: Any) -> None:
        """Set the value at dot-notation *path*.

        Raises ConfigPathNotFoundError when an intermediate key is missing
        or is not a dict. An empty path is a no-op.
        """
        keys = PathParser.parse(path)
        self._set_nested(self._data, keys, value)

    def is_stale(self) -> bool:
        """Return True when the file changed (or vanished) since the last refresh."""
        if not os.path.exists(self._filepath):
            return True
        try:
            return os.path.getmtime(self._filepath) > self._mtime
        except OSError:
            # An unreadable mtime is treated as stale so callers re-read.
            return True

    def refresh(self, data: Dict[str, Any], mtime: float) -> None:
        """Replace the cached data, remembering the mtime it was read at."""
        self._data = data
        self._mtime = mtime

    def to_dict(self) -> Dict[str, Any]:
        """Return a shallow copy of all cached data."""
        return dict(self._data)

    def _get_nested(self, data: Dict, keys: List[str], default: Any) -> Any:
        """Walk *keys* through nested dicts; return *default* on any miss."""
        current = data
        for key in keys:
            if isinstance(current, dict) and key in current:
                current = current[key]
            else:
                return default
        return current

    def _set_nested(self, data: Dict, keys: List[str], value: Any) -> None:
        """Walk to the parent of the final key and assign *value*.

        Every intermediate key must already exist and map to a dict,
        otherwise ConfigPathNotFoundError is raised; the final key itself
        may be new. (The original implementation carried an extra
        isinstance check on the final assignment that could never fire —
        the loop invariant guarantees 'current' is a dict — so it has been
        removed with no behavior change.)
        """
        current = data
        for key in keys[:-1]:
            if key not in current or not isinstance(current[key], dict):
                raise ConfigPathNotFoundError('.'.join(keys))
            current = current[key]

        if keys:
            current[keys[-1]] = value
|
|
275
|
+
|
|
276
|
+
|
|
277
|
+
# ==============================================================================
|
|
278
|
+
# FileLock - Process-safe file locking
|
|
279
|
+
# ==============================================================================
|
|
280
|
+
|
|
281
|
+
class FileLock:
    """Cross-platform file locking for multiprocess synchronization.

    A sidecar ``<filepath>.lock`` file is locked via msvcrt on Windows and
    fcntl.flock elsewhere. Owner metadata (pid, timestamp, hostname) is
    written into the lock file so locks left behind by dead processes can
    be detected and cleared.
    """

    __slots__ = ('_path', '_fd', '_locked', '_timeout', '_retry_interval')

    def __init__(self, filepath: str, timeout: float = 5.0, retry_interval: float = 0.1):
        # The lock file lives next to the protected file.
        self._path = filepath + '.lock'
        self._fd: Optional[int] = None         # OS-level fd of the lock file
        self._locked = False                   # whether this instance holds the lock
        self._timeout = timeout                # default acquire() timeout (seconds)
        self._retry_interval = retry_interval  # poll interval between attempts

    def acquire(self, timeout: Optional[float] = None) -> bool:
        """Acquire file lock.

        Polls every retry_interval seconds until success or *timeout*
        (defaults to the constructor value). Returns True on success and
        raises ConfigLockTimeoutError when the timeout elapses.
        """
        if timeout is None:
            timeout = self._timeout

        start_time = time.time()

        while True:
            try:
                if self._try_lock():
                    self._write_lock_info()
                    self._locked = True
                    return True
            except (IOError, OSError):
                # Transient FS error: fall through and retry until timeout.
                pass

            elapsed = time.time() - start_time
            if elapsed >= timeout:
                raise ConfigLockTimeoutError(self._path, timeout)

            time.sleep(self._retry_interval)

        # NOTE(review): unreachable — the loop only exits via return/raise.
        return False

    def release(self) -> None:
        """Release file lock.

        Unlocks and closes the fd, then removes the lock file. Safe to call
        when not locked (no-op); errors during release are swallowed.
        """
        if not self._locked:
            return

        try:
            if self._fd is not None:
                if platform.system() == 'Windows':
                    msvcrt.locking(self._fd, msvcrt.LK_UNLCK, 1)
                else:
                    fcntl.flock(self._fd, fcntl.LOCK_UN)
                os.close(self._fd)
                self._fd = None

            if os.path.exists(self._path):
                os.remove(self._path)
        except (IOError, OSError):
            pass
        finally:
            self._locked = False

    def _try_lock(self) -> bool:
        """Try to acquire the lock once (non-blocking); True on success."""
        # Clear any lock left behind by a dead/expired owner first.
        if self._check_stale_lock():
            self._cleanup_stale_lock()

        self._fd = os.open(self._path, os.O_CREAT | os.O_RDWR)

        try:
            if platform.system() == 'Windows':
                msvcrt.locking(self._fd, msvcrt.LK_NBLCK, 1)
            else:
                fcntl.flock(self._fd, fcntl.LOCK_EX | fcntl.LOCK_NB)
            return True
        except (IOError, OSError):
            # Another process holds the lock: close our fd and report failure.
            os.close(self._fd)
            self._fd = None
            return False

    def _write_lock_info(self) -> None:
        """Write lock owner information (pid/timestamp/hostname) to the lock file."""
        lock_info = {
            "pid": os.getpid(),
            "timestamp": time.time(),
            "hostname": platform.node()
        }
        try:
            with open(self._path, 'w') as f:
                json.dump(lock_info, f)
        except (IOError, OSError):
            # Best effort: losing owner info only weakens stale detection.
            pass

    def _check_stale_lock(self) -> bool:
        """Check if existing lock is stale.

        Stale means: unreadable/missing owner info, owner process dead, or
        lock older than 300 seconds. Returns False when no lock file exists.
        """
        if not os.path.exists(self._path):
            return False

        try:
            with open(self._path, 'r') as f:
                lock_info = json.load(f)

            pid = lock_info.get('pid')
            if pid is None:
                return True

            if not self._is_process_alive(pid):
                return True

            timestamp = lock_info.get('timestamp', 0)
            # Hard expiry: any lock older than 5 minutes is considered stale.
            if time.time() - timestamp > 300:
                return True

            return False
        except (IOError, OSError, json.JSONDecodeError):
            return True

    def _cleanup_stale_lock(self) -> None:
        """Remove a stale lock file (errors ignored)."""
        try:
            if os.path.exists(self._path):
                os.remove(self._path)
        except (IOError, OSError):
            pass

    @staticmethod
    def _is_process_alive(pid: int) -> bool:
        """Check if process *pid* is alive (OpenProcess on Windows, kill(pid, 0) elsewhere)."""
        try:
            if platform.system() == 'Windows':
                import ctypes
                kernel32 = ctypes.windll.kernel32
                # 0x1000 = PROCESS_QUERY_LIMITED_INFORMATION access right.
                handle = kernel32.OpenProcess(0x1000, False, pid)
                if handle:
                    kernel32.CloseHandle(handle)
                    return True
                return False
            else:
                # Signal 0 performs an existence/permission check only.
                os.kill(pid, 0)
                return True
        except (OSError, AttributeError):
            return False

    def __enter__(self) -> 'FileLock':
        self.acquire()
        return self

    def __exit__(self, *args) -> None:
        self.release()
|
|
425
|
+
|
|
426
|
+
|
|
427
|
+
# ==============================================================================
|
|
428
|
+
# BackupManager - Backup creation and rotation
|
|
429
|
+
# ==============================================================================
|
|
430
|
+
|
|
431
|
+
class BackupManager:
    """Creates and rotates numbered backups (<file>.bak.1 .. <file>.bak.N)."""

    __slots__ = ('_filepath', '_max_count')

    def __init__(self, filepath: str, max_count: int = 5):
        self._filepath = filepath
        self._max_count = max_count

    def _bak(self, number: int) -> str:
        # Path of backup slot *number*.
        return f"{self._filepath}.bak.{number}"

    def create_backup(self) -> Optional[str]:
        """Copy the current file into slot 1, rotating older backups first.

        Returns the backup path, or None when the source file is missing
        or the copy fails.
        """
        if not os.path.exists(self._filepath):
            return None

        self._rotate()

        destination = self._bak(1)
        try:
            shutil.copy2(self._filepath, destination)
        except (IOError, OSError):
            return None
        return destination

    def _rotate(self) -> None:
        """Shift each existing backup up one slot, dropping the oldest."""
        for slot in range(self._max_count, 0, -1):
            src = self._bak(slot)
            if not os.path.exists(src):
                continue
            if slot >= self._max_count:
                # The oldest slot falls off the end of the rotation.
                os.remove(src)
            else:
                shutil.move(src, self._bak(slot + 1))

    def get_backups(self) -> List[str]:
        """Return the existing backup paths, newest (slot 1) first."""
        candidates = (self._bak(i) for i in range(1, self._max_count + 1))
        return [p for p in candidates if os.path.exists(p)]

    def restore(self, number: int = 1) -> bool:
        """Overwrite the main file from backup slot *number*; True on success."""
        source = self._bak(number)
        if not os.path.exists(source):
            return False

        try:
            shutil.copy2(source, self._filepath)
        except (IOError, OSError):
            return False
        return True

    def cleanup_old_backups(self) -> None:
        """Delete any backup slots numbered beyond max_count."""
        slot = self._max_count + 1
        while os.path.exists(self._bak(slot)):
            os.remove(self._bak(slot))
            slot += 1
|
|
497
|
+
|
|
498
|
+
|
|
499
|
+
# ==============================================================================
|
|
500
|
+
# Parsers - YAML and JSON parsing
|
|
501
|
+
# ==============================================================================
|
|
502
|
+
|
|
503
|
+
class YamlParser:
    """Static YAML (de)serialization helpers; requires PyYAML."""

    @staticmethod
    def parse(content: str) -> Dict[str, Any]:
        """Parse YAML *content*; an empty/falsy document yields an empty dict."""
        if not YAML_AVAILABLE:
            raise ConfigParseError("", "PyYAML is not installed")
        parsed = yaml.safe_load(content)
        return parsed if parsed else {}

    @staticmethod
    def dump(data: Dict[str, Any]) -> str:
        """Serialize *data* to a YAML string (block style, unicode kept)."""
        if not YAML_AVAILABLE:
            raise ConfigParseError("", "PyYAML is not installed")
        return yaml.dump(data, default_flow_style=False, allow_unicode=True)
|
|
519
|
+
|
|
520
|
+
|
|
521
|
+
class JsonParser:
    """Static JSON (de)serialization helpers."""

    @staticmethod
    def parse(content: str) -> Dict[str, Any]:
        """Deserialize a JSON string."""
        return json.loads(content)

    @staticmethod
    def dump(data: Dict[str, Any]) -> str:
        """Serialize *data* as 2-space-indented JSON, keeping non-ASCII chars."""
        return json.dumps(data, ensure_ascii=False, indent=2)
|
|
533
|
+
|
|
534
|
+
|
|
535
|
+
# ==============================================================================
|
|
536
|
+
# Security Classes (Hardening)
|
|
537
|
+
# ==============================================================================
|
|
538
|
+
|
|
539
|
+
class PathValidator:
    """Validates file paths against traversal / symlink / injection attacks."""

    # Substrings that are never legitimate in a config file path.
    DANGEROUS_PATTERNS = ['..', '~', '$', '%', '|', '>', '<', '`']

    @staticmethod
    def validate(filepath: str, base_dir: Optional[str] = None) -> bool:
        """Validate *filepath* for security issues.

        Returns False for an empty path (or when path resolution fails
        while *base_dir* is given). Raises ConfigSecurityError when a
        dangerous pattern, a symlink, or an escape from *base_dir* is
        detected.
        """
        if not filepath:
            return False

        for pattern in PathValidator.DANGEROUS_PATTERNS:
            if pattern in filepath:
                raise ConfigSecurityError(
                    f"Path contains dangerous pattern: {pattern}",
                    ConfigSecurityError.PATH_TRAVERSAL,
                    filepath
                )

        path = Path(filepath)
        if path.is_symlink():
            raise ConfigSecurityError(
                "Symbolic links are not allowed",
                ConfigSecurityError.SYMLINK_ATTACK,
                filepath
            )

        if base_dir:
            try:
                resolved = path.resolve()
                base_resolved = Path(base_dir).resolve()
                # BUGFIX: compare path components, not string prefixes —
                # the previous startswith() check accepted sibling paths
                # such as "/base2/x" as being inside "/base".
                if base_resolved != resolved and base_resolved not in resolved.parents:
                    raise ConfigSecurityError(
                        "Path traversal detected",
                        ConfigSecurityError.PATH_TRAVERSAL,
                        filepath
                    )
            except (OSError, ValueError):
                return False

        return True
|
|
580
|
+
|
|
581
|
+
|
|
582
|
+
class SizeValidator:
    """Enforces byte-size limits on values and files."""

    DEFAULT_MAX_VALUE_SIZE = 1048576    # 1 MiB per value
    DEFAULT_MAX_FILE_SIZE = 10485760    # 10 MiB per file

    @staticmethod
    def validate_value(value: Any, max_size: int = DEFAULT_MAX_VALUE_SIZE) -> bool:
        """Check *value*'s JSON-serialized UTF-8 size.

        Values that cannot be JSON-serialized pass (their size cannot be
        measured). Raises ConfigSecurityError when the limit is exceeded.
        """
        try:
            encoded = json.dumps(value, ensure_ascii=False).encode('utf-8')
        except (TypeError, ValueError):
            return True
        if len(encoded) > max_size:
            raise ConfigSecurityError(
                f"Value size exceeds limit: {max_size} bytes",
                ConfigSecurityError.SIZE_EXCEEDED
            )
        return True

    @staticmethod
    def validate_file(filepath: str, max_size: int = DEFAULT_MAX_FILE_SIZE) -> bool:
        """Check the on-disk size of *filepath*; missing files pass.

        Raises ConfigSecurityError when the limit is exceeded.
        """
        if not os.path.exists(filepath):
            return True

        actual = os.path.getsize(filepath)
        if actual > max_size:
            raise ConfigSecurityError(
                f"File size {actual} exceeds limit: {max_size} bytes",
                ConfigSecurityError.SIZE_EXCEEDED,
                filepath
            )
        return True
|
|
616
|
+
|
|
617
|
+
|
|
618
|
+
class ChecksumManager:
    """Computes, stores, and verifies file checksums (sidecar .checksum file)."""

    __slots__ = ('_filepath', '_algorithm')

    def __init__(self, filepath: str, algorithm: str = 'sha256'):
        self._filepath = filepath
        self._algorithm = algorithm

    def calculate(self) -> str:
        """Return the hex digest of the file, or "" when it does not exist."""
        if not os.path.exists(self._filepath):
            return ""

        digest = hashlib.new(self._algorithm)
        with open(self._filepath, 'rb') as fh:
            while chunk := fh.read(8192):
                digest.update(chunk)
        return digest.hexdigest()

    def verify(self) -> bool:
        """Compare the current digest with the stored one.

        Passes trivially when no checksum has been stored yet; raises
        ConfigIntegrityError on a mismatch.
        """
        stored = self.load()
        if not stored:
            return True

        current = self.calculate()
        if current != stored:
            raise ConfigIntegrityError(self._filepath, stored, current)
        return True

    def save(self) -> None:
        """Write the current digest plus metadata to the sidecar file."""
        record = {
            "algorithm": self._algorithm,
            "checksum": self.calculate(),
            "timestamp": time.time(),
            "size": os.path.getsize(self._filepath) if os.path.exists(self._filepath) else 0
        }

        with open(self._filepath + '.checksum', 'w') as fh:
            json.dump(record, fh)

    def load(self) -> Optional[str]:
        """Return the stored digest, or None when absent/unreadable."""
        sidecar = self._filepath + '.checksum'
        if not os.path.exists(sidecar):
            return None

        try:
            with open(sidecar, 'r') as fh:
                return json.load(fh).get('checksum')
        except (IOError, json.JSONDecodeError):
            return None
|
|
676
|
+
|
|
677
|
+
|
|
678
|
+
class AuditLogger:
    """Appends configuration-change records to a JSON-lines audit log."""

    __slots__ = ('_filepath', '_mask_sensitive')

    # Path fragments that mark a value as sensitive and subject to masking.
    SENSITIVE_KEYWORDS = ['password', 'secret', 'key', 'token', 'credential', 'auth']

    def __init__(self, filepath: str, mask_sensitive: bool = True):
        self._filepath = filepath
        self._mask_sensitive = mask_sensitive

    def log(self, action: str, path: str, old_value: Any = None, new_value: Any = None) -> None:
        """Append one audit entry; write failures are ignored (best effort)."""
        if self._mask_sensitive:
            old_value = self._mask_value(path, old_value)
            new_value = self._mask_value(path, new_value)

        record = {
            "timestamp": datetime.now().isoformat(),
            "action": action,
            "path": path,
            "old_value": None if old_value is None else str(old_value),
            "new_value": None if new_value is None else str(new_value),
            "pid": os.getpid()
        }

        try:
            with open(self._filepath + '.audit.log', 'a') as fh:
                fh.write(json.dumps(record, ensure_ascii=False) + '\n')
        except (IOError, OSError):
            pass

    def _mask_value(self, path: str, value: Any) -> Any:
        """Return *value* masked when *path* looks sensitive; else unchanged."""
        if value is None:
            return None

        lowered = path.lower()
        if any(word in lowered for word in self.SENSITIVE_KEYWORDS):
            text = str(value)
            if len(text) <= 4:
                return "****"
            return text[:2] + "****" + text[-2:]

        return value
|
|
726
|
+
|
|
727
|
+
|
|
728
|
+
# ==============================================================================
|
|
729
|
+
# GConfig - Main configuration manager (Singleton)
|
|
730
|
+
# ==============================================================================
|
|
731
|
+
|
|
732
|
+
class GConfig:
|
|
733
|
+
"""Global configuration manager with multiprocess support."""
|
|
734
|
+
|
|
735
|
+
__slots__ = (
|
|
736
|
+
'_filepath', '_cache', '_lock', '_backup', '_encoding',
|
|
737
|
+
'_auto_refresh', '_lock_timeout', '_backup_count',
|
|
738
|
+
'_max_path_depth', '_max_value_size', '_max_file_size',
|
|
739
|
+
'_enable_checksum', '_enable_audit_log', '_mask_sensitive',
|
|
740
|
+
'_checksum_manager', '_audit_logger'
|
|
741
|
+
)
|
|
742
|
+
|
|
743
|
+
_instance: Optional['GConfig'] = None
|
|
744
|
+
|
|
745
|
+
    def __new__(cls) -> 'GConfig':
        # Classic module-wide singleton: every GConfig() call yields the
        # same instance; defaults are initialized exactly once.
        # NOTE(review): not thread-safe — two threads racing the first
        # call could each construct an instance. Confirm single-threaded
        # initialization is guaranteed by callers.
        if cls._instance is None:
            cls._instance = super().__new__(cls)
            cls._instance._init_defaults()
        return cls._instance
|
|
750
|
+
|
|
751
|
+
    def _init_defaults(self) -> None:
        """Initialize default values.

        Called once from __new__ when the singleton is first created;
        load() must be called afterwards before the instance is usable.
        """
        self._filepath: Optional[str] = None
        self._cache: Optional[ConfigCache] = None
        self._lock: Optional[FileLock] = None
        self._backup: Optional[BackupManager] = None
        self._encoding: str = 'utf-8'
        self._auto_refresh: bool = True
        self._lock_timeout: float = 5.0          # seconds to wait for the file lock
        self._backup_count: int = 5              # rotated .bak.N files to keep
        self._max_path_depth: int = 10           # max keys in a dot-notation path
        self._max_value_size: int = 1048576      # 1 MiB per value
        self._max_file_size: int = 10485760     # 10 MiB per config file
        self._enable_checksum: bool = False
        self._enable_audit_log: bool = False
        self._mask_sensitive: bool = True        # mask secrets in audit entries
        self._checksum_manager: Optional[ChecksumManager] = None
        self._audit_logger: Optional[AuditLogger] = None
|
|
769
|
+
|
|
770
|
+
    def configure(self, **options) -> None:
        """Configure GConfig options.

        Recognized keyword options (unknown keys are silently ignored):
        lock_timeout, backup_count, auto_refresh, max_path_depth,
        max_value_size, max_file_size, enable_checksum, enable_audit_log,
        mask_sensitive, lock_retry_interval. May be called before or after
        load(); helpers that already exist are updated in place below.
        """
        if 'lock_timeout' in options:
            self._lock_timeout = float(options['lock_timeout'])
        if 'backup_count' in options:
            self._backup_count = int(options['backup_count'])
        if 'auto_refresh' in options:
            self._auto_refresh = bool(options['auto_refresh'])
        if 'max_path_depth' in options:
            self._max_path_depth = int(options['max_path_depth'])
        if 'max_value_size' in options:
            self._max_value_size = int(options['max_value_size'])
        if 'max_file_size' in options:
            self._max_file_size = int(options['max_file_size'])
        if 'enable_checksum' in options:
            self._enable_checksum = bool(options['enable_checksum'])
        if 'enable_audit_log' in options:
            self._enable_audit_log = bool(options['enable_audit_log'])
        if 'mask_sensitive' in options:
            self._mask_sensitive = bool(options['mask_sensitive'])
        # Only takes effect when a lock already exists (i.e. after load()).
        if 'lock_retry_interval' in options and self._lock:
            self._lock._retry_interval = float(options['lock_retry_interval'])

        # Propagate the (possibly updated) backup count to a live manager.
        if self._backup:
            self._backup._max_count = self._backup_count
|
|
795
|
+
|
|
796
|
+
    def load(self, filepath: str, encoding: str = 'utf-8') -> 'GConfig':
        """Load configuration file.

        Validates the path and file size, (re)builds the cache/lock/backup
        helpers, parses the file, and validates mandatory fields.

        Returns:
            self for method chaining (e.g., gconfig.load("config.yaml").dump())

        Raises:
            ConfigFileNotFoundError: when *filepath* does not exist.
            ConfigSecurityError: when path or size validation fails.
        """
        if not os.path.exists(filepath):
            raise ConfigFileNotFoundError(filepath)

        PathValidator.validate(filepath)
        SizeValidator.validate_file(filepath, self._max_file_size)

        self._filepath = os.path.abspath(filepath)
        self._encoding = encoding

        # Fresh helpers for this file; any previous state is discarded.
        self._cache = ConfigCache(self._filepath)
        self._lock = FileLock(self._filepath, self._lock_timeout)
        self._backup = BackupManager(self._filepath, self._backup_count)

        if self._enable_checksum:
            self._checksum_manager = ChecksumManager(self._filepath)

        if self._enable_audit_log:
            self._audit_logger = AuditLogger(self._filepath, self._mask_sensitive)

        self._load_file()

        self.validate_mandatory_fields()

        if self._enable_audit_log and self._audit_logger:
            self._audit_logger.log("LOAD", self._filepath)

        return self
|
|
829
|
+
|
|
830
|
+
def _load_file(self, max_retries: int = 5, retry_delay: float = 0.1) -> None:
    """Load and parse the configuration file with retry logic for multiprocess safety.

    Retries cover transient states produced by a concurrent atomic writer:
    the file briefly missing, empty, or half-written (parse failure).

    Args:
        max_retries: Number of read attempts before giving up.
        retry_delay: Seconds to sleep between attempts.

    Raises:
        ConfigFileNotFoundError: If the file is still missing/unreadable after retries.
        ConfigParseError: If the content cannot be parsed after retries.
    """
    last_error = None

    for attempt in range(max_retries):
        try:
            # File may be temporarily missing during an atomic rename by another process.
            if not os.path.exists(self._filepath):
                if attempt < max_retries - 1:
                    time.sleep(retry_delay)
                    continue
                raise ConfigFileNotFoundError(self._filepath)

            with open(self._filepath, 'r', encoding=self._encoding) as f:
                content = f.read()

            # Empty content may indicate the file is being rewritten right now.
            if not content.strip():
                if attempt < max_retries - 1:
                    time.sleep(retry_delay)
                    continue
                raise ConfigParseError(self._filepath, "Empty configuration file")

            # Pick a parser by extension; unknown extensions try YAML, then JSON.
            if self._filepath.endswith(('.yaml', '.yml')):
                data = YamlParser.parse(content)
            elif self._filepath.endswith('.json'):
                data = JsonParser.parse(content)
            else:
                try:
                    data = YamlParser.parse(content)
                except Exception:
                    # Was a bare `except:` — narrowed so KeyboardInterrupt/SystemExit
                    # are no longer swallowed by the JSON fallback.
                    data = JsonParser.parse(content)

            mtime = os.path.getmtime(self._filepath)
            self._cache.refresh(data, mtime)

            if self._enable_checksum and self._checksum_manager:
                self._checksum_manager.verify()

            return  # Success

        except (FileNotFoundError, PermissionError, IOError, OSError) as e:
            last_error = e
            if attempt < max_retries - 1:
                time.sleep(retry_delay)
                continue
        # `yaml` is only referenced when YAML_AVAILABLE is true, so the name is
        # safe even when PyYAML is not installed (conditional short-circuits).
        except (yaml.YAMLError if YAML_AVAILABLE else Exception) as e:
            last_error = e
            if attempt < max_retries - 1:
                time.sleep(retry_delay)
                continue
            raise ConfigParseError(self._filepath, str(e))
        except json.JSONDecodeError as e:
            last_error = e
            if attempt < max_retries - 1:
                time.sleep(retry_delay)
                continue
            raise ConfigParseError(self._filepath, str(e))

    # All retries failed: map the last observed error onto the config exceptions.
    if last_error:
        if isinstance(last_error, (FileNotFoundError, PermissionError, IOError, OSError)):
            raise ConfigFileNotFoundError(self._filepath)
        raise ConfigParseError(self._filepath, str(last_error))
def data_get(self, path: str, default: Any = None) -> Any:
    """Return the value stored at dot-notation *path*, or *default* if absent.

    When auto-refresh is enabled and the on-disk file is newer than the
    cache, the file is transparently reloaded before the lookup.
    """
    if self._cache is None:
        return default

    stale = self._auto_refresh and self._cache.is_stale()
    if stale:
        try:
            self._load_file()
        except (ConfigFileNotFoundError, ConfigParseError):
            # A concurrent writer can leave the file in a transient state;
            # answering from the cached data beats failing the read.
            pass

    return self._cache.get(path, default)
def data_set(self, path: str, value: Any, max_retries: int = 3) -> None:
    """Set configuration value using dot notation path.

    Validates the path and value size, updates the in-memory cache, and
    persists the change to disk, retrying transient I/O failures.

    Args:
        path: Dot-notation key path (e.g. ``"app_info.name"``).
        value: Value to store; size-checked against ``self._max_value_size``.
        max_retries: Number of write attempts for transient I/O errors.

    Raises:
        ConfigError: If no configuration has been loaded.
        ConfigSecurityError: If *path* fails validation.
        ConfigLockTimeoutError: Propagated immediately, never retried.
        OSError/IOError/PermissionError: If all retries fail.
    """
    if self._cache is None:
        raise ConfigError("No configuration loaded")

    if not PathParser.validate(path, self._max_path_depth):
        raise ConfigSecurityError(
            f"Invalid path: {path}",
            ConfigSecurityError.INVALID_PATH,
            path
        )

    SizeValidator.validate_value(value, self._max_value_size)

    # Capture the previous value only when audit logging will use it.
    old_value = self._cache.get(path) if self._enable_audit_log else None

    last_error = None
    for attempt in range(max_retries):
        try:
            # Reload before setting to get latest data
            if self._auto_refresh and self._cache.is_stale():
                try:
                    self._load_file()
                except (ConfigFileNotFoundError, ConfigParseError):
                    # May happen during concurrent write, continue with cached data
                    pass

            self._cache.set(path, value)
            # _save_file re-reads the file under the lock and re-applies
            # (path, value), so concurrent writers' changes are preserved.
            self._save_file(path, value)

            if self._enable_audit_log and self._audit_logger:
                self._audit_logger.log("SET", path, old_value, value)

            return  # Success

        except ConfigLockTimeoutError:
            raise  # Don't retry lock timeouts
        except (IOError, OSError, PermissionError) as e:
            last_error = e
            if attempt < max_retries - 1:
                time.sleep(0.1)
                continue
            raise

    # NOTE(review): normally unreachable — the final failed attempt re-raises
    # inside the loop above; kept as a defensive fallback.
    if last_error:
        raise last_error
def _save_file(self, set_path: str | None = None, set_value: Any = None) -> None:
    """Save configuration to file with locking, backup, and an atomic replace.

    Under the inter-process file lock: re-reads the latest on-disk data,
    re-applies the single pending (set_path, set_value) change if any,
    creates a rotating backup, then writes via a temp file + rename so
    readers never observe a half-written file.

    Args:
        set_path: Dot-notation path of the change being persisted, or None
            when saving the cache as-is (see ``save()``).
        set_value: Value to re-apply at *set_path* after the reload.
    """
    with self._lock:
        # Reload file to get latest data, then re-apply our change
        try:
            self._load_file()
            # Re-apply the value we're setting (it was overwritten by reload)
            if set_path is not None:
                self._cache.set(set_path, set_value)
        except (ConfigFileNotFoundError, ConfigParseError):
            # File missing/corrupt: fall through and write the cached data.
            pass

        self._backup.create_backup()

        data = self._cache.to_dict()

        # Serialize in the format implied by the file extension.
        if self._filepath.endswith('.yaml') or self._filepath.endswith('.yml'):
            content = YamlParser.dump(data)
        else:
            content = JsonParser.dump(data)

        # Write to a temp file in the same directory so the final rename
        # stays on one filesystem (atomic on POSIX).
        dir_path = os.path.dirname(self._filepath)
        fd, temp_path = tempfile.mkstemp(dir=dir_path, suffix='.tmp')
        try:
            with os.fdopen(fd, 'w', encoding=self._encoding) as f:
                f.write(content)
                f.flush()
                os.fsync(f.fileno())  # force data to disk before the rename

            if platform.system() == 'Windows':
                # Windows atomic replace with retry
                max_attempts = 5
                for attempt in range(max_attempts):
                    try:
                        if os.path.exists(self._filepath):
                            os.replace(temp_path, self._filepath)
                        else:
                            os.rename(temp_path, self._filepath)
                        break
                    except PermissionError:
                        # Target briefly locked by another process/AV scanner.
                        if attempt < max_attempts - 1:
                            time.sleep(0.05)
                        else:
                            raise
            else:
                os.rename(temp_path, self._filepath)

            # Record the new mtime so the cache is not considered stale.
            mtime = os.path.getmtime(self._filepath)
            self._cache._mtime = mtime

            if self._enable_checksum and self._checksum_manager:
                self._checksum_manager.save()

        except Exception:
            # Best-effort cleanup of the orphaned temp file, then re-raise.
            if os.path.exists(temp_path):
                try:
                    os.remove(temp_path)
                except OSError:
                    pass
            raise
def save(self) -> None:
    """Persist the current in-memory configuration to disk.

    Raises:
        ConfigError: If no configuration file has been loaded yet.
    """
    if self._cache is None:
        raise ConfigError("No configuration loaded")

    # No pending single-key change: write the cache exactly as it stands.
    self._save_file(None, None)

    logger = self._audit_logger if self._enable_audit_log else None
    if logger:
        logger.log("SAVE", self._filepath)
def dump(self, use_ascii: bool | None = None) -> str:
    """Display configuration as tree structure using box characters.

    Prints the rendered tree to stdout AND returns it as a string.

    Args:
        use_ascii: Use ASCII characters instead of Unicode.
            None = auto-detect (default), True = ASCII, False = Unicode

    Returns:
        Tree-formatted string of configuration

    Raises:
        ConfigError: If no configuration has been loaded.

    Example:
        gconfig.load("config.yaml").dump()
        # Output (Unicode):
        # ╔════════════════════════════════════════════════════════════╗
        # ║ config.yaml                                                ║
        # ╚════════════════════════════════════════════════════════════╝
        # ├─ app_info
        # │   ├─ name: "ALASKA"
        # │   ├─ version: "2.0.0"
        # │   └─ id: "alaska_01"
        # └─ task_config
        #     └─ worker1
        #         ├─ timeout: 5.0
        #         └─ retry: 3
    """
    if self._cache is None:
        raise ConfigError("No configuration loaded")

    data = self._cache.to_dict()
    lines = []

    # Header with filename
    filename = os.path.basename(self._filepath) if self._filepath else "config"

    # Auto-detect if we need ASCII (Windows console often can't handle Unicode)
    if use_ascii is None:
        try:
            encoding = getattr(sys.stdout, 'encoding', None) or 'utf-8'
            # Probe-encode the box-drawing characters we intend to print.
            "╔═╗║╚╝├─└│".encode(encoding)
            use_ascii = False
        except (UnicodeEncodeError, LookupError):
            use_ascii = True

    # NOTE(review): the connector strings below may have contained wider
    # space runs in the original source (extraction collapses whitespace) —
    # verify tree alignment against the upstream file.
    if use_ascii:
        # ASCII box drawing
        lines.append(f"+{'-' * 60}+")
        lines.append(f"| {filename:<58} |")
        lines.append(f"+{'-' * 60}+")
        branch_mid = "+--"
        branch_last = "`--"
        vertical = "| "
        space = " "
    else:
        # Unicode box drawing (full-width characters)
        lines.append(f"╔{'═' * 60}╗")
        lines.append(f"║ {filename:<58} ║")
        lines.append(f"╚{'═' * 60}╝")
        branch_mid = "├─"
        branch_last = "└─"
        vertical = "│ "
        space = " "

    def _format_value(value: Any) -> str:
        """Format value for display (JSON-ish: quoted strings, null, true/false)."""
        if isinstance(value, str):
            return f'"{value}"'
        elif value is None:
            return "null"
        elif isinstance(value, bool):
            return "true" if value else "false"
        else:
            return str(value)

    def _build_tree(obj: Any, prefix: str = "", is_last: bool = True, key: str = "") -> None:
        """Recursively build tree lines (appends to the enclosing `lines`)."""
        branch = branch_last if is_last else branch_mid
        extension = space if is_last else vertical

        if isinstance(obj, dict):
            if key:
                lines.append(f"{prefix}{branch} {key}")
                new_prefix = prefix + extension
            else:
                new_prefix = prefix

            items = list(obj.items())
            for i, (k, v) in enumerate(items):
                is_last_item = (i == len(items) - 1)
                _build_tree(v, new_prefix, is_last_item, k)

        elif isinstance(obj, list):
            if key:
                lines.append(f"{prefix}{branch} {key}")
                new_prefix = prefix + extension
            else:
                new_prefix = prefix

            # List entries are labelled with their index, e.g. "[0]".
            for i, item in enumerate(obj):
                is_last_item = (i == len(obj) - 1)
                _build_tree(item, new_prefix, is_last_item, f"[{i}]")

        else:
            # Leaf node
            formatted_value = _format_value(obj)
            lines.append(f"{prefix}{branch} {key}: {formatted_value}")

    # Build tree from root
    if data:
        items = list(data.items())
        for i, (k, v) in enumerate(items):
            is_last_item = (i == len(items) - 1)
            _build_tree(v, "", is_last_item, k)

    result = "\n".join(lines)
    # Side effect: also prints the tree, so bare `gconfig.dump()` is useful in a REPL.
    print(result)
    return result
def refresh(self) -> None:
    """Force a re-read of the configuration file into the cache.

    Raises:
        ConfigError: If no configuration file has been loaded yet.
    """
    if self._filepath is None:
        raise ConfigError("No configuration loaded")
    self._load_file()
def validate_mandatory_fields(self) -> None:
    """Ensure every mandatory configuration field is present (non-None).

    Raises:
        ConfigError: If no configuration has been loaded.
        ConfigMandatoryFieldError: For the first mandatory field that is missing.
    """
    if self._cache is None:
        raise ConfigError("No configuration loaded")

    # Find the first mandatory key whose lookup resolves to None.
    missing = next(
        (name for name in ConfigMandatoryFieldError.MANDATORY_FIELDS
         if self._cache.get(name) is None),
        None,
    )
    if missing is not None:
        raise ConfigMandatoryFieldError(missing)
def recover_from_backup(self) -> bool:
    """Attempt to recover configuration from backup files.

    Tries each backup (in the order returned by the backup manager): parses
    it, checks mandatory fields, and on the first valid one copies it over
    the live file and reloads.

    Returns:
        True on successful recovery; False only when no backup manager exists.

    Raises:
        ConfigRecoveryError: If there are no backups, or none of them is valid.
        ConfigMandatoryFieldError: Raised internally per candidate and caught;
            surfaces only via ConfigRecoveryError.
    """
    if self._backup is None:
        return False

    backups = self._backup.get_backups()
    if not backups:
        raise ConfigRecoveryError(self._filepath or "", [])

    tried_backups = []
    for backup_path in backups:
        tried_backups.append(backup_path)
        try:
            temp_cache = ConfigCache(backup_path)

            with open(backup_path, 'r', encoding=self._encoding) as f:
                content = f.read()

            # Choose parser from either the backup's or the live file's extension.
            if backup_path.endswith('.yaml') or backup_path.endswith('.yml') or \
               self._filepath.endswith('.yaml') or self._filepath.endswith('.yml'):
                data = YamlParser.parse(content)
            else:
                data = JsonParser.parse(content)

            temp_cache.refresh(data, os.path.getmtime(backup_path))

            # Reject backups that are themselves missing mandatory fields.
            for field in ConfigMandatoryFieldError.MANDATORY_FIELDS:
                if temp_cache.get(field) is None:
                    raise ConfigMandatoryFieldError(field)

            # Valid backup: promote it to the live file and reload.
            shutil.copy2(backup_path, self._filepath)
            self._load_file()

            if self._enable_audit_log and self._audit_logger:
                self._audit_logger.log("RECOVER", backup_path)

            return True

        except (ConfigError, IOError, OSError):
            # This candidate is unusable; try the next (older) backup.
            continue

    raise ConfigRecoveryError(self._filepath or "", tried_backups)
|
|
1204
|
+
|
|
1205
|
+
|
|
1206
|
+
# ==============================================================================
|
|
1207
|
+
# Global instance
|
|
1208
|
+
# ==============================================================================
|
|
1209
|
+
|
|
1210
|
+
# Shared module-level singleton: import `gconfig` and call .load() once per process.
gconfig = GConfig()
|
|
1211
|
+
|
|
1212
|
+
|
|
1213
|
+
# ==============================================================================
|
|
1214
|
+
# Module exports
|
|
1215
|
+
# ==============================================================================
|
|
1216
|
+
|
|
1217
|
+
# Public API surface of this module (entries unchanged; grouped for readability).
__all__ = [
    # Singleton instance and main class
    'gconfig',
    'GConfig',
    # Exception hierarchy
    'ConfigError',
    'ConfigFileNotFoundError',
    'ConfigPathNotFoundError',
    'ConfigLockTimeoutError',
    'ConfigValidationError',
    'ConfigParseError',
    'ConfigIntegrityError',
    'ConfigSecurityError',
    'ConfigStaleLockError',
    'ConfigMandatoryFieldError',
    'ConfigRecoveryError',
    # Supporting components
    'PathParser',
    'ConfigCache',
    'FileLock',
    'BackupManager',
    'YamlParser',
    'JsonParser',
    'PathValidator',
    'SizeValidator',
    'ChecksumManager',
    'AuditLogger',
]
|