aline-ai 0.2.5__py3-none-any.whl → 0.3.0__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- {aline_ai-0.2.5.dist-info → aline_ai-0.3.0.dist-info}/METADATA +3 -1
- aline_ai-0.3.0.dist-info/RECORD +41 -0
- aline_ai-0.3.0.dist-info/entry_points.txt +3 -0
- realign/__init__.py +32 -1
- realign/cli.py +203 -19
- realign/commands/__init__.py +2 -2
- realign/commands/clean.py +149 -0
- realign/commands/config.py +1 -1
- realign/commands/export_shares.py +1785 -0
- realign/commands/hide.py +112 -24
- realign/commands/import_history.py +873 -0
- realign/commands/init.py +104 -217
- realign/commands/mirror.py +131 -0
- realign/commands/pull.py +101 -0
- realign/commands/push.py +155 -245
- realign/commands/review.py +216 -54
- realign/commands/session_utils.py +139 -4
- realign/commands/share.py +965 -0
- realign/commands/status.py +559 -0
- realign/commands/sync.py +91 -0
- realign/commands/undo.py +423 -0
- realign/commands/watcher.py +805 -0
- realign/config.py +21 -10
- realign/file_lock.py +3 -1
- realign/hash_registry.py +310 -0
- realign/hooks.py +368 -384
- realign/logging_config.py +2 -2
- realign/mcp_server.py +263 -549
- realign/mcp_watcher.py +999 -142
- realign/mirror_utils.py +322 -0
- realign/prompts/__init__.py +21 -0
- realign/prompts/presets.py +238 -0
- realign/redactor.py +168 -16
- realign/tracker/__init__.py +9 -0
- realign/tracker/git_tracker.py +1123 -0
- realign/watcher_daemon.py +115 -0
- aline_ai-0.2.5.dist-info/RECORD +0 -28
- aline_ai-0.2.5.dist-info/entry_points.txt +0 -5
- realign/commands/auto_commit.py +0 -231
- realign/commands/commit.py +0 -379
- realign/commands/search.py +0 -449
- realign/commands/show.py +0 -416
- {aline_ai-0.2.5.dist-info → aline_ai-0.3.0.dist-info}/WHEEL +0 -0
- {aline_ai-0.2.5.dist-info → aline_ai-0.3.0.dist-info}/licenses/LICENSE +0 -0
- {aline_ai-0.2.5.dist-info → aline_ai-0.3.0.dist-info}/top_level.txt +0 -0
realign/config.py
CHANGED
@@ -13,19 +13,21 @@ class ReAlignConfig:
     local_history_path: str = "~/.local/share/realign/histories"
     summary_max_chars: int = 500
-    redact_on_match: bool =
+    redact_on_match: bool = False  # Default: disable redaction (can be enabled in config)
     hooks_installation: str = "repo"
     use_LLM: bool = True
     llm_provider: str = "auto"  # LLM provider: "auto", "claude", or "openai"
     auto_detect_claude: bool = True  # Enable Claude Code session auto-detection
     auto_detect_codex: bool = True  # Enable Codex session auto-detection
     mcp_auto_commit: bool = True  # Enable MCP watcher auto-commit after each user request completes
+    use_mcp_sampling: bool = True  # Use MCP Sampling API for LLM summaries (requires MCP mode)
+    share_backend_url: str = "https://realign-server.vercel.app"  # Backend URL for interactive share export

     @classmethod
     def load(cls, config_path: Optional[Path] = None) -> "ReAlignConfig":
         """Load configuration from file with environment variable overrides."""
         if config_path is None:
-            config_path = Path.home() / ".config" / "
+            config_path = Path.home() / ".config" / "aline" / "config.yaml"

         config_dict = {}

@@ -45,13 +47,15 @@ class ReAlignConfig:
             "auto_detect_claude": os.getenv("REALIGN_AUTO_DETECT_CLAUDE"),
             "auto_detect_codex": os.getenv("REALIGN_AUTO_DETECT_CODEX"),
             "mcp_auto_commit": os.getenv("REALIGN_MCP_AUTO_COMMIT"),
+            "use_mcp_sampling": os.getenv("REALIGN_USE_MCP_SAMPLING"),
+            "share_backend_url": os.getenv("REALIGN_SHARE_BACKEND_URL"),
         }

         for key, value in env_overrides.items():
             if value is not None:
                 if key == "summary_max_chars":
                     config_dict[key] = int(value)
-                elif key in ["redact_on_match", "use_LLM", "auto_detect_claude", "auto_detect_codex", "mcp_auto_commit"]:
+                elif key in ["redact_on_match", "use_LLM", "auto_detect_claude", "auto_detect_codex", "mcp_auto_commit", "use_mcp_sampling"]:
                     config_dict[key] = value.lower() in ("true", "1", "yes")
                 else:
                     config_dict[key] = value

@@ -61,7 +65,7 @@ class ReAlignConfig:
     def save(self, config_path: Optional[Path] = None):
         """Save configuration to file."""
         if config_path is None:
-            config_path = Path.home() / ".config" / "
+            config_path = Path.home() / ".config" / "aline" / "config.yaml"

         config_path.parent.mkdir(parents=True, exist_ok=True)

@@ -75,6 +79,8 @@ class ReAlignConfig:
             "auto_detect_claude": self.auto_detect_claude,
             "auto_detect_codex": self.auto_detect_codex,
             "mcp_auto_commit": self.mcp_auto_commit,
+            "use_mcp_sampling": self.use_mcp_sampling,
+            "share_backend_url": self.share_backend_url,
         }

         with open(config_path, "w", encoding="utf-8") as f:

@@ -130,26 +136,31 @@ def get_default_config_content() -> str:
     return """# ReAlign Global Configuration (User Home Directory)
 local_history_path: "~/.local/share/realign/histories"  # Directory for local agent session files
 summary_max_chars: 500  # Maximum length of commit message summaries
-redact_on_match:
+redact_on_match: false  # Automatically redact sensitive information (disabled by default)
 # Original sessions are backed up to .realign/sessions-original/
-# Set to
+# Set to true to enable if you plan to share sessions publicly
 hooks_installation: "repo"  # Repo mode: sets core.hooksPath=.realign/hooks
 use_LLM: true  # Whether to use a cloud LLM to generate summaries
 llm_provider: "auto"  # LLM provider: "auto" (try Claude then OpenAI), "claude", or "openai"
 auto_detect_claude: true  # Automatically detect Claude Code session directory (~/.claude/projects/)
 auto_detect_codex: true  # Automatically detect Codex session files (~/.codex/sessions/)
 mcp_auto_commit: true  # Enable MCP watcher to auto-commit after each user request completes
+use_mcp_sampling: true  # Use MCP Sampling API when available (Claude Code)
+                        # Falls back to direct API if unavailable or denied
+share_backend_url: "https://realign-server.vercel.app"  # Backend URL for interactive share export
+                                                        # For local development, use: "http://localhost:3000"

 # LLM API Keys (environment variable configuration):
 # export ANTHROPIC_API_KEY="your-anthropic-api-key"  # For Claude (Anthropic)
 # export OPENAI_API_KEY="your-openai-api-key"  # For OpenAI (GPT)
-# When
+# When use_mcp_sampling=true: tries MCP Sampling first (no API key needed)
+# Falls back to direct API when not in Claude Code or user denies approval

 # Secret Detection & Redaction:
-# ReAlign
+# ReAlign can use detect-secrets to automatically scan for and redact:
 # - API keys, tokens, passwords
 # - Private keys, certificates
 # - AWS credentials, database URLs
-#
-# To
+# Note: High-entropy strings (like Base64) are filtered out to reduce false positives
+# To enable redaction: realign config set redact_on_match true
 """
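The new 0.3.0 settings follow the existing override pattern: each field can come from the YAML config file or from a REALIGN_* environment variable, and boolean keys are parsed from "true"/"1"/"yes". A minimal usage sketch, not part of the package itself; the import path is assumed from the realign/config.py layout above.

import os
from realign.config import ReAlignConfig  # module path assumed from realign/config.py

# Override the two new settings via environment variables before loading
os.environ["REALIGN_USE_MCP_SAMPLING"] = "false"           # boolean keys parse "true"/"1"/"yes"
os.environ["REALIGN_SHARE_BACKEND_URL"] = "http://localhost:3000"

config = ReAlignConfig.load()
print(config.use_mcp_sampling)   # False
print(config.share_backend_url)  # http://localhost:3000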
realign/file_lock.py
CHANGED
@@ -108,7 +108,9 @@ def commit_lock(repo_path: Path, timeout: float = 10.0):
     Yields:
         True if lock was acquired
     """
-
+    from realign import get_realign_dir
+    realign_dir = get_realign_dir(repo_path)
+    lock_file = realign_dir / ".commit.lock"
     lock = FileLock(lock_file, timeout=timeout)

     try:
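commit_lock now resolves its lock file through get_realign_dir() rather than a fixed location. A hedged usage sketch, assuming commit_lock is the context manager its signature and "Yields" section suggest; the repository path is hypothetical.

from pathlib import Path
from realign.file_lock import commit_lock  # module path assumed from realign/file_lock.py

repo = Path("/path/to/repo")  # hypothetical repository path
with commit_lock(repo, timeout=10.0) as acquired:
    if acquired:
        # .commit.lock is held under the repo's realign directory; safe to commit here
        ...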
realign/hash_registry.py
ADDED
@@ -0,0 +1,310 @@
"""Persistent hash registry for preventing duplicate auto-commits."""

import json
import shutil
import time
from pathlib import Path
from typing import Dict, Optional

from .file_lock import FileLock
from .logging_config import setup_logger

logger = setup_logger('realign.hash_registry', 'hash_registry.log')


class HashRegistry:
    """
    Persistent storage for turn content hashes to prevent duplicate commits.

    This class manages a JSON file that stores MD5 hashes of committed turn content,
    allowing the system to detect and prevent duplicate commits even after process
    restarts.

    Features:
    - Thread-safe operations using file locking
    - Atomic writes using temp file + rename
    - In-memory caching with TTL for performance
    - Automatic cleanup of stale entries
    - Graceful error handling and recovery
    """

    def __init__(self, realign_dir: Path):
        """
        Initialize the hash registry.

        Args:
            realign_dir: Path to the .aline/{project} directory
        """
        self.realign_dir = realign_dir
        self.metadata_dir = realign_dir / ".metadata"
        self.registry_file = self.metadata_dir / "commit_hashes.json"
        self.lock_file = self.metadata_dir / ".hash_registry.lock"

        # In-memory cache (60s TTL)
        self._cache: Optional[Dict] = None
        self._cache_time: float = 0
        self._cache_ttl: float = 60.0

        logger.debug(f"Initialized HashRegistry for {realign_dir}")

    def get_last_hash(self, session_file: Path) -> Optional[str]:
        """
        Get the last committed hash for a session file.

        Args:
            session_file: Path to the session file

        Returns:
            The MD5 hash of the last committed turn content, or None if not found
        """
        try:
            with self._acquire_lock():
                registry = self._load_registry()
                entry = registry.get("hashes", {}).get(str(session_file))
                if entry:
                    logger.debug(f"Found hash for {session_file.name}: {entry['last_hash'][:8]}...")
                    return entry["last_hash"]
                else:
                    logger.debug(f"No hash found for {session_file.name}")
                    return None
        except TimeoutError:
            logger.warning(f"Hash registry lock timeout for {session_file.name} - skipping duplicate check")
            return None  # Fail-safe: allow commit rather than block
        except Exception as e:
            logger.error(f"Error getting hash for {session_file.name}: {e}", exc_info=True)
            return None

    def set_last_hash(
        self,
        session_file: Path,
        hash_value: str,
        commit_sha: str,
        turn_number: int
    ):
        """
        Store the hash of a newly committed turn.

        Args:
            session_file: Path to the session file
            hash_value: MD5 hash of the turn content
            commit_sha: Git commit SHA
            turn_number: Turn number in the session
        """
        try:
            with self._acquire_lock():
                registry = self._load_registry()

                # Update entry
                registry["hashes"][str(session_file)] = {
                    "last_hash": hash_value,
                    "last_commit_sha": commit_sha,
                    "last_turn_number": turn_number,
                    "last_updated": time.time(),
                    "session_name": session_file.name
                }

                self._save_registry(registry)
                logger.debug(f"Stored hash for {session_file.name}: {hash_value[:8]}... (commit: {commit_sha[:8]})")
        except TimeoutError:
            logger.warning(f"Hash registry lock timeout for {session_file.name} - hash not stored")
        except Exception as e:
            logger.error(f"Error storing hash for {session_file.name}: {e}", exc_info=True)

    def cleanup_stale_entries(self, max_age_days: int = 30) -> int:
        """
        Remove entries for sessions that no longer exist or are very old.

        Args:
            max_age_days: Maximum age of entries to keep (default: 30 days)

        Returns:
            Number of entries removed
        """
        try:
            with self._acquire_lock():
                registry = self._load_registry()
                hashes = registry.get("hashes", {})

                current_time = time.time()
                max_age_seconds = max_age_days * 86400

                cleaned = {}
                for session_path, entry in hashes.items():
                    session_file = Path(session_path)

                    # Keep if session file exists and not too old
                    if session_file.exists():
                        age = current_time - entry.get("last_updated", 0)
                        if age < max_age_seconds:
                            cleaned[session_path] = entry

                registry["hashes"] = cleaned
                registry["metadata"]["last_cleanup"] = current_time
                self._save_registry(registry)

                removed_count = len(hashes) - len(cleaned)
                if removed_count > 0:
                    logger.info(f"Cleaned up {removed_count} stale hash entries")
                return removed_count
        except TimeoutError:
            logger.warning("Hash registry lock timeout during cleanup")
            return 0
        except Exception as e:
            logger.error(f"Error during cleanup: {e}", exc_info=True)
            return 0

    def _load_registry(self) -> Dict:
        """
        Load registry from disk with caching.

        Returns:
            Registry data dictionary
        """
        # Check cache first (60s TTL)
        if self._cache and (time.time() - self._cache_time) < self._cache_ttl:
            logger.debug("Using cached registry")
            return self._cache.copy()

        # Load from disk
        data = self._load_from_disk()

        # Update cache
        self._cache = data
        self._cache_time = time.time()

        return data.copy()

    def _load_from_disk(self) -> Dict:
        """
        Load registry from disk file.

        Returns:
            Registry data dictionary
        """
        if not self.registry_file.exists():
            logger.debug("Registry file doesn't exist, creating new empty registry")
            return self._new_empty_registry()

        try:
            with open(self.registry_file, 'r', encoding='utf-8') as f:
                data = json.load(f)

            # Validate schema version
            if data.get("version") != 1:
                logger.warning(f"Unknown registry version: {data.get('version')}, migrating...")
                data = self._migrate_schema(data)

            logger.debug(f"Loaded registry with {len(data.get('hashes', {}))} entries")
            return data

        except (json.JSONDecodeError, IOError) as e:
            logger.error(f"Corrupted hash registry: {e}")

            # Backup corrupted file
            backup_path = self.registry_file.with_suffix(f'.corrupted.{int(time.time())}')
            try:
                shutil.copy(self.registry_file, backup_path)
                logger.warning(f"Backed up corrupted registry to {backup_path}")
            except Exception as backup_error:
                logger.error(f"Failed to backup corrupted registry: {backup_error}")

            # Return empty registry (fail-safe)
            return self._new_empty_registry()

    def _save_registry(self, data: Dict):
        """
        Save registry to disk using atomic write.

        Args:
            data: Registry data to save
        """
        # Ensure metadata directory exists
        self.metadata_dir.mkdir(parents=True, exist_ok=True)

        # Write to temporary file first
        temp_file = self.registry_file.with_suffix('.tmp')
        try:
            with open(temp_file, 'w', encoding='utf-8') as f:
                json.dump(data, f, indent=2)

            # Atomic rename (overwrites existing file)
            temp_file.replace(self.registry_file)
            logger.debug(f"Saved registry with {len(data.get('hashes', {}))} entries")

            # Invalidate cache
            self._cache = None

        except Exception as e:
            logger.error(f"Error saving registry: {e}", exc_info=True)
            # Clean up temp file if it exists
            if temp_file.exists():
                try:
                    temp_file.unlink()
                except Exception:
                    pass
            raise

    def _acquire_lock(self) -> FileLock:
        """
        Acquire exclusive lock on registry file.

        Returns:
            FileLock context manager

        Raises:
            TimeoutError: If lock cannot be acquired within timeout
        """
        return FileLock(self.lock_file, timeout=5.0)

    def _new_empty_registry(self) -> Dict:
        """
        Create a new empty registry structure.

        Returns:
            Empty registry dictionary
        """
        return {
            "version": 1,
            "hashes": {},
            "metadata": {
                "created_at": time.time(),
                "last_cleanup": time.time()
            }
        }

    def _migrate_schema(self, data: Dict) -> Dict:
        """
        Migrate registry from old schema to current version.

        Args:
            data: Old registry data

        Returns:
            Migrated registry data
        """
        # Currently only version 1 exists, but this method is ready for future migrations
        logger.warning(f"Schema migration not implemented for version {data.get('version')}, creating new registry")
        return self._new_empty_registry()

    def should_cleanup(self, cleanup_interval_hours: int = 24) -> bool:
        """
        Check if cleanup should be performed based on last cleanup time.

        Args:
            cleanup_interval_hours: Minimum hours between cleanups (default: 24)

        Returns:
            True if cleanup should be performed
        """
        try:
            registry = self._load_registry()
            last_cleanup = registry.get("metadata", {}).get("last_cleanup", 0)
            hours_since_cleanup = (time.time() - last_cleanup) / 3600

            should_run = hours_since_cleanup >= cleanup_interval_hours
            if should_run:
                logger.info(f"Cleanup recommended ({hours_since_cleanup:.1f} hours since last cleanup)")
            return should_run
        except Exception as e:
            logger.error(f"Error checking cleanup status: {e}", exc_info=True)
            return False
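A hedged sketch of how a watcher process might consult this registry before auto-committing a turn; the project directory, session path, commit SHA, and the MD5 step are illustrative placeholders, not taken from the package.

import hashlib
from pathlib import Path
from realign.hash_registry import HashRegistry  # module path assumed from realign/hash_registry.py

registry = HashRegistry(Path.home() / ".aline" / "my-project")        # .aline/{project} dir per the class docstring
session = Path.home() / ".claude" / "projects" / "demo" / "s.jsonl"   # hypothetical session file

turn_content = session.read_text(encoding="utf-8")
turn_hash = hashlib.md5(turn_content.encode("utf-8")).hexdigest()

# Only commit when the turn content hash differs from the last committed one
if registry.get_last_hash(session) != turn_hash:
    commit_sha = "abc1234"  # placeholder for the SHA produced by the real commit step
    registry.set_last_hash(session, turn_hash, commit_sha, turn_number=1)

# Occasional maintenance: prune entries for deleted or month-old sessions
if registry.should_cleanup(cleanup_interval_hours=24):
    registry.cleanup_stale_entries(max_age_days=30)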