adaptive-memory-multi-model-router 1.2.2 → 1.3.1
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/LICENSE +21 -0
- package/README.md +146 -66
- package/dist/index.d.ts +1 -1
- package/dist/index.js +1 -1
- package/dist/integrations/airtable.js +20 -0
- package/dist/integrations/discord.js +18 -0
- package/dist/integrations/github.js +23 -0
- package/dist/integrations/gmail.js +19 -0
- package/dist/integrations/google-calendar.js +18 -0
- package/dist/integrations/index.js +61 -0
- package/dist/integrations/jira.js +21 -0
- package/dist/integrations/linear.js +19 -0
- package/dist/integrations/notion.js +19 -0
- package/dist/integrations/slack.js +18 -0
- package/dist/integrations/telegram.js +19 -0
- package/dist/providers/registry.js +7 -3
- package/docs/ARCHITECTURAL-IMPROVEMENTS-2025.md +1391 -0
- package/docs/ARCHITECTURAL-IMPROVEMENTS-REVISED-2025.md +1051 -0
- package/docs/CONFIGURATION.md +476 -0
- package/docs/COUNCIL_DECISION.json +308 -0
- package/docs/COUNCIL_SUMMARY.md +265 -0
- package/docs/COUNCIL_V2.2_DECISION.md +416 -0
- package/docs/IMPROVEMENT_ROADMAP.md +515 -0
- package/docs/LLM_COUNCIL_DECISION.md +508 -0
- package/docs/QUICK_START_VISIBILITY.md +782 -0
- package/docs/REDDIT_GAP_ANALYSIS.md +299 -0
- package/docs/RESEARCH_BACKED_IMPROVEMENTS.md +1180 -0
- package/docs/TMLPD_QNA.md +751 -0
- package/docs/TMLPD_V2.1_COMPLETE.md +763 -0
- package/docs/TMLPD_V2.2_RESEARCH_ROADMAP.md +754 -0
- package/docs/V2.2_IMPLEMENTATION_COMPLETE.md +446 -0
- package/docs/V2_IMPLEMENTATION_GUIDE.md +388 -0
- package/docs/VISIBILITY_ADOPTION_PLAN.md +1005 -0
- package/docs/launch-content/LAUNCH_EXECUTION_CHECKLIST.md +421 -0
- package/docs/launch-content/README.md +457 -0
- package/docs/launch-content/assets/cost_comparison_100_tasks.png +0 -0
- package/docs/launch-content/assets/cumulative_savings.png +0 -0
- package/docs/launch-content/assets/parallel_speedup.png +0 -0
- package/docs/launch-content/assets/provider_pricing_comparison.png +0 -0
- package/docs/launch-content/assets/task_breakdown_comparison.png +0 -0
- package/docs/launch-content/generate_charts.py +313 -0
- package/docs/launch-content/hn_show_post.md +139 -0
- package/docs/launch-content/partner_outreach_templates.md +745 -0
- package/docs/launch-content/reddit_posts.md +467 -0
- package/docs/launch-content/twitter_thread.txt +460 -0
- package/examples/QUICKSTART.md +1 -1
- package/openclaw-alexa-bridge/ALL_REMAINING_FIXES_PLAN.md +313 -0
- package/openclaw-alexa-bridge/REMAINING_FIXES_SUMMARY.md +277 -0
- package/openclaw-alexa-bridge/src/alexa_handler_no_tmlpd.js +1234 -0
- package/openclaw-alexa-bridge/test_fixes.js +77 -0
- package/package.json +120 -29
- package/package.json.tmp +0 -0
- package/qna/TMLPD_QNA.md +3 -3
- package/skill/SKILL.md +2 -2
- package/src/__tests__/integration/tmpld_integration.test.py +540 -0
- package/src/agents/skill_enhanced_agent.py +318 -0
- package/src/memory/__init__.py +15 -0
- package/src/memory/agentic_memory.py +353 -0
- package/src/memory/semantic_memory.py +444 -0
- package/src/memory/simple_memory.py +466 -0
- package/src/memory/working_memory.py +447 -0
- package/src/orchestration/__init__.py +52 -0
- package/src/orchestration/execution_engine.py +353 -0
- package/src/orchestration/halo_orchestrator.py +367 -0
- package/src/orchestration/mcts_workflow.py +498 -0
- package/src/orchestration/role_assigner.py +473 -0
- package/src/orchestration/task_planner.py +522 -0
- package/src/providers/__init__.py +67 -0
- package/src/providers/anthropic.py +304 -0
- package/src/providers/base.py +241 -0
- package/src/providers/cerebras.py +373 -0
- package/src/providers/registry.py +476 -0
- package/src/routing/__init__.py +30 -0
- package/src/routing/universal_router.py +621 -0
- package/src/skills/TMLPD-QUICKREF.md +210 -0
- package/src/skills/TMLPD-SETUP-SUMMARY.md +157 -0
- package/src/skills/TMLPD.md +540 -0
- package/src/skills/__tests__/skill_manager.test.ts +328 -0
- package/src/skills/skill_manager.py +385 -0
- package/src/skills/test-tmlpd.sh +108 -0
- package/src/skills/tmlpd-category.yaml +67 -0
- package/src/skills/tmlpd-monitoring.yaml +188 -0
- package/src/skills/tmlpd-phase.yaml +132 -0
- package/src/state/__init__.py +17 -0
- package/src/state/simple_checkpoint.py +508 -0
- package/src/tmlpd_agent.py +464 -0
- package/src/tmpld_v2.py +427 -0
- package/src/workflows/__init__.py +18 -0
- package/src/workflows/advanced_difficulty_classifier.py +377 -0
- package/src/workflows/chaining_executor.py +417 -0
- package/src/workflows/difficulty_integration.py +209 -0
- package/src/workflows/orchestrator.py +469 -0
- package/src/workflows/orchestrator_executor.py +456 -0
- package/src/workflows/parallelization_executor.py +382 -0
- package/src/workflows/router.py +311 -0
- package/test_integration_simple.py +86 -0
- package/test_mcts_workflow.py +150 -0
- package/test_templd_integration.py +262 -0
- package/test_universal_router.py +275 -0
- package/tmlpd-pi-extension/README.md +36 -0
- package/tmlpd-pi-extension/dist/cache/prefixCache.d.ts +114 -0
- package/tmlpd-pi-extension/dist/cache/prefixCache.d.ts.map +1 -0
- package/tmlpd-pi-extension/dist/cache/prefixCache.js +285 -0
- package/tmlpd-pi-extension/dist/cache/prefixCache.js.map +1 -0
- package/tmlpd-pi-extension/dist/cache/responseCache.d.ts +58 -0
- package/tmlpd-pi-extension/dist/cache/responseCache.d.ts.map +1 -0
- package/tmlpd-pi-extension/dist/cache/responseCache.js +153 -0
- package/tmlpd-pi-extension/dist/cache/responseCache.js.map +1 -0
- package/tmlpd-pi-extension/dist/cli.js +59 -0
- package/tmlpd-pi-extension/dist/cost/costTracker.d.ts +95 -0
- package/tmlpd-pi-extension/dist/cost/costTracker.d.ts.map +1 -0
- package/tmlpd-pi-extension/dist/cost/costTracker.js +240 -0
- package/tmlpd-pi-extension/dist/cost/costTracker.js.map +1 -0
- package/tmlpd-pi-extension/dist/index.d.ts +723 -0
- package/tmlpd-pi-extension/dist/index.d.ts.map +1 -0
- package/tmlpd-pi-extension/dist/index.js +239 -0
- package/tmlpd-pi-extension/dist/index.js.map +1 -0
- package/tmlpd-pi-extension/dist/memory/episodicMemory.d.ts +82 -0
- package/tmlpd-pi-extension/dist/memory/episodicMemory.d.ts.map +1 -0
- package/tmlpd-pi-extension/dist/memory/episodicMemory.js +145 -0
- package/tmlpd-pi-extension/dist/memory/episodicMemory.js.map +1 -0
- package/tmlpd-pi-extension/dist/orchestration/haloOrchestrator.d.ts +102 -0
- package/tmlpd-pi-extension/dist/orchestration/haloOrchestrator.d.ts.map +1 -0
- package/tmlpd-pi-extension/dist/orchestration/haloOrchestrator.js +207 -0
- package/tmlpd-pi-extension/dist/orchestration/haloOrchestrator.js.map +1 -0
- package/tmlpd-pi-extension/dist/orchestration/mctsWorkflow.d.ts +85 -0
- package/tmlpd-pi-extension/dist/orchestration/mctsWorkflow.d.ts.map +1 -0
- package/tmlpd-pi-extension/dist/orchestration/mctsWorkflow.js +210 -0
- package/tmlpd-pi-extension/dist/orchestration/mctsWorkflow.js.map +1 -0
- package/tmlpd-pi-extension/dist/providers/localProvider.d.ts +102 -0
- package/tmlpd-pi-extension/dist/providers/localProvider.d.ts.map +1 -0
- package/tmlpd-pi-extension/dist/providers/localProvider.js +338 -0
- package/tmlpd-pi-extension/dist/providers/localProvider.js.map +1 -0
- package/tmlpd-pi-extension/dist/providers/registry.d.ts +55 -0
- package/tmlpd-pi-extension/dist/providers/registry.d.ts.map +1 -0
- package/tmlpd-pi-extension/dist/providers/registry.js +138 -0
- package/tmlpd-pi-extension/dist/providers/registry.js.map +1 -0
- package/tmlpd-pi-extension/dist/routing/advancedRouter.d.ts +68 -0
- package/tmlpd-pi-extension/dist/routing/advancedRouter.d.ts.map +1 -0
- package/tmlpd-pi-extension/dist/routing/advancedRouter.js +332 -0
- package/tmlpd-pi-extension/dist/routing/advancedRouter.js.map +1 -0
- package/tmlpd-pi-extension/dist/tools/tmlpdTools.d.ts +101 -0
- package/tmlpd-pi-extension/dist/tools/tmlpdTools.d.ts.map +1 -0
- package/tmlpd-pi-extension/dist/tools/tmlpdTools.js +368 -0
- package/tmlpd-pi-extension/dist/tools/tmlpdTools.js.map +1 -0
- package/tmlpd-pi-extension/dist/utils/batchProcessor.d.ts +96 -0
- package/tmlpd-pi-extension/dist/utils/batchProcessor.d.ts.map +1 -0
- package/tmlpd-pi-extension/dist/utils/batchProcessor.js +170 -0
- package/tmlpd-pi-extension/dist/utils/batchProcessor.js.map +1 -0
- package/tmlpd-pi-extension/dist/utils/compression.d.ts +61 -0
- package/tmlpd-pi-extension/dist/utils/compression.d.ts.map +1 -0
- package/tmlpd-pi-extension/dist/utils/compression.js +281 -0
- package/tmlpd-pi-extension/dist/utils/compression.js.map +1 -0
- package/tmlpd-pi-extension/dist/utils/reliability.d.ts +74 -0
- package/tmlpd-pi-extension/dist/utils/reliability.d.ts.map +1 -0
- package/tmlpd-pi-extension/dist/utils/reliability.js +177 -0
- package/tmlpd-pi-extension/dist/utils/reliability.js.map +1 -0
- package/tmlpd-pi-extension/dist/utils/speculativeDecoding.d.ts +117 -0
- package/tmlpd-pi-extension/dist/utils/speculativeDecoding.d.ts.map +1 -0
- package/tmlpd-pi-extension/dist/utils/speculativeDecoding.js +246 -0
- package/tmlpd-pi-extension/dist/utils/speculativeDecoding.js.map +1 -0
- package/tmlpd-pi-extension/dist/utils/tokenUtils.d.ts +50 -0
- package/tmlpd-pi-extension/dist/utils/tokenUtils.d.ts.map +1 -0
- package/tmlpd-pi-extension/dist/utils/tokenUtils.js +124 -0
- package/tmlpd-pi-extension/dist/utils/tokenUtils.js.map +1 -0
- package/tmlpd-pi-extension/examples/QUICKSTART.md +183 -0
- package/tmlpd-pi-extension/package-lock.json +75 -0
- package/tmlpd-pi-extension/package.json +172 -0
- package/tmlpd-pi-extension/python/examples.py +53 -0
- package/tmlpd-pi-extension/python/integrations.py +330 -0
- package/tmlpd-pi-extension/python/setup.py +28 -0
- package/tmlpd-pi-extension/python/tmlpd.py +369 -0
- package/tmlpd-pi-extension/qna/REDDIT_GAP_ANALYSIS.md +299 -0
- package/tmlpd-pi-extension/qna/TMLPD_QNA.md +751 -0
- package/tmlpd-pi-extension/skill/SKILL.md +238 -0
- package/{src → tmlpd-pi-extension/src}/index.ts +1 -1
- package/tmlpd-pi-extension/tsconfig.json +18 -0
- package/demo/research-demo.js +0 -266
- package/notebooks/quickstart.ipynb +0 -157
- package/rust/tmlpd.h +0 -268
- package/src/cache/prefixCache.ts +0 -365
- package/src/routing/advancedRouter.ts +0 -406
- package/src/utils/speculativeDecoding.ts +0 -344
- /package/{src → tmlpd-pi-extension/src}/cache/responseCache.ts +0 -0
- /package/{src → tmlpd-pi-extension/src}/cost/costTracker.ts +0 -0
- /package/{src → tmlpd-pi-extension/src}/memory/episodicMemory.ts +0 -0
- /package/{src → tmlpd-pi-extension/src}/orchestration/haloOrchestrator.ts +0 -0
- /package/{src → tmlpd-pi-extension/src}/orchestration/mctsWorkflow.ts +0 -0
- /package/{src → tmlpd-pi-extension/src}/providers/localProvider.ts +0 -0
- /package/{src → tmlpd-pi-extension/src}/providers/registry.ts +0 -0
- /package/{src → tmlpd-pi-extension/src}/tools/tmlpdTools.ts +0 -0
- /package/{src → tmlpd-pi-extension/src}/utils/batchProcessor.ts +0 -0
- /package/{src → tmlpd-pi-extension/src}/utils/compression.ts +0 -0
- /package/{src → tmlpd-pi-extension/src}/utils/reliability.ts +0 -0
- /package/{src → tmlpd-pi-extension/src}/utils/tokenUtils.ts +0 -0
|
@@ -0,0 +1,508 @@
|
|
|
1
|
+
"""
|
|
2
|
+
Simple Checkpoint System - Workflow Phase 4
|
|
3
|
+
|
|
4
|
+
Implements lightweight JSON-based checkpointing for state management.
|
|
5
|
+
No complex versioning or branching - just save and restore.
|
|
6
|
+
|
|
7
|
+
Philosophy:
|
|
8
|
+
- Save state at critical points
|
|
9
|
+
- Easy recovery from failures
|
|
10
|
+
- Minimal overhead (plain JSON)
|
|
11
|
+
- Transparent and debuggable
|
|
12
|
+
"""
|
|
13
|
+
|
|
14
|
+
from typing import Dict, List, Any, Optional
|
|
15
|
+
from pathlib import Path
|
|
16
|
+
import json
|
|
17
|
+
from datetime import datetime
|
|
18
|
+
import shutil
|
|
19
|
+
import hashlib
|
|
20
|
+
|
|
21
|
+
|
|
22
|
+
class SimpleCheckpoint:
    """
    Lightweight JSON-based checkpoint system for workflow state management.

    Each checkpoint is a standalone JSON file plus an entry in an index file
    (``index.json``) kept under ``checkpoint_dir``.  A SHA256 checksum of the
    saved state is stored alongside it so restores can detect corruption.

    Features:
    - Save checkpoints with metadata
    - List available checkpoints
    - Restore from checkpoint (by ID, by name, or most recent)
    - Automatic cleanup of old checkpoints beyond ``max_checkpoints``
    - Checkpoint validation, export, and import
    """

    def __init__(
        self,
        checkpoint_dir: str = ".taskmaster/checkpoints",
        max_checkpoints: int = 10
    ):
        """
        Initialize Simple Checkpoint

        Args:
            checkpoint_dir: Directory to store checkpoints (created if missing)
            max_checkpoints: Maximum number of checkpoints to keep
        """
        self.checkpoint_dir = Path(checkpoint_dir)
        self.checkpoint_dir.mkdir(parents=True, exist_ok=True)

        self.max_checkpoints = max_checkpoints
        self.index_file = self.checkpoint_dir / "index.json"

        self.index = self._load_index()

        # Per-instance lifetime counters; intentionally not persisted to disk.
        self.stats = {
            "checkpoints_created": 0,
            "checkpoints_restored": 0,
            "checkpoints_deleted": 0
        }

    def _load_index(self) -> Dict[str, Any]:
        """Load the checkpoint index from disk; fall back to an empty index."""
        if self.index_file.exists():
            try:
                with open(self.index_file, 'r') as f:
                    return json.load(f)
            except (OSError, ValueError) as e:
                # ValueError also covers json.JSONDecodeError (corrupt index).
                print(f"Warning: Failed to load checkpoint index: {e}")
                return self._empty_index()

        return self._empty_index()

    def _empty_index(self) -> Dict[str, Any]:
        """Create empty checkpoint index"""
        return {
            "version": "1.0",
            "created_at": datetime.now().isoformat(),
            "last_updated": datetime.now().isoformat(),
            "checkpoints": []
        }

    def _save_index(self):
        """
        Persist the checkpoint index.

        Writes to a temporary file and then replaces ``index.json`` so a
        crash mid-write cannot leave a truncated index behind.
        """
        self.index["last_updated"] = datetime.now().isoformat()

        tmp_file = self.index_file.with_name(self.index_file.name + ".tmp")
        with open(tmp_file, 'w') as f:
            json.dump(self.index, f, indent=2)
        tmp_file.replace(self.index_file)  # atomic rename on POSIX

    def _generate_checksum(self, data: Dict[str, Any]) -> str:
        """
        Generate checksum for checkpoint data.

        Args:
            data: Checkpoint data

        Returns:
            SHA256 hex digest of the canonical (sorted-keys) JSON encoding
        """
        data_str = json.dumps(data, sort_keys=True)
        return hashlib.sha256(data_str.encode()).hexdigest()

    def _find_checkpoint(
        self,
        checkpoint_id: Optional[str] = None,
        name: Optional[str] = None
    ) -> Optional[Dict[str, Any]]:
        """Look up an index entry by ID (preferred) or name; None if absent."""
        if checkpoint_id:
            field, wanted = "id", checkpoint_id
        elif name:
            field, wanted = "name", name
        else:
            return None

        return next(
            (cp for cp in self.index["checkpoints"] if cp[field] == wanted),
            None
        )

    def _register_checkpoint(self, checkpoint: Dict[str, Any], checkpoint_file: Path):
        """Write a checkpoint file, index it, and enforce the retention limit."""
        with open(checkpoint_file, 'w') as f:
            json.dump(checkpoint, f, indent=2)

        self.index["checkpoints"].append({
            "id": checkpoint["id"],
            "name": checkpoint["name"],
            "description": checkpoint.get("description", ""),
            "created_at": checkpoint["created_at"],
            "checksum": checkpoint["checksum"],
            "file": str(checkpoint_file),
            "metadata": checkpoint.get("metadata", {})
        })

        # Drop the oldest checkpoints before persisting the updated index.
        self._cleanup_old_checkpoints()

        self._save_index()
        self.stats["checkpoints_created"] += 1

    def create_checkpoint(
        self,
        state: Dict[str, Any],
        name: Optional[str] = None,
        description: Optional[str] = None,
        metadata: Optional[Dict[str, Any]] = None
    ) -> str:
        """
        Create a checkpoint.

        Args:
            state: Current state to save
            name: Optional checkpoint name
            description: Optional checkpoint description
            metadata: Optional additional metadata

        Returns:
            Checkpoint ID
        """
        checkpoint_id = f"checkpoint_{datetime.now().strftime('%Y%m%d_%H%M%S_%f')}"

        checkpoint = {
            "id": checkpoint_id,
            "name": name or checkpoint_id,
            "description": description or "",
            "created_at": datetime.now().isoformat(),
            # Checksum lets restore_checkpoint() detect on-disk corruption.
            "checksum": self._generate_checksum(state),
            "metadata": metadata or {},
            "state": state
        }

        self._register_checkpoint(
            checkpoint, self.checkpoint_dir / f"{checkpoint_id}.json"
        )
        return checkpoint_id

    def restore_checkpoint(
        self,
        checkpoint_id: Optional[str] = None,
        name: Optional[str] = None,
        validate: bool = True
    ) -> Dict[str, Any]:
        """
        Restore from a checkpoint.

        Args:
            checkpoint_id: Checkpoint ID to restore
            name: Checkpoint name to restore (alternative to ID)
            validate: Whether to validate checksum

        Returns:
            Restored state

        Raises:
            FileNotFoundError: If checkpoint not found
            ValueError: If validation fails
        """
        checkpoint_info = self._find_checkpoint(checkpoint_id, name)

        # No selector given: fall back to the most recent checkpoint.
        if checkpoint_info is None and not checkpoint_id and not name:
            if self.index["checkpoints"]:
                checkpoint_info = self.index["checkpoints"][-1]

        if not checkpoint_info:
            raise FileNotFoundError(f"Checkpoint not found: {checkpoint_id or name}")

        checkpoint_file = Path(checkpoint_info["file"])

        if not checkpoint_file.exists():
            raise FileNotFoundError(f"Checkpoint file not found: {checkpoint_file}")

        with open(checkpoint_file, 'r') as f:
            checkpoint = json.load(f)

        # Re-derive the checksum from the stored state and compare.
        if validate:
            current_checksum = self._generate_checksum(checkpoint["state"])
            if current_checksum != checkpoint["checksum"]:
                raise ValueError(
                    f"Checksum mismatch: expected {checkpoint['checksum']}, got {current_checksum}"
                )

        self.stats["checkpoints_restored"] += 1

        return checkpoint["state"]

    def list_checkpoints(
        self,
        sort_by: str = "created_at",
        reverse: bool = True
    ) -> List[Dict[str, Any]]:
        """
        List all checkpoints.

        Args:
            sort_by: Field to sort by ("created_at", "name"); any other
                value leaves the index order unchanged
            reverse: Sort order (descending if True)

        Returns:
            List of checkpoint info dictionaries
        """
        checkpoints = self.index["checkpoints"].copy()

        if sort_by in ("created_at", "name"):
            checkpoints.sort(key=lambda cp: cp[sort_by], reverse=reverse)

        return checkpoints

    def get_latest_checkpoint(self) -> Optional[Dict[str, Any]]:
        """
        Get the most recent checkpoint.

        Returns:
            Latest checkpoint info or None
        """
        if not self.index["checkpoints"]:
            return None

        # Entries are appended in creation order, so the last one is newest.
        return self.index["checkpoints"][-1]

    def delete_checkpoint(
        self,
        checkpoint_id: Optional[str] = None,
        name: Optional[str] = None
    ):
        """
        Delete a checkpoint.

        Args:
            checkpoint_id: Checkpoint ID to delete
            name: Checkpoint name to delete (alternative to ID)

        Raises:
            FileNotFoundError: If checkpoint not found
        """
        checkpoint_info = self._find_checkpoint(checkpoint_id, name)

        if not checkpoint_info:
            raise FileNotFoundError(f"Checkpoint not found: {checkpoint_id or name}")

        self._delete_entry(checkpoint_info)
        self._save_index()

    def _delete_entry(self, checkpoint_info: Dict[str, Any]):
        """Remove one index entry and its backing file; update deletion stats."""
        checkpoint_file = Path(checkpoint_info["file"])
        if checkpoint_file.exists():
            checkpoint_file.unlink()

        self.index["checkpoints"].remove(checkpoint_info)
        self.stats["checkpoints_deleted"] += 1

    def _cleanup_old_checkpoints(self):
        """Delete oldest checkpoints if exceeding max_checkpoints"""
        while len(self.index["checkpoints"]) > self.max_checkpoints:
            self._delete_entry(self.index["checkpoints"][0])

    def clear_all_checkpoints(self):
        """Delete all checkpoints and their files."""
        # Iterate a copy: _delete_entry mutates the underlying list.  Going
        # through _delete_entry also keeps stats["checkpoints_deleted"]
        # consistent with delete_checkpoint() (the original skipped stats).
        for checkpoint_info in list(self.index["checkpoints"]):
            self._delete_entry(checkpoint_info)

        self.index["checkpoints"] = []
        self._save_index()

        print("Cleared all checkpoints")

    def validate_all_checkpoints(self) -> Dict[str, Any]:
        """
        Validate all checkpoints.

        Checks that every indexed checkpoint file exists, parses as JSON,
        and still matches its recorded checksum.

        Returns:
            Validation report with valid/invalid checkpoints
        """
        results = {
            "total": len(self.index["checkpoints"]),
            "valid": 0,
            "invalid": 0,
            "invalid_checkpoints": []
        }

        def mark_invalid(checkpoint_info: Dict[str, Any], reason: str) -> None:
            # Local helper keeps the three failure paths consistent.
            results["invalid"] += 1
            results["invalid_checkpoints"].append({
                "id": checkpoint_info["id"],
                "reason": reason
            })

        for checkpoint_info in self.index["checkpoints"]:
            checkpoint_file = Path(checkpoint_info["file"])

            if not checkpoint_file.exists():
                mark_invalid(checkpoint_info, "File not found")
                continue

            try:
                with open(checkpoint_file, 'r') as f:
                    checkpoint = json.load(f)

                # Validate checksum against the stored state.
                if self._generate_checksum(checkpoint["state"]) != checkpoint["checksum"]:
                    mark_invalid(checkpoint_info, "Checksum mismatch")
                else:
                    results["valid"] += 1
            except Exception as e:
                # Any parse/IO/missing-key error makes this checkpoint unusable.
                mark_invalid(checkpoint_info, str(e))

        return results

    def export_checkpoint(
        self,
        checkpoint_id: str,
        export_path: str
    ):
        """
        Export a checkpoint to a file.

        Args:
            checkpoint_id: Checkpoint ID to export
            export_path: Path to export to

        Raises:
            FileNotFoundError: If checkpoint not found
        """
        checkpoint_info = self._find_checkpoint(checkpoint_id=checkpoint_id)

        if not checkpoint_info:
            raise FileNotFoundError(f"Checkpoint not found: {checkpoint_id}")

        destination = Path(export_path)

        # copy2 preserves file metadata (timestamps) on the exported copy.
        shutil.copy2(Path(checkpoint_info["file"]), destination)

        print(f"Checkpoint exported to {destination}")

    def import_checkpoint(
        self,
        import_path: str,
        name: Optional[str] = None
    ) -> str:
        """
        Import a checkpoint from a file.

        Args:
            import_path: Path to import from
            name: Optional name for imported checkpoint

        Returns:
            Checkpoint ID

        Raises:
            FileNotFoundError: If the import file does not exist
            ValueError: If the file is not a valid checkpoint (no "state")
        """
        source = Path(import_path)

        if not source.exists():
            raise FileNotFoundError(f"Import file not found: {source}")

        with open(source, 'r') as f:
            checkpoint = json.load(f)

        if "state" not in checkpoint:
            raise ValueError(f"Invalid checkpoint file (missing 'state'): {source}")

        # Assign a fresh ID so the import never collides with existing entries.
        checkpoint_id = f"checkpoint_{datetime.now().strftime('%Y%m%d_%H%M%S_%f')}"
        checkpoint["id"] = checkpoint_id
        checkpoint["name"] = name or checkpoint.get("name", checkpoint_id)
        checkpoint["imported_at"] = datetime.now().isoformat()
        # Tolerate externally produced files that lack bookkeeping fields
        # (the original code raised KeyError on these).
        checkpoint.setdefault("created_at", checkpoint["imported_at"])
        checkpoint.setdefault("checksum", self._generate_checksum(checkpoint["state"]))

        self._register_checkpoint(
            checkpoint, self.checkpoint_dir / f"{checkpoint_id}.json"
        )
        return checkpoint_id

    def get_stats(self) -> Dict[str, Any]:
        """
        Get checkpoint statistics.

        Returns:
            Statistics dictionary combining lifetime counters with the
            current checkpoint count and configuration
        """
        return {
            **self.stats,
            "total_checkpoints": len(self.index["checkpoints"]),
            "checkpoint_dir": str(self.checkpoint_dir),
            "max_checkpoints": self.max_checkpoints
        }
|
|
467
|
+
|
|
468
|
+
|
|
469
|
+
# Convenience functions for quick checkpoint operations
|
|
470
|
+
|
|
471
|
+
def save_checkpoint(
    state: Dict[str, Any],
    name: Optional[str] = None,
    checkpoint_dir: str = ".taskmaster/checkpoints"
) -> str:
    """
    Convenience wrapper: persist *state* as a new checkpoint.

    Args:
        state: State dictionary to persist
        name: Optional human-readable checkpoint name
        checkpoint_dir: Directory holding the checkpoint store

    Returns:
        ID of the newly created checkpoint
    """
    store = SimpleCheckpoint(checkpoint_dir)
    return store.create_checkpoint(state, name=name)
|
|
489
|
+
|
|
490
|
+
|
|
491
|
+
def load_checkpoint(
    checkpoint_id: Optional[str] = None,
    name: Optional[str] = None,
    checkpoint_dir: str = ".taskmaster/checkpoints"
) -> Dict[str, Any]:
    """
    Convenience wrapper: restore state from an existing checkpoint.

    Args:
        checkpoint_id: ID of the checkpoint to restore
        name: Name of the checkpoint to restore (alternative to ID)
        checkpoint_dir: Directory holding the checkpoint store

    Returns:
        The restored state dictionary
    """
    store = SimpleCheckpoint(checkpoint_dir)
    return store.restore_checkpoint(checkpoint_id=checkpoint_id, name=name)
|