@rookiestar/eng-lang-tutor 1.0.1
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/.claude/settings.local.json +22 -0
- package/.gitignore +32 -0
- package/CHANGELOG.md +37 -0
- package/CLAUDE.md +275 -0
- package/README.md +369 -0
- package/SKILL.md +613 -0
- package/bin/eng-lang-tutor.js +177 -0
- package/docs/OPENCLAW_DEPLOYMENT.md +241 -0
- package/examples/sample_keypoint_a1.json +112 -0
- package/examples/sample_keypoint_a2.json +124 -0
- package/examples/sample_keypoint_b1.json +135 -0
- package/examples/sample_keypoint_b2.json +137 -0
- package/examples/sample_keypoint_c1.json +134 -0
- package/examples/sample_keypoint_c2.json +141 -0
- package/examples/sample_quiz_a1.json +94 -0
- package/examples/sample_quiz_a2.json +94 -0
- package/examples/sample_quiz_b1.json +92 -0
- package/examples/sample_quiz_b2.json +94 -0
- package/examples/sample_quiz_c1.json +94 -0
- package/examples/sample_quiz_c2.json +104 -0
- package/package.json +41 -0
- package/references/resources.md +292 -0
- package/requirements.txt +16 -0
- package/scripts/__init__.py +28 -0
- package/scripts/audio/__init__.py +23 -0
- package/scripts/audio/composer.py +367 -0
- package/scripts/audio/converter.py +331 -0
- package/scripts/audio/feishu_voice.py +404 -0
- package/scripts/audio/tts/__init__.py +30 -0
- package/scripts/audio/tts/base.py +166 -0
- package/scripts/audio/tts/manager.py +306 -0
- package/scripts/audio/tts/providers/__init__.py +12 -0
- package/scripts/audio/tts/providers/edge.py +111 -0
- package/scripts/audio/tts/providers/xunfei.py +205 -0
- package/scripts/audio/utils.py +63 -0
- package/scripts/cli/__init__.py +7 -0
- package/scripts/cli/cli.py +229 -0
- package/scripts/cli/command_parser.py +336 -0
- package/scripts/core/__init__.py +30 -0
- package/scripts/core/constants.py +125 -0
- package/scripts/core/error_notebook.py +308 -0
- package/scripts/core/gamification.py +405 -0
- package/scripts/core/scorer.py +295 -0
- package/scripts/core/state_manager.py +814 -0
- package/scripts/eng-lang-tutor +16 -0
- package/scripts/scheduling/__init__.py +6 -0
- package/scripts/scheduling/cron_push.py +229 -0
- package/scripts/utils/__init__.py +12 -0
- package/scripts/utils/dedup.py +331 -0
- package/scripts/utils/helpers.py +82 -0
- package/templates/keypoint_schema.json +420 -0
- package/templates/prompt_templates.md +73 -0
- package/templates/prompts/display_guide.md +106 -0
- package/templates/prompts/initialization.md +350 -0
- package/templates/prompts/keypoint_generation.md +272 -0
- package/templates/prompts/output_rules.md +106 -0
- package/templates/prompts/quiz_generation.md +190 -0
- package/templates/prompts/responses.md +339 -0
- package/templates/prompts/shared_enums.md +252 -0
- package/templates/quiz_schema.json +214 -0
- package/templates/state_schema.json +277 -0
|
@@ -0,0 +1,814 @@
|
|
|
1
|
+
#!/usr/bin/env python3
|
|
2
|
+
"""
|
|
3
|
+
State Manager - Handles state persistence and event logging for eng-lang-tutor.
|
|
4
|
+
|
|
5
|
+
Responsibilities:
|
|
6
|
+
- Load/save state.json
|
|
7
|
+
- Append events to monthly log files (events_YYYY-MM.jsonl)
|
|
8
|
+
- Provide atomic write operations for crash recovery
|
|
9
|
+
- Manage daily content directories
|
|
10
|
+
"""
|
|
11
|
+
|
|
12
|
+
import json
|
|
13
|
+
import os
|
|
14
|
+
from pathlib import Path
|
|
15
|
+
from datetime import datetime, date
|
|
16
|
+
from typing import Dict, Any, Optional, List
|
|
17
|
+
import shutil
|
|
18
|
+
import threading
|
|
19
|
+
|
|
20
|
+
try:
|
|
21
|
+
from ..utils.helpers import deep_merge
|
|
22
|
+
from .error_notebook import ErrorNotebookManager
|
|
23
|
+
except ImportError:
|
|
24
|
+
from scripts.utils.helpers import deep_merge
|
|
25
|
+
from scripts.core.error_notebook import ErrorNotebookManager
|
|
26
|
+
|
|
27
|
+
|
|
28
|
+
def get_default_state_dir() -> Path:
    """
    Resolve the directory where tutor state is stored.

    A non-empty OPENCLAW_STATE_DIR environment variable overrides the
    built-in default of ~/.openclaw/state/eng-lang-tutor/.

    Returns:
        Path to the state directory.
    """
    override = os.environ.get('OPENCLAW_STATE_DIR')
    if override:
        return Path(override)
    return Path.home() / '.openclaw' / 'state' / 'eng-lang-tutor'
|
|
43
|
+
|
|
44
|
+
|
|
45
|
+
class StateManager:
|
|
46
|
+
"""Manages state persistence and event logging."""
|
|
47
|
+
|
|
48
|
+
def __init__(self, data_dir: str = None):
|
|
49
|
+
"""
|
|
50
|
+
Initialize the state manager.
|
|
51
|
+
|
|
52
|
+
Args:
|
|
53
|
+
data_dir: Path to the data directory (relative or absolute).
|
|
54
|
+
If None, uses OPENCLAW_STATE_DIR env var or
|
|
55
|
+
~/.openclaw/state/eng-lang-tutor/ as default.
|
|
56
|
+
"""
|
|
57
|
+
if data_dir is None:
|
|
58
|
+
self.data_dir = get_default_state_dir()
|
|
59
|
+
else:
|
|
60
|
+
self.data_dir = Path(data_dir)
|
|
61
|
+
|
|
62
|
+
self.state_file = self.data_dir / "state.json"
|
|
63
|
+
self.logs_dir = self.data_dir / "logs"
|
|
64
|
+
self.daily_dir = self.data_dir / "daily"
|
|
65
|
+
self.audio_dir = self.data_dir / "audio"
|
|
66
|
+
|
|
67
|
+
# Migrate from old data/ directory if needed
|
|
68
|
+
self._migrate_from_old_location()
|
|
69
|
+
|
|
70
|
+
# Ensure directories exist
|
|
71
|
+
self._ensure_directories()
|
|
72
|
+
|
|
73
|
+
# Initialize sub-managers
|
|
74
|
+
self.error_notebook = ErrorNotebookManager(self)
|
|
75
|
+
|
|
76
|
+
# Track background audio generation threads
|
|
77
|
+
self._audio_threads: Dict[str, Any] = {}
|
|
78
|
+
|
|
79
|
+
    def _migrate_from_old_location(self) -> None:
        """
        Migrate data from the old data/ directory to the new state directory.

        This is a one-time migration that runs only if:
        1. The new state directory doesn't have state.json
        2. The old data/ directory exists with state.json

        On success the old directory is renamed to data.backup; on failure
        the error is printed and (best-effort) logged, and the manager
        continues with an empty state directory.
        """
        # Only migrate when using the default state directory — a custom
        # data_dir means the caller manages their own layout.
        if self.data_dir != get_default_state_dir():
            return

        # Check if new location already has data; never overwrite it.
        if self.state_file.exists():
            return

        # Find old data directory (relative to this script's location).
        # __file__ is scripts/core/state_manager.py, so parent.parent is scripts/.
        script_dir = Path(__file__).parent.parent
        old_data_dir = script_dir / "data"

        if not old_data_dir.exists():
            return

        old_state_file = old_data_dir / "state.json"
        if not old_state_file.exists():
            return

        # Perform migration
        print(f"Migrating data from {old_data_dir} to {self.data_dir}...")

        try:
            # Create new directory
            self.data_dir.mkdir(parents=True, exist_ok=True)

            # Copy all contents; existing destination sub-directories are
            # replaced wholesale so the copy reflects the old tree exactly.
            for item in old_data_dir.iterdir():
                dest = self.data_dir / item.name
                if item.is_dir():
                    if dest.exists():
                        shutil.rmtree(dest)
                    shutil.copytree(item, dest)
                else:
                    shutil.copy2(item, dest)

            # Rename old directory to backup (replacing any prior backup)
            # so the migration never runs twice.
            backup_dir = script_dir / "data.backup"
            if backup_dir.exists():
                shutil.rmtree(backup_dir)
            old_data_dir.rename(backup_dir)

            print(f"Migration complete. Old data backed up to {backup_dir}")
        except Exception as e:
            print(f"Warning: Migration failed: {e}")
            print("Will use new empty state directory.")
            # Log migration failure to event log (after ensuring directories exist)
            try:
                self._ensure_directories()
                self.append_event('migration_failed', {
                    "error": str(e),
                    "source_dir": str(old_data_dir),
                    "target_dir": str(self.data_dir)
                })
            except Exception:
                pass  # Silently ignore logging failures during migration
|
|
143
|
+
|
|
144
|
+
def _ensure_directories(self) -> None:
|
|
145
|
+
"""Create necessary directories if they don't exist."""
|
|
146
|
+
self.data_dir.mkdir(parents=True, exist_ok=True)
|
|
147
|
+
self.logs_dir.mkdir(parents=True, exist_ok=True)
|
|
148
|
+
self.daily_dir.mkdir(parents=True, exist_ok=True)
|
|
149
|
+
self.audio_dir.mkdir(parents=True, exist_ok=True)
|
|
150
|
+
|
|
151
|
+
def _default_state(self) -> Dict[str, Any]:
|
|
152
|
+
"""Return the default state structure."""
|
|
153
|
+
return {
|
|
154
|
+
"version": 2,
|
|
155
|
+
"initialized": False,
|
|
156
|
+
"onboarding_step": 0,
|
|
157
|
+
"completion_status": {
|
|
158
|
+
"quiz_completed_date": None,
|
|
159
|
+
"keypoint_view_history": []
|
|
160
|
+
},
|
|
161
|
+
"schedule": {
|
|
162
|
+
"keypoint_time": "06:45",
|
|
163
|
+
"quiz_time": "22:45",
|
|
164
|
+
"timezone": "Asia/Shanghai"
|
|
165
|
+
},
|
|
166
|
+
"user": {
|
|
167
|
+
"xp": 0,
|
|
168
|
+
"level": 1,
|
|
169
|
+
"streak": 0,
|
|
170
|
+
"streak_freeze": 0,
|
|
171
|
+
"gems": 0,
|
|
172
|
+
"badges": []
|
|
173
|
+
},
|
|
174
|
+
"preferences": {
|
|
175
|
+
"cefr_level": "B1",
|
|
176
|
+
"oral_written_ratio": 0.7,
|
|
177
|
+
"topics": {
|
|
178
|
+
"movies": 0.2,
|
|
179
|
+
"news": 0.15,
|
|
180
|
+
"gaming": 0.15,
|
|
181
|
+
"sports": 0.1,
|
|
182
|
+
"workplace": 0.2,
|
|
183
|
+
"social": 0.1,
|
|
184
|
+
"daily_life": 0.1
|
|
185
|
+
},
|
|
186
|
+
"tutor_style": "humorous",
|
|
187
|
+
"dedup_days": 14
|
|
188
|
+
},
|
|
189
|
+
"progress": {
|
|
190
|
+
"total_quizzes": 0,
|
|
191
|
+
"correct_rate": 0.0,
|
|
192
|
+
"last_study_date": None,
|
|
193
|
+
"perfect_quizzes": 0,
|
|
194
|
+
"expressions_learned": 0
|
|
195
|
+
},
|
|
196
|
+
"recent_topics": [],
|
|
197
|
+
"error_notebook": [],
|
|
198
|
+
"error_archive": []
|
|
199
|
+
}
|
|
200
|
+
|
|
201
|
+
def load_state(self) -> Dict[str, Any]:
|
|
202
|
+
"""
|
|
203
|
+
Load current state from file.
|
|
204
|
+
|
|
205
|
+
Returns:
|
|
206
|
+
State dictionary with all user data, preferences, and progress.
|
|
207
|
+
"""
|
|
208
|
+
if not self.state_file.exists():
|
|
209
|
+
return self._default_state()
|
|
210
|
+
|
|
211
|
+
try:
|
|
212
|
+
with open(self.state_file, 'r', encoding='utf-8') as f:
|
|
213
|
+
state = json.load(f)
|
|
214
|
+
# Merge with defaults to ensure all fields exist
|
|
215
|
+
return self._merge_with_defaults(state)
|
|
216
|
+
except (json.JSONDecodeError, IOError) as e:
|
|
217
|
+
print(f"Error loading state: {e}. Using defaults.")
|
|
218
|
+
return self._default_state()
|
|
219
|
+
|
|
220
|
+
def _merge_with_defaults(self, state: Dict[str, Any]) -> Dict[str, Any]:
|
|
221
|
+
"""
|
|
222
|
+
Merge loaded state with defaults to ensure all fields exist.
|
|
223
|
+
|
|
224
|
+
Uses deep merge to handle nested structures properly.
|
|
225
|
+
"""
|
|
226
|
+
defaults = self._default_state()
|
|
227
|
+
return deep_merge(defaults, state)
|
|
228
|
+
|
|
229
|
+
def save_state(self, state: Dict[str, Any]) -> None:
|
|
230
|
+
"""
|
|
231
|
+
Save state to file with atomic write for crash recovery.
|
|
232
|
+
|
|
233
|
+
Args:
|
|
234
|
+
state: Complete state dictionary to save
|
|
235
|
+
"""
|
|
236
|
+
# Ensure directories exist
|
|
237
|
+
self._ensure_directories()
|
|
238
|
+
|
|
239
|
+
# Write to temp file first, then rename for atomicity
|
|
240
|
+
temp_file = self.state_file.with_suffix('.tmp')
|
|
241
|
+
|
|
242
|
+
try:
|
|
243
|
+
with open(temp_file, 'w', encoding='utf-8') as f:
|
|
244
|
+
json.dump(state, f, ensure_ascii=False, indent=2)
|
|
245
|
+
|
|
246
|
+
# Atomic rename
|
|
247
|
+
temp_file.rename(self.state_file)
|
|
248
|
+
except IOError as e:
|
|
249
|
+
print(f"Error saving state: {e}")
|
|
250
|
+
if temp_file.exists():
|
|
251
|
+
temp_file.unlink()
|
|
252
|
+
raise
|
|
253
|
+
|
|
254
|
+
def append_event(self, event_type: str, data: Dict[str, Any]) -> None:
|
|
255
|
+
"""
|
|
256
|
+
Append an event to the monthly log file.
|
|
257
|
+
|
|
258
|
+
Args:
|
|
259
|
+
event_type: Type of event (e.g., 'keypoint_generated', 'quiz_completed')
|
|
260
|
+
data: Event-specific data
|
|
261
|
+
"""
|
|
262
|
+
self.logs_dir.mkdir(parents=True, exist_ok=True)
|
|
263
|
+
|
|
264
|
+
today = datetime.now()
|
|
265
|
+
log_file = self.logs_dir / f"events_{today.strftime('%Y-%m')}.jsonl"
|
|
266
|
+
|
|
267
|
+
event = {
|
|
268
|
+
"timestamp": today.isoformat(),
|
|
269
|
+
"type": event_type,
|
|
270
|
+
"data": data
|
|
271
|
+
}
|
|
272
|
+
|
|
273
|
+
try:
|
|
274
|
+
with open(log_file, 'a', encoding='utf-8') as f:
|
|
275
|
+
f.write(json.dumps(event, ensure_ascii=False) + '\n')
|
|
276
|
+
except IOError as e:
|
|
277
|
+
print(f"Error appending event: {e}")
|
|
278
|
+
raise
|
|
279
|
+
|
|
280
|
+
def get_today_dir(self) -> Path:
|
|
281
|
+
"""Get the directory for today's content."""
|
|
282
|
+
today = date.today().strftime('%Y-%m-%d')
|
|
283
|
+
today_dir = self.daily_dir / today
|
|
284
|
+
today_dir.mkdir(parents=True, exist_ok=True)
|
|
285
|
+
return today_dir
|
|
286
|
+
|
|
287
|
+
def get_daily_dir(self, target_date: date) -> Path:
|
|
288
|
+
"""
|
|
289
|
+
Get the directory for a specific date's content.
|
|
290
|
+
|
|
291
|
+
Args:
|
|
292
|
+
target_date: The date to get the directory for
|
|
293
|
+
|
|
294
|
+
Returns:
|
|
295
|
+
Path to the daily directory
|
|
296
|
+
"""
|
|
297
|
+
date_str = target_date.strftime('%Y-%m-%d')
|
|
298
|
+
daily_path = self.daily_dir / date_str
|
|
299
|
+
return daily_path
|
|
300
|
+
|
|
301
|
+
    def save_daily_content(self, content_type: str, content: Dict[str, Any],
                           target_date: Optional[date] = None,
                           generate_audio: bool = True,
                           async_audio: bool = True) -> Path:
        """
        Save content to the daily directory.

        Note: for keypoints with audio enabled, *content* is mutated in
        place (an 'audio' key is added) and the file is written a second
        time with that metadata — callers holding a reference to *content*
        will observe the added key.

        Args:
            content_type: Type of content ('keypoint', 'quiz', 'user_answers')
            content: Content dictionary to save
            target_date: Date for the content (defaults to today)
            generate_audio: Whether to auto-generate audio for keypoints (default True)
            async_audio: Whether to generate audio in background thread (default True)

        Returns:
            Path to the saved file
        """
        if target_date is None:
            target_date = date.today()

        daily_path = self.get_daily_dir(target_date)
        daily_path.mkdir(parents=True, exist_ok=True)

        file_path = daily_path / f"{content_type}.json"

        # First write: the raw content, so the file exists even if the
        # audio step below fails.
        with open(file_path, 'w', encoding='utf-8') as f:
            json.dump(content, f, ensure_ascii=False, indent=2)

        # Auto-generate audio for keypoints
        if content_type == 'keypoint' and generate_audio:
            if async_audio:
                # Mark as pending so readers know audio is on its way.
                # NOTE(review): this rewrites the file just written above;
                # the two writes could be collapsed into one.
                content['audio'] = {
                    'status': 'pending',
                    'generated_at': None
                }
                with open(file_path, 'w', encoding='utf-8') as f:
                    json.dump(content, f, ensure_ascii=False, indent=2)

                # Start background thread (daemon: won't block interpreter
                # exit; an in-flight generation may be abandoned).
                date_str = target_date.isoformat()
                thread = threading.Thread(
                    target=self._generate_audio_background,
                    args=(target_date, file_path),
                    daemon=True
                )
                # Track by date so only one thread per day is referenced.
                self._audio_threads[date_str] = thread
                thread.start()
            else:
                # Synchronous generation (backward compatible)
                try:
                    audio_result = self.generate_keypoint_audio(target_date)
                    if audio_result.get('success'):
                        audio_path = audio_result.get('audio_path')
                        content['audio'] = {
                            'status': 'completed',
                            'composed': audio_path,
                            'duration_seconds': audio_result.get('duration_seconds'),
                            'generated_at': datetime.now().isoformat()
                        }
                        with open(file_path, 'w', encoding='utf-8') as f:
                            json.dump(content, f, ensure_ascii=False, indent=2)
                except Exception as e:
                    # Best-effort: a failed audio step never loses the content.
                    print(f"Warning: Audio generation failed: {e}")

        return file_path
|
|
367
|
+
|
|
368
|
+
    def _generate_audio_background(self, target_date: date, file_path: Path) -> None:
        """
        Generate audio in a background thread and update the keypoint file.

        Runs as a daemon-thread target started by save_daily_content().
        On success the keypoint JSON is re-read and rewritten with completed
        audio metadata; on failure a warning is printed and the file keeps
        its 'pending' audio status.

        NOTE(review): the re-read/rewrite below is not guarded by a lock —
        a concurrent writer to the same keypoint file could be clobbered;
        confirm no other writer runs while audio generation is in flight.

        Args:
            target_date: Date for the keypoint
            file_path: Path to the keypoint JSON file
        """
        date_str = target_date.isoformat()
        try:
            audio_result = self.generate_keypoint_audio(target_date)
            if audio_result.get('success'):
                # Re-read and update: pick up any changes written since the
                # thread was started, then attach the audio metadata.
                with open(file_path, 'r', encoding='utf-8') as f:
                    content = json.load(f)
                content['audio'] = {
                    'status': 'completed',
                    'composed': audio_result.get('audio_path'),
                    'duration_seconds': audio_result.get('duration_seconds'),
                    'generated_at': datetime.now().isoformat()
                }
                with open(file_path, 'w', encoding='utf-8') as f:
                    json.dump(content, f, ensure_ascii=False, indent=2)
        except Exception as e:
            print(f"Warning: Background audio generation failed for {date_str}: {e}")
        finally:
            # Clean up thread reference so the tracking dict doesn't grow.
            self._audio_threads.pop(date_str, None)
|
|
396
|
+
|
|
397
|
+
def generate_keypoint_audio(self, target_date: Optional[date] = None) -> Dict[str, Any]:
|
|
398
|
+
"""
|
|
399
|
+
Generate composed audio for a keypoint.
|
|
400
|
+
|
|
401
|
+
Args:
|
|
402
|
+
target_date: Date for the keypoint (defaults to today)
|
|
403
|
+
|
|
404
|
+
Returns:
|
|
405
|
+
Dictionary with:
|
|
406
|
+
- success: bool
|
|
407
|
+
- audio_path: str (relative path from data_dir)
|
|
408
|
+
- duration_seconds: float
|
|
409
|
+
- error_message: str (if failed)
|
|
410
|
+
"""
|
|
411
|
+
try:
|
|
412
|
+
from ..audio.composer import AudioComposer
|
|
413
|
+
from ..audio.tts import TTSManager
|
|
414
|
+
except ImportError:
|
|
415
|
+
from audio.composer import AudioComposer
|
|
416
|
+
from audio.tts import TTSManager
|
|
417
|
+
|
|
418
|
+
if target_date is None:
|
|
419
|
+
target_date = date.today()
|
|
420
|
+
|
|
421
|
+
# Load the keypoint
|
|
422
|
+
keypoint = self.load_daily_content('keypoint', target_date)
|
|
423
|
+
if not keypoint:
|
|
424
|
+
return {
|
|
425
|
+
'success': False,
|
|
426
|
+
'error_message': f'No keypoint found for {target_date}'
|
|
427
|
+
}
|
|
428
|
+
|
|
429
|
+
# Prepare output path - directly to OpenClaw media directory
|
|
430
|
+
# OpenClaw only allows media from ~/.openclaw/media/
|
|
431
|
+
date_str = target_date.strftime('%Y-%m-%d')
|
|
432
|
+
media_dir = Path.home() / '.openclaw' / 'media' / 'eng-lang-tutor' / date_str
|
|
433
|
+
media_dir.mkdir(parents=True, exist_ok=True)
|
|
434
|
+
|
|
435
|
+
# Intermediate MP3 and final Opus paths
|
|
436
|
+
mp3_path = media_dir / "keypoint_full.mp3"
|
|
437
|
+
opus_path = media_dir / "keypoint_full.opus"
|
|
438
|
+
|
|
439
|
+
try:
|
|
440
|
+
# Initialize TTS and composer (handle both package and direct imports)
|
|
441
|
+
try:
|
|
442
|
+
from ..audio.composer import AudioComposer
|
|
443
|
+
from ..audio.tts import TTSManager
|
|
444
|
+
from ..audio.converter import convert_mp3_to_opus
|
|
445
|
+
except ImportError:
|
|
446
|
+
from audio.composer import AudioComposer
|
|
447
|
+
from audio.tts import TTSManager
|
|
448
|
+
from audio.converter import convert_mp3_to_opus
|
|
449
|
+
|
|
450
|
+
tts = TTSManager.from_env()
|
|
451
|
+
composer = AudioComposer(tts)
|
|
452
|
+
|
|
453
|
+
# Step 1: Compose audio to MP3 (intermediate format)
|
|
454
|
+
result = composer.compose_keypoint_audio(keypoint, mp3_path)
|
|
455
|
+
|
|
456
|
+
if not result.success:
|
|
457
|
+
return {
|
|
458
|
+
'success': False,
|
|
459
|
+
'error_message': result.error_message
|
|
460
|
+
}
|
|
461
|
+
|
|
462
|
+
# Step 2: Convert to Opus format for Feishu voice bubble compatibility
|
|
463
|
+
# .opus = Ogg container + libopus codec
|
|
464
|
+
# - Feishu plugin: detects libopus → triggers voice bubble
|
|
465
|
+
# - Discord: native Opus support
|
|
466
|
+
# - Other platforms: may need fallback
|
|
467
|
+
convert_result = convert_mp3_to_opus(
|
|
468
|
+
input_path=mp3_path,
|
|
469
|
+
output_path=opus_path
|
|
470
|
+
)
|
|
471
|
+
|
|
472
|
+
if convert_result.success:
|
|
473
|
+
# Use Opus format
|
|
474
|
+
audio_path = f"eng-lang-tutor/{date_str}/keypoint_full.opus"
|
|
475
|
+
duration_seconds = convert_result.duration_seconds or result.duration_seconds
|
|
476
|
+
audio_format = 'opus'
|
|
477
|
+
else:
|
|
478
|
+
# Fallback to MP3 if conversion fails
|
|
479
|
+
audio_path = f"eng-lang-tutor/{date_str}/keypoint_full.mp3"
|
|
480
|
+
duration_seconds = result.duration_seconds
|
|
481
|
+
audio_format = 'mp3'
|
|
482
|
+
|
|
483
|
+
# Update keypoint with audio metadata
|
|
484
|
+
keypoint['audio'] = {
|
|
485
|
+
'composed': audio_path,
|
|
486
|
+
'duration_seconds': duration_seconds,
|
|
487
|
+
'generated_at': datetime.now().isoformat(),
|
|
488
|
+
'format': audio_format
|
|
489
|
+
}
|
|
490
|
+
|
|
491
|
+
# Save updated keypoint
|
|
492
|
+
daily_path = self.get_daily_dir(target_date)
|
|
493
|
+
file_path = daily_path / "keypoint.json"
|
|
494
|
+
with open(file_path, 'w', encoding='utf-8') as f:
|
|
495
|
+
json.dump(keypoint, f, ensure_ascii=False, indent=2)
|
|
496
|
+
|
|
497
|
+
return {
|
|
498
|
+
'success': True,
|
|
499
|
+
'audio_path': audio_path,
|
|
500
|
+
'duration_seconds': duration_seconds
|
|
501
|
+
}
|
|
502
|
+
except Exception as e:
|
|
503
|
+
return {
|
|
504
|
+
'success': False,
|
|
505
|
+
'error_message': str(e)
|
|
506
|
+
}
|
|
507
|
+
|
|
508
|
+
def load_daily_content(self, content_type: str,
|
|
509
|
+
target_date: Optional[date] = None) -> Optional[Dict[str, Any]]:
|
|
510
|
+
"""
|
|
511
|
+
Load content from the daily directory.
|
|
512
|
+
|
|
513
|
+
Args:
|
|
514
|
+
content_type: Type of content to load
|
|
515
|
+
target_date: Date for the content (defaults to today)
|
|
516
|
+
|
|
517
|
+
Returns:
|
|
518
|
+
Content dictionary or None if not found
|
|
519
|
+
"""
|
|
520
|
+
if target_date is None:
|
|
521
|
+
target_date = date.today()
|
|
522
|
+
|
|
523
|
+
file_path = self.get_daily_dir(target_date) / f"{content_type}.json"
|
|
524
|
+
|
|
525
|
+
if not file_path.exists():
|
|
526
|
+
return None
|
|
527
|
+
|
|
528
|
+
try:
|
|
529
|
+
with open(file_path, 'r', encoding='utf-8') as f:
|
|
530
|
+
return json.load(f)
|
|
531
|
+
except (json.JSONDecodeError, IOError) as e:
|
|
532
|
+
print(f"Error loading {content_type}: {e}")
|
|
533
|
+
return None
|
|
534
|
+
|
|
535
|
+
def get_recent_daily_content(self, days: int = 14) -> List[Dict[str, Any]]:
|
|
536
|
+
"""
|
|
537
|
+
Get content from recent N days for deduplication.
|
|
538
|
+
|
|
539
|
+
Args:
|
|
540
|
+
days: Number of days to look back
|
|
541
|
+
|
|
542
|
+
Returns:
|
|
543
|
+
List of content dictionaries from recent days
|
|
544
|
+
"""
|
|
545
|
+
recent_content = []
|
|
546
|
+
today = date.today()
|
|
547
|
+
|
|
548
|
+
for i in range(days):
|
|
549
|
+
target_date = today - __import__('datetime').timedelta(days=i)
|
|
550
|
+
content = self.load_daily_content('keypoint', target_date)
|
|
551
|
+
if content:
|
|
552
|
+
recent_content.append(content)
|
|
553
|
+
|
|
554
|
+
return recent_content
|
|
555
|
+
|
|
556
|
+
def get_recent_topics(self, days: int = 14) -> List[str]:
|
|
557
|
+
"""
|
|
558
|
+
Get topic fingerprints from recent days for deduplication.
|
|
559
|
+
|
|
560
|
+
Args:
|
|
561
|
+
days: Number of days to look back
|
|
562
|
+
|
|
563
|
+
Returns:
|
|
564
|
+
List of topic fingerprints
|
|
565
|
+
"""
|
|
566
|
+
recent_content = self.get_recent_daily_content(days)
|
|
567
|
+
return [c.get('topic_fingerprint', '') for c in recent_content if c.get('topic_fingerprint')]
|
|
568
|
+
|
|
569
|
+
# === Error Notebook Methods (delegated to ErrorNotebookManager) ===
|
|
570
|
+
|
|
571
|
+
def add_to_error_notebook(self, state: Dict[str, Any],
|
|
572
|
+
error: Dict[str, Any]) -> Dict[str, Any]:
|
|
573
|
+
"""Delegate to ErrorNotebookManager."""
|
|
574
|
+
return self.error_notebook.add_to_error_notebook(state, error)
|
|
575
|
+
|
|
576
|
+
def get_errors_page(self, state: Dict[str, Any],
|
|
577
|
+
page: int = 1,
|
|
578
|
+
per_page: int = 5,
|
|
579
|
+
month: str = None,
|
|
580
|
+
random: int = None) -> Dict[str, Any]:
|
|
581
|
+
"""Delegate to ErrorNotebookManager."""
|
|
582
|
+
return self.error_notebook.get_errors_page(state, page, per_page, month, random)
|
|
583
|
+
|
|
584
|
+
def get_error_stats(self, state: Dict[str, Any]) -> Dict[str, Any]:
|
|
585
|
+
"""Delegate to ErrorNotebookManager."""
|
|
586
|
+
return self.error_notebook.get_error_stats(state)
|
|
587
|
+
|
|
588
|
+
def review_error(self, state: Dict[str, Any], error_index: int,
|
|
589
|
+
correct: bool) -> Dict[str, Any]:
|
|
590
|
+
"""Delegate to ErrorNotebookManager."""
|
|
591
|
+
return self.error_notebook.review_error(state, error_index, correct)
|
|
592
|
+
|
|
593
|
+
def increment_wrong_count(self, state: Dict[str, Any],
|
|
594
|
+
error_index: int) -> Dict[str, Any]:
|
|
595
|
+
"""Delegate to ErrorNotebookManager."""
|
|
596
|
+
return self.error_notebook.increment_wrong_count(state, error_index)
|
|
597
|
+
|
|
598
|
+
def get_review_errors(self, state: Dict[str, Any],
|
|
599
|
+
count: int = 5) -> List[Dict[str, Any]]:
|
|
600
|
+
"""Delegate to ErrorNotebookManager."""
|
|
601
|
+
return self.error_notebook.get_review_errors(state, count)
|
|
602
|
+
|
|
603
|
+
def archive_stale_errors(self, state: Dict[str, Any]) -> Dict[str, Any]:
|
|
604
|
+
"""Delegate to ErrorNotebookManager."""
|
|
605
|
+
return self.error_notebook.archive_stale_errors(state)
|
|
606
|
+
|
|
607
|
+
def clear_reviewed_errors(self, state: Dict[str, Any]) -> Dict[str, Any]:
|
|
608
|
+
"""Delegate to ErrorNotebookManager."""
|
|
609
|
+
return self.error_notebook.clear_reviewed_errors(state)
|
|
610
|
+
|
|
611
|
+
# === Completion Tracking Methods ===
|
|
612
|
+
|
|
613
|
+
def can_take_quiz(self, state: Dict[str, Any]) -> bool:
|
|
614
|
+
"""
|
|
615
|
+
Check if user can take today's quiz.
|
|
616
|
+
|
|
617
|
+
Args:
|
|
618
|
+
state: Current state
|
|
619
|
+
|
|
620
|
+
Returns:
|
|
621
|
+
True if quiz can be taken, False if already completed today
|
|
622
|
+
"""
|
|
623
|
+
today = date.today().isoformat()
|
|
624
|
+
completed_date = state.get("completion_status", {}).get("quiz_completed_date")
|
|
625
|
+
return completed_date != today
|
|
626
|
+
|
|
627
|
+
def mark_quiz_completed(self, state: Dict[str, Any]) -> Dict[str, Any]:
|
|
628
|
+
"""
|
|
629
|
+
Mark today's quiz as completed.
|
|
630
|
+
|
|
631
|
+
Args:
|
|
632
|
+
state: Current state
|
|
633
|
+
|
|
634
|
+
Returns:
|
|
635
|
+
Updated state
|
|
636
|
+
"""
|
|
637
|
+
today = date.today().isoformat()
|
|
638
|
+
if "completion_status" not in state:
|
|
639
|
+
state["completion_status"] = {}
|
|
640
|
+
state["completion_status"]["quiz_completed_date"] = today
|
|
641
|
+
return state
|
|
642
|
+
|
|
643
|
+
# === Keypoint View Tracking ===
|
|
644
|
+
|
|
645
|
+
def record_keypoint_view(self, state: Dict[str, Any], view_date: date = None) -> Dict[str, Any]:
|
|
646
|
+
"""
|
|
647
|
+
Record that a keypoint was viewed.
|
|
648
|
+
|
|
649
|
+
Args:
|
|
650
|
+
state: Current state
|
|
651
|
+
view_date: Date of viewed keypoint (defaults to today)
|
|
652
|
+
|
|
653
|
+
Returns:
|
|
654
|
+
Updated state
|
|
655
|
+
"""
|
|
656
|
+
if view_date is None:
|
|
657
|
+
view_date = date.today()
|
|
658
|
+
|
|
659
|
+
if "completion_status" not in state:
|
|
660
|
+
state["completion_status"] = {}
|
|
661
|
+
|
|
662
|
+
if "keypoint_view_history" not in state["completion_status"]:
|
|
663
|
+
state["completion_status"]["keypoint_view_history"] = []
|
|
664
|
+
|
|
665
|
+
# Add view record
|
|
666
|
+
state["completion_status"]["keypoint_view_history"].append({
|
|
667
|
+
"date": view_date.isoformat(),
|
|
668
|
+
"viewed_at": datetime.now().isoformat()
|
|
669
|
+
})
|
|
670
|
+
|
|
671
|
+
# Keep last 30 entries to avoid unbounded growth
|
|
672
|
+
state["completion_status"]["keypoint_view_history"] = \
|
|
673
|
+
state["completion_status"]["keypoint_view_history"][-30:]
|
|
674
|
+
|
|
675
|
+
return state
|
|
676
|
+
|
|
677
|
+
def has_viewed_keypoint(self, state: Dict[str, Any], view_date: date) -> bool:
|
|
678
|
+
"""
|
|
679
|
+
Check if a keypoint for a specific date has been viewed/pushed.
|
|
680
|
+
|
|
681
|
+
Args:
|
|
682
|
+
state: Current state
|
|
683
|
+
view_date: Date to check
|
|
684
|
+
|
|
685
|
+
Returns:
|
|
686
|
+
True if keypoint for that date has been viewed
|
|
687
|
+
"""
|
|
688
|
+
history = state.get("completion_status", {}).get("keypoint_view_history", [])
|
|
689
|
+
return any(entry.get("date") == view_date.isoformat() for entry in history)
|
|
690
|
+
|
|
691
|
+
# === Initialization and Preferences ===
|
|
692
|
+
|
|
693
|
+
def update_onboarding_step(self, state: Dict[str, Any], step: int) -> Dict[str, Any]:
|
|
694
|
+
"""
|
|
695
|
+
Update onboarding step.
|
|
696
|
+
|
|
697
|
+
Args:
|
|
698
|
+
state: Current state
|
|
699
|
+
step: New step (0-6)
|
|
700
|
+
|
|
701
|
+
Returns:
|
|
702
|
+
Updated state
|
|
703
|
+
"""
|
|
704
|
+
state["onboarding_step"] = max(0, min(6, step))
|
|
705
|
+
if step >= 6:
|
|
706
|
+
state["initialized"] = True
|
|
707
|
+
return state
|
|
708
|
+
|
|
709
|
+
def update_preferences(self, state: Dict[str, Any],
|
|
710
|
+
cefr_level: str = None,
|
|
711
|
+
tutor_style: str = None,
|
|
712
|
+
oral_written_ratio: float = None,
|
|
713
|
+
topics: Dict[str, float] = None) -> Dict[str, Any]:
|
|
714
|
+
"""
|
|
715
|
+
Update user preferences.
|
|
716
|
+
|
|
717
|
+
Args:
|
|
718
|
+
state: Current state
|
|
719
|
+
cefr_level: New CEFR level (optional)
|
|
720
|
+
tutor_style: New tutor style (optional)
|
|
721
|
+
oral_written_ratio: New oral/written ratio (optional)
|
|
722
|
+
topics: New topic weights (optional)
|
|
723
|
+
|
|
724
|
+
Returns:
|
|
725
|
+
Updated state
|
|
726
|
+
"""
|
|
727
|
+
if "preferences" not in state:
|
|
728
|
+
state["preferences"] = {}
|
|
729
|
+
|
|
730
|
+
if cefr_level:
|
|
731
|
+
state["preferences"]["cefr_level"] = cefr_level
|
|
732
|
+
if tutor_style:
|
|
733
|
+
state["preferences"]["tutor_style"] = tutor_style
|
|
734
|
+
if oral_written_ratio is not None:
|
|
735
|
+
state["preferences"]["oral_written_ratio"] = oral_written_ratio
|
|
736
|
+
if topics:
|
|
737
|
+
state["preferences"]["topics"] = topics
|
|
738
|
+
|
|
739
|
+
return state
|
|
740
|
+
|
|
741
|
+
def update_schedule(self, state: Dict[str, Any],
|
|
742
|
+
keypoint_time: str = None,
|
|
743
|
+
quiz_time: str = None,
|
|
744
|
+
timezone: str = None) -> Dict[str, Any]:
|
|
745
|
+
"""
|
|
746
|
+
Update schedule preferences.
|
|
747
|
+
|
|
748
|
+
Args:
|
|
749
|
+
state: Current state
|
|
750
|
+
keypoint_time: Keypoint push time (HH:MM)
|
|
751
|
+
quiz_time: Quiz push time (HH:MM)
|
|
752
|
+
timezone: Timezone string
|
|
753
|
+
|
|
754
|
+
Returns:
|
|
755
|
+
Updated state
|
|
756
|
+
"""
|
|
757
|
+
if "schedule" not in state:
|
|
758
|
+
state["schedule"] = {}
|
|
759
|
+
|
|
760
|
+
if keypoint_time:
|
|
761
|
+
state["schedule"]["keypoint_time"] = keypoint_time
|
|
762
|
+
if quiz_time:
|
|
763
|
+
state["schedule"]["quiz_time"] = quiz_time
|
|
764
|
+
if timezone:
|
|
765
|
+
state["schedule"]["timezone"] = timezone
|
|
766
|
+
|
|
767
|
+
return state
|
|
768
|
+
|
|
769
|
+
def backup_state(self, backup_dir: str = "backups") -> Path:
|
|
770
|
+
"""
|
|
771
|
+
Create a backup of the current state.
|
|
772
|
+
|
|
773
|
+
Args:
|
|
774
|
+
backup_dir: Directory to store backups
|
|
775
|
+
|
|
776
|
+
Returns:
|
|
777
|
+
Path to the backup file
|
|
778
|
+
"""
|
|
779
|
+
backup_path = Path(backup_dir)
|
|
780
|
+
backup_path.mkdir(parents=True, exist_ok=True)
|
|
781
|
+
|
|
782
|
+
timestamp = datetime.now().strftime('%Y%m%d_%H%M%S')
|
|
783
|
+
backup_file = backup_path / f"state_backup_{timestamp}.json"
|
|
784
|
+
|
|
785
|
+
state = self.load_state()
|
|
786
|
+
with open(backup_file, 'w', encoding='utf-8') as f:
|
|
787
|
+
json.dump(state, f, ensure_ascii=False, indent=2)
|
|
788
|
+
|
|
789
|
+
return backup_file
|
|
790
|
+
|
|
791
|
+
def restore_from_backup(self, backup_file: Path) -> bool:
|
|
792
|
+
"""
|
|
793
|
+
Restore state from a backup file.
|
|
794
|
+
|
|
795
|
+
Args:
|
|
796
|
+
backup_file: Path to the backup file
|
|
797
|
+
|
|
798
|
+
Returns:
|
|
799
|
+
True if successful, False otherwise
|
|
800
|
+
"""
|
|
801
|
+
try:
|
|
802
|
+
with open(backup_file, 'r', encoding='utf-8') as f:
|
|
803
|
+
state = json.load(f)
|
|
804
|
+
self.save_state(state)
|
|
805
|
+
return True
|
|
806
|
+
except (json.JSONDecodeError, IOError) as e:
|
|
807
|
+
print(f"Error restoring from backup: {e}")
|
|
808
|
+
return False
|
|
809
|
+
|
|
810
|
+
|
|
811
|
+
# CLI interface - delegated to cli.py for better code organization
|
|
812
|
+
# CLI interface - delegated to cli.py for better code organization
if __name__ == "__main__":
    # Match the module's dual import convention (see the top-of-file
    # try/except): packaged layout first, script-local fallback second.
    # The bare `from cli import main` only worked when scripts/cli/ was
    # already on sys.path.
    try:
        from scripts.cli.cli import main
    except ImportError:
        from cli import main
    main()
|