netra-zen 1.0.9__py3-none-any.whl → 1.0.11__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- agent_interface/__init__.py +25 -25
- agent_interface/base_agent.py +350 -350
- {netra_zen-1.0.9.dist-info → netra_zen-1.0.11.dist-info}/METADATA +36 -15
- netra_zen-1.0.11.dist-info/RECORD +30 -0
- {netra_zen-1.0.9.dist-info → netra_zen-1.0.11.dist-info}/licenses/LICENSE.md +1 -1
- scripts/__init__.py +1 -1
- scripts/__main__.py +5 -5
- scripts/agent_cli.py +7179 -6948
- scripts/agent_logs.py +327 -327
- scripts/bump_version.py +137 -137
- scripts/demo_log_collection.py +146 -144
- scripts/embed_release_credentials.py +75 -75
- scripts/test_apex_telemetry_debug.py +221 -0
- scripts/verify_log_transmission.py +140 -140
- token_budget/budget_manager.py +199 -199
- token_budget/models.py +73 -73
- token_budget/visualization.py +21 -21
- token_transparency/__init__.py +19 -19
- token_transparency/claude_pricing_engine.py +326 -326
- zen/__init__.py +7 -7
- zen/__main__.py +11 -11
- zen/telemetry/__init__.py +14 -11
- zen/telemetry/apex_telemetry.py +259 -0
- zen/telemetry/embedded_credentials.py +59 -59
- zen/telemetry/manager.py +249 -249
- zen_orchestrator.py +3058 -3008
- netra_zen-1.0.9.dist-info/RECORD +0 -28
- {netra_zen-1.0.9.dist-info → netra_zen-1.0.11.dist-info}/WHEEL +0 -0
- {netra_zen-1.0.9.dist-info → netra_zen-1.0.11.dist-info}/entry_points.txt +0 -0
- {netra_zen-1.0.9.dist-info → netra_zen-1.0.11.dist-info}/top_level.txt +0 -0
scripts/agent_logs.py
CHANGED
@@ -1,327 +1,327 @@
|
|
1
|
-
#!/usr/bin/env python3
|
2
|
-
"""
|
3
|
-
Agent Logs Collection Helper
|
4
|
-
Collects recent JSONL logs from .claude/Projects for agent CLI integration
|
5
|
-
"""
|
6
|
-
|
7
|
-
import hashlib
|
8
|
-
import json
|
9
|
-
import logging
|
10
|
-
import os
|
11
|
-
import platform
|
12
|
-
from pathlib import Path
|
13
|
-
from typing import Optional, List, Dict, Any
|
14
|
-
|
15
|
-
# Configure module logger
|
16
|
-
logger = logging.getLogger(__name__)
|
17
|
-
|
18
|
-
|
19
|
-
def _get_default_user() -> Optional[str]:
|
20
|
-
"""
|
21
|
-
Get default username for Windows path resolution.
|
22
|
-
|
23
|
-
Returns:
|
24
|
-
Username from environment or None if not available
|
25
|
-
"""
|
26
|
-
return os.environ.get('USERNAME') or os.environ.get('USER')
|
27
|
-
|
28
|
-
|
29
|
-
def _resolve_projects_root(
|
30
|
-
platform_name: Optional[str] = None,
|
31
|
-
username: Optional[str] = None,
|
32
|
-
base_path: Optional[Path] = None
|
33
|
-
) -> Path:
|
34
|
-
"""
|
35
|
-
Resolve the .claude/Projects root directory based on platform.
|
36
|
-
|
37
|
-
Args:
|
38
|
-
platform_name: Platform identifier ('Darwin', 'Windows', 'Linux') or None for auto-detect
|
39
|
-
username: Windows username override
|
40
|
-
base_path: Direct path override (bypasses platform resolution)
|
41
|
-
|
42
|
-
Returns:
|
43
|
-
Path to .claude/Projects directory
|
44
|
-
|
45
|
-
Raises:
|
46
|
-
ValueError: If path cannot be resolved
|
47
|
-
"""
|
48
|
-
if base_path:
|
49
|
-
return Path(base_path).resolve()
|
50
|
-
|
51
|
-
platform_name = platform_name or platform.system()
|
52
|
-
|
53
|
-
if platform_name == 'Windows':
|
54
|
-
# Windows: C:\Users\<username>\.claude\Projects
|
55
|
-
if username:
|
56
|
-
user_home = Path(f"C:/Users/{username}")
|
57
|
-
else:
|
58
|
-
user_home = Path(os.environ.get('USERPROFILE', Path.home()))
|
59
|
-
else:
|
60
|
-
# macOS/Linux: ~/.claude/Projects
|
61
|
-
user_home = Path.home()
|
62
|
-
|
63
|
-
projects_root = user_home / ".claude" / "Projects"
|
64
|
-
|
65
|
-
return projects_root.resolve()
|
66
|
-
|
67
|
-
|
68
|
-
def _sanitize_project_name(project_name: str) -> str:
|
69
|
-
"""
|
70
|
-
Sanitize project name to prevent directory traversal attacks.
|
71
|
-
|
72
|
-
Args:
|
73
|
-
project_name: Raw project name
|
74
|
-
|
75
|
-
Returns:
|
76
|
-
Sanitized project name safe for path construction
|
77
|
-
|
78
|
-
Raises:
|
79
|
-
ValueError: If project name contains dangerous patterns
|
80
|
-
"""
|
81
|
-
if not project_name:
|
82
|
-
raise ValueError("Project name cannot be empty")
|
83
|
-
|
84
|
-
# Remove path separators and parent directory references
|
85
|
-
dangerous_patterns = ['..', '/', '\\', '\0']
|
86
|
-
for pattern in dangerous_patterns:
|
87
|
-
if pattern in project_name:
|
88
|
-
raise ValueError(f"Project name contains invalid pattern: {pattern}")
|
89
|
-
|
90
|
-
# Remove leading/trailing whitespace and dots
|
91
|
-
sanitized = project_name.strip().strip('.')
|
92
|
-
|
93
|
-
if not sanitized:
|
94
|
-
raise ValueError("Project name invalid after sanitization")
|
95
|
-
|
96
|
-
return sanitized
|
97
|
-
|
98
|
-
|
99
|
-
def _find_most_recent_project(projects_root: Path) -> Optional[Path]:
|
100
|
-
"""
|
101
|
-
Find the most recently modified project directory.
|
102
|
-
|
103
|
-
Args:
|
104
|
-
projects_root: Path to .claude/Projects directory
|
105
|
-
|
106
|
-
Returns:
|
107
|
-
Path to most recent project directory or None if no projects found
|
108
|
-
"""
|
109
|
-
if not projects_root.exists() or not projects_root.is_dir():
|
110
|
-
logger.warning(f"Projects root does not exist: {projects_root}")
|
111
|
-
return None
|
112
|
-
|
113
|
-
try:
|
114
|
-
# Get all subdirectories
|
115
|
-
project_dirs = [p for p in projects_root.iterdir() if p.is_dir()]
|
116
|
-
|
117
|
-
if not project_dirs:
|
118
|
-
logger.warning(f"No project directories found in {projects_root}")
|
119
|
-
return None
|
120
|
-
|
121
|
-
# Sort by modification time, most recent first
|
122
|
-
project_dirs.sort(key=lambda p: p.stat().st_mtime, reverse=True)
|
123
|
-
|
124
|
-
return project_dirs[0]
|
125
|
-
|
126
|
-
except Exception as e:
|
127
|
-
logger.error(f"Error finding most recent project: {e}")
|
128
|
-
return None
|
129
|
-
|
130
|
-
|
131
|
-
def _collect_jsonl_files(project_path: Path, limit: int) -> tuple[List[Dict[str, Any]], int, List[Dict[str, str]]]:
|
132
|
-
"""
|
133
|
-
Collect and parse JSONL files from project directory.
|
134
|
-
|
135
|
-
Args:
|
136
|
-
project_path: Path to project directory
|
137
|
-
limit: Maximum number of log files to read
|
138
|
-
|
139
|
-
Returns:
|
140
|
-
Tuple of (list of parsed log entries, number of files read, list of file info dicts)
|
141
|
-
"""
|
142
|
-
if not project_path.exists() or not project_path.is_dir():
|
143
|
-
logger.warning(f"Project path does not exist: {project_path}")
|
144
|
-
return [], 0, []
|
145
|
-
|
146
|
-
try:
|
147
|
-
# Find all .jsonl files
|
148
|
-
jsonl_files = list(project_path.glob("*.jsonl"))
|
149
|
-
|
150
|
-
if not jsonl_files:
|
151
|
-
logger.info(f"No .jsonl files found in {project_path}")
|
152
|
-
return [], 0, []
|
153
|
-
|
154
|
-
# Sort by modification time, most recent first
|
155
|
-
jsonl_files.sort(key=lambda p: p.stat().st_mtime, reverse=True)
|
156
|
-
|
157
|
-
# Limit number of files to read
|
158
|
-
jsonl_files = jsonl_files[:limit]
|
159
|
-
files_read = len(jsonl_files)
|
160
|
-
|
161
|
-
all_logs = []
|
162
|
-
file_info = []
|
163
|
-
|
164
|
-
for jsonl_file in jsonl_files:
|
165
|
-
try:
|
166
|
-
# Calculate file hash for tracking
|
167
|
-
hasher = hashlib.sha256()
|
168
|
-
entry_count = 0
|
169
|
-
|
170
|
-
with open(jsonl_file, 'rb') as f:
|
171
|
-
# Read in chunks for efficient hashing
|
172
|
-
for chunk in iter(lambda: f.read(4096), b''):
|
173
|
-
hasher.update(chunk)
|
174
|
-
|
175
|
-
file_hash = hasher.hexdigest()[:8] # First 8 chars of hash
|
176
|
-
|
177
|
-
# Now read and parse the file
|
178
|
-
with open(jsonl_file, 'r', encoding='utf-8') as f:
|
179
|
-
for line_num, line in enumerate(f, 1):
|
180
|
-
line = line.strip()
|
181
|
-
if not line:
|
182
|
-
continue
|
183
|
-
|
184
|
-
try:
|
185
|
-
log_entry = json.loads(line)
|
186
|
-
all_logs.append(log_entry)
|
187
|
-
entry_count += 1
|
188
|
-
except json.JSONDecodeError as e:
|
189
|
-
logger.debug(
|
190
|
-
f"Skipping malformed JSON in {jsonl_file.name}:{line_num}: {e}"
|
191
|
-
)
|
192
|
-
continue
|
193
|
-
|
194
|
-
file_info.append({
|
195
|
-
'name': jsonl_file.name,
|
196
|
-
'hash': file_hash,
|
197
|
-
'entries': entry_count
|
198
|
-
})
|
199
|
-
|
200
|
-
except Exception as e:
|
201
|
-
logger.warning(f"Error reading {jsonl_file.name}: {e}")
|
202
|
-
continue
|
203
|
-
|
204
|
-
logger.info(f"Collected {len(all_logs)} log entries from {files_read} files")
|
205
|
-
return all_logs, files_read, file_info
|
206
|
-
|
207
|
-
except Exception as e:
|
208
|
-
logger.error(f"Error collecting JSONL files: {e}")
|
209
|
-
return [], 0, []
|
210
|
-
|
211
|
-
|
212
|
-
def collect_recent_logs(
|
213
|
-
limit: int =
|
214
|
-
project_name: Optional[str] = None,
|
215
|
-
base_path: Optional[str] = None,
|
216
|
-
username: Optional[str] = None,
|
217
|
-
platform_name: Optional[str] = None
|
218
|
-
) -> Optional[tuple[List[Dict[str, Any]], int, List[Dict[str, str]]]]:
|
219
|
-
"""
|
220
|
-
Collect recent JSONL logs from .claude/Projects directory.
|
221
|
-
|
222
|
-
Args:
|
223
|
-
limit: Maximum number of log files to read (default:
|
224
|
-
project_name: Specific project name or None for most recent
|
225
|
-
base_path: Direct path override to logs directory OR a specific .jsonl file
|
226
|
-
username: Windows username override
|
227
|
-
platform_name: Platform override for testing ('Darwin', 'Windows', 'Linux')
|
228
|
-
|
229
|
-
Returns:
|
230
|
-
Tuple of (list of log entry dicts, number of files read, list of file info) or None if no logs found
|
231
|
-
|
232
|
-
Raises:
|
233
|
-
ValueError: If limit is not positive or project_name is invalid
|
234
|
-
"""
|
235
|
-
if limit < 1:
|
236
|
-
raise ValueError(f"Limit must be positive, got {limit}")
|
237
|
-
|
238
|
-
try:
|
239
|
-
# Check if base_path points to a specific .jsonl file
|
240
|
-
if base_path:
|
241
|
-
base_path_obj = Path(base_path)
|
242
|
-
if base_path_obj.is_file() and base_path_obj.suffix == '.jsonl':
|
243
|
-
# Handle direct file path
|
244
|
-
logger.info(f"Reading specific log file: {base_path_obj}")
|
245
|
-
|
246
|
-
if not base_path_obj.exists():
|
247
|
-
logger.warning(f"Specified log file does not exist: {base_path_obj}")
|
248
|
-
return None
|
249
|
-
|
250
|
-
# Read the single file
|
251
|
-
all_logs = []
|
252
|
-
file_info = []
|
253
|
-
|
254
|
-
try:
|
255
|
-
# Calculate file hash
|
256
|
-
hasher = hashlib.sha256()
|
257
|
-
entry_count = 0
|
258
|
-
|
259
|
-
with open(base_path_obj, 'rb') as f:
|
260
|
-
for chunk in iter(lambda: f.read(4096), b''):
|
261
|
-
hasher.update(chunk)
|
262
|
-
|
263
|
-
file_hash = hasher.hexdigest()[:8]
|
264
|
-
|
265
|
-
# Read and parse the file
|
266
|
-
with open(base_path_obj, 'r', encoding='utf-8') as f:
|
267
|
-
for line_num, line in enumerate(f, 1):
|
268
|
-
line = line.strip()
|
269
|
-
if not line:
|
270
|
-
continue
|
271
|
-
|
272
|
-
try:
|
273
|
-
log_entry = json.loads(line)
|
274
|
-
all_logs.append(log_entry)
|
275
|
-
entry_count += 1
|
276
|
-
except json.JSONDecodeError as e:
|
277
|
-
logger.debug(
|
278
|
-
f"Skipping malformed JSON in {base_path_obj.name}:{line_num}: {e}"
|
279
|
-
)
|
280
|
-
continue
|
281
|
-
|
282
|
-
file_info.append({
|
283
|
-
'name': base_path_obj.name,
|
284
|
-
'hash': file_hash,
|
285
|
-
'entries': entry_count
|
286
|
-
})
|
287
|
-
|
288
|
-
logger.info(f"Collected {len(all_logs)} log entries from {base_path_obj.name}")
|
289
|
-
return all_logs, 1, file_info
|
290
|
-
|
291
|
-
except Exception as e:
|
292
|
-
logger.error(f"Error reading log file {base_path_obj}: {e}")
|
293
|
-
return None
|
294
|
-
|
295
|
-
# Original directory-based logic
|
296
|
-
base = Path(base_path) if base_path else None
|
297
|
-
projects_root = _resolve_projects_root(
|
298
|
-
platform_name=platform_name,
|
299
|
-
username=username,
|
300
|
-
base_path=base
|
301
|
-
)
|
302
|
-
|
303
|
-
# Determine target project
|
304
|
-
if project_name:
|
305
|
-
sanitized_name = _sanitize_project_name(project_name)
|
306
|
-
project_path = projects_root / sanitized_name
|
307
|
-
|
308
|
-
if not project_path.exists():
|
309
|
-
logger.warning(f"Specified project does not exist: {project_path}")
|
310
|
-
return None
|
311
|
-
else:
|
312
|
-
# Auto-detect most recent project
|
313
|
-
project_path = _find_most_recent_project(projects_root)
|
314
|
-
if not project_path:
|
315
|
-
return None
|
316
|
-
|
317
|
-
# Collect logs
|
318
|
-
logs, files_read, file_info = _collect_jsonl_files(project_path, limit)
|
319
|
-
|
320
|
-
if not logs:
|
321
|
-
return None
|
322
|
-
|
323
|
-
return logs, files_read, file_info
|
324
|
-
|
325
|
-
except Exception as e:
|
326
|
-
logger.error(f"Failed to collect logs: {e}")
|
327
|
-
return None
|
1
|
+
#!/usr/bin/env python3
|
2
|
+
"""
|
3
|
+
Agent Logs Collection Helper
|
4
|
+
Collects recent JSONL logs from .claude/Projects for agent CLI integration
|
5
|
+
"""
|
6
|
+
|
7
|
+
import hashlib
|
8
|
+
import json
|
9
|
+
import logging
|
10
|
+
import os
|
11
|
+
import platform
|
12
|
+
from pathlib import Path
|
13
|
+
from typing import Optional, List, Dict, Any
|
14
|
+
|
15
|
+
# Configure module logger
|
16
|
+
logger = logging.getLogger(__name__)
|
17
|
+
|
18
|
+
|
19
|
+
def _get_default_user() -> Optional[str]:
|
20
|
+
"""
|
21
|
+
Get default username for Windows path resolution.
|
22
|
+
|
23
|
+
Returns:
|
24
|
+
Username from environment or None if not available
|
25
|
+
"""
|
26
|
+
return os.environ.get('USERNAME') or os.environ.get('USER')
|
27
|
+
|
28
|
+
|
29
|
+
def _resolve_projects_root(
|
30
|
+
platform_name: Optional[str] = None,
|
31
|
+
username: Optional[str] = None,
|
32
|
+
base_path: Optional[Path] = None
|
33
|
+
) -> Path:
|
34
|
+
"""
|
35
|
+
Resolve the .claude/Projects root directory based on platform.
|
36
|
+
|
37
|
+
Args:
|
38
|
+
platform_name: Platform identifier ('Darwin', 'Windows', 'Linux') or None for auto-detect
|
39
|
+
username: Windows username override
|
40
|
+
base_path: Direct path override (bypasses platform resolution)
|
41
|
+
|
42
|
+
Returns:
|
43
|
+
Path to .claude/Projects directory
|
44
|
+
|
45
|
+
Raises:
|
46
|
+
ValueError: If path cannot be resolved
|
47
|
+
"""
|
48
|
+
if base_path:
|
49
|
+
return Path(base_path).resolve()
|
50
|
+
|
51
|
+
platform_name = platform_name or platform.system()
|
52
|
+
|
53
|
+
if platform_name == 'Windows':
|
54
|
+
# Windows: C:\Users\<username>\.claude\Projects
|
55
|
+
if username:
|
56
|
+
user_home = Path(f"C:/Users/{username}")
|
57
|
+
else:
|
58
|
+
user_home = Path(os.environ.get('USERPROFILE', Path.home()))
|
59
|
+
else:
|
60
|
+
# macOS/Linux: ~/.claude/Projects
|
61
|
+
user_home = Path.home()
|
62
|
+
|
63
|
+
projects_root = user_home / ".claude" / "Projects"
|
64
|
+
|
65
|
+
return projects_root.resolve()
|
66
|
+
|
67
|
+
|
68
|
+
def _sanitize_project_name(project_name: str) -> str:
|
69
|
+
"""
|
70
|
+
Sanitize project name to prevent directory traversal attacks.
|
71
|
+
|
72
|
+
Args:
|
73
|
+
project_name: Raw project name
|
74
|
+
|
75
|
+
Returns:
|
76
|
+
Sanitized project name safe for path construction
|
77
|
+
|
78
|
+
Raises:
|
79
|
+
ValueError: If project name contains dangerous patterns
|
80
|
+
"""
|
81
|
+
if not project_name:
|
82
|
+
raise ValueError("Project name cannot be empty")
|
83
|
+
|
84
|
+
# Remove path separators and parent directory references
|
85
|
+
dangerous_patterns = ['..', '/', '\\', '\0']
|
86
|
+
for pattern in dangerous_patterns:
|
87
|
+
if pattern in project_name:
|
88
|
+
raise ValueError(f"Project name contains invalid pattern: {pattern}")
|
89
|
+
|
90
|
+
# Remove leading/trailing whitespace and dots
|
91
|
+
sanitized = project_name.strip().strip('.')
|
92
|
+
|
93
|
+
if not sanitized:
|
94
|
+
raise ValueError("Project name invalid after sanitization")
|
95
|
+
|
96
|
+
return sanitized
|
97
|
+
|
98
|
+
|
99
|
+
def _find_most_recent_project(projects_root: Path) -> Optional[Path]:
|
100
|
+
"""
|
101
|
+
Find the most recently modified project directory.
|
102
|
+
|
103
|
+
Args:
|
104
|
+
projects_root: Path to .claude/Projects directory
|
105
|
+
|
106
|
+
Returns:
|
107
|
+
Path to most recent project directory or None if no projects found
|
108
|
+
"""
|
109
|
+
if not projects_root.exists() or not projects_root.is_dir():
|
110
|
+
logger.warning(f"Projects root does not exist: {projects_root}")
|
111
|
+
return None
|
112
|
+
|
113
|
+
try:
|
114
|
+
# Get all subdirectories
|
115
|
+
project_dirs = [p for p in projects_root.iterdir() if p.is_dir()]
|
116
|
+
|
117
|
+
if not project_dirs:
|
118
|
+
logger.warning(f"No project directories found in {projects_root}")
|
119
|
+
return None
|
120
|
+
|
121
|
+
# Sort by modification time, most recent first
|
122
|
+
project_dirs.sort(key=lambda p: p.stat().st_mtime, reverse=True)
|
123
|
+
|
124
|
+
return project_dirs[0]
|
125
|
+
|
126
|
+
except Exception as e:
|
127
|
+
logger.error(f"Error finding most recent project: {e}")
|
128
|
+
return None
|
129
|
+
|
130
|
+
|
131
|
+
def _collect_jsonl_files(project_path: Path, limit: int) -> tuple[List[Dict[str, Any]], int, List[Dict[str, str]]]:
|
132
|
+
"""
|
133
|
+
Collect and parse JSONL files from project directory.
|
134
|
+
|
135
|
+
Args:
|
136
|
+
project_path: Path to project directory
|
137
|
+
limit: Maximum number of log files to read
|
138
|
+
|
139
|
+
Returns:
|
140
|
+
Tuple of (list of parsed log entries, number of files read, list of file info dicts)
|
141
|
+
"""
|
142
|
+
if not project_path.exists() or not project_path.is_dir():
|
143
|
+
logger.warning(f"Project path does not exist: {project_path}")
|
144
|
+
return [], 0, []
|
145
|
+
|
146
|
+
try:
|
147
|
+
# Find all .jsonl files
|
148
|
+
jsonl_files = list(project_path.glob("*.jsonl"))
|
149
|
+
|
150
|
+
if not jsonl_files:
|
151
|
+
logger.info(f"No .jsonl files found in {project_path}")
|
152
|
+
return [], 0, []
|
153
|
+
|
154
|
+
# Sort by modification time, most recent first
|
155
|
+
jsonl_files.sort(key=lambda p: p.stat().st_mtime, reverse=True)
|
156
|
+
|
157
|
+
# Limit number of files to read
|
158
|
+
jsonl_files = jsonl_files[:limit]
|
159
|
+
files_read = len(jsonl_files)
|
160
|
+
|
161
|
+
all_logs = []
|
162
|
+
file_info = []
|
163
|
+
|
164
|
+
for jsonl_file in jsonl_files:
|
165
|
+
try:
|
166
|
+
# Calculate file hash for tracking
|
167
|
+
hasher = hashlib.sha256()
|
168
|
+
entry_count = 0
|
169
|
+
|
170
|
+
with open(jsonl_file, 'rb') as f:
|
171
|
+
# Read in chunks for efficient hashing
|
172
|
+
for chunk in iter(lambda: f.read(4096), b''):
|
173
|
+
hasher.update(chunk)
|
174
|
+
|
175
|
+
file_hash = hasher.hexdigest()[:8] # First 8 chars of hash
|
176
|
+
|
177
|
+
# Now read and parse the file
|
178
|
+
with open(jsonl_file, 'r', encoding='utf-8') as f:
|
179
|
+
for line_num, line in enumerate(f, 1):
|
180
|
+
line = line.strip()
|
181
|
+
if not line:
|
182
|
+
continue
|
183
|
+
|
184
|
+
try:
|
185
|
+
log_entry = json.loads(line)
|
186
|
+
all_logs.append(log_entry)
|
187
|
+
entry_count += 1
|
188
|
+
except json.JSONDecodeError as e:
|
189
|
+
logger.debug(
|
190
|
+
f"Skipping malformed JSON in {jsonl_file.name}:{line_num}: {e}"
|
191
|
+
)
|
192
|
+
continue
|
193
|
+
|
194
|
+
file_info.append({
|
195
|
+
'name': jsonl_file.name,
|
196
|
+
'hash': file_hash,
|
197
|
+
'entries': entry_count
|
198
|
+
})
|
199
|
+
|
200
|
+
except Exception as e:
|
201
|
+
logger.warning(f"Error reading {jsonl_file.name}: {e}")
|
202
|
+
continue
|
203
|
+
|
204
|
+
logger.info(f"Collected {len(all_logs)} log entries from {files_read} files")
|
205
|
+
return all_logs, files_read, file_info
|
206
|
+
|
207
|
+
except Exception as e:
|
208
|
+
logger.error(f"Error collecting JSONL files: {e}")
|
209
|
+
return [], 0, []
|
210
|
+
|
211
|
+
|
212
|
+
def _read_single_jsonl(log_file: Path) -> tuple[List[Dict[str, Any]], List[Dict[str, str]]]:
    """Hash and parse one .jsonl file.

    Args:
        log_file: Path to an existing .jsonl file.

    Returns:
        Tuple of (parsed log entries, single-element file-info list with
        'name', 'hash', and 'entries' keys).

    Raises:
        OSError: If the file cannot be read.
    """
    hasher = hashlib.sha256()
    with open(log_file, 'rb') as f:
        for chunk in iter(lambda: f.read(4096), b''):
            hasher.update(chunk)
    file_hash = hasher.hexdigest()[:8]  # Short prefix is enough for tracking

    all_logs: List[Dict[str, Any]] = []
    entry_count = 0
    with open(log_file, 'r', encoding='utf-8') as f:
        for line_num, line in enumerate(f, 1):
            line = line.strip()
            if not line:
                continue
            try:
                all_logs.append(json.loads(line))
                entry_count += 1
            except json.JSONDecodeError as e:
                # Tolerate malformed lines rather than failing the whole file.
                logging.getLogger(__name__).debug(
                    f"Skipping malformed JSON in {log_file.name}:{line_num}: {e}"
                )

    file_info = [{
        'name': log_file.name,
        'hash': file_hash,
        'entries': entry_count
    }]
    return all_logs, file_info


def collect_recent_logs(
    limit: int = 1,
    project_name: Optional[str] = None,
    base_path: Optional[str] = None,
    username: Optional[str] = None,
    platform_name: Optional[str] = None
) -> Optional[tuple[List[Dict[str, Any]], int, List[Dict[str, str]]]]:
    """Collect recent JSONL logs from .claude/Projects directory.

    Args:
        limit: Maximum number of log files to read (default: 1). For best
            results, use 1 log at a time for focused analysis.
        project_name: Specific project name or None for most recent
        base_path: Direct path override to logs directory OR a specific .jsonl file
        username: Windows username override
        platform_name: Platform override for testing ('Darwin', 'Windows', 'Linux')

    Returns:
        Tuple of (list of log entry dicts, number of files read, list of
        file info) or None if no logs found. An invalid project_name is
        caught internally, logged, and reported as None.

    Raises:
        ValueError: If limit is not positive
    """
    log = logging.getLogger(__name__)

    if limit < 1:
        raise ValueError(f"Limit must be positive, got {limit}")

    try:
        # A base_path naming an existing .jsonl file is read directly,
        # bypassing project discovery. (is_file() already implies the file
        # exists, so no separate existence check is needed.)
        if base_path:
            base_path_obj = Path(base_path)
            if base_path_obj.is_file() and base_path_obj.suffix == '.jsonl':
                log.info(f"Reading specific log file: {base_path_obj}")
                try:
                    all_logs, file_info = _read_single_jsonl(base_path_obj)
                except Exception as e:
                    log.error(f"Error reading log file {base_path_obj}: {e}")
                    return None
                log.info(f"Collected {len(all_logs)} log entries from {base_path_obj.name}")
                return all_logs, 1, file_info

        # Directory-based flow: resolve the projects root, pick a project,
        # then collect its newest files.
        projects_root = _resolve_projects_root(
            platform_name=platform_name,
            username=username,
            base_path=Path(base_path) if base_path else None
        )

        # Determine target project
        if project_name:
            project_path = projects_root / _sanitize_project_name(project_name)
            if not project_path.exists():
                log.warning(f"Specified project does not exist: {project_path}")
                return None
        else:
            # Auto-detect most recent project
            project_path = _find_most_recent_project(projects_root)
            if not project_path:
                return None

        # Collect logs
        logs, files_read, file_info = _collect_jsonl_files(project_path, limit)

        if not logs:
            return None

        return logs, files_read, file_info

    except Exception as e:
        log.error(f"Failed to collect logs: {e}")
        return None
|