netra-zen 1.0.7__py3-none-any.whl → 1.0.9__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
scripts/agent_logs.py CHANGED
@@ -4,6 +4,7 @@ Agent Logs Collection Helper
 Collects recent JSONL logs from .claude/Projects for agent CLI integration
 """
 
+import hashlib
 import json
 import logging
 import os
@@ -127,7 +128,7 @@ def _find_most_recent_project(projects_root: Path) -> Optional[Path]:
     return None
 
 
-def _collect_jsonl_files(project_path: Path, limit: int) -> List[Dict[str, Any]]:
+def _collect_jsonl_files(project_path: Path, limit: int) -> tuple[List[Dict[str, Any]], int, List[Dict[str, str]]]:
     """
     Collect and parse JSONL files from project directory.
 
@@ -136,11 +137,11 @@ def _collect_jsonl_files(project_path: Path, limit: int) -> List[Dict[str, Any]]
         limit: Maximum number of log files to read
 
     Returns:
-        List of parsed log entries (dicts)
+        Tuple of (list of parsed log entries, number of files read, list of file info dicts)
     """
     if not project_path.exists() or not project_path.is_dir():
         logger.warning(f"Project path does not exist: {project_path}")
-        return []
+        return [], 0, []
 
     try:
         # Find all .jsonl files
@@ -148,18 +149,32 @@ def _collect_jsonl_files(project_path: Path, limit: int) -> List[Dict[str, Any]]
 
         if not jsonl_files:
             logger.info(f"No .jsonl files found in {project_path}")
-            return []
+            return [], 0, []
 
         # Sort by modification time, most recent first
         jsonl_files.sort(key=lambda p: p.stat().st_mtime, reverse=True)
 
         # Limit number of files to read
         jsonl_files = jsonl_files[:limit]
+        files_read = len(jsonl_files)
 
         all_logs = []
+        file_info = []
 
         for jsonl_file in jsonl_files:
             try:
+                # Calculate file hash for tracking
+                hasher = hashlib.sha256()
+                entry_count = 0
+
+                with open(jsonl_file, 'rb') as f:
+                    # Read in chunks for efficient hashing
+                    for chunk in iter(lambda: f.read(4096), b''):
+                        hasher.update(chunk)
+
+                file_hash = hasher.hexdigest()[:8]  # First 8 chars of hash
+
+                # Now read and parse the file
                 with open(jsonl_file, 'r', encoding='utf-8') as f:
                     for line_num, line in enumerate(f, 1):
                         line = line.strip()
@@ -169,43 +184,50 @@ def _collect_jsonl_files(project_path: Path, limit: int) -> List[Dict[str, Any]]
                         try:
                             log_entry = json.loads(line)
                             all_logs.append(log_entry)
+                            entry_count += 1
                         except json.JSONDecodeError as e:
                             logger.debug(
                                 f"Skipping malformed JSON in {jsonl_file.name}:{line_num}: {e}"
                             )
                             continue
 
+                file_info.append({
+                    'name': jsonl_file.name,
+                    'hash': file_hash,
+                    'entries': entry_count
+                })
+
             except Exception as e:
                 logger.warning(f"Error reading {jsonl_file.name}: {e}")
                 continue
 
-        logger.info(f"Collected {len(all_logs)} log entries from {len(jsonl_files)} files")
-        return all_logs
+        logger.info(f"Collected {len(all_logs)} log entries from {files_read} files")
+        return all_logs, files_read, file_info
 
     except Exception as e:
         logger.error(f"Error collecting JSONL files: {e}")
-        return []
+        return [], 0, []
 
 
 def collect_recent_logs(
-    limit: int = 5,
+    limit: int = 3,
     project_name: Optional[str] = None,
     base_path: Optional[str] = None,
     username: Optional[str] = None,
     platform_name: Optional[str] = None
-) -> Optional[List[Dict[str, Any]]]:
+) -> Optional[tuple[List[Dict[str, Any]], int, List[Dict[str, str]]]]:
     """
     Collect recent JSONL logs from .claude/Projects directory.
 
     Args:
-        limit: Maximum number of log files to read (default: 5)
+        limit: Maximum number of log files to read (default: 3)
         project_name: Specific project name or None for most recent
-        base_path: Direct path override to logs directory
+        base_path: Direct path override to logs directory OR a specific .jsonl file
         username: Windows username override
         platform_name: Platform override for testing ('Darwin', 'Windows', 'Linux')
 
     Returns:
-        List of log entry dicts or None if no logs found
+        Tuple of (list of log entry dicts, number of files read, list of file info) or None if no logs found
 
     Raises:
         ValueError: If limit is not positive or project_name is invalid
@@ -214,7 +236,63 @@ def collect_recent_logs(
         raise ValueError(f"Limit must be positive, got {limit}")
 
     try:
-        # Resolve projects root
+        # Check if base_path points to a specific .jsonl file
+        if base_path:
+            base_path_obj = Path(base_path)
+            if base_path_obj.is_file() and base_path_obj.suffix == '.jsonl':
+                # Handle direct file path
+                logger.info(f"Reading specific log file: {base_path_obj}")
+
+                if not base_path_obj.exists():
+                    logger.warning(f"Specified log file does not exist: {base_path_obj}")
+                    return None
+
+                # Read the single file
+                all_logs = []
+                file_info = []
+
+                try:
+                    # Calculate file hash
+                    hasher = hashlib.sha256()
+                    entry_count = 0
+
+                    with open(base_path_obj, 'rb') as f:
+                        for chunk in iter(lambda: f.read(4096), b''):
+                            hasher.update(chunk)
+
+                    file_hash = hasher.hexdigest()[:8]
+
+                    # Read and parse the file
+                    with open(base_path_obj, 'r', encoding='utf-8') as f:
+                        for line_num, line in enumerate(f, 1):
+                            line = line.strip()
+                            if not line:
+                                continue
+
+                            try:
+                                log_entry = json.loads(line)
+                                all_logs.append(log_entry)
+                                entry_count += 1
+                            except json.JSONDecodeError as e:
+                                logger.debug(
+                                    f"Skipping malformed JSON in {base_path_obj.name}:{line_num}: {e}"
+                                )
+                                continue
+
+                    file_info.append({
+                        'name': base_path_obj.name,
+                        'hash': file_hash,
+                        'entries': entry_count
+                    })
+
+                    logger.info(f"Collected {len(all_logs)} log entries from {base_path_obj.name}")
+                    return all_logs, 1, file_info
+
+                except Exception as e:
+                    logger.error(f"Error reading log file {base_path_obj}: {e}")
+                    return None
+
+        # Original directory-based logic
         base = Path(base_path) if base_path else None
         projects_root = _resolve_projects_root(
             platform_name=platform_name,
@@ -237,12 +315,12 @@ collect_recent_logs(
             return None
 
         # Collect logs
-        logs = _collect_jsonl_files(project_path, limit)
+        logs, files_read, file_info = _collect_jsonl_files(project_path, limit)
 
         if not logs:
             return None
 
-        return logs
+        return logs, files_read, file_info
 
     except Exception as e:
         logger.error(f"Failed to collect logs: {e}")
scripts/bump_version.py CHANGED
@@ -1,138 +1,138 @@
-#!/usr/bin/env python3
-"""
-Version bump utility for Zen Orchestrator.
-Updates version in all relevant files.
-
-Usage:
-    python scripts/bump_version.py patch   # 1.0.0 -> 1.0.1
-    python scripts/bump_version.py minor   # 1.0.0 -> 1.1.0
-    python scripts/bump_version.py major   # 1.0.0 -> 2.0.0
-    python scripts/bump_version.py 1.2.3   # Set specific version
-"""
-
-import re
-import sys
-from pathlib import Path
-from typing import Tuple
-
-
-def parse_version(version_str: str) -> Tuple[int, int, int]:
-    """Parse version string to tuple of integers."""
-    match = re.match(r'^(\d+)\.(\d+)\.(\d+)$', version_str)
-    if not match:
-        raise ValueError(f"Invalid version format: {version_str}")
-    return tuple(map(int, match.groups()))
-
-
-def format_version(version_tuple: Tuple[int, int, int]) -> str:
-    """Format version tuple to string."""
-    return '.'.join(map(str, version_tuple))
-
-
-def get_current_version() -> str:
-    """Get current version from __init__.py."""
-    init_file = Path(__file__).parent.parent / "__init__.py"
-    content = init_file.read_text()
-    match = re.search(r'__version__\s*=\s*["\']([^"\']+)["\']', content)
-    if not match:
-        raise ValueError("Could not find version in __init__.py")
-    return match.group(1)
-
-
-def bump_version(current: str, bump_type: str) -> str:
-    """Bump version based on type."""
-    if re.match(r'^\d+\.\d+\.\d+$', bump_type):
-        # Specific version provided
-        return bump_type
-
-    major, minor, patch = parse_version(current)
-
-    if bump_type == 'major':
-        return format_version((major + 1, 0, 0))
-    elif bump_type == 'minor':
-        return format_version((major, minor + 1, 0))
-    elif bump_type == 'patch':
-        return format_version((major, minor, patch + 1))
-    else:
-        raise ValueError(f"Invalid bump type: {bump_type}")
-
-
-def update_file(file_path: Path, old_version: str, new_version: str, patterns: list):
-    """Update version in a file using specified patterns."""
-    if not file_path.exists():
-        print(f"  ⚠️  {file_path} does not exist, skipping...")
-        return
-
-    content = file_path.read_text()
-    original_content = content
-
-    for pattern in patterns:
-        old_pattern = pattern.format(version=old_version)
-        new_pattern = pattern.format(version=new_version)
-        content = content.replace(old_pattern, new_pattern)
-
-    if content != original_content:
-        file_path.write_text(content)
-        print(f"  ✅ Updated {file_path}")
-    else:
-        print(f"  ℹ️  No changes in {file_path}")
-
-
-def main():
-    """Main function."""
-    if len(sys.argv) != 2:
-        print(__doc__)
-        sys.exit(1)
-
-    bump_type = sys.argv[1]
-
-    # Get current version
-    try:
-        current = get_current_version()
-        print(f"Current version: {current}")
-    except Exception as e:
-        print(f"Error getting current version: {e}")
-        sys.exit(1)
-
-    # Calculate new version
-    try:
-        new = bump_version(current, bump_type)
-        print(f"New version: {new}")
-    except Exception as e:
-        print(f"Error calculating new version: {e}")
-        sys.exit(1)
-
-    # Update files
-    base_path = Path(__file__).parent.parent
-
-    files_to_update = [
-        (
-            base_path / "__init__.py",
-            ['__version__ = "{version}"']
-        ),
-        (
-            base_path / "setup.py",
-            ['version="{version}"']
-        ),
-        (
-            base_path / "pyproject.toml",
-            ['version = "{version}"']
-        ),
-    ]
-
-    print("\nUpdating files:")
-    for file_path, patterns in files_to_update:
-        update_file(file_path, current, new, patterns)
-
-    print(f"\n✨ Version bumped from {current} to {new}")
-    print("\nNext steps:")
-    print(f"  1. Update CHANGELOG.md with changes for v{new}")
-    print(f"  2. Commit: git commit -am 'Bump version to {new}'")
-    print(f"  3. Tag: git tag -a v{new} -m 'Release version {new}'")
-    print(f"  4. Push: git push origin main --tags")
-    print(f"  5. Build: python -m build")
-    print(f"  6. Upload: python -m twine upload dist/*")
-
-
-if __name__ == "__main__":
+#!/usr/bin/env python3
+"""
+Version bump utility for Zen Orchestrator.
+Updates version in all relevant files.
+
+Usage:
+    python scripts/bump_version.py patch   # 1.0.0 -> 1.0.1
+    python scripts/bump_version.py minor   # 1.0.0 -> 1.1.0
+    python scripts/bump_version.py major   # 1.0.0 -> 2.0.0
+    python scripts/bump_version.py 1.2.3   # Set specific version
+"""
+
+import re
+import sys
+from pathlib import Path
+from typing import Tuple
+
+
+def parse_version(version_str: str) -> Tuple[int, int, int]:
+    """Parse version string to tuple of integers."""
+    match = re.match(r'^(\d+)\.(\d+)\.(\d+)$', version_str)
+    if not match:
+        raise ValueError(f"Invalid version format: {version_str}")
+    return tuple(map(int, match.groups()))
+
+
+def format_version(version_tuple: Tuple[int, int, int]) -> str:
+    """Format version tuple to string."""
+    return '.'.join(map(str, version_tuple))
+
+
+def get_current_version() -> str:
+    """Get current version from __init__.py."""
+    init_file = Path(__file__).parent.parent / "__init__.py"
+    content = init_file.read_text()
+    match = re.search(r'__version__\s*=\s*["\']([^"\']+)["\']', content)
+    if not match:
+        raise ValueError("Could not find version in __init__.py")
+    return match.group(1)
+
+
+def bump_version(current: str, bump_type: str) -> str:
+    """Bump version based on type."""
+    if re.match(r'^\d+\.\d+\.\d+$', bump_type):
+        # Specific version provided
+        return bump_type
+
+    major, minor, patch = parse_version(current)
+
+    if bump_type == 'major':
+        return format_version((major + 1, 0, 0))
+    elif bump_type == 'minor':
+        return format_version((major, minor + 1, 0))
+    elif bump_type == 'patch':
+        return format_version((major, minor, patch + 1))
+    else:
+        raise ValueError(f"Invalid bump type: {bump_type}")
+
+
+def update_file(file_path: Path, old_version: str, new_version: str, patterns: list):
+    """Update version in a file using specified patterns."""
+    if not file_path.exists():
+        print(f"  ⚠️  {file_path} does not exist, skipping...")
+        return
+
+    content = file_path.read_text()
+    original_content = content
+
+    for pattern in patterns:
+        old_pattern = pattern.format(version=old_version)
+        new_pattern = pattern.format(version=new_version)
+        content = content.replace(old_pattern, new_pattern)
+
+    if content != original_content:
+        file_path.write_text(content)
+        print(f"  ✅ Updated {file_path}")
+    else:
+        print(f"  ℹ️  No changes in {file_path}")
+
+
+def main():
+    """Main function."""
+    if len(sys.argv) != 2:
+        print(__doc__)
+        sys.exit(1)
+
+    bump_type = sys.argv[1]
+
+    # Get current version
+    try:
+        current = get_current_version()
+        print(f"Current version: {current}")
+    except Exception as e:
+        print(f"Error getting current version: {e}")
+        sys.exit(1)
+
+    # Calculate new version
+    try:
+        new = bump_version(current, bump_type)
+        print(f"New version: {new}")
+    except Exception as e:
+        print(f"Error calculating new version: {e}")
+        sys.exit(1)
+
+    # Update files
+    base_path = Path(__file__).parent.parent
+
+    files_to_update = [
+        (
+            base_path / "__init__.py",
+            ['__version__ = "{version}"']
+        ),
+        (
+            base_path / "setup.py",
+            ['version="{version}"']
+        ),
+        (
+            base_path / "pyproject.toml",
+            ['version = "{version}"']
+        ),
+    ]
+
+    print("\nUpdating files:")
+    for file_path, patterns in files_to_update:
+        update_file(file_path, current, new, patterns)
+
+    print(f"\n✨ Version bumped from {current} to {new}")
+    print("\nNext steps:")
+    print(f"  1. Update CHANGELOG.md with changes for v{new}")
+    print(f"  2. Commit: git commit -am 'Bump version to {new}'")
+    print(f"  3. Tag: git tag -a v{new} -m 'Release version {new}'")
+    print(f"  4. Push: git push origin main --tags")
+    print(f"  5. Build: python -m build")
+    print(f"  6. Upload: python -m twine upload dist/*")
+
+
+if __name__ == "__main__":
     main()
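The removed and re-added blocks are textually identical, so the bump logic itself is unchanged: bump_version maps each keyword to a semantic-version increment and passes an explicit X.Y.Z through untouched. A quick illustrative check (assuming the script imports as scripts.bump_version):

    from scripts.bump_version import bump_version

    assert bump_version("1.0.7", "patch") == "1.0.8"   # increment patch
    assert bump_version("1.0.7", "minor") == "1.1.0"   # minor resets patch
    assert bump_version("1.0.7", "major") == "2.0.0"   # major resets minor and patch
    assert bump_version("1.0.7", "1.0.9") == "1.0.9"   # explicit version passes through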
scripts/verify_log_transmission.py ADDED
@@ -0,0 +1,140 @@
+#!/usr/bin/env python3
+"""
+Verification script to prove JSONL logs are bundled in payload
+"""
+
+import json
+import sys
+from pathlib import Path
+
+# Add parent directory to path
+sys.path.insert(0, str(Path(__file__).parent.parent))
+
+from scripts.agent_logs import collect_recent_logs
+
+
+def verify_log_bundling(log_path: str):
+    """
+    Verify that logs are properly collected and bundled
+
+    Args:
+        log_path: Path to JSONL file or directory
+    """
+    print("=" * 70)
+    print("JSONL LOG TRANSMISSION VERIFICATION")
+    print("=" * 70)
+    print()
+
+    # Step 1: Collect logs
+    print("Step 1: Collecting logs from file...")
+    result = collect_recent_logs(limit=1, base_path=log_path)
+
+    if not result:
+        print("❌ FAILED: No logs collected")
+        return False
+
+    logs, files_read, file_info = result
+    print(f"✓ Successfully collected {len(logs)} log entries from {files_read} file(s)")
+    print()
+
+    # Step 2: Show file details
+    print("Step 2: File details...")
+    for info in file_info:
+        print(f"  File: {info['name']}")
+        print(f"  Hash: {info['hash']}")
+        print(f"  Entries: {info['entries']}")
+    print()
+
+    # Step 3: Simulate payload creation
+    print("Step 3: Simulating WebSocket payload creation...")
+    payload = {
+        "type": "message_create",
+        "run_id": "test-run-id",
+        "payload": {
+            "message": "Test message with logs",
+            "jsonl_logs": logs  # This is where logs are added
+        }
+    }
+
+    print(f"✓ Payload created with 'jsonl_logs' key")
+    print(f"  Payload keys: {list(payload['payload'].keys())}")
+    print()
+
+    # Step 4: Verify payload size
+    print("Step 4: Calculating payload size...")
+    payload_json = json.dumps(payload)
+    payload_size_bytes = len(payload_json.encode('utf-8'))
+    payload_size_kb = payload_size_bytes / 1024
+    payload_size_mb = payload_size_kb / 1024
+
+    if payload_size_mb >= 1:
+        size_str = f"{payload_size_mb:.2f} MB"
+    elif payload_size_kb >= 1:
+        size_str = f"{payload_size_kb:.2f} KB"
+    else:
+        size_str = f"{payload_size_bytes} bytes"
+
+    print(f"✓ Total payload size: {size_str}")
+    print()
+
+    # Step 5: Show sample log entries
+    print("Step 5: Sample log entries in payload...")
+    if logs:
+        print(f"  First entry keys: {list(logs[0].keys())}")
+        print(f"  First entry timestamp: {logs[0].get('timestamp', 'N/A')}")
+        print(f"  Last entry timestamp: {logs[-1].get('timestamp', 'N/A')}")
+    print()
+
+    # Step 6: Verify transmission-ready
+    print("Step 6: Transmission verification...")
+    print(f"✓ Payload is valid JSON: {payload_json is not None}")
+    print(f"✓ Payload contains 'jsonl_logs': {'jsonl_logs' in payload['payload']}")
+    print(f"✓ Log count in payload: {len(payload['payload']['jsonl_logs'])}")
+    print()
+
+    print("=" * 70)
+    print("✅ VERIFICATION COMPLETE")
+    print("=" * 70)
+    print()
+    print("PROOF OF TRANSMISSION:")
+    print(f"  • {len(logs)} JSONL log entries are bundled in the payload")
+    print(f"  • Payload size: {size_str}")
+    print(f"  • Ready for WebSocket transmission to backend")
+    print()
+
+    # Optional: Save proof file
+    proof_file = Path("/tmp/zen_transmission_proof.json")
+    proof_payload = {
+        "verification_timestamp": "verification_run",
+        "log_count": len(logs),
+        "files_read": files_read,
+        "file_info": file_info,
+        "payload_size": size_str,
+        "sample_first_entry": logs[0] if logs else None,
+        "sample_last_entry": logs[-1] if logs else None,
+        "payload_structure": {
+            "type": payload["type"],
+            "run_id": payload["run_id"],
+            "payload_keys": list(payload["payload"].keys()),
+            "jsonl_logs_present": "jsonl_logs" in payload["payload"],
+            "jsonl_logs_count": len(payload["payload"]["jsonl_logs"])
+        }
+    }
+
+    with open(proof_file, 'w') as f:
+        json.dump(proof_payload, f, indent=2)
+
+    print(f"📝 Detailed proof saved to: {proof_file}")
+    print()
+
+    return True
+
+
+if __name__ == "__main__":
+    if len(sys.argv) < 2:
+        print("Usage: python verify_log_transmission.py <path-to-jsonl-file>")
+        sys.exit(1)
+
+    log_path = sys.argv[1]
+    success = verify_log_bundling(log_path)
+    sys.exit(0 if success else 1)
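Besides running it from the command line as its usage string shows, the verifier can be driven from Python. A hypothetical call, assuming the file lands at scripts/verify_log_transmission.py and using an illustrative log path:

    from scripts.verify_log_transmission import verify_log_bundling

    # Returns True when the logs were collected and bundled into the payload.
    ok = verify_log_bundling("/path/to/session.jsonl")  # hypothetical log file
    raise SystemExit(0 if ok else 1)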