cite-agent 1.3.7__tar.gz → 1.3.8__tar.gz
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Potentially problematic release.
This version of cite-agent might be problematic.
- {cite_agent-1.3.7/cite_agent.egg-info → cite_agent-1.3.8}/PKG-INFO +1 -1
- cite_agent-1.3.8/cite_agent/__version__.py +1 -0
- {cite_agent-1.3.7 → cite_agent-1.3.8}/cite_agent/enhanced_ai_agent.py +770 -6
- {cite_agent-1.3.7 → cite_agent-1.3.8/cite_agent.egg-info}/PKG-INFO +1 -1
- {cite_agent-1.3.7 → cite_agent-1.3.8}/setup.py +1 -1
- cite_agent-1.3.7/cite_agent/__version__.py +0 -1
- {cite_agent-1.3.7 → cite_agent-1.3.8}/LICENSE +0 -0
- {cite_agent-1.3.7 → cite_agent-1.3.8}/MANIFEST.in +0 -0
- {cite_agent-1.3.7 → cite_agent-1.3.8}/README.md +0 -0
- {cite_agent-1.3.7 → cite_agent-1.3.8}/cite_agent/__init__.py +0 -0
- {cite_agent-1.3.7 → cite_agent-1.3.8}/cite_agent/__main__.py +0 -0
- {cite_agent-1.3.7 → cite_agent-1.3.8}/cite_agent/account_client.py +0 -0
- {cite_agent-1.3.7 → cite_agent-1.3.8}/cite_agent/agent_backend_only.py +0 -0
- {cite_agent-1.3.7 → cite_agent-1.3.8}/cite_agent/ascii_plotting.py +0 -0
- {cite_agent-1.3.7 → cite_agent-1.3.8}/cite_agent/auth.py +0 -0
- {cite_agent-1.3.7 → cite_agent-1.3.8}/cite_agent/backend_only_client.py +0 -0
- {cite_agent-1.3.7 → cite_agent-1.3.8}/cite_agent/cli.py +0 -0
- {cite_agent-1.3.7 → cite_agent-1.3.8}/cite_agent/cli_conversational.py +0 -0
- {cite_agent-1.3.7 → cite_agent-1.3.8}/cite_agent/cli_enhanced.py +0 -0
- {cite_agent-1.3.7 → cite_agent-1.3.8}/cite_agent/cli_workflow.py +0 -0
- {cite_agent-1.3.7 → cite_agent-1.3.8}/cite_agent/dashboard.py +0 -0
- {cite_agent-1.3.7 → cite_agent-1.3.8}/cite_agent/project_detector.py +0 -0
- {cite_agent-1.3.7 → cite_agent-1.3.8}/cite_agent/rate_limiter.py +0 -0
- {cite_agent-1.3.7 → cite_agent-1.3.8}/cite_agent/session_manager.py +0 -0
- {cite_agent-1.3.7 → cite_agent-1.3.8}/cite_agent/setup_config.py +0 -0
- {cite_agent-1.3.7 → cite_agent-1.3.8}/cite_agent/streaming_ui.py +0 -0
- {cite_agent-1.3.7 → cite_agent-1.3.8}/cite_agent/telemetry.py +0 -0
- {cite_agent-1.3.7 → cite_agent-1.3.8}/cite_agent/ui.py +0 -0
- {cite_agent-1.3.7 → cite_agent-1.3.8}/cite_agent/updater.py +0 -0
- {cite_agent-1.3.7 → cite_agent-1.3.8}/cite_agent/web_search.py +0 -0
- {cite_agent-1.3.7 → cite_agent-1.3.8}/cite_agent/workflow.py +0 -0
- {cite_agent-1.3.7 → cite_agent-1.3.8}/cite_agent/workflow_integration.py +0 -0
- {cite_agent-1.3.7 → cite_agent-1.3.8}/cite_agent.egg-info/SOURCES.txt +0 -0
- {cite_agent-1.3.7 → cite_agent-1.3.8}/cite_agent.egg-info/dependency_links.txt +0 -0
- {cite_agent-1.3.7 → cite_agent-1.3.8}/cite_agent.egg-info/entry_points.txt +0 -0
- {cite_agent-1.3.7 → cite_agent-1.3.8}/cite_agent.egg-info/requires.txt +0 -0
- {cite_agent-1.3.7 → cite_agent-1.3.8}/cite_agent.egg-info/top_level.txt +0 -0
- {cite_agent-1.3.7 → cite_agent-1.3.8}/docs/BETA_LAUNCH_CHECKLIST.md +0 -0
- {cite_agent-1.3.7 → cite_agent-1.3.8}/docs/BETA_RELEASE_CHECKLIST.md +0 -0
- {cite_agent-1.3.7 → cite_agent-1.3.8}/docs/ENHANCED_CAPABILITIES.md +0 -0
- {cite_agent-1.3.7 → cite_agent-1.3.8}/docs/GROQ_RATE_LIMITS.md +0 -0
- {cite_agent-1.3.7 → cite_agent-1.3.8}/docs/INSTALL.md +0 -0
- {cite_agent-1.3.7 → cite_agent-1.3.8}/docs/PUBLISHING_PYPI.md +0 -0
- {cite_agent-1.3.7 → cite_agent-1.3.8}/docs/SECURE_PACKAGING_GUIDE.md +0 -0
- {cite_agent-1.3.7 → cite_agent-1.3.8}/docs/SECURITY_AUDIT.md +0 -0
- {cite_agent-1.3.7 → cite_agent-1.3.8}/docs/USER_GETTING_STARTED.md +0 -0
- {cite_agent-1.3.7 → cite_agent-1.3.8}/docs/playbooks/BETA_LAUNCH_PLAYBOOK.md +0 -0
- {cite_agent-1.3.7 → cite_agent-1.3.8}/requirements.txt +0 -0
- {cite_agent-1.3.7 → cite_agent-1.3.8}/setup.cfg +0 -0
- {cite_agent-1.3.7 → cite_agent-1.3.8}/tests/beta_launch_test_suite.py +0 -0
- {cite_agent-1.3.7 → cite_agent-1.3.8}/tests/enhanced/test_account_client.py +0 -0
- {cite_agent-1.3.7 → cite_agent-1.3.8}/tests/enhanced/test_archive_agent.py +0 -0
- {cite_agent-1.3.7 → cite_agent-1.3.8}/tests/enhanced/test_enhanced_agent_runtime.py +0 -0
- {cite_agent-1.3.7 → cite_agent-1.3.8}/tests/enhanced/test_reasoning_engine.py +0 -0
- {cite_agent-1.3.7 → cite_agent-1.3.8}/tests/enhanced/test_setup_config.py +0 -0
- {cite_agent-1.3.7 → cite_agent-1.3.8}/tests/enhanced/test_tool_framework.py +0 -0
- {cite_agent-1.3.7 → cite_agent-1.3.8}/tests/integration_test.py +0 -0
- {cite_agent-1.3.7 → cite_agent-1.3.8}/tests/session_affirmation.py +0 -0
- {cite_agent-1.3.7 → cite_agent-1.3.8}/tests/test_cli_direct.py +0 -0
- {cite_agent-1.3.7 → cite_agent-1.3.8}/tests/test_end_to_end.py +0 -0
- {cite_agent-1.3.7 → cite_agent-1.3.8}/tests/test_setup_flow.py +0 -0
- {cite_agent-1.3.7 → cite_agent-1.3.8}/tests/test_truth_seeking_comprehensive.py +0 -0
- {cite_agent-1.3.7 → cite_agent-1.3.8}/tests/test_version_1.0.4.py +0 -0
- {cite_agent-1.3.7 → cite_agent-1.3.8}/tests/validation/test_accuracy_system.py +0 -0
- {cite_agent-1.3.7 → cite_agent-1.3.8}/tests/validation/test_agent_live.py +0 -0
- {cite_agent-1.3.7 → cite_agent-1.3.8}/tests/validation/test_backend_local.py +0 -0
- {cite_agent-1.3.7 → cite_agent-1.3.8}/tests/validation/test_cerebras_comparison.py +0 -0
- {cite_agent-1.3.7 → cite_agent-1.3.8}/tests/validation/test_improved_prompt.py +0 -0
- {cite_agent-1.3.7 → cite_agent-1.3.8}/tests/validation/test_qualitative_robustness.py +0 -0
- {cite_agent-1.3.7 → cite_agent-1.3.8}/tests/validation/test_qualitative_system.py +0 -0
- {cite_agent-1.3.7 → cite_agent-1.3.8}/tests/validation/test_truth_seeking_chinese.py +0 -0
- {cite_agent-1.3.7 → cite_agent-1.3.8}/tests/validation/test_truth_seeking_comprehensive.py +0 -0
- {cite_agent-1.3.7 → cite_agent-1.3.8}/tests/validation/test_truth_seeking_real.py +0 -0
cite_agent-1.3.8/cite_agent/__version__.py

@@ -0,0 +1 @@
+__version__ = "1.3.8"
{cite_agent-1.3.7 → cite_agent-1.3.8}/cite_agent/enhanced_ai_agent.py

@@ -1003,7 +1003,17 @@ class EnhancedNocturnalAgent:
         capability_lines.append("• You can SEARCH user's paper collection")
         capability_lines.append("• You can COPY text to user's clipboard")
         capability_lines.append("• User's query history is automatically tracked")
-
+
+        # Add file operation capabilities (Claude Code / Cursor parity)
+        capability_lines.append("")
+        capability_lines.append("📁 DIRECT FILE OPERATIONS (Always available):")
+        capability_lines.append("• read_file(path) - Read files with line numbers (like cat but better)")
+        capability_lines.append("• write_file(path, content) - Create/overwrite files directly")
+        capability_lines.append("• edit_file(path, old, new) - Surgical find/replace edits")
+        capability_lines.append("• glob_search(pattern) - Fast file search (e.g., '**/*.py')")
+        capability_lines.append("• grep_search(pattern) - Fast content search in files")
+        capability_lines.append("• batch_edit_files(edits) - Multi-file refactoring")
+
         sections.append("Capabilities in play:\n" + "\n".join(capability_lines))
 
         # ENHANCED TRUTH-SEEKING RULES (adapt based on mode)
@@ -1098,6 +1108,48 @@ class EnhancedNocturnalAgent:
             "• Example: 'I found 3 papers. I can save them to your library or export to BibTeX if you'd like.'",
         ]
         rules.extend(workflow_rules)
+
+        # Add file operation tool usage rules (CRITICAL for Claude Code parity)
+        file_ops_rules = [
+            "",
+            "📁 FILE OPERATION TOOL USAGE (Use these INSTEAD of shell commands):",
+            "",
+            "🔴 ALWAYS PREFER (in order):",
+            "1. read_file(path) → INSTEAD OF: cat, head, tail",
+            "2. write_file(path, content) → INSTEAD OF: echo >, cat << EOF, printf >",
+            "3. edit_file(path, old, new) → INSTEAD OF: sed, awk",
+            "4. glob_search(pattern, path) → INSTEAD OF: find, ls",
+            "5. grep_search(pattern, path, file_pattern) → INSTEAD OF: grep -r",
+            "",
+            "✅ CORRECT USAGE:",
+            "• Reading code: result = read_file('app.py')",
+            "• Creating file: write_file('config.json', '{...}')",
+            "• Editing code: edit_file('main.py', 'old_var', 'new_var', replace_all=True)",
+            "• Finding files: glob_search('**/*.py', '/home/user/project')",
+            "• Searching code: grep_search('class.*Agent', '.', '*.py', output_mode='content')",
+            "• Multi-file refactor: batch_edit_files([{file: 'a.py', old: '...', new: '...'}, ...])",
+            "",
+            "❌ ANTI-PATTERNS (Don't do these):",
+            "• DON'T use cat when read_file exists",
+            "• DON'T use echo > when write_file exists",
+            "• DON'T use sed when edit_file exists",
+            "• DON'T use find when glob_search exists",
+            "• DON'T use grep -r when grep_search exists",
+            "",
+            "🎯 WHY USE THESE TOOLS:",
+            "• read_file() shows line numbers (critical for code analysis)",
+            "• write_file() handles escaping/quoting automatically (no heredoc hell)",
+            "• edit_file() validates changes before applying (safer than sed)",
+            "• glob_search() is faster and cleaner than find",
+            "• grep_search() returns structured data (easier to parse)",
+            "",
+            "⚠️ SHELL COMMANDS ONLY FOR:",
+            "• System operations (ps, df, du, uptime)",
+            "• Git commands (git status, git diff, git log)",
+            "• Package installs (pip install, Rscript -e \"install.packages(...)\")",
+            "• Running Python/R scripts (python script.py, Rscript analysis.R)",
+        ]
+        rules.extend(file_ops_rules)
 
         sections.append("CRITICAL RULES:\n" + "\n".join(rules))
 
@@ -2191,10 +2243,478 @@ class EnhancedNocturnalAgent:
 
             output = '\n'.join(output_lines).strip()
             return output if output else "Command executed (no output)"
-
+
         except Exception as e:
             return f"ERROR: {e}"
 
+    # ========================================================================
+    # DIRECT FILE OPERATIONS (Claude Code / Cursor Parity)
+    # ========================================================================
+
+    def read_file(self, file_path: str, offset: int = 0, limit: int = 2000) -> str:
+        """
+        Read file with line numbers (like Claude Code's Read tool)
+
+        Args:
+            file_path: Path to file
+            offset: Starting line number (0-indexed)
+            limit: Maximum number of lines to read
+
+        Returns:
+            File contents with line numbers in format: " 123→content"
+        """
+        try:
+            # Expand ~ to home directory
+            file_path = os.path.expanduser(file_path)
+
+            # Make absolute if relative
+            if not os.path.isabs(file_path):
+                file_path = os.path.abspath(file_path)
+
+            with open(file_path, 'r', encoding='utf-8', errors='replace') as f:
+                lines = f.readlines()
+
+            # Apply offset and limit
+            if offset or limit:
+                lines = lines[offset:offset+limit if limit else None]
+
+            # Format with line numbers (1-indexed, like vim/editors)
+            numbered_lines = [
+                f"{offset+i+1:6d}→{line.rstrip()}\n"
+                for i, line in enumerate(lines)
+            ]
+
+            result = ''.join(numbered_lines)
+
+            # Update file context
+            self.file_context['last_file'] = file_path
+            if file_path not in self.file_context['recent_files']:
+                self.file_context['recent_files'].append(file_path)
+                self.file_context['recent_files'] = self.file_context['recent_files'][-5:]
+
+            return result if result else "(empty file)"
+
+        except FileNotFoundError:
+            return f"ERROR: File not found: {file_path}"
+        except PermissionError:
+            return f"ERROR: Permission denied: {file_path}"
+        except IsADirectoryError:
+            return f"ERROR: {file_path} is a directory, not a file"
+        except Exception as e:
+            return f"ERROR: {type(e).__name__}: {e}"
+
+    def write_file(self, file_path: str, content: str) -> Dict[str, Any]:
+        """
+        Write file directly (like Claude Code's Write tool)
+        Creates new file or overwrites existing one.
+
+        Args:
+            file_path: Path to file
+            content: Full file content
+
+        Returns:
+            {"success": bool, "message": str, "bytes_written": int}
+        """
+        try:
+            # Expand ~ to home directory
+            file_path = os.path.expanduser(file_path)
+
+            # Make absolute if relative
+            if not os.path.isabs(file_path):
+                file_path = os.path.abspath(file_path)
+
+            # Create parent directories if needed
+            parent_dir = os.path.dirname(file_path)
+            if parent_dir and not os.path.exists(parent_dir):
+                os.makedirs(parent_dir, exist_ok=True)
+
+            # Write file
+            with open(file_path, 'w', encoding='utf-8') as f:
+                bytes_written = f.write(content)
+
+            # Update file context
+            self.file_context['last_file'] = file_path
+            if file_path not in self.file_context['recent_files']:
+                self.file_context['recent_files'].append(file_path)
+                self.file_context['recent_files'] = self.file_context['recent_files'][-5:]
+
+            return {
+                "success": True,
+                "message": f"Wrote {bytes_written} bytes to {file_path}",
+                "bytes_written": bytes_written
+            }
+
+        except PermissionError:
+            return {
+                "success": False,
+                "message": f"ERROR: Permission denied: {file_path}",
+                "bytes_written": 0
+            }
+        except Exception as e:
+            return {
+                "success": False,
+                "message": f"ERROR: {type(e).__name__}: {e}",
+                "bytes_written": 0
+            }
+
+    def edit_file(self, file_path: str, old_string: str, new_string: str,
+                  replace_all: bool = False) -> Dict[str, Any]:
+        """
+        Surgical file edit (like Claude Code's Edit tool)
+
+        Args:
+            file_path: Path to file
+            old_string: Exact string to replace (must be unique unless replace_all=True)
+            new_string: Replacement string
+            replace_all: If True, replace all occurrences. If False, old_string must be unique.
+
+        Returns:
+            {"success": bool, "message": str, "replacements": int}
+        """
+        try:
+            # Expand ~ to home directory
+            file_path = os.path.expanduser(file_path)
+
+            # Make absolute if relative
+            if not os.path.isabs(file_path):
+                file_path = os.path.abspath(file_path)
+
+            # Read file
+            with open(file_path, 'r', encoding='utf-8', errors='replace') as f:
+                content = f.read()
+
+            # Check if old_string exists
+            if old_string not in content:
+                return {
+                    "success": False,
+                    "message": f"ERROR: old_string not found in {file_path}",
+                    "replacements": 0
+                }
+
+            # Check uniqueness if not replace_all
+            occurrences = content.count(old_string)
+            if not replace_all and occurrences > 1:
+                return {
+                    "success": False,
+                    "message": f"ERROR: old_string appears {occurrences} times in {file_path}. Use replace_all=True or provide more context to make it unique.",
+                    "replacements": 0
+                }
+
+            # Perform replacement
+            if replace_all:
+                new_content = content.replace(old_string, new_string)
+            else:
+                new_content = content.replace(old_string, new_string, 1)
+
+            # Write back
+            with open(file_path, 'w', encoding='utf-8') as f:
+                f.write(new_content)
+
+            # Update file context
+            self.file_context['last_file'] = file_path
+
+            return {
+                "success": True,
+                "message": f"Replaced {occurrences if replace_all else 1} occurrence(s) in {file_path}",
+                "replacements": occurrences if replace_all else 1
+            }
+
+        except FileNotFoundError:
+            return {
+                "success": False,
+                "message": f"ERROR: File not found: {file_path}",
+                "replacements": 0
+            }
+        except PermissionError:
+            return {
+                "success": False,
+                "message": f"ERROR: Permission denied: {file_path}",
+                "replacements": 0
+            }
+        except Exception as e:
+            return {
+                "success": False,
+                "message": f"ERROR: {type(e).__name__}: {e}",
+                "replacements": 0
+            }
+
+    def glob_search(self, pattern: str, path: str = ".") -> Dict[str, Any]:
+        """
+        Fast file pattern matching (like Claude Code's Glob tool)
+
+        Args:
+            pattern: Glob pattern (e.g., "*.py", "**/*.md", "src/**/*.ts")
+            path: Starting directory (default: current directory)
+
+        Returns:
+            {"files": List[str], "count": int, "pattern": str}
+        """
+        try:
+            import glob as glob_module
+
+            # Expand ~ to home directory
+            path = os.path.expanduser(path)
+
+            # Make absolute if relative
+            if not os.path.isabs(path):
+                path = os.path.abspath(path)
+
+            # Combine path and pattern
+            full_pattern = os.path.join(path, pattern)
+
+            # Find matches (recursive if ** in pattern)
+            matches = glob_module.glob(full_pattern, recursive=True)
+
+            # Filter to files only (not directories)
+            files = [f for f in matches if os.path.isfile(f)]
+
+            # Sort by modification time (newest first)
+            files.sort(key=lambda f: os.path.getmtime(f), reverse=True)
+
+            return {
+                "files": files,
+                "count": len(files),
+                "pattern": full_pattern
+            }
+
+        except Exception as e:
+            return {
+                "files": [],
+                "count": 0,
+                "pattern": pattern,
+                "error": f"{type(e).__name__}: {e}"
+            }
+
+    def grep_search(self, pattern: str, path: str = ".",
+                    file_pattern: str = "*",
+                    output_mode: str = "files_with_matches",
+                    context_lines: int = 0,
+                    ignore_case: bool = False,
+                    max_results: int = 100) -> Dict[str, Any]:
+        """
+        Fast content search (like Claude Code's Grep tool / ripgrep)
+
+        Args:
+            pattern: Regex pattern to search for
+            path: Directory to search in
+            file_pattern: Glob pattern for files to search (e.g., "*.py")
+            output_mode: "files_with_matches", "content", or "count"
+            context_lines: Lines of context around matches
+            ignore_case: Case-insensitive search
+            max_results: Maximum number of results to return
+
+        Returns:
+            Depends on output_mode:
+            - files_with_matches: {"files": List[str], "count": int}
+            - content: {"matches": {file: [(line_num, line_content), ...]}}
+            - count: {"counts": {file: match_count}}
+        """
+        try:
+            import re
+
+            # Expand ~ to home directory
+            path = os.path.expanduser(path)
+
+            # Make absolute if relative
+            if not os.path.isabs(path):
+                path = os.path.abspath(path)
+
+            # Compile regex
+            flags = re.IGNORECASE if ignore_case else 0
+            regex = re.compile(pattern, flags)
+
+            # Find files to search
+            glob_result = self.glob_search(file_pattern, path)
+            files_to_search = glob_result["files"]
+
+            # Search each file
+            if output_mode == "files_with_matches":
+                matching_files = []
+                for file_path in files_to_search[:max_results]:
+                    try:
+                        with open(file_path, 'r', encoding='utf-8', errors='replace') as f:
+                            content = f.read()
+                        if regex.search(content):
+                            matching_files.append(file_path)
+                    except:
+                        continue
+
+                return {
+                    "files": matching_files,
+                    "count": len(matching_files),
+                    "pattern": pattern
+                }
+
+            elif output_mode == "content":
+                matches = {}
+                for file_path in files_to_search:
+                    try:
+                        with open(file_path, 'r', encoding='utf-8', errors='replace') as f:
+                            lines = f.readlines()
+
+                        file_matches = []
+                        for line_num, line in enumerate(lines, 1):
+                            if regex.search(line):
+                                file_matches.append((line_num, line.rstrip()))
+
+                            if len(file_matches) >= max_results:
+                                break
+
+                        if file_matches:
+                            matches[file_path] = file_matches
+                    except:
+                        continue
+
+                return {
+                    "matches": matches,
+                    "file_count": len(matches),
+                    "pattern": pattern
+                }
+
+            elif output_mode == "count":
+                counts = {}
+                for file_path in files_to_search:
+                    try:
+                        with open(file_path, 'r', encoding='utf-8', errors='replace') as f:
+                            content = f.read()
+
+                        match_count = len(regex.findall(content))
+                        if match_count > 0:
+                            counts[file_path] = match_count
+                    except:
+                        continue
+
+                return {
+                    "counts": counts,
+                    "total_matches": sum(counts.values()),
+                    "pattern": pattern
+                }
+
+            else:
+                return {
+                    "error": f"Invalid output_mode: {output_mode}. Use 'files_with_matches', 'content', or 'count'."
+                }
+
+        except re.error as e:
+            return {
+                "error": f"Invalid regex pattern: {e}"
+            }
+        except Exception as e:
+            return {
+                "error": f"{type(e).__name__}: {e}"
+            }
+
+    async def batch_edit_files(self, edits: List[Dict[str, str]]) -> Dict[str, Any]:
+        """
+        Apply multiple file edits atomically (all-or-nothing)
+
+        Args:
+            edits: List of edit operations:
+                [
+                    {"file": "path.py", "old": "...", "new": "..."},
+                    {"file": "other.py", "old": "...", "new": "...", "replace_all": True},
+                    ...
+                ]
+
+        Returns:
+            {
+                "success": bool,
+                "results": {file: {"success": bool, "message": str, "replacements": int}},
+                "total_edits": int,
+                "failed_edits": int
+            }
+        """
+        try:
+            results = {}
+
+            # Phase 1: Validate all edits
+            for edit in edits:
+                file_path = edit["file"]
+                old_string = edit["old"]
+                replace_all = edit.get("replace_all", False)
+
+                # Expand path
+                file_path = os.path.expanduser(file_path)
+                if not os.path.isabs(file_path):
+                    file_path = os.path.abspath(file_path)
+
+                # Check file exists
+                if not os.path.exists(file_path):
+                    return {
+                        "success": False,
+                        "results": {},
+                        "total_edits": 0,
+                        "failed_edits": len(edits),
+                        "error": f"Validation failed: {file_path} not found. No edits applied."
+                    }
+
+                # Check old_string exists
+                try:
+                    with open(file_path, 'r', encoding='utf-8', errors='replace') as f:
+                        content = f.read()
+
+                    if old_string not in content:
+                        return {
+                            "success": False,
+                            "results": {},
+                            "total_edits": 0,
+                            "failed_edits": len(edits),
+                            "error": f"Validation failed: Pattern not found in {file_path}. No edits applied."
+                        }
+
+                    # Check uniqueness if not replace_all
+                    if not replace_all and content.count(old_string) > 1:
+                        return {
+                            "success": False,
+                            "results": {},
+                            "total_edits": 0,
+                            "failed_edits": len(edits),
+                            "error": f"Validation failed: Pattern appears {content.count(old_string)} times in {file_path}. Use replace_all or provide more context. No edits applied."
+                        }
+                except Exception as e:
+                    return {
+                        "success": False,
+                        "results": {},
+                        "total_edits": 0,
+                        "failed_edits": len(edits),
+                        "error": f"Validation failed reading {file_path}: {e}. No edits applied."
+                    }
+
+            # Phase 2: Apply all edits (validation passed)
+            for edit in edits:
+                file_path = edit["file"]
+                old_string = edit["old"]
+                new_string = edit["new"]
+                replace_all = edit.get("replace_all", False)
+
+                result = self.edit_file(file_path, old_string, new_string, replace_all)
+                results[file_path] = result
+
+            # Count successes/failures
+            successful_edits = sum(1 for r in results.values() if r["success"])
+            failed_edits = len(edits) - successful_edits
+
+            return {
+                "success": failed_edits == 0,
+                "results": results,
+                "total_edits": len(edits),
+                "successful_edits": successful_edits,
+                "failed_edits": failed_edits
+            }
+
+        except Exception as e:
+            return {
+                "success": False,
+                "results": {},
+                "total_edits": 0,
+                "failed_edits": len(edits),
+                "error": f"Batch edit failed: {type(e).__name__}: {e}"
+            }
+
+    # ========================================================================
+    # END DIRECT FILE OPERATIONS
+    # ========================================================================
+
     def _classify_command_safety(self, cmd: str) -> str:
         """
         Classify command by safety level for smart execution.
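The hunk above adds five synchronous file helpers plus one async batch helper to the agent class. A minimal usage sketch based only on the signatures in this hunk — `agent` stands for an already-constructed EnhancedNocturnalAgent, whose constructor is outside this diff, so treat that as an assumption:

# Sketch only: assumes `agent` is an existing EnhancedNocturnalAgent instance.
print(agent.read_file("app.py", offset=0, limit=50))            # numbered "     1→..." lines
created = agent.write_file("notes/summary.md", "# Findings\n")  # dict with success/message/bytes_written
print(created["message"])
edited = agent.edit_file("main.py", "old_var", "new_var", replace_all=True)
print(edited["replacements"])
py_files = agent.glob_search("**/*.py", ".")["files"]           # newest-first list of matching files
hits = agent.grep_search(r"class .*Agent", ".", "*.py", output_mode="content")

# batch_edit_files is declared async, so it has to run in an event loop:
# await agent.batch_edit_files([{"file": "a.py", "old": "...", "new": "..."}])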
@@ -2795,6 +3315,8 @@ IMPORTANT RULES:
 8. For creating files: touch filename OR echo "content" > filename
 9. For creating directories: mkdir dirname
 10. ALWAYS include 2>/dev/null to suppress errors from find
+11. 🚨 MULTI-STEP QUERIES: For queries like "read X and do Y", ONLY generate the FIRST step (reading X). The LLM will handle subsequent steps after seeing the file contents.
+12. 🚨 NEVER use python -m py_compile or other code execution for finding bugs - just read the file with cat/head
 
 Examples:
 "where am i?" → {{"action": "execute", "command": "pwd", "reason": "Show current directory", "updates_context": false}}
@@ -2804,7 +3326,13 @@ Examples:
 "show me calc.R" → {{"action": "execute", "command": "head -100 calc.R", "reason": "Display file contents", "updates_context": true}}
 "create test directory" → {{"action": "execute", "command": "mkdir test && echo 'Created test/'", "reason": "Create new directory", "updates_context": true}}
 "create empty config.json" → {{"action": "execute", "command": "touch config.json && echo 'Created config.json'", "reason": "Create empty file", "updates_context": true}}
+"write hello.txt with content Hello World" → {{"action": "execute", "command": "echo 'Hello World' > hello.txt", "reason": "Create file with content", "updates_context": true}}
+"create results.txt with line 1 and line 2" → {{"action": "execute", "command": "echo 'line 1' > results.txt && echo 'line 2' >> results.txt", "reason": "Create file with multiple lines", "updates_context": true}}
+"fix bug in script.py change OLD to NEW" → {{"action": "execute", "command": "sed -i 's/OLD/NEW/g' script.py && echo 'Fixed script.py'", "reason": "Edit file to fix bug", "updates_context": true}}
 "search for TODO in py files" → {{"action": "execute", "command": "grep -n 'TODO' *.py 2>/dev/null", "reason": "Find TODO comments", "updates_context": false}}
+"find all bugs in code" → {{"action": "execute", "command": "grep -rn 'BUG:' . 2>/dev/null", "reason": "Search for bug markers in code", "updates_context": false}}
+"read analyze.py and find bugs" → {{"action": "execute", "command": "head -200 analyze.py", "reason": "Read file to analyze bugs", "updates_context": false}}
+"show me calc.py completely" → {{"action": "execute", "command": "cat calc.py", "reason": "Display entire file", "updates_context": false}}
 "git status" → {{"action": "execute", "command": "git status", "reason": "Check repository status", "updates_context": false}}
 "what's in that file?" + last_file=data.csv → {{"action": "execute", "command": "head -100 data.csv", "reason": "Show file contents", "updates_context": false}}
 "hello" → {{"action": "none", "reason": "Conversational greeting, no command needed"}}
@@ -2851,8 +3379,211 @@ JSON:"""
                 "reason": "This command could cause system damage"
             }
         else:
-            #
-
+            # ========================================
+            # COMMAND INTERCEPTOR: Translate shell commands to file operations
+            # (Claude Code / Cursor parity)
+            # ========================================
+            intercepted = False
+            output = ""
+
+            # Check for file reading commands (cat, head, tail)
+            if command.startswith(('cat ', 'head ', 'tail ')):
+                import shlex
+                try:
+                    parts = shlex.split(command)
+                    cmd = parts[0]
+
+                    # Extract filename (last non-flag argument)
+                    filename = None
+                    for part in reversed(parts[1:]):
+                        if not part.startswith('-'):
+                            filename = part
+                            break
+
+                    if filename:
+                        # Use read_file instead of cat/head/tail
+                        if cmd == 'head':
+                            # head -n 100 file OR head file
+                            limit = 100  # default
+                            if '-n' in parts or '-' in parts[0]:
+                                try:
+                                    idx = parts.index('-n') if '-n' in parts else 0
+                                    limit = int(parts[idx + 1])
+                                except:
+                                    pass
+                            output = self.read_file(filename, offset=0, limit=limit)
+                        elif cmd == 'tail':
+                            # For tail, read last N lines (harder, so just read all and show it's tail)
+                            output = self.read_file(filename)
+                            if "ERROR" not in output:
+                                lines = output.split('\n')
+                                output = '\n'.join(lines[-100:])  # last 100 lines
+                        else:  # cat
+                            output = self.read_file(filename)
+
+                        intercepted = True
+                        tools_used.append("read_file")
+                        if debug_mode:
+                            print(f"🔄 Intercepted: {command} → read_file({filename})")
+                except:
+                    pass  # Fall back to shell execution
+
+            # Check for file search commands (find)
+            if not intercepted and 'find' in command and '-name' in command:
+                try:
+                    import re
+                    # Extract pattern: find ... -name '*pattern*'
+                    name_match = re.search(r"-name\s+['\"]?\*?([^'\"*\s]+)\*?['\"]?", command)
+                    if name_match:
+                        pattern = f"**/*{name_match.group(1)}*"
+                        path_match = re.search(r"find\s+([^\s]+)", command)
+                        search_path = path_match.group(1) if path_match else "."
+
+                        result = self.glob_search(pattern, search_path)
+                        output = '\n'.join(result['files'][:20])  # Show first 20 matches
+                        intercepted = True
+                        tools_used.append("glob_search")
+                        if debug_mode:
+                            print(f"🔄 Intercepted: {command} → glob_search({pattern}, {search_path})")
+                except:
+                    pass
+
+            # Check for file writing commands (echo > file, grep > file, etc.) - CHECK THIS FIRST!
+            # This must come BEFORE the plain grep interceptor
+            if not intercepted and ('>' in command or '>>' in command):
+                try:
+                    import re
+
+                    # Handle grep ... > file (intercept and execute grep, then write output)
+                    if 'grep' in command and '>' in command:
+                        # Extract: grep -rn 'pattern' path > output.txt
+                        grep_match = re.search(r"grep\s+(.*)>\s*(\S+)", command)
+                        if grep_match:
+                            grep_part = grep_match.group(1).strip()
+                            output_file = grep_match.group(2)
+
+                            # Extract pattern and options from grep command
+                            pattern_match = re.search(r"['\"]([^'\"]+)['\"]", grep_part)
+                            if pattern_match:
+                                pattern = pattern_match.group(1)
+                                search_path = "."
+                                file_pattern = "*.py" if "*.py" in command else "*"
+
+                                if debug_mode:
+                                    print(f"🔄 Intercepted: {command} → grep_search('{pattern}', '{search_path}', '{file_pattern}') + write_file({output_file})")
+
+                                # Execute grep_search
+                                try:
+                                    grep_result = self.grep_search(
+                                        pattern=pattern,
+                                        path=search_path,
+                                        file_pattern=file_pattern,
+                                        output_mode="content"
+                                    )
+
+                                    # Format matches as text (like grep -rn output)
+                                    output_lines = []
+                                    for file_path, matches in grep_result.get('matches', {}).items():
+                                        for line_num, line_content in matches:
+                                            output_lines.append(f"{file_path}:{line_num}:{line_content}")
+
+                                    content_to_write = '\n'.join(output_lines) if output_lines else "(no matches found)"
+
+                                    # Write grep output to file
+                                    write_result = self.write_file(output_file, content_to_write)
+                                    if write_result['success']:
+                                        output = f"Found {len(output_lines)} lines with '{pattern}' → Created {output_file} ({write_result['bytes_written']} bytes)"
+                                        intercepted = True
+                                        tools_used.extend(["grep_search", "write_file"])
+                                except Exception as e:
+                                    if debug_mode:
+                                        print(f"⚠️ Grep > file interception error: {e}")
+                                    # Fall back to normal execution
+                                    pass
+
+                    # Extract: echo 'content' > filename OR cat << EOF > filename
+                    if not intercepted and 'echo' in command and '>' in command:
+                        # echo 'content' > file OR echo "content" > file
+                        match = re.search(r"echo\s+['\"](.+?)['\"].*?>\s*(\S+)", command)
+                        if match:
+                            content = match.group(1)
+                            filename = match.group(2)
+                            # Unescape common sequences
+                            content = content.replace('\\n', '\n').replace('\\t', '\t')
+                            result = self.write_file(filename, content + '\n')
+                            if result['success']:
+                                output = f"Created {filename} ({result['bytes_written']} bytes)"
+                                intercepted = True
+                                tools_used.append("write_file")
+                                if debug_mode:
+                                    print(f"🔄 Intercepted: {command} → write_file({filename}, ...)")
+                except:
+                    pass
+
+            # Check for sed editing commands
+            if not intercepted and command.startswith('sed '):
+                try:
+                    import re
+                    # sed 's/old/new/g' file OR sed -i 's/old/new/' file
+                    match = re.search(r"sed.*?['\"]s/([^/]+)/([^/]+)/", command)
+                    if match:
+                        old_text = match.group(1)
+                        new_text = match.group(2)
+                        # Extract filename (last argument)
+                        parts = command.split()
+                        filename = parts[-1]
+
+                        # Determine if replace_all based on /g flag
+                        replace_all = '/g' in command
+
+                        result = self.edit_file(filename, old_text, new_text, replace_all=replace_all)
+                        if result['success']:
+                            output = result['message']
+                            intercepted = True
+                            tools_used.append("edit_file")
+                            if debug_mode:
+                                print(f"🔄 Intercepted: {command} → edit_file({filename}, {old_text}, {new_text})")
+                except:
+                    pass
+
+            # Check for heredoc file creation (cat << EOF > file)
+            if not intercepted and '<<' in command and ('EOF' in command or 'HEREDOC' in command):
+                try:
+                    import re
+                    # Extract: cat << EOF > filename OR cat > filename << EOF
+                    # Note: We can't actually get the heredoc content from a single command line
+                    # This would need to be handled differently (multi-line input)
+                    # For now, just detect and warn
+                    if debug_mode:
+                        print(f"⚠️ Heredoc detected but not intercepted: {command[:80]}")
+                except:
+                    pass
+
+            # Check for content search commands (grep -r) WITHOUT redirection
+            # This comes AFTER grep > file interceptor to avoid conflicts
+            if not intercepted and command.startswith('grep ') and ('-r' in command or '-R' in command):
+                try:
+                    import re
+                    # Extract pattern: grep -r 'pattern' path
+                    pattern_match = re.search(r"grep.*?['\"]([^'\"]+)['\"]", command)
+                    if pattern_match:
+                        pattern = pattern_match.group(1)
+                        # Extract path (last argument usually)
+                        parts = command.split()
+                        search_path = parts[-1] if len(parts) > 2 else "."
+
+                        result = self.grep_search(pattern, search_path, "*.py", output_mode="files_with_matches")
+                        output = f"Files matching '{pattern}':\n" + '\n'.join(result['files'][:20])
+                        intercepted = True
+                        tools_used.append("grep_search")
+                        if debug_mode:
+                            print(f"🔄 Intercepted: {command} → grep_search({pattern}, {search_path})")
+                except:
+                    pass
+
+            # If not intercepted, execute as shell command
+            if not intercepted:
+                output = self.execute_command(command)
 
             if not output.startswith("ERROR"):
                 # Success - store results
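The cat/head/tail branch above reduces to "take the last non-flag token as the filename and route it through read_file". A self-contained sketch of just that extraction step (function name and test strings are illustrative, not part of the package):

import shlex

def extract_target_file(command: str):
    """Return the last non-flag argument of a cat/head/tail command, or None."""
    parts = shlex.split(command)
    if not parts or parts[0] not in ("cat", "head", "tail"):
        return None
    for token in reversed(parts[1:]):
        if not token.startswith("-"):
            return token
    return None

assert extract_target_file("head -n 50 app.py") == "app.py"
assert extract_target_file("cat 'my notes.txt'") == "my notes.txt"
assert extract_target_file("ls -la") is None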
@@ -3245,11 +3976,44 @@ JSON:"""
             api_results=api_results,
             tools_used=tools_used
         )
-
+
+        # POST-PROCESSING: Auto-extract code blocks and write files if user requested file creation
+        # This fixes the issue where LLM shows corrected code but doesn't create the file
+        if any(keyword in request.question.lower() for keyword in ['create', 'write', 'save', 'generate', 'fixed', 'corrected']):
+            # Extract filename from query (e.g., "write to foo.py", "create bar_fixed.py")
+            import re
+            filename_match = re.search(r'(?:to|create|write|save|generate)\s+(\w+[._-]\w+\.[\w]+)', request.question, re.IGNORECASE)
+            if not filename_match:
+                # Try pattern: "foo_fixed.py" or "bar.py"
+                filename_match = re.search(r'(\w+_fixed\.[\w]+|\w+\.[\w]+)', request.question)
+
+            if filename_match:
+                target_filename = filename_match.group(1)
+
+                # Extract code block from response (```python ... ``` or ``` ... ```)
+                code_block_pattern = r'```(?:python|bash|sh|r|sql)?\n(.*?)```'
+                code_blocks = re.findall(code_block_pattern, response.response, re.DOTALL)
+
+                if code_blocks:
+                    # Use the LARGEST code block (likely the complete file)
+                    largest_block = max(code_blocks, key=len)
+
+                    # Write to file
+                    try:
+                        write_result = self.write_file(target_filename, largest_block)
+                        if write_result['success']:
+                            # Append confirmation to response
+                            response.response += f"\n\n✅ File created: {target_filename} ({write_result['bytes_written']} bytes)"
+                            if debug_mode:
+                                print(f"🔄 Auto-extracted code block → write_file({target_filename})")
+                    except Exception as e:
+                        if debug_mode:
+                            print(f"⚠️ Auto-write failed: {e}")
+
         # CRITICAL: Save to conversation history
         self.conversation_history.append({"role": "user", "content": request.question})
         self.conversation_history.append({"role": "assistant", "content": response.response})
-
+
         return response
 
     # DEV MODE ONLY: Direct Groq calls (only works with local API keys)
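The auto-write step above hinges on one regex over the LLM response and a "largest block wins" rule. A self-contained sketch of that selection logic (helper name and demo string are illustrative; the pattern is the one used in the hunk):

import re

CODE_BLOCK_PATTERN = r'```(?:python|bash|sh|r|sql)?\n(.*?)```'

def largest_code_block(llm_response: str):
    """Return the longest fenced code block in a response, or None if there is none."""
    blocks = re.findall(CODE_BLOCK_PATTERN, llm_response, re.DOTALL)
    return max(blocks, key=len) if blocks else None

demo = "Fix below:\n```python\ndef add(a, b):\n    return a + b\n```\nAlso:\n```\nx = 1\n```"
print(largest_code_block(demo))  # prints the python block, the longer of the two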
{cite_agent-1.3.7 → cite_agent-1.3.8}/setup.py

@@ -7,7 +7,7 @@ long_description = readme_path.read_text() if readme_path.exists() else "Termina
 
 setup(
     name="cite-agent",
-    version="1.3.7",
+    version="1.3.8",
     author="Cite-Agent Team",
     author_email="contact@citeagent.dev",
     description="Terminal AI assistant for academic research with citation verification",
cite_agent-1.3.7/cite_agent/__version__.py

@@ -1 +0,0 @@
-__version__ = "1.3.7"
All remaining files are unchanged between 1.3.7 and 1.3.8 (listed above with +0 -0).