dtSpark-1.0.4-py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (96)
  1. dtSpark/__init__.py +0 -0
  2. dtSpark/_description.txt +1 -0
  3. dtSpark/_full_name.txt +1 -0
  4. dtSpark/_licence.txt +21 -0
  5. dtSpark/_metadata.yaml +6 -0
  6. dtSpark/_name.txt +1 -0
  7. dtSpark/_version.txt +1 -0
  8. dtSpark/aws/__init__.py +7 -0
  9. dtSpark/aws/authentication.py +296 -0
  10. dtSpark/aws/bedrock.py +578 -0
  11. dtSpark/aws/costs.py +318 -0
  12. dtSpark/aws/pricing.py +580 -0
  13. dtSpark/cli_interface.py +2645 -0
  14. dtSpark/conversation_manager.py +3050 -0
  15. dtSpark/core/__init__.py +12 -0
  16. dtSpark/core/application.py +3355 -0
  17. dtSpark/core/context_compaction.py +735 -0
  18. dtSpark/daemon/__init__.py +104 -0
  19. dtSpark/daemon/__main__.py +10 -0
  20. dtSpark/daemon/action_monitor.py +213 -0
  21. dtSpark/daemon/daemon_app.py +730 -0
  22. dtSpark/daemon/daemon_manager.py +289 -0
  23. dtSpark/daemon/execution_coordinator.py +194 -0
  24. dtSpark/daemon/pid_file.py +169 -0
  25. dtSpark/database/__init__.py +482 -0
  26. dtSpark/database/autonomous_actions.py +1191 -0
  27. dtSpark/database/backends.py +329 -0
  28. dtSpark/database/connection.py +122 -0
  29. dtSpark/database/conversations.py +520 -0
  30. dtSpark/database/credential_prompt.py +218 -0
  31. dtSpark/database/files.py +205 -0
  32. dtSpark/database/mcp_ops.py +355 -0
  33. dtSpark/database/messages.py +161 -0
  34. dtSpark/database/schema.py +673 -0
  35. dtSpark/database/tool_permissions.py +186 -0
  36. dtSpark/database/usage.py +167 -0
  37. dtSpark/files/__init__.py +4 -0
  38. dtSpark/files/manager.py +322 -0
  39. dtSpark/launch.py +39 -0
  40. dtSpark/limits/__init__.py +10 -0
  41. dtSpark/limits/costs.py +296 -0
  42. dtSpark/limits/tokens.py +342 -0
  43. dtSpark/llm/__init__.py +17 -0
  44. dtSpark/llm/anthropic_direct.py +446 -0
  45. dtSpark/llm/base.py +146 -0
  46. dtSpark/llm/context_limits.py +438 -0
  47. dtSpark/llm/manager.py +177 -0
  48. dtSpark/llm/ollama.py +578 -0
  49. dtSpark/mcp_integration/__init__.py +5 -0
  50. dtSpark/mcp_integration/manager.py +653 -0
  51. dtSpark/mcp_integration/tool_selector.py +225 -0
  52. dtSpark/resources/config.yaml.template +631 -0
  53. dtSpark/safety/__init__.py +22 -0
  54. dtSpark/safety/llm_service.py +111 -0
  55. dtSpark/safety/patterns.py +229 -0
  56. dtSpark/safety/prompt_inspector.py +442 -0
  57. dtSpark/safety/violation_logger.py +346 -0
  58. dtSpark/scheduler/__init__.py +20 -0
  59. dtSpark/scheduler/creation_tools.py +599 -0
  60. dtSpark/scheduler/execution_queue.py +159 -0
  61. dtSpark/scheduler/executor.py +1152 -0
  62. dtSpark/scheduler/manager.py +395 -0
  63. dtSpark/tools/__init__.py +4 -0
  64. dtSpark/tools/builtin.py +833 -0
  65. dtSpark/web/__init__.py +20 -0
  66. dtSpark/web/auth.py +152 -0
  67. dtSpark/web/dependencies.py +37 -0
  68. dtSpark/web/endpoints/__init__.py +17 -0
  69. dtSpark/web/endpoints/autonomous_actions.py +1125 -0
  70. dtSpark/web/endpoints/chat.py +621 -0
  71. dtSpark/web/endpoints/conversations.py +353 -0
  72. dtSpark/web/endpoints/main_menu.py +547 -0
  73. dtSpark/web/endpoints/streaming.py +421 -0
  74. dtSpark/web/server.py +578 -0
  75. dtSpark/web/session.py +167 -0
  76. dtSpark/web/ssl_utils.py +195 -0
  77. dtSpark/web/static/css/dark-theme.css +427 -0
  78. dtSpark/web/static/js/actions.js +1101 -0
  79. dtSpark/web/static/js/chat.js +614 -0
  80. dtSpark/web/static/js/main.js +496 -0
  81. dtSpark/web/static/js/sse-client.js +242 -0
  82. dtSpark/web/templates/actions.html +408 -0
  83. dtSpark/web/templates/base.html +93 -0
  84. dtSpark/web/templates/chat.html +814 -0
  85. dtSpark/web/templates/conversations.html +350 -0
  86. dtSpark/web/templates/goodbye.html +81 -0
  87. dtSpark/web/templates/login.html +90 -0
  88. dtSpark/web/templates/main_menu.html +983 -0
  89. dtSpark/web/templates/new_conversation.html +191 -0
  90. dtSpark/web/web_interface.py +137 -0
  91. dtspark-1.0.4.dist-info/METADATA +187 -0
  92. dtspark-1.0.4.dist-info/RECORD +96 -0
  93. dtspark-1.0.4.dist-info/WHEEL +5 -0
  94. dtspark-1.0.4.dist-info/entry_points.txt +3 -0
  95. dtspark-1.0.4.dist-info/licenses/LICENSE +21 -0
  96. dtspark-1.0.4.dist-info/top_level.txt +1 -0
dtSpark/tools/builtin.py
@@ -0,0 +1,833 @@
+"""
+Built-in Tools module for providing default tool capabilities.
+
+This module provides built-in tools that are always available to the LLM,
+such as date/time information with timezone awareness and filesystem access.
+
+
+"""
+
+import logging
+import os
+import base64
+import fnmatch
+from pathlib import Path
+from datetime import datetime
+from typing import Dict, List, Any, Optional
+from zoneinfo import ZoneInfo, available_timezones
+
+
+def get_builtin_tools(config: Optional[Dict[str, Any]] = None) -> List[Dict[str, Any]]:
+    """
+    Get the list of built-in tool definitions.
+
+    Args:
+        config: Optional configuration dictionary containing embedded_tools settings
+
+    Returns:
+        List of tool definitions in Claude API format
+    """
+    tools = [
+        {
+            "name": "get_current_datetime",
+            "description": "Get the current date and time with timezone awareness. "
+                           "Returns the current datetime in ISO 8601 format. "
+                           "Optionally specify a timezone to get the time in that zone.",
+            "input_schema": {
+                "type": "object",
+                "properties": {
+                    "timezone": {
+                        "type": "string",
+                        "description": "Optional timezone identifier (e.g., 'Australia/Sydney', 'America/New_York', 'UTC'). "
+                                       "If not provided, uses the system's local timezone.",
+                        "default": None
+                    },
+                    "format": {
+                        "type": "string",
+                        "description": "Optional format for the datetime output. Options: 'iso' (ISO 8601), 'human' (human-readable). "
+                                       "Default is 'iso'.",
+                        "enum": ["iso", "human"],
+                        "default": "iso"
+                    }
+                },
+                "required": []
+            }
+        }
+    ]
+
+    # Add filesystem tools if enabled
+    if config.get('embedded_tools', None):
+        fs_config = config.get('embedded_tools', {}).get('filesystem', {})
+        if fs_config.get('enabled', False):
+            fs_tools = _get_filesystem_tools(fs_config)
+            tools.extend(fs_tools)
+            logging.info(f"Embedded filesystem tools enabled: {len(fs_tools)} tools added")
+
+    return tools
+
+
+def execute_builtin_tool(tool_name: str, tool_input: Dict[str, Any],
+                         config: Optional[Dict[str, Any]] = None) -> Dict[str, Any]:
+    """
+    Execute a built-in tool.
+
+    Args:
+        tool_name: Name of the tool to execute
+        tool_input: Input parameters for the tool
+        config: Optional configuration dictionary for filesystem tools
+
+    Returns:
+        Dictionary containing:
+        - success: Boolean indicating if execution was successful
+        - result: The tool execution result (if successful)
+        - error: Error message (if failed)
+    """
+    try:
+        if tool_name == "get_current_datetime":
+            return _execute_get_current_datetime(tool_input)
+
+        # Filesystem tools
+        elif tool_name == "list_files_recursive":
+            return _execute_list_files_recursive(tool_input, config)
+        elif tool_name == "search_files":
+            return _execute_search_files(tool_input, config)
+        elif tool_name == "read_file_text":
+            return _execute_read_file_text(tool_input, config)
+        elif tool_name == "read_file_binary":
+            return _execute_read_file_binary(tool_input, config)
+        elif tool_name == "write_file":
+            return _execute_write_file(tool_input, config)
+        elif tool_name == "create_directories":
+            return _execute_create_directories(tool_input, config)
+        else:
+            return {
+                "success": False,
+                "error": f"Unknown built-in tool: {tool_name}"
+            }
+    except Exception as e:
+        logging.error(f"Error executing built-in tool {tool_name}: {e}")
+        return {
+            "success": False,
+            "error": str(e)
+        }
+
+
+def _execute_get_current_datetime(tool_input: Dict[str, Any]) -> Dict[str, Any]:
+    """
+    Execute the get_current_datetime tool.
+
+    Args:
+        tool_input: Dictionary containing optional 'timezone' and 'format' keys
+
+    Returns:
+        Dictionary with success status and datetime result
+    """
+    timezone_str = tool_input.get("timezone")
+    output_format = tool_input.get("format", "iso")
+
+    try:
+        # Get current datetime
+        if timezone_str:
+            # Validate timezone
+            if timezone_str not in available_timezones():
+                return {
+                    "success": False,
+                    "error": f"Invalid timezone: {timezone_str}. Use a valid IANA timezone identifier."
+                }
+
+            # Get datetime in specified timezone
+            tz = ZoneInfo(timezone_str)
+            now = datetime.now(tz)
+        else:
+            # Get local datetime with system timezone
+            now = datetime.now().astimezone()
+
+        # Format output
+        if output_format == "human":
+            # Human-readable format
+            result = {
+                "datetime": now.strftime("%A, %d %B %Y at %I:%M:%S %p"),
+                "timezone": now.strftime("%Z (UTC%z)"),
+                "iso_format": now.isoformat()
+            }
+        else:
+            # ISO 8601 format (default)
+            result = {
+                "datetime": now.isoformat(),
+                "timezone": str(now.tzinfo),
+                "timezone_offset": now.strftime("%z"),
+                "unix_timestamp": int(now.timestamp())
+            }
+
+        logging.info(f"Built-in tool get_current_datetime executed: timezone={timezone_str or 'local'}, format={output_format}")
+
+        return {
+            "success": True,
+            "result": result
+        }
+
+    except Exception as e:
+        logging.error(f"Error in get_current_datetime: {e}")
+        return {
+            "success": False,
+            "error": str(e)
+        }
+
+
+def get_available_timezones() -> List[str]:
+    """
+    Get a list of all available timezone identifiers.
+
+    Returns:
+        Sorted list of timezone identifiers
+    """
+    return sorted(available_timezones())
+
+
+def validate_timezone(timezone_str: str) -> bool:
+    """
+    Validate if a timezone string is valid.
+
+    Args:
+        timezone_str: Timezone identifier to validate
+
+    Returns:
+        True if valid, False otherwise
+    """
+    return timezone_str in available_timezones()
+
+
+# ============================================================================
+# Filesystem Tools
+# ============================================================================
+
+def _get_filesystem_tools(fs_config: Dict[str, Any]) -> List[Dict[str, Any]]:
+    """
+    Get filesystem tool definitions based on configuration.
+
+    Args:
+        fs_config: Filesystem configuration dictionary
+
+    Returns:
+        List of filesystem tool definitions
+    """
+    access_mode = fs_config.get('access_mode', 'read')
+    allowed_path = fs_config.get('allowed_path', '.')
+
+    # Read-only tools (always included when filesystem is enabled)
+    tools = [
+        {
+            "name": "list_files_recursive",
+            "description": f"List all files and directories recursively within the allowed path ({allowed_path}). "
+                           "Returns a structured list of all files with their paths, sizes, and modification times. "
+                           "Useful for understanding directory structure and finding files.",
+            "input_schema": {
+                "type": "object",
+                "properties": {
+                    "path": {
+                        "type": "string",
+                        "description": f"Optional subdirectory within {allowed_path} to list. "
+                                       "If not provided, lists from the root of the allowed path.",
+                        "default": ""
+                    },
+                    "include_hidden": {
+                        "type": "boolean",
+                        "description": "Include hidden files and directories (those starting with '.')",
+                        "default": False
+                    }
+                },
+                "required": []
+            }
+        },
+        {
+            "name": "search_files",
+            "description": f"Search for files by filename within the allowed path ({allowed_path}). "
+                           "Supports wildcards (* for any characters, ? for single character). "
+                           "Returns matching file paths.",
+            "input_schema": {
+                "type": "object",
+                "properties": {
+                    "pattern": {
+                        "type": "string",
+                        "description": "Search pattern for filename. Supports wildcards: * (any characters), ? (single character). "
+                                       "Examples: '*.py' (all Python files), 'test_*.py' (test files), 'config.???' (config with 3-char extension)",
+                    },
+                    "case_sensitive": {
+                        "type": "boolean",
+                        "description": "Whether the search should be case-sensitive",
+                        "default": False
+                    }
+                },
+                "required": ["pattern"]
+            }
+        },
+        {
+            "name": "read_file_text",
+            "description": f"Read the contents of a text file within the allowed path ({allowed_path}). "
+                           "Attempts to decode the file as UTF-8 text. Use read_file_binary for non-text files.",
+            "input_schema": {
+                "type": "object",
+                "properties": {
+                    "path": {
+                        "type": "string",
+                        "description": "Path to the file to read (relative to allowed path or absolute within allowed path)",
+                    }
+                },
+                "required": ["path"]
+            }
+        },
+        {
+            "name": "read_file_binary",
+            "description": f"Read the contents of a file as binary data within the allowed path ({allowed_path}). "
+                           "Returns base64-encoded binary content. Use for images, PDFs, or other non-text files.",
+            "input_schema": {
+                "type": "object",
+                "properties": {
+                    "path": {
+                        "type": "string",
+                        "description": "Path to the file to read (relative to allowed path or absolute within allowed path)",
+                    },
+                    "max_size_mb": {
+                        "type": "number",
+                        "description": "Maximum file size in MB to read (default: 10MB). Prevents reading very large files.",
+                        "default": 10
+                    }
+                },
+                "required": ["path"]
+            }
+        }
+    ]
+
+    # Write tools (only added if access_mode is read_write)
+    if access_mode == 'read_write':
+        tools.extend([
+            {
+                "name": "write_file",
+                "description": f"Write content to a file within the allowed path ({allowed_path}). "
+                               "Creates the file if it doesn't exist, or overwrites if it exists. "
+                               "Parent directories must already exist (use create_directories first if needed).",
+                "input_schema": {
+                    "type": "object",
+                    "properties": {
+                        "path": {
+                            "type": "string",
+                            "description": "Path to the file to write (relative to allowed path or absolute within allowed path)",
+                        },
+                        "content": {
+                            "type": "string",
+                            "description": "Content to write to the file",
+                        },
+                        "encoding": {
+                            "type": "string",
+                            "description": "Text encoding to use (default: utf-8)",
+                            "default": "utf-8"
+                        }
+                    },
+                    "required": ["path", "content"]
+                }
+            },
+            {
+                "name": "create_directories",
+                "description": f"Create one or more nested directories within the allowed path ({allowed_path}). "
+                               "Creates all intermediate directories as needed (like 'mkdir -p'). "
+                               "Safe to call even if directories already exist.",
+                "input_schema": {
+                    "type": "object",
+                    "properties": {
+                        "path": {
+                            "type": "string",
+                            "description": "Directory path to create (relative to allowed path or absolute within allowed path). "
+                                           "Can include multiple nested levels (e.g., 'data/processed/reports')",
+                        }
+                    },
+                    "required": ["path"]
+                }
+            }
+        ])
+
+    return tools
+
+
+def _validate_path(file_path: str, allowed_path: str) -> Dict[str, Any]:
+    """
+    Validate that a file path is within the allowed directory.
+
+    Args:
+        file_path: File path to validate
+        allowed_path: Root path that file must be within
+
+    Returns:
+        Dictionary with:
+        - valid: Boolean indicating if path is valid
+        - resolved_path: Absolute resolved path (if valid)
+        - error: Error message (if invalid)
+    """
+    try:
+        # Resolve allowed path to absolute
+        allowed_abs = Path(allowed_path).resolve()
+
+        # Handle empty file_path (means root of allowed path)
+        if not file_path or file_path == '.':
+            return {
+                "valid": True,
+                "resolved_path": str(allowed_abs),
+                "error": None
+            }
+
+        # Resolve file path
+        # If file_path is absolute, use it directly; otherwise treat as relative to allowed_path
+        if Path(file_path).is_absolute():
+            file_abs = Path(file_path).resolve()
+        else:
+            file_abs = (allowed_abs / file_path).resolve()
+
+        # Check if file path is within allowed path
+        try:
+            file_abs.relative_to(allowed_abs)
+        except ValueError:
+            return {
+                "valid": False,
+                "resolved_path": None,
+                "error": f"Access denied: Path '{file_path}' is outside allowed directory '{allowed_path}'"
+            }
+
+        return {
+            "valid": True,
+            "resolved_path": str(file_abs),
+            "error": None
+        }
+
+    except Exception as e:
+        return {
+            "valid": False,
+            "resolved_path": None,
+            "error": f"Invalid path: {str(e)}"
+        }
+
+
+def _execute_list_files_recursive(tool_input: Dict[str, Any],
+                                  config: Optional[Dict[str, Any]]) -> Dict[str, Any]:
+    """
+    Execute the list_files_recursive tool.
+
+    Args:
+        tool_input: Tool input parameters
+        config: Configuration dictionary
+
+    Returns:
+        Dictionary with success status and file listing
+    """
+    if not config.get('embedded_tools'):
+        return {"success": False, "error": "Filesystem tools not configured"}
+
+    fs_config = config.get('embedded_tools', {}).get('filesystem', {})
+    allowed_path = fs_config.get('allowed_path', '.')
+
+    # Get parameters
+    sub_path = tool_input.get('path', '')
+    include_hidden = tool_input.get('include_hidden', False)
+
+    # Validate path
+    validation = _validate_path(sub_path, allowed_path)
+    if not validation['valid']:
+        return {"success": False, "error": validation['error']}
+
+    root_path = Path(validation['resolved_path'])
+
+    # Check if path exists
+    if not root_path.exists():
+        return {"success": False, "error": f"Path does not exist: {sub_path}"}
+
+    if not root_path.is_dir():
+        return {"success": False, "error": f"Path is not a directory: {sub_path}"}
+
+    # Collect all files and directories
+    files = []
+    directories = []
+
+    try:
+        for item in root_path.rglob('*'):
+            # Skip hidden files if not requested
+            if not include_hidden and any(part.startswith('.') for part in item.parts):
+                continue
+
+            # Get relative path from root
+            rel_path = item.relative_to(root_path)
+
+            if item.is_file():
+                files.append({
+                    "path": str(rel_path),
+                    "full_path": str(item),
+                    "size_bytes": item.stat().st_size,
+                    "modified": datetime.fromtimestamp(item.stat().st_mtime).isoformat(),
+                    "type": "file"
+                })
+            elif item.is_dir():
+                directories.append({
+                    "path": str(rel_path),
+                    "full_path": str(item),
+                    "type": "directory"
+                })
+
+        result = {
+            "root_path": str(root_path),
+            "total_files": len(files),
+            "total_directories": len(directories),
+            "files": sorted(files, key=lambda x: x['path']),
+            "directories": sorted(directories, key=lambda x: x['path'])
+        }
+
+        logging.info(f"Listed {len(files)} files and {len(directories)} directories from {root_path}")
+        return {"success": True, "result": result}
+
+    except Exception as e:
+        logging.error(f"Error listing files: {e}")
+        return {"success": False, "error": str(e)}
+
+
+def _execute_search_files(tool_input: Dict[str, Any],
+                          config: Optional[Dict[str, Any]]) -> Dict[str, Any]:
+    """
+    Execute the search_files tool.
+
+    Args:
+        tool_input: Tool input parameters
+        config: Configuration dictionary
+
+    Returns:
+        Dictionary with success status and search results
+    """
+    if not config.get('embedded_tools'):
+        return {"success": False, "error": "Filesystem tools not configured"}
+
+    fs_config = config.get('embedded_tools', {}).get('filesystem', {})
+    allowed_path = fs_config.get('allowed_path', '.')
+
+    # Get parameters
+    pattern = tool_input.get('pattern')
+    case_sensitive = tool_input.get('case_sensitive', False)
+
+    if not pattern:
+        return {"success": False, "error": "Search pattern is required"}
+
+    # Validate path
+    validation = _validate_path('', allowed_path)
+    if not validation['valid']:
+        return {"success": False, "error": validation['error']}
+
+    root_path = Path(validation['resolved_path'])
+
+    # Search for matching files
+    matches = []
+
+    try:
+        for item in root_path.rglob('*'):
+            if item.is_file():
+                filename = item.name
+
+                # Apply pattern matching
+                if case_sensitive:
+                    match = fnmatch.fnmatch(filename, pattern)
+                else:
+                    match = fnmatch.fnmatch(filename.lower(), pattern.lower())
+
+                if match:
+                    rel_path = item.relative_to(root_path)
+                    matches.append({
+                        "filename": filename,
+                        "path": str(rel_path),
+                        "full_path": str(item),
+                        "size_bytes": item.stat().st_size,
+                        "modified": datetime.fromtimestamp(item.stat().st_mtime).isoformat()
+                    })
+
+        result = {
+            "pattern": pattern,
+            "total_matches": len(matches),
+            "matches": sorted(matches, key=lambda x: x['path'])
+        }
+
+        logging.info(f"Search for '{pattern}' found {len(matches)} matches")
+        return {"success": True, "result": result}
+
+    except Exception as e:
+        logging.error(f"Error searching files: {e}")
+        return {"success": False, "error": str(e)}
+
+
+def _execute_read_file_text(tool_input: Dict[str, Any],
+                            config: Optional[Dict[str, Any]]) -> Dict[str, Any]:
+    """
+    Execute the read_file_text tool.
+
+    Args:
+        tool_input: Tool input parameters
+        config: Configuration dictionary
+
+    Returns:
+        Dictionary with success status and file content
+    """
+    if not config.get('embedded_tools'):
+        return {"success": False, "error": "Filesystem tools not configured"}
+
+    fs_config = config.get('embedded_tools', {}).get('filesystem', {})
+    allowed_path = fs_config.get('allowed_path', '.')
+
+    # Get parameters
+    file_path = tool_input.get('path')
+
+    if not file_path:
+        return {"success": False, "error": "File path is required"}
+
+    # Validate path
+    validation = _validate_path(file_path, allowed_path)
+    if not validation['valid']:
+        return {"success": False, "error": validation['error']}
+
+    full_path = Path(validation['resolved_path'])
+
+    # Check if file exists
+    if not full_path.exists():
+        return {"success": False, "error": f"File does not exist: {file_path}"}
+
+    if not full_path.is_file():
+        return {"success": False, "error": f"Path is not a file: {file_path}"}
+
+    # Read file as text
+    try:
+        with open(full_path, 'r', encoding='utf-8') as f:
+            content = f.read()
+
+        result = {
+            "path": file_path,
+            "full_path": str(full_path),
+            "content": content,
+            "size_bytes": full_path.stat().st_size,
+            "encoding": "utf-8"
+        }
+
+        logging.info(f"Read text file: {file_path} ({result['size_bytes']} bytes)")
+        return {"success": True, "result": result}
+
+    except UnicodeDecodeError:
+        return {
+            "success": False,
+            "error": f"File is not valid UTF-8 text. Use read_file_binary instead: {file_path}"
+        }
+    except Exception as e:
+        logging.error(f"Error reading file {file_path}: {e}")
+        return {"success": False, "error": str(e)}
+
+
+def _execute_read_file_binary(tool_input: Dict[str, Any],
+                              config: Optional[Dict[str, Any]]) -> Dict[str, Any]:
+    """
+    Execute the read_file_binary tool.
+
+    Args:
+        tool_input: Tool input parameters
+        config: Configuration dictionary
+
+    Returns:
+        Dictionary with success status and base64-encoded content
+    """
+    if not config.get('embedded_tools'):
+        return {"success": False, "error": "Filesystem tools not configured"}
+
+    fs_config = config.get('embedded_tools', {}).get('filesystem', {})
+    allowed_path = fs_config.get('allowed_path', '.')
+
+    # Get parameters
+    file_path = tool_input.get('path')
+    max_size_mb = tool_input.get('max_size_mb', 10)
+
+    if not file_path:
+        return {"success": False, "error": "File path is required"}
+
+    # Validate path
+    validation = _validate_path(file_path, allowed_path)
+    if not validation['valid']:
+        return {"success": False, "error": validation['error']}
+
+    full_path = Path(validation['resolved_path'])
+
+    # Check if file exists
+    if not full_path.exists():
+        return {"success": False, "error": f"File does not exist: {file_path}"}
+
+    if not full_path.is_file():
+        return {"success": False, "error": f"Path is not a file: {file_path}"}
+
+    # Check file size
+    file_size = full_path.stat().st_size
+    max_size_bytes = max_size_mb * 1024 * 1024
+
+    if file_size > max_size_bytes:
+        return {
+            "success": False,
+            "error": f"File size ({file_size / 1024 / 1024:.2f} MB) exceeds maximum ({max_size_mb} MB)"
+        }
+
+    # Read file as binary
+    try:
+        with open(full_path, 'rb') as f:
+            binary_content = f.read()
+
+        # Encode as base64
+        base64_content = base64.b64encode(binary_content).decode('utf-8')
+
+        result = {
+            "path": file_path,
+            "full_path": str(full_path),
+            "content_base64": base64_content,
+            "size_bytes": file_size
+        }
+
+        logging.info(f"Read binary file: {file_path} ({file_size} bytes)")
+        return {"success": True, "result": result}
+
+    except Exception as e:
+        logging.error(f"Error reading binary file {file_path}: {e}")
+        return {"success": False, "error": str(e)}
+
+
+def _execute_write_file(tool_input: Dict[str, Any],
+                        config: Optional[Dict[str, Any]]) -> Dict[str, Any]:
+    """
+    Execute the write_file tool.
+
+    Args:
+        tool_input: Tool input parameters
+        config: Configuration dictionary
+
+    Returns:
+        Dictionary with success status
+    """
+    logging.debug(f"write_file called with config keys: {list(config.keys()) if config else 'None'}")
+
+    if not config.get('embedded_tools'):
+        logging.warning("write_file failed: embedded_tools not in config")
+        return {"success": False, "error": "Filesystem tools not configured"}
+
+    fs_config = config.get('embedded_tools', {}).get('filesystem', {})
+    allowed_path = fs_config.get('allowed_path', '.')
+    access_mode = fs_config.get('access_mode', 'read')
+
+    logging.debug(f"write_file fs_config: allowed_path={allowed_path}, access_mode={access_mode}")
+
+    # Check if write access is enabled
+    if access_mode != 'read_write':
+        logging.warning(f"write_file failed: access_mode is '{access_mode}', not 'read_write'")
+        return {
+            "success": False,
+            "error": "Write operations are disabled. Set access_mode to 'read_write' in configuration."
+        }
+
+    # Get parameters
+    file_path = tool_input.get('path')
+    content = tool_input.get('content')
+    encoding = tool_input.get('encoding', 'utf-8')
+
+    logging.debug(f"write_file params: path={file_path}, content_len={len(content) if content else 0}")
+
+    if not file_path:
+        logging.warning("write_file failed: no file path provided")
+        return {"success": False, "error": "File path is required"}
+
+    if content is None:
+        logging.warning("write_file failed: no content provided")
+        return {"success": False, "error": "Content is required"}
+
+    # Validate path
+    validation = _validate_path(file_path, allowed_path)
+    if not validation['valid']:
+        logging.warning(f"write_file failed: path validation error: {validation['error']}")
+        return {"success": False, "error": validation['error']}
+
+    full_path = Path(validation['resolved_path'])
+    logging.debug(f"write_file resolved path: {full_path}")
+
+    # Check if parent directory exists
+    if not full_path.parent.exists():
+        logging.warning(f"write_file failed: parent directory does not exist: {full_path.parent}")
+        return {
+            "success": False,
+            "error": f"Parent directory does not exist: {full_path.parent}. Use create_directories first."
+        }
+
+    # Write file
+    try:
+        logging.debug(f"write_file: attempting to write {len(content)} chars to {full_path}")
+        with open(full_path, 'w', encoding=encoding) as f:
+            f.write(content)
+
+        result = {
+            "path": file_path,
+            "full_path": str(full_path),
+            "size_bytes": full_path.stat().st_size,
+            "encoding": encoding
+        }
+
+        logging.info(f"Wrote file: {file_path} ({result['size_bytes']} bytes)")
+        return {"success": True, "result": result}
+
+    except Exception as e:
+        logging.error(f"Error writing file {file_path}: {e}")
+        return {"success": False, "error": str(e)}
+
+
+def _execute_create_directories(tool_input: Dict[str, Any],
+                                config: Optional[Dict[str, Any]]) -> Dict[str, Any]:
+    """
+    Execute the create_directories tool.
+
+    Args:
+        tool_input: Tool input parameters
+        config: Configuration dictionary
+
+    Returns:
+        Dictionary with success status
+    """
+    if not config.get('embedded_tools'):
+        return {"success": False, "error": "Filesystem tools not configured"}
+
+    fs_config = config.get('embedded_tools', {}).get('filesystem', {})
+    allowed_path = fs_config.get('allowed_path', '.')
+    access_mode = fs_config.get('access_mode', 'read')
+
+    # Check if write access is enabled
+    if access_mode != 'read_write':
+        return {
+            "success": False,
+            "error": "Write operations are disabled. Set access_mode to 'read_write' in configuration."
+        }
+
+    # Get parameters
+    dir_path = tool_input.get('path')
+
+    if not dir_path:
+        return {"success": False, "error": "Directory path is required"}
+
+    # Validate path
+    validation = _validate_path(dir_path, allowed_path)
+    if not validation['valid']:
+        return {"success": False, "error": validation['error']}
+
+    full_path = Path(validation['resolved_path'])
+
+    # Create directories
+    try:
+        full_path.mkdir(parents=True, exist_ok=True)
+
+        result = {
+            "path": dir_path,
+            "full_path": str(full_path),
+            "created": not full_path.exists() or len(list(full_path.iterdir())) == 0
+        }
+
+        logging.info(f"Created directories: {dir_path}")
+        return {"success": True, "result": result}
+
+    except Exception as e:
+        logging.error(f"Error creating directories {dir_path}: {e}")
+        return {"success": False, "error": str(e)}