ostruct-cli 0.7.2__py3-none-any.whl → 0.8.0__py3-none-any.whl
This diff compares the content of publicly available package versions as released to a supported registry. It is provided for informational purposes only and reflects the changes between the two versions as they appear in the public registry.
- ostruct/cli/__init__.py +21 -3
- ostruct/cli/base_errors.py +1 -1
- ostruct/cli/cli.py +66 -1983
- ostruct/cli/click_options.py +460 -28
- ostruct/cli/code_interpreter.py +238 -0
- ostruct/cli/commands/__init__.py +32 -0
- ostruct/cli/commands/list_models.py +128 -0
- ostruct/cli/commands/quick_ref.py +50 -0
- ostruct/cli/commands/run.py +137 -0
- ostruct/cli/commands/update_registry.py +71 -0
- ostruct/cli/config.py +277 -0
- ostruct/cli/cost_estimation.py +134 -0
- ostruct/cli/errors.py +310 -6
- ostruct/cli/exit_codes.py +1 -0
- ostruct/cli/explicit_file_processor.py +548 -0
- ostruct/cli/field_utils.py +69 -0
- ostruct/cli/file_info.py +42 -9
- ostruct/cli/file_list.py +301 -102
- ostruct/cli/file_search.py +455 -0
- ostruct/cli/file_utils.py +47 -13
- ostruct/cli/mcp_integration.py +541 -0
- ostruct/cli/model_creation.py +150 -1
- ostruct/cli/model_validation.py +204 -0
- ostruct/cli/progress_reporting.py +398 -0
- ostruct/cli/registry_updates.py +14 -9
- ostruct/cli/runner.py +1418 -0
- ostruct/cli/schema_utils.py +113 -0
- ostruct/cli/services.py +626 -0
- ostruct/cli/template_debug.py +748 -0
- ostruct/cli/template_debug_help.py +162 -0
- ostruct/cli/template_env.py +15 -6
- ostruct/cli/template_filters.py +55 -3
- ostruct/cli/template_optimizer.py +474 -0
- ostruct/cli/template_processor.py +1080 -0
- ostruct/cli/template_rendering.py +69 -34
- ostruct/cli/token_validation.py +286 -0
- ostruct/cli/types.py +78 -0
- ostruct/cli/unattended_operation.py +269 -0
- ostruct/cli/validators.py +386 -3
- {ostruct_cli-0.7.2.dist-info → ostruct_cli-0.8.0.dist-info}/LICENSE +2 -0
- ostruct_cli-0.8.0.dist-info/METADATA +633 -0
- ostruct_cli-0.8.0.dist-info/RECORD +69 -0
- {ostruct_cli-0.7.2.dist-info → ostruct_cli-0.8.0.dist-info}/WHEEL +1 -1
- ostruct_cli-0.7.2.dist-info/METADATA +0 -370
- ostruct_cli-0.7.2.dist-info/RECORD +0 -45
- {ostruct_cli-0.7.2.dist-info → ostruct_cli-0.8.0.dist-info}/entry_points.txt +0 -0
@@ -0,0 +1,548 @@
"""Explicit File Routing System for ostruct CLI.

This module implements the explicit file routing system with tool-specific file handling.
Following the design philosophy "Explicit is better than implicit" - zero magic behavior.
"""

import logging
from dataclasses import dataclass, field
from pathlib import Path
from typing import Any, Dict, List, Optional, Set

from .security.security_manager import SecurityManager

logger = logging.getLogger(__name__)


@dataclass
class FileForSpec:
    """Specification for files routed to specific tools."""

    tools: List[str]  # ["code-interpreter", "file-search", "template"]
    files: List[str]  # ["data.csv", "analysis.py"]


@dataclass
class ExplicitRouting:
    """Explicit routing configuration for files to tools."""

    template_files: List[str] = field(
        default_factory=list
    )  # Template access only
    code_interpreter_files: List[str] = field(
        default_factory=list
    )  # Code Interpreter uploads
    file_search_files: List[str] = field(
        default_factory=list
    )  # File Search uploads
    template_dirs: List[str] = field(
        default_factory=list
    )  # Template directory access
    code_interpreter_dirs: List[str] = field(
        default_factory=list
    )  # Code Interpreter directory uploads
    file_search_dirs: List[str] = field(
        default_factory=list
    )  # File Search directory uploads

    # Directory aliases for stable template variable names
    template_dir_aliases: List[tuple[str, str]] = field(
        default_factory=list
    )  # (alias_name, dir_path) for template access
    code_interpreter_dir_aliases: List[tuple[str, str]] = field(
        default_factory=list
    )  # (alias_name, dir_path) for code interpreter
    file_search_dir_aliases: List[tuple[str, str]] = field(
        default_factory=list
    )  # (alias_name, dir_path) for file search


@dataclass
class ProcessingResult:
    """Result of file routing processing."""

    routing: ExplicitRouting
    enabled_tools: Set[str]
    validated_files: Dict[str, List[str]]  # tool -> file paths
    auto_enabled_feedback: Optional[str] = None


class ExplicitFileProcessor:
    """Processor for explicit file routing with tool-specific handling."""

    def __init__(self, security_manager: SecurityManager):
        """Initialize the explicit file processor.

        Args:
            security_manager: Security manager for file validation
        """
        self.security_manager = security_manager

    async def process_file_routing(
        self, args: Dict[str, Any], explicit_tools: Optional[List[str]] = None
    ) -> ProcessingResult:
        """Process files with explicit tool routing.

        Args:
            args: CLI arguments containing file routing specifications
            explicit_tools: Explicitly specified tools to enable

        Returns:
            ProcessingResult with routing configuration and enabled tools

        Raises:
            ValueError: If routing configuration is invalid
        """
        logger.debug("=== Explicit File Routing Processing ===")

        # Phase 1: Parse file routing from CLI arguments
        routing = self._parse_file_routing_from_args(args)

        # Phase 2: Auto-detect and enable tools based on file routing
        enabled_tools, auto_feedback = self._resolve_tools(
            routing, explicit_tools
        )

        # Phase 3: Security validation for all files
        validated_routing = await self._validate_routing_security(routing)

        # Phase 4: Create validated file mappings
        validated_files = self._create_validated_file_mappings(
            validated_routing
        )

        logger.debug(
            f"File routing processed: {len(enabled_tools)} tools enabled"
        )

        return ProcessingResult(
            routing=validated_routing,
            enabled_tools=enabled_tools,
            validated_files=validated_files,
            auto_enabled_feedback=auto_feedback,
        )

    def _parse_file_routing_from_args(
        self, args: Dict[str, Any]
    ) -> ExplicitRouting:
        """Parse file routing specifications from CLI arguments.

        Args:
            args: CLI arguments

        Returns:
            ExplicitRouting configuration
        """
        routing = ExplicitRouting()

        # Legacy options (-f, -d) are handled separately in create_template_context_from_routing
        # to preserve their custom variable naming semantics
        legacy_files = args.get("files", [])
        legacy_dirs = args.get("dir", [])

        if legacy_files:
            logger.debug(
                f"Legacy -f flag detected: {len(legacy_files)} files (handled separately)"
            )

        if legacy_dirs:
            logger.debug(
                f"Legacy -d flag detected: {len(legacy_dirs)} dirs (handled separately)"
            )

        # Handle explicit tool routing - file options now have different formats

        # Template files (from -ft) - now single-argument auto-naming
        template_file_paths = args.get("template_files", [])
        for file_path in template_file_paths:
            if isinstance(file_path, str):
                routing.template_files.append(file_path)
            else:
                # Fallback for old format (shouldn't happen with new implementation)
                routing.template_files.append(str(file_path))

        # Template file aliases (from --fta) - two-argument explicit naming
        template_file_aliases = args.get("template_file_aliases", [])
        for name_path_tuple in template_file_aliases:
            if isinstance(name_path_tuple, tuple):
                name, path = name_path_tuple
                routing.template_files.append(str(path))
            else:
                routing.template_files.append(str(name_path_tuple))

        # Code Interpreter files (from -fc) - now single-argument auto-naming
        code_interpreter_file_paths = args.get("code_interpreter_files", [])
        for file_path in code_interpreter_file_paths:
            if isinstance(file_path, str):
                routing.code_interpreter_files.append(file_path)
            else:
                # Fallback for old format (shouldn't happen with new implementation)
                routing.code_interpreter_files.append(str(file_path))

        # Code interpreter file aliases (from --fca) - two-argument explicit naming
        code_interpreter_file_aliases = args.get(
            "code_interpreter_file_aliases", []
        )
        for name_path_tuple in code_interpreter_file_aliases:
            if isinstance(name_path_tuple, tuple):
                name, path = name_path_tuple
                routing.code_interpreter_files.append(str(path))
            else:
                routing.code_interpreter_files.append(str(name_path_tuple))

        # File Search files (from -fs) - now single-argument auto-naming
        file_search_file_paths = args.get("file_search_files", [])
        for file_path in file_search_file_paths:
            if isinstance(file_path, str):
                routing.file_search_files.append(file_path)
            else:
                # Fallback for old format (shouldn't happen with new implementation)
                routing.file_search_files.append(str(file_path))

        # File search file aliases (from --fsa) - two-argument explicit naming
        file_search_file_aliases = args.get("file_search_file_aliases", [])
        for name_path_tuple in file_search_file_aliases:
            if isinstance(name_path_tuple, tuple):
                name, path = name_path_tuple
                routing.file_search_files.append(str(path))
            else:
                routing.file_search_files.append(str(name_path_tuple))

        # Directory options - auto-naming (existing behavior)
        routing.template_dirs.extend(args.get("template_dirs", []))
        routing.code_interpreter_dirs.extend(
            args.get("code_interpreter_dirs", [])
        )
        routing.file_search_dirs.extend(args.get("file_search_dirs", []))

        # Directory aliases - custom naming for stable template variables
        template_dir_aliases = args.get("template_dir_aliases", [])
        for alias_name, dir_path in template_dir_aliases:
            routing.template_dir_aliases.append((alias_name, str(dir_path)))

        code_interpreter_dir_aliases = args.get(
            "code_interpreter_dir_aliases", []
        )
        for alias_name, dir_path in code_interpreter_dir_aliases:
            routing.code_interpreter_dir_aliases.append(
                (alias_name, str(dir_path))
            )

        file_search_dir_aliases = args.get("file_search_dir_aliases", [])
        for alias_name, dir_path in file_search_dir_aliases:
            routing.file_search_dir_aliases.append((alias_name, str(dir_path)))

        # Handle tool-specific file routing
        # New --file-for syntax: --file-for TOOL PATH
        tool_files = args.get("tool_files", [])
        valid_tools = {"code-interpreter", "file-search", "template"}

        for tool, file_path in tool_files:
            if tool not in valid_tools:
                raise ValueError(
                    f"Invalid tool '{tool}' in --file-for. "
                    f"Valid tools: {', '.join(sorted(valid_tools))}"
                )

            if tool == "code-interpreter":
                routing.code_interpreter_files.append(file_path)
            elif tool == "file-search":
                routing.file_search_files.append(file_path)
            elif tool == "template":
                routing.template_files.append(file_path)

        return routing

    def _resolve_tools(
        self,
        routing: ExplicitRouting,
        explicit_tools: Optional[List[str]] = None,
    ) -> tuple[Set[str], Optional[str]]:
        """Resolve which tools should be enabled based on file routing.

        Args:
            routing: File routing configuration
            explicit_tools: Explicitly specified tools

        Returns:
            Tuple of (enabled_tools_set, auto_enablement_feedback_message)
        """
        enabled_tools = set(explicit_tools or [])
        auto_enabled = set()

        # Auto-enable tools based on file routing
        if (
            routing.code_interpreter_files
            or routing.code_interpreter_dirs
            or routing.code_interpreter_dir_aliases
        ):
            if "code-interpreter" not in enabled_tools:
                auto_enabled.add("code-interpreter")
                enabled_tools.add("code-interpreter")

        if (
            routing.file_search_files
            or routing.file_search_dirs
            or routing.file_search_dir_aliases
        ):
            if "file-search" not in enabled_tools:
                auto_enabled.add("file-search")
                enabled_tools.add("file-search")

        # Generate feedback message for auto-enabled tools
        auto_feedback = None
        if auto_enabled:
            auto_feedback = f"ℹ️ Based on explicit routing, auto-enabled tools: {', '.join(sorted(auto_enabled))}"
            logger.info(auto_feedback)

        return enabled_tools, auto_feedback

    async def _validate_routing_security(
        self, routing: ExplicitRouting
    ) -> ExplicitRouting:
        """Validate file routing through security manager.

        Args:
            routing: File routing configuration

        Returns:
            Validated routing configuration

        Raises:
            SecurityError: If any files fail security validation
        """
        logger.debug("Validating file routing security")

        # Collect all files for validation
        all_files = []
        all_files.extend(routing.template_files)
        all_files.extend(routing.code_interpreter_files)
        all_files.extend(routing.file_search_files)

        # Collect all directories for validation
        all_dirs = []
        all_dirs.extend(routing.template_dirs)
        all_dirs.extend(routing.code_interpreter_dirs)
        all_dirs.extend(routing.file_search_dirs)

        # Add directory aliases (extract paths from tuples)
        all_dirs.extend(
            [dir_path for _, dir_path in routing.template_dir_aliases]
        )
        all_dirs.extend(
            [dir_path for _, dir_path in routing.code_interpreter_dir_aliases]
        )
        all_dirs.extend(
            [dir_path for _, dir_path in routing.file_search_dir_aliases]
        )

        # Validate files through security manager
        for file_path in all_files:
            try:
                # Use security manager's validation methods
                validated_path = self.security_manager.validate_path(file_path)
                # Check if it's actually a file
                if not validated_path.is_file():
                    raise ValueError(f"Path is not a file: {file_path}")
            except Exception as e:
                logger.error(
                    f"Security validation failed for file {file_path}: {e}"
                )
                raise

        # Validate directories through security manager
        for dir_path in all_dirs:
            try:
                validated_path = self.security_manager.validate_path(dir_path)
                # Check if it's actually a directory
                if not validated_path.is_dir():
                    raise ValueError(f"Path is not a directory: {dir_path}")
            except Exception as e:
                logger.error(
                    f"Security validation failed for directory {dir_path}: {e}"
                )
                raise

        logger.debug(
            f"Security validation passed for {len(all_files)} files and {len(all_dirs)} directories"
        )
        return routing

    def _create_validated_file_mappings(
        self, routing: ExplicitRouting
    ) -> Dict[str, List[str]]:
        """Create validated file mappings for each tool.

        Args:
            routing: Validated routing configuration

        Returns:
            Dictionary mapping tool names to file paths
        """
        validated_files: Dict[str, List[str]] = {
            "template": [],
            "code-interpreter": [],
            "file-search": [],
        }

        # Add files for each tool
        validated_files["template"].extend(routing.template_files)
        validated_files["code-interpreter"].extend(
            routing.code_interpreter_files
        )
        validated_files["file-search"].extend(routing.file_search_files)

        # Expand directories to individual files
        for dir_path in routing.template_dirs:
            validated_files["template"].extend(
                self._expand_directory(dir_path)
            )

        for dir_path in routing.code_interpreter_dirs:
            validated_files["code-interpreter"].extend(
                self._expand_directory(dir_path)
            )

        for dir_path in routing.file_search_dirs:
            validated_files["file-search"].extend(
                self._expand_directory(dir_path)
            )

        # Expand directory aliases to individual files
        for alias_name, dir_path in routing.template_dir_aliases:
            validated_files["template"].extend(
                self._expand_directory(dir_path)
            )

        for alias_name, dir_path in routing.code_interpreter_dir_aliases:
            validated_files["code-interpreter"].extend(
                self._expand_directory(dir_path)
            )

        for alias_name, dir_path in routing.file_search_dir_aliases:
            validated_files["file-search"].extend(
                self._expand_directory(dir_path)
            )

        # Remove duplicates while preserving order
        for tool in validated_files:
            validated_files[tool] = list(dict.fromkeys(validated_files[tool]))

        return validated_files

    def _expand_directory(self, dir_path: str) -> List[str]:
        """Expand directory to list of individual file paths.

        Args:
            dir_path: Directory path to expand

        Returns:
            List of file paths within the directory
        """
        try:
            path = Path(dir_path)
            if not path.exists() or not path.is_dir():
                logger.warning(
                    f"Directory not found or not a directory: {dir_path}"
                )
                return []

            files = []
            for file_path in path.iterdir():
                if file_path.is_file():
                    files.append(str(file_path))

            logger.debug(
                f"Expanded directory {dir_path} to {len(files)} files"
            )
            return files

        except Exception as e:
            logger.error(f"Failed to expand directory {dir_path}: {e}")
            return []

    def get_routing_summary(self, result: ProcessingResult) -> Dict[str, Any]:
        """Get a summary of the file routing configuration.

        Args:
            result: Processing result to summarize

        Returns:
            Dictionary with routing summary information
        """
        routing = result.routing

        summary = {
            "enabled_tools": list(result.enabled_tools),
            "file_counts": {
                "template": len(routing.template_files),
                "code_interpreter": len(routing.code_interpreter_files),
                "file_search": len(routing.file_search_files),
            },
            "directory_counts": {
                "template": len(routing.template_dirs),
                "code_interpreter": len(routing.code_interpreter_dirs),
                "file_search": len(routing.file_search_dirs),
            },
            "total_files": sum(
                len(files) for files in result.validated_files.values()
            ),
            "auto_enabled_feedback": result.auto_enabled_feedback,
        }

        return summary

    def validate_routing_consistency(
        self, routing: ExplicitRouting
    ) -> List[str]:
        """Validate routing configuration for consistency issues.

        Args:
            routing: Routing configuration to validate

        Returns:
            List of validation warnings/errors
        """
        issues = []

        # Check for files that don't exist
        all_files = (
            routing.template_files
            + routing.code_interpreter_files
            + routing.file_search_files
        )

        for file_path in all_files:
            if not Path(file_path).exists():
                issues.append(f"File not found: {file_path}")

        # Check for directories that don't exist
        all_dirs = (
            routing.template_dirs
            + routing.code_interpreter_dirs
            + routing.file_search_dirs
        )

        for dir_path in all_dirs:
            path = Path(dir_path)
            if not path.exists():
                issues.append(f"Directory not found: {dir_path}")
            elif not path.is_dir():
                issues.append(f"Path is not a directory: {dir_path}")

        # Check for duplicate files across different tools
        file_tool_mapping: Dict[str, str] = {}
        for tool, files in [
            ("template", routing.template_files),
            ("code-interpreter", routing.code_interpreter_files),
            ("file-search", routing.file_search_files),
        ]:
            for file_path in files:
                if file_path in file_tool_mapping:
                    issues.append(
                        f"File {file_path} is routed to multiple tools: {file_tool_mapping[file_path]} and {tool}"
                    )
                else:
                    file_tool_mapping[file_path] = tool

        return issues
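For orientation, here is a minimal usage sketch of the new ExplicitFileProcessor added in ostruct/cli/explicit_file_processor.py; it is not taken from the package documentation. The SecurityManager constructor arguments and the example file names are assumptions, while the processor methods, argument keys, and result fields mirror the code above.

# Minimal sketch of driving the processor directly (assumed setup, not from
# the package docs). SecurityManager's constructor arguments are an
# assumption; check ostruct/cli/security/security_manager.py for the real
# signature. The listed files are hypothetical and must exist within the
# security manager's allowed paths for validation to pass.
import asyncio

from ostruct.cli.explicit_file_processor import ExplicitFileProcessor
from ostruct.cli.security.security_manager import SecurityManager


async def main() -> None:
    security = SecurityManager(".")  # assumed: base directory for validation
    processor = ExplicitFileProcessor(security)

    # Keys mirror those read by _parse_file_routing_from_args above.
    args = {
        "code_interpreter_files": ["data.csv"],  # hypothetical -fc file
        "file_search_files": [],
        "template_files": ["config.yaml"],       # hypothetical -ft file
    }

    result = await processor.process_file_routing(args)
    print(sorted(result.enabled_tools))  # ['code-interpreter'] auto-enabled
    print(result.validated_files)        # tool name -> validated file paths


asyncio.run(main())

As _resolve_tools shows, routing a file to Code Interpreter or File Search is enough to enable that tool; no separate enable flag is required.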
@@ -0,0 +1,69 @@
"""Field utilities for Pydantic model creation."""

from typing import Any, Union

from pydantic import Field
from pydantic.fields import FieldInfo as FieldInfoType


def pattern(regex: str) -> Any:
    """Create a pattern constraint for string fields."""
    return Field(pattern=regex)


def min_length(length: int) -> Any:
    """Create a minimum length constraint for string fields."""
    return Field(min_length=length)


def max_length(length: int) -> Any:
    """Create a maximum length constraint for string fields."""
    return Field(max_length=length)


def ge(value: Union[int, float]) -> Any:
    """Create a greater-than-or-equal constraint for numeric fields."""
    return Field(ge=value)


def le(value: Union[int, float]) -> Any:
    """Create a less-than-or-equal constraint for numeric fields."""
    return Field(le=value)


def gt(value: Union[int, float]) -> Any:
    """Create a greater-than constraint for numeric fields."""
    return Field(gt=value)


def lt(value: Union[int, float]) -> Any:
    """Create a less-than constraint for numeric fields."""
    return Field(lt=value)


def multiple_of(value: Union[int, float]) -> Any:
    """Create a multiple-of constraint for numeric fields."""
    return Field(multiple_of=value)


def _create_field(**kwargs: Any) -> FieldInfoType:
    """Create a Pydantic field with the given constraints."""
    return Field(**kwargs)  # type: ignore[no-any-return]


def _get_type_with_constraints(
    base_type: type, constraints: dict, field_name: str = ""
) -> type:
    """Get a type with constraints applied.

    Args:
        base_type: The base Python type
        constraints: Dictionary of constraints to apply
        field_name: Name of the field (for error reporting)

    Returns:
        Type with constraints applied
    """
    # For now, just return the base type
    # This can be expanded in the future to create constrained types
    return base_type
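The constraint helpers in ostruct/cli/field_utils.py return plain pydantic Field(...) objects, so they can be attached directly as field defaults when building models. A short illustrative sketch follows; the Product model and its field names are invented for this example and are not part of the package.

# Illustrative only: Product and its fields are invented for this sketch.
from pydantic import BaseModel

from ostruct.cli.field_utils import ge, max_length, min_length


class Product(BaseModel):
    name: str = min_length(1)   # equivalent to Field(min_length=1)
    sku: str = max_length(32)   # equivalent to Field(max_length=32)
    price: float = ge(0)        # equivalent to Field(ge=0)


print(Product(name="widget", sku="W-1", price=9.99))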