ostruct-cli 0.8.29__py3-none-any.whl → 1.0.0__py3-none-any.whl
This diff compares the contents of two publicly released versions of the package as they appear in a supported public registry. It is provided for informational purposes only.
- ostruct/cli/__init__.py +3 -15
- ostruct/cli/attachment_processor.py +455 -0
- ostruct/cli/attachment_template_bridge.py +973 -0
- ostruct/cli/cli.py +157 -33
- ostruct/cli/click_options.py +775 -692
- ostruct/cli/code_interpreter.py +195 -12
- ostruct/cli/commands/__init__.py +0 -3
- ostruct/cli/commands/run.py +289 -62
- ostruct/cli/config.py +23 -22
- ostruct/cli/constants.py +89 -0
- ostruct/cli/errors.py +175 -5
- ostruct/cli/explicit_file_processor.py +0 -15
- ostruct/cli/file_info.py +97 -15
- ostruct/cli/file_list.py +43 -1
- ostruct/cli/file_search.py +68 -2
- ostruct/cli/help_json.py +235 -0
- ostruct/cli/mcp_integration.py +13 -16
- ostruct/cli/params.py +217 -0
- ostruct/cli/plan_assembly.py +335 -0
- ostruct/cli/plan_printing.py +385 -0
- ostruct/cli/progress_reporting.py +8 -56
- ostruct/cli/quick_ref_help.py +128 -0
- ostruct/cli/rich_config.py +299 -0
- ostruct/cli/runner.py +397 -190
- ostruct/cli/security/__init__.py +2 -0
- ostruct/cli/security/allowed_checker.py +41 -0
- ostruct/cli/security/normalization.py +13 -9
- ostruct/cli/security/security_manager.py +558 -17
- ostruct/cli/security/types.py +15 -0
- ostruct/cli/template_debug.py +283 -261
- ostruct/cli/template_debug_help.py +233 -142
- ostruct/cli/template_env.py +46 -5
- ostruct/cli/template_filters.py +415 -8
- ostruct/cli/template_processor.py +240 -619
- ostruct/cli/template_rendering.py +49 -73
- ostruct/cli/template_validation.py +2 -1
- ostruct/cli/token_validation.py +35 -15
- ostruct/cli/types.py +15 -19
- ostruct/cli/unicode_compat.py +283 -0
- ostruct/cli/upload_manager.py +448 -0
- ostruct/cli/validators.py +255 -54
- {ostruct_cli-0.8.29.dist-info → ostruct_cli-1.0.0.dist-info}/METADATA +230 -127
- ostruct_cli-1.0.0.dist-info/RECORD +80 -0
- ostruct/cli/commands/quick_ref.py +0 -54
- ostruct/cli/template_optimizer.py +0 -478
- ostruct_cli-0.8.29.dist-info/RECORD +0 -71
- {ostruct_cli-0.8.29.dist-info → ostruct_cli-1.0.0.dist-info}/LICENSE +0 -0
- {ostruct_cli-0.8.29.dist-info → ostruct_cli-1.0.0.dist-info}/WHEEL +0 -0
- {ostruct_cli-0.8.29.dist-info → ostruct_cli-1.0.0.dist-info}/entry_points.txt +0 -0
ostruct/cli/attachment_template_bridge.py
@@ -0,0 +1,973 @@
+"""Bridge between new attachment system and template processing.
+
+This module converts processed attachments into template context variables
+while maintaining compatibility with existing template patterns.
+"""
+
+import logging
+from pathlib import Path
+from typing import Any, Dict, Iterator, List, Optional, Tuple, Union
+
+from .attachment_processor import AttachmentSpec, ProcessedAttachments
+from .file_info import FileInfo, FileRoutingIntent
+from .file_list import FileInfoList
+from .security import SecurityManager
+from .template_schema import DotDict
+
+logger = logging.getLogger(__name__)
+
+# Template variable name constants
+UTILITY_VARIABLES = {
+    "files",
+    "file_count",
+    "has_files",
+}
+
+SYSTEM_CONFIG_VARIABLES = {
+    "current_model",
+    "stdin",
+    "web_search_enabled",
+    "code_interpreter_enabled",
+    "auto_download_enabled",
+    "code_interpreter_config",
+}
+
+
+class LazyLoadError(Exception):
+    """Exception raised during lazy loading operations."""
+
+    pass
+
+
+class LazyLoadSizeError(LazyLoadError):
+    """Exception raised when file exceeds size limits."""
+
+    pass
+
+
+class LazyFileContent:
+    """Enhanced lazy loading file content with configurable size limits and caching."""
+
+    def __init__(
+        self,
+        file_info: FileInfo,
+        max_size: Optional[int] = None,
+        encoding: str = "utf-8",
+        strict_mode: bool = False,
+    ):
+        """Initialize lazy file content.
+
+        Args:
+            file_info: FileInfo object for the file
+            max_size: Maximum file size in bytes (uses environment default if None)
+            encoding: Text encoding to use
+            strict_mode: If True, raise exceptions instead of returning fallback content
+        """
+        self.file_info = file_info
+        self.max_size = max_size or self._get_default_max_size()
+        self.encoding = encoding
+        self.strict_mode = strict_mode
+        self._content: Optional[str] = None
+        self._loaded = False
+        self._size_checked = False
+        self._actual_size: Optional[int] = None
+
+    @property
+    def name(self) -> str:
+        """Get the filename without loading content."""
+        return self.file_info.name
+
+    @property
+    def path(self) -> str:
+        """Get the file path without loading content."""
+        return self.file_info.path
+
+    @property
+    def content(self) -> str:
+        """Get file content, loading it if necessary (may raise in strict mode)."""
+        return self.load_safe()
+
+    @staticmethod
+    def _get_default_max_size() -> int:
+        """Get default max size from config or environment.
+
+        Returns:
+            Default maximum file size in bytes
+        """
+        import os
+
+        try:
+            return int(
+                os.getenv("OSTRUCT_TEMPLATE_FILE_LIMIT", "65536")
+            )  # 64KB default
+        except ValueError:
+            logger.warning(
+                "Invalid OSTRUCT_TEMPLATE_FILE_LIMIT value, using 64KB default"
+            )
+            return 65536
+
+    def check_size(self) -> bool:
+        """Check if file size is within limits.
+
+        Returns:
+            True if file size is acceptable
+
+        Raises:
+            LazyLoadError: If file cannot be accessed
+        """
+        if not self._size_checked:
+            try:
+                self._actual_size = (
+                    Path(self.file_info.abs_path).stat().st_size
+                )
+                self._size_checked = True
+            except OSError as e:
+                raise LazyLoadError(
+                    f"Cannot access file {self.file_info.path}: {e}"
+                )
+
+        return (
+            self._actual_size is not None
+            and self._actual_size <= self.max_size
+        )
+
+    @property
+    def actual_size(self) -> Optional[int]:
+        """Get the actual file size in bytes.
+
+        Returns:
+            File size in bytes, or None if not checked yet
+        """
+        if not self._size_checked:
+            try:
+                self.check_size()
+            except LazyLoadError:
+                return None
+        return self._actual_size
+
+    def __str__(self) -> str:
+        """Get file content, loading it if necessary.
+
+        Returns:
+            File content as string, or error message for oversized files
+        """
+        return self.load_safe()
+
+    def _load_content(self) -> None:
+        """Load file content with size checking.
+
+        Raises:
+            LazyLoadSizeError: If file exceeds size limits
+            LazyLoadError: If file cannot be loaded
+        """
+        try:
+            if not self.check_size():
+                error_msg = (
+                    f"File {self.file_info.path} ({self._actual_size:,} bytes) "
+                    f"exceeds size limit ({self.max_size:,} bytes)"
+                )
+                logger.warning(error_msg)
+                raise LazyLoadSizeError(error_msg)
+
+            # Use FileInfo's content property for actual loading
+            self._content = self.file_info.content
+            self._loaded = True
+            logger.debug(
+                f"Loaded content for {self.file_info.path} ({len(self._content)} chars)"
+            )
+
+        except LazyLoadSizeError:
+            # Re-raise size errors
+            raise
+        except Exception as e:
+            logger.error(
+                f"Failed to load content for {self.file_info.path}: {e}"
+            )
+            raise LazyLoadError(
+                f"Failed to load content for {self.file_info.path}: {e}"
+            )
+
+    def load_safe(
+        self, fallback_content: str = "[File too large or unavailable]"
+    ) -> str:
+        """Load content safely with fallback for oversized files.
+
+        Args:
+            fallback_content: Content to return if file cannot be loaded
+
+        Returns:
+            File content or fallback content
+
+        Raises:
+            LazyLoadSizeError: If file is too large and strict_mode is True
+            LazyLoadError: If file cannot be loaded and strict_mode is True
+        """
+        try:
+            if not self._loaded:
+                self._load_content()
+            return self._content or ""
+        except LazyLoadSizeError:
+            if self.strict_mode:
+                raise  # Re-raise the exception in strict mode
+            return f"[File too large: {self._actual_size:,} bytes > {self.max_size:,} bytes]"
+        except LazyLoadError as e:
+            if self.strict_mode:
+                raise  # Re-raise the exception in strict mode
+            return f"[Error: {e}]"
+
+    def preview(self, max_chars: int = 200) -> str:
+        """Get content preview without full loading.
+
+        Args:
+            max_chars: Maximum characters to return
+
+        Returns:
+            Preview of file content
+        """
+        if self._loaded:
+            return (self._content or "")[:max_chars]
+
+        try:
+            # Try to read just the preview amount
+            with open(
+                self.file_info.abs_path,
+                "r",
+                encoding=self.encoding,
+                errors="replace",
+            ) as f:
+                return f.read(max_chars)
+        except Exception as e:
+            return f"[Preview error: {e}]"
+
+    def __iter__(self) -> Iterator["LazyFileContent"]:
+        """Make LazyFileContent iterable by yielding itself.
+
+        This implements the file-sequence protocol, allowing single files
+        to be treated uniformly with file collections in templates.
+
+        Returns:
+            Iterator that yields this LazyFileContent instance
+        """
+        yield self
+
+    @property
+    def first(self) -> "LazyFileContent":
+        """Get the first file in the sequence (itself for single files).
+
+        This provides a uniform interface with FileInfoList.first,
+        allowing templates to use .first regardless of whether they're
+        dealing with a single file or a collection.
+
+        Returns:
+            This LazyFileContent instance
+        """
+        return self
+
+    @property
+    def is_collection(self) -> bool:
+        """Indicate whether this is a collection of files.
+
+        Returns:
+            False, since LazyFileContent represents a single file
+        """
+        return False
+
+
+class ValidationResult:
+    """Result of file size validation with errors and warnings."""
+
+    def __init__(self) -> None:
+        self.errors: List[str] = []
+        self.warnings: List[str] = []
+        self.total_size: int = 0
+
+    def add_error(self, message: str) -> None:
+        """Add an error message."""
+        self.errors.append(message)
+
+    def add_warning(self, message: str) -> None:
+        """Add a warning message."""
+        self.warnings.append(message)
+
+    def has_errors(self) -> bool:
+        """Check if validation has errors."""
+        return len(self.errors) > 0
+
+    def has_warnings(self) -> bool:
+        """Check if validation has warnings."""
+        return len(self.warnings) > 0
+
+    def is_valid(self) -> bool:
+        """Check if validation passed (no errors)."""
+        return not self.has_errors()
+
+
+class FileSizeValidator:
+    """Validates file sizes before processing to prevent memory exhaustion."""
+
+    def __init__(
+        self,
+        max_individual: Optional[int] = None,
+        max_total: Optional[int] = None,
+    ):
+        """Initialize file size validator.
+
+        Args:
+            max_individual: Maximum size per individual file in bytes
+            max_total: Maximum total size for all files in bytes
+        """
+        import os
+
+        # Use environment variables for defaults if not specified
+        default_individual = int(
+            os.getenv("OSTRUCT_TEMPLATE_FILE_LIMIT", "65536")
+        )  # 64KB
+        default_total = int(
+            os.getenv("OSTRUCT_TEMPLATE_TOTAL_LIMIT", "1048576")
+        )  # 1MB
+
+        self.max_individual = max_individual or default_individual
+        self.max_total = max_total or default_total
+
+    def validate_file_list(self, files: List[Path]) -> ValidationResult:
+        """Validate list of files against size limits.
+
+        Args:
+            files: List of file paths to validate
+
+        Returns:
+            ValidationResult with errors, warnings, and total size
+        """
+        results = ValidationResult()
+        total_size = 0
+
+        for file_path in files:
+            try:
+                size = file_path.stat().st_size
+                total_size += size
+
+                if size > self.max_individual:
+                    results.add_warning(
+                        f"File {file_path} ({size:,} bytes) exceeds individual limit "
+                        f"({self.max_individual:,} bytes) - will use lazy loading"
+                    )
+
+                if total_size > self.max_total:
+                    results.add_error(
+                        f"Total file size ({total_size:,} bytes) exceeds limit "
+                        f"({self.max_total:,} bytes) after processing {file_path}"
+                    )
+                    break
+
+            except OSError as e:
+                results.add_error(f"Cannot access {file_path}: {e}")
+
+        results.total_size = total_size
+        return results
+
+    def validate_single_file(self, file_path: Path) -> ValidationResult:
+        """Validate a single file against size limits.
+
+        Args:
+            file_path: Path to file to validate
+
+        Returns:
+            ValidationResult for the single file
+        """
+        return self.validate_file_list([file_path])
+
+
+class ProgressiveLoader:
+    """Loads files progressively based on size and usage priority."""
+
+    def __init__(self, validator: FileSizeValidator):
+        """Initialize progressive loader.
+
+        Args:
+            validator: File size validator to use
+        """
+        self.validator = validator
+        self._load_queue: List[Tuple[int, LazyFileContent]] = []
+        self._loaded_count = 0
+
+    def create_lazy_content(
+        self, file_info: FileInfo, priority: int = 0, strict_mode: bool = False
+    ) -> LazyFileContent:
+        """Create lazy content with loading priority.
+
+        Args:
+            file_info: FileInfo for the file
+            priority: Loading priority (higher = loaded first)
+            strict_mode: If True, raise exceptions instead of returning fallback content
+
+        Returns:
+            LazyFileContent instance
+        """
+        lazy_content = LazyFileContent(
+            file_info,
+            max_size=self.validator.max_individual,
+            strict_mode=strict_mode,
+        )
+
+        # Pre-check size and determine loading strategy
+        try:
+            if lazy_content.check_size():
+                # Small file - can load immediately if requested
+                self._load_queue.append((priority, lazy_content))
+                logger.debug(
+                    f"Added {file_info.path} to load queue (priority {priority})"
+                )
+            else:
+                # Large file - will always use lazy loading
+                logger.info(
+                    f"Large file {file_info.path} will use lazy loading"
+                )
+        except LazyLoadError as e:
+            logger.warning(f"Cannot check size of {file_info.path}: {e}")
+
+        return lazy_content
+
+    def preload_high_priority(self, max_files: int = 5) -> int:
+        """Preload high-priority small files.
+
+        Args:
+            max_files: Maximum number of files to preload
+
+        Returns:
+            Number of files successfully preloaded
+        """
+        # Sort by priority (highest first)
+        self._load_queue.sort(key=lambda x: x[0], reverse=True)
+
+        preloaded = 0
+        for i, (priority, lazy_content) in enumerate(
+            self._load_queue[:max_files]
+        ):
+            try:
+                # In strict mode, don't preload content - it will be loaded on demand
+                # This prevents dry-run from failing when only metadata is accessed
+                if lazy_content.strict_mode:
+                    logger.debug(
+                        f"Skipping preload for {lazy_content.file_info.path} (strict mode)"
+                    )
+                    continue
+
+                # Trigger loading by accessing content safely
+                _ = lazy_content.load_safe()
+                if lazy_content._loaded:
+                    preloaded += 1
+                    logger.debug(f"Preloaded {lazy_content.file_info.path}")
+            except Exception as e:
+                logger.debug(
+                    f"Failed to preload {lazy_content.file_info.path}: {e}"
+                )
+
+        self._loaded_count = preloaded
+        return preloaded
+
+    def get_load_summary(self) -> Dict[str, Any]:
+        """Get summary of loading operations.
+
+        Returns:
+            Dictionary with loading statistics
+        """
+        return {
+            "total_queued": len(self._load_queue),
+            "preloaded": self._loaded_count,
+            "pending": len(self._load_queue) - self._loaded_count,
+            "max_individual_size": self.validator.max_individual,
+            "max_total_size": self.validator.max_total,
+        }
+
+
+class AttachmentTemplateContext:
+    """Helper class for building template context from attachments."""
+
+    size_validator: Optional[FileSizeValidator]
+    progressive_loader: Optional[ProgressiveLoader]
+
+    def __init__(
+        self,
+        security_manager: SecurityManager,
+        use_progressive_loading: bool = True,
+    ):
+        """Initialize context builder.
+
+        Args:
+            security_manager: Security manager for file validation
+            use_progressive_loading: Enable progressive loading with size validation
+        """
+        self.security_manager = security_manager
+        self.use_progressive_loading = use_progressive_loading
+
+        # Initialize size validator and progressive loader if enabled
+        if self.use_progressive_loading:
+            self.size_validator = FileSizeValidator()
+            self.progressive_loader = ProgressiveLoader(self.size_validator)
+        else:
+            self.size_validator = None
+            self.progressive_loader = None
+
+    def build_template_context(
+        self,
+        processed_attachments: ProcessedAttachments,
+        base_context: Optional[Dict[str, Any]] = None,
+        strict_mode: bool = False,
+    ) -> Dict[str, Any]:
+        """Build template context from processed attachments.
+
+        Args:
+            processed_attachments: Processed attachment specifications
+            base_context: Existing context to extend (optional)
+            strict_mode: If True, raise exceptions for file loading errors
+
+        Returns:
+            Template context dictionary with attachment-derived variables
+        """
+        logger.debug("Building template context from attachments")
+
+        context = base_context.copy() if base_context else {}
+
+        # Add individual alias-based variables
+        for alias, spec in processed_attachments.alias_map.items():
+            context[alias] = self._create_attachment_variable(
+                spec, strict_mode=strict_mode
+            )
+
+        # Add utility variables for template iteration
+        all_files = self._collect_all_files(processed_attachments)
+
+        context["files"] = all_files
+        context["file_count"] = len(all_files)
+        context["has_files"] = len(all_files) > 0
+
+        # Note: Legacy compatibility removed per CLI redesign plan (breaking change)
+
+        # Perform file size validation if progressive loading is enabled
+        validation_result = None
+        if self.use_progressive_loading and self.size_validator:
+            # Collect all file paths for validation
+            all_file_paths = []
+            for file_info in all_files:
+                all_file_paths.append(Path(file_info.abs_path))
+
+            validation_result = self.size_validator.validate_file_list(
+                all_file_paths
+            )
+
+            # Log validation results
+            if validation_result.has_errors():
+                for error in validation_result.errors:
+                    logger.error(f"File size validation error: {error}")
+
+            if validation_result.has_warnings():
+                for warning in validation_result.warnings:
+                    logger.warning(f"File size validation warning: {warning}")
+
+            # Perform progressive loading for high-priority files
+            if self.progressive_loader:
+                preloaded_count = (
+                    self.progressive_loader.preload_high_priority()
+                )
+                logger.debug(
+                    f"Preloaded {preloaded_count} high-priority files"
+                )
+
+        # Add attachment metadata
+        metadata = {
+            "aliases": list(processed_attachments.alias_map.keys()),
+            "template_file_count": len(processed_attachments.template_files),
+            "template_dir_count": len(processed_attachments.template_dirs),
+            "ci_file_count": len(processed_attachments.ci_files),
+            "ci_dir_count": len(processed_attachments.ci_dirs),
+            "fs_file_count": len(processed_attachments.fs_files),
+            "fs_dir_count": len(processed_attachments.fs_dirs),
+            "progressive_loading_enabled": self.use_progressive_loading,
+        }
+
+        # Add size validation metadata if available
+        if validation_result:
+            metadata.update(
+                {
+                    "total_file_size": validation_result.total_size,
+                    "size_validation_errors": validation_result.errors,
+                    "size_validation_warnings": validation_result.warnings,
+                    "size_validation_passed": validation_result.is_valid(),
+                }
+            )
+
+        # Add progressive loading summary if available
+        if self.progressive_loader:
+            metadata.update(
+                {"loading_summary": self.progressive_loader.get_load_summary()}
+            )
+
+        context["_attachments"] = metadata
+
+        logger.debug(
+            f"Built template context with {len(processed_attachments.alias_map)} aliases, "
+            f"{len(all_files)} total files, progressive loading: {self.use_progressive_loading}"
+        )
+
+        return context
+
+    def debug_attachment_context(
+        self,
+        context: Dict[str, Any],
+        processed_attachments: ProcessedAttachments,
+        show_detailed: bool = False,
+    ) -> None:
+        """Debug template context created from attachments.
+
+        Args:
+            context: Template context to debug
+            processed_attachments: Source attachment specifications
+            show_detailed: Whether to show detailed debugging output
+        """
+        import click
+
+        click.echo("🔗 Attachment-Based Template Context Debug:", err=True)
+        click.echo("=" * 60, err=True)
+
+        # Show attachment summary
+        click.echo("📎 Attachment Summary:", err=True)
+        for alias, spec in processed_attachments.alias_map.items():
+            targets_str = ", ".join(sorted(spec.targets))
+            path_type = "directory" if Path(spec.path).is_dir() else "file"
+            click.echo(f" {alias}: {path_type} → {targets_str}", err=True)
+            if show_detailed:
+                click.echo(f" Path: {spec.path}", err=True)
+                if spec.recursive:
+                    click.echo(f" Recursive: {spec.recursive}", err=True)
+                if spec.pattern:
+                    click.echo(f" Pattern: {spec.pattern}", err=True)
+
+        # Show template variable mapping
+        click.echo("\n📝 Template Variables Created:", err=True)
+        attachment_vars = []
+        utility_vars = []
+        user_defined_vars = []
+        system_config_vars = []
+
+        for key, value in context.items():
+            if key in processed_attachments.alias_map:
+                attachment_vars.append(key)
+            elif key.startswith("_") or key in UTILITY_VARIABLES:
+                utility_vars.append(key)
+            elif key in SYSTEM_CONFIG_VARIABLES:
+                system_config_vars.append(key)
+            elif isinstance(value, LazyFileContent):
+                user_defined_vars.append(key)
+            else:
+                user_defined_vars.append(key)
+
+        if attachment_vars:
+            click.echo(" Attachment aliases:", err=True)
+            for var in sorted(attachment_vars):
+                value = context[var]
+                if isinstance(value, LazyFileContent):
+                    # Show user-friendly file information instead of class name
+                    try:
+                        file_size = value.actual_size or 0
+                        if file_size > 0:
+                            size_str = f"{file_size:,} bytes"
+                        else:
+                            size_str = "unknown size"
+                        click.echo(
+                            f" {var}: file {value.name} ({size_str})",
+                            err=True,
+                        )
+                    except Exception:
+                        click.echo(
+                            f" {var}: file {getattr(value, 'name', 'unknown')}",
+                            err=True,
+                        )
+                elif hasattr(value, "__len__"):
+                    # FileInfoList or similar collections
+                    try:
+                        count = len(value)
+                        if count == 1:
+                            click.echo(f" {var}: 1 file", err=True)
+                        else:
+                            click.echo(f" {var}: {count} files", err=True)
+                    except Exception:
+                        click.echo(f" {var}: file collection", err=True)
+                else:
+                    # Fallback to type name for other cases
+                    var_type = type(value).__name__
+                    click.echo(f" {var}: {var_type}", err=True)
+
+        if user_defined_vars:
+            click.echo(" User-defined variables:", err=True)
+            for var in sorted(user_defined_vars):
+                if not var.startswith("_"):  # Skip internal variables
+                    if isinstance(context[var], (int, bool)):
+                        click.echo(f" {var}: {context[var]}", err=True)
+                    else:
+                        var_type = type(context[var]).__name__
+                        click.echo(f" {var}: {var_type}", err=True)
+
+        if utility_vars:
+            click.echo(" Utility variables:", err=True)
+            for var in sorted(utility_vars):
+                if isinstance(context[var], (int, bool)):
+                    click.echo(f" {var}: {context[var]}", err=True)
+                else:
+                    var_type = type(context[var]).__name__
+                    click.echo(f" {var}: {var_type}", err=True)
+
+        if system_config_vars:
+            click.echo(" System configuration variables:", err=True)
+            for var in sorted(system_config_vars):
+                if isinstance(context[var], (int, bool)):
+                    click.echo(f" {var}: {context[var]}", err=True)
+                else:
+                    var_type = type(context[var]).__name__
+                    click.echo(f" {var}: {var_type}", err=True)
+
+        # Show file statistics
+        total_files = context.get("file_count", 0)
+        click.echo("\n📊 File Statistics:", err=True)
+        click.echo(f" Total files: {total_files}", err=True)
+        click.echo(
+            f" Template files: {context.get('template_file_count', 0)}",
+            err=True,
+        )
+        click.echo(
+            f" Code interpreter files: {context.get('ci_file_count', 0)}",
+            err=True,
+        )
+        click.echo(
+            f" File search files: {context.get('fs_file_count', 0)}", err=True
+        )
+
+        if show_detailed and total_files > 0:
+            click.echo("\n📄 File Details:", err=True)
+            files_list = context.get("files", [])
+            for i, file_info in enumerate(
+                files_list[:10]
+            ):  # Show first 10 files
+                click.echo(
+                    f" {i + 1}. {file_info.path} ({file_info.routing_intent.value})",
+                    err=True,
+                )
+            if len(files_list) > 10:
+                click.echo(
+                    f" ... and {len(files_list) - 10} more files", err=True
+                )
+
+        click.echo("=" * 60, err=True)
+
+    def _create_attachment_variable(
+        self, spec: AttachmentSpec, strict_mode: bool = False
+    ) -> Union[LazyFileContent, FileInfoList, DotDict]:
+        """Create template variable for a single attachment.
+
+        Args:
+            spec: Attachment specification
+            strict_mode: If True, raise exceptions for file loading errors
+
+        Returns:
+            Template variable (file content, file list, or directory info)
+        """
+        path = Path(spec.path)
+
+        if path.is_file():
+            # Single file - create FileInfo and wrap in LazyFileContent
+            file_info = FileInfo.from_path(
+                str(path),
+                self.security_manager,
+                routing_type="template",
+                routing_intent=FileRoutingIntent.TEMPLATE_ONLY,
+                parent_alias=spec.alias,
+                relative_path=path.name,
+                base_path=str(path.parent),
+                from_collection=False,
+                attachment_type=spec.attachment_type,
+            )
+
+            # Use progressive loader if available, otherwise create LazyFileContent directly
+            if self.progressive_loader:
+                return self.progressive_loader.create_lazy_content(
+                    file_info, priority=1, strict_mode=strict_mode
+                )
+            else:
+                return LazyFileContent(file_info, strict_mode=strict_mode)
+
+        elif path.is_dir():
+            # Directory - create FileInfoList with file expansion
+            files = self._expand_directory(spec)
+            return FileInfoList(files)
+
+        else:
+            logger.warning(
+                f"Attachment path {spec.path} is neither file nor directory"
+            )
+            # Return empty DotDict for invalid paths
+            return DotDict(
+                {
+                    "path": str(spec.path),
+                    "error": f"Invalid path: {spec.path}",
+                    "content": f"[Invalid path: {spec.path}]",
+                }
+            )
+
+    def _expand_directory(self, spec: AttachmentSpec) -> List[FileInfo]:
+        """Expand directory attachment into list of files.
+
+        Args:
+            spec: Directory attachment specification
+
+        Returns:
+            List of FileInfo objects for files in directory
+        """
+        path = Path(spec.path)
+        files = []
+
+        try:
+            if spec.recursive:
+                if spec.pattern:
+                    file_paths = list(path.rglob(spec.pattern))
+                else:
+                    file_paths = [f for f in path.rglob("*") if f.is_file()]
+            else:
+                if spec.pattern:
+                    file_paths = list(path.glob(spec.pattern))
+                else:
+                    file_paths = [f for f in path.iterdir() if f.is_file()]
+
+            # Convert to FileInfo objects with security validation
+            for file_path in file_paths:
+                try:
+                    validated_path = (
+                        self.security_manager.validate_file_access(
+                            file_path,
+                            context=f"directory expansion {spec.alias}",
+                        )
+                    )
+                    file_info = FileInfo.from_path(
+                        str(validated_path),
+                        self.security_manager,
+                        routing_type="template",
+                        routing_intent=FileRoutingIntent.TEMPLATE_ONLY,
+                        parent_alias=spec.alias,
+                        relative_path=str(file_path.relative_to(path)),
+                        base_path=str(spec.path),
+                        from_collection=False,
+                        attachment_type=spec.attachment_type,
+                    )
+                    files.append(file_info)
+                except Exception as e:
+                    logger.warning(f"Skipping file {file_path}: {e}")
+
+            logger.debug(
+                f"Expanded directory {spec.path} to {len(files)} files "
+                f"(recursive={spec.recursive}, pattern={spec.pattern})"
+            )
+
+        except Exception as e:
+            logger.error(f"Error expanding directory {spec.path}: {e}")
+
+        return files
+
+    def _collect_all_files(
+        self, processed_attachments: ProcessedAttachments
+    ) -> FileInfoList:
+        """Collect all file attachments into a single list.
+
+        Args:
+            processed_attachments: Processed attachment specifications
+
+        Returns:
+            FileInfoList containing all files from all attachments
+        """
+        all_files = []
+
+        # Collect files from all attachment types
+        for spec in processed_attachments.template_files:
+            if Path(spec.path).is_file():
+                try:
+                    file_info = FileInfo.from_path(
+                        str(spec.path),
+                        self.security_manager,
+                        routing_type="template",
+                        routing_intent=FileRoutingIntent.TEMPLATE_ONLY,
+                        parent_alias=spec.collection_base_alias or spec.alias,
+                        relative_path=Path(spec.path).name,
+                        base_path=str(Path(spec.path).parent),
+                        from_collection=spec.from_collection,
+                        attachment_type=spec.attachment_type,
+                    )
+                    all_files.append(file_info)
+                except Exception as e:
+                    logger.warning(f"Could not add file {spec.path}: {e}")
+
+        # Expand directories and add their files
+        for spec in processed_attachments.template_dirs:
+            dir_files = self._expand_directory(spec)
+            all_files.extend(dir_files)
+
+        # Include CI and FS files for template access as well
+        for spec in processed_attachments.ci_files:
+            if Path(spec.path).is_file():
+                try:
+                    file_info = FileInfo.from_path(
+                        str(spec.path),
+                        self.security_manager,
+                        routing_type="template",
+                        routing_intent=FileRoutingIntent.CODE_INTERPRETER,
+                        parent_alias=spec.collection_base_alias or spec.alias,
+                        relative_path=Path(spec.path).name,
+                        base_path=str(Path(spec.path).parent),
+                        from_collection=spec.from_collection,
+                        attachment_type=spec.attachment_type,
+                    )
+                    all_files.append(file_info)
+                except Exception as e:
+                    logger.warning(f"Could not add CI file {spec.path}: {e}")
+
+        for spec in processed_attachments.fs_files:
+            if Path(spec.path).is_file():
+                try:
+                    file_info = FileInfo.from_path(
+                        str(spec.path),
+                        self.security_manager,
+                        routing_type="template",
+                        routing_intent=FileRoutingIntent.FILE_SEARCH,
+                        parent_alias=spec.collection_base_alias or spec.alias,
+                        relative_path=Path(spec.path).name,
+                        base_path=str(Path(spec.path).parent),
+                        from_collection=spec.from_collection,
+                        attachment_type=spec.attachment_type,
+                    )
+                    all_files.append(file_info)
+                except Exception as e:
+                    logger.warning(f"Could not add FS file {spec.path}: {e}")
+
+        return FileInfoList(all_files)
+
+
+def build_template_context_from_attachments(
+    processed_attachments: ProcessedAttachments,
+    security_manager: SecurityManager,
+    base_context: Optional[Dict[str, Any]] = None,
+    strict_mode: bool = False,
+) -> Dict[str, Any]:
+    """Build template context from processed attachments.
+
+    This is the main entry point for converting attachment specifications
+    into template context variables.
+
+    Args:
+        processed_attachments: Processed attachment specifications
+        security_manager: Security manager for file validation
+        base_context: Existing context to extend (optional)
+        strict_mode: If True, raise exceptions for file loading errors
+
+    Returns:
+        Template context dictionary
+    """
+    context_builder = AttachmentTemplateContext(security_manager)
+    return context_builder.build_template_context(
+        processed_attachments, base_context, strict_mode=strict_mode
+    )