pdd-cli 0.0.45__py3-none-any.whl → 0.0.118__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- pdd/__init__.py +40 -8
- pdd/agentic_bug.py +323 -0
- pdd/agentic_bug_orchestrator.py +497 -0
- pdd/agentic_change.py +231 -0
- pdd/agentic_change_orchestrator.py +526 -0
- pdd/agentic_common.py +598 -0
- pdd/agentic_crash.py +534 -0
- pdd/agentic_e2e_fix.py +319 -0
- pdd/agentic_e2e_fix_orchestrator.py +426 -0
- pdd/agentic_fix.py +1294 -0
- pdd/agentic_langtest.py +162 -0
- pdd/agentic_update.py +387 -0
- pdd/agentic_verify.py +183 -0
- pdd/architecture_sync.py +565 -0
- pdd/auth_service.py +210 -0
- pdd/auto_deps_main.py +71 -51
- pdd/auto_include.py +245 -5
- pdd/auto_update.py +125 -47
- pdd/bug_main.py +196 -23
- pdd/bug_to_unit_test.py +2 -0
- pdd/change_main.py +11 -4
- pdd/cli.py +22 -1181
- pdd/cmd_test_main.py +350 -150
- pdd/code_generator.py +60 -18
- pdd/code_generator_main.py +790 -57
- pdd/commands/__init__.py +48 -0
- pdd/commands/analysis.py +306 -0
- pdd/commands/auth.py +309 -0
- pdd/commands/connect.py +290 -0
- pdd/commands/fix.py +163 -0
- pdd/commands/generate.py +257 -0
- pdd/commands/maintenance.py +175 -0
- pdd/commands/misc.py +87 -0
- pdd/commands/modify.py +256 -0
- pdd/commands/report.py +144 -0
- pdd/commands/sessions.py +284 -0
- pdd/commands/templates.py +215 -0
- pdd/commands/utility.py +110 -0
- pdd/config_resolution.py +58 -0
- pdd/conflicts_main.py +8 -3
- pdd/construct_paths.py +589 -111
- pdd/context_generator.py +10 -2
- pdd/context_generator_main.py +175 -76
- pdd/continue_generation.py +53 -10
- pdd/core/__init__.py +33 -0
- pdd/core/cli.py +527 -0
- pdd/core/cloud.py +237 -0
- pdd/core/dump.py +554 -0
- pdd/core/errors.py +67 -0
- pdd/core/remote_session.py +61 -0
- pdd/core/utils.py +90 -0
- pdd/crash_main.py +262 -33
- pdd/data/language_format.csv +71 -63
- pdd/data/llm_model.csv +20 -18
- pdd/detect_change_main.py +5 -4
- pdd/docs/prompting_guide.md +864 -0
- pdd/docs/whitepaper_with_benchmarks/data_and_functions/benchmark_analysis.py +495 -0
- pdd/docs/whitepaper_with_benchmarks/data_and_functions/creation_compare.py +528 -0
- pdd/fix_code_loop.py +523 -95
- pdd/fix_code_module_errors.py +6 -2
- pdd/fix_error_loop.py +491 -92
- pdd/fix_errors_from_unit_tests.py +4 -3
- pdd/fix_main.py +278 -21
- pdd/fix_verification_errors.py +12 -100
- pdd/fix_verification_errors_loop.py +529 -286
- pdd/fix_verification_main.py +294 -89
- pdd/frontend/dist/assets/index-B5DZHykP.css +1 -0
- pdd/frontend/dist/assets/index-DQ3wkeQ2.js +449 -0
- pdd/frontend/dist/index.html +376 -0
- pdd/frontend/dist/logo.svg +33 -0
- pdd/generate_output_paths.py +139 -15
- pdd/generate_test.py +218 -146
- pdd/get_comment.py +19 -44
- pdd/get_extension.py +8 -9
- pdd/get_jwt_token.py +318 -22
- pdd/get_language.py +8 -7
- pdd/get_run_command.py +75 -0
- pdd/get_test_command.py +68 -0
- pdd/git_update.py +70 -19
- pdd/incremental_code_generator.py +2 -2
- pdd/insert_includes.py +13 -4
- pdd/llm_invoke.py +1711 -181
- pdd/load_prompt_template.py +19 -12
- pdd/path_resolution.py +140 -0
- pdd/pdd_completion.fish +25 -2
- pdd/pdd_completion.sh +30 -4
- pdd/pdd_completion.zsh +79 -4
- pdd/postprocess.py +14 -4
- pdd/preprocess.py +293 -24
- pdd/preprocess_main.py +41 -6
- pdd/prompts/agentic_bug_step10_pr_LLM.prompt +182 -0
- pdd/prompts/agentic_bug_step1_duplicate_LLM.prompt +73 -0
- pdd/prompts/agentic_bug_step2_docs_LLM.prompt +129 -0
- pdd/prompts/agentic_bug_step3_triage_LLM.prompt +95 -0
- pdd/prompts/agentic_bug_step4_reproduce_LLM.prompt +97 -0
- pdd/prompts/agentic_bug_step5_root_cause_LLM.prompt +123 -0
- pdd/prompts/agentic_bug_step6_test_plan_LLM.prompt +107 -0
- pdd/prompts/agentic_bug_step7_generate_LLM.prompt +172 -0
- pdd/prompts/agentic_bug_step8_verify_LLM.prompt +119 -0
- pdd/prompts/agentic_bug_step9_e2e_test_LLM.prompt +289 -0
- pdd/prompts/agentic_change_step10_identify_issues_LLM.prompt +1006 -0
- pdd/prompts/agentic_change_step11_fix_issues_LLM.prompt +984 -0
- pdd/prompts/agentic_change_step12_create_pr_LLM.prompt +131 -0
- pdd/prompts/agentic_change_step1_duplicate_LLM.prompt +73 -0
- pdd/prompts/agentic_change_step2_docs_LLM.prompt +101 -0
- pdd/prompts/agentic_change_step3_research_LLM.prompt +126 -0
- pdd/prompts/agentic_change_step4_clarify_LLM.prompt +164 -0
- pdd/prompts/agentic_change_step5_docs_change_LLM.prompt +981 -0
- pdd/prompts/agentic_change_step6_devunits_LLM.prompt +1005 -0
- pdd/prompts/agentic_change_step7_architecture_LLM.prompt +1044 -0
- pdd/prompts/agentic_change_step8_analyze_LLM.prompt +1027 -0
- pdd/prompts/agentic_change_step9_implement_LLM.prompt +1077 -0
- pdd/prompts/agentic_crash_explore_LLM.prompt +49 -0
- pdd/prompts/agentic_e2e_fix_step1_unit_tests_LLM.prompt +90 -0
- pdd/prompts/agentic_e2e_fix_step2_e2e_tests_LLM.prompt +91 -0
- pdd/prompts/agentic_e2e_fix_step3_root_cause_LLM.prompt +89 -0
- pdd/prompts/agentic_e2e_fix_step4_fix_e2e_tests_LLM.prompt +96 -0
- pdd/prompts/agentic_e2e_fix_step5_identify_devunits_LLM.prompt +91 -0
- pdd/prompts/agentic_e2e_fix_step6_create_unit_tests_LLM.prompt +106 -0
- pdd/prompts/agentic_e2e_fix_step7_verify_tests_LLM.prompt +116 -0
- pdd/prompts/agentic_e2e_fix_step8_run_pdd_fix_LLM.prompt +120 -0
- pdd/prompts/agentic_e2e_fix_step9_verify_all_LLM.prompt +146 -0
- pdd/prompts/agentic_fix_explore_LLM.prompt +45 -0
- pdd/prompts/agentic_fix_harvest_only_LLM.prompt +48 -0
- pdd/prompts/agentic_fix_primary_LLM.prompt +85 -0
- pdd/prompts/agentic_update_LLM.prompt +925 -0
- pdd/prompts/agentic_verify_explore_LLM.prompt +45 -0
- pdd/prompts/auto_include_LLM.prompt +122 -905
- pdd/prompts/change_LLM.prompt +3093 -1
- pdd/prompts/detect_change_LLM.prompt +686 -27
- pdd/prompts/example_generator_LLM.prompt +22 -1
- pdd/prompts/extract_code_LLM.prompt +5 -1
- pdd/prompts/extract_program_code_fix_LLM.prompt +7 -1
- pdd/prompts/extract_prompt_update_LLM.prompt +7 -8
- pdd/prompts/extract_promptline_LLM.prompt +17 -11
- pdd/prompts/find_verification_errors_LLM.prompt +6 -0
- pdd/prompts/fix_code_module_errors_LLM.prompt +12 -2
- pdd/prompts/fix_errors_from_unit_tests_LLM.prompt +9 -0
- pdd/prompts/fix_verification_errors_LLM.prompt +22 -0
- pdd/prompts/generate_test_LLM.prompt +41 -7
- pdd/prompts/generate_test_from_example_LLM.prompt +115 -0
- pdd/prompts/increase_tests_LLM.prompt +1 -5
- pdd/prompts/insert_includes_LLM.prompt +316 -186
- pdd/prompts/prompt_code_diff_LLM.prompt +119 -0
- pdd/prompts/prompt_diff_LLM.prompt +82 -0
- pdd/prompts/trace_LLM.prompt +25 -22
- pdd/prompts/unfinished_prompt_LLM.prompt +85 -1
- pdd/prompts/update_prompt_LLM.prompt +22 -1
- pdd/pytest_output.py +127 -12
- pdd/remote_session.py +876 -0
- pdd/render_mermaid.py +236 -0
- pdd/server/__init__.py +52 -0
- pdd/server/app.py +335 -0
- pdd/server/click_executor.py +587 -0
- pdd/server/executor.py +338 -0
- pdd/server/jobs.py +661 -0
- pdd/server/models.py +241 -0
- pdd/server/routes/__init__.py +31 -0
- pdd/server/routes/architecture.py +451 -0
- pdd/server/routes/auth.py +364 -0
- pdd/server/routes/commands.py +929 -0
- pdd/server/routes/config.py +42 -0
- pdd/server/routes/files.py +603 -0
- pdd/server/routes/prompts.py +1322 -0
- pdd/server/routes/websocket.py +473 -0
- pdd/server/security.py +243 -0
- pdd/server/terminal_spawner.py +209 -0
- pdd/server/token_counter.py +222 -0
- pdd/setup_tool.py +648 -0
- pdd/simple_math.py +2 -0
- pdd/split_main.py +3 -2
- pdd/summarize_directory.py +237 -195
- pdd/sync_animation.py +8 -4
- pdd/sync_determine_operation.py +839 -112
- pdd/sync_main.py +351 -57
- pdd/sync_orchestration.py +1400 -756
- pdd/sync_tui.py +848 -0
- pdd/template_expander.py +161 -0
- pdd/template_registry.py +264 -0
- pdd/templates/architecture/architecture_json.prompt +237 -0
- pdd/templates/generic/generate_prompt.prompt +174 -0
- pdd/trace.py +168 -12
- pdd/trace_main.py +4 -3
- pdd/track_cost.py +140 -63
- pdd/unfinished_prompt.py +51 -4
- pdd/update_main.py +567 -67
- pdd/update_model_costs.py +2 -2
- pdd/update_prompt.py +19 -4
- {pdd_cli-0.0.45.dist-info → pdd_cli-0.0.118.dist-info}/METADATA +29 -11
- pdd_cli-0.0.118.dist-info/RECORD +227 -0
- {pdd_cli-0.0.45.dist-info → pdd_cli-0.0.118.dist-info}/licenses/LICENSE +1 -1
- pdd_cli-0.0.45.dist-info/RECORD +0 -116
- {pdd_cli-0.0.45.dist-info → pdd_cli-0.0.118.dist-info}/WHEEL +0 -0
- {pdd_cli-0.0.45.dist-info → pdd_cli-0.0.118.dist-info}/entry_points.txt +0 -0
- {pdd_cli-0.0.45.dist-info → pdd_cli-0.0.118.dist-info}/top_level.txt +0 -0
|
@@ -0,0 +1,42 @@
|
|
|
1
|
+
"""Configuration routes for PDD Server.
|
|
2
|
+
|
|
3
|
+
Provides endpoints for server configuration and environment information.
|
|
4
|
+
"""
|
|
5
|
+
from __future__ import annotations
|
|
6
|
+
|
|
7
|
+
from fastapi import APIRouter
|
|
8
|
+
from pydantic import BaseModel
|
|
9
|
+
|
|
10
|
+
from pdd.core.cloud import CloudConfig
|
|
11
|
+
|
|
12
|
+
|
|
13
|
+
# Router exposing read-only server-configuration endpoints under /api/v1/config.
router = APIRouter(prefix="/api/v1/config", tags=["config"])
|
|
14
|
+
|
|
15
|
+
|
|
16
|
+
class CloudUrlResponse(BaseModel):
    """Response model for cloud URL."""

    # Base URL of the cloud functions backend (as resolved by CloudConfig).
    cloud_url: str
    # Deployment environment name, e.g. "production" or "staging".
    environment: str
|
|
21
|
+
|
|
22
|
+
|
|
23
|
+
@router.get("/cloud-url", response_model=CloudUrlResponse)
async def get_cloud_url() -> CloudUrlResponse:
    """
    Report the cloud functions URL this server is configured to use.

    Keeping the frontend on the same cloud URL as the CLI prevents
    environment mismatches (staging vs production).

    Returns:
        CloudUrlResponse with cloud_url and environment.
    """
    import os

    # Environment name comes from PDD_ENV, defaulting to "production".
    return CloudUrlResponse(
        cloud_url=CloudConfig.get_base_url(),
        environment=os.environ.get("PDD_ENV", "production"),
    )
|
|
@@ -0,0 +1,603 @@
|
|
|
1
|
+
"""
|
|
2
|
+
REST API endpoints for file operations.
|
|
3
|
+
|
|
4
|
+
Provides endpoints for browsing, reading, and writing files in the project
|
|
5
|
+
directory with proper security validation.
|
|
6
|
+
"""
|
|
7
|
+
|
|
8
|
+
from __future__ import annotations
|
|
9
|
+
|
|
10
|
+
import base64
|
|
11
|
+
import hashlib
|
|
12
|
+
from datetime import datetime
|
|
13
|
+
from pathlib import Path
|
|
14
|
+
from typing import Annotated, List, Literal, Optional
|
|
15
|
+
|
|
16
|
+
from fastapi import APIRouter, Depends, HTTPException, Query
|
|
17
|
+
|
|
18
|
+
# Prefer rich's Console for styled output; fall back to a minimal shim
# so `console.print(...)` keeps working when rich is not installed.
try:
    from rich.console import Console
    console = Console()
except ImportError:
    class Console:
        def print(self, *args, **kwargs):
            # Drop rich-only keyword arguments (style, markup, ...) and
            # delegate to the built-in print.
            import builtins
            builtins.print(*args)
    console = Console()
|
|
27
|
+
|
|
28
|
+
from ..models import FileContent, FileMetadata, FileTreeNode, WriteFileRequest, WriteResult
|
|
29
|
+
from ..security import PathValidator, SecurityError
|
|
30
|
+
|
|
31
|
+
# Binary file extensions: files with these suffixes are always served as base64.
BINARY_EXTENSIONS = {
    ".png", ".jpg", ".jpeg", ".gif", ".bmp", ".ico", ".webp",
    ".pdf", ".doc", ".docx", ".xls", ".xlsx", ".ppt", ".pptx",
    ".zip", ".tar", ".gz", ".rar", ".7z",
    ".exe", ".dll", ".so", ".dylib",
    ".pyc", ".pyo", ".class",
    ".mp3", ".mp4", ".wav", ".avi", ".mov",
    ".ttf", ".otf", ".woff", ".woff2",
}

# Default chunk size for large files (bytes per chunk in /content responses).
DEFAULT_CHUNK_SIZE = 100000

router = APIRouter(prefix="/api/v1/files", tags=["files"])

# Dependency injection placeholder - will be overridden by app
# via set_path_validator() during startup.
_path_validator: Optional[PathValidator] = None
|
|
49
|
+
|
|
50
|
+
|
|
51
|
+
def get_path_validator() -> PathValidator:
    """Dependency returning the configured PathValidator.

    Raises:
        RuntimeError: if set_path_validator() has not been called yet.
    """
    validator = _path_validator
    if validator is None:
        raise RuntimeError("PathValidator not configured")
    return validator
|
|
56
|
+
|
|
57
|
+
|
|
58
|
+
def set_path_validator(validator: PathValidator) -> None:
    """Install the PathValidator used by all file-route dependencies.

    Called once by the application at startup before routes are served.
    """
    global _path_validator
    _path_validator = validator
|
|
62
|
+
|
|
63
|
+
|
|
64
|
+
def _is_binary_file(path: Path) -> bool:
    """Heuristically decide whether *path* holds binary data.

    A well-known binary extension short-circuits to True; otherwise the
    first 8 KiB are sniffed for NUL bytes, a common binary marker.
    """
    if path.suffix.lower() in BINARY_EXTENSIONS:
        return True
    try:
        with open(path, "rb") as handle:
            sample = handle.read(8192)
    except Exception:
        # Unreadable file: fall through and report "not binary".
        return False
    return b"\x00" in sample
|
|
77
|
+
|
|
78
|
+
|
|
79
|
+
def _build_file_tree(
    path: Path,
    project_root: Path,
    depth: int,
    current_depth: int = 0
) -> Optional[FileTreeNode]:
    """Recursively build a file tree structure.

    Args:
        path: Absolute path of the entry to describe.
        project_root: Root used to compute relative paths for the response.
        depth: Maximum recursion depth; children beyond it are not expanded.
        current_depth: Depth of *path* relative to the starting node.

    Returns:
        A FileTreeNode, or None when the entry cannot be stat'ed
        (e.g. a broken symlink), signalling the caller to skip it.
    """
    relative_path = path.relative_to(project_root)

    # stat() follows symlinks; a broken symlink (or otherwise inaccessible
    # entry) raises here and the node is dropped entirely.
    try:
        stat_info = path.stat()
    except (FileNotFoundError, OSError):
        # Broken symlink or inaccessible file - skip it
        return None

    if path.is_dir():
        # children stays None when the depth limit stops expansion, which
        # lets clients distinguish "not expanded" from "empty directory".
        children = None
        if current_depth < depth:
            try:
                # Directories first, then case-insensitive name order.
                entries = sorted(path.iterdir(), key=lambda p: (not p.is_dir(), p.name.lower()))
                children = [
                    node for node in (
                        _build_file_tree(entry, project_root, depth, current_depth + 1)
                        for entry in entries
                        if not entry.name.startswith(".")  # Skip hidden files
                    )
                    if node is not None  # Skip broken symlinks
                ]
            except PermissionError:
                # Unreadable directory: present it as empty rather than failing.
                children = []

        return FileTreeNode(
            name=path.name,
            path=str(relative_path),
            type="directory",
            children=children,
            mtime=datetime.fromtimestamp(stat_info.st_mtime),
        )
    else:
        return FileTreeNode(
            name=path.name,
            path=str(relative_path),
            type="file",
            size=stat_info.st_size,
            mtime=datetime.fromtimestamp(stat_info.st_mtime),
        )
|
|
126
|
+
|
|
127
|
+
|
|
128
|
+
@router.get("/tree", response_model=FileTreeNode)
async def get_file_tree(
    path: Annotated[str, Query(description="Path relative to project root")] = "",
    depth: Annotated[int, Query(description="Maximum recursion depth", ge=1, le=10)] = 3,
    validator: PathValidator = Depends(get_path_validator),
):
    """
    Get directory structure as a tree.

    Returns metadata only, not file contents.
    """
    try:
        # An empty path means "start at the project root".
        root = validator.validate(path) if path else validator.project_root

        if not root.exists():
            raise HTTPException(status_code=404, detail=f"Path not found: {path}")

        if not root.is_dir():
            raise HTTPException(status_code=400, detail=f"Not a directory: {path}")

        return _build_file_tree(root, validator.project_root, depth)

    except SecurityError as e:
        # Path escaped the project root (or failed validation): forbidden.
        raise HTTPException(status_code=403, detail=e.message)
|
|
155
|
+
|
|
156
|
+
|
|
157
|
+
@router.get("/content", response_model=FileContent)
async def get_file_content(
    path: Annotated[str, Query(description="File path relative to project root")],
    encoding: Annotated[Literal["utf-8", "base64"], Query(description="Content encoding")] = "utf-8",
    chunk: Annotated[Optional[int], Query(description="Chunk index for large files", ge=0)] = None,
    chunk_size: Annotated[int, Query(description="Chunk size in bytes")] = DEFAULT_CHUNK_SIZE,
    validator: PathValidator = Depends(get_path_validator),
):
    """
    Read file content.

    Binary files are returned as base64. Large files support chunked responses.
    Includes SHA-256 checksum for verification.

    NOTE(review): the checksum covers only the bytes returned by this
    response (a single chunk when ``chunk`` is given), not the whole file.
    """
    try:
        abs_path = validator.validate(path)

        if not abs_path.exists():
            raise HTTPException(status_code=404, detail=f"File not found: {path}")

        if abs_path.is_dir():
            raise HTTPException(status_code=400, detail=f"Cannot read directory: {path}")

        file_size = abs_path.stat().st_size
        is_binary = _is_binary_file(abs_path)

        # Determine encoding
        # If base64 is explicitly requested, treat as binary to ensure consistent return type
        treat_as_binary = is_binary or encoding == "base64"

        # Read file content
        content_bytes = b""
        sha256_hash = hashlib.sha256()

        # Always open in binary mode to support seeking and accurate byte chunking
        with open(abs_path, "rb") as f:
            if chunk is not None:
                # Chunked read: seek to the requested offset and read one chunk.
                f.seek(chunk * chunk_size)
                content_bytes = f.read(chunk_size)
            else:
                content_bytes = f.read()

        sha256_hash.update(content_bytes)

        if treat_as_binary:
            content = base64.b64encode(content_bytes).decode("ascii")
            actual_encoding = "base64"
        else:
            try:
                content = content_bytes.decode("utf-8")
                actual_encoding = "utf-8"
            except UnicodeDecodeError:
                # Fallback for binary content or split multi-byte characters in chunk
                content = base64.b64encode(content_bytes).decode("ascii")
                actual_encoding = "base64"
                # Update is_binary flag since we forced binary encoding
                is_binary = True

        # Calculate chunking info (only reported for chunked requests).
        total_chunks = None
        chunk_index = None
        if chunk is not None:
            if chunk_size > 0:
                # Ceiling division: number of chunks covering the whole file.
                total_chunks = (file_size + chunk_size - 1) // chunk_size
            else:
                total_chunks = 1
            chunk_index = chunk

        return FileContent(
            path=path,
            content=content,
            encoding=actual_encoding,
            size=len(content_bytes),  # Size of the actual bytes returned
            is_binary=is_binary,
            chunk_index=chunk_index,
            total_chunks=total_chunks,
            checksum=sha256_hash.hexdigest(),
        )

    except SecurityError as e:
        raise HTTPException(status_code=403, detail=e.message)
|
|
238
|
+
|
|
239
|
+
|
|
240
|
+
@router.post("/write", response_model=WriteResult)
async def write_file(
    request: WriteFileRequest,
    validator: PathValidator = Depends(get_path_validator),
):
    """
    Write content to a file.

    Creates parent directories if needed. Base64 payloads are written as
    raw bytes; anything else is written as UTF-8 text.
    """
    try:
        target = validator.validate(request.path)

        # Optionally create missing parent directories.
        if request.create_parents:
            target.parent.mkdir(parents=True, exist_ok=True)

        # Decode and write the payload.
        if request.encoding == "base64":
            target.write_bytes(base64.b64decode(request.content))
        else:
            target.write_text(request.content, encoding="utf-8")

        return WriteResult(
            success=True,
            path=request.path,
            mtime=datetime.fromtimestamp(target.stat().st_mtime),
        )

    except SecurityError as e:
        # Validation failures are a hard 403, never a soft error result.
        raise HTTPException(status_code=403, detail=e.message)
    except Exception as e:
        # Any I/O failure is reported in-band so callers can inspect it.
        return WriteResult(
            success=False,
            path=request.path,
            error=str(e),
        )
|
|
281
|
+
|
|
282
|
+
|
|
283
|
+
# Known language suffixes for prompt files (e.g., "calculator_python.prompt").
# Order matters: parse_prompt_stem() tries these suffixes in list order.
KNOWN_LANGUAGES = ["python", "typescript", "javascript", "java", "go", "rust", "cpp", "c", "csharp", "ruby", "swift", "kotlin"]

# Map language to file extensions (used when probing for related dev-unit files).
LANGUAGE_EXTENSIONS = {
    "python": [".py"],
    "typescript": [".ts", ".tsx"],
    "javascript": [".js", ".jsx"],
    "java": [".java"],
    "go": [".go"],
    "rust": [".rs"],
    "cpp": [".cpp", ".cc", ".cxx"],
    "c": [".c"],
    "csharp": [".cs"],
    "ruby": [".rb"],
    "swift": [".swift"],
    "kotlin": [".kt"],
}
|
|
301
|
+
|
|
302
|
+
|
|
303
|
+
def load_pddrc(project_root: Path) -> dict:
    """
    Load the .pddrc configuration file from *project_root* if it exists.

    Args:
        project_root: Directory expected to contain the ``.pddrc`` file.

    Returns:
        The parsed YAML mapping, or an empty dict when the file is missing,
        unreadable, not valid YAML, or PyYAML is not installed.
    """
    # NOTE: the previous version imported fnmatch here but never used it;
    # pattern matching lives in match_context(), so the dead import is gone.
    pddrc_path = project_root / ".pddrc"
    if not pddrc_path.exists():
        return {}

    try:
        # Imported lazily so PyYAML is only required when a .pddrc exists.
        import yaml
        with open(pddrc_path) as f:
            # safe_load returns None for an empty document; normalize to {}.
            return yaml.safe_load(f) or {}
    except Exception:
        # Best-effort: a broken config degrades to "no config" instead of failing.
        return {}
|
|
320
|
+
|
|
321
|
+
|
|
322
|
+
def match_context(prompt_path: str, pddrc: dict) -> tuple:
    """
    Resolve which .pddrc context applies to a prompt file.

    Args:
        prompt_path: Relative path to prompt file (e.g., "prompts/calculator_python.prompt")
        pddrc: Parsed .pddrc configuration

    Returns:
        (context_name, defaults_dict) for the first context whose path
        patterns match; otherwise ("default", defaults of the "default"
        context, or {} if none is defined).
    """
    import fnmatch

    contexts = pddrc.get("contexts", {})

    # Contexts are tried in declaration order; the first pattern hit wins.
    for name, config in contexts.items():
        defaults = config.get("defaults", {})
        if any(fnmatch.fnmatch(prompt_path, pattern) for pattern in config.get("paths", [])):
            return name, defaults

    # Nothing matched: fall back to the "default" context's defaults (if any).
    return "default", contexts.get("default", {}).get("defaults", {})
|
|
349
|
+
|
|
350
|
+
|
|
351
|
+
def parse_prompt_stem(stem: str) -> tuple:
    """
    Split a prompt file stem into (sync_basename, language).

    Language suffixes are matched case-insensitively, e.g.:
        "calculator_python"      -> ("calculator", "python")
        "calculator_Python"      -> ("calculator", "python")
        "simple_math_TypeScript" -> ("simple_math", "typescript")
        "unknown"                -> ("unknown", None)
    """
    lowered = stem.lower()
    for language in KNOWN_LANGUAGES:
        marker = f"_{language}"
        if lowered.endswith(marker):
            # Strip the suffix from the original-cased stem; report the
            # normalized lowercase language name.
            return stem[: -len(marker)], language
    return stem, None
|
|
368
|
+
|
|
369
|
+
|
|
370
|
+
@router.get("/prompts")
async def list_prompt_files(
    validator: PathValidator = Depends(get_path_validator),
):
    """
    List all .prompt files in the project.

    Returns a list of prompt files with their related dev-unit files
    (code, tests, examples) if they exist.

    Uses .pddrc configuration if available to determine correct paths.

    Each result includes:
    - prompt: Full path to .prompt file
    - sync_basename: Basename for sync command (without language suffix)
    - language: Detected language (e.g., "python")
    - code, test, example: Paths to related files if they exist
    """
    project_root = validator.project_root
    prompts_dir = project_root / "prompts"

    # Load .pddrc for context-specific paths
    pddrc = load_pddrc(project_root)

    results = []

    # Find all .prompt files using set to avoid duplicates
    prompt_files = set()

    # 1. prompts/ directory (recursive)
    if prompts_dir.exists():
        prompt_files.update(prompts_dir.rglob("*.prompt"))

    # 2. Project root
    prompt_files.update(project_root.glob("*.prompt"))

    # 3. Check prompts_dir from contexts
    for context_name, context_config in pddrc.get("contexts", {}).items():
        defaults = context_config.get("defaults", {})
        custom_prompts_dir = defaults.get("prompts_dir")
        if custom_prompts_dir:
            custom_path = project_root / custom_prompts_dir
            if custom_path.exists():
                prompt_files.update(custom_path.rglob("*.prompt"))

    # sorted() makes the response order deterministic across calls.
    for prompt_path in sorted(prompt_files):
        relative_path = str(prompt_path.relative_to(project_root))
        full_stem = prompt_path.stem  # e.g., "calculator_python"

        # Parse language suffix to get sync_basename
        sync_basename, language = parse_prompt_stem(full_stem)  # e.g., ("calculator", "python")

        # Get context-specific paths from .pddrc
        # (reuses/overwrites context_name from the discovery loop above).
        context_name, context_defaults = match_context(relative_path, pddrc)

        # Extract subdirectory structure from prompt path
        # e.g., "prompts/server/click_executor_python.prompt" -> "server"
        prompt_subdir = ""
        prompts_base = context_defaults.get("prompts_dir", "prompts")
        # Check if prompt is under the prompts base directory
        if relative_path.startswith(prompts_base + "/"):
            # Get path after prompts base, excluding the filename
            after_base = relative_path[len(prompts_base) + 1:]
            if "/" in after_base:
                prompt_subdir = "/".join(after_base.split("/")[:-1])
        elif "/" in relative_path:
            # For prompts not in a prompts/ directory, check if there's a subdirectory
            # e.g., "server/click_executor_python.prompt" -> "server"
            parts = relative_path.split("/")
            if len(parts) > 1:
                prompt_subdir = "/".join(parts[:-1])

        # Get file extensions for this language; fall back to a broad set
        # when no language suffix was detected.
        extensions = LANGUAGE_EXTENSIONS.get(language, [".py", ".ts", ".js", ".java"]) if language else [".py", ".ts", ".tsx", ".js", ".jsx", ".java"]

        # Try to find related files (code, test, example)
        related = {
            "prompt": relative_path,
            "sync_basename": sync_basename,  # For sync command: "calculator"
            "language": language,  # Detected language: "python"
            "context": context_name,  # Matched .pddrc context name
        }

        # ===== CODE FILE DETECTION =====
        # Use generate_output_path from .pddrc if available
        code_dirs = []

        # Priority 1: .pddrc generate_output_path
        pddrc_code_dir = context_defaults.get("generate_output_path")
        if pddrc_code_dir:
            # Strip trailing slash
            pddrc_code_dir = pddrc_code_dir.rstrip("/")
            code_dirs.append(pddrc_code_dir)

        # Priority 2: Default locations
        code_dirs.extend(["src", ""])  # Empty string for project root

        # First existing candidate wins; the "code" key doubles as the
        # found-flag that breaks out of the nested loops.
        for code_dir in code_dirs:
            for ext in extensions:
                # Try with subdirectory first, then without
                paths_to_try = []
                if code_dir:
                    if prompt_subdir:
                        paths_to_try.append(project_root / code_dir / prompt_subdir / f"{sync_basename}{ext}")
                    paths_to_try.append(project_root / code_dir / f"{sync_basename}{ext}")
                else:
                    if prompt_subdir:
                        paths_to_try.append(project_root / prompt_subdir / f"{sync_basename}{ext}")
                    paths_to_try.append(project_root / f"{sync_basename}{ext}")

                for code_path in paths_to_try:
                    if code_path.exists():
                        related["code"] = str(code_path.relative_to(project_root))
                        break
                if "code" in related:
                    break
            if "code" in related:
                break

        # ===== TEST FILE DETECTION =====
        # Use test_output_path from .pddrc if available
        test_dirs = []

        pddrc_test_dir = context_defaults.get("test_output_path")
        if pddrc_test_dir:
            pddrc_test_dir = pddrc_test_dir.rstrip("/")
            test_dirs.append(pddrc_test_dir)

        test_dirs.extend(["tests", "test", ""])  # Empty string for project root
        test_prefixes = ["test_", ""]
        test_suffixes = ["", "_test"]

        # Probes test_<base>, <base>_test and test_<base>_test per directory.
        for test_dir in test_dirs:
            found = False
            for prefix in test_prefixes:
                for suffix in test_suffixes:
                    # Skip invalid combination (no prefix and no suffix with just basename)
                    if not prefix and not suffix:
                        continue
                    for ext in extensions:
                        test_name = f"{prefix}{sync_basename}{suffix}{ext}"
                        # Try with subdirectory first, then without
                        paths_to_try = []
                        if test_dir:
                            if prompt_subdir:
                                paths_to_try.append(project_root / test_dir / prompt_subdir / test_name)
                            paths_to_try.append(project_root / test_dir / test_name)
                        else:
                            if prompt_subdir:
                                paths_to_try.append(project_root / prompt_subdir / test_name)
                            paths_to_try.append(project_root / test_name)

                        for test_path in paths_to_try:
                            if test_path.exists():
                                related["test"] = str(test_path.relative_to(project_root))
                                found = True
                                break
                        if found:
                            break
                    if found:
                        break
                if found:
                    break
            if found:
                break

        # ===== EXAMPLE FILE DETECTION =====
        # Use example_output_path from .pddrc if available
        example_dirs = []

        pddrc_example_dir = context_defaults.get("example_output_path")
        if pddrc_example_dir:
            pddrc_example_dir = pddrc_example_dir.rstrip("/")
            example_dirs.append(pddrc_example_dir)

        example_dirs.extend(["examples", ""])  # Empty string for project root

        for example_dir in example_dirs:
            for ext in extensions:
                example_name = f"{sync_basename}_example{ext}"
                # Try with subdirectory first, then without
                paths_to_try = []
                if example_dir:
                    if prompt_subdir:
                        paths_to_try.append(project_root / example_dir / prompt_subdir / example_name)
                    paths_to_try.append(project_root / example_dir / example_name)
                else:
                    if prompt_subdir:
                        paths_to_try.append(project_root / prompt_subdir / example_name)
                    paths_to_try.append(project_root / example_name)

                for example_path in paths_to_try:
                    if example_path.exists():
                        related["example"] = str(example_path.relative_to(project_root))
                        break
                if "example" in related:
                    break
            if "example" in related:
                break

        results.append(related)

    return results
|
|
573
|
+
|
|
574
|
+
|
|
575
|
+
@router.get("/metadata", response_model=List[FileMetadata])
async def get_file_metadata(
    paths: Annotated[List[str], Query(description="List of paths to check")],
    validator: PathValidator = Depends(get_path_validator),
):
    """
    Get metadata for multiple files.

    Batch endpoint for checking file existence and properties. Paths that
    fail security validation are reported as non-existent rather than
    failing the whole batch.
    """
    results: List[FileMetadata] = []
    for raw_path in paths:
        try:
            resolved = validator.validate(raw_path)
        except SecurityError:
            # Escaping/invalid paths look exactly like missing files.
            results.append(FileMetadata(path=raw_path, exists=False))
            continue

        if not resolved.exists():
            results.append(FileMetadata(path=raw_path, exists=False))
            continue

        info = resolved.stat()
        results.append(FileMetadata(
            path=raw_path,
            exists=True,
            size=info.st_size,
            mtime=datetime.fromtimestamp(info.st_mtime),
            is_directory=resolved.is_dir(),
        ))

    return results
|