pdd-cli 0.0.90__py3-none-any.whl → 0.0.121__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (151)
  1. pdd/__init__.py +38 -6
  2. pdd/agentic_bug.py +323 -0
  3. pdd/agentic_bug_orchestrator.py +506 -0
  4. pdd/agentic_change.py +231 -0
  5. pdd/agentic_change_orchestrator.py +537 -0
  6. pdd/agentic_common.py +533 -770
  7. pdd/agentic_crash.py +2 -1
  8. pdd/agentic_e2e_fix.py +319 -0
  9. pdd/agentic_e2e_fix_orchestrator.py +582 -0
  10. pdd/agentic_fix.py +118 -3
  11. pdd/agentic_update.py +27 -9
  12. pdd/agentic_verify.py +3 -2
  13. pdd/architecture_sync.py +565 -0
  14. pdd/auth_service.py +210 -0
  15. pdd/auto_deps_main.py +63 -53
  16. pdd/auto_include.py +236 -3
  17. pdd/auto_update.py +125 -47
  18. pdd/bug_main.py +195 -23
  19. pdd/cmd_test_main.py +345 -197
  20. pdd/code_generator.py +4 -2
  21. pdd/code_generator_main.py +118 -32
  22. pdd/commands/__init__.py +6 -0
  23. pdd/commands/analysis.py +113 -48
  24. pdd/commands/auth.py +309 -0
  25. pdd/commands/connect.py +358 -0
  26. pdd/commands/fix.py +155 -114
  27. pdd/commands/generate.py +5 -0
  28. pdd/commands/maintenance.py +3 -2
  29. pdd/commands/misc.py +8 -0
  30. pdd/commands/modify.py +225 -163
  31. pdd/commands/sessions.py +284 -0
  32. pdd/commands/utility.py +12 -7
  33. pdd/construct_paths.py +334 -32
  34. pdd/context_generator_main.py +167 -170
  35. pdd/continue_generation.py +6 -3
  36. pdd/core/__init__.py +33 -0
  37. pdd/core/cli.py +44 -7
  38. pdd/core/cloud.py +237 -0
  39. pdd/core/dump.py +68 -20
  40. pdd/core/errors.py +4 -0
  41. pdd/core/remote_session.py +61 -0
  42. pdd/crash_main.py +219 -23
  43. pdd/data/llm_model.csv +4 -4
  44. pdd/docs/prompting_guide.md +864 -0
  45. pdd/docs/whitepaper_with_benchmarks/data_and_functions/benchmark_analysis.py +495 -0
  46. pdd/docs/whitepaper_with_benchmarks/data_and_functions/creation_compare.py +528 -0
  47. pdd/fix_code_loop.py +208 -34
  48. pdd/fix_code_module_errors.py +6 -2
  49. pdd/fix_error_loop.py +291 -38
  50. pdd/fix_main.py +208 -6
  51. pdd/fix_verification_errors_loop.py +235 -26
  52. pdd/fix_verification_main.py +269 -83
  53. pdd/frontend/dist/assets/index-B5DZHykP.css +1 -0
  54. pdd/frontend/dist/assets/index-CUWd8al1.js +450 -0
  55. pdd/frontend/dist/index.html +376 -0
  56. pdd/frontend/dist/logo.svg +33 -0
  57. pdd/generate_output_paths.py +46 -5
  58. pdd/generate_test.py +212 -151
  59. pdd/get_comment.py +19 -44
  60. pdd/get_extension.py +8 -9
  61. pdd/get_jwt_token.py +309 -20
  62. pdd/get_language.py +8 -7
  63. pdd/get_run_command.py +7 -5
  64. pdd/insert_includes.py +2 -1
  65. pdd/llm_invoke.py +531 -97
  66. pdd/load_prompt_template.py +15 -34
  67. pdd/operation_log.py +342 -0
  68. pdd/path_resolution.py +140 -0
  69. pdd/postprocess.py +122 -97
  70. pdd/preprocess.py +68 -12
  71. pdd/preprocess_main.py +33 -1
  72. pdd/prompts/agentic_bug_step10_pr_LLM.prompt +182 -0
  73. pdd/prompts/agentic_bug_step1_duplicate_LLM.prompt +73 -0
  74. pdd/prompts/agentic_bug_step2_docs_LLM.prompt +129 -0
  75. pdd/prompts/agentic_bug_step3_triage_LLM.prompt +95 -0
  76. pdd/prompts/agentic_bug_step4_reproduce_LLM.prompt +97 -0
  77. pdd/prompts/agentic_bug_step5_root_cause_LLM.prompt +123 -0
  78. pdd/prompts/agentic_bug_step6_test_plan_LLM.prompt +107 -0
  79. pdd/prompts/agentic_bug_step7_generate_LLM.prompt +172 -0
  80. pdd/prompts/agentic_bug_step8_verify_LLM.prompt +119 -0
  81. pdd/prompts/agentic_bug_step9_e2e_test_LLM.prompt +289 -0
  82. pdd/prompts/agentic_change_step10_identify_issues_LLM.prompt +1006 -0
  83. pdd/prompts/agentic_change_step11_fix_issues_LLM.prompt +984 -0
  84. pdd/prompts/agentic_change_step12_create_pr_LLM.prompt +140 -0
  85. pdd/prompts/agentic_change_step1_duplicate_LLM.prompt +73 -0
  86. pdd/prompts/agentic_change_step2_docs_LLM.prompt +101 -0
  87. pdd/prompts/agentic_change_step3_research_LLM.prompt +126 -0
  88. pdd/prompts/agentic_change_step4_clarify_LLM.prompt +164 -0
  89. pdd/prompts/agentic_change_step5_docs_change_LLM.prompt +981 -0
  90. pdd/prompts/agentic_change_step6_devunits_LLM.prompt +1005 -0
  91. pdd/prompts/agentic_change_step7_architecture_LLM.prompt +1044 -0
  92. pdd/prompts/agentic_change_step8_analyze_LLM.prompt +1027 -0
  93. pdd/prompts/agentic_change_step9_implement_LLM.prompt +1077 -0
  94. pdd/prompts/agentic_e2e_fix_step1_unit_tests_LLM.prompt +90 -0
  95. pdd/prompts/agentic_e2e_fix_step2_e2e_tests_LLM.prompt +91 -0
  96. pdd/prompts/agentic_e2e_fix_step3_root_cause_LLM.prompt +89 -0
  97. pdd/prompts/agentic_e2e_fix_step4_fix_e2e_tests_LLM.prompt +96 -0
  98. pdd/prompts/agentic_e2e_fix_step5_identify_devunits_LLM.prompt +91 -0
  99. pdd/prompts/agentic_e2e_fix_step6_create_unit_tests_LLM.prompt +106 -0
  100. pdd/prompts/agentic_e2e_fix_step7_verify_tests_LLM.prompt +116 -0
  101. pdd/prompts/agentic_e2e_fix_step8_run_pdd_fix_LLM.prompt +120 -0
  102. pdd/prompts/agentic_e2e_fix_step9_verify_all_LLM.prompt +146 -0
  103. pdd/prompts/agentic_fix_primary_LLM.prompt +2 -2
  104. pdd/prompts/agentic_update_LLM.prompt +192 -338
  105. pdd/prompts/auto_include_LLM.prompt +22 -0
  106. pdd/prompts/change_LLM.prompt +3093 -1
  107. pdd/prompts/detect_change_LLM.prompt +571 -14
  108. pdd/prompts/fix_code_module_errors_LLM.prompt +8 -0
  109. pdd/prompts/fix_errors_from_unit_tests_LLM.prompt +1 -0
  110. pdd/prompts/generate_test_LLM.prompt +19 -1
  111. pdd/prompts/generate_test_from_example_LLM.prompt +366 -0
  112. pdd/prompts/insert_includes_LLM.prompt +262 -252
  113. pdd/prompts/prompt_code_diff_LLM.prompt +123 -0
  114. pdd/prompts/prompt_diff_LLM.prompt +82 -0
  115. pdd/remote_session.py +876 -0
  116. pdd/server/__init__.py +52 -0
  117. pdd/server/app.py +335 -0
  118. pdd/server/click_executor.py +587 -0
  119. pdd/server/executor.py +338 -0
  120. pdd/server/jobs.py +661 -0
  121. pdd/server/models.py +241 -0
  122. pdd/server/routes/__init__.py +31 -0
  123. pdd/server/routes/architecture.py +451 -0
  124. pdd/server/routes/auth.py +364 -0
  125. pdd/server/routes/commands.py +929 -0
  126. pdd/server/routes/config.py +42 -0
  127. pdd/server/routes/files.py +603 -0
  128. pdd/server/routes/prompts.py +1347 -0
  129. pdd/server/routes/websocket.py +473 -0
  130. pdd/server/security.py +243 -0
  131. pdd/server/terminal_spawner.py +217 -0
  132. pdd/server/token_counter.py +222 -0
  133. pdd/summarize_directory.py +236 -237
  134. pdd/sync_animation.py +8 -4
  135. pdd/sync_determine_operation.py +329 -47
  136. pdd/sync_main.py +272 -28
  137. pdd/sync_orchestration.py +289 -211
  138. pdd/sync_order.py +304 -0
  139. pdd/template_expander.py +161 -0
  140. pdd/templates/architecture/architecture_json.prompt +41 -46
  141. pdd/trace.py +1 -1
  142. pdd/track_cost.py +0 -13
  143. pdd/unfinished_prompt.py +2 -1
  144. pdd/update_main.py +68 -26
  145. {pdd_cli-0.0.90.dist-info → pdd_cli-0.0.121.dist-info}/METADATA +15 -10
  146. pdd_cli-0.0.121.dist-info/RECORD +229 -0
  147. pdd_cli-0.0.90.dist-info/RECORD +0 -153
  148. {pdd_cli-0.0.90.dist-info → pdd_cli-0.0.121.dist-info}/WHEEL +0 -0
  149. {pdd_cli-0.0.90.dist-info → pdd_cli-0.0.121.dist-info}/entry_points.txt +0 -0
  150. {pdd_cli-0.0.90.dist-info → pdd_cli-0.0.121.dist-info}/licenses/LICENSE +0 -0
  151. {pdd_cli-0.0.90.dist-info → pdd_cli-0.0.121.dist-info}/top_level.txt +0 -0
pdd/sync_order.py ADDED
@@ -0,0 +1,304 @@
+ from __future__ import annotations
+
+ import os
+ import re
+ import stat
+ import logging
+ from datetime import datetime
+ from pathlib import Path
+ from typing import Set, Optional, Dict, List, Tuple, Deque
+ from collections import deque, defaultdict
+
+ from rich.console import Console
+
+ # Initialize rich console
+ console = Console()
+
+ # Configure logging
+ logging.basicConfig(level=logging.INFO, format="%(levelname)s: %(message)s")
+ logger = logging.getLogger(__name__)
+
+ def extract_includes_from_file(file_path: Path) -> Set[str]:
+     """
+     Parses <include> tags from a prompt file.
+
+     Args:
+         file_path: Path to the prompt file.
+
+     Returns:
+         Set of included paths found in the file. Returns empty set if file
+         cannot be read.
+     """
+     if not file_path.exists() or not file_path.is_file():
+         logger.warning(f"File not found or not a file: {file_path}")
+         return set()
+
+     try:
+         content = file_path.read_text(encoding="utf-8")
+         # Regex pattern matching <include>...</include> tags
+         pattern = r'<include>(.*?)</include>'
+         matches = re.findall(pattern, content, re.DOTALL)
+
+         # Clean up matches (strip whitespace)
+         includes = {m.strip() for m in matches if m.strip()}
+         return includes
+     except Exception as e:
+         logger.error(f"Error reading file {file_path}: {e}")
+         return set()
+
+
+ def extract_module_from_include(include_path: str) -> Optional[str]:
+     """
+     Maps include paths to module names by stripping suffixes.
+
+     Args:
+         include_path: The path string found inside an include tag.
+
+     Returns:
+         The extracted module name, or None if it's not a module include.
+     """
+     path_obj = Path(include_path)
+     filename = path_obj.name
+     stem = path_obj.stem
+
+     # Logic:
+     # 1. If it's a context example file (e.g., context/llm_invoke_example.py)
+     # 2. If it's a prompt file with language suffix (e.g., prompts/cli_python.prompt)
+
+     # Check if it looks like a module file:
+     # - Example files contain "_example" in the stem
+     # - Prompt files must have a language suffix (_python, _typescript, _LLM)
+     is_example = "_example" in stem
+     has_language_suffix = bool(re.search(r'(_python|_typescript|_LLM)$', stem, re.IGNORECASE))
+     is_module_prompt = filename.endswith(".prompt") and has_language_suffix
+
+     if not (is_example or is_module_prompt):
+         return None
+
+     # Remove suffixes
+     # Order matters: remove language specific suffixes first, then _example
+     clean_name = stem
+
+     # Remove language suffixes
+     clean_name = re.sub(r'(_python|_typescript|_LLM)$', '', clean_name, flags=re.IGNORECASE)
+
+     # Remove example suffix
+     clean_name = re.sub(r'_example$', '', clean_name, flags=re.IGNORECASE)
+
+     if not clean_name:
+         return None
+
+     return clean_name
+
+
+ def build_dependency_graph(prompts_dir: Path) -> Dict[str, List[str]]:
+     """
+     Scans prompt files and builds a dependency graph based on includes.
+
+     Args:
+         prompts_dir: Directory containing .prompt files.
+
+     Returns:
+         Dictionary mapping module_name -> list of dependencies (modules it depends on).
+     """
+     if not prompts_dir.exists() or not prompts_dir.is_dir():
+         logger.error(f"Prompts directory not found: {prompts_dir}")
+         return {}
+
+     dependency_graph: Dict[str, Set[str]] = defaultdict(set)
+
+     # Scan for relevant prompt files
+     patterns = ["*_python.prompt", "*_typescript.prompt", "*_LLM.prompt"]
+     prompt_files: List[Path] = []
+     for pattern in patterns:
+         prompt_files.extend(prompts_dir.glob(pattern))
+
+     for p_file in prompt_files:
+         # Determine current module name from filename
+         # e.g., "foo_python.prompt" -> "foo"
+         current_module = extract_module_from_include(p_file.name)
+
+         if not current_module:
+             continue
+
+         # Ensure module exists in graph even if it has no dependencies
+         if current_module not in dependency_graph:
+             dependency_graph[current_module] = set()
+
+         # Parse includes
+         includes = extract_includes_from_file(p_file)
+
+         for inc in includes:
+             dep_module = extract_module_from_include(inc)
+
+             # Add dependency if valid and not self-reference
+             if dep_module and dep_module != current_module:
+                 dependency_graph[current_module].add(dep_module)
+
+     # Convert sets to lists for return type consistency
+     return {k: list(v) for k, v in dependency_graph.items()}
+
+
+ def topological_sort(graph: Dict[str, List[str]]) -> Tuple[List[str], List[List[str]]]:
+     """
+     Performs topological sort using Kahn's algorithm.
+
+     Args:
+         graph: Adjacency list (module -> dependencies).
+
+     Returns:
+         Tuple containing:
+         1. List of modules in topological order (dependencies first).
+         2. List of cycles detected (if any).
+     """
+     # Calculate in-degrees (number of modules depending on key)
+     # Note: The input graph is "Module -> Depends On".
+     # For Kahn's algo to output [Dependency, ..., Dependent], we need to process
+     # nodes with 0 dependencies first.
+
+     # Normalize graph to ensure all nodes are keys
+     all_nodes = set(graph.keys())
+     for deps in graph.values():
+         all_nodes.update(deps)
+
+     adj_list = {node: graph.get(node, []) for node in all_nodes}
+
+     # In Kahn's, usually we track edges: Dependency -> Dependent.
+     # Our input is Dependent -> [Dependencies].
+     # So, in-degree here represents "number of unsatisfied dependencies".
+     in_degree = {node: 0 for node in all_nodes}
+
+     # Reverse graph: Dependency -> [Dependents] (needed to update neighbors)
+     reverse_graph: Dict[str, List[str]] = defaultdict(list)
+
+     for node, deps in adj_list.items():
+         in_degree[node] = len(deps)
+         for dep in deps:
+             reverse_graph[dep].append(node)
+
+     # Queue for nodes with 0 dependencies
+     queue: Deque[str] = deque([node for node, deg in in_degree.items() if deg == 0])
+
+     sorted_list: List[str] = []
+     processed_count = 0
+
+     while queue:
+         u = queue.popleft()
+         sorted_list.append(u)
+         processed_count += 1
+
+         # For every module 'v' that depends on 'u'
+         for v in reverse_graph[u]:
+             in_degree[v] -= 1
+             if in_degree[v] == 0:
+                 queue.append(v)
+
+     cycles: List[List[str]] = []
+
+     if processed_count != len(all_nodes):
+         # Cycle detected. Identify nodes involved in cycles.
+         remaining_nodes = [n for n, deg in in_degree.items() if deg > 0]
+         if remaining_nodes:
+             cycles.append(remaining_nodes)
+             logger.warning(f"Cyclic dependencies detected involving: {remaining_nodes}")
+
+     return sorted_list, cycles
+
+
+ def get_affected_modules(sorted_modules: List[str], modified: Set[str], graph: Dict[str, List[str]]) -> List[str]:
+     """
+     Identifies modules that need syncing based on modified modules and dependencies.
+
+     Args:
+         sorted_modules: Full list of modules in topological order.
+         modified: Set of module names that have changed.
+         graph: Dependency graph (module -> dependencies).
+
+     Returns:
+         List of modules to sync, preserving topological order.
+     """
+     if not modified:
+         return []
+
+     # Build reverse graph: Dependency -> [Dependents]
+     # This allows us to traverse "up" the chain from a modified dependency to things that use it
+     reverse_graph: Dict[str, Set[str]] = defaultdict(set)
+     for node, deps in graph.items():
+         for dep in deps:
+             reverse_graph[dep].add(node)
+
+     affected = set()
+     queue = deque(modified)
+
+     # BFS to find all transitive dependents
+     while queue:
+         current = queue.popleft()
+         if current in affected:
+             continue
+
+         affected.add(current)
+
+         # Add all modules that depend on current
+         for dependent in reverse_graph.get(current, []):
+             if dependent not in affected:
+                 queue.append(dependent)
+
+     # Filter sorted_modules to keep only affected ones, preserving order
+     result = [m for m in sorted_modules if m in affected]
+
+     return result
+
+
+ def generate_sync_order_script(modules: List[str], output_path: Path, worktree_path: Optional[Path] = None) -> str:
+     """
+     Generates a shell script to execute pdd sync commands in order.
+
+     Args:
+         modules: Ordered list of module names to sync.
+         output_path: Path where the script should be written.
+         worktree_path: Optional path to cd into before running commands.
+
+     Returns:
+         The content of the generated script.
+     """
+     if not modules:
+         logger.info("No modules to sync. Skipping script generation.")
+         return ""
+
+     lines = [
+         "#!/bin/bash",
+         "#",
+         "# PDD Sync Order Script",
+         f"# Generated: {datetime.now().isoformat()}",
+         f"# Total Modules: {len(modules)}",
+         "#",
+         "",
+         "set -e # Exit immediately if a command exits with a non-zero status",
+         ""
+     ]
+
+     if worktree_path:
+         lines.append(f"cd {worktree_path}")
+         lines.append("")
+
+     total = len(modules)
+     for i, module in enumerate(modules, 1):
+         lines.append(f'echo "[{i}/{total}] Syncing {module}..."')
+         lines.append(f"pdd sync {module}")
+         lines.append("")
+
+     script_content = "\n".join(lines)
+
+     try:
+         output_path.write_text(script_content, encoding="utf-8")
+
+         # Make executable (chmod +x)
+         st = os.stat(output_path)
+         os.chmod(output_path, st.st_mode | stat.S_IEXEC)
+
+         console.print(f"[green]Successfully generated sync script at: {output_path}[/green]")
+     except Exception as e:
+         console.print(f"[red]Failed to write sync script: {e}[/red]")
+         raise
+
+     return script_content
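
The new `pdd/sync_order.py` builds a "module -> dependencies" graph from `<include>` tags, orders it with Kahn's algorithm, narrows it to the modules affected by a change, and writes a shell script that runs `pdd sync` per module. A minimal usage sketch follows (not part of the diff); it assumes the module is importable as `pdd.sync_order`, and the `prompts/` directory and the modified module name are invented for illustration.

```python
# Illustrative sketch only -- assumes pdd.sync_order is importable as shipped above;
# the prompts/ directory and the "llm_invoke" module name are invented examples.
from pathlib import Path

from pdd.sync_order import (
    build_dependency_graph,
    topological_sort,
    get_affected_modules,
    generate_sync_order_script,
)

# 1. Module -> dependencies, derived from <include> tags in prompts/*.prompt
graph = build_dependency_graph(Path("prompts"))

# 2. Dependencies first, dependents later; cycles are reported separately
ordered, cycles = topological_sort(graph)
if cycles:
    print(f"Cyclic dependencies: {cycles}")

# 3. Keep only the changed module and its transitive dependents
to_sync = get_affected_modules(ordered, {"llm_invoke"}, graph)

# 4. Emit an executable script that runs `pdd sync <module>` in order
generate_sync_order_script(to_sync, Path("sync_order.sh"))
```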
pdd/template_expander.py ADDED
@@ -0,0 +1,161 @@
+ # pdd/template_expander.py
+ """
+ Template expansion utility for output path configuration.
+
+ This module provides a function to expand path templates with placeholders
+ like {name}, {category}, {ext}, etc. It enables extensible project layouts
+ for different languages and frameworks (Python, TypeScript, Vue, Go, etc.).
+
+ Supported placeholders:
+     {name} - Base name (last segment of input path)
+     {category} - Parent path segments (empty if none)
+     {dir_prefix} - Full input directory prefix with trailing /
+     {ext} - File extension from language (e.g., "py", "tsx")
+     {language} - Full language name (e.g., "python", "typescript")
+     {name_snake} - snake_case version of name
+     {name_pascal} - PascalCase version of name
+     {name_kebab} - kebab-case version of name
+
+ Example:
+     >>> expand_template(
+     ...     "frontend/src/components/{category}/{name}/{name}.tsx",
+     ...     {"name": "AssetCard", "category": "marketplace"}
+     ... )
+     'frontend/src/components/marketplace/AssetCard/AssetCard.tsx'
+ """
+
+ import re
+ import os
+ from typing import Dict, Any
+
+
+ def _to_snake_case(s: str) -> str:
+     """
+     Convert string to snake_case.
+
+     Handles PascalCase, camelCase, and existing snake_case.
+
+     Examples:
+         AssetCard -> asset_card
+         assetCard -> asset_card
+         already_snake -> already_snake
+     """
+     if not s:
+         return s
+     # Insert underscore before uppercase letters (except at start)
+     result = re.sub(r'(?<!^)(?=[A-Z])', '_', s)
+     return result.lower()
+
+
+ def _to_pascal_case(s: str) -> str:
+     """
+     Convert string to PascalCase.
+
+     Handles snake_case, kebab-case, and existing PascalCase.
+
+     Examples:
+         asset_card -> AssetCard
+         asset-card -> AssetCard
+         AssetCard -> Assetcard (note: re-capitalizes)
+     """
+     if not s:
+         return s
+     # Split on underscores, hyphens, or other common delimiters
+     parts = re.split(r'[_\-\s]+', s)
+     return ''.join(part.title() for part in parts if part)
+
+
+ def _to_kebab_case(s: str) -> str:
+     """
+     Convert string to kebab-case.
+
+     Handles PascalCase, camelCase, and existing kebab-case.
+
+     Examples:
+         AssetCard -> asset-card
+         assetCard -> asset-card
+         already-kebab -> already-kebab
+     """
+     if not s:
+         return s
+     # Insert hyphen before uppercase letters (except at start)
+     result = re.sub(r'(?<!^)(?=[A-Z])', '-', s)
+     return result.lower()
+
+
+ def _normalize_path(path: str) -> str:
+     """
+     Normalize a path to remove double slashes and resolve . and ..
+
+     This handles edge cases like empty {category} producing paths like:
+     "src/components//Button" -> "src/components/Button"
+
+     Unlike os.path.normpath, this preserves relative paths without
+     converting them to absolute paths.
+     """
+     if not path:
+         return path
+
+     # Split path and filter empty segments (which cause double slashes)
+     parts = path.split('/')
+     normalized_parts = [p for p in parts if p]
+
+     # Rejoin with single slashes
+     result = '/'.join(normalized_parts)
+
+     # Use os.path.normpath for additional cleanup (handles . and ..)
+     # but it converts to OS-specific separators, so convert back
+     result = os.path.normpath(result)
+
+     # On Windows, normpath uses backslashes; convert back to forward slashes
+     result = result.replace('\\', '/')
+
+     return result
+
+
+ def expand_template(template: str, context: Dict[str, Any]) -> str:
+     """
+     Expand a path template with placeholder values.
+
+     Args:
+         template: Path template with {placeholder} syntax
+         context: Dictionary of values to substitute
+
+     Returns:
+         Expanded path with normalized slashes
+
+     Example:
+         >>> expand_template(
+         ...     "frontend/src/components/{category}/{name}/{name}.tsx",
+         ...     {"name": "AssetCard", "category": "marketplace"}
+         ... )
+         'frontend/src/components/marketplace/AssetCard/AssetCard.tsx'
+     """
+     # Get base values from context (with empty string defaults)
+     name = context.get('name', '')
+     category = context.get('category', '')
+     dir_prefix = context.get('dir_prefix', '')
+     ext = context.get('ext', '')
+     language = context.get('language', '')
+
+     # Build the full set of available placeholders
+     placeholders = {
+         'name': name,
+         'category': category,
+         'dir_prefix': dir_prefix,
+         'ext': ext,
+         'language': language,
+         'name_snake': _to_snake_case(name),
+         'name_pascal': _to_pascal_case(name),
+         'name_kebab': _to_kebab_case(name),
+     }
+
+     # Perform substitution
+     result = template
+     for key, value in placeholders.items():
+         result = result.replace(f'{{{key}}}', str(value))
+
+     # Normalize the path to handle empty segments (double slashes)
+     result = _normalize_path(result)
+
+     return result
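
As a quick illustration of the new `expand_template` helper, the sketch below (not part of the diff) exercises the case-variant placeholders and the empty-`{category}` normalization; the template strings and context values are invented.

```python
# Illustrative sketch only -- assumes pdd.template_expander is importable as shipped above.
from pdd.template_expander import expand_template

context = {"name": "AssetCard", "category": "marketplace", "ext": "tsx", "language": "typescript"}

# Straight substitution of {category}, {name}, {ext}
print(expand_template("frontend/src/components/{category}/{name}/{name}.{ext}", context))
# frontend/src/components/marketplace/AssetCard/AssetCard.tsx

# Case variants are derived from "name" automatically
print(expand_template("tests/{name_snake}/test_{name_snake}.py", context))
# tests/asset_card/test_asset_card.py

# A missing {category} does not leave a double slash behind
print(expand_template("src/{category}/{name_kebab}.css", {"name": "AssetCard"}))
# src/asset-card.css
```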
pdd/templates/architecture/architecture_json.prompt CHANGED
@@ -140,56 +140,51 @@ INSTRUCTIONS:
  - When interface.type is "page", each entry in `dataSources` must be an object with at least `kind` and `source` (e.g., URL or identifier). The `kind` field MUST be exactly one of: `"api"`, `"query"`, `"stream"`, `"file"`, `"cache"`, `"message"`, `"job"`, or `"other"`. Do not invent new values like `"api/mutation"`; instead, use `"api"` (for any HTTP/REST/GraphQL endpoint) or `"other"` and describe details such as queries vs. mutations in `description` or `notes`. Provide `method`, `description`, and any other useful metadata when known.
  - Valid JSON only. No comments or trailing commas.

- OUTPUT FORMAT (authoritative):
+ OUTPUT FORMAT - CRITICAL: Return a raw JSON array, NOT an object with "items" or "data" wrapper:
  ```json
- {
- "type": "array",
- "items": {
- "type": "object",
- "required": ["reason", "description", "dependencies", "priority", "filename", "filepath"],
- "properties": {
- "reason": {"type": "string"},
- "description": {"type": "string"},
- "dependencies": {"type": "array", "items": {"type": "string"}},
- "priority": {"type": "integer", "minimum": 1},
- "filename": {"type": "string"},
- "filepath": {"type": "string"},
- "tags": {"type": "array", "items": {"type": "string"}},
- "interface": {
- "type": "object",
- "properties": {
- "type": {"type": "string", "enum": ["component", "page", "module", "api", "graphql", "cli", "job", "message", "config"]},
- "component": {"type": "object"},
- "page": {
- "type": "object",
- "properties": {
- "route": {"type": "string"},
- "params": {
- "type": "array",
- "items": {
- "type": "object",
- "required": ["name", "type"],
- "properties": {
- "name": {"type": "string"},
- "type": {"type": "string"},
- "description": {"type": "string"}
- }
- }
- }
- }
- },
- "module": {"type": "object"},
- "api": {"type": "object"},
- "graphql": {"type": "object"},
- "cli": {"type": "object"},
- "job": {"type": "object"},
- "message": {"type": "object"},
- "config": {"type": "object"}
- }
+ [
+ {
+ "reason": "Core data models needed by all other modules",
+ "description": "Defines Order, User, and Item data models with validation",
+ "dependencies": [],
+ "priority": 1,
+ "filename": "models_Python.prompt",
+ "filepath": "src/models.py",
+ "tags": ["backend", "data"],
+ "interface": {
+ "type": "module",
+ "module": {
+ "functions": [
+ {"name": "Order", "signature": "class Order(BaseModel)", "returns": "Order instance"}
+ ]
+ }
+ }
+ },
+ {
+ "reason": "API endpoints for order management",
+ "description": "REST API for creating, reading, updating orders",
+ "dependencies": ["models_Python.prompt"],
+ "priority": 2,
+ "filename": "orders_api_Python.prompt",
+ "filepath": "src/api/orders.py",
+ "tags": ["backend", "api"],
+ "interface": {
+ "type": "api",
+ "api": {
+ "endpoints": [
+ {"method": "POST", "path": "/orders", "auth": "jwt"},
+ {"method": "GET", "path": "/orders/{id}", "auth": "jwt"}
+ ]
  }
  }
  }
- }
+ ]
+ ```
+ WRONG (do NOT do this):
+ ```json
+ {"items": [...]} // WRONG - no wrapper objects!
+ {"data": [...]} // WRONG - no wrapper objects!
+ {"type": "array", "items": [...]} // WRONG - this is schema, not output!
  ```

  INTERFACE TYPES (emit only applicable):
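
The revised prompt above now requires a bare JSON array instead of the previous JSON-Schema-style block, and it explicitly rejects `{"items": [...]}` and `{"data": [...]}` wrappers. A hypothetical consumer-side guard (not code from this package) makes the accepted and rejected shapes concrete:

```python
# Hypothetical guard illustrating the shape the revised prompt enforces; the function
# name and the legacy-wrapper fallback are invented, not taken from pdd-cli.
import json
from typing import Any, Dict, List

def parse_architecture_output(raw: str) -> List[Dict[str, Any]]:
    data = json.loads(raw)
    if isinstance(data, list):          # desired shape: a raw array of entries
        return data
    if isinstance(data, dict):          # wrapper shapes the prompt now forbids
        for key in ("items", "data"):
            if isinstance(data.get(key), list):
                return data[key]
    raise ValueError("Expected a raw JSON array of architecture entries")

entries = parse_architecture_output('[{"filename": "models_Python.prompt", "priority": 1}]')
print(entries[0]["filename"])  # models_Python.prompt
```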
pdd/trace.py CHANGED
@@ -222,7 +222,7 @@ def trace(
  for start_idx in range(1, len(prompt_lines) - window_size + 2):
  window_lines = prompt_lines[start_idx - 1 : start_idx - 1 + window_size]
  window_text = " ".join(window_lines)
- normalized_window = normalize_text(window_text).casefold()
+ normalized_window = _normalize_text(window_text).casefold()
  seg_len = len(normalized_window)
  if seg_len == 0:
  continue
pdd/track_cost.py CHANGED
@@ -57,9 +57,7 @@ def track_cost(func):
  # (it might have been created/deleted during command execution)
  if os.path.exists(abs_path) or '.' in os.path.basename(f):
  files_set.add(abs_path)
- print(f"Debug: Added to core_dump_files: {abs_path} (exists: {os.path.exists(abs_path)})")
  ctx.obj['core_dump_files'] = files_set
- print(f"Debug: Total files in core_dump_files: {len(files_set)}")

  # Check if we need to write cost tracking (only on success)
  if exception_raised is None:
@@ -92,10 +90,6 @@ def track_cost(func):
  writer.writeheader()
  writer.writerow(row)

- print(f"Debug: Writing row to CSV: {row}")
- print(f"Debug: Input files: {input_files}")
- print(f"Debug: Output files: {output_files}")
-
  except Exception as e:
  rprint(f"[red]Error tracking cost: {e}[/red]")

@@ -116,11 +110,6 @@ def collect_files(args, kwargs):
  input_files = []
  output_files = []

- print(f"Debug: collect_files called")
- print(f"Debug: args = {args}")
- print(f"Debug: kwargs keys = {list(kwargs.keys())}")
- print(f"Debug: kwargs = {kwargs}")
-
  # Known input parameter names that typically contain file paths
  input_param_names = {
  'prompt_file', 'prompt', 'input', 'input_file', 'source', 'source_file',
@@ -187,6 +176,4 @@ def collect_files(args, kwargs):
  if isinstance(item, str) and item and looks_like_file(item):
  input_files.append(item)

- print(f"Debug: Collected input files: {input_files}")
- print(f"Debug: Collected output files: {output_files}")
  return input_files, output_files
pdd/unfinished_prompt.py CHANGED
@@ -114,7 +114,8 @@ def unfinished_prompt(
  temperature=temperature,
  time=time,
  verbose=verbose,
- output_pydantic=PromptAnalysis
+ output_pydantic=PromptAnalysis,
+ language=language,
  )

  # Step 3: Extract and return results