xenfra 0.4.2__py3-none-any.whl → 0.4.4__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
xenfra/utils/config.py CHANGED
@@ -1,432 +1,459 @@
- """
- Configuration file generation utilities.
- """
-
- import os
- import shutil
- from datetime import datetime
- from pathlib import Path
-
- import click
- import yaml
- from rich.console import Console
- from rich.prompt import Confirm, IntPrompt, Prompt
- from xenfra_sdk import CodebaseAnalysisResponse
-
- console = Console()
-
-
- def read_xenfra_yaml(filename: str = "xenfra.yaml") -> dict:
-     """
-     Read and parse xenfra.yaml configuration file.
-
-     Args:
-         filename: Path to the config file (default: xenfra.yaml)
-
-     Returns:
-         Dictionary containing the configuration
-
-     Raises:
-         FileNotFoundError: If the config file doesn't exist
-         yaml.YAMLError: If the YAML is malformed
-         ValueError: If the YAML is invalid
-         IOError: If reading fails
-     """
-     if not Path(filename).exists():
-         raise FileNotFoundError(
-             f"Configuration file '{filename}' not found. Run 'xenfra init' first."
-         )
-
-     try:
-         with open(filename, "r") as f:
-             return yaml.safe_load(f) or {}
-     except yaml.YAMLError as e:
-         raise ValueError(f"Invalid YAML in {filename}: {e}")
-     except Exception as e:
-         raise IOError(f"Failed to read {filename}: {e}")
-
-
- def generate_xenfra_yaml(analysis: CodebaseAnalysisResponse, filename: str = "xenfra.yaml", package_manager_override: str = None, dependency_file_override: str = None) -> str:
-     """
-     Generate xenfra.yaml from AI codebase analysis.
-
-     Args:
-         analysis: CodebaseAnalysisResponse from Intelligence Service
-         filename: Output filename (default: xenfra.yaml)
-         package_manager_override: Optional override for package manager (user selection)
-         dependency_file_override: Optional override for dependency file (user selection)
-
-     Returns:
-         Path to the generated file
-     """
-     # Build configuration dictionary
-     config = {
-         "name": os.path.basename(os.getcwd()),
-         "framework": analysis.framework,
-         "region": "nyc3", # Default to NYC3
-         "port": analysis.port,
-     }
-
-     # Add database configuration if detected
-     if analysis.database and analysis.database != "none":
-         config["database"] = {"type": analysis.database, "env_var": "DATABASE_URL"}
-
-     # Add cache configuration if detected
-     if analysis.cache and analysis.cache != "none":
-         config["cache"] = {"type": analysis.cache, "env_var": f"{analysis.cache.upper()}_URL"}
-
-     # Add worker configuration if detected
-     if analysis.workers and len(analysis.workers) > 0:
-         config["workers"] = analysis.workers
-
-     # Add environment variables
-     if analysis.env_vars and len(analysis.env_vars) > 0:
-         config["env_vars"] = analysis.env_vars
-
-     # Infrastructure configuration
-     config["instance_size"] = analysis.instance_size
-     config["resources"] = {
-         "cpu": 1,
-         "ram": "1GB"
-     }
-
-     # Map resources based on detected size for better defaults
-     if analysis.instance_size == "standard":
-         config["resources"]["cpu"] = 2
-         config["resources"]["ram"] = "4GB"
-     elif analysis.instance_size == "premium":
-         config["resources"]["cpu"] = 4
-         config["resources"]["ram"] = "8GB"
-
-     # Add package manager info (use override if provided, otherwise use analysis)
-     package_manager = package_manager_override or analysis.package_manager
-     dependency_file = dependency_file_override or analysis.dependency_file
-
-     if package_manager:
-         config["package_manager"] = package_manager
-     if dependency_file:
-         config["dependency_file"] = dependency_file
-
-     # Write to file
-     with open(filename, "w") as f:
-         yaml.dump(config, f, sort_keys=False, default_flow_style=False)
-
-     return filename
-
-
- def create_backup(file_path: str) -> str:
-     """
-     Create a timestamped backup of a file in .xenfra/backups/ directory.
-
-     Args:
-         file_path: Path to the file to backup
-
-     Returns:
-         Path to the backup file
-     """
-     # Create .xenfra/backups directory if it doesn't exist
-     backup_dir = Path(".xenfra") / "backups"
-     backup_dir.mkdir(parents=True, exist_ok=True)
-
-     # Generate timestamped backup filename
-     timestamp = datetime.now().strftime("%Y%m%d_%H%M%S")
-     file_name = Path(file_path).name
-     backup_path = backup_dir / f"{file_name}.{timestamp}.backup"
-
-     # Copy file to backup location
-     shutil.copy2(file_path, backup_path)
-
-     return str(backup_path)
-
-
- def apply_patch(patch: dict, target_file: str = None, create_backup_file: bool = True):
-     """
-     Apply a JSON patch to a configuration file with automatic backup.
-
-     Args:
-         patch: Patch object with file, operation, path, value
-         target_file: Optional override for the file to patch
-         create_backup_file: Whether to create a backup before patching (default: True)
-
-     Returns:
-         Path to the backup file if created, None otherwise
-
-     Raises:
-         ValueError: If patch structure is invalid
-         FileNotFoundError: If target file doesn't exist
-         NotImplementedError: If file type is not supported
-     """
-     # Validate patch structure
-     if not isinstance(patch, dict):
-         raise ValueError("Patch must be a dictionary")
-
-     required_fields = ["file", "operation"]
-     for field in required_fields:
-         if field not in patch:
-             raise ValueError(f"Patch missing required field: {field}")
-
-     operation = patch.get("operation")
-     if operation not in ["add", "replace", "remove"]:
-         raise ValueError(
-             f"Invalid patch operation: {operation}. Must be 'add', 'replace', or 'remove'"
-         )
-
-     file_to_patch = target_file or patch.get("file")
-
-     if not file_to_patch:
-         raise ValueError("No target file specified in patch")
-
-     if not os.path.exists(file_to_patch):
-         raise FileNotFoundError(f"File '{file_to_patch}' not found")
-
-     # Create backup before modifying
-     backup_path = None
-     if create_backup_file:
-         backup_path = create_backup(file_to_patch)
-
-     # For YAML files
-     if file_to_patch.endswith((".yaml", ".yml")):
-         with open(file_to_patch, "r") as f:
-             config_data = yaml.safe_load(f) or {}
-
-         # Apply patch based on operation
-         operation = patch.get("operation")
-         path = (patch.get("path") or "").strip("/")
-         value = patch.get("value")
-
-         if operation == "add":
-             # For simple paths, add to root
-             if path:
-                 path_parts = path.split("/")
-                 current = config_data
-                 for part in path_parts[:-1]:
-                     if part not in current:
-                         current[part] = {}
-                     current = current[part]
-                 current[path_parts[-1]] = value
-             else:
-                 # Add to root level
-                 if isinstance(value, dict):
-                     config_data.update(value)
-                 else:
-                     config_data = value
-
-         elif operation == "replace":
-             if path:
-                 path_parts = path.split("/")
-                 current = config_data
-                 for part in path_parts[:-1]:
-                     current = current[part]
-                 current[path_parts[-1]] = value
-             else:
-                 config_data = value
-
-         # Write back
-         with open(file_to_patch, "w") as f:
-             yaml.dump(config_data, f, sort_keys=False, default_flow_style=False)
-
-     # For JSON files
-     elif file_to_patch.endswith(".json"):
-         import json
-
-         with open(file_to_patch, "r") as f:
-             config_data = json.load(f)
-
-         operation = patch.get("operation")
-         path = (patch.get("path") or "").strip("/")
-         value = patch.get("value")
-
-         if operation == "add":
-             if path:
-                 path_parts = path.split("/")
-                 current = config_data
-                 for part in path_parts[:-1]:
-                     if part not in current:
-                         current[part] = {}
-                     current = current[part]
-                 current[path_parts[-1]] = value
-             else:
-                 if isinstance(value, dict):
-                     config_data.update(value)
-                 else:
-                     config_data = value
-
-         elif operation == "replace":
-             if path:
-                 path_parts = path.split("/")
-                 current = config_data
-                 for part in path_parts[:-1]:
-                     current = current[part]
-                 current[path_parts[-1]] = value
-             else:
-                 config_data = value
-
-         # Write back
-         with open(file_to_patch, "w") as f:
-             json.dump(config_data, f, indent=2)
-
-     # For text files (like requirements.txt)
-     elif file_to_patch.endswith(".txt"):
-         operation = patch.get("operation")
-         value = patch.get("value")
-
-         if operation == "add":
-             # Append to file
-             with open(file_to_patch, "a") as f:
-                 f.write(f"\n{value}\n")
-         elif operation == "replace":
-             # Replace entire file
-             with open(file_to_patch, "w") as f:
-                 f.write(str(value))
-
-     # For TOML files (pyproject.toml)
-     elif file_to_patch.endswith(".toml"):
-         import toml
-
-         with open(file_to_patch, "r") as f:
-             config_data = toml.load(f)
-
-         operation = patch.get("operation")
-         path = (patch.get("path") or "").strip("/")
-         value = patch.get("value")
-
-         if operation == "add":
-             # Special case for pyproject.toml dependencies
-             is_pyproject = os.path.basename(file_to_patch) == "pyproject.toml"
-             if is_pyproject and (not path or path == "project/dependencies"):
-                 # Ensure project and dependencies exist
-                 if "project" not in config_data:
-                     config_data["project"] = {}
-                 if "dependencies" not in config_data["project"]:
-                     config_data["project"]["dependencies"] = []
-
-                 # Add value if not already present
-                 if value not in config_data["project"]["dependencies"]:
-                     config_data["project"]["dependencies"].append(value)
-             elif path:
-                 path_parts = path.split("/")
-                 current = config_data
-                 for part in path_parts[:-1]:
-                     if part not in current:
-                         current[part] = {}
-                     current = current[part]
-
-                 # If target is a list (like dependencies), append
-                 target_key = path_parts[-1]
-                 if target_key in current and isinstance(current[target_key], list):
-                     if value not in current[target_key]:
-                         current[target_key].append(value)
-                 else:
-                     current[target_key] = value
-             else:
-                 # Root level add
-                 if isinstance(value, dict):
-                     config_data.update(value)
-                 else:
-                     # Ignore root-level non-dict adds for structured files
-                     # to prevent overwriting the entire config with a string
-                     pass
-
-         elif operation == "replace":
-             if path:
-                 path_parts = path.split("/")
-                 current = config_data
-                 for part in path_parts[:-1]:
-                     current = current[part]
-                 current[path_parts[-1]] = value
-             else:
-                 config_data = value
-
-         # Write back
-         with open(file_to_patch, "w") as f:
-             toml.dump(config_data, f)
-     else:
-         # Design decision: Only support auto-patching for common dependency files
-         # Other file types should be manually edited to avoid data loss
-         # See docs/future-enhancements.md #4 for potential extensions
-         raise NotImplementedError(f"Patching not supported for file type: {file_to_patch}")
-
-     return backup_path
-
-
- def manual_prompt_for_config(filename: str = "xenfra.yaml") -> str:
-     """
-     Prompt user interactively for configuration details and generate xenfra.yaml.
-
-     Args:
-         filename: Output filename (default: xenfra.yaml)
-
-     Returns:
-         Path to the generated file
-     """
-     config = {}
-
-     # Project name (default to directory name)
-     default_name = os.path.basename(os.getcwd())
-     config["name"] = Prompt.ask("Project name", default=default_name)
-
-     # Framework
-     framework = Prompt.ask(
-         "Framework", choices=["fastapi", "flask", "django", "other"], default="fastapi"
-     )
-     config["framework"] = framework
-
-     # Port
-     port = IntPrompt.ask("Application port", default=8000)
-     # Validate port
-     from .validation import validate_port
-
-     is_valid, error_msg = validate_port(port)
-     if not is_valid:
-         console.print(f"[bold red]Invalid port: {error_msg}[/bold red]")
-         raise click.Abort()
-     config["port"] = port
-
-     # Database
-     use_database = Confirm.ask("Does your app use a database?", default=False)
-     if use_database:
-         db_type = Prompt.ask(
-             "Database type",
-             choices=["postgresql", "mysql", "sqlite", "mongodb"],
-             default="postgresql",
-         )
-         config["database"] = {"type": db_type, "env_var": "DATABASE_URL"}
-
-     # Cache
-     use_cache = Confirm.ask("Does your app use caching?", default=False)
-     if use_cache:
-         cache_type = Prompt.ask("Cache type", choices=["redis", "memcached"], default="redis")
-         config["cache"] = {"type": cache_type, "env_var": f"{cache_type.upper()}_URL"}
-
-     # Region
-     config["region"] = Prompt.ask("Region", choices=["nyc3", "sfo3", "ams3", "fra1", "lon1"], default="nyc3")
-
-     # Instance size
-     instance_size = Prompt.ask(
-         "Instance size", choices=["basic", "standard", "premium"], default="basic"
-     )
-     config["instance_size"] = instance_size
-
-     # Resources (CPU/RAM)
-     config["resources"] = {
-         "cpu": IntPrompt.ask("CPU (vCPUs)", default=1 if instance_size == "basic" else 2),
-         "ram": Prompt.ask("RAM (e.g., 1GB, 4GB)", default="1GB" if instance_size == "basic" else "4GB"),
-     }
-
-     # Environment variables
-     add_env = Confirm.ask("Add environment variables?", default=False)
-     if add_env:
-         env_vars = []
-         while True:
-             env_var = Prompt.ask("Environment variable name (blank to finish)", default="")
-             if not env_var:
-                 break
-             env_vars.append(env_var)
-         if env_vars:
-             config["env_vars"] = env_vars
-
-     # Write to file
-     with open(filename, "w") as f:
-         yaml.dump(config, f, sort_keys=False, default_flow_style=False)
-
-     return filename
+ """
+ Configuration file generation utilities.
+ """
+
+ import os
+ import shutil
+ from datetime import datetime
+ from pathlib import Path
+
+ import click
+ import yaml
+ from rich.console import Console
+ from rich.prompt import Confirm, IntPrompt, Prompt
+ from xenfra_sdk import CodebaseAnalysisResponse
+
+ console = Console()
+
+
+ def read_xenfra_yaml(filename: str = "xenfra.yaml") -> dict:
+     """
+     Read and parse xenfra.yaml configuration file.
+
+     Args:
+         filename: Path to the config file (default: xenfra.yaml)
+
+     Returns:
+         Dictionary containing the configuration
+
+     Raises:
+         FileNotFoundError: If the config file doesn't exist
+         yaml.YAMLError: If the YAML is malformed
+         ValueError: If the YAML is invalid
+         IOError: If reading fails
+     """
+     if not Path(filename).exists():
+         raise FileNotFoundError(
+             f"Configuration file '{filename}' not found. Run 'xenfra init' first."
+         )
+
+     try:
+         with open(filename, "r") as f:
+             return yaml.safe_load(f) or {}
+     except yaml.YAMLError as e:
+         raise ValueError(f"Invalid YAML in {filename}: {e}")
+     except Exception as e:
+         raise IOError(f"Failed to read {filename}: {e}")
+
+
+ def generate_xenfra_yaml(analysis: CodebaseAnalysisResponse, filename: str = "xenfra.yaml", package_manager_override: str = None, dependency_file_override: str = None) -> str:
+     """
+     Generate xenfra.yaml from AI codebase analysis.
+
+     Args:
+         analysis: CodebaseAnalysisResponse from Intelligence Service
+         filename: Output filename (default: xenfra.yaml)
+         package_manager_override: Optional override for package manager (user selection)
+         dependency_file_override: Optional override for dependency file (user selection)
+
+     Returns:
+         Path to the generated file
+     """
+     # Build configuration dictionary
+     config = {
+         "name": os.path.basename(os.getcwd()),
+         "framework": analysis.framework,
+         "region": "nyc3", # Default to NYC3
+         "port": analysis.port,
+     }
+
+     # Add entrypoint if detected (e.g., "todo.main:app")
+     if hasattr(analysis, 'entrypoint') and analysis.entrypoint:
+         config["entrypoint"] = analysis.entrypoint
+
+     # Add database configuration if detected
+     if analysis.database and analysis.database != "none":
+         config["database"] = {"type": analysis.database, "env_var": "DATABASE_URL"}
+
+     # Add cache configuration if detected
+     if analysis.cache and analysis.cache != "none":
+         config["cache"] = {"type": analysis.cache, "env_var": f"{analysis.cache.upper()}_URL"}
+
+     # Add worker configuration if detected
+     if analysis.workers and len(analysis.workers) > 0:
+         config["workers"] = analysis.workers
+
+     # Add environment variables
+     if analysis.env_vars and len(analysis.env_vars) > 0:
+         config["env_vars"] = analysis.env_vars
+
+     # Infrastructure configuration
+     config["instance_size"] = analysis.instance_size
+     config["resources"] = {
+         "cpu": 1,
+         "ram": "1GB"
+     }
+
+     # Map resources based on detected size for better defaults
+     if analysis.instance_size == "standard":
+         config["resources"]["cpu"] = 2
+         config["resources"]["ram"] = "4GB"
+     elif analysis.instance_size == "premium":
+         config["resources"]["cpu"] = 4
+         config["resources"]["ram"] = "8GB"
+
+     # Add package manager info (use override if provided, otherwise use analysis)
+     package_manager = package_manager_override or analysis.package_manager
+     dependency_file = dependency_file_override or analysis.dependency_file
+
+     if package_manager:
+         config["package_manager"] = package_manager
+     if dependency_file:
+         config["dependency_file"] = dependency_file
+
+     # Write to file
+     with open(filename, "w") as f:
+         yaml.dump(config, f, sort_keys=False, default_flow_style=False)
+
+     return filename
+
+
+ def create_backup(file_path: str) -> str:
+     """
+     Create a timestamped backup of a file in .xenfra/backups/ directory.
+
+     Args:
+         file_path: Path to the file to backup
+
+     Returns:
+         Path to the backup file
+     """
+     # Create .xenfra/backups directory if it doesn't exist
+     backup_dir = Path(".xenfra") / "backups"
+     backup_dir.mkdir(parents=True, exist_ok=True)
+
+     # Generate timestamped backup filename
+     timestamp = datetime.now().strftime("%Y%m%d_%H%M%S")
+     file_name = Path(file_path).name
+     backup_path = backup_dir / f"{file_name}.{timestamp}.backup"
+
+     # Copy file to backup location
+     shutil.copy2(file_path, backup_path)
+
+     return str(backup_path)
+
+
+ def apply_patch(patch: dict, target_file: str = None, create_backup_file: bool = True):
+     """
+     Apply a JSON patch to a configuration file with automatic backup.
+
+     Args:
+         patch: Patch object with file, operation, path, value
+         target_file: Optional override for the file to patch
+         create_backup_file: Whether to create a backup before patching (default: True)
+
+     Returns:
+         Path to the backup file if created, None otherwise
+
+     Raises:
+         ValueError: If patch structure is invalid
+         FileNotFoundError: If target file doesn't exist
+         NotImplementedError: If file type is not supported
+     """
+     # Validate patch structure
+     if not isinstance(patch, dict):
+         raise ValueError("Patch must be a dictionary")
+
+     required_fields = ["file", "operation"]
+     for field in required_fields:
+         if field not in patch:
+             raise ValueError(f"Patch missing required field: {field}")
+
+     operation = patch.get("operation")
+     if operation not in ["add", "replace", "remove"]:
+         raise ValueError(
+             f"Invalid patch operation: {operation}. Must be 'add', 'replace', or 'remove'"
+         )
+
+     file_to_patch = target_file or patch.get("file")
+
+     if not file_to_patch:
+         raise ValueError("No target file specified in patch")
+
+     if not os.path.exists(file_to_patch):
+         # Path resolution fallback for multi-service projects
+         filename = os.path.basename(file_to_patch)
+         if os.path.exists(filename):
+             console.print(f"[dim]Note: Suggested path '{file_to_patch}' not found. Falling back to '{filename}'[/dim]")
+             file_to_patch = filename
+         else:
+             # Try to resolve via xenfra.yaml if available
+             try:
+                 from .config import read_xenfra_yaml
+                 config = read_xenfra_yaml()
+                 if "services" in config:
+                     for svc in config["services"]:
+                         svc_path = svc.get("path", ".")
+                         # If service path is '.' and we're looking for filename in it
+                         potential_path = os.path.join(svc_path, filename) if svc_path != "." else filename
+                         if os.path.exists(potential_path):
+                             console.print(f"[dim]Note: Resolved '{file_to_patch}' to '{potential_path}' via xenfra.yaml[/dim]")
+                             file_to_patch = potential_path
+                             break
+             except Exception:
+                 pass
+
+     if not os.path.exists(file_to_patch):
+         raise FileNotFoundError(f"File '{file_to_patch}' not found")
+
+     # Create backup before modifying
+     backup_path = None
+     if create_backup_file:
+         backup_path = create_backup(file_to_patch)
+
+     # For YAML files
+     if file_to_patch.endswith((".yaml", ".yml")):
+         with open(file_to_patch, "r") as f:
+             config_data = yaml.safe_load(f) or {}
+
+         # Apply patch based on operation
+         operation = patch.get("operation")
+         path = (patch.get("path") or "").strip("/")
+         value = patch.get("value")
+
+         if operation == "add":
+             # For simple paths, add to root
+             if path:
+                 path_parts = path.split("/")
+                 current = config_data
+                 for part in path_parts[:-1]:
+                     if part not in current:
+                         current[part] = {}
+                     current = current[part]
+                 current[path_parts[-1]] = value
+             else:
+                 # Add to root level
+                 if isinstance(value, dict):
+                     config_data.update(value)
+                 else:
+                     config_data = value
+
+         elif operation == "replace":
+             if path:
+                 path_parts = path.split("/")
+                 current = config_data
+                 for part in path_parts[:-1]:
+                     current = current[part]
+                 current[path_parts[-1]] = value
+             else:
+                 config_data = value
+
+         # Write back
+         with open(file_to_patch, "w") as f:
+             yaml.dump(config_data, f, sort_keys=False, default_flow_style=False)
+
+     # For JSON files
+     elif file_to_patch.endswith(".json"):
+         import json
+
+         with open(file_to_patch, "r") as f:
+             config_data = json.load(f)
+
+         operation = patch.get("operation")
+         path = (patch.get("path") or "").strip("/")
+         value = patch.get("value")
+
+         if operation == "add":
+             if path:
+                 path_parts = path.split("/")
+                 current = config_data
+                 for part in path_parts[:-1]:
+                     if part not in current:
+                         current[part] = {}
+                     current = current[part]
+                 current[path_parts[-1]] = value
+             else:
+                 if isinstance(value, dict):
+                     config_data.update(value)
+                 else:
+                     config_data = value
+
+         elif operation == "replace":
+             if path:
+                 path_parts = path.split("/")
+                 current = config_data
+                 for part in path_parts[:-1]:
+                     current = current[part]
+                 current[path_parts[-1]] = value
+             else:
+                 config_data = value
+
+         # Write back
+         with open(file_to_patch, "w") as f:
+             json.dump(config_data, f, indent=2)
+
+     # For text files (like requirements.txt)
+     elif file_to_patch.endswith(".txt"):
+         operation = patch.get("operation")
+         value = patch.get("value")
+
+         if operation == "add":
+             # Append to file
+             with open(file_to_patch, "a") as f:
+                 f.write(f"\n{value}\n")
+         elif operation == "replace":
+             # Replace entire file
+             with open(file_to_patch, "w") as f:
+                 f.write(str(value))
+
+     # For TOML files (pyproject.toml)
+     elif file_to_patch.endswith(".toml"):
+         import toml
+
+         with open(file_to_patch, "r") as f:
+             config_data = toml.load(f)
+
+         operation = patch.get("operation")
+         path = (patch.get("path") or "").strip("/")
+         value = patch.get("value")
+
+         if operation == "add":
+             # Special case for pyproject.toml dependencies
+             is_pyproject = os.path.basename(file_to_patch) == "pyproject.toml"
+             if is_pyproject and (not path or path == "project/dependencies"):
+                 # Ensure project and dependencies exist
+                 if "project" not in config_data:
+                     config_data["project"] = {}
+                 if "dependencies" not in config_data["project"]:
+                     config_data["project"]["dependencies"] = []
+
+                 # Add value if not already present
+                 if value not in config_data["project"]["dependencies"]:
+                     config_data["project"]["dependencies"].append(value)
+             elif path:
+                 path_parts = path.split("/")
+                 current = config_data
+                 for part in path_parts[:-1]:
+                     if part not in current:
+                         current[part] = {}
+                     current = current[part]
+
+                 # If target is a list (like dependencies), append
+                 target_key = path_parts[-1]
+                 if target_key in current and isinstance(current[target_key], list):
+                     if value not in current[target_key]:
+                         current[target_key].append(value)
+                 else:
+                     current[target_key] = value
+             else:
+                 # Root level add
+                 if isinstance(value, dict):
+                     config_data.update(value)
+                 else:
+                     # Ignore root-level non-dict adds for structured files
+                     # to prevent overwriting the entire config with a string
+                     pass
+
+         elif operation == "replace":
+             if path:
+                 path_parts = path.split("/")
+                 current = config_data
+                 for part in path_parts[:-1]:
+                     current = current[part]
+                 current[path_parts[-1]] = value
+             else:
+                 config_data = value
+
+         # Write back
+         with open(file_to_patch, "w") as f:
+             toml.dump(config_data, f)
+     else:
+         # Design decision: Only support auto-patching for common dependency files
+         # Other file types should be manually edited to avoid data loss
+         # See docs/future-enhancements.md #4 for potential extensions
+         raise NotImplementedError(f"Patching not supported for file type: {file_to_patch}")
+
+     return backup_path
+
+
+ def manual_prompt_for_config(filename: str = "xenfra.yaml") -> str:
+     """
+     Prompt user interactively for configuration details and generate xenfra.yaml.
+
+     Args:
+         filename: Output filename (default: xenfra.yaml)
+
+     Returns:
+         Path to the generated file
+     """
+     config = {}
+
+     # Project name (default to directory name)
+     default_name = os.path.basename(os.getcwd())
+     config["name"] = Prompt.ask("Project name", default=default_name)
+
+     # Framework
+     framework = Prompt.ask(
+         "Framework", choices=["fastapi", "flask", "django", "other"], default="fastapi"
+     )
+     config["framework"] = framework
+
+     # Port
+     port = IntPrompt.ask("Application port", default=8000)
+     # Validate port
+     from .validation import validate_port
+
+     is_valid, error_msg = validate_port(port)
+     if not is_valid:
+         console.print(f"[bold red]Invalid port: {error_msg}[/bold red]")
+         raise click.Abort()
+     config["port"] = port
+
+     # Database
+     use_database = Confirm.ask("Does your app use a database?", default=False)
+     if use_database:
+         db_type = Prompt.ask(
+             "Database type",
+             choices=["postgresql", "mysql", "sqlite", "mongodb"],
+             default="postgresql",
+         )
+         config["database"] = {"type": db_type, "env_var": "DATABASE_URL"}
+
+     # Cache
+     use_cache = Confirm.ask("Does your app use caching?", default=False)
+     if use_cache:
+         cache_type = Prompt.ask("Cache type", choices=["redis", "memcached"], default="redis")
+         config["cache"] = {"type": cache_type, "env_var": f"{cache_type.upper()}_URL"}
+
+     # Region
+     config["region"] = Prompt.ask("Region", choices=["nyc3", "sfo3", "ams3", "fra1", "lon1"], default="nyc3")
+
+     # Instance size
+     instance_size = Prompt.ask(
+         "Instance size", choices=["basic", "standard", "premium"], default="basic"
+     )
+     config["instance_size"] = instance_size
+
+     # Resources (CPU/RAM)
+     config["resources"] = {
+         "cpu": IntPrompt.ask("CPU (vCPUs)", default=1 if instance_size == "basic" else 2),
+         "ram": Prompt.ask("RAM (e.g., 1GB, 4GB)", default="1GB" if instance_size == "basic" else "4GB"),
+     }
+
+     # Environment variables
+     add_env = Confirm.ask("Add environment variables?", default=False)
+     if add_env:
+         env_vars = []
+         while True:
+             env_var = Prompt.ask("Environment variable name (blank to finish)", default="")
+             if not env_var:
+                 break
+             env_vars.append(env_var)
+         if env_vars:
+             config["env_vars"] = env_vars
+
+     # Write to file
+     with open(filename, "w") as f:
+         yaml.dump(config, f, sort_keys=False, default_flow_style=False)
+
+     return filename
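The functional changes in 0.4.4 are the optional entrypoint key written by generate_xenfra_yaml() and the path-resolution fallback in apply_patch(). Below is a minimal, hypothetical sketch of how that fallback could be exercised, assuming the module is importable as xenfra.utils.config; the backend/requirements.txt path and the gunicorn dependency are illustrative stand-ins, not values taken from the package.

    import os
    import tempfile

    from xenfra.utils.config import apply_patch

    # Work in a throwaway project directory for the sketch.
    os.chdir(tempfile.mkdtemp())

    # The real dependency file lives at the project root.
    with open("requirements.txt", "w") as f:
        f.write("fastapi\n")

    # A suggested patch pointing at a nested path that does not exist here.
    patch = {
        "file": "backend/requirements.txt",  # hypothetical suggested path
        "operation": "add",
        "value": "gunicorn",  # hypothetical dependency to append
    }

    # 0.4.2 would raise FileNotFoundError; 0.4.4 falls back to the basename,
    # appends the value to ./requirements.txt, and returns the backup path.
    backup_path = apply_patch(patch)
    print(backup_path)  # e.g. .xenfra/backups/requirements.txt.<timestamp>.backup

When no basename match exists, the 0.4.4 code additionally consults a services list in xenfra.yaml, if one is present, to locate the file inside a service's path before giving up.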