xenfra 0.2.8__py3-none-any.whl → 0.3.0__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
xenfra/utils/config.py CHANGED
@@ -1,363 +1,363 @@
- """
- Configuration file generation utilities.
- """
-
- import os
- import shutil
- from datetime import datetime
- from pathlib import Path
-
- import click
- import yaml
- from rich.console import Console
- from rich.prompt import Confirm, IntPrompt, Prompt
- from xenfra_sdk import CodebaseAnalysisResponse
-
- console = Console()
-
-
- def read_xenfra_yaml(filename: str = "xenfra.yaml") -> dict:
-     """
-     Read and parse xenfra.yaml configuration file.
-
-     Args:
-         filename: Path to the config file (default: xenfra.yaml)
-
-     Returns:
-         Dictionary containing the configuration
-
-     Raises:
-         FileNotFoundError: If the config file doesn't exist
-         yaml.YAMLError: If the YAML is malformed
-     """
-     """
-     Read and parse xenfra.yaml configuration file.
-
-     Args:
-         filename: Path to the config file (default: xenfra.yaml)
-
-     Returns:
-         Dictionary containing the configuration
-
-     Raises:
-         FileNotFoundError: If the config file doesn't exist
-     """
-     if not Path(filename).exists():
-         raise FileNotFoundError(
-             f"Configuration file '{filename}' not found. Run 'xenfra init' first."
-         )
-
-     try:
-         with open(filename, "r") as f:
-             return yaml.safe_load(f) or {}
-     except yaml.YAMLError as e:
-         raise ValueError(f"Invalid YAML in {filename}: {e}")
-     except Exception as e:
-         raise IOError(f"Failed to read {filename}: {e}")
-
-
- def generate_xenfra_yaml(analysis: CodebaseAnalysisResponse, filename: str = "xenfra.yaml") -> str:
-     """
-     Generate xenfra.yaml from AI codebase analysis.
-
-     Args:
-         analysis: CodebaseAnalysisResponse from Intelligence Service
-         filename: Output filename (default: xenfra.yaml)
-
-     Returns:
-         Path to the generated file
-     """
-     # Build configuration dictionary
-     config = {
-         "name": os.path.basename(os.getcwd()),
-         "framework": analysis.framework,
-         "port": analysis.port,
-     }
-
-     # Add database configuration if detected
-     if analysis.database and analysis.database != "none":
-         config["database"] = {"type": analysis.database, "env_var": "DATABASE_URL"}
-
-     # Add cache configuration if detected
-     if analysis.cache and analysis.cache != "none":
-         config["cache"] = {"type": analysis.cache, "env_var": f"{analysis.cache.upper()}_URL"}
-
-     # Add worker configuration if detected
-     if analysis.workers and len(analysis.workers) > 0:
-         config["workers"] = analysis.workers
-
-     # Add environment variables
-     if analysis.env_vars and len(analysis.env_vars) > 0:
-         config["env_vars"] = analysis.env_vars
-
-     # Add instance size
-     config["instance_size"] = analysis.instance_size
-
-     # Add package manager info (for intelligent diagnosis)
-     if analysis.package_manager:
-         config["package_manager"] = analysis.package_manager
-     if analysis.dependency_file:
-         config["dependency_file"] = analysis.dependency_file
-
-     # Write to file
-     with open(filename, "w") as f:
-         yaml.dump(config, f, sort_keys=False, default_flow_style=False)
-
-     return filename
-
-
- def create_backup(file_path: str) -> str:
-     """
-     Create a timestamped backup of a file in .xenfra/backups/ directory.
-
-     Args:
-         file_path: Path to the file to backup
-
-     Returns:
-         Path to the backup file
-     """
-     # Create .xenfra/backups directory if it doesn't exist
-     backup_dir = Path(".xenfra") / "backups"
-     backup_dir.mkdir(parents=True, exist_ok=True)
-
-     # Generate timestamped backup filename
-     timestamp = datetime.now().strftime("%Y%m%d_%H%M%S")
-     file_name = Path(file_path).name
-     backup_path = backup_dir / f"{file_name}.{timestamp}.backup"
-
-     # Copy file to backup location
-     shutil.copy2(file_path, backup_path)
-
-     return str(backup_path)
-
-
- def apply_patch(patch: dict, target_file: str = None, create_backup_file: bool = True):
-     """
-     Apply a JSON patch to a configuration file with automatic backup.
-
-     Args:
-         patch: Patch object with file, operation, path, value
-         target_file: Optional override for the file to patch
-         create_backup_file: Whether to create a backup before patching (default: True)
-
-     Returns:
-         Path to the backup file if created, None otherwise
-
-     Raises:
-         ValueError: If patch structure is invalid
-         FileNotFoundError: If target file doesn't exist
-         NotImplementedError: If file type is not supported
-     """
-     # Validate patch structure
-     if not isinstance(patch, dict):
-         raise ValueError("Patch must be a dictionary")
-
-     required_fields = ["file", "operation"]
-     for field in required_fields:
-         if field not in patch:
-             raise ValueError(f"Patch missing required field: {field}")
-
-     operation = patch.get("operation")
-     if operation not in ["add", "replace", "remove"]:
-         raise ValueError(
-             f"Invalid patch operation: {operation}. Must be 'add', 'replace', or 'remove'"
-         )
-     """
-     Apply a JSON patch to a configuration file with automatic backup.
-
-     Args:
-         patch: Patch object with file, operation, path, value
-         target_file: Optional override for the file to patch
-         create_backup_file: Whether to create a backup before patching (default: True)
-
-     Returns:
-         Path to the backup file if created, None otherwise
-     """
-     file_to_patch = target_file or patch.get("file")
-
-     if not file_to_patch:
-         raise ValueError("No target file specified in patch")
-
-     if not os.path.exists(file_to_patch):
-         raise FileNotFoundError(f"File '{file_to_patch}' not found")
-
-     # Create backup before modifying
-     backup_path = None
-     if create_backup_file:
-         backup_path = create_backup(file_to_patch)
-
-     # For YAML files
-     if file_to_patch.endswith((".yaml", ".yml")):
-         with open(file_to_patch, "r") as f:
-             config_data = yaml.safe_load(f) or {}
-
-         # Apply patch based on operation
-         operation = patch.get("operation")
-         path = patch.get("path", "").strip("/")
-         value = patch.get("value")
-
-         if operation == "add":
-             # For simple paths, add to root
-             if path:
-                 path_parts = path.split("/")
-                 current = config_data
-                 for part in path_parts[:-1]:
-                     if part not in current:
-                         current[part] = {}
-                     current = current[part]
-                 current[path_parts[-1]] = value
-             else:
-                 # Add to root level
-                 if isinstance(value, dict):
-                     config_data.update(value)
-                 else:
-                     config_data = value
-
-         elif operation == "replace":
-             if path:
-                 path_parts = path.split("/")
-                 current = config_data
-                 for part in path_parts[:-1]:
-                     current = current[part]
-                 current[path_parts[-1]] = value
-             else:
-                 config_data = value
-
-         # Write back
-         with open(file_to_patch, "w") as f:
-             yaml.dump(config_data, f, sort_keys=False, default_flow_style=False)
-
-     # For JSON files
-     elif file_to_patch.endswith(".json"):
-         import json
-
-         with open(file_to_patch, "r") as f:
-             config_data = json.load(f)
-
-         operation = patch.get("operation")
-         path = patch.get("path", "").strip("/")
-         value = patch.get("value")
-
-         if operation == "add":
-             if path:
-                 path_parts = path.split("/")
-                 current = config_data
-                 for part in path_parts[:-1]:
-                     if part not in current:
-                         current[part] = {}
-                     current = current[part]
-                 current[path_parts[-1]] = value
-             else:
-                 if isinstance(value, dict):
-                     config_data.update(value)
-                 else:
-                     config_data = value
-
-         elif operation == "replace":
-             if path:
-                 path_parts = path.split("/")
-                 current = config_data
-                 for part in path_parts[:-1]:
-                     current = current[part]
-                 current[path_parts[-1]] = value
-             else:
-                 config_data = value
-
-         # Write back
-         with open(file_to_patch, "w") as f:
-             json.dump(config_data, f, indent=2)
-
-     # For text files (like requirements.txt)
-     elif file_to_patch.endswith(".txt"):
-         operation = patch.get("operation")
-         value = patch.get("value")
-
-         if operation == "add":
-             # Append to file
-             with open(file_to_patch, "a") as f:
-                 f.write(f"\n{value}\n")
-         elif operation == "replace":
-             # Replace entire file
-             with open(file_to_patch, "w") as f:
-                 f.write(str(value))
-     else:
-         # Design decision: Only support auto-patching for common dependency files
-         # Other file types should be manually edited to avoid data loss
-         # See docs/future-enhancements.md #4 for potential extensions
-         raise NotImplementedError(f"Patching not supported for file type: {file_to_patch}")
-
-     return backup_path
-
-
- def manual_prompt_for_config(filename: str = "xenfra.yaml") -> str:
-     """
-     Prompt user interactively for configuration details and generate xenfra.yaml.
-
-     Args:
-         filename: Output filename (default: xenfra.yaml)
-
-     Returns:
-         Path to the generated file
-     """
-     config = {}
-
-     # Project name (default to directory name)
-     default_name = os.path.basename(os.getcwd())
-     config["name"] = Prompt.ask("Project name", default=default_name)
-
-     # Framework
-     framework = Prompt.ask(
-         "Framework", choices=["fastapi", "flask", "django", "other"], default="fastapi"
-     )
-     config["framework"] = framework
-
-     # Port
-     port = IntPrompt.ask("Application port", default=8000)
-     # Validate port
-     from .validation import validate_port
-
-     is_valid, error_msg = validate_port(port)
-     if not is_valid:
-         console.print(f"[bold red]Invalid port: {error_msg}[/bold red]")
-         raise click.Abort()
-     config["port"] = port
-
-     # Database
-     use_database = Confirm.ask("Does your app use a database?", default=False)
-     if use_database:
-         db_type = Prompt.ask(
-             "Database type",
-             choices=["postgresql", "mysql", "sqlite", "mongodb"],
-             default="postgresql",
-         )
-         config["database"] = {"type": db_type, "env_var": "DATABASE_URL"}
-
-     # Cache
-     use_cache = Confirm.ask("Does your app use caching?", default=False)
-     if use_cache:
-         cache_type = Prompt.ask("Cache type", choices=["redis", "memcached"], default="redis")
-         config["cache"] = {"type": cache_type, "env_var": f"{cache_type.upper()}_URL"}
-
-     # Instance size
-     instance_size = Prompt.ask(
-         "Instance size", choices=["basic", "standard", "premium"], default="basic"
-     )
-     config["instance_size"] = instance_size
-
-     # Environment variables
-     add_env = Confirm.ask("Add environment variables?", default=False)
-     if add_env:
-         env_vars = []
-         while True:
-             env_var = Prompt.ask("Environment variable name (blank to finish)", default="")
-             if not env_var:
-                 break
-             env_vars.append(env_var)
-         if env_vars:
-             config["env_vars"] = env_vars
-
-     # Write to file
-     with open(filename, "w") as f:
-         yaml.dump(config, f, sort_keys=False, default_flow_style=False)
-
-     return filename
+ """
+ Configuration file generation utilities.
+ """
+
+ import os
+ import shutil
+ from datetime import datetime
+ from pathlib import Path
+
+ import click
+ import yaml
+ from rich.console import Console
+ from rich.prompt import Confirm, IntPrompt, Prompt
+ from xenfra_sdk import CodebaseAnalysisResponse
+
+ console = Console()
+
+
+ def read_xenfra_yaml(filename: str = "xenfra.yaml") -> dict:
+     """
+     Read and parse xenfra.yaml configuration file.
+
+     Args:
+         filename: Path to the config file (default: xenfra.yaml)
+
+     Returns:
+         Dictionary containing the configuration
+
+     Raises:
+         FileNotFoundError: If the config file doesn't exist
+         yaml.YAMLError: If the YAML is malformed
+     """
+     """
+     Read and parse xenfra.yaml configuration file.
+
+     Args:
+         filename: Path to the config file (default: xenfra.yaml)
+
+     Returns:
+         Dictionary containing the configuration
+
+     Raises:
+         FileNotFoundError: If the config file doesn't exist
+     """
+     if not Path(filename).exists():
+         raise FileNotFoundError(
+             f"Configuration file '{filename}' not found. Run 'xenfra init' first."
+         )
+
+     try:
+         with open(filename, "r") as f:
+             return yaml.safe_load(f) or {}
+     except yaml.YAMLError as e:
+         raise ValueError(f"Invalid YAML in {filename}: {e}")
+     except Exception as e:
+         raise IOError(f"Failed to read {filename}: {e}")
+
+
+ def generate_xenfra_yaml(analysis: CodebaseAnalysisResponse, filename: str = "xenfra.yaml") -> str:
+     """
+     Generate xenfra.yaml from AI codebase analysis.
+
+     Args:
+         analysis: CodebaseAnalysisResponse from Intelligence Service
+         filename: Output filename (default: xenfra.yaml)
+
+     Returns:
+         Path to the generated file
+     """
+     # Build configuration dictionary
+     config = {
+         "name": os.path.basename(os.getcwd()),
+         "framework": analysis.framework,
+         "port": analysis.port,
+     }
+
+     # Add database configuration if detected
+     if analysis.database and analysis.database != "none":
+         config["database"] = {"type": analysis.database, "env_var": "DATABASE_URL"}
+
+     # Add cache configuration if detected
+     if analysis.cache and analysis.cache != "none":
+         config["cache"] = {"type": analysis.cache, "env_var": f"{analysis.cache.upper()}_URL"}
+
+     # Add worker configuration if detected
+     if analysis.workers and len(analysis.workers) > 0:
+         config["workers"] = analysis.workers
+
+     # Add environment variables
+     if analysis.env_vars and len(analysis.env_vars) > 0:
+         config["env_vars"] = analysis.env_vars
+
+     # Add instance size
+     config["instance_size"] = analysis.instance_size
+
+     # Add package manager info (for intelligent diagnosis)
+     if analysis.package_manager:
+         config["package_manager"] = analysis.package_manager
+     if analysis.dependency_file:
+         config["dependency_file"] = analysis.dependency_file
+
+     # Write to file
+     with open(filename, "w") as f:
+         yaml.dump(config, f, sort_keys=False, default_flow_style=False)
+
+     return filename
+
+
+ def create_backup(file_path: str) -> str:
+     """
+     Create a timestamped backup of a file in .xenfra/backups/ directory.
+
+     Args:
+         file_path: Path to the file to backup
+
+     Returns:
+         Path to the backup file
+     """
+     # Create .xenfra/backups directory if it doesn't exist
+     backup_dir = Path(".xenfra") / "backups"
+     backup_dir.mkdir(parents=True, exist_ok=True)
+
+     # Generate timestamped backup filename
+     timestamp = datetime.now().strftime("%Y%m%d_%H%M%S")
+     file_name = Path(file_path).name
+     backup_path = backup_dir / f"{file_name}.{timestamp}.backup"
+
+     # Copy file to backup location
+     shutil.copy2(file_path, backup_path)
+
+     return str(backup_path)
+
+
+ def apply_patch(patch: dict, target_file: str = None, create_backup_file: bool = True):
+     """
+     Apply a JSON patch to a configuration file with automatic backup.
+
+     Args:
+         patch: Patch object with file, operation, path, value
+         target_file: Optional override for the file to patch
+         create_backup_file: Whether to create a backup before patching (default: True)
+
+     Returns:
+         Path to the backup file if created, None otherwise
+
+     Raises:
+         ValueError: If patch structure is invalid
+         FileNotFoundError: If target file doesn't exist
+         NotImplementedError: If file type is not supported
+     """
+     # Validate patch structure
+     if not isinstance(patch, dict):
+         raise ValueError("Patch must be a dictionary")
+
+     required_fields = ["file", "operation"]
+     for field in required_fields:
+         if field not in patch:
+             raise ValueError(f"Patch missing required field: {field}")
+
+     operation = patch.get("operation")
+     if operation not in ["add", "replace", "remove"]:
+         raise ValueError(
+             f"Invalid patch operation: {operation}. Must be 'add', 'replace', or 'remove'"
+         )
+     """
+     Apply a JSON patch to a configuration file with automatic backup.
+
+     Args:
+         patch: Patch object with file, operation, path, value
+         target_file: Optional override for the file to patch
+         create_backup_file: Whether to create a backup before patching (default: True)
+
+     Returns:
+         Path to the backup file if created, None otherwise
+     """
+     file_to_patch = target_file or patch.get("file")
+
+     if not file_to_patch:
+         raise ValueError("No target file specified in patch")
+
+     if not os.path.exists(file_to_patch):
+         raise FileNotFoundError(f"File '{file_to_patch}' not found")
+
+     # Create backup before modifying
+     backup_path = None
+     if create_backup_file:
+         backup_path = create_backup(file_to_patch)
+
+     # For YAML files
+     if file_to_patch.endswith((".yaml", ".yml")):
+         with open(file_to_patch, "r") as f:
+             config_data = yaml.safe_load(f) or {}
+
+         # Apply patch based on operation
+         operation = patch.get("operation")
+         path = patch.get("path", "").strip("/")
+         value = patch.get("value")
+
+         if operation == "add":
+             # For simple paths, add to root
+             if path:
+                 path_parts = path.split("/")
+                 current = config_data
+                 for part in path_parts[:-1]:
+                     if part not in current:
+                         current[part] = {}
+                     current = current[part]
+                 current[path_parts[-1]] = value
+             else:
+                 # Add to root level
+                 if isinstance(value, dict):
+                     config_data.update(value)
+                 else:
+                     config_data = value
+
+         elif operation == "replace":
+             if path:
+                 path_parts = path.split("/")
+                 current = config_data
+                 for part in path_parts[:-1]:
+                     current = current[part]
+                 current[path_parts[-1]] = value
+             else:
+                 config_data = value
+
+         # Write back
+         with open(file_to_patch, "w") as f:
+             yaml.dump(config_data, f, sort_keys=False, default_flow_style=False)
+
+     # For JSON files
+     elif file_to_patch.endswith(".json"):
+         import json
+
+         with open(file_to_patch, "r") as f:
+             config_data = json.load(f)
+
+         operation = patch.get("operation")
+         path = patch.get("path", "").strip("/")
+         value = patch.get("value")
+
+         if operation == "add":
+             if path:
+                 path_parts = path.split("/")
+                 current = config_data
+                 for part in path_parts[:-1]:
+                     if part not in current:
+                         current[part] = {}
+                     current = current[part]
+                 current[path_parts[-1]] = value
+             else:
+                 if isinstance(value, dict):
+                     config_data.update(value)
+                 else:
+                     config_data = value
+
+         elif operation == "replace":
+             if path:
+                 path_parts = path.split("/")
+                 current = config_data
+                 for part in path_parts[:-1]:
+                     current = current[part]
+                 current[path_parts[-1]] = value
+             else:
+                 config_data = value
+
+         # Write back
+         with open(file_to_patch, "w") as f:
+             json.dump(config_data, f, indent=2)
+
+     # For text files (like requirements.txt)
+     elif file_to_patch.endswith(".txt"):
+         operation = patch.get("operation")
+         value = patch.get("value")
+
+         if operation == "add":
+             # Append to file
+             with open(file_to_patch, "a") as f:
+                 f.write(f"\n{value}\n")
+         elif operation == "replace":
+             # Replace entire file
+             with open(file_to_patch, "w") as f:
+                 f.write(str(value))
+     else:
+         # Design decision: Only support auto-patching for common dependency files
+         # Other file types should be manually edited to avoid data loss
+         # See docs/future-enhancements.md #4 for potential extensions
+         raise NotImplementedError(f"Patching not supported for file type: {file_to_patch}")
+
+     return backup_path
+
+
+ def manual_prompt_for_config(filename: str = "xenfra.yaml") -> str:
+     """
+     Prompt user interactively for configuration details and generate xenfra.yaml.
+
+     Args:
+         filename: Output filename (default: xenfra.yaml)
+
+     Returns:
+         Path to the generated file
+     """
+     config = {}
+
+     # Project name (default to directory name)
+     default_name = os.path.basename(os.getcwd())
+     config["name"] = Prompt.ask("Project name", default=default_name)
+
+     # Framework
+     framework = Prompt.ask(
+         "Framework", choices=["fastapi", "flask", "django", "other"], default="fastapi"
+     )
+     config["framework"] = framework
+
+     # Port
+     port = IntPrompt.ask("Application port", default=8000)
+     # Validate port
+     from .validation import validate_port
+
+     is_valid, error_msg = validate_port(port)
+     if not is_valid:
+         console.print(f"[bold red]Invalid port: {error_msg}[/bold red]")
+         raise click.Abort()
+     config["port"] = port
+
+     # Database
+     use_database = Confirm.ask("Does your app use a database?", default=False)
+     if use_database:
+         db_type = Prompt.ask(
+             "Database type",
+             choices=["postgresql", "mysql", "sqlite", "mongodb"],
+             default="postgresql",
+         )
+         config["database"] = {"type": db_type, "env_var": "DATABASE_URL"}
+
+     # Cache
+     use_cache = Confirm.ask("Does your app use caching?", default=False)
+     if use_cache:
+         cache_type = Prompt.ask("Cache type", choices=["redis", "memcached"], default="redis")
+         config["cache"] = {"type": cache_type, "env_var": f"{cache_type.upper()}_URL"}
+
+     # Instance size
+     instance_size = Prompt.ask(
+         "Instance size", choices=["basic", "standard", "premium"], default="basic"
+     )
+     config["instance_size"] = instance_size
+
+     # Environment variables
+     add_env = Confirm.ask("Add environment variables?", default=False)
+     if add_env:
+         env_vars = []
+         while True:
+             env_var = Prompt.ask("Environment variable name (blank to finish)", default="")
+             if not env_var:
+                 break
+             env_vars.append(env_var)
+         if env_vars:
+             config["env_vars"] = env_vars
+
+     # Write to file
+     with open(filename, "w") as f:
+         yaml.dump(config, f, sort_keys=False, default_flow_style=False)
+
+     return filename
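For context, a minimal usage sketch of the patching helpers in this file (not part of the diff; it assumes the module is importable as xenfra.utils.config and that a xenfra.yaml already exists in the working directory):

    from xenfra.utils.config import apply_patch, read_xenfra_yaml

    # Patch shape mirrors the fields validated in apply_patch: file, operation, path, value.
    patch = {
        "file": "xenfra.yaml",      # target file (relative path)
        "operation": "add",         # one of "add", "replace", "remove"
        "path": "cache/type",       # slash-separated key path into the YAML mapping
        "value": "redis",           # value written at that path
    }

    # A timestamped backup is written to .xenfra/backups/ before the file is modified.
    backup_path = apply_patch(patch)
    updated = read_xenfra_yaml()    # e.g. {'name': ..., 'cache': {'type': 'redis'}, ...}

Note that in this version "remove" passes validation but has no handler for YAML/JSON targets, so only "add" and "replace" actually modify the file.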