xenfra 0.3.7__py3-none-any.whl → 0.3.9__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
@@ -14,7 +14,7 @@ from xenfra_sdk.privacy import scrub_logs
 
 from ..utils.auth import API_BASE_URL, get_auth_token
 from ..utils.codebase import has_xenfra_config
-from ..utils.config import apply_patch
+from ..utils.config import apply_patch, read_xenfra_yaml
 from ..utils.validation import (
     validate_branch_name,
     validate_deployment_id,
@@ -23,6 +23,12 @@ from ..utils.validation import (
     validate_project_name,
 )
 
+import time
+from datetime import datetime
+
+from rich.live import Live
+from rich.progress import BarColumn, Progress, SpinnerColumn, TextColumn
+
 console = Console()
 
 # Maximum number of retry attempts for auto-healing
@@ -59,6 +65,187 @@ def show_patch_preview(patch_data: dict):
     console.print()
 
 
+def _stream_deployment(client: XenfraClient, project_name: str, git_repo: str, branch: str, framework: str, region: str, size: str, is_dockerized: bool = True):
+    """
+    Creates deployment with real-time SSE streaming (no polling needed).
+
+    Returns tuple of (status, deployment_id, logs_buffer)
+    """
+    console.print(Panel(
+        f"[bold cyan]Project:[/bold cyan] {project_name}\n"
+        f"[bold cyan]Mode:[/bold cyan] Real-time Streaming Deployment",
+        title="[bold green]🚀 Deployment Starting[/bold green]",
+        border_style="green"
+    ))
+
+    deployment_id = None
+    logs_buffer = []
+    status_val = "PENDING"
+
+    try:
+        for event in client.deployments.create_stream(
+            project_name=project_name,
+            git_repo=git_repo,
+            branch=branch,
+            framework=framework,
+            region=region,
+            size_slug=size,
+            is_dockerized=is_dockerized,
+        ):
+            event_type = event.get("event", "message")
+            data = event.get("data", "")
+
+            if event_type == "deployment_created":
+                # Extract deployment ID
+                if isinstance(data, dict):
+                    deployment_id = data.get("deployment_id")
+                    console.print(f"[bold green]✓[/bold green] Deployment created: [cyan]{deployment_id}[/cyan]\n")
+
+            elif event_type == "log":
+                # Real-time log output
+                log_line = str(data)
+                logs_buffer.append(log_line)
+
+                # Colorize output
+                if any(x in log_line for x in ["ERROR", "FAILED", "✗"]):
+                    console.print(f"[bold red]{log_line}[/bold red]")
+                elif any(x in log_line for x in ["WARN", "WARNING", "⚠"]):
+                    console.print(f"[yellow]{log_line}[/yellow]")
+                elif any(x in log_line for x in ["SUCCESS", "COMPLETED", "✓", "passed!"]):
+                    console.print(f"[bold green]{log_line}[/bold green]")
+                elif "PHASE" in log_line:
+                    console.print(f"\n[bold blue]{log_line}[/bold blue]")
+                elif "[InfraEngine]" in log_line or "[INFO]" in log_line:
+                    console.print(f"[cyan]›[/cyan] {log_line}")
+                else:
+                    console.print(f"[dim]{log_line}[/dim]")
+
+            elif event_type == "error":
+                error_msg = str(data)
+                logs_buffer.append(f"ERROR: {error_msg}")
+                console.print(f"\n[bold red]❌ Error: {error_msg}[/bold red]")
+                status_val = "FAILED"
+
+            elif event_type == "deployment_complete":
+                # Final status
+                if isinstance(data, dict):
+                    status_val = data.get("status", "UNKNOWN")
+                    ip_address = data.get("ip_address")
+
+                    console.print()
+                    if status_val == "SUCCESS":
+                        console.print("[bold green]✨ SUCCESS: Your application is live![/bold green]")
+                        if ip_address and ip_address != "unknown":
+                            console.print(f"[bold]Accessible at:[/bold] [link=http://{ip_address}]http://{ip_address}[/link]")
+                    elif status_val == "FAILED":
+                        console.print("[bold red]❌ DEPLOYMENT FAILED[/bold red]")
+                        error = data.get("error")
+                        if error:
+                            console.print(f"[red]Error: {error}[/red]")
+                break
+
+    except Exception as e:
+        console.print(f"\n[bold red]❌ Streaming error: {e}[/bold red]")
+        status_val = "FAILED"
+        logs_buffer.append(f"Streaming error: {e}")
+
+    return (status_val, deployment_id, "\n".join(logs_buffer))
+
+
+def _follow_deployment(client: XenfraClient, deployment_id: str):
+    """
+    Polls logs and status in real-time until completion with CI/CD style output.
+    (LEGACY - Used for backward compatibility)
+    """
+    console.print(Panel(
+        f"[bold cyan]Deployment ID:[/bold cyan] {deployment_id}\n"
+        f"[bold cyan]Mode:[/bold cyan] Streaming Real-time Infrastructure Logs",
+        title="[bold green]🚀 Deployment Monitor[/bold green]",
+        border_style="green"
+    ))
+
+    last_log_len = 0
+    status_val = "PENDING"
+
+    # Use a live display for the progress bar at the bottom
+    with Progress(
+        SpinnerColumn(),
+        TextColumn("[bold blue]{task.description}"),
+        BarColumn(bar_width=40),
+        TextColumn("[progress.percentage]{task.percentage:>3.0f}%"),
+        console=console,
+        transient=False,
+    ) as progress:
+        task = progress.add_task("Waiting for server response...", total=100)
+
+        while status_val not in ["SUCCESS", "FAILED", "CANCELLED"]:
+            try:
+                # 1. Update Status
+                dep_status = client.deployments.get_status(deployment_id)
+                status_val = dep_status.get("status", "PENDING")
+                progress_val = dep_status.get("progress", 0)
+                state = dep_status.get("state", "preparing")
+
+                # Use a more descriptive description for the progress task
+                desc = f"Phase: {state}"
+                if status_val == "FAILED":
+                    desc = "[bold red]FAILED[/bold red]"
+                elif status_val == "SUCCESS":
+                    desc = "[bold green]SUCCESS[/bold green]"
+
+                progress.update(task, completed=progress_val, description=desc)
+
+                # 2. Update Logs
+                log_content = client.deployments.get_logs(deployment_id)
+                if log_content and len(log_content) > last_log_len:
+                    new_logs = log_content[last_log_len:].strip()
+                    for line in new_logs.split("\n"):
+                        # Process and colorize lines
+                        clean_line = line.strip()
+                        if not clean_line:
+                            continue
+
+                        if any(x in clean_line for x in ["ERROR", "FAILED", "✗"]):
+                            progress.console.print(f"[bold red]{clean_line}[/bold red]")
+                        elif any(x in clean_line for x in ["WARN", "WARNING", "⚠"]):
+                            progress.console.print(f"[yellow]{clean_line}[/yellow]")
+                        elif any(x in clean_line for x in ["SUCCESS", "COMPLETED", "✓", "passed!"]):
+                            progress.console.print(f"[bold green]{clean_line}[/bold green]")
+                        elif "PHASE" in clean_line:
+                            progress.console.print(f"\n[bold blue]{clean_line}[/bold blue]")
+                        elif "[InfraEngine]" in clean_line:
+                            progress.console.print(f"[dim]{clean_line}[/dim]")
+                        else:
+                            progress.console.print(f"[cyan]›[/cyan] {clean_line}")
+
+                    last_log_len = len(log_content)
+
+                if status_val in ["SUCCESS", "FAILED", "CANCELLED"]:
+                    break
+
+                time.sleep(1.5)  # Slightly faster polling for better feel
+            except Exception as e:
+                # progress.console.print(f"[dim]Transient connection issue: {e}[/dim]")
+                time.sleep(3)
+                continue
+
+    console.print()
+    if status_val == "SUCCESS":
+        console.print("[bold green]✨ SUCCESS: Your application is live![/bold green]")
+        # Try to get the IP address
+        try:
+            final_status = client.deployments.get_status(deployment_id)
+            ip = final_status.get("ip_address")
+            if ip:
+                console.print(f"[bold]Accessible at:[/bold] [link=http://{ip}]http://{ip}[/link]")
+        except:
+            pass
+    elif status_val == "FAILED":
+        console.print("\n[bold red]❌ FAILURE DETECTED: Entering AI Diagnosis Mode...[/bold red]")
+
+    return status_val
+
+
 def zen_nod_workflow(logs: str, client: XenfraClient, attempt: int) -> bool:
     """
     Execute the Zen Nod auto-healing workflow.
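
For illustration only (not part of the package diff): _stream_deployment above assumes that client.deployments.create_stream() yields dict-shaped SSE events carrying "event" and "data" keys. A hypothetical event sequence matching the handlers, with example values:

    # Hypothetical events; field values are examples, not real output.
    example_events = [
        {"event": "deployment_created", "data": {"deployment_id": "dep-123"}},
        {"event": "log", "data": "PHASE 1/3: Provisioning droplet"},
        {"event": "deployment_complete", "data": {"status": "SUCCESS", "ip_address": "203.0.113.10"}},
    ]
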
@@ -74,6 +261,12 @@ def zen_nod_workflow(logs: str, client: XenfraClient, attempt: int) -> bool:
     console.print()
     console.print(f"[cyan]🤖 Analyzing failure (attempt {attempt}/{MAX_RETRY_ATTEMPTS})...[/cyan]")
 
+    # Slice logs to last 300 lines for focused diagnosis (Fix #26)
+    log_lines = logs.split("\n")
+    if len(log_lines) > 300:
+        logs = "\n".join(log_lines[-300:])
+        console.print("[dim]Note: Analyzing only the last 300 lines of logs for efficiency.[/dim]")
+
     # Scrub sensitive data from logs
     scrubbed_logs = scrub_logs(logs)
 
@@ -120,8 +313,10 @@ def zen_nod_workflow(logs: str, client: XenfraClient, attempt: int) -> bool:
 @click.option("--git-repo", help="Git repository URL (if deploying from git)")
 @click.option("--branch", default="main", help="Git branch (default: main)")
 @click.option("--framework", help="Framework override (fastapi, flask, django)")
+@click.option("--region", help="DigitalOcean region override")
+@click.option("--size", help="DigitalOcean size slug override")
 @click.option("--no-heal", is_flag=True, help="Disable auto-healing on failure")
-def deploy(project_name, git_repo, branch, framework, no_heal):
+def deploy(project_name, git_repo, branch, framework, region, size, no_heal):
     """
     Deploy current project to DigitalOcean with auto-healing.
 
@@ -150,6 +345,35 @@ def deploy(project_name, git_repo, branch, framework, no_heal):
         console.print("[dim]Deployment cancelled.[/dim]")
         return
 
+    # Load configuration from xenfra.yaml if it exists
+    config = {}
+    if has_xenfra_config():
+        try:
+            config = read_xenfra_yaml()
+        except Exception as e:
+            console.print(f"[yellow]Warning: Could not read xenfra.yaml: {e}[/yellow]")
+
+    # Resolve values with precedence: 1. CLI Flag, 2. xenfra.yaml, 3. Default
+    project_name = project_name or config.get("name") or os.path.basename(os.getcwd())
+    framework = framework or config.get("framework")
+    region = region or config.get("region") or "nyc3"
+
+    # Resolve size slug (map instance_size/resources to a DigitalOcean slug)
+    if not size:
+        if config.get("size"):
+            size = config.get("size")
+        else:
+            instance_size = config.get("instance_size", "basic")
+            resources = config.get("resources", {})
+            cpu = resources.get("cpu", 1)
+
+            if instance_size == "premium" or cpu >= 4:
+                size = "s-4vcpu-8gb"
+            elif instance_size == "standard" or cpu >= 2:
+                size = "s-2vcpu-4gb"
+            else:
+                size = "s-1vcpu-1gb"
+
     # Default project name to current directory
     if not project_name:
         project_name = os.path.basename(os.getcwd())
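
For illustration only (not part of the package diff): with the resolution block above and no CLI flags, a hypothetical configuration such as

    # Hypothetical dict as returned by read_xenfra_yaml(); values are examples.
    config = {"name": "demo-api", "framework": "fastapi", "region": "fra1",
              "instance_size": "standard", "resources": {"cpu": 2, "ram": "4GB"}}

would resolve region to "fra1" and size to "s-2vcpu-4gb", while an explicit --region or --size flag would take precedence over both the file and the defaults.
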
@@ -206,72 +430,60 @@ def deploy(project_name, git_repo, branch, framework, no_heal):
         else:
             console.print("[cyan]Creating deployment...[/cyan]")
 
-        # Detect framework if not provided
+        # Detect framework if not provided (AI-powered Zen Mode)
         if not framework:
-            console.print("[dim]Auto-detecting framework...[/dim]")
-            framework = "fastapi"  # Default for now
-
-        # Create deployment
+            console.print("[cyan]🔍 AI Auto-detecting project type...[/cyan]")
+            try:
+                from ..utils.codebase import scan_codebase
+                code_snippets = scan_codebase()
+                if code_snippets:
+                    analysis = client.intelligence.analyze_codebase(code_snippets)
+                    framework = analysis.framework
+                    is_dockerized = analysis.is_dockerized
+                    # Override port and size if AI has strong recommendations
+                    if not size and analysis.instance_size:
+                        size = "s-1vcpu-1gb" if analysis.instance_size == "basic" else "s-2vcpu-4gb"
+
+                    mode_str = "Docker" if is_dockerized else "Bare Metal"
+                    console.print(f"[green]✓ Detected {framework.upper()} project ({mode_str} Mode)[/green] (Port: {analysis.port})")
+                else:
+                    console.print("[yellow]⚠ No code files found for AI analysis. Defaulting to 'fastapi'[/yellow]")
+                    framework = "fastapi"
+                    is_dockerized = True
+            except Exception as e:
+                console.print(f"[yellow]⚠ AI detection failed: {e}. Defaulting to 'fastapi'[/yellow]")
+                framework = "fastapi"
+                is_dockerized = True
+
+        # Create deployment with real-time streaming
         try:
-            from rich.progress import Progress, SpinnerColumn, TextColumn, BarColumn
-
-            with Progress(
-                SpinnerColumn(),
-                TextColumn("[bold blue]{task.description}"),
-                BarColumn(),
-                TextColumn("[progress.percentage]{task.percentage:>3.0f}%"),
-                console=console,
-            ) as progress:
-                # Track deployment phases
-                task = progress.add_task("Creating deployment...", total=100)
-
-                deployment = client.deployments.create(
-                    project_name=project_name,
-                    git_repo=git_repo,
-                    branch=branch,
-                    framework=framework,
-                )
-
-                progress.update(task, advance=100, description="Deployment created!")
-
-                deployment_id = deployment["deployment_id"]
-                console.print(
-                    f"[bold green]✓[/bold green] Deployment created: [cyan]{deployment_id}[/cyan]"
+            status_result, deployment_id, logs_data = _stream_deployment(
+                client=client,
+                project_name=project_name,
+                git_repo=git_repo,
+                branch=branch,
+                framework=framework,
+                region=region,
+                size=size,
+                is_dockerized=is_dockerized,
             )
 
-            # Show deployment details
-            details_table = Table(show_header=False, box=None)
-            details_table.add_column("Property", style="cyan")
-            details_table.add_column("Value", style="white")
+            if status_result == "FAILED" and not no_heal:
+                # Hand off to the Zen Nod AI Agent
+                should_retry = zen_nod_workflow(logs_data, client, attempt)
 
-            details_table.add_row("Deployment ID", str(deployment_id))
-            details_table.add_row("Project", project_name)
-            if git_repo:
-                details_table.add_row("Repository", git_repo)
-                details_table.add_row("Branch", branch)
+                if should_retry:
+                    # The agent applied a fix, loop back for attempt + 1
+                    continue
+                else:
+                    # Agent couldn't fix it or user declined
+                    raise click.Abort()
+
+            # If we got here with success, break the retry loop
+            if status_result == "SUCCESS":
+                break
             else:
-                details_table.add_row("Source", "Local directory")
-            details_table.add_row("Framework", framework)
-            details_table.add_row("Status", deployment.get("status", "PENDING"))
-
-            panel = Panel(
-                details_table,
-                title="[bold green]Deployment Started[/bold green]",
-                border_style="green",
-            )
-            console.print(panel)
-
-            # Show next steps
-            console.print("\n[bold]Next steps:[/bold]")
-            console.print(f"  • Monitor status: [cyan]xenfra status {deployment_id}[/cyan]")
-            console.print(f"  • View logs: [cyan]xenfra logs {deployment_id}[/cyan]")
-            if not no_heal:
-                console.print(
-                    f"  • Diagnose issues: [cyan]xenfra diagnose {deployment_id}[/cyan]"
-                )
-
-            # Success - break out of retry loop
-            break
+                raise click.Abort()
 
         except XenfraAPIError as e:
             # Deployment failed - try to provide helpful error
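
For orientation only (not part of the package diff): the continue, break, and attempt references in the new block above rely on a retry loop that encloses this code but sits outside the hunk. A rough, hypothetical sketch of that shape:

    # Hypothetical outline of the enclosing retry loop; in the real module,
    # MAX_RETRY_ATTEMPTS comes from the unchanged "Maximum number of retry attempts" constant.
    MAX_RETRY_ATTEMPTS = 3  # assumed value for this sketch only
    for attempt in range(1, MAX_RETRY_ATTEMPTS + 1):
        # ... stream the deployment; on FAILED (and not --no-heal) hand logs to zen_nod_workflow ...
        # 'continue' moves on to the next attempt, 'break' exits once the status is SUCCESS.
        pass
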
@@ -373,9 +585,8 @@ def logs(deployment_id, follow, tail):
     console.print(f"\n[bold]Logs for deployment {deployment_id}:[/bold]\n")
 
     if follow:
-        console.print(
-            "[yellow]Note: --follow flag not yet implemented (showing static logs)[/yellow]\n"
-        )
+        _follow_deployment(client, deployment_id)
+        return
 
     # Display logs
     for line in log_lines:
@@ -427,9 +638,8 @@ def status(deployment_id, watch):
     deployment_status = client.deployments.get_status(deployment_id)
 
     if watch:
-        console.print(
-            "[yellow]Note: --watch flag not yet implemented (showing current status)[/yellow]\n"
-        )
+        _follow_deployment(client, deployment_id)
+        return
 
     # Display status
     status_value = deployment_status.get("status", "UNKNOWN")
@@ -190,7 +190,16 @@ def init(manual, accept_all):
     table.add_row("Workers", ", ".join(analysis.workers))
     table.add_row("Package Manager", selected_package_manager)
     table.add_row("Dependency File", selected_dependency_file)
+
+    # New: Infrastructure details in summary
+    table.add_row("Region", "nyc3 (default)")
     table.add_row("Instance Size", analysis.instance_size)
+
+    # Resource visualization
+    cpu = 1 if analysis.instance_size == "basic" else (2 if analysis.instance_size == "standard" else 4)
+    ram = "1GB" if analysis.instance_size == "basic" else ("4GB" if analysis.instance_size == "standard" else "8GB")
+    table.add_row("Resources", f"{cpu} vCPU, {ram} RAM")
+
     table.add_row("Estimated Cost", f"${analysis.estimated_cost_monthly:.2f}/month")
     table.add_row("Confidence", f"{analysis.confidence:.0%}")
 
@@ -371,7 +380,16 @@ def analyze():
     table.add_row("Workers", ", ".join(analysis.workers))
     if analysis.env_vars:
         table.add_row("Environment Variables", ", ".join(analysis.env_vars))
+
+    # New: Infrastructure details in preview
+    table.add_row("Region", "nyc3 (default)")
     table.add_row("Instance Size", analysis.instance_size)
+
+    # Resource visualization
+    cpu = 1 if analysis.instance_size == "basic" else (2 if analysis.instance_size == "standard" else 4)
+    ram = "1GB" if analysis.instance_size == "basic" else ("4GB" if analysis.instance_size == "standard" else "8GB")
+    table.add_row("Resources", f"{cpu} vCPU, {ram} RAM")
+
     table.add_row("Estimated Cost", f"${analysis.estimated_cost_monthly:.2f}/month")
     table.add_row("Confidence", f"{analysis.confidence:.0%}")
 
xenfra/utils/config.py CHANGED
@@ -61,6 +61,7 @@ def generate_xenfra_yaml(analysis: CodebaseAnalysisResponse, filename: str = "xe
     config = {
         "name": os.path.basename(os.getcwd()),
         "framework": analysis.framework,
+        "region": "nyc3",  # Default to NYC3
        "port": analysis.port,
     }
 
@@ -80,8 +81,20 @@ def generate_xenfra_yaml(analysis: CodebaseAnalysisResponse, filename: str = "xe
     if analysis.env_vars and len(analysis.env_vars) > 0:
         config["env_vars"] = analysis.env_vars
 
-    # Add instance size
+    # Infrastructure configuration
     config["instance_size"] = analysis.instance_size
+    config["resources"] = {
+        "cpu": 1,
+        "ram": "1GB"
+    }
+
+    # Map resources based on detected size for better defaults
+    if analysis.instance_size == "standard":
+        config["resources"]["cpu"] = 2
+        config["resources"]["ram"] = "4GB"
+    elif analysis.instance_size == "premium":
+        config["resources"]["cpu"] = 4
+        config["resources"]["ram"] = "8GB"
 
     # Add package manager info (for intelligent diagnosis)
     if analysis.package_manager:
@@ -380,12 +393,21 @@ def manual_prompt_for_config(filename: str = "xenfra.yaml") -> str:
         cache_type = Prompt.ask("Cache type", choices=["redis", "memcached"], default="redis")
         config["cache"] = {"type": cache_type, "env_var": f"{cache_type.upper()}_URL"}
 
+    # Region
+    config["region"] = Prompt.ask("Region", choices=["nyc3", "sfo3", "ams3", "fra1", "lon1"], default="nyc3")
+
     # Instance size
     instance_size = Prompt.ask(
         "Instance size", choices=["basic", "standard", "premium"], default="basic"
     )
     config["instance_size"] = instance_size
 
+    # Resources (CPU/RAM)
+    config["resources"] = {
+        "cpu": IntPrompt.ask("CPU (vCPUs)", default=1 if instance_size == "basic" else 2),
+        "ram": Prompt.ask("RAM (e.g., 1GB, 4GB)", default="1GB" if instance_size == "basic" else "4GB"),
+    }
+
     # Environment variables
     add_env = Confirm.ask("Add environment variables?", default=False)
     if add_env:
@@ -1,6 +1,6 @@
 Metadata-Version: 2.3
 Name: xenfra
-Version: 0.3.7
+Version: 0.3.9
 Summary: A 'Zen Mode' infrastructure engine for Python developers.
 Author: xenfra-cloud
 Author-email: xenfra-cloud <xenfracloud@gmail.com>
@@ -2,19 +2,19 @@ xenfra/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
 xenfra/commands/__init__.py,sha256=kTTwVnTvoxikyPUhQiyTAbnw4PYafktuE1----TqQoA,43
 xenfra/commands/auth.py,sha256=ecReVCGl7Ys2d77mv_e4mCbs4ug6FLIb3S9dl2FUhr4,4178
 xenfra/commands/auth_device.py,sha256=caD2UdveEZtAFjgjmnA-l5bjbbPONFjXJXgeJN7mhbk,6710
-xenfra/commands/deployments.py,sha256=bVpQ9kuFDT5a9M7ADwXZzXIXYyPS92saZEuYmiI_lU8,30016
-xenfra/commands/intelligence.py,sha256=jDQa2Bx4blTFoqtyutf1xsf6fHjIjRYEFW5G3mrz-Ks,16594
+xenfra/commands/deployments.py,sha256=SE6SeKl-waoMxO1kf16jLCMw-mQAnHn_w05iin2uCZw,39425
+xenfra/commands/intelligence.py,sha256=_H0t9OJwPbd9E0r1tcMACrt6-UBPrrTII8M47kC_iHA,17496
 xenfra/commands/projects.py,sha256=SAxF_pOr95K6uz35U-zENptKndKxJNZn6bcD45PHcpI,6696
 xenfra/commands/security_cmd.py,sha256=EI5sjX2lcMxgMH-LCFmPVkc9YqadOrcoSgTiKknkVRY,7327
 xenfra/main.py,sha256=2EPPuIdxjhW-I-e-Mc0i2ayeLaSJdmzddNThkXq7B7c,2033
 xenfra/utils/__init__.py,sha256=4ZRYkrb--vzoXjBHG8zRxz2jCXNGtAoKNtkyu2WRI2A,45
 xenfra/utils/auth.py,sha256=9JbFnv0-rdlJF-4hKD2uWd9h9ehqC1iIHee1O5e-3RM,13769
 xenfra/utils/codebase.py,sha256=GMrqhOJWX8q5ZXSLI9P3hJZBpufXMQA3Z4fKh2XSTNo,5949
-xenfra/utils/config.py,sha256=BylyzHLkL6rmvNNW9zxCaSvk1dV0yzJQPaW-dP7E5j0,13931
+xenfra/utils/config.py,sha256=K2k7hxz94dzbxvCw_PDXtq4o1VlmJMTFktlL-F2g5rY,14786
 xenfra/utils/errors.py,sha256=6G91YzzDDNkKHANTgfAMiOiMElEyi57wo6-FzRa4VuQ,4211
 xenfra/utils/security.py,sha256=EA8CIPLt8Y-QP5uZ7c5NuC6ZLRV1aZS8NapS9ix_vok,11479
 xenfra/utils/validation.py,sha256=cvuL_AEFJ2oCoP0abCqoOIABOwz79Gkf-jh_dcFIQlM,6912
-xenfra-0.3.7.dist-info/WHEEL,sha256=KSLUh82mDPEPk0Bx0ScXlWL64bc8KmzIPNcpQZFV-6E,79
-xenfra-0.3.7.dist-info/entry_points.txt,sha256=a_2cGhYK__X6eW05Ba8uB6RIM_61c2sHtXsPY8N0mic,45
-xenfra-0.3.7.dist-info/METADATA,sha256=P2qLV0wwwrioPQtkXda94RA7FUOBSmKmzOooQCNiD8Q,3898
-xenfra-0.3.7.dist-info/RECORD,,
+xenfra-0.3.9.dist-info/WHEEL,sha256=KSLUh82mDPEPk0Bx0ScXlWL64bc8KmzIPNcpQZFV-6E,79
+xenfra-0.3.9.dist-info/entry_points.txt,sha256=a_2cGhYK__X6eW05Ba8uB6RIM_61c2sHtXsPY8N0mic,45
+xenfra-0.3.9.dist-info/METADATA,sha256=WpBPXhZW_cnDA1_rTvKpSMVFCY73yZ6gq3C7EOlfW2A,3898
+xenfra-0.3.9.dist-info/RECORD,,