xenfra-sdk 0.2.6__py3-none-any.whl → 0.2.8__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
xenfra_sdk/discovery.py ADDED
@@ -0,0 +1,110 @@
+ """
+ Xenfra Discovery Engine - Recursive Scanner and Infrastructure Inferrer.
+ Follows XENFRA_PROTOCOL.md Stage 2 (The Harness/Discovery).
+ """
+
+ import os
+ from pathlib import Path
+ from typing import List, Dict, Any, Optional
+ import re
+
+ from xenfra_sdk.manifest import ServiceDefinition, InfrastructureService, XenfraConfig
+ from xenfra_sdk.constants import DEFAULT_PORT_RANGE_START
+ from xenfra_sdk.railpack_detector import get_railpack_detector
+
+
+ class RecursiveScanner:
+     """
+     Scans a directory tree to find microservices and infrastructure dependencies.
+     Uses RailpackDetector for framework identification.
+     """
+
+     def __init__(self, root_path: str):
+         self.root_path = Path(root_path).resolve()
+         self.services: List[ServiceDefinition] = []
+         self.infrastructure: List[InfrastructureService] = []
+         self._port_counter = DEFAULT_PORT_RANGE_START
+         self.detector = get_railpack_detector()
+
+     def scan(self) -> XenfraConfig:
+         """
+         Recursively scans the root path for projects.
+         """
+         for root, dirs, files in os.walk(self.root_path):
+             # Skip hidden directories and virtual envs
+             dirs[:] = [d for d in dirs if not d.startswith(".") and d not in ["venv", "node_modules", "__pycache__"]]
+
+             root_p = Path(root)
+
+             # Create manifest for Railpack detection
+             manifest = []
+             for f in files:
+                 f_path = root_p / f
+                 # Only read content of relevant files to speed up scanning
+                 content = ""
+                 if f in ["package.json", "requirements.txt", "pyproject.toml", "go.mod", "Cargo.toml", "Gemfile", "composer.json", "pom.xml", "build.gradle"]:
+                     content = f_path.read_text(errors="ignore")
+
+                 manifest.append({
+                     "path": f,
+                     "content": content
+                 })
+
+             # 1. Detect Framework via Railpack Logic
+             result = self.detector.detect_from_manifest(manifest)
+
+             if result.framework != "unknown":
+                 # Sanitize Name
+                 rel_path = root_p.relative_to(self.root_path)
+                 name = rel_path.name.lower().replace("_", "-").replace(" ", "-")
+                 if name == "." or not name:
+                     name = self.root_path.name.lower().replace("_", "-")
+
+                 # Assign Port
+                 port = self._port_counter
+                 self._port_counter += 1
+
+                 # Register Service
+                 service = ServiceDefinition(
+                     name=name,
+                     path=str(rel_path),
+                     port=port,
+                     framework=result.framework,
+                     # Railpack gives us extra details we can optionally use
+                 )
+                 self.services.append(service)
+
+             # 2. Infer Infrastructure (Post-Process)
+             # Railpack doesn't explicitly output "needs redis", so we keep our robust heuristic
+             infra_deps = set()
+
+             # Check known files for db drivers
+             for item in manifest:
+                 content = item.get("content", "").lower()
+                 if not content: continue
+
+                 if "redis" in content: infra_deps.add("redis")
+                 if "kafka" in content or "confluent-kafka" in content: infra_deps.add("kafka")
+
+                 # Postgres
+                 if any(x in content for x in ["psycopg2", "asyncpg", "pg", "postgres"]):
+                     infra_deps.add("postgres")
+
+                 # MongoDB
+                 if any(x in content for x in ["pymongo", "mongoose", "mongodb"]):
+                     infra_deps.add("mongodb")
+
+             # Add infrastructure
+             for inf in infra_deps:
+                 if not any(i.type == inf for i in self.infrastructure):
+                     self.infrastructure.append(InfrastructureService(
+                         name=f"shared-{inf}",
+                         type=inf,
+                         port=6379 if inf == "redis" else 5432 if inf == "postgres" else 27017
+                     ))
+
+         return XenfraConfig(
+             name=self.root_path.name.lower().replace("_", "-"),
+             services=self.services,
+             infrastructure=self.infrastructure
+         )
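A minimal usage sketch of the new discovery module, based only on the code above; the monorepo path is hypothetical and the prints are illustrative:

    from xenfra_sdk.discovery import RecursiveScanner

    # Walk the tree, detect frameworks via Railpack, and infer shared infrastructure
    config = RecursiveScanner("/path/to/monorepo").scan()

    for svc in config.services:
        # ServiceDefinition fields as populated in scan(): name, path, port, framework
        print(f"{svc.name} ({svc.framework}) -> :{svc.port}")

    for infra in config.infrastructure:
        # InfrastructureService entries are de-duplicated by type (e.g. shared-redis on 6379)
        print(f"{infra.name} [{infra.type}] -> :{infra.port}")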
xenfra_sdk/engine.py CHANGED
@@ -18,6 +18,7 @@ from sqlmodel import Session, select
 
  # Xenfra modules
  from . import dockerizer, privacy, constants
+ from xenfra_sdk.patching import PatchManager
  from .db.models import Project
  from .db.session import get_session
  from .events import EventEmitter, DeploymentPhase, EventStatus
@@ -25,7 +26,7 @@ from .exceptions import DeploymentError
  from .governance import get_polling_interval, get_resource_limits
  from .models.context import DeploymentContext
  from .blueprints.factory import render_blueprint
- # from .devbox import DevboxHarness # Removed
+ from .client import XenfraClient
 
  class InfraEngine:
      """
@@ -58,6 +59,8 @@ class InfraEngine:
              logger=self.context.get("logger"),
              event_callback=self.context.get("event_callback")
          )
+         # Initialize internal client for API access (Intelligence/Sandbox)
+         self.client = XenfraClient(token=self.token)
 
      def _get_connection(self, ip_address: str):
          """Establishes a Fabric connection to the server."""
@@ -492,6 +495,7 @@ class InfraEngine:
          droplet = None
          session = db_session or self.db_session
          framework = kwargs.get("framework")
+         mode = kwargs.get("mode", "monolithic")
          tier = kwargs.get("tier", "FREE") # Default to FREE tier
 
          # ZEN GAP FIX: Resource Governance - Set tier-based polling interval
@@ -597,7 +601,7 @@ class InfraEngine:
 
          # === 0b. MICROSERVICES DELEGATION ===
          # If services are provided but no pre-generated assets, delegate to Orchestrator
-         if services and not (multi_service_compose or multi_service_caddy):
+         if services and mode != "monolithic" and not (multi_service_compose or multi_service_caddy):
              logger("\n[bold magenta]MICROSERVICES DETECTED - Delegating to ServiceOrchestrator[/bold magenta]")
              from .orchestrator import ServiceOrchestrator, load_services_from_xenfra_yaml
              from .manifest import create_services_from_detected
@@ -738,6 +742,7 @@ class InfraEngine:
          file_manifest = enhanced_manifest
 
 
+         # Protocol Compliance: Build Type-Safe DeploymentContext
          # Protocol Compliance: Build Type-Safe DeploymentContext
          ctx = DeploymentContext(
              project_name=name,
@@ -859,20 +864,104 @@ class InfraEngine:
              }
          }
 
-         # === ZEN MODE: PRE-MITOSIS (E2B GATE) ===
-         # Replaced by Secure Ralph Loop (server-side Firecracker verification)
-         # Enforce "No Compromise" - Verify before Deploy
-         if verify_local and not dry_run:
-             logger("\n[bold yellow]🛡️ E2B GATE: Verifying build in Cloud Sandbox...[/bold yellow]")
-             # Call Intelligence Service to verify
-             try:
-                 # This presumes we have access to the intelligence client
-                 # For now, we simulate the "No Compromise" check or call via HTTP
-                 # In a real run, we would POST to /intelligence/verify with the assets
-                 pass
-                 logger(" - [Verified] E2B Sandbox check passed.")
-             except Exception as e:
-                 raise DeploymentError(f"E2B Verification Failed: {e}", stage="Pre-Mitosis")
+         # === ZEN MODE: PRE-MITOSIS (THE RALPH LOOP) ===
+         # Replaces the static E2B gate with an autonomous repair loop
+         if not dry_run:
+             max_retries = 2
+             attempt = 0
+
+             while attempt <= max_retries:
+                 attempt += 1
+                 logger(f"\n[bold yellow]🛡️ E2B GATE (Attempt {attempt}/{max_retries + 1}): Verifying build...[/bold yellow]")
+
+                 try:
+                     # 1. Verify
+                     result = self.client.intelligence.verify(
+                         logs="PRE-DEPLOYMENT AUDIT",
+                         code_snippets=[{"path": k, "content": v} for k, v in ctx_dict.get("file_manifest", [])]
+                     )
+
+                     if result.get("status") == "failed":
+                         raise Exception(f"Sandbox verification failed: {result.get('error')}")
+
+                     logger(" - [Verified] E2B Sandbox check passed.")
+                     break # Success! Exit loop.
+
+                 except Exception as e:
+                     if attempt > max_retries:
+                         raise DeploymentError(f"E2B Verification Failed after {attempt} attempts: {e}", stage="Pre-Mitosis")
+
+                     logger(f" - [Diagnostics] Verification failed. Asking Ralph for a fix...")
+
+                     # 2. Diagnose & Fix
+                     try:
+                         # Diagnose using the error logs/message
+                         diagnosis = self.client.intelligence.diagnose(
+                             logs=str(e),
+                             services=services
+                         )
+
+                         # Check if diagnosis suggested a patch
+                         # Note: DiagnosisResponse model structure assumed based on intelligence.py
+                         if hasattr(diagnosis, "patch") and diagnosis.patch and diagnosis.patch.target_file:
+                             logger(f" - [Ralph] Found fix: {diagnosis.suggestion}")
+                             logger(f" - [Patching] Applying fix to {diagnosis.patch.target_file}...")
+
+                             # Apply patch to local temp clone or CWD (if CLI mode)
+                             base_dir = Path(temp_repo_path) if temp_repo_path else Path(os.getcwd())
+
+                             # Use PatchManager
+                             patcher = PatchManager(base_dir)
+                             filename = getattr(diagnosis.patch, "target_file", None) or diagnosis.patch.file
+
+                             if not filename:
+                                 logger(" [red]Patch missing filename. Skipping.[/red]")
+                                 continue
+
+                             # Extract params from Pydantic or Dict
+                             operation = getattr(diagnosis.patch, "operation", "append")
+                             search_content = getattr(diagnosis.patch, "search_content", None)
+                             replacement_content = getattr(diagnosis.patch, "content", None) or diagnosis.patch.value
+                             json_path = getattr(diagnosis.patch, "path", None)
+
+                             if not replacement_content and operation != "create":
+                                 # create might imply empty file? Usually needs content.
+                                 logger(" [red]Patch missing content. Skipping.[/red]")
+                                 continue
+
+                             # Apply
+                             p_result = patcher.apply_patch(
+                                 filename=filename,
+                                 operation=operation,
+                                 content=replacement_content,
+                                 search_content=search_content,
+                                 value=replacement_content, # Pass as value too for json/yaml
+                                 path=json_path
+                             )
+
+                             if p_result.success:
+                                 logger(f" [green]- [Ralph] {p_result.message}[/green]")
+                             else:
+                                 logger(f" [red]- [Patch Failed] {p_result.message}[/red]")
+
+
+                             # Refresh context manifest for the next verification attempt
+                             for f_entry in ctx_dict.get("file_manifest", []):
+                                 if f_entry["path"] == diagnosis.patch.target_file:
+                                     with open(target_path, "r", encoding="utf-8") as f:
+                                         f_entry["content"] = f.read()
+
+                             # Re-render assets with new content
+                             logger(" - [Regenerating] Re-rendering deployment assets...")
+                             rendered_assets = render_blueprint(ctx_dict)
+
+                         else:
+                             logger(" - [Ralph] No auto-fix available.")
+                             raise e
+
+                     except Exception as diagnosis_error:
+                         logger(f" - [Ralph] Auto-fix failed: {diagnosis_error}")
+                         raise e
 
          # === 4. DROPLET CREATION STAGE ===
          self.emitter.start_phase(DeploymentPhase.CELL_BIRTH, "Submitting DNA to provider (Creating Droplet)")
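The Ralph loop above is easier to follow with the logging and Rich markup stripped away. A condensed sketch of the same verify, diagnose, patch, re-verify flow, using only calls visible in the diff; the wrapper function and its parameters are illustrative, and the patch fields follow the PatchObject model shown below:

    from pathlib import Path

    from xenfra_sdk.client import XenfraClient
    from xenfra_sdk.patching import PatchManager

    def verify_with_ralph(client: XenfraClient, snippets: list, services: list, base_dir: Path, max_retries: int = 2) -> None:
        """Bounded verify -> diagnose -> patch -> re-verify loop (logging omitted)."""
        attempt = 0
        while attempt <= max_retries:
            attempt += 1
            try:
                # 1. Verify the generated assets in the E2B sandbox
                result = client.intelligence.verify(logs="PRE-DEPLOYMENT AUDIT", code_snippets=snippets)
                if result.get("status") == "failed":
                    raise Exception(result.get("error"))
                return  # verified: deployment can proceed to droplet creation
            except Exception as err:
                if attempt > max_retries:
                    raise  # out of attempts: surface the failure
                # 2. Ask Ralph for a patch and apply it before the next verification pass
                diagnosis = client.intelligence.diagnose(logs=str(err), services=services)
                if diagnosis.patch and diagnosis.patch.file:
                    PatchManager(base_dir).apply_patch(
                        filename=diagnosis.patch.file,
                        operation=diagnosis.patch.operation or "append",
                        content=diagnosis.patch.value,
                        search_content=diagnosis.patch.search_content,
                        value=diagnosis.patch.value,
                        path=diagnosis.patch.path,
                    )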
xenfra_sdk/models/__init__.py CHANGED
@@ -144,9 +144,10 @@ class PatchObject(BaseModel):
      file: str | None = Field(
          None, description="The name of the file to be patched (e.g., 'requirements.txt')"
      )
-     operation: str | None = Field(None, description="The patch operation (e.g., 'add', 'replace')")
+     operation: str | None = Field(None, description="The patch operation (e.g., 'add', 'replace', 'replace_block')")
      path: str | None = Field(None, description="A JSON-like path to the field to be changed")
      value: str | None = Field(None, description="The new value to apply")
+     search_content: str | None = Field(None, description="Exact string to search for and replace")
 
 
  class DiagnosisResponse(BaseModel):
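The new search_content field pairs with the replace_block operation handled by the PatchManager added below. A hypothetical payload the diagnosis service could now return, assuming PatchObject is importable from xenfra_sdk.models where this hunk appears to live:

    from xenfra_sdk.models import PatchObject  # assumed import path for this model

    # Hypothetical fix: swap an exact dependency pin instead of appending a duplicate line
    patch = PatchObject(
        file="requirements.txt",
        operation="replace_block",
        search_content="fastapi==0.109.0",
        value="fastapi==0.110.0",
    )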
xenfra_sdk/patching.py ADDED
@@ -0,0 +1,230 @@
+
+ import json
+ import logging
+ import os
+ import re
+ from pathlib import Path
+ from typing import Any, Dict, Optional, Union
+ import yaml
+
+ # Try importing tomllib (Python 3.11+)
+ try:
+     import tomllib
+ except ImportError:
+     # Fallback/Mock for older python if needed, but project is >=3.13
+     tomllib = None
+
+ from pydantic import BaseModel
+
+ logger = logging.getLogger(__name__)
+
+ class PatchResult(BaseModel):
+     success: bool
+     message: str
+     diff: Optional[str] = None
+
+ class PatchManager:
+     """
+     Handles safe, structured patching of files based on their type.
+     Prevents corruption of JSON/YAML/TOML by using parsers instead of text appending.
+     """
+
+     def __init__(self, base_dir: Path):
+         self.base_dir = base_dir.resolve()
+
+     def apply_patch(self,
+                     filename: str,
+                     operation: str,
+                     content: Optional[str] = None,
+                     search_content: Optional[str] = None,
+                     value: Optional[str] = None,
+                     path: Optional[str] = None) -> PatchResult:
+         """
+         Dispatches the patch to the correct strategy based on file extension.
+         """
+         target_path = self.base_dir / filename
+
+         # Security check: prevent directory traversal
+         try:
+             target_path.resolve().relative_to(self.base_dir)
+         except ValueError:
+             return PatchResult(success=False, message=f"Access denied: {filename} is outside project root.")
+
+         if not target_path.exists() and operation != "create":
+             # If operation is overwrite, we might allow creating
+             if operation == "overwrite":
+                 pass
+             else:
+                 return PatchResult(success=False, message=f"File not found: {filename}")
+
+         # Determine Strategy
+         ext = target_path.suffix.lower()
+
+         try:
+             if ext == ".json":
+                 return self._patch_json(target_path, operation, path, value or content)
+             elif ext in [".yaml", ".yml"]:
+                 return self._patch_yaml(target_path, operation, path, value or content)
+             elif ext == ".toml":
+                 # TOML Strategy: We lack a writer, so we use Smart Block Replacement but Validate syntax
+                 return self._patch_toml_smart(target_path, operation, content, search_content)
+             else:
+                 return self._patch_text_smart(target_path, operation, content, search_content)
+         except Exception as e:
+             return PatchResult(success=False, message=f"Patching failed: {str(e)}")
+
+     def _patch_json(self, file_path: Path, operation: str, json_path: Optional[str], value: Any) -> PatchResult:
+         """Safe JSON Manipulation."""
+         try:
+             # Load
+             if file_path.exists():
+                 text = file_path.read_text(encoding="utf-8")
+                 data = json.loads(text)
+             else:
+                 data = {}
+
+             # Parse input value if it looks like JSON/Dict
+             new_val = value
+             if isinstance(value, str):
+                 try:
+                     new_val = json.loads(value)
+                 except json.JSONDecodeError:
+                     pass # Keep as string
+
+             # Apply
+             if operation == "merge_key" and json_path:
+                 # Simple logic for top-level keys for now (e.g. "dependencies")
+                 # A full JSONPath impl would be complex.
+                 # Supporting "dependencies.requests" -> nested set
+                 keys = json_path.split(".")
+                 current = data
+                 for k in keys[:-1]:
+                     current = current.setdefault(k, {})
+
+                 # Merge logic
+                 last_key = keys[-1]
+                 if isinstance(current.get(last_key), dict) and isinstance(new_val, dict):
+                     current[last_key].update(new_val)
+                 else:
+                     current[last_key] = new_val
+
+             elif operation == "overwrite":
+                 data = new_val
+
+             else:
+                 # Default for JSON: "Add/Update root key" if path missing?
+                 # Or if operation="replace_block", we can't do that easily in JSON.
+                 # Fallback to Text strategy if operation is unknown for JSON?
+                 pass
+
+             # Dump
+             with open(file_path, "w", encoding="utf-8") as f:
+                 json.dump(data, f, indent=2)
+
+             return PatchResult(success=True, message=f"Updated JSON {file_path.name}")
+
+         except json.JSONDecodeError:
+             return PatchResult(success=False, message=f"Invalid JSON in file {file_path.name}")
+
+     def _patch_yaml(self, file_path: Path, operation: str, yaml_path: Optional[str], value: Any) -> PatchResult:
+         """Safe YAML Manipulation."""
+         try:
+             if file_path.exists():
+                 data = yaml.safe_load(file_path.read_text(encoding="utf-8")) or {}
+             else:
+                 data = {}
+
+             # Parse value
+             new_val = value
+             if isinstance(value, str):
+                 try:
+                     new_val = yaml.safe_load(value)
+                 except:
+                     pass
+
+             if operation == "merge_key" and yaml_path:
+                 keys = yaml_path.split(".")
+                 current = data
+                 for k in keys[:-1]:
+                     current = current.setdefault(k, {})
+
+                 last_key = keys[-1]
+                 if isinstance(current.get(last_key), dict) and isinstance(new_val, dict):
+                     current[last_key].update(new_val)
+                 else:
+                     current[last_key] = new_val
+
+             elif operation == "overwrite":
+                 data = new_val
+
+             with open(file_path, "w", encoding="utf-8") as f:
+                 yaml.dump(data, f, default_flow_style=False)
+
+             return PatchResult(success=True, message=f"Updated YAML {file_path.name}")
+
+         except yaml.YAMLError as e:
+             return PatchResult(success=False, message=f"YAML Error: {e}")
+
+     def _patch_toml_smart(self, file_path: Path, operation: str, content: str, search_content: Optional[str]) -> PatchResult:
+         """
+         Smart Text Patching for TOML, but validates the result is valid TOML.
+         """
+         # 1. Apply Text Patch
+         res = self._patch_text_smart(file_path, operation, content, search_content)
+         if not res.success:
+             return res
+
+         # 2. Verify Syntax (Safety Check)
+         if tomllib:
+             try:
+                 new_text = file_path.read_text(encoding="utf-8")
+                 tomllib.loads(new_text)
+             except tomllib.TOMLDecodeError as e:
+                 # Revert!
+                 # Note: A real robust implementation needs a backup step.
+                 # For now, we warn.
+                 return PatchResult(success=False, message=f"Patch applied but resulted in INVALID TOML: {e}. (Recommend Revert)")
+         return res
+
+     def _patch_text_smart(self, file_path: Path, operation: str, content: str, search_content: Optional[str]) -> PatchResult:
+         """
+         Robust Text/Code Patching.
+         Supports:
+         - overwrite: Replaces entire file.
+         - replace_block: Replaces exact block (safe).
+         - replace_fuzzy: (TODO) Normalized whitespace replacement.
+         - append_safe: Appends to end (legacy).
+         """
+         if not content:
+             return PatchResult(success=False, message="No content provided for patch.")
+
+         current_text = ""
+         if file_path.exists():
+             current_text = file_path.read_text(encoding="utf-8")
+
+         if operation == "overwrite":
+             file_path.write_text(content, encoding="utf-8")
+             return PatchResult(success=True, message="File overwritten")
+
+         elif operation == "replace_block":
+             if not search_content:
+                 return PatchResult(success=False, message="Missing 'search_content' for block replacement.")
+
+             if search_content in current_text:
+                 new_text = current_text.replace(search_content, content)
+                 file_path.write_text(new_text, encoding="utf-8")
+                 return PatchResult(success=True, message="Block replaced successfully")
+             else:
+                 # TODO: Try Fuzzy Match here?
+                 return PatchResult(success=False, message="Search block not found exactly in file.")
+
+         elif operation == "append":
+             # Safe Append check: Ensure newline
+             with open(file_path, "a", encoding="utf-8") as f:
+                 if current_text and not current_text.endswith("\n"):
+                     f.write("\n")
+                 f.write(content)
+             return PatchResult(success=True, message="Content appended")
+
+         else:
+             return PatchResult(success=False, message=f"Unknown operation: {operation}")
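A short usage sketch of PatchManager, exercising two of the strategies implemented above; the project path and file contents are illustrative:

    from pathlib import Path

    from xenfra_sdk.patching import PatchManager

    patcher = PatchManager(Path("/path/to/project"))

    # Structured merge into package.json: parsed, mutated, and re-dumped instead of text-appended
    res = patcher.apply_patch(
        filename="package.json",
        operation="merge_key",
        path="dependencies",
        value='{"express": "^4.19.0"}',
    )
    print(res.success, res.message)

    # Exact-match block replacement in a plain-text file, driven by the new search_content field
    res = patcher.apply_patch(
        filename="requirements.txt",
        operation="replace_block",
        search_content="fastapi==0.109.0",
        content="fastapi==0.110.0",
    )
    print(res.success, res.message)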
xenfra_sdk/resources/sandbox.py ADDED
@@ -0,0 +1,7 @@
+ from .base import BaseManager
+
+ class SandboxManager(BaseManager):
+     """
+     Manager for E2B Sandbox operations.
+     """
+     pass
@@ -1,9 +1,9 @@
  Metadata-Version: 2.3
  Name: xenfra-sdk
- Version: 0.2.6
+ Version: 0.2.8
  Summary: Xenfra SDK: Core engine and utilities for the Xenfra platform.
  Author: xenfra-cloud
- Author-email: xenfra-cloud <xenfracloud@gmail.com>
+ Author-email: xenfra-cloud <support@xenfra.com>
  Classifier: Programming Language :: Python :: 3
  Classifier: License :: OSI Approved :: MIT License
  Classifier: Operating System :: OS Independent
@@ -16,16 +16,18 @@ xenfra_sdk/db/models.py,sha256=nbnlNiFShUo9uxrcVLHUezlNtF_NATS85pPYTKBriIE,714
  xenfra_sdk/db/session.py,sha256=Cryt02u01Lq_olwOji3X9a5pI7lE5p0O0mKX3na9h6M,879
  xenfra_sdk/dependencies.py,sha256=WHGfIrEYkss5yuBd_uOFrB6lPBWM2X0mEuG26ILAjXI,1211
  xenfra_sdk/detection.py,sha256=7DDfwOAfmZYF5tzJAcbUsME8alfsqS0HQgrI4eTfwTA,18913
+ xenfra_sdk/discovery.py,sha256=42JawKgOUIY9Al4znpf5n4dL-rXKhAnhOdM3L8yT0VM,4565
  xenfra_sdk/dockerizer.py,sha256=iSn-HZ2-07zgM-C2aQkJH-MBtPrkEimurKLY9hth34Y,5191
- xenfra_sdk/engine.py,sha256=nmrjFKaYsouHvfFThYo4gXTuoCr0Xr2zgFy35XcfIKo,67310
+ xenfra_sdk/engine.py,sha256=BueUHy_ofIEERiXTGADXRaWHSfu0XSM_hY1zVHiBrKM,72615
  xenfra_sdk/events.py,sha256=7BgtTc4a-bw6ybicQblQ-Y0CGbUpgR_Cx4YNaJi_SsU,8013
  xenfra_sdk/exceptions.py,sha256=rNUpuk6NXsigqBUNMqzit48DjvZev5Try87zVT8ethE,753
  xenfra_sdk/governance.py,sha256=g9bN174HeoA13Cv-8o6hoVK_dRcrnHOBVRdKkZE9pWY,3784
  xenfra_sdk/manifest.py,sha256=ymAvih83VzKcrMxIpNGnffg0TtxCowiPUgvYx4qks6E,6210
  xenfra_sdk/mcp_client.py,sha256=QTKu-gkm0oeYOyCyXweZgxEXA3DvBLwQxEaNMevBBzw,5869
- xenfra_sdk/models/__init__.py,sha256=Ek5AT--i6oIKUjQJJGNQiLLwkLieMm4w9do4h7XwoGc,8174
+ xenfra_sdk/models/__init__.py,sha256=5VGfCk25wNigzgwztluT5I_39acWHesaWVm4Y38IQdg,8290
  xenfra_sdk/models/context.py,sha256=ByQofUAKqDRqJ7HZoiVLXnfuw6X0cWhsuMlUrV9Bix0,2341
  xenfra_sdk/orchestrator.py,sha256=2vd1YCQX6pVsa1wtdhRD4Xo4Nscmxt2_1_sjKw39CGw,34006
+ xenfra_sdk/patching.py,sha256=Yb-j9Ua_s_3-uoAbYfAaR9xw-FH_lZwDqhLfxhpEC6M,9005
  xenfra_sdk/patterns.json,sha256=xHxbc0ogHDwysMczi30_hW1Ylfdsf-nsQdAom7RZ4KI,446
  xenfra_sdk/privacy.py,sha256=YpV8roCX2C2oLY_6TMXSDISuzNb192RuUPriwORKzNQ,5800
  xenfra_sdk/protocol.py,sha256=nVamJTzEAKk71v-MUR_5yvkxMkMzR5Crmp9ecbqLVhI,1210
@@ -41,9 +43,10 @@ xenfra_sdk/resources/deployments.py,sha256=VjTUn0TZyGUEvEsgy7TUa4T4VZH_IPr7IV4-s
  xenfra_sdk/resources/files.py,sha256=opdxZt6GWMU-qu5ltfWfv2hm8TscdaK-r9o4k743Irs,3087
  xenfra_sdk/resources/intelligence.py,sha256=oD8DxyLxivvTrD-Xvv1pDJtKsZkiTOOwIMenZAssbzI,4397
  xenfra_sdk/resources/projects.py,sha256=EsCVXmqkhWl_Guz_8WDQDi3kAm1Wyg1rjXcyAigPD6E,3712
+ xenfra_sdk/resources/sandbox.py,sha256=4OjMCSVnVTxuMo6765ioBdryS9G_0PhXwQPXyVzJFJI,131
  xenfra_sdk/security.py,sha256=6vMZpbglhkRGBVVj4RCTu45-MCnQ15wt94-996zmaT8,1199
  xenfra_sdk/security_scanner.py,sha256=US9QdMjHdTUkqObrGPHvDPxMP0QXCxmwK5e28d8KT2E,12957
  xenfra_sdk/utils.py,sha256=d8eCjjV32QwqoJa759CEcETnnsjG5qVKDLQ84yYtlus,3898
- xenfra_sdk-0.2.6.dist-info/WHEEL,sha256=5DEXXimM34_d4Gx1AuF9ysMr1_maoEtGKjaILM3s4w4,80
- xenfra_sdk-0.2.6.dist-info/METADATA,sha256=PsSEAuYqDKMuXabOgzvPOY_E80jdsDFjSyw_3JdNIRo,3889
- xenfra_sdk-0.2.6.dist-info/RECORD,,
+ xenfra_sdk-0.2.8.dist-info/WHEEL,sha256=iHtWm8nRfs0VRdCYVXocAWFW8ppjHL-uTJkAdZJKOBM,80
+ xenfra_sdk-0.2.8.dist-info/METADATA,sha256=RT0ly-geSY8yYW74fuA0cr4c8kRFl2VNvPyjSt27ffU,3886
+ xenfra_sdk-0.2.8.dist-info/RECORD,,
@@ -1,4 +1,4 @@
  Wheel-Version: 1.0
- Generator: uv 0.9.29
+ Generator: uv 0.9.30
  Root-Is-Purelib: true
  Tag: py3-none-any