dalexor 0.1.4__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- dalexor/__init__.py +0 -0
- dalexor/main.py +1315 -0
- dalexor-0.1.4.dist-info/METADATA +68 -0
- dalexor-0.1.4.dist-info/RECORD +8 -0
- dalexor-0.1.4.dist-info/WHEEL +5 -0
- dalexor-0.1.4.dist-info/entry_points.txt +3 -0
- dalexor-0.1.4.dist-info/licenses/LICENSE +13 -0
- dalexor-0.1.4.dist-info/top_level.txt +1 -0
dalexor/main.py
ADDED
@@ -0,0 +1,1315 @@
import os
import sys
import logging
import json
import time
import math
import hashlib
import zlib
import re
import difflib
import threading
import argparse
import subprocess
import ast
import base64
import secrets
from datetime import datetime
# VERSION: E2EE_NEURAL_VAULT_V18
from threading import Timer
from collections import Counter, deque
from typing import Any, List, Dict, Optional

# MCP & Web Imports
from mcp.server.fastmcp import FastMCP
from dotenv import load_dotenv
from supabase import create_client, Client
from groq import Groq
import uvicorn
from starlette.responses import JSONResponse
from starlette.requests import Request
import httpx

# Configure logging to strictly use stderr to prevent polluting the JSON-RPC stdout stream
logging.basicConfig(
    level=logging.WARNING,
    format='%(asctime)s - %(name)s - %(levelname)s - %(message)s',
    stream=sys.stderr
)
logger = logging.getLogger("DalexorBridge")
from watchdog.observers import Observer
from watchdog.events import FileSystemEventHandler
from fastapi.middleware.cors import CORSMiddleware

# Load local environment if it exists (Priority: Local .env > Shell Env)
load_dotenv(override=True)

# --- CONFIGURATION & AUTH (STATELESS) ---

def load_global_config():
    """Load configuration from global user config file."""
    import json
    config_file = os.path.join(os.path.expanduser("~/.dalexor"), "config.json")
    if os.path.exists(config_file):
        try:
            with open(config_file, 'r') as f:
                return json.load(f)
        except Exception as e:
            print(f"[!] Warning: Failed to load global config: {e}")
    return {}


# Load Global Config
GLOBAL_CONFIG = load_global_config()

# Try to load API KEY from Env -> Global Config
DX_API_KEY = os.getenv("DX_API_KEY") or os.getenv("DALEXORMI_API_KEY") or GLOBAL_CONFIG.get("api_key")

CLOUD_URL = os.getenv("DALEXORMI_URL", "http://46.225.19.164:8080")
DX_SOVEREIGN = os.getenv("DX_SOVEREIGN", "false").lower() == "true"
DX_TEAM_SECRET = (os.getenv("DX_TEAM_SECRET") or GLOBAL_CONFIG.get("team_secret") or "").strip()

# Set globals for project context if available
if not os.getenv("DX_PROJECT_ID") and GLOBAL_CONFIG.get("project_id"):
    os.environ["DX_PROJECT_ID"] = GLOBAL_CONFIG.get("project_id")

if not os.getenv("DX_PROJECT_NAME") and GLOBAL_CONFIG.get("project_name"):
    os.environ["DX_PROJECT_NAME"] = GLOBAL_CONFIG.get("project_name")


# --- NEURAL VAULT (E2EE) ---
class NeuralVault:
    @staticmethod
    def get_key(secret: str):
        import hashlib, base64
        hasher = hashlib.sha256()
        hasher.update(secret.encode())
        return base64.urlsafe_b64encode(hasher.digest())

    # Removed NeuralVault.lock: Encryption is now handled by the Cloud Brain.

    @staticmethod
    def unlock(blob: str, secret: str) -> str:
        if not blob or not secret: return blob
        if not str(blob).startswith("vault_v1:"): return blob
        try:
            from cryptography.fernet import Fernet
            import base64
            cipher_text = blob.replace("vault_v1:", "").strip()

            # 🛡️ NOVEL BASE64 RECOVERY: Repair padding
            while len(cipher_text) % 4 != 0:
                cipher_text += "="

            f = Fernet(NeuralVault.get_key(secret))
            return f.decrypt(cipher_text.encode()).decode()
        except Exception:  # narrowed from a bare except so Ctrl+C is not swallowed
            return "[Neural Vault: Decryption Failed (Check DX_TEAM_SECRET)]"
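
# Editor's sketch (not part of the original file): a client-side counterpart to
# NeuralVault.unlock, reconstructed from the key derivation and "vault_v1:" framing
# above. The package removed its local lock() in favor of server-side encryption,
# so treat this as an assumption about the implied scheme; it needs `cryptography`.
def _vault_lock_sketch(plain: str, secret: str) -> str:
    from cryptography.fernet import Fernet
    f = Fernet(NeuralVault.get_key(secret))  # same urlsafe-base64 SHA-256 key
    return "vault_v1:" + f.encrypt(plain.encode()).decode()
    # Round trip: NeuralVault.unlock(_vault_lock_sketch("hi", s), s) == "hi"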

# ... (Supabase/Groq init logic remains same) ...
# These are used by the CLOUD BRAIN (Railway) to talk to the DB/AI
SUPABASE_URL = os.getenv("SUPABASE_URL")
SUPABASE_SERVICE_ROLE = os.getenv("SUPABASE_SERVICE_ROLE_KEY") or os.getenv("SUPABASE_KEY")
GROQ_API_KEY = os.getenv("GROQ_API_KEY")

# Detection: Are we running on Railway?
IS_CLOUD = os.getenv("RAILWAY_ENVIRONMENT") == "true" or os.getenv("RAILWAY_STATIC_URL") is not None

# Shared Constants
IGNORE_LIST = {'.git', 'node_modules', '__pycache__', '.env', 'dist', 'build', '.next', '.dalexor', '.vscode', '.idea', 'venv', '.pyc', '.egg-info', '.pytest_cache', '.mypy_cache'}


# --- INITIALIZE DATABASE ONLY IF IN CLOUD ---
supabase_admin: Client = create_client(SUPABASE_URL, SUPABASE_SERVICE_ROLE) if (IS_CLOUD and SUPABASE_URL and SUPABASE_SERVICE_ROLE) else None
groq_client = Groq(api_key=GROQ_API_KEY) if (IS_CLOUD and GROQ_API_KEY) else None

# --- ENTROPY ENGINE ---
class SovereignEntropyEngine:
    @staticmethod
    def calculate(content, file_path=""):
        if not content or len(content) < 10: return 0.0

        # Layer 1: Token-based Shannon Entropy (Semantic Density)
        tokens = re.findall(r'[a-zA-Z_][a-zA-Z0-9_]*', content)
        if not tokens: return 0.5

        counts = Counter(tokens)
        total_tokens = len(tokens)
        shannon = -sum((count/total_tokens) * math.log2(count/total_tokens) for count in counts.values())

        # Layer 2: Autonomous Gravity (Structural Signature Analysis)
        gravity = 1.0

        # A. Identity Affinity: If tokens match the file name, the file is describing ITSELF (High Signal).
        if file_path:
            file_name = os.path.basename(file_path).split('.')[0].lower()
            if len(file_name) > 2:
                token_set = {t.lower() for t in tokens}
                if file_name in token_set:
                    gravity = max(gravity, 3.5)

        # B. Symbol Complexity Boost: Any token that looks like a Class, Function, or Constant.
        # PascalCase, camelCase, or UPPER_SNAKE
        complex_symbols = [t for t in tokens if re.search(r'[a-z][A-Z]', t) or re.match(r'^[A-Z][a-z]+[A-Z]', t) or (re.match(r'^[A-Z_]{4,}$', t))]
        symbol_density = len(complex_symbols) / total_tokens
        gravity += (symbol_density * 5.0)

        # Layer 3: Structural Multiplier (Logic density check)
        structure_multiplier = 1.0
        logic_patterns = [
            r'def\s+\w+', r'async\s+def', r'class\s+\w+',
            r'return\s+', r'if\s+.*:', r'import\s+', r'@\w+\(',
            r'\{[\s\S]*\}', r'supabase'
        ]

        for pattern in logic_patterns:
            matches = re.findall(pattern, content)
            if matches:
                structure_multiplier += (len(matches) * 0.20)

        # Layer 4: Noise Penalty (Repetitive boilerplate reduction)
        unique_tokens = len(counts)
        lexical_diversity = unique_tokens / total_tokens
        if lexical_diversity < 0.2 and total_tokens > 30:
            structure_multiplier *= 0.4

        # Final Sovereign Calculation
        score = (shannon * gravity * structure_multiplier) / 1.5
        return round(min(score, 10.0), 2)
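
# Editor's worked example (not part of the original file): the Layer-1 term for a
# toy token stream ["alpha", "alpha", "beta"] has p = {2/3, 1/3}, so
# H = -(2/3)*log2(2/3) - (1/3)*log2(1/3) ≈ 0.918 bits. Gravity (identity affinity,
# symbol density) and the structure multiplier then scale that base signal before
# the /1.5 normalization and the 10.0 cap.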

# --- TRANSACTION MEMORY ---
class TransactionMemory:
    def __init__(self, cloud_callback):
        self.buffer = deque(maxlen=50)
        self.cloud_callback = cloud_callback
        # RLock, not Lock: add() and flush_all() call _flush() while already
        # holding the lock, which would deadlock on a non-reentrant Lock.
        self.lock = threading.RLock()
        self.flush_timer = None

    def add(self, file_path, diff, entropy, synapses, symbols, force_seal=False):
        with self.lock:
            # Check if this file already in buffer. If so, update it.
            # This is "Edge Coalescence"
            self.buffer.append({"file": file_path, "diff": diff, "entropy": entropy, "synapses": synapses, "symbols": symbols, "ts": time.time(), "seal": force_seal})

            if force_seal or len(self.buffer) >= 5:
                self._flush()
            else:
                if self.flush_timer: self.flush_timer.cancel()
                self.flush_timer = Timer(10.0, self._flush)
                self.flush_timer.start()

    def flush_all(self):
        """Force immediate flush of all pending items."""
        with self.lock:
            if not self.buffer:
                print("[!] No pending evolution detected. Sending Checkpoint Milestone...")
                # Create a virtual entry for the milestone
                payload = {
                    "content": "CHECKPOINT: Manual Milestone Seal (No local changes buffered)",
                    "entropy_score": 10.0,
                    "atom_type": "milestone",
                    "source": "sentinel",
                    "metadata": {
                        "summary": "🏆 CHECKPOINT: Project Milestone Sealed",
                        "dx_seal": True,
                        "manual": True
                    }
                }
                threading.Thread(target=self.cloud_callback, args=(payload,), daemon=True).start()
                return
            print(f"[!] Sealing Milestone: Transmitting {len(self.buffer)} pending evolutionary atoms...")
            self._flush()

    def _flush(self):
        with self.lock:
            if not self.buffer: return
            batch = list(self.buffer)
            self.buffer.clear()

        for item in batch:
            content = item['diff']
            file_path = item['file']
            entropy = item['entropy']
            is_seal = item.get('seal', False)

            payload = {
                "content": scrub_secrets(content[:10000]) if not content.startswith("vault_") else content,
                "entropy_score": round(entropy, 2) if not is_seal else 10.0,  # Push max entropy on seal
                "atom_type": "evolution_event" if not is_seal else "milestone",
                "source": "sentinel",
                "synapses": item.get('synapses', []),
                "symbols": item.get('symbols', []),
                "is_sovereign": DX_SOVEREIGN,
                "request_server_encryption": DX_SOVEREIGN,
                "metadata": {
                    "file_path": file_path.replace("\\", "/"),
                    "file_name": os.path.basename(file_path),
                    "content_hash": hashlib.md5(content.encode()).hexdigest(),
                    "summary": f"Evolutionary Shift in {os.path.basename(file_path)}" if not is_seal else f"MILESTONE: {os.path.basename(file_path)} Sealed",
                    "synapses": item.get('synapses', []),
                    "symbols": item.get('symbols', []),
                    "dx_seal": is_seal  # Signal to Cloud Brain to bypass coalescence window
                }
            }
            threading.Thread(target=self.cloud_callback, args=(payload,), daemon=True).start()
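
    # Editor's usage sketch (not part of the original file): the debounce contract.
    # Every add() re-arms a 10-second timer; a force_seal or the 5th buffered item
    # flushes immediately, so bursts of rapid saves coalesce into one batch:
    #
    #     tx = TransactionMemory(lambda payload: print(payload["atom_type"]))
    #     tx.add("a.py", "+x = 1", 1.2, [], [])                   # arms the timer
    #     tx.add("a.py", "+y = 2", 1.4, [], [], force_seal=True)  # flushes now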

# --- UTILITY FUNCTIONS ---
def scrub_secrets(text):
    """Remove potential secrets from text before uploading."""
    patterns = [
        (r'(api[_-]?key|token|secret|password)\s*[=:]\s*[\'"]?([^\s\'"]+)', r'\1=***REDACTED***'),
        (r'Bearer\s+[A-Za-z0-9\-._~+/]+=*', 'Bearer ***REDACTED***'),
    ]
    for pattern, replacement in patterns:
        text = re.sub(pattern, replacement, text, flags=re.IGNORECASE)
    return text
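
# Editor's example (not part of the original file): what the two patterns catch.
#     >>> scrub_secrets("token: abc123 then Bearer eyJhbGci.payload")
#     'token=***REDACTED*** then Bearer ***REDACTED***'
# Key/value assignments and Bearer tokens are rewritten in place; other prose is untouched.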

def print_banner(email="Sovereign", plan="Identity Verified"):
    """Print welcome banner with user info."""
    display_plan = plan
    if plan.lower() == "free": display_plan = "Surgical Sandbox (Limited)"
    elif plan.lower() == "starter": display_plan = "Starter Package"

    print("\n" + "="*60)
    print("   DALEXOR INTELLIGENCE SYSTEM")
    print("="*60)
    print(f"   User: {email}")
    print(f"   Plan: {display_plan}")
    print("="*60 + "\n")


def interactive_verification(force_prompt=False):
    """Verify identity with the Global Brain using environment or interactive prompt."""
    global DX_API_KEY, DX_TEAM_SECRET
    if not sys.stdin.isatty(): return (DX_API_KEY, "System", "Automated")

    current_key = DX_API_KEY
    if not current_key or force_prompt:
        print("\n" + "="*60)
        print("   DALEXOR INTELLIGENCE : SECURE HANDSHAKE")
        print("="*60)
        current_key = input("\n[*] Enter Sovereign API Key: ").strip()
        if not current_key: sys.exit(1)
        DX_API_KEY = current_key

    print("\n[*] Connecting to Global Brain...")
    try:
        import requests
        headers = {"X-DX-Key": current_key}
        response = requests.get(f"{CLOUD_URL}/validate", headers=headers, timeout=10)

        if response.status_code == 200:
            data = response.json()
            user_data = data.get("user", {})
            email = user_data.get('email', 'Sovereign')
            plan = data.get("subscription", {}).get("display", "Identity Verified")

            # 🌩️ Cloud Context Recovery: Adopt the session state from the server

            print(f"[+] Identity Confirmed: {email}")
            print(f"[*] Subscription: {plan}")

            # 🛡️ SOVEREIGN MODE ACTIVATION (For Pro/Enterprise/Sovereign plans)
            is_sovereign_plan = any(p in plan.lower() for p in ["sovereign", "enterprise", "professional", "unlimited"])
            if is_sovereign_plan:
                global DX_SOVEREIGN
                DX_SOVEREIGN = True
                print("[*] Sovereign Mode: ACTIVATED (End-to-End Encryption Enabled)")

            return (current_key, email, plan)
        else:
            print(f"[!] Verification Failed: {response.json().get('error', 'Invalid Key')}")
            sys.exit(1)
    except Exception as e:
        print(f"[!] Error: {e}")
        sys.exit(1)
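
# Editor's sketch (not part of the original file): the /validate response shape
# this client expects, reconstructed from the .get() chains above and from
# dx_sync's use of user.last_project_id. Field values are illustrative only:
#     {"user": {"email": "dev@example.com", "last_project_id": "<uuid>"},
#      "subscription": {"display": "Sovereign Unlimited"}}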

def save_to_env(key, project_id=None, project_name=None, team_secret=None):
    """Save configuration to the global config file (~/.dalexor/config.json), not a .env file."""
    import json

    # Use global config directory
    config_dir = os.path.expanduser("~/.dalexor")
    config_file = os.path.join(config_dir, "config.json")

    # Create directory if it doesn't exist
    os.makedirs(config_dir, exist_ok=True)

    # Read existing config
    config = {}
    if os.path.exists(config_file):
        try:
            with open(config_file, 'r') as f:
                config = json.load(f)
        except Exception:
            pass

    # Update with new values
    config['api_key'] = key
    if project_id:
        config['project_id'] = project_id
        config['last_project_id'] = project_id
    if project_name:
        config['project_name'] = project_name
    if team_secret:
        config['team_secret'] = team_secret
    config['cloud_url'] = CLOUD_URL

    # Save config
    with open(config_file, 'w') as f:
        json.dump(config, f, indent=2)

    print(f"[+] Configuration saved to {config_file}")

def select_project(key, plan="free"):
    """Fetch and select a project, pinning it on the server for stateless sessions."""
    import requests

    # Update global API key to match the one used in dx init
    global DX_API_KEY
    DX_API_KEY = key

    headers = {"X-DX-Key": key}
    print("\n[*] Fetching project blueprints...")
    try:
        proj_res = requests.get(f"{CLOUD_URL}/projects", headers=headers, timeout=10)
        project_id = None
        project_name = "Default"

        if proj_res.status_code == 200:
            projects = proj_res.json().get("projects", [])
            if projects:
                print(f"\n[?] Found {len(projects)} existing project universes:")
                for idx, p in enumerate(projects):
                    print(f"   {idx + 1}. {p['name']} ({p.get('status', 'active')})")
                print(f"   {len(projects) + 1}. [Create New Project]")

                choice = input("\n[>] Select Project: ").strip()
                try:
                    c_idx = int(choice) - 1
                    if 0 <= c_idx < len(projects):
                        project_id = projects[c_idx]["id"]
                        project_name = projects[c_idx]["name"]
                    else:
                        project_name = input("[>] Enter New Project Name: ").strip()
                except ValueError:
                    project_id = None
                    project_name = input("[>] Enter New Project Name: ").strip()
            else:
                project_name = input("[>] No projects found. Enter Project Name: ").strip()

        if not project_id:
            create_res = requests.post(f"{CLOUD_URL}/projects", json={"name": project_name}, headers=headers, timeout=10)
            if create_res.status_code == 200:
                project_id = create_res.json().get("project", {}).get("id")
                print(f"[+] Project '{project_name}' created and assigned.")

        # 🌩️ PIN PROJECT ON SERVER (No local files!)
        requests.post(f"{CLOUD_URL}/select-project", json={"project_id": project_id}, headers=headers, timeout=10)
        os.environ["DX_PROJECT_ID"] = project_id
        os.environ["DX_PROJECT_NAME"] = project_name
        print(f"[+] Context Pinned to Cloud: {project_name}")

        # --- E2EE NEURAL VAULT KEY MANAGEMENT ---
        print("\n" + "-"*60)
        print("   NEURAL VAULT : END-TO-END ENCRYPTION")
        print("-"*60)

        is_free_plan = "sandbox" in plan.lower() or "free" in plan.lower() or "limited" in plan.lower()
        if is_free_plan:
            print("[!] PLAN RESTRICTION: Neural Vault (E2EE) is a Professional/Sovereign feature.")
            print("[!] Your 'Surgical Sandbox' plan uses Standard Encryption (TLS/AES).")
        else:
            # Secrets come from server only in stateless mode
            all_proj_res = requests.get(f"{CLOUD_URL}/projects", headers=headers, timeout=10).json()
            selected_p = next((p for p in all_proj_res.get("projects", []) if p['id'] == project_id), {})
            team_secret = selected_p.get("team_secret")
            if team_secret:
                print("[*] Neural Vault Key active for this project (Cloud Sync).")
                os.environ["DX_TEAM_SECRET"] = team_secret

        print(f"[+] Project {project_name} Ready. CLI is operating in Stateless Cloud Mode.")

        # Persist to the global config file (~/.dalexor/config.json)
        save_to_env(key, project_id, project_name, os.environ.get("DX_TEAM_SECRET"))

        return project_id, project_name
    except Exception as e:
        print(f"[!] Project Selection Error: {e}")
        return None, "Default"

# --- SENTINEL HANDLERS ---
class CodeChangeHandler(FileSystemEventHandler):
    def __init__(self, tx_memory):
        self.tx_memory = tx_memory
        self.last_hashes = {}
        self.content_cache = {}

    def on_modified(self, event): self._handle_event(event)
    def on_created(self, event): self._handle_event(event)

    def _handle_event(self, event):
        if event.is_directory or any(x in event.src_path for x in IGNORE_LIST): return

        # Normalize Windows paths to Forward Slashes for Lineage Parity
        src_path = event.src_path.replace('\\', '/')

        # Neural Pulse Feedback
        print(f"[*] Neural Pulse: {os.path.basename(src_path)} detected.")

        # Windows Grace Period: Let the IDE finish writing the file
        time.sleep(0.2)

        # Instant Presence report (Presence Parity)
        self.report_presence(src_path)
        # Debounce/Throttle at Sentinel level too
        self.process_change(src_path)

    def report_presence(self, file_path):
        import requests
        try:
            # Dynamically fetch key to ensure validity
            key = os.getenv("DX_API_KEY") or os.getenv("DALEXORMI_API_KEY") or DX_API_KEY
            if not key: return
            headers = {"X-DX-Key": key}
            # Silent fire-and-forget presence pulse
            requests.post(f"{CLOUD_URL}/presence", json={"file_path": file_path}, headers=headers, timeout=1.0)
        except Exception as e:
            # Presence pulse is non-critical, but we log the debug signal
            logger.debug(f"Presence pulse failed: {e}")

    def extract_synapses(self, content, current_file_name="", project_catalog=None):
        """Atomic Synapse Engine V4: Greedy, multi-language, catalog-aware detection."""
        if not content: return []

        found = set()
        lines = content.split('\n')
        in_block_comment = False
        comment_start_tags = ['"""', "'''", '/*', '<!--', '{-', '(*']

        # 0. GREEDY CATALOG MATCHING (High Intelligence)
        # If we know all files in the project, we can find links just by seeing the name
        if project_catalog:
            # Tokenize content to find potential matches
            tokens = set(re.findall(r'[\w\.-]+', content))
            for token in tokens:
                if token in project_catalog and token != current_file_name:
                    found.add(token)
                # Try matching base names (e.g., config.py -> config)
                elif token.split('.')[0] in project_catalog and token.split('.')[0] != current_file_name:
                    # Only map if the token looks like a deliberate reference
                    pass

        # 1. TRADITIONAL SEMANTIC DETECTION
        contexts = [
            "import", "from", "require", "include", "using", "use",
            "load", "source", "mod", "src", "href", "path", "file",
            "@", "COPY", "library", "part", "export", "module", "python", "node", "npm", "exec", "call"
        ]

        for line in lines:
            stripped = line.strip()

            # Block Comment Tracking
            if any(tag in stripped for tag in comment_start_tags) and not in_block_comment:
                if not (('*/' in stripped) or (stripped.count('"""') == 2) or ('-->' in stripped) or ('-}' in stripped)):
                    in_block_comment = True
                continue
            if in_block_comment:
                if any(tag in stripped for tag in ['*/', '"""', "'''", '-->', '-}', '*)']):
                    in_block_comment = False
                continue

            if not stripped or any(stripped.startswith(c) for c in ['#', '//', '--', ';', '%', '!', "'"]):
                continue

            # Pattern Matching
            for ctx in contexts:
                if ctx.lower() in stripped.lower() or "./" in stripped or "../" in stripped:
                    # Extract potential filenames
                    matches = re.findall(r'[\'"]([^\'"]+\.[a-zA-Z0-9]+)[\'"]', stripped)
                    for m in matches:
                        name = m.split('/')[-1].split('\\')[-1]
                        if name and name != current_file_name:
                            found.add(name)

                    if ctx.lower() in ['from', 'import', 'use', 'mod']:
                        word_matches = re.findall(r'(?:from|import|use|mod)\s+([\w\.]+)', stripped)
                        for wm in word_matches:
                            name = wm.split('.')[-1]
                            if name and name != current_file_name:
                                system_libs = {'os', 'sys', 'json', 'math', 'hashlib', 'time', 're', 'datetime'}
                                if name.lower() in system_libs: continue
                                found.add(name)
                                if '.' not in name:
                                    for ext in ['.py', '.js', '.ts', '.rs', '.go', '.php', '.cpp', '.h']:
                                        found.add(f"{name}{ext}")

        # 2. FILE TYPE SPECIALIZED DETECTION
        if current_file_name == "package.json":
            try:
                pkg = json.loads(content)
                deps = {**pkg.get("dependencies", {}), **pkg.get("devDependencies", {})}
                for d in deps.keys():
                    found.add(d)
            except Exception as e:
                print(f"[!] Warning: Synapse Parse Error in package.json: {e}")

        if "Dockerfile" in current_file_name:
            # Catch links to local scripts being copied
            for line in lines:
                if "COPY" in line or "ADD" in line:
                    parts = line.split()
                    if len(parts) > 1:
                        src = parts[1].split('/')[-1]
                        if src and src != current_file_name: found.add(src)

        return list(found)
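
    # Editor's example (not part of the original file): for a Python snippet like
    #     from utils import helper
    #     cfg = load("./settings.json")
    # the engine reports 'utils' and 'helper' (plus guessed concrete files such as
    # helper.py, helper.js, ...) and the quoted neighbor 'settings.json'.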

    def extract_symbols(self, content):
        """Sovereign Symbol Extraction: Identify key logic blocks for privacy-first indexing."""
        if not content: return []
        # Support for Python, JS, TS, Rust, Go, C++, etc.
        sym_patterns = [
            r'(?:async\s+)?(?:def|class|function|struct|interface|const|let|var)\s+([a-zA-Z_][a-zA-Z0-9_]*)',
            r'@([a-zA-Z_][a-zA-Z0-9_]*)',  # Decorators
        ]
        all_syms = set()
        for p in sym_patterns:
            all_syms.update(re.findall(p, content))
        return list(all_syms)

    def process_change(self, file_path):
        try:
            with open(file_path, 'r', encoding='utf-8', errors='ignore') as f:
                content = f.read()
            curr_hash = hashlib.md5(content.encode()).hexdigest()
            if self.last_hashes.get(file_path) == curr_hash: return

            prev_content = self.content_cache.get(file_path, "")
            diff_lines = []
            for line in difflib.unified_diff(prev_content.splitlines(), content.splitlines(), lineterm=''):
                if (line.startswith('+') or line.startswith('-')) and not (line.startswith('+++') or line.startswith('---')):
                    diff_lines.append(line)

            diff_text = "\n".join(diff_lines)
            analysis_target = diff_text if prev_content else content
            entropy = SovereignEntropyEngine.calculate(analysis_target, file_path)

            self.last_hashes[file_path] = curr_hash
            self.content_cache[file_path] = content

            if entropy >= 0.1:
                synapses = self.extract_synapses(content, os.path.basename(file_path))
                symbols = self.extract_symbols(content)
                print(f"[+] Significant evolution in {os.path.basename(file_path)} (Entropy: {entropy})")
                self.tx_memory.add(file_path, diff_text if prev_content else content[:10000], entropy, synapses, symbols)
            else:
                # Provide feedback even for low-value changes so user knows it's working
                print(f"[-] Filtered {os.path.basename(file_path)}: Significance {entropy} < 0.1 thresh.")
        except Exception as e:
            print(f"[!] Engine Error in {os.path.basename(file_path)}: {e}")

    def warm_up(self, root_dir):
        """Silently index all existing files to prevent 'Ghost Changes' on startup."""
        print("[*] Warming up Sentinel Cache...")
        count = 0
        for root, dirs, files in os.walk(root_dir):
            # Prune ignored directories
            dirs[:] = [d for d in dirs if d not in IGNORE_LIST and not d.startswith('.')]
            for file in files:
                if any(x in file for x in IGNORE_LIST) or file.startswith('.'): continue
                path = os.path.join(root, file).replace("\\", "/")
                try:
                    with open(path, 'r', encoding='utf-8', errors='ignore') as f:
                        content = f.read()
                    curr_hash = hashlib.md5(content.encode()).hexdigest()
                    self.last_hashes[path] = curr_hash
                    self.content_cache[path] = content
                    count += 1
                except Exception as e:
                    print(f"[*] Warning: Skipping unreadable artifact {path}: {e}")
        print(f"[+] Indexed {count} existing artifacts.")

class DummyMCP:
    def tool(self): return lambda f: f
    def resource(self, path): return lambda f: f
    def prompt(self): return lambda f: f
    def run(self, transport): raise RuntimeError("FastMCP not initialized")

# --- MCP SERVER INITIALIZATION ---
try:
    from mcp.server.fastmcp import FastMCP
    mcp = FastMCP("DalexorGlobal")
except Exception as e:
    sys.stderr.write(f"[Dalexor MCP] FastMCP Init Failed: {e}\n")
    mcp = DummyMCP()

async def cloud_call(endpoint: str, payload: dict, timeout: int = 15):
    """Secure Async Handshake with Cloud Brain."""
    try:
        async with httpx.AsyncClient(timeout=timeout) as client:
            headers = {"X-DX-Key": DX_API_KEY}
            payload["project_id"] = os.getenv("DX_PROJECT_ID")
            res = await client.post(f"{CLOUD_URL}/tools/{endpoint}", json=payload, headers=headers)
            if res.status_code == 200:
                data = res.json()
                result = data.get("result", "No result returned.")
                # Auto-Decrypt if E2EE is active
                if isinstance(result, str) and "vault_" in result:
                    result = NeuralVault.unlock(result, DX_TEAM_SECRET)
                return result
            return f"Brain Error ({res.status_code}): {res.text[:200]}"
    except Exception as e:
        return f"Neural Disconnect: {e}"

@mcp.tool()
async def mcp_health_check() -> str:
    """Returns local agent status and connectivity to Global Brain."""
    error_msg = ""
    try:
        async with httpx.AsyncClient(timeout=10) as client:
            res = await client.get(f"{CLOUD_URL}/", headers={"X-DX-Key": DX_API_KEY})
            cloud_status = "ONLINE" if res.status_code == 200 else f"ERROR ({res.status_code})"
            if res.status_code != 200:
                error_msg = f" | Error: {res.text[:100]}"
    except Exception as e:
        cloud_status = "OFFLINE"
        error_msg = f" | Exception: {str(e)}"

    return f"Dalexor Local Agent: ACTIVE\nGlobal Brain: {cloud_status}{error_msg}\nURL: {CLOUD_URL}\nAPI Key: {'Set' if DX_API_KEY else 'Missing'}\nProject: {os.getenv('DX_PROJECT_ID')}\nSovereign: {DX_SOVEREIGN}"

@mcp.tool()
async def get_logical_evolution() -> str:
    """Historical Replay: Summarize logical shifts in the project over the last hour."""
    return await cloud_call("logical_evolution", {"window_minutes": 60})

@mcp.tool()
async def trace_dependency(symbol: str) -> str:
    """Impact Analysis: Find all files that import or call a specific symbol (function, class, variable)."""
    # Prefer Local Search first for speed (requires ripgrep on PATH)
    local_files = []
    try:
        cmd = ["rg", "-l", symbol, "."]
        proc = subprocess.run(cmd, capture_output=True, text=True, timeout=5)
        if proc.stdout:
            local_files = [f.strip() for f in proc.stdout.split('\n') if f.strip() and not any(x in f for x in IGNORE_LIST)]
    except Exception as e:
        # Log via the stderr-bound logger: a print() here would corrupt the MCP stdio stream
        logger.debug(f"Local dependency trace restricted: {e}")

    cloud_data = await cloud_call("trace_dependency", {"symbol": symbol})
    if local_files:
        return f"**Local Workspace Mentions:**\n" + "\n".join(local_files) + f"\n\n**Cloud Synaptic Context:**\n{cloud_data}"
    return cloud_data

@mcp.tool()
async def get_surgical_context(file_path: str) -> str:
    """Neighborhood Retrieval: Get the full history of a file and all its physically linked neighbors."""
    return await cloud_call("surgical_context", {"file_path": file_path}, timeout=20)

@mcp.tool()
async def find_related_decisions(topic: str) -> str:
    """Cross-Reference: Find architectural decisions related to a specific topic."""
    return await cloud_call("related_decisions", {"topic": topic})


@mcp.tool()
async def find_definition(symbol: str) -> str:
    """Global Symbol Provenance: Find where a function, class, or variable is defined."""
    # Try local search first
    patterns = [rf"(async\s+)?def\s+{symbol}\b", rf"class\s+{symbol}\b", rf"function\s+{symbol}\b"]
    for p in patterns:
        try:
            cmd = ["rg", "-n", "--column", "-e", p, "."]
            proc = subprocess.run(cmd, capture_output=True, text=True, timeout=3)
            if proc.stdout: return f"**Local Match:**\n{proc.stdout}"
        except Exception as e:
            logger.debug(f"Local ripgrep search failed for {symbol}: {e}")
            continue
    return await cloud_call("find_definition", {"symbol": symbol})

@mcp.tool()
async def get_atomic_diff(file_path: str) -> str:
    """Atomic Diffing: Replay exact code changes between last two versions of a file."""
    return await cloud_call("atomic_diff", {"file_path": file_path})

@mcp.tool()
async def get_dependency_topology(file_path: str) -> str:
    """Dependency Topology: Map structural relationships (imports/calls) for the target file."""
    return await cloud_call("dependency_topology", {"file_path": file_path})

@mcp.tool()
async def predict_conflicts(file_path: str) -> str:
    """Predictive Risk Analysis: Detect if a file was edited recently and analyze cross-team collision risks."""
    return await cloud_call("predict_conflicts", {"file_path": file_path})


# --- CLI COMMANDS ---

def dx_init():
    print("[*] Initializing Dalexor Project Memory...")
    # Stateless Init: Link to cloud project without writing local files
    key_tuple = interactive_verification(force_prompt=True)
    if key_tuple and key_tuple[0]:
        select_project(key_tuple[0], plan=key_tuple[2])

    print("[+] Project Initialized. CLI is linked to Cloud Session.")

def dx_sync(args=None):
    """Deep Project Analysis: Ingest the current state of all files and map synapses."""
    global DX_API_KEY

    # Force fresh project lookup from server instead of stale env vars
    import requests
    headers = {"X-DX-Key": DX_API_KEY}
    project_id = None

    try:
        user_res = requests.get(f"{CLOUD_URL}/validate", headers=headers, timeout=10)
        if user_res.status_code == 200:
            last_project = user_res.json().get("user", {}).get("last_project_id")
            if last_project:
                project_id = last_project
                # Keep the environment in step with the resumed context so the
                # ingest payloads below scope to the same project
                os.environ["DX_PROJECT_ID"] = project_id
                print(f"[+] Resumed Context: Project {project_id}")
            else:
                print("[!] No active project. Run 'dx init' first.")
                return
        else:
            print("[!] Authentication failed.")
            return
    except Exception as e:
        print(f"[!] Connection error: {e}")
        return

    print(f"[*] Analyzing files line-by-line for neural mapping into Project: {project_id}...")

    # Pre-calculate project catalog for greedy synapse matching
    PROJECT_CATALOG = set()
    for root, dirs, files in os.walk("."):
        dirs[:] = [d for d in dirs if d not in IGNORE_LIST and not d.startswith('.')]
        for f in files:
            if not any(x in f for x in IGNORE_LIST) and not f.startswith('.'):
                PROJECT_CATALOG.add(f)
                # Also index base names for languages like Python or Rust
                if '.' in f:
                    PROJECT_CATALOG.add(f.split('.')[0])

    def cloud_callback(payload):
        import requests
        import time

        # Auto-enable encryption if plan allows (Double check global flag)
        if DX_SOVEREIGN:
            payload["is_sovereign"] = True
            payload["request_server_encryption"] = True

        retries = 3
        for attempt in range(retries):
            try:
                res = requests.post(f"{CLOUD_URL}/ingest", json=payload, headers={"X-DX-Key": DX_API_KEY, "Content-Type": "application/json"}, timeout=60)
                if res.status_code == 429:
                    print("\n[!] Rate Limit Exceeded: Sync paused. Wait an hour or upgrade for higher velocity.")
                    sys.exit(0)  # Stop sync immediately
                if res.status_code == 402:
                    print("\n[!] Quota Full: Sync aborted. You have reached your atom limit. Manage atoms at dalexor.mi")
                    sys.exit(0)
                if res.status_code != 200:
                    print(f"[!] Server returned {res.status_code}: {res.text[:200]}")
                    return None
                return res.json()
            except requests.exceptions.RequestException as e:
                if attempt < retries - 1:
                    wait_time = 2 * (attempt + 1)
                    print(f"[!] Connection glitch. Retrying in {wait_time}s...")
                    time.sleep(wait_time)
                else:
                    print(f"[!] Upload failed after {retries} attempts: {str(e)}")
                    return None
            except Exception as e:
                print(f"[!] Critical Error: {str(e)}")
                return None

    def generate_smart_summary(content, filename):
        """Extract meaningful summary from content header or docstrings."""
        # 1. Try Local AI if available (Rich Summary)
        groq_key = os.getenv("GROQ_API_KEY")
        if groq_key:
            try:
                from groq import Groq
                client = Groq(api_key=groq_key)
                completion = client.chat.completions.create(
                    model="llama-3.1-8b-instant",
                    messages=[
                        {"role": "system", "content": "You are Dalexor, a Sovereign Intelligence Detective. Analyze this evidence. Output a RICH, STRUCTURED summary using these tags: [LOGIC], [SYSTEM], [SECURITY]. Explain exactly WHAT THIS FILE IS FOR. Use sections 'NARRATIVE:' and 'SYMBOLS:'. Example: '[LOGIC] Core Logic NARRATIVE: This file provides the central authentication logic...'. Keep it precise but visually consistent with the dashboard."},
                        {"role": "user", "content": f"Explain what this file is for and analyze its purpose:\nFilename: {filename}\nContent:\n{content[:2500]}"}
                    ],
                    temperature=0.1
                )
                return completion.choices[0].message.content.strip()
            except Exception as e:
                # Log to stderr to avoid corrupting command output
                sys.stderr.write(f"[*] Intelligence Insight restricted: {e}\n")

        # 2. Heuristic Extraction (Fast & Private)
        ext = os.path.splitext(filename)[1].lower()

        # Python Docstrings
        if ext == '.py':
            match = re.search(r'^"""(.*?)"""', content, re.DOTALL)
            if match:
                return match.group(1).strip().split('\n')[0][:80]
            match = re.search(r"^'''(.*?)'''", content, re.DOTALL)
            if match:
                return match.group(1).strip().split('\n')[0][:80]

        # JS/TS Comments
        if ext in ['.js', '.ts', '.jsx', '.tsx']:
            match = re.search(r'/\*\*(.*?)\*/', content, re.DOTALL)
            if match:
                clean = re.sub(r'\s*\*\s*', ' ', match.group(1)).strip()
                return clean.split('.')[0][:80]
            # Single line comment at top
            match = re.search(r'^//\s*(.*)', content)
            if match:
                return match.group(1).strip()[:80]

        # HTML Title
        if ext == '.html':
            match = re.search(r'<title>(.*?)</title>', content, re.IGNORECASE)
            if match:
                return f"Page: {match.group(1).strip()}"

        # Markdown Header
        if ext == '.md':
            match = re.search(r'^#\s+(.*)', content)
            if match:
                return f"Doc: {match.group(1).strip()}"

        # 3. Fallback Heuristics for Config/Scripts
        try:
            if filename == 'requirements.txt':
                # (deduplicated: this block appeared twice in the original, and the
                # summary hardcoded "12" regardless of the actual dependency count)
                lines = [l.strip() for l in content.splitlines() if l.strip() and not l.startswith('#')]
                if len(lines) > 3:
                    return f"[SYSTEM] Neural Evidence NARRATIVE: {len(lines)} core dependencies detected including {', '.join(lines[:3])}..."
                return f"[SYSTEM] Dependency Manifest NARRATIVE: {', '.join(lines)}"

            if filename == 'package.json':
                import json
                try:
                    pkg = json.loads(content)
                    deps = list(pkg.get('dependencies', {}).keys())
                    if deps:
                        return f"[SYSTEM] Sovereign Manifest NARRATIVE: Node Configuration for {pkg.get('name', 'Project')}. Deps: {', '.join(deps[:3])}"
                    return f"[SYSTEM] Node Config NARRATIVE: {pkg.get('name', 'Project')}"
                except Exception as e:
                    sys.stderr.write(f"[*] Heuristic Parse restricted for {filename}: {e}\n")

            if ext in ['.bat', '.sh']:
                # Extract the first non-comment echo that isn't just "off" or "on"
                echos = re.findall(r'echo\s+(.*)', content, re.IGNORECASE)
                for e in echos:
                    clean = e.strip().strip('"').strip("'")
                    if clean.lower() not in ['off', 'on', '']:
                        return f"[LOGIC] Tactical Script NARRATIVE: {clean}"

            if ext == '.json':
                # Generic JSON summary
                return "[SYSTEM] Secure Artifact NARRATIVE: Configuration data."
        except Exception as e:
            logger.debug(f"Smart summary generation failed for {filename}: {e}")

        return f"[SYSTEM] Sovereign Sync NARRATIVE: {filename}"

    tx = TransactionMemory(cloud_callback)
    handler = CodeChangeHandler(tx)

    EXTENSION_MAP = {
        '.md': 'document',
        '.txt': 'document',
        '.json': 'artifact',
        '.yaml': 'artifact',
        '.yml': 'artifact',
        '.py': 'code',
        '.js': 'code',
        '.ts': 'code',
        '.html': 'code',
        '.css': 'code',
        '.sh': 'code',
        '.bat': 'code',
        '.ps1': 'code'
    }

    count = 0
    synapse_count = 0
    for root, dirs, files in os.walk("."):
        dirs[:] = [d for d in dirs if d not in IGNORE_LIST and not d.startswith('.')]
        for file in files:
            if any(x in file for x in IGNORE_LIST) or file.startswith('.'): continue
            path = os.path.relpath(os.path.join(root, file), ".").replace("\\", "/")
            ext = os.path.splitext(file)[1].lower()
            atom_type = EXTENSION_MAP.get(ext, 'code')

            try:
                with open(path, 'r', encoding='utf-8', errors='ignore') as f:
                    content = f.read()

                # Zero-Trust Deduplication: Check if content changed since last sync
                # We blend the secret into the hash so if the key changes, we force a re-upload!
                content_hash_input = content + (DX_TEAM_SECRET or "")
                curr_hash = hashlib.md5(content_hash_input.encode()).hexdigest()

                # Check for FORCE flag or changed content
                # args is the parsed CLI namespace handed in by run_agent; the original
                # "'args' in globals()" test is always False for a parameter, so the
                # "force" extra never took effect. Test the parameter directly:
                force_sync = bool(args and getattr(args, "extra", None) == "force")

                if not force_sync and handler.last_hashes.get(path) == curr_hash:
                    continue

                entropy = SovereignEntropyEngine.calculate(content, path)
                synapses = handler.extract_synapses(content, os.path.basename(path), PROJECT_CATALOG)
                symbols = handler.extract_symbols(content)
                synapse_count += len(synapses)

                # Direct upload for sync mode
                smart_summary = generate_smart_summary(content, os.path.basename(path))

                # Generate stable UUID for source_id based on file path to avoid duplicates on re-sync
                import uuid
                source_id_seed = f"{os.getenv('DX_PROJECT_ID')}:{path}"
                source_id = str(uuid.uuid5(uuid.NAMESPACE_URL, source_id_seed))

                payload = {
                    "content": content[:10000],
                    "entropy_score": round(entropy, 2),
                    "atom_type": atom_type,
                    "source": "api",
                    "file_path": path,  # Universal forward slashes
                    "file_name": os.path.basename(path),
                    "source_id": source_id,
                    "content_hash": curr_hash,
                    "synapses": synapses,
                    "symbols": symbols,
                    "is_sovereign": DX_SOVEREIGN,
                    "request_server_encryption": DX_SOVEREIGN,
                    "is_encrypted": False,
                    "project_id": os.getenv("DX_PROJECT_ID"),
                    "project_name": os.getenv("DX_PROJECT_NAME") or "Default",
                    "metadata": {
                        "file_path": path,
                        "file_name": os.path.basename(path),
                        "summary": smart_summary,
                        "synapses": synapses,
                        "content_hash": curr_hash
                    }
                }

                api_res = cloud_callback(payload)
                if api_res:
                    handler.last_hashes[path] = curr_hash
                    handler.content_cache[path] = content
                    count += 1

                    status = api_res.get("status")
                    if status == "skipped":
                        print(f"[-] Unchanged: {path}")
                    else:
                        print(f"[+] Indexed {atom_type}: {path} ({len(synapses)} synapses)")
                else:
                    print(f"[!] Failed: {path}")

            except Exception as e:
                print(f"[!] Error syncing {path}: {e}")

    print(f"\n[+] Sync Complete! {count} artifacts analyzed. {synapse_count} synapses mapped.")

def dx_tail():
    global DX_API_KEY
    print("\033[95m[*] Dalexor Intelligence Stream Active...\033[0m")
    print("(Press Ctrl+C to disconnect)")

    import requests
    import json

    headers = {"X-DX-Key": DX_API_KEY}
    try:
        # Connect to the Global Brain's SSE Tail endpoint
        response = requests.get(f"{CLOUD_URL}/tail", headers=headers, stream=True, timeout=None)

        if response.status_code != 200:
            print(f"[!] Connection Refused: {response.status_code} - {response.text}")
            return

        for line in response.iter_lines():
            if line:
                decoded_line = line.decode('utf-8')
                if decoded_line.startswith('data: '):
                    try:
                        event = json.loads(decoded_line[6:])
                        e_type = event.get('type')
                        e_data = event.get('data', {})
                        # Format timestamp (e.g. 14:20:05)
                        ts = event.get('timestamp', '')[11:19] if event.get('timestamp') else "??:??:??"

                        if e_type == 'presence':
                            print(f"[\033[90m{ts}\033[0m] ⚡ \033[94mPRESENCE\033[0m: {e_data.get('user')} is touching \033[1m{e_data.get('file')}\033[0m")
                        elif e_type == 'evolution':
                            print(f"[\033[90m{ts}\033[0m] 🧬 \033[92mEVOLUTION\033[0m: {e_data.get('user')} synced \033[1m{e_data.get('file')}\033[0m (Ω: {e_data.get('entropy')})")
                            print(f"   └─ \033[90m{e_data.get('insight')}\033[0m")
                        elif e_type == 'system':
                            print(f"[\033[90m{ts}\033[0m] 🛰️ \033[93mSYSTEM\033[0m: {event.get('message')}")
                    except Exception:
                        pass  # Ignore malformed stream chunks
    except KeyboardInterrupt:
        print("\n[*] Intelligence Stream Disconnected.")
    except Exception as e:
        print(f"[!] Stream Error: {e}")


def dx_chat():
    global DX_API_KEY
    # Banner already printed in run_agent
    print("Ask anything about your project's memory.")
    print("Type 'exit' to quit.")
    print("="*60 + "\n")

    import requests
    while True:
        try:
            p = input("[>] You: ").strip()
            if not p: continue
            if p.lower() in ['exit', 'quit', 'bye']: break

            headers = {"X-DX-Key": DX_API_KEY}
            project_id = os.getenv("DX_PROJECT_ID")
            res = requests.post(f"{CLOUD_URL}/query", json={"query": p, "project_id": project_id}, headers=headers, timeout=60)

            if res.status_code == 200:
                data = res.json()
                context = data.get("context", "...")
                intel = data.get("intelligence", {})
                signals = intel.get("signals", [])
                neighbors = intel.get("synaptic_neighborhood", [])

                print(f"[*] Brain: {context}\n")

                if signals or neighbors:
                    print("-" * 60)
                    print(f"[*] Sources used for this synthesis ({len(signals)} signals, {len(neighbors)} neural neighbors):")
                    for s in signals:
                        print(f"   [SIGNAL] {s['path']} - {s['summary']}")
                    for n in neighbors:
                        print(f"   [NEURAL] {n['path']} - {n['summary']}")
                    print("-" * 60 + "\n")
            elif res.status_code == 401:
                print("[!] Error (401): Unauthorized. Your API Key might be invalid.\n")
                DX_API_KEY, _, _ = interactive_verification()
            else:
                print(f"[!] Brain Error ({res.status_code}): {res.text}\n")
        except KeyboardInterrupt:
            break
        except Exception as e:
            print(f"[!] Connection Error: {e}\n")
            break

def run_agent():
    global DX_API_KEY
    # Universal Case-Insensitivity fix
    if len(sys.argv) > 1:
        valid_cmds = ["init", "watch", "chat", "tail", "sync", "mcp", "seal"]  # "seal" added to match the parser choices below
        if sys.argv[1].lower() in valid_cmds:
            sys.argv[1] = sys.argv[1].lower()

    parser = argparse.ArgumentParser(description="Dalexor MI Sovereign CLI")
    parser.add_argument("command", choices=["init", "watch", "chat", "tail", "sync", "mcp", "seal"], nargs="?", default="init")
    parser.add_argument("extra", nargs="?")  # For file paths or messages
    args = parser.parse_args()

    # MCP MODE: Skip all interactive prompts and stdout printing
    # MCP uses stdio for JSON-RPC, so any print() will corrupt the protocol
    if args.command == "mcp":
        # Load local .env just in case we are running in a project dir
        try:
            from dotenv import load_dotenv
            load_dotenv()
        except Exception as e:
            sys.stderr.write(f"[Dalexor MCP] Warning: Failed to load local .env: {e}\n")

        # Get API key (Env > Global Config)
        DX_API_KEY = os.getenv("DX_API_KEY") or os.getenv("DALEXORMI_API_KEY") or GLOBAL_CONFIG.get("api_key")

        project_id = os.getenv("DX_PROJECT_ID") or GLOBAL_CONFIG.get("project_id")
        if project_id:
            sys.stderr.write(f"[Dalexor MCP] Active Project ID: {project_id}\n")
        else:
            sys.stderr.write("[Dalexor MCP] No Project ID found in environment or config. Defaulting to Organization Scope.\n")

        if not DX_API_KEY:
            sys.stderr.write("[Dalexor MCP] CRITICAL: No API Key found. Run 'dx init' or set DX_API_KEY.\n")
            sys.exit(1)

        import traceback
        try:
            if isinstance(mcp, DummyMCP):
                sys.stderr.write("[Dalexor MCP] CRITICAL: MCP server is in dummy state. Check dependencies.\n")
                sys.exit(1)

            sys.stderr.write("[Dalexor MCP] Handshake complete. Starting JSON-RPC server...\n")
            sys.stderr.flush()
            mcp.run(transport='stdio')
        except Exception as e:
            sys.stderr.write(f"[Dalexor MCP CRITICAL ERROR] {e}\n")
            sys.stderr.write(traceback.format_exc())
            sys.stderr.flush()
            sys.exit(1)
        return

    # Step 1: Handle Initialization separately (forces prompt)
    if args.command == "init":
        dx_init()
        return

    # Step 2: Ensure Auth & Get Plan (for non-init commands)
    # Returns (key, email, plan_display_string)
    key_tuple = interactive_verification()

    # For other commands, we have the key now.
    if not key_tuple or not key_tuple[0]:
        print("[!] Error: Valid API Key required.")
        sys.exit(1)

    DX_API_KEY = key_tuple[0]

    # Print Banner for EVERY command (except MCP)
    print_banner(key_tuple[1], key_tuple[2])  # email, plan

    if args.command == "chat": dx_chat(); return
    if args.command == "tail": dx_tail(); return
    # dx_sync takes the parsed args so the optional "force" extra reaches it
    if args.command == "sync": dx_sync(args); return

    # Watchdog & Sentinel
    def push_to_cloud(payload):
        import requests
        try:
            summary = payload.get('metadata', {}).get('summary', 'Evolution')

            # Inject Project Metadata
            project_id = os.getenv("DX_PROJECT_ID")
            if not project_id:
                print("[!] Critical Alert: Intelligence push aborted. No project context active. Run 'dx init'.")
                return

            payload["project_id"] = project_id
            payload["project_name"] = os.getenv("DX_PROJECT_NAME") or "Unnamed Project"
            payload["is_sovereign"] = DX_SOVEREIGN

            print(f"[*] Uploading '{summary}' to Project '{payload['project_name']}'...")
            res = requests.post(f"{CLOUD_URL}/ingest", json=payload, headers={"X-DX-Key": DX_API_KEY}, timeout=60)
            if res.status_code == 200:
                print(f"[+] Synced: {res.json().get('ai_insight', 'Indexed')}")
            elif res.status_code == 429:
                print("[!] Rate Limit Exceeded: Your plan is currently throttled. Upgrade to Sovereign for near-unlimited velocity.")
            elif res.status_code == 402:
                print("[!] Quota Full: You have reached your Intelligence Atom limit. Please upgrade or manage your atoms at dalexor.mi")
            else:
                print(f"[!] Handshake Refused: {res.status_code} - {res.text}")
        except Exception as e:
            print(f"[!] Transmission Error: {e}")

    tx = TransactionMemory(push_to_cloud)
    handler = CodeChangeHandler(tx)
    handler.warm_up(".")
    observer = Observer()
    observer.schedule(handler, ".", recursive=True)
    observer.start()

    if args.command == "watch":
        print(f"[*] Telescope Sentinel Active in {os.getcwd()}. Press Ctrl+C to stop.")
        print("\n" + "-"*60)
        print("   MANUAL TRIGGER: THE 'S' KEY")
        print("-"*60)
        print("   While dx watch is running, you have a 'tactical trigger':")
        print("\n   [Press S] (or k): Immediately flush all pending changes and")
        print("   tag them with Maximum Entropy (10.0), signaling a milestone.")
        print("\n   [Bypass Coalescence]: A manual seal bypasses the 20-minute")
        print("   window, forcing the creation of a fresh, stable version.")
        print("-"*60 + "\n")

        # Keyboard Listener for sealing milestones
        def kbd_loop():
            try:
                if os.name == 'nt':
                    import msvcrt
                    while True:
                        if msvcrt.kbhit():
                            ch = msvcrt.getch().decode('utf-8').lower()
                            if ch in ['s', 'k', '\x10']:  # S, K, or Ctrl+P (approx)
                                tx.flush_all()
                        time.sleep(0.1)
                else:
                    import select
                    import termios
                    import tty
                    def getch():
                        fd = sys.stdin.fileno()
                        old_settings = termios.tcgetattr(fd)
                        try:
                            tty.setraw(sys.stdin.fileno())
                            ch = sys.stdin.read(1)
                        finally:
                            termios.tcsetattr(fd, termios.TCSADRAIN, old_settings)
                        return ch
                    while True:
                        if select.select([sys.stdin], [], [], 0.1)[0]:
                            ch = getch().lower()
                            if ch in ['s', 'k', '\x10']:
                                tx.flush_all()
                        time.sleep(0.1)
            except Exception as e:
                print(f"[!] Keyboard Sentinel Activation Error: {e}")

        if sys.stdin.isatty():
            threading.Thread(target=kbd_loop, daemon=True).start()

        try:
            while True: time.sleep(1)
        except KeyboardInterrupt:
            # KeyboardInterrupt is not an Exception subclass, so the original
            # `except Exception` never fired on the Ctrl+C the banner promises
            print("\n[*] Watcher Stopped.")
        observer.stop()

    if args.command == "seal":
        msg = args.extra or "Manual Milestone Seal"
        print(f"[*] Manual Seal: Signal sent to Global Brain for Project {os.getenv('DX_PROJECT_ID')}")
        # Send a special atom to the cloud to signal a seal on the recently modified files
        # We can just push a virtual atom with dx_seal=True
        push_to_cloud({
            "content": f"MILESTONE: {msg}",
            "entropy_score": 10.0,
            "atom_type": "milestone",
            "metadata": {
                "summary": f"MANUAL SEAL: {msg}",
                "dx_seal": True,
                "manual": True
            }
        })
        return

if __name__ == "__main__":
    run_agent()