qyro 2.0.0__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (45) hide show
  1. qyro/__init__.py +17 -0
  2. qyro/adapters/__init__.py +4 -0
  3. qyro/adapters/language_adapters/__init__.py +4 -0
  4. qyro/adapters/language_adapters/c/__init__.py +4 -0
  5. qyro/adapters/language_adapters/python/__init__.py +4 -0
  6. qyro/adapters/language_adapters/python/python_adapter.py +584 -0
  7. qyro/cli/__init__.py +8 -0
  8. qyro/cli/__main__.py +5 -0
  9. qyro/cli/cli.py +392 -0
  10. qyro/cli/interactive.py +297 -0
  11. qyro/common/__init__.py +37 -0
  12. qyro/common/animation.py +82 -0
  13. qyro/common/builder.py +434 -0
  14. qyro/common/compiler.py +895 -0
  15. qyro/common/config.py +93 -0
  16. qyro/common/constants.py +99 -0
  17. qyro/common/errors.py +176 -0
  18. qyro/common/frontend.py +74 -0
  19. qyro/common/health.py +358 -0
  20. qyro/common/kafka_manager.py +192 -0
  21. qyro/common/logging.py +149 -0
  22. qyro/common/memory.py +147 -0
  23. qyro/common/metrics.py +301 -0
  24. qyro/common/monitoring.py +468 -0
  25. qyro/common/parser.py +91 -0
  26. qyro/common/platform.py +609 -0
  27. qyro/common/redis_memory.py +1108 -0
  28. qyro/common/rpc.py +287 -0
  29. qyro/common/sandbox.py +191 -0
  30. qyro/common/schema_loader.py +33 -0
  31. qyro/common/secure_sandbox.py +490 -0
  32. qyro/common/toolchain_validator.py +617 -0
  33. qyro/common/type_generator.py +176 -0
  34. qyro/common/validation.py +401 -0
  35. qyro/common/validator.py +204 -0
  36. qyro/gateway/__init__.py +8 -0
  37. qyro/gateway/gateway.py +303 -0
  38. qyro/orchestrator/__init__.py +8 -0
  39. qyro/orchestrator/orchestrator.py +1223 -0
  40. qyro-2.0.0.dist-info/METADATA +244 -0
  41. qyro-2.0.0.dist-info/RECORD +45 -0
  42. qyro-2.0.0.dist-info/WHEEL +5 -0
  43. qyro-2.0.0.dist-info/entry_points.txt +2 -0
  44. qyro-2.0.0.dist-info/licenses/LICENSE +21 -0
  45. qyro-2.0.0.dist-info/top_level.txt +1 -0
@@ -0,0 +1,204 @@
1
+ """
2
+ Nexus Input Validation
3
+ Schema validation and input sanitization for safe data handling.
4
+ """
5
+
6
+ import json
7
+ import re
8
+ from typing import Any, Dict, List, Optional, Union
9
+ from .errors import NexusError, ErrorCode, JSONError, Result
10
+
11
+
12
class SchemaValidator:
    """
    Validates JSON data against a schema definition.

    The schema is a prototype dict: a nested dict describes an object, a
    one-element list describes the element schema of an array, and any
    other value fixes the expected primitive type by example.
    Ensures type safety and structure consistency.

    NOTE(review): missing keys are never rejected — in the original code
    the "required field" branch skipped every missing key regardless of
    the schema value, so fields are effectively all optional. That
    (behavior-preserving) rule is now explicit.
    """

    def __init__(self, schema: Dict[str, Any]):
        self.schema = schema
        # Maps Python types to JSON type names (informational; not used by
        # the validation logic itself).
        self._type_map = {
            str: "string",
            int: "integer",
            float: "number",
            bool: "boolean",
            list: "array",
            dict: "object",
            type(None): "null"
        }

    def validate(self, data: Dict[str, Any]) -> Result:
        """
        Validate data against schema.
        Returns Result.ok(data) if valid, Result.err(error) if invalid.
        """
        try:
            self._validate_object(data, self.schema, "root")
            return Result.ok(data)
        except NexusError as e:
            return Result.err(e)

    def _validate_object(self, data: Dict, schema: Dict, path: str):
        """Recursively validate object structure.

        Raises JSONError (JSON_SCHEMA_MISMATCH) on the first mismatch.
        """
        if not isinstance(data, dict):
            raise JSONError(
                ErrorCode.JSON_SCHEMA_MISMATCH,
                f"Expected object at {path}, got {type(data).__name__}"
            )

        for key, expected_value in schema.items():
            # All schema fields are optional: skip keys absent from data.
            # (Replaces a contradictory branch that skipped missing keys
            # for non-None schema values and trivially passed the rest.)
            if key not in data:
                continue

            field_path = f"{path}.{key}"
            actual_value = data[key]

            if isinstance(expected_value, dict):
                self._validate_object(actual_value, expected_value, field_path)
            elif isinstance(expected_value, list):
                self._validate_array(actual_value, expected_value, field_path)
            else:
                self._validate_type(actual_value, expected_value, field_path)

    def _validate_array(self, data: List, schema_array: List, path: str):
        """Validate array elements against the first schema element, if any."""
        if not isinstance(data, list):
            raise JSONError(
                ErrorCode.JSON_SCHEMA_MISMATCH,
                f"Expected array at {path}, got {type(data).__name__}"
            )

        # An empty schema array places no constraint on the elements.
        if schema_array:
            element_schema = schema_array[0]
            for i, item in enumerate(data):
                item_path = f"{path}[{i}]"
                if isinstance(element_schema, dict):
                    self._validate_object(item, element_schema, item_path)
                else:
                    self._validate_type(item, element_schema, item_path)

    def _validate_type(self, value: Any, expected: Any, path: str):
        """Validate primitive types by exact type match.

        Exceptions: None is accepted for any field, and int is accepted
        where the schema example is a float.
        """
        # Allow None for any field.
        if value is None:
            return

        expected_type = type(expected)
        actual_type = type(value)

        # Allow int for float fields (JSON "number" covers both).
        if expected_type is float and actual_type is int:
            return

        # Exact type identity on purpose: bool must not satisfy an int
        # field, and subclasses are not accepted.
        if actual_type is not expected_type:
            raise JSONError(
                ErrorCode.JSON_SCHEMA_MISMATCH,
                f"Type mismatch at {path}: expected {expected_type.__name__}, got {actual_type.__name__}"
            )
+
102
+
103
class InputSanitizer:
    """
    Sanitizes user input to prevent injection and ensure safety.
    """

    # Regexes matching content considered unsafe: script tags, javascript:
    # URLs, and inline event-handler attributes.
    DANGEROUS_PATTERNS = [
        re.compile(r'<script\b[^>]*>[\s\S]*?</script>', re.IGNORECASE),
        re.compile(r'javascript:', re.IGNORECASE),
        re.compile(r'on\w+\s*=', re.IGNORECASE),
    ]

    # Single-pass HTML entity escape table; equivalent to chained
    # .replace() calls with '&' handled first.
    _HTML_ESCAPE_TABLE = str.maketrans({
        '&': '&amp;',
        '<': '&lt;',
        '>': '&gt;',
        '"': '&quot;',
        "'": '&#x27;',
    })

    @classmethod
    def sanitize_string(cls, value: str, max_length: int = 10000) -> str:
        """Sanitize a string input: clip length, drop NUL bytes, escape HTML."""
        if not isinstance(value, str):
            # Non-strings are stringified as-is (no truncation/escaping).
            return str(value)

        clipped = value[:max_length].replace('\x00', '')
        return clipped.translate(cls._HTML_ESCAPE_TABLE)

    @classmethod
    def sanitize_json(cls, data: Union[Dict, List, str]) -> Union[Dict, List]:
        """Recursively sanitize JSON data (dicts, lists, strings)."""
        if isinstance(data, dict):
            return {key: cls.sanitize_json(val) for key, val in data.items()}
        if isinstance(data, list):
            return [cls.sanitize_json(element) for element in data]
        if isinstance(data, str):
            return cls.sanitize_string(data)
        # Numbers, booleans, None pass through unchanged.
        return data

    @classmethod
    def is_safe(cls, value: str) -> bool:
        """Check if a string is safe (no dangerous patterns)."""
        return not any(pattern.search(value) for pattern in cls.DANGEROUS_PATTERNS)
+
157
+
158
class RateLimiter:
    """
    Token bucket rate limiter for API endpoints.

    Each key gets its own bucket holding up to ``rate`` tokens, refilled
    continuously at ``rate / per_seconds`` tokens per second.
    """

    def __init__(self, rate: int = 100, per_seconds: int = 60):
        # Maximum tokens per bucket (also the burst size).
        self.rate = rate
        # Window over which `rate` requests are allowed.
        self.per_seconds = per_seconds
        # Per-key buckets: {"tokens": float, "last_update": epoch seconds}.
        self._buckets: Dict[str, Dict] = {}

    def _refill(self, key: str, now: float) -> Dict:
        """Get (creating if needed) the bucket for `key`, crediting tokens
        for the time elapsed since its last update. Returns the bucket."""
        bucket = self._buckets.setdefault(
            key, {"tokens": self.rate, "last_update": now}
        )
        time_passed = now - bucket["last_update"]
        tokens_to_add = time_passed * (self.rate / self.per_seconds)
        bucket["tokens"] = min(self.rate, bucket["tokens"] + tokens_to_add)
        bucket["last_update"] = now
        return bucket

    def is_allowed(self, key: str) -> bool:
        """Check if request is allowed for given key; consumes one token if so."""
        import time
        bucket = self._refill(key, time.time())

        if bucket["tokens"] >= 1:
            bucket["tokens"] -= 1
            return True
        return False

    def get_retry_after(self, key: str) -> float:
        """Get seconds until next request is allowed.

        Bug fix: the bucket is refilled before computing the wait, so time
        already elapsed since the last request is credited; previously the
        stale token count over-reported the delay.
        """
        import time
        if key not in self._buckets:
            return 0

        bucket = self._refill(key, time.time())
        if bucket["tokens"] >= 1:
            return 0

        tokens_needed = 1 - bucket["tokens"]
        return tokens_needed * (self.per_seconds / self.rate)
@@ -0,0 +1,8 @@
1
+ """
2
+ Qyro Gateway Service
3
+ API gateway for the polyglot runtime
4
+ """
5
+
6
+ from .gateway import QyroGateway
7
+
8
+ __all__ = ['QyroGateway']
@@ -0,0 +1,303 @@
1
+ """
2
+ Qyro Gateway Service
3
+ Provides a REST and WebSocket API for interacting with Qyro modules.
4
+ """
5
+
6
+ import asyncio
7
+ import asyncio
8
+ import json
9
+ import time
10
+ from typing import Dict, Any, Optional
11
+ from typing import Dict, Any, Optional
12
+ from fastapi import FastAPI, WebSocket, WebSocketDisconnect, HTTPException
13
+ from fastapi.middleware.cors import CORSMiddleware
14
+ import uvicorn
15
+ import uuid
16
+
17
+ from qyro.common.kafka_manager import KafkaManager
18
+ from qyro.common.config import QyroConfig
19
+ from qyro.common.logging import get_logger
20
+
21
+
22
+ logger = get_logger("Qyro.gateway")
23
+
24
+
25
class QyroGateway:
    """REST and WebSocket gateway for Qyro applications.

    Exposes REST endpoints (/, /health, /ready, /state) and a WebSocket
    endpoint (/ws). Traffic is bridged to Kafka when available, falling
    back to Redis pub/sub; state changes are broadcast to all connected
    WebSocket clients.
    """

    def __init__(self, host: str = "0.0.0.0", port: int = 8765, kafka_bootstrap_servers: str = "localhost:9092"):
        self.host = host
        self.port = port
        self.app = FastAPI(title="Qyro Gateway", version="2.0.0")

        # Initialize configuration
        self.config = QyroConfig(
            service_host=host,
            service_port=port,
            kafka_bootstrap_servers=kafka_bootstrap_servers
        )

        # Initialize Kafka manager (primary messaging backend).
        self.kafka_manager = None
        try:
            self.kafka_manager = KafkaManager(self.config)
        except Exception:
            logger.warning("Kafka setup failed, will try Redis fallback")

        # Initialize Redis memory (fallback messaging backend).
        from qyro.common.redis_memory import RedisQyroMemory
        self.redis_memory = None
        try:
            self.redis_memory = RedisQyroMemory(
                host=self.config.redis_host,
                port=self.config.redis_port,
                db=self.config.redis_db,
                password=self.config.redis_password
            )
            logger.info("Gateway connected to Redis")
        except Exception as e:
            logger.error(f"Gateway failed to connect to Redis: {e}")

        if not self.kafka_manager and not self.redis_memory:
            logger.warning("Gateway running without ANY messaging backend!")

        # WebSocket connections, keyed by client id. Bug fix: this must be
        # assigned BEFORE the listener thread starts, because the thread's
        # callbacks read it (previously a startup race).
        self.active_connections: Dict[str, WebSocket] = {}

        # Event loop owned by the listener thread; set by
        # _run_background_listeners and used to schedule coroutine
        # broadcasts from non-async (Redis callback) threads.
        self._listener_loop = None

        # Start background listeners
        import threading
        self._listener_thread = threading.Thread(target=self._run_background_listeners, daemon=True)
        self._listener_thread.start()

        # Setup FastAPI routes and middleware
        self._setup_routes()
        self._setup_middleware()

        logger.info(f"Qyro Gateway initialized on {host}:{port}")

    def _setup_middleware(self):
        """Setup FastAPI middleware."""
        self.app.add_middleware(
            CORSMiddleware,
            allow_origins=["*"],  # In production, configure specific origins
            allow_credentials=True,
            allow_methods=["*"],
            allow_headers=["*"],
        )

    def _setup_routes(self):
        """Setup FastAPI routes."""
        @self.app.get("/")
        async def root():
            return {"message": "Qyro Gateway v2.0.0", "status": "running"}

        @self.app.get("/health")
        async def health():
            return {"status": "healthy", "service": "Qyro-gateway"}

        @self.app.get("/ready")
        async def ready():
            # Bug fix: kafka_manager may be None (setup failure); guard
            # before dereferencing .producer.
            kafka_ready = (
                self.kafka_manager is not None
                and self.kafka_manager.producer is not None
            )
            return {"status": "ready", "kafka_connected": kafka_ready}

        @self.app.get("/state")
        async def get_state():
            """Get the current application state."""
            # This would typically fetch from Redis, but we broadcast a
            # request over Kafka and (eventually) collect module responses.
            # Bug fix: report 503 instead of crashing when Kafka is absent.
            if self.kafka_manager is None:
                raise HTTPException(status_code=503, detail="Kafka backend unavailable")
            try:
                request_id = str(uuid.uuid4())
                await self.kafka_manager.send_message(
                    topic=f"{self.config.kafka_topic_prefix}state_request",
                    message={"request_id": request_id, "action": "get_state"},
                    key=request_id
                )

                # In a real implementation, we'd wait for responses here
                # For now, return a placeholder
                return {"state": "placeholder", "request_id": request_id}
            except Exception as e:
                logger.error(f"Error getting state: {e}")
                raise HTTPException(status_code=500, detail=str(e))

        @self.app.post("/state")
        async def update_state(state: Dict[str, Any]):
            """Update the application state."""
            # Bug fix: guard against a missing Kafka backend (was an
            # AttributeError surfacing as a 500).
            if self.kafka_manager is None:
                raise HTTPException(status_code=503, detail="Kafka backend unavailable")
            try:
                await self.kafka_manager.publish_state_change(state, "gateway")
                return {"status": "success", "updated_keys": list(state.keys())}
            except Exception as e:
                logger.error(f"Error updating state: {e}")
                raise HTTPException(status_code=500, detail=str(e))

        @self.app.websocket("/ws")
        async def websocket_endpoint(websocket: WebSocket):
            await websocket.accept()
            client_id = str(uuid.uuid4())
            self.active_connections[client_id] = websocket
            logger.info(f"WebSocket client connected: {client_id}")

            try:
                # Subscribe to Kafka state changes and forward to WebSocket
                async def kafka_state_handler(state_diff):
                    if client_id in self.active_connections:
                        try:
                            await self.active_connections[client_id].send_text(json.dumps({
                                "type": "state_update",
                                "data": state_diff,
                                "timestamp": time.time()
                            }))
                        except Exception as e:
                            logger.error(f"Error sending to WebSocket: {e}")

                # Start listening for state changes
                if self.kafka_manager:
                    await self.kafka_manager.subscribe_to_state_changes(kafka_state_handler)

                # Handle incoming WebSocket messages
                while True:
                    data = await websocket.receive_text()
                    message = json.loads(data)

                    # Forward WebSocket messages to Kafka/Redis
                    if message.get("type") == "rpc_call":
                        request_id = str(uuid.uuid4())
                        if self.kafka_manager:
                            await self.kafka_manager.publish_rpc_request(
                                message.get("function", ""),
                                message.get("args", {}),
                                request_id
                            )
                        elif self.redis_memory:
                            # Simple Redis fallback for RPC (event publish).
                            event_data = {
                                "id": request_id,
                                "fn": message.get("function", ""),
                                "args": message.get("args", {}),
                                "caller": f"websocket_{client_id}"
                            }
                            # Publish to the generic events channel the
                            # Python adapter listens to (`Qyro:events`);
                            # the payload shape must match what
                            # PythonAdapter._dispatch_message expects:
                            # {"type": "function_call", "data": ...}.
                            full_event = {
                                "type": "function_call",
                                "module": "gateway",
                                "pid": "gateway",
                                "timestamp": time.time(),
                                "data": event_data
                            }
                            self.redis_memory.publish_event(full_event)

                    elif message.get("type") == "state_update":
                        if self.kafka_manager:
                            await self.kafka_manager.publish_state_change(
                                message.get("data", {}),
                                f"websocket_{client_id}"
                            )
                        elif self.redis_memory:
                            self.redis_memory.write(message.get("data", {}))

            except WebSocketDisconnect:
                logger.info(f"WebSocket client disconnected: {client_id}")
            except Exception as e:
                logger.error(f"WebSocket error: {e}")
                import traceback
                logger.error(traceback.format_exc())
            finally:
                if client_id in self.active_connections:
                    del self.active_connections[client_id]

    def _run_background_listeners(self):
        """Run background listeners for Redis/Kafka on a dedicated loop."""
        loop = asyncio.new_event_loop()
        asyncio.set_event_loop(loop)
        # Expose the loop so non-async threads can schedule broadcasts.
        self._listener_loop = loop

        async def listeners():
            if self.kafka_manager:
                try:
                    await self.kafka_manager.start_producer()
                    asyncio.create_task(self.start_kafka_listeners())
                except Exception as e:
                    logger.error(f"Kafka startup failed: {e}")
                    self.kafka_manager = None

            if self.redis_memory and not self.kafka_manager:
                logger.info("Using Redis listener fallback")
                # RedisQyroMemory.subscribe_to_changes runs its blocking
                # subscription on its own thread, so it is safe to call here.
                self.redis_memory.subscribe_to_changes(self._handle_redis_state_change)

        loop.run_until_complete(listeners())
        loop.run_forever()

    def _handle_redis_state_change(self, state):
        """Handle a state change from Redis (invoked on a Redis thread).

        Bug fix: schedule the broadcast onto the listener thread's loop;
        asyncio.get_event_loop() on a foreign thread has no running loop
        and raised/deprecated instead of broadcasting.
        """
        if self._listener_loop is not None:
            asyncio.run_coroutine_threadsafe(
                self._broadcast_state_to_websockets(state), self._listener_loop
            )

    async def _broadcast_state_to_websockets(self, state):
        """Broadcast state to all connected clients."""
        message = json.dumps({
            "type": "state_update",
            "data": state,
            "timestamp": time.time()
        })
        disconnected = []
        for client_id, ws in self.active_connections.items():
            try:
                await ws.send_text(message)
            except Exception:  # narrowed from a bare except (was catching SystemExit etc.)
                disconnected.append(client_id)
        for client_id in disconnected:
            del self.active_connections[client_id]

    async def start_kafka_listeners(self):
        """Start Kafka listeners for gateway functionality."""
        # Listen for module events
        async def module_event_handler(event):
            # Broadcast module events to all WebSocket connections
            message = {
                "type": "module_event",
                "data": event,
                "timestamp": asyncio.get_event_loop().time()
            }

            disconnected_clients = []
            for client_id, websocket in self.active_connections.items():
                try:
                    await websocket.send_text(json.dumps(message))
                except Exception:
                    disconnected_clients.append(client_id)

            # Clean up disconnected clients
            for client_id in disconnected_clients:
                if client_id in self.active_connections:
                    del self.active_connections[client_id]

        # Subscribe to module events
        try:
            if self.kafka_manager:
                await self.kafka_manager.consume_messages(
                    f"{self.config.kafka_topic_prefix}module_events",
                    module_event_handler
                )
        except Exception as e:
            logger.error(f"Error starting Kafka listeners: {e}")

    def start(self):
        """Start the Qyro gateway service (blocking uvicorn server)."""
        logger.info(f"Starting Qyro Gateway on {self.host}:{self.port}")

        # Start the FastAPI server
        uvicorn.run(
            self.app,
            host=self.host,
            port=self.port,
            log_level="info" if not self.config.debug else "debug"
        )
@@ -0,0 +1,8 @@
1
+ """
2
+ Qyro Orchestrator Service
3
+ Process orchestrator for the polyglot runtime
4
+ """
5
+
6
+ from .orchestrator import QyroOrchestrator
7
+
8
+ __all__ = ['QyroOrchestrator']