zexus 1.6.2
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/LICENSE +0 -0
- package/README.md +2513 -0
- package/bin/zexus +2 -0
- package/bin/zpics +2 -0
- package/bin/zpm +2 -0
- package/bin/zx +2 -0
- package/bin/zx-deploy +2 -0
- package/bin/zx-dev +2 -0
- package/bin/zx-run +2 -0
- package/package.json +66 -0
- package/scripts/README.md +24 -0
- package/scripts/postinstall.js +44 -0
- package/shared_config.json +24 -0
- package/src/README.md +1525 -0
- package/src/tests/run_zexus_tests.py +117 -0
- package/src/tests/test_all_phases.zx +346 -0
- package/src/tests/test_blockchain_features.zx +306 -0
- package/src/tests/test_complexity_features.zx +321 -0
- package/src/tests/test_core_integration.py +185 -0
- package/src/tests/test_phase10_ecosystem.zx +177 -0
- package/src/tests/test_phase1_modifiers.zx +87 -0
- package/src/tests/test_phase2_plugins.zx +80 -0
- package/src/tests/test_phase3_security.zx +97 -0
- package/src/tests/test_phase4_vfs.zx +116 -0
- package/src/tests/test_phase5_types.zx +117 -0
- package/src/tests/test_phase6_metaprogramming.zx +125 -0
- package/src/tests/test_phase7_optimization.zx +132 -0
- package/src/tests/test_phase9_advanced_types.zx +157 -0
- package/src/tests/test_security_features.py +419 -0
- package/src/tests/test_security_features.zx +276 -0
- package/src/tests/test_simple_zx.zx +1 -0
- package/src/tests/test_verification_simple.zx +69 -0
- package/src/zexus/__init__.py +28 -0
- package/src/zexus/__main__.py +5 -0
- package/src/zexus/__pycache__/__init__.cpython-312.pyc +0 -0
- package/src/zexus/__pycache__/advanced_types.cpython-312.pyc +0 -0
- package/src/zexus/__pycache__/builtin_modules.cpython-312.pyc +0 -0
- package/src/zexus/__pycache__/capability_system.cpython-312.pyc +0 -0
- package/src/zexus/__pycache__/complexity_system.cpython-312.pyc +0 -0
- package/src/zexus/__pycache__/concurrency_system.cpython-312.pyc +0 -0
- package/src/zexus/__pycache__/config.cpython-312.pyc +0 -0
- package/src/zexus/__pycache__/dependency_injection.cpython-312.pyc +0 -0
- package/src/zexus/__pycache__/ecosystem.cpython-312.pyc +0 -0
- package/src/zexus/__pycache__/environment.cpython-312.pyc +0 -0
- package/src/zexus/__pycache__/error_reporter.cpython-312.pyc +0 -0
- package/src/zexus/__pycache__/hybrid_orchestrator.cpython-312.pyc +0 -0
- package/src/zexus/__pycache__/lexer.cpython-312.pyc +0 -0
- package/src/zexus/__pycache__/metaprogramming.cpython-312.pyc +0 -0
- package/src/zexus/__pycache__/module_cache.cpython-312.pyc +0 -0
- package/src/zexus/__pycache__/object.cpython-312.pyc +0 -0
- package/src/zexus/__pycache__/optimization.cpython-312.pyc +0 -0
- package/src/zexus/__pycache__/plugin_system.cpython-312.pyc +0 -0
- package/src/zexus/__pycache__/policy_engine.cpython-312.pyc +0 -0
- package/src/zexus/__pycache__/security.cpython-312.pyc +0 -0
- package/src/zexus/__pycache__/stdlib_integration.cpython-312.pyc +0 -0
- package/src/zexus/__pycache__/strategy_recovery.cpython-312.pyc +0 -0
- package/src/zexus/__pycache__/syntax_validator.cpython-312.pyc +0 -0
- package/src/zexus/__pycache__/type_system.cpython-312.pyc +0 -0
- package/src/zexus/__pycache__/virtual_filesystem.cpython-312.pyc +0 -0
- package/src/zexus/__pycache__/zexus_ast.cpython-312.pyc +0 -0
- package/src/zexus/__pycache__/zexus_token.cpython-312.pyc +0 -0
- package/src/zexus/advanced_types.py +401 -0
- package/src/zexus/blockchain/__init__.py +40 -0
- package/src/zexus/blockchain/__pycache__/__init__.cpython-312.pyc +0 -0
- package/src/zexus/blockchain/__pycache__/crypto.cpython-312.pyc +0 -0
- package/src/zexus/blockchain/__pycache__/ledger.cpython-312.pyc +0 -0
- package/src/zexus/blockchain/__pycache__/transaction.cpython-312.pyc +0 -0
- package/src/zexus/blockchain/crypto.py +463 -0
- package/src/zexus/blockchain/ledger.py +255 -0
- package/src/zexus/blockchain/transaction.py +267 -0
- package/src/zexus/builtin_modules.py +284 -0
- package/src/zexus/builtin_plugins.py +317 -0
- package/src/zexus/capability_system.py +372 -0
- package/src/zexus/cli/__init__.py +2 -0
- package/src/zexus/cli/__pycache__/__init__.cpython-312.pyc +0 -0
- package/src/zexus/cli/__pycache__/main.cpython-312.pyc +0 -0
- package/src/zexus/cli/main.py +707 -0
- package/src/zexus/cli/zpm.py +203 -0
- package/src/zexus/compare_interpreter_compiler.py +146 -0
- package/src/zexus/compiler/__init__.py +169 -0
- package/src/zexus/compiler/__pycache__/__init__.cpython-312.pyc +0 -0
- package/src/zexus/compiler/__pycache__/lexer.cpython-312.pyc +0 -0
- package/src/zexus/compiler/__pycache__/parser.cpython-312.pyc +0 -0
- package/src/zexus/compiler/__pycache__/zexus_ast.cpython-312.pyc +0 -0
- package/src/zexus/compiler/bytecode.py +266 -0
- package/src/zexus/compiler/compat_runtime.py +277 -0
- package/src/zexus/compiler/lexer.py +257 -0
- package/src/zexus/compiler/parser.py +779 -0
- package/src/zexus/compiler/semantic.py +118 -0
- package/src/zexus/compiler/zexus_ast.py +454 -0
- package/src/zexus/complexity_system.py +575 -0
- package/src/zexus/concurrency_system.py +493 -0
- package/src/zexus/config.py +201 -0
- package/src/zexus/crypto_bridge.py +19 -0
- package/src/zexus/dependency_injection.py +423 -0
- package/src/zexus/ecosystem.py +434 -0
- package/src/zexus/environment.py +101 -0
- package/src/zexus/environment_manager.py +119 -0
- package/src/zexus/error_reporter.py +314 -0
- package/src/zexus/evaluator/__init__.py +12 -0
- package/src/zexus/evaluator/__pycache__/__init__.cpython-312.pyc +0 -0
- package/src/zexus/evaluator/__pycache__/bytecode_compiler.cpython-312.pyc +0 -0
- package/src/zexus/evaluator/__pycache__/core.cpython-312.pyc +0 -0
- package/src/zexus/evaluator/__pycache__/expressions.cpython-312.pyc +0 -0
- package/src/zexus/evaluator/__pycache__/functions.cpython-312.pyc +0 -0
- package/src/zexus/evaluator/__pycache__/integration.cpython-312.pyc +0 -0
- package/src/zexus/evaluator/__pycache__/statements.cpython-312.pyc +0 -0
- package/src/zexus/evaluator/__pycache__/utils.cpython-312.pyc +0 -0
- package/src/zexus/evaluator/bytecode_compiler.py +700 -0
- package/src/zexus/evaluator/core.py +891 -0
- package/src/zexus/evaluator/expressions.py +827 -0
- package/src/zexus/evaluator/functions.py +3989 -0
- package/src/zexus/evaluator/integration.py +396 -0
- package/src/zexus/evaluator/statements.py +4303 -0
- package/src/zexus/evaluator/utils.py +126 -0
- package/src/zexus/evaluator_original.py +2041 -0
- package/src/zexus/external_bridge.py +16 -0
- package/src/zexus/find_affected_imports.sh +155 -0
- package/src/zexus/hybrid_orchestrator.py +152 -0
- package/src/zexus/input_validation.py +259 -0
- package/src/zexus/lexer.py +571 -0
- package/src/zexus/logging.py +89 -0
- package/src/zexus/lsp/__init__.py +9 -0
- package/src/zexus/lsp/completion_provider.py +207 -0
- package/src/zexus/lsp/definition_provider.py +22 -0
- package/src/zexus/lsp/hover_provider.py +71 -0
- package/src/zexus/lsp/server.py +269 -0
- package/src/zexus/lsp/symbol_provider.py +31 -0
- package/src/zexus/metaprogramming.py +321 -0
- package/src/zexus/module_cache.py +89 -0
- package/src/zexus/module_manager.py +107 -0
- package/src/zexus/object.py +973 -0
- package/src/zexus/optimization.py +424 -0
- package/src/zexus/parser/__init__.py +31 -0
- package/src/zexus/parser/__pycache__/__init__.cpython-312.pyc +0 -0
- package/src/zexus/parser/__pycache__/parser.cpython-312.pyc +0 -0
- package/src/zexus/parser/__pycache__/strategy_context.cpython-312.pyc +0 -0
- package/src/zexus/parser/__pycache__/strategy_structural.cpython-312.pyc +0 -0
- package/src/zexus/parser/integration.py +86 -0
- package/src/zexus/parser/parser.py +3977 -0
- package/src/zexus/parser/strategy_context.py +7254 -0
- package/src/zexus/parser/strategy_structural.py +1033 -0
- package/src/zexus/persistence.py +391 -0
- package/src/zexus/plugin_system.py +290 -0
- package/src/zexus/policy_engine.py +365 -0
- package/src/zexus/profiler/__init__.py +5 -0
- package/src/zexus/profiler/profiler.py +233 -0
- package/src/zexus/purity_system.py +398 -0
- package/src/zexus/runtime/__init__.py +20 -0
- package/src/zexus/runtime/async_runtime.py +324 -0
- package/src/zexus/search_old_imports.sh +65 -0
- package/src/zexus/security.py +1407 -0
- package/src/zexus/stack_trace.py +233 -0
- package/src/zexus/stdlib/__init__.py +27 -0
- package/src/zexus/stdlib/blockchain.py +341 -0
- package/src/zexus/stdlib/compression.py +167 -0
- package/src/zexus/stdlib/crypto.py +124 -0
- package/src/zexus/stdlib/datetime.py +163 -0
- package/src/zexus/stdlib/db_mongo.py +199 -0
- package/src/zexus/stdlib/db_mysql.py +162 -0
- package/src/zexus/stdlib/db_postgres.py +163 -0
- package/src/zexus/stdlib/db_sqlite.py +133 -0
- package/src/zexus/stdlib/encoding.py +230 -0
- package/src/zexus/stdlib/fs.py +195 -0
- package/src/zexus/stdlib/http.py +219 -0
- package/src/zexus/stdlib/http_server.py +248 -0
- package/src/zexus/stdlib/json_module.py +61 -0
- package/src/zexus/stdlib/math.py +360 -0
- package/src/zexus/stdlib/os_module.py +265 -0
- package/src/zexus/stdlib/regex.py +148 -0
- package/src/zexus/stdlib/sockets.py +253 -0
- package/src/zexus/stdlib/test_framework.zx +208 -0
- package/src/zexus/stdlib/test_runner.zx +119 -0
- package/src/zexus/stdlib_integration.py +341 -0
- package/src/zexus/strategy_recovery.py +256 -0
- package/src/zexus/syntax_validator.py +356 -0
- package/src/zexus/testing/zpics.py +407 -0
- package/src/zexus/testing/zpics_runtime.py +369 -0
- package/src/zexus/type_system.py +374 -0
- package/src/zexus/validation_system.py +569 -0
- package/src/zexus/virtual_filesystem.py +355 -0
- package/src/zexus/vm/__init__.py +8 -0
- package/src/zexus/vm/__pycache__/__init__.cpython-312.pyc +0 -0
- package/src/zexus/vm/__pycache__/async_optimizer.cpython-312.pyc +0 -0
- package/src/zexus/vm/__pycache__/bytecode.cpython-312.pyc +0 -0
- package/src/zexus/vm/__pycache__/cache.cpython-312.pyc +0 -0
- package/src/zexus/vm/__pycache__/jit.cpython-312.pyc +0 -0
- package/src/zexus/vm/__pycache__/memory_manager.cpython-312.pyc +0 -0
- package/src/zexus/vm/__pycache__/memory_pool.cpython-312.pyc +0 -0
- package/src/zexus/vm/__pycache__/optimizer.cpython-312.pyc +0 -0
- package/src/zexus/vm/__pycache__/parallel_vm.cpython-312.pyc +0 -0
- package/src/zexus/vm/__pycache__/peephole_optimizer.cpython-312.pyc +0 -0
- package/src/zexus/vm/__pycache__/profiler.cpython-312.pyc +0 -0
- package/src/zexus/vm/__pycache__/register_allocator.cpython-312.pyc +0 -0
- package/src/zexus/vm/__pycache__/register_vm.cpython-312.pyc +0 -0
- package/src/zexus/vm/__pycache__/ssa_converter.cpython-312.pyc +0 -0
- package/src/zexus/vm/__pycache__/vm.cpython-312.pyc +0 -0
- package/src/zexus/vm/async_optimizer.py +420 -0
- package/src/zexus/vm/bytecode.py +428 -0
- package/src/zexus/vm/bytecode_converter.py +297 -0
- package/src/zexus/vm/cache.py +532 -0
- package/src/zexus/vm/jit.py +720 -0
- package/src/zexus/vm/memory_manager.py +520 -0
- package/src/zexus/vm/memory_pool.py +511 -0
- package/src/zexus/vm/optimizer.py +478 -0
- package/src/zexus/vm/parallel_vm.py +899 -0
- package/src/zexus/vm/peephole_optimizer.py +452 -0
- package/src/zexus/vm/profiler.py +527 -0
- package/src/zexus/vm/register_allocator.py +462 -0
- package/src/zexus/vm/register_vm.py +520 -0
- package/src/zexus/vm/ssa_converter.py +757 -0
- package/src/zexus/vm/vm.py +1392 -0
- package/src/zexus/zexus_ast.py +1782 -0
- package/src/zexus/zexus_token.py +253 -0
- package/src/zexus/zpm/__init__.py +15 -0
- package/src/zexus/zpm/installer.py +116 -0
- package/src/zexus/zpm/package_manager.py +208 -0
- package/src/zexus/zpm/publisher.py +98 -0
- package/src/zexus/zpm/registry.py +110 -0
- package/src/zexus.egg-info/PKG-INFO +2235 -0
- package/src/zexus.egg-info/SOURCES.txt +876 -0
- package/src/zexus.egg-info/dependency_links.txt +1 -0
- package/src/zexus.egg-info/entry_points.txt +3 -0
- package/src/zexus.egg-info/not-zip-safe +1 -0
- package/src/zexus.egg-info/requires.txt +14 -0
- package/src/zexus.egg-info/top_level.txt +2 -0
- package/zexus.json +14 -0
# src/zexus/security.py

"""
Advanced Security and Contract Features for Zexus

This module implements entity, verify, contract, and protect statements
providing a powerful security framework for Zexus programs.
"""

import os
import json
import uuid
import sqlite3
import time

# Try importing advanced database drivers.  The availability flags let the
# rest of the module degrade gracefully when an optional backend is absent.
try:
    import plyvel  # For LevelDB
    _LEVELDB_AVAILABLE = True
except ImportError:
    _LEVELDB_AVAILABLE = False

try:
    import rocksdb  # For RocksDB
    _ROCKSDB_AVAILABLE = True
except ImportError:
    _ROCKSDB_AVAILABLE = False

from .object import (
    Environment, Map, String, Integer, Float, Boolean as BooleanObj,
    Builtin, List, Null, EvaluationError as ObjectEvaluationError
)

# Ensure storage directory exists.  exist_ok=True avoids the race between an
# existence check and the creation when several interpreters start at once.
STORAGE_DIR = "chain_data"
os.makedirs(STORAGE_DIR, exist_ok=True)

# Audit logging directory
AUDIT_DIR = os.path.join(STORAGE_DIR, "audit_logs")
os.makedirs(AUDIT_DIR, exist_ok=True)
class AuditLog:
    """Comprehensive audit logging system for compliance tracking.

    Maintains an audit trail of data access, modifications, and sensitive
    operations for regulatory compliance (GDPR, HIPAA, SOC2, etc.).
    """

    def __init__(self, max_entries=10000, persist_to_file=False):
        # In-memory log; once max_entries is exceeded the oldest entries drop.
        self.entries = []
        self.max_entries = max_entries
        self.persist_to_file = persist_to_file
        # Each AuditLog instance writes to its own uniquely-named JSONL file.
        self.audit_file = os.path.join(AUDIT_DIR, f"audit_{uuid.uuid4().hex[:8]}.jsonl")

    def log(self, data_name, action, data_type, timestamp=None, additional_context=None):
        """Record a single audit entry and return it.

        Args:
            data_name: Name of the data being audited
            action: Action type (access, modification, deletion, etc.)
            data_type: Type of data (STRING, MAP, ARRAY, FUNCTION, etc.)
            timestamp: Optional ISO 8601 timestamp (auto-generated if None)
            additional_context: Optional dict with additional audit context

        Returns:
            Audit entry dict
        """
        import datetime

        if timestamp is None:
            timestamp = datetime.datetime.now(datetime.timezone.utc).isoformat()

        entry = {
            "id": str(uuid.uuid4()),
            "data_name": data_name,
            "action": action,
            "data_type": data_type,
            "timestamp": timestamp,
            "context": additional_context or {},
        }
        self.entries.append(entry)

        # Keep only the newest max_entries records.
        if len(self.entries) > self.max_entries:
            self.entries = self.entries[-self.max_entries:]

        if self.persist_to_file:
            self._write_entry_to_file(entry)
        return entry

    def _write_entry_to_file(self, entry):
        """Append one audit entry to the JSONL file (best-effort)."""
        try:
            with open(self.audit_file, 'a') as f:
                f.write(json.dumps(entry) + '\n')
        except IOError as e:
            print(f"Warning: Could not write audit log to file: {e}")

    def get_entries(self, data_name=None, action=None, limit=None):
        """Query audit log entries.

        Args:
            data_name: Filter by data name (optional)
            action: Filter by action type (optional)
            limit: Limit number of results, newest first kept (optional)

        Returns:
            List of matching audit entries
        """
        matches = [
            e for e in self.entries
            if (not data_name or e['data_name'] == data_name)
            and (not action or e['action'] == action)
        ]
        return matches[-limit:] if limit else matches

    def clear(self):
        """Clear in-memory audit log."""
        self.entries = []

    def export_to_file(self, filename):
        """Export the entire audit log to *filename* as pretty-printed JSON."""
        try:
            with open(filename, 'w') as f:
                json.dump(self.entries, f, indent=2)
        except IOError as e:
            print(f"Warning: Could not export audit log: {e}")
            return False
        return True

    def __repr__(self):
        return f"AuditLog(entries={len(self.entries)}, max={self.max_entries})"
class SecurityContext:
    """Global security context for enforcement.

    Central registry used by the interpreter's security statements: it holds
    verification checks, protection rules, contracts, middleware, field-level
    restrictions, audit trails (with pluggable sinks), and sandbox policies.
    """

    def __init__(self):
        self.verify_checks = {}      # Registered verification checks
        self.protections = {}        # Active protection rules
        self.contracts = {}          # Deployed contracts
        self.middlewares = {}        # Registered middleware
        self.auth_config = None      # Global auth configuration
        self.cache_store = {}        # Caching store
        self.audit_log = AuditLog()  # Audit logging system
        # Registries for new commands/integration
        self.restrictions = {}       # id -> restriction entry
        self._restrictions_index = {}# target.field -> id
        self.trails = {}             # id -> trail config
        self.sandbox_runs = {}       # id -> sandbox run metadata
        # Sandbox policy store: name -> {allowed_builtins: set(...)}
        self.sandbox_policies = {}
        # Trail sinks: list of sink configs (type: 'file'|'stdout'|'callback')
        self.trail_sinks = []

    def log_audit(self, data_name, action, data_type, timestamp=None, context=None):
        """Log an audit entry through the security context.

        Thin delegation to ``self.audit_log.log``.

        Args:
            data_name: Name of data being audited
            action: Action type (access, modification, deletion, etc.)
            data_type: Type of data
            timestamp: Optional ISO 8601 timestamp
            context: Optional additional audit context

        Returns:
            Audit entry dict
        """
        return self.audit_log.log(data_name, action, data_type, timestamp, context)

    # -------------------------------
    # Restriction registry
    # -------------------------------
    def register_restriction(self, target, field, restriction_type, author=None, timestamp=None):
        """Register a field-level restriction.

        Args:
            target: full target string (e.g. 'user.email')
            field: property name (e.g. 'email')
            restriction_type: string describing rule (e.g. 'read-only')
            author: optional actor applying the restriction
            timestamp: ISO timestamp (auto-generated if None)

        Returns:
            restriction entry dict
        """
        import datetime
        rid = str(uuid.uuid4())
        if timestamp is None:
            timestamp = datetime.datetime.now(datetime.timezone.utc).isoformat()

        entry = {
            'id': rid,
            'target': target,
            'field': field,
            'restriction': restriction_type,
            'author': author,
            'timestamp': timestamp
        }

        self.restrictions[rid] = entry
        # index by full path for quick lookup (store latest)
        self._restrictions_index[f"{target}"] = rid
        return entry

    def get_restriction(self, target, field=None):
        """Lookup a restriction by target (and optional field). Returns entry or None.

        NOTE(review): `field` is currently unused — lookup is by the full
        `target` string only; confirm intent before relying on `field`.
        """
        key = f"{target}"
        rid = self._restrictions_index.get(key)
        if not rid:
            return None
        return self.restrictions.get(rid)

    def list_restrictions(self):
        """Return all registered restriction entries as a list."""
        return list(self.restrictions.values())

    def remove_restriction(self, rid):
        """Remove restriction *rid*; returns True if it existed.

        Also drops the target index entry, but only when it still points at
        this rid (a newer restriction for the same target keeps its index).
        """
        entry = self.restrictions.pop(rid, None)
        if entry:
            k = entry.get('target')
            if k and self._restrictions_index.get(k) == rid:
                del self._restrictions_index[k]
            return True
        return False

    # -------------------------------
    # Trail registry
    # -------------------------------
    def register_trail(self, event_type, filter_key=None, author=None, timestamp=None):
        """Register an event trail.

        Args:
            event_type: event type to match, or '*' for all events
            filter_key: optional payload filter (substring, 'key:value', or 're:<regex>')
            author: optional actor registering the trail
            timestamp: ISO timestamp (auto-generated if None)

        Returns:
            trail entry dict (starts enabled)
        """
        import datetime
        tid = str(uuid.uuid4())
        if timestamp is None:
            timestamp = datetime.datetime.now(datetime.timezone.utc).isoformat()
        entry = {
            'id': tid,
            'type': event_type,
            'filter': filter_key,
            'author': author,
            'timestamp': timestamp,
            'enabled': True
        }
        self.trails[tid] = entry
        return entry

    def list_trails(self):
        """Return all registered trail configs as a list."""
        return list(self.trails.values())

    def remove_trail(self, tid):
        """Remove trail *tid*; returns True if it existed."""
        if tid in self.trails:
            del self.trails[tid]
            return True
        return False

    # -------------------------------
    # Sandbox run registry
    # -------------------------------
    def register_sandbox_run(self, parent_context=None, policy=None, result_summary=None, timestamp=None):
        """Record metadata for a sandboxed execution run.

        Args:
            parent_context: context the sandbox was spawned from
            policy: sandbox policy applied to the run
            result_summary: summary of the run's outcome
            timestamp: ISO timestamp (auto-generated if None)

        Returns:
            sandbox run entry dict
        """
        import datetime
        sid = str(uuid.uuid4())
        if timestamp is None:
            timestamp = datetime.datetime.now(datetime.timezone.utc).isoformat()
        entry = {
            'id': sid,
            'parent': parent_context,
            'policy': policy,
            'result': result_summary,
            'timestamp': timestamp
        }
        self.sandbox_runs[sid] = entry
        return entry

    def list_sandbox_runs(self):
        """Return all recorded sandbox run entries as a list."""
        return list(self.sandbox_runs.values())

    # -------------------------------
    # Sandbox policy management
    # -------------------------------
    def register_sandbox_policy(self, name, allowed_builtins=None):
        """Register a sandbox policy by name. `allowed_builtins` is an iterable of builtin names allowed inside the sandbox.

        If `allowed_builtins` is None, the policy is permissive (allows all).
        """
        if allowed_builtins is None:
            allowed_set = None
        else:
            allowed_set = set(allowed_builtins)
        self.sandbox_policies[name] = {'allowed_builtins': allowed_set}
        return self.sandbox_policies[name]

    def get_sandbox_policy(self, name):
        """Return the policy dict for *name*, or None if unregistered."""
        return self.sandbox_policies.get(name)

    # -------------------------------
    # Trail sink management
    # -------------------------------
    def register_trail_sink(self, sink_type, **kwargs):
        """Register a trail sink.

        sink_type: 'stdout' | 'file' | 'callback' | 'sqlite'
        kwargs: for 'file' provide `path`; for 'callback' provide `callback` callable.

        Returns the stored sink config dict.
        """
        sink = {'type': sink_type}
        sink.update(kwargs)
        self.trail_sinks.append(sink)
        return sink


    # -------------------------------
    # Event dispatcher (Trail integration)
    # -------------------------------
    def emit_event(self, event_type, payload):
        """Emit an event through the trail registry.

        - Matches active trails by `type` or `*`.
        - Applies simple substring filter matching against stringified payload.
        - For matching trails, record a derived audit entry and also print to stdout.

        Delivery to sinks is best-effort: every sink failure is swallowed so
        one broken sink cannot block the others or the caller.
        """
        try:
            # simple stringify payload for filtering
            payload_str = json.dumps(payload) if not isinstance(payload, str) else payload
        except Exception:
            try:
                payload_str = str(payload)
            except Exception:
                payload_str = "<unserializable>"

        # Snapshot trails so registration during dispatch doesn't break iteration.
        for tid, trail in list(self.trails.items()):
            ttype = trail.get('type')
            flt = trail.get('filter')
            # type match or wildcard
            if ttype != '*' and ttype != event_type:
                continue

            # filter match if provided — support substring, key:value, and regex (prefix 're:')
            if flt and flt != '*':
                matched = False
                try:
                    if isinstance(flt, str) and flt.startswith('re:'):
                        import re
                        pattern = flt[3:]
                        if re.search(pattern, payload_str):
                            matched = True
                    elif isinstance(flt, str) and ':' in flt:
                        # key:value pattern
                        k, v = flt.split(':', 1)
                        try:
                            # if payload is JSON object, check key
                            p_obj = json.loads(payload_str)
                            if isinstance(p_obj, dict) and k in p_obj and v in str(p_obj.get(k)):
                                matched = True
                        except Exception:
                            # Not valid JSON — fall back to plain substring checks.
                            if k in payload_str and v in payload_str:
                                matched = True
                    else:
                        if flt in payload_str:
                            matched = True
                except Exception:
                    matched = False

                if not matched:
                    continue

            # Create audit-like entry for the trail event
            entry = {
                'id': str(uuid.uuid4()),
                'trail_id': tid,
                'event_type': event_type,
                'payload': payload_str,
                'timestamp': time.time()
            }
            # Persist to audit log if enabled
            try:
                self.audit_log.entries.append(entry)
            except Exception:
                pass

            # Deliver to configured sinks
            for sink in list(self.trail_sinks):
                try:
                    stype = sink.get('type')
                    if stype == 'stdout':
                        print(f"[TRAIL:{tid}] {event_type} -> {payload_str}")
                    elif stype == 'file':
                        # Append one JSON line per event; default path under AUDIT_DIR.
                        path = sink.get('path') or os.path.join(AUDIT_DIR, 'trails.jsonl')
                        try:
                            with open(path, 'a', encoding='utf-8') as sf:
                                sf.write(json.dumps(entry) + '\n')
                        except Exception:
                            pass
                    elif stype == 'sqlite':
                        db_path = sink.get('db_path') or os.path.join(STORAGE_DIR, 'trails.db')
                        try:
                            # Opens a fresh connection per event; table created on demand.
                            conn = sqlite3.connect(db_path, check_same_thread=False)
                            cur = conn.cursor()
                            cur.execute('''CREATE TABLE IF NOT EXISTS trails (
                                id TEXT PRIMARY KEY,
                                trail_id TEXT,
                                event_type TEXT,
                                payload TEXT,
                                timestamp REAL
                            )''')
                            cur.execute('INSERT OR REPLACE INTO trails (id, trail_id, event_type, payload, timestamp) VALUES (?,?,?,?,?)', (
                                entry['id'], entry['trail_id'], entry['event_type'], entry['payload'], entry['timestamp']
                            ))
                            conn.commit()
                            conn.close()
                        except Exception:
                            pass
                    elif stype == 'callback':
                        cb = sink.get('callback')
                        try:
                            if callable(cb):
                                cb(entry)
                        except Exception:
                            pass
                except Exception:
                    pass


    def register_verify_check(self, name, check_func):
        """Register a verification check function"""
        self.verify_checks[name] = check_func

    def register_protection(self, name, rules):
        """Register a protection rule set"""
        self.protections[name] = rules

    def register_contract(self, name, contract):
        """Register a smart contract"""
        self.contracts[name] = contract

    def check_protection(self, target_name, context_data):
        """Check if target access is protected.

        Returns True when access is allowed, False when any rule denies it.
        Rules checked: auth requirement, simple rate limit, blocked/allowed IPs.
        """
        if target_name not in self.protections:
            return True  # No protection = allowed

        rules = self.protections[target_name]

        # Check authentication requirement
        if rules.get("auth_required", False):
            if not context_data.get("authenticated"):
                return False

        # Check rate limiting
        rate_limit = rules.get("rate_limit")
        if rate_limit:
            # Simple rate limit check (production would use timestamp tracking)
            if context_data.get("request_count", 0) > rate_limit:
                return False

        # Check IP restrictions
        client_ip = context_data.get("client_ip")
        if client_ip:
            blocked_ips = rules.get("blocked_ips", [])
            if _is_ip_in_list(client_ip, blocked_ips):
                return False

            allowed_ips = rules.get("allowed_ips")
            if allowed_ips and not _is_ip_in_list(client_ip, allowed_ips):
                return False

        return True
# Global security context
#
# Single module-level SecurityContext shared by the whole interpreter; all
# verify/protect/contract registrations funnel through this one instance.
_security_context = SecurityContext()


def get_security_context():
    """Get the global security context"""
    return _security_context
def _is_ip_in_list(ip, ip_list):
|
|
486
|
+
"""Check if IP matches CIDR or exact match in list"""
|
|
487
|
+
for pattern in ip_list:
|
|
488
|
+
if "/" in pattern: # CIDR notation
|
|
489
|
+
# Simplified CIDR check (would need proper IP math for production)
|
|
490
|
+
network_part = pattern.split("/")[0]
|
|
491
|
+
if ip.startswith(network_part.rsplit(".", 1)[0]):
|
|
492
|
+
return True
|
|
493
|
+
elif ip == pattern: # Exact match
|
|
494
|
+
return True
|
|
495
|
+
return False
|
|
496
|
+
|
|
497
|
+
|
|
498
|
+
# ===============================================
|
|
499
|
+
# ENTITY SYSTEM - Object-Oriented Data Structures
|
|
500
|
+
# ===============================================
|
|
501
|
+
|
|
502
|
+
class EntityDefinition:
    """Represents an entity definition with properties and methods."""

    def __init__(self, name, properties, methods=None, parent=None):
        self.name = name
        self.properties = properties  # {prop_name: {type, default_value}}
        self.methods = methods or {}  # {method_name: Action}
        self.parent = parent          # Parent entity (inheritance)

    def create_instance(self, values=None):
        """Create an instance of this entity with dependency injection support.

        Args:
            values: Optional dict of initial property values.

        Returns:
            EntityInstance populated with *values* plus any injectable
            dependencies resolved from the DI container.
        """
        injected_values = values or {}

        # Resolve dependencies only for entities explicitly marked with
        # injected_deps (the attribute is set externally by the DI integration).
        if hasattr(self, 'injected_deps') and self.injected_deps:
            from zexus.dependency_injection import get_di_registry

            registry = get_di_registry()
            # Use __main__ as default module context
            container = registry.get_container("__main__")

            for dep_name in self.injected_deps:
                if dep_name not in injected_values:
                    # Try to inject from DI container
                    try:
                        injected_values[dep_name] = container.get(dep_name)
                    except Exception:
                        # Dependency not available - use NULL placeholder so the
                        # instance is still constructible.  (Narrowed from
                        # BaseException so KeyboardInterrupt/SystemExit propagate.)
                        from zexus.object import NULL
                        injected_values[dep_name] = NULL

        return EntityInstance(self, injected_values)

    def get_all_properties(self):
        """Get all properties including inherited ones, in correct order (parent first, then child)."""
        props = {}
        # Parent properties first so child definitions can override them.
        if self.parent:
            props.update(self.parent.get_all_properties())
        # Then add/override with child properties
        props.update(self.properties)
        return props
class EntityInstance:
    """Represents an instance of an entity.

    Holds the instance data dict plus a reference back to its
    EntityDefinition, and dispatches method calls through the Zexus
    evaluator with ``this`` bound to the instance.
    """

    def __init__(self, entity_def, values):
        self.entity_def = entity_def
        self.data = values or {}
        self._validate_properties()

    def _validate_properties(self):
        """Validate that all required properties are present and inject dependencies.

        For each declared property missing from ``self.data``: DI-marked
        properties are resolved from the "__main__" container (NULL on
        failure); otherwise the declared default value is applied.
        """
        all_props = self.entity_def.get_all_properties()
        for prop_name, prop_config in all_props.items():
            if prop_name not in self.data:
                # Check if this is an injected dependency
                if prop_config.get("injected", False):
                    # Try to inject from DI registry
                    try:
                        from zexus.dependency_injection import get_di_registry
                        from zexus.object import NULL
                        registry = get_di_registry()
                        container = registry.get_container("__main__")
                        if container:
                            self.data[prop_name] = container.get(prop_name)
                        else:
                            # No container, set to NULL
                            self.data[prop_name] = NULL
                    except Exception:
                        # If injection fails, set to NULL
                        from zexus.object import NULL
                        self.data[prop_name] = NULL
                elif "default_value" in prop_config:
                    self.data[prop_name] = prop_config["default_value"]

    def get(self, property_name):
        """Get property value (None when unset)."""
        return self.data.get(property_name)

    def set(self, property_name, value):
        """Set property value (prevent modification if property is sealed).

        Raises ValueError for undeclared properties and for properties
        whose current value is a SealedObject.
        """
        if property_name not in self.entity_def.get_all_properties():
            raise ValueError(f"Unknown property: {property_name}")
        existing = self.data.get(property_name)
        # Avoid importing SealedObject here to prevent circular imports; use name-based check
        if existing is not None and existing.__class__.__name__ == 'SealedObject':
            raise ValueError(f"Cannot modify sealed property: {property_name}")
        self.data[property_name] = value

    def to_dict(self):
        """Convert to dictionary (returns the live internal dict, not a copy)."""
        return self.data

    def __str__(self):
        """String representation: ``EntityName(prop=value, ...)``."""
        entity_name = self.entity_def.name if hasattr(self.entity_def, 'name') else 'Entity'
        # Format properties nicely
        props = []
        for key, value in self.data.items():
            # Convert value to a readable string
            if hasattr(value, 'value'):
                # Object wrapper with value attribute (Integer, String, etc.)
                props.append(f"{key}={value.value}")
            elif hasattr(value, '__class__') and hasattr(value.__class__, '__name__'):
                if value.__class__.__name__ in ['EntityInstance', 'SealedObject']:
                    props.append(f"{key}=<{value.__class__.__name__}>")
                else:
                    try:
                        props.append(f"{key}={value}")
                    except Exception:
                        # Narrowed from a bare except so KeyboardInterrupt /
                        # SystemExit are not swallowed while formatting.
                        props.append(f"{key}=<object>")
            else:
                props.append(f"{key}={value}")
        props_str = ", ".join(props)
        return f"{entity_name}({props_str})"

    def __repr__(self):
        """Python representation"""
        return self.__str__()

    def call_method(self, method_name, args):
        """Call a method on this entity instance.

        Builds a fresh Environment chained to the method's closure, binds
        'this' and the positional parameters, evaluates the body through
        the Zexus evaluator, and unwraps any ReturnValue.
        """
        if method_name not in self.entity_def.methods:
            from zexus.object import EvaluationError
            return EvaluationError(f"Method '{method_name}' not supported for ENTITY_INSTANCE")

        # Get the method (Action or Function)
        method = self.entity_def.methods[method_name]

        # Create a new environment for the method execution
        from zexus.environment import Environment
        method_env = Environment(outer=method.env if hasattr(method, 'env') else None)

        # Bind 'this' to the current instance in the method environment
        method_env.set('this', self)

        # Bind method parameters to arguments (extra args are ignored;
        # missing args are simply left unbound).
        if hasattr(method, 'parameters'):
            for i, param in enumerate(method.parameters):
                if i < len(args):
                    # Handle both Identifier objects and ParameterNode objects
                    if hasattr(param, 'name'):
                        # It's a ParameterNode with name and type
                        param_name = param.name.value if hasattr(param.name, 'value') else str(param.name)
                    elif hasattr(param, 'value'):
                        # It's an Identifier
                        param_name = param.value
                    else:
                        # Fallback to string representation
                        param_name = str(param)
                    method_env.set(param_name, args[i])

        # Import evaluator to execute the method body
        # Avoid circular import by importing here
        from zexus.evaluator.core import Evaluator
        evaluator = Evaluator()

        # Execute the method body with stack trace
        result = evaluator.eval_node(method.body, method_env, stack_trace=[])

        # Unwrap return values
        from zexus.object import ReturnValue
        if isinstance(result, ReturnValue):
            return result.value

        return result
|
|
675
|
+
|
|
676
|
+
|
|
677
|
+
# ===============================================
|
|
678
|
+
# VERIFICATION SYSTEM - Security Checks
|
|
679
|
+
# ===============================================
|
|
680
|
+
|
|
681
|
+
class VerificationCheck:
    """A single named verification condition.

    Wraps a predicate over a context dict; ``verify`` never raises —
    failures (including exceptions from the predicate) are reported as
    ``(False, message)`` tuples.
    """

    def __init__(self, name, condition_func, error_message=""):
        self.name = name
        self.condition_func = condition_func
        self.error_message = error_message or f"Verification check '{name}' failed"

    def verify(self, context_data):
        """Run the check; return ``(passed, error_message_or_None)``."""
        try:
            outcome = self.condition_func(context_data)
        except Exception as exc:
            # Predicate blew up: surface the exception text as the failure.
            return (False, str(exc))
        if outcome:
            return (outcome, None)
        return (False, self.error_message)
|
|
696
|
+
|
|
697
|
+
|
|
698
|
+
class VerifyWrapper:
    """Wraps a target function with a list of verification checks.

    ``execute`` runs every check against the context; the first failure
    is routed to the optional error handler (or returned as an
    ObjectEvaluationError), and the target is invoked only when all
    checks pass.
    """

    def __init__(self, target_func, checks, error_handler=None):
        self.target_func = target_func
        self.checks = checks  # List of VerificationCheck
        self.error_handler = error_handler

    def execute(self, args, context_data=None, env=None):
        """Run the checks, then the target function if all passed."""
        ctx = context_data or {}

        # Run all verification checks; stop at the first violation.
        for check in self.checks:
            ok, message = check.verify(ctx)
            if ok:
                continue
            if self.error_handler:
                return self.error_handler(message, ctx, env)
            return ObjectEvaluationError(message)

        # All checks passed, execute target
        return self.target_func(args, env)
|
|
721
|
+
|
|
722
|
+
|
|
723
|
+
# ===============================================
|
|
724
|
+
# CONTRACT PERSISTENCE BACKENDS
|
|
725
|
+
# ===============================================
|
|
726
|
+
|
|
727
|
+
class StorageBackend:
    """Abstract key/value storage interface for contract state.

    Concrete backends override all four operations; the defaults are
    no-ops so a partial implementation degrades gracefully.
    """

    def set(self, key, value):
        pass

    def get(self, key):
        pass

    def delete(self, key):
        pass

    def close(self):
        pass
|
|
733
|
+
|
|
734
|
+
class InMemoryBackend(StorageBackend):
    """Volatile dict-backed storage; contents vanish with the process."""

    def __init__(self):
        self.data = {}

    def set(self, key, value):
        self.data[key] = value

    def get(self, key):
        return self.data.get(key)

    def delete(self, key):
        # pop with a default tolerates keys that were never stored
        self.data.pop(key, None)
|
|
741
|
+
|
|
742
|
+
class SQLiteBackend(StorageBackend):
    """Key/value storage persisted in a single SQLite table.

    NOTE(review): check_same_thread=False permits cross-thread use, but
    the shared cursor is not lock-protected — confirm callers are
    effectively single-threaded.
    """

    def __init__(self, db_path):
        import sqlite3
        self.conn = sqlite3.connect(db_path, check_same_thread=False)
        self.cursor = self.conn.cursor()
        self.cursor.execute("CREATE TABLE IF NOT EXISTS kv_store (key TEXT PRIMARY KEY, value TEXT)")
        self.conn.commit()

    def set(self, key, value):
        """Insert or overwrite one row, committing immediately."""
        self.cursor.execute("INSERT OR REPLACE INTO kv_store (key, value) VALUES (?, ?)", (key, value))
        self.conn.commit()

    def get(self, key):
        """Return the stored string for *key*, or None when absent."""
        self.cursor.execute("SELECT value FROM kv_store WHERE key=?", (key,))
        row = self.cursor.fetchone()
        if row:
            return row[0]
        return None

    def delete(self, key):
        """Remove the row for *key* (no-op when absent), committing immediately."""
        self.cursor.execute("DELETE FROM kv_store WHERE key=?", (key,))
        self.conn.commit()

    def close(self):
        """Close the underlying connection; the instance is unusable afterwards."""
        self.conn.close()
|
|
765
|
+
|
|
766
|
+
class LevelDBBackend(StorageBackend):
    """Key/value storage over a plyvel (LevelDB) database.

    Keys and values are stored UTF-8 encoded; requires the optional
    plyvel dependency (checked via the module flag _LEVELDB_AVAILABLE).
    """

    def __init__(self, db_path):
        if not _LEVELDB_AVAILABLE:
            raise ImportError("plyvel not installed")
        self.db = plyvel.DB(db_path, create_if_missing=True)

    def set(self, key, value):
        self.db.put(key.encode('utf-8'), value.encode('utf-8'))

    def get(self, key):
        raw = self.db.get(key.encode('utf-8'))
        if not raw:
            return None
        return raw.decode('utf-8')

    def delete(self, key):
        self.db.delete(key.encode('utf-8'))

    def close(self):
        self.db.close()
|
|
783
|
+
|
|
784
|
+
class RocksDBBackend(StorageBackend):
    """Key/value storage over a rocksdb database (UTF-8 encoded).

    Requires the optional rocksdb dependency (module flag
    _ROCKSDB_AVAILABLE).  No close() override here: the inherited no-op
    from StorageBackend applies.
    """

    def __init__(self, db_path):
        if not _ROCKSDB_AVAILABLE:
            raise ImportError("rocksdb not installed")
        self.db = rocksdb.DB(db_path, rocksdb.Options(create_if_missing=True))

    def set(self, key, value):
        self.db.put(key.encode('utf-8'), value.encode('utf-8'))

    def get(self, key):
        raw = self.db.get(key.encode('utf-8'))
        if not raw:
            return None
        return raw.decode('utf-8')

    def delete(self, key):
        self.db.delete(key.encode('utf-8'))
|
|
798
|
+
|
|
799
|
+
# ===============================================
|
|
800
|
+
# CONTRACT SYSTEM - Blockchain State & Logic
|
|
801
|
+
# ===============================================
|
|
802
|
+
|
|
803
|
+
class ContractStorage:
    """Persistent storage for contract state with DB selection.

    Values are Zexus runtime objects; they are serialized to tagged JSON
    strings on write and reconstructed on read.  The backend is chosen
    from *db_type* with a graceful in-memory fallback, and every mutation
    is appended to an in-process audit log.

    Fix: ``_serialize_val_recursive`` previously lacked the Float and
    plain-None cases that the top-level serializer handled, so floats
    nested inside lists/maps were silently stringified.  The top-level
    serializer now delegates to the (completed) recursive helper so both
    paths stay consistent.
    """

    def __init__(self, contract_id, db_type="sqlite"):
        self.transaction_log = []
        self.db_type = db_type

        # Determine strict path
        base_path = os.path.join(STORAGE_DIR, f"{contract_id}")

        # Initialize Backend
        if db_type == "leveldb" and _LEVELDB_AVAILABLE:
            self.backend = LevelDBBackend(base_path)
        elif db_type == "rocksdb" and _ROCKSDB_AVAILABLE:
            self.backend = RocksDBBackend(f"{base_path}.rdb")
        elif db_type == "sqlite":
            self.backend = SQLiteBackend(f"{base_path}.sqlite")
        else:
            print(f" ⚠️ Storage Warning: '{db_type}' unavailable or unknown. Falling back to In-Memory.")
            self.backend = InMemoryBackend()

    def get(self, key):
        """Get value from storage and deserialize from JSON"""
        raw_val = self.backend.get(key)
        if raw_val is None:
            return None
        return self._deserialize(raw_val)

    def set(self, key, value):
        """Serialize to JSON and set value in storage"""
        serialized = self._serialize(value)
        self.backend.set(key, serialized)
        self._log_transaction("SET", key, serialized)

    def delete(self, key):
        """Delete value from storage"""
        self.backend.delete(key)
        self._log_transaction("DELETE", key, None)

    def _log_transaction(self, op, key, value):
        """Log transaction for audit trail"""
        self.transaction_log.append({
            "operation": op,
            "key": key,
            "value": value,
            "timestamp": _get_timestamp()
        })

    def _serialize(self, obj):
        """Convert Zexus Object -> JSON String.

        Delegates to the recursive helper so top-level and nested values
        serialize identically.
        """
        return json.dumps(self._serialize_val_recursive(obj))

    def _serialize_val_recursive(self, obj):
        """Build the tagged-dict form of *obj* (not yet JSON-encoded)."""
        if isinstance(obj, String):
            return {"type": "string", "val": obj.value}
        elif isinstance(obj, Integer):
            return {"type": "integer", "val": obj.value}
        elif isinstance(obj, Float):
            # Previously missing here: nested floats fell through to the
            # string fallback and round-tripped as strings.
            return {"type": "float", "val": obj.value}
        elif isinstance(obj, BooleanObj):
            return {"type": "boolean", "val": obj.value}
        elif isinstance(obj, List):
            # Recursively serialize list elements
            return {"type": "list", "val": [self._serialize_val_recursive(e) for e in obj.elements]}
        elif isinstance(obj, Map):
            # Recursively serialize map elements
            return {"type": "map", "val": {k: self._serialize_val_recursive(v) for k, v in obj.pairs.items()}}
        elif obj is Null or obj is None:
            return {"type": "null", "val": None}
        # Fallback for complex objects: store their string form.
        return {"type": "string", "val": str(obj)}

    def _deserialize(self, json_str):
        """Convert JSON String -> Zexus Object (Null on corruption)."""
        try:
            data = json.loads(json_str)
            return self._deserialize_recursive(data)
        except Exception as e:
            print(f"Settings corruption error: {e}")
            return Null

    def _deserialize_recursive(self, data):
        """Reconstruct a Zexus object from its tagged-dict form."""
        dtype = data.get("type")
        val = data.get("val")

        if dtype == "string": return String(val)
        elif dtype == "integer": return Integer(val)
        elif dtype == "float": return Float(val)
        elif dtype == "boolean": return BooleanObj(val)
        elif dtype == "null": return Null
        elif dtype == "list":
            # Reconstruct list
            return List([self._deserialize_recursive(item) for item in val])
        elif dtype == "map":
            # Reconstruct map
            return Map({k: self._deserialize_recursive(v) for k, v in val.items()})
        return String(str(val))  # Fallback
|
|
916
|
+
|
|
917
|
+
|
|
918
|
+
class SmartContract:
    """Represents a smart contract with persistent storage"""

    def __init__(self, name, storage_vars, actions, blockchain_config=None, address=None):
        # name: contract class name; storage_vars: declared persistent vars
        # (AST nodes, dicts, or strings); actions: {action_name: Action}.
        self.name = name
        self.storage_vars = storage_vars or []
        self.actions = actions or {}
        self.blockchain_config = blockchain_config or {}

        # Generate a unique address/ID for this specific instance if not provided
        self.address = address or str(uuid.uuid4())[:8]

        # Default to SQLite, can be configured via blockchain_config
        db_pref = (blockchain_config or {}).get("storage_engine", "sqlite")

        # Initialize storage linked to unique address
        # The unique ID ensures multiple "ZiverWallet()" calls don't overwrite each other
        contract_id = f"{self.name}_{self.address}"
        self.storage = ContractStorage(contract_id, db_type=db_pref)
        self.is_deployed = False

    def instantiate(self, args=None):
        """Create a new instance of this contract when called like ZiverWallet()."""
        # NOTE(review): *args* is accepted but never used — constructor
        # arguments are presumably not yet supported; confirm.
        print(f"📄 SmartContract.instantiate() called for: {self.name}")

        # Generate new unique address for the instance
        new_address = str(uuid.uuid4())[:16]

        # Create instance with clean storage connection
        instance = SmartContract(
            name=self.name,
            storage_vars=self.storage_vars,
            actions=self.actions,
            blockchain_config=self.blockchain_config,
            address=new_address
        )

        print(f" 🔗 Contract Address: {new_address}")

        # Deploy the instance (initialize storage)
        instance.deploy()
        instance.parent_contract = self

        print(f" Available actions: {list(self.actions.keys())}")
        return instance

    def __call__(self, *args):
        # Calling the contract object itself creates a deployed instance.
        return self.instantiate(args)

    def deploy(self):
        """Deploy the contract and initialize persistent storage"""
        # Checks if we should reset storage or strictly load existing
        # For simplicity in this VM, subsequent runs act like "loading" if DB exists
        self.is_deployed = True

        # Initialize storage only if key doesn't exist (preserve persistence)
        for var_node in self.storage_vars:
            var_name = None
            default_value = None

            # storage_vars entries may be AST nodes (with .name/.initial_value)
            # or plain dicts; extract name and default accordingly.
            if hasattr(var_node, 'initial_value'):
                var_name = var_node.name.value if hasattr(var_node.name, 'value') else var_node.name
                default_value = var_node.initial_value
            elif isinstance(var_node, dict) and "initial_value" in var_node:
                var_name = var_node.get("name")
                default_value = var_node["initial_value"]

            if var_name:
                # ONLY set if not already in DB (Persistence Logic)
                if self.storage.get(var_name) is None:
                    if default_value is not None:
                        self.storage.set(var_name, default_value)
                    else:
                        # Set reasonable defaults for types if null
                        self.storage.set(var_name, Null)

    def call_method(self, action_name, args):
        """Call a contract action - similar to EntityInstance.call_method"""
        # Guard: action dispatch requires a deployed contract.
        if not self.is_deployed:
            from zexus.object import EvaluationError
            return EvaluationError(f"Contract {self.name} not deployed")

        if action_name not in self.actions:
            from zexus.object import EvaluationError
            return EvaluationError(f"Action '{action_name}' not found in contract {self.name}")

        # Get the action (Action object)
        action = self.actions[action_name]

        # Create a new environment for the action execution
        from zexus.environment import Environment
        action_env = Environment(outer=action.env if hasattr(action, 'env') else None)

        # Bind 'this' to the current contract instance in the action environment
        action_env.set('this', self)

        # Make contract storage accessible in the action environment
        for var_node in self.storage_vars:
            # Extract variable name from node (same logic as in deploy)
            var_name = None
            if hasattr(var_node, 'name'):
                var_name = var_node.name.value if hasattr(var_node.name, 'value') else var_node.name
            elif isinstance(var_node, dict):
                var_name = var_node.get("name")
            elif isinstance(var_node, str):
                var_name = var_node

            if var_name:
                stored_value = self.storage.get(var_name)
                if stored_value is not None:
                    action_env.set(var_name, stored_value)

        # Bind action parameters to arguments
        if hasattr(action, 'parameters'):
            for i, param in enumerate(action.parameters):
                if i < len(args):
                    # Handle both Identifier objects and ParameterNode objects
                    if hasattr(param, 'name'):
                        # It's a ParameterNode with name and type
                        param_name = param.name.value if hasattr(param.name, 'value') else str(param.name)
                    else:
                        # It's an Identifier
                        param_name = param.value if hasattr(param, 'value') else str(param)

                    action_env.set(param_name, args[i])
                else:
                    # If arg is missing, set to Null
                    param_name = param.name.value if hasattr(param, 'name') else (param.value if hasattr(param, 'value') else str(param))
                    from zexus.object import Null
                    action_env.set(param_name, Null)

        # Execute the action body
        from zexus.evaluator.core import Evaluator
        evaluator = Evaluator()
        result = evaluator.eval_node(action.body, action_env, stack_trace=[])

        # Save any modified state variables back to storage
        # (write-back of every storage var seen in the action environment).
        for var_node in self.storage_vars:
            # Extract variable name from node (same logic as above)
            var_name = None
            if hasattr(var_node, 'name'):
                var_name = var_node.name.value if hasattr(var_node.name, 'value') else var_node.name
            elif isinstance(var_node, dict):
                var_name = var_node.get("name")
            elif isinstance(var_node, str):
                var_name = var_node

            if var_name:
                current_value = action_env.get(var_name)
                if current_value is not None:
                    self.storage.set(var_name, current_value)

        return result

    def execute_action(self, action_name, args, context, env=None):
        """Execute a contract action"""
        # NOTE(review): despite the docstring, this returns the Action
        # object itself rather than evaluating it — callers presumably
        # run it through the evaluator; confirm before changing.
        if not self.is_deployed:
            return ObjectEvaluationError(f"Contract {self.name} not deployed")

        if action_name not in self.actions:
            return ObjectEvaluationError(f"Unknown action: {action_name}")

        return self.actions[action_name]

    def get_state(self):
        # Only the in-memory backend exposes its raw dict; persistent
        # backends return an empty snapshot here.
        return self.storage.backend.data if isinstance(self.storage.backend, InMemoryBackend) else {}

    def get_balance(self, account=None):
        # Per-account balance key when an account is given, otherwise the
        # contract-wide "balance" key; missing values default to Integer(0).
        val = self.storage.get(f"balance_{account}") if account else self.storage.get("balance")
        return val or Integer(0)
|
|
1088
|
+
|
|
1089
|
+
|
|
1090
|
+
# ===============================================
|
|
1091
|
+
# PROTECTION SYSTEM - Security Guardrails
|
|
1092
|
+
# ===============================================
|
|
1093
|
+
|
|
1094
|
+
class ProtectionRule:
    """A single named protection rule evaluated against request context.

    Each recognized key in the config dict activates one guard;
    ``evaluate`` returns ``(allowed, reason)`` where reason is None on
    success.
    """

    def __init__(self, name, rule_config):
        self.name = name
        self.config = rule_config

    def evaluate(self, context_data):
        """Evaluate if protection allows access."""
        cfg = self.config

        # Rate limiting
        limit = cfg.get("rate_limit")
        if limit and context_data.get("request_count", 0) > limit:
            return False, "Rate limit exceeded"

        # Authentication requirement
        if cfg.get("auth_required", False) and not context_data.get("user_authenticated"):
            return False, "Authentication required"

        # Password strength
        required = cfg.get("min_password_strength")
        if required:
            levels = {"weak": 0, "medium": 1, "strong": 2, "very_strong": 3}
            actual = context_data.get("password_strength", "weak")
            if levels.get(actual, 0) < levels.get(required, 0):
                return False, f"Password must be {required}"

        # Session timeout
        timeout = cfg.get("session_timeout")
        if timeout and context_data.get("session_age_seconds", 0) > timeout:
            return False, "Session expired"

        # HTTPS requirement
        if cfg.get("require_https", False) and not context_data.get("is_https", False):
            return False, "HTTPS required"

        return True, None
|
|
1133
|
+
|
|
1134
|
+
|
|
1135
|
+
class ProtectionPolicy:
    """A set of protection rules applied to a named target.

    ``enforcement_level`` controls the reaction to violations:
    strict -> deny with the first violation message; warn/audit -> allow
    but hand back the violation list for logging.
    """

    def __init__(self, target_name, rules, enforcement_level="strict"):
        self.target_name = target_name
        self.rules = {}  # {rule_name: ProtectionRule}
        self.enforcement_level = enforcement_level  # strict, warn, audit

        if isinstance(rules, dict):
            for rule_name, rule_config in rules.items():
                self.add_rule(rule_name, rule_config)

    def add_rule(self, rule_name, rule_config):
        """Register (or replace) a rule under *rule_name*."""
        self.rules[rule_name] = ProtectionRule(rule_name, rule_config)

    def check_access(self, context_data):
        """Check if access is allowed; returns (allowed, info)."""
        violations = []
        for rule_name, rule in self.rules.items():
            allowed, message = rule.evaluate(context_data)
            if not allowed:
                violations.append((rule_name, message))

        if not violations:
            return True, None

        level = self.enforcement_level
        if level == "strict":
            return False, violations[0][1]
        if level in ("warn", "audit"):
            # Allow, but surface the violations for warning/audit logging.
            return True, violations

        # Unknown enforcement level: default-allow, matching prior behavior.
        return True, None
|
|
1169
|
+
|
|
1170
|
+
|
|
1171
|
+
# ===============================================
|
|
1172
|
+
# MIDDLEWARE SYSTEM - Request/Response Processing
|
|
1173
|
+
# ===============================================
|
|
1174
|
+
|
|
1175
|
+
class Middleware:
    """A named request/response handler in the middleware chain.

    The handler receives ``((request, response), env)``; any exception it
    raises is converted into an ObjectEvaluationError result instead of
    propagating.
    """

    def __init__(self, name, handler_func):
        self.name = name
        self.handler_func = handler_func

    def execute(self, request, response, env=None):
        """Invoke the handler, wrapping failures as evaluation errors."""
        try:
            return self.handler_func((request, response), env)
        except Exception as exc:
            return ObjectEvaluationError(f"Middleware error: {str(exc)}")
|
|
1188
|
+
|
|
1189
|
+
|
|
1190
|
+
class MiddlewareChain:
    """Runs registered middleware in insertion order over request/response.

    Execution stops early when a middleware returns an evaluation error
    (the error is propagated) or sets the "_stop_chain" flag on the
    response (the response so far is returned).
    """

    def __init__(self):
        self.middlewares = []

    def add_middleware(self, middleware):
        """Append *middleware* to the end of the chain."""
        self.middlewares.append(middleware)

    def execute(self, request, response, env=None):
        """Run every middleware in order; return the (possibly mutated) response."""
        for mw in self.middlewares:
            outcome = mw.execute(request, response, env)
            if isinstance(outcome, ObjectEvaluationError):
                return outcome
            # A middleware may short-circuit the rest of the chain.
            if response.get("_stop_chain"):
                break
        return response
|
|
1210
|
+
|
|
1211
|
+
|
|
1212
|
+
# ===============================================
|
|
1213
|
+
# AUTHENTICATION & AUTHORIZATION
|
|
1214
|
+
# ===============================================
|
|
1215
|
+
|
|
1216
|
+
class AuthConfig:
    """Authentication configuration with overridable defaults.

    Defaults: oauth2 provider, read/write scopes, 1-hour tokens, refresh
    enabled; any of these may be overridden through *config_data*.
    """

    def __init__(self, config_data=None):
        self.provider = "oauth2"
        self.scopes = ["read", "write"]
        self.token_expiry = 3600
        self.refresh_enabled = True

        if config_data:
            # Override each default only when the caller supplied it.
            self.provider = config_data.get("provider", self.provider)
            self.scopes = config_data.get("scopes", self.scopes)
            self.token_expiry = config_data.get("token_expiry", self.token_expiry)
            self.refresh_enabled = config_data.get("refresh_enabled", self.refresh_enabled)

    def validate_token(self, token):
        """Validate a token (stub: always succeeds).

        In production, this would validate with the OAuth provider.
        """
        return True

    def is_token_expired(self, token_data):
        """True when token_data lacks issued_at or its age exceeds token_expiry."""
        import time
        if "issued_at" not in token_data:
            return True
        elapsed = time.time() - token_data["issued_at"]
        return elapsed > self.token_expiry
|
|
1243
|
+
|
|
1244
|
+
|
|
1245
|
+
# ===============================================
|
|
1246
|
+
# CACHING SYSTEM
|
|
1247
|
+
# ===============================================
|
|
1248
|
+
|
|
1249
|
+
class CachePolicy:
    """TTL-based cache policy with per-key timestamps and manual invalidation."""

    def __init__(self, ttl=3600, key_func=None, invalidate_on=None):
        self.ttl = ttl  # seconds an entry stays fresh
        # Default key function stringifies its argument.
        self.key_func = key_func or (lambda value: str(value))
        self.invalidate_on = invalidate_on or []  # events that invalidate the cache
        self.cache = {}       # {key: cached value}
        self.timestamps = {}  # {key: time the value was stored}

    def get(self, key):
        """Return the cached value for *key*, or None when absent or expired."""
        import time
        if key in self.cache:
            age = time.time() - self.timestamps[key]
            if age <= self.ttl:
                return self.cache[key]
            # Entry is stale: evict it and report a miss.
            self.cache.pop(key)
            self.timestamps.pop(key)
        return None

    def set(self, key, value):
        """Store *value* under *key*, stamping it with the current time."""
        import time
        self.timestamps[key] = time.time()
        self.cache[key] = value

    def invalidate(self, key=None):
        """Drop one entry (by key), or flush the whole cache when key is None."""
        if key is None:
            self.cache.clear()
            self.timestamps.clear()
        elif key in self.cache:
            self.cache.pop(key)
            self.timestamps.pop(key)
|
|
1287
|
+
|
|
1288
|
+
|
|
1289
|
+
# ===============================================
|
|
1290
|
+
# SEALING / IMMUTABILITY
|
|
1291
|
+
# ===============================================
|
|
1292
|
+
|
|
1293
|
+
class SealedObject:
    """Immutable wrapper that marks a value as sealed against mutation.

    Lightweight runtime marker: the evaluator enforces immutability by
    checking isinstance(obj, SealedObject) before allowing assignments
    or property writes.
    """

    def __init__(self, value):
        self._value = value

    def get(self):
        """Return the wrapped (read-only) value."""
        return self._value

    def inspect(self):
        """Render the wrapped value, preferring its own inspect() when available."""
        inner = self._value
        return inner.inspect() if hasattr(inner, 'inspect') else str(inner)

    def type(self):
        """Return "Sealed<T>" where T comes from the inner value's type() or class name."""
        fallback = type(self._value).__name__
        try:
            inner_type = self._value.type() if hasattr(self._value, 'type') else fallback
        except Exception:
            # The inner type() misbehaved; use the Python class name instead.
            inner_type = fallback
        return f"Sealed<{inner_type}>"

    def __repr__(self):
        return f"SealedObject({repr(self._value)})"
|
|
1322
|
+
|
|
1323
|
+
|
|
1324
|
+
# ===============================================
|
|
1325
|
+
# RATE LIMITING
|
|
1326
|
+
# ===============================================
|
|
1327
|
+
|
|
1328
|
+
class RateLimiter:
    """Windowed rate limiter for throttling requests.

    Tracks per-user (or global) request counts inside a fixed 60-second
    window plus a burst counter. Counters expire with the window, so a
    client is never blocked permanently.
    """

    WINDOW_SECONDS = 60  # length of one rate-limit window

    def __init__(self, requests_per_minute=100, burst_size=10, per_user=False):
        self.requests_per_minute = requests_per_minute  # max requests per window
        self.burst_size = burst_size                    # max requests in a burst
        self.per_user = per_user                        # track per user_id vs. one global bucket
        self.request_counts = {}  # {user_id: requests within the current window}
        self.burst_counts = {}    # {user_id: burst count within the current window}
        self.window_starts = {}   # {user_id: epoch seconds when the window opened}

    def _refresh_window(self, user_id):
        """Reset a user's counters once their 60-second window has elapsed."""
        import time
        now = time.time()
        started = self.window_starts.get(user_id)
        if started is None or now - started >= self.WINDOW_SECONDS:
            self.window_starts[user_id] = now
            self.request_counts[user_id] = 0
            self.burst_counts[user_id] = 0

    def allow_request(self, user_id=None):
        """Check whether a request is allowed.

        Returns (True, None) when allowed, otherwise (False, reason).
        When per_user is False, every request shares one "global" bucket.
        """
        if not self.per_user:
            user_id = "global"

        # Fix: counters previously accumulated forever (no time window), so a
        # client that hit a limit once was blocked permanently. Expire the
        # counters together with the rate window instead.
        self._refresh_window(user_id)

        current_count = self.request_counts.get(user_id, 0)
        burst_count = self.burst_counts.get(user_id, 0)

        # Check rate limit for the current window
        if current_count >= self.requests_per_minute:
            return False, "Rate limit exceeded"

        # Check burst limit for the current window
        if burst_count >= self.burst_size:
            return False, "Burst limit exceeded"

        self.request_counts[user_id] = current_count + 1
        self.burst_counts[user_id] = burst_count + 1

        return True, None

    def reset(self, user_id=None):
        """Clear counters for one user, or for everyone when user_id is None."""
        if user_id:
            self.request_counts.pop(user_id, None)
            self.burst_counts.pop(user_id, None)
            self.window_starts.pop(user_id, None)
        else:
            self.request_counts.clear()
            self.burst_counts.clear()
            self.window_starts.clear()
|
|
1369
|
+
|
|
1370
|
+
|
|
1371
|
+
# ===============================================
|
|
1372
|
+
# UTILITY FUNCTIONS
|
|
1373
|
+
# ===============================================
|
|
1374
|
+
|
|
1375
|
+
def _get_timestamp():
|
|
1376
|
+
"""Get current timestamp"""
|
|
1377
|
+
import time
|
|
1378
|
+
return int(time.time() * 1000)
|
|
1379
|
+
|
|
1380
|
+
|
|
1381
|
+
def export_security_to_environment(env):
    """Export security functions to environment.

    Registers the security-related builtins ("entity", "verify",
    "contract", "seal") on *env* so interpreted programs can call them.
    """
    # Entity creation
    def make_entity(entity_def, values=None):
        # Instantiate an entity from its definition; anything else is an error value.
        if isinstance(entity_def, EntityDefinition):
            return entity_def.create_instance(values)
        return ObjectEvaluationError("Invalid entity definition")

    # Verification
    def make_verify(target, checks, error_handler=None):
        # Wrap target so the supplied checks run, with an optional error handler.
        return VerifyWrapper(target, checks, error_handler)

    # Contract deployment
    def deploy_contract(contract):
        # Deploy a SmartContract in place and hand it back; error on non-contracts.
        if isinstance(contract, SmartContract):
            contract.deploy()
            return contract
        return ObjectEvaluationError("Invalid contract")

    env.set("entity", Builtin(make_entity, "entity"))
    env.set("verify", Builtin(make_verify, "verify"))
    env.set("contract", Builtin(deploy_contract, "contract"))
    # sealing: make a variable/object immutable
    def make_seal(value):
        # SealedObject is a marker the evaluator checks before allowing writes.
        return SealedObject(value)

    env.set("seal", Builtin(make_seal, "seal"))
|