zexus 1.6.2
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/LICENSE +0 -0
- package/README.md +2513 -0
- package/bin/zexus +2 -0
- package/bin/zpics +2 -0
- package/bin/zpm +2 -0
- package/bin/zx +2 -0
- package/bin/zx-deploy +2 -0
- package/bin/zx-dev +2 -0
- package/bin/zx-run +2 -0
- package/package.json +66 -0
- package/scripts/README.md +24 -0
- package/scripts/postinstall.js +44 -0
- package/shared_config.json +24 -0
- package/src/README.md +1525 -0
- package/src/tests/run_zexus_tests.py +117 -0
- package/src/tests/test_all_phases.zx +346 -0
- package/src/tests/test_blockchain_features.zx +306 -0
- package/src/tests/test_complexity_features.zx +321 -0
- package/src/tests/test_core_integration.py +185 -0
- package/src/tests/test_phase10_ecosystem.zx +177 -0
- package/src/tests/test_phase1_modifiers.zx +87 -0
- package/src/tests/test_phase2_plugins.zx +80 -0
- package/src/tests/test_phase3_security.zx +97 -0
- package/src/tests/test_phase4_vfs.zx +116 -0
- package/src/tests/test_phase5_types.zx +117 -0
- package/src/tests/test_phase6_metaprogramming.zx +125 -0
- package/src/tests/test_phase7_optimization.zx +132 -0
- package/src/tests/test_phase9_advanced_types.zx +157 -0
- package/src/tests/test_security_features.py +419 -0
- package/src/tests/test_security_features.zx +276 -0
- package/src/tests/test_simple_zx.zx +1 -0
- package/src/tests/test_verification_simple.zx +69 -0
- package/src/zexus/__init__.py +28 -0
- package/src/zexus/__main__.py +5 -0
- package/src/zexus/__pycache__/__init__.cpython-312.pyc +0 -0
- package/src/zexus/__pycache__/advanced_types.cpython-312.pyc +0 -0
- package/src/zexus/__pycache__/builtin_modules.cpython-312.pyc +0 -0
- package/src/zexus/__pycache__/capability_system.cpython-312.pyc +0 -0
- package/src/zexus/__pycache__/complexity_system.cpython-312.pyc +0 -0
- package/src/zexus/__pycache__/concurrency_system.cpython-312.pyc +0 -0
- package/src/zexus/__pycache__/config.cpython-312.pyc +0 -0
- package/src/zexus/__pycache__/dependency_injection.cpython-312.pyc +0 -0
- package/src/zexus/__pycache__/ecosystem.cpython-312.pyc +0 -0
- package/src/zexus/__pycache__/environment.cpython-312.pyc +0 -0
- package/src/zexus/__pycache__/error_reporter.cpython-312.pyc +0 -0
- package/src/zexus/__pycache__/hybrid_orchestrator.cpython-312.pyc +0 -0
- package/src/zexus/__pycache__/lexer.cpython-312.pyc +0 -0
- package/src/zexus/__pycache__/metaprogramming.cpython-312.pyc +0 -0
- package/src/zexus/__pycache__/module_cache.cpython-312.pyc +0 -0
- package/src/zexus/__pycache__/object.cpython-312.pyc +0 -0
- package/src/zexus/__pycache__/optimization.cpython-312.pyc +0 -0
- package/src/zexus/__pycache__/plugin_system.cpython-312.pyc +0 -0
- package/src/zexus/__pycache__/policy_engine.cpython-312.pyc +0 -0
- package/src/zexus/__pycache__/security.cpython-312.pyc +0 -0
- package/src/zexus/__pycache__/stdlib_integration.cpython-312.pyc +0 -0
- package/src/zexus/__pycache__/strategy_recovery.cpython-312.pyc +0 -0
- package/src/zexus/__pycache__/syntax_validator.cpython-312.pyc +0 -0
- package/src/zexus/__pycache__/type_system.cpython-312.pyc +0 -0
- package/src/zexus/__pycache__/virtual_filesystem.cpython-312.pyc +0 -0
- package/src/zexus/__pycache__/zexus_ast.cpython-312.pyc +0 -0
- package/src/zexus/__pycache__/zexus_token.cpython-312.pyc +0 -0
- package/src/zexus/advanced_types.py +401 -0
- package/src/zexus/blockchain/__init__.py +40 -0
- package/src/zexus/blockchain/__pycache__/__init__.cpython-312.pyc +0 -0
- package/src/zexus/blockchain/__pycache__/crypto.cpython-312.pyc +0 -0
- package/src/zexus/blockchain/__pycache__/ledger.cpython-312.pyc +0 -0
- package/src/zexus/blockchain/__pycache__/transaction.cpython-312.pyc +0 -0
- package/src/zexus/blockchain/crypto.py +463 -0
- package/src/zexus/blockchain/ledger.py +255 -0
- package/src/zexus/blockchain/transaction.py +267 -0
- package/src/zexus/builtin_modules.py +284 -0
- package/src/zexus/builtin_plugins.py +317 -0
- package/src/zexus/capability_system.py +372 -0
- package/src/zexus/cli/__init__.py +2 -0
- package/src/zexus/cli/__pycache__/__init__.cpython-312.pyc +0 -0
- package/src/zexus/cli/__pycache__/main.cpython-312.pyc +0 -0
- package/src/zexus/cli/main.py +707 -0
- package/src/zexus/cli/zpm.py +203 -0
- package/src/zexus/compare_interpreter_compiler.py +146 -0
- package/src/zexus/compiler/__init__.py +169 -0
- package/src/zexus/compiler/__pycache__/__init__.cpython-312.pyc +0 -0
- package/src/zexus/compiler/__pycache__/lexer.cpython-312.pyc +0 -0
- package/src/zexus/compiler/__pycache__/parser.cpython-312.pyc +0 -0
- package/src/zexus/compiler/__pycache__/zexus_ast.cpython-312.pyc +0 -0
- package/src/zexus/compiler/bytecode.py +266 -0
- package/src/zexus/compiler/compat_runtime.py +277 -0
- package/src/zexus/compiler/lexer.py +257 -0
- package/src/zexus/compiler/parser.py +779 -0
- package/src/zexus/compiler/semantic.py +118 -0
- package/src/zexus/compiler/zexus_ast.py +454 -0
- package/src/zexus/complexity_system.py +575 -0
- package/src/zexus/concurrency_system.py +493 -0
- package/src/zexus/config.py +201 -0
- package/src/zexus/crypto_bridge.py +19 -0
- package/src/zexus/dependency_injection.py +423 -0
- package/src/zexus/ecosystem.py +434 -0
- package/src/zexus/environment.py +101 -0
- package/src/zexus/environment_manager.py +119 -0
- package/src/zexus/error_reporter.py +314 -0
- package/src/zexus/evaluator/__init__.py +12 -0
- package/src/zexus/evaluator/__pycache__/__init__.cpython-312.pyc +0 -0
- package/src/zexus/evaluator/__pycache__/bytecode_compiler.cpython-312.pyc +0 -0
- package/src/zexus/evaluator/__pycache__/core.cpython-312.pyc +0 -0
- package/src/zexus/evaluator/__pycache__/expressions.cpython-312.pyc +0 -0
- package/src/zexus/evaluator/__pycache__/functions.cpython-312.pyc +0 -0
- package/src/zexus/evaluator/__pycache__/integration.cpython-312.pyc +0 -0
- package/src/zexus/evaluator/__pycache__/statements.cpython-312.pyc +0 -0
- package/src/zexus/evaluator/__pycache__/utils.cpython-312.pyc +0 -0
- package/src/zexus/evaluator/bytecode_compiler.py +700 -0
- package/src/zexus/evaluator/core.py +891 -0
- package/src/zexus/evaluator/expressions.py +827 -0
- package/src/zexus/evaluator/functions.py +3989 -0
- package/src/zexus/evaluator/integration.py +396 -0
- package/src/zexus/evaluator/statements.py +4303 -0
- package/src/zexus/evaluator/utils.py +126 -0
- package/src/zexus/evaluator_original.py +2041 -0
- package/src/zexus/external_bridge.py +16 -0
- package/src/zexus/find_affected_imports.sh +155 -0
- package/src/zexus/hybrid_orchestrator.py +152 -0
- package/src/zexus/input_validation.py +259 -0
- package/src/zexus/lexer.py +571 -0
- package/src/zexus/logging.py +89 -0
- package/src/zexus/lsp/__init__.py +9 -0
- package/src/zexus/lsp/completion_provider.py +207 -0
- package/src/zexus/lsp/definition_provider.py +22 -0
- package/src/zexus/lsp/hover_provider.py +71 -0
- package/src/zexus/lsp/server.py +269 -0
- package/src/zexus/lsp/symbol_provider.py +31 -0
- package/src/zexus/metaprogramming.py +321 -0
- package/src/zexus/module_cache.py +89 -0
- package/src/zexus/module_manager.py +107 -0
- package/src/zexus/object.py +973 -0
- package/src/zexus/optimization.py +424 -0
- package/src/zexus/parser/__init__.py +31 -0
- package/src/zexus/parser/__pycache__/__init__.cpython-312.pyc +0 -0
- package/src/zexus/parser/__pycache__/parser.cpython-312.pyc +0 -0
- package/src/zexus/parser/__pycache__/strategy_context.cpython-312.pyc +0 -0
- package/src/zexus/parser/__pycache__/strategy_structural.cpython-312.pyc +0 -0
- package/src/zexus/parser/integration.py +86 -0
- package/src/zexus/parser/parser.py +3977 -0
- package/src/zexus/parser/strategy_context.py +7254 -0
- package/src/zexus/parser/strategy_structural.py +1033 -0
- package/src/zexus/persistence.py +391 -0
- package/src/zexus/plugin_system.py +290 -0
- package/src/zexus/policy_engine.py +365 -0
- package/src/zexus/profiler/__init__.py +5 -0
- package/src/zexus/profiler/profiler.py +233 -0
- package/src/zexus/purity_system.py +398 -0
- package/src/zexus/runtime/__init__.py +20 -0
- package/src/zexus/runtime/async_runtime.py +324 -0
- package/src/zexus/search_old_imports.sh +65 -0
- package/src/zexus/security.py +1407 -0
- package/src/zexus/stack_trace.py +233 -0
- package/src/zexus/stdlib/__init__.py +27 -0
- package/src/zexus/stdlib/blockchain.py +341 -0
- package/src/zexus/stdlib/compression.py +167 -0
- package/src/zexus/stdlib/crypto.py +124 -0
- package/src/zexus/stdlib/datetime.py +163 -0
- package/src/zexus/stdlib/db_mongo.py +199 -0
- package/src/zexus/stdlib/db_mysql.py +162 -0
- package/src/zexus/stdlib/db_postgres.py +163 -0
- package/src/zexus/stdlib/db_sqlite.py +133 -0
- package/src/zexus/stdlib/encoding.py +230 -0
- package/src/zexus/stdlib/fs.py +195 -0
- package/src/zexus/stdlib/http.py +219 -0
- package/src/zexus/stdlib/http_server.py +248 -0
- package/src/zexus/stdlib/json_module.py +61 -0
- package/src/zexus/stdlib/math.py +360 -0
- package/src/zexus/stdlib/os_module.py +265 -0
- package/src/zexus/stdlib/regex.py +148 -0
- package/src/zexus/stdlib/sockets.py +253 -0
- package/src/zexus/stdlib/test_framework.zx +208 -0
- package/src/zexus/stdlib/test_runner.zx +119 -0
- package/src/zexus/stdlib_integration.py +341 -0
- package/src/zexus/strategy_recovery.py +256 -0
- package/src/zexus/syntax_validator.py +356 -0
- package/src/zexus/testing/zpics.py +407 -0
- package/src/zexus/testing/zpics_runtime.py +369 -0
- package/src/zexus/type_system.py +374 -0
- package/src/zexus/validation_system.py +569 -0
- package/src/zexus/virtual_filesystem.py +355 -0
- package/src/zexus/vm/__init__.py +8 -0
- package/src/zexus/vm/__pycache__/__init__.cpython-312.pyc +0 -0
- package/src/zexus/vm/__pycache__/async_optimizer.cpython-312.pyc +0 -0
- package/src/zexus/vm/__pycache__/bytecode.cpython-312.pyc +0 -0
- package/src/zexus/vm/__pycache__/cache.cpython-312.pyc +0 -0
- package/src/zexus/vm/__pycache__/jit.cpython-312.pyc +0 -0
- package/src/zexus/vm/__pycache__/memory_manager.cpython-312.pyc +0 -0
- package/src/zexus/vm/__pycache__/memory_pool.cpython-312.pyc +0 -0
- package/src/zexus/vm/__pycache__/optimizer.cpython-312.pyc +0 -0
- package/src/zexus/vm/__pycache__/parallel_vm.cpython-312.pyc +0 -0
- package/src/zexus/vm/__pycache__/peephole_optimizer.cpython-312.pyc +0 -0
- package/src/zexus/vm/__pycache__/profiler.cpython-312.pyc +0 -0
- package/src/zexus/vm/__pycache__/register_allocator.cpython-312.pyc +0 -0
- package/src/zexus/vm/__pycache__/register_vm.cpython-312.pyc +0 -0
- package/src/zexus/vm/__pycache__/ssa_converter.cpython-312.pyc +0 -0
- package/src/zexus/vm/__pycache__/vm.cpython-312.pyc +0 -0
- package/src/zexus/vm/async_optimizer.py +420 -0
- package/src/zexus/vm/bytecode.py +428 -0
- package/src/zexus/vm/bytecode_converter.py +297 -0
- package/src/zexus/vm/cache.py +532 -0
- package/src/zexus/vm/jit.py +720 -0
- package/src/zexus/vm/memory_manager.py +520 -0
- package/src/zexus/vm/memory_pool.py +511 -0
- package/src/zexus/vm/optimizer.py +478 -0
- package/src/zexus/vm/parallel_vm.py +899 -0
- package/src/zexus/vm/peephole_optimizer.py +452 -0
- package/src/zexus/vm/profiler.py +527 -0
- package/src/zexus/vm/register_allocator.py +462 -0
- package/src/zexus/vm/register_vm.py +520 -0
- package/src/zexus/vm/ssa_converter.py +757 -0
- package/src/zexus/vm/vm.py +1392 -0
- package/src/zexus/zexus_ast.py +1782 -0
- package/src/zexus/zexus_token.py +253 -0
- package/src/zexus/zpm/__init__.py +15 -0
- package/src/zexus/zpm/installer.py +116 -0
- package/src/zexus/zpm/package_manager.py +208 -0
- package/src/zexus/zpm/publisher.py +98 -0
- package/src/zexus/zpm/registry.py +110 -0
- package/src/zexus.egg-info/PKG-INFO +2235 -0
- package/src/zexus.egg-info/SOURCES.txt +876 -0
- package/src/zexus.egg-info/dependency_links.txt +1 -0
- package/src/zexus.egg-info/entry_points.txt +3 -0
- package/src/zexus.egg-info/not-zip-safe +1 -0
- package/src/zexus.egg-info/requires.txt +14 -0
- package/src/zexus.egg-info/top_level.txt +2 -0
- package/zexus.json +14 -0
|
@@ -0,0 +1,493 @@
|
|
|
1
|
+
"""
|
|
2
|
+
Concurrency & Performance System for Zexus Interpreter
|
|
3
|
+
|
|
4
|
+
Provides channels for message passing, atomic operations for safe concurrent access,
|
|
5
|
+
and support for async/await patterns. Designed for safe, race-free concurrent programming.
|
|
6
|
+
"""
|
|
7
|
+
|
|
8
|
+
from typing import Dict, List, Any, Optional, Generic, TypeVar
|
|
9
|
+
from dataclasses import dataclass, field
|
|
10
|
+
from enum import Enum
|
|
11
|
+
from threading import Lock, Condition, Event
|
|
12
|
+
import queue
|
|
13
|
+
import time
|
|
14
|
+
|
|
15
|
+
T = TypeVar('T')
|
|
16
|
+
|
|
17
|
+
# Sentinel value to signal channel is closed
|
|
18
|
+
class _ChannelClosedSentinel:
    """Marker type enqueued on close() so blocked receivers wake up and
    observe the closed channel instead of waiting for their timeout."""


# Single shared instance; receivers compare by isinstance, so one is enough.
_CHANNEL_CLOSED_SENTINEL = _ChannelClosedSentinel()
|
|
23
|
+
|
|
24
|
+
|
|
25
|
+
class ChannelMode(Enum):
    """Communication mode of a channel."""

    # Rendezvous semantics: a send completes only when a receiver is ready.
    UNBUFFERED = "unbuffered"
    # Backed by an internal queue with a fixed capacity.
    BUFFERED = "buffered"
    # Terminal state: no further communication is permitted.
    CLOSED = "closed"
|
|
30
|
+
|
|
31
|
+
|
|
32
|
+
@dataclass
class Channel(Generic[T]):
    """
    Type-safe message passing channel

    Supports:
    - Unbuffered channels (synchronization point)
    - Buffered channels (queue with capacity)
    - Non-blocking sends/receives
    - Close semantics

    Example:
        channel<integer> numbers;
        send(numbers, 42);
        value = receive(numbers);
    """

    name: str
    element_type: Optional[str] = None  # element type name, used for diagnostics only
    capacity: int = 0  # 0 = unbuffered
    _queue: queue.Queue = field(default_factory=queue.Queue)
    _closed: bool = field(default=False)
    _lock: Lock = field(default_factory=Lock)
    _send_ready: Condition = field(default=None)
    _recv_ready: Condition = field(default=None)
    _closed_event: Event = field(default_factory=Event)

    def __post_init__(self):
        # Replace the default (unbounded) queue with a bounded one for
        # buffered channels; unbuffered channels keep an unbounded queue.
        if self.capacity > 0:
            self._queue = queue.Queue(maxsize=self.capacity)
        else:
            self._queue = queue.Queue()
        # Condition variables share the channel lock so notify/wait pair correctly.
        self._send_ready = Condition(self._lock)
        self._recv_ready = Condition(self._lock)

    @property
    def is_open(self) -> bool:
        """Check if channel is open"""
        with self._lock:
            return not self._closed

    def send(self, value: T, timeout: Optional[float] = None) -> bool:
        """
        Send value to channel

        Args:
            value: Value to send
            timeout: Maximum wait time (None = infinite)

        Returns:
            True if sent

        Raises:
            RuntimeError: If channel is closed, the buffer is full, or the
                timeout elapses before the value could be enqueued
        """
        # Check if closed (with lock)
        with self._lock:
            if self._closed:
                raise RuntimeError(f"Cannot send on closed channel '{self.name}'")

        # Send without holding the channel lock (queue.Queue is thread-safe).
        try:
            # BUG FIX: the original if/else had byte-identical put() calls in
            # both branches; a single call covers buffered and unbuffered cases.
            self._queue.put(value, timeout=timeout)
        except queue.Full:
            # BUG FIX: a bounded put() signals a *timeout* by raising Full;
            # it never raises Empty (the original caught Empty here and so
            # reported every timeout as "buffer full").
            if timeout is not None:
                raise RuntimeError(f"Timeout sending to channel '{self.name}'")
            raise RuntimeError(f"Channel '{self.name}' buffer full")

        # Notify receiver (with lock)
        with self._lock:
            self._recv_ready.notify()
        return True

    def receive(self, timeout: Optional[float] = None) -> Optional[T]:
        """
        Receive value from channel (blocking)

        Args:
            timeout: Maximum wait time (None = infinite)

        Returns:
            Received value or None if channel closed and empty

        Raises:
            RuntimeError: On timeout while the channel is still open
        """
        # Check if closed first (with lock)
        with self._lock:
            if self._closed and self._queue.empty():
                return None

        # Receive without holding lock (queue.Queue is thread-safe)
        try:
            value = self._queue.get(timeout=timeout)
        except queue.Empty:
            # Check if closed (with lock)
            with self._lock:
                if self._closed:
                    return None
            raise RuntimeError(f"Timeout receiving from channel '{self.name}'")

        if isinstance(value, _ChannelClosedSentinel):
            # BUG FIX: re-queue the sentinel so every *other* blocked receiver
            # also wakes up and observes the close; the original consumed it,
            # leaving additional receivers to block until their timeout.
            try:
                self._queue.put_nowait(value)
            except queue.Full:
                pass
            return None

        # Notify sender (with lock)
        with self._lock:
            self._send_ready.notify()
        return value

    def close(self):
        """Close channel - no more sends/receives allowed"""
        with self._lock:
            self._closed = True
            self._closed_event.set()
            # BUG FIX: always enqueue the wake-up sentinel. Receivers block
            # inside queue.get(), which waits on the queue's *internal*
            # condition, so notifying self._recv_ready alone (the original
            # behaviour for unbuffered channels) never wakes them and they
            # would sit there until their timeout expired.
            try:
                self._queue.put_nowait(_CHANNEL_CLOSED_SENTINEL)
            except queue.Full:
                # Buffer is full: receivers will drain it and then observe
                # the _closed flag, so nothing more is needed.
                pass
            self._recv_ready.notify_all()
            self._send_ready.notify_all()

    def __repr__(self) -> str:
        mode = f"buffered({self.capacity})" if self.capacity > 0 else "unbuffered"
        status = "closed" if self._closed else "open"
        return f"Channel<{self.element_type}>({self.name}, {mode}, {status})"
|
|
171
|
+
|
|
172
|
+
|
|
173
|
+
@dataclass
class Atomic:
    """
    Mutex-protected critical-section wrapper.

    Guarantees that code executed through it cannot interleave with other
    concurrent accesses using the same Atomic instance. Intended for short
    critical sections.

    Example:
        atomic(counter = counter + 1);

        atomic {
            x = x + 1;
            y = y + 1;
        };
    """

    _lock: Lock = field(default_factory=Lock)
    _depth: int = field(default=0)  # how many times the lock is logically held

    def execute(self, operation, *args, **kwargs):
        """Run *operation(*args, **kwargs)* while holding the lock.

        Returns:
            Whatever *operation* returns.
        """
        self._lock.acquire()
        self._depth += 1
        try:
            return operation(*args, **kwargs)
        finally:
            self._depth -= 1
            self._lock.release()

    def acquire(self):
        """Manually take the lock and bump the depth counter."""
        self._lock.acquire()
        self._depth += 1

    def release(self):
        """Manually drop the lock, decrementing the depth counter first."""
        if self._depth:
            self._depth -= 1
        self._lock.release()

    def is_locked(self) -> bool:
        """True while the lock is logically held (depth above zero)."""
        return bool(self._depth)
|
|
226
|
+
|
|
227
|
+
|
|
228
|
+
@dataclass
class WaitGroup:
    """
    Wait group for synchronizing multiple async operations

    Similar to Go's sync.WaitGroup - allows waiting for a collection
    of tasks to complete. Useful for coordinating producer-consumer patterns.

    Example:
        let wg = wait_group()
        wg.add(2)          # Expecting 2 tasks
        # ... each task calls wg.done() when finished ...
        wg.wait()          # Blocks until the counter returns to zero
    """
    _count: int = field(default=0)                    # outstanding task count
    _lock: Lock = field(default_factory=Lock)
    _zero_event: Event = field(default_factory=Event)  # set while count == 0

    def __post_init__(self):
        # Counter starts at zero, so wait() must not block yet.
        self._zero_event.set()

    def add(self, delta: int = 1):
        """Add delta to the wait group counter.

        Raises:
            ValueError: if the counter would drop below zero. BUG FIX: the
                original updated self._count *before* validating, so a
                surplus done() raised but also left the counter corrupted
                (negative); now the state is untouched on error.
        """
        with self._lock:
            new_count = self._count + delta
            if new_count < 0:
                raise ValueError("WaitGroup counter cannot be negative")
            self._count = new_count
            if new_count == 0:
                self._zero_event.set()
            else:
                self._zero_event.clear()

    def done(self):
        """Decrement the wait group counter by 1"""
        self.add(-1)

    def wait(self, timeout: Optional[float] = None) -> bool:
        """
        Wait until the counter reaches zero

        Args:
            timeout: Maximum wait time in seconds (None = infinite)

        Returns:
            True if counter reached zero, False if timeout
        """
        return self._zero_event.wait(timeout=timeout)

    def count(self) -> int:
        """Get current counter value"""
        with self._lock:
            return self._count
|
|
293
|
+
|
|
294
|
+
|
|
295
|
+
@dataclass
class Barrier:
    """
    Synchronization barrier for coordinating multiple tasks

    Allows multiple tasks to wait at a barrier point until all have arrived.
    Once all parties arrive, all are released simultaneously.

    Example:
        let barrier = barrier(2) # Wait for 2 tasks

        async action task1() {
            # ... phase 1 work ...
            barrier.wait() # Wait for task2
            # ... phase 2 work ...
        }
    """
    parties: int  # Number of tasks that must call wait()
    _count: int = field(default=0)        # tasks currently waiting at the barrier
    _generation: int = field(default=0)   # increments every time the barrier trips
    _lock: Lock = field(default_factory=Lock)
    _condition: Condition = field(default=None)

    def __post_init__(self):
        if self.parties <= 0:
            raise ValueError("Barrier parties must be positive")
        if self._condition is None:
            self._condition = Condition(self._lock)

    def wait(self, timeout: Optional[float] = None) -> int:
        """
        Wait at the barrier until all parties arrive

        Args:
            timeout: Maximum wait time in seconds (None = infinite)

        Returns:
            Barrier generation number (increments each cycle)

        Raises:
            RuntimeError: On timeout
        """
        with self._condition:
            generation = self._generation
            self._count += 1

            if self._count == self.parties:
                # Last one to arrive - release all
                self._count = 0
                self._generation += 1
                self._condition.notify_all()
                return generation

            # BUG FIX: use an absolute deadline so a spurious wakeup does not
            # restart the full timeout on every loop iteration.
            deadline = None if timeout is None else time.monotonic() + timeout
            while generation == self._generation:
                remaining = None
                if deadline is not None:
                    remaining = deadline - time.monotonic()
                    if remaining <= 0:
                        raise RuntimeError(
                            f"Barrier timeout waiting for {self.parties - self._count} more tasks")
                if not self._condition.wait(timeout=remaining):
                    raise RuntimeError(
                        f"Barrier timeout waiting for {self.parties - self._count} more tasks")
            return generation

    def reset(self):
        """Reset the barrier to initial state"""
        with self._condition:
            self._count = 0
            self._generation += 1
            self._condition.notify_all()

    def __repr__(self) -> str:
        # BUG FIX: the original repr was copy-pasted from Atomic and read
        # self._depth / self.is_locked(), neither of which Barrier defines,
        # so repr(barrier) raised AttributeError.
        return (f"Barrier(parties={self.parties}, waiting={self._count}, "
                f"generation={self._generation})")
|
|
369
|
+
|
|
370
|
+
|
|
371
|
+
class ConcurrencyManager:
    """
    Central manager for all concurrency operations

    Tracks every channel and atomic region created by the interpreter and
    exposes aggregate statistics about them.
    """

    def __init__(self):
        self.channels: Dict[str, Channel] = {}
        self.atomics: Dict[str, Atomic] = {}
        self._lock = Lock()
        self._tasks: List[Any] = []
        self._completed_count = 0

    def create_channel(self, name: str, element_type: Optional[str] = None,
                       capacity: int = 0) -> Channel:
        """Create and register a new channel.

        Args:
            name: Channel name (must be unique)
            element_type: Type of elements (for validation)
            capacity: Buffer capacity (0 = unbuffered)

        Returns:
            The newly created channel.

        Raises:
            ValueError: when a channel with this name already exists.
        """
        with self._lock:
            if name in self.channels:
                raise ValueError(f"Channel '{name}' already exists")
            new_channel = Channel(name=name, element_type=element_type,
                                  capacity=capacity)
            self.channels[name] = new_channel
            return new_channel

    def get_channel(self, name: str) -> Optional[Channel]:
        """Look up a registered channel; None when absent."""
        with self._lock:
            return self.channels.get(name)

    def create_atomic(self, name: str) -> Atomic:
        """Return the atomic region registered under *name*, creating it on
        first use (get-or-create semantics)."""
        with self._lock:
            existing = self.atomics.get(name)
            if existing is not None:
                return existing
            region = Atomic()
            self.atomics[name] = region
            return region

    def close_all_channels(self):
        """Close every channel that is still open."""
        with self._lock:
            for ch in self.channels.values():
                if ch.is_open:
                    ch.close()

    def statistics(self) -> Dict[str, Any]:
        """Snapshot of channel/atomic/task counters."""
        with self._lock:
            open_count = 0
            for ch in self.channels.values():
                if ch.is_open:
                    open_count += 1
            return {
                "channels_created": len(self.channels),
                "channels_open": open_count,
                "atomics_created": len(self.atomics),
                "tasks_total": len(self._tasks),
                "tasks_completed": self._completed_count,
            }

    def __repr__(self) -> str:
        stats = self.statistics()
        return (
            f"ConcurrencyManager("
            f"channels={stats['channels_open']}/{stats['channels_created']}, "
            f"atomics={stats['atomics_created']}, "
            f"tasks={stats['tasks_completed']}/{stats['tasks_total']})"
        )
|
|
469
|
+
|
|
470
|
+
|
|
471
|
+
# Process-wide singleton, lazily constructed by get_concurrency_manager().
_concurrency_manager: Optional[ConcurrencyManager] = None


def get_concurrency_manager() -> ConcurrencyManager:
    """Return the global ConcurrencyManager, creating it on first use.

    Returns:
        The ConcurrencyManager singleton.
    """
    global _concurrency_manager
    if _concurrency_manager is None:
        _concurrency_manager = ConcurrencyManager()
    return _concurrency_manager


def reset_concurrency_manager():
    """Swap in a fresh global manager, closing the old one's channels first.

    Intended for use by the test suite.
    """
    global _concurrency_manager
    if _concurrency_manager is not None:
        _concurrency_manager.close_all_channels()
    _concurrency_manager = ConcurrencyManager()
|
|
@@ -0,0 +1,201 @@
|
|
|
1
|
+
"""
|
|
2
|
+
Configuration manager for Zexus interpreter.
|
|
3
|
+
Provides per-user persistent config stored at ~/.zexus/config.json
|
|
4
|
+
Exports a `config` singleton with convenient helpers.
|
|
5
|
+
"""
|
|
6
|
+
|
|
7
|
+
import os
|
|
8
|
+
import json
|
|
9
|
+
from pathlib import Path
|
|
10
|
+
from datetime import datetime, timezone
|
|
11
|
+
|
|
12
|
+
# Shape of the per-user config persisted at ~/.zexus/config.json.
# On load, an existing file is deep-merged over these defaults (see
# Config._merge_dicts), so new keys added here appear for old installs.
DEFAULT_CONFIG = {
    "debug": {
        "enabled": False,
        "level": "none", # none, minimal, full
        "last_updated": None  # ISO-8601 UTC timestamp, refreshed on each write
    },
    "user_preferences": {
        "show_warnings": True,
        "color_output": True,
        "max_output_lines": 1000
    }
}

# Backwards-compatible runtime settings expected by earlier modules.
# Config.__init__ copies any missing keys from here into the 'runtime'
# section of the loaded config.
DEFAULT_RUNTIME = {
    'syntax_style': 'auto',
    'enable_advanced_parsing': True,
    'enable_debug_logs': False,
    'enable_parser_debug': False, # OPTIMIZATION: Disable parser debug output for speed
    # Legacy runtime flags expected by older modules
    'use_hybrid_compiler': True,
    'fallback_to_interpreter': True,
    'compiler_line_threshold': 100,
    'enable_execution_stats': False,
}
|
|
37
|
+
|
|
38
|
+
|
|
39
|
+
class Config:
    """Persistent Zexus configuration backed by ``~/.zexus/config.json``.

    Loads DEFAULT_CONFIG, merges any on-disk overrides, and exposes
    properties for the debug and legacy runtime settings that older
    modules expect. All disk failures are swallowed deliberately so a
    broken config file never crashes the interpreter.
    """

    def __init__(self):
        self.config_dir = Path.home() / ".zexus"
        self.config_file = self.config_dir / "config.json"
        # BUGFIX: DEFAULT_CONFIG.copy() is a *shallow* copy, so nested
        # dicts (e.g. 'debug') were shared with the module-level default
        # and mutated by _write(). Use a deep copy instead.
        self._data = self._deep_copy(DEFAULT_CONFIG)
        self._ensure_loaded()

        # ensure runtime defaults exist for backward compatibility
        self._data.setdefault('runtime', {})
        for k, v in DEFAULT_RUNTIME.items():
            self._data['runtime'].setdefault(k, v)

    @staticmethod
    def _deep_copy(d):
        """Return a deep copy of a JSON-serializable dict (no aliasing)."""
        return json.loads(json.dumps(d))

    def _ensure_loaded(self):
        """Load config from disk (merging over defaults) or create it."""
        try:
            self.config_dir.mkdir(mode=0o700, exist_ok=True)
            if self.config_file.exists():
                with open(self.config_file, 'r', encoding='utf-8') as f:
                    content = json.load(f)
                # Merge with defaults to keep compatibility
                self._data = self._merge_dicts(DEFAULT_CONFIG, content)
            else:
                self._data = self._deep_copy(DEFAULT_CONFIG)
                self._write()
        except Exception:
            # If anything goes wrong, fall back to defaults in-memory
            self._data = self._deep_copy(DEFAULT_CONFIG)

    def _merge_dicts(self, base, override):
        """Recursively merge *override* into *base*, returning a new dict.

        Nested dicts are merged key-by-key; other values from override
        win. BUGFIX: the result no longer aliases base's nested dicts
        (the old ``base.copy()`` shared them when override lacked the
        key), so later mutation cannot corrupt DEFAULT_CONFIG.
        """
        result = {}
        for k, v in base.items():
            # Copy nested dicts so the result never shares state with base.
            result[k] = self._merge_dicts(v, {}) if isinstance(v, dict) else v
        for k, v in override.items():
            if k in result and isinstance(result[k], dict) and isinstance(v, dict):
                result[k] = self._merge_dicts(result[k], v)
            else:
                result[k] = v
        return result

    def _write(self):
        """Persist the current config, stamping debug.last_updated."""
        try:
            self.config_dir.mkdir(mode=0o700, exist_ok=True)
            # BUGFIX: setdefault so a missing 'debug' section cannot raise
            # KeyError and silently abort the whole write.
            self._data.setdefault('debug', {})['last_updated'] = \
                datetime.now(timezone.utc).isoformat()
            with open(self.config_file, 'w', encoding='utf-8') as f:
                json.dump(self._data, f, indent=4)
        except Exception:
            # Fail silently; we do not want to crash the interpreter for config issues
            pass

    # Public API
    @property
    def debug_level(self):
        """Current debug level: 'none', 'minimal' or 'full'."""
        return self._data.get('debug', {}).get('level', 'none')

    @debug_level.setter
    def debug_level(self, value):
        if value not in ('none', 'minimal', 'full'):
            raise ValueError('Invalid debug level')
        self._data.setdefault('debug', {})['level'] = value
        self._data['debug']['enabled'] = (value != 'none')
        self._write()

    def enable_debug(self, level='full'):
        """Turn debugging on at *level* ('minimal' or 'full')."""
        self.debug_level = level

    def disable_debug(self):
        """Turn debugging off entirely."""
        self.debug_level = 'none'

    def is_debug_full(self):
        return self.debug_level == 'full'

    def is_debug_minimal(self):
        return self.debug_level == 'minimal'

    def is_debug_none(self):
        return self.debug_level == 'none'

    # Backwards-compatible properties
    @property
    def syntax_style(self):
        return self._data.get('runtime', {}).get('syntax_style', 'auto')

    @syntax_style.setter
    def syntax_style(self, value):
        self._data.setdefault('runtime', {})['syntax_style'] = value
        self._write()

    @property
    def enable_advanced_parsing(self):
        return self._data.get('runtime', {}).get('enable_advanced_parsing', True)

    @enable_advanced_parsing.setter
    def enable_advanced_parsing(self, value):
        self._data.setdefault('runtime', {})['enable_advanced_parsing'] = bool(value)
        self._write()

    @property
    def enable_debug_logs(self):
        # Map legacy flag to debug level
        return self.debug_level != 'none'

    @enable_debug_logs.setter
    def enable_debug_logs(self, value):
        if value:
            # Only bump up from 'none'; never downgrade 'full' to 'minimal'.
            if self.debug_level == 'none':
                self.debug_level = 'minimal'
        else:
            self.debug_level = 'none'

    # Legacy runtime properties
    @property
    def use_hybrid_compiler(self):
        return self._data.get('runtime', {}).get('use_hybrid_compiler', True)

    @use_hybrid_compiler.setter
    def use_hybrid_compiler(self, value):
        self._data.setdefault('runtime', {})['use_hybrid_compiler'] = bool(value)
        self._write()

    @property
    def fallback_to_interpreter(self):
        return self._data.get('runtime', {}).get('fallback_to_interpreter', True)

    @fallback_to_interpreter.setter
    def fallback_to_interpreter(self, value):
        self._data.setdefault('runtime', {})['fallback_to_interpreter'] = bool(value)
        self._write()

    @property
    def compiler_line_threshold(self):
        return int(self._data.get('runtime', {}).get('compiler_line_threshold', 100))

    @compiler_line_threshold.setter
    def compiler_line_threshold(self, value):
        try:
            v = int(value)
        except Exception:
            v = 100  # non-numeric input falls back to the default
        self._data.setdefault('runtime', {})['compiler_line_threshold'] = v
        self._write()

    @property
    def enable_execution_stats(self):
        return bool(self._data.get('runtime', {}).get('enable_execution_stats', False))

    @enable_execution_stats.setter
    def enable_execution_stats(self, value):
        self._data.setdefault('runtime', {})['enable_execution_stats'] = bool(value)
        self._write()

    # Helper logging function used by modules
    def should_log(self, level='debug'):
        """Decide whether to emit a log of a particular level.

        Levels: 'debug' (very verbose), 'info' (useful info), 'warn', 'error'
        """
        dl = self.debug_level
        if dl == 'full':
            return True
        if dl == 'minimal':
            return level in ('error', 'warn', 'info')
        # none
        return level in ('error',)
|
|
198
|
+
|
|
199
|
+
|
|
200
|
+
# Singleton: module-level shared Config instance; other modules import
# this rather than constructing their own (keeps one on-disk file view).
config = Config()
|
|
@@ -0,0 +1,19 @@
|
|
|
1
|
+
import hashlib
|
|
2
|
+
|
|
3
|
+
def sha256_hash(data):
    """Return the hex SHA-256 digest of *data*.

    Generalized: accepts ``str`` (encoded as UTF-8, matching the old
    default-``encode()`` behavior) or bytes-like input; the original
    crashed on bytes because it always called ``.encode()``.
    """
    if isinstance(data, (bytes, bytearray, memoryview)):
        payload = bytes(data)
    else:
        payload = data.encode('utf-8')
    return hashlib.sha256(payload).hexdigest()
|
|
5
|
+
|
|
6
|
+
def generate_sphincs_keypair():
    """Generate a SPHINCS+ keypair.

    Placeholder - will implement real SPHINCS+ later; returns fixed
    stand-in key strings.
    """
    public_key = "sphincs_public_key_placeholder"
    private_key = "sphincs_private_key_placeholder"
    return {"public_key": public_key, "private_key": private_key}
|
|
12
|
+
|
|
13
|
+
def sphincs_sign(message, private_key):
    """Sign *message* with a SPHINCS+ private key.

    Placeholder stub: ignores its arguments and returns a fixed
    stand-in signature string.
    """
    signature = "sphincs_signature_placeholder"
    return signature
|
|
16
|
+
|
|
17
|
+
def sphincs_verify(message, signature, public_key):
    """Verify a SPHINCS+ signature against *message* and *public_key*.

    Placeholder stub: performs no real verification and always reports
    success.
    """
    _ = (message, signature, public_key)  # unused in the stub
    return True
|